/* tinycc tccgen.c — extracted from a gitweb blob view
   (commit subject: "x86asm: Add lzcnt/tzcnt support",
   blob 28924fd7b1606cbb8d28f612662d18c9ec7868d5) */
/*
 *  TCC - Tiny C Compiler
 *
 *  Copyright (c) 2001-2004 Fabrice Bellard
 *
 *  This library is free software; you can redistribute it and/or
 *  modify it under the terms of the GNU Lesser General Public
 *  License as published by the Free Software Foundation; either
 *  version 2 of the License, or (at your option) any later version.
 *
 *  This library is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 *  Lesser General Public License for more details.
 *
 *  You should have received a copy of the GNU Lesser General Public
 *  License along with this library; if not, write to the Free Software
 *  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 */
21 #define USING_GLOBALS
22 #include "tcc.h"
24 /********************************************************/
25 /* global variables */
27 /* loc : local variable index
28 ind : output code index
29 rsym: return symbol
30 anon_sym: anonymous symbol index
32 ST_DATA int rsym, anon_sym, ind, loc;
34 ST_DATA Sym *global_stack;
35 ST_DATA Sym *local_stack;
36 ST_DATA Sym *define_stack;
37 ST_DATA Sym *global_label_stack;
38 ST_DATA Sym *local_label_stack;
40 static Sym *sym_free_first;
41 static void **sym_pools;
42 static int nb_sym_pools;
44 static Sym *all_cleanups, *pending_gotos;
45 static int local_scope;
46 static int in_sizeof;
47 static int constant_p;
48 ST_DATA char debug_modes;
50 ST_DATA SValue *vtop;
51 static SValue _vstack[1 + VSTACK_SIZE];
52 #define vstack (_vstack + 1)
54 ST_DATA int const_wanted; /* true if constant wanted */
55 ST_DATA int nocode_wanted; /* no code generation wanted */
56 #define unevalmask 0xffff /* unevaluated subexpression */
57 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
58 #define DATA_ONLY_WANTED 0x80000000 /* ON outside of functions and for static initializers */
59 #define CODE_OFF() (nocode_wanted |= 0x20000000)
60 #define CODE_ON() (nocode_wanted &= ~0x20000000)
62 ST_DATA int global_expr; /* true if compound literals must be allocated globally (used during initializers parsing */
63 ST_DATA CType func_vt; /* current function return type (used by return instruction) */
64 ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
65 ST_DATA int func_vc;
66 ST_DATA int func_ind;
67 ST_DATA const char *funcname;
68 ST_DATA CType int_type, func_old_type, char_type, char_pointer_type;
69 static CString initstr;
71 #if PTR_SIZE == 4
72 #define VT_SIZE_T (VT_INT | VT_UNSIGNED)
73 #define VT_PTRDIFF_T VT_INT
74 #elif LONG_SIZE == 4
75 #define VT_SIZE_T (VT_LLONG | VT_UNSIGNED)
76 #define VT_PTRDIFF_T VT_LLONG
77 #else
78 #define VT_SIZE_T (VT_LONG | VT_LLONG | VT_UNSIGNED)
79 #define VT_PTRDIFF_T (VT_LONG | VT_LLONG)
80 #endif
82 static struct switch_t {
83 struct case_t {
84 int64_t v1, v2;
85 int sym;
86 } **p; int n; /* list of case ranges */
87 int def_sym; /* default symbol */
88 int nocode_wanted;
89 int *bsym;
90 struct scope *scope;
91 struct switch_t *prev;
92 SValue sv;
93 } *cur_switch; /* current switch */
95 #define MAX_TEMP_LOCAL_VARIABLE_NUMBER 8
96 /*list of temporary local variables on the stack in current function. */
97 static struct temp_local_variable {
98 int location; //offset on stack. Svalue.c.i
99 short size;
100 short align;
101 } arr_temp_local_vars[MAX_TEMP_LOCAL_VARIABLE_NUMBER];
102 static int nb_temp_local_vars;
104 static struct scope {
105 struct scope *prev;
106 struct { int loc, locorig, num; } vla;
107 struct { Sym *s; int n; } cl;
108 int *bsym, *csym;
109 Sym *lstk, *llstk;
110 } *cur_scope, *loop_scope, *root_scope;
112 typedef struct {
113 Section *sec;
114 int local_offset;
115 Sym *flex_array_ref;
116 } init_params;
118 #if 1
119 #define precedence_parser
120 static void init_prec(void);
121 #endif
123 static void gen_cast(CType *type);
124 static void gen_cast_s(int t);
125 static inline CType *pointed_type(CType *type);
126 static int is_compatible_types(CType *type1, CType *type2);
127 static int parse_btype(CType *type, AttributeDef *ad, int ignore_label);
128 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td);
129 static void parse_expr_type(CType *type);
130 static void init_putv(init_params *p, CType *type, unsigned long c);
131 static void decl_initializer(init_params *p, CType *type, unsigned long c, int flags);
132 static void block(int is_expr);
133 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
134 static int decl(int l);
135 static void expr_eq(void);
136 static void vpush_type_size(CType *type, int *a);
137 static int is_compatible_unqualified_types(CType *type1, CType *type2);
138 static inline int64_t expr_const64(void);
139 static void vpush64(int ty, unsigned long long v);
140 static void vpush(CType *type);
141 static int gvtst(int inv, int t);
142 static void gen_inline_functions(TCCState *s);
143 static void free_inline_functions(TCCState *s);
144 static void skip_or_save_block(TokenString **str);
145 static void gv_dup(void);
146 static int get_temp_local_var(int size,int align);
147 static void clear_temp_local_var_list();
148 static void cast_error(CType *st, CType *dt);
150 /* ------------------------------------------------------------------------- */
151 /* Automagical code suppression */
153 /* Clear 'nocode_wanted' at forward label if it was used */
154 ST_FUNC void gsym(int t)
156 if (t) {
157 gsym_addr(t, ind);
158 CODE_ON();
162 /* Clear 'nocode_wanted' if current pc is a label */
163 static int gind()
165 int t = ind;
166 CODE_ON();
167 if (debug_modes)
168 tcc_tcov_block_begin(tcc_state);
169 return t;
172 /* Set 'nocode_wanted' after unconditional (backwards) jump */
173 static void gjmp_addr_acs(int t)
175 gjmp_addr(t);
176 CODE_OFF();
179 /* Set 'nocode_wanted' after unconditional (forwards) jump */
180 static int gjmp_acs(int t)
182 t = gjmp(t);
183 CODE_OFF();
184 return t;
187 /* These are #undef'd at the end of this file */
188 #define gjmp_addr gjmp_addr_acs
189 #define gjmp gjmp_acs
190 /* ------------------------------------------------------------------------- */
192 ST_INLN int is_float(int t)
194 int bt = t & VT_BTYPE;
195 return bt == VT_LDOUBLE
196 || bt == VT_DOUBLE
197 || bt == VT_FLOAT
198 || bt == VT_QFLOAT;
201 static inline int is_integer_btype(int bt)
203 return bt == VT_BYTE
204 || bt == VT_BOOL
205 || bt == VT_SHORT
206 || bt == VT_INT
207 || bt == VT_LLONG;
210 static int btype_size(int bt)
212 return bt == VT_BYTE || bt == VT_BOOL ? 1 :
213 bt == VT_SHORT ? 2 :
214 bt == VT_INT ? 4 :
215 bt == VT_LLONG ? 8 :
216 bt == VT_PTR ? PTR_SIZE : 0;
219 /* returns function return register from type */
220 static int R_RET(int t)
222 if (!is_float(t))
223 return REG_IRET;
224 #ifdef TCC_TARGET_X86_64
225 if ((t & VT_BTYPE) == VT_LDOUBLE)
226 return TREG_ST0;
227 #elif defined TCC_TARGET_RISCV64
228 if ((t & VT_BTYPE) == VT_LDOUBLE)
229 return REG_IRET;
230 #endif
231 return REG_FRET;
234 /* returns 2nd function return register, if any */
235 static int R2_RET(int t)
237 t &= VT_BTYPE;
238 #if PTR_SIZE == 4
239 if (t == VT_LLONG)
240 return REG_IRE2;
241 #elif defined TCC_TARGET_X86_64
242 if (t == VT_QLONG)
243 return REG_IRE2;
244 if (t == VT_QFLOAT)
245 return REG_FRE2;
246 #elif defined TCC_TARGET_RISCV64
247 if (t == VT_LDOUBLE)
248 return REG_IRE2;
249 #endif
250 return VT_CONST;
253 /* returns true for two-word types */
254 #define USING_TWO_WORDS(t) (R2_RET(t) != VT_CONST)
256 /* put function return registers to stack value */
257 static void PUT_R_RET(SValue *sv, int t)
259 sv->r = R_RET(t), sv->r2 = R2_RET(t);
262 /* returns function return register class for type t */
263 static int RC_RET(int t)
265 return reg_classes[R_RET(t)] & ~(RC_FLOAT | RC_INT);
268 /* returns generic register class for type t */
269 static int RC_TYPE(int t)
271 if (!is_float(t))
272 return RC_INT;
273 #ifdef TCC_TARGET_X86_64
274 if ((t & VT_BTYPE) == VT_LDOUBLE)
275 return RC_ST0;
276 if ((t & VT_BTYPE) == VT_QFLOAT)
277 return RC_FRET;
278 #elif defined TCC_TARGET_RISCV64
279 if ((t & VT_BTYPE) == VT_LDOUBLE)
280 return RC_INT;
281 #endif
282 return RC_FLOAT;
285 /* returns 2nd register class corresponding to t and rc */
286 static int RC2_TYPE(int t, int rc)
288 if (!USING_TWO_WORDS(t))
289 return 0;
290 #ifdef RC_IRE2
291 if (rc == RC_IRET)
292 return RC_IRE2;
293 #endif
294 #ifdef RC_FRE2
295 if (rc == RC_FRET)
296 return RC_FRE2;
297 #endif
298 if (rc & RC_FLOAT)
299 return RC_FLOAT;
300 return RC_INT;
303 /* we use our own 'finite' function to avoid potential problems with
304 non standard math libs */
305 /* XXX: endianness dependent */
306 ST_FUNC int ieee_finite(double d)
308 int p[4];
309 memcpy(p, &d, sizeof(double));
310 return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
/* compiling intel long double natively */
#if (defined __i386__ || defined __x86_64__) \
    && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
# define TCC_IS_NATIVE_387
#endif
319 ST_FUNC void test_lvalue(void)
321 if (!(vtop->r & VT_LVAL))
322 expect("lvalue");
325 ST_FUNC void check_vstack(void)
327 if (vtop != vstack - 1)
328 tcc_error("internal compiler error: vstack leak (%d)",
329 (int)(vtop - vstack + 1));
/* vstack debugging aid */
#if 0
void pv (const char *lbl, int a, int b)
{
    int i;
    for (i = a; i < a + b; ++i) {
        SValue *p = &vtop[-i];
        printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
            lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
    }
}
#endif
345 /* ------------------------------------------------------------------------- */
346 /* initialize vstack and types. This must be done also for tcc -E */
347 ST_FUNC void tccgen_init(TCCState *s1)
349 vtop = vstack - 1;
350 memset(vtop, 0, sizeof *vtop);
352 /* define some often used types */
353 int_type.t = VT_INT;
355 char_type.t = VT_BYTE;
356 if (s1->char_is_unsigned)
357 char_type.t |= VT_UNSIGNED;
358 char_pointer_type = char_type;
359 mk_pointer(&char_pointer_type);
361 func_old_type.t = VT_FUNC;
362 func_old_type.ref = sym_push(SYM_FIELD, &int_type, 0, 0);
363 func_old_type.ref->f.func_call = FUNC_CDECL;
364 func_old_type.ref->f.func_type = FUNC_OLD;
365 #ifdef precedence_parser
366 init_prec();
367 #endif
368 cstr_new(&initstr);
371 ST_FUNC int tccgen_compile(TCCState *s1)
373 cur_text_section = NULL;
374 funcname = "";
375 func_ind = -1;
376 anon_sym = SYM_FIRST_ANOM;
377 const_wanted = 0;
378 nocode_wanted = DATA_ONLY_WANTED; /* no code outside of functions */
379 local_scope = 0;
380 debug_modes = (s1->do_debug ? 1 : 0) | s1->test_coverage << 1;
382 tcc_debug_start(s1);
383 tcc_tcov_start (s1);
384 #ifdef TCC_TARGET_ARM
385 arm_init(s1);
386 #endif
387 #ifdef INC_DEBUG
388 printf("%s: **** new file\n", file->filename);
389 #endif
390 parse_flags = PARSE_FLAG_PREPROCESS | PARSE_FLAG_TOK_NUM | PARSE_FLAG_TOK_STR;
391 next();
392 decl(VT_CONST);
393 gen_inline_functions(s1);
394 check_vstack();
395 /* end of translation unit info */
396 tcc_debug_end(s1);
397 tcc_tcov_end(s1);
398 return 0;
401 ST_FUNC void tccgen_finish(TCCState *s1)
403 cstr_free(&initstr);
404 free_inline_functions(s1);
405 sym_pop(&global_stack, NULL, 0);
406 sym_pop(&local_stack, NULL, 0);
407 /* free preprocessor macros */
408 free_defines(NULL);
409 /* free sym_pools */
410 dynarray_reset(&sym_pools, &nb_sym_pools);
411 sym_free_first = NULL;
414 /* ------------------------------------------------------------------------- */
415 ST_FUNC ElfSym *elfsym(Sym *s)
417 if (!s || !s->c)
418 return NULL;
419 return &((ElfSym *)symtab_section->data)[s->c];
422 /* apply storage attributes to Elf symbol */
423 ST_FUNC void update_storage(Sym *sym)
425 ElfSym *esym;
426 int sym_bind, old_sym_bind;
428 esym = elfsym(sym);
429 if (!esym)
430 return;
432 if (sym->a.visibility)
433 esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1))
434 | sym->a.visibility;
436 if (sym->type.t & (VT_STATIC | VT_INLINE))
437 sym_bind = STB_LOCAL;
438 else if (sym->a.weak)
439 sym_bind = STB_WEAK;
440 else
441 sym_bind = STB_GLOBAL;
442 old_sym_bind = ELFW(ST_BIND)(esym->st_info);
443 if (sym_bind != old_sym_bind) {
444 esym->st_info = ELFW(ST_INFO)(sym_bind, ELFW(ST_TYPE)(esym->st_info));
447 #ifdef TCC_TARGET_PE
448 if (sym->a.dllimport)
449 esym->st_other |= ST_PE_IMPORT;
450 if (sym->a.dllexport)
451 esym->st_other |= ST_PE_EXPORT;
452 #endif
454 #if 0
455 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
456 get_tok_str(sym->v, NULL),
457 sym_bind == STB_WEAK ? 'w' : sym_bind == STB_LOCAL ? 'l' : 'g',
458 sym->a.visibility,
459 sym->a.dllexport,
460 sym->a.dllimport
462 #endif
465 /* ------------------------------------------------------------------------- */
466 /* update sym->c so that it points to an external symbol in section
467 'section' with value 'value' */
469 ST_FUNC void put_extern_sym2(Sym *sym, int sh_num,
470 addr_t value, unsigned long size,
471 int can_add_underscore)
473 int sym_type, sym_bind, info, other, t;
474 ElfSym *esym;
475 const char *name;
476 char buf1[256];
478 if (!sym->c) {
479 name = get_tok_str(sym->v, NULL);
480 t = sym->type.t;
481 if ((t & VT_BTYPE) == VT_FUNC) {
482 sym_type = STT_FUNC;
483 } else if ((t & VT_BTYPE) == VT_VOID) {
484 sym_type = STT_NOTYPE;
485 if ((t & (VT_BTYPE|VT_ASM_FUNC)) == VT_ASM_FUNC)
486 sym_type = STT_FUNC;
487 } else {
488 sym_type = STT_OBJECT;
490 if (t & (VT_STATIC | VT_INLINE))
491 sym_bind = STB_LOCAL;
492 else
493 sym_bind = STB_GLOBAL;
494 other = 0;
496 #ifdef TCC_TARGET_PE
497 if (sym_type == STT_FUNC && sym->type.ref) {
498 Sym *ref = sym->type.ref;
499 if (ref->a.nodecorate) {
500 can_add_underscore = 0;
502 if (ref->f.func_call == FUNC_STDCALL && can_add_underscore) {
503 sprintf(buf1, "_%s@%d", name, ref->f.func_args * PTR_SIZE);
504 name = buf1;
505 other |= ST_PE_STDCALL;
506 can_add_underscore = 0;
509 #endif
511 if (sym->asm_label) {
512 name = get_tok_str(sym->asm_label, NULL);
513 can_add_underscore = 0;
516 if (tcc_state->leading_underscore && can_add_underscore) {
517 buf1[0] = '_';
518 pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
519 name = buf1;
522 info = ELFW(ST_INFO)(sym_bind, sym_type);
523 sym->c = put_elf_sym(symtab_section, value, size, info, other, sh_num, name);
525 if (debug_modes)
526 tcc_debug_extern_sym(tcc_state, sym, sh_num, sym_bind, sym_type);
528 } else {
529 esym = elfsym(sym);
530 esym->st_value = value;
531 esym->st_size = size;
532 esym->st_shndx = sh_num;
534 update_storage(sym);
537 ST_FUNC void put_extern_sym(Sym *sym, Section *s, addr_t value, unsigned long size)
539 if (nocode_wanted && (NODATA_WANTED || (s && s == cur_text_section)))
540 return;
541 put_extern_sym2(sym, s ? s->sh_num : SHN_UNDEF, value, size, 1);
544 /* add a new relocation entry to symbol 'sym' in section 's' */
545 ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
546 addr_t addend)
548 int c = 0;
550 if (nocode_wanted && s == cur_text_section)
551 return;
553 if (sym) {
554 if (0 == sym->c)
555 put_extern_sym(sym, NULL, 0, 0);
556 c = sym->c;
559 /* now we can add ELF relocation info */
560 put_elf_reloca(symtab_section, s, offset, type, c, addend);
563 #if PTR_SIZE == 4
564 ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
566 greloca(s, sym, offset, type, 0);
568 #endif
570 /* ------------------------------------------------------------------------- */
571 /* symbol allocator */
572 static Sym *__sym_malloc(void)
574 Sym *sym_pool, *sym, *last_sym;
575 int i;
577 sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
578 dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);
580 last_sym = sym_free_first;
581 sym = sym_pool;
582 for(i = 0; i < SYM_POOL_NB; i++) {
583 sym->next = last_sym;
584 last_sym = sym;
585 sym++;
587 sym_free_first = last_sym;
588 return last_sym;
591 static inline Sym *sym_malloc(void)
593 Sym *sym;
594 #ifndef SYM_DEBUG
595 sym = sym_free_first;
596 if (!sym)
597 sym = __sym_malloc();
598 sym_free_first = sym->next;
599 return sym;
600 #else
601 sym = tcc_malloc(sizeof(Sym));
602 return sym;
603 #endif
606 ST_INLN void sym_free(Sym *sym)
608 #ifndef SYM_DEBUG
609 sym->next = sym_free_first;
610 sym_free_first = sym;
611 #else
612 tcc_free(sym);
613 #endif
616 /* push, without hashing */
617 ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, int c)
619 Sym *s;
621 s = sym_malloc();
622 memset(s, 0, sizeof *s);
623 s->v = v;
624 s->type.t = t;
625 s->c = c;
626 /* add in stack */
627 s->prev = *ps;
628 *ps = s;
629 return s;
632 /* find a symbol and return its associated structure. 's' is the top
633 of the symbol stack */
634 ST_FUNC Sym *sym_find2(Sym *s, int v)
636 while (s) {
637 if (s->v == v)
638 return s;
639 else if (s->v == -1)
640 return NULL;
641 s = s->prev;
643 return NULL;
646 /* structure lookup */
647 ST_INLN Sym *struct_find(int v)
649 v -= TOK_IDENT;
650 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
651 return NULL;
652 return table_ident[v]->sym_struct;
655 /* find an identifier */
656 ST_INLN Sym *sym_find(int v)
658 v -= TOK_IDENT;
659 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
660 return NULL;
661 return table_ident[v]->sym_identifier;
664 static int sym_scope(Sym *s)
666 if (IS_ENUM_VAL (s->type.t))
667 return s->type.ref->sym_scope;
668 else
669 return s->sym_scope;
672 /* push a given symbol on the symbol stack */
673 ST_FUNC Sym *sym_push(int v, CType *type, int r, int c)
675 Sym *s, **ps;
676 TokenSym *ts;
678 if (local_stack)
679 ps = &local_stack;
680 else
681 ps = &global_stack;
682 s = sym_push2(ps, v, type->t, c);
683 s->type.ref = type->ref;
684 s->r = r;
685 /* don't record fields or anonymous symbols */
686 /* XXX: simplify */
687 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
688 /* record symbol in token array */
689 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
690 if (v & SYM_STRUCT)
691 ps = &ts->sym_struct;
692 else
693 ps = &ts->sym_identifier;
694 s->prev_tok = *ps;
695 *ps = s;
696 s->sym_scope = local_scope;
697 if (s->prev_tok && sym_scope(s->prev_tok) == s->sym_scope)
698 tcc_error("redeclaration of '%s'",
699 get_tok_str(v & ~SYM_STRUCT, NULL));
701 return s;
704 /* push a global identifier */
705 ST_FUNC Sym *global_identifier_push(int v, int t, int c)
707 Sym *s, **ps;
708 s = sym_push2(&global_stack, v, t, c);
709 s->r = VT_CONST | VT_SYM;
710 /* don't record anonymous symbol */
711 if (v < SYM_FIRST_ANOM) {
712 ps = &table_ident[v - TOK_IDENT]->sym_identifier;
713 /* modify the top most local identifier, so that sym_identifier will
714 point to 's' when popped; happens when called from inline asm */
715 while (*ps != NULL && (*ps)->sym_scope)
716 ps = &(*ps)->prev_tok;
717 s->prev_tok = *ps;
718 *ps = s;
720 return s;
723 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
724 pop them yet from the list, but do remove them from the token array. */
725 ST_FUNC void sym_pop(Sym **ptop, Sym *b, int keep)
727 Sym *s, *ss, **ps;
728 TokenSym *ts;
729 int v;
731 s = *ptop;
732 while(s != b) {
733 ss = s->prev;
734 v = s->v;
735 /* remove symbol in token array */
736 /* XXX: simplify */
737 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
738 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
739 if (v & SYM_STRUCT)
740 ps = &ts->sym_struct;
741 else
742 ps = &ts->sym_identifier;
743 *ps = s->prev_tok;
745 if (!keep)
746 sym_free(s);
747 s = ss;
749 if (!keep)
750 *ptop = b;
753 /* ------------------------------------------------------------------------- */
754 static void vcheck_cmp(void)
756 /* cannot let cpu flags if other instruction are generated. Also
757 avoid leaving VT_JMP anywhere except on the top of the stack
758 because it would complicate the code generator.
760 Don't do this when nocode_wanted. vtop might come from
761 !nocode_wanted regions (see 88_codeopt.c) and transforming
762 it to a register without actually generating code is wrong
763 as their value might still be used for real. All values
764 we push under nocode_wanted will eventually be popped
765 again, so that the VT_CMP/VT_JMP value will be in vtop
766 when code is unsuppressed again. */
768 if (vtop->r == VT_CMP && !nocode_wanted)
769 gv(RC_INT);
772 static void vsetc(CType *type, int r, CValue *vc)
774 if (vtop >= vstack + (VSTACK_SIZE - 1))
775 tcc_error("memory full (vstack)");
776 vcheck_cmp();
777 vtop++;
778 vtop->type = *type;
779 vtop->r = r;
780 vtop->r2 = VT_CONST;
781 vtop->c = *vc;
782 vtop->sym = NULL;
785 ST_FUNC void vswap(void)
787 SValue tmp;
789 vcheck_cmp();
790 tmp = vtop[0];
791 vtop[0] = vtop[-1];
792 vtop[-1] = tmp;
795 /* pop stack value */
796 ST_FUNC void vpop(void)
798 int v;
799 v = vtop->r & VT_VALMASK;
800 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
801 /* for x86, we need to pop the FP stack */
802 if (v == TREG_ST0) {
803 o(0xd8dd); /* fstp %st(0) */
804 } else
805 #endif
806 if (v == VT_CMP) {
807 /* need to put correct jump if && or || without test */
808 gsym(vtop->jtrue);
809 gsym(vtop->jfalse);
811 vtop--;
814 /* push constant of type "type" with useless value */
815 static void vpush(CType *type)
817 vset(type, VT_CONST, 0);
820 /* push arbitrary 64bit constant */
821 static void vpush64(int ty, unsigned long long v)
823 CValue cval;
824 CType ctype;
825 ctype.t = ty;
826 ctype.ref = NULL;
827 cval.i = v;
828 vsetc(&ctype, VT_CONST, &cval);
831 /* push integer constant */
832 ST_FUNC void vpushi(int v)
834 vpush64(VT_INT, v);
837 /* push a pointer sized constant */
838 static void vpushs(addr_t v)
840 vpush64(VT_SIZE_T, v);
843 /* push long long constant */
844 static inline void vpushll(long long v)
846 vpush64(VT_LLONG, v);
849 ST_FUNC void vset(CType *type, int r, int v)
851 CValue cval;
852 cval.i = v;
853 vsetc(type, r, &cval);
856 static void vseti(int r, int v)
858 CType type;
859 type.t = VT_INT;
860 type.ref = NULL;
861 vset(&type, r, v);
864 ST_FUNC void vpushv(SValue *v)
866 if (vtop >= vstack + (VSTACK_SIZE - 1))
867 tcc_error("memory full (vstack)");
868 vtop++;
869 *vtop = *v;
872 static void vdup(void)
874 vpushv(vtop);
877 /* rotate n first stack elements to the bottom
878 I1 ... In -> I2 ... In I1 [top is right]
880 ST_FUNC void vrotb(int n)
882 int i;
883 SValue tmp;
885 vcheck_cmp();
886 tmp = vtop[-n + 1];
887 for(i=-n+1;i!=0;i++)
888 vtop[i] = vtop[i+1];
889 vtop[0] = tmp;
892 /* rotate the n elements before entry e towards the top
893 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
895 ST_FUNC void vrote(SValue *e, int n)
897 int i;
898 SValue tmp;
900 vcheck_cmp();
901 tmp = *e;
902 for(i = 0;i < n - 1; i++)
903 e[-i] = e[-i - 1];
904 e[-n + 1] = tmp;
907 /* rotate n first stack elements to the top
908 I1 ... In -> In I1 ... I(n-1) [top is right]
910 ST_FUNC void vrott(int n)
912 vrote(vtop, n);
915 /* ------------------------------------------------------------------------- */
916 /* vtop->r = VT_CMP means CPU-flags have been set from comparison or test. */
918 /* called from generators to set the result from relational ops */
919 ST_FUNC void vset_VT_CMP(int op)
921 vtop->r = VT_CMP;
922 vtop->cmp_op = op;
923 vtop->jfalse = 0;
924 vtop->jtrue = 0;
927 /* called once before asking generators to load VT_CMP to a register */
928 static void vset_VT_JMP(void)
930 int op = vtop->cmp_op;
932 if (vtop->jtrue || vtop->jfalse) {
933 /* we need to jump to 'mov $0,%R' or 'mov $1,%R' */
934 int inv = op & (op < 2); /* small optimization */
935 vseti(VT_JMP+inv, gvtst(inv, 0));
936 } else {
937 /* otherwise convert flags (rsp. 0/1) to register */
938 vtop->c.i = op;
939 if (op < 2) /* doesn't seem to happen */
940 vtop->r = VT_CONST;
944 /* Set CPU Flags, doesn't yet jump */
945 static void gvtst_set(int inv, int t)
947 int *p;
949 if (vtop->r != VT_CMP) {
950 vpushi(0);
951 gen_op(TOK_NE);
952 if (vtop->r != VT_CMP) /* must be VT_CONST then */
953 vset_VT_CMP(vtop->c.i != 0);
956 p = inv ? &vtop->jfalse : &vtop->jtrue;
957 *p = gjmp_append(*p, t);
960 /* Generate value test
962 * Generate a test for any value (jump, comparison and integers) */
963 static int gvtst(int inv, int t)
965 int op, x, u;
967 gvtst_set(inv, t);
968 t = vtop->jtrue, u = vtop->jfalse;
969 if (inv)
970 x = u, u = t, t = x;
971 op = vtop->cmp_op;
973 /* jump to the wanted target */
974 if (op > 1)
975 t = gjmp_cond(op ^ inv, t);
976 else if (op != inv)
977 t = gjmp(t);
978 /* resolve complementary jumps to here */
979 gsym(u);
981 vtop--;
982 return t;
985 /* generate a zero or nozero test */
986 static void gen_test_zero(int op)
988 if (vtop->r == VT_CMP) {
989 int j;
990 if (op == TOK_EQ) {
991 j = vtop->jfalse;
992 vtop->jfalse = vtop->jtrue;
993 vtop->jtrue = j;
994 vtop->cmp_op ^= 1;
996 } else {
997 vpushi(0);
998 gen_op(op);
1002 /* ------------------------------------------------------------------------- */
1003 /* push a symbol value of TYPE */
1004 ST_FUNC void vpushsym(CType *type, Sym *sym)
1006 CValue cval;
1007 cval.i = 0;
1008 vsetc(type, VT_CONST | VT_SYM, &cval);
1009 vtop->sym = sym;
1012 /* Return a static symbol pointing to a section */
1013 ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
1015 int v;
1016 Sym *sym;
1018 v = anon_sym++;
1019 sym = sym_push(v, type, VT_CONST | VT_SYM, 0);
1020 sym->type.t |= VT_STATIC;
1021 put_extern_sym(sym, sec, offset, size);
1022 return sym;
1025 /* push a reference to a section offset by adding a dummy symbol */
1026 static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
1028 vpushsym(type, get_sym_ref(type, sec, offset, size));
1031 /* define a new external reference to a symbol 'v' of type 'u' */
1032 ST_FUNC Sym *external_global_sym(int v, CType *type)
1034 Sym *s;
1036 s = sym_find(v);
1037 if (!s) {
1038 /* push forward reference */
1039 s = global_identifier_push(v, type->t | VT_EXTERN, 0);
1040 s->type.ref = type->ref;
1041 } else if (IS_ASM_SYM(s)) {
1042 s->type.t = type->t | (s->type.t & VT_EXTERN);
1043 s->type.ref = type->ref;
1044 update_storage(s);
1046 return s;
1049 /* create an external reference with no specific type similar to asm labels.
1050 This avoids type conflicts if the symbol is used from C too */
1051 ST_FUNC Sym *external_helper_sym(int v)
1053 CType ct = { VT_ASM_FUNC, NULL };
1054 return external_global_sym(v, &ct);
1057 /* push a reference to an helper function (such as memmove) */
1058 ST_FUNC void vpush_helper_func(int v)
1060 vpushsym(&func_old_type, external_helper_sym(v));
1063 /* Merge symbol attributes. */
1064 static void merge_symattr(struct SymAttr *sa, struct SymAttr *sa1)
1066 if (sa1->aligned && !sa->aligned)
1067 sa->aligned = sa1->aligned;
1068 sa->packed |= sa1->packed;
1069 sa->weak |= sa1->weak;
1070 if (sa1->visibility != STV_DEFAULT) {
1071 int vis = sa->visibility;
1072 if (vis == STV_DEFAULT
1073 || vis > sa1->visibility)
1074 vis = sa1->visibility;
1075 sa->visibility = vis;
1077 sa->dllexport |= sa1->dllexport;
1078 sa->nodecorate |= sa1->nodecorate;
1079 sa->dllimport |= sa1->dllimport;
1082 /* Merge function attributes. */
1083 static void merge_funcattr(struct FuncAttr *fa, struct FuncAttr *fa1)
1085 if (fa1->func_call && !fa->func_call)
1086 fa->func_call = fa1->func_call;
1087 if (fa1->func_type && !fa->func_type)
1088 fa->func_type = fa1->func_type;
1089 if (fa1->func_args && !fa->func_args)
1090 fa->func_args = fa1->func_args;
1091 if (fa1->func_noreturn)
1092 fa->func_noreturn = 1;
1093 if (fa1->func_ctor)
1094 fa->func_ctor = 1;
1095 if (fa1->func_dtor)
1096 fa->func_dtor = 1;
1099 /* Merge attributes. */
1100 static void merge_attr(AttributeDef *ad, AttributeDef *ad1)
1102 merge_symattr(&ad->a, &ad1->a);
1103 merge_funcattr(&ad->f, &ad1->f);
1105 if (ad1->section)
1106 ad->section = ad1->section;
1107 if (ad1->alias_target)
1108 ad->alias_target = ad1->alias_target;
1109 if (ad1->asm_label)
1110 ad->asm_label = ad1->asm_label;
1111 if (ad1->attr_mode)
1112 ad->attr_mode = ad1->attr_mode;
1115 /* Merge some type attributes. */
1116 static void patch_type(Sym *sym, CType *type)
1118 if (!(type->t & VT_EXTERN) || IS_ENUM_VAL(sym->type.t)) {
1119 if (!(sym->type.t & VT_EXTERN))
1120 tcc_error("redefinition of '%s'", get_tok_str(sym->v, NULL));
1121 sym->type.t &= ~VT_EXTERN;
1124 if (IS_ASM_SYM(sym)) {
1125 /* stay static if both are static */
1126 sym->type.t = type->t & (sym->type.t | ~VT_STATIC);
1127 sym->type.ref = type->ref;
1130 if (!is_compatible_types(&sym->type, type)) {
1131 tcc_error("incompatible types for redefinition of '%s'",
1132 get_tok_str(sym->v, NULL));
1134 } else if ((sym->type.t & VT_BTYPE) == VT_FUNC) {
1135 int static_proto = sym->type.t & VT_STATIC;
1136 /* warn if static follows non-static function declaration */
1137 if ((type->t & VT_STATIC) && !static_proto
1138 /* XXX this test for inline shouldn't be here. Until we
1139 implement gnu-inline mode again it silences a warning for
1140 mingw caused by our workarounds. */
1141 && !((type->t | sym->type.t) & VT_INLINE))
1142 tcc_warning("static storage ignored for redefinition of '%s'",
1143 get_tok_str(sym->v, NULL));
1145 /* set 'inline' if both agree or if one has static */
1146 if ((type->t | sym->type.t) & VT_INLINE) {
1147 if (!((type->t ^ sym->type.t) & VT_INLINE)
1148 || ((type->t | sym->type.t) & VT_STATIC))
1149 static_proto |= VT_INLINE;
1152 if (0 == (type->t & VT_EXTERN)) {
1153 struct FuncAttr f = sym->type.ref->f;
1154 /* put complete type, use static from prototype */
1155 sym->type.t = (type->t & ~(VT_STATIC|VT_INLINE)) | static_proto;
1156 sym->type.ref = type->ref;
1157 merge_funcattr(&sym->type.ref->f, &f);
1158 } else {
1159 sym->type.t &= ~VT_INLINE | static_proto;
1162 if (sym->type.ref->f.func_type == FUNC_OLD
1163 && type->ref->f.func_type != FUNC_OLD) {
1164 sym->type.ref = type->ref;
1167 } else {
1168 if ((sym->type.t & VT_ARRAY) && type->ref->c >= 0) {
1169 /* set array size if it was omitted in extern declaration */
1170 sym->type.ref->c = type->ref->c;
1172 if ((type->t ^ sym->type.t) & VT_STATIC)
1173 tcc_warning("storage mismatch for redefinition of '%s'",
1174 get_tok_str(sym->v, NULL));
1178 /* Merge some storage attributes. */
1179 static void patch_storage(Sym *sym, AttributeDef *ad, CType *type)
1181 if (type)
1182 patch_type(sym, type);
1184 #ifdef TCC_TARGET_PE
1185 if (sym->a.dllimport != ad->a.dllimport)
1186 tcc_error("incompatible dll linkage for redefinition of '%s'",
1187 get_tok_str(sym->v, NULL));
1188 #endif
1189 merge_symattr(&sym->a, &ad->a);
1190 if (ad->asm_label)
1191 sym->asm_label = ad->asm_label;
1192 update_storage(sym);
1195 /* copy sym to other stack */
1196 static Sym *sym_copy(Sym *s0, Sym **ps)
1198 Sym *s;
1199 s = sym_malloc(), *s = *s0;
1200 s->prev = *ps, *ps = s;
1201 if (s->v < SYM_FIRST_ANOM) {
1202 ps = &table_ident[s->v - TOK_IDENT]->sym_identifier;
1203 s->prev_tok = *ps, *ps = s;
1205 return s;
1208 /* copy s->type.ref to stack 'ps' for VT_FUNC and VT_PTR */
1209 static void sym_copy_ref(Sym *s, Sym **ps)
1211 int bt = s->type.t & VT_BTYPE;
1212 if (bt == VT_FUNC || bt == VT_PTR || (bt == VT_STRUCT && s->sym_scope)) {
1213 Sym **sp = &s->type.ref;
1214 for (s = *sp, *sp = NULL; s; s = s->next) {
1215 Sym *s2 = sym_copy(s, ps);
1216 sp = &(*sp = s2)->next;
1217 sym_copy_ref(s2, ps);
/* define a new external reference to a symbol 'v' */
/* Find or create the global symbol for identifier 'v' with type 'type',
   register/storage info 'r' and attributes 'ad'.  If the declaration
   appears inside a function, a copy is additionally pushed on the
   local stack (except for functions) so it is popped at scope end. */
static Sym *external_sym(int v, CType *type, int r, AttributeDef *ad)
{
    Sym *s;

    /* look for global symbol */
    s = sym_find(v);
    while (s && s->sym_scope)
        s = s->prev_tok;

    if (!s) {
        /* push forward reference */
        s = global_identifier_push(v, type->t, 0);
        s->r |= r;
        s->a = ad->a;
        s->asm_label = ad->asm_label;
        s->type.ref = type->ref;
        /* copy type to the global stack */
        if (local_stack)
            sym_copy_ref(s, &global_stack);
    } else {
        /* already known: merge storage/attributes of the redeclaration */
        patch_storage(s, ad, type);
    }
    /* push variables on local_stack if any */
    if (local_stack && (s->type.t & VT_BTYPE) != VT_FUNC)
        s = sym_copy(s, &local_stack);
    return s;
}
1251 /* save registers up to (vtop - n) stack entry */
1252 ST_FUNC void save_regs(int n)
1254 SValue *p, *p1;
1255 for(p = vstack, p1 = vtop - n; p <= p1; p++)
1256 save_reg(p->r);
/* save r to the memory stack, and mark it as being free */
ST_FUNC void save_reg(int r)
{
    /* convenience wrapper: scan the whole value stack (n == 0) */
    save_reg_upstack(r, 0);
}
/* save r to the memory stack, and mark it as being free,
   if seen up to (vtop - n) stack entry */
/* The value is stored once into a temporary stack slot; every stack
   entry that referenced register 'r' (directly or as second word r2)
   is then redirected to that slot. */
ST_FUNC void save_reg_upstack(int r, int n)
{
    int l, size, align, bt;
    SValue *p, *p1, sv;

    if ((r &= VT_VALMASK) >= VT_CONST)
        return;                     /* not a machine register */
    if (nocode_wanted)
        return;
    l = 0;                          /* stack slot offset, 0 = not yet allocated */
    for(p = vstack, p1 = vtop - n; p <= p1; p++) {
        if ((p->r & VT_VALMASK) == r || p->r2 == r) {
            /* must save value on stack if not already done */
            if (!l) {
                bt = p->type.t & VT_BTYPE;
                if (bt == VT_VOID)
                    continue;
                /* an lvalue holds a pointer; functions decay to pointers */
                if ((p->r & VT_LVAL) || bt == VT_FUNC)
                    bt = VT_PTR;
                sv.type.t = bt;
                size = type_size(&sv.type, &align);
                l = get_temp_local_var(size,align);
                sv.r = VT_LOCAL | VT_LVAL;
                sv.c.i = l;
                store(p->r & VT_VALMASK, &sv);
#if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
                /* x86 specific: need to pop fp register ST0 if saved */
                if (r == TREG_ST0) {
                    o(0xd8dd); /* fstp %st(0) */
                }
#endif
                /* special long long case */
                if (p->r2 < VT_CONST && USING_TWO_WORDS(bt)) {
                    sv.c.i += PTR_SIZE;
                    store(p->r2, &sv);
                }
            }
            /* mark that stack entry as being saved on the stack */
            if (p->r & VT_LVAL) {
                /* also clear the bounded flag because the
                   relocation address of the function was stored in
                   p->c.i */
                p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
            } else {
                p->r = VT_LVAL | VT_LOCAL;
            }
            p->sym = NULL;
            p->r2 = VT_CONST;
            p->c.i = l;
        }
    }
}
#ifdef TCC_TARGET_ARM
/* find a register of class 'rc2' with at most one reference on stack.
 * If none, call get_reg(rc) */
ST_FUNC int get_reg_ex(int rc, int rc2)
{
    int r;
    SValue *p;

    for (r = 0; r < NB_REGS; r++) {
        int refs;
        if (!(reg_classes[r] & rc2))
            continue;
        /* count how many value-stack entries use this register */
        refs = 0;
        for (p = vstack; p <= vtop; p++) {
            if ((p->r & VT_VALMASK) == r || p->r2 == r)
                refs++;
        }
        if (refs <= 1)
            return r;
    }
    /* every rc2 register is referenced more than once: fall back */
    return get_reg(rc);
}
#endif
/* find a free register of class 'rc'. If none, save one register */
ST_FUNC int get_reg(int rc)
{
    int r;
    SValue *p;

    /* find a free register */
    for(r=0;r<NB_REGS;r++) {
        if (reg_classes[r] & rc) {
            /* when not generating code any register will do */
            if (nocode_wanted)
                return r;
            /* a register is free iff no value-stack entry uses it */
            for(p=vstack;p<=vtop;p++) {
                if ((p->r & VT_VALMASK) == r ||
                    p->r2 == r)
                    goto notfound;
            }
            return r;
        }
    notfound: ;
    }

    /* no register left : free the first one on the stack (VERY
       IMPORTANT to start from the bottom to ensure that we don't
       spill registers used in gen_opi()) */
    for(p=vstack;p<=vtop;p++) {
        /* look at second register (if long long) */
        r = p->r2;
        if (r < VT_CONST && (reg_classes[r] & rc))
            goto save_found;
        r = p->r & VT_VALMASK;
        if (r < VT_CONST && (reg_classes[r] & rc)) {
        save_found:
            save_reg(r);
            return r;
        }
    }
    /* Should never comes here */
    return -1;
}
1385 /* find a free temporary local variable (return the offset on stack) match the size and align. If none, add new temporary stack variable*/
1386 static int get_temp_local_var(int size,int align){
1387 int i;
1388 struct temp_local_variable *temp_var;
1389 int found_var;
1390 SValue *p;
1391 int r;
1392 char free;
1393 char found;
1394 found=0;
1395 for(i=0;i<nb_temp_local_vars;i++){
1396 temp_var=&arr_temp_local_vars[i];
1397 if(temp_var->size<size||align!=temp_var->align){
1398 continue;
1400 /*check if temp_var is free*/
1401 free=1;
1402 for(p=vstack;p<=vtop;p++) {
1403 r=p->r&VT_VALMASK;
1404 if(r==VT_LOCAL||r==VT_LLOCAL){
1405 if(p->c.i==temp_var->location){
1406 free=0;
1407 break;
1411 if(free){
1412 found_var=temp_var->location;
1413 found=1;
1414 break;
1417 if(!found){
1418 loc = (loc - size) & -align;
1419 if(nb_temp_local_vars<MAX_TEMP_LOCAL_VARIABLE_NUMBER){
1420 temp_var=&arr_temp_local_vars[i];
1421 temp_var->location=loc;
1422 temp_var->size=size;
1423 temp_var->align=align;
1424 nb_temp_local_vars++;
1426 found_var=loc;
1428 return found_var;
/* forget all temporary stack slots; their space is reclaimed together
   with the enclosing function frame */
static void clear_temp_local_var_list(){
    nb_temp_local_vars=0;
}
1435 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1436 if needed */
1437 static void move_reg(int r, int s, int t)
1439 SValue sv;
1441 if (r != s) {
1442 save_reg(r);
1443 sv.type.t = t;
1444 sv.type.ref = NULL;
1445 sv.r = s;
1446 sv.c.i = 0;
1447 load(r, &sv);
/* get address of vtop (vtop MUST BE an lvalue) */
/* Clears VT_LVAL so vtop now denotes the pointer itself. */
ST_FUNC void gaddrof(void)
{
    vtop->r &= ~VT_LVAL;
    /* tricky: if saved lvalue, then we can go back to lvalue */
    if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
        vtop->r = (vtop->r & ~VT_VALMASK) | VT_LOCAL | VT_LVAL;
}
#ifdef CONFIG_TCC_BCHECK
/* generate a bounded pointer addition */
/* Calls __bound_ptr_add(ptr, offset) and leaves the checked pointer
   (tagged VT_BOUNDED) on the value stack; vtop->c.i records the
   relocation of the call so it can be re-targeted later. */
static void gen_bounded_ptr_add(void)
{
    /* a VT_LOCAL base must be preserved across the helper call */
    int save = (vtop[-1].r & VT_VALMASK) == VT_LOCAL;
    if (save) {
        vpushv(&vtop[-1]);
        vrott(3);
    }
    vpush_helper_func(TOK___bound_ptr_add);
    vrott(3);
    gfunc_call(2);
    vtop -= save;
    vpushi(0);
    /* returned pointer is in REG_IRET */
    vtop->r = REG_IRET | VT_BOUNDED;
    if (nocode_wanted)
        return;
    /* relocation offset of the bounding function call point */
    vtop->c.i = (cur_text_section->reloc->data_offset - sizeof(ElfW_Rel));
}
/* patch pointer addition in vtop so that pointer dereferencing is
   also tested */
/* Rewrites the relocation left by gen_bounded_ptr_add() to point at
   the __bound_ptr_indirN helper matching the access size, so the
   runtime checks the dereference and not just the addition. */
static void gen_bounded_ptr_deref(void)
{
    addr_t func;
    int size, align;
    ElfW_Rel *rel;
    Sym *sym;

    if (nocode_wanted)
        return;

    size = type_size(&vtop->type, &align);
    switch(size) {
    case 1: func = TOK___bound_ptr_indir1; break;
    case 2: func = TOK___bound_ptr_indir2; break;
    case 4: func = TOK___bound_ptr_indir4; break;
    case 8: func = TOK___bound_ptr_indir8; break;
    case 12: func = TOK___bound_ptr_indir12; break;
    case 16: func = TOK___bound_ptr_indir16; break;
    default:
        /* may happen with struct member access */
        return;
    }
    sym = external_helper_sym(func);
    if (!sym->c)
        put_extern_sym(sym, NULL, 0, 0);
    /* patch relocation */
    /* XXX: find a better solution ? */
    rel = (ElfW_Rel *)(cur_text_section->reloc->data + vtop->c.i);
    rel->r_info = ELFW(R_INFO)(sym->c, ELFW(R_TYPE)(rel->r_info));
}
/* generate lvalue bound code */
/* Ensure the lvalue at vtop is bounds-checked before it is
   dereferenced: wrap its address in a __bound_ptr_add call if not
   already VT_BOUNDED, then emit the dereference check. */
static void gbound(void)
{
    CType type1;

    vtop->r &= ~VT_MUSTBOUND;
    /* if lvalue, then use checking code before dereferencing */
    if (vtop->r & VT_LVAL) {
        /* if not VT_BOUNDED value, then make one */
        if (!(vtop->r & VT_BOUNDED)) {
            /* must save type because we must set it to int to get pointer */
            type1 = vtop->type;
            vtop->type.t = VT_PTR;
            gaddrof();
            vpushi(0);               /* offset 0: check the address itself */
            gen_bounded_ptr_add();
            vtop->r |= VT_LVAL;
            vtop->type = type1;
        }
        /* then check for dereferencing */
        gen_bounded_ptr_deref();
    }
}
/* we need to call __bound_ptr_add before we start to load function
   args into registers */
/* Also recognizes direct calls to setjmp-style functions (and alloca,
   and NetBSD longjmp) which need special bounds-runtime support. */
ST_FUNC void gbound_args(int nb_args)
{
    int i, v;
    SValue *sv;

    /* bound-check pending arguments without changing their order */
    for (i = 1; i <= nb_args; ++i)
        if (vtop[1 - i].r & VT_MUSTBOUND) {
            vrotb(i);
            gbound();
            vrott(i);
        }

    sv = vtop - nb_args;        /* the called function itself */
    if (sv->r & VT_SYM) {
        v = sv->sym->v;
        if (v == TOK_setjmp
          || v == TOK__setjmp
#ifndef TCC_TARGET_PE
          || v == TOK_sigsetjmp
          || v == TOK___sigsetjmp
#endif
            ) {
            /* tell the bounds runtime about the jmp_buf */
            vpush_helper_func(TOK___bound_setjmp);
            vpushv(sv + 1);
            gfunc_call(1);
            func_bound_add_epilog = 1;
        }
#if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64
        if (v == TOK_alloca)
            func_bound_add_epilog = 1;
#endif
#if TARGETOS_NetBSD
        if (v == TOK_longjmp) /* undo rename to __longjmp14 */
            sv->sym->asm_label = TOK___bound_longjmp;
#endif
    }
}
/* Add bounds for local symbols from S to E (via ->prev) */
/* Emits (offset, size) pairs into lbounds_section for each stack
   variable whose address can escape. */
static void add_local_bounds(Sym *s, Sym *e)
{
    for (; s != e; s = s->prev) {
        if (!s->v || (s->r & VT_VALMASK) != VT_LOCAL)
            continue;
        /* Add arrays/structs/unions because we always take address */
        if ((s->type.t & VT_ARRAY)
            || (s->type.t & VT_BTYPE) == VT_STRUCT
            || s->a.addrtaken) {
            /* add local bound info */
            int align, size = type_size(&s->type, &align);
            addr_t *bounds_ptr = section_ptr_add(lbounds_section,
                                                 2 * sizeof(addr_t));
            bounds_ptr[0] = s->c;       /* frame offset */
            bounds_ptr[1] = size;
        }
    }
}
#endif
/* Wrapper around sym_pop, that potentially also registers local bounds. */
/* Also forwards scope-exit information to the debug-info writer when
   debug output is enabled. */
static void pop_local_syms(Sym *b, int keep)
{
#ifdef CONFIG_TCC_BCHECK
    if (tcc_state->do_bounds_check && !keep && (local_scope || !func_var))
        add_local_bounds(local_stack, b);
#endif
    if (debug_modes)
        tcc_add_debug_info (tcc_state, !local_scope, local_stack, b);
    sym_pop(&local_stack, b, keep);
}
/* advance the byte pointer at vtop by 'o' bytes and turn it into an
   unsigned-char lvalue (helper for byte-wise bitfield access) */
static void incr_bf_adr(int o)
{
    vtop->type = char_pointer_type;
    gaddrof();
    vpushs(o);
    gen_op('+');
    vtop->type.t = VT_BYTE | VT_UNSIGNED;
    vtop->r |= VT_LVAL;
}
/* single-byte load mode for packed or otherwise unaligned bitfields */
/* Assembles the bitfield value byte by byte into an accumulator of
   type 'type'; trailing comments track the value-stack layout
   (B = byte lvalue, X = accumulator, Y = extracted bits). */
static void load_packed_bf(CType *type, int bit_pos, int bit_size)
{
    int n, o, bits;
    save_reg_upstack(vtop->r, 1);
    vpush64(type->t & VT_BTYPE, 0); // B X
    bits = 0, o = bit_pos >> 3, bit_pos &= 7;
    do {
        vswap(); // X B
        incr_bf_adr(o);
        vdup(); // X B B
        n = 8 - bit_pos;            /* bits available in this byte */
        if (n > bit_size)
            n = bit_size;
        if (bit_pos)
            vpushi(bit_pos), gen_op(TOK_SHR), bit_pos = 0; // X B Y
        if (n < 8)
            vpushi((1 << n) - 1), gen_op('&');
        gen_cast(type);
        if (bits)
            vpushi(bits), gen_op(TOK_SHL);
        vrotb(3); // B Y X
        gen_op('|'); // B X
        bits += n, bit_size -= n, o = 1;
    } while (bit_size);
    vswap(), vpop();
    if (!(type->t & VT_UNSIGNED)) {
        /* sign-extend the assembled value */
        n = ((type->t & VT_BTYPE) == VT_LLONG ? 64 : 32) - bits;
        vpushi(n), gen_op(TOK_SHL);
        vpushi(n), gen_op(TOK_SAR);
    }
}
/* single-byte store mode for packed or otherwise unaligned bitfields */
/* Writes the value X into the bitfield byte by byte, merging with the
   untouched bits of each byte; trailing comments track the
   value-stack layout (X = value, B = byte lvalue, V = byte value). */
static void store_packed_bf(int bit_pos, int bit_size)
{
    int bits, n, o, m, c;
    /* constant source values can be re-pushed instead of duplicated
       through a register */
    c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
    vswap(); // X B
    save_reg_upstack(vtop->r, 1);
    bits = 0, o = bit_pos >> 3, bit_pos &= 7;
    do {
        incr_bf_adr(o); // X B
        vswap(); //B X
        c ? vdup() : gv_dup(); // B V X
        vrott(3); // X B V
        if (bits)
            vpushi(bits), gen_op(TOK_SHR);
        if (bit_pos)
            vpushi(bit_pos), gen_op(TOK_SHL);
        n = 8 - bit_pos;            /* bits storable in this byte */
        if (n > bit_size)
            n = bit_size;
        if (n < 8) {
            m = ((1 << n) - 1) << bit_pos;
            vpushi(m), gen_op('&'); // X B V1
            vpushv(vtop-1); // X B V1 B
            /* avoid sign-extension surprises when ~m has bit 7 set */
            vpushi(m & 0x80 ? ~m & 0x7f : ~m);
            gen_op('&'); // X B V1 B1
            gen_op('|'); // X B V2
        }
        vdup(), vtop[-1] = vtop[-2]; // X B B V2
        vstore(), vpop(); // X B
        bits += n, bit_size -= n, bit_pos = 0, o = 1;
    } while (bit_size);
    vpop(), vpop();
}
1690 static int adjust_bf(SValue *sv, int bit_pos, int bit_size)
1692 int t;
1693 if (0 == sv->type.ref)
1694 return 0;
1695 t = sv->type.ref->auxtype;
1696 if (t != -1 && t != VT_STRUCT) {
1697 sv->type.t = (sv->type.t & ~(VT_BTYPE | VT_LONG)) | t;
1698 sv->r |= VT_LVAL;
1700 return t;
/* store vtop in a register belonging to class 'rc'. lvalues are
   converted to values. Cannot be used if cannot be converted to
   register value (such as structures). Returns the register used. */
ST_FUNC int gv(int rc)
{
    int r, r2, r_ok, r2_ok, rc2, bt;
    int bit_pos, bit_size, size, align;

    /* NOTE: get_reg can modify vstack[] */
    if (vtop->type.t & VT_BITFIELD) {
        CType type;

        bit_pos = BIT_POS(vtop->type.t);
        bit_size = BIT_SIZE(vtop->type.t);
        /* remove bit field info to avoid loops */
        vtop->type.t &= ~VT_STRUCT_MASK;

        type.ref = NULL;
        type.t = vtop->type.t & VT_UNSIGNED;
        if ((vtop->type.t & VT_BTYPE) == VT_BOOL)
            type.t |= VT_UNSIGNED;

        r = adjust_bf(vtop, bit_pos, bit_size);

        if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
            type.t |= VT_LLONG;
        else
            type.t |= VT_INT;

        if (r == VT_STRUCT) {
            /* unaligned/packed field: assemble it byte by byte */
            load_packed_bf(&type, bit_pos, bit_size);
        } else {
            int bits = (type.t & VT_BTYPE) == VT_LLONG ? 64 : 32;
            /* cast to int to propagate signedness in following ops */
            gen_cast(&type);
            /* generate shifts */
            vpushi(bits - (bit_pos + bit_size));
            gen_op(TOK_SHL);
            vpushi(bits - bit_size);
            /* NOTE: transformed to SHR if unsigned */
            gen_op(TOK_SAR);
        }
        r = gv(rc);
    } else {
        if (is_float(vtop->type.t) &&
            (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
            /* CPUs usually cannot use float constants, so we store them
               generically in data segment */
            init_params p = { rodata_section };
            unsigned long offset;
            size = type_size(&vtop->type, &align);
            if (NODATA_WANTED)
                size = 0, align = 1;
            offset = section_add(p.sec, size, align);
            vpush_ref(&vtop->type, p.sec, offset, size);
            vswap();
            init_putv(&p, &vtop->type, offset);
            vtop->r |= VT_LVAL;
        }
#ifdef CONFIG_TCC_BCHECK
        if (vtop->r & VT_MUSTBOUND)
            gbound();
#endif

        bt = vtop->type.t & VT_BTYPE;

#ifdef TCC_TARGET_RISCV64
        /* XXX mega hack */
        if (bt == VT_LDOUBLE && rc == RC_FLOAT)
            rc = RC_INT;
#endif
        /* rc2 is the class for the second register (two-word types) */
        rc2 = RC2_TYPE(bt, rc);

        /* need to reload if:
           - constant
           - lvalue (need to dereference pointer)
           - already a register, but not in the right class */
        r = vtop->r & VT_VALMASK;
        r_ok = !(vtop->r & VT_LVAL) && (r < VT_CONST) && (reg_classes[r] & rc);
        r2_ok = !rc2 || ((vtop->r2 < VT_CONST) && (reg_classes[vtop->r2] & rc2));

        if (!r_ok || !r2_ok) {
            if (!r_ok)
                r = get_reg(rc);
            if (rc2) {
                int load_type = (bt == VT_QFLOAT) ? VT_DOUBLE : VT_PTRDIFF_T;
                int original_type = vtop->type.t;

                /* two register type load :
                   expand to two words temporarily */
                if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
                    /* load constant */
                    unsigned long long ll = vtop->c.i;
                    vtop->c.i = ll; /* first word */
                    load(r, vtop);
                    vtop->r = r; /* save register value */
                    vpushi(ll >> 32); /* second word */
                } else if (vtop->r & VT_LVAL) {
                    /* We do not want to modify the long long pointer here.
                       So we save any other instances down the stack */
                    save_reg_upstack(vtop->r, 1);
                    /* load from memory */
                    vtop->type.t = load_type;
                    load(r, vtop);
                    vdup();
                    vtop[-1].r = r; /* save register value */
                    /* increment pointer to get second word */
                    vtop->type.t = VT_PTRDIFF_T;
                    gaddrof();
                    vpushs(PTR_SIZE);
                    gen_op('+');
                    vtop->r |= VT_LVAL;
                    vtop->type.t = load_type;
                } else {
                    /* move registers */
                    if (!r_ok)
                        load(r, vtop);
                    if (r2_ok && vtop->r2 < VT_CONST)
                        goto done;
                    vdup();
                    vtop[-1].r = r; /* save register value */
                    vtop->r = vtop[-1].r2;
                }
                /* Allocate second register. Here we rely on the fact that
                   get_reg() tries first to free r2 of an SValue. */
                r2 = get_reg(rc2);
                load(r2, vtop);
                vpop();
                /* write second register */
                vtop->r2 = r2;
            done:
                vtop->type.t = original_type;
            } else {
                if (vtop->r == VT_CMP)
                    vset_VT_JMP();
                /* one register type load */
                load(r, vtop);
            }
        }
        vtop->r = r;
#ifdef TCC_TARGET_C67
        /* uses register pairs for doubles */
        if (bt == VT_DOUBLE)
            vtop->r2 = r+1;
#endif
    }
    return r;
}
/* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
/* Loading the second value may spill the first, so the first is
   reloaded afterwards if it lost its register. */
ST_FUNC void gv2(int rc1, int rc2)
{
    /* generate more generic register first. But VT_JMP or VT_CMP
       values must be generated first in all cases to avoid possible
       reload errors */
    if (vtop->r != VT_CMP && rc1 <= rc2) {
        vswap();
        gv(rc1);
        vswap();
        gv(rc2);
        /* test if reload is needed for first register */
        if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
            vswap();
            gv(rc1);
            vswap();
        }
    } else {
        gv(rc2);
        vswap();
        gv(rc1);
        vswap();
        /* test if reload is needed for first register */
        if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
            gv(rc2);
        }
    }
}
#if PTR_SIZE == 4
/* expand 64bit on stack in two ints */
/* After the call, vtop[-1] is the low word and vtop[0] the high word,
   both typed VT_INT with the original sign flags preserved. */
ST_FUNC void lexpand(void)
{
    int u, v;
    u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
    v = vtop->r & (VT_VALMASK | VT_LVAL);
    if (v == VT_CONST) {
        /* constant: high word is just the upper 32 bits */
        vdup();
        vtop[0].c.i >>= 32;
    } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
        /* memory lvalue: high word lives 4 bytes further */
        vdup();
        vtop[0].c.i += 4;
    } else {
        /* register pair: split r/r2 into two single-register values */
        gv(RC_INT);
        vdup();
        vtop[0].r = vtop[-1].r2;
        vtop[0].r2 = vtop[-1].r2 = VT_CONST;
    }
    vtop[0].type.t = vtop[-1].type.t = VT_INT | u;
}
#endif
#if PTR_SIZE == 4
/* build a long long from two ints */
/* vtop[-1] = low word, vtop[0] = high word; result has type 't'. */
static void lbuild(int t)
{
    gv2(RC_INT, RC_INT);
    vtop[-1].r2 = vtop[0].r;    /* high word register becomes r2 */
    vtop[-1].type.t = t;
    vpop();                     /* drop the separate high-word entry */
}
#endif
/* convert stack entry to register and duplicate its value in another
   register */
static void gv_dup(void)
{
    int t, rc, r;

    t = vtop->type.t;
#if PTR_SIZE == 4
    if ((t & VT_BTYPE) == VT_LLONG) {
        if (t & VT_BITFIELD) {
            /* materialize the bitfield first; its effective type may change */
            gv(RC_INT);
            t = vtop->type.t;
        }
        /* duplicate both 32-bit halves separately, then rebuild two
           long long values */
        lexpand();
        gv_dup();
        vswap();
        vrotb(3);
        gv_dup();
        vrotb(4);
        /* stack: H L L1 H1 */
        lbuild(t);
        vrotb(3);
        vrotb(3);
        vswap();
        lbuild(t);
        vswap();
        return;
    }
#endif
    /* duplicate value */
    rc = RC_TYPE(t);
    gv(rc);
    r = get_reg(rc);
    vdup();
    load(r, vtop);
    vtop->r = r;
}
#if PTR_SIZE == 4
/* generate CPU independent (unsigned) long long operations */
/* On 32-bit targets a 64-bit operation is synthesized from 32-bit
   ones; the "stack:" comments track the value-stack layout where
   L/H denote low/high words of the operands. */
static void gen_opl(int op)
{
    int t, a, b, op1, c, i;
    int func;
    unsigned short reg_iret = REG_IRET;
    unsigned short reg_lret = REG_IRE2;
    SValue tmp;

    switch(op) {
    case '/':
    case TOK_PDIV:
        func = TOK___divdi3;
        goto gen_func;
    case TOK_UDIV:
        func = TOK___udivdi3;
        goto gen_func;
    case '%':
        func = TOK___moddi3;
        goto gen_mod_func;
    case TOK_UMOD:
        func = TOK___umoddi3;
    gen_mod_func:
#ifdef TCC_ARM_EABI
        /* EABI aeabi_ldivmod returns the remainder in r2/r3 */
        reg_iret = TREG_R2;
        reg_lret = TREG_R3;
#endif
    gen_func:
        /* call generic long long function */
        vpush_helper_func(func);
        vrott(3);
        gfunc_call(2);
        vpushi(0);
        vtop->r = reg_iret;
        vtop->r2 = reg_lret;
        break;
    case '^':
    case '&':
    case '|':
    case '*':
    case '+':
    case '-':
        //pv("gen_opl A",0,2);
        t = vtop->type.t;
        vswap();
        lexpand();
        vrotb(3);
        lexpand();
        /* stack: L1 H1 L2 H2 */
        tmp = vtop[0];
        vtop[0] = vtop[-3];
        vtop[-3] = tmp;
        tmp = vtop[-2];
        vtop[-2] = vtop[-3];
        vtop[-3] = tmp;
        vswap();
        /* stack: H1 H2 L1 L2 */
        //pv("gen_opl B",0,4);
        if (op == '*') {
            /* 64x64 multiply: L1*L2 (full 64-bit) + cross products
               folded into the high word */
            vpushv(vtop - 1);
            vpushv(vtop - 1);
            gen_op(TOK_UMULL);
            lexpand();
            /* stack: H1 H2 L1 L2 ML MH */
            for(i=0;i<4;i++)
                vrotb(6);
            /* stack: ML MH H1 H2 L1 L2 */
            tmp = vtop[0];
            vtop[0] = vtop[-2];
            vtop[-2] = tmp;
            /* stack: ML MH H1 L2 H2 L1 */
            gen_op('*');
            vrotb(3);
            vrotb(3);
            gen_op('*');
            /* stack: ML MH M1 M2 */
            gen_op('+');
            gen_op('+');
        } else if (op == '+' || op == '-') {
            /* XXX: add non carry method too (for MIPS or alpha) */
            if (op == '+')
                op1 = TOK_ADDC1;
            else
                op1 = TOK_SUBC1;
            gen_op(op1);
            /* stack: H1 H2 (L1 op L2) */
            vrotb(3);
            vrotb(3);
            gen_op(op1 + 1); /* TOK_xxxC2 */
        } else {
            /* bitwise ops: low and high words are independent */
            gen_op(op);
            /* stack: H1 H2 (L1 op L2) */
            vrotb(3);
            vrotb(3);
            /* stack: (L1 op L2) H1 H2 */
            gen_op(op);
            /* stack: (L1 op L2) (H1 op H2) */
        }
        /* stack: L H */
        lbuild(t);
        break;
    case TOK_SAR:
    case TOK_SHR:
    case TOK_SHL:
        if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
            t = vtop[-1].type.t;
            vswap();
            lexpand();
            vrotb(3);
            /* stack: L H shift */
            c = (int)vtop->c.i;
            /* constant: simpler */
            /* NOTE: all comments are for SHL. the other cases are
               done by swapping words */
            vpop();
            if (op != TOK_SHL)
                vswap();
            if (c >= 32) {
                /* stack: L H */
                vpop();
                if (c > 32) {
                    vpushi(c - 32);
                    gen_op(op);
                }
                if (op != TOK_SAR) {
                    vpushi(0);
                } else {
                    /* arithmetic shift: the new high word is the sign */
                    gv_dup();
                    vpushi(31);
                    gen_op(TOK_SAR);
                }
                vswap();
            } else {
                vswap();
                gv_dup();
                /* stack: H L L */
                vpushi(c);
                gen_op(op);
                vswap();
                vpushi(32 - c);
                if (op == TOK_SHL)
                    gen_op(TOK_SHR);
                else
                    gen_op(TOK_SHL);
                vrotb(3);
                /* stack: L L H */
                vpushi(c);
                if (op == TOK_SHL)
                    gen_op(TOK_SHL);
                else
                    gen_op(TOK_SHR);
                gen_op('|');
            }
            if (op != TOK_SHL)
                vswap();
            lbuild(t);
        } else {
            /* XXX: should provide a faster fallback on x86 ? */
            switch(op) {
            case TOK_SAR:
                func = TOK___ashrdi3;
                goto gen_func;
            case TOK_SHR:
                func = TOK___lshrdi3;
                goto gen_func;
            case TOK_SHL:
                func = TOK___ashldi3;
                goto gen_func;
            }
        }
        break;
    default:
        /* compare operations */
        t = vtop->type.t;
        vswap();
        lexpand();
        vrotb(3);
        lexpand();
        /* stack: L1 H1 L2 H2 */
        tmp = vtop[-1];
        vtop[-1] = vtop[-2];
        vtop[-2] = tmp;
        /* stack: L1 L2 H1 H2 */
        save_regs(4);
        /* compare high */
        op1 = op;
        /* when values are equal, we need to compare low words. since
           the jump is inverted, we invert the test too. */
        if (op1 == TOK_LT)
            op1 = TOK_LE;
        else if (op1 == TOK_GT)
            op1 = TOK_GE;
        else if (op1 == TOK_ULT)
            op1 = TOK_ULE;
        else if (op1 == TOK_UGT)
            op1 = TOK_UGE;
        a = 0;
        b = 0;
        gen_op(op1);
        if (op == TOK_NE) {
            b = gvtst(0, 0);
        } else {
            a = gvtst(1, 0);
            if (op != TOK_EQ) {
                /* generate non equal test */
                vpushi(0);
                vset_VT_CMP(TOK_NE);
                b = gvtst(0, 0);
            }
        }
        /* compare low. Always unsigned */
        op1 = op;
        if (op1 == TOK_LT)
            op1 = TOK_ULT;
        else if (op1 == TOK_LE)
            op1 = TOK_ULE;
        else if (op1 == TOK_GT)
            op1 = TOK_UGT;
        else if (op1 == TOK_GE)
            op1 = TOK_UGE;
        gen_op(op1);
#if 0//def TCC_TARGET_I386
        if (op == TOK_NE) { gsym(b); break; }
        if (op == TOK_EQ) { gsym(a); break; }
#endif
        gvtst_set(1, a);
        gvtst_set(0, b);
        break;
    }
}
#endif
/* Signed 64-bit division on unsigned operands, truncating toward zero
   (C semantics): divide the magnitudes, then restore the sign of the
   quotient from the operands' sign bits. */
static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
{
    uint64_t ua = (a >> 63) ? -a : a;   /* |a| */
    uint64_t ub = (b >> 63) ? -b : b;   /* |b| */
    uint64_t q = ua / ub;

    if ((a ^ b) >> 63)                  /* operands differ in sign */
        q = -q;
    return q;
}
/* Signed "less than" on unsigned 64-bit operands: flipping the sign
   bit maps signed order onto unsigned order. */
static int gen_opic_lt(uint64_t a, uint64_t b)
{
    const uint64_t sign = (uint64_t)1 << 63;

    return (a ^ sign) < (b ^ sign);
}
/* handle integer constant optimizations and various machine
   independent opt */
/* Folds constant operands, strength-reduces (mul/div by powers of two),
   removes no-op operations (x+0, x*1, x&-1, ...), handles
   symbol+constant addressing, and otherwise falls through to the
   target code generators gen_opi()/gen_opl(). */
static void gen_opic(int op)
{
    SValue *v1 = vtop - 1;
    SValue *v2 = vtop;
    int t1 = v1->type.t & VT_BTYPE;
    int t2 = v2->type.t & VT_BTYPE;
    int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
    int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
    int nonconst = (v1->r | v2->r) & VT_NONCONST;
    uint64_t l1 = c1 ? v1->c.i : 0;
    uint64_t l2 = c2 ? v2->c.i : 0;
    int shm = (t1 == VT_LLONG) ? 63 : 31;   /* shift-count mask */

    /* normalize 32-bit operands: truncate and sign-extend as needed */
    if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
        l1 = ((uint32_t)l1 |
              (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
    if (t2 != VT_LLONG && (PTR_SIZE != 8 || t2 != VT_PTR))
        l2 = ((uint32_t)l2 |
              (v2->type.t & VT_UNSIGNED ? 0 : -(l2 & 0x80000000)));

    if (c1 && c2) {
        /* both operands constant: fold at compile time */
        switch(op) {
        case '+': l1 += l2; break;
        case '-': l1 -= l2; break;
        case '&': l1 &= l2; break;
        case '^': l1 ^= l2; break;
        case '|': l1 |= l2; break;
        case '*': l1 *= l2; break;

        case TOK_PDIV:
        case '/':
        case '%':
        case TOK_UDIV:
        case TOK_UMOD:
            /* if division by zero, generate explicit division */
            if (l2 == 0) {
                if (const_wanted && !(nocode_wanted & unevalmask))
                    tcc_error("division by zero in constant");
                goto general_case;
            }
            switch(op) {
            default: l1 = gen_opic_sdiv(l1, l2); break;
            case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
            case TOK_UDIV: l1 = l1 / l2; break;
            case TOK_UMOD: l1 = l1 % l2; break;
            }
            break;
        case TOK_SHL: l1 <<= (l2 & shm); break;
        case TOK_SHR: l1 >>= (l2 & shm); break;
        case TOK_SAR:
            /* arithmetic shift right, avoiding UB on negative values */
            l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
            break;
            /* tests */
        case TOK_ULT: l1 = l1 < l2; break;
        case TOK_UGE: l1 = l1 >= l2; break;
        case TOK_EQ: l1 = l1 == l2; break;
        case TOK_NE: l1 = l1 != l2; break;
        case TOK_ULE: l1 = l1 <= l2; break;
        case TOK_UGT: l1 = l1 > l2; break;
        case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
        case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
        case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
        case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
            /* logical */
        case TOK_LAND: l1 = l1 && l2; break;
        case TOK_LOR: l1 = l1 || l2; break;
        default:
            goto general_case;
        }
        /* re-normalize the folded 32-bit result */
        if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
            l1 = ((uint32_t)l1 |
                  (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
        v1->c.i = l1;
        vtop--;
    } else {
        nonconst = VT_NONCONST;
        /* if commutative ops, put c2 as constant */
        if (c1 && (op == '+' || op == '&' || op == '^' ||
                   op == '|' || op == '*' || op == TOK_EQ || op == TOK_NE)) {
            vswap();
            c2 = c1; //c = c1, c1 = c2, c2 = c;
            l2 = l1; //l = l1, l1 = l2, l2 = l;
        }
        if (!const_wanted &&
            c1 && ((l1 == 0 &&
                    (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
                   (l1 == -1 && op == TOK_SAR))) {
            /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
            vtop--;
        } else if (!const_wanted &&
                   c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
                          (op == '|' &&
                           (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))) ||
                          (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
            /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
            if (l2 == 1)
                vtop->c.i = 0;
            vswap();
            vtop--;
        } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
                            op == TOK_PDIV) &&
                           l2 == 1) ||
                          ((op == '+' || op == '-' || op == '|' || op == '^' ||
                            op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
                           l2 == 0) ||
                          (op == '&' &&
                           (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))))) {
            /* filter out NOP operations like x*1, x-0, x&-1... */
            vtop--;
        } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
            /* try to use shifts instead of muls or divs */
            if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
                int n = -1;
                while (l2) {
                    l2 >>= 1;
                    n++;
                }
                vtop->c.i = n;
                if (op == '*')
                    op = TOK_SHL;
                else if (op == TOK_PDIV)
                    op = TOK_SAR;
                else
                    op = TOK_SHR;
            }
            goto general_case;
        } else if (c2 && (op == '+' || op == '-') &&
                   (((vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM))
                    || (vtop[-1].r & (VT_VALMASK | VT_LVAL)) == VT_LOCAL)) {
            /* symbol + constant case */
            if (op == '-')
                l2 = -l2;
            l2 += vtop[-1].c.i;
            /* The backends can't always deal with addends to symbols
               larger than +-1<<31. Don't construct such. */
            if ((int)l2 != l2)
                goto general_case;
            vtop--;
            vtop->c.i = l2;
        } else {
        general_case:
            /* call low level op generator */
            if (t1 == VT_LLONG || t2 == VT_LLONG ||
                (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
                gen_opl(op);
            else
                gen_opi(op);
        }
        /* propagate the "not really constant" marker */
        if (vtop->r == VT_CONST)
            vtop->r |= nonconst;
    }
}
/* Floating-point negation.  Three target-dependent variants:
   - x86/x86_64: gen_opf() already handles TOK_NEG;
   - ARM: emit 0-x, which the backend turns into vneg;
   - generic: flip the sign bit in memory (IEEE-correct for NaN). */
#if defined TCC_TARGET_X86_64 || defined TCC_TARGET_I386
# define gen_negf gen_opf
#elif defined TCC_TARGET_ARM
void gen_negf(int op)
{
    /* arm will detect 0-x and replace by vneg */
    vpushi(0), vswap(), gen_op('-');
}
#else
/* XXX: implement in gen_opf() for other backends too */
void gen_negf(int op)
{
    /* In IEEE negate(x) isn't subtract(0,x). Without NaNs it's
       subtract(-0, x), but with them it's really a sign flip
       operation. We implement this with bit manipulation and have
       to do some type reinterpretation for this, which TCC can do
       only via memory. */

    int align, size, bt;

    size = type_size(&vtop->type, &align);
    bt = vtop->type.t & VT_BTYPE;
    save_reg(gv(RC_TYPE(bt)));   /* force the value into memory */
    vdup();
    /* NOTE: assumes a little-endian layout where the sign bit lives in
       the last byte of the object */
    incr_bf_adr(size - 1);
    vdup();
    vpushi(0x80); /* flip sign */
    gen_op('^');
    vstore();
    vpop();
}
#endif
/* generate a floating point operation with constant propagation */
static void gen_opif(int op)
{
    int c1, c2;
    SValue *v1, *v2;
#if defined _MSC_VER && defined __x86_64__
    /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
    volatile
#endif
    long double f1, f2;

    v1 = vtop - 1;
    v2 = vtop;
    if (op == TOK_NEG)
        v1 = v2;        /* unary minus: single operand */

    /* currently, we cannot do computations with forward symbols */
    c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
    c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
    if (c1 && c2) {
        if (v1->type.t == VT_FLOAT) {
            f1 = v1->c.f;
            f2 = v2->c.f;
        } else if (v1->type.t == VT_DOUBLE) {
            f1 = v1->c.d;
            f2 = v2->c.d;
        } else {
            f1 = v1->c.ld;
            f2 = v2->c.ld;
        }
        /* NOTE: we only do constant propagation if finite number (not
           NaN or infinity) (ANSI spec) */
        /* NOTE(review): the condition below simplifies to
           "!finite(f1) && finite(f2)" -- it does NOT bail out when only
           f2 is NaN/inf, which seems at odds with the comment above.
           Confirm upstream intent before changing. */
        if (!(ieee_finite(f1) || !ieee_finite(f2)) && !const_wanted)
            goto general_case;
        switch(op) {
        case '+': f1 += f2; break;
        case '-': f1 -= f2; break;
        case '*': f1 *= f2; break;
        case '/':
            if (f2 == 0.0) {
                union { float f; unsigned u; } x1, x2, y;
                /* If not in initializer we need to potentially generate
                   FP exceptions at runtime, otherwise we want to fold. */
                if (!const_wanted)
                    goto general_case;
                /* the run-time result of 0.0/0.0 on x87, also of other compilers
                   when used to compile the f1 /= f2 below, would be -nan */
                x1.f = f1, x2.f = f2;
                if (f1 == 0.0)
                    y.u = 0x7fc00000; /* nan */
                else
                    y.u = 0x7f800000; /* infinity */
                y.u |= (x1.u ^ x2.u) & 0x80000000; /* set sign */
                f1 = y.f;
                break;
            }
            f1 /= f2;
            break;
        case TOK_NEG:
            f1 = -f1;
            goto unary_result;
            /* XXX: also handles tests ? */
        default:
            goto general_case;
        }
        vtop--;         /* binary op consumed one operand */
    unary_result:
        /* XXX: overflow test ? */
        if (v1->type.t == VT_FLOAT) {
            v1->c.f = f1;
        } else if (v1->type.t == VT_DOUBLE) {
            v1->c.d = f1;
        } else {
            v1->c.ld = f1;
        }
    } else {
    general_case:
        if (op == TOK_NEG) {
            gen_negf(op);
        } else {
            gen_opf(op);
        }
    }
}
2470 /* print a type. If 'varstr' is not NULL, then the variable is also
2471 printed in the type */
2472 /* XXX: union */
2473 /* XXX: add array and function pointers */
2474 static void type_to_str(char *buf, int buf_size,
2475 CType *type, const char *varstr)
2477 int bt, v, t;
2478 Sym *s, *sa;
2479 char buf1[256];
2480 const char *tstr;
2482 t = type->t;
2483 bt = t & VT_BTYPE;
2484 buf[0] = '\0';
2486 if (t & VT_EXTERN)
2487 pstrcat(buf, buf_size, "extern ");
2488 if (t & VT_STATIC)
2489 pstrcat(buf, buf_size, "static ");
2490 if (t & VT_TYPEDEF)
2491 pstrcat(buf, buf_size, "typedef ");
2492 if (t & VT_INLINE)
2493 pstrcat(buf, buf_size, "inline ");
2494 if (bt != VT_PTR) {
2495 if (t & VT_VOLATILE)
2496 pstrcat(buf, buf_size, "volatile ");
2497 if (t & VT_CONSTANT)
2498 pstrcat(buf, buf_size, "const ");
2500 if (((t & VT_DEFSIGN) && bt == VT_BYTE)
2501 || ((t & VT_UNSIGNED)
2502 && (bt == VT_SHORT || bt == VT_INT || bt == VT_LLONG)
2503 && !IS_ENUM(t)
2505 pstrcat(buf, buf_size, (t & VT_UNSIGNED) ? "unsigned " : "signed ");
2507 buf_size -= strlen(buf);
2508 buf += strlen(buf);
2510 switch(bt) {
2511 case VT_VOID:
2512 tstr = "void";
2513 goto add_tstr;
2514 case VT_BOOL:
2515 tstr = "_Bool";
2516 goto add_tstr;
2517 case VT_BYTE:
2518 tstr = "char";
2519 goto add_tstr;
2520 case VT_SHORT:
2521 tstr = "short";
2522 goto add_tstr;
2523 case VT_INT:
2524 tstr = "int";
2525 goto maybe_long;
2526 case VT_LLONG:
2527 tstr = "long long";
2528 maybe_long:
2529 if (t & VT_LONG)
2530 tstr = "long";
2531 if (!IS_ENUM(t))
2532 goto add_tstr;
2533 tstr = "enum ";
2534 goto tstruct;
2535 case VT_FLOAT:
2536 tstr = "float";
2537 goto add_tstr;
2538 case VT_DOUBLE:
2539 tstr = "double";
2540 if (!(t & VT_LONG))
2541 goto add_tstr;
2542 case VT_LDOUBLE:
2543 tstr = "long double";
2544 add_tstr:
2545 pstrcat(buf, buf_size, tstr);
2546 break;
2547 case VT_STRUCT:
2548 tstr = "struct ";
2549 if (IS_UNION(t))
2550 tstr = "union ";
2551 tstruct:
2552 pstrcat(buf, buf_size, tstr);
2553 v = type->ref->v & ~SYM_STRUCT;
2554 if (v >= SYM_FIRST_ANOM)
2555 pstrcat(buf, buf_size, "<anonymous>");
2556 else
2557 pstrcat(buf, buf_size, get_tok_str(v, NULL));
2558 break;
2559 case VT_FUNC:
2560 s = type->ref;
2561 buf1[0]=0;
2562 if (varstr && '*' == *varstr) {
2563 pstrcat(buf1, sizeof(buf1), "(");
2564 pstrcat(buf1, sizeof(buf1), varstr);
2565 pstrcat(buf1, sizeof(buf1), ")");
2567 pstrcat(buf1, buf_size, "(");
2568 sa = s->next;
2569 while (sa != NULL) {
2570 char buf2[256];
2571 type_to_str(buf2, sizeof(buf2), &sa->type, NULL);
2572 pstrcat(buf1, sizeof(buf1), buf2);
2573 sa = sa->next;
2574 if (sa)
2575 pstrcat(buf1, sizeof(buf1), ", ");
2577 if (s->f.func_type == FUNC_ELLIPSIS)
2578 pstrcat(buf1, sizeof(buf1), ", ...");
2579 pstrcat(buf1, sizeof(buf1), ")");
2580 type_to_str(buf, buf_size, &s->type, buf1);
2581 goto no_var;
2582 case VT_PTR:
2583 s = type->ref;
2584 if (t & (VT_ARRAY|VT_VLA)) {
2585 if (varstr && '*' == *varstr)
2586 snprintf(buf1, sizeof(buf1), "(%s)[%d]", varstr, s->c);
2587 else
2588 snprintf(buf1, sizeof(buf1), "%s[%d]", varstr ? varstr : "", s->c);
2589 type_to_str(buf, buf_size, &s->type, buf1);
2590 goto no_var;
2592 pstrcpy(buf1, sizeof(buf1), "*");
2593 if (t & VT_CONSTANT)
2594 pstrcat(buf1, buf_size, "const ");
2595 if (t & VT_VOLATILE)
2596 pstrcat(buf1, buf_size, "volatile ");
2597 if (varstr)
2598 pstrcat(buf1, sizeof(buf1), varstr);
2599 type_to_str(buf, buf_size, &s->type, buf1);
2600 goto no_var;
2602 if (varstr) {
2603 pstrcat(buf, buf_size, " ");
2604 pstrcat(buf, buf_size, varstr);
2606 no_var: ;
2609 static void type_incompatibility_error(CType* st, CType* dt, const char* fmt)
2611 char buf1[256], buf2[256];
2612 type_to_str(buf1, sizeof(buf1), st, NULL);
2613 type_to_str(buf2, sizeof(buf2), dt, NULL);
2614 tcc_error(fmt, buf1, buf2);
2617 static void type_incompatibility_warning(CType* st, CType* dt, const char* fmt)
2619 char buf1[256], buf2[256];
2620 type_to_str(buf1, sizeof(buf1), st, NULL);
2621 type_to_str(buf2, sizeof(buf2), dt, NULL);
2622 tcc_warning(fmt, buf1, buf2);
2625 static int pointed_size(CType *type)
2627 int align;
2628 return type_size(pointed_type(type), &align);
2631 static inline int is_null_pointer(SValue *p)
2633 if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM | VT_NONCONST)) != VT_CONST)
2634 return 0;
2635 return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
2636 ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
2637 ((p->type.t & VT_BTYPE) == VT_PTR &&
2638 (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0) &&
2639 ((pointed_type(&p->type)->t & VT_BTYPE) == VT_VOID) &&
2640 0 == (pointed_type(&p->type)->t & (VT_CONSTANT | VT_VOLATILE))
2644 /* compare function types. OLD functions match any new functions */
2645 static int is_compatible_func(CType *type1, CType *type2)
2647 Sym *s1, *s2;
2649 s1 = type1->ref;
2650 s2 = type2->ref;
2651 if (s1->f.func_call != s2->f.func_call)
2652 return 0;
2653 if (s1->f.func_type != s2->f.func_type
2654 && s1->f.func_type != FUNC_OLD
2655 && s2->f.func_type != FUNC_OLD)
2656 return 0;
2657 for (;;) {
2658 if (!is_compatible_unqualified_types(&s1->type, &s2->type))
2659 return 0;
2660 if (s1->f.func_type == FUNC_OLD || s2->f.func_type == FUNC_OLD )
2661 return 1;
2662 s1 = s1->next;
2663 s2 = s2->next;
2664 if (!s1)
2665 return !s2;
2666 if (!s2)
2667 return 0;
/* return true if type1 and type2 are the same. If unqualified is
   true, qualifiers on the types are ignored.
 */
static int compare_types(CType *type1, CType *type2, int unqualified)
{
    int bt1, t1, t2;

    t1 = type1->t & VT_TYPE;
    t2 = type2->t & VT_TYPE;
    if (unqualified) {
        /* strip qualifiers before comparing */
        t1 &= ~(VT_CONSTANT | VT_VOLATILE);
        t2 &= ~(VT_CONSTANT | VT_VOLATILE);
    }

    /* Default Vs explicit signedness only matters for char */
    if ((t1 & VT_BTYPE) != VT_BYTE) {
        t1 &= ~VT_DEFSIGN;
        t2 &= ~VT_DEFSIGN;
    }
    /* XXX: bitfields ? */
    if (t1 != t2)
        return 0;

    /* arrays must have the same length unless one is incomplete
       (negative ref->c marks an unknown length) */
    if ((t1 & VT_ARRAY)
        && !(type1->ref->c < 0
             || type2->ref->c < 0
             || type1->ref->c == type2->ref->c))
        return 0;

    /* test more complicated cases */
    bt1 = t1 & VT_BTYPE;
    if (bt1 == VT_PTR) {
        /* recurse on the pointed-to types (with qualifiers) */
        type1 = pointed_type(type1);
        type2 = pointed_type(type2);
        return is_compatible_types(type1, type2);
    } else if (bt1 == VT_STRUCT) {
        /* struct/union types are compatible only if identical */
        return (type1->ref == type2->ref);
    } else if (bt1 == VT_FUNC) {
        return is_compatible_func(type1, type2);
    } else if (IS_ENUM(type1->t) && IS_ENUM(type2->t)) {
        /* If both are enums then they must be the same, if only one is then
           t1 and t2 must be equal, which was checked above already.  */
        return type1->ref == type2->ref;
    } else {
        return 1;
    }
}
/* Check if OP1 and OP2 can be "combined" with operation OP, the combined
   type is stored in DEST if non-null (except for pointer plus/minus) . */
static int combine_types(CType *dest, SValue *op1, SValue *op2, int op)
{
    CType *type1 = &op1->type, *type2 = &op2->type, type;
    int t1 = type1->t, t2 = type2->t, bt1 = t1 & VT_BTYPE, bt2 = t2 & VT_BTYPE;
    int ret = 1; /* 1 = combinable, 0 = type error */

    type.t = VT_VOID;
    type.ref = NULL;

    if (bt1 == VT_VOID || bt2 == VT_VOID) {
        /* only the conditional operator may combine with void */
        ret = op == '?' ? 1 : 0;
        /* NOTE: as an extension, we accept void on only one side */
        type.t = VT_VOID;
    } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
        if (op == '+') ; /* Handled in caller */
        /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
        /* If one is a null ptr constant the result type is the other.  */
        else if (is_null_pointer (op2)) type = *type1;
        else if (is_null_pointer (op1)) type = *type2;
        else if (bt1 != bt2) {
            /* accept comparison or cond-expr between pointer and integer
               with a warning */
            if ((op == '?' || TOK_ISCOND(op))
                && (is_integer_btype(bt1) || is_integer_btype(bt2)))
                tcc_warning("pointer/integer mismatch in %s",
                            op == '?' ? "conditional expression" : "comparison");
            else if (op != '-' || !is_integer_btype(bt2))
                ret = 0;
            /* the result takes the pointer side's type */
            type = *(bt1 == VT_PTR ? type1 : type2);
        } else {
            /* both operands are pointers */
            CType *pt1 = pointed_type(type1);
            CType *pt2 = pointed_type(type2);
            int pbt1 = pt1->t & VT_BTYPE;
            int pbt2 = pt2->t & VT_BTYPE;
            int newquals, copied = 0;
            if (pbt1 != VT_VOID && pbt2 != VT_VOID
                && !compare_types(pt1, pt2, 1/*unqualif*/)) {
                if (op != '?' && !TOK_ISCOND(op))
                    ret = 0;
                else
                    type_incompatibility_warning(type1, type2,
                        op == '?'
                         ? "pointer type mismatch in conditional expression ('%s' and '%s')"
                         : "pointer type mismatch in comparison('%s' and '%s')");
            }
            if (op == '?') {
                /* pointers to void get preferred, otherwise the
                   pointed to types minus qualifs should be compatible */
                type = *((pbt1 == VT_VOID) ? type1 : type2);
                /* combine qualifs */
                newquals = ((pt1->t | pt2->t) & (VT_CONSTANT | VT_VOLATILE));
                if ((~pointed_type(&type)->t & (VT_CONSTANT | VT_VOLATILE))
                    & newquals)
                  {
                    /* copy the pointer target symbol so the shared one
                       is not modified */
                    type.ref = sym_push(SYM_FIELD, &type.ref->type,
                                        0, type.ref->c);
                    copied = 1;
                    pointed_type(&type)->t |= newquals;
                  }
                /* pointers to incomplete arrays get converted to
                   pointers to completed ones if possible */
                if (pt1->t & VT_ARRAY
                    && pt2->t & VT_ARRAY
                    && pointed_type(&type)->ref->c < 0
                    && (pt1->ref->c > 0 || pt2->ref->c > 0))
                  {
                    if (!copied)
                        type.ref = sym_push(SYM_FIELD, &type.ref->type,
                                            0, type.ref->c);
                    pointed_type(&type)->ref =
                        sym_push(SYM_FIELD, &pointed_type(&type)->ref->type,
                                 0, pointed_type(&type)->ref->c);
                    /* take the known (positive) array length */
                    pointed_type(&type)->ref->c =
                        0 < pt1->ref->c ? pt1->ref->c : pt2->ref->c;
                  }
            }
        }
        /* pointer comparisons yield an integer result */
        if (TOK_ISCOND(op))
            type.t = VT_SIZE_T;
    } else if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
        /* structs only combine in '?:' and only when compatible */
        if (op != '?' || !compare_types(type1, type2, 1))
            ret = 0;
        type = *type1;
    } else if (is_float(bt1) || is_float(bt2)) {
        /* usual arithmetic conversions: widest float wins */
        if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
            type.t = VT_LDOUBLE;
        } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
            type.t = VT_DOUBLE;
        } else {
            type.t = VT_FLOAT;
        }
    } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
        /* cast to biggest op */
        type.t = VT_LLONG | VT_LONG;
        if (bt1 == VT_LLONG)
            type.t &= t1;
        if (bt2 == VT_LLONG)
            type.t &= t2;
        /* convert to unsigned if it does not fit in a long long */
        if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
            (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
            type.t |= VT_UNSIGNED;
    } else {
        /* integer operations */
        type.t = VT_INT | (VT_LONG & (t1 | t2));
        /* convert to unsigned if it does not fit in an integer */
        if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
            (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
            type.t |= VT_UNSIGNED;
    }
    if (dest)
        *dest = type;
    return ret;
}
/* generic gen_op: handles types problems */
ST_FUNC void gen_op(int op)
{
    int t1, t2, bt1, bt2, t;
    CType type1, combtype;

 redo:
    t1 = vtop[-1].type.t;
    t2 = vtop[0].type.t;
    bt1 = t1 & VT_BTYPE;
    bt2 = t2 & VT_BTYPE;

    if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
        /* function designators decay to pointers, then retry */
        if (bt2 == VT_FUNC) {
            mk_pointer(&vtop->type);
            gaddrof();
        }
        if (bt1 == VT_FUNC) {
            vswap();
            mk_pointer(&vtop->type);
            gaddrof();
            vswap();
        }
        goto redo;
    } else if (!combine_types(&combtype, vtop - 1, vtop, op)) {
        tcc_error_noabort("invalid operand types for binary operation");
        vpop();
    } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
        /* at least one operand is a pointer */
        /* relational op: must be both pointers */
        int align;
        if (TOK_ISCOND(op))
            goto std_op;
        /* if both pointers, then it must be the '-' op */
        if (bt1 == VT_PTR && bt2 == VT_PTR) {
            if (op != '-')
                tcc_error("cannot use pointers here");
            /* ptr - ptr: subtract, then divide by the element size */
            vpush_type_size(pointed_type(&vtop[-1].type), &align);
            vrott(3);
            gen_opic(op);
            vtop->type.t = VT_PTRDIFF_T;
            vswap();
            gen_op(TOK_PDIV);
        } else {
            /* exactly one pointer : must be '+' or '-'. */
            if (op != '-' && op != '+')
                tcc_error("cannot use pointers here");
            /* Put pointer as first operand */
            if (bt2 == VT_PTR) {
                vswap();
                t = t1, t1 = t2, t2 = t;
            }
#if PTR_SIZE == 4
            if ((vtop[0].type.t & VT_BTYPE) == VT_LLONG)
                /* XXX: truncate here because gen_opl can't handle ptr + long long */
                gen_cast_s(VT_INT);
#endif
            type1 = vtop[-1].type;
            /* scale the integer operand by the pointed-to element size */
            vpush_type_size(pointed_type(&vtop[-1].type), &align);
            gen_op('*');
#ifdef CONFIG_TCC_BCHECK
            if (tcc_state->do_bounds_check && !const_wanted) {
                /* if bounded pointers, we generate a special code to
                   test bounds */
                if (op == '-') {
                    vpushi(0);
                    vswap();
                    gen_op('-');
                }
                gen_bounded_ptr_add();
            } else
#endif
            {
                gen_opic(op);
            }
            type1.t &= ~(VT_ARRAY|VT_VLA);
            /* put again type if gen_opic() swaped operands */
            vtop->type = type1;
        }
    } else {
        /* floats can only be used for a few operations */
        if (is_float(combtype.t)
            && op != '+' && op != '-' && op != '*' && op != '/'
            && !TOK_ISCOND(op))
            tcc_error("invalid operands for binary operation");
        else if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL) {
            /* shifts: the result type follows the left operand only */
            t = bt1 == VT_LLONG ? VT_LLONG : VT_INT;
            if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (t | VT_UNSIGNED))
                t |= VT_UNSIGNED;
            t |= (VT_LONG & t1);
            combtype.t = t;
        }
    std_op:
        t = t2 = combtype.t;
        /* XXX: currently, some unsigned operations are explicit, so
           we modify them here */
        if (t & VT_UNSIGNED) {
            if (op == TOK_SAR)
                op = TOK_SHR;
            else if (op == '/')
                op = TOK_UDIV;
            else if (op == '%')
                op = TOK_UMOD;
            else if (op == TOK_LT)
                op = TOK_ULT;
            else if (op == TOK_GT)
                op = TOK_UGT;
            else if (op == TOK_LE)
                op = TOK_ULE;
            else if (op == TOK_GE)
                op = TOK_UGE;
        }
        /* cast both operands to the combined type */
        vswap();
        gen_cast_s(t);
        vswap();
        /* special case for shifts and long long: we keep the shift as
           an integer */
        if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
            t2 = VT_INT;
        gen_cast_s(t2);
        if (is_float(t))
            gen_opif(op);
        else
            gen_opic(op);
        if (TOK_ISCOND(op)) {
            /* relational op: the result is an int */
            vtop->type.t = VT_INT;
        } else {
            vtop->type.t = t;
        }
    }
    // Make sure that we have converted to an rvalue:
    if (vtop->r & VT_LVAL)
        gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
}
2974 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64 || defined TCC_TARGET_ARM
2975 #define gen_cvt_itof1 gen_cvt_itof
2976 #else
/* generic itof for unsigned long long case */
static void gen_cvt_itof1(int t)
{
    /* unsigned 64-bit -> float is not handled natively here: call the
       matching libtcc1/libgcc helper for the destination type 't' */
    if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
        (VT_LLONG | VT_UNSIGNED)) {

        if (t == VT_FLOAT)
            vpush_helper_func(TOK___floatundisf);
#if LDOUBLE_SIZE != 8
        else if (t == VT_LDOUBLE)
            vpush_helper_func(TOK___floatundixf);
#endif
        else
            vpush_helper_func(TOK___floatundidf);
        vrott(2);
        gfunc_call(1);
        /* push a dummy value and mark it as the function return reg */
        vpushi(0);
        PUT_R_RET(vtop, t);
    } else {
        /* all other cases are handled by the target backend */
        gen_cvt_itof(t);
    }
}
2999 #endif
3001 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64
3002 #define gen_cvt_ftoi1 gen_cvt_ftoi
3003 #else
/* generic ftoi for unsigned long long case */
static void gen_cvt_ftoi1(int t)
{
    int st;
    if (t == (VT_LLONG | VT_UNSIGNED)) {
        /* not handled natively */
        /* pick the helper matching the source float type */
        st = vtop->type.t & VT_BTYPE;
        if (st == VT_FLOAT)
            vpush_helper_func(TOK___fixunssfdi);
#if LDOUBLE_SIZE != 8
        else if (st == VT_LDOUBLE)
            vpush_helper_func(TOK___fixunsxfdi);
#endif
        else
            vpush_helper_func(TOK___fixunsdfdi);
        vrott(2);
        gfunc_call(1);
        /* push a dummy value and mark it as the function return reg */
        vpushi(0);
        PUT_R_RET(vtop, t);
    } else {
        /* all other cases are handled by the target backend */
        gen_cvt_ftoi(t);
    }
}
3027 #endif
3029 /* special delayed cast for char/short */
3030 static void force_charshort_cast(void)
3032 int sbt = BFGET(vtop->r, VT_MUSTCAST) == 2 ? VT_LLONG : VT_INT;
3033 int dbt = vtop->type.t;
3034 vtop->r &= ~VT_MUSTCAST;
3035 vtop->type.t = sbt;
3036 gen_cast_s(dbt == VT_BOOL ? VT_BYTE|VT_UNSIGNED : dbt);
3037 vtop->type.t = dbt;
3040 static void gen_cast_s(int t)
3042 CType type;
3043 type.t = t;
3044 type.ref = NULL;
3045 gen_cast(&type);
/* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
static void gen_cast(CType *type)
{
    int sbt, dbt, sf, df, c;
    int dbt_bt, sbt_bt, ds, ss, bits, trunc;

    /* special delayed cast for char/short */
    if (vtop->r & VT_MUSTCAST)
        force_charshort_cast();

    /* bitfields first get cast to ints */
    if (vtop->type.t & VT_BITFIELD)
        gv(RC_INT);

    dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
    sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
    if (sbt == VT_FUNC)
        sbt = VT_PTR;

again:
    if (sbt != dbt) {
        sf = is_float(sbt);
        df = is_float(dbt);
        dbt_bt = dbt & VT_BTYPE;
        sbt_bt = sbt & VT_BTYPE;
        if (dbt_bt == VT_VOID)
            goto done;
        if (sbt_bt == VT_VOID) {
        error:
            cast_error(&vtop->type, type);
        }

        /* c non-zero: vtop is a plain compile time constant */
        c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
#if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
        /* don't try to convert to long double when cross-compiling
           (on win32, long double is 64bit) */
        c &= (dbt != VT_LDOUBLE) | !!nocode_wanted;
#endif
        if (c) {
            /* constant case: we can do it now */
            /* XXX: in ISOC, cannot do it if error in convert */
            if (sbt == VT_FLOAT)
                vtop->c.ld = vtop->c.f;
            else if (sbt == VT_DOUBLE)
                vtop->c.ld = vtop->c.d;

            if (df) {
                /* int -> float: negate twice to avoid UB on the most
                   negative value when the sign bit is set */
                if (sbt_bt == VT_LLONG) {
                    if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
                        vtop->c.ld = vtop->c.i;
                    else
                        vtop->c.ld = -(long double)-vtop->c.i;
                } else if(!sf) {
                    if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
                        vtop->c.ld = (uint32_t)vtop->c.i;
                    else
                        vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
                }

                if (dbt == VT_FLOAT)
                    vtop->c.f = (float)vtop->c.ld;
                else if (dbt == VT_DOUBLE)
                    vtop->c.d = (double)vtop->c.ld;
            } else if (sf && dbt == VT_BOOL) {
                vtop->c.i = (vtop->c.ld != 0);
            } else {
                /* integer destination: first normalize the source to
                   a full-width value in c.i ... */
                if(sf)
                    vtop->c.i = vtop->c.ld;
                else if (sbt_bt == VT_LLONG || (PTR_SIZE == 8 && sbt == VT_PTR))
                    ;
                else if (sbt & VT_UNSIGNED)
                    vtop->c.i = (uint32_t)vtop->c.i;
                else
                    vtop->c.i = ((uint32_t)vtop->c.i | -(vtop->c.i & 0x80000000));

                /* ... then truncate/extend to the destination width */
                if (dbt_bt == VT_LLONG || (PTR_SIZE == 8 && dbt == VT_PTR))
                    ;
                else if (dbt == VT_BOOL)
                    vtop->c.i = (vtop->c.i != 0);
                else {
                    uint32_t m = dbt_bt == VT_BYTE ? 0xff :
                                 dbt_bt == VT_SHORT ? 0xffff :
                                  0xffffffff;
                    vtop->c.i &= m;
                    if (!(dbt & VT_UNSIGNED))
                        /* sign-extend from the destination's top bit */
                        vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
                }
            }
            goto done;

        } else if (dbt == VT_BOOL
                   && (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM))
                       == (VT_CONST | VT_SYM)) {
            /* addresses are considered non-zero (see tcctest.c:sinit23) */
            vtop->r = VT_CONST;
            vtop->c.i = 1;
            goto done;
        }

        /* cannot generate code for global or static initializers */
        if (nocode_wanted & DATA_ONLY_WANTED)
            goto done;

        /* non constant case: generate code */
        if (dbt == VT_BOOL) {
            gen_test_zero(TOK_NE);
            goto done;
        }

        if (sf || df) {
            if (sf && df) {
                /* convert from fp to fp */
                gen_cvt_ftof(dbt);
            } else if (df) {
                /* convert int to fp */
                gen_cvt_itof1(dbt);
            } else {
                /* convert fp to int */
                sbt = dbt;
                if (dbt_bt != VT_LLONG && dbt_bt != VT_INT)
                    sbt = VT_INT;
                gen_cvt_ftoi1(sbt);
                goto again; /* may need char/short cast */
            }
            goto done;
        }

        ds = btype_size(dbt_bt);
        ss = btype_size(sbt_bt);
        if (ds == 0 || ss == 0)
            goto error;

        if (IS_ENUM(type->t) && type->ref->c < 0)
            tcc_error("cast to incomplete type");

        /* same size and no sign conversion needed */
        if (ds == ss && ds >= 4)
            goto done;
        if (dbt_bt == VT_PTR || sbt_bt == VT_PTR) {
            tcc_warning("cast between pointer and integer of different size");
            if (sbt_bt == VT_PTR) {
                /* put integer type to allow logical operations below */
                vtop->type.t = (PTR_SIZE == 8 ? VT_LLONG : VT_INT);
            }
        }

        /* processor allows { int a = 0, b = *(char*)&a; }
           That means that if we cast to less width, we can just
           change the type and read it still later. */
#define ALLOW_SUBTYPE_ACCESS 1

        if (ALLOW_SUBTYPE_ACCESS && (vtop->r & VT_LVAL)) {
            /* value still in memory */
            if (ds <= ss)
                goto done;
            /* ss <= 4 here */
            if (ds <= 4 && !(dbt == (VT_SHORT | VT_UNSIGNED) && sbt == VT_BYTE)) {
                gv(RC_INT);
                goto done; /* no 64bit envolved */
            }
        }
        gv(RC_INT);

        trunc = 0;
#if PTR_SIZE == 4
        if (ds == 8) {
            /* generate high word */
            if (sbt & VT_UNSIGNED) {
                vpushi(0);
                gv(RC_INT);
            } else {
                gv_dup();
                vpushi(31);
                gen_op(TOK_SAR);
            }
            lbuild(dbt);
        } else if (ss == 8) {
            /* from long long: just take low order word */
            lexpand();
            vpop();
        }
        ss = 4;

#elif PTR_SIZE == 8
        if (ds == 8) {
            /* need to convert from 32bit to 64bit */
            if (sbt & VT_UNSIGNED) {
#if defined(TCC_TARGET_RISCV64)
                /* RISC-V keeps 32bit vals in registers sign-extended.
                   So here we need a zero-extension.  */
                trunc = 32;
#else
                goto done;
#endif
            } else {
                gen_cvt_sxtw();
                goto done;
            }
            ss = ds, ds = 4, dbt = sbt;
        } else if (ss == 8) {
            /* RISC-V keeps 32bit vals in registers sign-extended.
               So here we need a sign-extension for signed types and
               zero-extension. for unsigned types. */
#if !defined(TCC_TARGET_RISCV64)
            trunc = 32; /* zero upper 32 bits for non RISC-V targets */
#endif
        } else {
            ss = 4;
        }
#endif

        if (ds >= ss)
            goto done;
#if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64 || defined TCC_TARGET_ARM64
        /* backends with direct char/short extension instructions */
        if (ss == 4) {
            gen_cvt_csti(dbt);
            goto done;
        }
#endif
        /* generic narrowing: shift left then arithmetic/logical right */
        bits = (ss - ds) * 8;
        /* for unsigned, gen_op will convert SAR to SHR */
        vtop->type.t = (ss == 8 ? VT_LLONG : VT_INT) | (dbt & VT_UNSIGNED);
        vpushi(bits);
        gen_op(TOK_SHL);
        vpushi(bits - trunc);
        gen_op(TOK_SAR);
        vpushi(trunc);
        gen_op(TOK_SHR);
    }
done:
    vtop->type = *type;
    vtop->type.t &= ~ ( VT_CONSTANT | VT_VOLATILE | VT_ARRAY );
}
/* return type size as known at compile time. Put alignment at 'a' */
ST_FUNC int type_size(CType *type, int *a)
{
    Sym *s;
    int bt;

    bt = type->t & VT_BTYPE;
    if (bt == VT_STRUCT) {
        /* struct/union */
        s = type->ref;
        *a = s->r;      /* alignment is kept in the symbol's r field */
        return s->c;    /* size is kept in the symbol's c field */
    } else if (bt == VT_PTR) {
        if (type->t & VT_ARRAY) {
            int ts;

            s = type->ref;
            ts = type_size(&s->type, a);

            /* both element size and count unknown: report positive */
            if (ts < 0 && s->c < 0)
                ts = -ts;

            /* s->c is the element count; negative when incomplete */
            return ts * s->c;
        } else {
            *a = PTR_SIZE;
            return PTR_SIZE;
        }
    } else if (IS_ENUM(type->t) && type->ref->c < 0) {
        *a = 0;
        return -1; /* incomplete enum */
    } else if (bt == VT_LDOUBLE) {
        *a = LDOUBLE_ALIGN;
        return LDOUBLE_SIZE;
    } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
        /* 8-byte scalars: the alignment is ABI dependent */
#ifdef TCC_TARGET_I386
#ifdef TCC_TARGET_PE
        *a = 8;
#else
        *a = 4;
#endif
#elif defined(TCC_TARGET_ARM)
#ifdef TCC_ARM_EABI
        *a = 8;
#else
        *a = 4;
#endif
#else
        *a = 8;
#endif
        return 8;
    } else if (bt == VT_INT || bt == VT_FLOAT) {
        *a = 4;
        return 4;
    } else if (bt == VT_SHORT) {
        *a = 2;
        return 2;
    } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
        /* 128-bit types: 8-byte aligned */
        *a = 8;
        return 16;
    } else {
        /* char, void, function, _Bool */
        *a = 1;
        return 1;
    }
}
/* push type size as known at runtime time on top of value stack. Put
   alignment at 'a' */
static void vpush_type_size(CType *type, int *a)
{
    if (type->t & VT_VLA) {
        /* VLA: the runtime size lives in a local slot at offset
           type->ref->c; still report the element alignment via 'a' */
        type_size(&type->ref->type, a);
        vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
    } else {
        int size = type_size(type, a);
        if (size < 0)
            tcc_error("unknown type size");
        /* push with pointer-sized width so pointer arithmetic works */
#if PTR_SIZE == 8
        vpushll(size);
#else
        vpushi(size);
#endif
    }
}
3365 /* return the pointed type of t */
3366 static inline CType *pointed_type(CType *type)
3368 return &type->ref->type;
3371 /* modify type so that its it is a pointer to type. */
3372 ST_FUNC void mk_pointer(CType *type)
3374 Sym *s;
3375 s = sym_push(SYM_FIELD, type, 0, -1);
3376 type->t = VT_PTR | (type->t & VT_STORAGE);
3377 type->ref = s;
3380 /* return true if type1 and type2 are exactly the same (including
3381 qualifiers).
3383 static int is_compatible_types(CType *type1, CType *type2)
3385 return compare_types(type1,type2,0);
3388 /* return true if type1 and type2 are the same (ignoring qualifiers).
3390 static int is_compatible_unqualified_types(CType *type1, CType *type2)
3392 return compare_types(type1,type2,1);
3395 static void cast_error(CType *st, CType *dt)
3397 type_incompatibility_error(st, dt, "cannot convert '%s' to '%s'");
/* verify type compatibility to store vtop in 'dt' type */
static void verify_assign_cast(CType *dt)
{
    CType *st, *type1, *type2;
    int dbt, sbt, qualwarn, lvl;

    st = &vtop->type; /* source type */
    dbt = dt->t & VT_BTYPE;
    sbt = st->t & VT_BTYPE;
    if (dt->t & VT_CONSTANT)
        tcc_warning("assignment of read-only location");
    switch(dbt) {
    case VT_VOID:
        if (sbt != dbt)
            tcc_error("assignment to void expression");
        break;
    case VT_PTR:
        /* special cases for pointers */
        /* '0' can also be a pointer */
        if (is_null_pointer(vtop))
            break;
        /* accept implicit pointer to integer cast with warning */
        if (is_integer_btype(sbt)) {
            tcc_warning("assignment makes pointer from integer without a cast");
            break;
        }
        type1 = pointed_type(dt);
        if (sbt == VT_PTR)
            type2 = pointed_type(st);
        else if (sbt == VT_FUNC)
            type2 = st; /* a function is implicitly a function pointer */
        else
            goto error;
        if (is_compatible_types(type1, type2))
            break;
        /* walk down matching pointer levels, remembering whether any
           level of the source carries qualifiers the dest drops */
        for (qualwarn = lvl = 0;; ++lvl) {
            if (((type2->t & VT_CONSTANT) && !(type1->t & VT_CONSTANT)) ||
                ((type2->t & VT_VOLATILE) && !(type1->t & VT_VOLATILE)))
                qualwarn = 1;
            dbt = type1->t & (VT_BTYPE|VT_LONG);
            sbt = type2->t & (VT_BTYPE|VT_LONG);
            if (dbt != VT_PTR || sbt != VT_PTR)
                break;
            type1 = pointed_type(type1);
            type2 = pointed_type(type2);
        }
        if (!is_compatible_unqualified_types(type1, type2)) {
            if ((dbt == VT_VOID || sbt == VT_VOID) && lvl == 0) {
                /* void * can match anything */
            } else if (dbt == sbt
                       && is_integer_btype(sbt & VT_BTYPE)
                       && IS_ENUM(type1->t) + IS_ENUM(type2->t)
                          + !!((type1->t ^ type2->t) & VT_UNSIGNED) < 2) {
                /* Like GCC don't warn by default for merely changes
                   in pointer target signedness.  Do warn for different
                   base types, though, in particular for unsigned enums
                   and signed int targets.  */
            } else {
                tcc_warning("assignment from incompatible pointer type");
                break;
            }
        }
        if (qualwarn)
            tcc_warning_c(warn_discarded_qualifiers)("assignment discards qualifiers from pointer target type");
        break;
    case VT_BYTE:
    case VT_SHORT:
    case VT_INT:
    case VT_LLONG:
        if (sbt == VT_PTR || sbt == VT_FUNC) {
            tcc_warning("assignment makes integer from pointer without a cast");
        } else if (sbt == VT_STRUCT) {
            goto case_VT_STRUCT;
        }
        /* XXX: more tests */
        break;
    case VT_STRUCT:
    case_VT_STRUCT:
        if (!is_compatible_unqualified_types(dt, st)) {
        error:
            cast_error(st, dt);
        }
        break;
    }
}
3486 static void gen_assign_cast(CType *dt)
3488 verify_assign_cast(dt);
3489 gen_cast(dt);
/* store vtop in lvalue pushed on stack */
ST_FUNC void vstore(void)
{
    int sbt, dbt, ft, r, size, align, bit_size, bit_pos, delayed_cast;

    ft = vtop[-1].type.t;
    sbt = vtop->type.t & VT_BTYPE;
    dbt = ft & VT_BTYPE;
    verify_assign_cast(&vtop[-1].type);

    if (sbt == VT_STRUCT) {
        /* if structure, only generate pointer */
        /* structure assignment : generate memcpy */
        size = type_size(&vtop->type, &align);
        /* destination, keep on stack() as result */
        vpushv(vtop - 1);
#ifdef CONFIG_TCC_BCHECK
        if (vtop->r & VT_MUSTBOUND)
            gbound(); /* check would be wrong after gaddrof() */
#endif
        vtop->type.t = VT_PTR;
        gaddrof();
        /* source */
        vswap();
#ifdef CONFIG_TCC_BCHECK
        if (vtop->r & VT_MUSTBOUND)
            gbound();
#endif
        vtop->type.t = VT_PTR;
        gaddrof();

#ifdef TCC_TARGET_NATIVE_STRUCT_COPY
        if (1
#ifdef CONFIG_TCC_BCHECK
            && !tcc_state->do_bounds_check
#endif
            )
            /* inline copy code emitted by the backend */
            gen_struct_copy(size);
        else
#endif
        {
            /* type size */
            vpushi(size);
            /* Use memmove, rather than memcpy, as dest and src may be same: */
#ifdef TCC_ARM_EABI
            if(!(align & 7))
                vpush_helper_func(TOK_memmove8);
            else if(!(align & 3))
                vpush_helper_func(TOK_memmove4);
            else
#endif
            vpush_helper_func(TOK_memmove);
            vrott(4);
            gfunc_call(3);
        }

    } else if (ft & VT_BITFIELD) {
        /* bitfield store handling */

        /* save lvalue as expression result (example: s.b = s.a = n;) */
        vdup(), vtop[-1] = vtop[-2];

        bit_pos = BIT_POS(ft);
        bit_size = BIT_SIZE(ft);
        /* remove bit field info to avoid loops */
        vtop[-1].type.t = ft & ~VT_STRUCT_MASK;

        if (dbt == VT_BOOL) {
            /* booleans normalize first, then store as unsigned byte */
            gen_cast(&vtop[-1].type);
            vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
        }
        r = adjust_bf(vtop - 1, bit_pos, bit_size);
        if (dbt != VT_BOOL) {
            gen_cast(&vtop[-1].type);
            dbt = vtop[-1].type.t & VT_BTYPE;
        }
        if (r == VT_STRUCT) {
            /* NOTE(review): presumably a bitfield that cannot be
               accessed as one machine word — see adjust_bf */
            store_packed_bf(bit_pos, bit_size);
        } else {
            unsigned long long mask = (1ULL << bit_size) - 1;
            if (dbt != VT_BOOL) {
                /* mask source */
                if (dbt == VT_LLONG)
                    vpushll(mask);
                else
                    vpushi((unsigned)mask);
                gen_op('&');
            }
            /* shift source */
            vpushi(bit_pos);
            gen_op(TOK_SHL);
            vswap();
            /* duplicate destination */
            vdup();
            vrott(3);
            /* load destination, mask and or with source */
            if (dbt == VT_LLONG)
                vpushll(~(mask << bit_pos));
            else
                vpushi(~((unsigned)mask << bit_pos));
            gen_op('&');
            gen_op('|');
            /* store result */
            vstore();
            /* ... and discard */
            vpop();
        }
    } else if (dbt == VT_VOID) {
        /* store into void lvalue: just drop the value */
        --vtop;
    } else {
        /* optimize char/short casts */
        delayed_cast = 0;
        if ((dbt == VT_BYTE || dbt == VT_SHORT)
            && is_integer_btype(sbt)
            ) {
            if ((vtop->r & VT_MUSTCAST)
                && btype_size(dbt) > btype_size(sbt)
                ) {
                force_charshort_cast();
                delayed_cast = 1;
            } else {
                gen_cast(&vtop[-1].type);
            }
        }

#ifdef CONFIG_TCC_BCHECK
        /* bound check case */
        if (vtop[-1].r & VT_MUSTBOUND) {
            vswap();
            gbound();
            vswap();
        }
#endif
        gv(RC_TYPE(dbt)); /* generate value */

        if (delayed_cast) {
            /* mark the result so the cast happens lazily later */
            vtop->r |= BFVAL(VT_MUSTCAST, (sbt == VT_LLONG) + 1);
            //tcc_warning("deley cast %x -> %x", sbt, dbt);
            vtop->type.t = ft & VT_TYPE;
        }

        /* if lvalue was saved on stack, must read it */
        if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
            SValue sv;
            r = get_reg(RC_INT);
            sv.type.t = VT_PTRDIFF_T;
            sv.r = VT_LOCAL | VT_LVAL;
            sv.c.i = vtop[-1].c.i;
            load(r, &sv);
            vtop[-1].r = r | VT_LVAL;
        }

        r = vtop->r & VT_VALMASK;
        /* two word case handling :
           store second register at word + 4 (or +8 for x86-64) */
        if (USING_TWO_WORDS(dbt)) {
            int load_type = (dbt == VT_QFLOAT) ? VT_DOUBLE : VT_PTRDIFF_T;
            vtop[-1].type.t = load_type;
            store(r, vtop - 1);
            vswap();
            /* convert to int to increment easily */
            vtop->type.t = VT_PTRDIFF_T;
            gaddrof();
            vpushs(PTR_SIZE);
            gen_op('+');
            vtop->r |= VT_LVAL;
            vswap();
            vtop[-1].type.t = load_type;
            /* XXX: it works because r2 is spilled last ! */
            store(vtop->r2, vtop - 1);
        } else {
            /* single word */
            store(r, vtop - 1);
        }
        vswap();
        vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
    }
}
/* post defines POST/PRE add. c is the token ++ or -- */
ST_FUNC void inc(int post, int c)
{
    test_lvalue();
    vdup(); /* save lvalue */
    if (post) {
        gv_dup(); /* duplicate value */
        vrotb(3);
        vrotb(3);
    }
    /* add constant: c - TOK_MID presumably maps TOK_INC/TOK_DEC to +1/-1
       — NOTE(review): relies on the token ordering around TOK_MID in tcc.h,
       confirm against the token definitions */
    vpushi(c - TOK_MID);
    gen_op('+');
    vstore(); /* store value */
    if (post)
        vpop(); /* if post op, return saved value */
}
/* Concatenate one or more adjacent string-literal tokens into *astr
   (NUL-terminated).  'msg' is the message passed to expect() when the
   current token is not a string.  Caller owns *astr and must cstr_free()
   it when done. */
ST_FUNC void parse_mult_str (CString *astr, const char *msg)
{
    /* read the string */
    if (tok != TOK_STR)
        expect(msg);
    cstr_new(astr);
    while (tok == TOK_STR) {
        /* XXX: add \0 handling too ? */
        cstr_cat(astr, tokc.str.data, -1);
        next();
    }
    cstr_ccat(astr, '\0');
}
/* If I is >= 1 and a power of two, returns log2(i)+1.
   If I is 0 returns 0.
   (For other positive I this yields floor(log2(i))+1, i.e. the 1-based
   position of the highest set bit; callers use the power-of-two contract
   to validate alignment values.) */
ST_FUNC int exact_log2p1(int i)
{
    int ret;
    if (!i)
        return 0;
    /* strip whole bytes first, then binary-search the remaining bits */
    for (ret = 1; i >= 1 << 8; ret += 8)
        i >>= 8;
    if (i >= 1 << 4)
        ret += 4, i >>= 4;
    if (i >= 1 << 2)
        ret += 2, i >>= 2;
    if (i >= 1 << 1)
        ret++;
    return ret;
}
/* Parse __attribute__((...)) GNUC extension and accumulate the results
   into *ad.  Loops (via 'redo') so that several consecutive attribute
   specifiers are all consumed.  Unknown attributes are warned about and
   their parenthesized arguments skipped. */
static void parse_attribute(AttributeDef *ad)
{
    int t, n;
    CString astr;

redo:
    if (tok != TOK_ATTRIBUTE1 && tok != TOK_ATTRIBUTE2)
        return;
    next();
    skip('(');
    skip('(');
    while (tok != ')') {
        if (tok < TOK_IDENT)
            expect("attribute name");
        t = tok;
        next();
        switch(t) {
        case TOK_CLEANUP1:
        case TOK_CLEANUP2:
        {
            Sym *s;

            skip('(');
            s = sym_find(tok);
            if (!s) {
                /* cleanup handler not yet declared: fall back to an
                   implicit old-style function declaration */
                tcc_warning_c(warn_implicit_function_declaration)(
                    "implicit declaration of function '%s'", get_tok_str(tok, &tokc));
                s = external_global_sym(tok, &func_old_type);
            } else if ((s->type.t & VT_BTYPE) != VT_FUNC)
                tcc_error("'%s' is not declared as function", get_tok_str(tok, &tokc));
            ad->cleanup_func = s;
            next();
            skip(')');
            break;
        }
        case TOK_CONSTRUCTOR1:
        case TOK_CONSTRUCTOR2:
            ad->f.func_ctor = 1;
            break;
        case TOK_DESTRUCTOR1:
        case TOK_DESTRUCTOR2:
            ad->f.func_dtor = 1;
            break;
        case TOK_ALWAYS_INLINE1:
        case TOK_ALWAYS_INLINE2:
            ad->f.func_alwinl = 1;
            break;
        case TOK_SECTION1:
        case TOK_SECTION2:
            skip('(');
            parse_mult_str(&astr, "section name");
            ad->section = find_section(tcc_state, (char *)astr.data);
            skip(')');
            cstr_free(&astr);
            break;
        case TOK_ALIAS1:
        case TOK_ALIAS2:
            skip('(');
            parse_mult_str(&astr, "alias(\"target\")");
            ad->alias_target = /* save string as token, for later */
                tok_alloc((char*)astr.data, astr.size-1)->tok;
            skip(')');
            cstr_free(&astr);
            break;
        case TOK_VISIBILITY1:
        case TOK_VISIBILITY2:
            skip('(');
            parse_mult_str(&astr,
                           "visibility(\"default|hidden|internal|protected\")");
            if (!strcmp (astr.data, "default"))
                ad->a.visibility = STV_DEFAULT;
            else if (!strcmp (astr.data, "hidden"))
                ad->a.visibility = STV_HIDDEN;
            else if (!strcmp (astr.data, "internal"))
                ad->a.visibility = STV_INTERNAL;
            else if (!strcmp (astr.data, "protected"))
                ad->a.visibility = STV_PROTECTED;
            else
                expect("visibility(\"default|hidden|internal|protected\")");
            skip(')');
            cstr_free(&astr);
            break;
        case TOK_ALIGNED1:
        case TOK_ALIGNED2:
            if (tok == '(') {
                next();
                n = expr_const();
                if (n <= 0 || (n & (n - 1)) != 0)
                    tcc_error("alignment must be a positive power of two");
                skip(')');
            } else {
                /* aligned without argument: maximum useful alignment */
                n = MAX_ALIGN;
            }
            ad->a.aligned = exact_log2p1(n);
            if (n != 1 << (ad->a.aligned - 1))
                tcc_error("alignment of %d is larger than implemented", n);
            break;
        case TOK_PACKED1:
        case TOK_PACKED2:
            ad->a.packed = 1;
            break;
        case TOK_WEAK1:
        case TOK_WEAK2:
            ad->a.weak = 1;
            break;
        case TOK_UNUSED1:
        case TOK_UNUSED2:
            /* currently, no need to handle it because tcc does not
               track unused objects */
            break;
        case TOK_NORETURN1:
        case TOK_NORETURN2:
            ad->f.func_noreturn = 1;
            break;
        case TOK_CDECL1:
        case TOK_CDECL2:
        case TOK_CDECL3:
            ad->f.func_call = FUNC_CDECL;
            break;
        case TOK_STDCALL1:
        case TOK_STDCALL2:
        case TOK_STDCALL3:
            ad->f.func_call = FUNC_STDCALL;
            break;
#ifdef TCC_TARGET_I386
        case TOK_REGPARM1:
        case TOK_REGPARM2:
            skip('(');
            n = expr_const();
            /* clamp to the 0..3 register-argument range */
            if (n > 3)
                n = 3;
            else if (n < 0)
                n = 0;
            if (n > 0)
                ad->f.func_call = FUNC_FASTCALL1 + n - 1;
            skip(')');
            break;
        case TOK_FASTCALL1:
        case TOK_FASTCALL2:
        case TOK_FASTCALL3:
            ad->f.func_call = FUNC_FASTCALLW;
            break;
#endif
        case TOK_MODE:
            skip('(');
            switch(tok) {
            case TOK_MODE_DI:
                ad->attr_mode = VT_LLONG + 1;
                break;
            case TOK_MODE_QI:
                ad->attr_mode = VT_BYTE + 1;
                break;
            case TOK_MODE_HI:
                ad->attr_mode = VT_SHORT + 1;
                break;
            case TOK_MODE_SI:
            case TOK_MODE_word:
                ad->attr_mode = VT_INT + 1;
                break;
            default:
                tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok, NULL));
                break;
            }
            next();
            skip(')');
            break;
        case TOK_DLLEXPORT:
            ad->a.dllexport = 1;
            break;
        case TOK_NODECORATE:
            ad->a.nodecorate = 1;
            break;
        case TOK_DLLIMPORT:
            ad->a.dllimport = 1;
            break;
        default:
            tcc_warning_c(warn_unsupported)("'%s' attribute ignored", get_tok_str(t, NULL));
            /* skip parameters */
            if (tok == '(') {
                int parenthesis = 0;
                do {
                    if (tok == '(')
                        parenthesis++;
                    else if (tok == ')')
                        parenthesis--;
                    next();
                } while (parenthesis && tok != -1);
            }
            break;
        }
        if (tok != ',')
            break;
        next();
    }
    skip(')');
    skip(')');
    goto redo;
}
/* Look up member 'v' in struct/union 'type', descending recursively into
   anonymous struct/union members.  On success, *cumofs is incremented by
   the byte offset of each anonymous container traversed; returns the
   member Sym, or NULL when the member list is exhausted without a match. */
static Sym * find_field (CType *type, int v, int *cumofs)
{
    Sym *s = type->ref;
    v |= SYM_FIELD;
    while ((s = s->next) != NULL) {
        /* anonymous sub-struct/union: search it recursively */
        if ((s->v & SYM_FIELD) &&
            (s->type.t & VT_BTYPE) == VT_STRUCT &&
            (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
            Sym *ret = find_field (&s->type, v, cumofs);
            if (ret) {
                *cumofs += s->c;
                return ret;
            }
        }
        if (s->v == v)
            break;
    }
    return s;
}
/* Detect duplicate member names in 'type', including members pulled in
   through anonymous sub-structs.  The SYM_FIELD bit of each identifier's
   TokenSym is used as a visit mark and toggled with XOR: a pass with
   check=1 sets marks and errors on already-marked names, a matching pass
   with check=0 clears the marks again. */
static void check_fields (CType *type, int check)
{
    Sym *s = type->ref;

    while ((s = s->next) != NULL) {
        int v = s->v & ~SYM_FIELD;
        if (v < SYM_FIRST_ANOM) {
            TokenSym *ts = table_ident[v - TOK_IDENT];
            if (check && (ts->tok & SYM_FIELD))
                tcc_error("duplicate member '%s'", get_tok_str(v, NULL));
            ts->tok ^= SYM_FIELD;
        } else if ((s->type.t & VT_BTYPE) == VT_STRUCT)
            check_fields (&s->type, check);
    }
}
/* Lay out the fields of a struct/union: compute each field's byte offset
   (f->c) and bit position (encoded into f->type.t), and the aggregate's
   total size (type->ref->c) and alignment (type->ref->r).  Supports both
   PCC/GCC and MS bit-field layout modes, #pragma pack, and the
   packed/aligned attributes.  A second pass rewrites bit-field access
   types (f->auxtype) so every bit-field can be loaded/stored without
   reading past the end of the struct. */
static void struct_layout(CType *type, AttributeDef *ad)
{
    int size, align, maxalign, offset, c, bit_pos, bit_size;
    int packed, a, bt, prevbt, prev_bit_size;
    int pcc = !tcc_state->ms_bitfields;
    int pragma_pack = *tcc_state->pack_stack_ptr;
    Sym *f;

    maxalign = 1;
    offset = 0;
    c = 0;        /* running byte size */
    bit_pos = 0;  /* bit offset within the current storage unit */
    prevbt = VT_STRUCT; /* make it never match */
    prev_bit_size = 0;

//#define BF_DEBUG

    for (f = type->ref->next; f; f = f->next) {
        if (f->type.t & VT_BITFIELD)
            bit_size = BIT_SIZE(f->type.t);
        else
            bit_size = -1;
        size = type_size(&f->type, &align);
        a = f->a.aligned ? 1 << (f->a.aligned - 1) : 0;
        packed = 0;

        if (pcc && bit_size == 0) {
            /* in pcc mode, packing does not affect zero-width bitfields */
            ;
        } else {
            /* in pcc mode, attribute packed overrides if set. */
            if (pcc && (f->a.packed || ad->a.packed))
                align = packed = 1;

            /* pragma pack overrides align if lesser and packs bitfields always */
            if (pragma_pack) {
                packed = 1;
                if (pragma_pack < align)
                    align = pragma_pack;
                /* in pcc mode pragma pack also overrides individual align */
                if (pcc && pragma_pack < a)
                    a = 0;
            }
        }
        /* some individual align was specified */
        if (a)
            align = a;

        if (type->ref->type.t == VT_UNION) {
            if (pcc && bit_size >= 0)
                size = (bit_size + 7) >> 3;
            offset = 0;
            if (size > c)
                c = size;

        } else if (bit_size < 0) {
            /* ordinary (non-bitfield) member */
            if (pcc)
                c += (bit_pos + 7) >> 3; /* flush pending bit-field bits */
            c = (c + align - 1) & -align;
            offset = c;
            if (size > 0)
                c += size;
            bit_pos = 0;
            prevbt = VT_STRUCT;
            prev_bit_size = 0;

        } else {
            /* A bit-field. Layout is more complicated. There are two
               options: PCC (GCC) compatible and MS compatible */
            if (pcc) {
                /* In PCC layout a bit-field is placed adjacent to the
                   preceding bit-fields, except if:
                   - it has zero-width
                   - an individual alignment was given
                   - it would overflow its base type container and
                     there is no packing */
                if (bit_size == 0) {
            new_field:
                    c = (c + ((bit_pos + 7) >> 3) + align - 1) & -align;
                    bit_pos = 0;
                } else if (f->a.aligned) {
                    goto new_field;
                } else if (!packed) {
                    int a8 = align * 8;
                    int ofs = ((c * 8 + bit_pos) % a8 + bit_size + a8 - 1) / a8;
                    if (ofs > size / align)
                        goto new_field;
                }

                /* in pcc mode, long long bitfields have type int if they fit */
                if (size == 8 && bit_size <= 32)
                    f->type.t = (f->type.t & ~VT_BTYPE) | VT_INT, size = 4;

                while (bit_pos >= align * 8)
                    c += align, bit_pos -= align * 8;
                offset = c;

                /* In PCC layout named bit-fields influence the alignment
                   of the containing struct using the base types alignment,
                   except for packed fields (which here have correct align). */
                if (f->v & SYM_FIRST_ANOM
                    // && bit_size // ??? gcc on ARM/rpi does that
                    )
                    align = 1;

            } else {
                bt = f->type.t & VT_BTYPE;
                if ((bit_pos + bit_size > size * 8)
                    || (bit_size > 0) == (bt != prevbt)
                    ) {
                    c = (c + align - 1) & -align;
                    offset = c;
                    bit_pos = 0;
                    /* In MS bitfield mode a bit-field run always uses
                       at least as many bits as the underlying type.
                       To start a new run it's also required that this
                       or the last bit-field had non-zero width. */
                    if (bit_size || prev_bit_size)
                        c += size;
                }
                /* In MS layout the records alignment is normally
                   influenced by the field, except for a zero-width
                   field at the start of a run (but by further zero-width
                   fields it is again). */
                if (bit_size == 0 && prevbt != bt)
                    align = 1;
                prevbt = bt;
                prev_bit_size = bit_size;
            }

            /* encode the bit position into the field's type word */
            f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
                        | (bit_pos << VT_STRUCT_SHIFT);
            bit_pos += bit_size;
        }
        if (align > maxalign)
            maxalign = align;

#ifdef BF_DEBUG
        printf("set field %s offset %-2d size %-2d align %-2d",
               get_tok_str(f->v & ~SYM_FIELD, NULL), offset, size, align);
        if (f->type.t & VT_BITFIELD) {
            printf(" pos %-2d bits %-2d",
                   BIT_POS(f->type.t),
                   BIT_SIZE(f->type.t)
                   );
        }
        printf("\n");
#endif

        f->c = offset;
        f->r = 0;
    }

    if (pcc)
        c += (bit_pos + 7) >> 3;

    /* store size and alignment */
    a = bt = ad->a.aligned ? 1 << (ad->a.aligned - 1) : 1;
    if (a < maxalign)
        a = maxalign;
    type->ref->r = a;
    if (pragma_pack && pragma_pack < maxalign && 0 == pcc) {
        /* can happen if individual align for some member was given. In
           this case MSVC ignores maxalign when aligning the size */
        a = pragma_pack;
        if (a < bt)
            a = bt;
    }
    c = (c + a - 1) & -a;
    type->ref->c = c;

#ifdef BF_DEBUG
    printf("struct size %-2d align %-2d\n\n", c, a), fflush(stdout);
#endif

    /* check whether we can access bitfields by their type */
    for (f = type->ref->next; f; f = f->next) {
        int s, px, cx, c0;
        CType t;

        if (0 == (f->type.t & VT_BITFIELD))
            continue;
        f->type.ref = f;
        f->auxtype = -1;
        bit_size = BIT_SIZE(f->type.t);
        if (bit_size == 0)
            continue;
        bit_pos = BIT_POS(f->type.t);
        size = type_size(&f->type, &align);

        /* access already fits within the struct: nothing to fix */
        if (bit_pos + bit_size <= size * 8 && f->c + size <= c
#ifdef TCC_TARGET_ARM
            && !(f->c & (align - 1))
#endif
            )
            continue;

        /* try to access the field using a different type */
        c0 = -1, s = align = 1;
        t.t = VT_BYTE;
        for (;;) {
            px = f->c * 8 + bit_pos;
            cx = (px >> 3) & -align;
            px = px - (cx << 3);
            if (c0 == cx)
                break;
            s = (px + bit_size + 7) >> 3;
            if (s > 4) {
                t.t = VT_LLONG;
            } else if (s > 2) {
                t.t = VT_INT;
            } else if (s > 1) {
                t.t = VT_SHORT;
            } else {
                t.t = VT_BYTE;
            }
            s = type_size(&t, &align);
            c0 = cx;
        }

        if (px + bit_size <= s * 8 && cx + s <= c
#ifdef TCC_TARGET_ARM
            && !(cx & (align - 1))
#endif
            ) {
            /* update offset and bit position */
            f->c = cx;
            bit_pos = px;
            f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
                        | (bit_pos << VT_STRUCT_SHIFT);
            if (s != size)
                f->auxtype = t.t;
#ifdef BF_DEBUG
            printf("FIX field %s offset %-2d size %-2d align %-2d "
                   "pos %-2d bits %-2d\n",
                   get_tok_str(f->v & ~SYM_FIELD, NULL),
                   cx, s, align, px, bit_size);
#endif
        } else {
            /* fall back to load/store single-byte wise */
            f->auxtype = VT_STRUCT;
#ifdef BF_DEBUG
            printf("FIX field %s : load byte-wise\n",
                   get_tok_str(f->v & ~SYM_FIELD, NULL));
#endif
        }
    }
}
/* forward declaration; presumably defined later in this file (used by
   struct_decl for _Static_assert inside struct bodies) */
static void do_Static_assert(void);
/* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
static void struct_decl(CType *type, int u)
{
    int v, c, size, align, flexible;
    int bit_size, bsize, bt;
    Sym *s, *ss, **ps;
    AttributeDef ad, ad1;
    CType type1, btype;

    memset(&ad, 0, sizeof ad);
    next();
    parse_attribute(&ad);
    if (tok != '{') {
        v = tok;
        next();
        /* struct already defined ? return it */
        if (v < TOK_IDENT)
            expect("struct/union/enum name");
        s = struct_find(v);
        if (s && (s->sym_scope == local_scope || tok != '{')) {
            if (u == s->type.t)
                goto do_decl;
            if (u == VT_ENUM && IS_ENUM(s->type.t))
                goto do_decl;
            tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
        }
    } else {
        /* unnamed struct/union/enum: make up an anonymous tag */
        v = anon_sym++;
    }
    /* Record the original enum/struct/union token. */
    type1.t = u == VT_ENUM ? u | VT_INT | VT_UNSIGNED : u;
    type1.ref = NULL;
    /* we put an undefined size for struct/union */
    s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
    s->r = 0; /* default alignment is zero as gcc */
do_decl:
    type->t = s->type.t;
    type->ref = s;

    if (tok == '{') {
        next();
        if (s->c != -1)
            tcc_error("struct/union/enum already defined");
        s->c = -2; /* mark as "currently being defined" */
        /* cannot be empty */
        /* non empty enums are not allowed */
        ps = &s->next;
        if (u == VT_ENUM) {
            /* pl/nl track the largest positive / smallest negative value
               to pick the underlying integer type below */
            long long ll = 0, pl = 0, nl = 0;
            CType t;
            t.ref = s;
            /* enum symbols have static storage */
            t.t = VT_INT|VT_STATIC|VT_ENUM_VAL;
            for(;;) {
                v = tok;
                if (v < TOK_UIDENT)
                    expect("identifier");
                ss = sym_find(v);
                if (ss && !local_stack)
                    tcc_error("redefinition of enumerator '%s'",
                              get_tok_str(v, NULL));
                next();
                if (tok == '=') {
                    next();
                    ll = expr_const64();
                }
                ss = sym_push(v, &t, VT_CONST, 0);
                ss->enum_val = ll;
                *ps = ss, ps = &ss->next;
                if (ll < nl)
                    nl = ll;
                if (ll > pl)
                    pl = ll;
                if (tok != ',')
                    break;
                next();
                ll++;
                /* NOTE: we accept a trailing comma */
                if (tok == '}')
                    break;
            }
            skip('}');
            /* set integral type of the enum */
            t.t = VT_INT;
            if (nl >= 0) {
                if (pl != (unsigned)pl)
                    t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
                t.t |= VT_UNSIGNED;
            } else if (pl != (int)pl || nl != (int)nl)
                t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
            s->type.t = type->t = t.t | VT_ENUM;
            s->c = 0;
            /* set type for enum members */
            for (ss = s->next; ss; ss = ss->next) {
                ll = ss->enum_val;
                if (ll == (int)ll) /* default is int if it fits */
                    continue;
                if (t.t & VT_UNSIGNED) {
                    ss->type.t |= VT_UNSIGNED;
                    if (ll == (unsigned)ll)
                        continue;
                }
                ss->type.t = (ss->type.t & ~VT_BTYPE)
                             | (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
            }
        } else {
            c = 0;        /* set once a real member has been seen */
            flexible = 0; /* set once a flexible array member was parsed */
            while (tok != '}') {
                if (tok == TOK_STATIC_ASSERT) {
                    do_Static_assert();
                    continue;
                }
                if (!parse_btype(&btype, &ad1, 0)) {
                    skip(';');
                    continue;
                }
                while (1) {
                    if (flexible)
                        tcc_error("flexible array member '%s' not at the end of struct",
                                  get_tok_str(v, NULL));
                    bit_size = -1;
                    v = 0;
                    type1 = btype;
                    if (tok != ':') {
                        if (tok != ';')
                            type_decl(&type1, &ad1, &v, TYPE_DIRECT);
                        if (v == 0) {
                            if ((type1.t & VT_BTYPE) != VT_STRUCT)
                                expect("identifier");
                            else {
                                /* anonymous struct/union member: only
                                   allowed with tagless types unless MS
                                   extensions are enabled */
                                int v = btype.ref->v;
                                if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
                                    if (tcc_state->ms_extensions == 0)
                                        expect("identifier");
                                }
                            }
                        }
                        if (type_size(&type1, &align) < 0) {
                            if ((u == VT_STRUCT) && (type1.t & VT_ARRAY) && c)
                                flexible = 1;
                            else
                                tcc_error("field '%s' has incomplete type",
                                          get_tok_str(v, NULL));
                        }
                        if ((type1.t & VT_BTYPE) == VT_FUNC ||
                            (type1.t & VT_BTYPE) == VT_VOID ||
                            (type1.t & VT_STORAGE))
                            tcc_error("invalid type for '%s'",
                                      get_tok_str(v, NULL));
                    }
                    if (tok == ':') {
                        next();
                        bit_size = expr_const();
                        /* XXX: handle v = 0 case for messages */
                        if (bit_size < 0)
                            tcc_error("negative width in bit-field '%s'",
                                      get_tok_str(v, NULL));
                        if (v && bit_size == 0)
                            tcc_error("zero width for bit-field '%s'",
                                      get_tok_str(v, NULL));
                        parse_attribute(&ad1);
                    }
                    size = type_size(&type1, &align);
                    if (bit_size >= 0) {
                        bt = type1.t & VT_BTYPE;
                        if (bt != VT_INT &&
                            bt != VT_BYTE &&
                            bt != VT_SHORT &&
                            bt != VT_BOOL &&
                            bt != VT_LLONG)
                            tcc_error("bitfields must have scalar type");
                        bsize = size * 8;
                        if (bit_size > bsize) {
                            tcc_error("width of '%s' exceeds its type",
                                      get_tok_str(v, NULL));
                        } else if (bit_size == bsize
                                   && !ad.a.packed && !ad1.a.packed) {
                            /* no need for bit fields */
                            ;
                        } else if (bit_size == 64) {
                            tcc_error("field width 64 not implemented");
                        } else {
                            type1.t = (type1.t & ~VT_STRUCT_MASK)
                                      | VT_BITFIELD
                                      | (bit_size << (VT_STRUCT_SHIFT + 6));
                        }
                    }
                    if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
                        /* Remember we've seen a real field to check
                           for placement of flexible array member. */
                        c = 1;
                    }
                    /* If member is a struct or bit-field, enforce
                       placing into the struct (as anonymous). */
                    if (v == 0 &&
                        ((type1.t & VT_BTYPE) == VT_STRUCT ||
                         bit_size >= 0)) {
                        v = anon_sym++;
                    }
                    if (v) {
                        ss = sym_push(v | SYM_FIELD, &type1, 0, 0);
                        ss->a = ad1.a;
                        *ps = ss;
                        ps = &ss->next;
                    }
                    if (tok == ';' || tok == TOK_EOF)
                        break;
                    skip(',');
                }
                skip(';');
            }
            skip('}');
            parse_attribute(&ad);
            if (ad.cleanup_func) {
                tcc_warning("attribute '__cleanup__' ignored on type");
            }
            check_fields(type, 1);
            check_fields(type, 0);
            struct_layout(type, &ad);
            if (debug_modes)
                tcc_debug_fix_anon(tcc_state, type);
        }
    }
}
/* Merge the symbol attributes and function attributes of 's' into *ad
   (used to inherit attributes from typedefs and typeof expressions). */
static void sym_to_attr(AttributeDef *ad, Sym *s)
{
    merge_symattr(&ad->a, &s->a);
    merge_funcattr(&ad->f, &s->f);
}
/* Add type qualifiers to a type. If the type is an array then the qualifiers
   are added to the element type, copied because it could be a typedef. */
static void parse_btype_qualify(CType *type, int qualifiers)
{
    /* descend through array dimensions, duplicating each level so a
       shared typedef'd element type is not modified in place */
    while (type->t & VT_ARRAY) {
        type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
        type = &type->ref->type;
    }
    type->t |= qualifiers;
}
/* return 0 if no type declaration. otherwise, return the basic type
   and skip it.
   Accumulates basic-type keywords, qualifiers, storage classes,
   attributes and typedef names into type->t / *ad.  'bt' and 'st'
   record the basic and size-modifier keywords already seen so that
   invalid combinations ("too many basic types") are diagnosed.  When
   ignore_label is set, an identifier followed by ':' is treated as a
   label, not a typedef name.
 */
static int parse_btype(CType *type, AttributeDef *ad, int ignore_label)
{
    int t, u, bt, st, type_found, typespec_found, g, n;
    Sym *s;
    CType type1;

    memset(ad, 0, sizeof(AttributeDef));
    type_found = 0;
    typespec_found = 0;
    t = VT_INT;
    bt = st = -1;
    type->ref = NULL;

    while(1) {
        switch(tok) {
        case TOK_EXTENSION:
            /* currently, we really ignore extension */
            next();
            continue;

        /* basic types */
        case TOK_CHAR:
            u = VT_BYTE;
        basic_type:
            next();
        basic_type1:
            if (u == VT_SHORT || u == VT_LONG) {
                if (st != -1 || (bt != -1 && bt != VT_INT))
                    tmbt: tcc_error("too many basic types");
                st = u;
            } else {
                if (bt != -1 || (st != -1 && u != VT_INT))
                    goto tmbt;
                bt = u;
            }
            if (u != VT_INT)
                t = (t & ~(VT_BTYPE|VT_LONG)) | u;
            typespec_found = 1;
            break;
        case TOK_VOID:
            u = VT_VOID;
            goto basic_type;
        case TOK_SHORT:
            u = VT_SHORT;
            goto basic_type;
        case TOK_INT:
            u = VT_INT;
            goto basic_type;
        case TOK_ALIGNAS:
            { int n;
              AttributeDef ad1;
              next();
              skip('(');
              memset(&ad1, 0, sizeof(AttributeDef));
              if (parse_btype(&type1, &ad1, 0)) {
                  /* _Alignas(type): use that type's alignment */
                  type_decl(&type1, &ad1, &n, TYPE_ABSTRACT);
                  if (ad1.a.aligned)
                      n = 1 << (ad1.a.aligned - 1);
                  else
                      type_size(&type1, &n);
              } else {
                  /* _Alignas(constant-expression) */
                  n = expr_const();
                  if (n < 0 || (n & (n - 1)) != 0)
                      tcc_error("alignment must be a positive power of two");
              }
              skip(')');
              ad->a.aligned = exact_log2p1(n);
            }
            continue;
        case TOK_LONG:
            if ((t & VT_BTYPE) == VT_DOUBLE) {
                t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
            } else if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
                /* second 'long': promote to long long */
                t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LLONG;
            } else {
                u = VT_LONG;
                goto basic_type;
            }
            next();
            break;
#ifdef TCC_TARGET_ARM64
        case TOK_UINT128:
            /* GCC's __uint128_t appears in some Linux header files. Make it a
               synonym for long double to get the size and alignment right. */
            u = VT_LDOUBLE;
            goto basic_type;
#endif
        case TOK_BOOL:
            u = VT_BOOL;
            goto basic_type;
        case TOK_COMPLEX:
            tcc_error("_Complex is not yet supported");
        case TOK_FLOAT:
            u = VT_FLOAT;
            goto basic_type;
        case TOK_DOUBLE:
            if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
                t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
            } else {
                u = VT_DOUBLE;
                goto basic_type;
            }
            next();
            break;
        case TOK_ENUM:
            struct_decl(&type1, VT_ENUM);
        basic_type2:
            u = type1.t;
            type->ref = type1.ref;
            goto basic_type1;
        case TOK_STRUCT:
            struct_decl(&type1, VT_STRUCT);
            goto basic_type2;
        case TOK_UNION:
            struct_decl(&type1, VT_UNION);
            goto basic_type2;

        /* type modifiers */
        case TOK__Atomic:
            next();
            type->t = t;
            parse_btype_qualify(type, VT_ATOMIC);
            t = type->t;
            if (tok == '(') {
                /* _Atomic(type) form */
                parse_expr_type(&type1);
                /* remove all storage modifiers except typedef */
                type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
                if (type1.ref)
                    sym_to_attr(ad, type1.ref);
                goto basic_type2;
            }
            break;
        case TOK_CONST1:
        case TOK_CONST2:
        case TOK_CONST3:
            type->t = t;
            parse_btype_qualify(type, VT_CONSTANT);
            t = type->t;
            next();
            break;
        case TOK_VOLATILE1:
        case TOK_VOLATILE2:
        case TOK_VOLATILE3:
            type->t = t;
            parse_btype_qualify(type, VT_VOLATILE);
            t = type->t;
            next();
            break;
        case TOK_SIGNED1:
        case TOK_SIGNED2:
        case TOK_SIGNED3:
            if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
                tcc_error("signed and unsigned modifier");
            t |= VT_DEFSIGN;
            next();
            typespec_found = 1;
            break;
        case TOK_REGISTER:
        case TOK_AUTO:
        case TOK_RESTRICT1:
        case TOK_RESTRICT2:
        case TOK_RESTRICT3:
            /* these keywords are accepted and ignored */
            next();
            break;
        case TOK_UNSIGNED:
            if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
                tcc_error("signed and unsigned modifier");
            t |= VT_DEFSIGN | VT_UNSIGNED;
            next();
            typespec_found = 1;
            break;

        /* storage */
        case TOK_EXTERN:
            g = VT_EXTERN;
            goto storage;
        case TOK_STATIC:
            g = VT_STATIC;
            goto storage;
        case TOK_TYPEDEF:
            g = VT_TYPEDEF;
            goto storage;
        storage:
            if (t & (VT_EXTERN|VT_STATIC|VT_TYPEDEF) & ~g)
                tcc_error("multiple storage classes");
            t |= g;
            next();
            break;
        case TOK_INLINE1:
        case TOK_INLINE2:
        case TOK_INLINE3:
            t |= VT_INLINE;
            next();
            break;
        case TOK_NORETURN3:
            next();
            ad->f.func_noreturn = 1;
            break;
        /* GNUC attribute */
        case TOK_ATTRIBUTE1:
        case TOK_ATTRIBUTE2:
            parse_attribute(ad);
            if (ad->attr_mode) {
                /* __attribute__((mode(...))) overrides the basic type */
                u = ad->attr_mode -1;
                t = (t & ~(VT_BTYPE|VT_LONG)) | u;
            }
            continue;
        /* GNUC typeof */
        case TOK_TYPEOF1:
        case TOK_TYPEOF2:
        case TOK_TYPEOF3:
            next();
            parse_expr_type(&type1);
            /* remove all storage modifiers except typedef */
            type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
            if (type1.ref)
                sym_to_attr(ad, type1.ref);
            goto basic_type2;
        case TOK_THREAD_LOCAL:
            tcc_error("_Thread_local is not implemented");
        default:
            if (typespec_found)
                goto the_end;
            s = sym_find(tok);
            if (!s || !(s->type.t & VT_TYPEDEF))
                goto the_end;

            n = tok, next();
            if (tok == ':' && ignore_label) {
                /* ignore if it's a label */
                unget_tok(n);
                goto the_end;
            }

            /* apply the typedef's type, re-applying any qualifiers
               already collected on top of it */
            t &= ~(VT_BTYPE|VT_LONG);
            u = t & ~(VT_CONSTANT | VT_VOLATILE), t ^= u;
            type->t = (s->type.t & ~VT_TYPEDEF) | u;
            type->ref = s->type.ref;
            if (t)
                parse_btype_qualify(type, t);
            t = type->t;
            /* get attributes from typedef */
            sym_to_attr(ad, s);
            typespec_found = 1;
            st = bt = -2;
            break;
        }
        type_found = 1;
    }
the_end:
    if (tcc_state->char_is_unsigned) {
        if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
            t |= VT_UNSIGNED;
    }
    /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
    bt = t & (VT_BTYPE|VT_LONG);
    if (bt == VT_LONG)
        t |= LONG_SIZE == 8 ? VT_LLONG : VT_INT;
#ifdef TCC_USING_DOUBLE_FOR_LDOUBLE
    if (bt == VT_LDOUBLE)
        t = (t & ~(VT_BTYPE|VT_LONG)) | (VT_DOUBLE|VT_LONG);
#endif
    type->t = t;
    return type_found;
}
/* convert a function parameter type (array to pointer and function to
   function pointer) */
static inline void convert_parameter_type(CType *pt)
{
    /* remove const and volatile qualifiers (XXX: const could be used
       to indicate a const function parameter */
    pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
    /* array must be transformed to pointer according to ANSI C */
    pt->t &= ~VT_ARRAY;
    if ((pt->t & VT_BTYPE) == VT_FUNC) {
        mk_pointer(pt);
    }
}
/* Parse the '(' "string" [ "string" ... ] part of an asm construct into
   *astr; the caller consumes the closing ')'. */
ST_FUNC void parse_asm_str(CString *astr)
{
    skip('(');
    parse_mult_str(astr, "string constant");
}
/* Parse an asm label and return the token */
static int asm_label_instr(void)
{
    int v;
    CString astr;

    next();
    parse_asm_str(&astr);
    skip(')');
#ifdef ASM_DEBUG
    printf("asm_alias: \"%s\"\n", (char *)astr.data);
#endif
    /* intern the label string as a token so it can be used as a symbol name */
    v = tok_alloc(astr.data, astr.size - 1)->tok;
    cstr_free(&astr);
    return v;
}
/* Parse the suffix part of a declarator: a '(' parameter list ')'
   building a VT_FUNC type, or one or more '[' size ']' building
   VT_ARRAY / VT_VLA types.  Returns 0 when the '(' turned out to start
   an inner (recursive) declarator that the caller must handle, 1
   otherwise. */
static int post_type(CType *type, AttributeDef *ad, int storage, int td)
{
    int n, l, t1, arg_size, align;
    Sym **plast, *s, *first;
    AttributeDef ad1;
    CType pt;
    TokenString *vla_array_tok = NULL;
    int *vla_array_str = NULL;

    if (tok == '(') {
        /* function type, or recursive declarator (return if so) */
        next();
        if (TYPE_DIRECT == (td & (TYPE_DIRECT|TYPE_ABSTRACT)))
            return 0;
        if (tok == ')')
            l = 0;
        else if (parse_btype(&pt, &ad1, 0))
            l = FUNC_NEW;
        else if (td & (TYPE_DIRECT|TYPE_ABSTRACT)) {
            merge_attr (ad, &ad1);
            return 0;
        } else
            l = FUNC_OLD;

        first = NULL;
        plast = &first;
        arg_size = 0;
        ++local_scope;
        if (l) {
            for(;;) {
                /* read param name and compute offset */
                if (l != FUNC_OLD) {
                    if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
                        break;
                    type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT | TYPE_PARAM);
                    if ((pt.t & VT_BTYPE) == VT_VOID)
                        tcc_error("parameter declared as void");
                    if (n == 0)
                        n = SYM_FIELD;
                } else {
                    n = tok;
                    pt.t = VT_VOID; /* invalid type */
                    pt.ref = NULL;
                    next();
                }
                if (n < TOK_UIDENT)
                    expect("identifier");
                convert_parameter_type(&pt);
                arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
                /* these symbols may be evaluated for VLArrays (see below, under
                   nocode_wanted) which is why we push them here as normal symbols
                   temporarily. Example: int func(int a, int b[++a]); */
                s = sym_push(n, &pt, VT_LOCAL|VT_LVAL, 0);
                *plast = s;
                plast = &s->next;
                if (tok == ')')
                    break;
                skip(',');
                if (l == FUNC_NEW && tok == TOK_DOTS) {
                    l = FUNC_ELLIPSIS;
                    next();
                    break;
                }
                if (l == FUNC_NEW && !parse_btype(&pt, &ad1, 0))
                    tcc_error("invalid type");
            }
        } else
            /* if no parameters, then old type prototype */
            l = FUNC_OLD;
        skip(')');
        /* remove parameter symbols from token table, keep on stack */
        if (first) {
            sym_pop(local_stack ? &local_stack : &global_stack, first->prev, 1);
            for (s = first; s; s = s->next)
                s->v |= SYM_FIELD;
        }
        --local_scope;
        /* NOTE: const is ignored in returned type as it has a special
           meaning in gcc / C++ */
        type->t &= ~VT_CONSTANT;
        /* some ancient pre-K&R C allows a function to return an array
           and the array brackets to be put after the arguments, such
           that "int c()[]" means something like "int[] c()" */
        if (tok == '[') {
            next();
            skip(']'); /* only handle simple "[]" */
            mk_pointer(type);
        }
        /* we push a anonymous symbol which will contain the function prototype */
        ad->f.func_args = arg_size;
        ad->f.func_type = l;
        s = sym_push(SYM_FIELD, type, 0, 0);
        s->a = ad->a;
        s->f = ad->f;
        s->next = first;
        type->t = VT_FUNC;
        type->ref = s;
    } else if (tok == '[') {
        int saved_nocode_wanted = nocode_wanted;
        /* array definition */
        next();
        n = -1;
        t1 = 0;
        if (td & TYPE_PARAM) while (1) {
            /* XXX The optional type-quals and static should only be accepted
               in parameter decls. The '*' as well, and then even only
               in prototypes (not function defs). */
            switch (tok) {
            case TOK_RESTRICT1: case TOK_RESTRICT2: case TOK_RESTRICT3:
            case TOK_CONST1:
            case TOK_VOLATILE1:
            case TOK_STATIC:
            case '*':
                next();
                continue;
            default:
                break;
            }
            if (tok != ']') {
                /* Code generation is not done now but has to be done
                   at start of function. Save code here for later use. */
                nocode_wanted = 1;
                skip_or_save_block(&vla_array_tok);
                unget_tok(0);
                vla_array_str = vla_array_tok->str;
                begin_macro(vla_array_tok, 2);
                next();
                gexpr();
                end_macro();
                next();
                goto check;
            }
            break;

        } else if (tok != ']') {
            if (!local_stack || (storage & VT_STATIC))
                vpushi(expr_const());
            else {
                /* VLAs (which can only happen with local_stack && !VT_STATIC)
                   length must always be evaluated, even under nocode_wanted,
                   so that its size slot is initialized (e.g. under sizeof
                   or typeof). */
                nocode_wanted = 0;
                gexpr();
            }
        check:
            if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
                n = vtop->c.i;
                if (n < 0)
                    tcc_error("invalid array size");
            } else {
                if (!is_integer_btype(vtop->type.t & VT_BTYPE))
                    tcc_error("size of variable length array should be an integer");
                n = 0;
                t1 = VT_VLA;
            }
        }
        skip(']');
        /* parse next post type */
        post_type(type, ad, storage, (td & ~(TYPE_DIRECT|TYPE_ABSTRACT)) | TYPE_NEST);

        if ((type->t & VT_BTYPE) == VT_FUNC)
            tcc_error("declaration of an array of functions");
        if ((type->t & VT_BTYPE) == VT_VOID
            || type_size(type, &align) < 0)
            tcc_error("declaration of an array of incomplete type elements");

        t1 |= type->t & VT_VLA;

        if (t1 & VT_VLA) {
            if (n < 0) {
                if (td & TYPE_NEST)
                    tcc_error("need explicit inner array size in VLAs");
            }
            else {
                /* reserve a local slot holding the runtime size and
                   store length * element-size into it */
                loc -= type_size(&int_type, &align);
                loc &= -align;
                n = loc;

                vpush_type_size(type, &align);
                gen_op('*');
                vset(&int_type, VT_LOCAL|VT_LVAL, n);
                vswap();
                vstore();
            }
        }
        if (n != -1)
            vpop();
        nocode_wanted = saved_nocode_wanted;

        /* we push an anonymous symbol which will contain the array
           element type */
        s = sym_push(SYM_FIELD, type, 0, n);
        type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
        type->ref = s;

        if (vla_array_str) {
            if (t1 & VT_VLA)
                s->vla_array_str = vla_array_str;
            else
                tok_str_free_str(vla_array_str);
        }
    }
    return 1;
}
/* Parse a type declarator (except basic type), and return the type
   in 'type'. 'td' is a bitmask indicating which kind of type decl is
   expected. 'type' should contain the basic type. 'ad' is the
   attribute definition of the basic type. It can be modified by
   type_decl(). If this (possibly abstract) declarator is a pointer chain
   it returns the innermost pointed to type (equals *type, but is a different
   pointer), otherwise returns type itself, that's used for recursive calls. */
static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td)
{
    CType *post, *ret;
    int qualifiers, storage;

    /* recursive type, remove storage bits first, apply them later again */
    storage = type->t & VT_STORAGE;
    type->t &= ~VT_STORAGE;
    post = ret = type;

    /* pointer derivations: each '*' may be followed by qualifiers and/or
       attributes before the next '*' or the direct declarator */
    while (tok == '*') {
        qualifiers = 0;
    redo:
        next();
        switch(tok) {
        case TOK__Atomic:
            qualifiers |= VT_ATOMIC;
            goto redo;
        case TOK_CONST1:
        case TOK_CONST2:
        case TOK_CONST3:
            qualifiers |= VT_CONSTANT;
            goto redo;
        case TOK_VOLATILE1:
        case TOK_VOLATILE2:
        case TOK_VOLATILE3:
            qualifiers |= VT_VOLATILE;
            goto redo;
        case TOK_RESTRICT1:
        case TOK_RESTRICT2:
        case TOK_RESTRICT3:
            /* 'restrict' is accepted but has no representation bit here */
            goto redo;
        /* XXX: clarify attribute handling */
        case TOK_ATTRIBUTE1:
        case TOK_ATTRIBUTE2:
            parse_attribute(ad);
            break;
        }
        mk_pointer(type);
        type->t |= qualifiers;
        if (ret == type)
            /* innermost pointed to type is the one for the first derivation */
            ret = pointed_type(type);
    }

    if (tok == '(') {
        /* This is possibly a parameter type list for abstract declarators
           ('int ()'), use post_type for testing this. */
        if (!post_type(type, ad, 0, td)) {
            /* It's not, so it's a nested declarator, and the post operations
               apply to the innermost pointed to type (if any). */
            /* XXX: this is not correct to modify 'ad' at this point, but
               the syntax is not clear */
            parse_attribute(ad);
            post = type_decl(type, ad, v, td);
            skip(')');
        } else
            goto abstract;
    } else if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
        /* type identifier */
        *v = tok;
        next();
    } else {
    abstract:
        if (!(td & TYPE_ABSTRACT))
            expect("identifier");
        *v = 0;
    }
    /* array/function suffixes apply to 'post' (the nested declarator's
       innermost type when there was one) */
    post_type(post, ad, post != ret ? 0 : storage,
              td & ~(TYPE_DIRECT|TYPE_ABSTRACT));
    parse_attribute(ad);
    type->t |= storage;
    return ret;
}
5044 /* indirection with full error checking and bound check */
5045 ST_FUNC void indir(void)
5047 if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
5048 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
5049 return;
5050 expect("pointer");
5052 if (vtop->r & VT_LVAL)
5053 gv(RC_INT);
5054 vtop->type = *pointed_type(&vtop->type);
5055 /* Arrays and functions are never lvalues */
5056 if (!(vtop->type.t & (VT_ARRAY | VT_VLA))
5057 && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
5058 vtop->r |= VT_LVAL;
5059 /* if bound checking, the referenced pointer must be checked */
5060 #ifdef CONFIG_TCC_BCHECK
5061 if (tcc_state->do_bounds_check)
5062 vtop->r |= VT_MUSTBOUND;
5063 #endif
5067 /* pass a parameter to a function and do type checking and casting */
5068 static void gfunc_param_typed(Sym *func, Sym *arg)
5070 int func_type;
5071 CType type;
5073 func_type = func->f.func_type;
5074 if (func_type == FUNC_OLD ||
5075 (func_type == FUNC_ELLIPSIS && arg == NULL)) {
5076 /* default casting : only need to convert float to double */
5077 if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
5078 gen_cast_s(VT_DOUBLE);
5079 } else if (vtop->type.t & VT_BITFIELD) {
5080 type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
5081 type.ref = vtop->type.ref;
5082 gen_cast(&type);
5083 } else if (vtop->r & VT_MUSTCAST) {
5084 force_charshort_cast();
5086 } else if (arg == NULL) {
5087 tcc_error("too many arguments to function");
5088 } else {
5089 type = arg->type;
5090 type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
5091 gen_assign_cast(&type);
5095 /* parse an expression and return its type without any side effect. */
5096 static void expr_type(CType *type, void (*expr_fn)(void))
5098 nocode_wanted++;
5099 expr_fn();
5100 *type = vtop->type;
5101 vpop();
5102 nocode_wanted--;
5105 /* parse an expression of the form '(type)' or '(expr)' and return its
5106 type */
5107 static void parse_expr_type(CType *type)
5109 int n;
5110 AttributeDef ad;
5112 skip('(');
5113 if (parse_btype(type, &ad, 0)) {
5114 type_decl(type, &ad, &n, TYPE_ABSTRACT);
5115 } else {
5116 expr_type(type, gexpr);
5118 skip(')');
5121 static void parse_type(CType *type)
5123 AttributeDef ad;
5124 int n;
5126 if (!parse_btype(type, &ad, 0)) {
5127 expect("type");
5129 type_decl(type, &ad, &n, TYPE_ABSTRACT);
5132 static void parse_builtin_params(int nc, const char *args)
5134 char c, sep = '(';
5135 CType type;
5136 if (nc)
5137 nocode_wanted++;
5138 next();
5139 if (*args == 0)
5140 skip(sep);
5141 while ((c = *args++)) {
5142 skip(sep);
5143 sep = ',';
5144 if (c == 't') {
5145 parse_type(&type);
5146 vpush(&type);
5147 continue;
5149 expr_eq();
5150 type.ref = NULL;
5151 type.t = 0;
5152 switch (c) {
5153 case 'e':
5154 continue;
5155 case 'V':
5156 type.t = VT_CONSTANT;
5157 case 'v':
5158 type.t |= VT_VOID;
5159 mk_pointer (&type);
5160 break;
5161 case 'S':
5162 type.t = VT_CONSTANT;
5163 case 's':
5164 type.t |= char_type.t;
5165 mk_pointer (&type);
5166 break;
5167 case 'i':
5168 type.t = VT_INT;
5169 break;
5170 case 'l':
5171 type.t = VT_SIZE_T;
5172 break;
5173 default:
5174 break;
5176 gen_assign_cast(&type);
5178 skip(')');
5179 if (nc)
5180 nocode_wanted--;
/* Parse and generate code for one of the __atomic_* builtins ('atok').
   The arguments are checked against a per-builtin template string and
   the operation is lowered to a call of the runtime helper
   "<name>_<size>" (e.g. __atomic_fetch_add_4). */
static void parse_atomic(int atok)
{
    int size, align, arg, t, save = 0;
    CType *atom, *atom_ptr, ct = {0};
    SValue store;
    char buf[40];
    static const char *const templates[] = {
        /*
         * Each entry consists of callback and function template.
         * The template represents argument types and return type.
         *
         * ? void (return-only)
         * b bool
         * a atomic
         * A read-only atomic
         * p pointer to memory
         * v value
         * l load pointer
         * s save pointer
         * m memory model
         */

        /* keep in order of appearance in tcctok.h: */
        /* __atomic_store */            "alm.?",
        /* __atomic_load */             "Asm.v",
        /* __atomic_exchange */         "alsm.v",
        /* __atomic_compare_exchange */ "aplbmm.b",
        /* __atomic_fetch_add */        "avm.v",
        /* __atomic_fetch_sub */        "avm.v",
        /* __atomic_fetch_or */         "avm.v",
        /* __atomic_fetch_xor */        "avm.v",
        /* __atomic_fetch_and */        "avm.v",
        /* __atomic_fetch_nand */       "avm.v",
        /* __atomic_add_fetch */        "avm.v",
        /* __atomic_sub_fetch */        "avm.v",
        /* __atomic_or_fetch */         "avm.v",
        /* __atomic_xor_fetch */        "avm.v",
        /* __atomic_and_fetch */        "avm.v",
        /* __atomic_nand_fetch */       "avm.v"
    };
    const char *template = templates[(atok - TOK___atomic_store)];

    atom = atom_ptr = NULL;
    size = 0; /* pacify compiler */
    next();
    skip('(');
    for (arg = 0;;) {
        expr_eq();
        switch (template[arg]) {
        case 'a':
        case 'A':
            /* the atomic object: must be a pointer to an integer-sized,
               power-of-2-sized type */
            atom_ptr = &vtop->type;
            if ((atom_ptr->t & VT_BTYPE) != VT_PTR)
                expect("pointer");
            atom = pointed_type(atom_ptr);
            size = type_size(atom, &align);
            if (size > 8
                || (size & (size - 1))
                || (atok > TOK___atomic_compare_exchange
                    && (0 == btype_size(atom->t & VT_BTYPE)
                        || (atom->t & VT_BTYPE) == VT_PTR)))
                expect("integral or integer-sized pointer target type");
            /* GCC does not care either: */
            /* if (!(atom->t & VT_ATOMIC))
                tcc_warning("pointer target declaration is missing '_Atomic'"); */
            break;

        case 'p':
            /* expected-value pointer of compare_exchange: same pointee size */
            if ((vtop->type.t & VT_BTYPE) != VT_PTR
             || type_size(pointed_type(&vtop->type), &align) != size)
                tcc_error("pointer target type mismatch in argument %d", arg + 1);
            gen_assign_cast(atom_ptr);
            break;
        case 'v':
            gen_assign_cast(atom);
            break;
        case 'l':
            indir();
            gen_assign_cast(atom);
            break;
        case 's':
            /* remember where to store the result after the call */
            save = 1;
            indir();
            store = *vtop;
            vpop();
            break;
        case 'm':
            gen_assign_cast(&int_type);
            break;
        case 'b':
            ct.t = VT_BOOL;
            gen_assign_cast(&ct);
            break;
        }
        if ('.' == template[++arg])
            break;
        skip(',');
    }
    skip(')');

    /* return type follows the '.' in the template */
    ct.t = VT_VOID;
    switch (template[arg + 1]) {
    case 'b':
        ct.t = VT_BOOL;
        break;
    case 'v':
        ct = *atom;
        break;
    }

    sprintf(buf, "%s_%d", get_tok_str(atok, 0), size);
    vpush_helper_func(tok_alloc_const(buf));
    vrott(arg - save + 1);
    gfunc_call(arg - save);

    vpush(&ct);
    PUT_R_RET(vtop, ct.t);
    t = ct.t & VT_BTYPE;
    if (t == VT_BYTE || t == VT_SHORT || t == VT_BOOL) {
#ifdef PROMOTE_RET
        vtop->r |= BFVAL(VT_MUSTCAST, 1);
#else
        vtop->type.t = VT_INT;
#endif
    }
    gen_cast(&ct);
    if (save) {
        vpush(&ct);
        *vtop = store;
        vswap();
        vstore();
    }
}
/* Parse a unary expression: primary expression, prefix operators,
   builtins, and all postfix operators ('++'/'--', member access,
   indexing, function call).  The result is left on the value stack. */
ST_FUNC void unary(void)
{
    int n, t, align, size, r, sizeof_caller;
    CType type;
    Sym *s;
    AttributeDef ad;

    /* generate line number info */
    if (debug_modes)
        tcc_debug_line(tcc_state), tcc_tcov_check_line (tcc_state, 1);

    sizeof_caller = in_sizeof;
    in_sizeof = 0;
    type.ref = NULL;
    /* XXX: GCC 2.95.3 does not generate a table although it should be
       better here */
 tok_next:
    switch(tok) {
    case TOK_EXTENSION:
        next();
        goto tok_next;
    case TOK_LCHAR:
#ifdef TCC_TARGET_PE
        t = VT_SHORT|VT_UNSIGNED;
        goto push_tokc;
#endif
    case TOK_CINT:
    case TOK_CCHAR:
        t = VT_INT;
 push_tokc:
        type.t = t;
        vsetc(&type, VT_CONST, &tokc);
        next();
        break;
    case TOK_CUINT:
        t = VT_INT | VT_UNSIGNED;
        goto push_tokc;
    case TOK_CLLONG:
        t = VT_LLONG;
        goto push_tokc;
    case TOK_CULLONG:
        t = VT_LLONG | VT_UNSIGNED;
        goto push_tokc;
    case TOK_CFLOAT:
        t = VT_FLOAT;
        goto push_tokc;
    case TOK_CDOUBLE:
        t = VT_DOUBLE;
        goto push_tokc;
    case TOK_CLDOUBLE:
        t = VT_LDOUBLE;
        goto push_tokc;
    case TOK_CLONG:
        t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG;
        goto push_tokc;
    case TOK_CULONG:
        t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG | VT_UNSIGNED;
        goto push_tokc;
    case TOK___FUNCTION__:
        if (!gnu_ext)
            goto tok_identifier;
        /* fall thru */
    case TOK___FUNC__:
        {
            Section *sec;
            int len;
            /* special function name identifier */
            len = strlen(funcname) + 1;
            /* generate char[len] type */
            type.t = char_type.t;
            if (tcc_state->warn_write_strings & WARN_ON)
                type.t |= VT_CONSTANT;
            mk_pointer(&type);
            type.t |= VT_ARRAY;
            type.ref->c = len;
            sec = rodata_section;
            vpush_ref(&type, sec, sec->data_offset, len);
            if (!NODATA_WANTED)
                memcpy(section_ptr_add(sec, len), funcname, len);
            next();
        }
        break;
    case TOK_LSTR:
#ifdef TCC_TARGET_PE
        t = VT_SHORT | VT_UNSIGNED;
#else
        t = VT_INT;
#endif
        goto str_init;
    case TOK_STR:
        /* string parsing */
        t = char_type.t;
    str_init:
        if (tcc_state->warn_write_strings & WARN_ON)
            t |= VT_CONSTANT;
        type.t = t;
        mk_pointer(&type);
        type.t |= VT_ARRAY;
        memset(&ad, 0, sizeof(AttributeDef));
        ad.section = rodata_section;
        decl_initializer_alloc(&type, &ad, VT_CONST, 2, 0, 0);
        break;
    case '(':
        next();
        /* cast ? */
        if (parse_btype(&type, &ad, 0)) {
            type_decl(&type, &ad, &n, TYPE_ABSTRACT);
            skip(')');
            /* check ISOC99 compound literal */
            if (tok == '{') {
                /* data is allocated locally by default */
                if (global_expr)
                    r = VT_CONST;
                else
                    r = VT_LOCAL;
                /* all except arrays are lvalues */
                if (!(type.t & VT_ARRAY))
                    r |= VT_LVAL;
                memset(&ad, 0, sizeof(AttributeDef));
                decl_initializer_alloc(&type, &ad, r, 1, 0, 0);
            } else {
                if (sizeof_caller) {
                    /* 'sizeof (type)': just push the type, the caller
                       computes the size */
                    vpush(&type);
                    return;
                }
                unary();
                gen_cast(&type);
            }
        } else if (tok == '{') {
            int saved_nocode_wanted = nocode_wanted;
            if (const_wanted && !(nocode_wanted & unevalmask))
                expect("constant");
            if (0 == local_scope)
                tcc_error("statement expression outside of function");
            /* save all registers */
            save_regs(0);
            /* statement expression : we do not accept break/continue
               inside as GCC does.  We do retain the nocode_wanted state,
               as statement expressions can't ever be entered from the
               outside, so any reactivation of code emission (from labels
               or loop heads) can be disabled again after the end of it. */
            block(1);
            /* If the statement expr can be entered, then we retain the current
               nocode_wanted state (from e.g. a 'return 0;' in the stmt-expr).
               If it can't be entered then the state is that from before the
               statement expression. */
            if (saved_nocode_wanted)
                nocode_wanted = saved_nocode_wanted;
            skip(')');
        } else {
            gexpr();
            skip(')');
        }
        break;
    case '*':
        next();
        unary();
        indir();
        break;
    case '&':
        next();
        unary();
        /* functions names must be treated as function pointers,
           except for unary '&' and sizeof. Since we consider that
           functions are not lvalues, we only have to handle it
           there and in function calls. */
        /* arrays can also be used although they are not lvalues */
        if ((vtop->type.t & VT_BTYPE) != VT_FUNC &&
            !(vtop->type.t & (VT_ARRAY | VT_VLA)))
            test_lvalue();
        if (vtop->sym)
            vtop->sym->a.addrtaken = 1;
        mk_pointer(&vtop->type);
        gaddrof();
        break;
    case '!':
        next();
        unary();
        gen_test_zero(TOK_EQ);
        break;
    case '~':
        next();
        unary();
        vpushi(-1);
        gen_op('^');
        break;
    case '+':
        next();
        unary();
        if ((vtop->type.t & VT_BTYPE) == VT_PTR)
            tcc_error("pointer not accepted for unary plus");
        /* In order to force cast, we add zero, except for floating point
           where we really need an noop (otherwise -0.0 will be transformed
           into +0.0). */
        if (!is_float(vtop->type.t)) {
            vpushi(0);
            gen_op('+');
        }
        break;
    case TOK_SIZEOF:
    case TOK_ALIGNOF1:
    case TOK_ALIGNOF2:
    case TOK_ALIGNOF3:
        t = tok;
        next();
        in_sizeof++;
        expr_type(&type, unary); /* unary() resets in_sizeof to 0 */
        if (t == TOK_SIZEOF) {
            vpush_type_size(&type, &align);
            gen_cast_s(VT_SIZE_T);
        } else {
            type_size(&type, &align);
            s = NULL;
            if (vtop[1].r & VT_SYM)
                s = vtop[1].sym; /* hack: accessing previous vtop */
            if (s && s->a.aligned)
                align = 1 << (s->a.aligned - 1);
            vpushs(align);
        }
        break;

    case TOK_builtin_expect:
        /* __builtin_expect is a no-op for now */
        parse_builtin_params(0, "ee");
        vpop();
        break;
    case TOK_builtin_types_compatible_p:
        parse_builtin_params(0, "tt");
        vtop[-1].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
        vtop[0].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
        n = is_compatible_types(&vtop[-1].type, &vtop[0].type);
        vtop -= 2;
        vpushi(n);
        break;
    case TOK_builtin_choose_expr:
        {
            int64_t c;
            next();
            skip('(');
            c = expr_const64();
            skip(',');
            /* the unselected branch is parsed but generates no code */
            if (!c) {
                nocode_wanted++;
            }
            expr_eq();
            if (!c) {
                vpop();
                nocode_wanted--;
            }
            skip(',');
            if (c) {
                nocode_wanted++;
            }
            expr_eq();
            if (c) {
                vpop();
                nocode_wanted--;
            }
            skip(')');
        }
        break;
    case TOK_builtin_constant_p:
        constant_p = 1;
        parse_builtin_params(1, "e");
        n = constant_p &&
            (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
            !((vtop->r & VT_SYM) && vtop->sym->a.addrtaken);
        vtop--;
        vpushi(n);
        break;
    case TOK_builtin_frame_address:
    case TOK_builtin_return_address:
        {
            int tok1 = tok;
            int level;
            next();
            skip('(');
            if (tok != TOK_CINT) {
                tcc_error("%s only takes positive integers",
                          tok1 == TOK_builtin_return_address ?
                          "__builtin_return_address" :
                          "__builtin_frame_address");
            }
            level = (uint32_t)tokc.i;
            next();
            skip(')');
            type.t = VT_VOID;
            mk_pointer(&type);
            vset(&type, VT_LOCAL, 0);       /* local frame */
            while (level--) {
#ifdef TCC_TARGET_RISCV64
                vpushi(2*PTR_SIZE);
                gen_op('-');
#endif
                mk_pointer(&vtop->type);
                indir();                    /* -> parent frame */
            }
            if (tok1 == TOK_builtin_return_address) {
                // assume return address is just above frame pointer on stack
#ifdef TCC_TARGET_ARM
                vpushi(2*PTR_SIZE);
                gen_op('+');
#elif defined TCC_TARGET_RISCV64
                vpushi(PTR_SIZE);
                gen_op('-');
#else
                vpushi(PTR_SIZE);
                gen_op('+');
#endif
                mk_pointer(&vtop->type);
                indir();
            }
        }
        break;
#ifdef TCC_TARGET_RISCV64
    case TOK_builtin_va_start:
        parse_builtin_params(0, "ee");
        r = vtop->r & VT_VALMASK;
        if (r == VT_LLOCAL)
            r = VT_LOCAL;
        if (r != VT_LOCAL)
            tcc_error("__builtin_va_start expects a local variable");
        gen_va_start();
        vstore();
        break;
#endif
#ifdef TCC_TARGET_X86_64
#ifdef TCC_TARGET_PE
    case TOK_builtin_va_start:
        parse_builtin_params(0, "ee");
        r = vtop->r & VT_VALMASK;
        if (r == VT_LLOCAL)
            r = VT_LOCAL;
        if (r != VT_LOCAL)
            tcc_error("__builtin_va_start expects a local variable");
        vtop->r = r;
        vtop->type = char_pointer_type;
        vtop->c.i += 8;
        vstore();
        break;
#else
    case TOK_builtin_va_arg_types:
        parse_builtin_params(0, "t");
        vpushi(classify_x86_64_va_arg(&vtop->type));
        vswap();
        vpop();
        break;
#endif
#endif

#ifdef TCC_TARGET_ARM64
    case TOK_builtin_va_start: {
        parse_builtin_params(0, "ee");
        //xx check types
        gen_va_start();
        vpushi(0);
        vtop->type.t = VT_VOID;
        break;
    }
    case TOK_builtin_va_arg: {
        parse_builtin_params(0, "et");
        type = vtop->type;
        vpop();
        //xx check types
        gen_va_arg(&type);
        vtop->type = type;
        break;
    }
    case TOK___arm64_clear_cache: {
        parse_builtin_params(0, "ee");
        gen_clear_cache();
        vpushi(0);
        vtop->type.t = VT_VOID;
        break;
    }
#endif

    /* atomic operations */
    case TOK___atomic_store:
    case TOK___atomic_load:
    case TOK___atomic_exchange:
    case TOK___atomic_compare_exchange:
    case TOK___atomic_fetch_add:
    case TOK___atomic_fetch_sub:
    case TOK___atomic_fetch_or:
    case TOK___atomic_fetch_xor:
    case TOK___atomic_fetch_and:
    case TOK___atomic_fetch_nand:
    case TOK___atomic_add_fetch:
    case TOK___atomic_sub_fetch:
    case TOK___atomic_or_fetch:
    case TOK___atomic_xor_fetch:
    case TOK___atomic_and_fetch:
    case TOK___atomic_nand_fetch:
        parse_atomic(tok);
        break;

    /* pre operations */
    case TOK_INC:
    case TOK_DEC:
        t = tok;
        next();
        unary();
        inc(0, t);
        break;
    case '-':
        next();
        unary();
        if (is_float(vtop->type.t)) {
            gen_opif(TOK_NEG);
        } else {
            vpushi(0);
            vswap();
            gen_op('-');
        }
        break;
    case TOK_LAND:
        if (!gnu_ext)
            goto tok_identifier;
        next();
        /* allow to take the address of a label */
        if (tok < TOK_UIDENT)
            expect("label identifier");
        s = label_find(tok);
        if (!s) {
            s = label_push(&global_label_stack, tok, LABEL_FORWARD);
        } else {
            if (s->r == LABEL_DECLARED)
                s->r = LABEL_FORWARD;
        }
        if ((s->type.t & VT_BTYPE) != VT_PTR) {
            s->type.t = VT_VOID;
            mk_pointer(&s->type);
            s->type.t |= VT_STATIC;
        }
        vpushsym(&s->type, s);
        next();
        break;

    case TOK_GENERIC:
    {
        CType controlling_type;
        int has_default = 0;
        int has_match = 0;
        int learn = 0;
        TokenString *str = NULL;
        int saved_const_wanted = const_wanted;

        next();
        skip('(');
        const_wanted = 0;
        expr_type(&controlling_type, expr_eq);
        controlling_type.t &= ~(VT_CONSTANT | VT_VOLATILE | VT_ARRAY);
        if ((controlling_type.t & VT_BTYPE) == VT_FUNC)
            mk_pointer(&controlling_type);
        const_wanted = saved_const_wanted;
        for (;;) {
            learn = 0;
            skip(',');
            if (tok == TOK_DEFAULT) {
                if (has_default)
                    tcc_error("too many 'default'");
                has_default = 1;
                if (!has_match)
                    learn = 1;
                next();
            } else {
                AttributeDef ad_tmp;
                int itmp;
                CType cur_type;

                parse_btype(&cur_type, &ad_tmp, 0);
                type_decl(&cur_type, &ad_tmp, &itmp, TYPE_ABSTRACT);
                if (compare_types(&controlling_type, &cur_type, 0)) {
                    if (has_match) {
                        tcc_error("type match twice");
                    }
                    has_match = 1;
                    learn = 1;
                }
            }
            skip(':');
            if (learn) {
                /* this association is (currently) the selected one:
                   save its expression tokens for re-parsing below */
                if (str)
                    tok_str_free(str);
                skip_or_save_block(&str);
            } else {
                skip_or_save_block(NULL);
            }
            if (tok == ')')
                break;
        }
        if (!str) {
            char buf[60];
            type_to_str(buf, sizeof buf, &controlling_type, NULL);
            tcc_error("type '%s' does not match any association", buf);
        }
        begin_macro(str, 1);
        next();
        expr_eq();
        if (tok != TOK_EOF)
            expect(",");
        end_macro();
        next();
        break;
    }
    // special qnan , snan and infinity values
    case TOK___NAN__:
        n = 0x7fc00000;
special_math_val:
        vpushi(n);
        vtop->type.t = VT_FLOAT;
        next();
        break;
    case TOK___SNAN__:
        n = 0x7f800001;
        goto special_math_val;
    case TOK___INF__:
        n = 0x7f800000;
        goto special_math_val;

    default:
    tok_identifier:
        t = tok;
        next();
        if (t < TOK_UIDENT)
            expect("identifier");
        s = sym_find(t);
        if (!s || IS_ASM_SYM(s)) {
            const char *name = get_tok_str(t, NULL);
            if (tok != '(')
                tcc_error("'%s' undeclared", name);
            /* for simple function calls, we tolerate undeclared
               external reference to int() function */
            tcc_warning_c(warn_implicit_function_declaration)(
                "implicit declaration of function '%s'", name);
            s = external_global_sym(t, &func_old_type);
        }

        r = s->r;
        /* A symbol that has a register is a local register variable,
           which starts out as VT_LOCAL value. */
        if ((r & VT_VALMASK) < VT_CONST)
            r = (r & ~VT_VALMASK) | VT_LOCAL;

        vset(&s->type, r, s->c);
        /* Point to s as backpointer (even without r&VT_SYM).
           Will be used by at least the x86 inline asm parser for
           regvars. */
        vtop->sym = s;

        if (r & VT_SYM) {
            vtop->c.i = 0;
        } else if (r == VT_CONST && IS_ENUM_VAL(s->type.t)) {
            vtop->c.i = s->enum_val;
        }
        break;
    }

    /* post operations */
    while (1) {
        if (tok == TOK_INC || tok == TOK_DEC) {
            inc(1, tok);
            next();
        } else if (tok == '.' || tok == TOK_ARROW || tok == TOK_CDOUBLE) {
            int qualifiers, cumofs = 0;
            /* field */
            if (tok == TOK_ARROW)
                indir();
            qualifiers = vtop->type.t & (VT_CONSTANT | VT_VOLATILE);
            test_lvalue();
            gaddrof();
            /* expect pointer on structure */
            if ((vtop->type.t & VT_BTYPE) != VT_STRUCT)
                expect("struct or union");
            if (tok == TOK_CDOUBLE)
                expect("field name");
            next();
            if (tok == TOK_CINT || tok == TOK_CUINT)
                expect("field name");
            s = find_field(&vtop->type, tok, &cumofs);
            if (!s)
                tcc_error("field not found: %s", get_tok_str(tok & ~SYM_FIELD, &tokc));
            /* add field offset to pointer */
            vtop->type = char_pointer_type; /* change type to 'char *' */
            vpushi(cumofs + s->c);
            gen_op('+');
            /* change type to field type, and set to lvalue */
            vtop->type = s->type;
            vtop->type.t |= qualifiers;
            /* an array is never an lvalue */
            if (!(vtop->type.t & VT_ARRAY)) {
                vtop->r |= VT_LVAL;
#ifdef CONFIG_TCC_BCHECK
                /* if bound checking, the referenced pointer must be checked */
                if (tcc_state->do_bounds_check)
                    vtop->r |= VT_MUSTBOUND;
#endif
            }
            next();
        } else if (tok == '[') {
            next();
            gexpr();
            gen_op('+');
            indir();
            skip(']');
        } else if (tok == '(') {
            SValue ret;
            Sym *sa;
            int nb_args, ret_nregs, ret_align, regsize, variadic;

            /* function call */
            if ((vtop->type.t & VT_BTYPE) != VT_FUNC) {
                /* pointer test (no array accepted) */
                if ((vtop->type.t & (VT_BTYPE | VT_ARRAY)) == VT_PTR) {
                    vtop->type = *pointed_type(&vtop->type);
                    if ((vtop->type.t & VT_BTYPE) != VT_FUNC)
                        goto error_func;
                } else {
                error_func:
                    expect("function pointer");
                }
            } else {
                vtop->r &= ~VT_LVAL; /* no lvalue */
            }
            /* get return type */
            s = vtop->type.ref;
            next();
            sa = s->next; /* first parameter */
            nb_args = regsize = 0;
            ret.r2 = VT_CONST;
            /* compute first implicit argument if a structure is returned */
            if ((s->type.t & VT_BTYPE) == VT_STRUCT) {
                variadic = (s->f.func_type == FUNC_ELLIPSIS);
                ret_nregs = gfunc_sret(&s->type, variadic, &ret.type,
                                       &ret_align, &regsize);
                if (ret_nregs <= 0) {
                    /* get some space for the returned structure */
                    size = type_size(&s->type, &align);
#ifdef TCC_TARGET_ARM64
                    /* On arm64, a small struct is returned in registers.
                       It is much easier to write it to memory if we know
                       that we are allowed to write some extra bytes, so
                       round the allocated space up to a power of 2: */
                    if (size < 16)
                        while (size & (size - 1))
                            size = (size | (size - 1)) + 1;
#endif
                    loc = (loc - size) & -align;
                    ret.type = s->type;
                    ret.r = VT_LOCAL | VT_LVAL;
                    /* pass it as 'int' to avoid structure arg passing
                       problems */
                    vseti(VT_LOCAL, loc);
#ifdef CONFIG_TCC_BCHECK
                    if (tcc_state->do_bounds_check)
                        --loc;
#endif
                    ret.c = vtop->c;
                    if (ret_nregs < 0)
                        vtop--;
                    else
                        nb_args++;
                }
            } else {
                ret_nregs = 1;
                ret.type = s->type;
            }

            if (ret_nregs > 0) {
                /* return in register */
                ret.c.i = 0;
                PUT_R_RET(&ret, ret.type.t);
            }
            if (tok != ')') {
                for(;;) {
                    expr_eq();
                    gfunc_param_typed(s, sa);
                    nb_args++;
                    if (sa)
                        sa = sa->next;
                    if (tok == ')')
                        break;
                    skip(',');
                }
            }
            if (sa)
                tcc_error("too few arguments to function");
            skip(')');
            gfunc_call(nb_args);

            if (ret_nregs < 0) {
                vsetc(&ret.type, ret.r, &ret.c);
#ifdef TCC_TARGET_RISCV64
                arch_transfer_ret_regs(1);
#endif
            } else {
                /* return value */
                for (r = ret.r + ret_nregs + !ret_nregs; r-- > ret.r;) {
                    vsetc(&ret.type, r, &ret.c);
                    vtop->r2 = ret.r2; /* Loop only happens when r2 is VT_CONST */
                }

                /* handle packed struct return */
                if (((s->type.t & VT_BTYPE) == VT_STRUCT) && ret_nregs) {
                    int addr, offset;

                    size = type_size(&s->type, &align);
                    /* We're writing whole regs often, make sure there's enough
                       space.  Assume register size is power of 2. */
                    if (regsize > align)
                        align = regsize;
                    loc = (loc - size) & -align;
                    addr = loc;
                    offset = 0;
                    for (;;) {
                        vset(&ret.type, VT_LOCAL | VT_LVAL, addr + offset);
                        vswap();
                        vstore();
                        vtop--;
                        if (--ret_nregs == 0)
                            break;
                        offset += regsize;
                    }
                    vset(&s->type, VT_LOCAL | VT_LVAL, addr);
                }

                /* Promote char/short return values. This matters only
                   for calling function that were not compiled by TCC and
                   only on some architectures.  For those where it doesn't
                   matter we expect things to be already promoted to int,
                   but not larger. */
                t = s->type.t & VT_BTYPE;
                if (t == VT_BYTE || t == VT_SHORT || t == VT_BOOL) {
#ifdef PROMOTE_RET
                    vtop->r |= BFVAL(VT_MUSTCAST, 1);
#else
                    vtop->type.t = VT_INT;
#endif
                }
            }
            if (s->f.func_noreturn) {
                if (debug_modes)
                    tcc_tcov_block_end(tcc_state, -1);
                CODE_OFF();
            }
        } else {
            break;
        }
    }
}
6069 #ifndef precedence_parser /* original top-down parser */
6071 static void expr_prod(void)
6073 int t;
6075 unary();
6076 while ((t = tok) == '*' || t == '/' || t == '%') {
6077 next();
6078 unary();
6079 gen_op(t);
6083 static void expr_sum(void)
6085 int t;
6087 expr_prod();
6088 while ((t = tok) == '+' || t == '-') {
6089 next();
6090 expr_prod();
6091 gen_op(t);
6095 static void expr_shift(void)
6097 int t;
6099 expr_sum();
6100 while ((t = tok) == TOK_SHL || t == TOK_SAR) {
6101 next();
6102 expr_sum();
6103 gen_op(t);
6107 static void expr_cmp(void)
6109 int t;
6111 expr_shift();
6112 while (((t = tok) >= TOK_ULE && t <= TOK_GT) ||
6113 t == TOK_ULT || t == TOK_UGE) {
6114 next();
6115 expr_shift();
6116 gen_op(t);
6120 static void expr_cmpeq(void)
6122 int t;
6124 expr_cmp();
6125 while ((t = tok) == TOK_EQ || t == TOK_NE) {
6126 next();
6127 expr_cmp();
6128 gen_op(t);
6132 static void expr_and(void)
6134 expr_cmpeq();
6135 while (tok == '&') {
6136 next();
6137 expr_cmpeq();
6138 gen_op('&');
6142 static void expr_xor(void)
6144 expr_and();
6145 while (tok == '^') {
6146 next();
6147 expr_and();
6148 gen_op('^');
6152 static void expr_or(void)
6154 expr_xor();
6155 while (tok == '|') {
6156 next();
6157 expr_xor();
6158 gen_op('|');
6162 static void expr_landor(int op);
6164 static void expr_land(void)
6166 expr_or();
6167 if (tok == TOK_LAND)
6168 expr_landor(tok);
6171 static void expr_lor(void)
6173 expr_land();
6174 if (tok == TOK_LOR)
6175 expr_landor(tok);
6178 # define expr_landor_next(op) op == TOK_LAND ? expr_or() : expr_land()
6179 #else /* defined precedence_parser */
6180 # define expr_landor_next(op) unary(), expr_infix(precedence(op) + 1)
6181 # define expr_lor() unary(), expr_infix(1)
6183 static int precedence(int tok)
6185 switch (tok) {
6186 case TOK_LOR: return 1;
6187 case TOK_LAND: return 2;
6188 case '|': return 3;
6189 case '^': return 4;
6190 case '&': return 5;
6191 case TOK_EQ: case TOK_NE: return 6;
6192 relat: case TOK_ULT: case TOK_UGE: return 7;
6193 case TOK_SHL: case TOK_SAR: return 8;
6194 case '+': case '-': return 9;
6195 case '*': case '/': case '%': return 10;
6196 default:
6197 if (tok >= TOK_ULE && tok <= TOK_GT)
6198 goto relat;
6199 return 0;
/* cache of precedence() results for all single-byte token values,
   filled once by init_prec() so lookups become a table access */
static unsigned char prec[256];
static void init_prec(void)
{
    int i;
    for (i = 0; i < 256; i++)
        prec[i] = precedence(i);
}
/* tokens >= 256 are never infix operators here, so they map to 0 */
#define precedence(i) ((unsigned)i < 256 ? prec[i] : 0)
static void expr_landor(int op);

/* Precedence-climbing parser for binary operators: consume and generate
   code for every infix operator whose precedence is >= 'p'.  The left
   operand is expected to be already parsed (by unary()) and on the
   value stack. */
static void expr_infix(int p)
{
    int t = tok, p2;
    while ((p2 = precedence(t)) >= p) {
        if (t == TOK_LOR || t == TOK_LAND) {
            /* logical operators need short-circuit handling */
            expr_landor(t);
        } else {
            next();
            unary();
            /* a tighter-binding operator follows: fold it into the
               right operand first */
            if (precedence(tok) > p2)
                expr_infix(p2 + 1);
            gen_op(t);
        }
        t = tok;
    }
}
6229 #endif
6231 /* Assuming vtop is a value used in a conditional context
6232 (i.e. compared with zero) return 0 if it's false, 1 if
6233 true and -1 if it can't be statically determined. */
6234 static int condition_3way(void)
6236 int c = -1;
/* only non-lvalue constants can be evaluated at compile time; a weak
   symbol's address may still be 0 at run time, so it is excluded */
6237 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
6238 (!(vtop->r & VT_SYM) || !vtop->sym->a.weak)) {
/* evaluate on a copy so vtop itself is left untouched */
6239 vdup();
6240 gen_cast_s(VT_BOOL);
6241 c = vtop->c.i;
6242 vpop();
6244 return c;
/* Generate code for a chain of '&&' (op == TOK_LAND) or '||'
   operators with short-circuit semantics.
   i  = 1 for '&&', 0 for '||' (the value that continues evaluation);
   cc = all operands so far were compile-time constants;
   f  = chain result is statically known, remaining operands are
        parsed under nocode_wanted;
   t  = jump chain for the short-circuit exits. */
6247 static void expr_landor(int op)
6249 int t = 0, cc = 1, f = 0, i = op == TOK_LAND, c;
6250 for(;;) {
6251 c = f ? i : condition_3way();
6252 if (c < 0)
6253 save_regs(1), cc = 0;
6254 else if (c != i)
6255 nocode_wanted++, f = 1;
6256 if (tok != op)
6257 break;
6258 if (c < 0)
6259 t = gvtst(i, t);
6260 else
6261 vpop();
6262 next();
6263 expr_landor_next(op);
6265 if (cc || f) {
/* result known at compile time: push the constant 0/1 */
6266 vpop();
6267 vpushi(i ^ f);
6268 gsym(t);
6269 nocode_wanted -= f;
6270 } else {
/* leave the result as a VT_CMP with the accumulated jump chain */
6271 gvtst_set(i, t);
6275 static int is_cond_bool(SValue *sv)
6277 if ((sv->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST
6278 && (sv->type.t & VT_BTYPE) == VT_INT)
6279 return (unsigned)sv->c.i < 2;
6280 if (sv->r == VT_CMP)
6281 return 1;
6282 return 0;
/* Parse and generate code for a conditional expression
   'cond ? e1 : e2', including the GNU 'cond ?: e2' extension (g).
   c is the statically-known truth value of cond (-1 = unknown);
   when unknown, both branches are generated with jump patching and
   the results are unified in a common register. */
6285 static void expr_cond(void)
6287 int tt, u, r1, r2, rc, t1, t2, islv, c, g;
6288 SValue sv;
6289 CType type;
6290 int ncw_prev;
6292 expr_lor();
6293 if (tok == '?') {
6294 next();
6295 c = condition_3way();
6296 g = (tok == ':' && gnu_ext);
6297 tt = 0;
6298 if (!g) {
6299 if (c < 0) {
6300 save_regs(1);
6301 tt = gvtst(1, 0);
6302 } else {
6303 vpop();
6305 } else if (c < 0) {
6306 /* needed to avoid having different registers saved in
6307 each branch */
6308 save_regs(1);
6309 gv_dup();
6310 tt = gvtst(0, 0);
6313 ncw_prev = nocode_wanted;
/* statically-false condition: parse the then-branch without codegen */
6314 if (c == 0)
6315 nocode_wanted++;
6316 if (!g)
6317 gexpr();
6319 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
6320 mk_pointer(&vtop->type);
6321 sv = *vtop; /* save value to handle it later */
6322 vtop--; /* no vpop so that FP stack is not flushed */
6324 if (g) {
6325 u = tt;
6326 } else if (c < 0) {
6327 u = gjmp(0);
6328 gsym(tt);
6329 } else
6330 u = 0;
6332 nocode_wanted = ncw_prev;
/* statically-true condition: parse the else-branch without codegen */
6333 if (c == 1)
6334 nocode_wanted++;
6335 skip(':');
6336 expr_cond();
6338 if (c < 0 && is_cond_bool(vtop) && is_cond_bool(&sv)) {
6339 /* optimize "if (f ? a > b : c || d) ..." for example, where normally
6340 "a < b" and "c || d" would be forced to "(int)0/1" first, whereas
6341 this code jumps directly to the if's then/else branches. */
6342 t1 = gvtst(0, 0);
6343 t2 = gjmp(0);
6344 gsym(u);
6345 vpushv(&sv);
6346 /* combine jump targets of 2nd op with VT_CMP of 1st op */
6347 gvtst_set(0, t1);
6348 gvtst_set(1, t2);
6349 nocode_wanted = ncw_prev;
6350 // tcc_warning("two conditions expr_cond");
6351 return;
6354 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
6355 mk_pointer(&vtop->type);
6357 /* cast operands to correct type according to ISOC rules */
6358 if (!combine_types(&type, &sv, vtop, '?'))
6359 type_incompatibility_error(&sv.type, &vtop->type,
6360 "type mismatch in conditional expression (have '%s' and '%s')");
6361 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
6362 that `(expr ? a : b).mem` does not error with "lvalue expected" */
6363 islv = (vtop->r & VT_LVAL) && (sv.r & VT_LVAL) && VT_STRUCT == (type.t & VT_BTYPE);
6365 /* now we convert second operand */
6366 if (c != 1) {
6367 gen_cast(&type);
6368 if (islv) {
6369 mk_pointer(&vtop->type);
6370 gaddrof();
6371 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
6372 gaddrof();
6375 rc = RC_TYPE(type.t);
6376 /* for long longs, we use fixed registers to avoid having
6377 to handle a complicated move */
6378 if (USING_TWO_WORDS(type.t))
6379 rc = RC_RET(type.t);
6381 tt = r2 = 0;
6382 if (c < 0) {
6383 r2 = gv(rc);
6384 tt = gjmp(0);
6386 gsym(u);
6387 nocode_wanted = ncw_prev;
6389 /* this is horrible, but we must also convert first
6390 operand */
6391 if (c != 0) {
6392 *vtop = sv;
6393 gen_cast(&type);
6394 if (islv) {
6395 mk_pointer(&vtop->type);
6396 gaddrof();
6397 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
6398 gaddrof();
6401 if (c < 0) {
/* move both branch results into the same register r2 */
6402 r1 = gv(rc);
6403 move_reg(r2, r1, islv ? VT_PTR : type.t);
6404 vtop->r = r2;
6405 gsym(tt);
6408 if (islv)
6409 indir();
6413 static void expr_eq(void)
6415 int t;
6417 expr_cond();
6418 if ((t = tok) == '=' || TOK_ASSIGN(t)) {
6419 test_lvalue();
6420 next();
6421 if (t == '=') {
6422 expr_eq();
6423 } else {
6424 vdup();
6425 expr_eq();
6426 gen_op(TOK_ASSIGN_OP(t));
6428 vstore();
6432 ST_FUNC void gexpr(void)
6434 while (1) {
6435 expr_eq();
6436 if (tok != ',')
6437 break;
6438 constant_p &= (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
6439 !((vtop->r & VT_SYM) && vtop->sym->a.addrtaken);
6440 vpop();
6441 next();
6445 /* parse a constant expression and return value in vtop. */
6446 static void expr_const1(void)
6448 const_wanted++;
6449 nocode_wanted += unevalmask + 1;
6450 expr_cond();
6451 nocode_wanted -= unevalmask + 1;
6452 const_wanted--;
6455 /* parse an integer constant and return its value. */
6456 static inline int64_t expr_const64(void)
6458 int64_t c;
6459 expr_const1();
6460 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
6461 expect("constant expression");
6462 c = vtop->c.i;
6463 vpop();
6464 return c;
6467 /* parse an integer constant and return its value.
6468 Complain if it doesn't fit 32bit (signed or unsigned). */
6469 ST_FUNC int expr_const(void)
6471 int c;
6472 int64_t wc = expr_const64();
6473 c = wc;
6474 if (c != wc && (unsigned)c != wc)
6475 tcc_error("constant exceeds 32 bit");
6476 return c;
6479 /* ------------------------------------------------------------------------- */
6480 /* return from function */
6482 #ifndef TCC_TARGET_ARM64
/* Generate code to return the value on top of the value stack from
   the current function, handling the three struct-return ABIs:
   hidden pointer (ret_nregs == 0), packed into registers (> 0), or
   target-specific transfer (< 0, RISCV64). Scalars go through the
   normal return-register class. */
6483 static void gfunc_return(CType *func_type)
6485 if ((func_type->t & VT_BTYPE) == VT_STRUCT) {
6486 CType type, ret_type;
6487 int ret_align, ret_nregs, regsize;
6488 ret_nregs = gfunc_sret(func_type, func_var, &ret_type,
6489 &ret_align, &regsize);
6490 if (ret_nregs < 0) {
6491 #ifdef TCC_TARGET_RISCV64
6492 arch_transfer_ret_regs(0);
6493 #endif
6494 } else if (0 == ret_nregs) {
6495 /* if returning structure, must copy it to implicit
6496 first pointer arg location */
6497 type = *func_type;
6498 mk_pointer(&type);
6499 vset(&type, VT_LOCAL | VT_LVAL, func_vc);
6500 indir();
6501 vswap();
6502 /* copy structure value to pointer */
6503 vstore();
6504 } else {
6505 /* returning structure packed into registers */
6506 int size, addr, align, rc;
6507 size = type_size(func_type,&align);
/* if the value is not suitably aligned for register loads, copy it
   to a properly aligned stack temporary first */
6508 if ((vtop->r != (VT_LOCAL | VT_LVAL) ||
6509 (vtop->c.i & (ret_align-1)))
6510 && (align & (ret_align-1))) {
6511 loc = (loc - size) & -ret_align;
6512 addr = loc;
6513 type = *func_type;
6514 vset(&type, VT_LOCAL | VT_LVAL, addr);
6515 vswap();
6516 vstore();
6517 vpop();
6518 vset(&ret_type, VT_LOCAL | VT_LVAL, addr);
6520 vtop->type = ret_type;
6521 rc = RC_RET(ret_type.t);
6522 if (ret_nregs == 1)
6523 gv(rc);
6524 else {
6525 for (;;) {
6526 vdup();
6527 gv(rc);
6528 vpop();
6529 if (--ret_nregs == 0)
6530 break;
6531 /* We assume that when a structure is returned in multiple
6532 registers, their classes are consecutive values of the
6533 suite s(n) = 2^n */
6534 rc <<= 1;
6535 vtop->c.i += regsize;
6539 } else {
6540 gv(RC_RET(func_type->t));
6542 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
6544 #endif
6546 static void check_func_return(void)
6548 if ((func_vt.t & VT_BTYPE) == VT_VOID)
6549 return;
6550 if (!strcmp (funcname, "main")
6551 && (func_vt.t & VT_BTYPE) == VT_INT) {
6552 /* main returns 0 by default */
6553 vpushi(0);
6554 gen_assign_cast(&func_vt);
6555 gfunc_return(&func_vt);
6556 } else {
6557 tcc_warning("function might return no value: '%s'", funcname);
6561 /* ------------------------------------------------------------------------- */
6562 /* switch/case */
6564 static int case_cmpi(const void *pa, const void *pb)
6566 int64_t a = (*(struct case_t**) pa)->v1;
6567 int64_t b = (*(struct case_t**) pb)->v1;
6568 return a < b ? -1 : a > b;
6571 static int case_cmpu(const void *pa, const void *pb)
6573 uint64_t a = (uint64_t)(*(struct case_t**) pa)->v1;
6574 uint64_t b = (uint64_t)(*(struct case_t**) pb)->v1;
6575 return a < b ? -1 : a > b;
/* Test the value on top of the stack and jump to address A when it is
   true; T is an existing jump chain merged into the test. */
static void gtst_addr(int t, int a)
{
    int jmp = gvtst(0, t);

    gsym_addr(jmp, a);
}
/* Generate the dispatch code for a sorted array of switch cases:
   binary search down to <= 8 entries, then a linear scan. The switch
   value sits on the value stack; BSYM collects the no-match jump. */
6583 static void gcase(struct case_t **base, int len, int *bsym)
6585 struct case_t *p;
6586 int e;
6587 int ll = (vtop->type.t & VT_BTYPE) == VT_LLONG;
6588 while (len > 8) {
6589 /* binary search */
6590 p = base[len/2];
6591 vdup();
6592 if (ll)
6593 vpushll(p->v2);
6594 else
6595 vpushi(p->v2);
6596 gen_op(TOK_LE);
6597 e = gvtst(1, 0);
6598 vdup();
6599 if (ll)
6600 vpushll(p->v1);
6601 else
6602 vpushi(p->v1);
6603 gen_op(TOK_GE);
6604 gtst_addr(0, p->sym); /* v1 <= x <= v2 */
6605 /* x < v1 */
6606 gcase(base, len/2, bsym);
6607 /* x > v2 */
6608 gsym(e);
6609 e = len/2 + 1;
6610 base += e; len -= e;
6612 /* linear scan */
6613 while (len--) {
6614 p = *base++;
6615 vdup();
6616 if (ll)
6617 vpushll(p->v2);
6618 else
6619 vpushi(p->v2);
6620 if (p->v1 == p->v2) {
/* single-value case: one equality test suffices */
6621 gen_op(TOK_EQ);
6622 gtst_addr(0, p->sym);
6623 } else {
6624 gen_op(TOK_LE);
6625 e = gvtst(1, 0);
6626 vdup();
6627 if (ll)
6628 vpushll(p->v1);
6629 else
6630 vpushi(p->v1);
6631 gen_op(TOK_GE);
6632 gtst_addr(0, p->sym);
6633 gsym(e);
/* no case matched: chain into the caller's miss target */
6636 *bsym = gjmp(*bsym);
6639 /* ------------------------------------------------------------------------- */
6640 /* __attribute__((cleanup(fn))) */
/* Emit calls to the cleanup functions of all __attribute__((cleanup))
   variables on the current scope's cleanup chain, stopping at STOP
   (exclusive). For each entry, cls->next holds the cleanup function
   symbol and cls->prev_tok the variable symbol. */
6642 static void try_call_scope_cleanup(Sym *stop)
6644 Sym *cls = cur_scope->cl.s;
6646 for (; cls != stop; cls = cls->ncl) {
6647 Sym *fs = cls->next;
6648 Sym *vs = cls->prev_tok;
6650 vpushsym(&fs->type, fs);
6651 vset(&vs->type, vs->r, vs->c);
6652 vtop->sym = vs;
/* the cleanup function receives the variable's address */
6653 mk_pointer(&vtop->type);
6654 gaddrof();
6655 gfunc_call(1);
/* When a goto jumps to a label with cleanup state CLEANUPSTATE, emit
   cleanup calls for every scope being left: walk both cleanup chains
   up to equal depth, then in lockstep to their nearest common
   ancestor, and run cleanups down to that point. */
6659 static void try_call_cleanup_goto(Sym *cleanupstate)
6661 Sym *oc, *cc;
6662 int ocd, ccd;
6664 if (!cur_scope->cl.s)
6665 return;
6667 /* search NCA of both cleanup chains given parents and initial depth */
6668 ocd = cleanupstate ? cleanupstate->v & ~SYM_FIELD : 0;
6669 for (ccd = cur_scope->cl.n, oc = cleanupstate; ocd > ccd; --ocd, oc = oc->ncl)
6671 for (cc = cur_scope->cl.s; ccd > ocd; --ccd, cc = cc->ncl)
6673 for (; cc != oc; cc = cc->ncl, oc = oc->ncl, --ccd)
6676 try_call_scope_cleanup(cc);
6679 /* call 'func' for each __attribute__((cleanup(func))) */
/* Handle pending forward gotos that leave scope O: for each goto whose
   cleanup depth exceeds O's, emit the cleanup calls for this scope and
   either re-chain the goto to the outer scope or retire it entirely.
   Finally run this scope's own cleanups for normal fall-through. */
6680 static void block_cleanup(struct scope *o)
6682 int jmp = 0;
6683 Sym *g, **pg;
6684 for (pg = &pending_gotos; (g = *pg) && g->c > o->cl.n;) {
6685 if (g->prev_tok->r & LABEL_FORWARD) {
6686 Sym *pcl = g->next;
/* skip the cleanup block for normal (non-goto) control flow */
6687 if (!jmp)
6688 jmp = gjmp(0);
6689 gsym(pcl->jnext);
6690 try_call_scope_cleanup(o->cl.s);
6691 pcl->jnext = gjmp(0);
6692 if (!o->cl.n)
6693 goto remove_pending;
6694 g->c = o->cl.n;
6695 pg = &g->prev;
6696 } else {
6697 remove_pending:
6698 *pg = g->prev;
6699 sym_free(g);
6702 gsym(jmp);
6703 try_call_scope_cleanup(o->cl.s);
6706 /* ------------------------------------------------------------------------- */
6707 /* VLA */
/* Restore the stack pointer from a VLA save slot.
   SAVED_LOC is the stack offset where the SP was saved, or 0 when
   there is nothing to restore.
   Fix: the parameter was named 'loc', shadowing the file-global
   'loc' (local variable stack index) declared at the top of this
   file — a -Wshadow hazard in a file that mutates the global 'loc'
   frequently (see gfunc_return). Renaming a static function's
   parameter is invisible to callers. */
static void vla_restore(int saved_loc)
{
    if (saved_loc)
        gen_vla_sp_restore(saved_loc);
}
6715 static void vla_leave(struct scope *o)
6717 struct scope *c = cur_scope, *v = NULL;
6718 for (; c != o && c; c = c->prev)
6719 if (c->vla.num)
6720 v = c;
6721 if (v)
6722 vla_restore(v->vla.locorig);
6725 /* ------------------------------------------------------------------------- */
6726 /* local scopes */
/* Enter a new block scope: O becomes a copy of the current scope,
   is linked as the new current scope, and records the local
   symbol/label stack positions so prev_scope can roll them back. */
6728 static void new_scope(struct scope *o)
6730 /* copy and link previous scope */
6731 *o = *cur_scope;
6732 o->prev = cur_scope;
6733 cur_scope = o;
6734 cur_scope->vla.num = 0;
6736 /* record local declaration stack position */
6737 o->lstk = local_stack;
6738 o->llstk = local_label_stack;
6739 ++local_scope;
6741 if (debug_modes)
6742 tcc_debug_stabn(tcc_state, N_LBRAC, ind - func_ind);
/* Leave scope O: restore VLA stack state, flush pending cleanup
   gotos, and pop locally defined labels and symbols recorded by
   new_scope. IS_EXPR is set when finishing a statement expression. */
6745 static void prev_scope(struct scope *o, int is_expr)
6747 vla_leave(o->prev);
6749 if (o->cl.s != o->prev->cl.s)
6750 block_cleanup(o->prev);
6752 /* pop locally defined labels */
6753 label_pop(&local_label_stack, o->llstk, is_expr);
6755 /* In the is_expr case (a statement expression is finished here),
6756 vtop might refer to symbols on the local_stack. Either via the
6757 type or via vtop->sym. We can't pop those nor any that in turn
6758 might be referred to. To make it easier we don't roll back
6759 any symbols in that case; some upper level call to block() will
6760 do that. We do have to remove such symbols from the lookup
6761 tables, though. sym_pop will do that. */
6763 /* pop locally defined symbols */
6764 pop_local_syms(o->lstk, is_expr);
6765 cur_scope = o->prev;
6766 --local_scope;
6768 if (debug_modes)
6769 tcc_debug_stabn(tcc_state, N_RBRAC, ind - func_ind);
6772 /* leave a scope via break/continue(/goto) */
6773 static void leave_scope(struct scope *o)
6775 if (!o)
6776 return;
6777 try_call_scope_cleanup(o->cl.s);
6778 vla_leave(o);
6781 /* ------------------------------------------------------------------------- */
6782 /* call block from 'for do while' loops */
6784 static void lblock(int *bsym, int *csym)
6786 struct scope *lo = loop_scope, *co = cur_scope;
6787 int *b = co->bsym, *c = co->csym;
6788 if (csym) {
6789 co->csym = csym;
6790 loop_scope = co;
6792 co->bsym = bsym;
6793 block(0);
6794 co->bsym = b;
6795 if (csym) {
6796 co->csym = c;
6797 loop_scope = lo;
/* Parse and generate code for one statement (or a compound block).
   IS_EXPR is set inside GNU statement expressions, where the value of
   the last expression statement is kept on the value stack instead of
   being popped. Dispatches on the leading token: if/while/{ /return/
   break/continue/for/do/switch/case/default/goto/asm/label/expression. */
6801 static void block(int is_expr)
6803 int a, b, c, d, e, t;
6804 struct scope o;
6805 Sym *s;
6807 if (is_expr) {
6808 /* default return value is (void) */
6809 vpushi(0);
6810 vtop->type.t = VT_VOID;
6813 again:
6814 t = tok;
6815 /* If the token carries a value, next() might destroy it. Only with
6816 invalid code such as f(){"123"4;} */
6817 if (TOK_HAS_VALUE(t))
6818 goto expr;
6819 next();
6821 if (debug_modes)
6822 tcc_tcov_check_line (tcc_state, 0), tcc_tcov_block_begin (tcc_state);
6824 if (t == TOK_IF) {
6825 skip('(');
6826 gexpr();
6827 skip(')');
6828 a = gvtst(1, 0);
6829 block(0);
6830 if (tok == TOK_ELSE) {
6831 d = gjmp(0);
6832 gsym(a);
6833 next();
6834 block(0);
6835 gsym(d); /* patch else jmp */
6836 } else {
6837 gsym(a);
6840 } else if (t == TOK_WHILE) {
6841 d = gind();
6842 skip('(');
6843 gexpr();
6844 skip(')');
6845 a = gvtst(1, 0);
6846 b = 0;
6847 lblock(&a, &b);
6848 gjmp_addr(d);
6849 gsym_addr(b, d);
6850 gsym(a);
6852 } else if (t == '{') {
6853 new_scope(&o);
6855 /* handle local labels declarations */
6856 while (tok == TOK_LABEL) {
6857 do {
6858 next();
6859 if (tok < TOK_UIDENT)
6860 expect("label identifier");
6861 label_push(&local_label_stack, tok, LABEL_DECLARED);
6862 next();
6863 } while (tok == ',');
6864 skip(';');
6867 while (tok != '}') {
6868 decl(VT_LOCAL);
6869 if (tok != '}') {
6870 if (is_expr)
6871 vpop();
6872 block(is_expr);
6876 prev_scope(&o, is_expr);
/* local_scope == 0 here means this '}' closes the function body */
6877 if (local_scope)
6878 next();
6879 else if (!nocode_wanted)
6880 check_func_return();
6882 } else if (t == TOK_RETURN) {
6883 b = (func_vt.t & VT_BTYPE) != VT_VOID;
6884 if (tok != ';') {
6885 gexpr();
6886 if (b) {
6887 gen_assign_cast(&func_vt);
6888 } else {
6889 if (vtop->type.t != VT_VOID)
6890 tcc_warning("void function returns a value");
6891 vtop--;
6893 } else if (b) {
6894 tcc_warning("'return' with no value");
6895 b = 0;
6897 leave_scope(root_scope);
6898 if (b)
6899 gfunc_return(&func_vt);
6900 skip(';');
6901 /* jump unless last stmt in top-level block */
6902 if (tok != '}' || local_scope != 1)
6903 rsym = gjmp(rsym);
6904 if (debug_modes)
6905 tcc_tcov_block_end (tcc_state, -1);
6906 CODE_OFF();
6908 } else if (t == TOK_BREAK) {
6909 /* compute jump */
6910 if (!cur_scope->bsym)
6911 tcc_error("cannot break");
6912 if (cur_switch && cur_scope->bsym == cur_switch->bsym)
6913 leave_scope(cur_switch->scope);
6914 else
6915 leave_scope(loop_scope);
6916 *cur_scope->bsym = gjmp(*cur_scope->bsym);
6917 skip(';');
6919 } else if (t == TOK_CONTINUE) {
6920 /* compute jump */
6921 if (!cur_scope->csym)
6922 tcc_error("cannot continue");
6923 leave_scope(loop_scope);
6924 *cur_scope->csym = gjmp(*cur_scope->csym);
6925 skip(';');
6927 } else if (t == TOK_FOR) {
6928 new_scope(&o);
6930 skip('(');
6931 if (tok != ';') {
6932 /* c99 for-loop init decl? */
6933 if (!decl(VT_JMP)) {
6934 /* no, regular for-loop init expr */
6935 gexpr();
6936 vpop();
6939 skip(';');
6940 a = b = 0;
6941 c = d = gind();
6942 if (tok != ';') {
6943 gexpr();
6944 a = gvtst(1, 0);
6946 skip(';');
6947 if (tok != ')') {
/* increment expression: emitted after the body, so jump over it now */
6948 e = gjmp(0);
6949 d = gind();
6950 gexpr();
6951 vpop();
6952 gjmp_addr(c);
6953 gsym(e);
6955 skip(')');
6956 lblock(&a, &b);
6957 gjmp_addr(d);
6958 gsym_addr(b, d);
6959 gsym(a);
6960 prev_scope(&o, 0);
6962 } else if (t == TOK_DO) {
6963 a = b = 0;
6964 d = gind();
6965 lblock(&a, &b);
6966 gsym(b);
6967 skip(TOK_WHILE);
6968 skip('(');
6969 gexpr();
6970 skip(')');
6971 skip(';');
6972 c = gvtst(0, 0);
6973 gsym_addr(c, d);
6974 gsym(a);
6976 } else if (t == TOK_SWITCH) {
6977 struct switch_t *sw;
6979 sw = tcc_mallocz(sizeof *sw);
6980 sw->bsym = &a;
6981 sw->scope = cur_scope;
6982 sw->prev = cur_switch;
6983 sw->nocode_wanted = nocode_wanted;
6984 cur_switch = sw;
6986 skip('(');
6987 gexpr();
6988 skip(')');
6989 sw->sv = *vtop--; /* save switch value */
6991 a = 0;
6992 b = gjmp(0); /* jump to first case */
6993 lblock(&a, NULL);
6994 a = gjmp(a); /* add implicit break */
6995 /* case lookup */
6996 gsym(b);
6998 if (sw->nocode_wanted)
6999 goto skip_switch;
7000 if (sw->sv.type.t & VT_UNSIGNED)
7001 qsort(sw->p, sw->n, sizeof(void*), case_cmpu);
7002 else
7003 qsort(sw->p, sw->n, sizeof(void*), case_cmpi);
7004 for (b = 1; b < sw->n; b++)
7005 if (sw->sv.type.t & VT_UNSIGNED
7006 ? (uint64_t)sw->p[b - 1]->v2 >= (uint64_t)sw->p[b]->v1
7007 : sw->p[b - 1]->v2 >= sw->p[b]->v1)
7008 tcc_error("duplicate case value");
7009 vpushv(&sw->sv);
7010 gv(RC_INT);
7011 d = 0, gcase(sw->p, sw->n, &d);
7012 vpop();
7013 if (sw->def_sym)
7014 gsym_addr(d, sw->def_sym);
7015 else
7016 gsym(d);
7017 skip_switch:
7018 /* break label */
7019 gsym(a);
7021 dynarray_reset(&sw->p, &sw->n);
7022 cur_switch = sw->prev;
7023 tcc_free(sw);
7025 } else if (t == TOK_CASE) {
7026 struct case_t *cr = tcc_malloc(sizeof(struct case_t));
7027 if (!cur_switch)
7028 expect("switch");
7029 cr->v1 = cr->v2 = expr_const64();
7030 if (gnu_ext && tok == TOK_DOTS) {
7031 next();
7032 cr->v2 = expr_const64();
7033 if ((!(cur_switch->sv.type.t & VT_UNSIGNED) && cr->v2 < cr->v1)
7034 || (cur_switch->sv.type.t & VT_UNSIGNED && (uint64_t)cr->v2 < (uint64_t)cr->v1))
7035 tcc_warning("empty case range");
7037 /* case and default are unreachable from a switch under nocode_wanted */
7038 if (!cur_switch->nocode_wanted)
7039 cr->sym = gind();
7040 dynarray_add(&cur_switch->p, &cur_switch->n, cr);
7041 skip(':');
7042 is_expr = 0;
7043 goto block_after_label;
7045 } else if (t == TOK_DEFAULT) {
7046 if (!cur_switch)
7047 expect("switch");
7048 if (cur_switch->def_sym)
7049 tcc_error("too many 'default'");
7050 cur_switch->def_sym = cur_switch->nocode_wanted ? 1 : gind();
7051 skip(':');
7052 is_expr = 0;
7053 goto block_after_label;
7055 } else if (t == TOK_GOTO) {
7056 if (cur_scope->vla.num)
7057 vla_restore(cur_scope->vla.locorig);
7058 if (tok == '*' && gnu_ext) {
7059 /* computed goto */
7060 next();
7061 gexpr();
7062 if ((vtop->type.t & VT_BTYPE) != VT_PTR)
7063 expect("pointer");
7064 ggoto();
7066 } else if (tok >= TOK_UIDENT) {
7067 s = label_find(tok);
7068 /* put forward definition if needed */
7069 if (!s)
7070 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
7071 else if (s->r == LABEL_DECLARED)
7072 s->r = LABEL_FORWARD;
7074 if (s->r & LABEL_FORWARD) {
7075 /* start new goto chain for cleanups, linked via label->next */
7076 if (cur_scope->cl.s && !nocode_wanted) {
7077 sym_push2(&pending_gotos, SYM_FIELD, 0, cur_scope->cl.n);
7078 pending_gotos->prev_tok = s;
7079 s = sym_push2(&s->next, SYM_FIELD, 0, 0);
7080 pending_gotos->next = s;
7082 s->jnext = gjmp(s->jnext);
7083 } else {
7084 try_call_cleanup_goto(s->cleanupstate);
7085 gjmp_addr(s->jnext);
7087 next();
7089 } else {
7090 expect("label identifier");
7092 skip(';');
7094 } else if (t == TOK_ASM1 || t == TOK_ASM2 || t == TOK_ASM3) {
7095 asm_instr();
7097 } else {
7098 if (tok == ':' && t >= TOK_UIDENT) {
7099 /* label case */
7100 next();
7101 s = label_find(t);
7102 if (s) {
7103 if (s->r == LABEL_DEFINED)
7104 tcc_error("duplicate label '%s'", get_tok_str(s->v, NULL));
7105 s->r = LABEL_DEFINED;
7106 if (s->next) {
7107 Sym *pcl; /* pending cleanup goto */
7108 for (pcl = s->next; pcl; pcl = pcl->prev)
7109 gsym(pcl->jnext);
7110 sym_pop(&s->next, NULL, 0);
7111 } else
7112 gsym(s->jnext);
7113 } else {
7114 s = label_push(&global_label_stack, t, LABEL_DEFINED);
7116 s->jnext = gind();
7117 s->cleanupstate = cur_scope->cl.s;
7119 block_after_label:
7121 /* Accept attributes after labels (e.g. 'unused') */
7122 AttributeDef ad_tmp;
7123 parse_attribute(&ad_tmp);
7125 if (debug_modes)
7126 tcc_tcov_reset_ind(tcc_state);
7127 vla_restore(cur_scope->vla.loc);
7128 if (tok != '}')
7129 goto again;
7130 /* we accept this, but it is a mistake */
7131 tcc_warning_c(warn_all)("deprecated use of label at end of compound statement");
7133 } else {
7134 /* expression case */
7135 if (t != ';') {
7136 unget_tok(t);
7137 expr:
7138 if (is_expr) {
7139 vpop();
7140 gexpr();
7141 } else {
7142 gexpr();
7143 vpop();
7145 skip(';');
7150 if (debug_modes)
7151 tcc_tcov_check_line (tcc_state, 0), tcc_tcov_block_end (tcc_state, 0);
7154 /* This skips over a stream of tokens containing balanced {} and ()
7155 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
7156 with a '{'). If STR then allocates and stores the skipped tokens
7157 in *STR. This doesn't check if () and {} are nested correctly,
7158 i.e. "({)}" is accepted. */
7159 static void skip_or_save_block(TokenString **str)
7161 int braces = tok == '{';
7162 int level = 0;
7163 if (str)
7164 *str = tok_str_alloc();
7166 while (1) {
7167 int t = tok;
/* at nesting level 0, any list/statement separator terminates us */
7168 if (level == 0
7169 && (t == ','
7170 || t == ';'
7171 || t == '}'
7172 || t == ')'
7173 || t == ']'))
7174 break;
7175 if (t == TOK_EOF) {
7176 if (str || level > 0)
7177 tcc_error("unexpected end of file");
7178 else
7179 break;
/* record the token before advancing, so *STR gets the full stream */
7181 if (str)
7182 tok_str_add_tok(*str);
7183 next();
7184 if (t == '{' || t == '(' || t == '[') {
7185 level++;
7186 } else if (t == '}' || t == ')' || t == ']') {
7187 level--;
7188 if (level == 0 && braces && t == '}')
7189 break;
7192 if (str) {
/* terminate the saved stream: -1 marker then 0 */
7193 tok_str_add(*str, -1);
7194 tok_str_add(*str, 0);
7198 #define EXPR_CONST 1
7199 #define EXPR_ANY 2
/* Parse one initializer element. EXPR_CONST requires a load-time
   constant (static initializers); EXPR_ANY accepts any assignment
   expression (automatic initializers). */
7201 static void parse_init_elem(int expr_type)
7203 int saved_global_expr;
7204 switch(expr_type) {
7205 case EXPR_CONST:
7206 /* compound literals must be allocated globally in this case */
7207 saved_global_expr = global_expr;
7208 global_expr = 1;
7209 expr_const1();
7210 global_expr = saved_global_expr;
7211 /* NOTE: symbols are accepted, as well as lvalue for anon symbols
7212 (compound literals). */
7213 if (((vtop->r & (VT_VALMASK | VT_LVAL)) != VT_CONST
7214 && ((vtop->r & (VT_SYM|VT_LVAL)) != (VT_SYM|VT_LVAL)
7215 || vtop->sym->v < SYM_FIRST_ANOM))
7216 #ifdef TCC_TARGET_PE
7217 || ((vtop->r & VT_SYM) && vtop->sym->a.dllimport)
7218 #endif
7220 tcc_error("initializer element is not constant");
7221 break;
7222 case EXPR_ANY:
7223 expr_eq();
7224 break;
7228 #if 1
/* Sanity check: an initializer write at OFFSET must stay inside the
   space reserved for it (section data for globals, local frame for
   automatics); anything else is an internal error. */
7229 static void init_assert(init_params *p, int offset)
7231 if (p->sec ? !NODATA_WANTED && offset > p->sec->data_offset
7232 : !nocode_wanted && offset > p->local_offset)
7233 tcc_internal_error("initializer overflow");
7235 #else
7236 #define init_assert(sec, offset)
7237 #endif
7239 /* put zeros for variable based init */
/* Zero SIZE bytes at offset C: a no-op for section data (globals are
   zero-initialized already), a generated memset(3) call for locals. */
7240 static void init_putz(init_params *p, unsigned long c, int size)
7242 init_assert(p, c + size);
7243 if (p->sec) {
7244 /* nothing to do because globals are already set to zero */
7245 } else {
7246 vpush_helper_func(TOK_memset);
7247 vseti(VT_LOCAL, c);
/* ARM's calling convention needs the argument push order reversed */
7248 #ifdef TCC_TARGET_ARM
7249 vpushs(size);
7250 vpushi(0);
7251 #else
7252 vpushi(0);
7253 vpushs(size);
7254 #endif
7255 gfunc_call(3);
7259 #define DIF_FIRST 1
7260 #define DIF_SIZE_ONLY 2
7261 #define DIF_HAVE_ELEM 4
7262 #define DIF_CLEAR 8
7264 /* delete relocations for specified range c ... c + size. Unfortunatly
7265 in very special cases, relocations may occur unordered */
7266 static void decl_design_delrels(Section *sec, int c, int size)
7268 ElfW_Rel *rel, *rel2, *rel_end;
7269 if (!sec || !sec->reloc)
7270 return;
7271 rel = rel2 = (ElfW_Rel*)sec->reloc->data;
7272 rel_end = (ElfW_Rel*)(sec->reloc->data + sec->reloc->data_offset);
7273 while (rel < rel_end) {
7274 if (rel->r_offset >= c && rel->r_offset < c + size) {
7275 sec->reloc->data_offset -= sizeof *rel;
7276 } else {
7277 if (rel2 != rel)
7278 memcpy(rel2, rel, sizeof *rel);
7279 ++rel2;
7281 ++rel;
7285 static void decl_design_flex(init_params *p, Sym *ref, int index)
7287 if (ref == p->flex_array_ref) {
7288 if (index >= ref->c)
7289 ref->c = index + 1;
7290 } else if (ref->c < 0)
7291 tcc_error("flexible array has zero size in this context");
7294 /* t is the array or struct type. c is the array or struct
7295 address. cur_field is the pointer to the current
7296 field, for arrays the 'c' member contains the current start
7297 index. 'flags' is as in decl_initializer.
7298 'al' contains the already initialized length of the
7299 current container (starting at c). This returns the new length of that. */
7300 static int decl_designator(init_params *p, CType *type, unsigned long c,
7301 Sym **cur_field, int flags, int al)
7303 Sym *s, *f;
7304 int index, index_last, align, l, nb_elems, elem_size;
7305 unsigned long corig = c;
7307 elem_size = 0;
7308 nb_elems = 1;
7310 if (flags & DIF_HAVE_ELEM)
7311 goto no_designator;
/* GNU extension: 'field:' designator syntax (old gcc style) */
7313 if (gnu_ext && tok >= TOK_UIDENT) {
7314 l = tok, next();
7315 if (tok == ':')
7316 goto struct_field;
7317 unget_tok(l);
7320 /* NOTE: we only support ranges for last designator */
7321 while (nb_elems == 1 && (tok == '[' || tok == '.')) {
7322 if (tok == '[') {
7323 if (!(type->t & VT_ARRAY))
7324 expect("array type");
7325 next();
7326 index = index_last = expr_const();
/* GNU range designator: [first ... last] */
7327 if (tok == TOK_DOTS && gnu_ext) {
7328 next();
7329 index_last = expr_const();
7331 skip(']');
7332 s = type->ref;
7333 decl_design_flex(p, s, index_last);
7334 if (index < 0 || index_last >= s->c || index_last < index)
7335 tcc_error("index exceeds array bounds or range is empty");
7336 if (cur_field)
7337 (*cur_field)->c = index_last;
7338 type = pointed_type(type);
7339 elem_size = type_size(type, &align);
7340 c += index * elem_size;
7341 nb_elems = index_last - index + 1;
7342 } else {
7343 int cumofs;
7344 next();
7345 l = tok;
7346 struct_field:
7347 next();
7348 if ((type->t & VT_BTYPE) != VT_STRUCT)
7349 expect("struct/union type");
7350 cumofs = 0;
7351 f = find_field(type, l, &cumofs);
7352 if (!f)
7353 expect("field");
7354 if (cur_field)
7355 *cur_field = f;
7356 type = &f->type;
7357 c += cumofs + f->c;
/* a designator resets implicit field advancing */
7359 cur_field = NULL;
7361 if (!cur_field) {
7362 if (tok == '=') {
7363 next();
7364 } else if (!gnu_ext) {
7365 expect("=");
7367 } else {
7368 no_designator:
/* no designator: advance to the next array index / struct field */
7369 if (type->t & VT_ARRAY) {
7370 index = (*cur_field)->c;
7371 s = type->ref;
7372 decl_design_flex(p, s, index);
7373 if (index >= s->c)
7374 tcc_error("too many initializers");
7375 type = pointed_type(type);
7376 elem_size = type_size(type, &align);
7377 c += index * elem_size;
7378 } else {
7379 f = *cur_field;
/* skip anonymous bitfield padding members */
7380 while (f && (f->v & SYM_FIRST_ANOM) && (f->type.t & VT_BITFIELD))
7381 *cur_field = f = f->next;
7382 if (!f)
7383 tcc_error("too many initializers");
7384 type = &f->type;
7385 c += f->c;
7389 if (!elem_size) /* for structs */
7390 elem_size = type_size(type, &align);
7392 /* Using designators the same element can be initialized more
7393 than once. In that case we need to delete possibly already
7394 existing relocations. */
7395 if (!(flags & DIF_SIZE_ONLY) && c - corig < al) {
7396 decl_design_delrels(p->sec, c, elem_size * nb_elems);
7397 flags &= ~DIF_CLEAR; /* mark stack dirty too */
7400 decl_initializer(p, type, c, flags & ~DIF_FIRST);
/* range designator: replicate the first element over the range */
7402 if (!(flags & DIF_SIZE_ONLY) && nb_elems > 1) {
7403 Sym aref = {0};
7404 CType t1;
7405 int i;
7406 if (p->sec || (type->t & VT_ARRAY)) {
7407 /* make init_putv/vstore believe it were a struct */
7408 aref.c = elem_size;
7409 t1.t = VT_STRUCT, t1.ref = &aref;
7410 type = &t1;
7412 if (p->sec)
7413 vpush_ref(type, p->sec, c, elem_size);
7414 else
7415 vset(type, VT_LOCAL|VT_LVAL, c);
7416 for (i = 1; i < nb_elems; i++) {
7417 vdup();
7418 init_putv(p, type, c + elem_size * i);
7420 vpop();
7423 c += nb_elems * elem_size;
7424 if (c - corig > al)
7425 al = c - corig;
7426 return al;
7429 /* store a value or an expression directly in global data or in local array */
7430 static void init_putv(init_params *p, CType *type, unsigned long c)
7432 int bt;
7433 void *ptr;
7434 CType dtype;
7435 int size, align;
7436 Section *sec = p->sec;
7437 uint64_t val;
7439 dtype = *type;
7440 dtype.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
7442 size = type_size(type, &align);
7443 if (type->t & VT_BITFIELD)
7444 size = (BIT_POS(type->t) + BIT_SIZE(type->t) + 7) / 8;
7445 init_assert(p, c + size);
7447 if (sec) {
7448 /* XXX: not portable */
7449 /* XXX: generate error if incorrect relocation */
7450 gen_assign_cast(&dtype);
7451 bt = type->t & VT_BTYPE;
7453 if ((vtop->r & VT_SYM)
7454 && bt != VT_PTR
7455 && (bt != (PTR_SIZE == 8 ? VT_LLONG : VT_INT)
7456 || (type->t & VT_BITFIELD))
7457 && !((vtop->r & VT_CONST) && vtop->sym->v >= SYM_FIRST_ANOM)
7459 tcc_error("initializer element is not computable at load time");
7461 if (NODATA_WANTED) {
7462 vtop--;
7463 return;
7466 ptr = sec->data + c;
7467 val = vtop->c.i;
7469 /* XXX: make code faster ? */
7470 if ((vtop->r & (VT_SYM|VT_CONST)) == (VT_SYM|VT_CONST) &&
7471 vtop->sym->v >= SYM_FIRST_ANOM &&
7472 /* XXX This rejects compound literals like
7473 '(void *){ptr}'. The problem is that '&sym' is
7474 represented the same way, which would be ruled out
7475 by the SYM_FIRST_ANOM check above, but also '"string"'
7476 in 'char *p = "string"' is represented the same
7477 with the type being VT_PTR and the symbol being an
7478 anonymous one. That is, there's no difference in vtop
7479 between '(void *){x}' and '&(void *){x}'. Ignore
7480 pointer typed entities here. Hopefully no real code
7481 will ever use compound literals with scalar type. */
7482 (vtop->type.t & VT_BTYPE) != VT_PTR) {
7483 /* These come from compound literals, memcpy stuff over. */
7484 Section *ssec;
7485 ElfSym *esym;
7486 ElfW_Rel *rel;
7487 esym = elfsym(vtop->sym);
7488 ssec = tcc_state->sections[esym->st_shndx];
7489 memmove (ptr, ssec->data + esym->st_value + (int)vtop->c.i, size);
7490 if (ssec->reloc) {
7491 /* We need to copy over all memory contents, and that
7492 includes relocations. Use the fact that relocs are
7493 created it order, so look from the end of relocs
7494 until we hit one before the copied region. */
7495 unsigned long relofs = ssec->reloc->data_offset;
7496 while (relofs >= sizeof(*rel)) {
7497 relofs -= sizeof(*rel);
7498 rel = (ElfW_Rel*)(ssec->reloc->data + relofs);
7499 if (rel->r_offset >= esym->st_value + size)
7500 continue;
7501 if (rel->r_offset < esym->st_value)
7502 break;
7503 put_elf_reloca(symtab_section, sec,
7504 c + rel->r_offset - esym->st_value,
7505 ELFW(R_TYPE)(rel->r_info),
7506 ELFW(R_SYM)(rel->r_info),
7507 #if PTR_SIZE == 8
7508 rel->r_addend
7509 #else
7511 #endif
7515 } else {
7516 if (type->t & VT_BITFIELD) {
7517 int bit_pos, bit_size, bits, n;
7518 unsigned char *p, v, m;
7519 bit_pos = BIT_POS(vtop->type.t);
7520 bit_size = BIT_SIZE(vtop->type.t);
7521 p = (unsigned char*)ptr + (bit_pos >> 3);
7522 bit_pos &= 7, bits = 0;
7523 while (bit_size) {
7524 n = 8 - bit_pos;
7525 if (n > bit_size)
7526 n = bit_size;
7527 v = val >> bits << bit_pos;
7528 m = ((1 << n) - 1) << bit_pos;
7529 *p = (*p & ~m) | (v & m);
7530 bits += n, bit_size -= n, bit_pos = 0, ++p;
7532 } else
7533 switch(bt) {
7534 case VT_BOOL:
7535 *(char *)ptr = val != 0;
7536 break;
7537 case VT_BYTE:
7538 *(char *)ptr = val;
7539 break;
7540 case VT_SHORT:
7541 write16le(ptr, val);
7542 break;
7543 case VT_FLOAT:
7544 write32le(ptr, val);
7545 break;
7546 case VT_DOUBLE:
7547 write64le(ptr, val);
7548 break;
7549 case VT_LDOUBLE:
7550 #if defined TCC_IS_NATIVE_387
7551 /* Host and target platform may be different but both have x87.
7552 On windows, tcc does not use VT_LDOUBLE, except when it is a
7553 cross compiler. In this case a mingw gcc as host compiler
7554 comes here with 10-byte long doubles, while msvc or tcc won't.
7555 tcc itself can still translate by asm.
7556 In any case we avoid possibly random bytes 11 and 12.
7558 if (sizeof (long double) >= 10)
7559 memcpy(ptr, &vtop->c.ld, 10);
7560 #ifdef __TINYC__
7561 else if (sizeof (long double) == sizeof (double))
7562 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr) : "m" (vtop->c.ld));
7563 #endif
7564 else if (vtop->c.ld == 0.0)
7566 else
7567 #endif
7568 /* For other platforms it should work natively, but may not work
7569 for cross compilers */
7570 if (sizeof(long double) == LDOUBLE_SIZE)
7571 memcpy(ptr, &vtop->c.ld, LDOUBLE_SIZE);
7572 else if (sizeof(double) == LDOUBLE_SIZE)
7573 memcpy(ptr, &vtop->c.ld, LDOUBLE_SIZE);
7574 #ifndef TCC_CROSS_TEST
7575 else
7576 tcc_error("can't cross compile long double constants");
7577 #endif
7578 break;
7580 #if PTR_SIZE == 8
7581 /* intptr_t may need a reloc too, see tcctest.c:relocation_test() */
7582 case VT_LLONG:
7583 case VT_PTR:
7584 if (vtop->r & VT_SYM)
7585 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
7586 else
7587 write64le(ptr, val);
7588 break;
7589 case VT_INT:
7590 write32le(ptr, val);
7591 break;
7592 #else
7593 case VT_LLONG:
7594 write64le(ptr, val);
7595 break;
7596 case VT_PTR:
7597 case VT_INT:
7598 if (vtop->r & VT_SYM)
7599 greloc(sec, vtop->sym, c, R_DATA_PTR);
7600 write32le(ptr, val);
7601 break;
7602 #endif
7603 default:
7604 //tcc_internal_error("unexpected type");
7605 break;
7608 vtop--;
7609 } else {
7610 vset(&dtype, VT_LOCAL|VT_LVAL, c);
7611 vswap();
7612 vstore();
7613 vpop();
/* 't' contains the type and storage info. 'c' is the offset of the
   object in section 'sec'. If 'sec' is NULL, it means stack based
   allocation. 'flags & DIF_FIRST' is true if array '{' must be read (multi
   dimension implicit array init handling). 'flags & DIF_SIZE_ONLY' is true if
   size only evaluation is wanted (only for arrays).
   'flags & DIF_HAVE_ELEM' means the first element has already been parsed
   and sits on the value stack (set below after parse_init_elem). */
static void decl_initializer(init_params *p, CType *type, unsigned long c, int flags)
{
    int len, n, no_oblock, i;
    int size1, align1;
    Sym *s, *f;
    Sym indexsym;
    CType *t1;

    /* generate line number info */
    if (debug_modes && !p->sec)
        tcc_debug_line(tcc_state), tcc_tcov_check_line (tcc_state, 1);

    if (!(flags & DIF_HAVE_ELEM) && tok != '{' &&
        /* In case of strings we have special handling for arrays, so
           don't consume them as initializer value (which would commit them
           to some anonymous symbol). */
        tok != TOK_LSTR && tok != TOK_STR &&
        (!(flags & DIF_SIZE_ONLY)
            /* a struct may be initialized from a struct of same type, as in
               struct {int x,y;} a = {1,2}, b = {3,4}, c[] = {a,b};
               In that case we need to parse the element in order to check
               it for compatibility below */
            || (type->t & VT_BTYPE) == VT_STRUCT)
        ) {
        /* suppress code generation during the size-only dry run for
           stack objects; restore the previous state afterwards */
        int ncw_prev = nocode_wanted;
        if ((flags & DIF_SIZE_ONLY) && !p->sec)
            ++nocode_wanted;
        parse_init_elem(!p->sec ? EXPR_ANY : EXPR_CONST);
        nocode_wanted = ncw_prev;
        flags |= DIF_HAVE_ELEM;
    }

    if (type->t & VT_ARRAY) {
        no_oblock = 1;
        if (((flags & DIF_FIRST) && tok != TOK_LSTR && tok != TOK_STR) ||
            tok == '{') {
            skip('{');
            no_oblock = 0;
        }

        s = type->ref;
        n = s->c;
        t1 = pointed_type(type);
        size1 = type_size(t1, &align1);

        /* only parse strings here if correct type (otherwise: handle
           them as ((w)char *) expressions */
        if ((tok == TOK_LSTR &&
#ifdef TCC_TARGET_PE
             (t1->t & VT_BTYPE) == VT_SHORT && (t1->t & VT_UNSIGNED)
#else
             (t1->t & VT_BTYPE) == VT_INT
#endif
            ) || (tok == TOK_STR && (t1->t & VT_BTYPE) == VT_BYTE)) {
            len = 0;
            cstr_reset(&initstr);
            if (size1 != (tok == TOK_STR ? 1 : sizeof(nwchar_t)))
                tcc_error("unhandled string literal merging");
            /* concatenate adjacent string literals into 'initstr' */
            while (tok == TOK_STR || tok == TOK_LSTR) {
                if (initstr.size)
                    initstr.size -= size1;
                if (tok == TOK_STR)
                    len += tokc.str.size;
                else
                    len += tokc.str.size / sizeof(nwchar_t);
                /* assumes tokc.str.size counts the terminating zero;
                   drop it from 'len' — TODO confirm against tccpp.c */
                len--;
                cstr_cat(&initstr, tokc.str.data, tokc.str.size);
                next();
            }
            if (tok != ')' && tok != '}' && tok != ',' && tok != ';'
                && tok != TOK_EOF) {
                /* Not a lone literal but part of a bigger expression. */
                unget_tok(size1 == 1 ? TOK_STR : TOK_LSTR);
                tokc.str.size = initstr.size;
                tokc.str.data = initstr.data;
                goto do_init_array;
            }

            decl_design_flex(p, s, len);
            if (!(flags & DIF_SIZE_ONLY)) {
                int nb = n, ch;
                if (len < nb)
                    nb = len;
                if (len > nb)
                    tcc_warning("initializer-string for array is too long");
                /* in order to go faster for common case (char
                   string in global variable, we handle it
                   specifically */
                if (p->sec && size1 == 1) {
                    init_assert(p, c + nb);
                    if (!NODATA_WANTED)
                        memcpy(p->sec->data + c, initstr.data, nb);
                } else {
                    for(i=0;i<n;i++) {
                        if (i >= nb) {
                            /* only add trailing zero if enough storage (no
                               warning in this case since it is standard) */
                            if (flags & DIF_CLEAR)
                                break;
                            if (n - i >= 4) {
                                init_putz(p, c + i * size1, (n - i) * size1);
                                break;
                            }
                            ch = 0;
                        } else if (size1 == 1)
                            ch = ((unsigned char *)initstr.data)[i];
                        else
                            ch = ((nwchar_t *)initstr.data)[i];
                        vpushi(ch);
                        init_putv(p, t1, c + i * size1);
                    }
                }
            }
        } else {

          do_init_array:
            indexsym.c = 0;
            f = &indexsym;

          do_init_list:
            /* zero memory once in advance */
            if (!(flags & (DIF_CLEAR | DIF_SIZE_ONLY))) {
                init_putz(p, c, n*size1);
                flags |= DIF_CLEAR;
            }

            len = 0;
            /* GNU extension: if the initializer is empty for a flex array,
               it's size is zero. We won't enter the loop, so set the size
               now. */
            decl_design_flex(p, s, len);
            /* NOTE: this loop serves both arrays (f == &indexsym) and
               structs (f == first field), reached via do_init_list */
            while (tok != '}' || (flags & DIF_HAVE_ELEM)) {
                len = decl_designator(p, type, c, &f, flags, len);
                flags &= ~DIF_HAVE_ELEM;
                if (type->t & VT_ARRAY) {
                    ++indexsym.c;
                    /* special test for multi dimensional arrays (may not
                       be strictly correct if designators are used at the
                       same time) */
                    if (no_oblock && len >= n*size1)
                        break;
                } else {
                    if (s->type.t == VT_UNION)
                        f = NULL;
                    else
                        f = f->next;
                    if (no_oblock && f == NULL)
                        break;
                }

                if (tok == '}')
                    break;
                skip(',');
            }
        }
        if (!no_oblock)
            skip('}');

    } else if ((flags & DIF_HAVE_ELEM)
        /* Use i_c_parameter_t, to strip toplevel qualifiers.
           The source type might have VT_CONSTANT set, which is
           of course assignable to non-const elements. */
        && is_compatible_unqualified_types(type, &vtop->type)) {
        goto one_elem;

    } else if ((type->t & VT_BTYPE) == VT_STRUCT) {
        no_oblock = 1;
        if ((flags & DIF_FIRST) || tok == '{') {
            skip('{');
            no_oblock = 0;
        }
        s = type->ref;
        f = s->next;
        n = s->c;
        /* struct members are addressed by byte offsets, so the shared
           loop above must use an element size of 1 */
        size1 = 1;
        goto do_init_list;

    } else if (tok == '{') {
        if (flags & DIF_HAVE_ELEM)
            skip(';');
        next();
        decl_initializer(p, type, c, flags & ~DIF_HAVE_ELEM);
        skip('}');

    } else one_elem: if ((flags & DIF_SIZE_ONLY)) {
        /* If we supported only ISO C we wouldn't have to accept calling
           this on anything than an array if DIF_SIZE_ONLY (and even then
           only on the outermost level, so no recursion would be needed),
           because initializing a flex array member isn't supported.
           But GNU C supports it, so we need to recurse even into
           subfields of structs and arrays when DIF_SIZE_ONLY is set. */
        /* just skip expression */
        if (flags & DIF_HAVE_ELEM)
            vpop();
        else
            skip_or_save_block(NULL);

    } else {
        if (!(flags & DIF_HAVE_ELEM)) {
            /* This should happen only when we haven't parsed
               the init element above for fear of committing a
               string constant to memory too early. */
            if (tok != TOK_STR && tok != TOK_LSTR)
                expect("string constant");
            parse_init_elem(!p->sec ? EXPR_ANY : EXPR_CONST);
        }
        /* a zero scalar in already-zeroed stack storage needs no store */
        if (!p->sec && (flags & DIF_CLEAR) /* container was already zero'd */
            && (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST
            && vtop->c.i == 0
            && btype_size(type->t & VT_BTYPE) /* not for fp constants */
            )
            vpop();
        else
            init_putv(p, type, c);
    }
}
/* parse an initializer for type 't' if 'has_init' is non zero, and
   allocate space in local or global data space ('r' is either
   VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
   variable 'v' of scope 'scope' is declared before initializers
   are parsed. If 'v' is zero, then a reference to the new object
   is put in the value stack. If 'has_init' is 2, a special parsing
   is done to handle string constants.
   'global' is non-zero for file-scope declarations (see caller decl()):
   it enables lookup/merging with a previous definition of 'v'. */
static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r,
                                   int has_init, int v, int global)
{
    int size, align, addr;
    TokenString *init_str = NULL;

    Section *sec;
    Sym *flexible_array;
    Sym *sym;
    int saved_nocode_wanted = nocode_wanted;
#ifdef CONFIG_TCC_BCHECK
    int bcheck = tcc_state->do_bounds_check && !NODATA_WANTED;
#endif
    init_params p = {0};

    /* Always allocate static or global variables */
    if (v && (r & VT_VALMASK) == VT_CONST)
        nocode_wanted |= DATA_ONLY_WANTED;

    flexible_array = NULL;
    size = type_size(type, &align);

    /* exactly one flexible array may be initialized, either the
       toplevel array or the last member of the toplevel struct */

    if (size < 0) {
        /* If the base type itself was an array type of unspecified size
           (like in 'typedef int arr[]; arr x = {1};') then we will
           overwrite the unknown size by the real one for this decl.
           We need to unshare the ref symbol holding that size. */
        type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
        p.flex_array_ref = type->ref;

    } else if (has_init && (type->t & VT_BTYPE) == VT_STRUCT) {
        /* look for a flexible array member as last field of the struct */
        Sym *field = type->ref->next;
        if (field) {
            while (field->next)
                field = field->next;
            if (field->type.t & VT_ARRAY && field->type.ref->c < 0) {
                flexible_array = field;
                p.flex_array_ref = field->type.ref;
                /* force the dry-run pass below to determine the size */
                size = -1;
            }
        }
    }

    if (size < 0) {
        /* If unknown size, do a dry-run 1st pass */
        if (!has_init)
            tcc_error("unknown type size");
        if (has_init == 2) {
            /* only get strings */
            init_str = tok_str_alloc();
            while (tok == TOK_STR || tok == TOK_LSTR) {
                tok_str_add_tok(init_str);
                next();
            }
            tok_str_add(init_str, -1);
            tok_str_add(init_str, 0);
        } else
            skip_or_save_block(&init_str);
        unget_tok(0);

        /* compute size */
        begin_macro(init_str, 1);
        next();
        decl_initializer(&p, type, 0, DIF_FIRST | DIF_SIZE_ONLY);
        /* prepare second initializer parsing: rewind the saved token
           stream so the real pass re-reads the same initializer */
        macro_ptr = init_str->str;
        next();

        /* if still unknown size, error */
        size = type_size(type, &align);
        if (size < 0)
            tcc_error("unknown type size");

        /* If there's a flex member and it was used in the initializer
           adjust size. */
        if (flexible_array && flexible_array->type.ref->c > 0)
            size += flexible_array->type.ref->c
                    * pointed_size(&flexible_array->type);
    }

    /* take into account specified alignment if bigger */
    if (ad->a.aligned) {
        int speca = 1 << (ad->a.aligned - 1);
        if (speca > align)
            align = speca;
    } else if (ad->a.packed) {
        align = 1;
    }

    if (!v && NODATA_WANTED)
        size = 0, align = 1;

    if ((r & VT_VALMASK) == VT_LOCAL) {
        /* stack-based allocation */
        sec = NULL;
#ifdef CONFIG_TCC_BCHECK
        if (bcheck && v) {
            /* add padding between stack variables for bound checking */
            loc -= align;
        }
#endif
        loc = (loc - size) & -align;
        addr = loc;
        p.local_offset = addr + size;
#ifdef CONFIG_TCC_BCHECK
        if (bcheck && v) {
            /* add padding between stack variables for bound checking */
            loc -= align;
        }
#endif
        if (v) {
            /* local variable */
#ifdef CONFIG_TCC_ASM
            if (ad->asm_label) {
                int reg = asm_parse_regvar(ad->asm_label);
                if (reg >= 0)
                    r = (r & ~VT_VALMASK) | reg;
            }
#endif
            sym = sym_push(v, type, r, addr);
            if (ad->cleanup_func) {
                /* register a __cleanup__ handler for scope exit */
                Sym *cls = sym_push2(&all_cleanups,
                                     SYM_FIELD | ++cur_scope->cl.n, 0, 0);
                cls->prev_tok = sym;
                cls->next = ad->cleanup_func;
                cls->ncl = cur_scope->cl.s;
                cur_scope->cl.s = cls;
            }

            sym->a = ad->a;
        } else {
            /* push local reference */
            vset(type, r, addr);
        }
    } else {
        sym = NULL;
        if (v && global) {
            /* see if the symbol was already defined */
            sym = sym_find(v);
            if (sym) {
                if (p.flex_array_ref && (sym->type.t & type->t & VT_ARRAY)
                    && sym->type.ref->c > type->ref->c) {
                    /* flex array was already declared with explicit size
                            extern int arr[10];
                            int arr[] = { 1,2,3 }; */
                    type->ref->c = sym->type.ref->c;
                    size = type_size(type, &align);
                }
                patch_storage(sym, ad, type);
                /* we accept several definitions of the same global variable. */
                if (!has_init && sym->c && elfsym(sym)->st_shndx != SHN_UNDEF)
                    goto no_alloc;
            }
        }

        /* allocate symbol in corresponding section */
        sec = ad->section;
        if (!sec) {
            /* strip array dimensions to find the element type's
               constness when deciding on rodata vs data */
            CType *tp = type;
            while ((tp->t & (VT_BTYPE|VT_ARRAY)) == (VT_PTR|VT_ARRAY))
                tp = &tp->ref->type;
            if (tp->t & VT_CONSTANT) {
                sec = rodata_section;
            } else if (has_init) {
                sec = data_section;
                /*if (tcc_state->g_debug & 4)
                    tcc_warning("rw data: %s", get_tok_str(v, 0));*/
            } else if (tcc_state->nocommon)
                sec = bss_section;
        }

        if (sec) {
            addr = section_add(sec, size, align);
#ifdef CONFIG_TCC_BCHECK
            /* add padding if bound check */
            if (bcheck)
                section_add(sec, 1, 1);
#endif
        } else {
            addr = align; /* SHN_COMMON is special, symbol value is align */
            sec = common_section;
        }

        if (v) {
            if (!sym) {
                sym = sym_push(v, type, r | VT_SYM, 0);
                patch_storage(sym, ad, NULL);
            }
            /* update symbol definition */
            put_extern_sym(sym, sec, addr, size);
        } else {
            /* push global reference */
            vpush_ref(type, sec, addr, size);
            sym = vtop->sym;
            vtop->r |= r;
        }

#ifdef CONFIG_TCC_BCHECK
        /* handles bounds now because the symbol must be defined
           before for the relocation */
        if (bcheck) {
            addr_t *bounds_ptr;

            greloca(bounds_section, sym, bounds_section->data_offset, R_DATA_PTR, 0);
            /* then add global bound info */
            bounds_ptr = section_ptr_add(bounds_section, 2 * sizeof(addr_t));
            bounds_ptr[0] = 0; /* relocated */
            bounds_ptr[1] = size;
        }
#endif
    }

    if (type->t & VT_VLA) {
        int a;

        if (NODATA_WANTED)
            goto no_alloc;

        /* save before-VLA stack pointer if needed */
        if (cur_scope->vla.num == 0) {
            if (cur_scope->prev && cur_scope->prev->vla.num) {
                cur_scope->vla.locorig = cur_scope->prev->vla.loc;
            } else {
                gen_vla_sp_save(loc -= PTR_SIZE);
                cur_scope->vla.locorig = loc;
            }
        }

        vpush_type_size(type, &a);
        gen_vla_alloc(type, a);
#if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
        /* on _WIN64, because of the function args scratch area, the
           result of alloca differs from RSP and is returned in RAX. */
        gen_vla_result(addr), addr = (loc -= PTR_SIZE);
#endif
        gen_vla_sp_save(addr);
        cur_scope->vla.loc = addr;
        cur_scope->vla.num++;
    } else if (has_init) {
        p.sec = sec;
        decl_initializer(&p, type, addr, DIF_FIRST);
        /* patch flexible array member size back to -1, */
        /* for possible subsequent similar declarations */
        if (flexible_array)
            flexible_array->type.ref->c = -1;
    }

 no_alloc:
    /* restore parse state if needed */
    if (init_str) {
        end_macro();
        next();
    }

    nocode_wanted = saved_nocode_wanted;
}
/* generate vla code saved in post_type().
   For a VLA parameter, replay the saved dimension-expression token
   stream, multiply by the element size and store the byte size into a
   fresh stack slot referenced by arg->type.ref->c.
   Recurses first so inner (nested) VLA types are handled before the
   outer one — NOTE(review): recursion goes through type.ref; confirm
   against post_type() which saved vla_array_str. */
static void func_vla_arg_code(Sym *arg)
{
    int align;
    TokenString *vla_array_tok = NULL;

    if (arg->type.ref)
        func_vla_arg_code(arg->type.ref);

    if ((arg->type.t & VT_VLA) && arg->type.ref->vla_array_str) {
        /* reserve an int-sized, properly aligned stack slot for the
           computed dimension */
        loc -= type_size(&int_type, &align);
        loc &= -align;
        arg->type.ref->c = loc;

        unget_tok(0);
        vla_array_tok = tok_str_alloc();
        vla_array_tok->str = arg->type.ref->vla_array_str;
        /* replay the saved dimension expression as a macro stream */
        begin_macro(vla_array_tok, 1);
        next();
        gexpr();
        end_macro();
        next();
        vpush_type_size(&arg->type.ref->type, &align);
        gen_op('*');
        vset(&int_type, VT_LOCAL|VT_LVAL, arg->type.ref->c);
        vswap();
        vstore();
        vpop();
    }
}
8136 static void func_vla_arg(Sym *sym)
8138 Sym *arg;
8140 for (arg = sym->type.ref->next; arg; arg = arg->next)
8141 if (arg->type.t & VT_VLA)
8142 func_vla_arg_code(arg);
/* parse a function defined by symbol 'sym' and generate its code in
   'cur_text_section' */
static void gen_function(Sym *sym)
{
    /* fresh root scope for this function body */
    struct scope f = { 0 };
    cur_scope = root_scope = &f;
    nocode_wanted = 0;

    ind = cur_text_section->data_offset;
    if (sym->a.aligned) {
        /* pad with nops up to the requested function alignment */
        size_t newoff = section_add(cur_text_section, 0,
                                    1 << (sym->a.aligned - 1));
        gen_fill_nops(newoff - ind);
    }

    /* NOTE: we patch the symbol size later */
    put_extern_sym(sym, cur_text_section, ind, 0);

    if (sym->type.ref->f.func_ctor)
        add_array (tcc_state, ".init_array", sym->c);
    if (sym->type.ref->f.func_dtor)
        add_array (tcc_state, ".fini_array", sym->c);

    funcname = get_tok_str(sym->v, NULL);
    func_ind = ind;
    func_vt = sym->type.ref->type;
    func_var = sym->type.ref->f.func_type == FUNC_ELLIPSIS;

    /* put debug symbol */
    tcc_debug_funcstart(tcc_state, sym);
    /* push a dummy symbol to enable local sym storage */
    sym_push2(&local_stack, SYM_FIELD, 0, 0);
    local_scope = 1; /* for function parameters */
    gfunc_prolog(sym);
    tcc_debug_prolog_epilog(tcc_state, 0);
    local_scope = 0;
    rsym = 0;
    clear_temp_local_var_list();
    func_vla_arg(sym);
    block(0);
    /* resolve all pending 'return' jumps to the epilog */
    gsym(rsym);
    nocode_wanted = 0;
    /* reset local stack */
    pop_local_syms(NULL, 0);
    tcc_debug_prolog_epilog(tcc_state, 1);
    gfunc_epilog();
    cur_text_section->data_offset = ind;
    local_scope = 0;
    label_pop(&global_label_stack, NULL, 0);
    sym_pop(&all_cleanups, NULL, 0);
    /* patch symbol size */
    elfsym(sym)->st_size = ind - func_ind;
    /* end of function */
    tcc_debug_funcend(tcc_state, ind - func_ind);
    /* It's better to crash than to generate wrong code */
    cur_text_section = NULL;
    funcname = ""; /* for safety */
    func_vt.t = VT_VOID; /* for safety */
    func_var = 0; /* for safety */
    ind = 0; /* for safety */
    func_ind = -1;
    nocode_wanted = DATA_ONLY_WANTED;
    check_vstack();
    /* do this after funcend debug info */
    next();
}
/* emit code for all referenced 'static inline' functions whose bodies
   were recorded by decl(); repeats until a whole pass generates
   nothing, since generating one inline function may reference another */
static void gen_inline_functions(TCCState *s)
{
    Sym *sym;
    int inline_generated, i;
    struct InlineFunc *fn;

    tcc_open_bf(s, ":inline:", 0);
    /* iterate while inline function are referenced */
    do {
        inline_generated = 0;
        for (i = 0; i < s->nb_inline_fns; ++i) {
            fn = s->inline_fns[i];
            sym = fn->sym;
            /* sym->c != 0 means the symbol was referenced */
            if (sym && (sym->c || !(sym->type.t & VT_INLINE))) {
                /* the function was used or forced (and then not internal):
                   generate its code and convert it to a normal function */
                fn->sym = NULL;  /* mark as generated */
                tcc_debug_putfile(s, fn->filename);
                /* replay the saved body token stream */
                begin_macro(fn->func_str, 1);
                next();
                cur_text_section = text_section;
                gen_function(sym);
                end_macro();

                inline_generated = 1;
            }
        }
    } while (inline_generated);
    tcc_close();
}
8240 static void free_inline_functions(TCCState *s)
8242 int i;
8243 /* free tokens of unused inline functions */
8244 for (i = 0; i < s->nb_inline_fns; ++i) {
8245 struct InlineFunc *fn = s->inline_fns[i];
8246 if (fn->sym)
8247 tok_str_free(fn->func_str);
8249 dynarray_reset(&s->inline_fns, &s->nb_inline_fns);
8252 static void do_Static_assert(void){
8253 CString error_str;
8254 int c;
8256 next();
8257 skip('(');
8258 c = expr_const();
8260 if (tok == ')') {
8261 if (!c)
8262 tcc_error("_Static_assert fail");
8263 next();
8264 goto static_assert_out;
8267 skip(',');
8268 parse_mult_str(&error_str, "string constant");
8269 if (c == 0)
8270 tcc_error("%s", (char *)error_str.data);
8271 cstr_free(&error_str);
8272 skip(')');
8273 static_assert_out:
8274 skip(';');
/* 'l' is VT_LOCAL or VT_CONST to define default storage type
   or VT_CMP if parsing old style parameter list
   or VT_JMP if parsing c99 for decl: for (int i = 0, ...)
   Returns non-zero only in the VT_JMP case when a declaration was
   actually parsed (see the 'return 1' below). */
static int decl(int l)
{
    int v, has_init, r, oldint;
    CType type, btype;
    Sym *sym;
    AttributeDef ad, adbase;

    while (1) {
        if (tok == TOK_STATIC_ASSERT) {
            do_Static_assert();
            continue;
        }

        oldint = 0;
        if (!parse_btype(&btype, &adbase, l == VT_LOCAL)) {
            /* no basic type could be parsed */
            if (l == VT_JMP)
                return 0;
            /* skip redundant ';' if not in old parameter decl scope */
            if (tok == ';' && l != VT_CMP) {
                next();
                continue;
            }
            if (l != VT_CONST)
                break;
            if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
                /* global asm block */
                asm_global_instr();
                continue;
            }
            if (tok >= TOK_UIDENT) {
                /* special test for old K&R protos without explicit int
                   type. Only accepted when defining global data */
                btype.t = VT_INT;
                oldint = 1;
            } else {
                if (tok != TOK_EOF)
                    expect("declaration");
                break;
            }
        }

        if (tok == ';') {
            /* type without declarator: only meaningful for tagged types */
            if ((btype.t & VT_BTYPE) == VT_STRUCT) {
                v = btype.ref->v;
                if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) >= SYM_FIRST_ANOM)
                    tcc_warning("unnamed struct/union that defines no instances");
                next();
                continue;
            }
            if (IS_ENUM(btype.t)) {
                next();
                continue;
            }
        }

        while (1) { /* iterate thru each declaration */
            type = btype;
            ad = adbase;
            type_decl(&type, &ad, &v, TYPE_DIRECT);
#if 0
            {
                char buf[500];
                type_to_str(buf, sizeof(buf), &type, get_tok_str(v, NULL));
                printf("type = '%s'\n", buf);
            }
#endif
            if ((type.t & VT_BTYPE) == VT_FUNC) {
                if ((type.t & VT_STATIC) && (l != VT_CONST))
                    tcc_error("function without file scope cannot be static");
                /* if old style function prototype, we accept a
                   declaration list */
                sym = type.ref;
                if (sym->f.func_type == FUNC_OLD && l == VT_CONST) {
                    func_vt = type;
                    /* recurse to parse the K&R parameter declarations */
                    decl(VT_CMP);
                }
#if defined TCC_TARGET_MACHO || defined TARGETOS_ANDROID
                if (sym->f.func_alwinl
                    && ((type.t & (VT_EXTERN | VT_INLINE))
                        == (VT_EXTERN | VT_INLINE))) {
                    /* always_inline functions must be handled as if they
                       don't generate multiple global defs, even if extern
                       inline, i.e. GNU inline semantics for those. Rewrite
                       them into static inline. */
                    type.t &= ~VT_EXTERN;
                    type.t |= VT_STATIC;
                }
#endif
                /* always compile 'extern inline' */
                if (type.t & VT_EXTERN)
                    type.t &= ~VT_INLINE;

            } else if (oldint) {
                tcc_warning("type defaults to int");
            }

            if (gnu_ext && (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
                ad.asm_label = asm_label_instr();
                /* parse one last attribute list, after asm label */
                parse_attribute(&ad);
#if 0
                /* gcc does not allow __asm__("label") with function definition,
                   but why not ... */
                if (tok == '{')
                    expect(";");
#endif
            }

#ifdef TCC_TARGET_PE
            if (ad.a.dllimport || ad.a.dllexport) {
                if (type.t & VT_STATIC)
                    tcc_error("cannot have dll linkage with static");
                if (type.t & VT_TYPEDEF) {
                    tcc_warning("'%s' attribute ignored for typedef",
                        ad.a.dllimport ? (ad.a.dllimport = 0, "dllimport") :
                        (ad.a.dllexport = 0, "dllexport"));
                } else if (ad.a.dllimport) {
                    if ((type.t & VT_BTYPE) == VT_FUNC)
                        ad.a.dllimport = 0;
                    else
                        type.t |= VT_EXTERN;
                }
            }
#endif
            if (tok == '{') {
                /* function definition */
                if (l != VT_CONST)
                    tcc_error("cannot use local functions");
                if ((type.t & VT_BTYPE) != VT_FUNC)
                    expect("function definition");

                /* reject abstract declarators in function definition
                   make old style params without decl have int type */
                sym = type.ref;
                while ((sym = sym->next) != NULL) {
                    if (!(sym->v & ~SYM_FIELD))
                        expect("identifier");
                    if (sym->type.t == VT_VOID)
                        sym->type = int_type;
                }

                /* apply post-declaraton attributes */
                merge_funcattr(&type.ref->f, &ad.f);

                /* put function symbol */
                type.t &= ~VT_EXTERN;
                sym = external_sym(v, &type, 0, &ad);

                /* static inline functions are just recorded as a kind
                   of macro. Their code will be emitted at the end of
                   the compilation unit only if they are used */
                if (sym->type.t & VT_INLINE) {
                    struct InlineFunc *fn;
                    fn = tcc_malloc(sizeof *fn + strlen(file->filename));
                    strcpy(fn->filename, file->filename);
                    fn->sym = sym;
                    skip_or_save_block(&fn->func_str);
                    dynarray_add(&tcc_state->inline_fns,
                                 &tcc_state->nb_inline_fns, fn);
                } else {
                    /* compute text section */
                    cur_text_section = ad.section;
                    if (!cur_text_section)
                        cur_text_section = text_section;
                    gen_function(sym);
                }
                break;
            } else {
                if (l == VT_CMP) {
                    /* find parameter in function parameter list */
                    for (sym = func_vt.ref->next; sym; sym = sym->next)
                        if ((sym->v & ~SYM_FIELD) == v)
                            goto found;
                    tcc_error("declaration for parameter '%s' but no such parameter",
                              get_tok_str(v, NULL));
                found:
                    if (type.t & VT_STORAGE) /* 'register' is okay */
                        tcc_error("storage class specified for '%s'",
                                  get_tok_str(v, NULL));
                    if (sym->type.t != VT_VOID)
                        tcc_error("redefinition of parameter '%s'",
                                  get_tok_str(v, NULL));
                    convert_parameter_type(&type);
                    sym->type = type;
                } else if (type.t & VT_TYPEDEF) {
                    /* save typedefed type */
                    /* XXX: test storage specifiers ? */
                    sym = sym_find(v);
                    if (sym && sym->sym_scope == local_scope) {
                        /* redefinition in the same scope: must match */
                        if (!is_compatible_types(&sym->type, &type)
                            || !(sym->type.t & VT_TYPEDEF))
                            tcc_error("incompatible redefinition of '%s'",
                                get_tok_str(v, NULL));
                        sym->type = type;
                    } else {
                        sym = sym_push(v, &type, 0, 0);
                    }
                    sym->a = ad.a;
                    sym->f = ad.f;
                    if (debug_modes)
                        tcc_debug_typedef (tcc_state, sym);
                } else if ((type.t & VT_BTYPE) == VT_VOID
                           && !(type.t & VT_EXTERN)) {
                    tcc_error("declaration of void object");
                } else {
                    r = 0;
                    if ((type.t & VT_BTYPE) == VT_FUNC) {
                        /* external function definition */
                        /* specific case for func_call attribute */
                        type.ref->f = ad.f;
                    } else if (!(type.t & VT_ARRAY)) {
                        /* not lvalue if array */
                        r |= VT_LVAL;
                    }
                    has_init = (tok == '=');
                    if (has_init && (type.t & VT_VLA))
                        tcc_error("variable length array cannot be initialized");
                    if (((type.t & VT_EXTERN) && (!has_init || l != VT_CONST))
                        || (type.t & VT_BTYPE) == VT_FUNC
                        /* as with GCC, uninitialized global arrays with no size
                           are considered extern: */
                        || ((type.t & VT_ARRAY) && !has_init
                            && l == VT_CONST && type.ref->c < 0)
                        ) {
                        /* external variable or function */
                        type.t |= VT_EXTERN;
                        sym = external_sym(v, &type, r, &ad);
                        if (ad.alias_target) {
                            /* Aliases need to be emitted when their target
                               symbol is emitted, even if perhaps unreferenced.
                               We only support the case where the base is
                               already defined, otherwise we would need
                               deferring to emit the aliases until the end of
                               the compile unit. */
                            Sym *alias_target = sym_find(ad.alias_target);
                            ElfSym *esym = elfsym(alias_target);
                            if (!esym)
                                tcc_error("unsupported forward __alias__ attribute");
                            put_extern_sym2(sym, esym->st_shndx,
                                            esym->st_value, esym->st_size, 1);
                        }
                    } else {
                        if (l == VT_CONST || (type.t & VT_STATIC))
                            r |= VT_CONST;
                        else
                            r |= VT_LOCAL;
                        if (has_init)
                            next();
                        else if (l == VT_CONST)
                            /* uninitialized global variables may be overridden */
                            type.t |= VT_EXTERN;
                        decl_initializer_alloc(&type, &ad, r, has_init, v, l == VT_CONST);
                    }
                }
                if (tok != ',') {
                    if (l == VT_JMP)
                        return 1;
                    skip(';');
                    break;
                }
                next();
            }
        }
    }
    return 0;
}
8546 /* ------------------------------------------------------------------------- */
8547 #undef gjmp_addr
8548 #undef gjmp
8549 /* ------------------------------------------------------------------------- */