Support asm goto
[tinycc.git] / tccgen.c
blobcc7cc9eb036541b5ca14d48feefc4b462ed9d232
1 /*
2 * TCC - Tiny C Compiler
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 #define USING_GLOBALS
22 #include "tcc.h"
24 /********************************************************/
25 /* global variables */
27 /* loc : local variable index
28 ind : output code index
29 rsym: return symbol
30 anon_sym: anonymous symbol index
32 ST_DATA int rsym, anon_sym, ind, loc;
34 ST_DATA Sym *global_stack;
35 ST_DATA Sym *local_stack;
36 ST_DATA Sym *define_stack;
37 ST_DATA Sym *global_label_stack;
38 ST_DATA Sym *local_label_stack;
40 static Sym *sym_free_first;
41 static void **sym_pools;
42 static int nb_sym_pools;
44 static Sym *all_cleanups, *pending_gotos;
45 static int local_scope;
46 static int in_sizeof;
47 ST_DATA char debug_modes;
49 ST_DATA SValue *vtop;
50 static SValue _vstack[1 + VSTACK_SIZE];
51 #define vstack (_vstack + 1)
53 ST_DATA int const_wanted; /* true if constant wanted */
54 ST_DATA int nocode_wanted; /* no code generation wanted */
55 #define unevalmask 0xffff /* unevaluated subexpression */
56 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
57 #define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */
59 /* Automagical code suppression ----> */
60 #define CODE_OFF() (nocode_wanted |= 0x20000000)
61 #define CODE_ON() (nocode_wanted &= ~0x20000000)
63 /* Clear 'nocode_wanted' at label if it was used */
/* Resolve forward-jump chain 't' to the current output position; a
   reached label means code after it is live again, so re-enable codegen. */
ST_FUNC void gsym(int t) { if (t) { gsym_addr(t, ind); CODE_ON(); }}
/* Return the current code index for use as a label target and turn code
   generation back on; notifies test-coverage bookkeeping if enabled. */
static int gind(void) { int t = ind; CODE_ON(); if (debug_modes) tcc_tcov_block_begin(tcc_state); return t; }
67 /* Set 'nocode_wanted' after unconditional jumps */
/* Jump to a known address; code after an unconditional jump is dead. */
static void gjmp_addr_acs(int t) { gjmp_addr(t); CODE_OFF(); }
/* Emit a forward jump chained onto 't'; following code is unreachable. */
static int gjmp_acs(int t) { t = gjmp(t); CODE_OFF(); return t; }
71 /* These are #undef'd at the end of this file */
72 #define gjmp_addr gjmp_addr_acs
73 #define gjmp gjmp_acs
74 /* <---- */
76 ST_DATA int global_expr; /* true if compound literals must be allocated globally (used during initializers parsing */
77 ST_DATA CType func_vt; /* current function return type (used by return instruction) */
78 ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
79 ST_DATA int func_vc;
80 ST_DATA int func_ind;
81 ST_DATA const char *funcname;
82 ST_DATA CType int_type, func_old_type, char_type, char_pointer_type;
83 static CString initstr;
85 #if PTR_SIZE == 4
86 #define VT_SIZE_T (VT_INT | VT_UNSIGNED)
87 #define VT_PTRDIFF_T VT_INT
88 #elif LONG_SIZE == 4
89 #define VT_SIZE_T (VT_LLONG | VT_UNSIGNED)
90 #define VT_PTRDIFF_T VT_LLONG
91 #else
92 #define VT_SIZE_T (VT_LONG | VT_LLONG | VT_UNSIGNED)
93 #define VT_PTRDIFF_T (VT_LONG | VT_LLONG)
94 #endif
96 static struct switch_t {
97 struct case_t {
98 int64_t v1, v2;
99 int sym;
100 } **p; int n; /* list of case ranges */
101 int def_sym; /* default symbol */
102 int *bsym;
103 struct scope *scope;
104 struct switch_t *prev;
105 SValue sv;
106 } *cur_switch; /* current switch */
108 #define MAX_TEMP_LOCAL_VARIABLE_NUMBER 8
109 /*list of temporary local variables on the stack in current function. */
110 static struct temp_local_variable {
111 int location; //offset on stack. Svalue.c.i
112 short size;
113 short align;
114 } arr_temp_local_vars[MAX_TEMP_LOCAL_VARIABLE_NUMBER];
115 static int nb_temp_local_vars;
117 static struct scope {
118 struct scope *prev;
119 struct { int loc, locorig, num; } vla;
120 struct { Sym *s; int n; } cl;
121 int *bsym, *csym;
122 Sym *lstk, *llstk;
123 } *cur_scope, *loop_scope, *root_scope;
125 typedef struct {
126 Section *sec;
127 int local_offset;
128 Sym *flex_array_ref;
129 } init_params;
131 #if 1
132 #define precedence_parser
133 static void init_prec(void);
134 #endif
136 static void gen_cast(CType *type);
137 static void gen_cast_s(int t);
138 static inline CType *pointed_type(CType *type);
139 static int is_compatible_types(CType *type1, CType *type2);
140 static int parse_btype(CType *type, AttributeDef *ad, int ignore_label);
141 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td);
142 static void parse_expr_type(CType *type);
143 static void init_putv(init_params *p, CType *type, unsigned long c);
144 static void decl_initializer(init_params *p, CType *type, unsigned long c, int flags);
145 static void block(int is_expr);
146 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
147 static void decl(int l);
148 static int decl0(int l, int is_for_loop_init, Sym *);
149 static void expr_eq(void);
150 static void vpush_type_size(CType *type, int *a);
151 static int is_compatible_unqualified_types(CType *type1, CType *type2);
152 static inline int64_t expr_const64(void);
153 static void vpush64(int ty, unsigned long long v);
154 static void vpush(CType *type);
155 static int gvtst(int inv, int t);
156 static void gen_inline_functions(TCCState *s);
157 static void free_inline_functions(TCCState *s);
158 static void skip_or_save_block(TokenString **str);
159 static void gv_dup(void);
160 static int get_temp_local_var(int size,int align);
161 static void clear_temp_local_var_list();
162 static void cast_error(CType *st, CType *dt);
164 /* ------------------------------------------------------------------------- */
166 ST_INLN int is_float(int t)
168 int bt = t & VT_BTYPE;
169 return bt == VT_LDOUBLE
170 || bt == VT_DOUBLE
171 || bt == VT_FLOAT
172 || bt == VT_QFLOAT;
175 static inline int is_integer_btype(int bt)
177 return bt == VT_BYTE
178 || bt == VT_BOOL
179 || bt == VT_SHORT
180 || bt == VT_INT
181 || bt == VT_LLONG;
184 static int btype_size(int bt)
186 return bt == VT_BYTE || bt == VT_BOOL ? 1 :
187 bt == VT_SHORT ? 2 :
188 bt == VT_INT ? 4 :
189 bt == VT_LLONG ? 8 :
190 bt == VT_PTR ? PTR_SIZE : 0;
193 /* returns function return register from type */
/* Return the first function-return register for a value of type 't'.
   Integers use REG_IRET; floats use REG_FRET except for target-specific
   long-double handling below. */
static int R_RET(int t)
{
    if (!is_float(t))
        return REG_IRET;
#ifdef TCC_TARGET_X86_64
    /* x86-64 returns long double on the x87 stack */
    if ((t & VT_BTYPE) == VT_LDOUBLE)
        return TREG_ST0;
#elif defined TCC_TARGET_RISCV64
    /* riscv64 returns long double in integer registers */
    if ((t & VT_BTYPE) == VT_LDOUBLE)
        return REG_IRET;
#endif
    return REG_FRET;
}
208 /* returns 2nd function return register, if any */
/* Return the second function-return register for type 't', or VT_CONST
   when the type fits in a single register (see USING_TWO_WORDS). */
static int R2_RET(int t)
{
    t &= VT_BTYPE;
#if PTR_SIZE == 4
    if (t == VT_LLONG)
        return REG_IRE2;
#elif defined TCC_TARGET_X86_64
    if (t == VT_QLONG)
        return REG_IRE2;
    if (t == VT_QFLOAT)
        return REG_FRE2;
#elif defined TCC_TARGET_RISCV64
    if (t == VT_LDOUBLE)
        return REG_IRE2;
#endif
    return VT_CONST;
}
227 /* returns true for two-word types */
228 #define USING_TWO_WORDS(t) (R2_RET(t) != VT_CONST)
230 /* put function return registers to stack value */
/* Store the function return register pair for type 't' into stack value 'sv'. */
static void PUT_R_RET(SValue *sv, int t)
{
    sv->r = R_RET(t), sv->r2 = R2_RET(t);
}
236 /* returns function return register class for type t */
/* Register class of the return register for type 't', with the generic
   RC_FLOAT/RC_INT bits stripped so only the specific class remains. */
static int RC_RET(int t)
{
    return reg_classes[R_RET(t)] & ~(RC_FLOAT | RC_INT);
}
242 /* returns generic register class for type t */
/* Generic register class for type 't': RC_INT for integers, RC_FLOAT for
   floats, with target-specific exceptions for long double / qfloat. */
static int RC_TYPE(int t)
{
    if (!is_float(t))
        return RC_INT;
#ifdef TCC_TARGET_X86_64
    if ((t & VT_BTYPE) == VT_LDOUBLE)
        return RC_ST0;
    if ((t & VT_BTYPE) == VT_QFLOAT)
        return RC_FRET;
#elif defined TCC_TARGET_RISCV64
    if ((t & VT_BTYPE) == VT_LDOUBLE)
        return RC_INT;
#endif
    return RC_FLOAT;
}
259 /* returns 2nd register class corresponding to t and rc */
/* Register class for the second word of a two-word value of type 't',
   given that the first word was allocated in class 'rc'.  Returns 0 for
   single-word types. */
static int RC2_TYPE(int t, int rc)
{
    if (!USING_TWO_WORDS(t))
        return 0;
#ifdef RC_IRE2
    if (rc == RC_IRET)
        return RC_IRE2;
#endif
#ifdef RC_FRE2
    if (rc == RC_FRET)
        return RC_FRE2;
#endif
    if (rc & RC_FLOAT)
        return RC_FLOAT;
    return RC_INT;
}
277 /* we use our own 'finite' function to avoid potential problems with
278 non standard math libs */
279 /* XXX: endianness dependent */
280 ST_FUNC int ieee_finite(double d)
282 int p[4];
283 memcpy(p, &d, sizeof(double));
284 return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
287 /* compiling intel long double natively */
288 #if (defined __i386__ || defined __x86_64__) \
289 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
290 # define TCC_IS_NATIVE_387
291 #endif
293 ST_FUNC void test_lvalue(void)
295 if (!(vtop->r & VT_LVAL))
296 expect("lvalue");
/* Internal consistency check: the value stack must be empty between
   top-level declarations; report any leftover entries as a leak. */
ST_FUNC void check_vstack(void)
{
    if (vtop != vstack - 1)
        tcc_error("internal compiler error: vstack leak (%d)",
                  (int)(vtop - vstack + 1));
}
/* vstack debugging aid: dump 'b' entries starting at vtop[-a].
   Compiled out by default. */
#if 0
void pv (const char *lbl, int a, int b)
{
    int i;
    for (i = a; i < a + b; ++i) {
        SValue *p = &vtop[-i];
        printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
            lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
    }
}
#endif
319 /* ------------------------------------------------------------------------- */
320 /* initialize vstack and types. This must be done also for tcc -E */
/* Initialize the value stack and the commonly used predefined types.
   Must run even for tcc -E (the preprocessor shares this state). */
ST_FUNC void tccgen_init(TCCState *s1)
{
    vtop = vstack - 1;
    memset(vtop, 0, sizeof *vtop);

    /* define some often used types */
    int_type.t = VT_INT;

    char_type.t = VT_BYTE;
    if (s1->char_is_unsigned)
        char_type.t |= VT_UNSIGNED;
    char_pointer_type = char_type;
    mk_pointer(&char_pointer_type);

    /* K&R-style function type: returns int, unspecified parameters */
    func_old_type.t = VT_FUNC;
    func_old_type.ref = sym_push(SYM_FIELD, &int_type, 0, 0);
    func_old_type.ref->f.func_call = FUNC_CDECL;
    func_old_type.ref->f.func_type = FUNC_OLD;
#ifdef precedence_parser
    init_prec();
#endif
    cstr_new(&initstr);
}
/* Compile one translation unit: reset per-file codegen state, parse all
   top-level declarations, emit deferred inline functions, and verify the
   value stack is balanced.  Returns 0. */
ST_FUNC int tccgen_compile(TCCState *s1)
{
    cur_text_section = NULL;
    funcname = "";
    func_ind = -1;
    anon_sym = SYM_FIRST_ANOM;
    const_wanted = 0;
    /* static-data-only mode until we enter a function body */
    nocode_wanted = 0x80000000;
    local_scope = 0;
    debug_modes = (s1->do_debug ? 1 : 0) | s1->test_coverage << 1;

    tcc_debug_start(s1);
    tcc_tcov_start (s1);
#ifdef TCC_TARGET_ARM
    arm_init(s1);
#endif
#ifdef INC_DEBUG
    printf("%s: **** new file\n", file->filename);
#endif
    parse_flags = PARSE_FLAG_PREPROCESS | PARSE_FLAG_TOK_NUM | PARSE_FLAG_TOK_STR;
    next();
    decl(VT_CONST);
    gen_inline_functions(s1);
    check_vstack();
    /* end of translation unit info */
    tcc_debug_end(s1);
    tcc_tcov_end(s1);
    return 0;
}
/* Release all compiler state owned by this module: symbol stacks,
   deferred inline functions, preprocessor macros and the symbol pools. */
ST_FUNC void tccgen_finish(TCCState *s1)
{
    cstr_free(&initstr);
    free_inline_functions(s1);
    sym_pop(&global_stack, NULL, 0);
    sym_pop(&local_stack, NULL, 0);
    /* free preprocessor macros */
    free_defines(NULL);
    /* free sym_pools */
    dynarray_reset(&sym_pools, &nb_sym_pools);
    sym_free_first = NULL;
}
388 /* ------------------------------------------------------------------------- */
/* Return the ELF symbol table entry for 's', or NULL if 's' has no
   symtab index yet (s->c == 0). */
ST_FUNC ElfSym *elfsym(Sym *s)
{
    if (!s || !s->c)
        return NULL;
    return &((ElfSym *)symtab_section->data)[s->c];
}
396 /* apply storage attributes to Elf symbol */
/* apply storage attributes to Elf symbol: visibility, binding
   (local/weak/global) and, on PE, dllimport/dllexport flags.  No-op if
   the symbol has no symtab entry yet. */
ST_FUNC void update_storage(Sym *sym)
{
    ElfSym *esym;
    int sym_bind, old_sym_bind;

    esym = elfsym(sym);
    if (!esym)
        return;

    if (sym->a.visibility)
        esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1))
            | sym->a.visibility;

    if (sym->type.t & (VT_STATIC | VT_INLINE))
        sym_bind = STB_LOCAL;
    else if (sym->a.weak)
        sym_bind = STB_WEAK;
    else
        sym_bind = STB_GLOBAL;
    old_sym_bind = ELFW(ST_BIND)(esym->st_info);
    if (sym_bind != old_sym_bind) {
        /* keep the symbol type, only rewrite the binding */
        esym->st_info = ELFW(ST_INFO)(sym_bind, ELFW(ST_TYPE)(esym->st_info));
    }

#ifdef TCC_TARGET_PE
    if (sym->a.dllimport)
        esym->st_other |= ST_PE_IMPORT;
    if (sym->a.dllexport)
        esym->st_other |= ST_PE_EXPORT;
#endif

#if 0
    printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
        get_tok_str(sym->v, NULL),
        sym_bind == STB_WEAK ? 'w' : sym_bind == STB_LOCAL ? 'l' : 'g',
        sym->a.visibility,
        sym->a.dllexport,
        sym->a.dllimport
        );
#endif
}
439 /* ------------------------------------------------------------------------- */
440 /* update sym->c so that it points to an external symbol in section
441 'section' with value 'value' */
/* Create or update the ELF symbol for 'sym'.  On first use, derive the
   ELF type/binding from the C type, apply name decoration (PE stdcall
   suffix, explicit asm label, leading underscore) and insert it into the
   symbol table; afterwards just refresh value/size/section and storage
   attributes. */
ST_FUNC void put_extern_sym2(Sym *sym, int sh_num,
                             addr_t value, unsigned long size,
                             int can_add_underscore)
{
    int sym_type, sym_bind, info, other, t;
    ElfSym *esym;
    const char *name;
    char buf1[256];

    if (!sym->c) {
        name = get_tok_str(sym->v, NULL);
        t = sym->type.t;
        if ((t & VT_BTYPE) == VT_FUNC) {
            sym_type = STT_FUNC;
        } else if ((t & VT_BTYPE) == VT_VOID) {
            sym_type = STT_NOTYPE;
            /* symbols defined in asm code are functions, not objects */
            if ((t & (VT_BTYPE|VT_ASM_FUNC)) == VT_ASM_FUNC)
                sym_type = STT_FUNC;
        } else {
            sym_type = STT_OBJECT;
        }
        if (t & (VT_STATIC | VT_INLINE))
            sym_bind = STB_LOCAL;
        else
            sym_bind = STB_GLOBAL;
        other = 0;

#ifdef TCC_TARGET_PE
        if (sym_type == STT_FUNC && sym->type.ref) {
            Sym *ref = sym->type.ref;
            if (ref->a.nodecorate) {
                can_add_underscore = 0;
            }
            if (ref->f.func_call == FUNC_STDCALL && can_add_underscore) {
                /* stdcall decoration: _name@<arg bytes> */
                sprintf(buf1, "_%s@%d", name, ref->f.func_args * PTR_SIZE);
                name = buf1;
                other |= ST_PE_STDCALL;
                can_add_underscore = 0;
            }
        }
#endif
        if (sym->asm_label) {
            /* an explicit asm label overrides the C name, undecorated */
            name = get_tok_str(sym->asm_label, NULL);
            can_add_underscore = 0;
        }
        if (tcc_state->leading_underscore && can_add_underscore) {
            buf1[0] = '_';
            pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
            name = buf1;
        }
        info = ELFW(ST_INFO)(sym_bind, sym_type);
        sym->c = put_elf_sym(symtab_section, value, size, info, other, sh_num, name);

        if (debug_modes)
            tcc_debug_extern_sym(tcc_state, sym, sh_num, sym_bind, sym_type);

    } else {
        esym = elfsym(sym);
        esym->st_value = value;
        esym->st_size = size;
        esym->st_shndx = sh_num;
    }
    update_storage(sym);
}
/* Convenience wrapper over put_extern_sym2: map a Section to its section
   number and suppress output entirely when code/data is unwanted. */
ST_FUNC void put_extern_sym(Sym *sym, Section *s, addr_t value, unsigned long size)
{
    if (nocode_wanted && (NODATA_WANTED || (s && s == cur_text_section)))
        return;
    put_extern_sym2(sym, s ? s->sh_num : SHN_UNDEF, value, size, 1);
}
518 /* add a new relocation entry to symbol 'sym' in section 's' */
/* add a new relocation entry to symbol 'sym' in section 's'.  Forces the
   symbol into the ELF symtab first if it has no entry yet. */
ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
                     addr_t addend)
{
    int c = 0;

    if (nocode_wanted && s == cur_text_section)
        return;

    if (sym) {
        if (0 == sym->c)
            put_extern_sym(sym, NULL, 0, 0);
        c = sym->c;
    }

    /* now we can add ELF relocation info */
    put_elf_reloca(symtab_section, s, offset, type, c, addend);
}
#if PTR_SIZE == 4
/* 32-bit targets use REL relocations: greloca with a zero addend */
ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
{
    greloca(s, sym, offset, type, 0);
}
#endif
544 /* ------------------------------------------------------------------------- */
545 /* symbol allocator */
/* Allocate a fresh pool of SYM_POOL_NB Sym objects, thread them all onto
   the free list, and return the list head.  Pools are tracked in
   sym_pools so tccgen_finish can release them. */
static Sym *__sym_malloc(void)
{
    Sym *sym_pool, *sym, *last_sym;
    int i;

    sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
    dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);

    last_sym = sym_free_first;
    sym = sym_pool;
    for(i = 0; i < SYM_POOL_NB; i++) {
        sym->next = last_sym;
        last_sym = sym;
        sym++;
    }
    sym_free_first = last_sym;
    return last_sym;
}
/* Pop a Sym from the free list, refilling the pool when empty.  With
   SYM_DEBUG each Sym is an individual heap allocation instead. */
static inline Sym *sym_malloc(void)
{
    Sym *sym;
#ifndef SYM_DEBUG
    sym = sym_free_first;
    if (!sym)
        sym = __sym_malloc();
    sym_free_first = sym->next;
    return sym;
#else
    sym = tcc_malloc(sizeof(Sym));
    return sym;
#endif
}
/* Return 'sym' to the free list (or free it outright under SYM_DEBUG). */
ST_INLN void sym_free(Sym *sym)
{
#ifndef SYM_DEBUG
    sym->next = sym_free_first;
    sym_free_first = sym;
#else
    tcc_free(sym);
#endif
}
590 /* push, without hashing */
/* push, without hashing: allocate a zeroed Sym with value 'v', type bits
   't' and constant 'c', and link it on top of stack '*ps'. */
ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, int c)
{
    Sym *s;

    s = sym_malloc();
    memset(s, 0, sizeof *s);
    s->v = v;
    s->type.t = t;
    s->c = c;
    /* add in stack */
    s->prev = *ps;
    *ps = s;
    return s;
}
606 /* find a symbol and return its associated structure. 's' is the top
607 of the symbol stack */
/* find a symbol and return its associated structure. 's' is the top
   of the symbol stack.  A sentinel with v == -1 stops the search. */
ST_FUNC Sym *sym_find2(Sym *s, int v)
{
    while (s) {
        if (s->v == v)
            return s;
        else if (s->v == -1)
            return NULL;
        s = s->prev;
    }
    return NULL;
}
620 /* structure lookup */
/* structure lookup: struct/union/enum tag symbol for token 'v', or NULL. */
ST_INLN Sym *struct_find(int v)
{
    v -= TOK_IDENT;
    if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
        return NULL;
    return table_ident[v]->sym_struct;
}
629 /* find an identifier */
/* find an identifier: ordinary-namespace symbol for token 'v', or NULL. */
ST_INLN Sym *sym_find(int v)
{
    v -= TOK_IDENT;
    if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
        return NULL;
    return table_ident[v]->sym_identifier;
}
/* Scope of symbol 's'; enum values inherit the scope of their enum type. */
static int sym_scope(Sym *s)
{
    if (IS_ENUM_VAL (s->type.t))
        return s->type.ref->sym_scope;
    else
        return s->sym_scope;
}
646 /* push a given symbol on the symbol stack */
/* push a given symbol on the symbol stack (local stack when inside a
   block, global otherwise) and hook it into the token table so sym_find /
   struct_find see it.  Errors out on a same-scope redeclaration. */
ST_FUNC Sym *sym_push(int v, CType *type, int r, int c)
{
    Sym *s, **ps;
    TokenSym *ts;

    if (local_stack)
        ps = &local_stack;
    else
        ps = &global_stack;
    s = sym_push2(ps, v, type->t, c);
    s->type.ref = type->ref;
    s->r = r;
    /* don't record fields or anonymous symbols */
    /* XXX: simplify */
    if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
        /* record symbol in token array */
        ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
        if (v & SYM_STRUCT)
            ps = &ts->sym_struct;
        else
            ps = &ts->sym_identifier;
        s->prev_tok = *ps;
        *ps = s;
        s->sym_scope = local_scope;
        if (s->prev_tok && sym_scope(s->prev_tok) == s->sym_scope)
            tcc_error("redeclaration of '%s'",
                      get_tok_str(v & ~SYM_STRUCT, NULL));
    }
    return s;
}
678 /* push a global identifier */
/* push a global identifier onto the global symbol stack, splicing it
   *below* any local-scope entries in the token chain so it becomes
   visible once locals are popped (needed by inline asm). */
ST_FUNC Sym *global_identifier_push(int v, int t, int c)
{
    Sym *s, **ps;
    s = sym_push2(&global_stack, v, t, c);
    s->r = VT_CONST | VT_SYM;
    /* don't record anonymous symbol */
    if (v < SYM_FIRST_ANOM) {
        ps = &table_ident[v - TOK_IDENT]->sym_identifier;
        /* modify the top most local identifier, so that sym_identifier will
           point to 's' when popped; happens when called from inline asm */
        while (*ps != NULL && (*ps)->sym_scope)
            ps = &(*ps)->prev_tok;
        s->prev_tok = *ps;
        *ps = s;
    }
    return s;
}
697 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
698 pop them yet from the list, but do remove them from the token array. */
/* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
   pop them yet from the list, but do remove them from the token array. */
ST_FUNC void sym_pop(Sym **ptop, Sym *b, int keep)
{
    Sym *s, *ss, **ps;
    TokenSym *ts;
    int v;

    s = *ptop;
    while(s != b) {
        ss = s->prev;
        v = s->v;
        /* remove symbol in token array */
        /* XXX: simplify */
        if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
            ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
            if (v & SYM_STRUCT)
                ps = &ts->sym_struct;
            else
                ps = &ts->sym_identifier;
            *ps = s->prev_tok;
        }
        if (!keep)
            sym_free(s);
        s = ss;
    }
    if (!keep)
        *ptop = b;
}
727 /* ------------------------------------------------------------------------- */
/* Materialize a pending comparison result before other code is emitted. */
static void vcheck_cmp(void)
{
    /* cannot let cpu flags if other instruction are generated. Also
       avoid leaving VT_JMP anywhere except on the top of the stack
       because it would complicate the code generator.

       Don't do this when nocode_wanted. vtop might come from
       !nocode_wanted regions (see 88_codeopt.c) and transforming
       it to a register without actually generating code is wrong
       as their value might still be used for real. All values
       we push under nocode_wanted will eventually be popped
       again, so that the VT_CMP/VT_JMP value will be in vtop
       when code is unsuppressed again. */

    if (vtop->r == VT_CMP && !nocode_wanted)
        gv(RC_INT);
}
/* Push a new value-stack entry with type, register/flags 'r' and
   constant 'vc'.  Errors out on vstack overflow. */
static void vsetc(CType *type, int r, CValue *vc)
{
    if (vtop >= vstack + (VSTACK_SIZE - 1))
        tcc_error("memory full (vstack)");
    vcheck_cmp();
    vtop++;
    vtop->type = *type;
    vtop->r = r;
    vtop->r2 = VT_CONST;
    vtop->c = *vc;
    vtop->sym = NULL;
}
/* Exchange the two topmost value-stack entries. */
ST_FUNC void vswap(void)
{
    SValue tmp;

    vcheck_cmp();
    tmp = vtop[0];
    vtop[0] = vtop[-1];
    vtop[-1] = tmp;
}
769 /* pop stack value */
/* pop stack value, releasing any machine resource it holds (x87 stack
   slot on x86, pending comparison jumps). */
ST_FUNC void vpop(void)
{
    int v;
    v = vtop->r & VT_VALMASK;
#if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
    /* for x86, we need to pop the FP stack */
    if (v == TREG_ST0) {
        o(0xd8dd); /* fstp %st(0) */
    } else
#endif
    if (v == VT_CMP) {
        /* need to put correct jump if && or || without test */
        gsym(vtop->jtrue);
        gsym(vtop->jfalse);
    }
    vtop--;
}
788 /* push constant of type "type" with useless value */
/* push constant of type "type" with useless value */
static void vpush(CType *type)
{
    vset(type, VT_CONST, 0);
}
794 /* push arbitrary 64bit constant */
/* push arbitrary 64bit constant with type bits 'ty' */
static void vpush64(int ty, unsigned long long v)
{
    CValue cval;
    CType ctype;
    ctype.t = ty;
    ctype.ref = NULL;
    cval.i = v;
    vsetc(&ctype, VT_CONST, &cval);
}
805 /* push integer constant */
/* push integer constant */
ST_FUNC void vpushi(int v)
{
    vpush64(VT_INT, v);
}
811 /* push a pointer sized constant */
/* push a pointer sized constant */
static void vpushs(addr_t v)
{
    vpush64(VT_SIZE_T, v);
}
817 /* push long long constant */
/* push long long constant */
static inline void vpushll(long long v)
{
    vpush64(VT_LLONG, v);
}
/* Push a value of 'type' in register/flags 'r' with integer constant 'v'. */
ST_FUNC void vset(CType *type, int r, int v)
{
    CValue cval;
    cval.i = v;
    vsetc(type, r, &cval);
}
/* Push an int-typed value in register/flags 'r' with constant 'v'. */
static void vseti(int r, int v)
{
    CType type;
    type.t = VT_INT;
    type.ref = NULL;
    vset(&type, r, v);
}
/* Push a copy of stack value 'v' (no vcheck_cmp: used for duplication). */
ST_FUNC void vpushv(SValue *v)
{
    if (vtop >= vstack + (VSTACK_SIZE - 1))
        tcc_error("memory full (vstack)");
    vtop++;
    *vtop = *v;
}
/* Duplicate the top stack value. */
static void vdup(void)
{
    vpushv(vtop);
}
851 /* rotate n first stack elements to the bottom
852 I1 ... In -> I2 ... In I1 [top is right]
/* rotate n first stack elements to the bottom
   I1 ... In -> I2 ... In I1 [top is right] */
ST_FUNC void vrotb(int n)
{
    int i;
    SValue tmp;

    vcheck_cmp();
    tmp = vtop[-n + 1];
    for(i=-n+1;i!=0;i++)
        vtop[i] = vtop[i+1];
    vtop[0] = tmp;
}
866 /* rotate the n elements before entry e towards the top
867 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
/* rotate the n elements before entry e towards the top
   I1 ... In ... -> In I1 ... I(n-1) ... [top is right] */
ST_FUNC void vrote(SValue *e, int n)
{
    int i;
    SValue tmp;

    vcheck_cmp();
    tmp = *e;
    for(i = 0;i < n - 1; i++)
        e[-i] = e[-i - 1];
    e[-n + 1] = tmp;
}
881 /* rotate n first stack elements to the top
882 I1 ... In -> In I1 ... I(n-1) [top is right]
/* rotate n first stack elements to the top
   I1 ... In -> In I1 ... I(n-1) [top is right] */
ST_FUNC void vrott(int n)
{
    vrote(vtop, n);
}
889 /* ------------------------------------------------------------------------- */
890 /* vtop->r = VT_CMP means CPU-flags have been set from comparison or test. */
892 /* called from generators to set the result from relational ops */
/* called from generators to set the result from relational ops: mark
   vtop as a CPU-flags value with comparison opcode 'op' and empty jump
   chains. */
ST_FUNC void vset_VT_CMP(int op)
{
    vtop->r = VT_CMP;
    vtop->cmp_op = op;
    vtop->jfalse = 0;
    vtop->jtrue = 0;
}
901 /* called once before asking generators to load VT_CMP to a register */
/* called once before asking generators to load VT_CMP to a register */
static void vset_VT_JMP(void)
{
    int op = vtop->cmp_op;

    if (vtop->jtrue || vtop->jfalse) {
        /* we need to jump to 'mov $0,%R' or 'mov $1,%R' */
        int inv = op & (op < 2); /* small optimization */
        vseti(VT_JMP+inv, gvtst(inv, 0));
    } else {
        /* otherwise convert flags (rsp. 0/1) to register */
        vtop->c.i = op;
        if (op < 2) /* doesn't seem to happen */
            vtop->r = VT_CONST;
    }
}
918 /* Set CPU Flags, doesn't yet jump */
/* Set CPU Flags, doesn't yet jump: force vtop into VT_CMP form (via a
   != 0 comparison if needed) and append 't' to the proper jump chain. */
static void gvtst_set(int inv, int t)
{
    int *p;

    if (vtop->r != VT_CMP) {
        vpushi(0);
        gen_op(TOK_NE);
        if (vtop->r != VT_CMP) /* must be VT_CONST then */
            vset_VT_CMP(vtop->c.i != 0);
    }

    p = inv ? &vtop->jfalse : &vtop->jtrue;
    *p = gjmp_append(*p, t);
}
934 /* Generate value test
936 * Generate a test for any value (jump, comparison and integers) */
/* Generate value test

 * Generate a test for any value (jump, comparison and integers).
 * Pops vtop; returns the (possibly extended) jump chain to take when the
 * condition matches 'inv'. */
static int gvtst(int inv, int t)
{
    int op, x, u;

    gvtst_set(inv, t);
    t = vtop->jtrue, u = vtop->jfalse;
    if (inv)
        x = u, u = t, t = x;
    op = vtop->cmp_op;

    /* jump to the wanted target */
    if (op > 1)
        t = gjmp_cond(op ^ inv, t);
    else if (op != inv)
        t = gjmp(t);
    /* resolve complementary jumps to here */
    gsym(u);

    vtop--;
    return t;
}
959 /* generate a zero or nozero test */
/* generate a zero or nozero test: for an existing VT_CMP just invert the
   jump chains for TOK_EQ, otherwise emit an explicit compare with 0. */
static void gen_test_zero(int op)
{
    if (vtop->r == VT_CMP) {
        int j;
        if (op == TOK_EQ) {
            j = vtop->jfalse;
            vtop->jfalse = vtop->jtrue;
            vtop->jtrue = j;
            vtop->cmp_op ^= 1;
        }
    } else {
        vpushi(0);
        gen_op(op);
    }
}
976 /* ------------------------------------------------------------------------- */
977 /* push a symbol value of TYPE */
/* push a symbol value of TYPE */
ST_FUNC void vpushsym(CType *type, Sym *sym)
{
    CValue cval;
    cval.i = 0;
    vsetc(type, VT_CONST | VT_SYM, &cval);
    vtop->sym = sym;
}
986 /* Return a static symbol pointing to a section */
/* Return a static symbol pointing to a section */
ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
{
    int v;
    Sym *sym;

    v = anon_sym++;
    sym = sym_push(v, type, VT_CONST | VT_SYM, 0);
    sym->type.t |= VT_STATIC;
    put_extern_sym(sym, sec, offset, size);
    return sym;
}
999 /* push a reference to a section offset by adding a dummy symbol */
/* push a reference to a section offset by adding a dummy symbol */
static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
{
    vpushsym(type, get_sym_ref(type, sec, offset, size));
}
1005 /* define a new external reference to a symbol 'v' of type 'u' */
/* define a new external reference to a symbol 'v' of type 'u'; an
   existing asm-declared symbol gets its type upgraded in place. */
ST_FUNC Sym *external_global_sym(int v, CType *type)
{
    Sym *s;

    s = sym_find(v);
    if (!s) {
        /* push forward reference */
        s = global_identifier_push(v, type->t | VT_EXTERN, 0);
        s->type.ref = type->ref;
    } else if (IS_ASM_SYM(s)) {
        s->type.t = type->t | (s->type.t & VT_EXTERN);
        s->type.ref = type->ref;
        update_storage(s);
    }
    return s;
}
1023 /* create an external reference with no specific type similar to asm labels.
1024 This avoids type conflicts if the symbol is used from C too */
/* create an external reference with no specific type similar to asm labels.
   This avoids type conflicts if the symbol is used from C too */
ST_FUNC Sym *external_helper_sym(int v)
{
    CType ct = { VT_ASM_FUNC, NULL };
    return external_global_sym(v, &ct);
}
1031 /* push a reference to an helper function (such as memmove) */
/* push a reference to an helper function (such as memmove) */
ST_FUNC void vpush_helper_func(int v)
{
    vpushsym(&func_old_type, external_helper_sym(v));
}
1037 /* Merge symbol attributes. */
/* Merge symbol attributes: take sa1's alignment if sa has none, OR in
   the boolean flags, and keep the most restrictive visibility. */
static void merge_symattr(struct SymAttr *sa, struct SymAttr *sa1)
{
    if (sa1->aligned && !sa->aligned)
        sa->aligned = sa1->aligned;
    sa->packed |= sa1->packed;
    sa->weak |= sa1->weak;
    if (sa1->visibility != STV_DEFAULT) {
        int vis = sa->visibility;
        if (vis == STV_DEFAULT
            || vis > sa1->visibility)
            vis = sa1->visibility;
        sa->visibility = vis;
    }
    sa->dllexport |= sa1->dllexport;
    sa->nodecorate |= sa1->nodecorate;
    sa->dllimport |= sa1->dllimport;
}
1056 /* Merge function attributes. */
/* Merge function attributes: fa1's fields fill in anything fa lacks. */
static void merge_funcattr(struct FuncAttr *fa, struct FuncAttr *fa1)
{
    if (fa1->func_call && !fa->func_call)
        fa->func_call = fa1->func_call;
    if (fa1->func_type && !fa->func_type)
        fa->func_type = fa1->func_type;
    if (fa1->func_args && !fa->func_args)
        fa->func_args = fa1->func_args;
    if (fa1->func_noreturn)
        fa->func_noreturn = 1;
    if (fa1->func_ctor)
        fa->func_ctor = 1;
    if (fa1->func_dtor)
        fa->func_dtor = 1;
}
1073 /* Merge attributes. */
/* Merge attributes: symbol and function attributes plus the optional
   section/alias/asm-label/mode fields (ad1 wins when set). */
static void merge_attr(AttributeDef *ad, AttributeDef *ad1)
{
    merge_symattr(&ad->a, &ad1->a);
    merge_funcattr(&ad->f, &ad1->f);

    if (ad1->section)
        ad->section = ad1->section;
    if (ad1->alias_target)
        ad->alias_target = ad1->alias_target;
    if (ad1->asm_label)
        ad->asm_label = ad1->asm_label;
    if (ad1->attr_mode)
        ad->attr_mode = ad1->attr_mode;
}
1089 /* Merge some type attributes. */
/* Merge some type attributes when 'sym' is redeclared with 'type':
   detects redefinitions, reconciles static/inline for functions, and
   completes array sizes omitted in extern declarations. */
static void patch_type(Sym *sym, CType *type)
{
    if (!(type->t & VT_EXTERN) || IS_ENUM_VAL(sym->type.t)) {
        if (!(sym->type.t & VT_EXTERN))
            tcc_error("redefinition of '%s'", get_tok_str(sym->v, NULL));
        sym->type.t &= ~VT_EXTERN;
    }

    if (IS_ASM_SYM(sym)) {
        /* stay static if both are static */
        sym->type.t = type->t & (sym->type.t | ~VT_STATIC);
        sym->type.ref = type->ref;
    }

    if (!is_compatible_types(&sym->type, type)) {
        tcc_error("incompatible types for redefinition of '%s'",
                  get_tok_str(sym->v, NULL));

    } else if ((sym->type.t & VT_BTYPE) == VT_FUNC) {
        int static_proto = sym->type.t & VT_STATIC;
        /* warn if static follows non-static function declaration */
        if ((type->t & VT_STATIC) && !static_proto
            /* XXX this test for inline shouldn't be here.  Until we
               implement gnu-inline mode again it silences a warning for
               mingw caused by our workarounds. */
            && !((type->t | sym->type.t) & VT_INLINE))
            tcc_warning("static storage ignored for redefinition of '%s'",
                get_tok_str(sym->v, NULL));

        /* set 'inline' if both agree or if one has static */
        if ((type->t | sym->type.t) & VT_INLINE) {
            if (!((type->t ^ sym->type.t) & VT_INLINE)
                || ((type->t | sym->type.t) & VT_STATIC))
                static_proto |= VT_INLINE;
        }

        if (0 == (type->t & VT_EXTERN)) {
            struct FuncAttr f = sym->type.ref->f;
            /* put complete type, use static from prototype */
            sym->type.t = (type->t & ~(VT_STATIC|VT_INLINE)) | static_proto;
            sym->type.ref = type->ref;
            merge_funcattr(&sym->type.ref->f, &f);
        } else {
            sym->type.t &= ~VT_INLINE | static_proto;
        }

        /* prefer a prototype with real parameter info over a K&R one */
        if (sym->type.ref->f.func_type == FUNC_OLD
            && type->ref->f.func_type != FUNC_OLD) {
            sym->type.ref = type->ref;
        }

    } else {
        if ((sym->type.t & VT_ARRAY) && type->ref->c >= 0) {
            /* set array size if it was omitted in extern declaration */
            sym->type.ref->c = type->ref->c;
        }
        if ((type->t ^ sym->type.t) & VT_STATIC)
            tcc_warning("storage mismatch for redefinition of '%s'",
                get_tok_str(sym->v, NULL));
    }
}
1152 /* Merge some storage attributes. */
/* Merge some storage attributes into 'sym' on redeclaration: type (if
   given), PE dll linkage consistency, symbol attributes and asm label,
   then refresh the ELF symbol. */
static void patch_storage(Sym *sym, AttributeDef *ad, CType *type)
{
    if (type)
        patch_type(sym, type);

#ifdef TCC_TARGET_PE
    if (sym->a.dllimport != ad->a.dllimport)
        tcc_error("incompatible dll linkage for redefinition of '%s'",
            get_tok_str(sym->v, NULL));
#endif
    merge_symattr(&sym->a, &ad->a);
    if (ad->asm_label)
        sym->asm_label = ad->asm_label;
    update_storage(sym);
}
1169 /* copy sym to other stack */
/* copy sym to other stack, also re-registering named symbols in the
   token table so lookups find the copy. */
static Sym *sym_copy(Sym *s0, Sym **ps)
{
    Sym *s;
    s = sym_malloc(), *s = *s0;
    s->prev = *ps, *ps = s;
    if (s->v < SYM_FIRST_ANOM) {
        ps = &table_ident[s->v - TOK_IDENT]->sym_identifier;
        s->prev_tok = *ps, *ps = s;
    }
    return s;
}
1182 /* copy s->type.ref to stack 'ps' for VT_FUNC and VT_PTR */
/* copy s->type.ref to stack 'ps' for VT_FUNC and VT_PTR (and local
   structs), recursing so the whole type chain lives on 'ps'. */
static void sym_copy_ref(Sym *s, Sym **ps)
{
    int bt = s->type.t & VT_BTYPE;
    if (bt == VT_FUNC || bt == VT_PTR || (bt == VT_STRUCT && s->sym_scope)) {
        Sym **sp = &s->type.ref;
        for (s = *sp, *sp = NULL; s; s = s->next) {
            Sym *s2 = sym_copy(s, ps);
            sp = &(*sp = s2)->next;
            sym_copy_ref(s2, ps);
        }
    }
}
1196 /* define a new external reference to a symbol 'v' */
/* define a new external reference to a symbol 'v': find or create the
   global symbol, merge storage on redeclaration, and shadow variables
   onto the local stack when inside a block. */
static Sym *external_sym(int v, CType *type, int r, AttributeDef *ad)
{
    Sym *s;

    /* look for global symbol */
    s = sym_find(v);
    while (s && s->sym_scope)
        s = s->prev_tok;

    if (!s) {
        /* push forward reference */
        s = global_identifier_push(v, type->t, 0);
        s->r |= r;
        s->a = ad->a;
        s->asm_label = ad->asm_label;
        s->type.ref = type->ref;
        /* copy type to the global stack */
        if (local_stack)
            sym_copy_ref(s, &global_stack);
    } else {
        patch_storage(s, ad, type);
    }
    /* push variables on local_stack if any */
    if (local_stack && (s->type.t & VT_BTYPE) != VT_FUNC)
        s = sym_copy(s, &local_stack);
    return s;
}
1225 /* save registers up to (vtop - n) stack entry */
/* save registers up to (vtop - n) stack entry */
ST_FUNC void save_regs(int n)
{
    SValue *p, *p1;
    for(p = vstack, p1 = vtop - n; p <= p1; p++)
        save_reg(p->r);
}
1233 /* save r to the memory stack, and mark it as being free */
1234 ST_FUNC void save_reg(int r)
/* Thin wrapper: n == 0 means "consider every entry on the value stack". */
1236 save_reg_upstack(r, 0);
1239 /* save r to the memory stack, and mark it as being free,
1240 if seen up to (vtop - n) stack entry */
1241 ST_FUNC void save_reg_upstack(int r, int n)
/* Spills register 'r' into a temporary stack slot and rewrites every
   value-stack entry (up to vtop - n) that referenced it so it now
   refers to that slot.  The store happens once (first hit, l == 0);
   later hits only rewrite the SValue. */
1243 int l, size, align, bt;
1244 SValue *p, *p1, sv;
1246 if ((r &= VT_VALMASK) >= VT_CONST)
1247 return;
1248 if (nocode_wanted)
1249 return;
/* l: stack offset of the slot once allocated; 0 = not yet spilled */
1250 l = 0;
1251 for(p = vstack, p1 = vtop - n; p <= p1; p++) {
1252 if ((p->r & VT_VALMASK) == r || p->r2 == r) {
1253 /* must save value on stack if not already done */
1254 if (!l) {
1255 bt = p->type.t & VT_BTYPE;
1256 if (bt == VT_VOID)
1257 continue;
/* for lvalues the register holds an address, not the value itself */
1258 if ((p->r & VT_LVAL) || bt == VT_FUNC)
1259 bt = VT_PTR;
1260 sv.type.t = bt;
1261 size = type_size(&sv.type, &align);
1262 l = get_temp_local_var(size,align);
1263 sv.r = VT_LOCAL | VT_LVAL;
1264 sv.c.i = l;
1265 store(p->r & VT_VALMASK, &sv);
1266 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1267 /* x86 specific: need to pop fp register ST0 if saved */
1268 if (r == TREG_ST0) {
1269 o(0xd8dd); /* fstp %st(0) */
1271 #endif
1272 /* special long long case */
1273 if (p->r2 < VT_CONST && USING_TWO_WORDS(bt)) {
1274 sv.c.i += PTR_SIZE;
1275 store(p->r2, &sv);
1278 /* mark that stack entry as being saved on the stack */
1279 if (p->r & VT_LVAL) {
1280 /* also clear the bounded flag because the
1281 relocation address of the function was stored in
1282 p->c.i */
/* VT_LLOCAL: the slot holds the (spilled) address, not the value */
1283 p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
1284 } else {
1285 p->r = VT_LVAL | VT_LOCAL;
1287 p->sym = NULL;
1288 p->r2 = VT_CONST;
1289 p->c.i = l;
#ifdef TCC_TARGET_ARM
/* Find a register of class 'rc2' that is referenced at most once by
 * the value stack; if no such register exists, fall back to get_reg(rc). */
ST_FUNC int get_reg_ex(int rc, int rc2)
{
    int reg;

    for (reg = 0; reg < NB_REGS; reg++) {
        int refs = 0;
        SValue *sv;

        if (!(reg_classes[reg] & rc2))
            continue;
        for (sv = vstack; sv <= vtop; sv++) {
            if ((sv->r & VT_VALMASK) == reg || sv->r2 == reg)
                refs++;
        }
        if (refs <= 1)
            return reg;
    }
    return get_reg(rc);
}
#endif
1319 /* find a free register of class 'rc'. If none, save one register */
1320 ST_FUNC int get_reg(int rc)
1322 int r;
1323 SValue *p;
1325 /* find a free register */
1326 for(r=0;r<NB_REGS;r++) {
1327 if (reg_classes[r] & rc) {
1328 if (nocode_wanted)
1329 return r;
1330 for(p=vstack;p<=vtop;p++) {
1331 if ((p->r & VT_VALMASK) == r ||
1332 p->r2 == r)
1333 goto notfound;
1335 return r;
1337 notfound: ;
1340 /* no register left : free the first one on the stack (VERY
1341 IMPORTANT to start from the bottom to ensure that we don't
1342 spill registers used in gen_opi()) */
1343 for(p=vstack;p<=vtop;p++) {
1344 /* look at second register (if long long) */
1345 r = p->r2;
1346 if (r < VT_CONST && (reg_classes[r] & rc))
1347 goto save_found;
1348 r = p->r & VT_VALMASK;
1349 if (r < VT_CONST && (reg_classes[r] & rc)) {
1350 save_found:
1351 save_reg(r);
1352 return r;
1355 /* Should never comes here */
1356 return -1;
1359 /* find a free temporary local variable (return the offset on stack) match the size and align. If none, add new temporary stack variable*/
1360 static int get_temp_local_var(int size,int align){
1361 int i;
1362 struct temp_local_variable *temp_var;
1363 int found_var;
1364 SValue *p;
1365 int r;
1366 char free;
1367 char found;
1368 found=0;
1369 for(i=0;i<nb_temp_local_vars;i++){
1370 temp_var=&arr_temp_local_vars[i];
1371 if(temp_var->size<size||align!=temp_var->align){
1372 continue;
1374 /*check if temp_var is free*/
1375 free=1;
1376 for(p=vstack;p<=vtop;p++) {
1377 r=p->r&VT_VALMASK;
1378 if(r==VT_LOCAL||r==VT_LLOCAL){
1379 if(p->c.i==temp_var->location){
1380 free=0;
1381 break;
1385 if(free){
1386 found_var=temp_var->location;
1387 found=1;
1388 break;
1391 if(!found){
1392 loc = (loc - size) & -align;
1393 if(nb_temp_local_vars<MAX_TEMP_LOCAL_VARIABLE_NUMBER){
1394 temp_var=&arr_temp_local_vars[i];
1395 temp_var->location=loc;
1396 temp_var->size=size;
1397 temp_var->align=align;
1398 nb_temp_local_vars++;
1400 found_var=loc;
1402 return found_var;
1405 static void clear_temp_local_var_list(){
/* Reset the temporary-variable table; previously recorded slots are
   simply forgotten (their frame space is not reclaimed here). */
1406 nb_temp_local_vars=0;
1409 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1410 if needed */
1411 static void move_reg(int r, int s, int t)
1413 SValue sv;
1415 if (r != s) {
1416 save_reg(r);
1417 sv.type.t = t;
1418 sv.type.ref = NULL;
1419 sv.r = s;
1420 sv.c.i = 0;
1421 load(r, &sv);
1425 /* get address of vtop (vtop MUST BE an lvalue) */
1426 ST_FUNC void gaddrof(void)
1428 vtop->r &= ~VT_LVAL;
1429 /* tricky: if saved lvalue, then we can go back to lvalue */
/* VT_LLOCAL means the *address* was spilled into a local slot, so the
   address is obtained by reading that slot: a VT_LOCAL lvalue */
1430 if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
1431 vtop->r = (vtop->r & ~VT_VALMASK) | VT_LOCAL | VT_LVAL;
1434 #ifdef CONFIG_TCC_BCHECK
1435 /* generate a bounded pointer addition */
1436 static void gen_bounded_ptr_add(void)
/* Replaces 'ptr + offset' (the two top stack entries) with a call to
   __bound_ptr_add(ptr, offset).  The result comes back in REG_IRET and
   is flagged VT_BOUNDED; vtop->c.i records the call's relocation offset
   so gen_bounded_ptr_deref() can retarget it later. */
1438 int save = (vtop[-1].r & VT_VALMASK) == VT_LOCAL;
/* keep a copy of a VT_LOCAL pointer: gfunc_call consumes its operands */
1439 if (save) {
1440 vpushv(&vtop[-1]);
1441 vrott(3);
1443 vpush_helper_func(TOK___bound_ptr_add);
1444 vrott(3);
1445 gfunc_call(2);
1446 vtop -= save;
1447 vpushi(0);
1448 /* returned pointer is in REG_IRET */
1449 vtop->r = REG_IRET | VT_BOUNDED;
1450 if (nocode_wanted)
1451 return;
1452 /* relocation offset of the bounding function call point */
1453 vtop->c.i = (cur_text_section->reloc->data_offset - sizeof(ElfW_Rel));
1456 /* patch pointer addition in vtop so that pointer dereferencing is
1457 also tested */
1458 static void gen_bounded_ptr_deref(void)
1460 addr_t func;
1461 int size, align;
1462 ElfW_Rel *rel;
1463 Sym *sym;
1465 if (nocode_wanted)
1466 return;
1468 size = type_size(&vtop->type, &align);
1469 switch(size) {
1470 case 1: func = TOK___bound_ptr_indir1; break;
1471 case 2: func = TOK___bound_ptr_indir2; break;
1472 case 4: func = TOK___bound_ptr_indir4; break;
1473 case 8: func = TOK___bound_ptr_indir8; break;
1474 case 12: func = TOK___bound_ptr_indir12; break;
1475 case 16: func = TOK___bound_ptr_indir16; break;
1476 default:
1477 /* may happen with struct member access */
1478 return;
1480 sym = external_helper_sym(func);
1481 if (!sym->c)
1482 put_extern_sym(sym, NULL, 0, 0);
1483 /* patch relocation */
1484 /* XXX: find a better solution ? */
1485 rel = (ElfW_Rel *)(cur_text_section->reloc->data + vtop->c.i);
1486 rel->r_info = ELFW(R_INFO)(sym->c, ELFW(R_TYPE)(rel->r_info));
1489 /* generate lvalue bound code */
1490 static void gbound(void)
1492 CType type1;
1494 vtop->r &= ~VT_MUSTBOUND;
1495 /* if lvalue, then use checking code before dereferencing */
1496 if (vtop->r & VT_LVAL) {
1497 /* if not VT_BOUNDED value, then make one */
1498 if (!(vtop->r & VT_BOUNDED)) {
1499 /* must save type because we must set it to int to get pointer */
1500 type1 = vtop->type;
1501 vtop->type.t = VT_PTR;
1502 gaddrof();
1503 vpushi(0);
1504 gen_bounded_ptr_add();
1505 vtop->r |= VT_LVAL;
1506 vtop->type = type1;
1508 /* then check for dereferencing */
1509 gen_bounded_ptr_deref();
1513 /* we need to call __bound_ptr_add before we start to load function
1514 args into registers */
1515 ST_FUNC void gbound_args(int nb_args)
1517 int i, v;
1518 SValue *sv;
/* rotate each argument that still needs a bound check to the top,
   check it, and rotate it back into place */
1520 for (i = 1; i <= nb_args; ++i)
1521 if (vtop[1 - i].r & VT_MUSTBOUND) {
1522 vrotb(i);
1523 gbound();
1524 vrott(i);
/* sv is the function designator (just below the arguments) */
1527 sv = vtop - nb_args;
1528 if (sv->r & VT_SYM) {
1529 v = sv->sym->v;
/* setjmp-family calls need __bound_setjmp notified of the jmp_buf,
   and the function then needs a bounds epilog */
1530 if (v == TOK_setjmp
1531 || v == TOK__setjmp
1532 #ifndef TCC_TARGET_PE
1533 || v == TOK_sigsetjmp
1534 || v == TOK___sigsetjmp
1535 #endif
1537 vpush_helper_func(TOK___bound_setjmp);
1538 vpushv(sv + 1);
1539 gfunc_call(1);
1540 func_bound_add_epilog = 1;
1542 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64
1543 if (v == TOK_alloca)
1544 func_bound_add_epilog = 1;
1545 #endif
1546 #if TARGETOS_NetBSD
1547 if (v == TOK_longjmp) /* undo rename to __longjmp14 */
1548 sv->sym->asm_label = TOK___bound_longjmp;
1549 #endif
1553 /* Add bounds for local symbols from S to E (via ->prev) */
1554 static void add_local_bounds(Sym *s, Sym *e)
1556 for (; s != e; s = s->prev) {
1557 if (!s->v || (s->r & VT_VALMASK) != VT_LOCAL)
1558 continue;
1559 /* Add arrays/structs/unions because we always take address */
1560 if ((s->type.t & VT_ARRAY)
1561 || (s->type.t & VT_BTYPE) == VT_STRUCT
1562 || s->a.addrtaken) {
1563 /* add local bound info */
1564 int align, size = type_size(&s->type, &align);
1565 addr_t *bounds_ptr = section_ptr_add(lbounds_section,
1566 2 * sizeof(addr_t));
1567 bounds_ptr[0] = s->c;
1568 bounds_ptr[1] = size;
1572 #endif
1574 /* Wrapper around sym_pop, that potentially also registers local bounds. */
1575 static void pop_local_syms(Sym *b, int keep)
1577 #ifdef CONFIG_TCC_BCHECK
/* with bounds checking, record bound info for the symbols about to be
   popped (skipped when the caller asked to keep them) */
1578 if (tcc_state->do_bounds_check && !keep && (local_scope || !func_var))
1579 add_local_bounds(local_stack, b);
1580 #endif
/* emit debug scope info before the symbols disappear */
1581 if (debug_modes)
1582 tcc_add_debug_info (tcc_state, !local_scope, local_stack, b);
1583 sym_pop(&local_stack, b, keep);
1586 static void incr_bf_adr(int o)
/* Advance the byte-wise bitfield cursor: vtop is an lvalue; reinterpret
   it as a char pointer, add byte offset 'o', and make it an unsigned
   byte lvalue again.  Used by the packed bitfield load/store loops. */
1588 vtop->type = char_pointer_type;
1589 gaddrof();
1590 vpushs(o);
1591 gen_op('+');
1592 vtop->type.t = VT_BYTE | VT_UNSIGNED;
1593 vtop->r |= VT_LVAL;
1596 /* single-byte load mode for packed or otherwise unaligned bitfields */
1597 static void load_packed_bf(CType *type, int bit_pos, int bit_size)
/* Assembles the bitfield value byte by byte into an accumulator pushed
   below the address (stack comments: B = address, X = accumulator).
   Each pass reads one byte, masks/shifts the useful bits into position
   and ORs them into X.  'o' is the byte step (initial byte, then 1). */
1599 int n, o, bits;
1600 save_reg_upstack(vtop->r, 1);
1601 vpush64(type->t & VT_BTYPE, 0); // B X
1602 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
1603 do {
1604 vswap(); // X B
1605 incr_bf_adr(o);
1606 vdup(); // X B B
/* n = number of useful bits in this byte */
1607 n = 8 - bit_pos;
1608 if (n > bit_size)
1609 n = bit_size;
1610 if (bit_pos)
1611 vpushi(bit_pos), gen_op(TOK_SHR), bit_pos = 0; // X B Y
1612 if (n < 8)
1613 vpushi((1 << n) - 1), gen_op('&');
1614 gen_cast(type);
1615 if (bits)
1616 vpushi(bits), gen_op(TOK_SHL);
1617 vrotb(3); // B Y X
1618 gen_op('|'); // B X
1619 bits += n, bit_size -= n, o = 1;
1620 } while (bit_size);
1621 vswap(), vpop();
/* sign-extend the result for signed bitfields: shift the assembled
   bits to the top of the word, then arithmetic-shift back down */
1622 if (!(type->t & VT_UNSIGNED)) {
1623 n = ((type->t & VT_BTYPE) == VT_LLONG ? 64 : 32) - bits;
1624 vpushi(n), gen_op(TOK_SHL);
1625 vpushi(n), gen_op(TOK_SAR);
1629 /* single-byte store mode for packed or otherwise unaligned bitfields */
1630 static void store_packed_bf(int bit_pos, int bit_size)
/* Writes the value byte by byte (stack comments: X = value, B = address).
   For each byte, the relevant slice of X is shifted into place, merged
   with the unaffected bits of the existing byte, and stored back. */
1632 int bits, n, o, m, c;
/* c: value is a constant, so plain vdup() suffices instead of gv_dup() */
1633 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1634 vswap(); // X B
1635 save_reg_upstack(vtop->r, 1);
1636 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
1637 do {
1638 incr_bf_adr(o); // X B
1639 vswap(); //B X
1640 c ? vdup() : gv_dup(); // B V X
1641 vrott(3); // X B V
1642 if (bits)
1643 vpushi(bits), gen_op(TOK_SHR);
1644 if (bit_pos)
1645 vpushi(bit_pos), gen_op(TOK_SHL);
1646 n = 8 - bit_pos;
1647 if (n > bit_size)
1648 n = bit_size;
/* partial byte: keep the byte's bits outside the field mask 'm' */
1649 if (n < 8) {
1650 m = ((1 << n) - 1) << bit_pos;
1651 vpushi(m), gen_op('&'); // X B V1
1652 vpushv(vtop-1); // X B V1 B
1653 vpushi(m & 0x80 ? ~m & 0x7f : ~m);
1654 gen_op('&'); // X B V1 B1
1655 gen_op('|'); // X B V2
1657 vdup(), vtop[-1] = vtop[-2]; // X B B V2
1658 vstore(), vpop(); // X B
1659 bits += n, bit_size -= n, bit_pos = 0, o = 1;
1660 } while (bit_size);
1661 vpop(), vpop();
1664 static int adjust_bf(SValue *sv, int bit_pos, int bit_size)
1666 int t;
1667 if (0 == sv->type.ref)
1668 return 0;
1669 t = sv->type.ref->auxtype;
1670 if (t != -1 && t != VT_STRUCT) {
1671 sv->type.t = (sv->type.t & ~(VT_BTYPE | VT_LONG)) | t;
1672 sv->r |= VT_LVAL;
1674 return t;
1677 /* store vtop a register belonging to class 'rc'. lvalues are
1678 converted to values. Cannot be used if cannot be converted to
1679 register value (such as structures). */
1680 ST_FUNC int gv(int rc)
/* Returns the register now holding vtop's (first word of) value.
   Handles, in order: bitfield extraction, float constants (moved to
   rodata), bound checks, then the actual reload into one register or
   a register pair (rc2 != 0). */
1682 int r, r2, r_ok, r2_ok, rc2, bt;
1683 int bit_pos, bit_size, size, align;
1685 /* NOTE: get_reg can modify vstack[] */
1686 if (vtop->type.t & VT_BITFIELD) {
1687 CType type;
1689 bit_pos = BIT_POS(vtop->type.t);
1690 bit_size = BIT_SIZE(vtop->type.t);
1691 /* remove bit field info to avoid loops */
1692 vtop->type.t &= ~VT_STRUCT_MASK;
1694 type.ref = NULL;
1695 type.t = vtop->type.t & VT_UNSIGNED;
1696 if ((vtop->type.t & VT_BTYPE) == VT_BOOL)
1697 type.t |= VT_UNSIGNED;
/* adjust_bf returns the layout-time access type; VT_STRUCT selects
   the packed byte-wise loader below */
1699 r = adjust_bf(vtop, bit_pos, bit_size);
1701 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
1702 type.t |= VT_LLONG;
1703 else
1704 type.t |= VT_INT;
1706 if (r == VT_STRUCT) {
1707 load_packed_bf(&type, bit_pos, bit_size);
1708 } else {
1709 int bits = (type.t & VT_BTYPE) == VT_LLONG ? 64 : 32;
1710 /* cast to int to propagate signedness in following ops */
1711 gen_cast(&type);
1712 /* generate shifts */
1713 vpushi(bits - (bit_pos + bit_size));
1714 gen_op(TOK_SHL);
1715 vpushi(bits - bit_size);
1716 /* NOTE: transformed to SHR if unsigned */
1717 gen_op(TOK_SAR);
/* re-enter gv() now that the bitfield has become a plain value */
1719 r = gv(rc);
1720 } else {
1721 if (is_float(vtop->type.t) &&
1722 (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1723 /* CPUs usually cannot use float constants, so we store them
1724 generically in data segment */
1725 init_params p = { rodata_section };
1726 unsigned long offset;
1727 size = type_size(&vtop->type, &align);
1728 if (NODATA_WANTED)
1729 size = 0, align = 1;
1730 offset = section_add(p.sec, size, align);
1731 vpush_ref(&vtop->type, p.sec, offset, size);
1732 vswap();
1733 init_putv(&p, &vtop->type, offset);
1734 vtop->r |= VT_LVAL;
1736 #ifdef CONFIG_TCC_BCHECK
1737 if (vtop->r & VT_MUSTBOUND)
1738 gbound();
1739 #endif
1741 bt = vtop->type.t & VT_BTYPE;
1743 #ifdef TCC_TARGET_RISCV64
1744 /* XXX mega hack */
1745 if (bt == VT_LDOUBLE && rc == RC_FLOAT)
1746 rc = RC_INT;
1747 #endif
/* rc2 != 0 when the type needs a second register (two-word values) */
1748 rc2 = RC2_TYPE(bt, rc);
1750 /* need to reload if:
1751 - constant
1752 - lvalue (need to dereference pointer)
1753 - already a register, but not in the right class */
1754 r = vtop->r & VT_VALMASK;
1755 r_ok = !(vtop->r & VT_LVAL) && (r < VT_CONST) && (reg_classes[r] & rc);
1756 r2_ok = !rc2 || ((vtop->r2 < VT_CONST) && (reg_classes[vtop->r2] & rc2));
1758 if (!r_ok || !r2_ok) {
1759 if (!r_ok)
1760 r = get_reg(rc);
1761 if (rc2) {
1762 int load_type = (bt == VT_QFLOAT) ? VT_DOUBLE : VT_PTRDIFF_T;
1763 int original_type = vtop->type.t;
1765 /* two register type load :
1766 expand to two words temporarily */
1767 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1768 /* load constant */
1769 unsigned long long ll = vtop->c.i;
1770 vtop->c.i = ll; /* first word */
1771 load(r, vtop);
1772 vtop->r = r; /* save register value */
1773 vpushi(ll >> 32); /* second word */
1774 } else if (vtop->r & VT_LVAL) {
1775 /* We do not want to modifier the long long pointer here.
1776 So we save any other instances down the stack */
1777 save_reg_upstack(vtop->r, 1);
1778 /* load from memory */
1779 vtop->type.t = load_type;
1780 load(r, vtop);
1781 vdup();
1782 vtop[-1].r = r; /* save register value */
1783 /* increment pointer to get second word */
1784 vtop->type.t = VT_PTRDIFF_T;
1785 gaddrof();
1786 vpushs(PTR_SIZE);
1787 gen_op('+');
1788 vtop->r |= VT_LVAL;
1789 vtop->type.t = load_type;
1790 } else {
1791 /* move registers */
1792 if (!r_ok)
1793 load(r, vtop);
1794 if (r2_ok && vtop->r2 < VT_CONST)
1795 goto done;
1796 vdup();
1797 vtop[-1].r = r; /* save register value */
1798 vtop->r = vtop[-1].r2;
1800 /* Allocate second register. Here we rely on the fact that
1801 get_reg() tries first to free r2 of an SValue. */
1802 r2 = get_reg(rc2);
1803 load(r2, vtop);
1804 vpop();
1805 /* write second register */
1806 vtop->r2 = r2;
1807 done:
1808 vtop->type.t = original_type;
1809 } else {
1810 if (vtop->r == VT_CMP)
1811 vset_VT_JMP();
1812 /* one register type load */
1813 load(r, vtop);
1816 vtop->r = r;
1817 #ifdef TCC_TARGET_C67
1818 /* uses register pairs for doubles */
1819 if (bt == VT_DOUBLE)
1820 vtop->r2 = r+1;
1821 #endif
1823 return r;
1826 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1827 ST_FUNC void gv2(int rc1, int rc2)
1829 /* generate more generic register first. But VT_JMP or VT_CMP
1830 values must be generated first in all cases to avoid possible
1831 reload errors */
1832 if (vtop->r != VT_CMP && rc1 <= rc2) {
1833 vswap();
1834 gv(rc1);
1835 vswap();
1836 gv(rc2);
/* loading the second value may have spilled the first: redo it */
1837 /* test if reload is needed for first register */
1838 if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
1839 vswap();
1840 gv(rc1);
1841 vswap();
1843 } else {
1844 gv(rc2);
1845 vswap();
1846 gv(rc1);
1847 vswap();
/* same spill hazard in the mirrored order */
1848 /* test if reload is needed for first register */
1849 if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
1850 gv(rc2);
1855 #if PTR_SIZE == 4
1856 /* expand 64bit on stack in two ints */
1857 ST_FUNC void lexpand(void)
/* Splits the 64-bit vtop into two VT_INT entries (low word below,
   high word on top), preserving the signedness bits in 'u'. */
1859 int u, v;
1860 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1861 v = vtop->r & (VT_VALMASK | VT_LVAL);
1862 if (v == VT_CONST) {
/* constant: duplicate and shift to expose the high word */
1863 vdup();
1864 vtop[0].c.i >>= 32;
1865 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
/* memory operand: the high word lives 4 bytes further */
1866 vdup();
1867 vtop[0].c.i += 4;
1868 } else {
/* register pair: split r/r2 into two single-register entries */
1869 gv(RC_INT);
1870 vdup();
1871 vtop[0].r = vtop[-1].r2;
1872 vtop[0].r2 = vtop[-1].r2 = VT_CONST;
1874 vtop[0].type.t = vtop[-1].type.t = VT_INT | u;
1876 #endif
1878 #if PTR_SIZE == 4
1879 /* build a long long from two ints */
1880 static void lbuild(int t)
/* Inverse of lexpand(): combine the two top INT entries (low below,
   high on top) into a single entry of type 't' via the r/r2 pair. */
1882 gv2(RC_INT, RC_INT);
1883 vtop[-1].r2 = vtop[0].r;
1884 vtop[-1].type.t = t;
1885 vpop();
1887 #endif
1889 /* convert stack entry to register and duplicate its value in another
1890 register */
1891 static void gv_dup(void)
1893 int t, rc, r;
1895 t = vtop->type.t;
1896 #if PTR_SIZE == 4
/* 64-bit values on 32-bit targets: expand to two ints, duplicate
   each half separately, then rebuild both long longs */
1897 if ((t & VT_BTYPE) == VT_LLONG) {
1898 if (t & VT_BITFIELD) {
1899 gv(RC_INT);
1900 t = vtop->type.t;
1902 lexpand();
1903 gv_dup();
1904 vswap();
1905 vrotb(3);
1906 gv_dup();
1907 vrotb(4);
1908 /* stack: H L L1 H1 */
1909 lbuild(t);
1910 vrotb(3);
1911 vrotb(3);
1912 vswap();
1913 lbuild(t);
1914 vswap();
1915 return;
1917 #endif
1918 /* duplicate value */
1919 rc = RC_TYPE(t);
1920 gv(rc);
1921 r = get_reg(rc);
1922 vdup();
1923 load(r, vtop);
1924 vtop->r = r;
1927 #if PTR_SIZE == 4
1928 /* generate CPU independent (unsigned) long long operations */
1929 static void gen_opl(int op)
/* 64-bit operations on 32-bit targets, built from 32-bit ops:
   div/mod go through libgcc-style helpers; +,-,*,&,|,^ are composed
   word-wise; shifts by a constant are open-coded; comparisons compare
   the high words first, then the low words unsigned. */
1931 int t, a, b, op1, c, i;
1932 int func;
1933 unsigned short reg_iret = REG_IRET;
1934 unsigned short reg_lret = REG_IRE2;
1935 SValue tmp;
1937 switch(op) {
1938 case '/':
1939 case TOK_PDIV:
1940 func = TOK___divdi3;
1941 goto gen_func;
1942 case TOK_UDIV:
1943 func = TOK___udivdi3;
1944 goto gen_func;
1945 case '%':
1946 func = TOK___moddi3;
1947 goto gen_mod_func;
1948 case TOK_UMOD:
1949 func = TOK___umoddi3;
1950 gen_mod_func:
1951 #ifdef TCC_ARM_EABI
/* EABI aeabi_ldivmod-style helpers return the remainder in r2/r3 */
1952 reg_iret = TREG_R2;
1953 reg_lret = TREG_R3;
1954 #endif
1955 gen_func:
1956 /* call generic long long function */
1957 vpush_helper_func(func);
1958 vrott(3);
1959 gfunc_call(2);
1960 vpushi(0);
1961 vtop->r = reg_iret;
1962 vtop->r2 = reg_lret;
1963 break;
1964 case '^':
1965 case '&':
1966 case '|':
1967 case '*':
1968 case '+':
1969 case '-':
1970 //pv("gen_opl A",0,2);
1971 t = vtop->type.t;
1972 vswap();
1973 lexpand();
1974 vrotb(3);
1975 lexpand();
1976 /* stack: L1 H1 L2 H2 */
/* reorder so both high words sit below both low words */
1977 tmp = vtop[0];
1978 vtop[0] = vtop[-3];
1979 vtop[-3] = tmp;
1980 tmp = vtop[-2];
1981 vtop[-2] = vtop[-3];
1982 vtop[-3] = tmp;
1983 vswap();
1984 /* stack: H1 H2 L1 L2 */
1985 //pv("gen_opl B",0,4);
1986 if (op == '*') {
/* 64x64 multiply: L1*L2 (full 64-bit via UMULL) plus the two
   32-bit cross products folded into the high word */
1987 vpushv(vtop - 1);
1988 vpushv(vtop - 1);
1989 gen_op(TOK_UMULL);
1990 lexpand();
1991 /* stack: H1 H2 L1 L2 ML MH */
1992 for(i=0;i<4;i++)
1993 vrotb(6);
1994 /* stack: ML MH H1 H2 L1 L2 */
1995 tmp = vtop[0];
1996 vtop[0] = vtop[-2];
1997 vtop[-2] = tmp;
1998 /* stack: ML MH H1 L2 H2 L1 */
1999 gen_op('*');
2000 vrotb(3);
2001 vrotb(3);
2002 gen_op('*');
2003 /* stack: ML MH M1 M2 */
2004 gen_op('+');
2005 gen_op('+');
2006 } else if (op == '+' || op == '-') {
2007 /* XXX: add non carry method too (for MIPS or alpha) */
2008 if (op == '+')
2009 op1 = TOK_ADDC1;
2010 else
2011 op1 = TOK_SUBC1;
2012 gen_op(op1);
2013 /* stack: H1 H2 (L1 op L2) */
2014 vrotb(3);
2015 vrotb(3);
2016 gen_op(op1 + 1); /* TOK_xxxC2 */
2017 } else {
/* bitwise ops: apply independently to low and high words */
2018 gen_op(op);
2019 /* stack: H1 H2 (L1 op L2) */
2020 vrotb(3);
2021 vrotb(3);
2022 /* stack: (L1 op L2) H1 H2 */
2023 gen_op(op);
2024 /* stack: (L1 op L2) (H1 op H2) */
2026 /* stack: L H */
2027 lbuild(t);
2028 break;
2029 case TOK_SAR:
2030 case TOK_SHR:
2031 case TOK_SHL:
2032 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
2033 t = vtop[-1].type.t;
2034 vswap();
2035 lexpand();
2036 vrotb(3);
2037 /* stack: L H shift */
2038 c = (int)vtop->c.i;
2039 /* constant: simpler */
2040 /* NOTE: all comments are for SHL. the other cases are
2041 done by swapping words */
2042 vpop();
2043 if (op != TOK_SHL)
2044 vswap();
2045 if (c >= 32) {
/* shift >= 32: low word comes entirely from the other word */
2046 /* stack: L H */
2047 vpop();
2048 if (c > 32) {
2049 vpushi(c - 32);
2050 gen_op(op);
2052 if (op != TOK_SAR) {
2053 vpushi(0);
2054 } else {
/* arithmetic right: new high word is the sign replicated */
2055 gv_dup();
2056 vpushi(31);
2057 gen_op(TOK_SAR);
2059 vswap();
2060 } else {
/* shift < 32: combine bits shifted across the word boundary */
2061 vswap();
2062 gv_dup();
2063 /* stack: H L L */
2064 vpushi(c);
2065 gen_op(op);
2066 vswap();
2067 vpushi(32 - c);
2068 if (op == TOK_SHL)
2069 gen_op(TOK_SHR);
2070 else
2071 gen_op(TOK_SHL);
2072 vrotb(3);
2073 /* stack: L L H */
2074 vpushi(c);
2075 if (op == TOK_SHL)
2076 gen_op(TOK_SHL);
2077 else
2078 gen_op(TOK_SHR);
2079 gen_op('|');
2081 if (op != TOK_SHL)
2082 vswap();
2083 lbuild(t);
2084 } else {
2085 /* XXX: should provide a faster fallback on x86 ? */
2086 switch(op) {
2087 case TOK_SAR:
2088 func = TOK___ashrdi3;
2089 goto gen_func;
2090 case TOK_SHR:
2091 func = TOK___lshrdi3;
2092 goto gen_func;
2093 case TOK_SHL:
2094 func = TOK___ashldi3;
2095 goto gen_func;
2098 break;
2099 default:
2100 /* compare operations */
2101 t = vtop->type.t;
2102 vswap();
2103 lexpand();
2104 vrotb(3);
2105 lexpand();
2106 /* stack: L1 H1 L2 H2 */
2107 tmp = vtop[-1];
2108 vtop[-1] = vtop[-2];
2109 vtop[-2] = tmp;
2110 /* stack: L1 L2 H1 H2 */
2111 save_regs(4);
2112 /* compare high */
2113 op1 = op;
2114 /* when values are equal, we need to compare low words. since
2115 the jump is inverted, we invert the test too. */
2116 if (op1 == TOK_LT)
2117 op1 = TOK_LE;
2118 else if (op1 == TOK_GT)
2119 op1 = TOK_GE;
2120 else if (op1 == TOK_ULT)
2121 op1 = TOK_ULE;
2122 else if (op1 == TOK_UGT)
2123 op1 = TOK_UGE;
2124 a = 0;
2125 b = 0;
2126 gen_op(op1);
/* a: jump taken when the result is already known true,
   b: jump taken when already known false */
2127 if (op == TOK_NE) {
2128 b = gvtst(0, 0);
2129 } else {
2130 a = gvtst(1, 0);
2131 if (op != TOK_EQ) {
2132 /* generate non equal test */
2133 vpushi(0);
2134 vset_VT_CMP(TOK_NE);
2135 b = gvtst(0, 0);
2138 /* compare low. Always unsigned */
2139 op1 = op;
2140 if (op1 == TOK_LT)
2141 op1 = TOK_ULT;
2142 else if (op1 == TOK_LE)
2143 op1 = TOK_ULE;
2144 else if (op1 == TOK_GT)
2145 op1 = TOK_UGT;
2146 else if (op1 == TOK_GE)
2147 op1 = TOK_UGE;
2148 gen_op(op1);
2149 #if 0//def TCC_TARGET_I386
2150 if (op == TOK_NE) { gsym(b); break; }
2151 if (op == TOK_EQ) { gsym(a); break; }
2152 #endif
2153 gvtst_set(1, a);
2154 gvtst_set(0, b);
2155 break;
2158 #endif
/* Signed 64-bit division performed on unsigned representations:
   divide the magnitudes, then negate when the operand signs differ.
   Keeping the arithmetic unsigned avoids host-compiler UB on signed
   overflow (e.g. INT64_MIN / -1). */
static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
{
    uint64_t ua = a, ub = b;
    uint64_t q;

    if (a >> 63)
        ua = 0 - ua;
    if (b >> 63)
        ub = 0 - ub;
    q = ua / ub;
    return ((a ^ b) >> 63) ? 0 - q : q;
}
/* Signed 64-bit '<' on unsigned representations: adding 2^63 (which
   just flips the sign bit mod 2^64) maps signed order onto unsigned
   order, so a plain unsigned compare gives the signed result. */
static int gen_opic_lt(uint64_t a, uint64_t b)
{
    const uint64_t bias = (uint64_t)1 << 63;
    return a + bias < b + bias;
}
2171 /* handle integer constant optimizations and various machine
2172 independent opt */
2173 static void gen_opic(int op)
/* Three outcomes: both operands constant -> fold at compile time;
   one constant -> algebraic simplifications (x*1, x&0, strength
   reduction to shifts, symbol+constant folding); otherwise emit the
   operation via gen_opl()/gen_opi(). */
2175 SValue *v1 = vtop - 1;
2176 SValue *v2 = vtop;
2177 int t1 = v1->type.t & VT_BTYPE;
2178 int t2 = v2->type.t & VT_BTYPE;
2179 int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2180 int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2181 uint64_t l1 = c1 ? v1->c.i : 0;
2182 uint64_t l2 = c2 ? v2->c.i : 0;
2183 int shm = (t1 == VT_LLONG) ? 63 : 31;
/* normalize sub-64-bit operands: truncate to 32 bits and sign-extend
   unless the type is unsigned */
2185 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
2186 l1 = ((uint32_t)l1 |
2187 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
2188 if (t2 != VT_LLONG && (PTR_SIZE != 8 || t2 != VT_PTR))
2189 l2 = ((uint32_t)l2 |
2190 (v2->type.t & VT_UNSIGNED ? 0 : -(l2 & 0x80000000)));
2192 if (c1 && c2) {
2193 switch(op) {
2194 case '+': l1 += l2; break;
2195 case '-': l1 -= l2; break;
2196 case '&': l1 &= l2; break;
2197 case '^': l1 ^= l2; break;
2198 case '|': l1 |= l2; break;
2199 case '*': l1 *= l2; break;
2201 case TOK_PDIV:
2202 case '/':
2203 case '%':
2204 case TOK_UDIV:
2205 case TOK_UMOD:
2206 /* if division by zero, generate explicit division */
2207 if (l2 == 0) {
2208 if (const_wanted && !(nocode_wanted & unevalmask))
2209 tcc_error("division by zero in constant");
2210 goto general_case;
2212 switch(op) {
2213 default: l1 = gen_opic_sdiv(l1, l2); break;
2214 case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
2215 case TOK_UDIV: l1 = l1 / l2; break;
2216 case TOK_UMOD: l1 = l1 % l2; break;
2218 break;
/* shift counts are masked by the type width, like most hardware */
2219 case TOK_SHL: l1 <<= (l2 & shm); break;
2220 case TOK_SHR: l1 >>= (l2 & shm); break;
2221 case TOK_SAR:
2222 l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
2223 break;
2224 /* tests */
2225 case TOK_ULT: l1 = l1 < l2; break;
2226 case TOK_UGE: l1 = l1 >= l2; break;
2227 case TOK_EQ: l1 = l1 == l2; break;
2228 case TOK_NE: l1 = l1 != l2; break;
2229 case TOK_ULE: l1 = l1 <= l2; break;
2230 case TOK_UGT: l1 = l1 > l2; break;
2231 case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
2232 case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
2233 case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
2234 case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
2235 /* logical */
2236 case TOK_LAND: l1 = l1 && l2; break;
2237 case TOK_LOR: l1 = l1 || l2; break;
2238 default:
2239 goto general_case;
/* re-normalize the folded result to the operand width */
2241 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
2242 l1 = ((uint32_t)l1 |
2243 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
2244 v1->c.i = l1;
2245 vtop--;
2246 } else {
2247 /* if commutative ops, put c2 as constant */
2248 if (c1 && (op == '+' || op == '&' || op == '^' ||
2249 op == '|' || op == '*' || op == TOK_EQ || op == TOK_NE)) {
2250 vswap();
2251 c2 = c1; //c = c1, c1 = c2, c2 = c;
2252 l2 = l1; //l = l1, l1 = l2, l2 = l;
2254 if (!const_wanted &&
2255 c1 && ((l1 == 0 &&
2256 (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
2257 (l1 == -1 && op == TOK_SAR))) {
2258 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
2259 vtop--;
2260 } else if (!const_wanted &&
2261 c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
2262 (op == '|' &&
2263 (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))) ||
2264 (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
2265 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
2266 if (l2 == 1)
2267 vtop->c.i = 0;
2268 vswap();
2269 vtop--;
2270 } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
2271 op == TOK_PDIV) &&
2272 l2 == 1) ||
2273 ((op == '+' || op == '-' || op == '|' || op == '^' ||
2274 op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
2275 l2 == 0) ||
2276 (op == '&' &&
2277 (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))))) {
2278 /* filter out NOP operations like x*1, x-0, x&-1... */
2279 vtop--;
2280 } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
2281 /* try to use shifts instead of muls or divs */
2282 if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
/* power of two: compute log2 and switch to a shift */
2283 int n = -1;
2284 while (l2) {
2285 l2 >>= 1;
2286 n++;
2288 vtop->c.i = n;
2289 if (op == '*')
2290 op = TOK_SHL;
2291 else if (op == TOK_PDIV)
2292 op = TOK_SAR;
2293 else
2294 op = TOK_SHR;
2296 goto general_case;
2297 } else if (c2 && (op == '+' || op == '-') &&
2298 (((vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM))
2299 || (vtop[-1].r & (VT_VALMASK | VT_LVAL)) == VT_LOCAL)) {
2300 /* symbol + constant case */
2301 if (op == '-')
2302 l2 = -l2;
2303 l2 += vtop[-1].c.i;
2304 /* The backends can't always deal with addends to symbols
2305 larger than +-1<<31. Don't construct such. */
2306 if ((int)l2 != l2)
2307 goto general_case;
2308 vtop--;
2309 vtop->c.i = l2;
2310 } else {
2311 general_case:
2312 /* call low level op generator */
2313 if (t1 == VT_LLONG || t2 == VT_LLONG ||
2314 (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
2315 gen_opl(op);
2316 else
2317 gen_opi(op);
2322 #if defined TCC_TARGET_X86_64 || defined TCC_TARGET_I386
/* x86 backends handle TOK_NEG directly in gen_opf() */
2323 # define gen_negf gen_opf
2324 #elif defined TCC_TARGET_ARM
2325 void gen_negf(int op)
2327 /* arm will detect 0-x and replace by vneg */
2328 vpushi(0), vswap(), gen_op('-');
2330 #else
2331 /* XXX: implement in gen_opf() for other backends too */
2332 void gen_negf(int op)
2334 /* In IEEE negate(x) isn't subtract(0,x). Without NaNs it's
2335 subtract(-0, x), but with them it's really a sign flip
2336 operation. We implement this with bit manipulation and have
2337 to do some type reinterpretation for this, which TCC can do
2338 only via memory. */
/* NOTE(review): XORs bit 7 of the value's LAST byte (incr_bf_adr with
   size - 1), i.e. assumes the FP sign bit lives in the highest-addressed
   byte — little-endian layout. Confirm for big-endian targets. */
2340 int align, size, bt;
2342 size = type_size(&vtop->type, &align);
2343 bt = vtop->type.t & VT_BTYPE;
/* force the value into memory (spill), then flip the sign in place */
2344 save_reg(gv(RC_TYPE(bt)));
2345 vdup();
2346 incr_bf_adr(size - 1);
2347 vdup();
2348 vpushi(0x80); /* flip sign */
2349 gen_op('^');
2350 vstore();
2351 vpop();
2353 #endif
2355 /* generate a floating point operation with constant propagation */
2356 static void gen_opif(int op)
2358 int c1, c2;
2359 SValue *v1, *v2;
2360 #if defined _MSC_VER && defined __x86_64__
2361 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2362 volatile
2363 #endif
2364 long double f1, f2;
2366 v1 = vtop - 1;
2367 v2 = vtop;
2368 if (op == TOK_NEG)
2369 v1 = v2;
2371 /* currently, we cannot do computations with forward symbols */
2372 c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2373 c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2374 if (c1 && c2) {
2375 if (v1->type.t == VT_FLOAT) {
2376 f1 = v1->c.f;
2377 f2 = v2->c.f;
2378 } else if (v1->type.t == VT_DOUBLE) {
2379 f1 = v1->c.d;
2380 f2 = v2->c.d;
2381 } else {
2382 f1 = v1->c.ld;
2383 f2 = v2->c.ld;
2385 /* NOTE: we only do constant propagation if finite number (not
2386 NaN or infinity) (ANSI spec) */
2387 if (!(ieee_finite(f1) || !ieee_finite(f2)) && !const_wanted)
2388 goto general_case;
2389 switch(op) {
2390 case '+': f1 += f2; break;
2391 case '-': f1 -= f2; break;
2392 case '*': f1 *= f2; break;
2393 case '/':
2394 if (f2 == 0.0) {
2395 union { float f; unsigned u; } x1, x2, y;
2396 /* If not in initializer we need to potentially generate
2397 FP exceptions at runtime, otherwise we want to fold. */
2398 if (!const_wanted)
2399 goto general_case;
2400 /* the run-time result of 0.0/0.0 on x87, also of other compilers
2401 when used to compile the f1 /= f2 below, would be -nan */
2402 x1.f = f1, x2.f = f2;
2403 if (f1 == 0.0)
2404 y.u = 0x7fc00000; /* nan */
2405 else
2406 y.u = 0x7f800000; /* infinity */
2407 y.u |= (x1.u ^ x2.u) & 0x80000000; /* set sign */
2408 f1 = y.f;
2409 break;
2411 f1 /= f2;
2412 break;
2413 case TOK_NEG:
2414 f1 = -f1;
2415 goto unary_result;
2416 /* XXX: also handles tests ? */
2417 default:
2418 goto general_case;
2420 vtop--;
2421 unary_result:
2422 /* XXX: overflow test ? */
2423 if (v1->type.t == VT_FLOAT) {
2424 v1->c.f = f1;
2425 } else if (v1->type.t == VT_DOUBLE) {
2426 v1->c.d = f1;
2427 } else {
2428 v1->c.ld = f1;
2430 } else {
2431 general_case:
2432 if (op == TOK_NEG) {
2433 gen_negf(op);
2434 } else {
2435 gen_opf(op);
2440 /* print a type. If 'varstr' is not NULL, then the variable is also
2441 printed in the type */
2442 /* XXX: union */
2443 /* XXX: add array and function pointers */
2444 static void type_to_str(char *buf, int buf_size,
2445 CType *type, const char *varstr)
2447 int bt, v, t;
2448 Sym *s, *sa;
2449 char buf1[256];
2450 const char *tstr;
2452 t = type->t;
2453 bt = t & VT_BTYPE;
2454 buf[0] = '\0';
2456 if (t & VT_EXTERN)
2457 pstrcat(buf, buf_size, "extern ");
2458 if (t & VT_STATIC)
2459 pstrcat(buf, buf_size, "static ");
2460 if (t & VT_TYPEDEF)
2461 pstrcat(buf, buf_size, "typedef ");
2462 if (t & VT_INLINE)
2463 pstrcat(buf, buf_size, "inline ");
2464 if (bt != VT_PTR) {
2465 if (t & VT_VOLATILE)
2466 pstrcat(buf, buf_size, "volatile ");
2467 if (t & VT_CONSTANT)
2468 pstrcat(buf, buf_size, "const ");
2470 if (((t & VT_DEFSIGN) && bt == VT_BYTE)
2471 || ((t & VT_UNSIGNED)
2472 && (bt == VT_SHORT || bt == VT_INT || bt == VT_LLONG)
2473 && !IS_ENUM(t)
2475 pstrcat(buf, buf_size, (t & VT_UNSIGNED) ? "unsigned " : "signed ");
2477 buf_size -= strlen(buf);
2478 buf += strlen(buf);
2480 switch(bt) {
2481 case VT_VOID:
2482 tstr = "void";
2483 goto add_tstr;
2484 case VT_BOOL:
2485 tstr = "_Bool";
2486 goto add_tstr;
2487 case VT_BYTE:
2488 tstr = "char";
2489 goto add_tstr;
2490 case VT_SHORT:
2491 tstr = "short";
2492 goto add_tstr;
2493 case VT_INT:
2494 tstr = "int";
2495 goto maybe_long;
2496 case VT_LLONG:
2497 tstr = "long long";
2498 maybe_long:
2499 if (t & VT_LONG)
2500 tstr = "long";
2501 if (!IS_ENUM(t))
2502 goto add_tstr;
2503 tstr = "enum ";
2504 goto tstruct;
2505 case VT_FLOAT:
2506 tstr = "float";
2507 goto add_tstr;
2508 case VT_DOUBLE:
2509 tstr = "double";
2510 if (!(t & VT_LONG))
2511 goto add_tstr;
2512 case VT_LDOUBLE:
2513 tstr = "long double";
2514 add_tstr:
2515 pstrcat(buf, buf_size, tstr);
2516 break;
2517 case VT_STRUCT:
2518 tstr = "struct ";
2519 if (IS_UNION(t))
2520 tstr = "union ";
2521 tstruct:
2522 pstrcat(buf, buf_size, tstr);
2523 v = type->ref->v & ~SYM_STRUCT;
2524 if (v >= SYM_FIRST_ANOM)
2525 pstrcat(buf, buf_size, "<anonymous>");
2526 else
2527 pstrcat(buf, buf_size, get_tok_str(v, NULL));
2528 break;
2529 case VT_FUNC:
2530 s = type->ref;
2531 buf1[0]=0;
2532 if (varstr && '*' == *varstr) {
2533 pstrcat(buf1, sizeof(buf1), "(");
2534 pstrcat(buf1, sizeof(buf1), varstr);
2535 pstrcat(buf1, sizeof(buf1), ")");
2537 pstrcat(buf1, buf_size, "(");
2538 sa = s->next;
2539 while (sa != NULL) {
2540 char buf2[256];
2541 type_to_str(buf2, sizeof(buf2), &sa->type, NULL);
2542 pstrcat(buf1, sizeof(buf1), buf2);
2543 sa = sa->next;
2544 if (sa)
2545 pstrcat(buf1, sizeof(buf1), ", ");
2547 if (s->f.func_type == FUNC_ELLIPSIS)
2548 pstrcat(buf1, sizeof(buf1), ", ...");
2549 pstrcat(buf1, sizeof(buf1), ")");
2550 type_to_str(buf, buf_size, &s->type, buf1);
2551 goto no_var;
2552 case VT_PTR:
2553 s = type->ref;
2554 if (t & (VT_ARRAY|VT_VLA)) {
2555 if (varstr && '*' == *varstr)
2556 snprintf(buf1, sizeof(buf1), "(%s)[%d]", varstr, s->c);
2557 else
2558 snprintf(buf1, sizeof(buf1), "%s[%d]", varstr ? varstr : "", s->c);
2559 type_to_str(buf, buf_size, &s->type, buf1);
2560 goto no_var;
2562 pstrcpy(buf1, sizeof(buf1), "*");
2563 if (t & VT_CONSTANT)
2564 pstrcat(buf1, buf_size, "const ");
2565 if (t & VT_VOLATILE)
2566 pstrcat(buf1, buf_size, "volatile ");
2567 if (varstr)
2568 pstrcat(buf1, sizeof(buf1), varstr);
2569 type_to_str(buf, buf_size, &s->type, buf1);
2570 goto no_var;
2572 if (varstr) {
2573 pstrcat(buf, buf_size, " ");
2574 pstrcat(buf, buf_size, varstr);
2576 no_var: ;
2579 static void type_incompatibility_error(CType* st, CType* dt, const char* fmt)
2581 char buf1[256], buf2[256];
2582 type_to_str(buf1, sizeof(buf1), st, NULL);
2583 type_to_str(buf2, sizeof(buf2), dt, NULL);
2584 tcc_error(fmt, buf1, buf2);
2587 static void type_incompatibility_warning(CType* st, CType* dt, const char* fmt)
2589 char buf1[256], buf2[256];
2590 type_to_str(buf1, sizeof(buf1), st, NULL);
2591 type_to_str(buf2, sizeof(buf2), dt, NULL);
2592 tcc_warning(fmt, buf1, buf2);
2595 static int pointed_size(CType *type)
2597 int align;
2598 return type_size(pointed_type(type), &align);
2601 static inline int is_null_pointer(SValue *p)
2603 if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
2604 return 0;
2605 return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
2606 ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
2607 ((p->type.t & VT_BTYPE) == VT_PTR &&
2608 (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0) &&
2609 ((pointed_type(&p->type)->t & VT_BTYPE) == VT_VOID) &&
2610 0 == (pointed_type(&p->type)->t & (VT_CONSTANT | VT_VOLATILE))
2614 /* compare function types. OLD functions match any new functions */
2615 static int is_compatible_func(CType *type1, CType *type2)
2617 Sym *s1, *s2;
2619 s1 = type1->ref;
2620 s2 = type2->ref;
2621 if (s1->f.func_call != s2->f.func_call)
2622 return 0;
2623 if (s1->f.func_type != s2->f.func_type
2624 && s1->f.func_type != FUNC_OLD
2625 && s2->f.func_type != FUNC_OLD)
2626 return 0;
2627 for (;;) {
2628 if (!is_compatible_unqualified_types(&s1->type, &s2->type))
2629 return 0;
2630 if (s1->f.func_type == FUNC_OLD || s2->f.func_type == FUNC_OLD )
2631 return 1;
2632 s1 = s1->next;
2633 s2 = s2->next;
2634 if (!s1)
2635 return !s2;
2636 if (!s2)
2637 return 0;
2641 /* return true if type1 and type2 are the same. If unqualified is
2642 true, qualifiers on the types are ignored.
2644 static int compare_types(CType *type1, CType *type2, int unqualified)
2646 int bt1, t1, t2;
2648 t1 = type1->t & VT_TYPE;
2649 t2 = type2->t & VT_TYPE;
2650 if (unqualified) {
2651 /* strip qualifiers before comparing */
2652 t1 &= ~(VT_CONSTANT | VT_VOLATILE);
2653 t2 &= ~(VT_CONSTANT | VT_VOLATILE);
2656 /* Default Vs explicit signedness only matters for char */
2657 if ((t1 & VT_BTYPE) != VT_BYTE) {
2658 t1 &= ~VT_DEFSIGN;
2659 t2 &= ~VT_DEFSIGN;
2661 /* XXX: bitfields ? */
2662 if (t1 != t2)
2663 return 0;
2665 if ((t1 & VT_ARRAY)
2666 && !(type1->ref->c < 0
2667 || type2->ref->c < 0
2668 || type1->ref->c == type2->ref->c))
2669 return 0;
2671 /* test more complicated cases */
2672 bt1 = t1 & VT_BTYPE;
2673 if (bt1 == VT_PTR) {
2674 type1 = pointed_type(type1);
2675 type2 = pointed_type(type2);
2676 return is_compatible_types(type1, type2);
2677 } else if (bt1 == VT_STRUCT) {
2678 return (type1->ref == type2->ref);
2679 } else if (bt1 == VT_FUNC) {
2680 return is_compatible_func(type1, type2);
2681 } else if (IS_ENUM(type1->t) && IS_ENUM(type2->t)) {
2682 /* If both are enums then they must be the same, if only one is then
2683 t1 and t2 must be equal, which was checked above already. */
2684 return type1->ref == type2->ref;
2685 } else {
2686 return 1;
/* Check if OP1 and OP2 can be "combined" with operation OP, the combined
   type is stored in DEST if non-null (except for pointer plus/minus) . */
/* Returns 1 on success, 0 if the operands are incompatible for OP.
   Implements the usual arithmetic conversions plus the C99 rules for
   conditional-expression pointer operands (6.5.15p6). */
static int combine_types(CType *dest, SValue *op1, SValue *op2, int op)
{
    CType *type1 = &op1->type, *type2 = &op2->type, type;
    int t1 = type1->t, t2 = type2->t, bt1 = t1 & VT_BTYPE, bt2 = t2 & VT_BTYPE;
    int ret = 1;

    type.t = VT_VOID;
    type.ref = NULL;

    if (bt1 == VT_VOID || bt2 == VT_VOID) {
        ret = op == '?' ? 1 : 0;
        /* NOTE: as an extension, we accept void on only one side */
        type.t = VT_VOID;
    } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
        if (op == '+') ; /* Handled in caller */
        /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
        /* If one is a null ptr constant the result type is the other. */
        else if (is_null_pointer (op2)) type = *type1;
        else if (is_null_pointer (op1)) type = *type2;
        else if (bt1 != bt2) {
            /* accept comparison or cond-expr between pointer and integer
               with a warning */
            if ((op == '?' || TOK_ISCOND(op))
                && (is_integer_btype(bt1) || is_integer_btype(bt2)))
                tcc_warning("pointer/integer mismatch in %s",
                            op == '?' ? "conditional expression" : "comparison");
            else if (op != '-' || !is_integer_btype(bt2))
                ret = 0;
            /* result has the pointer operand's type */
            type = *(bt1 == VT_PTR ? type1 : type2);
        } else {
            /* both operands are pointers of the same base type */
            CType *pt1 = pointed_type(type1);
            CType *pt2 = pointed_type(type2);
            int pbt1 = pt1->t & VT_BTYPE;
            int pbt2 = pt2->t & VT_BTYPE;
            int newquals, copied = 0;
            if (pbt1 != VT_VOID && pbt2 != VT_VOID
                && !compare_types(pt1, pt2, 1/*unqualif*/)) {
                if (op != '?' && !TOK_ISCOND(op))
                    ret = 0;
                else
                    type_incompatibility_warning(type1, type2,
                        op == '?'
                         ? "pointer type mismatch in conditional expression ('%s' and '%s')"
                         : "pointer type mismatch in comparison('%s' and '%s')");
            }
            if (op == '?') {
                /* pointers to void get preferred, otherwise the
                   pointed to types minus qualifs should be compatible */
                type = *((pbt1 == VT_VOID) ? type1 : type2);
                /* combine qualifs */
                newquals = ((pt1->t | pt2->t) & (VT_CONSTANT | VT_VOLATILE));
                if ((~pointed_type(&type)->t & (VT_CONSTANT | VT_VOLATILE))
                    & newquals)
                {
                    /* copy the pointer target symbol before mutating it,
                       so the original type is left untouched */
                    type.ref = sym_push(SYM_FIELD, &type.ref->type,
                                        0, type.ref->c);
                    copied = 1;
                    pointed_type(&type)->t |= newquals;
                }
                /* pointers to incomplete arrays get converted to
                   pointers to completed ones if possible */
                if (pt1->t & VT_ARRAY
                    && pt2->t & VT_ARRAY
                    && pointed_type(&type)->ref->c < 0
                    && (pt1->ref->c > 0 || pt2->ref->c > 0))
                {
                    if (!copied)
                        type.ref = sym_push(SYM_FIELD, &type.ref->type,
                                            0, type.ref->c);
                    pointed_type(&type)->ref =
                        sym_push(SYM_FIELD, &pointed_type(&type)->ref->type,
                                 0, pointed_type(&type)->ref->c);
                    /* take whichever array length is known (> 0) */
                    pointed_type(&type)->ref->c =
                        0 < pt1->ref->c ? pt1->ref->c : pt2->ref->c;
                }
            }
        }
        /* pointer comparisons yield an integer result type */
        if (TOK_ISCOND(op))
            type.t = VT_SIZE_T;
    } else if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
        if (op != '?' || !compare_types(type1, type2, 1))
            ret = 0;
        type = *type1;
    } else if (is_float(bt1) || is_float(bt2)) {
        /* pick the wider floating type */
        if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
            type.t = VT_LDOUBLE;
        } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
            type.t = VT_DOUBLE;
        } else {
            type.t = VT_FLOAT;
        }
    } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
        /* cast to biggest op */
        type.t = VT_LLONG | VT_LONG;
        if (bt1 == VT_LLONG)
            type.t &= t1;
        if (bt2 == VT_LLONG)
            type.t &= t2;
        /* convert to unsigned if it does not fit in a long long */
        if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
            (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
            type.t |= VT_UNSIGNED;
    } else {
        /* integer operations */
        type.t = VT_INT | (VT_LONG & (t1 | t2));
        /* convert to unsigned if it does not fit in an integer */
        if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
            (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
            type.t |= VT_UNSIGNED;
    }
    if (dest)
        *dest = type;
    return ret;
}
/* generic gen_op: handles types problems */
/* Pop the two top values of the value stack, apply binary operator OP
   and push the result.  Performs the usual conversions, handles pointer
   arithmetic (scaling by the pointed-to size) and pointer subtraction
   (dividing the byte difference back down). */
ST_FUNC void gen_op(int op)
{
    int t1, t2, bt1, bt2, t;
    CType type1, combtype;

 redo:
    t1 = vtop[-1].type.t;
    t2 = vtop[0].type.t;
    bt1 = t1 & VT_BTYPE;
    bt2 = t2 & VT_BTYPE;

    if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
        /* function designators decay to pointers-to-function, then retry */
        if (bt2 == VT_FUNC) {
            mk_pointer(&vtop->type);
            gaddrof();
        }
        if (bt1 == VT_FUNC) {
            vswap();
            mk_pointer(&vtop->type);
            gaddrof();
            vswap();
        }
        goto redo;
    } else if (!combine_types(&combtype, vtop - 1, vtop, op)) {
        tcc_error_noabort("invalid operand types for binary operation");
        vpop();
    } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
        /* at least one operand is a pointer */
        /* relational op: must be both pointers */
        int align;
        if (TOK_ISCOND(op))
            goto std_op;
        /* if both pointers, then it must be the '-' op */
        if (bt1 == VT_PTR && bt2 == VT_PTR) {
            if (op != '-')
                tcc_error("cannot use pointers here");
            /* byte difference, then divide by element size */
            vpush_type_size(pointed_type(&vtop[-1].type), &align);
            vrott(3);
            gen_opic(op);
            vtop->type.t = VT_PTRDIFF_T;
            vswap();
            gen_op(TOK_PDIV);
        } else {
            /* exactly one pointer : must be '+' or '-'. */
            if (op != '-' && op != '+')
                tcc_error("cannot use pointers here");
            /* Put pointer as first operand */
            if (bt2 == VT_PTR) {
                vswap();
                t = t1, t1 = t2, t2 = t;
            }
#if PTR_SIZE == 4
            if ((vtop[0].type.t & VT_BTYPE) == VT_LLONG)
                /* XXX: truncate here because gen_opl can't handle ptr + long long */
                gen_cast_s(VT_INT);
#endif
            type1 = vtop[-1].type;
            /* scale the integer operand by the element size */
            vpush_type_size(pointed_type(&vtop[-1].type), &align);
            gen_op('*');
#ifdef CONFIG_TCC_BCHECK
            if (tcc_state->do_bounds_check && !const_wanted) {
                /* if bounded pointers, we generate a special code to
                   test bounds */
                if (op == '-') {
                    vpushi(0);
                    vswap();
                    gen_op('-');
                }
                gen_bounded_ptr_add();
            } else
#endif
            {
                gen_opic(op);
            }
            type1.t &= ~(VT_ARRAY|VT_VLA);
            /* put again type if gen_opic() swaped operands */
            vtop->type = type1;
        }
    } else {
        /* floats can only be used for a few operations */
        if (is_float(combtype.t)
            && op != '+' && op != '-' && op != '*' && op != '/'
            && !TOK_ISCOND(op))
            tcc_error("invalid operands for binary operation");
        else if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL) {
            /* shifts: result type follows the left operand only */
            t = bt1 == VT_LLONG ? VT_LLONG : VT_INT;
            if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (t | VT_UNSIGNED))
                t |= VT_UNSIGNED;
            t |= (VT_LONG & t1);
            combtype.t = t;
        }
    std_op:
        t = t2 = combtype.t;
        /* XXX: currently, some unsigned operations are explicit, so
           we modify them here */
        if (t & VT_UNSIGNED) {
            if (op == TOK_SAR)
                op = TOK_SHR;
            else if (op == '/')
                op = TOK_UDIV;
            else if (op == '%')
                op = TOK_UMOD;
            else if (op == TOK_LT)
                op = TOK_ULT;
            else if (op == TOK_GT)
                op = TOK_UGT;
            else if (op == TOK_LE)
                op = TOK_ULE;
            else if (op == TOK_GE)
                op = TOK_UGE;
        }
        vswap();
        gen_cast_s(t);
        vswap();
        /* special case for shifts and long long: we keep the shift as
           an integer */
        if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
            t2 = VT_INT;
        gen_cast_s(t2);
        if (is_float(t))
            gen_opif(op);
        else
            gen_opic(op);
        if (TOK_ISCOND(op)) {
            /* relational op: the result is an int */
            vtop->type.t = VT_INT;
        } else {
            vtop->type.t = t;
        }
    }
    // Make sure that we have converted to an rvalue:
    if (vtop->r & VT_LVAL)
        gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
}
2944 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64 || defined TCC_TARGET_ARM
2945 #define gen_cvt_itof1 gen_cvt_itof
2946 #else
/* generic itof for unsigned long long case */
/* Convert the integer on top of the value stack to floating type T.
   Unsigned 64-bit sources are not handled natively on these targets,
   so a libtcc1 runtime helper is called instead. */
static void gen_cvt_itof1(int t)
{
    if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
        (VT_LLONG | VT_UNSIGNED)) {

        /* pick the helper matching the destination float type */
        if (t == VT_FLOAT)
            vpush_helper_func(TOK___floatundisf);
#if LDOUBLE_SIZE != 8
        else if (t == VT_LDOUBLE)
            vpush_helper_func(TOK___floatundixf);
#endif
        else
            vpush_helper_func(TOK___floatundidf);
        vrott(2);
        gfunc_call(1);
        vpushi(0);
        /* mark vtop as the helper's (float) return value register */
        PUT_R_RET(vtop, t);
    } else {
        gen_cvt_itof(t);
    }
}
2969 #endif
2971 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64
2972 #define gen_cvt_ftoi1 gen_cvt_ftoi
2973 #else
/* generic ftoi for unsigned long long case */
/* Convert the float on top of the value stack to integer type T.
   float -> unsigned long long is routed through a libtcc1 runtime
   helper chosen by the source float type. */
static void gen_cvt_ftoi1(int t)
{
    int st;
    if (t == (VT_LLONG | VT_UNSIGNED)) {
        /* not handled natively */
        st = vtop->type.t & VT_BTYPE;
        if (st == VT_FLOAT)
            vpush_helper_func(TOK___fixunssfdi);
#if LDOUBLE_SIZE != 8
        else if (st == VT_LDOUBLE)
            vpush_helper_func(TOK___fixunsxfdi);
#endif
        else
            vpush_helper_func(TOK___fixunsdfdi);
        vrott(2);
        gfunc_call(1);
        vpushi(0);
        /* mark vtop as the helper's return value register */
        PUT_R_RET(vtop, t);
    } else {
        gen_cvt_ftoi(t);
    }
}
2997 #endif
2999 /* special delayed cast for char/short */
3000 static void force_charshort_cast(void)
3002 int sbt = BFGET(vtop->r, VT_MUSTCAST) == 2 ? VT_LLONG : VT_INT;
3003 int dbt = vtop->type.t;
3004 vtop->r &= ~VT_MUSTCAST;
3005 vtop->type.t = sbt;
3006 gen_cast_s(dbt == VT_BOOL ? VT_BYTE|VT_UNSIGNED : dbt);
3007 vtop->type.t = dbt;
3010 static void gen_cast_s(int t)
3012 CType type;
3013 type.t = t;
3014 type.ref = NULL;
3015 gen_cast(&type);
/* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
/* Constant operands are folded at compile time; otherwise conversion
   code is generated.  Narrowing integer casts of in-memory lvalues may
   be performed "for free" by simply re-typing the access. */
static void gen_cast(CType *type)
{
    int sbt, dbt, sf, df, c;
    int dbt_bt, sbt_bt, ds, ss, bits, trunc;

    /* special delayed cast for char/short */
    if (vtop->r & VT_MUSTCAST)
        force_charshort_cast();

    /* bitfields first get cast to ints */
    if (vtop->type.t & VT_BITFIELD)
        gv(RC_INT);

    dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
    sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
    if (sbt == VT_FUNC)
        sbt = VT_PTR;

again:
    if (sbt != dbt) {
        sf = is_float(sbt);
        df = is_float(dbt);
        dbt_bt = dbt & VT_BTYPE;
        sbt_bt = sbt & VT_BTYPE;
        if (dbt_bt == VT_VOID)
            goto done;
        if (sbt_bt == VT_VOID) {
error:
            cast_error(&vtop->type, type);
        }

        c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
#if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
        /* don't fold long double constants when cross-compiling: the
           host's long double may differ from the target's */
        c &= (dbt != VT_LDOUBLE) | !!nocode_wanted;
#endif
        if (c) {
            /* constant case: we can do it now */
            /* XXX: in ISOC, cannot do it if error in convert */
            if (sbt == VT_FLOAT)
                vtop->c.ld = vtop->c.f;
            else if (sbt == VT_DOUBLE)
                vtop->c.ld = vtop->c.d;

            if (df) {
                if (sbt_bt == VT_LLONG) {
                    if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
                        vtop->c.ld = vtop->c.i;
                    else
                        /* negate first to avoid signed->fp rounding of
                           a value with the top bit set */
                        vtop->c.ld = -(long double)-vtop->c.i;
                } else if(!sf) {
                    if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
                        vtop->c.ld = (uint32_t)vtop->c.i;
                    else
                        vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
                }

                if (dbt == VT_FLOAT)
                    vtop->c.f = (float)vtop->c.ld;
                else if (dbt == VT_DOUBLE)
                    vtop->c.d = (double)vtop->c.ld;
            } else if (sf && dbt == VT_BOOL) {
                vtop->c.i = (vtop->c.ld != 0);
            } else {
                if(sf)
                    vtop->c.i = vtop->c.ld;
                else if (sbt_bt == VT_LLONG || (PTR_SIZE == 8 && sbt == VT_PTR))
                    ; /* already full width */
                else if (sbt & VT_UNSIGNED)
                    vtop->c.i = (uint32_t)vtop->c.i;
                else
                    /* sign-extend a 32-bit source */
                    vtop->c.i = ((uint32_t)vtop->c.i | -(vtop->c.i & 0x80000000));

                if (dbt_bt == VT_LLONG || (PTR_SIZE == 8 && dbt == VT_PTR))
                    ; /* no truncation needed */
                else if (dbt == VT_BOOL)
                    vtop->c.i = (vtop->c.i != 0);
                else {
                    uint32_t m = dbt_bt == VT_BYTE ? 0xff :
                                 dbt_bt == VT_SHORT ? 0xffff :
                                  0xffffffff;
                    vtop->c.i &= m;
                    if (!(dbt & VT_UNSIGNED))
                        /* sign-extend from the destination width */
                        vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
                }
            }
            goto done;

        } else if (dbt == VT_BOOL
            && (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM))
                == (VT_CONST | VT_SYM)) {
            /* addresses are considered non-zero (see tcctest.c:sinit23) */
            vtop->r = VT_CONST;
            vtop->c.i = 1;
            goto done;
        }

        /* cannot generate code for global or static initializers */
        if (STATIC_DATA_WANTED)
            goto done;

        /* non constant case: generate code */
        if (dbt == VT_BOOL) {
            gen_test_zero(TOK_NE);
            goto done;
        }

        if (sf || df) {
            if (sf && df) {
                /* convert from fp to fp */
                gen_cvt_ftof(dbt);
            } else if (df) {
                /* convert int to fp */
                gen_cvt_itof1(dbt);
            } else {
                /* convert fp to int */
                sbt = dbt;
                if (dbt_bt != VT_LLONG && dbt_bt != VT_INT)
                    sbt = VT_INT;
                gen_cvt_ftoi1(sbt);
                goto again; /* may need char/short cast */
            }
            goto done;
        }

        ds = btype_size(dbt_bt);
        ss = btype_size(sbt_bt);
        if (ds == 0 || ss == 0)
            goto error;

        if (IS_ENUM(type->t) && type->ref->c < 0)
            tcc_error("cast to incomplete type");

        /* same size and no sign conversion needed */
        if (ds == ss && ds >= 4)
            goto done;
        if (dbt_bt == VT_PTR || sbt_bt == VT_PTR) {
            tcc_warning("cast between pointer and integer of different size");
            if (sbt_bt == VT_PTR) {
                /* put integer type to allow logical operations below */
                vtop->type.t = (PTR_SIZE == 8 ? VT_LLONG : VT_INT);
            }
        }

        /* processor allows { int a = 0, b = *(char*)&a; }
           That means that if we cast to less width, we can just
           change the type and read it still later. */
#define ALLOW_SUBTYPE_ACCESS 1

        if (ALLOW_SUBTYPE_ACCESS && (vtop->r & VT_LVAL)) {
            /* value still in memory */
            if (ds <= ss)
                goto done;
            /* ss <= 4 here */
            if (ds <= 4 && !(dbt == (VT_SHORT | VT_UNSIGNED) && sbt == VT_BYTE)) {
                gv(RC_INT);
                goto done; /* no 64bit envolved */
            }
        }
        gv(RC_INT);

        trunc = 0;
#if PTR_SIZE == 4
        if (ds == 8) {
            /* generate high word */
            if (sbt & VT_UNSIGNED) {
                vpushi(0);
                gv(RC_INT);
            } else {
                gv_dup();
                vpushi(31);
                gen_op(TOK_SAR);
            }
            lbuild(dbt);
        } else if (ss == 8) {
            /* from long long: just take low order word */
            lexpand();
            vpop();
        }
        ss = 4;

#elif PTR_SIZE == 8
        if (ds == 8) {
            /* need to convert from 32bit to 64bit */
            if (sbt & VT_UNSIGNED) {
#if defined(TCC_TARGET_RISCV64)
                /* RISC-V keeps 32bit vals in registers sign-extended.
                   So here we need a zero-extension.  */
                trunc = 32;
#else
                goto done;
#endif
            } else {
                gen_cvt_sxtw();
                goto done;
            }
            ss = ds, ds = 4, dbt = sbt;
        } else if (ss == 8) {
            /* RISC-V keeps 32bit vals in registers sign-extended.
               So here we need a sign-extension for signed types and
               zero-extension. for unsigned types. */
#if !defined(TCC_TARGET_RISCV64)
            trunc = 32; /* zero upper 32 bits for non RISC-V targets */
#endif
        } else {
            ss = 4;
        }
#endif

        if (ds >= ss)
            goto done;
#if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64 || defined TCC_TARGET_ARM64
        /* these targets have native byte/short extension instructions */
        if (ss == 4) {
            gen_cvt_csti(dbt);
            goto done;
        }
#endif
        /* generic narrowing: shift left then arithmetic/logical shift
           right to sign- or zero-extend from the destination width */
        bits = (ss - ds) * 8;
        /* for unsigned, gen_op will convert SAR to SHR */
        vtop->type.t = (ss == 8 ? VT_LLONG : VT_INT) | (dbt & VT_UNSIGNED);
        vpushi(bits);
        gen_op(TOK_SHL);
        vpushi(bits - trunc);
        gen_op(TOK_SAR);
        vpushi(trunc);
        gen_op(TOK_SHR);
    }
done:
    vtop->type = *type;
    vtop->type.t &= ~ ( VT_CONSTANT | VT_VOLATILE | VT_ARRAY );
}
/* return type size as known at compile time. Put alignment at 'a' */
/* Returns -1 for incomplete types (e.g. incomplete enums); VLAs return
   a negative size via the array branch (negated back when the element
   count itself is also unknown). */
ST_FUNC int type_size(CType *type, int *a)
{
    Sym *s;
    int bt;

    bt = type->t & VT_BTYPE;
    if (bt == VT_STRUCT) {
        /* struct/union: size/alignment were computed at declaration and
           stored in the type symbol (c = size, r = alignment) */
        s = type->ref;
        *a = s->r;
        return s->c;
    } else if (bt == VT_PTR) {
        if (type->t & VT_ARRAY) {
            int ts;

            s = type->ref;
            ts = type_size(&s->type, a);

            /* both element size and count unknown: normalize sign */
            if (ts < 0 && s->c < 0)
                ts = -ts;

            return ts * s->c;
        } else {
            *a = PTR_SIZE;
            return PTR_SIZE;
        }
    } else if (IS_ENUM(type->t) && type->ref->c < 0) {
        *a = 0;
        return -1; /* incomplete enum */
    } else if (bt == VT_LDOUBLE) {
        *a = LDOUBLE_ALIGN;
        return LDOUBLE_SIZE;
    } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
        /* 8-byte scalars: alignment is target-ABI dependent */
#ifdef TCC_TARGET_I386
#ifdef TCC_TARGET_PE
        *a = 8;
#else
        *a = 4;
#endif
#elif defined(TCC_TARGET_ARM)
#ifdef TCC_ARM_EABI
        *a = 8;
#else
        *a = 4;
#endif
#else
        *a = 8;
#endif
        return 8;
    } else if (bt == VT_INT || bt == VT_FLOAT) {
        *a = 4;
        return 4;
    } else if (bt == VT_SHORT) {
        *a = 2;
        return 2;
    } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
        *a = 8;
        return 16;
    } else {
        /* char, void, function, _Bool */
        *a = 1;
        return 1;
    }
}
3316 /* push type size as known at runtime time on top of value stack. Put
3317 alignment at 'a' */
3318 static void vpush_type_size(CType *type, int *a)
3320 if (type->t & VT_VLA) {
3321 type_size(&type->ref->type, a);
3322 vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
3323 } else {
3324 int size = type_size(type, a);
3325 if (size < 0)
3326 tcc_error("unknown type size");
3327 #if PTR_SIZE == 8
3328 vpushll(size);
3329 #else
3330 vpushi(size);
3331 #endif
/* return the pointed type of t */
/* for pointer and array types the ref symbol holds the element type */
static inline CType *pointed_type(CType *type)
{
    return &type->ref->type;
}
3341 /* modify type so that its it is a pointer to type. */
3342 ST_FUNC void mk_pointer(CType *type)
3344 Sym *s;
3345 s = sym_push(SYM_FIELD, type, 0, -1);
3346 type->t = VT_PTR | (type->t & VT_STORAGE);
3347 type->ref = s;
/* return true if type1 and type2 are exactly the same (including
   qualifiers).
 */
static int is_compatible_types(CType *type1, CType *type2)
{
    return compare_types(type1,type2,0);
}
/* return true if type1 and type2 are the same (ignoring qualifiers).
 */
static int is_compatible_unqualified_types(CType *type1, CType *type2)
{
    return compare_types(type1,type2,1);
}
/* fatal diagnostic for an impossible cast from ST to DT */
static void cast_error(CType *st, CType *dt)
{
    type_incompatibility_error(st, dt, "cannot convert '%s' to '%s'");
}
/* verify type compatibility to store vtop in 'dt' type */
/* Emits errors for impossible stores and warnings for legal-but-dubious
   ones (pointer/integer mixes, discarded qualifiers, incompatible
   pointer targets).  Does not generate any code. */
static void verify_assign_cast(CType *dt)
{
    CType *st, *type1, *type2;
    int dbt, sbt, qualwarn, lvl;

    st = &vtop->type; /* source type */
    dbt = dt->t & VT_BTYPE;
    sbt = st->t & VT_BTYPE;
    if (dt->t & VT_CONSTANT)
        tcc_warning("assignment of read-only location");
    switch(dbt) {
    case VT_VOID:
        if (sbt != dbt)
            tcc_error("assignment to void expression");
        break;
    case VT_PTR:
        /* special cases for pointers */
        /* '0' can also be a pointer */
        if (is_null_pointer(vtop))
            break;
        /* accept implicit pointer to integer cast with warning */
        if (is_integer_btype(sbt)) {
            tcc_warning("assignment makes pointer from integer without a cast");
            break;
        }
        type1 = pointed_type(dt);
        if (sbt == VT_PTR)
            type2 = pointed_type(st);
        else if (sbt == VT_FUNC)
            type2 = st; /* a function is implicitly a function pointer */
        else
            goto error;
        if (is_compatible_types(type1, type2))
            break;
        /* walk down matching pointer levels, collecting qualifier loss */
        for (qualwarn = lvl = 0;; ++lvl) {
            if (((type2->t & VT_CONSTANT) && !(type1->t & VT_CONSTANT)) ||
                ((type2->t & VT_VOLATILE) && !(type1->t & VT_VOLATILE)))
                qualwarn = 1;
            dbt = type1->t & (VT_BTYPE|VT_LONG);
            sbt = type2->t & (VT_BTYPE|VT_LONG);
            if (dbt != VT_PTR || sbt != VT_PTR)
                break;
            type1 = pointed_type(type1);
            type2 = pointed_type(type2);
        }
        if (!is_compatible_unqualified_types(type1, type2)) {
            if ((dbt == VT_VOID || sbt == VT_VOID) && lvl == 0) {
                /* void * can match anything */
            } else if (dbt == sbt
                && is_integer_btype(sbt & VT_BTYPE)
                && IS_ENUM(type1->t) + IS_ENUM(type2->t)
                    + !!((type1->t ^ type2->t) & VT_UNSIGNED) < 2) {
                /* Like GCC don't warn by default for merely changes
                   in pointer target signedness.  Do warn for different
                   base types, though, in particular for unsigned enums
                   and signed int targets.  */
            } else {
                tcc_warning("assignment from incompatible pointer type");
                break;
            }
        }
        if (qualwarn)
            tcc_warning_c(warn_discarded_qualifiers)("assignment discards qualifiers from pointer target type");
        break;
    case VT_BYTE:
    case VT_SHORT:
    case VT_INT:
    case VT_LLONG:
        if (sbt == VT_PTR || sbt == VT_FUNC) {
            tcc_warning("assignment makes integer from pointer without a cast");
        } else if (sbt == VT_STRUCT) {
            goto case_VT_STRUCT;
        }
        /* XXX: more tests */
        break;
    case VT_STRUCT:
    case_VT_STRUCT:
        if (!is_compatible_unqualified_types(dt, st)) {
    error:
            cast_error(st, dt);
        }
        break;
    }
}
/* check assignment compatibility, then cast vtop to 'dt' */
static void gen_assign_cast(CType *dt)
{
    verify_assign_cast(dt);
    gen_cast(dt);
}
/* store vtop in lvalue pushed on stack */
/* Handles: struct assignment (memmove or native copy), bitfield stores
   (read-modify-write or packed helper), and plain scalar stores with
   the char/short delayed-cast optimization.  On return the stored value
   remains on the stack as the expression result. */
ST_FUNC void vstore(void)
{
    int sbt, dbt, ft, r, size, align, bit_size, bit_pos, delayed_cast;

    ft = vtop[-1].type.t;
    sbt = vtop->type.t & VT_BTYPE;
    dbt = ft & VT_BTYPE;
    verify_assign_cast(&vtop[-1].type);

    if (sbt == VT_STRUCT) {
        /* if structure, only generate pointer */
        /* structure assignment : generate memcpy */
        size = type_size(&vtop->type, &align);
        /* destination, keep on stack() as result */
        vpushv(vtop - 1);
#ifdef CONFIG_TCC_BCHECK
        if (vtop->r & VT_MUSTBOUND)
            gbound(); /* check would be wrong after gaddrof() */
#endif
        vtop->type.t = VT_PTR;
        gaddrof();
        /* source */
        vswap();
#ifdef CONFIG_TCC_BCHECK
        if (vtop->r & VT_MUSTBOUND)
            gbound();
#endif
        vtop->type.t = VT_PTR;
        gaddrof();

#ifdef TCC_TARGET_NATIVE_STRUCT_COPY
        if (1
#ifdef CONFIG_TCC_BCHECK
            && !tcc_state->do_bounds_check
#endif
            ) {
            gen_struct_copy(size);
        } else
#endif
        {
            /* type size */
            vpushi(size);
            /* Use memmove, rather than memcpy, as dest and src may be same: */
#ifdef TCC_ARM_EABI
            if(!(align & 7))
                vpush_helper_func(TOK_memmove8);
            else if(!(align & 3))
                vpush_helper_func(TOK_memmove4);
            else
#endif
            vpush_helper_func(TOK_memmove);
            vrott(4);
            gfunc_call(3);
        }

    } else if (ft & VT_BITFIELD) {
        /* bitfield store handling */

        /* save lvalue as expression result (example: s.b = s.a = n;) */
        vdup(), vtop[-1] = vtop[-2];

        bit_pos = BIT_POS(ft);
        bit_size = BIT_SIZE(ft);
        /* remove bit field info to avoid loops */
        vtop[-1].type.t = ft & ~VT_STRUCT_MASK;

        if (dbt == VT_BOOL) {
            /* _Bool bitfields: normalize to 0/1 first */
            gen_cast(&vtop[-1].type);
            vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
        }
        r = adjust_bf(vtop - 1, bit_pos, bit_size);
        if (dbt != VT_BOOL) {
            gen_cast(&vtop[-1].type);
            dbt = vtop[-1].type.t & VT_BTYPE;
        }
        if (r == VT_STRUCT) {
            /* bitfield straddles a storage-unit boundary */
            store_packed_bf(bit_pos, bit_size);
        } else {
            unsigned long long mask = (1ULL << bit_size) - 1;
            if (dbt != VT_BOOL) {
                /* mask source */
                if (dbt == VT_LLONG)
                    vpushll(mask);
                else
                    vpushi((unsigned)mask);
                gen_op('&');
            }
            /* shift source */
            vpushi(bit_pos);
            gen_op(TOK_SHL);
            vswap();
            /* duplicate destination */
            vdup();
            vrott(3);
            /* load destination, mask and or with source */
            if (dbt == VT_LLONG)
                vpushll(~(mask << bit_pos));
            else
                vpushi(~((unsigned)mask << bit_pos));
            gen_op('&');
            gen_op('|');
            /* store result */
            vstore();
            /* ... and discard */
            vpop();
        }
    } else if (dbt == VT_VOID) {
        --vtop;
    } else {
        /* optimize char/short casts */
        delayed_cast = 0;
        if ((dbt == VT_BYTE || dbt == VT_SHORT)
            && is_integer_btype(sbt)
            ) {
            if ((vtop->r & VT_MUSTCAST)
                && btype_size(dbt) > btype_size(sbt)
                )
                force_charshort_cast();
            delayed_cast = 1;
        } else {
            gen_cast(&vtop[-1].type);
        }

#ifdef CONFIG_TCC_BCHECK
        /* bound check case */
        if (vtop[-1].r & VT_MUSTBOUND) {
            vswap();
            gbound();
            vswap();
        }
#endif
        gv(RC_TYPE(dbt)); /* generate value */

        if (delayed_cast) {
            vtop->r |= BFVAL(VT_MUSTCAST, (sbt == VT_LLONG) + 1);
            //tcc_warning("deley cast %x -> %x", sbt, dbt);
            vtop->type.t = ft & VT_TYPE;
        }

        /* if lvalue was saved on stack, must read it */
        if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
            SValue sv;
            r = get_reg(RC_INT);
            sv.type.t = VT_PTRDIFF_T;
            sv.r = VT_LOCAL | VT_LVAL;
            sv.c.i = vtop[-1].c.i;
            load(r, &sv);
            vtop[-1].r = r | VT_LVAL;
        }

        r = vtop->r & VT_VALMASK;
        /* two word case handling :
           store second register at word + 4 (or +8 for x86-64)  */
        if (USING_TWO_WORDS(dbt)) {
            int load_type = (dbt == VT_QFLOAT) ? VT_DOUBLE : VT_PTRDIFF_T;
            vtop[-1].type.t = load_type;
            store(r, vtop - 1);
            vswap();
            /* convert to int to increment easily */
            vtop->type.t = VT_PTRDIFF_T;
            gaddrof();
            vpushs(PTR_SIZE);
            gen_op('+');
            vtop->r |= VT_LVAL;
            vswap();
            vtop[-1].type.t = load_type;
            /* XXX: it works because r2 is spilled last ! */
            store(vtop->r2, vtop - 1);
        } else {
            /* single word */
            store(r, vtop - 1);
        }
        vswap();
        vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
    }
}
3640 /* post defines POST/PRE add. c is the token ++ or -- */
3641 ST_FUNC void inc(int post, int c)
3643 test_lvalue();
3644 vdup(); /* save lvalue */
3645 if (post) {
3646 gv_dup(); /* duplicate value */
3647 vrotb(3);
3648 vrotb(3);
3650 /* add constant */
3651 vpushi(c - TOK_MID);
3652 gen_op('+');
3653 vstore(); /* store value */
3654 if (post)
3655 vpop(); /* if post op, return saved value */
3658 ST_FUNC void parse_mult_str (CString *astr, const char *msg)
3660 /* read the string */
3661 if (tok != TOK_STR)
3662 expect(msg);
3663 cstr_new(astr);
3664 while (tok == TOK_STR) {
3665 /* XXX: add \0 handling too ? */
3666 cstr_cat(astr, tokc.str.data, -1);
3667 next();
3669 cstr_ccat(astr, '\0');
3672 /* If I is >= 1 and a power of two, returns log2(i)+1.
3673 If I is 0 returns 0. */
3674 ST_FUNC int exact_log2p1(int i)
3676 int ret;
3677 if (!i)
3678 return 0;
3679 for (ret = 1; i >= 1 << 8; ret += 8)
3680 i >>= 8;
3681 if (i >= 1 << 4)
3682 ret += 4, i >>= 4;
3683 if (i >= 1 << 2)
3684 ret += 2, i >>= 2;
3685 if (i >= 1 << 1)
3686 ret++;
3687 return ret;
/* Parse __attribute__((...)) GNUC extension. Consumes any number of
   consecutive attribute groups and accumulates their effects into *ad.
   Unknown attributes are warned about and their parameters skipped. */
static void parse_attribute(AttributeDef *ad)
{
    int t, n;
    CString astr;

redo:
    /* several __attribute__((..)) groups may follow each other; one
       group is handled per pass, then we loop via 'goto redo' */
    if (tok != TOK_ATTRIBUTE1 && tok != TOK_ATTRIBUTE2)
        return;
    next();
    skip('(');
    skip('(');
    while (tok != ')') {
        if (tok < TOK_IDENT)
            expect("attribute name");
        t = tok;
        next();
        switch(t) {
        case TOK_CLEANUP1:
        case TOK_CLEANUP2:
        {
            Sym *s;

            skip('(');
            s = sym_find(tok);
            if (!s) {
                tcc_warning_c(warn_implicit_function_declaration)(
                    "implicit declaration of function '%s'", get_tok_str(tok, &tokc));
                s = external_global_sym(tok, &func_old_type);
            } else if ((s->type.t & VT_BTYPE) != VT_FUNC)
                tcc_error("'%s' is not declared as function", get_tok_str(tok, &tokc));
            ad->cleanup_func = s;
            next();
            skip(')');
            break;
        }
        case TOK_CONSTRUCTOR1:
        case TOK_CONSTRUCTOR2:
            ad->f.func_ctor = 1;
            break;
        case TOK_DESTRUCTOR1:
        case TOK_DESTRUCTOR2:
            ad->f.func_dtor = 1;
            break;
        case TOK_ALWAYS_INLINE1:
        case TOK_ALWAYS_INLINE2:
            ad->f.func_alwinl = 1;
            break;
        case TOK_SECTION1:
        case TOK_SECTION2:
            skip('(');
            parse_mult_str(&astr, "section name");
            ad->section = find_section(tcc_state, (char *)astr.data);
            skip(')');
            cstr_free(&astr);
            break;
        case TOK_ALIAS1:
        case TOK_ALIAS2:
            skip('(');
            parse_mult_str(&astr, "alias(\"target\")");
            ad->alias_target = /* save string as token, for later */
                tok_alloc((char*)astr.data, astr.size-1)->tok;
            skip(')');
            cstr_free(&astr);
            break;
        case TOK_VISIBILITY1:
        case TOK_VISIBILITY2:
            skip('(');
            parse_mult_str(&astr,
                           "visibility(\"default|hidden|internal|protected\")");
            if (!strcmp (astr.data, "default"))
                ad->a.visibility = STV_DEFAULT;
            else if (!strcmp (astr.data, "hidden"))
                ad->a.visibility = STV_HIDDEN;
            else if (!strcmp (astr.data, "internal"))
                ad->a.visibility = STV_INTERNAL;
            else if (!strcmp (astr.data, "protected"))
                ad->a.visibility = STV_PROTECTED;
            else
                expect("visibility(\"default|hidden|internal|protected\")");
            skip(')');
            cstr_free(&astr);
            break;
        case TOK_ALIGNED1:
        case TOK_ALIGNED2:
            if (tok == '(') {
                next();
                n = expr_const();
                if (n <= 0 || (n & (n - 1)) != 0)
                    tcc_error("alignment must be a positive power of two");
                skip(')');
            } else {
                /* aligned without argument means maximum alignment */
                n = MAX_ALIGN;
            }
            /* stored as log2(n)+1 so that 0 means "no alignment given" */
            ad->a.aligned = exact_log2p1(n);
            if (n != 1 << (ad->a.aligned - 1))
                tcc_error("alignment of %d is larger than implemented", n);
            break;
        case TOK_PACKED1:
        case TOK_PACKED2:
            ad->a.packed = 1;
            break;
        case TOK_WEAK1:
        case TOK_WEAK2:
            ad->a.weak = 1;
            break;
        case TOK_UNUSED1:
        case TOK_UNUSED2:
            /* currently, no need to handle it because tcc does not
               track unused objects */
            break;
        case TOK_NORETURN1:
        case TOK_NORETURN2:
            ad->f.func_noreturn = 1;
            break;
        case TOK_CDECL1:
        case TOK_CDECL2:
        case TOK_CDECL3:
            ad->f.func_call = FUNC_CDECL;
            break;
        case TOK_STDCALL1:
        case TOK_STDCALL2:
        case TOK_STDCALL3:
            ad->f.func_call = FUNC_STDCALL;
            break;
#ifdef TCC_TARGET_I386
        case TOK_REGPARM1:
        case TOK_REGPARM2:
            skip('(');
            n = expr_const();
            /* clamp to the 0..3 registers regparm can use */
            if (n > 3)
                n = 3;
            else if (n < 0)
                n = 0;
            if (n > 0)
                ad->f.func_call = FUNC_FASTCALL1 + n - 1;
            skip(')');
            break;
        case TOK_FASTCALL1:
        case TOK_FASTCALL2:
        case TOK_FASTCALL3:
            ad->f.func_call = FUNC_FASTCALLW;
            break;
#endif
        case TOK_MODE:
            skip('(');
            /* attr_mode stores basic type + 1; 0 means "not set" */
            switch(tok) {
            case TOK_MODE_DI:
                ad->attr_mode = VT_LLONG + 1;
                break;
            case TOK_MODE_QI:
                ad->attr_mode = VT_BYTE + 1;
                break;
            case TOK_MODE_HI:
                ad->attr_mode = VT_SHORT + 1;
                break;
            case TOK_MODE_SI:
            case TOK_MODE_word:
                ad->attr_mode = VT_INT + 1;
                break;
            default:
                tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok, NULL));
                break;
            }
            next();
            skip(')');
            break;
        case TOK_DLLEXPORT:
            ad->a.dllexport = 1;
            break;
        case TOK_NODECORATE:
            ad->a.nodecorate = 1;
            break;
        case TOK_DLLIMPORT:
            ad->a.dllimport = 1;
            break;
        default:
            tcc_warning_c(warn_unsupported)("'%s' attribute ignored", get_tok_str(t, NULL));
            /* skip parameters */
            if (tok == '(') {
                int parenthesis = 0;
                do {
                    if (tok == '(')
                        parenthesis++;
                    else if (tok == ')')
                        parenthesis--;
                    next();
                } while (parenthesis && tok != -1);
            }
            break;
        }
        if (tok != ',')
            break;
        next();
    }
    skip(')');
    skip(')');
    goto redo;
}
/* Look up member token 'v' in struct/union 'type', descending into
   anonymous sub-structs/unions. On success, *cumofs is incremented by
   the byte offset of each traversed anonymous member so the caller gets
   the total offset. Returns the member Sym, or NULL if not found. */
static Sym * find_field (CType *type, int v, int *cumofs)
{
    Sym *s = type->ref;
    v |= SYM_FIELD;
    while ((s = s->next) != NULL) {
        /* anonymous struct/union member: search inside it first */
        if ((s->v & SYM_FIELD) &&
            (s->type.t & VT_BTYPE) == VT_STRUCT &&
            (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
            Sym *ret = find_field (&s->type, v, cumofs);
            if (ret) {
                *cumofs += s->c; /* add the anonymous member's own offset */
                return ret;
            }
        }
        if (s->v == v)
            break;
    }
    return s; /* NULL when the loop ran off the end of the member list */
}
/* Detect duplicate member names in a struct/union, recursing into
   anonymous sub-structs (whose members share the outer namespace).
   A scratch SYM_FIELD bit in each identifier's TokenSym is toggled to
   mark names already seen; call once with check=1 to detect duplicates,
   then again with check=0 to clear the marks. */
static void check_fields (CType *type, int check)
{
    Sym *s = type->ref;

    while ((s = s->next) != NULL) {
        int v = s->v & ~SYM_FIELD;
        if (v < SYM_FIRST_ANOM) {
            TokenSym *ts = table_ident[v - TOK_IDENT];
            if (check && (ts->tok & SYM_FIELD))
                tcc_error("duplicate member '%s'", get_tok_str(v, NULL));
            /* toggle: set on the check pass, cleared on the second pass */
            ts->tok ^= SYM_FIELD;
        } else if ((s->type.t & VT_BTYPE) == VT_STRUCT)
            check_fields (&s->type, check);
    }
}
/* Compute offset and alignment of every member of a struct/union plus
   the overall size/alignment, honoring PCC(GCC)- vs MS-style bit-field
   rules, #pragma pack and aligned/packed attributes. Writes f->c
   (offset) per member and type->ref->c / type->ref->r (size/align).
   A second pass decides how each bit-field is actually accessed. */
static void struct_layout(CType *type, AttributeDef *ad)
{
    int size, align, maxalign, offset, c, bit_pos, bit_size;
    int packed, a, bt, prevbt, prev_bit_size;
    int pcc = !tcc_state->ms_bitfields;
    int pragma_pack = *tcc_state->pack_stack_ptr;
    Sym *f;

    maxalign = 1;
    offset = 0;
    c = 0;          /* running byte size */
    bit_pos = 0;    /* bit offset within the current bit-field run */
    prevbt = VT_STRUCT; /* make it never match */
    prev_bit_size = 0;

//#define BF_DEBUG

    for (f = type->ref->next; f; f = f->next) {
        if (f->type.t & VT_BITFIELD)
            bit_size = BIT_SIZE(f->type.t);
        else
            bit_size = -1; /* not a bit-field */
        size = type_size(&f->type, &align);
        a = f->a.aligned ? 1 << (f->a.aligned - 1) : 0;
        packed = 0;

        if (pcc && bit_size == 0) {
            /* in pcc mode, packing does not affect zero-width bitfields */

        } else {
            /* in pcc mode, attribute packed overrides if set. */
            if (pcc && (f->a.packed || ad->a.packed))
                align = packed = 1;

            /* pragma pack overrides align if lesser and packs bitfields always */
            if (pragma_pack) {
                packed = 1;
                if (pragma_pack < align)
                    align = pragma_pack;
                /* in pcc mode pragma pack also overrides individual align */
                if (pcc && pragma_pack < a)
                    a = 0;
            }
        }
        /* some individual align was specified */
        if (a)
            align = a;

        if (type->ref->type.t == VT_UNION) {
            /* union: all members at offset 0, size is the maximum */
            if (pcc && bit_size >= 0)
                size = (bit_size + 7) >> 3;
            offset = 0;
            if (size > c)
                c = size;

        } else if (bit_size < 0) {
            /* plain (non bit-field) struct member */
            if (pcc)
                c += (bit_pos + 7) >> 3; /* flush a pending bit-field run */
            c = (c + align - 1) & -align;
            offset = c;
            if (size > 0)
                c += size;
            bit_pos = 0;
            prevbt = VT_STRUCT;
            prev_bit_size = 0;

        } else {
            /* A bit-field.  Layout is more complicated.  There are two
               options: PCC (GCC) compatible and MS compatible */
            if (pcc) {
                /* In PCC layout a bit-field is placed adjacent to the
                   preceding bit-fields, except if:
                   - it has zero-width
                   - an individual alignment was given
                   - it would overflow its base type container and
                     there is no packing */
                if (bit_size == 0) {
            new_field:
                    c = (c + ((bit_pos + 7) >> 3) + align - 1) & -align;
                    bit_pos = 0;
                } else if (f->a.aligned) {
                    goto new_field;
                } else if (!packed) {
                    int a8 = align * 8;
                    int ofs = ((c * 8 + bit_pos) % a8 + bit_size + a8 - 1) / a8;
                    if (ofs > size / align)
                        goto new_field; /* would overflow its container */
                }

                /* in pcc mode, long long bitfields have type int if they fit */
                if (size == 8 && bit_size <= 32)
                    f->type.t = (f->type.t & ~VT_BTYPE) | VT_INT, size = 4;

                while (bit_pos >= align * 8)
                    c += align, bit_pos -= align * 8;
                offset = c;

                /* In PCC layout named bit-fields influence the alignment
                   of the containing struct using the base types alignment,
                   except for packed fields (which here have correct align). */
                if (f->v & SYM_FIRST_ANOM
                    // && bit_size // ??? gcc on ARM/rpi does that
                    )
                    align = 1;

            } else {
                bt = f->type.t & VT_BTYPE;
                if ((bit_pos + bit_size > size * 8)
                    || (bit_size > 0) == (bt != prevbt)
                    ) {
                    c = (c + align - 1) & -align;
                    offset = c;
                    bit_pos = 0;
                    /* In MS bitfield mode a bit-field run always uses
                       at least as many bits as the underlying type.
                       To start a new run it's also required that this
                       or the last bit-field had non-zero width. */
                    if (bit_size || prev_bit_size)
                        c += size;
                }
                /* In MS layout the records alignment is normally
                   influenced by the field, except for a zero-width
                   field at the start of a run (but by further zero-width
                   fields it is again). */
                if (bit_size == 0 && prevbt != bt)
                    align = 1;
                prevbt = bt;
                prev_bit_size = bit_size;
            }

            /* store the bit position into the type word */
            f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
                        | (bit_pos << VT_STRUCT_SHIFT);
            bit_pos += bit_size;
        }
        if (align > maxalign)
            maxalign = align;

#ifdef BF_DEBUG
        printf("set field %s offset %-2d size %-2d align %-2d",
               get_tok_str(f->v & ~SYM_FIELD, NULL), offset, size, align);
        if (f->type.t & VT_BITFIELD) {
            printf(" pos %-2d bits %-2d",
                   BIT_POS(f->type.t),
                   BIT_SIZE(f->type.t)
                   );
        }
        printf("\n");
#endif

        f->c = offset;
        f->r = 0;
    }

    if (pcc)
        c += (bit_pos + 7) >> 3; /* flush trailing bit-field run */

    /* store size and alignment */
    a = bt = ad->a.aligned ? 1 << (ad->a.aligned - 1) : 1;
    if (a < maxalign)
        a = maxalign;
    type->ref->r = a;
    if (pragma_pack && pragma_pack < maxalign && 0 == pcc) {
        /* can happen if individual align for some member was given.  In
           this case MSVC ignores maxalign when aligning the size */
        a = pragma_pack;
        if (a < bt)
            a = bt;
    }
    c = (c + a - 1) & -a;
    type->ref->c = c;

#ifdef BF_DEBUG
    printf("struct size %-2d align %-2d\n\n", c, a), fflush(stdout);
#endif

    /* check whether we can access bitfields by their type */
    for (f = type->ref->next; f; f = f->next) {
        int s, px, cx, c0;
        CType t;

        if (0 == (f->type.t & VT_BITFIELD))
            continue;
        f->type.ref = f;
        f->auxtype = -1;
        bit_size = BIT_SIZE(f->type.t);
        if (bit_size == 0)
            continue;
        bit_pos = BIT_POS(f->type.t);
        size = type_size(&f->type, &align);

        /* the declared type covers the whole field and stays in bounds:
           access it directly */
        if (bit_pos + bit_size <= size * 8 && f->c + size <= c
#ifdef TCC_TARGET_ARM
            && !(f->c & (align - 1))
#endif
            )
            continue;

        /* try to access the field using a different type */
        c0 = -1, s = align = 1;
        t.t = VT_BYTE;
        for (;;) {
            /* grow the access type until the byte offset stabilizes */
            px = f->c * 8 + bit_pos;
            cx = (px >> 3) & -align;
            px = px - (cx << 3);
            if (c0 == cx)
                break;
            s = (px + bit_size + 7) >> 3;
            if (s > 4) {
                t.t = VT_LLONG;
            } else if (s > 2) {
                t.t = VT_INT;
            } else if (s > 1) {
                t.t = VT_SHORT;
            } else {
                t.t = VT_BYTE;
            }
            s = type_size(&t, &align);
            c0 = cx;
        }

        if (px + bit_size <= s * 8 && cx + s <= c
#ifdef TCC_TARGET_ARM
            && !(cx & (align - 1))
#endif
            ) {
            /* update offset and bit position */
            f->c = cx;
            bit_pos = px;
            f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
                        | (bit_pos << VT_STRUCT_SHIFT);
            if (s != size)
                f->auxtype = t.t;
#ifdef BF_DEBUG
            printf("FIX field %s offset %-2d size %-2d align %-2d "
                   "pos %-2d bits %-2d\n",
                   get_tok_str(f->v & ~SYM_FIELD, NULL),
                   cx, s, align, px, bit_size);
#endif
        } else {
            /* fall back to load/store single-byte wise */
            f->auxtype = VT_STRUCT;
#ifdef BF_DEBUG
            printf("FIX field %s : load byte-wise\n",
                   get_tok_str(f->v & ~SYM_FIELD, NULL));
#endif
        }
    }
}
/* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION.
   Parses an optional tag, an optional member/enumerator list, and
   trailing attributes; fills *type with the resulting type. */
static void struct_decl(CType *type, int u)
{
    int v, c, size, align, flexible;
    int bit_size, bsize, bt;
    Sym *s, *ss, **ps;
    AttributeDef ad, ad1;
    CType type1, btype;

    memset(&ad, 0, sizeof ad);
    next();
    parse_attribute(&ad);
    if (tok != '{') {
        v = tok;
        next();
        /* struct already defined ? return it */
        if (v < TOK_IDENT)
            expect("struct/union/enum name");
        s = struct_find(v);
        if (s && (s->sym_scope == local_scope || tok != '{')) {
            if (u == s->type.t)
                goto do_decl;
            if (u == VT_ENUM && IS_ENUM(s->type.t))
                goto do_decl;
            tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
        }
    } else {
        /* untagged type: give it an anonymous symbol */
        v = anon_sym++;
    }
    /* Record the original enum/struct/union token.  */
    type1.t = u == VT_ENUM ? u | VT_INT | VT_UNSIGNED : u;
    type1.ref = NULL;
    /* we put an undefined size for struct/union */
    s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
    s->r = 0; /* default alignment is zero as gcc */
do_decl:
    type->t = s->type.t;
    type->ref = s;

    if (tok == '{') {
        next();
        if (s->c != -1)
            tcc_error("struct/union/enum already defined");
        s->c = -2; /* mark "definition in progress" */
        /* cannot be empty */
        /* non empty enums are not allowed */
        ps = &s->next;
        if (u == VT_ENUM) {
            /* ll: current value, pl/nl: max/min values seen */
            long long ll = 0, pl = 0, nl = 0;
            CType t;
            t.ref = s;
            /* enum symbols have static storage */
            t.t = VT_INT|VT_STATIC|VT_ENUM_VAL;
            for(;;) {
                v = tok;
                if (v < TOK_UIDENT)
                    expect("identifier");
                ss = sym_find(v);
                if (ss && !local_stack)
                    tcc_error("redefinition of enumerator '%s'",
                              get_tok_str(v, NULL));
                next();
                if (tok == '=') {
                    next();
                    ll = expr_const64();
                }
                ss = sym_push(v, &t, VT_CONST, 0);
                ss->enum_val = ll;
                *ps = ss, ps = &ss->next;
                if (ll < nl)
                    nl = ll;
                if (ll > pl)
                    pl = ll;
                if (tok != ',')
                    break;
                next();
                ll++;
                /* NOTE: we accept a trailing comma */
                if (tok == '}')
                    break;
            }
            skip('}');
            /* set integral type of the enum, from the value range */
            t.t = VT_INT;
            if (nl >= 0) {
                if (pl != (unsigned)pl)
                    t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
                t.t |= VT_UNSIGNED;
            } else if (pl != (int)pl || nl != (int)nl)
                t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
            s->type.t = type->t = t.t | VT_ENUM;
            s->c = 0;
            /* set type for enum members */
            for (ss = s->next; ss; ss = ss->next) {
                ll = ss->enum_val;
                if (ll == (int)ll) /* default is int if it fits */
                    continue;
                if (t.t & VT_UNSIGNED) {
                    ss->type.t |= VT_UNSIGNED;
                    if (ll == (unsigned)ll)
                        continue;
                }
                ss->type.t = (ss->type.t & ~VT_BTYPE)
                    | (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
            }
        } else {
            /* struct/union member list */
            c = 0;        /* set once a real (named or struct) field is seen */
            flexible = 0; /* set when a flexible array member was parsed */
            while (tok != '}') {
                if (!parse_btype(&btype, &ad1, 0)) {
                    skip(';');
                    continue;
                }
                while (1) {
                    if (flexible)
                        tcc_error("flexible array member '%s' not at the end of struct",
                                  get_tok_str(v, NULL));
                    bit_size = -1;
                    v = 0;
                    type1 = btype;
                    if (tok != ':') {
                        if (tok != ';')
                            type_decl(&type1, &ad1, &v, TYPE_DIRECT);
                        if (v == 0) {
                            if ((type1.t & VT_BTYPE) != VT_STRUCT)
                                expect("identifier");
                            else {
                                int v = btype.ref->v;
                                if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
                                    /* named (non-anonymous) struct without declarator:
                                       only valid with MS extensions */
                                    if (tcc_state->ms_extensions == 0)
                                        expect("identifier");
                                }
                            }
                        }
                        if (type_size(&type1, &align) < 0) {
                            if ((u == VT_STRUCT) && (type1.t & VT_ARRAY) && c)
                                flexible = 1;
                            else
                                tcc_error("field '%s' has incomplete type",
                                          get_tok_str(v, NULL));
                        }
                        if ((type1.t & VT_BTYPE) == VT_FUNC ||
                            (type1.t & VT_BTYPE) == VT_VOID ||
                            (type1.t & VT_STORAGE))
                            tcc_error("invalid type for '%s'",
                                      get_tok_str(v, NULL));
                    }
                    if (tok == ':') {
                        next();
                        bit_size = expr_const();
                        /* XXX: handle v = 0 case for messages */
                        if (bit_size < 0)
                            tcc_error("negative width in bit-field '%s'",
                                      get_tok_str(v, NULL));
                        if (v && bit_size == 0)
                            tcc_error("zero width for bit-field '%s'",
                                      get_tok_str(v, NULL));
                        parse_attribute(&ad1);
                    }
                    size = type_size(&type1, &align);
                    if (bit_size >= 0) {
                        bt = type1.t & VT_BTYPE;
                        if (bt != VT_INT &&
                            bt != VT_BYTE &&
                            bt != VT_SHORT &&
                            bt != VT_BOOL &&
                            bt != VT_LLONG)
                            tcc_error("bitfields must have scalar type");
                        bsize = size * 8;
                        if (bit_size > bsize) {
                            tcc_error("width of '%s' exceeds its type",
                                      get_tok_str(v, NULL));
                        } else if (bit_size == bsize
                                   && !ad.a.packed && !ad1.a.packed) {
                            /* no need for bit fields */
                            ;
                        } else if (bit_size == 64) {
                            tcc_error("field width 64 not implemented");
                        } else {
                            /* encode bit width in the type word */
                            type1.t = (type1.t & ~VT_STRUCT_MASK)
                                | VT_BITFIELD
                                | (bit_size << (VT_STRUCT_SHIFT + 6));
                        }
                    }
                    if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
                        /* Remember we've seen a real field to check
                           for placement of flexible array member. */
                        c = 1;
                    }
                    /* If member is a struct or bit-field, enforce
                       placing into the struct (as anonymous).  */
                    if (v == 0 &&
                        ((type1.t & VT_BTYPE) == VT_STRUCT ||
                         bit_size >= 0)) {
                        v = anon_sym++;
                    }
                    if (v) {
                        ss = sym_push(v | SYM_FIELD, &type1, 0, 0);
                        ss->a = ad1.a;
                        *ps = ss;
                        ps = &ss->next;
                    }
                    if (tok == ';' || tok == TOK_EOF)
                        break;
                    skip(',');
                }
                skip(';');
            }
            skip('}');
            parse_attribute(&ad);
            if (ad.cleanup_func) {
                tcc_warning("attribute '__cleanup__' ignored on type");
            }
            check_fields(type, 1);  /* detect duplicate members... */
            check_fields(type, 0);  /* ...then clear the marks */
            struct_layout(type, &ad);
            if (debug_modes)
                tcc_debug_fix_anon(tcc_state, type);
        }
    }
}
4397 static void sym_to_attr(AttributeDef *ad, Sym *s)
4399 merge_symattr(&ad->a, &s->a);
4400 merge_funcattr(&ad->f, &s->f);
4403 /* Add type qualifiers to a type. If the type is an array then the qualifiers
4404 are added to the element type, copied because it could be a typedef. */
4405 static void parse_btype_qualify(CType *type, int qualifiers)
4407 while (type->t & VT_ARRAY) {
4408 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
4409 type = &type->ref->type;
4411 type->t |= qualifiers;
/* return 0 if no type declaration. otherwise, return the basic type
   and skip it.
   't' accumulates the type bits; 'bt'/'st' track the basic and
   size-modifier specifiers already seen (-1 = none, -2 = from typedef)
   to diagnose invalid combinations. 'ignore_label' makes an identifier
   followed by ':' be treated as a label, not a typedef name. */
static int parse_btype(CType *type, AttributeDef *ad, int ignore_label)
{
    int t, u, bt, st, type_found, typespec_found, g, n;
    Sym *s;
    CType type1;

    memset(ad, 0, sizeof(AttributeDef));
    type_found = 0;
    typespec_found = 0;
    t = VT_INT;
    bt = st = -1;
    type->ref = NULL;

    while(1) {
        switch(tok) {
        case TOK_EXTENSION:
            /* currently, we really ignore extension */
            next();
            continue;

            /* basic types */
        case TOK_CHAR:
            u = VT_BYTE;
        basic_type:
            next();
        basic_type1:
            if (u == VT_SHORT || u == VT_LONG) {
                /* size modifiers may only combine with int */
                if (st != -1 || (bt != -1 && bt != VT_INT))
           tmbt: tcc_error("too many basic types");
                st = u;
            } else {
                if (bt != -1 || (st != -1 && u != VT_INT))
                    goto tmbt;
                bt = u;
            }
            if (u != VT_INT)
                t = (t & ~(VT_BTYPE|VT_LONG)) | u;
            typespec_found = 1;
            break;
        case TOK_VOID:
            u = VT_VOID;
            goto basic_type;
        case TOK_SHORT:
            u = VT_SHORT;
            goto basic_type;
        case TOK_INT:
            u = VT_INT;
            goto basic_type;
        case TOK_ALIGNAS:
            /* _Alignas(type) or _Alignas(const-expr) */
            { int n;
              AttributeDef ad1;
              next();
              skip('(');
              memset(&ad1, 0, sizeof(AttributeDef));
              if (parse_btype(&type1, &ad1, 0)) {
                  type_decl(&type1, &ad1, &n, TYPE_ABSTRACT);
                  if (ad1.a.aligned)
                      n = 1 << (ad1.a.aligned - 1);
                  else
                      type_size(&type1, &n);
              } else {
                  n = expr_const();
                  if (n < 0 || (n & (n - 1)) != 0)
                      tcc_error("alignment must be a positive power of two");
              }
              skip(')');
              ad->a.aligned = exact_log2p1(n);
            }
            continue;
        case TOK_LONG:
            if ((t & VT_BTYPE) == VT_DOUBLE) {
                t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
            } else if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
                /* second 'long': promote to long long */
                t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LLONG;
            } else {
                u = VT_LONG;
                goto basic_type;
            }
            next();
            break;
#ifdef TCC_TARGET_ARM64
        case TOK_UINT128:
            /* GCC's __uint128_t appears in some Linux header files. Make it a
               synonym for long double to get the size and alignment right. */
            u = VT_LDOUBLE;
            goto basic_type;
#endif
        case TOK_BOOL:
            u = VT_BOOL;
            goto basic_type;
        case TOK_FLOAT:
            u = VT_FLOAT;
            goto basic_type;
        case TOK_DOUBLE:
            if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
                t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
            } else {
                u = VT_DOUBLE;
                goto basic_type;
            }
            next();
            break;
        case TOK_ENUM:
            struct_decl(&type1, VT_ENUM);
        basic_type2:
            u = type1.t;
            type->ref = type1.ref;
            goto basic_type1;
        case TOK_STRUCT:
            struct_decl(&type1, VT_STRUCT);
            goto basic_type2;
        case TOK_UNION:
            struct_decl(&type1, VT_UNION);
            goto basic_type2;

            /* type modifiers */
        case TOK__Atomic:
            next();
            type->t = t;
            parse_btype_qualify(type, VT_ATOMIC);
            t = type->t;
            if (tok == '(') {
                /* _Atomic(type) form */
                parse_expr_type(&type1);
                /* remove all storage modifiers except typedef */
                type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
                if (type1.ref)
                    sym_to_attr(ad, type1.ref);
                goto basic_type2;
            }
            break;
        case TOK_CONST1:
        case TOK_CONST2:
        case TOK_CONST3:
            type->t = t;
            parse_btype_qualify(type, VT_CONSTANT);
            t = type->t;
            next();
            break;
        case TOK_VOLATILE1:
        case TOK_VOLATILE2:
        case TOK_VOLATILE3:
            type->t = t;
            parse_btype_qualify(type, VT_VOLATILE);
            t = type->t;
            next();
            break;
        case TOK_SIGNED1:
        case TOK_SIGNED2:
        case TOK_SIGNED3:
            if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
                tcc_error("signed and unsigned modifier");
            t |= VT_DEFSIGN;
            next();
            typespec_found = 1;
            break;
        case TOK_REGISTER:
        case TOK_AUTO:
        case TOK_RESTRICT1:
        case TOK_RESTRICT2:
        case TOK_RESTRICT3:
            /* ignored storage/qualifier keywords */
            next();
            break;
        case TOK_UNSIGNED:
            if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
                tcc_error("signed and unsigned modifier");
            t |= VT_DEFSIGN | VT_UNSIGNED;
            next();
            typespec_found = 1;
            break;

            /* storage */
        case TOK_EXTERN:
            g = VT_EXTERN;
            goto storage;
        case TOK_STATIC:
            g = VT_STATIC;
            goto storage;
        case TOK_TYPEDEF:
            g = VT_TYPEDEF;
            goto storage;
        storage:
            if (t & (VT_EXTERN|VT_STATIC|VT_TYPEDEF) & ~g)
                tcc_error("multiple storage classes");
            t |= g;
            next();
            break;
        case TOK_INLINE1:
        case TOK_INLINE2:
        case TOK_INLINE3:
            t |= VT_INLINE;
            next();
            break;
        case TOK_NORETURN3:
            next();
            ad->f.func_noreturn = 1;
            break;
        /* GNUC attribute */
        case TOK_ATTRIBUTE1:
        case TOK_ATTRIBUTE2:
            parse_attribute(ad);
            if (ad->attr_mode) {
                /* __attribute__((mode(..))) replaces the basic type */
                u = ad->attr_mode -1;
                t = (t & ~(VT_BTYPE|VT_LONG)) | u;
            }
            continue;
        /* GNUC typeof */
        case TOK_TYPEOF1:
        case TOK_TYPEOF2:
        case TOK_TYPEOF3:
            next();
            parse_expr_type(&type1);
            /* remove all storage modifiers except typedef */
            type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
            if (type1.ref)
                sym_to_attr(ad, type1.ref);
            goto basic_type2;
        default:
            if (typespec_found)
                goto the_end;
            s = sym_find(tok);
            if (!s || !(s->type.t & VT_TYPEDEF))
                goto the_end;

            n = tok, next();
            if (tok == ':' && ignore_label) {
                /* ignore if it's a label */
                unget_tok(n);
                goto the_end;
            }

            /* apply the typedef's type, keeping accumulated qualifiers */
            t &= ~(VT_BTYPE|VT_LONG);
            u = t & ~(VT_CONSTANT | VT_VOLATILE), t ^= u;
            type->t = (s->type.t & ~VT_TYPEDEF) | u;
            type->ref = s->type.ref;
            if (t)
                parse_btype_qualify(type, t);
            t = type->t;
            /* get attributes from typedef */
            sym_to_attr(ad, s);
            typespec_found = 1;
            st = bt = -2;
            break;
        }
        type_found = 1;
    }
the_end:
    if (tcc_state->char_is_unsigned) {
        if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
            t |= VT_UNSIGNED;
    }
    /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
    bt = t & (VT_BTYPE|VT_LONG);
    if (bt == VT_LONG)
        t |= LONG_SIZE == 8 ? VT_LLONG : VT_INT;
#ifdef TCC_USING_DOUBLE_FOR_LDOUBLE
    if (bt == VT_LDOUBLE)
        t = (t & ~(VT_BTYPE|VT_LONG)) | (VT_DOUBLE|VT_LONG);
#endif
    type->t = t;
    return type_found;
}
4679 /* convert a function parameter type (array to pointer and function to
4680 function pointer) */
4681 static inline void convert_parameter_type(CType *pt)
4683 /* remove const and volatile qualifiers (XXX: const could be used
4684 to indicate a const function parameter */
4685 pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
4686 /* array must be transformed to pointer according to ANSI C */
4687 pt->t &= ~VT_ARRAY;
4688 if ((pt->t & VT_BTYPE) == VT_FUNC) {
4689 mk_pointer(pt);
/* Parse '(' followed by one or more concatenated string literals into
   *astr; the matching ')' is left for the caller to consume. */
ST_FUNC void parse_asm_str(CString *astr)
{
    skip('(');
    parse_mult_str(astr, "string constant");
}
4699 /* Parse an asm label and return the token */
4700 static int asm_label_instr(void)
4702 int v;
4703 CString astr;
4705 next();
4706 parse_asm_str(&astr);
4707 skip(')');
4708 #ifdef ASM_DEBUG
4709 printf("asm_alias: \"%s\"\n", (char *)astr.data);
4710 #endif
4711 v = tok_alloc(astr.data, astr.size - 1)->tok;
4712 cstr_free(&astr);
4713 return v;
/* Parse the "post" part of a type declarator: a function parameter list
   "(...)" or an array declarator "[...]" (including VLAs), and rewrite
   *type accordingly. Returns 0 when a '(' turned out to begin a
   recursive declarator (so the caller must handle it), 1 otherwise. */
static int post_type(CType *type, AttributeDef *ad, int storage, int td)
{
    int n, l, t1, arg_size, align, unused_align;
    Sym **plast, *s, *first;
    AttributeDef ad1;
    CType pt;
    TokenString *vla_array_tok = NULL;
    int *vla_array_str = NULL;

    if (tok == '(') {
        /* function type, or recursive declarator (return if so) */
        next();
        if (TYPE_DIRECT == (td & (TYPE_DIRECT|TYPE_ABSTRACT)))
            return 0;
        if (tok == ')')
            l = 0;
        else if (parse_btype(&pt, &ad1, 0))
            l = FUNC_NEW;           /* prototype style */
        else if (td & (TYPE_DIRECT|TYPE_ABSTRACT)) {
            merge_attr (ad, &ad1);
            return 0;
        } else
            l = FUNC_OLD;           /* K&R style */

        first = NULL;
        plast = &first;
        arg_size = 0;
        ++local_scope;
        if (l) {
            for(;;) {
                /* read param name and compute offset */
                if (l != FUNC_OLD) {
                    if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
                        break; /* "(void)": no parameters */
                    type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT | TYPE_PARAM);
                    if ((pt.t & VT_BTYPE) == VT_VOID)
                        tcc_error("parameter declared as void");
                    if (n == 0)
                        n = SYM_FIELD; /* unnamed parameter */
                } else {
                    n = tok;
                    pt.t = VT_VOID; /* invalid type */
                    pt.ref = NULL;
                    next();
                }
                if (n < TOK_UIDENT)
                    expect("identifier");
                convert_parameter_type(&pt);
                arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
                s = sym_push(n, &pt, 0, 0);
                *plast = s;
                plast = &s->next;
                if (tok == ')')
                    break;
                skip(',');
                if (l == FUNC_NEW && tok == TOK_DOTS) {
                    l = FUNC_ELLIPSIS;
                    next();
                    break;
                }
                if (l == FUNC_NEW && !parse_btype(&pt, &ad1, 0))
                    tcc_error("invalid type");
            }
        } else
            /* if no parameters, then old type prototype */
            l = FUNC_OLD;
        skip(')');
        /* remove parameter symbols from token table, keep on stack */
        if (first) {
            sym_pop(local_stack ? &local_stack : &global_stack, first->prev, 1);
            for (s = first; s; s = s->next)
                s->v |= SYM_FIELD;
        }
        --local_scope;
        /* NOTE: const is ignored in returned type as it has a special
           meaning in gcc / C++ */
        type->t &= ~VT_CONSTANT;
        /* some ancient pre-K&R C allows a function to return an array
           and the array brackets to be put after the arguments, such
           that "int c()[]" means something like "int[] c()" */
        if (tok == '[') {
            next();
            skip(']'); /* only handle simple "[]" */
            mk_pointer(type);
        }
        /* we push a anonymous symbol which will contain the function prototype */
        ad->f.func_args = arg_size;
        ad->f.func_type = l;
        s = sym_push(SYM_FIELD, type, 0, 0);
        s->a = ad->a;
        s->f = ad->f;
        s->next = first;
        type->t = VT_FUNC;
        type->ref = s;
    } else if (tok == '[') {
        int saved_nocode_wanted = nocode_wanted;
        /* array definition */
        next();
        n = -1;  /* -1: size not known yet */
        t1 = 0;  /* gets VT_VLA for variable length arrays */
        if (td & TYPE_PARAM) while (1) {
            /* XXX The optional type-quals and static should only be accepted
               in parameter decls.  The '*' as well, and then even only
               in prototypes (not function defs).  */
            switch (tok) {
            case TOK_RESTRICT1: case TOK_RESTRICT2: case TOK_RESTRICT3:
            case TOK_CONST1:
            case TOK_VOLATILE1:
            case TOK_STATIC:
            case '*':
                next();
                continue;
            default:
                break;
            }
            if (tok != ']') {
                int nest = 1;

                /* Code generation is not done now but has to be done
                   at start of function. Save code here for later use. */
                nocode_wanted = 1;
                /* collect the tokens of the size expression up to the
                   matching ']' so it can be re-evaluated later */
                vla_array_tok = tok_str_alloc();
                for (;;) {
                    if (tok == ']') {
                        nest--;
                        if (nest == 0)
                            break;
                    }
                    if (tok == '[')
                        nest++;
                    tok_str_add_tok(vla_array_tok);
                    next();
                }
                unget_tok(0);
                tok_str_add(vla_array_tok, -1);
                tok_str_add(vla_array_tok, 0);
                vla_array_str = vla_array_tok->str;
                /* replay the saved tokens to parse the expression now */
                begin_macro(vla_array_tok, 2);
                next();
                gexpr();
                end_macro();
                next();
                goto check;
            }
            break;

        } else if (tok != ']') {
            if (!local_stack || (storage & VT_STATIC))
                vpushi(expr_const());
            else {
                /* VLAs (which can only happen with local_stack && !VT_STATIC)
                   length must always be evaluated, even under nocode_wanted,
                   so that its size slot is initialized (e.g. under sizeof
                   or typeof). */
                nocode_wanted = 0;
                gexpr();
            }
check:
            if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
                n = vtop->c.i;
                if (n < 0)
                    tcc_error("invalid array size");
            } else {
                if (!is_integer_btype(vtop->type.t & VT_BTYPE))
                    tcc_error("size of variable length array should be an integer");
                n = 0;
                t1 = VT_VLA;
            }
        }
        skip(']');
        /* parse next post type */
        post_type(type, ad, storage, (td & ~(TYPE_DIRECT|TYPE_ABSTRACT)) | TYPE_NEST);

        if ((type->t & VT_BTYPE) == VT_FUNC)
            tcc_error("declaration of an array of functions");
        if ((type->t & VT_BTYPE) == VT_VOID
            || type_size(type, &unused_align) < 0)
            tcc_error("declaration of an array of incomplete type elements");

        t1 |= type->t & VT_VLA;

        if (t1 & VT_VLA) {
            if (n < 0) {
                if (td & TYPE_NEST)
                    tcc_error("need explicit inner array size in VLAs");
            }
            else {
                /* reserve a stack slot holding the runtime array size
                   and store "element count * element size" into it */
                loc -= type_size(&int_type, &align);
                loc &= -align;
                n = loc;

                vpush_type_size(type, &align);
                gen_op('*');
                vset(&int_type, VT_LOCAL|VT_LVAL, n);
                vswap();
                vstore();
            }
        }
        if (n != -1)
            vpop();
        nocode_wanted = saved_nocode_wanted;

        /* we push an anonymous symbol which will contain the array
           element type */
        s = sym_push(SYM_FIELD, type, 0, n);
        type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
        type->ref = s;

        if (vla_array_str) {
            /* keep the saved size tokens only for real VLA parameters */
            if (t1 & VT_VLA)
                s->vla_array_str = vla_array_str;
            else
                tok_str_free_str(vla_array_str);
        }
    }
    return 1;
}
4933 /* Parse a type declarator (except basic type), and return the type
4934    in 'type'. 'td' is a bitmask indicating which kind of type decl is
4935    expected. 'type' should contain the basic type. 'ad' is the
4936    attribute definition of the basic type. It can be modified by
4937    type_decl(). If this (possibly abstract) declarator is a pointer chain
4938    it returns the innermost pointed to type (equals *type, but is a different
4939    pointer), otherwise returns type itself, that's used for recursive calls. */
4940 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td)
4942 CType *post, *ret;
4943 int qualifiers, storage;
/* Storage-class bits (static/extern/...) belong to the whole declaration,
   not to the pointed-to types built below, so strip them here and OR them
   back in just before returning. */
4945 /* recursive type, remove storage bits first, apply them later again */
4946 storage = type->t & VT_STORAGE;
4947 type->t &= ~VT_STORAGE;
4948 post = ret = type;
/* Pointer chain: each '*' may be followed by any number of qualifiers
   and/or attributes; the 'redo' label loops until a non-qualifier token
   is seen. */
4950 while (tok == '*') {
4951 qualifiers = 0;
4952 redo:
4953 next();
4954 switch(tok) {
4955 case TOK__Atomic:
4956 qualifiers |= VT_ATOMIC;
4957 goto redo;
4958 case TOK_CONST1:
4959 case TOK_CONST2:
4960 case TOK_CONST3:
4961 qualifiers |= VT_CONSTANT;
4962 goto redo;
4963 case TOK_VOLATILE1:
4964 case TOK_VOLATILE2:
4965 case TOK_VOLATILE3:
4966 qualifiers |= VT_VOLATILE;
4967 goto redo;
/* 'restrict' is accepted but currently has no effect on the type */
4968 case TOK_RESTRICT1:
4969 case TOK_RESTRICT2:
4970 case TOK_RESTRICT3:
4971 goto redo;
4972 /* XXX: clarify attribute handling */
4973 case TOK_ATTRIBUTE1:
4974 case TOK_ATTRIBUTE2:
4975 parse_attribute(ad);
4976 break;
4978 mk_pointer(type);
4979 type->t |= qualifiers;
4980 if (ret == type)
4981 /* innermost pointed to type is the one for the first derivation */
4982 ret = pointed_type(type);
4985 if (tok == '(') {
4986 /* This is possibly a parameter type list for abstract declarators
4987 ('int ()'), use post_type for testing this. */
4988 if (!post_type(type, ad, 0, td)) {
4989 /* It's not, so it's a nested declarator, and the post operations
4990 apply to the innermost pointed to type (if any). */
4991 /* XXX: this is not correct to modify 'ad' at this point, but
4992 the syntax is not clear */
4993 parse_attribute(ad);
4994 post = type_decl(type, ad, v, td);
4995 skip(')');
4996 } else
4997 goto abstract;
4998 } else if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
4999 /* type identifier */
5000 *v = tok;
5001 next();
5002 } else {
5003 abstract:
/* no identifier: only valid for abstract declarators (casts, sizeof) */
5004 if (!(td & TYPE_ABSTRACT))
5005 expect("identifier");
5006 *v = 0;
/* array/function suffixes apply to the innermost declarator ('post') */
5008 post_type(post, ad, storage, td & ~(TYPE_DIRECT|TYPE_ABSTRACT));
5009 parse_attribute(ad);
5010 type->t |= storage;
5011 return ret;
5014 /* indirection with full error checking and bound check */
5015 ST_FUNC void indir(void)
5017 if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
5018 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
5019 return;
5020 expect("pointer");
5022 if (vtop->r & VT_LVAL)
5023 gv(RC_INT);
5024 vtop->type = *pointed_type(&vtop->type);
5025 /* Arrays and functions are never lvalues */
5026 if (!(vtop->type.t & (VT_ARRAY | VT_VLA))
5027 && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
5028 vtop->r |= VT_LVAL;
5029 /* if bound checking, the referenced pointer must be checked */
5030 #ifdef CONFIG_TCC_BCHECK
5031 if (tcc_state->do_bounds_check)
5032 vtop->r |= VT_MUSTBOUND;
5033 #endif
5037 /* pass a parameter to a function and do type checking and casting */
/* 'func' is the called function's symbol, 'arg' the declared parameter
   symbol (NULL once past the declared parameters).  The value to pass
   is on top of the value stack. */
5038 static void gfunc_param_typed(Sym *func, Sym *arg)
5040 int func_type;
5041 CType type;
5043 func_type = func->f.func_type;
/* Unprototyped call, or variadic call past the named parameters:
   apply the default argument promotions instead of a declared type. */
5044 if (func_type == FUNC_OLD ||
5045 (func_type == FUNC_ELLIPSIS && arg == NULL)) {
5046 /* default casting : only need to convert float to double */
5047 if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
5048 gen_cast_s(VT_DOUBLE);
5049 } else if (vtop->type.t & VT_BITFIELD) {
/* bit-fields must be widened to their declared base type */
5050 type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
5051 type.ref = vtop->type.ref;
5052 gen_cast(&type);
5053 } else if (vtop->r & VT_MUSTCAST) {
5054 force_charshort_cast();
/* Prototyped call: each argument must match a declared parameter */
5056 } else if (arg == NULL) {
5057 tcc_error("too many arguments to function");
5058 } else {
5059 type = arg->type;
5060 type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
5061 gen_assign_cast(&type);
5065 /* parse an expression and return its type without any side effect. */
5066 static void expr_type(CType *type, void (*expr_fn)(void))
5068 nocode_wanted++;
5069 expr_fn();
5070 *type = vtop->type;
5071 vpop();
5072 nocode_wanted--;
5075 /* parse an expression of the form '(type)' or '(expr)' and return its
5076 type */
5077 static void parse_expr_type(CType *type)
5079 int n;
5080 AttributeDef ad;
5082 skip('(');
5083 if (parse_btype(type, &ad, 0)) {
5084 type_decl(type, &ad, &n, TYPE_ABSTRACT);
5085 } else {
5086 expr_type(type, gexpr);
5088 skip(')');
5091 static void parse_type(CType *type)
5093 AttributeDef ad;
5094 int n;
5096 if (!parse_btype(type, &ad, 0)) {
5097 expect("type");
5099 type_decl(type, &ad, &n, TYPE_ABSTRACT);
5102 static void parse_builtin_params(int nc, const char *args)
5104 char c, sep = '(';
5105 CType type;
5106 if (nc)
5107 nocode_wanted++;
5108 next();
5109 if (*args == 0)
5110 skip(sep);
5111 while ((c = *args++)) {
5112 skip(sep);
5113 sep = ',';
5114 if (c == 't') {
5115 parse_type(&type);
5116 vpush(&type);
5117 continue;
5119 expr_eq();
5120 type.ref = NULL;
5121 type.t = 0;
5122 switch (c) {
5123 case 'e':
5124 continue;
5125 case 'V':
5126 type.t = VT_CONSTANT;
5127 case 'v':
5128 type.t |= VT_VOID;
5129 mk_pointer (&type);
5130 break;
5131 case 'S':
5132 type.t = VT_CONSTANT;
5133 case 's':
5134 type.t |= char_type.t;
5135 mk_pointer (&type);
5136 break;
5137 case 'i':
5138 type.t = VT_INT;
5139 break;
5140 case 'l':
5141 type.t = VT_SIZE_T;
5142 break;
5143 default:
5144 break;
5146 gen_assign_cast(&type);
5148 skip(')');
5149 if (nc)
5150 nocode_wanted--;
/* Parse one of the __atomic_* builtins ('atok' is its token) and emit
   a call to the matching runtime helper (e.g. __atomic_store_4).
   Argument and return types are driven by a per-builtin template. */
5153 static void parse_atomic(int atok)
5155 int size, align, arg;
5156 CType *atom, *atom_ptr, ct = {0};
5157 char buf[40];
5158 static const char *const templates[] = {
5160 * Each entry consists of callback and function template.
5161 * The template represents argument types and return type.
5163 * ? void (return-only)
5164 * b bool
5165 * a atomic
5166 * A read-only atomic
5167 * p pointer to memory
5168 * v value
5169 * m memory model
5172 /* keep in order of appearance in tcctok.h: */
5173 /* __atomic_store */ "avm.?",
5174 /* __atomic_load */ "Am.v",
5175 /* __atomic_exchange */ "avm.v",
5176 /* __atomic_compare_exchange */ "apvbmm.b",
5177 /* __atomic_fetch_add */ "avm.v",
5178 /* __atomic_fetch_sub */ "avm.v",
5179 /* __atomic_fetch_or */ "avm.v",
5180 /* __atomic_fetch_xor */ "avm.v",
5181 /* __atomic_fetch_and */ "avm.v"
/* templates[] is indexed by token distance from the first atomic token */
5183 const char *template = templates[(atok - TOK___atomic_store)];
5185 atom = atom_ptr = NULL;
5186 size = 0; /* pacify compiler */
5187 next();
5188 skip('(');
/* parse each argument; the template character dictates its checking */
5189 for (arg = 0;;) {
5190 expr_eq();
5191 switch (template[arg]) {
5192 case 'a':
5193 case 'A':
/* the atomic object: a pointer to a 1/2/4/8-byte scalar */
5194 atom_ptr = &vtop->type;
5195 if ((atom_ptr->t & VT_BTYPE) != VT_PTR)
5196 expect("pointer");
5197 atom = pointed_type(atom_ptr);
5198 size = type_size(atom, &align);
5199 if (size > 8
5200 || (size & (size - 1))
5201 || (atok > TOK___atomic_compare_exchange
5202 && (0 == btype_size(atom->t & VT_BTYPE)
5203 || (atom->t & VT_BTYPE) == VT_PTR)))
5204 expect("integral or integer-sized pointer target type");
5205 /* GCC does not care either: */
5206 /* if (!(atom->t & VT_ATOMIC))
5207 tcc_warning("pointer target declaration is missing '_Atomic'"); */
5208 break;
5210 case 'p':
/* second pointer (expected value): must match the atomic's size */
5211 if ((vtop->type.t & VT_BTYPE) != VT_PTR
5212 || type_size(pointed_type(&vtop->type), &align) != size)
5213 tcc_error("pointer target type mismatch in argument %d", arg + 1);
5214 gen_assign_cast(atom_ptr);
5215 break;
5216 case 'v':
5217 gen_assign_cast(atom);
5218 break;
5219 case 'm':
/* memory-order argument: plain int */
5220 gen_assign_cast(&int_type);
5221 break;
5222 case 'b':
5223 ct.t = VT_BOOL;
5224 gen_assign_cast(&ct);
5225 break;
/* '.' in the template separates arguments from the return type */
5227 if ('.' == template[++arg])
5228 break;
5229 skip(',');
5231 skip(')');
/* determine the helper's return type from the template tail */
5233 ct.t = VT_VOID;
5234 switch (template[arg + 1]) {
5235 case 'b':
5236 ct.t = VT_BOOL;
5237 break;
5238 case 'v':
5239 ct = *atom;
5240 break;
/* call the size-specific runtime helper, e.g. "__atomic_load_4" */
5243 sprintf(buf, "%s_%d", get_tok_str(atok, 0), size);
5244 vpush_helper_func(tok_alloc_const(buf));
5245 vrott(arg + 1);
5246 gfunc_call(arg);
5248 vpush(&ct);
5249 PUT_R_RET(vtop, ct.t);
/* bool results may need promotion depending on the target ABI */
5250 if (ct.t == VT_BOOL) {
5251 #ifdef PROMOTE_RET
5252 vtop->r |= BFVAL(VT_MUSTCAST, 1);
5253 #else
5254 vtop->type.t = VT_INT;
5255 #endif
/* Parse a unary-expression (plus primary expressions and all postfix
   operators: ++/--, member access, indexing, function calls) and leave
   the resulting value on the value stack. */
5259 ST_FUNC void unary(void)
5261 int n, t, align, size, r, sizeof_caller;
5262 CType type;
5263 Sym *s;
5264 AttributeDef ad;
5266 /* generate line number info */
5267 if (debug_modes)
5268 tcc_debug_line(tcc_state), tcc_tcov_check_line (tcc_state, 1);
/* remember whether we were invoked from sizeof/alignof; reset the flag
   so nested sub-expressions are not treated as sizeof operands */
5270 sizeof_caller = in_sizeof;
5271 in_sizeof = 0;
5272 type.ref = NULL;
5273 /* XXX: GCC 2.95.3 does not generate a table although it should be
5274 better here */
5275 tok_next:
5276 switch(tok) {
5277 case TOK_EXTENSION:
5278 next();
5279 goto tok_next;
/* ---- constants: push the token's value with the proper type ---- */
5280 case TOK_LCHAR:
5281 #ifdef TCC_TARGET_PE
5282 t = VT_SHORT|VT_UNSIGNED;
5283 goto push_tokc;
5284 #endif
5285 case TOK_CINT:
5286 case TOK_CCHAR:
5287 t = VT_INT;
5288 push_tokc:
5289 type.t = t;
5290 vsetc(&type, VT_CONST, &tokc);
5291 next();
5292 break;
5293 case TOK_CUINT:
5294 t = VT_INT | VT_UNSIGNED;
5295 goto push_tokc;
5296 case TOK_CLLONG:
5297 t = VT_LLONG;
5298 goto push_tokc;
5299 case TOK_CULLONG:
5300 t = VT_LLONG | VT_UNSIGNED;
5301 goto push_tokc;
5302 case TOK_CFLOAT:
5303 t = VT_FLOAT;
5304 goto push_tokc;
5305 case TOK_CDOUBLE:
5306 t = VT_DOUBLE;
5307 goto push_tokc;
5308 case TOK_CLDOUBLE:
5309 t = VT_LDOUBLE;
5310 goto push_tokc;
5311 case TOK_CLONG:
5312 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG;
5313 goto push_tokc;
5314 case TOK_CULONG:
5315 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG | VT_UNSIGNED;
5316 goto push_tokc;
/* ---- __func__ / __FUNCTION__: materialize the function name as a
   static char array in the rodata section ---- */
5317 case TOK___FUNCTION__:
5318 if (!gnu_ext)
5319 goto tok_identifier;
5320 /* fall thru */
5321 case TOK___FUNC__:
5323 Section *sec;
5324 int len;
5325 /* special function name identifier */
5326 len = strlen(funcname) + 1;
5327 /* generate char[len] type */
5328 type.t = char_type.t;
5329 if (tcc_state->warn_write_strings & WARN_ON)
5330 type.t |= VT_CONSTANT;
5331 mk_pointer(&type);
5332 type.t |= VT_ARRAY;
5333 type.ref->c = len;
5334 sec = rodata_section;
5335 vpush_ref(&type, sec, sec->data_offset, len);
5336 if (!NODATA_WANTED)
5337 memcpy(section_ptr_add(sec, len), funcname, len);
5338 next();
5340 break;
/* ---- string literals ---- */
5341 case TOK_LSTR:
5342 #ifdef TCC_TARGET_PE
5343 t = VT_SHORT | VT_UNSIGNED;
5344 #else
5345 t = VT_INT;
5346 #endif
5347 goto str_init;
5348 case TOK_STR:
5349 /* string parsing */
5350 t = char_type.t;
5351 str_init:
5352 if (tcc_state->warn_write_strings & WARN_ON)
5353 t |= VT_CONSTANT;
5354 type.t = t;
5355 mk_pointer(&type);
5356 type.t |= VT_ARRAY;
5357 memset(&ad, 0, sizeof(AttributeDef));
5358 ad.section = rodata_section;
5359 decl_initializer_alloc(&type, &ad, VT_CONST, 2, 0, 0);
5360 break;
/* ---- '(': cast, compound literal, statement expression or
   parenthesized expression ---- */
5361 case '(':
5362 next();
5363 /* cast ? */
5364 if (parse_btype(&type, &ad, 0)) {
5365 type_decl(&type, &ad, &n, TYPE_ABSTRACT);
5366 skip(')');
5367 /* check ISOC99 compound literal */
5368 if (tok == '{') {
5369 /* data is allocated locally by default */
5370 if (global_expr)
5371 r = VT_CONST;
5372 else
5373 r = VT_LOCAL;
5374 /* all except arrays are lvalues */
5375 if (!(type.t & VT_ARRAY))
5376 r |= VT_LVAL;
5377 memset(&ad, 0, sizeof(AttributeDef));
5378 decl_initializer_alloc(&type, &ad, r, 1, 0, 0);
5379 } else {
/* 'sizeof (type)': hand the bare type back to the sizeof case */
5380 if (sizeof_caller) {
5381 vpush(&type);
5382 return;
5384 unary();
5385 gen_cast(&type);
5387 } else if (tok == '{') {
5388 int saved_nocode_wanted = nocode_wanted;
5389 if (const_wanted && !(nocode_wanted & unevalmask))
5390 expect("constant");
5391 if (0 == local_scope)
5392 tcc_error("statement expression outside of function");
5393 /* save all registers */
5394 save_regs(0);
5395 /* statement expression : we do not accept break/continue
5396 inside as GCC does. We do retain the nocode_wanted state,
5397 as statement expressions can't ever be entered from the
5398 outside, so any reactivation of code emission (from labels
5399 or loop heads) can be disabled again after the end of it. */
5400 block(1);
5401 /* If the statement expr can be entered, then we retain the current
5402 nocode_wanted state (from e.g. a 'return 0;' in the stmt-expr).
5403 If it can't be entered then the state is that from before the
5404 statement expression. */
5405 if (saved_nocode_wanted)
5406 nocode_wanted = saved_nocode_wanted;
5407 skip(')');
5408 } else {
5409 gexpr();
5410 skip(')');
5412 break;
/* ---- prefix operators ---- */
5413 case '*':
5414 next();
5415 unary();
5416 indir();
5417 break;
5418 case '&':
5419 next();
5420 unary();
5421 /* functions names must be treated as function pointers,
5422 except for unary '&' and sizeof. Since we consider that
5423 functions are not lvalues, we only have to handle it
5424 there and in function calls. */
5425 /* arrays can also be used although they are not lvalues */
5426 if ((vtop->type.t & VT_BTYPE) != VT_FUNC &&
5427 !(vtop->type.t & VT_ARRAY))
5428 test_lvalue();
5429 if (vtop->sym)
5430 vtop->sym->a.addrtaken = 1;
5431 mk_pointer(&vtop->type);
5432 gaddrof();
5433 break;
5434 case '!':
5435 next();
5436 unary();
5437 gen_test_zero(TOK_EQ);
5438 break;
5439 case '~':
/* bitwise-not implemented as XOR with all-ones */
5440 next();
5441 unary();
5442 vpushi(-1);
5443 gen_op('^');
5444 break;
5445 case '+':
5446 next();
5447 unary();
5448 if ((vtop->type.t & VT_BTYPE) == VT_PTR)
5449 tcc_error("pointer not accepted for unary plus");
5450 /* In order to force cast, we add zero, except for floating point
5451 where we really need an noop (otherwise -0.0 will be transformed
5452 into +0.0). */
5453 if (!is_float(vtop->type.t)) {
5454 vpushi(0);
5455 gen_op('+');
5457 break;
/* ---- sizeof / _Alignof ---- */
5458 case TOK_SIZEOF:
5459 case TOK_ALIGNOF1:
5460 case TOK_ALIGNOF2:
5461 case TOK_ALIGNOF3:
5462 t = tok;
5463 next();
5464 in_sizeof++;
5465 expr_type(&type, unary); /* Perform a in_sizeof = 0; */
5466 if (t == TOK_SIZEOF) {
5467 vpush_type_size(&type, &align);
5468 gen_cast_s(VT_SIZE_T);
5469 } else {
5470 type_size(&type, &align);
5471 s = NULL;
5472 if (vtop[1].r & VT_SYM)
5473 s = vtop[1].sym; /* hack: accessing previous vtop */
5474 if (s && s->a.aligned)
5475 align = 1 << (s->a.aligned - 1);
5476 vpushs(align);
5478 break;
/* ---- GCC-style builtins ---- */
5480 case TOK_builtin_expect:
5481 /* __builtin_expect is a no-op for now */
5482 parse_builtin_params(0, "ee");
5483 vpop();
5484 break;
5485 case TOK_builtin_types_compatible_p:
5486 parse_builtin_params(0, "tt");
/* qualifiers are ignored for compatibility, as GCC does */
5487 vtop[-1].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
5488 vtop[0].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
5489 n = is_compatible_types(&vtop[-1].type, &vtop[0].type);
5490 vtop -= 2;
5491 vpushi(n);
5492 break;
5493 case TOK_builtin_choose_expr:
5495 int64_t c;
5496 next();
5497 skip('(');
5498 c = expr_const64();
5499 skip(',');
/* only the selected branch generates code; the other is parsed
   under nocode_wanted and discarded */
5500 if (!c) {
5501 nocode_wanted++;
5503 expr_eq();
5504 if (!c) {
5505 vpop();
5506 nocode_wanted--;
5508 skip(',');
5509 if (c) {
5510 nocode_wanted++;
5512 expr_eq();
5513 if (c) {
5514 vpop();
5515 nocode_wanted--;
5517 skip(')');
5519 break;
5520 case TOK_builtin_constant_p:
5521 parse_builtin_params(1, "e");
5522 n = (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
5523 !((vtop->r & VT_SYM) && vtop->sym->a.addrtaken);
5524 vtop--;
5525 vpushi(n);
5526 break;
5527 case TOK_builtin_frame_address:
5528 case TOK_builtin_return_address:
5530 int tok1 = tok;
5531 int level;
5532 next();
5533 skip('(');
5534 if (tok != TOK_CINT) {
5535 tcc_error("%s only takes positive integers",
5536 tok1 == TOK_builtin_return_address ?
5537 "__builtin_return_address" :
5538 "__builtin_frame_address");
5540 level = (uint32_t)tokc.i;
5541 next();
5542 skip(')');
5543 type.t = VT_VOID;
5544 mk_pointer(&type);
5545 vset(&type, VT_LOCAL, 0); /* local frame */
/* walk up 'level' saved frame pointers */
5546 while (level--) {
5547 #ifdef TCC_TARGET_RISCV64
5548 vpushi(2*PTR_SIZE);
5549 gen_op('-');
5550 #endif
5551 mk_pointer(&vtop->type);
5552 indir(); /* -> parent frame */
5554 if (tok1 == TOK_builtin_return_address) {
5555 // assume return address is just above frame pointer on stack
5556 #ifdef TCC_TARGET_ARM
5557 vpushi(2*PTR_SIZE);
5558 gen_op('+');
5559 #elif defined TCC_TARGET_RISCV64
5560 vpushi(PTR_SIZE);
5561 gen_op('-');
5562 #else
5563 vpushi(PTR_SIZE);
5564 gen_op('+');
5565 #endif
5566 mk_pointer(&vtop->type);
5567 indir();
5570 break;
/* ---- target-specific va_* builtins ---- */
5571 #ifdef TCC_TARGET_RISCV64
5572 case TOK_builtin_va_start:
5573 parse_builtin_params(0, "ee");
5574 r = vtop->r & VT_VALMASK;
5575 if (r == VT_LLOCAL)
5576 r = VT_LOCAL;
5577 if (r != VT_LOCAL)
5578 tcc_error("__builtin_va_start expects a local variable");
5579 gen_va_start();
5580 vstore();
5581 break;
5582 #endif
5583 #ifdef TCC_TARGET_X86_64
5584 #ifdef TCC_TARGET_PE
5585 case TOK_builtin_va_start:
5586 parse_builtin_params(0, "ee");
5587 r = vtop->r & VT_VALMASK;
5588 if (r == VT_LLOCAL)
5589 r = VT_LOCAL;
5590 if (r != VT_LOCAL)
5591 tcc_error("__builtin_va_start expects a local variable");
5592 vtop->r = r;
5593 vtop->type = char_pointer_type;
5594 vtop->c.i += 8;
5595 vstore();
5596 break;
5597 #else
5598 case TOK_builtin_va_arg_types:
5599 parse_builtin_params(0, "t");
5600 vpushi(classify_x86_64_va_arg(&vtop->type));
5601 vswap();
5602 vpop();
5603 break;
5604 #endif
5605 #endif
5607 #ifdef TCC_TARGET_ARM64
5608 case TOK_builtin_va_start: {
5609 parse_builtin_params(0, "ee");
5610 //xx check types
5611 gen_va_start();
5612 vpushi(0);
5613 vtop->type.t = VT_VOID;
5614 break;
5616 case TOK_builtin_va_arg: {
5617 parse_builtin_params(0, "et");
5618 type = vtop->type;
5619 vpop();
5620 //xx check types
5621 gen_va_arg(&type);
5622 vtop->type = type;
5623 break;
5625 case TOK___arm64_clear_cache: {
5626 parse_builtin_params(0, "ee");
5627 gen_clear_cache();
5628 vpushi(0);
5629 vtop->type.t = VT_VOID;
5630 break;
5632 #endif
5634 /* atomic operations */
5635 case TOK___atomic_store:
5636 case TOK___atomic_load:
5637 case TOK___atomic_exchange:
5638 case TOK___atomic_compare_exchange:
5639 case TOK___atomic_fetch_add:
5640 case TOK___atomic_fetch_sub:
5641 case TOK___atomic_fetch_or:
5642 case TOK___atomic_fetch_xor:
5643 case TOK___atomic_fetch_and:
5644 parse_atomic(tok);
5645 break;
5647 /* pre operations */
5648 case TOK_INC:
5649 case TOK_DEC:
5650 t = tok;
5651 next();
5652 unary();
5653 inc(0, t);
5654 break;
5655 case '-':
/* integer negation is done as 0 - x; floats use a real negate so
   that -0.0 is preserved */
5656 next();
5657 unary();
5658 if (is_float(vtop->type.t)) {
5659 gen_opif(TOK_NEG);
5660 } else {
5661 vpushi(0);
5662 vswap();
5663 gen_op('-');
5665 break;
5666 case TOK_LAND:
/* GNU extension: '&&label' takes the address of a label */
5667 if (!gnu_ext)
5668 goto tok_identifier;
5669 next();
5670 /* allow to take the address of a label */
5671 if (tok < TOK_UIDENT)
5672 expect("label identifier");
5673 s = label_find(tok);
5674 if (!s) {
5675 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
5676 } else {
5677 if (s->r == LABEL_DECLARED)
5678 s->r = LABEL_FORWARD;
5680 if ((s->type.t & VT_BTYPE) != VT_PTR) {
5681 s->type.t = VT_VOID;
5682 mk_pointer(&s->type);
5683 s->type.t |= VT_STATIC;
5685 vpushsym(&s->type, s);
5686 next();
5687 break;
/* ---- C11 _Generic selection ---- */
5689 case TOK_GENERIC:
5691 CType controlling_type;
5692 int has_default = 0;
5693 int has_match = 0;
5694 int learn = 0;
5695 TokenString *str = NULL;
5696 int saved_const_wanted = const_wanted;
5698 next();
5699 skip('(');
5700 const_wanted = 0;
5701 expr_type(&controlling_type, expr_eq);
5702 controlling_type.t &= ~(VT_CONSTANT | VT_VOLATILE | VT_ARRAY);
5703 if ((controlling_type.t & VT_BTYPE) == VT_FUNC)
5704 mk_pointer(&controlling_type);
5705 const_wanted = saved_const_wanted;
/* scan the associations; the selected branch is saved as a token
   string ('learn') and re-parsed after the list is consumed */
5706 for (;;) {
5707 learn = 0;
5708 skip(',');
5709 if (tok == TOK_DEFAULT) {
5710 if (has_default)
5711 tcc_error("too many 'default'");
5712 has_default = 1;
5713 if (!has_match)
5714 learn = 1;
5715 next();
5716 } else {
5717 AttributeDef ad_tmp;
5718 int itmp;
5719 CType cur_type;
5721 parse_btype(&cur_type, &ad_tmp, 0);
5722 type_decl(&cur_type, &ad_tmp, &itmp, TYPE_ABSTRACT);
5723 if (compare_types(&controlling_type, &cur_type, 0)) {
5724 if (has_match) {
5725 tcc_error("type match twice");
5727 has_match = 1;
5728 learn = 1;
5731 skip(':');
5732 if (learn) {
5733 if (str)
5734 tok_str_free(str);
5735 skip_or_save_block(&str);
5736 } else {
5737 skip_or_save_block(NULL);
5739 if (tok == ')')
5740 break;
5742 if (!str) {
5743 char buf[60];
5744 type_to_str(buf, sizeof buf, &controlling_type, NULL);
5745 tcc_error("type '%s' does not match any association", buf);
5747 begin_macro(str, 1);
5748 next();
5749 expr_eq();
5750 if (tok != TOK_EOF)
5751 expect(",");
5752 end_macro();
5753 next();
5754 break;
5756 // special qnan , snan and infinity values
5757 case TOK___NAN__:
5758 n = 0x7fc00000;
5759 special_math_val:
5760 vpushi(n);
5761 vtop->type.t = VT_FLOAT;
5762 next();
5763 break;
5764 case TOK___SNAN__:
5765 n = 0x7f800001;
5766 goto special_math_val;
5767 case TOK___INF__:
5768 n = 0x7f800000;
5769 goto special_math_val;
/* ---- plain identifier: variable, enum constant or function ---- */
5771 default:
5772 tok_identifier:
5773 t = tok;
5774 next();
5775 if (t < TOK_UIDENT)
5776 expect("identifier");
5777 s = sym_find(t);
5778 if (!s || IS_ASM_SYM(s)) {
5779 const char *name = get_tok_str(t, NULL);
5780 if (tok != '(')
5781 tcc_error("'%s' undeclared", name);
5782 /* for simple function calls, we tolerate undeclared
5783 external reference to int() function */
5784 tcc_warning_c(warn_implicit_function_declaration)(
5785 "implicit declaration of function '%s'", name);
5786 s = external_global_sym(t, &func_old_type);
5789 r = s->r;
5790 /* A symbol that has a register is a local register variable,
5791 which starts out as VT_LOCAL value. */
5792 if ((r & VT_VALMASK) < VT_CONST)
5793 r = (r & ~VT_VALMASK) | VT_LOCAL;
5795 vset(&s->type, r, s->c);
5796 /* Point to s as backpointer (even without r&VT_SYM).
5797 Will be used by at least the x86 inline asm parser for
5798 regvars. */
5799 vtop->sym = s;
5801 if (r & VT_SYM) {
5802 vtop->c.i = 0;
5803 } else if (r == VT_CONST && IS_ENUM_VAL(s->type.t)) {
5804 vtop->c.i = s->enum_val;
5806 break;
5809 /* post operations */
5810 while (1) {
5811 if (tok == TOK_INC || tok == TOK_DEC) {
5812 inc(1, tok);
5813 next();
5814 } else if (tok == '.' || tok == TOK_ARROW || tok == TOK_CDOUBLE) {
5815 int qualifiers, cumofs = 0;
5816 /* field */
5817 if (tok == TOK_ARROW)
5818 indir();
5819 qualifiers = vtop->type.t & (VT_CONSTANT | VT_VOLATILE);
5820 test_lvalue();
5821 gaddrof();
5822 /* expect pointer on structure */
5823 if ((vtop->type.t & VT_BTYPE) != VT_STRUCT)
5824 expect("struct or union");
5825 if (tok == TOK_CDOUBLE)
5826 expect("field name");
5827 next();
5828 if (tok == TOK_CINT || tok == TOK_CUINT)
5829 expect("field name");
5830 s = find_field(&vtop->type, tok, &cumofs);
5831 if (!s)
5832 tcc_error("field not found: %s", get_tok_str(tok & ~SYM_FIELD, &tokc));
5833 /* add field offset to pointer */
5834 vtop->type = char_pointer_type; /* change type to 'char *' */
5835 vpushi(cumofs + s->c);
5836 gen_op('+');
5837 /* change type to field type, and set to lvalue */
5838 vtop->type = s->type;
5839 vtop->type.t |= qualifiers;
5840 /* an array is never an lvalue */
5841 if (!(vtop->type.t & VT_ARRAY)) {
5842 vtop->r |= VT_LVAL;
5843 #ifdef CONFIG_TCC_BCHECK
5844 /* if bound checking, the referenced pointer must be checked */
5845 if (tcc_state->do_bounds_check)
5846 vtop->r |= VT_MUSTBOUND;
5847 #endif
5849 next();
5850 } else if (tok == '[') {
/* indexing is pointer arithmetic plus dereference */
5851 next();
5852 gexpr();
5853 gen_op('+');
5854 indir();
5855 skip(']');
5856 } else if (tok == '(') {
5857 SValue ret;
5858 Sym *sa;
5859 int nb_args, ret_nregs, ret_align, regsize, variadic;
5861 /* function call */
5862 if ((vtop->type.t & VT_BTYPE) != VT_FUNC) {
5863 /* pointer test (no array accepted) */
5864 if ((vtop->type.t & (VT_BTYPE | VT_ARRAY)) == VT_PTR) {
5865 vtop->type = *pointed_type(&vtop->type);
5866 if ((vtop->type.t & VT_BTYPE) != VT_FUNC)
5867 goto error_func;
5868 } else {
5869 error_func:
5870 expect("function pointer");
5872 } else {
5873 vtop->r &= ~VT_LVAL; /* no lvalue */
5875 /* get return type */
5876 s = vtop->type.ref;
5877 next();
5878 sa = s->next; /* first parameter */
5879 nb_args = regsize = 0;
5880 ret.r2 = VT_CONST;
5881 /* compute first implicit argument if a structure is returned */
5882 if ((s->type.t & VT_BTYPE) == VT_STRUCT) {
5883 variadic = (s->f.func_type == FUNC_ELLIPSIS);
5884 ret_nregs = gfunc_sret(&s->type, variadic, &ret.type,
5885 &ret_align, &regsize);
5886 if (ret_nregs <= 0) {
5887 /* get some space for the returned structure */
5888 size = type_size(&s->type, &align);
5889 #ifdef TCC_TARGET_ARM64
5890 /* On arm64, a small struct is return in registers.
5891 It is much easier to write it to memory if we know
5892 that we are allowed to write some extra bytes, so
5893 round the allocated space up to a power of 2: */
5894 if (size < 16)
5895 while (size & (size - 1))
5896 size = (size | (size - 1)) + 1;
5897 #endif
5898 loc = (loc - size) & -align;
5899 ret.type = s->type;
5900 ret.r = VT_LOCAL | VT_LVAL;
5901 /* pass it as 'int' to avoid structure arg passing
5902 problems */
5903 vseti(VT_LOCAL, loc);
5904 #ifdef CONFIG_TCC_BCHECK
5905 if (tcc_state->do_bounds_check)
5906 --loc;
5907 #endif
5908 ret.c = vtop->c;
5909 if (ret_nregs < 0)
5910 vtop--;
5911 else
5912 nb_args++;
5914 } else {
5915 ret_nregs = 1;
5916 ret.type = s->type;
5919 if (ret_nregs > 0) {
5920 /* return in register */
5921 ret.c.i = 0;
5922 PUT_R_RET(&ret, ret.type.t);
/* parse and type-check each actual argument */
5924 if (tok != ')') {
5925 for(;;) {
5926 expr_eq();
5927 gfunc_param_typed(s, sa);
5928 nb_args++;
5929 if (sa)
5930 sa = sa->next;
5931 if (tok == ')')
5932 break;
5933 skip(',');
5936 if (sa)
5937 tcc_error("too few arguments to function");
5938 skip(')');
5939 gfunc_call(nb_args);
5941 if (ret_nregs < 0) {
5942 vsetc(&ret.type, ret.r, &ret.c);
5943 #ifdef TCC_TARGET_RISCV64
5944 arch_transfer_ret_regs(1);
5945 #endif
5946 } else {
5947 /* return value */
5948 for (r = ret.r + ret_nregs + !ret_nregs; r-- > ret.r;) {
5949 vsetc(&ret.type, r, &ret.c);
5950 vtop->r2 = ret.r2; /* Loop only happens when r2 is VT_CONST */
5953 /* handle packed struct return */
5954 if (((s->type.t & VT_BTYPE) == VT_STRUCT) && ret_nregs) {
5955 int addr, offset;
5957 size = type_size(&s->type, &align);
5958 /* We're writing whole regs often, make sure there's enough
5959 space. Assume register size is power of 2. */
5960 if (regsize > align)
5961 align = regsize;
5962 loc = (loc - size) & -align;
5963 addr = loc;
5964 offset = 0;
5965 for (;;) {
5966 vset(&ret.type, VT_LOCAL | VT_LVAL, addr + offset);
5967 vswap();
5968 vstore();
5969 vtop--;
5970 if (--ret_nregs == 0)
5971 break;
5972 offset += regsize;
5974 vset(&s->type, VT_LOCAL | VT_LVAL, addr);
5977 /* Promote char/short return values. This is matters only
5978 for calling function that were not compiled by TCC and
5979 only on some architectures. For those where it doesn't
5980 matter we expect things to be already promoted to int,
5981 but not larger. */
5982 t = s->type.t & VT_BTYPE;
5983 if (t == VT_BYTE || t == VT_SHORT || t == VT_BOOL) {
5984 #ifdef PROMOTE_RET
5985 vtop->r |= BFVAL(VT_MUSTCAST, 1);
5986 #else
5987 vtop->type.t = VT_INT;
5988 #endif
/* calls to noreturn functions end the current code path */
5991 if (s->f.func_noreturn) {
5992 if (debug_modes)
5993 tcc_tcov_block_end(tcc_state, -1);
5994 CODE_OFF();
5996 } else {
5997 break;
6002 #ifndef precedence_parser /* original top-down parser */
6004 static void expr_prod(void)
6006 int t;
6008 unary();
6009 while ((t = tok) == '*' || t == '/' || t == '%') {
6010 next();
6011 unary();
6012 gen_op(t);
6016 static void expr_sum(void)
6018 int t;
6020 expr_prod();
6021 while ((t = tok) == '+' || t == '-') {
6022 next();
6023 expr_prod();
6024 gen_op(t);
6028 static void expr_shift(void)
6030 int t;
6032 expr_sum();
6033 while ((t = tok) == TOK_SHL || t == TOK_SAR) {
6034 next();
6035 expr_sum();
6036 gen_op(t);
6040 static void expr_cmp(void)
6042 int t;
6044 expr_shift();
6045 while (((t = tok) >= TOK_ULE && t <= TOK_GT) ||
6046 t == TOK_ULT || t == TOK_UGE) {
6047 next();
6048 expr_shift();
6049 gen_op(t);
6053 static void expr_cmpeq(void)
6055 int t;
6057 expr_cmp();
6058 while ((t = tok) == TOK_EQ || t == TOK_NE) {
6059 next();
6060 expr_cmp();
6061 gen_op(t);
6065 static void expr_and(void)
6067 expr_cmpeq();
6068 while (tok == '&') {
6069 next();
6070 expr_cmpeq();
6071 gen_op('&');
6075 static void expr_xor(void)
6077 expr_and();
6078 while (tok == '^') {
6079 next();
6080 expr_and();
6081 gen_op('^');
6085 static void expr_or(void)
6087 expr_xor();
6088 while (tok == '|') {
6089 next();
6090 expr_xor();
6091 gen_op('|');
6095 static void expr_landor(int op);
6097 static void expr_land(void)
6099 expr_or();
6100 if (tok == TOK_LAND)
6101 expr_landor(tok);
6104 static void expr_lor(void)
6106 expr_land();
6107 if (tok == TOK_LOR)
6108 expr_landor(tok);
/* top-down parser: the operand after '&&'/'||' is the next-higher
   precedence non-terminal */
6111 # define expr_landor_next(op) op == TOK_LAND ? expr_or() : expr_land()
6112 #else /* defined precedence_parser */
/* precedence-climbing parser: parse a unary then climb to just above
   the operator's own precedence */
6113 # define expr_landor_next(op) unary(), expr_infix(precedence(op) + 1)
6114 # define expr_lor() unary(), expr_infix(1)
/* Map a binary-operator token to its precedence level (1 = lowest,
   10 = highest); returns 0 for tokens that are not binary operators. */
6116 static int precedence(int tok)
6118 switch (tok) {
6119 case TOK_LOR: return 1;
6120 case TOK_LAND: return 2;
6121 case '|': return 3;
6122 case '^': return 4;
6123 case '&': return 5;
6124 case TOK_EQ: case TOK_NE: return 6;
/* the 'relat' label lets the default case route the remaining
   relational tokens (TOK_ULE..TOK_GT) here */
6125 relat: case TOK_ULT: case TOK_UGE: return 7;
6126 case TOK_SHL: case TOK_SAR: return 8;
6127 case '+': case '-': return 9;
6128 case '*': case '/': case '%': return 10;
6129 default:
6130 if (tok >= TOK_ULE && tok <= TOK_GT)
6131 goto relat;
6132 return 0;
/* precedence lookup table for single-byte tokens, filled once at
   startup; the macro below then short-circuits precedence() for them */
6135 static unsigned char prec[256];
6136 static void init_prec(void)
6138 int i;
6139 for (i = 0; i < 256; i++)
6140 prec[i] = precedence(i);
/* NOTE: redefines precedence() for all following uses in this file */
6142 #define precedence(i) ((unsigned)i < 256 ? prec[i] : 0)
6144 static void expr_landor(int op);
/* Precedence-climbing parser for binary operators: combine operands
   while the next operator's precedence is at least 'p'.  The left
   operand is already on the value stack. */
6146 static void expr_infix(int p)
6148 int t = tok, p2;
6149 while ((p2 = precedence(t)) >= p) {
/* '&&'/'||' need short-circuit handling, done by expr_landor() */
6150 if (t == TOK_LOR || t == TOK_LAND) {
6151 expr_landor(t);
6152 } else {
6153 next();
6154 unary();
/* right operand binds tighter: recurse before applying 't' */
6155 if (precedence(tok) > p2)
6156 expr_infix(p2 + 1);
6157 gen_op(t);
6159 t = tok;
6162 #endif
6164 /* Assuming vtop is a value used in a conditional context
6165 (i.e. compared with zero) return 0 if it's false, 1 if
6166 true and -1 if it can't be statically determined. */
6167 static int condition_3way(void)
6169 int c = -1;
6170 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
6171 (!(vtop->r & VT_SYM) || !vtop->sym->a.weak)) {
6172 vdup();
6173 gen_cast_s(VT_BOOL);
6174 c = vtop->c.i;
6175 vpop();
6177 return c;
/* Parse a chain of '&&' (op == TOK_LAND) or '||' (op == TOK_LOR)
   operands with short-circuit evaluation.  'i' is the identity value
   of the operator (1 for &&, 0 for ||): once an operand is statically
   known to differ from 'i' the result is decided and the remaining
   operands are parsed under nocode_wanted ('f' set).  'cc' stays set
   while every operand was a compile-time constant; 't' accumulates
   the pending jump chain for run-time operands. */
6180 static void expr_landor(int op)
6182 int t = 0, cc = 1, f = 0, i = op == TOK_LAND, c;
6183 for(;;) {
6184 c = f ? i : condition_3way();
6185 if (c < 0)
6186 save_regs(1), cc = 0;
6187 else if (c != i)
6188 nocode_wanted++, f = 1;
6189 if (tok != op)
6190 break;
6191 if (c < 0)
6192 t = gvtst(i, t);
6193 else
6194 vpop();
6195 next();
6196 expr_landor_next(op);
/* constant (or decided) result: push i^f (i when undecided-constant,
   !i when the chain was cut short) and release nocode_wanted */
6198 if (cc || f) {
6199 vpop();
6200 vpushi(i ^ f);
6201 gsym(t);
6202 nocode_wanted -= f;
6203 } else {
/* run-time result: leave it as a comparison + jump chain */
6204 gvtst_set(i, t);
6208 static int is_cond_bool(SValue *sv)
6210 if ((sv->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST
6211 && (sv->type.t & VT_BTYPE) == VT_INT)
6212 return (unsigned)sv->c.i < 2;
6213 if (sv->r == VT_CMP)
6214 return 1;
6215 return 0;
/* Parse and generate code for a conditional expression 'a ? b : c',
   including the GNU extension 'a ?: c' (g != 0).  c is the statically
   known truth value of the condition (-1 if runtime-only); the branch
   that is statically dead is parsed with nocode_wanted raised. */
static void expr_cond(void)
{
    int tt, u, r1, r2, rc, t1, t2, islv, c, g;
    SValue sv;
    CType type;
    int ncw_prev;

    expr_lor();
    if (tok == '?') {
        next();
        c = condition_3way();
        g = (tok == ':' && gnu_ext);
        tt = 0;
        if (!g) {
            if (c < 0) {
                save_regs(1);
                tt = gvtst(1, 0); /* jump over the 'true' branch when false */
            } else {
                vpop();
            }
        } else if (c < 0) {
            /* needed to avoid having different registers saved in
               each branch */
            save_regs(1);
            gv_dup(); /* 'a ?: c' evaluates a only once; keep a copy */
            tt = gvtst(0, 0);
        }

        ncw_prev = nocode_wanted;
        if (c == 0)
            nocode_wanted++;
        if (!g)
            gexpr();

        if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
            mk_pointer(&vtop->type);
        sv = *vtop; /* save value to handle it later */
        vtop--; /* no vpop so that FP stack is not flushed */

        if (g) {
            u = tt;
        } else if (c < 0) {
            u = gjmp(0); /* jump over the 'false' branch */
            gsym(tt);
        } else
            u = 0;

        nocode_wanted = ncw_prev;
        if (c == 1)
            nocode_wanted++;
        skip(':');
        expr_cond();

        if (c < 0 && is_cond_bool(vtop) && is_cond_bool(&sv)) {
            /* optimize "if (f ? a > b : c || d) ..." for example, where normally
               "a < b" and "c || d" would be forced to "(int)0/1" first, whereas
               this code jumps directly to the if's then/else branches. */
            t1 = gvtst(0, 0);
            t2 = gjmp(0);
            gsym(u);
            vpushv(&sv);
            /* combine jump targets of 2nd op with VT_CMP of 1st op */
            gvtst_set(0, t1);
            gvtst_set(1, t2);
            nocode_wanted = ncw_prev;
            //  tcc_warning("two conditions expr_cond");
            return;
        }

        if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
            mk_pointer(&vtop->type);

        /* cast operands to correct type according to ISOC rules */
        if (!combine_types(&type, &sv, vtop, '?'))
            type_incompatibility_error(&sv.type, &vtop->type,
                "type mismatch in conditional expression (have '%s' and '%s')");
        /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
           that `(expr ? a : b).mem` does not error with "lvalue expected" */
        islv = (vtop->r & VT_LVAL) && (sv.r & VT_LVAL) && VT_STRUCT == (type.t & VT_BTYPE);

        /* now we convert second operand */
        if (c != 1) {
            gen_cast(&type);
            if (islv) {
                mk_pointer(&vtop->type);
                gaddrof();
            } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
                gaddrof();
        }

        rc = RC_TYPE(type.t);
        /* for long longs, we use fixed registers to avoid having
           to handle a complicated move */
        if (USING_TWO_WORDS(type.t))
            rc = RC_RET(type.t);

        tt = r2 = 0;
        if (c < 0) {
            r2 = gv(rc);
            tt = gjmp(0);
        }
        gsym(u);
        nocode_wanted = ncw_prev;

        /* this is horrible, but we must also convert first
           operand */
        if (c != 0) {
            *vtop = sv;
            gen_cast(&type);
            if (islv) {
                mk_pointer(&vtop->type);
                gaddrof();
            } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
                gaddrof();
        }

        if (c < 0) {
            /* both branches live: force them into the same register */
            r1 = gv(rc);
            move_reg(r2, r1, islv ? VT_PTR : type.t);
            vtop->r = r2;
            gsym(tt);
        }

        if (islv)
            indir();
    }
}
6346 static void expr_eq(void)
6348 int t;
6350 expr_cond();
6351 if ((t = tok) == '=' || TOK_ASSIGN(t)) {
6352 test_lvalue();
6353 next();
6354 if (t == '=') {
6355 expr_eq();
6356 } else {
6357 vdup();
6358 expr_eq();
6359 gen_op(TOK_ASSIGN_OP(t));
6361 vstore();
6365 ST_FUNC void gexpr(void)
6367 while (1) {
6368 expr_eq();
6369 if (tok != ',')
6370 break;
6371 vpop();
6372 next();
/* parse a constant expression and return value in vtop.  */
static void expr_const1(void)
{
    const_wanted++;
    /* raise nocode_wanted by unevalmask+1 so the expression is treated
       as unevaluated (no code, no data emitted) */
    nocode_wanted += unevalmask + 1;
    expr_cond();
    nocode_wanted -= unevalmask + 1;
    const_wanted--;
}
/* parse an integer constant and return its value. */
static inline int64_t expr_const64(void)
{
    int64_t c;
    expr_const1();
    /* the result must have folded to a plain constant */
    if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
        expect("constant expression");
    c = vtop->c.i;
    vpop();
    return c;
}
6398 /* parse an integer constant and return its value.
6399 Complain if it doesn't fit 32bit (signed or unsigned). */
6400 ST_FUNC int expr_const(void)
6402 int c;
6403 int64_t wc = expr_const64();
6404 c = wc;
6405 if (c != wc && (unsigned)c != wc)
6406 tcc_error("constant exceeds 32 bit");
6407 return c;
/* ------------------------------------------------------------------------- */
/* return from function */

#ifndef TCC_TARGET_ARM64
/* Generate code to return the value on top of the value stack from the
   current function, honoring the target's struct-return ABI
   (gfunc_sret decides: <0 target-specific regs, 0 hidden pointer,
   >0 packed into ret_nregs registers). */
static void gfunc_return(CType *func_type)
{
    if ((func_type->t & VT_BTYPE) == VT_STRUCT) {
        CType type, ret_type;
        int ret_align, ret_nregs, regsize;
        ret_nregs = gfunc_sret(func_type, func_var, &ret_type,
                               &ret_align, &regsize);
        if (ret_nregs < 0) {
#ifdef TCC_TARGET_RISCV64
            arch_transfer_ret_regs(0);
#endif
        } else if (0 == ret_nregs) {
            /* if returning structure, must copy it to implicit
               first pointer arg location */
            type = *func_type;
            mk_pointer(&type);
            vset(&type, VT_LOCAL | VT_LVAL, func_vc);
            indir();
            vswap();
            /* copy structure value to pointer */
            vstore();
        } else {
            /* returning structure packed into registers */
            int size, addr, align, rc;
            size = type_size(func_type,&align);
            /* if the value is not suitably aligned on the stack, copy it
               to a fresh, properly aligned local slot first */
            if ((vtop->r != (VT_LOCAL | VT_LVAL) ||
                 (vtop->c.i & (ret_align-1)))
                && (align & (ret_align-1))) {
                loc = (loc - size) & -ret_align;
                addr = loc;
                type = *func_type;
                vset(&type, VT_LOCAL | VT_LVAL, addr);
                vswap();
                vstore();
                vpop();
                vset(&ret_type, VT_LOCAL | VT_LVAL, addr);
            }
            vtop->type = ret_type;
            rc = RC_RET(ret_type.t);
            if (ret_nregs == 1)
                gv(rc);
            else {
                for (;;) {
                    vdup();
                    gv(rc);
                    vpop();
                    if (--ret_nregs == 0)
                        break;
                    /* We assume that when a structure is returned in multiple
                       registers, their classes are consecutive values of the
                       suite s(n) = 2^n */
                    rc <<= 1;
                    vtop->c.i += regsize;
                }
            }
        }
    } else {
        gv(RC_RET(func_type->t));
    }
    vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
}
#endif
6477 static void check_func_return(void)
6479 if ((func_vt.t & VT_BTYPE) == VT_VOID)
6480 return;
6481 if (!strcmp (funcname, "main")
6482 && (func_vt.t & VT_BTYPE) == VT_INT) {
6483 /* main returns 0 by default */
6484 vpushi(0);
6485 gen_assign_cast(&func_vt);
6486 gfunc_return(&func_vt);
6487 } else {
6488 tcc_warning("function might return no value: '%s'", funcname);
6492 /* ------------------------------------------------------------------------- */
6493 /* switch/case */
6495 static int case_cmpi(const void *pa, const void *pb)
6497 int64_t a = (*(struct case_t**) pa)->v1;
6498 int64_t b = (*(struct case_t**) pb)->v1;
6499 return a < b ? -1 : a > b;
6502 static int case_cmpu(const void *pa, const void *pb)
6504 uint64_t a = (uint64_t)(*(struct case_t**) pa)->v1;
6505 uint64_t b = (uint64_t)(*(struct case_t**) pb)->v1;
6506 return a < b ? -1 : a > b;
/* Generate a test of vtop and make the resulting jump target address a,
   symbolizing any previous chain t to the same place. */
static void gtst_addr(int t, int a)
{
    gsym_addr(gvtst(0, t), a);
}
/* Generate the dispatch code for a sorted array of switch cases.
   The switch value is on top of the value stack (and stays there);
   large sets use a binary search on the range bounds, small remainders
   a linear scan.  *bsym collects the fall-through (no match) jump. */
static void gcase(struct case_t **base, int len, int *bsym)
{
    struct case_t *p;
    int e;
    int ll = (vtop->type.t & VT_BTYPE) == VT_LLONG;
    while (len > 8) {
        /* binary search */
        p = base[len/2];
        vdup();
        if (ll)
            vpushll(p->v2);
        else
            vpushi(p->v2);
        gen_op(TOK_LE);
        e = gvtst(1, 0);
        vdup();
        if (ll)
            vpushll(p->v1);
        else
            vpushi(p->v1);
        gen_op(TOK_GE);
        gtst_addr(0, p->sym); /* v1 <= x <= v2 */
        /* x < v1 */
        gcase(base, len/2, bsym);
        /* x > v2 */
        gsym(e);
        e = len/2 + 1;
        base += e; len -= e;
    }
    /* linear scan */
    while (len--) {
        p = *base++;
        vdup();
        if (ll)
            vpushll(p->v2);
        else
            vpushi(p->v2);
        if (p->v1 == p->v2) {
            /* degenerate range: a single equality test suffices */
            gen_op(TOK_EQ);
            gtst_addr(0, p->sym);
        } else {
            gen_op(TOK_LE);
            e = gvtst(1, 0);
            vdup();
            if (ll)
                vpushll(p->v1);
            else
                vpushi(p->v1);
            gen_op(TOK_GE);
            gtst_addr(0, p->sym);
            gsym(e);
        }
    }
    *bsym = gjmp(*bsym);
}
6570 /* ------------------------------------------------------------------------- */
6571 /* __attribute__((cleanup(fn))) */
/* Emit calls to the __attribute__((cleanup)) handlers registered in the
   current scope's cleanup chain, innermost first, stopping (exclusive)
   at 'stop'.  Each chain entry links the handler function (cls->next)
   and the variable it protects (cls->prev_tok); the handler receives
   the variable's address. */
static void try_call_scope_cleanup(Sym *stop)
{
    Sym *cls = cur_scope->cl.s;

    for (; cls != stop; cls = cls->ncl) {
        Sym *fs = cls->next;
        Sym *vs = cls->prev_tok;

        vpushsym(&fs->type, fs);
        vset(&vs->type, vs->r, vs->c);
        vtop->sym = vs;
        mk_pointer(&vtop->type);
        gaddrof();
        gfunc_call(1);
    }
}
/* Emit the cleanup calls needed by a backward goto: run cleanups from
   the current scope down to (but not into) the nearest common ancestor
   of the current cleanup chain and the label's chain ('cleanupstate'). */
static void try_call_cleanup_goto(Sym *cleanupstate)
{
    Sym *oc, *cc;
    int ocd, ccd;

    if (!cur_scope->cl.s)
        return;

    /* search NCA of both cleanup chains given parents and initial depth */
    ocd = cleanupstate ? cleanupstate->v & ~SYM_FIELD : 0;
    /* walk the deeper chain up until both depths are equal */
    for (ccd = cur_scope->cl.n, oc = cleanupstate; ocd > ccd; --ocd, oc = oc->ncl)
      ;
    for (cc = cur_scope->cl.s; ccd > ocd; --ccd, cc = cc->ncl)
      ;
    /* now ascend in lockstep until the chains join */
    for (; cc != oc; cc = cc->ncl, oc = oc->ncl, --ccd)
      ;

    try_call_scope_cleanup(cc);
}
/* call 'func' for each __attribute__((cleanup(func))) */
/* Called when leaving a scope 'o': route every pending forward goto
   that crosses this scope through the scope's cleanup calls.  Gotos
   whose label turned out not to be forward (or that need no further
   cleanups) are removed from the pending list. */
static void block_cleanup(struct scope *o)
{
    int jmp = 0;
    Sym *g, **pg;
    for (pg = &pending_gotos; (g = *pg) && g->c > o->cl.n;) {
        if (g->prev_tok->r & LABEL_FORWARD) {
            Sym *pcl = g->next;
            /* skip the cleanup code for normal fall-through */
            if (!jmp)
                jmp = gjmp(0);
            gsym(pcl->jnext);
            try_call_scope_cleanup(o->cl.s);
            pcl->jnext = gjmp(0);
            if (!o->cl.n)
                goto remove_pending;
            g->c = o->cl.n;
            pg = &g->prev;
        } else {
    remove_pending:
            *pg = g->prev;
            sym_free(g);
        }
    }
    gsym(jmp);
    try_call_scope_cleanup(o->cl.s);
}
6637 /* ------------------------------------------------------------------------- */
6638 /* VLA */
/* Restore the stack pointer saved at 'loc' when leaving a VLA scope;
   loc == 0 means there is nothing to restore. */
static void vla_restore(int loc)
{
    if (loc)
        gen_vla_sp_restore(loc);
}
6646 static void vla_leave(struct scope *o)
6648 struct scope *c = cur_scope, *v = NULL;
6649 for (; c != o && c; c = c->prev)
6650 if (c->vla.num)
6651 v = c;
6652 if (v)
6653 vla_restore(v->vla.locorig);
6656 /* ------------------------------------------------------------------------- */
6657 /* local scopes */
/* Enter a new local scope 'o': it inherits the current scope's state
   (break/continue targets, cleanup chain, ...) and records the current
   local symbol and label stack positions for later rollback. */
static void new_scope(struct scope *o)
{
    /* copy and link previous scope */
    *o = *cur_scope;
    o->prev = cur_scope;
    cur_scope = o;
    cur_scope->vla.num = 0; /* no VLAs declared in this scope yet */

    /* record local declaration stack position */
    o->lstk = local_stack;
    o->llstk = local_label_stack;
    ++local_scope;

    if (debug_modes)
        tcc_debug_stabn(tcc_state, N_LBRAC, ind - func_ind);
}
/* Leave scope 'o' and return to its parent: restore VLA stack state,
   run/route cleanups added in this scope, and pop locally defined
   labels and symbols.  is_expr is true when finishing a GNU statement
   expression. */
static void prev_scope(struct scope *o, int is_expr)
{
    vla_leave(o->prev);

    if (o->cl.s != o->prev->cl.s)
        block_cleanup(o->prev);

    /* pop locally defined labels */
    label_pop(&local_label_stack, o->llstk, is_expr);

    /* In the is_expr case (a statement expression is finished here),
       vtop might refer to symbols on the local_stack.  Either via the
       type or via vtop->sym.  We can't pop those nor any that in turn
       might be referred to.  To make it easier we don't roll back
       any symbols in that case; some upper level call to block() will
       do that.  We do have to remove such symbols from the lookup
       tables, though.  sym_pop will do that.  */

    /* pop locally defined symbols */
    pop_local_syms(o->lstk, is_expr);
    cur_scope = o->prev;
    --local_scope;

    if (debug_modes)
        tcc_debug_stabn(tcc_state, N_RBRAC, ind - func_ind);
}
6703 /* leave a scope via break/continue(/goto) */
6704 static void leave_scope(struct scope *o)
6706 if (!o)
6707 return;
6708 try_call_scope_cleanup(o->cl.s);
6709 vla_leave(o);
6712 /* ------------------------------------------------------------------------- */
6713 /* call block from 'for do while' loops */
6715 static void lblock(int *bsym, int *csym)
6717 struct scope *lo = loop_scope, *co = cur_scope;
6718 int *b = co->bsym, *c = co->csym;
6719 if (csym) {
6720 co->csym = csym;
6721 loop_scope = co;
6723 co->bsym = bsym;
6724 block(0);
6725 co->bsym = b;
6726 if (csym) {
6727 co->csym = c;
6728 loop_scope = lo;
/* Parse and generate code for one statement (or, for '{', a compound
   statement).  is_expr is true inside a GNU statement expression, in
   which case the value of the last expression statement is kept on the
   value stack as the result. */
static void block(int is_expr)
{
    int a, b, c, d, e, t;
    struct scope o;
    Sym *s;

    if (is_expr) {
        /* default return value is (void) */
        vpushi(0);
        vtop->type.t = VT_VOID;
    }

again:
    t = tok;
    /* If the token carries a value, next() might destroy it. Only with
       invalid code such as f(){"123"4;} */
    if (TOK_HAS_VALUE(t))
        goto expr;
    next();

    if (debug_modes)
        tcc_tcov_check_line (tcc_state, 0), tcc_tcov_block_begin (tcc_state);

    if (t == TOK_IF) {
        skip('(');
        gexpr();
        skip(')');
        a = gvtst(1, 0); /* jump over 'then' when condition is false */
        block(0);
        if (tok == TOK_ELSE) {
            d = gjmp(0);
            gsym(a);
            next();
            block(0);
            gsym(d); /* patch else jmp */
        } else {
            gsym(a);
        }

    } else if (t == TOK_WHILE) {
        d = gind(); /* loop head: re-test the condition here */
        skip('(');
        gexpr();
        skip(')');
        a = gvtst(1, 0); /* break chain */
        b = 0;           /* continue chain */
        lblock(&a, &b);
        gjmp_addr(d);
        gsym_addr(b, d);
        gsym(a);

    } else if (t == '{') {
        new_scope(&o);

        /* handle local labels declarations */
        while (tok == TOK_LABEL) {
            do {
                next();
                if (tok < TOK_UIDENT)
                    expect("label identifier");
                label_push(&local_label_stack, tok, LABEL_DECLARED);
                next();
            } while (tok == ',');
            skip(';');
        }

        while (tok != '}') {
            decl(VT_LOCAL);
            if (tok != '}') {
                /* only the last statement's value survives in a
                   statement expression */
                if (is_expr)
                    vpop();
                block(is_expr);
            }
        }

        prev_scope(&o, is_expr);
        if (local_scope)
            next();
        else if (!nocode_wanted)
            check_func_return();

    } else if (t == TOK_RETURN) {
        b = (func_vt.t & VT_BTYPE) != VT_VOID;
        if (tok != ';') {
            gexpr();
            if (b) {
                gen_assign_cast(&func_vt);
            } else {
                if (vtop->type.t != VT_VOID)
                    tcc_warning("void function returns a value");
                vtop--;
            }
        } else if (b) {
            tcc_warning("'return' with no value");
            b = 0;
        }
        leave_scope(root_scope);
        if (b)
            gfunc_return(&func_vt);
        skip(';');
        /* jump unless last stmt in top-level block */
        if (tok != '}' || local_scope != 1)
            rsym = gjmp(rsym);
        if (debug_modes)
            tcc_tcov_block_end (tcc_state, -1);
        CODE_OFF();

    } else if (t == TOK_BREAK) {
        /* compute jump */
        if (!cur_scope->bsym)
            tcc_error("cannot break");
        if (cur_switch && cur_scope->bsym == cur_switch->bsym)
            leave_scope(cur_switch->scope);
        else
            leave_scope(loop_scope);
        *cur_scope->bsym = gjmp(*cur_scope->bsym);
        skip(';');

    } else if (t == TOK_CONTINUE) {
        /* compute jump */
        if (!cur_scope->csym)
            tcc_error("cannot continue");
        leave_scope(loop_scope);
        *cur_scope->csym = gjmp(*cur_scope->csym);
        skip(';');

    } else if (t == TOK_FOR) {
        new_scope(&o); /* the init declaration gets its own scope */

        skip('(');
        if (tok != ';') {
            /* c99 for-loop init decl? */
            if (!decl0(VT_LOCAL, 1, NULL)) {
                /* no, regular for-loop init expr */
                gexpr();
                vpop();
            }
        }
        skip(';');
        a = b = 0;
        c = d = gind(); /* c: condition label, d: continue target */
        if (tok != ';') {
            gexpr();
            a = gvtst(1, 0);
        }
        skip(';');
        if (tok != ')') {
            /* increment expression: emitted after the body, so continue
               jumps to it and it loops back to the condition */
            e = gjmp(0);
            d = gind();
            gexpr();
            vpop();
            gjmp_addr(c);
            gsym(e);
        }
        skip(')');
        lblock(&a, &b);
        gjmp_addr(d);
        gsym_addr(b, d);
        gsym(a);
        prev_scope(&o, 0);

    } else if (t == TOK_DO) {
        a = b = 0;
        d = gind();
        lblock(&a, &b);
        gsym(b);
        skip(TOK_WHILE);
        skip('(');
        gexpr();
        skip(')');
        skip(';');
        c = gvtst(0, 0); /* loop back while condition is true */
        gsym_addr(c, d);
        gsym(a);

    } else if (t == TOK_SWITCH) {
        struct switch_t *sw;

        sw = tcc_mallocz(sizeof *sw);
        sw->bsym = &a;
        sw->scope = cur_scope;
        sw->prev = cur_switch;
        cur_switch = sw;

        skip('(');
        gexpr();
        skip(')');
        sw->sv = *vtop--; /* save switch value */

        a = 0;
        b = gjmp(0); /* jump to first case */
        lblock(&a, NULL);
        a = gjmp(a); /* add implicit break */
        /* case lookup */
        gsym(b);

        if (sw->sv.type.t & VT_UNSIGNED)
            qsort(sw->p, sw->n, sizeof(void*), case_cmpu);
        else
            qsort(sw->p, sw->n, sizeof(void*), case_cmpi);

        /* after sorting, overlapping ranges must be adjacent */
        for (b = 1; b < sw->n; b++)
            if (sw->sv.type.t & VT_UNSIGNED
                ? (uint64_t)sw->p[b - 1]->v2 >= (uint64_t)sw->p[b]->v1
                : sw->p[b - 1]->v2 >= sw->p[b]->v1)
                tcc_error("duplicate case value");

        vpushv(&sw->sv);
        gv(RC_INT);
        d = 0, gcase(sw->p, sw->n, &d);
        vpop();
        if (sw->def_sym)
            gsym_addr(d, sw->def_sym);
        else
            gsym(d);
        /* break label */
        gsym(a);

        dynarray_reset(&sw->p, &sw->n);
        cur_switch = sw->prev;
        tcc_free(sw);

    } else if (t == TOK_CASE) {
        struct case_t *cr = tcc_malloc(sizeof(struct case_t));
        if (!cur_switch)
            expect("switch");
        cr->v1 = cr->v2 = expr_const64();
        if (gnu_ext && tok == TOK_DOTS) {
            /* GNU case range: 'case lo ... hi:' */
            next();
            cr->v2 = expr_const64();
            if ((!(cur_switch->sv.type.t & VT_UNSIGNED) && cr->v2 < cr->v1)
                || (cur_switch->sv.type.t & VT_UNSIGNED && (uint64_t)cr->v2 < (uint64_t)cr->v1))
                tcc_warning("empty case range");
        }
        if (debug_modes)
            tcc_tcov_reset_ind(tcc_state);
        cr->sym = gind();
        dynarray_add(&cur_switch->p, &cur_switch->n, cr);
        skip(':');
        is_expr = 0;
        goto block_after_label;

    } else if (t == TOK_DEFAULT) {
        if (!cur_switch)
            expect("switch");
        if (cur_switch->def_sym)
            tcc_error("too many 'default'");
        if (debug_modes)
            tcc_tcov_reset_ind(tcc_state);
        cur_switch->def_sym = gind();
        skip(':');
        is_expr = 0;
        goto block_after_label;

    } else if (t == TOK_GOTO) {
        if (cur_scope->vla.num)
            vla_restore(cur_scope->vla.locorig);
        if (tok == '*' && gnu_ext) {
            /* computed goto */
            next();
            gexpr();
            if ((vtop->type.t & VT_BTYPE) != VT_PTR)
                expect("pointer");
            ggoto();

        } else if (tok >= TOK_UIDENT) {
            s = label_find(tok);
            /* put forward definition if needed */
            if (!s)
                s = label_push(&global_label_stack, tok, LABEL_FORWARD);
            else if (s->r == LABEL_DECLARED)
                s->r = LABEL_FORWARD;

            if (s->r & LABEL_FORWARD) {
                /* start new goto chain for cleanups, linked via label->next */
                if (cur_scope->cl.s && !nocode_wanted) {
                    sym_push2(&pending_gotos, SYM_FIELD, 0, cur_scope->cl.n);
                    pending_gotos->prev_tok = s;
                    s = sym_push2(&s->next, SYM_FIELD, 0, 0);
                    pending_gotos->next = s;
                }
                s->jnext = gjmp(s->jnext);
            } else {
                /* backward goto: cleanups can be emitted right away */
                try_call_cleanup_goto(s->cleanupstate);
                gjmp_addr(s->jnext);
            }
            next();

        } else {
            expect("label identifier");
        }
        skip(';');

    } else if (t == TOK_ASM1 || t == TOK_ASM2 || t == TOK_ASM3) {
        asm_instr();

    } else {
        if (tok == ':' && t >= TOK_UIDENT) {
            /* label case */
            next();
            s = label_find(t);
            if (s) {
                if (s->r == LABEL_DEFINED)
                    tcc_error("duplicate label '%s'", get_tok_str(s->v, NULL));
                s->r = LABEL_DEFINED;
                if (s->next) {
                    Sym *pcl; /* pending cleanup goto */
                    for (pcl = s->next; pcl; pcl = pcl->prev)
                        gsym(pcl->jnext);
                    sym_pop(&s->next, NULL, 0);
                } else
                    gsym(s->jnext);
            } else {
                s = label_push(&global_label_stack, t, LABEL_DEFINED);
            }
            s->jnext = gind();
            s->cleanupstate = cur_scope->cl.s;

    block_after_label:
            vla_restore(cur_scope->vla.loc);
            if (tok != '}')
                goto again;
            /* we accept this, but it is a mistake */
            tcc_warning_c(warn_all)("deprecated use of label at end of compound statement");

        } else {
            /* expression case */
            if (t != ';') {
                unget_tok(t);
    expr:
                if (is_expr) {
                    vpop();
                    gexpr();
                } else {
                    gexpr();
                    vpop();
                }
                skip(';');
            }
        }
    }

    if (debug_modes)
        tcc_tcov_check_line (tcc_state, 0), tcc_tcov_block_end (tcc_state, 0);
}
7078 /* This skips over a stream of tokens containing balanced {} and ()
7079 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
7080 with a '{'). If STR then allocates and stores the skipped tokens
7081 in *STR. This doesn't check if () and {} are nested correctly,
7082 i.e. "({)}" is accepted. */
/* Skip a stream of tokens containing balanced {} and () pairs, stopping
   at an outer ',' ';' and '}' (or the matching '}' if we started with a
   '{').  If STR is non-NULL, the skipped tokens are recorded into a
   freshly allocated token string there.  Nesting is not validated:
   "({)}" is accepted. */
static void skip_or_save_block(TokenString **str)
{
    int braces = tok == '{';
    int level = 0;
    if (str)
        *str = tok_str_alloc();

    while ((level > 0 || (tok != '}' && tok != ',' && tok != ';' && tok != ')'))) {
        int t;
        if (tok == TOK_EOF) {
            /* EOF is fatal when saving or inside unbalanced nesting */
            if (str || level > 0)
                tcc_error("unexpected end of file");
            else
                break;
        }
        if (str)
            tok_str_add_tok(*str);
        t = tok;
        next();
        if (t == '{' || t == '(') {
            level++;
        } else if (t == '}' || t == ')') {
            level--;
            if (level == 0 && braces && t == '}')
                break;
        }
    }
    if (str) {
        /* terminate the recorded token string */
        tok_str_add(*str, -1);
        tok_str_add(*str, 0);
    }
}
7116 #define EXPR_CONST 1
7117 #define EXPR_ANY 2
/* Parse one initializer element.  EXPR_CONST requires a (link-time)
   constant expression, as needed for static storage; EXPR_ANY accepts
   any assignment expression. */
static void parse_init_elem(int expr_type)
{
    int saved_global_expr;
    switch(expr_type) {
    case EXPR_CONST:
        /* compound literals must be allocated globally in this case */
        saved_global_expr = global_expr;
        global_expr = 1;
        expr_const1();
        global_expr = saved_global_expr;
        /* NOTE: symbols are accepted, as well as lvalue for anon symbols
           (compound literals). */
        if (((vtop->r & (VT_VALMASK | VT_LVAL)) != VT_CONST
             && ((vtop->r & (VT_SYM|VT_LVAL)) != (VT_SYM|VT_LVAL)
                 || vtop->sym->v < SYM_FIRST_ANOM))
#ifdef TCC_TARGET_PE
            || ((vtop->r & VT_SYM) && vtop->sym->a.dllimport)
#endif
            )
            tcc_error("initializer element is not constant");
        break;
    case EXPR_ANY:
        expr_eq();
        break;
    }
}
#if 1
/* Internal consistency check: an initializer write at 'offset' must not
   go past the space reserved for the object (section data for statics,
   local frame area otherwise). */
static void init_assert(init_params *p, int offset)
{
    if (p->sec ? !NODATA_WANTED && offset > p->sec->data_offset
               : !nocode_wanted && offset > p->local_offset)
        tcc_internal_error("initializer overflow");
}
#else
#define init_assert(sec, offset)
#endif
7157 /* put zeros for variable based init */
/* put zeros for variable based init */
static void init_putz(init_params *p, unsigned long c, int size)
{
    init_assert(p, c + size);
    if (p->sec) {
        /* nothing to do because globals are already set to zero */
    } else {
        /* local object: emit memset(local + c, 0, size) */
        vpush_helper_func(TOK_memset);
        vseti(VT_LOCAL, c);
#ifdef TCC_TARGET_ARM
        vpushs(size);
        vpushi(0);
#else
        vpushi(0);
        vpushs(size);
#endif
        gfunc_call(3);
    }
}
7177 #define DIF_FIRST 1
7178 #define DIF_SIZE_ONLY 2
7179 #define DIF_HAVE_ELEM 4
7180 #define DIF_CLEAR 8
7182 /* delete relocations for specified range c ... c + size. Unfortunatly
7183 in very special cases, relocations may occur unordered */
/* delete relocations for specified range c ... c + size. Unfortunatly
   in very special cases, relocations may occur unordered */
static void decl_design_delrels(Section *sec, int c, int size)
{
    ElfW_Rel *rel, *rel2, *rel_end;
    if (!sec || !sec->reloc)
        return;
    /* compact the relocation array in place, dropping entries that
       target the overwritten range */
    rel = rel2 = (ElfW_Rel*)sec->reloc->data;
    rel_end = (ElfW_Rel*)(sec->reloc->data + sec->reloc->data_offset);
    while (rel < rel_end) {
        if (rel->r_offset >= c && rel->r_offset < c + size) {
            sec->reloc->data_offset -= sizeof *rel;
        } else {
            if (rel2 != rel)
                memcpy(rel2, rel, sizeof *rel);
            ++rel2;
        }
        ++rel;
    }
}
/* Grow a flexible array member's recorded element count when an
   initializer addresses index 'index'; error out if 'ref' is an
   incomplete array that is not the flexible member being initialized. */
static void decl_design_flex(init_params *p, Sym *ref, int index)
{
    if (ref == p->flex_array_ref) {
        if (index >= ref->c)
            ref->c = index + 1;
    } else if (ref->c < 0)
        tcc_error("flexible array has zero size in this context");
}
7212 /* t is the array or struct type. c is the array or struct
7213 address. cur_field is the pointer to the current
7214 field, for arrays the 'c' member contains the current start
7215 index. 'flags' is as in decl_initializer.
7216 'al' contains the already initialized length of the
7217 current container (starting at c). This returns the new length of that. */
/* Parse one (possibly designated) initializer element.  type is the
   array or struct type, c the base address/offset of the container,
   cur_field the current field pointer (for arrays its 'c' member is the
   current index); 'flags' as in decl_initializer.  'al' is the length
   of the container already initialized (relative to c); the new length
   is returned. */
static int decl_designator(init_params *p, CType *type, unsigned long c,
                           Sym **cur_field, int flags, int al)
{
    Sym *s, *f;
    int index, index_last, align, l, nb_elems, elem_size;
    unsigned long corig = c;

    elem_size = 0;
    nb_elems = 1;

    if (flags & DIF_HAVE_ELEM)
        goto no_designator;

    /* old GNU designator syntax: 'field: value' */
    if (gnu_ext && tok >= TOK_UIDENT) {
        l = tok, next();
        if (tok == ':')
            goto struct_field;
        unget_tok(l);
    }

    /* NOTE: we only support ranges for last designator */
    while (nb_elems == 1 && (tok == '[' || tok == '.')) {
        if (tok == '[') {
            if (!(type->t & VT_ARRAY))
                expect("array type");
            next();
            index = index_last = expr_const();
            /* GNU range designator: [lo ... hi] */
            if (tok == TOK_DOTS && gnu_ext) {
                next();
                index_last = expr_const();
            }
            skip(']');
            s = type->ref;
            decl_design_flex(p, s, index_last);
            if (index < 0 || index_last >= s->c || index_last < index)
                tcc_error("index exceeds array bounds or range is empty");
            if (cur_field)
                (*cur_field)->c = index_last;
            type = pointed_type(type);
            elem_size = type_size(type, &align);
            c += index * elem_size;
            nb_elems = index_last - index + 1;
        } else {
            int cumofs;
            next();
            l = tok;
        struct_field:
            next();
            if ((type->t & VT_BTYPE) != VT_STRUCT)
                expect("struct/union type");
            cumofs = 0;
            f = find_field(type, l, &cumofs);
            if (!f)
                expect("field");
            if (cur_field)
                *cur_field = f;
            type = &f->type;
            c += cumofs + f->c;
        }
        cur_field = NULL; /* nested designators no longer track position */
    }
    if (!cur_field) {
        if (tok == '=') {
            next();
        } else if (!gnu_ext) {
            expect("=");
        }
    } else {
    no_designator:
        /* positional initialization: advance to the next array index or
           struct field */
        if (type->t & VT_ARRAY) {
            index = (*cur_field)->c;
            s = type->ref;
            decl_design_flex(p, s, index);
            if (index >= s->c)
                tcc_error("too many initializers");
            type = pointed_type(type);
            elem_size = type_size(type, &align);
            c += index * elem_size;
        } else {
            f = *cur_field;
            /* skip unnamed padding bit-fields */
            while (f && (f->v & SYM_FIRST_ANOM) && (f->type.t & VT_BITFIELD))
                *cur_field = f = f->next;
            if (!f)
                tcc_error("too many initializers");
            type = &f->type;
            c += f->c;
        }
    }

    if (!elem_size) /* for structs */
        elem_size = type_size(type, &align);

    /* Using designators the same element can be initialized more
       than once.  In that case we need to delete possibly already
       existing relocations. */
    if (!(flags & DIF_SIZE_ONLY) && c - corig < al) {
        decl_design_delrels(p->sec, c, elem_size * nb_elems);
        flags &= ~DIF_CLEAR; /* mark stack dirty too */
    }

    decl_initializer(p, type, c, flags & ~DIF_FIRST);

    /* range designator: replicate the just-initialized element over the
       remaining slots of the range */
    if (!(flags & DIF_SIZE_ONLY) && nb_elems > 1) {
        Sym aref = {0};
        CType t1;
        int i;
        if (p->sec || (type->t & VT_ARRAY)) {
            /* make init_putv/vstore believe it were a struct */
            aref.c = elem_size;
            t1.t = VT_STRUCT, t1.ref = &aref;
            type = &t1;
        }
        if (p->sec)
            vpush_ref(type, p->sec, c, elem_size);
        else
            vset(type, VT_LOCAL|VT_LVAL, c);
        for (i = 1; i < nb_elems; i++) {
            vdup();
            init_putv(p, type, c + elem_size * i);
        }
        vpop();
    }

    c += nb_elems * elem_size;
    if (c - corig > al)
        al = c - corig;
    return al;
}
7347 /* store a value or an expression directly in global data or in local array */
/* store a value or an expression directly in global data or in local array */
static void init_putv(init_params *p, CType *type, unsigned long c)
{
    int bt;
    void *ptr;
    CType dtype;
    int size, align;
    Section *sec = p->sec;
    uint64_t val;

    dtype = *type;
    dtype.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */

    size = type_size(type, &align);
    if (type->t & VT_BITFIELD)
        size = (BIT_POS(type->t) + BIT_SIZE(type->t) + 7) / 8;
    init_assert(p, c + size);

    if (sec) {
        /* XXX: not portable */
        /* XXX: generate error if incorrect relocation */
        gen_assign_cast(&dtype);
        bt = type->t & VT_BTYPE;

        /* only pointer-sized fields can hold a relocated symbol value */
        if ((vtop->r & VT_SYM)
            && bt != VT_PTR
            && (bt != (PTR_SIZE == 8 ? VT_LLONG : VT_INT)
                || (type->t & VT_BITFIELD))
            && !((vtop->r & VT_CONST) && vtop->sym->v >= SYM_FIRST_ANOM)
            )
            tcc_error("initializer element is not computable at load time");

        if (NODATA_WANTED) {
            vtop--;
            return;
        }

        ptr = sec->data + c;
        val = vtop->c.i;

        /* XXX: make code faster ? */
        if ((vtop->r & (VT_SYM|VT_CONST)) == (VT_SYM|VT_CONST) &&
            vtop->sym->v >= SYM_FIRST_ANOM &&
            /* XXX This rejects compound literals like
               '(void *){ptr}'.  The problem is that '&sym' is
               represented the same way, which would be ruled out
               by the SYM_FIRST_ANOM check above, but also '"string"'
               in 'char *p = "string"' is represented the same
               with the type being VT_PTR and the symbol being an
               anonymous one.  That is, there's no difference in vtop
               between '(void *){x}' and '&(void *){x}'.  Ignore
               pointer typed entities here.  Hopefully no real code
               will ever use compound literals with scalar type. */
            (vtop->type.t & VT_BTYPE) != VT_PTR) {
            /* These come from compound literals, memcpy stuff over. */
            Section *ssec;
            ElfSym *esym;
            ElfW_Rel *rel;
            esym = elfsym(vtop->sym);
            ssec = tcc_state->sections[esym->st_shndx];
            memmove (ptr, ssec->data + esym->st_value + (int)vtop->c.i, size);
            if (ssec->reloc) {
                /* We need to copy over all memory contents, and that
                   includes relocations.  Use the fact that relocs are
                   created it order, so look from the end of relocs
                   until we hit one before the copied region. */
                unsigned long relofs = ssec->reloc->data_offset;
                while (relofs >= sizeof(*rel)) {
                    relofs -= sizeof(*rel);
                    rel = (ElfW_Rel*)(ssec->reloc->data + relofs);
                    if (rel->r_offset >= esym->st_value + size)
                        continue;
                    if (rel->r_offset < esym->st_value)
                        break;
                    put_elf_reloca(symtab_section, sec,
                                   c + rel->r_offset - esym->st_value,
                                   ELFW(R_TYPE)(rel->r_info),
                                   ELFW(R_SYM)(rel->r_info),
#if PTR_SIZE == 8
                                   rel->r_addend
#else
                                   0
#endif
                                  );
                }
            }
        } else {
            if (type->t & VT_BITFIELD) {
                /* merge the value into the containing bytes, 8 bits at
                   a time, preserving neighboring bit-fields */
                int bit_pos, bit_size, bits, n;
                unsigned char *p, v, m;
                bit_pos = BIT_POS(vtop->type.t);
                bit_size = BIT_SIZE(vtop->type.t);
                p = (unsigned char*)ptr + (bit_pos >> 3);
                bit_pos &= 7, bits = 0;
                while (bit_size) {
                    n = 8 - bit_pos;
                    if (n > bit_size)
                        n = bit_size;
                    v = val >> bits << bit_pos;
                    m = ((1 << n) - 1) << bit_pos;
                    *p = (*p & ~m) | (v & m);
                    bits += n, bit_size -= n, bit_pos = 0, ++p;
                }
            } else
            switch(bt) {
            case VT_BOOL:
                *(char *)ptr = val != 0;
                break;
            case VT_BYTE:
                *(char *)ptr = val;
                break;
            case VT_SHORT:
                write16le(ptr, val);
                break;
            case VT_FLOAT:
                write32le(ptr, val);
                break;
            case VT_DOUBLE:
                write64le(ptr, val);
                break;
            case VT_LDOUBLE:
#if defined TCC_IS_NATIVE_387
                /* Host and target platform may be different but both have x87.
                   On windows, tcc does not use VT_LDOUBLE, except when it is a
                   cross compiler.  In this case a mingw gcc as host compiler
                   comes here with 10-byte long doubles, while msvc or tcc won't.
                   tcc itself can still translate by asm.
                   In any case we avoid possibly random bytes 11 and 12.
                */
                if (sizeof (long double) >= 10)
                    memcpy(ptr, &vtop->c.ld, 10);
#ifdef __TINYC__
                else if (sizeof (long double) == sizeof (double))
                    __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr) : "m" (vtop->c.ld));
#endif
                else if (vtop->c.ld == 0.0)
                    ;
                else
#endif
                /* For other platforms it should work natively, but may not work
                   for cross compilers */
                if (sizeof(long double) == LDOUBLE_SIZE)
                    memcpy(ptr, &vtop->c.ld, LDOUBLE_SIZE);
                else if (sizeof(double) == LDOUBLE_SIZE)
                    memcpy(ptr, &vtop->c.ld, LDOUBLE_SIZE);
#ifndef TCC_CROSS_TEST
                else
                    tcc_error("can't cross compile long double constants");
#endif
                break;

#if PTR_SIZE == 8
            /* intptr_t may need a reloc too, see tcctest.c:relocation_test() */
            case VT_LLONG:
            case VT_PTR:
                if (vtop->r & VT_SYM)
                    greloca(sec, vtop->sym, c, R_DATA_PTR, val);
                else
                    write64le(ptr, val);
                break;
            case VT_INT:
                write32le(ptr, val);
                break;
#else
            case VT_LLONG:
                write64le(ptr, val);
                break;
            case VT_PTR:
            case VT_INT:
                if (vtop->r & VT_SYM)
                    greloc(sec, vtop->sym, c, R_DATA_PTR);
                write32le(ptr, val);
                break;
#endif
            default:
                //tcc_internal_error("unexpected type");
                break;
            }
        }
        vtop--;
    } else {
        /* local object: emit a regular store */
        vset(&dtype, VT_LOCAL|VT_LVAL, c);
        vswap();
        vstore();
        vpop();
    }
}
7535 /* 't' contains the type and storage info. 'c' is the offset of the
7536 object in section 'sec'. If 'sec' is NULL, it means stack based
7537 allocation. 'flags & DIF_FIRST' is true if array '{' must be read (multi
7538 dimension implicit array init handling). 'flags & DIF_SIZE_ONLY' is true if
7539 size only evaluation is wanted (only for arrays). */
7540 static void decl_initializer(init_params *p, CType *type, unsigned long c, int flags)
7542 int len, n, no_oblock, i;
7543 int size1, align1;
7544 Sym *s, *f;
7545 Sym indexsym;
7546 CType *t1;
7548 /* generate line number info */
7549 if (debug_modes && !p->sec)
7550 tcc_debug_line(tcc_state), tcc_tcov_check_line (tcc_state, 1);
7552 if (!(flags & DIF_HAVE_ELEM) && tok != '{' &&
7553 /* In case of strings we have special handling for arrays, so
7554 don't consume them as initializer value (which would commit them
7555 to some anonymous symbol). */
7556 tok != TOK_LSTR && tok != TOK_STR &&
7557 (!(flags & DIF_SIZE_ONLY)
7558 /* a struct may be initialized from a struct of same type, as in
7559 struct {int x,y;} a = {1,2}, b = {3,4}, c[] = {a,b};
7560 In that case we need to parse the element in order to check
7561 it for compatibility below */
7562 || (type->t & VT_BTYPE) == VT_STRUCT)
7564 int ncw_prev = nocode_wanted;
7565 if ((flags & DIF_SIZE_ONLY) && !p->sec)
7566 ++nocode_wanted;
7567 parse_init_elem(!p->sec ? EXPR_ANY : EXPR_CONST);
7568 nocode_wanted = ncw_prev;
7569 flags |= DIF_HAVE_ELEM;
7572 if (type->t & VT_ARRAY) {
7573 no_oblock = 1;
7574 if (((flags & DIF_FIRST) && tok != TOK_LSTR && tok != TOK_STR) ||
7575 tok == '{') {
7576 skip('{');
7577 no_oblock = 0;
7580 s = type->ref;
7581 n = s->c;
7582 t1 = pointed_type(type);
7583 size1 = type_size(t1, &align1);
7585 /* only parse strings here if correct type (otherwise: handle
7586 them as ((w)char *) expressions */
7587 if ((tok == TOK_LSTR &&
7588 #ifdef TCC_TARGET_PE
7589 (t1->t & VT_BTYPE) == VT_SHORT && (t1->t & VT_UNSIGNED)
7590 #else
7591 (t1->t & VT_BTYPE) == VT_INT
7592 #endif
7593 ) || (tok == TOK_STR && (t1->t & VT_BTYPE) == VT_BYTE)) {
7594 len = 0;
7595 cstr_reset(&initstr);
7596 if (size1 != (tok == TOK_STR ? 1 : sizeof(nwchar_t)))
7597 tcc_error("unhandled string literal merging");
7598 while (tok == TOK_STR || tok == TOK_LSTR) {
7599 if (initstr.size)
7600 initstr.size -= size1;
7601 if (tok == TOK_STR)
7602 len += tokc.str.size;
7603 else
7604 len += tokc.str.size / sizeof(nwchar_t);
7605 len--;
7606 cstr_cat(&initstr, tokc.str.data, tokc.str.size);
7607 next();
7609 if (tok != ')' && tok != '}' && tok != ',' && tok != ';'
7610 && tok != TOK_EOF) {
7611 /* Not a lone literal but part of a bigger expression. */
7612 unget_tok(size1 == 1 ? TOK_STR : TOK_LSTR);
7613 tokc.str.size = initstr.size;
7614 tokc.str.data = initstr.data;
7615 goto do_init_array;
7618 decl_design_flex(p, s, len);
7619 if (!(flags & DIF_SIZE_ONLY)) {
7620 int nb = n;
7621 if (len < nb)
7622 nb = len;
7623 if (len > nb)
7624 tcc_warning("initializer-string for array is too long");
7625 /* in order to go faster for common case (char
7626 string in global variable, we handle it
7627 specifically */
7628 if (p->sec && size1 == 1) {
7629 init_assert(p, c + nb);
7630 if (!NODATA_WANTED)
7631 memcpy(p->sec->data + c, initstr.data, nb);
7632 } else {
7633 for(i=0;i<n;i++) {
7634 if (i >= nb) {
7635 /* only add trailing zero if enough storage (no
7636 warning in this case since it is standard) */
7637 if (flags & DIF_CLEAR)
7638 break;
7639 if (n - i >= 4) {
7640 init_putz(p, c + i * size1, (n - i) * size1);
7641 break;
7643 ch = 0;
7644 } else if (size1 == 1)
7645 ch = ((unsigned char *)initstr.data)[i];
7646 else
7647 ch = ((nwchar_t *)initstr.data)[i];
7648 vpushi(ch);
7649 init_putv(p, t1, c + i * size1);
7653 } else {
7655 do_init_array:
7656 indexsym.c = 0;
7657 f = &indexsym;
7659 do_init_list:
7660 /* zero memory once in advance */
7661 if (!(flags & (DIF_CLEAR | DIF_SIZE_ONLY))) {
7662 init_putz(p, c, n*size1);
7663 flags |= DIF_CLEAR;
7666 len = 0;
7667 /* GNU extension: if the initializer is empty for a flex array,
7668 it's size is zero. We won't enter the loop, so set the size
7669 now. */
7670 decl_design_flex(p, s, len);
7671 while (tok != '}' || (flags & DIF_HAVE_ELEM)) {
7672 len = decl_designator(p, type, c, &f, flags, len);
7673 flags &= ~DIF_HAVE_ELEM;
7674 if (type->t & VT_ARRAY) {
7675 ++indexsym.c;
7676 /* special test for multi dimensional arrays (may not
7677 be strictly correct if designators are used at the
7678 same time) */
7679 if (no_oblock && len >= n*size1)
7680 break;
7681 } else {
7682 if (s->type.t == VT_UNION)
7683 f = NULL;
7684 else
7685 f = f->next;
7686 if (no_oblock && f == NULL)
7687 break;
7690 if (tok == '}')
7691 break;
7692 skip(',');
7695 if (!no_oblock)
7696 skip('}');
7698 } else if ((flags & DIF_HAVE_ELEM)
7699 /* Use i_c_parameter_t, to strip toplevel qualifiers.
7700 The source type might have VT_CONSTANT set, which is
7701 of course assignable to non-const elements. */
7702 && is_compatible_unqualified_types(type, &vtop->type)) {
7703 goto one_elem;
7705 } else if ((type->t & VT_BTYPE) == VT_STRUCT) {
7706 no_oblock = 1;
7707 if ((flags & DIF_FIRST) || tok == '{') {
7708 skip('{');
7709 no_oblock = 0;
7711 s = type->ref;
7712 f = s->next;
7713 n = s->c;
7714 size1 = 1;
7715 goto do_init_list;
7717 } else if (tok == '{') {
7718 if (flags & DIF_HAVE_ELEM)
7719 skip(';');
7720 next();
7721 decl_initializer(p, type, c, flags & ~DIF_HAVE_ELEM);
7722 skip('}');
7724 } else one_elem: if ((flags & DIF_SIZE_ONLY)) {
7725 /* If we supported only ISO C we wouldn't have to accept calling
7726 this on anything than an array if DIF_SIZE_ONLY (and even then
7727 only on the outermost level, so no recursion would be needed),
7728 because initializing a flex array member isn't supported.
7729 But GNU C supports it, so we need to recurse even into
7730 subfields of structs and arrays when DIF_SIZE_ONLY is set. */
7731 /* just skip expression */
7732 if (flags & DIF_HAVE_ELEM)
7733 vpop();
7734 else
7735 skip_or_save_block(NULL);
7737 } else {
7738 if (!(flags & DIF_HAVE_ELEM)) {
7739 /* This should happen only when we haven't parsed
7740 the init element above for fear of committing a
7741 string constant to memory too early. */
7742 if (tok != TOK_STR && tok != TOK_LSTR)
7743 expect("string constant");
7744 parse_init_elem(!p->sec ? EXPR_ANY : EXPR_CONST);
7746 if (!p->sec && (flags & DIF_CLEAR) /* container was already zero'd */
7747 && (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST
7748 && vtop->c.i == 0
7749 && btype_size(type->t & VT_BTYPE) /* not for fp constants */
7751 vpop();
7752 else
7753 init_putv(p, type, c);
7757 /* parse an initializer for type 't' if 'has_init' is non zero, and
7758 allocate space in local or global data space ('r' is either
7759 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
7760 variable 'v' of scope 'scope' is declared before initializers
7761 are parsed. If 'v' is zero, then a reference to the new object
7762 is put in the value stack. If 'has_init' is 2, a special parsing
7763 is done to handle string constants. */
7764 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r,
7765 int has_init, int v, int scope)
7767 int size, align, addr;
7768 TokenString *init_str = NULL;
7770 Section *sec;
7771 Sym *flexible_array;
7772 Sym *sym;
7773 int saved_nocode_wanted = nocode_wanted;
7774 #ifdef CONFIG_TCC_BCHECK
7775 int bcheck = tcc_state->do_bounds_check && !NODATA_WANTED;
7776 #endif
7777 init_params p = {0};
7779 /* Always allocate static or global variables */
7780 if (v && (r & VT_VALMASK) == VT_CONST)
7781 nocode_wanted |= 0x80000000;
7783 flexible_array = NULL;
7784 size = type_size(type, &align);
7786 /* exactly one flexible array may be initialized, either the
7787 toplevel array or the last member of the toplevel struct */
7789 if (size < 0) {
7790 /* If the base type itself was an array type of unspecified size
7791 (like in 'typedef int arr[]; arr x = {1};') then we will
7792 overwrite the unknown size by the real one for this decl.
7793 We need to unshare the ref symbol holding that size. */
7794 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
7795 p.flex_array_ref = type->ref;
7797 } else if (has_init && (type->t & VT_BTYPE) == VT_STRUCT) {
7798 Sym *field = type->ref->next;
7799 if (field) {
7800 while (field->next)
7801 field = field->next;
7802 if (field->type.t & VT_ARRAY && field->type.ref->c < 0) {
7803 flexible_array = field;
7804 p.flex_array_ref = field->type.ref;
7805 size = -1;
7810 if (size < 0) {
7811 /* If unknown size, do a dry-run 1st pass */
7812 if (!has_init)
7813 tcc_error("unknown type size");
7814 if (has_init == 2) {
7815 /* only get strings */
7816 init_str = tok_str_alloc();
7817 while (tok == TOK_STR || tok == TOK_LSTR) {
7818 tok_str_add_tok(init_str);
7819 next();
7821 tok_str_add(init_str, -1);
7822 tok_str_add(init_str, 0);
7823 } else
7824 skip_or_save_block(&init_str);
7825 unget_tok(0);
7827 /* compute size */
7828 begin_macro(init_str, 1);
7829 next();
7830 decl_initializer(&p, type, 0, DIF_FIRST | DIF_SIZE_ONLY);
7831 /* prepare second initializer parsing */
7832 macro_ptr = init_str->str;
7833 next();
7835 /* if still unknown size, error */
7836 size = type_size(type, &align);
7837 if (size < 0)
7838 tcc_error("unknown type size");
7840 /* If there's a flex member and it was used in the initializer
7841 adjust size. */
7842 if (flexible_array && flexible_array->type.ref->c > 0)
7843 size += flexible_array->type.ref->c
7844 * pointed_size(&flexible_array->type);
7847 /* take into account specified alignment if bigger */
7848 if (ad->a.aligned) {
7849 int speca = 1 << (ad->a.aligned - 1);
7850 if (speca > align)
7851 align = speca;
7852 } else if (ad->a.packed) {
7853 align = 1;
7856 if (!v && NODATA_WANTED)
7857 size = 0, align = 1;
7859 if ((r & VT_VALMASK) == VT_LOCAL) {
7860 sec = NULL;
7861 #ifdef CONFIG_TCC_BCHECK
7862 if (bcheck && v) {
7863 /* add padding between stack variables for bound checking */
7864 loc -= align;
7866 #endif
7867 loc = (loc - size) & -align;
7868 addr = loc;
7869 p.local_offset = addr + size;
7870 #ifdef CONFIG_TCC_BCHECK
7871 if (bcheck && v) {
7872 /* add padding between stack variables for bound checking */
7873 loc -= align;
7875 #endif
7876 if (v) {
7877 /* local variable */
7878 #ifdef CONFIG_TCC_ASM
7879 if (ad->asm_label) {
7880 int reg = asm_parse_regvar(ad->asm_label);
7881 if (reg >= 0)
7882 r = (r & ~VT_VALMASK) | reg;
7884 #endif
7885 sym = sym_push(v, type, r, addr);
7886 if (ad->cleanup_func) {
7887 Sym *cls = sym_push2(&all_cleanups,
7888 SYM_FIELD | ++cur_scope->cl.n, 0, 0);
7889 cls->prev_tok = sym;
7890 cls->next = ad->cleanup_func;
7891 cls->ncl = cur_scope->cl.s;
7892 cur_scope->cl.s = cls;
7895 sym->a = ad->a;
7896 } else {
7897 /* push local reference */
7898 vset(type, r, addr);
7900 } else {
7901 sym = NULL;
7902 if (v && scope == VT_CONST) {
7903 /* see if the symbol was already defined */
7904 sym = sym_find(v);
7905 if (sym) {
7906 if (p.flex_array_ref && (sym->type.t & type->t & VT_ARRAY)
7907 && sym->type.ref->c > type->ref->c) {
7908 /* flex array was already declared with explicit size
7909 extern int arr[10];
7910 int arr[] = { 1,2,3 }; */
7911 type->ref->c = sym->type.ref->c;
7912 size = type_size(type, &align);
7914 patch_storage(sym, ad, type);
7915 /* we accept several definitions of the same global variable. */
7916 if (!has_init && sym->c && elfsym(sym)->st_shndx != SHN_UNDEF)
7917 goto no_alloc;
7921 /* allocate symbol in corresponding section */
7922 sec = ad->section;
7923 if (!sec) {
7924 CType *tp = type;
7925 while ((tp->t & (VT_BTYPE|VT_ARRAY)) == (VT_PTR|VT_ARRAY))
7926 tp = &tp->ref->type;
7927 if (tp->t & VT_CONSTANT) {
7928 sec = rodata_section;
7929 } else if (has_init) {
7930 sec = data_section;
7931 /*if (tcc_state->g_debug & 4)
7932 tcc_warning("rw data: %s", get_tok_str(v, 0));*/
7933 } else if (tcc_state->nocommon)
7934 sec = bss_section;
7937 if (sec) {
7938 addr = section_add(sec, size, align);
7939 #ifdef CONFIG_TCC_BCHECK
7940 /* add padding if bound check */
7941 if (bcheck)
7942 section_add(sec, 1, 1);
7943 #endif
7944 } else {
7945 addr = align; /* SHN_COMMON is special, symbol value is align */
7946 sec = common_section;
7949 if (v) {
7950 if (!sym) {
7951 sym = sym_push(v, type, r | VT_SYM, 0);
7952 patch_storage(sym, ad, NULL);
7954 /* update symbol definition */
7955 put_extern_sym(sym, sec, addr, size);
7956 } else {
7957 /* push global reference */
7958 vpush_ref(type, sec, addr, size);
7959 sym = vtop->sym;
7960 vtop->r |= r;
7963 #ifdef CONFIG_TCC_BCHECK
7964 /* handles bounds now because the symbol must be defined
7965 before for the relocation */
7966 if (bcheck) {
7967 addr_t *bounds_ptr;
7969 greloca(bounds_section, sym, bounds_section->data_offset, R_DATA_PTR, 0);
7970 /* then add global bound info */
7971 bounds_ptr = section_ptr_add(bounds_section, 2 * sizeof(addr_t));
7972 bounds_ptr[0] = 0; /* relocated */
7973 bounds_ptr[1] = size;
7975 #endif
7978 if (type->t & VT_VLA) {
7979 int a;
7981 if (NODATA_WANTED)
7982 goto no_alloc;
7984 /* save before-VLA stack pointer if needed */
7985 if (cur_scope->vla.num == 0) {
7986 if (cur_scope->prev && cur_scope->prev->vla.num) {
7987 cur_scope->vla.locorig = cur_scope->prev->vla.loc;
7988 } else {
7989 gen_vla_sp_save(loc -= PTR_SIZE);
7990 cur_scope->vla.locorig = loc;
7994 vpush_type_size(type, &a);
7995 gen_vla_alloc(type, a);
7996 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
7997 /* on _WIN64, because of the function args scratch area, the
7998 result of alloca differs from RSP and is returned in RAX. */
7999 gen_vla_result(addr), addr = (loc -= PTR_SIZE);
8000 #endif
8001 gen_vla_sp_save(addr);
8002 cur_scope->vla.loc = addr;
8003 cur_scope->vla.num++;
8004 } else if (has_init) {
8005 p.sec = sec;
8006 decl_initializer(&p, type, addr, DIF_FIRST);
8007 /* patch flexible array member size back to -1, */
8008 /* for possible subsequent similar declarations */
8009 if (flexible_array)
8010 flexible_array->type.ref->c = -1;
8013 no_alloc:
8014 /* restore parse state if needed */
8015 if (init_str) {
8016 end_macro();
8017 next();
8020 nocode_wanted = saved_nocode_wanted;
8023 /* generate vla code saved in post_type() */
8024 static void func_vla_arg_code(Sym *arg)
8026 int align;
8027 TokenString *vla_array_tok = NULL;
8029 if (arg->type.ref)
8030 func_vla_arg_code(arg->type.ref);
8032 if ((arg->type.t & VT_VLA) && arg->type.ref->vla_array_str) {
8033 loc -= type_size(&int_type, &align);
8034 loc &= -align;
8035 arg->type.ref->c = loc;
8037 unget_tok(0);
8038 vla_array_tok = tok_str_alloc();
8039 vla_array_tok->str = arg->type.ref->vla_array_str;
8040 begin_macro(vla_array_tok, 1);
8041 next();
8042 gexpr();
8043 end_macro();
8044 next();
8045 vpush_type_size(&arg->type.ref->type, &align);
8046 gen_op('*');
8047 vset(&int_type, VT_LOCAL|VT_LVAL, arg->type.ref->c);
8048 vswap();
8049 vstore();
8050 vpop();
8054 static void func_vla_arg(Sym *sym)
8056 Sym *arg;
8058 for (arg = sym->type.ref->next; arg; arg = arg->next)
8059 if (arg->type.t & VT_VLA)
8060 func_vla_arg_code(arg);
8063 /* parse a function defined by symbol 'sym' and generate its code in
8064 'cur_text_section' */
8065 static void gen_function(Sym *sym)
8067 struct scope f = { 0 };
8068 cur_scope = root_scope = &f;
8069 nocode_wanted = 0;
8070 ind = cur_text_section->data_offset;
8071 if (sym->a.aligned) {
8072 size_t newoff = section_add(cur_text_section, 0,
8073 1 << (sym->a.aligned - 1));
8074 gen_fill_nops(newoff - ind);
8076 /* NOTE: we patch the symbol size later */
8077 put_extern_sym(sym, cur_text_section, ind, 0);
8078 if (sym->type.ref->f.func_ctor)
8079 add_array (tcc_state, ".init_array", sym->c);
8080 if (sym->type.ref->f.func_dtor)
8081 add_array (tcc_state, ".fini_array", sym->c);
8083 funcname = get_tok_str(sym->v, NULL);
8084 func_ind = ind;
8085 func_vt = sym->type.ref->type;
8086 func_var = sym->type.ref->f.func_type == FUNC_ELLIPSIS;
8088 /* put debug symbol */
8089 tcc_debug_funcstart(tcc_state, sym);
8090 /* push a dummy symbol to enable local sym storage */
8091 sym_push2(&local_stack, SYM_FIELD, 0, 0);
8092 local_scope = 1; /* for function parameters */
8093 gfunc_prolog(sym);
8094 local_scope = 0;
8095 rsym = 0;
8096 clear_temp_local_var_list();
8097 func_vla_arg(sym);
8098 block(0);
8099 gsym(rsym);
8100 nocode_wanted = 0;
8101 /* reset local stack */
8102 pop_local_syms(NULL, 0);
8103 gfunc_epilog();
8104 cur_text_section->data_offset = ind;
8105 local_scope = 0;
8106 label_pop(&global_label_stack, NULL, 0);
8107 sym_pop(&all_cleanups, NULL, 0);
8108 /* patch symbol size */
8109 elfsym(sym)->st_size = ind - func_ind;
8110 /* end of function */
8111 tcc_debug_funcend(tcc_state, ind - func_ind);
8112 /* It's better to crash than to generate wrong code */
8113 cur_text_section = NULL;
8114 funcname = ""; /* for safety */
8115 func_vt.t = VT_VOID; /* for safety */
8116 func_var = 0; /* for safety */
8117 ind = 0; /* for safety */
8118 func_ind = -1;
8119 nocode_wanted = 0x80000000;
8120 check_vstack();
8121 /* do this after funcend debug info */
8122 next();
8125 static void gen_inline_functions(TCCState *s)
8127 Sym *sym;
8128 int inline_generated, i;
8129 struct InlineFunc *fn;
8131 tcc_open_bf(s, ":inline:", 0);
8132 /* iterate while inline function are referenced */
8133 do {
8134 inline_generated = 0;
8135 for (i = 0; i < s->nb_inline_fns; ++i) {
8136 fn = s->inline_fns[i];
8137 sym = fn->sym;
8138 if (sym && (sym->c || !(sym->type.t & VT_INLINE))) {
8139 /* the function was used or forced (and then not internal):
8140 generate its code and convert it to a normal function */
8141 fn->sym = NULL;
8142 tcc_debug_putfile(s, fn->filename);
8143 begin_macro(fn->func_str, 1);
8144 next();
8145 cur_text_section = text_section;
8146 gen_function(sym);
8147 end_macro();
8149 inline_generated = 1;
8152 } while (inline_generated);
8153 tcc_close();
8156 static void free_inline_functions(TCCState *s)
8158 int i;
8159 /* free tokens of unused inline functions */
8160 for (i = 0; i < s->nb_inline_fns; ++i) {
8161 struct InlineFunc *fn = s->inline_fns[i];
8162 if (fn->sym)
8163 tok_str_free(fn->func_str);
8165 dynarray_reset(&s->inline_fns, &s->nb_inline_fns);
8168 /* 'l' is VT_LOCAL or VT_CONST to define default storage type, or VT_CMP
8169 if parsing old style parameter decl list (and FUNC_SYM is set then) */
8170 static int decl0(int l, int is_for_loop_init, Sym *func_sym)
8172 int v, has_init, r, oldint;
8173 CType type, btype;
8174 Sym *sym;
8175 AttributeDef ad, adbase;
8177 while (1) {
8178 if (tok == TOK_STATIC_ASSERT) {
8179 CString error_str;
8180 int c;
8182 next();
8183 skip('(');
8184 c = expr_const();
8186 if (tok == ')') {
8187 if (!c)
8188 tcc_error("_Static_assert fail");
8189 next();
8190 goto static_assert_out;
8193 skip(',');
8194 parse_mult_str(&error_str, "string constant");
8195 if (c == 0)
8196 tcc_error("%s", (char *)error_str.data);
8197 cstr_free(&error_str);
8198 skip(')');
8199 static_assert_out:
8200 skip(';');
8201 continue;
8204 oldint = 0;
8205 if (!parse_btype(&btype, &adbase, l == VT_LOCAL)) {
8206 if (is_for_loop_init)
8207 return 0;
8208 /* skip redundant ';' if not in old parameter decl scope */
8209 if (tok == ';' && l != VT_CMP) {
8210 next();
8211 continue;
8213 if (l != VT_CONST)
8214 break;
8215 if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
8216 /* global asm block */
8217 asm_global_instr();
8218 continue;
8220 if (tok >= TOK_UIDENT) {
8221 /* special test for old K&R protos without explicit int
8222 type. Only accepted when defining global data */
8223 btype.t = VT_INT;
8224 oldint = 1;
8225 } else {
8226 if (tok != TOK_EOF)
8227 expect("declaration");
8228 break;
8232 if (tok == ';') {
8233 if ((btype.t & VT_BTYPE) == VT_STRUCT) {
8234 v = btype.ref->v;
8235 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) >= SYM_FIRST_ANOM)
8236 tcc_warning("unnamed struct/union that defines no instances");
8237 next();
8238 continue;
8240 if (IS_ENUM(btype.t)) {
8241 next();
8242 continue;
8246 while (1) { /* iterate thru each declaration */
8247 type = btype;
8248 ad = adbase;
8249 type_decl(&type, &ad, &v, TYPE_DIRECT);
8250 #if 0
8252 char buf[500];
8253 type_to_str(buf, sizeof(buf), &type, get_tok_str(v, NULL));
8254 printf("type = '%s'\n", buf);
8256 #endif
8257 if ((type.t & VT_BTYPE) == VT_FUNC) {
8258 if ((type.t & VT_STATIC) && (l == VT_LOCAL))
8259 tcc_error("function without file scope cannot be static");
8260 /* if old style function prototype, we accept a
8261 declaration list */
8262 sym = type.ref;
8263 if (sym->f.func_type == FUNC_OLD && l == VT_CONST)
8264 decl0(VT_CMP, 0, sym);
8265 #if defined TCC_TARGET_MACHO || defined TARGETOS_ANDROID
8266 if (sym->f.func_alwinl
8267 && ((type.t & (VT_EXTERN | VT_INLINE))
8268 == (VT_EXTERN | VT_INLINE))) {
8269 /* always_inline functions must be handled as if they
8270 don't generate multiple global defs, even if extern
8271 inline, i.e. GNU inline semantics for those. Rewrite
8272 them into static inline. */
8273 type.t &= ~VT_EXTERN;
8274 type.t |= VT_STATIC;
8276 #endif
8277 /* always compile 'extern inline' */
8278 if (type.t & VT_EXTERN)
8279 type.t &= ~VT_INLINE;
8281 } else if (oldint) {
8282 tcc_warning("type defaults to int");
8285 if (gnu_ext && (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
8286 ad.asm_label = asm_label_instr();
8287 /* parse one last attribute list, after asm label */
8288 parse_attribute(&ad);
8289 #if 0
8290 /* gcc does not allow __asm__("label") with function definition,
8291 but why not ... */
8292 if (tok == '{')
8293 expect(";");
8294 #endif
8297 #ifdef TCC_TARGET_PE
8298 if (ad.a.dllimport || ad.a.dllexport) {
8299 if (type.t & VT_STATIC)
8300 tcc_error("cannot have dll linkage with static");
8301 if (type.t & VT_TYPEDEF) {
8302 tcc_warning("'%s' attribute ignored for typedef",
8303 ad.a.dllimport ? (ad.a.dllimport = 0, "dllimport") :
8304 (ad.a.dllexport = 0, "dllexport"));
8305 } else if (ad.a.dllimport) {
8306 if ((type.t & VT_BTYPE) == VT_FUNC)
8307 ad.a.dllimport = 0;
8308 else
8309 type.t |= VT_EXTERN;
8312 #endif
8313 if (tok == '{') {
8314 if (l != VT_CONST)
8315 tcc_error("cannot use local functions");
8316 if ((type.t & VT_BTYPE) != VT_FUNC)
8317 expect("function definition");
8319 /* reject abstract declarators in function definition
8320 make old style params without decl have int type */
8321 sym = type.ref;
8322 while ((sym = sym->next) != NULL) {
8323 if (!(sym->v & ~SYM_FIELD))
8324 expect("identifier");
8325 if (sym->type.t == VT_VOID)
8326 sym->type = int_type;
8329 /* apply post-declaraton attributes */
8330 merge_funcattr(&type.ref->f, &ad.f);
8332 /* put function symbol */
8333 type.t &= ~VT_EXTERN;
8334 sym = external_sym(v, &type, 0, &ad);
8336 /* static inline functions are just recorded as a kind
8337 of macro. Their code will be emitted at the end of
8338 the compilation unit only if they are used */
8339 if (sym->type.t & VT_INLINE) {
8340 struct InlineFunc *fn;
8341 fn = tcc_malloc(sizeof *fn + strlen(file->filename));
8342 strcpy(fn->filename, file->filename);
8343 fn->sym = sym;
8344 skip_or_save_block(&fn->func_str);
8345 dynarray_add(&tcc_state->inline_fns,
8346 &tcc_state->nb_inline_fns, fn);
8347 } else {
8348 /* compute text section */
8349 cur_text_section = ad.section;
8350 if (!cur_text_section)
8351 cur_text_section = text_section;
8352 gen_function(sym);
8354 break;
8355 } else {
8356 if (l == VT_CMP) {
8357 /* find parameter in function parameter list */
8358 for (sym = func_sym->next; sym; sym = sym->next)
8359 if ((sym->v & ~SYM_FIELD) == v)
8360 goto found;
8361 tcc_error("declaration for parameter '%s' but no such parameter",
8362 get_tok_str(v, NULL));
8363 found:
8364 if (type.t & VT_STORAGE) /* 'register' is okay */
8365 tcc_error("storage class specified for '%s'",
8366 get_tok_str(v, NULL));
8367 if (sym->type.t != VT_VOID)
8368 tcc_error("redefinition of parameter '%s'",
8369 get_tok_str(v, NULL));
8370 convert_parameter_type(&type);
8371 sym->type = type;
8372 } else if (type.t & VT_TYPEDEF) {
8373 /* save typedefed type */
8374 /* XXX: test storage specifiers ? */
8375 sym = sym_find(v);
8376 if (sym && sym->sym_scope == local_scope) {
8377 if (!is_compatible_types(&sym->type, &type)
8378 || !(sym->type.t & VT_TYPEDEF))
8379 tcc_error("incompatible redefinition of '%s'",
8380 get_tok_str(v, NULL));
8381 sym->type = type;
8382 } else {
8383 sym = sym_push(v, &type, 0, 0);
8385 sym->a = ad.a;
8386 sym->f = ad.f;
8387 if (debug_modes)
8388 tcc_debug_typedef (tcc_state, sym);
8389 } else if ((type.t & VT_BTYPE) == VT_VOID
8390 && !(type.t & VT_EXTERN)) {
8391 tcc_error("declaration of void object");
8392 } else {
8393 r = 0;
8394 if ((type.t & VT_BTYPE) == VT_FUNC) {
8395 /* external function definition */
8396 /* specific case for func_call attribute */
8397 type.ref->f = ad.f;
8398 } else if (!(type.t & VT_ARRAY)) {
8399 /* not lvalue if array */
8400 r |= VT_LVAL;
8402 has_init = (tok == '=');
8403 if (has_init && (type.t & VT_VLA))
8404 tcc_error("variable length array cannot be initialized");
8405 if (((type.t & VT_EXTERN) && (!has_init || l != VT_CONST))
8406 || (type.t & VT_BTYPE) == VT_FUNC
8407 /* as with GCC, uninitialized global arrays with no size
8408 are considered extern: */
8409 || ((type.t & VT_ARRAY) && !has_init
8410 && l == VT_CONST && type.ref->c < 0)
8412 /* external variable or function */
8413 type.t |= VT_EXTERN;
8414 sym = external_sym(v, &type, r, &ad);
8415 if (ad.alias_target) {
8416 /* Aliases need to be emitted when their target
8417 symbol is emitted, even if perhaps unreferenced.
8418 We only support the case where the base is
8419 already defined, otherwise we would need
8420 deferring to emit the aliases until the end of
8421 the compile unit. */
8422 Sym *alias_target = sym_find(ad.alias_target);
8423 ElfSym *esym = elfsym(alias_target);
8424 if (!esym)
8425 tcc_error("unsupported forward __alias__ attribute");
8426 put_extern_sym2(sym, esym->st_shndx,
8427 esym->st_value, esym->st_size, 1);
8429 } else {
8430 if (type.t & VT_STATIC)
8431 r |= VT_CONST;
8432 else
8433 r |= l;
8434 if (has_init)
8435 next();
8436 else if (l == VT_CONST)
8437 /* uninitialized global variables may be overridden */
8438 type.t |= VT_EXTERN;
8439 decl_initializer_alloc(&type, &ad, r, has_init, v, l);
8442 if (tok != ',') {
8443 if (is_for_loop_init)
8444 return 1;
8445 skip(';');
8446 break;
8448 next();
8452 return 0;
8455 static void decl(int l)
8457 decl0(l, 0, NULL);
/* ------------------------------------------------------------------------- */
#undef gjmp_addr
#undef gjmp
/* ------------------------------------------------------------------------- */