/* source: tinycc.git / tccgen.c
   blob a1c7db47374105f79c915874a89322f6664d1e71
   (tip commit: tccpp: tcc_warning("extra tokens after directive")) */
/*
 *  TCC - Tiny C Compiler
 *
 *  Copyright (c) 2001-2004 Fabrice Bellard
 *
 *  This library is free software; you can redistribute it and/or
 *  modify it under the terms of the GNU Lesser General Public
 *  License as published by the Free Software Foundation; either
 *  version 2 of the License, or (at your option) any later version.
 *
 *  This library is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 *  Lesser General Public License for more details.
 *
 *  You should have received a copy of the GNU Lesser General Public
 *  License along with this library; if not, write to the Free Software
 *  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 */
21 #define USING_GLOBALS
22 #include "tcc.h"
24 /********************************************************/
25 /* global variables */
27 /* loc : local variable index
28 ind : output code index
29 rsym: return symbol
30 anon_sym: anonymous symbol index
32 ST_DATA int rsym, anon_sym, ind, loc;
34 ST_DATA Sym *global_stack;
35 ST_DATA Sym *local_stack;
36 ST_DATA Sym *define_stack;
37 ST_DATA Sym *global_label_stack;
38 ST_DATA Sym *local_label_stack;
40 static Sym *sym_free_first;
41 static void **sym_pools;
42 static int nb_sym_pools;
44 static Sym *all_cleanups, *pending_gotos;
45 static int local_scope;
46 ST_DATA char debug_modes;
48 ST_DATA SValue *vtop;
49 static SValue _vstack[1 + VSTACK_SIZE];
50 #define vstack (_vstack + 1)
52 ST_DATA int nocode_wanted; /* no code generation wanted */
53 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
54 #define DATA_ONLY_WANTED 0x80000000 /* ON outside of functions and for static initializers */
56 /* no code output after unconditional jumps such as with if (0) ... */
57 #define CODE_OFF_BIT 0x20000000
58 #define CODE_OFF() if(!nocode_wanted)(nocode_wanted |= CODE_OFF_BIT)
59 #define CODE_ON() (nocode_wanted &= ~CODE_OFF_BIT)
61 /* no code output when parsing sizeof()/typeof() etc. (using nocode_wanted++/--) */
62 #define NOEVAL_MASK 0x0000FFFF
63 #define NOEVAL_WANTED (nocode_wanted & NOEVAL_MASK)
65 /* no code output when parsing constant expressions */
66 #define CONST_WANTED_BIT 0x00010000
67 #define CONST_WANTED_MASK 0x0FFF0000
68 #define CONST_WANTED (nocode_wanted & CONST_WANTED_MASK)
70 ST_DATA int global_expr; /* true if compound literals must be allocated globally (used during initializers parsing */
71 ST_DATA CType func_vt; /* current function return type (used by return instruction) */
72 ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
73 ST_DATA int func_vc;
74 ST_DATA int func_ind;
75 ST_DATA const char *funcname;
76 ST_DATA CType int_type, func_old_type, char_type, char_pointer_type;
77 static CString initstr;
79 #if PTR_SIZE == 4
80 #define VT_SIZE_T (VT_INT | VT_UNSIGNED)
81 #define VT_PTRDIFF_T VT_INT
82 #elif LONG_SIZE == 4
83 #define VT_SIZE_T (VT_LLONG | VT_UNSIGNED)
84 #define VT_PTRDIFF_T VT_LLONG
85 #else
86 #define VT_SIZE_T (VT_LONG | VT_LLONG | VT_UNSIGNED)
87 #define VT_PTRDIFF_T (VT_LONG | VT_LLONG)
88 #endif
90 static struct switch_t {
91 struct case_t {
92 int64_t v1, v2;
93 int sym;
94 } **p; int n; /* list of case ranges */
95 int def_sym; /* default symbol */
96 int nocode_wanted;
97 int *bsym;
98 struct scope *scope;
99 struct switch_t *prev;
100 SValue sv;
101 } *cur_switch; /* current switch */
103 #define MAX_TEMP_LOCAL_VARIABLE_NUMBER 8
104 /*list of temporary local variables on the stack in current function. */
105 static struct temp_local_variable {
106 int location; //offset on stack. Svalue.c.i
107 short size;
108 short align;
109 } arr_temp_local_vars[MAX_TEMP_LOCAL_VARIABLE_NUMBER];
110 static int nb_temp_local_vars;
112 static struct scope {
113 struct scope *prev;
114 struct { int loc, locorig, num; } vla;
115 struct { Sym *s; int n; } cl;
116 int *bsym, *csym;
117 Sym *lstk, *llstk;
118 } *cur_scope, *loop_scope, *root_scope;
120 typedef struct {
121 Section *sec;
122 int local_offset;
123 Sym *flex_array_ref;
124 } init_params;
126 #if 1
127 #define precedence_parser
128 static void init_prec(void);
129 #endif
131 static void block(int flags);
132 #define STMT_EXPR 1
133 #define STMT_COMPOUND 2
135 static void gen_cast(CType *type);
136 static void gen_cast_s(int t);
137 static inline CType *pointed_type(CType *type);
138 static int is_compatible_types(CType *type1, CType *type2);
139 static int parse_btype(CType *type, AttributeDef *ad, int ignore_label);
140 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td);
141 static void parse_expr_type(CType *type);
142 static void init_putv(init_params *p, CType *type, unsigned long c);
143 static void decl_initializer(init_params *p, CType *type, unsigned long c, int flags);
144 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
145 static int decl(int l);
146 static void expr_eq(void);
147 static void vpush_type_size(CType *type, int *a);
148 static int is_compatible_unqualified_types(CType *type1, CType *type2);
149 static inline int64_t expr_const64(void);
150 static void vpush64(int ty, unsigned long long v);
151 static void vpush(CType *type);
152 static int gvtst(int inv, int t);
153 static void gen_inline_functions(TCCState *s);
154 static void free_inline_functions(TCCState *s);
155 static void skip_or_save_block(TokenString **str);
156 static void gv_dup(void);
157 static int get_temp_local_var(int size,int align);
158 static void clear_temp_local_var_list();
159 static void cast_error(CType *st, CType *dt);
160 static void end_switch(void);
162 /* ------------------------------------------------------------------------- */
163 /* Automagical code suppression */
165 /* Clear 'nocode_wanted' at forward label if it was used */
166 ST_FUNC void gsym(int t)
168 if (t) {
169 gsym_addr(t, ind);
170 CODE_ON();
174 /* Clear 'nocode_wanted' if current pc is a label */
175 static int gind()
177 int t = ind;
178 CODE_ON();
179 if (debug_modes)
180 tcc_tcov_block_begin(tcc_state);
181 return t;
/* Set 'nocode_wanted' after unconditional (backwards) jump */
static void gjmp_addr_acs(int t)
{
    gjmp_addr(t);
    CODE_OFF();
}
/* Set 'nocode_wanted' after unconditional (forwards) jump */
static int gjmp_acs(int t)
{
    t = gjmp(t);
    CODE_OFF();
    return t;
}

/* These are #undef'd at the end of this file */
#define gjmp_addr gjmp_addr_acs
#define gjmp gjmp_acs
202 /* ------------------------------------------------------------------------- */
204 ST_INLN int is_float(int t)
206 int bt = t & VT_BTYPE;
207 return bt == VT_LDOUBLE
208 || bt == VT_DOUBLE
209 || bt == VT_FLOAT
210 || bt == VT_QFLOAT;
213 static inline int is_integer_btype(int bt)
215 return bt == VT_BYTE
216 || bt == VT_BOOL
217 || bt == VT_SHORT
218 || bt == VT_INT
219 || bt == VT_LLONG;
222 static int btype_size(int bt)
224 return bt == VT_BYTE || bt == VT_BOOL ? 1 :
225 bt == VT_SHORT ? 2 :
226 bt == VT_INT ? 4 :
227 bt == VT_LLONG ? 8 :
228 bt == VT_PTR ? PTR_SIZE : 0;
231 /* returns function return register from type */
232 static int R_RET(int t)
234 if (!is_float(t))
235 return REG_IRET;
236 #ifdef TCC_TARGET_X86_64
237 if ((t & VT_BTYPE) == VT_LDOUBLE)
238 return TREG_ST0;
239 #elif defined TCC_TARGET_RISCV64
240 if ((t & VT_BTYPE) == VT_LDOUBLE)
241 return REG_IRET;
242 #endif
243 return REG_FRET;
246 /* returns 2nd function return register, if any */
247 static int R2_RET(int t)
249 t &= VT_BTYPE;
250 #if PTR_SIZE == 4
251 if (t == VT_LLONG)
252 return REG_IRE2;
253 #elif defined TCC_TARGET_X86_64
254 if (t == VT_QLONG)
255 return REG_IRE2;
256 if (t == VT_QFLOAT)
257 return REG_FRE2;
258 #elif defined TCC_TARGET_RISCV64
259 if (t == VT_LDOUBLE)
260 return REG_IRE2;
261 #endif
262 return VT_CONST;
265 /* returns true for two-word types */
266 #define USING_TWO_WORDS(t) (R2_RET(t) != VT_CONST)
268 /* put function return registers to stack value */
269 static void PUT_R_RET(SValue *sv, int t)
271 sv->r = R_RET(t), sv->r2 = R2_RET(t);
274 /* returns function return register class for type t */
275 static int RC_RET(int t)
277 return reg_classes[R_RET(t)] & ~(RC_FLOAT | RC_INT);
280 /* returns generic register class for type t */
281 static int RC_TYPE(int t)
283 if (!is_float(t))
284 return RC_INT;
285 #ifdef TCC_TARGET_X86_64
286 if ((t & VT_BTYPE) == VT_LDOUBLE)
287 return RC_ST0;
288 if ((t & VT_BTYPE) == VT_QFLOAT)
289 return RC_FRET;
290 #elif defined TCC_TARGET_RISCV64
291 if ((t & VT_BTYPE) == VT_LDOUBLE)
292 return RC_INT;
293 #endif
294 return RC_FLOAT;
297 /* returns 2nd register class corresponding to t and rc */
298 static int RC2_TYPE(int t, int rc)
300 if (!USING_TWO_WORDS(t))
301 return 0;
302 #ifdef RC_IRE2
303 if (rc == RC_IRET)
304 return RC_IRE2;
305 #endif
306 #ifdef RC_FRE2
307 if (rc == RC_FRET)
308 return RC_FRE2;
309 #endif
310 if (rc & RC_FLOAT)
311 return RC_FLOAT;
312 return RC_INT;
315 /* we use our own 'finite' function to avoid potential problems with
316 non standard math libs */
317 /* XXX: endianness dependent */
318 ST_FUNC int ieee_finite(double d)
320 int p[4];
321 memcpy(p, &d, sizeof(double));
322 return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
/* compiling intel long double natively */
#if (defined __i386__ || defined __x86_64__) \
    && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
# define TCC_IS_NATIVE_387
#endif
331 ST_FUNC void test_lvalue(void)
333 if (!(vtop->r & VT_LVAL))
334 expect("lvalue");
337 ST_FUNC void check_vstack(void)
339 if (vtop != vstack - 1)
340 tcc_error("internal compiler error: vstack leak (%d)",
341 (int)(vtop - vstack + 1));
/* vstack debugging aid */
#if 0
void pv (const char *lbl, int a, int b)
{
    int i;
    for (i = a; i < a + b; ++i) {
        SValue *p = &vtop[-i];
        printf("%s vtop[-%d] : type.t:%04x  r:%04x  r2:%04x  c.i:%d\n",
            lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
    }
}
#endif
357 /* ------------------------------------------------------------------------- */
358 /* initialize vstack and types. This must be done also for tcc -E */
359 ST_FUNC void tccgen_init(TCCState *s1)
361 vtop = vstack - 1;
362 memset(vtop, 0, sizeof *vtop);
364 /* define some often used types */
365 int_type.t = VT_INT;
367 char_type.t = VT_BYTE;
368 if (s1->char_is_unsigned)
369 char_type.t |= VT_UNSIGNED;
370 char_pointer_type = char_type;
371 mk_pointer(&char_pointer_type);
373 func_old_type.t = VT_FUNC;
374 func_old_type.ref = sym_push(SYM_FIELD, &int_type, 0, 0);
375 func_old_type.ref->f.func_call = FUNC_CDECL;
376 func_old_type.ref->f.func_type = FUNC_OLD;
377 #ifdef precedence_parser
378 init_prec();
379 #endif
380 cstr_new(&initstr);
383 ST_FUNC int tccgen_compile(TCCState *s1)
385 funcname = "";
386 func_ind = -1;
387 anon_sym = SYM_FIRST_ANOM;
388 nocode_wanted = DATA_ONLY_WANTED; /* no code outside of functions */
389 debug_modes = (s1->do_debug ? 1 : 0) | s1->test_coverage << 1;
391 tcc_debug_start(s1);
392 tcc_tcov_start (s1);
393 #ifdef TCC_TARGET_ARM
394 arm_init(s1);
395 #endif
396 #ifdef INC_DEBUG
397 printf("%s: **** new file\n", file->filename);
398 #endif
399 parse_flags = PARSE_FLAG_PREPROCESS | PARSE_FLAG_TOK_NUM | PARSE_FLAG_TOK_STR;
400 next();
401 decl(VT_CONST);
402 gen_inline_functions(s1);
403 check_vstack();
404 /* end of translation unit info */
405 tcc_debug_end(s1);
406 tcc_tcov_end(s1);
407 return 0;
410 ST_FUNC void tccgen_finish(TCCState *s1)
412 tcc_debug_end(s1); /* just in case of errors: free memory */
413 free_inline_functions(s1);
414 sym_pop(&global_stack, NULL, 0);
415 sym_pop(&local_stack, NULL, 0);
416 /* free preprocessor macros */
417 free_defines(NULL);
418 /* free sym_pools */
419 dynarray_reset(&sym_pools, &nb_sym_pools);
420 cstr_free(&initstr);
421 dynarray_reset(&stk_data, &nb_stk_data);
422 while (cur_switch)
423 end_switch();
424 local_scope = 0;
425 loop_scope = NULL;
426 all_cleanups = NULL;
427 pending_gotos = NULL;
428 nb_temp_local_vars = 0;
429 global_label_stack = NULL;
430 local_label_stack = NULL;
431 cur_text_section = NULL;
432 sym_free_first = NULL;
435 /* ------------------------------------------------------------------------- */
436 ST_FUNC ElfSym *elfsym(Sym *s)
438 if (!s || !s->c)
439 return NULL;
440 return &((ElfSym *)symtab_section->data)[s->c];
443 /* apply storage attributes to Elf symbol */
444 ST_FUNC void update_storage(Sym *sym)
446 ElfSym *esym;
447 int sym_bind, old_sym_bind;
449 esym = elfsym(sym);
450 if (!esym)
451 return;
453 if (sym->a.visibility)
454 esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1))
455 | sym->a.visibility;
457 if (sym->type.t & (VT_STATIC | VT_INLINE))
458 sym_bind = STB_LOCAL;
459 else if (sym->a.weak)
460 sym_bind = STB_WEAK;
461 else
462 sym_bind = STB_GLOBAL;
463 old_sym_bind = ELFW(ST_BIND)(esym->st_info);
464 if (sym_bind != old_sym_bind) {
465 esym->st_info = ELFW(ST_INFO)(sym_bind, ELFW(ST_TYPE)(esym->st_info));
468 #ifdef TCC_TARGET_PE
469 if (sym->a.dllimport)
470 esym->st_other |= ST_PE_IMPORT;
471 if (sym->a.dllexport)
472 esym->st_other |= ST_PE_EXPORT;
473 #endif
475 #if 0
476 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
477 get_tok_str(sym->v, NULL),
478 sym_bind == STB_WEAK ? 'w' : sym_bind == STB_LOCAL ? 'l' : 'g',
479 sym->a.visibility,
480 sym->a.dllexport,
481 sym->a.dllimport
483 #endif
486 /* ------------------------------------------------------------------------- */
487 /* update sym->c so that it points to an external symbol in section
488 'section' with value 'value' */
490 ST_FUNC void put_extern_sym2(Sym *sym, int sh_num,
491 addr_t value, unsigned long size,
492 int can_add_underscore)
494 int sym_type, sym_bind, info, other, t;
495 ElfSym *esym;
496 const char *name;
497 char buf1[256];
499 if (!sym->c) {
500 name = get_tok_str(sym->v, NULL);
501 t = sym->type.t;
502 if ((t & VT_BTYPE) == VT_FUNC) {
503 sym_type = STT_FUNC;
504 } else if ((t & VT_BTYPE) == VT_VOID) {
505 sym_type = STT_NOTYPE;
506 if ((t & (VT_BTYPE|VT_ASM_FUNC)) == VT_ASM_FUNC)
507 sym_type = STT_FUNC;
508 } else {
509 sym_type = STT_OBJECT;
511 if (t & (VT_STATIC | VT_INLINE))
512 sym_bind = STB_LOCAL;
513 else
514 sym_bind = STB_GLOBAL;
515 other = 0;
517 #ifdef TCC_TARGET_PE
518 if (sym_type == STT_FUNC && sym->type.ref) {
519 Sym *ref = sym->type.ref;
520 if (ref->a.nodecorate) {
521 can_add_underscore = 0;
523 if (ref->f.func_call == FUNC_STDCALL && can_add_underscore) {
524 sprintf(buf1, "_%s@%d", name, ref->f.func_args * PTR_SIZE);
525 name = buf1;
526 other |= ST_PE_STDCALL;
527 can_add_underscore = 0;
530 #endif
532 if (sym->asm_label) {
533 name = get_tok_str(sym->asm_label, NULL);
534 can_add_underscore = 0;
537 if (tcc_state->leading_underscore && can_add_underscore) {
538 buf1[0] = '_';
539 pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
540 name = buf1;
543 info = ELFW(ST_INFO)(sym_bind, sym_type);
544 sym->c = put_elf_sym(symtab_section, value, size, info, other, sh_num, name);
546 if (debug_modes)
547 tcc_debug_extern_sym(tcc_state, sym, sh_num, sym_bind, sym_type);
549 } else {
550 esym = elfsym(sym);
551 esym->st_value = value;
552 esym->st_size = size;
553 esym->st_shndx = sh_num;
555 update_storage(sym);
558 ST_FUNC void put_extern_sym(Sym *sym, Section *s, addr_t value, unsigned long size)
560 if (nocode_wanted && (NODATA_WANTED || (s && s == cur_text_section)))
561 return;
562 put_extern_sym2(sym, s ? s->sh_num : SHN_UNDEF, value, size, 1);
565 /* add a new relocation entry to symbol 'sym' in section 's' */
566 ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
567 addr_t addend)
569 int c = 0;
571 if (nocode_wanted && s == cur_text_section)
572 return;
574 if (sym) {
575 if (0 == sym->c)
576 put_extern_sym(sym, NULL, 0, 0);
577 c = sym->c;
580 /* now we can add ELF relocation info */
581 put_elf_reloca(symtab_section, s, offset, type, c, addend);
#if PTR_SIZE == 4
/* 32-bit targets: relocation without explicit addend */
ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
{
    greloca(s, sym, offset, type, 0);
}
#endif
591 /* ------------------------------------------------------------------------- */
592 /* symbol allocator */
593 static Sym *__sym_malloc(void)
595 Sym *sym_pool, *sym, *last_sym;
596 int i;
598 sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
599 dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);
601 last_sym = sym_free_first;
602 sym = sym_pool;
603 for(i = 0; i < SYM_POOL_NB; i++) {
604 sym->next = last_sym;
605 last_sym = sym;
606 sym++;
608 sym_free_first = last_sym;
609 return last_sym;
612 static inline Sym *sym_malloc(void)
614 Sym *sym;
615 #ifndef SYM_DEBUG
616 sym = sym_free_first;
617 if (!sym)
618 sym = __sym_malloc();
619 sym_free_first = sym->next;
620 return sym;
621 #else
622 sym = tcc_malloc(sizeof(Sym));
623 return sym;
624 #endif
627 ST_INLN void sym_free(Sym *sym)
629 #ifndef SYM_DEBUG
630 sym->next = sym_free_first;
631 sym_free_first = sym;
632 #else
633 tcc_free(sym);
634 #endif
637 /* push, without hashing */
638 ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, int c)
640 Sym *s;
642 s = sym_malloc();
643 memset(s, 0, sizeof *s);
644 s->v = v;
645 s->type.t = t;
646 s->c = c;
647 /* add in stack */
648 s->prev = *ps;
649 *ps = s;
650 return s;
653 /* find a symbol and return its associated structure. 's' is the top
654 of the symbol stack */
655 ST_FUNC Sym *sym_find2(Sym *s, int v)
657 while (s) {
658 if (s->v == v)
659 return s;
660 s = s->prev;
662 return NULL;
665 /* structure lookup */
666 ST_INLN Sym *struct_find(int v)
668 v -= TOK_IDENT;
669 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
670 return NULL;
671 return table_ident[v]->sym_struct;
674 /* find an identifier */
675 ST_INLN Sym *sym_find(int v)
677 v -= TOK_IDENT;
678 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
679 return NULL;
680 return table_ident[v]->sym_identifier;
683 static int sym_scope(Sym *s)
685 if (IS_ENUM_VAL (s->type.t))
686 return s->type.ref->sym_scope;
687 else
688 return s->sym_scope;
691 /* push a given symbol on the symbol stack */
692 ST_FUNC Sym *sym_push(int v, CType *type, int r, int c)
694 Sym *s, **ps;
695 TokenSym *ts;
697 if (local_stack)
698 ps = &local_stack;
699 else
700 ps = &global_stack;
701 s = sym_push2(ps, v, type->t, c);
702 s->type.ref = type->ref;
703 s->r = r;
704 /* don't record fields or anonymous symbols */
705 /* XXX: simplify */
706 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
707 /* record symbol in token array */
708 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
709 if (v & SYM_STRUCT)
710 ps = &ts->sym_struct;
711 else
712 ps = &ts->sym_identifier;
713 s->prev_tok = *ps;
714 *ps = s;
715 s->sym_scope = local_scope;
716 if (s->prev_tok && sym_scope(s->prev_tok) == s->sym_scope)
717 tcc_error("redeclaration of '%s'",
718 get_tok_str(v & ~SYM_STRUCT, NULL));
720 return s;
723 /* push a global identifier */
724 ST_FUNC Sym *global_identifier_push(int v, int t, int c)
726 Sym *s, **ps;
727 s = sym_push2(&global_stack, v, t, c);
728 s->r = VT_CONST | VT_SYM;
729 /* don't record anonymous symbol */
730 if (v < SYM_FIRST_ANOM) {
731 ps = &table_ident[v - TOK_IDENT]->sym_identifier;
732 /* modify the top most local identifier, so that sym_identifier will
733 point to 's' when popped; happens when called from inline asm */
734 while (*ps != NULL && (*ps)->sym_scope)
735 ps = &(*ps)->prev_tok;
736 s->prev_tok = *ps;
737 *ps = s;
739 return s;
742 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
743 pop them yet from the list, but do remove them from the token array. */
744 ST_FUNC void sym_pop(Sym **ptop, Sym *b, int keep)
746 Sym *s, *ss, **ps;
747 TokenSym *ts;
748 int v;
750 s = *ptop;
751 while(s != b) {
752 ss = s->prev;
753 v = s->v;
754 /* remove symbol in token array */
755 /* XXX: simplify */
756 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
757 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
758 if (v & SYM_STRUCT)
759 ps = &ts->sym_struct;
760 else
761 ps = &ts->sym_identifier;
762 *ps = s->prev_tok;
764 if (!keep)
765 sym_free(s);
766 s = ss;
768 if (!keep)
769 *ptop = b;
772 /* label lookup */
773 ST_FUNC Sym *label_find(int v)
775 v -= TOK_IDENT;
776 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
777 return NULL;
778 return table_ident[v]->sym_label;
781 ST_FUNC Sym *label_push(Sym **ptop, int v, int flags)
783 Sym *s, **ps;
784 s = sym_push2(ptop, v, VT_STATIC, 0);
785 s->r = flags;
786 ps = &table_ident[v - TOK_IDENT]->sym_label;
787 if (ptop == &global_label_stack) {
788 /* modify the top most local identifier, so that
789 sym_identifier will point to 's' when popped */
790 while (*ps != NULL)
791 ps = &(*ps)->prev_tok;
793 s->prev_tok = *ps;
794 *ps = s;
795 return s;
798 /* pop labels until element last is reached. Look if any labels are
799 undefined. Define symbols if '&&label' was used. */
800 ST_FUNC void label_pop(Sym **ptop, Sym *slast, int keep)
802 Sym *s, *s1;
803 for(s = *ptop; s != slast; s = s1) {
804 s1 = s->prev;
805 if (s->r == LABEL_DECLARED) {
806 tcc_warning_c(warn_all)("label '%s' declared but not used", get_tok_str(s->v, NULL));
807 } else if (s->r == LABEL_FORWARD) {
808 tcc_error("label '%s' used but not defined",
809 get_tok_str(s->v, NULL));
810 } else {
811 if (s->c) {
812 /* define corresponding symbol. A size of
813 1 is put. */
814 put_extern_sym(s, cur_text_section, s->jnext, 1);
817 /* remove label */
818 if (s->r != LABEL_GONE)
819 table_ident[s->v - TOK_IDENT]->sym_label = s->prev_tok;
820 if (!keep)
821 sym_free(s);
822 else
823 s->r = LABEL_GONE;
825 if (!keep)
826 *ptop = slast;
829 /* ------------------------------------------------------------------------- */
830 static void vcheck_cmp(void)
832 /* cannot let cpu flags if other instruction are generated. Also
833 avoid leaving VT_JMP anywhere except on the top of the stack
834 because it would complicate the code generator.
836 Don't do this when nocode_wanted. vtop might come from
837 !nocode_wanted regions (see 88_codeopt.c) and transforming
838 it to a register without actually generating code is wrong
839 as their value might still be used for real. All values
840 we push under nocode_wanted will eventually be popped
841 again, so that the VT_CMP/VT_JMP value will be in vtop
842 when code is unsuppressed again. */
844 /* However if it's just automatic suppression via CODE_OFF/ON()
845 then it seems that we better let things work undisturbed.
846 How can it work at all under nocode_wanted? Well, gv() will
847 actually clear it at the gsym() in load()/VT_JMP in the
848 generator backends */
850 if (vtop->r == VT_CMP && 0 == (nocode_wanted & ~CODE_OFF_BIT))
851 gv(RC_INT);
854 static void vsetc(CType *type, int r, CValue *vc)
856 if (vtop >= vstack + (VSTACK_SIZE - 1))
857 tcc_error("memory full (vstack)");
858 vcheck_cmp();
859 vtop++;
860 vtop->type = *type;
861 vtop->r = r;
862 vtop->r2 = VT_CONST;
863 vtop->c = *vc;
864 vtop->sym = NULL;
867 ST_FUNC void vswap(void)
869 SValue tmp;
871 vcheck_cmp();
872 tmp = vtop[0];
873 vtop[0] = vtop[-1];
874 vtop[-1] = tmp;
877 /* pop stack value */
878 ST_FUNC void vpop(void)
880 int v;
881 v = vtop->r & VT_VALMASK;
882 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
883 /* for x86, we need to pop the FP stack */
884 if (v == TREG_ST0) {
885 o(0xd8dd); /* fstp %st(0) */
886 } else
887 #endif
888 if (v == VT_CMP) {
889 /* need to put correct jump if && or || without test */
890 gsym(vtop->jtrue);
891 gsym(vtop->jfalse);
893 vtop--;
896 /* push constant of type "type" with useless value */
897 static void vpush(CType *type)
899 vset(type, VT_CONST, 0);
902 /* push arbitrary 64bit constant */
903 static void vpush64(int ty, unsigned long long v)
905 CValue cval;
906 CType ctype;
907 ctype.t = ty;
908 ctype.ref = NULL;
909 cval.i = v;
910 vsetc(&ctype, VT_CONST, &cval);
913 /* push integer constant */
914 ST_FUNC void vpushi(int v)
916 vpush64(VT_INT, v);
919 /* push a pointer sized constant */
920 static void vpushs(addr_t v)
922 vpush64(VT_SIZE_T, v);
925 /* push long long constant */
926 static inline void vpushll(long long v)
928 vpush64(VT_LLONG, v);
931 ST_FUNC void vset(CType *type, int r, int v)
933 CValue cval;
934 cval.i = v;
935 vsetc(type, r, &cval);
938 static void vseti(int r, int v)
940 CType type;
941 type.t = VT_INT;
942 type.ref = NULL;
943 vset(&type, r, v);
946 ST_FUNC void vpushv(SValue *v)
948 if (vtop >= vstack + (VSTACK_SIZE - 1))
949 tcc_error("memory full (vstack)");
950 vtop++;
951 *vtop = *v;
954 static void vdup(void)
956 vpushv(vtop);
959 /* rotate n first stack elements to the bottom
960 I1 ... In -> I2 ... In I1 [top is right]
962 ST_FUNC void vrotb(int n)
964 int i;
965 SValue tmp;
967 vcheck_cmp();
968 tmp = vtop[-n + 1];
969 for(i=-n+1;i!=0;i++)
970 vtop[i] = vtop[i+1];
971 vtop[0] = tmp;
974 /* rotate the n elements before entry e towards the top
975 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
977 ST_FUNC void vrote(SValue *e, int n)
979 int i;
980 SValue tmp;
982 vcheck_cmp();
983 tmp = *e;
984 for(i = 0;i < n - 1; i++)
985 e[-i] = e[-i - 1];
986 e[-n + 1] = tmp;
989 /* rotate n first stack elements to the top
990 I1 ... In -> In I1 ... I(n-1) [top is right]
992 ST_FUNC void vrott(int n)
994 vrote(vtop, n);
997 /* ------------------------------------------------------------------------- */
998 /* vtop->r = VT_CMP means CPU-flags have been set from comparison or test. */
1000 /* called from generators to set the result from relational ops */
1001 ST_FUNC void vset_VT_CMP(int op)
1003 vtop->r = VT_CMP;
1004 vtop->cmp_op = op;
1005 vtop->jfalse = 0;
1006 vtop->jtrue = 0;
1009 /* called once before asking generators to load VT_CMP to a register */
1010 static void vset_VT_JMP(void)
1012 int op = vtop->cmp_op;
1014 if (vtop->jtrue || vtop->jfalse) {
1015 int origt = vtop->type.t;
1016 /* we need to jump to 'mov $0,%R' or 'mov $1,%R' */
1017 int inv = op & (op < 2); /* small optimization */
1018 vseti(VT_JMP+inv, gvtst(inv, 0));
1019 vtop->type.t |= origt & (VT_UNSIGNED | VT_DEFSIGN);
1020 } else {
1021 /* otherwise convert flags (rsp. 0/1) to register */
1022 vtop->c.i = op;
1023 if (op < 2) /* doesn't seem to happen */
1024 vtop->r = VT_CONST;
1028 /* Set CPU Flags, doesn't yet jump */
1029 static void gvtst_set(int inv, int t)
1031 int *p;
1033 if (vtop->r != VT_CMP) {
1034 vpushi(0);
1035 gen_op(TOK_NE);
1036 if (vtop->r != VT_CMP) /* must be VT_CONST then */
1037 vset_VT_CMP(vtop->c.i != 0);
1040 p = inv ? &vtop->jfalse : &vtop->jtrue;
1041 *p = gjmp_append(*p, t);
1044 /* Generate value test
1046 * Generate a test for any value (jump, comparison and integers) */
1047 static int gvtst(int inv, int t)
1049 int op, x, u;
1051 gvtst_set(inv, t);
1052 t = vtop->jtrue, u = vtop->jfalse;
1053 if (inv)
1054 x = u, u = t, t = x;
1055 op = vtop->cmp_op;
1057 /* jump to the wanted target */
1058 if (op > 1)
1059 t = gjmp_cond(op ^ inv, t);
1060 else if (op != inv)
1061 t = gjmp(t);
1062 /* resolve complementary jumps to here */
1063 gsym(u);
1065 vtop--;
1066 return t;
1069 /* generate a zero or nozero test */
1070 static void gen_test_zero(int op)
1072 if (vtop->r == VT_CMP) {
1073 int j;
1074 if (op == TOK_EQ) {
1075 j = vtop->jfalse;
1076 vtop->jfalse = vtop->jtrue;
1077 vtop->jtrue = j;
1078 vtop->cmp_op ^= 1;
1080 } else {
1081 vpushi(0);
1082 gen_op(op);
1086 /* ------------------------------------------------------------------------- */
1087 /* push a symbol value of TYPE */
1088 ST_FUNC void vpushsym(CType *type, Sym *sym)
1090 CValue cval;
1091 cval.i = 0;
1092 vsetc(type, VT_CONST | VT_SYM, &cval);
1093 vtop->sym = sym;
1096 /* Return a static symbol pointing to a section */
1097 ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
1099 int v;
1100 Sym *sym;
1102 v = anon_sym++;
1103 sym = sym_push(v, type, VT_CONST | VT_SYM, 0);
1104 sym->type.t |= VT_STATIC;
1105 put_extern_sym(sym, sec, offset, size);
1106 return sym;
1109 /* push a reference to a section offset by adding a dummy symbol */
1110 static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
1112 vpushsym(type, get_sym_ref(type, sec, offset, size));
1115 /* define a new external reference to a symbol 'v' of type 'u' */
1116 ST_FUNC Sym *external_global_sym(int v, CType *type)
1118 Sym *s;
1120 s = sym_find(v);
1121 if (!s) {
1122 /* push forward reference */
1123 s = global_identifier_push(v, type->t | VT_EXTERN, 0);
1124 s->type.ref = type->ref;
1125 } else if (IS_ASM_SYM(s)) {
1126 s->type.t = type->t | (s->type.t & VT_EXTERN);
1127 s->type.ref = type->ref;
1128 update_storage(s);
1130 return s;
1133 /* create an external reference with no specific type similar to asm labels.
1134 This avoids type conflicts if the symbol is used from C too */
1135 ST_FUNC Sym *external_helper_sym(int v)
1137 CType ct = { VT_ASM_FUNC, NULL };
1138 return external_global_sym(v, &ct);
1141 /* push a reference to an helper function (such as memmove) */
1142 ST_FUNC void vpush_helper_func(int v)
1144 vpushsym(&func_old_type, external_helper_sym(v));
1147 /* Merge symbol attributes. */
1148 static void merge_symattr(struct SymAttr *sa, struct SymAttr *sa1)
1150 if (sa1->aligned && !sa->aligned)
1151 sa->aligned = sa1->aligned;
1152 sa->packed |= sa1->packed;
1153 sa->weak |= sa1->weak;
1154 sa->nodebug |= sa1->nodebug;
1155 if (sa1->visibility != STV_DEFAULT) {
1156 int vis = sa->visibility;
1157 if (vis == STV_DEFAULT
1158 || vis > sa1->visibility)
1159 vis = sa1->visibility;
1160 sa->visibility = vis;
1162 sa->dllexport |= sa1->dllexport;
1163 sa->nodecorate |= sa1->nodecorate;
1164 sa->dllimport |= sa1->dllimport;
1167 /* Merge function attributes. */
1168 static void merge_funcattr(struct FuncAttr *fa, struct FuncAttr *fa1)
1170 if (fa1->func_call && !fa->func_call)
1171 fa->func_call = fa1->func_call;
1172 if (fa1->func_type && !fa->func_type)
1173 fa->func_type = fa1->func_type;
1174 if (fa1->func_args && !fa->func_args)
1175 fa->func_args = fa1->func_args;
1176 if (fa1->func_noreturn)
1177 fa->func_noreturn = 1;
1178 if (fa1->func_ctor)
1179 fa->func_ctor = 1;
1180 if (fa1->func_dtor)
1181 fa->func_dtor = 1;
1184 /* Merge attributes. */
1185 static void merge_attr(AttributeDef *ad, AttributeDef *ad1)
1187 merge_symattr(&ad->a, &ad1->a);
1188 merge_funcattr(&ad->f, &ad1->f);
1190 if (ad1->section)
1191 ad->section = ad1->section;
1192 if (ad1->alias_target)
1193 ad->alias_target = ad1->alias_target;
1194 if (ad1->asm_label)
1195 ad->asm_label = ad1->asm_label;
1196 if (ad1->attr_mode)
1197 ad->attr_mode = ad1->attr_mode;
/* Merge some type attributes: reconcile an existing symbol 'sym' with a
   redeclaration of type 'type'.  Errors out on a true redefinition or on
   incompatible types; otherwise updates sym's type/ref in place
   (completing tentative/extern declarations, old-style prototypes,
   unsized arrays, ...). */
static void patch_type(Sym *sym, CType *type)
{
    if (!(type->t & VT_EXTERN) || IS_ENUM_VAL(sym->type.t)) {
        if (!(sym->type.t & VT_EXTERN))
            tcc_error("redefinition of '%s'", get_tok_str(sym->v, NULL));
        /* the new declaration provides a definition */
        sym->type.t &= ~VT_EXTERN;
    }

    if (IS_ASM_SYM(sym)) {
        /* stay static if both are static */
        sym->type.t = type->t & (sym->type.t | ~VT_STATIC);
        sym->type.ref = type->ref;
        if ((type->t & VT_BTYPE) != VT_FUNC && !(type->t & VT_ARRAY))
            sym->r |= VT_LVAL;
    }

    if (!is_compatible_types(&sym->type, type)) {
        tcc_error("incompatible types for redefinition of '%s'",
                  get_tok_str(sym->v, NULL));

    } else if ((sym->type.t & VT_BTYPE) == VT_FUNC) {
        int static_proto = sym->type.t & VT_STATIC;
        /* warn if static follows non-static function declaration */
        if ((type->t & VT_STATIC) && !static_proto
            /* XXX this test for inline shouldn't be here. Until we
               implement gnu-inline mode again it silences a warning for
               mingw caused by our workarounds. */
            && !((type->t | sym->type.t) & VT_INLINE))
            tcc_warning("static storage ignored for redefinition of '%s'",
                get_tok_str(sym->v, NULL));

        /* set 'inline' if both agree or if one has static */
        if ((type->t | sym->type.t) & VT_INLINE) {
            if (!((type->t ^ sym->type.t) & VT_INLINE)
             || ((type->t | sym->type.t) & VT_STATIC))
                static_proto |= VT_INLINE;
        }

        if (0 == (type->t & VT_EXTERN)) {
            struct FuncAttr f = sym->type.ref->f;
            /* put complete type, use static from prototype */
            sym->type.t = (type->t & ~(VT_STATIC|VT_INLINE)) | static_proto;
            sym->type.ref = type->ref;
            merge_funcattr(&sym->type.ref->f, &f);
        } else {
            /* NOTE(review): precedence makes this
               sym->type.t &= (~VT_INLINE | static_proto); i.e. it can
               only ever clear VT_INLINE -- presumably intended, but
               worth confirming against upstream history. */
            sym->type.t &= ~VT_INLINE | static_proto;
        }

        /* an old-style declaration is superseded by a prototyped one */
        if (sym->type.ref->f.func_type == FUNC_OLD
            && type->ref->f.func_type != FUNC_OLD) {
            sym->type.ref = type->ref;
        }

    } else {
        if ((sym->type.t & VT_ARRAY) && type->ref->c >= 0) {
            /* set array size if it was omitted in extern declaration */
            sym->type.ref->c = type->ref->c;
        }
        if ((type->t ^ sym->type.t) & VT_STATIC)
            tcc_warning("storage mismatch for redefinition of '%s'",
                get_tok_str(sym->v, NULL));
    }
}
/* Merge some storage attributes: apply a redeclaration's attributes 'ad'
   (and optionally its type) onto the existing symbol 'sym', then let the
   backend react via update_storage(). */
static void patch_storage(Sym *sym, AttributeDef *ad, CType *type)
{
    if (type)
        patch_type(sym, type);

#ifdef TCC_TARGET_PE
    /* dllimport must be consistent across declarations */
    if (sym->a.dllimport != ad->a.dllimport)
        tcc_error("incompatible dll linkage for redefinition of '%s'",
            get_tok_str(sym->v, NULL));
#endif
    merge_symattr(&sym->a, &ad->a);
    if (ad->asm_label)
        sym->asm_label = ad->asm_label;
    update_storage(sym);
}
/* copy sym to other stack: clone 's0' and push the clone onto the stack
   '*ps' (global or local).  Named (non-anonymous) symbols are also linked
   into their identifier's lookup chain so sym_find() sees the copy.
   Returns the new symbol. */
static Sym *sym_copy(Sym *s0, Sym **ps)
{
    Sym *s;
    s = sym_malloc(), *s = *s0;
    s->prev = *ps, *ps = s;
    if (s->v < SYM_FIRST_ANOM) {
        /* hook into the token's sym_identifier chain as well */
        ps = &table_ident[s->v - TOK_IDENT]->sym_identifier;
        s->prev_tok = *ps, *ps = s;
    }
    return s;
}
/* copy s->type.ref to stack 'ps' for VT_FUNC and VT_PTR (and locally
   scoped structs).  The whole ->next chain is duplicated node by node,
   recursing so that nested pointer/function types are copied too --
   used to move a type off the local stack before it is popped. */
static void sym_copy_ref(Sym *s, Sym **ps)
{
    int bt = s->type.t & VT_BTYPE;
    if (bt == VT_FUNC || bt == VT_PTR || (bt == VT_STRUCT && s->sym_scope)) {
        Sym **sp = &s->type.ref;
        /* detach the old chain, rebuild it from copies */
        for (s = *sp, *sp = NULL; s; s = s->next) {
            Sym *s2 = sym_copy(s, ps);
            sp = &(*sp = s2)->next;
            sym_copy_ref(s2, ps);
        }
    }
}
/* define a new external reference to a symbol 'v'.  If a file-scope
   symbol already exists it is patched with the new storage/type info,
   otherwise a forward reference is pushed on the global stack.  Inside a
   function, non-function symbols additionally get a shadow copy on the
   local stack so scoping works. */
static Sym *external_sym(int v, CType *type, int r, AttributeDef *ad)
{
    Sym *s;

    /* look for global symbol (skip any block-scope shadows) */
    s = sym_find(v);
    while (s && s->sym_scope)
        s = s->prev_tok;

    if (!s) {
        /* push forward reference */
        s = global_identifier_push(v, type->t, 0);
        s->r |= r;
        s->a = ad->a;
        s->asm_label = ad->asm_label;
        s->type.ref = type->ref;
        /* copy type to the global stack */
        if (local_stack)
            sym_copy_ref(s, &global_stack);
    } else {
        patch_storage(s, ad, type);
    }
    /* push variables on local_stack if any */
    if (local_stack && (s->type.t & VT_BTYPE) != VT_FUNC)
        s = sym_copy(s, &local_stack);
    return s;
}
1338 /* save registers up to (vtop - n) stack entry */
1339 ST_FUNC void save_regs(int n)
1341 SValue *p, *p1;
1342 for(p = vstack, p1 = vtop - n; p <= p1; p++)
1343 save_reg(p->r);
/* save r to the memory stack, and mark it as being free.
   Convenience wrapper: spills 'r' everywhere on the value stack. */
ST_FUNC void save_reg(int r)
{
    save_reg_upstack(r, 0);
}
/* save r to the memory stack, and mark it as being free,
   if seen up to (vtop - n) stack entry.  The register's value is stored
   once into a temporary stack slot; every stack entry that referenced it
   is rewritten to point at that slot instead. */
ST_FUNC void save_reg_upstack(int r, int n)
{
    int l, size, align, bt;
    SValue *p, *p1, sv;

    if ((r &= VT_VALMASK) >= VT_CONST)
        return; /* not a real machine register */
    if (nocode_wanted)
        return;
    l = 0; /* stack slot offset; 0 means "not spilled yet" */
    for(p = vstack, p1 = vtop - n; p <= p1; p++) {
        if ((p->r & VT_VALMASK) == r || p->r2 == r) {
            /* must save value on stack if not already done */
            if (!l) {
                bt = p->type.t & VT_BTYPE;
                if (bt == VT_VOID)
                    continue;
                /* lvalues and function designators hold an address */
                if ((p->r & VT_LVAL) || bt == VT_FUNC)
                    bt = VT_PTR;
                sv.type.t = bt;
                size = type_size(&sv.type, &align);
                l = get_temp_local_var(size,align);
                sv.r = VT_LOCAL | VT_LVAL;
                sv.c.i = l;
                store(p->r & VT_VALMASK, &sv);
#if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
                /* x86 specific: need to pop fp register ST0 if saved */
                if (r == TREG_ST0) {
                    o(0xd8dd); /* fstp %st(0) */
                }
#endif
                /* special long long case */
                if (p->r2 < VT_CONST && USING_TWO_WORDS(bt)) {
                    sv.c.i += PTR_SIZE;
                    store(p->r2, &sv);
                }
            }
            /* mark that stack entry as being saved on the stack */
            if (p->r & VT_LVAL) {
                /* also clear the bounded flag because the
                   relocation address of the function was stored in
                   p->c.i */
                p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
            } else {
                p->r = VT_LVAL | VT_LOCAL;
                p->type.t &= ~VT_ARRAY; /* cannot combine VT_LVAL with VT_ARRAY */
            }
            p->sym = NULL;
            p->r2 = VT_CONST;
            p->c.i = l;
        }
    }
}
#ifdef TCC_TARGET_ARM
/* find a register of class 'rc2' with at most one reference on stack.
 * If none, call get_reg(rc).  (ARM only: used when a register with a
 * single live use is cheap to take over.) */
ST_FUNC int get_reg_ex(int rc, int rc2)
{
    int r;
    SValue *p;

    for(r=0;r<NB_REGS;r++) {
        if (reg_classes[r] & rc2) {
            int n;
            n=0;
            /* count live references to r on the value stack */
            for(p = vstack; p <= vtop; p++) {
                if ((p->r & VT_VALMASK) == r ||
                    p->r2 == r)
                    n++;
            }
            if (n <= 1)
                return r;
        }
    }
    return get_reg(rc);
}
#endif
/* find a free register of class 'rc'. If none, save one register */
ST_FUNC int get_reg(int rc)
{
    int r;
    SValue *p;

    /* find a free register */
    for(r=0;r<NB_REGS;r++) {
        if (reg_classes[r] & rc) {
            if (nocode_wanted)
                return r; /* no code emitted: any register will do */
            for(p=vstack;p<=vtop;p++) {
                if ((p->r & VT_VALMASK) == r ||
                    p->r2 == r)
                    goto notfound;
            }
            return r;
        }
    notfound: ;
    }

    /* no register left : free the first one on the stack (VERY
       IMPORTANT to start from the bottom to ensure that we don't
       spill registers used in gen_opi()) */
    for(p=vstack;p<=vtop;p++) {
        /* look at second register (if long long) */
        r = p->r2;
        if (r < VT_CONST && (reg_classes[r] & rc))
            goto save_found;
        r = p->r & VT_VALMASK;
        if (r < VT_CONST && (reg_classes[r] & rc)) {
        save_found:
            save_reg(r);
            return r;
        }
    }
    /* Should never comes here */
    return -1;
}
/* find a free temporary local variable (return the offset on stack) match the size and align. If none, add new temporary stack variable */
static int get_temp_local_var(int size,int align){
    int i;
    struct temp_local_variable *temp_var;
    int found_var;
    SValue *p;
    int r;
    char free;  /* NOTE(review): shadows stdlib free() -- harmless here */
    char found;
    found=0;
    /* first try to reuse an existing slot of identical size/alignment */
    for(i=0;i<nb_temp_local_vars;i++){
        temp_var=&arr_temp_local_vars[i];
        if(temp_var->size<size||align!=temp_var->align){
            continue;
        }
        /*check if temp_var is free*/
        free=1;
        for(p=vstack;p<=vtop;p++) {
            r=p->r&VT_VALMASK;
            if(r==VT_LOCAL||r==VT_LLOCAL){
                if(p->c.i==temp_var->location){
                    free=0;
                    break;
                }
            }
        }
        if(free){
            found_var=temp_var->location;
            found=1;
            break;
        }
    }
    if(!found){
        /* allocate a fresh slot below current locals */
        loc = (loc - size) & -align;
        if(nb_temp_local_vars<MAX_TEMP_LOCAL_VARIABLE_NUMBER){
            /* record it for later reuse (i == nb_temp_local_vars here) */
            temp_var=&arr_temp_local_vars[i];
            temp_var->location=loc;
            temp_var->size=size;
            temp_var->align=align;
            nb_temp_local_vars++;
        }
        found_var=loc;
    }
    return found_var;
}
1519 static void clear_temp_local_var_list(){
1520 nb_temp_local_vars=0;
1523 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1524 if needed */
1525 static void move_reg(int r, int s, int t)
1527 SValue sv;
1529 if (r != s) {
1530 save_reg(r);
1531 sv.type.t = t;
1532 sv.type.ref = NULL;
1533 sv.r = s;
1534 sv.c.i = 0;
1535 load(r, &sv);
/* get address of vtop (vtop MUST BE an lvalue): turn the lvalue on top
   of the value stack into the plain address it was dereferencing. */
ST_FUNC void gaddrof(void)
{
    vtop->r &= ~VT_LVAL;
    /* tricky: if saved lvalue, then we can go back to lvalue */
    if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
        vtop->r = (vtop->r & ~VT_VALMASK) | VT_LOCAL | VT_LVAL;
}
#ifdef CONFIG_TCC_BCHECK
/* generate a bounded pointer addition: replaces ptr+offset on the value
   stack by a call to __bound_ptr_add, leaving the checked pointer in
   REG_IRET and remembering the call's relocation offset for later
   patching by gen_bounded_ptr_deref(). */
static void gen_bounded_ptr_add(void)
{
    int save = (vtop[-1].r & VT_VALMASK) == VT_LOCAL;
    if (save) {
        /* keep a copy of the local base below the call arguments */
        vpushv(&vtop[-1]);
        vrott(3);
    }
    vpush_helper_func(TOK___bound_ptr_add);
    vrott(3);
    gfunc_call(2);
    vtop -= save;
    vpushi(0);
    /* returned pointer is in REG_IRET */
    vtop->r = REG_IRET | VT_BOUNDED;
    if (nocode_wanted)
        return;
    /* relocation offset of the bounding function call point */
    vtop->c.i = (cur_text_section->reloc->data_offset - sizeof(ElfW_Rel));
}
/* patch pointer addition in vtop so that pointer dereferencing is
   also tested: retarget the relocation recorded by gen_bounded_ptr_add()
   at the size-specific __bound_ptr_indirN helper. */
static void gen_bounded_ptr_deref(void)
{
    addr_t func;
    int size, align;
    ElfW_Rel *rel;
    Sym *sym;

    if (nocode_wanted)
        return;

    size = type_size(&vtop->type, &align);
    /* pick the checking helper matching the access size */
    switch(size) {
    case  1: func = TOK___bound_ptr_indir1; break;
    case  2: func = TOK___bound_ptr_indir2; break;
    case  4: func = TOK___bound_ptr_indir4; break;
    case  8: func = TOK___bound_ptr_indir8; break;
    case 12: func = TOK___bound_ptr_indir12; break;
    case 16: func = TOK___bound_ptr_indir16; break;
    default:
        /* may happen with struct member access */
        return;
    }
    sym = external_helper_sym(func);
    if (!sym->c)
        put_extern_sym(sym, NULL, 0, 0);
    /* patch relocation */
    /* XXX: find a better solution ? */
    rel = (ElfW_Rel *)(cur_text_section->reloc->data + vtop->c.i);
    rel->r_info = ELFW(R_INFO)(sym->c, ELFW(R_TYPE)(rel->r_info));
}
/* generate lvalue bound code: make sure the lvalue on top of the value
   stack carries a bounds-checked pointer before it is dereferenced. */
static void gbound(void)
{
    CType type1;

    vtop->r &= ~VT_MUSTBOUND;
    /* if lvalue, then use checking code before dereferencing */
    if (vtop->r & VT_LVAL) {
        /* if not VT_BOUNDED value, then make one */
        if (!(vtop->r & VT_BOUNDED)) {
            /* must save type because we must set it to int to get pointer */
            type1 = vtop->type;
            vtop->type.t = VT_PTR;
            gaddrof();
            vpushi(0); /* bound-check ptr + 0 */
            gen_bounded_ptr_add();
            vtop->r |= VT_LVAL;
            vtop->type = type1;
        }
        /* then check for dereferencing */
        gen_bounded_ptr_deref();
    }
}
/* we need to call __bound_ptr_add before we start to load function
   args into registers.  Also instruments calls to setjmp-family
   functions (and alloca on x86) so the bounds runtime can track them. */
ST_FUNC void gbound_args(int nb_args)
{
    int i, v;
    SValue *sv;

    /* bound-check any argument flagged VT_MUSTBOUND, in place */
    for (i = 1; i <= nb_args; ++i)
        if (vtop[1 - i].r & VT_MUSTBOUND) {
            vrotb(i);
            gbound();
            vrott(i);
        }

    sv = vtop - nb_args; /* the called function itself */
    if (sv->r & VT_SYM) {
        v = sv->sym->v;
        if (v == TOK_setjmp
          || v == TOK__setjmp
#ifndef TCC_TARGET_PE
          || v == TOK_sigsetjmp
          || v == TOK___sigsetjmp
#endif
          ) {
            /* let the runtime register the jmp_buf */
            vpush_helper_func(TOK___bound_setjmp);
            vpushv(sv + 1);
            gfunc_call(1);
            func_bound_add_epilog = 1;
        }
#if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64
        if (v == TOK_alloca)
            func_bound_add_epilog = 1;
#endif
#if TARGETOS_NetBSD
        if (v == TOK_longjmp) /* undo rename to __longjmp14 */
            sv->sym->asm_label = TOK___bound_longjmp;
#endif
    }
}
/* Add bounds for local symbols from S to E (via ->prev).  Emits one
   (offset, size) pair into lbounds_section for each stack variable
   whose address may escape. */
static void add_local_bounds(Sym *s, Sym *e)
{
    for (; s != e; s = s->prev) {
        if (!s->v || (s->r & VT_VALMASK) != VT_LOCAL)
            continue; /* anonymous or not on the stack */
        /* Add arrays/structs/unions because we always take address */
        if ((s->type.t & VT_ARRAY)
            || (s->type.t & VT_BTYPE) == VT_STRUCT
            || s->a.addrtaken) {
            /* add local bound info */
            int align, size = type_size(&s->type, &align);
            addr_t *bounds_ptr = section_ptr_add(lbounds_section,
                                                 2 * sizeof(addr_t));
            bounds_ptr[0] = s->c;
            bounds_ptr[1] = size;
        }
    }
}
#endif
/* Wrapper around sym_pop, that potentially also registers local bounds
   (bounds-checking builds) and emits scope debug info before the
   symbols between local_stack and 'b' are popped. */
static void pop_local_syms(Sym *b, int keep)
{
#ifdef CONFIG_TCC_BCHECK
    if (tcc_state->do_bounds_check && !keep && (local_scope || !func_var))
        add_local_bounds(local_stack, b);
#endif
    if (debug_modes)
        tcc_add_debug_info (tcc_state, !local_scope, local_stack, b);
    sym_pop(&local_stack, b, keep);
}
/* increment an lvalue pointer: advance the address of the lvalue on top
   of the value stack by 'offset' bytes, keeping its original type. */
static void incr_offset(int offset)
{
    int t = vtop->type.t;
    gaddrof(); /* remove VT_LVAL */
    vtop->type.t = VT_PTRDIFF_T; /* set scalar type */
    vpushs(offset);
    gen_op('+');
    vtop->r |= VT_LVAL;
    vtop->type.t = t; /* restore original type */
}
/* advance a byte-wise bitfield access pointer by 'o' bytes, viewing the
   target as an unsigned byte (helper for *_packed_bf below) */
static void incr_bf_adr(int o)
{
    vtop->type.t = VT_BYTE | VT_UNSIGNED;
    incr_offset(o);
}
/* single-byte load mode for packed or otherwise unaligned bitfields:
   assemble the field value byte by byte into an accumulator, then
   sign-extend if the field type is signed.  The trailing comments show
   the value-stack layout (B = byte lvalue, X = accumulator). */
static void load_packed_bf(CType *type, int bit_pos, int bit_size)
{
    int n, o, bits;
    save_reg_upstack(vtop->r, 1);
    vpush64(type->t & VT_BTYPE, 0); // B X
    bits = 0, o = bit_pos >> 3, bit_pos &= 7;
    do {
        vswap(); // X B
        incr_bf_adr(o);
        vdup(); // X B B
        n = 8 - bit_pos;
        if (n > bit_size)
            n = bit_size; /* bits taken from this byte */
        if (bit_pos)
            vpushi(bit_pos), gen_op(TOK_SHR), bit_pos = 0; // X B Y
        if (n < 8)
            vpushi((1 << n) - 1), gen_op('&');
        gen_cast(type);
        if (bits)
            vpushi(bits), gen_op(TOK_SHL);
        vrotb(3); // B Y X
        gen_op('|'); // B X
        bits += n, bit_size -= n, o = 1;
    } while (bit_size);
    vswap(), vpop();
    if (!(type->t & VT_UNSIGNED)) {
        /* sign-extend: shift the field to the top, then arithmetic
           shift back down */
        n = ((type->t & VT_BTYPE) == VT_LLONG ? 64 : 32) - bits;
        vpushi(n), gen_op(TOK_SHL);
        vpushi(n), gen_op(TOK_SAR);
    }
}
/* single-byte store mode for packed or otherwise unaligned bitfields:
   write the value byte by byte, read-modify-writing partial bytes.
   Trailing comments show the value-stack layout. */
static void store_packed_bf(int bit_pos, int bit_size)
{
    int bits, n, o, m, c;
    /* constant source values can be re-pushed cheaply with vdup() */
    c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
    vswap(); // X B
    save_reg_upstack(vtop->r, 1);
    bits = 0, o = bit_pos >> 3, bit_pos &= 7;
    do {
        incr_bf_adr(o); // X B
        vswap(); //B X
        c ? vdup() : gv_dup(); // B V X
        vrott(3); // X B V
        if (bits)
            vpushi(bits), gen_op(TOK_SHR);
        if (bit_pos)
            vpushi(bit_pos), gen_op(TOK_SHL);
        n = 8 - bit_pos;
        if (n > bit_size)
            n = bit_size; /* bits stored into this byte */
        if (n < 8) {
            /* partial byte: merge with the existing bits */
            m = ((1 << n) - 1) << bit_pos;
            vpushi(m), gen_op('&'); // X B V1
            vpushv(vtop-1); // X B V1 B
            vpushi(m & 0x80 ? ~m & 0x7f : ~m);
            gen_op('&'); // X B V1 B1
            gen_op('|'); // X B V2
        }
        vdup(), vtop[-1] = vtop[-2]; // X B B V2
        vstore(), vpop(); // X B
        bits += n, bit_size -= n, bit_pos = 0, o = 1;
    } while (bit_size);
    vpop(), vpop();
}
/* Adjust an SValue holding a bitfield: if the field's storage unit was
   recorded (auxtype) and is an ordinary scalar, retype the value to that
   storage unit.  Returns the auxtype (VT_STRUCT means "packed, use the
   byte-wise path"), or 0 when no type info is available. */
static int adjust_bf(SValue *sv, int bit_pos, int bit_size)
{
    int t;
    if (0 == sv->type.ref)
        return 0;
    t = sv->type.ref->auxtype;
    if (t != -1 && t != VT_STRUCT) {
        sv->type.t = (sv->type.t & ~(VT_BTYPE | VT_LONG)) | t;
        sv->r |= VT_LVAL;
    }
    return t;
}
/* store vtop a register belonging to class 'rc'. lvalues are
   converted to values. Cannot be used if cannot be converted to
   register value (such as structures).  Returns the (first) register
   holding the value; for two-word values the second register ends up
   in vtop->r2. */
ST_FUNC int gv(int rc)
{
    int r, r2, r_ok, r2_ok, rc2, bt;
    int bit_pos, bit_size, size, align;

    /* NOTE: get_reg can modify vstack[] */
    if (vtop->type.t & VT_BITFIELD) {
        CType type;

        bit_pos = BIT_POS(vtop->type.t);
        bit_size = BIT_SIZE(vtop->type.t);
        /* remove bit field info to avoid loops */
        vtop->type.t &= ~VT_STRUCT_MASK;

        type.ref = NULL;
        type.t = vtop->type.t & VT_UNSIGNED;
        if ((vtop->type.t & VT_BTYPE) == VT_BOOL)
            type.t |= VT_UNSIGNED;

        r = adjust_bf(vtop, bit_pos, bit_size);

        if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
            type.t |= VT_LLONG;
        else
            type.t |= VT_INT;

        if (r == VT_STRUCT) {
            /* packed/unaligned field: byte-wise extraction */
            load_packed_bf(&type, bit_pos, bit_size);
        } else {
            int bits = (type.t & VT_BTYPE) == VT_LLONG ? 64 : 32;
            /* cast to int to propagate signedness in following ops */
            gen_cast(&type);
            /* generate shifts */
            vpushi(bits - (bit_pos + bit_size));
            gen_op(TOK_SHL);
            vpushi(bits - bit_size);
            /* NOTE: transformed to SHR if unsigned */
            gen_op(TOK_SAR);
        }
        r = gv(rc);
    } else {
        if (is_float(vtop->type.t) &&
            (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
            /* CPUs usually cannot use float constants, so we store them
               generically in data segment */
            init_params p = { rodata_section };
            unsigned long offset;
            size = type_size(&vtop->type, &align);
            if (NODATA_WANTED)
                size = 0, align = 1;
            offset = section_add(p.sec, size, align);
            vpush_ref(&vtop->type, p.sec, offset, size);
            vswap();
            init_putv(&p, &vtop->type, offset);
            vtop->r |= VT_LVAL;
        }
#ifdef CONFIG_TCC_BCHECK
        if (vtop->r & VT_MUSTBOUND)
            gbound();
#endif

        bt = vtop->type.t & VT_BTYPE;

#ifdef TCC_TARGET_RISCV64
        /* XXX mega hack */
        if (bt == VT_LDOUBLE && rc == RC_FLOAT)
            rc = RC_INT;
#endif
        /* class for the second word of a two-word value, or 0 */
        rc2 = RC2_TYPE(bt, rc);

        /* need to reload if:
           - constant
           - lvalue (need to dereference pointer)
           - already a register, but not in the right class */
        r = vtop->r & VT_VALMASK;
        r_ok = !(vtop->r & VT_LVAL) && (r < VT_CONST) && (reg_classes[r] & rc);
        r2_ok = !rc2 || ((vtop->r2 < VT_CONST) && (reg_classes[vtop->r2] & rc2));

        if (!r_ok || !r2_ok) {

            if (!r_ok) {
                if (1 /* we can 'mov (r),r' in cases */
                    && r < VT_CONST
                    && (reg_classes[r] & rc)
                    && !rc2
                    )
                    save_reg_upstack(r, 1);
                else
                    r = get_reg(rc);
            }

            if (rc2) {
                int load_type = (bt == VT_QFLOAT) ? VT_DOUBLE : VT_PTRDIFF_T;
                int original_type = vtop->type.t;

                /* two register type load :
                   expand to two words temporarily */
                if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
                    /* load constant */
                    unsigned long long ll = vtop->c.i;
                    vtop->c.i = ll; /* first word */
                    load(r, vtop);
                    vtop->r = r; /* save register value */
                    vpushi(ll >> 32); /* second word */
                } else if (vtop->r & VT_LVAL) {
                    /* We do not want to modifier the long long pointer here.
                       So we save any other instances down the stack */
                    save_reg_upstack(vtop->r, 1);
                    /* load from memory */
                    vtop->type.t = load_type;
                    load(r, vtop);
                    vdup();
                    vtop[-1].r = r; /* save register value */
                    /* increment pointer to get second word */
                    incr_offset(PTR_SIZE);
                } else {
                    /* move registers */
                    if (!r_ok)
                        load(r, vtop);
                    if (r2_ok && vtop->r2 < VT_CONST)
                        goto done;
                    vdup();
                    vtop[-1].r = r; /* save register value */
                    vtop->r = vtop[-1].r2;
                }
                /* Allocate second register. Here we rely on the fact that
                   get_reg() tries first to free r2 of an SValue. */
                r2 = get_reg(rc2);
                load(r2, vtop);
                vpop();
                /* write second register */
                vtop->r2 = r2;
            done:
                vtop->type.t = original_type;
            } else {
                if (vtop->r == VT_CMP)
                    vset_VT_JMP();
                /* one register type load */
                load(r, vtop);
            }
        }
        vtop->r = r;
#ifdef TCC_TARGET_C67
        /* uses register pairs for doubles */
        if (bt == VT_DOUBLE)
            vtop->r2 = r+1;
#endif
    }
    return r;
}
/* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
ST_FUNC void gv2(int rc1, int rc2)
{
    /* generate more generic register first. But VT_JMP or VT_CMP
       values must be generated first in all cases to avoid possible
       reload errors */
    if (vtop->r != VT_CMP && rc1 <= rc2) {
        vswap();
        gv(rc1);
        vswap();
        gv(rc2);
        /* test if reload is needed for first register (gv(rc2) may
           have spilled it) */
        if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
            vswap();
            gv(rc1);
            vswap();
        }
    } else {
        gv(rc2);
        vswap();
        gv(rc1);
        vswap();
        /* test if reload is needed for first register */
        if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
            gv(rc2);
        }
    }
}
#if PTR_SIZE == 4
/* expand 64bit on stack in two ints: after this, vtop[-1] is the low
   word and vtop[0] the high word, both typed VT_INT (plus original
   signedness flags). */
ST_FUNC void lexpand(void)
{
    int u, v;
    u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
    v = vtop->r & (VT_VALMASK | VT_LVAL);
    if (v == VT_CONST) {
        /* constant: duplicate and shift for the high word */
        vdup();
        vtop[0].c.i >>= 32;
    } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
        /* memory lvalue: second word is 4 bytes further */
        vdup();
        vtop[0].c.i += 4;
    } else {
        /* in registers: split the register pair */
        gv(RC_INT);
        vdup();
        vtop[0].r = vtop[-1].r2;
        vtop[0].r2 = vtop[-1].r2 = VT_CONST;
    }
    vtop[0].type.t = vtop[-1].type.t = VT_INT | u;
}
#endif
#if PTR_SIZE == 4
/* build a long long from two ints: vtop[-1] = low word, vtop[0] = high
   word; the result is a single stack entry of type 't' using r/r2. */
static void lbuild(int t)
{
    gv2(RC_INT, RC_INT);
    vtop[-1].r2 = vtop[0].r;
    vtop[-1].type.t = t;
    vpop();
}
#endif
/* convert stack entry to register and duplicate its value in another
   register */
static void gv_dup(void)
{
    int t, rc, r;

    t = vtop->type.t;
#if PTR_SIZE == 4
    if ((t & VT_BTYPE) == VT_LLONG) {
        if (t & VT_BITFIELD) {
            /* materialize the bitfield first, then re-read its type */
            gv(RC_INT);
            t = vtop->type.t;
        }
        /* duplicate word by word, then rebuild both long longs */
        lexpand();
        gv_dup();
        vswap();
        vrotb(3);
        gv_dup();
        vrotb(4);
        /* stack: H L L1 H1 */
        lbuild(t);
        vrotb(3);
        vrotb(3);
        vswap();
        lbuild(t);
        vswap();
        return;
    }
#endif
    /* duplicate value */
    rc = RC_TYPE(t);
    gv(rc);
    r = get_reg(rc);
    vdup();
    load(r, vtop);
    vtop->r = r;
}
#if PTR_SIZE == 4
/* generate CPU independent (unsigned) long long operations on 32-bit
   targets: division/modulo and non-constant shifts call libgcc-style
   helpers; add/sub/mul/bitwise and comparisons are synthesized from
   32-bit word operations.  Operands are vtop[-1] op vtop[0]; the result
   replaces them on the value stack. */
static void gen_opl(int op)
{
    int t, a, b, op1, c, i;
    int func;
    unsigned short reg_iret = REG_IRET;
    unsigned short reg_lret = REG_IRE2;
    SValue tmp;

    switch(op) {
    case '/':
    case TOK_PDIV:
        func = TOK___divdi3;
        goto gen_func;
    case TOK_UDIV:
        func = TOK___udivdi3;
        goto gen_func;
    case '%':
        func = TOK___moddi3;
        goto gen_mod_func;
    case TOK_UMOD:
        func = TOK___umoddi3;
    gen_mod_func:
#ifdef TCC_ARM_EABI
        /* EABI aeabi_*divmod helpers return the remainder in r2/r3 */
        reg_iret = TREG_R2;
        reg_lret = TREG_R3;
#endif
    gen_func:
        /* call generic long long function */
        vpush_helper_func(func);
        vrott(3);
        gfunc_call(2);
        vpushi(0);
        vtop->r = reg_iret;
        vtop->r2 = reg_lret;
        break;
    case '^':
    case '&':
    case '|':
    case '*':
    case '+':
    case '-':
        //pv("gen_opl A",0,2);
        t = vtop->type.t;
        vswap();
        lexpand();
        vrotb(3);
        lexpand();
        /* stack: L1 H1 L2 H2 */
        tmp = vtop[0];
        vtop[0] = vtop[-3];
        vtop[-3] = tmp;
        tmp = vtop[-2];
        vtop[-2] = vtop[-3];
        vtop[-3] = tmp;
        vswap();
        /* stack: H1 H2 L1 L2 */
        //pv("gen_opl B",0,4);
        if (op == '*') {
            /* 64x64 multiply from 32-bit parts:
               result = (L1*L2) + ((L1*H2 + H1*L2) << 32) */
            vpushv(vtop - 1);
            vpushv(vtop - 1);
            gen_op(TOK_UMULL);
            lexpand();
            /* stack: H1 H2 L1 L2 ML MH */
            for(i=0;i<4;i++)
                vrotb(6);
            /* stack: ML MH H1 H2 L1 L2 */
            tmp = vtop[0];
            vtop[0] = vtop[-2];
            vtop[-2] = tmp;
            /* stack: ML MH H1 L2 H2 L1 */
            gen_op('*');
            vrotb(3);
            vrotb(3);
            gen_op('*');
            /* stack: ML MH M1 M2 */
            gen_op('+');
            gen_op('+');
        } else if (op == '+' || op == '-') {
            /* XXX: add non carry method too (for MIPS or alpha) */
            if (op == '+')
                op1 = TOK_ADDC1;
            else
                op1 = TOK_SUBC1;
            gen_op(op1);
            /* stack: H1 H2 (L1 op L2) */
            vrotb(3);
            vrotb(3);
            gen_op(op1 + 1); /* TOK_xxxC2 */
        } else {
            /* bitwise ops: operate on the words independently */
            gen_op(op);
            /* stack: H1 H2 (L1 op L2) */
            vrotb(3);
            vrotb(3);
            /* stack: (L1 op L2) H1 H2 */
            gen_op(op);
            /* stack: (L1 op L2) (H1 op H2) */
        }
        /* stack: L H */
        lbuild(t);
        break;
    case TOK_SAR:
    case TOK_SHR:
    case TOK_SHL:
        if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
            t = vtop[-1].type.t;
            vswap();
            lexpand();
            vrotb(3);
            /* stack: L H shift */
            c = (int)vtop->c.i;
            /* constant: simpler */
            /* NOTE: all comments are for SHL. the other cases are
               done by swapping words */
            vpop();
            if (op != TOK_SHL)
                vswap();
            if (c >= 32) {
                /* stack: L H */
                vpop();
                if (c > 32) {
                    vpushi(c - 32);
                    gen_op(op);
                }
                if (op != TOK_SAR) {
                    vpushi(0);
                } else {
                    /* sign-fill the vacated word */
                    gv_dup();
                    vpushi(31);
                    gen_op(TOK_SAR);
                }
                vswap();
            } else {
                vswap();
                gv_dup();
                /* stack: H L L */
                vpushi(c);
                gen_op(op);
                vswap();
                vpushi(32 - c);
                if (op == TOK_SHL)
                    gen_op(TOK_SHR);
                else
                    gen_op(TOK_SHL);
                vrotb(3);
                /* stack: L L H */
                vpushi(c);
                if (op == TOK_SHL)
                    gen_op(TOK_SHL);
                else
                    gen_op(TOK_SHR);
                gen_op('|');
            }
            if (op != TOK_SHL)
                vswap();
            lbuild(t);
        } else {
            /* XXX: should provide a faster fallback on x86 ? */
            switch(op) {
            case TOK_SAR:
                func = TOK___ashrdi3;
                goto gen_func;
            case TOK_SHR:
                func = TOK___lshrdi3;
                goto gen_func;
            case TOK_SHL:
                func = TOK___ashldi3;
                goto gen_func;
            }
        }
        break;
    default:
        /* compare operations */
        t = vtop->type.t;
        vswap();
        lexpand();
        vrotb(3);
        lexpand();
        /* stack: L1 H1 L2 H2 */
        tmp = vtop[-1];
        vtop[-1] = vtop[-2];
        vtop[-2] = tmp;
        /* stack: L1 L2 H1 H2 */
        save_regs(4);
        /* compare high */
        op1 = op;
        /* when values are equal, we need to compare low words. since
           the jump is inverted, we invert the test too. */
        if (op1 == TOK_LT)
            op1 = TOK_LE;
        else if (op1 == TOK_GT)
            op1 = TOK_GE;
        else if (op1 == TOK_ULT)
            op1 = TOK_ULE;
        else if (op1 == TOK_UGT)
            op1 = TOK_UGE;
        a = 0;
        b = 0;
        gen_op(op1);
        if (op == TOK_NE) {
            b = gvtst(0, 0);
        } else {
            a = gvtst(1, 0);
            if (op != TOK_EQ) {
                /* generate non equal test */
                vpushi(0);
                vset_VT_CMP(TOK_NE);
                b = gvtst(0, 0);
            }
        }
        /* compare low. Always unsigned */
        op1 = op;
        if (op1 == TOK_LT)
            op1 = TOK_ULT;
        else if (op1 == TOK_LE)
            op1 = TOK_ULE;
        else if (op1 == TOK_GT)
            op1 = TOK_UGT;
        else if (op1 == TOK_GE)
            op1 = TOK_UGE;
        gen_op(op1);
#if 0//def TCC_TARGET_I386
        if (op == TOK_NE) { gsym(b); break; }
        if (op == TOK_EQ) { gsym(a); break; }
#endif
        gvtst_set(1, a);
        gvtst_set(0, b);
        break;
    }
}
#endif
/* Signed 64-bit division on uint64_t operands, truncating toward zero
   (C semantics), without relying on implementation-defined signed
   division of the host: divide magnitudes, then apply the sign. */
static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
{
    uint64_t ua = (a >> 63) ? 0 - a : a;
    uint64_t ub = (b >> 63) ? 0 - b : b;
    uint64_t q = ua / ub;

    if ((a ^ b) >> 63)
        q = 0 - q; /* operands differ in sign: negative quotient */
    return q;
}
/* Signed less-than on uint64_t operands: flipping the sign bit maps
   signed order onto unsigned order. */
static int gen_opic_lt(uint64_t a, uint64_t b)
{
    const uint64_t sign_bit = (uint64_t)1 << 63;
    return (a ^ sign_bit) < (b ^ sign_bit);
}
/* handle integer constant optimizations and various machine
   independent opt: constant-fold when both operands are constants,
   apply algebraic identities (x*1, x+0, x&-1, strength-reduce *,/ by
   powers of two, fold symbol+constant), and otherwise fall through to
   the machine-level generators gen_opi()/gen_opl(). */
static void gen_opic(int op)
{
    SValue *v1 = vtop - 1;
    SValue *v2 = vtop;
    int t1 = v1->type.t & VT_BTYPE;
    int t2 = v2->type.t & VT_BTYPE;
    int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
    int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
    uint64_t l1 = c1 ? v1->c.i : 0;
    uint64_t l2 = c2 ? v2->c.i : 0;
    int shm = (t1 == VT_LLONG) ? 63 : 31; /* mask over-wide shift counts */
    int r;

    /* sign- or zero-extend 32-bit operands to canonical 64-bit form */
    if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
        l1 = ((uint32_t)l1 |
              (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
    if (t2 != VT_LLONG && (PTR_SIZE != 8 || t2 != VT_PTR))
        l2 = ((uint32_t)l2 |
              (v2->type.t & VT_UNSIGNED ? 0 : -(l2 & 0x80000000)));

    if (c1 && c2) {
        /* both constant: fold at compile time */
        switch(op) {
        case '+': l1 += l2; break;
        case '-': l1 -= l2; break;
        case '&': l1 &= l2; break;
        case '^': l1 ^= l2; break;
        case '|': l1 |= l2; break;
        case '*': l1 *= l2; break;

        case TOK_PDIV:
        case '/':
        case '%':
        case TOK_UDIV:
        case TOK_UMOD:
            /* if division by zero, generate explicit division */
            if (l2 == 0) {
                if (CONST_WANTED && !NOEVAL_WANTED)
                    tcc_error("division by zero in constant");
                goto general_case;
            }
            switch(op) {
            default: l1 = gen_opic_sdiv(l1, l2); break;
            case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
            case TOK_UDIV: l1 = l1 / l2; break;
            case TOK_UMOD: l1 = l1 % l2; break;
            }
            break;
        case TOK_SHL: l1 <<= (l2 & shm); break;
        case TOK_SHR: l1 >>= (l2 & shm); break;
        case TOK_SAR:
            /* arithmetic shift without relying on host impl-defined >> */
            l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
            break;
        /* tests */
        case TOK_ULT: l1 = l1 < l2; break;
        case TOK_UGE: l1 = l1 >= l2; break;
        case TOK_EQ: l1 = l1 == l2; break;
        case TOK_NE: l1 = l1 != l2; break;
        case TOK_ULE: l1 = l1 <= l2; break;
        case TOK_UGT: l1 = l1 > l2; break;
        case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
        case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
        case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
        case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
        /* logical */
        case TOK_LAND: l1 = l1 && l2; break;
        case TOK_LOR: l1 = l1 || l2; break;
        default:
            goto general_case;
        }
        /* re-normalize the folded result to the operand width */
        if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
            l1 = ((uint32_t)l1 |
                (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
        v1->c.i = l1;
        v1->r |= v2->r & VT_NONCONST;
        vtop--;
    } else {
        /* if commutative ops, put c2 as constant */
        if (c1 && (op == '+' || op == '&' || op == '^' ||
                   op == '|' || op == '*' || op == TOK_EQ || op == TOK_NE)) {
            vswap();
            c2 = c1; //c = c1, c1 = c2, c2 = c;
            l2 = l1; //l = l1, l1 = l2, l2 = l;
        }
        if (c1 && ((l1 == 0 &&
                    (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
                   (l1 == -1 && op == TOK_SAR))) {
            /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
            vpop();
        } else if (c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
                          (op == '|' &&
                            (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))) ||
                          (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
            /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
            if (l2 == 1)
                vtop->c.i = 0;
            vswap();
            vtop--;
        } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
                            op == TOK_PDIV) &&
                           l2 == 1) ||
                          ((op == '+' || op == '-' || op == '|' || op == '^' ||
                            op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
                           l2 == 0) ||
                          (op == '&' &&
                            (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))))) {
            /* filter out NOP operations like x*1, x-0, x&-1... */
            vtop--;
        } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
            /* try to use shifts instead of muls or divs */
            if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
                int n = -1;
                while (l2) {
                    l2 >>= 1;
                    n++;
                }
                vtop->c.i = n;
                if (op == '*')
                    op = TOK_SHL;
                else if (op == TOK_PDIV)
                    op = TOK_SAR;
                else
                    op = TOK_SHR;
            }
            goto general_case;
        } else if (c2 && (op == '+' || op == '-') &&
                   (r = vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM),
                    r == (VT_CONST | VT_SYM) || r == VT_LOCAL)) {
            /* symbol + constant case */
            if (op == '-')
                l2 = -l2;
            l2 += vtop[-1].c.i;
            /* The backends can't always deal with addends to symbols
               larger than +-1<<31.  Don't construct such. */
            if ((int)l2 != l2)
                goto general_case;
            vtop--;
            vtop->c.i = l2;
        } else {
        general_case:
            /* call low level op generator */
            if (t1 == VT_LLONG || t2 == VT_LLONG ||
                (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
                gen_opl(op);
            else
                gen_opi(op);
        }
        if (vtop->r == VT_CONST)
            vtop->r |= VT_NONCONST; /* is const, but only by optimization */
    }
}
/* Floating-point negation, per target.  x86 backends handle TOK_NEG in
   gen_opf(); ARM relies on 0-x being peepholed to vneg; the generic
   fallback flips the sign bit in memory. */
#if defined TCC_TARGET_X86_64 || defined TCC_TARGET_I386
# define gen_negf gen_opf
#elif defined TCC_TARGET_ARM
void gen_negf(int op)
{
    /* arm will detect 0-x and replace by vneg */
    vpushi(0), vswap(), gen_op('-');
}
#else
/* XXX: implement in gen_opf() for other backends too */
void gen_negf(int op)
{
    /* In IEEE negate(x) isn't subtract(0,x).  Without NaNs it's
       subtract(-0, x), but with them it's really a sign flip
       operation.  We implement this with bit manipulation and have
       to do some type reinterpretation for this, which TCC can do
       only via memory. */

    int align, size, bt;

    size = type_size(&vtop->type, &align);
    bt = vtop->type.t & VT_BTYPE;
    /* force the value to memory so the sign byte can be addressed */
    save_reg(gv(RC_TYPE(bt)));
    vdup();
    incr_bf_adr(size - 1);
    vdup();
    vpushi(0x80); /* flip sign */
    gen_op('^');
    vstore();
    vpop();
}
#endif
2484 /* generate a floating point operation with constant propagation */
2485 static void gen_opif(int op)
2487 int c1, c2, i, bt;
2488 SValue *v1, *v2;
2489 #if defined _MSC_VER && defined __x86_64__
2490 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2491 volatile
2492 #endif
2493 long double f1, f2;
2495 v1 = vtop - 1;
2496 v2 = vtop;
2497 if (op == TOK_NEG)
2498 v1 = v2;
2499 bt = v1->type.t & VT_BTYPE;
2501 /* currently, we cannot do computations with forward symbols */
2502 c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2503 c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2504 if (c1 && c2) {
2505 if (bt == VT_FLOAT) {
2506 f1 = v1->c.f;
2507 f2 = v2->c.f;
2508 } else if (bt == VT_DOUBLE) {
2509 f1 = v1->c.d;
2510 f2 = v2->c.d;
2511 } else {
2512 f1 = v1->c.ld;
2513 f2 = v2->c.ld;
2515 /* NOTE: we only do constant propagation if finite number (not
2516 NaN or infinity) (ANSI spec) */
2517 if (!(ieee_finite(f1) || !ieee_finite(f2)) && !CONST_WANTED)
2518 goto general_case;
2519 switch(op) {
2520 case '+': f1 += f2; break;
2521 case '-': f1 -= f2; break;
2522 case '*': f1 *= f2; break;
2523 case '/':
2524 if (f2 == 0.0) {
2525 union { float f; unsigned u; } x1, x2, y;
2526 /* If not in initializer we need to potentially generate
2527 FP exceptions at runtime, otherwise we want to fold. */
2528 if (!CONST_WANTED)
2529 goto general_case;
2530 /* the run-time result of 0.0/0.0 on x87, also of other compilers
2531 when used to compile the f1 /= f2 below, would be -nan */
2532 x1.f = f1, x2.f = f2;
2533 if (f1 == 0.0)
2534 y.u = 0x7fc00000; /* nan */
2535 else
2536 y.u = 0x7f800000; /* infinity */
2537 y.u |= (x1.u ^ x2.u) & 0x80000000; /* set sign */
2538 f1 = y.f;
2539 break;
2541 f1 /= f2;
2542 break;
2543 case TOK_NEG:
2544 f1 = -f1;
2545 goto unary_result;
2546 case TOK_EQ:
2547 i = f1 == f2;
2548 make_int:
2549 vtop -= 2;
2550 vpushi(i);
2551 return;
2552 case TOK_NE:
2553 i = f1 != f2;
2554 goto make_int;
2555 case TOK_LT:
2556 i = f1 < f2;
2557 goto make_int;
2558 case TOK_GE:
2559 i = f1 >= f2;
2560 goto make_int;
2561 case TOK_LE:
2562 i = f1 <= f2;
2563 goto make_int;
2564 case TOK_GT:
2565 i = f1 > f2;
2566 goto make_int;
2567 default:
2568 goto general_case;
2570 vtop--;
2571 unary_result:
2572 /* XXX: overflow test ? */
2573 if (bt == VT_FLOAT) {
2574 v1->c.f = f1;
2575 } else if (bt == VT_DOUBLE) {
2576 v1->c.d = f1;
2577 } else {
2578 v1->c.ld = f1;
2580 } else {
2581 general_case:
2582 if (op == TOK_NEG) {
2583 gen_negf(op);
2584 } else {
2585 gen_opf(op);
2590 /* print a type. If 'varstr' is not NULL, then the variable is also
2591 printed in the type */
2592 /* XXX: union */
2593 /* XXX: add array and function pointers */
2594 static void type_to_str(char *buf, int buf_size,
2595 CType *type, const char *varstr)
2597 int bt, v, t;
2598 Sym *s, *sa;
2599 char buf1[256];
2600 const char *tstr;
2602 t = type->t;
2603 bt = t & VT_BTYPE;
2604 buf[0] = '\0';
2606 if (t & VT_EXTERN)
2607 pstrcat(buf, buf_size, "extern ");
2608 if (t & VT_STATIC)
2609 pstrcat(buf, buf_size, "static ");
2610 if (t & VT_TYPEDEF)
2611 pstrcat(buf, buf_size, "typedef ");
2612 if (t & VT_INLINE)
2613 pstrcat(buf, buf_size, "inline ");
2614 if (bt != VT_PTR) {
2615 if (t & VT_VOLATILE)
2616 pstrcat(buf, buf_size, "volatile ");
2617 if (t & VT_CONSTANT)
2618 pstrcat(buf, buf_size, "const ");
2620 if (((t & VT_DEFSIGN) && bt == VT_BYTE)
2621 || ((t & VT_UNSIGNED)
2622 && (bt == VT_SHORT || bt == VT_INT || bt == VT_LLONG)
2623 && !IS_ENUM(t)
2625 pstrcat(buf, buf_size, (t & VT_UNSIGNED) ? "unsigned " : "signed ");
2627 buf_size -= strlen(buf);
2628 buf += strlen(buf);
2630 switch(bt) {
2631 case VT_VOID:
2632 tstr = "void";
2633 goto add_tstr;
2634 case VT_BOOL:
2635 tstr = "_Bool";
2636 goto add_tstr;
2637 case VT_BYTE:
2638 tstr = "char";
2639 goto add_tstr;
2640 case VT_SHORT:
2641 tstr = "short";
2642 goto add_tstr;
2643 case VT_INT:
2644 tstr = "int";
2645 goto maybe_long;
2646 case VT_LLONG:
2647 tstr = "long long";
2648 maybe_long:
2649 if (t & VT_LONG)
2650 tstr = "long";
2651 if (!IS_ENUM(t))
2652 goto add_tstr;
2653 tstr = "enum ";
2654 goto tstruct;
2655 case VT_FLOAT:
2656 tstr = "float";
2657 goto add_tstr;
2658 case VT_DOUBLE:
2659 tstr = "double";
2660 if (!(t & VT_LONG))
2661 goto add_tstr;
2662 case VT_LDOUBLE:
2663 tstr = "long double";
2664 add_tstr:
2665 pstrcat(buf, buf_size, tstr);
2666 break;
2667 case VT_STRUCT:
2668 tstr = "struct ";
2669 if (IS_UNION(t))
2670 tstr = "union ";
2671 tstruct:
2672 pstrcat(buf, buf_size, tstr);
2673 v = type->ref->v & ~SYM_STRUCT;
2674 if (v >= SYM_FIRST_ANOM)
2675 pstrcat(buf, buf_size, "<anonymous>");
2676 else
2677 pstrcat(buf, buf_size, get_tok_str(v, NULL));
2678 break;
2679 case VT_FUNC:
2680 s = type->ref;
2681 buf1[0]=0;
2682 if (varstr && '*' == *varstr) {
2683 pstrcat(buf1, sizeof(buf1), "(");
2684 pstrcat(buf1, sizeof(buf1), varstr);
2685 pstrcat(buf1, sizeof(buf1), ")");
2687 pstrcat(buf1, buf_size, "(");
2688 sa = s->next;
2689 while (sa != NULL) {
2690 char buf2[256];
2691 type_to_str(buf2, sizeof(buf2), &sa->type, NULL);
2692 pstrcat(buf1, sizeof(buf1), buf2);
2693 sa = sa->next;
2694 if (sa)
2695 pstrcat(buf1, sizeof(buf1), ", ");
2697 if (s->f.func_type == FUNC_ELLIPSIS)
2698 pstrcat(buf1, sizeof(buf1), ", ...");
2699 pstrcat(buf1, sizeof(buf1), ")");
2700 type_to_str(buf, buf_size, &s->type, buf1);
2701 goto no_var;
2702 case VT_PTR:
2703 s = type->ref;
2704 if (t & (VT_ARRAY|VT_VLA)) {
2705 if (varstr && '*' == *varstr)
2706 snprintf(buf1, sizeof(buf1), "(%s)[%d]", varstr, s->c);
2707 else
2708 snprintf(buf1, sizeof(buf1), "%s[%d]", varstr ? varstr : "", s->c);
2709 type_to_str(buf, buf_size, &s->type, buf1);
2710 goto no_var;
2712 pstrcpy(buf1, sizeof(buf1), "*");
2713 if (t & VT_CONSTANT)
2714 pstrcat(buf1, buf_size, "const ");
2715 if (t & VT_VOLATILE)
2716 pstrcat(buf1, buf_size, "volatile ");
2717 if (varstr)
2718 pstrcat(buf1, sizeof(buf1), varstr);
2719 type_to_str(buf, buf_size, &s->type, buf1);
2720 goto no_var;
2722 if (varstr) {
2723 pstrcat(buf, buf_size, " ");
2724 pstrcat(buf, buf_size, varstr);
2726 no_var: ;
2729 static void type_incompatibility_error(CType* st, CType* dt, const char* fmt)
2731 char buf1[256], buf2[256];
2732 type_to_str(buf1, sizeof(buf1), st, NULL);
2733 type_to_str(buf2, sizeof(buf2), dt, NULL);
2734 tcc_error(fmt, buf1, buf2);
2737 static void type_incompatibility_warning(CType* st, CType* dt, const char* fmt)
2739 char buf1[256], buf2[256];
2740 type_to_str(buf1, sizeof(buf1), st, NULL);
2741 type_to_str(buf2, sizeof(buf2), dt, NULL);
2742 tcc_warning(fmt, buf1, buf2);
2745 static int pointed_size(CType *type)
2747 int align;
2748 return type_size(pointed_type(type), &align);
2751 static inline int is_null_pointer(SValue *p)
2753 if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM | VT_NONCONST)) != VT_CONST)
2754 return 0;
2755 return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
2756 ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
2757 ((p->type.t & VT_BTYPE) == VT_PTR &&
2758 (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0) &&
2759 ((pointed_type(&p->type)->t & VT_BTYPE) == VT_VOID) &&
2760 0 == (pointed_type(&p->type)->t & (VT_CONSTANT | VT_VOLATILE))
2764 /* compare function types. OLD functions match any new functions */
2765 static int is_compatible_func(CType *type1, CType *type2)
2767 Sym *s1, *s2;
2769 s1 = type1->ref;
2770 s2 = type2->ref;
2771 if (s1->f.func_call != s2->f.func_call)
2772 return 0;
2773 if (s1->f.func_type != s2->f.func_type
2774 && s1->f.func_type != FUNC_OLD
2775 && s2->f.func_type != FUNC_OLD)
2776 return 0;
2777 for (;;) {
2778 if (!is_compatible_unqualified_types(&s1->type, &s2->type))
2779 return 0;
2780 if (s1->f.func_type == FUNC_OLD || s2->f.func_type == FUNC_OLD )
2781 return 1;
2782 s1 = s1->next;
2783 s2 = s2->next;
2784 if (!s1)
2785 return !s2;
2786 if (!s2)
2787 return 0;
2791 /* return true if type1 and type2 are the same. If unqualified is
2792 true, qualifiers on the types are ignored.
2794 static int compare_types(CType *type1, CType *type2, int unqualified)
2796 int bt1, t1, t2;
2798 t1 = type1->t & VT_TYPE;
2799 t2 = type2->t & VT_TYPE;
2800 if (unqualified) {
2801 /* strip qualifiers before comparing */
2802 t1 &= ~(VT_CONSTANT | VT_VOLATILE);
2803 t2 &= ~(VT_CONSTANT | VT_VOLATILE);
2806 /* Default Vs explicit signedness only matters for char */
2807 if ((t1 & VT_BTYPE) != VT_BYTE) {
2808 t1 &= ~VT_DEFSIGN;
2809 t2 &= ~VT_DEFSIGN;
2811 /* XXX: bitfields ? */
2812 if (t1 != t2)
2813 return 0;
2815 if ((t1 & VT_ARRAY)
2816 && !(type1->ref->c < 0
2817 || type2->ref->c < 0
2818 || type1->ref->c == type2->ref->c))
2819 return 0;
2821 /* test more complicated cases */
2822 bt1 = t1 & VT_BTYPE;
2823 if (bt1 == VT_PTR) {
2824 type1 = pointed_type(type1);
2825 type2 = pointed_type(type2);
2826 return is_compatible_types(type1, type2);
2827 } else if (bt1 == VT_STRUCT) {
2828 return (type1->ref == type2->ref);
2829 } else if (bt1 == VT_FUNC) {
2830 return is_compatible_func(type1, type2);
2831 } else if (IS_ENUM(type1->t) && IS_ENUM(type2->t)) {
2832 /* If both are enums then they must be the same, if only one is then
2833 t1 and t2 must be equal, which was checked above already. */
2834 return type1->ref == type2->ref;
2835 } else {
2836 return 1;
2840 #define CMP_OP 'C'
2841 #define SHIFT_OP 'S'
2843 /* Check if OP1 and OP2 can be "combined" with operation OP, the combined
2844 type is stored in DEST if non-null (except for pointer plus/minus) . */
2845 static int combine_types(CType *dest, SValue *op1, SValue *op2, int op)
2847 CType *type1, *type2, type;
2848 int t1, t2, bt1, bt2;
2849 int ret = 1;
2851 /* for shifts, 'combine' only left operand */
2852 if (op == SHIFT_OP)
2853 op2 = op1;
2855 type1 = &op1->type, type2 = &op2->type;
2856 t1 = type1->t, t2 = type2->t;
2857 bt1 = t1 & VT_BTYPE, bt2 = t2 & VT_BTYPE;
2859 type.t = VT_VOID;
2860 type.ref = NULL;
2862 if (bt1 == VT_VOID || bt2 == VT_VOID) {
2863 ret = op == '?' ? 1 : 0;
2864 /* NOTE: as an extension, we accept void on only one side */
2865 type.t = VT_VOID;
2866 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
2867 if (op == '+') {
2868 if (!is_integer_btype(bt1 == VT_PTR ? bt2 : bt1))
2869 ret = 0;
2871 /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
2872 /* If one is a null ptr constant the result type is the other. */
2873 else if (is_null_pointer (op2)) type = *type1;
2874 else if (is_null_pointer (op1)) type = *type2;
2875 else if (bt1 != bt2) {
2876 /* accept comparison or cond-expr between pointer and integer
2877 with a warning */
2878 if ((op == '?' || op == CMP_OP)
2879 && (is_integer_btype(bt1) || is_integer_btype(bt2)))
2880 tcc_warning("pointer/integer mismatch in %s",
2881 op == '?' ? "conditional expression" : "comparison");
2882 else if (op != '-' || !is_integer_btype(bt2))
2883 ret = 0;
2884 type = *(bt1 == VT_PTR ? type1 : type2);
2885 } else {
2886 CType *pt1 = pointed_type(type1);
2887 CType *pt2 = pointed_type(type2);
2888 int pbt1 = pt1->t & VT_BTYPE;
2889 int pbt2 = pt2->t & VT_BTYPE;
2890 int newquals, copied = 0;
2891 if (pbt1 != VT_VOID && pbt2 != VT_VOID
2892 && !compare_types(pt1, pt2, 1/*unqualif*/)) {
2893 if (op != '?' && op != CMP_OP)
2894 ret = 0;
2895 else
2896 type_incompatibility_warning(type1, type2,
2897 op == '?'
2898 ? "pointer type mismatch in conditional expression ('%s' and '%s')"
2899 : "pointer type mismatch in comparison('%s' and '%s')");
2901 if (op == '?') {
2902 /* pointers to void get preferred, otherwise the
2903 pointed to types minus qualifs should be compatible */
2904 type = *((pbt1 == VT_VOID) ? type1 : type2);
2905 /* combine qualifs */
2906 newquals = ((pt1->t | pt2->t) & (VT_CONSTANT | VT_VOLATILE));
2907 if ((~pointed_type(&type)->t & (VT_CONSTANT | VT_VOLATILE))
2908 & newquals)
2910 /* copy the pointer target symbol */
2911 type.ref = sym_push(SYM_FIELD, &type.ref->type,
2912 0, type.ref->c);
2913 copied = 1;
2914 pointed_type(&type)->t |= newquals;
2916 /* pointers to incomplete arrays get converted to
2917 pointers to completed ones if possible */
2918 if (pt1->t & VT_ARRAY
2919 && pt2->t & VT_ARRAY
2920 && pointed_type(&type)->ref->c < 0
2921 && (pt1->ref->c > 0 || pt2->ref->c > 0))
2923 if (!copied)
2924 type.ref = sym_push(SYM_FIELD, &type.ref->type,
2925 0, type.ref->c);
2926 pointed_type(&type)->ref =
2927 sym_push(SYM_FIELD, &pointed_type(&type)->ref->type,
2928 0, pointed_type(&type)->ref->c);
2929 pointed_type(&type)->ref->c =
2930 0 < pt1->ref->c ? pt1->ref->c : pt2->ref->c;
2934 if (op == CMP_OP)
2935 type.t = VT_SIZE_T;
2936 } else if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
2937 if (op != '?' || !compare_types(type1, type2, 1))
2938 ret = 0;
2939 type = *type1;
2940 } else if (is_float(bt1) || is_float(bt2)) {
2941 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
2942 type.t = VT_LDOUBLE;
2943 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
2944 type.t = VT_DOUBLE;
2945 } else {
2946 type.t = VT_FLOAT;
2948 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
2949 /* cast to biggest op */
2950 type.t = VT_LLONG | VT_LONG;
2951 if (bt1 == VT_LLONG)
2952 type.t &= t1;
2953 if (bt2 == VT_LLONG)
2954 type.t &= t2;
2955 /* convert to unsigned if it does not fit in a long long */
2956 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
2957 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
2958 type.t |= VT_UNSIGNED;
2959 } else {
2960 /* integer operations */
2961 type.t = VT_INT | (VT_LONG & (t1 | t2));
2962 /* convert to unsigned if it does not fit in an integer */
2963 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
2964 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
2965 type.t |= VT_UNSIGNED;
2967 if (dest)
2968 *dest = type;
2969 return ret;
2972 /* generic gen_op: handles types problems */
2973 ST_FUNC void gen_op(int op)
2975 int t1, t2, bt1, bt2, t;
2976 CType type1, combtype;
2977 int op_class = op;
2979 if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
2980 op_class = SHIFT_OP;
2981 else if (TOK_ISCOND(op)) /* == != > ... */
2982 op_class = CMP_OP;
2984 redo:
2985 t1 = vtop[-1].type.t;
2986 t2 = vtop[0].type.t;
2987 bt1 = t1 & VT_BTYPE;
2988 bt2 = t2 & VT_BTYPE;
2990 if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
2991 if (bt2 == VT_FUNC) {
2992 mk_pointer(&vtop->type);
2993 gaddrof();
2995 if (bt1 == VT_FUNC) {
2996 vswap();
2997 mk_pointer(&vtop->type);
2998 gaddrof();
2999 vswap();
3001 goto redo;
3002 } else if (!combine_types(&combtype, vtop - 1, vtop, op_class)) {
3003 op_err:
3004 tcc_error("invalid operand types for binary operation");
3005 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
3006 /* at least one operand is a pointer */
3007 /* relational op: must be both pointers */
3008 int align;
3009 if (op_class == CMP_OP)
3010 goto std_op;
3011 /* if both pointers, then it must be the '-' op */
3012 if (bt1 == VT_PTR && bt2 == VT_PTR) {
3013 if (op != '-')
3014 goto op_err;
3015 vpush_type_size(pointed_type(&vtop[-1].type), &align);
3016 vtop->type.t &= ~VT_UNSIGNED;
3017 vrott(3);
3018 gen_opic(op);
3019 vtop->type.t = VT_PTRDIFF_T;
3020 vswap();
3021 gen_op(TOK_PDIV);
3022 } else {
3023 /* exactly one pointer : must be '+' or '-'. */
3024 if (op != '-' && op != '+')
3025 goto op_err;
3026 /* Put pointer as first operand */
3027 if (bt2 == VT_PTR) {
3028 vswap();
3029 t = t1, t1 = t2, t2 = t;
3030 bt2 = bt1;
3032 #if PTR_SIZE == 4
3033 if (bt2 == VT_LLONG)
3034 /* XXX: truncate here because gen_opl can't handle ptr + long long */
3035 gen_cast_s(VT_INT);
3036 #endif
3037 type1 = vtop[-1].type;
3038 vpush_type_size(pointed_type(&vtop[-1].type), &align);
3039 gen_op('*');
3040 #ifdef CONFIG_TCC_BCHECK
3041 if (tcc_state->do_bounds_check && !CONST_WANTED) {
3042 /* if bounded pointers, we generate a special code to
3043 test bounds */
3044 if (op == '-') {
3045 vpushi(0);
3046 vswap();
3047 gen_op('-');
3049 gen_bounded_ptr_add();
3050 } else
3051 #endif
3053 gen_opic(op);
3055 type1.t &= ~(VT_ARRAY|VT_VLA);
3056 /* put again type if gen_opic() swaped operands */
3057 vtop->type = type1;
3059 } else {
3060 /* floats can only be used for a few operations */
3061 if (is_float(combtype.t)
3062 && op != '+' && op != '-' && op != '*' && op != '/'
3063 && op_class != CMP_OP) {
3064 goto op_err;
3066 std_op:
3067 t = t2 = combtype.t;
3068 /* special case for shifts and long long: we keep the shift as
3069 an integer */
3070 if (op_class == SHIFT_OP)
3071 t2 = VT_INT;
3072 /* XXX: currently, some unsigned operations are explicit, so
3073 we modify them here */
3074 if (t & VT_UNSIGNED) {
3075 if (op == TOK_SAR)
3076 op = TOK_SHR;
3077 else if (op == '/')
3078 op = TOK_UDIV;
3079 else if (op == '%')
3080 op = TOK_UMOD;
3081 else if (op == TOK_LT)
3082 op = TOK_ULT;
3083 else if (op == TOK_GT)
3084 op = TOK_UGT;
3085 else if (op == TOK_LE)
3086 op = TOK_ULE;
3087 else if (op == TOK_GE)
3088 op = TOK_UGE;
3090 vswap();
3091 gen_cast_s(t);
3092 vswap();
3093 gen_cast_s(t2);
3094 if (is_float(t))
3095 gen_opif(op);
3096 else
3097 gen_opic(op);
3098 if (op_class == CMP_OP) {
3099 /* relational op: the result is an int */
3100 vtop->type.t = VT_INT;
3101 } else {
3102 vtop->type.t = t;
3105 // Make sure that we have converted to an rvalue:
3106 if (vtop->r & VT_LVAL)
3107 gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
3110 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64 || defined TCC_TARGET_ARM
3111 #define gen_cvt_itof1 gen_cvt_itof
3112 #else
3113 /* generic itof for unsigned long long case */
3114 static void gen_cvt_itof1(int t)
3116 if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
3117 (VT_LLONG | VT_UNSIGNED)) {
3119 if (t == VT_FLOAT)
3120 vpush_helper_func(TOK___floatundisf);
3121 #if LDOUBLE_SIZE != 8
3122 else if (t == VT_LDOUBLE)
3123 vpush_helper_func(TOK___floatundixf);
3124 #endif
3125 else
3126 vpush_helper_func(TOK___floatundidf);
3127 vrott(2);
3128 gfunc_call(1);
3129 vpushi(0);
3130 PUT_R_RET(vtop, t);
3131 } else {
3132 gen_cvt_itof(t);
3135 #endif
3137 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64
3138 #define gen_cvt_ftoi1 gen_cvt_ftoi
3139 #else
3140 /* generic ftoi for unsigned long long case */
3141 static void gen_cvt_ftoi1(int t)
3143 int st;
3144 if (t == (VT_LLONG | VT_UNSIGNED)) {
3145 /* not handled natively */
3146 st = vtop->type.t & VT_BTYPE;
3147 if (st == VT_FLOAT)
3148 vpush_helper_func(TOK___fixunssfdi);
3149 #if LDOUBLE_SIZE != 8
3150 else if (st == VT_LDOUBLE)
3151 vpush_helper_func(TOK___fixunsxfdi);
3152 #endif
3153 else
3154 vpush_helper_func(TOK___fixunsdfdi);
3155 vrott(2);
3156 gfunc_call(1);
3157 vpushi(0);
3158 PUT_R_RET(vtop, t);
3159 } else {
3160 gen_cvt_ftoi(t);
3163 #endif
3165 /* special delayed cast for char/short */
3166 static void force_charshort_cast(void)
3168 int sbt = BFGET(vtop->r, VT_MUSTCAST) == 2 ? VT_LLONG : VT_INT;
3169 int dbt = vtop->type.t;
3170 vtop->r &= ~VT_MUSTCAST;
3171 vtop->type.t = sbt;
3172 gen_cast_s(dbt == VT_BOOL ? VT_BYTE|VT_UNSIGNED : dbt);
3173 vtop->type.t = dbt;
3176 static void gen_cast_s(int t)
3178 CType type;
3179 type.t = t;
3180 type.ref = NULL;
3181 gen_cast(&type);
3184 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
3185 static void gen_cast(CType *type)
3187 int sbt, dbt, sf, df, c;
3188 int dbt_bt, sbt_bt, ds, ss, bits, trunc;
3190 /* special delayed cast for char/short */
3191 if (vtop->r & VT_MUSTCAST)
3192 force_charshort_cast();
3194 /* bitfields first get cast to ints */
3195 if (vtop->type.t & VT_BITFIELD)
3196 gv(RC_INT);
3198 dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
3199 sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
3200 if (sbt == VT_FUNC)
3201 sbt = VT_PTR;
3203 again:
3204 if (sbt != dbt) {
3205 sf = is_float(sbt);
3206 df = is_float(dbt);
3207 dbt_bt = dbt & VT_BTYPE;
3208 sbt_bt = sbt & VT_BTYPE;
3209 if (dbt_bt == VT_VOID)
3210 goto done;
3211 if (sbt_bt == VT_VOID) {
3212 error:
3213 cast_error(&vtop->type, type);
3216 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
3217 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
3218 /* don't try to convert to ldouble when cross-compiling
3219 (except when it's '0' which is needed for arm:gen_negf()) */
3220 if (dbt_bt == VT_LDOUBLE && !nocode_wanted && (sf || vtop->c.i != 0))
3221 c = 0;
3222 #endif
3223 if (c) {
3224 /* constant case: we can do it now */
3225 /* XXX: in ISOC, cannot do it if error in convert */
3226 if (sbt == VT_FLOAT)
3227 vtop->c.ld = vtop->c.f;
3228 else if (sbt == VT_DOUBLE)
3229 vtop->c.ld = vtop->c.d;
3231 if (df) {
3232 if (sbt_bt == VT_LLONG) {
3233 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
3234 vtop->c.ld = vtop->c.i;
3235 else
3236 vtop->c.ld = -(long double)-vtop->c.i;
3237 } else if(!sf) {
3238 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
3239 vtop->c.ld = (uint32_t)vtop->c.i;
3240 else
3241 vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
3244 if (dbt == VT_FLOAT)
3245 vtop->c.f = (float)vtop->c.ld;
3246 else if (dbt == VT_DOUBLE)
3247 vtop->c.d = (double)vtop->c.ld;
3248 } else if (sf && dbt == VT_BOOL) {
3249 vtop->c.i = (vtop->c.ld != 0);
3250 } else {
3251 if(sf)
3252 vtop->c.i = vtop->c.ld;
3253 else if (sbt_bt == VT_LLONG || (PTR_SIZE == 8 && sbt == VT_PTR))
3255 else if (sbt & VT_UNSIGNED)
3256 vtop->c.i = (uint32_t)vtop->c.i;
3257 else
3258 vtop->c.i = ((uint32_t)vtop->c.i | -(vtop->c.i & 0x80000000));
3260 if (dbt_bt == VT_LLONG || (PTR_SIZE == 8 && dbt == VT_PTR))
3262 else if (dbt == VT_BOOL)
3263 vtop->c.i = (vtop->c.i != 0);
3264 else {
3265 uint32_t m = dbt_bt == VT_BYTE ? 0xff :
3266 dbt_bt == VT_SHORT ? 0xffff :
3267 0xffffffff;
3268 vtop->c.i &= m;
3269 if (!(dbt & VT_UNSIGNED))
3270 vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
3273 goto done;
3275 } else if (dbt == VT_BOOL
3276 && (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM))
3277 == (VT_CONST | VT_SYM)) {
3278 /* addresses are considered non-zero (see tcctest.c:sinit23) */
3279 vtop->r = VT_CONST;
3280 vtop->c.i = 1;
3281 goto done;
3284 /* cannot generate code for global or static initializers */
3285 if (nocode_wanted & DATA_ONLY_WANTED)
3286 goto done;
3288 /* non constant case: generate code */
3289 if (dbt == VT_BOOL) {
3290 gen_test_zero(TOK_NE);
3291 goto done;
3294 if (sf || df) {
3295 if (sf && df) {
3296 /* convert from fp to fp */
3297 gen_cvt_ftof(dbt);
3298 } else if (df) {
3299 /* convert int to fp */
3300 gen_cvt_itof1(dbt);
3301 } else {
3302 /* convert fp to int */
3303 sbt = dbt;
3304 if (dbt_bt != VT_LLONG && dbt_bt != VT_INT)
3305 sbt = VT_INT;
3306 gen_cvt_ftoi1(sbt);
3307 goto again; /* may need char/short cast */
3309 goto done;
3312 ds = btype_size(dbt_bt);
3313 ss = btype_size(sbt_bt);
3314 if (ds == 0 || ss == 0)
3315 goto error;
3317 if (IS_ENUM(type->t) && type->ref->c < 0)
3318 tcc_error("cast to incomplete type");
3320 /* same size and no sign conversion needed */
3321 if (ds == ss && ds >= 4)
3322 goto done;
3323 if (dbt_bt == VT_PTR || sbt_bt == VT_PTR) {
3324 tcc_warning("cast between pointer and integer of different size");
3325 if (sbt_bt == VT_PTR) {
3326 /* put integer type to allow logical operations below */
3327 vtop->type.t = (PTR_SIZE == 8 ? VT_LLONG : VT_INT);
3331 /* processor allows { int a = 0, b = *(char*)&a; }
3332 That means that if we cast to less width, we can just
3333 change the type and read it still later. */
3334 #define ALLOW_SUBTYPE_ACCESS 1
3336 if (ALLOW_SUBTYPE_ACCESS && (vtop->r & VT_LVAL)) {
3337 /* value still in memory */
3338 if (ds <= ss)
3339 goto done;
3340 /* ss <= 4 here */
3341 if (ds <= 4 && !(dbt == (VT_SHORT | VT_UNSIGNED) && sbt == VT_BYTE)) {
3342 gv(RC_INT);
3343 goto done; /* no 64bit envolved */
3346 gv(RC_INT);
3348 trunc = 0;
3349 #if PTR_SIZE == 4
3350 if (ds == 8) {
3351 /* generate high word */
3352 if (sbt & VT_UNSIGNED) {
3353 vpushi(0);
3354 gv(RC_INT);
3355 } else {
3356 gv_dup();
3357 vpushi(31);
3358 gen_op(TOK_SAR);
3360 lbuild(dbt);
3361 } else if (ss == 8) {
3362 /* from long long: just take low order word */
3363 lexpand();
3364 vpop();
3366 ss = 4;
3368 #elif PTR_SIZE == 8
3369 if (ds == 8) {
3370 /* need to convert from 32bit to 64bit */
3371 if (sbt & VT_UNSIGNED) {
3372 #if defined(TCC_TARGET_RISCV64)
3373 /* RISC-V keeps 32bit vals in registers sign-extended.
3374 So here we need a zero-extension. */
3375 trunc = 32;
3376 #else
3377 goto done;
3378 #endif
3379 } else {
3380 gen_cvt_sxtw();
3381 goto done;
3383 ss = ds, ds = 4, dbt = sbt;
3384 } else if (ss == 8) {
3385 /* RISC-V keeps 32bit vals in registers sign-extended.
3386 So here we need a sign-extension for signed types and
3387 zero-extension. for unsigned types. */
3388 #if !defined(TCC_TARGET_RISCV64)
3389 trunc = 32; /* zero upper 32 bits for non RISC-V targets */
3390 #endif
3391 } else {
3392 ss = 4;
3394 #endif
3396 if (ds >= ss)
3397 goto done;
3398 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64 || defined TCC_TARGET_ARM64
3399 if (ss == 4) {
3400 gen_cvt_csti(dbt);
3401 goto done;
3403 #endif
3404 bits = (ss - ds) * 8;
3405 /* for unsigned, gen_op will convert SAR to SHR */
3406 vtop->type.t = (ss == 8 ? VT_LLONG : VT_INT) | (dbt & VT_UNSIGNED);
3407 vpushi(bits);
3408 gen_op(TOK_SHL);
3409 vpushi(bits - trunc);
3410 gen_op(TOK_SAR);
3411 vpushi(trunc);
3412 gen_op(TOK_SHR);
3414 done:
3415 vtop->type = *type;
3416 vtop->type.t &= ~ ( VT_CONSTANT | VT_VOLATILE | VT_ARRAY );
3419 /* return type size as known at compile time. Put alignment at 'a' */
3420 ST_FUNC int type_size(CType *type, int *a)
3422 Sym *s;
3423 int bt;
3425 bt = type->t & VT_BTYPE;
3426 if (bt == VT_STRUCT) {
3427 /* struct/union */
3428 s = type->ref;
3429 *a = s->r;
3430 return s->c;
3431 } else if (bt == VT_PTR) {
3432 if (type->t & VT_ARRAY) {
3433 int ts;
3434 s = type->ref;
3435 ts = type_size(&s->type, a);
3436 if (ts < 0 && s->c < 0)
3437 ts = -ts;
3438 return ts * s->c;
3439 } else {
3440 *a = PTR_SIZE;
3441 return PTR_SIZE;
3443 } else if (IS_ENUM(type->t) && type->ref->c < 0) {
3444 *a = 0;
3445 return -1; /* incomplete enum */
3446 } else if (bt == VT_LDOUBLE) {
3447 *a = LDOUBLE_ALIGN;
3448 return LDOUBLE_SIZE;
3449 } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
3450 #if (defined TCC_TARGET_I386 && !defined TCC_TARGET_PE) \
3451 || (defined TCC_TARGET_ARM && !defined TCC_ARM_EABI)
3452 *a = 4;
3453 #else
3454 *a = 8;
3455 #endif
3456 return 8;
3457 } else if (bt == VT_INT || bt == VT_FLOAT) {
3458 *a = 4;
3459 return 4;
3460 } else if (bt == VT_SHORT) {
3461 *a = 2;
3462 return 2;
3463 } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
3464 *a = 8;
3465 return 16;
3466 } else {
3467 /* char, void, function, _Bool */
3468 *a = 1;
3469 return 1;
3473 /* push type size as known at runtime time on top of value stack. Put
3474 alignment at 'a' */
3475 static void vpush_type_size(CType *type, int *a)
3477 if (type->t & VT_VLA) {
3478 type_size(&type->ref->type, a);
3479 vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
3480 } else {
3481 int size = type_size(type, a);
3482 if (size < 0)
3483 tcc_error("unknown type size");
3484 vpushs(size);
3488 /* return the pointed type of t */
3489 static inline CType *pointed_type(CType *type)
3491 return &type->ref->type;
3494 /* modify type so that its it is a pointer to type. */
3495 ST_FUNC void mk_pointer(CType *type)
3497 Sym *s;
3498 s = sym_push(SYM_FIELD, type, 0, -1);
3499 type->t = VT_PTR | (type->t & VT_STORAGE);
3500 type->ref = s;
3503 /* return true if type1 and type2 are exactly the same (including
3504 qualifiers).
3506 static int is_compatible_types(CType *type1, CType *type2)
3508 return compare_types(type1,type2,0);
3511 /* return true if type1 and type2 are the same (ignoring qualifiers).
3513 static int is_compatible_unqualified_types(CType *type1, CType *type2)
3515 return compare_types(type1,type2,1);
3518 static void cast_error(CType *st, CType *dt)
3520 type_incompatibility_error(st, dt, "cannot convert '%s' to '%s'");
3523 /* verify type compatibility to store vtop in 'dt' type */
3524 static void verify_assign_cast(CType *dt)
3526 CType *st, *type1, *type2;
3527 int dbt, sbt, qualwarn, lvl;
3529 st = &vtop->type; /* source type */
3530 dbt = dt->t & VT_BTYPE;
3531 sbt = st->t & VT_BTYPE;
3532 if (dt->t & VT_CONSTANT)
3533 tcc_warning("assignment of read-only location");
3534 switch(dbt) {
3535 case VT_VOID:
3536 if (sbt != dbt)
3537 tcc_error("assignment to void expression");
3538 break;
3539 case VT_PTR:
3540 /* special cases for pointers */
3541 /* '0' can also be a pointer */
3542 if (is_null_pointer(vtop))
3543 break;
3544 /* accept implicit pointer to integer cast with warning */
3545 if (is_integer_btype(sbt)) {
3546 tcc_warning("assignment makes pointer from integer without a cast");
3547 break;
3549 type1 = pointed_type(dt);
3550 if (sbt == VT_PTR)
3551 type2 = pointed_type(st);
3552 else if (sbt == VT_FUNC)
3553 type2 = st; /* a function is implicitly a function pointer */
3554 else
3555 goto error;
3556 if (is_compatible_types(type1, type2))
3557 break;
3558 for (qualwarn = lvl = 0;; ++lvl) {
3559 if (((type2->t & VT_CONSTANT) && !(type1->t & VT_CONSTANT)) ||
3560 ((type2->t & VT_VOLATILE) && !(type1->t & VT_VOLATILE)))
3561 qualwarn = 1;
3562 dbt = type1->t & (VT_BTYPE|VT_LONG);
3563 sbt = type2->t & (VT_BTYPE|VT_LONG);
3564 if (dbt != VT_PTR || sbt != VT_PTR)
3565 break;
3566 type1 = pointed_type(type1);
3567 type2 = pointed_type(type2);
3569 if (!is_compatible_unqualified_types(type1, type2)) {
3570 if ((dbt == VT_VOID || sbt == VT_VOID) && lvl == 0) {
3571 /* void * can match anything */
3572 } else if (dbt == sbt
3573 && is_integer_btype(sbt & VT_BTYPE)
3574 && IS_ENUM(type1->t) + IS_ENUM(type2->t)
3575 + !!((type1->t ^ type2->t) & VT_UNSIGNED) < 2) {
3576 /* Like GCC don't warn by default for merely changes
3577 in pointer target signedness. Do warn for different
3578 base types, though, in particular for unsigned enums
3579 and signed int targets. */
3580 } else {
3581 tcc_warning("assignment from incompatible pointer type");
3582 break;
3585 if (qualwarn)
3586 tcc_warning_c(warn_discarded_qualifiers)("assignment discards qualifiers from pointer target type");
3587 break;
3588 case VT_BYTE:
3589 case VT_SHORT:
3590 case VT_INT:
3591 case VT_LLONG:
3592 if (sbt == VT_PTR || sbt == VT_FUNC) {
3593 tcc_warning("assignment makes integer from pointer without a cast");
3594 } else if (sbt == VT_STRUCT) {
3595 goto case_VT_STRUCT;
3597 /* XXX: more tests */
3598 break;
3599 case VT_STRUCT:
3600 case_VT_STRUCT:
3601 if (!is_compatible_unqualified_types(dt, st)) {
3602 error:
3603 cast_error(st, dt);
3605 break;
3609 static void gen_assign_cast(CType *dt)
3611 verify_assign_cast(dt);
3612 gen_cast(dt);
/* store vtop in lvalue pushed on stack */
/* On entry the stack holds: vtop[-1] = destination lvalue, vtop = value.
   On exit the stored value remains as the expression result.  Four cases:
   struct assignment (memmove/native copy), bit-field store, store to a
   void destination (discard), and the plain scalar store. */
ST_FUNC void vstore(void)
{
    int sbt, dbt, ft, r, size, align, bit_size, bit_pos, delayed_cast;

    ft = vtop[-1].type.t;
    sbt = vtop->type.t & VT_BTYPE;
    dbt = ft & VT_BTYPE;
    verify_assign_cast(&vtop[-1].type);

    if (sbt == VT_STRUCT) {
        /* if structure, only generate pointer */
        /* structure assignment : generate memcpy */
        size = type_size(&vtop->type, &align);
        /* destination, keep on stack() as result */
        vpushv(vtop - 1);
#ifdef CONFIG_TCC_BCHECK
        if (vtop->r & VT_MUSTBOUND)
            gbound(); /* check would be wrong after gaddrof() */
#endif
        vtop->type.t = VT_PTR;
        gaddrof();

        /* source */
        vswap();
#ifdef CONFIG_TCC_BCHECK
        if (vtop->r & VT_MUSTBOUND)
            gbound();
#endif
        vtop->type.t = VT_PTR;
        gaddrof();

#ifdef TCC_TARGET_NATIVE_STRUCT_COPY
        if (1
#ifdef CONFIG_TCC_BCHECK
            && !tcc_state->do_bounds_check
#endif
            ) {
            gen_struct_copy(size);
        } else
#endif
        {
            /* type size */
            vpushi(size);
            /* Use memmove, rather than memcpy, as dest and src may be same: */
#ifdef TCC_ARM_EABI
            if(!(align & 7))
                vpush_helper_func(TOK_memmove8);
            else if(!(align & 3))
                vpush_helper_func(TOK_memmove4);
            else
#endif
            vpush_helper_func(TOK_memmove);
            vrott(4);
            gfunc_call(3);
        }

    } else if (ft & VT_BITFIELD) {
        /* bitfield store handling */

        /* save lvalue as expression result (example: s.b = s.a = n;) */
        vdup(), vtop[-1] = vtop[-2];

        bit_pos = BIT_POS(ft);
        bit_size = BIT_SIZE(ft);
        /* remove bit field info to avoid loops */
        vtop[-1].type.t = ft & ~VT_STRUCT_MASK;

        if (dbt == VT_BOOL) {
            /* _Bool bit-fields: normalize the source to 0/1 first */
            gen_cast(&vtop[-1].type);
            vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
        }
        r = adjust_bf(vtop - 1, bit_pos, bit_size);
        if (dbt != VT_BOOL) {
            gen_cast(&vtop[-1].type);
            dbt = vtop[-1].type.t & VT_BTYPE;
        }
        if (r == VT_STRUCT) {
            /* field does not fit a scalar container: byte-wise store */
            store_packed_bf(bit_pos, bit_size);
        } else {
            unsigned long long mask = (1ULL << bit_size) - 1;
            if (dbt != VT_BOOL) {
                /* mask source */
                if (dbt == VT_LLONG)
                    vpushll(mask);
                else
                    vpushi((unsigned)mask);
                gen_op('&');
            }
            /* shift source */
            vpushi(bit_pos);
            gen_op(TOK_SHL);
            vswap();
            /* duplicate destination */
            vdup();
            vrott(3);
            /* load destination, mask and or with source */
            if (dbt == VT_LLONG)
                vpushll(~(mask << bit_pos));
            else
                vpushi(~((unsigned)mask << bit_pos));
            gen_op('&');
            gen_op('|');
            /* store result */
            vstore();
            /* ... and discard */
            vpop();
        }

    } else if (dbt == VT_VOID) {
        --vtop;

    } else {
        /* optimize char/short casts */
        delayed_cast = 0;
        if ((dbt == VT_BYTE || dbt == VT_SHORT)
            && is_integer_btype(sbt)
            ) {
            if ((vtop->r & VT_MUSTCAST)
                && btype_size(dbt) > btype_size(sbt)
                )
                force_charshort_cast();
            delayed_cast = 1;
        } else {
            gen_cast(&vtop[-1].type);
        }

#ifdef CONFIG_TCC_BCHECK
        /* bound check case */
        if (vtop[-1].r & VT_MUSTBOUND) {
            vswap();
            gbound();
            vswap();
        }
#endif
        gv(RC_TYPE(dbt)); /* generate value */

        if (delayed_cast) {
            vtop->r |= BFVAL(VT_MUSTCAST, (sbt == VT_LLONG) + 1);
            //tcc_warning("deley cast %x -> %x", sbt, dbt);
            vtop->type.t = ft & VT_TYPE;
        }

        /* if lvalue was saved on stack, must read it */
        if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
            SValue sv;
            r = get_reg(RC_INT);
            sv.type.t = VT_PTRDIFF_T;
            sv.r = VT_LOCAL | VT_LVAL;
            sv.c.i = vtop[-1].c.i;
            load(r, &sv);
            vtop[-1].r = r | VT_LVAL;
        }

        r = vtop->r & VT_VALMASK;
        /* two word case handling :
           store second register at word + 4 (or +8 for x86-64) */
        if (USING_TWO_WORDS(dbt)) {
            int load_type = (dbt == VT_QFLOAT) ? VT_DOUBLE : VT_PTRDIFF_T;
            vtop[-1].type.t = load_type;
            store(r, vtop - 1);
            vswap();
            incr_offset(PTR_SIZE);
            vswap();
            /* XXX: it works because r2 is spilled last ! */
            store(vtop->r2, vtop - 1);
        } else {
            /* single word */
            store(r, vtop - 1);
        }
        vswap();
        vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
    }
}
/* post defines POST/PRE add. c is the token ++ or -- */
/* Expects an lvalue on the value stack; leaves the expression result
   (the old value for post-ops, the new value for pre-ops). */
ST_FUNC void inc(int post, int c)
{
    test_lvalue();
    vdup(); /* save lvalue */
    if (post) {
        gv_dup(); /* duplicate value */
        vrotb(3);
        vrotb(3);
    }
    /* add constant */
    vpushi(c - TOK_MID); /* maps the ++/-- token to +1 / -1 */
    gen_op('+');
    vstore(); /* store value */
    if (post)
        vpop(); /* if post op, return saved value */
}
/* Parse one or more adjacent string literals and concatenate them
   (C string-literal pasting).  MSG is the error message used when the
   current token is not a string.  Returns the shared 'initstr' buffer,
   which is reused (reset) by the next call. */
ST_FUNC CString* parse_mult_str (const char *msg)
{
    /* read the string */
    if (tok != TOK_STR)
        expect(msg);
    cstr_reset(&initstr);
    while (tok == TOK_STR) {
        /* XXX: add \0 handling too ? */
        cstr_cat(&initstr, tokc.str.data, -1);
        next();
    }
    cstr_ccat(&initstr, '\0');
    return &initstr;
}
/* If I is >= 1 and a power of two, returns log2(i)+1.
   If I is 0 returns 0.  (For other positive values this yields the
   1-based position of the most significant set bit.) */
ST_FUNC int exact_log2p1(int i)
{
    int n;

    if (i == 0)
        return 0;
    n = 1;
    /* reduce in byte-sized steps first */
    while (i >= 1 << 8) {
        i >>= 8;
        n += 8;
    }
    /* then binary-search the remaining byte */
    if (i >= 1 << 4) {
        n += 4;
        i >>= 4;
    }
    if (i >= 1 << 2) {
        n += 2;
        i >>= 2;
    }
    if (i >= 2)
        n += 1;
    return n;
}
/* Parse __attribute__((...)) GNUC extension. */
/* Fills AD with the recognized attributes; unknown attributes are
   warned about and their parenthesized arguments skipped.  Several
   consecutive __attribute__((...)) groups are accepted (see 'redo'). */
static void parse_attribute(AttributeDef *ad)
{
    int t, n;
    char *astr;

redo:
    if (tok != TOK_ATTRIBUTE1 && tok != TOK_ATTRIBUTE2)
        return;
    next();
    skip('(');
    skip('(');
    while (tok != ')') {
        if (tok < TOK_IDENT)
            expect("attribute name");
        t = tok;
        next();
        switch(t) {
        case TOK_CLEANUP1:
        case TOK_CLEANUP2:
        {
            Sym *s;

            skip('(');
            s = sym_find(tok);
            if (!s) {
                tcc_warning_c(warn_implicit_function_declaration)(
                    "implicit declaration of function '%s'", get_tok_str(tok, &tokc));
                s = external_global_sym(tok, &func_old_type);
            } else if ((s->type.t & VT_BTYPE) != VT_FUNC)
                tcc_error("'%s' is not declared as function", get_tok_str(tok, &tokc));
            ad->cleanup_func = s;
            next();
            skip(')');
            break;
        }
        case TOK_CONSTRUCTOR1:
        case TOK_CONSTRUCTOR2:
            ad->f.func_ctor = 1;
            break;
        case TOK_DESTRUCTOR1:
        case TOK_DESTRUCTOR2:
            ad->f.func_dtor = 1;
            break;
        case TOK_ALWAYS_INLINE1:
        case TOK_ALWAYS_INLINE2:
            ad->f.func_alwinl = 1;
            break;
        case TOK_SECTION1:
        case TOK_SECTION2:
            skip('(');
            astr = parse_mult_str("section name")->data;
            ad->section = find_section(tcc_state, astr);
            skip(')');
            break;
        case TOK_ALIAS1:
        case TOK_ALIAS2:
            skip('(');
            astr = parse_mult_str("alias(\"target\")")->data;
            /* save string as token, for later */
            ad->alias_target = tok_alloc_const(astr);
            skip(')');
            break;
        case TOK_VISIBILITY1:
        case TOK_VISIBILITY2:
            skip('(');
            astr = parse_mult_str("visibility(\"default|hidden|internal|protected\")")->data;
            if (!strcmp (astr, "default"))
                ad->a.visibility = STV_DEFAULT;
            else if (!strcmp (astr, "hidden"))
                ad->a.visibility = STV_HIDDEN;
            else if (!strcmp (astr, "internal"))
                ad->a.visibility = STV_INTERNAL;
            else if (!strcmp (astr, "protected"))
                ad->a.visibility = STV_PROTECTED;
            else
                expect("visibility(\"default|hidden|internal|protected\")");
            skip(')');
            break;
        case TOK_ALIGNED1:
        case TOK_ALIGNED2:
            if (tok == '(') {
                next();
                n = expr_const();
                if (n <= 0 || (n & (n - 1)) != 0)
                    tcc_error("alignment must be a positive power of two");
                skip(')');
            } else {
                /* aligned without an argument means maximum alignment */
                n = MAX_ALIGN;
            }
            ad->a.aligned = exact_log2p1(n);
            if (n != 1 << (ad->a.aligned - 1))
                tcc_error("alignment of %d is larger than implemented", n);
            break;
        case TOK_PACKED1:
        case TOK_PACKED2:
            ad->a.packed = 1;
            break;
        case TOK_WEAK1:
        case TOK_WEAK2:
            ad->a.weak = 1;
            break;
        case TOK_NODEBUG1:
        case TOK_NODEBUG2:
            ad->a.nodebug = 1;
            break;
        case TOK_UNUSED1:
        case TOK_UNUSED2:
            /* currently, no need to handle it because tcc does not
               track unused objects */
            break;
        case TOK_NORETURN1:
        case TOK_NORETURN2:
            ad->f.func_noreturn = 1;
            break;
        case TOK_CDECL1:
        case TOK_CDECL2:
        case TOK_CDECL3:
            ad->f.func_call = FUNC_CDECL;
            break;
        case TOK_STDCALL1:
        case TOK_STDCALL2:
        case TOK_STDCALL3:
            ad->f.func_call = FUNC_STDCALL;
            break;
#ifdef TCC_TARGET_I386
        case TOK_REGPARM1:
        case TOK_REGPARM2:
            skip('(');
            n = expr_const();
            /* clamp the register count to the supported 0..3 range */
            if (n > 3)
                n = 3;
            else if (n < 0)
                n = 0;
            if (n > 0)
                ad->f.func_call = FUNC_FASTCALL1 + n - 1;
            skip(')');
            break;
        case TOK_FASTCALL1:
        case TOK_FASTCALL2:
        case TOK_FASTCALL3:
            ad->f.func_call = FUNC_FASTCALLW;
            break;
        case TOK_THISCALL1:
        case TOK_THISCALL2:
        case TOK_THISCALL3:
            ad->f.func_call = FUNC_THISCALL;
            break;
#endif
        case TOK_MODE:
            skip('(');
            switch(tok) {
            case TOK_MODE_DI:
                ad->attr_mode = VT_LLONG + 1;
                break;
            case TOK_MODE_QI:
                ad->attr_mode = VT_BYTE + 1;
                break;
            case TOK_MODE_HI:
                ad->attr_mode = VT_SHORT + 1;
                break;
            case TOK_MODE_SI:
            case TOK_MODE_word:
                ad->attr_mode = VT_INT + 1;
                break;
            default:
                tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok, NULL));
                break;
            }
            next();
            skip(')');
            break;
        case TOK_DLLEXPORT:
            ad->a.dllexport = 1;
            break;
        case TOK_NODECORATE:
            ad->a.nodecorate = 1;
            break;
        case TOK_DLLIMPORT:
            ad->a.dllimport = 1;
            break;
        default:
            tcc_warning_c(warn_unsupported)("'%s' attribute ignored", get_tok_str(t, NULL));
            /* skip parameters */
            if (tok == '(') {
                int parenthesis = 0;
                do {
                    if (tok == '(')
                        parenthesis++;
                    else if (tok == ')')
                        parenthesis--;
                    next();
                } while (parenthesis && tok != -1);
            }
            break;
        }
        if (tok != ',')
            break;
        next();
    }
    skip(')');
    skip(')');
    goto redo;
}
/* Look up member V of struct/union TYPE, recursing into anonymous
   sub-structs/unions.  On success *CUMOFS accumulates the byte offset
   of the member relative to the outermost struct.  On a top-level call
   (V without SYM_FIELD) a missing member is a hard error; recursive
   calls return NULL instead so the caller can keep searching. */
static Sym * find_field (CType *type, int v, int *cumofs)
{
    Sym *s = type->ref;
    int v1 = v | SYM_FIELD;
    if (!(v & SYM_FIELD)) { /* top-level call */
        if ((type->t & VT_BTYPE) != VT_STRUCT)
            expect("struct or union");
        if (v < TOK_UIDENT)
            expect("field name");
        if (s->c < 0)
            tcc_error("dereferencing incomplete type '%s'",
                      get_tok_str(s->v & ~SYM_STRUCT, 0));
    }
    while ((s = s->next) != NULL) {
        if (s->v == v1) {
            *cumofs = s->c;
            return s;
        }
        if ((s->type.t & VT_BTYPE) == VT_STRUCT
            && s->v >= (SYM_FIRST_ANOM | SYM_FIELD)) {
            /* try to find field in anonymous sub-struct/union */
            Sym *ret = find_field (&s->type, v1, cumofs);
            if (ret) {
                *cumofs += s->c;
                return ret;
            }
        }
    }
    if (!(v & SYM_FIELD))
        tcc_error("field not found: %s", get_tok_str(v, NULL));
    return s;
}
/* Detect duplicate member names, including those pulled in through
   anonymous nested structs/unions.  Uses the SYM_FIELD bit of the
   identifier's token-table entry as a "seen" marker: a first pass with
   CHECK=1 sets the marks and errors on duplicates, a second pass with
   CHECK=0 toggles them back off (the XOR does both). */
static void check_fields (CType *type, int check)
{
    Sym *s = type->ref;

    while ((s = s->next) != NULL) {
        int v = s->v & ~SYM_FIELD;
        if (v < SYM_FIRST_ANOM) {
            TokenSym *ts = table_ident[v - TOK_IDENT];
            if (check && (ts->tok & SYM_FIELD))
                tcc_error("duplicate member '%s'", get_tok_str(v, NULL));
            ts->tok ^= SYM_FIELD;
        } else if ((s->type.t & VT_BTYPE) == VT_STRUCT)
            check_fields (&s->type, check);
    }
}
/* Compute member offsets, bit positions and the total size/alignment of
   a struct or union, following either PCC/gcc or MS bit-field rules
   (tcc_state->ms_bitfields), and honouring attribute packed/aligned as
   well as #pragma pack.  A second pass then rewrites bit-fields whose
   declared container cannot be accessed directly (see f->auxtype). */
static void struct_layout(CType *type, AttributeDef *ad)
{
    int size, align, maxalign, offset, c, bit_pos, bit_size;
    int packed, a, bt, prevbt, prev_bit_size;
    int pcc = !tcc_state->ms_bitfields;
    int pragma_pack = *tcc_state->pack_stack_ptr;
    Sym *f;

    maxalign = 1;
    offset = 0;
    c = 0;          /* running byte offset */
    bit_pos = 0;    /* bit offset within the current bit-field unit */
    prevbt = VT_STRUCT; /* make it never match */
    prev_bit_size = 0;

//#define BF_DEBUG

    for (f = type->ref->next; f; f = f->next) {
        if (f->type.t & VT_BITFIELD)
            bit_size = BIT_SIZE(f->type.t);
        else
            bit_size = -1;
        size = type_size(&f->type, &align);
        a = f->a.aligned ? 1 << (f->a.aligned - 1) : 0;
        packed = 0;

        if (pcc && bit_size == 0) {
            /* in pcc mode, packing does not affect zero-width bitfields */

        } else {
            /* in pcc mode, attribute packed overrides if set. */
            if (pcc && (f->a.packed || ad->a.packed))
                align = packed = 1;

            /* pragma pack overrides align if lesser and packs bitfields always */
            if (pragma_pack) {
                packed = 1;
                if (pragma_pack < align)
                    align = pragma_pack;
                /* in pcc mode pragma pack also overrides individual align */
                if (pcc && pragma_pack < a)
                    a = 0;
            }
            /* some individual align was specified */
            if (a)
                align = a;
        }

        if (type->ref->type.t == VT_UNION) {
            if (pcc && bit_size >= 0)
                size = (bit_size + 7) >> 3;
            offset = 0;
            if (size > c)
                c = size;

        } else if (bit_size < 0) {
            /* plain (non-bit-field) member: flush pending bits, align */
            if (pcc)
                c += (bit_pos + 7) >> 3;
            c = (c + align - 1) & -align;
            offset = c;
            if (size > 0)
                c += size;
            bit_pos = 0;
            prevbt = VT_STRUCT;
            prev_bit_size = 0;

        } else {
            /* A bit-field. Layout is more complicated. There are two
               options: PCC (GCC) compatible and MS compatible */
            if (pcc) {
                /* In PCC layout a bit-field is placed adjacent to the
                   preceding bit-fields, except if:
                   - it has zero-width
                   - an individual alignment was given
                   - it would overflow its base type container and
                     there is no packing */
                if (bit_size == 0) {
            new_field:
                    c = (c + ((bit_pos + 7) >> 3) + align - 1) & -align;
                    bit_pos = 0;
                } else if (f->a.aligned) {
                    goto new_field;
                } else if (!packed) {
                    int a8 = align * 8;
                    int ofs = ((c * 8 + bit_pos) % a8 + bit_size + a8 - 1) / a8;
                    if (ofs > size / align)
                        goto new_field;
                }

                /* in pcc mode, long long bitfields have type int if they fit */
                if (size == 8 && bit_size <= 32)
                    f->type.t = (f->type.t & ~VT_BTYPE) | VT_INT, size = 4;

                while (bit_pos >= align * 8)
                    c += align, bit_pos -= align * 8;
                offset = c;

                /* In PCC layout named bit-fields influence the alignment
                   of the containing struct using the base types alignment,
                   except for packed fields (which here have correct align). */
                if (f->v & SYM_FIRST_ANOM
                    // && bit_size // ??? gcc on ARM/rpi does that
                    )
                    align = 1;

            } else {
                bt = f->type.t & VT_BTYPE;
                if ((bit_pos + bit_size > size * 8)
                    || (bit_size > 0) == (bt != prevbt)
                    ) {
                    c = (c + align - 1) & -align;
                    offset = c;
                    bit_pos = 0;
                    /* In MS bitfield mode a bit-field run always uses
                       at least as many bits as the underlying type.
                       To start a new run it's also required that this
                       or the last bit-field had non-zero width. */
                    if (bit_size || prev_bit_size)
                        c += size;
                }
                /* In MS layout the records alignment is normally
                   influenced by the field, except for a zero-width
                   field at the start of a run (but by further zero-width
                   fields it is again). */
                if (bit_size == 0 && prevbt != bt)
                    align = 1;
                prevbt = bt;
                prev_bit_size = bit_size;
            }

            f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
                        | (bit_pos << VT_STRUCT_SHIFT);
            bit_pos += bit_size;
        }
        if (align > maxalign)
            maxalign = align;

#ifdef BF_DEBUG
        printf("set field %s offset %-2d size %-2d align %-2d",
               get_tok_str(f->v & ~SYM_FIELD, NULL), offset, size, align);
        if (f->type.t & VT_BITFIELD) {
            printf(" pos %-2d bits %-2d",
                   BIT_POS(f->type.t),
                   BIT_SIZE(f->type.t)
                   );
        }
        printf("\n");
#endif

        f->c = offset;
        f->r = 0;
    }

    if (pcc)
        c += (bit_pos + 7) >> 3;

    /* store size and alignment */
    a = bt = ad->a.aligned ? 1 << (ad->a.aligned - 1) : 1;
    if (a < maxalign)
        a = maxalign;
    type->ref->r = a;
    if (pragma_pack && pragma_pack < maxalign && 0 == pcc) {
        /* can happen if individual align for some member was given. In
           this case MSVC ignores maxalign when aligning the size */
        a = pragma_pack;
        if (a < bt)
            a = bt;
    }
    c = (c + a - 1) & -a;
    type->ref->c = c;

#ifdef BF_DEBUG
    printf("struct size %-2d align %-2d\n\n", c, a), fflush(stdout);
#endif

    /* check whether we can access bitfields by their type */
    for (f = type->ref->next; f; f = f->next) {
        int s, px, cx, c0;
        CType t;

        if (0 == (f->type.t & VT_BITFIELD))
            continue;
        f->type.ref = f;
        f->auxtype = -1;
        bit_size = BIT_SIZE(f->type.t);
        if (bit_size == 0)
            continue;
        bit_pos = BIT_POS(f->type.t);
        size = type_size(&f->type, &align);

        if (bit_pos + bit_size <= size * 8 && f->c + size <= c
#ifdef TCC_TARGET_ARM
            && !(f->c & (align - 1))
#endif
            )
            continue;

        /* try to access the field using a different type */
        c0 = -1, s = align = 1;
        t.t = VT_BYTE;
        for (;;) {
            px = f->c * 8 + bit_pos;
            cx = (px >> 3) & -align;
            px = px - (cx << 3);
            if (c0 == cx)
                break;
            s = (px + bit_size + 7) >> 3;
            if (s > 4) {
                t.t = VT_LLONG;
            } else if (s > 2) {
                t.t = VT_INT;
            } else if (s > 1) {
                t.t = VT_SHORT;
            } else {
                t.t = VT_BYTE;
            }
            s = type_size(&t, &align);
            c0 = cx;
        }

        if (px + bit_size <= s * 8 && cx + s <= c
#ifdef TCC_TARGET_ARM
            && !(cx & (align - 1))
#endif
            ) {
            /* update offset and bit position */
            f->c = cx;
            bit_pos = px;
            f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
                        | (bit_pos << VT_STRUCT_SHIFT);
            if (s != size)
                f->auxtype = t.t;
#ifdef BF_DEBUG
            printf("FIX field %s offset %-2d size %-2d align %-2d "
                   "pos %-2d bits %-2d\n",
                   get_tok_str(f->v & ~SYM_FIELD, NULL),
                   cx, s, align, px, bit_size);
#endif
        } else {
            /* fall back to load/store single-byte wise */
            f->auxtype = VT_STRUCT;
#ifdef BF_DEBUG
            printf("FIX field %s : load byte-wise\n",
                   get_tok_str(f->v & ~SYM_FIELD, NULL));
#endif
        }
    }
}
4341 static void do_Static_assert(void);
/* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
/* Parses a (possibly anonymous) tag declaration or definition and
   stores the resulting type in TYPE.  For enums the integral base type
   is chosen from the value range; for structs/unions the member list is
   parsed and laid out via struct_layout(). */
static void struct_decl(CType *type, int u)
{
    int v, c, size, align, flexible;
    int bit_size, bsize, bt;
    Sym *s, *ss, **ps;
    AttributeDef ad, ad1;
    CType type1, btype;

    memset(&ad, 0, sizeof ad);
    next();
    parse_attribute(&ad);
    if (tok != '{') {
        v = tok;
        next();
        /* struct already defined ? return it */
        if (v < TOK_IDENT)
            expect("struct/union/enum name");
        s = struct_find(v);
        if (s && (s->sym_scope == local_scope || tok != '{')) {
            if (u == s->type.t)
                goto do_decl;
            if (u == VT_ENUM && IS_ENUM(s->type.t))
                goto do_decl;
            tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
        }
    } else {
        /* unnamed struct/union/enum: make up an anonymous tag */
        v = anon_sym++;
    }
    /* Record the original enum/struct/union token. */
    type1.t = u == VT_ENUM ? u | VT_INT | VT_UNSIGNED : u;
    type1.ref = NULL;
    /* we put an undefined size for struct/union */
    s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
    s->r = 0; /* default alignment is zero as gcc */
do_decl:
    type->t = s->type.t;
    type->ref = s;

    if (tok == '{') {
        next();
        if (s->c != -1)
            tcc_error("struct/union/enum already defined");
        s->c = -2; /* mark as "currently being defined" */
        /* cannot be empty */
        /* non empty enums are not allowed */
        ps = &s->next;
        if (u == VT_ENUM) {
            long long ll = 0, pl = 0, nl = 0; /* current/highest/lowest value */
            CType t;
            t.ref = s;
            /* enum symbols have static storage */
            t.t = VT_INT|VT_STATIC|VT_ENUM_VAL;
            for(;;) {
                v = tok;
                if (v < TOK_UIDENT)
                    expect("identifier");
                ss = sym_find(v);
                if (ss && !local_stack)
                    tcc_error("redefinition of enumerator '%s'",
                              get_tok_str(v, NULL));
                next();
                if (tok == '=') {
                    next();
                    ll = expr_const64();
                }
                ss = sym_push(v, &t, VT_CONST, 0);
                ss->enum_val = ll;
                *ps = ss, ps = &ss->next;
                if (ll < nl)
                    nl = ll;
                if (ll > pl)
                    pl = ll;
                if (tok != ',')
                    break;
                next();
                ll++;
                /* NOTE: we accept a trailing comma */
                if (tok == '}')
                    break;
            }
            skip('}');
            /* set integral type of the enum */
            t.t = VT_INT;
            if (nl >= 0) {
                if (pl != (unsigned)pl)
                    t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
                t.t |= VT_UNSIGNED;
            } else if (pl != (int)pl || nl != (int)nl)
                t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
            s->type.t = type->t = t.t | VT_ENUM;
            s->c = 0;
            /* set type for enum members */
            for (ss = s->next; ss; ss = ss->next) {
                ll = ss->enum_val;
                if (ll == (int)ll) /* default is int if it fits */
                    continue;
                if (t.t & VT_UNSIGNED) {
                    ss->type.t |= VT_UNSIGNED;
                    if (ll == (unsigned)ll)
                        continue;
                }
                ss->type.t = (ss->type.t & ~VT_BTYPE)
                    | (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
            }
        } else {
            c = 0;          /* set once a real member was seen */
            flexible = 0;   /* set when a flexible array member was seen */
            while (tok != '}') {
                if (!parse_btype(&btype, &ad1, 0)) {
                    if (tok == TOK_STATIC_ASSERT) {
                        do_Static_assert();
                        continue;
                    }
                    skip(';');
                    continue;
                }
                while (1) {
                    if (flexible)
                        tcc_error("flexible array member '%s' not at the end of struct",
                                  get_tok_str(v, NULL));
                    bit_size = -1;
                    v = 0;
                    type1 = btype;
                    if (tok != ':') {
                        if (tok != ';')
                            type_decl(&type1, &ad1, &v, TYPE_DIRECT);
                        if (v == 0) {
                            if ((type1.t & VT_BTYPE) != VT_STRUCT)
                                expect("identifier");
                            else {
                                int v = btype.ref->v;
                                if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
                                    /* named struct member without a declarator:
                                       only valid as an MS extension */
                                    if (tcc_state->ms_extensions == 0)
                                        expect("identifier");
                                }
                            }
                        }
                        if (type_size(&type1, &align) < 0) {
                            if ((u == VT_STRUCT) && (type1.t & VT_ARRAY) && c)
                                flexible = 1;
                            else
                                tcc_error("field '%s' has incomplete type",
                                          get_tok_str(v, NULL));
                        }
                        if ((type1.t & VT_BTYPE) == VT_FUNC ||
                            (type1.t & VT_BTYPE) == VT_VOID ||
                            (type1.t & VT_STORAGE))
                            tcc_error("invalid type for '%s'",
                                      get_tok_str(v, NULL));
                    }
                    if (tok == ':') {
                        next();
                        bit_size = expr_const();
                        /* XXX: handle v = 0 case for messages */
                        if (bit_size < 0)
                            tcc_error("negative width in bit-field '%s'",
                                      get_tok_str(v, NULL));
                        if (v && bit_size == 0)
                            tcc_error("zero width for bit-field '%s'",
                                      get_tok_str(v, NULL));
                        parse_attribute(&ad1);
                    }
                    size = type_size(&type1, &align);
                    if (bit_size >= 0) {
                        bt = type1.t & VT_BTYPE;
                        if (bt != VT_INT &&
                            bt != VT_BYTE &&
                            bt != VT_SHORT &&
                            bt != VT_BOOL &&
                            bt != VT_LLONG)
                            tcc_error("bitfields must have scalar type");
                        bsize = size * 8;
                        if (bit_size > bsize) {
                            tcc_error("width of '%s' exceeds its type",
                                      get_tok_str(v, NULL));
                        } else if (bit_size == bsize
                                    && !ad.a.packed && !ad1.a.packed) {
                            /* no need for bit fields */
                            ;
                        } else if (bit_size == 64) {
                            tcc_error("field width 64 not implemented");
                        } else {
                            type1.t = (type1.t & ~VT_STRUCT_MASK)
                                | VT_BITFIELD
                                | (bit_size << (VT_STRUCT_SHIFT + 6));
                        }
                    }
                    if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
                        /* Remember we've seen a real field to check
                           for placement of flexible array member. */
                        c = 1;
                    }
                    /* If member is a struct or bit-field, enforce
                       placing into the struct (as anonymous). */
                    if (v == 0 &&
                        ((type1.t & VT_BTYPE) == VT_STRUCT ||
                         bit_size >= 0)) {
                        v = anon_sym++;
                    }
                    if (v) {
                        ss = sym_push(v | SYM_FIELD, &type1, 0, 0);
                        ss->a = ad1.a;
                        *ps = ss;
                        ps = &ss->next;
                    }
                    if (tok == ';' || tok == TOK_EOF)
                        break;
                    skip(',');
                }
                skip(';');
            }
            skip('}');
            parse_attribute(&ad);
            if (ad.cleanup_func) {
                tcc_warning("attribute '__cleanup__' ignored on type");
            }
            check_fields(type, 1);
            check_fields(type, 0);
            struct_layout(type, &ad);
            if (debug_modes)
                tcc_debug_fix_anon(tcc_state, type);
        }
    }
}
/* Merge symbol S's symbol- and function-attributes into AD. */
static void sym_to_attr(AttributeDef *ad, Sym *s)
{
    merge_symattr(&ad->a, &s->a);
    merge_funcattr(&ad->f, &s->f);
}
4575 /* Add type qualifiers to a type. If the type is an array then the qualifiers
4576 are added to the element type, copied because it could be a typedef. */
4577 static void parse_btype_qualify(CType *type, int qualifiers)
4579 while (type->t & VT_ARRAY) {
4580 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
4581 type = &type->ref->type;
4583 type->t |= qualifiers;
/* return 0 if no type declaration. otherwise, return the basic type
   and skip it.
 */
/* Consumes a sequence of declaration specifiers (basic types, type
   qualifiers, storage classes, attributes, typedef names) and builds
   the resulting type in TYPE and attributes in AD.  IGNORE_LABEL makes
   a typedef name followed by ':' be treated as a label, not a type. */
static int parse_btype(CType *type, AttributeDef *ad, int ignore_label)
{
    int t, u, bt, st, type_found, typespec_found, g, n;
    Sym *s;
    CType type1;

    memset(ad, 0, sizeof(AttributeDef));
    type_found = 0;
    typespec_found = 0;
    t = VT_INT;
    bt = st = -1; /* base type / size modifier seen so far (-1: none, -2: from typedef) */
    type->ref = NULL;

    while(1) {
        switch(tok) {
        case TOK_EXTENSION:
            /* currently, we really ignore extension */
            next();
            continue;

        /* basic types */
        case TOK_CHAR:
            u = VT_BYTE;
        basic_type:
            next();
        basic_type1:
            if (u == VT_SHORT || u == VT_LONG) {
                if (st != -1 || (bt != -1 && bt != VT_INT))
                    tmbt: tcc_error("too many basic types");
                st = u;
            } else {
                if (bt != -1 || (st != -1 && u != VT_INT))
                    goto tmbt;
                bt = u;
            }
            if (u != VT_INT)
                t = (t & ~(VT_BTYPE|VT_LONG)) | u;
            typespec_found = 1;
            break;
        case TOK_VOID:
            u = VT_VOID;
            goto basic_type;
        case TOK_SHORT:
            u = VT_SHORT;
            goto basic_type;
        case TOK_INT:
            u = VT_INT;
            goto basic_type;
        case TOK_ALIGNAS:
            /* _Alignas(type) or _Alignas(constant-expression) */
            { int n;
              AttributeDef ad1;
              next();
              skip('(');
              memset(&ad1, 0, sizeof(AttributeDef));
              if (parse_btype(&type1, &ad1, 0)) {
                  type_decl(&type1, &ad1, &n, TYPE_ABSTRACT);
                  if (ad1.a.aligned)
                      n = 1 << (ad1.a.aligned - 1);
                  else
                      type_size(&type1, &n);
              } else {
                  n = expr_const();
                  if (n < 0 || (n & (n - 1)) != 0)
                      tcc_error("alignment must be a positive power of two");
              }
              skip(')');
              ad->a.aligned = exact_log2p1(n);
            }
            continue;
        case TOK_LONG:
            if ((t & VT_BTYPE) == VT_DOUBLE) {
                t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
            } else if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
                t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LLONG;
            } else {
                u = VT_LONG;
                goto basic_type;
            }
            next();
            break;
#ifdef TCC_TARGET_ARM64
        case TOK_UINT128:
            /* GCC's __uint128_t appears in some Linux header files. Make it a
               synonym for long double to get the size and alignment right. */
            u = VT_LDOUBLE;
            goto basic_type;
#endif
        case TOK_BOOL:
            u = VT_BOOL;
            goto basic_type;
        case TOK_COMPLEX:
            tcc_error("_Complex is not yet supported");
        case TOK_FLOAT:
            u = VT_FLOAT;
            goto basic_type;
        case TOK_DOUBLE:
            if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
                t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
            } else {
                u = VT_DOUBLE;
                goto basic_type;
            }
            next();
            break;
        case TOK_ENUM:
            struct_decl(&type1, VT_ENUM);
        basic_type2:
            u = type1.t;
            type->ref = type1.ref;
            goto basic_type1;
        case TOK_STRUCT:
            struct_decl(&type1, VT_STRUCT);
            goto basic_type2;
        case TOK_UNION:
            struct_decl(&type1, VT_UNION);
            goto basic_type2;

        /* type modifiers */
        case TOK__Atomic:
            next();
            type->t = t;
            parse_btype_qualify(type, VT_ATOMIC);
            t = type->t;
            if (tok == '(') {
                /* _Atomic(type) specifier form */
                parse_expr_type(&type1);
                /* remove all storage modifiers except typedef */
                type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
                if (type1.ref)
                    sym_to_attr(ad, type1.ref);
                goto basic_type2;
            }
            break;
        case TOK_CONST1:
        case TOK_CONST2:
        case TOK_CONST3:
            type->t = t;
            parse_btype_qualify(type, VT_CONSTANT);
            t = type->t;
            next();
            break;
        case TOK_VOLATILE1:
        case TOK_VOLATILE2:
        case TOK_VOLATILE3:
            type->t = t;
            parse_btype_qualify(type, VT_VOLATILE);
            t = type->t;
            next();
            break;
        case TOK_SIGNED1:
        case TOK_SIGNED2:
        case TOK_SIGNED3:
            if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
                tcc_error("signed and unsigned modifier");
            t |= VT_DEFSIGN;
            next();
            typespec_found = 1;
            break;
        case TOK_REGISTER:
        case TOK_AUTO:
        case TOK_RESTRICT1:
        case TOK_RESTRICT2:
        case TOK_RESTRICT3:
            /* these specifiers are accepted and ignored */
            next();
            break;
        case TOK_UNSIGNED:
            if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
                tcc_error("signed and unsigned modifier");
            t |= VT_DEFSIGN | VT_UNSIGNED;
            next();
            typespec_found = 1;
            break;

        /* storage */
        case TOK_EXTERN:
            g = VT_EXTERN;
            goto storage;
        case TOK_STATIC:
            g = VT_STATIC;
            goto storage;
        case TOK_TYPEDEF:
            g = VT_TYPEDEF;
            goto storage;
        storage:
            if (t & (VT_EXTERN|VT_STATIC|VT_TYPEDEF) & ~g)
                tcc_error("multiple storage classes");
            t |= g;
            next();
            break;
        case TOK_INLINE1:
        case TOK_INLINE2:
        case TOK_INLINE3:
            t |= VT_INLINE;
            next();
            break;
        case TOK_NORETURN3:
            next();
            ad->f.func_noreturn = 1;
            break;
        /* GNUC attribute */
        case TOK_ATTRIBUTE1:
        case TOK_ATTRIBUTE2:
            parse_attribute(ad);
            if (ad->attr_mode) {
                /* __attribute__((mode(...))) overrides the base type */
                u = ad->attr_mode -1;
                t = (t & ~(VT_BTYPE|VT_LONG)) | u;
            }
            continue;
        /* GNUC typeof */
        case TOK_TYPEOF1:
        case TOK_TYPEOF2:
        case TOK_TYPEOF3:
            next();
            parse_expr_type(&type1);
            /* remove all storage modifiers except typedef */
            type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
            if (type1.ref)
                sym_to_attr(ad, type1.ref);
            goto basic_type2;
        case TOK_THREAD_LOCAL:
            tcc_error("_Thread_local is not implemented");
        default:
            if (typespec_found)
                goto the_end;
            s = sym_find(tok);
            if (!s || !(s->type.t & VT_TYPEDEF))
                goto the_end;

            n = tok, next();
            if (tok == ':' && ignore_label) {
                /* ignore if it's a label */
                unget_tok(n);
                goto the_end;
            }

            /* apply the typedef'd type, re-applying any qualifiers
               already collected in t */
            t &= ~(VT_BTYPE|VT_LONG);
            u = t & ~(VT_CONSTANT | VT_VOLATILE), t ^= u;
            type->t = (s->type.t & ~VT_TYPEDEF) | u;
            type->ref = s->type.ref;
            if (t)
                parse_btype_qualify(type, t);
            t = type->t;
            /* get attributes from typedef */
            sym_to_attr(ad, s);
            typespec_found = 1;
            st = bt = -2;
            break;
        }
        type_found = 1;
    }
the_end:
    if (tcc_state->char_is_unsigned) {
        if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
            t |= VT_UNSIGNED;
    }
    /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
    bt = t & (VT_BTYPE|VT_LONG);
    if (bt == VT_LONG)
        t |= LONG_SIZE == 8 ? VT_LLONG : VT_INT;
#ifdef TCC_USING_DOUBLE_FOR_LDOUBLE
    if (bt == VT_LDOUBLE)
        t = (t & ~(VT_BTYPE|VT_LONG)) | (VT_DOUBLE|VT_LONG);
#endif
    type->t = t;
    return type_found;
}
4855 /* convert a function parameter type (array to pointer and function to
4856 function pointer) */
4857 static inline void convert_parameter_type(CType *pt)
4859 /* remove const and volatile qualifiers (XXX: const could be used
4860 to indicate a const function parameter */
4861 pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
4862 /* array must be transformed to pointer according to ANSI C */
4863 pt->t &= ~(VT_ARRAY | VT_VLA);
4864 if ((pt->t & VT_BTYPE) == VT_FUNC) {
4865 mk_pointer(pt);
/* Consume the opening '(' and the following (possibly concatenated)
   string literal(s); returns the shared buffer from parse_mult_str. */
ST_FUNC CString* parse_asm_str(void)
{
    skip('(');
    return parse_mult_str("string constant");
}
4875 /* Parse an asm label and return the token */
4876 static int asm_label_instr(void)
4878 int v;
4879 char *astr;
4881 next();
4882 astr = parse_asm_str()->data;
4883 skip(')');
4884 #ifdef ASM_DEBUG
4885 printf("asm_alias: \"%s\"\n", astr);
4886 #endif
4887 v = tok_alloc_const(astr);
4888 return v;
/* Parse the "post" part of a declarator: a parameter list "(...)"
   (building a VT_FUNC type) or an array suffix "[...]" (building a
   VT_ARRAY / VT_VLA type).  Returns 1 when a suffix was consumed,
   0 when the '(' actually starts a nested declarator (the caller,
   type_decl(), then recurses).  'storage' carries the storage bits
   of the declared object; 'td' is the TYPE_* bitmask.
   NOTE(review): this listing is a blob dump with brace-only lines
   elided; the code text below is kept byte-identical. */
4891 static int post_type(CType *type, AttributeDef *ad, int storage, int td)
4893 int n, l, t1, arg_size, align;
4894 Sym **plast, *s, *first;
4895 AttributeDef ad1;
4896 CType pt;
4897 TokenString *vla_array_tok = NULL;
4898 int *vla_array_str = NULL;
4900 if (tok == '(') {
4901 /* function type, or recursive declarator (return if so) */
4902 next();
4903 if (TYPE_DIRECT == (td & (TYPE_DIRECT|TYPE_ABSTRACT)))
4904 return 0;
4905 if (tok == ')')
4906 l = 0;
4907 else if (parse_btype(&pt, &ad1, 0))
4908 l = FUNC_NEW;
4909 else if (td & (TYPE_DIRECT|TYPE_ABSTRACT)) {
4910 merge_attr (ad, &ad1);
4911 return 0;
4912 } else
4913 l = FUNC_OLD;
/* parse the parameter list; 'l' distinguishes new-style prototypes,
   old-style (identifier-only) lists and ellipsis */
4915 first = NULL;
4916 plast = &first;
4917 arg_size = 0;
4918 ++local_scope;
4919 if (l) {
4920 for(;;) {
4921 /* read param name and compute offset */
4922 if (l != FUNC_OLD) {
4923 if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
4924 break;
4925 type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT | TYPE_PARAM);
4926 if ((pt.t & VT_BTYPE) == VT_VOID)
4927 tcc_error("parameter declared as void");
4928 if (n == 0)
4929 n = SYM_FIELD;
4930 } else {
4931 n = tok;
4932 pt.t = VT_VOID; /* invalid type */
4933 pt.ref = NULL;
4934 next();
4936 if (n < TOK_UIDENT)
4937 expect("identifier");
4938 convert_parameter_type(&pt);
4939 arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
4940 /* these symbols may be evaluated for VLArrays (see below, under
4941 nocode_wanted) which is why we push them here as normal symbols
4942 temporarily. Example: int func(int a, int b[++a]); */
4943 s = sym_push(n, &pt, VT_LOCAL|VT_LVAL, 0);
4944 *plast = s;
4945 plast = &s->next;
4946 if (tok == ')')
4947 break;
4948 skip(',');
4949 if (l == FUNC_NEW && tok == TOK_DOTS) {
4950 l = FUNC_ELLIPSIS;
4951 next();
4952 break;
4954 if (l == FUNC_NEW && !parse_btype(&pt, &ad1, 0))
4955 tcc_error("invalid type");
4957 } else
4958 /* if no parameters, then old type prototype */
4959 l = FUNC_OLD;
4960 skip(')');
4961 /* remove parameter symbols from token table, keep on stack */
4962 if (first) {
4963 sym_pop(local_stack ? &local_stack : &global_stack, first->prev, 1);
4964 for (s = first; s; s = s->next)
4965 s->v |= SYM_FIELD;
4967 --local_scope;
4968 /* NOTE: const is ignored in returned type as it has a special
4969 meaning in gcc / C++ */
4970 type->t &= ~VT_CONSTANT;
4971 /* some ancient pre-K&R C allows a function to return an array
4972 and the array brackets to be put after the arguments, such
4973 that "int c()[]" means something like "int[] c()" */
4974 if (tok == '[') {
4975 next();
4976 skip(']'); /* only handle simple "[]" */
4977 mk_pointer(type);
4979 /* we push a anonymous symbol which will contain the function prototype */
4980 ad->f.func_args = arg_size;
4981 ad->f.func_type = l;
4982 s = sym_push(SYM_FIELD, type, 0, 0);
4983 s->a = ad->a;
4984 s->f = ad->f;
4985 s->next = first;
4986 type->t = VT_FUNC;
4987 type->ref = s;
4988 } else if (tok == '[') {
4989 int saved_nocode_wanted = nocode_wanted;
4990 /* array definition */
4991 next();
4992 n = -1;
4993 t1 = 0;
4994 if (td & TYPE_PARAM) while (1) {
4995 /* XXX The optional type-quals and static should only be accepted
4996 in parameter decls. The '*' as well, and then even only
4997 in prototypes (not function defs). */
4998 switch (tok) {
4999 case TOK_RESTRICT1: case TOK_RESTRICT2: case TOK_RESTRICT3:
5000 case TOK_CONST1:
5001 case TOK_VOLATILE1:
5002 case TOK_STATIC:
5003 case '*':
5004 next();
5005 continue;
5006 default:
5007 break;
5009 if (tok != ']') {
5010 /* Code generation is not done now but has to be done
5011 at start of function. Save code here for later use. */
5012 nocode_wanted = 1;
5013 skip_or_save_block(&vla_array_tok);
5014 unget_tok(0);
5015 vla_array_str = vla_array_tok->str;
5016 begin_macro(vla_array_tok, 2);
5017 next();
5018 gexpr();
5019 end_macro();
5020 next();
5021 goto check;
5023 break;
5025 } else if (tok != ']') {
5026 if (!local_stack || (storage & VT_STATIC))
5027 vpushi(expr_const());
5028 else {
5029 /* VLAs (which can only happen with local_stack && !VT_STATIC)
5030 length must always be evaluated, even under nocode_wanted,
5031 so that its size slot is initialized (e.g. under sizeof
5032 or typeof). */
5033 nocode_wanted = 0;
5034 gexpr();
/* the size expression is now on vtop: constant -> plain array,
   otherwise -> VLA with runtime-computed size */
5036 check:
5037 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5038 n = vtop->c.i;
5039 if (n < 0)
5040 tcc_error("invalid array size");
5041 } else {
5042 if (!is_integer_btype(vtop->type.t & VT_BTYPE))
5043 tcc_error("size of variable length array should be an integer");
5044 n = 0;
5045 t1 = VT_VLA;
5048 skip(']');
5049 /* parse next post type */
5050 post_type(type, ad, storage, (td & ~(TYPE_DIRECT|TYPE_ABSTRACT)) | TYPE_NEST);
5052 if ((type->t & VT_BTYPE) == VT_FUNC)
5053 tcc_error("declaration of an array of functions");
5054 if ((type->t & VT_BTYPE) == VT_VOID
5055 || type_size(type, &align) < 0)
5056 tcc_error("declaration of an array of incomplete type elements");
5058 t1 |= type->t & VT_VLA;
/* for a VLA, reserve a local slot holding the total size and emit
   code that stores element-size * n into it */
5060 if (t1 & VT_VLA) {
5061 if (n < 0) {
5062 if (td & TYPE_NEST)
5063 tcc_error("need explicit inner array size in VLAs");
5065 else {
5066 loc -= type_size(&int_type, &align);
5067 loc &= -align;
5068 n = loc;
5070 vpush_type_size(type, &align);
5071 gen_op('*');
5072 vset(&int_type, VT_LOCAL|VT_LVAL, n);
5073 vswap();
5074 vstore();
5077 if (n != -1)
5078 vpop();
5079 nocode_wanted = saved_nocode_wanted;
5081 /* we push an anonymous symbol which will contain the array
5082 element type */
5083 s = sym_push(SYM_FIELD, type, 0, n);
5084 type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
5085 type->ref = s;
5087 if (vla_array_str) {
5088 /* for function args, the top dimension is converted to pointer */
5089 if ((t1 & VT_VLA) && (td & TYPE_NEST))
5090 s->vla_array_str = vla_array_str;
5091 else
5092 tok_str_free_str(vla_array_str);
5095 return 1;
5098 /* Parse a type declarator (except basic type), and return the type
5099 in 'type'. 'td' is a bitmask indicating which kind of type decl is
5100 expected. 'type' should contain the basic type. 'ad' is the
5101 attribute definition of the basic type. It can be modified by
5102 type_decl(). If this (possibly abstract) declarator is a pointer chain
5103 it returns the innermost pointed to type (equals *type, but is a different
5104 pointer), otherwise returns type itself, that's used for recursive calls. */
5105 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td)
5107 CType *post, *ret;
5108 int qualifiers, storage;
5110 /* recursive type, remove storage bits first, apply them later again */
5111 storage = type->t & VT_STORAGE;
5112 type->t &= ~VT_STORAGE;
5113 post = ret = type;
/* pointer chain: each '*' may be followed by qualifiers and/or
   attributes, collected via the 'redo' loop */
5115 while (tok == '*') {
5116 qualifiers = 0;
5117 redo:
5118 next();
5119 switch(tok) {
5120 case TOK__Atomic:
5121 qualifiers |= VT_ATOMIC;
5122 goto redo;
5123 case TOK_CONST1:
5124 case TOK_CONST2:
5125 case TOK_CONST3:
5126 qualifiers |= VT_CONSTANT;
5127 goto redo;
5128 case TOK_VOLATILE1:
5129 case TOK_VOLATILE2:
5130 case TOK_VOLATILE3:
5131 qualifiers |= VT_VOLATILE;
5132 goto redo;
5133 case TOK_RESTRICT1:
5134 case TOK_RESTRICT2:
5135 case TOK_RESTRICT3:
/* 'restrict' is accepted and ignored */
5136 goto redo;
5137 /* XXX: clarify attribute handling */
5138 case TOK_ATTRIBUTE1:
5139 case TOK_ATTRIBUTE2:
5140 parse_attribute(ad);
5141 break;
5143 mk_pointer(type);
5144 type->t |= qualifiers;
5145 if (ret == type)
5146 /* innermost pointed to type is the one for the first derivation */
5147 ret = pointed_type(type);
5150 if (tok == '(') {
5151 /* This is possibly a parameter type list for abstract declarators
5152 ('int ()'), use post_type for testing this. */
5153 if (!post_type(type, ad, 0, td)) {
5154 /* It's not, so it's a nested declarator, and the post operations
5155 apply to the innermost pointed to type (if any). */
5156 /* XXX: this is not correct to modify 'ad' at this point, but
5157 the syntax is not clear */
5158 parse_attribute(ad);
5159 post = type_decl(type, ad, v, td);
5160 skip(')');
5161 } else
5162 goto abstract;
5163 } else if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
5164 /* type identifier */
5165 *v = tok;
5166 next();
5167 } else {
5168 abstract:
5169 if (!(td & TYPE_ABSTRACT))
5170 expect("identifier");
5171 *v = 0;
/* array/function suffixes bind to the innermost (post) type; only
   the outermost derivation receives the storage bits */
5173 post_type(post, ad, post != ret ? 0 : storage,
5174 td & ~(TYPE_DIRECT|TYPE_ABSTRACT));
5175 parse_attribute(ad);
5176 type->t |= storage;
5177 return ret;
5180 /* indirection with full error checking and bound check */
5181 ST_FUNC void indir(void)
5183 if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
5184 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
5185 return;
5186 expect("pointer");
5188 if (vtop->r & VT_LVAL)
5189 gv(RC_INT);
5190 vtop->type = *pointed_type(&vtop->type);
5191 /* Arrays and functions are never lvalues */
5192 if (!(vtop->type.t & (VT_ARRAY | VT_VLA))
5193 && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
5194 vtop->r |= VT_LVAL;
5195 /* if bound checking, the referenced pointer must be checked */
5196 #ifdef CONFIG_TCC_BCHECK
5197 if (tcc_state->do_bounds_check)
5198 vtop->r |= VT_MUSTBOUND;
5199 #endif
5203 /* pass a parameter to a function and do type checking and casting */
5204 static void gfunc_param_typed(Sym *func, Sym *arg)
5206 int func_type;
5207 CType type;
5209 func_type = func->f.func_type;
5210 if (func_type == FUNC_OLD ||
5211 (func_type == FUNC_ELLIPSIS && arg == NULL)) {
5212 /* default casting : only need to convert float to double */
5213 if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
5214 gen_cast_s(VT_DOUBLE);
5215 } else if (vtop->type.t & VT_BITFIELD) {
5216 type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
5217 type.ref = vtop->type.ref;
5218 gen_cast(&type);
5219 } else if (vtop->r & VT_MUSTCAST) {
5220 force_charshort_cast();
5222 } else if (arg == NULL) {
5223 tcc_error("too many arguments to function");
5224 } else {
5225 type = arg->type;
5226 type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
5227 gen_assign_cast(&type);
5231 /* parse an expression and return its type without any side effect. */
5232 static void expr_type(CType *type, void (*expr_fn)(void))
5234 nocode_wanted++;
5235 expr_fn();
5236 *type = vtop->type;
5237 vpop();
5238 nocode_wanted--;
5241 /* parse an expression of the form '(type)' or '(expr)' and return its
5242 type */
5243 static void parse_expr_type(CType *type)
5245 int n;
5246 AttributeDef ad;
5248 skip('(');
5249 if (parse_btype(type, &ad, 0)) {
5250 type_decl(type, &ad, &n, TYPE_ABSTRACT);
5251 } else {
5252 expr_type(type, gexpr);
5254 skip(')');
5257 static void parse_type(CType *type)
5259 AttributeDef ad;
5260 int n;
5262 if (!parse_btype(type, &ad, 0)) {
5263 expect("type");
5265 type_decl(type, &ad, &n, TYPE_ABSTRACT);
/* Parse the argument list of a builtin according to the template
   string 'args': 't' = type name, 'e' = plain expression, 'v'/'V' =
   (const) void*, 's'/'S' = (const) char*, 'i' = int, 'l' = size_t.
   Each typed argument is implicitly cast; with 'nc' set, no code is
   generated for the arguments. */
5268 static void parse_builtin_params(int nc, const char *args)
5270 char c, sep = '(';
5271 CType type;
5272 if (nc)
5273 nocode_wanted++;
5274 next();
5275 if (*args == 0)
/* zero-argument builtin: still consume the '(' */
5276 skip(sep);
5277 while ((c = *args++)) {
5278 skip(sep);
5279 sep = ',';
5280 if (c == 't') {
5281 parse_type(&type);
5282 vpush(&type);
5283 continue;
5285 expr_eq();
5286 type.ref = NULL;
5287 type.t = 0;
5288 switch (c) {
5289 case 'e':
/* raw expression: no conversion applied */
5290 continue;
5291 case 'V':
5292 type.t = VT_CONSTANT;
/* fall through: 'V' is a const-qualified 'v' (const void *) */
5293 case 'v':
5294 type.t |= VT_VOID;
5295 mk_pointer (&type);
5296 break;
5297 case 'S':
5298 type.t = VT_CONSTANT;
/* fall through: 'S' is a const-qualified 's' (const char *) */
5299 case 's':
5300 type.t |= char_type.t;
5301 mk_pointer (&type);
5302 break;
5303 case 'i':
5304 type.t = VT_INT;
5305 break;
5306 case 'l':
5307 type.t = VT_SIZE_T;
5308 break;
5309 default:
5310 break;
5312 gen_assign_cast(&type);
5314 skip(')');
5315 if (nc)
5316 nocode_wanted--;
/* Parse and generate a call to one of the __atomic_* builtins.
   'atok' is the builtin's token; its argument/return signature is
   looked up in 'templates' (indexed relative to TOK___atomic_store).
   The call is lowered to a helper function "__atomic_xxx_N" where N
   is the operand size in bytes.
   NOTE(review): blob dump with brace-only lines elided; code text is
   kept byte-identical. */
5319 static void parse_atomic(int atok)
5321 int size, align, arg, t, save = 0;
5322 CType *atom, *atom_ptr, ct = {0};
5323 SValue store;
5324 char buf[40];
5325 static const char *const templates[] = {
5327 * Each entry consists of callback and function template.
5328 * The template represents argument types and return type.
5330 * ? void (return-only)
5331 * b bool
5332 * a atomic
5333 * A read-only atomic
5334 * p pointer to memory
5335 * v value
5336 * l load pointer
5337 * s save pointer
5338 * m memory model
5341 /* keep in order of appearance in tcctok.h: */
5342 /* __atomic_store */ "alm.?",
5343 /* __atomic_load */ "Asm.v",
5344 /* __atomic_exchange */ "alsm.v",
5345 /* __atomic_compare_exchange */ "aplbmm.b",
5346 /* __atomic_fetch_add */ "avm.v",
5347 /* __atomic_fetch_sub */ "avm.v",
5348 /* __atomic_fetch_or */ "avm.v",
5349 /* __atomic_fetch_xor */ "avm.v",
5350 /* __atomic_fetch_and */ "avm.v",
5351 /* __atomic_fetch_nand */ "avm.v",
5352 /* __atomic_and_fetch */ "avm.v",
5353 /* __atomic_sub_fetch */ "avm.v",
5354 /* __atomic_or_fetch */ "avm.v",
5355 /* __atomic_xor_fetch */ "avm.v",
5356 /* __atomic_and_fetch */ "avm.v",
5357 /* __atomic_nand_fetch */ "avm.v"
5359 const char *template = templates[(atok - TOK___atomic_store)];
5361 atom = atom_ptr = NULL;
5362 size = 0; /* pacify compiler */
5363 next();
5364 skip('(');
/* parse each argument according to its template letter */
5365 for (arg = 0;;) {
5366 expr_eq();
5367 switch (template[arg]) {
5368 case 'a':
5369 case 'A':
/* the atomic pointer argument fixes the operand type and size */
5370 atom_ptr = &vtop->type;
5371 if ((atom_ptr->t & VT_BTYPE) != VT_PTR)
5372 expect("pointer");
5373 atom = pointed_type(atom_ptr);
5374 size = type_size(atom, &align);
5375 if (size > 8
5376 || (size & (size - 1))
5377 || (atok > TOK___atomic_compare_exchange
5378 && (0 == btype_size(atom->t & VT_BTYPE)
5379 || (atom->t & VT_BTYPE) == VT_PTR)))
5380 expect("integral or integer-sized pointer target type");
5381 /* GCC does not care either: */
5382 /* if (!(atom->t & VT_ATOMIC))
5383 tcc_warning("pointer target declaration is missing '_Atomic'"); */
5384 break;
5386 case 'p':
5387 if ((vtop->type.t & VT_BTYPE) != VT_PTR
5388 || type_size(pointed_type(&vtop->type), &align) != size)
5389 tcc_error("pointer target type mismatch in argument %d", arg + 1);
5390 gen_assign_cast(atom_ptr);
5391 break;
5392 case 'v':
5393 gen_assign_cast(atom);
5394 break;
5395 case 'l':
5396 indir();
5397 gen_assign_cast(atom);
5398 break;
5399 case 's':
/* 's': destination pointer; remember where to store the result */
5400 save = 1;
5401 indir();
5402 store = *vtop;
5403 vpop();
5404 break;
5405 case 'm':
5406 gen_assign_cast(&int_type);
5407 break;
5408 case 'b':
5409 ct.t = VT_BOOL;
5410 gen_assign_cast(&ct);
5411 break;
5413 if ('.' == template[++arg])
5414 break;
5415 skip(',');
5417 skip(')');
/* the letter after '.' is the return type */
5419 ct.t = VT_VOID;
5420 switch (template[arg + 1]) {
5421 case 'b':
5422 ct.t = VT_BOOL;
5423 break;
5424 case 'v':
5425 ct = *atom;
5426 break;
/* call the size-specialized runtime helper, e.g. __atomic_load_4 */
5429 sprintf(buf, "%s_%d", get_tok_str(atok, 0), size);
5430 vpush_helper_func(tok_alloc_const(buf));
5431 vrott(arg - save + 1);
5432 gfunc_call(arg - save);
5434 vpush(&ct);
5435 PUT_R_RET(vtop, ct.t);
5436 t = ct.t & VT_BTYPE;
5437 if (t == VT_BYTE || t == VT_SHORT || t == VT_BOOL) {
5438 #ifdef PROMOTE_RET
5439 vtop->r |= BFVAL(VT_MUSTCAST, 1);
5440 #else
5441 vtop->type.t = VT_INT;
5442 #endif
5444 gen_cast(&ct);
/* with a save pointer ('s'), also write the result back through it */
5445 if (save) {
5446 vpush(&ct);
5447 *vtop = store;
5448 vswap();
5449 vstore();
/* Parse a unary expression (constants, identifiers, string literals,
   casts, sizeof/alignof, builtins, prefix operators) followed by the
   postfix operations (++/--, field access, indexing, function call).
   The result is left on the value stack (vtop).
   NOTE(review): blob dump with brace-only lines elided; code text is
   kept byte-identical. */
5453 ST_FUNC void unary(void)
5455 int n, t, align, size, r;
5456 CType type;
5457 Sym *s;
5458 AttributeDef ad;
5460 /* generate line number info */
5461 if (debug_modes)
5462 tcc_debug_line(tcc_state), tcc_tcov_check_line (tcc_state, 1);
5464 type.ref = NULL;
5465 /* XXX: GCC 2.95.3 does not generate a table although it should be
5466 better here */
5467 tok_next:
5468 switch(tok) {
5469 case TOK_EXTENSION:
5470 next();
5471 goto tok_next;
5472 case TOK_LCHAR:
5473 #ifdef TCC_TARGET_PE
5474 t = VT_SHORT|VT_UNSIGNED;
5475 goto push_tokc;
5476 #endif
5477 case TOK_CINT:
5478 case TOK_CCHAR:
5479 t = VT_INT;
5480 push_tokc:
5481 type.t = t;
5482 vsetc(&type, VT_CONST, &tokc);
5483 next();
5484 break;
5485 case TOK_CUINT:
5486 t = VT_INT | VT_UNSIGNED;
5487 goto push_tokc;
5488 case TOK_CLLONG:
5489 t = VT_LLONG;
5490 goto push_tokc;
5491 case TOK_CULLONG:
5492 t = VT_LLONG | VT_UNSIGNED;
5493 goto push_tokc;
5494 case TOK_CFLOAT:
5495 t = VT_FLOAT;
5496 goto push_tokc;
5497 case TOK_CDOUBLE:
5498 t = VT_DOUBLE;
5499 goto push_tokc;
5500 case TOK_CLDOUBLE:
5501 #ifdef TCC_USING_DOUBLE_FOR_LDOUBLE
5502 t = VT_DOUBLE | VT_LONG;
5503 #else
5504 t = VT_LDOUBLE;
5505 #endif
5506 goto push_tokc;
5507 case TOK_CLONG:
5508 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG;
5509 goto push_tokc;
5510 case TOK_CULONG:
5511 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG | VT_UNSIGNED;
5512 goto push_tokc;
5513 case TOK___FUNCTION__:
5514 if (!gnu_ext)
5515 goto tok_identifier;
5516 /* fall thru */
5517 case TOK___FUNC__:
/* __func__: synthesize a string token from the current function name */
5518 tok = TOK_STR;
5519 cstr_reset(&tokcstr);
5520 cstr_cat(&tokcstr, funcname, 0);
5521 tokc.str.size = tokcstr.size;
5522 tokc.str.data = tokcstr.data;
5523 goto case_TOK_STR;
5524 case TOK_LSTR:
5525 #ifdef TCC_TARGET_PE
5526 t = VT_SHORT | VT_UNSIGNED;
5527 #else
5528 t = VT_INT;
5529 #endif
5530 goto str_init;
5531 case TOK_STR:
5532 case_TOK_STR:
5533 /* string parsing */
5534 t = char_type.t;
5535 str_init:
5536 if (tcc_state->warn_write_strings & WARN_ON)
5537 t |= VT_CONSTANT;
5538 type.t = t;
5539 mk_pointer(&type);
5540 type.t |= VT_ARRAY;
5541 memset(&ad, 0, sizeof(AttributeDef));
5542 ad.section = rodata_section;
5543 decl_initializer_alloc(&type, &ad, VT_CONST, 2, 0, 0);
5544 break;
5545 case TOK_SOTYPE:
5546 case '(':
5547 t = tok;
5548 next();
5549 /* cast ? */
5550 if (parse_btype(&type, &ad, 0)) {
5551 type_decl(&type, &ad, &n, TYPE_ABSTRACT);
5552 skip(')');
5553 /* check ISOC99 compound literal */
5554 if (tok == '{') {
5555 /* data is allocated locally by default */
5556 if (global_expr)
5557 r = VT_CONST;
5558 else
5559 r = VT_LOCAL;
5560 /* all except arrays are lvalues */
5561 if (!(type.t & VT_ARRAY))
5562 r |= VT_LVAL;
5563 memset(&ad, 0, sizeof(AttributeDef));
5564 decl_initializer_alloc(&type, &ad, r, 1, 0, 0);
5565 } else if (t == TOK_SOTYPE) { /* from sizeof/alignof (...) */
5566 vpush(&type);
5567 return;
5568 } else {
5569 unary();
5570 gen_cast(&type);
5572 } else if (tok == '{') {
/* GNU statement expression: ({ ... }) */
5573 int saved_nocode_wanted = nocode_wanted;
5574 if (CONST_WANTED && !NOEVAL_WANTED)
5575 expect("constant");
5576 if (0 == local_scope)
5577 tcc_error("statement expression outside of function");
5578 /* save all registers */
5579 save_regs(0);
5580 /* statement expression : we do not accept break/continue
5581 inside as GCC does. We do retain the nocode_wanted state,
5582 as statement expressions can't ever be entered from the
5583 outside, so any reactivation of code emission (from labels
5584 or loop heads) can be disabled again after the end of it. */
5585 block(STMT_EXPR);
5586 /* If the statement expr can be entered, then we retain the current
5587 nocode_wanted state (from e.g. a 'return 0;' in the stmt-expr).
5588 If it can't be entered then the state is that from before the
5589 statement expression. */
5590 if (saved_nocode_wanted)
5591 nocode_wanted = saved_nocode_wanted;
5592 skip(')');
5593 } else {
5594 gexpr();
5595 skip(')');
5597 break;
5598 case '*':
5599 next();
5600 unary();
5601 indir();
5602 break;
5603 case '&':
5604 next();
5605 unary();
5606 /* functions names must be treated as function pointers,
5607 except for unary '&' and sizeof. Since we consider that
5608 functions are not lvalues, we only have to handle it
5609 there and in function calls. */
5610 /* arrays can also be used although they are not lvalues */
5611 if ((vtop->type.t & VT_BTYPE) != VT_FUNC &&
5612 !(vtop->type.t & (VT_ARRAY | VT_VLA)))
5613 test_lvalue();
5614 if (vtop->sym)
5615 vtop->sym->a.addrtaken = 1;
5616 mk_pointer(&vtop->type);
5617 gaddrof();
5618 break;
5619 case '!':
5620 next();
5621 unary();
5622 gen_test_zero(TOK_EQ);
5623 break;
5624 case '~':
5625 next();
5626 unary();
5627 vpushi(-1);
5628 gen_op('^');
5629 break;
5630 case '+':
5631 next();
5632 unary();
5633 if ((vtop->type.t & VT_BTYPE) == VT_PTR)
5634 tcc_error("pointer not accepted for unary plus");
5635 /* In order to force cast, we add zero, except for floating point
5636 where we really need an noop (otherwise -0.0 will be transformed
5637 into +0.0). */
5638 if (!is_float(vtop->type.t)) {
5639 vpushi(0);
5640 gen_op('+');
5642 break;
5643 case TOK_SIZEOF:
5644 case TOK_ALIGNOF1:
5645 case TOK_ALIGNOF2:
5646 case TOK_ALIGNOF3:
5647 t = tok;
5648 next();
5649 if (tok == '(')
5650 tok = TOK_SOTYPE;
5651 expr_type(&type, unary);
5652 if (t == TOK_SIZEOF) {
5653 vpush_type_size(&type, &align);
5654 gen_cast_s(VT_SIZE_T);
5655 } else {
5656 type_size(&type, &align);
5657 s = NULL;
5658 if (vtop[1].r & VT_SYM)
5659 s = vtop[1].sym; /* hack: accessing previous vtop */
5660 if (s && s->a.aligned)
5661 align = 1 << (s->a.aligned - 1);
5662 vpushs(align);
5664 break;
5666 case TOK_builtin_expect:
5667 /* __builtin_expect is a no-op for now */
5668 parse_builtin_params(0, "ee");
5669 vpop();
5670 break;
5671 case TOK_builtin_types_compatible_p:
5672 parse_builtin_params(0, "tt");
5673 vtop[-1].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
5674 vtop[0].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
5675 n = is_compatible_types(&vtop[-1].type, &vtop[0].type);
5676 vtop -= 2;
5677 vpushi(n);
5678 break;
5679 case TOK_builtin_choose_expr:
5681 int64_t c;
5682 next();
5683 skip('(');
5684 c = expr_const64();
5685 skip(',');
5686 if (!c) {
5687 nocode_wanted++;
5689 expr_eq();
5690 if (!c) {
5691 vpop();
5692 nocode_wanted--;
5694 skip(',');
5695 if (c) {
5696 nocode_wanted++;
5698 expr_eq();
5699 if (c) {
5700 vpop();
5701 nocode_wanted--;
5703 skip(')');
5705 break;
5706 case TOK_builtin_constant_p:
5707 parse_builtin_params(1, "e");
5708 n = 1;
5709 if ((vtop->r & (VT_VALMASK | VT_LVAL)) != VT_CONST
5710 || ((vtop->r & VT_SYM) && vtop->sym->a.addrtaken)
5712 n = 0;
5713 vtop--;
5714 vpushi(n);
5715 break;
5716 case TOK_builtin_frame_address:
5717 case TOK_builtin_return_address:
5719 int tok1 = tok;
5720 int level;
5721 next();
5722 skip('(');
5723 level = expr_const();
5724 if (level < 0)
5725 tcc_error("%s only takes positive integers", get_tok_str(tok1, 0));
5726 skip(')');
5727 type.t = VT_VOID;
5728 mk_pointer(&type);
5729 vset(&type, VT_LOCAL, 0); /* local frame */
5730 while (level--) {
5731 #ifdef TCC_TARGET_RISCV64
5732 vpushi(2*PTR_SIZE);
5733 gen_op('-');
5734 #endif
5735 mk_pointer(&vtop->type);
5736 indir(); /* -> parent frame */
5738 if (tok1 == TOK_builtin_return_address) {
5739 // assume return address is just above frame pointer on stack
5740 #ifdef TCC_TARGET_ARM
5741 vpushi(2*PTR_SIZE);
5742 gen_op('+');
5743 #elif defined TCC_TARGET_RISCV64
5744 vpushi(PTR_SIZE);
5745 gen_op('-');
5746 #else
5747 vpushi(PTR_SIZE);
5748 gen_op('+');
5749 #endif
5750 mk_pointer(&vtop->type);
5751 indir();
5754 break;
5755 #ifdef TCC_TARGET_RISCV64
5756 case TOK_builtin_va_start:
5757 parse_builtin_params(0, "ee");
5758 r = vtop->r & VT_VALMASK;
5759 if (r == VT_LLOCAL)
5760 r = VT_LOCAL;
5761 if (r != VT_LOCAL)
5762 tcc_error("__builtin_va_start expects a local variable");
5763 gen_va_start();
5764 vstore();
5765 break;
5766 #endif
5767 #ifdef TCC_TARGET_X86_64
5768 #ifdef TCC_TARGET_PE
5769 case TOK_builtin_va_start:
5770 parse_builtin_params(0, "ee");
5771 r = vtop->r & VT_VALMASK;
5772 if (r == VT_LLOCAL)
5773 r = VT_LOCAL;
5774 if (r != VT_LOCAL)
5775 tcc_error("__builtin_va_start expects a local variable");
5776 vtop->r = r;
5777 vtop->type = char_pointer_type;
5778 vtop->c.i += 8;
5779 vstore();
5780 break;
5781 #else
5782 case TOK_builtin_va_arg_types:
5783 parse_builtin_params(0, "t");
5784 vpushi(classify_x86_64_va_arg(&vtop->type));
5785 vswap();
5786 vpop();
5787 break;
5788 #endif
5789 #endif
5791 #ifdef TCC_TARGET_ARM64
5792 case TOK_builtin_va_start: {
5793 parse_builtin_params(0, "ee");
5794 //xx check types
5795 gen_va_start();
5796 vpushi(0);
5797 vtop->type.t = VT_VOID;
5798 break;
5800 case TOK_builtin_va_arg: {
5801 parse_builtin_params(0, "et");
5802 type = vtop->type;
5803 vpop();
5804 //xx check types
5805 gen_va_arg(&type);
5806 vtop->type = type;
5807 break;
5809 case TOK___arm64_clear_cache: {
5810 parse_builtin_params(0, "ee");
5811 gen_clear_cache();
5812 vpushi(0);
5813 vtop->type.t = VT_VOID;
5814 break;
5816 #endif
5818 /* atomic operations */
5819 case TOK___atomic_store:
5820 case TOK___atomic_load:
5821 case TOK___atomic_exchange:
5822 case TOK___atomic_compare_exchange:
5823 case TOK___atomic_fetch_add:
5824 case TOK___atomic_fetch_sub:
5825 case TOK___atomic_fetch_or:
5826 case TOK___atomic_fetch_xor:
5827 case TOK___atomic_fetch_and:
5828 case TOK___atomic_fetch_nand:
5829 case TOK___atomic_add_fetch:
5830 case TOK___atomic_sub_fetch:
5831 case TOK___atomic_or_fetch:
5832 case TOK___atomic_xor_fetch:
5833 case TOK___atomic_and_fetch:
5834 case TOK___atomic_nand_fetch:
5835 parse_atomic(tok);
5836 break;
5838 /* pre operations */
5839 case TOK_INC:
5840 case TOK_DEC:
5841 t = tok;
5842 next();
5843 unary();
5844 inc(0, t);
5845 break;
5846 case '-':
5847 next();
5848 unary();
5849 if (is_float(vtop->type.t)) {
5850 gen_opif(TOK_NEG);
5851 } else {
5852 vpushi(0);
5853 vswap();
5854 gen_op('-');
5856 break;
5857 case TOK_LAND:
5858 if (!gnu_ext)
5859 goto tok_identifier;
5860 next();
5861 /* allow to take the address of a label */
5862 if (tok < TOK_UIDENT)
5863 expect("label identifier");
5864 s = label_find(tok);
5865 if (!s) {
5866 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
5867 } else {
5868 if (s->r == LABEL_DECLARED)
5869 s->r = LABEL_FORWARD;
5871 if ((s->type.t & VT_BTYPE) != VT_PTR) {
5872 s->type.t = VT_VOID;
5873 mk_pointer(&s->type);
5874 s->type.t |= VT_STATIC;
5876 vpushsym(&s->type, s);
5877 next();
5878 break;
5880 case TOK_GENERIC:
5882 CType controlling_type;
5883 int has_default = 0;
5884 int has_match = 0;
5885 int learn = 0;
5886 TokenString *str = NULL;
5887 int saved_nocode_wanted = nocode_wanted;
5888 nocode_wanted &= ~CONST_WANTED_MASK;
5890 next();
5891 skip('(');
5892 expr_type(&controlling_type, expr_eq);
5893 convert_parameter_type (&controlling_type);
5895 nocode_wanted = saved_nocode_wanted;
/* scan the associations; the matching (or default) branch is saved
   in 'str' and re-parsed afterwards */
5897 for (;;) {
5898 learn = 0;
5899 skip(',');
5900 if (tok == TOK_DEFAULT) {
5901 if (has_default)
5902 tcc_error("too many 'default'");
5903 has_default = 1;
5904 if (!has_match)
5905 learn = 1;
5906 next();
5907 } else {
5908 AttributeDef ad_tmp;
5909 int itmp;
5910 CType cur_type;
5912 parse_btype(&cur_type, &ad_tmp, 0);
5913 type_decl(&cur_type, &ad_tmp, &itmp, TYPE_ABSTRACT);
5914 if (compare_types(&controlling_type, &cur_type, 0)) {
5915 if (has_match) {
5916 tcc_error("type match twice");
5918 has_match = 1;
5919 learn = 1;
5922 skip(':');
5923 if (learn) {
5924 if (str)
5925 tok_str_free(str);
5926 skip_or_save_block(&str);
5927 } else {
5928 skip_or_save_block(NULL);
5930 if (tok == ')')
5931 break;
5933 if (!str) {
5934 char buf[60];
5935 type_to_str(buf, sizeof buf, &controlling_type, NULL);
5936 tcc_error("type '%s' does not match any association", buf);
5938 begin_macro(str, 1);
5939 next();
5940 expr_eq();
5941 if (tok != TOK_EOF)
5942 expect(",");
5943 end_macro();
5944 next();
5945 break;
5947 // special qnan , snan and infinity values
5948 case TOK___NAN__:
5949 n = 0x7fc00000;
5950 special_math_val:
5951 vpushi(n);
5952 vtop->type.t = VT_FLOAT;
5953 next();
5954 break;
5955 case TOK___SNAN__:
5956 n = 0x7f800001;
5957 goto special_math_val;
5958 case TOK___INF__:
5959 n = 0x7f800000;
5960 goto special_math_val;
5962 default:
5963 tok_identifier:
5964 if (tok < TOK_UIDENT)
5965 tcc_error("expression expected before '%s'", get_tok_str(tok, &tokc));
5966 t = tok;
5967 next();
5968 s = sym_find(t);
5969 if (!s || IS_ASM_SYM(s)) {
5970 const char *name = get_tok_str(t, NULL);
5971 if (tok != '(')
5972 tcc_error("'%s' undeclared", name);
5973 /* for simple function calls, we tolerate undeclared
5974 external reference to int() function */
5975 tcc_warning_c(warn_implicit_function_declaration)(
5976 "implicit declaration of function '%s'", name);
5977 s = external_global_sym(t, &func_old_type);
5980 r = s->r;
5981 /* A symbol that has a register is a local register variable,
5982 which starts out as VT_LOCAL value. */
5983 if ((r & VT_VALMASK) < VT_CONST)
5984 r = (r & ~VT_VALMASK) | VT_LOCAL;
5986 vset(&s->type, r, s->c);
5987 /* Point to s as backpointer (even without r&VT_SYM).
5988 Will be used by at least the x86 inline asm parser for
5989 regvars. */
5990 vtop->sym = s;
5992 if (r & VT_SYM) {
5993 vtop->c.i = 0;
5994 } else if (r == VT_CONST && IS_ENUM_VAL(s->type.t)) {
5995 vtop->c.i = s->enum_val;
5997 break;
6000 /* post operations */
6001 while (1) {
6002 if (tok == TOK_INC || tok == TOK_DEC) {
6003 inc(1, tok);
6004 next();
6005 } else if (tok == '.' || tok == TOK_ARROW) {
6006 int qualifiers, cumofs;
6007 /* field */
6008 if (tok == TOK_ARROW)
6009 indir();
6010 qualifiers = vtop->type.t & (VT_CONSTANT | VT_VOLATILE);
6011 test_lvalue();
6012 /* expect pointer on structure */
6013 next();
6014 s = find_field(&vtop->type, tok, &cumofs);
6015 /* add field offset to pointer */
6016 gaddrof();
6017 vtop->type = char_pointer_type; /* change type to 'char *' */
6018 vpushi(cumofs);
6019 gen_op('+');
6020 /* change type to field type, and set to lvalue */
6021 vtop->type = s->type;
6022 vtop->type.t |= qualifiers;
6023 /* an array is never an lvalue */
6024 if (!(vtop->type.t & VT_ARRAY)) {
6025 vtop->r |= VT_LVAL;
6026 #ifdef CONFIG_TCC_BCHECK
6027 /* if bound checking, the referenced pointer must be checked */
6028 if (tcc_state->do_bounds_check)
6029 vtop->r |= VT_MUSTBOUND;
6030 #endif
6032 next();
6033 } else if (tok == '[') {
6034 next();
6035 gexpr();
6036 gen_op('+');
6037 indir();
6038 skip(']');
6039 } else if (tok == '(') {
6040 SValue ret;
6041 Sym *sa;
6042 int nb_args, ret_nregs, ret_align, regsize, variadic;
6044 /* function call */
6045 if ((vtop->type.t & VT_BTYPE) != VT_FUNC) {
6046 /* pointer test (no array accepted) */
6047 if ((vtop->type.t & (VT_BTYPE | VT_ARRAY)) == VT_PTR) {
6048 vtop->type = *pointed_type(&vtop->type);
6049 if ((vtop->type.t & VT_BTYPE) != VT_FUNC)
6050 goto error_func;
6051 } else {
6052 error_func:
6053 expect("function pointer");
6055 } else {
6056 vtop->r &= ~VT_LVAL; /* no lvalue */
6058 /* get return type */
6059 s = vtop->type.ref;
6060 next();
6061 sa = s->next; /* first parameter */
6062 nb_args = regsize = 0;
6063 ret.r2 = VT_CONST;
6064 /* compute first implicit argument if a structure is returned */
6065 if ((s->type.t & VT_BTYPE) == VT_STRUCT) {
6066 variadic = (s->f.func_type == FUNC_ELLIPSIS);
6067 ret_nregs = gfunc_sret(&s->type, variadic, &ret.type,
6068 &ret_align, &regsize);
6069 if (ret_nregs <= 0) {
6070 /* get some space for the returned structure */
6071 size = type_size(&s->type, &align);
6072 #ifdef TCC_TARGET_ARM64
6073 /* On arm64, a small struct is return in registers.
6074 It is much easier to write it to memory if we know
6075 that we are allowed to write some extra bytes, so
6076 round the allocated space up to a power of 2: */
6077 if (size < 16)
6078 while (size & (size - 1))
6079 size = (size | (size - 1)) + 1;
6080 #endif
6081 loc = (loc - size) & -align;
6082 ret.type = s->type;
6083 ret.r = VT_LOCAL | VT_LVAL;
6084 /* pass it as 'int' to avoid structure arg passing
6085 problems */
6086 vseti(VT_LOCAL, loc);
6087 #ifdef CONFIG_TCC_BCHECK
6088 if (tcc_state->do_bounds_check)
6089 --loc;
6090 #endif
6091 ret.c = vtop->c;
6092 if (ret_nregs < 0)
6093 vtop--;
6094 else
6095 nb_args++;
6097 } else {
6098 ret_nregs = 1;
6099 ret.type = s->type;
6102 if (ret_nregs > 0) {
6103 /* return in register */
6104 ret.c.i = 0;
6105 PUT_R_RET(&ret, ret.type.t);
6107 if (tok != ')') {
6108 for(;;) {
6109 expr_eq();
6110 gfunc_param_typed(s, sa);
6111 nb_args++;
6112 if (sa)
6113 sa = sa->next;
6114 if (tok == ')')
6115 break;
6116 skip(',');
6119 if (sa)
6120 tcc_error("too few arguments to function");
6121 skip(')');
6122 gfunc_call(nb_args);
6124 if (ret_nregs < 0) {
6125 vsetc(&ret.type, ret.r, &ret.c);
6126 #ifdef TCC_TARGET_RISCV64
6127 arch_transfer_ret_regs(1);
6128 #endif
6129 } else {
6130 /* return value */
6131 n = ret_nregs;
6132 while (n > 1) {
6133 int rc = reg_classes[ret.r] & ~(RC_INT | RC_FLOAT);
6134 /* We assume that when a structure is returned in multiple
6135 registers, their classes are consecutive values of the
6136 suite s(n) = 2^n */
6137 rc <<= --n;
6138 for (r = 0; r < NB_REGS; ++r)
6139 if (reg_classes[r] & rc)
6140 break;
6141 vsetc(&ret.type, r, &ret.c);
6143 vsetc(&ret.type, ret.r, &ret.c);
6144 vtop->r2 = ret.r2;
6146 /* handle packed struct return */
6147 if (((s->type.t & VT_BTYPE) == VT_STRUCT) && ret_nregs) {
6148 int addr, offset;
6150 size = type_size(&s->type, &align);
6151 /* We're writing whole regs often, make sure there's enough
6152 space. Assume register size is power of 2. */
6153 size = (size + regsize - 1) & -regsize;
6154 if (ret_align > align)
6155 align = ret_align;
6156 loc = (loc - size) & -align;
6157 addr = loc;
6158 offset = 0;
6159 for (;;) {
6160 vset(&ret.type, VT_LOCAL | VT_LVAL, addr + offset);
6161 vswap();
6162 vstore();
6163 vtop--;
6164 if (--ret_nregs == 0)
6165 break;
6166 offset += regsize;
6168 vset(&s->type, VT_LOCAL | VT_LVAL, addr);
6171 /* Promote char/short return values. This is matters only
6172 for calling function that were not compiled by TCC and
6173 only on some architectures. For those where it doesn't
6174 matter we expect things to be already promoted to int,
6175 but not larger. */
6176 t = s->type.t & VT_BTYPE;
6177 if (t == VT_BYTE || t == VT_SHORT || t == VT_BOOL) {
6178 #ifdef PROMOTE_RET
6179 vtop->r |= BFVAL(VT_MUSTCAST, 1);
6180 #else
6181 vtop->type.t = VT_INT;
6182 #endif
6185 if (s->f.func_noreturn) {
6186 if (debug_modes)
6187 tcc_tcov_block_end(tcc_state, -1);
6188 CODE_OFF();
6190 } else {
6191 break;
6196 #ifndef precedence_parser /* original top-down parser */
6198 static void expr_prod(void)
6200 int t;
6202 unary();
6203 while ((t = tok) == '*' || t == '/' || t == '%') {
6204 next();
6205 unary();
6206 gen_op(t);
6210 static void expr_sum(void)
6212 int t;
6214 expr_prod();
6215 while ((t = tok) == '+' || t == '-') {
6216 next();
6217 expr_prod();
6218 gen_op(t);
6222 static void expr_shift(void)
6224 int t;
6226 expr_sum();
6227 while ((t = tok) == TOK_SHL || t == TOK_SAR) {
6228 next();
6229 expr_sum();
6230 gen_op(t);
6234 static void expr_cmp(void)
6236 int t;
6238 expr_shift();
6239 while (((t = tok) >= TOK_ULE && t <= TOK_GT) ||
6240 t == TOK_ULT || t == TOK_UGE) {
6241 next();
6242 expr_shift();
6243 gen_op(t);
6247 static void expr_cmpeq(void)
6249 int t;
6251 expr_cmp();
6252 while ((t = tok) == TOK_EQ || t == TOK_NE) {
6253 next();
6254 expr_cmp();
6255 gen_op(t);
6259 static void expr_and(void)
6261 expr_cmpeq();
6262 while (tok == '&') {
6263 next();
6264 expr_cmpeq();
6265 gen_op('&');
6269 static void expr_xor(void)
6271 expr_and();
6272 while (tok == '^') {
6273 next();
6274 expr_and();
6275 gen_op('^');
6279 static void expr_or(void)
6281 expr_xor();
6282 while (tok == '|') {
6283 next();
6284 expr_xor();
6285 gen_op('|');
6289 static void expr_landor(int op);
6291 static void expr_land(void)
6293 expr_or();
6294 if (tok == TOK_LAND)
6295 expr_landor(tok);
6298 static void expr_lor(void)
6300 expr_land();
6301 if (tok == TOK_LOR)
6302 expr_landor(tok);
6305 # define expr_landor_next(op) op == TOK_LAND ? expr_or() : expr_land()
6306 #else /* defined precedence_parser */
6307 # define expr_landor_next(op) unary(), expr_infix(precedence(op) + 1)
6308 # define expr_lor() unary(), expr_infix(1)
6310 static int precedence(int tok)
6312 switch (tok) {
6313 case TOK_LOR: return 1;
6314 case TOK_LAND: return 2;
6315 case '|': return 3;
6316 case '^': return 4;
6317 case '&': return 5;
6318 case TOK_EQ: case TOK_NE: return 6;
6319 relat: case TOK_ULT: case TOK_UGE: return 7;
6320 case TOK_SHL: case TOK_SAR: return 8;
6321 case '+': case '-': return 9;
6322 case '*': case '/': case '%': return 10;
6323 default:
6324 if (tok >= TOK_ULE && tok <= TOK_GT)
6325 goto relat;
6326 return 0;
6329 static unsigned char prec[256];
6330 static void init_prec(void)
6332 int i;
6333 for (i = 0; i < 256; i++)
6334 prec[i] = precedence(i);
6336 #define precedence(i) ((unsigned)i < 256 ? prec[i] : 0)
6338 static void expr_landor(int op);
6340 static void expr_infix(int p)
6342 int t = tok, p2;
6343 while ((p2 = precedence(t)) >= p) {
6344 if (t == TOK_LOR || t == TOK_LAND) {
6345 expr_landor(t);
6346 } else {
6347 next();
6348 unary();
6349 if (precedence(tok) > p2)
6350 expr_infix(p2 + 1);
6351 gen_op(t);
6353 t = tok;
6356 #endif
6358 /* Assuming vtop is a value used in a conditional context
6359 (i.e. compared with zero) return 0 if it's false, 1 if
6360 true and -1 if it can't be statically determined. */
6361 static int condition_3way(void)
6363 int c = -1;
6364 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
6365 (!(vtop->r & VT_SYM) || !vtop->sym->a.weak)) {
6366 vdup();
6367 gen_cast_s(VT_BOOL);
6368 c = vtop->c.i;
6369 vpop();
6371 return c;
/* Parse and generate code for a chain of '&&' or '||' (op is TOK_LAND or
   TOK_LOR).  i is the operator's neutral-breaking value (1 for '&&',
   0 for '||').  Constant operands are folded: once the outcome is known
   (c != i) the rest of the chain is parsed under nocode_wanted (f set).
   cc stays 1 while every operand so far was a compile-time constant.
   NOTE(review): brace-only lines were lost in extraction; code kept
   verbatim. */
6374 static void expr_landor(int op)
6376 int t = 0, cc = 1, f = 0, i = op == TOK_LAND, c;
6377 for(;;) {
6378 c = f ? i : condition_3way();
6379 if (c < 0)
/* runtime value: registers must be saved before the conditional jump */
6380 save_regs(1), cc = 0;
6381 else if (c != i)
/* result decided: evaluate remaining operands without emitting code */
6382 nocode_wanted++, f = 1;
6383 if (tok != op)
6384 break;
6385 if (c < 0)
/* chain the test jump for this operand */
6386 t = gvtst(i, t);
6387 else
6388 vpop();
6389 next();
6390 expr_landor_next(op);
6392 if (cc || f) {
/* fully constant (or folded) result: push 0/1 directly */
6393 vpop();
6394 vpushi(i ^ f);
6395 gsym(t);
6396 nocode_wanted -= f;
6397 } else {
/* attach the accumulated jump chain to the final VT_CMP value */
6398 gvtst_set(i, t);
6402 static int is_cond_bool(SValue *sv)
6404 if ((sv->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST
6405 && (sv->type.t & VT_BTYPE) == VT_INT)
6406 return (unsigned)sv->c.i < 2;
6407 if (sv->r == VT_CMP)
6408 return 1;
6409 return 0;
/* Parse and generate code for the conditional operator 'a ? b : c',
   including the GNU extension 'a ?: c' (g set).  c is the statically
   known truth value of the condition (-1 if runtime-only); the branch
   not taken is parsed under nocode_wanted.
   NOTE(review): brace-only lines were lost in extraction; code kept
   verbatim. */
6412 static void expr_cond(void)
6414 int tt, u, r1, r2, rc, t1, t2, islv, c, g;
6415 SValue sv;
6416 CType type;
6418 expr_lor();
6419 if (tok == '?') {
6420 next();
6421 c = condition_3way();
6422 g = (tok == ':' && gnu_ext);
6423 tt = 0;
6424 if (!g) {
6425 if (c < 0) {
6426 save_regs(1);
6427 tt = gvtst(1, 0);
6428 } else {
6429 vpop();
6431 } else if (c < 0) {
6432 /* needed to avoid having different registers saved in
6433 each branch */
6434 save_regs(1);
6435 gv_dup();
6436 tt = gvtst(0, 0);
6439 if (c == 0)
6440 nocode_wanted++;
6441 if (!g)
6442 gexpr();
6444 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
6445 mk_pointer(&vtop->type);
6446 sv = *vtop; /* save value to handle it later */
6447 vtop--; /* no vpop so that FP stack is not flushed */
6449 if (g) {
6450 u = tt;
6451 } else if (c < 0) {
6452 u = gjmp(0);
6453 gsym(tt);
6454 } else
6455 u = 0;
6457 if (c == 0)
6458 nocode_wanted--;
6459 if (c == 1)
6460 nocode_wanted++;
6461 skip(':');
6462 expr_cond();
6464 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
6465 mk_pointer(&vtop->type);
6467 /* cast operands to correct type according to ISOC rules */
6468 if (!combine_types(&type, &sv, vtop, '?'))
6469 type_incompatibility_error(&sv.type, &vtop->type,
6470 "type mismatch in conditional expression (have '%s' and '%s')");
6472 if (c < 0 && is_cond_bool(vtop) && is_cond_bool(&sv)) {
6473 /* optimize "if (f ? a > b : c || d) ..." for example, where normally
6474 "a < b" and "c || d" would be forced to "(int)0/1" first, whereas
6475 this code jumps directly to the if's then/else branches. */
6476 t1 = gvtst(0, 0);
6477 t2 = gjmp(0);
6478 gsym(u);
6479 vpushv(&sv);
6480 /* combine jump targets of 2nd op with VT_CMP of 1st op */
6481 gvtst_set(0, t1);
6482 gvtst_set(1, t2);
6483 gen_cast(&type);
6484 // tcc_warning("two conditions expr_cond");
6485 return;
6488 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
6489 that `(expr ? a : b).mem` does not error with "lvalue expected" */
6490 islv = (vtop->r & VT_LVAL) && (sv.r & VT_LVAL) && VT_STRUCT == (type.t & VT_BTYPE);
6492 /* now we convert second operand */
6493 if (c != 1) {
6494 gen_cast(&type);
6495 if (islv) {
6496 mk_pointer(&vtop->type);
6497 gaddrof();
6498 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
6499 gaddrof();
6502 rc = RC_TYPE(type.t);
6503 /* for long longs, we use fixed registers to avoid having
6504 to handle a complicated move */
6505 if (USING_TWO_WORDS(type.t))
6506 rc = RC_RET(type.t);
6508 tt = r2 = 0;
6509 if (c < 0) {
6510 r2 = gv(rc);
6511 tt = gjmp(0);
6513 gsym(u);
6514 if (c == 1)
6515 nocode_wanted--;
6517 /* this is horrible, but we must also convert first
6518 operand */
6519 if (c != 0) {
6520 *vtop = sv;
6521 gen_cast(&type);
6522 if (islv) {
6523 mk_pointer(&vtop->type);
6524 gaddrof();
6525 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
6526 gaddrof();
6529 if (c < 0) {
/* force both branch values into the same register */
6530 r1 = gv(rc);
6531 move_reg(r2, r1, islv ? VT_PTR : type.t);
6532 vtop->r = r2;
6533 gsym(tt);
6536 if (islv)
6537 indir();
6541 static void expr_eq(void)
6543 int t;
6545 expr_cond();
6546 if ((t = tok) == '=' || TOK_ASSIGN(t)) {
6547 test_lvalue();
6548 next();
6549 if (t == '=') {
6550 expr_eq();
6551 } else {
6552 vdup();
6553 expr_eq();
6554 gen_op(TOK_ASSIGN_OP(t));
6556 vstore();
6560 ST_FUNC void gexpr(void)
6562 expr_eq();
6563 if (tok == ',') {
6564 do {
6565 vpop();
6566 next();
6567 expr_eq();
6568 } while (tok == ',');
6570 /* convert array & function to pointer */
6571 convert_parameter_type(&vtop->type);
6573 /* make builtin_constant_p((1,2)) return 0 (like on gcc) */
6574 if ((vtop->r & VT_VALMASK) == VT_CONST && nocode_wanted && !CONST_WANTED)
6575 gv(RC_TYPE(vtop->type.t));
6579 /* parse a constant expression and return value in vtop. */
6580 static void expr_const1(void)
/* CONST_WANTED_BIT in nocode_wanted marks "constant expression
   required" for the duration of the parse */
6582 nocode_wanted += CONST_WANTED_BIT;
6583 expr_cond();
6584 nocode_wanted -= CONST_WANTED_BIT;
6587 /* parse an integer constant and return its value. */
6588 static inline int64_t expr_const64(void)
6590 int64_t c;
6591 expr_const1();
6592 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM | VT_NONCONST)) != VT_CONST)
6593 expect("constant expression");
6594 c = vtop->c.i;
6595 vpop();
6596 return c;
6599 /* parse an integer constant and return its value.
6600 Complain if it doesn't fit 32bit (signed or unsigned). */
6601 ST_FUNC int expr_const(void)
6603 int c;
6604 int64_t wc = expr_const64();
6605 c = wc;
6606 if (c != wc && (unsigned)c != wc)
6607 tcc_error("constant exceeds 32 bit");
6608 return c;
6611 /* ------------------------------------------------------------------------- */
6612 /* return from function */
6614 #ifndef TCC_TARGET_ARM64
/* Generate code to return the value on top of the value stack from the
   current function.  Structs may be returned via a hidden pointer
   (ret_nregs == 0), packed into registers (> 0), or target-specific
   (< 0, RISCV).  NOTE(review): brace-only lines were lost in
   extraction; code kept verbatim. */
6615 static void gfunc_return(CType *func_type)
6617 if ((func_type->t & VT_BTYPE) == VT_STRUCT) {
6618 CType type, ret_type;
6619 int ret_align, ret_nregs, regsize;
6620 ret_nregs = gfunc_sret(func_type, func_var, &ret_type,
6621 &ret_align, &regsize);
6622 if (ret_nregs < 0) {
6623 #ifdef TCC_TARGET_RISCV64
6624 arch_transfer_ret_regs(0);
6625 #endif
6626 } else if (0 == ret_nregs) {
6627 /* if returning structure, must copy it to implicit
6628 first pointer arg location */
6629 type = *func_type;
6630 mk_pointer(&type);
6631 vset(&type, VT_LOCAL | VT_LVAL, func_vc);
6632 indir();
6633 vswap();
6634 /* copy structure value to pointer */
6635 vstore();
6636 } else {
6637 /* returning structure packed into registers */
6638 int size, addr, align, rc, n;
6639 size = type_size(func_type,&align);
/* if the struct is misaligned for whole-register loads, copy it
   to a properly aligned stack temporary first */
6640 if ((align & (ret_align - 1))
6641 && ((vtop->r & VT_VALMASK) < VT_CONST /* pointer to struct */
6642 || (vtop->c.i & (ret_align - 1))
6643 )) {
6644 loc = (loc - size) & -ret_align;
6645 addr = loc;
6646 type = *func_type;
6647 vset(&type, VT_LOCAL | VT_LVAL, addr);
6648 vswap();
6649 vstore();
6650 vpop();
6651 vset(&ret_type, VT_LOCAL | VT_LVAL, addr);
6653 vtop->type = ret_type;
6654 rc = RC_RET(ret_type.t);
6655 //printf("struct return: n:%d t:%02x rc:%02x\n", ret_nregs, ret_type.t, rc);
6656 for (n = ret_nregs; --n > 0;) {
6657 vdup();
6658 gv(rc);
6659 vswap();
6660 incr_offset(regsize);
6661 /* We assume that when a structure is returned in multiple
6662 registers, their classes are consecutive values of the
6663 suite s(n) = 2^n */
6664 rc <<= 1;
6666 gv(rc);
6667 vtop -= ret_nregs - 1;
6669 } else {
/* scalar return: load into the target's return register class */
6670 gv(RC_RET(func_type->t));
6672 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
6674 #endif
6676 static void check_func_return(void)
6678 if ((func_vt.t & VT_BTYPE) == VT_VOID)
6679 return;
6680 if (!strcmp (funcname, "main")
6681 && (func_vt.t & VT_BTYPE) == VT_INT) {
6682 /* main returns 0 by default */
6683 vpushi(0);
6684 gen_assign_cast(&func_vt);
6685 gfunc_return(&func_vt);
6686 } else {
6687 tcc_warning("function might return no value: '%s'", funcname);
6691 /* ------------------------------------------------------------------------- */
6692 /* switch/case */
6694 static int case_cmpi(const void *pa, const void *pb)
6696 int64_t a = (*(struct case_t**) pa)->v1;
6697 int64_t b = (*(struct case_t**) pb)->v1;
6698 return a < b ? -1 : a > b;
6701 static int case_cmpu(const void *pa, const void *pb)
6703 uint64_t a = (uint64_t)(*(struct case_t**) pa)->v1;
6704 uint64_t b = (uint64_t)(*(struct case_t**) pb)->v1;
6705 return a < b ? -1 : a > b;
/* emit a "jump to address a if top-of-stack is true" test,
   chaining onto an existing jump list t */
static void gtst_addr(int t, int a)
{
    int j = gvtst(0, t);
    gsym_addr(j, a);
}
/* Generate the dispatch code for a sorted array of switch cases: binary
   search down to <= 8 entries, then a linear scan; falls through to
   *bsym (the default/break chain) when nothing matches.  The switch
   value is on top of the value stack and stays there.
   NOTE(review): brace-only lines were lost in extraction; code kept
   verbatim. */
6713 static void gcase(struct case_t **base, int len, int *bsym)
6715 struct case_t *p;
6716 int e;
6717 int ll = (vtop->type.t & VT_BTYPE) == VT_LLONG;
6718 while (len > 8) {
6719 /* binary search */
6720 p = base[len/2];
6721 vdup();
6722 if (ll)
6723 vpushll(p->v2);
6724 else
6725 vpushi(p->v2);
6726 gen_op(TOK_LE);
6727 e = gvtst(1, 0);
6728 vdup();
6729 if (ll)
6730 vpushll(p->v1);
6731 else
6732 vpushi(p->v1);
6733 gen_op(TOK_GE);
6734 gtst_addr(0, p->sym); /* v1 <= x <= v2 */
6735 /* x < v1 */
6736 gcase(base, len/2, bsym);
6737 /* x > v2 */
6738 gsym(e);
6739 e = len/2 + 1;
6740 base += e; len -= e;
6742 /* linear scan */
6743 while (len--) {
6744 p = *base++;
6745 vdup();
6746 if (ll)
6747 vpushll(p->v2);
6748 else
6749 vpushi(p->v2);
6750 if (p->v1 == p->v2) {
/* single-value case: one equality test suffices */
6751 gen_op(TOK_EQ);
6752 gtst_addr(0, p->sym);
6753 } else {
/* range case: x <= v2 && x >= v1 */
6754 gen_op(TOK_LE);
6755 e = gvtst(1, 0);
6756 vdup();
6757 if (ll)
6758 vpushll(p->v1);
6759 else
6760 vpushi(p->v1);
6761 gen_op(TOK_GE);
6762 gtst_addr(0, p->sym);
6763 gsym(e);
6766 *bsym = gjmp(*bsym);
6769 static void end_switch(void)
6771 struct switch_t *sw = cur_switch;
6772 dynarray_reset(&sw->p, &sw->n);
6773 cur_switch = sw->prev;
6774 tcc_free(sw);
6777 /* ------------------------------------------------------------------------- */
6778 /* __attribute__((cleanup(fn))) */
6780 static void try_call_scope_cleanup(Sym *stop)
6782 Sym *cls = cur_scope->cl.s;
6784 for (; cls != stop; cls = cls->ncl) {
6785 Sym *fs = cls->next;
6786 Sym *vs = cls->prev_tok;
6788 vpushsym(&fs->type, fs);
6789 vset(&vs->type, vs->r, vs->c);
6790 vtop->sym = vs;
6791 mk_pointer(&vtop->type);
6792 gaddrof();
6793 gfunc_call(1);
/* For a goto to a label with cleanup state 'cleanupstate': run the
   cleanups of every scope left between here and the nearest common
   ancestor of the two cleanup chains.
   NOTE(review): the three for-loops below had empty bodies (bare ';'
   lines) in the original; those lines were lost in extraction along
   with the braces — code kept verbatim, confirm against upstream. */
6797 static void try_call_cleanup_goto(Sym *cleanupstate)
6799 Sym *oc, *cc;
6800 int ocd, ccd;
6802 if (!cur_scope->cl.s)
6803 return;
6805 /* search NCA of both cleanup chains given parents and initial depth */
6806 ocd = cleanupstate ? cleanupstate->v & ~SYM_FIELD : 0;
6807 for (ccd = cur_scope->cl.n, oc = cleanupstate; ocd > ccd; --ocd, oc = oc->ncl)
6809 for (cc = cur_scope->cl.s; ccd > ocd; --ccd, cc = cc->ncl)
6811 for (; cc != oc; cc = cc->ncl, oc = oc->ncl, --ccd)
6814 try_call_scope_cleanup(cc);
6817 /* call 'func' for each __attribute__((cleanup(func))) */
/* Emit the cleanup calls owed by pending forward gotos that jump out of
   scope 'o', re-threading their jump chains; then run the cleanups for
   code falling off the end of the scope normally.
   NOTE(review): brace-only lines were lost in extraction; code kept
   verbatim. */
6818 static void block_cleanup(struct scope *o)
6820 int jmp = 0;
6821 Sym *g, **pg;
6822 for (pg = &pending_gotos; (g = *pg) && g->c > o->cl.n;) {
6823 if (g->prev_tok->r & LABEL_FORWARD) {
6824 Sym *pcl = g->next;
/* skip this cleanup code on the normal (non-goto) path */
6825 if (!jmp)
6826 jmp = gjmp(0);
6827 gsym(pcl->jnext);
6828 try_call_scope_cleanup(o->cl.s);
6829 pcl->jnext = gjmp(0);
6830 if (!o->cl.n)
6831 goto remove_pending;
6832 g->c = o->cl.n;
6833 pg = &g->prev;
6834 } else {
6835 remove_pending:
6836 *pg = g->prev;
6837 sym_free(g);
6840 gsym(jmp);
6841 try_call_scope_cleanup(o->cl.s);
6844 /* ------------------------------------------------------------------------- */
6845 /* VLA */
/* restore the stack pointer from the saved VLA location 'loc'
   (0 means there is nothing to restore) */
6847 static void vla_restore(int loc)
6849 if (loc)
6850 gen_vla_sp_restore(loc);
6853 static void vla_leave(struct scope *o)
6855 struct scope *c = cur_scope, *v = NULL;
6856 for (; c != o && c; c = c->prev)
6857 if (c->vla.num)
6858 v = c;
6859 if (v)
6860 vla_restore(v->vla.locorig);
6863 /* ------------------------------------------------------------------------- */
6864 /* local scopes */
6866 static void new_scope(struct scope *o)
6868 /* copy and link previous scope */
6869 *o = *cur_scope;
6870 o->prev = cur_scope;
6871 cur_scope = o;
6872 cur_scope->vla.num = 0;
6874 /* record local declaration stack position */
6875 o->lstk = local_stack;
6876 o->llstk = local_label_stack;
6877 ++local_scope;
/* leave scope 'o': run VLA restore and cleanups, pop labels and local
   symbols, and reinstate the enclosing scope.
   NOTE(review): brace-only lines were lost in extraction; code kept
   verbatim. */
6880 static void prev_scope(struct scope *o, int is_expr)
6882 vla_leave(o->prev);
6884 if (o->cl.s != o->prev->cl.s)
6885 block_cleanup(o->prev);
6887 /* pop locally defined labels */
6888 label_pop(&local_label_stack, o->llstk, is_expr);
6890 /* In the is_expr case (a statement expression is finished here),
6891 vtop might refer to symbols on the local_stack. Either via the
6892 type or via vtop->sym. We can't pop those nor any that in turn
6893 might be referred to. To make it easier we don't roll back
6894 any symbols in that case; some upper level call to block() will
6895 do that. We do have to remove such symbols from the lookup
6896 tables, though. sym_pop will do that. */
6898 /* pop locally defined symbols */
6899 pop_local_syms(o->lstk, is_expr);
6900 cur_scope = o->prev;
6901 --local_scope;
6904 /* leave a scope via break/continue(/goto) */
6905 static void leave_scope(struct scope *o)
6907 if (!o)
6908 return;
6909 try_call_scope_cleanup(o->cl.s);
6910 vla_leave(o);
6913 /* short version for scopes with 'if/do/while/switch' which can
6914 declare only types (of struct/union/enum) */
6915 static void new_scope_s(struct scope *o)
/* only the local symbol stack position needs saving here */
6917 o->lstk = local_stack;
6918 ++local_scope;
/* counterpart of new_scope_s(): pop the type symbols declared in the
   statement scope */
6921 static void prev_scope_s(struct scope *o)
6923 sym_pop(&local_stack, o->lstk, 0);
6924 --local_scope;
6927 /* ------------------------------------------------------------------------- */
6928 /* call block from 'for do while' loops */
6930 static void lblock(int *bsym, int *csym)
6932 struct scope *lo = loop_scope, *co = cur_scope;
6933 int *b = co->bsym, *c = co->csym;
6934 if (csym) {
6935 co->csym = csym;
6936 loop_scope = co;
6938 co->bsym = bsym;
6939 block(0);
6940 co->bsym = b;
6941 if (csym) {
6942 co->csym = c;
6943 loop_scope = lo;
/* Parse and generate code for one statement (or, with STMT_EXPR, the
   body of a GNU statement expression whose last value is kept on the
   value stack).  Handles if/while/for/do/switch/case/default, compound
   blocks, return/break/continue/goto, labels, asm, and plain
   expression statements.
   NOTE(review): brace-only lines were lost in extraction; code kept
   verbatim. */
6947 static void block(int flags)
6949 int a, b, c, d, e, t;
6950 struct scope o;
6951 Sym *s;
6953 if (flags & STMT_EXPR) {
6954 /* default return value is (void) */
6955 vpushi(0);
6956 vtop->type.t = VT_VOID;
6959 again:
6960 t = tok;
6961 /* If the token carries a value, next() might destroy it. Only with
6962 invalid code such as f(){"123"4;} */
6963 if (TOK_HAS_VALUE(t))
6964 goto expr;
6965 next();
6967 if (debug_modes)
6968 tcc_tcov_check_line (tcc_state, 0), tcc_tcov_block_begin (tcc_state);
/* ---- if/else ---- */
6970 if (t == TOK_IF) {
6971 new_scope_s(&o);
6972 skip('(');
6973 gexpr();
6974 skip(')');
6975 a = gvtst(1, 0);
6976 block(0);
6977 if (tok == TOK_ELSE) {
6978 d = gjmp(0);
6979 gsym(a);
6980 next();
6981 block(0);
6982 gsym(d); /* patch else jmp */
6983 } else {
6984 gsym(a);
6986 prev_scope_s(&o);
/* ---- while ---- */
6988 } else if (t == TOK_WHILE) {
6989 new_scope_s(&o);
6990 d = gind();
6991 skip('(');
6992 gexpr();
6993 skip(')');
6994 a = gvtst(1, 0);
6995 b = 0;
6996 lblock(&a, &b);
6997 gjmp_addr(d);
6998 gsym_addr(b, d);
6999 gsym(a);
7000 prev_scope_s(&o);
/* ---- compound statement ---- */
7002 } else if (t == '{') {
7003 if (debug_modes)
7004 tcc_debug_stabn(tcc_state, N_LBRAC, ind - func_ind);
7005 new_scope(&o);
7007 /* handle local labels declarations */
7008 while (tok == TOK_LABEL) {
7009 do {
7010 next();
7011 if (tok < TOK_UIDENT)
7012 expect("label identifier");
7013 label_push(&local_label_stack, tok, LABEL_DECLARED);
7014 next();
7015 } while (tok == ',');
7016 skip(';');
7019 while (tok != '}') {
7020 decl(VT_LOCAL);
7021 if (tok != '}') {
7022 if (flags & STMT_EXPR)
7023 vpop();
7024 block(flags | STMT_COMPOUND);
7028 prev_scope(&o, flags & STMT_EXPR);
7029 if (debug_modes)
7030 tcc_debug_stabn(tcc_state, N_RBRAC, ind - func_ind);
7031 if (local_scope)
7032 next();
7033 else if (!nocode_wanted)
7034 check_func_return();
/* ---- return ---- */
7036 } else if (t == TOK_RETURN) {
7037 b = (func_vt.t & VT_BTYPE) != VT_VOID;
7038 if (tok != ';') {
7039 gexpr();
7040 if (b) {
7041 gen_assign_cast(&func_vt);
7042 } else {
7043 if (vtop->type.t != VT_VOID)
7044 tcc_warning("void function returns a value");
7045 vtop--;
7047 } else if (b) {
7048 tcc_warning("'return' with no value");
7049 b = 0;
7051 leave_scope(root_scope);
7052 if (b)
7053 gfunc_return(&func_vt);
7054 skip(';');
7055 /* jump unless last stmt in top-level block */
7056 if (tok != '}' || local_scope != 1)
7057 rsym = gjmp(rsym);
7058 if (debug_modes)
7059 tcc_tcov_block_end (tcc_state, -1);
7060 CODE_OFF();
/* ---- break ---- */
7062 } else if (t == TOK_BREAK) {
7063 /* compute jump */
7064 if (!cur_scope->bsym)
7065 tcc_error("cannot break");
7066 if (cur_switch && cur_scope->bsym == cur_switch->bsym)
7067 leave_scope(cur_switch->scope);
7068 else
7069 leave_scope(loop_scope);
7070 *cur_scope->bsym = gjmp(*cur_scope->bsym);
7071 skip(';');
/* ---- continue ---- */
7073 } else if (t == TOK_CONTINUE) {
7074 /* compute jump */
7075 if (!cur_scope->csym)
7076 tcc_error("cannot continue");
7077 leave_scope(loop_scope);
7078 *cur_scope->csym = gjmp(*cur_scope->csym);
7079 skip(';');
/* ---- for ---- */
7081 } else if (t == TOK_FOR) {
7082 new_scope(&o);
7084 skip('(');
7085 if (tok != ';') {
7086 /* c99 for-loop init decl? */
7087 if (!decl(VT_JMP)) {
7088 /* no, regular for-loop init expr */
7089 gexpr();
7090 vpop();
7093 skip(';');
7094 a = b = 0;
7095 c = d = gind();
7096 if (tok != ';') {
7097 gexpr();
7098 a = gvtst(1, 0);
7100 skip(';');
7101 if (tok != ')') {
7102 e = gjmp(0);
7103 d = gind();
7104 gexpr();
7105 vpop();
7106 gjmp_addr(c);
7107 gsym(e);
7109 skip(')');
7110 lblock(&a, &b);
7111 gjmp_addr(d);
7112 gsym_addr(b, d);
7113 gsym(a);
7114 prev_scope(&o, 0);
/* ---- do/while ---- */
7116 } else if (t == TOK_DO) {
7117 new_scope_s(&o);
7118 a = b = 0;
7119 d = gind();
7120 lblock(&a, &b);
7121 gsym(b);
7122 skip(TOK_WHILE);
7123 skip('(');
7124 gexpr();
7125 skip(')');
7126 skip(';');
7127 c = gvtst(0, 0);
7128 gsym_addr(c, d);
7129 gsym(a);
7130 prev_scope_s(&o);
/* ---- switch ---- */
7132 } else if (t == TOK_SWITCH) {
7133 struct switch_t *sw;
7135 sw = tcc_mallocz(sizeof *sw);
7136 sw->bsym = &a;
7137 sw->scope = cur_scope;
7138 sw->prev = cur_switch;
7139 sw->nocode_wanted = nocode_wanted;
7140 cur_switch = sw;
7142 new_scope_s(&o);
7143 skip('(');
7144 gexpr();
7145 skip(')');
7146 sw->sv = *vtop--; /* save switch value */
7147 a = 0;
7148 b = gjmp(0); /* jump to first case */
7149 lblock(&a, NULL);
7150 a = gjmp(a); /* add implicit break */
7151 /* case lookup */
7152 gsym(b);
7153 prev_scope_s(&o);
7155 if (sw->nocode_wanted)
7156 goto skip_switch;
7157 if (sw->sv.type.t & VT_UNSIGNED)
7158 qsort(sw->p, sw->n, sizeof(void*), case_cmpu);
7159 else
7160 qsort(sw->p, sw->n, sizeof(void*), case_cmpi);
7161 for (b = 1; b < sw->n; b++)
7162 if (sw->sv.type.t & VT_UNSIGNED
7163 ? (uint64_t)sw->p[b - 1]->v2 >= (uint64_t)sw->p[b]->v1
7164 : sw->p[b - 1]->v2 >= sw->p[b]->v1)
7165 tcc_error("duplicate case value");
7166 vpushv(&sw->sv);
7167 gv(RC_INT);
7168 d = 0, gcase(sw->p, sw->n, &d);
7169 vpop();
7170 if (sw->def_sym)
7171 gsym_addr(d, sw->def_sym);
7172 else
7173 gsym(d);
7174 skip_switch:
7175 /* break label */
7176 gsym(a);
7177 end_switch();
/* ---- case ---- */
7179 } else if (t == TOK_CASE) {
7180 struct case_t *cr;
7181 if (!cur_switch)
7182 expect("switch");
7183 cr = tcc_malloc(sizeof(struct case_t));
7184 dynarray_add(&cur_switch->p, &cur_switch->n, cr);
7185 cr->v1 = cr->v2 = expr_const64();
7186 if (gnu_ext && tok == TOK_DOTS) {
7187 next();
7188 cr->v2 = expr_const64();
7189 if ((!(cur_switch->sv.type.t & VT_UNSIGNED) && cr->v2 < cr->v1)
7190 || (cur_switch->sv.type.t & VT_UNSIGNED && (uint64_t)cr->v2 < (uint64_t)cr->v1))
7191 tcc_warning("empty case range");
7193 /* case and default are unreachable from a switch under nocode_wanted */
7194 if (!cur_switch->nocode_wanted)
7195 cr->sym = gind();
7196 skip(':');
7197 goto block_after_label;
/* ---- default ---- */
7199 } else if (t == TOK_DEFAULT) {
7200 if (!cur_switch)
7201 expect("switch");
7202 if (cur_switch->def_sym)
7203 tcc_error("too many 'default'");
7204 cur_switch->def_sym = cur_switch->nocode_wanted ? 1 : gind();
7205 skip(':');
7206 goto block_after_label;
/* ---- goto ---- */
7208 } else if (t == TOK_GOTO) {
7209 vla_restore(cur_scope->vla.locorig);
7210 if (tok == '*' && gnu_ext) {
7211 /* computed goto */
7212 next();
7213 gexpr();
7214 if ((vtop->type.t & VT_BTYPE) != VT_PTR)
7215 expect("pointer");
7216 ggoto();
7218 } else if (tok >= TOK_UIDENT) {
7219 s = label_find(tok);
7220 /* put forward definition if needed */
7221 if (!s)
7222 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
7223 else if (s->r == LABEL_DECLARED)
7224 s->r = LABEL_FORWARD;
7226 if (s->r & LABEL_FORWARD) {
7227 /* start new goto chain for cleanups, linked via label->next */
7228 if (cur_scope->cl.s && !nocode_wanted) {
7229 sym_push2(&pending_gotos, SYM_FIELD, 0, cur_scope->cl.n);
7230 pending_gotos->prev_tok = s;
7231 s = sym_push2(&s->next, SYM_FIELD, 0, 0);
7232 pending_gotos->next = s;
7234 s->jnext = gjmp(s->jnext);
7235 } else {
7236 try_call_cleanup_goto(s->cleanupstate);
7237 gjmp_addr(s->jnext);
7239 next();
7241 } else {
7242 expect("label identifier");
7244 skip(';');
/* ---- inline assembly ---- */
7246 } else if (t == TOK_ASM1 || t == TOK_ASM2 || t == TOK_ASM3) {
7247 asm_instr();
/* ---- label or plain expression ---- */
7249 } else {
7250 if (tok == ':' && t >= TOK_UIDENT) {
7251 /* label case */
7252 next();
7253 s = label_find(t);
7254 if (s) {
7255 if (s->r == LABEL_DEFINED)
7256 tcc_error("duplicate label '%s'", get_tok_str(s->v, NULL));
7257 s->r = LABEL_DEFINED;
7258 if (s->next) {
7259 Sym *pcl; /* pending cleanup goto */
7260 for (pcl = s->next; pcl; pcl = pcl->prev)
7261 gsym(pcl->jnext);
7262 sym_pop(&s->next, NULL, 0);
7263 } else
7264 gsym(s->jnext);
7265 } else {
7266 s = label_push(&global_label_stack, t, LABEL_DEFINED);
7268 s->jnext = gind();
7269 s->cleanupstate = cur_scope->cl.s;
7271 block_after_label:
7273 /* Accept attributes after labels (e.g. 'unused') */
7274 AttributeDef ad_tmp;
7275 parse_attribute(&ad_tmp);
7277 if (debug_modes)
7278 tcc_tcov_reset_ind(tcc_state);
7279 vla_restore(cur_scope->vla.loc);
7281 if (tok != '}') {
7282 if (0 == (flags & STMT_COMPOUND))
7283 goto again;
7284 /* C23: insert implicit null-statement within compound statement */
7285 } else {
7286 /* we accept this, but it is a mistake */
7287 tcc_warning_c(warn_all)("deprecated use of label at end of compound statement");
7289 } else {
7290 /* expression case */
7291 if (t != ';') {
7292 unget_tok(t);
7293 expr:
7294 if (flags & STMT_EXPR) {
7295 vpop();
7296 gexpr();
7297 } else {
7298 gexpr();
7299 vpop();
7301 skip(';');
7306 if (debug_modes)
7307 tcc_tcov_check_line (tcc_state, 0), tcc_tcov_block_end (tcc_state, 0);
7310 /* This skips over a stream of tokens containing balanced {} and ()
7311 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
7312 with a '{'). If STR then allocates and stores the skipped tokens
7313 in *STR. This doesn't check if () and {} are nested correctly,
7314 i.e. "({)}" is accepted. */
7315 static void skip_or_save_block(TokenString **str)
7317 int braces = tok == '{';
7318 int level = 0;
7319 if (str)
7320 *str = tok_str_alloc();
7322 while (1) {
7323 int t = tok;
7324 if (level == 0
7325 && (t == ','
7326 || t == ';'
7327 || t == '}'
7328 || t == ')'
7329 || t == ']'))
7330 break;
7331 if (t == TOK_EOF) {
7332 if (str || level > 0)
7333 tcc_error("unexpected end of file");
7334 else
7335 break;
7337 if (str)
7338 tok_str_add_tok(*str);
7339 next();
7340 if (t == '{' || t == '(' || t == '[') {
7341 level++;
7342 } else if (t == '}' || t == ')' || t == ']') {
7343 level--;
7344 if (level == 0 && braces && t == '}')
7345 break;
7348 if (str)
7349 tok_str_add(*str, TOK_EOF);
7352 #define EXPR_CONST 1
7353 #define EXPR_ANY 2
/* parse one initializer element; expr_type selects between a constant
   expression (static initializer) and any expression (automatic
   initializer).  NOTE(review): brace-only lines were lost in
   extraction; code kept verbatim. */
7355 static void parse_init_elem(int expr_type)
7357 int saved_global_expr;
7358 switch(expr_type) {
7359 case EXPR_CONST:
7360 /* compound literals must be allocated globally in this case */
7361 saved_global_expr = global_expr;
7362 global_expr = 1;
7363 expr_const1();
7364 global_expr = saved_global_expr;
7365 /* NOTE: symbols are accepted, as well as lvalue for anon symbols
7366 (compound literals). */
7367 if (((vtop->r & (VT_VALMASK | VT_LVAL)) != VT_CONST
7368 && ((vtop->r & (VT_SYM|VT_LVAL)) != (VT_SYM|VT_LVAL)
7369 || vtop->sym->v < SYM_FIRST_ANOM))
7370 #ifdef TCC_TARGET_PE
/* dllimport symbols are resolved at load time, not link time */
7371 || ((vtop->r & VT_SYM) && vtop->sym->a.dllimport)
7372 #endif
7374 tcc_error("initializer element is not constant");
7375 break;
7376 case EXPR_ANY:
7377 expr_eq();
7378 break;
7382 #if 1
/* internal sanity check: an initializer store at 'offset' must stay
   within the space reserved for it (section data or local frame) */
7383 static void init_assert(init_params *p, int offset)
7385 if (p->sec ? !NODATA_WANTED && offset > p->sec->data_offset
7386 : !nocode_wanted && offset > p->local_offset)
7387 tcc_internal_error("initializer overflow");
7389 #else
7390 #define init_assert(sec, offset)
7391 #endif
7393 /* put zeros for variable based init */
/* zero-fill 'size' bytes at local offset 'c'; for section (static)
   data nothing is emitted because .data/.bss start zeroed.
   NOTE(review): brace-only lines were lost in extraction; code kept
   verbatim. */
7394 static void init_putz(init_params *p, unsigned long c, int size)
7396 init_assert(p, c + size);
7397 if (p->sec) {
7398 /* nothing to do because globals are already set to zero */
7399 } else {
/* emit memset(local_addr, 0, size) */
7400 vpush_helper_func(TOK_memset);
7401 vseti(VT_LOCAL, c);
7402 vpushi(0);
7403 vpushs(size);
7404 #if defined TCC_TARGET_ARM && defined TCC_ARM_EABI
7405 vswap(); /* using __aeabi_memset(void*, size_t, int) */
7406 #endif
7407 gfunc_call(3);
7411 #define DIF_FIRST 1
7412 #define DIF_SIZE_ONLY 2
7413 #define DIF_HAVE_ELEM 4
7414 #define DIF_CLEAR 8
7416 /* delete relocations for specified range c ... c + size. Unfortunately
7417 in very special cases, relocations may occur unordered */
/* compacts the relocation table in place: entries inside the range are
   dropped, the rest slide down.  NOTE(review): brace-only lines were
   lost in extraction; code kept verbatim. */
7418 static void decl_design_delrels(Section *sec, int c, int size)
7420 ElfW_Rel *rel, *rel2, *rel_end;
7421 if (!sec || !sec->reloc)
7422 return;
7423 rel = rel2 = (ElfW_Rel*)sec->reloc->data;
7424 rel_end = (ElfW_Rel*)(sec->reloc->data + sec->reloc->data_offset);
7425 while (rel < rel_end) {
7426 if (rel->r_offset >= c && rel->r_offset < c + size) {
/* drop this relocation: shrink the table */
7427 sec->reloc->data_offset -= sizeof *rel;
7428 } else {
7429 if (rel2 != rel)
7430 memcpy(rel2, rel, sizeof *rel);
7431 ++rel2;
7433 ++rel;
7437 static void decl_design_flex(init_params *p, Sym *ref, int index)
7439 if (ref == p->flex_array_ref) {
7440 if (index >= ref->c)
7441 ref->c = index + 1;
7442 } else if (ref->c < 0)
7443 tcc_error("flexible array has zero size in this context");
7446 /* t is the array or struct type. c is the array or struct
7447 address. cur_field is the pointer to the current
7448 field, for arrays the 'c' member contains the current start
7449 index. 'flags' is as in decl_initializer.
7450 'al' contains the already initialized length of the
7451 current container (starting at c). This returns the new length of that. */
/* NOTE(review): brace-only lines were lost in extraction; code kept
   verbatim. */
7452 static int decl_designator(init_params *p, CType *type, unsigned long c,
7453 Sym **cur_field, int flags, int al)
7455 Sym *s, *f;
7456 int index, index_last, align, l, nb_elems, elem_size;
7457 unsigned long corig = c;
7459 elem_size = 0;
7460 nb_elems = 1;
7462 if (flags & DIF_HAVE_ELEM)
7463 goto no_designator;
/* GNU-style 'field:' designator? */
7465 if (gnu_ext && tok >= TOK_UIDENT) {
7466 l = tok, next();
7467 if (tok == ':')
7468 goto struct_field;
7469 unget_tok(l);
7472 /* NOTE: we only support ranges for last designator */
7473 while (nb_elems == 1 && (tok == '[' || tok == '.')) {
7474 if (tok == '[') {
7475 if (!(type->t & VT_ARRAY))
7476 expect("array type");
7477 next();
7478 index = index_last = expr_const();
7479 if (tok == TOK_DOTS && gnu_ext) {
/* GNU range designator [a ... b] */
7480 next();
7481 index_last = expr_const();
7483 skip(']');
7484 s = type->ref;
7485 decl_design_flex(p, s, index_last);
7486 if (index < 0 || index_last >= s->c || index_last < index)
7487 tcc_error("index exceeds array bounds or range is empty");
7488 if (cur_field)
7489 (*cur_field)->c = index_last;
7490 type = pointed_type(type);
7491 elem_size = type_size(type, &align);
7492 c += index * elem_size;
7493 nb_elems = index_last - index + 1;
7494 } else {
7495 int cumofs;
7496 next();
7497 l = tok;
7498 struct_field:
7499 next();
7500 f = find_field(type, l, &cumofs);
7501 if (cur_field)
7502 *cur_field = f;
7503 type = &f->type;
7504 c += cumofs;
7506 cur_field = NULL;
7508 if (!cur_field) {
7509 if (tok == '=') {
7510 next();
7511 } else if (!gnu_ext) {
7512 expect("=");
7514 } else {
7515 no_designator:
/* positional initializer: advance to the next array element or
   struct field */
7516 if (type->t & VT_ARRAY) {
7517 index = (*cur_field)->c;
7518 s = type->ref;
7519 decl_design_flex(p, s, index);
7520 if (index >= s->c)
7521 tcc_error("too many initializers");
7522 type = pointed_type(type);
7523 elem_size = type_size(type, &align);
7524 c += index * elem_size;
7525 } else {
7526 f = *cur_field;
7527 /* Skip bitfield padding. Also with size 32 and 64. */
7528 while (f && (f->v & SYM_FIRST_ANOM) &&
7529 is_integer_btype(f->type.t & VT_BTYPE))
7530 *cur_field = f = f->next;
7531 if (!f)
7532 tcc_error("too many initializers");
7533 type = &f->type;
7534 c += f->c;
7538 if (!elem_size) /* for structs */
7539 elem_size = type_size(type, &align);
7541 /* Using designators the same element can be initialized more
7542 than once. In that case we need to delete possibly already
7543 existing relocations. */
7544 if (!(flags & DIF_SIZE_ONLY) && c - corig < al) {
7545 decl_design_delrels(p->sec, c, elem_size * nb_elems);
7546 flags &= ~DIF_CLEAR; /* mark stack dirty too */
7549 decl_initializer(p, type, c, flags & ~DIF_FIRST);
/* a range designator initializes one element, then copies it into
   the remaining nb_elems - 1 slots */
7551 if (!(flags & DIF_SIZE_ONLY) && nb_elems > 1) {
7552 Sym aref = {0};
7553 CType t1;
7554 int i;
7555 if (p->sec || (type->t & VT_ARRAY)) {
7556 /* make init_putv/vstore believe it were a struct */
7557 aref.c = elem_size;
7558 t1.t = VT_STRUCT, t1.ref = &aref;
7559 type = &t1;
7561 if (p->sec)
7562 vpush_ref(type, p->sec, c, elem_size);
7563 else
7564 vset(type, VT_LOCAL|VT_LVAL, c);
7565 for (i = 1; i < nb_elems; i++) {
7566 vdup();
7567 init_putv(p, type, c + elem_size * i);
7569 vpop();
7572 c += nb_elems * elem_size;
7573 if (c - corig > al)
7574 al = c - corig;
7575 return al;
7578 /* store a value or an expression directly in global data or in local array */
7579 static void init_putv(init_params *p, CType *type, unsigned long c)
/* Consumes the value on top of the value stack (vtop) and stores it into the
   object at byte offset 'c': directly into section data when p->sec is set
   (static storage), otherwise by generating store code (automatic storage). */
7581 int bt;
7582 void *ptr;
7583 CType dtype;
7584 int size, align;
7585 Section *sec = p->sec;
7586 uint64_t val;
7588 dtype = *type;
7589 dtype.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
7591 size = type_size(type, &align);
7592 if (type->t & VT_BITFIELD)
7593 size = (BIT_POS(type->t) + BIT_SIZE(type->t) + 7) / 8;
7594 init_assert(p, c + size);
/* static storage: poke the constant bytes straight into the section buffer */
7596 if (sec) {
7597 /* XXX: not portable */
7598 /* XXX: generate error if incorrect relocation */
7599 gen_assign_cast(&dtype);
7600 bt = type->t & VT_BTYPE;
/* only symbol references that a data relocation can express are allowed;
   anything else is not computable at load time */
7602 if ((vtop->r & VT_SYM)
7603 && bt != VT_PTR
7604 && (bt != (PTR_SIZE == 8 ? VT_LLONG : VT_INT)
7605 || (type->t & VT_BITFIELD))
7606 && !((vtop->r & VT_CONST) && vtop->sym->v >= SYM_FIRST_ANOM)
7608 tcc_error("initializer element is not computable at load time");
7610 if (NODATA_WANTED) {
7611 vtop--;
7612 return;
7615 ptr = sec->data + c;
7616 val = vtop->c.i;
7618 /* XXX: make code faster ? */
7619 if ((vtop->r & (VT_SYM|VT_CONST)) == (VT_SYM|VT_CONST) &&
7620 vtop->sym->v >= SYM_FIRST_ANOM &&
7621 /* XXX This rejects compound literals like
7622 '(void *){ptr}'. The problem is that '&sym' is
7623 represented the same way, which would be ruled out
7624 by the SYM_FIRST_ANOM check above, but also '"string"'
7625 in 'char *p = "string"' is represented the same
7626 with the type being VT_PTR and the symbol being an
7627 anonymous one. That is, there's no difference in vtop
7628 between '(void *){x}' and '&(void *){x}'. Ignore
7629 pointer typed entities here. Hopefully no real code
7630 will ever use compound literals with scalar type. */
7631 (vtop->type.t & VT_BTYPE) != VT_PTR) {
7632 /* These come from compound literals, memcpy stuff over. */
7633 Section *ssec;
7634 ElfSym *esym;
7635 ElfW_Rel *rel;
7636 esym = elfsym(vtop->sym);
7637 ssec = tcc_state->sections[esym->st_shndx];
7638 memmove (ptr, ssec->data + esym->st_value + (int)vtop->c.i, size);
7639 if (ssec->reloc) {
7640 /* We need to copy over all memory contents, and that
7641 includes relocations. Use the fact that relocs are
7642 created in order, so look from the end of relocs
7643 until we hit one before the copied region. */
7644 unsigned long relofs = ssec->reloc->data_offset;
7645 while (relofs >= sizeof(*rel)) {
7646 relofs -= sizeof(*rel);
7647 rel = (ElfW_Rel*)(ssec->reloc->data + relofs);
7648 if (rel->r_offset >= esym->st_value + size)
7649 continue;
7650 if (rel->r_offset < esym->st_value)
7651 break;
/* duplicate the relocation, rebased to the destination section/offset */
7652 put_elf_reloca(symtab_section, sec,
7653 c + rel->r_offset - esym->st_value,
7654 ELFW(R_TYPE)(rel->r_info),
7655 ELFW(R_SYM)(rel->r_info),
7656 #if PTR_SIZE == 8
7657 rel->r_addend
7658 #else
7660 #endif
7664 } else {
/* scalar constant: store the bytes according to the basic type */
7665 if (type->t & VT_BITFIELD) {
7666 int bit_pos, bit_size, bits, n;
7667 unsigned char *p, v, m;
7668 bit_pos = BIT_POS(vtop->type.t);
7669 bit_size = BIT_SIZE(vtop->type.t);
7670 p = (unsigned char*)ptr + (bit_pos >> 3);
7671 bit_pos &= 7, bits = 0;
/* write the field byte by byte, masking in only the bits this field owns
   so neighbouring fields in the same bytes are preserved */
7672 while (bit_size) {
7673 n = 8 - bit_pos;
7674 if (n > bit_size)
7675 n = bit_size;
7676 v = val >> bits << bit_pos;
7677 m = ((1 << n) - 1) << bit_pos;
7678 *p = (*p & ~m) | (v & m);
7679 bits += n, bit_size -= n, bit_pos = 0, ++p;
7681 } else
7682 switch(bt) {
7683 case VT_BOOL:
7684 *(char *)ptr = val != 0;
7685 break;
7686 case VT_BYTE:
7687 *(char *)ptr = val;
7688 break;
7689 case VT_SHORT:
7690 write16le(ptr, val);
7691 break;
7692 case VT_FLOAT:
7693 write32le(ptr, val);
7694 break;
7695 case VT_DOUBLE:
7696 write64le(ptr, val);
7697 break;
7698 case VT_LDOUBLE:
7699 #if defined TCC_IS_NATIVE_387
7700 /* Host and target platform may be different but both have x87.
7701 On windows, tcc does not use VT_LDOUBLE, except when it is a
7702 cross compiler. In this case a mingw gcc as host compiler
7703 comes here with 10-byte long doubles, while msvc or tcc won't.
7704 tcc itself can still translate by asm.
7705 In any case we avoid possibly random bytes 11 and 12.
7707 if (sizeof (long double) >= 10)
7708 memcpy(ptr, &vtop->c.ld, 10);
7709 #ifdef __TINYC__
7710 else if (sizeof (long double) == sizeof (double))
7711 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr) : "m" (vtop->c.ld));
7712 #endif
7713 else
7714 #endif
7715 /* For other platforms it should work natively, but may not work
7716 for cross compilers */
7717 if (sizeof(long double) == LDOUBLE_SIZE)
7718 memcpy(ptr, &vtop->c.ld, LDOUBLE_SIZE);
7719 else if (sizeof(double) == LDOUBLE_SIZE)
7720 *(double*)ptr = (double)vtop->c.ld;
7721 else if (0 == memcmp(ptr, &vtop->c.ld, LDOUBLE_SIZE))
7722 ; /* nothing to do for 0.0 */
7723 #ifndef TCC_CROSS_TEST
7724 else
7725 tcc_error("can't cross compile long double constants");
7726 #endif
7727 break;
7729 #if PTR_SIZE == 8
7730 /* intptr_t may need a reloc too, see tcctest.c:relocation_test() */
7731 case VT_LLONG:
7732 case VT_PTR:
7733 if (vtop->r & VT_SYM)
7734 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
7735 else
7736 write64le(ptr, val);
7737 break;
7738 case VT_INT:
7739 write32le(ptr, val);
7740 break;
7741 #else
7742 case VT_LLONG:
7743 write64le(ptr, val);
7744 break;
7745 case VT_PTR:
7746 case VT_INT:
7747 if (vtop->r & VT_SYM)
7748 greloc(sec, vtop->sym, c, R_DATA_PTR);
7749 write32le(ptr, val);
7750 break;
7751 #endif
7752 default:
7753 //tcc_internal_error("unexpected type");
7754 break;
7757 vtop--;
/* automatic storage: generate a store through the value stack */
7758 } else {
7759 vset(&dtype, VT_LOCAL|VT_LVAL, c);
7760 vswap();
7761 vstore();
7762 vpop();
7766 /* 't' contains the type and storage info. 'c' is the offset of the
7767 object in section 'sec'. If 'sec' is NULL, it means stack based
7768 allocation. 'flags & DIF_FIRST' is true if array '{' must be read (multi
7769 dimension implicit array init handling). 'flags & DIF_SIZE_ONLY' is true if
7770 size only evaluation is wanted (only for arrays). */
7771 static void decl_initializer(init_params *p, CType *type, unsigned long c, int flags)
7773 int len, n, no_oblock, i;
7774 int size1, align1;
7775 Sym *s, *f;
7776 Sym indexsym;
7777 CType *t1;
7779 /* generate line number info */
7780 if (debug_modes && !(flags & DIF_SIZE_ONLY) && !p->sec)
7781 tcc_debug_line(tcc_state), tcc_tcov_check_line (tcc_state, 1);
/* pre-parse one initializer element, unless it might be a string literal
   for an array (those get committed to storage specially further below) */
7783 if (!(flags & DIF_HAVE_ELEM) && tok != '{' &&
7784 /* In case of strings we have special handling for arrays, so
7785 don't consume them as initializer value (which would commit them
7786 to some anonymous symbol). */
7787 tok != TOK_LSTR && tok != TOK_STR &&
7788 (!(flags & DIF_SIZE_ONLY)
7789 /* a struct may be initialized from a struct of same type, as in
7790 struct {int x,y;} a = {1,2}, b = {3,4}, c[] = {a,b};
7791 In that case we need to parse the element in order to check
7792 it for compatibility below */
7793 || (type->t & VT_BTYPE) == VT_STRUCT)
7795 int ncw_prev = nocode_wanted;
7796 if ((flags & DIF_SIZE_ONLY) && !p->sec)
7797 ++nocode_wanted;
7798 parse_init_elem(!p->sec ? EXPR_ANY : EXPR_CONST);
7799 nocode_wanted = ncw_prev;
7800 flags |= DIF_HAVE_ELEM;
/* array initializer */
7803 if (type->t & VT_ARRAY) {
7804 no_oblock = 1;
7805 if (((flags & DIF_FIRST) && tok != TOK_LSTR && tok != TOK_STR) ||
7806 tok == '{') {
7807 skip('{');
7808 no_oblock = 0;
7811 s = type->ref;
7812 n = s->c;
7813 t1 = pointed_type(type);
7814 size1 = type_size(t1, &align1);
7816 /* only parse strings here if correct type (otherwise: handle
7817 them as ((w)char *) expressions */
7818 if ((tok == TOK_LSTR &&
7819 #ifdef TCC_TARGET_PE
7820 (t1->t & VT_BTYPE) == VT_SHORT && (t1->t & VT_UNSIGNED)
7821 #else
7822 (t1->t & VT_BTYPE) == VT_INT
7823 #endif
7824 ) || (tok == TOK_STR && (t1->t & VT_BTYPE) == VT_BYTE)) {
7825 len = 0;
7826 cstr_reset(&initstr);
7827 if (size1 != (tok == TOK_STR ? 1 : sizeof(nwchar_t)))
7828 tcc_error("unhandled string literal merging");
/* concatenate adjacent string literals into initstr; 'len' counts
   characters without the intermediate NUL terminators */
7829 while (tok == TOK_STR || tok == TOK_LSTR) {
7830 if (initstr.size)
7831 initstr.size -= size1;
7832 if (tok == TOK_STR)
7833 len += tokc.str.size;
7834 else
7835 len += tokc.str.size / sizeof(nwchar_t);
7836 len--;
7837 cstr_cat(&initstr, tokc.str.data, tokc.str.size);
7838 next();
7840 if (tok != ')' && tok != '}' && tok != ',' && tok != ';'
7841 && tok != TOK_EOF) {
7842 /* Not a lone literal but part of a bigger expression. */
7843 unget_tok(size1 == 1 ? TOK_STR : TOK_LSTR);
7844 tokc.str.size = initstr.size;
7845 tokc.str.data = initstr.data;
7846 goto do_init_array;
7849 decl_design_flex(p, s, len);
7850 if (!(flags & DIF_SIZE_ONLY)) {
7851 int nb = n, ch;
7852 if (len < nb)
7853 nb = len;
7854 if (len > nb)
7855 tcc_warning("initializer-string for array is too long");
7856 /* in order to go faster for common case (char
7857 string in global variable, we handle it
7858 specifically */
7859 if (p->sec && size1 == 1) {
7860 init_assert(p, c + nb);
7861 if (!NODATA_WANTED)
7862 memcpy(p->sec->data + c, initstr.data, nb);
7863 } else {
7864 for(i=0;i<n;i++) {
7865 if (i >= nb) {
7866 /* only add trailing zero if enough storage (no
7867 warning in this case since it is standard) */
7868 if (flags & DIF_CLEAR)
7869 break;
7870 if (n - i >= 4) {
7871 init_putz(p, c + i * size1, (n - i) * size1);
7872 break;
7874 ch = 0;
7875 } else if (size1 == 1)
7876 ch = ((unsigned char *)initstr.data)[i];
7877 else
7878 ch = ((nwchar_t *)initstr.data)[i];
7879 vpushi(ch);
7880 init_putv(p, t1, c + i * size1);
7884 } else {
7886 do_init_array:
7887 indexsym.c = 0;
7888 f = &indexsym;
7890 do_init_list:
7891 /* zero memory once in advance */
7892 if (!(flags & (DIF_CLEAR | DIF_SIZE_ONLY))) {
7893 init_putz(p, c, n*size1);
7894 flags |= DIF_CLEAR;
7897 len = 0;
7898 /* GNU extension: if the initializer is empty for a flex array,
7899 it's size is zero. We won't enter the loop, so set the size
7900 now. */
7901 decl_design_flex(p, s, len);
/* shared loop for array and struct/union initializer lists:
   one designator (or positional element) per iteration */
7902 while (tok != '}' || (flags & DIF_HAVE_ELEM)) {
7903 len = decl_designator(p, type, c, &f, flags, len);
7904 flags &= ~DIF_HAVE_ELEM;
7905 if (type->t & VT_ARRAY) {
7906 ++indexsym.c;
7907 /* special test for multi dimensional arrays (may not
7908 be strictly correct if designators are used at the
7909 same time) */
7910 if (no_oblock && len >= n*size1)
7911 break;
7912 } else {
7913 if (s->type.t == VT_UNION)
7914 f = NULL;
7915 else
7916 f = f->next;
7917 if (no_oblock && f == NULL)
7918 break;
7921 if (tok == '}')
7922 break;
7923 skip(',');
7926 if (!no_oblock)
7927 skip('}');
/* whole-object initialization from a compatible expression (struct = struct) */
7929 } else if ((flags & DIF_HAVE_ELEM)
7930 /* Use i_c_parameter_t, to strip toplevel qualifiers.
7931 The source type might have VT_CONSTANT set, which is
7932 of course assignable to non-const elements. */
7933 && is_compatible_unqualified_types(type, &vtop->type)) {
7934 goto one_elem;
/* struct/union initializer: reuse the generic list loop above */
7936 } else if ((type->t & VT_BTYPE) == VT_STRUCT) {
7937 no_oblock = 1;
7938 if ((flags & DIF_FIRST) || tok == '{') {
7939 skip('{');
7940 no_oblock = 0;
7942 s = type->ref;
7943 f = s->next;
7944 n = s->c;
7945 size1 = 1;
7946 goto do_init_list;
/* braced scalar initializer: { expr } */
7948 } else if (tok == '{') {
7949 if (flags & DIF_HAVE_ELEM)
7950 skip(';');
7951 next();
7952 decl_initializer(p, type, c, flags & ~DIF_HAVE_ELEM);
7953 skip('}');
7955 } else one_elem: if ((flags & DIF_SIZE_ONLY)) {
7956 /* If we supported only ISO C we wouldn't have to accept calling
7957 this on anything than an array if DIF_SIZE_ONLY (and even then
7958 only on the outermost level, so no recursion would be needed),
7959 because initializing a flex array member isn't supported.
7960 But GNU C supports it, so we need to recurse even into
7961 subfields of structs and arrays when DIF_SIZE_ONLY is set. */
7962 /* just skip expression */
7963 if (flags & DIF_HAVE_ELEM)
7964 vpop();
7965 else
7966 skip_or_save_block(NULL);
7968 } else {
7969 if (!(flags & DIF_HAVE_ELEM)) {
7970 /* This should happen only when we haven't parsed
7971 the init element above for fear of committing a
7972 string constant to memory too early. */
7973 if (tok != TOK_STR && tok != TOK_LSTR)
7974 expect("string constant");
7975 parse_init_elem(!p->sec ? EXPR_ANY : EXPR_CONST);
/* storing zero into an already zeroed local container is a no-op */
7977 if (!p->sec && (flags & DIF_CLEAR) /* container was already zero'd */
7978 && (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST
7979 && vtop->c.i == 0
7980 && btype_size(type->t & VT_BTYPE) /* not for fp constants */
7982 vpop();
7983 else
7984 init_putv(p, type, c);
7988 /* parse an initializer for type 't' if 'has_init' is non zero, and
7989 allocate space in local or global data space ('r' is either
7990 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
7991 variable 'v' of scope 'scope' is declared before initializers
7992 are parsed. If 'v' is zero, then a reference to the new object
7993 is put in the value stack. If 'has_init' is 2, a special parsing
7994 is done to handle string constants. */
7995 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r,
7996 int has_init, int v, int global)
7998 int size, align, addr;
7999 TokenString *init_str = NULL;
8001 Section *sec;
8002 Sym *flexible_array;
8003 Sym *sym;
8004 int saved_nocode_wanted = nocode_wanted;
8005 #ifdef CONFIG_TCC_BCHECK
8006 int bcheck = tcc_state->do_bounds_check && !NODATA_WANTED;
8007 #endif
8008 init_params p = {0};
8010 /* Always allocate static or global variables */
8011 if (v && (r & VT_VALMASK) == VT_CONST)
8012 nocode_wanted |= DATA_ONLY_WANTED;
8014 flexible_array = NULL;
8015 size = type_size(type, &align);
8017 /* exactly one flexible array may be initialized, either the
8018 toplevel array or the last member of the toplevel struct */
8020 if (size < 0) {
8021 // error out except for top-level incomplete arrays
8022 // (arrays of incomplete types are handled in array parsing)
8023 if (!(type->t & VT_ARRAY))
8024 tcc_error("initialization of incomplete type");
8026 /* If the base type itself was an array type of unspecified size
8027 (like in 'typedef int arr[]; arr x = {1};') then we will
8028 overwrite the unknown size by the real one for this decl.
8029 We need to unshare the ref symbol holding that size. */
8030 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
8031 p.flex_array_ref = type->ref;
8033 } else if (has_init && (type->t & VT_BTYPE) == VT_STRUCT) {
8034 Sym *field = type->ref->next;
8035 if (field) {
/* find the last member; only the last one may be a flexible array */
8036 while (field->next)
8037 field = field->next;
8038 if (field->type.t & VT_ARRAY && field->type.ref->c < 0) {
8039 flexible_array = field;
8040 p.flex_array_ref = field->type.ref;
8041 size = -1;
8046 if (size < 0) {
8047 /* If unknown size, do a dry-run 1st pass */
8048 if (!has_init)
8049 tcc_error("unknown type size");
8050 if (has_init == 2) {
8051 /* only get strings */
8052 init_str = tok_str_alloc();
8053 while (tok == TOK_STR || tok == TOK_LSTR) {
8054 tok_str_add_tok(init_str);
8055 next();
8057 tok_str_add(init_str, TOK_EOF);
8058 } else
8059 skip_or_save_block(&init_str);
8060 unget_tok(0);
8062 /* compute size */
8063 begin_macro(init_str, 1);
8064 next();
8065 decl_initializer(&p, type, 0, DIF_FIRST | DIF_SIZE_ONLY);
8066 /* prepare second initializer parsing */
8067 macro_ptr = init_str->str;
8068 next();
8070 /* if still unknown size, error */
8071 size = type_size(type, &align);
8072 if (size < 0)
8073 tcc_error("unknown type size");
8075 /* If there's a flex member and it was used in the initializer
8076 adjust size. */
8077 if (flexible_array && flexible_array->type.ref->c > 0)
8078 size += flexible_array->type.ref->c
8079 * pointed_size(&flexible_array->type);
8082 /* take into account specified alignment if bigger */
8083 if (ad->a.aligned) {
8084 int speca = 1 << (ad->a.aligned - 1);
8085 if (speca > align)
8086 align = speca;
8087 } else if (ad->a.packed) {
8088 align = 1;
8091 if (!v && NODATA_WANTED)
8092 size = 0, align = 1;
/* stack-based (automatic) storage */
8094 if ((r & VT_VALMASK) == VT_LOCAL) {
8095 sec = NULL;
8096 #ifdef CONFIG_TCC_BCHECK
8097 if (bcheck && v) {
8098 /* add padding between stack variables for bound checking */
8099 loc -= align;
8101 #endif
8102 loc = (loc - size) & -align;
8103 addr = loc;
8104 p.local_offset = addr + size;
8105 #ifdef CONFIG_TCC_BCHECK
8106 if (bcheck && v) {
8107 /* add padding between stack variables for bound checking */
8108 loc -= align;
8110 #endif
8111 if (v) {
8112 /* local variable */
8113 #ifdef CONFIG_TCC_ASM
8114 if (ad->asm_label) {
8115 int reg = asm_parse_regvar(ad->asm_label);
8116 if (reg >= 0)
8117 r = (r & ~VT_VALMASK) | reg;
8119 #endif
8120 sym = sym_push(v, type, r, addr);
8121 if (ad->cleanup_func) {
/* register a __cleanup__ handler for this variable in the scope */
8122 Sym *cls = sym_push2(&all_cleanups,
8123 SYM_FIELD | ++cur_scope->cl.n, 0, 0);
8124 cls->prev_tok = sym;
8125 cls->next = ad->cleanup_func;
8126 cls->ncl = cur_scope->cl.s;
8127 cur_scope->cl.s = cls;
8130 sym->a = ad->a;
8131 } else {
8132 /* push local reference */
8133 vset(type, r, addr);
/* static/global storage */
8135 } else {
8136 sym = NULL;
8137 if (v && global) {
8138 /* see if the symbol was already defined */
8139 sym = sym_find(v);
8140 if (sym) {
8141 if (p.flex_array_ref && (sym->type.t & type->t & VT_ARRAY)
8142 && sym->type.ref->c > type->ref->c) {
8143 /* flex array was already declared with explicit size
8144 extern int arr[10];
8145 int arr[] = { 1,2,3 }; */
8146 type->ref->c = sym->type.ref->c;
8147 size = type_size(type, &align);
8149 patch_storage(sym, ad, type);
8150 /* we accept several definitions of the same global variable. */
8151 if (!has_init && sym->c && elfsym(sym)->st_shndx != SHN_UNDEF)
8152 goto no_alloc;
8156 /* allocate symbol in corresponding section */
8157 sec = ad->section;
8158 if (!sec) {
8159 CType *tp = type;
8160 while ((tp->t & (VT_BTYPE|VT_ARRAY)) == (VT_PTR|VT_ARRAY))
8161 tp = &tp->ref->type;
8162 if (tp->t & VT_CONSTANT) {
8163 sec = rodata_section;
8164 } else if (has_init) {
8165 sec = data_section;
8166 /*if (tcc_state->g_debug & 4)
8167 tcc_warning("rw data: %s", get_tok_str(v, 0));*/
8168 } else if (tcc_state->nocommon)
8169 sec = bss_section;
8172 if (sec) {
8173 addr = section_add(sec, size, align);
8174 #ifdef CONFIG_TCC_BCHECK
8175 /* add padding if bound check */
8176 if (bcheck)
8177 section_add(sec, 1, 1);
8178 #endif
8179 } else {
8180 addr = align; /* SHN_COMMON is special, symbol value is align */
8181 sec = common_section;
8184 if (v) {
8185 if (!sym) {
8186 sym = sym_push(v, type, r | VT_SYM, 0);
8187 patch_storage(sym, ad, NULL);
8189 /* update symbol definition */
8190 put_extern_sym(sym, sec, addr, size);
8191 } else {
8192 /* push global reference */
8193 vpush_ref(type, sec, addr, size);
8194 sym = vtop->sym;
8195 vtop->r |= r;
8198 #ifdef CONFIG_TCC_BCHECK
8199 /* handles bounds now because the symbol must be defined
8200 before for the relocation */
8201 if (bcheck) {
8202 addr_t *bounds_ptr;
8204 greloca(bounds_section, sym, bounds_section->data_offset, R_DATA_PTR, 0);
8205 /* then add global bound info */
8206 bounds_ptr = section_ptr_add(bounds_section, 2 * sizeof(addr_t));
8207 bounds_ptr[0] = 0; /* relocated */
8208 bounds_ptr[1] = size;
8210 #endif
/* variable length array: allocate at runtime on the stack */
8213 if (type->t & VT_VLA) {
8214 int a;
8216 if (NODATA_WANTED)
8217 goto no_alloc;
8219 /* save before-VLA stack pointer if needed */
8220 if (cur_scope->vla.num == 0) {
8221 if (cur_scope->prev && cur_scope->prev->vla.num) {
8222 cur_scope->vla.locorig = cur_scope->prev->vla.loc;
8223 } else {
8224 gen_vla_sp_save(loc -= PTR_SIZE);
8225 cur_scope->vla.locorig = loc;
8229 vpush_type_size(type, &a);
8230 gen_vla_alloc(type, a);
8231 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
8232 /* on _WIN64, because of the function args scratch area, the
8233 result of alloca differs from RSP and is returned in RAX. */
8234 gen_vla_result(addr), addr = (loc -= PTR_SIZE);
8235 #endif
8236 gen_vla_sp_save(addr);
8237 cur_scope->vla.loc = addr;
8238 cur_scope->vla.num++;
8239 } else if (has_init) {
8240 p.sec = sec;
8241 decl_initializer(&p, type, addr, DIF_FIRST);
8242 /* patch flexible array member size back to -1, */
8243 /* for possible subsequent similar declarations */
8244 if (flexible_array)
8245 flexible_array->type.ref->c = -1;
8248 no_alloc:
8249 /* restore parse state if needed */
8250 if (init_str) {
8251 end_macro();
8252 next();
8255 nocode_wanted = saved_nocode_wanted;
8258 /* generate vla code saved in post_type() */
8259 static void func_vla_arg_code(Sym *arg)
/* Recursively emits, at function entry, the code that evaluates the saved
   size expressions of VLA-typed parameters and stores the computed sizes
   in local slots (arg->type.ref->c). */
8261 int align;
8262 TokenString *vla_array_tok = NULL;
8264 if (arg->type.ref)
8265 func_vla_arg_code(arg->type.ref);
8267 if ((arg->type.t & VT_VLA) && arg->type.ref->vla_array_str) {
/* reserve a local int slot to hold the computed size */
8268 loc -= type_size(&int_type, &align);
8269 loc &= -align;
8270 arg->type.ref->c = loc;
/* re-parse the saved size-expression tokens through the macro machinery */
8272 unget_tok(0);
8273 vla_array_tok = tok_str_alloc();
8274 vla_array_tok->str = arg->type.ref->vla_array_str;
8275 begin_macro(vla_array_tok, 1);
8276 next();
8277 gexpr();
8278 end_macro();
8279 next();
/* size = expr * sizeof(element); store it into the reserved slot */
8280 vpush_type_size(&arg->type.ref->type, &align);
8281 gen_op('*');
8282 vset(&int_type, VT_LOCAL|VT_LVAL, arg->type.ref->c);
8283 vswap();
8284 vstore();
8285 vpop();
8289 static void func_vla_arg(Sym *sym)
/* Walks the parameter list of function 'sym' and emits size-computation
   code for every pointer-to-VLA parameter (see func_vla_arg_code). */
8291 Sym *arg;
8293 for (arg = sym->type.ref->next; arg; arg = arg->next)
8294 if ((arg->type.t & VT_BTYPE) == VT_PTR && (arg->type.ref->type.t & VT_VLA))
8295 func_vla_arg_code(arg->type.ref);
8298 /* parse a function defined by symbol 'sym' and generate its code in
8299 'cur_text_section' */
8300 static void gen_function(Sym *sym)
8302 struct scope f = { 0 };
8303 cur_scope = root_scope = &f;
8304 nocode_wanted = 0;
8306 ind = cur_text_section->data_offset;
/* honor __attribute__((aligned(n))) by padding with nops */
8307 if (sym->a.aligned) {
8308 size_t newoff = section_add(cur_text_section, 0,
8309 1 << (sym->a.aligned - 1));
8310 gen_fill_nops(newoff - ind);
8313 funcname = get_tok_str(sym->v, NULL);
8314 func_ind = ind;
8315 func_vt = sym->type.ref->type;
8316 func_var = sym->type.ref->f.func_type == FUNC_ELLIPSIS;
8318 /* NOTE: we patch the symbol size later */
8319 put_extern_sym(sym, cur_text_section, ind, 0);
/* constructors/destructors get registered in the init/fini arrays */
8321 if (sym->type.ref->f.func_ctor)
8322 add_array (tcc_state, ".init_array", sym->c);
8323 if (sym->type.ref->f.func_dtor)
8324 add_array (tcc_state, ".fini_array", sym->c);
8326 /* put debug symbol */
8327 tcc_debug_funcstart(tcc_state, sym);
8329 /* push a dummy symbol to enable local sym storage */
8330 sym_push2(&local_stack, SYM_FIELD, 0, 0);
8331 local_scope = 1; /* for function parameters */
8332 gfunc_prolog(sym);
8333 tcc_debug_prolog_epilog(tcc_state, 0);
8335 local_scope = 0;
8336 rsym = 0;
8337 clear_temp_local_var_list();
8338 func_vla_arg(sym);
/* compile the function body; rsym collects the 'return' jumps */
8339 block(0);
8340 gsym(rsym);
8342 nocode_wanted = 0;
8343 /* reset local stack */
8344 pop_local_syms(NULL, 0);
8345 tcc_debug_prolog_epilog(tcc_state, 1);
8346 gfunc_epilog();
8348 /* end of function */
8349 tcc_debug_funcend(tcc_state, ind - func_ind);
8351 /* patch symbol size */
8352 elfsym(sym)->st_size = ind - func_ind;
8354 cur_text_section->data_offset = ind;
8355 local_scope = 0;
8356 label_pop(&global_label_stack, NULL, 0);
8357 sym_pop(&all_cleanups, NULL, 0);
8359 /* It's better to crash than to generate wrong code */
8360 cur_text_section = NULL;
8361 funcname = ""; /* for safety */
8362 func_vt.t = VT_VOID; /* for safety */
8363 func_var = 0; /* for safety */
8364 ind = 0; /* for safety */
8365 func_ind = -1;
8366 nocode_wanted = DATA_ONLY_WANTED;
8367 check_vstack();
8369 /* do this after funcend debug info */
8370 next();
8373 static void gen_inline_functions(TCCState *s)
/* Emits code for every recorded inline function that was actually
   referenced (or forced non-internal). Repeats until a fixed point,
   since generating one inline function may reference another. */
8375 Sym *sym;
8376 int inline_generated, i;
8377 struct InlineFunc *fn;
8379 tcc_open_bf(s, ":inline:", 0);
8380 /* iterate while inline function are referenced */
8381 do {
8382 inline_generated = 0;
8383 for (i = 0; i < s->nb_inline_fns; ++i) {
8384 fn = s->inline_fns[i];
8385 sym = fn->sym;
8386 if (sym && (sym->c || !(sym->type.t & VT_INLINE))) {
8387 /* the function was used or forced (and then not internal):
8388 generate its code and convert it to a normal function */
8389 fn->sym = NULL;
8390 tccpp_putfile(fn->filename);
/* replay the saved token string of the function body */
8391 begin_macro(fn->func_str, 1);
8392 next();
8393 cur_text_section = text_section;
8394 gen_function(sym);
8395 end_macro();
8397 inline_generated = 1;
8400 } while (inline_generated);
8401 tcc_close();
8404 static void free_inline_functions(TCCState *s)
/* Releases the saved token strings of inline functions that were never
   emitted, then frees the inline-function table itself. */
8406 int i;
8407 /* free tokens of unused inline functions */
8408 for (i = 0; i < s->nb_inline_fns; ++i) {
8409 struct InlineFunc *fn = s->inline_fns[i];
8410 if (fn->sym)
8411 tok_str_free(fn->func_str);
8413 dynarray_reset(&s->inline_fns, &s->nb_inline_fns);
8416 static void do_Static_assert(void)
/* Parses '_Static_assert ( const-expr [, "message"] ) ;' and raises a
   compile error with the (optional) message when the expression is zero. */
8418 int c;
8419 const char *msg;
8421 next();
8422 skip('(');
8423 c = expr_const();
8424 msg = "_Static_assert fail";
8425 if (tok == ',') {
8426 next();
8427 msg = parse_mult_str("string constant")->data;
8429 skip(')');
8430 if (c == 0)
8431 tcc_error("%s", msg);
8432 skip(';');
8435 /* 'l' is VT_LOCAL or VT_CONST to define default storage type
8436 or VT_CMP if parsing old style parameter list
8437 or VT_JMP if parsing c99 for decl: for (int i = 0, ...) */
8438 static int decl(int l)
/* Parses a sequence of declarations (and function definitions) at the
   given scope level. Returns non-zero only in VT_JMP mode, to tell the
   caller that a c99 for-declaration was parsed. */
8440 int v, has_init, r, oldint;
8441 CType type, btype;
8442 Sym *sym;
8443 AttributeDef ad, adbase;
8445 while (1) {
8447 oldint = 0;
/* parse the declaration-specifiers; handle the cases where none follow */
8448 if (!parse_btype(&btype, &adbase, l == VT_LOCAL)) {
8449 if (l == VT_JMP)
8450 return 0;
8451 /* skip redundant ';' if not in old parameter decl scope */
8452 if (tok == ';' && l != VT_CMP) {
8453 next();
8454 continue;
8456 if (tok == TOK_STATIC_ASSERT) {
8457 do_Static_assert();
8458 continue;
8460 if (l != VT_CONST)
8461 break;
8462 if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
8463 /* global asm block */
8464 asm_global_instr();
8465 continue;
8467 if (tok >= TOK_UIDENT) {
8468 /* special test for old K&R protos without explicit int
8469 type. Only accepted when defining global data */
8470 btype.t = VT_INT;
8471 oldint = 1;
8472 } else {
8473 if (tok != TOK_EOF)
8474 expect("declaration");
8475 break;
/* lone 'struct foo;' / 'enum bar;' declarations */
8479 if (tok == ';') {
8480 if ((btype.t & VT_BTYPE) == VT_STRUCT) {
8481 v = btype.ref->v;
8482 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) >= SYM_FIRST_ANOM)
8483 tcc_warning("unnamed struct/union that defines no instances");
8484 next();
8485 continue;
8487 if (IS_ENUM(btype.t)) {
8488 next();
8489 continue;
8493 while (1) { /* iterate thru each declaration */
8494 type = btype;
8495 ad = adbase;
8496 type_decl(&type, &ad, &v, TYPE_DIRECT);
8497 #if 0
8499 char buf[500];
8500 type_to_str(buf, sizeof(buf), &type, get_tok_str(v, NULL));
8501 printf("type = '%s'\n", buf);
8503 #endif
8504 if ((type.t & VT_BTYPE) == VT_FUNC) {
8505 if ((type.t & VT_STATIC) && (l != VT_CONST))
8506 tcc_error("function without file scope cannot be static");
8507 /* if old style function prototype, we accept a
8508 declaration list */
8509 sym = type.ref;
8510 if (sym->f.func_type == FUNC_OLD && l == VT_CONST) {
8511 func_vt = type;
8512 decl(VT_CMP);
8514 #if defined TCC_TARGET_MACHO || defined TARGETOS_ANDROID
8515 if (sym->f.func_alwinl
8516 && ((type.t & (VT_EXTERN | VT_INLINE))
8517 == (VT_EXTERN | VT_INLINE))) {
8518 /* always_inline functions must be handled as if they
8519 don't generate multiple global defs, even if extern
8520 inline, i.e. GNU inline semantics for those. Rewrite
8521 them into static inline. */
8522 type.t &= ~VT_EXTERN;
8523 type.t |= VT_STATIC;
8525 #endif
8526 /* always compile 'extern inline' */
8527 if (type.t & VT_EXTERN)
8528 type.t &= ~VT_INLINE;
8530 } else if (oldint) {
8531 tcc_warning("type defaults to int");
8534 if (gnu_ext && (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
8535 ad.asm_label = asm_label_instr();
8536 /* parse one last attribute list, after asm label */
8537 parse_attribute(&ad);
8538 #if 0
8539 /* gcc does not allow __asm__("label") with function definition,
8540 but why not ... */
8541 if (tok == '{')
8542 expect(";");
8543 #endif
8546 #ifdef TCC_TARGET_PE
8547 if (ad.a.dllimport || ad.a.dllexport) {
8548 if (type.t & VT_STATIC)
8549 tcc_error("cannot have dll linkage with static");
8550 if (type.t & VT_TYPEDEF) {
8551 tcc_warning("'%s' attribute ignored for typedef",
8552 ad.a.dllimport ? (ad.a.dllimport = 0, "dllimport") :
8553 (ad.a.dllexport = 0, "dllexport"));
8554 } else if (ad.a.dllimport) {
8555 if ((type.t & VT_BTYPE) == VT_FUNC)
8556 ad.a.dllimport = 0;
8557 else
8558 type.t |= VT_EXTERN;
8561 #endif
/* '{' after a declarator: this is a function definition */
8562 if (tok == '{') {
8563 if (l != VT_CONST)
8564 tcc_error("cannot use local functions");
8565 if ((type.t & VT_BTYPE) != VT_FUNC)
8566 expect("function definition");
8568 /* reject abstract declarators in function definition
8569 make old style params without decl have int type */
8570 sym = type.ref;
8571 while ((sym = sym->next) != NULL) {
8572 if (!(sym->v & ~SYM_FIELD))
8573 expect("identifier");
8574 if (sym->type.t == VT_VOID)
8575 sym->type = int_type;
8578 /* apply post-declaraton attributes */
8579 merge_funcattr(&type.ref->f, &ad.f);
8581 /* put function symbol */
8582 type.t &= ~VT_EXTERN;
8583 sym = external_sym(v, &type, 0, &ad);
8585 /* static inline functions are just recorded as a kind
8586 of macro. Their code will be emitted at the end of
8587 the compilation unit only if they are used */
8588 if (sym->type.t & VT_INLINE) {
8589 struct InlineFunc *fn;
8590 fn = tcc_malloc(sizeof *fn + strlen(file->filename));
8591 strcpy(fn->filename, file->filename);
8592 fn->sym = sym;
8593 dynarray_add(&tcc_state->inline_fns,
8594 &tcc_state->nb_inline_fns, fn);
8595 skip_or_save_block(&fn->func_str);
8596 } else {
8597 /* compute text section */
8598 cur_text_section = ad.section;
8599 if (!cur_text_section)
8600 cur_text_section = text_section;
8601 gen_function(sym);
8603 break;
8604 } else {
8605 if (l == VT_CMP) {
8606 /* find parameter in function parameter list */
8607 for (sym = func_vt.ref->next; sym; sym = sym->next)
8608 if ((sym->v & ~SYM_FIELD) == v)
8609 goto found;
8610 tcc_error("declaration for parameter '%s' but no such parameter",
8611 get_tok_str(v, NULL));
8612 found:
8613 if (type.t & VT_STORAGE) /* 'register' is okay */
8614 tcc_error("storage class specified for '%s'",
8615 get_tok_str(v, NULL));
8616 if (sym->type.t != VT_VOID)
8617 tcc_error("redefinition of parameter '%s'",
8618 get_tok_str(v, NULL));
8619 convert_parameter_type(&type);
8620 sym->type = type;
8621 } else if (type.t & VT_TYPEDEF) {
8622 /* save typedefed type */
8623 /* XXX: test storage specifiers ? */
8624 sym = sym_find(v);
8625 if (sym && sym->sym_scope == local_scope) {
8626 if (!is_compatible_types(&sym->type, &type)
8627 || !(sym->type.t & VT_TYPEDEF))
8628 tcc_error("incompatible redefinition of '%s'",
8629 get_tok_str(v, NULL));
8630 sym->type = type;
8631 } else {
8632 sym = sym_push(v, &type, 0, 0);
8634 sym->a = ad.a;
8635 if ((type.t & VT_BTYPE) == VT_FUNC)
8636 merge_funcattr(&sym->type.ref->f, &ad.f);
8637 if (debug_modes)
8638 tcc_debug_typedef (tcc_state, sym);
8639 } else if ((type.t & VT_BTYPE) == VT_VOID
8640 && !(type.t & VT_EXTERN)) {
8641 tcc_error("declaration of void object");
8642 } else {
/* ordinary variable or function declaration */
8643 r = 0;
8644 if ((type.t & VT_BTYPE) == VT_FUNC) {
8645 /* external function definition */
8646 /* specific case for func_call attribute */
8647 merge_funcattr(&type.ref->f, &ad.f);
8648 } else if (!(type.t & VT_ARRAY)) {
8649 /* not lvalue if array */
8650 r |= VT_LVAL;
8652 has_init = (tok == '=');
8653 if (has_init && (type.t & VT_VLA))
8654 tcc_error("variable length array cannot be initialized");
8656 if (((type.t & VT_EXTERN) && (!has_init || l != VT_CONST))
8657 || (type.t & VT_BTYPE) == VT_FUNC
8658 /* as with GCC, uninitialized global arrays with no size
8659 are considered extern: */
8660 || ((type.t & VT_ARRAY) && !has_init
8661 && l == VT_CONST && type.ref->c < 0)
8663 /* external variable or function */
8664 type.t |= VT_EXTERN;
8665 sym = external_sym(v, &type, r, &ad);
8666 } else {
8667 if (l == VT_CONST || (type.t & VT_STATIC))
8668 r |= VT_CONST;
8669 else
8670 r |= VT_LOCAL;
8671 if (has_init)
8672 next();
8673 else if (l == VT_CONST)
8674 /* uninitialized global variables may be overridden */
8675 type.t |= VT_EXTERN;
8676 decl_initializer_alloc(&type, &ad, r, has_init, v, l == VT_CONST);
8679 if (ad.alias_target && l == VT_CONST) {
8680 /* Aliases need to be emitted when their target symbol
8681 is emitted, even if perhaps unreferenced.
8682 We only support the case where the base is already
8683 defined, otherwise we would need deferring to emit
8684 the aliases until the end of the compile unit. */
8685 Sym *alias_target = sym_find(ad.alias_target);
8686 ElfSym *esym = elfsym(alias_target);
8687 if (!esym)
8688 tcc_error("unsupported forward __alias__ attribute");
8689 put_extern_sym2(sym_find(v), esym->st_shndx,
8690 esym->st_value, esym->st_size, 1);
8693 if (tok != ',') {
8694 if (l == VT_JMP)
8695 return 1;
8696 skip(';');
8697 break;
8699 next();
8703 return 0;
8706 /* ------------------------------------------------------------------------- */
8707 #undef gjmp_addr
8708 #undef gjmp
8709 /* ------------------------------------------------------------------------- */