fix UB in constant folding of double -> signed integer conversion
[tinycc.git] / tccgen.c
blob9431582353e1e8d7c742ce56afba04d5d18da2a8
1 /*
2 * TCC - Tiny C Compiler
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 #define USING_GLOBALS
22 #include "tcc.h"
24 /********************************************************/
25 /* global variables */
27 /* loc : local variable index
28 ind : output code index
29 rsym: return symbol
30 anon_sym: anonymous symbol index
32 ST_DATA int rsym, anon_sym, ind, loc;
34 ST_DATA Sym *global_stack;
35 ST_DATA Sym *local_stack;
36 ST_DATA Sym *define_stack;
37 ST_DATA Sym *global_label_stack;
38 ST_DATA Sym *local_label_stack;
40 static Sym *sym_free_first;
41 static void **sym_pools;
42 static int nb_sym_pools;
44 static Sym *all_cleanups, *pending_gotos;
45 static int local_scope;
46 ST_DATA char debug_modes;
48 ST_DATA SValue *vtop;
49 static SValue _vstack[1 + VSTACK_SIZE];
50 #define vstack (_vstack + 1)
52 ST_DATA int nocode_wanted; /* no code generation wanted */
53 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
54 #define DATA_ONLY_WANTED 0x80000000 /* ON outside of functions and for static initializers */
56 /* no code output after unconditional jumps such as with if (0) ... */
57 #define CODE_OFF_BIT 0x20000000
58 #define CODE_OFF() if(!nocode_wanted)(nocode_wanted |= CODE_OFF_BIT)
59 #define CODE_ON() (nocode_wanted &= ~CODE_OFF_BIT)
61 /* no code output when parsing sizeof()/typeof() etc. (using nocode_wanted++/--) */
62 #define NOEVAL_MASK 0x0000FFFF
63 #define NOEVAL_WANTED (nocode_wanted & NOEVAL_MASK)
65 /* no code output when parsing constant expressions */
66 #define CONST_WANTED_BIT 0x00010000
67 #define CONST_WANTED_MASK 0x0FFF0000
68 #define CONST_WANTED (nocode_wanted & CONST_WANTED_MASK)
70 ST_DATA int global_expr; /* true if compound literals must be allocated globally (used during initializers parsing */
71 ST_DATA CType func_vt; /* current function return type (used by return instruction) */
72 ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
73 ST_DATA int func_vc;
74 ST_DATA int func_ind;
75 ST_DATA const char *funcname;
76 ST_DATA CType int_type, func_old_type, char_type, char_pointer_type;
77 static CString initstr;
79 #if PTR_SIZE == 4
80 #define VT_SIZE_T (VT_INT | VT_UNSIGNED)
81 #define VT_PTRDIFF_T VT_INT
82 #elif LONG_SIZE == 4
83 #define VT_SIZE_T (VT_LLONG | VT_UNSIGNED)
84 #define VT_PTRDIFF_T VT_LLONG
85 #else
86 #define VT_SIZE_T (VT_LONG | VT_LLONG | VT_UNSIGNED)
87 #define VT_PTRDIFF_T (VT_LONG | VT_LLONG)
88 #endif
90 static struct switch_t {
91 struct case_t {
92 int64_t v1, v2;
93 int sym;
94 } **p; int n; /* list of case ranges */
95 int def_sym; /* default symbol */
96 int nocode_wanted;
97 int *bsym;
98 struct scope *scope;
99 struct switch_t *prev;
100 SValue sv;
101 } *cur_switch; /* current switch */
103 #define MAX_TEMP_LOCAL_VARIABLE_NUMBER 8
104 /*list of temporary local variables on the stack in current function. */
105 static struct temp_local_variable {
106 int location; //offset on stack. Svalue.c.i
107 short size;
108 short align;
109 } arr_temp_local_vars[MAX_TEMP_LOCAL_VARIABLE_NUMBER];
110 static int nb_temp_local_vars;
112 static struct scope {
113 struct scope *prev;
114 struct { int loc, locorig, num; } vla;
115 struct { Sym *s; int n; } cl;
116 int *bsym, *csym;
117 Sym *lstk, *llstk;
118 } *cur_scope, *loop_scope, *root_scope;
120 typedef struct {
121 Section *sec;
122 int local_offset;
123 Sym *flex_array_ref;
124 } init_params;
126 #if 1
127 #define precedence_parser
128 static void init_prec(void);
129 #endif
131 static void block(int flags);
132 #define STMT_EXPR 1
133 #define STMT_COMPOUND 2
135 static void gen_cast(CType *type);
136 static void gen_cast_s(int t);
137 static inline CType *pointed_type(CType *type);
138 static int is_compatible_types(CType *type1, CType *type2);
139 static int parse_btype(CType *type, AttributeDef *ad, int ignore_label);
140 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td);
141 static void parse_expr_type(CType *type);
142 static void init_putv(init_params *p, CType *type, unsigned long c);
143 static void decl_initializer(init_params *p, CType *type, unsigned long c, int flags);
144 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
145 static int decl(int l);
146 static void expr_eq(void);
147 static void vpush_type_size(CType *type, int *a);
148 static int is_compatible_unqualified_types(CType *type1, CType *type2);
149 static inline int64_t expr_const64(void);
150 static void vpush64(int ty, unsigned long long v);
151 static void vpush(CType *type);
152 static int gvtst(int inv, int t);
153 static void gen_inline_functions(TCCState *s);
154 static void free_inline_functions(TCCState *s);
155 static void skip_or_save_block(TokenString **str);
156 static void gv_dup(void);
157 static int get_temp_local_var(int size,int align);
158 static void clear_temp_local_var_list();
159 static void cast_error(CType *st, CType *dt);
160 static void end_switch(void);
162 /* ------------------------------------------------------------------------- */
163 /* Automagical code suppression */
165 /* Clear 'nocode_wanted' at forward label if it was used */
166 ST_FUNC void gsym(int t)
168 if (t) {
169 gsym_addr(t, ind);
170 CODE_ON();
174 /* Clear 'nocode_wanted' if current pc is a label */
175 static int gind()
177 int t = ind;
178 CODE_ON();
179 if (debug_modes)
180 tcc_tcov_block_begin(tcc_state);
181 return t;
/* Emit an unconditional backward jump to 't', then suppress code
   generation (everything after it is unreachable). */
static void gjmp_addr_acs(int t)
{
    gjmp_addr(t);
    CODE_OFF();
}
/* Emit an unconditional forward jump chained onto 't', suppress code
   generation and return the new chain head. */
static int gjmp_acs(int t)
{
    t = gjmp(t);
    CODE_OFF();
    return t;
}
199 /* These are #undef'd at the end of this file */
200 #define gjmp_addr gjmp_addr_acs
201 #define gjmp gjmp_acs
202 /* ------------------------------------------------------------------------- */
204 ST_INLN int is_float(int t)
206 int bt = t & VT_BTYPE;
207 return bt == VT_LDOUBLE
208 || bt == VT_DOUBLE
209 || bt == VT_FLOAT
210 || bt == VT_QFLOAT;
213 static inline int is_integer_btype(int bt)
215 return bt == VT_BYTE
216 || bt == VT_BOOL
217 || bt == VT_SHORT
218 || bt == VT_INT
219 || bt == VT_LLONG;
222 static int btype_size(int bt)
224 return bt == VT_BYTE || bt == VT_BOOL ? 1 :
225 bt == VT_SHORT ? 2 :
226 bt == VT_INT ? 4 :
227 bt == VT_LLONG ? 8 :
228 bt == VT_PTR ? PTR_SIZE : 0;
231 /* returns function return register from type */
232 static int R_RET(int t)
234 if (!is_float(t))
235 return REG_IRET;
236 #ifdef TCC_TARGET_X86_64
237 if ((t & VT_BTYPE) == VT_LDOUBLE)
238 return TREG_ST0;
239 #elif defined TCC_TARGET_RISCV64
240 if ((t & VT_BTYPE) == VT_LDOUBLE)
241 return REG_IRET;
242 #endif
243 return REG_FRET;
246 /* returns 2nd function return register, if any */
247 static int R2_RET(int t)
249 t &= VT_BTYPE;
250 #if PTR_SIZE == 4
251 if (t == VT_LLONG)
252 return REG_IRE2;
253 #elif defined TCC_TARGET_X86_64
254 if (t == VT_QLONG)
255 return REG_IRE2;
256 if (t == VT_QFLOAT)
257 return REG_FRE2;
258 #elif defined TCC_TARGET_RISCV64
259 if (t == VT_LDOUBLE)
260 return REG_IRE2;
261 #endif
262 return VT_CONST;
265 /* returns true for two-word types */
266 #define USING_TWO_WORDS(t) (R2_RET(t) != VT_CONST)
268 /* put function return registers to stack value */
269 static void PUT_R_RET(SValue *sv, int t)
271 sv->r = R_RET(t), sv->r2 = R2_RET(t);
274 /* returns function return register class for type t */
275 static int RC_RET(int t)
277 return reg_classes[R_RET(t)] & ~(RC_FLOAT | RC_INT);
280 /* returns generic register class for type t */
281 static int RC_TYPE(int t)
283 if (!is_float(t))
284 return RC_INT;
285 #ifdef TCC_TARGET_X86_64
286 if ((t & VT_BTYPE) == VT_LDOUBLE)
287 return RC_ST0;
288 if ((t & VT_BTYPE) == VT_QFLOAT)
289 return RC_FRET;
290 #elif defined TCC_TARGET_RISCV64
291 if ((t & VT_BTYPE) == VT_LDOUBLE)
292 return RC_INT;
293 #endif
294 return RC_FLOAT;
297 /* returns 2nd register class corresponding to t and rc */
298 static int RC2_TYPE(int t, int rc)
300 if (!USING_TWO_WORDS(t))
301 return 0;
302 #ifdef RC_IRE2
303 if (rc == RC_IRET)
304 return RC_IRE2;
305 #endif
306 #ifdef RC_FRE2
307 if (rc == RC_FRET)
308 return RC_FRE2;
309 #endif
310 if (rc & RC_FLOAT)
311 return RC_FLOAT;
312 return RC_INT;
315 /* we use our own 'finite' function to avoid potential problems with
316 non standard math libs */
317 /* XXX: endianness dependent */
318 ST_FUNC int ieee_finite(double d)
320 int p[4];
321 memcpy(p, &d, sizeof(double));
322 return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
325 /* compiling intel long double natively */
326 #if (defined __i386__ || defined __x86_64__) \
327 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
328 # define TCC_IS_NATIVE_387
329 #endif
331 ST_FUNC void test_lvalue(void)
333 if (!(vtop->r & VT_LVAL))
334 expect("lvalue");
337 ST_FUNC void check_vstack(void)
339 if (vtop != vstack - 1)
340 tcc_error("internal compiler error: vstack leak (%d)",
341 (int)(vtop - vstack + 1));
344 /* vstack debugging aid */
345 #if 0
346 void pv (const char *lbl, int a, int b)
348 int i;
349 for (i = a; i < a + b; ++i) {
350 SValue *p = &vtop[-i];
351 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
352 lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
355 #endif
357 /* ------------------------------------------------------------------------- */
358 /* initialize vstack and types. This must be done also for tcc -E */
359 ST_FUNC void tccgen_init(TCCState *s1)
361 vtop = vstack - 1;
362 memset(vtop, 0, sizeof *vtop);
364 /* define some often used types */
365 int_type.t = VT_INT;
367 char_type.t = VT_BYTE;
368 if (s1->char_is_unsigned)
369 char_type.t |= VT_UNSIGNED;
370 char_pointer_type = char_type;
371 mk_pointer(&char_pointer_type);
373 func_old_type.t = VT_FUNC;
374 func_old_type.ref = sym_push(SYM_FIELD, &int_type, 0, 0);
375 func_old_type.ref->f.func_call = FUNC_CDECL;
376 func_old_type.ref->f.func_type = FUNC_OLD;
377 #ifdef precedence_parser
378 init_prec();
379 #endif
380 cstr_new(&initstr);
383 ST_FUNC int tccgen_compile(TCCState *s1)
385 funcname = "";
386 func_ind = -1;
387 anon_sym = SYM_FIRST_ANOM;
388 nocode_wanted = DATA_ONLY_WANTED; /* no code outside of functions */
389 debug_modes = (s1->do_debug ? 1 : 0) | s1->test_coverage << 1;
391 tcc_debug_start(s1);
392 tcc_tcov_start (s1);
393 #ifdef TCC_TARGET_ARM
394 arm_init(s1);
395 #endif
396 #ifdef INC_DEBUG
397 printf("%s: **** new file\n", file->filename);
398 #endif
399 parse_flags = PARSE_FLAG_PREPROCESS | PARSE_FLAG_TOK_NUM | PARSE_FLAG_TOK_STR;
400 next();
401 decl(VT_CONST);
402 gen_inline_functions(s1);
403 check_vstack();
404 /* end of translation unit info */
405 tcc_debug_end(s1);
406 tcc_tcov_end(s1);
407 return 0;
410 ST_FUNC void tccgen_finish(TCCState *s1)
412 tcc_debug_end(s1); /* just in case of errors: free memory */
413 free_inline_functions(s1);
414 sym_pop(&global_stack, NULL, 0);
415 sym_pop(&local_stack, NULL, 0);
416 /* free preprocessor macros */
417 free_defines(NULL);
418 /* free sym_pools */
419 dynarray_reset(&sym_pools, &nb_sym_pools);
420 cstr_free(&initstr);
421 dynarray_reset(&stk_data, &nb_stk_data);
422 while (cur_switch)
423 end_switch();
424 local_scope = 0;
425 loop_scope = NULL;
426 all_cleanups = NULL;
427 pending_gotos = NULL;
428 nb_temp_local_vars = 0;
429 global_label_stack = NULL;
430 local_label_stack = NULL;
431 cur_text_section = NULL;
432 sym_free_first = NULL;
435 /* ------------------------------------------------------------------------- */
436 ST_FUNC ElfSym *elfsym(Sym *s)
438 if (!s || !s->c)
439 return NULL;
440 return &((ElfSym *)symtab_section->data)[s->c];
443 /* apply storage attributes to Elf symbol */
444 ST_FUNC void update_storage(Sym *sym)
446 ElfSym *esym;
447 int sym_bind, old_sym_bind;
449 esym = elfsym(sym);
450 if (!esym)
451 return;
453 if (sym->a.visibility)
454 esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1))
455 | sym->a.visibility;
457 if (sym->type.t & (VT_STATIC | VT_INLINE))
458 sym_bind = STB_LOCAL;
459 else if (sym->a.weak)
460 sym_bind = STB_WEAK;
461 else
462 sym_bind = STB_GLOBAL;
463 old_sym_bind = ELFW(ST_BIND)(esym->st_info);
464 if (sym_bind != old_sym_bind) {
465 esym->st_info = ELFW(ST_INFO)(sym_bind, ELFW(ST_TYPE)(esym->st_info));
468 #ifdef TCC_TARGET_PE
469 if (sym->a.dllimport)
470 esym->st_other |= ST_PE_IMPORT;
471 if (sym->a.dllexport)
472 esym->st_other |= ST_PE_EXPORT;
473 #endif
475 #if 0
476 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
477 get_tok_str(sym->v, NULL),
478 sym_bind == STB_WEAK ? 'w' : sym_bind == STB_LOCAL ? 'l' : 'g',
479 sym->a.visibility,
480 sym->a.dllexport,
481 sym->a.dllimport
483 #endif
486 /* ------------------------------------------------------------------------- */
487 /* update sym->c so that it points to an external symbol in section
488 'section' with value 'value' */
490 ST_FUNC void put_extern_sym2(Sym *sym, int sh_num,
491 addr_t value, unsigned long size,
492 int can_add_underscore)
494 int sym_type, sym_bind, info, other, t;
495 ElfSym *esym;
496 const char *name;
497 char buf1[256];
499 if (!sym->c) {
500 name = get_tok_str(sym->v, NULL);
501 t = sym->type.t;
502 if ((t & VT_BTYPE) == VT_FUNC) {
503 sym_type = STT_FUNC;
504 } else if ((t & VT_BTYPE) == VT_VOID) {
505 sym_type = STT_NOTYPE;
506 if ((t & (VT_BTYPE|VT_ASM_FUNC)) == VT_ASM_FUNC)
507 sym_type = STT_FUNC;
508 } else {
509 sym_type = STT_OBJECT;
511 if (t & (VT_STATIC | VT_INLINE))
512 sym_bind = STB_LOCAL;
513 else
514 sym_bind = STB_GLOBAL;
515 other = 0;
517 #ifdef TCC_TARGET_PE
518 if (sym_type == STT_FUNC && sym->type.ref) {
519 Sym *ref = sym->type.ref;
520 if (ref->a.nodecorate) {
521 can_add_underscore = 0;
523 if (ref->f.func_call == FUNC_STDCALL && can_add_underscore) {
524 sprintf(buf1, "_%s@%d", name, ref->f.func_args * PTR_SIZE);
525 name = buf1;
526 other |= ST_PE_STDCALL;
527 can_add_underscore = 0;
530 #endif
532 if (sym->asm_label) {
533 name = get_tok_str(sym->asm_label, NULL);
534 can_add_underscore = 0;
537 if (tcc_state->leading_underscore && can_add_underscore) {
538 buf1[0] = '_';
539 pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
540 name = buf1;
543 info = ELFW(ST_INFO)(sym_bind, sym_type);
544 sym->c = put_elf_sym(symtab_section, value, size, info, other, sh_num, name);
546 if (debug_modes)
547 tcc_debug_extern_sym(tcc_state, sym, sh_num, sym_bind, sym_type);
549 } else {
550 esym = elfsym(sym);
551 esym->st_value = value;
552 esym->st_size = size;
553 esym->st_shndx = sh_num;
555 update_storage(sym);
558 ST_FUNC void put_extern_sym(Sym *sym, Section *s, addr_t value, unsigned long size)
560 if (nocode_wanted && (NODATA_WANTED || (s && s == cur_text_section)))
561 return;
562 put_extern_sym2(sym, s ? s->sh_num : SHN_UNDEF, value, size, 1);
565 /* add a new relocation entry to symbol 'sym' in section 's' */
566 ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
567 addr_t addend)
569 int c = 0;
571 if (nocode_wanted && s == cur_text_section)
572 return;
574 if (sym) {
575 if (0 == sym->c)
576 put_extern_sym(sym, NULL, 0, 0);
577 c = sym->c;
580 /* now we can add ELF relocation info */
581 put_elf_reloca(symtab_section, s, offset, type, c, addend);
#if PTR_SIZE == 4
/* 32-bit targets: relocation without addend. */
ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
{
    greloca(s, sym, offset, type, 0);
}
#endif
591 /* ------------------------------------------------------------------------- */
592 /* symbol allocator */
593 static Sym *__sym_malloc(void)
595 Sym *sym_pool, *sym, *last_sym;
596 int i;
598 sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
599 dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);
601 last_sym = sym_free_first;
602 sym = sym_pool;
603 for(i = 0; i < SYM_POOL_NB; i++) {
604 sym->next = last_sym;
605 last_sym = sym;
606 sym++;
608 sym_free_first = last_sym;
609 return last_sym;
612 static inline Sym *sym_malloc(void)
614 Sym *sym;
615 #ifndef SYM_DEBUG
616 sym = sym_free_first;
617 if (!sym)
618 sym = __sym_malloc();
619 sym_free_first = sym->next;
620 return sym;
621 #else
622 sym = tcc_malloc(sizeof(Sym));
623 return sym;
624 #endif
627 ST_INLN void sym_free(Sym *sym)
629 #ifndef SYM_DEBUG
630 sym->next = sym_free_first;
631 sym_free_first = sym;
632 #else
633 tcc_free(sym);
634 #endif
637 /* push, without hashing */
638 ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, int c)
640 Sym *s;
642 s = sym_malloc();
643 memset(s, 0, sizeof *s);
644 s->v = v;
645 s->type.t = t;
646 s->c = c;
647 /* add in stack */
648 s->prev = *ps;
649 *ps = s;
650 return s;
653 /* find a symbol and return its associated structure. 's' is the top
654 of the symbol stack */
655 ST_FUNC Sym *sym_find2(Sym *s, int v)
657 while (s) {
658 if (s->v == v)
659 return s;
660 s = s->prev;
662 return NULL;
665 /* structure lookup */
666 ST_INLN Sym *struct_find(int v)
668 v -= TOK_IDENT;
669 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
670 return NULL;
671 return table_ident[v]->sym_struct;
674 /* find an identifier */
675 ST_INLN Sym *sym_find(int v)
677 v -= TOK_IDENT;
678 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
679 return NULL;
680 return table_ident[v]->sym_identifier;
683 static int sym_scope(Sym *s)
685 if (IS_ENUM_VAL (s->type.t))
686 return s->type.ref->sym_scope;
687 else
688 return s->sym_scope;
691 /* push a given symbol on the symbol stack */
692 ST_FUNC Sym *sym_push(int v, CType *type, int r, int c)
694 Sym *s, **ps;
695 TokenSym *ts;
697 if (local_stack)
698 ps = &local_stack;
699 else
700 ps = &global_stack;
701 s = sym_push2(ps, v, type->t, c);
702 s->type.ref = type->ref;
703 s->r = r;
704 /* don't record fields or anonymous symbols */
705 /* XXX: simplify */
706 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
707 /* record symbol in token array */
708 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
709 if (v & SYM_STRUCT)
710 ps = &ts->sym_struct;
711 else
712 ps = &ts->sym_identifier;
713 s->prev_tok = *ps;
714 *ps = s;
715 s->sym_scope = local_scope;
716 if (s->prev_tok && sym_scope(s->prev_tok) == s->sym_scope)
717 tcc_error("redeclaration of '%s'",
718 get_tok_str(v & ~SYM_STRUCT, NULL));
720 return s;
723 /* push a global identifier */
724 ST_FUNC Sym *global_identifier_push(int v, int t, int c)
726 Sym *s, **ps;
727 s = sym_push2(&global_stack, v, t, c);
728 s->r = VT_CONST | VT_SYM;
729 /* don't record anonymous symbol */
730 if (v < SYM_FIRST_ANOM) {
731 ps = &table_ident[v - TOK_IDENT]->sym_identifier;
732 /* modify the top most local identifier, so that sym_identifier will
733 point to 's' when popped; happens when called from inline asm */
734 while (*ps != NULL && (*ps)->sym_scope)
735 ps = &(*ps)->prev_tok;
736 s->prev_tok = *ps;
737 *ps = s;
739 return s;
742 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
743 pop them yet from the list, but do remove them from the token array. */
744 ST_FUNC void sym_pop(Sym **ptop, Sym *b, int keep)
746 Sym *s, *ss, **ps;
747 TokenSym *ts;
748 int v;
750 s = *ptop;
751 while(s != b) {
752 ss = s->prev;
753 v = s->v;
754 /* remove symbol in token array */
755 /* XXX: simplify */
756 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
757 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
758 if (v & SYM_STRUCT)
759 ps = &ts->sym_struct;
760 else
761 ps = &ts->sym_identifier;
762 *ps = s->prev_tok;
764 if (!keep)
765 sym_free(s);
766 s = ss;
768 if (!keep)
769 *ptop = b;
772 /* label lookup */
773 ST_FUNC Sym *label_find(int v)
775 v -= TOK_IDENT;
776 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
777 return NULL;
778 return table_ident[v]->sym_label;
781 ST_FUNC Sym *label_push(Sym **ptop, int v, int flags)
783 Sym *s, **ps;
784 s = sym_push2(ptop, v, VT_STATIC, 0);
785 s->r = flags;
786 ps = &table_ident[v - TOK_IDENT]->sym_label;
787 if (ptop == &global_label_stack) {
788 /* modify the top most local identifier, so that
789 sym_identifier will point to 's' when popped */
790 while (*ps != NULL)
791 ps = &(*ps)->prev_tok;
793 s->prev_tok = *ps;
794 *ps = s;
795 return s;
798 /* pop labels until element last is reached. Look if any labels are
799 undefined. Define symbols if '&&label' was used. */
800 ST_FUNC void label_pop(Sym **ptop, Sym *slast, int keep)
802 Sym *s, *s1;
803 for(s = *ptop; s != slast; s = s1) {
804 s1 = s->prev;
805 if (s->r == LABEL_DECLARED) {
806 tcc_warning_c(warn_all)("label '%s' declared but not used", get_tok_str(s->v, NULL));
807 } else if (s->r == LABEL_FORWARD) {
808 tcc_error("label '%s' used but not defined",
809 get_tok_str(s->v, NULL));
810 } else {
811 if (s->c) {
812 /* define corresponding symbol. A size of
813 1 is put. */
814 put_extern_sym(s, cur_text_section, s->jnext, 1);
817 /* remove label */
818 if (s->r != LABEL_GONE)
819 table_ident[s->v - TOK_IDENT]->sym_label = s->prev_tok;
820 if (!keep)
821 sym_free(s);
822 else
823 s->r = LABEL_GONE;
825 if (!keep)
826 *ptop = slast;
829 /* ------------------------------------------------------------------------- */
830 static void vcheck_cmp(void)
832 /* cannot let cpu flags if other instruction are generated. Also
833 avoid leaving VT_JMP anywhere except on the top of the stack
834 because it would complicate the code generator.
836 Don't do this when nocode_wanted. vtop might come from
837 !nocode_wanted regions (see 88_codeopt.c) and transforming
838 it to a register without actually generating code is wrong
839 as their value might still be used for real. All values
840 we push under nocode_wanted will eventually be popped
841 again, so that the VT_CMP/VT_JMP value will be in vtop
842 when code is unsuppressed again. */
844 /* However if it's just automatic suppression via CODE_OFF/ON()
845 then it seems that we better let things work undisturbed.
846 How can it work at all under nocode_wanted? Well, gv() will
847 actually clear it at the gsym() in load()/VT_JMP in the
848 generator backends */
850 if (vtop->r == VT_CMP && 0 == (nocode_wanted & ~CODE_OFF_BIT))
851 gv(RC_INT);
854 static void vsetc(CType *type, int r, CValue *vc)
856 if (vtop >= vstack + (VSTACK_SIZE - 1))
857 tcc_error("memory full (vstack)");
858 vcheck_cmp();
859 vtop++;
860 vtop->type = *type;
861 vtop->r = r;
862 vtop->r2 = VT_CONST;
863 vtop->c = *vc;
864 vtop->sym = NULL;
867 ST_FUNC void vswap(void)
869 SValue tmp;
871 vcheck_cmp();
872 tmp = vtop[0];
873 vtop[0] = vtop[-1];
874 vtop[-1] = tmp;
877 /* pop stack value */
878 ST_FUNC void vpop(void)
880 int v;
881 v = vtop->r & VT_VALMASK;
882 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
883 /* for x86, we need to pop the FP stack */
884 if (v == TREG_ST0) {
885 o(0xd8dd); /* fstp %st(0) */
886 } else
887 #endif
888 if (v == VT_CMP) {
889 /* need to put correct jump if && or || without test */
890 gsym(vtop->jtrue);
891 gsym(vtop->jfalse);
893 vtop--;
896 /* push constant of type "type" with useless value */
897 static void vpush(CType *type)
899 vset(type, VT_CONST, 0);
902 /* push arbitrary 64bit constant */
903 static void vpush64(int ty, unsigned long long v)
905 CValue cval;
906 CType ctype;
907 ctype.t = ty;
908 ctype.ref = NULL;
909 cval.i = v;
910 vsetc(&ctype, VT_CONST, &cval);
913 /* push integer constant */
914 ST_FUNC void vpushi(int v)
916 vpush64(VT_INT, v);
919 /* push a pointer sized constant */
920 static void vpushs(addr_t v)
922 vpush64(VT_SIZE_T, v);
925 /* push long long constant */
926 static inline void vpushll(long long v)
928 vpush64(VT_LLONG, v);
931 ST_FUNC void vset(CType *type, int r, int v)
933 CValue cval;
934 cval.i = v;
935 vsetc(type, r, &cval);
938 static void vseti(int r, int v)
940 CType type;
941 type.t = VT_INT;
942 type.ref = NULL;
943 vset(&type, r, v);
946 ST_FUNC void vpushv(SValue *v)
948 if (vtop >= vstack + (VSTACK_SIZE - 1))
949 tcc_error("memory full (vstack)");
950 vtop++;
951 *vtop = *v;
954 static void vdup(void)
956 vpushv(vtop);
959 /* rotate n first stack elements to the bottom
960 I1 ... In -> I2 ... In I1 [top is right]
962 ST_FUNC void vrotb(int n)
964 int i;
965 SValue tmp;
967 vcheck_cmp();
968 tmp = vtop[-n + 1];
969 for(i=-n+1;i!=0;i++)
970 vtop[i] = vtop[i+1];
971 vtop[0] = tmp;
974 /* rotate the n elements before entry e towards the top
975 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
977 ST_FUNC void vrote(SValue *e, int n)
979 int i;
980 SValue tmp;
982 vcheck_cmp();
983 tmp = *e;
984 for(i = 0;i < n - 1; i++)
985 e[-i] = e[-i - 1];
986 e[-n + 1] = tmp;
989 /* rotate n first stack elements to the top
990 I1 ... In -> In I1 ... I(n-1) [top is right]
992 ST_FUNC void vrott(int n)
994 vrote(vtop, n);
997 /* ------------------------------------------------------------------------- */
998 /* vtop->r = VT_CMP means CPU-flags have been set from comparison or test. */
1000 /* called from generators to set the result from relational ops */
1001 ST_FUNC void vset_VT_CMP(int op)
1003 vtop->r = VT_CMP;
1004 vtop->cmp_op = op;
1005 vtop->jfalse = 0;
1006 vtop->jtrue = 0;
1009 /* called once before asking generators to load VT_CMP to a register */
1010 static void vset_VT_JMP(void)
1012 int op = vtop->cmp_op;
1014 if (vtop->jtrue || vtop->jfalse) {
1015 int origt = vtop->type.t;
1016 /* we need to jump to 'mov $0,%R' or 'mov $1,%R' */
1017 int inv = op & (op < 2); /* small optimization */
1018 vseti(VT_JMP+inv, gvtst(inv, 0));
1019 vtop->type.t |= origt & (VT_UNSIGNED | VT_DEFSIGN);
1020 } else {
1021 /* otherwise convert flags (rsp. 0/1) to register */
1022 vtop->c.i = op;
1023 if (op < 2) /* doesn't seem to happen */
1024 vtop->r = VT_CONST;
1028 /* Set CPU Flags, doesn't yet jump */
1029 static void gvtst_set(int inv, int t)
1031 int *p;
1033 if (vtop->r != VT_CMP) {
1034 vpushi(0);
1035 gen_op(TOK_NE);
1036 if (vtop->r != VT_CMP) /* must be VT_CONST then */
1037 vset_VT_CMP(vtop->c.i != 0);
1040 p = inv ? &vtop->jfalse : &vtop->jtrue;
1041 *p = gjmp_append(*p, t);
1044 /* Generate value test
1046 * Generate a test for any value (jump, comparison and integers) */
1047 static int gvtst(int inv, int t)
1049 int op, x, u;
1051 gvtst_set(inv, t);
1052 t = vtop->jtrue, u = vtop->jfalse;
1053 if (inv)
1054 x = u, u = t, t = x;
1055 op = vtop->cmp_op;
1057 /* jump to the wanted target */
1058 if (op > 1)
1059 t = gjmp_cond(op ^ inv, t);
1060 else if (op != inv)
1061 t = gjmp(t);
1062 /* resolve complementary jumps to here */
1063 gsym(u);
1065 vtop--;
1066 return t;
1069 /* generate a zero or nozero test */
1070 static void gen_test_zero(int op)
1072 if (vtop->r == VT_CMP) {
1073 int j;
1074 if (op == TOK_EQ) {
1075 j = vtop->jfalse;
1076 vtop->jfalse = vtop->jtrue;
1077 vtop->jtrue = j;
1078 vtop->cmp_op ^= 1;
1080 } else {
1081 vpushi(0);
1082 gen_op(op);
1086 /* ------------------------------------------------------------------------- */
1087 /* push a symbol value of TYPE */
1088 ST_FUNC void vpushsym(CType *type, Sym *sym)
1090 CValue cval;
1091 cval.i = 0;
1092 vsetc(type, VT_CONST | VT_SYM, &cval);
1093 vtop->sym = sym;
1096 /* Return a static symbol pointing to a section */
1097 ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
1099 int v;
1100 Sym *sym;
1102 v = anon_sym++;
1103 sym = sym_push(v, type, VT_CONST | VT_SYM, 0);
1104 sym->type.t |= VT_STATIC;
1105 put_extern_sym(sym, sec, offset, size);
1106 return sym;
1109 /* push a reference to a section offset by adding a dummy symbol */
1110 static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
1112 vpushsym(type, get_sym_ref(type, sec, offset, size));
1115 /* define a new external reference to a symbol 'v' of type 'u' */
1116 ST_FUNC Sym *external_global_sym(int v, CType *type)
1118 Sym *s;
1120 s = sym_find(v);
1121 if (!s) {
1122 /* push forward reference */
1123 s = global_identifier_push(v, type->t | VT_EXTERN, 0);
1124 s->type.ref = type->ref;
1125 } else if (IS_ASM_SYM(s)) {
1126 s->type.t = type->t | (s->type.t & VT_EXTERN);
1127 s->type.ref = type->ref;
1128 update_storage(s);
1130 return s;
1133 /* create an external reference with no specific type similar to asm labels.
1134 This avoids type conflicts if the symbol is used from C too */
1135 ST_FUNC Sym *external_helper_sym(int v)
1137 CType ct = { VT_ASM_FUNC, NULL };
1138 return external_global_sym(v, &ct);
1141 /* push a reference to an helper function (such as memmove) */
1142 ST_FUNC void vpush_helper_func(int v)
1144 vpushsym(&func_old_type, external_helper_sym(v));
1147 /* Merge symbol attributes. */
1148 static void merge_symattr(struct SymAttr *sa, struct SymAttr *sa1)
1150 if (sa1->aligned && !sa->aligned)
1151 sa->aligned = sa1->aligned;
1152 sa->packed |= sa1->packed;
1153 sa->weak |= sa1->weak;
1154 sa->nodebug |= sa1->nodebug;
1155 if (sa1->visibility != STV_DEFAULT) {
1156 int vis = sa->visibility;
1157 if (vis == STV_DEFAULT
1158 || vis > sa1->visibility)
1159 vis = sa1->visibility;
1160 sa->visibility = vis;
1162 sa->dllexport |= sa1->dllexport;
1163 sa->nodecorate |= sa1->nodecorate;
1164 sa->dllimport |= sa1->dllimport;
1167 /* Merge function attributes. */
1168 static void merge_funcattr(struct FuncAttr *fa, struct FuncAttr *fa1)
1170 if (fa1->func_call && !fa->func_call)
1171 fa->func_call = fa1->func_call;
1172 if (fa1->func_type && !fa->func_type)
1173 fa->func_type = fa1->func_type;
1174 if (fa1->func_args && !fa->func_args)
1175 fa->func_args = fa1->func_args;
1176 if (fa1->func_noreturn)
1177 fa->func_noreturn = 1;
1178 if (fa1->func_ctor)
1179 fa->func_ctor = 1;
1180 if (fa1->func_dtor)
1181 fa->func_dtor = 1;
1184 /* Merge attributes. */
1185 static void merge_attr(AttributeDef *ad, AttributeDef *ad1)
1187 merge_symattr(&ad->a, &ad1->a);
1188 merge_funcattr(&ad->f, &ad1->f);
1190 if (ad1->section)
1191 ad->section = ad1->section;
1192 if (ad1->alias_target)
1193 ad->alias_target = ad1->alias_target;
1194 if (ad1->asm_label)
1195 ad->asm_label = ad1->asm_label;
1196 if (ad1->attr_mode)
1197 ad->attr_mode = ad1->attr_mode;
1200 /* Merge some type attributes. */
1201 static void patch_type(Sym *sym, CType *type)
/* Called when a symbol is redeclared: verify 'type' is compatible with
   the existing 'sym' and merge storage/inline/size information into it. */
1203     if (!(type->t & VT_EXTERN) || IS_ENUM_VAL(sym->type.t)) {
1204         if (!(sym->type.t & VT_EXTERN))
1205             tcc_error("redefinition of '%s'", get_tok_str(sym->v, NULL));
1206         sym->type.t &= ~VT_EXTERN;
1209     if (IS_ASM_SYM(sym)) {
1210         /* stay static if both are static */
1211         sym->type.t = type->t & (sym->type.t | ~VT_STATIC);
1212         sym->type.ref = type->ref;
1213         if ((type->t & VT_BTYPE) != VT_FUNC && !(type->t & VT_ARRAY))
1214             sym->r |= VT_LVAL;
1217     if (!is_compatible_types(&sym->type, type)) {
1218         tcc_error("incompatible types for redefinition of '%s'",
1219                   get_tok_str(sym->v, NULL));
1221     } else if ((sym->type.t & VT_BTYPE) == VT_FUNC) {
1222         int static_proto = sym->type.t & VT_STATIC;
1223         /* warn if static follows non-static function declaration */
1224         if ((type->t & VT_STATIC) && !static_proto
1225             /* XXX this test for inline shouldn't be here.  Until we
1226                implement gnu-inline mode again it silences a warning for
1227                mingw caused by our workarounds.  */
1228             && !((type->t | sym->type.t) & VT_INLINE))
1229             tcc_warning("static storage ignored for redefinition of '%s'",
1230                         get_tok_str(sym->v, NULL));
1232         /* set 'inline' if both agree or if one has static */
1233         if ((type->t | sym->type.t) & VT_INLINE) {
1234             if (!((type->t ^ sym->type.t) & VT_INLINE)
1235                 || ((type->t | sym->type.t) & VT_STATIC))
1236                 static_proto |= VT_INLINE;
/* no VT_EXTERN means this declaration is the definition: take the
   complete type and keep static/inline decided from the prototype */
1239         if (0 == (type->t & VT_EXTERN)) {
1240             struct FuncAttr f = sym->type.ref->f;
1241             /* put complete type, use static from prototype */
1242             sym->type.t = (type->t & ~(VT_STATIC|VT_INLINE)) | static_proto;
1243             sym->type.ref = type->ref;
1244             merge_funcattr(&sym->type.ref->f, &f);
1245         } else {
1246             sym->type.t &= ~VT_INLINE | static_proto;
/* prefer a new-style prototype over an old-style (K&R) one */
1249         if (sym->type.ref->f.func_type == FUNC_OLD
1250             && type->ref->f.func_type != FUNC_OLD) {
1251             sym->type.ref = type->ref;
1254     } else {
1255         if ((sym->type.t & VT_ARRAY) && type->ref->c >= 0) {
1256             /* set array size if it was omitted in extern declaration */
1257             sym->type.ref->c = type->ref->c;
1259         if ((type->t ^ sym->type.t) & VT_STATIC)
1260             tcc_warning("storage mismatch for redefinition of '%s'",
1261                         get_tok_str(sym->v, NULL));
1265 /* Merge some storage attributes. */
1266 static void patch_storage(Sym *sym, AttributeDef *ad, CType *type)
/* Redeclaration helper: first reconcile the type (if given), then the
   symbol attributes; finally let the backend update the ELF symbol. */
1268     if (type)
1269         patch_type(sym, type);
1271 #ifdef TCC_TARGET_PE
/* dllimport must be checked before merge_symattr() overwrites sym->a */
1272     if (sym->a.dllimport != ad->a.dllimport)
1273         tcc_error("incompatible dll linkage for redefinition of '%s'",
1274             get_tok_str(sym->v, NULL));
1275 #endif
1276     merge_symattr(&sym->a, &ad->a);
1277     if (ad->asm_label)
1278         sym->asm_label = ad->asm_label;
1279     update_storage(sym);
1282 /* copy sym to other stack */
1283 static Sym *sym_copy(Sym *s0, Sym **ps)
1285 Sym *s;
1286 s = sym_malloc(), *s = *s0;
1287 s->prev = *ps, *ps = s;
1288 if (s->v < SYM_FIRST_ANOM) {
1289 ps = &table_ident[s->v - TOK_IDENT]->sym_identifier;
1290 s->prev_tok = *ps, *ps = s;
1292 return s;
1295 /* copy s->type.ref to stack 'ps' for VT_FUNC and VT_PTR */
1296 static void sym_copy_ref(Sym *s, Sym **ps)
1298 int bt = s->type.t & VT_BTYPE;
1299 if (bt == VT_FUNC || bt == VT_PTR || (bt == VT_STRUCT && s->sym_scope)) {
1300 Sym **sp = &s->type.ref;
1301 for (s = *sp, *sp = NULL; s; s = s->next) {
1302 Sym *s2 = sym_copy(s, ps);
1303 sp = &(*sp = s2)->next;
1304 sym_copy_ref(s2, ps);
1309 /* define a new external reference to a symbol 'v' */
1310 static Sym *external_sym(int v, CType *type, int r, AttributeDef *ad)
1312     Sym *s;
1314     /* look for global symbol */
1315     s = sym_find(v);
/* skip any local declarations shadowing the global one */
1316     while (s && s->sym_scope)
1317         s = s->prev_tok;
1319     if (!s) {
1320         /* push forward reference */
1321         s = global_identifier_push(v, type->t, 0);
1322         s->r |= r;
1323         s->a = ad->a;
1324         s->asm_label = ad->asm_label;
1325         s->type.ref = type->ref;
1326         /* copy type to the global stack */
1327         if (local_stack)
1328             sym_copy_ref(s, &global_stack);
1329     } else {
1330         patch_storage(s, ad, type);
1332     /* push variables on local_stack if any */
1333     if (local_stack && (s->type.t & VT_BTYPE) != VT_FUNC)
1334         s = sym_copy(s, &local_stack);
1335     return s;
1338 /* save registers up to (vtop - n) stack entry */
1339 ST_FUNC void save_regs(int n)
1341 SValue *p, *p1;
1342 for(p = vstack, p1 = vtop - n; p <= p1; p++)
1343 save_reg(p->r);
1346 /* save r to the memory stack, and mark it as being free */
1347 ST_FUNC void save_reg(int r)
/* convenience wrapper: consider the entire value stack (n == 0) */
1349     save_reg_upstack(r, 0);
1352 /* save r to the memory stack, and mark it as being free,
1353    if seen up to (vtop - n) stack entry */
1354 ST_FUNC void save_reg_upstack(int r, int n)
1356     int l, size, align, bt;
1357     SValue *p, *p1, sv;
/* nothing to do for pseudo "registers" (constants, CMP/JMP values) */
1359     if ((r &= VT_VALMASK) >= VT_CONST)
1360         return;
1361     if (nocode_wanted)
1362         return;
/* l doubles as "already spilled" flag and as the spill-slot offset */
1363     l = 0;
1364     for(p = vstack, p1 = vtop - n; p <= p1; p++) {
1365         if ((p->r & VT_VALMASK) == r || p->r2 == r) {
1366             /* must save value on stack if not already done */
1367             if (!l) {
1368                 bt = p->type.t & VT_BTYPE;
1369                 if (bt == VT_VOID)
1370                     continue;
/* for lvalues the register holds an address, so spill a pointer */
1371                 if ((p->r & VT_LVAL) || bt == VT_FUNC)
1372                     bt = VT_PTR;
1373                 sv.type.t = bt;
1374                 size = type_size(&sv.type, &align);
1375                 l = get_temp_local_var(size,align);
1376                 sv.r = VT_LOCAL | VT_LVAL;
1377                 sv.c.i = l;
1378                 store(p->r & VT_VALMASK, &sv);
1379 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1380                 /* x86 specific: need to pop fp register ST0 if saved */
1381                 if (r == TREG_ST0) {
1382                     o(0xd8dd); /* fstp %st(0) */
1384 #endif
1385                 /* special long long case */
1386                 if (p->r2 < VT_CONST && USING_TWO_WORDS(bt)) {
1387                     sv.c.i += PTR_SIZE;
1388                     store(p->r2, &sv);
1391             /* mark that stack entry as being saved on the stack */
1392             if (p->r & VT_LVAL) {
1393                 /* also clear the bounded flag because the
1394                    relocation address of the function was stored in
1395                    p->c.i */
1396                 p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
1397             } else {
1398                 p->r = VT_LVAL | VT_LOCAL;
1399                 p->type.t &= ~VT_ARRAY; /* cannot combine VT_LVAL with VT_ARRAY */
1401             p->sym = NULL;
1402             p->r2 = VT_CONST;
1403             p->c.i = l;
1408 #ifdef TCC_TARGET_ARM
1409 /* find a register of class 'rc2' with at most one reference on stack.
1410  * If none, call get_reg(rc) */
1411 ST_FUNC int get_reg_ex(int rc, int rc2)
1413     int r;
1414     SValue *p;
1416     for(r=0;r<NB_REGS;r++) {
1417         if (reg_classes[r] & rc2) {
1418             int n;
/* count value-stack references to r (both halves of a pair) */
1419             n=0;
1420             for(p = vstack; p <= vtop; p++) {
1421                 if ((p->r & VT_VALMASK) == r ||
1422                     p->r2 == r)
1423                     n++;
1425             if (n <= 1)
1426                 return r;
/* fall back to the generic allocator with the broader class */
1429     return get_reg(rc);
1431 #endif
1433 /* find a free register of class 'rc'. If none, save one register */
1434 ST_FUNC int get_reg(int rc)
1436     int r;
1437     SValue *p;
1439     /* find a free register */
1440     for(r=0;r<NB_REGS;r++) {
1441         if (reg_classes[r] & rc) {
/* when no code is generated, any register of the class will do */
1442             if (nocode_wanted)
1443                 return r;
1444             for(p=vstack;p<=vtop;p++) {
1445                 if ((p->r & VT_VALMASK) == r ||
1446                     p->r2 == r)
1447                     goto notfound;
1449             return r;
1451         notfound: ;
1454     /* no register left : free the first one on the stack (VERY
1455        IMPORTANT to start from the bottom to ensure that we don't
1456        spill registers used in gen_opi()) */
1457     for(p=vstack;p<=vtop;p++) {
1458         /* look at second register (if long long) */
1459         r = p->r2;
1460         if (r < VT_CONST && (reg_classes[r] & rc))
1461             goto save_found;
1462         r = p->r & VT_VALMASK;
1463         if (r < VT_CONST && (reg_classes[r] & rc)) {
1464         save_found:
1465             save_reg(r);
1466             return r;
1469     /* Should never comes here */
1470     return -1;
1473 /* find a free temporary local variable (return the offset on stack) match the size and align. If none, add new temporary stack variable*/
1474 static int get_temp_local_var(int size,int align){
1475 int i;
1476 struct temp_local_variable *temp_var;
1477 int found_var;
1478 SValue *p;
1479 int r;
1480 char free;
1481 char found;
1482 found=0;
1483 for(i=0;i<nb_temp_local_vars;i++){
1484 temp_var=&arr_temp_local_vars[i];
1485 if(temp_var->size<size||align!=temp_var->align){
1486 continue;
1488 /*check if temp_var is free*/
1489 free=1;
1490 for(p=vstack;p<=vtop;p++) {
1491 r=p->r&VT_VALMASK;
1492 if(r==VT_LOCAL||r==VT_LLOCAL){
1493 if(p->c.i==temp_var->location){
1494 free=0;
1495 break;
1499 if(free){
1500 found_var=temp_var->location;
1501 found=1;
1502 break;
1505 if(!found){
1506 loc = (loc - size) & -align;
1507 if(nb_temp_local_vars<MAX_TEMP_LOCAL_VARIABLE_NUMBER){
1508 temp_var=&arr_temp_local_vars[i];
1509 temp_var->location=loc;
1510 temp_var->size=size;
1511 temp_var->align=align;
1512 nb_temp_local_vars++;
1514 found_var=loc;
1516 return found_var;
/* forget all recorded temporary locals (called when the frame is reset) */
1519 static void clear_temp_local_var_list(){
1520     nb_temp_local_vars=0;
1523 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1524 if needed */
1525 static void move_reg(int r, int s, int t)
1527 SValue sv;
1529 if (r != s) {
1530 save_reg(r);
1531 sv.type.t = t;
1532 sv.type.ref = NULL;
1533 sv.r = s;
1534 sv.c.i = 0;
1535 load(r, &sv);
1539 /* get address of vtop (vtop MUST BE an lvalue) */
1540 ST_FUNC void gaddrof(void)
1542     vtop->r &= ~VT_LVAL;
1543     /* tricky: if saved lvalue, then we can go back to lvalue */
1544     if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
1545         vtop->r = (vtop->r & ~VT_VALMASK) | VT_LOCAL | VT_LVAL;
1548 #ifdef CONFIG_TCC_BCHECK
1549 /* generate a bounded pointer addition */
1550 static void gen_bounded_ptr_add(void)
/* a VT_LOCAL base must be preserved across the helper call, so push a
   copy of it first and drop it again afterwards (vtop -= save) */
1552     int save = (vtop[-1].r & VT_VALMASK) == VT_LOCAL;
1553     if (save) {
1554         vpushv(&vtop[-1]);
1555         vrott(3);
1557     vpush_helper_func(TOK___bound_ptr_add);
1558     vrott(3);
1559     gfunc_call(2);
1560     vtop -= save;
1561     vpushi(0);
1562     /* returned pointer is in REG_IRET */
1563     vtop->r = REG_IRET | VT_BOUNDED;
1564     if (nocode_wanted)
1565         return;
1566     /* relocation offset of the bounding function call point */
1567     vtop->c.i = (cur_text_section->reloc->data_offset - sizeof(ElfW_Rel));
1570 /* patch pointer addition in vtop so that pointer dereferencing is
1571    also tested */
1572 static void gen_bounded_ptr_deref(void)
1574     addr_t func;
1575     int size, align;
1576     ElfW_Rel *rel;
1577     Sym *sym;
1579     if (nocode_wanted)
1580         return;
/* pick the __bound_ptr_indirN helper matching the access size */
1582     size = type_size(&vtop->type, &align);
1583     switch(size) {
1584     case  1: func = TOK___bound_ptr_indir1; break;
1585     case  2: func = TOK___bound_ptr_indir2; break;
1586     case  4: func = TOK___bound_ptr_indir4; break;
1587     case  8: func = TOK___bound_ptr_indir8; break;
1588     case 12: func = TOK___bound_ptr_indir12; break;
1589     case 16: func = TOK___bound_ptr_indir16; break;
1590     default:
1591         /* may happen with struct member access */
1592         return;
1594     sym = external_helper_sym(func);
1595     if (!sym->c)
1596         put_extern_sym(sym, NULL, 0, 0);
1597     /* patch relocation */
1598     /* XXX: find a better solution ? */
/* vtop->c.i was set by gen_bounded_ptr_add() to the call's relocation */
1599     rel = (ElfW_Rel *)(cur_text_section->reloc->data + vtop->c.i);
1600     rel->r_info = ELFW(R_INFO)(sym->c, ELFW(R_TYPE)(rel->r_info));
1603 /* generate lvalue bound code */
1604 static void gbound(void)
1606     CType type1;
1608     vtop->r &= ~VT_MUSTBOUND;
1609     /* if lvalue, then use checking code before dereferencing */
1610     if (vtop->r & VT_LVAL) {
1611         /* if not VT_BOUNDED value, then make one */
1612         if (!(vtop->r & VT_BOUNDED)) {
1613             /* must save type because we must set it to int to get pointer */
1614             type1 = vtop->type;
1615             vtop->type.t = VT_PTR;
1616             gaddrof();
/* bound-check address + 0, then restore the lvalue and its real type */
1617             vpushi(0);
1618             gen_bounded_ptr_add();
1619             vtop->r |= VT_LVAL;
1620             vtop->type = type1;
1622         /* then check for dereferencing */
1623         gen_bounded_ptr_deref();
1627 /* we need to call __bound_ptr_add before we start to load function
1628    args into registers */
1629 ST_FUNC void gbound_args(int nb_args)
1631     int i, v;
1632     SValue *sv;
1634     for (i = 1; i <= nb_args; ++i)
1635         if (vtop[1 - i].r & VT_MUSTBOUND) {
/* rotate the argument to the top, bound-check it, rotate it back */
1636             vrotb(i);
1637             gbound();
1638             vrott(i);
/* special-case some well-known callees that interact with bounds info */
1641     sv = vtop - nb_args;
1642     if (sv->r & VT_SYM) {
1643         v = sv->sym->v;
1644         if (v == TOK_setjmp
1645           || v == TOK__setjmp
1646 #ifndef TCC_TARGET_PE
1647           || v == TOK_sigsetjmp
1648           || v == TOK___sigsetjmp
1649 #endif
1651             vpush_helper_func(TOK___bound_setjmp);
1652             vpushv(sv + 1);
1653             gfunc_call(1);
1654             func_bound_add_epilog = 1;
1656 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64
1657         if (v == TOK_alloca)
1658             func_bound_add_epilog = 1;
1659 #endif
1660 #if TARGETOS_NetBSD
1661         if (v == TOK_longjmp) /* undo rename to __longjmp14 */
1662             sv->sym->asm_label = TOK___bound_longjmp;
1663 #endif
1667 /* Add bounds for local symbols from S to E (via ->prev) */
1668 static void add_local_bounds(Sym *s, Sym *e)
1670     for (; s != e; s = s->prev) {
/* only named locals living in the stack frame get bounds records */
1671         if (!s->v || (s->r & VT_VALMASK) != VT_LOCAL)
1672             continue;
1673         /* Add arrays/structs/unions because we always take address */
1674         if ((s->type.t & VT_ARRAY)
1675             || (s->type.t & VT_BTYPE) == VT_STRUCT
1676             || s->a.addrtaken) {
1677             /* add local bound info */
1678             int align, size = type_size(&s->type, &align);
/* record (frame offset, size) pairs in the lbounds section */
1679             addr_t *bounds_ptr = section_ptr_add(lbounds_section,
1680                                                  2 * sizeof(addr_t));
1681             bounds_ptr[0] = s->c;
1682             bounds_ptr[1] = size;
1686 #endif
1688 /* Wrapper around sym_pop, that potentially also registers local bounds.  */
1689 static void pop_local_syms(Sym *b, int keep)
1691 #ifdef CONFIG_TCC_BCHECK
1692     if (tcc_state->do_bounds_check && !keep && (local_scope || !func_var))
1693         add_local_bounds(local_stack, b);
1694 #endif
/* emit debug info for the scope being closed before popping it */
1695     if (debug_modes)
1696         tcc_add_debug_info (tcc_state, !local_scope, local_stack, b);
1697     sym_pop(&local_stack, b, keep);
1700 /* increment an lvalue pointer */
1701 static void incr_offset(int offset)
/* temporarily treat vtop as a plain pointer-sized scalar so that the
   '+' uses pointer arithmetic on the address, then restore the type */
1703     int t = vtop->type.t;
1704     gaddrof(); /* remove VT_LVAL */
1705     vtop->type.t = VT_PTRDIFF_T; /* set scalar type */
1706     vpushs(offset);
1707     gen_op('+');
1708     vtop->r |= VT_LVAL;
1709     vtop->type.t = t;
/* step a bitfield access address forward by 'o' bytes, reading bytes
   as unsigned chars (used by the packed bitfield load/store below) */
1712 static void incr_bf_adr(int o)
1714     vtop->type.t = VT_BYTE | VT_UNSIGNED;
1715     incr_offset(o);
1718 /* single-byte load mode for packed or otherwise unaligned bitfields */
1719 static void load_packed_bf(CType *type, int bit_pos, int bit_size)
1721     int n, o, bits;
1722     save_reg_upstack(vtop->r, 1);
1723     vpush64(type->t & VT_BTYPE, 0); // B X
/* o: byte offset of the first byte holding bits; bit_pos: bit offset
   inside that byte; 'bits' counts how many result bits are assembled */
1724     bits = 0, o = bit_pos >> 3, bit_pos &= 7;
1725     do {
1726         vswap(); // X B
1727         incr_bf_adr(o);
1728         vdup(); // X B B
1729         n = 8 - bit_pos;
1730         if (n > bit_size)
1731             n = bit_size;
1732         if (bit_pos)
1733             vpushi(bit_pos), gen_op(TOK_SHR), bit_pos = 0; // X B Y
1734         if (n < 8)
1735             vpushi((1 << n) - 1), gen_op('&');
1736         gen_cast(type);
1737         if (bits)
1738             vpushi(bits), gen_op(TOK_SHL);
1739         vrotb(3); // B Y X
1740         gen_op('|'); // B X
1741         bits += n, bit_size -= n, o = 1;
1742     } while (bit_size);
1743     vswap(), vpop();
/* sign extend a signed bitfield: shift left then arithmetic-shift back */
1744     if (!(type->t & VT_UNSIGNED)) {
1745         n = ((type->t & VT_BTYPE) == VT_LLONG ? 64 : 32) - bits;
1746         vpushi(n), gen_op(TOK_SHL);
1747         vpushi(n), gen_op(TOK_SAR);
1751 /* single-byte store mode for packed or otherwise unaligned bitfields */
1752 static void store_packed_bf(int bit_pos, int bit_size)
1754     int bits, n, o, m, c;
/* c: source value is a plain constant (can be vdup'ed cheaply) */
1755     c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1756     vswap(); // X B
1757     save_reg_upstack(vtop->r, 1);
1758     bits = 0, o = bit_pos >> 3, bit_pos &= 7;
1759     do {
1760         incr_bf_adr(o); // X B
1761         vswap(); //B X
1762         c ? vdup() : gv_dup(); // B V X
1763         vrott(3); // X B V
1764         if (bits)
1765             vpushi(bits), gen_op(TOK_SHR);
1766         if (bit_pos)
1767             vpushi(bit_pos), gen_op(TOK_SHL);
1768         n = 8 - bit_pos;
1769         if (n > bit_size)
1770             n = bit_size;
/* partial byte: merge the new bits with the byte's untouched bits */
1771         if (n < 8) {
1772             m = ((1 << n) - 1) << bit_pos;
1773             vpushi(m), gen_op('&'); // X B V1
1774             vpushv(vtop-1); // X B V1 B
1775             vpushi(m & 0x80 ? ~m & 0x7f : ~m);
1776             gen_op('&'); // X B V1 B1
1777             gen_op('|'); // X B V2
1779         vdup(), vtop[-1] = vtop[-2]; // X B B V2
1780         vstore(), vpop(); // X B
1781         bits += n, bit_size -= n, bit_pos = 0, o = 1;
1782     } while (bit_size);
1783     vpop(), vpop();
/* adjust an SValue for bitfield access according to the auxiliary type
   recorded in its type ref; returns that auxtype (-1 if none recorded,
   VT_STRUCT meaning the packed single-byte path must be used) */
1786 static int adjust_bf(SValue *sv, int bit_pos, int bit_size)
1788     int t;
1789     if (0 == sv->type.ref)
1790         return 0;
1791     t = sv->type.ref->auxtype;
1792     if (t != -1 && t != VT_STRUCT) {
/* access the bitfield through the recorded container type */
1793         sv->type.t = (sv->type.t & ~(VT_BTYPE | VT_LONG)) | t;
1794         sv->r |= VT_LVAL;
1796     return t;
1799 /* store vtop a register belonging to class 'rc'. lvalues are
1800    converted to values. Cannot be used if cannot be converted to
1801    register value (such as structures). */
1802 ST_FUNC int gv(int rc)
1804     int r, r2, r_ok, r2_ok, rc2, bt;
1805     int bit_pos, bit_size, size, align;
1807     /* NOTE: get_reg can modify vstack[] */
1808     if (vtop->type.t & VT_BITFIELD) {
1809         CType type;
1811         bit_pos = BIT_POS(vtop->type.t);
1812         bit_size = BIT_SIZE(vtop->type.t);
1813         /* remove bit field info to avoid loops */
1814         vtop->type.t &= ~VT_STRUCT_MASK;
1816         type.ref = NULL;
1817         type.t = vtop->type.t & VT_UNSIGNED;
1818         if ((vtop->type.t & VT_BTYPE) == VT_BOOL)
1819             type.t |= VT_UNSIGNED;
1821         r = adjust_bf(vtop, bit_pos, bit_size);
1823         if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
1824             type.t |= VT_LLONG;
1825         else
1826             type.t |= VT_INT;
1828         if (r == VT_STRUCT) {
1829             load_packed_bf(&type, bit_pos, bit_size);
1830         } else {
1831             int bits = (type.t & VT_BTYPE) == VT_LLONG ? 64 : 32;
1832             /* cast to int to propagate signedness in following ops */
1833             gen_cast(&type);
1834             /* generate shifts */
1835             vpushi(bits - (bit_pos + bit_size));
1836             gen_op(TOK_SHL);
1837             vpushi(bits - bit_size);
1838             /* NOTE: transformed to SHR if unsigned */
1839             gen_op(TOK_SAR);
/* recurse now that the bitfield has been turned into a plain value */
1841         r = gv(rc);
1842     } else {
1843         if (is_float(vtop->type.t) &&
1844             (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1845             /* CPUs usually cannot use float constants, so we store them
1846                generically in data segment */
1847             init_params p = { rodata_section };
1848             unsigned long offset;
1849             size = type_size(&vtop->type, &align);
1850             if (NODATA_WANTED)
1851                 size = 0, align = 1;
1852             offset = section_add(p.sec, size, align);
1853             vpush_ref(&vtop->type, p.sec, offset, size);
1854             vswap();
1855             init_putv(&p, &vtop->type, offset);
1856             vtop->r |= VT_LVAL;
1858 #ifdef CONFIG_TCC_BCHECK
1859         if (vtop->r & VT_MUSTBOUND)
1860             gbound();
1861 #endif
1863         bt = vtop->type.t & VT_BTYPE;
1865 #ifdef TCC_TARGET_RISCV64
1866         /* XXX mega hack */
1867         if (bt == VT_LDOUBLE && rc == RC_FLOAT)
1868             rc = RC_INT;
1869 #endif
/* rc2 is non-zero when the type needs a register pair on this target */
1870         rc2 = RC2_TYPE(bt, rc);
1872         /* need to reload if:
1873            - constant
1874            - lvalue (need to dereference pointer)
1875            - already a register, but not in the right class */
1876         r = vtop->r & VT_VALMASK;
1877         r_ok = !(vtop->r & VT_LVAL) && (r < VT_CONST) && (reg_classes[r] & rc);
1878         r2_ok = !rc2 || ((vtop->r2 < VT_CONST) && (reg_classes[vtop->r2] & rc2));
1880         if (!r_ok || !r2_ok) {
1882             if (!r_ok) {
1883                 if (1 /* we can 'mov (r),r' in cases */
1884                     && r < VT_CONST
1885                     && (reg_classes[r] & rc)
1886                     && !rc2
1888                     save_reg_upstack(r, 1);
1889                 else
1890                     r = get_reg(rc);
1893             if (rc2) {
1894                 int load_type = (bt == VT_QFLOAT) ? VT_DOUBLE : VT_PTRDIFF_T;
1895                 int original_type = vtop->type.t;
1897                 /* two register type load :
1898                    expand to two words temporarily */
1899                 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1900                     /* load constant */
1901                     unsigned long long ll = vtop->c.i;
1902                     vtop->c.i = ll; /* first word */
1903                     load(r, vtop);
1904                     vtop->r = r; /* save register value */
1905                     vpushi(ll >> 32); /* second word */
1906                 } else if (vtop->r & VT_LVAL) {
1907                     /* We do not want to modifier the long long pointer here.
1908                        So we save any other instances down the stack */
1909                     save_reg_upstack(vtop->r, 1);
1910                     /* load from memory */
1911                     vtop->type.t = load_type;
1912                     load(r, vtop);
1913                     vdup();
1914                     vtop[-1].r = r; /* save register value */
1915                     /* increment pointer to get second word */
1916                     incr_offset(PTR_SIZE);
1917                 } else {
1918                     /* move registers */
1919                     if (!r_ok)
1920                         load(r, vtop);
1921                     if (r2_ok && vtop->r2 < VT_CONST)
1922                         goto done;
1923                     vdup();
1924                     vtop[-1].r = r; /* save register value */
1925                     vtop->r = vtop[-1].r2;
1927                 /* Allocate second register. Here we rely on the fact that
1928                    get_reg() tries first to free r2 of an SValue. */
1929                 r2 = get_reg(rc2);
1930                 load(r2, vtop);
1931                 vpop();
1932                 /* write second register */
1933                 vtop->r2 = r2;
1934             done:
1935                 vtop->type.t = original_type;
1936             } else {
1937                 if (vtop->r == VT_CMP)
1938                     vset_VT_JMP();
1939                 /* one register type load */
1940                 load(r, vtop);
1943         vtop->r = r;
1944 #ifdef TCC_TARGET_C67
1945         /* uses register pairs for doubles */
1946         if (bt == VT_DOUBLE)
1947             vtop->r2 = r+1;
1948 #endif
1950     return r;
1953 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1954 ST_FUNC void gv2(int rc1, int rc2)
1956     /* generate more generic register first. But VT_JMP or VT_CMP
1957        values must be generated first in all cases to avoid possible
1958        reload errors */
1959     if (vtop->r != VT_CMP && rc1 <= rc2) {
1960         vswap();
1961         gv(rc1);
1962         vswap();
1963         gv(rc2);
1964         /* test if reload is needed for first register */
/* gv(rc2) may have spilled the register allocated for vtop[-1] */
1965         if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
1966             vswap();
1967             gv(rc1);
1968             vswap();
1970     } else {
1971         gv(rc2);
1972         vswap();
1973         gv(rc1);
1974         vswap();
1975         /* test if reload is needed for first register */
1976         if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
1977             gv(rc2);
1982 #if PTR_SIZE == 4
1983 /* expand 64bit on stack in two ints */
1984 ST_FUNC void lexpand(void)
1986     int u, v;
1987     u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1988     v = vtop->r & (VT_VALMASK | VT_LVAL);
1989     if (v == VT_CONST) {
/* constant: high word is just the constant shifted down */
1990         vdup();
1991         vtop[0].c.i >>= 32;
1992     } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
/* memory operand: high word lives 4 bytes further */
1993         vdup();
1994         vtop[0].c.i += 4;
1995     } else {
/* register pair: split r/r2 into two single-register values */
1996         gv(RC_INT);
1997         vdup();
1998         vtop[0].r = vtop[-1].r2;
1999         vtop[0].r2 = vtop[-1].r2 = VT_CONST;
2001     vtop[0].type.t = vtop[-1].type.t = VT_INT | u;
2003 #endif
2005 #if PTR_SIZE == 4
2006 /* build a long long from two ints */
2007 static void lbuild(int t)
/* low word stays in vtop[-1].r, high word register becomes its r2 */
2009     gv2(RC_INT, RC_INT);
2010     vtop[-1].r2 = vtop[0].r;
2011     vtop[-1].type.t = t;
2012     vpop();
2014 #endif
2016 /* convert stack entry to register and duplicate its value in another
2017    register */
2018 static void gv_dup(void)
2020     int t, rc, r;
2022     t = vtop->type.t;
2023 #if PTR_SIZE == 4
2024     if ((t & VT_BTYPE) == VT_LLONG) {
2025         if (t & VT_BITFIELD) {
2026             gv(RC_INT);
2027             t = vtop->type.t;
/* duplicate each 32-bit half separately, then rebuild both values */
2029         lexpand();
2030         gv_dup();
2031         vswap();
2032         vrotb(3);
2033         gv_dup();
2034         vrotb(4);
2035         /* stack: H L L1 H1 */
2036         lbuild(t);
2037         vrotb(3);
2038         vrotb(3);
2039         vswap();
2040         lbuild(t);
2041         vswap();
2042         return;
2044 #endif
2045     /* duplicate value */
2046     rc = RC_TYPE(t);
2047     gv(rc);
2048     r = get_reg(rc);
2049     vdup();
2050     load(r, vtop);
2051     vtop->r = r;
2054 #if PTR_SIZE == 4
2055 /* generate CPU independent (unsigned) long long operations */
2056 static void gen_opl(int op)
2058     int t, a, b, op1, c, i;
2059     int func;
2060     unsigned short reg_iret = REG_IRET;
2061     unsigned short reg_lret = REG_IRE2;
2062     SValue tmp;
2064     switch(op) {
/* division/modulo are delegated to libgcc-style runtime helpers */
2065     case '/':
2066     case TOK_PDIV:
2067         func = TOK___divdi3;
2068         goto gen_func;
2069     case TOK_UDIV:
2070         func = TOK___udivdi3;
2071         goto gen_func;
2072     case '%':
2073         func = TOK___moddi3;
2074         goto gen_mod_func;
2075     case TOK_UMOD:
2076         func = TOK___umoddi3;
2077     gen_mod_func:
2078 #ifdef TCC_ARM_EABI
2079         reg_iret = TREG_R2;
2080         reg_lret = TREG_R3;
2081 #endif
2082     gen_func:
2083         /* call generic long long function */
2084         vpush_helper_func(func);
2085         vrott(3);
2086         gfunc_call(2);
2087         vpushi(0);
2088         vtop->r = reg_iret;
2089         vtop->r2 = reg_lret;
2090         break;
2091     case '^':
2092     case '&':
2093     case '|':
2094     case '*':
2095     case '+':
2096     case '-':
2097         //pv("gen_opl A",0,2);
2098         t = vtop->type.t;
2099         vswap();
2100         lexpand();
2101         vrotb(3);
2102         lexpand();
2103         /* stack: L1 H1 L2 H2 */
2104         tmp = vtop[0];
2105         vtop[0] = vtop[-3];
2106         vtop[-3] = tmp;
2107         tmp = vtop[-2];
2108         vtop[-2] = vtop[-3];
2109         vtop[-3] = tmp;
2110         vswap();
2111         /* stack: H1 H2 L1 L2 */
2112         //pv("gen_opl B",0,4);
2113         if (op == '*') {
/* 64x64 multiply from 32-bit parts:
   result = (L1*L2) + ((L1*H2 + H1*L2) << 32); UMULL gives the low product */
2114             vpushv(vtop - 1);
2115             vpushv(vtop - 1);
2116             gen_op(TOK_UMULL);
2117             lexpand();
2118             /* stack: H1 H2 L1 L2 ML MH */
2119             for(i=0;i<4;i++)
2120                 vrotb(6);
2121             /* stack: ML MH H1 H2 L1 L2 */
2122             tmp = vtop[0];
2123             vtop[0] = vtop[-2];
2124             vtop[-2] = tmp;
2125             /* stack: ML MH H1 L2 H2 L1 */
2126             gen_op('*');
2127             vrotb(3);
2128             vrotb(3);
2129             gen_op('*');
2130             /* stack: ML MH M1 M2 */
2131             gen_op('+');
2132             gen_op('+');
2133         } else if (op == '+' || op == '-') {
2134             /* XXX: add non carry method too (for MIPS or alpha) */
2135             if (op == '+')
2136                 op1 = TOK_ADDC1;
2137             else
2138                 op1 = TOK_SUBC1;
2139             gen_op(op1);
2140             /* stack: H1 H2 (L1 op L2) */
2141             vrotb(3);
2142             vrotb(3);
2143             gen_op(op1 + 1); /* TOK_xxxC2 */
2144         } else {
2145             gen_op(op);
2146             /* stack: H1 H2 (L1 op L2) */
2147             vrotb(3);
2148             vrotb(3);
2149             /* stack: (L1 op L2) H1 H2 */
2150             gen_op(op);
2151             /* stack: (L1 op L2) (H1 op H2) */
2153         /* stack: L H */
2154         lbuild(t);
2155         break;
2156     case TOK_SAR:
2157     case TOK_SHR:
2158     case TOK_SHL:
2159         if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
2160             t = vtop[-1].type.t;
2161             vswap();
2162             lexpand();
2163             vrotb(3);
2164             /* stack: L H shift */
2165             c = (int)vtop->c.i;
2166             /* constant: simpler */
2167             /* NOTE: all comments are for SHL. the other cases are
2168                done by swapping words */
2169             vpop();
2170             if (op != TOK_SHL)
2171                 vswap();
2172             if (c >= 32) {
2173                 /* stack: L H */
2174                 vpop();
2175                 if (c > 32) {
2176                     vpushi(c - 32);
2177                     gen_op(op);
2179                 if (op != TOK_SAR) {
2180                     vpushi(0);
2181                 } else {
/* SAR >= 32: high word of the result is the sign replicated */
2182                     gv_dup();
2183                     vpushi(31);
2184                     gen_op(TOK_SAR);
2186                 vswap();
2187             } else {
2188                 vswap();
2189                 gv_dup();
2190                 /* stack: H L L */
2191                 vpushi(c);
2192                 gen_op(op);
2193                 vswap();
2194                 vpushi(32 - c);
2195                 if (op == TOK_SHL)
2196                     gen_op(TOK_SHR);
2197                 else
2198                     gen_op(TOK_SHL);
2199                 vrotb(3);
2200                 /* stack: L L H */
2201                 vpushi(c);
2202                 if (op == TOK_SHL)
2203                     gen_op(TOK_SHL);
2204                 else
2205                     gen_op(TOK_SHR);
2206                 gen_op('|');
2208             if (op != TOK_SHL)
2209                 vswap();
2210             lbuild(t);
2211         } else {
2212             /* XXX: should provide a faster fallback on x86 ? */
2213             switch(op) {
2214             case TOK_SAR:
2215                 func = TOK___ashrdi3;
2216                 goto gen_func;
2217             case TOK_SHR:
2218                 func = TOK___lshrdi3;
2219                 goto gen_func;
2220             case TOK_SHL:
2221                 func = TOK___ashldi3;
2222                 goto gen_func;
2225         break;
2226     default:
2227         /* compare operations */
2228         t = vtop->type.t;
2229         vswap();
2230         lexpand();
2231         vrotb(3);
2232         lexpand();
2233         /* stack: L1 H1 L2 H2 */
2234         tmp = vtop[-1];
2235         vtop[-1] = vtop[-2];
2236         vtop[-2] = tmp;
2237         /* stack: L1 L2 H1 H2 */
2238         save_regs(4);
2239         /* compare high */
2240         op1 = op;
2241         /* when values are equal, we need to compare low words. since
2242            the jump is inverted, we invert the test too. */
2243         if (op1 == TOK_LT)
2244             op1 = TOK_LE;
2245         else if (op1 == TOK_GT)
2246             op1 = TOK_GE;
2247         else if (op1 == TOK_ULT)
2248             op1 = TOK_ULE;
2249         else if (op1 == TOK_UGT)
2250             op1 = TOK_UGE;
2251         a = 0;
2252         b = 0;
2253         gen_op(op1);
2254         if (op == TOK_NE) {
2255             b = gvtst(0, 0);
2256         } else {
2257             a = gvtst(1, 0);
2258             if (op != TOK_EQ) {
2259                 /* generate non equal test */
2260                 vpushi(0);
2261                 vset_VT_CMP(TOK_NE);
2262                 b = gvtst(0, 0);
2265         /* compare low. Always unsigned */
2266         op1 = op;
2267         if (op1 == TOK_LT)
2268             op1 = TOK_ULT;
2269         else if (op1 == TOK_LE)
2270             op1 = TOK_ULE;
2271         else if (op1 == TOK_GT)
2272             op1 = TOK_UGT;
2273         else if (op1 == TOK_GE)
2274             op1 = TOK_UGE;
2275         gen_op(op1);
2276 #if 0//def TCC_TARGET_I386
2277         if (op == TOK_NE) { gsym(b); break; }
2278         if (op == TOK_EQ) { gsym(a); break; }
2279 #endif
2280         gvtst_set(1, a);
2281         gvtst_set(0, b);
2282         break;
2285 #endif
/* Signed 64-bit division done entirely in unsigned arithmetic so the
   compiler itself never triggers C-level UB (e.g. INT64_MIN / -1).
   Operands and result use the two's complement bit pattern. */
static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
{
    uint64_t ua = (a >> 63) ? 0 - a : a;    /* |a| */
    uint64_t ub = (b >> 63) ? 0 - b : b;    /* |b| */
    uint64_t q = ua / ub;

    /* quotient is negative iff the operand signs differ */
    if ((a ^ b) >> 63)
        q = 0 - q;
    return q;
}
/* Signed 64-bit "a < b" on unsigned bit patterns: flipping the sign
   bit maps the signed ordering onto the unsigned ordering. */
static int gen_opic_lt(uint64_t a, uint64_t b)
{
    const uint64_t sign = (uint64_t)1 << 63;
    return (a ^ sign) < (b ^ sign);
}
2298 /* handle integer constant optimizations and various machine
2299    independent opt */
2300 static void gen_opic(int op)
2302     SValue *v1 = vtop - 1;
2303     SValue *v2 = vtop;
2304     int t1 = v1->type.t & VT_BTYPE;
2305     int t2 = v2->type.t & VT_BTYPE;
2306     int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2307     int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
/* all folding is done on uint64_t so no operation here is UB */
2308     uint64_t l1 = c1 ? v1->c.i : 0;
2309     uint64_t l2 = c2 ? v2->c.i : 0;
2310     int shm = (t1 == VT_LLONG) ? 63 : 31;
2311     int r;
/* normalize 32-bit operands: truncate and sign/zero extend to 64 bits */
2313     if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
2314         l1 = ((uint32_t)l1 |
2315               (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
2316     if (t2 != VT_LLONG && (PTR_SIZE != 8 || t2 != VT_PTR))
2317         l2 = ((uint32_t)l2 |
2318               (v2->type.t & VT_UNSIGNED ? 0 : -(l2 & 0x80000000)));
2320     if (c1 && c2) {
2321         switch(op) {
2322         case '+': l1 += l2; break;
2323         case '-': l1 -= l2; break;
2324         case '&': l1 &= l2; break;
2325         case '^': l1 ^= l2; break;
2326         case '|': l1 |= l2; break;
2327         case '*': l1 *= l2; break;
2329         case TOK_PDIV:
2330         case '/':
2331         case '%':
2332         case TOK_UDIV:
2333         case TOK_UMOD:
2334             /* if division by zero, generate explicit division */
2335             if (l2 == 0) {
2336                 if (CONST_WANTED && !NOEVAL_WANTED)
2337                     tcc_error("division by zero in constant");
2338                 goto general_case;
2340             switch(op) {
2341             default: l1 = gen_opic_sdiv(l1, l2); break;
2342             case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
2343             case TOK_UDIV: l1 = l1 / l2; break;
2344             case TOK_UMOD: l1 = l1 % l2; break;
2346             break;
/* shift counts are masked to the operand width, avoiding UB shifts */
2347         case TOK_SHL: l1 <<= (l2 & shm); break;
2348         case TOK_SHR: l1 >>= (l2 & shm); break;
2349         case TOK_SAR:
2350             l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
2351             break;
2352             /* tests */
2353         case TOK_ULT: l1 = l1 < l2; break;
2354         case TOK_UGE: l1 = l1 >= l2; break;
2355         case TOK_EQ: l1 = l1 == l2; break;
2356         case TOK_NE: l1 = l1 != l2; break;
2357         case TOK_ULE: l1 = l1 <= l2; break;
2358         case TOK_UGT: l1 = l1 > l2; break;
2359         case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
2360         case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
2361         case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
2362         case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
2363             /* logical */
2364         case TOK_LAND: l1 = l1 && l2; break;
2365         case TOK_LOR: l1 = l1 || l2; break;
2366         default:
2367             goto general_case;
/* re-normalize the folded result to the operand width */
2369         if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
2370             l1 = ((uint32_t)l1 |
2371                 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
2372         v1->c.i = l1;
2373         v1->r |= v2->r & VT_NONCONST;
2374         vtop--;
2375     } else {
2376         /* if commutative ops, put c2 as constant */
2377         if (c1 && (op == '+' || op == '&' || op == '^' ||
2378                    op == '|' || op == '*' || op == TOK_EQ || op == TOK_NE)) {
2379             vswap();
2380             c2 = c1; //c = c1, c1 = c2, c2 = c;
2381             l2 = l1; //l = l1, l1 = l2, l2 = l;
2383         if (c1 && ((l1 == 0 &&
2384                     (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
2385                    (l1 == -1 && op == TOK_SAR))) {
2386             /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
2387             vpop();
2388         } else if (c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
2389                           (op == '|' &&
2390                             (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))) ||
2391                           (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
2392             /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
2393             if (l2 == 1)
2394                 vtop->c.i = 0;
2395             vswap();
2396             vtop--;
2397         } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
2398                           op == TOK_PDIV) &&
2399                            l2 == 1) ||
2400                           ((op == '+' || op == '-' || op == '|' || op == '^' ||
2401                             op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
2402                            l2 == 0) ||
2403                           (op == '&' &&
2404                             (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))))) {
2405             /* filter out NOP operations like x*1, x-0, x&-1... */
2406             vtop--;
2407         } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
2408             /* try to use shifts instead of muls or divs */
2409             if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
2410                 int n = -1;
2411                 while (l2) {
2412                     l2 >>= 1;
2413                     n++;
2415                 vtop->c.i = n;
2416                 if (op == '*')
2417                     op = TOK_SHL;
2418                 else if (op == TOK_PDIV)
2419                     op = TOK_SAR;
2420                 else
2421                     op = TOK_SHR;
2423             goto general_case;
2424         } else if (c2 && (op == '+' || op == '-') &&
2425                    (r = vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM),
2426                     r == (VT_CONST | VT_SYM) || r == VT_LOCAL)) {
2427             /* symbol + constant case */
2428             if (op == '-')
2429                 l2 = -l2;
2430             l2 += vtop[-1].c.i;
2431             /* The backends can't always deal with addends to symbols
2432                larger than +-1<<31.  Don't construct such.  */
2433             if ((int)l2 != l2)
2434                 goto general_case;
2435             vtop--;
2436             vtop->c.i = l2;
2437         } else {
2438         general_case:
2439                 /* call low level op generator */
2440                 if (t1 == VT_LLONG || t2 == VT_LLONG ||
2441                     (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
2442                     gen_opl(op);
2443                 else
2444                     gen_opi(op);
2446         if (vtop->r == VT_CONST)
2447             vtop->r |= VT_NONCONST; /* is const, but only by optimization */
2451 #if defined TCC_TARGET_X86_64 || defined TCC_TARGET_I386
2452 # define gen_negf gen_opf
2453 #elif defined TCC_TARGET_ARM
2454 void gen_negf(int op)
2456     /* arm will detect 0-x and replace by vneg */
2457     vpushi(0), vswap(), gen_op('-');
2459 #else
2460 /* XXX: implement in gen_opf() for other backends too */
2461 void gen_negf(int op)
2463     /* In IEEE negate(x) isn't subtract(0,x).  Without NaNs it's
2464        subtract(-0, x), but with them it's really a sign flip
2465        operation.  We implement this with bit manipulation and have
2466        to do some type reinterpretation for this, which TCC can do
2467        only via memory.  */
2469     int align, size, bt;
2471     size = type_size(&vtop->type, &align);
2472     bt = vtop->type.t & VT_BTYPE;
2473     save_reg(gv(RC_TYPE(bt)));
2474     vdup();
/* address the value's most significant byte (little-endian layout)
   and XOR its top bit to flip the IEEE sign */
2475     incr_bf_adr(size - 1);
2476     vdup();
2477     vpushi(0x80); /* flip sign */
2478     gen_op('^');
2479     vstore();
2480     vpop();
2482 #endif
2484 /* generate a floating point operation with constant propagation */
2485 static void gen_opif(int op)
2487 int c1, c2, i, bt;
2488 SValue *v1, *v2;
2489 #if defined _MSC_VER && defined __x86_64__
2490 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2491 volatile
2492 #endif
2493 long double f1, f2;
2495 v1 = vtop - 1;
2496 v2 = vtop;
2497 if (op == TOK_NEG)
2498 v1 = v2;
2499 bt = v1->type.t & VT_BTYPE;
2501 /* currently, we cannot do computations with forward symbols */
2502 c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2503 c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2504 if (c1 && c2) {
2505 if (bt == VT_FLOAT) {
2506 f1 = v1->c.f;
2507 f2 = v2->c.f;
2508 } else if (bt == VT_DOUBLE) {
2509 f1 = v1->c.d;
2510 f2 = v2->c.d;
2511 } else {
2512 f1 = v1->c.ld;
2513 f2 = v2->c.ld;
2515 /* NOTE: we only do constant propagation if finite number (not
2516 NaN or infinity) (ANSI spec) */
2517 if (!(ieee_finite(f1) || !ieee_finite(f2)) && !CONST_WANTED)
2518 goto general_case;
2519 switch(op) {
2520 case '+': f1 += f2; break;
2521 case '-': f1 -= f2; break;
2522 case '*': f1 *= f2; break;
2523 case '/':
2524 if (f2 == 0.0) {
2525 union { float f; unsigned u; } x1, x2, y;
2526 /* If not in initializer we need to potentially generate
2527 FP exceptions at runtime, otherwise we want to fold. */
2528 if (!CONST_WANTED)
2529 goto general_case;
2530 /* the run-time result of 0.0/0.0 on x87, also of other compilers
2531 when used to compile the f1 /= f2 below, would be -nan */
2532 x1.f = f1, x2.f = f2;
2533 if (f1 == 0.0)
2534 y.u = 0x7fc00000; /* nan */
2535 else
2536 y.u = 0x7f800000; /* infinity */
2537 y.u |= (x1.u ^ x2.u) & 0x80000000; /* set sign */
2538 f1 = y.f;
2539 break;
2541 f1 /= f2;
2542 break;
2543 case TOK_NEG:
2544 f1 = -f1;
2545 goto unary_result;
2546 case TOK_EQ:
2547 i = f1 == f2;
2548 make_int:
2549 vtop -= 2;
2550 vpushi(i);
2551 return;
2552 case TOK_NE:
2553 i = f1 != f2;
2554 goto make_int;
2555 case TOK_LT:
2556 i = f1 < f2;
2557 goto make_int;
2558 case TOK_GE:
2559 i = f1 >= f2;
2560 goto make_int;
2561 case TOK_LE:
2562 i = f1 <= f2;
2563 goto make_int;
2564 case TOK_GT:
2565 i = f1 > f2;
2566 goto make_int;
2567 default:
2568 goto general_case;
2570 vtop--;
2571 unary_result:
2572 /* XXX: overflow test ? */
2573 if (bt == VT_FLOAT) {
2574 v1->c.f = f1;
2575 } else if (bt == VT_DOUBLE) {
2576 v1->c.d = f1;
2577 } else {
2578 v1->c.ld = f1;
2580 } else {
2581 general_case:
2582 if (op == TOK_NEG) {
2583 gen_negf(op);
2584 } else {
2585 gen_opf(op);
2590 /* print a type. If 'varstr' is not NULL, then the variable is also
2591 printed in the type */
2592 /* XXX: union */
2593 /* XXX: add array and function pointers */
2594 static void type_to_str(char *buf, int buf_size,
2595 CType *type, const char *varstr)
2597 int bt, v, t;
2598 Sym *s, *sa;
2599 char buf1[256];
2600 const char *tstr;
2602 t = type->t;
2603 bt = t & VT_BTYPE;
2604 buf[0] = '\0';
2606 if (t & VT_EXTERN)
2607 pstrcat(buf, buf_size, "extern ");
2608 if (t & VT_STATIC)
2609 pstrcat(buf, buf_size, "static ");
2610 if (t & VT_TYPEDEF)
2611 pstrcat(buf, buf_size, "typedef ");
2612 if (t & VT_INLINE)
2613 pstrcat(buf, buf_size, "inline ");
2614 if (bt != VT_PTR) {
2615 if (t & VT_VOLATILE)
2616 pstrcat(buf, buf_size, "volatile ");
2617 if (t & VT_CONSTANT)
2618 pstrcat(buf, buf_size, "const ");
2620 if (((t & VT_DEFSIGN) && bt == VT_BYTE)
2621 || ((t & VT_UNSIGNED)
2622 && (bt == VT_SHORT || bt == VT_INT || bt == VT_LLONG)
2623 && !IS_ENUM(t)
2625 pstrcat(buf, buf_size, (t & VT_UNSIGNED) ? "unsigned " : "signed ");
2627 buf_size -= strlen(buf);
2628 buf += strlen(buf);
2630 switch(bt) {
2631 case VT_VOID:
2632 tstr = "void";
2633 goto add_tstr;
2634 case VT_BOOL:
2635 tstr = "_Bool";
2636 goto add_tstr;
2637 case VT_BYTE:
2638 tstr = "char";
2639 goto add_tstr;
2640 case VT_SHORT:
2641 tstr = "short";
2642 goto add_tstr;
2643 case VT_INT:
2644 tstr = "int";
2645 goto maybe_long;
2646 case VT_LLONG:
2647 tstr = "long long";
2648 maybe_long:
2649 if (t & VT_LONG)
2650 tstr = "long";
2651 if (!IS_ENUM(t))
2652 goto add_tstr;
2653 tstr = "enum ";
2654 goto tstruct;
2655 case VT_FLOAT:
2656 tstr = "float";
2657 goto add_tstr;
2658 case VT_DOUBLE:
2659 tstr = "double";
2660 if (!(t & VT_LONG))
2661 goto add_tstr;
2662 case VT_LDOUBLE:
2663 tstr = "long double";
2664 add_tstr:
2665 pstrcat(buf, buf_size, tstr);
2666 break;
2667 case VT_STRUCT:
2668 tstr = "struct ";
2669 if (IS_UNION(t))
2670 tstr = "union ";
2671 tstruct:
2672 pstrcat(buf, buf_size, tstr);
2673 v = type->ref->v & ~SYM_STRUCT;
2674 if (v >= SYM_FIRST_ANOM)
2675 pstrcat(buf, buf_size, "<anonymous>");
2676 else
2677 pstrcat(buf, buf_size, get_tok_str(v, NULL));
2678 break;
2679 case VT_FUNC:
2680 s = type->ref;
2681 buf1[0]=0;
2682 if (varstr && '*' == *varstr) {
2683 pstrcat(buf1, sizeof(buf1), "(");
2684 pstrcat(buf1, sizeof(buf1), varstr);
2685 pstrcat(buf1, sizeof(buf1), ")");
2687 pstrcat(buf1, buf_size, "(");
2688 sa = s->next;
2689 while (sa != NULL) {
2690 char buf2[256];
2691 type_to_str(buf2, sizeof(buf2), &sa->type, NULL);
2692 pstrcat(buf1, sizeof(buf1), buf2);
2693 sa = sa->next;
2694 if (sa)
2695 pstrcat(buf1, sizeof(buf1), ", ");
2697 if (s->f.func_type == FUNC_ELLIPSIS)
2698 pstrcat(buf1, sizeof(buf1), ", ...");
2699 pstrcat(buf1, sizeof(buf1), ")");
2700 type_to_str(buf, buf_size, &s->type, buf1);
2701 goto no_var;
2702 case VT_PTR:
2703 s = type->ref;
2704 if (t & (VT_ARRAY|VT_VLA)) {
2705 if (varstr && '*' == *varstr)
2706 snprintf(buf1, sizeof(buf1), "(%s)[%d]", varstr, s->c);
2707 else
2708 snprintf(buf1, sizeof(buf1), "%s[%d]", varstr ? varstr : "", s->c);
2709 type_to_str(buf, buf_size, &s->type, buf1);
2710 goto no_var;
2712 pstrcpy(buf1, sizeof(buf1), "*");
2713 if (t & VT_CONSTANT)
2714 pstrcat(buf1, buf_size, "const ");
2715 if (t & VT_VOLATILE)
2716 pstrcat(buf1, buf_size, "volatile ");
2717 if (varstr)
2718 pstrcat(buf1, sizeof(buf1), varstr);
2719 type_to_str(buf, buf_size, &s->type, buf1);
2720 goto no_var;
2722 if (varstr) {
2723 pstrcat(buf, buf_size, " ");
2724 pstrcat(buf, buf_size, varstr);
2726 no_var: ;
2729 static void type_incompatibility_error(CType* st, CType* dt, const char* fmt)
2731 char buf1[256], buf2[256];
2732 type_to_str(buf1, sizeof(buf1), st, NULL);
2733 type_to_str(buf2, sizeof(buf2), dt, NULL);
2734 tcc_error(fmt, buf1, buf2);
2737 static void type_incompatibility_warning(CType* st, CType* dt, const char* fmt)
2739 char buf1[256], buf2[256];
2740 type_to_str(buf1, sizeof(buf1), st, NULL);
2741 type_to_str(buf2, sizeof(buf2), dt, NULL);
2742 tcc_warning(fmt, buf1, buf2);
2745 static int pointed_size(CType *type)
2747 int align;
2748 return type_size(pointed_type(type), &align);
2751 static inline int is_null_pointer(SValue *p)
2753 if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM | VT_NONCONST)) != VT_CONST)
2754 return 0;
2755 return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
2756 ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
2757 ((p->type.t & VT_BTYPE) == VT_PTR &&
2758 (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0) &&
2759 ((pointed_type(&p->type)->t & VT_BTYPE) == VT_VOID) &&
2760 0 == (pointed_type(&p->type)->t & (VT_CONSTANT | VT_VOLATILE))
2764 /* compare function types. OLD functions match any new functions */
2765 static int is_compatible_func(CType *type1, CType *type2)
2767 Sym *s1, *s2;
2769 s1 = type1->ref;
2770 s2 = type2->ref;
2771 if (s1->f.func_call != s2->f.func_call)
2772 return 0;
2773 if (s1->f.func_type != s2->f.func_type
2774 && s1->f.func_type != FUNC_OLD
2775 && s2->f.func_type != FUNC_OLD)
2776 return 0;
2777 for (;;) {
2778 if (!is_compatible_unqualified_types(&s1->type, &s2->type))
2779 return 0;
2780 if (s1->f.func_type == FUNC_OLD || s2->f.func_type == FUNC_OLD )
2781 return 1;
2782 s1 = s1->next;
2783 s2 = s2->next;
2784 if (!s1)
2785 return !s2;
2786 if (!s2)
2787 return 0;
2791 /* return true if type1 and type2 are the same. If unqualified is
2792 true, qualifiers on the types are ignored.
2794 static int compare_types(CType *type1, CType *type2, int unqualified)
2796 int bt1, t1, t2;
2798 t1 = type1->t & VT_TYPE;
2799 t2 = type2->t & VT_TYPE;
2800 if (unqualified) {
2801 /* strip qualifiers before comparing */
2802 t1 &= ~(VT_CONSTANT | VT_VOLATILE);
2803 t2 &= ~(VT_CONSTANT | VT_VOLATILE);
2806 /* Default Vs explicit signedness only matters for char */
2807 if ((t1 & VT_BTYPE) != VT_BYTE) {
2808 t1 &= ~VT_DEFSIGN;
2809 t2 &= ~VT_DEFSIGN;
2811 /* XXX: bitfields ? */
2812 if (t1 != t2)
2813 return 0;
2815 if ((t1 & VT_ARRAY)
2816 && !(type1->ref->c < 0
2817 || type2->ref->c < 0
2818 || type1->ref->c == type2->ref->c))
2819 return 0;
2821 /* test more complicated cases */
2822 bt1 = t1 & VT_BTYPE;
2823 if (bt1 == VT_PTR) {
2824 type1 = pointed_type(type1);
2825 type2 = pointed_type(type2);
2826 return is_compatible_types(type1, type2);
2827 } else if (bt1 == VT_STRUCT) {
2828 return (type1->ref == type2->ref);
2829 } else if (bt1 == VT_FUNC) {
2830 return is_compatible_func(type1, type2);
2831 } else if (IS_ENUM(type1->t) && IS_ENUM(type2->t)) {
2832 /* If both are enums then they must be the same, if only one is then
2833 t1 and t2 must be equal, which was checked above already. */
2834 return type1->ref == type2->ref;
2835 } else {
2836 return 1;
2840 #define CMP_OP 'C'
2841 #define SHIFT_OP 'S'
2843 /* Check if OP1 and OP2 can be "combined" with operation OP, the combined
2844 type is stored in DEST if non-null (except for pointer plus/minus) . */
2845 static int combine_types(CType *dest, SValue *op1, SValue *op2, int op)
2847 CType *type1, *type2, type;
2848 int t1, t2, bt1, bt2;
2849 int ret = 1;
2851 /* for shifts, 'combine' only left operand */
2852 if (op == SHIFT_OP)
2853 op2 = op1;
2855 type1 = &op1->type, type2 = &op2->type;
2856 t1 = type1->t, t2 = type2->t;
2857 bt1 = t1 & VT_BTYPE, bt2 = t2 & VT_BTYPE;
2859 type.t = VT_VOID;
2860 type.ref = NULL;
2862 if (bt1 == VT_VOID || bt2 == VT_VOID) {
2863 ret = op == '?' ? 1 : 0;
2864 /* NOTE: as an extension, we accept void on only one side */
2865 type.t = VT_VOID;
2866 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
2867 if (op == '+') {
2868 if (!is_integer_btype(bt1 == VT_PTR ? bt2 : bt1))
2869 ret = 0;
2871 /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
2872 /* If one is a null ptr constant the result type is the other. */
2873 else if (is_null_pointer (op2)) type = *type1;
2874 else if (is_null_pointer (op1)) type = *type2;
2875 else if (bt1 != bt2) {
2876 /* accept comparison or cond-expr between pointer and integer
2877 with a warning */
2878 if ((op == '?' || op == CMP_OP)
2879 && (is_integer_btype(bt1) || is_integer_btype(bt2)))
2880 tcc_warning("pointer/integer mismatch in %s",
2881 op == '?' ? "conditional expression" : "comparison");
2882 else if (op != '-' || !is_integer_btype(bt2))
2883 ret = 0;
2884 type = *(bt1 == VT_PTR ? type1 : type2);
2885 } else {
2886 CType *pt1 = pointed_type(type1);
2887 CType *pt2 = pointed_type(type2);
2888 int pbt1 = pt1->t & VT_BTYPE;
2889 int pbt2 = pt2->t & VT_BTYPE;
2890 int newquals, copied = 0;
2891 if (pbt1 != VT_VOID && pbt2 != VT_VOID
2892 && !compare_types(pt1, pt2, 1/*unqualif*/)) {
2893 if (op != '?' && op != CMP_OP)
2894 ret = 0;
2895 else
2896 type_incompatibility_warning(type1, type2,
2897 op == '?'
2898 ? "pointer type mismatch in conditional expression ('%s' and '%s')"
2899 : "pointer type mismatch in comparison('%s' and '%s')");
2901 if (op == '?') {
2902 /* pointers to void get preferred, otherwise the
2903 pointed to types minus qualifs should be compatible */
2904 type = *((pbt1 == VT_VOID) ? type1 : type2);
2905 /* combine qualifs */
2906 newquals = ((pt1->t | pt2->t) & (VT_CONSTANT | VT_VOLATILE));
2907 if ((~pointed_type(&type)->t & (VT_CONSTANT | VT_VOLATILE))
2908 & newquals)
2910 /* copy the pointer target symbol */
2911 type.ref = sym_push(SYM_FIELD, &type.ref->type,
2912 0, type.ref->c);
2913 copied = 1;
2914 pointed_type(&type)->t |= newquals;
2916 /* pointers to incomplete arrays get converted to
2917 pointers to completed ones if possible */
2918 if (pt1->t & VT_ARRAY
2919 && pt2->t & VT_ARRAY
2920 && pointed_type(&type)->ref->c < 0
2921 && (pt1->ref->c > 0 || pt2->ref->c > 0))
2923 if (!copied)
2924 type.ref = sym_push(SYM_FIELD, &type.ref->type,
2925 0, type.ref->c);
2926 pointed_type(&type)->ref =
2927 sym_push(SYM_FIELD, &pointed_type(&type)->ref->type,
2928 0, pointed_type(&type)->ref->c);
2929 pointed_type(&type)->ref->c =
2930 0 < pt1->ref->c ? pt1->ref->c : pt2->ref->c;
2934 if (op == CMP_OP)
2935 type.t = VT_SIZE_T;
2936 } else if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
2937 if (op != '?' || !compare_types(type1, type2, 1))
2938 ret = 0;
2939 type = *type1;
2940 } else if (is_float(bt1) || is_float(bt2)) {
2941 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
2942 type.t = VT_LDOUBLE;
2943 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
2944 type.t = VT_DOUBLE;
2945 } else {
2946 type.t = VT_FLOAT;
2948 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
2949 /* cast to biggest op */
2950 type.t = VT_LLONG | VT_LONG;
2951 if (bt1 == VT_LLONG)
2952 type.t &= t1;
2953 if (bt2 == VT_LLONG)
2954 type.t &= t2;
2955 /* convert to unsigned if it does not fit in a long long */
2956 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
2957 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
2958 type.t |= VT_UNSIGNED;
2959 } else {
2960 /* integer operations */
2961 type.t = VT_INT | (VT_LONG & (t1 | t2));
2962 /* convert to unsigned if it does not fit in an integer */
2963 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
2964 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
2965 type.t |= VT_UNSIGNED;
2967 if (dest)
2968 *dest = type;
2969 return ret;
2972 /* generic gen_op: handles types problems */
2973 ST_FUNC void gen_op(int op)
2975 int t1, t2, bt1, bt2, t;
2976 CType type1, combtype;
2977 int op_class = op;
2979 if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
2980 op_class = SHIFT_OP;
2981 else if (TOK_ISCOND(op)) /* == != > ... */
2982 op_class = CMP_OP;
2984 redo:
2985 t1 = vtop[-1].type.t;
2986 t2 = vtop[0].type.t;
2987 bt1 = t1 & VT_BTYPE;
2988 bt2 = t2 & VT_BTYPE;
2990 if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
2991 if (bt2 == VT_FUNC) {
2992 mk_pointer(&vtop->type);
2993 gaddrof();
2995 if (bt1 == VT_FUNC) {
2996 vswap();
2997 mk_pointer(&vtop->type);
2998 gaddrof();
2999 vswap();
3001 goto redo;
3002 } else if (!combine_types(&combtype, vtop - 1, vtop, op_class)) {
3003 op_err:
3004 tcc_error("invalid operand types for binary operation");
3005 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
3006 /* at least one operand is a pointer */
3007 /* relational op: must be both pointers */
3008 int align;
3009 if (op_class == CMP_OP)
3010 goto std_op;
3011 /* if both pointers, then it must be the '-' op */
3012 if (bt1 == VT_PTR && bt2 == VT_PTR) {
3013 if (op != '-')
3014 goto op_err;
3015 vpush_type_size(pointed_type(&vtop[-1].type), &align);
3016 vtop->type.t &= ~VT_UNSIGNED;
3017 vrott(3);
3018 gen_opic(op);
3019 vtop->type.t = VT_PTRDIFF_T;
3020 vswap();
3021 gen_op(TOK_PDIV);
3022 } else {
3023 /* exactly one pointer : must be '+' or '-'. */
3024 if (op != '-' && op != '+')
3025 goto op_err;
3026 /* Put pointer as first operand */
3027 if (bt2 == VT_PTR) {
3028 vswap();
3029 t = t1, t1 = t2, t2 = t;
3030 bt2 = bt1;
3032 #if PTR_SIZE == 4
3033 if (bt2 == VT_LLONG)
3034 /* XXX: truncate here because gen_opl can't handle ptr + long long */
3035 gen_cast_s(VT_INT);
3036 #endif
3037 type1 = vtop[-1].type;
3038 vpush_type_size(pointed_type(&vtop[-1].type), &align);
3039 gen_op('*');
3040 #ifdef CONFIG_TCC_BCHECK
3041 if (tcc_state->do_bounds_check && !CONST_WANTED) {
3042 /* if bounded pointers, we generate a special code to
3043 test bounds */
3044 if (op == '-') {
3045 vpushi(0);
3046 vswap();
3047 gen_op('-');
3049 gen_bounded_ptr_add();
3050 } else
3051 #endif
3053 gen_opic(op);
3055 type1.t &= ~(VT_ARRAY|VT_VLA);
3056 /* put again type if gen_opic() swaped operands */
3057 vtop->type = type1;
3059 } else {
3060 /* floats can only be used for a few operations */
3061 if (is_float(combtype.t)
3062 && op != '+' && op != '-' && op != '*' && op != '/'
3063 && op_class != CMP_OP) {
3064 goto op_err;
3066 std_op:
3067 t = t2 = combtype.t;
3068 /* special case for shifts and long long: we keep the shift as
3069 an integer */
3070 if (op_class == SHIFT_OP)
3071 t2 = VT_INT;
3072 /* XXX: currently, some unsigned operations are explicit, so
3073 we modify them here */
3074 if (t & VT_UNSIGNED) {
3075 if (op == TOK_SAR)
3076 op = TOK_SHR;
3077 else if (op == '/')
3078 op = TOK_UDIV;
3079 else if (op == '%')
3080 op = TOK_UMOD;
3081 else if (op == TOK_LT)
3082 op = TOK_ULT;
3083 else if (op == TOK_GT)
3084 op = TOK_UGT;
3085 else if (op == TOK_LE)
3086 op = TOK_ULE;
3087 else if (op == TOK_GE)
3088 op = TOK_UGE;
3090 vswap();
3091 gen_cast_s(t);
3092 vswap();
3093 gen_cast_s(t2);
3094 if (is_float(t))
3095 gen_opif(op);
3096 else
3097 gen_opic(op);
3098 if (op_class == CMP_OP) {
3099 /* relational op: the result is an int */
3100 vtop->type.t = VT_INT;
3101 } else {
3102 vtop->type.t = t;
3105 // Make sure that we have converted to an rvalue:
3106 if (vtop->r & VT_LVAL)
3107 gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
3110 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64 || defined TCC_TARGET_ARM
3111 #define gen_cvt_itof1 gen_cvt_itof
3112 #else
3113 /* generic itof for unsigned long long case */
3114 static void gen_cvt_itof1(int t)
3116 if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
3117 (VT_LLONG | VT_UNSIGNED)) {
3119 if (t == VT_FLOAT)
3120 vpush_helper_func(TOK___floatundisf);
3121 #if LDOUBLE_SIZE != 8
3122 else if (t == VT_LDOUBLE)
3123 vpush_helper_func(TOK___floatundixf);
3124 #endif
3125 else
3126 vpush_helper_func(TOK___floatundidf);
3127 vrott(2);
3128 gfunc_call(1);
3129 vpushi(0);
3130 PUT_R_RET(vtop, t);
3131 } else {
3132 gen_cvt_itof(t);
3135 #endif
3137 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64
3138 #define gen_cvt_ftoi1 gen_cvt_ftoi
3139 #else
3140 /* generic ftoi for unsigned long long case */
3141 static void gen_cvt_ftoi1(int t)
3143 int st;
3144 if (t == (VT_LLONG | VT_UNSIGNED)) {
3145 /* not handled natively */
3146 st = vtop->type.t & VT_BTYPE;
3147 if (st == VT_FLOAT)
3148 vpush_helper_func(TOK___fixunssfdi);
3149 #if LDOUBLE_SIZE != 8
3150 else if (st == VT_LDOUBLE)
3151 vpush_helper_func(TOK___fixunsxfdi);
3152 #endif
3153 else
3154 vpush_helper_func(TOK___fixunsdfdi);
3155 vrott(2);
3156 gfunc_call(1);
3157 vpushi(0);
3158 PUT_R_RET(vtop, t);
3159 } else {
3160 gen_cvt_ftoi(t);
3163 #endif
3165 /* special delayed cast for char/short */
3166 static void force_charshort_cast(void)
3168 int sbt = BFGET(vtop->r, VT_MUSTCAST) == 2 ? VT_LLONG : VT_INT;
3169 int dbt = vtop->type.t;
3170 vtop->r &= ~VT_MUSTCAST;
3171 vtop->type.t = sbt;
3172 gen_cast_s(dbt == VT_BOOL ? VT_BYTE|VT_UNSIGNED : dbt);
3173 vtop->type.t = dbt;
3176 static void gen_cast_s(int t)
3178 CType type;
3179 type.t = t;
3180 type.ref = NULL;
3181 gen_cast(&type);
3184 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
3185 static void gen_cast(CType *type)
3187 int sbt, dbt, sf, df, c;
3188 int dbt_bt, sbt_bt, ds, ss, bits, trunc;
3190 /* special delayed cast for char/short */
3191 if (vtop->r & VT_MUSTCAST)
3192 force_charshort_cast();
3194 /* bitfields first get cast to ints */
3195 if (vtop->type.t & VT_BITFIELD)
3196 gv(RC_INT);
3198 dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
3199 sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
3200 if (sbt == VT_FUNC)
3201 sbt = VT_PTR;
3203 again:
3204 if (sbt != dbt) {
3205 sf = is_float(sbt);
3206 df = is_float(dbt);
3207 dbt_bt = dbt & VT_BTYPE;
3208 sbt_bt = sbt & VT_BTYPE;
3209 if (dbt_bt == VT_VOID)
3210 goto done;
3211 if (sbt_bt == VT_VOID) {
3212 error:
3213 cast_error(&vtop->type, type);
3216 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
3217 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
3218 /* don't try to convert to ldouble when cross-compiling
3219 (except when it's '0' which is needed for arm:gen_negf()) */
3220 if (dbt_bt == VT_LDOUBLE && !nocode_wanted && (sf || vtop->c.i != 0))
3221 c = 0;
3222 #endif
3223 if (c) {
3224 /* constant case: we can do it now */
3225 /* XXX: in ISOC, cannot do it if error in convert */
3226 if (sbt == VT_FLOAT)
3227 vtop->c.ld = vtop->c.f;
3228 else if (sbt == VT_DOUBLE)
3229 vtop->c.ld = vtop->c.d;
3231 if (df) {
3232 if (sbt_bt == VT_LLONG) {
3233 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
3234 vtop->c.ld = vtop->c.i;
3235 else
3236 vtop->c.ld = -(long double)-vtop->c.i;
3237 } else if(!sf) {
3238 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
3239 vtop->c.ld = (uint32_t)vtop->c.i;
3240 else
3241 vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
3244 if (dbt == VT_FLOAT)
3245 vtop->c.f = (float)vtop->c.ld;
3246 else if (dbt == VT_DOUBLE)
3247 vtop->c.d = (double)vtop->c.ld;
3248 } else if (sf && dbt == VT_BOOL) {
3249 vtop->c.i = (vtop->c.ld != 0);
3250 } else {
3251 if(sf)
3252 /* the range of [int64_t] is enough to hold the integer part of any float value.
3253 Meanwhile, converting negative double to unsigned integer is UB.
3254 So first convert to [int64_t] here. */
3255 vtop->c.i = (int64_t)vtop->c.ld;
3256 else if (sbt_bt == VT_LLONG || (PTR_SIZE == 8 && sbt == VT_PTR))
3258 else if (sbt & VT_UNSIGNED)
3259 vtop->c.i = (uint32_t)vtop->c.i;
3260 else
3261 vtop->c.i = ((uint32_t)vtop->c.i | -(vtop->c.i & 0x80000000));
3263 if (dbt_bt == VT_LLONG || (PTR_SIZE == 8 && dbt == VT_PTR))
3265 else if (dbt == VT_BOOL)
3266 vtop->c.i = (vtop->c.i != 0);
3267 else {
3268 uint32_t m = dbt_bt == VT_BYTE ? 0xff :
3269 dbt_bt == VT_SHORT ? 0xffff :
3270 0xffffffff;
3271 vtop->c.i &= m;
3272 if (!(dbt & VT_UNSIGNED))
3273 vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
3276 goto done;
3278 } else if (dbt == VT_BOOL
3279 && (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM))
3280 == (VT_CONST | VT_SYM)) {
3281 /* addresses are considered non-zero (see tcctest.c:sinit23) */
3282 vtop->r = VT_CONST;
3283 vtop->c.i = 1;
3284 goto done;
3287 /* cannot generate code for global or static initializers */
3288 if (nocode_wanted & DATA_ONLY_WANTED)
3289 goto done;
3291 /* non constant case: generate code */
3292 if (dbt == VT_BOOL) {
3293 gen_test_zero(TOK_NE);
3294 goto done;
3297 if (sf || df) {
3298 if (sf && df) {
3299 /* convert from fp to fp */
3300 gen_cvt_ftof(dbt);
3301 } else if (df) {
3302 /* convert int to fp */
3303 gen_cvt_itof1(dbt);
3304 } else {
3305 /* convert fp to int */
3306 sbt = dbt;
3307 if (dbt_bt != VT_LLONG && dbt_bt != VT_INT)
3308 sbt = VT_INT;
3309 gen_cvt_ftoi1(sbt);
3310 goto again; /* may need char/short cast */
3312 goto done;
3315 ds = btype_size(dbt_bt);
3316 ss = btype_size(sbt_bt);
3317 if (ds == 0 || ss == 0)
3318 goto error;
3320 if (IS_ENUM(type->t) && type->ref->c < 0)
3321 tcc_error("cast to incomplete type");
3323 /* same size and no sign conversion needed */
3324 if (ds == ss && ds >= 4)
3325 goto done;
3326 if (dbt_bt == VT_PTR || sbt_bt == VT_PTR) {
3327 tcc_warning("cast between pointer and integer of different size");
3328 if (sbt_bt == VT_PTR) {
3329 /* put integer type to allow logical operations below */
3330 vtop->type.t = (PTR_SIZE == 8 ? VT_LLONG : VT_INT);
3334 /* processor allows { int a = 0, b = *(char*)&a; }
3335 That means that if we cast to less width, we can just
3336 change the type and read it still later. */
3337 #define ALLOW_SUBTYPE_ACCESS 1
3339 if (ALLOW_SUBTYPE_ACCESS && (vtop->r & VT_LVAL)) {
3340 /* value still in memory */
3341 if (ds <= ss)
3342 goto done;
3343 /* ss <= 4 here */
3344 if (ds <= 4 && !(dbt == (VT_SHORT | VT_UNSIGNED) && sbt == VT_BYTE)) {
3345 gv(RC_INT);
3346 goto done; /* no 64bit envolved */
3349 gv(RC_INT);
3351 trunc = 0;
3352 #if PTR_SIZE == 4
3353 if (ds == 8) {
3354 /* generate high word */
3355 if (sbt & VT_UNSIGNED) {
3356 vpushi(0);
3357 gv(RC_INT);
3358 } else {
3359 gv_dup();
3360 vpushi(31);
3361 gen_op(TOK_SAR);
3363 lbuild(dbt);
3364 } else if (ss == 8) {
3365 /* from long long: just take low order word */
3366 lexpand();
3367 vpop();
3369 ss = 4;
3371 #elif PTR_SIZE == 8
3372 if (ds == 8) {
3373 /* need to convert from 32bit to 64bit */
3374 if (sbt & VT_UNSIGNED) {
3375 #if defined(TCC_TARGET_RISCV64)
3376 /* RISC-V keeps 32bit vals in registers sign-extended.
3377 So here we need a zero-extension. */
3378 trunc = 32;
3379 #else
3380 goto done;
3381 #endif
3382 } else {
3383 gen_cvt_sxtw();
3384 goto done;
3386 ss = ds, ds = 4, dbt = sbt;
3387 } else if (ss == 8) {
3388 /* RISC-V keeps 32bit vals in registers sign-extended.
3389 So here we need a sign-extension for signed types and
3390 zero-extension. for unsigned types. */
3391 #if !defined(TCC_TARGET_RISCV64)
3392 trunc = 32; /* zero upper 32 bits for non RISC-V targets */
3393 #endif
3394 } else {
3395 ss = 4;
3397 #endif
3399 if (ds >= ss)
3400 goto done;
3401 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64 || defined TCC_TARGET_ARM64
3402 if (ss == 4) {
3403 gen_cvt_csti(dbt);
3404 goto done;
3406 #endif
3407 bits = (ss - ds) * 8;
3408 /* for unsigned, gen_op will convert SAR to SHR */
3409 vtop->type.t = (ss == 8 ? VT_LLONG : VT_INT) | (dbt & VT_UNSIGNED);
3410 vpushi(bits);
3411 gen_op(TOK_SHL);
3412 vpushi(bits - trunc);
3413 gen_op(TOK_SAR);
3414 vpushi(trunc);
3415 gen_op(TOK_SHR);
3417 done:
3418 vtop->type = *type;
3419 vtop->type.t &= ~ ( VT_CONSTANT | VT_VOLATILE | VT_ARRAY );
3422 /* return type size as known at compile time. Put alignment at 'a' */
3423 ST_FUNC int type_size(CType *type, int *a)
3425 Sym *s;
3426 int bt;
3428 bt = type->t & VT_BTYPE;
3429 if (bt == VT_STRUCT) {
3430 /* struct/union */
3431 s = type->ref;
3432 *a = s->r;
3433 return s->c;
3434 } else if (bt == VT_PTR) {
3435 if (type->t & VT_ARRAY) {
3436 int ts;
3437 s = type->ref;
3438 ts = type_size(&s->type, a);
3439 if (ts < 0 && s->c < 0)
3440 ts = -ts;
3441 return ts * s->c;
3442 } else {
3443 *a = PTR_SIZE;
3444 return PTR_SIZE;
3446 } else if (IS_ENUM(type->t) && type->ref->c < 0) {
3447 *a = 0;
3448 return -1; /* incomplete enum */
3449 } else if (bt == VT_LDOUBLE) {
3450 *a = LDOUBLE_ALIGN;
3451 return LDOUBLE_SIZE;
3452 } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
3453 #if (defined TCC_TARGET_I386 && !defined TCC_TARGET_PE) \
3454 || (defined TCC_TARGET_ARM && !defined TCC_ARM_EABI)
3455 *a = 4;
3456 #else
3457 *a = 8;
3458 #endif
3459 return 8;
3460 } else if (bt == VT_INT || bt == VT_FLOAT) {
3461 *a = 4;
3462 return 4;
3463 } else if (bt == VT_SHORT) {
3464 *a = 2;
3465 return 2;
3466 } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
3467 *a = 8;
3468 return 16;
3469 } else {
3470 /* char, void, function, _Bool */
3471 *a = 1;
3472 return 1;
3476 /* push type size as known at runtime time on top of value stack. Put
3477 alignment at 'a' */
3478 static void vpush_type_size(CType *type, int *a)
3480 if (type->t & VT_VLA) {
3481 type_size(&type->ref->type, a);
3482 vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
3483 } else {
3484 int size = type_size(type, a);
3485 if (size < 0)
3486 tcc_error("unknown type size");
3487 vpushs(size);
3491 /* return the pointed type of t */
3492 static inline CType *pointed_type(CType *type)
3494 return &type->ref->type;
3497 /* modify type so that its it is a pointer to type. */
3498 ST_FUNC void mk_pointer(CType *type)
3500 Sym *s;
3501 s = sym_push(SYM_FIELD, type, 0, -1);
3502 type->t = VT_PTR | (type->t & VT_STORAGE);
3503 type->ref = s;
3506 /* return true if type1 and type2 are exactly the same (including
3507 qualifiers).
3509 static int is_compatible_types(CType *type1, CType *type2)
3511 return compare_types(type1,type2,0);
3514 /* return true if type1 and type2 are the same (ignoring qualifiers).
3516 static int is_compatible_unqualified_types(CType *type1, CType *type2)
3518 return compare_types(type1,type2,1);
3521 static void cast_error(CType *st, CType *dt)
3523 type_incompatibility_error(st, dt, "cannot convert '%s' to '%s'");
3526 /* verify type compatibility to store vtop in 'dt' type */
3527 static void verify_assign_cast(CType *dt)
3529 CType *st, *type1, *type2;
3530 int dbt, sbt, qualwarn, lvl;
3532 st = &vtop->type; /* source type */
3533 dbt = dt->t & VT_BTYPE;
3534 sbt = st->t & VT_BTYPE;
3535 if (dt->t & VT_CONSTANT)
3536 tcc_warning("assignment of read-only location");
3537 switch(dbt) {
3538 case VT_VOID:
3539 if (sbt != dbt)
3540 tcc_error("assignment to void expression");
3541 break;
3542 case VT_PTR:
3543 /* special cases for pointers */
3544 /* '0' can also be a pointer */
3545 if (is_null_pointer(vtop))
3546 break;
3547 /* accept implicit pointer to integer cast with warning */
3548 if (is_integer_btype(sbt)) {
3549 tcc_warning("assignment makes pointer from integer without a cast");
3550 break;
3552 type1 = pointed_type(dt);
3553 if (sbt == VT_PTR)
3554 type2 = pointed_type(st);
3555 else if (sbt == VT_FUNC)
3556 type2 = st; /* a function is implicitly a function pointer */
3557 else
3558 goto error;
3559 if (is_compatible_types(type1, type2))
3560 break;
3561 for (qualwarn = lvl = 0;; ++lvl) {
3562 if (((type2->t & VT_CONSTANT) && !(type1->t & VT_CONSTANT)) ||
3563 ((type2->t & VT_VOLATILE) && !(type1->t & VT_VOLATILE)))
3564 qualwarn = 1;
3565 dbt = type1->t & (VT_BTYPE|VT_LONG);
3566 sbt = type2->t & (VT_BTYPE|VT_LONG);
3567 if (dbt != VT_PTR || sbt != VT_PTR)
3568 break;
3569 type1 = pointed_type(type1);
3570 type2 = pointed_type(type2);
3572 if (!is_compatible_unqualified_types(type1, type2)) {
3573 if ((dbt == VT_VOID || sbt == VT_VOID) && lvl == 0) {
3574 /* void * can match anything */
3575 } else if (dbt == sbt
3576 && is_integer_btype(sbt & VT_BTYPE)
3577 && IS_ENUM(type1->t) + IS_ENUM(type2->t)
3578 + !!((type1->t ^ type2->t) & VT_UNSIGNED) < 2) {
3579 /* Like GCC don't warn by default for merely changes
3580 in pointer target signedness. Do warn for different
3581 base types, though, in particular for unsigned enums
3582 and signed int targets. */
3583 } else {
3584 tcc_warning("assignment from incompatible pointer type");
3585 break;
3588 if (qualwarn)
3589 tcc_warning_c(warn_discarded_qualifiers)("assignment discards qualifiers from pointer target type");
3590 break;
3591 case VT_BYTE:
3592 case VT_SHORT:
3593 case VT_INT:
3594 case VT_LLONG:
3595 if (sbt == VT_PTR || sbt == VT_FUNC) {
3596 tcc_warning("assignment makes integer from pointer without a cast");
3597 } else if (sbt == VT_STRUCT) {
3598 goto case_VT_STRUCT;
3600 /* XXX: more tests */
3601 break;
3602 case VT_STRUCT:
3603 case_VT_STRUCT:
3604 if (!is_compatible_unqualified_types(dt, st)) {
3605 error:
3606 cast_error(st, dt);
3608 break;
3612 static void gen_assign_cast(CType *dt)
3614 verify_assign_cast(dt);
3615 gen_cast(dt);
3618 /* store vtop in lvalue pushed on stack */
/* On entry the value stack holds: ... lvalue, value.  Handles four cases:
   struct copy (memmove/native copy), bit-field store (read-modify-write),
   store to void (discard), and plain scalar store. */
3619 ST_FUNC void vstore(void)
3621 int sbt, dbt, ft, r, size, align, bit_size, bit_pos, delayed_cast;
3623 ft = vtop[-1].type.t;
3624 sbt = vtop->type.t & VT_BTYPE;
3625 dbt = ft & VT_BTYPE;
3626 verify_assign_cast(&vtop[-1].type);
3628 if (sbt == VT_STRUCT) {
3629 /* if structure, only generate pointer */
3630 /* structure assignment : generate memcpy */
3631 size = type_size(&vtop->type, &align);
3632 /* destination, keep on stack() as result */
3633 vpushv(vtop - 1);
3634 #ifdef CONFIG_TCC_BCHECK
3635 if (vtop->r & VT_MUSTBOUND)
3636 gbound(); /* check would be wrong after gaddrof() */
3637 #endif
3638 vtop->type.t = VT_PTR;
3639 gaddrof();
3640 /* source */
3641 vswap();
3642 #ifdef CONFIG_TCC_BCHECK
3643 if (vtop->r & VT_MUSTBOUND)
3644 gbound();
3645 #endif
3646 vtop->type.t = VT_PTR;
3647 gaddrof();
/* prefer an inline target-native copy when bounds checking is off */
3649 #ifdef TCC_TARGET_NATIVE_STRUCT_COPY
3650 if (1
3651 #ifdef CONFIG_TCC_BCHECK
3652 && !tcc_state->do_bounds_check
3653 #endif
3655 gen_struct_copy(size);
3656 } else
3657 #endif
3659 /* type size */
3660 vpushi(size);
3661 /* Use memmove, rather than memcpy, as dest and src may be same: */
3662 #ifdef TCC_ARM_EABI
3663 if(!(align & 7))
3664 vpush_helper_func(TOK_memmove8);
3665 else if(!(align & 3))
3666 vpush_helper_func(TOK_memmove4);
3667 else
3668 #endif
3669 vpush_helper_func(TOK_memmove);
3670 vrott(4);
3671 gfunc_call(3);
3674 } else if (ft & VT_BITFIELD) {
3675 /* bitfield store handling */
3677 /* save lvalue as expression result (example: s.b = s.a = n;) */
3678 vdup(), vtop[-1] = vtop[-2];
3680 bit_pos = BIT_POS(ft);
3681 bit_size = BIT_SIZE(ft);
3682 /* remove bit field info to avoid loops */
3683 vtop[-1].type.t = ft & ~VT_STRUCT_MASK;
/* _Bool bit-fields: normalize to 0/1 first, then store as unsigned byte */
3685 if (dbt == VT_BOOL) {
3686 gen_cast(&vtop[-1].type);
3687 vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
3689 r = adjust_bf(vtop - 1, bit_pos, bit_size);
3690 if (dbt != VT_BOOL) {
3691 gen_cast(&vtop[-1].type);
3692 dbt = vtop[-1].type.t & VT_BTYPE;
3694 if (r == VT_STRUCT) {
3695 store_packed_bf(bit_pos, bit_size);
3696 } else {
/* classic read-modify-write: mask source, shift into place,
   clear the field in the destination word, OR and store back */
3697 unsigned long long mask = (1ULL << bit_size) - 1;
3698 if (dbt != VT_BOOL) {
3699 /* mask source */
3700 if (dbt == VT_LLONG)
3701 vpushll(mask);
3702 else
3703 vpushi((unsigned)mask);
3704 gen_op('&');
3706 /* shift source */
3707 vpushi(bit_pos);
3708 gen_op(TOK_SHL);
3709 vswap();
3710 /* duplicate destination */
3711 vdup();
3712 vrott(3);
3713 /* load destination, mask and or with source */
3714 if (dbt == VT_LLONG)
3715 vpushll(~(mask << bit_pos));
3716 else
3717 vpushi(~((unsigned)mask << bit_pos));
3718 gen_op('&');
3719 gen_op('|');
3720 /* store result */
3721 vstore();
3722 /* ... and discard */
3723 vpop();
3725 } else if (dbt == VT_VOID) {
3726 --vtop;
3727 } else {
3728 /* optimize char/short casts */
3729 delayed_cast = 0;
3730 if ((dbt == VT_BYTE || dbt == VT_SHORT)
3731 && is_integer_btype(sbt)
3733 if ((vtop->r & VT_MUSTCAST)
3734 && btype_size(dbt) > btype_size(sbt)
3736 force_charshort_cast();
3737 delayed_cast = 1;
3738 } else {
3739 gen_cast(&vtop[-1].type);
3742 #ifdef CONFIG_TCC_BCHECK
3743 /* bound check case */
3744 if (vtop[-1].r & VT_MUSTBOUND) {
3745 vswap();
3746 gbound();
3747 vswap();
3749 #endif
3750 gv(RC_TYPE(dbt)); /* generate value */
3752 if (delayed_cast) {
3753 vtop->r |= BFVAL(VT_MUSTCAST, (sbt == VT_LLONG) + 1);
3754 //tcc_warning("deley cast %x -> %x", sbt, dbt);
3755 vtop->type.t = ft & VT_TYPE;
3758 /* if lvalue was saved on stack, must read it */
3759 if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
3760 SValue sv;
3761 r = get_reg(RC_INT);
3762 sv.type.t = VT_PTRDIFF_T;
3763 sv.r = VT_LOCAL | VT_LVAL;
3764 sv.c.i = vtop[-1].c.i;
3765 load(r, &sv);
3766 vtop[-1].r = r | VT_LVAL;
3769 r = vtop->r & VT_VALMASK;
3770 /* two word case handling :
3771 store second register at word + 4 (or +8 for x86-64) */
3772 if (USING_TWO_WORDS(dbt)) {
3773 int load_type = (dbt == VT_QFLOAT) ? VT_DOUBLE : VT_PTRDIFF_T;
3774 vtop[-1].type.t = load_type;
3775 store(r, vtop - 1);
3776 vswap();
3777 incr_offset(PTR_SIZE);
3778 vswap();
3779 /* XXX: it works because r2 is spilled last ! */
3780 store(vtop->r2, vtop - 1);
3781 } else {
3782 /* single word */
3783 store(r, vtop - 1);
3785 vswap();
3786 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
3790 /* post defines POST/PRE add. c is the token ++ or -- */
/* Implements ++/-- on the lvalue at vtop.  For a post-op, the original
   value is duplicated first so it remains as the expression result;
   c - TOK_MID yields +1 or -1 (relies on token numbering). */
3791 ST_FUNC void inc(int post, int c)
3793 test_lvalue();
3794 vdup(); /* save lvalue */
3795 if (post) {
3796 gv_dup(); /* duplicate value */
3797 vrotb(3);
3798 vrotb(3);
3800 /* add constant */
3801 vpushi(c - TOK_MID);
3802 gen_op('+');
3803 vstore(); /* store value */
3804 if (post)
3805 vpop(); /* if post op, return saved value */
/* Parse one or more adjacent string literals, concatenate them into the
   shared 'initstr' buffer and NUL-terminate it.  MSG is the error text
   used when no string is present.  Returns &initstr (reused between
   calls — callers must copy the data if they need to keep it). */
3808 ST_FUNC CString* parse_mult_str (const char *msg)
3810 /* read the string */
3811 if (tok != TOK_STR)
3812 expect(msg);
3813 cstr_reset(&initstr);
3814 while (tok == TOK_STR) {
3815 /* XXX: add \0 handling too ? */
3816 cstr_cat(&initstr, tokc.str.data, -1);
3817 next();
3819 cstr_ccat(&initstr, '\0');
3820 return &initstr;
/* If I is >= 1 and a power of two, returns log2(I)+1.
   If I is 0 returns 0.  (For any other positive I this yields
   floor(log2(I)) + 1, i.e. the bit length of I.) */
#ifndef ST_FUNC
# define ST_FUNC static /* normally provided by tcc.h */
#endif
ST_FUNC int exact_log2p1(int i)
{
    int n;

    if (i == 0)
        return 0;
    /* reduce in chunks of 8 bits, then binary-search the remainder */
    n = 1;
    while (i >= 1 << 8) {
        i >>= 8;
        n += 8;
    }
    if (i >= 1 << 4) {
        i >>= 4;
        n += 4;
    }
    if (i >= 1 << 2) {
        i >>= 2;
        n += 2;
    }
    if (i >= 1 << 1)
        n++;
    return n;
}
3841 /* Parse __attribute__((...)) GNUC extension. */
/* Fills AD with the recognized attributes.  Loops over comma-separated
   attribute names inside double parentheses; unknown attributes are
   warned about and their parenthesized arguments are skipped.  Several
   consecutive __attribute__ clauses are handled via the 'redo' label. */
3842 static void parse_attribute(AttributeDef *ad)
3844 int t, n;
3845 char *astr;
3847 redo:
3848 if (tok != TOK_ATTRIBUTE1 && tok != TOK_ATTRIBUTE2)
3849 return;
3850 next();
3851 skip('(');
3852 skip('(');
3853 while (tok != ')') {
3854 if (tok < TOK_IDENT)
3855 expect("attribute name");
3856 t = tok;
3857 next();
3858 switch(t) {
3859 case TOK_CLEANUP1:
3860 case TOK_CLEANUP2:
3862 Sym *s;
3864 skip('(');
3865 s = sym_find(tok);
3866 if (!s) {
3867 tcc_warning_c(warn_implicit_function_declaration)(
3868 "implicit declaration of function '%s'", get_tok_str(tok, &tokc));
3869 s = external_global_sym(tok, &func_old_type);
3870 } else if ((s->type.t & VT_BTYPE) != VT_FUNC)
3871 tcc_error("'%s' is not declared as function", get_tok_str(tok, &tokc));
3872 ad->cleanup_func = s;
3873 next();
3874 skip(')');
3875 break;
3877 case TOK_CONSTRUCTOR1:
3878 case TOK_CONSTRUCTOR2:
3879 ad->f.func_ctor = 1;
3880 break;
3881 case TOK_DESTRUCTOR1:
3882 case TOK_DESTRUCTOR2:
3883 ad->f.func_dtor = 1;
3884 break;
3885 case TOK_ALWAYS_INLINE1:
3886 case TOK_ALWAYS_INLINE2:
3887 ad->f.func_alwinl = 1;
3888 break;
3889 case TOK_SECTION1:
3890 case TOK_SECTION2:
3891 skip('(');
3892 astr = parse_mult_str("section name")->data;
3893 ad->section = find_section(tcc_state, astr);
3894 skip(')');
3895 break;
3896 case TOK_ALIAS1:
3897 case TOK_ALIAS2:
3898 skip('(');
3899 astr = parse_mult_str("alias(\"target\")")->data;
3900 /* save string as token, for later */
3901 ad->alias_target = tok_alloc_const(astr);
3902 skip(')');
3903 break;
3904 case TOK_VISIBILITY1:
3905 case TOK_VISIBILITY2:
3906 skip('(');
3907 astr = parse_mult_str("visibility(\"default|hidden|internal|protected\")")->data;
3908 if (!strcmp (astr, "default"))
3909 ad->a.visibility = STV_DEFAULT;
3910 else if (!strcmp (astr, "hidden"))
3911 ad->a.visibility = STV_HIDDEN;
3912 else if (!strcmp (astr, "internal"))
3913 ad->a.visibility = STV_INTERNAL;
3914 else if (!strcmp (astr, "protected"))
3915 ad->a.visibility = STV_PROTECTED;
3916 else
3917 expect("visibility(\"default|hidden|internal|protected\")");
3918 skip(')');
3919 break;
3920 case TOK_ALIGNED1:
3921 case TOK_ALIGNED2:
3922 if (tok == '(') {
3923 next();
3924 n = expr_const();
3925 if (n <= 0 || (n & (n - 1)) != 0)
3926 tcc_error("alignment must be a positive power of two");
3927 skip(')');
3928 } else {
/* bare __attribute__((aligned)) means maximum useful alignment */
3929 n = MAX_ALIGN;
3931 ad->a.aligned = exact_log2p1(n);
3932 if (n != 1 << (ad->a.aligned - 1))
3933 tcc_error("alignment of %d is larger than implemented", n);
3934 break;
3935 case TOK_PACKED1:
3936 case TOK_PACKED2:
3937 ad->a.packed = 1;
3938 break;
3939 case TOK_WEAK1:
3940 case TOK_WEAK2:
3941 ad->a.weak = 1;
3942 break;
3943 case TOK_NODEBUG1:
3944 case TOK_NODEBUG2:
3945 ad->a.nodebug = 1;
3946 break;
3947 case TOK_UNUSED1:
3948 case TOK_UNUSED2:
3949 /* currently, no need to handle it because tcc does not
3950 track unused objects */
3951 break;
3952 case TOK_NORETURN1:
3953 case TOK_NORETURN2:
3954 ad->f.func_noreturn = 1;
3955 break;
3956 case TOK_CDECL1:
3957 case TOK_CDECL2:
3958 case TOK_CDECL3:
3959 ad->f.func_call = FUNC_CDECL;
3960 break;
3961 case TOK_STDCALL1:
3962 case TOK_STDCALL2:
3963 case TOK_STDCALL3:
3964 ad->f.func_call = FUNC_STDCALL;
3965 break;
3966 #ifdef TCC_TARGET_I386
3967 case TOK_REGPARM1:
3968 case TOK_REGPARM2:
3969 skip('(');
3970 n = expr_const();
/* clamp register-parameter count to the 0..3 supported range */
3971 if (n > 3)
3972 n = 3;
3973 else if (n < 0)
3974 n = 0;
3975 if (n > 0)
3976 ad->f.func_call = FUNC_FASTCALL1 + n - 1;
3977 skip(')');
3978 break;
3979 case TOK_FASTCALL1:
3980 case TOK_FASTCALL2:
3981 case TOK_FASTCALL3:
3982 ad->f.func_call = FUNC_FASTCALLW;
3983 break;
3984 case TOK_THISCALL1:
3985 case TOK_THISCALL2:
3986 case TOK_THISCALL3:
3987 ad->f.func_call = FUNC_THISCALL;
3988 break;
3989 #endif
3990 case TOK_MODE:
3991 skip('(');
3992 switch(tok) {
3993 case TOK_MODE_DI:
3994 ad->attr_mode = VT_LLONG + 1;
3995 break;
3996 case TOK_MODE_QI:
3997 ad->attr_mode = VT_BYTE + 1;
3998 break;
3999 case TOK_MODE_HI:
4000 ad->attr_mode = VT_SHORT + 1;
4001 break;
4002 case TOK_MODE_SI:
4003 case TOK_MODE_word:
4004 ad->attr_mode = VT_INT + 1;
4005 break;
4006 default:
4007 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok, NULL));
4008 break;
4010 next();
4011 skip(')');
4012 break;
4013 case TOK_DLLEXPORT:
4014 ad->a.dllexport = 1;
4015 break;
4016 case TOK_NODECORATE:
4017 ad->a.nodecorate = 1;
4018 break;
4019 case TOK_DLLIMPORT:
4020 ad->a.dllimport = 1;
4021 break;
4022 default:
4023 tcc_warning_c(warn_unsupported)("'%s' attribute ignored", get_tok_str(t, NULL));
4024 /* skip parameters */
4025 if (tok == '(') {
4026 int parenthesis = 0;
4027 do {
4028 if (tok == '(')
4029 parenthesis++;
4030 else if (tok == ')')
4031 parenthesis--;
4032 next();
4033 } while (parenthesis && tok != -1);
4035 break;
4037 if (tok != ',')
4038 break;
4039 next();
4041 skip(')');
4042 skip(')');
4043 goto redo;
/* Find member V in struct/union TYPE, descending into anonymous
   sub-structs/unions.  On success *CUMOFS accumulates the byte offset
   of the member relative to TYPE and the member Sym is returned.
   A top-level call (V without SYM_FIELD) errors out when the field is
   missing; recursive calls return NULL instead. */
4046 static Sym * find_field (CType *type, int v, int *cumofs)
4048 Sym *s = type->ref;
4049 int v1 = v | SYM_FIELD;
4050 if (!(v & SYM_FIELD)) { /* top-level call */
4051 if ((type->t & VT_BTYPE) != VT_STRUCT)
4052 expect("struct or union");
4053 if (v < TOK_UIDENT)
4054 expect("field name");
4055 if (s->c < 0)
4056 tcc_error("dereferencing incomplete type '%s'",
4057 get_tok_str(s->v & ~SYM_STRUCT, 0));
4059 while ((s = s->next) != NULL) {
4060 if (s->v == v1) {
4061 *cumofs = s->c;
4062 return s;
4064 if ((s->type.t & VT_BTYPE) == VT_STRUCT
4065 && s->v >= (SYM_FIRST_ANOM | SYM_FIELD)) {
4066 /* try to find field in anonymous sub-struct/union */
4067 Sym *ret = find_field (&s->type, v1, cumofs);
4068 if (ret) {
4069 *cumofs += s->c;
4070 return ret;
4074 if (!(v & SYM_FIELD))
4075 tcc_error("field not found: %s", get_tok_str(v, NULL));
4076 return s;
/* Detect duplicate member names (including those pulled in from
   anonymous sub-structs).  Uses the SYM_FIELD bit in the identifier
   table as a "seen" marker: called once with check=1 to flag
   duplicates, then again with check=0 to toggle the marks back off. */
4079 static void check_fields (CType *type, int check)
4081 Sym *s = type->ref;
4083 while ((s = s->next) != NULL) {
4084 int v = s->v & ~SYM_FIELD;
4085 if (v < SYM_FIRST_ANOM) {
4086 TokenSym *ts = table_ident[v - TOK_IDENT];
4087 if (check && (ts->tok & SYM_FIELD))
4088 tcc_error("duplicate member '%s'", get_tok_str(v, NULL));
4089 ts->tok ^= SYM_FIELD;
4090 } else if ((s->type.t & VT_BTYPE) == VT_STRUCT)
4091 check_fields (&s->type, check);
/* Compute field offsets, struct size and alignment for TYPE, honoring
   attributes in AD, #pragma pack, and either PCC/GCC or MS bit-field
   layout rules (tcc_state->ms_bitfields).  A second pass then rewrites
   bit-field access types so each field can be loaded/stored with a
   single scalar access where possible (f->auxtype). */
4095 static void struct_layout(CType *type, AttributeDef *ad)
4097 int size, align, maxalign, offset, c, bit_pos, bit_size;
4098 int packed, a, bt, prevbt, prev_bit_size;
4099 int pcc = !tcc_state->ms_bitfields;
4100 int pragma_pack = *tcc_state->pack_stack_ptr;
4101 Sym *f;
4103 maxalign = 1;
4104 offset = 0;
4105 c = 0;
4106 bit_pos = 0;
4107 prevbt = VT_STRUCT; /* make it never match */
4108 prev_bit_size = 0;
4110 //#define BF_DEBUG
4112 for (f = type->ref->next; f; f = f->next) {
4113 if (f->type.t & VT_BITFIELD)
4114 bit_size = BIT_SIZE(f->type.t);
4115 else
4116 bit_size = -1;
4117 size = type_size(&f->type, &align);
4118 a = f->a.aligned ? 1 << (f->a.aligned - 1) : 0;
4119 packed = 0;
4121 if (pcc && bit_size == 0) {
4122 /* in pcc mode, packing does not affect zero-width bitfields */
4124 } else {
4125 /* in pcc mode, attribute packed overrides if set. */
4126 if (pcc && (f->a.packed || ad->a.packed))
4127 align = packed = 1;
4129 /* pragma pack overrides align if lesser and packs bitfields always */
4130 if (pragma_pack) {
4131 packed = 1;
4132 if (pragma_pack < align)
4133 align = pragma_pack;
4134 /* in pcc mode pragma pack also overrides individual align */
4135 if (pcc && pragma_pack < a)
4136 a = 0;
4139 /* some individual align was specified */
4140 if (a)
4141 align = a;
4143 if (type->ref->type.t == VT_UNION) {
4144 if (pcc && bit_size >= 0)
4145 size = (bit_size + 7) >> 3;
4146 offset = 0;
4147 if (size > c)
4148 c = size;
4150 } else if (bit_size < 0) {
/* plain (non bit-field) struct member */
4151 if (pcc)
4152 c += (bit_pos + 7) >> 3;
4153 c = (c + align - 1) & -align;
4154 offset = c;
4155 if (size > 0)
4156 c += size;
4157 bit_pos = 0;
4158 prevbt = VT_STRUCT;
4159 prev_bit_size = 0;
4161 } else {
4162 /* A bit-field. Layout is more complicated. There are two
4163 options: PCC (GCC) compatible and MS compatible */
4164 if (pcc) {
4165 /* In PCC layout a bit-field is placed adjacent to the
4166 preceding bit-fields, except if:
4167 - it has zero-width
4168 - an individual alignment was given
4169 - it would overflow its base type container and
4170 there is no packing */
4171 if (bit_size == 0) {
4172 new_field:
4173 c = (c + ((bit_pos + 7) >> 3) + align - 1) & -align;
4174 bit_pos = 0;
4175 } else if (f->a.aligned) {
4176 goto new_field;
4177 } else if (!packed) {
4178 int a8 = align * 8;
4179 int ofs = ((c * 8 + bit_pos) % a8 + bit_size + a8 - 1) / a8;
4180 if (ofs > size / align)
4181 goto new_field;
4184 /* in pcc mode, long long bitfields have type int if they fit */
4185 if (size == 8 && bit_size <= 32)
4186 f->type.t = (f->type.t & ~VT_BTYPE) | VT_INT, size = 4;
4188 while (bit_pos >= align * 8)
4189 c += align, bit_pos -= align * 8;
4190 offset = c;
4192 /* In PCC layout named bit-fields influence the alignment
4193 of the containing struct using the base types alignment,
4194 except for packed fields (which here have correct align). */
4195 if (f->v & SYM_FIRST_ANOM
4196 // && bit_size // ??? gcc on ARM/rpi does that
4198 align = 1;
4200 } else {
4201 bt = f->type.t & VT_BTYPE;
4202 if ((bit_pos + bit_size > size * 8)
4203 || (bit_size > 0) == (bt != prevbt)
4205 c = (c + align - 1) & -align;
4206 offset = c;
4207 bit_pos = 0;
4208 /* In MS bitfield mode a bit-field run always uses
4209 at least as many bits as the underlying type.
4210 To start a new run it's also required that this
4211 or the last bit-field had non-zero width. */
4212 if (bit_size || prev_bit_size)
4213 c += size;
4215 /* In MS layout the records alignment is normally
4216 influenced by the field, except for a zero-width
4217 field at the start of a run (but by further zero-width
4218 fields it is again). */
4219 if (bit_size == 0 && prevbt != bt)
4220 align = 1;
4221 prevbt = bt;
4222 prev_bit_size = bit_size;
4225 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
4226 | (bit_pos << VT_STRUCT_SHIFT);
4227 bit_pos += bit_size;
4229 if (align > maxalign)
4230 maxalign = align;
4232 #ifdef BF_DEBUG
4233 printf("set field %s offset %-2d size %-2d align %-2d",
4234 get_tok_str(f->v & ~SYM_FIELD, NULL), offset, size, align);
4235 if (f->type.t & VT_BITFIELD) {
4236 printf(" pos %-2d bits %-2d",
4237 BIT_POS(f->type.t),
4238 BIT_SIZE(f->type.t)
4241 printf("\n");
4242 #endif
4244 f->c = offset;
4245 f->r = 0;
4248 if (pcc)
4249 c += (bit_pos + 7) >> 3;
4251 /* store size and alignment */
4252 a = bt = ad->a.aligned ? 1 << (ad->a.aligned - 1) : 1;
4253 if (a < maxalign)
4254 a = maxalign;
4255 type->ref->r = a;
4256 if (pragma_pack && pragma_pack < maxalign && 0 == pcc) {
4257 /* can happen if individual align for some member was given. In
4258 this case MSVC ignores maxalign when aligning the size */
4259 a = pragma_pack;
4260 if (a < bt)
4261 a = bt;
4263 c = (c + a - 1) & -a;
4264 type->ref->c = c;
4266 #ifdef BF_DEBUG
4267 printf("struct size %-2d align %-2d\n\n", c, a), fflush(stdout);
4268 #endif
4270 /* check whether we can access bitfields by their type */
4271 for (f = type->ref->next; f; f = f->next) {
4272 int s, px, cx, c0;
4273 CType t;
4275 if (0 == (f->type.t & VT_BITFIELD))
4276 continue;
4277 f->type.ref = f;
4278 f->auxtype = -1;
4279 bit_size = BIT_SIZE(f->type.t);
4280 if (bit_size == 0)
4281 continue;
4282 bit_pos = BIT_POS(f->type.t);
4283 size = type_size(&f->type, &align);
4285 if (bit_pos + bit_size <= size * 8 && f->c + size <= c
4286 #ifdef TCC_TARGET_ARM
4287 && !(f->c & (align - 1))
4288 #endif
4290 continue;
4292 /* try to access the field using a different type */
/* grow the access type (byte -> short -> int -> llong) until the
   covering window stabilizes or fits */
4293 c0 = -1, s = align = 1;
4294 t.t = VT_BYTE;
4295 for (;;) {
4296 px = f->c * 8 + bit_pos;
4297 cx = (px >> 3) & -align;
4298 px = px - (cx << 3);
4299 if (c0 == cx)
4300 break;
4301 s = (px + bit_size + 7) >> 3;
4302 if (s > 4) {
4303 t.t = VT_LLONG;
4304 } else if (s > 2) {
4305 t.t = VT_INT;
4306 } else if (s > 1) {
4307 t.t = VT_SHORT;
4308 } else {
4309 t.t = VT_BYTE;
4311 s = type_size(&t, &align);
4312 c0 = cx;
4315 if (px + bit_size <= s * 8 && cx + s <= c
4316 #ifdef TCC_TARGET_ARM
4317 && !(cx & (align - 1))
4318 #endif
4320 /* update offset and bit position */
4321 f->c = cx;
4322 bit_pos = px;
4323 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
4324 | (bit_pos << VT_STRUCT_SHIFT);
4325 if (s != size)
4326 f->auxtype = t.t;
4327 #ifdef BF_DEBUG
4328 printf("FIX field %s offset %-2d size %-2d align %-2d "
4329 "pos %-2d bits %-2d\n",
4330 get_tok_str(f->v & ~SYM_FIELD, NULL),
4331 cx, s, align, px, bit_size);
4332 #endif
4333 } else {
4334 /* fall back to load/store single-byte wise */
4335 f->auxtype = VT_STRUCT;
4336 #ifdef BF_DEBUG
4337 printf("FIX field %s : load byte-wise\n",
4338 get_tok_str(f->v & ~SYM_FIELD, NULL));
4339 #endif
4344 static void do_Static_assert(void);
4346 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
/* Parses a tag (possibly anonymous), looks it up / pushes it on the
   symbol stack, then, if a '{' body follows, parses either the
   enumerator list (choosing the smallest fitting integral type) or the
   member declarations and runs struct_layout().  Resulting type is
   stored into *TYPE. */
4347 static void struct_decl(CType *type, int u)
4349 int v, c, size, align, flexible;
4350 int bit_size, bsize, bt;
4351 Sym *s, *ss, **ps;
4352 AttributeDef ad, ad1;
4353 CType type1, btype;
4355 memset(&ad, 0, sizeof ad);
4356 next();
4357 parse_attribute(&ad);
4358 if (tok != '{') {
4359 v = tok;
4360 next();
4361 /* struct already defined ? return it */
4362 if (v < TOK_IDENT)
4363 expect("struct/union/enum name");
4364 s = struct_find(v);
4365 if (s && (s->sym_scope == local_scope || tok != '{')) {
4366 if (u == s->type.t)
4367 goto do_decl;
4368 if (u == VT_ENUM && IS_ENUM(s->type.t))
4369 goto do_decl;
4370 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
4372 } else {
4373 v = anon_sym++;
4375 /* Record the original enum/struct/union token. */
4376 type1.t = u == VT_ENUM ? u | VT_INT | VT_UNSIGNED : u;
4377 type1.ref = NULL;
4378 /* we put an undefined size for struct/union */
4379 s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
4380 s->r = 0; /* default alignment is zero as gcc */
4381 do_decl:
4382 type->t = s->type.t;
4383 type->ref = s;
4385 if (tok == '{') {
4386 next();
4387 if (s->c != -1)
4388 tcc_error("struct/union/enum already defined");
4389 s->c = -2;
4390 /* cannot be empty */
4391 /* non empty enums are not allowed */
4392 ps = &s->next;
4393 if (u == VT_ENUM) {
/* ll: current enumerator value; pl/nl: max/min seen so far */
4394 long long ll = 0, pl = 0, nl = 0;
4395 CType t;
4396 t.ref = s;
4397 /* enum symbols have static storage */
4398 t.t = VT_INT|VT_STATIC|VT_ENUM_VAL;
4399 for(;;) {
4400 v = tok;
4401 if (v < TOK_UIDENT)
4402 expect("identifier");
4403 ss = sym_find(v);
4404 if (ss && !local_stack)
4405 tcc_error("redefinition of enumerator '%s'",
4406 get_tok_str(v, NULL));
4407 next();
4408 if (tok == '=') {
4409 next();
4410 ll = expr_const64();
4412 ss = sym_push(v, &t, VT_CONST, 0);
4413 ss->enum_val = ll;
4414 *ps = ss, ps = &ss->next;
4415 if (ll < nl)
4416 nl = ll;
4417 if (ll > pl)
4418 pl = ll;
4419 if (tok != ',')
4420 break;
4421 next();
4422 ll++;
4423 /* NOTE: we accept a trailing comma */
4424 if (tok == '}')
4425 break;
4427 skip('}');
4428 /* set integral type of the enum */
4429 t.t = VT_INT;
4430 if (nl >= 0) {
4431 if (pl != (unsigned)pl)
4432 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4433 t.t |= VT_UNSIGNED;
4434 } else if (pl != (int)pl || nl != (int)nl)
4435 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4436 s->type.t = type->t = t.t | VT_ENUM;
4437 s->c = 0;
4438 /* set type for enum members */
4439 for (ss = s->next; ss; ss = ss->next) {
4440 ll = ss->enum_val;
4441 if (ll == (int)ll) /* default is int if it fits */
4442 continue;
4443 if (t.t & VT_UNSIGNED) {
4444 ss->type.t |= VT_UNSIGNED;
4445 if (ll == (unsigned)ll)
4446 continue;
4448 ss->type.t = (ss->type.t & ~VT_BTYPE)
4449 | (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4451 } else {
/* struct/union member list */
4452 c = 0;
4453 flexible = 0;
4454 while (tok != '}') {
4455 if (!parse_btype(&btype, &ad1, 0)) {
4456 if (tok == TOK_STATIC_ASSERT) {
4457 do_Static_assert();
4458 continue;
4460 skip(';');
4461 continue;
4463 while (1) {
4464 if (flexible)
4465 tcc_error("flexible array member '%s' not at the end of struct",
4466 get_tok_str(v, NULL));
4467 bit_size = -1;
4468 v = 0;
4469 type1 = btype;
4470 if (tok != ':') {
4471 if (tok != ';')
4472 type_decl(&type1, &ad1, &v, TYPE_DIRECT);
4473 if (v == 0) {
4474 if ((type1.t & VT_BTYPE) != VT_STRUCT)
4475 expect("identifier");
4476 else {
4477 int v = btype.ref->v;
4478 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
4479 if (tcc_state->ms_extensions == 0)
4480 expect("identifier");
4484 if (type_size(&type1, &align) < 0) {
4485 if ((u == VT_STRUCT) && (type1.t & VT_ARRAY) && c)
4486 flexible = 1;
4487 else
4488 tcc_error("field '%s' has incomplete type",
4489 get_tok_str(v, NULL));
4491 if ((type1.t & VT_BTYPE) == VT_FUNC ||
4492 (type1.t & VT_BTYPE) == VT_VOID ||
4493 (type1.t & VT_STORAGE))
4494 tcc_error("invalid type for '%s'",
4495 get_tok_str(v, NULL));
4497 if (tok == ':') {
4498 next();
4499 bit_size = expr_const();
4500 /* XXX: handle v = 0 case for messages */
4501 if (bit_size < 0)
4502 tcc_error("negative width in bit-field '%s'",
4503 get_tok_str(v, NULL));
4504 if (v && bit_size == 0)
4505 tcc_error("zero width for bit-field '%s'",
4506 get_tok_str(v, NULL));
4507 parse_attribute(&ad1);
4509 size = type_size(&type1, &align);
4510 if (bit_size >= 0) {
4511 bt = type1.t & VT_BTYPE;
4512 if (bt != VT_INT &&
4513 bt != VT_BYTE &&
4514 bt != VT_SHORT &&
4515 bt != VT_BOOL &&
4516 bt != VT_LLONG)
4517 tcc_error("bitfields must have scalar type");
4518 bsize = size * 8;
4519 if (bit_size > bsize) {
4520 tcc_error("width of '%s' exceeds its type",
4521 get_tok_str(v, NULL));
4522 } else if (bit_size == bsize
4523 && !ad.a.packed && !ad1.a.packed) {
4524 /* no need for bit fields */
4526 } else if (bit_size == 64) {
4527 tcc_error("field width 64 not implemented");
4528 } else {
4529 type1.t = (type1.t & ~VT_STRUCT_MASK)
4530 | VT_BITFIELD
4531 | (bit_size << (VT_STRUCT_SHIFT + 6));
4534 if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
4535 /* Remember we've seen a real field to check
4536 for placement of flexible array member. */
4537 c = 1;
4539 /* If member is a struct or bit-field, enforce
4540 placing into the struct (as anonymous). */
4541 if (v == 0 &&
4542 ((type1.t & VT_BTYPE) == VT_STRUCT ||
4543 bit_size >= 0)) {
4544 v = anon_sym++;
4546 if (v) {
4547 ss = sym_push(v | SYM_FIELD, &type1, 0, 0);
4548 ss->a = ad1.a;
4549 *ps = ss;
4550 ps = &ss->next;
4552 if (tok == ';' || tok == TOK_EOF)
4553 break;
4554 skip(',');
4556 skip(';');
4558 skip('}');
4559 parse_attribute(&ad);
4560 if (ad.cleanup_func) {
4561 tcc_warning("attribute '__cleanup__' ignored on type");
4563 check_fields(type, 1);
4564 check_fields(type, 0);
4565 struct_layout(type, &ad);
4566 if (debug_modes)
4567 tcc_debug_fix_anon(tcc_state, type);
/* Merge the symbol/function attributes of S into AD. */
4572 static void sym_to_attr(AttributeDef *ad, Sym *s)
4574 merge_symattr(&ad->a, &s->a);
4575 merge_funcattr(&ad->f, &s->f);
4578 /* Add type qualifiers to a type. If the type is an array then the qualifiers
4579 are added to the element type, copied because it could be a typedef. */
4580 static void parse_btype_qualify(CType *type, int qualifiers)
4582 while (type->t & VT_ARRAY) {
/* copy the element Sym so a shared typedef is not mutated */
4583 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
4584 type = &type->ref->type;
4586 type->t |= qualifiers;
4589 /* return 0 if no type declaration. otherwise, return the basic type
4590 and skip it.
/* Accumulates basic type keywords, qualifiers, storage classes and
   attributes into *TYPE / *AD.  bt/st track the base and size
   specifiers seen so far to diagnose invalid combinations such as
   "short char".  IGNORE_LABEL suppresses typedef lookup when the
   identifier turns out to be a label ("ident:"). */
4592 static int parse_btype(CType *type, AttributeDef *ad, int ignore_label)
4594 int t, u, bt, st, type_found, typespec_found, g, n;
4595 Sym *s;
4596 CType type1;
4598 memset(ad, 0, sizeof(AttributeDef));
4599 type_found = 0;
4600 typespec_found = 0;
4601 t = VT_INT;
4602 bt = st = -1;
4603 type->ref = NULL;
4605 while(1) {
4606 switch(tok) {
4607 case TOK_EXTENSION:
4608 /* currently, we really ignore extension */
4609 next();
4610 continue;
4612 /* basic types */
4613 case TOK_CHAR:
4614 u = VT_BYTE;
4615 basic_type:
4616 next();
4617 basic_type1:
4618 if (u == VT_SHORT || u == VT_LONG) {
4619 if (st != -1 || (bt != -1 && bt != VT_INT))
4620 tmbt: tcc_error("too many basic types");
4621 st = u;
4622 } else {
4623 if (bt != -1 || (st != -1 && u != VT_INT))
4624 goto tmbt;
4625 bt = u;
4627 if (u != VT_INT)
4628 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
4629 typespec_found = 1;
4630 break;
4631 case TOK_VOID:
4632 u = VT_VOID;
4633 goto basic_type;
4634 case TOK_SHORT:
4635 u = VT_SHORT;
4636 goto basic_type;
4637 case TOK_INT:
4638 u = VT_INT;
4639 goto basic_type;
4640 case TOK_ALIGNAS:
4641 { int n;
4642 AttributeDef ad1;
4643 next();
4644 skip('(');
4645 memset(&ad1, 0, sizeof(AttributeDef));
4646 if (parse_btype(&type1, &ad1, 0)) {
4647 type_decl(&type1, &ad1, &n, TYPE_ABSTRACT);
4648 if (ad1.a.aligned)
4649 n = 1 << (ad1.a.aligned - 1);
4650 else
4651 type_size(&type1, &n);
4652 } else {
4653 n = expr_const();
4654 if (n < 0 || (n & (n - 1)) != 0)
4655 tcc_error("alignment must be a positive power of two");
4657 skip(')');
4658 ad->a.aligned = exact_log2p1(n);
4660 continue;
4661 case TOK_LONG:
4662 if ((t & VT_BTYPE) == VT_DOUBLE) {
4663 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
4664 } else if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
4665 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LLONG;
4666 } else {
4667 u = VT_LONG;
4668 goto basic_type;
4670 next();
4671 break;
4672 #ifdef TCC_TARGET_ARM64
4673 case TOK_UINT128:
4674 /* GCC's __uint128_t appears in some Linux header files. Make it a
4675 synonym for long double to get the size and alignment right. */
4676 u = VT_LDOUBLE;
4677 goto basic_type;
4678 #endif
4679 case TOK_BOOL:
4680 u = VT_BOOL;
4681 goto basic_type;
4682 case TOK_COMPLEX:
4683 tcc_error("_Complex is not yet supported");
4684 case TOK_FLOAT:
4685 u = VT_FLOAT;
4686 goto basic_type;
4687 case TOK_DOUBLE:
4688 if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
4689 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
4690 } else {
4691 u = VT_DOUBLE;
4692 goto basic_type;
4694 next();
4695 break;
4696 case TOK_ENUM:
4697 struct_decl(&type1, VT_ENUM);
4698 basic_type2:
4699 u = type1.t;
4700 type->ref = type1.ref;
4701 goto basic_type1;
4702 case TOK_STRUCT:
4703 struct_decl(&type1, VT_STRUCT);
4704 goto basic_type2;
4705 case TOK_UNION:
4706 struct_decl(&type1, VT_UNION);
4707 goto basic_type2;
4709 /* type modifiers */
4710 case TOK__Atomic:
4711 next();
4712 type->t = t;
4713 parse_btype_qualify(type, VT_ATOMIC);
4714 t = type->t;
4715 if (tok == '(') {
/* _Atomic(type-name) form */
4716 parse_expr_type(&type1);
4717 /* remove all storage modifiers except typedef */
4718 type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
4719 if (type1.ref)
4720 sym_to_attr(ad, type1.ref);
4721 goto basic_type2;
4723 break;
4724 case TOK_CONST1:
4725 case TOK_CONST2:
4726 case TOK_CONST3:
4727 type->t = t;
4728 parse_btype_qualify(type, VT_CONSTANT);
4729 t = type->t;
4730 next();
4731 break;
4732 case TOK_VOLATILE1:
4733 case TOK_VOLATILE2:
4734 case TOK_VOLATILE3:
4735 type->t = t;
4736 parse_btype_qualify(type, VT_VOLATILE);
4737 t = type->t;
4738 next();
4739 break;
4740 case TOK_SIGNED1:
4741 case TOK_SIGNED2:
4742 case TOK_SIGNED3:
4743 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
4744 tcc_error("signed and unsigned modifier");
4745 t |= VT_DEFSIGN;
4746 next();
4747 typespec_found = 1;
4748 break;
4749 case TOK_REGISTER:
4750 case TOK_AUTO:
4751 case TOK_RESTRICT1:
4752 case TOK_RESTRICT2:
4753 case TOK_RESTRICT3:
4754 next();
4755 break;
4756 case TOK_UNSIGNED:
4757 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
4758 tcc_error("signed and unsigned modifier");
4759 t |= VT_DEFSIGN | VT_UNSIGNED;
4760 next();
4761 typespec_found = 1;
4762 break;
4764 /* storage */
4765 case TOK_EXTERN:
4766 g = VT_EXTERN;
4767 goto storage;
4768 case TOK_STATIC:
4769 g = VT_STATIC;
4770 goto storage;
4771 case TOK_TYPEDEF:
4772 g = VT_TYPEDEF;
4773 goto storage;
4774 storage:
4775 if (t & (VT_EXTERN|VT_STATIC|VT_TYPEDEF) & ~g)
4776 tcc_error("multiple storage classes");
4777 t |= g;
4778 next();
4779 break;
4780 case TOK_INLINE1:
4781 case TOK_INLINE2:
4782 case TOK_INLINE3:
4783 t |= VT_INLINE;
4784 next();
4785 break;
4786 case TOK_NORETURN3:
4787 next();
4788 ad->f.func_noreturn = 1;
4789 break;
4790 /* GNUC attribute */
4791 case TOK_ATTRIBUTE1:
4792 case TOK_ATTRIBUTE2:
4793 parse_attribute(ad);
4794 if (ad->attr_mode) {
4795 u = ad->attr_mode -1;
4796 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
4798 continue;
4799 /* GNUC typeof */
4800 case TOK_TYPEOF1:
4801 case TOK_TYPEOF2:
4802 case TOK_TYPEOF3:
4803 next();
4804 parse_expr_type(&type1);
4805 /* remove all storage modifiers except typedef */
4806 type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
4807 if (type1.ref)
4808 sym_to_attr(ad, type1.ref);
4809 goto basic_type2;
4810 case TOK_THREAD_LOCAL:
4811 tcc_error("_Thread_local is not implemented");
4812 default:
4813 if (typespec_found)
4814 goto the_end;
4815 s = sym_find(tok);
4816 if (!s || !(s->type.t & VT_TYPEDEF))
4817 goto the_end;
4819 n = tok, next();
4820 if (tok == ':' && ignore_label) {
4821 /* ignore if it's a label */
4822 unget_tok(n);
4823 goto the_end;
4826 t &= ~(VT_BTYPE|VT_LONG);
4827 u = t & ~(VT_CONSTANT | VT_VOLATILE), t ^= u;
4828 type->t = (s->type.t & ~VT_TYPEDEF) | u;
4829 type->ref = s->type.ref;
4830 if (t)
4831 parse_btype_qualify(type, t);
4832 t = type->t;
4833 /* get attributes from typedef */
4834 sym_to_attr(ad, s);
4835 typespec_found = 1;
4836 st = bt = -2;
4837 break;
4839 type_found = 1;
4841 the_end:
4842 if (tcc_state->char_is_unsigned) {
4843 if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
4844 t |= VT_UNSIGNED;
4846 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
4847 bt = t & (VT_BTYPE|VT_LONG);
4848 if (bt == VT_LONG)
4849 t |= LONG_SIZE == 8 ? VT_LLONG : VT_INT;
4850 #ifdef TCC_USING_DOUBLE_FOR_LDOUBLE
4851 if (bt == VT_LDOUBLE)
4852 t = (t & ~(VT_BTYPE|VT_LONG)) | (VT_DOUBLE|VT_LONG);
4853 #endif
4854 type->t = t;
4855 return type_found;
4858 /* convert a function parameter type (array to pointer and function to
4859 function pointer) */
4860 static inline void convert_parameter_type(CType *pt)
4862 /* remove const and volatile qualifiers (XXX: const could be used
4863 to indicate a const function parameter */
4864 pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
4865 /* array must be transformed to pointer according to ANSI C */
4866 pt->t &= ~(VT_ARRAY | VT_VLA);
4867 if ((pt->t & VT_BTYPE) == VT_FUNC) {
4868 mk_pointer(pt);
/* Parse '(' followed by concatenated string literals, as used by
   asm("...") — returns the shared string buffer (see parse_mult_str). */
4872 ST_FUNC CString* parse_asm_str(void)
4874 skip('(');
4875 return parse_mult_str("string constant");
4878 /* Parse an asm label and return the token */
/* Consumes 'asm ( "name" )' after a declarator and interns the string
   as a constant token so it can be attached to the symbol later. */
4879 static int asm_label_instr(void)
4881 int v;
4882 char *astr;
4884 next();
4885 astr = parse_asm_str()->data;
4886 skip(')');
4887 #ifdef ASM_DEBUG
4888 printf("asm_alias: \"%s\"\n", astr);
4889 #endif
4890 v = tok_alloc_const(astr);
4891 return v;
/* Parse the "post" part of a declarator: a function parameter list
   "( ... )" or an array declarator "[ ... ]", updating 'type' in place.
   'storage' carries the storage bits of the declaration, 'td' the
   TYPE_* parse-mode flags.  Returns 0 when '(' actually introduced a
   nested (recursive) declarator the caller must parse itself,
   1 otherwise. */
static int post_type(CType *type, AttributeDef *ad, int storage, int td)
{
    int n, l, t1, arg_size, align;
    Sym **plast, *s, *first;
    AttributeDef ad1;
    CType pt;
    TokenString *vla_array_tok = NULL;
    int *vla_array_str = NULL;

    if (tok == '(') {
        /* function type, or recursive declarator (return if so) */
        next();
        if (TYPE_DIRECT == (td & (TYPE_DIRECT|TYPE_ABSTRACT)))
            return 0;
        if (tok == ')')
            l = 0;
        else if (parse_btype(&pt, &ad1, 0))
            l = FUNC_NEW;
        else if (td & (TYPE_DIRECT|TYPE_ABSTRACT)) {
            /* not a type: this '(' starts a nested declarator */
            merge_attr (ad, &ad1);
            return 0;
        } else
            l = FUNC_OLD;

        first = NULL;
        plast = &first;
        arg_size = 0;
        ++local_scope;
        if (l) {
            for(;;) {
                /* read param name and compute offset */
                if (l != FUNC_OLD) {
                    if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
                        break;
                    type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT | TYPE_PARAM);
                    if ((pt.t & VT_BTYPE) == VT_VOID)
                        tcc_error("parameter declared as void");
                    if (n == 0)
                        n = SYM_FIELD;
                } else {
                    n = tok;
                    pt.t = VT_VOID; /* invalid type */
                    pt.ref = NULL;
                    next();
                }
                if (n < TOK_UIDENT)
                    expect("identifier");
                convert_parameter_type(&pt);
                arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
                /* these symbols may be evaluated for VLArrays (see below, under
                   nocode_wanted) which is why we push them here as normal symbols
                   temporarily.  Example: int func(int a, int b[++a]); */
                s = sym_push(n, &pt, VT_LOCAL|VT_LVAL, 0);
                *plast = s;
                plast = &s->next;
                if (tok == ')')
                    break;
                skip(',');
                if (l == FUNC_NEW && tok == TOK_DOTS) {
                    l = FUNC_ELLIPSIS;
                    next();
                    break;
                }
                if (l == FUNC_NEW && !parse_btype(&pt, &ad1, 0))
                    tcc_error("invalid type");
            }
        } else
            /* if no parameters, then old type prototype */
            l = FUNC_OLD;
        skip(')');
        /* remove parameter symbols from token table, keep on stack */
        if (first) {
            sym_pop(local_stack ? &local_stack : &global_stack, first->prev, 1);
            for (s = first; s; s = s->next)
                s->v |= SYM_FIELD;
        }
        --local_scope;
        /* NOTE: const is ignored in returned type as it has a special
           meaning in gcc / C++ */
        type->t &= ~VT_CONSTANT;
        /* some ancient pre-K&R C allows a function to return an array
           and the array brackets to be put after the arguments, such
           that "int c()[]" means something like "int[] c()" */
        if (tok == '[') {
            next();
            skip(']'); /* only handle simple "[]" */
            mk_pointer(type);
        }
        /* we push a anonymous symbol which will contain the function prototype */
        ad->f.func_args = arg_size;
        ad->f.func_type = l;
        s = sym_push(SYM_FIELD, type, 0, 0);
        s->a = ad->a;
        s->f = ad->f;
        s->next = first;
        type->t = VT_FUNC;
        type->ref = s;
    } else if (tok == '[') {
        int saved_nocode_wanted = nocode_wanted;
        /* array definition */
        next();
        n = -1;
        t1 = 0;
        if (td & TYPE_PARAM) while (1) {
            /* XXX The optional type-quals and static should only be accepted
               in parameter decls.  The '*' as well, and then even only
               in prototypes (not function defs). */
            switch (tok) {
            case TOK_RESTRICT1: case TOK_RESTRICT2: case TOK_RESTRICT3:
            case TOK_CONST1:
            case TOK_VOLATILE1:
            case TOK_STATIC:
            case '*':
                next();
                continue;
            default:
                break;
            }
            if (tok != ']') {
                /* Code generation is not done now but has to be done
                   at start of function. Save code here for later use. */
                nocode_wanted = 1;
                skip_or_save_block(&vla_array_tok);
                unget_tok(0);
                vla_array_str = vla_array_tok->str;
                begin_macro(vla_array_tok, 2);
                next();
                gexpr();
                end_macro();
                next();
                goto check;
            }
            break;

        } else if (tok != ']') {
            if (!local_stack || (storage & VT_STATIC))
                vpushi(expr_const());
            else {
                /* VLAs (which can only happen with local_stack && !VT_STATIC)
                   length must always be evaluated, even under nocode_wanted,
                   so that its size slot is initialized (e.g. under sizeof
                   or typeof). */
                nocode_wanted = 0;
                gexpr();
            }
        check:
            if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
                /* constant size: record it, reject negative values */
                n = vtop->c.i;
                if (n < 0)
                    tcc_error("invalid array size");
            } else {
                /* non-constant size: must be an integer -> VLA */
                if (!is_integer_btype(vtop->type.t & VT_BTYPE))
                    tcc_error("size of variable length array should be an integer");
                n = 0;
                t1 = VT_VLA;
            }
        }
        skip(']');
        /* parse next post type */
        post_type(type, ad, storage, (td & ~(TYPE_DIRECT|TYPE_ABSTRACT)) | TYPE_NEST);

        if ((type->t & VT_BTYPE) == VT_FUNC)
            tcc_error("declaration of an array of functions");
        if ((type->t & VT_BTYPE) == VT_VOID
            || type_size(type, &align) < 0)
            tcc_error("declaration of an array of incomplete type elements");

        t1 |= type->t & VT_VLA;

        if (t1 & VT_VLA) {
            if (n < 0) {
                if (td & TYPE_NEST)
                    tcc_error("need explicit inner array size in VLAs");
            }
            else {
                /* reserve a local slot holding the runtime size and
                   store "element_count * element_size" into it */
                loc -= type_size(&int_type, &align);
                loc &= -align;
                n = loc;

                vpush_type_size(type, &align);
                gen_op('*');
                vset(&int_type, VT_LOCAL|VT_LVAL, n);
                vswap();
                vstore();
            }
        }
        if (n != -1)
            vpop();
        nocode_wanted = saved_nocode_wanted;

        /* we push an anonymous symbol which will contain the array
           element type */
        s = sym_push(SYM_FIELD, type, 0, n);
        type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
        type->ref = s;

        if (vla_array_str) {
            /* for function args, the top dimension is converted to pointer */
            if ((t1 & VT_VLA) && (td & TYPE_NEST))
                s->vla_array_str = vla_array_str;
            else
                tok_str_free_str(vla_array_str);
        }
    }
    return 1;
}
/* Parse a type declarator (except basic type), and return the type
   in 'type'. 'td' is a bitmask indicating which kind of type decl is
   expected. 'type' should contain the basic type. 'ad' is the
   attribute definition of the basic type. It can be modified by
   type_decl().  If this (possibly abstract) declarator is a pointer chain
   it returns the innermost pointed to type (equals *type, but is a different
   pointer), otherwise returns type itself, that's used for recursive calls. */
static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td)
{
    CType *post, *ret;
    int qualifiers, storage;

    /* recursive type, remove storage bits first, apply them later again */
    storage = type->t & VT_STORAGE;
    type->t &= ~VT_STORAGE;
    post = ret = type;

    /* consume any chain of '*' derivations with their qualifiers */
    while (tok == '*') {
        qualifiers = 0;
    redo:
        next();
        switch(tok) {
        case TOK__Atomic:
            qualifiers |= VT_ATOMIC;
            goto redo;
        case TOK_CONST1:
        case TOK_CONST2:
        case TOK_CONST3:
            qualifiers |= VT_CONSTANT;
            goto redo;
        case TOK_VOLATILE1:
        case TOK_VOLATILE2:
        case TOK_VOLATILE3:
            qualifiers |= VT_VOLATILE;
            goto redo;
        case TOK_RESTRICT1:
        case TOK_RESTRICT2:
        case TOK_RESTRICT3:
            /* 'restrict' is accepted but otherwise ignored here */
            goto redo;
        /* XXX: clarify attribute handling */
        case TOK_ATTRIBUTE1:
        case TOK_ATTRIBUTE2:
            parse_attribute(ad);
            break;
        }
        mk_pointer(type);
        type->t |= qualifiers;
        if (ret == type)
            /* innermost pointed to type is the one for the first derivation */
            ret = pointed_type(type);
    }

    if (tok == '(') {
        /* This is possibly a parameter type list for abstract declarators
           ('int ()'), use post_type for testing this. */
        if (!post_type(type, ad, 0, td)) {
            /* It's not, so it's a nested declarator, and the post operations
               apply to the innermost pointed to type (if any). */
            /* XXX: this is not correct to modify 'ad' at this point, but
               the syntax is not clear */
            parse_attribute(ad);
            post = type_decl(type, ad, v, td);
            skip(')');
        } else
            goto abstract;
    } else if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
        /* type identifier */
        *v = tok;
        next();
    } else {
    abstract:
        if (!(td & TYPE_ABSTRACT))
            expect("identifier");
        *v = 0;
    }
    /* the post part binds to the innermost nested declarator */
    post_type(post, ad, post != ret ? 0 : storage,
              td & ~(TYPE_DIRECT|TYPE_ABSTRACT));
    parse_attribute(ad);
    type->t |= storage;
    return ret;
}
/* indirection with full error checking and bound check */
ST_FUNC void indir(void)
{
    if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
        /* dereferencing a function designator is a no-op */
        if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
            return;
        expect("pointer");
    }
    /* load the pointer value itself if it is stored in memory */
    if (vtop->r & VT_LVAL)
        gv(RC_INT);
    vtop->type = *pointed_type(&vtop->type);
    /* Arrays and functions are never lvalues */
    if (!(vtop->type.t & (VT_ARRAY | VT_VLA))
        && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
        vtop->r |= VT_LVAL;
        /* if bound checking, the referenced pointer must be checked */
#ifdef CONFIG_TCC_BCHECK
        if (tcc_state->do_bounds_check)
            vtop->r |= VT_MUSTBOUND;
#endif
    }
}
/* pass a parameter to a function and do type checking and casting.
   'func' is the called function's prototype symbol, 'arg' the declared
   parameter the value on vtop is matched against (NULL past the last
   declared parameter). */
static void gfunc_param_typed(Sym *func, Sym *arg)
{
    int func_type;
    CType type;

    func_type = func->f.func_type;
    if (func_type == FUNC_OLD ||
        (func_type == FUNC_ELLIPSIS && arg == NULL)) {
        /* unprototyped or variadic tail: apply the default argument
           promotions */
        /* default casting : only need to convert float to double */
        if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
            gen_cast_s(VT_DOUBLE);
        } else if (vtop->type.t & VT_BITFIELD) {
            /* a bit-field value is passed as its underlying type */
            type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
            type.ref = vtop->type.ref;
            gen_cast(&type);
        } else if (vtop->r & VT_MUSTCAST) {
            force_charshort_cast();
        }
    } else if (arg == NULL) {
        tcc_error("too many arguments to function");
    } else {
        /* prototyped parameter: convert as if by assignment */
        type = arg->type;
        type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
        gen_assign_cast(&type);
    }
}
/* parse an expression and return its type without any side effect.
   'expr_fn' is the parsing function to use (e.g. gexpr or expr_eq);
   code generation is suppressed via nocode_wanted while it runs. */
static void expr_type(CType *type, void (*expr_fn)(void))
{
    nocode_wanted++;
    expr_fn();
    *type = vtop->type;
    vpop();
    nocode_wanted--;
}
/* parse an expression of the form '(type)' or '(expr)' and return its
   type */
static void parse_expr_type(CType *type)
{
    int n;
    AttributeDef ad;

    skip('(');
    if (parse_btype(type, &ad, 0)) {
        /* it is a type name: parse the (abstract) declarator part */
        type_decl(type, &ad, &n, TYPE_ABSTRACT);
    } else {
        /* otherwise evaluate the expression's type without side effects */
        expr_type(type, gexpr);
    }
    skip(')');
}
/* parse a type name (basic type plus abstract declarator) into 'type';
   it is an error if no type is present */
static void parse_type(CType *type)
{
    AttributeDef ad;
    int n;

    if (!parse_btype(type, &ad, 0)) {
        expect("type");
    }
    type_decl(type, &ad, &n, TYPE_ABSTRACT);
}
/* parse a builtin's parenthesized argument list according to the
   template 'args': 't' = type name, 'e' = plain expression,
   'v'/'V' = cast to (const) void*, 's'/'S' = cast to (const) char*,
   'i' = cast to int, 'l' = cast to size_t.  If 'nc' is set the
   arguments are parsed under nocode_wanted (no code emitted); the
   caller is expected to decrement it afterwards is not needed — it is
   decremented here after the closing ')'. */
static void parse_builtin_params(int nc, const char *args)
{
    char c, sep = '(';
    CType type;
    if (nc)
        nocode_wanted++;
    next();
    if (*args == 0)
        skip(sep);
    while ((c = *args++)) {
        skip(sep);
        sep = ',';
        if (c == 't') {
            parse_type(&type);
            vpush(&type);
            continue;
        }
        expr_eq();
        type.ref = NULL;
        type.t = 0;
        switch (c) {
        case 'e':
            continue;
        case 'V':
            type.t = VT_CONSTANT;
            /* fall through */
        case 'v':
            type.t |= VT_VOID;
            mk_pointer (&type);
            break;
        case 'S':
            type.t = VT_CONSTANT;
            /* fall through */
        case 's':
            type.t |= char_type.t;
            mk_pointer (&type);
            break;
        case 'i':
            type.t = VT_INT;
            break;
        case 'l':
            type.t = VT_SIZE_T;
            break;
        default:
            break;
        }
        gen_assign_cast(&type);
    }
    skip(')');
    if (nc)
        nocode_wanted--;
}
/* parse one of the __atomic_* builtins ('atok' is its token), check the
   arguments against the per-builtin template and emit a call to the
   size-suffixed runtime helper (e.g. "__atomic_load_4"). */
static void parse_atomic(int atok)
{
    int size, align, arg, t, save = 0;
    CType *atom, *atom_ptr, ct = {0};
    SValue store;
    char buf[40];
    static const char *const templates[] = {
        /*
         * Each entry consists of callback and function template.
         * The template represents argument types and return type.
         *
         * ? void (return-only)
         * b bool
         * a atomic
         * A read-only atomic
         * p pointer to memory
         * v value
         * l load pointer
         * s save pointer
         * m memory model
         */

        /* keep in order of appearance in tcctok.h: */
        /* __atomic_store */ "alm.?",
        /* __atomic_load */ "Asm.v",
        /* __atomic_exchange */ "alsm.v",
        /* __atomic_compare_exchange */ "aplbmm.b",
        /* __atomic_fetch_add */ "avm.v",
        /* __atomic_fetch_sub */ "avm.v",
        /* __atomic_fetch_or */ "avm.v",
        /* __atomic_fetch_xor */ "avm.v",
        /* __atomic_fetch_and */ "avm.v",
        /* __atomic_fetch_nand */ "avm.v",
        /* __atomic_add_fetch */ "avm.v",
        /* __atomic_sub_fetch */ "avm.v",
        /* __atomic_or_fetch */ "avm.v",
        /* __atomic_xor_fetch */ "avm.v",
        /* __atomic_and_fetch */ "avm.v",
        /* __atomic_nand_fetch */ "avm.v"
    };
    const char *template = templates[(atok - TOK___atomic_store)];

    atom = atom_ptr = NULL;
    size = 0; /* pacify compiler */
    next();
    skip('(');
    for (arg = 0;;) {
        expr_eq();
        switch (template[arg]) {
        case 'a':
        case 'A':
            /* the atomic object: must be an integer-sized scalar */
            atom_ptr = &vtop->type;
            if ((atom_ptr->t & VT_BTYPE) != VT_PTR)
                expect("pointer");
            atom = pointed_type(atom_ptr);
            size = type_size(atom, &align);
            if (size > 8
                || (size & (size - 1))
                || (atok > TOK___atomic_compare_exchange
                    && (0 == btype_size(atom->t & VT_BTYPE)
                        || (atom->t & VT_BTYPE) == VT_PTR)))
                expect("integral or integer-sized pointer target type");
            /* GCC does not care either: */
            /* if (!(atom->t & VT_ATOMIC))
                tcc_warning("pointer target declaration is missing '_Atomic'"); */
            break;

        case 'p':
            if ((vtop->type.t & VT_BTYPE) != VT_PTR
                || type_size(pointed_type(&vtop->type), &align) != size)
                tcc_error("pointer target type mismatch in argument %d", arg + 1);
            gen_assign_cast(atom_ptr);
            break;
        case 'v':
            gen_assign_cast(atom);
            break;
        case 'l':
            indir();
            gen_assign_cast(atom);
            break;
        case 's':
            /* remember where to store the helper's result afterwards */
            save = 1;
            indir();
            store = *vtop;
            vpop();
            break;
        case 'm':
            gen_assign_cast(&int_type);
            break;
        case 'b':
            ct.t = VT_BOOL;
            gen_assign_cast(&ct);
            break;
        }
        if ('.' == template[++arg])
            break;
        skip(',');
    }
    skip(')');

    /* the character after '.' encodes the return type */
    ct.t = VT_VOID;
    switch (template[arg + 1]) {
    case 'b':
        ct.t = VT_BOOL;
        break;
    case 'v':
        ct = *atom;
        break;
    }

    sprintf(buf, "%s_%d", get_tok_str(atok, 0), size);
    vpush_helper_func(tok_alloc_const(buf));
    vrott(arg - save + 1);
    gfunc_call(arg - save);

    vpush(&ct);
    PUT_R_RET(vtop, ct.t);
    t = ct.t & VT_BTYPE;
    if (t == VT_BYTE || t == VT_SHORT || t == VT_BOOL) {
#ifdef PROMOTE_RET
        vtop->r |= BFVAL(VT_MUSTCAST, 1);
#else
        vtop->type.t = VT_INT;
#endif
    }
    gen_cast(&ct);
    if (save) {
        /* write the result through the saved 's' pointer */
        vpush(&ct);
        *vtop = store;
        vswap();
        vstore();
    }
}
/* Parse a primary/unary expression followed by its postfix operations
   (++/--, member access, indexing, function calls); the result is left
   on the value stack (vtop). */
ST_FUNC void unary(void)
{
    int n, t, align, size, r;
    CType type;
    Sym *s;
    AttributeDef ad;

    /* generate line number info */
    if (debug_modes)
        tcc_debug_line(tcc_state), tcc_tcov_check_line (tcc_state, 1);

    type.ref = NULL;
    /* XXX: GCC 2.95.3 does not generate a table although it should be
       better here */
 tok_next:
    switch(tok) {
    case TOK_EXTENSION:
        next();
        goto tok_next;
    case TOK_LCHAR:
#ifdef TCC_TARGET_PE
        t = VT_SHORT|VT_UNSIGNED;
        goto push_tokc;
#endif
    case TOK_CINT:
    case TOK_CCHAR:
        t = VT_INT;
 push_tokc:
        /* push the literal's value (in tokc) as a constant of type t */
        type.t = t;
        vsetc(&type, VT_CONST, &tokc);
        next();
        break;
    case TOK_CUINT:
        t = VT_INT | VT_UNSIGNED;
        goto push_tokc;
    case TOK_CLLONG:
        t = VT_LLONG;
        goto push_tokc;
    case TOK_CULLONG:
        t = VT_LLONG | VT_UNSIGNED;
        goto push_tokc;
    case TOK_CFLOAT:
        t = VT_FLOAT;
        goto push_tokc;
    case TOK_CDOUBLE:
        t = VT_DOUBLE;
        goto push_tokc;
    case TOK_CLDOUBLE:
#ifdef TCC_USING_DOUBLE_FOR_LDOUBLE
        t = VT_DOUBLE | VT_LONG;
#else
        t = VT_LDOUBLE;
#endif
        goto push_tokc;
    case TOK_CLONG:
        t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG;
        goto push_tokc;
    case TOK_CULONG:
        t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG | VT_UNSIGNED;
        goto push_tokc;
    case TOK___FUNCTION__:
        if (!gnu_ext)
            goto tok_identifier;
        /* fall thru */
    case TOK___FUNC__:
        /* synthesize a string literal holding the current function name */
        tok = TOK_STR;
        cstr_reset(&tokcstr);
        cstr_cat(&tokcstr, funcname, 0);
        tokc.str.size = tokcstr.size;
        tokc.str.data = tokcstr.data;
        goto case_TOK_STR;
    case TOK_LSTR:
#ifdef TCC_TARGET_PE
        t = VT_SHORT | VT_UNSIGNED;
#else
        t = VT_INT;
#endif
        goto str_init;
    case TOK_STR:
    case_TOK_STR:
        /* string parsing */
        t = char_type.t;
    str_init:
        if (tcc_state->warn_write_strings & WARN_ON)
            t |= VT_CONSTANT;
        type.t = t;
        mk_pointer(&type);
        type.t |= VT_ARRAY;
        memset(&ad, 0, sizeof(AttributeDef));
        ad.section = rodata_section;
        decl_initializer_alloc(&type, &ad, VT_CONST, 2, 0, 0);
        break;
    case TOK_SOTYPE:
    case '(':
        t = tok;
        next();
        /* cast ? */
        if (parse_btype(&type, &ad, 0)) {
            type_decl(&type, &ad, &n, TYPE_ABSTRACT);
            skip(')');
            /* check ISOC99 compound literal */
            if (tok == '{') {
                /* data is allocated locally by default */
                if (global_expr)
                    r = VT_CONST;
                else
                    r = VT_LOCAL;
                /* all except arrays are lvalues */
                if (!(type.t & VT_ARRAY))
                    r |= VT_LVAL;
                memset(&ad, 0, sizeof(AttributeDef));
                decl_initializer_alloc(&type, &ad, r, 1, 0, 0);
            } else if (t == TOK_SOTYPE) { /* from sizeof/alignof (...) */
                vpush(&type);
                return;
            } else {
                unary();
                gen_cast(&type);
            }
        } else if (tok == '{') {
            int saved_nocode_wanted = nocode_wanted;
            if (CONST_WANTED && !NOEVAL_WANTED)
                expect("constant");
            if (0 == local_scope)
                tcc_error("statement expression outside of function");
            /* save all registers */
            save_regs(0);
            /* statement expression : we do not accept break/continue
               inside as GCC does.  We do retain the nocode_wanted state,
               as statement expressions can't ever be entered from the
               outside, so any reactivation of code emission (from labels
               or loop heads) can be disabled again after the end of it. */
            block(STMT_EXPR);
            /* If the statement expr can be entered, then we retain the current
               nocode_wanted state (from e.g. a 'return 0;' in the stmt-expr).
               If it can't be entered then the state is that from before the
               statement expression.  */
            if (saved_nocode_wanted)
                nocode_wanted = saved_nocode_wanted;
            skip(')');
        } else {
            gexpr();
            skip(')');
        }
        break;
    case '*':
        next();
        unary();
        indir();
        break;
    case '&':
        next();
        unary();
        /* functions names must be treated as function pointers,
           except for unary '&' and sizeof. Since we consider that
           functions are not lvalues, we only have to handle it
           there and in function calls. */
        /* arrays can also be used although they are not lvalues */
        if ((vtop->type.t & VT_BTYPE) != VT_FUNC &&
            !(vtop->type.t & (VT_ARRAY | VT_VLA)))
            test_lvalue();
        if (vtop->sym)
            vtop->sym->a.addrtaken = 1;
        mk_pointer(&vtop->type);
        gaddrof();
        break;
    case '!':
        next();
        unary();
        gen_test_zero(TOK_EQ);
        break;
    case '~':
        next();
        unary();
        vpushi(-1);
        gen_op('^');
        break;
    case '+':
        next();
        unary();
        if ((vtop->type.t & VT_BTYPE) == VT_PTR)
            tcc_error("pointer not accepted for unary plus");
        /* In order to force cast, we add zero, except for floating point
           where we really need an noop (otherwise -0.0 will be transformed
           into +0.0).  */
        if (!is_float(vtop->type.t)) {
            vpushi(0);
            gen_op('+');
        }
        break;
    case TOK_SIZEOF:
    case TOK_ALIGNOF1:
    case TOK_ALIGNOF2:
    case TOK_ALIGNOF3:
        t = tok;
        next();
        if (tok == '(')
            tok = TOK_SOTYPE;
        expr_type(&type, unary);
        if (t == TOK_SIZEOF) {
            vpush_type_size(&type, &align);
            gen_cast_s(VT_SIZE_T);
        } else {
            type_size(&type, &align);
            s = NULL;
            if (vtop[1].r & VT_SYM)
                s = vtop[1].sym; /* hack: accessing previous vtop */
            if (s && s->a.aligned)
                align = 1 << (s->a.aligned - 1);
            vpushs(align);
        }
        break;

    case TOK_builtin_expect:
        /* __builtin_expect is a no-op for now */
        parse_builtin_params(0, "ee");
        vpop();
        break;
    case TOK_builtin_types_compatible_p:
        parse_builtin_params(0, "tt");
        vtop[-1].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
        vtop[0].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
        n = is_compatible_types(&vtop[-1].type, &vtop[0].type);
        vtop -= 2;
        vpushi(n);
        break;
    case TOK_builtin_choose_expr:
        {
            int64_t c;
            next();
            skip('(');
            c = expr_const64();
            skip(',');
            /* parse both branches; the unchosen one is evaluated under
               nocode_wanted and its value popped */
            if (!c) {
                nocode_wanted++;
            }
            expr_eq();
            if (!c) {
                vpop();
                nocode_wanted--;
            }
            skip(',');
            if (c) {
                nocode_wanted++;
            }
            expr_eq();
            if (c) {
                vpop();
                nocode_wanted--;
            }
            skip(')');
        }
        break;
    case TOK_builtin_constant_p:
        parse_builtin_params(1, "e");
        n = 1;
        if ((vtop->r & (VT_VALMASK | VT_LVAL)) != VT_CONST
            || ((vtop->r & VT_SYM) && vtop->sym->a.addrtaken)
            )
            n = 0;
        vtop--;
        vpushi(n);
        break;
    case TOK_builtin_frame_address:
    case TOK_builtin_return_address:
        {
            int tok1 = tok;
            int level;
            next();
            skip('(');
            level = expr_const();
            if (level < 0)
                tcc_error("%s only takes positive integers", get_tok_str(tok1, 0));
            skip(')');
            type.t = VT_VOID;
            mk_pointer(&type);
            vset(&type, VT_LOCAL, 0);       /* local frame */
            while (level--) {
#ifdef TCC_TARGET_RISCV64
                vpushi(2*PTR_SIZE);
                gen_op('-');
#endif
                mk_pointer(&vtop->type);
                indir();                    /* -> parent frame */
            }
            if (tok1 == TOK_builtin_return_address) {
                // assume return address is just above frame pointer on stack
#ifdef TCC_TARGET_ARM
                vpushi(2*PTR_SIZE);
                gen_op('+');
#elif defined TCC_TARGET_RISCV64
                vpushi(PTR_SIZE);
                gen_op('-');
#else
                vpushi(PTR_SIZE);
                gen_op('+');
#endif
                mk_pointer(&vtop->type);
                indir();
            }
        }
        break;
#ifdef TCC_TARGET_RISCV64
    case TOK_builtin_va_start:
        parse_builtin_params(0, "ee");
        r = vtop->r & VT_VALMASK;
        if (r == VT_LLOCAL)
            r = VT_LOCAL;
        if (r != VT_LOCAL)
            tcc_error("__builtin_va_start expects a local variable");
        gen_va_start();
        vstore();
        break;
#endif
#ifdef TCC_TARGET_X86_64
#ifdef TCC_TARGET_PE
    case TOK_builtin_va_start:
        parse_builtin_params(0, "ee");
        r = vtop->r & VT_VALMASK;
        if (r == VT_LLOCAL)
            r = VT_LOCAL;
        if (r != VT_LOCAL)
            tcc_error("__builtin_va_start expects a local variable");
        vtop->r = r;
        vtop->type = char_pointer_type;
        vtop->c.i += 8;
        vstore();
        break;
#else
    case TOK_builtin_va_arg_types:
        parse_builtin_params(0, "t");
        vpushi(classify_x86_64_va_arg(&vtop->type));
        vswap();
        vpop();
        break;
#endif
#endif

#ifdef TCC_TARGET_ARM64
    case TOK_builtin_va_start: {
        parse_builtin_params(0, "ee");
        //xx check types
        gen_va_start();
        vpushi(0);
        vtop->type.t = VT_VOID;
        break;
    }
    case TOK_builtin_va_arg: {
        parse_builtin_params(0, "et");
        type = vtop->type;
        vpop();
        //xx check types
        gen_va_arg(&type);
        vtop->type = type;
        break;
    }
    case TOK___arm64_clear_cache: {
        parse_builtin_params(0, "ee");
        gen_clear_cache();
        vpushi(0);
        vtop->type.t = VT_VOID;
        break;
    }
#endif

    /* atomic operations */
    case TOK___atomic_store:
    case TOK___atomic_load:
    case TOK___atomic_exchange:
    case TOK___atomic_compare_exchange:
    case TOK___atomic_fetch_add:
    case TOK___atomic_fetch_sub:
    case TOK___atomic_fetch_or:
    case TOK___atomic_fetch_xor:
    case TOK___atomic_fetch_and:
    case TOK___atomic_fetch_nand:
    case TOK___atomic_add_fetch:
    case TOK___atomic_sub_fetch:
    case TOK___atomic_or_fetch:
    case TOK___atomic_xor_fetch:
    case TOK___atomic_and_fetch:
    case TOK___atomic_nand_fetch:
        parse_atomic(tok);
        break;

    /* pre operations */
    case TOK_INC:
    case TOK_DEC:
        t = tok;
        next();
        unary();
        inc(0, t);
        break;
    case '-':
        next();
        unary();
        if (is_float(vtop->type.t)) {
            gen_opif(TOK_NEG);
        } else {
            /* integer negation as 0 - x */
            vpushi(0);
            vswap();
            gen_op('-');
        }
        break;
    case TOK_LAND:
        if (!gnu_ext)
            goto tok_identifier;
        next();
        /* allow to take the address of a label */
        if (tok < TOK_UIDENT)
            expect("label identifier");
        s = label_find(tok);
        if (!s) {
            s = label_push(&global_label_stack, tok, LABEL_FORWARD);
        } else {
            if (s->r == LABEL_DECLARED)
                s->r = LABEL_FORWARD;
        }
        if ((s->type.t & VT_BTYPE) != VT_PTR) {
            s->type.t = VT_VOID;
            mk_pointer(&s->type);
            s->type.t |= VT_STATIC;
        }
        vpushsym(&s->type, s);
        next();
        break;

    case TOK_GENERIC:
    {
        CType controlling_type;
        int has_default = 0;
        int has_match = 0;
        int learn = 0;
        TokenString *str = NULL;
        int saved_nocode_wanted = nocode_wanted;
        nocode_wanted &= ~CONST_WANTED_MASK;

        next();
        skip('(');
        expr_type(&controlling_type, expr_eq);
        convert_parameter_type (&controlling_type);

        nocode_wanted = saved_nocode_wanted;

        for (;;) {
            learn = 0;
            skip(',');
            if (tok == TOK_DEFAULT) {
                if (has_default)
                    tcc_error("too many 'default'");
                has_default = 1;
                if (!has_match)
                    learn = 1;
                next();
            } else {
                AttributeDef ad_tmp;
                int itmp;
                CType cur_type;

                parse_btype(&cur_type, &ad_tmp, 0);
                type_decl(&cur_type, &ad_tmp, &itmp, TYPE_ABSTRACT);
                if (compare_types(&controlling_type, &cur_type, 0)) {
                    if (has_match) {
                        tcc_error("type match twice");
                    }
                    has_match = 1;
                    learn = 1;
                }
            }
            skip(':');
            /* only the selected association's tokens are saved */
            if (learn) {
                if (str)
                    tok_str_free(str);
                skip_or_save_block(&str);
            } else {
                skip_or_save_block(NULL);
            }
            if (tok == ')')
                break;
        }
        if (!str) {
            char buf[60];
            type_to_str(buf, sizeof buf, &controlling_type, NULL);
            tcc_error("type '%s' does not match any association", buf);
        }
        /* replay the saved tokens as a macro and parse the expression */
        begin_macro(str, 1);
        next();
        expr_eq();
        if (tok != TOK_EOF)
            expect(",");
        end_macro();
        next();
        break;
    }
    // special qnan , snan and infinity values
    case TOK___NAN__:
        n = 0x7fc00000;
special_math_val:
        vpushi(n);
        vtop->type.t = VT_FLOAT;
        next();
        break;
    case TOK___SNAN__:
        n = 0x7f800001;
        goto special_math_val;
    case TOK___INF__:
        n = 0x7f800000;
        goto special_math_val;

    default:
    tok_identifier:
        if (tok < TOK_UIDENT)
            tcc_error("expression expected before '%s'", get_tok_str(tok, &tokc));
        t = tok;
        next();
        s = sym_find(t);
        if (!s || IS_ASM_SYM(s)) {
            const char *name = get_tok_str(t, NULL);
            if (tok != '(')
                tcc_error("'%s' undeclared", name);
            /* for simple function calls, we tolerate undeclared
               external reference to int() function */
            tcc_warning_c(warn_implicit_function_declaration)(
                "implicit declaration of function '%s'", name);
            s = external_global_sym(t, &func_old_type);
        }

        r = s->r;
        /* A symbol that has a register is a local register variable,
           which starts out as VT_LOCAL value. */
        if ((r & VT_VALMASK) < VT_CONST)
            r = (r & ~VT_VALMASK) | VT_LOCAL;

        vset(&s->type, r, s->c);
        /* Point to s as backpointer (even without r&VT_SYM).
           Will be used by at least the x86 inline asm parser for
           regvars. */
        vtop->sym = s;

        if (r & VT_SYM) {
            vtop->c.i = 0;
        } else if (r == VT_CONST && IS_ENUM_VAL(s->type.t)) {
            vtop->c.i = s->enum_val;
        }
        break;
    }

    /* post operations */
    while (1) {
        if (tok == TOK_INC || tok == TOK_DEC) {
            inc(1, tok);
            next();
        } else if (tok == '.' || tok == TOK_ARROW) {
            int qualifiers, cumofs;
            /* field */
            if (tok == TOK_ARROW)
                indir();
            qualifiers = vtop->type.t & (VT_CONSTANT | VT_VOLATILE);
            test_lvalue();
            /* expect pointer on structure */
            next();
            s = find_field(&vtop->type, tok, &cumofs);
            /* add field offset to pointer */
            gaddrof();
            vtop->type = char_pointer_type; /* change type to 'char *' */
            vpushi(cumofs);
            gen_op('+');
            /* change type to field type, and set to lvalue */
            vtop->type = s->type;
            vtop->type.t |= qualifiers;
            /* an array is never an lvalue */
            if (!(vtop->type.t & VT_ARRAY)) {
                vtop->r |= VT_LVAL;
#ifdef CONFIG_TCC_BCHECK
                /* if bound checking, the referenced pointer must be checked */
                if (tcc_state->do_bounds_check)
                    vtop->r |= VT_MUSTBOUND;
#endif
            }
            next();
        } else if (tok == '[') {
            /* array indexing: a[i] is *(a + i) */
            next();
            gexpr();
            gen_op('+');
            indir();
            skip(']');
        } else if (tok == '(') {
            SValue ret;
            Sym *sa;
            int nb_args, ret_nregs, ret_align, regsize, variadic;

            /* function call  */
            if ((vtop->type.t & VT_BTYPE) != VT_FUNC) {
                /* pointer test (no array accepted) */
                if ((vtop->type.t & (VT_BTYPE | VT_ARRAY)) == VT_PTR) {
                    vtop->type = *pointed_type(&vtop->type);
                    if ((vtop->type.t & VT_BTYPE) != VT_FUNC)
                        goto error_func;
                } else {
                error_func:
                    expect("function pointer");
                }
            } else {
                vtop->r &= ~VT_LVAL; /* no lvalue */
            }
            /* get return type */
            s = vtop->type.ref;
            next();
            sa = s->next; /* first parameter */
            nb_args = regsize = 0;
            ret.r2 = VT_CONST;
            /* compute first implicit argument if a structure is returned */
            if ((s->type.t & VT_BTYPE) == VT_STRUCT) {
                variadic = (s->f.func_type == FUNC_ELLIPSIS);
                ret_nregs = gfunc_sret(&s->type, variadic, &ret.type,
                                       &ret_align, &regsize);
                if (ret_nregs <= 0) {
                    /* get some space for the returned structure */
                    size = type_size(&s->type, &align);
#ifdef TCC_TARGET_ARM64
                    /* On arm64, a small struct is return in registers.
                       It is much easier to write it to memory if we know
                       that we are allowed to write some extra bytes, so
                       round the allocated space up to a power of 2: */
                    if (size < 16)
                        while (size & (size - 1))
                            size = (size | (size - 1)) + 1;
#endif
                    loc = (loc - size) & -align;
                    ret.type = s->type;
                    ret.r = VT_LOCAL | VT_LVAL;
                    /* pass it as 'int' to avoid structure arg passing
                       problems */
                    vseti(VT_LOCAL, loc);
#ifdef CONFIG_TCC_BCHECK
                    if (tcc_state->do_bounds_check)
                        --loc;
#endif
                    ret.c = vtop->c;
                    if (ret_nregs < 0)
                        vtop--;
                    else
                        nb_args++;
                }
            } else {
                ret_nregs = 1;
                ret.type = s->type;
            }

            if (ret_nregs > 0) {
                /* return in register */
                ret.c.i = 0;
                PUT_R_RET(&ret, ret.type.t);
            }
            if (tok != ')') {
                for(;;) {
                    expr_eq();
                    gfunc_param_typed(s, sa);
                    nb_args++;
                    if (sa)
                        sa = sa->next;
                    if (tok == ')')
                        break;
                    skip(',');
                }
            }
            if (sa)
                tcc_error("too few arguments to function");
            skip(')');
            gfunc_call(nb_args);

            if (ret_nregs < 0) {
                vsetc(&ret.type, ret.r, &ret.c);
#ifdef TCC_TARGET_RISCV64
                arch_transfer_ret_regs(1);
#endif
            } else {
                /* return value */
                n = ret_nregs;
                while (n > 1) {
                    int rc = reg_classes[ret.r] & ~(RC_INT | RC_FLOAT);
                    /* We assume that when a structure is returned in multiple
                       registers, their classes are consecutive values of the
                       suite s(n) = 2^n */
                    rc <<= --n;
                    for (r = 0; r < NB_REGS; ++r)
                        if (reg_classes[r] & rc)
                            break;
                    vsetc(&ret.type, r, &ret.c);
                }
                vsetc(&ret.type, ret.r, &ret.c);
                vtop->r2 = ret.r2;

                /* handle packed struct return */
                if (((s->type.t & VT_BTYPE) == VT_STRUCT) && ret_nregs) {
                    int addr, offset;

                    size = type_size(&s->type, &align);
                    /* We're writing whole regs often, make sure there's enough
                       space. Assume register size is power of 2. */
                    size = (size + regsize - 1) & -regsize;
                    if (ret_align > align)
                        align = ret_align;
                    loc = (loc - size) & -align;
                    addr = loc;
                    offset = 0;
                    for (;;) {
                        vset(&ret.type, VT_LOCAL | VT_LVAL, addr + offset);
                        vswap();
                        vstore();
                        vtop--;
                        if (--ret_nregs == 0)
                            break;
                        offset += regsize;
                    }
                    vset(&s->type, VT_LOCAL | VT_LVAL, addr);
                }

                /* Promote char/short return values. This is matters only
                   for calling function that were not compiled by TCC and
                   only on some architectures.  For those where it doesn't
                   matter we expect things to be already promoted to int,
                   but not larger.  */
                t = s->type.t & VT_BTYPE;
                if (t == VT_BYTE || t == VT_SHORT || t == VT_BOOL) {
#ifdef PROMOTE_RET
                    vtop->r |= BFVAL(VT_MUSTCAST, 1);
#else
                    vtop->type.t = VT_INT;
#endif
                }
            }
            if (s->f.func_noreturn) {
                if (debug_modes)
                    tcc_tcov_block_end(tcc_state, -1);
                CODE_OFF();
            }
        } else {
            break;
        }
    }
}
6199 #ifndef precedence_parser /* original top-down parser */
6201 static void expr_prod(void)
6203 int t;
6205 unary();
6206 while ((t = tok) == '*' || t == '/' || t == '%') {
6207 next();
6208 unary();
6209 gen_op(t);
6213 static void expr_sum(void)
6215 int t;
6217 expr_prod();
6218 while ((t = tok) == '+' || t == '-') {
6219 next();
6220 expr_prod();
6221 gen_op(t);
6225 static void expr_shift(void)
6227 int t;
6229 expr_sum();
6230 while ((t = tok) == TOK_SHL || t == TOK_SAR) {
6231 next();
6232 expr_sum();
6233 gen_op(t);
6237 static void expr_cmp(void)
6239 int t;
6241 expr_shift();
6242 while (((t = tok) >= TOK_ULE && t <= TOK_GT) ||
6243 t == TOK_ULT || t == TOK_UGE) {
6244 next();
6245 expr_shift();
6246 gen_op(t);
6250 static void expr_cmpeq(void)
6252 int t;
6254 expr_cmp();
6255 while ((t = tok) == TOK_EQ || t == TOK_NE) {
6256 next();
6257 expr_cmp();
6258 gen_op(t);
6262 static void expr_and(void)
6264 expr_cmpeq();
6265 while (tok == '&') {
6266 next();
6267 expr_cmpeq();
6268 gen_op('&');
6272 static void expr_xor(void)
6274 expr_and();
6275 while (tok == '^') {
6276 next();
6277 expr_and();
6278 gen_op('^');
6282 static void expr_or(void)
6284 expr_xor();
6285 while (tok == '|') {
6286 next();
6287 expr_xor();
6288 gen_op('|');
6292 static void expr_landor(int op);
6294 static void expr_land(void)
6296 expr_or();
6297 if (tok == TOK_LAND)
6298 expr_landor(tok);
6301 static void expr_lor(void)
6303 expr_land();
6304 if (tok == TOK_LOR)
6305 expr_landor(tok);
6308 # define expr_landor_next(op) op == TOK_LAND ? expr_or() : expr_land()
6309 #else /* defined precedence_parser */
6310 # define expr_landor_next(op) unary(), expr_infix(precedence(op) + 1)
6311 # define expr_lor() unary(), expr_infix(1)
/* binding strength of binary operator 'tok' for the precedence-climbing
   parser: higher binds tighter, 0 means "not a binary operator".
   NOTE: the explicit cases must be tried before the TOK_ULE..TOK_GT
   range check in 'default', hence the goto into the switch. */
static int precedence(int tok)
{
    switch (tok) {
    case TOK_LOR: return 1;
    case TOK_LAND: return 2;
    case '|': return 3;
    case '^': return 4;
    case '&': return 5;
    case TOK_EQ: case TOK_NE: return 6;
 relat: case TOK_ULT: case TOK_UGE: return 7;
    case TOK_SHL: case TOK_SAR: return 8;
    case '+': case '-': return 9;
    case '*': case '/': case '%': return 10;
    default:
        /* remaining relational operators share precedence 7 */
        if (tok >= TOK_ULE && tok <= TOK_GT)
            goto relat;
        return 0;
    }
}
6332 static unsigned char prec[256];
6333 static void init_prec(void)
6335 int i;
6336 for (i = 0; i < 256; i++)
6337 prec[i] = precedence(i);
6339 #define precedence(i) ((unsigned)i < 256 ? prec[i] : 0)
6341 static void expr_landor(int op);
6343 static void expr_infix(int p)
6345 int t = tok, p2;
6346 while ((p2 = precedence(t)) >= p) {
6347 if (t == TOK_LOR || t == TOK_LAND) {
6348 expr_landor(t);
6349 } else {
6350 next();
6351 unary();
6352 if (precedence(tok) > p2)
6353 expr_infix(p2 + 1);
6354 gen_op(t);
6356 t = tok;
6359 #endif
6361 /* Assuming vtop is a value used in a conditional context
6362 (i.e. compared with zero) return 0 if it's false, 1 if
6363 true and -1 if it can't be statically determined. */
6364 static int condition_3way(void)
6366 int c = -1;
6367 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
6368 (!(vtop->r & VT_SYM) || !vtop->sym->a.weak)) {
6369 vdup();
6370 gen_cast_s(VT_BOOL);
6371 c = vtop->c.i;
6372 vpop();
6374 return c;
/* parse and generate code for a chain of '&&' (op == TOK_LAND) or '||'
   operands with short-circuit evaluation.  'i' is the value that keeps
   evaluation going (1 for &&, 0 for ||); once an operand is statically
   the opposite, the rest is parsed under nocode_wanted (f = 1). */
static void expr_landor(int op)
{
    int t = 0, cc = 1, f = 0, i = op == TOK_LAND, c;
    for(;;) {
        c = f ? i : condition_3way();
        if (c < 0)
            save_regs(1), cc = 0;   /* runtime test needed */
        else if (c != i)
            nocode_wanted++, f = 1; /* result decided: disable codegen */
        if (tok != op)
            break;
        if (c < 0)
            t = gvtst(i, t);        /* chain the conditional jump */
        else
            vpop();
        next();
        expr_landor_next(op);
    }
    if (cc || f) {
        /* fully decided at compile time */
        vpop();
        vpushi(i ^ f);
        gsym(t);
        nocode_wanted -= f;         /* undo the disable from above */
    } else {
        /* runtime value: leave the jump chains in VT_CMP form */
        gvtst_set(i, t);
    }
}
6405 static int is_cond_bool(SValue *sv)
6407 if ((sv->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST
6408 && (sv->type.t & VT_BTYPE) == VT_INT)
6409 return (unsigned)sv->c.i < 2;
6410 if (sv->r == VT_CMP)
6411 return 1;
6412 return 0;
/* parse and generate code for a conditional expression 'a ? b : c',
   including the GNU extension 'a ?: c'.  The three-way constant test
   lets a statically decided condition drop the dead branch via
   nocode_wanted. */
static void expr_cond(void)
{
    int tt, u, r1, r2, rc, t1, t2, islv, c, g;
    SValue sv;
    CType type;

    expr_lor();
    if (tok == '?') {
        next();
        c = condition_3way();
        g = (tok == ':' && gnu_ext);   /* 'a ?: c' reuses 'a' as middle operand */
        tt = 0;
        if (!g) {
            if (c < 0) {
                save_regs(1);
                tt = gvtst(1, 0);      /* jump to ':' branch when false */
            } else {
                vpop();
            }
        } else if (c < 0) {
            /* needed to avoid having different registers saved in
               each branch */
            save_regs(1);
            gv_dup();                  /* keep a copy as the result of 'a ?:' */
            tt = gvtst(0, 0);
        }

        if (c == 0)
            nocode_wanted++;           /* 'then' branch is dead */
        if (!g)
            gexpr();

        if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
            mk_pointer(&vtop->type);
        sv = *vtop; /* save value to handle it later */
        vtop--; /* no vpop so that FP stack is not flushed */

        if (g) {
            u = tt;
        } else if (c < 0) {
            u = gjmp(0);               /* skip the ':' branch after 'then' */
            gsym(tt);
        } else
            u = 0;

        if (c == 0)
            nocode_wanted--;
        if (c == 1)
            nocode_wanted++;           /* 'else' branch is dead */
        skip(':');
        expr_cond();

        if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
            mk_pointer(&vtop->type);

        /* cast operands to correct type according to ISOC rules */
        if (!combine_types(&type, &sv, vtop, '?'))
            type_incompatibility_error(&sv.type, &vtop->type,
                "type mismatch in conditional expression (have '%s' and '%s')");

        if (c < 0 && is_cond_bool(vtop) && is_cond_bool(&sv)) {
            /* optimize "if (f ? a > b : c || d) ..." for example, where normally
               "a < b" and "c || d" would be forced to "(int)0/1" first, whereas
               this code jumps directly to the if's then/else branches. */
            t1 = gvtst(0, 0);
            t2 = gjmp(0);
            gsym(u);
            vpushv(&sv);
            /* combine jump targets of 2nd op with VT_CMP of 1st op */
            gvtst_set(0, t1);
            gvtst_set(1, t2);
            gen_cast(&type);
            return;
        }

        /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
           that `(expr ? a : b).mem` does not error with "lvalue expected" */
        islv = (vtop->r & VT_LVAL) && (sv.r & VT_LVAL) && VT_STRUCT == (type.t & VT_BTYPE);

        /* now we convert second operand */
        if (c != 1) {
            gen_cast(&type);
            if (islv) {
                mk_pointer(&vtop->type);
                gaddrof();
            } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
                gaddrof();
        }

        rc = RC_TYPE(type.t);
        /* for long longs, we use fixed registers to avoid having
           to handle a complicated move */
        if (USING_TWO_WORDS(type.t))
            rc = RC_RET(type.t);

        tt = r2 = 0;
        if (c < 0) {
            r2 = gv(rc);               /* force 'else' result into a register */
            tt = gjmp(0);
        }
        gsym(u);
        if (c == 1)
            nocode_wanted--;

        /* this is horrible, but we must also convert first
           operand */
        if (c != 0) {
            *vtop = sv;
            gen_cast(&type);
            if (islv) {
                mk_pointer(&vtop->type);
                gaddrof();
            } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
                gaddrof();
        }

        if (c < 0) {
            /* move the 'then' result into the same register as 'else' */
            r1 = gv(rc);
            move_reg(r2, r1, islv ? VT_PTR : type.t);
            vtop->r = r2;
            gsym(tt);
        }

        if (islv)
            indir();                   /* undo the &a/&b transformation */
    }
}
6544 static void expr_eq(void)
6546 int t;
6548 expr_cond();
6549 if ((t = tok) == '=' || TOK_ASSIGN(t)) {
6550 test_lvalue();
6551 next();
6552 if (t == '=') {
6553 expr_eq();
6554 } else {
6555 vdup();
6556 expr_eq();
6557 gen_op(TOK_ASSIGN_OP(t));
6559 vstore();
6563 ST_FUNC void gexpr(void)
6565 expr_eq();
6566 if (tok == ',') {
6567 do {
6568 vpop();
6569 next();
6570 expr_eq();
6571 } while (tok == ',');
6573 /* convert array & function to pointer */
6574 convert_parameter_type(&vtop->type);
6576 /* make builtin_constant_p((1,2)) return 0 (like on gcc) */
6577 if ((vtop->r & VT_VALMASK) == VT_CONST && nocode_wanted && !CONST_WANTED)
6578 gv(RC_TYPE(vtop->type.t));
/* parse a constant expression and return value in vtop. */
static void expr_const1(void)
{
    /* temporarily set the CONST_WANTED bit in nocode_wanted so the
       expression is folded instead of generating code */
    nocode_wanted += CONST_WANTED_BIT;
    expr_cond();
    nocode_wanted -= CONST_WANTED_BIT;
}
/* parse an integer constant and return its value. */
static inline int64_t expr_const64(void)
{
    int64_t c;
    expr_const1();
    /* the result must be a plain constant: no lvalue, no symbol
       reference, not marked non-constant */
    if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM | VT_NONCONST)) != VT_CONST)
        expect("constant expression");
    c = vtop->c.i;
    vpop();
    return c;
}
6602 /* parse an integer constant and return its value.
6603 Complain if it doesn't fit 32bit (signed or unsigned). */
6604 ST_FUNC int expr_const(void)
6606 int c;
6607 int64_t wc = expr_const64();
6608 c = wc;
6609 if (c != wc && (unsigned)c != wc)
6610 tcc_error("constant exceeds 32 bit");
6611 return c;
6614 /* ------------------------------------------------------------------------- */
6615 /* return from function */
6617 #ifndef TCC_TARGET_ARM64
/* generate code to return the value on vtop from the current function.
   Struct returns are dispatched per the target ABI via gfunc_sret():
   ret_nregs < 0 -> target-specific register transfer, 0 -> copy through
   the hidden pointer argument, > 0 -> packed into ret_nregs registers. */
static void gfunc_return(CType *func_type)
{
    if ((func_type->t & VT_BTYPE) == VT_STRUCT) {
        CType type, ret_type;
        int ret_align, ret_nregs, regsize;
        ret_nregs = gfunc_sret(func_type, func_var, &ret_type,
                               &ret_align, &regsize);
        if (ret_nregs < 0) {
#ifdef TCC_TARGET_RISCV64
            arch_transfer_ret_regs(0);
#endif
        } else if (0 == ret_nregs) {
            /* if returning structure, must copy it to implicit
               first pointer arg location */
            type = *func_type;
            mk_pointer(&type);
            vset(&type, VT_LOCAL | VT_LVAL, func_vc);
            indir();
            vswap();
            /* copy structure value to pointer */
            vstore();
        } else {
            /* returning structure packed into registers */
            int size, addr, align, rc, n;
            size = type_size(func_type,&align);
            /* if the struct is misaligned for whole-register reads,
               bounce it through a suitably aligned stack copy */
            if ((align & (ret_align - 1))
                && ((vtop->r & VT_VALMASK) < VT_CONST /* pointer to struct */
                    || (vtop->c.i & (ret_align - 1))
                    )) {
                loc = (loc - size) & -ret_align;
                addr = loc;
                type = *func_type;
                vset(&type, VT_LOCAL | VT_LVAL, addr);
                vswap();
                vstore();
                vpop();
                vset(&ret_type, VT_LOCAL | VT_LVAL, addr);
            }
            vtop->type = ret_type;
            rc = RC_RET(ret_type.t);
            /* load each regsize-chunk into the next return register */
            for (n = ret_nregs; --n > 0;) {
                vdup();
                gv(rc);
                vswap();
                incr_offset(regsize);
                /* We assume that when a structure is returned in multiple
                   registers, their classes are consecutive values of the
                   suite s(n) = 2^n */
                rc <<= 1;
            }
            gv(rc);
            vtop -= ret_nregs - 1;
        }
    } else {
        gv(RC_RET(func_type->t));
    }
    vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
}
6677 #endif
6679 static void check_func_return(void)
6681 if ((func_vt.t & VT_BTYPE) == VT_VOID)
6682 return;
6683 if (!strcmp (funcname, "main")
6684 && (func_vt.t & VT_BTYPE) == VT_INT) {
6685 /* main returns 0 by default */
6686 vpushi(0);
6687 gen_assign_cast(&func_vt);
6688 gfunc_return(&func_vt);
6689 } else {
6690 tcc_warning("function might return no value: '%s'", funcname);
6694 /* ------------------------------------------------------------------------- */
6695 /* switch/case */
6697 static int case_cmpi(const void *pa, const void *pb)
6699 int64_t a = (*(struct case_t**) pa)->v1;
6700 int64_t b = (*(struct case_t**) pb)->v1;
6701 return a < b ? -1 : a > b;
6704 static int case_cmpu(const void *pa, const void *pb)
6706 uint64_t a = (uint64_t)(*(struct case_t**) pa)->v1;
6707 uint64_t b = (uint64_t)(*(struct case_t**) pb)->v1;
6708 return a < b ? -1 : a > b;
/* test vtop and bind the resulting jump chain (merged with chain 't')
   to the already known code address 'a' */
static void gtst_addr(int t, int a)
{
    gsym_addr(gvtst(0, t), a);
}
/* generate the dispatch code for a sorted array of 'len' case ranges;
   the switch value is on vtop.  Uses binary search above 8 entries,
   linear tests below.  Unmatched values jump to *bsym (default/end). */
static void gcase(struct case_t **base, int len, int *bsym)
{
    struct case_t *p;
    int e;
    int ll = (vtop->type.t & VT_BTYPE) == VT_LLONG;
    while (len > 8) {
        /* binary search */
        p = base[len/2];
        vdup();
        if (ll)
            vpushll(p->v2);
        else
            vpushi(p->v2);
        gen_op(TOK_LE);
        e = gvtst(1, 0);
        vdup();
        if (ll)
            vpushll(p->v1);
        else
            vpushi(p->v1);
        gen_op(TOK_GE);
        gtst_addr(0, p->sym); /* v1 <= x <= v2 */
        /* x < v1 */
        gcase(base, len/2, bsym);
        /* x > v2 */
        gsym(e);
        e = len/2 + 1;
        base += e; len -= e;
    }
    /* linear scan */
    while (len--) {
        p = *base++;
        vdup();
        if (ll)
            vpushll(p->v2);
        else
            vpushi(p->v2);
        if (p->v1 == p->v2) {
            /* single-value case: one equality test */
            gen_op(TOK_EQ);
            gtst_addr(0, p->sym);
        } else {
            /* range case: v1 <= x && x <= v2 */
            gen_op(TOK_LE);
            e = gvtst(1, 0);
            vdup();
            if (ll)
                vpushll(p->v1);
            else
                vpushi(p->v1);
            gen_op(TOK_GE);
            gtst_addr(0, p->sym);
            gsym(e);
        }
    }
    *bsym = gjmp(*bsym);
}
6772 static void end_switch(void)
6774 struct switch_t *sw = cur_switch;
6775 dynarray_reset(&sw->p, &sw->n);
6776 cur_switch = sw->prev;
6777 tcc_free(sw);
6780 /* ------------------------------------------------------------------------- */
6781 /* __attribute__((cleanup(fn))) */
/* emit calls to the __attribute__((cleanup)) handlers registered in the
   current scope's chain, innermost first, stopping at 'stop' (exclusive) */
static void try_call_scope_cleanup(Sym *stop)
{
    Sym *cls = cur_scope->cl.s;

    for (; cls != stop; cls = cls->ncl) {
        Sym *fs = cls->next;      /* the cleanup function */
        Sym *vs = cls->prev_tok;  /* the variable it applies to */

        vpushsym(&fs->type, fs);
        vset(&vs->type, vs->r, vs->c);
        vtop->sym = vs;
        /* cleanup functions take the variable's address */
        mk_pointer(&vtop->type);
        gaddrof();
        gfunc_call(1);
    }
}
/* for a backward goto: run the cleanups of all scopes left between here
   and the label, i.e. down to the nearest common ancestor of the two
   cleanup chains */
static void try_call_cleanup_goto(Sym *cleanupstate)
{
    Sym *oc, *cc;
    int ocd, ccd;

    if (!cur_scope->cl.s)
        return;

    /* search NCA of both cleanup chains given parents and initial depth */
    ocd = cleanupstate ? cleanupstate->v & ~SYM_FIELD : 0;
    for (ccd = cur_scope->cl.n, oc = cleanupstate; ocd > ccd; --ocd, oc = oc->ncl)
      ;
    for (cc = cur_scope->cl.s; ccd > ocd; --ccd, cc = cc->ncl)
      ;
    for (; cc != oc; cc = cc->ncl, oc = oc->ncl, --ccd)
      ;

    try_call_scope_cleanup(cc);
}
6820 /* call 'func' for each __attribute__((cleanup(func))) */
6821 static void block_cleanup(struct scope *o)
6823 int jmp = 0;
6824 Sym *g, **pg;
6825 for (pg = &pending_gotos; (g = *pg) && g->c > o->cl.n;) {
6826 if (g->prev_tok->r & LABEL_FORWARD) {
6827 Sym *pcl = g->next;
6828 if (!jmp)
6829 jmp = gjmp(0);
6830 gsym(pcl->jnext);
6831 try_call_scope_cleanup(o->cl.s);
6832 pcl->jnext = gjmp(0);
6833 if (!o->cl.n)
6834 goto remove_pending;
6835 g->c = o->cl.n;
6836 pg = &g->prev;
6837 } else {
6838 remove_pending:
6839 *pg = g->prev;
6840 sym_free(g);
6843 gsym(jmp);
6844 try_call_scope_cleanup(o->cl.s);
6847 /* ------------------------------------------------------------------------- */
6848 /* VLA */
/* restore the stack pointer from the saved location 'loc' (0 = nothing
   to restore) after leaving a scope that allocated VLAs */
static void vla_restore(int loc)
{
    if (loc)
        gen_vla_sp_restore(loc);
}
6856 static void vla_leave(struct scope *o)
6858 struct scope *c = cur_scope, *v = NULL;
6859 for (; c != o && c; c = c->prev)
6860 if (c->vla.num)
6861 v = c;
6862 if (v)
6863 vla_restore(v->vla.locorig);
6866 /* ------------------------------------------------------------------------- */
6867 /* local scopes */
6869 static void new_scope(struct scope *o)
6871 /* copy and link previous scope */
6872 *o = *cur_scope;
6873 o->prev = cur_scope;
6874 cur_scope = o;
6875 cur_scope->vla.num = 0;
6877 /* record local declaration stack position */
6878 o->lstk = local_stack;
6879 o->llstk = local_label_stack;
6880 ++local_scope;
/* leave scope 'o': run VLA restores and cleanup handlers, then pop the
   labels and symbols declared in it */
static void prev_scope(struct scope *o, int is_expr)
{
    vla_leave(o->prev);

    if (o->cl.s != o->prev->cl.s)
        block_cleanup(o->prev);

    /* pop locally defined labels */
    label_pop(&local_label_stack, o->llstk, is_expr);

    /* In the is_expr case (a statement expression is finished here),
       vtop might refer to symbols on the local_stack.  Either via the
       type or via vtop->sym.  We can't pop those nor any that in turn
       might be referred to.  To make it easier we don't roll back
       any symbols in that case; some upper level call to block() will
       do that.  We do have to remove such symbols from the lookup
       tables, though.  sym_pop will do that.  */

    /* pop locally defined symbols */
    pop_local_syms(o->lstk, is_expr);
    cur_scope = o->prev;
    --local_scope;
}
6907 /* leave a scope via break/continue(/goto) */
6908 static void leave_scope(struct scope *o)
6910 if (!o)
6911 return;
6912 try_call_scope_cleanup(o->cl.s);
6913 vla_leave(o);
6916 /* short versiona for scopes with 'if/do/while/switch' which can
6917 declare only types (of struct/union/enum) */
/* lightweight scope entry for 'if/do/while/switch' bodies, which can
   only declare types: just record the local symbol stack position */
static void new_scope_s(struct scope *o)
{
    o->lstk = local_stack;
    ++local_scope;
}
/* counterpart of new_scope_s(): pop the symbols declared since entry */
static void prev_scope_s(struct scope *o)
{
    sym_pop(&local_stack, o->lstk, 0);
    --local_scope;
}
6930 /* ------------------------------------------------------------------------- */
6931 /* call block from 'for do while' loops */
/* parse a loop body: temporarily install 'bsym'/'csym' as the jump
   chains that break/continue target, then restore the previous ones */
static void lblock(int *bsym, int *csym)
{
    struct scope *lo = loop_scope, *co = cur_scope;
    int *b = co->bsym, *c = co->csym;
    if (csym) {
        /* only loops (not switch) supply a continue target */
        co->csym = csym;
        loop_scope = co;
    }
    co->bsym = bsym;
    block(0);
    co->bsym = b;
    if (csym) {
        co->csym = c;
        loop_scope = lo;
    }
}
/* parse and generate code for one statement (or a compound statement
   when the next token is '{').  'flags' carries STMT_EXPR for GNU
   statement expressions (result left on vtop) and STMT_COMPOUND when
   called from inside a compound statement. */
static void block(int flags)
{
    int a, b, c, d, e, t;
    struct scope o;
    Sym *s;

    if (flags & STMT_EXPR) {
        /* default return value is (void) */
        vpushi(0);
        vtop->type.t = VT_VOID;
    }

again:
    t = tok;
    /* If the token carries a value, next() might destroy it. Only with
       invalid code such as f(){"123"4;} */
    if (TOK_HAS_VALUE(t))
        goto expr;
    next();

    if (debug_modes)
        tcc_tcov_check_line (tcc_state, 0), tcc_tcov_block_begin (tcc_state);

    if (t == TOK_IF) {
        new_scope_s(&o);
        skip('(');
        gexpr();
        skip(')');
        a = gvtst(1, 0);          /* jump over 'then' when false */
        block(0);
        if (tok == TOK_ELSE) {
            d = gjmp(0);
            gsym(a);
            next();
            block(0);
            gsym(d); /* patch else jmp */
        } else {
            gsym(a);
        }
        prev_scope_s(&o);

    } else if (t == TOK_WHILE) {
        new_scope_s(&o);
        d = gind();               /* loop head */
        skip('(');
        gexpr();
        skip(')');
        a = gvtst(1, 0);          /* exit jump when false */
        b = 0;
        lblock(&a, &b);           /* break -> a, continue -> b */
        gjmp_addr(d);
        gsym_addr(b, d);
        gsym(a);
        prev_scope_s(&o);

    } else if (t == '{') {
        if (debug_modes)
            tcc_debug_stabn(tcc_state, N_LBRAC, ind - func_ind);
        new_scope(&o);

        /* handle local labels declarations */
        while (tok == TOK_LABEL) {
            do {
                next();
                if (tok < TOK_UIDENT)
                    expect("label identifier");
                label_push(&local_label_stack, tok, LABEL_DECLARED);
                next();
            } while (tok == ',');
            skip(';');
        }

        while (tok != '}') {
            decl(VT_LOCAL);
            if (tok != '}') {
                /* only the last statement's value survives in a
                   statement expression */
                if (flags & STMT_EXPR)
                    vpop();
                block(flags | STMT_COMPOUND);
            }
        }

        prev_scope(&o, flags & STMT_EXPR);
        if (debug_modes)
            tcc_debug_stabn(tcc_state, N_RBRAC, ind - func_ind);
        if (local_scope)
            next();
        else if (!nocode_wanted)
            check_func_return();  /* end of function body */

    } else if (t == TOK_RETURN) {
        b = (func_vt.t & VT_BTYPE) != VT_VOID;
        if (tok != ';') {
            gexpr();
            if (b) {
                gen_assign_cast(&func_vt);
            } else {
                if (vtop->type.t != VT_VOID)
                    tcc_warning("void function returns a value");
                vtop--;
            }
        } else if (b) {
            tcc_warning("'return' with no value");
            b = 0;
        }
        leave_scope(root_scope);
        if (b)
            gfunc_return(&func_vt);
        skip(';');
        /* jump unless last stmt in top-level block */
        if (tok != '}' || local_scope != 1)
            rsym = gjmp(rsym);
        if (debug_modes)
            tcc_tcov_block_end (tcc_state, -1);
        CODE_OFF();

    } else if (t == TOK_BREAK) {
        /* compute jump */
        if (!cur_scope->bsym)
            tcc_error("cannot break");
        if (cur_switch && cur_scope->bsym == cur_switch->bsym)
            leave_scope(cur_switch->scope);
        else
            leave_scope(loop_scope);
        *cur_scope->bsym = gjmp(*cur_scope->bsym);
        skip(';');

    } else if (t == TOK_CONTINUE) {
        /* compute jump */
        if (!cur_scope->csym)
            tcc_error("cannot continue");
        leave_scope(loop_scope);
        *cur_scope->csym = gjmp(*cur_scope->csym);
        skip(';');

    } else if (t == TOK_FOR) {
        new_scope(&o);

        skip('(');
        if (tok != ';') {
            /* c99 for-loop init decl? */
            if (!decl(VT_JMP)) {
                /* no, regular for-loop init expr */
                gexpr();
                vpop();
            }
        }
        skip(';');
        a = b = 0;
        c = d = gind();           /* c: condition, d: continue target */
        if (tok != ';') {
            gexpr();
            a = gvtst(1, 0);
        }
        skip(';');
        if (tok != ')') {
            /* increment expression: emitted after the body, so the
               continue target 'd' moves here */
            e = gjmp(0);
            d = gind();
            gexpr();
            vpop();
            gjmp_addr(c);
            gsym(e);
        }
        skip(')');
        lblock(&a, &b);
        gjmp_addr(d);
        gsym_addr(b, d);
        gsym(a);
        prev_scope(&o, 0);

    } else if (t == TOK_DO) {
        new_scope_s(&o);
        a = b = 0;
        d = gind();
        lblock(&a, &b);
        gsym(b);                  /* continue target: the condition */
        skip(TOK_WHILE);
        skip('(');
        gexpr();
        skip(')');
        skip(';');
        c = gvtst(0, 0);          /* loop back while true */
        gsym_addr(c, d);
        gsym(a);
        prev_scope_s(&o);

    } else if (t == TOK_SWITCH) {
        struct switch_t *sw;

        sw = tcc_mallocz(sizeof *sw);
        sw->bsym = &a;
        sw->scope = cur_scope;
        sw->prev = cur_switch;
        sw->nocode_wanted = nocode_wanted;
        cur_switch = sw;

        new_scope_s(&o);
        skip('(');
        gexpr();
        skip(')');
        sw->sv = *vtop--; /* save switch value */
        a = 0;
        b = gjmp(0); /* jump to first case */
        lblock(&a, NULL);
        a = gjmp(a); /* add implicit break */
        /* case lookup */
        gsym(b);
        prev_scope_s(&o);

        if (sw->nocode_wanted)
            goto skip_switch;
        /* sort the collected case ranges and reject overlaps */
        if (sw->sv.type.t & VT_UNSIGNED)
            qsort(sw->p, sw->n, sizeof(void*), case_cmpu);
        else
            qsort(sw->p, sw->n, sizeof(void*), case_cmpi);
        for (b = 1; b < sw->n; b++)
            if (sw->sv.type.t & VT_UNSIGNED
                ? (uint64_t)sw->p[b - 1]->v2 >= (uint64_t)sw->p[b]->v1
                : sw->p[b - 1]->v2 >= sw->p[b]->v1)
                tcc_error("duplicate case value");
        vpushv(&sw->sv);
        gv(RC_INT);
        d = 0, gcase(sw->p, sw->n, &d);
        vpop();
        if (sw->def_sym)
            gsym_addr(d, sw->def_sym);
        else
            gsym(d);
    skip_switch:
        /* break label */
        gsym(a);
        end_switch();

    } else if (t == TOK_CASE) {
        struct case_t *cr;
        if (!cur_switch)
            expect("switch");
        cr = tcc_malloc(sizeof(struct case_t));
        dynarray_add(&cur_switch->p, &cur_switch->n, cr);
        cr->v1 = cr->v2 = expr_const64();
        if (gnu_ext && tok == TOK_DOTS) {
            /* GNU case range: 'case a ... b:' */
            next();
            cr->v2 = expr_const64();
            if ((!(cur_switch->sv.type.t & VT_UNSIGNED) && cr->v2 < cr->v1)
                || (cur_switch->sv.type.t & VT_UNSIGNED && (uint64_t)cr->v2 < (uint64_t)cr->v1))
                tcc_warning("empty case range");
        }
        /* case and default are unreachable from a switch under nocode_wanted */
        if (!cur_switch->nocode_wanted)
            cr->sym = gind();
        skip(':');
        goto block_after_label;

    } else if (t == TOK_DEFAULT) {
        if (!cur_switch)
            expect("switch");
        if (cur_switch->def_sym)
            tcc_error("too many 'default'");
        cur_switch->def_sym = cur_switch->nocode_wanted ? 1 : gind();
        skip(':');
        goto block_after_label;

    } else if (t == TOK_GOTO) {
        vla_restore(cur_scope->vla.locorig);
        if (tok == '*' && gnu_ext) {
            /* computed goto */
            next();
            gexpr();
            if ((vtop->type.t & VT_BTYPE) != VT_PTR)
                expect("pointer");
            ggoto();

        } else if (tok >= TOK_UIDENT) {
            s = label_find(tok);
            /* put forward definition if needed */
            if (!s)
                s = label_push(&global_label_stack, tok, LABEL_FORWARD);
            else if (s->r == LABEL_DECLARED)
                s->r = LABEL_FORWARD;

            if (s->r & LABEL_FORWARD) {
                /* start new goto chain for cleanups, linked via label->next */
                if (cur_scope->cl.s && !nocode_wanted) {
                    sym_push2(&pending_gotos, SYM_FIELD, 0, cur_scope->cl.n);
                    pending_gotos->prev_tok = s;
                    s = sym_push2(&s->next, SYM_FIELD, 0, 0);
                    pending_gotos->next = s;
                }
                s->jnext = gjmp(s->jnext);
            } else {
                /* backward goto: run intervening cleanups now */
                try_call_cleanup_goto(s->cleanupstate);
                gjmp_addr(s->jnext);
            }
            next();

        } else {
            expect("label identifier");
        }
        skip(';');

    } else if (t == TOK_ASM1 || t == TOK_ASM2 || t == TOK_ASM3) {
        asm_instr();

    } else {
        if (tok == ':' && t >= TOK_UIDENT) {
            /* label case */
            next();
            s = label_find(t);
            if (s) {
                if (s->r == LABEL_DEFINED)
                    tcc_error("duplicate label '%s'", get_tok_str(s->v, NULL));
                s->r = LABEL_DEFINED;
                if (s->next) {
                    Sym *pcl; /* pending cleanup goto */
                    for (pcl = s->next; pcl; pcl = pcl->prev)
                        gsym(pcl->jnext);
                    sym_pop(&s->next, NULL, 0);
                } else
                    gsym(s->jnext);
            } else {
                s = label_push(&global_label_stack, t, LABEL_DEFINED);
            }
            s->jnext = gind();
            s->cleanupstate = cur_scope->cl.s;

    block_after_label:
          {
            /* Accept attributes after labels (e.g. 'unused') */
            AttributeDef ad_tmp;
            parse_attribute(&ad_tmp);
          }
            if (debug_modes)
                tcc_tcov_reset_ind(tcc_state);
            vla_restore(cur_scope->vla.loc);

            if (tok != '}') {
                if (0 == (flags & STMT_COMPOUND))
                    goto again;
                /* C23: insert implicit null-statement whithin compound statement */
            } else {
                /* we accept this, but it is a mistake */
                tcc_warning_c(warn_all)("deprecated use of label at end of compound statement");
            }
        } else {
            /* expression case */
            if (t != ';') {
                unget_tok(t);
    expr:
                if (flags & STMT_EXPR) {
                    vpop();
                    gexpr();
                } else {
                    gexpr();
                    vpop();
                }
                skip(';');
            }
        }
    }

    if (debug_modes)
        tcc_tcov_check_line (tcc_state, 0), tcc_tcov_block_end (tcc_state, 0);
}
/* This skips over a stream of tokens containing balanced {} and ()
   pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
   with a '{').  If STR then allocates and stores the skipped tokens
   in *STR.  This doesn't check if () and {} are nested correctly,
   i.e. "({)}" is accepted. */
static void skip_or_save_block(TokenString **str)
{
    int braces = tok == '{';
    int level = 0;
    if (str)
        *str = tok_str_alloc();

    while (1) {
        int t = tok;
        /* stop at an outer separator or closer */
        if (level == 0
            && (t == ','
                || t == ';'
                || t == '}'
                || t == ')'
                || t == ']'))
            break;
        if (t == TOK_EOF) {
            /* EOF is only acceptable when skipping at level 0 */
            if (str || level > 0)
                tcc_error("unexpected end of file");
            else
                break;
        }
        if (str)
            tok_str_add_tok(*str);
        next();
        if (t == '{' || t == '(' || t == '[') {
            level++;
        } else if (t == '}' || t == ')' || t == ']') {
            level--;
            if (level == 0 && braces && t == '}')
                break;
        }
    }
    if (str)
        tok_str_add(*str, TOK_EOF);
}
7355 #define EXPR_CONST 1
7356 #define EXPR_ANY 2
/* parse one initializer element; EXPR_CONST enforces a constant
   (for static/global initializers), EXPR_ANY accepts any expression */
static void parse_init_elem(int expr_type)
{
    int saved_global_expr;
    switch(expr_type) {
    case EXPR_CONST:
        /* compound literals must be allocated globally in this case */
        saved_global_expr = global_expr;
        global_expr = 1;
        expr_const1();
        global_expr = saved_global_expr;
        /* NOTE: symbols are accepted, as well as lvalue for anon symbols
           (compound literals). */
        if (((vtop->r & (VT_VALMASK | VT_LVAL)) != VT_CONST
             && ((vtop->r & (VT_SYM|VT_LVAL)) != (VT_SYM|VT_LVAL)
                 || vtop->sym->v < SYM_FIRST_ANOM))
#ifdef TCC_TARGET_PE
            || ((vtop->r & VT_SYM) && vtop->sym->a.dllimport)
#endif
            )
            tcc_error("initializer element is not constant");
        break;
    case EXPR_ANY:
        expr_eq();
        break;
    }
}
7385 #if 1
/* internal sanity check: an initializer write at 'offset' must stay
   within the space reserved in the section resp. on the stack */
static void init_assert(init_params *p, int offset)
{
    if (p->sec ? !NODATA_WANTED && offset > p->sec->data_offset
               : !nocode_wanted && offset > p->local_offset)
        tcc_internal_error("initializer overflow");
}
7392 #else
7393 #define init_assert(sec, offset)
7394 #endif
/* put zeros for variable based init */
static void init_putz(init_params *p, unsigned long c, int size)
{
    init_assert(p, c + size);
    if (p->sec) {
        /* nothing to do because globals are already set to zero */
    } else {
        /* local initializer: emit a memset(addr, 0, size) call */
        vpush_helper_func(TOK_memset);
        vseti(VT_LOCAL, c);
        vpushi(0);
        vpushs(size);
#if defined TCC_TARGET_ARM && defined TCC_ARM_EABI
        vswap(); /* using __aeabi_memset(void*, size_t, int) */
#endif
        gfunc_call(3);
    }
}
7414 #define DIF_FIRST 1
7415 #define DIF_SIZE_ONLY 2
7416 #define DIF_HAVE_ELEM 4
7417 #define DIF_CLEAR 8
/* delete relocations for specified range c ... c + size. Unfortunatly
   in very special cases, relocations may occur unordered */
static void decl_design_delrels(Section *sec, int c, int size)
{
    ElfW_Rel *rel, *rel2, *rel_end;
    if (!sec || !sec->reloc)
        return;
    /* compact the relocation table in place, dropping entries whose
       offset falls inside [c, c + size) */
    rel = rel2 = (ElfW_Rel*)sec->reloc->data;
    rel_end = (ElfW_Rel*)(sec->reloc->data + sec->reloc->data_offset);
    while (rel < rel_end) {
        if (rel->r_offset >= c && rel->r_offset < c + size) {
            sec->reloc->data_offset -= sizeof *rel;
        } else {
            if (rel2 != rel)
                memcpy(rel2, rel, sizeof *rel);
            ++rel2;
        }
        ++rel;
    }
}
7440 static void decl_design_flex(init_params *p, Sym *ref, int index)
7442 if (ref == p->flex_array_ref) {
7443 if (index >= ref->c)
7444 ref->c = index + 1;
7445 } else if (ref->c < 0)
7446 tcc_error("flexible array has zero size in this context");
7449 /* t is the array or struct type. c is the array or struct
7450 address. cur_field is the pointer to the current
7451 field, for arrays the 'c' member contains the current start
7452 index. 'flags' is as in decl_initializer.
7453 'al' contains the already initialized length of the
7454 current container (starting at c). This returns the new length of that. */
static int decl_designator(init_params *p, CType *type, unsigned long c,
                           Sym **cur_field, int flags, int al)
{
    Sym *s, *f;
    int index, index_last, align, l, nb_elems, elem_size;
    unsigned long corig = c;

    elem_size = 0;
    nb_elems = 1;

    if (flags & DIF_HAVE_ELEM)
        goto no_designator;

    /* GNU extension: 'ident:' as an alternative to '.ident =' */
    if (gnu_ext && tok >= TOK_UIDENT) {
        l = tok, next();
        if (tok == ':')
            goto struct_field;
        unget_tok(l);
    }

    /* NOTE: we only support ranges for last designator */
    while (nb_elems == 1 && (tok == '[' || tok == '.')) {
        if (tok == '[') {
            /* array designator: [index] or GNU [first ... last] */
            if (!(type->t & VT_ARRAY))
                expect("array type");
            next();
            index = index_last = expr_const();
            if (tok == TOK_DOTS && gnu_ext) {
                next();
                index_last = expr_const();
            }
            skip(']');
            s = type->ref;
            decl_design_flex(p, s, index_last);
            if (index < 0 || index_last >= s->c || index_last < index)
                tcc_error("index exceeds array bounds or range is empty");
            if (cur_field)
                (*cur_field)->c = index_last;
            type = pointed_type(type);
            elem_size = type_size(type, &align);
            c += index * elem_size;
            nb_elems = index_last - index + 1;
        } else {
            /* struct/union member designator */
            int cumofs;
            next();
            l = tok;
        struct_field:
            next();
            f = find_field(type, l, &cumofs);
            if (cur_field)
                *cur_field = f;
            type = &f->type;
            c += cumofs;
        }
        cur_field = NULL;
    }
    if (!cur_field) {
        if (tok == '=') {
            next();
        } else if (!gnu_ext) {
            expect("=");
        }
    } else {
    no_designator:
        /* no designator: advance to the next array element resp. field */
        if (type->t & VT_ARRAY) {
            index = (*cur_field)->c;
            s = type->ref;
            decl_design_flex(p, s, index);
            if (index >= s->c)
                tcc_error("too many initializers");
            type = pointed_type(type);
            elem_size = type_size(type, &align);
            c += index * elem_size;
        } else {
            f = *cur_field;
            /* Skip bitfield padding. Also with size 32 and 64. */
            while (f && (f->v & SYM_FIRST_ANOM) &&
                   is_integer_btype(f->type.t & VT_BTYPE))
                *cur_field = f = f->next;
            if (!f)
                tcc_error("too many initializers");
            type = &f->type;
            c += f->c;
        }
    }

    if (!elem_size) /* for structs */
        elem_size = type_size(type, &align);

    /* Using designators the same element can be initialized more
       than once.  In that case we need to delete possibly already
       existing relocations. */
    if (!(flags & DIF_SIZE_ONLY) && c - corig < al) {
        decl_design_delrels(p->sec, c, elem_size * nb_elems);
        flags &= ~DIF_CLEAR; /* mark stack dirty too */
    }

    decl_initializer(p, type, c, flags & ~DIF_FIRST);

    /* for a range designator, replicate the parsed value over the
       remaining elements */
    if (!(flags & DIF_SIZE_ONLY) && nb_elems > 1) {
        Sym aref = {0};
        CType t1;
        int i;
        if (p->sec || (type->t & VT_ARRAY)) {
            /* make init_putv/vstore believe it were a struct */
            aref.c = elem_size;
            t1.t = VT_STRUCT, t1.ref = &aref;
            type = &t1;
        }
        if (p->sec)
            vpush_ref(type, p->sec, c, elem_size);
        else
            vset(type, VT_LOCAL|VT_LVAL, c);
        for (i = 1; i < nb_elems; i++) {
            vdup();
            init_putv(p, type, c + elem_size * i);
        }
        vpop();
    }

    c += nb_elems * elem_size;
    if (c - corig > al)
        al = c - corig;
    return al;
}
7581 /* store a value or an expression directly in global data or in local array */
/* Stores vtop into the object of 'type' at offset 'c': when p->sec is set
   the constant value is written directly into the section data (emitting
   relocations where a symbol address is involved); otherwise a normal
   vstore() to a stack slot is generated.  Pops one value off the stack.
   NOTE(review): blob view dropped brace-only lines; comments only added. */
7582 static void init_putv(init_params *p, CType *type, unsigned long c)
7584 int bt;
7585 void *ptr;
7586 CType dtype;
7587 int size, align;
7588 Section *sec = p->sec;
7589 uint64_t val;
7591 dtype = *type;
7592 dtype.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
7594 size = type_size(type, &align);
7595 if (type->t & VT_BITFIELD)
7596 size = (BIT_POS(type->t) + BIT_SIZE(type->t) + 7) / 8;
/* make sure the section/stack region up to c+size is allocated/zeroed */
7597 init_assert(p, c + size);
7599 if (sec) {
7600 /* XXX: not portable */
7601 /* XXX: generate error if incorrect relocation */
7602 gen_assign_cast(&dtype);
7603 bt = type->t & VT_BTYPE;
/* only pointer-sized scalars can carry a symbol-relative value that is
   resolvable via a data relocation; anything else is not a load-time
   constant */
7605 if ((vtop->r & VT_SYM)
7606 && bt != VT_PTR
7607 && (bt != (PTR_SIZE == 8 ? VT_LLONG : VT_INT)
7608 || (type->t & VT_BITFIELD))
7609 && !((vtop->r & VT_CONST) && vtop->sym->v >= SYM_FIRST_ANOM)
7611 tcc_error("initializer element is not computable at load time");
7613 if (NODATA_WANTED) {
7614 vtop--;
7615 return;
7618 ptr = sec->data + c;
7619 val = vtop->c.i;
7621 /* XXX: make code faster ? */
7622 if ((vtop->r & (VT_SYM|VT_CONST)) == (VT_SYM|VT_CONST) &&
7623 vtop->sym->v >= SYM_FIRST_ANOM &&
7624 /* XXX This rejects compound literals like
7625 '(void *){ptr}'. The problem is that '&sym' is
7626 represented the same way, which would be ruled out
7627 by the SYM_FIRST_ANOM check above, but also '"string"'
7628 in 'char *p = "string"' is represented the same
7629 with the type being VT_PTR and the symbol being an
7630 anonymous one. That is, there's no difference in vtop
7631 between '(void *){x}' and '&(void *){x}'. Ignore
7632 pointer typed entities here. Hopefully no real code
7633 will ever use compound literals with scalar type. */
7634 (vtop->type.t & VT_BTYPE) != VT_PTR) {
7635 /* These come from compound literals, memcpy stuff over. */
7636 Section *ssec;
7637 ElfSym *esym;
7638 ElfW_Rel *rel;
7639 esym = elfsym(vtop->sym);
7640 ssec = tcc_state->sections[esym->st_shndx];
7641 memmove (ptr, ssec->data + esym->st_value + (int)vtop->c.i, size);
7642 if (ssec->reloc) {
7643 /* We need to copy over all memory contents, and that
7644 includes relocations. Use the fact that relocs are
7645 created it order, so look from the end of relocs
7646 until we hit one before the copied region. */
7647 unsigned long relofs = ssec->reloc->data_offset;
7648 while (relofs >= sizeof(*rel)) {
7649 relofs -= sizeof(*rel);
7650 rel = (ElfW_Rel*)(ssec->reloc->data + relofs);
7651 if (rel->r_offset >= esym->st_value + size)
7652 continue;
7653 if (rel->r_offset < esym->st_value)
7654 break;
/* duplicate the relocation, rebased to the destination offset */
7655 put_elf_reloca(symtab_section, sec,
7656 c + rel->r_offset - esym->st_value,
7657 ELFW(R_TYPE)(rel->r_info),
7658 ELFW(R_SYM)(rel->r_info),
7659 #if PTR_SIZE == 8
7660 rel->r_addend
7661 #else
7663 #endif
7667 } else {
7668 if (type->t & VT_BITFIELD) {
/* pack 'val' into the bitfield byte by byte, preserving the bits
   outside the field */
7669 int bit_pos, bit_size, bits, n;
7670 unsigned char *p, v, m;
7671 bit_pos = BIT_POS(vtop->type.t);
7672 bit_size = BIT_SIZE(vtop->type.t);
7673 p = (unsigned char*)ptr + (bit_pos >> 3);
7674 bit_pos &= 7, bits = 0;
7675 while (bit_size) {
7676 n = 8 - bit_pos;
7677 if (n > bit_size)
7678 n = bit_size;
7679 v = val >> bits << bit_pos;
7680 m = ((1 << n) - 1) << bit_pos;
7681 *p = (*p & ~m) | (v & m);
7682 bits += n, bit_size -= n, bit_pos = 0, ++p;
7684 } else
7685 switch(bt) {
7686 case VT_BOOL:
7687 *(char *)ptr = val != 0;
7688 break;
7689 case VT_BYTE:
7690 *(char *)ptr = val;
7691 break;
7692 case VT_SHORT:
7693 write16le(ptr, val);
7694 break;
7695 case VT_FLOAT:
7696 write32le(ptr, val);
7697 break;
7698 case VT_DOUBLE:
7699 write64le(ptr, val);
7700 break;
7701 case VT_LDOUBLE:
7702 #if defined TCC_IS_NATIVE_387
7703 /* Host and target platform may be different but both have x87.
7704 On windows, tcc does not use VT_LDOUBLE, except when it is a
7705 cross compiler. In this case a mingw gcc as host compiler
7706 comes here with 10-byte long doubles, while msvc or tcc won't.
7707 tcc itself can still translate by asm.
7708 In any case we avoid possibly random bytes 11 and 12.
7710 if (sizeof (long double) >= 10)
7711 memcpy(ptr, &vtop->c.ld, 10);
7712 #ifdef __TINYC__
7713 else if (sizeof (long double) == sizeof (double))
7714 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr) : "m" (vtop->c.ld));
7715 #endif
7716 else
7717 #endif
7718 /* For other platforms it should work natively, but may not work
7719 for cross compilers */
7720 if (sizeof(long double) == LDOUBLE_SIZE)
7721 memcpy(ptr, &vtop->c.ld, LDOUBLE_SIZE);
7722 else if (sizeof(double) == LDOUBLE_SIZE)
7723 *(double*)ptr = (double)vtop->c.ld;
7724 else if (0 == memcmp(ptr, &vtop->c.ld, LDOUBLE_SIZE))
7725 ; /* nothing to do for 0.0 */
7726 #ifndef TCC_CROSS_TEST
7727 else
7728 tcc_error("can't cross compile long double constants");
7729 #endif
7730 break;
7732 #if PTR_SIZE == 8
7733 /* intptr_t may need a reloc too, see tcctest.c:relocation_test() */
7734 case VT_LLONG:
7735 case VT_PTR:
7736 if (vtop->r & VT_SYM)
7737 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
7738 else
7739 write64le(ptr, val);
7740 break;
7741 case VT_INT:
7742 write32le(ptr, val);
7743 break;
7744 #else
7745 case VT_LLONG:
7746 write64le(ptr, val);
7747 break;
7748 case VT_PTR:
7749 case VT_INT:
7750 if (vtop->r & VT_SYM)
7751 greloc(sec, vtop->sym, c, R_DATA_PTR);
7752 write32le(ptr, val);
7753 break;
7754 #endif
7755 default:
7756 //tcc_internal_error("unexpected type");
7757 break;
7760 vtop--;
7761 } else {
/* local (stack) object: generate a regular store */
7762 vset(&dtype, VT_LOCAL|VT_LVAL, c);
7763 vswap();
7764 vstore();
7765 vpop();
7769 /* 't' contains the type and storage info. 'c' is the offset of the
7770 object in section 'sec'. If 'sec' is NULL, it means stack based
7771 allocation. 'flags & DIF_FIRST' is true if array '{' must be read (multi
7772 dimension implicit array init handling). 'flags & DIF_SIZE_ONLY' is true if
7773 size only evaluation is wanted (only for arrays). */
/* NOTE(review): blob view dropped brace-only lines; comments only added. */
7774 static void decl_initializer(init_params *p, CType *type, unsigned long c, int flags)
7776 int len, n, no_oblock, i;
7777 int size1, align1;
7778 Sym *s, *f;
7779 Sym indexsym;
7780 CType *t1;
7782 /* generate line number info */
7783 if (debug_modes && !(flags & DIF_SIZE_ONLY) && !p->sec)
7784 tcc_debug_line(tcc_state), tcc_tcov_check_line (tcc_state, 1);
/* eagerly parse a scalar element unless it could be a string literal
   initializing an array (handled specially below) */
7786 if (!(flags & DIF_HAVE_ELEM) && tok != '{' &&
7787 /* In case of strings we have special handling for arrays, so
7788 don't consume them as initializer value (which would commit them
7789 to some anonymous symbol). */
7790 tok != TOK_LSTR && tok != TOK_STR &&
7791 (!(flags & DIF_SIZE_ONLY)
7792 /* a struct may be initialized from a struct of same type, as in
7793 struct {int x,y;} a = {1,2}, b = {3,4}, c[] = {a,b};
7794 In that case we need to parse the element in order to check
7795 it for compatibility below */
7796 || (type->t & VT_BTYPE) == VT_STRUCT)
7798 int ncw_prev = nocode_wanted;
7799 if ((flags & DIF_SIZE_ONLY) && !p->sec)
7800 ++nocode_wanted;
7801 parse_init_elem(!p->sec ? EXPR_ANY : EXPR_CONST);
7802 nocode_wanted = ncw_prev;
7803 flags |= DIF_HAVE_ELEM;
7806 if (type->t & VT_ARRAY) {
/* no_oblock: no opening brace was consumed for this level */
7807 no_oblock = 1;
7808 if (((flags & DIF_FIRST) && tok != TOK_LSTR && tok != TOK_STR) ||
7809 tok == '{') {
7810 skip('{');
7811 no_oblock = 0;
7814 s = type->ref;
7815 n = s->c;
7816 t1 = pointed_type(type);
7817 size1 = type_size(t1, &align1);
7819 /* only parse strings here if correct type (otherwise: handle
7820 them as ((w)char *) expressions */
7821 if ((tok == TOK_LSTR &&
7822 #ifdef TCC_TARGET_PE
7823 (t1->t & VT_BTYPE) == VT_SHORT && (t1->t & VT_UNSIGNED)
7824 #else
7825 (t1->t & VT_BTYPE) == VT_INT
7826 #endif
7827 ) || (tok == TOK_STR && (t1->t & VT_BTYPE) == VT_BYTE)) {
7828 len = 0;
7829 cstr_reset(&initstr);
7830 if (size1 != (tok == TOK_STR ? 1 : sizeof(nwchar_t)))
7831 tcc_error("unhandled string literal merging");
/* concatenate adjacent string literals, dropping the intermediate
   NUL terminators */
7832 while (tok == TOK_STR || tok == TOK_LSTR) {
7833 if (initstr.size)
7834 initstr.size -= size1;
7835 if (tok == TOK_STR)
7836 len += tokc.str.size;
7837 else
7838 len += tokc.str.size / sizeof(nwchar_t);
7839 len--;
7840 cstr_cat(&initstr, tokc.str.data, tokc.str.size);
7841 next();
7843 if (tok != ')' && tok != '}' && tok != ',' && tok != ';'
7844 && tok != TOK_EOF) {
7845 /* Not a lone literal but part of a bigger expression. */
7846 unget_tok(size1 == 1 ? TOK_STR : TOK_LSTR);
7847 tokc.str.size = initstr.size;
7848 tokc.str.data = initstr.data;
7849 goto do_init_array;
7852 decl_design_flex(p, s, len);
7853 if (!(flags & DIF_SIZE_ONLY)) {
7854 int nb = n, ch;
7855 if (len < nb)
7856 nb = len;
7857 if (len > nb)
7858 tcc_warning("initializer-string for array is too long");
7859 /* in order to go faster for common case (char
7860 string in global variable, we handle it
7861 specifically */
7862 if (p->sec && size1 == 1) {
7863 init_assert(p, c + nb);
7864 if (!NODATA_WANTED)
7865 memcpy(p->sec->data + c, initstr.data, nb);
7866 } else {
7867 for(i=0;i<n;i++) {
7868 if (i >= nb) {
7869 /* only add trailing zero if enough storage (no
7870 warning in this case since it is standard) */
7871 if (flags & DIF_CLEAR)
7872 break;
7873 if (n - i >= 4) {
7874 init_putz(p, c + i * size1, (n - i) * size1);
7875 break;
7877 ch = 0;
7878 } else if (size1 == 1)
7879 ch = ((unsigned char *)initstr.data)[i];
7880 else
7881 ch = ((nwchar_t *)initstr.data)[i];
7882 vpushi(ch);
7883 init_putv(p, t1, c + i * size1);
7887 } else {
7889 do_init_array:
/* positional index tracking for undesignated array elements */
7890 indexsym.c = 0;
7891 f = &indexsym;
7893 do_init_list:
7894 /* zero memory once in advance */
7895 if (!(flags & (DIF_CLEAR | DIF_SIZE_ONLY))) {
7896 init_putz(p, c, n*size1);
7897 flags |= DIF_CLEAR;
7900 len = 0;
7901 /* GNU extension: if the initializer is empty for a flex array,
7902 it's size is zero. We won't enter the loop, so set the size
7903 now. */
7904 decl_design_flex(p, s, len);
/* iterate over initializer elements until '}' (or list exhausted) */
7905 while (tok != '}' || (flags & DIF_HAVE_ELEM)) {
7906 len = decl_designator(p, type, c, &f, flags, len);
7907 flags &= ~DIF_HAVE_ELEM;
7908 if (type->t & VT_ARRAY) {
7909 ++indexsym.c;
7910 /* special test for multi dimensional arrays (may not
7911 be strictly correct if designators are used at the
7912 same time) */
7913 if (no_oblock && len >= n*size1)
7914 break;
7915 } else {
7916 if (s->type.t == VT_UNION)
7917 f = NULL;
7918 else
7919 f = f->next;
7920 if (no_oblock && f == NULL)
7921 break;
7924 if (tok == '}')
7925 break;
7926 skip(',');
7929 if (!no_oblock)
7930 skip('}');
/* struct initialized from a single expression of the same type */
7932 } else if ((flags & DIF_HAVE_ELEM)
7933 /* Use i_c_parameter_t, to strip toplevel qualifiers.
7934 The source type might have VT_CONSTANT set, which is
7935 of course assignable to non-const elements. */
7936 && is_compatible_unqualified_types(type, &vtop->type)) {
7937 goto one_elem;
7939 } else if ((type->t & VT_BTYPE) == VT_STRUCT) {
7940 no_oblock = 1;
7941 if ((flags & DIF_FIRST) || tok == '{') {
7942 skip('{');
7943 no_oblock = 0;
7945 s = type->ref;
7946 f = s->next;
7947 n = s->c;
/* size1 = 1 so 'len'/offsets below are in plain bytes for structs */
7948 size1 = 1;
7949 goto do_init_list;
/* scalar wrapped in braces: 'int x = {1};' */
7951 } else if (tok == '{') {
7952 if (flags & DIF_HAVE_ELEM)
7953 skip(';');
7954 next();
7955 decl_initializer(p, type, c, flags & ~DIF_HAVE_ELEM);
7956 skip('}');
7958 } else one_elem: if ((flags & DIF_SIZE_ONLY)) {
7959 /* If we supported only ISO C we wouldn't have to accept calling
7960 this on anything than an array if DIF_SIZE_ONLY (and even then
7961 only on the outermost level, so no recursion would be needed),
7962 because initializing a flex array member isn't supported.
7963 But GNU C supports it, so we need to recurse even into
7964 subfields of structs and arrays when DIF_SIZE_ONLY is set. */
7965 /* just skip expression */
7966 if (flags & DIF_HAVE_ELEM)
7967 vpop();
7968 else
7969 skip_or_save_block(NULL);
7971 } else {
7972 if (!(flags & DIF_HAVE_ELEM)) {
7973 /* This should happen only when we haven't parsed
7974 the init element above for fear of committing a
7975 string constant to memory too early. */
7976 if (tok != TOK_STR && tok != TOK_LSTR)
7977 expect("string constant");
7978 parse_init_elem(!p->sec ? EXPR_ANY : EXPR_CONST);
/* skip redundant zero-stores into already-zeroed local storage */
7980 if (!p->sec && (flags & DIF_CLEAR) /* container was already zero'd */
7981 && (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST
7982 && vtop->c.i == 0
7983 && btype_size(type->t & VT_BTYPE) /* not for fp constants */
7985 vpop();
7986 else
7987 init_putv(p, type, c);
7991 /* parse an initializer for type 't' if 'has_init' is non zero, and
7992 allocate space in local or global data space ('r' is either
7993 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
7994 variable 'v' of scope 'scope' is declared before initializers
7995 are parsed. If 'v' is zero, then a reference to the new object
7996 is put in the value stack. If 'has_init' is 2, a special parsing
7997 is done to handle string constants. */
/* NOTE(review): blob view dropped brace-only lines; comments only added. */
7998 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r,
7999 int has_init, int v, int global)
8001 int size, align, addr;
8002 TokenString *init_str = NULL;
8004 Section *sec;
8005 Sym *flexible_array;
8006 Sym *sym;
8007 int saved_nocode_wanted = nocode_wanted;
8008 #ifdef CONFIG_TCC_BCHECK
8009 int bcheck = tcc_state->do_bounds_check && !NODATA_WANTED;
8010 #endif
8011 init_params p = {0};
8013 /* Always allocate static or global variables */
8014 if (v && (r & VT_VALMASK) == VT_CONST)
8015 nocode_wanted |= DATA_ONLY_WANTED;
8017 flexible_array = NULL;
8018 size = type_size(type, &align);
8020 /* exactly one flexible array may be initialized, either the
8021 toplevel array or the last member of the toplevel struct */
8023 if (size < 0) {
8024 // error out except for top-level incomplete arrays
8025 // (arrays of incomplete types are handled in array parsing)
8026 if (!(type->t & VT_ARRAY))
8027 tcc_error("initialization of incomplete type");
8029 /* If the base type itself was an array type of unspecified size
8030 (like in 'typedef int arr[]; arr x = {1};') then we will
8031 overwrite the unknown size by the real one for this decl.
8032 We need to unshare the ref symbol holding that size. */
8033 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
8034 p.flex_array_ref = type->ref;
8036 } else if (has_init && (type->t & VT_BTYPE) == VT_STRUCT) {
/* find a trailing flexible array member of the struct, if any */
8037 Sym *field = type->ref->next;
8038 if (field) {
8039 while (field->next)
8040 field = field->next;
8041 if (field->type.t & VT_ARRAY && field->type.ref->c < 0) {
8042 flexible_array = field;
8043 p.flex_array_ref = field->type.ref;
8044 size = -1;
8049 if (size < 0) {
8050 /* If unknown size, do a dry-run 1st pass */
8051 if (!has_init)
8052 tcc_error("unknown type size");
8053 if (has_init == 2) {
8054 /* only get strings */
8055 init_str = tok_str_alloc();
8056 while (tok == TOK_STR || tok == TOK_LSTR) {
8057 tok_str_add_tok(init_str);
8058 next();
8060 tok_str_add(init_str, TOK_EOF);
8061 } else
/* record the whole brace-enclosed initializer for replay */
8062 skip_or_save_block(&init_str);
8063 unget_tok(0);
8065 /* compute size */
8066 begin_macro(init_str, 1);
8067 next();
8068 decl_initializer(&p, type, 0, DIF_FIRST | DIF_SIZE_ONLY);
8069 /* prepare second initializer parsing */
8070 macro_ptr = init_str->str;
8071 next();
8073 /* if still unknown size, error */
8074 size = type_size(type, &align);
8075 if (size < 0)
8076 tcc_error("unknown type size");
8078 /* If there's a flex member and it was used in the initializer
8079 adjust size. */
8080 if (flexible_array && flexible_array->type.ref->c > 0)
8081 size += flexible_array->type.ref->c
8082 * pointed_size(&flexible_array->type);
8085 /* take into account specified alignment if bigger */
8086 if (ad->a.aligned) {
8087 int speca = 1 << (ad->a.aligned - 1);
8088 if (speca > align)
8089 align = speca;
8090 } else if (ad->a.packed) {
8091 align = 1;
8094 if (!v && NODATA_WANTED)
8095 size = 0, align = 1;
8097 if ((r & VT_VALMASK) == VT_LOCAL) {
/* stack-based object */
8098 sec = NULL;
8099 #ifdef CONFIG_TCC_BCHECK
8100 if (bcheck && v) {
8101 /* add padding between stack variables for bound checking */
8102 loc -= align;
8104 #endif
8105 loc = (loc - size) & -align;
8106 addr = loc;
8107 p.local_offset = addr + size;
8108 #ifdef CONFIG_TCC_BCHECK
8109 if (bcheck && v) {
8110 /* add padding between stack variables for bound checking */
8111 loc -= align;
8113 #endif
8114 if (v) {
8115 /* local variable */
8116 #ifdef CONFIG_TCC_ASM
8117 if (ad->asm_label) {
8118 int reg = asm_parse_regvar(ad->asm_label);
8119 if (reg >= 0)
8120 r = (r & ~VT_VALMASK) | reg;
8122 #endif
8123 sym = sym_push(v, type, r, addr);
8124 if (ad->cleanup_func) {
/* register a cleanup (attribute((cleanup))) for scope exit */
8125 Sym *cls = sym_push2(&all_cleanups,
8126 SYM_FIELD | ++cur_scope->cl.n, 0, 0);
8127 cls->prev_tok = sym;
8128 cls->next = ad->cleanup_func;
8129 cls->ncl = cur_scope->cl.s;
8130 cur_scope->cl.s = cls;
8133 sym->a = ad->a;
8134 } else {
8135 /* push local reference */
8136 vset(type, r, addr);
8138 } else {
/* static or global object: pick/allocate a section */
8139 sym = NULL;
8140 if (v && global) {
8141 /* see if the symbol was already defined */
8142 sym = sym_find(v);
8143 if (sym) {
8144 if (p.flex_array_ref && (sym->type.t & type->t & VT_ARRAY)
8145 && sym->type.ref->c > type->ref->c) {
8146 /* flex array was already declared with explicit size
8147 extern int arr[10];
8148 int arr[] = { 1,2,3 }; */
8149 type->ref->c = sym->type.ref->c;
8150 size = type_size(type, &align);
8152 patch_storage(sym, ad, type);
8153 /* we accept several definitions of the same global variable. */
8154 if (!has_init && sym->c && elfsym(sym)->st_shndx != SHN_UNDEF)
8155 goto no_alloc;
8159 /* allocate symbol in corresponding section */
8160 sec = ad->section;
8161 if (!sec) {
8162 CType *tp = type;
8163 while ((tp->t & (VT_BTYPE|VT_ARRAY)) == (VT_PTR|VT_ARRAY))
8164 tp = &tp->ref->type;
8165 if (tp->t & VT_CONSTANT) {
8166 sec = rodata_section;
8167 } else if (has_init) {
8168 sec = data_section;
8169 /*if (tcc_state->g_debug & 4)
8170 tcc_warning("rw data: %s", get_tok_str(v, 0));*/
8171 } else if (tcc_state->nocommon)
8172 sec = bss_section;
8175 if (sec) {
8176 addr = section_add(sec, size, align);
8177 #ifdef CONFIG_TCC_BCHECK
8178 /* add padding if bound check */
8179 if (bcheck)
8180 section_add(sec, 1, 1);
8181 #endif
8182 } else {
8183 addr = align; /* SHN_COMMON is special, symbol value is align */
8184 sec = common_section;
8187 if (v) {
8188 if (!sym) {
8189 sym = sym_push(v, type, r | VT_SYM, 0);
8190 patch_storage(sym, ad, NULL);
8192 /* update symbol definition */
8193 put_extern_sym(sym, sec, addr, size);
8194 } else {
8195 /* push global reference */
8196 vpush_ref(type, sec, addr, size);
8197 sym = vtop->sym;
8198 vtop->r |= r;
8201 #ifdef CONFIG_TCC_BCHECK
8202 /* handles bounds now because the symbol must be defined
8203 before for the relocation */
8204 if (bcheck) {
8205 addr_t *bounds_ptr;
8207 greloca(bounds_section, sym, bounds_section->data_offset, R_DATA_PTR, 0);
8208 /* then add global bound info */
8209 bounds_ptr = section_ptr_add(bounds_section, 2 * sizeof(addr_t));
8210 bounds_ptr[0] = 0; /* relocated */
8211 bounds_ptr[1] = size;
8213 #endif
8216 if (type->t & VT_VLA) {
/* runtime-sized object: emit stack allocation code instead of
   a fixed slot */
8217 int a;
8219 if (NODATA_WANTED)
8220 goto no_alloc;
8222 /* save before-VLA stack pointer if needed */
8223 if (cur_scope->vla.num == 0) {
8224 if (cur_scope->prev && cur_scope->prev->vla.num) {
8225 cur_scope->vla.locorig = cur_scope->prev->vla.loc;
8226 } else {
8227 gen_vla_sp_save(loc -= PTR_SIZE);
8228 cur_scope->vla.locorig = loc;
8232 vpush_type_size(type, &a);
8233 gen_vla_alloc(type, a);
8234 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
8235 /* on _WIN64, because of the function args scratch area, the
8236 result of alloca differs from RSP and is returned in RAX. */
8237 gen_vla_result(addr), addr = (loc -= PTR_SIZE);
8238 #endif
8239 gen_vla_sp_save(addr);
8240 cur_scope->vla.loc = addr;
8241 cur_scope->vla.num++;
8242 } else if (has_init) {
8243 p.sec = sec;
8244 decl_initializer(&p, type, addr, DIF_FIRST);
8245 /* patch flexible array member size back to -1, */
8246 /* for possible subsequent similar declarations */
8247 if (flexible_array)
8248 flexible_array->type.ref->c = -1;
8251 no_alloc:
8252 /* restore parse state if needed */
8253 if (init_str) {
8254 end_macro();
8255 next();
8258 nocode_wanted = saved_nocode_wanted;
8261 /* generate vla code saved in post_type() */
/* Recursively walks the parameter's type chain and, for each VLA level
   that has a saved size-expression token string, reserves a stack slot,
   replays the expression through the macro stream, multiplies by the
   element size and stores the resulting byte size into the slot. */
8262 static void func_vla_arg_code(Sym *arg)
8264 int align;
8265 TokenString *vla_array_tok = NULL;
/* inner dimensions first so outer sizes can use them */
8267 if (arg->type.ref)
8268 func_vla_arg_code(arg->type.ref);
8270 if ((arg->type.t & VT_VLA) && arg->type.ref->vla_array_str) {
/* stack slot that will hold the computed dimension */
8271 loc -= type_size(&int_type, &align);
8272 loc &= -align;
8273 arg->type.ref->c = loc;
8275 unget_tok(0);
8276 vla_array_tok = tok_str_alloc();
8277 vla_array_tok->str = arg->type.ref->vla_array_str;
/* replay the saved size expression as if it were source text */
8278 begin_macro(vla_array_tok, 1);
8279 next();
8280 gexpr();
8281 end_macro();
8282 next();
8283 vpush_type_size(&arg->type.ref->type, &align);
8284 gen_op('*');
/* store dimension * element-size into the reserved slot */
8285 vset(&int_type, VT_LOCAL|VT_LVAL, arg->type.ref->c);
8286 vswap();
8287 vstore();
8288 vpop();
8292 static void func_vla_arg(Sym *sym)
8294 Sym *arg;
8296 for (arg = sym->type.ref->next; arg; arg = arg->next)
8297 if ((arg->type.t & VT_BTYPE) == VT_PTR && (arg->type.ref->type.t & VT_VLA))
8298 func_vla_arg_code(arg->type.ref);
8301 /* parse a function defined by symbol 'sym' and generate its code in
8302 'cur_text_section' */
/* NOTE(review): blob view dropped brace-only lines; comments only added. */
8303 static void gen_function(Sym *sym)
8305 struct scope f = { 0 };
8306 cur_scope = root_scope = &f;
8307 nocode_wanted = 0;
8309 cur_text_section->sh_flags |= SHF_EXECINSTR;
8310 ind = cur_text_section->data_offset;
8311 if (sym->a.aligned) {
/* pad with nops up to the requested function alignment */
8312 size_t newoff = section_add(cur_text_section, 0,
8313 1 << (sym->a.aligned - 1));
8314 gen_fill_nops(newoff - ind);
8317 funcname = get_tok_str(sym->v, NULL);
8318 func_ind = ind;
8319 func_vt = sym->type.ref->type;
8320 func_var = sym->type.ref->f.func_type == FUNC_ELLIPSIS;
8322 /* NOTE: we patch the symbol size later */
8323 put_extern_sym(sym, cur_text_section, ind, 0);
/* constructor/destructor attributes: register in the ELF init arrays */
8325 if (sym->type.ref->f.func_ctor)
8326 add_array (tcc_state, ".init_array", sym->c);
8327 if (sym->type.ref->f.func_dtor)
8328 add_array (tcc_state, ".fini_array", sym->c);
8330 /* put debug symbol */
8331 tcc_debug_funcstart(tcc_state, sym);
8333 /* push a dummy symbol to enable local sym storage */
8334 sym_push2(&local_stack, SYM_FIELD, 0, 0);
8335 local_scope = 1; /* for function parameters */
8336 gfunc_prolog(sym);
8337 tcc_debug_prolog_epilog(tcc_state, 0);
8339 local_scope = 0;
8340 rsym = 0;
8341 clear_temp_local_var_list();
8342 func_vla_arg(sym);
/* compile the function body; rsym collects 'return' jumps */
8343 block(0);
8344 gsym(rsym);
8346 nocode_wanted = 0;
8347 /* reset local stack */
8348 pop_local_syms(NULL, 0);
8349 tcc_debug_prolog_epilog(tcc_state, 1);
8350 gfunc_epilog();
8352 /* end of function */
8353 tcc_debug_funcend(tcc_state, ind - func_ind);
8355 /* patch symbol size */
8356 elfsym(sym)->st_size = ind - func_ind;
8358 cur_text_section->data_offset = ind;
8359 local_scope = 0;
8360 label_pop(&global_label_stack, NULL, 0);
8361 sym_pop(&all_cleanups, NULL, 0);
8363 /* It's better to crash than to generate wrong code */
8364 cur_text_section = NULL;
8365 funcname = ""; /* for safety */
8366 func_vt.t = VT_VOID; /* for safety */
8367 func_var = 0; /* for safety */
8368 ind = 0; /* for safety */
8369 func_ind = -1;
8370 nocode_wanted = DATA_ONLY_WANTED;
8371 check_vstack();
8373 /* do this after funcend debug info */
8374 next();
/* Generate code for all recorded 'static inline' functions that turned
   out to be referenced (or forced non-internal).  Iterates to a fixed
   point since generating one inline function can reference another. */
8377 static void gen_inline_functions(TCCState *s)
8379 Sym *sym;
8380 int inline_generated, i;
8381 struct InlineFunc *fn;
/* synthetic buffer so error messages have a file context */
8383 tcc_open_bf(s, ":inline:", 0);
8384 /* iterate while inline function are referenced */
8385 do {
8386 inline_generated = 0;
8387 for (i = 0; i < s->nb_inline_fns; ++i) {
8388 fn = s->inline_fns[i];
8389 sym = fn->sym;
8390 if (sym && (sym->c || !(sym->type.t & VT_INLINE))) {
8391 /* the function was used or forced (and then not internal):
8392 generate its code and convert it to a normal function */
8393 fn->sym = NULL;
8394 tccpp_putfile(fn->filename);
/* replay the saved token stream of the function body */
8395 begin_macro(fn->func_str, 1);
8396 next();
8397 cur_text_section = text_section;
8398 gen_function(sym);
8399 end_macro();
8401 inline_generated = 1;
8404 } while (inline_generated);
8405 tcc_close();
8408 static void free_inline_functions(TCCState *s)
8410 int i;
8411 /* free tokens of unused inline functions */
8412 for (i = 0; i < s->nb_inline_fns; ++i) {
8413 struct InlineFunc *fn = s->inline_fns[i];
8414 if (fn->sym)
8415 tok_str_free(fn->func_str);
8417 dynarray_reset(&s->inline_fns, &s->nb_inline_fns);
8420 static void do_Static_assert(void)
8422 int c;
8423 const char *msg;
8425 next();
8426 skip('(');
8427 c = expr_const();
8428 msg = "_Static_assert fail";
8429 if (tok == ',') {
8430 next();
8431 msg = parse_mult_str("string constant")->data;
8433 skip(')');
8434 if (c == 0)
8435 tcc_error("%s", msg);
8436 skip(';');
8439 /* 'l' is VT_LOCAL or VT_CONST to define default storage type
8440 or VT_CMP if parsing old style parameter list
8441 or VT_JMP if parsing c99 for decl: for (int i = 0, ...) */
/* Top-level declaration parser: handles one or more declarations (or a
   function definition) until no further base type can be read.  Returns
   1 only in the VT_JMP (for-declaration) case when a declarator was seen.
   NOTE(review): blob view dropped brace-only lines; comments only added. */
8442 static int decl(int l)
8444 int v, has_init, r, oldint;
8445 CType type, btype;
8446 Sym *sym;
8447 AttributeDef ad, adbase;
8449 while (1) {
8451 oldint = 0;
/* read the declaration-specifiers; on failure decide whether this is
   the end of the declaration list or a special construct */
8452 if (!parse_btype(&btype, &adbase, l == VT_LOCAL)) {
8453 if (l == VT_JMP)
8454 return 0;
8455 /* skip redundant ';' if not in old parameter decl scope */
8456 if (tok == ';' && l != VT_CMP) {
8457 next();
8458 continue;
8460 if (tok == TOK_STATIC_ASSERT) {
8461 do_Static_assert();
8462 continue;
8464 if (l != VT_CONST)
8465 break;
8466 if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
8467 /* global asm block */
8468 asm_global_instr();
8469 continue;
8471 if (tok >= TOK_UIDENT) {
8472 /* special test for old K&R protos without explicit int
8473 type. Only accepted when defining global data */
8474 btype.t = VT_INT;
8475 oldint = 1;
8476 } else {
8477 if (tok != TOK_EOF)
8478 expect("declaration");
8479 break;
/* bare 'struct foo;' / 'enum bar;' style declarations */
8483 if (tok == ';') {
8484 if ((btype.t & VT_BTYPE) == VT_STRUCT) {
8485 v = btype.ref->v;
8486 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) >= SYM_FIRST_ANOM)
8487 tcc_warning("unnamed struct/union that defines no instances");
8488 next();
8489 continue;
8491 if (IS_ENUM(btype.t)) {
8492 next();
8493 continue;
8497 while (1) { /* iterate thru each declaration */
8498 type = btype;
8499 ad = adbase;
8500 type_decl(&type, &ad, &v, TYPE_DIRECT);
8501 #if 0
8503 char buf[500];
8504 type_to_str(buf, sizeof(buf), &type, get_tok_str(v, NULL));
8505 printf("type = '%s'\n", buf);
8507 #endif
8508 if ((type.t & VT_BTYPE) == VT_FUNC) {
8509 if ((type.t & VT_STATIC) && (l != VT_CONST))
8510 tcc_error("function without file scope cannot be static");
8511 /* if old style function prototype, we accept a
8512 declaration list */
8513 sym = type.ref;
8514 if (sym->f.func_type == FUNC_OLD && l == VT_CONST) {
8515 func_vt = type;
/* recursively parse the K&R parameter declaration list */
8516 decl(VT_CMP);
8518 #if defined TCC_TARGET_MACHO || defined TARGETOS_ANDROID
8519 if (sym->f.func_alwinl
8520 && ((type.t & (VT_EXTERN | VT_INLINE))
8521 == (VT_EXTERN | VT_INLINE))) {
8522 /* always_inline functions must be handled as if they
8523 don't generate multiple global defs, even if extern
8524 inline, i.e. GNU inline semantics for those. Rewrite
8525 them into static inline. */
8526 type.t &= ~VT_EXTERN;
8527 type.t |= VT_STATIC;
8529 #endif
8530 /* always compile 'extern inline' */
8531 if (type.t & VT_EXTERN)
8532 type.t &= ~VT_INLINE;
8534 } else if (oldint) {
8535 tcc_warning("type defaults to int");
8538 if (gnu_ext && (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
8539 ad.asm_label = asm_label_instr();
8540 /* parse one last attribute list, after asm label */
8541 parse_attribute(&ad);
8542 #if 0
8543 /* gcc does not allow __asm__("label") with function definition,
8544 but why not ... */
8545 if (tok == '{')
8546 expect(";");
8547 #endif
8550 #ifdef TCC_TARGET_PE
8551 if (ad.a.dllimport || ad.a.dllexport) {
8552 if (type.t & VT_STATIC)
8553 tcc_error("cannot have dll linkage with static");
8554 if (type.t & VT_TYPEDEF) {
8555 tcc_warning("'%s' attribute ignored for typedef",
8556 ad.a.dllimport ? (ad.a.dllimport = 0, "dllimport") :
8557 (ad.a.dllexport = 0, "dllexport"));
8558 } else if (ad.a.dllimport) {
8559 if ((type.t & VT_BTYPE) == VT_FUNC)
8560 ad.a.dllimport = 0;
8561 else
8562 type.t |= VT_EXTERN;
8565 #endif
/* '{' after a declarator at file scope: function definition */
8566 if (tok == '{') {
8567 if (l != VT_CONST)
8568 tcc_error("cannot use local functions");
8569 if ((type.t & VT_BTYPE) != VT_FUNC)
8570 expect("function definition");
8572 /* reject abstract declarators in function definition
8573 make old style params without decl have int type */
8574 sym = type.ref;
8575 while ((sym = sym->next) != NULL) {
8576 if (!(sym->v & ~SYM_FIELD))
8577 expect("identifier");
8578 if (sym->type.t == VT_VOID)
8579 sym->type = int_type;
8582 /* apply post-declaraton attributes */
8583 merge_funcattr(&type.ref->f, &ad.f);
8585 /* put function symbol */
8586 type.t &= ~VT_EXTERN;
8587 sym = external_sym(v, &type, 0, &ad);
8589 /* static inline functions are just recorded as a kind
8590 of macro. Their code will be emitted at the end of
8591 the compilation unit only if they are used */
8592 if (sym->type.t & VT_INLINE) {
8593 struct InlineFunc *fn;
8594 fn = tcc_malloc(sizeof *fn + strlen(file->filename));
8595 strcpy(fn->filename, file->filename);
8596 fn->sym = sym;
8597 dynarray_add(&tcc_state->inline_fns,
8598 &tcc_state->nb_inline_fns, fn);
8599 skip_or_save_block(&fn->func_str);
8600 } else {
8601 /* compute text section */
8602 cur_text_section = ad.section;
8603 if (!cur_text_section)
8604 cur_text_section = text_section;
8605 gen_function(sym);
8607 break;
8608 } else {
8609 if (l == VT_CMP) {
8610 /* find parameter in function parameter list */
8611 for (sym = func_vt.ref->next; sym; sym = sym->next)
8612 if ((sym->v & ~SYM_FIELD) == v)
8613 goto found;
8614 tcc_error("declaration for parameter '%s' but no such parameter",
8615 get_tok_str(v, NULL));
8616 found:
8617 if (type.t & VT_STORAGE) /* 'register' is okay */
8618 tcc_error("storage class specified for '%s'",
8619 get_tok_str(v, NULL));
8620 if (sym->type.t != VT_VOID)
8621 tcc_error("redefinition of parameter '%s'",
8622 get_tok_str(v, NULL));
8623 convert_parameter_type(&type);
8624 sym->type = type;
8625 } else if (type.t & VT_TYPEDEF) {
8626 /* save typedefed type */
8627 /* XXX: test storage specifiers ? */
8628 sym = sym_find(v);
8629 if (sym && sym->sym_scope == local_scope) {
8630 if (!is_compatible_types(&sym->type, &type)
8631 || !(sym->type.t & VT_TYPEDEF))
8632 tcc_error("incompatible redefinition of '%s'",
8633 get_tok_str(v, NULL));
8634 sym->type = type;
8635 } else {
8636 sym = sym_push(v, &type, 0, 0);
8638 sym->a = ad.a;
8639 if ((type.t & VT_BTYPE) == VT_FUNC)
8640 merge_funcattr(&sym->type.ref->f, &ad.f);
8641 if (debug_modes)
8642 tcc_debug_typedef (tcc_state, sym);
8643 } else if ((type.t & VT_BTYPE) == VT_VOID
8644 && !(type.t & VT_EXTERN)) {
8645 tcc_error("declaration of void object");
8646 } else {
/* object or function declaration (possibly with initializer) */
8647 r = 0;
8648 if ((type.t & VT_BTYPE) == VT_FUNC) {
8649 /* external function definition */
8650 /* specific case for func_call attribute */
8651 merge_funcattr(&type.ref->f, &ad.f);
8652 } else if (!(type.t & VT_ARRAY)) {
8653 /* not lvalue if array */
8654 r |= VT_LVAL;
8656 has_init = (tok == '=');
8657 if (has_init && (type.t & VT_VLA))
8658 tcc_error("variable length array cannot be initialized");
8660 if (((type.t & VT_EXTERN) && (!has_init || l != VT_CONST))
8661 || (type.t & VT_BTYPE) == VT_FUNC
8662 /* as with GCC, uninitialized global arrays with no size
8663 are considered extern: */
8664 || ((type.t & VT_ARRAY) && !has_init
8665 && l == VT_CONST && type.ref->c < 0)
8667 /* external variable or function */
8668 type.t |= VT_EXTERN;
8669 sym = external_sym(v, &type, r, &ad);
8670 } else {
8671 if (l == VT_CONST || (type.t & VT_STATIC))
8672 r |= VT_CONST;
8673 else
8674 r |= VT_LOCAL;
8675 if (has_init)
8676 next();
8677 else if (l == VT_CONST)
8678 /* uninitialized global variables may be overridden */
8679 type.t |= VT_EXTERN;
8680 decl_initializer_alloc(&type, &ad, r, has_init, v, l == VT_CONST);
8683 if (ad.alias_target && l == VT_CONST) {
8684 /* Aliases need to be emitted when their target symbol
8685 is emitted, even if perhaps unreferenced.
8686 We only support the case where the base is already
8687 defined, otherwise we would need deferring to emit
8688 the aliases until the end of the compile unit. */
8689 Sym *alias_target = sym_find(ad.alias_target);
8690 ElfSym *esym = elfsym(alias_target);
8691 if (!esym)
8692 tcc_error("unsupported forward __alias__ attribute");
8693 put_extern_sym2(sym_find(v), esym->st_shndx,
8694 esym->st_value, esym->st_size, 1);
8697 if (tok != ',') {
8698 if (l == VT_JMP)
8699 return 1;
8700 skip(';');
8701 break;
8703 next();
8707 return 0;
8710 /* ------------------------------------------------------------------------- */
8711 #undef gjmp_addr
8712 #undef gjmp
8713 /* ------------------------------------------------------------------------- */