/* tccgen.c — TinyCC code generator.
   Extracted from the tinycc.git repository web view
   (commit "arm bits", blob e17597a3dd11cb7888a0486e939a7e9d60bc8cfb). */
1 /*
2 * TCC - Tiny C Compiler
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 #define USING_GLOBALS
22 #include "tcc.h"
24 /********************************************************/
25 /* global variables */
27 /* loc : local variable index
28 ind : output code index
29 rsym: return symbol
30 anon_sym: anonymous symbol index
32 ST_DATA int rsym, anon_sym, ind, loc;
34 ST_DATA Sym *global_stack;
35 ST_DATA Sym *local_stack;
36 ST_DATA Sym *define_stack;
37 ST_DATA Sym *global_label_stack;
38 ST_DATA Sym *local_label_stack;
40 static Sym *sym_free_first;
41 static void **sym_pools;
42 static int nb_sym_pools;
44 static Sym *all_cleanups, *pending_gotos;
45 static int local_scope;
46 ST_DATA char debug_modes;
48 ST_DATA SValue *vtop;
49 static SValue _vstack[1 + VSTACK_SIZE];
50 #define vstack (_vstack + 1)
52 ST_DATA int nocode_wanted; /* no code generation wanted */
53 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
54 #define DATA_ONLY_WANTED 0x80000000 /* ON outside of functions and for static initializers */
56 /* no code output after unconditional jumps such as with if (0) ... */
57 #define CODE_OFF_BIT 0x20000000
58 #define CODE_OFF() if(!nocode_wanted)(nocode_wanted |= CODE_OFF_BIT)
59 #define CODE_ON() (nocode_wanted &= ~CODE_OFF_BIT)
61 /* no code output when parsing sizeof()/typeof() etc. (using nocode_wanted++/--) */
62 #define NOEVAL_MASK 0x0000FFFF
63 #define NOEVAL_WANTED (nocode_wanted & NOEVAL_MASK)
65 /* no code output when parsing constant expressions */
66 #define CONST_WANTED_BIT 0x00010000
67 #define CONST_WANTED_MASK 0x0FFF0000
68 #define CONST_WANTED (nocode_wanted & CONST_WANTED_MASK)
70 ST_DATA int global_expr; /* true if compound literals must be allocated globally (used during initializers parsing */
71 ST_DATA CType func_vt; /* current function return type (used by return instruction) */
72 ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
73 ST_DATA int func_vc;
74 ST_DATA int func_ind;
75 ST_DATA const char *funcname;
76 ST_DATA CType int_type, func_old_type, char_type, char_pointer_type;
77 static CString initstr;
79 #if PTR_SIZE == 4
80 #define VT_SIZE_T (VT_INT | VT_UNSIGNED)
81 #define VT_PTRDIFF_T VT_INT
82 #elif LONG_SIZE == 4
83 #define VT_SIZE_T (VT_LLONG | VT_UNSIGNED)
84 #define VT_PTRDIFF_T VT_LLONG
85 #else
86 #define VT_SIZE_T (VT_LONG | VT_LLONG | VT_UNSIGNED)
87 #define VT_PTRDIFF_T (VT_LONG | VT_LLONG)
88 #endif
90 static struct switch_t {
91 struct case_t {
92 int64_t v1, v2;
93 int ind, line;
94 } **p; int n; /* list of case ranges */
95 int def_sym; /* default symbol */
96 int nocode_wanted;
97 int *bsym;
98 struct scope *scope;
99 struct switch_t *prev;
100 SValue sv;
101 } *cur_switch; /* current switch */
103 #define MAX_TEMP_LOCAL_VARIABLE_NUMBER 8
104 /*list of temporary local variables on the stack in current function. */
105 static struct temp_local_variable {
106 int location; //offset on stack. Svalue.c.i
107 short size;
108 short align;
109 } arr_temp_local_vars[MAX_TEMP_LOCAL_VARIABLE_NUMBER];
110 static int nb_temp_local_vars;
112 static struct scope {
113 struct scope *prev;
114 struct { int loc, locorig, num; } vla;
115 struct { Sym *s; int n; } cl;
116 int *bsym, *csym;
117 Sym *lstk, *llstk;
118 } *cur_scope, *loop_scope, *root_scope;
120 typedef struct {
121 Section *sec;
122 int local_offset;
123 Sym *flex_array_ref;
124 } init_params;
126 #if 1
127 #define precedence_parser
128 static void init_prec(void);
129 #endif
131 static void block(int flags);
132 #define STMT_EXPR 1
133 #define STMT_COMPOUND 2
135 static void gen_cast(CType *type);
136 static void gen_cast_s(int t);
137 static inline CType *pointed_type(CType *type);
138 static int is_compatible_types(CType *type1, CType *type2);
139 static int parse_btype(CType *type, AttributeDef *ad, int ignore_label);
140 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td);
141 static void parse_expr_type(CType *type);
142 static void init_putv(init_params *p, CType *type, unsigned long c);
143 static void decl_initializer(init_params *p, CType *type, unsigned long c, int flags);
144 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
145 static int decl(int l);
146 static void expr_eq(void);
147 static void vpush_type_size(CType *type, int *a);
148 static int is_compatible_unqualified_types(CType *type1, CType *type2);
149 static inline int64_t expr_const64(void);
150 static void vpush64(int ty, unsigned long long v);
151 static void vpush(CType *type);
152 static int gvtst(int inv, int t);
153 static void gen_inline_functions(TCCState *s);
154 static void free_inline_functions(TCCState *s);
155 static void skip_or_save_block(TokenString **str);
156 static void gv_dup(void);
157 static int get_temp_local_var(int size,int align,int *r2);
158 static void cast_error(CType *st, CType *dt);
159 static void end_switch(void);
160 static void do_Static_assert(void);
162 /* ------------------------------------------------------------------------- */
163 /* Automagical code suppression */
165 /* Clear 'nocode_wanted' at forward label if it was used */
166 ST_FUNC void gsym(int t)
168 if (t) {
169 gsym_addr(t, ind);
170 CODE_ON();
174 /* Clear 'nocode_wanted' if current pc is a label */
175 static int gind()
177 int t = ind;
178 CODE_ON();
179 if (debug_modes)
180 tcc_tcov_block_begin(tcc_state);
181 return t;
184 /* Set 'nocode_wanted' after unconditional (backwards) jump */
185 static void gjmp_addr_acs(int t)
187 gjmp_addr(t);
188 CODE_OFF();
191 /* Set 'nocode_wanted' after unconditional (forwards) jump */
192 static int gjmp_acs(int t)
194 t = gjmp(t);
195 CODE_OFF();
196 return t;
199 /* These are #undef'd at the end of this file */
200 #define gjmp_addr gjmp_addr_acs
201 #define gjmp gjmp_acs
202 /* ------------------------------------------------------------------------- */
204 ST_INLN int is_float(int t)
206 int bt = t & VT_BTYPE;
207 return bt == VT_LDOUBLE
208 || bt == VT_DOUBLE
209 || bt == VT_FLOAT
210 || bt == VT_QFLOAT;
213 static inline int is_integer_btype(int bt)
215 return bt == VT_BYTE
216 || bt == VT_BOOL
217 || bt == VT_SHORT
218 || bt == VT_INT
219 || bt == VT_LLONG;
222 static int btype_size(int bt)
224 return bt == VT_BYTE || bt == VT_BOOL ? 1 :
225 bt == VT_SHORT ? 2 :
226 bt == VT_INT ? 4 :
227 bt == VT_LLONG ? 8 :
228 bt == VT_PTR ? PTR_SIZE : 0;
231 /* returns function return register from type */
232 static int R_RET(int t)
234 if (!is_float(t))
235 return REG_IRET;
236 #ifdef TCC_TARGET_X86_64
237 if ((t & VT_BTYPE) == VT_LDOUBLE)
238 return TREG_ST0;
239 #elif defined TCC_TARGET_RISCV64
240 if ((t & VT_BTYPE) == VT_LDOUBLE)
241 return REG_IRET;
242 #endif
243 return REG_FRET;
246 /* returns 2nd function return register, if any */
247 static int R2_RET(int t)
249 t &= VT_BTYPE;
250 #if PTR_SIZE == 4
251 if (t == VT_LLONG)
252 return REG_IRE2;
253 #elif defined TCC_TARGET_X86_64
254 if (t == VT_QLONG)
255 return REG_IRE2;
256 if (t == VT_QFLOAT)
257 return REG_FRE2;
258 #elif defined TCC_TARGET_RISCV64
259 if (t == VT_LDOUBLE)
260 return REG_IRE2;
261 #endif
262 return VT_CONST;
265 /* returns true for two-word types */
266 #define USING_TWO_WORDS(t) (R2_RET(t) != VT_CONST)
268 /* put function return registers to stack value */
269 static void PUT_R_RET(SValue *sv, int t)
271 sv->r = R_RET(t), sv->r2 = R2_RET(t);
274 /* returns function return register class for type t */
275 static int RC_RET(int t)
277 return reg_classes[R_RET(t)] & ~(RC_FLOAT | RC_INT);
280 /* returns generic register class for type t */
281 static int RC_TYPE(int t)
283 if (!is_float(t))
284 return RC_INT;
285 #ifdef TCC_TARGET_X86_64
286 if ((t & VT_BTYPE) == VT_LDOUBLE)
287 return RC_ST0;
288 if ((t & VT_BTYPE) == VT_QFLOAT)
289 return RC_FRET;
290 #elif defined TCC_TARGET_RISCV64
291 if ((t & VT_BTYPE) == VT_LDOUBLE)
292 return RC_INT;
293 #endif
294 return RC_FLOAT;
297 /* returns 2nd register class corresponding to t and rc */
298 static int RC2_TYPE(int t, int rc)
300 if (!USING_TWO_WORDS(t))
301 return 0;
302 #ifdef RC_IRE2
303 if (rc == RC_IRET)
304 return RC_IRE2;
305 #endif
306 #ifdef RC_FRE2
307 if (rc == RC_FRET)
308 return RC_FRE2;
309 #endif
310 if (rc & RC_FLOAT)
311 return RC_FLOAT;
312 return RC_INT;
315 /* we use our own 'finite' function to avoid potential problems with
316 non standard math libs */
317 /* XXX: endianness dependent */
318 ST_FUNC int ieee_finite(double d)
320 int p[4];
321 memcpy(p, &d, sizeof(double));
322 return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
325 /* compiling intel long double natively */
326 #if (defined __i386__ || defined __x86_64__) \
327 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
328 # define TCC_IS_NATIVE_387
329 #endif
331 ST_FUNC void test_lvalue(void)
333 if (!(vtop->r & VT_LVAL))
334 expect("lvalue");
337 ST_FUNC void check_vstack(void)
339 if (vtop != vstack - 1)
340 tcc_error("internal compiler error: vstack leak (%d)",
341 (int)(vtop - vstack + 1));
/* vstack debugging aid (disabled) */
#if 0
void pv (const char *lbl, int a, int b)
{
    int i;
    for (i = a; i < a + b; ++i) {
        SValue *p = &vtop[-i];
        printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
               lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
    }
}
#endif
357 /* ------------------------------------------------------------------------- */
358 /* initialize vstack and types. This must be done also for tcc -E */
359 ST_FUNC void tccgen_init(TCCState *s1)
361 vtop = vstack - 1;
362 memset(vtop, 0, sizeof *vtop);
364 /* define some often used types */
365 int_type.t = VT_INT;
367 char_type.t = VT_BYTE;
368 if (s1->char_is_unsigned)
369 char_type.t |= VT_UNSIGNED;
370 char_pointer_type = char_type;
371 mk_pointer(&char_pointer_type);
373 func_old_type.t = VT_FUNC;
374 func_old_type.ref = sym_push(SYM_FIELD, &int_type, 0, 0);
375 func_old_type.ref->f.func_call = FUNC_CDECL;
376 func_old_type.ref->f.func_type = FUNC_OLD;
377 #ifdef precedence_parser
378 init_prec();
379 #endif
380 cstr_new(&initstr);
383 ST_FUNC int tccgen_compile(TCCState *s1)
385 funcname = "";
386 func_ind = -1;
387 anon_sym = SYM_FIRST_ANOM;
388 nocode_wanted = DATA_ONLY_WANTED; /* no code outside of functions */
389 debug_modes = (s1->do_debug ? 1 : 0) | s1->test_coverage << 1;
391 tcc_debug_start(s1);
392 tcc_tcov_start (s1);
393 #ifdef TCC_TARGET_ARM
394 arm_init(s1);
395 #endif
396 #ifdef INC_DEBUG
397 printf("%s: **** new file\n", file->filename);
398 #endif
399 parse_flags = PARSE_FLAG_PREPROCESS | PARSE_FLAG_TOK_NUM | PARSE_FLAG_TOK_STR;
400 next();
401 decl(VT_CONST);
402 gen_inline_functions(s1);
403 check_vstack();
404 /* end of translation unit info */
405 #if TCC_EH_FRAME
406 tcc_eh_frame_end(s1);
407 #endif
408 tcc_debug_end(s1);
409 tcc_tcov_end(s1);
410 return 0;
413 ST_FUNC void tccgen_finish(TCCState *s1)
415 tcc_debug_end(s1); /* just in case of errors: free memory */
416 free_inline_functions(s1);
417 sym_pop(&global_stack, NULL, 0);
418 sym_pop(&local_stack, NULL, 0);
419 /* free preprocessor macros */
420 free_defines(NULL);
421 /* free sym_pools */
422 dynarray_reset(&sym_pools, &nb_sym_pools);
423 cstr_free(&initstr);
424 dynarray_reset(&stk_data, &nb_stk_data);
425 while (cur_switch)
426 end_switch();
427 local_scope = 0;
428 loop_scope = NULL;
429 all_cleanups = NULL;
430 pending_gotos = NULL;
431 nb_temp_local_vars = 0;
432 global_label_stack = NULL;
433 local_label_stack = NULL;
434 cur_text_section = NULL;
435 sym_free_first = NULL;
438 /* ------------------------------------------------------------------------- */
439 ST_FUNC ElfSym *elfsym(Sym *s)
441 if (!s || !s->c)
442 return NULL;
443 return &((ElfSym *)symtab_section->data)[s->c];
446 /* apply storage attributes to Elf symbol */
447 ST_FUNC void update_storage(Sym *sym)
449 ElfSym *esym;
450 int sym_bind, old_sym_bind;
452 esym = elfsym(sym);
453 if (!esym)
454 return;
456 if (sym->a.visibility)
457 esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1))
458 | sym->a.visibility;
460 if (sym->type.t & (VT_STATIC | VT_INLINE))
461 sym_bind = STB_LOCAL;
462 else if (sym->a.weak)
463 sym_bind = STB_WEAK;
464 else
465 sym_bind = STB_GLOBAL;
466 old_sym_bind = ELFW(ST_BIND)(esym->st_info);
467 if (sym_bind != old_sym_bind) {
468 esym->st_info = ELFW(ST_INFO)(sym_bind, ELFW(ST_TYPE)(esym->st_info));
471 #ifdef TCC_TARGET_PE
472 if (sym->a.dllimport)
473 esym->st_other |= ST_PE_IMPORT;
474 if (sym->a.dllexport)
475 esym->st_other |= ST_PE_EXPORT;
476 #endif
478 #if 0
479 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
480 get_tok_str(sym->v, NULL),
481 sym_bind == STB_WEAK ? 'w' : sym_bind == STB_LOCAL ? 'l' : 'g',
482 sym->a.visibility,
483 sym->a.dllexport,
484 sym->a.dllimport
486 #endif
489 /* ------------------------------------------------------------------------- */
490 /* update sym->c so that it points to an external symbol in section
491 'section' with value 'value' */
493 ST_FUNC void put_extern_sym2(Sym *sym, int sh_num,
494 addr_t value, unsigned long size,
495 int can_add_underscore)
497 int sym_type, sym_bind, info, other, t;
498 ElfSym *esym;
499 const char *name;
500 char buf1[256];
502 if (!sym->c) {
503 name = get_tok_str(sym->v, NULL);
504 t = sym->type.t;
505 if ((t & VT_BTYPE) == VT_FUNC) {
506 sym_type = STT_FUNC;
507 } else if ((t & VT_BTYPE) == VT_VOID) {
508 sym_type = STT_NOTYPE;
509 if ((t & (VT_BTYPE|VT_ASM_FUNC)) == VT_ASM_FUNC)
510 sym_type = STT_FUNC;
511 } else {
512 sym_type = STT_OBJECT;
514 if (t & (VT_STATIC | VT_INLINE))
515 sym_bind = STB_LOCAL;
516 else
517 sym_bind = STB_GLOBAL;
518 other = 0;
520 #ifdef TCC_TARGET_PE
521 if (sym_type == STT_FUNC && sym->type.ref) {
522 Sym *ref = sym->type.ref;
523 if (ref->a.nodecorate) {
524 can_add_underscore = 0;
526 if (ref->f.func_call == FUNC_STDCALL && can_add_underscore) {
527 sprintf(buf1, "_%s@%d", name, ref->f.func_args * PTR_SIZE);
528 name = buf1;
529 other |= ST_PE_STDCALL;
530 can_add_underscore = 0;
533 #endif
535 if (sym->asm_label) {
536 name = get_tok_str(sym->asm_label, NULL);
537 can_add_underscore = 0;
540 if (tcc_state->leading_underscore && can_add_underscore) {
541 buf1[0] = '_';
542 pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
543 name = buf1;
546 info = ELFW(ST_INFO)(sym_bind, sym_type);
547 sym->c = put_elf_sym(symtab_section, value, size, info, other, sh_num, name);
549 if (debug_modes)
550 tcc_debug_extern_sym(tcc_state, sym, sh_num, sym_bind, sym_type);
552 } else {
553 esym = elfsym(sym);
554 esym->st_value = value;
555 esym->st_size = size;
556 esym->st_shndx = sh_num;
558 update_storage(sym);
561 ST_FUNC void put_extern_sym(Sym *sym, Section *s, addr_t value, unsigned long size)
563 if (nocode_wanted && (NODATA_WANTED || (s && s == cur_text_section)))
564 return;
565 put_extern_sym2(sym, s ? s->sh_num : SHN_UNDEF, value, size, 1);
568 /* add a new relocation entry to symbol 'sym' in section 's' */
569 ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
570 addr_t addend)
572 int c = 0;
574 if (nocode_wanted && s == cur_text_section)
575 return;
577 if (sym) {
578 if (0 == sym->c)
579 put_extern_sym(sym, NULL, 0, 0);
580 c = sym->c;
583 /* now we can add ELF relocation info */
584 put_elf_reloca(symtab_section, s, offset, type, c, addend);
587 #if PTR_SIZE == 4
588 ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
590 greloca(s, sym, offset, type, 0);
592 #endif
594 /* ------------------------------------------------------------------------- */
595 /* symbol allocator */
596 static Sym *__sym_malloc(void)
598 Sym *sym_pool, *sym, *last_sym;
599 int i;
601 sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
602 dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);
604 last_sym = sym_free_first;
605 sym = sym_pool;
606 for(i = 0; i < SYM_POOL_NB; i++) {
607 sym->next = last_sym;
608 last_sym = sym;
609 sym++;
611 sym_free_first = last_sym;
612 return last_sym;
615 static inline Sym *sym_malloc(void)
617 Sym *sym;
618 #ifndef SYM_DEBUG
619 sym = sym_free_first;
620 if (!sym)
621 sym = __sym_malloc();
622 sym_free_first = sym->next;
623 return sym;
624 #else
625 sym = tcc_malloc(sizeof(Sym));
626 return sym;
627 #endif
630 ST_INLN void sym_free(Sym *sym)
632 #ifndef SYM_DEBUG
633 sym->next = sym_free_first;
634 sym_free_first = sym;
635 #else
636 tcc_free(sym);
637 #endif
640 /* push, without hashing */
641 ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, int c)
643 Sym *s;
645 s = sym_malloc();
646 memset(s, 0, sizeof *s);
647 s->v = v;
648 s->type.t = t;
649 s->c = c;
650 /* add in stack */
651 s->prev = *ps;
652 *ps = s;
653 return s;
656 /* find a symbol and return its associated structure. 's' is the top
657 of the symbol stack */
658 ST_FUNC Sym *sym_find2(Sym *s, int v)
660 while (s) {
661 if (s->v == v)
662 return s;
663 s = s->prev;
665 return NULL;
668 /* structure lookup */
669 ST_INLN Sym *struct_find(int v)
671 v -= TOK_IDENT;
672 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
673 return NULL;
674 return table_ident[v]->sym_struct;
677 /* find an identifier */
678 ST_INLN Sym *sym_find(int v)
680 v -= TOK_IDENT;
681 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
682 return NULL;
683 return table_ident[v]->sym_identifier;
686 static int sym_scope(Sym *s)
688 if (IS_ENUM_VAL (s->type.t))
689 return s->type.ref->sym_scope;
690 else
691 return s->sym_scope;
694 /* push a given symbol on the symbol stack */
695 ST_FUNC Sym *sym_push(int v, CType *type, int r, int c)
697 Sym *s, **ps;
698 TokenSym *ts;
700 if (local_stack)
701 ps = &local_stack;
702 else
703 ps = &global_stack;
704 s = sym_push2(ps, v, type->t, c);
705 s->type.ref = type->ref;
706 s->r = r;
707 /* don't record fields or anonymous symbols */
708 /* XXX: simplify */
709 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
710 /* record symbol in token array */
711 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
712 if (v & SYM_STRUCT)
713 ps = &ts->sym_struct;
714 else
715 ps = &ts->sym_identifier;
716 s->prev_tok = *ps;
717 *ps = s;
718 s->sym_scope = local_scope;
719 if (s->prev_tok && sym_scope(s->prev_tok) == s->sym_scope)
720 tcc_error("redeclaration of '%s'",
721 get_tok_str(v & ~SYM_STRUCT, NULL));
723 return s;
726 /* push a global identifier */
727 ST_FUNC Sym *global_identifier_push(int v, int t, int c)
729 Sym *s, **ps;
730 s = sym_push2(&global_stack, v, t, c);
731 s->r = VT_CONST | VT_SYM;
732 /* don't record anonymous symbol */
733 if (v < SYM_FIRST_ANOM) {
734 ps = &table_ident[v - TOK_IDENT]->sym_identifier;
735 /* modify the top most local identifier, so that sym_identifier will
736 point to 's' when popped; happens when called from inline asm */
737 while (*ps != NULL && (*ps)->sym_scope)
738 ps = &(*ps)->prev_tok;
739 s->prev_tok = *ps;
740 *ps = s;
742 return s;
745 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
746 pop them yet from the list, but do remove them from the token array. */
747 ST_FUNC void sym_pop(Sym **ptop, Sym *b, int keep)
749 Sym *s, *ss, **ps;
750 TokenSym *ts;
751 int v;
753 s = *ptop;
754 while(s != b) {
755 ss = s->prev;
756 v = s->v;
757 /* remove symbol in token array */
758 /* XXX: simplify */
759 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
760 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
761 if (v & SYM_STRUCT)
762 ps = &ts->sym_struct;
763 else
764 ps = &ts->sym_identifier;
765 *ps = s->prev_tok;
767 if (!keep)
768 sym_free(s);
769 s = ss;
771 if (!keep)
772 *ptop = b;
775 /* label lookup */
776 ST_FUNC Sym *label_find(int v)
778 v -= TOK_IDENT;
779 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
780 return NULL;
781 return table_ident[v]->sym_label;
784 ST_FUNC Sym *label_push(Sym **ptop, int v, int flags)
786 Sym *s, **ps;
787 s = sym_push2(ptop, v, VT_STATIC, 0);
788 s->r = flags;
789 ps = &table_ident[v - TOK_IDENT]->sym_label;
790 if (ptop == &global_label_stack) {
791 /* modify the top most local identifier, so that
792 sym_identifier will point to 's' when popped */
793 while (*ps != NULL)
794 ps = &(*ps)->prev_tok;
796 s->prev_tok = *ps;
797 *ps = s;
798 return s;
801 /* pop labels until element last is reached. Look if any labels are
802 undefined. Define symbols if '&&label' was used. */
803 ST_FUNC void label_pop(Sym **ptop, Sym *slast, int keep)
805 Sym *s, *s1;
806 for(s = *ptop; s != slast; s = s1) {
807 s1 = s->prev;
808 if (s->r == LABEL_DECLARED) {
809 tcc_warning_c(warn_all)("label '%s' declared but not used", get_tok_str(s->v, NULL));
810 } else if (s->r == LABEL_FORWARD) {
811 tcc_error("label '%s' used but not defined",
812 get_tok_str(s->v, NULL));
813 } else {
814 if (s->c) {
815 /* define corresponding symbol. A size of
816 1 is put. */
817 put_extern_sym(s, cur_text_section, s->jnext, 1);
820 /* remove label */
821 if (s->r != LABEL_GONE)
822 table_ident[s->v - TOK_IDENT]->sym_label = s->prev_tok;
823 if (!keep)
824 sym_free(s);
825 else
826 s->r = LABEL_GONE;
828 if (!keep)
829 *ptop = slast;
832 /* ------------------------------------------------------------------------- */
833 static void vcheck_cmp(void)
835 /* cannot let cpu flags if other instruction are generated. Also
836 avoid leaving VT_JMP anywhere except on the top of the stack
837 because it would complicate the code generator.
839 Don't do this when nocode_wanted. vtop might come from
840 !nocode_wanted regions (see 88_codeopt.c) and transforming
841 it to a register without actually generating code is wrong
842 as their value might still be used for real. All values
843 we push under nocode_wanted will eventually be popped
844 again, so that the VT_CMP/VT_JMP value will be in vtop
845 when code is unsuppressed again. */
847 /* However if it's just automatic suppression via CODE_OFF/ON()
848 then it seems that we better let things work undisturbed.
849 How can it work at all under nocode_wanted? Well, gv() will
850 actually clear it at the gsym() in load()/VT_JMP in the
851 generator backends */
853 if (vtop->r == VT_CMP && 0 == (nocode_wanted & ~CODE_OFF_BIT))
854 gv(RC_INT);
857 static void vsetc(CType *type, int r, CValue *vc)
859 if (vtop >= vstack + (VSTACK_SIZE - 1))
860 tcc_error("memory full (vstack)");
861 vcheck_cmp();
862 vtop++;
863 vtop->type = *type;
864 vtop->r = r;
865 vtop->r2 = VT_CONST;
866 vtop->c = *vc;
867 vtop->sym = NULL;
870 ST_FUNC void vswap(void)
872 SValue tmp;
874 vcheck_cmp();
875 tmp = vtop[0];
876 vtop[0] = vtop[-1];
877 vtop[-1] = tmp;
880 /* pop stack value */
881 ST_FUNC void vpop(void)
883 int v;
884 v = vtop->r & VT_VALMASK;
885 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
886 /* for x86, we need to pop the FP stack */
887 if (v == TREG_ST0) {
888 o(0xd8dd); /* fstp %st(0) */
889 } else
890 #endif
891 if (v == VT_CMP) {
892 /* need to put correct jump if && or || without test */
893 gsym(vtop->jtrue);
894 gsym(vtop->jfalse);
896 vtop--;
899 /* push constant of type "type" with useless value */
900 static void vpush(CType *type)
902 vset(type, VT_CONST, 0);
905 /* push arbitrary 64bit constant */
906 static void vpush64(int ty, unsigned long long v)
908 CValue cval;
909 CType ctype;
910 ctype.t = ty;
911 ctype.ref = NULL;
912 cval.i = v;
913 vsetc(&ctype, VT_CONST, &cval);
916 /* push integer constant */
917 ST_FUNC void vpushi(int v)
919 vpush64(VT_INT, v);
922 /* push a pointer sized constant */
923 static void vpushs(addr_t v)
925 vpush64(VT_SIZE_T, v);
928 /* push long long constant */
929 static inline void vpushll(long long v)
931 vpush64(VT_LLONG, v);
934 ST_FUNC void vset(CType *type, int r, int v)
936 CValue cval;
937 cval.i = v;
938 vsetc(type, r, &cval);
941 static void vseti(int r, int v)
943 CType type;
944 type.t = VT_INT;
945 type.ref = NULL;
946 vset(&type, r, v);
949 ST_FUNC void vpushv(SValue *v)
951 if (vtop >= vstack + (VSTACK_SIZE - 1))
952 tcc_error("memory full (vstack)");
953 vtop++;
954 *vtop = *v;
957 static void vdup(void)
959 vpushv(vtop);
962 /* rotate the stack element at position n-1 to the top */
963 ST_FUNC void vrotb(int n)
965 SValue tmp;
966 if (--n < 1)
967 return;
968 vcheck_cmp();
969 tmp = vtop[-n];
970 memmove(vtop - n, vtop - n + 1, sizeof *vtop * n);
971 vtop[0] = tmp;
974 /* rotate the top stack element into position n-1 */
975 ST_FUNC void vrott(int n)
977 SValue tmp;
978 if (--n < 1)
979 return;
980 vcheck_cmp();
981 tmp = vtop[0];
982 memmove(vtop - n + 1, vtop - n, sizeof *vtop * n);
983 vtop[-n] = tmp;
986 /* reverse order of the the first n stack elements */
987 ST_FUNC void vrev(int n)
989 int i;
990 SValue tmp;
991 vcheck_cmp();
992 for (i = 0, n = -n; i > ++n; --i)
993 tmp = vtop[i], vtop[i] = vtop[n], vtop[n] = tmp;
996 /* ------------------------------------------------------------------------- */
997 /* vtop->r = VT_CMP means CPU-flags have been set from comparison or test. */
999 /* called from generators to set the result from relational ops */
1000 ST_FUNC void vset_VT_CMP(int op)
1002 vtop->r = VT_CMP;
1003 vtop->cmp_op = op;
1004 vtop->jfalse = 0;
1005 vtop->jtrue = 0;
1008 /* called once before asking generators to load VT_CMP to a register */
1009 static void vset_VT_JMP(void)
1011 int op = vtop->cmp_op;
1013 if (vtop->jtrue || vtop->jfalse) {
1014 int origt = vtop->type.t;
1015 /* we need to jump to 'mov $0,%R' or 'mov $1,%R' */
1016 int inv = op & (op < 2); /* small optimization */
1017 vseti(VT_JMP+inv, gvtst(inv, 0));
1018 vtop->type.t |= origt & (VT_UNSIGNED | VT_DEFSIGN);
1019 } else {
1020 /* otherwise convert flags (rsp. 0/1) to register */
1021 vtop->c.i = op;
1022 if (op < 2) /* doesn't seem to happen */
1023 vtop->r = VT_CONST;
1027 /* Set CPU Flags, doesn't yet jump */
1028 static void gvtst_set(int inv, int t)
1030 int *p;
1032 if (vtop->r != VT_CMP) {
1033 vpushi(0);
1034 gen_op(TOK_NE);
1035 if (vtop->r != VT_CMP) /* must be VT_CONST then */
1036 vset_VT_CMP(vtop->c.i != 0);
1039 p = inv ? &vtop->jfalse : &vtop->jtrue;
1040 *p = gjmp_append(*p, t);
1043 /* Generate value test
1045 * Generate a test for any value (jump, comparison and integers) */
1046 static int gvtst(int inv, int t)
1048 int op, x, u;
1050 gvtst_set(inv, t);
1051 t = vtop->jtrue, u = vtop->jfalse;
1052 if (inv)
1053 x = u, u = t, t = x;
1054 op = vtop->cmp_op;
1056 /* jump to the wanted target */
1057 if (op > 1)
1058 t = gjmp_cond(op ^ inv, t);
1059 else if (op != inv)
1060 t = gjmp(t);
1061 /* resolve complementary jumps to here */
1062 gsym(u);
1064 vtop--;
1065 return t;
1068 /* generate a zero or nozero test */
1069 static void gen_test_zero(int op)
1071 if (vtop->r == VT_CMP) {
1072 int j;
1073 if (op == TOK_EQ) {
1074 j = vtop->jfalse;
1075 vtop->jfalse = vtop->jtrue;
1076 vtop->jtrue = j;
1077 vtop->cmp_op ^= 1;
1079 } else {
1080 vpushi(0);
1081 gen_op(op);
1085 /* ------------------------------------------------------------------------- */
1086 /* push a symbol value of TYPE */
1087 ST_FUNC void vpushsym(CType *type, Sym *sym)
1089 CValue cval;
1090 cval.i = 0;
1091 vsetc(type, VT_CONST | VT_SYM, &cval);
1092 vtop->sym = sym;
1095 /* Return a static symbol pointing to a section */
1096 ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
1098 int v;
1099 Sym *sym;
1101 v = anon_sym++;
1102 sym = sym_push(v, type, VT_CONST | VT_SYM, 0);
1103 sym->type.t |= VT_STATIC;
1104 put_extern_sym(sym, sec, offset, size);
1105 return sym;
1108 /* push a reference to a section offset by adding a dummy symbol */
1109 static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
1111 vpushsym(type, get_sym_ref(type, sec, offset, size));
1114 /* define a new external reference to a symbol 'v' of type 'u' */
1115 ST_FUNC Sym *external_global_sym(int v, CType *type)
1117 Sym *s;
1119 s = sym_find(v);
1120 if (!s) {
1121 /* push forward reference */
1122 s = global_identifier_push(v, type->t | VT_EXTERN, 0);
1123 s->type.ref = type->ref;
1124 } else if (IS_ASM_SYM(s)) {
1125 s->type.t = type->t | (s->type.t & VT_EXTERN);
1126 s->type.ref = type->ref;
1127 update_storage(s);
1129 return s;
1132 /* create an external reference with no specific type similar to asm labels.
1133 This avoids type conflicts if the symbol is used from C too */
1134 ST_FUNC Sym *external_helper_sym(int v)
1136 CType ct = { VT_ASM_FUNC, NULL };
1137 return external_global_sym(v, &ct);
1140 /* push a reference to an helper function (such as memmove) */
1141 ST_FUNC void vpush_helper_func(int v)
1143 vpushsym(&func_old_type, external_helper_sym(v));
1146 /* Merge symbol attributes. */
1147 static void merge_symattr(struct SymAttr *sa, struct SymAttr *sa1)
1149 if (sa1->aligned && !sa->aligned)
1150 sa->aligned = sa1->aligned;
1151 sa->packed |= sa1->packed;
1152 sa->weak |= sa1->weak;
1153 sa->nodebug |= sa1->nodebug;
1154 if (sa1->visibility != STV_DEFAULT) {
1155 int vis = sa->visibility;
1156 if (vis == STV_DEFAULT
1157 || vis > sa1->visibility)
1158 vis = sa1->visibility;
1159 sa->visibility = vis;
1161 sa->dllexport |= sa1->dllexport;
1162 sa->nodecorate |= sa1->nodecorate;
1163 sa->dllimport |= sa1->dllimport;
1166 /* Merge function attributes. */
1167 static void merge_funcattr(struct FuncAttr *fa, struct FuncAttr *fa1)
1169 if (fa1->func_call && !fa->func_call)
1170 fa->func_call = fa1->func_call;
1171 if (fa1->func_type && !fa->func_type)
1172 fa->func_type = fa1->func_type;
1173 if (fa1->func_args && !fa->func_args)
1174 fa->func_args = fa1->func_args;
1175 if (fa1->func_noreturn)
1176 fa->func_noreturn = 1;
1177 if (fa1->func_ctor)
1178 fa->func_ctor = 1;
1179 if (fa1->func_dtor)
1180 fa->func_dtor = 1;
1183 /* Merge attributes. */
1184 static void merge_attr(AttributeDef *ad, AttributeDef *ad1)
1186 merge_symattr(&ad->a, &ad1->a);
1187 merge_funcattr(&ad->f, &ad1->f);
1189 if (ad1->section)
1190 ad->section = ad1->section;
1191 if (ad1->alias_target)
1192 ad->alias_target = ad1->alias_target;
1193 if (ad1->asm_label)
1194 ad->asm_label = ad1->asm_label;
1195 if (ad1->attr_mode)
1196 ad->attr_mode = ad1->attr_mode;
/* Merge some type attributes.  'sym' is a previously declared symbol,
   'type' the type from a new declaration of it; diagnose illegal
   redefinitions and fold the new type information into 'sym'. */
static void patch_type(Sym *sym, CType *type)
{
    /* a non-extern redeclaration completes the symbol; a second
       complete definition is an error (enum values are exempt) */
    if (!(type->t & VT_EXTERN) || IS_ENUM_VAL(sym->type.t)) {
        if (!(sym->type.t & VT_EXTERN))
            tcc_error("redefinition of '%s'", get_tok_str(sym->v, NULL));
        sym->type.t &= ~VT_EXTERN;
    }

    if (IS_ASM_SYM(sym)) {
        /* stay static if both are static */
        sym->type.t = type->t & (sym->type.t | ~VT_STATIC);
        sym->type.ref = type->ref;
        if ((type->t & VT_BTYPE) != VT_FUNC && !(type->t & VT_ARRAY))
            sym->r |= VT_LVAL;
    }

    if (!is_compatible_types(&sym->type, type)) {
        tcc_error("incompatible types for redefinition of '%s'",
                  get_tok_str(sym->v, NULL));

    } else if ((sym->type.t & VT_BTYPE) == VT_FUNC) {
        int static_proto = sym->type.t & VT_STATIC;
        /* warn if static follows non-static function declaration */
        if ((type->t & VT_STATIC) && !static_proto
            /* XXX this test for inline shouldn't be here.  Until we
               implement gnu-inline mode again it silences a warning for
               mingw caused by our workarounds. */
            && !((type->t | sym->type.t) & VT_INLINE))
            tcc_warning("static storage ignored for redefinition of '%s'",
                        get_tok_str(sym->v, NULL));

        /* set 'inline' if both agree or if one has static */
        if ((type->t | sym->type.t) & VT_INLINE) {
            if (!((type->t ^ sym->type.t) & VT_INLINE)
                || ((type->t | sym->type.t) & VT_STATIC))
                static_proto |= VT_INLINE;
        }

        if (0 == (type->t & VT_EXTERN)) {
            struct FuncAttr f = sym->type.ref->f;
            /* put complete type, use static from prototype */
            sym->type.t = (type->t & ~(VT_STATIC|VT_INLINE)) | static_proto;
            sym->type.ref = type->ref;
            merge_funcattr(&sym->type.ref->f, &f);
        } else {
            /* clears VT_INLINE unless static_proto carries it:
               (~VT_INLINE | static_proto) is all-ones in that case */
            sym->type.t &= ~VT_INLINE | static_proto;
        }

        /* a real prototype supersedes an old-style () declaration */
        if (sym->type.ref->f.func_type == FUNC_OLD
            && type->ref->f.func_type != FUNC_OLD) {
            sym->type.ref = type->ref;
        }

    } else {
        if ((sym->type.t & VT_ARRAY) && type->ref->c >= 0) {
            /* set array size if it was omitted in extern declaration */
            sym->type.ref->c = type->ref->c;
        }
        if ((type->t ^ sym->type.t) & VT_STATIC)
            tcc_warning("storage mismatch for redefinition of '%s'",
                        get_tok_str(sym->v, NULL));
    }
}
/* Merge some storage attributes of a redeclared symbol: first merge
   the type (if given), then the symbol attributes and asm label, and
   finally let the backend react via update_storage(). */
static void patch_storage(Sym *sym, AttributeDef *ad, CType *type)
{
    if (type)
        patch_type(sym, type);

#ifdef TCC_TARGET_PE
    /* dllimport must agree between declarations on Windows targets */
    if (sym->a.dllimport != ad->a.dllimport)
        tcc_error("incompatible dll linkage for redefinition of '%s'",
            get_tok_str(sym->v, NULL));
#endif
    merge_symattr(&sym->a, &ad->a);
    if (ad->asm_label)
        sym->asm_label = ad->asm_label;
    update_storage(sym);
}
1281 /* copy sym to other stack */
1282 static Sym *sym_copy(Sym *s0, Sym **ps)
1284 Sym *s;
1285 s = sym_malloc(), *s = *s0;
1286 s->prev = *ps, *ps = s;
1287 if (s->v < SYM_FIRST_ANOM) {
1288 ps = &table_ident[s->v - TOK_IDENT]->sym_identifier;
1289 s->prev_tok = *ps, *ps = s;
1291 return s;
/* copy s->type.ref to stack 'ps' for VT_FUNC and VT_PTR (and for
   locally-scoped structs).  The original ->next chain is rebuilt out
   of the copies, and each copy's own ref chain is copied recursively. */
static void sym_copy_ref(Sym *s, Sym **ps)
{
    int bt = s->type.t & VT_BTYPE;
    if (bt == VT_FUNC || bt == VT_PTR || (bt == VT_STRUCT && s->sym_scope)) {
        Sym **sp = &s->type.ref;
        /* detach the old chain, then re-link copies one by one */
        for (s = *sp, *sp = NULL; s; s = s->next) {
            Sym *s2 = sym_copy(s, ps);
            sp = &(*sp = s2)->next;
            sym_copy_ref(s2, ps);
        }
    }
}
/* define a new external reference to a symbol 'v'.  Finds (or creates)
   the file-scope symbol, merges storage with the new declaration, and
   mirrors the declaration on the local stack when inside a function. */
static Sym *external_sym(int v, CType *type, int r, AttributeDef *ad)
{
    Sym *s;

    /* look for global symbol (skip any local shadowing declarations) */
    s = sym_find(v);
    while (s && s->sym_scope)
        s = s->prev_tok;

    if (!s) {
        /* push forward reference */
        s = global_identifier_push(v, type->t, 0);
        s->r |= r;
        s->a = ad->a;
        s->asm_label = ad->asm_label;
        s->type.ref = type->ref;
        /* copy type to the global stack */
        if (local_stack)
            sym_copy_ref(s, &global_stack);
    } else {
        patch_storage(s, ad, type);
    }
    /* push variables on local_stack if any */
    if (local_stack && (s->type.t & VT_BTYPE) != VT_FUNC)
        s = sym_copy(s, &local_stack);
    return s;
}
1337 /* save registers up to (vtop - n) stack entry */
1338 ST_FUNC void save_regs(int n)
1340 SValue *p, *p1;
1341 for(p = vstack, p1 = vtop - n; p <= p1; p++)
1342 save_reg(p->r);
/* save r to the memory stack, and mark it as being free */
ST_FUNC void save_reg(int r)
{
    /* n == 0: consider the entire value stack */
    save_reg_upstack(r, 0);
}
/* save r to the memory stack, and mark it as being free,
   if seen up to (vtop - n) stack entry */
ST_FUNC void save_reg_upstack(int r, int n)
{
    int l, size, align, bt, r2;
    SValue *p, *p1, sv;

    /* only real machine registers can be spilled */
    if ((r &= VT_VALMASK) >= VT_CONST)
        return;
    if (nocode_wanted)
        return;
    l = r2 = 0;
    for(p = vstack, p1 = vtop - n; p <= p1; p++) {
        if ((p->r & VT_VALMASK) == r || p->r2 == r) {
            /* must save value on stack if not already done */
            if (!l) {
                bt = p->type.t & VT_BTYPE;
                if (bt == VT_VOID)
                    continue;
                /* an lvalue holds an address; a function value is
                   its address too: store as a pointer */
                if ((p->r & VT_LVAL) || bt == VT_FUNC)
                    bt = VT_PTR;
                sv.type.t = bt;
                size = type_size(&sv.type, &align);
                l = get_temp_local_var(size, align, &r2);
                sv.r = VT_LOCAL | VT_LVAL;
                sv.c.i = l;
                store(p->r & VT_VALMASK, &sv);
#if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
                /* x86 specific: need to pop fp register ST0 if saved */
                if (r == TREG_ST0) {
                    o(0xd8dd); /* fstp %st(0) */
                }
#endif
                /* special long long case: store the second word too */
                if (p->r2 < VT_CONST && USING_TWO_WORDS(bt)) {
                    sv.c.i += PTR_SIZE;
                    store(p->r2, &sv);
                }
            }
            /* mark that stack entry as being saved on the stack */
            if (p->r & VT_LVAL) {
                /* also clear the bounded flag because the
                   relocation address of the function was stored in
                   p->c.i */
                p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
            } else {
                p->r = VT_LVAL | VT_LOCAL;
                p->type.t &= ~VT_ARRAY; /* cannot combine VT_LVAL with VT_ARRAY */
            }
            p->sym = NULL;
            p->r2 = r2;
            p->c.i = l;
        }
    }
}
#ifdef TCC_TARGET_ARM
/* find a register of class 'rc2' with at most one reference on stack.
 * If none, call get_reg(rc) */
ST_FUNC int get_reg_ex(int rc, int rc2)
{
    int reg;
    SValue *sv;

    for (reg = 0; reg < NB_REGS; reg++) {
        if (!(reg_classes[reg] & rc2))
            continue;
        /* count how many value-stack entries reference this register */
        int refs = 0;
        for (sv = vstack; sv <= vtop; sv++) {
            if ((sv->r & VT_VALMASK) == reg || sv->r2 == reg)
                refs++;
        }
        if (refs <= 1)
            return reg;
    }
    return get_reg(rc);
}
#endif
/* find a free register of class 'rc'. If none, save one register */
ST_FUNC int get_reg(int rc)
{
    int r;
    SValue *p;

    /* find a free register */
    for(r=0;r<NB_REGS;r++) {
        if (reg_classes[r] & rc) {
            /* when not generating code any register will do */
            if (nocode_wanted)
                return r;
            for(p=vstack;p<=vtop;p++) {
                if ((p->r & VT_VALMASK) == r ||
                    p->r2 == r)
                    goto notfound;
            }
            return r;
        }
    notfound: ;
    }

    /* no register left : free the first one on the stack (VERY
       IMPORTANT to start from the bottom to ensure that we don't
       spill registers used in gen_opi()) */
    for(p=vstack;p<=vtop;p++) {
        /* look at second register (if long long) */
        r = p->r2;
        if (r < VT_CONST && (reg_classes[r] & rc))
            goto save_found;
        r = p->r & VT_VALMASK;
        if (r < VT_CONST && (reg_classes[r] & rc)) {
        save_found:
            save_reg(r);
            return r;
        }
    }
    /* Should never come here */
    return -1;
}
/* find a free temporary local variable (return the offset on stack) match
   size and align. If none, add new temporary stack variable.
   '*r2' receives a slot tag (VT_CONST + 1 + index) stored in SValue.r2
   so in-use slots can be recognized, or VT_CONST when untracked. */
static int get_temp_local_var(int size,int align, int *r2)
{
    int i;
    struct temp_local_variable *temp_var;
    SValue *p;
    int r;
    unsigned used = 0;

    /* mark locations that are still in use */
    for (p = vstack; p <= vtop; p++) {
        r = p->r & VT_VALMASK;
        if (r == VT_LOCAL || r == VT_LLOCAL) {
            /* recover the slot index from the r2 tag */
            r = p->r2 - (VT_CONST + 1);
            if (r >= 0 && r < MAX_TEMP_LOCAL_VARIABLE_NUMBER)
                used |= 1<<r;
        }
    }
    /* reuse a free slot that is large and aligned enough */
    for (i=0;i<nb_temp_local_vars;i++) {
        temp_var=&arr_temp_local_vars[i];
        if(!(used & 1<<i)
            && temp_var->size>=size
            && temp_var->align>=align) {
 ret_tmp:
            /* NOTE: also the target of the goto below, with i equal
               to the index of the freshly created slot */
            *r2 = (VT_CONST + 1) + i;
            return temp_var->location;
        }
    }
    /* none free: allocate new stack space ... */
    loc = (loc - size) & -align;
    if (nb_temp_local_vars<MAX_TEMP_LOCAL_VARIABLE_NUMBER) {
        /* ... and register it as slot i (== nb_temp_local_vars here) */
        temp_var=&arr_temp_local_vars[i];
        temp_var->location=loc;
        temp_var->size=size;
        temp_var->align=align;
        nb_temp_local_vars++;
        goto ret_tmp;
    }
    /* slot table full: hand out untracked stack space */
    *r2 = VT_CONST;
    return loc;
}
1514 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1515 if needed */
1516 static void move_reg(int r, int s, int t)
1518 SValue sv;
1520 if (r != s) {
1521 save_reg(r);
1522 sv.type.t = t;
1523 sv.type.ref = NULL;
1524 sv.r = s;
1525 sv.c.i = 0;
1526 load(r, &sv);
/* get address of vtop (vtop MUST BE an lvalue) */
ST_FUNC void gaddrof(void)
{
    vtop->r &= ~VT_LVAL;
    /* tricky: if saved lvalue, then we can go back to lvalue */
    if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
        vtop->r = (vtop->r & ~VT_VALMASK) | VT_LOCAL | VT_LVAL;
}
#ifdef CONFIG_TCC_BCHECK
/* generate a bounded pointer addition: calls __bound_ptr_add(ptr, off)
   and leaves the (checked) result pointer on the value stack */
static void gen_bounded_ptr_add(void)
{
    /* a VT_LOCAL base would be clobbered by the call setup, keep a copy */
    int save = (vtop[-1].r & VT_VALMASK) == VT_LOCAL;
    if (save) {
        vpushv(&vtop[-1]);
        vrott(3);
    }
    vpush_helper_func(TOK___bound_ptr_add);
    vrott(3);
    gfunc_call(2);
    vtop -= save;
    vpushi(0);
    /* returned pointer is in REG_IRET */
    vtop->r = REG_IRET | VT_BOUNDED;
    if (nocode_wanted)
        return;
    /* relocation offset of the bounding function call point,
       used later by gen_bounded_ptr_deref() to retarget the call */
    vtop->c.i = (cur_text_section->reloc->data_offset - sizeof(ElfW_Rel));
}
/* patch pointer addition in vtop so that pointer dereferencing is
   also tested: rewrites the earlier __bound_ptr_add relocation to the
   size-specific __bound_ptr_indirN helper */
static void gen_bounded_ptr_deref(void)
{
    addr_t func;
    int size, align;
    ElfW_Rel *rel;
    Sym *sym;

    if (nocode_wanted)
        return;

    size = type_size(&vtop->type, &align);
    switch(size) {
    case  1: func = TOK___bound_ptr_indir1; break;
    case  2: func = TOK___bound_ptr_indir2; break;
    case  4: func = TOK___bound_ptr_indir4; break;
    case  8: func = TOK___bound_ptr_indir8; break;
    case 12: func = TOK___bound_ptr_indir12; break;
    case 16: func = TOK___bound_ptr_indir16; break;
    default:
        /* may happen with struct member access */
        return;
    }
    sym = external_helper_sym(func);
    if (!sym->c)
        put_extern_sym(sym, NULL, 0, 0);
    /* patch relocation */
    /* XXX: find a better solution ? */
    rel = (ElfW_Rel *)(cur_text_section->reloc->data + vtop->c.i);
    rel->r_info = ELFW(R_INFO)(sym->c, ELFW(R_TYPE)(rel->r_info));
}
/* generate lvalue bound code */
static void gbound(void)
{
    CType type1;

    vtop->r &= ~VT_MUSTBOUND;
    /* if lvalue, then use checking code before dereferencing */
    if (vtop->r & VT_LVAL) {
        /* if not VT_BOUNDED value, then make one */
        if (!(vtop->r & VT_BOUNDED)) {
            /* must save type because we must set it to int to get pointer */
            type1 = vtop->type;
            vtop->type.t = VT_PTR;
            gaddrof();
            vpushi(0);               /* offset 0: check the address itself */
            gen_bounded_ptr_add();
            vtop->r |= VT_LVAL;
            vtop->type = type1;
        }
        /* then check for dereferencing */
        gen_bounded_ptr_deref();
    }
}
/* we need to call __bound_ptr_add before we start to load function
   args into registers */
ST_FUNC void gbound_args(int nb_args)
{
    int i, v;
    SValue *sv;

    /* bound-check any argument that still needs it */
    for (i = 1; i <= nb_args; ++i)
        if (vtop[1 - i].r & VT_MUSTBOUND) {
            vrotb(i);
            gbound();
            vrott(i);
        }

    /* special-case some known callees (sv is the function symbol) */
    sv = vtop - nb_args;
    if (sv->r & VT_SYM) {
        v = sv->sym->v;
        if (v == TOK_setjmp
          || v == TOK__setjmp
#ifndef TCC_TARGET_PE
          || v == TOK_sigsetjmp
          || v == TOK___sigsetjmp
#endif
            ) {
            /* register the jmp_buf with the bounds runtime */
            vpush_helper_func(TOK___bound_setjmp);
            vpushv(sv + 1);
            gfunc_call(1);
            func_bound_add_epilog = 1;
        }
#if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64
        if (v == TOK_alloca)
            func_bound_add_epilog = 1;
#endif
#if TARGETOS_NetBSD
        if (v == TOK_longjmp) /* undo rename to __longjmp14 */
            sv->sym->asm_label = TOK___bound_longjmp;
#endif
    }
}
/* Add bounds for local symbols from S to E (via ->prev): emits
   (offset, size) pairs into lbounds_section for address-taken locals */
static void add_local_bounds(Sym *s, Sym *e)
{
    for (; s != e; s = s->prev) {
        if (!s->v || (s->r & VT_VALMASK) != VT_LOCAL)
            continue;
        /* Add arrays/structs/unions because we always take address */
        if ((s->type.t & VT_ARRAY)
            || (s->type.t & VT_BTYPE) == VT_STRUCT
            || s->a.addrtaken) {
            /* add local bound info */
            int align, size = type_size(&s->type, &align);
            addr_t *bounds_ptr = section_ptr_add(lbounds_section,
                                                 2 * sizeof(addr_t));
            bounds_ptr[0] = s->c;   /* stack offset of the local */
            bounds_ptr[1] = size;
        }
    }
}
1677 #endif
/* Wrapper around sym_pop, that potentially also registers local bounds
   and emits scope debug information before the symbols disappear. */
static void pop_local_syms(Sym *b, int keep)
{
#ifdef CONFIG_TCC_BCHECK
    if (tcc_state->do_bounds_check && !keep && (local_scope || !func_var))
        add_local_bounds(local_stack, b);
#endif
    if (debug_modes)
        tcc_add_debug_info (tcc_state, !local_scope, local_stack, b);
    sym_pop(&local_stack, b, keep);
}
/* increment an lvalue pointer by 'offset' bytes, preserving its type */
static void incr_offset(int offset)
{
    int t = vtop->type.t;
    gaddrof(); /* remove VT_LVAL */
    vtop->type.t = VT_PTRDIFF_T; /* set scalar type */
    vpushs(offset);
    gen_op('+');
    vtop->r |= VT_LVAL;
    vtop->type.t = t;
}
/* advance the bitfield byte cursor by 'o' bytes; the lvalue is
   retyped to unsigned byte for the single-byte accesses */
static void incr_bf_adr(int o)
{
    vtop->type.t = VT_BYTE | VT_UNSIGNED;
    incr_offset(o);
}
/* single-byte load mode for packed or otherwise unaligned bitfields.
   Reads the field byte by byte, assembling the bits into an
   accumulator, then sign-extends if the field type is signed.
   (Stack-shape comments: B = byte lvalue, X = accumulator.) */
static void load_packed_bf(CType *type, int bit_pos, int bit_size)
{
    int n, o, bits;
    save_reg_upstack(vtop->r, 1);
    vpush64(type->t & VT_BTYPE, 0); // B X
    bits = 0, o = bit_pos >> 3, bit_pos &= 7;
    do {
        vswap(); // X B
        incr_bf_adr(o);
        vdup(); // X B B
        /* n = number of bits taken from the current byte */
        n = 8 - bit_pos;
        if (n > bit_size)
            n = bit_size;
        if (bit_pos)
            vpushi(bit_pos), gen_op(TOK_SHR), bit_pos = 0; // X B Y
        if (n < 8)
            vpushi((1 << n) - 1), gen_op('&');
        gen_cast(type);
        if (bits)
            vpushi(bits), gen_op(TOK_SHL);
        vrotb(3); // B Y X
        gen_op('|'); // B X
        bits += n, bit_size -= n, o = 1;
    } while (bit_size);
    vswap(), vpop();
    if (!(type->t & VT_UNSIGNED)) {
        /* sign extend the assembled value */
        n = ((type->t & VT_BTYPE) == VT_LLONG ? 64 : 32) - bits;
        vpushi(n), gen_op(TOK_SHL);
        vpushi(n), gen_op(TOK_SAR);
    }
}
/* single-byte store mode for packed or otherwise unaligned bitfields.
   Writes the value byte by byte, read-modify-writing partial bytes.
   (Stack-shape comments: X = value, B = byte lvalue, V = value copy.) */
static void store_packed_bf(int bit_pos, int bit_size)
{
    int bits, n, o, m, c;
    /* c: value is a plain constant, cheap to duplicate */
    c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
    vswap(); // X B
    save_reg_upstack(vtop->r, 1);
    bits = 0, o = bit_pos >> 3, bit_pos &= 7;
    do {
        incr_bf_adr(o); // X B
        vswap(); //B X
        c ? vdup() : gv_dup(); // B V X
        vrott(3); // X B V
        if (bits)
            vpushi(bits), gen_op(TOK_SHR);
        if (bit_pos)
            vpushi(bit_pos), gen_op(TOK_SHL);
        /* n = number of bits written into the current byte */
        n = 8 - bit_pos;
        if (n > bit_size)
            n = bit_size;
        if (n < 8) {
            /* partial byte: merge new bits with the preserved ones */
            m = ((1 << n) - 1) << bit_pos;
            vpushi(m), gen_op('&'); // X B V1
            vpushv(vtop-1); // X B V1 B
            vpushi(m & 0x80 ? ~m & 0x7f : ~m);
            gen_op('&'); // X B V1 B1
            gen_op('|'); // X B V2
        }
        vdup(), vtop[-1] = vtop[-2]; // X B B V2
        vstore(), vpop(); // X B
        bits += n, bit_size -= n, bit_pos = 0, o = 1;
    } while (bit_size);
    vpop(), vpop();
}
/* Adjust the access type of a bitfield SValue according to the
   auxtype recorded at layout time.  Returns that auxtype: VT_STRUCT
   selects the byte-wise packed path, -1/0 means no adjustment. */
static int adjust_bf(SValue *sv, int bit_pos, int bit_size)
{
    int t;
    if (0 == sv->type.ref)
        return 0;
    t = sv->type.ref->auxtype;
    if (t != -1 && t != VT_STRUCT) {
        /* access the field through the recorded container type */
        sv->type.t = (sv->type.t & ~(VT_BTYPE | VT_LONG)) | t;
        sv->r |= VT_LVAL;
    }
    return t;
}
/* store vtop a register belonging to class 'rc'. lvalues are
   converted to values. Cannot be used if cannot be converted to
   register value (such as structures).  Returns the register. */
ST_FUNC int gv(int rc)
{
    int r, r2, r_ok, r2_ok, rc2, bt;
    int bit_pos, bit_size, size, align;

    /* NOTE: get_reg can modify vstack[] */
    if (vtop->type.t & VT_BITFIELD) {
        CType type;

        bit_pos = BIT_POS(vtop->type.t);
        bit_size = BIT_SIZE(vtop->type.t);
        /* remove bit field info to avoid loops */
        vtop->type.t &= ~VT_STRUCT_MASK;

        type.ref = NULL;
        type.t = vtop->type.t & VT_UNSIGNED;
        if ((vtop->type.t & VT_BTYPE) == VT_BOOL)
            type.t |= VT_UNSIGNED;

        r = adjust_bf(vtop, bit_pos, bit_size);

        if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
            type.t |= VT_LLONG;
        else
            type.t |= VT_INT;

        if (r == VT_STRUCT) {
            /* unaligned/packed field: byte-wise load */
            load_packed_bf(&type, bit_pos, bit_size);
        } else {
            int bits = (type.t & VT_BTYPE) == VT_LLONG ? 64 : 32;
            /* cast to int to propagate signedness in following ops */
            gen_cast(&type);
            /* generate shifts to isolate the field */
            vpushi(bits - (bit_pos + bit_size));
            gen_op(TOK_SHL);
            vpushi(bits - bit_size);
            /* NOTE: transformed to SHR if unsigned */
            gen_op(TOK_SAR);
        }
        r = gv(rc);
    } else {
        if (is_float(vtop->type.t) &&
            (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
            /* CPUs usually cannot use float constants, so we store them
               generically in data segment */
            init_params p = { rodata_section };
            unsigned long offset;
            size = type_size(&vtop->type, &align);
            if (NODATA_WANTED)
                size = 0, align = 1;
            offset = section_add(p.sec, size, align);
            vpush_ref(&vtop->type, p.sec, offset, size);
            vswap();
            init_putv(&p, &vtop->type, offset);
            vtop->r |= VT_LVAL;
        }
#ifdef CONFIG_TCC_BCHECK
        if (vtop->r & VT_MUSTBOUND)
            gbound();
#endif

        bt = vtop->type.t & VT_BTYPE;

#ifdef TCC_TARGET_RISCV64
        /* XXX mega hack */
        if (bt == VT_LDOUBLE && rc == RC_FLOAT)
            rc = RC_INT;
#endif
        /* rc2 != 0 when the type needs a second register (register pair) */
        rc2 = RC2_TYPE(bt, rc);

        /* need to reload if:
           - constant
           - lvalue (need to dereference pointer)
           - already a register, but not in the right class */
        r = vtop->r & VT_VALMASK;
        r_ok = !(vtop->r & VT_LVAL) && (r < VT_CONST) && (reg_classes[r] & rc);
        r2_ok = !rc2 || ((vtop->r2 < VT_CONST) && (reg_classes[vtop->r2] & rc2));

        if (!r_ok || !r2_ok) {

            if (!r_ok) {
                if (1 /* we can 'mov (r),r' in cases */
                    && r < VT_CONST
                    && (reg_classes[r] & rc)
                    && !rc2
                   )
                    /* reload into the same register: just spill other users */
                    save_reg_upstack(r, 1);
                else
                    r = get_reg(rc);
            }

            if (rc2) {
                int load_type = (bt == VT_QFLOAT) ? VT_DOUBLE : VT_PTRDIFF_T;
                int original_type = vtop->type.t;

                /* two register type load :
                   expand to two words temporarily */
                if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
                    /* load constant */
                    unsigned long long ll = vtop->c.i;
                    vtop->c.i = ll; /* first word */
                    load(r, vtop);
                    vtop->r = r; /* save register value */
                    vpushi(ll >> 32); /* second word */
                } else if (vtop->r & VT_LVAL) {
                    /* We do not want to modify the long long pointer here.
                       So we save any other instances down the stack */
                    save_reg_upstack(vtop->r, 1);
                    /* load from memory */
                    vtop->type.t = load_type;
                    load(r, vtop);
                    vdup();
                    vtop[-1].r = r; /* save register value */
                    /* increment pointer to get second word */
                    incr_offset(PTR_SIZE);
                } else {
                    /* move registers */
                    if (!r_ok)
                        load(r, vtop);
                    if (r2_ok && vtop->r2 < VT_CONST)
                        goto done;
                    vdup();
                    vtop[-1].r = r; /* save register value */
                    vtop->r = vtop[-1].r2;
                }
                /* Allocate second register. Here we rely on the fact that
                   get_reg() tries first to free r2 of an SValue. */
                r2 = get_reg(rc2);
                load(r2, vtop);
                vpop();
                /* write second register */
                vtop->r2 = r2;
            done:
                vtop->type.t = original_type;
            } else {
                if (vtop->r == VT_CMP)
                    vset_VT_JMP();
                /* one register type load */
                load(r, vtop);
            }
        }
        vtop->r = r;
#ifdef TCC_TARGET_C67
        /* uses register pairs for doubles */
        if (bt == VT_DOUBLE)
            vtop->r2 = r+1;
#endif
    }
    return r;
}
/* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
ST_FUNC void gv2(int rc1, int rc2)
{
    /* generate more generic register first. But VT_JMP or VT_CMP
       values must be generated first in all cases to avoid possible
       reload errors */
    if (vtop->r != VT_CMP && rc1 <= rc2) {
        vswap();
        gv(rc1);
        vswap();
        gv(rc2);
        /* test if reload is needed for first register (the second gv
           may have spilled it) */
        if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
            vswap();
            gv(rc1);
            vswap();
        }
    } else {
        gv(rc2);
        vswap();
        gv(rc1);
        vswap();
        /* test if reload is needed for first register */
        if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
            gv(rc2);
        }
    }
}
#if PTR_SIZE == 4
/* expand 64bit on stack in two ints: after the call vtop[-1] is the
   low word and vtop[0] the high word, both typed VT_INT */
ST_FUNC void lexpand(void)
{
    int u, v;
    u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
    v = vtop->r & (VT_VALMASK | VT_LVAL);
    if (v == VT_CONST) {
        /* constant: duplicate and shift for the high word */
        vdup();
        vtop[0].c.i >>= 32;
    } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
        /* simple lvalue: second word lives 4 bytes further */
        vdup();
        vtop[0].c.i += 4;
    } else {
        /* otherwise materialize in a register pair and split it */
        gv(RC_INT);
        vdup();
        vtop[0].r = vtop[-1].r2;
        vtop[0].r2 = vtop[-1].r2 = VT_CONST;
    }
    vtop[0].type.t = vtop[-1].type.t = VT_INT | u;
}
#endif
#if PTR_SIZE == 4
/* build a long long from two ints (vtop[-1] low word, vtop[0] high) */
static void lbuild(int t)
{
    gv2(RC_INT, RC_INT);
    vtop[-1].r2 = vtop[0].r;
    vtop[-1].type.t = t;
    vpop();
}
#endif
/* convert stack entry to register and duplicate its value in another
   register */
static void gv_dup(void)
{
    int t, rc, r;

    t = vtop->type.t;
#if PTR_SIZE == 4
    if ((t & VT_BTYPE) == VT_LLONG) {
        if (t & VT_BITFIELD) {
            /* force the bitfield extraction first */
            gv(RC_INT);
            t = vtop->type.t;
        }
        /* duplicate word by word, then reassemble both long longs */
        lexpand();
        gv_dup();
        vswap();
        vrotb(3);
        gv_dup();
        vrotb(4);
        /* stack: H L L1 H1 */
        lbuild(t);
        vrotb(3);
        vrotb(3);
        vswap();
        lbuild(t);
        vswap();
        return;
    }
#endif
    /* duplicate value */
    rc = RC_TYPE(t);
    gv(rc);
    r = get_reg(rc);
    vdup();
    load(r, vtop);
    vtop->r = r;
}
#if PTR_SIZE == 4
/* generate CPU independent (unsigned) long long operations on 32-bit
   targets: division via libgcc-style helpers, word-wise arithmetic
   for +,-,*,&,|,^, and two-word sequences for shifts and compares */
static void gen_opl(int op)
{
    int t, a, b, op1, c, i;
    int func;
    unsigned short reg_iret = REG_IRET;
    unsigned short reg_lret = REG_IRE2;
    SValue tmp;

    switch(op) {
    case '/':
    case TOK_PDIV:
        func = TOK___divdi3;
        goto gen_func;
    case TOK_UDIV:
        func = TOK___udivdi3;
        goto gen_func;
    case '%':
        func = TOK___moddi3;
        goto gen_mod_func;
    case TOK_UMOD:
        func = TOK___umoddi3;
    gen_mod_func:
#ifdef TCC_ARM_EABI
        /* EABI aeabi_ldivmod helpers return the remainder in r2/r3 */
        reg_iret = TREG_R2;
        reg_lret = TREG_R3;
#endif
    gen_func:
        /* call generic long long function */
        vpush_helper_func(func);
        vrott(3);
        gfunc_call(2);
        vpushi(0);
        vtop->r = reg_iret;
        vtop->r2 = reg_lret;
        break;
    case '^':
    case '&':
    case '|':
    case '*':
    case '+':
    case '-':
        //pv("gen_opl A",0,2);
        t = vtop->type.t;
        vswap();
        lexpand();
        vrotb(3);
        lexpand();
        /* stack: L1 H1 L2 H2 */
        tmp = vtop[0];
        vtop[0] = vtop[-3];
        vtop[-3] = tmp;
        tmp = vtop[-2];
        vtop[-2] = vtop[-3];
        vtop[-3] = tmp;
        vswap();
        /* stack: H1 H2 L1 L2 */
        //pv("gen_opl B",0,4);
        if (op == '*') {
            vpushv(vtop - 1);
            vpushv(vtop - 1);
            gen_op(TOK_UMULL);
            lexpand();
            /* stack: H1 H2 L1 L2 ML MH */
            for(i=0;i<4;i++)
                vrotb(6);
            /* stack: ML MH H1 H2 L1 L2 */
            tmp = vtop[0];
            vtop[0] = vtop[-2];
            vtop[-2] = tmp;
            /* stack: ML MH H1 L2 H2 L1 */
            gen_op('*');
            vrotb(3);
            vrotb(3);
            gen_op('*');
            /* stack: ML MH M1 M2 */
            gen_op('+');
            gen_op('+');
        } else if (op == '+' || op == '-') {
            /* XXX: add non carry method too (for MIPS or alpha) */
            if (op == '+')
                op1 = TOK_ADDC1;
            else
                op1 = TOK_SUBC1;
            gen_op(op1);
            /* stack: H1 H2 (L1 op L2) */
            vrotb(3);
            vrotb(3);
            gen_op(op1 + 1); /* TOK_xxxC2 */
        } else {
            gen_op(op);
            /* stack: H1 H2 (L1 op L2) */
            vrotb(3);
            vrotb(3);
            /* stack: (L1 op L2) H1 H2 */
            gen_op(op);
            /* stack: (L1 op L2) (H1 op H2) */
        }
        /* stack: L H */
        lbuild(t);
        break;
    case TOK_SAR:
    case TOK_SHR:
    case TOK_SHL:
        if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
            t = vtop[-1].type.t;
            vswap();
            lexpand();
            vrotb(3);
            /* stack: L H shift */
            c = (int)vtop->c.i;
            /* constant: simpler */
            /* NOTE: all comments are for SHL. the other cases are
               done by swapping words */
            vpop();
            if (op != TOK_SHL)
                vswap();
            if (c >= 32) {
                /* stack: L H */
                vpop();
                if (c > 32) {
                    vpushi(c - 32);
                    gen_op(op);
                }
                if (op != TOK_SAR) {
                    vpushi(0);
                } else {
                    /* arithmetic shift: fill with copies of the sign bit */
                    gv_dup();
                    vpushi(31);
                    gen_op(TOK_SAR);
                }
                vswap();
            } else {
                vswap();
                gv_dup();
                /* stack: H L L */
                vpushi(c);
                gen_op(op);
                vswap();
                vpushi(32 - c);
                if (op == TOK_SHL)
                    gen_op(TOK_SHR);
                else
                    gen_op(TOK_SHL);
                vrotb(3);
                /* stack: L L H */
                vpushi(c);
                if (op == TOK_SHL)
                    gen_op(TOK_SHL);
                else
                    gen_op(TOK_SHR);
                gen_op('|');
            }
            if (op != TOK_SHL)
                vswap();
            lbuild(t);
        } else {
            /* XXX: should provide a faster fallback on x86 ? */
            switch(op) {
            case TOK_SAR:
                func = TOK___ashrdi3;
                goto gen_func;
            case TOK_SHR:
                func = TOK___lshrdi3;
                goto gen_func;
            case TOK_SHL:
                func = TOK___ashldi3;
                goto gen_func;
            }
        }
        break;
    default:
        /* compare operations */
        t = vtop->type.t;
        vswap();
        lexpand();
        vrotb(3);
        lexpand();
        /* stack: L1 H1 L2 H2 */
        tmp = vtop[-1];
        vtop[-1] = vtop[-2];
        vtop[-2] = tmp;
        /* stack: L1 L2 H1 H2 */
        if (!cur_switch || cur_switch->bsym) {
            /* avoid different registers being saved in branches.
               This is not needed when comparing switch cases */
            save_regs(4);
        }
        /* compare high */
        op1 = op;
        /* when values are equal, we need to compare low words. since
           the jump is inverted, we invert the test too. */
        if (op1 == TOK_LT)
            op1 = TOK_LE;
        else if (op1 == TOK_GT)
            op1 = TOK_GE;
        else if (op1 == TOK_ULT)
            op1 = TOK_ULE;
        else if (op1 == TOK_UGT)
            op1 = TOK_UGE;
        a = 0;
        b = 0;
        gen_op(op1);
        if (op == TOK_NE) {
            b = gvtst(0, 0);
        } else {
            a = gvtst(1, 0);
            if (op != TOK_EQ) {
                /* generate non equal test */
                vpushi(0);
                vset_VT_CMP(TOK_NE);
                b = gvtst(0, 0);
            }
        }
        /* compare low. Always unsigned */
        op1 = op;
        if (op1 == TOK_LT)
            op1 = TOK_ULT;
        else if (op1 == TOK_LE)
            op1 = TOK_ULE;
        else if (op1 == TOK_GT)
            op1 = TOK_UGT;
        else if (op1 == TOK_GE)
            op1 = TOK_UGE;
        gen_op(op1);
#if 0//def TCC_TARGET_I386
        if (op == TOK_NE) { gsym(b); break; }
        if (op == TOK_EQ) { gsym(a); break; }
#endif
        gvtst_set(1, a);
        gvtst_set(0, b);
        break;
    }
}
#endif
2282 /* normalize values */
2283 static uint64_t value64(uint64_t l1, int t)
2285 if ((t & VT_BTYPE) == VT_LLONG
2286 || (PTR_SIZE == 8 && (t & VT_BTYPE) == VT_PTR))
2287 return l1;
2288 else if (t & VT_UNSIGNED)
2289 return (uint32_t)l1;
2290 else
2291 return (uint32_t)l1 | -(l1 & 0x80000000);
/* signed 64-bit division expressed on the unsigned representation:
   divide the magnitudes, then restore the sign of the quotient */
static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
{
    uint64_t ua = (a >> 63) ? 0 - a : a;
    uint64_t ub = (b >> 63) ? 0 - b : b;
    uint64_t q = ua / ub;
    /* result is negative exactly when the operand signs differ */
    return ((a ^ b) >> 63) ? 0 - q : q;
}
/* signed 64-bit "less than" on the unsigned representation: flipping
   the sign bit maps the signed ordering onto the unsigned one */
static int gen_opic_lt(uint64_t a, uint64_t b)
{
    const uint64_t sign_bit = (uint64_t)1 << 63;
    return (a ^ sign_bit) < (b ^ sign_bit);
}
/* handle integer constant optimizations and various machine
   independent opt: fold const-const operations, normalize commutative
   operand order, eliminate no-ops, strength-reduce power-of-two
   mul/div to shifts, and fold symbol+constant; otherwise fall through
   to the target code generators gen_opi()/gen_opl(). */
static void gen_opic(int op)
{
    SValue *v1 = vtop - 1;
    SValue *v2 = vtop;
    int t1 = v1->type.t & VT_BTYPE;
    int t2 = v2->type.t & VT_BTYPE;
    int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
    int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
    uint64_t l1 = c1 ? value64(v1->c.i, v1->type.t) : 0;
    uint64_t l2 = c2 ? value64(v2->c.i, v2->type.t) : 0;
    int shm = (t1 == VT_LLONG) ? 63 : 31;  /* shift-count mask */
    int r;

    if (c1 && c2) {
        /* both operands constant: fold at compile time */
        switch(op) {
        case '+': l1 += l2; break;
        case '-': l1 -= l2; break;
        case '&': l1 &= l2; break;
        case '^': l1 ^= l2; break;
        case '|': l1 |= l2; break;
        case '*': l1 *= l2; break;

        case TOK_PDIV:
        case '/':
        case '%':
        case TOK_UDIV:
        case TOK_UMOD:
            /* if division by zero, generate explicit division */
            if (l2 == 0) {
                if (CONST_WANTED && !NOEVAL_WANTED)
                    tcc_error("division by zero in constant");
                goto general_case;
            }
            switch(op) {
            default: l1 = gen_opic_sdiv(l1, l2); break;
            case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
            case TOK_UDIV: l1 = l1 / l2; break;
            case TOK_UMOD: l1 = l1 % l2; break;
            }
            break;
        case TOK_SHL: l1 <<= (l2 & shm); break;
        case TOK_SHR: l1 >>= (l2 & shm); break;
        case TOK_SAR:
            /* arithmetic shift on the unsigned representation */
            l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
            break;
        /* tests */
        case TOK_ULT: l1 = l1 < l2; break;
        case TOK_UGE: l1 = l1 >= l2; break;
        case TOK_EQ: l1 = l1 == l2; break;
        case TOK_NE: l1 = l1 != l2; break;
        case TOK_ULE: l1 = l1 <= l2; break;
        case TOK_UGT: l1 = l1 > l2; break;
        case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
        case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
        case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
        case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
        /* logical */
        case TOK_LAND: l1 = l1 && l2; break;
        case TOK_LOR: l1 = l1 || l2; break;
        default:
            goto general_case;
        }
        v1->c.i = value64(l1, v1->type.t);
        v1->r |= v2->r & VT_NONCONST;
        vtop--;
    } else {
        /* if commutative ops, put c2 as constant */
        if (c1 && (op == '+' || op == '&' || op == '^' ||
                   op == '|' || op == '*' || op == TOK_EQ || op == TOK_NE)) {
            vswap();
            c2 = c1; //c = c1, c1 = c2, c2 = c;
            l2 = l1; //l = l1, l1 = l2, l2 = l;
        }
        if (c1 && ((l1 == 0 &&
                    (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
                   (l1 == -1 && op == TOK_SAR))) {
            /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
            vpop();
        } else if (c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
                          (op == '|' &&
                            (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))) ||
                          (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
            /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
            if (l2 == 1)
                vtop->c.i = 0;
            vswap();
            vtop--;
        } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
                          op == TOK_PDIV) &&
                           l2 == 1) ||
                          ((op == '+' || op == '-' || op == '|' || op == '^' ||
                            op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
                           l2 == 0) ||
                          (op == '&' &&
                            (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))))) {
            /* filter out NOP operations like x*1, x-0, x&-1... */
            vtop--;
        } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
            /* try to use shifts instead of muls or divs */
            if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
                /* power of two: compute log2 and switch to a shift */
                int n = -1;
                while (l2) {
                    l2 >>= 1;
                    n++;
                }
                vtop->c.i = n;
                if (op == '*')
                    op = TOK_SHL;
                else if (op == TOK_PDIV)
                    op = TOK_SAR;
                else
                    op = TOK_SHR;
            }
            goto general_case;
        } else if (c2 && (op == '+' || op == '-') &&
                   (r = vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM),
                    r == (VT_CONST | VT_SYM) || r == VT_LOCAL)) {
            /* symbol + constant case */
            if (op == '-')
                l2 = -l2;
            l2 += vtop[-1].c.i;
            /* The backends can't always deal with addends to symbols
               larger than +-1<<31. Don't construct such. */
            if ((int)l2 != l2)
                goto general_case;
            vtop--;
            vtop->c.i = l2;
        } else {
        general_case:
            /* call low level op generator */
            if (t1 == VT_LLONG || t2 == VT_LLONG ||
                (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
                gen_opl(op);
            else
                gen_opi(op);
        }
        if (vtop->r == VT_CONST)
            vtop->r |= VT_NONCONST; /* is const, but only by optimization */
    }
}
2448 #if defined TCC_TARGET_X86_64 || defined TCC_TARGET_I386
2449 # define gen_negf gen_opf
2450 #elif defined TCC_TARGET_ARM
/* floating point negation (ARM variant) */
void gen_negf(int op)
{
    /* arm will detect 0-x and replace by vneg */
    vpushi(0), vswap(), gen_op('-');
}
2456 #else
/* XXX: implement in gen_opf() for other backends too */
void gen_negf(int op)
{
    /* In IEEE negate(x) isn't subtract(0,x). Without NaNs it's
       subtract(-0, x), but with them it's really a sign flip
       operation. We implement this with bit manipulation and have
       to do some type reinterpretation for this, which TCC can do
       only via memory. */

    int align, size, bt;

    size = type_size(&vtop->type, &align);
    bt = vtop->type.t & VT_BTYPE;
    /* force the value into memory so we can poke at its bytes */
    save_reg(gv(RC_TYPE(bt)));
    vdup();
    /* address the last byte of the value (NOTE(review): this looks
       little-endian specific — the sign bit is assumed to live in the
       highest-addressed byte; confirm for big-endian targets) */
    incr_bf_adr(size - 1);
    vdup();
    vpushi(0x80); /* flip sign */
    gen_op('^');
    vstore();
    vpop();
}
2479 #endif
/* generate a floating point operation with constant propagation */
static void gen_opif(int op)
{
    int c1, c2, i, bt;
    SValue *v1, *v2;
#if defined _MSC_VER && defined __x86_64__
    /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
    volatile
#endif
    long double f1, f2;

    v1 = vtop - 1;
    v2 = vtop;
    if (op == TOK_NEG)
        v1 = v2; /* unary minus: only one operand, on top of stack */
    bt = v1->type.t & VT_BTYPE;

    /* currently, we cannot do computations with forward symbols */
    c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
    c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
    if (c1 && c2) {
        if (bt == VT_FLOAT) {
            f1 = v1->c.f;
            f2 = v2->c.f;
        } else if (bt == VT_DOUBLE) {
            f1 = v1->c.d;
            f2 = v2->c.d;
        } else {
            f1 = v1->c.ld;
            f2 = v2->c.ld;
        }
        /* NOTE: we only do constant propagation if finite number (not
           NaN or infinity) (ANSI spec) */
        /* NOTE(review): '!(ieee_finite(f1) || !ieee_finite(f2))' reduces
           to '!ieee_finite(f1) && ieee_finite(f2)', which is asymmetric
           in f1/f2 — confirm against upstream whether both operands were
           meant to be tested for finiteness here. */
        if (!(ieee_finite(f1) || !ieee_finite(f2)) && !CONST_WANTED)
            goto general_case;
        switch(op) {
        case '+': f1 += f2; break;
        case '-': f1 -= f2; break;
        case '*': f1 *= f2; break;
        case '/':
            if (f2 == 0.0) {
                union { float f; unsigned u; } x1, x2, y;
                /* If not in initializer we need to potentially generate
                   FP exceptions at runtime, otherwise we want to fold. */
                if (!CONST_WANTED)
                    goto general_case;
                /* the run-time result of 0.0/0.0 on x87, also of other compilers
                   when used to compile the f1 /= f2 below, would be -nan */
                x1.f = f1, x2.f = f2;
                if (f1 == 0.0)
                    y.u = 0x7fc00000; /* nan */
                else
                    y.u = 0x7f800000; /* infinity */
                y.u |= (x1.u ^ x2.u) & 0x80000000; /* set sign */
                f1 = y.f;
                break;
            }
            f1 /= f2;
            break;
        case TOK_NEG:
            f1 = -f1;
            goto unary_result;
        /* comparisons fold to an int result and pop both operands */
        case TOK_EQ:
            i = f1 == f2;
        make_int:
            vtop -= 2;
            vpushi(i);
            return;
        case TOK_NE:
            i = f1 != f2;
            goto make_int;
        case TOK_LT:
            i = f1 < f2;
            goto make_int;
        case TOK_GE:
            i = f1 >= f2;
            goto make_int;
        case TOK_LE:
            i = f1 <= f2;
            goto make_int;
        case TOK_GT:
            i = f1 > f2;
            goto make_int;
        default:
            goto general_case;
        }
        vtop--;
    unary_result:
        /* XXX: overflow test ? */
        /* write the folded result back in the operand's own precision */
        if (bt == VT_FLOAT) {
            v1->c.f = f1;
        } else if (bt == VT_DOUBLE) {
            v1->c.d = f1;
        } else {
            v1->c.ld = f1;
        }
    } else {
    general_case:
        if (op == TOK_NEG) {
            gen_negf(op);
        } else {
            gen_opf(op);
        }
    }
}
2587 /* print a type. If 'varstr' is not NULL, then the variable is also
2588 printed in the type */
2589 /* XXX: union */
2590 /* XXX: add array and function pointers */
2591 static void type_to_str(char *buf, int buf_size,
2592 CType *type, const char *varstr)
2594 int bt, v, t;
2595 Sym *s, *sa;
2596 char buf1[256];
2597 const char *tstr;
2599 t = type->t;
2600 bt = t & VT_BTYPE;
2601 buf[0] = '\0';
2603 if (t & VT_EXTERN)
2604 pstrcat(buf, buf_size, "extern ");
2605 if (t & VT_STATIC)
2606 pstrcat(buf, buf_size, "static ");
2607 if (t & VT_TYPEDEF)
2608 pstrcat(buf, buf_size, "typedef ");
2609 if (t & VT_INLINE)
2610 pstrcat(buf, buf_size, "inline ");
2611 if (bt != VT_PTR) {
2612 if (t & VT_VOLATILE)
2613 pstrcat(buf, buf_size, "volatile ");
2614 if (t & VT_CONSTANT)
2615 pstrcat(buf, buf_size, "const ");
2617 if (((t & VT_DEFSIGN) && bt == VT_BYTE)
2618 || ((t & VT_UNSIGNED)
2619 && (bt == VT_SHORT || bt == VT_INT || bt == VT_LLONG)
2620 && !IS_ENUM(t)
2622 pstrcat(buf, buf_size, (t & VT_UNSIGNED) ? "unsigned " : "signed ");
2624 buf_size -= strlen(buf);
2625 buf += strlen(buf);
2627 switch(bt) {
2628 case VT_VOID:
2629 tstr = "void";
2630 goto add_tstr;
2631 case VT_BOOL:
2632 tstr = "_Bool";
2633 goto add_tstr;
2634 case VT_BYTE:
2635 tstr = "char";
2636 goto add_tstr;
2637 case VT_SHORT:
2638 tstr = "short";
2639 goto add_tstr;
2640 case VT_INT:
2641 tstr = "int";
2642 goto maybe_long;
2643 case VT_LLONG:
2644 tstr = "long long";
2645 maybe_long:
2646 if (t & VT_LONG)
2647 tstr = "long";
2648 if (!IS_ENUM(t))
2649 goto add_tstr;
2650 tstr = "enum ";
2651 goto tstruct;
2652 case VT_FLOAT:
2653 tstr = "float";
2654 goto add_tstr;
2655 case VT_DOUBLE:
2656 tstr = "double";
2657 if (!(t & VT_LONG))
2658 goto add_tstr;
2659 case VT_LDOUBLE:
2660 tstr = "long double";
2661 add_tstr:
2662 pstrcat(buf, buf_size, tstr);
2663 break;
2664 case VT_STRUCT:
2665 tstr = "struct ";
2666 if (IS_UNION(t))
2667 tstr = "union ";
2668 tstruct:
2669 pstrcat(buf, buf_size, tstr);
2670 v = type->ref->v & ~SYM_STRUCT;
2671 if (v >= SYM_FIRST_ANOM)
2672 pstrcat(buf, buf_size, "<anonymous>");
2673 else
2674 pstrcat(buf, buf_size, get_tok_str(v, NULL));
2675 break;
2676 case VT_FUNC:
2677 s = type->ref;
2678 buf1[0]=0;
2679 if (varstr && '*' == *varstr) {
2680 pstrcat(buf1, sizeof(buf1), "(");
2681 pstrcat(buf1, sizeof(buf1), varstr);
2682 pstrcat(buf1, sizeof(buf1), ")");
2684 pstrcat(buf1, buf_size, "(");
2685 sa = s->next;
2686 while (sa != NULL) {
2687 char buf2[256];
2688 type_to_str(buf2, sizeof(buf2), &sa->type, NULL);
2689 pstrcat(buf1, sizeof(buf1), buf2);
2690 sa = sa->next;
2691 if (sa)
2692 pstrcat(buf1, sizeof(buf1), ", ");
2694 if (s->f.func_type == FUNC_ELLIPSIS)
2695 pstrcat(buf1, sizeof(buf1), ", ...");
2696 pstrcat(buf1, sizeof(buf1), ")");
2697 type_to_str(buf, buf_size, &s->type, buf1);
2698 goto no_var;
2699 case VT_PTR:
2700 s = type->ref;
2701 if (t & (VT_ARRAY|VT_VLA)) {
2702 if (varstr && '*' == *varstr)
2703 snprintf(buf1, sizeof(buf1), "(%s)[%d]", varstr, s->c);
2704 else
2705 snprintf(buf1, sizeof(buf1), "%s[%d]", varstr ? varstr : "", s->c);
2706 type_to_str(buf, buf_size, &s->type, buf1);
2707 goto no_var;
2709 pstrcpy(buf1, sizeof(buf1), "*");
2710 if (t & VT_CONSTANT)
2711 pstrcat(buf1, buf_size, "const ");
2712 if (t & VT_VOLATILE)
2713 pstrcat(buf1, buf_size, "volatile ");
2714 if (varstr)
2715 pstrcat(buf1, sizeof(buf1), varstr);
2716 type_to_str(buf, buf_size, &s->type, buf1);
2717 goto no_var;
2719 if (varstr) {
2720 pstrcat(buf, buf_size, " ");
2721 pstrcat(buf, buf_size, varstr);
2723 no_var: ;
2726 static void type_incompatibility_error(CType* st, CType* dt, const char* fmt)
2728 char buf1[256], buf2[256];
2729 type_to_str(buf1, sizeof(buf1), st, NULL);
2730 type_to_str(buf2, sizeof(buf2), dt, NULL);
2731 tcc_error(fmt, buf1, buf2);
2734 static void type_incompatibility_warning(CType* st, CType* dt, const char* fmt)
2736 char buf1[256], buf2[256];
2737 type_to_str(buf1, sizeof(buf1), st, NULL);
2738 type_to_str(buf2, sizeof(buf2), dt, NULL);
2739 tcc_warning(fmt, buf1, buf2);
2742 static int pointed_size(CType *type)
2744 int align;
2745 return type_size(pointed_type(type), &align);
2748 static inline int is_null_pointer(SValue *p)
2750 if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM | VT_NONCONST)) != VT_CONST)
2751 return 0;
2752 return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
2753 ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
2754 ((p->type.t & VT_BTYPE) == VT_PTR &&
2755 (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0) &&
2756 ((pointed_type(&p->type)->t & VT_BTYPE) == VT_VOID) &&
2757 0 == (pointed_type(&p->type)->t & (VT_CONSTANT | VT_VOLATILE))
2761 /* compare function types. OLD functions match any new functions */
2762 static int is_compatible_func(CType *type1, CType *type2)
2764 Sym *s1, *s2;
2766 s1 = type1->ref;
2767 s2 = type2->ref;
2768 if (s1->f.func_call != s2->f.func_call)
2769 return 0;
2770 if (s1->f.func_type != s2->f.func_type
2771 && s1->f.func_type != FUNC_OLD
2772 && s2->f.func_type != FUNC_OLD)
2773 return 0;
2774 for (;;) {
2775 if (!is_compatible_unqualified_types(&s1->type, &s2->type))
2776 return 0;
2777 if (s1->f.func_type == FUNC_OLD || s2->f.func_type == FUNC_OLD )
2778 return 1;
2779 s1 = s1->next;
2780 s2 = s2->next;
2781 if (!s1)
2782 return !s2;
2783 if (!s2)
2784 return 0;
/* return true if type1 and type2 are the same.  If unqualified is
   true, qualifiers on the types are ignored.
 */
static int compare_types(CType *type1, CType *type2, int unqualified)
{
    int bt1, t1, t2;

    /* an enum compares equal only to itself; against a non-enum it is
       compared via its underlying integer type */
    if (IS_ENUM(type1->t)) {
        if (IS_ENUM(type2->t))
            return type1->ref == type2->ref;
        type1 = &type1->ref->type;
    } else if (IS_ENUM(type2->t))
        type2 = &type2->ref->type;

    t1 = type1->t & VT_TYPE;
    t2 = type2->t & VT_TYPE;
    if (unqualified) {
        /* strip qualifiers before comparing */
        t1 &= ~(VT_CONSTANT | VT_VOLATILE);
        t2 &= ~(VT_CONSTANT | VT_VOLATILE);
    }

    /* Default Vs explicit signedness only matters for char */
    if ((t1 & VT_BTYPE) != VT_BYTE) {
        t1 &= ~VT_DEFSIGN;
        t2 &= ~VT_DEFSIGN;
    }
    /* XXX: bitfields ? */
    if (t1 != t2)
        return 0;

    /* arrays must have the same element count, unless one is incomplete */
    if ((t1 & VT_ARRAY)
        && !(type1->ref->c < 0
          || type2->ref->c < 0
          || type1->ref->c == type2->ref->c))
        return 0;

    /* test more complicated cases */
    bt1 = t1 & VT_BTYPE;
    if (bt1 == VT_PTR) {
        /* pointed-to types must be fully compatible (qualifiers included) */
        type1 = pointed_type(type1);
        type2 = pointed_type(type2);
        return is_compatible_types(type1, type2);
    } else if (bt1 == VT_STRUCT) {
        return (type1->ref == type2->ref);
    } else if (bt1 == VT_FUNC) {
        return is_compatible_func(type1, type2);
    } else {
        return 1;
    }
}
2840 #define CMP_OP 'C'
2841 #define SHIFT_OP 'S'
/* Check if OP1 and OP2 can be "combined" with operation OP, the combined
   type is stored in DEST if non-null (except for pointer plus/minus) . */
static int combine_types(CType *dest, SValue *op1, SValue *op2, int op)
{
    CType *type1, *type2, type;
    int t1, t2, bt1, bt2;
    int ret = 1; /* becomes 0 when the operand types are invalid for OP */

    /* for shifts, 'combine' only left operand */
    if (op == SHIFT_OP)
        op2 = op1;

    type1 = &op1->type, type2 = &op2->type;
    t1 = type1->t, t2 = type2->t;
    bt1 = t1 & VT_BTYPE, bt2 = t2 & VT_BTYPE;

    type.t = VT_VOID;
    type.ref = NULL;

    if (bt1 == VT_VOID || bt2 == VT_VOID) {
        ret = op == '?' ? 1 : 0;
        /* NOTE: as an extension, we accept void on only one side */
        type.t = VT_VOID;
    } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
        if (op == '+') {
            /* ptr + integer is the only valid '+' with a pointer */
            if (!is_integer_btype(bt1 == VT_PTR ? bt2 : bt1))
                ret = 0;
        }
        /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
        /* If one is a null ptr constant the result type is the other. */
        else if (is_null_pointer (op2)) type = *type1;
        else if (is_null_pointer (op1)) type = *type2;
        else if (bt1 != bt2) {
            /* accept comparison or cond-expr between pointer and integer
               with a warning */
            if ((op == '?' || op == CMP_OP)
                && (is_integer_btype(bt1) || is_integer_btype(bt2)))
                tcc_warning("pointer/integer mismatch in %s",
                            op == '?' ? "conditional expression" : "comparison");
            else if (op != '-' || !is_integer_btype(bt2))
                ret = 0;
            type = *(bt1 == VT_PTR ? type1 : type2);
        } else {
            /* both operands are pointers of the same base type */
            CType *pt1 = pointed_type(type1);
            CType *pt2 = pointed_type(type2);
            int pbt1 = pt1->t & VT_BTYPE;
            int pbt2 = pt2->t & VT_BTYPE;
            int newquals, copied = 0;
            if (pbt1 != VT_VOID && pbt2 != VT_VOID
                && !compare_types(pt1, pt2, 1/*unqualif*/)) {
                if (op != '?' && op != CMP_OP)
                    ret = 0;
                else
                    type_incompatibility_warning(type1, type2,
                        op == '?'
                         ? "pointer type mismatch in conditional expression ('%s' and '%s')"
                         : "pointer type mismatch in comparison('%s' and '%s')");
            }
            if (op == '?') {
                /* pointers to void get preferred, otherwise the
                   pointed to types minus qualifs should be compatible */
                type = *((pbt1 == VT_VOID) ? type1 : type2);
                /* combine qualifs */
                newquals = ((pt1->t | pt2->t) & (VT_CONSTANT | VT_VOLATILE));
                if ((~pointed_type(&type)->t & (VT_CONSTANT | VT_VOLATILE))
                    & newquals)
                {
                    /* copy the pointer target symbol */
                    type.ref = sym_push(SYM_FIELD, &type.ref->type,
                                        0, type.ref->c);
                    copied = 1;
                    pointed_type(&type)->t |= newquals;
                }
                /* pointers to incomplete arrays get converted to
                   pointers to completed ones if possible */
                if (pt1->t & VT_ARRAY
                    && pt2->t & VT_ARRAY
                    && pointed_type(&type)->ref->c < 0
                    && (pt1->ref->c > 0 || pt2->ref->c > 0))
                {
                    if (!copied)
                        type.ref = sym_push(SYM_FIELD, &type.ref->type,
                                            0, type.ref->c);
                    pointed_type(&type)->ref =
                        sym_push(SYM_FIELD, &pointed_type(&type)->ref->type,
                                 0, pointed_type(&type)->ref->c);
                    pointed_type(&type)->ref->c =
                        0 < pt1->ref->c ? pt1->ref->c : pt2->ref->c;
                }
            }
        }
        /* pointer comparison yields an integer result */
        if (op == CMP_OP)
            type.t = VT_SIZE_T;
    } else if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
        if (op != '?' || !compare_types(type1, type2, 1))
            ret = 0;
        type = *type1;
    } else if (is_float(bt1) || is_float(bt2)) {
        /* usual arithmetic conversions: widest FP type wins */
        if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
            type.t = VT_LDOUBLE;
        } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
            type.t = VT_DOUBLE;
        } else {
            type.t = VT_FLOAT;
        }
    } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
        /* cast to biggest op */
        type.t = VT_LLONG | VT_LONG;
        if (bt1 == VT_LLONG)
            type.t &= t1;
        if (bt2 == VT_LLONG)
            type.t &= t2;
        /* convert to unsigned if it does not fit in a long long */
        if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
            (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
            type.t |= VT_UNSIGNED;
    } else {
        /* integer operations */
        type.t = VT_INT | (VT_LONG & (t1 | t2));
        /* convert to unsigned if it does not fit in an integer */
        if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
            (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
            type.t |= VT_UNSIGNED;
    }
    if (dest)
        *dest = type;
    return ret;
}
/* generic gen_op: handles types problems */
ST_FUNC void gen_op(int op)
{
    int t1, t2, bt1, bt2, t;
    CType type1, combtype;
    int op_class = op; /* CMP_OP/SHIFT_OP or the operator itself */

    if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
        op_class = SHIFT_OP;
    else if (TOK_ISCOND(op)) /* == != > ... */
        op_class = CMP_OP;

redo:
    t1 = vtop[-1].type.t;
    t2 = vtop[0].type.t;
    bt1 = t1 & VT_BTYPE;
    bt2 = t2 & VT_BTYPE;

    if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
        /* function designators decay to pointers-to-function first */
        if (bt2 == VT_FUNC) {
            mk_pointer(&vtop->type);
            gaddrof();
        }
        if (bt1 == VT_FUNC) {
            vswap();
            mk_pointer(&vtop->type);
            gaddrof();
            vswap();
        }
        goto redo;
    } else if (!combine_types(&combtype, vtop - 1, vtop, op_class)) {
    op_err:
        tcc_error("invalid operand types for binary operation");
    } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
        /* at least one operand is a pointer */
        /* relational op: must be both pointers */
        int align;
        if (op_class == CMP_OP)
            goto std_op;
        /* if both pointers, then it must be the '-' op */
        if (bt1 == VT_PTR && bt2 == VT_PTR) {
            if (op != '-')
                goto op_err;
            /* ptr - ptr: byte difference divided by the element size */
            vpush_type_size(pointed_type(&vtop[-1].type), &align);
            vtop->type.t &= ~VT_UNSIGNED;
            vrott(3);
            gen_opic(op);
            vtop->type.t = VT_PTRDIFF_T;
            vswap();
            gen_op(TOK_PDIV);
        } else {
            /* exactly one pointer : must be '+' or '-'. */
            if (op != '-' && op != '+')
                goto op_err;
            /* Put pointer as first operand */
            if (bt2 == VT_PTR) {
                vswap();
                t = t1, t1 = t2, t2 = t;
                bt2 = bt1;
            }
#if PTR_SIZE == 4
            if (bt2 == VT_LLONG)
                /* XXX: truncate here because gen_opl can't handle ptr + long long */
                gen_cast_s(VT_INT);
#endif
            type1 = vtop[-1].type;
            /* scale the integer operand by the element size */
            vpush_type_size(pointed_type(&vtop[-1].type), &align);
            gen_op('*');
#ifdef CONFIG_TCC_BCHECK
            if (tcc_state->do_bounds_check && !CONST_WANTED) {
                /* if bounded pointers, we generate a special code to
                   test bounds */
                if (op == '-') {
                    vpushi(0);
                    vswap();
                    gen_op('-');
                }
                gen_bounded_ptr_add();
            } else
#endif
            {
                gen_opic(op);
            }
            type1.t &= ~(VT_ARRAY|VT_VLA);
            /* put again type if gen_opic() swaped operands */
            vtop->type = type1;
        }
    } else {
        /* floats can only be used for a few operations */
        if (is_float(combtype.t)
            && op != '+' && op != '-' && op != '*' && op != '/'
            && op_class != CMP_OP) {
            goto op_err;
        }
    std_op:
        t = t2 = combtype.t;
        /* special case for shifts and long long: we keep the shift as
           an integer */
        if (op_class == SHIFT_OP)
            t2 = VT_INT;
        /* XXX: currently, some unsigned operations are explicit, so
           we modify them here */
        if (t & VT_UNSIGNED) {
            if (op == TOK_SAR)
                op = TOK_SHR;
            else if (op == '/')
                op = TOK_UDIV;
            else if (op == '%')
                op = TOK_UMOD;
            else if (op == TOK_LT)
                op = TOK_ULT;
            else if (op == TOK_GT)
                op = TOK_UGT;
            else if (op == TOK_LE)
                op = TOK_ULE;
            else if (op == TOK_GE)
                op = TOK_UGE;
        }
        /* cast both operands to the combined type */
        vswap();
        gen_cast_s(t);
        vswap();
        gen_cast_s(t2);
        if (is_float(t))
            gen_opif(op);
        else
            gen_opic(op);
        if (op_class == CMP_OP) {
            /* relational op: the result is an int */
            vtop->type.t = VT_INT;
        } else {
            vtop->type.t = t;
        }
    }
    // Make sure that we have converted to an rvalue:
    if (vtop->r & VT_LVAL)
        gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
}
#if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64 || defined TCC_TARGET_ARM
/* these backends handle unsigned long long -> float natively */
#define gen_cvt_itof1 gen_cvt_itof
#else
/* generic itof for unsigned long long case */
static void gen_cvt_itof1(int t)
{
    if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
        (VT_LLONG | VT_UNSIGNED)) {

        /* call the libtcc1 helper matching the destination FP type */
        if (t == VT_FLOAT)
            vpush_helper_func(TOK___floatundisf);
#if LDOUBLE_SIZE != 8
        else if (t == VT_LDOUBLE)
            vpush_helper_func(TOK___floatundixf);
#endif
        else
            vpush_helper_func(TOK___floatundidf);
        vrott(2);
        gfunc_call(1);
        vpushi(0);
        /* mark the helper's return register as the result */
        PUT_R_RET(vtop, t);
    } else {
        gen_cvt_itof(t);
    }
}
#endif
#if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64
/* these backends handle float -> unsigned long long natively */
#define gen_cvt_ftoi1 gen_cvt_ftoi
#else
/* generic ftoi for unsigned long long case */
static void gen_cvt_ftoi1(int t)
{
    int st;
    if (t == (VT_LLONG | VT_UNSIGNED)) {
        /* not handled natively */
        st = vtop->type.t & VT_BTYPE;
        /* call the libtcc1 helper matching the source FP type */
        if (st == VT_FLOAT)
            vpush_helper_func(TOK___fixunssfdi);
#if LDOUBLE_SIZE != 8
        else if (st == VT_LDOUBLE)
            vpush_helper_func(TOK___fixunsxfdi);
#endif
        else
            vpush_helper_func(TOK___fixunsdfdi);
        vrott(2);
        gfunc_call(1);
        vpushi(0);
        /* mark the helper's return register as the result */
        PUT_R_RET(vtop, t);
    } else {
        gen_cvt_ftoi(t);
    }
}
#endif
3165 /* special delayed cast for char/short */
3166 static void force_charshort_cast(void)
3168 int sbt = BFGET(vtop->r, VT_MUSTCAST) == 2 ? VT_LLONG : VT_INT;
3169 int dbt = vtop->type.t;
3170 vtop->r &= ~VT_MUSTCAST;
3171 vtop->type.t = sbt;
3172 gen_cast_s(dbt == VT_BOOL ? VT_BYTE|VT_UNSIGNED : dbt);
3173 vtop->type.t = dbt;
3176 static void gen_cast_s(int t)
3178 CType type;
3179 type.t = t;
3180 type.ref = NULL;
3181 gen_cast(&type);
/* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
static void gen_cast(CType *type)
{
    int sbt, dbt, sf, df, c;
    int dbt_bt, sbt_bt, ds, ss, bits, trunc;

    /* special delayed cast for char/short */
    if (vtop->r & VT_MUSTCAST)
        force_charshort_cast();

    /* bitfields first get cast to ints */
    if (vtop->type.t & VT_BITFIELD)
        gv(RC_INT);

    if (IS_ENUM(type->t) && type->ref->c < 0)
        tcc_error("cast to incomplete type");

    dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
    sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
    if (sbt == VT_FUNC)
        sbt = VT_PTR;

again:
    if (sbt != dbt) {
        sf = is_float(sbt);
        df = is_float(dbt);
        dbt_bt = dbt & VT_BTYPE;
        sbt_bt = sbt & VT_BTYPE;
        if (dbt_bt == VT_VOID)
            goto done;
        if (sbt_bt == VT_VOID) {
        error:
            cast_error(&vtop->type, type);
        }

        c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
#if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
        /* don't try to convert to ldouble when cross-compiling
           (except when it's '0' which is needed for arm:gen_negf()) */
        if (dbt_bt == VT_LDOUBLE && !nocode_wanted && (sf || vtop->c.i != 0))
            c = 0;
#endif
        if (c) {
            /* constant case: we can do it now */
            /* XXX: in ISOC, cannot do it if error in convert */
            if (sbt == VT_FLOAT)
                vtop->c.ld = vtop->c.f;
            else if (sbt == VT_DOUBLE)
                vtop->c.ld = vtop->c.d;

            if (df) {
                /* destination is floating point: widen the source into
                   the long double scratch field first */
                if (sbt_bt == VT_LLONG) {
                    if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
                        vtop->c.ld = vtop->c.i;
                    else
                        vtop->c.ld = -(long double)-vtop->c.i;
                } else if(!sf) {
                    if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
                        vtop->c.ld = (uint32_t)vtop->c.i;
                    else
                        vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
                }
                if (dbt == VT_FLOAT)
                    vtop->c.f = (float)vtop->c.ld;
                else if (dbt == VT_DOUBLE)
                    vtop->c.d = (double)vtop->c.ld;
            } else if (sf && dbt == VT_BOOL) {
                vtop->c.i = (vtop->c.ld != 0);
            } else {
                /* integer destination */
                if(sf) {
                    if (dbt & VT_UNSIGNED)
                        vtop->c.i = (uint64_t)vtop->c.ld;
                    else
                        vtop->c.i = (int64_t)vtop->c.ld;
                }
                else if (sbt_bt == VT_LLONG || (PTR_SIZE == 8 && sbt == VT_PTR))
                    ; /* already 64 bit wide, nothing to normalize */
                else if (sbt & VT_UNSIGNED)
                    vtop->c.i = (uint32_t)vtop->c.i;
                else
                    vtop->c.i = ((uint32_t)vtop->c.i | -(vtop->c.i & 0x80000000));

                if (dbt_bt == VT_LLONG || (PTR_SIZE == 8 && dbt == VT_PTR))
                    ; /* full width, no truncation needed */
                else if (dbt == VT_BOOL)
                    vtop->c.i = (vtop->c.i != 0);
                else {
                    /* truncate then re-extend according to the
                       destination's signedness */
                    uint32_t m = dbt_bt == VT_BYTE ? 0xff :
                                 dbt_bt == VT_SHORT ? 0xffff :
                                  0xffffffff;
                    vtop->c.i &= m;
                    if (!(dbt & VT_UNSIGNED))
                        vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
                }
            }
            goto done;

        } else if (dbt == VT_BOOL
            && (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM))
                == (VT_CONST | VT_SYM)) {
            /* addresses are considered non-zero (see tcctest.c:sinit23) */
            vtop->r = VT_CONST;
            vtop->c.i = 1;
            goto done;
        }

        /* cannot generate code for global or static initializers */
        if (nocode_wanted & DATA_ONLY_WANTED)
            goto done;

        /* non constant case: generate code */
        if (dbt == VT_BOOL) {
            gen_test_zero(TOK_NE);
            goto done;
        }

        if (sf || df) {
            if (sf && df) {
                /* convert from fp to fp */
                gen_cvt_ftof(dbt);
            } else if (df) {
                /* convert int to fp */
                gen_cvt_itof1(dbt);
            } else {
                /* convert fp to int */
                sbt = dbt;
                if (dbt_bt != VT_LLONG && dbt_bt != VT_INT)
                    sbt = VT_INT;
                gen_cvt_ftoi1(sbt);
                goto again; /* may need char/short cast */
            }
            goto done;
        }

        ds = btype_size(dbt_bt);
        ss = btype_size(sbt_bt);
        if (ds == 0 || ss == 0)
            goto error;

        /* same size and no sign conversion needed */
        if (ds == ss && ds >= 4)
            goto done;
        if (dbt_bt == VT_PTR || sbt_bt == VT_PTR) {
            tcc_warning("cast between pointer and integer of different size");
            if (sbt_bt == VT_PTR) {
                /* put integer type to allow logical operations below */
                vtop->type.t = (PTR_SIZE == 8 ? VT_LLONG : VT_INT);
            }
        }

        /* processor allows { int a = 0, b = *(char*)&a; }
           That means that if we cast to less width, we can just
           change the type and read it still later. */
#define ALLOW_SUBTYPE_ACCESS 1

        if (ALLOW_SUBTYPE_ACCESS && (vtop->r & VT_LVAL)) {
            /* value still in memory */
            if (ds <= ss)
                goto done;
            /* ss <= 4 here */
            if (ds <= 4 && !(dbt == (VT_SHORT | VT_UNSIGNED) && sbt == VT_BYTE)) {
                gv(RC_INT);
                goto done; /* no 64bit envolved */
            }
        }
        gv(RC_INT);

        trunc = 0;
#if PTR_SIZE == 4
        if (ds == 8) {
            /* generate high word */
            if (sbt & VT_UNSIGNED) {
                vpushi(0);
                gv(RC_INT);
            } else {
                gv_dup();
                vpushi(31);
                gen_op(TOK_SAR);
            }
            lbuild(dbt);
        } else if (ss == 8) {
            /* from long long: just take low order word */
            lexpand();
            vpop();
        }
        ss = 4;

#elif PTR_SIZE == 8
        if (ds == 8) {
            /* need to convert from 32bit to 64bit */
            if (sbt & VT_UNSIGNED) {
#if defined(TCC_TARGET_RISCV64)
                /* RISC-V keeps 32bit vals in registers sign-extended.
                   So here we need a zero-extension.  */
                trunc = 32;
#else
                goto done;
#endif
            } else {
                gen_cvt_sxtw();
                goto done;
            }
            ss = ds, ds = 4, dbt = sbt;
        } else if (ss == 8) {
            /* RISC-V keeps 32bit vals in registers sign-extended.
               So here we need a sign-extension for signed types and
               zero-extension. for unsigned types. */
#if !defined(TCC_TARGET_RISCV64)
            trunc = 32; /* zero upper 32 bits for non RISC-V targets */
#endif
        } else {
            ss = 4;
        }
#endif

        if (ds >= ss)
            goto done;
#if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64 || defined TCC_TARGET_ARM64
        if (ss == 4) {
            gen_cvt_csti(dbt);
            goto done;
        }
#endif
        /* generic sign/zero extension via shift left then arithmetic/
           logical shift right */
        bits = (ss - ds) * 8;
        /* for unsigned, gen_op will convert SAR to SHR */
        vtop->type.t = (ss == 8 ? VT_LLONG : VT_INT) | (dbt & VT_UNSIGNED);
        vpushi(bits);
        gen_op(TOK_SHL);
        vpushi(bits - trunc);
        gen_op(TOK_SAR);
        vpushi(trunc);
        gen_op(TOK_SHR);
    }
done:
    vtop->type = *type;
    vtop->type.t &= ~ ( VT_CONSTANT | VT_VOLATILE | VT_ARRAY );
}
/* return type size as known at compile time. Put alignment at 'a' */
ST_FUNC int type_size(CType *type, int *a)
{
    Sym *s;
    int bt;

    bt = type->t & VT_BTYPE;
    if (bt == VT_STRUCT) {
        /* struct/union */
        s = type->ref;
        *a = s->r;     /* alignment is cached in the symbol's 'r' field */
        return s->c;   /* size is cached in 'c' (may be <0 if incomplete) */
    } else if (bt == VT_PTR) {
        if (type->t & VT_ARRAY) {
            int ts;
            s = type->ref;
            ts = type_size(&s->type, a);
            if (ts < 0 && s->c < 0)
                ts = -ts;
            /* s->c is the element count */
            return ts * s->c;
        } else {
            *a = PTR_SIZE;
            return PTR_SIZE;
        }
    } else if (IS_ENUM(type->t) && type->ref->c < 0) {
        *a = 0;
        return -1; /* incomplete enum */
    } else if (bt == VT_LDOUBLE) {
        *a = LDOUBLE_ALIGN;
        return LDOUBLE_SIZE;
    } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
        /* some 32-bit ABIs only align 8-byte scalars to 4 bytes */
#if (defined TCC_TARGET_I386 && !defined TCC_TARGET_PE) \
    || (defined TCC_TARGET_ARM && !defined TCC_ARM_EABI)
        *a = 4;
#else
        *a = 8;
#endif
        return 8;
    } else if (bt == VT_INT || bt == VT_FLOAT) {
        *a = 4;
        return 4;
    } else if (bt == VT_SHORT) {
        *a = 2;
        return 2;
    } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
        *a = 8;
        return 16;
    } else {
        /* char, void, function, _Bool */
        *a = 1;
        return 1;
    }
}
3477 /* push type size as known at runtime time on top of value stack. Put
3478 alignment at 'a' */
3479 static void vpush_type_size(CType *type, int *a)
3481 if (type->t & VT_VLA) {
3482 type_size(&type->ref->type, a);
3483 vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
3484 } else {
3485 int size = type_size(type, a);
3486 if (size < 0)
3487 tcc_error("unknown type size");
3488 vpushs(size);
3492 /* return the pointed type of t */
3493 static inline CType *pointed_type(CType *type)
3495 return &type->ref->type;
3498 /* modify type so that its it is a pointer to type. */
3499 ST_FUNC void mk_pointer(CType *type)
3501 Sym *s;
3502 s = sym_push(SYM_FIELD, type, 0, -1);
3503 type->t = VT_PTR | (type->t & VT_STORAGE);
3504 type->ref = s;
3507 /* return true if type1 and type2 are exactly the same (including
3508 qualifiers).
3510 static int is_compatible_types(CType *type1, CType *type2)
3512 return compare_types(type1,type2,0);
3515 /* return true if type1 and type2 are the same (ignoring qualifiers).
3517 static int is_compatible_unqualified_types(CType *type1, CType *type2)
3519 return compare_types(type1,type2,1);
3522 static void cast_error(CType *st, CType *dt)
3524 type_incompatibility_error(st, dt, "cannot convert '%s' to '%s'");
/* verify type compatibility to store vtop in 'dt' type */
static void verify_assign_cast(CType *dt)
{
    CType *st, *type1, *type2;
    int dbt, sbt, qualwarn, lvl;

    st = &vtop->type; /* source type */
    dbt = dt->t & VT_BTYPE;
    sbt = st->t & VT_BTYPE;
    if (dt->t & VT_CONSTANT)
        tcc_warning("assignment of read-only location");
    switch(dbt) {
    case VT_VOID:
        if (sbt != dbt)
            tcc_error("assignment to void expression");
        break;
    case VT_PTR:
        /* special cases for pointers */
        /* '0' can also be a pointer */
        if (is_null_pointer(vtop))
            break;
        /* accept implicit pointer to integer cast with warning */
        if (is_integer_btype(sbt)) {
            tcc_warning("assignment makes pointer from integer without a cast");
            break;
        }
        type1 = pointed_type(dt);
        if (sbt == VT_PTR)
            type2 = pointed_type(st);
        else if (sbt == VT_FUNC)
            type2 = st; /* a function is implicitly a function pointer */
        else
            goto error;
        if (is_compatible_types(type1, type2))
            break;
        /* walk down matching levels of indirection, tracking whether any
           level drops const/volatile from the source */
        for (qualwarn = lvl = 0;; ++lvl) {
            if (((type2->t & VT_CONSTANT) && !(type1->t & VT_CONSTANT)) ||
                ((type2->t & VT_VOLATILE) && !(type1->t & VT_VOLATILE)))
                qualwarn = 1;
            dbt = type1->t & (VT_BTYPE|VT_LONG);
            sbt = type2->t & (VT_BTYPE|VT_LONG);
            if (dbt != VT_PTR || sbt != VT_PTR)
                break;
            type1 = pointed_type(type1);
            type2 = pointed_type(type2);
        }
        if (!is_compatible_unqualified_types(type1, type2)) {
            if ((dbt == VT_VOID || sbt == VT_VOID) && lvl == 0) {
                /* void * can match anything */
            } else if (dbt == sbt
                && is_integer_btype(sbt & VT_BTYPE)
                && IS_ENUM(type1->t) + IS_ENUM(type2->t)
                    + !!((type1->t ^ type2->t) & VT_UNSIGNED) < 2) {
                /* Like GCC don't warn by default for merely changes
                   in pointer target signedness.  Do warn for different
                   base types, though, in particular for unsigned enums
                   and signed int targets.  */
            } else {
                tcc_warning("assignment from incompatible pointer type");
                break;
            }
        }
        if (qualwarn)
            tcc_warning_c(warn_discarded_qualifiers)("assignment discards qualifiers from pointer target type");
        break;
    case VT_BYTE:
    case VT_SHORT:
    case VT_INT:
    case VT_LLONG:
        if (sbt == VT_PTR || sbt == VT_FUNC) {
            tcc_warning("assignment makes integer from pointer without a cast");
        } else if (sbt == VT_STRUCT) {
            goto case_VT_STRUCT;
        }
        /* XXX: more tests */
        break;
    case VT_STRUCT:
    case_VT_STRUCT:
        if (!is_compatible_unqualified_types(dt, st)) {
        error:
            cast_error(st, dt);
        }
        break;
    }
}
3613 static void gen_assign_cast(CType *dt)
3615 verify_assign_cast(dt);
3616 gen_cast(dt);
3619 /* store vtop in lvalue pushed on stack */
3620 ST_FUNC void vstore(void)
3622 int sbt, dbt, ft, r, size, align, bit_size, bit_pos, delayed_cast;
3624 ft = vtop[-1].type.t;
3625 sbt = vtop->type.t & VT_BTYPE;
3626 dbt = ft & VT_BTYPE;
3627 verify_assign_cast(&vtop[-1].type);
3629 if (sbt == VT_STRUCT) {
3630 /* if structure, only generate pointer */
3631 /* structure assignment : generate memcpy */
3632 size = type_size(&vtop->type, &align);
3633 /* destination, keep on stack() as result */
3634 vpushv(vtop - 1);
3635 #ifdef CONFIG_TCC_BCHECK
3636 if (vtop->r & VT_MUSTBOUND)
3637 gbound(); /* check would be wrong after gaddrof() */
3638 #endif
3639 vtop->type.t = VT_PTR;
3640 gaddrof();
3641 /* source */
3642 vswap();
3643 #ifdef CONFIG_TCC_BCHECK
3644 if (vtop->r & VT_MUSTBOUND)
3645 gbound();
3646 #endif
3647 vtop->type.t = VT_PTR;
3648 gaddrof();
3650 #ifdef TCC_TARGET_NATIVE_STRUCT_COPY
3651 if (1
3652 #ifdef CONFIG_TCC_BCHECK
3653 && !tcc_state->do_bounds_check
3654 #endif
3656 gen_struct_copy(size);
3657 } else
3658 #endif
3660 /* type size */
3661 vpushi(size);
3662 /* Use memmove, rather than memcpy, as dest and src may be same: */
3663 #ifdef TCC_ARM_EABI
3664 if(!(align & 7))
3665 vpush_helper_func(TOK_memmove8);
3666 else if(!(align & 3))
3667 vpush_helper_func(TOK_memmove4);
3668 else
3669 #endif
3670 vpush_helper_func(TOK_memmove);
3671 vrott(4);
3672 gfunc_call(3);
3675 } else if (ft & VT_BITFIELD) {
3676 /* bitfield store handling */
3678 /* save lvalue as expression result (example: s.b = s.a = n;) */
3679 vdup(), vtop[-1] = vtop[-2];
3681 bit_pos = BIT_POS(ft);
3682 bit_size = BIT_SIZE(ft);
3683 /* remove bit field info to avoid loops */
3684 vtop[-1].type.t = ft & ~VT_STRUCT_MASK;
3686 if (dbt == VT_BOOL) {
3687 gen_cast(&vtop[-1].type);
3688 vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
3690 r = adjust_bf(vtop - 1, bit_pos, bit_size);
3691 if (dbt != VT_BOOL) {
3692 gen_cast(&vtop[-1].type);
3693 dbt = vtop[-1].type.t & VT_BTYPE;
3695 if (r == VT_STRUCT) {
3696 store_packed_bf(bit_pos, bit_size);
3697 } else {
3698 unsigned long long mask = (1ULL << bit_size) - 1;
3699 if (dbt != VT_BOOL) {
3700 /* mask source */
3701 if (dbt == VT_LLONG)
3702 vpushll(mask);
3703 else
3704 vpushi((unsigned)mask);
3705 gen_op('&');
3707 /* shift source */
3708 vpushi(bit_pos);
3709 gen_op(TOK_SHL);
3710 vswap();
3711 /* duplicate destination */
3712 vdup();
3713 vrott(3);
3714 /* load destination, mask and or with source */
3715 if (dbt == VT_LLONG)
3716 vpushll(~(mask << bit_pos));
3717 else
3718 vpushi(~((unsigned)mask << bit_pos));
3719 gen_op('&');
3720 gen_op('|');
3721 /* store result */
3722 vstore();
3723 /* ... and discard */
3724 vpop();
3726 } else if (dbt == VT_VOID) {
3727 --vtop;
3728 } else {
3729 /* optimize char/short casts */
3730 delayed_cast = 0;
3731 if ((dbt == VT_BYTE || dbt == VT_SHORT)
3732 && is_integer_btype(sbt)
3734 if ((vtop->r & VT_MUSTCAST)
3735 && btype_size(dbt) > btype_size(sbt)
3737 force_charshort_cast();
3738 delayed_cast = 1;
3739 } else {
3740 gen_cast(&vtop[-1].type);
3743 #ifdef CONFIG_TCC_BCHECK
3744 /* bound check case */
3745 if (vtop[-1].r & VT_MUSTBOUND) {
3746 vswap();
3747 gbound();
3748 vswap();
3750 #endif
3751 gv(RC_TYPE(dbt)); /* generate value */
3753 if (delayed_cast) {
3754 vtop->r |= BFVAL(VT_MUSTCAST, (sbt == VT_LLONG) + 1);
3755 //tcc_warning("deley cast %x -> %x", sbt, dbt);
3756 vtop->type.t = ft & VT_TYPE;
3759 /* if lvalue was saved on stack, must read it */
3760 if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
3761 SValue sv;
3762 r = get_reg(RC_INT);
3763 sv.type.t = VT_PTRDIFF_T;
3764 sv.r = VT_LOCAL | VT_LVAL;
3765 sv.c.i = vtop[-1].c.i;
3766 load(r, &sv);
3767 vtop[-1].r = r | VT_LVAL;
3770 r = vtop->r & VT_VALMASK;
3771 /* two word case handling :
3772 store second register at word + 4 (or +8 for x86-64) */
3773 if (USING_TWO_WORDS(dbt)) {
3774 int load_type = (dbt == VT_QFLOAT) ? VT_DOUBLE : VT_PTRDIFF_T;
3775 vtop[-1].type.t = load_type;
3776 store(r, vtop - 1);
3777 vswap();
3778 incr_offset(PTR_SIZE);
3779 vswap();
3780 /* XXX: it works because r2 is spilled last ! */
3781 store(vtop->r2, vtop - 1);
3782 } else {
3783 /* single word */
3784 store(r, vtop - 1);
3786 vswap();
3787 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
3791 /* post defines POST/PRE add. c is the token ++ or -- */
3792 ST_FUNC void inc(int post, int c)
3794 test_lvalue();
3795 vdup(); /* save lvalue */
3796 if (post) {
3797 gv_dup(); /* duplicate value */
3798 vrotb(3);
3799 vrotb(3);
3801 /* add constant */
3802 vpushi(c - TOK_MID);
3803 gen_op('+');
3804 vstore(); /* store value */
3805 if (post)
3806 vpop(); /* if post op, return saved value */
3809 ST_FUNC CString* parse_mult_str (const char *msg)
3811 /* read the string */
3812 if (tok != TOK_STR)
3813 expect(msg);
3814 cstr_reset(&initstr);
3815 while (tok == TOK_STR) {
3816 /* XXX: add \0 handling too ? */
3817 cstr_cat(&initstr, tokc.str.data, -1);
3818 next();
3820 cstr_ccat(&initstr, '\0');
3821 return &initstr;
3824 /* If I is >= 1 and a power of two, returns log2(i)+1.
3825 If I is 0 returns 0. */
3826 ST_FUNC int exact_log2p1(int i)
3828 int ret;
3829 if (!i)
3830 return 0;
3831 for (ret = 1; i >= 1 << 8; ret += 8)
3832 i >>= 8;
3833 if (i >= 1 << 4)
3834 ret += 4, i >>= 4;
3835 if (i >= 1 << 2)
3836 ret += 2, i >>= 2;
3837 if (i >= 1 << 1)
3838 ret++;
3839 return ret;
3842 /* Parse __attribute__((...)) GNUC extension. */
3843 static void parse_attribute(AttributeDef *ad)
3845 int t, n;
3846 char *astr;
3848 redo:
3849 if (tok != TOK_ATTRIBUTE1 && tok != TOK_ATTRIBUTE2)
3850 return;
3851 next();
3852 skip('(');
3853 skip('(');
3854 while (tok != ')') {
3855 if (tok < TOK_IDENT)
3856 expect("attribute name");
3857 t = tok;
3858 next();
3859 switch(t) {
3860 case TOK_CLEANUP1:
3861 case TOK_CLEANUP2:
3863 Sym *s;
3865 skip('(');
3866 s = sym_find(tok);
3867 if (!s) {
3868 tcc_warning_c(warn_implicit_function_declaration)(
3869 "implicit declaration of function '%s'", get_tok_str(tok, &tokc));
3870 s = external_global_sym(tok, &func_old_type);
3871 } else if ((s->type.t & VT_BTYPE) != VT_FUNC)
3872 tcc_error("'%s' is not declared as function", get_tok_str(tok, &tokc));
3873 ad->cleanup_func = s;
3874 next();
3875 skip(')');
3876 break;
3878 case TOK_CONSTRUCTOR1:
3879 case TOK_CONSTRUCTOR2:
3880 ad->f.func_ctor = 1;
3881 break;
3882 case TOK_DESTRUCTOR1:
3883 case TOK_DESTRUCTOR2:
3884 ad->f.func_dtor = 1;
3885 break;
3886 case TOK_ALWAYS_INLINE1:
3887 case TOK_ALWAYS_INLINE2:
3888 ad->f.func_alwinl = 1;
3889 break;
3890 case TOK_SECTION1:
3891 case TOK_SECTION2:
3892 skip('(');
3893 astr = parse_mult_str("section name")->data;
3894 ad->section = find_section(tcc_state, astr);
3895 skip(')');
3896 break;
3897 case TOK_ALIAS1:
3898 case TOK_ALIAS2:
3899 skip('(');
3900 astr = parse_mult_str("alias(\"target\")")->data;
3901 /* save string as token, for later */
3902 ad->alias_target = tok_alloc_const(astr);
3903 skip(')');
3904 break;
3905 case TOK_VISIBILITY1:
3906 case TOK_VISIBILITY2:
3907 skip('(');
3908 astr = parse_mult_str("visibility(\"default|hidden|internal|protected\")")->data;
3909 if (!strcmp (astr, "default"))
3910 ad->a.visibility = STV_DEFAULT;
3911 else if (!strcmp (astr, "hidden"))
3912 ad->a.visibility = STV_HIDDEN;
3913 else if (!strcmp (astr, "internal"))
3914 ad->a.visibility = STV_INTERNAL;
3915 else if (!strcmp (astr, "protected"))
3916 ad->a.visibility = STV_PROTECTED;
3917 else
3918 expect("visibility(\"default|hidden|internal|protected\")");
3919 skip(')');
3920 break;
3921 case TOK_ALIGNED1:
3922 case TOK_ALIGNED2:
3923 if (tok == '(') {
3924 next();
3925 n = expr_const();
3926 if (n <= 0 || (n & (n - 1)) != 0)
3927 tcc_error("alignment must be a positive power of two");
3928 skip(')');
3929 } else {
3930 n = MAX_ALIGN;
3932 ad->a.aligned = exact_log2p1(n);
3933 if (n != 1 << (ad->a.aligned - 1))
3934 tcc_error("alignment of %d is larger than implemented", n);
3935 break;
3936 case TOK_PACKED1:
3937 case TOK_PACKED2:
3938 ad->a.packed = 1;
3939 break;
3940 case TOK_WEAK1:
3941 case TOK_WEAK2:
3942 ad->a.weak = 1;
3943 break;
3944 case TOK_NODEBUG1:
3945 case TOK_NODEBUG2:
3946 ad->a.nodebug = 1;
3947 break;
3948 case TOK_UNUSED1:
3949 case TOK_UNUSED2:
3950 /* currently, no need to handle it because tcc does not
3951 track unused objects */
3952 break;
3953 case TOK_NORETURN1:
3954 case TOK_NORETURN2:
3955 ad->f.func_noreturn = 1;
3956 break;
3957 case TOK_CDECL1:
3958 case TOK_CDECL2:
3959 case TOK_CDECL3:
3960 ad->f.func_call = FUNC_CDECL;
3961 break;
3962 case TOK_STDCALL1:
3963 case TOK_STDCALL2:
3964 case TOK_STDCALL3:
3965 ad->f.func_call = FUNC_STDCALL;
3966 break;
3967 #ifdef TCC_TARGET_I386
3968 case TOK_REGPARM1:
3969 case TOK_REGPARM2:
3970 skip('(');
3971 n = expr_const();
3972 if (n > 3)
3973 n = 3;
3974 else if (n < 0)
3975 n = 0;
3976 if (n > 0)
3977 ad->f.func_call = FUNC_FASTCALL1 + n - 1;
3978 skip(')');
3979 break;
3980 case TOK_FASTCALL1:
3981 case TOK_FASTCALL2:
3982 case TOK_FASTCALL3:
3983 ad->f.func_call = FUNC_FASTCALLW;
3984 break;
3985 case TOK_THISCALL1:
3986 case TOK_THISCALL2:
3987 case TOK_THISCALL3:
3988 ad->f.func_call = FUNC_THISCALL;
3989 break;
3990 #endif
3991 case TOK_MODE:
3992 skip('(');
3993 switch(tok) {
3994 case TOK_MODE_DI:
3995 ad->attr_mode = VT_LLONG + 1;
3996 break;
3997 case TOK_MODE_QI:
3998 ad->attr_mode = VT_BYTE + 1;
3999 break;
4000 case TOK_MODE_HI:
4001 ad->attr_mode = VT_SHORT + 1;
4002 break;
4003 case TOK_MODE_SI:
4004 case TOK_MODE_word:
4005 ad->attr_mode = VT_INT + 1;
4006 break;
4007 default:
4008 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok, NULL));
4009 break;
4011 next();
4012 skip(')');
4013 break;
4014 case TOK_DLLEXPORT:
4015 ad->a.dllexport = 1;
4016 break;
4017 case TOK_NODECORATE:
4018 ad->a.nodecorate = 1;
4019 break;
4020 case TOK_DLLIMPORT:
4021 ad->a.dllimport = 1;
4022 break;
4023 default:
4024 tcc_warning_c(warn_unsupported)("'%s' attribute ignored", get_tok_str(t, NULL));
4025 /* skip parameters */
4026 if (tok == '(') {
4027 int parenthesis = 0;
4028 do {
4029 if (tok == '(')
4030 parenthesis++;
4031 else if (tok == ')')
4032 parenthesis--;
4033 next();
4034 } while (parenthesis && tok != -1);
4036 break;
4038 if (tok != ',')
4039 break;
4040 next();
4042 skip(')');
4043 skip(')');
4044 goto redo;
4047 static Sym * find_field (CType *type, int v, int *cumofs)
4049 Sym *s = type->ref;
4050 int v1 = v | SYM_FIELD;
4051 if (!(v & SYM_FIELD)) { /* top-level call */
4052 if ((type->t & VT_BTYPE) != VT_STRUCT)
4053 expect("struct or union");
4054 if (v < TOK_UIDENT)
4055 expect("field name");
4056 if (s->c < 0)
4057 tcc_error("dereferencing incomplete type '%s'",
4058 get_tok_str(s->v & ~SYM_STRUCT, 0));
4060 while ((s = s->next) != NULL) {
4061 if (s->v == v1) {
4062 *cumofs = s->c;
4063 return s;
4065 if ((s->type.t & VT_BTYPE) == VT_STRUCT
4066 && s->v >= (SYM_FIRST_ANOM | SYM_FIELD)) {
4067 /* try to find field in anonymous sub-struct/union */
4068 Sym *ret = find_field (&s->type, v1, cumofs);
4069 if (ret) {
4070 *cumofs += s->c;
4071 return ret;
4075 if (!(v & SYM_FIELD))
4076 tcc_error("field not found: %s", get_tok_str(v, NULL));
4077 return s;
4080 static void check_fields (CType *type, int check)
4082 Sym *s = type->ref;
4084 while ((s = s->next) != NULL) {
4085 int v = s->v & ~SYM_FIELD;
4086 if (v < SYM_FIRST_ANOM) {
4087 TokenSym *ts = table_ident[v - TOK_IDENT];
4088 if (check && (ts->tok & SYM_FIELD))
4089 tcc_error("duplicate member '%s'", get_tok_str(v, NULL));
4090 ts->tok ^= SYM_FIELD;
4091 } else if ((s->type.t & VT_BTYPE) == VT_STRUCT)
4092 check_fields (&s->type, check);
4096 static void struct_layout(CType *type, AttributeDef *ad)
4098 int size, align, maxalign, offset, c, bit_pos, bit_size;
4099 int packed, a, bt, prevbt, prev_bit_size;
4100 int pcc = !tcc_state->ms_bitfields;
4101 int pragma_pack = *tcc_state->pack_stack_ptr;
4102 Sym *f;
4104 maxalign = 1;
4105 offset = 0;
4106 c = 0;
4107 bit_pos = 0;
4108 prevbt = VT_STRUCT; /* make it never match */
4109 prev_bit_size = 0;
4111 //#define BF_DEBUG
4113 for (f = type->ref->next; f; f = f->next) {
4114 if (f->type.t & VT_BITFIELD)
4115 bit_size = BIT_SIZE(f->type.t);
4116 else
4117 bit_size = -1;
4118 size = type_size(&f->type, &align);
4119 a = f->a.aligned ? 1 << (f->a.aligned - 1) : 0;
4120 packed = 0;
4122 if (pcc && bit_size == 0) {
4123 /* in pcc mode, packing does not affect zero-width bitfields */
4125 } else {
4126 /* in pcc mode, attribute packed overrides if set. */
4127 if (pcc && (f->a.packed || ad->a.packed))
4128 align = packed = 1;
4130 /* pragma pack overrides align if lesser and packs bitfields always */
4131 if (pragma_pack) {
4132 packed = 1;
4133 if (pragma_pack < align)
4134 align = pragma_pack;
4135 /* in pcc mode pragma pack also overrides individual align */
4136 if (pcc && pragma_pack < a)
4137 a = 0;
4140 /* some individual align was specified */
4141 if (a)
4142 align = a;
4144 if (type->ref->type.t == VT_UNION) {
4145 if (pcc && bit_size >= 0)
4146 size = (bit_size + 7) >> 3;
4147 offset = 0;
4148 if (size > c)
4149 c = size;
4151 } else if (bit_size < 0) {
4152 if (pcc)
4153 c += (bit_pos + 7) >> 3;
4154 c = (c + align - 1) & -align;
4155 offset = c;
4156 if (size > 0)
4157 c += size;
4158 bit_pos = 0;
4159 prevbt = VT_STRUCT;
4160 prev_bit_size = 0;
4162 } else {
4163 /* A bit-field. Layout is more complicated. There are two
4164 options: PCC (GCC) compatible and MS compatible */
4165 if (pcc) {
4166 /* In PCC layout a bit-field is placed adjacent to the
4167 preceding bit-fields, except if:
4168 - it has zero-width
4169 - an individual alignment was given
4170 - it would overflow its base type container and
4171 there is no packing */
4172 if (bit_size == 0) {
4173 new_field:
4174 c = (c + ((bit_pos + 7) >> 3) + align - 1) & -align;
4175 bit_pos = 0;
4176 } else if (f->a.aligned) {
4177 goto new_field;
4178 } else if (!packed) {
4179 int a8 = align * 8;
4180 int ofs = ((c * 8 + bit_pos) % a8 + bit_size + a8 - 1) / a8;
4181 if (ofs > size / align)
4182 goto new_field;
4185 /* in pcc mode, long long bitfields have type int if they fit */
4186 if (size == 8 && bit_size <= 32)
4187 f->type.t = (f->type.t & ~VT_BTYPE) | VT_INT, size = 4;
4189 while (bit_pos >= align * 8)
4190 c += align, bit_pos -= align * 8;
4191 offset = c;
4193 /* In PCC layout named bit-fields influence the alignment
4194 of the containing struct using the base types alignment,
4195 except for packed fields (which here have correct align). */
4196 if (f->v & SYM_FIRST_ANOM
4197 // && bit_size // ??? gcc on ARM/rpi does that
4199 align = 1;
4201 } else {
4202 bt = f->type.t & VT_BTYPE;
4203 if ((bit_pos + bit_size > size * 8)
4204 || (bit_size > 0) == (bt != prevbt)
4206 c = (c + align - 1) & -align;
4207 offset = c;
4208 bit_pos = 0;
4209 /* In MS bitfield mode a bit-field run always uses
4210 at least as many bits as the underlying type.
4211 To start a new run it's also required that this
4212 or the last bit-field had non-zero width. */
4213 if (bit_size || prev_bit_size)
4214 c += size;
4216 /* In MS layout the records alignment is normally
4217 influenced by the field, except for a zero-width
4218 field at the start of a run (but by further zero-width
4219 fields it is again). */
4220 if (bit_size == 0 && prevbt != bt)
4221 align = 1;
4222 prevbt = bt;
4223 prev_bit_size = bit_size;
4226 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
4227 | (bit_pos << VT_STRUCT_SHIFT);
4228 bit_pos += bit_size;
4230 if (align > maxalign)
4231 maxalign = align;
4233 #ifdef BF_DEBUG
4234 printf("set field %s offset %-2d size %-2d align %-2d",
4235 get_tok_str(f->v & ~SYM_FIELD, NULL), offset, size, align);
4236 if (f->type.t & VT_BITFIELD) {
4237 printf(" pos %-2d bits %-2d",
4238 BIT_POS(f->type.t),
4239 BIT_SIZE(f->type.t)
4242 printf("\n");
4243 #endif
4245 f->c = offset;
4246 f->r = 0;
4249 if (pcc)
4250 c += (bit_pos + 7) >> 3;
4252 /* store size and alignment */
4253 a = bt = ad->a.aligned ? 1 << (ad->a.aligned - 1) : 1;
4254 if (a < maxalign)
4255 a = maxalign;
4256 type->ref->r = a;
4257 if (pragma_pack && pragma_pack < maxalign && 0 == pcc) {
4258 /* can happen if individual align for some member was given. In
4259 this case MSVC ignores maxalign when aligning the size */
4260 a = pragma_pack;
4261 if (a < bt)
4262 a = bt;
4264 c = (c + a - 1) & -a;
4265 type->ref->c = c;
4267 #ifdef BF_DEBUG
4268 printf("struct size %-2d align %-2d\n\n", c, a), fflush(stdout);
4269 #endif
4271 /* check whether we can access bitfields by their type */
4272 for (f = type->ref->next; f; f = f->next) {
4273 int s, px, cx, c0;
4274 CType t;
4276 if (0 == (f->type.t & VT_BITFIELD))
4277 continue;
4278 f->type.ref = f;
4279 f->auxtype = -1;
4280 bit_size = BIT_SIZE(f->type.t);
4281 if (bit_size == 0)
4282 continue;
4283 bit_pos = BIT_POS(f->type.t);
4284 size = type_size(&f->type, &align);
4286 if (bit_pos + bit_size <= size * 8 && f->c + size <= c
4287 #ifdef TCC_TARGET_ARM
4288 && !(f->c & (align - 1))
4289 #endif
4291 continue;
4293 /* try to access the field using a different type */
4294 c0 = -1, s = align = 1;
4295 t.t = VT_BYTE;
4296 for (;;) {
4297 px = f->c * 8 + bit_pos;
4298 cx = (px >> 3) & -align;
4299 px = px - (cx << 3);
4300 if (c0 == cx)
4301 break;
4302 s = (px + bit_size + 7) >> 3;
4303 if (s > 4) {
4304 t.t = VT_LLONG;
4305 } else if (s > 2) {
4306 t.t = VT_INT;
4307 } else if (s > 1) {
4308 t.t = VT_SHORT;
4309 } else {
4310 t.t = VT_BYTE;
4312 s = type_size(&t, &align);
4313 c0 = cx;
4316 if (px + bit_size <= s * 8 && cx + s <= c
4317 #ifdef TCC_TARGET_ARM
4318 && !(cx & (align - 1))
4319 #endif
4321 /* update offset and bit position */
4322 f->c = cx;
4323 bit_pos = px;
4324 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
4325 | (bit_pos << VT_STRUCT_SHIFT);
4326 if (s != size)
4327 f->auxtype = t.t;
4328 #ifdef BF_DEBUG
4329 printf("FIX field %s offset %-2d size %-2d align %-2d "
4330 "pos %-2d bits %-2d\n",
4331 get_tok_str(f->v & ~SYM_FIELD, NULL),
4332 cx, s, align, px, bit_size);
4333 #endif
4334 } else {
4335 /* fall back to load/store single-byte wise */
4336 f->auxtype = VT_STRUCT;
4337 #ifdef BF_DEBUG
4338 printf("FIX field %s : load byte-wise\n",
4339 get_tok_str(f->v & ~SYM_FIELD, NULL));
4340 #endif
4345 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
4346 static void struct_decl(CType *type, int u)
4348 int v, c, size, align, flexible;
4349 int bit_size, bsize, bt, ut;
4350 Sym *s, *ss, **ps;
4351 AttributeDef ad, ad1;
4352 CType type1, btype;
4354 memset(&ad, 0, sizeof ad);
4355 next();
4356 parse_attribute(&ad);
4358 v = 0;
4359 if (tok >= TOK_IDENT) /* struct/enum tag */
4360 v = tok, next();
4362 bt = ut = 0;
4363 if (u == VT_ENUM) {
4364 ut = VT_INT;
4365 if (tok == ':') { /* C2x enum : <type> ... */
4366 next();
4367 if (!parse_btype(&btype, &ad1, 0)
4368 || !is_integer_btype(btype.t & VT_BTYPE))
4369 expect("enum type");
4370 bt = ut = btype.t & (VT_BTYPE|VT_LONG|VT_UNSIGNED|VT_DEFSIGN);
4374 if (v) {
4375 /* struct already defined ? return it */
4376 s = struct_find(v);
4377 if (s && (s->sym_scope == local_scope || (tok != '{' && tok != ';'))) {
4378 if (u == s->type.t)
4379 goto do_decl;
4380 if (u == VT_ENUM && IS_ENUM(s->type.t)) /* XXX: check integral types */
4381 goto do_decl;
4382 tcc_error("redeclaration of '%s'", get_tok_str(v, NULL));
4384 } else {
4385 if (tok != '{')
4386 expect("struct/union/enum name");
4387 v = anon_sym++;
4389 /* Record the original enum/struct/union token. */
4390 type1.t = u | ut;
4391 type1.ref = NULL;
4392 /* we put an undefined size for struct/union */
4393 s = sym_push(v | SYM_STRUCT, &type1, 0, bt ? 0 : -1);
4394 s->r = 0; /* default alignment is zero as gcc */
4395 do_decl:
4396 type->t = s->type.t;
4397 type->ref = s;
4399 if (tok == '{') {
4400 next();
4401 if (s->c != -1
4402 && !(u == VT_ENUM && s->c == 0)) /* not yet defined typed enum */
4403 tcc_error("struct/union/enum already defined");
4404 s->c = -2;
4405 /* cannot be empty */
4406 /* non empty enums are not allowed */
4407 ps = &s->next;
4408 if (u == VT_ENUM) {
4409 long long ll = 0, pl = 0, nl = 0;
4410 CType t;
4411 t.ref = s;
4412 /* enum symbols have static storage */
4413 t.t = VT_INT|VT_STATIC|VT_ENUM_VAL;
4414 if (bt)
4415 t.t = bt|VT_STATIC|VT_ENUM_VAL;
4416 for(;;) {
4417 v = tok;
4418 if (v < TOK_UIDENT)
4419 expect("identifier");
4420 ss = sym_find(v);
4421 if (ss && !local_stack)
4422 tcc_error("redefinition of enumerator '%s'",
4423 get_tok_str(v, NULL));
4424 next();
4425 if (tok == '=') {
4426 next();
4427 ll = expr_const64();
4429 ss = sym_push(v, &t, VT_CONST, 0);
4430 ss->enum_val = ll;
4431 *ps = ss, ps = &ss->next;
4432 if (ll < nl)
4433 nl = ll;
4434 if (ll > pl)
4435 pl = ll;
4436 if (tok != ',')
4437 break;
4438 next();
4439 ll++;
4440 /* NOTE: we accept a trailing comma */
4441 if (tok == '}')
4442 break;
4444 skip('}');
4446 if (bt) {
4447 t.t = bt;
4448 s->c = 2;
4449 goto enum_done;
4452 /* set integral type of the enum */
4453 t.t = VT_INT;
4454 if (nl >= 0) {
4455 if (pl != (unsigned)pl)
4456 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4457 t.t |= VT_UNSIGNED;
4458 } else if (pl != (int)pl || nl != (int)nl)
4459 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4461 /* set type for enum members */
4462 for (ss = s->next; ss; ss = ss->next) {
4463 ll = ss->enum_val;
4464 if (ll == (int)ll) /* default is int if it fits */
4465 continue;
4466 if (t.t & VT_UNSIGNED) {
4467 ss->type.t |= VT_UNSIGNED;
4468 if (ll == (unsigned)ll)
4469 continue;
4471 ss->type.t = (ss->type.t & ~VT_BTYPE)
4472 | (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4474 s->c = 1;
4475 enum_done:
4476 s->type.t = type->t = t.t | VT_ENUM;
4478 } else {
4479 c = 0;
4480 flexible = 0;
4481 while (tok != '}') {
4482 if (!parse_btype(&btype, &ad1, 0)) {
4483 if (tok == TOK_STATIC_ASSERT) {
4484 do_Static_assert();
4485 continue;
4487 skip(';');
4488 continue;
4490 while (1) {
4491 if (flexible)
4492 tcc_error("flexible array member '%s' not at the end of struct",
4493 get_tok_str(v, NULL));
4494 bit_size = -1;
4495 v = 0;
4496 type1 = btype;
4497 if (tok != ':') {
4498 if (tok != ';')
4499 type_decl(&type1, &ad1, &v, TYPE_DIRECT);
4500 if (v == 0) {
4501 if ((type1.t & VT_BTYPE) != VT_STRUCT)
4502 expect("identifier");
4503 else {
4504 int v = btype.ref->v;
4505 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
4506 if (tcc_state->ms_extensions == 0)
4507 expect("identifier");
4511 if (type_size(&type1, &align) < 0) {
4512 if ((u == VT_STRUCT) && (type1.t & VT_ARRAY) && c)
4513 flexible = 1;
4514 else
4515 tcc_error("field '%s' has incomplete type",
4516 get_tok_str(v, NULL));
4518 if ((type1.t & VT_BTYPE) == VT_FUNC ||
4519 (type1.t & VT_BTYPE) == VT_VOID ||
4520 (type1.t & VT_STORAGE))
4521 tcc_error("invalid type for '%s'",
4522 get_tok_str(v, NULL));
4524 if (tok == ':') {
4525 next();
4526 bit_size = expr_const();
4527 /* XXX: handle v = 0 case for messages */
4528 if (bit_size < 0)
4529 tcc_error("negative width in bit-field '%s'",
4530 get_tok_str(v, NULL));
4531 if (v && bit_size == 0)
4532 tcc_error("zero width for bit-field '%s'",
4533 get_tok_str(v, NULL));
4534 parse_attribute(&ad1);
4536 size = type_size(&type1, &align);
4537 if (bit_size >= 0) {
4538 bt = type1.t & VT_BTYPE;
4539 if (bt != VT_INT &&
4540 bt != VT_BYTE &&
4541 bt != VT_SHORT &&
4542 bt != VT_BOOL &&
4543 bt != VT_LLONG)
4544 tcc_error("bitfields must have scalar type");
4545 bsize = size * 8;
4546 if (bit_size > bsize) {
4547 tcc_error("width of '%s' exceeds its type",
4548 get_tok_str(v, NULL));
4549 } else if (bit_size == bsize
4550 && !ad.a.packed && !ad1.a.packed) {
4551 /* no need for bit fields */
4553 } else if (bit_size == 64) {
4554 tcc_error("field width 64 not implemented");
4555 } else {
4556 type1.t = (type1.t & ~VT_STRUCT_MASK)
4557 | VT_BITFIELD
4558 | (bit_size << (VT_STRUCT_SHIFT + 6));
4561 if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
4562 /* Remember we've seen a real field to check
4563 for placement of flexible array member. */
4564 c = 1;
4566 /* If member is a struct or bit-field, enforce
4567 placing into the struct (as anonymous). */
4568 if (v == 0 &&
4569 ((type1.t & VT_BTYPE) == VT_STRUCT ||
4570 bit_size >= 0)) {
4571 v = anon_sym++;
4573 if (v) {
4574 ss = sym_push(v | SYM_FIELD, &type1, 0, 0);
4575 ss->a = ad1.a;
4576 *ps = ss;
4577 ps = &ss->next;
4579 if (tok == ';' || tok == TOK_EOF)
4580 break;
4581 skip(',');
4583 skip(';');
4585 skip('}');
4586 parse_attribute(&ad);
4587 if (ad.cleanup_func) {
4588 tcc_warning("attribute '__cleanup__' ignored on type");
4590 check_fields(type, 1);
4591 check_fields(type, 0);
4592 struct_layout(type, &ad);
4593 if (debug_modes)
4594 tcc_debug_fix_anon(tcc_state, type);
4599 static void sym_to_attr(AttributeDef *ad, Sym *s)
4601 merge_symattr(&ad->a, &s->a);
4602 merge_funcattr(&ad->f, &s->f);
4605 /* Add type qualifiers to a type. If the type is an array then the qualifiers
4606 are added to the element type, copied because it could be a typedef. */
4607 static void parse_btype_qualify(CType *type, int qualifiers)
4609 while (type->t & VT_ARRAY) {
4610 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
4611 type = &type->ref->type;
4613 type->t |= qualifiers;
4616 /* return 0 if no type declaration. otherwise, return the basic type
4617 and skip it.
4619 static int parse_btype(CType *type, AttributeDef *ad, int ignore_label)
4621 int t, u, bt, st, type_found, typespec_found, g, n;
4622 Sym *s;
4623 CType type1;
4625 memset(ad, 0, sizeof(AttributeDef));
4626 type_found = 0;
4627 typespec_found = 0;
4628 t = VT_INT;
4629 bt = st = -1;
4630 type->ref = NULL;
4632 while(1) {
4633 switch(tok) {
4634 case TOK_EXTENSION:
4635 /* currently, we really ignore extension */
4636 next();
4637 continue;
4639 /* basic types */
4640 case TOK_CHAR:
4641 u = VT_BYTE;
4642 basic_type:
4643 next();
4644 basic_type1:
4645 if (u == VT_SHORT || u == VT_LONG) {
4646 if (st != -1 || (bt != -1 && bt != VT_INT))
4647 tmbt: tcc_error("too many basic types");
4648 st = u;
4649 } else {
4650 if (bt != -1 || (st != -1 && u != VT_INT))
4651 goto tmbt;
4652 bt = u;
4654 if (u != VT_INT)
4655 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
4656 typespec_found = 1;
4657 break;
4658 case TOK_VOID:
4659 u = VT_VOID;
4660 goto basic_type;
4661 case TOK_SHORT:
4662 u = VT_SHORT;
4663 goto basic_type;
4664 case TOK_INT:
4665 u = VT_INT;
4666 goto basic_type;
4667 case TOK_ALIGNAS:
4668 { int n;
4669 AttributeDef ad1;
4670 next();
4671 skip('(');
4672 memset(&ad1, 0, sizeof(AttributeDef));
4673 if (parse_btype(&type1, &ad1, 0)) {
4674 type_decl(&type1, &ad1, &n, TYPE_ABSTRACT);
4675 if (ad1.a.aligned)
4676 n = 1 << (ad1.a.aligned - 1);
4677 else
4678 type_size(&type1, &n);
4679 } else {
4680 n = expr_const();
4681 if (n < 0 || (n & (n - 1)) != 0)
4682 tcc_error("alignment must be a positive power of two");
4684 skip(')');
4685 ad->a.aligned = exact_log2p1(n);
4687 continue;
4688 case TOK_LONG:
4689 if ((t & VT_BTYPE) == VT_DOUBLE) {
4690 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
4691 } else if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
4692 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LLONG;
4693 } else {
4694 u = VT_LONG;
4695 goto basic_type;
4697 next();
4698 break;
4699 #ifdef TCC_TARGET_ARM64
4700 case TOK_UINT128:
4701 /* GCC's __uint128_t appears in some Linux header files. Make it a
4702 synonym for long double to get the size and alignment right. */
4703 u = VT_LDOUBLE;
4704 goto basic_type;
4705 #endif
4706 case TOK_BOOL:
4707 u = VT_BOOL;
4708 goto basic_type;
4709 case TOK_COMPLEX:
4710 tcc_error("_Complex is not yet supported");
4711 case TOK_FLOAT:
4712 u = VT_FLOAT;
4713 goto basic_type;
4714 case TOK_DOUBLE:
4715 if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
4716 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
4717 } else {
4718 u = VT_DOUBLE;
4719 goto basic_type;
4721 next();
4722 break;
4723 case TOK_ENUM:
4724 struct_decl(&type1, VT_ENUM);
4725 basic_type2:
4726 u = type1.t;
4727 type->ref = type1.ref;
4728 goto basic_type1;
4729 case TOK_STRUCT:
4730 struct_decl(&type1, VT_STRUCT);
4731 goto basic_type2;
4732 case TOK_UNION:
4733 struct_decl(&type1, VT_UNION);
4734 goto basic_type2;
4736 /* type modifiers */
4737 case TOK__Atomic:
4738 next();
4739 type->t = t;
4740 parse_btype_qualify(type, VT_ATOMIC);
4741 t = type->t;
4742 if (tok == '(') {
4743 parse_expr_type(&type1);
4744 /* remove all storage modifiers except typedef */
4745 type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
4746 if (type1.ref)
4747 sym_to_attr(ad, type1.ref);
4748 goto basic_type2;
4750 break;
4751 case TOK_CONST1:
4752 case TOK_CONST2:
4753 case TOK_CONST3:
4754 type->t = t;
4755 parse_btype_qualify(type, VT_CONSTANT);
4756 t = type->t;
4757 next();
4758 break;
4759 case TOK_VOLATILE1:
4760 case TOK_VOLATILE2:
4761 case TOK_VOLATILE3:
4762 type->t = t;
4763 parse_btype_qualify(type, VT_VOLATILE);
4764 t = type->t;
4765 next();
4766 break;
4767 case TOK_SIGNED1:
4768 case TOK_SIGNED2:
4769 case TOK_SIGNED3:
4770 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
4771 tcc_error("signed and unsigned modifier");
4772 t |= VT_DEFSIGN;
4773 next();
4774 typespec_found = 1;
4775 break;
4776 case TOK_REGISTER:
4777 case TOK_AUTO:
4778 case TOK_RESTRICT1:
4779 case TOK_RESTRICT2:
4780 case TOK_RESTRICT3:
4781 next();
4782 break;
4783 case TOK_UNSIGNED:
4784 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
4785 tcc_error("signed and unsigned modifier");
4786 t |= VT_DEFSIGN | VT_UNSIGNED;
4787 next();
4788 typespec_found = 1;
4789 break;
4791 /* storage */
4792 case TOK_EXTERN:
4793 g = VT_EXTERN;
4794 goto storage;
4795 case TOK_STATIC:
4796 g = VT_STATIC;
4797 goto storage;
4798 case TOK_TYPEDEF:
4799 g = VT_TYPEDEF;
4800 goto storage;
4801 storage:
4802 if (t & (VT_EXTERN|VT_STATIC|VT_TYPEDEF) & ~g)
4803 tcc_error("multiple storage classes");
4804 t |= g;
4805 next();
4806 break;
4807 case TOK_INLINE1:
4808 case TOK_INLINE2:
4809 case TOK_INLINE3:
4810 t |= VT_INLINE;
4811 next();
4812 break;
4813 case TOK_NORETURN3:
4814 next();
4815 ad->f.func_noreturn = 1;
4816 break;
4817 /* GNUC attribute */
4818 case TOK_ATTRIBUTE1:
4819 case TOK_ATTRIBUTE2:
4820 parse_attribute(ad);
4821 if (ad->attr_mode) {
4822 u = ad->attr_mode -1;
4823 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
4825 continue;
4826 /* GNUC typeof */
4827 case TOK_TYPEOF1:
4828 case TOK_TYPEOF2:
4829 case TOK_TYPEOF3:
4830 next();
4831 parse_expr_type(&type1);
4832 /* remove all storage modifiers except typedef */
4833 type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
4834 if (type1.ref)
4835 sym_to_attr(ad, type1.ref);
4836 goto basic_type2;
4837 case TOK_THREAD_LOCAL:
4838 tcc_error("_Thread_local is not implemented");
4839 default:
4840 if (typespec_found)
4841 goto the_end;
4842 s = sym_find(tok);
4843 if (!s || !(s->type.t & VT_TYPEDEF))
4844 goto the_end;
4846 n = tok, next();
4847 if (tok == ':' && ignore_label) {
4848 /* ignore if it's a label */
4849 unget_tok(n);
4850 goto the_end;
4853 t &= ~(VT_BTYPE|VT_LONG);
4854 u = t & ~(VT_CONSTANT | VT_VOLATILE), t ^= u;
4855 type->t = (s->type.t & ~VT_TYPEDEF) | u;
4856 type->ref = s->type.ref;
4857 if (t)
4858 parse_btype_qualify(type, t);
4859 t = type->t;
4860 /* get attributes from typedef */
4861 sym_to_attr(ad, s);
4862 typespec_found = 1;
4863 st = bt = -2;
4864 break;
4866 type_found = 1;
4868 the_end:
4869 if (tcc_state->char_is_unsigned) {
4870 if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
4871 t |= VT_UNSIGNED;
4873 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
4874 bt = t & (VT_BTYPE|VT_LONG);
4875 if (bt == VT_LONG)
4876 t |= LONG_SIZE == 8 ? VT_LLONG : VT_INT;
4877 #ifdef TCC_USING_DOUBLE_FOR_LDOUBLE
4878 if (bt == VT_LDOUBLE)
4879 t = (t & ~(VT_BTYPE|VT_LONG)) | (VT_DOUBLE|VT_LONG);
4880 #endif
4881 type->t = t;
4882 return type_found;
4885 /* convert a function parameter type (array to pointer and function to
4886 function pointer) */
4887 static inline void convert_parameter_type(CType *pt)
4889 /* remove const and volatile qualifiers (XXX: const could be used
4890 to indicate a const function parameter */
4891 pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
4892 /* array must be transformed to pointer according to ANSI C */
4893 pt->t &= ~(VT_ARRAY | VT_VLA);
4894 if ((pt->t & VT_BTYPE) == VT_FUNC) {
4895 mk_pointer(pt);
4899 ST_FUNC CString* parse_asm_str(void)
4901 skip('(');
4902 return parse_mult_str("string constant");
4905 /* Parse an asm label and return the token */
4906 static int asm_label_instr(void)
4908 int v;
4909 char *astr;
4911 next();
4912 astr = parse_asm_str()->data;
4913 skip(')');
4914 #ifdef ASM_DEBUG
4915 printf("asm_alias: \"%s\"\n", astr);
4916 #endif
4917 v = tok_alloc_const(astr);
4918 return v;
/* Parse the "post" part of a declarator: either a function parameter
   list introduced by '(' or an array dimension introduced by '['.
   Returns 1 when such a postfix was consumed and wrapped around *type,
   0 when the '(' turned out to start a nested declarator instead (the
   caller then recurses via type_decl). */
4921 static int post_type(CType *type, AttributeDef *ad, int storage, int td)
4923 int n, l, t1, arg_size, align;
4924 Sym **plast, *s, *first;
4925 AttributeDef ad1;
4926 CType pt;
4927 TokenString *vla_array_tok = NULL;
4928 int *vla_array_str = NULL;
4930 if (tok == '(') {
4931 /* function type, or recursive declarator (return if so) */
4932 next();
4933 if (TYPE_DIRECT == (td & (TYPE_DIRECT|TYPE_ABSTRACT)))
4934 return 0;
4935 if (tok == ')')
4936 l = 0;
4937 else if (parse_btype(&pt, &ad1, 0))
4938 l = FUNC_NEW;
4939 else if (td & (TYPE_DIRECT|TYPE_ABSTRACT)) {
4940 merge_attr (ad, &ad1);
4941 return 0;
4942 } else
4943 l = FUNC_OLD;
/* 'l' is the prototype kind so far: 0 (empty), FUNC_NEW (ANSI) or
   FUNC_OLD (K&R); may become FUNC_ELLIPSIS when "..." is seen */
4945 first = NULL;
4946 plast = &first;
4947 arg_size = 0;
4948 ++local_scope;
4949 if (l) {
4950 for(;;) {
4951 /* read param name and compute offset */
4952 if (l != FUNC_OLD) {
4953 if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
4954 break;
4955 type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT | TYPE_PARAM);
4956 if ((pt.t & VT_BTYPE) == VT_VOID)
4957 tcc_error("parameter declared as void");
4958 if (n == 0)
4959 n = SYM_FIELD;
4960 } else {
4961 n = tok;
4962 pt.t = VT_VOID; /* invalid type */
4963 pt.ref = NULL;
4964 next();
4966 if (n < TOK_UIDENT)
4967 expect("identifier");
4968 convert_parameter_type(&pt);
4969 arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
4970 /* these symbols may be evaluated for VLArrays (see below, under
4971 nocode_wanted) which is why we push them here as normal symbols
4972 temporarily. Example: int func(int a, int b[++a]); */
4973 s = sym_push(n, &pt, VT_LOCAL|VT_LVAL, 0);
4974 *plast = s;
4975 plast = &s->next;
4976 if (tok == ')')
4977 break;
4978 skip(',');
4979 if (l == FUNC_NEW && tok == TOK_DOTS) {
4980 l = FUNC_ELLIPSIS;
4981 next();
4982 break;
4984 if (l == FUNC_NEW && !parse_btype(&pt, &ad1, 0))
4985 tcc_error("invalid type");
4987 } else
4988 /* if no parameters, then old type prototype */
4989 l = FUNC_OLD;
4990 skip(')');
4991 /* remove parameter symbols from token table, keep on stack */
4992 if (first) {
4993 sym_pop(local_stack ? &local_stack : &global_stack, first->prev, 1);
4994 for (s = first; s; s = s->next)
4995 s->v |= SYM_FIELD;
4997 --local_scope;
4998 /* NOTE: const is ignored in returned type as it has a special
4999 meaning in gcc / C++ */
5000 type->t &= ~VT_CONSTANT;
5001 /* some ancient pre-K&R C allows a function to return an array
5002 and the array brackets to be put after the arguments, such
5003 that "int c()[]" means something like "int[] c()" */
5004 if (tok == '[') {
5005 next();
5006 skip(']'); /* only handle simple "[]" */
5007 mk_pointer(type);
5009 /* we push a anonymous symbol which will contain the function prototype */
5010 ad->f.func_args = arg_size;
5011 ad->f.func_type = l;
5012 s = sym_push(SYM_FIELD, type, 0, 0);
5013 s->a = ad->a;
5014 s->f = ad->f;
5015 s->next = first;
5016 type->t = VT_FUNC;
5017 type->ref = s;
5018 } else if (tok == '[') {
5019 int saved_nocode_wanted = nocode_wanted;
5020 /* array definition */
5021 next();
5022 n = -1;
5023 t1 = 0;
5024 if (td & TYPE_PARAM) while (1) {
5025 /* XXX The optional type-quals and static should only be accepted
5026 in parameter decls. The '*' as well, and then even only
5027 in prototypes (not function defs). */
5028 switch (tok) {
5029 case TOK_RESTRICT1: case TOK_RESTRICT2: case TOK_RESTRICT3:
5030 case TOK_CONST1:
5031 case TOK_VOLATILE1:
5032 case TOK_STATIC:
5033 case '*':
5034 next();
5035 continue;
5036 default:
5037 break;
5039 if (tok != ']') {
5040 /* Code generation is not done now but has to be done
5041 at start of function. Save code here for later use. */
5042 nocode_wanted = 1;
5043 skip_or_save_block(&vla_array_tok);
5044 unget_tok(0);
5045 vla_array_str = vla_array_tok->str;
5046 begin_macro(vla_array_tok, 2);
5047 next();
5048 gexpr();
5049 end_macro();
5050 next();
5051 goto check;
5053 break;
5055 } else if (tok != ']') {
5056 if (!local_stack || (storage & VT_STATIC))
5057 vpushi(expr_const());
5058 else {
5059 /* VLAs (which can only happen with local_stack && !VT_STATIC)
5060 length must always be evaluated, even under nocode_wanted,
5061 so that its size slot is initialized (e.g. under sizeof
5062 or typeof). */
5063 nocode_wanted = 0;
5064 gexpr();
5066 check:
5067 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5068 n = vtop->c.i;
5069 if (n < 0)
5070 tcc_error("invalid array size");
5071 } else {
5072 if (!is_integer_btype(vtop->type.t & VT_BTYPE))
5073 tcc_error("size of variable length array should be an integer");
5074 n = 0;
5075 t1 = VT_VLA;
5078 skip(']');
5079 /* parse next post type */
5080 post_type(type, ad, storage, (td & ~(TYPE_DIRECT|TYPE_ABSTRACT)) | TYPE_NEST);
5082 if ((type->t & VT_BTYPE) == VT_FUNC)
5083 tcc_error("declaration of an array of functions");
5084 if ((type->t & VT_BTYPE) == VT_VOID
5085 || type_size(type, &align) < 0)
5086 tcc_error("declaration of an array of incomplete type elements");
5088 t1 |= type->t & VT_VLA;
/* VLA: evaluate n_elems * sizeof(element) and store it into a fresh
   local stack slot (offset 'n') so the runtime size is available */
5090 if (t1 & VT_VLA) {
5091 if (n < 0) {
5092 if (td & TYPE_NEST)
5093 tcc_error("need explicit inner array size in VLAs");
5095 else {
5096 loc -= type_size(&int_type, &align);
5097 loc &= -align;
5098 n = loc;
5100 vpush_type_size(type, &align);
5101 gen_op('*');
5102 vset(&int_type, VT_LOCAL|VT_LVAL, n);
5103 vswap();
5104 vstore();
/* drop the (constant) size value unless the dimension was empty */
5107 if (n != -1)
5108 vpop();
5109 nocode_wanted = saved_nocode_wanted;
5111 /* we push an anonymous symbol which will contain the array
5112 element type */
5113 s = sym_push(SYM_FIELD, type, 0, n);
5114 type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
5115 type->ref = s;
/* saved size-expression tokens are kept only for parameter VLAs
   (re-evaluated at start of function, see comment above); in all
   other cases they are freed here */
5117 if (vla_array_str) {
5118 /* for function args, the top dimension is converted to pointer */
5119 if ((t1 & VT_VLA) && (td & TYPE_NEST))
5120 s->vla_array_str = vla_array_str;
5121 else
5122 tok_str_free_str(vla_array_str);
5125 return 1;
5128 /* Parse a type declarator (except basic type), and return the type
5129 in 'type'. 'td' is a bitmask indicating which kind of type decl is
5130 expected. 'type' should contain the basic type. 'ad' is the
5131 attribute definition of the basic type. It can be modified by
5132 type_decl(). If this (possibly abstract) declarator is a pointer chain
5133 it returns the innermost pointed to type (equals *type, but is a different
5134 pointer), otherwise returns type itself, that's used for recursive calls. */
5135 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td)
5137 CType *post, *ret;
5138 int qualifiers, storage;
5140 /* recursive type, remove storage bits first, apply them later again */
5141 storage = type->t & VT_STORAGE;
5142 type->t &= ~VT_STORAGE;
5143 post = ret = type;
5145 while (tok == '*') {
5146 qualifiers = 0;
5147 redo:
5148 next();
5149 switch(tok) {
5150 case TOK__Atomic:
5151 qualifiers |= VT_ATOMIC;
5152 goto redo;
5153 case TOK_CONST1:
5154 case TOK_CONST2:
5155 case TOK_CONST3:
5156 qualifiers |= VT_CONSTANT;
5157 goto redo;
5158 case TOK_VOLATILE1:
5159 case TOK_VOLATILE2:
5160 case TOK_VOLATILE3:
5161 qualifiers |= VT_VOLATILE;
5162 goto redo;
5163 case TOK_RESTRICT1:
5164 case TOK_RESTRICT2:
5165 case TOK_RESTRICT3:
5166 goto redo;
5167 /* XXX: clarify attribute handling */
5168 case TOK_ATTRIBUTE1:
5169 case TOK_ATTRIBUTE2:
5170 parse_attribute(ad);
5171 break;
5173 mk_pointer(type);
5174 type->t |= qualifiers;
5175 if (ret == type)
5176 /* innermost pointed to type is the one for the first derivation */
5177 ret = pointed_type(type);
5180 if (tok == '(') {
5181 /* This is possibly a parameter type list for abstract declarators
5182 ('int ()'), use post_type for testing this. */
5183 if (!post_type(type, ad, 0, td)) {
5184 /* It's not, so it's a nested declarator, and the post operations
5185 apply to the innermost pointed to type (if any). */
5186 /* XXX: this is not correct to modify 'ad' at this point, but
5187 the syntax is not clear */
5188 parse_attribute(ad);
5189 post = type_decl(type, ad, v, td);
5190 skip(')');
5191 } else
5192 goto abstract;
5193 } else if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
5194 /* type identifier */
5195 *v = tok;
5196 next();
5197 } else {
5198 abstract:
5199 if (!(td & TYPE_ABSTRACT))
5200 expect("identifier");
5201 *v = 0;
5203 post_type(post, ad, post != ret ? 0 : storage,
5204 td & ~(TYPE_DIRECT|TYPE_ABSTRACT));
5205 parse_attribute(ad);
5206 type->t |= storage;
5207 return ret;
5210 /* indirection with full error checking and bound check */
5211 ST_FUNC void indir(void)
5213 if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
5214 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
5215 return;
5216 expect("pointer");
5218 if (vtop->r & VT_LVAL)
5219 gv(RC_INT);
5220 vtop->type = *pointed_type(&vtop->type);
5221 /* Arrays and functions are never lvalues */
5222 if (!(vtop->type.t & (VT_ARRAY | VT_VLA))
5223 && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
5224 vtop->r |= VT_LVAL;
5225 /* if bound checking, the referenced pointer must be checked */
5226 #ifdef CONFIG_TCC_BCHECK
5227 if (tcc_state->do_bounds_check)
5228 vtop->r |= VT_MUSTBOUND;
5229 #endif
5233 /* pass a parameter to a function and do type checking and casting */
5234 static void gfunc_param_typed(Sym *func, Sym *arg)
5236 int func_type;
5237 CType type;
5239 func_type = func->f.func_type;
5240 if (func_type == FUNC_OLD ||
5241 (func_type == FUNC_ELLIPSIS && arg == NULL)) {
5242 /* default casting : only need to convert float to double */
5243 if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
5244 gen_cast_s(VT_DOUBLE);
5245 } else if (vtop->type.t & VT_BITFIELD) {
5246 type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
5247 type.ref = vtop->type.ref;
5248 gen_cast(&type);
5249 } else if (vtop->r & VT_MUSTCAST) {
5250 force_charshort_cast();
5252 } else if (arg == NULL) {
5253 tcc_error("too many arguments to function");
5254 } else {
5255 type = arg->type;
5256 type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
5257 gen_assign_cast(&type);
5261 /* parse an expression and return its type without any side effect. */
5262 static void expr_type(CType *type, void (*expr_fn)(void))
5264 nocode_wanted++;
5265 expr_fn();
5266 *type = vtop->type;
5267 vpop();
5268 nocode_wanted--;
5271 /* parse an expression of the form '(type)' or '(expr)' and return its
5272 type */
5273 static void parse_expr_type(CType *type)
5275 int n;
5276 AttributeDef ad;
5278 skip('(');
5279 if (parse_btype(type, &ad, 0)) {
5280 type_decl(type, &ad, &n, TYPE_ABSTRACT);
5281 } else {
5282 expr_type(type, gexpr);
5284 skip(')');
5287 static void parse_type(CType *type)
5289 AttributeDef ad;
5290 int n;
5292 if (!parse_btype(type, &ad, 0)) {
5293 expect("type");
5295 type_decl(type, &ad, &n, TYPE_ABSTRACT);
5298 static void parse_builtin_params(int nc, const char *args)
5300 char c, sep = '(';
5301 CType type;
5302 if (nc)
5303 nocode_wanted++;
5304 next();
5305 if (*args == 0)
5306 skip(sep);
5307 while ((c = *args++)) {
5308 skip(sep);
5309 sep = ',';
5310 if (c == 't') {
5311 parse_type(&type);
5312 vpush(&type);
5313 continue;
5315 expr_eq();
5316 type.ref = NULL;
5317 type.t = 0;
5318 switch (c) {
5319 case 'e':
5320 continue;
5321 case 'V':
5322 type.t = VT_CONSTANT;
5323 case 'v':
5324 type.t |= VT_VOID;
5325 mk_pointer (&type);
5326 break;
5327 case 'S':
5328 type.t = VT_CONSTANT;
5329 case 's':
5330 type.t |= char_type.t;
5331 mk_pointer (&type);
5332 break;
5333 case 'i':
5334 type.t = VT_INT;
5335 break;
5336 case 'l':
5337 type.t = VT_SIZE_T;
5338 break;
5339 default:
5340 break;
5342 gen_assign_cast(&type);
5344 skip(')');
5345 if (nc)
5346 nocode_wanted--;
/* Parse a call to the __atomic_* builtin identified by token 'atok'.
   Arguments are type-checked against the builtin's template string
   and the whole call is lowered to the size-specialized runtime
   helper "<name>_<size>" (e.g. __atomic_load_4). */
5349 static void parse_atomic(int atok)
5351 int size, align, arg, t, save = 0;
5352 CType *atom, *atom_ptr, ct = {0};
5353 SValue store;
5354 char buf[40];
5355 static const char *const templates[] = {
5357 * Each entry consists of callback and function template.
5358 * The template represents argument types and return type.
5360 * ? void (return-only)
5361 * b bool
5362 * a atomic
5363 * A read-only atomic
5364 * p pointer to memory
5365 * v value
5366 * l load pointer
5367 * s save pointer
5368 * m memory model
5371 /* keep in order of appearance in tcctok.h: */
5372 /* __atomic_store */ "alm.?",
5373 /* __atomic_load */ "Asm.v",
5374 /* __atomic_exchange */ "alsm.v",
5375 /* __atomic_compare_exchange */ "aplbmm.b",
5376 /* __atomic_fetch_add */ "avm.v",
5377 /* __atomic_fetch_sub */ "avm.v",
5378 /* __atomic_fetch_or */ "avm.v",
5379 /* __atomic_fetch_xor */ "avm.v",
5380 /* __atomic_fetch_and */ "avm.v",
5381 /* __atomic_fetch_nand */ "avm.v",
5382 /* __atomic_add_fetch */ "avm.v",
5383 /* __atomic_sub_fetch */ "avm.v",
5384 /* __atomic_or_fetch */ "avm.v",
5385 /* __atomic_xor_fetch */ "avm.v",
5386 /* __atomic_and_fetch */ "avm.v",
5387 /* __atomic_nand_fetch */ "avm.v"
/* template for this builtin; the '.' separates the argument
   characters from the single return-type character */
5389 const char *template = templates[(atok - TOK___atomic_store)];
5391 atom = atom_ptr = NULL;
5392 size = 0; /* pacify compiler */
5393 next();
5394 skip('(');
5395 for (arg = 0;;) {
5396 expr_eq();
5397 switch (template[arg]) {
5398 case 'a':
5399 case 'A':
5400 atom_ptr = &vtop->type;
5401 if ((atom_ptr->t & VT_BTYPE) != VT_PTR)
5402 expect("pointer");
5403 atom = pointed_type(atom_ptr);
5404 size = type_size(atom, &align);
5405 if (size > 8
5406 || (size & (size - 1))
5407 || (atok > TOK___atomic_compare_exchange
5408 && (0 == btype_size(atom->t & VT_BTYPE)
5409 || (atom->t & VT_BTYPE) == VT_PTR)))
5410 expect("integral or integer-sized pointer target type");
5411 /* GCC does not care either: */
5412 /* if (!(atom->t & VT_ATOMIC))
5413 tcc_warning("pointer target declaration is missing '_Atomic'"); */
5414 break;
5416 case 'p':
5417 if ((vtop->type.t & VT_BTYPE) != VT_PTR
5418 || type_size(pointed_type(&vtop->type), &align) != size)
5419 tcc_error("pointer target type mismatch in argument %d", arg + 1);
5420 gen_assign_cast(atom_ptr);
5421 break;
5422 case 'v':
5423 gen_assign_cast(atom);
5424 break;
5425 case 'l':
5426 indir();
5427 gen_assign_cast(atom);
5428 break;
5429 case 's':
5430 save = 1;
5431 indir();
5432 store = *vtop;
5433 vpop();
5434 break;
5435 case 'm':
5436 gen_assign_cast(&int_type);
5437 break;
5438 case 'b':
5439 ct.t = VT_BOOL;
5440 gen_assign_cast(&ct);
5441 break;
/* advance to next template char; '.' means the argument list is done */
5443 if ('.' == template[++arg])
5444 break;
5445 skip(',');
5447 skip(')');
/* determine the call's return type from the char after '.' */
5449 ct.t = VT_VOID;
5450 switch (template[arg + 1]) {
5451 case 'b':
5452 ct.t = VT_BOOL;
5453 break;
5454 case 'v':
5455 ct = *atom;
5456 break;
/* emit the call to the size-specialized helper, e.g. "__atomic_load_4";
   the 's' (save pointer) argument was popped and is not passed on */
5459 sprintf(buf, "%s_%d", get_tok_str(atok, 0), size);
5460 vpush_helper_func(tok_alloc_const(buf));
5461 vrott(arg - save + 1);
5462 gfunc_call(arg - save);
5464 vpush(&ct);
5465 PUT_R_RET(vtop, ct.t);
5466 t = ct.t & VT_BTYPE;
5467 if (t == VT_BYTE || t == VT_SHORT || t == VT_BOOL) {
5468 #ifdef PROMOTE_RET
5469 vtop->r |= BFVAL(VT_MUSTCAST, 1);
5470 #else
5471 vtop->type.t = VT_INT;
5472 #endif
5474 gen_cast(&ct);
/* for templates with an 's' argument, also store the result through
   the saved pointer */
5475 if (save) {
5476 vpush(&ct);
5477 *vtop = store;
5478 vswap();
5479 vstore();
5483 ST_FUNC void unary(void)
5485 int n, t, align, size, r;
5486 CType type;
5487 Sym *s;
5488 AttributeDef ad;
5490 /* generate line number info */
5491 if (debug_modes)
5492 tcc_debug_line(tcc_state), tcc_tcov_check_line (tcc_state, 1);
5494 type.ref = NULL;
5495 /* XXX: GCC 2.95.3 does not generate a table although it should be
5496 better here */
5497 tok_next:
5498 switch(tok) {
5499 case TOK_EXTENSION:
5500 next();
5501 goto tok_next;
5502 case TOK_LCHAR:
5503 #ifdef TCC_TARGET_PE
5504 t = VT_SHORT|VT_UNSIGNED;
5505 goto push_tokc;
5506 #endif
5507 case TOK_CINT:
5508 case TOK_CCHAR:
5509 t = VT_INT;
5510 push_tokc:
5511 type.t = t;
5512 vsetc(&type, VT_CONST, &tokc);
5513 next();
5514 break;
5515 case TOK_CUINT:
5516 t = VT_INT | VT_UNSIGNED;
5517 goto push_tokc;
5518 case TOK_CLLONG:
5519 t = VT_LLONG;
5520 goto push_tokc;
5521 case TOK_CULLONG:
5522 t = VT_LLONG | VT_UNSIGNED;
5523 goto push_tokc;
5524 case TOK_CFLOAT:
5525 t = VT_FLOAT;
5526 goto push_tokc;
5527 case TOK_CDOUBLE:
5528 t = VT_DOUBLE;
5529 goto push_tokc;
5530 case TOK_CLDOUBLE:
5531 #ifdef TCC_USING_DOUBLE_FOR_LDOUBLE
5532 t = VT_DOUBLE | VT_LONG;
5533 #else
5534 t = VT_LDOUBLE;
5535 #endif
5536 goto push_tokc;
5537 case TOK_CLONG:
5538 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG;
5539 goto push_tokc;
5540 case TOK_CULONG:
5541 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG | VT_UNSIGNED;
5542 goto push_tokc;
5543 case TOK___FUNCTION__:
5544 if (!gnu_ext)
5545 goto tok_identifier;
5546 /* fall thru */
5547 case TOK___FUNC__:
5548 tok = TOK_STR;
5549 cstr_reset(&tokcstr);
5550 cstr_cat(&tokcstr, funcname, 0);
5551 tokc.str.size = tokcstr.size;
5552 tokc.str.data = tokcstr.data;
5553 goto case_TOK_STR;
5554 case TOK_LSTR:
5555 #ifdef TCC_TARGET_PE
5556 t = VT_SHORT | VT_UNSIGNED;
5557 #else
5558 t = VT_INT;
5559 #endif
5560 goto str_init;
5561 case TOK_STR:
5562 case_TOK_STR:
5563 /* string parsing */
5564 t = char_type.t;
5565 str_init:
5566 if (tcc_state->warn_write_strings & WARN_ON)
5567 t |= VT_CONSTANT;
5568 type.t = t;
5569 mk_pointer(&type);
5570 type.t |= VT_ARRAY;
5571 memset(&ad, 0, sizeof(AttributeDef));
5572 ad.section = rodata_section;
5573 decl_initializer_alloc(&type, &ad, VT_CONST, 2, 0, 0);
5574 break;
5575 case TOK_SOTYPE:
5576 case '(':
5577 t = tok;
5578 next();
5579 /* cast ? */
5580 if (parse_btype(&type, &ad, 0)) {
5581 type_decl(&type, &ad, &n, TYPE_ABSTRACT);
5582 skip(')');
5583 /* check ISOC99 compound literal */
5584 if (tok == '{') {
5585 /* data is allocated locally by default */
5586 if (global_expr)
5587 r = VT_CONST;
5588 else
5589 r = VT_LOCAL;
5590 /* all except arrays are lvalues */
5591 if (!(type.t & VT_ARRAY))
5592 r |= VT_LVAL;
5593 memset(&ad, 0, sizeof(AttributeDef));
5594 decl_initializer_alloc(&type, &ad, r, 1, 0, 0);
5595 } else if (t == TOK_SOTYPE) { /* from sizeof/alignof (...) */
5596 vpush(&type);
5597 return;
5598 } else {
5599 unary();
5600 gen_cast(&type);
5602 } else if (tok == '{') {
5603 int saved_nocode_wanted = nocode_wanted;
5604 if (CONST_WANTED && !NOEVAL_WANTED)
5605 expect("constant");
5606 if (0 == local_scope)
5607 tcc_error("statement expression outside of function");
5608 /* save all registers */
5609 save_regs(0);
5610 /* statement expression : we do not accept break/continue
5611 inside as GCC does. We do retain the nocode_wanted state,
5612 as statement expressions can't ever be entered from the
5613 outside, so any reactivation of code emission (from labels
5614 or loop heads) can be disabled again after the end of it. */
5615 block(STMT_EXPR);
5616 /* If the statement expr can be entered, then we retain the current
5617 nocode_wanted state (from e.g. a 'return 0;' in the stmt-expr).
5618 If it can't be entered then the state is that from before the
5619 statement expression. */
5620 if (saved_nocode_wanted)
5621 nocode_wanted = saved_nocode_wanted;
5622 skip(')');
5623 } else {
5624 gexpr();
5625 skip(')');
5627 break;
5628 case '*':
5629 next();
5630 unary();
5631 indir();
5632 break;
5633 case '&':
5634 next();
5635 unary();
5636 /* functions names must be treated as function pointers,
5637 except for unary '&' and sizeof. Since we consider that
5638 functions are not lvalues, we only have to handle it
5639 there and in function calls. */
5640 /* arrays can also be used although they are not lvalues */
5641 if ((vtop->type.t & VT_BTYPE) != VT_FUNC &&
5642 !(vtop->type.t & (VT_ARRAY | VT_VLA)))
5643 test_lvalue();
5644 if (vtop->sym)
5645 vtop->sym->a.addrtaken = 1;
5646 mk_pointer(&vtop->type);
5647 gaddrof();
5648 break;
5649 case '!':
5650 next();
5651 unary();
5652 gen_test_zero(TOK_EQ);
5653 break;
5654 case '~':
5655 next();
5656 unary();
5657 vpushi(-1);
5658 gen_op('^');
5659 break;
5660 case '+':
5661 next();
5662 unary();
5663 if ((vtop->type.t & VT_BTYPE) == VT_PTR)
5664 tcc_error("pointer not accepted for unary plus");
5665 /* In order to force cast, we add zero, except for floating point
5666 where we really need an noop (otherwise -0.0 will be transformed
5667 into +0.0). */
5668 if (!is_float(vtop->type.t)) {
5669 vpushi(0);
5670 gen_op('+');
5672 break;
5673 case TOK_SIZEOF:
5674 case TOK_ALIGNOF1:
5675 case TOK_ALIGNOF2:
5676 case TOK_ALIGNOF3:
5677 t = tok;
5678 next();
5679 if (tok == '(')
5680 tok = TOK_SOTYPE;
5681 expr_type(&type, unary);
5682 if (t == TOK_SIZEOF) {
5683 vpush_type_size(&type, &align);
5684 gen_cast_s(VT_SIZE_T);
5685 } else {
5686 type_size(&type, &align);
5687 s = NULL;
5688 if (vtop[1].r & VT_SYM)
5689 s = vtop[1].sym; /* hack: accessing previous vtop */
5690 if (s && s->a.aligned)
5691 align = 1 << (s->a.aligned - 1);
5692 vpushs(align);
5694 break;
5696 case TOK_builtin_expect:
5697 /* __builtin_expect is a no-op for now */
5698 parse_builtin_params(0, "ee");
5699 vpop();
5700 break;
5701 case TOK_builtin_types_compatible_p:
5702 parse_builtin_params(0, "tt");
5703 vtop[-1].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
5704 vtop[0].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
5705 n = is_compatible_types(&vtop[-1].type, &vtop[0].type);
5706 vtop -= 2;
5707 vpushi(n);
5708 break;
5709 case TOK_builtin_choose_expr:
5711 int64_t c;
5712 next();
5713 skip('(');
5714 c = expr_const64();
5715 skip(',');
5716 if (!c) {
5717 nocode_wanted++;
5719 expr_eq();
5720 if (!c) {
5721 vpop();
5722 nocode_wanted--;
5724 skip(',');
5725 if (c) {
5726 nocode_wanted++;
5728 expr_eq();
5729 if (c) {
5730 vpop();
5731 nocode_wanted--;
5733 skip(')');
5735 break;
5736 case TOK_builtin_constant_p:
5737 parse_builtin_params(1, "e");
5738 n = 1;
5739 if ((vtop->r & (VT_VALMASK | VT_LVAL)) != VT_CONST
5740 || ((vtop->r & VT_SYM) && vtop->sym->a.addrtaken)
5742 n = 0;
5743 vtop--;
5744 vpushi(n);
5745 break;
5746 case TOK_builtin_unreachable:
5747 parse_builtin_params(0, ""); /* just skip '()' */
5748 type.t = VT_VOID;
5749 vpush(&type);
5750 CODE_OFF();
5751 break;
5752 case TOK_builtin_frame_address:
5753 case TOK_builtin_return_address:
5755 int tok1 = tok;
5756 int level;
5757 next();
5758 skip('(');
5759 level = expr_const();
5760 if (level < 0)
5761 tcc_error("%s only takes positive integers", get_tok_str(tok1, 0));
5762 skip(')');
5763 type.t = VT_VOID;
5764 mk_pointer(&type);
5765 vset(&type, VT_LOCAL, 0); /* local frame */
5766 while (level--) {
5767 #ifdef TCC_TARGET_RISCV64
5768 vpushi(2*PTR_SIZE);
5769 gen_op('-');
5770 #endif
5771 mk_pointer(&vtop->type);
5772 indir(); /* -> parent frame */
5774 if (tok1 == TOK_builtin_return_address) {
5775 // assume return address is just above frame pointer on stack
5776 #ifdef TCC_TARGET_ARM
5777 vpushi(2*PTR_SIZE);
5778 gen_op('+');
5779 #elif defined TCC_TARGET_RISCV64
5780 vpushi(PTR_SIZE);
5781 gen_op('-');
5782 #else
5783 vpushi(PTR_SIZE);
5784 gen_op('+');
5785 #endif
5786 mk_pointer(&vtop->type);
5787 indir();
5790 break;
5791 #ifdef TCC_TARGET_RISCV64
5792 case TOK_builtin_va_start:
5793 parse_builtin_params(0, "ee");
5794 r = vtop->r & VT_VALMASK;
5795 if (r == VT_LLOCAL)
5796 r = VT_LOCAL;
5797 if (r != VT_LOCAL)
5798 tcc_error("__builtin_va_start expects a local variable");
5799 gen_va_start();
5800 vstore();
5801 break;
5802 #endif
5803 #ifdef TCC_TARGET_X86_64
5804 #ifdef TCC_TARGET_PE
5805 case TOK_builtin_va_start:
5806 parse_builtin_params(0, "ee");
5807 r = vtop->r & VT_VALMASK;
5808 if (r == VT_LLOCAL)
5809 r = VT_LOCAL;
5810 if (r != VT_LOCAL)
5811 tcc_error("__builtin_va_start expects a local variable");
5812 vtop->r = r;
5813 vtop->type = char_pointer_type;
5814 vtop->c.i += 8;
5815 vstore();
5816 break;
5817 #else
5818 case TOK_builtin_va_arg_types:
5819 parse_builtin_params(0, "t");
5820 vpushi(classify_x86_64_va_arg(&vtop->type));
5821 vswap();
5822 vpop();
5823 break;
5824 #endif
5825 #endif
5827 #ifdef TCC_TARGET_ARM64
5828 case TOK_builtin_va_start: {
5829 parse_builtin_params(0, "ee");
5830 //xx check types
5831 gen_va_start();
5832 vpushi(0);
5833 vtop->type.t = VT_VOID;
5834 break;
5836 case TOK_builtin_va_arg: {
5837 parse_builtin_params(0, "et");
5838 type = vtop->type;
5839 vpop();
5840 //xx check types
5841 gen_va_arg(&type);
5842 vtop->type = type;
5843 break;
5845 case TOK___arm64_clear_cache: {
5846 parse_builtin_params(0, "ee");
5847 gen_clear_cache();
5848 vpushi(0);
5849 vtop->type.t = VT_VOID;
5850 break;
5852 #endif
5854 /* atomic operations */
5855 case TOK___atomic_store:
5856 case TOK___atomic_load:
5857 case TOK___atomic_exchange:
5858 case TOK___atomic_compare_exchange:
5859 case TOK___atomic_fetch_add:
5860 case TOK___atomic_fetch_sub:
5861 case TOK___atomic_fetch_or:
5862 case TOK___atomic_fetch_xor:
5863 case TOK___atomic_fetch_and:
5864 case TOK___atomic_fetch_nand:
5865 case TOK___atomic_add_fetch:
5866 case TOK___atomic_sub_fetch:
5867 case TOK___atomic_or_fetch:
5868 case TOK___atomic_xor_fetch:
5869 case TOK___atomic_and_fetch:
5870 case TOK___atomic_nand_fetch:
5871 parse_atomic(tok);
5872 break;
5874 /* pre operations */
5875 case TOK_INC:
5876 case TOK_DEC:
5877 t = tok;
5878 next();
5879 unary();
5880 inc(0, t);
5881 break;
5882 case '-':
5883 next();
5884 unary();
5885 if (is_float(vtop->type.t)) {
5886 gen_opif(TOK_NEG);
5887 } else {
5888 vpushi(0);
5889 vswap();
5890 gen_op('-');
5892 break;
5893 case TOK_LAND:
5894 if (!gnu_ext)
5895 goto tok_identifier;
5896 next();
5897 /* allow to take the address of a label */
5898 if (tok < TOK_UIDENT)
5899 expect("label identifier");
5900 s = label_find(tok);
5901 if (!s) {
5902 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
5903 } else {
5904 if (s->r == LABEL_DECLARED)
5905 s->r = LABEL_FORWARD;
5907 if ((s->type.t & VT_BTYPE) != VT_PTR) {
5908 s->type.t = VT_VOID;
5909 mk_pointer(&s->type);
5910 s->type.t |= VT_STATIC;
5912 vpushsym(&s->type, s);
5913 next();
5914 break;
5916 case TOK_GENERIC:
5918 CType controlling_type;
5919 int has_default = 0;
5920 int has_match = 0;
5921 int learn = 0;
5922 TokenString *str = NULL;
5923 int saved_nocode_wanted = nocode_wanted;
5924 nocode_wanted &= ~CONST_WANTED_MASK;
5926 next();
5927 skip('(');
5928 expr_type(&controlling_type, expr_eq);
5929 convert_parameter_type (&controlling_type);
5931 nocode_wanted = saved_nocode_wanted;
5933 for (;;) {
5934 learn = 0;
5935 skip(',');
5936 if (tok == TOK_DEFAULT) {
5937 if (has_default)
5938 tcc_error("too many 'default'");
5939 has_default = 1;
5940 if (!has_match)
5941 learn = 1;
5942 next();
5943 } else {
5944 AttributeDef ad_tmp;
5945 int itmp;
5946 CType cur_type;
5948 parse_btype(&cur_type, &ad_tmp, 0);
5949 type_decl(&cur_type, &ad_tmp, &itmp, TYPE_ABSTRACT);
5950 if (compare_types(&controlling_type, &cur_type, 0)) {
5951 if (has_match) {
5952 tcc_error("type match twice");
5954 has_match = 1;
5955 learn = 1;
5958 skip(':');
5959 if (learn) {
5960 if (str)
5961 tok_str_free(str);
5962 skip_or_save_block(&str);
5963 } else {
5964 skip_or_save_block(NULL);
5966 if (tok == ')')
5967 break;
5969 if (!str) {
5970 char buf[60];
5971 type_to_str(buf, sizeof buf, &controlling_type, NULL);
5972 tcc_error("type '%s' does not match any association", buf);
5974 begin_macro(str, 1);
5975 next();
5976 expr_eq();
5977 if (tok != TOK_EOF)
5978 expect(",");
5979 end_macro();
5980 next();
5981 break;
5983 // special qnan , snan and infinity values
5984 case TOK___NAN__:
5985 n = 0x7fc00000;
5986 special_math_val:
5987 vpushi(n);
5988 vtop->type.t = VT_FLOAT;
5989 next();
5990 break;
5991 case TOK___SNAN__:
5992 n = 0x7f800001;
5993 goto special_math_val;
5994 case TOK___INF__:
5995 n = 0x7f800000;
5996 goto special_math_val;
5998 default:
5999 tok_identifier:
6000 if (tok < TOK_UIDENT)
6001 tcc_error("expression expected before '%s'", get_tok_str(tok, &tokc));
6002 t = tok;
6003 next();
6004 s = sym_find(t);
6005 if (!s || IS_ASM_SYM(s)) {
6006 const char *name = get_tok_str(t, NULL);
6007 if (tok != '(')
6008 tcc_error("'%s' undeclared", name);
6009 /* for simple function calls, we tolerate undeclared
6010 external reference to int() function */
6011 tcc_warning_c(warn_implicit_function_declaration)(
6012 "implicit declaration of function '%s'", name);
6013 s = external_global_sym(t, &func_old_type);
6016 r = s->r;
6017 /* A symbol that has a register is a local register variable,
6018 which starts out as VT_LOCAL value. */
6019 if ((r & VT_VALMASK) < VT_CONST)
6020 r = (r & ~VT_VALMASK) | VT_LOCAL;
6022 vset(&s->type, r, s->c);
6023 /* Point to s as backpointer (even without r&VT_SYM).
6024 Will be used by at least the x86 inline asm parser for
6025 regvars. */
6026 vtop->sym = s;
6028 if (r & VT_SYM) {
6029 vtop->c.i = 0;
6030 #ifdef TCC_TARGET_PE
6031 if (s->a.dllimport) {
6032 mk_pointer(&vtop->type);
6033 vtop->r |= VT_LVAL;
6034 indir();
6036 #endif
6037 } else if (r == VT_CONST && IS_ENUM_VAL(s->type.t)) {
6038 vtop->c.i = s->enum_val;
6040 break;
6043 /* post operations */
6044 while (1) {
6045 if (tok == TOK_INC || tok == TOK_DEC) {
6046 inc(1, tok);
6047 next();
6048 } else if (tok == '.' || tok == TOK_ARROW) {
6049 int qualifiers, cumofs;
6050 /* field */
6051 if (tok == TOK_ARROW)
6052 indir();
6053 qualifiers = vtop->type.t & (VT_CONSTANT | VT_VOLATILE);
6054 test_lvalue();
6055 /* expect pointer on structure */
6056 next();
6057 s = find_field(&vtop->type, tok, &cumofs);
6058 /* add field offset to pointer */
6059 gaddrof();
6060 vtop->type = char_pointer_type; /* change type to 'char *' */
6061 vpushi(cumofs);
6062 gen_op('+');
6063 /* change type to field type, and set to lvalue */
6064 vtop->type = s->type;
6065 vtop->type.t |= qualifiers;
6066 /* an array is never an lvalue */
6067 if (!(vtop->type.t & VT_ARRAY)) {
6068 vtop->r |= VT_LVAL;
6069 #ifdef CONFIG_TCC_BCHECK
6070 /* if bound checking, the referenced pointer must be checked */
6071 if (tcc_state->do_bounds_check)
6072 vtop->r |= VT_MUSTBOUND;
6073 #endif
6075 next();
6076 } else if (tok == '[') {
6077 next();
6078 gexpr();
6079 gen_op('+');
6080 indir();
6081 skip(']');
6082 } else if (tok == '(') {
6083 SValue ret;
6084 Sym *sa;
6085 int nb_args, ret_nregs, ret_align, regsize, variadic;
6086 TokenString *p, *p2;
6088 /* function call */
6089 if ((vtop->type.t & VT_BTYPE) != VT_FUNC) {
6090 /* pointer test (no array accepted) */
6091 if ((vtop->type.t & (VT_BTYPE | VT_ARRAY)) == VT_PTR) {
6092 vtop->type = *pointed_type(&vtop->type);
6093 if ((vtop->type.t & VT_BTYPE) != VT_FUNC)
6094 goto error_func;
6095 } else {
6096 error_func:
6097 expect("function pointer");
6099 } else {
6100 vtop->r &= ~VT_LVAL; /* no lvalue */
6102 /* get return type */
6103 s = vtop->type.ref;
6104 next();
6105 sa = s->next; /* first parameter */
6106 nb_args = regsize = 0;
6107 ret.r2 = VT_CONST;
6108 /* compute first implicit argument if a structure is returned */
6109 if ((s->type.t & VT_BTYPE) == VT_STRUCT) {
6110 variadic = (s->f.func_type == FUNC_ELLIPSIS);
6111 ret_nregs = gfunc_sret(&s->type, variadic, &ret.type,
6112 &ret_align, &regsize);
6113 if (ret_nregs <= 0) {
6114 /* get some space for the returned structure */
6115 size = type_size(&s->type, &align);
6116 #ifdef TCC_TARGET_ARM64
6117 /* On arm64, a small struct is return in registers.
6118 It is much easier to write it to memory if we know
6119 that we are allowed to write some extra bytes, so
6120 round the allocated space up to a power of 2: */
6121 if (size < 16)
6122 while (size & (size - 1))
6123 size = (size | (size - 1)) + 1;
6124 #endif
6125 loc = (loc - size) & -align;
6126 ret.type = s->type;
6127 ret.r = VT_LOCAL | VT_LVAL;
6128 /* pass it as 'int' to avoid structure arg passing
6129 problems */
6130 vseti(VT_LOCAL, loc);
6131 #ifdef CONFIG_TCC_BCHECK
6132 if (tcc_state->do_bounds_check)
6133 --loc;
6134 #endif
6135 ret.c = vtop->c;
6136 if (ret_nregs < 0)
6137 vtop--;
6138 else
6139 nb_args++;
6141 } else {
6142 ret_nregs = 1;
6143 ret.type = s->type;
6146 if (ret_nregs > 0) {
6147 /* return in register */
6148 ret.c.i = 0;
6149 PUT_R_RET(&ret, ret.type.t);
6152 p = NULL;
6153 if (tok != ')') {
6154 r = tcc_state->reverse_funcargs;
6155 for(;;) {
6156 if (r) {
6157 skip_or_save_block(&p2);
6158 p2->prev = p, p = p2;
6159 } else {
6160 expr_eq();
6161 gfunc_param_typed(s, sa);
6163 nb_args++;
6164 if (sa)
6165 sa = sa->next;
6166 if (tok == ')')
6167 break;
6168 skip(',');
6171 if (sa)
6172 tcc_error("too few arguments to function");
6174 if (p) { /* with reverse_funcargs */
6175 for (n = 0; p; p = p2, ++n) {
6176 p2 = p, sa = s;
6177 do {
6178 sa = sa->next, p2 = p2->prev;
6179 } while (p2 && sa);
6180 p2 = p->prev;
6181 begin_macro(p, 1), next();
6182 expr_eq();
6183 gfunc_param_typed(s, sa);
6184 end_macro();
6186 vrev(n);
6189 next();
6190 vcheck_cmp(); /* the generators don't like VT_CMP on vtop */
6191 gfunc_call(nb_args);
6193 if (ret_nregs < 0) {
6194 vsetc(&ret.type, ret.r, &ret.c);
6195 #ifdef TCC_TARGET_RISCV64
6196 arch_transfer_ret_regs(1);
6197 #endif
6198 } else {
6199 /* return value */
6200 n = ret_nregs;
6201 while (n > 1) {
6202 int rc = reg_classes[ret.r] & ~(RC_INT | RC_FLOAT);
6203 /* We assume that when a structure is returned in multiple
6204 registers, their classes are consecutive values of the
6205 suite s(n) = 2^n */
6206 rc <<= --n;
6207 for (r = 0; r < NB_REGS; ++r)
6208 if (reg_classes[r] & rc)
6209 break;
6210 vsetc(&ret.type, r, &ret.c);
6212 vsetc(&ret.type, ret.r, &ret.c);
6213 vtop->r2 = ret.r2;
6215 /* handle packed struct return */
6216 if (((s->type.t & VT_BTYPE) == VT_STRUCT) && ret_nregs) {
6217 int addr, offset;
6219 size = type_size(&s->type, &align);
6220 /* We're writing whole regs often, make sure there's enough
6221 space. Assume register size is power of 2. */
6222 size = (size + regsize - 1) & -regsize;
6223 if (ret_align > align)
6224 align = ret_align;
6225 loc = (loc - size) & -align;
6226 addr = loc;
6227 offset = 0;
6228 for (;;) {
6229 vset(&ret.type, VT_LOCAL | VT_LVAL, addr + offset);
6230 vswap();
6231 vstore();
6232 vtop--;
6233 if (--ret_nregs == 0)
6234 break;
6235 offset += regsize;
6237 vset(&s->type, VT_LOCAL | VT_LVAL, addr);
/* Promote char/short return values. This matters only
   for calling functions that were not compiled by TCC and
6242 only on some architectures. For those where it doesn't
6243 matter we expect things to be already promoted to int,
6244 but not larger. */
6245 t = s->type.t & VT_BTYPE;
6246 if (t == VT_BYTE || t == VT_SHORT || t == VT_BOOL) {
6247 #ifdef PROMOTE_RET
6248 vtop->r |= BFVAL(VT_MUSTCAST, 1);
6249 #else
6250 vtop->type.t = VT_INT;
6251 #endif
6254 if (s->f.func_noreturn) {
6255 if (debug_modes)
6256 tcc_tcov_block_end(tcc_state, -1);
6257 CODE_OFF();
6259 } else {
6260 break;
6265 #ifndef precedence_parser /* original top-down parser */
6267 static void expr_prod(void)
6269 int t;
6271 unary();
6272 while ((t = tok) == '*' || t == '/' || t == '%') {
6273 next();
6274 unary();
6275 gen_op(t);
6279 static void expr_sum(void)
6281 int t;
6283 expr_prod();
6284 while ((t = tok) == '+' || t == '-') {
6285 next();
6286 expr_prod();
6287 gen_op(t);
6291 static void expr_shift(void)
6293 int t;
6295 expr_sum();
6296 while ((t = tok) == TOK_SHL || t == TOK_SAR) {
6297 next();
6298 expr_sum();
6299 gen_op(t);
6303 static void expr_cmp(void)
6305 int t;
6307 expr_shift();
6308 while (((t = tok) >= TOK_ULE && t <= TOK_GT) ||
6309 t == TOK_ULT || t == TOK_UGE) {
6310 next();
6311 expr_shift();
6312 gen_op(t);
6316 static void expr_cmpeq(void)
6318 int t;
6320 expr_cmp();
6321 while ((t = tok) == TOK_EQ || t == TOK_NE) {
6322 next();
6323 expr_cmp();
6324 gen_op(t);
6328 static void expr_and(void)
6330 expr_cmpeq();
6331 while (tok == '&') {
6332 next();
6333 expr_cmpeq();
6334 gen_op('&');
6338 static void expr_xor(void)
6340 expr_and();
6341 while (tok == '^') {
6342 next();
6343 expr_and();
6344 gen_op('^');
6348 static void expr_or(void)
6350 expr_xor();
6351 while (tok == '|') {
6352 next();
6353 expr_xor();
6354 gen_op('|');
6358 static void expr_landor(int op);
/* Parse a logical-AND expression; the whole '&&' chain, if present,
   is handled by expr_landor() for short-circuit code generation. */
static void expr_land(void)
{
    expr_or();
    if (tok == TOK_LAND)
        expr_landor(tok);
}
/* Parse a logical-OR expression; the whole '||' chain, if present,
   is handled by expr_landor() for short-circuit code generation. */
static void expr_lor(void)
{
    expr_land();
    if (tok == TOK_LOR)
        expr_landor(tok);
}
6374 # define expr_landor_next(op) op == TOK_LAND ? expr_or() : expr_land()
6375 #else /* defined precedence_parser */
6376 # define expr_landor_next(op) unary(), expr_infix(precedence(op) + 1)
6377 # define expr_lor() unary(), expr_infix(1)
/* Return the binding power of binary operator token 'tok' for the
   precedence-climbing parser: 1 (lowest, '||') .. 10 (highest, mul/div/mod),
   or 0 if 'tok' is not a binary operator handled here.
   Note: the 'relat:' label sits on a case so the default branch can
   route the contiguous TOK_ULE..TOK_GT relational range to level 7. */
static int precedence(int tok)
{
    switch (tok) {
        case TOK_LOR: return 1;
        case TOK_LAND: return 2;
        case '|': return 3;
        case '^': return 4;
        case '&': return 5;
        case TOK_EQ: case TOK_NE: return 6;
 relat: case TOK_ULT: case TOK_UGE: return 7;
        case TOK_SHL: case TOK_SAR: return 8;
        case '+': case '-': return 9;
        case '*': case '/': case '%': return 10;
        default:
            if (tok >= TOK_ULE && tok <= TOK_GT)
                goto relat;
            return 0;
    }
}
/* Precomputed precedence table for single-byte tokens; filled once by
   init_prec().  The macro below then shadows the function so lookups for
   tokens < 256 become a table access (larger tokens fall back to 0 here;
   multi-char operator tokens are expected to be < 256 in tcc's encoding). */
static unsigned char prec[256];
static void init_prec(void)
{
    int i;
    for (i = 0; i < 256; i++)
        prec[i] = precedence(i);
}
#define precedence(i) ((unsigned)i < 256 ? prec[i] : 0)
6407 static void expr_landor(int op);
/* Precedence-climbing parser for binary operators.  Assumes the left
   operand has already been parsed (by unary()).  Consumes operators whose
   precedence is >= 'p'; '&&'/'||' chains are delegated to expr_landor()
   for short-circuit handling. */
static void expr_infix(int p)
{
    int t = tok, p2;
    while ((p2 = precedence(t)) >= p) {
        if (t == TOK_LOR || t == TOK_LAND) {
            expr_landor(t);
        } else {
            next();
            unary();
            /* right operand binds tighter: recurse one level up */
            if (precedence(tok) > p2)
                expr_infix(p2 + 1);
            gen_op(t);
        }
        t = tok;
    }
}
6425 #endif
6427 /* Assuming vtop is a value used in a conditional context
6428 (i.e. compared with zero) return 0 if it's false, 1 if
6429 true and -1 if it can't be statically determined. */
/* Assuming vtop is a value used in a conditional context
   (i.e. compared with zero) return 0 if it's false, 1 if
   true and -1 if it can't be statically determined.
   Weak symbols are never folded: their address may be null at runtime. */
static int condition_3way(void)
{
    int c = -1;
    if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
        (!(vtop->r & VT_SYM) || !vtop->sym->a.weak)) {
        /* evaluate the constant on a copy so vtop itself is untouched */
        vdup();
        gen_cast_s(VT_BOOL);
        c = vtop->c.i;
        vpop();
    }
    return c;
}
/* Generate short-circuit code for a chain of '&&' (op == TOK_LAND) or
   '||' (op == TOK_LOR) operators.  The left operand is already on vtop.
   i is the "continue" value (1 for &&, 0 for ||); f becomes 1 once a
   statically-known operand decides the result (remaining operands are
   then parsed under nocode_wanted); cc stays 1 while all operands were
   compile-time constants; t accumulates the jump chain. */
static void expr_landor(int op)
{
    int t = 0, cc = 1, f = 0, i = op == TOK_LAND, c;
    for(;;) {
        c = f ? i : condition_3way();
        if (c < 0)
            save_regs(1), cc = 0;
        else if (c != i)
            nocode_wanted++, f = 1;
        if (tok != op)
            break;
        if (c < 0)
            t = gvtst(i, t); /* emit test, extend jump chain */
        else
            vpop(); /* constant operand: discard, result known */
        next();
        expr_landor_next(op);
    }
    if (cc || f) {
        /* result is statically known */
        vpop();
        vpushi(i ^ f);
        gsym(t);
        nocode_wanted -= f; /* re-enable codegen suppressed above */
    } else {
        /* runtime result: leave VT_CMP with the pending jump chain */
        gvtst_set(i, t);
    }
}
/* Return 1 if sv is already a boolean-like value: either an int constant
   0/1, or a pending comparison result (VT_CMP).  Used by expr_cond() to
   fuse the two branches of ?: into the enclosing conditional jump. */
static int is_cond_bool(SValue *sv)
{
    if ((sv->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST
        && (sv->type.t & VT_BTYPE) == VT_INT)
        return (unsigned)sv->c.i < 2;
    if (sv->r == VT_CMP)
        return 1;
    return 0;
}
/* Parse a conditional expression 'a ? b : c' (including the GNU 'a ?: c'
   extension, g != 0).  c is the statically-known condition (-1 unknown);
   when known, the dead branch is parsed under nocode_wanted.  Both branch
   values are converted to the ISO C composite type; struct lvalues are
   kept addressable via the *(cond ? &a : &b) transformation. */
static void expr_cond(void)
{
    int tt, u, r1, r2, rc, t1, t2, islv, c, g;
    SValue sv;
    CType type;

    expr_lor();
    if (tok == '?') {
        next();
        c = condition_3way();
        g = (tok == ':' && gnu_ext);
        tt = 0;
        if (!g) {
            if (c < 0) {
                save_regs(1);
                tt = gvtst(1, 0); /* jump to ':' branch if condition false */
            } else {
                vpop();
            }
        } else if (c < 0) {
            /* needed to avoid having different registers saved in
               each branch */
            save_regs(1);
            gv_dup(); /* 'a ?: c' reuses the condition as the result */
            tt = gvtst(0, 0);
        }

        if (c == 0)
            nocode_wanted++;
        if (!g)
            gexpr();

        if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
            mk_pointer(&vtop->type);
        sv = *vtop; /* save value to handle it later */
        vtop--; /* no vpop so that FP stack is not flushed */

        if (g) {
            u = tt;
        } else if (c < 0) {
            u = gjmp(0); /* skip over the ':' branch */
            gsym(tt);
        } else
            u = 0;

        if (c == 0)
            nocode_wanted--;
        if (c == 1)
            nocode_wanted++;
        skip(':');
        expr_cond();

        if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
            mk_pointer(&vtop->type);

        /* cast operands to correct type according to ISOC rules */
        if (!combine_types(&type, &sv, vtop, '?'))
            type_incompatibility_error(&sv.type, &vtop->type,
                "type mismatch in conditional expression (have '%s' and '%s')");

        if (c < 0 && is_cond_bool(vtop) && is_cond_bool(&sv)) {
            /* optimize "if (f ? a > b : c || d) ..." for example, where normally
               "a < b" and "c || d" would be forced to "(int)0/1" first, whereas
               this code jumps directly to the if's then/else branches. */
            t1 = gvtst(0, 0);
            t2 = gjmp(0);
            gsym(u);
            vpushv(&sv);
            /* combine jump targets of 2nd op with VT_CMP of 1st op */
            gvtst_set(0, t1);
            gvtst_set(1, t2);
            gen_cast(&type);
            //   tcc_warning("two conditions expr_cond");
            return;
        }

        /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
           that `(expr ? a : b).mem` does not error with "lvalue expected" */
        islv = (vtop->r & VT_LVAL) && (sv.r & VT_LVAL) && VT_STRUCT == (type.t & VT_BTYPE);

        /* now we convert second operand */
        if (c != 1) {
            gen_cast(&type);
            if (islv) {
                mk_pointer(&vtop->type);
                gaddrof();
            } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
                gaddrof();
        }

        rc = RC_TYPE(type.t);
        /* for long longs, we use fixed registers to avoid having
           to handle a complicated move */
        if (USING_TWO_WORDS(type.t))
            rc = RC_RET(type.t);

        tt = r2 = 0;
        if (c < 0) {
            r2 = gv(rc);
            tt = gjmp(0);
        }
        gsym(u);
        if (c == 1)
            nocode_wanted--;

        /* this is horrible, but we must also convert first
           operand */
        if (c != 0) {
            *vtop = sv;
            gen_cast(&type);
            if (islv) {
                mk_pointer(&vtop->type);
                gaddrof();
            } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
                gaddrof();
        }

        if (c < 0) {
            /* move both branch results into the same register */
            r1 = gv(rc);
            move_reg(r2, r1, islv ? VT_PTR : type.t);
            vtop->r = r2;
            gsym(tt);
        }

        if (islv)
            indir(); /* undo the &a/&b transformation */
    }
}
/* Parse an assignment expression: conditional-expression optionally
   followed by '=' or a compound assignment ('+=', '-=', ...).  For
   compound assignment the lvalue is duplicated, the operator applied,
   then stored back. */
static void expr_eq(void)
{
    int t;

    expr_cond();
    if ((t = tok) == '=' || TOK_ASSIGN(t)) {
        test_lvalue();
        next();
        if (t == '=') {
            expr_eq();
        } else {
            vdup();
            expr_eq();
            gen_op(TOK_ASSIGN_OP(t)); /* map '+=' to '+', etc. */
        }
        vstore();
    }
}
/* Parse a full expression, including the comma operator; the value of the
   last sub-expression is left on vtop. */
ST_FUNC void gexpr(void)
{
    expr_eq();
    if (tok == ',') {
        do {
            vpop(); /* discard value of all but the last operand */
            next();
            expr_eq();
        } while (tok == ',');

        /* convert array & function to pointer */
        convert_parameter_type(&vtop->type);

        /* make builtin_constant_p((1,2)) return 0 (like on gcc) */
        if ((vtop->r & VT_VALMASK) == VT_CONST && nocode_wanted && !CONST_WANTED)
            gv(RC_TYPE(vtop->type.t));
    }
}
6648 /* parse a constant expression and return value in vtop. */
/* Parse a constant expression and leave its value on vtop.  The
   CONST_WANTED_BIT in nocode_wanted tells lower levels that a
   compile-time constant is required. */
static void expr_const1(void)
{
    nocode_wanted += CONST_WANTED_BIT;
    expr_cond();
    nocode_wanted -= CONST_WANTED_BIT;
}
6656 /* parse an integer constant and return its value. */
/* Parse an integer constant expression and return its 64-bit value.
   Errors out unless the result is a plain compile-time constant
   (no symbol reference, no lvalue, not marked non-constant). */
static inline int64_t expr_const64(void)
{
    int64_t c;
    expr_const1();
    if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM | VT_NONCONST)) != VT_CONST)
        expect("constant expression");
    c = vtop->c.i;
    vpop();
    return c;
}
6668 /* parse an integer constant and return its value.
6669 Complain if it doesn't fit 32bit (signed or unsigned). */
/* Parse an integer constant and return its value.
   Complain if it doesn't fit 32bit (signed or unsigned). */
ST_FUNC int expr_const(void)
{
    int c;
    int64_t wc = expr_const64();
    c = wc;
    /* accept values representable either as int32 or as uint32 */
    if (c != wc && (unsigned)c != wc)
        tcc_error("constant exceeds 32 bit");
    return c;
}
6680 /* ------------------------------------------------------------------------- */
6681 /* return from function */
#ifndef TCC_TARGET_ARM64
/* Generate code to return the value on vtop from the current function.
   Handles the three ABI cases for struct returns reported by gfunc_sret():
   < 0  target moves the registers itself (e.g. RISC-V);
   == 0 struct is copied to the caller-provided hidden pointer (func_vc);
   > 0  struct is packed into ret_nregs consecutive registers. */
static void gfunc_return(CType *func_type)
{
    if ((func_type->t & VT_BTYPE) == VT_STRUCT) {
        CType type, ret_type;
        int ret_align, ret_nregs, regsize;
        ret_nregs = gfunc_sret(func_type, func_var, &ret_type,
                               &ret_align, &regsize);
        if (ret_nregs < 0) {
#ifdef TCC_TARGET_RISCV64
            arch_transfer_ret_regs(0);
#endif
        } else if (0 == ret_nregs) {
            /* if returning structure, must copy it to implicit
               first pointer arg location */
            type = *func_type;
            mk_pointer(&type);
            vset(&type, VT_LOCAL | VT_LVAL, func_vc);
            indir();
            vswap();
            /* copy structure value to pointer */
            vstore();
        } else {
            /* returning structure packed into registers */
            int size, addr, align, rc, n;
            size = type_size(func_type,&align);
            /* if the struct is misaligned for whole-register loads,
               first copy it to a properly aligned stack slot */
            if ((align & (ret_align - 1))
                && ((vtop->r & VT_VALMASK) < VT_CONST /* pointer to struct */
                    || (vtop->c.i & (ret_align - 1))
                    )) {
                loc = (loc - size) & -ret_align;
                addr = loc;
                type = *func_type;
                vset(&type, VT_LOCAL | VT_LVAL, addr);
                vswap();
                vstore();
                vpop();
                vset(&ret_type, VT_LOCAL | VT_LVAL, addr);
            }
            vtop->type = ret_type;
            rc = RC_RET(ret_type.t);
            //printf("struct return: n:%d t:%02x rc:%02x\n", ret_nregs, ret_type.t, rc);
            for (n = ret_nregs; --n > 0;) {
                vdup();
                gv(rc);
                vswap();
                incr_offset(regsize);
                /* We assume that when a structure is returned in multiple
                   registers, their classes are consecutive values of the
                   suite s(n) = 2^n */
                rc <<= 1;
            }
            gv(rc);
            vtop -= ret_nregs - 1;
        }
    } else {
        gv(RC_RET(func_type->t));
    }
    vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
}
#endif
/* Called at the closing brace of a function body when control can fall
   off the end: main() implicitly returns 0 (C99 5.1.2.2.3), any other
   non-void function gets a warning. */
static void check_func_return(void)
{
    if ((func_vt.t & VT_BTYPE) == VT_VOID)
        return;
    if (!strcmp (funcname, "main")
        && (func_vt.t & VT_BTYPE) == VT_INT) {
        /* main returns 0 by default */
        vpushi(0);
        gen_assign_cast(&func_vt);
        gfunc_return(&func_vt);
    } else {
        tcc_warning("function might return no value: '%s'", funcname);
    }
}
6760 /* ------------------------------------------------------------------------- */
6761 /* switch/case */
/* Three-way compare of two case values (-1/0/1), honoring the
   signedness of the current switch expression's type. */
static int case_cmp(uint64_t a, uint64_t b)
{
    if (cur_switch->sv.type.t & VT_UNSIGNED)
        return a < b ? -1 : a > b;
    else
        return (int64_t)a < (int64_t)b ? -1 : (int64_t)a > (int64_t)b;
}
/* qsort() adapter: compare two case entries by their lower bound v1. */
static int case_cmp_qs(const void *pa, const void *pb)
{
    return case_cmp((*(struct case_t**)pa)->v1, (*(struct case_t**)pb)->v1);
}
/* Sort the collected case ranges of a switch, reject overlapping values,
   and merge adjacent ranges that jump to the same code address. */
static void case_sort(struct switch_t *sw)
{
    struct case_t **p;
    if (sw->n < 2)
        return;
    qsort(sw->p, sw->n, sizeof *sw->p, case_cmp_qs);
    p = sw->p;
    while (p < sw->p + sw->n - 1) {
        if (case_cmp(p[0]->v2, p[1]->v1) >= 0) {
            int l1 = p[0]->line, l2 = p[1]->line;
            /* using special format "%i:..." to show specific line */
            tcc_error("%i:duplicate case value", l1 > l2 ? l1 : l2);
        } else if (p[0]->v2 + 1 == p[1]->v1 && p[0]->ind == p[1]->ind) {
            /* treat "case 1: case 2: case 3:" like "case 1 ... 3:" */
            p[1]->v1 = p[0]->v1;
            tcc_free(p[0]);
            memmove(p, p + 1, (--sw->n - (p - sw->p)) * sizeof *p);
        } else
            ++p;
    }
}
/* Generate the dispatch code for a sorted array of case ranges.
   The switch value is on vtop (and stays there).  Uses binary search
   for more than 8 cases, linear tests otherwise.  'dsym' is the jump
   chain leading to the default label; the updated chain is returned. */
static int gcase(struct case_t **base, int len, int dsym)
{
    struct case_t *p;
    int t, l2, e;

    t = vtop->type.t & VT_BTYPE;
    if (t != VT_LLONG)
        t = VT_INT;
    while (len) {
        /* binary search while len > 8, else linear */
        l2 = len > 8 ? len/2 : 0;
        p = base[l2];
        vdup(), vpush64(t, p->v2);
        if (l2 == 0 && p->v1 == p->v2) {
            gen_op(TOK_EQ); /* jmp to case when equal */
            gsym_addr(gvtst(0, 0), p->ind);
        } else {
            /* case v1 ... v2 */
            gen_op(TOK_GT); /* jmp over when > V2 */
            if (len == 1) /* last case test jumps to default when false */
                dsym = gvtst(0, dsym), e = 0;
            else
                e = gvtst(0, 0);
            vdup(), vpush64(t, p->v1);
            gen_op(TOK_GE); /* jmp to case when >= V1 */
            gsym_addr(gvtst(0, 0), p->ind);
            /* recurse into the lower half; 'e' resumes with the upper half */
            dsym = gcase(base, l2, dsym);
            gsym(e);
        }
        ++l2, base += l2, len -= l2;
    }
    /* jump automagically will suppress more jumps */
    return gjmp(dsym);
}
/* Free the current switch context and restore the enclosing one. */
static void end_switch(void)
{
    struct switch_t *sw = cur_switch;
    dynarray_reset(&sw->p, &sw->n);
    cur_switch = sw->prev;
    tcc_free(sw);
}
6841 /* ------------------------------------------------------------------------- */
6842 /* __attribute__((cleanup(fn))) */
/* Emit calls to the __attribute__((cleanup(fn))) handlers registered in
   the current scope's cleanup chain, from the innermost entry up to (but
   not including) 'stop'.  Each handler receives the address of its
   associated variable. */
static void try_call_scope_cleanup(Sym *stop)
{
    Sym *cls = cur_scope->cl.s;

    for (; cls != stop; cls = cls->next) {
        Sym *fs = cls->cleanup_func;
        Sym *vs = cls->prev_tok;

        vpushsym(&fs->type, fs);
        vset(&vs->type, vs->r, vs->c);
        vtop->sym = vs;
        mk_pointer(&vtop->type);
        gaddrof();
        gfunc_call(1);
    }
}
/* For a backward goto: run the cleanups of every scope left between the
   goto and the target label.  'cleanupstate' is the cleanup chain that was
   active at the label definition. */
static void try_call_cleanup_goto(Sym *cleanupstate)
{
    Sym *oc, *cc;
    int ocd, ccd;

    if (!cur_scope->cl.s)
        return;

    /* search NCA of both cleanup chains given parents and initial depth */
    ocd = cleanupstate ? cleanupstate->v & ~SYM_FIELD : 0;
    /* walk the deeper chain up until both have equal depth ... */
    for (ccd = cur_scope->cl.n, oc = cleanupstate; ocd > ccd; --ocd, oc = oc->next)
        ;
    for (cc = cur_scope->cl.s; ccd > ocd; --ccd, cc = cc->next)
        ;
    /* ... then advance both in lockstep until they meet */
    for (; cc != oc; cc = cc->next, oc = oc->next, --ccd)
        ;

    try_call_scope_cleanup(cc);
}
6881 /* call 'func' for each __attribute__((cleanup(func))) */
/* call 'func' for each __attribute__((cleanup(func))) when leaving a
   scope that has pending forward gotos crossing it.  For each pending
   goto whose target lies outside scope 'o', redirect its jump chain
   through the cleanup calls of this scope. */
static void block_cleanup(struct scope *o)
{
    int jmp = 0;
    Sym *g, **pg;
    for (pg = &pending_gotos; (g = *pg) && g->c > o->cl.n;) {
        if (g->prev_tok->r & LABEL_FORWARD) {
            Sym *pcl = g->next;
            if (!jmp)
                jmp = gjmp(0); /* skip the cleanup stubs in normal flow */
            gsym(pcl->jnext);
            try_call_scope_cleanup(o->cl.s);
            pcl->jnext = gjmp(0);
            if (!o->cl.n)
                goto remove_pending; /* no outer cleanups remain */
            g->c = o->cl.n;
            pg = &g->prev;
        } else {
    remove_pending:
            *pg = g->prev;
            sym_free(g);
        }
    }
    gsym(jmp);
    /* normal (non-goto) exit of the scope runs its cleanups too */
    try_call_scope_cleanup(o->cl.s);
}
6908 /* ------------------------------------------------------------------------- */
6909 /* VLA */
/* Restore the stack pointer from the saved location 'loc' (0 = nothing
   to restore), undoing VLA allocations. */
static void vla_restore(int loc)
{
    if (loc)
        gen_vla_sp_restore(loc);
}
/* When leaving scopes up to 'o', restore the stack pointer saved by the
   outermost VLA-containing scope being left. */
static void vla_leave(struct scope *o)
{
    struct scope *c = cur_scope, *v = NULL;
    for (; c != o && c; c = c->prev)
        if (c->vla.num)
            v = c;
    if (v)
        vla_restore(v->vla.locorig);
}
6927 /* ------------------------------------------------------------------------- */
6928 /* local scopes */
/* Enter a new local scope: inherit the current scope's state and
   remember the symbol/label stack positions for later rollback. */
static void new_scope(struct scope *o)
{
    /* copy and link previous scope */
    *o = *cur_scope;
    o->prev = cur_scope;
    cur_scope = o;
    cur_scope->vla.num = 0;

    /* record local declaration stack position */
    o->lstk = local_stack;
    o->llstk = local_label_stack;
    ++local_scope;
}
/* Leave scope 'o': restore VLA stack state, run cleanups if this scope
   registered any, and pop locally defined labels and symbols. */
static void prev_scope(struct scope *o, int is_expr)
{
    vla_leave(o->prev);

    if (o->cl.s != o->prev->cl.s)
        block_cleanup(o->prev);

    /* pop locally defined labels */
    label_pop(&local_label_stack, o->llstk, is_expr);

    /* In the is_expr case (a statement expression is finished here),
       vtop might refer to symbols on the local_stack.  Either via the
       type or via vtop->sym.  We can't pop those nor any that in turn
       might be referred to.  To make it easier we don't roll back
       any symbols in that case; some upper level call to block() will
       do that.  We do have to remove such symbols from the lookup
       tables, though.  sym_pop will do that.  */

    /* pop locally defined symbols */
    pop_local_syms(o->lstk, is_expr);
    cur_scope = o->prev;
    --local_scope;
}
6968 /* leave a scope via break/continue(/goto) */
/* leave a scope via break/continue(/goto): run its cleanups and restore
   the VLA stack pointer, without popping symbols (parsing continues). */
static void leave_scope(struct scope *o)
{
    if (!o)
        return;
    try_call_scope_cleanup(o->cl.s);
    vla_leave(o);
}
/* short versions for scopes with 'if/do/while/switch' which can
   declare only types (of struct/union/enum) */
/* Lightweight scope entry: only records the local symbol stack position
   (such scopes can declare only struct/union/enum types). */
static void new_scope_s(struct scope *o)
{
    o->lstk = local_stack;
    ++local_scope;
}
/* Counterpart of new_scope_s(): pop type symbols declared in the scope. */
static void prev_scope_s(struct scope *o)
{
    sym_pop(&local_stack, o->lstk, 0);
    --local_scope;
}
6991 /* ------------------------------------------------------------------------- */
6992 /* call block from 'for do while' loops */
/* call block from 'for do while' loops: temporarily install the loop's
   break (bsym) and continue (csym) jump chains in the current scope,
   then parse the loop body.  csym == NULL for switch (break only). */
static void lblock(int *bsym, int *csym)
{
    struct scope *lo = loop_scope, *co = cur_scope;
    int *b = co->bsym, *c = co->csym;
    if (csym) {
        co->csym = csym;
        loop_scope = co;
    }
    co->bsym = bsym;
    block(0);
    co->bsym = b;
    if (csym) {
        co->csym = c;
        loop_scope = lo;
    }
}
/* Parse and generate code for one statement.  'flags' may contain
   STMT_EXPR (GNU statement expression: the last expression's value is
   kept) and STMT_COMPOUND (we are directly inside a compound statement).
   Handles if/while/for/do/switch/case/default, compound statements,
   return/break/continue/goto, labels, asm and expression statements. */
static void block(int flags)
{
    int a, b, c, d, e, t;
    struct scope o;
    Sym *s;

    if (flags & STMT_EXPR) {
        /* default return value is (void) */
        vpushi(0);
        vtop->type.t = VT_VOID;
    }

again:
    t = tok;
    /* If the token carries a value, next() might destroy it. Only with
       invalid code such as f(){"123"4;} */
    if (TOK_HAS_VALUE(t))
        goto expr;
    next();

    if (debug_modes)
        tcc_tcov_check_line (tcc_state, 0), tcc_tcov_block_begin (tcc_state);

    if (t == TOK_IF) {
        new_scope_s(&o);
        skip('(');
        gexpr();
        skip(')');
        a = gvtst(1, 0); /* jump over 'then' when condition false */
        block(0);
        if (tok == TOK_ELSE) {
            d = gjmp(0);
            gsym(a);
            next();
            block(0);
            gsym(d); /* patch else jmp */
        } else {
            gsym(a);
        }
        prev_scope_s(&o);

    } else if (t == TOK_WHILE) {
        new_scope_s(&o);
        d = gind();
        skip('(');
        gexpr();
        skip(')');
        a = gvtst(1, 0); /* break chain starts with the exit test */
        b = 0;           /* continue chain */
        lblock(&a, &b);
        gjmp_addr(d);
        gsym_addr(b, d);
        gsym(a);
        prev_scope_s(&o);

    } else if (t == '{') {
        if (debug_modes)
            tcc_debug_stabn(tcc_state, N_LBRAC, ind - func_ind);
        new_scope(&o);

        /* handle local labels declarations */
        while (tok == TOK_LABEL) {
            do {
                next();
                if (tok < TOK_UIDENT)
                    expect("label identifier");
                label_push(&local_label_stack, tok, LABEL_DECLARED);
                next();
            } while (tok == ',');
            skip(';');
        }

        while (tok != '}') {
            decl(VT_LOCAL);
            if (tok != '}') {
                if (flags & STMT_EXPR)
                    vpop(); /* only the last statement's value survives */
                block(flags | STMT_COMPOUND);
            }
        }

        prev_scope(&o, flags & STMT_EXPR);
        if (debug_modes)
            tcc_debug_stabn(tcc_state, N_RBRAC, ind - func_ind);
        if (local_scope)
            next();
        else if (!nocode_wanted)
            check_func_return(); /* end of function body */

    } else if (t == TOK_RETURN) {
        b = (func_vt.t & VT_BTYPE) != VT_VOID;
        if (tok != ';') {
            gexpr();
            if (b) {
                gen_assign_cast(&func_vt);
            } else {
                if (vtop->type.t != VT_VOID)
                    tcc_warning("void function returns a value");
                vtop--;
            }
        } else if (b) {
            tcc_warning("'return' with no value");
            b = 0;
        }
        leave_scope(root_scope);
        if (b)
            gfunc_return(&func_vt);
        skip(';');
        /* jump unless last stmt in top-level block */
        if (tok != '}' || local_scope != 1)
            rsym = gjmp(rsym);
        if (debug_modes)
            tcc_tcov_block_end (tcc_state, -1);
        CODE_OFF();

    } else if (t == TOK_BREAK) {
        /* compute jump */
        if (!cur_scope->bsym)
            tcc_error("cannot break");
        if (cur_switch && cur_scope->bsym == cur_switch->bsym)
            leave_scope(cur_switch->scope);
        else
            leave_scope(loop_scope);
        *cur_scope->bsym = gjmp(*cur_scope->bsym);
        skip(';');

    } else if (t == TOK_CONTINUE) {
        /* compute jump */
        if (!cur_scope->csym)
            tcc_error("cannot continue");
        leave_scope(loop_scope);
        *cur_scope->csym = gjmp(*cur_scope->csym);
        skip(';');

    } else if (t == TOK_FOR) {
        new_scope(&o);

        skip('(');
        if (tok != ';') {
            /* c99 for-loop init decl? */
            if (!decl(VT_JMP)) {
                /* no, regular for-loop init expr */
                gexpr();
                vpop();
            }
        }
        skip(';');
        a = b = 0;
        c = d = gind(); /* c = condition addr, d = continue target */
        if (tok != ';') {
            gexpr();
            a = gvtst(1, 0);
        }
        skip(';');
        if (tok != ')') {
            /* increment expression: body jumps here (d), which falls
               back to the condition (c) */
            e = gjmp(0);
            d = gind();
            gexpr();
            vpop();
            gjmp_addr(c);
            gsym(e);
        }
        skip(')');
        lblock(&a, &b);
        gjmp_addr(d);
        gsym_addr(b, d);
        gsym(a);
        prev_scope(&o, 0);

    } else if (t == TOK_DO) {
        new_scope_s(&o);
        a = b = 0;
        d = gind();
        lblock(&a, &b);
        gsym(b);
        skip(TOK_WHILE);
        skip('(');
        gexpr();
        skip(')');
        skip(';');
        c = gvtst(0, 0); /* loop back while condition true */
        gsym_addr(c, d);
        gsym(a);
        prev_scope_s(&o);

    } else if (t == TOK_SWITCH) {
        struct switch_t *sw;

        sw = tcc_mallocz(sizeof *sw);
        sw->bsym = &a;
        sw->scope = cur_scope;
        sw->prev = cur_switch;
        sw->nocode_wanted = nocode_wanted;
        cur_switch = sw;

        new_scope_s(&o);
        skip('(');
        gexpr();
        skip(')');
        if (!is_integer_btype(vtop->type.t & VT_BTYPE))
            tcc_error("switch value not an integer");
        sw->sv = *vtop--; /* save switch value */
        a = 0;
        b = gjmp(0); /* jump to first case */
        lblock(&a, NULL);
        a = gjmp(a); /* add implicit break */
        /* case lookup */
        gsym(b);
        prev_scope_s(&o);
        if (sw->nocode_wanted)
            goto skip_switch;
        case_sort(sw);
        sw->bsym = NULL; /* marker for 32bit:gen_opl() */
        vpushv(&sw->sv);
        gv(RC_INT);
        d = gcase(sw->p, sw->n, 0);
        vpop();
        if (sw->def_sym)
            gsym_addr(d, sw->def_sym);
        else
            gsym(d);
    skip_switch:
        /* break label */
        gsym(a);
        end_switch();

    } else if (t == TOK_CASE) {
        struct case_t *cr;
        if (!cur_switch)
            expect("switch");
        cr = tcc_malloc(sizeof(struct case_t));
        dynarray_add(&cur_switch->p, &cur_switch->n, cr);
        t = cur_switch->sv.type.t;
        cr->v1 = cr->v2 = value64(expr_const64(), t);
        if (tok == TOK_DOTS && gnu_ext) {
            /* GNU case-range extension: case v1 ... v2: */
            next();
            cr->v2 = value64(expr_const64(), t);
            if (case_cmp(cr->v2, cr->v1) < 0)
                tcc_warning("empty case range");
        }
        /* case and default are unreachable from a switch under nocode_wanted */
        if (!cur_switch->nocode_wanted)
            cr->ind = gind();
        cr->line = file->line_num;
        skip(':');
        goto block_after_label;

    } else if (t == TOK_DEFAULT) {
        if (!cur_switch)
            expect("switch");
        if (cur_switch->def_sym)
            tcc_error("too many 'default'");
        cur_switch->def_sym = cur_switch->nocode_wanted ? -1 : gind();
        skip(':');
        goto block_after_label;

    } else if (t == TOK_GOTO) {
        vla_restore(cur_scope->vla.locorig);
        if (tok == '*' && gnu_ext) {
            /* computed goto */
            next();
            gexpr();
            if ((vtop->type.t & VT_BTYPE) != VT_PTR)
                expect("pointer");
            ggoto();

        } else if (tok >= TOK_UIDENT) {
            s = label_find(tok);
            /* put forward definition if needed */
            if (!s)
                s = label_push(&global_label_stack, tok, LABEL_FORWARD);
            else if (s->r == LABEL_DECLARED)
                s->r = LABEL_FORWARD;

            if (s->r & LABEL_FORWARD) {
                /* start new goto chain for cleanups, linked via label->next */
                if (cur_scope->cl.s && !nocode_wanted) {
                    sym_push2(&pending_gotos, SYM_FIELD, 0, cur_scope->cl.n);
                    pending_gotos->prev_tok = s;
                    s = sym_push2(&s->next, SYM_FIELD, 0, 0);
                    pending_gotos->next = s;
                }
                s->jnext = gjmp(s->jnext);
            } else {
                /* backward goto: run cleanups of scopes being left */
                try_call_cleanup_goto(s->cleanupstate);
                gjmp_addr(s->jind);
            }
            next();

        } else {
            expect("label identifier");
        }
        skip(';');

    } else if (t == TOK_ASM1 || t == TOK_ASM2 || t == TOK_ASM3) {
        asm_instr();

    } else {
        if (tok == ':' && t >= TOK_UIDENT) {
            /* label case */
            next();
            s = label_find(t);
            if (s) {
                if (s->r == LABEL_DEFINED)
                    tcc_error("duplicate label '%s'", get_tok_str(s->v, NULL));
                s->r = LABEL_DEFINED;
                if (s->next) {
                    Sym *pcl; /* pending cleanup goto */
                    for (pcl = s->next; pcl; pcl = pcl->prev)
                        gsym(pcl->jnext);
                    sym_pop(&s->next, NULL, 0);
                } else
                    gsym(s->jnext);
            } else {
                s = label_push(&global_label_stack, t, LABEL_DEFINED);
            }
            s->jind = gind();
            s->cleanupstate = cur_scope->cl.s;

    block_after_label:
              {
                /* Accept attributes after labels (e.g. 'unused') */
                AttributeDef ad_tmp;
                parse_attribute(&ad_tmp);
              }
            if (debug_modes)
                tcc_tcov_reset_ind(tcc_state);
            vla_restore(cur_scope->vla.loc);

            if (tok != '}') {
                if (0 == (flags & STMT_COMPOUND))
                    goto again;
                /* C23: insert implicit null-statement within compound statement */
            } else {
                /* we accept this, but it is a mistake */
                tcc_warning_c(warn_all)("deprecated use of label at end of compound statement");
            }
        } else {
            /* expression case */
            if (t != ';') {
                unget_tok(t);
    expr:
                if (flags & STMT_EXPR) {
                    vpop();
                    gexpr();
                } else {
                    gexpr();
                    vpop();
                }
                skip(';');
            }
        }
    }

    if (debug_modes)
        tcc_tcov_check_line (tcc_state, 0), tcc_tcov_block_end (tcc_state, 0);
}
7369 /* This skips over a stream of tokens containing balanced {} and ()
7370 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
7371 with a '{'). If STR then allocates and stores the skipped tokens
7372 in *STR. This doesn't check if () and {} are nested correctly,
7373 i.e. "({)}" is accepted. */
/* This skips over a stream of tokens containing balanced {} and ()
   pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
   with a '{').  If STR then allocates and stores the skipped tokens
   in *STR.  This doesn't check if () and {} are nested correctly,
   i.e. "({)}" is accepted. */
static void skip_or_save_block(TokenString **str)
{
    int braces = tok == '{';
    int level = 0;
    if (str)
        *str = tok_str_alloc();

    while (1) {
        int t = tok;
        if (level == 0
            && (t == ','
                || t == ';'
                || t == '}'
                || t == ')'
                || t == ']'))
            break;
        if (t == TOK_EOF) {
            /* EOF is an error when saving or inside unbalanced pairs */
            if (str || level > 0)
                tcc_error("unexpected end of file");
            else
                break;
        }
        if (str)
            tok_str_add_tok(*str);
        next();
        if (t == '{' || t == '(' || t == '[') {
            level++;
        } else if (t == '}' || t == ')' || t == ']') {
            level--;
            if (level == 0 && braces && t == '}')
                break;
        }
    }
    if (str)
        tok_str_add(*str, TOK_EOF); /* terminate the saved stream */
}
7411 #define EXPR_CONST 1
7412 #define EXPR_ANY 2
/* Parse one initializer element.  EXPR_CONST enforces a constant
   expression (for static storage); EXPR_ANY allows any assignment
   expression. */
static void parse_init_elem(int expr_type)
{
    int saved_global_expr;
    switch(expr_type) {
    case EXPR_CONST:
        /* compound literals must be allocated globally in this case */
        saved_global_expr = global_expr;
        global_expr = 1;
        expr_const1();
        global_expr = saved_global_expr;
        /* NOTE: symbols are accepted, as well as lvalue for anon symbols
           (compound literals). */
        if (((vtop->r & (VT_VALMASK | VT_LVAL)) != VT_CONST
             && ((vtop->r & (VT_SYM|VT_LVAL)) != (VT_SYM|VT_LVAL)
                 || vtop->sym->v < SYM_FIRST_ANOM))
#ifdef TCC_TARGET_PE
            || ((vtop->r & VT_SYM) && vtop->sym->a.dllimport)
#endif
            )
            tcc_error("initializer element is not constant");
        break;
    case EXPR_ANY:
        expr_eq();
        break;
    }
}
#if 1
/* Sanity check: an initializer write at 'offset' must stay within the
   space reserved so far (section data for globals, stack frame for
   locals), unless output is suppressed anyway. */
static void init_assert(init_params *p, int offset)
{
    if (p->sec ? !NODATA_WANTED && offset > p->sec->data_offset
               : !nocode_wanted && offset > p->local_offset)
        tcc_internal_error("initializer overflow");
}
#else
#define init_assert(sec, offset)
#endif
7452 /* put zeros for variable based init */
/* put zeros for variable based init: zero 'size' bytes at offset 'c'.
   Static data is already zero-filled; locals get a memset() call. */
static void init_putz(init_params *p, unsigned long c, int size)
{
    init_assert(p, c + size);
    if (p->sec) {
        /* nothing to do because globals are already set to zero */
    } else {
        vpush_helper_func(TOK_memset);
        vseti(VT_LOCAL, c);
        vpushi(0);
        vpushs(size);
#if defined TCC_TARGET_ARM && defined TCC_ARM_EABI
        vswap(); /* using __aeabi_memset(void*, size_t, int) */
#endif
        gfunc_call(3);
    }
}
7470 #define DIF_FIRST 1
7471 #define DIF_SIZE_ONLY 2
7472 #define DIF_HAVE_ELEM 4
7473 #define DIF_CLEAR 8
/* delete relocations for specified range c ... c + size. Unfortunately
   in very special cases, relocations may occur unordered */
/* Delete relocations for the byte range c ... c + size in section 'sec'
   (a designated initializer overwrote that range).  Compacts the
   relocation array in place since entries may be unordered. */
static void decl_design_delrels(Section *sec, int c, int size)
{
    ElfW_Rel *rel, *rel2, *rel_end;
    if (!sec || !sec->reloc)
        return;
    rel = rel2 = (ElfW_Rel*)sec->reloc->data;
    rel_end = (ElfW_Rel*)(sec->reloc->data + sec->reloc->data_offset);
    while (rel < rel_end) {
        if (rel->r_offset >= c && rel->r_offset < c + size) {
            /* drop this relocation */
            sec->reloc->data_offset -= sizeof *rel;
        } else {
            if (rel2 != rel)
                memcpy(rel2, rel, sizeof *rel);
            ++rel2;
        }
        ++rel;
    }
}
/* Grow the element count of a flexible array member as indices are
   initialized; error out if a flexible array (c < 0) appears where its
   size cannot be determined. */
static void decl_design_flex(init_params *p, Sym *ref, int index)
{
    if (ref == p->flex_array_ref) {
        if (index >= ref->c)
            ref->c = index + 1;
    } else if (ref->c < 0)
        tcc_error("flexible array has zero size in this context");
}
7505 /* t is the array or struct type. c is the array or struct
7506 address. cur_field is the pointer to the current
7507 field, for arrays the 'c' member contains the current start
7508 index. 'flags' is as in decl_initializer.
7509 'al' contains the already initialized length of the
7510 current container (starting at c). This returns the new length of that. */
7511 static int decl_designator(init_params *p, CType *type, unsigned long c,
7512 Sym **cur_field, int flags, int al)
7514 Sym *s, *f;
7515 int index, index_last, align, l, nb_elems, elem_size;
7516 unsigned long corig = c;
7518 elem_size = 0;
7519 nb_elems = 1;
7521 if (flags & DIF_HAVE_ELEM)
7522 goto no_designator;
7524 if (gnu_ext && tok >= TOK_UIDENT) {
7525 l = tok, next();
7526 if (tok == ':')
7527 goto struct_field;
7528 unget_tok(l);
7531 /* NOTE: we only support ranges for last designator */
7532 while (nb_elems == 1 && (tok == '[' || tok == '.')) {
7533 if (tok == '[') {
7534 if (!(type->t & VT_ARRAY))
7535 expect("array type");
7536 next();
7537 index = index_last = expr_const();
7538 if (tok == TOK_DOTS && gnu_ext) {
7539 next();
7540 index_last = expr_const();
7542 skip(']');
7543 s = type->ref;
7544 decl_design_flex(p, s, index_last);
7545 if (index < 0 || index_last >= s->c || index_last < index)
7546 tcc_error("index exceeds array bounds or range is empty");
7547 if (cur_field)
7548 (*cur_field)->c = index_last;
7549 type = pointed_type(type);
7550 elem_size = type_size(type, &align);
7551 c += index * elem_size;
7552 nb_elems = index_last - index + 1;
7553 } else {
7554 int cumofs;
7555 next();
7556 l = tok;
7557 struct_field:
7558 next();
7559 f = find_field(type, l, &cumofs);
7560 if (cur_field)
7561 *cur_field = f;
7562 type = &f->type;
7563 c += cumofs;
7565 cur_field = NULL;
7567 if (!cur_field) {
7568 if (tok == '=') {
7569 next();
7570 } else if (!gnu_ext) {
7571 expect("=");
7573 } else {
7574 no_designator:
7575 if (type->t & VT_ARRAY) {
7576 index = (*cur_field)->c;
7577 s = type->ref;
7578 decl_design_flex(p, s, index);
7579 if (index >= s->c)
7580 tcc_error("too many initializers");
7581 type = pointed_type(type);
7582 elem_size = type_size(type, &align);
7583 c += index * elem_size;
7584 } else {
7585 f = *cur_field;
7586 /* Skip bitfield padding. Also with size 32 and 64. */
7587 while (f && (f->v & SYM_FIRST_ANOM) &&
7588 is_integer_btype(f->type.t & VT_BTYPE))
7589 *cur_field = f = f->next;
7590 if (!f)
7591 tcc_error("too many initializers");
7592 type = &f->type;
7593 c += f->c;
7597 if (!elem_size) /* for structs */
7598 elem_size = type_size(type, &align);
7600 /* Using designators the same element can be initialized more
7601 than once. In that case we need to delete possibly already
7602 existing relocations. */
7603 if (!(flags & DIF_SIZE_ONLY) && c - corig < al) {
7604 decl_design_delrels(p->sec, c, elem_size * nb_elems);
7605 flags &= ~DIF_CLEAR; /* mark stack dirty too */
7608 decl_initializer(p, type, c, flags & ~DIF_FIRST);
7610 if (!(flags & DIF_SIZE_ONLY) && nb_elems > 1) {
7611 Sym aref = {0};
7612 CType t1;
7613 int i;
7614 if (p->sec || (type->t & VT_ARRAY)) {
7615 /* make init_putv/vstore believe it were a struct */
7616 aref.c = elem_size;
7617 t1.t = VT_STRUCT, t1.ref = &aref;
7618 type = &t1;
7620 if (p->sec)
7621 vpush_ref(type, p->sec, c, elem_size);
7622 else
7623 vset(type, VT_LOCAL|VT_LVAL, c);
7624 for (i = 1; i < nb_elems; i++) {
7625 vdup();
7626 init_putv(p, type, c + elem_size * i);
7628 vpop();
7631 c += nb_elems * elem_size;
7632 if (c - corig > al)
7633 al = c - corig;
7634 return al;
7637 /* store a value or an expression directly in global data or in local array */
7638 static void init_putv(init_params *p, CType *type, unsigned long c)
7640 int bt;
7641 void *ptr;
7642 CType dtype;
7643 int size, align;
7644 Section *sec = p->sec;
7645 uint64_t val;
7647 dtype = *type;
7648 dtype.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
7650 size = type_size(type, &align);
7651 if (type->t & VT_BITFIELD)
7652 size = (BIT_POS(type->t) + BIT_SIZE(type->t) + 7) / 8;
7653 init_assert(p, c + size);
7655 if (sec) {
7656 /* XXX: not portable */
7657 /* XXX: generate error if incorrect relocation */
7658 gen_assign_cast(&dtype);
7659 bt = type->t & VT_BTYPE;
7661 if ((vtop->r & VT_SYM)
7662 && bt != VT_PTR
7663 && (bt != (PTR_SIZE == 8 ? VT_LLONG : VT_INT)
7664 || (type->t & VT_BITFIELD))
7665 && !((vtop->r & VT_CONST) && vtop->sym->v >= SYM_FIRST_ANOM)
7667 tcc_error("initializer element is not computable at load time");
7669 if (NODATA_WANTED) {
7670 vtop--;
7671 return;
7674 ptr = sec->data + c;
7675 val = vtop->c.i;
7677 /* XXX: make code faster ? */
7678 if ((vtop->r & (VT_SYM|VT_CONST)) == (VT_SYM|VT_CONST) &&
7679 vtop->sym->v >= SYM_FIRST_ANOM &&
7680 /* XXX This rejects compound literals like
7681 '(void *){ptr}'. The problem is that '&sym' is
7682 represented the same way, which would be ruled out
7683 by the SYM_FIRST_ANOM check above, but also '"string"'
7684 in 'char *p = "string"' is represented the same
7685 with the type being VT_PTR and the symbol being an
7686 anonymous one. That is, there's no difference in vtop
7687 between '(void *){x}' and '&(void *){x}'. Ignore
7688 pointer typed entities here. Hopefully no real code
7689 will ever use compound literals with scalar type. */
7690 (vtop->type.t & VT_BTYPE) != VT_PTR) {
7691 /* These come from compound literals, memcpy stuff over. */
7692 Section *ssec;
7693 ElfSym *esym;
7694 ElfW_Rel *rel;
7695 esym = elfsym(vtop->sym);
7696 ssec = tcc_state->sections[esym->st_shndx];
7697 memmove (ptr, ssec->data + esym->st_value + (int)vtop->c.i, size);
7698 if (ssec->reloc) {
7699 /* We need to copy over all memory contents, and that
7700 includes relocations. Use the fact that relocs are
7701 created it order, so look from the end of relocs
7702 until we hit one before the copied region. */
7703 unsigned long relofs = ssec->reloc->data_offset;
7704 while (relofs >= sizeof(*rel)) {
7705 relofs -= sizeof(*rel);
7706 rel = (ElfW_Rel*)(ssec->reloc->data + relofs);
7707 if (rel->r_offset >= esym->st_value + size)
7708 continue;
7709 if (rel->r_offset < esym->st_value)
7710 break;
7711 put_elf_reloca(symtab_section, sec,
7712 c + rel->r_offset - esym->st_value,
7713 ELFW(R_TYPE)(rel->r_info),
7714 ELFW(R_SYM)(rel->r_info),
7715 #if PTR_SIZE == 8
7716 rel->r_addend
7717 #else
7719 #endif
7723 } else {
7724 if (type->t & VT_BITFIELD) {
7725 int bit_pos, bit_size, bits, n;
7726 unsigned char *p, v, m;
7727 bit_pos = BIT_POS(vtop->type.t);
7728 bit_size = BIT_SIZE(vtop->type.t);
7729 p = (unsigned char*)ptr + (bit_pos >> 3);
7730 bit_pos &= 7, bits = 0;
7731 while (bit_size) {
7732 n = 8 - bit_pos;
7733 if (n > bit_size)
7734 n = bit_size;
7735 v = val >> bits << bit_pos;
7736 m = ((1 << n) - 1) << bit_pos;
7737 *p = (*p & ~m) | (v & m);
7738 bits += n, bit_size -= n, bit_pos = 0, ++p;
7740 } else
7741 switch(bt) {
7742 case VT_BOOL:
7743 *(char *)ptr = val != 0;
7744 break;
7745 case VT_BYTE:
7746 *(char *)ptr = val;
7747 break;
7748 case VT_SHORT:
7749 write16le(ptr, val);
7750 break;
7751 case VT_FLOAT:
7752 write32le(ptr, val);
7753 break;
7754 case VT_DOUBLE:
7755 write64le(ptr, val);
7756 break;
7757 case VT_LDOUBLE:
7758 #if defined TCC_IS_NATIVE_387
7759 /* Host and target platform may be different but both have x87.
7760 On windows, tcc does not use VT_LDOUBLE, except when it is a
7761 cross compiler. In this case a mingw gcc as host compiler
7762 comes here with 10-byte long doubles, while msvc or tcc won't.
7763 tcc itself can still translate by asm.
7764 In any case we avoid possibly random bytes 11 and 12.
7766 if (sizeof (long double) >= 10)
7767 memcpy(ptr, &vtop->c.ld, 10);
7768 #ifdef __TINYC__
7769 else if (sizeof (long double) == sizeof (double))
7770 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr) : "m" (vtop->c.ld));
7771 #endif
7772 else
7773 #endif
7774 /* For other platforms it should work natively, but may not work
7775 for cross compilers */
7776 if (sizeof(long double) == LDOUBLE_SIZE)
7777 memcpy(ptr, &vtop->c.ld, LDOUBLE_SIZE);
7778 else if (sizeof(double) == LDOUBLE_SIZE)
7779 *(double*)ptr = (double)vtop->c.ld;
7780 else if (0 == memcmp(ptr, &vtop->c.ld, LDOUBLE_SIZE))
7781 ; /* nothing to do for 0.0 */
7782 #ifndef TCC_CROSS_TEST
7783 else
7784 tcc_error("can't cross compile long double constants");
7785 #endif
7786 break;
7788 #if PTR_SIZE == 8
7789 /* intptr_t may need a reloc too, see tcctest.c:relocation_test() */
7790 case VT_LLONG:
7791 case VT_PTR:
7792 if (vtop->r & VT_SYM)
7793 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
7794 else
7795 write64le(ptr, val);
7796 break;
7797 case VT_INT:
7798 write32le(ptr, val);
7799 break;
7800 #else
7801 case VT_LLONG:
7802 write64le(ptr, val);
7803 break;
7804 case VT_PTR:
7805 case VT_INT:
7806 if (vtop->r & VT_SYM)
7807 greloc(sec, vtop->sym, c, R_DATA_PTR);
7808 write32le(ptr, val);
7809 break;
7810 #endif
7811 default:
7812 //tcc_internal_error("unexpected type");
7813 break;
7816 vtop--;
7817 } else {
7818 vset(&dtype, VT_LOCAL|VT_LVAL, c);
7819 vswap();
7820 vstore();
7821 vpop();
7825 /* 't' contains the type and storage info. 'c' is the offset of the
7826 object in section 'sec'. If 'sec' is NULL, it means stack based
7827 allocation. 'flags & DIF_FIRST' is true if array '{' must be read (multi
7828 dimension implicit array init handling). 'flags & DIF_SIZE_ONLY' is true if
7829 size only evaluation is wanted (only for arrays). */
7830 static void decl_initializer(init_params *p, CType *type, unsigned long c, int flags)
7832 int len, n, no_oblock, i;
7833 int size1, align1;
7834 Sym *s, *f;
7835 Sym indexsym;
7836 CType *t1;
7838 /* generate line number info */
7839 if (debug_modes && !(flags & DIF_SIZE_ONLY) && !p->sec)
7840 tcc_debug_line(tcc_state), tcc_tcov_check_line (tcc_state, 1);
7842 if (!(flags & DIF_HAVE_ELEM) && tok != '{' &&
7843 /* In case of strings we have special handling for arrays, so
7844 don't consume them as initializer value (which would commit them
7845 to some anonymous symbol). */
7846 tok != TOK_LSTR && tok != TOK_STR &&
7847 (!(flags & DIF_SIZE_ONLY)
7848 /* a struct may be initialized from a struct of same type, as in
7849 struct {int x,y;} a = {1,2}, b = {3,4}, c[] = {a,b};
7850 In that case we need to parse the element in order to check
7851 it for compatibility below */
7852 || (type->t & VT_BTYPE) == VT_STRUCT)
7854 int ncw_prev = nocode_wanted;
7855 if ((flags & DIF_SIZE_ONLY) && !p->sec)
7856 ++nocode_wanted;
7857 parse_init_elem(!p->sec ? EXPR_ANY : EXPR_CONST);
7858 nocode_wanted = ncw_prev;
7859 flags |= DIF_HAVE_ELEM;
7862 if (type->t & VT_ARRAY) {
7863 no_oblock = 1;
7864 if (((flags & DIF_FIRST) && tok != TOK_LSTR && tok != TOK_STR) ||
7865 tok == '{') {
7866 skip('{');
7867 no_oblock = 0;
7870 s = type->ref;
7871 n = s->c;
7872 t1 = pointed_type(type);
7873 size1 = type_size(t1, &align1);
7875 /* only parse strings here if correct type (otherwise: handle
7876 them as ((w)char *) expressions */
7877 if ((tok == TOK_LSTR &&
7878 #ifdef TCC_TARGET_PE
7879 (t1->t & VT_BTYPE) == VT_SHORT && (t1->t & VT_UNSIGNED)
7880 #else
7881 (t1->t & VT_BTYPE) == VT_INT
7882 #endif
7883 ) || (tok == TOK_STR && (t1->t & VT_BTYPE) == VT_BYTE)) {
7884 len = 0;
7885 cstr_reset(&initstr);
7886 if (size1 != (tok == TOK_STR ? 1 : sizeof(nwchar_t)))
7887 tcc_error("unhandled string literal merging");
7888 while (tok == TOK_STR || tok == TOK_LSTR) {
7889 if (initstr.size)
7890 initstr.size -= size1;
7891 if (tok == TOK_STR)
7892 len += tokc.str.size;
7893 else
7894 len += tokc.str.size / sizeof(nwchar_t);
7895 len--;
7896 cstr_cat(&initstr, tokc.str.data, tokc.str.size);
7897 next();
7899 if (tok != ')' && tok != '}' && tok != ',' && tok != ';'
7900 && tok != TOK_EOF) {
7901 /* Not a lone literal but part of a bigger expression. */
7902 unget_tok(size1 == 1 ? TOK_STR : TOK_LSTR);
7903 tokc.str.size = initstr.size;
7904 tokc.str.data = initstr.data;
7905 goto do_init_array;
7908 decl_design_flex(p, s, len);
7909 if (!(flags & DIF_SIZE_ONLY)) {
7910 int nb = n, ch;
7911 if (len < nb)
7912 nb = len;
7913 if (len > nb)
7914 tcc_warning("initializer-string for array is too long");
7915 /* in order to go faster for common case (char
7916 string in global variable, we handle it
7917 specifically */
7918 if (p->sec && size1 == 1) {
7919 init_assert(p, c + nb);
7920 if (!NODATA_WANTED)
7921 memcpy(p->sec->data + c, initstr.data, nb);
7922 } else {
7923 for(i=0;i<n;i++) {
7924 if (i >= nb) {
7925 /* only add trailing zero if enough storage (no
7926 warning in this case since it is standard) */
7927 if (flags & DIF_CLEAR)
7928 break;
7929 if (n - i >= 4) {
7930 init_putz(p, c + i * size1, (n - i) * size1);
7931 break;
7933 ch = 0;
7934 } else if (size1 == 1)
7935 ch = ((unsigned char *)initstr.data)[i];
7936 else
7937 ch = ((nwchar_t *)initstr.data)[i];
7938 vpushi(ch);
7939 init_putv(p, t1, c + i * size1);
7943 } else {
7945 do_init_array:
7946 indexsym.c = 0;
7947 f = &indexsym;
7949 do_init_list:
7950 /* zero memory once in advance */
7951 if (!(flags & (DIF_CLEAR | DIF_SIZE_ONLY))) {
7952 init_putz(p, c, n*size1);
7953 flags |= DIF_CLEAR;
7956 len = 0;
7957 /* GNU extension: if the initializer is empty for a flex array,
7958 it's size is zero. We won't enter the loop, so set the size
7959 now. */
7960 decl_design_flex(p, s, len);
7961 while (tok != '}' || (flags & DIF_HAVE_ELEM)) {
7962 len = decl_designator(p, type, c, &f, flags, len);
7963 flags &= ~DIF_HAVE_ELEM;
7964 if (type->t & VT_ARRAY) {
7965 ++indexsym.c;
7966 /* special test for multi dimensional arrays (may not
7967 be strictly correct if designators are used at the
7968 same time) */
7969 if (no_oblock && len >= n*size1)
7970 break;
7971 } else {
7972 if (s->type.t == VT_UNION)
7973 f = NULL;
7974 else
7975 f = f->next;
7976 if (no_oblock && f == NULL)
7977 break;
7980 if (tok == '}')
7981 break;
7982 skip(',');
7985 if (!no_oblock)
7986 skip('}');
7988 } else if ((flags & DIF_HAVE_ELEM)
7989 /* Use i_c_parameter_t, to strip toplevel qualifiers.
7990 The source type might have VT_CONSTANT set, which is
7991 of course assignable to non-const elements. */
7992 && is_compatible_unqualified_types(type, &vtop->type)) {
7993 goto one_elem;
7995 } else if ((type->t & VT_BTYPE) == VT_STRUCT) {
7996 no_oblock = 1;
7997 if ((flags & DIF_FIRST) || tok == '{') {
7998 skip('{');
7999 no_oblock = 0;
8001 s = type->ref;
8002 f = s->next;
8003 n = s->c;
8004 size1 = 1;
8005 goto do_init_list;
8007 } else if (tok == '{') {
8008 if (flags & DIF_HAVE_ELEM)
8009 skip(';');
8010 next();
8011 decl_initializer(p, type, c, flags & ~DIF_HAVE_ELEM);
8012 skip('}');
8014 } else one_elem: if ((flags & DIF_SIZE_ONLY)) {
8015 /* If we supported only ISO C we wouldn't have to accept calling
8016 this on anything than an array if DIF_SIZE_ONLY (and even then
8017 only on the outermost level, so no recursion would be needed),
8018 because initializing a flex array member isn't supported.
8019 But GNU C supports it, so we need to recurse even into
8020 subfields of structs and arrays when DIF_SIZE_ONLY is set. */
8021 /* just skip expression */
8022 if (flags & DIF_HAVE_ELEM)
8023 vpop();
8024 else
8025 skip_or_save_block(NULL);
8027 } else {
8028 if (!(flags & DIF_HAVE_ELEM)) {
8029 /* This should happen only when we haven't parsed
8030 the init element above for fear of committing a
8031 string constant to memory too early. */
8032 if (tok != TOK_STR && tok != TOK_LSTR)
8033 expect("string constant");
8034 parse_init_elem(!p->sec ? EXPR_ANY : EXPR_CONST);
8036 if (!p->sec && (flags & DIF_CLEAR) /* container was already zero'd */
8037 && (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST
8038 && vtop->c.i == 0
8039 && btype_size(type->t & VT_BTYPE) /* not for fp constants */
8041 vpop();
8042 else
8043 init_putv(p, type, c);
8047 /* parse an initializer for type 't' if 'has_init' is non zero, and
8048 allocate space in local or global data space ('r' is either
8049 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
8050 variable 'v' of scope 'scope' is declared before initializers
8051 are parsed. If 'v' is zero, then a reference to the new object
8052 is put in the value stack. If 'has_init' is 2, a special parsing
8053 is done to handle string constants. */
8054 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r,
8055 int has_init, int v, int global)
8057 int size, align, addr;
8058 TokenString *init_str = NULL;
8060 Section *sec;
8061 Sym *flexible_array;
8062 Sym *sym;
8063 int saved_nocode_wanted = nocode_wanted;
8064 #ifdef CONFIG_TCC_BCHECK
8065 int bcheck = tcc_state->do_bounds_check && !NODATA_WANTED;
8066 #endif
8067 init_params p = {0};
8069 /* Always allocate static or global variables */
8070 if (v && (r & VT_VALMASK) == VT_CONST)
8071 nocode_wanted |= DATA_ONLY_WANTED;
8073 flexible_array = NULL;
8074 size = type_size(type, &align);
8076 /* exactly one flexible array may be initialized, either the
8077 toplevel array or the last member of the toplevel struct */
8079 if (size < 0) {
8080 // error out except for top-level incomplete arrays
8081 // (arrays of incomplete types are handled in array parsing)
8082 if (!(type->t & VT_ARRAY))
8083 tcc_error("initialization of incomplete type");
8085 /* If the base type itself was an array type of unspecified size
8086 (like in 'typedef int arr[]; arr x = {1};') then we will
8087 overwrite the unknown size by the real one for this decl.
8088 We need to unshare the ref symbol holding that size. */
8089 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
8090 p.flex_array_ref = type->ref;
8092 } else if (has_init && (type->t & VT_BTYPE) == VT_STRUCT) {
8093 Sym *field = type->ref->next;
8094 if (field) {
8095 while (field->next)
8096 field = field->next;
8097 if (field->type.t & VT_ARRAY && field->type.ref->c < 0) {
8098 flexible_array = field;
8099 p.flex_array_ref = field->type.ref;
8100 size = -1;
8105 if (size < 0) {
8106 /* If unknown size, do a dry-run 1st pass */
8107 if (!has_init)
8108 tcc_error("unknown type size");
8109 if (has_init == 2) {
8110 /* only get strings */
8111 init_str = tok_str_alloc();
8112 while (tok == TOK_STR || tok == TOK_LSTR) {
8113 tok_str_add_tok(init_str);
8114 next();
8116 tok_str_add(init_str, TOK_EOF);
8117 } else
8118 skip_or_save_block(&init_str);
8119 unget_tok(0);
8121 /* compute size */
8122 begin_macro(init_str, 1);
8123 next();
8124 decl_initializer(&p, type, 0, DIF_FIRST | DIF_SIZE_ONLY);
8125 /* prepare second initializer parsing */
8126 macro_ptr = init_str->str;
8127 next();
8129 /* if still unknown size, error */
8130 size = type_size(type, &align);
8131 if (size < 0)
8132 tcc_error("unknown type size");
8134 /* If there's a flex member and it was used in the initializer
8135 adjust size. */
8136 if (flexible_array && flexible_array->type.ref->c > 0)
8137 size += flexible_array->type.ref->c
8138 * pointed_size(&flexible_array->type);
8141 /* take into account specified alignment if bigger */
8142 if (ad->a.aligned) {
8143 int speca = 1 << (ad->a.aligned - 1);
8144 if (speca > align)
8145 align = speca;
8146 } else if (ad->a.packed) {
8147 align = 1;
8150 if (!v && NODATA_WANTED)
8151 size = 0, align = 1;
8153 if ((r & VT_VALMASK) == VT_LOCAL) {
8154 sec = NULL;
8155 #ifdef CONFIG_TCC_BCHECK
8156 if (bcheck && v) {
8157 /* add padding between stack variables for bound checking */
8158 loc -= align;
8160 #endif
8161 loc = (loc - size) & -align;
8162 addr = loc;
8163 p.local_offset = addr + size;
8164 #ifdef CONFIG_TCC_BCHECK
8165 if (bcheck && v) {
8166 /* add padding between stack variables for bound checking */
8167 loc -= align;
8169 #endif
8170 if (v) {
8171 /* local variable */
8172 #ifdef CONFIG_TCC_ASM
8173 if (ad->asm_label) {
8174 int reg = asm_parse_regvar(ad->asm_label);
8175 if (reg >= 0)
8176 r = (r & ~VT_VALMASK) | reg;
8178 #endif
8179 sym = sym_push(v, type, r, addr);
8180 if (ad->cleanup_func) {
8181 Sym *cls = sym_push2(&all_cleanups,
8182 SYM_FIELD | ++cur_scope->cl.n, 0, 0);
8183 cls->prev_tok = sym;
8184 cls->cleanup_func = ad->cleanup_func;
8185 cls->next = cur_scope->cl.s;
8186 cur_scope->cl.s = cls;
8189 sym->a = ad->a;
8190 } else {
8191 /* push local reference */
8192 vset(type, r, addr);
8194 } else {
8195 sym = NULL;
8196 if (v && global) {
8197 /* see if the symbol was already defined */
8198 sym = sym_find(v);
8199 if (sym) {
8200 if (p.flex_array_ref && (sym->type.t & type->t & VT_ARRAY)
8201 && sym->type.ref->c > type->ref->c) {
8202 /* flex array was already declared with explicit size
8203 extern int arr[10];
8204 int arr[] = { 1,2,3 }; */
8205 type->ref->c = sym->type.ref->c;
8206 size = type_size(type, &align);
8208 patch_storage(sym, ad, type);
8209 /* we accept several definitions of the same global variable. */
8210 if (!has_init && sym->c && elfsym(sym)->st_shndx != SHN_UNDEF)
8211 goto no_alloc;
8215 /* allocate symbol in corresponding section */
8216 sec = ad->section;
8217 if (!sec) {
8218 CType *tp = type;
8219 while ((tp->t & (VT_BTYPE|VT_ARRAY)) == (VT_PTR|VT_ARRAY))
8220 tp = &tp->ref->type;
8221 if (tp->t & VT_CONSTANT) {
8222 sec = rodata_section;
8223 } else if (has_init) {
8224 sec = data_section;
8225 /*if (g_debug & 4)
8226 tcc_warning("rw data: %s", get_tok_str(v, 0));*/
8227 } else if (tcc_state->nocommon)
8228 sec = bss_section;
8231 if (sec) {
8232 addr = section_add(sec, size, align);
8233 #ifdef CONFIG_TCC_BCHECK
8234 /* add padding if bound check */
8235 if (bcheck)
8236 section_add(sec, 1, 1);
8237 #endif
8238 } else {
8239 addr = align; /* SHN_COMMON is special, symbol value is align */
8240 sec = common_section;
8243 if (v) {
8244 if (!sym) {
8245 sym = sym_push(v, type, r | VT_SYM, 0);
8246 patch_storage(sym, ad, NULL);
8248 /* update symbol definition */
8249 put_extern_sym(sym, sec, addr, size);
8250 } else {
8251 /* push global reference */
8252 vpush_ref(type, sec, addr, size);
8253 sym = vtop->sym;
8254 vtop->r |= r;
8257 #ifdef CONFIG_TCC_BCHECK
8258 /* handles bounds now because the symbol must be defined
8259 before for the relocation */
8260 if (bcheck) {
8261 addr_t *bounds_ptr;
8263 greloca(bounds_section, sym, bounds_section->data_offset, R_DATA_PTR, 0);
8264 /* then add global bound info */
8265 bounds_ptr = section_ptr_add(bounds_section, 2 * sizeof(addr_t));
8266 bounds_ptr[0] = 0; /* relocated */
8267 bounds_ptr[1] = size;
8269 #endif
8272 if (type->t & VT_VLA) {
8273 int a;
8275 if (NODATA_WANTED)
8276 goto no_alloc;
8278 /* save before-VLA stack pointer if needed */
8279 if (cur_scope->vla.num == 0) {
8280 if (cur_scope->prev && cur_scope->prev->vla.num) {
8281 cur_scope->vla.locorig = cur_scope->prev->vla.loc;
8282 } else {
8283 gen_vla_sp_save(loc -= PTR_SIZE);
8284 cur_scope->vla.locorig = loc;
8288 vpush_type_size(type, &a);
8289 gen_vla_alloc(type, a);
8290 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
8291 /* on _WIN64, because of the function args scratch area, the
8292 result of alloca differs from RSP and is returned in RAX. */
8293 gen_vla_result(addr), addr = (loc -= PTR_SIZE);
8294 #endif
8295 gen_vla_sp_save(addr);
8296 cur_scope->vla.loc = addr;
8297 cur_scope->vla.num++;
8298 } else if (has_init) {
8299 p.sec = sec;
8300 decl_initializer(&p, type, addr, DIF_FIRST);
8301 /* patch flexible array member size back to -1, */
8302 /* for possible subsequent similar declarations */
8303 if (flexible_array)
8304 flexible_array->type.ref->c = -1;
8307 no_alloc:
8308 /* restore parse state if needed */
8309 if (init_str) {
8310 end_macro();
8311 next();
8314 nocode_wanted = saved_nocode_wanted;
8317 /* generate vla code saved in post_type() */
8318 static void func_vla_arg_code(Sym *arg)
8320 int align;
8321 TokenString *vla_array_tok = NULL;
8323 if (arg->type.ref)
8324 func_vla_arg_code(arg->type.ref);
8326 if ((arg->type.t & VT_VLA) && arg->type.ref->vla_array_str) {
8327 loc -= type_size(&int_type, &align);
8328 loc &= -align;
8329 arg->type.ref->c = loc;
8331 unget_tok(0);
8332 vla_array_tok = tok_str_alloc();
8333 vla_array_tok->str = arg->type.ref->vla_array_str;
8334 begin_macro(vla_array_tok, 1);
8335 next();
8336 gexpr();
8337 end_macro();
8338 next();
8339 vpush_type_size(&arg->type.ref->type, &align);
8340 gen_op('*');
8341 vset(&int_type, VT_LOCAL|VT_LVAL, arg->type.ref->c);
8342 vswap();
8343 vstore();
8344 vpop();
8348 static void func_vla_arg(Sym *sym)
8350 Sym *arg;
8352 for (arg = sym->type.ref->next; arg; arg = arg->next)
8353 if ((arg->type.t & VT_BTYPE) == VT_PTR && (arg->type.ref->type.t & VT_VLA))
8354 func_vla_arg_code(arg->type.ref);
8357 /* parse a function defined by symbol 'sym' and generate its code in
8358 'cur_text_section' */
8359 static void gen_function(Sym *sym)
8361 struct scope f = { 0 };
8362 cur_scope = root_scope = &f;
8363 nocode_wanted = 0;
8365 ind = cur_text_section->data_offset;
8366 if (sym->a.aligned) {
8367 size_t newoff = section_add(cur_text_section, 0,
8368 1 << (sym->a.aligned - 1));
8369 gen_fill_nops(newoff - ind);
8372 funcname = get_tok_str(sym->v, NULL);
8373 func_ind = ind;
8374 func_vt = sym->type.ref->type;
8375 func_var = sym->type.ref->f.func_type == FUNC_ELLIPSIS;
8377 /* NOTE: we patch the symbol size later */
8378 put_extern_sym(sym, cur_text_section, ind, 0);
8380 if (sym->type.ref->f.func_ctor)
8381 add_array (tcc_state, ".init_array", sym->c);
8382 if (sym->type.ref->f.func_dtor)
8383 add_array (tcc_state, ".fini_array", sym->c);
8385 /* put debug symbol */
8386 tcc_debug_funcstart(tcc_state, sym);
8388 /* push a dummy symbol to enable local sym storage */
8389 sym_push2(&local_stack, SYM_FIELD, 0, 0);
8390 local_scope = 1; /* for function parameters */
8391 nb_temp_local_vars = 0;
8392 gfunc_prolog(sym);
8393 tcc_debug_prolog_epilog(tcc_state, 0);
8395 local_scope = 0;
8396 rsym = 0;
8397 func_vla_arg(sym);
8398 block(0);
8399 gsym(rsym);
8401 nocode_wanted = 0;
8402 /* reset local stack */
8403 pop_local_syms(NULL, 0);
8404 tcc_debug_prolog_epilog(tcc_state, 1);
8405 gfunc_epilog();
8407 /* end of function */
8408 tcc_debug_funcend(tcc_state, ind - func_ind);
8410 /* patch symbol size */
8411 elfsym(sym)->st_size = ind - func_ind;
8413 cur_text_section->data_offset = ind;
8414 local_scope = 0;
8415 label_pop(&global_label_stack, NULL, 0);
8416 sym_pop(&all_cleanups, NULL, 0);
8418 /* It's better to crash than to generate wrong code */
8419 cur_text_section = NULL;
8420 funcname = ""; /* for safety */
8421 func_vt.t = VT_VOID; /* for safety */
8422 func_var = 0; /* for safety */
8423 ind = 0; /* for safety */
8424 func_ind = -1;
8425 nocode_wanted = DATA_ONLY_WANTED;
8426 check_vstack();
8428 /* do this after funcend debug info */
8429 next();
8432 static void gen_inline_functions(TCCState *s)
8434 Sym *sym;
8435 int inline_generated, i;
8436 struct InlineFunc *fn;
8438 tcc_open_bf(s, ":inline:", 0);
8439 /* iterate while inline function are referenced */
8440 do {
8441 inline_generated = 0;
8442 for (i = 0; i < s->nb_inline_fns; ++i) {
8443 fn = s->inline_fns[i];
8444 sym = fn->sym;
8445 if (sym && (sym->c || !(sym->type.t & VT_INLINE))) {
8446 /* the function was used or forced (and then not internal):
8447 generate its code and convert it to a normal function */
8448 fn->sym = NULL;
8449 tccpp_putfile(fn->filename);
8450 begin_macro(fn->func_str, 1);
8451 next();
8452 cur_text_section = text_section;
8453 gen_function(sym);
8454 end_macro();
8456 inline_generated = 1;
8459 } while (inline_generated);
8460 tcc_close();
8463 static void free_inline_functions(TCCState *s)
8465 int i;
8466 /* free tokens of unused inline functions */
8467 for (i = 0; i < s->nb_inline_fns; ++i) {
8468 struct InlineFunc *fn = s->inline_fns[i];
8469 if (fn->sym)
8470 tok_str_free(fn->func_str);
8472 dynarray_reset(&s->inline_fns, &s->nb_inline_fns);
8475 static void do_Static_assert(void)
8477 int c;
8478 const char *msg;
8480 next();
8481 skip('(');
8482 c = expr_const();
8483 msg = "_Static_assert fail";
8484 if (tok == ',') {
8485 next();
8486 msg = parse_mult_str("string constant")->data;
8488 skip(')');
8489 if (c == 0)
8490 tcc_error("%s", msg);
8491 skip(';');
8494 /* 'l' is VT_LOCAL or VT_CONST to define default storage type
8495 or VT_CMP if parsing old style parameter list
8496 or VT_JMP if parsing c99 for decl: for (int i = 0, ...) */
8497 static int decl(int l)
8499 int v, has_init, r, oldint;
8500 CType type, btype;
8501 Sym *sym;
8502 AttributeDef ad, adbase;
8503 ElfSym *esym;
8505 while (1) {
8507 oldint = 0;
8508 if (!parse_btype(&btype, &adbase, l == VT_LOCAL)) {
8509 if (l == VT_JMP)
8510 return 0;
8511 /* skip redundant ';' if not in old parameter decl scope */
8512 if (tok == ';' && l != VT_CMP) {
8513 next();
8514 continue;
8516 if (tok == TOK_STATIC_ASSERT) {
8517 do_Static_assert();
8518 continue;
8520 if (l != VT_CONST)
8521 break;
8522 if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
8523 /* global asm block */
8524 asm_global_instr();
8525 continue;
8527 if (tok >= TOK_UIDENT) {
8528 /* special test for old K&R protos without explicit int
8529 type. Only accepted when defining global data */
8530 btype.t = VT_INT;
8531 oldint = 1;
8532 } else {
8533 if (tok != TOK_EOF)
8534 expect("declaration");
8535 break;
8539 if (tok == ';') {
8540 if ((btype.t & VT_BTYPE) == VT_STRUCT) {
8541 v = btype.ref->v;
8542 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) >= SYM_FIRST_ANOM)
8543 tcc_warning("unnamed struct/union that defines no instances");
8544 next();
8545 continue;
8547 if (IS_ENUM(btype.t)) {
8548 next();
8549 continue;
8553 while (1) { /* iterate thru each declaration */
8554 type = btype;
8555 ad = adbase;
8556 type_decl(&type, &ad, &v, TYPE_DIRECT);
8557 #if 0
8559 char buf[500];
8560 type_to_str(buf, sizeof(buf), &type, get_tok_str(v, NULL));
8561 printf("type = '%s'\n", buf);
8563 #endif
8564 if ((type.t & VT_BTYPE) == VT_FUNC) {
8565 if ((type.t & VT_STATIC) && (l != VT_CONST))
8566 tcc_error("function without file scope cannot be static");
8567 /* if old style function prototype, we accept a
8568 declaration list */
8569 sym = type.ref;
8570 if (sym->f.func_type == FUNC_OLD && l == VT_CONST) {
8571 func_vt = type;
8572 decl(VT_CMP);
8575 if ((type.t & (VT_EXTERN|VT_INLINE)) == (VT_EXTERN|VT_INLINE)) {
8576 /* always_inline functions must be handled as if they
8577 don't generate multiple global defs, even if extern
8578 inline, i.e. GNU inline semantics for those. Rewrite
8579 them into static inline. */
8580 if (tcc_state->gnu89_inline || sym->f.func_alwinl)
8581 type.t = (type.t & ~VT_EXTERN) | VT_STATIC;
8582 else
8583 type.t &= ~VT_INLINE; /* always compile otherwise */
8586 } else if (oldint) {
8587 tcc_warning("type defaults to int");
8590 if (gnu_ext && (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
8591 ad.asm_label = asm_label_instr();
8592 /* parse one last attribute list, after asm label */
8593 parse_attribute(&ad);
8594 #if 0
8595 /* gcc does not allow __asm__("label") with function definition,
8596 but why not ... */
8597 if (tok == '{')
8598 expect(";");
8599 #endif
8602 #ifdef TCC_TARGET_PE
8603 if (ad.a.dllimport || ad.a.dllexport) {
8604 if (type.t & VT_STATIC)
8605 tcc_error("cannot have dll linkage with static");
8606 if (type.t & VT_TYPEDEF) {
8607 tcc_warning("'%s' attribute ignored for typedef",
8608 ad.a.dllimport ? (ad.a.dllimport = 0, "dllimport") :
8609 (ad.a.dllexport = 0, "dllexport"));
8610 } else if (ad.a.dllimport) {
8611 if ((type.t & VT_BTYPE) == VT_FUNC)
8612 ad.a.dllimport = 0;
8613 else
8614 type.t |= VT_EXTERN;
8617 #endif
8618 if (tok == '{') {
8619 if (l != VT_CONST)
8620 tcc_error("cannot use local functions");
8621 if ((type.t & VT_BTYPE) != VT_FUNC)
8622 expect("function definition");
8624 /* reject abstract declarators in function definition
8625 make old style params without decl have int type */
8626 sym = type.ref;
8627 while ((sym = sym->next) != NULL) {
8628 if (!(sym->v & ~SYM_FIELD))
8629 expect("identifier");
8630 if (sym->type.t == VT_VOID)
8631 sym->type = int_type;
8634 /* apply post-declaraton attributes */
8635 merge_funcattr(&type.ref->f, &ad.f);
8637 /* put function symbol */
8638 type.t &= ~VT_EXTERN;
8639 sym = external_sym(v, &type, 0, &ad);
8641 /* static inline functions are just recorded as a kind
8642 of macro. Their code will be emitted at the end of
8643 the compilation unit only if they are used */
8644 if (sym->type.t & VT_INLINE) {
8645 struct InlineFunc *fn;
8646 fn = tcc_malloc(sizeof *fn + strlen(file->filename));
8647 strcpy(fn->filename, file->filename);
8648 fn->sym = sym;
8649 dynarray_add(&tcc_state->inline_fns,
8650 &tcc_state->nb_inline_fns, fn);
8651 skip_or_save_block(&fn->func_str);
8652 } else {
8653 /* compute text section */
8654 cur_text_section = ad.section;
8655 if (!cur_text_section)
8656 cur_text_section = text_section;
8657 else if (cur_text_section->sh_num > bss_section->sh_num)
8658 cur_text_section->sh_flags = text_section->sh_flags;
8659 gen_function(sym);
8661 break;
8662 } else {
8663 if (l == VT_CMP) {
8664 /* find parameter in function parameter list */
8665 for (sym = func_vt.ref->next; sym; sym = sym->next)
8666 if ((sym->v & ~SYM_FIELD) == v)
8667 goto found;
8668 tcc_error("declaration for parameter '%s' but no such parameter",
8669 get_tok_str(v, NULL));
8670 found:
8671 if (type.t & VT_STORAGE) /* 'register' is okay */
8672 tcc_error("storage class specified for '%s'",
8673 get_tok_str(v, NULL));
8674 if (sym->type.t != VT_VOID)
8675 tcc_error("redefinition of parameter '%s'",
8676 get_tok_str(v, NULL));
8677 convert_parameter_type(&type);
8678 sym->type = type;
8679 } else if (type.t & VT_TYPEDEF) {
8680 /* save typedefed type */
8681 /* XXX: test storage specifiers ? */
8682 sym = sym_find(v);
8683 if (sym && sym->sym_scope == local_scope) {
8684 if (!is_compatible_types(&sym->type, &type)
8685 || !(sym->type.t & VT_TYPEDEF))
8686 tcc_error("incompatible redefinition of '%s'",
8687 get_tok_str(v, NULL));
8688 sym->type = type;
8689 } else {
8690 sym = sym_push(v, &type, 0, 0);
8692 sym->a = ad.a;
8693 if ((type.t & VT_BTYPE) == VT_FUNC)
8694 merge_funcattr(&sym->type.ref->f, &ad.f);
8695 if (debug_modes)
8696 tcc_debug_typedef (tcc_state, sym);
8697 } else if ((type.t & VT_BTYPE) == VT_VOID
8698 && !(type.t & VT_EXTERN)) {
8699 tcc_error("declaration of void object");
8700 } else {
8701 r = 0;
8702 if ((type.t & VT_BTYPE) == VT_FUNC) {
8703 /* external function definition */
8704 /* specific case for func_call attribute */
8705 merge_funcattr(&type.ref->f, &ad.f);
8706 } else if (!(type.t & VT_ARRAY)) {
8707 /* not lvalue if array */
8708 r |= VT_LVAL;
8710 has_init = (tok == '=');
8711 if (has_init && (type.t & VT_VLA))
8712 tcc_error("variable length array cannot be initialized");
8714 if (((type.t & VT_EXTERN) && (!has_init || l != VT_CONST))
8715 || (type.t & VT_BTYPE) == VT_FUNC
8716 /* as with GCC, uninitialized global arrays with no size
8717 are considered extern: */
8718 || ((type.t & VT_ARRAY) && !has_init
8719 && l == VT_CONST && type.ref->c < 0)
8721 /* external variable or function */
8722 type.t |= VT_EXTERN;
8723 external_sym(v, &type, r, &ad);
8724 } else {
8725 if (l == VT_CONST || (type.t & VT_STATIC))
8726 r |= VT_CONST;
8727 else
8728 r |= VT_LOCAL;
8729 if (has_init)
8730 next();
8731 else if (l == VT_CONST)
8732 /* uninitialized global variables may be overridden */
8733 type.t |= VT_EXTERN;
8734 decl_initializer_alloc(&type, &ad, r, has_init, v, l == VT_CONST);
8737 if (ad.alias_target && l == VT_CONST) {
8738 /* Aliases need to be emitted when their target symbol
8739 is emitted, even if perhaps unreferenced.
8740 We only support the case where the base is already
8741 defined, otherwise we would need deferring to emit
8742 the aliases until the end of the compile unit. */
8743 esym = elfsym(sym_find(ad.alias_target));
8744 if (!esym)
8745 tcc_error("unsupported forward __alias__ attribute");
8746 put_extern_sym2(sym_find(v), esym->st_shndx,
8747 esym->st_value, esym->st_size, 1);
8750 if (tok != ',') {
8751 if (l == VT_JMP)
8752 return 1;
8753 skip(';');
8754 break;
8756 next();
8760 return 0;
8763 /* ------------------------------------------------------------------------- */
8764 #undef gjmp_addr
8765 #undef gjmp
8766 /* ------------------------------------------------------------------------- */