/* change which to command -v
   [tinycc.git] / tccgen.c
   blob c582c4ad39159bc7cf5cca4c30feb7eff0330417 */
/*
 *  TCC - Tiny C Compiler
 *
 *  Copyright (c) 2001-2004 Fabrice Bellard
 *
 *  This library is free software; you can redistribute it and/or
 *  modify it under the terms of the GNU Lesser General Public
 *  License as published by the Free Software Foundation; either
 *  version 2 of the License, or (at your option) any later version.
 *
 *  This library is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 *  Lesser General Public License for more details.
 *
 *  You should have received a copy of the GNU Lesser General Public
 *  License along with this library; if not, write to the Free Software
 *  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 */
21 #define USING_GLOBALS
22 #include "tcc.h"
24 /********************************************************/
25 /* global variables */
27 /* loc : local variable index
28 ind : output code index
29 rsym: return symbol
30 anon_sym: anonymous symbol index
32 ST_DATA int rsym, anon_sym, ind, loc;
34 ST_DATA Sym *global_stack;
35 ST_DATA Sym *local_stack;
36 ST_DATA Sym *define_stack;
37 ST_DATA Sym *global_label_stack;
38 ST_DATA Sym *local_label_stack;
40 static Sym *sym_free_first;
41 static void **sym_pools;
42 static int nb_sym_pools;
44 static Sym *all_cleanups, *pending_gotos;
45 static int local_scope;
46 static int in_sizeof;
47 static int constant_p;
48 ST_DATA char debug_modes;
50 ST_DATA SValue *vtop;
51 static SValue _vstack[1 + VSTACK_SIZE];
52 #define vstack (_vstack + 1)
54 ST_DATA int nocode_wanted; /* no code generation wanted */
55 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
56 #define DATA_ONLY_WANTED 0x80000000 /* ON outside of functions and for static initializers */
58 /* no code output after unconditional jumps such as with if (0) ... */
59 #define CODE_OFF_BIT 0x20000000
60 #define CODE_OFF() if(!nocode_wanted)(nocode_wanted |= CODE_OFF_BIT)
61 #define CODE_ON() (nocode_wanted &= ~CODE_OFF_BIT)
63 /* no code output when parsing sizeof()/typeof() etc. (using nocode_wanted++/--) */
64 #define NOEVAL_MASK 0x0000FFFF
65 #define NOEVAL_WANTED (nocode_wanted & NOEVAL_MASK)
67 /* no code output when parsing constant expressions */
68 #define CONST_WANTED_BIT 0x00010000
69 #define CONST_WANTED_MASK 0x0FFF0000
70 #define CONST_WANTED (nocode_wanted & CONST_WANTED_MASK)
72 ST_DATA int global_expr; /* true if compound literals must be allocated globally (used during initializers parsing */
73 ST_DATA CType func_vt; /* current function return type (used by return instruction) */
74 ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
75 ST_DATA int func_vc;
76 ST_DATA int func_ind;
77 ST_DATA const char *funcname;
78 ST_DATA CType int_type, func_old_type, char_type, char_pointer_type;
79 static CString initstr;
81 #if PTR_SIZE == 4
82 #define VT_SIZE_T (VT_INT | VT_UNSIGNED)
83 #define VT_PTRDIFF_T VT_INT
84 #elif LONG_SIZE == 4
85 #define VT_SIZE_T (VT_LLONG | VT_UNSIGNED)
86 #define VT_PTRDIFF_T VT_LLONG
87 #else
88 #define VT_SIZE_T (VT_LONG | VT_LLONG | VT_UNSIGNED)
89 #define VT_PTRDIFF_T (VT_LONG | VT_LLONG)
90 #endif
92 static struct switch_t {
93 struct case_t {
94 int64_t v1, v2;
95 int sym;
96 } **p; int n; /* list of case ranges */
97 int def_sym; /* default symbol */
98 int nocode_wanted;
99 int *bsym;
100 struct scope *scope;
101 struct switch_t *prev;
102 SValue sv;
103 } *cur_switch; /* current switch */
#define MAX_TEMP_LOCAL_VARIABLE_NUMBER 8
/* list of temporary local variables on the stack in current function */
static struct temp_local_variable {
    int location; /* offset on stack, SValue.c.i */
    short size;
    short align;
} arr_temp_local_vars[MAX_TEMP_LOCAL_VARIABLE_NUMBER];
static int nb_temp_local_vars;
114 static struct scope {
115 struct scope *prev;
116 struct { int loc, locorig, num; } vla;
117 struct { Sym *s; int n; } cl;
118 int *bsym, *csym;
119 Sym *lstk, *llstk;
120 } *cur_scope, *loop_scope, *root_scope;
122 typedef struct {
123 Section *sec;
124 int local_offset;
125 Sym *flex_array_ref;
126 } init_params;
128 #if 1
129 #define precedence_parser
130 static void init_prec(void);
131 #endif
133 static void gen_cast(CType *type);
134 static void gen_cast_s(int t);
135 static inline CType *pointed_type(CType *type);
136 static int is_compatible_types(CType *type1, CType *type2);
137 static int parse_btype(CType *type, AttributeDef *ad, int ignore_label);
138 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td);
139 static void parse_expr_type(CType *type);
140 static void init_putv(init_params *p, CType *type, unsigned long c);
141 static void decl_initializer(init_params *p, CType *type, unsigned long c, int flags);
142 static void block(int is_expr);
143 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
144 static int decl(int l);
145 static void expr_eq(void);
146 static void vpush_type_size(CType *type, int *a);
147 static int is_compatible_unqualified_types(CType *type1, CType *type2);
148 static inline int64_t expr_const64(void);
149 static void vpush64(int ty, unsigned long long v);
150 static void vpush(CType *type);
151 static int gvtst(int inv, int t);
152 static void gen_inline_functions(TCCState *s);
153 static void free_inline_functions(TCCState *s);
154 static void skip_or_save_block(TokenString **str);
155 static void gv_dup(void);
156 static int get_temp_local_var(int size,int align);
157 static void clear_temp_local_var_list();
158 static void cast_error(CType *st, CType *dt);
160 /* ------------------------------------------------------------------------- */
161 /* Automagical code suppression */
163 /* Clear 'nocode_wanted' at forward label if it was used */
164 ST_FUNC void gsym(int t)
166 if (t) {
167 gsym_addr(t, ind);
168 CODE_ON();
172 /* Clear 'nocode_wanted' if current pc is a label */
173 static int gind()
175 int t = ind;
176 CODE_ON();
177 if (debug_modes)
178 tcc_tcov_block_begin(tcc_state);
179 return t;
/* Set 'nocode_wanted' after unconditional (backwards) jump */
static void gjmp_addr_acs(int t)
{
    gjmp_addr(t);
    CODE_OFF();
}
/* Set 'nocode_wanted' after unconditional (forwards) jump */
static int gjmp_acs(int t)
{
    t = gjmp(t);
    CODE_OFF();
    return t;
}

/* These are #undef'd at the end of this file */
#define gjmp_addr gjmp_addr_acs
#define gjmp gjmp_acs
/* ------------------------------------------------------------------------- */
202 ST_INLN int is_float(int t)
204 int bt = t & VT_BTYPE;
205 return bt == VT_LDOUBLE
206 || bt == VT_DOUBLE
207 || bt == VT_FLOAT
208 || bt == VT_QFLOAT;
211 static inline int is_integer_btype(int bt)
213 return bt == VT_BYTE
214 || bt == VT_BOOL
215 || bt == VT_SHORT
216 || bt == VT_INT
217 || bt == VT_LLONG;
220 static int btype_size(int bt)
222 return bt == VT_BYTE || bt == VT_BOOL ? 1 :
223 bt == VT_SHORT ? 2 :
224 bt == VT_INT ? 4 :
225 bt == VT_LLONG ? 8 :
226 bt == VT_PTR ? PTR_SIZE : 0;
229 /* returns function return register from type */
230 static int R_RET(int t)
232 if (!is_float(t))
233 return REG_IRET;
234 #ifdef TCC_TARGET_X86_64
235 if ((t & VT_BTYPE) == VT_LDOUBLE)
236 return TREG_ST0;
237 #elif defined TCC_TARGET_RISCV64
238 if ((t & VT_BTYPE) == VT_LDOUBLE)
239 return REG_IRET;
240 #endif
241 return REG_FRET;
244 /* returns 2nd function return register, if any */
245 static int R2_RET(int t)
247 t &= VT_BTYPE;
248 #if PTR_SIZE == 4
249 if (t == VT_LLONG)
250 return REG_IRE2;
251 #elif defined TCC_TARGET_X86_64
252 if (t == VT_QLONG)
253 return REG_IRE2;
254 if (t == VT_QFLOAT)
255 return REG_FRE2;
256 #elif defined TCC_TARGET_RISCV64
257 if (t == VT_LDOUBLE)
258 return REG_IRE2;
259 #endif
260 return VT_CONST;
263 /* returns true for two-word types */
264 #define USING_TWO_WORDS(t) (R2_RET(t) != VT_CONST)
266 /* put function return registers to stack value */
267 static void PUT_R_RET(SValue *sv, int t)
269 sv->r = R_RET(t), sv->r2 = R2_RET(t);
272 /* returns function return register class for type t */
273 static int RC_RET(int t)
275 return reg_classes[R_RET(t)] & ~(RC_FLOAT | RC_INT);
278 /* returns generic register class for type t */
279 static int RC_TYPE(int t)
281 if (!is_float(t))
282 return RC_INT;
283 #ifdef TCC_TARGET_X86_64
284 if ((t & VT_BTYPE) == VT_LDOUBLE)
285 return RC_ST0;
286 if ((t & VT_BTYPE) == VT_QFLOAT)
287 return RC_FRET;
288 #elif defined TCC_TARGET_RISCV64
289 if ((t & VT_BTYPE) == VT_LDOUBLE)
290 return RC_INT;
291 #endif
292 return RC_FLOAT;
295 /* returns 2nd register class corresponding to t and rc */
296 static int RC2_TYPE(int t, int rc)
298 if (!USING_TWO_WORDS(t))
299 return 0;
300 #ifdef RC_IRE2
301 if (rc == RC_IRET)
302 return RC_IRE2;
303 #endif
304 #ifdef RC_FRE2
305 if (rc == RC_FRET)
306 return RC_FRE2;
307 #endif
308 if (rc & RC_FLOAT)
309 return RC_FLOAT;
310 return RC_INT;
313 /* we use our own 'finite' function to avoid potential problems with
314 non standard math libs */
315 /* XXX: endianness dependent */
316 ST_FUNC int ieee_finite(double d)
318 int p[4];
319 memcpy(p, &d, sizeof(double));
320 return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
/* compiling intel long double natively */
#if (defined __i386__ || defined __x86_64__) \
    && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
# define TCC_IS_NATIVE_387
#endif
329 ST_FUNC void test_lvalue(void)
331 if (!(vtop->r & VT_LVAL))
332 expect("lvalue");
335 ST_FUNC void check_vstack(void)
337 if (vtop != vstack - 1)
338 tcc_error("internal compiler error: vstack leak (%d)",
339 (int)(vtop - vstack + 1));
/* vstack debugging aid */
#if 0
/* dump 'b' stack entries starting at vtop[-a], labeled with 'lbl' */
void pv (const char *lbl, int a, int b)
{
    int i;
    for (i = a; i < a + b; ++i) {
        SValue *p = &vtop[-i];
        printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
            lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
    }
}
#endif
355 /* ------------------------------------------------------------------------- */
356 /* initialize vstack and types. This must be done also for tcc -E */
357 ST_FUNC void tccgen_init(TCCState *s1)
359 vtop = vstack - 1;
360 memset(vtop, 0, sizeof *vtop);
362 /* define some often used types */
363 int_type.t = VT_INT;
365 char_type.t = VT_BYTE;
366 if (s1->char_is_unsigned)
367 char_type.t |= VT_UNSIGNED;
368 char_pointer_type = char_type;
369 mk_pointer(&char_pointer_type);
371 func_old_type.t = VT_FUNC;
372 func_old_type.ref = sym_push(SYM_FIELD, &int_type, 0, 0);
373 func_old_type.ref->f.func_call = FUNC_CDECL;
374 func_old_type.ref->f.func_type = FUNC_OLD;
375 #ifdef precedence_parser
376 init_prec();
377 #endif
378 cstr_new(&initstr);
381 ST_FUNC int tccgen_compile(TCCState *s1)
383 cur_text_section = NULL;
384 funcname = "";
385 func_ind = -1;
386 anon_sym = SYM_FIRST_ANOM;
387 nocode_wanted = DATA_ONLY_WANTED; /* no code outside of functions */
388 local_scope = 0;
389 debug_modes = (s1->do_debug ? 1 : 0) | s1->test_coverage << 1;
391 tcc_debug_start(s1);
392 tcc_tcov_start (s1);
393 #ifdef TCC_TARGET_ARM
394 arm_init(s1);
395 #endif
396 #ifdef INC_DEBUG
397 printf("%s: **** new file\n", file->filename);
398 #endif
399 parse_flags = PARSE_FLAG_PREPROCESS | PARSE_FLAG_TOK_NUM | PARSE_FLAG_TOK_STR;
400 next();
401 decl(VT_CONST);
402 gen_inline_functions(s1);
403 check_vstack();
404 /* end of translation unit info */
405 tcc_debug_end(s1);
406 tcc_tcov_end(s1);
407 return 0;
410 ST_FUNC void tccgen_finish(TCCState *s1)
412 tcc_debug_end(s1); /* just in case of errors: free memory */
413 free_inline_functions(s1);
414 sym_pop(&global_stack, NULL, 0);
415 sym_pop(&local_stack, NULL, 0);
416 /* free preprocessor macros */
417 free_defines(NULL);
418 /* free sym_pools */
419 dynarray_reset(&sym_pools, &nb_sym_pools);
420 sym_free_first = NULL;
421 global_label_stack = local_label_stack = NULL;
422 cstr_free(&initstr);
423 dynarray_reset(&stk_data, &nb_stk_data);
426 /* ------------------------------------------------------------------------- */
427 ST_FUNC ElfSym *elfsym(Sym *s)
429 if (!s || !s->c)
430 return NULL;
431 return &((ElfSym *)symtab_section->data)[s->c];
434 /* apply storage attributes to Elf symbol */
435 ST_FUNC void update_storage(Sym *sym)
437 ElfSym *esym;
438 int sym_bind, old_sym_bind;
440 esym = elfsym(sym);
441 if (!esym)
442 return;
444 if (sym->a.visibility)
445 esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1))
446 | sym->a.visibility;
448 if (sym->type.t & (VT_STATIC | VT_INLINE))
449 sym_bind = STB_LOCAL;
450 else if (sym->a.weak)
451 sym_bind = STB_WEAK;
452 else
453 sym_bind = STB_GLOBAL;
454 old_sym_bind = ELFW(ST_BIND)(esym->st_info);
455 if (sym_bind != old_sym_bind) {
456 esym->st_info = ELFW(ST_INFO)(sym_bind, ELFW(ST_TYPE)(esym->st_info));
459 #ifdef TCC_TARGET_PE
460 if (sym->a.dllimport)
461 esym->st_other |= ST_PE_IMPORT;
462 if (sym->a.dllexport)
463 esym->st_other |= ST_PE_EXPORT;
464 #endif
466 #if 0
467 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
468 get_tok_str(sym->v, NULL),
469 sym_bind == STB_WEAK ? 'w' : sym_bind == STB_LOCAL ? 'l' : 'g',
470 sym->a.visibility,
471 sym->a.dllexport,
472 sym->a.dllimport
474 #endif
477 /* ------------------------------------------------------------------------- */
478 /* update sym->c so that it points to an external symbol in section
479 'section' with value 'value' */
481 ST_FUNC void put_extern_sym2(Sym *sym, int sh_num,
482 addr_t value, unsigned long size,
483 int can_add_underscore)
485 int sym_type, sym_bind, info, other, t;
486 ElfSym *esym;
487 const char *name;
488 char buf1[256];
490 if (!sym->c) {
491 name = get_tok_str(sym->v, NULL);
492 t = sym->type.t;
493 if ((t & VT_BTYPE) == VT_FUNC) {
494 sym_type = STT_FUNC;
495 } else if ((t & VT_BTYPE) == VT_VOID) {
496 sym_type = STT_NOTYPE;
497 if ((t & (VT_BTYPE|VT_ASM_FUNC)) == VT_ASM_FUNC)
498 sym_type = STT_FUNC;
499 } else {
500 sym_type = STT_OBJECT;
502 if (t & (VT_STATIC | VT_INLINE))
503 sym_bind = STB_LOCAL;
504 else
505 sym_bind = STB_GLOBAL;
506 other = 0;
508 #ifdef TCC_TARGET_PE
509 if (sym_type == STT_FUNC && sym->type.ref) {
510 Sym *ref = sym->type.ref;
511 if (ref->a.nodecorate) {
512 can_add_underscore = 0;
514 if (ref->f.func_call == FUNC_STDCALL && can_add_underscore) {
515 sprintf(buf1, "_%s@%d", name, ref->f.func_args * PTR_SIZE);
516 name = buf1;
517 other |= ST_PE_STDCALL;
518 can_add_underscore = 0;
521 #endif
523 if (sym->asm_label) {
524 name = get_tok_str(sym->asm_label, NULL);
525 can_add_underscore = 0;
528 if (tcc_state->leading_underscore && can_add_underscore) {
529 buf1[0] = '_';
530 pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
531 name = buf1;
534 info = ELFW(ST_INFO)(sym_bind, sym_type);
535 sym->c = put_elf_sym(symtab_section, value, size, info, other, sh_num, name);
537 if (debug_modes)
538 tcc_debug_extern_sym(tcc_state, sym, sh_num, sym_bind, sym_type);
540 } else {
541 esym = elfsym(sym);
542 esym->st_value = value;
543 esym->st_size = size;
544 esym->st_shndx = sh_num;
546 update_storage(sym);
549 ST_FUNC void put_extern_sym(Sym *sym, Section *s, addr_t value, unsigned long size)
551 if (nocode_wanted && (NODATA_WANTED || (s && s == cur_text_section)))
552 return;
553 put_extern_sym2(sym, s ? s->sh_num : SHN_UNDEF, value, size, 1);
556 /* add a new relocation entry to symbol 'sym' in section 's' */
557 ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
558 addr_t addend)
560 int c = 0;
562 if (nocode_wanted && s == cur_text_section)
563 return;
565 if (sym) {
566 if (0 == sym->c)
567 put_extern_sym(sym, NULL, 0, 0);
568 c = sym->c;
571 /* now we can add ELF relocation info */
572 put_elf_reloca(symtab_section, s, offset, type, c, addend);
#if PTR_SIZE == 4
/* convenience wrapper: relocation without addend */
ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
{
    greloca(s, sym, offset, type, 0);
}
#endif
582 /* ------------------------------------------------------------------------- */
583 /* symbol allocator */
584 static Sym *__sym_malloc(void)
586 Sym *sym_pool, *sym, *last_sym;
587 int i;
589 sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
590 dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);
592 last_sym = sym_free_first;
593 sym = sym_pool;
594 for(i = 0; i < SYM_POOL_NB; i++) {
595 sym->next = last_sym;
596 last_sym = sym;
597 sym++;
599 sym_free_first = last_sym;
600 return last_sym;
603 static inline Sym *sym_malloc(void)
605 Sym *sym;
606 #ifndef SYM_DEBUG
607 sym = sym_free_first;
608 if (!sym)
609 sym = __sym_malloc();
610 sym_free_first = sym->next;
611 return sym;
612 #else
613 sym = tcc_malloc(sizeof(Sym));
614 return sym;
615 #endif
618 ST_INLN void sym_free(Sym *sym)
620 #ifndef SYM_DEBUG
621 sym->next = sym_free_first;
622 sym_free_first = sym;
623 #else
624 tcc_free(sym);
625 #endif
628 /* push, without hashing */
629 ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, int c)
631 Sym *s;
633 s = sym_malloc();
634 memset(s, 0, sizeof *s);
635 s->v = v;
636 s->type.t = t;
637 s->c = c;
638 /* add in stack */
639 s->prev = *ps;
640 *ps = s;
641 return s;
644 /* find a symbol and return its associated structure. 's' is the top
645 of the symbol stack */
646 ST_FUNC Sym *sym_find2(Sym *s, int v)
648 while (s) {
649 if (s->v == v)
650 return s;
651 else if (s->v == -1)
652 return NULL;
653 s = s->prev;
655 return NULL;
658 /* structure lookup */
659 ST_INLN Sym *struct_find(int v)
661 v -= TOK_IDENT;
662 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
663 return NULL;
664 return table_ident[v]->sym_struct;
667 /* find an identifier */
668 ST_INLN Sym *sym_find(int v)
670 v -= TOK_IDENT;
671 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
672 return NULL;
673 return table_ident[v]->sym_identifier;
676 static int sym_scope(Sym *s)
678 if (IS_ENUM_VAL (s->type.t))
679 return s->type.ref->sym_scope;
680 else
681 return s->sym_scope;
684 /* push a given symbol on the symbol stack */
685 ST_FUNC Sym *sym_push(int v, CType *type, int r, int c)
687 Sym *s, **ps;
688 TokenSym *ts;
690 if (local_stack)
691 ps = &local_stack;
692 else
693 ps = &global_stack;
694 s = sym_push2(ps, v, type->t, c);
695 s->type.ref = type->ref;
696 s->r = r;
697 /* don't record fields or anonymous symbols */
698 /* XXX: simplify */
699 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
700 /* record symbol in token array */
701 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
702 if (v & SYM_STRUCT)
703 ps = &ts->sym_struct;
704 else
705 ps = &ts->sym_identifier;
706 s->prev_tok = *ps;
707 *ps = s;
708 s->sym_scope = local_scope;
709 if (s->prev_tok && sym_scope(s->prev_tok) == s->sym_scope)
710 tcc_error("redeclaration of '%s'",
711 get_tok_str(v & ~SYM_STRUCT, NULL));
713 return s;
716 /* push a global identifier */
717 ST_FUNC Sym *global_identifier_push(int v, int t, int c)
719 Sym *s, **ps;
720 s = sym_push2(&global_stack, v, t, c);
721 s->r = VT_CONST | VT_SYM;
722 /* don't record anonymous symbol */
723 if (v < SYM_FIRST_ANOM) {
724 ps = &table_ident[v - TOK_IDENT]->sym_identifier;
725 /* modify the top most local identifier, so that sym_identifier will
726 point to 's' when popped; happens when called from inline asm */
727 while (*ps != NULL && (*ps)->sym_scope)
728 ps = &(*ps)->prev_tok;
729 s->prev_tok = *ps;
730 *ps = s;
732 return s;
735 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
736 pop them yet from the list, but do remove them from the token array. */
737 ST_FUNC void sym_pop(Sym **ptop, Sym *b, int keep)
739 Sym *s, *ss, **ps;
740 TokenSym *ts;
741 int v;
743 s = *ptop;
744 while(s != b) {
745 ss = s->prev;
746 v = s->v;
747 /* remove symbol in token array */
748 /* XXX: simplify */
749 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
750 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
751 if (v & SYM_STRUCT)
752 ps = &ts->sym_struct;
753 else
754 ps = &ts->sym_identifier;
755 *ps = s->prev_tok;
757 if (!keep)
758 sym_free(s);
759 s = ss;
761 if (!keep)
762 *ptop = b;
765 /* label lookup */
766 ST_FUNC Sym *label_find(int v)
768 v -= TOK_IDENT;
769 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
770 return NULL;
771 return table_ident[v]->sym_label;
774 ST_FUNC Sym *label_push(Sym **ptop, int v, int flags)
776 Sym *s, **ps;
777 s = sym_push2(ptop, v, VT_STATIC, 0);
778 s->r = flags;
779 ps = &table_ident[v - TOK_IDENT]->sym_label;
780 if (ptop == &global_label_stack) {
781 /* modify the top most local identifier, so that
782 sym_identifier will point to 's' when popped */
783 while (*ps != NULL)
784 ps = &(*ps)->prev_tok;
786 s->prev_tok = *ps;
787 *ps = s;
788 return s;
791 /* pop labels until element last is reached. Look if any labels are
792 undefined. Define symbols if '&&label' was used. */
793 ST_FUNC void label_pop(Sym **ptop, Sym *slast, int keep)
795 Sym *s, *s1;
796 for(s = *ptop; s != slast; s = s1) {
797 s1 = s->prev;
798 if (s->r == LABEL_DECLARED) {
799 tcc_warning_c(warn_all)("label '%s' declared but not used", get_tok_str(s->v, NULL));
800 } else if (s->r == LABEL_FORWARD) {
801 tcc_error("label '%s' used but not defined",
802 get_tok_str(s->v, NULL));
803 } else {
804 if (s->c) {
805 /* define corresponding symbol. A size of
806 1 is put. */
807 put_extern_sym(s, cur_text_section, s->jnext, 1);
810 /* remove label */
811 if (s->r != LABEL_GONE)
812 table_ident[s->v - TOK_IDENT]->sym_label = s->prev_tok;
813 if (!keep)
814 sym_free(s);
815 else
816 s->r = LABEL_GONE;
818 if (!keep)
819 *ptop = slast;
822 /* ------------------------------------------------------------------------- */
823 static void vcheck_cmp(void)
825 /* cannot let cpu flags if other instruction are generated. Also
826 avoid leaving VT_JMP anywhere except on the top of the stack
827 because it would complicate the code generator.
829 Don't do this when nocode_wanted. vtop might come from
830 !nocode_wanted regions (see 88_codeopt.c) and transforming
831 it to a register without actually generating code is wrong
832 as their value might still be used for real. All values
833 we push under nocode_wanted will eventually be popped
834 again, so that the VT_CMP/VT_JMP value will be in vtop
835 when code is unsuppressed again. */
837 /* However if it's just automatic suppression via CODE_OFF/ON()
838 then it seems that we better let things work undisturbed.
839 How can it work at all under nocode_wanted? Well, gv() will
840 actually clear it at the gsym() in load()/VT_JMP in the
841 generator backends */
843 if (vtop->r == VT_CMP && 0 == (nocode_wanted & ~CODE_OFF_BIT))
844 gv(RC_INT);
847 static void vsetc(CType *type, int r, CValue *vc)
849 if (vtop >= vstack + (VSTACK_SIZE - 1))
850 tcc_error("memory full (vstack)");
851 vcheck_cmp();
852 vtop++;
853 vtop->type = *type;
854 vtop->r = r;
855 vtop->r2 = VT_CONST;
856 vtop->c = *vc;
857 vtop->sym = NULL;
860 ST_FUNC void vswap(void)
862 SValue tmp;
864 vcheck_cmp();
865 tmp = vtop[0];
866 vtop[0] = vtop[-1];
867 vtop[-1] = tmp;
870 /* pop stack value */
871 ST_FUNC void vpop(void)
873 int v;
874 v = vtop->r & VT_VALMASK;
875 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
876 /* for x86, we need to pop the FP stack */
877 if (v == TREG_ST0) {
878 o(0xd8dd); /* fstp %st(0) */
879 } else
880 #endif
881 if (v == VT_CMP) {
882 /* need to put correct jump if && or || without test */
883 gsym(vtop->jtrue);
884 gsym(vtop->jfalse);
886 vtop--;
889 /* push constant of type "type" with useless value */
890 static void vpush(CType *type)
892 vset(type, VT_CONST, 0);
895 /* push arbitrary 64bit constant */
896 static void vpush64(int ty, unsigned long long v)
898 CValue cval;
899 CType ctype;
900 ctype.t = ty;
901 ctype.ref = NULL;
902 cval.i = v;
903 vsetc(&ctype, VT_CONST, &cval);
906 /* push integer constant */
907 ST_FUNC void vpushi(int v)
909 vpush64(VT_INT, v);
912 /* push a pointer sized constant */
913 static void vpushs(addr_t v)
915 vpush64(VT_SIZE_T, v);
918 /* push long long constant */
919 static inline void vpushll(long long v)
921 vpush64(VT_LLONG, v);
924 ST_FUNC void vset(CType *type, int r, int v)
926 CValue cval;
927 cval.i = v;
928 vsetc(type, r, &cval);
931 static void vseti(int r, int v)
933 CType type;
934 type.t = VT_INT;
935 type.ref = NULL;
936 vset(&type, r, v);
939 ST_FUNC void vpushv(SValue *v)
941 if (vtop >= vstack + (VSTACK_SIZE - 1))
942 tcc_error("memory full (vstack)");
943 vtop++;
944 *vtop = *v;
947 static void vdup(void)
949 vpushv(vtop);
952 /* rotate n first stack elements to the bottom
953 I1 ... In -> I2 ... In I1 [top is right]
955 ST_FUNC void vrotb(int n)
957 int i;
958 SValue tmp;
960 vcheck_cmp();
961 tmp = vtop[-n + 1];
962 for(i=-n+1;i!=0;i++)
963 vtop[i] = vtop[i+1];
964 vtop[0] = tmp;
967 /* rotate the n elements before entry e towards the top
968 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
970 ST_FUNC void vrote(SValue *e, int n)
972 int i;
973 SValue tmp;
975 vcheck_cmp();
976 tmp = *e;
977 for(i = 0;i < n - 1; i++)
978 e[-i] = e[-i - 1];
979 e[-n + 1] = tmp;
982 /* rotate n first stack elements to the top
983 I1 ... In -> In I1 ... I(n-1) [top is right]
985 ST_FUNC void vrott(int n)
987 vrote(vtop, n);
990 /* ------------------------------------------------------------------------- */
991 /* vtop->r = VT_CMP means CPU-flags have been set from comparison or test. */
993 /* called from generators to set the result from relational ops */
994 ST_FUNC void vset_VT_CMP(int op)
996 vtop->r = VT_CMP;
997 vtop->cmp_op = op;
998 vtop->jfalse = 0;
999 vtop->jtrue = 0;
1002 /* called once before asking generators to load VT_CMP to a register */
1003 static void vset_VT_JMP(void)
1005 int op = vtop->cmp_op;
1007 if (vtop->jtrue || vtop->jfalse) {
1008 int origt = vtop->type.t;
1009 /* we need to jump to 'mov $0,%R' or 'mov $1,%R' */
1010 int inv = op & (op < 2); /* small optimization */
1011 vseti(VT_JMP+inv, gvtst(inv, 0));
1012 vtop->type.t |= origt & (VT_UNSIGNED | VT_DEFSIGN);
1013 } else {
1014 /* otherwise convert flags (rsp. 0/1) to register */
1015 vtop->c.i = op;
1016 if (op < 2) /* doesn't seem to happen */
1017 vtop->r = VT_CONST;
1021 /* Set CPU Flags, doesn't yet jump */
1022 static void gvtst_set(int inv, int t)
1024 int *p;
1026 if (vtop->r != VT_CMP) {
1027 vpushi(0);
1028 gen_op(TOK_NE);
1029 if (vtop->r != VT_CMP) /* must be VT_CONST then */
1030 vset_VT_CMP(vtop->c.i != 0);
1033 p = inv ? &vtop->jfalse : &vtop->jtrue;
1034 *p = gjmp_append(*p, t);
1037 /* Generate value test
1039 * Generate a test for any value (jump, comparison and integers) */
1040 static int gvtst(int inv, int t)
1042 int op, x, u;
1044 gvtst_set(inv, t);
1045 t = vtop->jtrue, u = vtop->jfalse;
1046 if (inv)
1047 x = u, u = t, t = x;
1048 op = vtop->cmp_op;
1050 /* jump to the wanted target */
1051 if (op > 1)
1052 t = gjmp_cond(op ^ inv, t);
1053 else if (op != inv)
1054 t = gjmp(t);
1055 /* resolve complementary jumps to here */
1056 gsym(u);
1058 vtop--;
1059 return t;
1062 /* generate a zero or nozero test */
1063 static void gen_test_zero(int op)
1065 if (vtop->r == VT_CMP) {
1066 int j;
1067 if (op == TOK_EQ) {
1068 j = vtop->jfalse;
1069 vtop->jfalse = vtop->jtrue;
1070 vtop->jtrue = j;
1071 vtop->cmp_op ^= 1;
1073 } else {
1074 vpushi(0);
1075 gen_op(op);
1079 /* ------------------------------------------------------------------------- */
1080 /* push a symbol value of TYPE */
1081 ST_FUNC void vpushsym(CType *type, Sym *sym)
1083 CValue cval;
1084 cval.i = 0;
1085 vsetc(type, VT_CONST | VT_SYM, &cval);
1086 vtop->sym = sym;
1089 /* Return a static symbol pointing to a section */
1090 ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
1092 int v;
1093 Sym *sym;
1095 v = anon_sym++;
1096 sym = sym_push(v, type, VT_CONST | VT_SYM, 0);
1097 sym->type.t |= VT_STATIC;
1098 put_extern_sym(sym, sec, offset, size);
1099 return sym;
1102 /* push a reference to a section offset by adding a dummy symbol */
1103 static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
1105 vpushsym(type, get_sym_ref(type, sec, offset, size));
1108 /* define a new external reference to a symbol 'v' of type 'u' */
1109 ST_FUNC Sym *external_global_sym(int v, CType *type)
1111 Sym *s;
1113 s = sym_find(v);
1114 if (!s) {
1115 /* push forward reference */
1116 s = global_identifier_push(v, type->t | VT_EXTERN, 0);
1117 s->type.ref = type->ref;
1118 } else if (IS_ASM_SYM(s)) {
1119 s->type.t = type->t | (s->type.t & VT_EXTERN);
1120 s->type.ref = type->ref;
1121 update_storage(s);
1123 return s;
1126 /* create an external reference with no specific type similar to asm labels.
1127 This avoids type conflicts if the symbol is used from C too */
1128 ST_FUNC Sym *external_helper_sym(int v)
1130 CType ct = { VT_ASM_FUNC, NULL };
1131 return external_global_sym(v, &ct);
1134 /* push a reference to an helper function (such as memmove) */
1135 ST_FUNC void vpush_helper_func(int v)
1137 vpushsym(&func_old_type, external_helper_sym(v));
1140 /* Merge symbol attributes. */
1141 static void merge_symattr(struct SymAttr *sa, struct SymAttr *sa1)
1143 if (sa1->aligned && !sa->aligned)
1144 sa->aligned = sa1->aligned;
1145 sa->packed |= sa1->packed;
1146 sa->weak |= sa1->weak;
1147 sa->nodebug |= sa1->nodebug;
1148 if (sa1->visibility != STV_DEFAULT) {
1149 int vis = sa->visibility;
1150 if (vis == STV_DEFAULT
1151 || vis > sa1->visibility)
1152 vis = sa1->visibility;
1153 sa->visibility = vis;
1155 sa->dllexport |= sa1->dllexport;
1156 sa->nodecorate |= sa1->nodecorate;
1157 sa->dllimport |= sa1->dllimport;
1160 /* Merge function attributes. */
1161 static void merge_funcattr(struct FuncAttr *fa, struct FuncAttr *fa1)
1163 if (fa1->func_call && !fa->func_call)
1164 fa->func_call = fa1->func_call;
1165 if (fa1->func_type && !fa->func_type)
1166 fa->func_type = fa1->func_type;
1167 if (fa1->func_args && !fa->func_args)
1168 fa->func_args = fa1->func_args;
1169 if (fa1->func_noreturn)
1170 fa->func_noreturn = 1;
1171 if (fa1->func_ctor)
1172 fa->func_ctor = 1;
1173 if (fa1->func_dtor)
1174 fa->func_dtor = 1;
1177 /* Merge attributes. */
1178 static void merge_attr(AttributeDef *ad, AttributeDef *ad1)
1180 merge_symattr(&ad->a, &ad1->a);
1181 merge_funcattr(&ad->f, &ad1->f);
1183 if (ad1->section)
1184 ad->section = ad1->section;
1185 if (ad1->alias_target)
1186 ad->alias_target = ad1->alias_target;
1187 if (ad1->asm_label)
1188 ad->asm_label = ad1->asm_label;
1189 if (ad1->attr_mode)
1190 ad->attr_mode = ad1->attr_mode;
1193 /* Merge some type attributes. */
/* Reconcile a redeclaration: 'type' is the newly declared type for the
   already-known symbol 'sym'.  Errors out on a true redefinition (two
   non-extern definitions) or on incompatible types; otherwise merges
   extern/static/inline storage bits, completes old-style or extern
   declarations, and propagates function attributes into 'sym'. */
1194 static void patch_type(Sym *sym, CType *type)
1196 if (!(type->t & VT_EXTERN) || IS_ENUM_VAL(sym->type.t)) {
1197 if (!(sym->type.t & VT_EXTERN))
1198 tcc_error("redefinition of '%s'", get_tok_str(sym->v, NULL));
1199 sym->type.t &= ~VT_EXTERN;
1202 if (IS_ASM_SYM(sym)) {
1203 /* stay static if both are static */
1204 sym->type.t = type->t & (sym->type.t | ~VT_STATIC);
1205 sym->type.ref = type->ref;
1208 if (!is_compatible_types(&sym->type, type)) {
1209 tcc_error("incompatible types for redefinition of '%s'",
1210 get_tok_str(sym->v, NULL));
1212 } else if ((sym->type.t & VT_BTYPE) == VT_FUNC) {
1213 int static_proto = sym->type.t & VT_STATIC;
1214 /* warn if static follows non-static function declaration */
1215 if ((type->t & VT_STATIC) && !static_proto
1216 /* XXX this test for inline shouldn't be here. Until we
1217 implement gnu-inline mode again it silences a warning for
1218 mingw caused by our workarounds. */
1219 && !((type->t | sym->type.t) & VT_INLINE))
1220 tcc_warning("static storage ignored for redefinition of '%s'",
1221 get_tok_str(sym->v, NULL))
1223 /* set 'inline' if both agree or if one has static */
1224 if ((type->t | sym->type.t) & VT_INLINE) {
1225 if (!((type->t ^ sym->type.t) & VT_INLINE)
1226 || ((type->t | sym->type.t) & VT_STATIC))
1227 static_proto |= VT_INLINE;
1230 if (0 == (type->t & VT_EXTERN)) {
1231 struct FuncAttr f = sym->type.ref->f;
1232 /* put complete type, use static from prototype */
1233 sym->type.t = (type->t & ~(VT_STATIC|VT_INLINE)) | static_proto;
1234 sym->type.ref = type->ref;
1235 merge_funcattr(&sym->type.ref->f, &f);
1236 } else {
/* precedence is intentional here: (~VT_INLINE) | static_proto, i.e.
   drop the inline bit unless static_proto re-asserts it */
1237 sym->type.t &= ~VT_INLINE | static_proto;
1240 if (sym->type.ref->f.func_type == FUNC_OLD
1241 && type->ref->f.func_type != FUNC_OLD) {
1242 sym->type.ref = type->ref;
1245 } else {
1246 if ((sym->type.t & VT_ARRAY) && type->ref->c >= 0) {
1247 /* set array size if it was omitted in extern declaration */
1248 sym->type.ref->c = type->ref->c;
1250 if ((type->t ^ sym->type.t) & VT_STATIC)
1251 tcc_warning("storage mismatch for redefinition of '%s'",
1252 get_tok_str(sym->v, NULL));
1256 /* Merge some storage attributes. */
/* Apply the attributes of a redeclaration ('ad', optional 'type') to
   the existing symbol 'sym': first reconcile the types (patch_type),
   reject inconsistent dll linkage on PE targets, merge symbol
   attributes, adopt an asm label if given, then let the target update
   the symbol's storage (update_storage). */
1257 static void patch_storage(Sym *sym, AttributeDef *ad, CType *type)
1259 if (type)
1260 patch_type(sym, type);
1262 #ifdef TCC_TARGET_PE
1263 if (sym->a.dllimport != ad->a.dllimport)
1264 tcc_error("incompatible dll linkage for redefinition of '%s'",
1265 get_tok_str(sym->v, NULL));
1266 #endif
1267 merge_symattr(&sym->a, &ad->a);
1268 if (ad->asm_label)
1269 sym->asm_label = ad->asm_label;
1270 update_storage(sym);
1273 /* copy sym to other stack */
/* Duplicate 's0' and push the copy on the symbol stack '*ps'.
   Named (non-anonymous) symbols are additionally hooked into their
   identifier's sym_identifier chain so sym_find() can see the copy.
   Returns the newly allocated copy. */
1274 static Sym *sym_copy(Sym *s0, Sym **ps)
1276 Sym *s;
1277 s = sym_malloc(), *s = *s0;
1278 s->prev = *ps, *ps = s;
1279 if (s->v < SYM_FIRST_ANOM) {
1280 ps = &table_ident[s->v - TOK_IDENT]->sym_identifier;
1281 s->prev_tok = *ps, *ps = s;
1283 return s;
1286 /* copy s->type.ref to stack 'ps' for VT_FUNC and VT_PTR */
/* Deep-copy the type reference chain of 's' (function parameters,
   pointer targets, locally-scoped structs) onto stack 'ps', rebuilding
   s->type.ref to point at the copies.  Recurses so nested pointer or
   function types are copied too. */
1287 static void sym_copy_ref(Sym *s, Sym **ps)
1289 int bt = s->type.t & VT_BTYPE;
1290 if (bt == VT_FUNC || bt == VT_PTR || (bt == VT_STRUCT && s->sym_scope)) {
1291 Sym **sp = &s->type.ref;
1292 for (s = *sp, *sp = NULL; s; s = s->next) {
1293 Sym *s2 = sym_copy(s, ps);
1294 sp = &(*sp = s2)->next;
1295 sym_copy_ref(s2, ps);
1300 /* define a new external reference to a symbol 'v' */
/* Find or create the file-scope symbol for identifier 'v'.  Scoped
   (local) symbols of the same name are skipped via prev_tok.  A new
   symbol is pushed as a global forward reference (with its type chain
   copied to the global stack when we are inside a function); an
   existing one has the new storage/attributes merged in.  Non-function
   symbols referenced inside a function body get a copy on the local
   stack so local lookup works.  Returns the symbol to use. */
1301 static Sym *external_sym(int v, CType *type, int r, AttributeDef *ad)
1303 Sym *s;
1305 /* look for global symbol */
1306 s = sym_find(v);
1307 while (s && s->sym_scope)
1308 s = s->prev_tok;
1310 if (!s) {
1311 /* push forward reference */
1312 s = global_identifier_push(v, type->t, 0);
1313 s->r |= r;
1314 s->a = ad->a;
1315 s->asm_label = ad->asm_label;
1316 s->type.ref = type->ref;
1317 /* copy type to the global stack */
1318 if (local_stack)
1319 sym_copy_ref(s, &global_stack);
1320 } else {
1321 patch_storage(s, ad, type);
1323 /* push variables on local_stack if any */
1324 if (local_stack && (s->type.t & VT_BTYPE) != VT_FUNC)
1325 s = sym_copy(s, &local_stack);
1326 return s;
1329 /* save registers up to (vtop - n) stack entry */
/* Spill every register referenced by value-stack entries below the top
   'n' entries; the top 'n' entries themselves are left in registers. */
1330 ST_FUNC void save_regs(int n)
1332 SValue *p, *p1;
1333 for(p = vstack, p1 = vtop - n; p <= p1; p++)
1334 save_reg(p->r);
1337 /* save r to the memory stack, and mark it as being free */
/* Convenience wrapper: spill 'r' considering the whole value stack
   (n == 0 means no top entries are exempt). */
1338 ST_FUNC void save_reg(int r)
1340 save_reg_upstack(r, 0);
1343 /* save r to the memory stack, and mark it as being free,
1344 if seen up to (vtop - n) stack entry */
/* Spill register 'r' (and its high word r2, for two-word values) to a
   temporary stack slot, then retarget every value-stack entry that was
   referencing it.  Only one store is emitted ('l' caches the slot);
   all matching entries are redirected to the same slot.  No-ops for
   pseudo-registers (>= VT_CONST) and when code output is suppressed. */
1345 ST_FUNC void save_reg_upstack(int r, int n)
1347 int l, size, align, bt;
1348 SValue *p, *p1, sv;
1350 if ((r &= VT_VALMASK) >= VT_CONST)
1351 return;
1352 if (nocode_wanted)
1353 return;
1354 l = 0;
1355 for(p = vstack, p1 = vtop - n; p <= p1; p++) {
1356 if ((p->r & VT_VALMASK) == r || p->r2 == r) {
1357 /* must save value on stack if not already done */
1358 if (!l) {
1359 bt = p->type.t & VT_BTYPE;
1360 if (bt == VT_VOID)
1361 continue;
1362 if ((p->r & VT_LVAL) || bt == VT_FUNC)
1363 bt = VT_PTR;
1364 sv.type.t = bt;
1365 size = type_size(&sv.type, &align);
1366 l = get_temp_local_var(size,align);
1367 sv.r = VT_LOCAL | VT_LVAL;
1368 sv.c.i = l;
1369 store(p->r & VT_VALMASK, &sv);
1370 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1371 /* x86 specific: need to pop fp register ST0 if saved */
1372 if (r == TREG_ST0) {
1373 o(0xd8dd); /* fstp %st(0) */
1375 #endif
1376 /* special long long case */
1377 if (p->r2 < VT_CONST && USING_TWO_WORDS(bt)) {
1378 sv.c.i += PTR_SIZE;
1379 store(p->r2, &sv);
1382 /* mark that stack entry as being saved on the stack */
1383 if (p->r & VT_LVAL) {
1384 /* also clear the bounded flag because the
1385 relocation address of the function was stored in
1386 p->c.i */
1387 p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
1388 } else {
1389 p->r = VT_LVAL | VT_LOCAL;
1391 p->sym = NULL;
1392 p->r2 = VT_CONST;
1393 p->c.i = l;
1398 #ifdef TCC_TARGET_ARM
1399 /* find a register of class 'rc2' with at most one reference on stack.
1400 * If none, call get_reg(rc) */
/* ARM only.  A register with a single stack reference is acceptable
   because the caller is about to consume that reference; otherwise
   fall back to the generic allocator with class 'rc'. */
1401 ST_FUNC int get_reg_ex(int rc, int rc2)
1403 int r;
1404 SValue *p;
1406 for(r=0;r<NB_REGS;r++) {
1407 if (reg_classes[r] & rc2) {
1408 int n;
1409 n=0;
1410 for(p = vstack; p <= vtop; p++) {
1411 if ((p->r & VT_VALMASK) == r ||
1412 p->r2 == r)
1413 n++;
1415 if (n <= 1)
1416 return r;
1419 return get_reg(rc);
1421 #endif
1423 /* find a free register of class 'rc'. If none, save one register */
/* First pass: return any class-'rc' register not referenced by the
   value stack (under nocode_wanted any class member will do, since no
   code is emitted).  Second pass: spill the bottom-most stack entry
   holding a class-'rc' register (r2 checked before r) and return it. */
1424 ST_FUNC int get_reg(int rc)
1426 int r;
1427 SValue *p;
1429 /* find a free register */
1430 for(r=0;r<NB_REGS;r++) {
1431 if (reg_classes[r] & rc) {
1432 if (nocode_wanted)
1433 return r;
1434 for(p=vstack;p<=vtop;p++) {
1435 if ((p->r & VT_VALMASK) == r ||
1436 p->r2 == r)
1437 goto notfound;
1439 return r;
1441 notfound: ;
1444 /* no register left : free the first one on the stack (VERY
1445 IMPORTANT to start from the bottom to ensure that we don't
1446 spill registers used in gen_opi()) */
1447 for(p=vstack;p<=vtop;p++) {
1448 /* look at second register (if long long) */
1449 r = p->r2;
1450 if (r < VT_CONST && (reg_classes[r] & rc))
1451 goto save_found;
1452 r = p->r & VT_VALMASK;
1453 if (r < VT_CONST && (reg_classes[r] & rc)) {
1454 save_found:
1455 save_reg(r);
1456 return r;
1459 /* Should never comes here */
1460 return -1;
1463 /* find a free temporary local variable (return the offset on stack) match the size and align. If none, add new temporary stack variable*/
/* Scan the pool of previously created temporaries for one of matching
   size/alignment that no value-stack entry currently addresses; reuse
   it when found.  Otherwise extend the frame (decrement 'loc', aligned
   down) and, if the pool has room, record the new slot for later reuse.
   Returns the (negative) frame offset of the slot. */
1464 static int get_temp_local_var(int size,int align){
1465 int i;
1466 struct temp_local_variable *temp_var;
1467 int found_var;
1468 SValue *p;
1469 int r;
1470 char free;
1471 char found;
1472 found=0;
1473 for(i=0;i<nb_temp_local_vars;i++){
1474 temp_var=&arr_temp_local_vars[i];
1475 if(temp_var->size<size||align!=temp_var->align){
1476 continue;
1478 /*check if temp_var is free*/
1479 free=1;
1480 for(p=vstack;p<=vtop;p++) {
1481 r=p->r&VT_VALMASK;
1482 if(r==VT_LOCAL||r==VT_LLOCAL){
1483 if(p->c.i==temp_var->location){
1484 free=0;
1485 break;
1489 if(free){
1490 found_var=temp_var->location;
1491 found=1;
1492 break;
1495 if(!found){
1496 loc = (loc - size) & -align;
1497 if(nb_temp_local_vars<MAX_TEMP_LOCAL_VARIABLE_NUMBER){
1498 temp_var=&arr_temp_local_vars[i];
1499 temp_var->location=loc;
1500 temp_var->size=size;
1501 temp_var->align=align;
1502 nb_temp_local_vars++;
1504 found_var=loc;
1506 return found_var;
/* Empty the pool of reusable temporaries.  Only the bookkeeping count
   is reset; the stack space already reserved via 'loc' is untouched. */
1509 static void clear_temp_local_var_list(){
1510 nb_temp_local_vars=0;
1513 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1514 if needed */
/* No-op when source and destination coincide; otherwise spill whatever
   currently lives in 'r' and emit a register-to-register load. */
1515 static void move_reg(int r, int s, int t)
1517 SValue sv;
1519 if (r != s) {
1520 save_reg(r);
1521 sv.type.t = t;
1522 sv.type.ref = NULL;
1523 sv.r = s;
1524 sv.c.i = 0;
1525 load(r, &sv);
1529 /* get address of vtop (vtop MUST BE an lvalue) */
/* Drop the lvalue flag so vtop denotes the address itself.  A VT_LLOCAL
   entry (an lvalue whose address was itself spilled) is turned back
   into a plain local lvalue holding that address. */
1530 ST_FUNC void gaddrof(void)
1532 vtop->r &= ~VT_LVAL;
1533 /* tricky: if saved lvalue, then we can go back to lvalue */
1534 if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
1535 vtop->r = (vtop->r & ~VT_VALMASK) | VT_LOCAL | VT_LVAL;
1538 #ifdef CONFIG_TCC_BCHECK
1539 /* generate a bounded pointer addition */
/* Replace the pointer+offset on top of the stack by a call to
   __bound_ptr_add; the checked pointer comes back in REG_IRET with the
   VT_BOUNDED flag set.  A VT_LOCAL base is duplicated first so the
   original survives the call.  vtop->c.i records the relocation offset
   of the call so gen_bounded_ptr_deref() can patch it later. */
1540 static void gen_bounded_ptr_add(void)
1542 int save = (vtop[-1].r & VT_VALMASK) == VT_LOCAL;
1543 if (save) {
1544 vpushv(&vtop[-1]);
1545 vrott(3);
1547 vpush_helper_func(TOK___bound_ptr_add);
1548 vrott(3);
1549 gfunc_call(2);
1550 vtop -= save;
1551 vpushi(0);
1552 /* returned pointer is in REG_IRET */
1553 vtop->r = REG_IRET | VT_BOUNDED;
1554 if (nocode_wanted)
1555 return;
1556 /* relocation offset of the bounding function call point */
1557 vtop->c.i = (cur_text_section->reloc->data_offset - sizeof(ElfW_Rel));
1560 /* patch pointer addition in vtop so that pointer dereferencing is
1561 also tested */
/* Retarget the relocation recorded by gen_bounded_ptr_add() (offset in
   vtop->c.i) from __bound_ptr_add to the size-specific
   __bound_ptr_indirN helper, so the access itself is bounds checked.
   Sizes without a helper (e.g. struct member access) are left alone. */
1562 static void gen_bounded_ptr_deref(void)
1564 addr_t func;
1565 int size, align;
1566 ElfW_Rel *rel;
1567 Sym *sym;
1569 if (nocode_wanted)
1570 return;
1572 size = type_size(&vtop->type, &align);
1573 switch(size) {
1574 case 1: func = TOK___bound_ptr_indir1; break;
1575 case 2: func = TOK___bound_ptr_indir2; break;
1576 case 4: func = TOK___bound_ptr_indir4; break;
1577 case 8: func = TOK___bound_ptr_indir8; break;
1578 case 12: func = TOK___bound_ptr_indir12; break;
1579 case 16: func = TOK___bound_ptr_indir16; break;
1580 default:
1581 /* may happen with struct member access */
1582 return;
1584 sym = external_helper_sym(func);
1585 if (!sym->c)
1586 put_extern_sym(sym, NULL, 0, 0);
1587 /* patch relocation */
1588 /* XXX: find a better solution ? */
1589 rel = (ElfW_Rel *)(cur_text_section->reloc->data + vtop->c.i);
1590 rel->r_info = ELFW(R_INFO)(sym->c, ELFW(R_TYPE)(rel->r_info));
1593 /* generate lvalue bound code */
/* Ensure the lvalue on top of the stack is bounds checked: wrap its
   address in a (pointer + 0) __bound_ptr_add if not already bounded,
   then patch the call so the dereference size is verified.  The
   original value type is saved/restored around the pointer games. */
1594 static void gbound(void)
1596 CType type1;
1598 vtop->r &= ~VT_MUSTBOUND;
1599 /* if lvalue, then use checking code before dereferencing */
1600 if (vtop->r & VT_LVAL) {
1601 /* if not VT_BOUNDED value, then make one */
1602 if (!(vtop->r & VT_BOUNDED)) {
1603 /* must save type because we must set it to int to get pointer */
1604 type1 = vtop->type;
1605 vtop->type.t = VT_PTR;
1606 gaddrof();
1607 vpushi(0);
1608 gen_bounded_ptr_add();
1609 vtop->r |= VT_LVAL;
1610 vtop->type = type1;
1612 /* then check for dereferencing */
1613 gen_bounded_ptr_deref();
1617 /* we need to call __bound_ptr_add before we start to load function
1618 args into registers */
/* Bounds-check each of the top 'nb_args' stack entries that still
   carries VT_MUSTBOUND (rotating it to the top and back).  The called
   function itself (just below the args) gets special handling:
   setjmp-family calls are registered via __bound_setjmp, and alloca /
   NetBSD longjmp force the bounds epilog. */
1619 ST_FUNC void gbound_args(int nb_args)
1621 int i, v;
1622 SValue *sv;
1624 for (i = 1; i <= nb_args; ++i)
1625 if (vtop[1 - i].r & VT_MUSTBOUND) {
1626 vrotb(i);
1627 gbound();
1628 vrott(i);
1631 sv = vtop - nb_args;
1632 if (sv->r & VT_SYM) {
1633 v = sv->sym->v;
1634 if (v == TOK_setjmp
1635 || v == TOK__setjmp
1636 #ifndef TCC_TARGET_PE
1637 || v == TOK_sigsetjmp
1638 || v == TOK___sigsetjmp
1639 #endif
1641 vpush_helper_func(TOK___bound_setjmp);
1642 vpushv(sv + 1);
1643 gfunc_call(1);
1644 func_bound_add_epilog = 1;
1646 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64
1647 if (v == TOK_alloca)
1648 func_bound_add_epilog = 1;
1649 #endif
1650 #if TARGETOS_NetBSD
1651 if (v == TOK_longjmp) /* undo rename to __longjmp14 */
1652 sv->sym->asm_label = TOK___bound_longjmp;
1653 #endif
1657 /* Add bounds for local symbols from S to E (via ->prev) */
/* Emit (frame offset, size) pairs into lbounds_section for each local
   in the range whose address can escape: arrays, structs/unions, and
   anything explicitly address-taken.  Unnamed or non-stack symbols are
   skipped. */
1658 static void add_local_bounds(Sym *s, Sym *e)
1660 for (; s != e; s = s->prev) {
1661 if (!s->v || (s->r & VT_VALMASK) != VT_LOCAL)
1662 continue;
1663 /* Add arrays/structs/unions because we always take address */
1664 if ((s->type.t & VT_ARRAY)
1665 || (s->type.t & VT_BTYPE) == VT_STRUCT
1666 || s->a.addrtaken) {
1667 /* add local bound info */
1668 int align, size = type_size(&s->type, &align);
1669 addr_t *bounds_ptr = section_ptr_add(lbounds_section,
1670 2 * sizeof(addr_t));
1671 bounds_ptr[0] = s->c;
1672 bounds_ptr[1] = size;
1676 #endif
1678 /* Wrapper around sym_pop, that potentially also registers local bounds. */
/* Pops local symbols down to 'b'.  Before the pop: bounds info is
   recorded when bounds checking is active (skipped for variadic
   functions at function scope), and debug info is flushed when debug
   modes are enabled. */
1679 static void pop_local_syms(Sym *b, int keep)
1681 #ifdef CONFIG_TCC_BCHECK
1682 if (tcc_state->do_bounds_check && !keep && (local_scope || !func_var))
1683 add_local_bounds(local_stack, b);
1684 #endif
1685 if (debug_modes)
1686 tcc_add_debug_info (tcc_state, !local_scope, local_stack, b);
1687 sym_pop(&local_stack, b, keep);
/* Step helper for packed bitfield access: advance the byte address on
   vtop by 'o' and re-mark it as an unsigned-byte lvalue, so the
   load/store loops below can walk the field one byte at a time. */
1690 static void incr_bf_adr(int o)
1692 vtop->type = char_pointer_type;
1693 gaddrof();
1694 vpushs(o);
1695 gen_op('+');
1696 vtop->type.t = VT_BYTE | VT_UNSIGNED;
1697 vtop->r |= VT_LVAL;
1700 /* single-byte load mode for packed or otherwise unaligned bitfields */
/* Assemble the bitfield value byte by byte into an accumulator of type
   'type': each byte is shifted/masked to its contribution and OR-ed in.
   A final shift pair sign-extends the result for signed fields.  The
   stack comments (B = byte address, X/Y = accumulators) track the
   value-stack layout at each step. */
1701 static void load_packed_bf(CType *type, int bit_pos, int bit_size)
1703 int n, o, bits;
1704 save_reg_upstack(vtop->r, 1);
1705 vpush64(type->t & VT_BTYPE, 0); // B X
1706 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
1707 do {
1708 vswap(); // X B
1709 incr_bf_adr(o);
1710 vdup(); // X B B
1711 n = 8 - bit_pos;
1712 if (n > bit_size)
1713 n = bit_size;
1714 if (bit_pos)
1715 vpushi(bit_pos), gen_op(TOK_SHR), bit_pos = 0; // X B Y
1716 if (n < 8)
1717 vpushi((1 << n) - 1), gen_op('&');
1718 gen_cast(type);
1719 if (bits)
1720 vpushi(bits), gen_op(TOK_SHL);
1721 vrotb(3); // B Y X
1722 gen_op('|'); // B X
1723 bits += n, bit_size -= n, o = 1;
1724 } while (bit_size);
1725 vswap(), vpop();
1726 if (!(type->t & VT_UNSIGNED)) {
1727 n = ((type->t & VT_BTYPE) == VT_LLONG ? 64 : 32) - bits;
1728 vpushi(n), gen_op(TOK_SHL);
1729 vpushi(n), gen_op(TOK_SAR);
1733 /* single-byte store mode for packed or otherwise unaligned bitfields */
/* Mirror of load_packed_bf: scatter the value (V) into the target one
   byte at a time, merging each partial byte with the untouched bits of
   the existing memory byte.  A constant source is duplicated cheaply
   with vdup(); a register source needs gv_dup().  Stack comments track
   X = value, B = byte address, V/V1/V2/B1 = temporaries. */
1734 static void store_packed_bf(int bit_pos, int bit_size)
1736 int bits, n, o, m, c;
1737 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1738 vswap(); // X B
1739 save_reg_upstack(vtop->r, 1);
1740 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
1741 do {
1742 incr_bf_adr(o); // X B
1743 vswap(); //B X
1744 c ? vdup() : gv_dup(); // B V X
1745 vrott(3); // X B V
1746 if (bits)
1747 vpushi(bits), gen_op(TOK_SHR);
1748 if (bit_pos)
1749 vpushi(bit_pos), gen_op(TOK_SHL);
1750 n = 8 - bit_pos;
1751 if (n > bit_size)
1752 n = bit_size;
1753 if (n < 8) {
1754 m = ((1 << n) - 1) << bit_pos;
1755 vpushi(m), gen_op('&'); // X B V1
1756 vpushv(vtop-1); // X B V1 B
1757 vpushi(m & 0x80 ? ~m & 0x7f : ~m);
1758 gen_op('&'); // X B V1 B1
1759 gen_op('|'); // X B V2
1761 vdup(), vtop[-1] = vtop[-2]; // X B B V2
1762 vstore(), vpop(); // X B
1763 bits += n, bit_size -= n, bit_pos = 0, o = 1;
1764 } while (bit_size);
1765 vpop(), vpop();
/* Check whether the bitfield SValue 'sv' has an alternative access
   type recorded in its ref's auxtype.  If so (and it is not the
   VT_STRUCT marker for single-byte packed access), switch the SValue
   to that type and mark it as an lvalue.  Returns the auxtype
   (-1 = none, VT_STRUCT = packed), or 0 when there is no ref at all.
   bit_pos/bit_size are currently unused here. */
1768 static int adjust_bf(SValue *sv, int bit_pos, int bit_size)
1770 int t;
1771 if (0 == sv->type.ref)
1772 return 0;
1773 t = sv->type.ref->auxtype;
1774 if (t != -1 && t != VT_STRUCT) {
1775 sv->type.t = (sv->type.t & ~(VT_BTYPE | VT_LONG)) | t;
1776 sv->r |= VT_LVAL;
1778 return t;
1781 /* store vtop a register belonging to class 'rc'. lvalues are
1782 converted to values. Cannot be used if cannot be converted to
1783 register value (such as structures). */
/* Core register materialization.  Bitfields are first extracted (via
   packed byte loads or a shift pair) and then loaded recursively;
   float constants are spilled to rodata so they can be loaded from
   memory; two-word values (long long on 32-bit, qfloat) get a second
   register r2.  Returns the register now holding the value. */
1784 ST_FUNC int gv(int rc)
1786 int r, r2, r_ok, r2_ok, rc2, bt;
1787 int bit_pos, bit_size, size, align;
1789 /* NOTE: get_reg can modify vstack[] */
1790 if (vtop->type.t & VT_BITFIELD) {
1791 CType type;
1793 bit_pos = BIT_POS(vtop->type.t);
1794 bit_size = BIT_SIZE(vtop->type.t);
1795 /* remove bit field info to avoid loops */
1796 vtop->type.t &= ~VT_STRUCT_MASK;
1798 type.ref = NULL;
1799 type.t = vtop->type.t & VT_UNSIGNED;
1800 if ((vtop->type.t & VT_BTYPE) == VT_BOOL)
1801 type.t |= VT_UNSIGNED;
1803 r = adjust_bf(vtop, bit_pos, bit_size);
1805 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
1806 type.t |= VT_LLONG;
1807 else
1808 type.t |= VT_INT;
1810 if (r == VT_STRUCT) {
1811 load_packed_bf(&type, bit_pos, bit_size);
1812 } else {
1813 int bits = (type.t & VT_BTYPE) == VT_LLONG ? 64 : 32;
1814 /* cast to int to propagate signedness in following ops */
1815 gen_cast(&type);
1816 /* generate shifts */
1817 vpushi(bits - (bit_pos + bit_size));
1818 gen_op(TOK_SHL);
1819 vpushi(bits - bit_size);
1820 /* NOTE: transformed to SHR if unsigned */
1821 gen_op(TOK_SAR);
1823 r = gv(rc);
1824 } else {
1825 if (is_float(vtop->type.t) &&
1826 (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1827 /* CPUs usually cannot use float constants, so we store them
1828 generically in data segment */
1829 init_params p = { rodata_section };
1830 unsigned long offset;
1831 size = type_size(&vtop->type, &align);
1832 if (NODATA_WANTED)
1833 size = 0, align = 1;
1834 offset = section_add(p.sec, size, align);
1835 vpush_ref(&vtop->type, p.sec, offset, size);
1836 vswap();
1837 init_putv(&p, &vtop->type, offset);
1838 vtop->r |= VT_LVAL;
1840 #ifdef CONFIG_TCC_BCHECK
1841 if (vtop->r & VT_MUSTBOUND)
1842 gbound();
1843 #endif
1845 bt = vtop->type.t & VT_BTYPE;
1847 #ifdef TCC_TARGET_RISCV64
1848 /* XXX mega hack */
1849 if (bt == VT_LDOUBLE && rc == RC_FLOAT)
1850 rc = RC_INT;
1851 #endif
1852 rc2 = RC2_TYPE(bt, rc);
1854 /* need to reload if:
1855 - constant
1856 - lvalue (need to dereference pointer)
1857 - already a register, but not in the right class */
1858 r = vtop->r & VT_VALMASK;
1859 r_ok = !(vtop->r & VT_LVAL) && (r < VT_CONST) && (reg_classes[r] & rc);
1860 r2_ok = !rc2 || ((vtop->r2 < VT_CONST) && (reg_classes[vtop->r2] & rc2));
1862 if (!r_ok || !r2_ok) {
1863 if (!r_ok)
1864 r = get_reg(rc);
1865 if (rc2) {
1866 int load_type = (bt == VT_QFLOAT) ? VT_DOUBLE : VT_PTRDIFF_T;
1867 int original_type = vtop->type.t;
1869 /* two register type load :
1870 expand to two words temporarily */
1871 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1872 /* load constant */
1873 unsigned long long ll = vtop->c.i;
1874 vtop->c.i = ll; /* first word */
1875 load(r, vtop);
1876 vtop->r = r; /* save register value */
1877 vpushi(ll >> 32); /* second word */
1878 } else if (vtop->r & VT_LVAL) {
1879 /* We do not want to modifier the long long pointer here.
1880 So we save any other instances down the stack */
1881 save_reg_upstack(vtop->r, 1);
1882 /* load from memory */
1883 vtop->type.t = load_type;
1884 load(r, vtop);
1885 vdup();
1886 vtop[-1].r = r; /* save register value */
1887 /* increment pointer to get second word */
1888 vtop->type.t = VT_PTRDIFF_T;
1889 gaddrof();
1890 vpushs(PTR_SIZE);
1891 gen_op('+');
1892 vtop->r |= VT_LVAL;
1893 vtop->type.t = load_type;
1894 } else {
1895 /* move registers */
1896 if (!r_ok)
1897 load(r, vtop);
1898 if (r2_ok && vtop->r2 < VT_CONST)
1899 goto done;
1900 vdup();
1901 vtop[-1].r = r; /* save register value */
1902 vtop->r = vtop[-1].r2;
1904 /* Allocate second register. Here we rely on the fact that
1905 get_reg() tries first to free r2 of an SValue. */
1906 r2 = get_reg(rc2);
1907 load(r2, vtop);
1908 vpop();
1909 /* write second register */
1910 vtop->r2 = r2;
1911 done:
1912 vtop->type.t = original_type;
1913 } else {
1914 if (vtop->r == VT_CMP)
1915 vset_VT_JMP();
1916 /* one register type load */
1917 load(r, vtop);
1920 vtop->r = r;
1921 #ifdef TCC_TARGET_C67
1922 /* uses register pairs for doubles */
1923 if (bt == VT_DOUBLE)
1924 vtop->r2 = r+1;
1925 #endif
1927 return r;
1930 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
/* Materialize the two top stack entries into registers.  The more
   general class is loaded first; loading the second entry may spill
   the first, so it is re-checked and reloaded if it lost its register.
   A pending VT_CMP on top forces the top-first order. */
1931 ST_FUNC void gv2(int rc1, int rc2)
1933 /* generate more generic register first. But VT_JMP or VT_CMP
1934 values must be generated first in all cases to avoid possible
1935 reload errors */
1936 if (vtop->r != VT_CMP && rc1 <= rc2) {
1937 vswap();
1938 gv(rc1);
1939 vswap();
1940 gv(rc2);
1941 /* test if reload is needed for first register */
1942 if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
1943 vswap();
1944 gv(rc1);
1945 vswap();
1947 } else {
1948 gv(rc2);
1949 vswap();
1950 gv(rc1);
1951 vswap();
1952 /* test if reload is needed for first register */
1953 if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
1954 gv(rc2);
1959 #if PTR_SIZE == 4
1960 /* expand 64bit on stack in two ints */
/* 32-bit targets only: split the 64-bit value on vtop into two VT_INT
   entries (low word below, high word on top).  Constants and simple
   memory lvalues are duplicated with an adjusted constant/offset;
   anything else is loaded into a register pair first. */
1961 ST_FUNC void lexpand(void)
1963 int u, v;
1964 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1965 v = vtop->r & (VT_VALMASK | VT_LVAL);
1966 if (v == VT_CONST) {
1967 vdup();
1968 vtop[0].c.i >>= 32;
1969 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1970 vdup();
1971 vtop[0].c.i += 4;
1972 } else {
1973 gv(RC_INT);
1974 vdup();
1975 vtop[0].r = vtop[-1].r2;
1976 vtop[0].r2 = vtop[-1].r2 = VT_CONST;
1978 vtop[0].type.t = vtop[-1].type.t = VT_INT | u;
1980 #endif
1982 #if PTR_SIZE == 4
1983 /* build a long long from two ints */
/* Inverse of lexpand(): combine the two int entries on top of the
   stack (low below, high on top) into one entry of 64-bit type 't'
   using a register pair, then drop the extra entry. */
1984 static void lbuild(int t)
1986 gv2(RC_INT, RC_INT);
1987 vtop[-1].r2 = vtop[0].r;
1988 vtop[-1].type.t = t;
1989 vpop();
1991 #endif
1993 /* convert stack entry to register and duplicate its value in another
1994 register */
/* On 32-bit targets a long long is split, each half duplicated, and
   the two copies rebuilt, so both results live in fresh register
   pairs.  Otherwise: load the value, allocate a second register of the
   same class, and copy into it. */
1995 static void gv_dup(void)
1997 int t, rc, r;
1999 t = vtop->type.t;
2000 #if PTR_SIZE == 4
2001 if ((t & VT_BTYPE) == VT_LLONG) {
2002 if (t & VT_BITFIELD) {
2003 gv(RC_INT);
2004 t = vtop->type.t;
2006 lexpand();
2007 gv_dup();
2008 vswap();
2009 vrotb(3);
2010 gv_dup();
2011 vrotb(4);
2012 /* stack: H L L1 H1 */
2013 lbuild(t);
2014 vrotb(3);
2015 vrotb(3);
2016 vswap();
2017 lbuild(t);
2018 vswap();
2019 return;
2021 #endif
2022 /* duplicate value */
2023 rc = RC_TYPE(t);
2024 gv(rc);
2025 r = get_reg(rc);
2026 vdup();
2027 load(r, vtop);
2028 vtop->r = r;
2031 #if PTR_SIZE == 4
2032 /* generate CPU independent (unsigned) long long operations */
/* 32-bit targets only: implement 64-bit operations on the two top
   value-stack entries.  Division/modulo call libgcc-style helpers
   (__divdi3 etc., with ARM EABI return-register quirks); bitwise ops,
   add/sub (with carry tokens) and multiply are synthesized from 32-bit
   word operations; shifts by a constant are expanded inline, variable
   shifts call helpers; comparisons compare high words first, then low
   words unsigned.  The /* stack: ... *​/ comments track word layout. */
2033 static void gen_opl(int op)
2035 int t, a, b, op1, c, i;
2036 int func;
2037 unsigned short reg_iret = REG_IRET;
2038 unsigned short reg_lret = REG_IRE2;
2039 SValue tmp;
2041 switch(op) {
2042 case '/':
2043 case TOK_PDIV:
2044 func = TOK___divdi3;
2045 goto gen_func;
2046 case TOK_UDIV:
2047 func = TOK___udivdi3;
2048 goto gen_func;
2049 case '%':
2050 func = TOK___moddi3;
2051 goto gen_mod_func;
2052 case TOK_UMOD:
2053 func = TOK___umoddi3;
2054 gen_mod_func:
2055 #ifdef TCC_ARM_EABI
2056 reg_iret = TREG_R2;
2057 reg_lret = TREG_R3;
2058 #endif
2059 gen_func:
2060 /* call generic long long function */
2061 vpush_helper_func(func);
2062 vrott(3);
2063 gfunc_call(2);
2064 vpushi(0);
2065 vtop->r = reg_iret;
2066 vtop->r2 = reg_lret;
2067 break;
2068 case '^':
2069 case '&':
2070 case '|':
2071 case '*':
2072 case '+':
2073 case '-':
2074 //pv("gen_opl A",0,2);
2075 t = vtop->type.t;
2076 vswap();
2077 lexpand();
2078 vrotb(3);
2079 lexpand();
2080 /* stack: L1 H1 L2 H2 */
2081 tmp = vtop[0];
2082 vtop[0] = vtop[-3];
2083 vtop[-3] = tmp;
2084 tmp = vtop[-2];
2085 vtop[-2] = vtop[-3];
2086 vtop[-3] = tmp;
2087 vswap();
2088 /* stack: H1 H2 L1 L2 */
2089 //pv("gen_opl B",0,4);
2090 if (op == '*') {
2091 vpushv(vtop - 1);
2092 vpushv(vtop - 1);
2093 gen_op(TOK_UMULL);
2094 lexpand();
2095 /* stack: H1 H2 L1 L2 ML MH */
2096 for(i=0;i<4;i++)
2097 vrotb(6);
2098 /* stack: ML MH H1 H2 L1 L2 */
2099 tmp = vtop[0];
2100 vtop[0] = vtop[-2];
2101 vtop[-2] = tmp;
2102 /* stack: ML MH H1 L2 H2 L1 */
2103 gen_op('*');
2104 vrotb(3);
2105 vrotb(3);
2106 gen_op('*');
2107 /* stack: ML MH M1 M2 */
2108 gen_op('+');
2109 gen_op('+');
2110 } else if (op == '+' || op == '-') {
2111 /* XXX: add non carry method too (for MIPS or alpha) */
2112 if (op == '+')
2113 op1 = TOK_ADDC1;
2114 else
2115 op1 = TOK_SUBC1;
2116 gen_op(op1);
2117 /* stack: H1 H2 (L1 op L2) */
2118 vrotb(3);
2119 vrotb(3);
2120 gen_op(op1 + 1); /* TOK_xxxC2 */
2121 } else {
2122 gen_op(op);
2123 /* stack: H1 H2 (L1 op L2) */
2124 vrotb(3);
2125 vrotb(3);
2126 /* stack: (L1 op L2) H1 H2 */
2127 gen_op(op);
2128 /* stack: (L1 op L2) (H1 op H2) */
2130 /* stack: L H */
2131 lbuild(t);
2132 break;
2133 case TOK_SAR:
2134 case TOK_SHR:
2135 case TOK_SHL:
2136 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
2137 t = vtop[-1].type.t;
2138 vswap();
2139 lexpand();
2140 vrotb(3);
2141 /* stack: L H shift */
2142 c = (int)vtop->c.i;
2143 /* constant: simpler */
2144 /* NOTE: all comments are for SHL. the other cases are
2145 done by swapping words */
2146 vpop();
2147 if (op != TOK_SHL)
2148 vswap();
2149 if (c >= 32) {
2150 /* stack: L H */
2151 vpop();
2152 if (c > 32) {
2153 vpushi(c - 32);
2154 gen_op(op);
2156 if (op != TOK_SAR) {
2157 vpushi(0);
2158 } else {
2159 gv_dup();
2160 vpushi(31);
2161 gen_op(TOK_SAR);
2163 vswap();
2164 } else {
2165 vswap();
2166 gv_dup();
2167 /* stack: H L L */
2168 vpushi(c);
2169 gen_op(op);
2170 vswap();
2171 vpushi(32 - c);
2172 if (op == TOK_SHL)
2173 gen_op(TOK_SHR);
2174 else
2175 gen_op(TOK_SHL);
2176 vrotb(3);
2177 /* stack: L L H */
2178 vpushi(c);
2179 if (op == TOK_SHL)
2180 gen_op(TOK_SHL);
2181 else
2182 gen_op(TOK_SHR);
2183 gen_op('|');
2185 if (op != TOK_SHL)
2186 vswap();
2187 lbuild(t);
2188 } else {
2189 /* XXX: should provide a faster fallback on x86 ? */
2190 switch(op) {
2191 case TOK_SAR:
2192 func = TOK___ashrdi3;
2193 goto gen_func;
2194 case TOK_SHR:
2195 func = TOK___lshrdi3;
2196 goto gen_func;
2197 case TOK_SHL:
2198 func = TOK___ashldi3;
2199 goto gen_func;
2202 break;
2203 default:
2204 /* compare operations */
2205 t = vtop->type.t;
2206 vswap();
2207 lexpand();
2208 vrotb(3);
2209 lexpand();
2210 /* stack: L1 H1 L2 H2 */
2211 tmp = vtop[-1];
2212 vtop[-1] = vtop[-2];
2213 vtop[-2] = tmp;
2214 /* stack: L1 L2 H1 H2 */
2215 save_regs(4);
2216 /* compare high */
2217 op1 = op;
2218 /* when values are equal, we need to compare low words. since
2219 the jump is inverted, we invert the test too. */
2220 if (op1 == TOK_LT)
2221 op1 = TOK_LE;
2222 else if (op1 == TOK_GT)
2223 op1 = TOK_GE;
2224 else if (op1 == TOK_ULT)
2225 op1 = TOK_ULE;
2226 else if (op1 == TOK_UGT)
2227 op1 = TOK_UGE;
2228 a = 0;
2229 b = 0;
2230 gen_op(op1);
2231 if (op == TOK_NE) {
2232 b = gvtst(0, 0);
2233 } else {
2234 a = gvtst(1, 0);
2235 if (op != TOK_EQ) {
2236 /* generate non equal test */
2237 vpushi(0);
2238 vset_VT_CMP(TOK_NE);
2239 b = gvtst(0, 0);
2242 /* compare low. Always unsigned */
2243 op1 = op;
2244 if (op1 == TOK_LT)
2245 op1 = TOK_ULT;
2246 else if (op1 == TOK_LE)
2247 op1 = TOK_ULE;
2248 else if (op1 == TOK_GT)
2249 op1 = TOK_UGT;
2250 else if (op1 == TOK_GE)
2251 op1 = TOK_UGE;
2252 gen_op(op1);
2253 #if 0//def TCC_TARGET_I386
2254 if (op == TOK_NE) { gsym(b); break; }
2255 if (op == TOK_EQ) { gsym(a); break; }
2256 #endif
2257 gvtst_set(1, a);
2258 gvtst_set(0, b);
2259 break;
2262 #endif
/* Signed 64-bit division emulated on unsigned operands: divide the
   magnitudes, then negate the quotient when the operand signs differ.
   Matches C's truncating division for two's-complement values. */
static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
{
    uint64_t num = (a >> 63) ? 0 - a : a;
    uint64_t den = (b >> 63) ? 0 - b : b;
    uint64_t quot = num / den;
    if ((a ^ b) >> 63)
        quot = 0 - quot;
    return quot;
}
/* Signed "less than" computed on the unsigned representations:
   XOR-ing the sign bit maps the signed ordering onto the unsigned
   ordering, so a plain unsigned compare gives the signed result. */
static int gen_opic_lt(uint64_t a, uint64_t b)
{
    const uint64_t sign_bit = (uint64_t)1 << 63;
    return (a ^ sign_bit) < (b ^ sign_bit);
}
2275 /* handle integer constant optimizations and various machine
2276 independent opt */
/* Operates on the two top value-stack entries.  When both are genuine
   constants the operation is folded at compile time (with explicit
   sign extension for sub-64-bit types before and after).  Otherwise a
   series of algebraic simplifications is tried (identity/absorbing
   elements, power-of-two mul/div to shifts, symbol+constant folding)
   before falling back to the target generators gen_opl/gen_opi. */
2277 static void gen_opic(int op)
2279 SValue *v1 = vtop - 1;
2280 SValue *v2 = vtop;
2281 int t1 = v1->type.t & VT_BTYPE;
2282 int t2 = v2->type.t & VT_BTYPE;
2283 int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2284 int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2285 uint64_t l1 = c1 ? v1->c.i : 0;
2286 uint64_t l2 = c2 ? v2->c.i : 0;
2287 int shm = (t1 == VT_LLONG) ? 63 : 31;
2288 int r;
2290 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
2291 l1 = ((uint32_t)l1 |
2292 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
2293 if (t2 != VT_LLONG && (PTR_SIZE != 8 || t2 != VT_PTR))
2294 l2 = ((uint32_t)l2 |
2295 (v2->type.t & VT_UNSIGNED ? 0 : -(l2 & 0x80000000)));
2297 if (c1 && c2) {
2298 switch(op) {
2299 case '+': l1 += l2; break;
2300 case '-': l1 -= l2; break;
2301 case '&': l1 &= l2; break;
2302 case '^': l1 ^= l2; break;
2303 case '|': l1 |= l2; break;
2304 case '*': l1 *= l2; break;
2306 case TOK_PDIV:
2307 case '/':
2308 case '%':
2309 case TOK_UDIV:
2310 case TOK_UMOD:
2311 /* if division by zero, generate explicit division */
2312 if (l2 == 0) {
2313 if (CONST_WANTED && !NOEVAL_WANTED)
2314 tcc_error("division by zero in constant");
2315 goto general_case;
2317 switch(op) {
2318 default: l1 = gen_opic_sdiv(l1, l2); break;
2319 case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
2320 case TOK_UDIV: l1 = l1 / l2; break;
2321 case TOK_UMOD: l1 = l1 % l2; break;
2323 break;
2324 case TOK_SHL: l1 <<= (l2 & shm); break;
2325 case TOK_SHR: l1 >>= (l2 & shm); break;
2326 case TOK_SAR:
2327 l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
2328 break;
2329 /* tests */
2330 case TOK_ULT: l1 = l1 < l2; break;
2331 case TOK_UGE: l1 = l1 >= l2; break;
2332 case TOK_EQ: l1 = l1 == l2; break;
2333 case TOK_NE: l1 = l1 != l2; break;
2334 case TOK_ULE: l1 = l1 <= l2; break;
2335 case TOK_UGT: l1 = l1 > l2; break;
2336 case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
2337 case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
2338 case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
2339 case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
2340 /* logical */
2341 case TOK_LAND: l1 = l1 && l2; break;
2342 case TOK_LOR: l1 = l1 || l2; break;
2343 default:
2344 goto general_case;
2346 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
2347 l1 = ((uint32_t)l1 |
2348 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
2349 v1->c.i = l1;
2350 v1->r |= v2->r & VT_NONCONST;
2351 vtop--;
2352 } else {
2353 /* if commutative ops, put c2 as constant */
2354 if (c1 && (op == '+' || op == '&' || op == '^' ||
2355 op == '|' || op == '*' || op == TOK_EQ || op == TOK_NE)) {
2356 vswap();
2357 c2 = c1; //c = c1, c1 = c2, c2 = c;
2358 l2 = l1; //l = l1, l1 = l2, l2 = l;
2360 if (c1 && ((l1 == 0 &&
2361 (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
2362 (l1 == -1 && op == TOK_SAR))) {
2363 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
2364 vpop();
2365 } else if (c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
2366 (op == '|' &&
2367 (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))) ||
2368 (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
2369 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
2370 if (l2 == 1)
2371 vtop->c.i = 0;
2372 vswap();
2373 vtop--;
2374 } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
2375 op == TOK_PDIV) &&
2376 l2 == 1) ||
2377 ((op == '+' || op == '-' || op == '|' || op == '^' ||
2378 op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
2379 l2 == 0) ||
2380 (op == '&' &&
2381 (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))))) {
2382 /* filter out NOP operations like x*1, x-0, x&-1... */
2383 vtop--;
2384 } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
2385 /* try to use shifts instead of muls or divs */
2386 if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
2387 int n = -1;
2388 while (l2) {
2389 l2 >>= 1;
2390 n++;
2392 vtop->c.i = n;
2393 if (op == '*')
2394 op = TOK_SHL;
2395 else if (op == TOK_PDIV)
2396 op = TOK_SAR;
2397 else
2398 op = TOK_SHR;
2400 goto general_case;
2401 } else if (c2 && (op == '+' || op == '-') &&
2402 (r = vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM),
2403 r == (VT_CONST | VT_SYM) || r == VT_LOCAL)) {
2404 /* symbol + constant case */
2405 if (op == '-')
2406 l2 = -l2;
2407 l2 += vtop[-1].c.i;
2408 /* The backends can't always deal with addends to symbols
2409 larger than +-1<<31. Don't construct such. */
2410 if ((int)l2 != l2)
2411 goto general_case;
2412 vtop--;
2413 vtop->c.i = l2;
2414 } else {
2415 general_case:
2416 /* call low level op generator */
2417 if (t1 == VT_LLONG || t2 == VT_LLONG ||
2418 (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
2419 gen_opl(op);
2420 else
2421 gen_opi(op);
2423 if (vtop->r == VT_CONST)
2424 vtop->r |= VT_NONCONST; /* is const, but only by optimization */
2428 #if defined TCC_TARGET_X86_64 || defined TCC_TARGET_I386
/* x86: the backend's gen_opf handles unary negation directly */
2429 # define gen_negf gen_opf
2430 #elif defined TCC_TARGET_ARM
2431 void gen_negf(int op)
2433 /* arm will detect 0-x and replace by vneg */
2434 vpushi(0), vswap(), gen_op('-');
2436 #else
2437 /* XXX: implement in gen_opf() for other backends too */
2438 void gen_negf(int op)
2440 /* In IEEE negate(x) isn't subtract(0,x). Without NaNs it's
2441 subtract(-0, x), but with them it's really a sign flip
2442 operation. We implement this with bit manipulation and have
2443 to do some type reinterpretation for this, which TCC can do
2444 only via memory. */
/* The value is forced to memory (save_reg after gv), then the last
   byte of its storage -- which holds the sign bit -- is XOR-ed with
   0x80 in place via the byte-addressing helper incr_bf_adr(). */
2446 int align, size, bt;
2448 size = type_size(&vtop->type, &align);
2449 bt = vtop->type.t & VT_BTYPE;
2450 save_reg(gv(RC_TYPE(bt)));
2451 vdup();
2452 incr_bf_adr(size - 1);
2453 vdup();
2454 vpushi(0x80); /* flip sign */
2455 gen_op('^');
2456 vstore();
2457 vpop();
2459 #endif
2461 /* generate a floating point operation with constant propagation */
2462 static void gen_opif(int op)
2464 int c1, c2, cast_int = 0;
2465 SValue *v1, *v2;
2466 #if defined _MSC_VER && defined __x86_64__
2467 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2468 volatile
2469 #endif
2470 long double f1, f2;
2472 v1 = vtop - 1;
2473 v2 = vtop;
2474 if (op == TOK_NEG)
2475 v1 = v2;
2477 /* currently, we cannot do computations with forward symbols */
2478 c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2479 c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2480 if (c1 && c2) {
2481 if (v1->type.t == VT_FLOAT) {
2482 f1 = v1->c.f;
2483 f2 = v2->c.f;
2484 } else if (v1->type.t == VT_DOUBLE) {
2485 f1 = v1->c.d;
2486 f2 = v2->c.d;
2487 } else {
2488 f1 = v1->c.ld;
2489 f2 = v2->c.ld;
2491 /* NOTE: we only do constant propagation if finite number (not
2492 NaN or infinity) (ANSI spec) */
2493 if (!(ieee_finite(f1) || !ieee_finite(f2)) && !CONST_WANTED)
2494 goto general_case;
2495 switch(op) {
2496 case '+': f1 += f2; break;
2497 case '-': f1 -= f2; break;
2498 case '*': f1 *= f2; break;
2499 case '/':
2500 if (f2 == 0.0) {
2501 union { float f; unsigned u; } x1, x2, y;
2502 /* If not in initializer we need to potentially generate
2503 FP exceptions at runtime, otherwise we want to fold. */
2504 if (!CONST_WANTED)
2505 goto general_case;
2506 /* the run-time result of 0.0/0.0 on x87, also of other compilers
2507 when used to compile the f1 /= f2 below, would be -nan */
2508 x1.f = f1, x2.f = f2;
2509 if (f1 == 0.0)
2510 y.u = 0x7fc00000; /* nan */
2511 else
2512 y.u = 0x7f800000; /* infinity */
2513 y.u |= (x1.u ^ x2.u) & 0x80000000; /* set sign */
2514 f1 = y.f;
2515 break;
2517 f1 /= f2;
2518 break;
2519 case TOK_NEG:
2520 f1 = -f1;
2521 goto unary_result;
2522 case TOK_EQ:
2523 f1 = f1 == f2;
2524 make_int:
2525 cast_int = 1;
2526 break;
2527 case TOK_NE:
2528 f1 = f1 != f2;
2529 goto make_int;
2530 case TOK_LT:
2531 f1 = f1 < f2;
2532 goto make_int;
2533 case TOK_GE:
2534 f1 = f1 >= f2;
2535 goto make_int;
2536 case TOK_LE:
2537 f1 = f1 <= f2;
2538 goto make_int;
2539 case TOK_GT:
2540 f1 = f1 > f2;
2541 goto make_int;
2542 /* XXX: also handles tests ? */
2543 default:
2544 goto general_case;
2546 vtop--;
2547 unary_result:
2548 /* XXX: overflow test ? */
2549 if (v1->type.t == VT_FLOAT) {
2550 v1->c.f = f1;
2551 } else if (v1->type.t == VT_DOUBLE) {
2552 v1->c.d = f1;
2553 } else {
2554 v1->c.ld = f1;
2556 if (cast_int)
2557 gen_cast_s(VT_INT);
2558 } else {
2559 general_case:
2560 if (op == TOK_NEG) {
2561 gen_negf(op);
2562 } else {
2563 gen_opf(op);
2568 /* print a type. If 'varstr' is not NULL, then the variable is also
2569 printed in the type */
2570 /* XXX: union */
2571 /* XXX: add array and function pointers */
2572 static void type_to_str(char *buf, int buf_size,
2573 CType *type, const char *varstr)
2575 int bt, v, t;
2576 Sym *s, *sa;
2577 char buf1[256];
2578 const char *tstr;
2580 t = type->t;
2581 bt = t & VT_BTYPE;
2582 buf[0] = '\0';
2584 if (t & VT_EXTERN)
2585 pstrcat(buf, buf_size, "extern ");
2586 if (t & VT_STATIC)
2587 pstrcat(buf, buf_size, "static ");
2588 if (t & VT_TYPEDEF)
2589 pstrcat(buf, buf_size, "typedef ");
2590 if (t & VT_INLINE)
2591 pstrcat(buf, buf_size, "inline ");
2592 if (bt != VT_PTR) {
2593 if (t & VT_VOLATILE)
2594 pstrcat(buf, buf_size, "volatile ");
2595 if (t & VT_CONSTANT)
2596 pstrcat(buf, buf_size, "const ");
2598 if (((t & VT_DEFSIGN) && bt == VT_BYTE)
2599 || ((t & VT_UNSIGNED)
2600 && (bt == VT_SHORT || bt == VT_INT || bt == VT_LLONG)
2601 && !IS_ENUM(t)
2603 pstrcat(buf, buf_size, (t & VT_UNSIGNED) ? "unsigned " : "signed ");
2605 buf_size -= strlen(buf);
2606 buf += strlen(buf);
2608 switch(bt) {
2609 case VT_VOID:
2610 tstr = "void";
2611 goto add_tstr;
2612 case VT_BOOL:
2613 tstr = "_Bool";
2614 goto add_tstr;
2615 case VT_BYTE:
2616 tstr = "char";
2617 goto add_tstr;
2618 case VT_SHORT:
2619 tstr = "short";
2620 goto add_tstr;
2621 case VT_INT:
2622 tstr = "int";
2623 goto maybe_long;
2624 case VT_LLONG:
2625 tstr = "long long";
2626 maybe_long:
2627 if (t & VT_LONG)
2628 tstr = "long";
2629 if (!IS_ENUM(t))
2630 goto add_tstr;
2631 tstr = "enum ";
2632 goto tstruct;
2633 case VT_FLOAT:
2634 tstr = "float";
2635 goto add_tstr;
2636 case VT_DOUBLE:
2637 tstr = "double";
2638 if (!(t & VT_LONG))
2639 goto add_tstr;
2640 case VT_LDOUBLE:
2641 tstr = "long double";
2642 add_tstr:
2643 pstrcat(buf, buf_size, tstr);
2644 break;
2645 case VT_STRUCT:
2646 tstr = "struct ";
2647 if (IS_UNION(t))
2648 tstr = "union ";
2649 tstruct:
2650 pstrcat(buf, buf_size, tstr);
2651 v = type->ref->v & ~SYM_STRUCT;
2652 if (v >= SYM_FIRST_ANOM)
2653 pstrcat(buf, buf_size, "<anonymous>");
2654 else
2655 pstrcat(buf, buf_size, get_tok_str(v, NULL));
2656 break;
2657 case VT_FUNC:
2658 s = type->ref;
2659 buf1[0]=0;
2660 if (varstr && '*' == *varstr) {
2661 pstrcat(buf1, sizeof(buf1), "(");
2662 pstrcat(buf1, sizeof(buf1), varstr);
2663 pstrcat(buf1, sizeof(buf1), ")");
2665 pstrcat(buf1, buf_size, "(");
2666 sa = s->next;
2667 while (sa != NULL) {
2668 char buf2[256];
2669 type_to_str(buf2, sizeof(buf2), &sa->type, NULL);
2670 pstrcat(buf1, sizeof(buf1), buf2);
2671 sa = sa->next;
2672 if (sa)
2673 pstrcat(buf1, sizeof(buf1), ", ");
2675 if (s->f.func_type == FUNC_ELLIPSIS)
2676 pstrcat(buf1, sizeof(buf1), ", ...");
2677 pstrcat(buf1, sizeof(buf1), ")");
2678 type_to_str(buf, buf_size, &s->type, buf1);
2679 goto no_var;
2680 case VT_PTR:
2681 s = type->ref;
2682 if (t & (VT_ARRAY|VT_VLA)) {
2683 if (varstr && '*' == *varstr)
2684 snprintf(buf1, sizeof(buf1), "(%s)[%d]", varstr, s->c);
2685 else
2686 snprintf(buf1, sizeof(buf1), "%s[%d]", varstr ? varstr : "", s->c);
2687 type_to_str(buf, buf_size, &s->type, buf1);
2688 goto no_var;
2690 pstrcpy(buf1, sizeof(buf1), "*");
2691 if (t & VT_CONSTANT)
2692 pstrcat(buf1, buf_size, "const ");
2693 if (t & VT_VOLATILE)
2694 pstrcat(buf1, buf_size, "volatile ");
2695 if (varstr)
2696 pstrcat(buf1, sizeof(buf1), varstr);
2697 type_to_str(buf, buf_size, &s->type, buf1);
2698 goto no_var;
2700 if (varstr) {
2701 pstrcat(buf, buf_size, " ");
2702 pstrcat(buf, buf_size, varstr);
2704 no_var: ;
2707 static void type_incompatibility_error(CType* st, CType* dt, const char* fmt)
2709 char buf1[256], buf2[256];
2710 type_to_str(buf1, sizeof(buf1), st, NULL);
2711 type_to_str(buf2, sizeof(buf2), dt, NULL);
2712 tcc_error(fmt, buf1, buf2);
2715 static void type_incompatibility_warning(CType* st, CType* dt, const char* fmt)
2717 char buf1[256], buf2[256];
2718 type_to_str(buf1, sizeof(buf1), st, NULL);
2719 type_to_str(buf2, sizeof(buf2), dt, NULL);
2720 tcc_warning(fmt, buf1, buf2);
2723 static int pointed_size(CType *type)
2725 int align;
2726 return type_size(pointed_type(type), &align);
2729 static inline int is_null_pointer(SValue *p)
2731 if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM | VT_NONCONST)) != VT_CONST)
2732 return 0;
2733 return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
2734 ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
2735 ((p->type.t & VT_BTYPE) == VT_PTR &&
2736 (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0) &&
2737 ((pointed_type(&p->type)->t & VT_BTYPE) == VT_VOID) &&
2738 0 == (pointed_type(&p->type)->t & (VT_CONSTANT | VT_VOLATILE))
2742 /* compare function types. OLD functions match any new functions */
2743 static int is_compatible_func(CType *type1, CType *type2)
2745 Sym *s1, *s2;
2747 s1 = type1->ref;
2748 s2 = type2->ref;
2749 if (s1->f.func_call != s2->f.func_call)
2750 return 0;
2751 if (s1->f.func_type != s2->f.func_type
2752 && s1->f.func_type != FUNC_OLD
2753 && s2->f.func_type != FUNC_OLD)
2754 return 0;
2755 for (;;) {
2756 if (!is_compatible_unqualified_types(&s1->type, &s2->type))
2757 return 0;
2758 if (s1->f.func_type == FUNC_OLD || s2->f.func_type == FUNC_OLD )
2759 return 1;
2760 s1 = s1->next;
2761 s2 = s2->next;
2762 if (!s1)
2763 return !s2;
2764 if (!s2)
2765 return 0;
2769 /* return true if type1 and type2 are the same. If unqualified is
2770 true, qualifiers on the types are ignored.
2772 static int compare_types(CType *type1, CType *type2, int unqualified)
2774 int bt1, t1, t2;
2776 t1 = type1->t & VT_TYPE;
2777 t2 = type2->t & VT_TYPE;
2778 if (unqualified) {
2779 /* strip qualifiers before comparing */
2780 t1 &= ~(VT_CONSTANT | VT_VOLATILE);
2781 t2 &= ~(VT_CONSTANT | VT_VOLATILE);
2784 /* Default Vs explicit signedness only matters for char */
2785 if ((t1 & VT_BTYPE) != VT_BYTE) {
2786 t1 &= ~VT_DEFSIGN;
2787 t2 &= ~VT_DEFSIGN;
2789 /* XXX: bitfields ? */
2790 if (t1 != t2)
2791 return 0;
2793 if ((t1 & VT_ARRAY)
2794 && !(type1->ref->c < 0
2795 || type2->ref->c < 0
2796 || type1->ref->c == type2->ref->c))
2797 return 0;
2799 /* test more complicated cases */
2800 bt1 = t1 & VT_BTYPE;
2801 if (bt1 == VT_PTR) {
2802 type1 = pointed_type(type1);
2803 type2 = pointed_type(type2);
2804 return is_compatible_types(type1, type2);
2805 } else if (bt1 == VT_STRUCT) {
2806 return (type1->ref == type2->ref);
2807 } else if (bt1 == VT_FUNC) {
2808 return is_compatible_func(type1, type2);
2809 } else if (IS_ENUM(type1->t) && IS_ENUM(type2->t)) {
2810 /* If both are enums then they must be the same, if only one is then
2811 t1 and t2 must be equal, which was checked above already. */
2812 return type1->ref == type2->ref;
2813 } else {
2814 return 1;
2818 /* Check if OP1 and OP2 can be "combined" with operation OP, the combined
2819 type is stored in DEST if non-null (except for pointer plus/minus) . */
2820 static int combine_types(CType *dest, SValue *op1, SValue *op2, int op)
2822 CType *type1 = &op1->type, *type2 = &op2->type, type;
2823 int t1 = type1->t, t2 = type2->t, bt1 = t1 & VT_BTYPE, bt2 = t2 & VT_BTYPE;
2824 int ret = 1;
2826 type.t = VT_VOID;
2827 type.ref = NULL;
2829 if (bt1 == VT_VOID || bt2 == VT_VOID) {
2830 ret = op == '?' ? 1 : 0;
2831 /* NOTE: as an extension, we accept void on only one side */
2832 type.t = VT_VOID;
2833 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
2834 if (op == '+') ; /* Handled in caller */
2835 /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
2836 /* If one is a null ptr constant the result type is the other. */
2837 else if (is_null_pointer (op2)) type = *type1;
2838 else if (is_null_pointer (op1)) type = *type2;
2839 else if (bt1 != bt2) {
2840 /* accept comparison or cond-expr between pointer and integer
2841 with a warning */
2842 if ((op == '?' || TOK_ISCOND(op))
2843 && (is_integer_btype(bt1) || is_integer_btype(bt2)))
2844 tcc_warning("pointer/integer mismatch in %s",
2845 op == '?' ? "conditional expression" : "comparison");
2846 else if (op != '-' || !is_integer_btype(bt2))
2847 ret = 0;
2848 type = *(bt1 == VT_PTR ? type1 : type2);
2849 } else {
2850 CType *pt1 = pointed_type(type1);
2851 CType *pt2 = pointed_type(type2);
2852 int pbt1 = pt1->t & VT_BTYPE;
2853 int pbt2 = pt2->t & VT_BTYPE;
2854 int newquals, copied = 0;
2855 if (pbt1 != VT_VOID && pbt2 != VT_VOID
2856 && !compare_types(pt1, pt2, 1/*unqualif*/)) {
2857 if (op != '?' && !TOK_ISCOND(op))
2858 ret = 0;
2859 else
2860 type_incompatibility_warning(type1, type2,
2861 op == '?'
2862 ? "pointer type mismatch in conditional expression ('%s' and '%s')"
2863 : "pointer type mismatch in comparison('%s' and '%s')");
2865 if (op == '?') {
2866 /* pointers to void get preferred, otherwise the
2867 pointed to types minus qualifs should be compatible */
2868 type = *((pbt1 == VT_VOID) ? type1 : type2);
2869 /* combine qualifs */
2870 newquals = ((pt1->t | pt2->t) & (VT_CONSTANT | VT_VOLATILE));
2871 if ((~pointed_type(&type)->t & (VT_CONSTANT | VT_VOLATILE))
2872 & newquals)
2874 /* copy the pointer target symbol */
2875 type.ref = sym_push(SYM_FIELD, &type.ref->type,
2876 0, type.ref->c);
2877 copied = 1;
2878 pointed_type(&type)->t |= newquals;
2880 /* pointers to incomplete arrays get converted to
2881 pointers to completed ones if possible */
2882 if (pt1->t & VT_ARRAY
2883 && pt2->t & VT_ARRAY
2884 && pointed_type(&type)->ref->c < 0
2885 && (pt1->ref->c > 0 || pt2->ref->c > 0))
2887 if (!copied)
2888 type.ref = sym_push(SYM_FIELD, &type.ref->type,
2889 0, type.ref->c);
2890 pointed_type(&type)->ref =
2891 sym_push(SYM_FIELD, &pointed_type(&type)->ref->type,
2892 0, pointed_type(&type)->ref->c);
2893 pointed_type(&type)->ref->c =
2894 0 < pt1->ref->c ? pt1->ref->c : pt2->ref->c;
2898 if (TOK_ISCOND(op))
2899 type.t = VT_SIZE_T;
2900 } else if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
2901 if (op != '?' || !compare_types(type1, type2, 1))
2902 ret = 0;
2903 type = *type1;
2904 } else if (is_float(bt1) || is_float(bt2)) {
2905 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
2906 type.t = VT_LDOUBLE;
2907 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
2908 type.t = VT_DOUBLE;
2909 } else {
2910 type.t = VT_FLOAT;
2912 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
2913 /* cast to biggest op */
2914 type.t = VT_LLONG | VT_LONG;
2915 if (bt1 == VT_LLONG)
2916 type.t &= t1;
2917 if (bt2 == VT_LLONG)
2918 type.t &= t2;
2919 /* convert to unsigned if it does not fit in a long long */
2920 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
2921 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
2922 type.t |= VT_UNSIGNED;
2923 } else {
2924 /* integer operations */
2925 type.t = VT_INT | (VT_LONG & (t1 | t2));
2926 /* convert to unsigned if it does not fit in an integer */
2927 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
2928 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
2929 type.t |= VT_UNSIGNED;
2931 if (dest)
2932 *dest = type;
2933 return ret;
2936 /* generic gen_op: handles types problems */
2937 ST_FUNC void gen_op(int op)
2939 int t1, t2, bt1, bt2, t;
2940 CType type1, combtype;
2942 redo:
2943 t1 = vtop[-1].type.t;
2944 t2 = vtop[0].type.t;
2945 bt1 = t1 & VT_BTYPE;
2946 bt2 = t2 & VT_BTYPE;
2948 if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
2949 if (bt2 == VT_FUNC) {
2950 mk_pointer(&vtop->type);
2951 gaddrof();
2953 if (bt1 == VT_FUNC) {
2954 vswap();
2955 mk_pointer(&vtop->type);
2956 gaddrof();
2957 vswap();
2959 goto redo;
2960 } else if (!combine_types(&combtype, vtop - 1, vtop, op)) {
2961 tcc_error("invalid operand types for binary operation");
2962 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
2963 /* at least one operand is a pointer */
2964 /* relational op: must be both pointers */
2965 int align;
2966 if (TOK_ISCOND(op))
2967 goto std_op;
2968 /* if both pointers, then it must be the '-' op */
2969 if (bt1 == VT_PTR && bt2 == VT_PTR) {
2970 if (op != '-')
2971 tcc_error("cannot use pointers here");
2972 vpush_type_size(pointed_type(&vtop[-1].type), &align);
2973 vrott(3);
2974 gen_opic(op);
2975 vtop->type.t = VT_PTRDIFF_T;
2976 vswap();
2977 gen_op(TOK_PDIV);
2978 } else {
2979 /* exactly one pointer : must be '+' or '-'. */
2980 if (op != '-' && op != '+')
2981 tcc_error("cannot use pointers here");
2982 /* Put pointer as first operand */
2983 if (bt2 == VT_PTR) {
2984 vswap();
2985 t = t1, t1 = t2, t2 = t;
2987 #if PTR_SIZE == 4
2988 if ((vtop[0].type.t & VT_BTYPE) == VT_LLONG)
2989 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2990 gen_cast_s(VT_INT);
2991 #endif
2992 type1 = vtop[-1].type;
2993 vpush_type_size(pointed_type(&vtop[-1].type), &align);
2994 gen_op('*');
2995 #ifdef CONFIG_TCC_BCHECK
2996 if (tcc_state->do_bounds_check && !CONST_WANTED) {
2997 /* if bounded pointers, we generate a special code to
2998 test bounds */
2999 if (op == '-') {
3000 vpushi(0);
3001 vswap();
3002 gen_op('-');
3004 gen_bounded_ptr_add();
3005 } else
3006 #endif
3008 gen_opic(op);
3010 type1.t &= ~(VT_ARRAY|VT_VLA);
3011 /* put again type if gen_opic() swaped operands */
3012 vtop->type = type1;
3014 } else {
3015 /* floats can only be used for a few operations */
3016 if (is_float(combtype.t)
3017 && op != '+' && op != '-' && op != '*' && op != '/'
3018 && !TOK_ISCOND(op))
3019 tcc_error("invalid operands for binary operation");
3020 else if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL) {
3021 t = bt1 == VT_LLONG ? VT_LLONG : VT_INT;
3022 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (t | VT_UNSIGNED))
3023 t |= VT_UNSIGNED;
3024 t |= (VT_LONG & t1);
3025 combtype.t = t;
3027 std_op:
3028 t = t2 = combtype.t;
3029 /* XXX: currently, some unsigned operations are explicit, so
3030 we modify them here */
3031 if (t & VT_UNSIGNED) {
3032 if (op == TOK_SAR)
3033 op = TOK_SHR;
3034 else if (op == '/')
3035 op = TOK_UDIV;
3036 else if (op == '%')
3037 op = TOK_UMOD;
3038 else if (op == TOK_LT)
3039 op = TOK_ULT;
3040 else if (op == TOK_GT)
3041 op = TOK_UGT;
3042 else if (op == TOK_LE)
3043 op = TOK_ULE;
3044 else if (op == TOK_GE)
3045 op = TOK_UGE;
3047 vswap();
3048 gen_cast_s(t);
3049 vswap();
3050 /* special case for shifts and long long: we keep the shift as
3051 an integer */
3052 if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
3053 t2 = VT_INT;
3054 gen_cast_s(t2);
3055 if (is_float(t))
3056 gen_opif(op);
3057 else
3058 gen_opic(op);
3059 if (TOK_ISCOND(op)) {
3060 /* relational op: the result is an int */
3061 vtop->type.t = VT_INT;
3062 } else {
3063 vtop->type.t = t;
3066 // Make sure that we have converted to an rvalue:
3067 if (vtop->r & VT_LVAL)
3068 gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
3071 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64 || defined TCC_TARGET_ARM
3072 #define gen_cvt_itof1 gen_cvt_itof
3073 #else
3074 /* generic itof for unsigned long long case */
3075 static void gen_cvt_itof1(int t)
3077 if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
3078 (VT_LLONG | VT_UNSIGNED)) {
3080 if (t == VT_FLOAT)
3081 vpush_helper_func(TOK___floatundisf);
3082 #if LDOUBLE_SIZE != 8
3083 else if (t == VT_LDOUBLE)
3084 vpush_helper_func(TOK___floatundixf);
3085 #endif
3086 else
3087 vpush_helper_func(TOK___floatundidf);
3088 vrott(2);
3089 gfunc_call(1);
3090 vpushi(0);
3091 PUT_R_RET(vtop, t);
3092 } else {
3093 gen_cvt_itof(t);
3096 #endif
3098 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64
3099 #define gen_cvt_ftoi1 gen_cvt_ftoi
3100 #else
3101 /* generic ftoi for unsigned long long case */
3102 static void gen_cvt_ftoi1(int t)
3104 int st;
3105 if (t == (VT_LLONG | VT_UNSIGNED)) {
3106 /* not handled natively */
3107 st = vtop->type.t & VT_BTYPE;
3108 if (st == VT_FLOAT)
3109 vpush_helper_func(TOK___fixunssfdi);
3110 #if LDOUBLE_SIZE != 8
3111 else if (st == VT_LDOUBLE)
3112 vpush_helper_func(TOK___fixunsxfdi);
3113 #endif
3114 else
3115 vpush_helper_func(TOK___fixunsdfdi);
3116 vrott(2);
3117 gfunc_call(1);
3118 vpushi(0);
3119 PUT_R_RET(vtop, t);
3120 } else {
3121 gen_cvt_ftoi(t);
3124 #endif
3126 /* special delayed cast for char/short */
3127 static void force_charshort_cast(void)
3129 int sbt = BFGET(vtop->r, VT_MUSTCAST) == 2 ? VT_LLONG : VT_INT;
3130 int dbt = vtop->type.t;
3131 vtop->r &= ~VT_MUSTCAST;
3132 vtop->type.t = sbt;
3133 gen_cast_s(dbt == VT_BOOL ? VT_BYTE|VT_UNSIGNED : dbt);
3134 vtop->type.t = dbt;
3137 static void gen_cast_s(int t)
3139 CType type;
3140 type.t = t;
3141 type.ref = NULL;
3142 gen_cast(&type);
3145 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
3146 static void gen_cast(CType *type)
3148 int sbt, dbt, sf, df, c;
3149 int dbt_bt, sbt_bt, ds, ss, bits, trunc;
3151 /* special delayed cast for char/short */
3152 if (vtop->r & VT_MUSTCAST)
3153 force_charshort_cast();
3155 /* bitfields first get cast to ints */
3156 if (vtop->type.t & VT_BITFIELD)
3157 gv(RC_INT);
3159 dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
3160 sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
3161 if (sbt == VT_FUNC)
3162 sbt = VT_PTR;
3164 again:
3165 if (sbt != dbt) {
3166 sf = is_float(sbt);
3167 df = is_float(dbt);
3168 dbt_bt = dbt & VT_BTYPE;
3169 sbt_bt = sbt & VT_BTYPE;
3170 if (dbt_bt == VT_VOID)
3171 goto done;
3172 if (sbt_bt == VT_VOID) {
3173 error:
3174 cast_error(&vtop->type, type);
3177 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
3178 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
3179 /* don't try to convert to ldouble when cross-compiling
3180 (except when it's '0' which is needed for arm:gen_negf()) */
3181 if (dbt_bt == VT_LDOUBLE && !nocode_wanted && (sf || vtop->c.i != 0))
3182 c = 0;
3183 #endif
3184 if (c) {
3185 /* constant case: we can do it now */
3186 /* XXX: in ISOC, cannot do it if error in convert */
3187 if (sbt == VT_FLOAT)
3188 vtop->c.ld = vtop->c.f;
3189 else if (sbt == VT_DOUBLE)
3190 vtop->c.ld = vtop->c.d;
3192 if (df) {
3193 if (sbt_bt == VT_LLONG) {
3194 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
3195 vtop->c.ld = vtop->c.i;
3196 else
3197 vtop->c.ld = -(long double)-vtop->c.i;
3198 } else if(!sf) {
3199 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
3200 vtop->c.ld = (uint32_t)vtop->c.i;
3201 else
3202 vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
3205 if (dbt == VT_FLOAT)
3206 vtop->c.f = (float)vtop->c.ld;
3207 else if (dbt == VT_DOUBLE)
3208 vtop->c.d = (double)vtop->c.ld;
3209 } else if (sf && dbt == VT_BOOL) {
3210 vtop->c.i = (vtop->c.ld != 0);
3211 } else {
3212 if(sf)
3213 vtop->c.i = vtop->c.ld;
3214 else if (sbt_bt == VT_LLONG || (PTR_SIZE == 8 && sbt == VT_PTR))
3216 else if (sbt & VT_UNSIGNED)
3217 vtop->c.i = (uint32_t)vtop->c.i;
3218 else
3219 vtop->c.i = ((uint32_t)vtop->c.i | -(vtop->c.i & 0x80000000));
3221 if (dbt_bt == VT_LLONG || (PTR_SIZE == 8 && dbt == VT_PTR))
3223 else if (dbt == VT_BOOL)
3224 vtop->c.i = (vtop->c.i != 0);
3225 else {
3226 uint32_t m = dbt_bt == VT_BYTE ? 0xff :
3227 dbt_bt == VT_SHORT ? 0xffff :
3228 0xffffffff;
3229 vtop->c.i &= m;
3230 if (!(dbt & VT_UNSIGNED))
3231 vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
3234 goto done;
3236 } else if (dbt == VT_BOOL
3237 && (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM))
3238 == (VT_CONST | VT_SYM)) {
3239 /* addresses are considered non-zero (see tcctest.c:sinit23) */
3240 vtop->r = VT_CONST;
3241 vtop->c.i = 1;
3242 goto done;
3245 /* cannot generate code for global or static initializers */
3246 if (nocode_wanted & DATA_ONLY_WANTED)
3247 goto done;
3249 /* non constant case: generate code */
3250 if (dbt == VT_BOOL) {
3251 gen_test_zero(TOK_NE);
3252 goto done;
3255 if (sf || df) {
3256 if (sf && df) {
3257 /* convert from fp to fp */
3258 gen_cvt_ftof(dbt);
3259 } else if (df) {
3260 /* convert int to fp */
3261 gen_cvt_itof1(dbt);
3262 } else {
3263 /* convert fp to int */
3264 sbt = dbt;
3265 if (dbt_bt != VT_LLONG && dbt_bt != VT_INT)
3266 sbt = VT_INT;
3267 gen_cvt_ftoi1(sbt);
3268 goto again; /* may need char/short cast */
3270 goto done;
3273 ds = btype_size(dbt_bt);
3274 ss = btype_size(sbt_bt);
3275 if (ds == 0 || ss == 0)
3276 goto error;
3278 if (IS_ENUM(type->t) && type->ref->c < 0)
3279 tcc_error("cast to incomplete type");
3281 /* same size and no sign conversion needed */
3282 if (ds == ss && ds >= 4)
3283 goto done;
3284 if (dbt_bt == VT_PTR || sbt_bt == VT_PTR) {
3285 tcc_warning("cast between pointer and integer of different size");
3286 if (sbt_bt == VT_PTR) {
3287 /* put integer type to allow logical operations below */
3288 vtop->type.t = (PTR_SIZE == 8 ? VT_LLONG : VT_INT);
3292 /* processor allows { int a = 0, b = *(char*)&a; }
3293 That means that if we cast to less width, we can just
3294 change the type and read it still later. */
3295 #define ALLOW_SUBTYPE_ACCESS 1
3297 if (ALLOW_SUBTYPE_ACCESS && (vtop->r & VT_LVAL)) {
3298 /* value still in memory */
3299 if (ds <= ss)
3300 goto done;
3301 /* ss <= 4 here */
3302 if (ds <= 4 && !(dbt == (VT_SHORT | VT_UNSIGNED) && sbt == VT_BYTE)) {
3303 gv(RC_INT);
3304 goto done; /* no 64bit envolved */
3307 gv(RC_INT);
3309 trunc = 0;
3310 #if PTR_SIZE == 4
3311 if (ds == 8) {
3312 /* generate high word */
3313 if (sbt & VT_UNSIGNED) {
3314 vpushi(0);
3315 gv(RC_INT);
3316 } else {
3317 gv_dup();
3318 vpushi(31);
3319 gen_op(TOK_SAR);
3321 lbuild(dbt);
3322 } else if (ss == 8) {
3323 /* from long long: just take low order word */
3324 lexpand();
3325 vpop();
3327 ss = 4;
3329 #elif PTR_SIZE == 8
3330 if (ds == 8) {
3331 /* need to convert from 32bit to 64bit */
3332 if (sbt & VT_UNSIGNED) {
3333 #if defined(TCC_TARGET_RISCV64)
3334 /* RISC-V keeps 32bit vals in registers sign-extended.
3335 So here we need a zero-extension. */
3336 trunc = 32;
3337 #else
3338 goto done;
3339 #endif
3340 } else {
3341 gen_cvt_sxtw();
3342 goto done;
3344 ss = ds, ds = 4, dbt = sbt;
3345 } else if (ss == 8) {
3346 /* RISC-V keeps 32bit vals in registers sign-extended.
3347 So here we need a sign-extension for signed types and
3348 zero-extension. for unsigned types. */
3349 #if !defined(TCC_TARGET_RISCV64)
3350 trunc = 32; /* zero upper 32 bits for non RISC-V targets */
3351 #endif
3352 } else {
3353 ss = 4;
3355 #endif
3357 if (ds >= ss)
3358 goto done;
3359 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64 || defined TCC_TARGET_ARM64
3360 if (ss == 4) {
3361 gen_cvt_csti(dbt);
3362 goto done;
3364 #endif
3365 bits = (ss - ds) * 8;
3366 /* for unsigned, gen_op will convert SAR to SHR */
3367 vtop->type.t = (ss == 8 ? VT_LLONG : VT_INT) | (dbt & VT_UNSIGNED);
3368 vpushi(bits);
3369 gen_op(TOK_SHL);
3370 vpushi(bits - trunc);
3371 gen_op(TOK_SAR);
3372 vpushi(trunc);
3373 gen_op(TOK_SHR);
3375 done:
3376 vtop->type = *type;
3377 vtop->type.t &= ~ ( VT_CONSTANT | VT_VOLATILE | VT_ARRAY );
3380 /* return type size as known at compile time. Put alignment at 'a' */
3381 ST_FUNC int type_size(CType *type, int *a)
3383 Sym *s;
3384 int bt;
3386 bt = type->t & VT_BTYPE;
3387 if (bt == VT_STRUCT) {
3388 /* struct/union */
3389 s = type->ref;
3390 *a = s->r;
3391 return s->c;
3392 } else if (bt == VT_PTR) {
3393 if (type->t & VT_ARRAY) {
3394 int ts;
3396 s = type->ref;
3397 ts = type_size(&s->type, a);
3399 if (ts < 0 && s->c < 0)
3400 ts = -ts;
3402 return ts * s->c;
3403 } else {
3404 *a = PTR_SIZE;
3405 return PTR_SIZE;
3407 } else if (IS_ENUM(type->t) && type->ref->c < 0) {
3408 *a = 0;
3409 return -1; /* incomplete enum */
3410 } else if (bt == VT_LDOUBLE) {
3411 *a = LDOUBLE_ALIGN;
3412 return LDOUBLE_SIZE;
3413 } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
3414 #ifdef TCC_TARGET_I386
3415 #ifdef TCC_TARGET_PE
3416 *a = 8;
3417 #else
3418 *a = 4;
3419 #endif
3420 #elif defined(TCC_TARGET_ARM)
3421 #ifdef TCC_ARM_EABI
3422 *a = 8;
3423 #else
3424 *a = 4;
3425 #endif
3426 #else
3427 *a = 8;
3428 #endif
3429 return 8;
3430 } else if (bt == VT_INT || bt == VT_FLOAT) {
3431 *a = 4;
3432 return 4;
3433 } else if (bt == VT_SHORT) {
3434 *a = 2;
3435 return 2;
3436 } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
3437 *a = 8;
3438 return 16;
3439 } else {
3440 /* char, void, function, _Bool */
3441 *a = 1;
3442 return 1;
3446 /* push type size as known at runtime time on top of value stack. Put
3447 alignment at 'a' */
3448 static void vpush_type_size(CType *type, int *a)
3450 if (type->t & VT_VLA) {
3451 type_size(&type->ref->type, a);
3452 vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
3453 } else {
3454 int size = type_size(type, a);
3455 if (size < 0)
3456 tcc_error("unknown type size");
3457 #if PTR_SIZE == 8
3458 vpushll(size);
3459 #else
3460 vpushi(size);
3461 #endif
3465 /* return the pointed type of t */
3466 static inline CType *pointed_type(CType *type)
3468 return &type->ref->type;
3471 /* modify type so that its it is a pointer to type. */
3472 ST_FUNC void mk_pointer(CType *type)
3474 Sym *s;
3475 s = sym_push(SYM_FIELD, type, 0, -1);
3476 type->t = VT_PTR | (type->t & VT_STORAGE);
3477 type->ref = s;
3480 /* return true if type1 and type2 are exactly the same (including
3481 qualifiers).
3483 static int is_compatible_types(CType *type1, CType *type2)
3485 return compare_types(type1,type2,0);
3488 /* return true if type1 and type2 are the same (ignoring qualifiers).
3490 static int is_compatible_unqualified_types(CType *type1, CType *type2)
3492 return compare_types(type1,type2,1);
3495 static void cast_error(CType *st, CType *dt)
3497 type_incompatibility_error(st, dt, "cannot convert '%s' to '%s'");
3500 /* verify type compatibility to store vtop in 'dt' type */
3501 static void verify_assign_cast(CType *dt)
3503 CType *st, *type1, *type2;
3504 int dbt, sbt, qualwarn, lvl;
3506 st = &vtop->type; /* source type */
3507 dbt = dt->t & VT_BTYPE;
3508 sbt = st->t & VT_BTYPE;
3509 if (dt->t & VT_CONSTANT)
3510 tcc_warning("assignment of read-only location");
3511 switch(dbt) {
3512 case VT_VOID:
3513 if (sbt != dbt)
3514 tcc_error("assignment to void expression");
3515 break;
3516 case VT_PTR:
3517 /* special cases for pointers */
3518 /* '0' can also be a pointer */
3519 if (is_null_pointer(vtop))
3520 break;
3521 /* accept implicit pointer to integer cast with warning */
3522 if (is_integer_btype(sbt)) {
3523 tcc_warning("assignment makes pointer from integer without a cast");
3524 break;
3526 type1 = pointed_type(dt);
3527 if (sbt == VT_PTR)
3528 type2 = pointed_type(st);
3529 else if (sbt == VT_FUNC)
3530 type2 = st; /* a function is implicitly a function pointer */
3531 else
3532 goto error;
3533 if (is_compatible_types(type1, type2))
3534 break;
3535 for (qualwarn = lvl = 0;; ++lvl) {
3536 if (((type2->t & VT_CONSTANT) && !(type1->t & VT_CONSTANT)) ||
3537 ((type2->t & VT_VOLATILE) && !(type1->t & VT_VOLATILE)))
3538 qualwarn = 1;
3539 dbt = type1->t & (VT_BTYPE|VT_LONG);
3540 sbt = type2->t & (VT_BTYPE|VT_LONG);
3541 if (dbt != VT_PTR || sbt != VT_PTR)
3542 break;
3543 type1 = pointed_type(type1);
3544 type2 = pointed_type(type2);
3546 if (!is_compatible_unqualified_types(type1, type2)) {
3547 if ((dbt == VT_VOID || sbt == VT_VOID) && lvl == 0) {
3548 /* void * can match anything */
3549 } else if (dbt == sbt
3550 && is_integer_btype(sbt & VT_BTYPE)
3551 && IS_ENUM(type1->t) + IS_ENUM(type2->t)
3552 + !!((type1->t ^ type2->t) & VT_UNSIGNED) < 2) {
3553 /* Like GCC don't warn by default for merely changes
3554 in pointer target signedness. Do warn for different
3555 base types, though, in particular for unsigned enums
3556 and signed int targets. */
3557 } else {
3558 tcc_warning("assignment from incompatible pointer type");
3559 break;
3562 if (qualwarn)
3563 tcc_warning_c(warn_discarded_qualifiers)("assignment discards qualifiers from pointer target type");
3564 break;
3565 case VT_BYTE:
3566 case VT_SHORT:
3567 case VT_INT:
3568 case VT_LLONG:
3569 if (sbt == VT_PTR || sbt == VT_FUNC) {
3570 tcc_warning("assignment makes integer from pointer without a cast");
3571 } else if (sbt == VT_STRUCT) {
3572 goto case_VT_STRUCT;
3574 /* XXX: more tests */
3575 break;
3576 case VT_STRUCT:
3577 case_VT_STRUCT:
3578 if (!is_compatible_unqualified_types(dt, st)) {
3579 error:
3580 cast_error(st, dt);
3582 break;
3586 static void gen_assign_cast(CType *dt)
3588 verify_assign_cast(dt);
3589 gen_cast(dt);
/* store vtop in lvalue pushed on stack */
/* Stack contract: vtop[-1] is the destination lvalue, vtop is the value
   being stored.  After the store the value is left on the stack as the
   result of the assignment expression (one slot is popped). */
ST_FUNC void vstore(void)
{
    int sbt, dbt, ft, r, size, align, bit_size, bit_pos, delayed_cast;

    ft = vtop[-1].type.t;           /* full destination type, incl. bitfield info */
    sbt = vtop->type.t & VT_BTYPE;  /* source basic type */
    dbt = ft & VT_BTYPE;            /* destination basic type */
    verify_assign_cast(&vtop[-1].type);

    if (sbt == VT_STRUCT) {
        /* if structure, only generate pointer */
        /* structure assignment : generate memcpy */
        size = type_size(&vtop->type, &align);
        /* destination, keep on stack() as result */
        vpushv(vtop - 1);
#ifdef CONFIG_TCC_BCHECK
        if (vtop->r & VT_MUSTBOUND)
            gbound(); /* check would be wrong after gaddrof() */
#endif
        /* take the address of the destination so it can be passed
           to memmove/the native copy helper */
        vtop->type.t = VT_PTR;
        gaddrof();
        /* source */
        vswap();
#ifdef CONFIG_TCC_BCHECK
        if (vtop->r & VT_MUSTBOUND)
            gbound();
#endif
        vtop->type.t = VT_PTR;
        gaddrof();

#ifdef TCC_TARGET_NATIVE_STRUCT_COPY
        /* inline target-specific copy, unless bounds checking needs the
           helper call to be visible */
        if (1
#ifdef CONFIG_TCC_BCHECK
            && !tcc_state->do_bounds_check
#endif
            ) {
            gen_struct_copy(size);
        } else
#endif
        {
            /* type size */
            vpushi(size);
            /* Use memmove, rather than memcpy, as dest and src may be same: */
#ifdef TCC_ARM_EABI
            if(!(align & 7))
                vpush_helper_func(TOK_memmove8);
            else if(!(align & 3))
                vpush_helper_func(TOK_memmove4);
            else
#endif
            vpush_helper_func(TOK_memmove);
            vrott(4);
            gfunc_call(3);
        }

    } else if (ft & VT_BITFIELD) {
        /* bitfield store handling */

        /* save lvalue as expression result (example: s.b = s.a = n;) */
        vdup(), vtop[-1] = vtop[-2];

        bit_pos = BIT_POS(ft);
        bit_size = BIT_SIZE(ft);
        /* remove bit field info to avoid loops */
        vtop[-1].type.t = ft & ~VT_STRUCT_MASK;

        if (dbt == VT_BOOL) {
            /* bool normalizes the source to 0/1 before the bit insertion */
            gen_cast(&vtop[-1].type);
            vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
        }
        r = adjust_bf(vtop - 1, bit_pos, bit_size);
        if (dbt != VT_BOOL) {
            gen_cast(&vtop[-1].type);
            dbt = vtop[-1].type.t & VT_BTYPE;
        }
        if (r == VT_STRUCT) {
            /* bitfield straddles its container: byte-wise packed store */
            store_packed_bf(bit_pos, bit_size);
        } else {
            unsigned long long mask = (1ULL << bit_size) - 1;
            if (dbt != VT_BOOL) {
                /* mask source */
                if (dbt == VT_LLONG)
                    vpushll(mask);
                else
                    vpushi((unsigned)mask);
                gen_op('&');
            }
            /* shift source */
            vpushi(bit_pos);
            gen_op(TOK_SHL);
            vswap();
            /* duplicate destination */
            vdup();
            vrott(3);
            /* load destination, mask and or with source */
            if (dbt == VT_LLONG)
                vpushll(~(mask << bit_pos));
            else
                vpushi(~((unsigned)mask << bit_pos));
            gen_op('&');
            gen_op('|');
            /* store result */
            vstore();
            /* ... and discard */
            vpop();
        }
    } else if (dbt == VT_VOID) {
        /* store into (void) lvalue: discard the value */
        --vtop;
    } else {
        /* optimize char/short casts */
        delayed_cast = 0;
        if ((dbt == VT_BYTE || dbt == VT_SHORT)
            && is_integer_btype(sbt)
            ) {
            if ((vtop->r & VT_MUSTCAST)
                && btype_size(dbt) > btype_size(sbt)
                )
                force_charshort_cast();
            delayed_cast = 1;
        } else {
            gen_cast(&vtop[-1].type);
        }

#ifdef CONFIG_TCC_BCHECK
        /* bound check case */
        if (vtop[-1].r & VT_MUSTBOUND) {
            vswap();
            gbound();
            vswap();
        }
#endif
        gv(RC_TYPE(dbt)); /* generate value */

        if (delayed_cast) {
            /* keep the wide value; a later read will re-cast as needed */
            vtop->r |= BFVAL(VT_MUSTCAST, (sbt == VT_LLONG) + 1);
            //tcc_warning("deley cast %x -> %x", sbt, dbt);
            vtop->type.t = ft & VT_TYPE;
        }

        /* if lvalue was saved on stack, must read it */
        if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
            SValue sv;
            r = get_reg(RC_INT);
            sv.type.t = VT_PTRDIFF_T;
            sv.r = VT_LOCAL | VT_LVAL;
            sv.c.i = vtop[-1].c.i;
            load(r, &sv);
            vtop[-1].r = r | VT_LVAL;
        }

        r = vtop->r & VT_VALMASK;
        /* two word case handling :
           store second register at word + 4 (or +8 for x86-64) */
        if (USING_TWO_WORDS(dbt)) {
            int load_type = (dbt == VT_QFLOAT) ? VT_DOUBLE : VT_PTRDIFF_T;
            vtop[-1].type.t = load_type;
            store(r, vtop - 1);
            vswap();
            /* convert to int to increment easily */
            vtop->type.t = VT_PTRDIFF_T;
            gaddrof();
            vpushs(PTR_SIZE);
            gen_op('+');
            vtop->r |= VT_LVAL;
            vswap();
            vtop[-1].type.t = load_type;
            /* XXX: it works because r2 is spilled last ! */
            store(vtop->r2, vtop - 1);
        } else {
            /* single word */
            store(r, vtop - 1);
        }
        vswap();
        vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
    }
}
/* post defines POST/PRE add. c is the token ++ or -- */
/* Expects an lvalue on vtop; emits the in/decrement and leaves the
   expression result on the stack (the old value for post, the new
   value for pre). */
ST_FUNC void inc(int post, int c)
{
    test_lvalue();
    vdup(); /* save lvalue */
    if (post) {
        gv_dup(); /* duplicate value */
        vrotb(3);
        vrotb(3);
    }
    /* add constant */
    vpushi(c - TOK_MID); /* TOK_MID midpoint maps ++/-- to +1/-1 */
    gen_op('+');
    vstore(); /* store value */
    if (post)
        vpop(); /* if post op, return saved value */
}
/* Parse one or more adjacent string-literal tokens and concatenate
   them (C string-literal pasting).  MSG is the error message used if
   the current token is not a string.  Returns a pointer to the
   file-global 'initstr' buffer, so the result is only valid until the
   next call. */
ST_FUNC CString* parse_mult_str (const char *msg)
{
    /* read the string */
    if (tok != TOK_STR)
        expect(msg);
    cstr_reset(&initstr);
    while (tok == TOK_STR) {
        /* XXX: add \0 handling too ? */
        cstr_cat(&initstr, tokc.str.data, -1);
        next();
    }
    cstr_ccat(&initstr, '\0');
    return &initstr;
}
/* If I is >= 1 and a power of two, returns log2(i)+1.
   If I is 0 returns 0.  (For other positive values the result is
   floor(log2(i))+1, i.e. the index of the highest set bit plus one.) */
ST_FUNC int exact_log2p1(int i)
{
    int n = 0;

    if (i == 0)
        return 0;
    /* peel off whole bytes first, then single bits */
    while (i >= 256) {
        i >>= 8;
        n += 8;
    }
    while (i >= 2) {
        i >>= 1;
        n += 1;
    }
    return n + 1;
}
/* Parse __attribute__((...)) GNUC extension. */
/* Accumulates every recognized attribute into *AD; unknown attributes
   are warned about and their parenthesized arguments skipped.  The
   'redo' loop consumes a whole sequence of consecutive attribute
   lists. */
static void parse_attribute(AttributeDef *ad)
{
    int t, n;
    char *astr;

redo:
    if (tok != TOK_ATTRIBUTE1 && tok != TOK_ATTRIBUTE2)
        return;
    next();
    /* attribute lists are doubly parenthesized: __attribute__((...)) */
    skip('(');
    skip('(');
    while (tok != ')') {
        if (tok < TOK_IDENT)
            expect("attribute name");
        t = tok;
        next();
        switch(t) {
        case TOK_CLEANUP1:
        case TOK_CLEANUP2:
        {
            Sym *s;

            skip('(');
            s = sym_find(tok);
            if (!s) {
                tcc_warning_c(warn_implicit_function_declaration)(
                    "implicit declaration of function '%s'", get_tok_str(tok, &tokc));
                s = external_global_sym(tok, &func_old_type);
            } else if ((s->type.t & VT_BTYPE) != VT_FUNC)
                tcc_error("'%s' is not declared as function", get_tok_str(tok, &tokc));
            ad->cleanup_func = s;
            next();
            skip(')');
            break;
        }
        case TOK_CONSTRUCTOR1:
        case TOK_CONSTRUCTOR2:
            ad->f.func_ctor = 1;
            break;
        case TOK_DESTRUCTOR1:
        case TOK_DESTRUCTOR2:
            ad->f.func_dtor = 1;
            break;
        case TOK_ALWAYS_INLINE1:
        case TOK_ALWAYS_INLINE2:
            ad->f.func_alwinl = 1;
            break;
        case TOK_SECTION1:
        case TOK_SECTION2:
            skip('(');
            astr = parse_mult_str("section name")->data;
            ad->section = find_section(tcc_state, astr);
            skip(')');
            break;
        case TOK_ALIAS1:
        case TOK_ALIAS2:
            skip('(');
            astr = parse_mult_str("alias(\"target\")")->data;
            /* save string as token, for later */
            ad->alias_target = tok_alloc_const(astr);
            skip(')');
            break;
        case TOK_VISIBILITY1:
        case TOK_VISIBILITY2:
            skip('(');
            astr = parse_mult_str("visibility(\"default|hidden|internal|protected\")")->data;
            if (!strcmp (astr, "default"))
                ad->a.visibility = STV_DEFAULT;
            else if (!strcmp (astr, "hidden"))
                ad->a.visibility = STV_HIDDEN;
            else if (!strcmp (astr, "internal"))
                ad->a.visibility = STV_INTERNAL;
            else if (!strcmp (astr, "protected"))
                ad->a.visibility = STV_PROTECTED;
            else
                expect("visibility(\"default|hidden|internal|protected\")");
            skip(')');
            break;
        case TOK_ALIGNED1:
        case TOK_ALIGNED2:
            if (tok == '(') {
                next();
                n = expr_const();
                if (n <= 0 || (n & (n - 1)) != 0)
                    tcc_error("alignment must be a positive power of two");
                skip(')');
            } else {
                /* bare 'aligned' means maximum useful alignment */
                n = MAX_ALIGN;
            }
            /* alignment stored as log2(n)+1 in a small bitfield */
            ad->a.aligned = exact_log2p1(n);
            if (n != 1 << (ad->a.aligned - 1))
                tcc_error("alignment of %d is larger than implemented", n);
            break;
        case TOK_PACKED1:
        case TOK_PACKED2:
            ad->a.packed = 1;
            break;
        case TOK_WEAK1:
        case TOK_WEAK2:
            ad->a.weak = 1;
            break;
        case TOK_NODEBUG1:
        case TOK_NODEBUG2:
            ad->a.nodebug = 1;
            break;
        case TOK_UNUSED1:
        case TOK_UNUSED2:
            /* currently, no need to handle it because tcc does not
               track unused objects */
            break;
        case TOK_NORETURN1:
        case TOK_NORETURN2:
            ad->f.func_noreturn = 1;
            break;
        case TOK_CDECL1:
        case TOK_CDECL2:
        case TOK_CDECL3:
            ad->f.func_call = FUNC_CDECL;
            break;
        case TOK_STDCALL1:
        case TOK_STDCALL2:
        case TOK_STDCALL3:
            ad->f.func_call = FUNC_STDCALL;
            break;
#ifdef TCC_TARGET_I386
        case TOK_REGPARM1:
        case TOK_REGPARM2:
            skip('(');
            n = expr_const();
            /* clamp to the 0..3 registers i386 can pass in */
            if (n > 3)
                n = 3;
            else if (n < 0)
                n = 0;
            if (n > 0)
                ad->f.func_call = FUNC_FASTCALL1 + n - 1;
            skip(')');
            break;
        case TOK_FASTCALL1:
        case TOK_FASTCALL2:
        case TOK_FASTCALL3:
            ad->f.func_call = FUNC_FASTCALLW;
            break;
#endif
        case TOK_MODE:
            skip('(');
            /* machine modes: stored as basic type + 1 (0 = unset) */
            switch(tok) {
            case TOK_MODE_DI:
                ad->attr_mode = VT_LLONG + 1;
                break;
            case TOK_MODE_QI:
                ad->attr_mode = VT_BYTE + 1;
                break;
            case TOK_MODE_HI:
                ad->attr_mode = VT_SHORT + 1;
                break;
            case TOK_MODE_SI:
            case TOK_MODE_word:
                ad->attr_mode = VT_INT + 1;
                break;
            default:
                tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok, NULL));
                break;
            }
            next();
            skip(')');
            break;
        case TOK_DLLEXPORT:
            ad->a.dllexport = 1;
            break;
        case TOK_NODECORATE:
            ad->a.nodecorate = 1;
            break;
        case TOK_DLLIMPORT:
            ad->a.dllimport = 1;
            break;
        default:
            tcc_warning_c(warn_unsupported)("'%s' attribute ignored", get_tok_str(t, NULL));
            /* skip parameters */
            if (tok == '(') {
                int parenthesis = 0;
                do {
                    if (tok == '(')
                        parenthesis++;
                    else if (tok == ')')
                        parenthesis--;
                    next();
                } while (parenthesis && tok != -1);
            }
            break;
        }
        if (tok != ',')
            break;
        next();
    }
    skip(')');
    skip(')');
    goto redo;
}
/* Find member token V in struct/union TYPE, descending into anonymous
   sub-structs/unions.  The byte offsets of all enclosing anonymous
   members are accumulated into *CUMOFS.  On the top-level call (V
   without SYM_FIELD set) a missing field or incomplete type is a hard
   error; recursive calls return NULL instead. */
static Sym * find_field (CType *type, int v, int *cumofs)
{
    Sym *s = type->ref;
    int v1 = v | SYM_FIELD;

    while ((s = s->next) != NULL) {
        if (s->v == v1) {
            *cumofs += s->c;
            return s;
        }
        if ((s->type.t & VT_BTYPE) == VT_STRUCT
            && s->v >= (SYM_FIRST_ANOM | SYM_FIELD)) {
            /* try to find field in anonymous sub-struct/union */
            Sym *ret = find_field (&s->type, v1, cumofs);
            if (ret) {
                *cumofs += s->c;
                return ret;
            }
        }
    }
    if (!(v & SYM_FIELD)) { /* top-level call */
        s = type->ref;
        if (s->c < 0)
            tcc_error("dereferencing incomplete type '%s'",
                get_tok_str(s->v & ~SYM_STRUCT, 0));
        else
            tcc_error("field not found: %s",
                get_tok_str(v, &tokc));
    }
    return NULL;
}
/* Detect duplicate member names in struct/union TYPE (including names
   pulled in from anonymous sub-structs).  Uses the SYM_FIELD bit in
   the global identifier table as a visit mark: callers invoke this
   twice, first with CHECK=1 to set marks and report duplicates, then
   with CHECK=0 to toggle the marks off again. */
static void check_fields (CType *type, int check)
{
    Sym *s = type->ref;

    while ((s = s->next) != NULL) {
        int v = s->v & ~SYM_FIELD;
        if (v < SYM_FIRST_ANOM) {
            TokenSym *ts = table_ident[v - TOK_IDENT];
            if (check && (ts->tok & SYM_FIELD))
                tcc_error("duplicate member '%s'", get_tok_str(v, NULL));
            ts->tok ^= SYM_FIELD;
        } else if ((s->type.t & VT_BTYPE) == VT_STRUCT)
            check_fields (&s->type, check);
    }
}
/* Compute the layout of struct/union TYPE: member offsets (stored in
   each field's 'c'), total size (type->ref->c) and alignment
   (type->ref->r), honoring attributes in AD, #pragma pack and the
   configured bitfield model (PCC/gcc vs. MS).  A second pass then
   rewrites bitfield access types so each bitfield can be loaded and
   stored through a single scalar where possible. */
static void struct_layout(CType *type, AttributeDef *ad)
{
    int size, align, maxalign, offset, c, bit_pos, bit_size;
    int packed, a, bt, prevbt, prev_bit_size;
    int pcc = !tcc_state->ms_bitfields;
    int pragma_pack = *tcc_state->pack_stack_ptr;
    Sym *f;

    maxalign = 1;
    offset = 0;
    c = 0;          /* running byte offset / final size */
    bit_pos = 0;    /* bit offset within the current bitfield unit */
    prevbt = VT_STRUCT; /* make it never match */
    prev_bit_size = 0;

//#define BF_DEBUG

    for (f = type->ref->next; f; f = f->next) {
        if (f->type.t & VT_BITFIELD)
            bit_size = BIT_SIZE(f->type.t);
        else
            bit_size = -1; /* not a bitfield */
        size = type_size(&f->type, &align);
        a = f->a.aligned ? 1 << (f->a.aligned - 1) : 0;
        packed = 0;

        if (pcc && bit_size == 0) {
            /* in pcc mode, packing does not affect zero-width bitfields */

        } else {
            /* in pcc mode, attribute packed overrides if set. */
            if (pcc && (f->a.packed || ad->a.packed))
                align = packed = 1;

            /* pragma pack overrides align if lesser and packs bitfields always */
            if (pragma_pack) {
                packed = 1;
                if (pragma_pack < align)
                    align = pragma_pack;
                /* in pcc mode pragma pack also overrides individual align */
                if (pcc && pragma_pack < a)
                    a = 0;
            }
        }
        /* some individual align was specified */
        if (a)
            align = a;

        if (type->ref->type.t == VT_UNION) {
            /* union: every member starts at offset 0 */
            if (pcc && bit_size >= 0)
                size = (bit_size + 7) >> 3;
            offset = 0;
            if (size > c)
                c = size;

        } else if (bit_size < 0) {
            /* plain (non-bitfield) struct member */
            if (pcc)
                c += (bit_pos + 7) >> 3; /* close any pending bitfield run */
            c = (c + align - 1) & -align;
            offset = c;
            if (size > 0)
                c += size;
            bit_pos = 0;
            prevbt = VT_STRUCT;
            prev_bit_size = 0;

        } else {
            /* A bit-field. Layout is more complicated. There are two
               options: PCC (GCC) compatible and MS compatible */
            if (pcc) {
                /* In PCC layout a bit-field is placed adjacent to the
                   preceding bit-fields, except if:
                   - it has zero-width
                   - an individual alignment was given
                   - it would overflow its base type container and
                     there is no packing */
                if (bit_size == 0) {
            new_field:
                    c = (c + ((bit_pos + 7) >> 3) + align - 1) & -align;
                    bit_pos = 0;
                } else if (f->a.aligned) {
                    goto new_field;
                } else if (!packed) {
                    int a8 = align * 8;
                    int ofs = ((c * 8 + bit_pos) % a8 + bit_size + a8 - 1) / a8;
                    if (ofs > size / align)
                        goto new_field;
                }

                /* in pcc mode, long long bitfields have type int if they fit */
                if (size == 8 && bit_size <= 32)
                    f->type.t = (f->type.t & ~VT_BTYPE) | VT_INT, size = 4;

                while (bit_pos >= align * 8)
                    c += align, bit_pos -= align * 8;
                offset = c;

                /* In PCC layout named bit-fields influence the alignment
                   of the containing struct using the base types alignment,
                   except for packed fields (which here have correct align). */
                if (f->v & SYM_FIRST_ANOM
                    // && bit_size // ??? gcc on ARM/rpi does that
                    )
                    align = 1;

            } else {
                bt = f->type.t & VT_BTYPE;
                if ((bit_pos + bit_size > size * 8)
                    || (bit_size > 0) == (bt != prevbt)
                    ) {
                    c = (c + align - 1) & -align;
                    offset = c;
                    bit_pos = 0;
                    /* In MS bitfield mode a bit-field run always uses
                       at least as many bits as the underlying type.
                       To start a new run it's also required that this
                       or the last bit-field had non-zero width. */
                    if (bit_size || prev_bit_size)
                        c += size;
                }
                /* In MS layout the records alignment is normally
                   influenced by the field, except for a zero-width
                   field at the start of a run (but by further zero-width
                   fields it is again). */
                if (bit_size == 0 && prevbt != bt)
                    align = 1;
                prevbt = bt;
                prev_bit_size = bit_size;
            }

            f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
                        | (bit_pos << VT_STRUCT_SHIFT);
            bit_pos += bit_size;
        }
        if (align > maxalign)
            maxalign = align;

#ifdef BF_DEBUG
        printf("set field %s offset %-2d size %-2d align %-2d",
               get_tok_str(f->v & ~SYM_FIELD, NULL), offset, size, align);
        if (f->type.t & VT_BITFIELD) {
            printf(" pos %-2d bits %-2d",
                    BIT_POS(f->type.t),
                    BIT_SIZE(f->type.t)
                    );
        }
        printf("\n");
#endif

        f->c = offset;
        f->r = 0;
    }

    if (pcc)
        c += (bit_pos + 7) >> 3;

    /* store size and alignment */
    a = bt = ad->a.aligned ? 1 << (ad->a.aligned - 1) : 1;
    if (a < maxalign)
        a = maxalign;
    type->ref->r = a;
    if (pragma_pack && pragma_pack < maxalign && 0 == pcc) {
        /* can happen if individual align for some member was given. In
           this case MSVC ignores maxalign when aligning the size */
        a = pragma_pack;
        if (a < bt)
            a = bt;
    }
    c = (c + a - 1) & -a;
    type->ref->c = c;

#ifdef BF_DEBUG
    printf("struct size %-2d align %-2d\n\n", c, a), fflush(stdout);
#endif

    /* check whether we can access bitfields by their type */
    for (f = type->ref->next; f; f = f->next) {
        int s, px, cx, c0;
        CType t;

        if (0 == (f->type.t & VT_BITFIELD))
            continue;
        f->type.ref = f;
        f->auxtype = -1;
        bit_size = BIT_SIZE(f->type.t);
        if (bit_size == 0)
            continue;
        bit_pos = BIT_POS(f->type.t);
        size = type_size(&f->type, &align);

        /* already accessible through its own type and within the struct? */
        if (bit_pos + bit_size <= size * 8 && f->c + size <= c
#ifdef TCC_TARGET_ARM
            && !(f->c & (align - 1))
#endif
            )
            continue;

        /* try to access the field using a different type */
        c0 = -1, s = align = 1;
        t.t = VT_BYTE;
        for (;;) {
            px = f->c * 8 + bit_pos;
            cx = (px >> 3) & -align;
            px = px - (cx << 3);
            if (c0 == cx)
                break; /* offset stabilized */
            s = (px + bit_size + 7) >> 3;
            if (s > 4) {
                t.t = VT_LLONG;
            } else if (s > 2) {
                t.t = VT_INT;
            } else if (s > 1) {
                t.t = VT_SHORT;
            } else {
                t.t = VT_BYTE;
            }
            s = type_size(&t, &align);
            c0 = cx;
        }

        if (px + bit_size <= s * 8 && cx + s <= c
#ifdef TCC_TARGET_ARM
            && !(cx & (align - 1))
#endif
            ) {
            /* update offset and bit position */
            f->c = cx;
            bit_pos = px;
            f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
                        | (bit_pos << VT_STRUCT_SHIFT);
            if (s != size)
                f->auxtype = t.t;
#ifdef BF_DEBUG
            printf("FIX field %s offset %-2d size %-2d align %-2d "
                   "pos %-2d bits %-2d\n",
                   get_tok_str(f->v & ~SYM_FIELD, NULL),
                   cx, s, align, px, bit_size);
#endif
        } else {
            /* fall back to load/store single-byte wise */
            f->auxtype = VT_STRUCT;
#ifdef BF_DEBUG
            printf("FIX field %s : load byte-wise\n",
                   get_tok_str(f->v & ~SYM_FIELD, NULL));
#endif
        }
    }
}
4319 static void do_Static_assert(void);
/* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
/* Parses a tag and/or a member list; fills *TYPE.  The tag symbol's
   'c' field tracks definition state (appears to be: -1 undefined,
   -2 currently being defined, >= 0 completed — confirm against
   sym_push callers). */
static void struct_decl(CType *type, int u)
{
    int v, c, size, align, flexible;
    int bit_size, bsize, bt;
    Sym *s, *ss, **ps;
    AttributeDef ad, ad1;
    CType type1, btype;

    memset(&ad, 0, sizeof ad);
    next();
    parse_attribute(&ad);
    if (tok != '{') {
        v = tok;
        next();
        /* struct already defined ? return it */
        if (v < TOK_IDENT)
            expect("struct/union/enum name");
        s = struct_find(v);
        if (s && (s->sym_scope == local_scope || tok != '{')) {
            if (u == s->type.t)
                goto do_decl;
            if (u == VT_ENUM && IS_ENUM(s->type.t))
                goto do_decl;
            tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
        }
    } else {
        /* untagged type: invent an anonymous symbol */
        v = anon_sym++;
    }
    /* Record the original enum/struct/union token. */
    type1.t = u == VT_ENUM ? u | VT_INT | VT_UNSIGNED : u;
    type1.ref = NULL;
    /* we put an undefined size for struct/union */
    s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
    s->r = 0; /* default alignment is zero as gcc */
do_decl:
    type->t = s->type.t;
    type->ref = s;

    if (tok == '{') {
        next();
        if (s->c != -1)
            tcc_error("struct/union/enum already defined");
        s->c = -2;
        /* cannot be empty */
        /* non empty enums are not allowed */
        ps = &s->next;
        if (u == VT_ENUM) {
            /* pl/nl track the most positive/negative enumerator values
               to pick the smallest integral type that holds them all */
            long long ll = 0, pl = 0, nl = 0;
            CType t;
            t.ref = s;
            /* enum symbols have static storage */
            t.t = VT_INT|VT_STATIC|VT_ENUM_VAL;
            for(;;) {
                v = tok;
                if (v < TOK_UIDENT)
                    expect("identifier");
                ss = sym_find(v);
                if (ss && !local_stack)
                    tcc_error("redefinition of enumerator '%s'",
                              get_tok_str(v, NULL));
                next();
                if (tok == '=') {
                    next();
                    ll = expr_const64();
                }
                ss = sym_push(v, &t, VT_CONST, 0);
                ss->enum_val = ll;
                *ps = ss, ps = &ss->next;
                if (ll < nl)
                    nl = ll;
                if (ll > pl)
                    pl = ll;
                if (tok != ',')
                    break;
                next();
                ll++; /* implicit value: previous + 1 */
                /* NOTE: we accept a trailing comma */
                if (tok == '}')
                    break;
            }
            skip('}');
            /* set integral type of the enum */
            t.t = VT_INT;
            if (nl >= 0) {
                if (pl != (unsigned)pl)
                    t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
                t.t |= VT_UNSIGNED;
            } else if (pl != (int)pl || nl != (int)nl)
                t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
            s->type.t = type->t = t.t | VT_ENUM;
            s->c = 0;
            /* set type for enum members */
            for (ss = s->next; ss; ss = ss->next) {
                ll = ss->enum_val;
                if (ll == (int)ll) /* default is int if it fits */
                    continue;
                if (t.t & VT_UNSIGNED) {
                    ss->type.t |= VT_UNSIGNED;
                    if (ll == (unsigned)ll)
                        continue;
                }
                ss->type.t = (ss->type.t & ~VT_BTYPE)
                    | (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
            }
        } else {
            /* struct/union member list */
            c = 0;        /* set once a real member has been seen */
            flexible = 0; /* set when a flexible array member was parsed */
            while (tok != '}') {
                if (tok == TOK_STATIC_ASSERT) {
                    do_Static_assert();
                    continue;
                }
                if (!parse_btype(&btype, &ad1, 0)) {
                    skip(';');
                    continue;
                }
                while (1) {
                    if (flexible)
                        tcc_error("flexible array member '%s' not at the end of struct",
                                  get_tok_str(v, NULL));
                    bit_size = -1;
                    v = 0;
                    type1 = btype;
                    if (tok != ':') {
                        if (tok != ';')
                            type_decl(&type1, &ad1, &v, TYPE_DIRECT);
                        if (v == 0) {
                            if ((type1.t & VT_BTYPE) != VT_STRUCT)
                                expect("identifier");
                            else {
                                int v = btype.ref->v;
                                if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
                                    /* named struct member without declarator:
                                       only valid as MS extension */
                                    if (tcc_state->ms_extensions == 0)
                                        expect("identifier");
                                }
                            }
                        }
                        if (type_size(&type1, &align) < 0) {
                            if ((u == VT_STRUCT) && (type1.t & VT_ARRAY) && c)
                                flexible = 1;
                            else
                                tcc_error("field '%s' has incomplete type",
                                          get_tok_str(v, NULL));
                        }
                        if ((type1.t & VT_BTYPE) == VT_FUNC ||
                            (type1.t & VT_BTYPE) == VT_VOID ||
                            (type1.t & VT_STORAGE))
                            tcc_error("invalid type for '%s'",
                                      get_tok_str(v, NULL));
                    }
                    if (tok == ':') {
                        next();
                        bit_size = expr_const();
                        /* XXX: handle v = 0 case for messages */
                        if (bit_size < 0)
                            tcc_error("negative width in bit-field '%s'",
                                      get_tok_str(v, NULL));
                        if (v && bit_size == 0)
                            tcc_error("zero width for bit-field '%s'",
                                      get_tok_str(v, NULL));
                        parse_attribute(&ad1);
                    }
                    size = type_size(&type1, &align);
                    if (bit_size >= 0) {
                        bt = type1.t & VT_BTYPE;
                        if (bt != VT_INT &&
                            bt != VT_BYTE &&
                            bt != VT_SHORT &&
                            bt != VT_BOOL &&
                            bt != VT_LLONG)
                            tcc_error("bitfields must have scalar type");
                        bsize = size * 8;
                        if (bit_size > bsize) {
                            tcc_error("width of '%s' exceeds its type",
                                      get_tok_str(v, NULL));
                        } else if (bit_size == bsize
                                    && !ad.a.packed && !ad1.a.packed) {
                            /* no need for bit fields */
                            ;
                        } else if (bit_size == 64) {
                            tcc_error("field width 64 not implemented");
                        } else {
                            type1.t = (type1.t & ~VT_STRUCT_MASK)
                                | VT_BITFIELD
                                | (bit_size << (VT_STRUCT_SHIFT + 6));
                        }
                    }
                    if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
                        /* Remember we've seen a real field to check
                           for placement of flexible array member. */
                        c = 1;
                    }
                    /* If member is a struct or bit-field, enforce
                       placing into the struct (as anonymous). */
                    if (v == 0 &&
                        ((type1.t & VT_BTYPE) == VT_STRUCT ||
                         bit_size >= 0)) {
                        v = anon_sym++;
                    }
                    if (v) {
                        ss = sym_push(v | SYM_FIELD, &type1, 0, 0);
                        ss->a = ad1.a;
                        *ps = ss;
                        ps = &ss->next;
                    }
                    if (tok == ';' || tok == TOK_EOF)
                        break;
                    skip(',');
                }
                skip(';');
            }
            skip('}');
            parse_attribute(&ad);
            if (ad.cleanup_func) {
                tcc_warning("attribute '__cleanup__' ignored on type");
            }
            check_fields(type, 1);
            check_fields(type, 0);
            struct_layout(type, &ad);
            if (debug_modes)
                tcc_debug_fix_anon(tcc_state, type);
        }
    }
}
4547 static void sym_to_attr(AttributeDef *ad, Sym *s)
4549 merge_symattr(&ad->a, &s->a);
4550 merge_funcattr(&ad->f, &s->f);
4553 /* Add type qualifiers to a type. If the type is an array then the qualifiers
4554 are added to the element type, copied because it could be a typedef. */
4555 static void parse_btype_qualify(CType *type, int qualifiers)
4557 while (type->t & VT_ARRAY) {
4558 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
4559 type = &type->ref->type;
4561 type->t |= qualifiers;
/* return 0 if no type declaration. otherwise, return the basic type
   and skip it.
 */
/* Accumulates declaration specifiers (basic types, qualifiers,
   storage classes, attributes, typedef names) into *TYPE and *AD.
   IGNORE_LABEL stops a typedef name followed by ':' from being taken
   as a type (it is a label).  'bt'/'st' track the basic and size
   specifier seen so far to reject invalid combinations. */
static int parse_btype(CType *type, AttributeDef *ad, int ignore_label)
{
    int t, u, bt, st, type_found, typespec_found, g, n;
    Sym *s;
    CType type1;

    memset(ad, 0, sizeof(AttributeDef));
    type_found = 0;
    typespec_found = 0;
    t = VT_INT;
    bt = st = -1; /* -1: not seen, -2: came from a typedef */
    type->ref = NULL;

    while(1) {
        switch(tok) {
        case TOK_EXTENSION:
            /* currently, we really ignore extension */
            next();
            continue;

        /* basic types */
        case TOK_CHAR:
            u = VT_BYTE;
        basic_type:
            next();
        basic_type1:
            if (u == VT_SHORT || u == VT_LONG) {
                if (st != -1 || (bt != -1 && bt != VT_INT))
                    tmbt: tcc_error("too many basic types");
                st = u;
            } else {
                if (bt != -1 || (st != -1 && u != VT_INT))
                    goto tmbt;
                bt = u;
            }
            if (u != VT_INT)
                t = (t & ~(VT_BTYPE|VT_LONG)) | u;
            typespec_found = 1;
            break;
        case TOK_VOID:
            u = VT_VOID;
            goto basic_type;
        case TOK_SHORT:
            u = VT_SHORT;
            goto basic_type;
        case TOK_INT:
            u = VT_INT;
            goto basic_type;
        case TOK_ALIGNAS:
            /* _Alignas(type) or _Alignas(constant-expression) */
            { int n;
              AttributeDef ad1;
              next();
              skip('(');
              memset(&ad1, 0, sizeof(AttributeDef));
              if (parse_btype(&type1, &ad1, 0)) {
                  type_decl(&type1, &ad1, &n, TYPE_ABSTRACT);
                  if (ad1.a.aligned)
                      n = 1 << (ad1.a.aligned - 1);
                  else
                      type_size(&type1, &n);
              } else {
                  n = expr_const();
                  if (n < 0 || (n & (n - 1)) != 0)
                      tcc_error("alignment must be a positive power of two");
              }
              skip(')');
              ad->a.aligned = exact_log2p1(n);
            }
            continue;
        case TOK_LONG:
            if ((t & VT_BTYPE) == VT_DOUBLE) {
                t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
            } else if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
                /* second 'long' promotes to long long */
                t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LLONG;
            } else {
                u = VT_LONG;
                goto basic_type;
            }
            next();
            break;
#ifdef TCC_TARGET_ARM64
        case TOK_UINT128:
            /* GCC's __uint128_t appears in some Linux header files. Make it a
               synonym for long double to get the size and alignment right. */
            u = VT_LDOUBLE;
            goto basic_type;
#endif
        case TOK_BOOL:
            u = VT_BOOL;
            goto basic_type;
        case TOK_COMPLEX:
            tcc_error("_Complex is not yet supported");
        case TOK_FLOAT:
            u = VT_FLOAT;
            goto basic_type;
        case TOK_DOUBLE:
            if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
                t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
            } else {
                u = VT_DOUBLE;
                goto basic_type;
            }
            next();
            break;
        case TOK_ENUM:
            struct_decl(&type1, VT_ENUM);
        basic_type2:
            u = type1.t;
            type->ref = type1.ref;
            goto basic_type1;
        case TOK_STRUCT:
            struct_decl(&type1, VT_STRUCT);
            goto basic_type2;
        case TOK_UNION:
            struct_decl(&type1, VT_UNION);
            goto basic_type2;

        /* type modifiers */
        case TOK__Atomic:
            next();
            type->t = t;
            parse_btype_qualify(type, VT_ATOMIC);
            t = type->t;
            if (tok == '(') {
                /* _Atomic(type) form */
                parse_expr_type(&type1);
                /* remove all storage modifiers except typedef */
                type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
                if (type1.ref)
                    sym_to_attr(ad, type1.ref);
                goto basic_type2;
            }
            break;
        case TOK_CONST1:
        case TOK_CONST2:
        case TOK_CONST3:
            type->t = t;
            parse_btype_qualify(type, VT_CONSTANT);
            t = type->t;
            next();
            break;
        case TOK_VOLATILE1:
        case TOK_VOLATILE2:
        case TOK_VOLATILE3:
            type->t = t;
            parse_btype_qualify(type, VT_VOLATILE);
            t = type->t;
            next();
            break;
        case TOK_SIGNED1:
        case TOK_SIGNED2:
        case TOK_SIGNED3:
            if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
                tcc_error("signed and unsigned modifier");
            t |= VT_DEFSIGN;
            next();
            typespec_found = 1;
            break;
        case TOK_REGISTER:
        case TOK_AUTO:
        case TOK_RESTRICT1:
        case TOK_RESTRICT2:
        case TOK_RESTRICT3:
            /* accepted and ignored */
            next();
            break;
        case TOK_UNSIGNED:
            if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
                tcc_error("signed and unsigned modifier");
            t |= VT_DEFSIGN | VT_UNSIGNED;
            next();
            typespec_found = 1;
            break;

        /* storage */
        case TOK_EXTERN:
            g = VT_EXTERN;
            goto storage;
        case TOK_STATIC:
            g = VT_STATIC;
            goto storage;
        case TOK_TYPEDEF:
            g = VT_TYPEDEF;
            goto storage;
        storage:
            if (t & (VT_EXTERN|VT_STATIC|VT_TYPEDEF) & ~g)
                tcc_error("multiple storage classes");
            t |= g;
            next();
            break;
        case TOK_INLINE1:
        case TOK_INLINE2:
        case TOK_INLINE3:
            t |= VT_INLINE;
            next();
            break;
        case TOK_NORETURN3:
            next();
            ad->f.func_noreturn = 1;
            break;
        /* GNUC attribute */
        case TOK_ATTRIBUTE1:
        case TOK_ATTRIBUTE2:
            parse_attribute(ad);
            if (ad->attr_mode) {
                /* __attribute__((mode(...))) forces the basic type */
                u = ad->attr_mode -1;
                t = (t & ~(VT_BTYPE|VT_LONG)) | u;
            }
            continue;
        /* GNUC typeof */
        case TOK_TYPEOF1:
        case TOK_TYPEOF2:
        case TOK_TYPEOF3:
            next();
            parse_expr_type(&type1);
            /* remove all storage modifiers except typedef */
            type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
            if (type1.ref)
                sym_to_attr(ad, type1.ref);
            goto basic_type2;
        case TOK_THREAD_LOCAL:
            tcc_error("_Thread_local is not implemented");
        default:
            if (typespec_found)
                goto the_end;
            s = sym_find(tok);
            if (!s || !(s->type.t & VT_TYPEDEF))
                goto the_end;

            n = tok, next();
            if (tok == ':' && ignore_label) {
                /* ignore if it's a label */
                unget_tok(n);
                goto the_end;
            }

            /* apply accumulated qualifiers to the typedef'd type */
            t &= ~(VT_BTYPE|VT_LONG);
            u = t & ~(VT_CONSTANT | VT_VOLATILE), t ^= u;
            type->t = (s->type.t & ~VT_TYPEDEF) | u;
            type->ref = s->type.ref;
            if (t)
                parse_btype_qualify(type, t);
            t = type->t;
            /* get attributes from typedef */
            sym_to_attr(ad, s);
            typespec_found = 1;
            st = bt = -2;
            break;
        }
        type_found = 1;
    }
the_end:
    if (tcc_state->char_is_unsigned) {
        if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
            t |= VT_UNSIGNED;
    }
    /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
    bt = t & (VT_BTYPE|VT_LONG);
    if (bt == VT_LONG)
        t |= LONG_SIZE == 8 ? VT_LLONG : VT_INT;
#ifdef TCC_USING_DOUBLE_FOR_LDOUBLE
    if (bt == VT_LDOUBLE)
        t = (t & ~(VT_BTYPE|VT_LONG)) | (VT_DOUBLE|VT_LONG);
#endif
    type->t = t;
    return type_found;
}
4833 /* convert a function parameter type (array to pointer and function to
4834 function pointer) */
4835 static inline void convert_parameter_type(CType *pt)
4837 /* remove const and volatile qualifiers (XXX: const could be used
4838 to indicate a const function parameter */
4839 pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
4840 /* array must be transformed to pointer according to ANSI C */
4841 pt->t &= ~VT_ARRAY;
4842 if ((pt->t & VT_BTYPE) == VT_FUNC) {
4843 mk_pointer(pt);
4847 ST_FUNC CString* parse_asm_str(void)
4849 skip('(');
4850 return parse_mult_str("string constant");
4853 /* Parse an asm label and return the token */
4854 static int asm_label_instr(void)
4856 int v;
4857 char *astr;
4859 next();
4860 astr = parse_asm_str()->data;
4861 skip(')');
4862 #ifdef ASM_DEBUG
4863 printf("asm_alias: \"%s\"\n", astr);
4864 #endif
4865 v = tok_alloc_const(astr);
4866 return v;
/* Parse the "post" part of a declarator: a function parameter list
   '(...)' or array brackets '[...]' following the declarator core.
   'type' is updated in place to the resulting function/array type.
   'storage' carries the storage bits of the declaration; 'td' is the
   TYPE_* bitmask controlling what is accepted.
   Returns 1 when a suffix was parsed, 0 to signal to the caller that a
   '(' actually started a nested (recursive) declarator instead. */
4869 static int post_type(CType *type, AttributeDef *ad, int storage, int td)
4871 int n, l, t1, arg_size, align;
4872 Sym **plast, *s, *first;
4873 AttributeDef ad1;
4874 CType pt;
4875 TokenString *vla_array_tok = NULL;
4876 int *vla_array_str = NULL;
4878 if (tok == '(') {
4879 /* function type, or recursive declarator (return if so) */
4880 next();
4881 if (TYPE_DIRECT == (td & (TYPE_DIRECT|TYPE_ABSTRACT)))
4882 return 0;
4883 if (tok == ')')
4884 l = 0;
4885 else if (parse_btype(&pt, &ad1, 0))
4886 l = FUNC_NEW;
4887 else if (td & (TYPE_DIRECT|TYPE_ABSTRACT)) {
4888 merge_attr (ad, &ad1);
4889 return 0;
4890 } else
4891 l = FUNC_OLD;
4893 first = NULL;
4894 plast = &first;
4895 arg_size = 0;
       /* parameters live in their own scope while being parsed */
4896 ++local_scope;
4897 if (l) {
4898 for(;;) {
4899 /* read param name and compute offset */
4900 if (l != FUNC_OLD) {
4901 if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
4902 break;
4903 type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT | TYPE_PARAM);
4904 if ((pt.t & VT_BTYPE) == VT_VOID)
4905 tcc_error("parameter declared as void");
4906 if (n == 0)
4907 n = SYM_FIELD;
4908 } else {
4909 n = tok;
4910 pt.t = VT_VOID; /* invalid type */
4911 pt.ref = NULL;
4912 next();
4914 if (n < TOK_UIDENT)
4915 expect("identifier");
4916 convert_parameter_type(&pt);
4917 arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
4918 /* these symbols may be evaluated for VLArrays (see below, under
4919 nocode_wanted) which is why we push them here as normal symbols
4920 temporarily. Example: int func(int a, int b[++a]); */
4921 s = sym_push(n, &pt, VT_LOCAL|VT_LVAL, 0);
4922 *plast = s;
4923 plast = &s->next;
4924 if (tok == ')')
4925 break;
4926 skip(',');
4927 if (l == FUNC_NEW && tok == TOK_DOTS) {
4928 l = FUNC_ELLIPSIS;
4929 next();
4930 break;
4932 if (l == FUNC_NEW && !parse_btype(&pt, &ad1, 0))
4933 tcc_error("invalid type");
4935 } else
4936 /* if no parameters, then old type prototype */
4937 l = FUNC_OLD;
4938 skip(')');
4939 /* remove parameter symbols from token table, keep on stack */
4940 if (first) {
4941 sym_pop(local_stack ? &local_stack : &global_stack, first->prev, 1);
4942 for (s = first; s; s = s->next)
4943 s->v |= SYM_FIELD;
4945 --local_scope;
4946 /* NOTE: const is ignored in returned type as it has a special
4947 meaning in gcc / C++ */
4948 type->t &= ~VT_CONSTANT;
4949 /* some ancient pre-K&R C allows a function to return an array
4950 and the array brackets to be put after the arguments, such
4951 that "int c()[]" means something like "int[] c()" */
4952 if (tok == '[') {
4953 next();
4954 skip(']'); /* only handle simple "[]" */
4955 mk_pointer(type);
4957 /* we push a anonymous symbol which will contain the function prototype */
4958 ad->f.func_args = arg_size;
4959 ad->f.func_type = l;
4960 s = sym_push(SYM_FIELD, type, 0, 0);
4961 s->a = ad->a;
4962 s->f = ad->f;
4963 s->next = first;
4964 type->t = VT_FUNC;
4965 type->ref = s;
4966 } else if (tok == '[') {
4967 int saved_nocode_wanted = nocode_wanted;
4968 /* array definition */
4969 next();
4970 n = -1;
4971 t1 = 0;
4972 if (td & TYPE_PARAM) while (1) {
4973 /* XXX The optional type-quals and static should only be accepted
4974 in parameter decls. The '*' as well, and then even only
4975 in prototypes (not function defs). */
4976 switch (tok) {
4977 case TOK_RESTRICT1: case TOK_RESTRICT2: case TOK_RESTRICT3:
4978 case TOK_CONST1:
4979 case TOK_VOLATILE1:
4980 case TOK_STATIC:
4981 case '*':
4982 next();
4983 continue;
4984 default:
4985 break;
4987 if (tok != ']') {
4988 /* Code generation is not done now but has to be done
4989 at start of function. Save code here for later use. */
4990 nocode_wanted = 1;
                /* a VLA size in a parameter: record the tokens so the size
                   expression can be re-evaluated at function start */
4991 skip_or_save_block(&vla_array_tok);
4992 unget_tok(0);
4993 vla_array_str = vla_array_tok->str;
4994 begin_macro(vla_array_tok, 2);
4995 next();
4996 gexpr();
4997 end_macro();
4998 next();
4999 goto check;
5001 break;
5003 } else if (tok != ']') {
5004 if (!local_stack || (storage & VT_STATIC))
5005 vpushi(expr_const());
5006 else {
5007 /* VLAs (which can only happen with local_stack && !VT_STATIC)
5008 length must always be evaluated, even under nocode_wanted,
5009 so that its size slot is initialized (e.g. under sizeof
5010 or typeof). */
5011 nocode_wanted = 0;
5012 gexpr();
5014 check:
5015 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5016 n = vtop->c.i;
5017 if (n < 0)
5018 tcc_error("invalid array size");
5019 } else {
5020 if (!is_integer_btype(vtop->type.t & VT_BTYPE))
5021 tcc_error("size of variable length array should be an integer");
5022 n = 0;
5023 t1 = VT_VLA;
5026 skip(']');
5027 /* parse next post type */
5028 post_type(type, ad, storage, (td & ~(TYPE_DIRECT|TYPE_ABSTRACT)) | TYPE_NEST);
5030 if ((type->t & VT_BTYPE) == VT_FUNC)
5031 tcc_error("declaration of an array of functions");
5032 if ((type->t & VT_BTYPE) == VT_VOID
5033 || type_size(type, &align) < 0)
5034 tcc_error("declaration of an array of incomplete type elements");
5036 t1 |= type->t & VT_VLA;
5038 if (t1 & VT_VLA) {
5039 if (n < 0) {
5040 if (td & TYPE_NEST)
5041 tcc_error("need explicit inner array size in VLAs");
5043 else {
             /* reserve a local int slot to hold the runtime size and store
                (element count * element size) into it */
5044 loc -= type_size(&int_type, &align);
5045 loc &= -align;
5046 n = loc;
5048 vpush_type_size(type, &align);
5049 gen_op('*');
5050 vset(&int_type, VT_LOCAL|VT_LVAL, n);
5051 vswap();
5052 vstore();
5055 if (n != -1)
5056 vpop();
5057 nocode_wanted = saved_nocode_wanted;
5059 /* we push an anonymous symbol which will contain the array
5060 element type */
5061 s = sym_push(SYM_FIELD, type, 0, n);
5062 type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
5063 type->ref = s;
5065 if (vla_array_str) {
5066 if (t1 & VT_VLA)
5067 s->vla_array_str = vla_array_str;
5068 else
5069 tok_str_free_str(vla_array_str);
5072 return 1;
5075 /* Parse a type declarator (except basic type), and return the type
5076 in 'type'. 'td' is a bitmask indicating which kind of type decl is
5077 expected. 'type' should contain the basic type. 'ad' is the
5078 attribute definition of the basic type. It can be modified by
5079 type_decl(). If this (possibly abstract) declarator is a pointer chain
5080 it returns the innermost pointed to type (equals *type, but is a different
5081 pointer), otherwise returns type itself, that's used for recursive calls. */
5082 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td)
5084 CType *post, *ret;
5085 int qualifiers, storage;
5087 /* recursive type, remove storage bits first, apply them later again */
5088 storage = type->t & VT_STORAGE;
5089 type->t &= ~VT_STORAGE;
5090 post = ret = type;
       /* leading pointer derivations with their qualifiers */
5092 while (tok == '*') {
5093 qualifiers = 0;
5094 redo:
5095 next();
5096 switch(tok) {
5097 case TOK__Atomic:
5098 qualifiers |= VT_ATOMIC;
5099 goto redo;
5100 case TOK_CONST1:
5101 case TOK_CONST2:
5102 case TOK_CONST3:
5103 qualifiers |= VT_CONSTANT;
5104 goto redo;
5105 case TOK_VOLATILE1:
5106 case TOK_VOLATILE2:
5107 case TOK_VOLATILE3:
5108 qualifiers |= VT_VOLATILE;
5109 goto redo;
5110 case TOK_RESTRICT1:
5111 case TOK_RESTRICT2:
5112 case TOK_RESTRICT3:
            /* 'restrict' is accepted but carries no type bit here */
5113 goto redo;
5114 /* XXX: clarify attribute handling */
5115 case TOK_ATTRIBUTE1:
5116 case TOK_ATTRIBUTE2:
5117 parse_attribute(ad);
5118 break;
5120 mk_pointer(type);
5121 type->t |= qualifiers;
5122 if (ret == type)
5123 /* innermost pointed to type is the one for the first derivation */
5124 ret = pointed_type(type);
5127 if (tok == '(') {
5128 /* This is possibly a parameter type list for abstract declarators
5129 ('int ()'), use post_type for testing this. */
5130 if (!post_type(type, ad, 0, td)) {
5131 /* It's not, so it's a nested declarator, and the post operations
5132 apply to the innermost pointed to type (if any). */
5133 /* XXX: this is not correct to modify 'ad' at this point, but
5134 the syntax is not clear */
5135 parse_attribute(ad);
5136 post = type_decl(type, ad, v, td);
5137 skip(')');
5138 } else
5139 goto abstract;
5140 } else if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
5141 /* type identifier */
5142 *v = tok;
5143 next();
5144 } else {
5145 abstract:
5146 if (!(td & TYPE_ABSTRACT))
5147 expect("identifier");
5148 *v = 0;
       /* storage bits apply only to the outermost (non-nested) declarator */
5150 post_type(post, ad, post != ret ? 0 : storage,
5151 td & ~(TYPE_DIRECT|TYPE_ABSTRACT));
5152 parse_attribute(ad);
5153 type->t |= storage;
5154 return ret;
5157 /* indirection with full error checking and bound check */
5158 ST_FUNC void indir(void)
       /* dereferencing a function designator is a no-op ('*f' == 'f') */
5160 if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
5161 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
5162 return;
5163 expect("pointer");
       /* load the pointer value itself into a register first */
5165 if (vtop->r & VT_LVAL)
5166 gv(RC_INT);
5167 vtop->type = *pointed_type(&vtop->type);
5168 /* Arrays and functions are never lvalues */
5169 if (!(vtop->type.t & (VT_ARRAY | VT_VLA))
5170 && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
5171 vtop->r |= VT_LVAL;
5172 /* if bound checking, the referenced pointer must be checked */
5173 #ifdef CONFIG_TCC_BCHECK
5174 if (tcc_state->do_bounds_check)
5175 vtop->r |= VT_MUSTBOUND;
5176 #endif
5180 /* pass a parameter to a function and do type checking and casting */
5181 static void gfunc_param_typed(Sym *func, Sym *arg)
5183 int func_type;
5184 CType type;
5186 func_type = func->f.func_type;
       /* unprototyped call, or argument in the '...' part of a variadic
          prototype: apply the default argument promotions */
5187 if (func_type == FUNC_OLD ||
5188 (func_type == FUNC_ELLIPSIS && arg == NULL)) {
5189 /* default casting : only need to convert float to double */
5190 if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
5191 gen_cast_s(VT_DOUBLE);
5192 } else if (vtop->type.t & VT_BITFIELD) {
5193 type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
5194 type.ref = vtop->type.ref;
5195 gen_cast(&type);
5196 } else if (vtop->r & VT_MUSTCAST) {
5197 force_charshort_cast();
5199 } else if (arg == NULL) {
5200 tcc_error("too many arguments to function");
5201 } else {
5202 type = arg->type;
5203 type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
5204 gen_assign_cast(&type);
5208 /* parse an expression and return its type without any side effect. */
5209 static void expr_type(CType *type, void (*expr_fn)(void))
       /* suppress code generation while evaluating just for the type */
5211 nocode_wanted++;
5212 expr_fn();
5213 *type = vtop->type;
5214 vpop();
5215 nocode_wanted--;
5218 /* parse an expression of the form '(type)' or '(expr)' and return its
5219 type */
5220 static void parse_expr_type(CType *type)
5222 int n;
5223 AttributeDef ad;
5225 skip('(');
       /* a type name starts with a basic type; otherwise it's an expression */
5226 if (parse_btype(type, &ad, 0)) {
5227 type_decl(type, &ad, &n, TYPE_ABSTRACT);
5228 } else {
5229 expr_type(type, gexpr);
5231 skip(')');
/* Parse a complete (abstract) type name into 'type'; errors out if the
   input does not start with a type. */
5234 static void parse_type(CType *type)
5236 AttributeDef ad;
5237 int n;
5239 if (!parse_btype(type, &ad, 0)) {
5240 expect("type");
5242 type_decl(type, &ad, &n, TYPE_ABSTRACT);
/* Parse the parenthesized argument list of a builtin according to the
   template 'args': each char describes one argument
     'e' expression, 't' type name, 'v' void*, 'V' const void*,
     's' char*, 'S' const char*, 'i' int, 'l' size_t.
   'nc' != 0 evaluates the arguments under nocode_wanted. */
5245 static void parse_builtin_params(int nc, const char *args)
5247 char c, sep = '(';
5248 CType type;
5249 if (nc)
5250 nocode_wanted++;
5251 next();
5252 if (*args == 0)
5253 skip(sep);
5254 while ((c = *args++)) {
5255 skip(sep);
5256 sep = ',';
5257 if (c == 't') {
5258 parse_type(&type);
5259 vpush(&type);
5260 continue;
5262 expr_eq();
5263 type.ref = NULL;
5264 type.t = 0;
5265 switch (c) {
5266 case 'e':
5267 continue;
5268 case 'V':
5269 type.t = VT_CONSTANT;
            /* fall through: 'V' is const-qualified 'v' */
5270 case 'v':
5271 type.t |= VT_VOID;
5272 mk_pointer (&type);
5273 break;
5274 case 'S':
5275 type.t = VT_CONSTANT;
            /* fall through: 'S' is const-qualified 's' */
5276 case 's':
5277 type.t |= char_type.t;
5278 mk_pointer (&type);
5279 break;
5280 case 'i':
5281 type.t = VT_INT;
5282 break;
5283 case 'l':
5284 type.t = VT_SIZE_T;
5285 break;
5286 default:
5287 break;
5289 gen_assign_cast(&type);
5291 skip(')');
5292 if (nc)
5293 nocode_wanted--;
/* Parse one __atomic_* builtin call (token 'atok') and lower it to a
   call of the runtime helper "<name>_<size>".  The per-builtin argument
   and return signature is looked up in 'templates' below. */
5296 static void parse_atomic(int atok)
5298 int size, align, arg, t, save = 0;
5299 CType *atom, *atom_ptr, ct = {0};
5300 SValue store;
5301 char buf[40];
5302 static const char *const templates[] = {
5304 * Each entry consists of callback and function template.
5305 * The template represents argument types and return type.
5307 * ? void (return-only)
5308 * b bool
5309 * a atomic
5310 * A read-only atomic
5311 * p pointer to memory
5312 * v value
5313 * l load pointer
5314 * s save pointer
5315 * m memory model
5318 /* keep in order of appearance in tcctok.h: */
5319 /* __atomic_store */ "alm.?",
5320 /* __atomic_load */ "Asm.v",
5321 /* __atomic_exchange */ "alsm.v",
5322 /* __atomic_compare_exchange */ "aplbmm.b",
5323 /* __atomic_fetch_add */ "avm.v",
5324 /* __atomic_fetch_sub */ "avm.v",
5325 /* __atomic_fetch_or */ "avm.v",
5326 /* __atomic_fetch_xor */ "avm.v",
5327 /* __atomic_fetch_and */ "avm.v",
5328 /* __atomic_fetch_nand */ "avm.v",
5329 /* __atomic_and_fetch */ "avm.v",
5330 /* __atomic_sub_fetch */ "avm.v",
5331 /* __atomic_or_fetch */ "avm.v",
5332 /* __atomic_xor_fetch */ "avm.v",
5333 /* __atomic_and_fetch */ "avm.v",
5334 /* __atomic_nand_fetch */ "avm.v"
5336 const char *template = templates[(atok - TOK___atomic_store)];
5338 atom = atom_ptr = NULL;
5339 size = 0; /* pacify compiler */
5340 next();
5341 skip('(');
5342 for (arg = 0;;) {
5343 expr_eq();
5344 switch (template[arg]) {
5345 case 'a':
5346 case 'A':
5347 atom_ptr = &vtop->type;
5348 if ((atom_ptr->t & VT_BTYPE) != VT_PTR)
5349 expect("pointer");
5350 atom = pointed_type(atom_ptr);
5351 size = type_size(atom, &align);
            /* only power-of-two sizes up to 8 bytes are supported; the
               *_fetch/fetch_* ops additionally require an integral or
               pointer-sized target */
5352 if (size > 8
5353 || (size & (size - 1))
5354 || (atok > TOK___atomic_compare_exchange
5355 && (0 == btype_size(atom->t & VT_BTYPE)
5356 || (atom->t & VT_BTYPE) == VT_PTR)))
5357 expect("integral or integer-sized pointer target type");
5358 /* GCC does not care either: */
5359 /* if (!(atom->t & VT_ATOMIC))
5360 tcc_warning("pointer target declaration is missing '_Atomic'"); */
5361 break;
5363 case 'p':
5364 if ((vtop->type.t & VT_BTYPE) != VT_PTR
5365 || type_size(pointed_type(&vtop->type), &align) != size)
5366 tcc_error("pointer target type mismatch in argument %d", arg + 1);
5367 gen_assign_cast(atom_ptr);
5368 break;
5369 case 'v':
5370 gen_assign_cast(atom);
5371 break;
5372 case 'l':
5373 indir();
5374 gen_assign_cast(atom);
5375 break;
5376 case 's':
            /* remember where to store the result after the call */
5377 save = 1;
5378 indir();
5379 store = *vtop;
5380 vpop();
5381 break;
5382 case 'm':
5383 gen_assign_cast(&int_type);
5384 break;
5385 case 'b':
5386 ct.t = VT_BOOL;
5387 gen_assign_cast(&ct);
5388 break;
        /* '.' in the template separates arguments from the return type */
5390 if ('.' == template[++arg])
5391 break;
5392 skip(',');
5394 skip(')');
5396 ct.t = VT_VOID;
5397 switch (template[arg + 1]) {
5398 case 'b':
5399 ct.t = VT_BOOL;
5400 break;
5401 case 'v':
5402 ct = *atom;
5403 break;
       /* call the size-specialized helper, e.g. "__atomic_load_4" */
5406 sprintf(buf, "%s_%d", get_tok_str(atok, 0), size);
5407 vpush_helper_func(tok_alloc_const(buf));
5408 vrott(arg - save + 1);
5409 gfunc_call(arg - save);
5411 vpush(&ct);
5412 PUT_R_RET(vtop, ct.t);
5413 t = ct.t & VT_BTYPE;
5414 if (t == VT_BYTE || t == VT_SHORT || t == VT_BOOL) {
5415 #ifdef PROMOTE_RET
5416 vtop->r |= BFVAL(VT_MUSTCAST, 1);
5417 #else
5418 vtop->type.t = VT_INT;
5419 #endif
5421 gen_cast(&ct);
5422 if (save) {
5423 vpush(&ct);
5424 *vtop = store;
5425 vswap();
5426 vstore();
/* Parse a unary-expression (constants, strings, casts, sizeof, builtins,
   identifiers, prefix operators) followed by its postfix operators
   (++/--, member access, indexing, function calls).  The result is left
   on the value stack (vtop). */
5430 ST_FUNC void unary(void)
5432 int n, t, align, size, r, sizeof_caller;
5433 CType type;
5434 Sym *s;
5435 AttributeDef ad;
5437 /* generate line number info */
5438 if (debug_modes)
5439 tcc_debug_line(tcc_state), tcc_tcov_check_line (tcc_state, 1);
       /* remember whether sizeof is our caller; cleared for sub-expressions */
5441 sizeof_caller = in_sizeof;
5442 in_sizeof = 0;
5443 type.ref = NULL;
5444 /* XXX: GCC 2.95.3 does not generate a table although it should be
5445 better here */
5446 tok_next:
5447 switch(tok) {
5448 case TOK_EXTENSION:
5449 next();
5450 goto tok_next;
5451 case TOK_LCHAR:
5452 #ifdef TCC_TARGET_PE
5453 t = VT_SHORT|VT_UNSIGNED;
5454 goto push_tokc;
5455 #endif
5456 case TOK_CINT:
5457 case TOK_CCHAR:
5458 t = VT_INT;
5459 push_tokc:
5460 type.t = t;
5461 vsetc(&type, VT_CONST, &tokc);
5462 next();
5463 break;
5464 case TOK_CUINT:
5465 t = VT_INT | VT_UNSIGNED;
5466 goto push_tokc;
5467 case TOK_CLLONG:
5468 t = VT_LLONG;
5469 goto push_tokc;
5470 case TOK_CULLONG:
5471 t = VT_LLONG | VT_UNSIGNED;
5472 goto push_tokc;
5473 case TOK_CFLOAT:
5474 t = VT_FLOAT;
5475 goto push_tokc;
5476 case TOK_CDOUBLE:
5477 t = VT_DOUBLE;
5478 goto push_tokc;
5479 case TOK_CLDOUBLE:
5480 #ifdef TCC_USING_DOUBLE_FOR_LDOUBLE
5481 t = VT_DOUBLE | VT_LONG;
5482 #else
5483 t = VT_LDOUBLE;
5484 #endif
5485 goto push_tokc;
5486 case TOK_CLONG:
5487 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG;
5488 goto push_tokc;
5489 case TOK_CULONG:
5490 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG | VT_UNSIGNED;
5491 goto push_tokc;
5492 case TOK___FUNCTION__:
5493 if (!gnu_ext)
5494 goto tok_identifier;
5495 /* fall thru */
5496 case TOK___FUNC__:
5498 Section *sec;
5499 int len;
5500 /* special function name identifier */
5501 len = strlen(funcname) + 1;
5502 /* generate char[len] type */
5503 type.t = char_type.t;
5504 if (tcc_state->warn_write_strings & WARN_ON)
5505 type.t |= VT_CONSTANT;
5506 mk_pointer(&type);
5507 type.t |= VT_ARRAY;
5508 type.ref->c = len;
5509 sec = rodata_section;
5510 vpush_ref(&type, sec, sec->data_offset, len);
5511 if (!NODATA_WANTED)
5512 memcpy(section_ptr_add(sec, len), funcname, len);
5513 next();
5515 break;
5516 case TOK_LSTR:
5517 #ifdef TCC_TARGET_PE
5518 t = VT_SHORT | VT_UNSIGNED;
5519 #else
5520 t = VT_INT;
5521 #endif
5522 goto str_init;
5523 case TOK_STR:
5524 /* string parsing */
5525 t = char_type.t;
5526 str_init:
5527 if (tcc_state->warn_write_strings & WARN_ON)
5528 t |= VT_CONSTANT;
5529 type.t = t;
5530 mk_pointer(&type);
5531 type.t |= VT_ARRAY;
5532 memset(&ad, 0, sizeof(AttributeDef));
5533 ad.section = rodata_section;
5534 decl_initializer_alloc(&type, &ad, VT_CONST, 2, 0, 0);
5535 break;
       /* '(' introduces a cast, a compound literal, a statement
          expression (GNU), or a parenthesized expression */
5536 case '(':
5537 next();
5538 /* cast ? */
5539 if (parse_btype(&type, &ad, 0)) {
5540 type_decl(&type, &ad, &n, TYPE_ABSTRACT);
5541 skip(')');
5542 /* check ISOC99 compound literal */
5543 if (tok == '{') {
5544 /* data is allocated locally by default */
5545 if (global_expr)
5546 r = VT_CONST;
5547 else
5548 r = VT_LOCAL;
5549 /* all except arrays are lvalues */
5550 if (!(type.t & VT_ARRAY))
5551 r |= VT_LVAL;
5552 memset(&ad, 0, sizeof(AttributeDef));
5553 decl_initializer_alloc(&type, &ad, r, 1, 0, 0);
5554 } else {
5555 if (sizeof_caller) {
5556 vpush(&type);
5557 return;
5559 unary();
5560 gen_cast(&type);
5562 } else if (tok == '{') {
5563 int saved_nocode_wanted = nocode_wanted;
5564 if (CONST_WANTED && !NOEVAL_WANTED)
5565 expect("constant");
5566 if (0 == local_scope)
5567 tcc_error("statement expression outside of function");
5568 /* save all registers */
5569 save_regs(0);
5570 /* statement expression : we do not accept break/continue
5571 inside as GCC does. We do retain the nocode_wanted state,
5572 as statement expressions can't ever be entered from the
5573 outside, so any reactivation of code emission (from labels
5574 or loop heads) can be disabled again after the end of it. */
5575 block(1);
5576 /* If the statement expr can be entered, then we retain the current
5577 nocode_wanted state (from e.g. a 'return 0;' in the stmt-expr).
5578 If it can't be entered then the state is that from before the
5579 statement expression. */
5580 if (saved_nocode_wanted)
5581 nocode_wanted = saved_nocode_wanted;
5582 skip(')');
5583 } else {
5584 gexpr();
5585 skip(')');
5587 break;
5588 case '*':
5589 next();
5590 unary();
5591 indir();
5592 break;
5593 case '&':
5594 next();
5595 unary();
5596 /* functions names must be treated as function pointers,
5597 except for unary '&' and sizeof. Since we consider that
5598 functions are not lvalues, we only have to handle it
5599 there and in function calls. */
5600 /* arrays can also be used although they are not lvalues */
5601 if ((vtop->type.t & VT_BTYPE) != VT_FUNC &&
5602 !(vtop->type.t & (VT_ARRAY | VT_VLA)))
5603 test_lvalue();
5604 if (vtop->sym)
5605 vtop->sym->a.addrtaken = 1;
5606 mk_pointer(&vtop->type);
5607 gaddrof();
5608 break;
5609 case '!':
5610 next();
5611 unary();
5612 gen_test_zero(TOK_EQ);
5613 break;
5614 case '~':
5615 next();
5616 unary();
5617 vpushi(-1);
5618 gen_op('^');
5619 break;
5620 case '+':
5621 next();
5622 unary();
5623 if ((vtop->type.t & VT_BTYPE) == VT_PTR)
5624 tcc_error("pointer not accepted for unary plus");
5625 /* In order to force cast, we add zero, except for floating point
5626 where we really need an noop (otherwise -0.0 will be transformed
5627 into +0.0). */
5628 if (!is_float(vtop->type.t)) {
5629 vpushi(0);
5630 gen_op('+');
5632 break;
5633 case TOK_SIZEOF:
5634 case TOK_ALIGNOF1:
5635 case TOK_ALIGNOF2:
5636 case TOK_ALIGNOF3:
5637 t = tok;
5638 next();
5639 in_sizeof++;
5640 expr_type(&type, unary); /* Perform a in_sizeof = 0; */
5641 if (t == TOK_SIZEOF) {
5642 vpush_type_size(&type, &align);
5643 gen_cast_s(VT_SIZE_T);
5644 } else {
5645 type_size(&type, &align);
5646 s = NULL;
5647 if (vtop[1].r & VT_SYM)
5648 s = vtop[1].sym; /* hack: accessing previous vtop */
5649 if (s && s->a.aligned)
5650 align = 1 << (s->a.aligned - 1);
5651 vpushs(align);
5653 break;
5655 case TOK_builtin_expect:
5656 /* __builtin_expect is a no-op for now */
5657 parse_builtin_params(0, "ee");
5658 vpop();
5659 break;
5660 case TOK_builtin_types_compatible_p:
5661 parse_builtin_params(0, "tt");
5662 vtop[-1].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
5663 vtop[0].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
5664 n = is_compatible_types(&vtop[-1].type, &vtop[0].type);
5665 vtop -= 2;
5666 vpushi(n);
5667 break;
5668 case TOK_builtin_choose_expr:
5670 int64_t c;
5671 next();
5672 skip('(');
5673 c = expr_const64();
5674 skip(',');
          /* the not-chosen branch is parsed but not generated */
5675 if (!c) {
5676 nocode_wanted++;
5678 expr_eq();
5679 if (!c) {
5680 vpop();
5681 nocode_wanted--;
5683 skip(',');
5684 if (c) {
5685 nocode_wanted++;
5687 expr_eq();
5688 if (c) {
5689 vpop();
5690 nocode_wanted--;
5692 skip(')');
5694 break;
5695 case TOK_builtin_constant_p:
5696 constant_p = 1;
5697 parse_builtin_params(1, "e");
5698 n = constant_p &&
5699 (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
5700 !((vtop->r & VT_SYM) && vtop->sym->a.addrtaken);
5701 vtop--;
5702 vpushi(n);
5703 break;
5704 case TOK_builtin_frame_address:
5705 case TOK_builtin_return_address:
5707 int tok1 = tok;
5708 int64_t level;
5709 next();
5710 skip('(');
5711 level = expr_const64();
5712 if (level < 0) {
5713 tcc_error("%s only takes positive integers",
5714 tok1 == TOK_builtin_return_address ?
5715 "__builtin_return_address" :
5716 "__builtin_frame_address");
5718 skip(')');
5719 type.t = VT_VOID;
5720 mk_pointer(&type);
5721 vset(&type, VT_LOCAL, 0); /* local frame */
5722 while (level--) {
5723 #ifdef TCC_TARGET_RISCV64
5724 vpushi(2*PTR_SIZE);
5725 gen_op('-');
5726 #endif
5727 mk_pointer(&vtop->type);
5728 indir(); /* -> parent frame */
5730 if (tok1 == TOK_builtin_return_address) {
5731 // assume return address is just above frame pointer on stack
5732 #ifdef TCC_TARGET_ARM
5733 vpushi(2*PTR_SIZE);
5734 gen_op('+');
5735 #elif defined TCC_TARGET_RISCV64
5736 vpushi(PTR_SIZE);
5737 gen_op('-');
5738 #else
5739 vpushi(PTR_SIZE);
5740 gen_op('+');
5741 #endif
5742 mk_pointer(&vtop->type);
5743 indir();
5746 break;
5747 #ifdef TCC_TARGET_RISCV64
5748 case TOK_builtin_va_start:
5749 parse_builtin_params(0, "ee");
5750 r = vtop->r & VT_VALMASK;
5751 if (r == VT_LLOCAL)
5752 r = VT_LOCAL;
5753 if (r != VT_LOCAL)
5754 tcc_error("__builtin_va_start expects a local variable");
5755 gen_va_start();
5756 vstore();
5757 break;
5758 #endif
5759 #ifdef TCC_TARGET_X86_64
5760 #ifdef TCC_TARGET_PE
5761 case TOK_builtin_va_start:
5762 parse_builtin_params(0, "ee");
5763 r = vtop->r & VT_VALMASK;
5764 if (r == VT_LLOCAL)
5765 r = VT_LOCAL;
5766 if (r != VT_LOCAL)
5767 tcc_error("__builtin_va_start expects a local variable");
5768 vtop->r = r;
5769 vtop->type = char_pointer_type;
5770 vtop->c.i += 8;
5771 vstore();
5772 break;
5773 #else
5774 case TOK_builtin_va_arg_types:
5775 parse_builtin_params(0, "t");
5776 vpushi(classify_x86_64_va_arg(&vtop->type));
5777 vswap();
5778 vpop();
5779 break;
5780 #endif
5781 #endif
5783 #ifdef TCC_TARGET_ARM64
5784 case TOK_builtin_va_start: {
5785 parse_builtin_params(0, "ee");
5786 //xx check types
5787 gen_va_start();
5788 vpushi(0);
5789 vtop->type.t = VT_VOID;
5790 break;
5792 case TOK_builtin_va_arg: {
5793 parse_builtin_params(0, "et");
5794 type = vtop->type;
5795 vpop();
5796 //xx check types
5797 gen_va_arg(&type);
5798 vtop->type = type;
5799 break;
5801 case TOK___arm64_clear_cache: {
5802 parse_builtin_params(0, "ee");
5803 gen_clear_cache();
5804 vpushi(0);
5805 vtop->type.t = VT_VOID;
5806 break;
5808 #endif
5810 /* atomic operations */
5811 case TOK___atomic_store:
5812 case TOK___atomic_load:
5813 case TOK___atomic_exchange:
5814 case TOK___atomic_compare_exchange:
5815 case TOK___atomic_fetch_add:
5816 case TOK___atomic_fetch_sub:
5817 case TOK___atomic_fetch_or:
5818 case TOK___atomic_fetch_xor:
5819 case TOK___atomic_fetch_and:
5820 case TOK___atomic_fetch_nand:
5821 case TOK___atomic_add_fetch:
5822 case TOK___atomic_sub_fetch:
5823 case TOK___atomic_or_fetch:
5824 case TOK___atomic_xor_fetch:
5825 case TOK___atomic_and_fetch:
5826 case TOK___atomic_nand_fetch:
5827 parse_atomic(tok);
5828 break;
5830 /* pre operations */
5831 case TOK_INC:
5832 case TOK_DEC:
5833 t = tok;
5834 next();
5835 unary();
5836 inc(0, t);
5837 break;
5838 case '-':
5839 next();
5840 unary();
5841 if (is_float(vtop->type.t)) {
5842 gen_opif(TOK_NEG);
5843 } else {
5844 vpushi(0);
5845 vswap();
5846 gen_op('-');
5848 break;
5849 case TOK_LAND:
5850 if (!gnu_ext)
5851 goto tok_identifier;
5852 next();
5853 /* allow to take the address of a label */
5854 if (tok < TOK_UIDENT)
5855 expect("label identifier");
5856 s = label_find(tok);
5857 if (!s) {
5858 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
5859 } else {
5860 if (s->r == LABEL_DECLARED)
5861 s->r = LABEL_FORWARD;
5863 if ((s->type.t & VT_BTYPE) != VT_PTR) {
5864 s->type.t = VT_VOID;
5865 mk_pointer(&s->type);
5866 s->type.t |= VT_STATIC;
5868 vpushsym(&s->type, s);
5869 next();
5870 break;
       /* C11 _Generic selection */
5872 case TOK_GENERIC:
5874 CType controlling_type;
5875 int has_default = 0;
5876 int has_match = 0;
5877 int learn = 0;
5878 TokenString *str = NULL;
5879 int saved_nocode_wanted = nocode_wanted;
5880 nocode_wanted &= ~CONST_WANTED_MASK;
5882 next();
5883 skip('(');
5884 expr_type(&controlling_type, expr_eq);
5885 controlling_type.t &= ~(VT_CONSTANT | VT_VOLATILE | VT_ARRAY);
5886 if ((controlling_type.t & VT_BTYPE) == VT_FUNC)
5887 mk_pointer(&controlling_type);
5889 nocode_wanted = saved_nocode_wanted;
5891 for (;;) {
5892 learn = 0;
5893 skip(',');
5894 if (tok == TOK_DEFAULT) {
5895 if (has_default)
5896 tcc_error("too many 'default'");
5897 has_default = 1;
5898 if (!has_match)
5899 learn = 1;
5900 next();
5901 } else {
5902 AttributeDef ad_tmp;
5903 int itmp;
5904 CType cur_type;
5906 parse_btype(&cur_type, &ad_tmp, 0);
5907 type_decl(&cur_type, &ad_tmp, &itmp, TYPE_ABSTRACT);
5908 if (compare_types(&controlling_type, &cur_type, 0)) {
5909 if (has_match) {
5910 tcc_error("type match twice");
5912 has_match = 1;
5913 learn = 1;
5916 skip(':');
             /* only the selected association's tokens are kept for replay */
5917 if (learn) {
5918 if (str)
5919 tok_str_free(str);
5920 skip_or_save_block(&str);
5921 } else {
5922 skip_or_save_block(NULL);
5924 if (tok == ')')
5925 break;
5927 if (!str) {
5928 char buf[60];
5929 type_to_str(buf, sizeof buf, &controlling_type, NULL);
5930 tcc_error("type '%s' does not match any association", buf);
5932 begin_macro(str, 1);
5933 next();
5934 expr_eq();
5935 if (tok != TOK_EOF)
5936 expect(",");
5937 end_macro();
5938 next();
5939 break;
5941 // special qnan , snan and infinity values
5942 case TOK___NAN__:
5943 n = 0x7fc00000;
5944 special_math_val:
5945 vpushi(n);
5946 vtop->type.t = VT_FLOAT;
5947 next();
5948 break;
5949 case TOK___SNAN__:
5950 n = 0x7f800001;
5951 goto special_math_val;
5952 case TOK___INF__:
5953 n = 0x7f800000;
5954 goto special_math_val;
5956 default:
5957 tok_identifier:
5958 t = tok;
5959 next();
5960 if (t < TOK_UIDENT)
5961 expect("identifier");
5962 s = sym_find(t);
5963 if (!s || IS_ASM_SYM(s)) {
5964 const char *name = get_tok_str(t, NULL);
5965 if (tok != '(')
5966 tcc_error("'%s' undeclared", name);
5967 /* for simple function calls, we tolerate undeclared
5968 external reference to int() function */
5969 tcc_warning_c(warn_implicit_function_declaration)(
5970 "implicit declaration of function '%s'", name);
5971 s = external_global_sym(t, &func_old_type);
5974 r = s->r;
5975 /* A symbol that has a register is a local register variable,
5976 which starts out as VT_LOCAL value. */
5977 if ((r & VT_VALMASK) < VT_CONST)
5978 r = (r & ~VT_VALMASK) | VT_LOCAL;
5980 vset(&s->type, r, s->c);
5981 /* Point to s as backpointer (even without r&VT_SYM).
5982 Will be used by at least the x86 inline asm parser for
5983 regvars. */
5984 vtop->sym = s;
5986 if (r & VT_SYM) {
5987 vtop->c.i = 0;
5988 } else if (r == VT_CONST && IS_ENUM_VAL(s->type.t)) {
5989 vtop->c.i = s->enum_val;
5991 break;
5994 /* post operations */
5995 while (1) {
5996 if (tok == TOK_INC || tok == TOK_DEC) {
5997 inc(1, tok);
5998 next();
5999 } else if (tok == '.' || tok == TOK_ARROW || tok == TOK_CDOUBLE) {
6000 int qualifiers, cumofs = 0;
6001 /* field */
6002 if (tok == TOK_ARROW)
6003 indir();
6004 qualifiers = vtop->type.t & (VT_CONSTANT | VT_VOLATILE);
6005 test_lvalue();
6006 gaddrof();
6007 /* expect pointer on structure */
6008 if ((vtop->type.t & VT_BTYPE) != VT_STRUCT)
6009 expect("struct or union");
6010 if (tok == TOK_CDOUBLE)
6011 expect("field name");
6012 next();
6013 if (tok == TOK_CINT || tok == TOK_CUINT)
6014 expect("field name");
6015 s = find_field(&vtop->type, tok, &cumofs);
6016 /* add field offset to pointer */
6017 vtop->type = char_pointer_type; /* change type to 'char *' */
6018 vpushi(cumofs);
6019 gen_op('+');
6020 /* change type to field type, and set to lvalue */
6021 vtop->type = s->type;
6022 vtop->type.t |= qualifiers;
6023 /* an array is never an lvalue */
6024 if (!(vtop->type.t & VT_ARRAY)) {
6025 vtop->r |= VT_LVAL;
6026 #ifdef CONFIG_TCC_BCHECK
6027 /* if bound checking, the referenced pointer must be checked */
6028 if (tcc_state->do_bounds_check)
6029 vtop->r |= VT_MUSTBOUND;
6030 #endif
6032 next();
6033 } else if (tok == '[') {
6034 next();
6035 gexpr();
6036 gen_op('+');
6037 indir();
6038 skip(']');
6039 } else if (tok == '(') {
6040 SValue ret;
6041 Sym *sa;
6042 int nb_args, ret_nregs, ret_align, regsize, variadic;
6044 /* function call */
6045 if ((vtop->type.t & VT_BTYPE) != VT_FUNC) {
6046 /* pointer test (no array accepted) */
6047 if ((vtop->type.t & (VT_BTYPE | VT_ARRAY)) == VT_PTR) {
6048 vtop->type = *pointed_type(&vtop->type);
6049 if ((vtop->type.t & VT_BTYPE) != VT_FUNC)
6050 goto error_func;
6051 } else {
6052 error_func:
6053 expect("function pointer");
6055 } else {
6056 vtop->r &= ~VT_LVAL; /* no lvalue */
6058 /* get return type */
6059 s = vtop->type.ref;
6060 next();
6061 sa = s->next; /* first parameter */
6062 nb_args = regsize = 0;
6063 ret.r2 = VT_CONST;
6064 /* compute first implicit argument if a structure is returned */
6065 if ((s->type.t & VT_BTYPE) == VT_STRUCT) {
6066 variadic = (s->f.func_type == FUNC_ELLIPSIS);
6067 ret_nregs = gfunc_sret(&s->type, variadic, &ret.type,
6068 &ret_align, &regsize);
6069 if (ret_nregs <= 0) {
6070 /* get some space for the returned structure */
6071 size = type_size(&s->type, &align);
6072 #ifdef TCC_TARGET_ARM64
6073 /* On arm64, a small struct is return in registers.
6074 It is much easier to write it to memory if we know
6075 that we are allowed to write some extra bytes, so
6076 round the allocated space up to a power of 2: */
6077 if (size < 16)
6078 while (size & (size - 1))
6079 size = (size | (size - 1)) + 1;
6080 #endif
6081 loc = (loc - size) & -align;
6082 ret.type = s->type;
6083 ret.r = VT_LOCAL | VT_LVAL;
6084 /* pass it as 'int' to avoid structure arg passing
6085 problems */
6086 vseti(VT_LOCAL, loc);
6087 #ifdef CONFIG_TCC_BCHECK
6088 if (tcc_state->do_bounds_check)
6089 --loc;
6090 #endif
6091 ret.c = vtop->c;
6092 if (ret_nregs < 0)
6093 vtop--;
6094 else
6095 nb_args++;
6097 } else {
6098 ret_nregs = 1;
6099 ret.type = s->type;
6102 if (ret_nregs > 0) {
6103 /* return in register */
6104 ret.c.i = 0;
6105 PUT_R_RET(&ret, ret.type.t);
6107 if (tok != ')') {
6108 for(;;) {
6109 expr_eq();
6110 gfunc_param_typed(s, sa);
6111 nb_args++;
6112 if (sa)
6113 sa = sa->next;
6114 if (tok == ')')
6115 break;
6116 skip(',');
6119 if (sa)
6120 tcc_error("too few arguments to function");
6121 skip(')');
6122 gfunc_call(nb_args);
6124 if (ret_nregs < 0) {
6125 vsetc(&ret.type, ret.r, &ret.c);
6126 #ifdef TCC_TARGET_RISCV64
6127 arch_transfer_ret_regs(1);
6128 #endif
6129 } else {
6130 /* return value */
6131 for (r = ret.r + ret_nregs + !ret_nregs; r-- > ret.r;) {
6132 vsetc(&ret.type, r, &ret.c);
6133 vtop->r2 = ret.r2; /* Loop only happens when r2 is VT_CONST */
6136 /* handle packed struct return */
6137 if (((s->type.t & VT_BTYPE) == VT_STRUCT) && ret_nregs) {
6138 int addr, offset;
6140 size = type_size(&s->type, &align);
6141 /* We're writing whole regs often, make sure there's enough
6142 space. Assume register size is power of 2. */
6143 if (regsize > align)
6144 align = regsize;
6145 loc = (loc - size) & -align;
6146 addr = loc;
6147 offset = 0;
6148 for (;;) {
6149 vset(&ret.type, VT_LOCAL | VT_LVAL, addr + offset);
6150 vswap();
6151 vstore();
6152 vtop--;
6153 if (--ret_nregs == 0)
6154 break;
6155 offset += regsize;
6157 vset(&s->type, VT_LOCAL | VT_LVAL, addr);
6160 /* Promote char/short return values. This is matters only
6161 for calling function that were not compiled by TCC and
6162 only on some architectures. For those where it doesn't
6163 matter we expect things to be already promoted to int,
6164 but not larger. */
6165 t = s->type.t & VT_BTYPE;
6166 if (t == VT_BYTE || t == VT_SHORT || t == VT_BOOL) {
6167 #ifdef PROMOTE_RET
6168 vtop->r |= BFVAL(VT_MUSTCAST, 1);
6169 #else
6170 vtop->type.t = VT_INT;
6171 #endif
6174 if (s->f.func_noreturn) {
6175 if (debug_modes)
6176 tcc_tcov_block_end(tcc_state, -1);
6177 CODE_OFF();
6179 } else {
6180 break;
6185 #ifndef precedence_parser /* original top-down parser */
6187 static void expr_prod(void)
6189 int t;
6191 unary();
6192 while ((t = tok) == '*' || t == '/' || t == '%') {
6193 next();
6194 unary();
6195 gen_op(t);
6199 static void expr_sum(void)
6201 int t;
6203 expr_prod();
6204 while ((t = tok) == '+' || t == '-') {
6205 next();
6206 expr_prod();
6207 gen_op(t);
6211 static void expr_shift(void)
6213 int t;
6215 expr_sum();
6216 while ((t = tok) == TOK_SHL || t == TOK_SAR) {
6217 next();
6218 expr_sum();
6219 gen_op(t);
6223 static void expr_cmp(void)
6225 int t;
6227 expr_shift();
6228 while (((t = tok) >= TOK_ULE && t <= TOK_GT) ||
6229 t == TOK_ULT || t == TOK_UGE) {
6230 next();
6231 expr_shift();
6232 gen_op(t);
6236 static void expr_cmpeq(void)
6238 int t;
6240 expr_cmp();
6241 while ((t = tok) == TOK_EQ || t == TOK_NE) {
6242 next();
6243 expr_cmp();
6244 gen_op(t);
6248 static void expr_and(void)
6250 expr_cmpeq();
6251 while (tok == '&') {
6252 next();
6253 expr_cmpeq();
6254 gen_op('&');
6258 static void expr_xor(void)
6260 expr_and();
6261 while (tok == '^') {
6262 next();
6263 expr_and();
6264 gen_op('^');
6268 static void expr_or(void)
6270 expr_xor();
6271 while (tok == '|') {
6272 next();
6273 expr_xor();
6274 gen_op('|');
6278 static void expr_landor(int op);
6280 static void expr_land(void)
6282 expr_or();
6283 if (tok == TOK_LAND)
6284 expr_landor(tok);
6287 static void expr_lor(void)
6289 expr_land();
6290 if (tok == TOK_LOR)
6291 expr_landor(tok);
6294 # define expr_landor_next(op) op == TOK_LAND ? expr_or() : expr_land()
6295 #else /* defined precedence_parser */
6296 # define expr_landor_next(op) unary(), expr_infix(precedence(op) + 1)
6297 # define expr_lor() unary(), expr_infix(1)
/* Binding power of binary operator 'tok' for the precedence-climbing
   parser; higher binds tighter.  Returns 0 when 'tok' is not a binary
   operator. */
static int precedence(int tok)
{
    switch (tok) {
    case TOK_LOR: return 1;
    case TOK_LAND: return 2;
    case '|': return 3;
    case '^': return 4;
    case '&': return 5;
    case TOK_EQ: case TOK_NE: return 6;
    /* 'relat' also catches the TOK_ULE..TOK_GT range via the goto below */
    relat: case TOK_ULT: case TOK_UGE: return 7;
    case TOK_SHL: case TOK_SAR: return 8;
    case '+': case '-': return 9;
    case '*': case '/': case '%': return 10;
    default:
        /* remaining relational operators form a contiguous token range */
        if (tok >= TOK_ULE && tok <= TOK_GT)
            goto relat;
        return 0;
    }
}
/* memoized precedence table for single-byte tokens; multi-character
   operator tokens (>= 256) are still resolved by the function above */
static unsigned char prec[256];

/* fill the table once at startup */
static void init_prec(void)
{
    int i;
    for (i = 0; i < 256; i++)
        prec[i] = precedence(i);
}

/* Fast lookup replacing the function for the rest of this file.
   The argument is parenthesized so that any expression (e.g. a + b)
   expands correctly — the previous definition used bare 'i'. */
#define precedence(i) ((unsigned)(i) < 256 ? prec[(i)] : 0)
static void expr_landor(int op);

/* Precedence-climbing loop: consume binary operators whose precedence
   is >= p.  The left operand is already on the value stack (unary()
   was called by the expr_lor()/expr_landor_next() macros). */
static void expr_infix(int p)
{
    int t = tok, p2;
    while ((p2 = precedence(t)) >= p) {
        if (t == TOK_LOR || t == TOK_LAND) {
            /* && and || need jump-based short-circuit evaluation */
            expr_landor(t);
        } else {
            next();
            unary();
            /* right-hand operator binds tighter: recurse first */
            if (precedence(tok) > p2)
                expr_infix(p2 + 1);
            gen_op(t);
        }
        t = tok;
    }
}
6345 #endif
6347 /* Assuming vtop is a value used in a conditional context
6348 (i.e. compared with zero) return 0 if it's false, 1 if
6349 true and -1 if it can't be statically determined. */
6350 static int condition_3way(void)
6352 int c = -1;
6353 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
6354 (!(vtop->r & VT_SYM) || !vtop->sym->a.weak)) {
6355 vdup();
6356 gen_cast_s(VT_BOOL);
6357 c = vtop->c.i;
6358 vpop();
6360 return c;
/* Generate code for a chain of '&&' or '||' operators (op is TOK_LAND
   or TOK_LOR).  i is the short-circuit control value (1 for &&, 0 for
   ||).  cc stays 1 while every operand was compile-time constant; once
   an operand statically equals !i, the rest of the chain is parsed with
   code generation suppressed (f = 1). */
static void expr_landor(int op)
{
    int t = 0, cc = 1, f = 0, i = op == TOK_LAND, c;
    for(;;) {
        /* after short-circuit is decided, pretend operands equal i */
        c = f ? i : condition_3way();
        if (c < 0)
            save_regs(1), cc = 0;
        else if (c != i)
            nocode_wanted++, f = 1;
        if (tok != op)
            break;
        if (c < 0)
            t = gvtst(i, t); /* chain the runtime test jumps */
        else
            vpop();
        next();
        expr_landor_next(op);
    }
    if (cc || f) {
        /* fully constant (or short-circuited) result */
        vpop();
        vpushi(i ^ f);
        gsym(t);
        nocode_wanted -= f; /* undo the suppression started above */
    } else {
        /* runtime result: leave as VT_CMP with the jump chains attached */
        gvtst_set(i, t);
    }
}
6391 static int is_cond_bool(SValue *sv)
6393 if ((sv->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST
6394 && (sv->type.t & VT_BTYPE) == VT_INT)
6395 return (unsigned)sv->c.i < 2;
6396 if (sv->r == VT_CMP)
6397 return 1;
6398 return 0;
/* Parse and generate a conditional expression 'a ? b : c', including
   the GNU 'a ?: c' extension (g).  c is the statically known truth
   value of the condition (-1 when unknown); the untaken branch is
   parsed under nocode_wanted. */
static void expr_cond(void)
{
    int tt, u, r1, r2, rc, t1, t2, islv, c, g;
    SValue sv;
    CType type;

    expr_lor();
    if (tok == '?') {
        next();
        c = condition_3way();
        g = (tok == ':' && gnu_ext);
        tt = 0;
        if (!g) {
            if (c < 0) {
                save_regs(1);
                tt = gvtst(1, 0); /* jump to ':' branch when false */
            } else {
                vpop();
            }
        } else if (c < 0) {
            /* needed to avoid having different registers saved in
               each branch */
            save_regs(1);
            gv_dup(); /* the condition is also the result in 'a ?: c' */
            tt = gvtst(0, 0);
        }

        if (c == 0)
            nocode_wanted++;
        if (!g)
            gexpr();

        if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
            mk_pointer(&vtop->type);
        sv = *vtop; /* save value to handle it later */
        vtop--; /* no vpop so that FP stack is not flushed */

        if (g) {
            u = tt;
        } else if (c < 0) {
            u = gjmp(0); /* skip over the else branch */
            gsym(tt);
        } else
            u = 0;

        if (c == 0)
            nocode_wanted--;
        if (c == 1)
            nocode_wanted++;
        skip(':');
        expr_cond();

        if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
            mk_pointer(&vtop->type);

        /* cast operands to correct type according to ISOC rules */
        if (!combine_types(&type, &sv, vtop, '?'))
            type_incompatibility_error(&sv.type, &vtop->type,
                "type mismatch in conditional expression (have '%s' and '%s')");

        if (c < 0 && is_cond_bool(vtop) && is_cond_bool(&sv)) {
            /* optimize "if (f ? a > b : c || d) ..." for example, where normally
               "a < b" and "c || d" would be forced to "(int)0/1" first, whereas
               this code jumps directly to the if's then/else branches. */
            t1 = gvtst(0, 0);
            t2 = gjmp(0);
            gsym(u);
            vpushv(&sv);
            /* combine jump targets of 2nd op with VT_CMP of 1st op */
            gvtst_set(0, t1);
            gvtst_set(1, t2);
            gen_cast(&type);
            //   tcc_warning("two conditions expr_cond");
            return;
        }

        /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
           that `(expr ? a : b).mem` does not error with "lvalue expected" */
        islv = (vtop->r & VT_LVAL) && (sv.r & VT_LVAL) && VT_STRUCT == (type.t & VT_BTYPE);

        /* now we convert second operand */
        if (c != 1) {
            gen_cast(&type);
            if (islv) {
                mk_pointer(&vtop->type);
                gaddrof();
            } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
                gaddrof();
        }

        rc = RC_TYPE(type.t);
        /* for long longs, we use fixed registers to avoid having
           to handle a complicated move */
        if (USING_TWO_WORDS(type.t))
            rc = RC_RET(type.t);

        tt = r2 = 0;
        if (c < 0) {
            r2 = gv(rc);
            tt = gjmp(0);
        }
        gsym(u);
        if (c == 1)
            nocode_wanted--;

        /* this is horrible, but we must also convert first
           operand */
        if (c != 0) {
            *vtop = sv;
            gen_cast(&type);
            if (islv) {
                mk_pointer(&vtop->type);
                gaddrof();
            } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
                gaddrof();
        }

        if (c < 0) {
            /* force both branches into the same register */
            r1 = gv(rc);
            move_reg(r2, r1, islv ? VT_PTR : type.t);
            vtop->r = r2;
            gsym(tt);
        }

        if (islv)
            indir();
    }
}
6530 static void expr_eq(void)
6532 int t;
6534 expr_cond();
6535 if ((t = tok) == '=' || TOK_ASSIGN(t)) {
6536 test_lvalue();
6537 next();
6538 if (t == '=') {
6539 expr_eq();
6540 } else {
6541 vdup();
6542 expr_eq();
6543 gen_op(TOK_ASSIGN_OP(t));
6545 vstore();
6549 ST_FUNC void gexpr(void)
6551 while (1) {
6552 expr_eq();
6553 if (tok != ',')
6554 break;
6555 constant_p &= (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
6556 !((vtop->r & VT_SYM) && vtop->sym->a.addrtaken);
6557 vpop();
6558 next();
/* parse a constant expression and return value in vtop.  */
static void expr_const1(void)
{
    /* CONST_WANTED_BIT in nocode_wanted suppresses code generation
       while the constant expression is evaluated */
    nocode_wanted += CONST_WANTED_BIT;
    expr_cond();
    nocode_wanted -= CONST_WANTED_BIT;
}
6570 /* parse an integer constant and return its value. */
6571 static inline int64_t expr_const64(void)
6573 int64_t c;
6574 expr_const1();
6575 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM | VT_NONCONST)) != VT_CONST)
6576 expect("constant expression");
6577 c = vtop->c.i;
6578 vpop();
6579 return c;
6582 /* parse an integer constant and return its value.
6583 Complain if it doesn't fit 32bit (signed or unsigned). */
6584 ST_FUNC int expr_const(void)
6586 int c;
6587 int64_t wc = expr_const64();
6588 c = wc;
6589 if (c != wc && (unsigned)c != wc)
6590 tcc_error("constant exceeds 32 bit");
6591 return c;
/* ------------------------------------------------------------------------- */
/* return from function */

#ifndef TCC_TARGET_ARM64
/* Move the function result (on top of the value stack) to where the
   ABI wants it: return registers, the hidden struct-return pointer,
   or several registers for small structs packed in registers. */
static void gfunc_return(CType *func_type)
{
    if ((func_type->t & VT_BTYPE) == VT_STRUCT) {
        CType type, ret_type;
        int ret_align, ret_nregs, regsize;
        /* ask the target how this struct is returned */
        ret_nregs = gfunc_sret(func_type, func_var, &ret_type,
                               &ret_align, &regsize);
        if (ret_nregs < 0) {
#ifdef TCC_TARGET_RISCV64
            arch_transfer_ret_regs(0);
#endif
        } else if (0 == ret_nregs) {
            /* if returning structure, must copy it to implicit
               first pointer arg location */
            type = *func_type;
            mk_pointer(&type);
            vset(&type, VT_LOCAL | VT_LVAL, func_vc);
            indir();
            vswap();
            /* copy structure value to pointer */
            vstore();
        } else {
            /* returning structure packed into registers */
            int size, addr, align, rc;
            size = type_size(func_type,&align);
            /* if the value is not suitably aligned for whole-register
               loads, copy it to a properly aligned stack slot first */
            if ((vtop->r != (VT_LOCAL | VT_LVAL) ||
                 (vtop->c.i & (ret_align-1)))
                && (align & (ret_align-1))) {
                loc = (loc - size) & -ret_align;
                addr = loc;
                type = *func_type;
                vset(&type, VT_LOCAL | VT_LVAL, addr);
                vswap();
                vstore();
                vpop();
                vset(&ret_type, VT_LOCAL | VT_LVAL, addr);
            }
            vtop->type = ret_type;
            rc = RC_RET(ret_type.t);
            if (ret_nregs == 1)
                gv(rc);
            else {
                for (;;) {
                    vdup();
                    gv(rc);
                    vpop();
                    if (--ret_nregs == 0)
                        break;
                    /* We assume that when a structure is returned in multiple
                       registers, their classes are consecutive values of the
                       suite s(n) = 2^n */
                    rc <<= 1;
                    vtop->c.i += regsize;
                }
            }
        }
    } else {
        gv(RC_RET(func_type->t));
    }
    vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
}
#endif
6661 static void check_func_return(void)
6663 if ((func_vt.t & VT_BTYPE) == VT_VOID)
6664 return;
6665 if (!strcmp (funcname, "main")
6666 && (func_vt.t & VT_BTYPE) == VT_INT) {
6667 /* main returns 0 by default */
6668 vpushi(0);
6669 gen_assign_cast(&func_vt);
6670 gfunc_return(&func_vt);
6671 } else {
6672 tcc_warning("function might return no value: '%s'", funcname);
6676 /* ------------------------------------------------------------------------- */
6677 /* switch/case */
6679 static int case_cmpi(const void *pa, const void *pb)
6681 int64_t a = (*(struct case_t**) pa)->v1;
6682 int64_t b = (*(struct case_t**) pb)->v1;
6683 return a < b ? -1 : a > b;
6686 static int case_cmpu(const void *pa, const void *pb)
6688 uint64_t a = (uint64_t)(*(struct case_t**) pa)->v1;
6689 uint64_t b = (uint64_t)(*(struct case_t**) pb)->v1;
6690 return a < b ? -1 : a > b;
/* test vtop and jump to the already-known code address 'a' when the
   condition holds; 't' is an existing jump chain resolved to 'a' too */
static void gtst_addr(int t, int a)
{
    gsym_addr(gvtst(0, t), a);
}
/* Generate the dispatch code for a sorted table of case ranges.
   Binary search while more than 8 entries remain, then a linear scan.
   The switch value sits on top of the value stack and is vdup()ed for
   each comparison.  *bsym accumulates the jump chain taken when no
   case matches. */
static void gcase(struct case_t **base, int len, int *bsym)
{
    struct case_t *p;
    int e;
    int ll = (vtop->type.t & VT_BTYPE) == VT_LLONG;
    while (len > 8) {
        /* binary search */
        p = base[len/2];
        vdup();
        if (ll)
            vpushll(p->v2);
        else
            vpushi(p->v2);
        gen_op(TOK_LE);
        e = gvtst(1, 0);
        vdup();
        if (ll)
            vpushll(p->v1);
        else
            vpushi(p->v1);
        gen_op(TOK_GE);
        gtst_addr(0, p->sym); /* v1 <= x <= v2 */
        /* x < v1 */
        gcase(base, len/2, bsym);
        /* x > v2 */
        gsym(e);
        e = len/2 + 1;
        base += e; len -= e;
    }
    /* linear scan */
    while (len--) {
        p = *base++;
        vdup();
        if (ll)
            vpushll(p->v2);
        else
            vpushi(p->v2);
        if (p->v1 == p->v2) {
            /* single-value case: one equality test suffices */
            gen_op(TOK_EQ);
            gtst_addr(0, p->sym);
        } else {
            /* case range: x <= v2 && x >= v1 */
            gen_op(TOK_LE);
            e = gvtst(1, 0);
            vdup();
            if (ll)
                vpushll(p->v1);
            else
                vpushi(p->v1);
            gen_op(TOK_GE);
            gtst_addr(0, p->sym);
            gsym(e);
        }
    }
    /* nothing matched: continue in the caller's no-match chain */
    *bsym = gjmp(*bsym);
}
/* ------------------------------------------------------------------------- */
/* __attribute__((cleanup(fn))) */

/* Call the registered cleanup handlers of the current scope, innermost
   first, stopping when the chain entry 'stop' is reached (exclusive). */
static void try_call_scope_cleanup(Sym *stop)
{
    Sym *cls = cur_scope->cl.s;

    for (; cls != stop; cls = cls->ncl) {
        Sym *fs = cls->next;     /* the cleanup function */
        Sym *vs = cls->prev_tok; /* the variable it cleans up */

        /* call fs(&vs) */
        vpushsym(&fs->type, fs);
        vset(&vs->type, vs->r, vs->c);
        vtop->sym = vs;
        mk_pointer(&vtop->type);
        gaddrof();
        gfunc_call(1);
    }
}
/* Run the cleanups required when jumping (goto) to a label whose
   cleanup chain state is 'cleanupstate': everything between the current
   scope's chain and the nearest common ancestor of both chains. */
static void try_call_cleanup_goto(Sym *cleanupstate)
{
    Sym *oc, *cc;
    int ocd, ccd;

    if (!cur_scope->cl.s)
        return;

    /* search NCA of both cleanup chains given parents and initial depth */
    ocd = cleanupstate ? cleanupstate->v & ~SYM_FIELD : 0;
    /* first equalize the depths, then walk both chains in lockstep */
    for (ccd = cur_scope->cl.n, oc = cleanupstate; ocd > ccd; --ocd, oc = oc->ncl)
        ;
    for (cc = cur_scope->cl.s; ccd > ocd; --ccd, cc = cc->ncl)
        ;
    for (; cc != oc; cc = cc->ncl, oc = oc->ncl, --ccd)
        ;

    try_call_scope_cleanup(cc);
}
/* call 'func' for each __attribute__((cleanup(func))) */
/* Resolve pending forward gotos when the scope 'o' is left: each goto
   whose cleanup depth is deeper than o's must run o's cleanups on its
   way out, then continue jumping. */
static void block_cleanup(struct scope *o)
{
    int jmp = 0;
    Sym *g, **pg;
    for (pg = &pending_gotos; (g = *pg) && g->c > o->cl.n;) {
        if (g->prev_tok->r & LABEL_FORWARD) {
            Sym *pcl = g->next;
            /* skip the cleanup code for normal (non-goto) fall-through */
            if (!jmp)
                jmp = gjmp(0);
            gsym(pcl->jnext);
            try_call_scope_cleanup(o->cl.s);
            pcl->jnext = gjmp(0);
            if (!o->cl.n)
                goto remove_pending;
            g->c = o->cl.n;
            pg = &g->prev;
        } else {
    remove_pending:
            *pg = g->prev;
            sym_free(g);
        }
    }
    gsym(jmp);
    try_call_scope_cleanup(o->cl.s);
}
/* ------------------------------------------------------------------------- */
/* VLA */

/* restore the stack pointer from the slot saved at frame offset 'loc';
   0 means there is nothing to restore */
static void vla_restore(int loc)
{
    if (loc)
        gen_vla_sp_restore(loc);
}
6830 static void vla_leave(struct scope *o)
6832 struct scope *c = cur_scope, *v = NULL;
6833 for (; c != o && c; c = c->prev)
6834 if (c->vla.num)
6835 v = c;
6836 if (v)
6837 vla_restore(v->vla.locorig);
6840 /* ------------------------------------------------------------------------- */
6841 /* local scopes */
6843 static void new_scope(struct scope *o)
6845 /* copy and link previous scope */
6846 *o = *cur_scope;
6847 o->prev = cur_scope;
6848 cur_scope = o;
6849 cur_scope->vla.num = 0;
6851 /* record local declaration stack position */
6852 o->lstk = local_stack;
6853 o->llstk = local_label_stack;
6854 ++local_scope;
/* Close a scope opened with new_scope(): run VLA restores and pending
   cleanups, pop locally defined labels and symbols. */
static void prev_scope(struct scope *o, int is_expr)
{
    vla_leave(o->prev);

    if (o->cl.s != o->prev->cl.s)
        block_cleanup(o->prev);

    /* pop locally defined labels */
    label_pop(&local_label_stack, o->llstk, is_expr);

    /* In the is_expr case (a statement expression is finished here),
       vtop might refer to symbols on the local_stack.  Either via the
       type or via vtop->sym.  We can't pop those nor any that in turn
       might be referred to.  To make it easier we don't roll back
       any symbols in that case; some upper level call to block() will
       do that.  We do have to remove such symbols from the lookup
       tables, though.  sym_pop will do that.  */

    /* pop locally defined symbols */
    pop_local_syms(o->lstk, is_expr);
    cur_scope = o->prev;
    --local_scope;
}
6881 /* leave a scope via break/continue(/goto) */
6882 static void leave_scope(struct scope *o)
6884 if (!o)
6885 return;
6886 try_call_scope_cleanup(o->cl.s);
6887 vla_leave(o);
/* short version for scopes with 'if/do/while/switch' which can
   declare only types (of struct/union/enum) */
static void new_scope_s(struct scope *o)
{
    /* only the symbol stack position needs saving here */
    o->lstk = local_stack;
    ++local_scope;
}
/* close a scope opened with new_scope_s(): drop its type declarations */
static void prev_scope_s(struct scope *o)
{
    sym_pop(&local_stack, o->lstk, 0);
    --local_scope;
}
/* ------------------------------------------------------------------------- */
/* call block from 'for do while' loops */

/* Parse a loop body: temporarily install 'bsym'/'csym' as the current
   scope's break/continue jump chains (csym may be NULL for switch,
   which has no 'continue' target) and restore the old ones after. */
static void lblock(int *bsym, int *csym)
{
    struct scope *lo = loop_scope, *co = cur_scope;
    int *b = co->bsym, *c = co->csym;
    if (csym) {
        co->csym = csym;
        loop_scope = co;
    }
    co->bsym = bsym;
    block(0);
    co->bsym = b;
    if (csym) {
        co->csym = c;
        loop_scope = lo;
    }
}
/* Parse and generate code for one statement (possibly a compound
   statement).  If 'is_expr' is set the block is the body of a GNU
   statement expression and the value of its last expression statement
   is kept on the value stack. */
static void block(int is_expr)
{
    int a, b, c, d, e, t;
    struct scope o;
    Sym *s;

    if (is_expr) {
        /* default return value is (void) */
        vpushi(0);
        vtop->type.t = VT_VOID;
    }

again:
    t = tok;
    /* If the token carries a value, next() might destroy it. Only with
       invalid code such as f(){"123"4;} */
    if (TOK_HAS_VALUE(t))
        goto expr;
    next();

    if (debug_modes)
        tcc_tcov_check_line (tcc_state, 0), tcc_tcov_block_begin (tcc_state);

    if (t == TOK_IF) {
        new_scope_s(&o);
        skip('(');
        gexpr();
        skip(')');
        a = gvtst(1, 0); /* jump over the then-branch when false */
        block(0);
        if (tok == TOK_ELSE) {
            d = gjmp(0);
            gsym(a);
            next();
            block(0);
            gsym(d); /* patch else jmp */
        } else {
            gsym(a);
        }
        prev_scope_s(&o);

    } else if (t == TOK_WHILE) {
        new_scope_s(&o);
        d = gind();
        skip('(');
        gexpr();
        skip(')');
        a = gvtst(1, 0); /* break/exit chain */
        b = 0;           /* continue chain -> loop start */
        lblock(&a, &b);
        gjmp_addr(d);
        gsym_addr(b, d);
        gsym(a);
        prev_scope_s(&o);

    } else if (t == '{') {
        if (debug_modes)
            tcc_debug_stabn(tcc_state, N_LBRAC, ind - func_ind);
        new_scope(&o);

        /* handle local labels declarations */
        while (tok == TOK_LABEL) {
            do {
                next();
                if (tok < TOK_UIDENT)
                    expect("label identifier");
                label_push(&local_label_stack, tok, LABEL_DECLARED);
                next();
            } while (tok == ',');
            skip(';');
        }

        while (tok != '}') {
            decl(VT_LOCAL);
            if (tok != '}') {
                if (is_expr)
                    vpop(); /* only the last statement's value survives */
                block(is_expr);
            }
        }

        prev_scope(&o, is_expr);
        if (debug_modes)
            tcc_debug_stabn(tcc_state, N_RBRAC, ind - func_ind);
        if (local_scope)
            next();
        else if (!nocode_wanted)
            check_func_return(); /* end of the function's outermost block */

    } else if (t == TOK_RETURN) {
        b = (func_vt.t & VT_BTYPE) != VT_VOID;
        if (tok != ';') {
            gexpr();
            if (b) {
                gen_assign_cast(&func_vt);
            } else {
                if (vtop->type.t != VT_VOID)
                    tcc_warning("void function returns a value");
                vtop--;
            }
        } else if (b) {
            tcc_warning("'return' with no value");
            b = 0;
        }
        leave_scope(root_scope);
        if (b)
            gfunc_return(&func_vt);
        skip(';');
        /* jump unless last stmt in top-level block */
        if (tok != '}' || local_scope != 1)
            rsym = gjmp(rsym);
        if (debug_modes)
            tcc_tcov_block_end (tcc_state, -1);
        CODE_OFF();

    } else if (t == TOK_BREAK) {
        /* compute jump */
        if (!cur_scope->bsym)
            tcc_error("cannot break");
        if (cur_switch && cur_scope->bsym == cur_switch->bsym)
            leave_scope(cur_switch->scope);
        else
            leave_scope(loop_scope);
        *cur_scope->bsym = gjmp(*cur_scope->bsym);
        skip(';');

    } else if (t == TOK_CONTINUE) {
        /* compute jump */
        if (!cur_scope->csym)
            tcc_error("cannot continue");
        leave_scope(loop_scope);
        *cur_scope->csym = gjmp(*cur_scope->csym);
        skip(';');

    } else if (t == TOK_FOR) {
        new_scope(&o);

        skip('(');
        if (tok != ';') {
            /* c99 for-loop init decl? */
            if (!decl(VT_JMP)) {
                /* no, regular for-loop init expr */
                gexpr();
                vpop();
            }
        }
        skip(';');
        a = b = 0;
        c = d = gind();
        if (tok != ';') {
            gexpr();
            a = gvtst(1, 0);
        }
        skip(';');
        if (tok != ')') {
            /* increment expression: emitted after the body, so the body
               jumps to 'd' and falls through the increment back to 'c' */
            e = gjmp(0);
            d = gind();
            gexpr();
            vpop();
            gjmp_addr(c);
            gsym(e);
        }
        skip(')');
        lblock(&a, &b);
        gjmp_addr(d);
        gsym_addr(b, d);
        gsym(a);
        prev_scope(&o, 0);

    } else if (t == TOK_DO) {
        new_scope_s(&o);
        a = b = 0;
        d = gind();
        lblock(&a, &b);
        gsym(b);
        skip(TOK_WHILE);
        skip('(');
        gexpr();
        skip(')');
        skip(';');
        c = gvtst(0, 0); /* loop again while the condition holds */
        gsym_addr(c, d);
        gsym(a);
        prev_scope_s(&o);

    } else if (t == TOK_SWITCH) {
        struct switch_t *sw;

        sw = tcc_mallocz(sizeof *sw);
        sw->bsym = &a;
        sw->scope = cur_scope;
        sw->prev = cur_switch;
        sw->nocode_wanted = nocode_wanted;
        cur_switch = sw;

        new_scope_s(&o);
        skip('(');
        gexpr();
        skip(')');
        sw->sv = *vtop--; /* save switch value */

        a = 0;
        b = gjmp(0); /* jump to first case */
        lblock(&a, NULL);
        a = gjmp(a); /* add implicit break */
        /* case lookup */
        gsym(b);
        prev_scope_s(&o);

        if (sw->nocode_wanted)
            goto skip_switch;
        /* sort case ranges and detect overlaps */
        if (sw->sv.type.t & VT_UNSIGNED)
            qsort(sw->p, sw->n, sizeof(void*), case_cmpu);
        else
            qsort(sw->p, sw->n, sizeof(void*), case_cmpi);
        for (b = 1; b < sw->n; b++)
            if (sw->sv.type.t & VT_UNSIGNED
                ? (uint64_t)sw->p[b - 1]->v2 >= (uint64_t)sw->p[b]->v1
                : sw->p[b - 1]->v2 >= sw->p[b]->v1)
                tcc_error("duplicate case value");
        vpushv(&sw->sv);
        gv(RC_INT);
        d = 0, gcase(sw->p, sw->n, &d);
        vpop();
        if (sw->def_sym)
            gsym_addr(d, sw->def_sym);
        else
            gsym(d);
    skip_switch:
        /* break label */
        gsym(a);

        dynarray_reset(&sw->p, &sw->n);
        cur_switch = sw->prev;
        tcc_free(sw);

    } else if (t == TOK_CASE) {
        struct case_t *cr = tcc_malloc(sizeof(struct case_t));
        if (!cur_switch)
            expect("switch");
        cr->v1 = cr->v2 = expr_const64();
        if (gnu_ext && tok == TOK_DOTS) {
            /* GNU case range: 'case a ... b:' */
            next();
            cr->v2 = expr_const64();
            if ((!(cur_switch->sv.type.t & VT_UNSIGNED) && cr->v2 < cr->v1)
                || (cur_switch->sv.type.t & VT_UNSIGNED && (uint64_t)cr->v2 < (uint64_t)cr->v1))
                tcc_warning("empty case range");
        }
        /* case and default are unreachable from a switch under nocode_wanted */
        if (!cur_switch->nocode_wanted)
            cr->sym = gind();
        dynarray_add(&cur_switch->p, &cur_switch->n, cr);
        skip(':');
        is_expr = 0;
        goto block_after_label;

    } else if (t == TOK_DEFAULT) {
        if (!cur_switch)
            expect("switch");
        if (cur_switch->def_sym)
            tcc_error("too many 'default'");
        cur_switch->def_sym = cur_switch->nocode_wanted ? 1 : gind();
        skip(':');
        is_expr = 0;
        goto block_after_label;

    } else if (t == TOK_GOTO) {
        vla_restore(cur_scope->vla.locorig);
        if (tok == '*' && gnu_ext) {
            /* computed goto */
            next();
            gexpr();
            if ((vtop->type.t & VT_BTYPE) != VT_PTR)
                expect("pointer");
            ggoto();

        } else if (tok >= TOK_UIDENT) {
            s = label_find(tok);
            /* put forward definition if needed */
            if (!s)
                s = label_push(&global_label_stack, tok, LABEL_FORWARD);
            else if (s->r == LABEL_DECLARED)
                s->r = LABEL_FORWARD;

            if (s->r & LABEL_FORWARD) {
                /* start new goto chain for cleanups, linked via label->next */
                if (cur_scope->cl.s && !nocode_wanted) {
                    sym_push2(&pending_gotos, SYM_FIELD, 0, cur_scope->cl.n);
                    pending_gotos->prev_tok = s;
                    s = sym_push2(&s->next, SYM_FIELD, 0, 0);
                    pending_gotos->next = s;
                }
                s->jnext = gjmp(s->jnext);
            } else {
                try_call_cleanup_goto(s->cleanupstate);
                gjmp_addr(s->jnext);
            }
            next();

        } else {
            expect("label identifier");
        }
        skip(';');

    } else if (t == TOK_ASM1 || t == TOK_ASM2 || t == TOK_ASM3) {
        asm_instr();

    } else {
        if (tok == ':' && t >= TOK_UIDENT) {
            /* label case */
            next();
            s = label_find(t);
            if (s) {
                if (s->r == LABEL_DEFINED)
                    tcc_error("duplicate label '%s'", get_tok_str(s->v, NULL));
                s->r = LABEL_DEFINED;
                if (s->next) {
                    Sym *pcl; /* pending cleanup goto */
                    for (pcl = s->next; pcl; pcl = pcl->prev)
                        gsym(pcl->jnext);
                    sym_pop(&s->next, NULL, 0);
                } else
                    gsym(s->jnext);
            } else {
                s = label_push(&global_label_stack, t, LABEL_DEFINED);
            }
            s->jnext = gind();
            s->cleanupstate = cur_scope->cl.s;

    block_after_label:
              {
                /* Accept attributes after labels (e.g. 'unused') */
                AttributeDef ad_tmp;
                parse_attribute(&ad_tmp);
              }
            if (debug_modes)
                tcc_tcov_reset_ind(tcc_state);
            vla_restore(cur_scope->vla.loc);
            if (tok != '}')
                goto again;
            /* we accept this, but it is a mistake */
            tcc_warning_c(warn_all)("deprecated use of label at end of compound statement");

        } else {
            /* expression case */
            if (t != ';') {
                unget_tok(t);
    expr:
                if (is_expr) {
                    vpop();
                    gexpr();
                } else {
                    gexpr();
                    vpop();
                }
                skip(';');
            }
        }
    }

    if (debug_modes)
        tcc_tcov_check_line (tcc_state, 0), tcc_tcov_block_end (tcc_state, 0);
}
/* This skips over a stream of tokens containing balanced {} and ()
   pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
   with a '{').  If STR then allocates and stores the skipped tokens
   in *STR.  This doesn't check if () and {} are nested correctly,
   i.e. "({)}" is accepted. */
static void skip_or_save_block(TokenString **str)
{
    int braces = tok == '{';
    int level = 0;
    if (str)
        *str = tok_str_alloc();

    while (1) {
        int t = tok;
        /* stop at an outer separator only when not inside any bracket */
        if (level == 0
            && (t == ','
                || t == ';'
                || t == '}'
                || t == ')'
                || t == ']'))
            break;
        if (t == TOK_EOF) {
            if (str || level > 0)
                tcc_error("unexpected end of file");
            else
                break;
        }
        if (str)
            tok_str_add_tok(*str);
        next();
        if (t == '{' || t == '(' || t == '[') {
            level++;
        } else if (t == '}' || t == ')' || t == ']') {
            level--;
            if (level == 0 && braces && t == '}')
                break;
        }
    }
    if (str) {
        /* terminate the saved stream (line-number marker + 0) */
        tok_str_add(*str, -1);
        tok_str_add(*str, 0);
    }
}
#define EXPR_CONST 1
#define EXPR_ANY 2

/* Parse one initializer element.  EXPR_CONST restricts it to a
   load-time constant (static/global initializers); EXPR_ANY accepts
   any assignment expression (automatic initializers). */
static void parse_init_elem(int expr_type)
{
    int saved_global_expr;
    switch(expr_type) {
    case EXPR_CONST:
        /* compound literals must be allocated globally in this case */
        saved_global_expr = global_expr;
        global_expr = 1;
        expr_const1();
        global_expr = saved_global_expr;
        /* NOTE: symbols are accepted, as well as lvalue for anon symbols
           (compound literals). */
        if (((vtop->r & (VT_VALMASK | VT_LVAL)) != VT_CONST
             && ((vtop->r & (VT_SYM|VT_LVAL)) != (VT_SYM|VT_LVAL)
                 || vtop->sym->v < SYM_FIRST_ANOM))
#ifdef TCC_TARGET_PE
            || ((vtop->r & VT_SYM) && vtop->sym->a.dllimport)
#endif
            )
            tcc_error("initializer element is not constant");
        break;
    case EXPR_ANY:
        expr_eq();
        break;
    }
}
#if 1
/* sanity check: an initializer store ending at 'offset' must lie inside
   the space already reserved for the object (section data for globals,
   the local frame for automatics) */
static void init_assert(init_params *p, int offset)
{
    if (p->sec ? !NODATA_WANTED && offset > p->sec->data_offset
               : !nocode_wanted && offset > p->local_offset)
        tcc_internal_error("initializer overflow");
}
#else
#define init_assert(sec, offset)
#endif
/* put zeros for variable based init */
static void init_putz(init_params *p, unsigned long c, int size)
{
    init_assert(p, c + size);
    if (p->sec) {
        /* nothing to do because globals are already set to zero */
    } else {
        /* local object: emit memset(local + c, 0, size) */
        vpush_helper_func(TOK_memset);
        vseti(VT_LOCAL, c);
#ifdef TCC_TARGET_ARM
        /* NOTE: argument push order is swapped on ARM */
        vpushs(size);
        vpushi(0);
#else
        vpushi(0);
        vpushs(size);
#endif
        gfunc_call(3);
    }
}
/* flags for decl_initializer()/decl_designator() */
#define DIF_FIRST 1
#define DIF_SIZE_ONLY 2
#define DIF_HAVE_ELEM 4
#define DIF_CLEAR 8

/* delete relocations for specified range c ... c + size. Unfortunatly
   in very special cases, relocations may occur unordered */
static void decl_design_delrels(Section *sec, int c, int size)
{
    ElfW_Rel *rel, *rel2, *rel_end;
    if (!sec || !sec->reloc)
        return;
    /* compact the relocation array in place, skipping entries that
       fall inside [c, c + size) */
    rel = rel2 = (ElfW_Rel*)sec->reloc->data;
    rel_end = (ElfW_Rel*)(sec->reloc->data + sec->reloc->data_offset);
    while (rel < rel_end) {
        if (rel->r_offset >= c && rel->r_offset < c + size) {
            sec->reloc->data_offset -= sizeof *rel;
        } else {
            if (rel2 != rel)
                memcpy(rel2, rel, sizeof *rel);
            ++rel2;
        }
        ++rel;
    }
}
/* Grow the recorded element count of the flexible array member being
   initialized when 'index' goes past it; any other array with unknown
   (negative) size is an error here. */
static void decl_design_flex(init_params *p, Sym *ref, int index)
{
    if (ref == p->flex_array_ref) {
        if (index >= ref->c)
            ref->c = index + 1;
    } else if (ref->c < 0)
        tcc_error("flexible array has zero size in this context");
}
/* t is the array or struct type. c is the array or struct
   address. cur_field is the pointer to the current
   field, for arrays the 'c' member contains the current start
   index. 'flags' is as in decl_initializer.
   'al' contains the already initialized length of the
   current container (starting at c).  This returns the new length of that.  */
static int decl_designator(init_params *p, CType *type, unsigned long c,
                           Sym **cur_field, int flags, int al)
{
    Sym *s, *f;
    int index, index_last, align, l, nb_elems, elem_size;
    unsigned long corig = c;

    elem_size = 0;
    nb_elems = 1;

    if (flags & DIF_HAVE_ELEM)
        goto no_designator;

    /* GNU extension: 'fieldname:' designator syntax */
    if (gnu_ext && tok >= TOK_UIDENT) {
        l = tok, next();
        if (tok == ':')
            goto struct_field;
        unget_tok(l);
    }

    /* NOTE: we only support ranges for last designator */
    while (nb_elems == 1 && (tok == '[' || tok == '.')) {
        if (tok == '[') {
            if (!(type->t & VT_ARRAY))
                expect("array type");
            next();
            index = index_last = expr_const();
            /* GNU extension: '[a ... b] =' range designator */
            if (tok == TOK_DOTS && gnu_ext) {
                next();
                index_last = expr_const();
            }
            skip(']');
            s = type->ref;
            decl_design_flex(p, s, index_last);
            if (index < 0 || index_last >= s->c || index_last < index)
                tcc_error("index exceeds array bounds or range is empty");
            if (cur_field)
                (*cur_field)->c = index_last;
            type = pointed_type(type);
            elem_size = type_size(type, &align);
            c += index * elem_size;
            nb_elems = index_last - index + 1;
        } else {
            int cumofs;
            next();
            l = tok;
        struct_field:
            next();
            if ((type->t & VT_BTYPE) != VT_STRUCT)
                expect("struct/union type");
            cumofs = 0;
            f = find_field(type, l, &cumofs);
            if (cur_field)
                *cur_field = f;
            type = &f->type;
            c += cumofs;
        }
        cur_field = NULL;
    }
    if (!cur_field) {
        /* a designator was consumed: expect '=' (optional with gnu_ext) */
        if (tok == '=') {
            next();
        } else if (!gnu_ext) {
            expect("=");
        }
    } else {
    no_designator:
        /* positional initialization: advance to the next element/field */
        if (type->t & VT_ARRAY) {
            index = (*cur_field)->c;
            s = type->ref;
            decl_design_flex(p, s, index);
            if (index >= s->c)
                tcc_error("too many initializers");
            type = pointed_type(type);
            elem_size = type_size(type, &align);
            c += index * elem_size;
        } else {
            f = *cur_field;
            /* Skip bitfield padding. Also with size 32 and 64. */
            while (f && (f->v & SYM_FIRST_ANOM) &&
                   is_integer_btype(f->type.t & VT_BTYPE))
                *cur_field = f = f->next;
            if (!f)
                tcc_error("too many initializers");
            type = &f->type;
            c += f->c;
        }
    }

    if (!elem_size) /* for structs */
        elem_size = type_size(type, &align);

    /* Using designators the same element can be initialized more
       than once.  In that case we need to delete possibly already
       existing relocations. */
    if (!(flags & DIF_SIZE_ONLY) && c - corig < al) {
        decl_design_delrels(p->sec, c, elem_size * nb_elems);
        flags &= ~DIF_CLEAR; /* mark stack dirty too */
    }

    decl_initializer(p, type, c, flags & ~DIF_FIRST);

    /* range designator: replicate the first element into the rest */
    if (!(flags & DIF_SIZE_ONLY) && nb_elems > 1) {
        Sym aref = {0};
        CType t1;
        int i;
        if (p->sec || (type->t & VT_ARRAY)) {
            /* make init_putv/vstore believe it were a struct */
            aref.c = elem_size;
            t1.t = VT_STRUCT, t1.ref = &aref;
            type = &t1;
        }
        if (p->sec)
            vpush_ref(type, p->sec, c, elem_size);
        else
            vset(type, VT_LOCAL|VT_LVAL, c);
        for (i = 1; i < nb_elems; i++) {
            vdup();
            init_putv(p, type, c + elem_size * i);
        }
        vpop();
    }

    c += nb_elems * elem_size;
    if (c - corig > al)
        al = c - corig;
    return al;
}
7562 /* store a value or an expression directly in global data or in local array */
/* NOTE(review): this web-blob extract dropped some lines (the embedded
   original line numbers skip, e.g. 7643, 7645-7647) so braces below do not
   balance as shown; code kept byte-identical, only comments added. */
7563 static void init_putv(init_params *p, CType *type, unsigned long c)
7565 int bt;
7566 void *ptr;
7567 CType dtype;
7568 int size, align;
7569 Section *sec = p->sec;
7570 uint64_t val;
7572 dtype = *type;
7573 dtype.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
7575 size = type_size(type, &align);
7576 if (type->t & VT_BITFIELD)
/* for a bitfield, only the bytes the field actually touches */
7577 size = (BIT_POS(type->t) + BIT_SIZE(type->t) + 7) / 8;
7578 init_assert(p, c + size);
/* sec != NULL: static/global data — patch bytes (and relocs) directly */
7580 if (sec) {
7581 /* XXX: not portable */
7582 /* XXX: generate error if incorrect relocation */
7583 gen_assign_cast(&dtype);
7584 bt = type->t & VT_BTYPE;
/* a symbolic value is only representable as a pointer-sized reloc */
7586 if ((vtop->r & VT_SYM)
7587 && bt != VT_PTR
7588 && (bt != (PTR_SIZE == 8 ? VT_LLONG : VT_INT)
7589 || (type->t & VT_BITFIELD))
7590 && !((vtop->r & VT_CONST) && vtop->sym->v >= SYM_FIRST_ANOM)
7592 tcc_error("initializer element is not computable at load time");
7594 if (NODATA_WANTED) {
7595 vtop--;
7596 return;
7599 ptr = sec->data + c;
7600 val = vtop->c.i;
7602 /* XXX: make code faster ? */
7603 if ((vtop->r & (VT_SYM|VT_CONST)) == (VT_SYM|VT_CONST) &&
7604 vtop->sym->v >= SYM_FIRST_ANOM &&
7605 /* XXX This rejects compound literals like
7606 '(void *){ptr}'. The problem is that '&sym' is
7607 represented the same way, which would be ruled out
7608 by the SYM_FIRST_ANOM check above, but also '"string"'
7609 in 'char *p = "string"' is represented the same
7610 with the type being VT_PTR and the symbol being an
7611 anonymous one. That is, there's no difference in vtop
7612 between '(void *){x}' and '&(void *){x}'. Ignore
7613 pointer typed entities here. Hopefully no real code
7614 will ever use compound literals with scalar type. */
7615 (vtop->type.t & VT_BTYPE) != VT_PTR) {
7616 /* These come from compound literals, memcpy stuff over. */
7617 Section *ssec;
7618 ElfSym *esym;
7619 ElfW_Rel *rel;
7620 esym = elfsym(vtop->sym);
7621 ssec = tcc_state->sections[esym->st_shndx];
7622 memmove (ptr, ssec->data + esym->st_value + (int)vtop->c.i, size);
7623 if (ssec->reloc) {
7624 /* We need to copy over all memory contents, and that
7625 includes relocations. Use the fact that relocs are
7626 created it order, so look from the end of relocs
7627 until we hit one before the copied region. */
7628 unsigned long relofs = ssec->reloc->data_offset;
7629 while (relofs >= sizeof(*rel)) {
7630 relofs -= sizeof(*rel);
7631 rel = (ElfW_Rel*)(ssec->reloc->data + relofs);
7632 if (rel->r_offset >= esym->st_value + size)
7633 continue;
7634 if (rel->r_offset < esym->st_value)
7635 break;
/* re-emit the reloc rebased from the literal's section to 'sec' */
7636 put_elf_reloca(symtab_section, sec,
7637 c + rel->r_offset - esym->st_value,
7638 ELFW(R_TYPE)(rel->r_info),
7639 ELFW(R_SYM)(rel->r_info),
7640 #if PTR_SIZE == 8
7641 rel->r_addend
7642 #else
7644 #endif
7648 } else {
/* scalar constant: write the bytes for the concrete base type */
7649 if (type->t & VT_BITFIELD) {
7650 int bit_pos, bit_size, bits, n;
7651 unsigned char *p, v, m;
7652 bit_pos = BIT_POS(vtop->type.t);
7653 bit_size = BIT_SIZE(vtop->type.t);
7654 p = (unsigned char*)ptr + (bit_pos >> 3);
7655 bit_pos &= 7, bits = 0;
/* merge 'val' into the target byte-by-byte under mask 'm' */
7656 while (bit_size) {
7657 n = 8 - bit_pos;
7658 if (n > bit_size)
7659 n = bit_size;
7660 v = val >> bits << bit_pos;
7661 m = ((1 << n) - 1) << bit_pos;
7662 *p = (*p & ~m) | (v & m);
7663 bits += n, bit_size -= n, bit_pos = 0, ++p;
7665 } else
7666 switch(bt) {
7667 case VT_BOOL:
7668 *(char *)ptr = val != 0;
7669 break;
7670 case VT_BYTE:
7671 *(char *)ptr = val;
7672 break;
7673 case VT_SHORT:
7674 write16le(ptr, val);
7675 break;
7676 case VT_FLOAT:
7677 write32le(ptr, val);
7678 break;
7679 case VT_DOUBLE:
7680 write64le(ptr, val);
7681 break;
7682 case VT_LDOUBLE:
7683 #if defined TCC_IS_NATIVE_387
7684 /* Host and target platform may be different but both have x87.
7685 On windows, tcc does not use VT_LDOUBLE, except when it is a
7686 cross compiler. In this case a mingw gcc as host compiler
7687 comes here with 10-byte long doubles, while msvc or tcc won't.
7688 tcc itself can still translate by asm.
7689 In any case we avoid possibly random bytes 11 and 12.
7691 if (sizeof (long double) >= 10)
7692 memcpy(ptr, &vtop->c.ld, 10);
7693 #ifdef __TINYC__
7694 else if (sizeof (long double) == sizeof (double))
7695 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr) : "m" (vtop->c.ld));
7696 #endif
7697 else
7698 #endif
7699 /* For other platforms it should work natively, but may not work
7700 for cross compilers */
7701 if (sizeof(long double) == LDOUBLE_SIZE)
7702 memcpy(ptr, &vtop->c.ld, LDOUBLE_SIZE);
7703 else if (sizeof(double) == LDOUBLE_SIZE)
7704 *(double*)ptr = (double)vtop->c.ld;
7705 else if (0 == memcmp(ptr, &vtop->c.ld, LDOUBLE_SIZE))
7706 ; /* nothing to do for 0.0 */
7707 #ifndef TCC_CROSS_TEST
7708 else
7709 tcc_error("can't cross compile long double constants");
7710 #endif
7711 break;
7713 #if PTR_SIZE == 8
7714 /* intptr_t may need a reloc too, see tcctest.c:relocation_test() */
7715 case VT_LLONG:
7716 case VT_PTR:
7717 if (vtop->r & VT_SYM)
7718 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
7719 else
7720 write64le(ptr, val);
7721 break;
7722 case VT_INT:
7723 write32le(ptr, val);
7724 break;
7725 #else
7726 case VT_LLONG:
7727 write64le(ptr, val);
7728 break;
7729 case VT_PTR:
7730 case VT_INT:
7731 if (vtop->r & VT_SYM)
7732 greloc(sec, vtop->sym, c, R_DATA_PTR);
7733 write32le(ptr, val);
7734 break;
7735 #endif
7736 default:
7737 //tcc_internal_error("unexpected type");
7738 break;
7741 vtop--;
7742 } else {
/* no section: local (stack) object — generate an ordinary store */
7743 vset(&dtype, VT_LOCAL|VT_LVAL, c);
7744 vswap();
7745 vstore();
7746 vpop();
7750 /* 't' contains the type and storage info. 'c' is the offset of the
7751 object in section 'sec'. If 'sec' is NULL, it means stack based
7752 allocation. 'flags & DIF_FIRST' is true if array '{' must be read (multi
7753 dimension implicit array init handling). 'flags & DIF_SIZE_ONLY' is true if
7754 size only evaluation is wanted (only for arrays). */
/* NOTE(review): blob extract dropped blank/brace lines (embedded numbering
   skips); code kept byte-identical, only comments added. */
7755 static void decl_initializer(init_params *p, CType *type, unsigned long c, int flags)
7757 int len, n, no_oblock, i;
7758 int size1, align1;
7759 Sym *s, *f;
7760 Sym indexsym;
7761 CType *t1;
7763 /* generate line number info */
7764 if (debug_modes && !p->sec)
7765 tcc_debug_line(tcc_state), tcc_tcov_check_line (tcc_state, 1);
/* pre-parse a single scalar element unless it's a brace list or a string
   (strings get array-specific handling below) */
7767 if (!(flags & DIF_HAVE_ELEM) && tok != '{' &&
7768 /* In case of strings we have special handling for arrays, so
7769 don't consume them as initializer value (which would commit them
7770 to some anonymous symbol). */
7771 tok != TOK_LSTR && tok != TOK_STR &&
7772 (!(flags & DIF_SIZE_ONLY)
7773 /* a struct may be initialized from a struct of same type, as in
7774 struct {int x,y;} a = {1,2}, b = {3,4}, c[] = {a,b};
7775 In that case we need to parse the element in order to check
7776 it for compatibility below */
7777 || (type->t & VT_BTYPE) == VT_STRUCT)
7779 int ncw_prev = nocode_wanted;
7780 if ((flags & DIF_SIZE_ONLY) && !p->sec)
7781 ++nocode_wanted;
7782 parse_init_elem(!p->sec ? EXPR_ANY : EXPR_CONST);
7783 nocode_wanted = ncw_prev;
7784 flags |= DIF_HAVE_ELEM;
/* --- array initializer --- */
7787 if (type->t & VT_ARRAY) {
7788 no_oblock = 1;
7789 if (((flags & DIF_FIRST) && tok != TOK_LSTR && tok != TOK_STR) ||
7790 tok == '{') {
7791 skip('{');
7792 no_oblock = 0;
7795 s = type->ref;
7796 n = s->c;
7797 t1 = pointed_type(type);
7798 size1 = type_size(t1, &align1);
7800 /* only parse strings here if correct type (otherwise: handle
7801 them as ((w)char *) expressions */
7802 if ((tok == TOK_LSTR &&
7803 #ifdef TCC_TARGET_PE
7804 (t1->t & VT_BTYPE) == VT_SHORT && (t1->t & VT_UNSIGNED)
7805 #else
7806 (t1->t & VT_BTYPE) == VT_INT
7807 #endif
7808 ) || (tok == TOK_STR && (t1->t & VT_BTYPE) == VT_BYTE)) {
7809 len = 0;
7810 cstr_reset(&initstr);
7811 if (size1 != (tok == TOK_STR ? 1 : sizeof(nwchar_t)))
7812 tcc_error("unhandled string literal merging");
/* concatenate adjacent string literals into 'initstr' */
7813 while (tok == TOK_STR || tok == TOK_LSTR) {
7814 if (initstr.size)
7815 initstr.size -= size1;
7816 if (tok == TOK_STR)
7817 len += tokc.str.size;
7818 else
7819 len += tokc.str.size / sizeof(nwchar_t);
7820 len--;
7821 cstr_cat(&initstr, tokc.str.data, tokc.str.size);
7822 next();
7824 if (tok != ')' && tok != '}' && tok != ',' && tok != ';'
7825 && tok != TOK_EOF) {
7826 /* Not a lone literal but part of a bigger expression. */
7827 unget_tok(size1 == 1 ? TOK_STR : TOK_LSTR);
7828 tokc.str.size = initstr.size;
7829 tokc.str.data = initstr.data;
7830 goto do_init_array;
7833 decl_design_flex(p, s, len);
7834 if (!(flags & DIF_SIZE_ONLY)) {
7835 int nb = n, ch;
7836 if (len < nb)
7837 nb = len;
7838 if (len > nb)
7839 tcc_warning("initializer-string for array is too long");
7840 /* in order to go faster for common case (char
7841 string in global variable, we handle it
7842 specifically */
7843 if (p->sec && size1 == 1) {
7844 init_assert(p, c + nb);
7845 if (!NODATA_WANTED)
7846 memcpy(p->sec->data + c, initstr.data, nb);
7847 } else {
7848 for(i=0;i<n;i++) {
7849 if (i >= nb) {
7850 /* only add trailing zero if enough storage (no
7851 warning in this case since it is standard) */
7852 if (flags & DIF_CLEAR)
7853 break;
7854 if (n - i >= 4) {
7855 init_putz(p, c + i * size1, (n - i) * size1);
7856 break;
7858 ch = 0;
7859 } else if (size1 == 1)
7860 ch = ((unsigned char *)initstr.data)[i];
7861 else
7862 ch = ((nwchar_t *)initstr.data)[i];
7863 vpushi(ch);
7864 init_putv(p, t1, c + i * size1);
7868 } else {
7870 do_init_array:
7871 indexsym.c = 0;
7872 f = &indexsym;
7874 do_init_list:
7875 /* zero memory once in advance */
7876 if (!(flags & (DIF_CLEAR | DIF_SIZE_ONLY))) {
7877 init_putz(p, c, n*size1);
7878 flags |= DIF_CLEAR;
7881 len = 0;
7882 /* GNU extension: if the initializer is empty for a flex array,
7883 it's size is zero. We won't enter the loop, so set the size
7884 now. */
7885 decl_design_flex(p, s, len);
/* iterate over the brace list, one designator/element at a time */
7886 while (tok != '}' || (flags & DIF_HAVE_ELEM)) {
7887 len = decl_designator(p, type, c, &f, flags, len);
7888 flags &= ~DIF_HAVE_ELEM;
7889 if (type->t & VT_ARRAY) {
7890 ++indexsym.c;
7891 /* special test for multi dimensional arrays (may not
7892 be strictly correct if designators are used at the
7893 same time) */
7894 if (no_oblock && len >= n*size1)
7895 break;
7896 } else {
7897 if (s->type.t == VT_UNION)
7898 f = NULL;
7899 else
7900 f = f->next;
7901 if (no_oblock && f == NULL)
7902 break;
7905 if (tok == '}')
7906 break;
7907 skip(',');
7910 if (!no_oblock)
7911 skip('}');
7913 } else if ((flags & DIF_HAVE_ELEM)
7914 /* Use i_c_parameter_t, to strip toplevel qualifiers.
7915 The source type might have VT_CONSTANT set, which is
7916 of course assignable to non-const elements. */
7917 && is_compatible_unqualified_types(type, &vtop->type)) {
7918 goto one_elem;
/* --- struct/union initializer: reuse the list loop above --- */
7920 } else if ((type->t & VT_BTYPE) == VT_STRUCT) {
7921 no_oblock = 1;
7922 if ((flags & DIF_FIRST) || tok == '{') {
7923 skip('{');
7924 no_oblock = 0;
7926 s = type->ref;
7927 f = s->next;
7928 n = s->c;
7929 size1 = 1;
7930 goto do_init_list;
/* --- braced scalar, e.g. int x = { 1 } --- */
7932 } else if (tok == '{') {
7933 if (flags & DIF_HAVE_ELEM)
7934 skip(';');
7935 next();
7936 decl_initializer(p, type, c, flags & ~DIF_HAVE_ELEM);
7937 skip('}');
7939 } else one_elem: if ((flags & DIF_SIZE_ONLY)) {
7940 /* If we supported only ISO C we wouldn't have to accept calling
7941 this on anything than an array if DIF_SIZE_ONLY (and even then
7942 only on the outermost level, so no recursion would be needed),
7943 because initializing a flex array member isn't supported.
7944 But GNU C supports it, so we need to recurse even into
7945 subfields of structs and arrays when DIF_SIZE_ONLY is set. */
7946 /* just skip expression */
7947 if (flags & DIF_HAVE_ELEM)
7948 vpop();
7949 else
7950 skip_or_save_block(NULL);
7952 } else {
7953 if (!(flags & DIF_HAVE_ELEM)) {
7954 /* This should happen only when we haven't parsed
7955 the init element above for fear of committing a
7956 string constant to memory too early. */
7957 if (tok != TOK_STR && tok != TOK_LSTR)
7958 expect("string constant");
7959 parse_init_elem(!p->sec ? EXPR_ANY : EXPR_CONST);
/* skip redundant zero-stores into already-cleared stack objects */
7961 if (!p->sec && (flags & DIF_CLEAR) /* container was already zero'd */
7962 && (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST
7963 && vtop->c.i == 0
7964 && btype_size(type->t & VT_BTYPE) /* not for fp constants */
7966 vpop();
7967 else
7968 init_putv(p, type, c);
7972 /* parse an initializer for type 't' if 'has_init' is non zero, and
7973 allocate space in local or global data space ('r' is either
7974 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
7975 variable 'v' of scope 'scope' is declared before initializers
7976 are parsed. If 'v' is zero, then a reference to the new object
7977 is put in the value stack. If 'has_init' is 2, a special parsing
7978 is done to handle string constants. */
/* NOTE(review): blob extract dropped blank/brace lines (embedded numbering
   skips); code kept byte-identical, only comments added. */
7979 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r,
7980 int has_init, int v, int global)
7982 int size, align, addr;
7983 TokenString *init_str = NULL;
7985 Section *sec;
7986 Sym *flexible_array;
7987 Sym *sym;
7988 int saved_nocode_wanted = nocode_wanted;
7989 #ifdef CONFIG_TCC_BCHECK
7990 int bcheck = tcc_state->do_bounds_check && !NODATA_WANTED;
7991 #endif
7992 init_params p = {0};
7994 /* Always allocate static or global variables */
7995 if (v && (r & VT_VALMASK) == VT_CONST)
7996 nocode_wanted |= DATA_ONLY_WANTED;
7998 flexible_array = NULL;
7999 size = type_size(type, &align);
8001 /* exactly one flexible array may be initialized, either the
8002 toplevel array or the last member of the toplevel struct */
8004 if (size < 0) {
8005 /* If the base type itself was an array type of unspecified size
8006 (like in 'typedef int arr[]; arr x = {1};') then we will
8007 overwrite the unknown size by the real one for this decl.
8008 We need to unshare the ref symbol holding that size. */
8009 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
8010 p.flex_array_ref = type->ref;
8012 } else if (has_init && (type->t & VT_BTYPE) == VT_STRUCT) {
8013 Sym *field = type->ref->next;
8014 if (field) {
/* walk to the last member: only it may be a flexible array */
8015 while (field->next)
8016 field = field->next;
8017 if (field->type.t & VT_ARRAY && field->type.ref->c < 0) {
8018 flexible_array = field;
8019 p.flex_array_ref = field->type.ref;
8020 size = -1;
8025 if (size < 0) {
8026 /* If unknown size, do a dry-run 1st pass */
8027 if (!has_init)
8028 tcc_error("unknown type size");
8029 if (has_init == 2) {
8030 /* only get strings */
8031 init_str = tok_str_alloc();
8032 while (tok == TOK_STR || tok == TOK_LSTR) {
8033 tok_str_add_tok(init_str);
8034 next();
8036 tok_str_add(init_str, -1);
8037 tok_str_add(init_str, 0);
8038 } else
8039 skip_or_save_block(&init_str);
8040 unget_tok(0);
8042 /* compute size */
8043 begin_macro(init_str, 1);
8044 next();
8045 decl_initializer(&p, type, 0, DIF_FIRST | DIF_SIZE_ONLY);
8046 /* prepare second initializer parsing */
8047 macro_ptr = init_str->str;
8048 next();
8050 /* if still unknown size, error */
8051 size = type_size(type, &align);
8052 if (size < 0)
8053 tcc_error("unknown type size");
8055 /* If there's a flex member and it was used in the initializer
8056 adjust size. */
8057 if (flexible_array && flexible_array->type.ref->c > 0)
8058 size += flexible_array->type.ref->c
8059 * pointed_size(&flexible_array->type);
8062 /* take into account specified alignment if bigger */
8063 if (ad->a.aligned) {
8064 int speca = 1 << (ad->a.aligned - 1);
8065 if (speca > align)
8066 align = speca;
8067 } else if (ad->a.packed) {
8068 align = 1;
8071 if (!v && NODATA_WANTED)
8072 size = 0, align = 1;
/* --- stack-based (local) allocation --- */
8074 if ((r & VT_VALMASK) == VT_LOCAL) {
8075 sec = NULL;
8076 #ifdef CONFIG_TCC_BCHECK
8077 if (bcheck && v) {
8078 /* add padding between stack variables for bound checking */
8079 loc -= align;
8081 #endif
8082 loc = (loc - size) & -align;
8083 addr = loc;
8084 p.local_offset = addr + size;
8085 #ifdef CONFIG_TCC_BCHECK
8086 if (bcheck && v) {
8087 /* add padding between stack variables for bound checking */
8088 loc -= align;
8090 #endif
8091 if (v) {
8092 /* local variable */
8093 #ifdef CONFIG_TCC_ASM
8094 if (ad->asm_label) {
8095 int reg = asm_parse_regvar(ad->asm_label);
8096 if (reg >= 0)
8097 r = (r & ~VT_VALMASK) | reg;
8099 #endif
8100 sym = sym_push(v, type, r, addr);
8101 if (ad->cleanup_func) {
/* register a cleanup (attribute((cleanup))) for scope exit */
8102 Sym *cls = sym_push2(&all_cleanups,
8103 SYM_FIELD | ++cur_scope->cl.n, 0, 0);
8104 cls->prev_tok = sym;
8105 cls->next = ad->cleanup_func;
8106 cls->ncl = cur_scope->cl.s;
8107 cur_scope->cl.s = cls;
8110 sym->a = ad->a;
8111 } else {
8112 /* push local reference */
8113 vset(type, r, addr);
8115 } else {
/* --- static/global allocation in a section --- */
8116 sym = NULL;
8117 if (v && global) {
8118 /* see if the symbol was already defined */
8119 sym = sym_find(v);
8120 if (sym) {
8121 if (p.flex_array_ref && (sym->type.t & type->t & VT_ARRAY)
8122 && sym->type.ref->c > type->ref->c) {
8123 /* flex array was already declared with explicit size
8124 extern int arr[10];
8125 int arr[] = { 1,2,3 }; */
8126 type->ref->c = sym->type.ref->c;
8127 size = type_size(type, &align);
8129 patch_storage(sym, ad, type);
8130 /* we accept several definitions of the same global variable. */
8131 if (!has_init && sym->c && elfsym(sym)->st_shndx != SHN_UNDEF)
8132 goto no_alloc;
8136 /* allocate symbol in corresponding section */
8137 sec = ad->section;
8138 if (!sec) {
8139 CType *tp = type;
/* look through arrays-of-pointers to find the element's const-ness */
8140 while ((tp->t & (VT_BTYPE|VT_ARRAY)) == (VT_PTR|VT_ARRAY))
8141 tp = &tp->ref->type;
8142 if (tp->t & VT_CONSTANT) {
8143 sec = rodata_section;
8144 } else if (has_init) {
8145 sec = data_section;
8146 /*if (tcc_state->g_debug & 4)
8147 tcc_warning("rw data: %s", get_tok_str(v, 0));*/
8148 } else if (tcc_state->nocommon)
8149 sec = bss_section;
8152 if (sec) {
8153 addr = section_add(sec, size, align);
8154 #ifdef CONFIG_TCC_BCHECK
8155 /* add padding if bound check */
8156 if (bcheck)
8157 section_add(sec, 1, 1);
8158 #endif
8159 } else {
8160 addr = align; /* SHN_COMMON is special, symbol value is align */
8161 sec = common_section;
8164 if (v) {
8165 if (!sym) {
8166 sym = sym_push(v, type, r | VT_SYM, 0);
8167 patch_storage(sym, ad, NULL);
8169 /* update symbol definition */
8170 put_extern_sym(sym, sec, addr, size);
8171 } else {
8172 /* push global reference */
8173 vpush_ref(type, sec, addr, size);
8174 sym = vtop->sym;
8175 vtop->r |= r;
8178 #ifdef CONFIG_TCC_BCHECK
8179 /* handles bounds now because the symbol must be defined
8180 before for the relocation */
8181 if (bcheck) {
8182 addr_t *bounds_ptr;
8184 greloca(bounds_section, sym, bounds_section->data_offset, R_DATA_PTR, 0);
8185 /* then add global bound info */
8186 bounds_ptr = section_ptr_add(bounds_section, 2 * sizeof(addr_t));
8187 bounds_ptr[0] = 0; /* relocated */
8188 bounds_ptr[1] = size;
8190 #endif
/* --- VLA: runtime stack allocation instead of initializer --- */
8193 if (type->t & VT_VLA) {
8194 int a;
8196 if (NODATA_WANTED)
8197 goto no_alloc;
8199 /* save before-VLA stack pointer if needed */
8200 if (cur_scope->vla.num == 0) {
8201 if (cur_scope->prev && cur_scope->prev->vla.num) {
8202 cur_scope->vla.locorig = cur_scope->prev->vla.loc;
8203 } else {
8204 gen_vla_sp_save(loc -= PTR_SIZE);
8205 cur_scope->vla.locorig = loc;
8209 vpush_type_size(type, &a);
8210 gen_vla_alloc(type, a);
8211 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
8212 /* on _WIN64, because of the function args scratch area, the
8213 result of alloca differs from RSP and is returned in RAX. */
8214 gen_vla_result(addr), addr = (loc -= PTR_SIZE);
8215 #endif
8216 gen_vla_sp_save(addr);
8217 cur_scope->vla.loc = addr;
8218 cur_scope->vla.num++;
8219 } else if (has_init) {
8220 p.sec = sec;
8221 decl_initializer(&p, type, addr, DIF_FIRST);
8222 /* patch flexible array member size back to -1, */
8223 /* for possible subsequent similar declarations */
8224 if (flexible_array)
8225 flexible_array->type.ref->c = -1;
8228 no_alloc:
8229 /* restore parse state if needed */
8230 if (init_str) {
8231 end_macro();
8232 next();
8235 nocode_wanted = saved_nocode_wanted;
8238 /* generate vla code saved in post_type() */
/* Recursively emits, for each VLA-typed parameter, the code that evaluates
   the saved array-size expression and stores elem_count * elem_size into a
   stack slot (arg->type.ref->c) for later use.
   NOTE(review): blob extract dropped blank/brace lines; code kept
   byte-identical, only comments added. */
8239 static void func_vla_arg_code(Sym *arg)
8241 int align;
8242 TokenString *vla_array_tok = NULL;
/* recurse into inner dimensions first */
8244 if (arg->type.ref)
8245 func_vla_arg_code(arg->type.ref);
8247 if ((arg->type.t & VT_VLA) && arg->type.ref->vla_array_str) {
/* reserve an int-sized stack slot for the computed size */
8248 loc -= type_size(&int_type, &align);
8249 loc &= -align;
8250 arg->type.ref->c = loc;
8252 unget_tok(0);
8253 vla_array_tok = tok_str_alloc();
8254 vla_array_tok->str = arg->type.ref->vla_array_str;
/* replay the saved size-expression tokens and evaluate them */
8255 begin_macro(vla_array_tok, 1);
8256 next();
8257 gexpr();
8258 end_macro();
8259 next();
8260 vpush_type_size(&arg->type.ref->type, &align);
8261 gen_op('*');
/* store count * element-size into the reserved slot */
8262 vset(&int_type, VT_LOCAL|VT_LVAL, arg->type.ref->c);
8263 vswap();
8264 vstore();
8265 vpop();
/* Emit the size-computation code for every VLA-typed parameter of
   function 'sym' (see func_vla_arg_code above). */
8269 static void func_vla_arg(Sym *sym)
8271 Sym *arg;
8273 for (arg = sym->type.ref->next; arg; arg = arg->next)
8274 if (arg->type.t & VT_VLA)
8275 func_vla_arg_code(arg);
8278 /* parse a function defined by symbol 'sym' and generate its code in
8279 'cur_text_section' */
/* NOTE(review): blob extract dropped blank/brace lines; code kept
   byte-identical, only comments added. */
8280 static void gen_function(Sym *sym)
8282 struct scope f = { 0 };
8283 cur_scope = root_scope = &f;
8284 nocode_wanted = 0;
8285 ind = cur_text_section->data_offset;
8286 if (sym->a.aligned) {
/* pad with nops up to the requested function alignment */
8287 size_t newoff = section_add(cur_text_section, 0,
8288 1 << (sym->a.aligned - 1));
8289 gen_fill_nops(newoff - ind);
8291 /* NOTE: we patch the symbol size later */
8292 put_extern_sym(sym, cur_text_section, ind, 0);
/* constructors/destructors are registered in the init/fini arrays */
8293 if (sym->type.ref->f.func_ctor)
8294 add_array (tcc_state, ".init_array", sym->c);
8295 if (sym->type.ref->f.func_dtor)
8296 add_array (tcc_state, ".fini_array", sym->c);
8298 funcname = get_tok_str(sym->v, NULL);
8299 func_ind = ind;
8300 func_vt = sym->type.ref->type;
8301 func_var = sym->type.ref->f.func_type == FUNC_ELLIPSIS;
8303 /* put debug symbol */
8304 tcc_debug_funcstart(tcc_state, sym);
8305 /* push a dummy symbol to enable local sym storage */
8306 sym_push2(&local_stack, SYM_FIELD, 0, 0);
8307 local_scope = 1; /* for function parameters */
8308 gfunc_prolog(sym);
8309 tcc_debug_prolog_epilog(tcc_state, 0);
8310 local_scope = 0;
8311 rsym = 0;
8312 clear_temp_local_var_list();
8313 func_vla_arg(sym);
/* compile the function body, then resolve pending 'return' jumps */
8314 block(0);
8315 gsym(rsym);
8316 nocode_wanted = 0;
8317 /* reset local stack */
8318 pop_local_syms(NULL, 0);
8319 tcc_debug_prolog_epilog(tcc_state, 1);
8320 gfunc_epilog();
8321 cur_text_section->data_offset = ind;
8322 local_scope = 0;
8323 label_pop(&global_label_stack, NULL, 0);
8324 sym_pop(&all_cleanups, NULL, 0);
8325 /* patch symbol size */
8326 elfsym(sym)->st_size = ind - func_ind;
8327 /* end of function */
8328 tcc_debug_funcend(tcc_state, ind - func_ind);
8329 /* It's better to crash than to generate wrong code */
8330 cur_text_section = NULL;
8331 funcname = ""; /* for safety */
8332 func_vt.t = VT_VOID; /* for safety */
8333 func_var = 0; /* for safety */
8334 ind = 0; /* for safety */
8335 func_ind = -1;
8336 nocode_wanted = DATA_ONLY_WANTED;
8337 check_vstack();
8338 /* do this after funcend debug info */
8339 next();
/* Generate code for every referenced (or forced) 'static inline' function
   that was recorded during parsing; iterate to a fixpoint since generating
   one inline function may reference another. */
8342 static void gen_inline_functions(TCCState *s)
8344 Sym *sym;
8345 int inline_generated, i;
8346 struct InlineFunc *fn;
8348 tcc_open_bf(s, ":inline:", 0);
8349 /* iterate while inline function are referenced */
8350 do {
8351 inline_generated = 0;
8352 for (i = 0; i < s->nb_inline_fns; ++i) {
8353 fn = s->inline_fns[i];
8354 sym = fn->sym;
8355 if (sym && (sym->c || !(sym->type.t & VT_INLINE))) {
8356 /* the function was used or forced (and then not internal):
8357 generate its code and convert it to a normal function */
8358 fn->sym = NULL;
8359 tcc_debug_putfile(s, fn->filename);
/* replay the saved token string of the function body */
8360 begin_macro(fn->func_str, 1);
8361 next();
8362 cur_text_section = text_section;
8363 gen_function(sym);
8364 end_macro();
8366 inline_generated = 1;
8369 } while (inline_generated);
8370 tcc_close();
/* Release the saved token strings of inline functions that were never
   emitted, then drop the whole inline-function list. */
8373 static void free_inline_functions(TCCState *s)
8375 int i;
8376 /* free tokens of unused inline functions */
8377 for (i = 0; i < s->nb_inline_fns; ++i) {
8378 struct InlineFunc *fn = s->inline_fns[i];
/* fn->sym still set means gen_inline_functions never consumed it */
8379 if (fn->sym)
8380 tok_str_free(fn->func_str);
8382 dynarray_reset(&s->inline_fns, &s->nb_inline_fns);
/* Parse and check a C11 _Static_assert declaration:
   _Static_assert(const-expr [, "message"]); — errors out with the message
   (or a default one) when the constant expression evaluates to 0. */
8385 static void do_Static_assert(void)
8387 int c;
8388 const char *msg;
8390 next();
8391 skip('(');
8392 c = expr_const();
8393 msg = "_Static_assert fail";
/* optional second argument: user-supplied diagnostic string */
8394 if (tok == ',') {
8395 next();
8396 msg = parse_mult_str("string constant")->data;
8398 skip(')');
8399 if (c == 0)
8400 tcc_error("%s", msg);
8401 skip(';');
8404 /* 'l' is VT_LOCAL or VT_CONST to define default storage type
8405 or VT_CMP if parsing old style parameter list
8406 or VT_JMP if parsing c99 for decl: for (int i = 0, ...) */
/* Top-level declaration parser: handles _Static_assert, global asm blocks,
   K&R implicit-int, typedefs, function definitions and (extern) variable
   declarations with optional initializers. Returns 1 only for the VT_JMP
   (for-loop declaration) case when a declaration was parsed.
   NOTE(review): blob extract dropped blank/brace lines (embedded numbering
   skips); code kept byte-identical, only comments added. */
8407 static int decl(int l)
8409 int v, has_init, r, oldint;
8410 CType type, btype;
8411 Sym *sym;
8412 AttributeDef ad, adbase;
8414 while (1) {
8416 if (tok == TOK_STATIC_ASSERT) {
8417 do_Static_assert();
8418 continue;
8421 oldint = 0;
/* no base type parsed: handle ';', global asm, K&R int, or stop */
8422 if (!parse_btype(&btype, &adbase, l == VT_LOCAL)) {
8423 if (l == VT_JMP)
8424 return 0;
8425 /* skip redundant ';' if not in old parameter decl scope */
8426 if (tok == ';' && l != VT_CMP) {
8427 next();
8428 continue;
8430 if (l != VT_CONST)
8431 break;
8432 if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
8433 /* global asm block */
8434 asm_global_instr();
8435 continue;
8437 if (tok >= TOK_UIDENT) {
8438 /* special test for old K&R protos without explicit int
8439 type. Only accepted when defining global data */
8440 btype.t = VT_INT;
8441 oldint = 1;
8442 } else {
8443 if (tok != TOK_EOF)
8444 expect("declaration");
8445 break;
/* bare 'struct foo;' / 'enum foo;' declarations */
8449 if (tok == ';') {
8450 if ((btype.t & VT_BTYPE) == VT_STRUCT) {
8451 v = btype.ref->v;
8452 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) >= SYM_FIRST_ANOM
8453 tcc_warning("unnamed struct/union that defines no instances");
8454 next();
8455 continue;
8457 if (IS_ENUM(btype.t)) {
8458 next();
8459 continue;
8463 while (1) { /* iterate thru each declaration */
8464 type = btype;
8465 ad = adbase;
8466 type_decl(&type, &ad, &v, TYPE_DIRECT);
8467 #if 0
8469 char buf[500];
8470 type_to_str(buf, sizeof(buf), &type, get_tok_str(v, NULL));
8471 printf("type = '%s'\n", buf);
8473 #endif
8474 if ((type.t & VT_BTYPE) == VT_FUNC) {
8475 if ((type.t & VT_STATIC) && (l != VT_CONST))
8476 tcc_error("function without file scope cannot be static");
8477 /* if old style function prototype, we accept a
8478 declaration list */
8479 sym = type.ref;
8480 if (sym->f.func_type == FUNC_OLD && l == VT_CONST) {
8481 func_vt = type;
8482 decl(VT_CMP);
8484 #if defined TCC_TARGET_MACHO || defined TARGETOS_ANDROID
8485 if (sym->f.func_alwinl
8486 && ((type.t & (VT_EXTERN | VT_INLINE))
8487 == (VT_EXTERN | VT_INLINE))) {
8488 /* always_inline functions must be handled as if they
8489 don't generate multiple global defs, even if extern
8490 inline, i.e. GNU inline semantics for those. Rewrite
8491 them into static inline. */
8492 type.t &= ~VT_EXTERN;
8493 type.t |= VT_STATIC;
8495 #endif
8496 /* always compile 'extern inline' */
8497 if (type.t & VT_EXTERN)
8498 type.t &= ~VT_INLINE;
8500 } else if (oldint) {
8501 tcc_warning("type defaults to int");
/* optional GNU asm("label") renaming, then trailing attributes */
8504 if (gnu_ext && (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
8505 ad.asm_label = asm_label_instr();
8506 /* parse one last attribute list, after asm label */
8507 parse_attribute(&ad);
8508 #if 0
8509 /* gcc does not allow __asm__("label") with function definition,
8510 but why not ... */
8511 if (tok == '{')
8512 expect(";");
8513 #endif
8516 #ifdef TCC_TARGET_PE
8517 if (ad.a.dllimport || ad.a.dllexport) {
8518 if (type.t & VT_STATIC)
8519 tcc_error("cannot have dll linkage with static");
8520 if (type.t & VT_TYPEDEF) {
8521 tcc_warning("'%s' attribute ignored for typedef",
8522 ad.a.dllimport ? (ad.a.dllimport = 0, "dllimport") :
8523 (ad.a.dllexport = 0, "dllexport"));
8524 } else if (ad.a.dllimport) {
8525 if ((type.t & VT_BTYPE) == VT_FUNC)
8526 ad.a.dllimport = 0;
8527 else
8528 type.t |= VT_EXTERN;
8531 #endif
/* '{' after a function declarator: a function definition */
8532 if (tok == '{') {
8533 if (l != VT_CONST)
8534 tcc_error("cannot use local functions");
8535 if ((type.t & VT_BTYPE) != VT_FUNC)
8536 expect("function definition");
8538 /* reject abstract declarators in function definition
8539 make old style params without decl have int type */
8540 sym = type.ref;
8541 while ((sym = sym->next) != NULL) {
8542 if (!(sym->v & ~SYM_FIELD))
8543 expect("identifier");
8544 if (sym->type.t == VT_VOID)
8545 sym->type = int_type;
8548 /* apply post-declaraton attributes */
8549 merge_funcattr(&type.ref->f, &ad.f);
8551 /* put function symbol */
8552 type.t &= ~VT_EXTERN;
8553 sym = external_sym(v, &type, 0, &ad);
8555 /* static inline functions are just recorded as a kind
8556 of macro. Their code will be emitted at the end of
8557 the compilation unit only if they are used */
8558 if (sym->type.t & VT_INLINE) {
8559 struct InlineFunc *fn;
8560 fn = tcc_malloc(sizeof *fn + strlen(file->filename));
8561 strcpy(fn->filename, file->filename);
8562 fn->sym = sym;
8563 skip_or_save_block(&fn->func_str);
8564 dynarray_add(&tcc_state->inline_fns,
8565 &tcc_state->nb_inline_fns, fn);
8566 } else {
8567 /* compute text section */
8568 cur_text_section = ad.section;
8569 if (!cur_text_section)
8570 cur_text_section = text_section;
8571 gen_function(sym);
8573 break;
8574 } else {
8575 if (l == VT_CMP) {
8576 /* find parameter in function parameter list */
8577 for (sym = func_vt.ref->next; sym; sym = sym->next)
8578 if ((sym->v & ~SYM_FIELD) == v)
8579 goto found;
8580 tcc_error("declaration for parameter '%s' but no such parameter",
8581 get_tok_str(v, NULL));
8582 found:
8583 if (type.t & VT_STORAGE) /* 'register' is okay */
8584 tcc_error("storage class specified for '%s'",
8585 get_tok_str(v, NULL));
8586 if (sym->type.t != VT_VOID)
8587 tcc_error("redefinition of parameter '%s'",
8588 get_tok_str(v, NULL));
8589 convert_parameter_type(&type);
8590 sym->type = type;
8591 } else if (type.t & VT_TYPEDEF) {
8592 /* save typedefed type */
8593 /* XXX: test storage specifiers ? */
8594 sym = sym_find(v);
8595 if (sym && sym->sym_scope == local_scope) {
8596 if (!is_compatible_types(&sym->type, &type)
8597 || !(sym->type.t & VT_TYPEDEF))
8598 tcc_error("incompatible redefinition of '%s'",
8599 get_tok_str(v, NULL));
8600 sym->type = type;
8601 } else {
8602 sym = sym_push(v, &type, 0, 0);
8604 sym->a = ad.a;
8605 if ((type.t & VT_BTYPE) == VT_FUNC)
8606 merge_funcattr(&sym->type.ref->f, &ad.f);
8607 if (debug_modes)
8608 tcc_debug_typedef (tcc_state, sym);
8609 } else if ((type.t & VT_BTYPE) == VT_VOID
8610 && !(type.t & VT_EXTERN)) {
8611 tcc_error("declaration of void object");
8612 } else {
8613 r = 0;
8614 if ((type.t & VT_BTYPE) == VT_FUNC) {
8615 /* external function definition */
8616 /* specific case for func_call attribute */
8617 merge_funcattr(&type.ref->f, &ad.f);
8618 } else if (!(type.t & VT_ARRAY)) {
8619 /* not lvalue if array */
8620 r |= VT_LVAL;
8622 has_init = (tok == '=');
8623 if (has_init && (type.t & VT_VLA))
8624 tcc_error("variable length array cannot be initialized");
8625 if (((type.t & VT_EXTERN) && (!has_init || l != VT_CONST))
8626 || (type.t & VT_BTYPE) == VT_FUNC
8627 /* as with GCC, uninitialized global arrays with no size
8628 are considered extern: */
8629 || ((type.t & VT_ARRAY) && !has_init
8630 && l == VT_CONST && type.ref->c < 0)
8632 /* external variable or function */
8633 type.t |= VT_EXTERN;
8634 sym = external_sym(v, &type, r, &ad);
8635 if (ad.alias_target) {
8636 /* Aliases need to be emitted when their target
8637 symbol is emitted, even if perhaps unreferenced.
8638 We only support the case where the base is
8639 already defined, otherwise we would need
8640 deferring to emit the aliases until the end of
8641 the compile unit. */
8642 Sym *alias_target = sym_find(ad.alias_target);
8643 ElfSym *esym = elfsym(alias_target);
8644 if (!esym)
8645 tcc_error("unsupported forward __alias__ attribute");
8646 put_extern_sym2(sym, esym->st_shndx,
8647 esym->st_value, esym->st_size, 1);
8649 } else {
8650 if (l == VT_CONST || (type.t & VT_STATIC))
8651 r |= VT_CONST;
8652 else
8653 r |= VT_LOCAL;
8654 if (has_init)
8655 next();
8656 else if (l == VT_CONST)
8657 /* uninitialized global variables may be overridden */
8658 type.t |= VT_EXTERN;
8659 decl_initializer_alloc(&type, &ad, r, has_init, v, l == VT_CONST);
/* more declarators in this declaration, or end of it */
8662 if (tok != ',') {
8663 if (l == VT_JMP)
8664 return 1;
8665 skip(';');
8666 break;
8668 next();
8672 return 0;
8675 /* ------------------------------------------------------------------------- */
8676 #undef gjmp_addr
8677 #undef gjmp
8678 /* ------------------------------------------------------------------------- */