Add tcov support in Makefile
[tinycc.git] / tccgen.c
blob88436a8355b2471aedd4f7861792e67bac9ce764
1 /*
2 * TCC - Tiny C Compiler
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#define USING_GLOBALS
#include "tcc.h"

/********************************************************/
/* global variables */

/* loc : local variable index
   ind : output code index
   rsym: return symbol
   anon_sym: anonymous symbol index
*/
ST_DATA int rsym, anon_sym, ind, loc;

ST_DATA Sym *global_stack;
ST_DATA Sym *local_stack;
ST_DATA Sym *define_stack;
ST_DATA Sym *global_label_stack;
ST_DATA Sym *local_label_stack;

/* free list / pool allocator state for Sym objects (see sym_malloc) */
static Sym *sym_free_first;
static void **sym_pools;
static int nb_sym_pools;

static Sym *all_cleanups, *pending_gotos;
static int local_scope;
static int in_sizeof;
static int constant_p;
ST_DATA char debug_modes; /* bit 0: debug info, bit 1: test coverage (see tccgen_compile) */

/* value stack: vtop points at the current top; _vstack has one extra
   slot below vstack so vtop == vstack - 1 is a valid "empty" state */
ST_DATA SValue *vtop;
static SValue _vstack[1 + VSTACK_SIZE];
#define vstack (_vstack + 1)

ST_DATA int nocode_wanted; /* no code generation wanted */
#define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
#define DATA_ONLY_WANTED 0x80000000 /* ON outside of functions and for static initializers */

/* no code output after unconditional jumps such as with if (0) ... */
#define CODE_OFF_BIT 0x20000000
#define CODE_OFF() if(!nocode_wanted)(nocode_wanted |= CODE_OFF_BIT)
#define CODE_ON() (nocode_wanted &= ~CODE_OFF_BIT)

/* no code output when parsing sizeof()/typeof() etc. (using nocode_wanted++/--) */
#define NOEVAL_MASK 0x0000FFFF
#define NOEVAL_WANTED (nocode_wanted & NOEVAL_MASK)

/* no code output when parsing constant expressions */
#define CONST_WANTED_BIT 0x00010000
#define CONST_WANTED_MASK 0x0FFF0000
#define CONST_WANTED (nocode_wanted & CONST_WANTED_MASK)

ST_DATA int global_expr; /* true if compound literals must be allocated globally (used during initializers parsing */
ST_DATA CType func_vt; /* current function return type (used by return instruction) */
ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
ST_DATA int func_vc;
ST_DATA int func_ind;
ST_DATA const char *funcname;
ST_DATA CType int_type, func_old_type, char_type, char_pointer_type;
static CString initstr;

/* platform-sized integer types: size_t/ptrdiff_t equivalents */
#if PTR_SIZE == 4
#define VT_SIZE_T (VT_INT | VT_UNSIGNED)
#define VT_PTRDIFF_T VT_INT
#elif LONG_SIZE == 4
#define VT_SIZE_T (VT_LLONG | VT_UNSIGNED)
#define VT_PTRDIFF_T VT_LLONG
#else
#define VT_SIZE_T (VT_LONG | VT_LLONG | VT_UNSIGNED)
#define VT_PTRDIFF_T (VT_LONG | VT_LLONG)
#endif
/* state of the switch statement currently being parsed */
static struct switch_t {
    struct case_t {
        int64_t v1, v2; /* case value, or range [v1..v2] for GNU case ranges */
        int sym;        /* label symbol of the case's code */
    } **p; int n; /* list of case ranges */
    int def_sym; /* default symbol */
    int nocode_wanted;
    int *bsym;
    struct scope *scope;
    struct switch_t *prev; /* enclosing switch, if nested */
    SValue sv;
} *cur_switch; /* current switch */

#define MAX_TEMP_LOCAL_VARIABLE_NUMBER 8
/*list of temporary local variables on the stack in current function. */
static struct temp_local_variable {
    int location; //offset on stack. Svalue.c.i
    short size;
    short align;
} arr_temp_local_vars[MAX_TEMP_LOCAL_VARIABLE_NUMBER];
static int nb_temp_local_vars;

/* lexical scope chain: VLA state, cleanups, break/continue targets,
   and the local/label symbol stacks saved at scope entry */
static struct scope {
    struct scope *prev;
    struct { int loc, locorig, num; } vla;
    struct { Sym *s; int n; } cl;
    int *bsym, *csym;
    Sym *lstk, *llstk;
} *cur_scope, *loop_scope, *root_scope;

/* parameters threaded through the initializer parsing/emission code */
typedef struct {
    Section *sec;
    int local_offset;
    Sym *flex_array_ref;
} init_params;
128 #if 1
129 #define precedence_parser
130 static void init_prec(void);
131 #endif
133 static void gen_cast(CType *type);
134 static void gen_cast_s(int t);
135 static inline CType *pointed_type(CType *type);
136 static int is_compatible_types(CType *type1, CType *type2);
137 static int parse_btype(CType *type, AttributeDef *ad, int ignore_label);
138 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td);
139 static void parse_expr_type(CType *type);
140 static void init_putv(init_params *p, CType *type, unsigned long c);
141 static void decl_initializer(init_params *p, CType *type, unsigned long c, int flags);
142 static void block(int is_expr);
143 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
144 static int decl(int l);
145 static void expr_eq(void);
146 static void vpush_type_size(CType *type, int *a);
147 static int is_compatible_unqualified_types(CType *type1, CType *type2);
148 static inline int64_t expr_const64(void);
149 static void vpush64(int ty, unsigned long long v);
150 static void vpush(CType *type);
151 static int gvtst(int inv, int t);
152 static void gen_inline_functions(TCCState *s);
153 static void free_inline_functions(TCCState *s);
154 static void skip_or_save_block(TokenString **str);
155 static void gv_dup(void);
156 static int get_temp_local_var(int size,int align);
157 static void clear_temp_local_var_list();
158 static void cast_error(CType *st, CType *dt);
/* ------------------------------------------------------------------------- */
/* Automagical code suppression */

/* Clear 'nocode_wanted' at forward label if it was used */
ST_FUNC void gsym(int t)
{
    if (t) {
        /* resolve the forward jump chain 't' to the current output index */
        gsym_addr(t, ind);
        CODE_ON();
    }
}
172 /* Clear 'nocode_wanted' if current pc is a label */
173 static int gind()
175 int t = ind;
176 CODE_ON();
177 if (debug_modes)
178 tcc_tcov_block_begin(tcc_state);
179 return t;
/* Set 'nocode_wanted' after unconditional (backwards) jump */
static void gjmp_addr_acs(int t)
{
    gjmp_addr(t);
    CODE_OFF();
}

/* Set 'nocode_wanted' after unconditional (forwards) jump */
static int gjmp_acs(int t)
{
    t = gjmp(t);
    CODE_OFF();
    return t;
}

/* These are #undef'd at the end of this file */
#define gjmp_addr gjmp_addr_acs
#define gjmp gjmp_acs
/* ------------------------------------------------------------------------- */
202 ST_INLN int is_float(int t)
204 int bt = t & VT_BTYPE;
205 return bt == VT_LDOUBLE
206 || bt == VT_DOUBLE
207 || bt == VT_FLOAT
208 || bt == VT_QFLOAT;
211 static inline int is_integer_btype(int bt)
213 return bt == VT_BYTE
214 || bt == VT_BOOL
215 || bt == VT_SHORT
216 || bt == VT_INT
217 || bt == VT_LLONG;
220 static int btype_size(int bt)
222 return bt == VT_BYTE || bt == VT_BOOL ? 1 :
223 bt == VT_SHORT ? 2 :
224 bt == VT_INT ? 4 :
225 bt == VT_LLONG ? 8 :
226 bt == VT_PTR ? PTR_SIZE : 0;
/* returns function return register from type */
static int R_RET(int t)
{
    if (!is_float(t))
        return REG_IRET;
#ifdef TCC_TARGET_X86_64
    /* long double is returned on the x87 stack on x86-64 */
    if ((t & VT_BTYPE) == VT_LDOUBLE)
        return TREG_ST0;
#elif defined TCC_TARGET_RISCV64
    /* riscv64 returns long double in integer registers */
    if ((t & VT_BTYPE) == VT_LDOUBLE)
        return REG_IRET;
#endif
    return REG_FRET;
}

/* returns 2nd function return register, if any */
static int R2_RET(int t)
{
    t &= VT_BTYPE;
#if PTR_SIZE == 4
    if (t == VT_LLONG)
        return REG_IRE2;
#elif defined TCC_TARGET_X86_64
    if (t == VT_QLONG)
        return REG_IRE2;
    if (t == VT_QFLOAT)
        return REG_FRE2;
#elif defined TCC_TARGET_RISCV64
    if (t == VT_LDOUBLE)
        return REG_IRE2;
#endif
    /* VT_CONST means: no second register used */
    return VT_CONST;
}

/* returns true for two-word types */
#define USING_TWO_WORDS(t) (R2_RET(t) != VT_CONST)

/* put function return registers to stack value */
static void PUT_R_RET(SValue *sv, int t)
{
    sv->r = R_RET(t), sv->r2 = R2_RET(t);
}

/* returns function return register class for type t */
static int RC_RET(int t)
{
    return reg_classes[R_RET(t)] & ~(RC_FLOAT | RC_INT);
}
/* returns generic register class for type t */
static int RC_TYPE(int t)
{
    if (!is_float(t))
        return RC_INT;
#ifdef TCC_TARGET_X86_64
    if ((t & VT_BTYPE) == VT_LDOUBLE)
        return RC_ST0;
    if ((t & VT_BTYPE) == VT_QFLOAT)
        return RC_FRET;
#elif defined TCC_TARGET_RISCV64
    if ((t & VT_BTYPE) == VT_LDOUBLE)
        return RC_INT;
#endif
    return RC_FLOAT;
}

/* returns 2nd register class corresponding to t and rc */
static int RC2_TYPE(int t, int rc)
{
    if (!USING_TWO_WORDS(t))
        return 0; /* single-word type: no second class */
#ifdef RC_IRE2
    if (rc == RC_IRET)
        return RC_IRE2;
#endif
#ifdef RC_FRE2
    if (rc == RC_FRET)
        return RC_FRE2;
#endif
    if (rc & RC_FLOAT)
        return RC_FLOAT;
    return RC_INT;
}
/* we use our own 'finite' function to avoid potential problems with
   non standard math libs */
/* XXX: endianness dependent */
ST_FUNC int ieee_finite(double d)
{
    int p[4];
    /* inspect the IEEE-754 bit pattern via memcpy (no aliasing UB);
       result is 0 when the exponent field is all ones (inf/NaN) */
    memcpy(p, &d, sizeof(double));
    return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
}

/* compiling intel long double natively */
#if (defined __i386__ || defined __x86_64__) \
    && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
# define TCC_IS_NATIVE_387
#endif
/* error out unless the top of the value stack is an lvalue */
ST_FUNC void test_lvalue(void)
{
    if (!(vtop->r & VT_LVAL))
        expect("lvalue");
}

/* internal sanity check: the value stack must be empty between
   top-level declarations */
ST_FUNC void check_vstack(void)
{
    if (vtop != vstack - 1)
        tcc_error("internal compiler error: vstack leak (%d)",
                  (int)(vtop - vstack + 1));
}
/* vstack debugging aid */
#if 0
void pv (const char *lbl, int a, int b)
{
    int i;
    for (i = a; i < a + b; ++i) {
        SValue *p = &vtop[-i];
        printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
            lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
    }
}
#endif
/* ------------------------------------------------------------------------- */
/* initialize vstack and types. This must be done also for tcc -E */
ST_FUNC void tccgen_init(TCCState *s1)
{
    vtop = vstack - 1;
    memset(vtop, 0, sizeof *vtop);

    /* define some often used types */
    int_type.t = VT_INT;

    char_type.t = VT_BYTE;
    if (s1->char_is_unsigned)
        char_type.t |= VT_UNSIGNED;
    char_pointer_type = char_type;
    mk_pointer(&char_pointer_type);

    /* old-style (unprototyped) function returning int */
    func_old_type.t = VT_FUNC;
    func_old_type.ref = sym_push(SYM_FIELD, &int_type, 0, 0);
    func_old_type.ref->f.func_call = FUNC_CDECL;
    func_old_type.ref->f.func_type = FUNC_OLD;
#ifdef precedence_parser
    init_prec();
#endif
    cstr_new(&initstr);
}
/* compile the current input file; returns 0 on success */
ST_FUNC int tccgen_compile(TCCState *s1)
{
    cur_text_section = NULL;
    funcname = "";
    func_ind = -1;
    anon_sym = SYM_FIRST_ANOM;
    nocode_wanted = DATA_ONLY_WANTED; /* no code outside of functions */
    local_scope = 0;
    /* bit 0: -g debug info, bit 1: -ftest-coverage */
    debug_modes = (s1->do_debug ? 1 : 0) | s1->test_coverage << 1;

    tcc_debug_start(s1);
    tcc_tcov_start (s1);
#ifdef TCC_TARGET_ARM
    arm_init(s1);
#endif
#ifdef INC_DEBUG
    printf("%s: **** new file\n", file->filename);
#endif
    parse_flags = PARSE_FLAG_PREPROCESS | PARSE_FLAG_TOK_NUM | PARSE_FLAG_TOK_STR;
    next();
    decl(VT_CONST); /* parse the whole translation unit */
    gen_inline_functions(s1);
    check_vstack();
    /* end of translation unit info */
    tcc_debug_end(s1);
    tcc_tcov_end(s1);
    return 0;
}
/* release all code-generator state (also called on error paths) */
ST_FUNC void tccgen_finish(TCCState *s1)
{
    tcc_debug_end(s1); /* just in case of errors: free memory */
    free_inline_functions(s1);
    sym_pop(&global_stack, NULL, 0);
    sym_pop(&local_stack, NULL, 0);
    /* free preprocessor macros */
    free_defines(NULL);
    /* free sym_pools */
    dynarray_reset(&sym_pools, &nb_sym_pools);
    sym_free_first = NULL;
    global_label_stack = local_label_stack = NULL;
    cstr_free(&initstr);
    dynarray_reset(&stk_data, &nb_stk_data);
}
/* ------------------------------------------------------------------------- */
/* return the ELF symbol for Sym 's', or NULL if none was emitted yet
   (s->c is the index into the symtab section) */
ST_FUNC ElfSym *elfsym(Sym *s)
{
    if (!s || !s->c)
        return NULL;
    return &((ElfSym *)symtab_section->data)[s->c];
}
/* apply storage attributes to Elf symbol */
ST_FUNC void update_storage(Sym *sym)
{
    ElfSym *esym;
    int sym_bind, old_sym_bind;

    esym = elfsym(sym);
    if (!esym)
        return; /* no ELF symbol emitted yet */

    if (sym->a.visibility)
        esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1))
            | sym->a.visibility;

    /* static/inline -> local, weak attribute -> weak, else global */
    if (sym->type.t & (VT_STATIC | VT_INLINE))
        sym_bind = STB_LOCAL;
    else if (sym->a.weak)
        sym_bind = STB_WEAK;
    else
        sym_bind = STB_GLOBAL;
    old_sym_bind = ELFW(ST_BIND)(esym->st_info);
    if (sym_bind != old_sym_bind) {
        esym->st_info = ELFW(ST_INFO)(sym_bind, ELFW(ST_TYPE)(esym->st_info));
    }

#ifdef TCC_TARGET_PE
    if (sym->a.dllimport)
        esym->st_other |= ST_PE_IMPORT;
    if (sym->a.dllexport)
        esym->st_other |= ST_PE_EXPORT;
#endif

#if 0
    printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
        get_tok_str(sym->v, NULL),
        sym_bind == STB_WEAK ? 'w' : sym_bind == STB_LOCAL ? 'l' : 'g',
        sym->a.visibility,
        sym->a.dllexport,
        sym->a.dllimport
        );
#endif
}
/* ------------------------------------------------------------------------- */
/* update sym->c so that it points to an external symbol in section
   'section' with value 'value' */

ST_FUNC void put_extern_sym2(Sym *sym, int sh_num,
                            addr_t value, unsigned long size,
                            int can_add_underscore)
{
    int sym_type, sym_bind, info, other, t;
    ElfSym *esym;
    const char *name;
    char buf1[256];

    if (!sym->c) {
        /* first time: create the ELF symbol */
        name = get_tok_str(sym->v, NULL);
        t = sym->type.t;
        if ((t & VT_BTYPE) == VT_FUNC) {
            sym_type = STT_FUNC;
        } else if ((t & VT_BTYPE) == VT_VOID) {
            sym_type = STT_NOTYPE;
            if ((t & (VT_BTYPE|VT_ASM_FUNC)) == VT_ASM_FUNC)
                sym_type = STT_FUNC;
        } else {
            sym_type = STT_OBJECT;
        }
        if (t & (VT_STATIC | VT_INLINE))
            sym_bind = STB_LOCAL;
        else
            sym_bind = STB_GLOBAL;
        other = 0;

#ifdef TCC_TARGET_PE
        if (sym_type == STT_FUNC && sym->type.ref) {
            Sym *ref = sym->type.ref;
            if (ref->a.nodecorate) {
                can_add_underscore = 0;
            }
            /* stdcall decoration: _name@<args-size> */
            if (ref->f.func_call == FUNC_STDCALL && can_add_underscore) {
                sprintf(buf1, "_%s@%d", name, ref->f.func_args * PTR_SIZE);
                name = buf1;
                other |= ST_PE_STDCALL;
                can_add_underscore = 0;
            }
        }
#endif

        /* an explicit asm label overrides the C name */
        if (sym->asm_label) {
            name = get_tok_str(sym->asm_label, NULL);
            can_add_underscore = 0;
        }

        if (tcc_state->leading_underscore && can_add_underscore) {
            buf1[0] = '_';
            pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
            name = buf1;
        }

        info = ELFW(ST_INFO)(sym_bind, sym_type);
        sym->c = put_elf_sym(symtab_section, value, size, info, other, sh_num, name);

        if (debug_modes)
            tcc_debug_extern_sym(tcc_state, sym, sh_num, sym_bind, sym_type);

    } else {
        /* symbol already exists: just update its definition */
        esym = elfsym(sym);
        esym->st_value = value;
        esym->st_size = size;
        esym->st_shndx = sh_num;
    }
    update_storage(sym);
}

/* like put_extern_sym2 but takes a Section and suppresses output under
   nocode_wanted when appropriate */
ST_FUNC void put_extern_sym(Sym *sym, Section *s, addr_t value, unsigned long size)
{
    if (nocode_wanted && (NODATA_WANTED || (s && s == cur_text_section)))
        return;
    put_extern_sym2(sym, s ? s->sh_num : SHN_UNDEF, value, size, 1);
}
/* add a new relocation entry to symbol 'sym' in section 's' */
ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
                     addr_t addend)
{
    int c = 0;

    if (nocode_wanted && s == cur_text_section)
        return;

    if (sym) {
        /* make sure the symbol exists in the symtab first */
        if (0 == sym->c)
            put_extern_sym(sym, NULL, 0, 0);
        c = sym->c;
    }

    /* now we can add ELF relocation info */
    put_elf_reloca(symtab_section, s, offset, type, c, addend);
}

#if PTR_SIZE == 4
/* convenience wrapper without addend (32-bit targets) */
ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
{
    greloca(s, sym, offset, type, 0);
}
#endif
/* ------------------------------------------------------------------------- */
/* symbol allocator */

/* allocate a fresh pool of SYM_POOL_NB Syms and thread them onto the
   free list; returns the new head of the free list */
static Sym *__sym_malloc(void)
{
    Sym *sym_pool, *sym, *last_sym;
    int i;

    sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
    dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);

    last_sym = sym_free_first;
    sym = sym_pool;
    for(i = 0; i < SYM_POOL_NB; i++) {
        sym->next = last_sym;
        last_sym = sym;
        sym++;
    }
    sym_free_first = last_sym;
    return last_sym;
}

/* pop a Sym from the free list (pool allocator), or plain malloc when
   SYM_DEBUG is defined (lets memory checkers see each Sym) */
static inline Sym *sym_malloc(void)
{
    Sym *sym;
#ifndef SYM_DEBUG
    sym = sym_free_first;
    if (!sym)
        sym = __sym_malloc();
    sym_free_first = sym->next;
    return sym;
#else
    sym = tcc_malloc(sizeof(Sym));
    return sym;
#endif
}

/* return a Sym to the free list (or free it under SYM_DEBUG) */
ST_INLN void sym_free(Sym *sym)
{
#ifndef SYM_DEBUG
    sym->next = sym_free_first;
    sym_free_first = sym;
#else
    tcc_free(sym);
#endif
}
/* push, without hashing */
ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, int c)
{
    Sym *s;

    s = sym_malloc();
    memset(s, 0, sizeof *s);
    s->v = v;
    s->type.t = t;
    s->c = c;
    /* add in stack */
    s->prev = *ps;
    *ps = s;
    return s;
}
644 /* find a symbol and return its associated structure. 's' is the top
645 of the symbol stack */
646 ST_FUNC Sym *sym_find2(Sym *s, int v)
648 while (s) {
649 if (s->v == v)
650 return s;
651 else if (s->v == -1)
652 return NULL;
653 s = s->prev;
655 return NULL;
/* structure lookup */
ST_INLN Sym *struct_find(int v)
{
    v -= TOK_IDENT;
    /* unsigned compare also rejects v < 0 */
    if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
        return NULL;
    return table_ident[v]->sym_struct;
}

/* find an identifier */
ST_INLN Sym *sym_find(int v)
{
    v -= TOK_IDENT;
    if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
        return NULL;
    return table_ident[v]->sym_identifier;
}

/* scope of a symbol; enum values inherit the scope of their enum type */
static int sym_scope(Sym *s)
{
    if (IS_ENUM_VAL (s->type.t))
        return s->type.ref->sym_scope;
    else
        return s->sym_scope;
}
/* push a given symbol on the symbol stack */
ST_FUNC Sym *sym_push(int v, CType *type, int r, int c)
{
    Sym *s, **ps;
    TokenSym *ts;

    /* inside a function body, push on the local stack */
    if (local_stack)
        ps = &local_stack;
    else
        ps = &global_stack;
    s = sym_push2(ps, v, type->t, c);
    s->type.ref = type->ref;
    s->r = r;
    /* don't record fields or anonymous symbols */
    /* XXX: simplify */
    if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
        /* record symbol in token array */
        ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
        if (v & SYM_STRUCT)
            ps = &ts->sym_struct;
        else
            ps = &ts->sym_identifier;
        s->prev_tok = *ps;
        *ps = s;
        s->sym_scope = local_scope;
        /* same name in the same scope is a redeclaration error */
        if (s->prev_tok && sym_scope(s->prev_tok) == s->sym_scope)
            tcc_error("redeclaration of '%s'",
                get_tok_str(v & ~SYM_STRUCT, NULL));
    }
    return s;
}
/* push a global identifier */
ST_FUNC Sym *global_identifier_push(int v, int t, int c)
{
    Sym *s, **ps;
    s = sym_push2(&global_stack, v, t, c);
    s->r = VT_CONST | VT_SYM;
    /* don't record anonymous symbol */
    if (v < SYM_FIRST_ANOM) {
        ps = &table_ident[v - TOK_IDENT]->sym_identifier;
        /* modify the top most local identifier, so that sym_identifier will
           point to 's' when popped; happens when called from inline asm */
        while (*ps != NULL && (*ps)->sym_scope)
            ps = &(*ps)->prev_tok;
        s->prev_tok = *ps;
        *ps = s;
    }
    return s;
}
/* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
   pop them yet from the list, but do remove them from the token array. */
ST_FUNC void sym_pop(Sym **ptop, Sym *b, int keep)
{
    Sym *s, *ss, **ps;
    TokenSym *ts;
    int v;

    s = *ptop;
    while(s != b) {
        ss = s->prev;
        v = s->v;
        /* remove symbol in token array */
        /* XXX: simplify */
        if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
            ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
            if (v & SYM_STRUCT)
                ps = &ts->sym_struct;
            else
                ps = &ts->sym_identifier;
            *ps = s->prev_tok;
        }
        if (!keep)
            sym_free(s);
        s = ss;
    }
    if (!keep)
        *ptop = b;
}
/* label lookup */
ST_FUNC Sym *label_find(int v)
{
    v -= TOK_IDENT;
    if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
        return NULL;
    return table_ident[v]->sym_label;
}

/* push a label onto stack 'ptop'; 'flags' is the LABEL_* state */
ST_FUNC Sym *label_push(Sym **ptop, int v, int flags)
{
    Sym *s, **ps;
    s = sym_push2(ptop, v, VT_STATIC, 0);
    s->r = flags;
    ps = &table_ident[v - TOK_IDENT]->sym_label;
    if (ptop == &global_label_stack) {
        /* modify the top most local identifier, so that
           sym_identifier will point to 's' when popped */
        while (*ps != NULL)
            ps = &(*ps)->prev_tok;
    }
    s->prev_tok = *ps;
    *ps = s;
    return s;
}
/* pop labels until element last is reached. Look if any labels are
   undefined. Define symbols if '&&label' was used. */
ST_FUNC void label_pop(Sym **ptop, Sym *slast, int keep)
{
    Sym *s, *s1;
    for(s = *ptop; s != slast; s = s1) {
        s1 = s->prev;
        if (s->r == LABEL_DECLARED) {
            tcc_warning_c(warn_all)("label '%s' declared but not used", get_tok_str(s->v, NULL));
        } else if (s->r == LABEL_FORWARD) {
            tcc_error("label '%s' used but not defined",
                      get_tok_str(s->v, NULL));
        } else {
            if (s->c) {
                /* define corresponding symbol. A size of
                   1 is put. */
                put_extern_sym(s, cur_text_section, s->jnext, 1);
            }
        }
        /* remove label */
        if (s->r != LABEL_GONE)
            table_ident[s->v - TOK_IDENT]->sym_label = s->prev_tok;
        if (!keep)
            sym_free(s);
        else
            s->r = LABEL_GONE;
    }
    if (!keep)
        *ptop = slast;
}
/* ------------------------------------------------------------------------- */
/* materialize a pending VT_CMP before other code is generated */
static void vcheck_cmp(void)
{
    /* cannot let cpu flags if other instruction are generated. Also
       avoid leaving VT_JMP anywhere except on the top of the stack
       because it would complicate the code generator.

       Don't do this when nocode_wanted. vtop might come from
       !nocode_wanted regions (see 88_codeopt.c) and transforming
       it to a register without actually generating code is wrong
       as their value might still be used for real. All values
       we push under nocode_wanted will eventually be popped
       again, so that the VT_CMP/VT_JMP value will be in vtop
       when code is unsuppressed again. */

    /* However if it's just automatic suppression via CODE_OFF/ON()
       then it seems that we better let things work undisturbed.
       How can it work at all under nocode_wanted? Well, gv() will
       actually clear it at the gsym() in load()/VT_JMP in the
       generator backends */

    if (vtop->r == VT_CMP && 0 == (nocode_wanted & ~CODE_OFF_BIT))
        gv(RC_INT);
}
/* push a new value on the stack with type/register/constant */
static void vsetc(CType *type, int r, CValue *vc)
{
    if (vtop >= vstack + (VSTACK_SIZE - 1))
        tcc_error("memory full (vstack)");
    vcheck_cmp();
    vtop++;
    vtop->type = *type;
    vtop->r = r;
    vtop->r2 = VT_CONST; /* no second register by default */
    vtop->c = *vc;
    vtop->sym = NULL;
}

/* swap the two topmost stack values */
ST_FUNC void vswap(void)
{
    SValue tmp;

    vcheck_cmp();
    tmp = vtop[0];
    vtop[0] = vtop[-1];
    vtop[-1] = tmp;
}
/* pop stack value */
ST_FUNC void vpop(void)
{
    int v;
    v = vtop->r & VT_VALMASK;
#if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
    /* for x86, we need to pop the FP stack */
    if (v == TREG_ST0) {
        o(0xd8dd); /* fstp %st(0) */
    } else
#endif
    if (v == VT_CMP) {
        /* need to put correct jump if && or || without test */
        gsym(vtop->jtrue);
        gsym(vtop->jfalse);
    }
    vtop--;
}
/* push constant of type "type" with useless value */
static void vpush(CType *type)
{
    vset(type, VT_CONST, 0);
}

/* push arbitrary 64bit constant */
static void vpush64(int ty, unsigned long long v)
{
    CValue cval;
    CType ctype;
    ctype.t = ty;
    ctype.ref = NULL;
    cval.i = v;
    vsetc(&ctype, VT_CONST, &cval);
}

/* push integer constant */
ST_FUNC void vpushi(int v)
{
    vpush64(VT_INT, v);
}

/* push a pointer sized constant */
static void vpushs(addr_t v)
{
    vpush64(VT_SIZE_T, v);
}

/* push long long constant */
static inline void vpushll(long long v)
{
    vpush64(VT_LLONG, v);
}

/* push a value with type/register and small integer constant 'v' */
ST_FUNC void vset(CType *type, int r, int v)
{
    CValue cval;
    cval.i = v;
    vsetc(type, r, &cval);
}

/* push an int-typed value with register/constant 'v' */
static void vseti(int r, int v)
{
    CType type;
    type.t = VT_INT;
    type.ref = NULL;
    vset(&type, r, v);
}
/* push a copy of stack value 'v' */
ST_FUNC void vpushv(SValue *v)
{
    if (vtop >= vstack + (VSTACK_SIZE - 1))
        tcc_error("memory full (vstack)");
    vtop++;
    *vtop = *v;
}

/* duplicate the top stack value */
static void vdup(void)
{
    vpushv(vtop);
}

/* rotate n first stack elements to the bottom
   I1 ... In -> I2 ... In I1 [top is right]
*/
ST_FUNC void vrotb(int n)
{
    int i;
    SValue tmp;

    vcheck_cmp();
    tmp = vtop[-n + 1];
    for(i=-n+1;i!=0;i++)
        vtop[i] = vtop[i+1];
    vtop[0] = tmp;
}

/* rotate the n elements before entry e towards the top
   I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
*/
ST_FUNC void vrote(SValue *e, int n)
{
    int i;
    SValue tmp;

    vcheck_cmp();
    tmp = *e;
    for(i = 0;i < n - 1; i++)
        e[-i] = e[-i - 1];
    e[-n + 1] = tmp;
}

/* rotate n first stack elements to the top
   I1 ... In -> In I1 ... I(n-1) [top is right]
*/
ST_FUNC void vrott(int n)
{
    vrote(vtop, n);
}
/* ------------------------------------------------------------------------- */
/* vtop->r = VT_CMP means CPU-flags have been set from comparison or test. */

/* called from generators to set the result from relational ops */
ST_FUNC void vset_VT_CMP(int op)
{
    vtop->r = VT_CMP;
    vtop->cmp_op = op;
    vtop->jfalse = 0;
    vtop->jtrue = 0;
}

/* called once before asking generators to load VT_CMP to a register */
static void vset_VT_JMP(void)
{
    int op = vtop->cmp_op;

    if (vtop->jtrue || vtop->jfalse) {
        int origt = vtop->type.t;
        /* we need to jump to 'mov $0,%R' or 'mov $1,%R' */
        int inv = op & (op < 2); /* small optimization */
        vseti(VT_JMP+inv, gvtst(inv, 0));
        vtop->type.t |= origt & (VT_UNSIGNED | VT_DEFSIGN);
    } else {
        /* otherwise convert flags (rsp. 0/1) to register */
        vtop->c.i = op;
        if (op < 2) /* doesn't seem to happen */
            vtop->r = VT_CONST;
    }
}
/* Set CPU Flags, doesn't yet jump */
static void gvtst_set(int inv, int t)
{
    int *p;

    if (vtop->r != VT_CMP) {
        /* force a comparison against 0 to get flags */
        vpushi(0);
        gen_op(TOK_NE);
        if (vtop->r != VT_CMP) /* must be VT_CONST then */
            vset_VT_CMP(vtop->c.i != 0);
    }

    p = inv ? &vtop->jfalse : &vtop->jtrue;
    *p = gjmp_append(*p, t);
}

/* Generate value test
 *
 * Generate a test for any value (jump, comparison and integers) */
static int gvtst(int inv, int t)
{
    int op, x, u;

    gvtst_set(inv, t);
    t = vtop->jtrue, u = vtop->jfalse;
    if (inv)
        x = u, u = t, t = x;
    op = vtop->cmp_op;

    /* jump to the wanted target */
    if (op > 1)
        t = gjmp_cond(op ^ inv, t);
    else if (op != inv)
        t = gjmp(t);
    /* resolve complementary jumps to here */
    gsym(u);

    vtop--;
    return t;
}
/* generate a zero or nozero test */
static void gen_test_zero(int op)
{
    if (vtop->r == VT_CMP) {
        int j;
        if (op == TOK_EQ) {
            /* invert the pending comparison instead of emitting code */
            j = vtop->jfalse;
            vtop->jfalse = vtop->jtrue;
            vtop->jtrue = j;
            vtop->cmp_op ^= 1;
        }
    } else {
        vpushi(0);
        gen_op(op);
    }
}
/* ------------------------------------------------------------------------- */
/* push a symbol value of TYPE */
ST_FUNC void vpushsym(CType *type, Sym *sym)
{
    CValue cval;
    cval.i = 0;
    vsetc(type, VT_CONST | VT_SYM, &cval);
    vtop->sym = sym;
}

/* Return a static symbol pointing to a section */
ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
{
    int v;
    Sym *sym;

    v = anon_sym++; /* fresh anonymous symbol */
    sym = sym_push(v, type, VT_CONST | VT_SYM, 0);
    sym->type.t |= VT_STATIC;
    put_extern_sym(sym, sec, offset, size);
    return sym;
}

/* push a reference to a section offset by adding a dummy symbol */
static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
{
    vpushsym(type, get_sym_ref(type, sec, offset, size));
}
/* define a new external reference to a symbol 'v' of type 'u' */
ST_FUNC Sym *external_global_sym(int v, CType *type)
{
    Sym *s;

    s = sym_find(v);
    if (!s) {
        /* push forward reference */
        s = global_identifier_push(v, type->t | VT_EXTERN, 0);
        s->type.ref = type->ref;
    } else if (IS_ASM_SYM(s)) {
        /* give an asm-declared symbol its real C type */
        s->type.t = type->t | (s->type.t & VT_EXTERN);
        s->type.ref = type->ref;
        update_storage(s);
    }
    return s;
}

/* create an external reference with no specific type similar to asm labels.
   This avoids type conflicts if the symbol is used from C too */
ST_FUNC Sym *external_helper_sym(int v)
{
    CType ct = { VT_ASM_FUNC, NULL };
    return external_global_sym(v, &ct);
}

/* push a reference to an helper function (such as memmove) */
ST_FUNC void vpush_helper_func(int v)
{
    vpushsym(&func_old_type, external_helper_sym(v));
}
/* Merge symbol attributes. */
static void merge_symattr(struct SymAttr *sa, struct SymAttr *sa1)
{
    if (sa1->aligned && !sa->aligned)
        sa->aligned = sa1->aligned;
    sa->packed |= sa1->packed;
    sa->weak |= sa1->weak;
    sa->nodebug |= sa1->nodebug;
    if (sa1->visibility != STV_DEFAULT) {
        /* the most restrictive visibility wins */
        int vis = sa->visibility;
        if (vis == STV_DEFAULT
            || vis > sa1->visibility)
            vis = sa1->visibility;
        sa->visibility = vis;
    }
    sa->dllexport |= sa1->dllexport;
    sa->nodecorate |= sa1->nodecorate;
    sa->dllimport |= sa1->dllimport;
}

/* Merge function attributes. */
static void merge_funcattr(struct FuncAttr *fa, struct FuncAttr *fa1)
{
    if (fa1->func_call && !fa->func_call)
        fa->func_call = fa1->func_call;
    if (fa1->func_type && !fa->func_type)
        fa->func_type = fa1->func_type;
    if (fa1->func_args && !fa->func_args)
        fa->func_args = fa1->func_args;
    if (fa1->func_noreturn)
        fa->func_noreturn = 1;
    if (fa1->func_ctor)
        fa->func_ctor = 1;
    if (fa1->func_dtor)
        fa->func_dtor = 1;
}

/* Merge attributes. */
static void merge_attr(AttributeDef *ad, AttributeDef *ad1)
{
    merge_symattr(&ad->a, &ad1->a);
    merge_funcattr(&ad->f, &ad1->f);

    if (ad1->section)
        ad->section = ad1->section;
    if (ad1->alias_target)
        ad->alias_target = ad1->alias_target;
    if (ad1->asm_label)
        ad->asm_label = ad1->asm_label;
    if (ad1->attr_mode)
        ad->attr_mode = ad1->attr_mode;
}
/* Merge some type attributes. */
static void patch_type(Sym *sym, CType *type)
{
    if (!(type->t & VT_EXTERN) || IS_ENUM_VAL(sym->type.t)) {
        if (!(sym->type.t & VT_EXTERN))
            tcc_error("redefinition of '%s'", get_tok_str(sym->v, NULL));
        sym->type.t &= ~VT_EXTERN;
    }

    if (IS_ASM_SYM(sym)) {
        /* stay static if both are static */
        sym->type.t = type->t & (sym->type.t | ~VT_STATIC);
        sym->type.ref = type->ref;
    }

    if (!is_compatible_types(&sym->type, type)) {
        tcc_error("incompatible types for redefinition of '%s'",
                  get_tok_str(sym->v, NULL));

    } else if ((sym->type.t & VT_BTYPE) == VT_FUNC) {
        int static_proto = sym->type.t & VT_STATIC;
        /* warn if static follows non-static function declaration */
        if ((type->t & VT_STATIC) && !static_proto
            /* XXX this test for inline shouldn't be here. Until we
               implement gnu-inline mode again it silences a warning for
               mingw caused by our workarounds. */
            && !((type->t | sym->type.t) & VT_INLINE))
            tcc_warning("static storage ignored for redefinition of '%s'",
                get_tok_str(sym->v, NULL));

        /* set 'inline' if both agree or if one has static */
        if ((type->t | sym->type.t) & VT_INLINE) {
            if (!((type->t ^ sym->type.t) & VT_INLINE)
                || ((type->t | sym->type.t) & VT_STATIC))
                static_proto |= VT_INLINE;
        }

        if (0 == (type->t & VT_EXTERN)) {
            struct FuncAttr f = sym->type.ref->f;
            /* put complete type, use static from prototype */
            sym->type.t = (type->t & ~(VT_STATIC|VT_INLINE)) | static_proto;
            sym->type.ref = type->ref;
            merge_funcattr(&sym->type.ref->f, &f);
        } else {
            sym->type.t &= ~VT_INLINE | static_proto;
        }

        /* prefer a prototyped declaration over an old-style one */
        if (sym->type.ref->f.func_type == FUNC_OLD
            && type->ref->f.func_type != FUNC_OLD) {
            sym->type.ref = type->ref;
        }

    } else {
        if ((sym->type.t & VT_ARRAY) && type->ref->c >= 0) {
            /* set array size if it was omitted in extern declaration */
            sym->type.ref->c = type->ref->c;
        }
        if ((type->t ^ sym->type.t) & VT_STATIC)
            tcc_warning("storage mismatch for redefinition of '%s'",
                get_tok_str(sym->v, NULL));
    }
}
1256 /* Merge some storage attributes of 'sym' with the new declaration
   described by 'ad' (and optionally 'type').  Also re-checks dll
   linkage consistency on PE targets and refreshes backend storage
   via update_storage(). */
1257 static void patch_storage(Sym *sym, AttributeDef *ad, CType *type)
1259 if (type)
1260 patch_type(sym, type);
1262 #ifdef TCC_TARGET_PE
1263 if (sym->a.dllimport != ad->a.dllimport)
1264 tcc_error("incompatible dll linkage for redefinition of '%s'",
1265 get_tok_str(sym->v, NULL));
1266 #endif
1267 merge_symattr(&sym->a, &ad->a);
1268 if (ad->asm_label)
1269 sym->asm_label = ad->asm_label;
1270 update_storage(sym);
1273 /* copy sym to other stack: clone s0 and push the clone on *ps.
   Named (non-anonymous) symbols are also linked into the matching
   token's sym_identifier chain so sym_find() can locate the copy. */
1274 static Sym *sym_copy(Sym *s0, Sym **ps)
1276 Sym *s;
1277 s = sym_malloc(), *s = *s0;
1278 s->prev = *ps, *ps = s;
1279 if (s->v < SYM_FIRST_ANOM) {
1280 ps = &table_ident[s->v - TOK_IDENT]->sym_identifier;
1281 s->prev_tok = *ps, *ps = s;
1283 return s;
1286 /* copy s->type.ref to stack 'ps' for VT_FUNC and VT_PTR (and for
   locally-scoped VT_STRUCT).  Recursively clones the whole ->next
   chain (e.g. function parameters) so the type survives when the
   original scope's symbols are popped. */
1287 static void sym_copy_ref(Sym *s, Sym **ps)
1289 int bt = s->type.t & VT_BTYPE;
1290 if (bt == VT_FUNC || bt == VT_PTR || (bt == VT_STRUCT && s->sym_scope)) {
1291 Sym **sp = &s->type.ref;
1292 for (s = *sp, *sp = NULL; s; s = s->next) {
1293 Sym *s2 = sym_copy(s, ps);
1294 sp = &(*sp = s2)->next;
1295 sym_copy_ref(s2, ps);
1300 /* define a new external reference to a symbol 'v'.  If no global
   symbol exists yet a forward reference is pushed on the global
   stack; otherwise the existing symbol's storage/type are patched.
   Inside a function, non-function symbols get a local copy so the
   declaration is scoped correctly.  Returns the (possibly copied)
   symbol. */
1301 static Sym *external_sym(int v, CType *type, int r, AttributeDef *ad)
1303 Sym *s;
1305 /* look for global symbol */
1306 s = sym_find(v);
1307 while (s && s->sym_scope)
1308 s = s->prev_tok;
1310 if (!s) {
1311 /* push forward reference */
1312 s = global_identifier_push(v, type->t, 0);
1313 s->r |= r;
1314 s->a = ad->a;
1315 s->asm_label = ad->asm_label;
1316 s->type.ref = type->ref;
1317 /* copy type to the global stack */
1318 if (local_stack)
1319 sym_copy_ref(s, &global_stack);
1320 } else {
1321 patch_storage(s, ad, type);
1323 /* push variables on local_stack if any */
1324 if (local_stack && (s->type.t & VT_BTYPE) != VT_FUNC)
1325 s = sym_copy(s, &local_stack);
1326 return s;
1329 /* save registers up to (vtop - n) stack entry: spill every register
   still referenced by the lower part of the value stack to memory. */
1330 ST_FUNC void save_regs(int n)
1332 SValue *p, *p1;
1333 for(p = vstack, p1 = vtop - n; p <= p1; p++)
1334 save_reg(p->r);
1337 /* save r to the memory stack, and mark it as being free
   (convenience wrapper scanning the whole value stack) */
1338 ST_FUNC void save_reg(int r)
1340 save_reg_upstack(r, 0);
1343 /* save r to the memory stack, and mark it as being free,
1344 if seen up to (vtop - n) stack entry.  The value is stored once
   into a temporary local slot; every stack entry referencing r (or
   holding r as second word) is then rewritten to point at that slot. */
1345 ST_FUNC void save_reg_upstack(int r, int n)
1347 int l, size, align, bt;
1348 SValue *p, *p1, sv;
1350 if ((r &= VT_VALMASK) >= VT_CONST)
1351 return;
1352 if (nocode_wanted)
1353 return;
/* l doubles as the "already stored" flag and the slot offset */
1354 l = 0;
1355 for(p = vstack, p1 = vtop - n; p <= p1; p++) {
1356 if ((p->r & VT_VALMASK) == r || p->r2 == r) {
1357 /* must save value on stack if not already done */
1358 if (!l) {
1359 bt = p->type.t & VT_BTYPE;
1360 if (bt == VT_VOID)
1361 continue;
1362 if ((p->r & VT_LVAL) || bt == VT_FUNC)
1363 bt = VT_PTR;
1364 sv.type.t = bt;
1365 size = type_size(&sv.type, &align);
1366 l = get_temp_local_var(size,align);
1367 sv.r = VT_LOCAL | VT_LVAL;
1368 sv.c.i = l;
1369 store(p->r & VT_VALMASK, &sv);
1370 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1371 /* x86 specific: need to pop fp register ST0 if saved */
1372 if (r == TREG_ST0) {
1373 o(0xd8dd); /* fstp %st(0) */
1375 #endif
1376 /* special long long case */
1377 if (p->r2 < VT_CONST && USING_TWO_WORDS(bt)) {
1378 sv.c.i += PTR_SIZE;
1379 store(p->r2, &sv);
1382 /* mark that stack entry as being saved on the stack */
1383 if (p->r & VT_LVAL) {
1384 /* also clear the bounded flag because the
1385 relocation address of the function was stored in
1386 p->c.i */
1387 p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
1388 } else {
1389 p->r = VT_LVAL | VT_LOCAL;
1391 p->sym = NULL;
1392 p->r2 = VT_CONST;
1393 p->c.i = l;
1398 #ifdef TCC_TARGET_ARM
1399 /* find a register of class 'rc2' with at most one reference on stack.
1400 * If none, call get_reg(rc).  A single reference is acceptable
 * because it is the value currently being generated. */
1401 ST_FUNC int get_reg_ex(int rc, int rc2)
1403 int r;
1404 SValue *p;
1406 for(r=0;r<NB_REGS;r++) {
1407 if (reg_classes[r] & rc2) {
1408 int n;
1409 n=0;
1410 for(p = vstack; p <= vtop; p++) {
1411 if ((p->r & VT_VALMASK) == r ||
1412 p->r2 == r)
1413 n++;
1415 if (n <= 1)
1416 return r;
1419 return get_reg(rc);
1421 #endif
1423 /* find a free register of class 'rc'. If none, save one register
   (the value stack is scanned for a register not referenced by any
   entry; with nocode_wanted the first class match is returned since
   no code is emitted anyway). */
1424 ST_FUNC int get_reg(int rc)
1426 int r;
1427 SValue *p;
1429 /* find a free register */
1430 for(r=0;r<NB_REGS;r++) {
1431 if (reg_classes[r] & rc) {
1432 if (nocode_wanted)
1433 return r;
1434 for(p=vstack;p<=vtop;p++) {
1435 if ((p->r & VT_VALMASK) == r ||
1436 p->r2 == r)
1437 goto notfound;
1439 return r;
1441 notfound: ;
1444 /* no register left : free the first one on the stack (VERY
1445 IMPORTANT to start from the bottom to ensure that we don't
1446 spill registers used in gen_opi()) */
1447 for(p=vstack;p<=vtop;p++) {
1448 /* look at second register (if long long) */
1449 r = p->r2;
1450 if (r < VT_CONST && (reg_classes[r] & rc))
1451 goto save_found;
1452 r = p->r & VT_VALMASK;
1453 if (r < VT_CONST && (reg_classes[r] & rc)) {
1454 save_found:
1455 save_reg(r);
1456 return r;
1459 /* should never come here */
1460 return -1;
1463 /* find a free temporary local variable (return the offset on stack) matching the size and align. If none, allocate a new temporary stack slot by lowering 'loc'. */
1464 static int get_temp_local_var(int size,int align){
1465 int i;
1466 struct temp_local_variable *temp_var;
1467 int found_var;
1468 SValue *p;
1469 int r;
1470 char free;
1471 char found;
1472 found=0;
1473 for(i=0;i<nb_temp_local_vars;i++){
1474 temp_var=&arr_temp_local_vars[i];
1475 if(temp_var->size<size||align!=temp_var->align){
1476 continue;
1478 /*check if temp_var is free: no live stack value may reference its offset*/
1479 free=1;
1480 for(p=vstack;p<=vtop;p++) {
1481 r=p->r&VT_VALMASK;
1482 if(r==VT_LOCAL||r==VT_LLOCAL){
1483 if(p->c.i==temp_var->location){
1484 free=0;
1485 break;
1489 if(free){
1490 found_var=temp_var->location;
1491 found=1;
1492 break;
1495 if(!found){
/* allocate fresh stack space; record it for reuse unless the
   bookkeeping table is full (the slot is still usable once) */
1496 loc = (loc - size) & -align;
1497 if(nb_temp_local_vars<MAX_TEMP_LOCAL_VARIABLE_NUMBER){
1498 temp_var=&arr_temp_local_vars[i];
1499 temp_var->location=loc;
1500 temp_var->size=size;
1501 temp_var->align=align;
1502 nb_temp_local_vars++;
1504 found_var=loc;
1506 return found_var;
/* forget all recorded temporary local slots (e.g. at statement end);
   the stack space itself is reclaimed by the surrounding scope */
1509 static void clear_temp_local_var_list(){
1510 nb_temp_local_vars=0;
1513 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1514 if needed (no-op when r == s) */
1515 static void move_reg(int r, int s, int t)
1517 SValue sv;
1519 if (r != s) {
1520 save_reg(r);
1521 sv.type.t = t;
1522 sv.type.ref = NULL;
1523 sv.r = s;
1524 sv.c.i = 0;
1525 load(r, &sv);
1529 /* get address of vtop (vtop MUST BE an lvalue): clear the lvalue
   flag so vtop now denotes the address itself */
1530 ST_FUNC void gaddrof(void)
1532 vtop->r &= ~VT_LVAL;
1533 /* tricky: if saved lvalue, then we can go back to lvalue */
1534 if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
1535 vtop->r = (vtop->r & ~VT_VALMASK) | VT_LOCAL | VT_LVAL;
1538 #ifdef CONFIG_TCC_BCHECK
1539 /* generate a bounded pointer addition: call __bound_ptr_add on the
   two top stack values and leave the checked pointer on vtop.  The
   relocation offset of the call is remembered in vtop->c.i so
   gen_bounded_ptr_deref() can later patch the called helper. */
1540 static void gen_bounded_ptr_add(void)
1542 int save = (vtop[-1].r & VT_VALMASK) == VT_LOCAL;
1543 if (save) {
1544 vpushv(&vtop[-1]);
1545 vrott(3);
1547 vpush_helper_func(TOK___bound_ptr_add);
1548 vrott(3);
1549 gfunc_call(2);
1550 vtop -= save;
1551 vpushi(0);
1552 /* returned pointer is in REG_IRET */
1553 vtop->r = REG_IRET | VT_BOUNDED;
1554 if (nocode_wanted)
1555 return;
1556 /* relocation offset of the bounding function call point */
1557 vtop->c.i = (cur_text_section->reloc->data_offset - sizeof(ElfW_Rel));
1560 /* patch pointer addition in vtop so that pointer dereferencing is
1561 also tested: rewrite the relocation recorded by
   gen_bounded_ptr_add() to target the size-specific
   __bound_ptr_indirN helper instead of __bound_ptr_add. */
1562 static void gen_bounded_ptr_deref(void)
1564 addr_t func;
1565 int size, align;
1566 ElfW_Rel *rel;
1567 Sym *sym;
1569 if (nocode_wanted)
1570 return;
1572 size = type_size(&vtop->type, &align);
1573 switch(size) {
1574 case 1: func = TOK___bound_ptr_indir1; break;
1575 case 2: func = TOK___bound_ptr_indir2; break;
1576 case 4: func = TOK___bound_ptr_indir4; break;
1577 case 8: func = TOK___bound_ptr_indir8; break;
1578 case 12: func = TOK___bound_ptr_indir12; break;
1579 case 16: func = TOK___bound_ptr_indir16; break;
1580 default:
1581 /* may happen with struct member access */
1582 return;
1584 sym = external_helper_sym(func);
1585 if (!sym->c)
1586 put_extern_sym(sym, NULL, 0, 0);
1587 /* patch relocation */
1588 /* XXX: find a better solution ? */
1589 rel = (ElfW_Rel *)(cur_text_section->reloc->data + vtop->c.i);
1590 rel->r_info = ELFW(R_INFO)(sym->c, ELFW(R_TYPE)(rel->r_info));
1593 /* generate lvalue bound code: make vtop a bounds-checked pointer
   before it is dereferenced */
1594 static void gbound(void)
1596 CType type1;
1598 vtop->r &= ~VT_MUSTBOUND;
1599 /* if lvalue, then use checking code before dereferencing */
1600 if (vtop->r & VT_LVAL) {
1601 /* if not VT_BOUNDED value, then make one */
1602 if (!(vtop->r & VT_BOUNDED)) {
1603 /* must save type because we must set it to int to get pointer */
1604 type1 = vtop->type;
1605 vtop->type.t = VT_PTR;
1606 gaddrof();
1607 vpushi(0);
1608 gen_bounded_ptr_add();
1609 vtop->r |= VT_LVAL;
1610 vtop->type = type1;
1612 /* then check for dereferencing */
1613 gen_bounded_ptr_deref();
1617 /* we need to call __bound_ptr_add before we start to load function
1618 args into registers.  Also special-cases the called function:
   setjmp-family calls are mirrored with __bound_setjmp, and alloca/
   longjmp force the bounds epilog. */
1619 ST_FUNC void gbound_args(int nb_args)
1621 int i, v;
1622 SValue *sv;
1624 for (i = 1; i <= nb_args; ++i)
1625 if (vtop[1 - i].r & VT_MUSTBOUND) {
1626 vrotb(i);
1627 gbound();
1628 vrott(i);
1631 sv = vtop - nb_args;
1632 if (sv->r & VT_SYM) {
1633 v = sv->sym->v;
1634 if (v == TOK_setjmp
1635 || v == TOK__setjmp
1636 #ifndef TCC_TARGET_PE
1637 || v == TOK_sigsetjmp
1638 || v == TOK___sigsetjmp
1639 #endif
1641 vpush_helper_func(TOK___bound_setjmp);
1642 vpushv(sv + 1);
1643 gfunc_call(1);
1644 func_bound_add_epilog = 1;
1646 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64
1647 if (v == TOK_alloca)
1648 func_bound_add_epilog = 1;
1649 #endif
1650 #if TARGETOS_NetBSD
1651 if (v == TOK_longjmp) /* undo rename to __longjmp14 */
1652 sv->sym->asm_label = TOK___bound_longjmp;
1653 #endif
1657 /* Add bounds for local symbols from S to E (via ->prev).  Each
   qualifying stack variable contributes a (frame offset, size) pair
   to lbounds_section for the runtime bounds checker. */
1658 static void add_local_bounds(Sym *s, Sym *e)
1660 for (; s != e; s = s->prev) {
1661 if (!s->v || (s->r & VT_VALMASK) != VT_LOCAL)
1662 continue;
1663 /* Add arrays/structs/unions because we always take address */
1664 if ((s->type.t & VT_ARRAY)
1665 || (s->type.t & VT_BTYPE) == VT_STRUCT
1666 || s->a.addrtaken) {
1667 /* add local bound info */
1668 int align, size = type_size(&s->type, &align);
1669 addr_t *bounds_ptr = section_ptr_add(lbounds_section,
1670 2 * sizeof(addr_t));
1671 bounds_ptr[0] = s->c;
1672 bounds_ptr[1] = size;
1676 #endif
1678 /* Wrapper around sym_pop, that potentially also registers local bounds
   (when bounds checking is on) and emits scope debug info before the
   symbols between local_stack and 'b' are popped. */
1679 static void pop_local_syms(Sym *b, int keep)
1681 #ifdef CONFIG_TCC_BCHECK
1682 if (tcc_state->do_bounds_check && !keep && (local_scope || !func_var))
1683 add_local_bounds(local_stack, b);
1684 #endif
1685 if (debug_modes)
1686 tcc_add_debug_info (tcc_state, !local_scope, local_stack, b);
1687 sym_pop(&local_stack, b, keep);
1690 /* increment an lvalue pointer by 'offset' bytes, preserving the
   original value type of vtop */
1691 static void incr_offset(int offset)
1693 int t = vtop->type.t;
1694 gaddrof(); /* remove VT_LVAL */
1695 vtop->type.t = VT_PTRDIFF_T; /* set scalar type */
1696 vpushs(offset);
1697 gen_op('+');
1698 vtop->r |= VT_LVAL;
1699 vtop->type.t = t;
/* advance a bitfield byte pointer by 'o' bytes; the value type becomes
   unsigned byte for the per-byte bitfield accessors below */
1702 static void incr_bf_adr(int o)
1704 vtop->type.t = VT_BYTE | VT_UNSIGNED;
1705 incr_offset(o);
1708 /* single-byte load mode for packed or otherwise unaligned bitfields:
   assemble the field byte by byte, shifting each chunk into place,
   then sign-extend if the field type is signed.  The stack comments
   (e.g. "X B") track vtop layout: B = byte pointer, X = accumulator. */
1709 static void load_packed_bf(CType *type, int bit_pos, int bit_size)
1711 int n, o, bits;
1712 save_reg_upstack(vtop->r, 1);
1713 vpush64(type->t & VT_BTYPE, 0); // B X
1714 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
1715 do {
1716 vswap(); // X B
1717 incr_bf_adr(o);
1718 vdup(); // X B B
1719 n = 8 - bit_pos;
1720 if (n > bit_size)
1721 n = bit_size;
1722 if (bit_pos)
1723 vpushi(bit_pos), gen_op(TOK_SHR), bit_pos = 0; // X B Y
1724 if (n < 8)
1725 vpushi((1 << n) - 1), gen_op('&');
1726 gen_cast(type);
1727 if (bits)
1728 vpushi(bits), gen_op(TOK_SHL);
1729 vrotb(3); // B Y X
1730 gen_op('|'); // B X
1731 bits += n, bit_size -= n, o = 1;
1732 } while (bit_size);
1733 vswap(), vpop();
1734 if (!(type->t & VT_UNSIGNED)) {
/* sign-extend: shift the field up to the top bit, then arithmetic
   shift back down */
1735 n = ((type->t & VT_BTYPE) == VT_LLONG ? 64 : 32) - bits;
1736 vpushi(n), gen_op(TOK_SHL);
1737 vpushi(n), gen_op(TOK_SAR);
1741 /* single-byte store mode for packed or otherwise unaligned bitfields:
   write the value byte by byte with read-modify-write of the partial
   bytes.  Stack comments track layout: X = value, B = byte pointer,
   V = current chunk. */
1742 static void store_packed_bf(int bit_pos, int bit_size)
1744 int bits, n, o, m, c;
/* c: value is a plain constant, so vdup() suffices instead of gv_dup() */
1745 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1746 vswap(); // X B
1747 save_reg_upstack(vtop->r, 1);
1748 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
1749 do {
1750 incr_bf_adr(o); // X B
1751 vswap(); //B X
1752 c ? vdup() : gv_dup(); // B V X
1753 vrott(3); // X B V
1754 if (bits)
1755 vpushi(bits), gen_op(TOK_SHR);
1756 if (bit_pos)
1757 vpushi(bit_pos), gen_op(TOK_SHL);
1758 n = 8 - bit_pos;
1759 if (n > bit_size)
1760 n = bit_size;
1761 if (n < 8) {
/* partial byte: merge new bits (mask m) with the untouched bits */
1762 m = ((1 << n) - 1) << bit_pos;
1763 vpushi(m), gen_op('&'); // X B V1
1764 vpushv(vtop-1); // X B V1 B
1765 vpushi(m & 0x80 ? ~m & 0x7f : ~m);
1766 gen_op('&'); // X B V1 B1
1767 gen_op('|'); // X B V2
1769 vdup(), vtop[-1] = vtop[-2]; // X B B V2
1770 vstore(), vpop(); // X B
1771 bits += n, bit_size -= n, bit_pos = 0, o = 1;
1772 } while (bit_size);
1773 vpop(), vpop();
/* adjust a bitfield SValue for access: if the struct layout recorded an
   auxiliary access type for the field, substitute it.  Returns the aux
   type (VT_STRUCT means "use single-byte packed access"), or 0 when no
   type.ref is present. */
1776 static int adjust_bf(SValue *sv, int bit_pos, int bit_size)
1778 int t;
1779 if (0 == sv->type.ref)
1780 return 0;
1781 t = sv->type.ref->auxtype;
1782 if (t != -1 && t != VT_STRUCT) {
1783 sv->type.t = (sv->type.t & ~(VT_BTYPE | VT_LONG)) | t;
1784 sv->r |= VT_LVAL;
1786 return t;
1789 /* store vtop a register belonging to class 'rc'. lvalues are
1790 converted to values. Cannot be used if cannot be converted to
1791 register value (such as structures).  Returns the register.
   Handles three special cases before the generic reload: bitfield
   extraction, float constants (materialized in rodata), and
   two-word (long long / qfloat) values needing a register pair. */
1792 ST_FUNC int gv(int rc)
1794 int r, r2, r_ok, r2_ok, rc2, bt;
1795 int bit_pos, bit_size, size, align;
1797 /* NOTE: get_reg can modify vstack[] */
1798 if (vtop->type.t & VT_BITFIELD) {
1799 CType type;
1801 bit_pos = BIT_POS(vtop->type.t);
1802 bit_size = BIT_SIZE(vtop->type.t);
1803 /* remove bit field info to avoid loops */
1804 vtop->type.t &= ~VT_STRUCT_MASK;
1806 type.ref = NULL;
1807 type.t = vtop->type.t & VT_UNSIGNED;
1808 if ((vtop->type.t & VT_BTYPE) == VT_BOOL)
1809 type.t |= VT_UNSIGNED;
1811 r = adjust_bf(vtop, bit_pos, bit_size);
1813 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
1814 type.t |= VT_LLONG;
1815 else
1816 type.t |= VT_INT;
1818 if (r == VT_STRUCT) {
1819 load_packed_bf(&type, bit_pos, bit_size);
1820 } else {
1821 int bits = (type.t & VT_BTYPE) == VT_LLONG ? 64 : 32;
1822 /* cast to int to propagate signedness in following ops */
1823 gen_cast(&type);
1824 /* generate shifts */
1825 vpushi(bits - (bit_pos + bit_size));
1826 gen_op(TOK_SHL);
1827 vpushi(bits - bit_size);
1828 /* NOTE: transformed to SHR if unsigned */
1829 gen_op(TOK_SAR);
1831 r = gv(rc);
1832 } else {
1833 if (is_float(vtop->type.t) &&
1834 (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1835 /* CPUs usually cannot use float constants, so we store them
1836 generically in data segment */
1837 init_params p = { rodata_section };
1838 unsigned long offset;
1839 size = type_size(&vtop->type, &align);
1840 if (NODATA_WANTED)
1841 size = 0, align = 1;
1842 offset = section_add(p.sec, size, align);
1843 vpush_ref(&vtop->type, p.sec, offset, size);
1844 vswap();
1845 init_putv(&p, &vtop->type, offset);
1846 vtop->r |= VT_LVAL;
1848 #ifdef CONFIG_TCC_BCHECK
1849 if (vtop->r & VT_MUSTBOUND)
1850 gbound();
1851 #endif
1853 bt = vtop->type.t & VT_BTYPE;
1855 #ifdef TCC_TARGET_RISCV64
1856 /* XXX mega hack */
1857 if (bt == VT_LDOUBLE && rc == RC_FLOAT)
1858 rc = RC_INT;
1859 #endif
1860 rc2 = RC2_TYPE(bt, rc);
1862 /* need to reload if:
1863 - constant
1864 - lvalue (need to dereference pointer)
1865 - already a register, but not in the right class */
1866 r = vtop->r & VT_VALMASK;
1867 r_ok = !(vtop->r & VT_LVAL) && (r < VT_CONST) && (reg_classes[r] & rc);
1868 r2_ok = !rc2 || ((vtop->r2 < VT_CONST) && (reg_classes[vtop->r2] & rc2));
1870 if (!r_ok || !r2_ok) {
1872 if (!r_ok) {
1873 if (1 /* we can 'mov (r),r' in cases */
1874 && r < VT_CONST
1875 && (reg_classes[r] & rc)
1876 && !rc2
1878 save_reg_upstack(r, 1);
1879 else
1880 r = get_reg(rc);
1883 if (rc2) {
1884 int load_type = (bt == VT_QFLOAT) ? VT_DOUBLE : VT_PTRDIFF_T;
1885 int original_type = vtop->type.t;
1887 /* two register type load :
1888 expand to two words temporarily */
1889 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1890 /* load constant */
1891 unsigned long long ll = vtop->c.i;
1892 vtop->c.i = ll; /* first word */
1893 load(r, vtop);
1894 vtop->r = r; /* save register value */
1895 vpushi(ll >> 32); /* second word */
1896 } else if (vtop->r & VT_LVAL) {
1897 /* We do not want to modifier the long long pointer here.
1898 So we save any other instances down the stack */
1899 save_reg_upstack(vtop->r, 1);
1900 /* load from memory */
1901 vtop->type.t = load_type;
1902 load(r, vtop);
1903 vdup();
1904 vtop[-1].r = r; /* save register value */
1905 /* increment pointer to get second word */
1906 incr_offset(PTR_SIZE);
1907 } else {
1908 /* move registers */
1909 if (!r_ok)
1910 load(r, vtop);
1911 if (r2_ok && vtop->r2 < VT_CONST)
1912 goto done;
1913 vdup();
1914 vtop[-1].r = r; /* save register value */
1915 vtop->r = vtop[-1].r2;
1917 /* Allocate second register. Here we rely on the fact that
1918 get_reg() tries first to free r2 of an SValue. */
1919 r2 = get_reg(rc2);
1920 load(r2, vtop);
1921 vpop();
1922 /* write second register */
1923 vtop->r2 = r2;
1924 done:
1925 vtop->type.t = original_type;
1926 } else {
1927 if (vtop->r == VT_CMP)
1928 vset_VT_JMP();
1929 /* one register type load */
1930 load(r, vtop);
1933 vtop->r = r;
1934 #ifdef TCC_TARGET_C67
1935 /* uses register pairs for doubles */
1936 if (bt == VT_DOUBLE)
1937 vtop->r2 = r+1;
1938 #endif
1940 return r;
1943 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2;
   order is chosen so that generating the second value cannot clobber
   the first, with a reload pass if it happened anyway */
1944 ST_FUNC void gv2(int rc1, int rc2)
1946 /* generate more generic register first. But VT_JMP or VT_CMP
1947 values must be generated first in all cases to avoid possible
1948 reload errors */
1949 if (vtop->r != VT_CMP && rc1 <= rc2) {
1950 vswap();
1951 gv(rc1);
1952 vswap();
1953 gv(rc2);
1954 /* test if reload is needed for first register */
1955 if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
1956 vswap();
1957 gv(rc1);
1958 vswap();
1960 } else {
1961 gv(rc2);
1962 vswap();
1963 gv(rc1);
1964 vswap();
1965 /* test if reload is needed for first register */
1966 if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
1967 gv(rc2);
1972 #if PTR_SIZE == 4
1973 /* expand 64bit on stack in two ints: vtop becomes the high word and
   vtop[-1] the low word, as constants, adjacent lvalues, or a
   register pair split across two entries */
1974 ST_FUNC void lexpand(void)
1976 int u, v;
1977 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1978 v = vtop->r & (VT_VALMASK | VT_LVAL);
1979 if (v == VT_CONST) {
1980 vdup();
1981 vtop[0].c.i >>= 32;
1982 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1983 vdup();
1984 vtop[0].c.i += 4;
1985 } else {
1986 gv(RC_INT);
1987 vdup();
1988 vtop[0].r = vtop[-1].r2;
1989 vtop[0].r2 = vtop[-1].r2 = VT_CONST;
1991 vtop[0].type.t = vtop[-1].type.t = VT_INT | u;
1993 #endif
1995 #if PTR_SIZE == 4
1996 /* build a long long from two ints: the inverse of lexpand(); the
   two top entries (low, high) collapse into one register-pair value
   of type 't' */
1997 static void lbuild(int t)
1999 gv2(RC_INT, RC_INT);
2000 vtop[-1].r2 = vtop[0].r;
2001 vtop[-1].type.t = t;
2002 vpop();
2004 #endif
2006 /* convert stack entry to register and duplicate its value in another
2007 register (on 32-bit targets a 64-bit value is split, each half is
   duplicated separately, then both copies are rebuilt) */
2008 static void gv_dup(void)
2010 int t, rc, r;
2012 t = vtop->type.t;
2013 #if PTR_SIZE == 4
2014 if ((t & VT_BTYPE) == VT_LLONG) {
2015 if (t & VT_BITFIELD) {
2016 gv(RC_INT);
2017 t = vtop->type.t;
2019 lexpand();
2020 gv_dup();
2021 vswap();
2022 vrotb(3);
2023 gv_dup();
2024 vrotb(4);
2025 /* stack: H L L1 H1 */
2026 lbuild(t);
2027 vrotb(3);
2028 vrotb(3);
2029 vswap();
2030 lbuild(t);
2031 vswap();
2032 return;
2034 #endif
2035 /* duplicate value */
2036 rc = RC_TYPE(t);
2037 gv(rc);
2038 r = get_reg(rc);
2039 vdup();
2040 load(r, vtop);
2041 vtop->r = r;
2044 #if PTR_SIZE == 4
2045 /* generate CPU independent (unsigned) long long operations on
   32-bit targets: div/mod go through libgcc-style helper calls,
   add/sub/mul/bitops are synthesized from 32-bit word operations,
   shifts by a constant are open-coded, and comparisons compare the
   high words first and the low words only on equality. */
2046 static void gen_opl(int op)
2048 int t, a, b, op1, c, i;
2049 int func;
2050 unsigned short reg_iret = REG_IRET;
2051 unsigned short reg_lret = REG_IRE2;
2052 SValue tmp;
2054 switch(op) {
2055 case '/':
2056 case TOK_PDIV:
2057 func = TOK___divdi3;
2058 goto gen_func;
2059 case TOK_UDIV:
2060 func = TOK___udivdi3;
2061 goto gen_func;
2062 case '%':
2063 func = TOK___moddi3;
2064 goto gen_mod_func;
2065 case TOK_UMOD:
2066 func = TOK___umoddi3;
2067 gen_mod_func:
2068 #ifdef TCC_ARM_EABI
2069 reg_iret = TREG_R2;
2070 reg_lret = TREG_R3;
2071 #endif
2072 gen_func:
2073 /* call generic long long function */
2074 vpush_helper_func(func);
2075 vrott(3);
2076 gfunc_call(2);
2077 vpushi(0);
2078 vtop->r = reg_iret;
2079 vtop->r2 = reg_lret;
2080 break;
2081 case '^':
2082 case '&':
2083 case '|':
2084 case '*':
2085 case '+':
2086 case '-':
2087 //pv("gen_opl A",0,2);
2088 t = vtop->type.t;
2089 vswap();
2090 lexpand();
2091 vrotb(3);
2092 lexpand();
2093 /* stack: L1 H1 L2 H2 */
2094 tmp = vtop[0];
2095 vtop[0] = vtop[-3];
2096 vtop[-3] = tmp;
2097 tmp = vtop[-2];
2098 vtop[-2] = vtop[-3];
2099 vtop[-3] = tmp;
2100 vswap();
2101 /* stack: H1 H2 L1 L2 */
2102 //pv("gen_opl B",0,4);
2103 if (op == '*') {
/* 64x64 multiply: low words multiplied with UMULL, the two
   cross products added into the high word */
2104 vpushv(vtop - 1);
2105 vpushv(vtop - 1);
2106 gen_op(TOK_UMULL);
2107 lexpand();
2108 /* stack: H1 H2 L1 L2 ML MH */
2109 for(i=0;i<4;i++)
2110 vrotb(6);
2111 /* stack: ML MH H1 H2 L1 L2 */
2112 tmp = vtop[0];
2113 vtop[0] = vtop[-2];
2114 vtop[-2] = tmp;
2115 /* stack: ML MH H1 L2 H2 L1 */
2116 gen_op('*');
2117 vrotb(3);
2118 vrotb(3);
2119 gen_op('*');
2120 /* stack: ML MH M1 M2 */
2121 gen_op('+');
2122 gen_op('+');
2123 } else if (op == '+' || op == '-') {
2124 /* XXX: add non carry method too (for MIPS or alpha) */
2125 if (op == '+')
2126 op1 = TOK_ADDC1;
2127 else
2128 op1 = TOK_SUBC1;
2129 gen_op(op1);
2130 /* stack: H1 H2 (L1 op L2) */
2131 vrotb(3);
2132 vrotb(3);
2133 gen_op(op1 + 1); /* TOK_xxxC2 */
2134 } else {
2135 gen_op(op);
2136 /* stack: H1 H2 (L1 op L2) */
2137 vrotb(3);
2138 vrotb(3);
2139 /* stack: (L1 op L2) H1 H2 */
2140 gen_op(op);
2141 /* stack: (L1 op L2) (H1 op H2) */
2143 /* stack: L H */
2144 lbuild(t);
2145 break;
2146 case TOK_SAR:
2147 case TOK_SHR:
2148 case TOK_SHL:
2149 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
2150 t = vtop[-1].type.t;
2151 vswap();
2152 lexpand();
2153 vrotb(3);
2154 /* stack: L H shift */
2155 c = (int)vtop->c.i;
2156 /* constant: simpler */
2157 /* NOTE: all comments are for SHL. the other cases are
2158 done by swapping words */
2159 vpop();
2160 if (op != TOK_SHL)
2161 vswap();
2162 if (c >= 32) {
2163 /* stack: L H */
2164 vpop();
2165 if (c > 32) {
2166 vpushi(c - 32);
2167 gen_op(op);
2169 if (op != TOK_SAR) {
2170 vpushi(0);
2171 } else {
/* arithmetic shift >= 32: high word becomes the sign */
2172 gv_dup();
2173 vpushi(31);
2174 gen_op(TOK_SAR);
2176 vswap();
2177 } else {
2178 vswap();
2179 gv_dup();
2180 /* stack: H L L */
2181 vpushi(c);
2182 gen_op(op);
2183 vswap();
2184 vpushi(32 - c);
2185 if (op == TOK_SHL)
2186 gen_op(TOK_SHR);
2187 else
2188 gen_op(TOK_SHL);
2189 vrotb(3);
2190 /* stack: L L H */
2191 vpushi(c);
2192 if (op == TOK_SHL)
2193 gen_op(TOK_SHL);
2194 else
2195 gen_op(TOK_SHR);
2196 gen_op('|');
2198 if (op != TOK_SHL)
2199 vswap();
2200 lbuild(t);
2201 } else {
2202 /* XXX: should provide a faster fallback on x86 ? */
2203 switch(op) {
2204 case TOK_SAR:
2205 func = TOK___ashrdi3;
2206 goto gen_func;
2207 case TOK_SHR:
2208 func = TOK___lshrdi3;
2209 goto gen_func;
2210 case TOK_SHL:
2211 func = TOK___ashldi3;
2212 goto gen_func;
2215 break;
2216 default:
2217 /* compare operations */
2218 t = vtop->type.t;
2219 vswap();
2220 lexpand();
2221 vrotb(3);
2222 lexpand();
2223 /* stack: L1 H1 L2 H2 */
2224 tmp = vtop[-1];
2225 vtop[-1] = vtop[-2];
2226 vtop[-2] = tmp;
2227 /* stack: L1 L2 H1 H2 */
2228 save_regs(4);
2229 /* compare high */
2230 op1 = op;
2231 /* when values are equal, we need to compare low words. since
2232 the jump is inverted, we invert the test too. */
2233 if (op1 == TOK_LT)
2234 op1 = TOK_LE;
2235 else if (op1 == TOK_GT)
2236 op1 = TOK_GE;
2237 else if (op1 == TOK_ULT)
2238 op1 = TOK_ULE;
2239 else if (op1 == TOK_UGT)
2240 op1 = TOK_UGE;
2241 a = 0;
2242 b = 0;
2243 gen_op(op1);
2244 if (op == TOK_NE) {
2245 b = gvtst(0, 0);
2246 } else {
2247 a = gvtst(1, 0);
2248 if (op != TOK_EQ) {
2249 /* generate non equal test */
2250 vpushi(0);
2251 vset_VT_CMP(TOK_NE);
2252 b = gvtst(0, 0);
2255 /* compare low. Always unsigned */
2256 op1 = op;
2257 if (op1 == TOK_LT)
2258 op1 = TOK_ULT;
2259 else if (op1 == TOK_LE)
2260 op1 = TOK_ULE;
2261 else if (op1 == TOK_GT)
2262 op1 = TOK_UGT;
2263 else if (op1 == TOK_GE)
2264 op1 = TOK_UGE;
2265 gen_op(op1);
2266 #if 0//def TCC_TARGET_I386
2267 if (op == TOK_NE) { gsym(b); break; }
2268 if (op == TOK_EQ) { gsym(a); break; }
2269 #endif
2270 gvtst_set(1, a);
2271 gvtst_set(0, b);
2272 break;
2275 #endif
/* Signed 64-bit division carried in uint64_t values: interpret a and b
   as two's-complement, divide the magnitudes (truncating toward zero),
   then re-apply the sign.  Callers guarantee b != 0. */
static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
{
    uint64_t abs_a = (a >> 63) ? 0 - a : a;
    uint64_t abs_b = (b >> 63) ? 0 - b : b;
    uint64_t quot = abs_a / abs_b;
    /* quotient is negative iff the operand signs differ */
    return ((a ^ b) >> 63) ? 0 - quot : quot;
}
/* Signed 64-bit "less than" on unsigned carriers: XOR-ing the sign bit
   biases two's-complement values so their unsigned order matches the
   signed order, allowing a plain unsigned comparison. */
static int gen_opic_lt(uint64_t a, uint64_t b)
{
    const uint64_t sign_bias = (uint64_t)1 << 63;
    return (a ^ sign_bias) < (b ^ sign_bias);
}
2288 /* handle integer constant optimizations and various machine
2289 independent opt: fold const-const operations, normalize
   commutative ops to keep the constant on top, strip algebraic
   no-ops (x+0, x*1, x&-1, ...), turn mul/div by powers of two into
   shifts, and fold symbol+constant.  Falls back to gen_opi/gen_opl
   for everything else. */
2290 static void gen_opic(int op)
2292 SValue *v1 = vtop - 1;
2293 SValue *v2 = vtop;
2294 int t1 = v1->type.t & VT_BTYPE;
2295 int t2 = v2->type.t & VT_BTYPE;
2296 int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2297 int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2298 uint64_t l1 = c1 ? v1->c.i : 0;
2299 uint64_t l2 = c2 ? v2->c.i : 0;
2300 int shm = (t1 == VT_LLONG) ? 63 : 31;
2301 int r;
/* sign- or zero-extend 32-bit operands so the 64-bit folding below
   is exact */
2303 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
2304 l1 = ((uint32_t)l1 |
2305 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
2306 if (t2 != VT_LLONG && (PTR_SIZE != 8 || t2 != VT_PTR))
2307 l2 = ((uint32_t)l2 |
2308 (v2->type.t & VT_UNSIGNED ? 0 : -(l2 & 0x80000000)));
2310 if (c1 && c2) {
2311 switch(op) {
2312 case '+': l1 += l2; break;
2313 case '-': l1 -= l2; break;
2314 case '&': l1 &= l2; break;
2315 case '^': l1 ^= l2; break;
2316 case '|': l1 |= l2; break;
2317 case '*': l1 *= l2; break;
2319 case TOK_PDIV:
2320 case '/':
2321 case '%':
2322 case TOK_UDIV:
2323 case TOK_UMOD:
2324 /* if division by zero, generate explicit division */
2325 if (l2 == 0) {
2326 if (CONST_WANTED && !NOEVAL_WANTED)
2327 tcc_error("division by zero in constant");
2328 goto general_case;
2330 switch(op) {
2331 default: l1 = gen_opic_sdiv(l1, l2); break;
2332 case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
2333 case TOK_UDIV: l1 = l1 / l2; break;
2334 case TOK_UMOD: l1 = l1 % l2; break;
2336 break;
2337 case TOK_SHL: l1 <<= (l2 & shm); break;
2338 case TOK_SHR: l1 >>= (l2 & shm); break;
2339 case TOK_SAR:
2340 l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
2341 break;
2342 /* tests */
2343 case TOK_ULT: l1 = l1 < l2; break;
2344 case TOK_UGE: l1 = l1 >= l2; break;
2345 case TOK_EQ: l1 = l1 == l2; break;
2346 case TOK_NE: l1 = l1 != l2; break;
2347 case TOK_ULE: l1 = l1 <= l2; break;
2348 case TOK_UGT: l1 = l1 > l2; break;
2349 case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
2350 case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
2351 case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
2352 case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
2353 /* logical */
2354 case TOK_LAND: l1 = l1 && l2; break;
2355 case TOK_LOR: l1 = l1 || l2; break;
2356 default:
2357 goto general_case;
2359 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
2360 l1 = ((uint32_t)l1 |
2361 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
2362 v1->c.i = l1;
2363 v1->r |= v2->r & VT_NONCONST;
2364 vtop--;
2365 } else {
2366 /* if commutative ops, put c2 as constant */
2367 if (c1 && (op == '+' || op == '&' || op == '^' ||
2368 op == '|' || op == '*' || op == TOK_EQ || op == TOK_NE)) {
2369 vswap();
2370 c2 = c1; //c = c1, c1 = c2, c2 = c;
2371 l2 = l1; //l = l1, l1 = l2, l2 = l;
2373 if (c1 && ((l1 == 0 &&
2374 (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
2375 (l1 == -1 && op == TOK_SAR))) {
2376 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
2377 vpop();
2378 } else if (c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
2379 (op == '|' &&
2380 (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))) ||
2381 (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
2382 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
2383 if (l2 == 1)
2384 vtop->c.i = 0;
2385 vswap();
2386 vtop--;
2387 } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
2388 op == TOK_PDIV) &&
2389 l2 == 1) ||
2390 ((op == '+' || op == '-' || op == '|' || op == '^' ||
2391 op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
2392 l2 == 0) ||
2393 (op == '&' &&
2394 (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))))) {
2395 /* filter out NOP operations like x*1, x-0, x&-1... */
2396 vtop--;
2397 } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
2398 /* try to use shifts instead of muls or divs */
2399 if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
2400 int n = -1;
2401 while (l2) {
2402 l2 >>= 1;
2403 n++;
2405 vtop->c.i = n;
2406 if (op == '*')
2407 op = TOK_SHL;
2408 else if (op == TOK_PDIV)
2409 op = TOK_SAR;
2410 else
2411 op = TOK_SHR;
2413 goto general_case;
2414 } else if (c2 && (op == '+' || op == '-') &&
2415 (r = vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM),
2416 r == (VT_CONST | VT_SYM) || r == VT_LOCAL)) {
2417 /* symbol + constant case */
2418 if (op == '-')
2419 l2 = -l2;
2420 l2 += vtop[-1].c.i;
2421 /* The backends can't always deal with addends to symbols
2422 larger than +-1<<31. Don't construct such. */
2423 if ((int)l2 != l2)
2424 goto general_case;
2425 vtop--;
2426 vtop->c.i = l2;
2427 } else {
2428 general_case:
2429 /* call low level op generator */
2430 if (t1 == VT_LLONG || t2 == VT_LLONG ||
2431 (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
2432 gen_opl(op);
2433 else
2434 gen_opi(op);
2436 if (vtop->r == VT_CONST)
2437 vtop->r |= VT_NONCONST; /* is const, but only by optimization */
/* floating-point negation, per target: x86 backends handle TOK_NEG in
   gen_opf; ARM synthesizes 0-x (detected and turned into vneg); the
   generic fallback flips the sign bit in memory. */
2441 #if defined TCC_TARGET_X86_64 || defined TCC_TARGET_I386
2442 # define gen_negf gen_opf
2443 #elif defined TCC_TARGET_ARM
2444 void gen_negf(int op)
2446 /* arm will detect 0-x and replace by vneg */
2447 vpushi(0), vswap(), gen_op('-');
2449 #else
2450 /* XXX: implement in gen_opf() for other backends too */
2451 void gen_negf(int op)
2453 /* In IEEE negate(x) isn't subtract(0,x). Without NaNs it's
2454 subtract(-0, x), but with them it's really a sign flip
2455 operation. We implement this with bit manipulation and have
2456 to do some type reinterpretation for this, which TCC can do
2457 only via memory. */
2459 int align, size, bt;
2461 size = type_size(&vtop->type, &align);
2462 bt = vtop->type.t & VT_BTYPE;
2463 save_reg(gv(RC_TYPE(bt)));
2464 vdup();
/* address the last byte of the value, where the sign bit lives
   (little-endian layout) */
2465 incr_bf_adr(size - 1);
2466 vdup();
2467 vpushi(0x80); /* flip sign */
2468 gen_op('^');
2469 vstore();
2470 vpop();
2472 #endif
/* generate a floating point operation with constant propagation */
/* 'op' combines vtop[-1] and vtop[0] (or just vtop[0] for TOK_NEG).
   If the operand(s) are compile-time constants the result is folded
   here in 'long double'; otherwise code is emitted via gen_opf() /
   gen_negf().  Comparisons fold to an int result (cast_int). */
static void gen_opif(int op)
{
    int c1, c2, cast_int = 0;
    SValue *v1, *v2;
#if defined _MSC_VER && defined __x86_64__
    /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
    volatile
#endif
    long double f1, f2;

    v1 = vtop - 1;
    v2 = vtop;
    if (op == TOK_NEG)
        v1 = v2;   /* unary: both point at the single operand */

    /* currently, we cannot do computations with forward symbols */
    c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
    c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
    if (c1 && c2) {
        /* widen both constants to long double for folding */
        if (v1->type.t == VT_FLOAT) {
            f1 = v1->c.f;
            f2 = v2->c.f;
        } else if (v1->type.t == VT_DOUBLE) {
            f1 = v1->c.d;
            f2 = v2->c.d;
        } else {
            f1 = v1->c.ld;
            f2 = v2->c.ld;
        }
        /* NOTE: we only do constant propagation if finite number (not
           NaN or infinity) (ANSI spec) */
        /* NOTE(review): this condition reduces to
           (!finite(f1) && finite(f2)) which does not match the stated
           intent of "both finite"; looks suspect — verify upstream */
        if (!(ieee_finite(f1) || !ieee_finite(f2)) && !CONST_WANTED)
            goto general_case;
        switch(op) {
        case '+': f1 += f2; break;
        case '-': f1 -= f2; break;
        case '*': f1 *= f2; break;
        case '/':
            if (f2 == 0.0) {
                union { float f; unsigned u; } x1, x2, y;
                /* If not in initializer we need to potentially generate
                   FP exceptions at runtime, otherwise we want to fold.  */
                if (!CONST_WANTED)
                    goto general_case;
                /* the run-time result of 0.0/0.0 on x87, also of other compilers
                   when used to compile the f1 /= f2 below, would be -nan */
                x1.f = f1, x2.f = f2;
                if (f1 == 0.0)
                    y.u = 0x7fc00000; /* nan */
                else
                    y.u = 0x7f800000; /* infinity */
                y.u |= (x1.u ^ x2.u) & 0x80000000; /* set sign */
                f1 = y.f;
                break;
            }
            f1 /= f2;
            break;
        case TOK_NEG:
            f1 = -f1;
            goto unary_result;
        /* comparisons fold to 0/1 and then cast to int below */
        case TOK_EQ:
            f1 = f1 == f2;
        make_int:
            cast_int = 1;
            break;
        case TOK_NE:
            f1 = f1 != f2;
            goto make_int;
        case TOK_LT:
            f1 = f1 < f2;
            goto make_int;
        case TOK_GE:
            f1 = f1 >= f2;
            goto make_int;
        case TOK_LE:
            f1 = f1 <= f2;
            goto make_int;
        case TOK_GT:
            f1 = f1 > f2;
            goto make_int;
        /* XXX: also handles tests ? */
        default:
            goto general_case;
        }
        vtop--;   /* binary op consumed one stack slot */
    unary_result:
        /* XXX: overflow test ? */
        /* store the folded result back in the operand's own precision */
        if (v1->type.t == VT_FLOAT) {
            v1->c.f = f1;
        } else if (v1->type.t == VT_DOUBLE) {
            v1->c.d = f1;
        } else {
            v1->c.ld = f1;
        }
        if (cast_int)
            gen_cast_s(VT_INT);
    } else {
    general_case:
        if (op == TOK_NEG) {
            gen_negf(op);
        } else {
            gen_opf(op);
        }
    }
}
2581 /* print a type. If 'varstr' is not NULL, then the variable is also
2582 printed in the type */
2583 /* XXX: union */
2584 /* XXX: add array and function pointers */
2585 static void type_to_str(char *buf, int buf_size,
2586 CType *type, const char *varstr)
2588 int bt, v, t;
2589 Sym *s, *sa;
2590 char buf1[256];
2591 const char *tstr;
2593 t = type->t;
2594 bt = t & VT_BTYPE;
2595 buf[0] = '\0';
2597 if (t & VT_EXTERN)
2598 pstrcat(buf, buf_size, "extern ");
2599 if (t & VT_STATIC)
2600 pstrcat(buf, buf_size, "static ");
2601 if (t & VT_TYPEDEF)
2602 pstrcat(buf, buf_size, "typedef ");
2603 if (t & VT_INLINE)
2604 pstrcat(buf, buf_size, "inline ");
2605 if (bt != VT_PTR) {
2606 if (t & VT_VOLATILE)
2607 pstrcat(buf, buf_size, "volatile ");
2608 if (t & VT_CONSTANT)
2609 pstrcat(buf, buf_size, "const ");
2611 if (((t & VT_DEFSIGN) && bt == VT_BYTE)
2612 || ((t & VT_UNSIGNED)
2613 && (bt == VT_SHORT || bt == VT_INT || bt == VT_LLONG)
2614 && !IS_ENUM(t)
2616 pstrcat(buf, buf_size, (t & VT_UNSIGNED) ? "unsigned " : "signed ");
2618 buf_size -= strlen(buf);
2619 buf += strlen(buf);
2621 switch(bt) {
2622 case VT_VOID:
2623 tstr = "void";
2624 goto add_tstr;
2625 case VT_BOOL:
2626 tstr = "_Bool";
2627 goto add_tstr;
2628 case VT_BYTE:
2629 tstr = "char";
2630 goto add_tstr;
2631 case VT_SHORT:
2632 tstr = "short";
2633 goto add_tstr;
2634 case VT_INT:
2635 tstr = "int";
2636 goto maybe_long;
2637 case VT_LLONG:
2638 tstr = "long long";
2639 maybe_long:
2640 if (t & VT_LONG)
2641 tstr = "long";
2642 if (!IS_ENUM(t))
2643 goto add_tstr;
2644 tstr = "enum ";
2645 goto tstruct;
2646 case VT_FLOAT:
2647 tstr = "float";
2648 goto add_tstr;
2649 case VT_DOUBLE:
2650 tstr = "double";
2651 if (!(t & VT_LONG))
2652 goto add_tstr;
2653 case VT_LDOUBLE:
2654 tstr = "long double";
2655 add_tstr:
2656 pstrcat(buf, buf_size, tstr);
2657 break;
2658 case VT_STRUCT:
2659 tstr = "struct ";
2660 if (IS_UNION(t))
2661 tstr = "union ";
2662 tstruct:
2663 pstrcat(buf, buf_size, tstr);
2664 v = type->ref->v & ~SYM_STRUCT;
2665 if (v >= SYM_FIRST_ANOM)
2666 pstrcat(buf, buf_size, "<anonymous>");
2667 else
2668 pstrcat(buf, buf_size, get_tok_str(v, NULL));
2669 break;
2670 case VT_FUNC:
2671 s = type->ref;
2672 buf1[0]=0;
2673 if (varstr && '*' == *varstr) {
2674 pstrcat(buf1, sizeof(buf1), "(");
2675 pstrcat(buf1, sizeof(buf1), varstr);
2676 pstrcat(buf1, sizeof(buf1), ")");
2678 pstrcat(buf1, buf_size, "(");
2679 sa = s->next;
2680 while (sa != NULL) {
2681 char buf2[256];
2682 type_to_str(buf2, sizeof(buf2), &sa->type, NULL);
2683 pstrcat(buf1, sizeof(buf1), buf2);
2684 sa = sa->next;
2685 if (sa)
2686 pstrcat(buf1, sizeof(buf1), ", ");
2688 if (s->f.func_type == FUNC_ELLIPSIS)
2689 pstrcat(buf1, sizeof(buf1), ", ...");
2690 pstrcat(buf1, sizeof(buf1), ")");
2691 type_to_str(buf, buf_size, &s->type, buf1);
2692 goto no_var;
2693 case VT_PTR:
2694 s = type->ref;
2695 if (t & (VT_ARRAY|VT_VLA)) {
2696 if (varstr && '*' == *varstr)
2697 snprintf(buf1, sizeof(buf1), "(%s)[%d]", varstr, s->c);
2698 else
2699 snprintf(buf1, sizeof(buf1), "%s[%d]", varstr ? varstr : "", s->c);
2700 type_to_str(buf, buf_size, &s->type, buf1);
2701 goto no_var;
2703 pstrcpy(buf1, sizeof(buf1), "*");
2704 if (t & VT_CONSTANT)
2705 pstrcat(buf1, buf_size, "const ");
2706 if (t & VT_VOLATILE)
2707 pstrcat(buf1, buf_size, "volatile ");
2708 if (varstr)
2709 pstrcat(buf1, sizeof(buf1), varstr);
2710 type_to_str(buf, buf_size, &s->type, buf1);
2711 goto no_var;
2713 if (varstr) {
2714 pstrcat(buf, buf_size, " ");
2715 pstrcat(buf, buf_size, varstr);
2717 no_var: ;
2720 static void type_incompatibility_error(CType* st, CType* dt, const char* fmt)
2722 char buf1[256], buf2[256];
2723 type_to_str(buf1, sizeof(buf1), st, NULL);
2724 type_to_str(buf2, sizeof(buf2), dt, NULL);
2725 tcc_error(fmt, buf1, buf2);
2728 static void type_incompatibility_warning(CType* st, CType* dt, const char* fmt)
2730 char buf1[256], buf2[256];
2731 type_to_str(buf1, sizeof(buf1), st, NULL);
2732 type_to_str(buf2, sizeof(buf2), dt, NULL);
2733 tcc_warning(fmt, buf1, buf2);
2736 static int pointed_size(CType *type)
2738 int align;
2739 return type_size(pointed_type(type), &align);
2742 static inline int is_null_pointer(SValue *p)
2744 if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM | VT_NONCONST)) != VT_CONST)
2745 return 0;
2746 return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
2747 ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
2748 ((p->type.t & VT_BTYPE) == VT_PTR &&
2749 (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0) &&
2750 ((pointed_type(&p->type)->t & VT_BTYPE) == VT_VOID) &&
2751 0 == (pointed_type(&p->type)->t & (VT_CONSTANT | VT_VOLATILE))
2755 /* compare function types. OLD functions match any new functions */
2756 static int is_compatible_func(CType *type1, CType *type2)
2758 Sym *s1, *s2;
2760 s1 = type1->ref;
2761 s2 = type2->ref;
2762 if (s1->f.func_call != s2->f.func_call)
2763 return 0;
2764 if (s1->f.func_type != s2->f.func_type
2765 && s1->f.func_type != FUNC_OLD
2766 && s2->f.func_type != FUNC_OLD)
2767 return 0;
2768 for (;;) {
2769 if (!is_compatible_unqualified_types(&s1->type, &s2->type))
2770 return 0;
2771 if (s1->f.func_type == FUNC_OLD || s2->f.func_type == FUNC_OLD )
2772 return 1;
2773 s1 = s1->next;
2774 s2 = s2->next;
2775 if (!s1)
2776 return !s2;
2777 if (!s2)
2778 return 0;
2782 /* return true if type1 and type2 are the same. If unqualified is
2783 true, qualifiers on the types are ignored.
2785 static int compare_types(CType *type1, CType *type2, int unqualified)
2787 int bt1, t1, t2;
2789 t1 = type1->t & VT_TYPE;
2790 t2 = type2->t & VT_TYPE;
2791 if (unqualified) {
2792 /* strip qualifiers before comparing */
2793 t1 &= ~(VT_CONSTANT | VT_VOLATILE);
2794 t2 &= ~(VT_CONSTANT | VT_VOLATILE);
2797 /* Default Vs explicit signedness only matters for char */
2798 if ((t1 & VT_BTYPE) != VT_BYTE) {
2799 t1 &= ~VT_DEFSIGN;
2800 t2 &= ~VT_DEFSIGN;
2802 /* XXX: bitfields ? */
2803 if (t1 != t2)
2804 return 0;
2806 if ((t1 & VT_ARRAY)
2807 && !(type1->ref->c < 0
2808 || type2->ref->c < 0
2809 || type1->ref->c == type2->ref->c))
2810 return 0;
2812 /* test more complicated cases */
2813 bt1 = t1 & VT_BTYPE;
2814 if (bt1 == VT_PTR) {
2815 type1 = pointed_type(type1);
2816 type2 = pointed_type(type2);
2817 return is_compatible_types(type1, type2);
2818 } else if (bt1 == VT_STRUCT) {
2819 return (type1->ref == type2->ref);
2820 } else if (bt1 == VT_FUNC) {
2821 return is_compatible_func(type1, type2);
2822 } else if (IS_ENUM(type1->t) && IS_ENUM(type2->t)) {
2823 /* If both are enums then they must be the same, if only one is then
2824 t1 and t2 must be equal, which was checked above already. */
2825 return type1->ref == type2->ref;
2826 } else {
2827 return 1;
/* Check if OP1 and OP2 can be "combined" with operation OP, the combined
   type is stored in DEST if non-null (except for pointer plus/minus) . */
/* Returns 1 on success, 0 if the operand types are invalid for OP.
   Implements the usual arithmetic conversions plus the C99 rules for
   pointers in conditional expressions and comparisons. */
static int combine_types(CType *dest, SValue *op1, SValue *op2, int op)
{
    CType *type1 = &op1->type, *type2 = &op2->type, type;
    int t1 = type1->t, t2 = type2->t, bt1 = t1 & VT_BTYPE, bt2 = t2 & VT_BTYPE;
    int ret = 1;

    type.t = VT_VOID;
    type.ref = NULL;

    if (bt1 == VT_VOID || bt2 == VT_VOID) {
        /* void is only valid in a conditional expression */
        ret = op == '?' ? 1 : 0;
        /* NOTE: as an extension, we accept void on only one side */
        type.t = VT_VOID;
    } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
        if (op == '+') ; /* Handled in caller */
        /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
        /* If one is a null ptr constant the result type is the other. */
        else if (is_null_pointer (op2)) type = *type1;
        else if (is_null_pointer (op1)) type = *type2;
        else if (bt1 != bt2) {
            /* accept comparison or cond-expr between pointer and integer
               with a warning */
            if ((op == '?' || TOK_ISCOND(op))
                && (is_integer_btype(bt1) || is_integer_btype(bt2)))
                tcc_warning("pointer/integer mismatch in %s",
                            op == '?' ? "conditional expression" : "comparison");
            else if (op != '-' || !is_integer_btype(bt2))
                ret = 0;
            /* result type is whichever side is the pointer */
            type = *(bt1 == VT_PTR ? type1 : type2);
        } else {
            /* both operands are pointers */
            CType *pt1 = pointed_type(type1);
            CType *pt2 = pointed_type(type2);
            int pbt1 = pt1->t & VT_BTYPE;
            int pbt2 = pt2->t & VT_BTYPE;
            int newquals, copied = 0;
            if (pbt1 != VT_VOID && pbt2 != VT_VOID
                && !compare_types(pt1, pt2, 1/*unqualif*/)) {
                if (op != '?' && !TOK_ISCOND(op))
                    ret = 0;
                else
                    type_incompatibility_warning(type1, type2,
                        op == '?'
                         ? "pointer type mismatch in conditional expression ('%s' and '%s')"
                         : "pointer type mismatch in comparison('%s' and '%s')");
            }
            if (op == '?') {
                /* pointers to void get preferred, otherwise the
                   pointed to types minus qualifs should be compatible */
                type = *((pbt1 == VT_VOID) ? type1 : type2);
                /* combine qualifs */
                newquals = ((pt1->t | pt2->t) & (VT_CONSTANT | VT_VOLATILE));
                if ((~pointed_type(&type)->t & (VT_CONSTANT | VT_VOLATILE))
                    & newquals)
                {
                    /* copy the pointer target symbol so we don't mutate
                       the original type's symbol in place */
                    type.ref = sym_push(SYM_FIELD, &type.ref->type,
                                        0, type.ref->c);
                    copied = 1;
                    pointed_type(&type)->t |= newquals;
                }
                /* pointers to incomplete arrays get converted to
                   pointers to completed ones if possible */
                if (pt1->t & VT_ARRAY
                    && pt2->t & VT_ARRAY
                    && pointed_type(&type)->ref->c < 0
                    && (pt1->ref->c > 0 || pt2->ref->c > 0))
                {
                    /* need private copies before patching the element count */
                    if (!copied)
                        type.ref = sym_push(SYM_FIELD, &type.ref->type,
                                            0, type.ref->c);
                    pointed_type(&type)->ref =
                        sym_push(SYM_FIELD, &pointed_type(&type)->ref->type,
                                 0, pointed_type(&type)->ref->c);
                    pointed_type(&type)->ref->c =
                        0 < pt1->ref->c ? pt1->ref->c : pt2->ref->c;
                }
            }
        }
        /* pointer comparisons are done at pointer width */
        if (TOK_ISCOND(op))
            type.t = VT_SIZE_T;
    } else if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
        /* structs only combine in '?:' and only if compatible */
        if (op != '?' || !compare_types(type1, type2, 1))
            ret = 0;
        type = *type1;
    } else if (is_float(bt1) || is_float(bt2)) {
        /* promote to the wider floating type */
        if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
            type.t = VT_LDOUBLE;
        } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
            type.t = VT_DOUBLE;
        } else {
            type.t = VT_FLOAT;
        }
    } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
        /* cast to biggest op */
        type.t = VT_LLONG | VT_LONG;
        if (bt1 == VT_LLONG)
            type.t &= t1;
        if (bt2 == VT_LLONG)
            type.t &= t2;
        /* convert to unsigned if it does not fit in a long long */
        if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
            (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
            type.t |= VT_UNSIGNED;
    } else {
        /* integer operations */
        type.t = VT_INT | (VT_LONG & (t1 | t2));
        /* convert to unsigned if it does not fit in an integer */
        if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
            (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
            type.t |= VT_UNSIGNED;
    }
    if (dest)
        *dest = type;
    return ret;
}
/* generic gen_op: handles types problems */
/* Applies binary operator 'op' to vtop[-1] and vtop[0]: decays
   function designators to pointers, validates/combines the operand
   types, scales pointer arithmetic by the pointed-to size, applies the
   usual arithmetic conversions, then dispatches to the integer or
   floating-point generator.  The result replaces both operands. */
ST_FUNC void gen_op(int op)
{
    int t1, t2, bt1, bt2, t;
    CType type1, combtype;

redo:
    t1 = vtop[-1].type.t;
    t2 = vtop[0].type.t;
    bt1 = t1 & VT_BTYPE;
    bt2 = t2 & VT_BTYPE;

    if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
        /* decay functions to pointers-to-function and retry */
        if (bt2 == VT_FUNC) {
            mk_pointer(&vtop->type);
            gaddrof();
        }
        if (bt1 == VT_FUNC) {
            vswap();
            mk_pointer(&vtop->type);
            gaddrof();
            vswap();
        }
        goto redo;
    } else if (!combine_types(&combtype, vtop - 1, vtop, op)) {
        tcc_error("invalid operand types for binary operation");
    } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
        /* at least one operand is a pointer */
        /* relational op: must be both pointers */
        int align;
        if (TOK_ISCOND(op))
            goto std_op;
        /* if both pointers, then it must be the '-' op */
        if (bt1 == VT_PTR && bt2 == VT_PTR) {
            if (op != '-')
                tcc_error("cannot use pointers here");
            /* ptr - ptr: byte difference divided by element size */
            vpush_type_size(pointed_type(&vtop[-1].type), &align);
            vrott(3);
            gen_opic(op);
            vtop->type.t = VT_PTRDIFF_T;
            vswap();
            gen_op(TOK_PDIV);
        } else {
            /* exactly one pointer : must be '+' or '-'. */
            if (op != '-' && op != '+')
                tcc_error("cannot use pointers here");
            /* Put pointer as first operand */
            if (bt2 == VT_PTR) {
                vswap();
                t = t1, t1 = t2, t2 = t;
            }
#if PTR_SIZE == 4
            if ((vtop[0].type.t & VT_BTYPE) == VT_LLONG)
                /* XXX: truncate here because gen_opl can't handle ptr + long long */
                gen_cast_s(VT_INT);
#endif
            /* scale the integer operand by the element size */
            type1 = vtop[-1].type;
            vpush_type_size(pointed_type(&vtop[-1].type), &align);
            gen_op('*');
#ifdef CONFIG_TCC_BCHECK
            if (tcc_state->do_bounds_check && !CONST_WANTED) {
                /* if bounded pointers, we generate a special code to
                   test bounds */
                if (op == '-') {
                    vpushi(0);
                    vswap();
                    gen_op('-');
                }
                gen_bounded_ptr_add();
            } else
#endif
            {
                gen_opic(op);
            }
            /* the result is a pointer, never an array/VLA */
            type1.t &= ~(VT_ARRAY|VT_VLA);
            /* put again type if gen_opic() swaped operands */
            vtop->type = type1;
        }
    } else {
        /* floats can only be used for a few operations */
        if (is_float(combtype.t)
            && op != '+' && op != '-' && op != '*' && op != '/'
            && !TOK_ISCOND(op))
            tcc_error("invalid operands for binary operation");
        else if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL) {
            /* shifts: result type follows the left operand only */
            t = bt1 == VT_LLONG ? VT_LLONG : VT_INT;
            if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (t | VT_UNSIGNED))
                t |= VT_UNSIGNED;
            t |= (VT_LONG & t1);
            combtype.t = t;
        }
    std_op:
        t = t2 = combtype.t;
        /* XXX: currently, some unsigned operations are explicit, so
           we modify them here */
        if (t & VT_UNSIGNED) {
            if (op == TOK_SAR)
                op = TOK_SHR;
            else if (op == '/')
                op = TOK_UDIV;
            else if (op == '%')
                op = TOK_UMOD;
            else if (op == TOK_LT)
                op = TOK_ULT;
            else if (op == TOK_GT)
                op = TOK_UGT;
            else if (op == TOK_LE)
                op = TOK_ULE;
            else if (op == TOK_GE)
                op = TOK_UGE;
        }
        /* cast both operands to the combined type */
        vswap();
        gen_cast_s(t);
        vswap();
        /* special case for shifts and long long: we keep the shift as
           an integer */
        if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
            t2 = VT_INT;
        gen_cast_s(t2);
        if (is_float(t))
            gen_opif(op);
        else
            gen_opic(op);
        if (TOK_ISCOND(op)) {
            /* relational op: the result is an int */
            vtop->type.t = VT_INT;
        } else {
            vtop->type.t = t;
        }
    }
    // Make sure that we have converted to an rvalue:
    if (vtop->r & VT_LVAL)
        gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
}
/* Integer -> float conversion.  On targets whose backend handles the
   unsigned long long case natively this is just gen_cvt_itof();
   otherwise the runtime helper __floatundi{sf,df,xf} is called. */
#if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64 || defined TCC_TARGET_ARM
#define gen_cvt_itof1 gen_cvt_itof
#else
/* generic itof for unsigned long long case */
static void gen_cvt_itof1(int t)
{
    if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
        (VT_LLONG | VT_UNSIGNED)) {

        /* pick the helper matching the destination float type */
        if (t == VT_FLOAT)
            vpush_helper_func(TOK___floatundisf);
#if LDOUBLE_SIZE != 8
        else if (t == VT_LDOUBLE)
            vpush_helper_func(TOK___floatundixf);
#endif
        else
            vpush_helper_func(TOK___floatundidf);
        vrott(2);
        gfunc_call(1);
        /* push a dummy value and mark it as the call's return register */
        vpushi(0);
        PUT_R_RET(vtop, t);
    } else {
        gen_cvt_itof(t);
    }
}
#endif
/* Float -> integer conversion.  On targets whose backend handles the
   unsigned long long case natively this is just gen_cvt_ftoi();
   otherwise the runtime helper __fixuns{sf,df,xf}di is called. */
#if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64
#define gen_cvt_ftoi1 gen_cvt_ftoi
#else
/* generic ftoi for unsigned long long case */
static void gen_cvt_ftoi1(int t)
{
    int st;
    if (t == (VT_LLONG | VT_UNSIGNED)) {
        /* not handled natively */
        /* pick the helper matching the source float type */
        st = vtop->type.t & VT_BTYPE;
        if (st == VT_FLOAT)
            vpush_helper_func(TOK___fixunssfdi);
#if LDOUBLE_SIZE != 8
        else if (st == VT_LDOUBLE)
            vpush_helper_func(TOK___fixunsxfdi);
#endif
        else
            vpush_helper_func(TOK___fixunsdfdi);
        vrott(2);
        gfunc_call(1);
        /* push a dummy value and mark it as the call's return register */
        vpushi(0);
        PUT_R_RET(vtop, t);
    } else {
        gen_cvt_ftoi(t);
    }
}
#endif
3139 /* special delayed cast for char/short */
3140 static void force_charshort_cast(void)
3142 int sbt = BFGET(vtop->r, VT_MUSTCAST) == 2 ? VT_LLONG : VT_INT;
3143 int dbt = vtop->type.t;
3144 vtop->r &= ~VT_MUSTCAST;
3145 vtop->type.t = sbt;
3146 gen_cast_s(dbt == VT_BOOL ? VT_BYTE|VT_UNSIGNED : dbt);
3147 vtop->type.t = dbt;
3150 static void gen_cast_s(int t)
3152 CType type;
3153 type.t = t;
3154 type.ref = NULL;
3155 gen_cast(&type);
/* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
/* Handles: delayed char/short casts, bitfield extraction, constant
   folding of casts, int<->float conversions (via helpers where
   needed), pointer/integer width changes and sign/zero extension.
   On return vtop has the new type minus qualifiers/array bits. */
static void gen_cast(CType *type)
{
    int sbt, dbt, sf, df, c;
    int dbt_bt, sbt_bt, ds, ss, bits, trunc;

    /* special delayed cast for char/short */
    if (vtop->r & VT_MUSTCAST)
        force_charshort_cast();

    /* bitfields first get cast to ints */
    if (vtop->type.t & VT_BITFIELD)
        gv(RC_INT);

    dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
    sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
    if (sbt == VT_FUNC)
        sbt = VT_PTR;   /* functions decay to pointers */

again:
    if (sbt != dbt) {
        sf = is_float(sbt);
        df = is_float(dbt);
        dbt_bt = dbt & VT_BTYPE;
        sbt_bt = sbt & VT_BTYPE;
        if (dbt_bt == VT_VOID)
            goto done;   /* cast to void discards the value type only */
        if (sbt_bt == VT_VOID) {
        error:
            cast_error(&vtop->type, type);
        }

        c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
#if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
        /* don't try to convert to ldouble when cross-compiling
           (except when it's '0' which is needed for arm:gen_negf()) */
        if (dbt_bt == VT_LDOUBLE && !nocode_wanted && (sf || vtop->c.i != 0))
            c = 0;
#endif
        if (c) {
            /* constant case: we can do it now */
            /* XXX: in ISOC, cannot do it if error in convert */
            /* widen source float constant to long double first */
            if (sbt == VT_FLOAT)
                vtop->c.ld = vtop->c.f;
            else if (sbt == VT_DOUBLE)
                vtop->c.ld = vtop->c.d;

            if (df) {
                /* destination is float: convert the integer constant,
                   taking care to round negative values correctly */
                if (sbt_bt == VT_LLONG) {
                    if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
                        vtop->c.ld = vtop->c.i;
                    else
                        vtop->c.ld = -(long double)-vtop->c.i;
                } else if(!sf) {
                    if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
                        vtop->c.ld = (uint32_t)vtop->c.i;
                    else
                        vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
                }

                if (dbt == VT_FLOAT)
                    vtop->c.f = (float)vtop->c.ld;
                else if (dbt == VT_DOUBLE)
                    vtop->c.d = (double)vtop->c.ld;
            } else if (sf && dbt == VT_BOOL) {
                vtop->c.i = (vtop->c.ld != 0);
            } else {
                /* destination is integer: first normalize the source
                   value to a full-width integer in c.i ... */
                if(sf)
                    vtop->c.i = vtop->c.ld;
                else if (sbt_bt == VT_LLONG || (PTR_SIZE == 8 && sbt == VT_PTR))
                    ;
                else if (sbt & VT_UNSIGNED)
                    vtop->c.i = (uint32_t)vtop->c.i;
                else
                    vtop->c.i = ((uint32_t)vtop->c.i | -(vtop->c.i & 0x80000000));

                /* ... then truncate / sign-extend to the target width */
                if (dbt_bt == VT_LLONG || (PTR_SIZE == 8 && dbt == VT_PTR))
                    ;
                else if (dbt == VT_BOOL)
                    vtop->c.i = (vtop->c.i != 0);
                else {
                    uint32_t m = dbt_bt == VT_BYTE ? 0xff :
                                 dbt_bt == VT_SHORT ? 0xffff :
                                  0xffffffff;
                    vtop->c.i &= m;
                    if (!(dbt & VT_UNSIGNED))
                        vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
                }
            }
            goto done;

        } else if (dbt == VT_BOOL
                   && (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM))
                       == (VT_CONST | VT_SYM)) {
            /* addresses are considered non-zero (see tcctest.c:sinit23) */
            vtop->r = VT_CONST;
            vtop->c.i = 1;
            goto done;
        }

        /* cannot generate code for global or static initializers */
        if (nocode_wanted & DATA_ONLY_WANTED)
            goto done;

        /* non constant case: generate code */
        if (dbt == VT_BOOL) {
            gen_test_zero(TOK_NE);
            goto done;
        }

        if (sf || df) {
            if (sf && df) {
                /* convert from fp to fp */
                gen_cvt_ftof(dbt);
            } else if (df) {
                /* convert int to fp */
                gen_cvt_itof1(dbt);
            } else {
                /* convert fp to int */
                sbt = dbt;
                if (dbt_bt != VT_LLONG && dbt_bt != VT_INT)
                    sbt = VT_INT;
                gen_cvt_ftoi1(sbt);
                goto again; /* may need char/short cast */
            }
            goto done;
        }

        ds = btype_size(dbt_bt);
        ss = btype_size(sbt_bt);
        if (ds == 0 || ss == 0)
            goto error;

        if (IS_ENUM(type->t) && type->ref->c < 0)
            tcc_error("cast to incomplete type");

        /* same size and no sign conversion needed */
        if (ds == ss && ds >= 4)
            goto done;
        if (dbt_bt == VT_PTR || sbt_bt == VT_PTR) {
            tcc_warning("cast between pointer and integer of different size");
            if (sbt_bt == VT_PTR) {
                /* put integer type to allow logical operations below */
                vtop->type.t = (PTR_SIZE == 8 ? VT_LLONG : VT_INT);
            }
        }

        /* processor allows { int a = 0, b = *(char*)&a; }
           That means that if we cast to less width, we can just
           change the type and read it still later. */
        #define ALLOW_SUBTYPE_ACCESS 1

        if (ALLOW_SUBTYPE_ACCESS && (vtop->r & VT_LVAL)) {
            /* value still in memory */
            if (ds <= ss)
                goto done;
            /* ss <= 4 here */
            if (ds <= 4 && !(dbt == (VT_SHORT | VT_UNSIGNED) && sbt == VT_BYTE)) {
                gv(RC_INT);
                goto done; /* no 64bit envolved */
            }
        }
        gv(RC_INT);

        trunc = 0;
#if PTR_SIZE == 4
        if (ds == 8) {
            /* generate high word */
            if (sbt & VT_UNSIGNED) {
                vpushi(0);
                gv(RC_INT);
            } else {
                gv_dup();
                vpushi(31);
                gen_op(TOK_SAR);
            }
            lbuild(dbt);
        } else if (ss == 8) {
            /* from long long: just take low order word */
            lexpand();
            vpop();
        }
        ss = 4;

#elif PTR_SIZE == 8
        if (ds == 8) {
            /* need to convert from 32bit to 64bit */
            if (sbt & VT_UNSIGNED) {
#if defined(TCC_TARGET_RISCV64)
                /* RISC-V keeps 32bit vals in registers sign-extended.
                   So here we need a zero-extension.  */
                trunc = 32;
#else
                goto done;
#endif
            } else {
                gen_cvt_sxtw();
                goto done;
            }
            ss = ds, ds = 4, dbt = sbt;
        } else if (ss == 8) {
            /* RISC-V keeps 32bit vals in registers sign-extended.
               So here we need a sign-extension for signed types and
               zero-extension. for unsigned types. */
#if !defined(TCC_TARGET_RISCV64)
            trunc = 32; /* zero upper 32 bits for non RISC-V targets */
#endif
        } else {
            ss = 4;
        }
#endif

        if (ds >= ss)
            goto done;
#if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64 || defined TCC_TARGET_ARM64
        if (ss == 4) {
            gen_cvt_csti(dbt);
            goto done;
        }
#endif
        /* narrow by shifting up then back down; for unsigned the SAR
           becomes SHR (see gen_op), 'trunc' clears high bits when the
           register may hold a sign-extended 32-bit value */
        bits = (ss - ds) * 8;
        /* for unsigned, gen_op will convert SAR to SHR */
        vtop->type.t = (ss == 8 ? VT_LLONG : VT_INT) | (dbt & VT_UNSIGNED);
        vpushi(bits);
        gen_op(TOK_SHL);
        vpushi(bits - trunc);
        gen_op(TOK_SAR);
        vpushi(trunc);
        gen_op(TOK_SHR);
    }
done:
    vtop->type = *type;
    vtop->type.t &= ~ ( VT_CONSTANT | VT_VOLATILE | VT_ARRAY );
}
/* return type size as known at compile time. Put alignment at 'a' */
/* Returns -1 for incomplete types (e.g. enums without a definition;
   incomplete arrays yield a negative value via the multiplication). */
ST_FUNC int type_size(CType *type, int *a)
{
    Sym *s;
    int bt;

    bt = type->t & VT_BTYPE;
    if (bt == VT_STRUCT) {
        /* struct/union */
        s = type->ref;
        *a = s->r;     /* alignment is cached in the symbol's r field */
        return s->c;   /* size is cached in c */
    } else if (bt == VT_PTR) {
        if (type->t & VT_ARRAY) {
            int ts;

            s = type->ref;
            ts = type_size(&s->type, a);

            /* both element size and count unknown: report positive
               element size so the product stays negative (incomplete) */
            if (ts < 0 && s->c < 0)
                ts = -ts;

            return ts * s->c;
        } else {
            *a = PTR_SIZE;
            return PTR_SIZE;
        }
    } else if (IS_ENUM(type->t) && type->ref->c < 0) {
        *a = 0;
        return -1; /* incomplete enum */
    } else if (bt == VT_LDOUBLE) {
        *a = LDOUBLE_ALIGN;
        return LDOUBLE_SIZE;
    } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
        /* 8-byte scalars: alignment is ABI-dependent (4 on old i386
           SysV and pre-EABI ARM, 8 elsewhere) */
#ifdef TCC_TARGET_I386
#ifdef TCC_TARGET_PE
        *a = 8;
#else
        *a = 4;
#endif
#elif defined(TCC_TARGET_ARM)
#ifdef TCC_ARM_EABI
        *a = 8;
#else
        *a = 4;
#endif
#else
        *a = 8;
#endif
        return 8;
    } else if (bt == VT_INT || bt == VT_FLOAT) {
        *a = 4;
        return 4;
    } else if (bt == VT_SHORT) {
        *a = 2;
        return 2;
    } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
        *a = 8;
        return 16;
    } else {
        /* char, void, function, _Bool */
        *a = 1;
        return 1;
    }
}
/* push type size as known at runtime time on top of value stack. Put
   alignment at 'a' */
/* For VLAs the size was computed at declaration time and stored in a
   local variable (ref->c holds its stack offset), so push an lvalue
   referring to it; otherwise push the compile-time constant size. */
static void vpush_type_size(CType *type, int *a)
{
    if (type->t & VT_VLA) {
        /* only the element alignment is statically known */
        type_size(&type->ref->type, a);
        vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
    } else {
        int size = type_size(type, a);
        if (size < 0)
            tcc_error("unknown type size");
#if PTR_SIZE == 8
        vpushll(size);
#else
        vpushi(size);
#endif
    }
}
3478 /* return the pointed type of t */
3479 static inline CType *pointed_type(CType *type)
3481 return &type->ref->type;
3484 /* modify type so that its it is a pointer to type. */
3485 ST_FUNC void mk_pointer(CType *type)
3487 Sym *s;
3488 s = sym_push(SYM_FIELD, type, 0, -1);
3489 type->t = VT_PTR | (type->t & VT_STORAGE);
3490 type->ref = s;
3493 /* return true if type1 and type2 are exactly the same (including
3494 qualifiers).
3496 static int is_compatible_types(CType *type1, CType *type2)
3498 return compare_types(type1,type2,0);
3501 /* return true if type1 and type2 are the same (ignoring qualifiers).
3503 static int is_compatible_unqualified_types(CType *type1, CType *type2)
3505 return compare_types(type1,type2,1);
3508 static void cast_error(CType *st, CType *dt)
3510 type_incompatibility_error(st, dt, "cannot convert '%s' to '%s'");
/* verify type compatibility to store vtop in 'dt' type */
/* Emits warnings for legal-but-dubious assignments (int->ptr, ptr->int,
   incompatible pointer targets, discarded qualifiers) and a hard error
   for impossible ones (e.g. incompatible structs, void).  Performs no
   conversion itself; gen_assign_cast() does the cast afterwards. */
static void verify_assign_cast(CType *dt)
{
    CType *st, *type1, *type2;
    int dbt, sbt, qualwarn, lvl;

    st = &vtop->type; /* source type */
    dbt = dt->t & VT_BTYPE;
    sbt = st->t & VT_BTYPE;
    if (dt->t & VT_CONSTANT)
        tcc_warning("assignment of read-only location");
    switch(dbt) {
    case VT_VOID:
        if (sbt != dbt)
            tcc_error("assignment to void expression");
        break;
    case VT_PTR:
        /* special cases for pointers */
        /* '0' can also be a pointer */
        if (is_null_pointer(vtop))
            break;
        /* accept implicit pointer to integer cast with warning */
        if (is_integer_btype(sbt)) {
            tcc_warning("assignment makes pointer from integer without a cast");
            break;
        }
        type1 = pointed_type(dt);
        if (sbt == VT_PTR)
            type2 = pointed_type(st);
        else if (sbt == VT_FUNC)
            type2 = st; /* a function is implicitly a function pointer */
        else
            goto error;
        if (is_compatible_types(type1, type2))
            break;
        /* drill down through matching pointer levels, remembering
           whether any level drops const/volatile from the source */
        for (qualwarn = lvl = 0;; ++lvl) {
            if (((type2->t & VT_CONSTANT) && !(type1->t & VT_CONSTANT)) ||
                ((type2->t & VT_VOLATILE) && !(type1->t & VT_VOLATILE)))
                qualwarn = 1;
            dbt = type1->t & (VT_BTYPE|VT_LONG);
            sbt = type2->t & (VT_BTYPE|VT_LONG);
            if (dbt != VT_PTR || sbt != VT_PTR)
                break;
            type1 = pointed_type(type1);
            type2 = pointed_type(type2);
        }
        if (!is_compatible_unqualified_types(type1, type2)) {
            if ((dbt == VT_VOID || sbt == VT_VOID) && lvl == 0) {
                /* void * can match anything */
            } else if (dbt == sbt
                && is_integer_btype(sbt & VT_BTYPE)
                && IS_ENUM(type1->t) + IS_ENUM(type2->t)
                    + !!((type1->t ^ type2->t) & VT_UNSIGNED) < 2) {
                /* Like GCC don't warn by default for merely changes
                   in pointer target signedness.  Do warn for different
                   base types, though, in particular for unsigned enums
                   and signed int targets.  */
            } else {
                tcc_warning("assignment from incompatible pointer type");
                break;
            }
        }
        if (qualwarn)
            tcc_warning_c(warn_discarded_qualifiers)("assignment discards qualifiers from pointer target type");
        break;
    case VT_BYTE:
    case VT_SHORT:
    case VT_INT:
    case VT_LLONG:
        if (sbt == VT_PTR || sbt == VT_FUNC) {
            tcc_warning("assignment makes integer from pointer without a cast");
        } else if (sbt == VT_STRUCT) {
            goto case_VT_STRUCT;
        }
        /* XXX: more tests */
        break;
    case VT_STRUCT:
    case_VT_STRUCT:
        if (!is_compatible_unqualified_types(dt, st)) {
        error:
            cast_error(st, dt);
        }
        break;
    }
}
3599 static void gen_assign_cast(CType *dt)
3601 verify_assign_cast(dt);
3602 gen_cast(dt);
/* store vtop in lvalue pushed on stack */
ST_FUNC void vstore(void)
{
    /* Expects the destination lvalue at vtop[-1] and the value to store
       at vtop[0]; leaves the stored value as the expression result.
       Handles four cases: struct copy, bitfield store, store to void
       (value is discarded), and plain scalar store. */
    int sbt, dbt, ft, r, size, align, bit_size, bit_pos, delayed_cast;

    ft = vtop[-1].type.t;
    sbt = vtop->type.t & VT_BTYPE;
    dbt = ft & VT_BTYPE;
    verify_assign_cast(&vtop[-1].type);

    if (sbt == VT_STRUCT) {
        /* if structure, only generate pointer */
        /* structure assignment : generate memcpy */
        size = type_size(&vtop->type, &align);
        /* destination, keep on stack() as result */
        vpushv(vtop - 1);
#ifdef CONFIG_TCC_BCHECK
        if (vtop->r & VT_MUSTBOUND)
            gbound(); /* check would be wrong after gaddrof() */
#endif
        vtop->type.t = VT_PTR;
        gaddrof();
        /* source */
        vswap();
#ifdef CONFIG_TCC_BCHECK
        if (vtop->r & VT_MUSTBOUND)
            gbound();
#endif
        vtop->type.t = VT_PTR;
        gaddrof();

#ifdef TCC_TARGET_NATIVE_STRUCT_COPY
        if (1
#ifdef CONFIG_TCC_BCHECK
            && !tcc_state->do_bounds_check
#endif
            ) {
            gen_struct_copy(size);
        } else
#endif
        {
            /* type size */
            vpushi(size);
            /* Use memmove, rather than memcpy, as dest and src may be same: */
#ifdef TCC_ARM_EABI
            if(!(align & 7))
                vpush_helper_func(TOK_memmove8);
            else if(!(align & 3))
                vpush_helper_func(TOK_memmove4);
            else
#endif
            vpush_helper_func(TOK_memmove);
            vrott(4);
            gfunc_call(3);
        }

    } else if (ft & VT_BITFIELD) {
        /* bitfield store handling */

        /* save lvalue as expression result (example: s.b = s.a = n;) */
        vdup(), vtop[-1] = vtop[-2];

        bit_pos = BIT_POS(ft);
        bit_size = BIT_SIZE(ft);
        /* remove bit field info to avoid loops */
        vtop[-1].type.t = ft & ~VT_STRUCT_MASK;

        if (dbt == VT_BOOL) {
            gen_cast(&vtop[-1].type);
            vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
        }
        r = adjust_bf(vtop - 1, bit_pos, bit_size);
        if (dbt != VT_BOOL) {
            gen_cast(&vtop[-1].type);
            dbt = vtop[-1].type.t & VT_BTYPE;
        }
        if (r == VT_STRUCT) {
            /* bitfield straddles its container: byte-wise access */
            store_packed_bf(bit_pos, bit_size);
        } else {
            unsigned long long mask = (1ULL << bit_size) - 1;
            if (dbt != VT_BOOL) {
                /* mask source */
                if (dbt == VT_LLONG)
                    vpushll(mask);
                else
                    vpushi((unsigned)mask);
                gen_op('&');
            }
            /* shift source */
            vpushi(bit_pos);
            gen_op(TOK_SHL);
            vswap();
            /* duplicate destination */
            vdup();
            vrott(3);
            /* load destination, mask and or with source */
            if (dbt == VT_LLONG)
                vpushll(~(mask << bit_pos));
            else
                vpushi(~((unsigned)mask << bit_pos));
            gen_op('&');
            gen_op('|');
            /* store result */
            vstore();
            /* ... and discard */
            vpop();
        }
    } else if (dbt == VT_VOID) {
        /* assignment to void: just drop the value */
        --vtop;
    } else {
        /* optimize char/short casts */
        delayed_cast = 0;
        if ((dbt == VT_BYTE || dbt == VT_SHORT)
            && is_integer_btype(sbt)
            ) {
            if ((vtop->r & VT_MUSTCAST)
                && btype_size(dbt) > btype_size(sbt)
                )
                force_charshort_cast();
            delayed_cast = 1;
        } else {
            gen_cast(&vtop[-1].type);
        }

#ifdef CONFIG_TCC_BCHECK
        /* bound check case */
        if (vtop[-1].r & VT_MUSTBOUND) {
            vswap();
            gbound();
            vswap();
        }
#endif
        gv(RC_TYPE(dbt)); /* generate value */

        if (delayed_cast) {
            vtop->r |= BFVAL(VT_MUSTCAST, (sbt == VT_LLONG) + 1);
            //tcc_warning("deley cast %x -> %x", sbt, dbt);
            vtop->type.t = ft & VT_TYPE;
        }

        /* if lvalue was saved on stack, must read it */
        if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
            SValue sv;
            r = get_reg(RC_INT);
            sv.type.t = VT_PTRDIFF_T;
            sv.r = VT_LOCAL | VT_LVAL;
            sv.c.i = vtop[-1].c.i;
            load(r, &sv);
            vtop[-1].r = r | VT_LVAL;
        }

        r = vtop->r & VT_VALMASK;
        /* two word case handling :
           store second register at word + 4 (or +8 for x86-64) */
        if (USING_TWO_WORDS(dbt)) {
            int load_type = (dbt == VT_QFLOAT) ? VT_DOUBLE : VT_PTRDIFF_T;
            vtop[-1].type.t = load_type;
            store(r, vtop - 1);
            vswap();
            incr_offset(PTR_SIZE);
            vswap();
            /* XXX: it works because r2 is spilled last ! */
            store(vtop->r2, vtop - 1);
        } else {
            /* single word */
            store(r, vtop - 1);
        }
        vswap();
        vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
    }
}
/* post defines POST/PRE add. c is the token ++ or -- */
ST_FUNC void inc(int post, int c)
{
    /* Generate code for ++/-- applied to the lvalue at vtop.
       With post != 0 the value before the add is kept as the
       expression result (post-increment semantics). */
    test_lvalue();
    vdup(); /* save lvalue */
    if (post) {
        gv_dup(); /* duplicate value */
        vrotb(3);
        vrotb(3);
    }
    /* add constant; c - TOK_MID yields +1 for '++' and -1 for '--'
       (TOK_MID sits between the two tokens — see tcc.h) */
    vpushi(c - TOK_MID);
    gen_op('+');
    vstore(); /* store value */
    if (post)
        vpop(); /* if post op, return saved value */
}
3795 ST_FUNC CString* parse_mult_str (const char *msg)
3797 /* read the string */
3798 if (tok != TOK_STR)
3799 expect(msg);
3800 cstr_reset(&initstr);
3801 while (tok == TOK_STR) {
3802 /* XXX: add \0 handling too ? */
3803 cstr_cat(&initstr, tokc.str.data, -1);
3804 next();
3806 cstr_ccat(&initstr, '\0');
3807 return &initstr;
/* If I is >= 1 and a power of two, returns log2(i)+1.
   If I is 0 returns 0.  (For any i >= 1 this is the bit length of i.) */
#ifndef ST_FUNC
# define ST_FUNC /* allow standalone compilation; inert when tcc.h is included */
#endif
ST_FUNC int exact_log2p1(int i)
{
    int n;

    if (i == 0)
        return 0;
    /* locate the highest set bit: skip in byte-sized steps first,
       then finish one bit at a time */
    n = 1;
    while (i >= 1 << 8)
        i >>= 8, n += 8;
    while (i >= 1 << 1)
        i >>= 1, n += 1;
    return n;
}
3828 /* Parse __attribute__((...)) GNUC extension. */
static void parse_attribute(AttributeDef *ad)
{
    /* Accumulate recognized attributes into *ad.  Unknown attributes
       produce a warning and their parenthesized arguments are skipped.
       Several consecutive __attribute__((...)) groups are handled via
       the 'redo' loop. */
    int t, n;
    char *astr;

redo:
    if (tok != TOK_ATTRIBUTE1 && tok != TOK_ATTRIBUTE2)
        return;
    next();
    skip('(');
    skip('(');
    while (tok != ')') {
        if (tok < TOK_IDENT)
            expect("attribute name");
        t = tok;
        next();
        switch(t) {
        case TOK_CLEANUP1:
        case TOK_CLEANUP2:
        {
            Sym *s;

            skip('(');
            s = sym_find(tok);
            if (!s) {
                tcc_warning_c(warn_implicit_function_declaration)(
                    "implicit declaration of function '%s'", get_tok_str(tok, &tokc));
                s = external_global_sym(tok, &func_old_type);
            } else if ((s->type.t & VT_BTYPE) != VT_FUNC)
                tcc_error("'%s' is not declared as function", get_tok_str(tok, &tokc));
            ad->cleanup_func = s;
            next();
            skip(')');
            break;
        }
        case TOK_CONSTRUCTOR1:
        case TOK_CONSTRUCTOR2:
            ad->f.func_ctor = 1;
            break;
        case TOK_DESTRUCTOR1:
        case TOK_DESTRUCTOR2:
            ad->f.func_dtor = 1;
            break;
        case TOK_ALWAYS_INLINE1:
        case TOK_ALWAYS_INLINE2:
            ad->f.func_alwinl = 1;
            break;
        case TOK_SECTION1:
        case TOK_SECTION2:
            skip('(');
            astr = parse_mult_str("section name")->data;
            ad->section = find_section(tcc_state, astr);
            skip(')');
            break;
        case TOK_ALIAS1:
        case TOK_ALIAS2:
            skip('(');
            astr = parse_mult_str("alias(\"target\")")->data;
            /* save string as token, for later */
            ad->alias_target = tok_alloc_const(astr);
            skip(')');
            break;
        case TOK_VISIBILITY1:
        case TOK_VISIBILITY2:
            skip('(');
            astr = parse_mult_str("visibility(\"default|hidden|internal|protected\")")->data;
            if (!strcmp (astr, "default"))
                ad->a.visibility = STV_DEFAULT;
            else if (!strcmp (astr, "hidden"))
                ad->a.visibility = STV_HIDDEN;
            else if (!strcmp (astr, "internal"))
                ad->a.visibility = STV_INTERNAL;
            else if (!strcmp (astr, "protected"))
                ad->a.visibility = STV_PROTECTED;
            else
                expect("visibility(\"default|hidden|internal|protected\")");
            skip(')');
            break;
        case TOK_ALIGNED1:
        case TOK_ALIGNED2:
            /* aligned without argument means maximum useful alignment */
            if (tok == '(') {
                next();
                n = expr_const();
                if (n <= 0 || (n & (n - 1)) != 0)
                    tcc_error("alignment must be a positive power of two");
                skip(')');
            } else {
                n = MAX_ALIGN;
            }
            ad->a.aligned = exact_log2p1(n);
            if (n != 1 << (ad->a.aligned - 1))
                tcc_error("alignment of %d is larger than implemented", n);
            break;
        case TOK_PACKED1:
        case TOK_PACKED2:
            ad->a.packed = 1;
            break;
        case TOK_WEAK1:
        case TOK_WEAK2:
            ad->a.weak = 1;
            break;
        case TOK_NODEBUG1:
        case TOK_NODEBUG2:
            ad->a.nodebug = 1;
            break;
        case TOK_UNUSED1:
        case TOK_UNUSED2:
            /* currently, no need to handle it because tcc does not
               track unused objects */
            break;
        case TOK_NORETURN1:
        case TOK_NORETURN2:
            ad->f.func_noreturn = 1;
            break;
        case TOK_CDECL1:
        case TOK_CDECL2:
        case TOK_CDECL3:
            ad->f.func_call = FUNC_CDECL;
            break;
        case TOK_STDCALL1:
        case TOK_STDCALL2:
        case TOK_STDCALL3:
            ad->f.func_call = FUNC_STDCALL;
            break;
#ifdef TCC_TARGET_I386
        case TOK_REGPARM1:
        case TOK_REGPARM2:
            /* regparm(n) is clamped to the 0..3 range */
            skip('(');
            n = expr_const();
            if (n > 3)
                n = 3;
            else if (n < 0)
                n = 0;
            if (n > 0)
                ad->f.func_call = FUNC_FASTCALL1 + n - 1;
            skip(')');
            break;
        case TOK_FASTCALL1:
        case TOK_FASTCALL2:
        case TOK_FASTCALL3:
            ad->f.func_call = FUNC_FASTCALLW;
            break;
#endif
        case TOK_MODE:
            /* __mode__ overrides the basic integer type width */
            skip('(');
            switch(tok) {
            case TOK_MODE_DI:
                ad->attr_mode = VT_LLONG + 1;
                break;
            case TOK_MODE_QI:
                ad->attr_mode = VT_BYTE + 1;
                break;
            case TOK_MODE_HI:
                ad->attr_mode = VT_SHORT + 1;
                break;
            case TOK_MODE_SI:
            case TOK_MODE_word:
                ad->attr_mode = VT_INT + 1;
                break;
            default:
                tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok, NULL));
                break;
            }
            next();
            skip(')');
            break;
        case TOK_DLLEXPORT:
            ad->a.dllexport = 1;
            break;
        case TOK_NODECORATE:
            ad->a.nodecorate = 1;
            break;
        case TOK_DLLIMPORT:
            ad->a.dllimport = 1;
            break;
        default:
            tcc_warning_c(warn_unsupported)("'%s' attribute ignored", get_tok_str(t, NULL));
            /* skip parameters */
            if (tok == '(') {
                int parenthesis = 0;
                do {
                    if (tok == '(')
                        parenthesis++;
                    else if (tok == ')')
                        parenthesis--;
                    next();
                } while (parenthesis && tok != -1);
            }
            break;
        }
        if (tok != ',')
            break;
        next();
    }
    skip(')');
    skip(')');
    goto redo;
}
4028 static Sym * find_field (CType *type, int v, int *cumofs)
4030 Sym *s = type->ref;
4031 int v1 = v | SYM_FIELD;
4033 while ((s = s->next) != NULL) {
4034 if (s->v == v1) {
4035 *cumofs += s->c;
4036 return s;
4038 if ((s->type.t & VT_BTYPE) == VT_STRUCT
4039 && s->v >= (SYM_FIRST_ANOM | SYM_FIELD)) {
4040 /* try to find field in anonymous sub-struct/union */
4041 Sym *ret = find_field (&s->type, v1, cumofs);
4042 if (ret) {
4043 *cumofs += s->c;
4044 return ret;
4049 if (!(v & SYM_FIELD)) { /* top-level call */
4050 s = type->ref;
4051 if (s->c < 0)
4052 tcc_error("dereferencing incomplete type '%s'",
4053 get_tok_str(s->v & ~SYM_STRUCT, 0));
4054 else
4055 tcc_error("field not found: %s",
4056 get_tok_str(v, &tokc));
4058 return NULL;
4061 static void check_fields (CType *type, int check)
4063 Sym *s = type->ref;
4065 while ((s = s->next) != NULL) {
4066 int v = s->v & ~SYM_FIELD;
4067 if (v < SYM_FIRST_ANOM) {
4068 TokenSym *ts = table_ident[v - TOK_IDENT];
4069 if (check && (ts->tok & SYM_FIELD))
4070 tcc_error("duplicate member '%s'", get_tok_str(v, NULL));
4071 ts->tok ^= SYM_FIELD;
4072 } else if ((s->type.t & VT_BTYPE) == VT_STRUCT)
4073 check_fields (&s->type, check);
static void struct_layout(CType *type, AttributeDef *ad)
{
    /* Assign byte offsets (f->c) and bitfield positions to every member
       of the struct/union 'type', honoring #pragma pack, the packed and
       aligned attributes and either PCC/GCC or MS bitfield layout rules
       (tcc_state->ms_bitfields).  On return type->ref->c holds the total
       size and type->ref->r the alignment.  A second pass chooses an
       access type (f->auxtype) for bitfields that cannot be accessed
       through their declared type. */
    int size, align, maxalign, offset, c, bit_pos, bit_size;
    int packed, a, bt, prevbt, prev_bit_size;
    int pcc = !tcc_state->ms_bitfields;
    int pragma_pack = *tcc_state->pack_stack_ptr;
    Sym *f;

    maxalign = 1;
    offset = 0;
    c = 0;
    bit_pos = 0;
    prevbt = VT_STRUCT; /* make it never match */
    prev_bit_size = 0;

//#define BF_DEBUG

    for (f = type->ref->next; f; f = f->next) {
        if (f->type.t & VT_BITFIELD)
            bit_size = BIT_SIZE(f->type.t);
        else
            bit_size = -1;
        size = type_size(&f->type, &align);
        a = f->a.aligned ? 1 << (f->a.aligned - 1) : 0;
        packed = 0;

        if (pcc && bit_size == 0) {
            /* in pcc mode, packing does not affect zero-width bitfields */

        } else {
            /* in pcc mode, attribute packed overrides if set. */
            if (pcc && (f->a.packed || ad->a.packed))
                align = packed = 1;

            /* pragma pack overrides align if lesser and packs bitfields always */
            if (pragma_pack) {
                packed = 1;
                if (pragma_pack < align)
                    align = pragma_pack;
                /* in pcc mode pragma pack also overrides individual align */
                if (pcc && pragma_pack < a)
                    a = 0;
            }
        }
        /* some individual align was specified */
        if (a)
            align = a;

        if (type->ref->type.t == VT_UNION) {
            /* all union members start at offset 0; only track max size */
            if (pcc && bit_size >= 0)
                size = (bit_size + 7) >> 3;
            offset = 0;
            if (size > c)
                c = size;

        } else if (bit_size < 0) {
            /* plain (non-bitfield) member: close any pending bitfield
               run, then align and append */
            if (pcc)
                c += (bit_pos + 7) >> 3;
            c = (c + align - 1) & -align;
            offset = c;
            if (size > 0)
                c += size;
            bit_pos = 0;
            prevbt = VT_STRUCT;
            prev_bit_size = 0;

        } else {
            /* A bit-field.  Layout is more complicated.  There are two
               options: PCC (GCC) compatible and MS compatible */
            if (pcc) {
                /* In PCC layout a bit-field is placed adjacent to the
                   preceding bit-fields, except if:
                   - it has zero-width
                   - an individual alignment was given
                   - it would overflow its base type container and
                     there is no packing */
                if (bit_size == 0) {
            new_field:
                    c = (c + ((bit_pos + 7) >> 3) + align - 1) & -align;
                    bit_pos = 0;
                } else if (f->a.aligned) {
                    goto new_field;
                } else if (!packed) {
                    int a8 = align * 8;
                    int ofs = ((c * 8 + bit_pos) % a8 + bit_size + a8 - 1) / a8;
                    if (ofs > size / align)
                        goto new_field;
                }

                /* in pcc mode, long long bitfields have type int if they fit */
                if (size == 8 && bit_size <= 32)
                    f->type.t = (f->type.t & ~VT_BTYPE) | VT_INT, size = 4;

                while (bit_pos >= align * 8)
                    c += align, bit_pos -= align * 8;
                offset = c;

                /* In PCC layout named bit-fields influence the alignment
                   of the containing struct using the base types alignment,
                   except for packed fields (which here have correct align). */
                if (f->v & SYM_FIRST_ANOM
                    // && bit_size // ??? gcc on ARM/rpi does that
                    )
                    align = 1;

            } else {
                bt = f->type.t & VT_BTYPE;
                if ((bit_pos + bit_size > size * 8)
                    || (bit_size > 0) == (bt != prevbt)
                    )
                {
                    c = (c + align - 1) & -align;
                    offset = c;
                    bit_pos = 0;
                    /* In MS bitfield mode a bit-field run always uses
                       at least as many bits as the underlying type.
                       To start a new run it's also required that this
                       or the last bit-field had non-zero width. */
                    if (bit_size || prev_bit_size)
                        c += size;
                }
                /* In MS layout the records alignment is normally
                   influenced by the field, except for a zero-width
                   field at the start of a run (but by further zero-width
                   fields it is again). */
                if (bit_size == 0 && prevbt != bt)
                    align = 1;
                prevbt = bt;
                prev_bit_size = bit_size;
            }

            f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
                        | (bit_pos << VT_STRUCT_SHIFT);
            bit_pos += bit_size;
        }
        if (align > maxalign)
            maxalign = align;

#ifdef BF_DEBUG
        printf("set field %s offset %-2d size %-2d align %-2d",
               get_tok_str(f->v & ~SYM_FIELD, NULL), offset, size, align);
        if (f->type.t & VT_BITFIELD) {
            printf(" pos %-2d bits %-2d",
                    BIT_POS(f->type.t),
                    BIT_SIZE(f->type.t)
                    );
        }
        printf("\n");
#endif

        f->c = offset;
        f->r = 0;
    }

    if (pcc)
        c += (bit_pos + 7) >> 3;

    /* store size and alignment */
    a = bt = ad->a.aligned ? 1 << (ad->a.aligned - 1) : 1;
    if (a < maxalign)
        a = maxalign;
    type->ref->r = a;
    if (pragma_pack && pragma_pack < maxalign && 0 == pcc) {
        /* can happen if individual align for some member was given.  In
           this case MSVC ignores maxalign when aligning the size */
        a = pragma_pack;
        if (a < bt)
            a = bt;
    }
    c = (c + a - 1) & -a;
    type->ref->c = c;

#ifdef BF_DEBUG
    printf("struct size %-2d align %-2d\n\n", c, a), fflush(stdout);
#endif

    /* check whether we can access bitfields by their type */
    for (f = type->ref->next; f; f = f->next) {
        int s, px, cx, c0;
        CType t;

        if (0 == (f->type.t & VT_BITFIELD))
            continue;
        f->type.ref = f;
        f->auxtype = -1;
        bit_size = BIT_SIZE(f->type.t);
        if (bit_size == 0)
            continue;
        bit_pos = BIT_POS(f->type.t);
        size = type_size(&f->type, &align);

        if (bit_pos + bit_size <= size * 8 && f->c + size <= c
#ifdef TCC_TARGET_ARM
            && !(f->c & (align - 1))
#endif
            )
            continue;

        /* try to access the field using a different type */
        c0 = -1, s = align = 1;
        t.t = VT_BYTE;
        for (;;) {
            px = f->c * 8 + bit_pos;
            cx = (px >> 3) & -align;
            px = px - (cx << 3);
            if (c0 == cx)
                break;
            s = (px + bit_size + 7) >> 3;
            if (s > 4) {
                t.t = VT_LLONG;
            } else if (s > 2) {
                t.t = VT_INT;
            } else if (s > 1) {
                t.t = VT_SHORT;
            } else {
                t.t = VT_BYTE;
            }
            s = type_size(&t, &align);
            c0 = cx;
        }

        if (px + bit_size <= s * 8 && cx + s <= c
#ifdef TCC_TARGET_ARM
            && !(cx & (align - 1))
#endif
            ) {
            /* update offset and bit position */
            f->c = cx;
            bit_pos = px;
            f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
                        | (bit_pos << VT_STRUCT_SHIFT);
            if (s != size)
                f->auxtype = t.t;
#ifdef BF_DEBUG
            printf("FIX field %s offset %-2d size %-2d align %-2d "
                   "pos %-2d bits %-2d\n",
                   get_tok_str(f->v & ~SYM_FIELD, NULL),
                   cx, s, align, px, bit_size);
#endif
        } else {
            /* fall back to load/store single-byte wise */
            f->auxtype = VT_STRUCT;
#ifdef BF_DEBUG
            printf("FIX field %s : load byte-wise\n",
                   get_tok_str(f->v & ~SYM_FIELD, NULL));
#endif
        }
    }
}
4326 static void do_Static_assert(void);
/* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
static void struct_decl(CType *type, int u)
{
    /* Parse a tagged-type specifier after the struct/union/enum keyword:
       resolve or create the tag symbol, then — if a '{' body follows —
       parse the member/enumerator list, compute the enum integral type
       or run struct_layout().  The resulting type is stored in *type. */
    int v, c, size, align, flexible;
    int bit_size, bsize, bt;
    Sym *s, *ss, **ps;
    AttributeDef ad, ad1;
    CType type1, btype;

    memset(&ad, 0, sizeof ad);
    next();
    parse_attribute(&ad);
    if (tok != '{') {
        v = tok;
        next();
        /* struct already defined ? return it */
        if (v < TOK_IDENT)
            expect("struct/union/enum name");
        s = struct_find(v);
        if (s && (s->sym_scope == local_scope || tok != '{')) {
            if (u == s->type.t)
                goto do_decl;
            if (u == VT_ENUM && IS_ENUM(s->type.t))
                goto do_decl;
            tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
        }
    } else {
        v = anon_sym++;
    }
    /* Record the original enum/struct/union token. */
    type1.t = u == VT_ENUM ? u | VT_INT | VT_UNSIGNED : u;
    type1.ref = NULL;
    /* we put an undefined size for struct/union */
    s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
    s->r = 0; /* default alignment is zero as gcc */
do_decl:
    type->t = s->type.t;
    type->ref = s;

    if (tok == '{') {
        next();
        if (s->c != -1)
            tcc_error("struct/union/enum already defined");
        s->c = -2; /* mark as being defined */
        /* cannot be empty */
        /* non empty enums are not allowed */
        ps = &s->next;
        if (u == VT_ENUM) {
            /* pl/nl track the largest positive/most negative values so
               the enum's integral type can be chosen afterwards */
            long long ll = 0, pl = 0, nl = 0;
            CType t;
            t.ref = s;
            /* enum symbols have static storage */
            t.t = VT_INT|VT_STATIC|VT_ENUM_VAL;
            for(;;) {
                v = tok;
                if (v < TOK_UIDENT)
                    expect("identifier");
                ss = sym_find(v);
                if (ss && !local_stack)
                    tcc_error("redefinition of enumerator '%s'",
                              get_tok_str(v, NULL));
                next();
                if (tok == '=') {
                    next();
                    ll = expr_const64();
                }
                ss = sym_push(v, &t, VT_CONST, 0);
                ss->enum_val = ll;
                *ps = ss, ps = &ss->next;
                if (ll < nl)
                    nl = ll;
                if (ll > pl)
                    pl = ll;
                if (tok != ',')
                    break;
                next();
                ll++;
                /* NOTE: we accept a trailing comma */
                if (tok == '}')
                    break;
            }
            skip('}');
            /* set integral type of the enum */
            t.t = VT_INT;
            if (nl >= 0) {
                if (pl != (unsigned)pl)
                    t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
                t.t |= VT_UNSIGNED;
            } else if (pl != (int)pl || nl != (int)nl)
                t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
            s->type.t = type->t = t.t | VT_ENUM;
            s->c = 0;
            /* set type for enum members */
            for (ss = s->next; ss; ss = ss->next) {
                ll = ss->enum_val;
                if (ll == (int)ll) /* default is int if it fits */
                    continue;
                if (t.t & VT_UNSIGNED) {
                    ss->type.t |= VT_UNSIGNED;
                    if (ll == (unsigned)ll)
                        continue;
                }
                ss->type.t = (ss->type.t & ~VT_BTYPE)
                    | (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
            }
        } else {
            c = 0; /* becomes 1 once a real member has been seen */
            flexible = 0;
            while (tok != '}') {
                if (tok == TOK_STATIC_ASSERT) {
                    do_Static_assert();
                    continue;
                }
                if (!parse_btype(&btype, &ad1, 0)) {
                    skip(';');
                    continue;
                }
                while (1) {
                    if (flexible)
                        tcc_error("flexible array member '%s' not at the end of struct",
                                  get_tok_str(v, NULL));
                    bit_size = -1;
                    v = 0;
                    type1 = btype;
                    if (tok != ':') {
                        if (tok != ';')
                            type_decl(&type1, &ad1, &v, TYPE_DIRECT);
                        if (v == 0) {
                            if ((type1.t & VT_BTYPE) != VT_STRUCT)
                                expect("identifier");
                            else {
                                int v = btype.ref->v;
                                if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
                                    if (tcc_state->ms_extensions == 0)
                                        expect("identifier");
                                }
                            }
                        }
                        if (type_size(&type1, &align) < 0) {
                            if ((u == VT_STRUCT) && (type1.t & VT_ARRAY) && c)
                                flexible = 1;
                            else
                                tcc_error("field '%s' has incomplete type",
                                          get_tok_str(v, NULL));
                        }
                        if ((type1.t & VT_BTYPE) == VT_FUNC ||
                            (type1.t & VT_BTYPE) == VT_VOID ||
                            (type1.t & VT_STORAGE))
                            tcc_error("invalid type for '%s'",
                                      get_tok_str(v, NULL));
                    }
                    if (tok == ':') {
                        next();
                        bit_size = expr_const();
                        /* XXX: handle v = 0 case for messages */
                        if (bit_size < 0)
                            tcc_error("negative width in bit-field '%s'",
                                      get_tok_str(v, NULL));
                        if (v && bit_size == 0)
                            tcc_error("zero width for bit-field '%s'",
                                      get_tok_str(v, NULL));
                        parse_attribute(&ad1);
                    }
                    size = type_size(&type1, &align);
                    if (bit_size >= 0) {
                        bt = type1.t & VT_BTYPE;
                        if (bt != VT_INT &&
                            bt != VT_BYTE &&
                            bt != VT_SHORT &&
                            bt != VT_BOOL &&
                            bt != VT_LLONG)
                            tcc_error("bitfields must have scalar type");
                        bsize = size * 8;
                        if (bit_size > bsize) {
                            tcc_error("width of '%s' exceeds its type",
                                      get_tok_str(v, NULL));
                        } else if (bit_size == bsize
                                   && !ad.a.packed && !ad1.a.packed) {
                            /* no need for bit fields */
                            ;
                        } else if (bit_size == 64) {
                            tcc_error("field width 64 not implemented");
                        } else {
                            type1.t = (type1.t & ~VT_STRUCT_MASK)
                                | VT_BITFIELD
                                | (bit_size << (VT_STRUCT_SHIFT + 6));
                        }
                    }
                    if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
                        /* Remember we've seen a real field to check
                           for placement of flexible array member. */
                        c = 1;
                    }
                    /* If member is a struct or bit-field, enforce
                       placing into the struct (as anonymous). */
                    if (v == 0 &&
                        ((type1.t & VT_BTYPE) == VT_STRUCT ||
                         bit_size >= 0)) {
                        v = anon_sym++;
                    }
                    if (v) {
                        ss = sym_push(v | SYM_FIELD, &type1, 0, 0);
                        ss->a = ad1.a;
                        *ps = ss;
                        ps = &ss->next;
                    }
                    if (tok == ';' || tok == TOK_EOF)
                        break;
                    skip(',');
                }
                skip(';');
            }
            skip('}');
            parse_attribute(&ad);
            if (ad.cleanup_func) {
                tcc_warning("attribute '__cleanup__' ignored on type");
            }
            check_fields(type, 1);
            check_fields(type, 0);
            struct_layout(type, &ad);
            if (debug_modes)
                tcc_debug_fix_anon(tcc_state, type);
        }
    }
}
4554 static void sym_to_attr(AttributeDef *ad, Sym *s)
4556 merge_symattr(&ad->a, &s->a);
4557 merge_funcattr(&ad->f, &s->f);
4560 /* Add type qualifiers to a type. If the type is an array then the qualifiers
4561 are added to the element type, copied because it could be a typedef. */
4562 static void parse_btype_qualify(CType *type, int qualifiers)
4564 while (type->t & VT_ARRAY) {
4565 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
4566 type = &type->ref->type;
4568 type->t |= qualifiers;
/* return 0 if no type declaration. otherwise, return the basic type
   and skip it.
 */
static int parse_btype(CType *type, AttributeDef *ad, int ignore_label)
{
    /* Parse declaration specifiers: basic type keywords, qualifiers,
       storage classes, attributes, typeof and typedef names.  The
       accumulated type is stored in *type / *ad; returns nonzero if
       any specifier was consumed.  With ignore_label != 0 an
       identifier followed by ':' is left alone (it is a label). */
    int t, u, bt, st, type_found, typespec_found, g, n;
    Sym *s;
    CType type1;

    memset(ad, 0, sizeof(AttributeDef));
    type_found = 0;
    typespec_found = 0;
    t = VT_INT;
    /* bt/st track the base and size type keywords seen so far so that
       invalid combinations ("too many basic types") can be rejected */
    bt = st = -1;
    type->ref = NULL;

    while(1) {
        switch(tok) {
        case TOK_EXTENSION:
            /* currently, we really ignore extension */
            next();
            continue;

        /* basic types */
        case TOK_CHAR:
            u = VT_BYTE;
        basic_type:
            next();
        basic_type1:
            if (u == VT_SHORT || u == VT_LONG) {
                if (st != -1 || (bt != -1 && bt != VT_INT))
                    tmbt: tcc_error("too many basic types");
                st = u;
            } else {
                if (bt != -1 || (st != -1 && u != VT_INT))
                    goto tmbt;
                bt = u;
            }
            if (u != VT_INT)
                t = (t & ~(VT_BTYPE|VT_LONG)) | u;
            typespec_found = 1;
            break;
        case TOK_VOID:
            u = VT_VOID;
            goto basic_type;
        case TOK_SHORT:
            u = VT_SHORT;
            goto basic_type;
        case TOK_INT:
            u = VT_INT;
            goto basic_type;
        case TOK_ALIGNAS:
            /* C11 _Alignas: either _Alignas(type) or _Alignas(constant) */
            { int n;
              AttributeDef ad1;
              next();
              skip('(');
              memset(&ad1, 0, sizeof(AttributeDef));
              if (parse_btype(&type1, &ad1, 0)) {
                  type_decl(&type1, &ad1, &n, TYPE_ABSTRACT);
                  if (ad1.a.aligned)
                      n = 1 << (ad1.a.aligned - 1);
                  else
                      type_size(&type1, &n);
              } else {
                  n = expr_const();
                  if (n < 0 || (n & (n - 1)) != 0)
                      tcc_error("alignment must be a positive power of two");
              }
              skip(')');
              ad->a.aligned = exact_log2p1(n);
            }
            continue;
        case TOK_LONG:
            if ((t & VT_BTYPE) == VT_DOUBLE) {
                t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
            } else if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
                t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LLONG;
            } else {
                u = VT_LONG;
                goto basic_type;
            }
            next();
            break;
#ifdef TCC_TARGET_ARM64
        case TOK_UINT128:
            /* GCC's __uint128_t appears in some Linux header files. Make it a
               synonym for long double to get the size and alignment right. */
            u = VT_LDOUBLE;
            goto basic_type;
#endif
        case TOK_BOOL:
            u = VT_BOOL;
            goto basic_type;
        case TOK_COMPLEX:
            tcc_error("_Complex is not yet supported");
        case TOK_FLOAT:
            u = VT_FLOAT;
            goto basic_type;
        case TOK_DOUBLE:
            if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
                t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
            } else {
                u = VT_DOUBLE;
                goto basic_type;
            }
            next();
            break;
        case TOK_ENUM:
            struct_decl(&type1, VT_ENUM);
        basic_type2:
            u = type1.t;
            type->ref = type1.ref;
            goto basic_type1;
        case TOK_STRUCT:
            struct_decl(&type1, VT_STRUCT);
            goto basic_type2;
        case TOK_UNION:
            struct_decl(&type1, VT_UNION);
            goto basic_type2;

        /* type modifiers */
        case TOK__Atomic:
            next();
            type->t = t;
            parse_btype_qualify(type, VT_ATOMIC);
            t = type->t;
            if (tok == '(') {
                /* _Atomic(type) form */
                parse_expr_type(&type1);
                /* remove all storage modifiers except typedef */
                type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
                if (type1.ref)
                    sym_to_attr(ad, type1.ref);
                goto basic_type2;
            }
            break;
        case TOK_CONST1:
        case TOK_CONST2:
        case TOK_CONST3:
            type->t = t;
            parse_btype_qualify(type, VT_CONSTANT);
            t = type->t;
            next();
            break;
        case TOK_VOLATILE1:
        case TOK_VOLATILE2:
        case TOK_VOLATILE3:
            type->t = t;
            parse_btype_qualify(type, VT_VOLATILE);
            t = type->t;
            next();
            break;
        case TOK_SIGNED1:
        case TOK_SIGNED2:
        case TOK_SIGNED3:
            if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
                tcc_error("signed and unsigned modifier");
            t |= VT_DEFSIGN;
            next();
            typespec_found = 1;
            break;
        case TOK_REGISTER:
        case TOK_AUTO:
        case TOK_RESTRICT1:
        case TOK_RESTRICT2:
        case TOK_RESTRICT3:
            /* accepted and ignored */
            next();
            break;
        case TOK_UNSIGNED:
            if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
                tcc_error("signed and unsigned modifier");
            t |= VT_DEFSIGN | VT_UNSIGNED;
            next();
            typespec_found = 1;
            break;

        /* storage */
        case TOK_EXTERN:
            g = VT_EXTERN;
            goto storage;
        case TOK_STATIC:
            g = VT_STATIC;
            goto storage;
        case TOK_TYPEDEF:
            g = VT_TYPEDEF;
            goto storage;
        storage:
            if (t & (VT_EXTERN|VT_STATIC|VT_TYPEDEF) & ~g)
                tcc_error("multiple storage classes");
            t |= g;
            next();
            break;
        case TOK_INLINE1:
        case TOK_INLINE2:
        case TOK_INLINE3:
            t |= VT_INLINE;
            next();
            break;
        case TOK_NORETURN3:
            next();
            ad->f.func_noreturn = 1;
            break;
        /* GNUC attribute */
        case TOK_ATTRIBUTE1:
        case TOK_ATTRIBUTE2:
            parse_attribute(ad);
            if (ad->attr_mode) {
                /* __attribute__((mode(...))) overrides the base type */
                u = ad->attr_mode -1;
                t = (t & ~(VT_BTYPE|VT_LONG)) | u;
            }
            continue;
        /* GNUC typeof */
        case TOK_TYPEOF1:
        case TOK_TYPEOF2:
        case TOK_TYPEOF3:
            next();
            parse_expr_type(&type1);
            /* remove all storage modifiers except typedef */
            type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
            if (type1.ref)
                sym_to_attr(ad, type1.ref);
            goto basic_type2;
        case TOK_THREAD_LOCAL:
            tcc_error("_Thread_local is not implemented");
        default:
            if (typespec_found)
                goto the_end;
            s = sym_find(tok);
            if (!s || !(s->type.t & VT_TYPEDEF))
                goto the_end;

            n = tok, next();
            if (tok == ':' && ignore_label) {
                /* ignore if it's a label */
                unget_tok(n);
                goto the_end;
            }

            /* apply qualifiers collected so far to the typedef'd type */
            t &= ~(VT_BTYPE|VT_LONG);
            u = t & ~(VT_CONSTANT | VT_VOLATILE), t ^= u;
            type->t = (s->type.t & ~VT_TYPEDEF) | u;
            type->ref = s->type.ref;
            if (t)
                parse_btype_qualify(type, t);
            t = type->t;
            /* get attributes from typedef */
            sym_to_attr(ad, s);
            typespec_found = 1;
            st = bt = -2;
            break;
        }
        type_found = 1;
    }
the_end:
    if (tcc_state->char_is_unsigned) {
        if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
            t |= VT_UNSIGNED;
    }
    /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
    bt = t & (VT_BTYPE|VT_LONG);
    if (bt == VT_LONG)
        t |= LONG_SIZE == 8 ? VT_LLONG : VT_INT;
#ifdef TCC_USING_DOUBLE_FOR_LDOUBLE
    if (bt == VT_LDOUBLE)
        t = (t & ~(VT_BTYPE|VT_LONG)) | (VT_DOUBLE|VT_LONG);
#endif
    type->t = t;
    return type_found;
}
4840 /* convert a function parameter type (array to pointer and function to
4841 function pointer) */
4842 static inline void convert_parameter_type(CType *pt)
4844 /* remove const and volatile qualifiers (XXX: const could be used
4845 to indicate a const function parameter */
4846 pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
4847 /* array must be transformed to pointer according to ANSI C */
4848 pt->t &= ~VT_ARRAY;
4849 if ((pt->t & VT_BTYPE) == VT_FUNC) {
4850 mk_pointer(pt);
4854 ST_FUNC CString* parse_asm_str(void)
4856 skip('(');
4857 return parse_mult_str("string constant");
4860 /* Parse an asm label and return the token */
4861 static int asm_label_instr(void)
4863 int v;
4864 char *astr;
4866 next();
4867 astr = parse_asm_str()->data;
4868 skip(')');
4869 #ifdef ASM_DEBUG
4870 printf("asm_alias: \"%s\"\n", astr);
4871 #endif
4872 v = tok_alloc_const(astr);
4873 return v;
/* Parse the "post" part of a declarator: a function parameter list
   "(...)" or an array suffix "[...]", updating *type in place.
   Returns 1 when such a suffix was handled, 0 when tok was '(' but it
   introduces a nested declarator that the caller must parse itself
   (only possible when 'td' carries TYPE_DIRECT/TYPE_ABSTRACT). */
4876 static int post_type(CType *type, AttributeDef *ad, int storage, int td)
4878 int n, l, t1, arg_size, align;
4879 Sym **plast, *s, *first;
4880 AttributeDef ad1;
4881 CType pt;
4882 TokenString *vla_array_tok = NULL;
4883 int *vla_array_str = NULL;
4885 if (tok == '(') {
4886 /* function type, or recursive declarator (return if so) */
4887 next();
4888 if (TYPE_DIRECT == (td & (TYPE_DIRECT|TYPE_ABSTRACT)))
4889 return 0;
4890 if (tok == ')')
4891 l = 0;
4892 else if (parse_btype(&pt, &ad1, 0))
4893 l = FUNC_NEW;
4894 else if (td & (TYPE_DIRECT|TYPE_ABSTRACT)) {
4895 merge_attr (ad, &ad1);
4896 return 0;
4897 } else
4898 l = FUNC_OLD;
/* parameter list: parameters are temporarily pushed onto the symbol
   stack in their own scope so that later parameters (e.g. VLA bounds)
   can refer to earlier ones */
4900 first = NULL;
4901 plast = &first;
4902 arg_size = 0;
4903 ++local_scope;
4904 if (l) {
4905 for(;;) {
4906 /* read param name and compute offset */
4907 if (l != FUNC_OLD) {
4908 if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
4909 break;
4910 type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT | TYPE_PARAM);
4911 if ((pt.t & VT_BTYPE) == VT_VOID)
4912 tcc_error("parameter declared as void");
4913 if (n == 0)
4914 n = SYM_FIELD;
4915 } else {
4916 n = tok;
4917 pt.t = VT_VOID; /* invalid type */
4918 pt.ref = NULL;
4919 next();
4921 if (n < TOK_UIDENT)
4922 expect("identifier");
4923 convert_parameter_type(&pt);
4924 arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
4925 /* these symbols may be evaluated for VLArrays (see below, under
4926 nocode_wanted) which is why we push them here as normal symbols
4927 temporarily. Example: int func(int a, int b[++a]); */
4928 s = sym_push(n, &pt, VT_LOCAL|VT_LVAL, 0);
4929 *plast = s;
4930 plast = &s->next;
4931 if (tok == ')')
4932 break;
4933 skip(',');
4934 if (l == FUNC_NEW && tok == TOK_DOTS) {
4935 l = FUNC_ELLIPSIS;
4936 next();
4937 break;
4939 if (l == FUNC_NEW && !parse_btype(&pt, &ad1, 0))
4940 tcc_error("invalid type");
4942 } else
4943 /* if no parameters, then old type prototype */
4944 l = FUNC_OLD;
4945 skip(')');
4946 /* remove parameter symbols from token table, keep on stack */
4947 if (first) {
4948 sym_pop(local_stack ? &local_stack : &global_stack, first->prev, 1);
4949 for (s = first; s; s = s->next)
4950 s->v |= SYM_FIELD;
4952 --local_scope;
4953 /* NOTE: const is ignored in returned type as it has a special
4954 meaning in gcc / C++ */
4955 type->t &= ~VT_CONSTANT;
4956 /* some ancient pre-K&R C allows a function to return an array
4957 and the array brackets to be put after the arguments, such
4958 that "int c()[]" means something like "int[] c()" */
4959 if (tok == '[') {
4960 next();
4961 skip(']'); /* only handle simple "[]" */
4962 mk_pointer(type);
4964 /* we push a anonymous symbol which will contain the function prototype */
4965 ad->f.func_args = arg_size;
4966 ad->f.func_type = l;
4967 s = sym_push(SYM_FIELD, type, 0, 0);
4968 s->a = ad->a;
4969 s->f = ad->f;
4970 s->next = first;
4971 type->t = VT_FUNC;
4972 type->ref = s;
4973 } else if (tok == '[') {
4974 int saved_nocode_wanted = nocode_wanted;
4975 /* array definition */
4976 next();
4977 n = -1;
4978 t1 = 0;
4979 if (td & TYPE_PARAM) while (1) {
4980 /* XXX The optional type-quals and static should only be accepted
4981 in parameter decls. The '*' as well, and then even only
4982 in prototypes (not function defs). */
4983 switch (tok) {
4984 case TOK_RESTRICT1: case TOK_RESTRICT2: case TOK_RESTRICT3:
4985 case TOK_CONST1:
4986 case TOK_VOLATILE1:
4987 case TOK_STATIC:
4988 case '*':
4989 next();
4990 continue;
4991 default:
4992 break;
4994 if (tok != ']') {
4995 /* Code generation is not done now but has to be done
4996 at start of function. Save code here for later use. */
4997 nocode_wanted = 1;
4998 skip_or_save_block(&vla_array_tok);
4999 unget_tok(0);
5000 vla_array_str = vla_array_tok->str;
5001 begin_macro(vla_array_tok, 2);
5002 next();
5003 gexpr();
5004 end_macro();
5005 next();
5006 goto check;
5008 break;
5010 } else if (tok != ']') {
5011 if (!local_stack || (storage & VT_STATIC))
5012 vpushi(expr_const());
5013 else {
5014 /* VLAs (which can only happen with local_stack && !VT_STATIC)
5015 length must always be evaluated, even under nocode_wanted,
5016 so that its size slot is initialized (e.g. under sizeof
5017 or typeof). */
5018 nocode_wanted = 0;
5019 gexpr();
/* size expression result is now on the value stack; classify it */
5021 check:
5022 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5023 n = vtop->c.i;
5024 if (n < 0)
5025 tcc_error("invalid array size");
5026 } else {
5027 if (!is_integer_btype(vtop->type.t & VT_BTYPE))
5028 tcc_error("size of variable length array should be an integer");
5029 n = 0;
5030 t1 = VT_VLA;
5033 skip(']');
5034 /* parse next post type */
5035 post_type(type, ad, storage, (td & ~(TYPE_DIRECT|TYPE_ABSTRACT)) | TYPE_NEST);
5037 if ((type->t & VT_BTYPE) == VT_FUNC)
5038 tcc_error("declaration of an array of functions");
5039 if ((type->t & VT_BTYPE) == VT_VOID
5040 || type_size(type, &align) < 0)
5041 tcc_error("declaration of an array of incomplete type elements");
5043 t1 |= type->t & VT_VLA;
5045 if (t1 & VT_VLA) {
/* VLA: reserve a stack slot for the runtime size and store
   (element-count * element-size) into it now */
5046 if (n < 0) {
5047 if (td & TYPE_NEST)
5048 tcc_error("need explicit inner array size in VLAs");
5050 else {
5051 loc -= type_size(&int_type, &align);
5052 loc &= -align;
5053 n = loc;
5055 vpush_type_size(type, &align);
5056 gen_op('*');
5057 vset(&int_type, VT_LOCAL|VT_LVAL, n);
5058 vswap();
5059 vstore();
5062 if (n != -1)
5063 vpop();
5064 nocode_wanted = saved_nocode_wanted;
5066 /* we push an anonymous symbol which will contain the array
5067 element type */
5068 s = sym_push(SYM_FIELD, type, 0, n);
5069 type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
5070 type->ref = s;
5072 if (vla_array_str) {
5073 if (t1 & VT_VLA)
5074 s->vla_array_str = vla_array_str;
5075 else
5076 tok_str_free_str(vla_array_str);
5079 return 1;
5082 /* Parse a type declarator (except basic type), and return the type
5083 in 'type'. 'td' is a bitmask indicating which kind of type decl is
5084 expected. 'type' should contain the basic type. 'ad' is the
5085 attribute definition of the basic type. It can be modified by
5086 type_decl(). If this (possibly abstract) declarator is a pointer chain
5087 it returns the innermost pointed to type (equals *type, but is a different
5088 pointer), otherwise returns type itself, that's used for recursive calls. */
5089 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td)
5091 CType *post, *ret;
5092 int qualifiers, storage;
5094 /* recursive type, remove storage bits first, apply them later again */
5095 storage = type->t & VT_STORAGE;
5096 type->t &= ~VT_STORAGE;
5097 post = ret = type;
/* pointer chain: each '*' may be followed by qualifiers/attributes */
5099 while (tok == '*') {
5100 qualifiers = 0;
5101 redo:
5102 next();
5103 switch(tok) {
5104 case TOK__Atomic:
5105 qualifiers |= VT_ATOMIC;
5106 goto redo;
5107 case TOK_CONST1:
5108 case TOK_CONST2:
5109 case TOK_CONST3:
5110 qualifiers |= VT_CONSTANT;
5111 goto redo;
5112 case TOK_VOLATILE1:
5113 case TOK_VOLATILE2:
5114 case TOK_VOLATILE3:
5115 qualifiers |= VT_VOLATILE;
5116 goto redo;
5117 case TOK_RESTRICT1:
5118 case TOK_RESTRICT2:
5119 case TOK_RESTRICT3:
/* 'restrict' is accepted but has no effect on the type */
5120 goto redo;
5121 /* XXX: clarify attribute handling */
5122 case TOK_ATTRIBUTE1:
5123 case TOK_ATTRIBUTE2:
5124 parse_attribute(ad);
5125 break;
5127 mk_pointer(type);
5128 type->t |= qualifiers;
5129 if (ret == type)
5130 /* innermost pointed to type is the one for the first derivation */
5131 ret = pointed_type(type);
5134 if (tok == '(') {
5135 /* This is possibly a parameter type list for abstract declarators
5136 ('int ()'), use post_type for testing this. */
5137 if (!post_type(type, ad, 0, td)) {
5138 /* It's not, so it's a nested declarator, and the post operations
5139 apply to the innermost pointed to type (if any). */
5140 /* XXX: this is not correct to modify 'ad' at this point, but
5141 the syntax is not clear */
5142 parse_attribute(ad);
5143 post = type_decl(type, ad, v, td);
5144 skip(')');
5145 } else
5146 goto abstract;
5147 } else if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
5148 /* type identifier */
5149 *v = tok;
5150 next();
5151 } else {
5152 abstract:
5153 if (!(td & TYPE_ABSTRACT))
5154 expect("identifier");
5155 *v = 0;
/* the array/function suffixes attach to the innermost nested type */
5157 post_type(post, ad, post != ret ? 0 : storage,
5158 td & ~(TYPE_DIRECT|TYPE_ABSTRACT));
5159 parse_attribute(ad);
5160 type->t |= storage;
5161 return ret;
5164 /* indirection with full error checking and bound check */
5165 ST_FUNC void indir(void)
5167 if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
5168 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
5169 return;
5170 expect("pointer");
5172 if (vtop->r & VT_LVAL)
5173 gv(RC_INT);
5174 vtop->type = *pointed_type(&vtop->type);
5175 /* Arrays and functions are never lvalues */
5176 if (!(vtop->type.t & (VT_ARRAY | VT_VLA))
5177 && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
5178 vtop->r |= VT_LVAL;
5179 /* if bound checking, the referenced pointer must be checked */
5180 #ifdef CONFIG_TCC_BCHECK
5181 if (tcc_state->do_bounds_check)
5182 vtop->r |= VT_MUSTBOUND;
5183 #endif
5187 /* pass a parameter to a function and do type checking and casting */
5188 static void gfunc_param_typed(Sym *func, Sym *arg)
5190 int func_type;
5191 CType type;
5193 func_type = func->f.func_type;
5194 if (func_type == FUNC_OLD ||
5195 (func_type == FUNC_ELLIPSIS && arg == NULL)) {
5196 /* default casting : only need to convert float to double */
5197 if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
5198 gen_cast_s(VT_DOUBLE);
5199 } else if (vtop->type.t & VT_BITFIELD) {
5200 type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
5201 type.ref = vtop->type.ref;
5202 gen_cast(&type);
5203 } else if (vtop->r & VT_MUSTCAST) {
5204 force_charshort_cast();
5206 } else if (arg == NULL) {
5207 tcc_error("too many arguments to function");
5208 } else {
5209 type = arg->type;
5210 type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
5211 gen_assign_cast(&type);
5215 /* parse an expression and return its type without any side effect. */
5216 static void expr_type(CType *type, void (*expr_fn)(void))
5218 nocode_wanted++;
5219 expr_fn();
5220 *type = vtop->type;
5221 vpop();
5222 nocode_wanted--;
5225 /* parse an expression of the form '(type)' or '(expr)' and return its
5226 type */
5227 static void parse_expr_type(CType *type)
5229 int n;
5230 AttributeDef ad;
5232 skip('(');
5233 if (parse_btype(type, &ad, 0)) {
5234 type_decl(type, &ad, &n, TYPE_ABSTRACT);
5235 } else {
5236 expr_type(type, gexpr);
5238 skip(')');
5241 static void parse_type(CType *type)
5243 AttributeDef ad;
5244 int n;
5246 if (!parse_btype(type, &ad, 0)) {
5247 expect("type");
5249 type_decl(type, &ad, &n, TYPE_ABSTRACT);
5252 static void parse_builtin_params(int nc, const char *args)
5254 char c, sep = '(';
5255 CType type;
5256 if (nc)
5257 nocode_wanted++;
5258 next();
5259 if (*args == 0)
5260 skip(sep);
5261 while ((c = *args++)) {
5262 skip(sep);
5263 sep = ',';
5264 if (c == 't') {
5265 parse_type(&type);
5266 vpush(&type);
5267 continue;
5269 expr_eq();
5270 type.ref = NULL;
5271 type.t = 0;
5272 switch (c) {
5273 case 'e':
5274 continue;
5275 case 'V':
5276 type.t = VT_CONSTANT;
5277 case 'v':
5278 type.t |= VT_VOID;
5279 mk_pointer (&type);
5280 break;
5281 case 'S':
5282 type.t = VT_CONSTANT;
5283 case 's':
5284 type.t |= char_type.t;
5285 mk_pointer (&type);
5286 break;
5287 case 'i':
5288 type.t = VT_INT;
5289 break;
5290 case 'l':
5291 type.t = VT_SIZE_T;
5292 break;
5293 default:
5294 break;
5296 gen_assign_cast(&type);
5298 skip(')');
5299 if (nc)
5300 nocode_wanted--;
/* Parse a __atomic_* builtin call and lower it to a call of the
   runtime helper "<name>_<size>" (e.g. __atomic_load_4). 'atok' is the
   builtin token; the matching template string drives how each argument
   is parsed/cast and what the return type is. */
5303 static void parse_atomic(int atok)
5305 int size, align, arg, t, save = 0;
5306 CType *atom, *atom_ptr, ct = {0};
5307 SValue store;
5308 char buf[40];
5309 static const char *const templates[] = {
5311 * Each entry consists of callback and function template.
5312 * The template represents argument types and return type.
5314 * ? void (return-only)
5315 * b bool
5316 * a atomic
5317 * A read-only atomic
5318 * p pointer to memory
5319 * v value
5320 * l load pointer
5321 * s save pointer
5322 * m memory model
5325 /* keep in order of appearance in tcctok.h: */
5326 /* __atomic_store */ "alm.?",
5327 /* __atomic_load */ "Asm.v",
5328 /* __atomic_exchange */ "alsm.v",
5329 /* __atomic_compare_exchange */ "aplbmm.b",
5330 /* __atomic_fetch_add */ "avm.v",
5331 /* __atomic_fetch_sub */ "avm.v",
5332 /* __atomic_fetch_or */ "avm.v",
5333 /* __atomic_fetch_xor */ "avm.v",
5334 /* __atomic_fetch_and */ "avm.v",
5335 /* __atomic_fetch_nand */ "avm.v",
5336 /* __atomic_add_fetch */ "avm.v",
5337 /* __atomic_sub_fetch */ "avm.v",
5338 /* __atomic_or_fetch */ "avm.v",
5339 /* __atomic_xor_fetch */ "avm.v",
5340 /* __atomic_and_fetch */ "avm.v",
5341 /* __atomic_nand_fetch */ "avm.v"
5343 const char *template = templates[(atok - TOK___atomic_store)];
5345 atom = atom_ptr = NULL;
5346 size = 0; /* pacify compiler */
5347 next();
5348 skip('(');
/* parse one argument per template letter, up to the '.' separator */
5349 for (arg = 0;;) {
5350 expr_eq();
5351 switch (template[arg]) {
5352 case 'a':
5353 case 'A':
/* the atomic pointer argument fixes 'atom'/'size' for the rest */
5354 atom_ptr = &vtop->type;
5355 if ((atom_ptr->t & VT_BTYPE) != VT_PTR)
5356 expect("pointer");
5357 atom = pointed_type(atom_ptr);
5358 size = type_size(atom, &align);
5359 if (size > 8
5360 || (size & (size - 1))
5361 || (atok > TOK___atomic_compare_exchange
5362 && (0 == btype_size(atom->t & VT_BTYPE)
5363 || (atom->t & VT_BTYPE) == VT_PTR)))
5364 expect("integral or integer-sized pointer target type");
5365 /* GCC does not care either: */
5366 /* if (!(atom->t & VT_ATOMIC))
5367 tcc_warning("pointer target declaration is missing '_Atomic'"); */
5368 break;
5370 case 'p':
5371 if ((vtop->type.t & VT_BTYPE) != VT_PTR
5372 || type_size(pointed_type(&vtop->type), &align) != size)
5373 tcc_error("pointer target type mismatch in argument %d", arg + 1);
5374 gen_assign_cast(atom_ptr);
5375 break;
5376 case 'v':
5377 gen_assign_cast(atom);
5378 break;
5379 case 'l':
5380 indir();
5381 gen_assign_cast(atom);
5382 break;
5383 case 's':
/* destination pointer: remember where to store the result */
5384 save = 1;
5385 indir();
5386 store = *vtop;
5387 vpop();
5388 break;
5389 case 'm':
5390 gen_assign_cast(&int_type);
5391 break;
5392 case 'b':
5393 ct.t = VT_BOOL;
5394 gen_assign_cast(&ct);
5395 break;
5397 if ('.' == template[++arg])
5398 break;
5399 skip(',');
5401 skip(')');
/* the letter after '.' is the return type of the helper */
5403 ct.t = VT_VOID;
5404 switch (template[arg + 1]) {
5405 case 'b':
5406 ct.t = VT_BOOL;
5407 break;
5408 case 'v':
5409 ct = *atom;
5410 break;
5413 sprintf(buf, "%s_%d", get_tok_str(atok, 0), size);
5414 vpush_helper_func(tok_alloc_const(buf));
5415 vrott(arg - save + 1);
5416 gfunc_call(arg - save);
5418 vpush(&ct);
5419 PUT_R_RET(vtop, ct.t);
5420 t = ct.t & VT_BTYPE;
5421 if (t == VT_BYTE || t == VT_SHORT || t == VT_BOOL) {
5422 #ifdef PROMOTE_RET
5423 vtop->r |= BFVAL(VT_MUSTCAST, 1);
5424 #else
5425 vtop->type.t = VT_INT;
5426 #endif
5428 gen_cast(&ct);
5429 if (save) {
/* __atomic_load/exchange: write the result through the saved pointer */
5430 vpush(&ct);
5431 *vtop = store;
5432 vswap();
5433 vstore();
/* Parse a unary expression (constants, identifiers, casts, compound
   literals, builtins, prefix operators) followed by its postfix
   operators (++/--, . -> [], function call), leaving the result on the
   value stack. */
5437 ST_FUNC void unary(void)
5439 int n, t, align, size, r, sizeof_caller;
5440 CType type;
5441 Sym *s;
5442 AttributeDef ad;
5444 /* generate line number info */
5445 if (debug_modes)
5446 tcc_debug_line(tcc_state), tcc_tcov_check_line (tcc_state, 1);
5448 sizeof_caller = in_sizeof;
5449 in_sizeof = 0;
5450 type.ref = NULL;
5451 /* XXX: GCC 2.95.3 does not generate a table although it should be
5452 better here */
5453 tok_next:
5454 switch(tok) {
5455 case TOK_EXTENSION:
5456 next();
5457 goto tok_next;
5458 case TOK_LCHAR:
5459 #ifdef TCC_TARGET_PE
5460 t = VT_SHORT|VT_UNSIGNED;
5461 goto push_tokc;
5462 #endif
5463 case TOK_CINT:
5464 case TOK_CCHAR:
5465 t = VT_INT;
5466 push_tokc:
5467 type.t = t;
5468 vsetc(&type, VT_CONST, &tokc);
5469 next();
5470 break;
5471 case TOK_CUINT:
5472 t = VT_INT | VT_UNSIGNED;
5473 goto push_tokc;
5474 case TOK_CLLONG:
5475 t = VT_LLONG;
5476 goto push_tokc;
5477 case TOK_CULLONG:
5478 t = VT_LLONG | VT_UNSIGNED;
5479 goto push_tokc;
5480 case TOK_CFLOAT:
5481 t = VT_FLOAT;
5482 goto push_tokc;
5483 case TOK_CDOUBLE:
5484 t = VT_DOUBLE;
5485 goto push_tokc;
5486 case TOK_CLDOUBLE:
5487 #ifdef TCC_USING_DOUBLE_FOR_LDOUBLE
5488 t = VT_DOUBLE | VT_LONG;
5489 #else
5490 t = VT_LDOUBLE;
5491 #endif
5492 goto push_tokc;
5493 case TOK_CLONG:
5494 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG;
5495 goto push_tokc;
5496 case TOK_CULONG:
5497 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG | VT_UNSIGNED;
5498 goto push_tokc;
5499 case TOK___FUNCTION__:
5500 if (!gnu_ext)
5501 goto tok_identifier;
5502 /* fall thru */
5503 case TOK___FUNC__:
5505 Section *sec;
5506 int len;
5507 /* special function name identifier */
5508 len = strlen(funcname) + 1;
5509 /* generate char[len] type */
5510 type.t = char_type.t;
5511 if (tcc_state->warn_write_strings & WARN_ON)
5512 type.t |= VT_CONSTANT;
5513 mk_pointer(&type);
5514 type.t |= VT_ARRAY;
5515 type.ref->c = len;
5516 sec = rodata_section;
5517 vpush_ref(&type, sec, sec->data_offset, len);
5518 if (!NODATA_WANTED)
5519 memcpy(section_ptr_add(sec, len), funcname, len);
5520 next();
5522 break;
5523 case TOK_LSTR:
5524 #ifdef TCC_TARGET_PE
5525 t = VT_SHORT | VT_UNSIGNED;
5526 #else
5527 t = VT_INT;
5528 #endif
5529 goto str_init;
5530 case TOK_STR:
5531 /* string parsing */
5532 t = char_type.t;
5533 str_init:
5534 if (tcc_state->warn_write_strings & WARN_ON)
5535 t |= VT_CONSTANT;
5536 type.t = t;
5537 mk_pointer(&type);
5538 type.t |= VT_ARRAY;
5539 memset(&ad, 0, sizeof(AttributeDef));
5540 ad.section = rodata_section;
5541 decl_initializer_alloc(&type, &ad, VT_CONST, 2, 0, 0);
5542 break;
5543 case '(':
5544 next();
5545 /* cast ? */
5546 if (parse_btype(&type, &ad, 0)) {
5547 type_decl(&type, &ad, &n, TYPE_ABSTRACT);
5548 skip(')');
5549 /* check ISOC99 compound literal */
5550 if (tok == '{') {
5551 /* data is allocated locally by default */
5552 if (global_expr)
5553 r = VT_CONST;
5554 else
5555 r = VT_LOCAL;
5556 /* all except arrays are lvalues */
5557 if (!(type.t & VT_ARRAY))
5558 r |= VT_LVAL;
5559 memset(&ad, 0, sizeof(AttributeDef));
5560 decl_initializer_alloc(&type, &ad, r, 1, 0, 0);
5561 } else {
5562 if (sizeof_caller) {
5563 vpush(&type);
5564 return;
5566 unary();
5567 gen_cast(&type);
5569 } else if (tok == '{') {
5570 int saved_nocode_wanted = nocode_wanted;
5571 if (CONST_WANTED && !NOEVAL_WANTED)
5572 expect("constant");
5573 if (0 == local_scope)
5574 tcc_error("statement expression outside of function");
5575 /* save all registers */
5576 save_regs(0);
5577 /* statement expression : we do not accept break/continue
5578 inside as GCC does. We do retain the nocode_wanted state,
5579 as statement expressions can't ever be entered from the
5580 outside, so any reactivation of code emission (from labels
5581 or loop heads) can be disabled again after the end of it. */
5582 block(1);
5583 /* If the statement expr can be entered, then we retain the current
5584 nocode_wanted state (from e.g. a 'return 0;' in the stmt-expr).
5585 If it can't be entered then the state is that from before the
5586 statement expression. */
5587 if (saved_nocode_wanted)
5588 nocode_wanted = saved_nocode_wanted;
5589 skip(')');
5590 } else {
5591 gexpr();
5592 skip(')');
5594 break;
5595 case '*':
5596 next();
5597 unary();
5598 indir();
5599 break;
5600 case '&':
5601 next();
5602 unary();
5603 /* functions names must be treated as function pointers,
5604 except for unary '&' and sizeof. Since we consider that
5605 functions are not lvalues, we only have to handle it
5606 there and in function calls. */
5607 /* arrays can also be used although they are not lvalues */
5608 if ((vtop->type.t & VT_BTYPE) != VT_FUNC &&
5609 !(vtop->type.t & (VT_ARRAY | VT_VLA)))
5610 test_lvalue();
5611 if (vtop->sym)
5612 vtop->sym->a.addrtaken = 1;
5613 mk_pointer(&vtop->type);
5614 gaddrof();
5615 break;
5616 case '!':
5617 next();
5618 unary();
5619 gen_test_zero(TOK_EQ);
5620 break;
5621 case '~':
5622 next();
5623 unary();
5624 vpushi(-1);
5625 gen_op('^');
5626 break;
5627 case '+':
5628 next();
5629 unary();
5630 if ((vtop->type.t & VT_BTYPE) == VT_PTR)
5631 tcc_error("pointer not accepted for unary plus");
5632 /* In order to force cast, we add zero, except for floating point
5633 where we really need an noop (otherwise -0.0 will be transformed
5634 into +0.0). */
5635 if (!is_float(vtop->type.t)) {
5636 vpushi(0);
5637 gen_op('+');
5639 break;
5640 case TOK_SIZEOF:
5641 case TOK_ALIGNOF1:
5642 case TOK_ALIGNOF2:
5643 case TOK_ALIGNOF3:
5644 t = tok;
5645 next();
5646 in_sizeof++;
5647 expr_type(&type, unary); /* Perform a in_sizeof = 0; */
5648 if (t == TOK_SIZEOF) {
5649 vpush_type_size(&type, &align);
5650 gen_cast_s(VT_SIZE_T);
5651 } else {
5652 type_size(&type, &align);
5653 s = NULL;
5654 if (vtop[1].r & VT_SYM)
5655 s = vtop[1].sym; /* hack: accessing previous vtop */
5656 if (s && s->a.aligned)
5657 align = 1 << (s->a.aligned - 1);
5658 vpushs(align);
5660 break;
5662 case TOK_builtin_expect:
5663 /* __builtin_expect is a no-op for now */
5664 parse_builtin_params(0, "ee");
5665 vpop();
5666 break;
5667 case TOK_builtin_types_compatible_p:
5668 parse_builtin_params(0, "tt");
5669 vtop[-1].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
5670 vtop[0].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
5671 n = is_compatible_types(&vtop[-1].type, &vtop[0].type);
5672 vtop -= 2;
5673 vpushi(n);
5674 break;
5675 case TOK_builtin_choose_expr:
5677 int64_t c;
5678 next();
5679 skip('(');
5680 c = expr_const64();
5681 skip(',');
5682 if (!c) {
5683 nocode_wanted++;
5685 expr_eq();
5686 if (!c) {
5687 vpop();
5688 nocode_wanted--;
5690 skip(',');
5691 if (c) {
5692 nocode_wanted++;
5694 expr_eq();
5695 if (c) {
5696 vpop();
5697 nocode_wanted--;
5699 skip(')');
5701 break;
5702 case TOK_builtin_constant_p:
5703 constant_p = 1;
5704 parse_builtin_params(1, "e");
5705 n = constant_p &&
5706 (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
5707 !((vtop->r & VT_SYM) && vtop->sym->a.addrtaken);
5708 vtop--;
5709 vpushi(n);
5710 break;
5711 case TOK_builtin_frame_address:
5712 case TOK_builtin_return_address:
5714 int tok1 = tok;
5715 int64_t level;
5716 next();
5717 skip('(');
5718 level = expr_const64();
5719 if (level < 0) {
5720 tcc_error("%s only takes positive integers",
5721 tok1 == TOK_builtin_return_address ?
5722 "__builtin_return_address" :
5723 "__builtin_frame_address");
5725 skip(')');
5726 type.t = VT_VOID;
5727 mk_pointer(&type);
5728 vset(&type, VT_LOCAL, 0); /* local frame */
5729 while (level--) {
5730 #ifdef TCC_TARGET_RISCV64
5731 vpushi(2*PTR_SIZE);
5732 gen_op('-');
5733 #endif
5734 mk_pointer(&vtop->type);
5735 indir(); /* -> parent frame */
5737 if (tok1 == TOK_builtin_return_address) {
5738 // assume return address is just above frame pointer on stack
5739 #ifdef TCC_TARGET_ARM
5740 vpushi(2*PTR_SIZE);
5741 gen_op('+');
5742 #elif defined TCC_TARGET_RISCV64
5743 vpushi(PTR_SIZE);
5744 gen_op('-');
5745 #else
5746 vpushi(PTR_SIZE);
5747 gen_op('+');
5748 #endif
5749 mk_pointer(&vtop->type);
5750 indir();
5753 break;
5754 #ifdef TCC_TARGET_RISCV64
5755 case TOK_builtin_va_start:
5756 parse_builtin_params(0, "ee");
5757 r = vtop->r & VT_VALMASK;
5758 if (r == VT_LLOCAL)
5759 r = VT_LOCAL;
5760 if (r != VT_LOCAL)
5761 tcc_error("__builtin_va_start expects a local variable");
5762 gen_va_start();
5763 vstore();
5764 break;
5765 #endif
5766 #ifdef TCC_TARGET_X86_64
5767 #ifdef TCC_TARGET_PE
5768 case TOK_builtin_va_start:
5769 parse_builtin_params(0, "ee");
5770 r = vtop->r & VT_VALMASK;
5771 if (r == VT_LLOCAL)
5772 r = VT_LOCAL;
5773 if (r != VT_LOCAL)
5774 tcc_error("__builtin_va_start expects a local variable");
5775 vtop->r = r;
5776 vtop->type = char_pointer_type;
5777 vtop->c.i += 8;
5778 vstore();
5779 break;
5780 #else
5781 case TOK_builtin_va_arg_types:
5782 parse_builtin_params(0, "t");
5783 vpushi(classify_x86_64_va_arg(&vtop->type));
5784 vswap();
5785 vpop();
5786 break;
5787 #endif
5788 #endif
5790 #ifdef TCC_TARGET_ARM64
5791 case TOK_builtin_va_start: {
5792 parse_builtin_params(0, "ee");
5793 //xx check types
5794 gen_va_start();
5795 vpushi(0);
5796 vtop->type.t = VT_VOID;
5797 break;
5799 case TOK_builtin_va_arg: {
5800 parse_builtin_params(0, "et");
5801 type = vtop->type;
5802 vpop();
5803 //xx check types
5804 gen_va_arg(&type);
5805 vtop->type = type;
5806 break;
5808 case TOK___arm64_clear_cache: {
5809 parse_builtin_params(0, "ee");
5810 gen_clear_cache();
5811 vpushi(0);
5812 vtop->type.t = VT_VOID;
5813 break;
5815 #endif
5817 /* atomic operations */
5818 case TOK___atomic_store:
5819 case TOK___atomic_load:
5820 case TOK___atomic_exchange:
5821 case TOK___atomic_compare_exchange:
5822 case TOK___atomic_fetch_add:
5823 case TOK___atomic_fetch_sub:
5824 case TOK___atomic_fetch_or:
5825 case TOK___atomic_fetch_xor:
5826 case TOK___atomic_fetch_and:
5827 case TOK___atomic_fetch_nand:
5828 case TOK___atomic_add_fetch:
5829 case TOK___atomic_sub_fetch:
5830 case TOK___atomic_or_fetch:
5831 case TOK___atomic_xor_fetch:
5832 case TOK___atomic_and_fetch:
5833 case TOK___atomic_nand_fetch:
5834 parse_atomic(tok);
5835 break;
5837 /* pre operations */
5838 case TOK_INC:
5839 case TOK_DEC:
5840 t = tok;
5841 next();
5842 unary();
5843 inc(0, t);
5844 break;
5845 case '-':
5846 next();
5847 unary();
5848 if (is_float(vtop->type.t)) {
5849 gen_opif(TOK_NEG);
5850 } else {
5851 vpushi(0);
5852 vswap();
5853 gen_op('-');
5855 break;
5856 case TOK_LAND:
5857 if (!gnu_ext)
5858 goto tok_identifier;
5859 next();
5860 /* allow to take the address of a label */
5861 if (tok < TOK_UIDENT)
5862 expect("label identifier");
5863 s = label_find(tok);
5864 if (!s) {
5865 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
5866 } else {
5867 if (s->r == LABEL_DECLARED)
5868 s->r = LABEL_FORWARD;
5870 if ((s->type.t & VT_BTYPE) != VT_PTR) {
5871 s->type.t = VT_VOID;
5872 mk_pointer(&s->type);
5873 s->type.t |= VT_STATIC;
5875 vpushsym(&s->type, s);
5876 next();
5877 break;
5879 case TOK_GENERIC:
5881 CType controlling_type;
5882 int has_default = 0;
5883 int has_match = 0;
5884 int learn = 0;
5885 TokenString *str = NULL;
5886 int saved_nocode_wanted = nocode_wanted;
5887 nocode_wanted &= ~CONST_WANTED_MASK;
5889 next();
5890 skip('(');
5891 expr_type(&controlling_type, expr_eq);
5892 controlling_type.t &= ~(VT_CONSTANT | VT_VOLATILE | VT_ARRAY);
5893 if ((controlling_type.t & VT_BTYPE) == VT_FUNC)
5894 mk_pointer(&controlling_type);
5896 nocode_wanted = saved_nocode_wanted;
5898 for (;;) {
5899 learn = 0;
5900 skip(',');
5901 if (tok == TOK_DEFAULT) {
5902 if (has_default)
5903 tcc_error("too many 'default'");
5904 has_default = 1;
5905 if (!has_match)
5906 learn = 1;
5907 next();
5908 } else {
5909 AttributeDef ad_tmp;
5910 int itmp;
5911 CType cur_type;
5913 parse_btype(&cur_type, &ad_tmp, 0);
5914 type_decl(&cur_type, &ad_tmp, &itmp, TYPE_ABSTRACT);
5915 if (compare_types(&controlling_type, &cur_type, 0)) {
5916 if (has_match) {
5917 tcc_error("type match twice");
5919 has_match = 1;
5920 learn = 1;
5923 skip(':');
5924 if (learn) {
5925 if (str)
5926 tok_str_free(str);
5927 skip_or_save_block(&str);
5928 } else {
5929 skip_or_save_block(NULL);
5931 if (tok == ')')
5932 break;
5934 if (!str) {
5935 char buf[60];
5936 type_to_str(buf, sizeof buf, &controlling_type, NULL);
5937 tcc_error("type '%s' does not match any association", buf);
5939 begin_macro(str, 1);
5940 next();
5941 expr_eq();
5942 if (tok != TOK_EOF)
5943 expect(",");
5944 end_macro();
5945 next();
5946 break;
5948 // special qnan , snan and infinity values
5949 case TOK___NAN__:
5950 n = 0x7fc00000;
5951 special_math_val:
5952 vpushi(n);
5953 vtop->type.t = VT_FLOAT;
5954 next();
5955 break;
5956 case TOK___SNAN__:
5957 n = 0x7f800001;
5958 goto special_math_val;
5959 case TOK___INF__:
5960 n = 0x7f800000;
5961 goto special_math_val;
5963 default:
5964 tok_identifier:
5965 t = tok;
5966 next();
5967 if (t < TOK_UIDENT)
5968 expect("identifier");
5969 s = sym_find(t);
5970 if (!s || IS_ASM_SYM(s)) {
5971 const char *name = get_tok_str(t, NULL);
5972 if (tok != '(')
5973 tcc_error("'%s' undeclared", name);
5974 /* for simple function calls, we tolerate undeclared
5975 external reference to int() function */
5976 tcc_warning_c(warn_implicit_function_declaration)(
5977 "implicit declaration of function '%s'", name);
5978 s = external_global_sym(t, &func_old_type);
5981 r = s->r;
5982 /* A symbol that has a register is a local register variable,
5983 which starts out as VT_LOCAL value. */
5984 if ((r & VT_VALMASK) < VT_CONST)
5985 r = (r & ~VT_VALMASK) | VT_LOCAL;
5987 vset(&s->type, r, s->c);
5988 /* Point to s as backpointer (even without r&VT_SYM).
5989 Will be used by at least the x86 inline asm parser for
5990 regvars. */
5991 vtop->sym = s;
5993 if (r & VT_SYM) {
5994 vtop->c.i = 0;
5995 } else if (r == VT_CONST && IS_ENUM_VAL(s->type.t)) {
5996 vtop->c.i = s->enum_val;
5998 break;
6001 /* post operations */
6002 while (1) {
6003 if (tok == TOK_INC || tok == TOK_DEC) {
6004 inc(1, tok);
6005 next();
6006 } else if (tok == '.' || tok == TOK_ARROW || tok == TOK_CDOUBLE) {
6007 int qualifiers, cumofs = 0;
6008 /* field */
6009 if (tok == TOK_ARROW)
6010 indir();
6011 qualifiers = vtop->type.t & (VT_CONSTANT | VT_VOLATILE);
6012 test_lvalue();
6013 gaddrof();
6014 /* expect pointer on structure */
6015 if ((vtop->type.t & VT_BTYPE) != VT_STRUCT)
6016 expect("struct or union");
6017 if (tok == TOK_CDOUBLE)
6018 expect("field name");
6019 next();
6020 if (tok == TOK_CINT || tok == TOK_CUINT)
6021 expect("field name");
6022 s = find_field(&vtop->type, tok, &cumofs);
6023 /* add field offset to pointer */
6024 vtop->type = char_pointer_type; /* change type to 'char *' */
6025 vpushi(cumofs);
6026 gen_op('+');
6027 /* change type to field type, and set to lvalue */
6028 vtop->type = s->type;
6029 vtop->type.t |= qualifiers;
6030 /* an array is never an lvalue */
6031 if (!(vtop->type.t & VT_ARRAY)) {
6032 vtop->r |= VT_LVAL;
6033 #ifdef CONFIG_TCC_BCHECK
6034 /* if bound checking, the referenced pointer must be checked */
6035 if (tcc_state->do_bounds_check)
6036 vtop->r |= VT_MUSTBOUND;
6037 #endif
6039 next();
6040 } else if (tok == '[') {
6041 next();
6042 gexpr();
6043 gen_op('+');
6044 indir();
6045 skip(']');
6046 } else if (tok == '(') {
6047 SValue ret;
6048 Sym *sa;
6049 int nb_args, ret_nregs, ret_align, regsize, variadic;
6051 /* function call */
6052 if ((vtop->type.t & VT_BTYPE) != VT_FUNC) {
6053 /* pointer test (no array accepted) */
6054 if ((vtop->type.t & (VT_BTYPE | VT_ARRAY)) == VT_PTR) {
6055 vtop->type = *pointed_type(&vtop->type);
6056 if ((vtop->type.t & VT_BTYPE) != VT_FUNC)
6057 goto error_func;
6058 } else {
6059 error_func:
6060 expect("function pointer");
6062 } else {
6063 vtop->r &= ~VT_LVAL; /* no lvalue */
6065 /* get return type */
6066 s = vtop->type.ref;
6067 next();
6068 sa = s->next; /* first parameter */
6069 nb_args = regsize = 0;
6070 ret.r2 = VT_CONST;
6071 /* compute first implicit argument if a structure is returned */
6072 if ((s->type.t & VT_BTYPE) == VT_STRUCT) {
6073 variadic = (s->f.func_type == FUNC_ELLIPSIS);
6074 ret_nregs = gfunc_sret(&s->type, variadic, &ret.type,
6075 &ret_align, &regsize);
6076 if (ret_nregs <= 0) {
6077 /* get some space for the returned structure */
6078 size = type_size(&s->type, &align);
6079 #ifdef TCC_TARGET_ARM64
6080 /* On arm64, a small struct is return in registers.
6081 It is much easier to write it to memory if we know
6082 that we are allowed to write some extra bytes, so
6083 round the allocated space up to a power of 2: */
6084 if (size < 16)
6085 while (size & (size - 1))
6086 size = (size | (size - 1)) + 1;
6087 #endif
6088 loc = (loc - size) & -align;
6089 ret.type = s->type;
6090 ret.r = VT_LOCAL | VT_LVAL;
6091 /* pass it as 'int' to avoid structure arg passing
6092 problems */
6093 vseti(VT_LOCAL, loc);
6094 #ifdef CONFIG_TCC_BCHECK
6095 if (tcc_state->do_bounds_check)
6096 --loc;
6097 #endif
6098 ret.c = vtop->c;
6099 if (ret_nregs < 0)
6100 vtop--;
6101 else
6102 nb_args++;
6104 } else {
6105 ret_nregs = 1;
6106 ret.type = s->type;
6109 if (ret_nregs > 0) {
6110 /* return in register */
6111 ret.c.i = 0;
6112 PUT_R_RET(&ret, ret.type.t);
6114 if (tok != ')') {
6115 for(;;) {
6116 expr_eq();
6117 gfunc_param_typed(s, sa);
6118 nb_args++;
6119 if (sa)
6120 sa = sa->next;
6121 if (tok == ')')
6122 break;
6123 skip(',');
6126 if (sa)
6127 tcc_error("too few arguments to function");
6128 skip(')');
6129 gfunc_call(nb_args);
6131 if (ret_nregs < 0) {
6132 vsetc(&ret.type, ret.r, &ret.c);
6133 #ifdef TCC_TARGET_RISCV64
6134 arch_transfer_ret_regs(1);
6135 #endif
6136 } else {
6137 /* return value */
6138 n = ret_nregs;
6139 while (n > 1) {
6140 int rc = reg_classes[ret.r] & ~(RC_INT | RC_FLOAT);
6141 /* We assume that when a structure is returned in multiple
6142 registers, their classes are consecutive values of the
6143 suite s(n) = 2^n */
6144 rc <<= --n;
6145 for (r = 0; r < NB_REGS; ++r)
6146 if (reg_classes[r] & rc)
6147 break;
6148 vsetc(&ret.type, r, &ret.c);
6150 vsetc(&ret.type, ret.r, &ret.c);
6151 vtop->r2 = ret.r2;
6153 /* handle packed struct return */
6154 if (((s->type.t & VT_BTYPE) == VT_STRUCT) && ret_nregs) {
6155 int addr, offset;
6157 size = type_size(&s->type, &align);
6158 /* We're writing whole regs often, make sure there's enough
6159 space. Assume register size is power of 2. */
6160 size = (size + regsize - 1) & -regsize;
6161 if (ret_align > align)
6162 align = ret_align;
6163 loc = (loc - size) & -align;
6164 addr = loc;
6165 offset = 0;
6166 for (;;) {
6167 vset(&ret.type, VT_LOCAL | VT_LVAL, addr + offset);
6168 vswap();
6169 vstore();
6170 vtop--;
6171 if (--ret_nregs == 0)
6172 break;
6173 offset += regsize;
6175 vset(&s->type, VT_LOCAL | VT_LVAL, addr);
6178 /* Promote char/short return values. This is matters only
6179 for calling function that were not compiled by TCC and
6180 only on some architectures. For those where it doesn't
6181 matter we expect things to be already promoted to int,
6182 but not larger. */
6183 t = s->type.t & VT_BTYPE;
6184 if (t == VT_BYTE || t == VT_SHORT || t == VT_BOOL) {
6185 #ifdef PROMOTE_RET
6186 vtop->r |= BFVAL(VT_MUSTCAST, 1);
6187 #else
6188 vtop->type.t = VT_INT;
6189 #endif
6192 if (s->f.func_noreturn) {
6193 if (debug_modes)
6194 tcc_tcov_block_end(tcc_state, -1);
6195 CODE_OFF();
6197 } else {
6198 break;
6203 #ifndef precedence_parser /* original top-down parser */
6205 static void expr_prod(void)
6207 int t;
6209 unary();
6210 while ((t = tok) == '*' || t == '/' || t == '%') {
6211 next();
6212 unary();
6213 gen_op(t);
6217 static void expr_sum(void)
6219 int t;
6221 expr_prod();
6222 while ((t = tok) == '+' || t == '-') {
6223 next();
6224 expr_prod();
6225 gen_op(t);
6229 static void expr_shift(void)
6231 int t;
6233 expr_sum();
6234 while ((t = tok) == TOK_SHL || t == TOK_SAR) {
6235 next();
6236 expr_sum();
6237 gen_op(t);
6241 static void expr_cmp(void)
6243 int t;
6245 expr_shift();
6246 while (((t = tok) >= TOK_ULE && t <= TOK_GT) ||
6247 t == TOK_ULT || t == TOK_UGE) {
6248 next();
6249 expr_shift();
6250 gen_op(t);
6254 static void expr_cmpeq(void)
6256 int t;
6258 expr_cmp();
6259 while ((t = tok) == TOK_EQ || t == TOK_NE) {
6260 next();
6261 expr_cmp();
6262 gen_op(t);
6266 static void expr_and(void)
6268 expr_cmpeq();
6269 while (tok == '&') {
6270 next();
6271 expr_cmpeq();
6272 gen_op('&');
6276 static void expr_xor(void)
6278 expr_and();
6279 while (tok == '^') {
6280 next();
6281 expr_and();
6282 gen_op('^');
6286 static void expr_or(void)
6288 expr_xor();
6289 while (tok == '|') {
6290 next();
6291 expr_xor();
6292 gen_op('|');
6296 static void expr_landor(int op);
6298 static void expr_land(void)
6300 expr_or();
6301 if (tok == TOK_LAND)
6302 expr_landor(tok);
6305 static void expr_lor(void)
6307 expr_land();
6308 if (tok == TOK_LOR)
6309 expr_landor(tok);
6312 # define expr_landor_next(op) op == TOK_LAND ? expr_or() : expr_land()
6313 #else /* defined precedence_parser */
6314 # define expr_landor_next(op) unary(), expr_infix(precedence(op) + 1)
6315 # define expr_lor() unary(), expr_infix(1)
6317 static int precedence(int tok)
6319 switch (tok) {
6320 case TOK_LOR: return 1;
6321 case TOK_LAND: return 2;
6322 case '|': return 3;
6323 case '^': return 4;
6324 case '&': return 5;
6325 case TOK_EQ: case TOK_NE: return 6;
6326 relat: case TOK_ULT: case TOK_UGE: return 7;
6327 case TOK_SHL: case TOK_SAR: return 8;
6328 case '+': case '-': return 9;
6329 case '*': case '/': case '%': return 10;
6330 default:
6331 if (tok >= TOK_ULE && tok <= TOK_GT)
6332 goto relat;
6333 return 0;
/* cached precedence for all single-byte tokens, filled once at startup
   so the hot path can use a table lookup instead of the switch */
static unsigned char prec[256];
static void init_prec(void)
{
    int t;
    for (t = 0; t < 256; t++)
        prec[t] = precedence(t);
}
6343 #define precedence(i) ((unsigned)i < 256 ? prec[i] : 0)
6345 static void expr_landor(int op);
/* Precedence-climbing parser: consume binary operators whose precedence is
   at least P, with the left operand already on the value stack.  && and ||
   are routed through expr_landor() for short-circuit code generation. */
static void expr_infix(int p)
{
    int t = tok, p2;
    while ((p2 = precedence(t)) >= p) {
        if (t == TOK_LOR || t == TOK_LAND) {
            expr_landor(t);
        } else {
            next();
            unary();
            /* a following operator binding tighter owns the right operand:
               recurse with a higher minimum precedence first */
            if (precedence(tok) > p2)
                expr_infix(p2 + 1);
            gen_op(t);
        }
        t = tok;
    }
}
6363 #endif
6365 /* Assuming vtop is a value used in a conditional context
6366 (i.e. compared with zero) return 0 if it's false, 1 if
6367 true and -1 if it can't be statically determined. */
6368 static int condition_3way(void)
6370 int c = -1;
6371 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
6372 (!(vtop->r & VT_SYM) || !vtop->sym->a.weak)) {
6373 vdup();
6374 gen_cast_s(VT_BOOL);
6375 c = vtop->c.i;
6376 vpop();
6378 return c;
/* Generate code for a chain of && or || (OP).  The left operand is on the
   value stack.  i is the "interesting" truth value (1 for &&, 0 for ||):
   once an operand statically differs from i the chain's result is known and
   the remaining operands are parsed under nocode_wanted.  cc stays set while
   every operand so far was a compile-time constant; f is set once the result
   is statically decided. */
static void expr_landor(int op)
{
    int t = 0, cc = 1, f = 0, i = op == TOK_LAND, c;
    for(;;) {
        c = f ? i : condition_3way();
        if (c < 0)
            save_regs(1), cc = 0;      /* runtime test needed */
        else if (c != i)
            nocode_wanted++, f = 1;    /* result decided: mute codegen */
        if (tok != op)
            break;
        if (c < 0)
            t = gvtst(i, t);           /* accumulate short-circuit jumps */
        else
            vpop();
        next();
        expr_landor_next(op);
    }
    if (cc || f) {
        /* fully static result: replace operand with the constant i^f */
        vpop();
        vpushi(i ^ f);
        gsym(t);
        nocode_wanted -= f;            /* undo the mute from above */
    } else {
        /* runtime result: attach pending jumps to the VT_CMP value */
        gvtst_set(i, t);
    }
}
6409 static int is_cond_bool(SValue *sv)
6411 if ((sv->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST
6412 && (sv->type.t & VT_BTYPE) == VT_INT)
6413 return (unsigned)sv->c.i < 2;
6414 if (sv->r == VT_CMP)
6415 return 1;
6416 return 0;
/* Parse and generate code for the conditional operator  e ? a : b,
   including the GNU extension  e ?: b.  c is the 3-way static truth of e
   (0/1/-1); g flags the GNU two-operand form; u/tt hold pending jumps.
   One dead branch is parsed under nocode_wanted when c is static. */
static void expr_cond(void)
{
    int tt, u, r1, r2, rc, t1, t2, islv, c, g;
    SValue sv;
    CType type;

    expr_lor();
    if (tok == '?') {
        next();
        c = condition_3way();
        g = (tok == ':' && gnu_ext);
        tt = 0;
        if (!g) {
            if (c < 0) {
                save_regs(1);
                tt = gvtst(1, 0);
            } else {
                vpop();
            }
        } else if (c < 0) {
            /* needed to avoid having different registers saved in
               each branch */
            save_regs(1);
            gv_dup();    /* e ?: b reuses the condition value as result */
            tt = gvtst(0, 0);
        }

        if (c == 0)
            nocode_wanted++;
        if (!g)
            gexpr();

        if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
            mk_pointer(&vtop->type);
        sv = *vtop; /* save value to handle it later */
        vtop--; /* no vpop so that FP stack is not flushed */

        if (g) {
            u = tt;
        } else if (c < 0) {
            u = gjmp(0);
            gsym(tt);
        } else
            u = 0;

        if (c == 0)
            nocode_wanted--;
        if (c == 1)
            nocode_wanted++;
        skip(':');
        expr_cond();

        if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
            mk_pointer(&vtop->type);

        /* cast operands to correct type according to ISOC rules */
        if (!combine_types(&type, &sv, vtop, '?'))
            type_incompatibility_error(&sv.type, &vtop->type,
                "type mismatch in conditional expression (have '%s' and '%s')");

        if (c < 0 && is_cond_bool(vtop) && is_cond_bool(&sv)) {
            /* optimize "if (f ? a > b : c || d) ..." for example, where normally
               "a < b" and "c || d" would be forced to "(int)0/1" first, whereas
               this code jumps directly to the if's then/else branches. */
            t1 = gvtst(0, 0);
            t2 = gjmp(0);
            gsym(u);
            vpushv(&sv);
            /* combine jump targets of 2nd op with VT_CMP of 1st op */
            gvtst_set(0, t1);
            gvtst_set(1, t2);
            gen_cast(&type);
            //   tcc_warning("two conditions expr_cond");
            return;
        }

        /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
           that `(expr ? a : b).mem` does not error with "lvalue expected" */
        islv = (vtop->r & VT_LVAL) && (sv.r & VT_LVAL) && VT_STRUCT == (type.t & VT_BTYPE);

        /* now we convert second operand */
        if (c != 1) {
            gen_cast(&type);
            if (islv) {
                mk_pointer(&vtop->type);
                gaddrof();
            } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
                gaddrof();
        }

        rc = RC_TYPE(type.t);
        /* for long longs, we use fixed registers to avoid having
           to handle a complicated move */
        if (USING_TWO_WORDS(type.t))
            rc = RC_RET(type.t);

        tt = r2 = 0;
        if (c < 0) {
            r2 = gv(rc);
            tt = gjmp(0);
        }
        gsym(u);
        if (c == 1)
            nocode_wanted--;

        /* this is horrible, but we must also convert first
           operand */
        if (c != 0) {
            *vtop = sv;
            gen_cast(&type);
            if (islv) {
                mk_pointer(&vtop->type);
                gaddrof();
            } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
                gaddrof();
        }

        if (c < 0) {
            /* both branches must end with the result in the same register */
            r1 = gv(rc);
            move_reg(r2, r1, islv ? VT_PTR : type.t);
            vtop->r = r2;
            gsym(tt);
        }

        if (islv)
            indir();
    }
}
6548 static void expr_eq(void)
6550 int t;
6552 expr_cond();
6553 if ((t = tok) == '=' || TOK_ASSIGN(t)) {
6554 test_lvalue();
6555 next();
6556 if (t == '=') {
6557 expr_eq();
6558 } else {
6559 vdup();
6560 expr_eq();
6561 gen_op(TOK_ASSIGN_OP(t));
6563 vstore();
6567 ST_FUNC void gexpr(void)
6569 while (1) {
6570 expr_eq();
6571 if (tok != ',')
6572 break;
6573 constant_p &= (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
6574 !((vtop->r & VT_SYM) && vtop->sym->a.addrtaken);
6575 vpop();
6576 next();
6580 /* parse a constant expression and return value in vtop. */
6581 static void expr_const1(void)
6583 nocode_wanted += CONST_WANTED_BIT;
6584 expr_cond();
6585 nocode_wanted -= CONST_WANTED_BIT;
6588 /* parse an integer constant and return its value. */
6589 static inline int64_t expr_const64(void)
6591 int64_t c;
6592 expr_const1();
6593 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM | VT_NONCONST)) != VT_CONST)
6594 expect("constant expression");
6595 c = vtop->c.i;
6596 vpop();
6597 return c;
6600 /* parse an integer constant and return its value.
6601 Complain if it doesn't fit 32bit (signed or unsigned). */
6602 ST_FUNC int expr_const(void)
6604 int c;
6605 int64_t wc = expr_const64();
6606 c = wc;
6607 if (c != wc && (unsigned)c != wc)
6608 tcc_error("constant exceeds 32 bit");
6609 return c;
6612 /* ------------------------------------------------------------------------- */
6613 /* return from function */
6615 #ifndef TCC_TARGET_ARM64
/* Generate code to return the value on vtop from the current function.
   Struct returns follow gfunc_sret(): ret_nregs < 0 is a target-specific
   register transfer, 0 means copy through the hidden pointer argument,
   > 0 means the struct travels packed in that many registers. */
static void gfunc_return(CType *func_type)
{
    if ((func_type->t & VT_BTYPE) == VT_STRUCT) {
        CType type, ret_type;
        int ret_align, ret_nregs, regsize;
        ret_nregs = gfunc_sret(func_type, func_var, &ret_type,
                               &ret_align, &regsize);
        if (ret_nregs < 0) {
#ifdef TCC_TARGET_RISCV64
            arch_transfer_ret_regs(0);
#endif
        } else if (0 == ret_nregs) {
            /* if returning structure, must copy it to implicit
               first pointer arg location */
            type = *func_type;
            mk_pointer(&type);
            vset(&type, VT_LOCAL | VT_LVAL, func_vc);
            indir();
            vswap();
            /* copy structure value to pointer */
            vstore();
        } else {
            /* returning structure packed into registers */
            int size, addr, align, rc, n;
            size = type_size(func_type,&align);
            /* if the struct is misaligned (or not addressable) for
               register-sized loads, first copy it to a fresh aligned slot */
            if ((align & (ret_align - 1))
                && ((vtop->r & VT_VALMASK) < VT_CONST /* pointer to struct */
                    || (vtop->c.i & (ret_align - 1))
                    )) {
                loc = (loc - size) & -ret_align;
                addr = loc;
                type = *func_type;
                vset(&type, VT_LOCAL | VT_LVAL, addr);
                vswap();
                vstore();
                vpop();
                vset(&ret_type, VT_LOCAL | VT_LVAL, addr);
            }
            vtop->type = ret_type;
            rc = RC_RET(ret_type.t);
            /* load each register-sized chunk into its return register */
            for (n = ret_nregs; --n > 0;) {
                vdup();
                gv(rc);
                vswap();
                incr_offset(regsize);
                /* We assume that when a structure is returned in multiple
                   registers, their classes are consecutive values of the
                   suite s(n) = 2^n */
                rc <<= 1;
            }
            gv(rc);
            vtop -= ret_nregs - 1;
        }
    } else {
        gv(RC_RET(func_type->t));
    }
    vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
}
6675 #endif
6677 static void check_func_return(void)
6679 if ((func_vt.t & VT_BTYPE) == VT_VOID)
6680 return;
6681 if (!strcmp (funcname, "main")
6682 && (func_vt.t & VT_BTYPE) == VT_INT) {
6683 /* main returns 0 by default */
6684 vpushi(0);
6685 gen_assign_cast(&func_vt);
6686 gfunc_return(&func_vt);
6687 } else {
6688 tcc_warning("function might return no value: '%s'", funcname);
6692 /* ------------------------------------------------------------------------- */
6693 /* switch/case */
6695 static int case_cmpi(const void *pa, const void *pb)
6697 int64_t a = (*(struct case_t**) pa)->v1;
6698 int64_t b = (*(struct case_t**) pb)->v1;
6699 return a < b ? -1 : a > b;
6702 static int case_cmpu(const void *pa, const void *pb)
6704 uint64_t a = (uint64_t)(*(struct case_t**) pa)->v1;
6705 uint64_t b = (uint64_t)(*(struct case_t**) pb)->v1;
6706 return a < b ? -1 : a > b;
/* test vtop and branch to the known address A when true; pending false
   jumps are chained into T */
static void gtst_addr(int t, int a)
{
    int jump = gvtst(0, t);
    gsym_addr(jump, a);
}
/* Generate the dispatch code for a sorted array of LEN case ranges, with
   the switch value on vtop.  Large sets are split by binary search on the
   middle range; small sets use a linear scan.  *bsym accumulates the jump
   taken when no case matches. */
static void gcase(struct case_t **base, int len, int *bsym)
{
    struct case_t *p;
    int e;
    int ll = (vtop->type.t & VT_BTYPE) == VT_LLONG;
    while (len > 8) {
        /* binary search */
        p = base[len/2];
        vdup();
        if (ll)
            vpushll(p->v2);
        else
            vpushi(p->v2);
        gen_op(TOK_LE);
        e = gvtst(1, 0);
        vdup();
        if (ll)
            vpushll(p->v1);
        else
            vpushi(p->v1);
        gen_op(TOK_GE);
        gtst_addr(0, p->sym); /* v1 <= x <= v2 */
        /* x < v1 */
        gcase(base, len/2, bsym);
        /* x > v2 */
        gsym(e);
        e = len/2 + 1;
        base += e; len -= e;    /* continue with the upper half */
    }
    /* linear scan */
    while (len--) {
        p = *base++;
        vdup();
        if (ll)
            vpushll(p->v2);
        else
            vpushi(p->v2);
        if (p->v1 == p->v2) {
            /* single-value case: one equality test suffices */
            gen_op(TOK_EQ);
            gtst_addr(0, p->sym);
        } else {
            gen_op(TOK_LE);
            e = gvtst(1, 0);
            vdup();
            if (ll)
                vpushll(p->v1);
            else
                vpushi(p->v1);
            gen_op(TOK_GE);
            gtst_addr(0, p->sym);
            gsym(e);
        }
    }
    *bsym = gjmp(*bsym);
}
6770 /* ------------------------------------------------------------------------- */
6771 /* __attribute__((cleanup(fn))) */
6773 static void try_call_scope_cleanup(Sym *stop)
6775 Sym *cls = cur_scope->cl.s;
6777 for (; cls != stop; cls = cls->ncl) {
6778 Sym *fs = cls->next;
6779 Sym *vs = cls->prev_tok;
6781 vpushsym(&fs->type, fs);
6782 vset(&vs->type, vs->r, vs->c);
6783 vtop->sym = vs;
6784 mk_pointer(&vtop->type);
6785 gaddrof();
6786 gfunc_call(1);
/* Emit the cleanup calls needed when jumping from the current scope to a
   label whose cleanup chain at definition time was CLEANUPSTATE: walk both
   chains up to their nearest common ancestor and run every handler the
   goto leaves behind. */
static void try_call_cleanup_goto(Sym *cleanupstate)
{
    Sym *oc, *cc;
    int ocd, ccd;

    if (!cur_scope->cl.s)
        return;

    /* search NCA of both cleanup chains given parents and initial depth */
    ocd = cleanupstate ? cleanupstate->v & ~SYM_FIELD : 0;
    /* level the deeper chain first (empty loop bodies) */
    for (ccd = cur_scope->cl.n, oc = cleanupstate; ocd > ccd; --ocd, oc = oc->ncl)
      ;
    for (cc = cur_scope->cl.s; ccd > ocd; --ccd, cc = cc->ncl)
      ;
    for (; cc != oc; cc = cc->ncl, oc = oc->ncl, --ccd)
      ;

    try_call_scope_cleanup(cc);
}
/* call 'func' for each __attribute__((cleanup(func))) */
/* Flush pending forward gotos that cross out of scope O: re-route each
   pending jump through this scope's cleanup calls, then either keep it
   pending (outer scopes still have cleanups) or retire it. */
static void block_cleanup(struct scope *o)
{
    int jmp = 0;
    Sym *g, **pg;
    for (pg = &pending_gotos; (g = *pg) && g->c > o->cl.n;) {
        if (g->prev_tok->r & LABEL_FORWARD) {
            Sym *pcl = g->next;
            if (!jmp)
                jmp = gjmp(0);   /* skip the cleanup stub in normal flow */
            gsym(pcl->jnext);
            try_call_scope_cleanup(o->cl.s);
            pcl->jnext = gjmp(0);
            if (!o->cl.n)
                goto remove_pending;
            g->c = o->cl.n;      /* now pending at this scope's depth */
            pg = &g->prev;
        } else {
    remove_pending:
            /* unlink and free a retired (or already resolved) entry */
            *pg = g->prev;
            sym_free(g);
        }
    }
    gsym(jmp);
    /* normal fall-through also runs this scope's cleanups */
    try_call_scope_cleanup(o->cl.s);
}
6837 /* ------------------------------------------------------------------------- */
6838 /* VLA */
/* restore the stack pointer from the VLA save slot LOC, if any */
static void vla_restore(int loc)
{
    if (loc != 0)
        gen_vla_sp_restore(loc);
}
6846 static void vla_leave(struct scope *o)
6848 struct scope *c = cur_scope, *v = NULL;
6849 for (; c != o && c; c = c->prev)
6850 if (c->vla.num)
6851 v = c;
6852 if (v)
6853 vla_restore(v->vla.locorig);
6856 /* ------------------------------------------------------------------------- */
6857 /* local scopes */
6859 static void new_scope(struct scope *o)
6861 /* copy and link previous scope */
6862 *o = *cur_scope;
6863 o->prev = cur_scope;
6864 cur_scope = o;
6865 cur_scope->vla.num = 0;
6867 /* record local declaration stack position */
6868 o->lstk = local_stack;
6869 o->llstk = local_label_stack;
6870 ++local_scope;
/* leave scope O, undoing everything new_scope() and the scope's own
   declarations set up; IS_EXPR is true inside a statement expression */
static void prev_scope(struct scope *o, int is_expr)
{
    vla_leave(o->prev);

    if (o->cl.s != o->prev->cl.s)
        block_cleanup(o->prev);

    /* pop locally defined labels */
    label_pop(&local_label_stack, o->llstk, is_expr);

    /* In the is_expr case (a statement expression is finished here),
       vtop might refer to symbols on the local_stack.  Either via the
       type or via vtop->sym.  We can't pop those nor any that in turn
       might be referred to.  To make it easier we don't roll back
       any symbols in that case; some upper level call to block() will
       do that.  We do have to remove such symbols from the lookup
       tables, though.  sym_pop will do that.  */

    /* pop locally defined symbols */
    pop_local_syms(o->lstk, is_expr);
    cur_scope = o->prev;
    --local_scope;
}
6897 /* leave a scope via break/continue(/goto) */
6898 static void leave_scope(struct scope *o)
6900 if (!o)
6901 return;
6902 try_call_scope_cleanup(o->cl.s);
6903 vla_leave(o);
6906 /* short versiona for scopes with 'if/do/while/switch' which can
6907 declare only types (of struct/union/enum) */
6908 static void new_scope_s(struct scope *o)
6910 o->lstk = local_stack;
6911 ++local_scope;
6914 static void prev_scope_s(struct scope *o)
6916 sym_pop(&local_stack, o->lstk, 0);
6917 --local_scope;
6920 /* ------------------------------------------------------------------------- */
6921 /* call block from 'for do while' loops */
6923 static void lblock(int *bsym, int *csym)
6925 struct scope *lo = loop_scope, *co = cur_scope;
6926 int *b = co->bsym, *c = co->csym;
6927 if (csym) {
6928 co->csym = csym;
6929 loop_scope = co;
6931 co->bsym = bsym;
6932 block(0);
6933 co->bsym = b;
6934 if (csym) {
6935 co->csym = c;
6936 loop_scope = lo;
/* Parse and generate code for one statement.  When IS_EXPR is set we are
   inside a GNU statement expression and the last expression's value is
   kept on the value stack instead of being discarded. */
static void block(int is_expr)
{
    int a, b, c, d, e, t;
    struct scope o;
    Sym *s;

    if (is_expr) {
        /* default return value is (void) */
        vpushi(0);
        vtop->type.t = VT_VOID;
    }

again:
    t = tok;
    /* If the token carries a value, next() might destroy it. Only with
       invalid code such as f(){"123"4;} */
    if (TOK_HAS_VALUE(t))
        goto expr;
    next();

    if (debug_modes)
        tcc_tcov_check_line (tcc_state, 0), tcc_tcov_block_begin (tcc_state);

    if (t == TOK_IF) {
        new_scope_s(&o);
        skip('(');
        gexpr();
        skip(')');
        a = gvtst(1, 0);      /* jump over the then-branch when false */
        block(0);
        if (tok == TOK_ELSE) {
            d = gjmp(0);
            gsym(a);
            next();
            block(0);
            gsym(d); /* patch else jmp */
        } else {
            gsym(a);
        }
        prev_scope_s(&o);

    } else if (t == TOK_WHILE) {
        new_scope_s(&o);
        d = gind();           /* loop head / continue target */
        skip('(');
        gexpr();
        skip(')');
        a = gvtst(1, 0);      /* break chain starts with the exit test */
        b = 0;
        lblock(&a, &b);
        gjmp_addr(d);
        gsym_addr(b, d);
        gsym(a);
        prev_scope_s(&o);

    } else if (t == '{') {
        if (debug_modes)
            tcc_debug_stabn(tcc_state, N_LBRAC, ind - func_ind);
        new_scope(&o);

        /* handle local labels declarations */
        while (tok == TOK_LABEL) {
            do {
                next();
                if (tok < TOK_UIDENT)
                    expect("label identifier");
                label_push(&local_label_stack, tok, LABEL_DECLARED);
                next();
            } while (tok == ',');
            skip(';');
        }

        while (tok != '}') {
            decl(VT_LOCAL);
            if (tok != '}') {
                if (is_expr)
                    vpop();   /* only the last statement's value survives */
                block(is_expr);
            }
        }

        prev_scope(&o, is_expr);
        if (debug_modes)
            tcc_debug_stabn(tcc_state, N_RBRAC, ind - func_ind);
        if (local_scope)
            next();
        else if (!nocode_wanted)
            check_func_return();   /* closing brace of the function body */

    } else if (t == TOK_RETURN) {
        b = (func_vt.t & VT_BTYPE) != VT_VOID;
        if (tok != ';') {
            gexpr();
            if (b) {
                gen_assign_cast(&func_vt);
            } else {
                if (vtop->type.t != VT_VOID)
                    tcc_warning("void function returns a value");
                vtop--;
            }
        } else if (b) {
            tcc_warning("'return' with no value");
            b = 0;
        }
        leave_scope(root_scope);
        if (b)
            gfunc_return(&func_vt);
        skip(';');
        /* jump unless last stmt in top-level block */
        if (tok != '}' || local_scope != 1)
            rsym = gjmp(rsym);
        if (debug_modes)
            tcc_tcov_block_end (tcc_state, -1);
        CODE_OFF();

    } else if (t == TOK_BREAK) {
        /* compute jump */
        if (!cur_scope->bsym)
            tcc_error("cannot break");
        if (cur_switch && cur_scope->bsym == cur_switch->bsym)
            leave_scope(cur_switch->scope);
        else
            leave_scope(loop_scope);
        *cur_scope->bsym = gjmp(*cur_scope->bsym);
        skip(';');

    } else if (t == TOK_CONTINUE) {
        /* compute jump */
        if (!cur_scope->csym)
            tcc_error("cannot continue");
        leave_scope(loop_scope);
        *cur_scope->csym = gjmp(*cur_scope->csym);
        skip(';');

    } else if (t == TOK_FOR) {
        new_scope(&o);

        skip('(');
        if (tok != ';') {
            /* c99 for-loop init decl? */
            if (!decl(VT_JMP)) {
                /* no, regular for-loop init expr */
                gexpr();
                vpop();
            }
        }
        skip(';');
        a = b = 0;
        c = d = gind();       /* c: condition addr, d: continue target */
        if (tok != ';') {
            gexpr();
            a = gvtst(1, 0);
        }
        skip(';');
        if (tok != ')') {
            /* increment expression: emitted after the body, so the
               continue target moves here */
            e = gjmp(0);
            d = gind();
            gexpr();
            vpop();
            gjmp_addr(c);
            gsym(e);
        }
        skip(')');
        lblock(&a, &b);
        gjmp_addr(d);
        gsym_addr(b, d);
        gsym(a);
        prev_scope(&o, 0);

    } else if (t == TOK_DO) {
        new_scope_s(&o);
        a = b = 0;
        d = gind();
        lblock(&a, &b);
        gsym(b);              /* continue lands on the condition */
        skip(TOK_WHILE);
        skip('(');
        gexpr();
        skip(')');
        skip(';');
        c = gvtst(0, 0);
        gsym_addr(c, d);
        gsym(a);
        prev_scope_s(&o);

    } else if (t == TOK_SWITCH) {
        struct switch_t *sw;

        sw = tcc_mallocz(sizeof *sw);
        sw->bsym = &a;
        sw->scope = cur_scope;
        sw->prev = cur_switch;
        sw->nocode_wanted = nocode_wanted;
        cur_switch = sw;

        new_scope_s(&o);
        skip('(');
        gexpr();
        skip(')');
        sw->sv = *vtop--; /* save switch value */
        a = 0;
        b = gjmp(0); /* jump to first case */
        lblock(&a, NULL);
        a = gjmp(a); /* add implicit break */
        /* case lookup */
        gsym(b);
        prev_scope_s(&o);

        if (sw->nocode_wanted)
            goto skip_switch;
        if (sw->sv.type.t & VT_UNSIGNED)
            qsort(sw->p, sw->n, sizeof(void*), case_cmpu);
        else
            qsort(sw->p, sw->n, sizeof(void*), case_cmpi);
        /* sorted ranges must not overlap */
        for (b = 1; b < sw->n; b++)
            if (sw->sv.type.t & VT_UNSIGNED
                ? (uint64_t)sw->p[b - 1]->v2 >= (uint64_t)sw->p[b]->v1
                : sw->p[b - 1]->v2 >= sw->p[b]->v1)
                tcc_error("duplicate case value");
        vpushv(&sw->sv);
        gv(RC_INT);
        d = 0, gcase(sw->p, sw->n, &d);
        vpop();
        if (sw->def_sym)
            gsym_addr(d, sw->def_sym);
        else
            gsym(d);
    skip_switch:
        /* break label */
        gsym(a);

        dynarray_reset(&sw->p, &sw->n);
        cur_switch = sw->prev;
        tcc_free(sw);

    } else if (t == TOK_CASE) {
        struct case_t *cr = tcc_malloc(sizeof(struct case_t));
        if (!cur_switch)
            expect("switch");
        cr->v1 = cr->v2 = expr_const64();
        if (gnu_ext && tok == TOK_DOTS) {
            /* GNU case ranges: case lo ... hi: */
            next();
            cr->v2 = expr_const64();
            if ((!(cur_switch->sv.type.t & VT_UNSIGNED) && cr->v2 < cr->v1)
                || (cur_switch->sv.type.t & VT_UNSIGNED && (uint64_t)cr->v2 < (uint64_t)cr->v1))
                tcc_warning("empty case range");
        }
        /* case and default are unreachable from a switch under nocode_wanted */
        if (!cur_switch->nocode_wanted)
            cr->sym = gind();
        dynarray_add(&cur_switch->p, &cur_switch->n, cr);
        skip(':');
        is_expr = 0;
        goto block_after_label;

    } else if (t == TOK_DEFAULT) {
        if (!cur_switch)
            expect("switch");
        if (cur_switch->def_sym)
            tcc_error("too many 'default'");
        cur_switch->def_sym = cur_switch->nocode_wanted ? 1 : gind();
        skip(':');
        is_expr = 0;
        goto block_after_label;

    } else if (t == TOK_GOTO) {
        vla_restore(cur_scope->vla.locorig);
        if (tok == '*' && gnu_ext) {
            /* computed goto */
            next();
            gexpr();
            if ((vtop->type.t & VT_BTYPE) != VT_PTR)
                expect("pointer");
            ggoto();

        } else if (tok >= TOK_UIDENT) {
            s = label_find(tok);
            /* put forward definition if needed */
            if (!s)
                s = label_push(&global_label_stack, tok, LABEL_FORWARD);
            else if (s->r == LABEL_DECLARED)
                s->r = LABEL_FORWARD;

            if (s->r & LABEL_FORWARD) {
                /* start new goto chain for cleanups, linked via label->next */
                if (cur_scope->cl.s && !nocode_wanted) {
                    sym_push2(&pending_gotos, SYM_FIELD, 0, cur_scope->cl.n);
                    pending_gotos->prev_tok = s;
                    s = sym_push2(&s->next, SYM_FIELD, 0, 0);
                    pending_gotos->next = s;
                }
                s->jnext = gjmp(s->jnext);
            } else {
                try_call_cleanup_goto(s->cleanupstate);
                gjmp_addr(s->jnext);
            }
            next();

        } else {
            expect("label identifier");
        }
        skip(';');

    } else if (t == TOK_ASM1 || t == TOK_ASM2 || t == TOK_ASM3) {
        asm_instr();

    } else {
        if (tok == ':' && t >= TOK_UIDENT) {
            /* label case */
            next();
            s = label_find(t);
            if (s) {
                if (s->r == LABEL_DEFINED)
                    tcc_error("duplicate label '%s'", get_tok_str(s->v, NULL));
                s->r = LABEL_DEFINED;
                if (s->next) {
                    Sym *pcl; /* pending cleanup goto */
                    for (pcl = s->next; pcl; pcl = pcl->prev)
                        gsym(pcl->jnext);
                    sym_pop(&s->next, NULL, 0);
                } else
                    gsym(s->jnext);
            } else {
                s = label_push(&global_label_stack, t, LABEL_DEFINED);
            }
            s->jnext = gind();
            s->cleanupstate = cur_scope->cl.s;

    block_after_label:
              {
                /* Accept attributes after labels (e.g. 'unused') */
                AttributeDef ad_tmp;
                parse_attribute(&ad_tmp);
              }
            if (debug_modes)
                tcc_tcov_reset_ind(tcc_state);
            vla_restore(cur_scope->vla.loc);
            if (tok != '}')
                goto again;
            /* we accept this, but it is a mistake */
            tcc_warning_c(warn_all)("deprecated use of label at end of compound statement");

        } else {
            /* expression case */
            if (t != ';') {
                unget_tok(t);
    expr:
                if (is_expr) {
                    vpop();
                    gexpr();
                } else {
                    gexpr();
                    vpop();
                }
                skip(';');
            }
        }
    }

    if (debug_modes)
        tcc_tcov_check_line (tcc_state, 0), tcc_tcov_block_end (tcc_state, 0);
}
7303 /* This skips over a stream of tokens containing balanced {} and ()
7304 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
7305 with a '{'). If STR then allocates and stores the skipped tokens
7306 in *STR. This doesn't check if () and {} are nested correctly,
7307 i.e. "({)}" is accepted. */
7308 static void skip_or_save_block(TokenString **str)
7310 int braces = tok == '{';
7311 int level = 0;
7312 if (str)
7313 *str = tok_str_alloc();
7315 while (1) {
7316 int t = tok;
7317 if (level == 0
7318 && (t == ','
7319 || t == ';'
7320 || t == '}'
7321 || t == ')'
7322 || t == ']'))
7323 break;
7324 if (t == TOK_EOF) {
7325 if (str || level > 0)
7326 tcc_error("unexpected end of file");
7327 else
7328 break;
7330 if (str)
7331 tok_str_add_tok(*str);
7332 next();
7333 if (t == '{' || t == '(' || t == '[') {
7334 level++;
7335 } else if (t == '}' || t == ')' || t == ']') {
7336 level--;
7337 if (level == 0 && braces && t == '}')
7338 break;
7341 if (str) {
7342 tok_str_add(*str, -1);
7343 tok_str_add(*str, 0);
7347 #define EXPR_CONST 1
7348 #define EXPR_ANY 2
/* Parse one initializer element.  EXPR_CONST restricts it to (address)
   constants as required for static storage; EXPR_ANY accepts any
   assignment expression. */
static void parse_init_elem(int expr_type)
{
    int saved_global_expr;
    switch(expr_type) {
    case EXPR_CONST:
        /* compound literals must be allocated globally in this case */
        saved_global_expr = global_expr;
        global_expr = 1;
        expr_const1();
        global_expr = saved_global_expr;
        /* NOTE: symbols are accepted, as well as lvalue for anon symbols
           (compound literals). */
        if (((vtop->r & (VT_VALMASK | VT_LVAL)) != VT_CONST
             && ((vtop->r & (VT_SYM|VT_LVAL)) != (VT_SYM|VT_LVAL)
                 || vtop->sym->v < SYM_FIRST_ANOM))
#ifdef TCC_TARGET_PE
            /* dllimport symbols have no link-time address */
            || ((vtop->r & VT_SYM) && vtop->sym->a.dllimport)
#endif
            )
            tcc_error("initializer element is not constant");
        break;
    case EXPR_ANY:
        expr_eq();
        break;
    }
}
7377 #if 1
7378 static void init_assert(init_params *p, int offset)
7380 if (p->sec ? !NODATA_WANTED && offset > p->sec->data_offset
7381 : !nocode_wanted && offset > p->local_offset)
7382 tcc_internal_error("initializer overflow");
7384 #else
7385 #define init_assert(sec, offset)
7386 #endif
/* put zeros for variable based init */
static void init_putz(init_params *p, unsigned long c, int size)
{
    init_assert(p, c + size);
    if (p->sec) {
        /* nothing to do because globals are already set to zero */
    } else {
        /* local object: emit memset(local + c, 0, size) */
        vpush_helper_func(TOK_memset);
        vseti(VT_LOCAL, c);
#ifdef TCC_TARGET_ARM
        /* NOTE(review): ARM pushes (size, 0) instead of (0, size) —
           presumably to match that target's memset helper argument
           order; confirm against the ARM backend before touching. */
        vpushs(size);
        vpushi(0);
#else
        vpushi(0);
        vpushs(size);
#endif
        gfunc_call(3);
    }
}
7408 #define DIF_FIRST 1
7409 #define DIF_SIZE_ONLY 2
7410 #define DIF_HAVE_ELEM 4
7411 #define DIF_CLEAR 8
/* delete relocations for specified range c ... c + size. Unfortunatly
   in very special cases, relocations may occur unordered */
static void decl_design_delrels(Section *sec, int c, int size)
{
    ElfW_Rel *rel, *rel2, *rel_end;
    if (!sec || !sec->reloc)
        return;
    /* compact the relocation array in place: rel scans, rel2 writes */
    rel = rel2 = (ElfW_Rel*)sec->reloc->data;
    rel_end = (ElfW_Rel*)(sec->reloc->data + sec->reloc->data_offset);
    while (rel < rel_end) {
        if (rel->r_offset >= c && rel->r_offset < c + size) {
            /* inside the overwritten range: drop it */
            sec->reloc->data_offset -= sizeof *rel;
        } else {
            if (rel2 != rel)
                memcpy(rel2, rel, sizeof *rel);
            ++rel2;
        }
        ++rel;
    }
}
7434 static void decl_design_flex(init_params *p, Sym *ref, int index)
7436 if (ref == p->flex_array_ref) {
7437 if (index >= ref->c)
7438 ref->c = index + 1;
7439 } else if (ref->c < 0)
7440 tcc_error("flexible array has zero size in this context");
/* t is the array or struct type. c is the array or struct
   address. cur_field is the pointer to the current
   field, for arrays the 'c' member contains the current start
   index. 'flags' is as in decl_initializer.
   'al' contains the already initialized length of the
   current container (starting at c).  This returns the new length of that.  */
static int decl_designator(init_params *p, CType *type, unsigned long c,
                           Sym **cur_field, int flags, int al)
{
    Sym *s, *f;
    int index, index_last, align, l, nb_elems, elem_size;
    unsigned long corig = c;

    elem_size = 0;
    nb_elems = 1;

    if (flags & DIF_HAVE_ELEM)
        goto no_designator;

    if (gnu_ext && tok >= TOK_UIDENT) {
        /* old GNU "field:" designator syntax */
        l = tok, next();
        if (tok == ':')
            goto struct_field;
        unget_tok(l);
    }

    /* NOTE: we only support ranges for last designator */
    while (nb_elems == 1 && (tok == '[' || tok == '.')) {
        if (tok == '[') {
            if (!(type->t & VT_ARRAY))
                expect("array type");
            next();
            index = index_last = expr_const();
            if (tok == TOK_DOTS && gnu_ext) {
                /* GNU range designator [lo ... hi] */
                next();
                index_last = expr_const();
            }
            skip(']');
            s = type->ref;
            decl_design_flex(p, s, index_last);
            if (index < 0 || index_last >= s->c || index_last < index)
                tcc_error("index exceeds array bounds or range is empty");
            if (cur_field)
                (*cur_field)->c = index_last;
            type = pointed_type(type);
            elem_size = type_size(type, &align);
            c += index * elem_size;
            nb_elems = index_last - index + 1;
        } else {
            int cumofs;
            next();
            l = tok;
        struct_field:
            next();
            if ((type->t & VT_BTYPE) != VT_STRUCT)
                expect("struct/union type");
            cumofs = 0;
            f = find_field(type, l, &cumofs);
            if (cur_field)
                *cur_field = f;
            type = &f->type;
            c += cumofs;
        }
        cur_field = NULL;
    }
    if (!cur_field) {
        /* explicit designator: expect '=' (optional as GNU extension) */
        if (tok == '=') {
            next();
        } else if (!gnu_ext) {
            expect("=");
        }
    } else {
    no_designator:
        /* positional initialization of the next array/struct member */
        if (type->t & VT_ARRAY) {
            index = (*cur_field)->c;
            s = type->ref;
            decl_design_flex(p, s, index);
            if (index >= s->c)
                tcc_error("too many initializers");
            type = pointed_type(type);
            elem_size = type_size(type, &align);
            c += index * elem_size;
        } else {
            f = *cur_field;
            /* Skip bitfield padding. Also with size 32 and 64. */
            while (f && (f->v & SYM_FIRST_ANOM) &&
                   is_integer_btype(f->type.t & VT_BTYPE))
                *cur_field = f = f->next;
            if (!f)
                tcc_error("too many initializers");
            type = &f->type;
            c += f->c;
        }
    }

    if (!elem_size) /* for structs */
        elem_size = type_size(type, &align);

    /* Using designators the same element can be initialized more
       than once.  In that case we need to delete possibly already
       existing relocations. */
    if (!(flags & DIF_SIZE_ONLY) && c - corig < al) {
        decl_design_delrels(p->sec, c, elem_size * nb_elems);
        flags &= ~DIF_CLEAR; /* mark stack dirty too */
    }

    decl_initializer(p, type, c, flags & ~DIF_FIRST);

    if (!(flags & DIF_SIZE_ONLY) && nb_elems > 1) {
        /* range designator: replicate the first element over the range */
        Sym aref = {0};
        CType t1;
        int i;
        if (p->sec || (type->t & VT_ARRAY)) {
            /* make init_putv/vstore believe it were a struct */
            aref.c = elem_size;
            t1.t = VT_STRUCT, t1.ref = &aref;
            type = &t1;
        }
        if (p->sec)
            vpush_ref(type, p->sec, c, elem_size);
        else
            vset(type, VT_LOCAL|VT_LVAL, c);
        for (i = 1; i < nb_elems; i++) {
            vdup();
            init_putv(p, type, c + elem_size * i);
        }
        vpop();
    }

    c += nb_elems * elem_size;
    if (c - corig > al)
        al = c - corig;
    return al;
}
7578 /* store a value or an expression directly in global data or in local array */
/* Pops the value on top of the value stack (vtop) and stores it at byte
   offset 'c' of the object being initialized.  If p->sec is set the store
   goes into that section's static data (with relocations where needed);
   otherwise code is generated to store into the local stack frame. */
7579 static void init_putv(init_params *p, CType *type, unsigned long c)
7581 int bt;
7582 void *ptr;
7583 CType dtype;
7584 int size, align;
7585 Section *sec = p->sec;
7586 uint64_t val;
7588 dtype = *type;
7589 dtype.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
7591 size = type_size(type, &align);
7592 if (type->t & VT_BITFIELD)
/* a bitfield store touches all bytes from the start of the storage unit
   up to the last bit of the field */
7593 size = (BIT_POS(type->t) + BIT_SIZE(type->t) + 7) / 8;
7594 init_assert(p, c + size);
7596 if (sec) {
7597 /* XXX: not portable */
7598 /* XXX: generate error if incorrect relocation */
7599 gen_assign_cast(&dtype);
7600 bt = type->t & VT_BTYPE;
/* only pointer-sized scalars (or pointers) can carry a symbol
   relocation in static data; anything else must be a plain constant */
7602 if ((vtop->r & VT_SYM)
7603 && bt != VT_PTR
7604 && (bt != (PTR_SIZE == 8 ? VT_LLONG : VT_INT)
7605 || (type->t & VT_BITFIELD))
7606 && !((vtop->r & VT_CONST) && vtop->sym->v >= SYM_FIRST_ANOM)
7608 tcc_error("initializer element is not computable at load time");
7610 if (NODATA_WANTED) {
7611 vtop--;
7612 return;
7615 ptr = sec->data + c;
7616 val = vtop->c.i;
7618 /* XXX: make code faster ? */
7619 if ((vtop->r & (VT_SYM|VT_CONST)) == (VT_SYM|VT_CONST) &&
7620 vtop->sym->v >= SYM_FIRST_ANOM &&
7621 /* XXX This rejects compound literals like
7622 '(void *){ptr}'. The problem is that '&sym' is
7623 represented the same way, which would be ruled out
7624 by the SYM_FIRST_ANOM check above, but also '"string"'
7625 in 'char *p = "string"' is represented the same
7626 with the type being VT_PTR and the symbol being an
7627 anonymous one. That is, there's no difference in vtop
7628 between '(void *){x}' and '&(void *){x}'. Ignore
7629 pointer typed entities here. Hopefully no real code
7630 will ever use compound literals with scalar type. */
7631 (vtop->type.t & VT_BTYPE) != VT_PTR) {
7632 /* These come from compound literals, memcpy stuff over. */
7633 Section *ssec;
7634 ElfSym *esym;
7635 ElfW_Rel *rel;
7636 esym = elfsym(vtop->sym);
7637 ssec = tcc_state->sections[esym->st_shndx];
7638 memmove (ptr, ssec->data + esym->st_value + (int)vtop->c.i, size);
7639 if (ssec->reloc) {
7640 /* We need to copy over all memory contents, and that
7641 includes relocations. Use the fact that relocs are
7642 created it order, so look from the end of relocs
7643 until we hit one before the copied region. */
7644 unsigned long relofs = ssec->reloc->data_offset;
7645 while (relofs >= sizeof(*rel)) {
7646 relofs -= sizeof(*rel);
7647 rel = (ElfW_Rel*)(ssec->reloc->data + relofs);
7648 if (rel->r_offset >= esym->st_value + size)
7649 continue;
7650 if (rel->r_offset < esym->st_value)
7651 break;
/* duplicate the relocation, rebased to the destination offset */
7652 put_elf_reloca(symtab_section, sec,
7653 c + rel->r_offset - esym->st_value,
7654 ELFW(R_TYPE)(rel->r_info),
7655 ELFW(R_SYM)(rel->r_info),
7656 #if PTR_SIZE == 8
7657 rel->r_addend
7658 #else
7660 #endif
7664 } else {
7665 if (type->t & VT_BITFIELD) {
/* write the field bitwise, 8 bits at a time, preserving the
   surrounding bits of each storage byte */
7666 int bit_pos, bit_size, bits, n;
7667 unsigned char *p, v, m;
7668 bit_pos = BIT_POS(vtop->type.t);
7669 bit_size = BIT_SIZE(vtop->type.t);
7670 p = (unsigned char*)ptr + (bit_pos >> 3);
7671 bit_pos &= 7, bits = 0;
7672 while (bit_size) {
7673 n = 8 - bit_pos;
7674 if (n > bit_size)
7675 n = bit_size;
7676 v = val >> bits << bit_pos;
7677 m = ((1 << n) - 1) << bit_pos;
7678 *p = (*p & ~m) | (v & m);
7679 bits += n, bit_size -= n, bit_pos = 0, ++p;
7681 } else
7682 switch(bt) {
7683 case VT_BOOL:
7684 *(char *)ptr = val != 0;
7685 break;
7686 case VT_BYTE:
7687 *(char *)ptr = val;
7688 break;
7689 case VT_SHORT:
7690 write16le(ptr, val);
7691 break;
7692 case VT_FLOAT:
7693 write32le(ptr, val);
7694 break;
7695 case VT_DOUBLE:
7696 write64le(ptr, val);
7697 break;
7698 case VT_LDOUBLE:
7699 #if defined TCC_IS_NATIVE_387
7700 /* Host and target platform may be different but both have x87.
7701 On windows, tcc does not use VT_LDOUBLE, except when it is a
7702 cross compiler. In this case a mingw gcc as host compiler
7703 comes here with 10-byte long doubles, while msvc or tcc won't.
7704 tcc itself can still translate by asm.
7705 In any case we avoid possibly random bytes 11 and 12.
7707 if (sizeof (long double) >= 10)
7708 memcpy(ptr, &vtop->c.ld, 10);
7709 #ifdef __TINYC__
7710 else if (sizeof (long double) == sizeof (double))
7711 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr) : "m" (vtop->c.ld));
7712 #endif
7713 else
7714 #endif
7715 /* For other platforms it should work natively, but may not work
7716 for cross compilers */
7717 if (sizeof(long double) == LDOUBLE_SIZE)
7718 memcpy(ptr, &vtop->c.ld, LDOUBLE_SIZE);
7719 else if (sizeof(double) == LDOUBLE_SIZE)
7720 *(double*)ptr = (double)vtop->c.ld;
7721 else if (0 == memcmp(ptr, &vtop->c.ld, LDOUBLE_SIZE))
7722 ; /* nothing to do for 0.0 */
7723 #ifndef TCC_CROSS_TEST
7724 else
7725 tcc_error("can't cross compile long double constants");
7726 #endif
7727 break;
7729 #if PTR_SIZE == 8
7730 /* intptr_t may need a reloc too, see tcctest.c:relocation_test() */
7731 case VT_LLONG:
7732 case VT_PTR:
7733 if (vtop->r & VT_SYM)
7734 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
7735 else
7736 write64le(ptr, val);
7737 break;
7738 case VT_INT:
7739 write32le(ptr, val);
7740 break;
7741 #else
7742 case VT_LLONG:
7743 write64le(ptr, val);
7744 break;
7745 case VT_PTR:
7746 case VT_INT:
7747 if (vtop->r & VT_SYM)
7748 greloc(sec, vtop->sym, c, R_DATA_PTR);
7749 write32le(ptr, val);
7750 break;
7751 #endif
7752 default:
7753 //tcc_internal_error("unexpected type");
7754 break;
7757 vtop--;
7758 } else {
/* automatic storage: emit a regular store through the value stack */
7759 vset(&dtype, VT_LOCAL|VT_LVAL, c);
7760 vswap();
7761 vstore();
7762 vpop();
7766 /* 't' contains the type and storage info. 'c' is the offset of the
7767 object in section 'sec'. If 'sec' is NULL, it means stack based
7768 allocation. 'flags & DIF_FIRST' is true if array '{' must be read (multi
7769 dimension implicit array init handling). 'flags & DIF_SIZE_ONLY' is true if
7770 size only evaluation is wanted (only for arrays). */
/* Recursively parses an initializer for 'type' and emits the stores via
   init_putv/init_putz.  Handles string literal initializers for char and
   wide-char arrays, brace-enclosed lists with designators, struct-from-
   struct copies, and the GNU flexible-array extensions. */
7771 static void decl_initializer(init_params *p, CType *type, unsigned long c, int flags)
7773 int len, n, no_oblock, i;
7774 int size1, align1;
7775 Sym *s, *f;
7776 Sym indexsym;
7777 CType *t1;
7779 /* generate line number info */
7780 if (debug_modes && !p->sec)
7781 tcc_debug_line(tcc_state), tcc_tcov_check_line (tcc_state, 1);
7783 if (!(flags & DIF_HAVE_ELEM) && tok != '{' &&
7784 /* In case of strings we have special handling for arrays, so
7785 don't consume them as initializer value (which would commit them
7786 to some anonymous symbol). */
7787 tok != TOK_LSTR && tok != TOK_STR &&
7788 (!(flags & DIF_SIZE_ONLY)
7789 /* a struct may be initialized from a struct of same type, as in
7790 struct {int x,y;} a = {1,2}, b = {3,4}, c[] = {a,b};
7791 In that case we need to parse the element in order to check
7792 it for compatibility below */
7793 || (type->t & VT_BTYPE) == VT_STRUCT)
/* parse one scalar element now; in the size-only dry run suppress
   code generation while doing so */
7795 int ncw_prev = nocode_wanted;
7796 if ((flags & DIF_SIZE_ONLY) && !p->sec)
7797 ++nocode_wanted;
7798 parse_init_elem(!p->sec ? EXPR_ANY : EXPR_CONST);
7799 nocode_wanted = ncw_prev;
7800 flags |= DIF_HAVE_ELEM;
7803 if (type->t & VT_ARRAY) {
7804 no_oblock = 1;
7805 if (((flags & DIF_FIRST) && tok != TOK_LSTR && tok != TOK_STR) ||
7806 tok == '{') {
7807 skip('{');
7808 no_oblock = 0;
7811 s = type->ref;
7812 n = s->c;
7813 t1 = pointed_type(type);
7814 size1 = type_size(t1, &align1);
7816 /* only parse strings here if correct type (otherwise: handle
7817 them as ((w)char *) expressions */
7818 if ((tok == TOK_LSTR &&
7819 #ifdef TCC_TARGET_PE
7820 (t1->t & VT_BTYPE) == VT_SHORT && (t1->t & VT_UNSIGNED)
7821 #else
7822 (t1->t & VT_BTYPE) == VT_INT
7823 #endif
7824 ) || (tok == TOK_STR && (t1->t & VT_BTYPE) == VT_BYTE)) {
7825 len = 0;
7826 cstr_reset(&initstr);
7827 if (size1 != (tok == TOK_STR ? 1 : sizeof(nwchar_t)))
7828 tcc_error("unhandled string literal merging");
/* concatenate adjacent string literals, dropping the
   terminating zero between the pieces */
7829 while (tok == TOK_STR || tok == TOK_LSTR) {
7830 if (initstr.size)
7831 initstr.size -= size1;
7832 if (tok == TOK_STR)
7833 len += tokc.str.size;
7834 else
7835 len += tokc.str.size / sizeof(nwchar_t);
7836 len--;
7837 cstr_cat(&initstr, tokc.str.data, tokc.str.size);
7838 next();
7840 if (tok != ')' && tok != '}' && tok != ',' && tok != ';'
7841 && tok != TOK_EOF) {
7842 /* Not a lone literal but part of a bigger expression. */
7843 unget_tok(size1 == 1 ? TOK_STR : TOK_LSTR);
7844 tokc.str.size = initstr.size;
7845 tokc.str.data = initstr.data;
7846 goto do_init_array;
7849 decl_design_flex(p, s, len);
7850 if (!(flags & DIF_SIZE_ONLY)) {
7851 int nb = n, ch;
7852 if (len < nb)
7853 nb = len;
7854 if (len > nb)
7855 tcc_warning("initializer-string for array is too long");
7856 /* in order to go faster for common case (char
7857 string in global variable, we handle it
7858 specifically */
7859 if (p->sec && size1 == 1) {
7860 init_assert(p, c + nb);
7861 if (!NODATA_WANTED)
7862 memcpy(p->sec->data + c, initstr.data, nb);
7863 } else {
7864 for(i=0;i<n;i++) {
7865 if (i >= nb) {
7866 /* only add trailing zero if enough storage (no
7867 warning in this case since it is standard) */
7868 if (flags & DIF_CLEAR)
7869 break;
7870 if (n - i >= 4) {
/* bulk-zero the rest instead of storing byte by byte */
7871 init_putz(p, c + i * size1, (n - i) * size1);
7872 break;
7874 ch = 0;
7875 } else if (size1 == 1)
7876 ch = ((unsigned char *)initstr.data)[i];
7877 else
7878 ch = ((nwchar_t *)initstr.data)[i];
7879 vpushi(ch);
7880 init_putv(p, t1, c + i * size1);
7884 } else {
7886 do_init_array:
7887 indexsym.c = 0;
7888 f = &indexsym;
7890 do_init_list:
7891 /* zero memory once in advance */
7892 if (!(flags & (DIF_CLEAR | DIF_SIZE_ONLY))) {
7893 init_putz(p, c, n*size1);
7894 flags |= DIF_CLEAR;
7897 len = 0;
7898 /* GNU extension: if the initializer is empty for a flex array,
7899 it's size is zero. We won't enter the loop, so set the size
7900 now. */
7901 decl_design_flex(p, s, len);
7902 while (tok != '}' || (flags & DIF_HAVE_ELEM)) {
7903 len = decl_designator(p, type, c, &f, flags, len);
7904 flags &= ~DIF_HAVE_ELEM;
7905 if (type->t & VT_ARRAY) {
7906 ++indexsym.c;
7907 /* special test for multi dimensional arrays (may not
7908 be strictly correct if designators are used at the
7909 same time) */
7910 if (no_oblock && len >= n*size1)
7911 break;
7912 } else {
7913 if (s->type.t == VT_UNION)
7914 f = NULL;
7915 else
7916 f = f->next;
7917 if (no_oblock && f == NULL)
7918 break;
7921 if (tok == '}')
7922 break;
7923 skip(',');
7926 if (!no_oblock)
7927 skip('}');
7929 } else if ((flags & DIF_HAVE_ELEM)
7930 /* Use i_c_parameter_t, to strip toplevel qualifiers.
7931 The source type might have VT_CONSTANT set, which is
7932 of course assignable to non-const elements. */
7933 && is_compatible_unqualified_types(type, &vtop->type)) {
7934 goto one_elem;
7936 } else if ((type->t & VT_BTYPE) == VT_STRUCT) {
7937 no_oblock = 1;
7938 if ((flags & DIF_FIRST) || tok == '{') {
7939 skip('{');
7940 no_oblock = 0;
7942 s = type->ref;
7943 f = s->next;
7944 n = s->c;
7945 size1 = 1;
7946 goto do_init_list;
7948 } else if (tok == '{') {
7949 if (flags & DIF_HAVE_ELEM)
7950 skip(';');
/* scalar wrapped in braces: int x = { 1 }; */
7951 next();
7952 decl_initializer(p, type, c, flags & ~DIF_HAVE_ELEM);
7953 skip('}');
7955 } else one_elem: if ((flags & DIF_SIZE_ONLY)) {
7956 /* If we supported only ISO C we wouldn't have to accept calling
7957 this on anything than an array if DIF_SIZE_ONLY (and even then
7958 only on the outermost level, so no recursion would be needed),
7959 because initializing a flex array member isn't supported.
7960 But GNU C supports it, so we need to recurse even into
7961 subfields of structs and arrays when DIF_SIZE_ONLY is set. */
7962 /* just skip expression */
7963 if (flags & DIF_HAVE_ELEM)
7964 vpop();
7965 else
7966 skip_or_save_block(NULL);
7968 } else {
7969 if (!(flags & DIF_HAVE_ELEM)) {
7970 /* This should happen only when we haven't parsed
7971 the init element above for fear of committing a
7972 string constant to memory too early. */
7973 if (tok != TOK_STR && tok != TOK_LSTR)
7974 expect("string constant");
7975 parse_init_elem(!p->sec ? EXPR_ANY : EXPR_CONST);
7977 if (!p->sec && (flags & DIF_CLEAR) /* container was already zero'd */
7978 && (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST
7979 && vtop->c.i == 0
7980 && btype_size(type->t & VT_BTYPE) /* not for fp constants */
7982 vpop();
7983 else
7984 init_putv(p, type, c);
7988 /* parse an initializer for type 't' if 'has_init' is non zero, and
7989 allocate space in local or global data space ('r' is either
7990 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
7991 variable 'v' of scope 'scope' is declared before initializers
7992 are parsed. If 'v' is zero, then a reference to the new object
7993 is put in the value stack. If 'has_init' is 2, a special parsing
7994 is done to handle string constants. */
/* For objects of unknown size (flexible arrays / typedef'd incomplete
   arrays) the initializer tokens are saved and replayed twice: a first
   DIF_SIZE_ONLY pass computes the size, then the real pass emits data. */
7995 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r,
7996 int has_init, int v, int global)
7998 int size, align, addr;
7999 TokenString *init_str = NULL;
8001 Section *sec;
8002 Sym *flexible_array;
8003 Sym *sym;
8004 int saved_nocode_wanted = nocode_wanted;
8005 #ifdef CONFIG_TCC_BCHECK
8006 int bcheck = tcc_state->do_bounds_check && !NODATA_WANTED;
8007 #endif
8008 init_params p = {0};
8010 /* Always allocate static or global variables */
8011 if (v && (r & VT_VALMASK) == VT_CONST)
8012 nocode_wanted |= DATA_ONLY_WANTED;
8014 flexible_array = NULL;
8015 size = type_size(type, &align);
8017 /* exactly one flexible array may be initialized, either the
8018 toplevel array or the last member of the toplevel struct */
8020 if (size < 0) {
8021 /* If the base type itself was an array type of unspecified size
8022 (like in 'typedef int arr[]; arr x = {1};') then we will
8023 overwrite the unknown size by the real one for this decl.
8024 We need to unshare the ref symbol holding that size. */
8025 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
8026 p.flex_array_ref = type->ref;
8028 } else if (has_init && (type->t & VT_BTYPE) == VT_STRUCT) {
8029 Sym *field = type->ref->next;
8030 if (field) {
8031 while (field->next)
8032 field = field->next;
8033 if (field->type.t & VT_ARRAY && field->type.ref->c < 0) {
/* last member is a flexible array; force the two-pass path below */
8034 flexible_array = field;
8035 p.flex_array_ref = field->type.ref;
8036 size = -1;
8041 if (size < 0) {
8042 /* If unknown size, do a dry-run 1st pass */
8043 if (!has_init)
8044 tcc_error("unknown type size");
8045 if (has_init == 2) {
8046 /* only get strings */
8047 init_str = tok_str_alloc();
8048 while (tok == TOK_STR || tok == TOK_LSTR) {
8049 tok_str_add_tok(init_str);
8050 next();
8052 tok_str_add(init_str, -1);
8053 tok_str_add(init_str, 0);
8054 } else
8055 skip_or_save_block(&init_str);
8056 unget_tok(0);
8058 /* compute size */
8059 begin_macro(init_str, 1);
8060 next();
8061 decl_initializer(&p, type, 0, DIF_FIRST | DIF_SIZE_ONLY);
8062 /* prepare second initializer parsing */
8063 macro_ptr = init_str->str;
8064 next();
8066 /* if still unknown size, error */
8067 size = type_size(type, &align);
8068 if (size < 0)
8069 tcc_error("unknown type size");
8071 /* If there's a flex member and it was used in the initializer
8072 adjust size. */
8073 if (flexible_array && flexible_array->type.ref->c > 0)
8074 size += flexible_array->type.ref->c
8075 * pointed_size(&flexible_array->type);
8078 /* take into account specified alignment if bigger */
8079 if (ad->a.aligned) {
8080 int speca = 1 << (ad->a.aligned - 1);
8081 if (speca > align)
8082 align = speca;
8083 } else if (ad->a.packed) {
8084 align = 1;
8087 if (!v && NODATA_WANTED)
8088 size = 0, align = 1;
8090 if ((r & VT_VALMASK) == VT_LOCAL) {
8091 sec = NULL;
8092 #ifdef CONFIG_TCC_BCHECK
8093 if (bcheck && v) {
8094 /* add padding between stack variables for bound checking */
8095 loc -= align;
8097 #endif
/* reserve and align stack space; 'loc' grows downwards */
8098 loc = (loc - size) & -align;
8099 addr = loc;
8100 p.local_offset = addr + size;
8101 #ifdef CONFIG_TCC_BCHECK
8102 if (bcheck && v) {
8103 /* add padding between stack variables for bound checking */
8104 loc -= align;
8106 #endif
8107 if (v) {
8108 /* local variable */
8109 #ifdef CONFIG_TCC_ASM
8110 if (ad->asm_label) {
8111 int reg = asm_parse_regvar(ad->asm_label);
8112 if (reg >= 0)
8113 r = (r & ~VT_VALMASK) | reg;
8115 #endif
8116 sym = sym_push(v, type, r, addr);
8117 if (ad->cleanup_func) {
/* register the __attribute__((cleanup)) handler for scope exit */
8118 Sym *cls = sym_push2(&all_cleanups,
8119 SYM_FIELD | ++cur_scope->cl.n, 0, 0);
8120 cls->prev_tok = sym;
8121 cls->next = ad->cleanup_func;
8122 cls->ncl = cur_scope->cl.s;
8123 cur_scope->cl.s = cls;
8126 sym->a = ad->a;
8127 } else {
8128 /* push local reference */
8129 vset(type, r, addr);
8131 } else {
8132 sym = NULL;
8133 if (v && global) {
8134 /* see if the symbol was already defined */
8135 sym = sym_find(v);
8136 if (sym) {
8137 if (p.flex_array_ref && (sym->type.t & type->t & VT_ARRAY)
8138 && sym->type.ref->c > type->ref->c) {
8139 /* flex array was already declared with explicit size
8140 extern int arr[10];
8141 int arr[] = { 1,2,3 }; */
8142 type->ref->c = sym->type.ref->c;
8143 size = type_size(type, &align);
8145 patch_storage(sym, ad, type);
8146 /* we accept several definitions of the same global variable. */
8147 if (!has_init && sym->c && elfsym(sym)->st_shndx != SHN_UNDEF)
8148 goto no_alloc;
8152 /* allocate symbol in corresponding section */
8153 sec = ad->section;
8154 if (!sec) {
8155 CType *tp = type;
8156 while ((tp->t & (VT_BTYPE|VT_ARRAY)) == (VT_PTR|VT_ARRAY))
8157 tp = &tp->ref->type;
8158 if (tp->t & VT_CONSTANT) {
8159 sec = rodata_section;
8160 } else if (has_init) {
8161 sec = data_section;
8162 /*if (tcc_state->g_debug & 4)
8163 tcc_warning("rw data: %s", get_tok_str(v, 0));*/
8164 } else if (tcc_state->nocommon)
8165 sec = bss_section;
8168 if (sec) {
8169 addr = section_add(sec, size, align);
8170 #ifdef CONFIG_TCC_BCHECK
8171 /* add padding if bound check */
8172 if (bcheck)
8173 section_add(sec, 1, 1);
8174 #endif
8175 } else {
8176 addr = align; /* SHN_COMMON is special, symbol value is align */
8177 sec = common_section;
8180 if (v) {
8181 if (!sym) {
8182 sym = sym_push(v, type, r | VT_SYM, 0);
8183 patch_storage(sym, ad, NULL);
8185 /* update symbol definition */
8186 put_extern_sym(sym, sec, addr, size);
8187 } else {
8188 /* push global reference */
8189 vpush_ref(type, sec, addr, size);
8190 sym = vtop->sym;
8191 vtop->r |= r;
8194 #ifdef CONFIG_TCC_BCHECK
8195 /* handles bounds now because the symbol must be defined
8196 before for the relocation */
8197 if (bcheck) {
8198 addr_t *bounds_ptr;
8200 greloca(bounds_section, sym, bounds_section->data_offset, R_DATA_PTR, 0);
8201 /* then add global bound info */
8202 bounds_ptr = section_ptr_add(bounds_section, 2 * sizeof(addr_t));
8203 bounds_ptr[0] = 0; /* relocated */
8204 bounds_ptr[1] = size;
8206 #endif
8209 if (type->t & VT_VLA) {
8210 int a;
8212 if (NODATA_WANTED)
8213 goto no_alloc;
8215 /* save before-VLA stack pointer if needed */
8216 if (cur_scope->vla.num == 0) {
8217 if (cur_scope->prev && cur_scope->prev->vla.num) {
8218 cur_scope->vla.locorig = cur_scope->prev->vla.loc;
8219 } else {
8220 gen_vla_sp_save(loc -= PTR_SIZE);
8221 cur_scope->vla.locorig = loc;
8225 vpush_type_size(type, &a);
8226 gen_vla_alloc(type, a);
8227 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
8228 /* on _WIN64, because of the function args scratch area, the
8229 result of alloca differs from RSP and is returned in RAX. */
8230 gen_vla_result(addr), addr = (loc -= PTR_SIZE);
8231 #endif
8232 gen_vla_sp_save(addr);
8233 cur_scope->vla.loc = addr;
8234 cur_scope->vla.num++;
8235 } else if (has_init) {
8236 p.sec = sec;
8237 decl_initializer(&p, type, addr, DIF_FIRST);
8238 /* patch flexible array member size back to -1, */
8239 /* for possible subsequent similar declarations */
8240 if (flexible_array)
8241 flexible_array->type.ref->c = -1;
8244 no_alloc:
8245 /* restore parse state if needed */
8246 if (init_str) {
8247 end_macro();
8248 next();
8251 nocode_wanted = saved_nocode_wanted;
8254 /* generate vla code saved in post_type() */
/* Recursively walks the (possibly nested) array type of a VLA parameter
   and, for each dimension whose size expression was saved as a token
   string, replays those tokens to compute the size at run time and
   stores the result into a stack slot at type.ref->c. */
8255 static void func_vla_arg_code(Sym *arg)
8257 int align;
8258 TokenString *vla_array_tok = NULL;
/* inner dimensions first, so outer size expressions can use them */
8260 if (arg->type.ref)
8261 func_vla_arg_code(arg->type.ref);
8263 if ((arg->type.t & VT_VLA) && arg->type.ref->vla_array_str) {
/* reserve an int-sized, aligned stack slot for the computed size */
8264 loc -= type_size(&int_type, &align);
8265 loc &= -align;
8266 arg->type.ref->c = loc;
8268 unget_tok(0);
8269 vla_array_tok = tok_str_alloc();
8270 vla_array_tok->str = arg->type.ref->vla_array_str;
/* replay the saved size expression through the macro stream */
8271 begin_macro(vla_array_tok, 1);
8272 next();
8273 gexpr();
8274 end_macro();
8275 next();
/* size in bytes = element count * element size; store into the slot */
8276 vpush_type_size(&arg->type.ref->type, &align);
8277 gen_op('*');
8278 vset(&int_type, VT_LOCAL|VT_LVAL, arg->type.ref->c);
8279 vswap();
8280 vstore();
8281 vpop();
8285 static void func_vla_arg(Sym *sym)
8287 Sym *arg;
8289 for (arg = sym->type.ref->next; arg; arg = arg->next)
8290 if (arg->type.t & VT_VLA)
8291 func_vla_arg_code(arg);
8294 /* parse a function defined by symbol 'sym' and generate its code in
8295 'cur_text_section' */
/* Drives full code generation for one function body: sets up a fresh
   root scope, emits the prolog, compiles the body via block(0), emits
   the epilog, then resets all per-function globals.  The statement
   order here is the contract; do not reorder. */
8296 static void gen_function(Sym *sym)
8298 struct scope f = { 0 };
8299 cur_scope = root_scope = &f;
8300 nocode_wanted = 0;
8301 ind = cur_text_section->data_offset;
8302 if (sym->a.aligned) {
/* pad with nops up to the requested function alignment */
8303 size_t newoff = section_add(cur_text_section, 0,
8304 1 << (sym->a.aligned - 1));
8305 gen_fill_nops(newoff - ind);
8307 /* NOTE: we patch the symbol size later */
8308 put_extern_sym(sym, cur_text_section, ind, 0);
8309 if (sym->type.ref->f.func_ctor)
8310 add_array (tcc_state, ".init_array", sym->c);
8311 if (sym->type.ref->f.func_dtor)
8312 add_array (tcc_state, ".fini_array", sym->c);
8314 funcname = get_tok_str(sym->v, NULL);
8315 func_ind = ind;
8316 func_vt = sym->type.ref->type;
8317 func_var = sym->type.ref->f.func_type == FUNC_ELLIPSIS;
8319 /* put debug symbol */
8320 tcc_debug_funcstart(tcc_state, sym);
8321 /* push a dummy symbol to enable local sym storage */
8322 sym_push2(&local_stack, SYM_FIELD, 0, 0);
8323 local_scope = 1; /* for function parameters */
8324 gfunc_prolog(sym);
8325 tcc_debug_prolog_epilog(tcc_state, 0);
8326 local_scope = 0;
8327 rsym = 0;
8328 clear_temp_local_var_list();
8329 func_vla_arg(sym);
8330 block(0);
/* resolve all pending 'return' jumps to the epilog */
8331 gsym(rsym);
8332 nocode_wanted = 0;
8333 /* reset local stack */
8334 pop_local_syms(NULL, 0);
8335 tcc_debug_prolog_epilog(tcc_state, 1);
8336 gfunc_epilog();
8337 cur_text_section->data_offset = ind;
8338 local_scope = 0;
8339 label_pop(&global_label_stack, NULL, 0);
8340 sym_pop(&all_cleanups, NULL, 0);
8341 /* patch symbol size */
8342 elfsym(sym)->st_size = ind - func_ind;
8343 /* end of function */
8344 tcc_debug_funcend(tcc_state, ind - func_ind);
8345 /* It's better to crash than to generate wrong code */
8346 cur_text_section = NULL;
8347 funcname = ""; /* for safety */
8348 func_vt.t = VT_VOID; /* for safety */
8349 func_var = 0; /* for safety */
8350 ind = 0; /* for safety */
8351 func_ind = -1;
8352 nocode_wanted = DATA_ONLY_WANTED;
8353 check_vstack();
8354 /* do this after funcend debug info */
8355 next();
8358 static void gen_inline_functions(TCCState *s)
8360 Sym *sym;
8361 int inline_generated, i;
8362 struct InlineFunc *fn;
8364 tcc_open_bf(s, ":inline:", 0);
8365 /* iterate while inline function are referenced */
8366 do {
8367 inline_generated = 0;
8368 for (i = 0; i < s->nb_inline_fns; ++i) {
8369 fn = s->inline_fns[i];
8370 sym = fn->sym;
8371 if (sym && (sym->c || !(sym->type.t & VT_INLINE))) {
8372 /* the function was used or forced (and then not internal):
8373 generate its code and convert it to a normal function */
8374 fn->sym = NULL;
8375 tcc_debug_putfile(s, fn->filename);
8376 begin_macro(fn->func_str, 1);
8377 next();
8378 cur_text_section = text_section;
8379 gen_function(sym);
8380 end_macro();
8382 inline_generated = 1;
8385 } while (inline_generated);
8386 tcc_close();
8389 static void free_inline_functions(TCCState *s)
8391 int i;
8392 /* free tokens of unused inline functions */
8393 for (i = 0; i < s->nb_inline_fns; ++i) {
8394 struct InlineFunc *fn = s->inline_fns[i];
8395 if (fn->sym)
8396 tok_str_free(fn->func_str);
8398 dynarray_reset(&s->inline_fns, &s->nb_inline_fns);
8401 static void do_Static_assert(void)
8403 int c;
8404 const char *msg;
8406 next();
8407 skip('(');
8408 c = expr_const();
8409 msg = "_Static_assert fail";
8410 if (tok == ',') {
8411 next();
8412 msg = parse_mult_str("string constant")->data;
8414 skip(')');
8415 if (c == 0)
8416 tcc_error("%s", msg);
8417 skip(';');
8420 /* 'l' is VT_LOCAL or VT_CONST to define default storage type
8421 or VT_CMP if parsing old style parameter list
8422 or VT_JMP if parsing c99 for decl: for (int i = 0, ...) */
/* Top-level declaration parser: loops over declarations, handling
   function definitions, typedefs, K&R parameter lists and variable
   declarations with optional initializers.  Returns 1 only in the
   VT_JMP (for-declaration) case when a declarator was parsed. */
8423 static int decl(int l)
8425 int v, has_init, r, oldint;
8426 CType type, btype;
8427 Sym *sym;
8428 AttributeDef ad, adbase;
8430 while (1) {
8432 if (tok == TOK_STATIC_ASSERT) {
8433 do_Static_assert();
8434 continue;
8437 oldint = 0;
8438 if (!parse_btype(&btype, &adbase, l == VT_LOCAL)) {
/* no base type could be parsed: decide how to proceed per context */
8439 if (l == VT_JMP)
8440 return 0;
8441 /* skip redundant ';' if not in old parameter decl scope */
8442 if (tok == ';' && l != VT_CMP) {
8443 next();
8444 continue;
8446 if (l != VT_CONST)
8447 break;
8448 if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
8449 /* global asm block */
8450 asm_global_instr();
8451 continue;
8453 if (tok >= TOK_UIDENT) {
8454 /* special test for old K&R protos without explicit int
8455 type. Only accepted when defining global data */
8456 btype.t = VT_INT;
8457 oldint = 1;
8458 } else {
8459 if (tok != TOK_EOF)
8460 expect("declaration");
8461 break;
8465 if (tok == ';') {
/* base type with no declarator: legal for struct/union/enum tags */
8466 if ((btype.t & VT_BTYPE) == VT_STRUCT) {
8467 v = btype.ref->v;
8468 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) >= SYM_FIRST_ANOM)
8469 tcc_warning("unnamed struct/union that defines no instances");
8470 next();
8471 continue;
8473 if (IS_ENUM(btype.t)) {
8474 next();
8475 continue;
8479 while (1) { /* iterate thru each declaration */
8480 type = btype;
8481 ad = adbase;
8482 type_decl(&type, &ad, &v, TYPE_DIRECT);
8483 #if 0
8485 char buf[500];
8486 type_to_str(buf, sizeof(buf), &type, get_tok_str(v, NULL));
8487 printf("type = '%s'\n", buf);
8489 #endif
8490 if ((type.t & VT_BTYPE) == VT_FUNC) {
8491 if ((type.t & VT_STATIC) && (l != VT_CONST))
8492 tcc_error("function without file scope cannot be static");
8493 /* if old style function prototype, we accept a
8494 declaration list */
8495 sym = type.ref;
8496 if (sym->f.func_type == FUNC_OLD && l == VT_CONST) {
8497 func_vt = type;
8498 decl(VT_CMP);
8500 #if defined TCC_TARGET_MACHO || defined TARGETOS_ANDROID
8501 if (sym->f.func_alwinl
8502 && ((type.t & (VT_EXTERN | VT_INLINE))
8503 == (VT_EXTERN | VT_INLINE))) {
8504 /* always_inline functions must be handled as if they
8505 don't generate multiple global defs, even if extern
8506 inline, i.e. GNU inline semantics for those. Rewrite
8507 them into static inline. */
8508 type.t &= ~VT_EXTERN;
8509 type.t |= VT_STATIC;
8511 #endif
8512 /* always compile 'extern inline' */
8513 if (type.t & VT_EXTERN)
8514 type.t &= ~VT_INLINE;
8516 } else if (oldint) {
8517 tcc_warning("type defaults to int");
8520 if (gnu_ext && (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
8521 ad.asm_label = asm_label_instr();
8522 /* parse one last attribute list, after asm label */
8523 parse_attribute(&ad);
8524 #if 0
8525 /* gcc does not allow __asm__("label") with function definition,
8526 but why not ... */
8527 if (tok == '{')
8528 expect(";");
8529 #endif
8532 #ifdef TCC_TARGET_PE
8533 if (ad.a.dllimport || ad.a.dllexport) {
8534 if (type.t & VT_STATIC)
8535 tcc_error("cannot have dll linkage with static");
8536 if (type.t & VT_TYPEDEF) {
8537 tcc_warning("'%s' attribute ignored for typedef",
8538 ad.a.dllimport ? (ad.a.dllimport = 0, "dllimport") :
8539 (ad.a.dllexport = 0, "dllexport"));
8540 } else if (ad.a.dllimport) {
8541 if ((type.t & VT_BTYPE) == VT_FUNC)
8542 ad.a.dllimport = 0;
8543 else
8544 type.t |= VT_EXTERN;
8547 #endif
8548 if (tok == '{') {
/* '{' after a declarator: this is a function definition */
8549 if (l != VT_CONST)
8550 tcc_error("cannot use local functions");
8551 if ((type.t & VT_BTYPE) != VT_FUNC)
8552 expect("function definition");
8554 /* reject abstract declarators in function definition
8555 make old style params without decl have int type */
8556 sym = type.ref;
8557 while ((sym = sym->next) != NULL) {
8558 if (!(sym->v & ~SYM_FIELD))
8559 expect("identifier");
8560 if (sym->type.t == VT_VOID)
8561 sym->type = int_type;
8564 /* apply post-declaraton attributes */
8565 merge_funcattr(&type.ref->f, &ad.f);
8567 /* put function symbol */
8568 type.t &= ~VT_EXTERN;
8569 sym = external_sym(v, &type, 0, &ad);
8571 /* static inline functions are just recorded as a kind
8572 of macro. Their code will be emitted at the end of
8573 the compilation unit only if they are used */
8574 if (sym->type.t & VT_INLINE) {
8575 struct InlineFunc *fn;
8576 fn = tcc_malloc(sizeof *fn + strlen(file->filename));
8577 strcpy(fn->filename, file->filename);
8578 fn->sym = sym;
/* save the body tokens for later replay by gen_inline_functions */
8579 skip_or_save_block(&fn->func_str);
8580 dynarray_add(&tcc_state->inline_fns,
8581 &tcc_state->nb_inline_fns, fn);
8582 } else {
8583 /* compute text section */
8584 cur_text_section = ad.section;
8585 if (!cur_text_section)
8586 cur_text_section = text_section;
8587 gen_function(sym);
8589 break;
8590 } else {
8591 if (l == VT_CMP) {
8592 /* find parameter in function parameter list */
8593 for (sym = func_vt.ref->next; sym; sym = sym->next)
8594 if ((sym->v & ~SYM_FIELD) == v)
8595 goto found;
8596 tcc_error("declaration for parameter '%s' but no such parameter",
8597 get_tok_str(v, NULL));
8598 found:
8599 if (type.t & VT_STORAGE) /* 'register' is okay */
8600 tcc_error("storage class specified for '%s'",
8601 get_tok_str(v, NULL));
8602 if (sym->type.t != VT_VOID)
8603 tcc_error("redefinition of parameter '%s'",
8604 get_tok_str(v, NULL));
8605 convert_parameter_type(&type);
8606 sym->type = type;
8607 } else if (type.t & VT_TYPEDEF) {
8608 /* save typedefed type */
8609 /* XXX: test storage specifiers ? */
8610 sym = sym_find(v);
8611 if (sym && sym->sym_scope == local_scope) {
8612 if (!is_compatible_types(&sym->type, &type)
8613 || !(sym->type.t & VT_TYPEDEF))
8614 tcc_error("incompatible redefinition of '%s'",
8615 get_tok_str(v, NULL));
8616 sym->type = type;
8617 } else {
8618 sym = sym_push(v, &type, 0, 0);
8620 sym->a = ad.a;
8621 if ((type.t & VT_BTYPE) == VT_FUNC)
8622 merge_funcattr(&sym->type.ref->f, &ad.f);
8623 if (debug_modes)
8624 tcc_debug_typedef (tcc_state, sym);
8625 } else if ((type.t & VT_BTYPE) == VT_VOID
8626 && !(type.t & VT_EXTERN)) {
8627 tcc_error("declaration of void object");
8628 } else {
8629 r = 0;
8630 if ((type.t & VT_BTYPE) == VT_FUNC) {
8631 /* external function definition */
8632 /* specific case for func_call attribute */
8633 merge_funcattr(&type.ref->f, &ad.f);
8634 } else if (!(type.t & VT_ARRAY)) {
8635 /* not lvalue if array */
8636 r |= VT_LVAL;
8638 has_init = (tok == '=');
8639 if (has_init && (type.t & VT_VLA))
8640 tcc_error("variable length array cannot be initialized");
8641 if (((type.t & VT_EXTERN) && (!has_init || l != VT_CONST))
8642 || (type.t & VT_BTYPE) == VT_FUNC
8643 /* as with GCC, uninitialized global arrays with no size
8644 are considered extern: */
8645 || ((type.t & VT_ARRAY) && !has_init
8646 && l == VT_CONST && type.ref->c < 0)
8648 /* external variable or function */
8649 type.t |= VT_EXTERN;
8650 sym = external_sym(v, &type, r, &ad);
8651 if (ad.alias_target) {
8652 /* Aliases need to be emitted when their target
8653 symbol is emitted, even if perhaps unreferenced.
8654 We only support the case where the base is
8655 already defined, otherwise we would need
8656 deferring to emit the aliases until the end of
8657 the compile unit. */
8658 Sym *alias_target = sym_find(ad.alias_target);
8659 ElfSym *esym = elfsym(alias_target);
8660 if (!esym)
8661 tcc_error("unsupported forward __alias__ attribute");
8662 put_extern_sym2(sym, esym->st_shndx,
8663 esym->st_value, esym->st_size, 1);
8665 } else {
8666 if (l == VT_CONST || (type.t & VT_STATIC))
8667 r |= VT_CONST;
8668 else
8669 r |= VT_LOCAL;
8670 if (has_init)
8671 next();
8672 else if (l == VT_CONST)
8673 /* uninitialized global variables may be overridden */
8674 type.t |= VT_EXTERN;
8675 decl_initializer_alloc(&type, &ad, r, has_init, v, l == VT_CONST);
8678 if (tok != ',') {
8679 if (l == VT_JMP)
8680 return 1;
8681 skip(';');
8682 break;
8684 next();
8688 return 0;
8691 /* ------------------------------------------------------------------------- */
8692 #undef gjmp_addr
8693 #undef gjmp
8694 /* ------------------------------------------------------------------------- */