riscv: asm: implement `j offset`
[tinycc.git] / tccgen.c
blob9da4a028ce681951d7398d7a8970da54210c336f
1 /*
2 * TCC - Tiny C Compiler
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 #define USING_GLOBALS
22 #include "tcc.h"
24 /********************************************************/
25 /* global variables */
27 /* loc : local variable index
28 ind : output code index
29 rsym: return symbol
30 anon_sym: anonymous symbol index
32 ST_DATA int rsym, anon_sym, ind, loc;
34 ST_DATA Sym *global_stack;
35 ST_DATA Sym *local_stack;
36 ST_DATA Sym *define_stack;
37 ST_DATA Sym *global_label_stack;
38 ST_DATA Sym *local_label_stack;
40 static Sym *sym_free_first;
41 static void **sym_pools;
42 static int nb_sym_pools;
44 static Sym *all_cleanups, *pending_gotos;
45 static int local_scope;
46 ST_DATA char debug_modes;
48 ST_DATA SValue *vtop;
49 static SValue _vstack[1 + VSTACK_SIZE];
50 #define vstack (_vstack + 1)
52 ST_DATA int nocode_wanted; /* no code generation wanted */
53 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
54 #define DATA_ONLY_WANTED 0x80000000 /* ON outside of functions and for static initializers */
56 /* no code output after unconditional jumps such as with if (0) ... */
57 #define CODE_OFF_BIT 0x20000000
58 #define CODE_OFF() if(!nocode_wanted)(nocode_wanted |= CODE_OFF_BIT)
59 #define CODE_ON() (nocode_wanted &= ~CODE_OFF_BIT)
61 /* no code output when parsing sizeof()/typeof() etc. (using nocode_wanted++/--) */
62 #define NOEVAL_MASK 0x0000FFFF
63 #define NOEVAL_WANTED (nocode_wanted & NOEVAL_MASK)
65 /* no code output when parsing constant expressions */
66 #define CONST_WANTED_BIT 0x00010000
67 #define CONST_WANTED_MASK 0x0FFF0000
68 #define CONST_WANTED (nocode_wanted & CONST_WANTED_MASK)
70 ST_DATA int global_expr; /* true if compound literals must be allocated globally (used during initializers parsing */
71 ST_DATA CType func_vt; /* current function return type (used by return instruction) */
72 ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
73 ST_DATA int func_vc;
74 ST_DATA int func_ind;
75 ST_DATA const char *funcname;
76 ST_DATA CType int_type, func_old_type, char_type, char_pointer_type;
77 static CString initstr;
79 #if PTR_SIZE == 4
80 #define VT_SIZE_T (VT_INT | VT_UNSIGNED)
81 #define VT_PTRDIFF_T VT_INT
82 #elif LONG_SIZE == 4
83 #define VT_SIZE_T (VT_LLONG | VT_UNSIGNED)
84 #define VT_PTRDIFF_T VT_LLONG
85 #else
86 #define VT_SIZE_T (VT_LONG | VT_LLONG | VT_UNSIGNED)
87 #define VT_PTRDIFF_T (VT_LONG | VT_LLONG)
88 #endif
90 static struct switch_t {
91 struct case_t {
92 int64_t v1, v2;
93 int sym;
94 } **p; int n; /* list of case ranges */
95 int def_sym; /* default symbol */
96 int nocode_wanted;
97 int *bsym;
98 struct scope *scope;
99 struct switch_t *prev;
100 SValue sv;
101 } *cur_switch; /* current switch */
103 #define MAX_TEMP_LOCAL_VARIABLE_NUMBER 8
104 /*list of temporary local variables on the stack in current function. */
105 static struct temp_local_variable {
106 int location; //offset on stack. Svalue.c.i
107 short size;
108 short align;
109 } arr_temp_local_vars[MAX_TEMP_LOCAL_VARIABLE_NUMBER];
110 static int nb_temp_local_vars;
112 static struct scope {
113 struct scope *prev;
114 struct { int loc, locorig, num; } vla;
115 struct { Sym *s; int n; } cl;
116 int *bsym, *csym;
117 Sym *lstk, *llstk;
118 } *cur_scope, *loop_scope, *root_scope;
120 typedef struct {
121 Section *sec;
122 int local_offset;
123 Sym *flex_array_ref;
124 } init_params;
126 #if 1
127 #define precedence_parser
128 static void init_prec(void);
129 #endif
131 static void block(int flags);
132 #define STMT_EXPR 1
133 #define STMT_COMPOUND 2
135 static void gen_cast(CType *type);
136 static void gen_cast_s(int t);
137 static inline CType *pointed_type(CType *type);
138 static int is_compatible_types(CType *type1, CType *type2);
139 static int parse_btype(CType *type, AttributeDef *ad, int ignore_label);
140 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td);
141 static void parse_expr_type(CType *type);
142 static void init_putv(init_params *p, CType *type, unsigned long c);
143 static void decl_initializer(init_params *p, CType *type, unsigned long c, int flags);
144 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
145 static int decl(int l);
146 static void expr_eq(void);
147 static void vpush_type_size(CType *type, int *a);
148 static int is_compatible_unqualified_types(CType *type1, CType *type2);
149 static inline int64_t expr_const64(void);
150 static void vpush64(int ty, unsigned long long v);
151 static void vpush(CType *type);
152 static int gvtst(int inv, int t);
153 static void gen_inline_functions(TCCState *s);
154 static void free_inline_functions(TCCState *s);
155 static void skip_or_save_block(TokenString **str);
156 static void gv_dup(void);
157 static int get_temp_local_var(int size,int align);
158 static void clear_temp_local_var_list();
159 static void cast_error(CType *st, CType *dt);
160 static void end_switch(void);
162 /* ------------------------------------------------------------------------- */
163 /* Automagical code suppression */
165 /* Clear 'nocode_wanted' at forward label if it was used */
166 ST_FUNC void gsym(int t)
168 if (t) {
169 gsym_addr(t, ind);
170 CODE_ON();
174 /* Clear 'nocode_wanted' if current pc is a label */
175 static int gind()
177 int t = ind;
178 CODE_ON();
179 if (debug_modes)
180 tcc_tcov_block_begin(tcc_state);
181 return t;
184 /* Set 'nocode_wanted' after unconditional (backwards) jump */
/* Emit an unconditional backward jump to address 't', then suppress
   code output (anything after an unconditional jump is unreachable). */
static void gjmp_addr_acs(int t)
{
    gjmp_addr(t);
    CODE_OFF();
}
191 /* Set 'nocode_wanted' after unconditional (forwards) jump */
/* Emit an unconditional forward jump chained onto 't', suppress code
   output, and return the new head of the jump chain. */
static int gjmp_acs(int t)
{
    int chain = gjmp(t);
    CODE_OFF();
    return chain;
}
199 /* These are #undef'd at the end of this file */
200 #define gjmp_addr gjmp_addr_acs
201 #define gjmp gjmp_acs
202 /* ------------------------------------------------------------------------- */
204 ST_INLN int is_float(int t)
206 int bt = t & VT_BTYPE;
207 return bt == VT_LDOUBLE
208 || bt == VT_DOUBLE
209 || bt == VT_FLOAT
210 || bt == VT_QFLOAT;
213 static inline int is_integer_btype(int bt)
215 return bt == VT_BYTE
216 || bt == VT_BOOL
217 || bt == VT_SHORT
218 || bt == VT_INT
219 || bt == VT_LLONG;
222 static int btype_size(int bt)
224 return bt == VT_BYTE || bt == VT_BOOL ? 1 :
225 bt == VT_SHORT ? 2 :
226 bt == VT_INT ? 4 :
227 bt == VT_LLONG ? 8 :
228 bt == VT_PTR ? PTR_SIZE : 0;
231 /* returns function return register from type */
232 static int R_RET(int t)
234 if (!is_float(t))
235 return REG_IRET;
236 #ifdef TCC_TARGET_X86_64
237 if ((t & VT_BTYPE) == VT_LDOUBLE)
238 return TREG_ST0;
239 #elif defined TCC_TARGET_RISCV64
240 if ((t & VT_BTYPE) == VT_LDOUBLE)
241 return REG_IRET;
242 #endif
243 return REG_FRET;
246 /* returns 2nd function return register, if any */
247 static int R2_RET(int t)
249 t &= VT_BTYPE;
250 #if PTR_SIZE == 4
251 if (t == VT_LLONG)
252 return REG_IRE2;
253 #elif defined TCC_TARGET_X86_64
254 if (t == VT_QLONG)
255 return REG_IRE2;
256 if (t == VT_QFLOAT)
257 return REG_FRE2;
258 #elif defined TCC_TARGET_RISCV64
259 if (t == VT_LDOUBLE)
260 return REG_IRE2;
261 #endif
262 return VT_CONST;
265 /* returns true for two-word types */
266 #define USING_TWO_WORDS(t) (R2_RET(t) != VT_CONST)
268 /* put function return registers to stack value */
269 static void PUT_R_RET(SValue *sv, int t)
271 sv->r = R_RET(t), sv->r2 = R2_RET(t);
274 /* returns function return register class for type t */
275 static int RC_RET(int t)
277 return reg_classes[R_RET(t)] & ~(RC_FLOAT | RC_INT);
280 /* returns generic register class for type t */
281 static int RC_TYPE(int t)
283 if (!is_float(t))
284 return RC_INT;
285 #ifdef TCC_TARGET_X86_64
286 if ((t & VT_BTYPE) == VT_LDOUBLE)
287 return RC_ST0;
288 if ((t & VT_BTYPE) == VT_QFLOAT)
289 return RC_FRET;
290 #elif defined TCC_TARGET_RISCV64
291 if ((t & VT_BTYPE) == VT_LDOUBLE)
292 return RC_INT;
293 #endif
294 return RC_FLOAT;
297 /* returns 2nd register class corresponding to t and rc */
298 static int RC2_TYPE(int t, int rc)
300 if (!USING_TWO_WORDS(t))
301 return 0;
302 #ifdef RC_IRE2
303 if (rc == RC_IRET)
304 return RC_IRE2;
305 #endif
306 #ifdef RC_FRE2
307 if (rc == RC_FRET)
308 return RC_FRE2;
309 #endif
310 if (rc & RC_FLOAT)
311 return RC_FLOAT;
312 return RC_INT;
315 /* we use our own 'finite' function to avoid potential problems with
316 non standard math libs */
317 /* XXX: endianness dependent */
318 ST_FUNC int ieee_finite(double d)
320 int p[4];
321 memcpy(p, &d, sizeof(double));
322 return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
325 /* compiling intel long double natively */
326 #if (defined __i386__ || defined __x86_64__) \
327 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
328 # define TCC_IS_NATIVE_387
329 #endif
331 ST_FUNC void test_lvalue(void)
333 if (!(vtop->r & VT_LVAL))
334 expect("lvalue");
337 ST_FUNC void check_vstack(void)
339 if (vtop != vstack - 1)
340 tcc_error("internal compiler error: vstack leak (%d)",
341 (int)(vtop - vstack + 1));
344 /* vstack debugging aid */
345 #if 0
346 void pv (const char *lbl, int a, int b)
348 int i;
349 for (i = a; i < a + b; ++i) {
350 SValue *p = &vtop[-i];
351 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
352 lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
355 #endif
357 /* ------------------------------------------------------------------------- */
358 /* initialize vstack and types. This must be done also for tcc -E */
359 ST_FUNC void tccgen_init(TCCState *s1)
361 vtop = vstack - 1;
362 memset(vtop, 0, sizeof *vtop);
364 /* define some often used types */
365 int_type.t = VT_INT;
367 char_type.t = VT_BYTE;
368 if (s1->char_is_unsigned)
369 char_type.t |= VT_UNSIGNED;
370 char_pointer_type = char_type;
371 mk_pointer(&char_pointer_type);
373 func_old_type.t = VT_FUNC;
374 func_old_type.ref = sym_push(SYM_FIELD, &int_type, 0, 0);
375 func_old_type.ref->f.func_call = FUNC_CDECL;
376 func_old_type.ref->f.func_type = FUNC_OLD;
377 #ifdef precedence_parser
378 init_prec();
379 #endif
380 cstr_new(&initstr);
383 ST_FUNC int tccgen_compile(TCCState *s1)
385 funcname = "";
386 func_ind = -1;
387 anon_sym = SYM_FIRST_ANOM;
388 nocode_wanted = DATA_ONLY_WANTED; /* no code outside of functions */
389 debug_modes = (s1->do_debug ? 1 : 0) | s1->test_coverage << 1;
391 tcc_debug_start(s1);
392 tcc_tcov_start (s1);
393 #ifdef TCC_TARGET_ARM
394 arm_init(s1);
395 #endif
396 #ifdef INC_DEBUG
397 printf("%s: **** new file\n", file->filename);
398 #endif
399 parse_flags = PARSE_FLAG_PREPROCESS | PARSE_FLAG_TOK_NUM | PARSE_FLAG_TOK_STR;
400 next();
401 decl(VT_CONST);
402 gen_inline_functions(s1);
403 check_vstack();
404 /* end of translation unit info */
405 tcc_debug_end(s1);
406 tcc_tcov_end(s1);
407 return 0;
410 ST_FUNC void tccgen_finish(TCCState *s1)
412 tcc_debug_end(s1); /* just in case of errors: free memory */
413 free_inline_functions(s1);
414 sym_pop(&global_stack, NULL, 0);
415 sym_pop(&local_stack, NULL, 0);
416 /* free preprocessor macros */
417 free_defines(NULL);
418 /* free sym_pools */
419 dynarray_reset(&sym_pools, &nb_sym_pools);
420 cstr_free(&initstr);
421 dynarray_reset(&stk_data, &nb_stk_data);
422 while (cur_switch)
423 end_switch();
424 local_scope = 0;
425 loop_scope = NULL;
426 all_cleanups = NULL;
427 pending_gotos = NULL;
428 nb_temp_local_vars = 0;
429 global_label_stack = NULL;
430 local_label_stack = NULL;
431 cur_text_section = NULL;
432 sym_free_first = NULL;
435 /* ------------------------------------------------------------------------- */
436 ST_FUNC ElfSym *elfsym(Sym *s)
438 if (!s || !s->c)
439 return NULL;
440 return &((ElfSym *)symtab_section->data)[s->c];
443 /* apply storage attributes to Elf symbol */
444 ST_FUNC void update_storage(Sym *sym)
446 ElfSym *esym;
447 int sym_bind, old_sym_bind;
449 esym = elfsym(sym);
450 if (!esym)
451 return;
453 if (sym->a.visibility)
454 esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1))
455 | sym->a.visibility;
457 if (sym->type.t & (VT_STATIC | VT_INLINE))
458 sym_bind = STB_LOCAL;
459 else if (sym->a.weak)
460 sym_bind = STB_WEAK;
461 else
462 sym_bind = STB_GLOBAL;
463 old_sym_bind = ELFW(ST_BIND)(esym->st_info);
464 if (sym_bind != old_sym_bind) {
465 esym->st_info = ELFW(ST_INFO)(sym_bind, ELFW(ST_TYPE)(esym->st_info));
468 #ifdef TCC_TARGET_PE
469 if (sym->a.dllimport)
470 esym->st_other |= ST_PE_IMPORT;
471 if (sym->a.dllexport)
472 esym->st_other |= ST_PE_EXPORT;
473 #endif
475 #if 0
476 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
477 get_tok_str(sym->v, NULL),
478 sym_bind == STB_WEAK ? 'w' : sym_bind == STB_LOCAL ? 'l' : 'g',
479 sym->a.visibility,
480 sym->a.dllexport,
481 sym->a.dllimport
483 #endif
486 /* ------------------------------------------------------------------------- */
487 /* update sym->c so that it points to an external symbol in section
488 'section' with value 'value' */
490 ST_FUNC void put_extern_sym2(Sym *sym, int sh_num,
491 addr_t value, unsigned long size,
492 int can_add_underscore)
494 int sym_type, sym_bind, info, other, t;
495 ElfSym *esym;
496 const char *name;
497 char buf1[256];
499 if (!sym->c) {
500 name = get_tok_str(sym->v, NULL);
501 t = sym->type.t;
502 if ((t & VT_BTYPE) == VT_FUNC) {
503 sym_type = STT_FUNC;
504 } else if ((t & VT_BTYPE) == VT_VOID) {
505 sym_type = STT_NOTYPE;
506 if ((t & (VT_BTYPE|VT_ASM_FUNC)) == VT_ASM_FUNC)
507 sym_type = STT_FUNC;
508 } else {
509 sym_type = STT_OBJECT;
511 if (t & (VT_STATIC | VT_INLINE))
512 sym_bind = STB_LOCAL;
513 else
514 sym_bind = STB_GLOBAL;
515 other = 0;
517 #ifdef TCC_TARGET_PE
518 if (sym_type == STT_FUNC && sym->type.ref) {
519 Sym *ref = sym->type.ref;
520 if (ref->a.nodecorate) {
521 can_add_underscore = 0;
523 if (ref->f.func_call == FUNC_STDCALL && can_add_underscore) {
524 sprintf(buf1, "_%s@%d", name, ref->f.func_args * PTR_SIZE);
525 name = buf1;
526 other |= ST_PE_STDCALL;
527 can_add_underscore = 0;
530 #endif
532 if (sym->asm_label) {
533 name = get_tok_str(sym->asm_label, NULL);
534 can_add_underscore = 0;
537 if (tcc_state->leading_underscore && can_add_underscore) {
538 buf1[0] = '_';
539 pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
540 name = buf1;
543 info = ELFW(ST_INFO)(sym_bind, sym_type);
544 sym->c = put_elf_sym(symtab_section, value, size, info, other, sh_num, name);
546 if (debug_modes)
547 tcc_debug_extern_sym(tcc_state, sym, sh_num, sym_bind, sym_type);
549 } else {
550 esym = elfsym(sym);
551 esym->st_value = value;
552 esym->st_size = size;
553 esym->st_shndx = sh_num;
555 update_storage(sym);
558 ST_FUNC void put_extern_sym(Sym *sym, Section *s, addr_t value, unsigned long size)
560 if (nocode_wanted && (NODATA_WANTED || (s && s == cur_text_section)))
561 return;
562 put_extern_sym2(sym, s ? s->sh_num : SHN_UNDEF, value, size, 1);
565 /* add a new relocation entry to symbol 'sym' in section 's' */
566 ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
567 addr_t addend)
569 int c = 0;
571 if (nocode_wanted && s == cur_text_section)
572 return;
574 if (sym) {
575 if (0 == sym->c)
576 put_extern_sym(sym, NULL, 0, 0);
577 c = sym->c;
580 /* now we can add ELF relocation info */
581 put_elf_reloca(symtab_section, s, offset, type, c, addend);
584 #if PTR_SIZE == 4
585 ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
587 greloca(s, sym, offset, type, 0);
589 #endif
591 /* ------------------------------------------------------------------------- */
592 /* symbol allocator */
593 static Sym *__sym_malloc(void)
595 Sym *sym_pool, *sym, *last_sym;
596 int i;
598 sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
599 dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);
601 last_sym = sym_free_first;
602 sym = sym_pool;
603 for(i = 0; i < SYM_POOL_NB; i++) {
604 sym->next = last_sym;
605 last_sym = sym;
606 sym++;
608 sym_free_first = last_sym;
609 return last_sym;
612 static inline Sym *sym_malloc(void)
614 Sym *sym;
615 #ifndef SYM_DEBUG
616 sym = sym_free_first;
617 if (!sym)
618 sym = __sym_malloc();
619 sym_free_first = sym->next;
620 return sym;
621 #else
622 sym = tcc_malloc(sizeof(Sym));
623 return sym;
624 #endif
627 ST_INLN void sym_free(Sym *sym)
629 #ifndef SYM_DEBUG
630 sym->next = sym_free_first;
631 sym_free_first = sym;
632 #else
633 tcc_free(sym);
634 #endif
637 /* push, without hashing */
638 ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, int c)
640 Sym *s;
642 s = sym_malloc();
643 memset(s, 0, sizeof *s);
644 s->v = v;
645 s->type.t = t;
646 s->c = c;
647 /* add in stack */
648 s->prev = *ps;
649 *ps = s;
650 return s;
653 /* find a symbol and return its associated structure. 's' is the top
654 of the symbol stack */
655 ST_FUNC Sym *sym_find2(Sym *s, int v)
657 while (s) {
658 if (s->v == v)
659 return s;
660 else if (s->v == -1)
661 return NULL;
662 s = s->prev;
664 return NULL;
667 /* structure lookup */
668 ST_INLN Sym *struct_find(int v)
670 v -= TOK_IDENT;
671 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
672 return NULL;
673 return table_ident[v]->sym_struct;
676 /* find an identifier */
677 ST_INLN Sym *sym_find(int v)
679 v -= TOK_IDENT;
680 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
681 return NULL;
682 return table_ident[v]->sym_identifier;
685 static int sym_scope(Sym *s)
687 if (IS_ENUM_VAL (s->type.t))
688 return s->type.ref->sym_scope;
689 else
690 return s->sym_scope;
693 /* push a given symbol on the symbol stack */
694 ST_FUNC Sym *sym_push(int v, CType *type, int r, int c)
696 Sym *s, **ps;
697 TokenSym *ts;
699 if (local_stack)
700 ps = &local_stack;
701 else
702 ps = &global_stack;
703 s = sym_push2(ps, v, type->t, c);
704 s->type.ref = type->ref;
705 s->r = r;
706 /* don't record fields or anonymous symbols */
707 /* XXX: simplify */
708 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
709 /* record symbol in token array */
710 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
711 if (v & SYM_STRUCT)
712 ps = &ts->sym_struct;
713 else
714 ps = &ts->sym_identifier;
715 s->prev_tok = *ps;
716 *ps = s;
717 s->sym_scope = local_scope;
718 if (s->prev_tok && sym_scope(s->prev_tok) == s->sym_scope)
719 tcc_error("redeclaration of '%s'",
720 get_tok_str(v & ~SYM_STRUCT, NULL));
722 return s;
725 /* push a global identifier */
726 ST_FUNC Sym *global_identifier_push(int v, int t, int c)
728 Sym *s, **ps;
729 s = sym_push2(&global_stack, v, t, c);
730 s->r = VT_CONST | VT_SYM;
731 /* don't record anonymous symbol */
732 if (v < SYM_FIRST_ANOM) {
733 ps = &table_ident[v - TOK_IDENT]->sym_identifier;
734 /* modify the top most local identifier, so that sym_identifier will
735 point to 's' when popped; happens when called from inline asm */
736 while (*ps != NULL && (*ps)->sym_scope)
737 ps = &(*ps)->prev_tok;
738 s->prev_tok = *ps;
739 *ps = s;
741 return s;
744 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
745 pop them yet from the list, but do remove them from the token array. */
746 ST_FUNC void sym_pop(Sym **ptop, Sym *b, int keep)
748 Sym *s, *ss, **ps;
749 TokenSym *ts;
750 int v;
752 s = *ptop;
753 while(s != b) {
754 ss = s->prev;
755 v = s->v;
756 /* remove symbol in token array */
757 /* XXX: simplify */
758 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
759 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
760 if (v & SYM_STRUCT)
761 ps = &ts->sym_struct;
762 else
763 ps = &ts->sym_identifier;
764 *ps = s->prev_tok;
766 if (!keep)
767 sym_free(s);
768 s = ss;
770 if (!keep)
771 *ptop = b;
774 /* label lookup */
775 ST_FUNC Sym *label_find(int v)
777 v -= TOK_IDENT;
778 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
779 return NULL;
780 return table_ident[v]->sym_label;
783 ST_FUNC Sym *label_push(Sym **ptop, int v, int flags)
785 Sym *s, **ps;
786 s = sym_push2(ptop, v, VT_STATIC, 0);
787 s->r = flags;
788 ps = &table_ident[v - TOK_IDENT]->sym_label;
789 if (ptop == &global_label_stack) {
790 /* modify the top most local identifier, so that
791 sym_identifier will point to 's' when popped */
792 while (*ps != NULL)
793 ps = &(*ps)->prev_tok;
795 s->prev_tok = *ps;
796 *ps = s;
797 return s;
800 /* pop labels until element last is reached. Look if any labels are
801 undefined. Define symbols if '&&label' was used. */
802 ST_FUNC void label_pop(Sym **ptop, Sym *slast, int keep)
804 Sym *s, *s1;
805 for(s = *ptop; s != slast; s = s1) {
806 s1 = s->prev;
807 if (s->r == LABEL_DECLARED) {
808 tcc_warning_c(warn_all)("label '%s' declared but not used", get_tok_str(s->v, NULL));
809 } else if (s->r == LABEL_FORWARD) {
810 tcc_error("label '%s' used but not defined",
811 get_tok_str(s->v, NULL));
812 } else {
813 if (s->c) {
814 /* define corresponding symbol. A size of
815 1 is put. */
816 put_extern_sym(s, cur_text_section, s->jnext, 1);
819 /* remove label */
820 if (s->r != LABEL_GONE)
821 table_ident[s->v - TOK_IDENT]->sym_label = s->prev_tok;
822 if (!keep)
823 sym_free(s);
824 else
825 s->r = LABEL_GONE;
827 if (!keep)
828 *ptop = slast;
831 /* ------------------------------------------------------------------------- */
832 static void vcheck_cmp(void)
834 /* cannot let cpu flags if other instruction are generated. Also
835 avoid leaving VT_JMP anywhere except on the top of the stack
836 because it would complicate the code generator.
838 Don't do this when nocode_wanted. vtop might come from
839 !nocode_wanted regions (see 88_codeopt.c) and transforming
840 it to a register without actually generating code is wrong
841 as their value might still be used for real. All values
842 we push under nocode_wanted will eventually be popped
843 again, so that the VT_CMP/VT_JMP value will be in vtop
844 when code is unsuppressed again. */
846 /* However if it's just automatic suppression via CODE_OFF/ON()
847 then it seems that we better let things work undisturbed.
848 How can it work at all under nocode_wanted? Well, gv() will
849 actually clear it at the gsym() in load()/VT_JMP in the
850 generator backends */
852 if (vtop->r == VT_CMP && 0 == (nocode_wanted & ~CODE_OFF_BIT))
853 gv(RC_INT);
856 static void vsetc(CType *type, int r, CValue *vc)
858 if (vtop >= vstack + (VSTACK_SIZE - 1))
859 tcc_error("memory full (vstack)");
860 vcheck_cmp();
861 vtop++;
862 vtop->type = *type;
863 vtop->r = r;
864 vtop->r2 = VT_CONST;
865 vtop->c = *vc;
866 vtop->sym = NULL;
869 ST_FUNC void vswap(void)
871 SValue tmp;
873 vcheck_cmp();
874 tmp = vtop[0];
875 vtop[0] = vtop[-1];
876 vtop[-1] = tmp;
879 /* pop stack value */
880 ST_FUNC void vpop(void)
882 int v;
883 v = vtop->r & VT_VALMASK;
884 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
885 /* for x86, we need to pop the FP stack */
886 if (v == TREG_ST0) {
887 o(0xd8dd); /* fstp %st(0) */
888 } else
889 #endif
890 if (v == VT_CMP) {
891 /* need to put correct jump if && or || without test */
892 gsym(vtop->jtrue);
893 gsym(vtop->jfalse);
895 vtop--;
898 /* push constant of type "type" with useless value */
899 static void vpush(CType *type)
901 vset(type, VT_CONST, 0);
904 /* push arbitrary 64bit constant */
905 static void vpush64(int ty, unsigned long long v)
907 CValue cval;
908 CType ctype;
909 ctype.t = ty;
910 ctype.ref = NULL;
911 cval.i = v;
912 vsetc(&ctype, VT_CONST, &cval);
915 /* push integer constant */
916 ST_FUNC void vpushi(int v)
918 vpush64(VT_INT, v);
921 /* push a pointer sized constant */
922 static void vpushs(addr_t v)
924 vpush64(VT_SIZE_T, v);
927 /* push long long constant */
928 static inline void vpushll(long long v)
930 vpush64(VT_LLONG, v);
933 ST_FUNC void vset(CType *type, int r, int v)
935 CValue cval;
936 cval.i = v;
937 vsetc(type, r, &cval);
940 static void vseti(int r, int v)
942 CType type;
943 type.t = VT_INT;
944 type.ref = NULL;
945 vset(&type, r, v);
948 ST_FUNC void vpushv(SValue *v)
950 if (vtop >= vstack + (VSTACK_SIZE - 1))
951 tcc_error("memory full (vstack)");
952 vtop++;
953 *vtop = *v;
956 static void vdup(void)
958 vpushv(vtop);
961 /* rotate n first stack elements to the bottom
962 I1 ... In -> I2 ... In I1 [top is right]
964 ST_FUNC void vrotb(int n)
966 int i;
967 SValue tmp;
969 vcheck_cmp();
970 tmp = vtop[-n + 1];
971 for(i=-n+1;i!=0;i++)
972 vtop[i] = vtop[i+1];
973 vtop[0] = tmp;
976 /* rotate the n elements before entry e towards the top
977 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
979 ST_FUNC void vrote(SValue *e, int n)
981 int i;
982 SValue tmp;
984 vcheck_cmp();
985 tmp = *e;
986 for(i = 0;i < n - 1; i++)
987 e[-i] = e[-i - 1];
988 e[-n + 1] = tmp;
991 /* rotate n first stack elements to the top
992 I1 ... In -> In I1 ... I(n-1) [top is right]
994 ST_FUNC void vrott(int n)
996 vrote(vtop, n);
999 /* ------------------------------------------------------------------------- */
1000 /* vtop->r = VT_CMP means CPU-flags have been set from comparison or test. */
1002 /* called from generators to set the result from relational ops */
1003 ST_FUNC void vset_VT_CMP(int op)
1005 vtop->r = VT_CMP;
1006 vtop->cmp_op = op;
1007 vtop->jfalse = 0;
1008 vtop->jtrue = 0;
1011 /* called once before asking generators to load VT_CMP to a register */
1012 static void vset_VT_JMP(void)
1014 int op = vtop->cmp_op;
1016 if (vtop->jtrue || vtop->jfalse) {
1017 int origt = vtop->type.t;
1018 /* we need to jump to 'mov $0,%R' or 'mov $1,%R' */
1019 int inv = op & (op < 2); /* small optimization */
1020 vseti(VT_JMP+inv, gvtst(inv, 0));
1021 vtop->type.t |= origt & (VT_UNSIGNED | VT_DEFSIGN);
1022 } else {
1023 /* otherwise convert flags (rsp. 0/1) to register */
1024 vtop->c.i = op;
1025 if (op < 2) /* doesn't seem to happen */
1026 vtop->r = VT_CONST;
1030 /* Set CPU Flags, doesn't yet jump */
1031 static void gvtst_set(int inv, int t)
1033 int *p;
1035 if (vtop->r != VT_CMP) {
1036 vpushi(0);
1037 gen_op(TOK_NE);
1038 if (vtop->r != VT_CMP) /* must be VT_CONST then */
1039 vset_VT_CMP(vtop->c.i != 0);
1042 p = inv ? &vtop->jfalse : &vtop->jtrue;
1043 *p = gjmp_append(*p, t);
1046 /* Generate value test
1048 * Generate a test for any value (jump, comparison and integers) */
1049 static int gvtst(int inv, int t)
1051 int op, x, u;
1053 gvtst_set(inv, t);
1054 t = vtop->jtrue, u = vtop->jfalse;
1055 if (inv)
1056 x = u, u = t, t = x;
1057 op = vtop->cmp_op;
1059 /* jump to the wanted target */
1060 if (op > 1)
1061 t = gjmp_cond(op ^ inv, t);
1062 else if (op != inv)
1063 t = gjmp(t);
1064 /* resolve complementary jumps to here */
1065 gsym(u);
1067 vtop--;
1068 return t;
1071 /* generate a zero or nozero test */
1072 static void gen_test_zero(int op)
1074 if (vtop->r == VT_CMP) {
1075 int j;
1076 if (op == TOK_EQ) {
1077 j = vtop->jfalse;
1078 vtop->jfalse = vtop->jtrue;
1079 vtop->jtrue = j;
1080 vtop->cmp_op ^= 1;
1082 } else {
1083 vpushi(0);
1084 gen_op(op);
1088 /* ------------------------------------------------------------------------- */
1089 /* push a symbol value of TYPE */
1090 ST_FUNC void vpushsym(CType *type, Sym *sym)
1092 CValue cval;
1093 cval.i = 0;
1094 vsetc(type, VT_CONST | VT_SYM, &cval);
1095 vtop->sym = sym;
1098 /* Return a static symbol pointing to a section */
1099 ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
1101 int v;
1102 Sym *sym;
1104 v = anon_sym++;
1105 sym = sym_push(v, type, VT_CONST | VT_SYM, 0);
1106 sym->type.t |= VT_STATIC;
1107 put_extern_sym(sym, sec, offset, size);
1108 return sym;
1111 /* push a reference to a section offset by adding a dummy symbol */
1112 static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
1114 vpushsym(type, get_sym_ref(type, sec, offset, size));
1117 /* define a new external reference to a symbol 'v' of type 'u' */
1118 ST_FUNC Sym *external_global_sym(int v, CType *type)
1120 Sym *s;
1122 s = sym_find(v);
1123 if (!s) {
1124 /* push forward reference */
1125 s = global_identifier_push(v, type->t | VT_EXTERN, 0);
1126 s->type.ref = type->ref;
1127 } else if (IS_ASM_SYM(s)) {
1128 s->type.t = type->t | (s->type.t & VT_EXTERN);
1129 s->type.ref = type->ref;
1130 update_storage(s);
1132 return s;
1135 /* create an external reference with no specific type similar to asm labels.
1136 This avoids type conflicts if the symbol is used from C too */
1137 ST_FUNC Sym *external_helper_sym(int v)
1139 CType ct = { VT_ASM_FUNC, NULL };
1140 return external_global_sym(v, &ct);
1143 /* push a reference to an helper function (such as memmove) */
1144 ST_FUNC void vpush_helper_func(int v)
1146 vpushsym(&func_old_type, external_helper_sym(v));
1149 /* Merge symbol attributes. */
1150 static void merge_symattr(struct SymAttr *sa, struct SymAttr *sa1)
1152 if (sa1->aligned && !sa->aligned)
1153 sa->aligned = sa1->aligned;
1154 sa->packed |= sa1->packed;
1155 sa->weak |= sa1->weak;
1156 sa->nodebug |= sa1->nodebug;
1157 if (sa1->visibility != STV_DEFAULT) {
1158 int vis = sa->visibility;
1159 if (vis == STV_DEFAULT
1160 || vis > sa1->visibility)
1161 vis = sa1->visibility;
1162 sa->visibility = vis;
1164 sa->dllexport |= sa1->dllexport;
1165 sa->nodecorate |= sa1->nodecorate;
1166 sa->dllimport |= sa1->dllimport;
1169 /* Merge function attributes. */
1170 static void merge_funcattr(struct FuncAttr *fa, struct FuncAttr *fa1)
1172 if (fa1->func_call && !fa->func_call)
1173 fa->func_call = fa1->func_call;
1174 if (fa1->func_type && !fa->func_type)
1175 fa->func_type = fa1->func_type;
1176 if (fa1->func_args && !fa->func_args)
1177 fa->func_args = fa1->func_args;
1178 if (fa1->func_noreturn)
1179 fa->func_noreturn = 1;
1180 if (fa1->func_ctor)
1181 fa->func_ctor = 1;
1182 if (fa1->func_dtor)
1183 fa->func_dtor = 1;
1186 /* Merge attributes. */
1187 static void merge_attr(AttributeDef *ad, AttributeDef *ad1)
1189 merge_symattr(&ad->a, &ad1->a);
1190 merge_funcattr(&ad->f, &ad1->f);
1192 if (ad1->section)
1193 ad->section = ad1->section;
1194 if (ad1->alias_target)
1195 ad->alias_target = ad1->alias_target;
1196 if (ad1->asm_label)
1197 ad->asm_label = ad1->asm_label;
1198 if (ad1->attr_mode)
1199 ad->attr_mode = ad1->attr_mode;
/* Merge some type attributes.  Called when symbol 'sym' (a previous
   declaration) is re-declared with 'type'; diagnoses invalid
   redefinitions and folds the new type information into 'sym'. */
static void patch_type(Sym *sym, CType *type)
{
    /* a non-extern redeclaration completes the symbol: the previous
       declaration must then have been extern, otherwise it is a real
       redefinition error */
    if (!(type->t & VT_EXTERN) || IS_ENUM_VAL(sym->type.t)) {
        if (!(sym->type.t & VT_EXTERN))
            tcc_error("redefinition of '%s'", get_tok_str(sym->v, NULL));
        sym->type.t &= ~VT_EXTERN;
    }

    if (IS_ASM_SYM(sym)) {
        /* stay static if both are static */
        sym->type.t = type->t & (sym->type.t | ~VT_STATIC);
        sym->type.ref = type->ref;
        /* non-function, non-array asm symbols are addressable objects */
        if ((type->t & VT_BTYPE) != VT_FUNC && !(type->t & VT_ARRAY))
            sym->r |= VT_LVAL;
    }

    if (!is_compatible_types(&sym->type, type)) {
        tcc_error("incompatible types for redefinition of '%s'",
                  get_tok_str(sym->v, NULL));

    } else if ((sym->type.t & VT_BTYPE) == VT_FUNC) {
        int static_proto = sym->type.t & VT_STATIC;
        /* warn if static follows non-static function declaration */
        if ((type->t & VT_STATIC) && !static_proto
            /* XXX this test for inline shouldn't be here.  Until we
               implement gnu-inline mode again it silences a warning for
               mingw caused by our workarounds. */
            && !((type->t | sym->type.t) & VT_INLINE))
            tcc_warning("static storage ignored for redefinition of '%s'",
                get_tok_str(sym->v, NULL));

        /* set 'inline' if both agree or if one has static */
        if ((type->t | sym->type.t) & VT_INLINE) {
            if (!((type->t ^ sym->type.t) & VT_INLINE)
             || ((type->t | sym->type.t) & VT_STATIC))
                static_proto |= VT_INLINE;
        }

        if (0 == (type->t & VT_EXTERN)) {
            struct FuncAttr f = sym->type.ref->f;
            /* put complete type, use static from prototype */
            sym->type.t = (type->t & ~(VT_STATIC|VT_INLINE)) | static_proto;
            sym->type.ref = type->ref;
            merge_funcattr(&sym->type.ref->f, &f);
        } else {
            /* keep VT_INLINE only when static_proto carries it:
               the mask is (~VT_INLINE | static_proto) by precedence */
            sym->type.t &= ~VT_INLINE | static_proto;
        }

        /* an old-style (K&R) declaration is superseded by a prototype */
        if (sym->type.ref->f.func_type == FUNC_OLD
            && type->ref->f.func_type != FUNC_OLD) {
            sym->type.ref = type->ref;
        }

    } else {
        if ((sym->type.t & VT_ARRAY) && type->ref->c >= 0) {
            /* set array size if it was omitted in extern declaration */
            sym->type.ref->c = type->ref->c;
        }
        if ((type->t ^ sym->type.t) & VT_STATIC)
            tcc_warning("storage mismatch for redefinition of '%s'",
                get_tok_str(sym->v, NULL));
    }
}
/* Merge some storage attributes.  Applies the attribute definition 'ad'
   (and optionally the redeclared type) to existing symbol 'sym', then
   refreshes its backend storage via update_storage(). */
static void patch_storage(Sym *sym, AttributeDef *ad, CType *type)
{
    if (type)
        patch_type(sym, type);

#ifdef TCC_TARGET_PE
    /* dllimport linkage must agree across declarations on Windows */
    if (sym->a.dllimport != ad->a.dllimport)
        tcc_error("incompatible dll linkage for redefinition of '%s'",
            get_tok_str(sym->v, NULL));
#endif
    merge_symattr(&sym->a, &ad->a);
    /* an explicit asm("...") label overrides the default symbol name */
    if (ad->asm_label)
        sym->asm_label = ad->asm_label;
    update_storage(sym);
}
1284 /* copy sym to other stack */
1285 static Sym *sym_copy(Sym *s0, Sym **ps)
1287 Sym *s;
1288 s = sym_malloc(), *s = *s0;
1289 s->prev = *ps, *ps = s;
1290 if (s->v < SYM_FIRST_ANOM) {
1291 ps = &table_ident[s->v - TOK_IDENT]->sym_identifier;
1292 s->prev_tok = *ps, *ps = s;
1294 return s;
/* copy s->type.ref to stack 'ps' for VT_FUNC and VT_PTR */
static void sym_copy_ref(Sym *s, Sym **ps)
{
    int bt = s->type.t & VT_BTYPE;
    /* only types whose ref chain may belong to a scoped stack need
       duplication: function parameter lists, pointer targets, and
       locally-scoped struct definitions */
    if (bt == VT_FUNC || bt == VT_PTR || (bt == VT_STRUCT && s->sym_scope)) {
        Sym **sp = &s->type.ref;
        /* detach the original chain, then rebuild it link by link from
           copies pushed onto 'ps'; recurse so nested types (e.g. a
           pointer parameter's target) are copied as well */
        for (s = *sp, *sp = NULL; s; s = s->next) {
            Sym *s2 = sym_copy(s, ps);
            sp = &(*sp = s2)->next;
            sym_copy_ref(s2, ps);
        }
    }
}
/* define a new external reference to a symbol 'v'.
   'r' holds extra value-location flags, 'ad' the parsed attributes.
   Returns the (possibly newly created, possibly local-stack copy of
   the) symbol. */
static Sym *external_sym(int v, CType *type, int r, AttributeDef *ad)
{
    Sym *s;

    /* look for global symbol */
    s = sym_find(v);
    while (s && s->sym_scope)
        s = s->prev_tok;

    if (!s) {
        /* push forward reference */
        s = global_identifier_push(v, type->t, 0);
        s->r |= r;
        s->a = ad->a;
        s->asm_label = ad->asm_label;
        s->type.ref = type->ref;
        /* copy type to the global stack: the ref chain was allocated on
           the local stack and would die with the current scope */
        if (local_stack)
            sym_copy_ref(s, &global_stack);
    } else {
        /* already known: merge attributes/type into the existing symbol */
        patch_storage(s, ad, type);
    }
    /* push variables on local_stack if any */
    if (local_stack && (s->type.t & VT_BTYPE) != VT_FUNC)
        s = sym_copy(s, &local_stack);
    return s;
}
1340 /* save registers up to (vtop - n) stack entry */
1341 ST_FUNC void save_regs(int n)
1343 SValue *p, *p1;
1344 for(p = vstack, p1 = vtop - n; p <= p1; p++)
1345 save_reg(p->r);
/* save r to the memory stack, and mark it as being free */
ST_FUNC void save_reg(int r)
{
    /* convenience wrapper: n == 0 scans the whole value stack */
    save_reg_upstack(r, 0);
}
/* save r to the memory stack, and mark it as being free,
   if seen up to (vtop - n) stack entry */
ST_FUNC void save_reg_upstack(int r, int n)
{
    int l, size, align, bt;
    SValue *p, *p1, sv;

    if ((r &= VT_VALMASK) >= VT_CONST)
        return;
    if (nocode_wanted)
        return;
    /* 'l' is the stack slot assigned on the first spill; it is reused
       for every stack entry that referenced the register */
    l = 0;
    for(p = vstack, p1 = vtop - n; p <= p1; p++) {
        if ((p->r & VT_VALMASK) == r || p->r2 == r) {
            /* must save value on stack if not already done */
            if (!l) {
                bt = p->type.t & VT_BTYPE;
                if (bt == VT_VOID)
                    continue;
                /* lvalues hold an address; functions decay to pointer */
                if ((p->r & VT_LVAL) || bt == VT_FUNC)
                    bt = VT_PTR;
                sv.type.t = bt;
                size = type_size(&sv.type, &align);
                l = get_temp_local_var(size,align);
                sv.r = VT_LOCAL | VT_LVAL;
                sv.c.i = l;
                store(p->r & VT_VALMASK, &sv);
#if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
                /* x86 specific: need to pop fp register ST0 if saved */
                if (r == TREG_ST0) {
                    o(0xd8dd); /* fstp %st(0) */
                }
#endif
                /* special long long case */
                if (p->r2 < VT_CONST && USING_TWO_WORDS(bt)) {
                    sv.c.i += PTR_SIZE;
                    store(p->r2, &sv);
                }
            }
            /* mark that stack entry as being saved on the stack */
            if (p->r & VT_LVAL) {
                /* also clear the bounded flag because the
                   relocation address of the function was stored in
                   p->c.i */
                p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
            } else {
                p->r = VT_LVAL | VT_LOCAL;
            }
            p->sym = NULL;
            p->r2 = VT_CONST;
            p->c.i = l;
        }
    }
}
#ifdef TCC_TARGET_ARM
/* find a register of class 'rc2' with at most one reference on stack.
 * If none, call get_reg(rc).  (ARM only: used for register pairing.) */
ST_FUNC int get_reg_ex(int rc, int rc2)
{
    int r;
    SValue *p;

    for(r=0;r<NB_REGS;r++) {
        if (reg_classes[r] & rc2) {
            int n;
            /* count how many stack entries reference register r */
            n=0;
            for(p = vstack; p <= vtop; p++) {
                if ((p->r & VT_VALMASK) == r ||
                    p->r2 == r)
                    n++;
            }
            if (n <= 1)
                return r;
        }
    }
    /* fall back to a regular (possibly spilling) allocation */
    return get_reg(rc);
}
#endif
/* find a free register of class 'rc'. If none, save one register */
ST_FUNC int get_reg(int rc)
{
    int r;
    SValue *p;

    /* find a free register */
    for(r=0;r<NB_REGS;r++) {
        if (reg_classes[r] & rc) {
            /* with code generation off, any register of the class does */
            if (nocode_wanted)
                return r;
            for(p=vstack;p<=vtop;p++) {
                if ((p->r & VT_VALMASK) == r ||
                    p->r2 == r)
                    goto notfound;
            }
            return r;
        }
    notfound: ;
    }

    /* no register left : free the first one on the stack (VERY
       IMPORTANT to start from the bottom to ensure that we don't
       spill registers used in gen_opi()) */
    for(p=vstack;p<=vtop;p++) {
        /* look at second register (if long long) */
        r = p->r2;
        if (r < VT_CONST && (reg_classes[r] & rc))
            goto save_found;
        r = p->r & VT_VALMASK;
        if (r < VT_CONST && (reg_classes[r] & rc)) {
        save_found:
            save_reg(r);
            return r;
        }
    }
    /* Should never comes here */
    return -1;
}
/* find a free temporary local variable (return the offset on stack) match the size and align. If none, add new temporary stack variable*/
static int get_temp_local_var(int size,int align){
    int i;
    struct temp_local_variable *temp_var;
    int found_var;
    SValue *p;
    int r;
    char free;   /* note: shadows the stdlib name, harmless here */
    char found;
    found=0;
    /* first, try to recycle an existing slot of matching size/align */
    for(i=0;i<nb_temp_local_vars;i++){
        temp_var=&arr_temp_local_vars[i];
        if(temp_var->size<size||align!=temp_var->align){
            continue;
        }
        /*check if temp_var is free*/
        free=1;
        for(p=vstack;p<=vtop;p++) {
            r=p->r&VT_VALMASK;
            if(r==VT_LOCAL||r==VT_LLOCAL){
                if(p->c.i==temp_var->location){
                    free=0;
                    break;
                }
            }
        }
        if(free){
            found_var=temp_var->location;
            found=1;
            break;
        }
    }
    if(!found){
        /* no reusable slot: carve a new one out of the frame ('loc'
           grows downward, hence the subtraction and align mask) */
        loc = (loc - size) & -align;
        if(nb_temp_local_vars<MAX_TEMP_LOCAL_VARIABLE_NUMBER){
            /* remember the slot for later reuse (pool may be full,
               in which case the slot is simply not recorded) */
            temp_var=&arr_temp_local_vars[i];
            temp_var->location=loc;
            temp_var->size=size;
            temp_var->align=align;
            nb_temp_local_vars++;
        }
        found_var=loc;
    }
    return found_var;
}
1520 static void clear_temp_local_var_list(){
1521 nb_temp_local_vars=0;
1524 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1525 if needed */
1526 static void move_reg(int r, int s, int t)
1528 SValue sv;
1530 if (r != s) {
1531 save_reg(r);
1532 sv.type.t = t;
1533 sv.type.ref = NULL;
1534 sv.r = s;
1535 sv.c.i = 0;
1536 load(r, &sv);
/* get address of vtop (vtop MUST BE an lvalue) */
ST_FUNC void gaddrof(void)
{
    /* dropping VT_LVAL turns "value stored at address" into "the
       address itself" */
    vtop->r &= ~VT_LVAL;
    /* tricky: if saved lvalue, then we can go back to lvalue */
    if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
        vtop->r = (vtop->r & ~VT_VALMASK) | VT_LOCAL | VT_LVAL;
}
#ifdef CONFIG_TCC_BCHECK
/* generate a bounded pointer addition: replaces ptr+offset on the
   value stack with a call to __bound_ptr_add and records where the
   call's relocation lives so the deref check can patch it later */
static void gen_bounded_ptr_add(void)
{
    int save = (vtop[-1].r & VT_VALMASK) == VT_LOCAL;
    if (save) {
        /* keep a copy of the local so it survives the call */
        vpushv(&vtop[-1]);
        vrott(3);
    }
    vpush_helper_func(TOK___bound_ptr_add);
    vrott(3);
    gfunc_call(2);
    vtop -= save;
    vpushi(0);
    /* returned pointer is in REG_IRET */
    vtop->r = REG_IRET | VT_BOUNDED;
    if (nocode_wanted)
        return;
    /* relocation offset of the bounding function call point */
    vtop->c.i = (cur_text_section->reloc->data_offset - sizeof(ElfW_Rel));
}
/* patch pointer addition in vtop so that pointer dereferencing is
   also tested */
static void gen_bounded_ptr_deref(void)
{
    addr_t func;
    int size, align;
    ElfW_Rel *rel;
    Sym *sym;

    if (nocode_wanted)
        return;

    /* pick the size-specific checking helper */
    size = type_size(&vtop->type, &align);
    switch(size) {
    case 1: func = TOK___bound_ptr_indir1; break;
    case 2: func = TOK___bound_ptr_indir2; break;
    case 4: func = TOK___bound_ptr_indir4; break;
    case 8: func = TOK___bound_ptr_indir8; break;
    case 12: func = TOK___bound_ptr_indir12; break;
    case 16: func = TOK___bound_ptr_indir16; break;
    default:
        /* may happen with struct member access */
        return;
    }
    sym = external_helper_sym(func);
    if (!sym->c)
        put_extern_sym(sym, NULL, 0, 0);
    /* patch relocation: redirect the earlier __bound_ptr_add call site
       (recorded in vtop->c.i by gen_bounded_ptr_add) to the deref helper */
    /* XXX: find a better solution ? */
    rel = (ElfW_Rel *)(cur_text_section->reloc->data + vtop->c.i);
    rel->r_info = ELFW(R_INFO)(sym->c, ELFW(R_TYPE)(rel->r_info));
}
/* generate lvalue bound code */
static void gbound(void)
{
    CType type1;

    vtop->r &= ~VT_MUSTBOUND;
    /* if lvalue, then use checking code before dereferencing */
    if (vtop->r & VT_LVAL) {
        /* if not VT_BOUNDED value, then make one */
        if (!(vtop->r & VT_BOUNDED)) {
            /* must save type because we must set it to int to get pointer */
            type1 = vtop->type;
            vtop->type.t = VT_PTR;
            gaddrof();
            /* bound-check address + 0 to obtain a bounded pointer */
            vpushi(0);
            gen_bounded_ptr_add();
            vtop->r |= VT_LVAL;
            vtop->type = type1;
        }
        /* then check for dereferencing */
        gen_bounded_ptr_deref();
    }
}
/* we need to call __bound_ptr_add before we start to load function
   args into registers */
ST_FUNC void gbound_args(int nb_args)
{
    int i, v;
    SValue *sv;

    /* bound-check every argument still flagged VT_MUSTBOUND */
    for (i = 1; i <= nb_args; ++i)
        if (vtop[1 - i].r & VT_MUSTBOUND) {
            vrotb(i);
            gbound();
            vrott(i);
        }

    /* special-case some well-known callees (sv = the called symbol) */
    sv = vtop - nb_args;
    if (sv->r & VT_SYM) {
        v = sv->sym->v;
        if (v == TOK_setjmp
          || v == TOK__setjmp
#ifndef TCC_TARGET_PE
          || v == TOK_sigsetjmp
          || v == TOK___sigsetjmp
#endif
            ) {
            /* register the jmp_buf with the bounds runtime */
            vpush_helper_func(TOK___bound_setjmp);
            vpushv(sv + 1);
            gfunc_call(1);
            func_bound_add_epilog = 1;
        }
#if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64
        if (v == TOK_alloca)
            func_bound_add_epilog = 1;
#endif
#if TARGETOS_NetBSD
        if (v == TOK_longjmp) /* undo rename to __longjmp14 */
            sv->sym->asm_label = TOK___bound_longjmp;
#endif
    }
}
/* Add bounds for local symbols from S to E (via ->prev) */
static void add_local_bounds(Sym *s, Sym *e)
{
    for (; s != e; s = s->prev) {
        /* only named, stack-resident symbols carry bound info */
        if (!s->v || (s->r & VT_VALMASK) != VT_LOCAL)
            continue;
        /* Add arrays/structs/unions because we always take address */
        if ((s->type.t & VT_ARRAY)
            || (s->type.t & VT_BTYPE) == VT_STRUCT
            || s->a.addrtaken) {
            /* add local bound info: (frame offset, size) pair */
            int align, size = type_size(&s->type, &align);
            addr_t *bounds_ptr = section_ptr_add(lbounds_section,
                                                 2 * sizeof(addr_t));
            bounds_ptr[0] = s->c;
            bounds_ptr[1] = size;
        }
    }
}
#endif
/* Wrapper around sym_pop, that potentially also registers local bounds. */
static void pop_local_syms(Sym *b, int keep)
{
#ifdef CONFIG_TCC_BCHECK
    if (tcc_state->do_bounds_check && !keep && (local_scope || !func_var))
        add_local_bounds(local_stack, b);
#endif
    /* emit debug info for the symbols about to go out of scope */
    if (debug_modes)
        tcc_add_debug_info (tcc_state, !local_scope, local_stack, b);
    sym_pop(&local_stack, b, keep);
}
1701 /* increment an lvalue pointer */
1702 static void incr_offset(int offset)
1704 int t = vtop->type.t;
1705 gaddrof(); /* remove VT_LVAL */
1706 vtop->type.t = VT_PTRDIFF_T; /* set scalar type */
1707 vpushs(offset);
1708 gen_op('+');
1709 vtop->r |= VT_LVAL;
1710 vtop->type.t = t;
/* Advance the bitfield address on vtop by 'o' bytes, reinterpreting
   it as an unsigned byte lvalue (used by the packed-bitfield and
   gen_negf helpers which access objects one byte at a time). */
static void incr_bf_adr(int o)
{
    vtop->type.t = VT_BYTE | VT_UNSIGNED;
    incr_offset(o);
}
/* single-byte load mode for packed or otherwise unaligned bitfields:
   assembles the field value byte by byte into an accumulator, then
   sign-extends if the field type is signed.
   Stack comments: B = byte lvalue, X = accumulator, Y = partial byte. */
static void load_packed_bf(CType *type, int bit_pos, int bit_size)
{
    int n, o, bits;
    save_reg_upstack(vtop->r, 1);
    vpush64(type->t & VT_BTYPE, 0); // B X
    /* o = byte offset of the first byte, bit_pos = bit offset inside it */
    bits = 0, o = bit_pos >> 3, bit_pos &= 7;
    do {
        vswap(); // X B
        incr_bf_adr(o);
        vdup(); // X B B
        /* n = number of bits taken from this byte */
        n = 8 - bit_pos;
        if (n > bit_size)
            n = bit_size;
        if (bit_pos)
            vpushi(bit_pos), gen_op(TOK_SHR), bit_pos = 0; // X B Y
        if (n < 8)
            vpushi((1 << n) - 1), gen_op('&');
        gen_cast(type);
        if (bits)
            vpushi(bits), gen_op(TOK_SHL);
        vrotb(3); // B Y X
        gen_op('|'); // B X
        bits += n, bit_size -= n, o = 1;
    } while (bit_size);
    vswap(), vpop();
    if (!(type->t & VT_UNSIGNED)) {
        /* shift up then arithmetic-shift down to sign-extend */
        n = ((type->t & VT_BTYPE) == VT_LLONG ? 64 : 32) - bits;
        vpushi(n), gen_op(TOK_SHL);
        vpushi(n), gen_op(TOK_SAR);
    }
}
/* single-byte store mode for packed or otherwise unaligned bitfields:
   writes the value byte by byte, merging each partial byte with the
   untouched bits already in memory.
   Stack comments: X = value to store, B = byte lvalue, V = byte value. */
static void store_packed_bf(int bit_pos, int bit_size)
{
    int bits, n, o, m, c;

    /* constants can be re-pushed cheaply; others need a register dup */
    c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
    vswap(); // X B
    save_reg_upstack(vtop->r, 1);
    bits = 0, o = bit_pos >> 3, bit_pos &= 7;
    do {
        incr_bf_adr(o); // X B
        vswap(); //B X
        c ? vdup() : gv_dup(); // B V X
        vrott(3); // X B V
        if (bits)
            vpushi(bits), gen_op(TOK_SHR);
        if (bit_pos)
            vpushi(bit_pos), gen_op(TOK_SHL);
        n = 8 - bit_pos;
        if (n > bit_size)
            n = bit_size;
        if (n < 8) {
            /* merge: keep memory bits outside the mask m */
            m = ((1 << n) - 1) << bit_pos;
            vpushi(m), gen_op('&'); // X B V1
            vpushv(vtop-1); // X B V1 B
            /* avoid sign-extension trouble when the mask top bit is set */
            vpushi(m & 0x80 ? ~m & 0x7f : ~m);
            gen_op('&'); // X B V1 B1
            gen_op('|'); // X B V2
        }
        vdup(), vtop[-1] = vtop[-2]; // X B B V2
        vstore(), vpop(); // X B
        bits += n, bit_size -= n, bit_pos = 0, o = 1;
    } while (bit_size);
    vpop(), vpop();
}
1787 static int adjust_bf(SValue *sv, int bit_pos, int bit_size)
1789 int t;
1790 if (0 == sv->type.ref)
1791 return 0;
1792 t = sv->type.ref->auxtype;
1793 if (t != -1 && t != VT_STRUCT) {
1794 sv->type.t = (sv->type.t & ~(VT_BTYPE | VT_LONG)) | t;
1795 sv->r |= VT_LVAL;
1797 return t;
/* store vtop a register belonging to class 'rc'. lvalues are
   converted to values. Cannot be used if cannot be converted to
   register value (such as structures).
   Returns the register holding the (low part of the) value; for
   two-word values the high part register is recorded in vtop->r2. */
ST_FUNC int gv(int rc)
{
    int r, r2, r_ok, r2_ok, rc2, bt;
    int bit_pos, bit_size, size, align;

    /* NOTE: get_reg can modify vstack[] */
    if (vtop->type.t & VT_BITFIELD) {
        CType type;

        bit_pos = BIT_POS(vtop->type.t);
        bit_size = BIT_SIZE(vtop->type.t);
        /* remove bit field info to avoid loops */
        vtop->type.t &= ~VT_STRUCT_MASK;

        type.ref = NULL;
        type.t = vtop->type.t & VT_UNSIGNED;
        if ((vtop->type.t & VT_BTYPE) == VT_BOOL)
            type.t |= VT_UNSIGNED;

        r = adjust_bf(vtop, bit_pos, bit_size);

        if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
            type.t |= VT_LLONG;
        else
            type.t |= VT_INT;

        if (r == VT_STRUCT) {
            /* packed/unaligned field: extract byte by byte */
            load_packed_bf(&type, bit_pos, bit_size);
        } else {
            int bits = (type.t & VT_BTYPE) == VT_LLONG ? 64 : 32;
            /* cast to int to propagate signedness in following ops */
            gen_cast(&type);
            /* generate shifts */
            vpushi(bits - (bit_pos + bit_size));
            gen_op(TOK_SHL);
            vpushi(bits - bit_size);
            /* NOTE: transformed to SHR if unsigned */
            gen_op(TOK_SAR);
        }
        r = gv(rc);
    } else {
        if (is_float(vtop->type.t) &&
            (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
            /* CPUs usually cannot use float constants, so we store them
               generically in data segment */
            init_params p = { rodata_section };
            unsigned long offset;
            size = type_size(&vtop->type, &align);
            if (NODATA_WANTED)
                size = 0, align = 1;
            offset = section_add(p.sec, size, align);
            vpush_ref(&vtop->type, p.sec, offset, size);
            vswap();
            init_putv(&p, &vtop->type, offset);
            vtop->r |= VT_LVAL;
        }
#ifdef CONFIG_TCC_BCHECK
        if (vtop->r & VT_MUSTBOUND)
            gbound();
#endif

        bt = vtop->type.t & VT_BTYPE;

#ifdef TCC_TARGET_RISCV64
        /* XXX mega hack */
        if (bt == VT_LDOUBLE && rc == RC_FLOAT)
            rc = RC_INT;
#endif
        /* rc2 != 0 means the value needs a second register (r2) */
        rc2 = RC2_TYPE(bt, rc);

        /* need to reload if:
           - constant
           - lvalue (need to dereference pointer)
           - already a register, but not in the right class */
        r = vtop->r & VT_VALMASK;
        r_ok = !(vtop->r & VT_LVAL) && (r < VT_CONST) && (reg_classes[r] & rc);
        r2_ok = !rc2 || ((vtop->r2 < VT_CONST) && (reg_classes[vtop->r2] & rc2));

        if (!r_ok || !r2_ok) {

            if (!r_ok) {
                if (1 /* we can 'mov (r),r' in cases */
                    && r < VT_CONST
                    && (reg_classes[r] & rc)
                    && !rc2
                    )
                    /* dereference in place: spill other users of r first */
                    save_reg_upstack(r, 1);
                else
                    r = get_reg(rc);
            }

            if (rc2) {
                int load_type = (bt == VT_QFLOAT) ? VT_DOUBLE : VT_PTRDIFF_T;
                int original_type = vtop->type.t;

                /* two register type load :
                   expand to two words temporarily */
                if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
                    /* load constant */
                    unsigned long long ll = vtop->c.i;
                    vtop->c.i = ll; /* first word */
                    load(r, vtop);
                    vtop->r = r; /* save register value */
                    vpushi(ll >> 32); /* second word */
                } else if (vtop->r & VT_LVAL) {
                    /* We do not want to modifier the long long pointer here.
                       So we save any other instances down the stack */
                    save_reg_upstack(vtop->r, 1);
                    /* load from memory */
                    vtop->type.t = load_type;
                    load(r, vtop);
                    vdup();
                    vtop[-1].r = r; /* save register value */
                    /* increment pointer to get second word */
                    incr_offset(PTR_SIZE);
                } else {
                    /* move registers */
                    if (!r_ok)
                        load(r, vtop);
                    if (r2_ok && vtop->r2 < VT_CONST)
                        goto done;
                    vdup();
                    vtop[-1].r = r; /* save register value */
                    vtop->r = vtop[-1].r2;
                }
                /* Allocate second register. Here we rely on the fact that
                   get_reg() tries first to free r2 of an SValue. */
                r2 = get_reg(rc2);
                load(r2, vtop);
                vpop();
                /* write second register */
                vtop->r2 = r2;
            done:
                vtop->type.t = original_type;
            } else {
                /* comparison results must be materialized via jumps */
                if (vtop->r == VT_CMP)
                    vset_VT_JMP();
                /* one register type load */
                load(r, vtop);
            }
        }
        vtop->r = r;
#ifdef TCC_TARGET_C67
        /* uses register pairs for doubles */
        if (bt == VT_DOUBLE)
            vtop->r2 = r+1;
#endif
    }
    return r;
}
/* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
ST_FUNC void gv2(int rc1, int rc2)
{
    /* generate more generic register first. But VT_JMP or VT_CMP
       values must be generated first in all cases to avoid possible
       reload errors */
    if (vtop->r != VT_CMP && rc1 <= rc2) {
        vswap();
        gv(rc1);
        vswap();
        gv(rc2);
        /* test if reload is needed for first register: generating the
           second value may have spilled the first one */
        if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
            vswap();
            gv(rc1);
            vswap();
        }
    } else {
        gv(rc2);
        vswap();
        gv(rc1);
        vswap();
        /* test if reload is needed for first register */
        if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
            gv(rc2);
        }
    }
}
#if PTR_SIZE == 4
/* expand 64bit on stack in two ints: vtop becomes the low word with a
   new entry for the high word pushed above it */
ST_FUNC void lexpand(void)
{
    int u, v;
    /* preserve the declared signedness for the 32-bit halves */
    u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
    v = vtop->r & (VT_VALMASK | VT_LVAL);
    if (v == VT_CONST) {
        /* constant: duplicate and shift to isolate the high word */
        vdup();
        vtop[0].c.i >>= 32;
    } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
        /* memory operand: the high word lives 4 bytes further */
        vdup();
        vtop[0].c.i += 4;
    } else {
        /* otherwise load into a register pair and split it */
        gv(RC_INT);
        vdup();
        vtop[0].r = vtop[-1].r2;
        vtop[0].r2 = vtop[-1].r2 = VT_CONST;
    }
    /* both entries become plain 32-bit ints */
    vtop[0].type.t = vtop[-1].type.t = VT_INT | u;
}
#endif
#if PTR_SIZE == 4
/* build a long long from two ints (low word in vtop[-1], high word in
   vtop[0]); the result replaces both entries */
static void lbuild(int t)
{
    gv2(RC_INT, RC_INT);
    /* pair the registers: low word keeps r, high word becomes r2 */
    vtop[-1].r2 = vtop[0].r;
    vtop[-1].type.t = t;
    vpop();
}
#endif
/* convert stack entry to register and duplicate its value in another
   register.  On 32-bit targets a long long is split and duplicated
   half by half, then both copies are rebuilt. */
static void gv_dup(void)
{
    int t, rc, r;

    t = vtop->type.t;
#if PTR_SIZE == 4
    if ((t & VT_BTYPE) == VT_LLONG) {
        if (t & VT_BITFIELD) {
            /* materialize the bitfield first so lexpand sees a value */
            gv(RC_INT);
            t = vtop->type.t;
        }
        lexpand();
        gv_dup();
        vswap();
        vrotb(3);
        gv_dup();
        vrotb(4);
        /* stack: H L L1 H1 */
        lbuild(t);
        vrotb(3);
        vrotb(3);
        vswap();
        lbuild(t);
        vswap();
        return;
    }
#endif
    /* duplicate value */
    rc = RC_TYPE(t);
    gv(rc);
    r = get_reg(rc);
    vdup();
    load(r, vtop);
    vtop->r = r;
}
#if PTR_SIZE == 4
/* generate CPU independent (unsigned) long long operations.
   Only built on 32-bit targets: 64-bit values are handled as pairs of
   32-bit words (lexpand/lbuild) or by calling libtcc1/libgcc helper
   functions for division and variable shifts. */
static void gen_opl(int op)
{
    int t, a, b, op1, c, i;
    int func;
    unsigned short reg_iret = REG_IRET;
    unsigned short reg_lret = REG_IRE2;
    SValue tmp;

    switch(op) {
    case '/':
    case TOK_PDIV:
        func = TOK___divdi3;
        goto gen_func;
    case TOK_UDIV:
        func = TOK___udivdi3;
        goto gen_func;
    case '%':
        func = TOK___moddi3;
        goto gen_mod_func;
    case TOK_UMOD:
        func = TOK___umoddi3;
    gen_mod_func:
#ifdef TCC_ARM_EABI
        /* EABI aeabi_ldivmod returns the remainder in r2/r3 */
        reg_iret = TREG_R2;
        reg_lret = TREG_R3;
#endif
    gen_func:
        /* call generic long long function */
        vpush_helper_func(func);
        vrott(3);
        gfunc_call(2);
        vpushi(0);
        vtop->r = reg_iret;
        vtop->r2 = reg_lret;
        break;
    case '^':
    case '&':
    case '|':
    case '*':
    case '+':
    case '-':
        //pv("gen_opl A",0,2);
        t = vtop->type.t;
        vswap();
        lexpand();
        vrotb(3);
        lexpand();
        /* stack: L1 H1 L2 H2 */
        tmp = vtop[0];
        vtop[0] = vtop[-3];
        vtop[-3] = tmp;
        tmp = vtop[-2];
        vtop[-2] = vtop[-3];
        vtop[-3] = tmp;
        vswap();
        /* stack: H1 H2 L1 L2 */
        //pv("gen_opl B",0,4);
        if (op == '*') {
            /* 64x64 multiply: L1*L2 (full 64-bit) plus the two cross
               products folded into the high word */
            vpushv(vtop - 1);
            vpushv(vtop - 1);
            gen_op(TOK_UMULL);
            lexpand();
            /* stack: H1 H2 L1 L2 ML MH */
            for(i=0;i<4;i++)
                vrotb(6);
            /* stack: ML MH H1 H2 L1 L2 */
            tmp = vtop[0];
            vtop[0] = vtop[-2];
            vtop[-2] = tmp;
            /* stack: ML MH H1 L2 H2 L1 */
            gen_op('*');
            vrotb(3);
            vrotb(3);
            gen_op('*');
            /* stack: ML MH M1 M2 */
            gen_op('+');
            gen_op('+');
        } else if (op == '+' || op == '-') {
            /* XXX: add non carry method too (for MIPS or alpha) */
            if (op == '+')
                op1 = TOK_ADDC1;
            else
                op1 = TOK_SUBC1;
            gen_op(op1);
            /* stack: H1 H2 (L1 op L2) */
            vrotb(3);
            vrotb(3);
            gen_op(op1 + 1); /* TOK_xxxC2 */
        } else {
            /* bitwise ops act on each word independently */
            gen_op(op);
            /* stack: H1 H2 (L1 op L2) */
            vrotb(3);
            vrotb(3);
            /* stack: (L1 op L2) H1 H2 */
            gen_op(op);
            /* stack: (L1 op L2) (H1 op H2) */
        }
        /* stack: L H */
        lbuild(t);
        break;
    case TOK_SAR:
    case TOK_SHR:
    case TOK_SHL:
        if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
            t = vtop[-1].type.t;
            vswap();
            lexpand();
            vrotb(3);
            /* stack: L H shift */
            c = (int)vtop->c.i;
            /* constant: simpler */
            /* NOTE: all comments are for SHL. the other cases are
               done by swapping words */
            vpop();
            if (op != TOK_SHL)
                vswap();
            if (c >= 32) {
                /* stack: L H */
                vpop();
                if (c > 32) {
                    vpushi(c - 32);
                    gen_op(op);
                }
                if (op != TOK_SAR) {
                    vpushi(0);
                } else {
                    /* arithmetic shift: high word becomes sign fill */
                    gv_dup();
                    vpushi(31);
                    gen_op(TOK_SAR);
                }
                vswap();
            } else {
                vswap();
                gv_dup();
                /* stack: H L L */
                vpushi(c);
                gen_op(op);
                vswap();
                vpushi(32 - c);
                if (op == TOK_SHL)
                    gen_op(TOK_SHR);
                else
                    gen_op(TOK_SHL);
                vrotb(3);
                /* stack: L L H */
                vpushi(c);
                if (op == TOK_SHL)
                    gen_op(TOK_SHL);
                else
                    gen_op(TOK_SHR);
                gen_op('|');
            }
            if (op != TOK_SHL)
                vswap();
            lbuild(t);
        } else {
            /* XXX: should provide a faster fallback on x86 ? */
            switch(op) {
            case TOK_SAR:
                func = TOK___ashrdi3;
                goto gen_func;
            case TOK_SHR:
                func = TOK___lshrdi3;
                goto gen_func;
            case TOK_SHL:
                func = TOK___ashldi3;
                goto gen_func;
            }
        }
        break;
    default:
        /* compare operations */
        t = vtop->type.t;
        vswap();
        lexpand();
        vrotb(3);
        lexpand();
        /* stack: L1 H1 L2 H2 */
        tmp = vtop[-1];
        vtop[-1] = vtop[-2];
        vtop[-2] = tmp;
        /* stack: L1 L2 H1 H2 */
        save_regs(4);
        /* compare high */
        op1 = op;
        /* when values are equal, we need to compare low words. since
           the jump is inverted, we invert the test too. */
        if (op1 == TOK_LT)
            op1 = TOK_LE;
        else if (op1 == TOK_GT)
            op1 = TOK_GE;
        else if (op1 == TOK_ULT)
            op1 = TOK_ULE;
        else if (op1 == TOK_UGT)
            op1 = TOK_UGE;
        a = 0;
        b = 0;
        gen_op(op1);
        if (op == TOK_NE) {
            b = gvtst(0, 0);
        } else {
            a = gvtst(1, 0);
            if (op != TOK_EQ) {
                /* generate non equal test */
                vpushi(0);
                vset_VT_CMP(TOK_NE);
                b = gvtst(0, 0);
            }
        }
        /* compare low. Always unsigned */
        op1 = op;
        if (op1 == TOK_LT)
            op1 = TOK_ULT;
        else if (op1 == TOK_LE)
            op1 = TOK_ULE;
        else if (op1 == TOK_GT)
            op1 = TOK_UGT;
        else if (op1 == TOK_GE)
            op1 = TOK_UGE;
        gen_op(op1);
#if 0//def TCC_TARGET_I386
        if (op == TOK_NE) { gsym(b); break; }
        if (op == TOK_EQ) { gsym(a); break; }
#endif
        gvtst_set(1, a);
        gvtst_set(0, b);
        break;
    }
}
#endif
/* Signed 64-bit division evaluated on unsigned operands so constant
   folding cannot trip native signed-division traps (e.g. the
   INT64_MIN / -1 overflow case): divide the magnitudes, then negate
   the quotient when the operand signs differ. */
static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
{
    uint64_t ua = (a >> 63) ? 0 - a : a;
    uint64_t ub = (b >> 63) ? 0 - b : b;
    uint64_t q = ua / ub;

    if ((a ^ b) >> 63)  /* signs differ => negative quotient */
        q = 0 - q;
    return q;
}
/* Signed 64-bit '<' computed on the unsigned representation: XORing
   the sign bit maps the signed ordering onto the unsigned one. */
static int gen_opic_lt(uint64_t a, uint64_t b)
{
    const uint64_t sign_bit = (uint64_t)1 << 63;

    return (a ^ sign_bit) < (b ^ sign_bit);
}
/* handle integer constant optimizations and various machine
   independent opt.  Folds const op const, applies algebraic identities
   (x*1, x&0, shift-to-mul, ...) and otherwise falls through to the
   backend via gen_opi()/gen_opl(). */
static void gen_opic(int op)
{
    SValue *v1 = vtop - 1;
    SValue *v2 = vtop;
    int t1 = v1->type.t & VT_BTYPE;
    int t2 = v2->type.t & VT_BTYPE;
    int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
    int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
    uint64_t l1 = c1 ? v1->c.i : 0;
    uint64_t l2 = c2 ? v2->c.i : 0;
    int shm = (t1 == VT_LLONG) ? 63 : 31;
    int r;

    /* normalize 32-bit operands: truncate and sign-extend as needed */
    if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
        l1 = ((uint32_t)l1 |
              (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
    if (t2 != VT_LLONG && (PTR_SIZE != 8 || t2 != VT_PTR))
        l2 = ((uint32_t)l2 |
              (v2->type.t & VT_UNSIGNED ? 0 : -(l2 & 0x80000000)));

    if (c1 && c2) {
        /* both operands constant: fold at compile time */
        switch(op) {
        case '+': l1 += l2; break;
        case '-': l1 -= l2; break;
        case '&': l1 &= l2; break;
        case '^': l1 ^= l2; break;
        case '|': l1 |= l2; break;
        case '*': l1 *= l2; break;

        case TOK_PDIV:
        case '/':
        case '%':
        case TOK_UDIV:
        case TOK_UMOD:
            /* if division by zero, generate explicit division */
            if (l2 == 0) {
                if (CONST_WANTED && !NOEVAL_WANTED)
                    tcc_error("division by zero in constant");
                goto general_case;
            }
            switch(op) {
            default: l1 = gen_opic_sdiv(l1, l2); break;
            case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
            case TOK_UDIV: l1 = l1 / l2; break;
            case TOK_UMOD: l1 = l1 % l2; break;
            }
            break;
        case TOK_SHL: l1 <<= (l2 & shm); break;
        case TOK_SHR: l1 >>= (l2 & shm); break;
        case TOK_SAR:
            /* arithmetic shift emulated on the unsigned value */
            l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
            break;
        /* tests */
        case TOK_ULT: l1 = l1 < l2; break;
        case TOK_UGE: l1 = l1 >= l2; break;
        case TOK_EQ: l1 = l1 == l2; break;
        case TOK_NE: l1 = l1 != l2; break;
        case TOK_ULE: l1 = l1 <= l2; break;
        case TOK_UGT: l1 = l1 > l2; break;
        case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
        case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
        case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
        case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
        /* logical */
        case TOK_LAND: l1 = l1 && l2; break;
        case TOK_LOR: l1 = l1 || l2; break;
        default:
            goto general_case;
        }
        /* re-normalize the folded result to the operand width */
        if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
            l1 = ((uint32_t)l1 |
                (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
        v1->c.i = l1;
        v1->r |= v2->r & VT_NONCONST;
        vtop--;
    } else {
        /* if commutative ops, put c2 as constant */
        if (c1 && (op == '+' || op == '&' || op == '^' ||
                   op == '|' || op == '*' || op == TOK_EQ || op == TOK_NE)) {
            vswap();
            c2 = c1; //c = c1, c1 = c2, c2 = c;
            l2 = l1; //l = l1, l1 = l2, l2 = l;
        }
        if (c1 && ((l1 == 0 &&
                    (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
                   (l1 == -1 && op == TOK_SAR))) {
            /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
            vpop();
        } else if (c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
                          (op == '|' &&
                            (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))) ||
                          (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
            /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
            if (l2 == 1)
                vtop->c.i = 0;
            vswap();
            vtop--;
        } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
                          op == TOK_PDIV) &&
                           l2 == 1) ||
                          ((op == '+' || op == '-' || op == '|' || op == '^' ||
                            op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
                           l2 == 0) ||
                          (op == '&' &&
                            (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))))) {
            /* filter out NOP operations like x*1, x-0, x&-1... */
            vtop--;
        } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
            /* try to use shifts instead of muls or divs */
            if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
                int n = -1;
                while (l2) {
                    l2 >>= 1;
                    n++;
                }
                vtop->c.i = n;
                if (op == '*')
                    op = TOK_SHL;
                else if (op == TOK_PDIV)
                    op = TOK_SAR;
                else
                    op = TOK_SHR;
            }
            goto general_case;
        } else if (c2 && (op == '+' || op == '-') &&
                   (r = vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM),
                    r == (VT_CONST | VT_SYM) || r == VT_LOCAL)) {
            /* symbol + constant case */
            if (op == '-')
                l2 = -l2;
            l2 += vtop[-1].c.i;
            /* The backends can't always deal with addends to symbols
               larger than +-1<<31.  Don't construct such. */
            if ((int)l2 != l2)
                goto general_case;
            vtop--;
            vtop->c.i = l2;
        } else {
        general_case:
            /* call low level op generator */
            if (t1 == VT_LLONG || t2 == VT_LLONG ||
                (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
                gen_opl(op);
            else
                gen_opi(op);
        }
        if (vtop->r == VT_CONST)
            vtop->r |= VT_NONCONST; /* is const, but only by optimization */
    }
}
/* Floating point negation, per target:
   - x86/x86_64: the backend's gen_opf() already handles TOK_NEG
   - ARM: 0 - x, which the backend rewrites to vneg
   - otherwise: flip the sign bit through memory (see comment below) */
#if defined TCC_TARGET_X86_64 || defined TCC_TARGET_I386
# define gen_negf gen_opf
#elif defined TCC_TARGET_ARM
void gen_negf(int op)
{
    /* arm will detect 0-x and replace by vneg */
    vpushi(0), vswap(), gen_op('-');
}
#else
/* XXX: implement in gen_opf() for other backends too */
void gen_negf(int op)
{
    /* In IEEE negate(x) isn't subtract(0,x).  Without NaNs it's
       subtract(-0, x), but with them it's really a sign flip
       operation.  We implement this with bit manipulation and have
       to do some type reinterpretation for this, which TCC can do
       only via memory. */

    int align, size, bt;

    size = type_size(&vtop->type, &align);
    bt = vtop->type.t & VT_BTYPE;
    /* force the value into memory so we can poke at its bytes */
    save_reg(gv(RC_TYPE(bt)));
    vdup();
    /* the sign bit lives in the top bit of the last byte
       (little-endian layout) */
    incr_bf_adr(size - 1);
    vdup();
    vpushi(0x80); /* flip sign */
    gen_op('^');
    vstore();
    vpop();
}
#endif
2485 /* generate a floating point operation with constant propagation */
2486 static void gen_opif(int op)
2488 int c1, c2, i, bt;
2489 SValue *v1, *v2;
2490 #if defined _MSC_VER && defined __x86_64__
2491 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2492 volatile
2493 #endif
2494 long double f1, f2;
2496 v1 = vtop - 1;
2497 v2 = vtop;
2498 if (op == TOK_NEG)
2499 v1 = v2;
2500 bt = v1->type.t & VT_BTYPE;
2502 /* currently, we cannot do computations with forward symbols */
2503 c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2504 c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2505 if (c1 && c2) {
2506 if (bt == VT_FLOAT) {
2507 f1 = v1->c.f;
2508 f2 = v2->c.f;
2509 } else if (bt == VT_DOUBLE) {
2510 f1 = v1->c.d;
2511 f2 = v2->c.d;
2512 } else {
2513 f1 = v1->c.ld;
2514 f2 = v2->c.ld;
2516 /* NOTE: we only do constant propagation if finite number (not
2517 NaN or infinity) (ANSI spec) */
2518 if (!(ieee_finite(f1) || !ieee_finite(f2)) && !CONST_WANTED)
2519 goto general_case;
2520 switch(op) {
2521 case '+': f1 += f2; break;
2522 case '-': f1 -= f2; break;
2523 case '*': f1 *= f2; break;
2524 case '/':
2525 if (f2 == 0.0) {
2526 union { float f; unsigned u; } x1, x2, y;
2527 /* If not in initializer we need to potentially generate
2528 FP exceptions at runtime, otherwise we want to fold. */
2529 if (!CONST_WANTED)
2530 goto general_case;
2531 /* the run-time result of 0.0/0.0 on x87, also of other compilers
2532 when used to compile the f1 /= f2 below, would be -nan */
2533 x1.f = f1, x2.f = f2;
2534 if (f1 == 0.0)
2535 y.u = 0x7fc00000; /* nan */
2536 else
2537 y.u = 0x7f800000; /* infinity */
2538 y.u |= (x1.u ^ x2.u) & 0x80000000; /* set sign */
2539 f1 = y.f;
2540 break;
2542 f1 /= f2;
2543 break;
2544 case TOK_NEG:
2545 f1 = -f1;
2546 goto unary_result;
2547 case TOK_EQ:
2548 i = f1 == f2;
2549 make_int:
2550 vtop -= 2;
2551 vpushi(i);
2552 return;
2553 case TOK_NE:
2554 i = f1 != f2;
2555 goto make_int;
2556 case TOK_LT:
2557 i = f1 < f2;
2558 goto make_int;
2559 case TOK_GE:
2560 i = f1 >= f2;
2561 goto make_int;
2562 case TOK_LE:
2563 i = f1 <= f2;
2564 goto make_int;
2565 case TOK_GT:
2566 i = f1 > f2;
2567 goto make_int;
2568 default:
2569 goto general_case;
2571 vtop--;
2572 unary_result:
2573 /* XXX: overflow test ? */
2574 if (bt == VT_FLOAT) {
2575 v1->c.f = f1;
2576 } else if (bt == VT_DOUBLE) {
2577 v1->c.d = f1;
2578 } else {
2579 v1->c.ld = f1;
2581 } else {
2582 general_case:
2583 if (op == TOK_NEG) {
2584 gen_negf(op);
2585 } else {
2586 gen_opf(op);
2591 /* print a type. If 'varstr' is not NULL, then the variable is also
2592 printed in the type */
2593 /* XXX: union */
2594 /* XXX: add array and function pointers */
2595 static void type_to_str(char *buf, int buf_size,
2596 CType *type, const char *varstr)
2598 int bt, v, t;
2599 Sym *s, *sa;
2600 char buf1[256];
2601 const char *tstr;
2603 t = type->t;
2604 bt = t & VT_BTYPE;
2605 buf[0] = '\0';
2607 if (t & VT_EXTERN)
2608 pstrcat(buf, buf_size, "extern ");
2609 if (t & VT_STATIC)
2610 pstrcat(buf, buf_size, "static ");
2611 if (t & VT_TYPEDEF)
2612 pstrcat(buf, buf_size, "typedef ");
2613 if (t & VT_INLINE)
2614 pstrcat(buf, buf_size, "inline ");
2615 if (bt != VT_PTR) {
2616 if (t & VT_VOLATILE)
2617 pstrcat(buf, buf_size, "volatile ");
2618 if (t & VT_CONSTANT)
2619 pstrcat(buf, buf_size, "const ");
2621 if (((t & VT_DEFSIGN) && bt == VT_BYTE)
2622 || ((t & VT_UNSIGNED)
2623 && (bt == VT_SHORT || bt == VT_INT || bt == VT_LLONG)
2624 && !IS_ENUM(t)
2626 pstrcat(buf, buf_size, (t & VT_UNSIGNED) ? "unsigned " : "signed ");
2628 buf_size -= strlen(buf);
2629 buf += strlen(buf);
2631 switch(bt) {
2632 case VT_VOID:
2633 tstr = "void";
2634 goto add_tstr;
2635 case VT_BOOL:
2636 tstr = "_Bool";
2637 goto add_tstr;
2638 case VT_BYTE:
2639 tstr = "char";
2640 goto add_tstr;
2641 case VT_SHORT:
2642 tstr = "short";
2643 goto add_tstr;
2644 case VT_INT:
2645 tstr = "int";
2646 goto maybe_long;
2647 case VT_LLONG:
2648 tstr = "long long";
2649 maybe_long:
2650 if (t & VT_LONG)
2651 tstr = "long";
2652 if (!IS_ENUM(t))
2653 goto add_tstr;
2654 tstr = "enum ";
2655 goto tstruct;
2656 case VT_FLOAT:
2657 tstr = "float";
2658 goto add_tstr;
2659 case VT_DOUBLE:
2660 tstr = "double";
2661 if (!(t & VT_LONG))
2662 goto add_tstr;
2663 case VT_LDOUBLE:
2664 tstr = "long double";
2665 add_tstr:
2666 pstrcat(buf, buf_size, tstr);
2667 break;
2668 case VT_STRUCT:
2669 tstr = "struct ";
2670 if (IS_UNION(t))
2671 tstr = "union ";
2672 tstruct:
2673 pstrcat(buf, buf_size, tstr);
2674 v = type->ref->v & ~SYM_STRUCT;
2675 if (v >= SYM_FIRST_ANOM)
2676 pstrcat(buf, buf_size, "<anonymous>");
2677 else
2678 pstrcat(buf, buf_size, get_tok_str(v, NULL));
2679 break;
2680 case VT_FUNC:
2681 s = type->ref;
2682 buf1[0]=0;
2683 if (varstr && '*' == *varstr) {
2684 pstrcat(buf1, sizeof(buf1), "(");
2685 pstrcat(buf1, sizeof(buf1), varstr);
2686 pstrcat(buf1, sizeof(buf1), ")");
2688 pstrcat(buf1, buf_size, "(");
2689 sa = s->next;
2690 while (sa != NULL) {
2691 char buf2[256];
2692 type_to_str(buf2, sizeof(buf2), &sa->type, NULL);
2693 pstrcat(buf1, sizeof(buf1), buf2);
2694 sa = sa->next;
2695 if (sa)
2696 pstrcat(buf1, sizeof(buf1), ", ");
2698 if (s->f.func_type == FUNC_ELLIPSIS)
2699 pstrcat(buf1, sizeof(buf1), ", ...");
2700 pstrcat(buf1, sizeof(buf1), ")");
2701 type_to_str(buf, buf_size, &s->type, buf1);
2702 goto no_var;
2703 case VT_PTR:
2704 s = type->ref;
2705 if (t & (VT_ARRAY|VT_VLA)) {
2706 if (varstr && '*' == *varstr)
2707 snprintf(buf1, sizeof(buf1), "(%s)[%d]", varstr, s->c);
2708 else
2709 snprintf(buf1, sizeof(buf1), "%s[%d]", varstr ? varstr : "", s->c);
2710 type_to_str(buf, buf_size, &s->type, buf1);
2711 goto no_var;
2713 pstrcpy(buf1, sizeof(buf1), "*");
2714 if (t & VT_CONSTANT)
2715 pstrcat(buf1, buf_size, "const ");
2716 if (t & VT_VOLATILE)
2717 pstrcat(buf1, buf_size, "volatile ");
2718 if (varstr)
2719 pstrcat(buf1, sizeof(buf1), varstr);
2720 type_to_str(buf, buf_size, &s->type, buf1);
2721 goto no_var;
2723 if (varstr) {
2724 pstrcat(buf, buf_size, " ");
2725 pstrcat(buf, buf_size, varstr);
2727 no_var: ;
2730 static void type_incompatibility_error(CType* st, CType* dt, const char* fmt)
2732 char buf1[256], buf2[256];
2733 type_to_str(buf1, sizeof(buf1), st, NULL);
2734 type_to_str(buf2, sizeof(buf2), dt, NULL);
2735 tcc_error(fmt, buf1, buf2);
2738 static void type_incompatibility_warning(CType* st, CType* dt, const char* fmt)
2740 char buf1[256], buf2[256];
2741 type_to_str(buf1, sizeof(buf1), st, NULL);
2742 type_to_str(buf2, sizeof(buf2), dt, NULL);
2743 tcc_warning(fmt, buf1, buf2);
2746 static int pointed_size(CType *type)
2748 int align;
2749 return type_size(pointed_type(type), &align);
2752 static inline int is_null_pointer(SValue *p)
2754 if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM | VT_NONCONST)) != VT_CONST)
2755 return 0;
2756 return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
2757 ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
2758 ((p->type.t & VT_BTYPE) == VT_PTR &&
2759 (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0) &&
2760 ((pointed_type(&p->type)->t & VT_BTYPE) == VT_VOID) &&
2761 0 == (pointed_type(&p->type)->t & (VT_CONSTANT | VT_VOLATILE))
2765 /* compare function types. OLD functions match any new functions */
2766 static int is_compatible_func(CType *type1, CType *type2)
2768 Sym *s1, *s2;
2770 s1 = type1->ref;
2771 s2 = type2->ref;
2772 if (s1->f.func_call != s2->f.func_call)
2773 return 0;
2774 if (s1->f.func_type != s2->f.func_type
2775 && s1->f.func_type != FUNC_OLD
2776 && s2->f.func_type != FUNC_OLD)
2777 return 0;
2778 for (;;) {
2779 if (!is_compatible_unqualified_types(&s1->type, &s2->type))
2780 return 0;
2781 if (s1->f.func_type == FUNC_OLD || s2->f.func_type == FUNC_OLD )
2782 return 1;
2783 s1 = s1->next;
2784 s2 = s2->next;
2785 if (!s1)
2786 return !s2;
2787 if (!s2)
2788 return 0;
2792 /* return true if type1 and type2 are the same. If unqualified is
2793 true, qualifiers on the types are ignored.
2795 static int compare_types(CType *type1, CType *type2, int unqualified)
2797 int bt1, t1, t2;
2799 t1 = type1->t & VT_TYPE;
2800 t2 = type2->t & VT_TYPE;
2801 if (unqualified) {
2802 /* strip qualifiers before comparing */
2803 t1 &= ~(VT_CONSTANT | VT_VOLATILE);
2804 t2 &= ~(VT_CONSTANT | VT_VOLATILE);
2807 /* Default Vs explicit signedness only matters for char */
2808 if ((t1 & VT_BTYPE) != VT_BYTE) {
2809 t1 &= ~VT_DEFSIGN;
2810 t2 &= ~VT_DEFSIGN;
2812 /* XXX: bitfields ? */
2813 if (t1 != t2)
2814 return 0;
2816 if ((t1 & VT_ARRAY)
2817 && !(type1->ref->c < 0
2818 || type2->ref->c < 0
2819 || type1->ref->c == type2->ref->c))
2820 return 0;
2822 /* test more complicated cases */
2823 bt1 = t1 & VT_BTYPE;
2824 if (bt1 == VT_PTR) {
2825 type1 = pointed_type(type1);
2826 type2 = pointed_type(type2);
2827 return is_compatible_types(type1, type2);
2828 } else if (bt1 == VT_STRUCT) {
2829 return (type1->ref == type2->ref);
2830 } else if (bt1 == VT_FUNC) {
2831 return is_compatible_func(type1, type2);
2832 } else if (IS_ENUM(type1->t) && IS_ENUM(type2->t)) {
2833 /* If both are enums then they must be the same, if only one is then
2834 t1 and t2 must be equal, which was checked above already. */
2835 return type1->ref == type2->ref;
2836 } else {
2837 return 1;
2841 /* Check if OP1 and OP2 can be "combined" with operation OP, the combined
2842 type is stored in DEST if non-null (except for pointer plus/minus) . */
2843 static int combine_types(CType *dest, SValue *op1, SValue *op2, int op)
2845 CType *type1 = &op1->type, *type2 = &op2->type, type;
2846 int t1 = type1->t, t2 = type2->t, bt1 = t1 & VT_BTYPE, bt2 = t2 & VT_BTYPE;
2847 int ret = 1;
2849 type.t = VT_VOID;
2850 type.ref = NULL;
2852 if (bt1 == VT_VOID || bt2 == VT_VOID) {
2853 ret = op == '?' ? 1 : 0;
2854 /* NOTE: as an extension, we accept void on only one side */
2855 type.t = VT_VOID;
2856 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
2857 if (op == '+') ; /* Handled in caller */
2858 /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
2859 /* If one is a null ptr constant the result type is the other. */
2860 else if (is_null_pointer (op2)) type = *type1;
2861 else if (is_null_pointer (op1)) type = *type2;
2862 else if (bt1 != bt2) {
2863 /* accept comparison or cond-expr between pointer and integer
2864 with a warning */
2865 if ((op == '?' || TOK_ISCOND(op))
2866 && (is_integer_btype(bt1) || is_integer_btype(bt2)))
2867 tcc_warning("pointer/integer mismatch in %s",
2868 op == '?' ? "conditional expression" : "comparison");
2869 else if (op != '-' || !is_integer_btype(bt2))
2870 ret = 0;
2871 type = *(bt1 == VT_PTR ? type1 : type2);
2872 } else {
2873 CType *pt1 = pointed_type(type1);
2874 CType *pt2 = pointed_type(type2);
2875 int pbt1 = pt1->t & VT_BTYPE;
2876 int pbt2 = pt2->t & VT_BTYPE;
2877 int newquals, copied = 0;
2878 if (pbt1 != VT_VOID && pbt2 != VT_VOID
2879 && !compare_types(pt1, pt2, 1/*unqualif*/)) {
2880 if (op != '?' && !TOK_ISCOND(op))
2881 ret = 0;
2882 else
2883 type_incompatibility_warning(type1, type2,
2884 op == '?'
2885 ? "pointer type mismatch in conditional expression ('%s' and '%s')"
2886 : "pointer type mismatch in comparison('%s' and '%s')");
2888 if (op == '?') {
2889 /* pointers to void get preferred, otherwise the
2890 pointed to types minus qualifs should be compatible */
2891 type = *((pbt1 == VT_VOID) ? type1 : type2);
2892 /* combine qualifs */
2893 newquals = ((pt1->t | pt2->t) & (VT_CONSTANT | VT_VOLATILE));
2894 if ((~pointed_type(&type)->t & (VT_CONSTANT | VT_VOLATILE))
2895 & newquals)
2897 /* copy the pointer target symbol */
2898 type.ref = sym_push(SYM_FIELD, &type.ref->type,
2899 0, type.ref->c);
2900 copied = 1;
2901 pointed_type(&type)->t |= newquals;
2903 /* pointers to incomplete arrays get converted to
2904 pointers to completed ones if possible */
2905 if (pt1->t & VT_ARRAY
2906 && pt2->t & VT_ARRAY
2907 && pointed_type(&type)->ref->c < 0
2908 && (pt1->ref->c > 0 || pt2->ref->c > 0))
2910 if (!copied)
2911 type.ref = sym_push(SYM_FIELD, &type.ref->type,
2912 0, type.ref->c);
2913 pointed_type(&type)->ref =
2914 sym_push(SYM_FIELD, &pointed_type(&type)->ref->type,
2915 0, pointed_type(&type)->ref->c);
2916 pointed_type(&type)->ref->c =
2917 0 < pt1->ref->c ? pt1->ref->c : pt2->ref->c;
2921 if (TOK_ISCOND(op))
2922 type.t = VT_SIZE_T;
2923 } else if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
2924 if (op != '?' || !compare_types(type1, type2, 1))
2925 ret = 0;
2926 type = *type1;
2927 } else if (is_float(bt1) || is_float(bt2)) {
2928 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
2929 type.t = VT_LDOUBLE;
2930 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
2931 type.t = VT_DOUBLE;
2932 } else {
2933 type.t = VT_FLOAT;
2935 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
2936 /* cast to biggest op */
2937 type.t = VT_LLONG | VT_LONG;
2938 if (bt1 == VT_LLONG)
2939 type.t &= t1;
2940 if (bt2 == VT_LLONG)
2941 type.t &= t2;
2942 /* convert to unsigned if it does not fit in a long long */
2943 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
2944 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
2945 type.t |= VT_UNSIGNED;
2946 } else {
2947 /* integer operations */
2948 type.t = VT_INT | (VT_LONG & (t1 | t2));
2949 /* convert to unsigned if it does not fit in an integer */
2950 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
2951 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
2952 type.t |= VT_UNSIGNED;
2954 if (dest)
2955 *dest = type;
2956 return ret;
2959 /* generic gen_op: handles types problems */
2960 ST_FUNC void gen_op(int op)
2962 int t1, t2, bt1, bt2, t;
2963 CType type1, combtype;
2965 redo:
2966 t1 = vtop[-1].type.t;
2967 t2 = vtop[0].type.t;
2968 bt1 = t1 & VT_BTYPE;
2969 bt2 = t2 & VT_BTYPE;
2971 if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
2972 if (bt2 == VT_FUNC) {
2973 mk_pointer(&vtop->type);
2974 gaddrof();
2976 if (bt1 == VT_FUNC) {
2977 vswap();
2978 mk_pointer(&vtop->type);
2979 gaddrof();
2980 vswap();
2982 goto redo;
2983 } else if (!combine_types(&combtype, vtop - 1, vtop, op)) {
2984 tcc_error("invalid operand types for binary operation");
2985 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
2986 /* at least one operand is a pointer */
2987 /* relational op: must be both pointers */
2988 int align;
2989 if (TOK_ISCOND(op))
2990 goto std_op;
2991 /* if both pointers, then it must be the '-' op */
2992 if (bt1 == VT_PTR && bt2 == VT_PTR) {
2993 if (op != '-')
2994 tcc_error("cannot use pointers here");
2995 vpush_type_size(pointed_type(&vtop[-1].type), &align);
2996 vtop->type.t &= ~VT_UNSIGNED;
2997 vrott(3);
2998 gen_opic(op);
2999 vtop->type.t = VT_PTRDIFF_T;
3000 vswap();
3001 gen_op(TOK_PDIV);
3002 } else {
3003 /* exactly one pointer : must be '+' or '-'. */
3004 if (op != '-' && op != '+')
3005 tcc_error("cannot use pointers here");
3006 /* Put pointer as first operand */
3007 if (bt2 == VT_PTR) {
3008 vswap();
3009 t = t1, t1 = t2, t2 = t;
3011 #if PTR_SIZE == 4
3012 if ((vtop[0].type.t & VT_BTYPE) == VT_LLONG)
3013 /* XXX: truncate here because gen_opl can't handle ptr + long long */
3014 gen_cast_s(VT_INT);
3015 #endif
3016 type1 = vtop[-1].type;
3017 vpush_type_size(pointed_type(&vtop[-1].type), &align);
3018 gen_op('*');
3019 #ifdef CONFIG_TCC_BCHECK
3020 if (tcc_state->do_bounds_check && !CONST_WANTED) {
3021 /* if bounded pointers, we generate a special code to
3022 test bounds */
3023 if (op == '-') {
3024 vpushi(0);
3025 vswap();
3026 gen_op('-');
3028 gen_bounded_ptr_add();
3029 } else
3030 #endif
3032 gen_opic(op);
3034 type1.t &= ~(VT_ARRAY|VT_VLA);
3035 /* put again type if gen_opic() swaped operands */
3036 vtop->type = type1;
3038 } else {
3039 /* floats can only be used for a few operations */
3040 if (is_float(combtype.t)
3041 && op != '+' && op != '-' && op != '*' && op != '/'
3042 && !TOK_ISCOND(op))
3043 tcc_error("invalid operands for binary operation");
3044 else if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL) {
3045 t = bt1 == VT_LLONG ? VT_LLONG : VT_INT;
3046 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (t | VT_UNSIGNED))
3047 t |= VT_UNSIGNED;
3048 t |= (VT_LONG & t1);
3049 combtype.t = t;
3051 std_op:
3052 t = t2 = combtype.t;
3053 /* XXX: currently, some unsigned operations are explicit, so
3054 we modify them here */
3055 if (t & VT_UNSIGNED) {
3056 if (op == TOK_SAR)
3057 op = TOK_SHR;
3058 else if (op == '/')
3059 op = TOK_UDIV;
3060 else if (op == '%')
3061 op = TOK_UMOD;
3062 else if (op == TOK_LT)
3063 op = TOK_ULT;
3064 else if (op == TOK_GT)
3065 op = TOK_UGT;
3066 else if (op == TOK_LE)
3067 op = TOK_ULE;
3068 else if (op == TOK_GE)
3069 op = TOK_UGE;
3071 vswap();
3072 gen_cast_s(t);
3073 vswap();
3074 /* special case for shifts and long long: we keep the shift as
3075 an integer */
3076 if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
3077 t2 = VT_INT;
3078 gen_cast_s(t2);
3079 if (is_float(t))
3080 gen_opif(op);
3081 else
3082 gen_opic(op);
3083 if (TOK_ISCOND(op)) {
3084 /* relational op: the result is an int */
3085 vtop->type.t = VT_INT;
3086 } else {
3087 vtop->type.t = t;
3090 // Make sure that we have converted to an rvalue:
3091 if (vtop->r & VT_LVAL)
3092 gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
3095 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64 || defined TCC_TARGET_ARM
3096 #define gen_cvt_itof1 gen_cvt_itof
3097 #else
3098 /* generic itof for unsigned long long case */
3099 static void gen_cvt_itof1(int t)
3101 if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
3102 (VT_LLONG | VT_UNSIGNED)) {
3104 if (t == VT_FLOAT)
3105 vpush_helper_func(TOK___floatundisf);
3106 #if LDOUBLE_SIZE != 8
3107 else if (t == VT_LDOUBLE)
3108 vpush_helper_func(TOK___floatundixf);
3109 #endif
3110 else
3111 vpush_helper_func(TOK___floatundidf);
3112 vrott(2);
3113 gfunc_call(1);
3114 vpushi(0);
3115 PUT_R_RET(vtop, t);
3116 } else {
3117 gen_cvt_itof(t);
3120 #endif
3122 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64
3123 #define gen_cvt_ftoi1 gen_cvt_ftoi
3124 #else
3125 /* generic ftoi for unsigned long long case */
3126 static void gen_cvt_ftoi1(int t)
3128 int st;
3129 if (t == (VT_LLONG | VT_UNSIGNED)) {
3130 /* not handled natively */
3131 st = vtop->type.t & VT_BTYPE;
3132 if (st == VT_FLOAT)
3133 vpush_helper_func(TOK___fixunssfdi);
3134 #if LDOUBLE_SIZE != 8
3135 else if (st == VT_LDOUBLE)
3136 vpush_helper_func(TOK___fixunsxfdi);
3137 #endif
3138 else
3139 vpush_helper_func(TOK___fixunsdfdi);
3140 vrott(2);
3141 gfunc_call(1);
3142 vpushi(0);
3143 PUT_R_RET(vtop, t);
3144 } else {
3145 gen_cvt_ftoi(t);
3148 #endif
3150 /* special delayed cast for char/short */
3151 static void force_charshort_cast(void)
3153 int sbt = BFGET(vtop->r, VT_MUSTCAST) == 2 ? VT_LLONG : VT_INT;
3154 int dbt = vtop->type.t;
3155 vtop->r &= ~VT_MUSTCAST;
3156 vtop->type.t = sbt;
3157 gen_cast_s(dbt == VT_BOOL ? VT_BYTE|VT_UNSIGNED : dbt);
3158 vtop->type.t = dbt;
3161 static void gen_cast_s(int t)
3163 CType type;
3164 type.t = t;
3165 type.ref = NULL;
3166 gen_cast(&type);
3169 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
3170 static void gen_cast(CType *type)
3172 int sbt, dbt, sf, df, c;
3173 int dbt_bt, sbt_bt, ds, ss, bits, trunc;
3175 /* special delayed cast for char/short */
3176 if (vtop->r & VT_MUSTCAST)
3177 force_charshort_cast();
3179 /* bitfields first get cast to ints */
3180 if (vtop->type.t & VT_BITFIELD)
3181 gv(RC_INT);
3183 dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
3184 sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
3185 if (sbt == VT_FUNC)
3186 sbt = VT_PTR;
3188 again:
3189 if (sbt != dbt) {
3190 sf = is_float(sbt);
3191 df = is_float(dbt);
3192 dbt_bt = dbt & VT_BTYPE;
3193 sbt_bt = sbt & VT_BTYPE;
3194 if (dbt_bt == VT_VOID)
3195 goto done;
3196 if (sbt_bt == VT_VOID) {
3197 error:
3198 cast_error(&vtop->type, type);
3201 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
3202 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
3203 /* don't try to convert to ldouble when cross-compiling
3204 (except when it's '0' which is needed for arm:gen_negf()) */
3205 if (dbt_bt == VT_LDOUBLE && !nocode_wanted && (sf || vtop->c.i != 0))
3206 c = 0;
3207 #endif
3208 if (c) {
3209 /* constant case: we can do it now */
3210 /* XXX: in ISOC, cannot do it if error in convert */
3211 if (sbt == VT_FLOAT)
3212 vtop->c.ld = vtop->c.f;
3213 else if (sbt == VT_DOUBLE)
3214 vtop->c.ld = vtop->c.d;
3216 if (df) {
3217 if (sbt_bt == VT_LLONG) {
3218 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
3219 vtop->c.ld = vtop->c.i;
3220 else
3221 vtop->c.ld = -(long double)-vtop->c.i;
3222 } else if(!sf) {
3223 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
3224 vtop->c.ld = (uint32_t)vtop->c.i;
3225 else
3226 vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
3229 if (dbt == VT_FLOAT)
3230 vtop->c.f = (float)vtop->c.ld;
3231 else if (dbt == VT_DOUBLE)
3232 vtop->c.d = (double)vtop->c.ld;
3233 } else if (sf && dbt == VT_BOOL) {
3234 vtop->c.i = (vtop->c.ld != 0);
3235 } else {
3236 if(sf)
3237 vtop->c.i = vtop->c.ld;
3238 else if (sbt_bt == VT_LLONG || (PTR_SIZE == 8 && sbt == VT_PTR))
3240 else if (sbt & VT_UNSIGNED)
3241 vtop->c.i = (uint32_t)vtop->c.i;
3242 else
3243 vtop->c.i = ((uint32_t)vtop->c.i | -(vtop->c.i & 0x80000000));
3245 if (dbt_bt == VT_LLONG || (PTR_SIZE == 8 && dbt == VT_PTR))
3247 else if (dbt == VT_BOOL)
3248 vtop->c.i = (vtop->c.i != 0);
3249 else {
3250 uint32_t m = dbt_bt == VT_BYTE ? 0xff :
3251 dbt_bt == VT_SHORT ? 0xffff :
3252 0xffffffff;
3253 vtop->c.i &= m;
3254 if (!(dbt & VT_UNSIGNED))
3255 vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
3258 goto done;
3260 } else if (dbt == VT_BOOL
3261 && (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM))
3262 == (VT_CONST | VT_SYM)) {
3263 /* addresses are considered non-zero (see tcctest.c:sinit23) */
3264 vtop->r = VT_CONST;
3265 vtop->c.i = 1;
3266 goto done;
3269 /* cannot generate code for global or static initializers */
3270 if (nocode_wanted & DATA_ONLY_WANTED)
3271 goto done;
3273 /* non constant case: generate code */
3274 if (dbt == VT_BOOL) {
3275 gen_test_zero(TOK_NE);
3276 goto done;
3279 if (sf || df) {
3280 if (sf && df) {
3281 /* convert from fp to fp */
3282 gen_cvt_ftof(dbt);
3283 } else if (df) {
3284 /* convert int to fp */
3285 gen_cvt_itof1(dbt);
3286 } else {
3287 /* convert fp to int */
3288 sbt = dbt;
3289 if (dbt_bt != VT_LLONG && dbt_bt != VT_INT)
3290 sbt = VT_INT;
3291 gen_cvt_ftoi1(sbt);
3292 goto again; /* may need char/short cast */
3294 goto done;
3297 ds = btype_size(dbt_bt);
3298 ss = btype_size(sbt_bt);
3299 if (ds == 0 || ss == 0)
3300 goto error;
3302 if (IS_ENUM(type->t) && type->ref->c < 0)
3303 tcc_error("cast to incomplete type");
3305 /* same size and no sign conversion needed */
3306 if (ds == ss && ds >= 4)
3307 goto done;
3308 if (dbt_bt == VT_PTR || sbt_bt == VT_PTR) {
3309 tcc_warning("cast between pointer and integer of different size");
3310 if (sbt_bt == VT_PTR) {
3311 /* put integer type to allow logical operations below */
3312 vtop->type.t = (PTR_SIZE == 8 ? VT_LLONG : VT_INT);
3316 /* processor allows { int a = 0, b = *(char*)&a; }
3317 That means that if we cast to less width, we can just
3318 change the type and read it still later. */
3319 #define ALLOW_SUBTYPE_ACCESS 1
3321 if (ALLOW_SUBTYPE_ACCESS && (vtop->r & VT_LVAL)) {
3322 /* value still in memory */
3323 if (ds <= ss)
3324 goto done;
3325 /* ss <= 4 here */
3326 if (ds <= 4 && !(dbt == (VT_SHORT | VT_UNSIGNED) && sbt == VT_BYTE)) {
3327 gv(RC_INT);
3328 goto done; /* no 64bit envolved */
3331 gv(RC_INT);
3333 trunc = 0;
3334 #if PTR_SIZE == 4
3335 if (ds == 8) {
3336 /* generate high word */
3337 if (sbt & VT_UNSIGNED) {
3338 vpushi(0);
3339 gv(RC_INT);
3340 } else {
3341 gv_dup();
3342 vpushi(31);
3343 gen_op(TOK_SAR);
3345 lbuild(dbt);
3346 } else if (ss == 8) {
3347 /* from long long: just take low order word */
3348 lexpand();
3349 vpop();
3351 ss = 4;
3353 #elif PTR_SIZE == 8
3354 if (ds == 8) {
3355 /* need to convert from 32bit to 64bit */
3356 if (sbt & VT_UNSIGNED) {
3357 #if defined(TCC_TARGET_RISCV64)
3358 /* RISC-V keeps 32bit vals in registers sign-extended.
3359 So here we need a zero-extension. */
3360 trunc = 32;
3361 #else
3362 goto done;
3363 #endif
3364 } else {
3365 gen_cvt_sxtw();
3366 goto done;
3368 ss = ds, ds = 4, dbt = sbt;
3369 } else if (ss == 8) {
3370 /* RISC-V keeps 32bit vals in registers sign-extended.
3371 So here we need a sign-extension for signed types and
3372 zero-extension. for unsigned types. */
3373 #if !defined(TCC_TARGET_RISCV64)
3374 trunc = 32; /* zero upper 32 bits for non RISC-V targets */
3375 #endif
3376 } else {
3377 ss = 4;
3379 #endif
3381 if (ds >= ss)
3382 goto done;
3383 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64 || defined TCC_TARGET_ARM64
3384 if (ss == 4) {
3385 gen_cvt_csti(dbt);
3386 goto done;
3388 #endif
3389 bits = (ss - ds) * 8;
3390 /* for unsigned, gen_op will convert SAR to SHR */
3391 vtop->type.t = (ss == 8 ? VT_LLONG : VT_INT) | (dbt & VT_UNSIGNED);
3392 vpushi(bits);
3393 gen_op(TOK_SHL);
3394 vpushi(bits - trunc);
3395 gen_op(TOK_SAR);
3396 vpushi(trunc);
3397 gen_op(TOK_SHR);
3399 done:
3400 vtop->type = *type;
3401 vtop->type.t &= ~ ( VT_CONSTANT | VT_VOLATILE | VT_ARRAY );
3404 /* return type size as known at compile time. Put alignment at 'a' */
3405 ST_FUNC int type_size(CType *type, int *a)
3407 Sym *s;
3408 int bt;
3410 bt = type->t & VT_BTYPE;
3411 if (bt == VT_STRUCT) {
3412 /* struct/union */
3413 s = type->ref;
3414 *a = s->r;
3415 return s->c;
3416 } else if (bt == VT_PTR) {
3417 if (type->t & VT_ARRAY) {
3418 int ts;
3420 s = type->ref;
3421 ts = type_size(&s->type, a);
3423 if (ts < 0 && s->c < 0)
3424 ts = -ts;
3426 return ts * s->c;
3427 } else {
3428 *a = PTR_SIZE;
3429 return PTR_SIZE;
3431 } else if (IS_ENUM(type->t) && type->ref->c < 0) {
3432 *a = 0;
3433 return -1; /* incomplete enum */
3434 } else if (bt == VT_LDOUBLE) {
3435 *a = LDOUBLE_ALIGN;
3436 return LDOUBLE_SIZE;
3437 } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
3438 #ifdef TCC_TARGET_I386
3439 #ifdef TCC_TARGET_PE
3440 *a = 8;
3441 #else
3442 *a = 4;
3443 #endif
3444 #elif defined(TCC_TARGET_ARM)
3445 #ifdef TCC_ARM_EABI
3446 *a = 8;
3447 #else
3448 *a = 4;
3449 #endif
3450 #else
3451 *a = 8;
3452 #endif
3453 return 8;
3454 } else if (bt == VT_INT || bt == VT_FLOAT) {
3455 *a = 4;
3456 return 4;
3457 } else if (bt == VT_SHORT) {
3458 *a = 2;
3459 return 2;
3460 } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
3461 *a = 8;
3462 return 16;
3463 } else {
3464 /* char, void, function, _Bool */
3465 *a = 1;
3466 return 1;
3470 /* push type size as known at runtime time on top of value stack. Put
3471 alignment at 'a' */
3472 static void vpush_type_size(CType *type, int *a)
3474 if (type->t & VT_VLA) {
3475 type_size(&type->ref->type, a);
3476 vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
3477 } else {
3478 int size = type_size(type, a);
3479 if (size < 0)
3480 tcc_error("unknown type size");
3481 vpushs(size);
3485 /* return the pointed type of t */
3486 static inline CType *pointed_type(CType *type)
3488 return &type->ref->type;
3491 /* modify type so that its it is a pointer to type. */
3492 ST_FUNC void mk_pointer(CType *type)
3494 Sym *s;
3495 s = sym_push(SYM_FIELD, type, 0, -1);
3496 type->t = VT_PTR | (type->t & VT_STORAGE);
3497 type->ref = s;
3500 /* return true if type1 and type2 are exactly the same (including
3501 qualifiers).
3503 static int is_compatible_types(CType *type1, CType *type2)
3505 return compare_types(type1,type2,0);
3508 /* return true if type1 and type2 are the same (ignoring qualifiers).
3510 static int is_compatible_unqualified_types(CType *type1, CType *type2)
3512 return compare_types(type1,type2,1);
3515 static void cast_error(CType *st, CType *dt)
3517 type_incompatibility_error(st, dt, "cannot convert '%s' to '%s'");
3520 /* verify type compatibility to store vtop in 'dt' type */
3521 static void verify_assign_cast(CType *dt)
3523 CType *st, *type1, *type2;
3524 int dbt, sbt, qualwarn, lvl;
3526 st = &vtop->type; /* source type */
3527 dbt = dt->t & VT_BTYPE;
3528 sbt = st->t & VT_BTYPE;
3529 if (dt->t & VT_CONSTANT)
3530 tcc_warning("assignment of read-only location");
3531 switch(dbt) {
3532 case VT_VOID:
3533 if (sbt != dbt)
3534 tcc_error("assignment to void expression");
3535 break;
3536 case VT_PTR:
3537 /* special cases for pointers */
3538 /* '0' can also be a pointer */
3539 if (is_null_pointer(vtop))
3540 break;
3541 /* accept implicit pointer to integer cast with warning */
3542 if (is_integer_btype(sbt)) {
3543 tcc_warning("assignment makes pointer from integer without a cast");
3544 break;
3546 type1 = pointed_type(dt);
3547 if (sbt == VT_PTR)
3548 type2 = pointed_type(st);
3549 else if (sbt == VT_FUNC)
3550 type2 = st; /* a function is implicitly a function pointer */
3551 else
3552 goto error;
3553 if (is_compatible_types(type1, type2))
3554 break;
3555 for (qualwarn = lvl = 0;; ++lvl) {
3556 if (((type2->t & VT_CONSTANT) && !(type1->t & VT_CONSTANT)) ||
3557 ((type2->t & VT_VOLATILE) && !(type1->t & VT_VOLATILE)))
3558 qualwarn = 1;
3559 dbt = type1->t & (VT_BTYPE|VT_LONG);
3560 sbt = type2->t & (VT_BTYPE|VT_LONG);
3561 if (dbt != VT_PTR || sbt != VT_PTR)
3562 break;
3563 type1 = pointed_type(type1);
3564 type2 = pointed_type(type2);
3566 if (!is_compatible_unqualified_types(type1, type2)) {
3567 if ((dbt == VT_VOID || sbt == VT_VOID) && lvl == 0) {
3568 /* void * can match anything */
3569 } else if (dbt == sbt
3570 && is_integer_btype(sbt & VT_BTYPE)
3571 && IS_ENUM(type1->t) + IS_ENUM(type2->t)
3572 + !!((type1->t ^ type2->t) & VT_UNSIGNED) < 2) {
3573 /* Like GCC don't warn by default for merely changes
3574 in pointer target signedness. Do warn for different
3575 base types, though, in particular for unsigned enums
3576 and signed int targets. */
3577 } else {
3578 tcc_warning("assignment from incompatible pointer type");
3579 break;
3582 if (qualwarn)
3583 tcc_warning_c(warn_discarded_qualifiers)("assignment discards qualifiers from pointer target type");
3584 break;
3585 case VT_BYTE:
3586 case VT_SHORT:
3587 case VT_INT:
3588 case VT_LLONG:
3589 if (sbt == VT_PTR || sbt == VT_FUNC) {
3590 tcc_warning("assignment makes integer from pointer without a cast");
3591 } else if (sbt == VT_STRUCT) {
3592 goto case_VT_STRUCT;
3594 /* XXX: more tests */
3595 break;
3596 case VT_STRUCT:
3597 case_VT_STRUCT:
3598 if (!is_compatible_unqualified_types(dt, st)) {
3599 error:
3600 cast_error(st, dt);
3602 break;
3606 static void gen_assign_cast(CType *dt)
3608 verify_assign_cast(dt);
3609 gen_cast(dt);
/* store vtop in lvalue pushed on stack */
ST_FUNC void vstore(void)
{
    int sbt, dbt, ft, r, size, align, bit_size, bit_pos, delayed_cast;

    ft = vtop[-1].type.t;          /* full destination type (may carry bitfield bits) */
    sbt = vtop->type.t & VT_BTYPE; /* source basic type */
    dbt = ft & VT_BTYPE;           /* destination basic type */
    verify_assign_cast(&vtop[-1].type);

    if (sbt == VT_STRUCT) {
        /* if structure, only generate pointer */
        /* structure assignment : generate memcpy */
        size = type_size(&vtop->type, &align);
        /* destination, keep on stack() as result */
        vpushv(vtop - 1);
#ifdef CONFIG_TCC_BCHECK
        if (vtop->r & VT_MUSTBOUND)
            gbound(); /* check would be wrong after gaddrof() */
#endif
        vtop->type.t = VT_PTR;
        gaddrof();
        /* source */
        vswap();
#ifdef CONFIG_TCC_BCHECK
        if (vtop->r & VT_MUSTBOUND)
            gbound();
#endif
        vtop->type.t = VT_PTR;
        gaddrof();

#ifdef TCC_TARGET_NATIVE_STRUCT_COPY
        /* inline copy when the target backend provides one (and bounds
           checking, which needs the helper call, is off) */
        if (1
#ifdef CONFIG_TCC_BCHECK
            && !tcc_state->do_bounds_check
#endif
            ) {
            gen_struct_copy(size);
        } else
#endif
        {
            /* type size */
            vpushi(size);
            /* Use memmove, rather than memcpy, as dest and src may be same: */
#ifdef TCC_ARM_EABI
            if(!(align & 7))
                vpush_helper_func(TOK_memmove8);
            else if(!(align & 3))
                vpush_helper_func(TOK_memmove4);
            else
#endif
            vpush_helper_func(TOK_memmove);
            vrott(4);
            gfunc_call(3);
        }

    } else if (ft & VT_BITFIELD) {
        /* bitfield store handling */

        /* save lvalue as expression result (example: s.b = s.a = n;) */
        vdup(), vtop[-1] = vtop[-2];

        bit_pos = BIT_POS(ft);
        bit_size = BIT_SIZE(ft);
        /* remove bit field info to avoid loops */
        vtop[-1].type.t = ft & ~VT_STRUCT_MASK;

        if (dbt == VT_BOOL) {
            /* bool normalizes the value first, then stores as byte */
            gen_cast(&vtop[-1].type);
            vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
        }
        r = adjust_bf(vtop - 1, bit_pos, bit_size);
        if (dbt != VT_BOOL) {
            gen_cast(&vtop[-1].type);
            dbt = vtop[-1].type.t & VT_BTYPE;
        }
        if (r == VT_STRUCT) {
            /* field cannot be accessed through its declared type:
               store byte-wise */
            store_packed_bf(bit_pos, bit_size);
        } else {
            unsigned long long mask = (1ULL << bit_size) - 1;
            if (dbt != VT_BOOL) {
                /* mask source */
                if (dbt == VT_LLONG)
                    vpushll(mask);
                else
                    vpushi((unsigned)mask);
                gen_op('&');
            }
            /* shift source */
            vpushi(bit_pos);
            gen_op(TOK_SHL);
            vswap();
            /* duplicate destination */
            vdup();
            vrott(3);
            /* load destination, mask and or with source */
            if (dbt == VT_LLONG)
                vpushll(~(mask << bit_pos));
            else
                vpushi(~((unsigned)mask << bit_pos));
            gen_op('&');
            gen_op('|');
            /* store result */
            vstore();
            /* ... and discard */
            vpop();
        }
    } else if (dbt == VT_VOID) {
        /* assignment to a void lvalue: just drop the destination */
        --vtop;
    } else {
        /* optimize char/short casts */
        delayed_cast = 0;
        if ((dbt == VT_BYTE || dbt == VT_SHORT)
            && is_integer_btype(sbt)
            ) {
            if ((vtop->r & VT_MUSTCAST)
                && btype_size(dbt) > btype_size(sbt)
                ) {
                force_charshort_cast();
                delayed_cast = 1;
            } else {
                gen_cast(&vtop[-1].type);
            }
        }

#ifdef CONFIG_TCC_BCHECK
        /* bound check case */
        if (vtop[-1].r & VT_MUSTBOUND) {
            vswap();
            gbound();
            vswap();
        }
#endif
        gv(RC_TYPE(dbt)); /* generate value */

        if (delayed_cast) {
            vtop->r |= BFVAL(VT_MUSTCAST, (sbt == VT_LLONG) + 1);
            //tcc_warning("deley cast %x -> %x", sbt, dbt);
            vtop->type.t = ft & VT_TYPE;
        }

        /* if lvalue was saved on stack, must read it */
        if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
            SValue sv;
            r = get_reg(RC_INT);
            sv.type.t = VT_PTRDIFF_T;
            sv.r = VT_LOCAL | VT_LVAL;
            sv.c.i = vtop[-1].c.i;
            load(r, &sv);
            vtop[-1].r = r | VT_LVAL;
        }

        r = vtop->r & VT_VALMASK;
        /* two word case handling :
           store second register at word + 4 (or +8 for x86-64) */
        if (USING_TWO_WORDS(dbt)) {
            int load_type = (dbt == VT_QFLOAT) ? VT_DOUBLE : VT_PTRDIFF_T;
            vtop[-1].type.t = load_type;
            store(r, vtop - 1);
            vswap();
            /* convert to int to increment easily */
            incr_offset(PTR_SIZE);
            vswap();
            /* XXX: it works because r2 is spilled last ! */
            store(vtop->r2, vtop - 1);
        } else {
            /* single word */
            store(r, vtop - 1);
        }
        vswap();
        vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
    }
}
/* post defines POST/PRE add. c is the token ++ or -- */
/* Emit code for ++/-- applied to the lvalue on vtop.  For a post
   operation the original value is duplicated first so it remains the
   expression result after the store. */
ST_FUNC void inc(int post, int c)
{
    test_lvalue();
    vdup(); /* save lvalue */
    if (post) {
        gv_dup(); /* duplicate value */
        vrotb(3);
        vrotb(3);
    }
    /* add constant: c - TOK_MID yields +1 for ++ and -1 for -- */
    vpushi(c - TOK_MID);
    gen_op('+');
    vstore(); /* store value */
    if (post)
        vpop(); /* if post op, return saved value */
}
/* Parse one or more adjacent string-literal tokens and concatenate them
   into the shared 'initstr' buffer, NUL-terminated.  'msg' is the error
   message used when the current token is not a string.  Returns a
   pointer to 'initstr'; contents are only valid until the next call
   (the buffer is reset each time). */
ST_FUNC CString* parse_mult_str (const char *msg)
{
    /* read the string */
    if (tok != TOK_STR)
        expect(msg);
    cstr_reset(&initstr);
    while (tok == TOK_STR) {
        /* XXX: add \0 handling too ? */
        cstr_cat(&initstr, tokc.str.data, -1);
        next();
    }
    cstr_ccat(&initstr, '\0');
    return &initstr;
}
3817 /* If I is >= 1 and a power of two, returns log2(i)+1.
3818 If I is 0 returns 0. */
3819 ST_FUNC int exact_log2p1(int i)
3821 int ret;
3822 if (!i)
3823 return 0;
3824 for (ret = 1; i >= 1 << 8; ret += 8)
3825 i >>= 8;
3826 if (i >= 1 << 4)
3827 ret += 4, i >>= 4;
3828 if (i >= 1 << 2)
3829 ret += 2, i >>= 2;
3830 if (i >= 1 << 1)
3831 ret++;
3832 return ret;
/* Parse __attribute__((...)) GNUC extension. */
/* Accumulates the recognized attributes into 'ad'; consecutive
   __attribute__((...)) groups are all consumed (via 'redo').
   Unknown attributes are warned about and their parenthesized
   arguments are skipped. */
static void parse_attribute(AttributeDef *ad)
{
    int t, n;
    char *astr;

redo:
    if (tok != TOK_ATTRIBUTE1 && tok != TOK_ATTRIBUTE2)
        return;
    next();
    skip('(');
    skip('(');
    while (tok != ')') {
        if (tok < TOK_IDENT)
            expect("attribute name");
        t = tok;
        next();
        switch(t) {
        case TOK_CLEANUP1:
        case TOK_CLEANUP2:
        {
            Sym *s;

            skip('(');
            s = sym_find(tok);
            if (!s) {
                tcc_warning_c(warn_implicit_function_declaration)(
                    "implicit declaration of function '%s'", get_tok_str(tok, &tokc));
                s = external_global_sym(tok, &func_old_type);
            } else if ((s->type.t & VT_BTYPE) != VT_FUNC)
                tcc_error("'%s' is not declared as function", get_tok_str(tok, &tokc));
            ad->cleanup_func = s;
            next();
            skip(')');
            break;
        }
        case TOK_CONSTRUCTOR1:
        case TOK_CONSTRUCTOR2:
            ad->f.func_ctor = 1;
            break;
        case TOK_DESTRUCTOR1:
        case TOK_DESTRUCTOR2:
            ad->f.func_dtor = 1;
            break;
        case TOK_ALWAYS_INLINE1:
        case TOK_ALWAYS_INLINE2:
            ad->f.func_alwinl = 1;
            break;
        case TOK_SECTION1:
        case TOK_SECTION2:
            skip('(');
            astr = parse_mult_str("section name")->data;
            ad->section = find_section(tcc_state, astr);
            skip(')');
            break;
        case TOK_ALIAS1:
        case TOK_ALIAS2:
            skip('(');
            astr = parse_mult_str("alias(\"target\")")->data;
            /* save string as token, for later */
            ad->alias_target = tok_alloc_const(astr);
            skip(')');
            break;
        case TOK_VISIBILITY1:
        case TOK_VISIBILITY2:
            skip('(');
            astr = parse_mult_str("visibility(\"default|hidden|internal|protected\")")->data;
            if (!strcmp (astr, "default"))
                ad->a.visibility = STV_DEFAULT;
            else if (!strcmp (astr, "hidden"))
                ad->a.visibility = STV_HIDDEN;
            else if (!strcmp (astr, "internal"))
                ad->a.visibility = STV_INTERNAL;
            else if (!strcmp (astr, "protected"))
                ad->a.visibility = STV_PROTECTED;
            else
                expect("visibility(\"default|hidden|internal|protected\")");
            skip(')');
            break;
        case TOK_ALIGNED1:
        case TOK_ALIGNED2:
            /* aligned with no argument means maximum useful alignment */
            if (tok == '(') {
                next();
                n = expr_const();
                if (n <= 0 || (n & (n - 1)) != 0)
                    tcc_error("alignment must be a positive power of two");
                skip(')');
            } else {
                n = MAX_ALIGN;
            }
            ad->a.aligned = exact_log2p1(n);
            if (n != 1 << (ad->a.aligned - 1))
                tcc_error("alignment of %d is larger than implemented", n);
            break;
        case TOK_PACKED1:
        case TOK_PACKED2:
            ad->a.packed = 1;
            break;
        case TOK_WEAK1:
        case TOK_WEAK2:
            ad->a.weak = 1;
            break;
        case TOK_NODEBUG1:
        case TOK_NODEBUG2:
            ad->a.nodebug = 1;
            break;
        case TOK_UNUSED1:
        case TOK_UNUSED2:
            /* currently, no need to handle it because tcc does not
               track unused objects */
            break;
        case TOK_NORETURN1:
        case TOK_NORETURN2:
            ad->f.func_noreturn = 1;
            break;
        case TOK_CDECL1:
        case TOK_CDECL2:
        case TOK_CDECL3:
            ad->f.func_call = FUNC_CDECL;
            break;
        case TOK_STDCALL1:
        case TOK_STDCALL2:
        case TOK_STDCALL3:
            ad->f.func_call = FUNC_STDCALL;
            break;
#ifdef TCC_TARGET_I386
        case TOK_REGPARM1:
        case TOK_REGPARM2:
            /* clamp regparm count to [0,3] */
            skip('(');
            n = expr_const();
            if (n > 3)
                n = 3;
            else if (n < 0)
                n = 0;
            if (n > 0)
                ad->f.func_call = FUNC_FASTCALL1 + n - 1;
            skip(')');
            break;
        case TOK_FASTCALL1:
        case TOK_FASTCALL2:
        case TOK_FASTCALL3:
            ad->f.func_call = FUNC_FASTCALLW;
            break;
#endif
        case TOK_MODE:
            /* __mode__(X) overrides the declared integer width;
               stored as basic type + 1 so 0 means "not set" */
            skip('(');
            switch(tok) {
            case TOK_MODE_DI:
                ad->attr_mode = VT_LLONG + 1;
                break;
            case TOK_MODE_QI:
                ad->attr_mode = VT_BYTE + 1;
                break;
            case TOK_MODE_HI:
                ad->attr_mode = VT_SHORT + 1;
                break;
            case TOK_MODE_SI:
            case TOK_MODE_word:
                ad->attr_mode = VT_INT + 1;
                break;
            default:
                tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok, NULL));
                break;
            }
            next();
            skip(')');
            break;
        case TOK_DLLEXPORT:
            ad->a.dllexport = 1;
            break;
        case TOK_NODECORATE:
            ad->a.nodecorate = 1;
            break;
        case TOK_DLLIMPORT:
            ad->a.dllimport = 1;
            break;
        default:
            tcc_warning_c(warn_unsupported)("'%s' attribute ignored", get_tok_str(t, NULL));
            /* skip parameters */
            if (tok == '(') {
                int parenthesis = 0;
                do {
                    if (tok == '(')
                        parenthesis++;
                    else if (tok == ')')
                        parenthesis--;
                    next();
                } while (parenthesis && tok != -1);
            }
            break;
        }
        if (tok != ',')
            break;
        next();
    }
    skip(')');
    skip(')');
    goto redo;
}
/* Find member 'v' in struct/union 'type', searching anonymous
   sub-structs/unions recursively.  '*cumofs' receives the accumulated
   byte offset of the member within the outermost struct.  The SYM_FIELD
   bit in 'v' distinguishes recursive calls from the top-level one; only
   the top-level call validates the type and reports "field not found". */
static Sym * find_field (CType *type, int v, int *cumofs)
{
    Sym *s = type->ref;
    int v1 = v | SYM_FIELD;
    if (!(v & SYM_FIELD)) { /* top-level call */
        if ((type->t & VT_BTYPE) != VT_STRUCT)
            expect("struct or union");
        if (v < TOK_UIDENT)
            expect("field name");
        if (s->c < 0)
            tcc_error("dereferencing incomplete type '%s'",
                      get_tok_str(s->v & ~SYM_STRUCT, 0));
    }
    while ((s = s->next) != NULL) {
        if (s->v == v1) {
            *cumofs = s->c;
            return s;
        }
        if ((s->type.t & VT_BTYPE) == VT_STRUCT
            && s->v >= (SYM_FIRST_ANOM | SYM_FIELD)) {
            /* try to find field in anonymous sub-struct/union */
            Sym *ret = find_field (&s->type, v1, cumofs);
            if (ret) {
                /* add the anonymous member's own offset on the way out */
                *cumofs += s->c;
                return ret;
            }
        }
    }
    if (!(v & SYM_FIELD))
        tcc_error("field not found: %s", get_tok_str(v, NULL));
    return s;
}
/* Detect duplicate member names across anonymous sub-structs.
   Called twice: with check=1 it marks each seen name by toggling
   SYM_FIELD in the global ident table (erroring if already marked),
   with check=0 it toggles the marks back off. */
static void check_fields (CType *type, int check)
{
    Sym *s = type->ref;

    while ((s = s->next) != NULL) {
        int v = s->v & ~SYM_FIELD;
        if (v < SYM_FIRST_ANOM) {
            TokenSym *ts = table_ident[v - TOK_IDENT];
            if (check && (ts->tok & SYM_FIELD))
                tcc_error("duplicate member '%s'", get_tok_str(v, NULL));
            /* toggle: sets the mark in pass 1, clears it in pass 2 */
            ts->tok ^= SYM_FIELD;
        } else if ((s->type.t & VT_BTYPE) == VT_STRUCT)
            check_fields (&s->type, check);
    }
}
/* Compute offset, size and alignment of all members of the
   struct/union 'type', honoring __attribute__((aligned/packed)) from
   'ad', #pragma pack, and either PCC(GCC)- or MS-compatible bitfield
   layout.  Results are stored in each member Sym (f->c = offset,
   bitfield position/size re-encoded in f->type.t) and in the struct
   Sym itself (ref->c = size, ref->r = alignment). */
static void struct_layout(CType *type, AttributeDef *ad)
{
    int size, align, maxalign, offset, c, bit_pos, bit_size;
    int packed, a, bt, prevbt, prev_bit_size;
    int pcc = !tcc_state->ms_bitfields;
    int pragma_pack = *tcc_state->pack_stack_ptr;
    Sym *f;

    maxalign = 1;
    offset = 0;
    c = 0;          /* running byte offset / final size */
    bit_pos = 0;    /* bit offset within the current storage unit */
    prevbt = VT_STRUCT; /* make it never match */
    prev_bit_size = 0;

//#define BF_DEBUG

    for (f = type->ref->next; f; f = f->next) {
        if (f->type.t & VT_BITFIELD)
            bit_size = BIT_SIZE(f->type.t);
        else
            bit_size = -1; /* not a bitfield */
        size = type_size(&f->type, &align);
        a = f->a.aligned ? 1 << (f->a.aligned - 1) : 0;
        packed = 0;

        if (pcc && bit_size == 0) {
            /* in pcc mode, packing does not affect zero-width bitfields */

        } else {
            /* in pcc mode, attribute packed overrides if set. */
            if (pcc && (f->a.packed || ad->a.packed))
                align = packed = 1;

            /* pragma pack overrides align if lesser and packs bitfields always */
            if (pragma_pack) {
                packed = 1;
                if (pragma_pack < align)
                    align = pragma_pack;
                /* in pcc mode pragma pack also overrides individual align */
                if (pcc && pragma_pack < a)
                    a = 0;
            }
            /* some individual align was specified */
            if (a)
                align = a;
        }

        if (type->ref->type.t == VT_UNION) {
            /* union: all members at offset 0; size is the maximum */
            if (pcc && bit_size >= 0)
                size = (bit_size + 7) >> 3;
            offset = 0;
            if (size > c)
                c = size;

        } else if (bit_size < 0) {
            /* plain (non-bitfield) struct member */
            if (pcc)
                c += (bit_pos + 7) >> 3; /* flush pending bitfield bits */
            c = (c + align - 1) & -align;
            offset = c;
            if (size > 0)
                c += size;
            bit_pos = 0;
            prevbt = VT_STRUCT;
            prev_bit_size = 0;

        } else {
            /* A bit-field. Layout is more complicated. There are two
               options: PCC (GCC) compatible and MS compatible */
            if (pcc) {
                /* In PCC layout a bit-field is placed adjacent to the
                   preceding bit-fields, except if:
                   - it has zero-width
                   - an individual alignment was given
                   - it would overflow its base type container and
                     there is no packing */
                if (bit_size == 0) {
            new_field:
                    c = (c + ((bit_pos + 7) >> 3) + align - 1) & -align;
                    bit_pos = 0;
                } else if (f->a.aligned) {
                    goto new_field;
                } else if (!packed) {
                    int a8 = align * 8;
                    int ofs = ((c * 8 + bit_pos) % a8 + bit_size + a8 - 1) / a8;
                    if (ofs > size / align)
                        goto new_field;
                }

                /* in pcc mode, long long bitfields have type int if they fit */
                if (size == 8 && bit_size <= 32)
                    f->type.t = (f->type.t & ~VT_BTYPE) | VT_INT, size = 4;

                while (bit_pos >= align * 8)
                    c += align, bit_pos -= align * 8;
                offset = c;

                /* In PCC layout named bit-fields influence the alignment
                   of the containing struct using the base types alignment,
                   except for packed fields (which here have correct align). */
                if (f->v & SYM_FIRST_ANOM
                    // && bit_size // ??? gcc on ARM/rpi does that
                    )
                    align = 1;

            } else {
                bt = f->type.t & VT_BTYPE;
                if ((bit_pos + bit_size > size * 8)
                    || (bit_size > 0) == (bt != prevbt)
                    ) {
                    c = (c + align - 1) & -align;
                    offset = c;
                    bit_pos = 0;
                    /* In MS bitfield mode a bit-field run always uses
                       at least as many bits as the underlying type.
                       To start a new run it's also required that this
                       or the last bit-field had non-zero width. */
                    if (bit_size || prev_bit_size)
                        c += size;
                }
                /* In MS layout the records alignment is normally
                   influenced by the field, except for a zero-width
                   field at the start of a run (but by further zero-width
                   fields it is again). */
                if (bit_size == 0 && prevbt != bt)
                    align = 1;
                prevbt = bt;
                prev_bit_size = bit_size;
            }

            f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
                        | (bit_pos << VT_STRUCT_SHIFT);
            bit_pos += bit_size;
        }
        if (align > maxalign)
            maxalign = align;

#ifdef BF_DEBUG
        printf("set field %s offset %-2d size %-2d align %-2d",
               get_tok_str(f->v & ~SYM_FIELD, NULL), offset, size, align);
        if (f->type.t & VT_BITFIELD) {
            printf(" pos %-2d bits %-2d",
                   BIT_POS(f->type.t),
                   BIT_SIZE(f->type.t)
                   );
        }
        printf("\n");
#endif

        f->c = offset;
        f->r = 0;
    }

    if (pcc)
        c += (bit_pos + 7) >> 3;

    /* store size and alignment */
    a = bt = ad->a.aligned ? 1 << (ad->a.aligned - 1) : 1;
    if (a < maxalign)
        a = maxalign;
    type->ref->r = a;
    if (pragma_pack && pragma_pack < maxalign && 0 == pcc) {
        /* can happen if individual align for some member was given.  In
           this case MSVC ignores maxalign when aligning the size */
        a = pragma_pack;
        if (a < bt)
            a = bt;
    }
    c = (c + a - 1) & -a;
    type->ref->c = c;

#ifdef BF_DEBUG
    printf("struct size %-2d align %-2d\n\n", c, a), fflush(stdout);
#endif

    /* check whether we can access bitfields by their type */
    for (f = type->ref->next; f; f = f->next) {
        int s, px, cx, c0;
        CType t;

        if (0 == (f->type.t & VT_BITFIELD))
            continue;
        f->type.ref = f;
        f->auxtype = -1;
        bit_size = BIT_SIZE(f->type.t);
        if (bit_size == 0)
            continue;
        bit_pos = BIT_POS(f->type.t);
        size = type_size(&f->type, &align);

        if (bit_pos + bit_size <= size * 8 && f->c + size <= c
#ifdef TCC_TARGET_ARM
            && !(f->c & (align - 1))
#endif
            )
            continue;

        /* try to access the field using a different type */
        c0 = -1, s = align = 1;
        t.t = VT_BYTE;
        for (;;) {
            px = f->c * 8 + bit_pos;
            cx = (px >> 3) & -align;
            px = px - (cx << 3);
            if (c0 == cx)
                break;
            s = (px + bit_size + 7) >> 3;
            if (s > 4) {
                t.t = VT_LLONG;
            } else if (s > 2) {
                t.t = VT_INT;
            } else if (s > 1) {
                t.t = VT_SHORT;
            } else {
                t.t = VT_BYTE;
            }
            s = type_size(&t, &align);
            c0 = cx;
        }

        if (px + bit_size <= s * 8 && cx + s <= c
#ifdef TCC_TARGET_ARM
            && !(cx & (align - 1))
#endif
            ) {
            /* update offset and bit position */
            f->c = cx;
            bit_pos = px;
            f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
                        | (bit_pos << VT_STRUCT_SHIFT);
            if (s != size)
                f->auxtype = t.t;
#ifdef BF_DEBUG
            printf("FIX field %s offset %-2d size %-2d align %-2d "
                   "pos %-2d bits %-2d\n",
                   get_tok_str(f->v & ~SYM_FIELD, NULL),
                   cx, s, align, px, bit_size);
#endif
        } else {
            /* fall back to load/store single-byte wise */
            f->auxtype = VT_STRUCT;
#ifdef BF_DEBUG
            printf("FIX field %s : load byte-wise\n",
                   get_tok_str(f->v & ~SYM_FIELD, NULL));
#endif
        }
    }
}
static void do_Static_assert(void);

/* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
/* Parses an optional tag, an optional body, and fills 'type' with the
   resulting (possibly forward-declared) aggregate/enum type. */
static void struct_decl(CType *type, int u)
{
    int v, c, size, align, flexible;
    int bit_size, bsize, bt;
    Sym *s, *ss, **ps;
    AttributeDef ad, ad1;
    CType type1, btype;

    memset(&ad, 0, sizeof ad);
    next();
    parse_attribute(&ad);
    if (tok != '{') {
        v = tok;
        next();
        /* struct already defined ? return it */
        if (v < TOK_IDENT)
            expect("struct/union/enum name");
        s = struct_find(v);
        if (s && (s->sym_scope == local_scope || tok != '{')) {
            if (u == s->type.t)
                goto do_decl;
            if (u == VT_ENUM && IS_ENUM(s->type.t))
                goto do_decl;
            tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
        }
    } else {
        v = anon_sym++;
    }
    /* Record the original enum/struct/union token. */
    type1.t = u == VT_ENUM ? u | VT_INT | VT_UNSIGNED : u;
    type1.ref = NULL;
    /* we put an undefined size for struct/union */
    s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
    s->r = 0; /* default alignment is zero as gcc */
do_decl:
    type->t = s->type.t;
    type->ref = s;

    if (tok == '{') {
        next();
        if (s->c != -1)
            tcc_error("struct/union/enum already defined");
        s->c = -2; /* mark as being defined */
        /* cannot be empty */
        /* non empty enums are not allowed */
        ps = &s->next;
        if (u == VT_ENUM) {
            long long ll = 0, pl = 0, nl = 0; /* current / max / min value */
            CType t;
            t.ref = s;
            /* enum symbols have static storage */
            t.t = VT_INT|VT_STATIC|VT_ENUM_VAL;
            for(;;) {
                v = tok;
                if (v < TOK_UIDENT)
                    expect("identifier");
                ss = sym_find(v);
                if (ss && !local_stack)
                    tcc_error("redefinition of enumerator '%s'",
                              get_tok_str(v, NULL));
                next();
                if (tok == '=') {
                    next();
                    ll = expr_const64();
                }
                ss = sym_push(v, &t, VT_CONST, 0);
                ss->enum_val = ll;
                *ps = ss, ps = &ss->next;
                if (ll < nl)
                    nl = ll;
                if (ll > pl)
                    pl = ll;
                if (tok != ',')
                    break;
                next();
                ll++;
                /* NOTE: we accept a trailing comma */
                if (tok == '}')
                    break;
            }
            skip('}');
            /* set integral type of the enum */
            t.t = VT_INT;
            if (nl >= 0) {
                if (pl != (unsigned)pl)
                    t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
                t.t |= VT_UNSIGNED;
            } else if (pl != (int)pl || nl != (int)nl)
                t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
            s->type.t = type->t = t.t | VT_ENUM;
            s->c = 0;
            /* set type for enum members */
            for (ss = s->next; ss; ss = ss->next) {
                ll = ss->enum_val;
                if (ll == (int)ll) /* default is int if it fits */
                    continue;
                if (t.t & VT_UNSIGNED) {
                    ss->type.t |= VT_UNSIGNED;
                    if (ll == (unsigned)ll)
                        continue;
                }
                ss->type.t = (ss->type.t & ~VT_BTYPE)
                             | (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
            }
        } else {
            /* struct/union member list */
            c = 0;          /* set once a real member was seen */
            flexible = 0;
            while (tok != '}') {
                if (!parse_btype(&btype, &ad1, 0)) {
                    if (tok == TOK_STATIC_ASSERT) {
                        do_Static_assert();
                        continue;
                    }
                    skip(';');
                    continue;
                }
                while (1) {
                    if (flexible)
                        tcc_error("flexible array member '%s' not at the end of struct",
                                  get_tok_str(v, NULL));
                    bit_size = -1;
                    v = 0;
                    type1 = btype;
                    if (tok != ':') {
                        if (tok != ';')
                            type_decl(&type1, &ad1, &v, TYPE_DIRECT);
                        if (v == 0) {
                            if ((type1.t & VT_BTYPE) != VT_STRUCT)
                                expect("identifier");
                            else {
                                int v = btype.ref->v;
                                if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
                                    if (tcc_state->ms_extensions == 0)
                                        expect("identifier");
                                }
                            }
                        }
                        if (type_size(&type1, &align) < 0) {
                            if ((u == VT_STRUCT) && (type1.t & VT_ARRAY) && c)
                                flexible = 1;
                            else
                                tcc_error("field '%s' has incomplete type",
                                          get_tok_str(v, NULL));
                        }
                        if ((type1.t & VT_BTYPE) == VT_FUNC ||
                            (type1.t & VT_BTYPE) == VT_VOID ||
                            (type1.t & VT_STORAGE))
                            tcc_error("invalid type for '%s'",
                                      get_tok_str(v, NULL));
                    }
                    if (tok == ':') {
                        next();
                        bit_size = expr_const();
                        /* XXX: handle v = 0 case for messages */
                        if (bit_size < 0)
                            tcc_error("negative width in bit-field '%s'",
                                      get_tok_str(v, NULL));
                        if (v && bit_size == 0)
                            tcc_error("zero width for bit-field '%s'",
                                      get_tok_str(v, NULL));
                        parse_attribute(&ad1);
                    }
                    size = type_size(&type1, &align);
                    if (bit_size >= 0) {
                        bt = type1.t & VT_BTYPE;
                        if (bt != VT_INT &&
                            bt != VT_BYTE &&
                            bt != VT_SHORT &&
                            bt != VT_BOOL &&
                            bt != VT_LLONG)
                            tcc_error("bitfields must have scalar type");
                        bsize = size * 8;
                        if (bit_size > bsize) {
                            tcc_error("width of '%s' exceeds its type",
                                      get_tok_str(v, NULL));
                        } else if (bit_size == bsize
                                   && !ad.a.packed && !ad1.a.packed) {
                            /* no need for bit fields */
                            ;
                        } else if (bit_size == 64) {
                            tcc_error("field width 64 not implemented");
                        } else {
                            type1.t = (type1.t & ~VT_STRUCT_MASK)
                                      | VT_BITFIELD
                                      | (bit_size << (VT_STRUCT_SHIFT + 6));
                        }
                    }
                    if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
                        /* Remember we've seen a real field to check
                           for placement of flexible array member. */
                        c = 1;
                    }
                    /* If member is a struct or bit-field, enforce
                       placing into the struct (as anonymous). */
                    if (v == 0 &&
                        ((type1.t & VT_BTYPE) == VT_STRUCT ||
                         bit_size >= 0)) {
                        v = anon_sym++;
                    }
                    if (v) {
                        ss = sym_push(v | SYM_FIELD, &type1, 0, 0);
                        ss->a = ad1.a;
                        *ps = ss;
                        ps = &ss->next;
                    }
                    if (tok == ';' || tok == TOK_EOF)
                        break;
                    skip(',');
                }
                skip(';');
            }
            skip('}');
            parse_attribute(&ad);
            if (ad.cleanup_func) {
                tcc_warning("attribute '__cleanup__' ignored on type");
            }
            check_fields(type, 1);
            check_fields(type, 0);
            struct_layout(type, &ad);
            if (debug_modes)
                tcc_debug_fix_anon(tcc_state, type);
        }
    }
}
/* Merge the symbol and function attributes of 's' into 'ad'. */
static void sym_to_attr(AttributeDef *ad, Sym *s)
{
    merge_symattr(&ad->a, &s->a);
    merge_funcattr(&ad->f, &s->f);
}
4567 /* Add type qualifiers to a type. If the type is an array then the qualifiers
4568 are added to the element type, copied because it could be a typedef. */
4569 static void parse_btype_qualify(CType *type, int qualifiers)
4571 while (type->t & VT_ARRAY) {
4572 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
4573 type = &type->ref->type;
4575 type->t |= qualifiers;
4578 /* return 0 if no type declaration. otherwise, return the basic type
4579 and skip it.
4581 static int parse_btype(CType *type, AttributeDef *ad, int ignore_label)
4583 int t, u, bt, st, type_found, typespec_found, g, n;
4584 Sym *s;
4585 CType type1;
4587 memset(ad, 0, sizeof(AttributeDef));
4588 type_found = 0;
4589 typespec_found = 0;
4590 t = VT_INT;
4591 bt = st = -1;
4592 type->ref = NULL;
4594 while(1) {
4595 switch(tok) {
4596 case TOK_EXTENSION:
4597 /* currently, we really ignore extension */
4598 next();
4599 continue;
4601 /* basic types */
4602 case TOK_CHAR:
4603 u = VT_BYTE;
4604 basic_type:
4605 next();
4606 basic_type1:
4607 if (u == VT_SHORT || u == VT_LONG) {
4608 if (st != -1 || (bt != -1 && bt != VT_INT))
4609 tmbt: tcc_error("too many basic types");
4610 st = u;
4611 } else {
4612 if (bt != -1 || (st != -1 && u != VT_INT))
4613 goto tmbt;
4614 bt = u;
4616 if (u != VT_INT)
4617 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
4618 typespec_found = 1;
4619 break;
4620 case TOK_VOID:
4621 u = VT_VOID;
4622 goto basic_type;
4623 case TOK_SHORT:
4624 u = VT_SHORT;
4625 goto basic_type;
4626 case TOK_INT:
4627 u = VT_INT;
4628 goto basic_type;
4629 case TOK_ALIGNAS: