/*
 * tccgen.c — TCC code generator (from the tinycc repository, mob branch).
 * Snapshot taken at commit "x86-64 codegen: avoid allocating VLA of size 0".
 */
1 /*
2 * TCC - Tiny C Compiler
3 *
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#include "tcc.h"
/********************************************************/
/* global variables */

/* loc : local variable index
   ind : output code index
   rsym: return symbol
   anon_sym: anonymous symbol index
*/
ST_DATA int rsym, anon_sym, ind, loc;

/* symbol allocator free list and its backing pools */
ST_DATA Sym *sym_free_first;
ST_DATA void **sym_pools;
ST_DATA int nb_sym_pools;

ST_DATA Sym *global_stack;
ST_DATA Sym *local_stack;
ST_DATA Sym *define_stack;
ST_DATA Sym *global_label_stack;
ST_DATA Sym *local_label_stack;

/* cleanup handlers (attribute((cleanup))) and pending goto fixups */
static Sym *all_cleanups, *current_cleanups, *pending_gotos;
static int ncleanups;

static int local_scope;
static int in_sizeof;
static int section_sym;    /* ELF section symbol used by the stabs output */

ST_DATA int vlas_in_scope; /* number of VLAs that are currently in scope */
ST_DATA int vla_sp_root_loc; /* vla_sp_loc for SP before any VLAs were pushed */
ST_DATA int vla_sp_loc; /* Pointer to variable holding location to store stack pointer on the stack when modifying stack pointer */

/* the value stack; vtop points at the current top, pvtop is the
   expected top used by check_vstack() for leak detection */
ST_DATA SValue __vstack[1+VSTACK_SIZE], *vtop, *pvtop;

ST_DATA int const_wanted; /* true if constant wanted */
ST_DATA int nocode_wanted; /* no code generation wanted */
#define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
#define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */
ST_DATA int global_expr;  /* true if compound literals must be allocated globally (used during initializers parsing */
ST_DATA CType func_vt; /* current function return type (used by return instruction) */
ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
ST_DATA int func_vc;
ST_DATA int last_line_num, last_ind, func_ind; /* debug last line number and pc */
ST_DATA const char *funcname;
ST_DATA int g_debug;

/* frequently used predefined types */
ST_DATA CType char_pointer_type, func_old_type, int_type, size_type, ptrdiff_type;

/* state of the innermost switch statement being parsed */
ST_DATA struct switch_t {
    struct case_t {
        int64_t v1, v2;
        int sym;
    } **p; int n; /* list of case ranges */
    int def_sym; /* default symbol */
} *cur_switch; /* current switch */

#define MAX_TEMP_LOCAL_VARIABLE_NUMBER 0x4
/* list of temporary local variables on the stack in current function. */
ST_DATA struct temp_local_variable {
    int location; /* offset on stack. Svalue.c.i */
    short size;
    short align;
} arr_temp_local_vars[MAX_TEMP_LOCAL_VARIABLE_NUMBER];
short nb_temp_local_vars;
/* ------------------------------------------------------------------------- */
/* forward declarations for the parser/codegen helpers defined below */

static void gen_cast(CType *type);
static void gen_cast_s(int t);
static inline CType *pointed_type(CType *type);
static int is_compatible_types(CType *type1, CType *type2);
static int parse_btype(CType *type, AttributeDef *ad);
static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td);
static void parse_expr_type(CType *type);
static void init_putv(CType *type, Section *sec, unsigned long c);
static void decl_initializer(CType *type, Section *sec, unsigned long c, int flags);
static void block(int *bsym, Sym *bcl, int *csym, Sym *ccl, int is_expr);
static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
static void decl(int l);
static int decl0(int l, int is_for_loop_init, Sym *);
static void expr_eq(void);
static void vla_runtime_type_size(CType *type, int *a);
static void vla_sp_restore(void);
static void vla_sp_restore_root(void);
static int is_compatible_unqualified_types(CType *type1, CType *type2);
static inline int64_t expr_const64(void);
static void vpush64(int ty, unsigned long long v);
static void vpush(CType *type);
static int gvtst(int inv, int t);
static void gen_inline_functions(TCCState *s);
static void skip_or_save_block(TokenString **str);
static void gv_dup(void);
static int get_temp_local_var(int size,int align);
static void clear_temp_local_var_list();
117 static void reset_local_scope(void)
119 if (current_cleanups)
120 tcc_error("ICE current_cleanups");
121 sym_pop(&all_cleanups, NULL, 0);
122 local_scope = 0;
125 ST_INLN int is_float(int t)
127 int bt;
128 bt = t & VT_BTYPE;
129 return bt == VT_LDOUBLE || bt == VT_DOUBLE || bt == VT_FLOAT || bt == VT_QFLOAT;
/* we use our own 'finite' function to avoid potential problems with
   non standard math libs */
/* XXX: endianness dependent */
ST_FUNC int ieee_finite(double d)
{
    int p[4];
    memcpy(p, &d, sizeof(double));
    /* On little-endian targets p[1] holds sign/exponent/high mantissa.
       OR-ing 0x800fffff saturates every bit except the 11-bit exponent
       field, so "+1" carries out of bit 31 exactly when the exponent is
       all ones (infinity/NaN): the returned bit is 1 iff d is finite. */
    return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
}
/* compiling intel long double natively (host and target both use the
   x87 80-bit format) */
#if (defined __i386__ || defined __x86_64__) \
    && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
# define TCC_IS_NATIVE_387
#endif
148 ST_FUNC void test_lvalue(void)
150 if (!(vtop->r & VT_LVAL))
151 expect("lvalue");
154 ST_FUNC void check_vstack(void)
156 if (pvtop != vtop)
157 tcc_error("internal compiler error: vstack leak (%d)", vtop - pvtop);
160 /* ------------------------------------------------------------------------- */
/* vstack debugging aid */
#if 0
/* Dump 'b' value-stack entries, starting 'a' slots below the top. */
void pv (const char *lbl, int a, int b)
{
    int i;
    for (i = a; i < a + b; ++i) {
        SValue *p = &vtop[-i];
        printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
               lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
    }
}
#endif
175 /* ------------------------------------------------------------------------- */
176 /* start of translation unit info */
177 ST_FUNC void tcc_debug_start(TCCState *s1)
179 if (s1->do_debug) {
180 char buf[512];
182 /* file info: full path + filename */
183 section_sym = put_elf_sym(symtab_section, 0, 0,
184 ELFW(ST_INFO)(STB_LOCAL, STT_SECTION), 0,
185 text_section->sh_num, NULL);
186 getcwd(buf, sizeof(buf));
187 #ifdef _WIN32
188 normalize_slashes(buf);
189 #endif
190 pstrcat(buf, sizeof(buf), "/");
191 put_stabs_r(buf, N_SO, 0, 0,
192 text_section->data_offset, text_section, section_sym);
193 put_stabs_r(file->filename, N_SO, 0, 0,
194 text_section->data_offset, text_section, section_sym);
195 last_ind = 0;
196 last_line_num = 0;
199 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
200 symbols can be safely used */
201 put_elf_sym(symtab_section, 0, 0,
202 ELFW(ST_INFO)(STB_LOCAL, STT_FILE), 0,
203 SHN_ABS, file->filename);
206 /* put end of translation unit info */
207 ST_FUNC void tcc_debug_end(TCCState *s1)
209 if (!s1->do_debug)
210 return;
211 put_stabs_r(NULL, N_SO, 0, 0,
212 text_section->data_offset, text_section, section_sym);
216 /* generate line number info */
217 ST_FUNC void tcc_debug_line(TCCState *s1)
219 if (!s1->do_debug)
220 return;
221 if ((last_line_num != file->line_num || last_ind != ind)) {
222 put_stabn(N_SLINE, 0, file->line_num, ind - func_ind);
223 last_ind = ind;
224 last_line_num = file->line_num;
/* put function symbol: emits the stabs N_FUN entry for the current
   function ('f' = static, 'F' = global) plus an initial line stab. */
ST_FUNC void tcc_debug_funcstart(TCCState *s1, Sym *sym)
{
    char buf[512];

    if (!s1->do_debug)
        return;

    /* stabs info */
    /* XXX: we put here a dummy type */
    snprintf(buf, sizeof(buf), "%s:%c1",
             funcname, sym->type.t & VT_STATIC ? 'f' : 'F');
    put_stabs_r(buf, N_FUN, 0, file->line_num, 0,
                cur_text_section, sym->c);
    /* //gr gdb wants a line at the function */
    put_stabn(N_SLINE, 0, file->line_num, 0);

    last_ind = 0;
    last_line_num = 0;
}
249 /* put function size */
250 ST_FUNC void tcc_debug_funcend(TCCState *s1, int size)
252 if (!s1->do_debug)
253 return;
254 put_stabn(N_FUN, 0, 0, size);
257 /* ------------------------------------------------------------------------- */
/* Compile the translation unit currently loaded in 'file'.
   Returns 0 on success (errors abort via tcc_error/longjmp). */
ST_FUNC int tccgen_compile(TCCState *s1)
{
    cur_text_section = NULL;
    funcname = "";
    anon_sym = SYM_FIRST_ANOM;
    section_sym = 0;
    const_wanted = 0;
    nocode_wanted = 0x80000000; /* static-data-only mode, see STATIC_DATA_WANTED */

    /* define some often used types */
    int_type.t = VT_INT;
    char_pointer_type.t = VT_BYTE;
    mk_pointer(&char_pointer_type);
#if PTR_SIZE == 4
    size_type.t = VT_INT | VT_UNSIGNED;
    ptrdiff_type.t = VT_INT;
#elif LONG_SIZE == 4
    size_type.t = VT_LLONG | VT_UNSIGNED;
    ptrdiff_type.t = VT_LLONG;
#else
    size_type.t = VT_LONG | VT_LLONG | VT_UNSIGNED;
    ptrdiff_type.t = VT_LONG | VT_LLONG;
#endif
    func_old_type.t = VT_FUNC;
    func_old_type.ref = sym_push(SYM_FIELD, &int_type, 0, 0);
    func_old_type.ref->f.func_call = FUNC_CDECL;
    func_old_type.ref->f.func_type = FUNC_OLD;

    tcc_debug_start(s1);

#ifdef TCC_TARGET_ARM
    arm_init(s1);
#endif

#ifdef INC_DEBUG
    printf("%s: **** new file\n", file->filename);
#endif

    parse_flags = PARSE_FLAG_PREPROCESS | PARSE_FLAG_TOK_NUM | PARSE_FLAG_TOK_STR;
    next();
    decl(VT_CONST);
    gen_inline_functions(s1);
    check_vstack();
    /* end of translation unit info */
    tcc_debug_end(s1);
    return 0;
}
306 /* ------------------------------------------------------------------------- */
307 ST_FUNC ElfSym *elfsym(Sym *s)
309 if (!s || !s->c)
310 return NULL;
311 return &((ElfSym *)symtab_section->data)[s->c];
/* apply storage attributes to Elf symbol: visibility, binding
   (local/weak/global) and PE dllimport/dllexport flags. */
ST_FUNC void update_storage(Sym *sym)
{
    ElfSym *esym;
    int sym_bind, old_sym_bind;

    esym = elfsym(sym);
    if (!esym)
        return;

    if (sym->a.visibility)
        esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1))
            | sym->a.visibility;

    /* static and non-extern-inline symbols get local binding */
    if ((sym->type.t & VT_STATIC)
        || (sym->type.t & (VT_EXTERN | VT_INLINE)) == VT_INLINE)
        sym_bind = STB_LOCAL;
    else if (sym->a.weak)
        sym_bind = STB_WEAK;
    else
        sym_bind = STB_GLOBAL;
    old_sym_bind = ELFW(ST_BIND)(esym->st_info);
    if (sym_bind != old_sym_bind) {
        esym->st_info = ELFW(ST_INFO)(sym_bind, ELFW(ST_TYPE)(esym->st_info));
    }

#ifdef TCC_TARGET_PE
    if (sym->a.dllimport)
        esym->st_other |= ST_PE_IMPORT;
    if (sym->a.dllexport)
        esym->st_other |= ST_PE_EXPORT;
#endif

#if 0
    printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
        get_tok_str(sym->v, NULL),
        sym_bind == STB_WEAK ? 'w' : sym_bind == STB_LOCAL ? 'l' : 'g',
        sym->a.visibility,
        sym->a.dllexport,
        sym->a.dllimport
        );
#endif
}
358 /* ------------------------------------------------------------------------- */
/* update sym->c so that it points to an external symbol in section
   'section' with value 'value' */
ST_FUNC void put_extern_sym2(Sym *sym, int sh_num,
                             addr_t value, unsigned long size,
                             int can_add_underscore)
{
    int sym_type, sym_bind, info, other, t;
    ElfSym *esym;
    const char *name;
    char buf1[256];
#ifdef CONFIG_TCC_BCHECK
    char buf[32];
#endif

    if (!sym->c) {
        /* first time: compute name/type/binding and create the ELF symbol */
        name = get_tok_str(sym->v, NULL);
#ifdef CONFIG_TCC_BCHECK
        if (tcc_state->do_bounds_check) {
            /* XXX: avoid doing that for statics ? */
            /* if bound checking is activated, we change some function
               names by adding the "__bound" prefix */
            switch(sym->v) {
#ifdef TCC_TARGET_PE
            /* XXX: we rely only on malloc hooks */
            case TOK_malloc:
            case TOK_free:
            case TOK_realloc:
            case TOK_memalign:
            case TOK_calloc:
#endif
            case TOK_memcpy:
            case TOK_memmove:
            case TOK_memset:
            case TOK_strlen:
            case TOK_strcpy:
            case TOK_alloca:
                strcpy(buf, "__bound_");
                strcat(buf, name);
                name = buf;
                break;
            }
        }
#endif
        t = sym->type.t;
        if ((t & VT_BTYPE) == VT_FUNC) {
            sym_type = STT_FUNC;
        } else if ((t & VT_BTYPE) == VT_VOID) {
            sym_type = STT_NOTYPE;
        } else {
            sym_type = STT_OBJECT;
        }
        if ((t & VT_STATIC) || (t & (VT_EXTERN | VT_INLINE)) == VT_INLINE)
            sym_bind = STB_LOCAL;
        else
            sym_bind = STB_GLOBAL;
        other = 0;
#ifdef TCC_TARGET_PE
        if (sym_type == STT_FUNC && sym->type.ref) {
            Sym *ref = sym->type.ref;
            if (ref->a.nodecorate) {
                can_add_underscore = 0;
            }
            /* stdcall decoration: _name@argsize */
            if (ref->f.func_call == FUNC_STDCALL && can_add_underscore) {
                sprintf(buf1, "_%s@%d", name, ref->f.func_args * PTR_SIZE);
                name = buf1;
                other |= ST_PE_STDCALL;
                can_add_underscore = 0;
            }
        }
#endif
        if (tcc_state->leading_underscore && can_add_underscore) {
            buf1[0] = '_';
            pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
            name = buf1;
        }
        /* an explicit asm label always wins over the decorated name */
        if (sym->asm_label)
            name = get_tok_str(sym->asm_label, NULL);
        info = ELFW(ST_INFO)(sym_bind, sym_type);
        sym->c = put_elf_sym(symtab_section, value, size, info, other, sh_num, name);
    } else {
        /* symbol already exists: just refresh value/size/section */
        esym = elfsym(sym);
        esym->st_value = value;
        esym->st_size = size;
        esym->st_shndx = sh_num;
    }
    update_storage(sym);
}
448 ST_FUNC void put_extern_sym(Sym *sym, Section *section,
449 addr_t value, unsigned long size)
451 int sh_num = section ? section->sh_num : SHN_UNDEF;
452 put_extern_sym2(sym, sh_num, value, size, 1);
/* add a new relocation entry to symbol 'sym' in section 's' */
ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
                     addr_t addend)
{
    int c = 0;

    /* never emit relocations into suppressed code */
    if (nocode_wanted && s == cur_text_section)
        return;

    if (sym) {
        /* create the ELF symbol on first use */
        if (0 == sym->c)
            put_extern_sym(sym, NULL, 0, 0);
        c = sym->c;
    }

    /* now we can add ELF relocation info */
    put_elf_reloca(symtab_section, s, offset, type, c, addend);
}
#if PTR_SIZE == 4
/* 32-bit variant: relocation with an implicit zero addend */
ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
{
    greloca(s, sym, offset, type, 0);
}
#endif
481 /* ------------------------------------------------------------------------- */
/* symbol allocator: allocate a fresh pool of SYM_POOL_NB symbols and
   thread every entry onto the free list; returns the new list head. */
static Sym *__sym_malloc(void)
{
    Sym *sym_pool, *sym, *last_sym;
    int i;

    sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
    dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);

    last_sym = sym_free_first;
    sym = sym_pool;
    for(i = 0; i < SYM_POOL_NB; i++) {
        sym->next = last_sym;
        last_sym = sym;
        sym++;
    }
    sym_free_first = last_sym;
    return last_sym;
}
/* Pop a symbol from the pool free list, refilling it when exhausted.
   With SYM_DEBUG each symbol is heap-allocated individually so that
   memory checkers can track them. */
static inline Sym *sym_malloc(void)
{
    Sym *sym;
#ifndef SYM_DEBUG
    sym = sym_free_first;
    if (!sym)
        sym = __sym_malloc();
    sym_free_first = sym->next;
    return sym;
#else
    sym = tcc_malloc(sizeof(Sym));
    return sym;
#endif
}
/* Return a symbol to the pool free list (or to the heap with SYM_DEBUG). */
ST_INLN void sym_free(Sym *sym)
{
#ifndef SYM_DEBUG
    sym->next = sym_free_first;
    sym_free_first = sym;
#else
    tcc_free(sym);
#endif
}
527 /* push, without hashing */
528 ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, int c)
530 Sym *s;
532 s = sym_malloc();
533 memset(s, 0, sizeof *s);
534 s->v = v;
535 s->type.t = t;
536 s->c = c;
537 /* add in stack */
538 s->prev = *ps;
539 *ps = s;
540 return s;
543 /* find a symbol and return its associated structure. 's' is the top
544 of the symbol stack */
545 ST_FUNC Sym *sym_find2(Sym *s, int v)
547 while (s) {
548 if (s->v == v)
549 return s;
550 else if (s->v == -1)
551 return NULL;
552 s = s->prev;
554 return NULL;
557 /* structure lookup */
558 ST_INLN Sym *struct_find(int v)
560 v -= TOK_IDENT;
561 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
562 return NULL;
563 return table_ident[v]->sym_struct;
566 /* find an identifier */
567 ST_INLN Sym *sym_find(int v)
569 v -= TOK_IDENT;
570 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
571 return NULL;
572 return table_ident[v]->sym_identifier;
575 static int sym_scope(Sym *s)
577 if (IS_ENUM_VAL (s->type.t))
578 return s->type.ref->sym_scope;
579 else
580 return s->sym_scope;
/* push a given symbol on the symbol stack (local stack inside a
   function body, global stack otherwise) */
ST_FUNC Sym *sym_push(int v, CType *type, int r, int c)
{
    Sym *s, **ps;
    TokenSym *ts;

    if (local_stack)
        ps = &local_stack;
    else
        ps = &global_stack;
    s = sym_push2(ps, v, type->t, c);
    s->type.ref = type->ref;
    s->r = r;
    /* don't record fields or anonymous symbols */
    /* XXX: simplify */
    if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
        /* record symbol in token array */
        ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
        if (v & SYM_STRUCT)
            ps = &ts->sym_struct;
        else
            ps = &ts->sym_identifier;
        s->prev_tok = *ps;
        *ps = s;
        s->sym_scope = local_scope;
        /* two declarations of the same name in the same scope clash */
        if (s->prev_tok && sym_scope(s->prev_tok) == s->sym_scope)
            tcc_error("redeclaration of '%s'",
                      get_tok_str(v & ~SYM_STRUCT, NULL));
    }
    return s;
}
/* push a global identifier */
ST_FUNC Sym *global_identifier_push(int v, int t, int c)
{
    Sym *s, **ps;

    s = sym_push2(&global_stack, v, t, c);
    s->r = VT_CONST | VT_SYM;
    /* don't record anonymous symbol */
    if (v < SYM_FIRST_ANOM) {
        ps = &table_ident[v - TOK_IDENT]->sym_identifier;
        /* modify the top most local identifier, so that sym_identifier will
           point to 's' when popped; happens when called from inline asm */
        while (*ps != NULL && (*ps)->sym_scope)
            ps = &(*ps)->prev_tok;
        s->prev_tok = *ps;
        *ps = s;
    }
    return s;
}
/* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
   pop them yet from the list, but do remove them from the token array. */
ST_FUNC void sym_pop(Sym **ptop, Sym *b, int keep)
{
    Sym *s, *ss, **ps;
    TokenSym *ts;
    int v;

    s = *ptop;
    while(s != b) {
        ss = s->prev;
        v = s->v;
        /* remove symbol in token array */
        /* XXX: simplify */
        if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
            ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
            if (v & SYM_STRUCT)
                ps = &ts->sym_struct;
            else
                ps = &ts->sym_identifier;
            *ps = s->prev_tok;
        }
        if (!keep)
            sym_free(s);
        s = ss;
    }
    if (!keep)
        *ptop = b;
}
664 /* ------------------------------------------------------------------------- */
/* push a new value (type/register/constant) on the value stack */
static void vsetc(CType *type, int r, CValue *vc)
{
    int v;

    if (vtop >= vstack + (VSTACK_SIZE - 1))
        tcc_error("memory full (vstack)");
    /* cannot let cpu flags if other instruction are generated. Also
       avoid leaving VT_JMP anywhere except on the top of the stack
       because it would complicate the code generator.

       Don't do this when nocode_wanted.  vtop might come from
       !nocode_wanted regions (see 88_codeopt.c) and transforming
       it to a register without actually generating code is wrong
       as their value might still be used for real.  All values
       we push under nocode_wanted will eventually be popped
       again, so that the VT_CMP/VT_JMP value will be in vtop
       when code is unsuppressed again.

       Same logic below in vswap(); */
    if (vtop >= vstack && !nocode_wanted) {
        v = vtop->r & VT_VALMASK;
        if (v == VT_CMP || (v & ~1) == VT_JMP)
            gv(RC_INT);
    }

    vtop++;
    vtop->type = *type;
    vtop->r = r;
    vtop->r2 = VT_CONST;
    vtop->c = *vc;
    vtop->sym = NULL;
}
/* swap the two top elements of the value stack */
ST_FUNC void vswap(void)
{
    SValue tmp;

    /* cannot vswap cpu flags. See comment at vsetc() above */
    if (vtop >= vstack && !nocode_wanted) {
        int v = vtop->r & VT_VALMASK;
        if (v == VT_CMP || (v & ~1) == VT_JMP)
            gv(RC_INT);
    }
    tmp = vtop[0];
    vtop[0] = vtop[-1];
    vtop[-1] = tmp;
}
/* pop stack value */
ST_FUNC void vpop(void)
{
    int v;
    v = vtop->r & VT_VALMASK;
#if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
    /* for x86, we need to pop the FP stack */
    if (v == TREG_ST0) {
        o(0xd8dd); /* fstp %st(0) */
    } else
#endif
    if (v == VT_JMP || v == VT_JMPI) {
        /* need to put correct jump if && or || without test */
        gsym(vtop->c.i);
    }
    vtop--;
}
731 /* push constant of type "type" with useless value */
732 ST_FUNC void vpush(CType *type)
734 vset(type, VT_CONST, 0);
737 /* push integer constant */
738 ST_FUNC void vpushi(int v)
740 CValue cval;
741 cval.i = v;
742 vsetc(&int_type, VT_CONST, &cval);
745 /* push a pointer sized constant */
746 static void vpushs(addr_t v)
748 CValue cval;
749 cval.i = v;
750 vsetc(&size_type, VT_CONST, &cval);
753 /* push arbitrary 64bit constant */
754 ST_FUNC void vpush64(int ty, unsigned long long v)
756 CValue cval;
757 CType ctype;
758 ctype.t = ty;
759 ctype.ref = NULL;
760 cval.i = v;
761 vsetc(&ctype, VT_CONST, &cval);
764 /* push long long constant */
765 static inline void vpushll(long long v)
767 vpush64(VT_LLONG, v);
770 ST_FUNC void vset(CType *type, int r, int v)
772 CValue cval;
774 cval.i = v;
775 vsetc(type, r, &cval);
778 static void vseti(int r, int v)
780 CType type;
781 type.t = VT_INT;
782 type.ref = NULL;
783 vset(&type, r, v);
786 ST_FUNC void vpushv(SValue *v)
788 if (vtop >= vstack + (VSTACK_SIZE - 1))
789 tcc_error("memory full (vstack)");
790 vtop++;
791 *vtop = *v;
794 static void vdup(void)
796 vpushv(vtop);
/* rotate n first stack elements to the bottom
   I1 ... In -> I2 ... In I1 [top is right]
*/
ST_FUNC void vrotb(int n)
{
    int i;
    SValue tmp;

    tmp = vtop[-n + 1];
    for(i=-n+1;i!=0;i++)
        vtop[i] = vtop[i+1];
    vtop[0] = tmp;
}
/* rotate the n elements before entry e towards the top
   I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
*/
ST_FUNC void vrote(SValue *e, int n)
{
    int i;
    SValue tmp;

    tmp = *e;
    for(i = 0;i < n - 1; i++)
        e[-i] = e[-i - 1];
    e[-n + 1] = tmp;
}
827 /* rotate n first stack elements to the top
828 I1 ... In -> In I1 ... I(n-1) [top is right]
830 ST_FUNC void vrott(int n)
832 vrote(vtop, n);
835 /* push a symbol value of TYPE */
836 static inline void vpushsym(CType *type, Sym *sym)
838 CValue cval;
839 cval.i = 0;
840 vsetc(type, VT_CONST | VT_SYM, &cval);
841 vtop->sym = sym;
844 /* Return a static symbol pointing to a section */
845 ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
847 int v;
848 Sym *sym;
850 v = anon_sym++;
851 sym = sym_push(v, type, VT_CONST | VT_SYM, 0);
852 sym->type.t |= VT_STATIC;
853 put_extern_sym(sym, sec, offset, size);
854 return sym;
857 /* push a reference to a section offset by adding a dummy symbol */
858 static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
860 vpushsym(type, get_sym_ref(type, sec, offset, size));
/* define a new external reference to a symbol 'v' of type 'u' */
ST_FUNC Sym *external_global_sym(int v, CType *type)
{
    Sym *s;

    s = sym_find(v);
    if (!s) {
        /* push forward reference */
        s = global_identifier_push(v, type->t | VT_EXTERN, 0);
        s->type.ref = type->ref;
    } else if (IS_ASM_SYM(s)) {
        /* an asm-only symbol now gets its real C type */
        s->type.t = type->t | (s->type.t & VT_EXTERN);
        s->type.ref = type->ref;
        update_storage(s);
    }
    return s;
}
881 /* Merge symbol attributes. */
882 static void merge_symattr(struct SymAttr *sa, struct SymAttr *sa1)
884 if (sa1->aligned && !sa->aligned)
885 sa->aligned = sa1->aligned;
886 sa->packed |= sa1->packed;
887 sa->weak |= sa1->weak;
888 if (sa1->visibility != STV_DEFAULT) {
889 int vis = sa->visibility;
890 if (vis == STV_DEFAULT
891 || vis > sa1->visibility)
892 vis = sa1->visibility;
893 sa->visibility = vis;
895 sa->dllexport |= sa1->dllexport;
896 sa->nodecorate |= sa1->nodecorate;
897 sa->dllimport |= sa1->dllimport;
900 /* Merge function attributes. */
901 static void merge_funcattr(struct FuncAttr *fa, struct FuncAttr *fa1)
903 if (fa1->func_call && !fa->func_call)
904 fa->func_call = fa1->func_call;
905 if (fa1->func_type && !fa->func_type)
906 fa->func_type = fa1->func_type;
907 if (fa1->func_args && !fa->func_args)
908 fa->func_args = fa1->func_args;
911 /* Merge attributes. */
912 static void merge_attr(AttributeDef *ad, AttributeDef *ad1)
914 merge_symattr(&ad->a, &ad1->a);
915 merge_funcattr(&ad->f, &ad1->f);
917 if (ad1->section)
918 ad->section = ad1->section;
919 if (ad1->alias_target)
920 ad->alias_target = ad1->alias_target;
921 if (ad1->asm_label)
922 ad->asm_label = ad1->asm_label;
923 if (ad1->attr_mode)
924 ad->attr_mode = ad1->attr_mode;
/* Merge some type attributes when 'sym' is redeclared with 'type'. */
static void patch_type(Sym *sym, CType *type)
{
    /* a non-extern non-function redeclaration is only legal if the
       previous one was extern */
    if ((!(type->t & VT_EXTERN) || IS_ENUM_VAL(sym->type.t))
        && (type->t & VT_BTYPE) != VT_FUNC) {
        if (!(sym->type.t & VT_EXTERN))
            tcc_error("redefinition of '%s'", get_tok_str(sym->v, NULL));
        sym->type.t &= ~VT_EXTERN;
    }

    if (IS_ASM_SYM(sym)) {
        /* stay static if both are static */
        sym->type.t = type->t & (sym->type.t | ~VT_STATIC);
        sym->type.ref = type->ref;
    }

    if (!is_compatible_types(&sym->type, type)) {
        tcc_error("incompatible types for redefinition of '%s'",
                  get_tok_str(sym->v, NULL));

    } else if ((sym->type.t & VT_BTYPE) == VT_FUNC) {
        int static_proto = sym->type.t & VT_STATIC;
        /* warn if static follows non-static function declaration */
        if ((type->t & VT_STATIC) && !static_proto
            /* XXX this test for inline shouldn't be here. Until we
               implement gnu-inline mode again it silences a warning for
               mingw caused by our workarounds. */
            && !((type->t | sym->type.t) & VT_INLINE))
            tcc_warning("static storage ignored for redefinition of '%s'",
                        get_tok_str(sym->v, NULL));

        /* Force external definition if unequal inline specifier
           or an explicit extern one. */
        if ((sym->type.t | type->t) & VT_STATIC) {
            type->t |= sym->type.t & VT_INLINE;
            sym->type.t |= type->t & VT_INLINE;
        } else if (((type->t & VT_INLINE) != (sym->type.t & VT_INLINE)
                    || (type->t | sym->type.t) & VT_EXTERN)
                   && !static_proto) {
            type->t &= ~VT_INLINE;
            sym->type.t &= ~VT_INLINE;
        }
        if (0 == (type->t & VT_EXTERN)) {
            /* put complete type, use static from prototype, but don't
               overwrite type.ref, it might contain parameter names */
            sym->type.t = (type->t & ~VT_STATIC) | static_proto;
        }
    } else {
        if ((sym->type.t & VT_ARRAY) && type->ref->c >= 0) {
            /* set array size if it was omitted in extern declaration */
            if (sym->type.ref->c < 0)
                sym->type.ref->c = type->ref->c;
            else if (sym->type.ref->c != type->ref->c)
                tcc_error("conflicting type for '%s'", get_tok_str(sym->v, NULL));
        }
        if ((type->t ^ sym->type.t) & VT_STATIC)
            tcc_warning("storage mismatch for redefinition of '%s'",
                        get_tok_str(sym->v, NULL));
    }
}
989 /* Merge some storage attributes. */
990 static void patch_storage(Sym *sym, AttributeDef *ad, CType *type)
992 if (type)
993 patch_type(sym, type);
995 #ifdef TCC_TARGET_PE
996 if (sym->a.dllimport != ad->a.dllimport)
997 tcc_error("incompatible dll linkage for redefinition of '%s'",
998 get_tok_str(sym->v, NULL));
999 #endif
1000 merge_symattr(&sym->a, &ad->a);
1001 if (ad->asm_label)
1002 sym->asm_label = ad->asm_label;
1003 update_storage(sym);
/* define a new external reference to a symbol 'v' */
static Sym *external_sym(int v, CType *type, int r, AttributeDef *ad)
{
    Sym *s;

    s = sym_find(v);
    /* push a new symbol unless an existing declaration can be merged:
       asm symbols, extern declarations and function prototypes merge */
    if (!s || (!IS_ASM_SYM(s) && !(s->type.t & VT_EXTERN)
               && (!(type->t & VT_EXTERN) || s->sym_scope)
               && (s->type.t & VT_BTYPE) != VT_FUNC)) {
        if (s && !is_compatible_types(&s->type, type))
            tcc_error("conflicting types for '%s'", get_tok_str(s->v, NULL));
        /* push forward reference */
        s = sym_push(v, type, r | VT_CONST | VT_SYM, 0);
        s->a = ad->a;
        s->asm_label = ad->asm_label;
        s->sym_scope = 0;
    } else {
        /* an old-style () prototype gets replaced by the real type */
        if (s->type.ref == func_old_type.ref) {
            s->type.ref = type->ref;
            s->r = r | VT_CONST | VT_SYM;
            s->type.t |= VT_EXTERN;
        }
        patch_storage(s, ad, type);
    }
    return s;
}
1032 /* push a reference to global symbol v */
1033 ST_FUNC void vpush_global_sym(CType *type, int v)
1035 vpushsym(type, external_global_sym(v, type));
1038 /* save registers up to (vtop - n) stack entry */
1039 ST_FUNC void save_regs(int n)
1041 SValue *p, *p1;
1042 for(p = vstack, p1 = vtop - n; p <= p1; p++)
1043 save_reg(p->r);
1046 /* save r to the memory stack, and mark it as being free */
1047 ST_FUNC void save_reg(int r)
1049 save_reg_upstack(r, 0);
/* save r to the memory stack, and mark it as being free,
   if seen up to (vtop - n) stack entry */
ST_FUNC void save_reg_upstack(int r, int n)
{
    int l, saved, size, align;
    SValue *p, *p1, sv;
    CType *type;

    if ((r &= VT_VALMASK) >= VT_CONST)
        return;
    if (nocode_wanted)
        return;

    /* modify all stack values */
    saved = 0;
    l = 0;
    for(p = vstack, p1 = vtop - n; p <= p1; p++) {
        if ((p->r & VT_VALMASK) == r ||
            ((p->type.t & VT_BTYPE) == VT_LLONG && (p->r2 & VT_VALMASK) == r)) {
            /* must save value on stack if not already done */
            if (!saved) {
                /* NOTE: must reload 'r' because r might be equal to r2 */
                r = p->r & VT_VALMASK;
                /* store register in the stack */
                type = &p->type;
                /* lvalues hold an address; non-float non-llong rvalues
                   fit in a word: spill those as pointer/int */
                if ((p->r & VT_LVAL) ||
                    (!is_float(type->t) && (type->t & VT_BTYPE) != VT_LLONG))
#if PTR_SIZE == 8
                    type = &char_pointer_type;
#else
                    type = &int_type;
#endif
                size = type_size(type, &align);
                l = get_temp_local_var(size,align);
                sv.type.t = type->t;
                sv.r = VT_LOCAL | VT_LVAL;
                sv.c.i = l;
                store(r, &sv);
#if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
                /* x86 specific: need to pop fp register ST0 if saved */
                if (r == TREG_ST0) {
                    o(0xd8dd); /* fstp %st(0) */
                }
#endif
#if PTR_SIZE == 4
                /* special long long case */
                if ((type->t & VT_BTYPE) == VT_LLONG) {
                    sv.c.i += 4;
                    store(p->r2, &sv);
                }
#endif
                saved = 1;
            }
            /* mark that stack entry as being saved on the stack */
            if (p->r & VT_LVAL) {
                /* also clear the bounded flag because the
                   relocation address of the function was stored in
                   p->c.i */
                p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
            } else {
                p->r = lvalue_type(p->type.t) | VT_LOCAL;
            }
            p->r2 = VT_CONST;
            p->c.i = l;
        }
    }
}
#ifdef TCC_TARGET_ARM
/* find a register of class 'rc2' with at most one reference on stack.
 * If none, call get_reg(rc) */
ST_FUNC int get_reg_ex(int rc, int rc2)
{
    int r;
    SValue *p;

    for(r=0;r<NB_REGS;r++) {
        if (reg_classes[r] & rc2) {
            int n;
            n=0;
            /* count how many stack entries reference r */
            for(p = vstack; p <= vtop; p++) {
                if ((p->r & VT_VALMASK) == r ||
                    (p->r2 & VT_VALMASK) == r)
                    n++;
            }
            if (n <= 1)
                return r;
        }
    }
    return get_reg(rc);
}
#endif
/* find a free register of class 'rc'. If none, save one register */
ST_FUNC int get_reg(int rc)
{
    int r;
    SValue *p;

    /* find a free register */
    for(r=0;r<NB_REGS;r++) {
        if (reg_classes[r] & rc) {
            if (nocode_wanted)
                return r;
            for(p=vstack;p<=vtop;p++) {
                if ((p->r & VT_VALMASK) == r ||
                    (p->r2 & VT_VALMASK) == r)
                    goto notfound;
            }
            return r;
        }
    notfound: ;
    }

    /* no register left : free the first one on the stack (VERY
       IMPORTANT to start from the bottom to ensure that we don't
       spill registers used in gen_opi()) */
    for(p=vstack;p<=vtop;p++) {
        /* look at second register (if long long) */
        r = p->r2 & VT_VALMASK;
        if (r < VT_CONST && (reg_classes[r] & rc))
            goto save_found;
        r = p->r & VT_VALMASK;
        if (r < VT_CONST && (reg_classes[r] & rc)) {
        save_found:
            save_reg(r);
            return r;
        }
    }
    /* Should never comes here */
    return -1;
}
1185 /* find a free temporary local variable (return the offset on stack) match the size and align. If none, add new temporary stack variable*/
1186 static int get_temp_local_var(int size,int align){
1187 int i;
1188 struct temp_local_variable *temp_var;
1189 int found_var;
1190 SValue *p;
1191 int r;
1192 char free;
1193 char found;
1194 found=0;
1195 for(i=0;i<nb_temp_local_vars;i++){
1196 temp_var=&arr_temp_local_vars[i];
1197 if(temp_var->size<size||align!=temp_var->align){
1198 continue;
1200 /*check if temp_var is free*/
1201 free=1;
1202 for(p=vstack;p<=vtop;p++) {
1203 r=p->r&VT_VALMASK;
1204 if(r==VT_LOCAL||r==VT_LLOCAL){
1205 if(p->c.i==temp_var->location){
1206 free=0;
1207 break;
1211 if(free){
1212 found_var=temp_var->location;
1213 found=1;
1214 break;
1217 if(!found){
1218 loc = (loc - size) & -align;
1219 if(nb_temp_local_vars<MAX_TEMP_LOCAL_VARIABLE_NUMBER){
1220 temp_var=&arr_temp_local_vars[i];
1221 temp_var->location=loc;
1222 temp_var->size=size;
1223 temp_var->align=align;
1224 nb_temp_local_vars++;
1226 found_var=loc;
1228 return found_var;
1231 static void clear_temp_local_var_list(){
1232 nb_temp_local_vars=0;
1235 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1236 if needed */
1237 static void move_reg(int r, int s, int t)
1239 SValue sv;
1241 if (r != s) {
1242 save_reg(r);
1243 sv.type.t = t;
1244 sv.type.ref = NULL;
1245 sv.r = s;
1246 sv.c.i = 0;
1247 load(r, &sv);
1251 /* get address of vtop (vtop MUST BE an lvalue) */
1252 ST_FUNC void gaddrof(void)
1254 vtop->r &= ~VT_LVAL;
1255 /* tricky: if saved lvalue, then we can go back to lvalue */
1256 if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
1257 vtop->r = (vtop->r & ~(VT_VALMASK | VT_LVAL_TYPE)) | VT_LOCAL | VT_LVAL;
#ifdef CONFIG_TCC_BCHECK
/* generate lvalue bound code */
static void gbound(void)
{
    CType saved_type;
    int lval_type;

    vtop->r &= ~VT_MUSTBOUND;
    /* only lvalues need checking code before dereferencing */
    if (!(vtop->r & VT_LVAL))
        return;
    /* if not VT_BOUNDED value, then make one */
    if (!(vtop->r & VT_BOUNDED)) {
        lval_type = vtop->r & (VT_LVAL_TYPE | VT_LVAL);
        /* must save type because we must set it to int to get pointer */
        saved_type = vtop->type;
        vtop->type.t = VT_PTR;
        gaddrof();
        vpushi(0);
        gen_bounded_ptr_add();
        vtop->r |= lval_type;
        vtop->type = saved_type;
    }
    /* then check for dereferencing */
    gen_bounded_ptr_deref();
}
#endif
1290 static void incr_bf_adr(int o)
1292 vtop->type = char_pointer_type;
1293 gaddrof();
1294 vpushi(o);
1295 gen_op('+');
1296 vtop->type.t = (vtop->type.t & ~(VT_BTYPE|VT_DEFSIGN))
1297 | (VT_BYTE|VT_UNSIGNED);
1298 vtop->r = (vtop->r & ~VT_LVAL_TYPE)
1299 | (VT_LVAL_BYTE|VT_LVAL_UNSIGNED|VT_LVAL);
1302 /* single-byte load mode for packed or otherwise unaligned bitfields */
1303 static void load_packed_bf(CType *type, int bit_pos, int bit_size)
1305 int n, o, bits;
1306 save_reg_upstack(vtop->r, 1);
1307 vpush64(type->t & VT_BTYPE, 0); // B X
1308 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
1309 do {
1310 vswap(); // X B
1311 incr_bf_adr(o);
1312 vdup(); // X B B
1313 n = 8 - bit_pos;
1314 if (n > bit_size)
1315 n = bit_size;
1316 if (bit_pos)
1317 vpushi(bit_pos), gen_op(TOK_SHR), bit_pos = 0; // X B Y
1318 if (n < 8)
1319 vpushi((1 << n) - 1), gen_op('&');
1320 gen_cast(type);
1321 if (bits)
1322 vpushi(bits), gen_op(TOK_SHL);
1323 vrotb(3); // B Y X
1324 gen_op('|'); // B X
1325 bits += n, bit_size -= n, o = 1;
1326 } while (bit_size);
1327 vswap(), vpop();
1328 if (!(type->t & VT_UNSIGNED)) {
1329 n = ((type->t & VT_BTYPE) == VT_LLONG ? 64 : 32) - bits;
1330 vpushi(n), gen_op(TOK_SHL);
1331 vpushi(n), gen_op(TOK_SAR);
1335 /* single-byte store mode for packed or otherwise unaligned bitfields */
1336 static void store_packed_bf(int bit_pos, int bit_size)
1338 int bits, n, o, m, c;
1340 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1341 vswap(); // X B
1342 save_reg_upstack(vtop->r, 1);
1343 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
1344 do {
1345 incr_bf_adr(o); // X B
1346 vswap(); //B X
1347 c ? vdup() : gv_dup(); // B V X
1348 vrott(3); // X B V
1349 if (bits)
1350 vpushi(bits), gen_op(TOK_SHR);
1351 if (bit_pos)
1352 vpushi(bit_pos), gen_op(TOK_SHL);
1353 n = 8 - bit_pos;
1354 if (n > bit_size)
1355 n = bit_size;
1356 if (n < 8) {
1357 m = ((1 << n) - 1) << bit_pos;
1358 vpushi(m), gen_op('&'); // X B V1
1359 vpushv(vtop-1); // X B V1 B
1360 vpushi(m & 0x80 ? ~m & 0x7f : ~m);
1361 gen_op('&'); // X B V1 B1
1362 gen_op('|'); // X B V2
1364 vdup(), vtop[-1] = vtop[-2]; // X B B V2
1365 vstore(), vpop(); // X B
1366 bits += n, bit_size -= n, bit_pos = 0, o = 1;
1367 } while (bit_size);
1368 vpop(), vpop();
1371 static int adjust_bf(SValue *sv, int bit_pos, int bit_size)
1373 int t;
1374 if (0 == sv->type.ref)
1375 return 0;
1376 t = sv->type.ref->auxtype;
1377 if (t != -1 && t != VT_STRUCT) {
1378 sv->type.t = (sv->type.t & ~VT_BTYPE) | t;
1379 sv->r = (sv->r & ~VT_LVAL_TYPE) | lvalue_type(sv->type.t);
1381 return t;
1384 /* store vtop a register belonging to class 'rc'. lvalues are
1385 converted to values. Cannot be used if cannot be converted to
1386 register value (such as structures). */
1387 ST_FUNC int gv(int rc)
1389 int r, bit_pos, bit_size, size, align, rc2;
1391 /* NOTE: get_reg can modify vstack[] */
1392 if (vtop->type.t & VT_BITFIELD) {
1393 CType type;
1395 bit_pos = BIT_POS(vtop->type.t);
1396 bit_size = BIT_SIZE(vtop->type.t);
1397 /* remove bit field info to avoid loops */
1398 vtop->type.t &= ~VT_STRUCT_MASK;
1400 type.ref = NULL;
1401 type.t = vtop->type.t & VT_UNSIGNED;
1402 if ((vtop->type.t & VT_BTYPE) == VT_BOOL)
1403 type.t |= VT_UNSIGNED;
1405 r = adjust_bf(vtop, bit_pos, bit_size);
1407 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
1408 type.t |= VT_LLONG;
1409 else
1410 type.t |= VT_INT;
1412 if (r == VT_STRUCT) {
1413 load_packed_bf(&type, bit_pos, bit_size);
1414 } else {
1415 int bits = (type.t & VT_BTYPE) == VT_LLONG ? 64 : 32;
1416 /* cast to int to propagate signedness in following ops */
1417 gen_cast(&type);
1418 /* generate shifts */
1419 vpushi(bits - (bit_pos + bit_size));
1420 gen_op(TOK_SHL);
1421 vpushi(bits - bit_size);
1422 /* NOTE: transformed to SHR if unsigned */
1423 gen_op(TOK_SAR);
1425 r = gv(rc);
1426 } else {
1427 if (is_float(vtop->type.t) &&
1428 (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1429 unsigned long offset;
1430 /* CPUs usually cannot use float constants, so we store them
1431 generically in data segment */
1432 size = type_size(&vtop->type, &align);
1433 if (NODATA_WANTED)
1434 size = 0, align = 1;
1435 offset = section_add(data_section, size, align);
1436 vpush_ref(&vtop->type, data_section, offset, size);
1437 vswap();
1438 init_putv(&vtop->type, data_section, offset);
1439 vtop->r |= VT_LVAL;
1441 #ifdef CONFIG_TCC_BCHECK
1442 if (vtop->r & VT_MUSTBOUND)
1443 gbound();
1444 #endif
1446 r = vtop->r & VT_VALMASK;
1447 rc2 = (rc & RC_FLOAT) ? RC_FLOAT : RC_INT;
1448 #ifndef TCC_TARGET_ARM64
1449 if (rc == RC_IRET)
1450 rc2 = RC_LRET;
1451 #ifdef TCC_TARGET_X86_64
1452 else if (rc == RC_FRET)
1453 rc2 = RC_QRET;
1454 #endif
1455 #endif
1456 /* need to reload if:
1457 - constant
1458 - lvalue (need to dereference pointer)
1459 - already a register, but not in the right class */
1460 if (r >= VT_CONST
1461 || (vtop->r & VT_LVAL)
1462 || !(reg_classes[r] & rc)
1463 #if PTR_SIZE == 8
1464 || ((vtop->type.t & VT_BTYPE) == VT_QLONG && !(reg_classes[vtop->r2] & rc2))
1465 || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT && !(reg_classes[vtop->r2] & rc2))
1466 #else
1467 || ((vtop->type.t & VT_BTYPE) == VT_LLONG && !(reg_classes[vtop->r2] & rc2))
1468 #endif
1471 r = get_reg(rc);
1472 #if PTR_SIZE == 8
1473 if (((vtop->type.t & VT_BTYPE) == VT_QLONG) || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT)) {
1474 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
1475 #else
1476 if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
1477 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
1478 unsigned long long ll;
1479 #endif
1480 int r2, original_type;
1481 original_type = vtop->type.t;
1482 /* two register type load : expand to two words
1483 temporarily */
1484 #if PTR_SIZE == 4
1485 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1486 /* load constant */
1487 ll = vtop->c.i;
1488 vtop->c.i = ll; /* first word */
1489 load(r, vtop);
1490 vtop->r = r; /* save register value */
1491 vpushi(ll >> 32); /* second word */
1492 } else
1493 #endif
1494 if (vtop->r & VT_LVAL) {
1495 /* We do not want to modifier the long long
1496 pointer here, so the safest (and less
1497 efficient) is to save all the other registers
1498 in the stack. XXX: totally inefficient. */
1499 #if 0
1500 save_regs(1);
1501 #else
1502 /* lvalue_save: save only if used further down the stack */
1503 save_reg_upstack(vtop->r, 1);
1504 #endif
1505 /* load from memory */
1506 vtop->type.t = load_type;
1507 load(r, vtop);
1508 vdup();
1509 vtop[-1].r = r; /* save register value */
1510 /* increment pointer to get second word */
1511 vtop->type.t = addr_type;
1512 gaddrof();
1513 vpushi(load_size);
1514 gen_op('+');
1515 vtop->r |= VT_LVAL;
1516 vtop->type.t = load_type;
1517 } else {
1518 /* move registers */
1519 load(r, vtop);
1520 vdup();
1521 vtop[-1].r = r; /* save register value */
1522 vtop->r = vtop[-1].r2;
1524 /* Allocate second register. Here we rely on the fact that
1525 get_reg() tries first to free r2 of an SValue. */
1526 r2 = get_reg(rc2);
1527 load(r2, vtop);
1528 vpop();
1529 /* write second register */
1530 vtop->r2 = r2;
1531 vtop->type.t = original_type;
1532 } else if ((vtop->r & VT_LVAL) && !is_float(vtop->type.t)) {
1533 int t1, t;
1534 /* lvalue of scalar type : need to use lvalue type
1535 because of possible cast */
1536 t = vtop->type.t;
1537 t1 = t;
1538 /* compute memory access type */
1539 if (vtop->r & VT_LVAL_BYTE)
1540 t = VT_BYTE;
1541 else if (vtop->r & VT_LVAL_SHORT)
1542 t = VT_SHORT;
1543 if (vtop->r & VT_LVAL_UNSIGNED)
1544 t |= VT_UNSIGNED;
1545 vtop->type.t = t;
1546 load(r, vtop);
1547 /* restore wanted type */
1548 vtop->type.t = t1;
1549 } else {
1550 /* one register type load */
1551 load(r, vtop);
1554 vtop->r = r;
1555 #ifdef TCC_TARGET_C67
1556 /* uses register pairs for doubles */
1557 if ((vtop->type.t & VT_BTYPE) == VT_DOUBLE)
1558 vtop->r2 = r+1;
1559 #endif
1561 return r;
1564 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1565 ST_FUNC void gv2(int rc1, int rc2)
1567 int v;
1569 /* generate more generic register first. But VT_JMP or VT_CMP
1570 values must be generated first in all cases to avoid possible
1571 reload errors */
1572 v = vtop[0].r & VT_VALMASK;
1573 if (v != VT_CMP && (v & ~1) != VT_JMP && rc1 <= rc2) {
1574 vswap();
1575 gv(rc1);
1576 vswap();
1577 gv(rc2);
1578 /* test if reload is needed for first register */
1579 if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
1580 vswap();
1581 gv(rc1);
1582 vswap();
1584 } else {
1585 gv(rc2);
1586 vswap();
1587 gv(rc1);
1588 vswap();
1589 /* test if reload is needed for first register */
1590 if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
1591 gv(rc2);
1596 #ifndef TCC_TARGET_ARM64
1597 /* wrapper around RC_FRET to return a register by type */
1598 static int rc_fret(int t)
1600 #ifdef TCC_TARGET_X86_64
1601 if (t == VT_LDOUBLE) {
1602 return RC_ST0;
1604 #endif
1605 return RC_FRET;
1607 #endif
1609 /* wrapper around REG_FRET to return a register by type */
1610 static int reg_fret(int t)
1612 #ifdef TCC_TARGET_X86_64
1613 if (t == VT_LDOUBLE) {
1614 return TREG_ST0;
1616 #endif
1617 return REG_FRET;
#if PTR_SIZE == 4
/* expand 64bit on stack in two ints */
ST_FUNC void lexpand(void)
{
    int sign_flags, loc_kind;

    sign_flags = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
    loc_kind = vtop->r & (VT_VALMASK | VT_LVAL);
    if (loc_kind == VT_CONST) {
        /* constant: the high word is the constant shifted down */
        vdup();
        vtop[0].c.i >>= 32;
    } else if (loc_kind == (VT_LVAL | VT_CONST) || loc_kind == (VT_LVAL | VT_LOCAL)) {
        /* memory operand: the high word lives 4 bytes further */
        vdup();
        vtop[0].c.i += 4;
    } else {
        /* register pair: split r/r2 into two single-register values */
        gv(RC_INT);
        vdup();
        vtop[0].r = vtop[-1].r2;
        vtop[0].r2 = vtop[-1].r2 = VT_CONST;
    }
    vtop[0].type.t = vtop[-1].type.t = VT_INT | sign_flags;
}
#endif
#if PTR_SIZE == 4
/* build a long long from two ints */
static void lbuild(int t)
{
    gv2(RC_INT, RC_INT);
    /* fold the top value's register into the lower entry as r2 */
    vtop[-1].r2 = vtop[0].r;
    vtop[-1].type.t = t;
    vpop();
}
#endif
1654 /* convert stack entry to register and duplicate its value in another
1655 register */
1656 static void gv_dup(void)
1658 int rc, t, r, r1;
1659 SValue sv;
1661 t = vtop->type.t;
1662 #if PTR_SIZE == 4
1663 if ((t & VT_BTYPE) == VT_LLONG) {
1664 if (t & VT_BITFIELD) {
1665 gv(RC_INT);
1666 t = vtop->type.t;
1668 lexpand();
1669 gv_dup();
1670 vswap();
1671 vrotb(3);
1672 gv_dup();
1673 vrotb(4);
1674 /* stack: H L L1 H1 */
1675 lbuild(t);
1676 vrotb(3);
1677 vrotb(3);
1678 vswap();
1679 lbuild(t);
1680 vswap();
1681 } else
1682 #endif
1684 /* duplicate value */
1685 rc = RC_INT;
1686 sv.type.t = VT_INT;
1687 if (is_float(t)) {
1688 rc = RC_FLOAT;
1689 #ifdef TCC_TARGET_X86_64
1690 if ((t & VT_BTYPE) == VT_LDOUBLE) {
1691 rc = RC_ST0;
1693 #endif
1694 sv.type.t = t;
1696 r = gv(rc);
1697 r1 = get_reg(rc);
1698 sv.r = r;
1699 sv.c.i = 0;
1700 load(r1, &sv); /* move r to r1 */
1701 vdup();
1702 /* duplicates value */
1703 if (r != r1)
1704 vtop->r = r1;
1708 /* Generate value test
1710 * Generate a test for any value (jump, comparison and integers) */
1711 ST_FUNC int gvtst(int inv, int t)
1713 int v = vtop->r & VT_VALMASK;
1714 if (v != VT_CMP && v != VT_JMP && v != VT_JMPI) {
1715 vpushi(0);
1716 gen_op(TOK_NE);
1718 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1719 /* constant jmp optimization */
1720 if ((vtop->c.i != 0) != inv)
1721 t = gjmp(t);
1722 vtop--;
1723 return t;
1725 return gtst(inv, t);
#if PTR_SIZE == 4
/* generate CPU independent (unsigned) long long operations */
static void gen_opl(int op)
{
    int t, a, b, op1, c, i;
    int func;
    unsigned short reg_iret = REG_IRET;
    unsigned short reg_lret = REG_LRET;
    SValue tmp;

    switch(op) {
    case '/':
    case TOK_PDIV:
        func = TOK___divdi3;
        goto gen_func;
    case TOK_UDIV:
        func = TOK___udivdi3;
        goto gen_func;
    case '%':
        func = TOK___moddi3;
        goto gen_mod_func;
    case TOK_UMOD:
        func = TOK___umoddi3;
    gen_mod_func:
#ifdef TCC_ARM_EABI
        reg_iret = TREG_R2;
        reg_lret = TREG_R3;
#endif
    gen_func:
        /* call generic long long function */
        vpush_global_sym(&func_old_type, func);
        vrott(3);
        gfunc_call(2);
        vpushi(0);
        vtop->r = reg_iret;
        vtop->r2 = reg_lret;
        break;
    case '^':
    case '&':
    case '|':
    case '*':
    case '+':
    case '-':
        //pv("gen_opl A",0,2);
        t = vtop->type.t;
        vswap();
        lexpand();
        vrotb(3);
        lexpand();
        /* stack: L1 H1 L2 H2 */
        tmp = vtop[0];
        vtop[0] = vtop[-3];
        vtop[-3] = tmp;
        tmp = vtop[-2];
        vtop[-2] = vtop[-3];
        vtop[-3] = tmp;
        vswap();
        /* stack: H1 H2 L1 L2 */
        //pv("gen_opl B",0,4);
        if (op == '*') {
            vpushv(vtop - 1);
            vpushv(vtop - 1);
            gen_op(TOK_UMULL);
            lexpand();
            /* stack: H1 H2 L1 L2 ML MH */
            for(i=0;i<4;i++)
                vrotb(6);
            /* stack: ML MH H1 H2 L1 L2 */
            tmp = vtop[0];
            vtop[0] = vtop[-2];
            vtop[-2] = tmp;
            /* stack: ML MH H1 L2 H2 L1 */
            gen_op('*');
            vrotb(3);
            vrotb(3);
            gen_op('*');
            /* stack: ML MH M1 M2 */
            gen_op('+');
            gen_op('+');
        } else if (op == '+' || op == '-') {
            /* XXX: add non carry method too (for MIPS or alpha) */
            if (op == '+')
                op1 = TOK_ADDC1;
            else
                op1 = TOK_SUBC1;
            gen_op(op1);
            /* stack: H1 H2 (L1 op L2) */
            vrotb(3);
            vrotb(3);
            gen_op(op1 + 1); /* TOK_xxxC2 */
        } else {
            gen_op(op);
            /* stack: H1 H2 (L1 op L2) */
            vrotb(3);
            vrotb(3);
            /* stack: (L1 op L2) H1 H2 */
            gen_op(op);
            /* stack: (L1 op L2) (H1 op H2) */
        }
        /* stack: L H */
        lbuild(t);
        break;
    case TOK_SAR:
    case TOK_SHR:
    case TOK_SHL:
        if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
            t = vtop[-1].type.t;
            vswap();
            lexpand();
            vrotb(3);
            /* stack: L H shift */
            c = (int)vtop->c.i;
            /* constant: simpler */
            /* NOTE: all comments are for SHL. the other cases are
               done by swapping words */
            vpop();
            if (op != TOK_SHL)
                vswap();
            if (c >= 32) {
                /* stack: L H */
                vpop();
                if (c > 32) {
                    vpushi(c - 32);
                    gen_op(op);
                }
                if (op != TOK_SAR) {
                    vpushi(0);
                } else {
                    gv_dup();
                    vpushi(31);
                    gen_op(TOK_SAR);
                }
                vswap();
            } else {
                vswap();
                gv_dup();
                /* stack: H L L */
                vpushi(c);
                gen_op(op);
                vswap();
                vpushi(32 - c);
                if (op == TOK_SHL)
                    gen_op(TOK_SHR);
                else
                    gen_op(TOK_SHL);
                vrotb(3);
                /* stack: L L H */
                vpushi(c);
                if (op == TOK_SHL)
                    gen_op(TOK_SHL);
                else
                    gen_op(TOK_SHR);
                gen_op('|');
            }
            if (op != TOK_SHL)
                vswap();
            lbuild(t);
        } else {
            /* XXX: should provide a faster fallback on x86 ? */
            switch(op) {
            case TOK_SAR:
                func = TOK___ashrdi3;
                goto gen_func;
            case TOK_SHR:
                func = TOK___lshrdi3;
                goto gen_func;
            case TOK_SHL:
                func = TOK___ashldi3;
                goto gen_func;
            }
        }
        break;
    default:
        /* compare operations */
        t = vtop->type.t;
        vswap();
        lexpand();
        vrotb(3);
        lexpand();
        /* stack: L1 H1 L2 H2 */
        tmp = vtop[-1];
        vtop[-1] = vtop[-2];
        vtop[-2] = tmp;
        /* stack: L1 L2 H1 H2 */
        /* compare high */
        op1 = op;
        /* when values are equal, we need to compare low words. since
           the jump is inverted, we invert the test too. */
        if (op1 == TOK_LT)
            op1 = TOK_LE;
        else if (op1 == TOK_GT)
            op1 = TOK_GE;
        else if (op1 == TOK_ULT)
            op1 = TOK_ULE;
        else if (op1 == TOK_UGT)
            op1 = TOK_UGE;
        a = 0;
        b = 0;
        gen_op(op1);
        if (op == TOK_NE) {
            b = gvtst(0, 0);
        } else {
            a = gvtst(1, 0);
            if (op != TOK_EQ) {
                /* generate non equal test */
                vpushi(TOK_NE);
                vtop->r = VT_CMP;
                b = gvtst(0, 0);
            }
        }
        /* compare low. Always unsigned */
        op1 = op;
        if (op1 == TOK_LT)
            op1 = TOK_ULT;
        else if (op1 == TOK_LE)
            op1 = TOK_ULE;
        else if (op1 == TOK_GT)
            op1 = TOK_UGT;
        else if (op1 == TOK_GE)
            op1 = TOK_UGE;
        gen_op(op1);
        a = gvtst(1, a);
        gsym(b);
        vseti(VT_JMPI, a);
        break;
    }
}
#endif
/* signed 64-bit division implemented on unsigned values: divide the
   magnitudes, then restore the sign of the quotient */
static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
{
    uint64_t na = (a >> 63) ? 0 - a : a;
    uint64_t nb = (b >> 63) ? 0 - b : b;
    uint64_t q = na / nb;

    return ((a ^ b) >> 63) ? 0 - q : q;
}
/* signed 64-bit "less than" on unsigned representations: flipping the
   sign bit maps the signed ordering onto the unsigned ordering */
static int gen_opic_lt(uint64_t a, uint64_t b)
{
    const uint64_t sign_bit = (uint64_t)1 << 63;

    return (a ^ sign_bit) < (b ^ sign_bit);
}
1968 /* handle integer constant optimizations and various machine
1969 independent opt */
1970 static void gen_opic(int op)
1972 SValue *v1 = vtop - 1;
1973 SValue *v2 = vtop;
1974 int t1 = v1->type.t & VT_BTYPE;
1975 int t2 = v2->type.t & VT_BTYPE;
1976 int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1977 int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1978 uint64_t l1 = c1 ? v1->c.i : 0;
1979 uint64_t l2 = c2 ? v2->c.i : 0;
1980 int shm = (t1 == VT_LLONG) ? 63 : 31;
1982 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
1983 l1 = ((uint32_t)l1 |
1984 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
1985 if (t2 != VT_LLONG && (PTR_SIZE != 8 || t2 != VT_PTR))
1986 l2 = ((uint32_t)l2 |
1987 (v2->type.t & VT_UNSIGNED ? 0 : -(l2 & 0x80000000)));
1989 if (c1 && c2) {
1990 switch(op) {
1991 case '+': l1 += l2; break;
1992 case '-': l1 -= l2; break;
1993 case '&': l1 &= l2; break;
1994 case '^': l1 ^= l2; break;
1995 case '|': l1 |= l2; break;
1996 case '*': l1 *= l2; break;
1998 case TOK_PDIV:
1999 case '/':
2000 case '%':
2001 case TOK_UDIV:
2002 case TOK_UMOD:
2003 /* if division by zero, generate explicit division */
2004 if (l2 == 0) {
2005 if (const_wanted)
2006 tcc_error("division by zero in constant");
2007 goto general_case;
2009 switch(op) {
2010 default: l1 = gen_opic_sdiv(l1, l2); break;
2011 case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
2012 case TOK_UDIV: l1 = l1 / l2; break;
2013 case TOK_UMOD: l1 = l1 % l2; break;
2015 break;
2016 case TOK_SHL: l1 <<= (l2 & shm); break;
2017 case TOK_SHR: l1 >>= (l2 & shm); break;
2018 case TOK_SAR:
2019 l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
2020 break;
2021 /* tests */
2022 case TOK_ULT: l1 = l1 < l2; break;
2023 case TOK_UGE: l1 = l1 >= l2; break;
2024 case TOK_EQ: l1 = l1 == l2; break;
2025 case TOK_NE: l1 = l1 != l2; break;
2026 case TOK_ULE: l1 = l1 <= l2; break;
2027 case TOK_UGT: l1 = l1 > l2; break;
2028 case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
2029 case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
2030 case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
2031 case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
2032 /* logical */
2033 case TOK_LAND: l1 = l1 && l2; break;
2034 case TOK_LOR: l1 = l1 || l2; break;
2035 default:
2036 goto general_case;
2038 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
2039 l1 = ((uint32_t)l1 |
2040 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
2041 v1->c.i = l1;
2042 vtop--;
2043 } else {
2044 /* if commutative ops, put c2 as constant */
2045 if (c1 && (op == '+' || op == '&' || op == '^' ||
2046 op == '|' || op == '*')) {
2047 vswap();
2048 c2 = c1; //c = c1, c1 = c2, c2 = c;
2049 l2 = l1; //l = l1, l1 = l2, l2 = l;
2051 if (!const_wanted &&
2052 c1 && ((l1 == 0 &&
2053 (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
2054 (l1 == -1 && op == TOK_SAR))) {
2055 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
2056 vtop--;
2057 } else if (!const_wanted &&
2058 c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
2059 (op == '|' &&
2060 (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))) ||
2061 (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
2062 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
2063 if (l2 == 1)
2064 vtop->c.i = 0;
2065 vswap();
2066 vtop--;
2067 } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
2068 op == TOK_PDIV) &&
2069 l2 == 1) ||
2070 ((op == '+' || op == '-' || op == '|' || op == '^' ||
2071 op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
2072 l2 == 0) ||
2073 (op == '&' &&
2074 (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))))) {
2075 /* filter out NOP operations like x*1, x-0, x&-1... */
2076 vtop--;
2077 } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
2078 /* try to use shifts instead of muls or divs */
2079 if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
2080 int n = -1;
2081 while (l2) {
2082 l2 >>= 1;
2083 n++;
2085 vtop->c.i = n;
2086 if (op == '*')
2087 op = TOK_SHL;
2088 else if (op == TOK_PDIV)
2089 op = TOK_SAR;
2090 else
2091 op = TOK_SHR;
2093 goto general_case;
2094 } else if (c2 && (op == '+' || op == '-') &&
2095 (((vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM))
2096 || (vtop[-1].r & (VT_VALMASK | VT_LVAL)) == VT_LOCAL)) {
2097 /* symbol + constant case */
2098 if (op == '-')
2099 l2 = -l2;
2100 l2 += vtop[-1].c.i;
2101 /* The backends can't always deal with addends to symbols
2102 larger than +-1<<31. Don't construct such. */
2103 if ((int)l2 != l2)
2104 goto general_case;
2105 vtop--;
2106 vtop->c.i = l2;
2107 } else {
2108 general_case:
2109 /* call low level op generator */
2110 if (t1 == VT_LLONG || t2 == VT_LLONG ||
2111 (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
2112 gen_opl(op);
2113 else
2114 gen_opi(op);
2119 /* generate a floating point operation with constant propagation */
2120 static void gen_opif(int op)
2122 int c1, c2;
2123 SValue *v1, *v2;
2124 #if defined _MSC_VER && defined _AMD64_
2125 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2126 volatile
2127 #endif
2128 long double f1, f2;
2130 v1 = vtop - 1;
2131 v2 = vtop;
2132 /* currently, we cannot do computations with forward symbols */
2133 c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2134 c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2135 if (c1 && c2) {
2136 if (v1->type.t == VT_FLOAT) {
2137 f1 = v1->c.f;
2138 f2 = v2->c.f;
2139 } else if (v1->type.t == VT_DOUBLE) {
2140 f1 = v1->c.d;
2141 f2 = v2->c.d;
2142 } else {
2143 f1 = v1->c.ld;
2144 f2 = v2->c.ld;
2147 /* NOTE: we only do constant propagation if finite number (not
2148 NaN or infinity) (ANSI spec) */
2149 if (!ieee_finite(f1) || !ieee_finite(f2))
2150 goto general_case;
2152 switch(op) {
2153 case '+': f1 += f2; break;
2154 case '-': f1 -= f2; break;
2155 case '*': f1 *= f2; break;
2156 case '/':
2157 if (f2 == 0.0) {
2158 /* If not in initializer we need to potentially generate
2159 FP exceptions at runtime, otherwise we want to fold. */
2160 if (!const_wanted)
2161 goto general_case;
2163 f1 /= f2;
2164 break;
2165 /* XXX: also handles tests ? */
2166 default:
2167 goto general_case;
2169 /* XXX: overflow test ? */
2170 if (v1->type.t == VT_FLOAT) {
2171 v1->c.f = f1;
2172 } else if (v1->type.t == VT_DOUBLE) {
2173 v1->c.d = f1;
2174 } else {
2175 v1->c.ld = f1;
2177 vtop--;
2178 } else {
2179 general_case:
2180 gen_opf(op);
2184 static int pointed_size(CType *type)
2186 int align;
2187 return type_size(pointed_type(type), &align);
2190 static void vla_runtime_pointed_size(CType *type)
2192 int align;
2193 vla_runtime_type_size(pointed_type(type), &align);
2196 static inline int is_null_pointer(SValue *p)
2198 if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
2199 return 0;
2200 return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
2201 ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
2202 ((p->type.t & VT_BTYPE) == VT_PTR &&
2203 (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0) &&
2204 ((pointed_type(&p->type)->t & VT_BTYPE) == VT_VOID) &&
2205 0 == (pointed_type(&p->type)->t & (VT_CONSTANT | VT_VOLATILE))
2208 static inline int is_integer_btype(int bt)
2210 return (bt == VT_BYTE || bt == VT_SHORT ||
2211 bt == VT_INT || bt == VT_LLONG);
2214 /* check types for comparison or subtraction of pointers */
2215 static void check_comparison_pointer_types(SValue *p1, SValue *p2, int op)
2217 CType *type1, *type2, tmp_type1, tmp_type2;
2218 int bt1, bt2;
2220 /* null pointers are accepted for all comparisons as gcc */
2221 if (is_null_pointer(p1) || is_null_pointer(p2))
2222 return;
2223 type1 = &p1->type;
2224 type2 = &p2->type;
2225 bt1 = type1->t & VT_BTYPE;
2226 bt2 = type2->t & VT_BTYPE;
2227 /* accept comparison between pointer and integer with a warning */
2228 if ((is_integer_btype(bt1) || is_integer_btype(bt2)) && op != '-') {
2229 if (op != TOK_LOR && op != TOK_LAND )
2230 tcc_warning("comparison between pointer and integer");
2231 return;
2234 /* both must be pointers or implicit function pointers */
2235 if (bt1 == VT_PTR) {
2236 type1 = pointed_type(type1);
2237 } else if (bt1 != VT_FUNC)
2238 goto invalid_operands;
2240 if (bt2 == VT_PTR) {
2241 type2 = pointed_type(type2);
2242 } else if (bt2 != VT_FUNC) {
2243 invalid_operands:
2244 tcc_error("invalid operands to binary %s", get_tok_str(op, NULL));
2246 if ((type1->t & VT_BTYPE) == VT_VOID ||
2247 (type2->t & VT_BTYPE) == VT_VOID)
2248 return;
2249 tmp_type1 = *type1;
2250 tmp_type2 = *type2;
2251 tmp_type1.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
2252 tmp_type2.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
2253 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
2254 /* gcc-like error if '-' is used */
2255 if (op == '-')
2256 goto invalid_operands;
2257 else
2258 tcc_warning("comparison of distinct pointer types lacks a cast");
2262 /* generic gen_op: handles types problems */
2263 ST_FUNC void gen_op(int op)
2265 int u, t1, t2, bt1, bt2, t;
2266 CType type1;
2268 redo:
2269 t1 = vtop[-1].type.t;
2270 t2 = vtop[0].type.t;
2271 bt1 = t1 & VT_BTYPE;
2272 bt2 = t2 & VT_BTYPE;
2274 if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
2275 tcc_error("operation on a struct");
2276 } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
2277 if (bt2 == VT_FUNC) {
2278 mk_pointer(&vtop->type);
2279 gaddrof();
2281 if (bt1 == VT_FUNC) {
2282 vswap();
2283 mk_pointer(&vtop->type);
2284 gaddrof();
2285 vswap();
2287 goto redo;
2288 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
2289 /* at least one operand is a pointer */
2290 /* relational op: must be both pointers */
2291 if (op >= TOK_ULT && op <= TOK_LOR) {
2292 check_comparison_pointer_types(vtop - 1, vtop, op);
2293 /* pointers are handled are unsigned */
2294 #if PTR_SIZE == 8
2295 t = VT_LLONG | VT_UNSIGNED;
2296 #else
2297 t = VT_INT | VT_UNSIGNED;
2298 #endif
2299 goto std_op;
2301 /* if both pointers, then it must be the '-' op */
2302 if (bt1 == VT_PTR && bt2 == VT_PTR) {
2303 if (op != '-')
2304 tcc_error("cannot use pointers here");
2305 check_comparison_pointer_types(vtop - 1, vtop, op);
2306 /* XXX: check that types are compatible */
2307 if (vtop[-1].type.t & VT_VLA) {
2308 vla_runtime_pointed_size(&vtop[-1].type);
2309 } else {
2310 vpushi(pointed_size(&vtop[-1].type));
2312 vrott(3);
2313 gen_opic(op);
2314 vtop->type.t = ptrdiff_type.t;
2315 vswap();
2316 gen_op(TOK_PDIV);
2317 } else {
2318 /* exactly one pointer : must be '+' or '-'. */
2319 if (op != '-' && op != '+')
2320 tcc_error("cannot use pointers here");
2321 /* Put pointer as first operand */
2322 if (bt2 == VT_PTR) {
2323 vswap();
2324 t = t1, t1 = t2, t2 = t;
2326 #if PTR_SIZE == 4
2327 if ((vtop[0].type.t & VT_BTYPE) == VT_LLONG)
2328 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2329 gen_cast_s(VT_INT);
2330 #endif
2331 type1 = vtop[-1].type;
2332 type1.t &= ~VT_ARRAY;
2333 if (vtop[-1].type.t & VT_VLA)
2334 vla_runtime_pointed_size(&vtop[-1].type);
2335 else {
2336 u = pointed_size(&vtop[-1].type);
2337 if (u < 0)
2338 tcc_error("unknown array element size");
2339 #if PTR_SIZE == 8
2340 vpushll(u);
2341 #else
2342 /* XXX: cast to int ? (long long case) */
2343 vpushi(u);
2344 #endif
2346 gen_op('*');
2347 #if 0
2348 /* #ifdef CONFIG_TCC_BCHECK
2349 The main reason to removing this code:
2350 #include <stdio.h>
2351 int main ()
2353 int v[10];
2354 int i = 10;
2355 int j = 9;
2356 fprintf(stderr, "v+i-j = %p\n", v+i-j);
2357 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
2359 When this code is on. then the output looks like
2360 v+i-j = 0xfffffffe
2361 v+(i-j) = 0xbff84000
2363 /* if evaluating constant expression, no code should be
2364 generated, so no bound check */
2365 if (tcc_state->do_bounds_check && !const_wanted) {
2366 /* if bounded pointers, we generate a special code to
2367 test bounds */
2368 if (op == '-') {
2369 vpushi(0);
2370 vswap();
2371 gen_op('-');
2373 gen_bounded_ptr_add();
2374 } else
2375 #endif
2377 gen_opic(op);
2379 /* put again type if gen_opic() swaped operands */
2380 vtop->type = type1;
2382 } else if (is_float(bt1) || is_float(bt2)) {
2383 /* compute bigger type and do implicit casts */
2384 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
2385 t = VT_LDOUBLE;
2386 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
2387 t = VT_DOUBLE;
2388 } else {
2389 t = VT_FLOAT;
2391 /* floats can only be used for a few operations */
2392 if (op != '+' && op != '-' && op != '*' && op != '/' &&
2393 (op < TOK_ULT || op > TOK_GT))
2394 tcc_error("invalid operands for binary operation");
2395 goto std_op;
2396 } else if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL) {
2397 t = bt1 == VT_LLONG ? VT_LLONG : VT_INT;
2398 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (t | VT_UNSIGNED))
2399 t |= VT_UNSIGNED;
2400 t |= (VT_LONG & t1);
2401 goto std_op;
2402 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
2403 /* cast to biggest op */
2404 t = VT_LLONG | VT_LONG;
2405 if (bt1 == VT_LLONG)
2406 t &= t1;
2407 if (bt2 == VT_LLONG)
2408 t &= t2;
2409 /* convert to unsigned if it does not fit in a long long */
2410 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
2411 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
2412 t |= VT_UNSIGNED;
2413 goto std_op;
2414 } else {
2415 /* integer operations */
2416 t = VT_INT | (VT_LONG & (t1 | t2));
2417 /* convert to unsigned if it does not fit in an integer */
2418 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
2419 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
2420 t |= VT_UNSIGNED;
2421 std_op:
2422 /* XXX: currently, some unsigned operations are explicit, so
2423 we modify them here */
2424 if (t & VT_UNSIGNED) {
2425 if (op == TOK_SAR)
2426 op = TOK_SHR;
2427 else if (op == '/')
2428 op = TOK_UDIV;
2429 else if (op == '%')
2430 op = TOK_UMOD;
2431 else if (op == TOK_LT)
2432 op = TOK_ULT;
2433 else if (op == TOK_GT)
2434 op = TOK_UGT;
2435 else if (op == TOK_LE)
2436 op = TOK_ULE;
2437 else if (op == TOK_GE)
2438 op = TOK_UGE;
2440 vswap();
2441 type1.t = t;
2442 type1.ref = NULL;
2443 gen_cast(&type1);
2444 vswap();
2445 /* special case for shifts and long long: we keep the shift as
2446 an integer */
2447 if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
2448 type1.t = VT_INT;
2449 gen_cast(&type1);
2450 if (is_float(t))
2451 gen_opif(op);
2452 else
2453 gen_opic(op);
2454 if (op >= TOK_ULT && op <= TOK_GT) {
2455 /* relational op: the result is an int */
2456 vtop->type.t = VT_INT;
2457 } else {
2458 vtop->type.t = t;
2461 // Make sure that we have converted to an rvalue:
2462 if (vtop->r & VT_LVAL)
2463 gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
2466 #ifndef TCC_TARGET_ARM
/* Convert the integer on top of the value stack to float type 't'.
   The unsigned-long-long source case is not handled natively by most
   backends, so it is routed through a libgcc-style runtime helper
   (__floatundisf / __floatundixf / __floatundidf); otherwise the
   target's gen_cvt_itof() is used directly. */
2467 /* generic itof for unsigned long long case */
2468 static void gen_cvt_itof1(int t)
2470 #ifdef TCC_TARGET_ARM64
2471 gen_cvt_itof(t);
2472 #else
2473 if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
2474 (VT_LLONG | VT_UNSIGNED)) {
2476 if (t == VT_FLOAT)
2477 vpush_global_sym(&func_old_type, TOK___floatundisf);
2478 #if LDOUBLE_SIZE != 8
2479 else if (t == VT_LDOUBLE)
2480 vpush_global_sym(&func_old_type, TOK___floatundixf);
2481 #endif
2482 else
2483 vpush_global_sym(&func_old_type, TOK___floatundidf);
2484 vrott(2);
2485 gfunc_call(1);
2486 vpushi(0);
/* helper result lands in the target's float return register */
2487 vtop->r = reg_fret(t);
2488 } else {
2489 gen_cvt_itof(t);
2491 #endif
2493 #endif
2495 /* generic ftoi for unsigned long long case */
/* Convert the float on top of the value stack to integer type 't'.
   Conversion to unsigned long long goes through a runtime helper
   (__fixunssfdi / __fixunsxfdi / __fixunsdfdi) since backends do not
   handle it natively; other targets of the cast use gen_cvt_ftoi(). */
2496 static void gen_cvt_ftoi1(int t)
2498 #ifdef TCC_TARGET_ARM64
2499 gen_cvt_ftoi(t);
2500 #else
2501 int st;
2503 if (t == (VT_LLONG | VT_UNSIGNED)) {
2504 /* not handled natively */
2505 st = vtop->type.t & VT_BTYPE;
2506 if (st == VT_FLOAT)
2507 vpush_global_sym(&func_old_type, TOK___fixunssfdi);
2508 #if LDOUBLE_SIZE != 8
2509 else if (st == VT_LDOUBLE)
2510 vpush_global_sym(&func_old_type, TOK___fixunsxfdi);
2511 #endif
2512 else
2513 vpush_global_sym(&func_old_type, TOK___fixunsdfdi);
2514 vrott(2);
2515 gfunc_call(1);
2516 vpushi(0);
/* 64-bit helper result: low word in REG_IRET, high word in REG_LRET */
2517 vtop->r = REG_IRET;
2518 vtop->r2 = REG_LRET;
2519 } else {
2520 gen_cvt_ftoi(t);
2522 #endif
2525 /* force char or short cast */
2526 static void force_charshort_cast(int t)
2528 int bits, dbt;
2530 /* cannot cast static initializers */
2531 if (STATIC_DATA_WANTED)
2532 return;
2534 dbt = t & VT_BTYPE;
2535 /* XXX: add optimization if lvalue : just change type and offset */
2536 if (dbt == VT_BYTE)
2537 bits = 8;
2538 else
2539 bits = 16;
2540 if (t & VT_UNSIGNED) {
2541 vpushi((1 << bits) - 1);
2542 gen_op('&');
2543 } else {
2544 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
2545 bits = 64 - bits;
2546 else
2547 bits = 32 - bits;
2548 vpushi(bits);
2549 gen_op(TOK_SHL);
2550 /* result must be signed or the SAR is converted to an SHL
2551 This was not the case when "t" was a signed short
2552 and the last value on the stack was an unsigned int */
2553 vtop->type.t &= ~VT_UNSIGNED;
2554 vpushi(bits);
2555 gen_op(TOK_SAR);
2559 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
/* Convenience wrapper: build a ref-less CType from the plain type word
   't' and forward to gen_cast(). */
2560 static void gen_cast_s(int t)
2562 CType type;
2563 type.t = t;
2564 type.ref = NULL;
2565 gen_cast(&type);
/* Cast the value on top of the value stack to 'type'.  Constant
   operands are folded at compile time; otherwise conversion code is
   generated.  Finally vtop's type is replaced by 'type' (stripped of
   qualifier/array bits). */
2568 static void gen_cast(CType *type)
2570 int sbt, dbt, sf, df, c, p;
2572 /* special delayed cast for char/short */
2573 /* XXX: in some cases (multiple cascaded casts), it may still
2574 be incorrect */
2575 if (vtop->r & VT_MUSTCAST) {
2576 vtop->r &= ~VT_MUSTCAST;
2577 force_charshort_cast(vtop->type.t);
2580 /* bitfields first get cast to ints */
2581 if (vtop->type.t & VT_BITFIELD) {
2582 gv(RC_INT);
/* dbt/sbt: destination and source basic type (+signedness) */
2585 dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
2586 sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
2588 if (sbt != dbt) {
2589 sf = is_float(sbt);
2590 df = is_float(dbt);
/* c: vtop is a plain constant; p: constant address (VT_SYM) */
2591 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2592 p = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM);
2593 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
2594 c &= dbt != VT_LDOUBLE;
2595 #endif
2596 if (c) {
2597 /* constant case: we can do it now */
2598 /* XXX: in ISOC, cannot do it if error in convert */
2599 if (sbt == VT_FLOAT)
2600 vtop->c.ld = vtop->c.f;
2601 else if (sbt == VT_DOUBLE)
2602 vtop->c.ld = vtop->c.d;
2604 if (df) {
2605 if ((sbt & VT_BTYPE) == VT_LLONG) {
2606 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
2607 vtop->c.ld = vtop->c.i;
2608 else
2609 vtop->c.ld = -(long double)-vtop->c.i;
2610 } else if(!sf) {
2611 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
2612 vtop->c.ld = (uint32_t)vtop->c.i;
2613 else
2614 vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
2617 if (dbt == VT_FLOAT)
2618 vtop->c.f = (float)vtop->c.ld;
2619 else if (dbt == VT_DOUBLE)
2620 vtop->c.d = (double)vtop->c.ld;
2621 } else if (sf && dbt == (VT_LLONG|VT_UNSIGNED)) {
2622 vtop->c.i = vtop->c.ld;
2623 } else if (sf && dbt == VT_BOOL) {
2624 vtop->c.i = (vtop->c.ld != 0);
2625 } else {
/* integer <-> integer: normalize source, then truncate/extend to dest */
2626 if(sf)
2627 vtop->c.i = vtop->c.ld;
2628 else if (sbt == (VT_LLONG|VT_UNSIGNED))
2630 else if (sbt & VT_UNSIGNED)
2631 vtop->c.i = (uint32_t)vtop->c.i;
2632 #if PTR_SIZE == 8
2633 else if (sbt == VT_PTR)
2635 #endif
2636 else if (sbt != VT_LLONG)
2637 vtop->c.i = ((uint32_t)vtop->c.i |
2638 -(vtop->c.i & 0x80000000));
2640 if (dbt == (VT_LLONG|VT_UNSIGNED))
2642 else if (dbt == VT_BOOL)
2643 vtop->c.i = (vtop->c.i != 0);
2644 #if PTR_SIZE == 8
2645 else if (dbt == VT_PTR)
2647 #endif
2648 else if (dbt != VT_LLONG) {
2649 uint32_t m = ((dbt & VT_BTYPE) == VT_BYTE ? 0xff :
2650 (dbt & VT_BTYPE) == VT_SHORT ? 0xffff :
2651 0xffffffff);
2652 vtop->c.i &= m;
2653 if (!(dbt & VT_UNSIGNED))
2654 vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
2657 } else if (p && dbt == VT_BOOL) {
/* a symbol address is never NULL: (bool)&sym == 1 */
2658 vtop->r = VT_CONST;
2659 vtop->c.i = 1;
2660 } else {
2661 /* non constant case: generate code */
2662 if (sf && df) {
2663 /* convert from fp to fp */
2664 gen_cvt_ftof(dbt);
2665 } else if (df) {
2666 /* convert int to fp */
2667 gen_cvt_itof1(dbt);
2668 } else if (sf) {
2669 /* convert fp to int */
2670 if (dbt == VT_BOOL) {
2671 vpushi(0);
2672 gen_op(TOK_NE);
2673 } else {
2674 /* we handle char/short/etc... with generic code */
2675 if (dbt != (VT_INT | VT_UNSIGNED) &&
2676 dbt != (VT_LLONG | VT_UNSIGNED) &&
2677 dbt != VT_LLONG)
2678 dbt = VT_INT;
2679 gen_cvt_ftoi1(dbt);
2680 if (dbt == VT_INT && (type->t & (VT_BTYPE | VT_UNSIGNED)) != dbt) {
2681 /* additional cast for char/short... */
2682 vtop->type.t = dbt;
2683 gen_cast(type);
2686 #if PTR_SIZE == 4
2687 } else if ((dbt & VT_BTYPE) == VT_LLONG) {
2688 if ((sbt & VT_BTYPE) != VT_LLONG) {
2689 /* scalar to long long */
2690 /* machine independent conversion */
2691 gv(RC_INT);
2692 /* generate high word */
2693 if (sbt == (VT_INT | VT_UNSIGNED)) {
2694 vpushi(0);
2695 gv(RC_INT);
2696 } else {
2697 if (sbt == VT_PTR) {
2698 /* cast from pointer to int before we apply
2699 shift operation, which pointers don't support*/
2700 gen_cast_s(VT_INT);
2702 gv_dup();
2703 vpushi(31);
2704 gen_op(TOK_SAR);
2706 /* patch second register */
2707 vtop[-1].r2 = vtop->r;
2708 vpop();
2710 #else
2711 } else if ((dbt & VT_BTYPE) == VT_LLONG ||
2712 (dbt & VT_BTYPE) == VT_PTR ||
2713 (dbt & VT_BTYPE) == VT_FUNC) {
2714 if ((sbt & VT_BTYPE) != VT_LLONG &&
2715 (sbt & VT_BTYPE) != VT_PTR &&
2716 (sbt & VT_BTYPE) != VT_FUNC) {
2717 /* need to convert from 32bit to 64bit */
2718 gv(RC_INT);
2719 if (sbt != (VT_INT | VT_UNSIGNED)) {
2720 #if defined(TCC_TARGET_ARM64)
2721 gen_cvt_sxtw();
2722 #elif defined(TCC_TARGET_X86_64)
2723 int r = gv(RC_INT);
2724 /* x86_64 specific: movslq */
2725 o(0x6348);
2726 o(0xc0 + (REG_VALUE(r) << 3) + REG_VALUE(r));
2727 #else
2728 #error
2729 #endif
2732 #endif
2733 } else if (dbt == VT_BOOL) {
2734 /* scalar to bool */
2735 vpushi(0);
2736 gen_op(TOK_NE);
2737 } else if ((dbt & VT_BTYPE) == VT_BYTE ||
2738 (dbt & VT_BTYPE) == VT_SHORT) {
2739 if (sbt == VT_PTR) {
2740 vtop->type.t = VT_INT;
2741 tcc_warning("nonportable conversion from pointer to char/short");
2743 force_charshort_cast(dbt);
2744 } else if ((dbt & VT_BTYPE) == VT_INT) {
2745 /* scalar to int */
2746 if ((sbt & VT_BTYPE) == VT_LLONG) {
2747 #if PTR_SIZE == 4
2748 /* from long long: just take low order word */
2749 lexpand();
2750 vpop();
2751 #else
2752 vpushi(0xffffffff);
2753 vtop->type.t |= VT_UNSIGNED;
2754 gen_op('&');
2755 #endif
2757 /* if lvalue and single word type, nothing to do because
2758 the lvalue already contains the real type size (see
2759 VT_LVAL_xxx constants) */
2762 } else if ((dbt & VT_BTYPE) == VT_PTR && !(vtop->r & VT_LVAL)) {
2763 /* if we are casting between pointer types,
2764 we must update the VT_LVAL_xxx size */
2765 vtop->r = (vtop->r & ~VT_LVAL_TYPE)
2766 | (lvalue_type(type->ref->type.t) & VT_LVAL_TYPE);
2768 vtop->type = *type;
2769 vtop->type.t &= ~ ( VT_CONSTANT | VT_VOLATILE | VT_ARRAY );
2772 /* return type size as known at compile time. Put alignment at 'a' */
/* Returns the size in bytes of 'type' and stores its alignment in *a.
   Incomplete types (open arrays, incomplete enums) yield a negative
   size.  Alignment of double/long long is target-dependent (4 on plain
   i386/ARM-OABI, 8 elsewhere). */
2773 ST_FUNC int type_size(CType *type, int *a)
2775 Sym *s;
2776 int bt;
2778 bt = type->t & VT_BTYPE;
2779 if (bt == VT_STRUCT) {
2780 /* struct/union */
/* size (s->c) and alignment (s->r) were computed by struct_layout() */
2781 s = type->ref;
2782 *a = s->r;
2783 return s->c;
2784 } else if (bt == VT_PTR) {
2785 if (type->t & VT_ARRAY) {
2786 int ts;
2788 s = type->ref;
2789 ts = type_size(&s->type, a);
/* element incomplete + open array: keep result negative via -ts
   (NOTE(review): relies on s->c being -1 in that case) */
2791 if (ts < 0 && s->c < 0)
2792 ts = -ts;
2794 return ts * s->c;
2795 } else {
2796 *a = PTR_SIZE;
2797 return PTR_SIZE;
2799 } else if (IS_ENUM(type->t) && type->ref->c < 0) {
2800 return -1; /* incomplete enum */
2801 } else if (bt == VT_LDOUBLE) {
2802 *a = LDOUBLE_ALIGN;
2803 return LDOUBLE_SIZE;
2804 } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
2805 #ifdef TCC_TARGET_I386
2806 #ifdef TCC_TARGET_PE
2807 *a = 8;
2808 #else
2809 *a = 4;
2810 #endif
2811 #elif defined(TCC_TARGET_ARM)
2812 #ifdef TCC_ARM_EABI
2813 *a = 8;
2814 #else
2815 *a = 4;
2816 #endif
2817 #else
2818 *a = 8;
2819 #endif
2820 return 8;
2821 } else if (bt == VT_INT || bt == VT_FLOAT) {
2822 *a = 4;
2823 return 4;
2824 } else if (bt == VT_SHORT) {
2825 *a = 2;
2826 return 2;
2827 } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
2828 *a = 8;
2829 return 16;
2830 } else {
2831 /* char, void, function, _Bool */
2832 *a = 1;
2833 return 1;
2837 /* push type size as known at runtime time on top of value stack. Put
2838 alignment at 'a' */
/* For a VLA the size lives in a local variable at offset type->ref->c,
   so push an lvalue referencing it; otherwise push the constant size. */
2839 ST_FUNC void vla_runtime_type_size(CType *type, int *a)
2841 if (type->t & VT_VLA) {
2842 type_size(&type->ref->type, a);
2843 vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
2844 } else {
2845 vpushi(type_size(type, a));
/* Restore the stack pointer from the innermost VLA save slot, but only
   if any VLA is currently in scope. */
2849 static void vla_sp_restore(void) {
2850 if (vlas_in_scope) {
2851 gen_vla_sp_restore(vla_sp_loc);
/* Restore the stack pointer to its value from before any VLA was
   pushed in this function (used e.g. on return/goto out of VLA scope). */
2855 static void vla_sp_restore_root(void) {
2856 if (vlas_in_scope) {
2857 gen_vla_sp_restore(vla_sp_root_loc);
2861 /* return the pointed type of t */
/* For pointer/array types the referenced Sym holds the element type. */
2862 static inline CType *pointed_type(CType *type)
2864 return &type->ref->type;
2867 /* modify type so that its it is a pointer to type. */
/* In-place: push an anonymous field Sym holding the old type, then turn
   'type' into VT_PTR referencing it (storage-class bits are kept). */
2868 ST_FUNC void mk_pointer(CType *type)
2870 Sym *s;
2871 s = sym_push(SYM_FIELD, type, 0, -1);
2872 type->t = VT_PTR | (type->t & VT_STORAGE);
2873 type->ref = s;
2876 /* compare function types. OLD functions match any new functions */
/* Returns 1 when the two function types are compatible: same return
   type, same calling convention, and — unless either side is an
   old-style (K&R) declaration — the same prototype kind and pairwise
   compatible (unqualified) parameter lists of equal length. */
2877 static int is_compatible_func(CType *type1, CType *type2)
2879 Sym *s1, *s2;
2881 s1 = type1->ref;
2882 s2 = type2->ref;
2883 if (!is_compatible_types(&s1->type, &s2->type))
2884 return 0;
2885 /* check func_call */
2886 if (s1->f.func_call != s2->f.func_call)
2887 return 0;
2888 /* XXX: not complete */
2889 if (s1->f.func_type == FUNC_OLD || s2->f.func_type == FUNC_OLD)
2890 return 1;
2891 if (s1->f.func_type != s2->f.func_type)
2892 return 0;
/* walk both parameter lists in lockstep */
2893 while (s1 != NULL) {
2894 if (s2 == NULL)
2895 return 0;
2896 if (!is_compatible_unqualified_types(&s1->type, &s2->type))
2897 return 0;
2898 s1 = s1->next;
2899 s2 = s2->next;
/* s2 longer than s1: not compatible */
2901 if (s2)
2902 return 0;
2903 return 1;
2906 /* return true if type1 and type2 are the same. If unqualified is
2907 true, qualifiers on the types are ignored.
/* Structural type equality.  Pointers recurse into the pointed type
   (with qualifiers significant again), arrays compare element counts
   (an open array matches any length), structs/enums compare by
   identity of their Sym. */
2909 static int compare_types(CType *type1, CType *type2, int unqualified)
2911 int bt1, t1, t2;
2913 t1 = type1->t & VT_TYPE;
2914 t2 = type2->t & VT_TYPE;
2915 if (unqualified) {
2916 /* strip qualifiers before comparing */
2917 t1 &= ~(VT_CONSTANT | VT_VOLATILE);
2918 t2 &= ~(VT_CONSTANT | VT_VOLATILE);
2921 /* Default Vs explicit signedness only matters for char */
2922 if ((t1 & VT_BTYPE) != VT_BYTE) {
2923 t1 &= ~VT_DEFSIGN;
2924 t2 &= ~VT_DEFSIGN;
2926 /* XXX: bitfields ? */
2927 if (t1 != t2)
2928 return 0;
2929 /* test more complicated cases */
2930 bt1 = t1 & (VT_BTYPE | VT_ARRAY);
2931 if (bt1 == VT_PTR) {
2932 type1 = pointed_type(type1);
2933 type2 = pointed_type(type2);
2934 return is_compatible_types(type1, type2);
2935 } else if (bt1 & VT_ARRAY) {
2936 return type1->ref->c < 0 || type2->ref->c < 0
2937 || type1->ref->c == type2->ref->c;
2938 } else if (bt1 == VT_STRUCT) {
2939 return (type1->ref == type2->ref);
2940 } else if (bt1 == VT_FUNC) {
2941 return is_compatible_func(type1, type2);
2942 } else if (IS_ENUM(type1->t) || IS_ENUM(type2->t)) {
2943 return type1->ref == type2->ref;
2944 } else {
2945 return 1;
2949 /* return true if type1 and type2 are exactly the same (including
2950 qualifiers).
2952 static int is_compatible_types(CType *type1, CType *type2)
2954 return compare_types(type1,type2,0);
2957 /* return true if type1 and type2 are the same (ignoring qualifiers).
2959 static int is_compatible_unqualified_types(CType *type1, CType *type2)
2961 return compare_types(type1,type2,1);
2964 /* print a type. If 'varstr' is not NULL, then the variable is also
2965 printed in the type */
2966 /* XXX: union */
2967 /* XXX: add array and function pointers */
2968 static void type_to_str(char *buf, int buf_size,
2969 CType *type, const char *varstr)
2971 int bt, v, t;
2972 Sym *s, *sa;
2973 char buf1[256];
2974 const char *tstr;
2976 t = type->t;
2977 bt = t & VT_BTYPE;
2978 buf[0] = '\0';
2980 if (t & VT_EXTERN)
2981 pstrcat(buf, buf_size, "extern ");
2982 if (t & VT_STATIC)
2983 pstrcat(buf, buf_size, "static ");
2984 if (t & VT_TYPEDEF)
2985 pstrcat(buf, buf_size, "typedef ");
2986 if (t & VT_INLINE)
2987 pstrcat(buf, buf_size, "inline ");
2988 if (t & VT_VOLATILE)
2989 pstrcat(buf, buf_size, "volatile ");
2990 if (t & VT_CONSTANT)
2991 pstrcat(buf, buf_size, "const ");
2993 if (((t & VT_DEFSIGN) && bt == VT_BYTE)
2994 || ((t & VT_UNSIGNED)
2995 && (bt == VT_SHORT || bt == VT_INT || bt == VT_LLONG)
2996 && !IS_ENUM(t)
2998 pstrcat(buf, buf_size, (t & VT_UNSIGNED) ? "unsigned " : "signed ");
3000 buf_size -= strlen(buf);
3001 buf += strlen(buf);
3003 switch(bt) {
3004 case VT_VOID:
3005 tstr = "void";
3006 goto add_tstr;
3007 case VT_BOOL:
3008 tstr = "_Bool";
3009 goto add_tstr;
3010 case VT_BYTE:
3011 tstr = "char";
3012 goto add_tstr;
3013 case VT_SHORT:
3014 tstr = "short";
3015 goto add_tstr;
3016 case VT_INT:
3017 tstr = "int";
3018 goto maybe_long;
3019 case VT_LLONG:
3020 tstr = "long long";
3021 maybe_long:
3022 if (t & VT_LONG)
3023 tstr = "long";
3024 if (!IS_ENUM(t))
3025 goto add_tstr;
3026 tstr = "enum ";
3027 goto tstruct;
3028 case VT_FLOAT:
3029 tstr = "float";
3030 goto add_tstr;
3031 case VT_DOUBLE:
3032 tstr = "double";
3033 goto add_tstr;
3034 case VT_LDOUBLE:
3035 tstr = "long double";
3036 add_tstr:
3037 pstrcat(buf, buf_size, tstr);
3038 break;
3039 case VT_STRUCT:
3040 tstr = "struct ";
3041 if (IS_UNION(t))
3042 tstr = "union ";
3043 tstruct:
3044 pstrcat(buf, buf_size, tstr);
3045 v = type->ref->v & ~SYM_STRUCT;
3046 if (v >= SYM_FIRST_ANOM)
3047 pstrcat(buf, buf_size, "<anonymous>");
3048 else
3049 pstrcat(buf, buf_size, get_tok_str(v, NULL));
3050 break;
3051 case VT_FUNC:
3052 s = type->ref;
3053 buf1[0]=0;
3054 if (varstr && '*' == *varstr) {
3055 pstrcat(buf1, sizeof(buf1), "(");
3056 pstrcat(buf1, sizeof(buf1), varstr);
3057 pstrcat(buf1, sizeof(buf1), ")");
3059 pstrcat(buf1, buf_size, "(");
3060 sa = s->next;
3061 while (sa != NULL) {
3062 char buf2[256];
3063 type_to_str(buf2, sizeof(buf2), &sa->type, NULL);
3064 pstrcat(buf1, sizeof(buf1), buf2);
3065 sa = sa->next;
3066 if (sa)
3067 pstrcat(buf1, sizeof(buf1), ", ");
3069 if (s->f.func_type == FUNC_ELLIPSIS)
3070 pstrcat(buf1, sizeof(buf1), ", ...");
3071 pstrcat(buf1, sizeof(buf1), ")");
3072 type_to_str(buf, buf_size, &s->type, buf1);
3073 goto no_var;
3074 case VT_PTR:
3075 s = type->ref;
3076 if (t & VT_ARRAY) {
3077 if (varstr && '*' == *varstr)
3078 snprintf(buf1, sizeof(buf1), "(%s)[%d]", varstr, s->c);
3079 else
3080 snprintf(buf1, sizeof(buf1), "%s[%d]", varstr ? varstr : "", s->c);
3081 type_to_str(buf, buf_size, &s->type, buf1);
3082 goto no_var;
3084 pstrcpy(buf1, sizeof(buf1), "*");
3085 if (t & VT_CONSTANT)
3086 pstrcat(buf1, buf_size, "const ");
3087 if (t & VT_VOLATILE)
3088 pstrcat(buf1, buf_size, "volatile ");
3089 if (varstr)
3090 pstrcat(buf1, sizeof(buf1), varstr);
3091 type_to_str(buf, buf_size, &s->type, buf1);
3092 goto no_var;
3094 if (varstr) {
3095 pstrcat(buf, buf_size, " ");
3096 pstrcat(buf, buf_size, varstr);
3098 no_var: ;
3101 /* verify type compatibility to store vtop in 'dt' type, and generate
3102 casts if needed. */
/* Diagnose invalid/questionable implicit conversions for assignment
   (pointer/integer mixes, incompatible pointer targets, discarded
   qualifiers, struct mismatches), then emit the cast to 'dt'. */
3103 static void gen_assign_cast(CType *dt)
3105 CType *st, *type1, *type2;
3106 char buf1[256], buf2[256];
3107 int dbt, sbt, qualwarn, lvl;
3109 st = &vtop->type; /* source type */
3110 dbt = dt->t & VT_BTYPE;
3111 sbt = st->t & VT_BTYPE;
3112 if (sbt == VT_VOID || dbt == VT_VOID) {
3113 if (sbt == VT_VOID && dbt == VT_VOID)
3114 ; /* It is Ok if both are void */
3115 else
3116 tcc_error("cannot cast from/to void");
3118 if (dt->t & VT_CONSTANT)
3119 tcc_warning("assignment of read-only location");
3120 switch(dbt) {
3121 case VT_PTR:
3122 /* special cases for pointers */
3123 /* '0' can also be a pointer */
3124 if (is_null_pointer(vtop))
3125 break;
3126 /* accept implicit pointer to integer cast with warning */
3127 if (is_integer_btype(sbt)) {
3128 tcc_warning("assignment makes pointer from integer without a cast");
3129 break;
3131 type1 = pointed_type(dt);
3132 if (sbt == VT_PTR)
3133 type2 = pointed_type(st);
3134 else if (sbt == VT_FUNC)
3135 type2 = st; /* a function is implicitly a function pointer */
3136 else
3137 goto error;
3138 if (is_compatible_types(type1, type2))
3139 break;
/* descend through matching pointer levels, noting dropped qualifiers */
3140 for (qualwarn = lvl = 0;; ++lvl) {
3141 if (((type2->t & VT_CONSTANT) && !(type1->t & VT_CONSTANT)) ||
3142 ((type2->t & VT_VOLATILE) && !(type1->t & VT_VOLATILE)))
3143 qualwarn = 1;
3144 dbt = type1->t & (VT_BTYPE|VT_LONG);
3145 sbt = type2->t & (VT_BTYPE|VT_LONG);
3146 if (dbt != VT_PTR || sbt != VT_PTR)
3147 break;
3148 type1 = pointed_type(type1);
3149 type2 = pointed_type(type2);
3151 if (!is_compatible_unqualified_types(type1, type2)) {
3152 if ((dbt == VT_VOID || sbt == VT_VOID) && lvl == 0) {
3153 /* void * can match anything */
3154 } else if (dbt == sbt
3155 && is_integer_btype(sbt & VT_BTYPE)
3156 && IS_ENUM(type1->t) + IS_ENUM(type2->t)
3157 + !!((type1->t ^ type2->t) & VT_UNSIGNED) < 2) {
3158 /* Like GCC don't warn by default for merely changes
3159 in pointer target signedness. Do warn for different
3160 base types, though, in particular for unsigned enums
3161 and signed int targets. */
3162 } else {
3163 tcc_warning("assignment from incompatible pointer type");
3164 break;
3167 if (qualwarn)
3168 tcc_warning("assignment discards qualifiers from pointer target type");
3169 break;
3170 case VT_BYTE:
3171 case VT_SHORT:
3172 case VT_INT:
3173 case VT_LLONG:
3174 if (sbt == VT_PTR || sbt == VT_FUNC) {
3175 tcc_warning("assignment makes integer from pointer without a cast");
3176 } else if (sbt == VT_STRUCT) {
3177 goto case_VT_STRUCT;
3179 /* XXX: more tests */
3180 break;
3181 case VT_STRUCT:
3182 case_VT_STRUCT:
3183 if (!is_compatible_unqualified_types(dt, st)) {
3184 error:
3185 type_to_str(buf1, sizeof(buf1), st, NULL);
3186 type_to_str(buf2, sizeof(buf2), dt, NULL);
3187 tcc_error("cannot cast '%s' to '%s'", buf1, buf2);
3189 break;
3191 gen_cast(dt);
3194 /* store vtop in lvalue pushed on stack */
/* Store the value at vtop into the lvalue at vtop[-1].  Handles struct
   assignment (via memmove), bitfield stores, void targets, delayed
   char/short casts, and the two-register (long long / qlong) case.
   Leaves the stored value on the stack as the expression result. */
3195 ST_FUNC void vstore(void)
3197 int sbt, dbt, ft, r, t, size, align, bit_size, bit_pos, rc, delayed_cast;
3199 ft = vtop[-1].type.t;
3200 sbt = vtop->type.t & VT_BTYPE;
3201 dbt = ft & VT_BTYPE;
3202 if ((((sbt == VT_INT || sbt == VT_SHORT) && dbt == VT_BYTE) ||
3203 (sbt == VT_INT && dbt == VT_SHORT))
3204 && !(vtop->type.t & VT_BITFIELD)) {
3205 /* optimize char/short casts */
3206 delayed_cast = VT_MUSTCAST;
3207 vtop->type.t = ft & VT_TYPE;
3208 /* XXX: factorize */
3209 if (ft & VT_CONSTANT)
3210 tcc_warning("assignment of read-only location");
3211 } else {
3212 delayed_cast = 0;
3213 if (!(ft & VT_BITFIELD))
3214 gen_assign_cast(&vtop[-1].type);
3217 if (sbt == VT_STRUCT) {
3218 /* if structure, only generate pointer */
3219 /* structure assignment : generate memcpy */
3220 /* XXX: optimize if small size */
3221 size = type_size(&vtop->type, &align);
3223 /* destination */
3224 vswap();
3225 vtop->type.t = VT_PTR;
3226 gaddrof();
3228 /* address of memcpy() */
3229 #ifdef TCC_ARM_EABI
3230 if(!(align & 7))
3231 vpush_global_sym(&func_old_type, TOK_memcpy8);
3232 else if(!(align & 3))
3233 vpush_global_sym(&func_old_type, TOK_memcpy4);
3234 else
3235 #endif
3236 /* Use memmove, rather than memcpy, as dest and src may be same: */
3237 vpush_global_sym(&func_old_type, TOK_memmove);
3239 vswap();
3240 /* source */
3241 vpushv(vtop - 2);
3242 vtop->type.t = VT_PTR;
3243 gaddrof();
3244 /* type size */
3245 vpushi(size);
3246 gfunc_call(3);
3248 /* leave source on stack */
3249 } else if (ft & VT_BITFIELD) {
3250 /* bitfield store handling */
3252 /* save lvalue as expression result (example: s.b = s.a = n;) */
3253 vdup(), vtop[-1] = vtop[-2];
3255 bit_pos = BIT_POS(ft);
3256 bit_size = BIT_SIZE(ft);
3257 /* remove bit field info to avoid loops */
3258 vtop[-1].type.t = ft & ~VT_STRUCT_MASK;
3260 if ((ft & VT_BTYPE) == VT_BOOL) {
3261 gen_cast(&vtop[-1].type);
3262 vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
3265 r = adjust_bf(vtop - 1, bit_pos, bit_size);
3266 if (r == VT_STRUCT) {
/* bitfield straddles its container: byte-wise packed access */
3267 gen_cast_s((ft & VT_BTYPE) == VT_LLONG ? VT_LLONG : VT_INT);
3268 store_packed_bf(bit_pos, bit_size);
3269 } else {
3270 unsigned long long mask = (1ULL << bit_size) - 1;
3271 if ((ft & VT_BTYPE) != VT_BOOL) {
3272 /* mask source */
3273 if ((vtop[-1].type.t & VT_BTYPE) == VT_LLONG)
3274 vpushll(mask);
3275 else
3276 vpushi((unsigned)mask);
3277 gen_op('&');
3279 /* shift source */
3280 vpushi(bit_pos);
3281 gen_op(TOK_SHL);
3282 vswap();
3283 /* duplicate destination */
3284 vdup();
3285 vrott(3);
3286 /* load destination, mask and or with source */
3287 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
3288 vpushll(~(mask << bit_pos));
3289 else
3290 vpushi(~((unsigned)mask << bit_pos));
3291 gen_op('&');
3292 gen_op('|');
3293 /* store result */
3294 vstore();
3295 /* ... and discard */
3296 vpop();
3298 } else if (dbt == VT_VOID) {
3299 --vtop;
3300 } else {
3301 #ifdef CONFIG_TCC_BCHECK
3302 /* bound check case */
3303 if (vtop[-1].r & VT_MUSTBOUND) {
3304 vswap();
3305 gbound();
3306 vswap();
3308 #endif
3309 rc = RC_INT;
3310 if (is_float(ft)) {
3311 rc = RC_FLOAT;
3312 #ifdef TCC_TARGET_X86_64
3313 if ((ft & VT_BTYPE) == VT_LDOUBLE) {
3314 rc = RC_ST0;
3315 } else if ((ft & VT_BTYPE) == VT_QFLOAT) {
3316 rc = RC_FRET;
3318 #endif
3320 r = gv(rc); /* generate value */
3321 /* if lvalue was saved on stack, must read it */
3322 if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
3323 SValue sv;
3324 t = get_reg(RC_INT);
3325 #if PTR_SIZE == 8
3326 sv.type.t = VT_PTR;
3327 #else
3328 sv.type.t = VT_INT;
3329 #endif
3330 sv.r = VT_LOCAL | VT_LVAL;
3331 sv.c.i = vtop[-1].c.i;
3332 load(t, &sv);
3333 vtop[-1].r = t | VT_LVAL;
3335 /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
3336 #if PTR_SIZE == 8
3337 if (((ft & VT_BTYPE) == VT_QLONG) || ((ft & VT_BTYPE) == VT_QFLOAT)) {
3338 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
3339 #else
3340 if ((ft & VT_BTYPE) == VT_LLONG) {
3341 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
3342 #endif
3343 vtop[-1].type.t = load_type;
3344 store(r, vtop - 1);
3345 vswap();
3346 /* convert to int to increment easily */
3347 vtop->type.t = addr_type;
3348 gaddrof();
3349 vpushi(load_size);
3350 gen_op('+');
3351 vtop->r |= VT_LVAL;
3352 vswap();
3353 vtop[-1].type.t = load_type;
3354 /* XXX: it works because r2 is spilled last ! */
3355 store(vtop->r2, vtop - 1);
3356 } else {
3357 store(r, vtop - 1);
3360 vswap();
3361 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
3362 vtop->r |= delayed_cast;
3366 /* post defines POST/PRE add. c is the token ++ or -- */
/* Generate code for ++/-- on the lvalue at vtop.  For post-increment
   the original value is duplicated first so it remains as the
   expression result after the store. */
3367 ST_FUNC void inc(int post, int c)
3369 test_lvalue();
3370 vdup(); /* save lvalue */
3371 if (post) {
3372 gv_dup(); /* duplicate value */
3373 vrotb(3);
3374 vrotb(3);
3376 /* add constant */
/* c is TOK_INC or TOK_DEC; c - TOK_MID yields +1 or -1 */
3377 vpushi(c - TOK_MID);
3378 gen_op('+');
3379 vstore(); /* store value */
3380 if (post)
3381 vpop(); /* if post op, return saved value */
/* Parse one or more adjacent string literal tokens into 'astr'
   (concatenated, NUL-terminated).  'msg' is the expect() diagnostic
   used when the current token is not a string. */
3384 ST_FUNC void parse_mult_str (CString *astr, const char *msg)
3386 /* read the string */
3387 if (tok != TOK_STR)
3388 expect(msg);
3389 cstr_new(astr);
3390 while (tok == TOK_STR) {
3391 /* XXX: add \0 handling too ? */
3392 cstr_cat(astr, tokc.str.data, -1);
3393 next();
3395 cstr_ccat(astr, '\0');
/* If I is >= 1, return the 1-based index of its highest set bit, i.e.
   floor(log2(I)) + 1; for a power of two this is log2(I) + 1.
   If I is 0, return 0. */
static int exact_log2p1(int i)
{
    int n;

    if (i == 0)
        return 0;
    /* peel off eight bits at a time, then binary-search the rest */
    n = 1;
    while (i >= 1 << 8) {
        i >>= 8;
        n += 8;
    }
    if (i >= 1 << 4) {
        i >>= 4;
        n += 4;
    }
    if (i >= 1 << 2) {
        i >>= 2;
        n += 2;
    }
    if (i >= 1 << 1)
        n += 1;
    return n;
}
3416 /* Parse __attribute__((...)) GNUC extension. */
/* Consume one or more __attribute__((...)) groups at the current token
   position and record the recognized attributes into 'ad'.  Unknown
   attributes are warned about (if enabled) and their parenthesized
   arguments are skipped. */
3417 static void parse_attribute(AttributeDef *ad)
3419 int t, n;
3420 CString astr;
3422 redo:
3423 if (tok != TOK_ATTRIBUTE1 && tok != TOK_ATTRIBUTE2)
3424 return;
3425 next();
3426 skip('(');
3427 skip('(');
3428 while (tok != ')') {
3429 if (tok < TOK_IDENT)
3430 expect("attribute name");
3431 t = tok;
3432 next();
3433 switch(t) {
3434 case TOK_CLEANUP1:
3435 case TOK_CLEANUP2:
3437 Sym *s;
3439 skip('(');
3440 s = sym_find(tok);
3441 if (!s) {
3442 tcc_warning("implicit declaration of function '%s'",
3443 get_tok_str(tok, &tokc));
3444 s = external_global_sym(tok, &func_old_type);
3446 ad->cleanup_func = s;
3447 next();
3448 skip(')');
3449 break;
3451 case TOK_SECTION1:
3452 case TOK_SECTION2:
3453 skip('(');
3454 parse_mult_str(&astr, "section name");
3455 ad->section = find_section(tcc_state, (char *)astr.data);
3456 skip(')');
3457 cstr_free(&astr);
3458 break;
3459 case TOK_ALIAS1:
3460 case TOK_ALIAS2:
3461 skip('(');
3462 parse_mult_str(&astr, "alias(\"target\")");
3463 ad->alias_target = /* save string as token, for later */
3464 tok_alloc((char*)astr.data, astr.size-1)->tok;
3465 skip(')');
3466 cstr_free(&astr);
3467 break;
3468 case TOK_VISIBILITY1:
3469 case TOK_VISIBILITY2:
3470 skip('(');
3471 parse_mult_str(&astr,
3472 "visibility(\"default|hidden|internal|protected\")");
3473 if (!strcmp (astr.data, "default"))
3474 ad->a.visibility = STV_DEFAULT;
3475 else if (!strcmp (astr.data, "hidden"))
3476 ad->a.visibility = STV_HIDDEN;
3477 else if (!strcmp (astr.data, "internal"))
3478 ad->a.visibility = STV_INTERNAL;
3479 else if (!strcmp (astr.data, "protected"))
3480 ad->a.visibility = STV_PROTECTED;
3481 else
3482 expect("visibility(\"default|hidden|internal|protected\")");
3483 skip(')');
3484 cstr_free(&astr);
3485 break;
3486 case TOK_ALIGNED1:
3487 case TOK_ALIGNED2:
3488 if (tok == '(') {
3489 next();
3490 n = expr_const();
3491 if (n <= 0 || (n & (n - 1)) != 0)
3492 tcc_error("alignment must be a positive power of two");
3493 skip(')');
3494 } else {
/* bare __attribute__((aligned)) means maximum alignment */
3495 n = MAX_ALIGN;
/* stored as log2(n)+1 so that 0 means "not specified" */
3497 ad->a.aligned = exact_log2p1(n);
3498 if (n != 1 << (ad->a.aligned - 1))
3499 tcc_error("alignment of %d is larger than implemented", n);
3500 break;
3501 case TOK_PACKED1:
3502 case TOK_PACKED2:
3503 ad->a.packed = 1;
3504 break;
3505 case TOK_WEAK1:
3506 case TOK_WEAK2:
3507 ad->a.weak = 1;
3508 break;
3509 case TOK_UNUSED1:
3510 case TOK_UNUSED2:
3511 /* currently, no need to handle it because tcc does not
3512 track unused objects */
3513 break;
3514 case TOK_NORETURN1:
3515 case TOK_NORETURN2:
3516 /* currently, no need to handle it because tcc does not
3517 track unused objects */
3518 break;
3519 case TOK_CDECL1:
3520 case TOK_CDECL2:
3521 case TOK_CDECL3:
3522 ad->f.func_call = FUNC_CDECL;
3523 break;
3524 case TOK_STDCALL1:
3525 case TOK_STDCALL2:
3526 case TOK_STDCALL3:
3527 ad->f.func_call = FUNC_STDCALL;
3528 break;
3529 #ifdef TCC_TARGET_I386
3530 case TOK_REGPARM1:
3531 case TOK_REGPARM2:
3532 skip('(');
3533 n = expr_const();
/* clamp regparm count to the 0..3 range i386 supports */
3534 if (n > 3)
3535 n = 3;
3536 else if (n < 0)
3537 n = 0;
3538 if (n > 0)
3539 ad->f.func_call = FUNC_FASTCALL1 + n - 1;
3540 skip(')');
3541 break;
3542 case TOK_FASTCALL1:
3543 case TOK_FASTCALL2:
3544 case TOK_FASTCALL3:
3545 ad->f.func_call = FUNC_FASTCALLW;
3546 break;
3547 #endif
3548 case TOK_MODE:
3549 skip('(');
3550 switch(tok) {
3551 case TOK_MODE_DI:
3552 ad->attr_mode = VT_LLONG + 1;
3553 break;
3554 case TOK_MODE_QI:
3555 ad->attr_mode = VT_BYTE + 1;
3556 break;
3557 case TOK_MODE_HI:
3558 ad->attr_mode = VT_SHORT + 1;
3559 break;
3560 case TOK_MODE_SI:
3561 case TOK_MODE_word:
3562 ad->attr_mode = VT_INT + 1;
3563 break;
3564 default:
3565 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok, NULL));
3566 break;
3568 next();
3569 skip(')');
3570 break;
3571 case TOK_DLLEXPORT:
3572 ad->a.dllexport = 1;
3573 break;
3574 case TOK_NODECORATE:
3575 ad->a.nodecorate = 1;
3576 break;
3577 case TOK_DLLIMPORT:
3578 ad->a.dllimport = 1;
3579 break;
3580 default:
3581 if (tcc_state->warn_unsupported)
3582 tcc_warning("'%s' attribute ignored", get_tok_str(t, NULL));
3583 /* skip parameters */
3584 if (tok == '(') {
3585 int parenthesis = 0;
3586 do {
3587 if (tok == '(')
3588 parenthesis++;
3589 else if (tok == ')')
3590 parenthesis--;
3591 next();
3592 } while (parenthesis && tok != -1);
3594 break;
3596 if (tok != ',')
3597 break;
3598 next();
3600 skip(')');
3601 skip(')');
3602 goto redo;
/* Find member 'v' in struct/union 'type', descending into anonymous
   nested struct/union members.  On success *cumofs is incremented by
   the offsets of the traversed anonymous members; returns the field
   Sym, or NULL if not found. */
3605 static Sym * find_field (CType *type, int v, int *cumofs)
3607 Sym *s = type->ref;
3608 v |= SYM_FIELD;
3609 while ((s = s->next) != NULL) {
/* anonymous struct/union member: search its fields recursively */
3610 if ((s->v & SYM_FIELD) &&
3611 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3612 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3613 Sym *ret = find_field (&s->type, v, cumofs);
3614 if (ret) {
3615 *cumofs += s->c;
3616 return ret;
3619 if (s->v == v)
3620 break;
3622 return s;
/* Compute the layout of a struct/union: assign each field its byte
   offset (f->c) and, for bitfields, its bit position, then store the
   overall size in type->ref->c and alignment in type->ref->r.
   Two bitfield layout modes are supported: PCC/GCC-compatible and
   MS-compatible (selected by tcc_state->ms_bitfields); #pragma pack,
   __attribute__((packed)) and __attribute__((aligned)) are honored.
   A second pass then picks, per bitfield, an integer type through
   which it can actually be loaded/stored (f->auxtype). */
3625 static void struct_layout(CType *type, AttributeDef *ad)
3627     int size, align, maxalign, offset, c, bit_pos, bit_size;
3628     int packed, a, bt, prevbt, prev_bit_size;
3629     int pcc = !tcc_state->ms_bitfields;
3630     int pragma_pack = *tcc_state->pack_stack_ptr;
3631     Sym *f;
3633     maxalign = 1;
3634     offset = 0;
/* c = running byte size; bit_pos = bit offset inside the current
   bitfield storage unit */
3635     c = 0;
3636     bit_pos = 0;
3637     prevbt = VT_STRUCT; /* make it never match */
3638     prev_bit_size = 0;
3640 //#define BF_DEBUG
/* pass 1: walk the field list and assign offsets/bit positions */
3642     for (f = type->ref->next; f; f = f->next) {
3643         if (f->type.t & VT_BITFIELD)
3644             bit_size = BIT_SIZE(f->type.t);
3645         else
/* bit_size == -1 marks a plain (non-bitfield) member below */
3646             bit_size = -1;
3647         size = type_size(&f->type, &align);
3648         a = f->a.aligned ? 1 << (f->a.aligned - 1) : 0;
3649         packed = 0;
3651         if (pcc && bit_size == 0) {
3652             /* in pcc mode, packing does not affect zero-width bitfields */
3654         } else {
3655             /* in pcc mode, attribute packed overrides if set. */
3656             if (pcc && (f->a.packed || ad->a.packed))
3657                 align = packed = 1;
3659             /* pragma pack overrides align if lesser and packs bitfields always */
3660             if (pragma_pack) {
3661                 packed = 1;
3662                 if (pragma_pack < align)
3663                     align = pragma_pack;
3664                 /* in pcc mode pragma pack also overrides individual align */
3665                 if (pcc && pragma_pack < a)
3666                     a = 0;
3669         /* some individual align was specified */
3670         if (a)
3671             align = a;
/* union: every member starts at offset 0; size is the max member size */
3673         if (type->ref->type.t == VT_UNION) {
3674             if (pcc && bit_size >= 0)
3675                 size = (bit_size + 7) >> 3;
3676             offset = 0;
3677             if (size > c)
3678                 c = size;
/* plain member: flush pending bits, align, then advance by size */
3680         } else if (bit_size < 0) {
3681             if (pcc)
3682                 c += (bit_pos + 7) >> 3;
3683             c = (c + align - 1) & -align;
3684             offset = c;
3685             if (size > 0)
3686                 c += size;
3687             bit_pos = 0;
3688             prevbt = VT_STRUCT;
3689             prev_bit_size = 0;
3691         } else {
3692             /* A bit-field. Layout is more complicated. There are two
3693                options: PCC (GCC) compatible and MS compatible */
3694             if (pcc) {
3695                 /* In PCC layout a bit-field is placed adjacent to the
3696                    preceding bit-fields, except if:
3697                    - it has zero-width
3698                    - an individual alignment was given
3699                    - it would overflow its base type container and
3700                      there is no packing */
3701                 if (bit_size == 0) {
3702             new_field:
/* start a fresh storage unit: flush accumulated bits and re-align */
3703                     c = (c + ((bit_pos + 7) >> 3) + align - 1) & -align;
3704                     bit_pos = 0;
3705                 } else if (f->a.aligned) {
3706                     goto new_field;
3707                 } else if (!packed) {
3708                     int a8 = align * 8;
3709                     int ofs = ((c * 8 + bit_pos) % a8 + bit_size + a8 - 1) / a8;
3710                     if (ofs > size / align)
3711                         goto new_field;
3714                 /* in pcc mode, long long bitfields have type int if they fit */
3715                 if (size == 8 && bit_size <= 32)
3716                     f->type.t = (f->type.t & ~VT_BTYPE) | VT_INT, size = 4;
/* normalize bit_pos to lie within one aligned unit */
3718                 while (bit_pos >= align * 8)
3719                     c += align, bit_pos -= align * 8;
3720                 offset = c;
3722                 /* In PCC layout named bit-fields influence the alignment
3723                    of the containing struct using the base types alignment,
3724                    except for packed fields (which here have correct align).  */
3725                 if (f->v & SYM_FIRST_ANOM
3726                     // && bit_size // ??? gcc on ARM/rpi does that
3728                     align = 1;
3730             } else {
3731                 bt = f->type.t & VT_BTYPE;
/* MS mode: a new run starts when the container overflows or the
   underlying base type changes */
3732                 if ((bit_pos + bit_size > size * 8)
3733                     || (bit_size > 0) == (bt != prevbt)
3735                     c = (c + align - 1) & -align;
3736                     offset = c;
3737                     bit_pos = 0;
3738                     /* In MS bitfield mode a bit-field run always uses
3739                        at least as many bits as the underlying type.
3740                        To start a new run it's also required that this
3741                        or the last bit-field had non-zero width.  */
3742                     if (bit_size || prev_bit_size)
3743                         c += size;
3745                 /* In MS layout the records alignment is normally
3746                    influenced by the field, except for a zero-width
3747                    field at the start of a run (but by further zero-width
3748                    fields it is again).  */
3749                 if (bit_size == 0 && prevbt != bt)
3750                     align = 1;
3751                 prevbt = bt;
3752                 prev_bit_size = bit_size;
/* record the bit position inside the field's type word */
3755             f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
3756                         | (bit_pos << VT_STRUCT_SHIFT);
3757             bit_pos += bit_size;
3759         if (align > maxalign)
3760             maxalign = align;
3762 #ifdef BF_DEBUG
3763         printf("set field %s offset %-2d size %-2d align %-2d",
3764                get_tok_str(f->v & ~SYM_FIELD, NULL), offset, size, align);
3765         if (f->type.t & VT_BITFIELD) {
3766             printf(" pos %-2d bits %-2d",
3767                     BIT_POS(f->type.t),
3768                     BIT_SIZE(f->type.t)
3771         printf("\n");
3772 #endif
3774         f->c = offset;
3775         f->r = 0;
/* flush the bits of a trailing bitfield run (PCC mode) */
3778     if (pcc)
3779         c += (bit_pos + 7) >> 3;
3781     /* store size and alignment */
3782     a = bt = ad->a.aligned ? 1 << (ad->a.aligned - 1) : 1;
3783     if (a < maxalign)
3784         a = maxalign;
3785     type->ref->r = a;
3786     if (pragma_pack && pragma_pack < maxalign && 0 == pcc) {
3787         /* can happen if individual align for some member was given.  In
3788            this case MSVC ignores maxalign when aligning the size */
3789         a = pragma_pack;
3790         if (a < bt)
3791             a = bt;
/* round total size up to the final alignment */
3793     c = (c + a - 1) & -a;
3794     type->ref->c = c;
3796 #ifdef BF_DEBUG
3797     printf("struct size %-2d align %-2d\n\n", c, a), fflush(stdout);
3798 #endif
3800     /* check whether we can access bitfields by their type */
3801     for (f = type->ref->next; f; f = f->next) {
3802         int s, px, cx, c0;
3803         CType t;
3805         if (0 == (f->type.t & VT_BITFIELD))
3806             continue;
3807         f->type.ref = f;
3808         f->auxtype = -1;
3809         bit_size = BIT_SIZE(f->type.t);
3810         if (bit_size == 0)
3811             continue;
3812         bit_pos = BIT_POS(f->type.t);
3813         size = type_size(&f->type, &align);
/* accessible through its declared type and within the struct: done */
3814         if (bit_pos + bit_size <= size * 8 && f->c + size <= c)
3815             continue;
3817         /* try to access the field using a different type */
3818         c0 = -1, s = align = 1;
/* iterate until the candidate container offset stabilizes */
3819         for (;;) {
3820             px = f->c * 8 + bit_pos;
3821             cx = (px >> 3) & -align;
3822             px = px - (cx << 3);
3823             if (c0 == cx)
3824                 break;
3825             s = (px + bit_size + 7) >> 3;
/* choose the smallest integer type that spans the bits */
3826             if (s > 4) {
3827                 t.t = VT_LLONG;
3828             } else if (s > 2) {
3829                 t.t = VT_INT;
3830             } else if (s > 1) {
3831                 t.t = VT_SHORT;
3832             } else {
3833                 t.t = VT_BYTE;
3835             s = type_size(&t, &align);
3836             c0 = cx;
3839         if (px + bit_size <= s * 8 && cx + s <= c) {
3840             /* update offset and bit position */
3841             f->c = cx;
3842             bit_pos = px;
3843             f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
3844                         | (bit_pos << VT_STRUCT_SHIFT);
3845             if (s != size)
3846                 f->auxtype = t.t;
3847 #ifdef BF_DEBUG
3848             printf("FIX field %s offset %-2d size %-2d align %-2d "
3849                    "pos %-2d bits %-2d\n",
3850                    get_tok_str(f->v & ~SYM_FIELD, NULL),
3851                    cx, s, align, px, bit_size);
3852 #endif
3853         } else {
3854             /* fall back to load/store single-byte wise */
3855             f->auxtype = VT_STRUCT;
3856 #ifdef BF_DEBUG
3857             printf("FIX field %s : load byte-wise\n",
3858                    get_tok_str(f->v & ~SYM_FIELD, NULL));
3859 #endif
3864 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
3865 static void struct_decl(CType *type, int u)
3867 int v, c, size, align, flexible;
3868 int bit_size, bsize, bt;
3869 Sym *s, *ss, **ps;
3870 AttributeDef ad, ad1;
3871 CType type1, btype;
3873 memset(&ad, 0, sizeof ad);
3874 next();
3875 parse_attribute(&ad);
3876 if (tok != '{') {
3877 v = tok;
3878 next();
3879 /* struct already defined ? return it */
3880 if (v < TOK_IDENT)
3881 expect("struct/union/enum name");
3882 s = struct_find(v);
3883 if (s && (s->sym_scope == local_scope || tok != '{')) {
3884 if (u == s->type.t)
3885 goto do_decl;
3886 if (u == VT_ENUM && IS_ENUM(s->type.t))
3887 goto do_decl;
3888 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
3890 } else {
3891 v = anon_sym++;
3893 /* Record the original enum/struct/union token. */
3894 type1.t = u == VT_ENUM ? u | VT_INT | VT_UNSIGNED : u;
3895 type1.ref = NULL;
3896 /* we put an undefined size for struct/union */
3897 s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
3898 s->r = 0; /* default alignment is zero as gcc */
3899 do_decl:
3900 type->t = s->type.t;
3901 type->ref = s;
3903 if (tok == '{') {
3904 next();
3905 if (s->c != -1)
3906 tcc_error("struct/union/enum already defined");
3907 s->c = -2;
3908 /* cannot be empty */
3909 /* non empty enums are not allowed */
3910 ps = &s->next;
3911 if (u == VT_ENUM) {
3912 long long ll = 0, pl = 0, nl = 0;
3913 CType t;
3914 t.ref = s;
3915 /* enum symbols have static storage */
3916 t.t = VT_INT|VT_STATIC|VT_ENUM_VAL;
3917 for(;;) {
3918 v = tok;
3919 if (v < TOK_UIDENT)
3920 expect("identifier");
3921 ss = sym_find(v);
3922 if (ss && !local_stack)
3923 tcc_error("redefinition of enumerator '%s'",
3924 get_tok_str(v, NULL));
3925 next();
3926 if (tok == '=') {
3927 next();
3928 ll = expr_const64();
3930 ss = sym_push(v, &t, VT_CONST, 0);
3931 ss->enum_val = ll;
3932 *ps = ss, ps = &ss->next;
3933 if (ll < nl)
3934 nl = ll;
3935 if (ll > pl)
3936 pl = ll;
3937 if (tok != ',')
3938 break;
3939 next();
3940 ll++;
3941 /* NOTE: we accept a trailing comma */
3942 if (tok == '}')
3943 break;
3945 skip('}');
3946 /* set integral type of the enum */
3947 t.t = VT_INT;
3948 if (nl >= 0) {
3949 if (pl != (unsigned)pl)
3950 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
3951 t.t |= VT_UNSIGNED;
3952 } else if (pl != (int)pl || nl != (int)nl)
3953 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
3954 s->type.t = type->t = t.t | VT_ENUM;
3955 s->c = 0;
3956 /* set type for enum members */
3957 for (ss = s->next; ss; ss = ss->next) {
3958 ll = ss->enum_val;
3959 if (ll == (int)ll) /* default is int if it fits */
3960 continue;
3961 if (t.t & VT_UNSIGNED) {
3962 ss->type.t |= VT_UNSIGNED;
3963 if (ll == (unsigned)ll)
3964 continue;
3966 ss->type.t = (ss->type.t & ~VT_BTYPE)
3967 | (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
3969 } else {
3970 c = 0;
3971 flexible = 0;
3972 while (tok != '}') {
3973 if (!parse_btype(&btype, &ad1)) {
3974 skip(';');
3975 continue;
3977 while (1) {
3978 if (flexible)
3979 tcc_error("flexible array member '%s' not at the end of struct",
3980 get_tok_str(v, NULL));