jump optimizations
[tinycc.git] / tccgen.c
blob 38dc33f179d935b3e616fcce04ebe718dc23df1f
1 /*
2 * TCC - Tiny C Compiler
3 *
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 #include "tcc.h"
22 /********************************************************/
23 /* global variables */
25 /* loc : local variable index
26 ind : output code index
27 rsym: return symbol
28 anon_sym: anonymous symbol index
30 ST_DATA int rsym, anon_sym, ind, loc;
32 ST_DATA Sym *sym_free_first;
33 ST_DATA void **sym_pools;
34 ST_DATA int nb_sym_pools;
36 ST_DATA Sym *global_stack;
37 ST_DATA Sym *local_stack;
38 ST_DATA Sym *define_stack;
39 ST_DATA Sym *global_label_stack;
40 ST_DATA Sym *local_label_stack;
42 static Sym *all_cleanups, *current_cleanups, *pending_gotos;
43 static int ncleanups;
45 static int local_scope;
46 static int in_sizeof;
47 static int in_generic;
48 static int section_sym;
50 ST_DATA int vlas_in_scope; /* number of VLAs that are currently in scope */
51 ST_DATA int vla_sp_root_loc; /* vla_sp_loc for SP before any VLAs were pushed */
52 ST_DATA int vla_sp_loc; /* Pointer to variable holding location to store stack pointer on the stack when modifying stack pointer */
54 ST_DATA SValue __vstack[1+VSTACK_SIZE], *vtop, *pvtop;
56 ST_DATA int const_wanted; /* true if constant wanted */
57 ST_DATA int nocode_wanted; /* no code generation wanted */
58 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
59 #define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */
61 /* Automagical code suppression ----> */
62 #define CODE_OFF() (nocode_wanted |= 0x20000000)
63 #define CODE_ON() (nocode_wanted &= ~0x20000000)
65 /* Clear 'nocode_wanted' at label if it was used */
66 ST_FUNC void gsym(int t) { if (t) { gsym_addr(t, ind); CODE_ON(); }}
67 static int gind(void) { CODE_ON(); return ind; }
69 /* Set 'nocode_wanted' after unconditional jumps */
70 static void gjmp_addr_acs(int t) { gjmp_addr(t); CODE_OFF(); }
71 static int gjmp_acs(int t) { t = gjmp(t); CODE_OFF(); return t; }
73 /* These are #undef'd at the end of this file */
74 #define gjmp_addr gjmp_addr_acs
75 #define gjmp gjmp_acs
76 /* <---- */
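/* How these bits interact: tccgen_compile() starts with
   nocode_wanted = 0x80000000, i.e. "no code" while parsing at file scope
   (static data is still emitted there, since NODATA_WANTED tests for a
   strictly positive value while STATIC_DATA_WANTED accepts either top bit).
   The *_acs wrappers set the 0x20000000 bit right after an unconditional
   jump, so statically unreachable code is simply not generated; gsym()
   re-enables code generation when it binds a jump target that was actually
   used, and gind() does so whenever a new label position is taken.  The
   remaining low bits appear to be used elsewhere in this file as a plain
   nesting counter for regions that are dead for other reasons. */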
78 ST_DATA int global_expr; /* true if compound literals must be allocated globally (used during initializer parsing) */
79 ST_DATA CType func_vt; /* current function return type (used by return instruction) */
80 ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
81 ST_DATA int func_vc;
82 ST_DATA int last_line_num, last_ind, func_ind; /* debug last line number and pc */
83 ST_DATA const char *funcname;
84 ST_DATA int g_debug;
86 ST_DATA CType char_pointer_type, func_old_type, int_type, size_type, ptrdiff_type;
88 ST_DATA struct switch_t {
89 struct case_t {
90 int64_t v1, v2;
91 int sym;
92 } **p; int n; /* list of case ranges */
93 int def_sym; /* default symbol */
94 } *cur_switch; /* current switch */
96 #define MAX_TEMP_LOCAL_VARIABLE_NUMBER 0x4
97 /* list of temporary local variables on the stack of the current function */
98 ST_DATA struct temp_local_variable {
99 int location; /* offset on stack, as stored in SValue.c.i */
100 short size;
101 short align;
102 } arr_temp_local_vars[MAX_TEMP_LOCAL_VARIABLE_NUMBER];
103 short nb_temp_local_vars;
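/* These are spill slots: when save_reg_upstack() has to flush a register it
   asks get_temp_local_var() below for a slot of matching size and alignment,
   so a few slots get reused instead of growing 'loc' on every spill;
   clear_temp_local_var_list() simply resets the list. */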
105 /* ------------------------------------------------------------------------- */
107 static void gen_cast(CType *type);
108 static void gen_cast_s(int t);
109 static inline CType *pointed_type(CType *type);
110 static int is_compatible_types(CType *type1, CType *type2);
111 static int parse_btype(CType *type, AttributeDef *ad);
112 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td);
113 static void parse_expr_type(CType *type);
114 static void init_putv(CType *type, Section *sec, unsigned long c);
115 static void decl_initializer(CType *type, Section *sec, unsigned long c, int flags);
116 static void block(int *bsym, Sym *bcl, int *csym, Sym *ccl, int is_expr);
117 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
118 static void decl(int l);
119 static int decl0(int l, int is_for_loop_init, Sym *);
120 static void expr_eq(void);
121 static void vla_runtime_type_size(CType *type, int *a);
122 static void vla_sp_restore(void);
123 static void vla_sp_restore_root(void);
124 static int is_compatible_unqualified_types(CType *type1, CType *type2);
125 static inline int64_t expr_const64(void);
126 static void vpush64(int ty, unsigned long long v);
127 static void vpush(CType *type);
128 static int gvtst(int inv, int t);
129 static void gen_inline_functions(TCCState *s);
130 static void skip_or_save_block(TokenString **str);
131 static void gv_dup(void);
132 static int get_temp_local_var(int size,int align);
133 static void clear_temp_local_var_list();
136 static void reset_local_scope(void)
138 if (current_cleanups)
139 tcc_error("ICE current_cleanups");
140 sym_pop(&all_cleanups, NULL, 0);
141 local_scope = 0;
144 ST_INLN int is_float(int t)
146 int bt;
147 bt = t & VT_BTYPE;
148 return bt == VT_LDOUBLE || bt == VT_DOUBLE || bt == VT_FLOAT || bt == VT_QFLOAT;
151 /* we use our own 'finite' function to avoid potential problems with
152 non-standard math libs */
153 /* XXX: endianness dependent */
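/* How the bit trick below works (little-endian layout assumed, hence the
   XXX above): p[1] holds the sign bit (bit 31), the 11-bit exponent
   (bits 20..30) and the top of the mantissa.  OR-ing with 0x800fffff sets
   every bit except the exponent field; adding 1 then carries all the way
   into bit 31 only when the exponent is all ones (infinity or NaN),
   clearing it.  The final shift therefore yields 1 for finite values and
   0 otherwise. */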
154 ST_FUNC int ieee_finite(double d)
156 int p[4];
157 memcpy(p, &d, sizeof(double));
158 return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
161 /* compiling intel long double natively */
162 #if (defined __i386__ || defined __x86_64__) \
163 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
164 # define TCC_IS_NATIVE_387
165 #endif
167 ST_FUNC void test_lvalue(void)
169 if (!(vtop->r & VT_LVAL))
170 expect("lvalue");
173 ST_FUNC void check_vstack(void)
175 if (pvtop != vtop)
176 tcc_error("internal compiler error: vstack leak (%d)", vtop - pvtop);
179 /* ------------------------------------------------------------------------- */
180 /* vstack debugging aid */
182 #if 0
183 void pv (const char *lbl, int a, int b)
185 int i;
186 for (i = a; i < a + b; ++i) {
187 SValue *p = &vtop[-i];
188 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
189 lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
192 #endif
194 /* ------------------------------------------------------------------------- */
195 /* start of translation unit info */
196 ST_FUNC void tcc_debug_start(TCCState *s1)
198 if (s1->do_debug) {
199 char buf[512];
201 /* file info: full path + filename */
202 section_sym = put_elf_sym(symtab_section, 0, 0,
203 ELFW(ST_INFO)(STB_LOCAL, STT_SECTION), 0,
204 text_section->sh_num, NULL);
205 getcwd(buf, sizeof(buf));
206 #ifdef _WIN32
207 normalize_slashes(buf);
208 #endif
209 pstrcat(buf, sizeof(buf), "/");
210 put_stabs_r(buf, N_SO, 0, 0,
211 text_section->data_offset, text_section, section_sym);
212 put_stabs_r(file->filename, N_SO, 0, 0,
213 text_section->data_offset, text_section, section_sym);
214 last_ind = 0;
215 last_line_num = 0;
218 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
219 symbols can be safely used */
220 put_elf_sym(symtab_section, 0, 0,
221 ELFW(ST_INFO)(STB_LOCAL, STT_FILE), 0,
222 SHN_ABS, file->filename);
225 /* put end of translation unit info */
226 ST_FUNC void tcc_debug_end(TCCState *s1)
228 if (!s1->do_debug)
229 return;
230 put_stabs_r(NULL, N_SO, 0, 0,
231 text_section->data_offset, text_section, section_sym);
235 /* generate line number info */
236 ST_FUNC void tcc_debug_line(TCCState *s1)
238 if (!s1->do_debug)
239 return;
240 if ((last_line_num != file->line_num || last_ind != ind)) {
241 put_stabn(N_SLINE, 0, file->line_num, ind - func_ind);
242 last_ind = ind;
243 last_line_num = file->line_num;
247 /* put function symbol */
248 ST_FUNC void tcc_debug_funcstart(TCCState *s1, Sym *sym)
250 char buf[512];
252 if (!s1->do_debug)
253 return;
255 /* stabs info */
256 /* XXX: we put here a dummy type */
257 snprintf(buf, sizeof(buf), "%s:%c1",
258 funcname, sym->type.t & VT_STATIC ? 'f' : 'F');
259 put_stabs_r(buf, N_FUN, 0, file->line_num, 0,
260 cur_text_section, sym->c);
261 /* //gr gdb wants a line at the function */
262 put_stabn(N_SLINE, 0, file->line_num, 0);
264 last_ind = 0;
265 last_line_num = 0;
268 /* put function size */
269 ST_FUNC void tcc_debug_funcend(TCCState *s1, int size)
271 if (!s1->do_debug)
272 return;
273 put_stabn(N_FUN, 0, 0, size);
276 /* ------------------------------------------------------------------------- */
277 ST_FUNC int tccgen_compile(TCCState *s1)
279 cur_text_section = NULL;
280 funcname = "";
281 anon_sym = SYM_FIRST_ANOM;
282 section_sym = 0;
283 const_wanted = 0;
284 nocode_wanted = 0x80000000;
285 local_scope = 0;
287 /* define some often used types */
288 int_type.t = VT_INT;
289 char_pointer_type.t = VT_BYTE;
290 mk_pointer(&char_pointer_type);
291 #if PTR_SIZE == 4
292 size_type.t = VT_INT | VT_UNSIGNED;
293 ptrdiff_type.t = VT_INT;
294 #elif LONG_SIZE == 4
295 size_type.t = VT_LLONG | VT_UNSIGNED;
296 ptrdiff_type.t = VT_LLONG;
297 #else
298 size_type.t = VT_LONG | VT_LLONG | VT_UNSIGNED;
299 ptrdiff_type.t = VT_LONG | VT_LLONG;
300 #endif
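/* The three cases above: plain 32-bit targets get a 32-bit unsigned size_t;
   PTR_SIZE == 8 with LONG_SIZE == 4 covers LLP64 targets such as Win64,
   where size_t is unsigned long long; the remaining (LP64) targets use
   unsigned long, with VT_LONG preserving the 'long' spelling on top of the
   VT_LLONG width. */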
301 func_old_type.t = VT_FUNC;
302 func_old_type.ref = sym_push(SYM_FIELD, &int_type, 0, 0);
303 func_old_type.ref->f.func_call = FUNC_CDECL;
304 func_old_type.ref->f.func_type = FUNC_OLD;
306 tcc_debug_start(s1);
308 #ifdef TCC_TARGET_ARM
309 arm_init(s1);
310 #endif
312 #ifdef INC_DEBUG
313 printf("%s: **** new file\n", file->filename);
314 #endif
316 parse_flags = PARSE_FLAG_PREPROCESS | PARSE_FLAG_TOK_NUM | PARSE_FLAG_TOK_STR;
317 next();
318 decl(VT_CONST);
319 gen_inline_functions(s1);
320 check_vstack();
321 /* end of translation unit info */
322 tcc_debug_end(s1);
323 return 0;
326 /* ------------------------------------------------------------------------- */
327 ST_FUNC ElfSym *elfsym(Sym *s)
329 if (!s || !s->c)
330 return NULL;
331 return &((ElfSym *)symtab_section->data)[s->c];
334 /* apply storage attributes to Elf symbol */
335 ST_FUNC void update_storage(Sym *sym)
337 ElfSym *esym;
338 int sym_bind, old_sym_bind;
340 esym = elfsym(sym);
341 if (!esym)
342 return;
344 if (sym->a.visibility)
345 esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1))
346 | sym->a.visibility;
348 if (sym->type.t & (VT_STATIC | VT_INLINE))
349 sym_bind = STB_LOCAL;
350 else if (sym->a.weak)
351 sym_bind = STB_WEAK;
352 else
353 sym_bind = STB_GLOBAL;
354 old_sym_bind = ELFW(ST_BIND)(esym->st_info);
355 if (sym_bind != old_sym_bind) {
356 esym->st_info = ELFW(ST_INFO)(sym_bind, ELFW(ST_TYPE)(esym->st_info));
359 #ifdef TCC_TARGET_PE
360 if (sym->a.dllimport)
361 esym->st_other |= ST_PE_IMPORT;
362 if (sym->a.dllexport)
363 esym->st_other |= ST_PE_EXPORT;
364 #endif
366 #if 0
367 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
368 get_tok_str(sym->v, NULL),
369 sym_bind == STB_WEAK ? 'w' : sym_bind == STB_LOCAL ? 'l' : 'g',
370 sym->a.visibility,
371 sym->a.dllexport,
372 sym->a.dllimport
374 #endif
377 /* ------------------------------------------------------------------------- */
378 /* update sym->c so that it points to an external symbol in section
379 'section' with value 'value' */
381 ST_FUNC void put_extern_sym2(Sym *sym, int sh_num,
382 addr_t value, unsigned long size,
383 int can_add_underscore)
385 int sym_type, sym_bind, info, other, t;
386 ElfSym *esym;
387 const char *name;
388 char buf1[256];
389 #ifdef CONFIG_TCC_BCHECK
390 char buf[32];
391 #endif
393 if (!sym->c) {
394 name = get_tok_str(sym->v, NULL);
395 #ifdef CONFIG_TCC_BCHECK
396 if (tcc_state->do_bounds_check) {
397 /* XXX: avoid doing that for statics ? */
398 /* if bound checking is activated, we change some function
399 names by adding the "__bound" prefix */
400 switch(sym->v) {
401 #ifdef TCC_TARGET_PE
402 /* XXX: we rely only on malloc hooks */
403 case TOK_malloc:
404 case TOK_free:
405 case TOK_realloc:
406 case TOK_memalign:
407 case TOK_calloc:
408 #endif
409 case TOK_memcpy:
410 case TOK_memmove:
411 case TOK_memset:
412 case TOK_strlen:
413 case TOK_strcpy:
414 case TOK_alloca:
415 strcpy(buf, "__bound_");
416 strcat(buf, name);
417 name = buf;
418 break;
421 #endif
422 t = sym->type.t;
423 if ((t & VT_BTYPE) == VT_FUNC) {
424 sym_type = STT_FUNC;
425 } else if ((t & VT_BTYPE) == VT_VOID) {
426 sym_type = STT_NOTYPE;
427 } else {
428 sym_type = STT_OBJECT;
430 if (t & (VT_STATIC | VT_INLINE))
431 sym_bind = STB_LOCAL;
432 else
433 sym_bind = STB_GLOBAL;
434 other = 0;
435 #ifdef TCC_TARGET_PE
436 if (sym_type == STT_FUNC && sym->type.ref) {
437 Sym *ref = sym->type.ref;
438 if (ref->a.nodecorate) {
439 can_add_underscore = 0;
441 if (ref->f.func_call == FUNC_STDCALL && can_add_underscore) {
442 sprintf(buf1, "_%s@%d", name, ref->f.func_args * PTR_SIZE);
443 name = buf1;
444 other |= ST_PE_STDCALL;
445 can_add_underscore = 0;
448 #endif
449 if (tcc_state->leading_underscore && can_add_underscore) {
450 buf1[0] = '_';
451 pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
452 name = buf1;
454 if (sym->asm_label)
455 name = get_tok_str(sym->asm_label, NULL);
456 info = ELFW(ST_INFO)(sym_bind, sym_type);
457 sym->c = put_elf_sym(symtab_section, value, size, info, other, sh_num, name);
458 } else {
459 esym = elfsym(sym);
460 esym->st_value = value;
461 esym->st_size = size;
462 esym->st_shndx = sh_num;
464 update_storage(sym);
467 ST_FUNC void put_extern_sym(Sym *sym, Section *section,
468 addr_t value, unsigned long size)
470 int sh_num = section ? section->sh_num : SHN_UNDEF;
471 put_extern_sym2(sym, sh_num, value, size, 1);
474 /* add a new relocation entry to symbol 'sym' in section 's' */
475 ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
476 addr_t addend)
478 int c = 0;
480 if (nocode_wanted && s == cur_text_section)
481 return;
483 if (sym) {
484 if (0 == sym->c)
485 put_extern_sym(sym, NULL, 0, 0);
486 c = sym->c;
489 /* now we can add ELF relocation info */
490 put_elf_reloca(symtab_section, s, offset, type, c, addend);
493 #if PTR_SIZE == 4
494 ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
496 greloca(s, sym, offset, type, 0);
498 #endif
500 /* ------------------------------------------------------------------------- */
501 /* symbol allocator */
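/* Syms are allocated from pools of SYM_POOL_NB entries: __sym_malloc()
   grabs a fresh pool, records it in sym_pools (so the pools can be freed
   later) and threads its entries onto the sym_free_first free list;
   sym_malloc() pops from that list and sym_free() pushes back onto it.
   With SYM_DEBUG defined each Sym is instead malloc'd/free'd individually,
   which is slower but keeps every allocation visible to memory debuggers. */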
502 static Sym *__sym_malloc(void)
504 Sym *sym_pool, *sym, *last_sym;
505 int i;
507 sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
508 dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);
510 last_sym = sym_free_first;
511 sym = sym_pool;
512 for(i = 0; i < SYM_POOL_NB; i++) {
513 sym->next = last_sym;
514 last_sym = sym;
515 sym++;
517 sym_free_first = last_sym;
518 return last_sym;
521 static inline Sym *sym_malloc(void)
523 Sym *sym;
524 #ifndef SYM_DEBUG
525 sym = sym_free_first;
526 if (!sym)
527 sym = __sym_malloc();
528 sym_free_first = sym->next;
529 return sym;
530 #else
531 sym = tcc_malloc(sizeof(Sym));
532 return sym;
533 #endif
536 ST_INLN void sym_free(Sym *sym)
538 #ifndef SYM_DEBUG
539 sym->next = sym_free_first;
540 sym_free_first = sym;
541 #else
542 tcc_free(sym);
543 #endif
546 /* push, without hashing */
547 ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, int c)
549 Sym *s;
551 s = sym_malloc();
552 memset(s, 0, sizeof *s);
553 s->v = v;
554 s->type.t = t;
555 s->c = c;
556 /* add in stack */
557 s->prev = *ps;
558 *ps = s;
559 return s;
562 /* find a symbol and return its associated structure. 's' is the top
563 of the symbol stack */
564 ST_FUNC Sym *sym_find2(Sym *s, int v)
566 while (s) {
567 if (s->v == v)
568 return s;
569 else if (s->v == -1)
570 return NULL;
571 s = s->prev;
573 return NULL;
576 /* structure lookup */
577 ST_INLN Sym *struct_find(int v)
579 v -= TOK_IDENT;
580 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
581 return NULL;
582 return table_ident[v]->sym_struct;
585 /* find an identifier */
586 ST_INLN Sym *sym_find(int v)
588 v -= TOK_IDENT;
589 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
590 return NULL;
591 return table_ident[v]->sym_identifier;
594 static int sym_scope(Sym *s)
596 if (IS_ENUM_VAL (s->type.t))
597 return s->type.ref->sym_scope;
598 else
599 return s->sym_scope;
602 /* push a given symbol on the symbol stack */
603 ST_FUNC Sym *sym_push(int v, CType *type, int r, int c)
605 Sym *s, **ps;
606 TokenSym *ts;
608 if (local_stack)
609 ps = &local_stack;
610 else
611 ps = &global_stack;
612 s = sym_push2(ps, v, type->t, c);
613 s->type.ref = type->ref;
614 s->r = r;
615 /* don't record fields or anonymous symbols */
616 /* XXX: simplify */
617 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
618 /* record symbol in token array */
619 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
620 if (v & SYM_STRUCT)
621 ps = &ts->sym_struct;
622 else
623 ps = &ts->sym_identifier;
624 s->prev_tok = *ps;
625 *ps = s;
626 s->sym_scope = local_scope;
627 if (s->prev_tok && sym_scope(s->prev_tok) == s->sym_scope)
628 tcc_error("redeclaration of '%s'",
629 get_tok_str(v & ~SYM_STRUCT, NULL));
631 return s;
634 /* push a global identifier */
635 ST_FUNC Sym *global_identifier_push(int v, int t, int c)
637 Sym *s, **ps;
638 s = sym_push2(&global_stack, v, t, c);
639 s->r = VT_CONST | VT_SYM;
640 /* don't record anonymous symbol */
641 if (v < SYM_FIRST_ANOM) {
642 ps = &table_ident[v - TOK_IDENT]->sym_identifier;
643 /* modify the top most local identifier, so that sym_identifier will
644 point to 's' when popped; happens when called from inline asm */
645 while (*ps != NULL && (*ps)->sym_scope)
646 ps = &(*ps)->prev_tok;
647 s->prev_tok = *ps;
648 *ps = s;
650 return s;
653 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
654 pop them yet from the list, but do remove them from the token array. */
655 ST_FUNC void sym_pop(Sym **ptop, Sym *b, int keep)
657 Sym *s, *ss, **ps;
658 TokenSym *ts;
659 int v;
661 s = *ptop;
662 while(s != b) {
663 ss = s->prev;
664 v = s->v;
665 /* remove symbol in token array */
666 /* XXX: simplify */
667 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
668 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
669 if (v & SYM_STRUCT)
670 ps = &ts->sym_struct;
671 else
672 ps = &ts->sym_identifier;
673 *ps = s->prev_tok;
675 if (!keep)
676 sym_free(s);
677 s = ss;
679 if (!keep)
680 *ptop = b;
683 /* ------------------------------------------------------------------------- */
684 static void vcheck_cmp(void)
686 /* we cannot leave CPU flags set if other instructions are generated. Also
687 avoid leaving VT_JMP anywhere except on the top of the stack
688 because it would complicate the code generator.
690 Don't do this when nocode_wanted. vtop might come from
691 !nocode_wanted regions (see 88_codeopt.c) and transforming
692 it to a register without actually generating code is wrong
693 as its value might still be used for real. All values
694 we push under nocode_wanted will eventually be popped
695 again, so that the VT_CMP/VT_JMP value will be in vtop
696 when code is unsuppressed again. */
698 if (vtop->r == VT_CMP && !nocode_wanted)
699 gv(RC_INT);
702 static void vsetc(CType *type, int r, CValue *vc)
704 if (vtop >= vstack + (VSTACK_SIZE - 1))
705 tcc_error("memory full (vstack)");
706 vcheck_cmp();
707 vtop++;
708 vtop->type = *type;
709 vtop->r = r;
710 vtop->r2 = VT_CONST;
711 vtop->c = *vc;
712 vtop->sym = NULL;
715 ST_FUNC void vswap(void)
717 SValue tmp;
719 vcheck_cmp();
720 tmp = vtop[0];
721 vtop[0] = vtop[-1];
722 vtop[-1] = tmp;
725 /* pop stack value */
726 ST_FUNC void vpop(void)
728 int v;
729 v = vtop->r & VT_VALMASK;
730 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
731 /* for x86, we need to pop the FP stack */
732 if (v == TREG_ST0) {
733 o(0xd8dd); /* fstp %st(0) */
734 } else
735 #endif
736 if (v == VT_CMP) {
737 /* need to put correct jump if && or || without test */
738 gsym(vtop->jtrue);
739 gsym(vtop->jfalse);
741 vtop--;
744 /* push a constant of type "type" with an unspecified value */
745 ST_FUNC void vpush(CType *type)
747 vset(type, VT_CONST, 0);
750 /* push integer constant */
751 ST_FUNC void vpushi(int v)
753 CValue cval;
754 cval.i = v;
755 vsetc(&int_type, VT_CONST, &cval);
758 /* push a pointer sized constant */
759 static void vpushs(addr_t v)
761 CValue cval;
762 cval.i = v;
763 vsetc(&size_type, VT_CONST, &cval);
766 /* push arbitrary 64bit constant */
767 ST_FUNC void vpush64(int ty, unsigned long long v)
769 CValue cval;
770 CType ctype;
771 ctype.t = ty;
772 ctype.ref = NULL;
773 cval.i = v;
774 vsetc(&ctype, VT_CONST, &cval);
777 /* push long long constant */
778 static inline void vpushll(long long v)
780 vpush64(VT_LLONG, v);
783 ST_FUNC void vset(CType *type, int r, int v)
785 CValue cval;
787 cval.i = v;
788 vsetc(type, r, &cval);
791 static void vseti(int r, int v)
793 CType type;
794 type.t = VT_INT;
795 type.ref = NULL;
796 vset(&type, r, v);
799 ST_FUNC void vpushv(SValue *v)
801 if (vtop >= vstack + (VSTACK_SIZE - 1))
802 tcc_error("memory full (vstack)");
803 vtop++;
804 *vtop = *v;
807 static void vdup(void)
809 vpushv(vtop);
812 /* rotate the first n stack elements to the bottom
813 I1 ... In -> I2 ... In I1 [top is right]
815 ST_FUNC void vrotb(int n)
817 int i;
818 SValue tmp;
820 vcheck_cmp();
821 tmp = vtop[-n + 1];
822 for(i=-n+1;i!=0;i++)
823 vtop[i] = vtop[i+1];
824 vtop[0] = tmp;
827 /* rotate the n elements before entry e towards the top
828 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
830 ST_FUNC void vrote(SValue *e, int n)
832 int i;
833 SValue tmp;
835 vcheck_cmp();
836 tmp = *e;
837 for(i = 0;i < n - 1; i++)
838 e[-i] = e[-i - 1];
839 e[-n + 1] = tmp;
842 /* rotate the first n stack elements to the top
843 I1 ... In -> In I1 ... I(n-1) [top is right]
845 ST_FUNC void vrott(int n)
847 vrote(vtop, n);
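/* Example with three entries A B C on the value stack (top is rightmost):
   vrotb(3) gives B C A, i.e. the deepest of the three ends up on top, while
   vrott(3) gives C A B, i.e. the old top ends up deepest.  vrote() is the
   general form working on the n entries ending at 'e'. */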
850 /* ------------------------------------------------------------------------- */
851 /* vtop->r = VT_CMP means CPU-flags have been set from comparison or test. */
853 /* called from generators to set the result from relational ops */
854 ST_FUNC void vset_VT_CMP(int op)
856 vtop->r = VT_CMP;
857 vtop->cmp_op = op;
858 vtop->jfalse = 0;
859 vtop->jtrue = 0;
862 /* called once before asking generators to load VT_CMP to a register */
863 static void vset_VT_JMP(void)
865 int op = vtop->cmp_op;
866 if (vtop->jtrue || vtop->jfalse) {
867 /* we need to jump to 'mov $0,%R' or 'mov $1,%R' */
868 int inv = op & (op < 2); /* small optimization */
869 vseti(VT_JMP+inv, gvtst(inv, 0));
870 } else {
871 /* otherwise convert flags (resp. 0/1) to register */
872 vtop->c.i = op;
873 if (op < 2) /* doesn't seem to happen */
874 vtop->r = VT_CONST;
878 /* Set CPU flags; doesn't jump yet */
879 static void gvtst_set(int inv, int t)
881 int *p;
882 if (vtop->r != VT_CMP) {
883 vpushi(0);
884 gen_op(TOK_NE);
885 if (vtop->r != VT_CMP) /* must be VT_CONST then */
886 vset_VT_CMP(vtop->c.i != 0);
888 p = inv ? &vtop->jfalse : &vtop->jtrue;
889 *p = gjmp_append(*p, t);
892 /* Generate value test
894 * Generate a test for any value (jump, comparison and integers) */
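/* More precisely: gvtst_set() forces vtop into VT_CMP (comparing against 0
   with TOK_NE when needed) and appends 't' to the pending jtrue/jfalse
   chain; gvtst() then emits the jump itself and returns the resulting
   chain: the returned jumps are taken when the value is true if inv == 0,
   or when it is false if inv != 0.  The complementary chain (collected from
   '&&'/'||' shortcuts) is resolved to fall through right here, and vtop is
   popped. */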
895 static int gvtst(int inv, int t)
897 int op, u, x;
899 gvtst_set(inv, t);
901 t = vtop->jtrue, u = vtop->jfalse;
902 if (inv)
903 x = u, u = t, t = x;
904 op = vtop->cmp_op;
906 /* jump to the wanted target */
907 if (op > 1)
908 t = gjmp_cond(op ^ inv, t);
909 else if (op != inv)
910 t = gjmp(t);
911 /* resolve complementary jumps to here */
912 gsym(u);
914 vtop--;
915 return t;
918 /* ------------------------------------------------------------------------- */
919 /* push a symbol value of TYPE */
920 static inline void vpushsym(CType *type, Sym *sym)
922 CValue cval;
923 cval.i = 0;
924 vsetc(type, VT_CONST | VT_SYM, &cval);
925 vtop->sym = sym;
928 /* Return a static symbol pointing to a section */
929 ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
931 int v;
932 Sym *sym;
934 v = anon_sym++;
935 sym = sym_push(v, type, VT_CONST | VT_SYM, 0);
936 sym->type.t |= VT_STATIC;
937 put_extern_sym(sym, sec, offset, size);
938 return sym;
941 /* push a reference to a section offset by adding a dummy symbol */
942 static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
944 vpushsym(type, get_sym_ref(type, sec, offset, size));
947 /* define a new external reference to a symbol 'v' of type 'u' */
948 ST_FUNC Sym *external_global_sym(int v, CType *type)
950 Sym *s;
952 s = sym_find(v);
953 if (!s) {
954 /* push forward reference */
955 s = global_identifier_push(v, type->t | VT_EXTERN, 0);
956 s->type.ref = type->ref;
957 } else if (IS_ASM_SYM(s)) {
958 s->type.t = type->t | (s->type.t & VT_EXTERN);
959 s->type.ref = type->ref;
960 update_storage(s);
962 return s;
965 /* Merge symbol attributes. */
966 static void merge_symattr(struct SymAttr *sa, struct SymAttr *sa1)
968 if (sa1->aligned && !sa->aligned)
969 sa->aligned = sa1->aligned;
970 sa->packed |= sa1->packed;
971 sa->weak |= sa1->weak;
972 if (sa1->visibility != STV_DEFAULT) {
973 int vis = sa->visibility;
974 if (vis == STV_DEFAULT
975 || vis > sa1->visibility)
976 vis = sa1->visibility;
977 sa->visibility = vis;
979 sa->dllexport |= sa1->dllexport;
980 sa->nodecorate |= sa1->nodecorate;
981 sa->dllimport |= sa1->dllimport;
984 /* Merge function attributes. */
985 static void merge_funcattr(struct FuncAttr *fa, struct FuncAttr *fa1)
987 if (fa1->func_call && !fa->func_call)
988 fa->func_call = fa1->func_call;
989 if (fa1->func_type && !fa->func_type)
990 fa->func_type = fa1->func_type;
991 if (fa1->func_args && !fa->func_args)
992 fa->func_args = fa1->func_args;
995 /* Merge attributes. */
996 static void merge_attr(AttributeDef *ad, AttributeDef *ad1)
998 merge_symattr(&ad->a, &ad1->a);
999 merge_funcattr(&ad->f, &ad1->f);
1001 if (ad1->section)
1002 ad->section = ad1->section;
1003 if (ad1->alias_target)
1004 ad->alias_target = ad1->alias_target;
1005 if (ad1->asm_label)
1006 ad->asm_label = ad1->asm_label;
1007 if (ad1->attr_mode)
1008 ad->attr_mode = ad1->attr_mode;
1011 /* Merge some type attributes. */
1012 static void patch_type(Sym *sym, CType *type)
1014 if (!(type->t & VT_EXTERN) || IS_ENUM_VAL(sym->type.t)) {
1015 if (!(sym->type.t & VT_EXTERN))
1016 tcc_error("redefinition of '%s'", get_tok_str(sym->v, NULL));
1017 sym->type.t &= ~VT_EXTERN;
1020 if (IS_ASM_SYM(sym)) {
1021 /* stay static if both are static */
1022 sym->type.t = type->t & (sym->type.t | ~VT_STATIC);
1023 sym->type.ref = type->ref;
1026 if (!is_compatible_types(&sym->type, type)) {
1027 tcc_error("incompatible types for redefinition of '%s'",
1028 get_tok_str(sym->v, NULL));
1030 } else if ((sym->type.t & VT_BTYPE) == VT_FUNC) {
1031 int static_proto = sym->type.t & VT_STATIC;
1032 /* warn if static follows non-static function declaration */
1033 if ((type->t & VT_STATIC) && !static_proto
1034 /* XXX this test for inline shouldn't be here. Until we
1035 implement gnu-inline mode again it silences a warning for
1036 mingw caused by our workarounds. */
1037 && !((type->t | sym->type.t) & VT_INLINE))
1038 tcc_warning("static storage ignored for redefinition of '%s'",
1039 get_tok_str(sym->v, NULL));
1041 /* set 'inline' if both agree or if one has static */
1042 if ((type->t | sym->type.t) & VT_INLINE) {
1043 if (!((type->t ^ sym->type.t) & VT_INLINE)
1044 || ((type->t | sym->type.t) & VT_STATIC))
1045 static_proto |= VT_INLINE;
1048 if (0 == (type->t & VT_EXTERN)) {
1049 /* put complete type, use static from prototype */
1050 sym->type.t = (type->t & ~(VT_STATIC|VT_INLINE)) | static_proto;
1051 sym->type.ref = type->ref;
1052 } else {
1053 sym->type.t &= ~VT_INLINE | static_proto;
1056 if (sym->type.ref->f.func_type == FUNC_OLD
1057 && type->ref->f.func_type != FUNC_OLD) {
1058 sym->type.ref = type->ref;
1061 } else {
1062 if ((sym->type.t & VT_ARRAY) && type->ref->c >= 0) {
1063 /* set array size if it was omitted in extern declaration */
1064 sym->type.ref->c = type->ref->c;
1066 if ((type->t ^ sym->type.t) & VT_STATIC)
1067 tcc_warning("storage mismatch for redefinition of '%s'",
1068 get_tok_str(sym->v, NULL));
1072 /* Merge some storage attributes. */
1073 static void patch_storage(Sym *sym, AttributeDef *ad, CType *type)
1075 if (type)
1076 patch_type(sym, type);
1078 #ifdef TCC_TARGET_PE
1079 if (sym->a.dllimport != ad->a.dllimport)
1080 tcc_error("incompatible dll linkage for redefinition of '%s'",
1081 get_tok_str(sym->v, NULL));
1082 #endif
1083 merge_symattr(&sym->a, &ad->a);
1084 if (ad->asm_label)
1085 sym->asm_label = ad->asm_label;
1086 update_storage(sym);
1089 /* copy sym to other stack */
1090 static Sym *sym_copy(Sym *s0, Sym **ps)
1092 Sym *s;
1093 s = sym_malloc(), *s = *s0;
1094 s->prev = *ps, *ps = s;
1095 if (s->v < SYM_FIRST_ANOM) {
1096 ps = &table_ident[s->v - TOK_IDENT]->sym_identifier;
1097 s->prev_tok = *ps, *ps = s;
1099 return s;
1102 /* copy a list of syms */
1103 static void sym_copy_ref(Sym *s0, Sym **ps)
1105 Sym *s, **sp = &s0->type.ref;
1106 for (s = *sp, *sp = NULL; s; s = s->next)
1107 sp = &(*sp = sym_copy(s, ps))->next;
1110 /* define a new external reference to a symbol 'v' */
1111 static Sym *external_sym(int v, CType *type, int r, AttributeDef *ad)
1113 Sym *s; int bt;
1115 /* look for global symbol */
1116 s = sym_find(v);
1117 while (s && s->sym_scope)
1118 s = s->prev_tok;
1120 if (!s) {
1121 /* push forward reference */
1122 s = global_identifier_push(v, type->t, 0);
1123 s->r |= r;
1124 s->a = ad->a;
1125 s->asm_label = ad->asm_label;
1126 s->type.ref = type->ref;
1127 bt = s->type.t & (VT_BTYPE|VT_ARRAY);
1128 /* copy type to the global stack also */
1129 if (local_scope && (bt == VT_FUNC || (bt & VT_ARRAY)))
1130 sym_copy_ref(s, &global_stack);
1131 } else {
1132 patch_storage(s, ad, type);
1133 bt = s->type.t & VT_BTYPE;
1135 /* push variables to local scope if any */
1136 if (local_stack && bt != VT_FUNC)
1137 s = sym_copy(s, &local_stack);
1138 return s;
1141 /* push a reference to global symbol v */
1142 ST_FUNC void vpush_global_sym(CType *type, int v)
1144 vpushsym(type, external_global_sym(v, type));
1147 /* save registers up to (vtop - n) stack entry */
1148 ST_FUNC void save_regs(int n)
1150 SValue *p, *p1;
1151 for(p = vstack, p1 = vtop - n; p <= p1; p++)
1152 save_reg(p->r);
1155 /* save r to the memory stack, and mark it as being free */
1156 ST_FUNC void save_reg(int r)
1158 save_reg_upstack(r, 0);
1161 /* save r to the memory stack, and mark it as being free,
1162 if seen up to (vtop - n) stack entry */
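/* In detail: the first entry found that uses 'r' (directly or as the second
   half of a long long via r2) has its value stored into a temporary stack
   slot obtained from get_temp_local_var(); every entry up to (vtop - n)
   that refers to 'r' is then rewritten to point at that slot: plain
   register values become VT_LOCAL lvalues, values that were already
   lvalues (the register held an address) become VT_LLOCAL so the address
   gets reloaded before use. */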
1163 ST_FUNC void save_reg_upstack(int r, int n)
1165 int l, saved, size, align;
1166 SValue *p, *p1, sv;
1167 CType *type;
1169 if ((r &= VT_VALMASK) >= VT_CONST)
1170 return;
1171 if (nocode_wanted)
1172 return;
1174 /* modify all stack values */
1175 saved = 0;
1176 l = 0;
1177 for(p = vstack, p1 = vtop - n; p <= p1; p++) {
1178 if ((p->r & VT_VALMASK) == r ||
1179 ((p->type.t & VT_BTYPE) == VT_LLONG && (p->r2 & VT_VALMASK) == r)) {
1180 /* must save value on stack if not already done */
1181 if (!saved) {
1182 /* NOTE: must reload 'r' because r might be equal to r2 */
1183 r = p->r & VT_VALMASK;
1184 /* store register in the stack */
1185 type = &p->type;
1186 if ((p->r & VT_LVAL) ||
1187 (!is_float(type->t) && (type->t & VT_BTYPE) != VT_LLONG))
1188 #if PTR_SIZE == 8
1189 type = &char_pointer_type;
1190 #else
1191 type = &int_type;
1192 #endif
1193 size = type_size(type, &align);
1194 l=get_temp_local_var(size,align);
1195 sv.type.t = type->t;
1196 sv.r = VT_LOCAL | VT_LVAL;
1197 sv.c.i = l;
1198 store(r, &sv);
1199 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1200 /* x86 specific: need to pop fp register ST0 if saved */
1201 if (r == TREG_ST0) {
1202 o(0xd8dd); /* fstp %st(0) */
1204 #endif
1205 #if PTR_SIZE == 4
1206 /* special long long case */
1207 if ((type->t & VT_BTYPE) == VT_LLONG) {
1208 sv.c.i += 4;
1209 store(p->r2, &sv);
1211 #endif
1212 saved = 1;
1214 /* mark that stack entry as being saved on the stack */
1215 if (p->r & VT_LVAL) {
1216 /* also clear the bounded flag because the
1217 relocation address of the function was stored in
1218 p->c.i */
1219 p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
1220 } else {
1221 p->r = lvalue_type(p->type.t) | VT_LOCAL;
1223 p->r2 = VT_CONST;
1224 p->c.i = l;
1229 #ifdef TCC_TARGET_ARM
1230 /* find a register of class 'rc2' with at most one reference on stack.
1231 * If none, call get_reg(rc) */
1232 ST_FUNC int get_reg_ex(int rc, int rc2)
1234 int r;
1235 SValue *p;
1237 for(r=0;r<NB_REGS;r++) {
1238 if (reg_classes[r] & rc2) {
1239 int n;
1240 n=0;
1241 for(p = vstack; p <= vtop; p++) {
1242 if ((p->r & VT_VALMASK) == r ||
1243 (p->r2 & VT_VALMASK) == r)
1244 n++;
1246 if (n <= 1)
1247 return r;
1250 return get_reg(rc);
1252 #endif
1254 /* find a free register of class 'rc'. If none, save one register */
1255 ST_FUNC int get_reg(int rc)
1257 int r;
1258 SValue *p;
1260 /* find a free register */
1261 for(r=0;r<NB_REGS;r++) {
1262 if (reg_classes[r] & rc) {
1263 if (nocode_wanted)
1264 return r;
1265 for(p=vstack;p<=vtop;p++) {
1266 if ((p->r & VT_VALMASK) == r ||
1267 (p->r2 & VT_VALMASK) == r)
1268 goto notfound;
1270 return r;
1272 notfound: ;
1275 /* no register left : free the first one on the stack (VERY
1276 IMPORTANT to start from the bottom to ensure that we don't
1277 spill registers used in gen_opi()) */
1278 for(p=vstack;p<=vtop;p++) {
1279 /* look at second register (if long long) */
1280 r = p->r2 & VT_VALMASK;
1281 if (r < VT_CONST && (reg_classes[r] & rc))
1282 goto save_found;
1283 r = p->r & VT_VALMASK;
1284 if (r < VT_CONST && (reg_classes[r] & rc)) {
1285 save_found:
1286 save_reg(r);
1287 return r;
1290 /* Should never come here */
1291 return -1;
1294 /* find a free temporary local variable (return its offset on the stack) matching the given size and alignment. If none is free, add a new temporary stack variable. */
1295 static int get_temp_local_var(int size, int align) {
1296 int i;
1297 struct temp_local_variable *temp_var;
1298 int found_var;
1299 SValue *p;
1300 int r;
1301 char free;
1302 char found;
1303 found = 0;
1304 for (i = 0; i < nb_temp_local_vars; i++) {
1305 temp_var = &arr_temp_local_vars[i];
1306 if (temp_var->size < size || align != temp_var->align) {
1307 continue;
1309 /* check if temp_var is free */
1310 free = 1;
1311 for (p = vstack; p <= vtop; p++) {
1312 r = p->r & VT_VALMASK;
1313 if (r == VT_LOCAL || r == VT_LLOCAL) {
1314 if (p->c.i == temp_var->location) {
1315 free = 0;
1316 break;
1320 if (free) {
1321 found_var = temp_var->location;
1322 found = 1;
1323 break;
1326 if (!found) {
1327 loc = (loc - size) & -align;
1328 if (nb_temp_local_vars < MAX_TEMP_LOCAL_VARIABLE_NUMBER) {
1329 temp_var = &arr_temp_local_vars[i];
1330 temp_var->location = loc;
1331 temp_var->size = size;
1332 temp_var->align = align;
1333 nb_temp_local_vars++;
1335 found_var = loc;
1337 return found_var;
1340 static void clear_temp_local_var_list() {
1341 nb_temp_local_vars = 0;
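/* A slot counts as free when no value currently on the vstack (a VT_LOCAL
   or VT_LLOCAL entry with a matching c.i offset) still refers to it, so
   slots with a compatible size and the same alignment are shared between
   successive save_reg() calls.  Only MAX_TEMP_LOCAL_VARIABLE_NUMBER slots
   are remembered for reuse; once the array is full, additional spills still
   get fresh stack space via 'loc' but are not recorded. */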
1344 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1345 if needed */
1346 static void move_reg(int r, int s, int t)
1348 SValue sv;
1350 if (r != s) {
1351 save_reg(r);
1352 sv.type.t = t;
1353 sv.type.ref = NULL;
1354 sv.r = s;
1355 sv.c.i = 0;
1356 load(r, &sv);
1360 /* get address of vtop (vtop MUST BE an lvalue) */
1361 ST_FUNC void gaddrof(void)
1363 vtop->r &= ~VT_LVAL;
1364 /* tricky: if saved lvalue, then we can go back to lvalue */
1365 if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
1366 vtop->r = (vtop->r & ~(VT_VALMASK | VT_LVAL_TYPE)) | VT_LOCAL | VT_LVAL;
1371 #ifdef CONFIG_TCC_BCHECK
1372 /* generate lvalue bound code */
1373 static void gbound(void)
1375 int lval_type;
1376 CType type1;
1378 vtop->r &= ~VT_MUSTBOUND;
1379 /* if lvalue, then use checking code before dereferencing */
1380 if (vtop->r & VT_LVAL) {
1381 /* if not VT_BOUNDED value, then make one */
1382 if (!(vtop->r & VT_BOUNDED)) {
1383 lval_type = vtop->r & (VT_LVAL_TYPE | VT_LVAL);
1384 /* must save type because we must set it to int to get pointer */
1385 type1 = vtop->type;
1386 vtop->type.t = VT_PTR;
1387 gaddrof();
1388 vpushi(0);
1389 gen_bounded_ptr_add();
1390 vtop->r |= lval_type;
1391 vtop->type = type1;
1393 /* then check for dereferencing */
1394 gen_bounded_ptr_deref();
1397 #endif
1399 static void incr_bf_adr(int o)
1401 vtop->type = char_pointer_type;
1402 gaddrof();
1403 vpushi(o);
1404 gen_op('+');
1405 vtop->type.t = (vtop->type.t & ~(VT_BTYPE|VT_DEFSIGN))
1406 | (VT_BYTE|VT_UNSIGNED);
1407 vtop->r = (vtop->r & ~VT_LVAL_TYPE)
1408 | (VT_LVAL_BYTE|VT_LVAL_UNSIGNED|VT_LVAL);
1411 /* single-byte load mode for packed or otherwise unaligned bitfields */
1412 static void load_packed_bf(CType *type, int bit_pos, int bit_size)
1414 int n, o, bits;
1415 save_reg_upstack(vtop->r, 1);
1416 vpush64(type->t & VT_BTYPE, 0); // B X
1417 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
1418 do {
1419 vswap(); // X B
1420 incr_bf_adr(o);
1421 vdup(); // X B B
1422 n = 8 - bit_pos;
1423 if (n > bit_size)
1424 n = bit_size;
1425 if (bit_pos)
1426 vpushi(bit_pos), gen_op(TOK_SHR), bit_pos = 0; // X B Y
1427 if (n < 8)
1428 vpushi((1 << n) - 1), gen_op('&');
1429 gen_cast(type);
1430 if (bits)
1431 vpushi(bits), gen_op(TOK_SHL);
1432 vrotb(3); // B Y X
1433 gen_op('|'); // B X
1434 bits += n, bit_size -= n, o = 1;
1435 } while (bit_size);
1436 vswap(), vpop();
1437 if (!(type->t & VT_UNSIGNED)) {
1438 n = ((type->t & VT_BTYPE) == VT_LLONG ? 64 : 32) - bits;
1439 vpushi(n), gen_op(TOK_SHL);
1440 vpushi(n), gen_op(TOK_SAR);
1444 /* single-byte store mode for packed or otherwise unaligned bitfields */
1445 static void store_packed_bf(int bit_pos, int bit_size)
1447 int bits, n, o, m, c;
1449 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1450 vswap(); // X B
1451 save_reg_upstack(vtop->r, 1);
1452 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
1453 do {
1454 incr_bf_adr(o); // X B
1455 vswap(); //B X
1456 c ? vdup() : gv_dup(); // B V X
1457 vrott(3); // X B V
1458 if (bits)
1459 vpushi(bits), gen_op(TOK_SHR);
1460 if (bit_pos)
1461 vpushi(bit_pos), gen_op(TOK_SHL);
1462 n = 8 - bit_pos;
1463 if (n > bit_size)
1464 n = bit_size;
1465 if (n < 8) {
1466 m = ((1 << n) - 1) << bit_pos;
1467 vpushi(m), gen_op('&'); // X B V1
1468 vpushv(vtop-1); // X B V1 B
1469 vpushi(m & 0x80 ? ~m & 0x7f : ~m);
1470 gen_op('&'); // X B V1 B1
1471 gen_op('|'); // X B V2
1473 vdup(), vtop[-1] = vtop[-2]; // X B B V2
1474 vstore(), vpop(); // X B
1475 bits += n, bit_size -= n, bit_pos = 0, o = 1;
1476 } while (bit_size);
1477 vpop(), vpop();
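/* adjust_bf() inspects the auxtype that struct layout recorded for a
   bitfield (set up elsewhere): -1 means the declared type can be used
   directly, VT_STRUCT marks a packed/unaligned field that must go through
   load_packed_bf()/store_packed_bf() byte by byte, and any other value is
   the basic type to substitute for the access.  gv() below dispatches on
   the returned value. */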
1480 static int adjust_bf(SValue *sv, int bit_pos, int bit_size)
1482 int t;
1483 if (0 == sv->type.ref)
1484 return 0;
1485 t = sv->type.ref->auxtype;
1486 if (t != -1 && t != VT_STRUCT) {
1487 sv->type.t = (sv->type.t & ~VT_BTYPE) | t;
1488 sv->r = (sv->r & ~VT_LVAL_TYPE) | lvalue_type(sv->type.t);
1490 return t;
1493 /* store vtop in a register belonging to class 'rc'. lvalues are
1494 converted to values. Cannot be used if the value cannot be
1495 converted to a register value (such as structures). */
1496 ST_FUNC int gv(int rc)
1498 int r, bit_pos, bit_size, size, align, rc2;
1500 /* NOTE: get_reg can modify vstack[] */
1501 if (vtop->type.t & VT_BITFIELD) {
1502 CType type;
1504 bit_pos = BIT_POS(vtop->type.t);
1505 bit_size = BIT_SIZE(vtop->type.t);
1506 /* remove bit field info to avoid loops */
1507 vtop->type.t &= ~VT_STRUCT_MASK;
1509 type.ref = NULL;
1510 type.t = vtop->type.t & VT_UNSIGNED;
1511 if ((vtop->type.t & VT_BTYPE) == VT_BOOL)
1512 type.t |= VT_UNSIGNED;
1514 r = adjust_bf(vtop, bit_pos, bit_size);
1516 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
1517 type.t |= VT_LLONG;
1518 else
1519 type.t |= VT_INT;
1521 if (r == VT_STRUCT) {
1522 load_packed_bf(&type, bit_pos, bit_size);
1523 } else {
1524 int bits = (type.t & VT_BTYPE) == VT_LLONG ? 64 : 32;
1525 /* cast to int to propagate signedness in following ops */
1526 gen_cast(&type);
1527 /* generate shifts */
1528 vpushi(bits - (bit_pos + bit_size));
1529 gen_op(TOK_SHL);
1530 vpushi(bits - bit_size);
1531 /* NOTE: transformed to SHR if unsigned */
1532 gen_op(TOK_SAR);
1534 r = gv(rc);
1535 } else {
1536 if (is_float(vtop->type.t) &&
1537 (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1538 unsigned long offset;
1539 /* CPUs usually cannot use float constants, so we store them
1540 generically in data segment */
1541 size = type_size(&vtop->type, &align);
1542 if (NODATA_WANTED)
1543 size = 0, align = 1;
1544 offset = section_add(data_section, size, align);
1545 vpush_ref(&vtop->type, data_section, offset, size);
1546 vswap();
1547 init_putv(&vtop->type, data_section, offset);
1548 vtop->r |= VT_LVAL;
1550 #ifdef CONFIG_TCC_BCHECK
1551 if (vtop->r & VT_MUSTBOUND)
1552 gbound();
1553 #endif
1555 r = vtop->r & VT_VALMASK;
1556 rc2 = (rc & RC_FLOAT) ? RC_FLOAT : RC_INT;
1557 #ifndef TCC_TARGET_ARM64
1558 if (rc == RC_IRET)
1559 rc2 = RC_LRET;
1560 #ifdef TCC_TARGET_X86_64
1561 else if (rc == RC_FRET)
1562 rc2 = RC_QRET;
1563 #endif
1564 #endif
1565 /* need to reload if:
1566 - constant
1567 - lvalue (need to dereference pointer)
1568 - already a register, but not in the right class */
1569 if (r >= VT_CONST
1570 || (vtop->r & VT_LVAL)
1571 || !(reg_classes[r] & rc)
1572 #if PTR_SIZE == 8
1573 || ((vtop->type.t & VT_BTYPE) == VT_QLONG && !(reg_classes[vtop->r2] & rc2))
1574 || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT && !(reg_classes[vtop->r2] & rc2))
1575 #else
1576 || ((vtop->type.t & VT_BTYPE) == VT_LLONG && !(reg_classes[vtop->r2] & rc2))
1577 #endif
1580 r = get_reg(rc);
1581 #if PTR_SIZE == 8
1582 if (((vtop->type.t & VT_BTYPE) == VT_QLONG) || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT)) {
1583 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
1584 #else
1585 if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
1586 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
1587 unsigned long long ll;
1588 #endif
1589 int r2, original_type;
1590 original_type = vtop->type.t;
1591 /* two register type load : expand to two words
1592 temporarily */
1593 #if PTR_SIZE == 4
1594 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1595 /* load constant */
1596 ll = vtop->c.i;
1597 vtop->c.i = ll; /* first word */
1598 load(r, vtop);
1599 vtop->r = r; /* save register value */
1600 vpushi(ll >> 32); /* second word */
1601 } else
1602 #endif
1603 if (vtop->r & VT_LVAL) {
1604 /* We do not want to modify the long long
1605 pointer here, so the safest (and least
1606 efficient) approach is to save all the other
1607 registers on the stack. XXX: totally inefficient. */
1608 #if 0
1609 save_regs(1);
1610 #else
1611 /* lvalue_save: save only if used further down the stack */
1612 save_reg_upstack(vtop->r, 1);
1613 #endif
1614 /* load from memory */
1615 vtop->type.t = load_type;
1616 load(r, vtop);
1617 vdup();
1618 vtop[-1].r = r; /* save register value */
1619 /* increment pointer to get second word */
1620 vtop->type.t = addr_type;
1621 gaddrof();
1622 vpushi(load_size);
1623 gen_op('+');
1624 vtop->r |= VT_LVAL;
1625 vtop->type.t = load_type;
1626 } else {
1627 /* move registers */
1628 load(r, vtop);
1629 vdup();
1630 vtop[-1].r = r; /* save register value */
1631 vtop->r = vtop[-1].r2;
1633 /* Allocate second register. Here we rely on the fact that
1634 get_reg() tries first to free r2 of an SValue. */
1635 r2 = get_reg(rc2);
1636 load(r2, vtop);
1637 vpop();
1638 /* write second register */
1639 vtop->r2 = r2;
1640 vtop->type.t = original_type;
1641 } else if ((vtop->r & VT_LVAL) && !is_float(vtop->type.t)) {
1642 int t1, t;
1643 /* lvalue of scalar type : need to use lvalue type
1644 because of possible cast */
1645 t = vtop->type.t;
1646 t1 = t;
1647 /* compute memory access type */
1648 if (vtop->r & VT_LVAL_BYTE)
1649 t = VT_BYTE;
1650 else if (vtop->r & VT_LVAL_SHORT)
1651 t = VT_SHORT;
1652 if (vtop->r & VT_LVAL_UNSIGNED)
1653 t |= VT_UNSIGNED;
1654 vtop->type.t = t;
1655 load(r, vtop);
1656 /* restore wanted type */
1657 vtop->type.t = t1;
1658 } else {
1659 if (vtop->r == VT_CMP)
1660 vset_VT_JMP();
1661 /* one register type load */
1662 load(r, vtop);
1665 vtop->r = r;
1666 #ifdef TCC_TARGET_C67
1667 /* uses register pairs for doubles */
1668 if ((vtop->type.t & VT_BTYPE) == VT_DOUBLE)
1669 vtop->r2 = r+1;
1670 #endif
1672 return r;
1675 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1676 ST_FUNC void gv2(int rc1, int rc2)
1678 /* generate more generic register first. But VT_JMP or VT_CMP
1679 values must be generated first in all cases to avoid possible
1680 reload errors */
1681 if (vtop->r != VT_CMP && rc1 <= rc2) {
1682 vswap();
1683 gv(rc1);
1684 vswap();
1685 gv(rc2);
1686 /* test if reload is needed for first register */
1687 if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
1688 vswap();
1689 gv(rc1);
1690 vswap();
1692 } else {
1693 gv(rc2);
1694 vswap();
1695 gv(rc1);
1696 vswap();
1697 /* test if reload is needed for first register */
1698 if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
1699 gv(rc2);
1704 #ifndef TCC_TARGET_ARM64
1705 /* wrapper around RC_FRET to return a register by type */
1706 static int rc_fret(int t)
1708 #ifdef TCC_TARGET_X86_64
1709 if (t == VT_LDOUBLE) {
1710 return RC_ST0;
1712 #endif
1713 return RC_FRET;
1715 #endif
1717 /* wrapper around REG_FRET to return a register by type */
1718 static int reg_fret(int t)
1720 #ifdef TCC_TARGET_X86_64
1721 if (t == VT_LDOUBLE) {
1722 return TREG_ST0;
1724 #endif
1725 return REG_FRET;
1728 #if PTR_SIZE == 4
1729 /* expand 64bit on stack in two ints */
1730 ST_FUNC void lexpand(void)
1732 int u, v;
1733 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1734 v = vtop->r & (VT_VALMASK | VT_LVAL);
1735 if (v == VT_CONST) {
1736 vdup();
1737 vtop[0].c.i >>= 32;
1738 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1739 vdup();
1740 vtop[0].c.i += 4;
1741 } else {
1742 gv(RC_INT);
1743 vdup();
1744 vtop[0].r = vtop[-1].r2;
1745 vtop[0].r2 = vtop[-1].r2 = VT_CONST;
1747 vtop[0].type.t = vtop[-1].type.t = VT_INT | u;
1749 #endif
1751 #if PTR_SIZE == 4
1752 /* build a long long from two ints */
1753 static void lbuild(int t)
1755 gv2(RC_INT, RC_INT);
1756 vtop[-1].r2 = vtop[0].r;
1757 vtop[-1].type.t = t;
1758 vpop();
1760 #endif
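/* On 32-bit targets a long long occupies a register pair (r/r2).  lexpand()
   splits the 64-bit entry on top of the stack into two 32-bit halves, low
   word below and high word on top (for lvalues the second word is assumed
   to live 4 bytes above the first); lbuild() is the inverse: it forces both
   halves into registers and rebuilds a single entry with r holding the low
   and r2 the high word.  gen_opl() below uses this pair to synthesize
   64-bit arithmetic from 32-bit operations. */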
1762 /* convert stack entry to register and duplicate its value in another
1763 register */
1764 static void gv_dup(void)
1766 int rc, t, r, r1;
1767 SValue sv;
1769 t = vtop->type.t;
1770 #if PTR_SIZE == 4
1771 if ((t & VT_BTYPE) == VT_LLONG) {
1772 if (t & VT_BITFIELD) {
1773 gv(RC_INT);
1774 t = vtop->type.t;
1776 lexpand();
1777 gv_dup();
1778 vswap();
1779 vrotb(3);
1780 gv_dup();
1781 vrotb(4);
1782 /* stack: H L L1 H1 */
1783 lbuild(t);
1784 vrotb(3);
1785 vrotb(3);
1786 vswap();
1787 lbuild(t);
1788 vswap();
1789 } else
1790 #endif
1792 /* duplicate value */
1793 rc = RC_INT;
1794 sv.type.t = VT_INT;
1795 if (is_float(t)) {
1796 rc = RC_FLOAT;
1797 #ifdef TCC_TARGET_X86_64
1798 if ((t & VT_BTYPE) == VT_LDOUBLE) {
1799 rc = RC_ST0;
1801 #endif
1802 sv.type.t = t;
1804 r = gv(rc);
1805 r1 = get_reg(rc);
1806 sv.r = r;
1807 sv.c.i = 0;
1808 load(r1, &sv); /* move r to r1 */
1809 vdup();
1810 /* duplicates value */
1811 if (r != r1)
1812 vtop->r = r1;
1816 #if PTR_SIZE == 4
1817 /* generate CPU independent (unsigned) long long operations */
1818 static void gen_opl(int op)
1820 int t, a, b, op1, c, i;
1821 int func;
1822 unsigned short reg_iret = REG_IRET;
1823 unsigned short reg_lret = REG_LRET;
1824 SValue tmp;
1826 switch(op) {
1827 case '/':
1828 case TOK_PDIV:
1829 func = TOK___divdi3;
1830 goto gen_func;
1831 case TOK_UDIV:
1832 func = TOK___udivdi3;
1833 goto gen_func;
1834 case '%':
1835 func = TOK___moddi3;
1836 goto gen_mod_func;
1837 case TOK_UMOD:
1838 func = TOK___umoddi3;
1839 gen_mod_func:
1840 #ifdef TCC_ARM_EABI
1841 reg_iret = TREG_R2;
1842 reg_lret = TREG_R3;
1843 #endif
1844 gen_func:
1845 /* call generic long long function */
1846 vpush_global_sym(&func_old_type, func);
1847 vrott(3);
1848 gfunc_call(2);
1849 vpushi(0);
1850 vtop->r = reg_iret;
1851 vtop->r2 = reg_lret;
1852 break;
1853 case '^':
1854 case '&':
1855 case '|':
1856 case '*':
1857 case '+':
1858 case '-':
1859 //pv("gen_opl A",0,2);
1860 t = vtop->type.t;
1861 vswap();
1862 lexpand();
1863 vrotb(3);
1864 lexpand();
1865 /* stack: L1 H1 L2 H2 */
1866 tmp = vtop[0];
1867 vtop[0] = vtop[-3];
1868 vtop[-3] = tmp;
1869 tmp = vtop[-2];
1870 vtop[-2] = vtop[-3];
1871 vtop[-3] = tmp;
1872 vswap();
1873 /* stack: H1 H2 L1 L2 */
1874 //pv("gen_opl B",0,4);
1875 if (op == '*') {
1876 vpushv(vtop - 1);
1877 vpushv(vtop - 1);
1878 gen_op(TOK_UMULL);
1879 lexpand();
1880 /* stack: H1 H2 L1 L2 ML MH */
1881 for(i=0;i<4;i++)
1882 vrotb(6);
1883 /* stack: ML MH H1 H2 L1 L2 */
1884 tmp = vtop[0];
1885 vtop[0] = vtop[-2];
1886 vtop[-2] = tmp;
1887 /* stack: ML MH H1 L2 H2 L1 */
1888 gen_op('*');
1889 vrotb(3);
1890 vrotb(3);
1891 gen_op('*');
1892 /* stack: ML MH M1 M2 */
1893 gen_op('+');
1894 gen_op('+');
1895 } else if (op == '+' || op == '-') {
1896 /* XXX: add non carry method too (for MIPS or alpha) */
1897 if (op == '+')
1898 op1 = TOK_ADDC1;
1899 else
1900 op1 = TOK_SUBC1;
1901 gen_op(op1);
1902 /* stack: H1 H2 (L1 op L2) */
1903 vrotb(3);
1904 vrotb(3);
1905 gen_op(op1 + 1); /* TOK_xxxC2 */
1906 } else {
1907 gen_op(op);
1908 /* stack: H1 H2 (L1 op L2) */
1909 vrotb(3);
1910 vrotb(3);
1911 /* stack: (L1 op L2) H1 H2 */
1912 gen_op(op);
1913 /* stack: (L1 op L2) (H1 op H2) */
1915 /* stack: L H */
1916 lbuild(t);
1917 break;
1918 case TOK_SAR:
1919 case TOK_SHR:
1920 case TOK_SHL:
1921 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1922 t = vtop[-1].type.t;
1923 vswap();
1924 lexpand();
1925 vrotb(3);
1926 /* stack: L H shift */
1927 c = (int)vtop->c.i;
1928 /* constant: simpler */
1929 /* NOTE: all comments are for SHL. the other cases are
1930 done by swapping words */
1931 vpop();
1932 if (op != TOK_SHL)
1933 vswap();
1934 if (c >= 32) {
1935 /* stack: L H */
1936 vpop();
1937 if (c > 32) {
1938 vpushi(c - 32);
1939 gen_op(op);
1941 if (op != TOK_SAR) {
1942 vpushi(0);
1943 } else {
1944 gv_dup();
1945 vpushi(31);
1946 gen_op(TOK_SAR);
1948 vswap();
1949 } else {
1950 vswap();
1951 gv_dup();
1952 /* stack: H L L */
1953 vpushi(c);
1954 gen_op(op);
1955 vswap();
1956 vpushi(32 - c);
1957 if (op == TOK_SHL)
1958 gen_op(TOK_SHR);
1959 else
1960 gen_op(TOK_SHL);
1961 vrotb(3);
1962 /* stack: L L H */
1963 vpushi(c);
1964 if (op == TOK_SHL)
1965 gen_op(TOK_SHL);
1966 else
1967 gen_op(TOK_SHR);
1968 gen_op('|');
1970 if (op != TOK_SHL)
1971 vswap();
1972 lbuild(t);
1973 } else {
1974 /* XXX: should provide a faster fallback on x86 ? */
1975 switch(op) {
1976 case TOK_SAR:
1977 func = TOK___ashrdi3;
1978 goto gen_func;
1979 case TOK_SHR:
1980 func = TOK___lshrdi3;
1981 goto gen_func;
1982 case TOK_SHL:
1983 func = TOK___ashldi3;
1984 goto gen_func;
1987 break;
1988 default:
1989 /* compare operations */
1990 t = vtop->type.t;
1991 vswap();
1992 lexpand();
1993 vrotb(3);
1994 lexpand();
1995 /* stack: L1 H1 L2 H2 */
1996 tmp = vtop[-1];
1997 vtop[-1] = vtop[-2];
1998 vtop[-2] = tmp;
1999 /* stack: L1 L2 H1 H2 */
2000 /* compare high */
2001 op1 = op;
2002 /* when values are equal, we need to compare low words. since
2003 the jump is inverted, we invert the test too. */
2004 if (op1 == TOK_LT)
2005 op1 = TOK_LE;
2006 else if (op1 == TOK_GT)
2007 op1 = TOK_GE;
2008 else if (op1 == TOK_ULT)
2009 op1 = TOK_ULE;
2010 else if (op1 == TOK_UGT)
2011 op1 = TOK_UGE;
2012 a = 0;
2013 b = 0;
2014 gen_op(op1);
2015 if (op == TOK_NE) {
2016 b = gvtst(0, 0);
2017 } else {
2018 a = gvtst(1, 0);
2019 if (op != TOK_EQ) {
2020 /* generate non equal test */
2021 vpushi(0);
2022 vset_VT_CMP(TOK_NE);
2023 b = gvtst(0, 0);
2026 /* compare low. Always unsigned */
2027 op1 = op;
2028 if (op1 == TOK_LT)
2029 op1 = TOK_ULT;
2030 else if (op1 == TOK_LE)
2031 op1 = TOK_ULE;
2032 else if (op1 == TOK_GT)
2033 op1 = TOK_UGT;
2034 else if (op1 == TOK_GE)
2035 op1 = TOK_UGE;
2036 gen_op(op1);
2037 #if 0//def TCC_TARGET_I386
2038 if (op == TOK_NE) { gsym(b); break; }
2039 if (op == TOK_EQ) { gsym(a); break; }
2040 #endif
2041 gvtst_set(1, a);
2042 gvtst_set(0, b);
2043 break;
2046 #endif
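/* The two helpers below let constant folding use plain unsigned arithmetic:
   gen_opic_sdiv() performs truncating signed 64-bit division by dividing
   the magnitudes and negating the result when the operand signs differ
   (which also avoids host-side undefined behaviour for cases like
   INT64_MIN / -1), and gen_opic_lt() implements a signed '<' by flipping
   the sign bit of both operands, mapping the signed ordering onto the
   unsigned one. */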
2048 static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
2050 uint64_t x = (a >> 63 ? -a : a) / (b >> 63 ? -b : b);
2051 return (a ^ b) >> 63 ? -x : x;
2054 static int gen_opic_lt(uint64_t a, uint64_t b)
2056 return (a ^ (uint64_t)1 << 63) < (b ^ (uint64_t)1 << 63);
2059 /* handle integer constant optimizations and various
2060 machine-independent optimizations */
2061 static void gen_opic(int op)
2063 SValue *v1 = vtop - 1;
2064 SValue *v2 = vtop;
2065 int t1 = v1->type.t & VT_BTYPE;
2066 int t2 = v2->type.t & VT_BTYPE;
2067 int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2068 int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2069 uint64_t l1 = c1 ? v1->c.i : 0;
2070 uint64_t l2 = c2 ? v2->c.i : 0;
2071 int shm = (t1 == VT_LLONG) ? 63 : 31;
2073 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
2074 l1 = ((uint32_t)l1 |
2075 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
2076 if (t2 != VT_LLONG && (PTR_SIZE != 8 || t2 != VT_PTR))
2077 l2 = ((uint32_t)l2 |
2078 (v2->type.t & VT_UNSIGNED ? 0 : -(l2 & 0x80000000)));
2080 if (c1 && c2) {
2081 switch(op) {
2082 case '+': l1 += l2; break;
2083 case '-': l1 -= l2; break;
2084 case '&': l1 &= l2; break;
2085 case '^': l1 ^= l2; break;
2086 case '|': l1 |= l2; break;
2087 case '*': l1 *= l2; break;
2089 case TOK_PDIV:
2090 case '/':
2091 case '%':
2092 case TOK_UDIV:
2093 case TOK_UMOD:
2094 /* if division by zero, generate explicit division */
2095 if (l2 == 0) {
2096 if (const_wanted)
2097 tcc_error("division by zero in constant");
2098 goto general_case;
2100 switch(op) {
2101 default: l1 = gen_opic_sdiv(l1, l2); break;
2102 case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
2103 case TOK_UDIV: l1 = l1 / l2; break;
2104 case TOK_UMOD: l1 = l1 % l2; break;
2106 break;
2107 case TOK_SHL: l1 <<= (l2 & shm); break;
2108 case TOK_SHR: l1 >>= (l2 & shm); break;
2109 case TOK_SAR:
2110 l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
2111 break;
2112 /* tests */
2113 case TOK_ULT: l1 = l1 < l2; break;
2114 case TOK_UGE: l1 = l1 >= l2; break;
2115 case TOK_EQ: l1 = l1 == l2; break;
2116 case TOK_NE: l1 = l1 != l2; break;
2117 case TOK_ULE: l1 = l1 <= l2; break;
2118 case TOK_UGT: l1 = l1 > l2; break;
2119 case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
2120 case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
2121 case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
2122 case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
2123 /* logical */
2124 case TOK_LAND: l1 = l1 && l2; break;
2125 case TOK_LOR: l1 = l1 || l2; break;
2126 default:
2127 goto general_case;
2129 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
2130 l1 = ((uint32_t)l1 |
2131 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
2132 v1->c.i = l1;
2133 vtop--;
2134 } else {
2135 /* if commutative ops, put c2 as constant */
2136 if (c1 && (op == '+' || op == '&' || op == '^' ||
2137 op == '|' || op == '*')) {
2138 vswap();
2139 c2 = c1; //c = c1, c1 = c2, c2 = c;
2140 l2 = l1; //l = l1, l1 = l2, l2 = l;
2142 if (!const_wanted &&
2143 c1 && ((l1 == 0 &&
2144 (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
2145 (l1 == -1 && op == TOK_SAR))) {
2146 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
2147 vtop--;
2148 } else if (!const_wanted &&
2149 c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
2150 (op == '|' &&
2151 (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))) ||
2152 (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
2153 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
2154 if (l2 == 1)
2155 vtop->c.i = 0;
2156 vswap();
2157 vtop--;
2158 } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
2159 op == TOK_PDIV) &&
2160 l2 == 1) ||
2161 ((op == '+' || op == '-' || op == '|' || op == '^' ||
2162 op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
2163 l2 == 0) ||
2164 (op == '&' &&
2165 (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))))) {
2166 /* filter out NOP operations like x*1, x-0, x&-1... */
2167 vtop--;
2168 } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
2169 /* try to use shifts instead of muls or divs */
2170 if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
2171 int n = -1;
2172 while (l2) {
2173 l2 >>= 1;
2174 n++;
2176 vtop->c.i = n;
2177 if (op == '*')
2178 op = TOK_SHL;
2179 else if (op == TOK_PDIV)
2180 op = TOK_SAR;
2181 else
2182 op = TOK_SHR;
2184 goto general_case;
2185 } else if (c2 && (op == '+' || op == '-') &&
2186 (((vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM))
2187 || (vtop[-1].r & (VT_VALMASK | VT_LVAL)) == VT_LOCAL)) {
2188 /* symbol + constant case */
2189 if (op == '-')
2190 l2 = -l2;
2191 l2 += vtop[-1].c.i;
2192 /* The backends can't always deal with addends to symbols
2193 larger than +-1<<31. Don't construct such. */
2194 if ((int)l2 != l2)
2195 goto general_case;
2196 vtop--;
2197 vtop->c.i = l2;
2198 } else {
2199 general_case:
2200 /* call low level op generator */
2201 if (t1 == VT_LLONG || t2 == VT_LLONG ||
2202 (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
2203 gen_opl(op);
2204 else
2205 gen_opi(op);
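/* Illustrative example (a disabled sketch, not used by the compiler): the
   integer folding above means expressions like these need no runtime
   arithmetic.  Two constant operands are folded outright; a power-of-two
   multiplier or divisor becomes a shift; x & 0, x * 0, x | -1 and x % 1
   collapse to their known results and the non-constant operand is dropped. */
#if 0
#include <stdio.h>
int main(void)
{
    int x = 6;
    int a = 3 + 4 * 5;   /* folded to 23 at compile time */
    int b = x * 8;       /* strength-reduced to x << 3 */
    int c = x & 0;       /* known to be 0 */
    int d = x % 1;       /* known to be 0 */
    printf("%d %d %d %d\n", a, b, c, d);   /* prints 23 48 0 0 */
    return 0;
}
#endif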
2210 /* generate a floating point operation with constant propagation */
2211 static void gen_opif(int op)
2213 int c1, c2;
2214 SValue *v1, *v2;
2215 #if defined _MSC_VER && defined _AMD64_
2216 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2217 volatile
2218 #endif
2219 long double f1, f2;
2221 v1 = vtop - 1;
2222 v2 = vtop;
2223 /* currently, we cannot do computations with forward symbols */
2224 c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2225 c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2226 if (c1 && c2) {
2227 if (v1->type.t == VT_FLOAT) {
2228 f1 = v1->c.f;
2229 f2 = v2->c.f;
2230 } else if (v1->type.t == VT_DOUBLE) {
2231 f1 = v1->c.d;
2232 f2 = v2->c.d;
2233 } else {
2234 f1 = v1->c.ld;
2235 f2 = v2->c.ld;
2238 /* NOTE: we only do constant propagation for finite numbers (not
2239 NaN or infinity), as per the ANSI spec */
2240 if (!ieee_finite(f1) || !ieee_finite(f2))
2241 goto general_case;
2243 switch(op) {
2244 case '+': f1 += f2; break;
2245 case '-': f1 -= f2; break;
2246 case '*': f1 *= f2; break;
2247 case '/':
2248 if (f2 == 0.0) {
2249 /* If not in initializer we need to potentially generate
2250 FP exceptions at runtime, otherwise we want to fold. */
2251 if (!const_wanted)
2252 goto general_case;
2254 f1 /= f2;
2255 break;
2256 /* XXX: also handles tests ? */
2257 default:
2258 goto general_case;
2260 /* XXX: overflow test ? */
2261 if (v1->type.t == VT_FLOAT) {
2262 v1->c.f = f1;
2263 } else if (v1->type.t == VT_DOUBLE) {
2264 v1->c.d = f1;
2265 } else {
2266 v1->c.ld = f1;
2268 vtop--;
2269 } else {
2270 general_case:
2271 gen_opf(op);
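/* Illustrative example (disabled sketch, not used by the compiler): the float
   folder above only folds finite constant operands, and a division by a
   constant zero is folded only inside constant expressions; elsewhere it is
   left for runtime so that FP exceptions can still occur. */
#if 0
#include <stdio.h>
int main(void)
{
    double a = 1.5 * 4.0;   /* finite constants: folded to 6.0 */
    double b = 1.0 / 0.0;   /* not folded here: computed at runtime (+inf) */
    printf("%f %f\n", a, b);
    return 0;
}
#endif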
2275 static int pointed_size(CType *type)
2277 int align;
2278 return type_size(pointed_type(type), &align);
2281 static void vla_runtime_pointed_size(CType *type)
2283 int align;
2284 vla_runtime_type_size(pointed_type(type), &align);
2287 static inline int is_null_pointer(SValue *p)
2289 if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
2290 return 0;
2291 return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
2292 ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
2293 ((p->type.t & VT_BTYPE) == VT_PTR &&
2294 (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0) &&
2295 ((pointed_type(&p->type)->t & VT_BTYPE) == VT_VOID) &&
2296 0 == (pointed_type(&p->type)->t & (VT_CONSTANT | VT_VOLATILE))
2299 static inline int is_integer_btype(int bt)
2301 return (bt == VT_BYTE || bt == VT_SHORT ||
2302 bt == VT_INT || bt == VT_LLONG);
2305 /* check types for comparison or subtraction of pointers */
2306 static void check_comparison_pointer_types(SValue *p1, SValue *p2, int op)
2308 CType *type1, *type2, tmp_type1, tmp_type2;
2309 int bt1, bt2;
2311 /* null pointers are accepted for all comparisons, as in gcc */
2312 if (is_null_pointer(p1) || is_null_pointer(p2))
2313 return;
2314 type1 = &p1->type;
2315 type2 = &p2->type;
2316 bt1 = type1->t & VT_BTYPE;
2317 bt2 = type2->t & VT_BTYPE;
2318 /* accept comparison between pointer and integer with a warning */
2319 if ((is_integer_btype(bt1) || is_integer_btype(bt2)) && op != '-') {
2320 if (op != TOK_LOR && op != TOK_LAND )
2321 tcc_warning("comparison between pointer and integer");
2322 return;
2325 /* both must be pointers or implicit function pointers */
2326 if (bt1 == VT_PTR) {
2327 type1 = pointed_type(type1);
2328 } else if (bt1 != VT_FUNC)
2329 goto invalid_operands;
2331 if (bt2 == VT_PTR) {
2332 type2 = pointed_type(type2);
2333 } else if (bt2 != VT_FUNC) {
2334 invalid_operands:
2335 tcc_error("invalid operands to binary %s", get_tok_str(op, NULL));
2337 if ((type1->t & VT_BTYPE) == VT_VOID ||
2338 (type2->t & VT_BTYPE) == VT_VOID)
2339 return;
2340 tmp_type1 = *type1;
2341 tmp_type2 = *type2;
2342 tmp_type1.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
2343 tmp_type2.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
2344 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
2345 /* gcc-like error if '-' is used */
2346 if (op == '-')
2347 goto invalid_operands;
2348 else
2349 tcc_warning("comparison of distinct pointer types lacks a cast");
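/* Illustrative example (disabled sketch, not used by the compiler; the
   function and variable names are made up): the checks above silently accept
   comparisons against a null pointer constant, warn for pointer/integer
   mixes and for distinct pointer types, and reject '-' between incompatible
   pointers. */
#if 0
int f(int *p, char *q, long n)
{
    if (p == 0)         /* ok: null pointer constant */
        return 0;
    if (p == n)         /* warning: comparison between pointer and integer */
        return 1;
    if ((void *)p == q) /* ok: void * compares with any object pointer */
        return 2;
    return 3;
}
#endif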
2353 /* generic gen_op: handles type problems */
2354 ST_FUNC void gen_op(int op)
2356 int u, t1, t2, bt1, bt2, t;
2357 CType type1;
2359 redo:
2360 t1 = vtop[-1].type.t;
2361 t2 = vtop[0].type.t;
2362 bt1 = t1 & VT_BTYPE;
2363 bt2 = t2 & VT_BTYPE;
2365 if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
2366 tcc_error("operation on a struct");
2367 } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
2368 if (bt2 == VT_FUNC) {
2369 mk_pointer(&vtop->type);
2370 gaddrof();
2372 if (bt1 == VT_FUNC) {
2373 vswap();
2374 mk_pointer(&vtop->type);
2375 gaddrof();
2376 vswap();
2378 goto redo;
2379 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
2380 /* at least one operand is a pointer */
2381 /* relational op: must be both pointers */
2382 if (op >= TOK_ULT && op <= TOK_LOR) {
2383 check_comparison_pointer_types(vtop - 1, vtop, op);
2384 /* pointers are handled as unsigned */
2385 #if PTR_SIZE == 8
2386 t = VT_LLONG | VT_UNSIGNED;
2387 #else
2388 t = VT_INT | VT_UNSIGNED;
2389 #endif
2390 goto std_op;
2392 /* if both pointers, then it must be the '-' op */
2393 if (bt1 == VT_PTR && bt2 == VT_PTR) {
2394 if (op != '-')
2395 tcc_error("cannot use pointers here");
2396 check_comparison_pointer_types(vtop - 1, vtop, op);
2397 /* XXX: check that types are compatible */
2398 if (vtop[-1].type.t & VT_VLA) {
2399 vla_runtime_pointed_size(&vtop[-1].type);
2400 } else {
2401 vpushi(pointed_size(&vtop[-1].type));
2403 vrott(3);
2404 gen_opic(op);
2405 vtop->type.t = ptrdiff_type.t;
2406 vswap();
2407 gen_op(TOK_PDIV);
2408 } else {
2409 /* exactly one pointer : must be '+' or '-'. */
2410 if (op != '-' && op != '+')
2411 tcc_error("cannot use pointers here");
2412 /* Put pointer as first operand */
2413 if (bt2 == VT_PTR) {
2414 vswap();
2415 t = t1, t1 = t2, t2 = t;
2417 #if PTR_SIZE == 4
2418 if ((vtop[0].type.t & VT_BTYPE) == VT_LLONG)
2419 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2420 gen_cast_s(VT_INT);
2421 #endif
2422 type1 = vtop[-1].type;
2423 type1.t &= ~VT_ARRAY;
2424 if (vtop[-1].type.t & VT_VLA)
2425 vla_runtime_pointed_size(&vtop[-1].type);
2426 else {
2427 u = pointed_size(&vtop[-1].type);
2428 if (u < 0)
2429 tcc_error("unknown array element size");
2430 #if PTR_SIZE == 8
2431 vpushll(u);
2432 #else
2433 /* XXX: cast to int ? (long long case) */
2434 vpushi(u);
2435 #endif
2437 gen_op('*');
2438 #if 0
2439 /* #ifdef CONFIG_TCC_BCHECK
2440 The main reason for removing this code:
2441 #include <stdio.h>
2442 int main ()
2444 int v[10];
2445 int i = 10;
2446 int j = 9;
2447 fprintf(stderr, "v+i-j = %p\n", v+i-j);
2448 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
2450 When this code is on, the output looks like:
2451 v+i-j = 0xfffffffe
2452 v+(i-j) = 0xbff84000
2454 /* if evaluating constant expression, no code should be
2455 generated, so no bound check */
2456 if (tcc_state->do_bounds_check && !const_wanted) {
2457 /* if bounded pointers, we generate special code to
2458 test the bounds */
2459 if (op == '-') {
2460 vpushi(0);
2461 vswap();
2462 gen_op('-');
2464 gen_bounded_ptr_add();
2465 } else
2466 #endif
2468 gen_opic(op);
2470 /* restore the type in case gen_opic() swapped the operands */
2471 vtop->type = type1;
2473 } else if (is_float(bt1) || is_float(bt2)) {
2474 /* compute bigger type and do implicit casts */
2475 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
2476 t = VT_LDOUBLE;
2477 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
2478 t = VT_DOUBLE;
2479 } else {
2480 t = VT_FLOAT;
2482 /* floats can only be used for a few operations */
2483 if (op != '+' && op != '-' && op != '*' && op != '/' &&
2484 (op < TOK_ULT || op > TOK_GT))
2485 tcc_error("invalid operands for binary operation");
2486 goto std_op;
2487 } else if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL) {
2488 t = bt1 == VT_LLONG ? VT_LLONG : VT_INT;
2489 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (t | VT_UNSIGNED))
2490 t |= VT_UNSIGNED;
2491 t |= (VT_LONG & t1);
2492 goto std_op;
2493 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
2494 /* cast to biggest op */
2495 t = VT_LLONG | VT_LONG;
2496 if (bt1 == VT_LLONG)
2497 t &= t1;
2498 if (bt2 == VT_LLONG)
2499 t &= t2;
2500 /* convert to unsigned if it does not fit in a long long */
2501 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
2502 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
2503 t |= VT_UNSIGNED;
2504 goto std_op;
2505 } else {
2506 /* integer operations */
2507 t = VT_INT | (VT_LONG & (t1 | t2));
2508 /* convert to unsigned if it does not fit in an integer */
2509 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
2510 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
2511 t |= VT_UNSIGNED;
2512 std_op:
2513 /* XXX: currently, some unsigned operations are explicit, so
2514 we modify them here */
2515 if (t & VT_UNSIGNED) {
2516 if (op == TOK_SAR)
2517 op = TOK_SHR;
2518 else if (op == '/')
2519 op = TOK_UDIV;
2520 else if (op == '%')
2521 op = TOK_UMOD;
2522 else if (op == TOK_LT)
2523 op = TOK_ULT;
2524 else if (op == TOK_GT)
2525 op = TOK_UGT;
2526 else if (op == TOK_LE)
2527 op = TOK_ULE;
2528 else if (op == TOK_GE)
2529 op = TOK_UGE;
2531 vswap();
2532 type1.t = t;
2533 type1.ref = NULL;
2534 gen_cast(&type1);
2535 vswap();
2536 /* special case for shifts and long long: we keep the shift as
2537 an integer */
2538 if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
2539 type1.t = VT_INT;
2540 gen_cast(&type1);
2541 if (is_float(t))
2542 gen_opif(op);
2543 else
2544 gen_opic(op);
2545 if (op >= TOK_ULT && op <= TOK_GT) {
2546 /* relational op: the result is an int */
2547 vtop->type.t = VT_INT;
2548 } else {
2549 vtop->type.t = t;
2552 // Make sure that we have converted to an rvalue:
2553 if (vtop->r & VT_LVAL)
2554 gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
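/* Illustrative example (disabled sketch, not used by the compiler): for
   pointer arithmetic gen_op() multiplies the integer operand by the element
   size, and converts a pointer difference back into elements with a
   pointer-sized division (TOK_PDIV). */
#if 0
#include <stdio.h>
int main(void)
{
    int v[10];
    int *p = v + 3;        /* the offset 3 is scaled by sizeof(int) */
    long d = (v + 7) - p;  /* byte difference divided by sizeof(int): 4 */
    printf("%ld\n", d);
    return 0;
}
#endif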
2557 #ifndef TCC_TARGET_ARM
2558 /* generic itof for unsigned long long case */
2559 static void gen_cvt_itof1(int t)
2561 #ifdef TCC_TARGET_ARM64
2562 gen_cvt_itof(t);
2563 #else
2564 if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
2565 (VT_LLONG | VT_UNSIGNED)) {
2567 if (t == VT_FLOAT)
2568 vpush_global_sym(&func_old_type, TOK___floatundisf);
2569 #if LDOUBLE_SIZE != 8
2570 else if (t == VT_LDOUBLE)
2571 vpush_global_sym(&func_old_type, TOK___floatundixf);
2572 #endif
2573 else
2574 vpush_global_sym(&func_old_type, TOK___floatundidf);
2575 vrott(2);
2576 gfunc_call(1);
2577 vpushi(0);
2578 vtop->r = reg_fret(t);
2579 } else {
2580 gen_cvt_itof(t);
2582 #endif
2584 #endif
2586 /* generic ftoi for unsigned long long case */
2587 static void gen_cvt_ftoi1(int t)
2589 #ifdef TCC_TARGET_ARM64
2590 gen_cvt_ftoi(t);
2591 #else
2592 int st;
2594 if (t == (VT_LLONG | VT_UNSIGNED)) {
2595 /* not handled natively */
2596 st = vtop->type.t & VT_BTYPE;
2597 if (st == VT_FLOAT)
2598 vpush_global_sym(&func_old_type, TOK___fixunssfdi);
2599 #if LDOUBLE_SIZE != 8
2600 else if (st == VT_LDOUBLE)
2601 vpush_global_sym(&func_old_type, TOK___fixunsxfdi);
2602 #endif
2603 else
2604 vpush_global_sym(&func_old_type, TOK___fixunsdfdi);
2605 vrott(2);
2606 gfunc_call(1);
2607 vpushi(0);
2608 vtop->r = REG_IRET;
2609 vtop->r2 = REG_LRET;
2610 } else {
2611 gen_cvt_ftoi(t);
2613 #endif
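/* Illustrative example (disabled sketch, not used by the compiler):
   conversions between unsigned long long and floating point are not handled
   natively on all targets, so the two helpers above fall back to the runtime
   support calls referenced here (__floatundisf/__floatundidf/__floatundixf,
   __fixunssfdi/__fixunsdfdi/__fixunsxfdi); everything else goes through the
   native gen_cvt_* hooks. */
#if 0
#include <stdio.h>
int main(void)
{
    unsigned long long u = 0xFFFFFFFFFFFFFFFFULL;
    double d = (double)u;                              /* itof helper path */
    unsigned long long b = (unsigned long long)1.5e19; /* ftoi helper path */
    printf("%f %llu\n", d, b);
    return 0;
}
#endif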
2616 /* force char or short cast */
2617 static void force_charshort_cast(int t)
2619 int bits, dbt;
2621 /* cannot cast static initializers */
2622 if (STATIC_DATA_WANTED)
2623 return;
2625 dbt = t & VT_BTYPE;
2626 /* XXX: add optimization if lvalue : just change type and offset */
2627 if (dbt == VT_BYTE)
2628 bits = 8;
2629 else
2630 bits = 16;
2631 if (t & VT_UNSIGNED) {
2632 vpushi((1 << bits) - 1);
2633 gen_op('&');
2634 } else {
2635 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
2636 bits = 64 - bits;
2637 else
2638 bits = 32 - bits;
2639 vpushi(bits);
2640 gen_op(TOK_SHL);
2641 /* the result must be signed, or the SAR is converted to an SHL.
2642 This was not the case when "t" was a signed short
2643 and the last value on the stack was an unsigned int */
2644 vtop->type.t &= ~VT_UNSIGNED;
2645 vpushi(bits);
2646 gen_op(TOK_SAR);
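/* A disabled sketch (not used by the compiler): the delayed char/short cast
   above is the transformation one would write by hand: an AND mask for
   unsigned targets, a shift-left/arithmetic-shift-right pair for signed ones
   (shown for 32-bit int operands; like the generated code, this relies on
   two's-complement shift behaviour). */
#if 0
static int to_signed_char(int x)
{
    return (x << 24) >> 24;   /* what the SHL/SAR pair computes */
}
static int to_unsigned_short(int x)
{
    return x & 0xffff;        /* what the mask computes */
}
#endif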
2650 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2651 static void gen_cast_s(int t)
2653 CType type;
2654 type.t = t;
2655 type.ref = NULL;
2656 gen_cast(&type);
2659 static void gen_cast(CType *type)
2661 int sbt, dbt, sf, df, c, p;
2663 /* special delayed cast for char/short */
2664 /* XXX: in some cases (multiple cascaded casts), it may still
2665 be incorrect */
2666 if (vtop->r & VT_MUSTCAST) {
2667 vtop->r &= ~VT_MUSTCAST;
2668 force_charshort_cast(vtop->type.t);
2671 /* bitfields first get cast to ints */
2672 if (vtop->type.t & VT_BITFIELD) {
2673 gv(RC_INT);
2676 dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
2677 sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
2679 if (sbt != dbt) {
2680 sf = is_float(sbt);
2681 df = is_float(dbt);
2682 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2683 p = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM);
2684 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
2685 c &= dbt != VT_LDOUBLE;
2686 #endif
2687 if (c) {
2688 /* constant case: we can do it now */
2689 /* XXX: in ISO C, cannot do it if the conversion raises an error */
2690 if (sbt == VT_FLOAT)
2691 vtop->c.ld = vtop->c.f;
2692 else if (sbt == VT_DOUBLE)
2693 vtop->c.ld = vtop->c.d;
2695 if (df) {
2696 if ((sbt & VT_BTYPE) == VT_LLONG) {
2697 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
2698 vtop->c.ld = vtop->c.i;
2699 else
2700 vtop->c.ld = -(long double)-vtop->c.i;
2701 } else if(!sf) {
2702 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
2703 vtop->c.ld = (uint32_t)vtop->c.i;
2704 else
2705 vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
2708 if (dbt == VT_FLOAT)
2709 vtop->c.f = (float)vtop->c.ld;
2710 else if (dbt == VT_DOUBLE)
2711 vtop->c.d = (double)vtop->c.ld;
2712 } else if (sf && dbt == (VT_LLONG|VT_UNSIGNED)) {
2713 vtop->c.i = vtop->c.ld;
2714 } else if (sf && dbt == VT_BOOL) {
2715 vtop->c.i = (vtop->c.ld != 0);
2716 } else {
2717 if(sf)
2718 vtop->c.i = vtop->c.ld;
2719 else if (sbt == (VT_LLONG|VT_UNSIGNED))
2721 else if (sbt & VT_UNSIGNED)
2722 vtop->c.i = (uint32_t)vtop->c.i;
2723 #if PTR_SIZE == 8
2724 else if (sbt == VT_PTR)
2726 #endif
2727 else if (sbt != VT_LLONG)
2728 vtop->c.i = ((uint32_t)vtop->c.i |
2729 -(vtop->c.i & 0x80000000));
2731 if (dbt == (VT_LLONG|VT_UNSIGNED))
2733 else if (dbt == VT_BOOL)
2734 vtop->c.i = (vtop->c.i != 0);
2735 #if PTR_SIZE == 8
2736 else if (dbt == VT_PTR)
2738 #endif
2739 else if (dbt != VT_LLONG) {
2740 uint32_t m = ((dbt & VT_BTYPE) == VT_BYTE ? 0xff :
2741 (dbt & VT_BTYPE) == VT_SHORT ? 0xffff :
2742 0xffffffff);
2743 vtop->c.i &= m;
2744 if (!(dbt & VT_UNSIGNED))
2745 vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
2748 } else if (p && dbt == VT_BOOL) {
2749 vtop->r = VT_CONST;
2750 vtop->c.i = 1;
2751 } else {
2752 /* non constant case: generate code */
2753 if (sf && df) {
2754 /* convert from fp to fp */
2755 gen_cvt_ftof(dbt);
2756 } else if (df) {
2757 /* convert int to fp */
2758 gen_cvt_itof1(dbt);
2759 } else if (sf) {
2760 /* convert fp to int */
2761 if (dbt == VT_BOOL) {
2762 vpushi(0);
2763 gen_op(TOK_NE);
2764 } else {
2765 /* we handle char/short/etc... with generic code */
2766 if (dbt != (VT_INT | VT_UNSIGNED) &&
2767 dbt != (VT_LLONG | VT_UNSIGNED) &&
2768 dbt != VT_LLONG)
2769 dbt = VT_INT;
2770 gen_cvt_ftoi1(dbt);
2771 if (dbt == VT_INT && (type->t & (VT_BTYPE | VT_UNSIGNED)) != dbt) {
2772 /* additional cast for char/short... */
2773 vtop->type.t = dbt;
2774 gen_cast(type);
2777 #if PTR_SIZE == 4
2778 } else if ((dbt & VT_BTYPE) == VT_LLONG) {
2779 if ((sbt & VT_BTYPE) != VT_LLONG) {
2780 /* scalar to long long */
2781 /* machine independent conversion */
2782 gv(RC_INT);
2783 /* generate high word */
2784 if (sbt == (VT_INT | VT_UNSIGNED)) {
2785 vpushi(0);
2786 gv(RC_INT);
2787 } else {
2788 if (sbt == VT_PTR) {
2789 /* cast from pointer to int before we apply
2790 shift operation, which pointers don't support */
2791 gen_cast_s(VT_INT);
2793 gv_dup();
2794 vpushi(31);
2795 gen_op(TOK_SAR);
2797 /* patch second register */
2798 vtop[-1].r2 = vtop->r;
2799 vpop();
2801 #else
2802 } else if ((dbt & VT_BTYPE) == VT_LLONG ||
2803 (dbt & VT_BTYPE) == VT_PTR ||
2804 (dbt & VT_BTYPE) == VT_FUNC) {
2805 if ((sbt & VT_BTYPE) != VT_LLONG &&
2806 (sbt & VT_BTYPE) != VT_PTR &&
2807 (sbt & VT_BTYPE) != VT_FUNC) {
2808 /* need to convert from 32bit to 64bit */
2809 gv(RC_INT);
2810 if (sbt != (VT_INT | VT_UNSIGNED)) {
2811 #if defined(TCC_TARGET_ARM64)
2812 gen_cvt_sxtw();
2813 #elif defined(TCC_TARGET_X86_64)
2814 int r = gv(RC_INT);
2815 /* x86_64 specific: movslq */
2816 o(0x6348);
2817 o(0xc0 + (REG_VALUE(r) << 3) + REG_VALUE(r));
2818 #else
2819 #error
2820 #endif
2823 #endif
2824 } else if (dbt == VT_BOOL) {
2825 /* scalar to bool */
2826 vpushi(0);
2827 gen_op(TOK_NE);
2828 } else if ((dbt & VT_BTYPE) == VT_BYTE ||
2829 (dbt & VT_BTYPE) == VT_SHORT) {
2830 if (sbt == VT_PTR) {
2831 vtop->type.t = VT_INT;
2832 tcc_warning("nonportable conversion from pointer to char/short");
2834 force_charshort_cast(dbt);
2835 } else if ((dbt & VT_BTYPE) == VT_INT) {
2836 /* scalar to int */
2837 if ((sbt & VT_BTYPE) == VT_LLONG) {
2838 #if PTR_SIZE == 4
2839 /* from long long: just take low order word */
2840 lexpand();
2841 vpop();
2842 #else
2843 vpushi(0xffffffff);
2844 vtop->type.t |= VT_UNSIGNED;
2845 gen_op('&');
2846 #endif
2848 /* if lvalue and single word type, nothing to do because
2849 the lvalue already contains the real type size (see
2850 VT_LVAL_xxx constants) */
2853 } else if ((dbt & VT_BTYPE) == VT_PTR && !(vtop->r & VT_LVAL)) {
2854 /* if we are casting between pointer types,
2855 we must update the VT_LVAL_xxx size */
2856 vtop->r = (vtop->r & ~VT_LVAL_TYPE)
2857 | (lvalue_type(type->ref->type.t) & VT_LVAL_TYPE);
2859 vtop->type = *type;
2860 vtop->type.t &= ~ ( VT_CONSTANT | VT_VOLATILE | VT_ARRAY );
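/* Illustrative example (disabled sketch, not used by the compiler): for
   constant operands gen_cast() performs the conversion immediately,
   including the truncate-and-sign-extend step for narrow destinations shown
   above (values given for the usual two's-complement targets). */
#if 0
#include <stdio.h>
int main(void)
{
    signed char c = (signed char)0x1FF;         /* masked to 0xFF, sign-extended: -1 */
    unsigned short s = (unsigned short)0x12345; /* masked to 0x2345 */
    _Bool b = 3.5;                              /* non-zero constant -> 1 */
    printf("%d %u %d\n", c, s, b);
    return 0;
}
#endif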
2863 /* return type size as known at compile time. Put alignment at 'a' */
2864 ST_FUNC int type_size(CType *type, int *a)
2866 Sym *s;
2867 int bt;
2869 bt = type->t & VT_BTYPE;
2870 if (bt == VT_STRUCT) {
2871 /* struct/union */
2872 s = type->ref;
2873 *a = s->r;
2874 return s->c;
2875 } else if (bt == VT_PTR) {
2876 if (type->t & VT_ARRAY) {
2877 int ts;
2879 s = type->ref;
2880 ts = type_size(&s->type, a);
2882 if (ts < 0 && s->c < 0)
2883 ts = -ts;
2885 return ts * s->c;
2886 } else {
2887 *a = PTR_SIZE;
2888 return PTR_SIZE;
2890 } else if (IS_ENUM(type->t) && type->ref->c < 0) {
2891 return -1; /* incomplete enum */
2892 } else if (bt == VT_LDOUBLE) {
2893 *a = LDOUBLE_ALIGN;
2894 return LDOUBLE_SIZE;
2895 } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
2896 #ifdef TCC_TARGET_I386
2897 #ifdef TCC_TARGET_PE
2898 *a = 8;
2899 #else
2900 *a = 4;
2901 #endif
2902 #elif defined(TCC_TARGET_ARM)
2903 #ifdef TCC_ARM_EABI
2904 *a = 8;
2905 #else
2906 *a = 4;
2907 #endif
2908 #else
2909 *a = 8;
2910 #endif
2911 return 8;
2912 } else if (bt == VT_INT || bt == VT_FLOAT) {
2913 *a = 4;
2914 return 4;
2915 } else if (bt == VT_SHORT) {
2916 *a = 2;
2917 return 2;
2918 } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
2919 *a = 8;
2920 return 16;
2921 } else {
2922 /* char, void, function, _Bool */
2923 *a = 1;
2924 return 1;
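/* Illustrative example (disabled sketch, not used by the compiler): the
   values returned by type_size() are what sizeof reports; note from the
   #ifdef ladder above that the alignment of double/long long is 4 on classic
   i386 but 8 on PE, ARM EABI and 64-bit targets, which shows up in struct
   layout. */
#if 0
#include <stdio.h>
#include <stddef.h>
struct pair { char c; double d; };
int main(void)
{
    /* offsetof(struct pair, d) is 4 where doubles are 4-aligned, else 8 */
    printf("%zu %zu\n", sizeof(struct pair), offsetof(struct pair, d));
    return 0;
}
#endif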
2928 /* push type size as known at run time on top of value stack. Put
2929 alignment at 'a' */
2930 ST_FUNC void vla_runtime_type_size(CType *type, int *a)
2932 if (type->t & VT_VLA) {
2933 type_size(&type->ref->type, a);
2934 vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
2935 } else {
2936 vpushi(type_size(type, a));
2940 static void vla_sp_restore(void) {
2941 if (vlas_in_scope) {
2942 gen_vla_sp_restore(vla_sp_loc);
2946 static void vla_sp_restore_root(void) {
2947 if (vlas_in_scope) {
2948 gen_vla_sp_restore(vla_sp_root_loc);
2952 /* return the pointed type of t */
2953 static inline CType *pointed_type(CType *type)
2955 return &type->ref->type;
2958 /* modify type so that it is a pointer to the original type. */
2959 ST_FUNC void mk_pointer(CType *type)
2961 Sym *s;
2962 s = sym_push(SYM_FIELD, type, 0, -1);
2963 type->t = VT_PTR | (type->t & VT_STORAGE);
2964 type->ref = s;
2967 /* compare function types. OLD functions match any new functions */
2968 static int is_compatible_func(CType *type1, CType *type2)
2970 Sym *s1, *s2;
2972 s1 = type1->ref;
2973 s2 = type2->ref;
2974 if (s1->f.func_call != s2->f.func_call)
2975 return 0;
2976 if (s1->f.func_type != s2->f.func_type
2977 && s1->f.func_type != FUNC_OLD
2978 && s2->f.func_type != FUNC_OLD)
2979 return 0;
2980 /* we should check the function return type for FUNC_OLD too
2981 but that causes problems with the internally used support
2982 functions such as TOK_memmove */
2983 if (s1->f.func_type == FUNC_OLD && !s1->next)
2984 return 1;
2985 if (s2->f.func_type == FUNC_OLD && !s2->next)
2986 return 1;
2987 for (;;) {
2988 if (!is_compatible_unqualified_types(&s1->type, &s2->type))
2989 return 0;
2990 s1 = s1->next;
2991 s2 = s2->next;
2992 if (!s1)
2993 return !s2;
2994 if (!s2)
2995 return 0;
2999 /* return true if type1 and type2 are the same. If unqualified is
3000 true, qualifiers on the types are ignored.
3002 static int compare_types(CType *type1, CType *type2, int unqualified)
3004 int bt1, t1, t2;
3006 t1 = type1->t & VT_TYPE;
3007 t2 = type2->t & VT_TYPE;
3008 if (unqualified) {
3009 /* strip qualifiers before comparing */
3010 t1 &= ~(VT_CONSTANT | VT_VOLATILE);
3011 t2 &= ~(VT_CONSTANT | VT_VOLATILE);
3014 /* Default vs. explicit signedness only matters for char */
3015 if ((t1 & VT_BTYPE) != VT_BYTE) {
3016 t1 &= ~VT_DEFSIGN;
3017 t2 &= ~VT_DEFSIGN;
3019 /* XXX: bitfields ? */
3020 if (t1 != t2)
3021 return 0;
3023 if ((t1 & VT_ARRAY)
3024 && !(type1->ref->c < 0
3025 || type2->ref->c < 0
3026 || type1->ref->c == type2->ref->c))
3027 return 0;
3029 /* test more complicated cases */
3030 bt1 = t1 & VT_BTYPE;
3031 if (bt1 == VT_PTR) {
3032 type1 = pointed_type(type1);
3033 type2 = pointed_type(type2);
3034 return is_compatible_types(type1, type2);
3035 } else if (bt1 == VT_STRUCT) {
3036 return (type1->ref == type2->ref);
3037 } else if (bt1 == VT_FUNC) {
3038 return is_compatible_func(type1, type2);
3039 } else if (IS_ENUM(type1->t) || IS_ENUM(type2->t)) {
3040 return type1->ref == type2->ref;
3041 } else {
3042 return 1;
3046 /* return true if type1 and type2 are exactly the same (including
3047 qualifiers).
3049 static int is_compatible_types(CType *type1, CType *type2)
3051 return compare_types(type1,type2,0);
3054 /* return true if type1 and type2 are the same (ignoring qualifiers).
3056 static int is_compatible_unqualified_types(CType *type1, CType *type2)
3058 return compare_types(type1,type2,1);
3061 /* print a type. If 'varstr' is not NULL, then the variable is also
3062 printed in the type */
3063 /* XXX: union */
3064 /* XXX: add array and function pointers */
3065 static void type_to_str(char *buf, int buf_size,
3066 CType *type, const char *varstr)
3068 int bt, v, t;
3069 Sym *s, *sa;
3070 char buf1[256];
3071 const char *tstr;
3073 t = type->t;
3074 bt = t & VT_BTYPE;
3075 buf[0] = '\0';
3077 if (t & VT_EXTERN)
3078 pstrcat(buf, buf_size, "extern ");
3079 if (t & VT_STATIC)
3080 pstrcat(buf, buf_size, "static ");
3081 if (t & VT_TYPEDEF)
3082 pstrcat(buf, buf_size, "typedef ");
3083 if (t & VT_INLINE)
3084 pstrcat(buf, buf_size, "inline ");
3085 if (t & VT_VOLATILE)
3086 pstrcat(buf, buf_size, "volatile ");
3087 if (t & VT_CONSTANT)
3088 pstrcat(buf, buf_size, "const ");
3090 if (((t & VT_DEFSIGN) && bt == VT_BYTE)
3091 || ((t & VT_UNSIGNED)
3092 && (bt == VT_SHORT || bt == VT_INT || bt == VT_LLONG)
3093 && !IS_ENUM(t)
3095 pstrcat(buf, buf_size, (t & VT_UNSIGNED) ? "unsigned " : "signed ");
3097 buf_size -= strlen(buf);
3098 buf += strlen(buf);
3100 switch(bt) {
3101 case VT_VOID:
3102 tstr = "void";
3103 goto add_tstr;
3104 case VT_BOOL:
3105 tstr = "_Bool";
3106 goto add_tstr;
3107 case VT_BYTE:
3108 tstr = "char";
3109 goto add_tstr;
3110 case VT_SHORT:
3111 tstr = "short";
3112 goto add_tstr;
3113 case VT_INT:
3114 tstr = "int";
3115 goto maybe_long;
3116 case VT_LLONG:
3117 tstr = "long long";
3118 maybe_long:
3119 if (t & VT_LONG)
3120 tstr = "long";
3121 if (!IS_ENUM(t))
3122 goto add_tstr;
3123 tstr = "enum ";
3124 goto tstruct;
3125 case VT_FLOAT:
3126 tstr = "float";
3127 goto add_tstr;
3128 case VT_DOUBLE:
3129 tstr = "double";
3130 goto add_tstr;
3131 case VT_LDOUBLE:
3132 tstr = "long double";
3133 add_tstr:
3134 pstrcat(buf, buf_size, tstr);
3135 break;
3136 case VT_STRUCT:
3137 tstr = "struct ";
3138 if (IS_UNION(t))
3139 tstr = "union ";
3140 tstruct:
3141 pstrcat(buf, buf_size, tstr);
3142 v = type->ref->v & ~SYM_STRUCT;
3143 if (v >= SYM_FIRST_ANOM)
3144 pstrcat(buf, buf_size, "<anonymous>");
3145 else
3146 pstrcat(buf, buf_size, get_tok_str(v, NULL));
3147 break;
3148 case VT_FUNC:
3149 s = type->ref;
3150 buf1[0]=0;
3151 if (varstr && '*' == *varstr) {
3152 pstrcat(buf1, sizeof(buf1), "(");
3153 pstrcat(buf1, sizeof(buf1), varstr);
3154 pstrcat(buf1, sizeof(buf1), ")");
3156 pstrcat(buf1, sizeof(buf1), "(");
3157 sa = s->next;
3158 while (sa != NULL) {
3159 char buf2[256];
3160 type_to_str(buf2, sizeof(buf2), &sa->type, NULL);
3161 pstrcat(buf1, sizeof(buf1), buf2);
3162 sa = sa->next;
3163 if (sa)
3164 pstrcat(buf1, sizeof(buf1), ", ");
3166 if (s->f.func_type == FUNC_ELLIPSIS)
3167 pstrcat(buf1, sizeof(buf1), ", ...");
3168 pstrcat(buf1, sizeof(buf1), ")");
3169 type_to_str(buf, buf_size, &s->type, buf1);
3170 goto no_var;
3171 case VT_PTR:
3172 s = type->ref;
3173 if (t & VT_ARRAY) {
3174 if (varstr && '*' == *varstr)
3175 snprintf(buf1, sizeof(buf1), "(%s)[%d]", varstr, s->c);
3176 else
3177 snprintf(buf1, sizeof(buf1), "%s[%d]", varstr ? varstr : "", s->c);
3178 type_to_str(buf, buf_size, &s->type, buf1);
3179 goto no_var;
3181 pstrcpy(buf1, sizeof(buf1), "*");
3182 if (t & VT_CONSTANT)
3183 pstrcat(buf1, sizeof(buf1), "const ");
3184 if (t & VT_VOLATILE)
3185 pstrcat(buf1, sizeof(buf1), "volatile ");
3186 if (varstr)
3187 pstrcat(buf1, sizeof(buf1), varstr);
3188 type_to_str(buf, buf_size, &s->type, buf1);
3189 goto no_var;
3191 if (varstr) {
3192 pstrcat(buf, buf_size, " ");
3193 pstrcat(buf, buf_size, varstr);
3195 no_var: ;
3198 /* verify type compatibility to store vtop in 'dt' type, and generate
3199 casts if needed. */
3200 static void gen_assign_cast(CType *dt)
3202 CType *st, *type1, *type2;
3203 char buf1[256], buf2[256];
3204 int dbt, sbt, qualwarn, lvl;
3206 st = &vtop->type; /* source type */
3207 dbt = dt->t & VT_BTYPE;
3208 sbt = st->t & VT_BTYPE;
3209 if (sbt == VT_VOID || dbt == VT_VOID) {
3210 if (sbt == VT_VOID && dbt == VT_VOID)
3211 ; /* It is Ok if both are void */
3212 else
3213 tcc_error("cannot cast from/to void");
3215 if (dt->t & VT_CONSTANT)
3216 tcc_warning("assignment of read-only location");
3217 switch(dbt) {
3218 case VT_PTR:
3219 /* special cases for pointers */
3220 /* '0' can also be a pointer */
3221 if (is_null_pointer(vtop))
3222 break;
3223 /* accept implicit pointer to integer cast with warning */
3224 if (is_integer_btype(sbt)) {
3225 tcc_warning("assignment makes pointer from integer without a cast");
3226 break;
3228 type1 = pointed_type(dt);
3229 if (sbt == VT_PTR)
3230 type2 = pointed_type(st);
3231 else if (sbt == VT_FUNC)
3232 type2 = st; /* a function is implicitly a function pointer */
3233 else
3234 goto error;
3235 if (is_compatible_types(type1, type2))
3236 break;
3237 for (qualwarn = lvl = 0;; ++lvl) {
3238 if (((type2->t & VT_CONSTANT) && !(type1->t & VT_CONSTANT)) ||
3239 ((type2->t & VT_VOLATILE) && !(type1->t & VT_VOLATILE)))
3240 qualwarn = 1;
3241 dbt = type1->t & (VT_BTYPE|VT_LONG);
3242 sbt = type2->t & (VT_BTYPE|VT_LONG);
3243 if (dbt != VT_PTR || sbt != VT_PTR)
3244 break;
3245 type1 = pointed_type(type1);
3246 type2 = pointed_type(type2);
3248 if (!is_compatible_unqualified_types(type1, type2)) {
3249 if ((dbt == VT_VOID || sbt == VT_VOID) && lvl == 0) {
3250 /* void * can match anything */
3251 } else if (dbt == sbt
3252 && is_integer_btype(sbt & VT_BTYPE)
3253 && IS_ENUM(type1->t) + IS_ENUM(type2->t)
3254 + !!((type1->t ^ type2->t) & VT_UNSIGNED) < 2) {
3255 /* Like GCC, don't warn by default for mere changes
3256 in pointer target signedness. Do warn for different
3257 base types, though, in particular for unsigned enums
3258 and signed int targets. */
3259 } else {
3260 tcc_warning("assignment from incompatible pointer type");
3261 break;
3264 if (qualwarn)
3265 tcc_warning("assignment discards qualifiers from pointer target type");
3266 break;
3267 case VT_BYTE:
3268 case VT_SHORT:
3269 case VT_INT:
3270 case VT_LLONG:
3271 if (sbt == VT_PTR || sbt == VT_FUNC) {
3272 tcc_warning("assignment makes integer from pointer without a cast");
3273 } else if (sbt == VT_STRUCT) {
3274 goto case_VT_STRUCT;
3276 /* XXX: more tests */
3277 break;
3278 case VT_STRUCT:
3279 case_VT_STRUCT:
3280 if (!is_compatible_unqualified_types(dt, st)) {
3281 error:
3282 type_to_str(buf1, sizeof(buf1), st, NULL);
3283 type_to_str(buf2, sizeof(buf2), dt, NULL);
3284 tcc_error("cannot cast '%s' to '%s'", buf1, buf2);
3286 break;
3288 gen_cast(dt);
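/* Illustrative example (disabled sketch, not used by the compiler; 'g' is a
   made-up name): gen_assign_cast() accepts a null pointer constant silently,
   warns when a pointer is made from an integer or assigned from an
   incompatible pointer type, and only errors out for combinations it cannot
   cast at all. */
#if 0
void g(void)
{
    int n = 42;
    int *p = 0;    /* ok: null pointer constant */
    char *q = p;   /* warning: incompatible pointer type */
    int *r = n;    /* warning: pointer from integer without a cast */
    (void)q; (void)r;
}
#endif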
3291 /* store vtop in lvalue pushed on stack */
3292 ST_FUNC void vstore(void)
3294 int sbt, dbt, ft, r, t, size, align, bit_size, bit_pos, rc, delayed_cast;
3296 ft = vtop[-1].type.t;
3297 sbt = vtop->type.t & VT_BTYPE;
3298 dbt = ft & VT_BTYPE;
3299 if ((((sbt == VT_INT || sbt == VT_SHORT) && dbt == VT_BYTE) ||
3300 (sbt == VT_INT && dbt == VT_SHORT))
3301 && !(vtop->type.t & VT_BITFIELD)) {
3302 /* optimize char/short casts */
3303 delayed_cast = VT_MUSTCAST;
3304 vtop->type.t = ft & VT_TYPE;
3305 /* XXX: factorize */
3306 if (ft & VT_CONSTANT)
3307 tcc_warning("assignment of read-only location");
3308 } else {
3309 delayed_cast = 0;
3310 if (!(ft & VT_BITFIELD))
3311 gen_assign_cast(&vtop[-1].type);
3314 if (sbt == VT_STRUCT) {
3315 /* if structure, only generate pointer */
3316 /* structure assignment : generate memcpy */
3317 /* XXX: optimize if small size */
3318 size = type_size(&vtop->type, &align);
3320 /* destination */
3321 vswap();
3322 vtop->type.t = VT_PTR;
3323 gaddrof();
3325 /* address of memcpy() */
3326 #ifdef TCC_ARM_EABI
3327 if(!(align & 7))
3328 vpush_global_sym(&func_old_type, TOK_memcpy8);
3329 else if(!(align & 3))
3330 vpush_global_sym(&func_old_type, TOK_memcpy4);
3331 else
3332 #endif
3333 /* Use memmove, rather than memcpy, as dest and src may be the same: */
3334 vpush_global_sym(&func_old_type, TOK_memmove);
3336 vswap();
3337 /* source */
3338 vpushv(vtop - 2);
3339 vtop->type.t = VT_PTR;
3340 gaddrof();
3341 /* type size */
3342 vpushi(size);
3343 gfunc_call(3);
3345 /* leave source on stack */
3346 } else if (ft & VT_BITFIELD) {
3347 /* bitfield store handling */
3349 /* save lvalue as expression result (example: s.b = s.a = n;) */
3350 vdup(), vtop[-1] = vtop[-2];
3352 bit_pos = BIT_POS(ft);
3353 bit_size = BIT_SIZE(ft);
3354 /* remove bit field info to avoid loops */
3355 vtop[-1].type.t = ft & ~VT_STRUCT_MASK;
3357 if ((ft & VT_BTYPE) == VT_BOOL) {
3358 gen_cast(&vtop[-1].type);
3359 vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
3362 r = adjust_bf(vtop - 1, bit_pos, bit_size);
3363 if (r == VT_STRUCT) {
3364 gen_cast_s((ft & VT_BTYPE) == VT_LLONG ? VT_LLONG : VT_INT);
3365 store_packed_bf(bit_pos, bit_size);
3366 } else {
3367 unsigned long long mask = (1ULL << bit_size) - 1;
3368 if ((ft & VT_BTYPE) != VT_BOOL) {
3369 /* mask source */
3370 if ((vtop[-1].type.t & VT_BTYPE) == VT_LLONG)
3371 vpushll(mask);
3372 else
3373 vpushi((unsigned)mask);
3374 gen_op('&');
3376 /* shift source */
3377 vpushi(bit_pos);
3378 gen_op(TOK_SHL);
3379 vswap();
3380 /* duplicate destination */
3381 vdup();
3382 vrott(3);
3383 /* load destination, mask and or with source */
3384 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
3385 vpushll(~(mask << bit_pos));
3386 else
3387 vpushi(~((unsigned)mask << bit_pos));
3388 gen_op('&');
3389 gen_op('|');
3390 /* store result */
3391 vstore();
3392 /* ... and discard */
3393 vpop();
3395 } else if (dbt == VT_VOID) {
3396 --vtop;
3397 } else {
3398 #ifdef CONFIG_TCC_BCHECK
3399 /* bound check case */
3400 if (vtop[-1].r & VT_MUSTBOUND) {
3401 vswap();
3402 gbound();
3403 vswap();
3405 #endif
3406 rc = RC_INT;
3407 if (is_float(ft)) {
3408 rc = RC_FLOAT;
3409 #ifdef TCC_TARGET_X86_64
3410 if ((ft & VT_BTYPE) == VT_LDOUBLE) {
3411 rc = RC_ST0;
3412 } else if ((ft & VT_BTYPE) == VT_QFLOAT) {
3413 rc = RC_FRET;
3415 #endif
3417 r = gv(rc); /* generate value */
3418 /* if lvalue was saved on stack, must read it */
3419 if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
3420 SValue sv;
3421 t = get_reg(RC_INT);
3422 #if PTR_SIZE == 8
3423 sv.type.t = VT_PTR;
3424 #else
3425 sv.type.t = VT_INT;
3426 #endif
3427 sv.r = VT_LOCAL | VT_LVAL;
3428 sv.c.i = vtop[-1].c.i;
3429 load(t, &sv);
3430 vtop[-1].r = t | VT_LVAL;
3432 /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
3433 #if PTR_SIZE == 8
3434 if (((ft & VT_BTYPE) == VT_QLONG) || ((ft & VT_BTYPE) == VT_QFLOAT)) {
3435 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
3436 #else
3437 if ((ft & VT_BTYPE) == VT_LLONG) {
3438 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
3439 #endif
3440 vtop[-1].type.t = load_type;
3441 store(r, vtop - 1);
3442 vswap();
3443 /* convert to int to increment easily */
3444 vtop->type.t = addr_type;
3445 gaddrof();
3446 vpushi(load_size);
3447 gen_op('+');
3448 vtop->r |= VT_LVAL;
3449 vswap();
3450 vtop[-1].type.t = load_type;
3451 /* XXX: it works because r2 is spilled last ! */
3452 store(vtop->r2, vtop - 1);
3453 } else {
3454 store(r, vtop - 1);
3457 vswap();
3458 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
3459 vtop->r |= delayed_cast;
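/* Illustrative example (disabled sketch, not used by the compiler): vstore()
   turns a struct assignment into a call to memmove (memcpy4/memcpy8 on ARM
   EABI when the alignment allows), and implements a bit-field store with the
   mask/shift/or read-modify-write sequence shown above. */
#if 0
struct S { int x : 5; int y : 7; int rest; };
void copy(struct S *dst, const struct S *src)
{
    *dst = *src;   /* compiled as memmove(dst, src, sizeof(struct S)) */
    dst->y = 42;   /* masked with 0x7f, shifted to bit 5, or-ed into place */
}
#endif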
3463 /* 'post' selects post- vs pre- increment/decrement. c is the token ++ or -- */
3464 ST_FUNC void inc(int post, int c)
3466 test_lvalue();
3467 vdup(); /* save lvalue */
3468 if (post) {
3469 gv_dup(); /* duplicate value */
3470 vrotb(3);
3471 vrotb(3);
3473 /* add constant */
3474 vpushi(c - TOK_MID);
3475 gen_op('+');
3476 vstore(); /* store value */
3477 if (post)
3478 vpop(); /* if post op, return saved value */
3481 ST_FUNC void parse_mult_str (CString *astr, const char *msg)
3483 /* read the string */
3484 if (tok != TOK_STR)
3485 expect(msg);
3486 cstr_new(astr);
3487 while (tok == TOK_STR) {
3488 /* XXX: add \0 handling too ? */
3489 cstr_cat(astr, tokc.str.data, -1);
3490 next();
3492 cstr_ccat(astr, '\0');
3495 /* If I is >= 1 and a power of two, returns log2(i)+1.
3496 If I is 0 returns 0. */
3497 static int exact_log2p1(int i)
3499 int ret;
3500 if (!i)
3501 return 0;
3502 for (ret = 1; i >= 1 << 8; ret += 8)
3503 i >>= 8;
3504 if (i >= 1 << 4)
3505 ret += 4, i >>= 4;
3506 if (i >= 1 << 2)
3507 ret += 2, i >>= 2;
3508 if (i >= 1 << 1)
3509 ret++;
3510 return ret;
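/* Illustrative check (disabled, not used by the compiler): exact_log2p1()
   maps an alignment value to the encoding stored in a.aligned, e.g.
   1 -> 1, 2 -> 2, 4 -> 3, 8 -> 4, 4096 -> 13, and 0 -> 0; the power-of-two
   and range checks are done separately by the callers. */
#if 0
#include <assert.h>
static void check_exact_log2p1(void)
{
    assert(exact_log2p1(0) == 0);
    assert(exact_log2p1(1) == 1);
    assert(exact_log2p1(8) == 4);
    assert(exact_log2p1(4096) == 13);
}
#endif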
3513 /* Parse __attribute__((...)) GNUC extension. */
3514 static void parse_attribute(AttributeDef *ad)
3516 int t, n;
3517 CString astr;
3519 redo:
3520 if (tok != TOK_ATTRIBUTE1 && tok != TOK_ATTRIBUTE2)
3521 return;
3522 next();
3523 skip('(');
3524 skip('(');
3525 while (tok != ')') {
3526 if (tok < TOK_IDENT)
3527 expect("attribute name");
3528 t = tok;
3529 next();
3530 switch(t) {
3531 case TOK_CLEANUP1:
3532 case TOK_CLEANUP2:
3534 Sym *s;
3536 skip('(');
3537 s = sym_find(tok);
3538 if (!s) {
3539 tcc_warning("implicit declaration of function '%s'",
3540 get_tok_str(tok, &tokc));
3541 s = external_global_sym(tok, &func_old_type);
3543 ad->cleanup_func = s;
3544 next();
3545 skip(')');
3546 break;
3548 case TOK_SECTION1:
3549 case TOK_SECTION2:
3550 skip('(');
3551 parse_mult_str(&astr, "section name");
3552 ad->section = find_section(tcc_state, (char *)astr.data);
3553 skip(')');
3554 cstr_free(&astr);
3555 break;
3556 case TOK_ALIAS1:
3557 case TOK_ALIAS2:
3558 skip('(');
3559 parse_mult_str(&astr, "alias(\"target\")");
3560 ad->alias_target = /* save string as token, for later */
3561 tok_alloc((char*)astr.data, astr.size-1)->tok;
3562 skip(')');
3563 cstr_free(&astr);
3564 break;
3565 case TOK_VISIBILITY1:
3566 case TOK_VISIBILITY2:
3567 skip('(');
3568 parse_mult_str(&astr,
3569 "visibility(\"default|hidden|internal|protected\")");
3570 if (!strcmp (astr.data, "default"))
3571 ad->a.visibility = STV_DEFAULT;
3572 else if (!strcmp (astr.data, "hidden"))
3573 ad->a.visibility = STV_HIDDEN;
3574 else if (!strcmp (astr.data, "internal"))
3575 ad->a.visibility = STV_INTERNAL;
3576 else if (!strcmp (astr.data, "protected"))
3577 ad->a.visibility = STV_PROTECTED;
3578 else
3579 expect("visibility(\"default|hidden|internal|protected\")");
3580 skip(')');
3581 cstr_free(&astr);
3582 break;
3583 case TOK_ALIGNED1:
3584 case TOK_ALIGNED2:
3585 if (tok == '(') {
3586 next();
3587 n = expr_const();
3588 if (n <= 0 || (n & (n - 1)) != 0)
3589 tcc_error("alignment must be a positive power of two");
3590 skip(')');
3591 } else {
3592 n = MAX_ALIGN;
3594 ad->a.aligned = exact_log2p1(n);
3595 if (n != 1 << (ad->a.aligned - 1))
3596 tcc_error("alignment of %d is larger than implemented", n);
3597 break;
3598 case TOK_PACKED1:
3599 case TOK_PACKED2:
3600 ad->a.packed = 1;
3601 break;
3602 case TOK_WEAK1:
3603 case TOK_WEAK2:
3604 ad->a.weak = 1;
3605 break;
3606 case TOK_UNUSED1:
3607 case TOK_UNUSED2:
3608 /* currently, no need to handle it because tcc does not
3609 track unused objects */
3610 break;
3611 case TOK_NORETURN1:
3612 case TOK_NORETURN2:
3613 ad->f.func_noreturn = 1;
3614 break;
3615 case TOK_CDECL1:
3616 case TOK_CDECL2:
3617 case TOK_CDECL3:
3618 ad->f.func_call = FUNC_CDECL;
3619 break;
3620 case TOK_STDCALL1:
3621 case TOK_STDCALL2:
3622 case TOK_STDCALL3:
3623 ad->f.func_call = FUNC_STDCALL;
3624 break;
3625 #ifdef TCC_TARGET_I386
3626 case TOK_REGPARM1:
3627 case TOK_REGPARM2:
3628 skip('(');
3629 n = expr_const();
3630 if (n > 3)
3631 n = 3;
3632 else if (n < 0)
3633 n = 0;
3634 if (n > 0)
3635 ad->f.func_call = FUNC_FASTCALL1 + n - 1;
3636 skip(')');
3637 break;
3638 case TOK_FASTCALL1:
3639 case TOK_FASTCALL2:
3640 case TOK_FASTCALL3:
3641 ad->f.func_call = FUNC_FASTCALLW;
3642 break;
3643 #endif
3644 case TOK_MODE:
3645 skip('(');
3646 switch(tok) {
3647 case TOK_MODE_DI:
3648 ad->attr_mode = VT_LLONG + 1;
3649 break;
3650 case TOK_MODE_QI:
3651 ad->attr_mode = VT_BYTE + 1;
3652 break;
3653 case TOK_MODE_HI:
3654 ad->attr_mode = VT_SHORT + 1;
3655 break;
3656 case TOK_MODE_SI:
3657 case TOK_MODE_word:
3658 ad->attr_mode = VT_INT + 1;
3659 break;
3660 default:
3661 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok, NULL));
3662 break;
3664 next();
3665 skip(')');
3666 break;
3667 case TOK_DLLEXPORT:
3668 ad->a.dllexport = 1;
3669 break;
3670 case TOK_NODECORATE:
3671 ad->a.nodecorate = 1;
3672 break;
3673 case TOK_DLLIMPORT:
3674 ad->a.dllimport = 1;
3675 break;
3676 default:
3677 if (tcc_state->warn_unsupported)
3678 tcc_warning("'%s' attribute ignored", get_tok_str(t, NULL));
3679 /* skip parameters */
3680 if (tok == '(') {
3681 int parenthesis = 0;
3682 do {
3683 if (tok == '(')
3684 parenthesis++;
3685 else if (tok == ')')
3686 parenthesis--;
3687 next();
3688 } while (parenthesis && tok != -1);
3690 break;
3692 if (tok != ',')
3693 break;
3694 next();
3696 skip(')');
3697 skip(')');
3698 goto redo;
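/* Illustrative example (disabled sketch, not used by the compiler): the
   attribute parser above accepts the usual GNU spelling, several attributes
   per list, and skips the parameters of unknown ones (with a warning when
   unsupported-attribute warnings are enabled). */
#if 0
int counter __attribute__((aligned(16), unused));
void die(const char *msg) __attribute__((noreturn));
int fancy __attribute__((section(".fancy"), weak));
#endif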
3701 static Sym * find_field (CType *type, int v, int *cumofs)
3703 Sym *s = type->ref;
3704 v |= SYM_FIELD;
3705 while ((s = s->next) != NULL) {
3706 if ((s->v & SYM_FIELD) &&
3707 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3708 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3709 Sym *ret = find_field (&s->type, v, cumofs);
3710 if (ret) {
3711 *cumofs += s->c;
3712 return ret;
3715 if (s->v == v)
3716 break;
3718 return s;
3721 static void struct_layout(CType *type, AttributeDef *ad)
3723 int size, align, maxalign, offset, c, bit_pos, bit_size;
3724 int packed, a, bt, prevbt, prev_bit_size;
3725 int pcc = !tcc_state->ms_bitfields;
3726 int pragma_pack = *tcc_state->pack_stack_ptr;
3727 Sym *f;
3729 maxalign = 1;
3730 offset = 0;
3731 c = 0;
3732 bit_pos = 0;
3733 prevbt = VT_STRUCT; /* make it never match */
3734 prev_bit_size = 0;
3736 //#define BF_DEBUG
3738 for (f = type->ref->next; f; f = f->next) {
3739 if (f->type.t & VT_BITFIELD)
3740 bit_size = BIT_SIZE(f->type.t);
3741 else
3742 bit_size = -1;
3743 size = type_size(&f->type, &align);
3744 a = f->a.aligned ? 1 << (f->a.aligned - 1) : 0;
3745 packed = 0;
3747 if (pcc && bit_size == 0) {
3748 /* in pcc mode, packing does not affect zero-width bitfields */
3750 } else {
3751 /* in pcc mode, attribute packed overrides if set. */
3752 if (pcc && (f->a.packed || ad->a.packed))
3753 align = packed = 1;
3755 /* pragma pack overrides align if it is smaller, and always packs bitfields */
3756 if (pragma_pack) {
3757 packed = 1;
3758 if (pragma_pack < align)
3759 align = pragma_pack;
3760 /* in pcc mode pragma pack also overrides individual align */
3761 if (pcc && pragma_pack < a)
3762 a = 0;
3765 /* some individual align was specified */
3766 if (a)
3767 align = a;
3769 if (type->ref->type.t == VT_UNION) {
3770 if (pcc && bit_size >= 0)
3771 size = (bit_size + 7) >> 3;
3772 offset = 0;
3773 if (size > c)
3774 c = size;
3776 } else if (bit_size < 0) {
3777 if (pcc)
3778 c += (bit_pos + 7) >> 3;
3779 c = (c + align - 1) & -align;
3780 offset = c;
3781 if (size > 0)
3782 c += size;
3783 bit_pos = 0;
3784 prevbt = VT_STRUCT;
3785 prev_bit_size = 0;
3787 } else {
3788 /* A bit-field. Layout is more complicated. There are two
3789 options: PCC (GCC) compatible and MS compatible */
3790 if (pcc) {
3791 /* In PCC layout a bit-field is placed adjacent to the
3792 preceding bit-fields, except if:
3793 - it has zero-width
3794 - an individual alignment was given
3795 - it would overflow its base type container and
3796 there is no packing */
3797 if (bit_size == 0) {
3798 new_field:
3799 c = (c + ((bit_pos + 7) >> 3) + align - 1) & -align;
3800 bit_pos = 0;
3801 } else if (f->a.aligned) {
3802 goto new_field;
3803 } else if (!packed) {
3804 int a8 = align * 8;
3805 int ofs = ((c * 8 + bit_pos) % a8 + bit_size + a8 - 1) / a8;
3806 if (ofs > size / align)
3807 goto new_field;
3810 /* in pcc mode, long long bitfields have type int if they fit */
3811 if (size == 8 && bit_size <= 32)
3812 f->type.t = (f->type.t & ~VT_BTYPE) | VT_INT, size = 4;
3814 while (bit_pos >= align * 8)
3815 c += align, bit_pos -= align * 8;
3816 offset = c;
3818 /* In PCC layout named bit-fields influence the alignment
3819 of the containing struct using the base type's alignment,
3820 except for packed fields (which here have correct align). */
3821 if (f->v & SYM_FIRST_ANOM
3822 // && bit_size // ??? gcc on ARM/rpi does that
3824 align = 1;
3826 } else {
3827 bt = f->type.t & VT_BTYPE;
3828 if ((bit_pos + bit_size > size * 8)
3829 || (bit_size > 0) == (bt != prevbt)
3831 c = (c + align - 1) & -align;
3832 offset = c;
3833 bit_pos = 0;
3834 /* In MS bitfield mode a bit-field run always uses
3835 at least as many bits as the underlying type.
3836 To start a new run it's also required that this
3837 or the last bit-field had non-zero width. */
3838 if (bit_size || prev_bit_size)
3839 c += size;
3841 /* In MS layout the record's alignment is normally
3842 influenced by the field, except for a zero-width
3843 field at the start of a run (but by further zero-width
3844 fields it is again). */
3845 if (bit_size == 0 && prevbt != bt)
3846 align = 1;
3847 prevbt = bt;
3848 prev_bit_size = bit_size;
3851 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
3852 | (bit_pos << VT_STRUCT_SHIFT);
3853 bit_pos += bit_size;
3855 if (align > maxalign)
3856 maxalign = align;
3858 #ifdef BF_DEBUG
3859 printf("set field %s offset %-2d size %-2d align %-2d",
3860 get_tok_str(f->v & ~SYM_FIELD, NULL), offset, size, align);
3861 if (f->type.t & VT_BITFIELD) {
3862 printf(" pos %-2d bits %-2d",
3863 BIT_POS(f->type.t),
3864 BIT_SIZE(f->type.t)
3867 printf("\n");
3868 #endif
3870 f->c = offset;
3871 f->r = 0;
3874 if (pcc)
3875 c += (bit_pos + 7) >> 3;
3877 /* store size and alignment */
3878 a = bt = ad->a.aligned ? 1 << (ad->a.aligned - 1) : 1;
3879 if (a < maxalign)
3880 a = maxalign;
3881 type->ref->r = a;
3882 if (pragma_pack && pragma_pack < maxalign && 0 == pcc) {
3883 /* can happen if individual align for some member was given. In
3884 this case MSVC ignores maxalign when aligning the size */
3885 a = pragma_pack;
3886 if (a < bt)
3887 a = bt;
3889 c = (c + a - 1) & -a;
3890 type->ref->c = c;
3892 #ifdef BF_DEBUG
3893 printf("struct size %-2d align %-2d\n\n", c, a), fflush(stdout);
3894 #endif
3896 /* check whether we can access bitfields by their type */
3897 for (f = type->ref->next; f; f = f->next) {
3898 int s, px, cx, c0;
3899 CType t;
3901 if (0 == (f->type.t & VT_BITFIELD))
3902 continue;
3903 f->type.ref = f;
3904 f->auxtype = -1;
3905 bit_size = BIT_SIZE(f->type.t);
3906 if (bit_size == 0)
3907 continue;
3908 bit_pos = BIT_POS(f->type.t);
3909 size = type_size(&f->type, &align);
3910 if (bit_pos + bit_size <= size * 8 && f->c + size <= c)
3911 continue;
3913 /* try to access the field using a different type */
3914 c0 = -1, s = align = 1;
3915 for (;;) {
3916 px = f->c * 8 + bit_pos;
3917 cx = (px >> 3) & -align;
3918 px = px - (cx << 3);
3919 if (c0 == cx)
3920 break;
3921 s = (px + bit_size + 7) >> 3;
3922 if (s > 4) {
3923 t.t = VT_LLONG;
3924 } else if (s > 2) {
3925 t.t = VT_INT;
3926 } else if (s > 1) {
3927 t.t = VT_SHORT;
3928 } else {
3929 t.t = VT_BYTE;
3931 s = type_size(&t, &align);
3932 c0 = cx;
3935 if (px + bit_size <= s * 8 && cx + s <= c) {
3936 /* update offset and bit position */
3937 f->c = cx;
3938 bit_pos = px;
3939 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
3940 | (bit_pos << VT_STRUCT_SHIFT);
3941 if (s != size)
3942 f->auxtype = t.t;
3943 #ifdef BF_DEBUG
3944 printf("FIX field %s offset %-2d size %-2d align %-2d "
3945 "pos %-2d bits %-2d\n",
3946 get_tok_str(f->v & ~SYM_FIELD, NULL),
3947 cx, s, align, px, bit_size);
3948 #endif
3949 } else {
3950 /* fall back to load/store single-byte wise */
3951 f->auxtype = VT_STRUCT;
3952 #ifdef BF_DEBUG
3953 printf("FIX field %s : load byte-wise\n",
3954 get_tok_str(f->v & ~SYM_FIELD, NULL));
3955 #endif
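/* Illustrative example (disabled sketch, not used by the compiler): with the
   default PCC/GCC rules implemented above, adjacent bit-fields share one
   container, so this struct occupies 4 bytes; with MS bit-field layout the
   char run and the int run get separate containers and the struct grows to
   8 bytes. */
#if 0
#include <stdio.h>
struct bf {
    char a : 3;
    char b : 3;
    int  c : 10;
};
int main(void)
{
    printf("%zu\n", sizeof(struct bf));   /* 4 with PCC layout, 8 with MS */
    return 0;
}
#endif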
3960 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
3961 static void struct_decl(CType *type, int u)
3963 int v, c, size, align, flexible;
3964 int bit_size, bsize, bt;
3965 Sym *s, *ss, **ps;
3966 AttributeDef ad, ad1;
3967 CType type1, btype;
3969 memset(&ad, 0, sizeof ad);
3970 next();
3971 parse_attribute(&ad);
3972 if (tok != '{') {
3973 v = tok;
3974 next();
3975 /* struct already defined ? return it */
3976 if (v < TOK_IDENT)
3977 expect("struct/union/enum name");
3978 s = struct_find(v);
3979 if (s && (s->sym_scope == local_scope || tok != '{')) {
3980 if (u == s->type.t)
3981 goto do_decl;
3982 if (u == VT_ENUM && IS_ENUM(s->type.t))
3983 goto do_decl;
3984 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
3986 } else {
3987 v = anon_sym++;
3989 /* Record the original enum/struct/union token. */
3990 type1.t = u == VT_ENUM ? u | VT_INT | VT_UNSIGNED : u;
3991 type1.ref = NULL;
3992 /* we put an undefined size for struct/union */
3993 s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
3994 s->r = 0; /* default alignment is zero as gcc */
3995 do_decl:
3996 type->t = s->type.t;
3997 type->ref = s;
3999 if (tok == '{') {
4000 next();
4001 if (s->c != -1)
4002 tcc_error("struct/union/enum already defined");
4003 s->c = -2;
4004 /* cannot be empty */
4005 /* empty enums are not allowed */
4006 ps = &s->next;
4007 if (u == VT_ENUM) {
4008 long long ll = 0, pl = 0, nl = 0;
4009 CType t;
4010 t.ref = s;
4011 /* enum symbols have static storage */
4012 t.t = VT_INT|VT_STATIC|VT_ENUM_VAL;
4013 for(;;) {
4014 v = tok;
4015 if (v < TOK_UIDENT)
4016 expect("identifier");
4017 ss = sym_find(v);
4018 if (ss && !local_stack)
4019 tcc_error("redefinition of enumerator '%s'",
4020 get_tok_str(v, NULL));
4021 next();
4022 if (tok == '=') {
4023 next();
4024 ll = expr_const64();
4026 ss = sym_push(v, &t, VT_CONST, 0);
4027 ss->enum_val = ll;
4028 *ps = ss, ps = &ss->next;
4029 if (ll < nl)
4030 nl = ll;
4031 if (ll > pl)
4032 pl = ll;
4033 if (tok != ',')
4034 break;
4035 next();
4036 ll++;
4037 /* NOTE: we accept a trailing comma */
4038 if (tok == '}')
4039 break;
4041 skip('}');
4042 /* set integral type of the enum */
4043 t.t = VT_INT;
4044 if (nl >= 0) {
4045 if (pl != (unsigned)pl)
4046 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4047 t.t |= VT_UNSIGNED;
4048 } else if (pl != (int)pl || nl != (int)nl)
4049 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4050 s->type.t = type->t = t.t | VT_ENUM;
4051 s->c = 0;
4052 /* set type for enum members */
4053 for (ss = s->next; ss; ss = ss->next) {
4054 ll = ss->enum_val;
4055 if (ll == (int)ll) /* default is int if it fits */
4056 continue;
4057 if (t.t & VT_UNSIGNED) {
4058 ss->type.t |= VT_UNSIGNED;
4059 if (ll == (unsigned)ll)
4060 continue;
4062 ss->type.t = (ss->type.t & ~VT_BTYPE)
4063 | (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4065 } else {
4066 c = 0;
4067 flexible = 0;
4068 while (tok != '}') {
4069 if (!parse_btype(&btype, &ad1)) {
4070 skip(';');
4071 continue;
4073 while (1) {
4074 if (flexible)
4075 tcc_error("flexible array member '%s' not at the end of struct",
4076 get_tok_str(v, NULL));
4077 bit_size = -1;
4078 v = 0;
4079 type1 = btype;
4080 if (tok != ':') {
4081 if (tok != ';')
4082 type_decl(&type1, &ad1, &v, TYPE_DIRECT);
4083 if (v == 0) {
4084 if ((type1.t & VT_BTYPE) != VT_STRUCT)
4085 expect("identifier");
4086 else {
4087 int v = btype.ref->v;
4088 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
4089 if (tcc_state->ms_extensions == 0)
4090 expect("identifier");
4094 if (type_size(&type1, &align) < 0) {
4095 if ((u == VT_STRUCT) && (type1.t & VT_ARRAY) && c)
4096 flexible = 1;
4097 else
4098 tcc_error("field '%s' has incomplete type",
4099 get_tok_str(v, NULL));
4101 if ((type1.t & VT_BTYPE) == VT_FUNC ||
4102 (type1.t & VT_BTYPE) == VT_VOID ||
4103 (type1.t & VT_STORAGE))
4104 tcc_error("invalid type for '%s'",
4105 get_tok_str(v, NULL));
4107 if (tok == ':') {
4108 next();
4109 bit_size = expr_const();
4110 /* XXX: handle v = 0 case for messages */
4111 if (bit_size < 0)
4112 tcc_error("negative width in bit-field '%s'",
4113 get_tok_str(v, NULL));
4114 if (v && bit_size == 0)
4115 tcc_error("zero width for bit-field '%s'",
4116 get_tok_str(v, NULL));
4117 parse_attribute(&ad1);
4119 size = type_size(&type1, &align);
4120 if (bit_size >= 0) {
4121 bt = type1.t & VT_BTYPE;
4122 if (bt != VT_INT &&
4123 bt != VT_BYTE &&
4124 bt != VT_SHORT &&
4125 bt != VT_BOOL &&
4126 bt != VT_LLONG)
4127 tcc_error("bitfields must have scalar type");
4128 bsize = size * 8;
4129 if (bit_size > bsize) {
4130 tcc_error("width of '%s' exceeds its type",
4131 get_tok_str(v, NULL));
4132 } else if (bit_size == bsize
4133 && !ad.a.packed && !ad1.a.packed) {
4134 /* no need for bit fields */
4136 } else if (bit_size == 64) {
4137 tcc_error("field width 64 not implemented");
4138 } else {
4139 type1.t = (type1.t & ~VT_STRUCT_MASK)
4140 | VT_BITFIELD
4141 | (bit_size << (VT_STRUCT_SHIFT + 6));
4144 if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
4145 /* Remember we've seen a real field to check
4146 for placement of flexible array member. */
4147 c = 1;
4149 /* If member is a struct or bit-field, enforce
4150 placing into the struct (as anonymous). */
4151 if (v == 0 &&
4152 ((type1.t & VT_BTYPE) == VT_STRUCT ||
4153 bit_size >= 0)) {
4154 v = anon_sym++;
4156 if (v) {
4157 ss = sym_push(v | SYM_FIELD, &type1, 0, 0);
4158 ss->a = ad1.a;
4159 *ps = ss;
4160 ps = &ss->next;
4162 if (tok == ';' || tok == TOK_EOF)
4163 break;
4164 skip(',');
4166 skip(';');
4168 skip('}');
4169 parse_attribute(&ad);
4170 struct_layout(type, &ad);
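/* Illustrative example (disabled sketch, not used by the compiler): the enum
   sizing code above picks the integral type from the value range: plain int
   when every enumerator fits, unsigned int for large non-negative values,
   and (unsigned) long long when 32 bits are not enough. */
#if 0
enum small  { A = -1, B = 1 };        /* int */
enum usmall { C = 0x80000000u };      /* unsigned int */
enum big    { D = 0x100000000ull };   /* (unsigned) long long */
#endif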
4175 static void sym_to_attr(AttributeDef *ad, Sym *s)
4177 merge_symattr(&ad->a, &s->a);
4178 merge_funcattr(&ad->f, &s->f);
4181 /* Add type qualifiers to a type. If the type is an array then the qualifiers
4182 are added to the element type, copied because it could be a typedef. */
4183 static void parse_btype_qualify(CType *type, int qualifiers)
4185 while (type->t & VT_ARRAY) {
4186 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
4187 type = &type->ref->type;
4189 type->t |= qualifiers;
4192 /* return 0 if no type declaration. otherwise, return the basic type
4193 and skip it.
4195 static int parse_btype(CType *type, AttributeDef *ad)
4197 int t, u, bt, st, type_found, typespec_found, g, n;
4198 Sym *s;
4199 CType type1;
4201 memset(ad, 0, sizeof(AttributeDef));
4202 type_found = 0;
4203 typespec_found = 0;
4204 t = VT_INT;
4205 bt = st = -1;
4206 type->ref = NULL;
4208 while(1) {
4209 switch(tok) {
4210 case TOK_EXTENSION:
4211 /* currently, we really ignore extension */
4212 next();
4213 continue;
4215 /* basic types */
4216 case TOK_CHAR:
4217 u = VT_BYTE;
4218 basic_type:
4219 next();
4220 basic_type1:
4221 if (u == VT_SHORT || u == VT_LONG) {
4222 if (st != -1 || (bt != -1 && bt != VT_INT))
4223 tmbt: tcc_error("too many basic types");
4224 st = u;
4225 } else {
4226 if (bt != -1 || (st != -1 && u != VT_INT))
4227 goto tmbt;
4228 bt = u;
4230 if (u != VT_INT)
4231 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
4232 typespec_found = 1;
4233 break;
4234 case TOK_VOID:
4235 u = VT_VOID;
4236 goto basic_type;
4237 case TOK_SHORT:
4238 u = VT_SHORT;
4239 goto basic_type;
4240 case TOK_INT:
4241 u = VT_INT;
4242 goto basic_type;
4243 case TOK_ALIGNAS:
4244 { int n;
4245 AttributeDef ad1;
4246 next();
4247 skip('(');
4248 memset(&ad1, 0, sizeof(AttributeDef));
4249 if (parse_btype(&type1, &ad1)) {
4250 type_decl(&type1, &ad1, &n, TYPE_ABSTRACT);
4251 if (ad1.a.aligned)
4252 n = 1 << (ad1.a.aligned - 1);
4253 else
4254 type_size(&type1, &n);
4255 } else {
4256 n = expr_const();
4257 if (n <= 0 || (n & (n - 1)) != 0)
4258 tcc_error("alignment must be a positive power of two");
4260 skip(')');
4261 ad->a.aligned = exact_log2p1(n);
4263 continue;
4264 case TOK_LONG:
4265 if ((t & VT_BTYPE) == VT_DOUBLE) {
4266 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
4267 } else if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
4268 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LLONG;
4269 } else {
4270 u = VT_LONG;
4271 goto basic_type;
4273 next();
4274 break;
4275 #ifdef TCC_TARGET_ARM64
4276 case TOK_UINT128:
4277 /* GCC's __uint128_t appears in some Linux header files. Make it a
4278 synonym for long double to get the size and alignment right. */
4279 u = VT_LDOUBLE;
4280 goto basic_type;
4281 #endif
4282 case TOK_BOOL:
4283 u = VT_BOOL;
4284 goto basic_type;
4285 case TOK_FLOAT:
4286 u = VT_FLOAT;
4287 goto basic_type;
4288 case TOK_DOUBLE:
4289 if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
4290 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
4291 } else {
4292 u = VT_DOUBLE;
4293 goto basic_type;
4295 next();
4296 break;
4297 case TOK_ENUM:
4298 struct_decl(&type1, VT_ENUM);
4299 basic_type2:
4300 u = type1.t;
4301 type->ref = type1.ref;
4302 goto basic_type1;
4303 case TOK_STRUCT:
4304 struct_decl(&type1, VT_STRUCT);
4305 goto basic_type2;
4306 case TOK_UNION:
4307 struct_decl(&type1, VT_UNION);
4308 goto basic_type2;
4310 /* type modifiers */
4311 case TOK_CONST1:
4312 case TOK_CONST2:
4313 case TOK_CONST3:
4314 type->t = t;
4315 parse_btype_qualify(type, VT_CONSTANT);
4316 t = type->t;
4317 next();
4318 break;
4319 case TOK_VOLATILE1:
4320 case TOK_VOLATILE2:
4321 case TOK_VOLATILE3:
4322 type->t = t;
4323 parse_btype_qualify(type, VT_VOLATILE);
4324 t = type->t;
4325 next();
4326 break;
4327 case TOK_SIGNED1:
4328 case TOK_SIGNED2:
4329 case TOK_SIGNED3:
4330 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
4331 tcc_error("signed and unsigned modifier");
4332 t |= VT_DEFSIGN;
4333 next();
4334 typespec_found = 1;
4335 break;
4336 case TOK_REGISTER:
4337 case TOK_AUTO:
4338 case TOK_RESTRICT1:
4339 case TOK_RESTRICT2:
4340 case TOK_RESTRICT3:
4341 next();
4342 break;
4343 case TOK_UNSIGNED:
4344 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
4345 tcc_error("signed and unsigned modifier");
4346 t |= VT_DEFSIGN | VT_UNSIGNED;
4347 next();
4348 typespec_found = 1;
4349 break;
4351 /* storage */
4352 case TOK_EXTERN:
4353 g = VT_EXTERN;
4354 goto storage;
4355 case TOK_STATIC:
4356 g = VT_STATIC;
4357 goto storage;
4358 case TOK_TYPEDEF:
4359 g = VT_TYPEDEF;
4360 goto storage;
4361 storage:
4362 if (t & (VT_EXTERN|VT_STATIC|VT_TYPEDEF) & ~g)
4363 tcc_error("multiple storage classes");
4364 t |= g;
4365 next();
4366 break;
4367 case TOK_INLINE1:
4368 case TOK_INLINE2:
4369 case TOK_INLINE3:
4370 t |= VT_INLINE;
4371 next();
4372 break;
4373 case TOK_NORETURN3:
4374 /* currently, no need to handle it because tcc does not
4375 track unused objects */
4376 next();
4377 break;
4378 /* GNUC attribute */
4379 case TOK_ATTRIBUTE1:
4380 case TOK_ATTRIBUTE2:
4381 parse_attribute(ad);
4382 if (ad->attr_mode) {
4383 u = ad->attr_mode -1;
4384 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
4386 continue;
4387 /* GNUC typeof */
4388 case TOK_TYPEOF1:
4389 case TOK_TYPEOF2:
4390 case TOK_TYPEOF3:
4391 next();
4392 parse_expr_type(&type1);
4393 /* remove all storage modifiers except typedef */
4394 type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
4395 if (type1.ref)
4396 sym_to_attr(ad, type1.ref);
4397 goto basic_type2;
4398 default:
4399 if (typespec_found)
4400 goto the_end;
4401 s = sym_find(tok);
4402 if (!s || !(s->type.t & VT_TYPEDEF))
4403 goto the_end;
4405 n = tok, next();
4406 if (tok == ':' && !in_generic) {
4407 /* ignore if it's a label */
4408 unget_tok(n);
4409 goto the_end;
4412 t &= ~(VT_BTYPE|VT_LONG);
4413 u = t & ~(VT_CONSTANT | VT_VOLATILE), t ^= u;
4414 type->t = (s->type.t & ~VT_TYPEDEF) | u;
4415 type->ref = s->type.ref;
4416 if (t)
4417 parse_btype_qualify(type, t);
4418 t = type->t;
4419 /* get attributes from typedef */
4420 sym_to_attr(ad, s);
4421 typespec_found = 1;
4422 st = bt = -2;
4423 break;
4425 type_found = 1;
4427 the_end:
4428 if (tcc_state->char_is_unsigned) {
4429 if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
4430 t |= VT_UNSIGNED;
4432 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
4433 bt = t & (VT_BTYPE|VT_LONG);
4434 if (bt == VT_LONG)
4435 t |= LONG_SIZE == 8 ? VT_LLONG : VT_INT;
4436 #ifdef TCC_TARGET_PE
4437 if (bt == VT_LDOUBLE)
4438 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_DOUBLE;
4439 #endif
4440 type->t = t;
4441 return type_found;
4444 /* convert a function parameter type (array to pointer and function to
4445 function pointer) */
4446 static inline void convert_parameter_type(CType *pt)
4448 /* remove const and volatile qualifiers (XXX: const could be used
4449 to indicate a const function parameter) */
4450 pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
4451 /* array must be transformed to pointer according to ANSI C */
4452 pt->t &= ~VT_ARRAY;
4453 if ((pt->t & VT_BTYPE) == VT_FUNC) {
4454 mk_pointer(pt);
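/* parse the string of an asm() construct; it may consist of several
   adjacent string literals which parse_mult_str() concatenates */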
4458 ST_FUNC void parse_asm_str(CString *astr)
4460 skip('(');
4461 parse_mult_str(astr, "string constant");
4464 /* Parse an asm label and return the token */
4465 static int asm_label_instr(void)
4467 int v;
4468 CString astr;
4470 next();
4471 parse_asm_str(&astr);
4472 skip(')');
4473 #ifdef ASM_DEBUG
4474 printf("asm_alias: \"%s\"\n", (char *)astr.data);
4475 #endif
4476 v = tok_alloc(astr.data, astr.size - 1)->tok;
4477 cstr_free(&astr);
4478 return v;
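/* Parse the suffixes that may follow a declarator: a function
   parameter list, e.g. the "(int, char *)" in "int f(int, char *)",
   or array brackets, e.g. the "[3][4]" in "int a[3][4]" (including
   VLA sizes, which are evaluated at run time).  Returns 0 only when
   'td' is set and the '(' turns out to start a nested declarator
   rather than a parameter list; otherwise returns 1. */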
4481 static int post_type(CType *type, AttributeDef *ad, int storage, int td)
4483 int n, l, t1, arg_size, align, unused_align;
4484 Sym **plast, *s, *first;
4485 AttributeDef ad1;
4486 CType pt;
4488 if (tok == '(') {
4489 /* function type, or recursive declarator (return if so) */
4490 next();
4491 if (td && !(td & TYPE_ABSTRACT))
4492 return 0;
4493 if (tok == ')')
4494 l = 0;
4495 else if (parse_btype(&pt, &ad1))
4496 l = FUNC_NEW;
4497 else if (td) {
4498 merge_attr (ad, &ad1);
4499 return 0;
4500 } else
4501 l = FUNC_OLD;
4502 first = NULL;
4503 plast = &first;
4504 arg_size = 0;
4505 if (l) {
4506 for(;;) {
4507 /* read param name and compute offset */
4508 if (l != FUNC_OLD) {
4509 if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
4510 break;
4511 type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT);
4512 if ((pt.t & VT_BTYPE) == VT_VOID)
4513 tcc_error("parameter declared as void");
4514 } else {
4515 n = tok;
4516 if (n < TOK_UIDENT)
4517 expect("identifier");
4518 pt.t = VT_VOID; /* invalid type */
4519 next();
4521 convert_parameter_type(&pt);
4522 arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
4523 s = sym_push(n | SYM_FIELD, &pt, 0, 0);
4524 *plast = s;
4525 plast = &s->next;
4526 if (tok == ')')
4527 break;
4528 skip(',');
4529 if (l == FUNC_NEW && tok == TOK_DOTS) {
4530 l = FUNC_ELLIPSIS;
4531 next();
4532 break;
4534 if (l == FUNC_NEW && !parse_btype(&pt, &ad1))
4535 tcc_error("invalid type");
4537 } else
4538 /* if no parameters, then old type prototype */
4539 l = FUNC_OLD;
4540 skip(')');
4541 /* NOTE: const is ignored in returned type as it has a special
4542 meaning in gcc / C++ */
4543 type->t &= ~VT_CONSTANT;
4544 /* some ancient pre-K&R C allows a function to return an array
4545 and the array brackets to be put after the arguments, such
4546 that "int c()[]" means something like "int[] c()" */
4547 if (tok == '[') {
4548 next();
4549 skip(']'); /* only handle simple "[]" */
4550 mk_pointer(type);
4552 /* we push an anonymous symbol which will contain the function prototype */
4553 ad->f.func_args = arg_size;
4554 ad->f.func_type = l;
4555 s = sym_push(SYM_FIELD, type, 0, 0);
4556 s->a = ad->a;
4557 s->f = ad->f;
4558 s->next = first;
4559 type->t = VT_FUNC;
4560 type->ref = s;
4561 } else if (tok == '[') {
4562 int saved_nocode_wanted = nocode_wanted;
4563 /* array definition */
4564 next();
4565 while (1) {
4566 /* XXX The optional type-quals and static should only be accepted
4567 in parameter decls. The '*' as well, and then even only
4568 in prototypes (not function defs). */
4569 switch (tok) {
4570 case TOK_RESTRICT1: case TOK_RESTRICT2: case TOK_RESTRICT3:
4571 case TOK_CONST1:
4572 case TOK_VOLATILE1:
4573 case TOK_STATIC:
4574 case '*':
4575 next();
4576 continue;
4577 default:
4578 break;
4580 break;
4582 n = -1;
4583 t1 = 0;
4584 if (tok != ']') {
4585 if (!local_stack || (storage & VT_STATIC))
4586 vpushi(expr_const());
4587 else {
4588 /* The length of a VLA (which can only happen with local_stack &&
4589 !VT_STATIC) must always be evaluated, even under nocode_wanted,
4590 so that its size slot is initialized (e.g. under sizeof
4591 or typeof). */
4592 nocode_wanted = 0;
4593 gexpr();
4595 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4596 n = vtop->c.i;
4597 if (n < 0)
4598 tcc_error("invalid array size");
4599 } else {
4600 if (!is_integer_btype(vtop->type.t & VT_BTYPE))
4601 tcc_error("size of variable length array should be an integer");
4602 n = 0;
4603 t1 = VT_VLA;
4606 skip(']');
4607 /* parse next post type */
4608 post_type(type, ad, storage, 0);
4610 if ((type->t & VT_BTYPE) == VT_FUNC)
4611 tcc_error("declaration of an array of functions");
4612 if ((type->t & VT_BTYPE) == VT_VOID
4613 || type_size(type, &unused_align) < 0)
4614 tcc_error("declaration of an array of incomplete type elements");
4616 t1 |= type->t & VT_VLA;
4618 if (t1 & VT_VLA) {
4619 if (n < 0)
4620 tcc_error("need explicit inner array size in VLAs");
4621 loc -= type_size(&int_type, &align);
4622 loc &= -align;
4623 n = loc;
4625 vla_runtime_type_size(type, &align);
4626 gen_op('*');
4627 vset(&int_type, VT_LOCAL|VT_LVAL, n);
4628 vswap();
4629 vstore();
4631 if (n != -1)
4632 vpop();
4633 nocode_wanted = saved_nocode_wanted;
4635 /* we push an anonymous symbol which will contain the array
4636 element type */
4637 s = sym_push(SYM_FIELD, type, 0, n);
4638 type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
4639 type->ref = s;
4641 return 1;
4644 /* Parse a type declarator (except basic type), and return the type
4645 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4646 expected. 'type' should contain the basic type. 'ad' is the
4647 attribute definition of the basic type. It can be modified by
4648 type_decl(). If this (possibly abstract) declarator is a pointer chain
4649 it returns the innermost pointed to type (equals *type, but is a different
4650 pointer), otherwise returns type itself, that's used for recursive calls. */
4651 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td)
4653 CType *post, *ret;
4654 int qualifiers, storage;
4656 /* recursive type, remove storage bits first, apply them later again */
4657 storage = type->t & VT_STORAGE;
4658 type->t &= ~VT_STORAGE;
4659 post = ret = type;
4661 while (tok == '*') {
4662 qualifiers = 0;
4663 redo:
4664 next();
4665 switch(tok) {
4666 case TOK_CONST1:
4667 case TOK_CONST2:
4668 case TOK_CONST3:
4669 qualifiers |= VT_CONSTANT;
4670 goto redo;
4671 case TOK_VOLATILE1:
4672 case TOK_VOLATILE2:
4673 case TOK_VOLATILE3:
4674 qualifiers |= VT_VOLATILE;
4675 goto redo;
4676 case TOK_RESTRICT1:
4677 case TOK_RESTRICT2:
4678 case TOK_RESTRICT3:
4679 goto redo;
4680 /* XXX: clarify attribute handling */
4681 case TOK_ATTRIBUTE1:
4682 case TOK_ATTRIBUTE2:
4683 parse_attribute(ad);
4684 break;
4686 mk_pointer(type);
4687 type->t |= qualifiers;
4688 if (ret == type)
4689 /* innermost pointed to type is the one for the first derivation */
4690 ret = pointed_type(type);
4693 if (tok == '(') {
4694 /* This is possibly a parameter type list for abstract declarators
4695 ('int ()'), use post_type for testing this. */
4696 if (!post_type(type, ad, 0, td)) {
4697 /* It's not, so it's a nested declarator, and the post operations
4698 apply to the innermost pointed to type (if any). */
4699 /* XXX: this is not correct to modify 'ad' at this point, but
4700 the syntax is not clear */
4701 parse_attribute(ad);
4702 post = type_decl(type, ad, v, td);
4703 skip(')');
4704 } else
4705 goto abstract;
4706 } else if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
4707 /* type identifier */
4708 *v = tok;
4709 next();
4710 } else {
4711 abstract:
4712 if (!(td & TYPE_ABSTRACT))
4713 expect("identifier");
4714 *v = 0;
4716 post_type(post, ad, storage, 0);
4717 parse_attribute(ad);
4718 type->t |= storage;
4719 return ret;
4722 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
4723 ST_FUNC int lvalue_type(int t)
4725 int bt, r;
4726 r = VT_LVAL;
4727 bt = t & VT_BTYPE;
4728 if (bt == VT_BYTE || bt == VT_BOOL)
4729 r |= VT_LVAL_BYTE;
4730 else if (bt == VT_SHORT)
4731 r |= VT_LVAL_SHORT;
4732 else
4733 return r;
4734 if (t & VT_UNSIGNED)
4735 r |= VT_LVAL_UNSIGNED;
4736 return r;
4739 /* indirection with full error checking and bound check */
4740 ST_FUNC void indir(void)
4742 if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
4743 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
4744 return;
4745 expect("pointer");
4747 if (vtop->r & VT_LVAL)
4748 gv(RC_INT);
4749 vtop->type = *pointed_type(&vtop->type);
4750 /* Arrays and functions are never lvalues */
4751 if (!(vtop->type.t & VT_ARRAY) && !(vtop->type.t & VT_VLA)
4752 && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
4753 vtop->r |= lvalue_type(vtop->type.t);
4754 /* if bound checking, the referenced pointer must be checked */
4755 #ifdef CONFIG_TCC_BCHECK
4756 if (tcc_state->do_bounds_check)
4757 vtop->r |= VT_MUSTBOUND;
4758 #endif
4762 /* pass a parameter to a function and do type checking and casting */
4763 static void gfunc_param_typed(Sym *func, Sym *arg)
4765 int func_type;
4766 CType type;
4768 func_type = func->f.func_type;
4769 if (func_type == FUNC_OLD ||
4770 (func_type == FUNC_ELLIPSIS && arg == NULL)) {
4771 /* default casting : only need to convert float to double */
4772 if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
4773 gen_cast_s(VT_DOUBLE);
4774 } else if (vtop->type.t & VT_BITFIELD) {
4775 type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
4776 type.ref = vtop->type.ref;
4777 gen_cast(&type);
4779 } else if (arg == NULL) {
4780 tcc_error("too many arguments to function");
4781 } else {
4782 type = arg->type;
4783 type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
4784 gen_assign_cast(&type);
4788 /* parse an expression and return its type without any side effect. */
4789 static void expr_type(CType *type, void (*expr_fn)(void))
4791 nocode_wanted++;
4792 expr_fn();
4793 *type = vtop->type;
4794 vpop();
4795 nocode_wanted--;
4798 /* parse an expression of the form '(type)' or '(expr)' and return its
4799 type */
4800 static void parse_expr_type(CType *type)
4802 int n;
4803 AttributeDef ad;
4805 skip('(');
4806 if (parse_btype(type, &ad)) {
4807 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4808 } else {
4809 expr_type(type, gexpr);
4811 skip(')');
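/* parse a complete type name (basic type plus abstract declarator),
   e.g. "unsigned long *" or "int (*)(void)"; error out if no type
   is present */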
4814 static void parse_type(CType *type)
4816 AttributeDef ad;
4817 int n;
4819 if (!parse_btype(type, &ad)) {
4820 expect("type");
4822 type_decl(type, &ad, &n, TYPE_ABSTRACT);
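/* Parse the parenthesized argument list of a __builtin_* call
   according to the format string 'args': 'e' parses one assignment
   expression, 't' parses a type name and pushes it.  A non-zero 'nc'
   suppresses code generation for the arguments (used by
   __builtin_constant_p). */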
4825 static void parse_builtin_params(int nc, const char *args)
4827 char c, sep = '(';
4828 CType t;
4829 if (nc)
4830 nocode_wanted++;
4831 next();
4832 while ((c = *args++)) {
4833 skip(sep);
4834 sep = ',';
4835 switch (c) {
4836 case 'e': expr_eq(); continue;
4837 case 't': parse_type(&t); vpush(&t); continue;
4838 default: tcc_error("internal error"); break;
4841 skip(')');
4842 if (nc)
4843 nocode_wanted--;
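/* emit calls to the __attribute__((cleanup(fn))) handlers registered
   between current_cleanups and 'stop' (exclusive), passing each
   handler the address of its variable */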
4846 static void try_call_scope_cleanup(Sym *stop)
4848 Sym *cls = current_cleanups;
4850 for (; cls != stop; cls = cls->ncl) {
4851 Sym *fs = cls->next;
4852 Sym *vs = cls->prev_tok;
4854 vpushsym(&fs->type, fs);
4855 vset(&vs->type, vs->r, vs->c);
4856 vtop->sym = vs;
4857 mk_pointer(&vtop->type);
4858 gaddrof();
4859 gfunc_call(1);
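/* emit the cleanup calls needed when a goto to an already defined
   label leaves one or more scopes: walk both cleanup chains up to
   their nearest common ancestor and run the handlers for the scopes
   being left */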
4863 static void try_call_cleanup_goto(Sym *cleanupstate)
4865 Sym *oc, *cc;
4866 int ocd, ccd;
4868 if (!current_cleanups)
4869 return;
4871 /* search NCA of both cleanup chains given parents and initial depth */
4872 ocd = cleanupstate ? cleanupstate->v & ~SYM_FIELD : 0;
4873 for (ccd = ncleanups, oc = cleanupstate; ocd > ccd; --ocd, oc = oc->ncl)
4875 for (cc = current_cleanups; ccd > ocd; --ccd, cc = cc->ncl)
4877 for (; cc != oc; cc = cc->ncl, oc = oc->ncl, --ccd)
4880 try_call_scope_cleanup(cc);
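/* Parse a unary expression: constants, string literals, casts and
   compound literals, statement expressions, sizeof/_Alignof, the
   __builtin_* functions, _Generic, identifiers and the prefix
   operators, followed by the postfix operators ('[]', '()', '.',
   '->', '++', '--').  The result is left on the value stack. */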
4883 ST_FUNC void unary(void)
4885 int n, t, align, size, r, sizeof_caller;
4886 CType type;
4887 Sym *s;
4888 AttributeDef ad;
4890 sizeof_caller = in_sizeof;
4891 in_sizeof = 0;
4892 type.ref = NULL;
4893 /* XXX: GCC 2.95.3 does not generate a table although it would be
4894 better here */
4895 tok_next:
4896 switch(tok) {
4897 case TOK_EXTENSION:
4898 next();
4899 goto tok_next;
4900 case TOK_LCHAR:
4901 #ifdef TCC_TARGET_PE
4902 t = VT_SHORT|VT_UNSIGNED;
4903 goto push_tokc;
4904 #endif
4905 case TOK_CINT:
4906 case TOK_CCHAR:
4907 t = VT_INT;
4908 push_tokc:
4909 type.t = t;
4910 vsetc(&type, VT_CONST, &tokc);
4911 next();
4912 break;
4913 case TOK_CUINT:
4914 t = VT_INT | VT_UNSIGNED;
4915 goto push_tokc;
4916 case TOK_CLLONG:
4917 t = VT_LLONG;
4918 goto push_tokc;
4919 case TOK_CULLONG:
4920 t = VT_LLONG | VT_UNSIGNED;
4921 goto push_tokc;
4922 case TOK_CFLOAT:
4923 t = VT_FLOAT;
4924 goto push_tokc;
4925 case TOK_CDOUBLE:
4926 t = VT_DOUBLE;
4927 goto push_tokc;
4928 case TOK_CLDOUBLE:
4929 t = VT_LDOUBLE;
4930 goto push_tokc;
4931 case TOK_CLONG:
4932 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG;
4933 goto push_tokc;
4934 case TOK_CULONG:
4935 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG | VT_UNSIGNED;
4936 goto push_tokc;
4937 case TOK___FUNCTION__:
4938 if (!gnu_ext)
4939 goto tok_identifier;
4940 /* fall thru */
4941 case TOK___FUNC__:
4943 void *ptr;
4944 int len;
4945 /* special function name identifier */
4946 len = strlen(funcname) + 1;
4947 /* generate char[len] type */
4948 type.t = VT_BYTE;
4949 mk_pointer(&type);
4950 type.t |= VT_ARRAY;
4951 type.ref->c = len;
4952 vpush_ref(&type, data_section, data_section->data_offset, len);
4953 if (!NODATA_WANTED) {
4954 ptr = section_ptr_add(data_section, len);
4955 memcpy(ptr, funcname, len);
4957 next();
4959 break;
4960 case TOK_LSTR:
4961 #ifdef TCC_TARGET_PE
4962 t = VT_SHORT | VT_UNSIGNED;
4963 #else
4964 t = VT_INT;
4965 #endif
4966 goto str_init;
4967 case TOK_STR:
4968 /* string parsing */
4969 t = VT_BYTE;
4970 if (tcc_state->char_is_unsigned)
4971 t = VT_BYTE | VT_UNSIGNED;
4972 str_init:
4973 if (tcc_state->warn_write_strings)
4974 t |= VT_CONSTANT;
4975 type.t = t;
4976 mk_pointer(&type);
4977 type.t |= VT_ARRAY;
4978 memset(&ad, 0, sizeof(AttributeDef));
4979 decl_initializer_alloc(&type, &ad, VT_CONST, 2, 0, 0);
4980 break;
4981 case '(':
4982 next();
4983 /* cast ? */
4984 if (parse_btype(&type, &ad)) {
4985 type_decl(&type, &ad, &n, TYPE_ABSTRACT);
4986 skip(')');
4987 /* check ISOC99 compound literal */
4988 if (tok == '{') {
4989 /* data is allocated locally by default */
4990 if (global_expr)
4991 r = VT_CONST;
4992 else
4993 r = VT_LOCAL;
4994 /* all except arrays are lvalues */
4995 if (!(type.t & VT_ARRAY))
4996 r |= lvalue_type(type.t);
4997 memset(&ad, 0, sizeof(AttributeDef));
4998 decl_initializer_alloc(&type, &ad, r, 1, 0, 0);
4999 } else {
5000 if (sizeof_caller) {
5001 vpush(&type);
5002 return;
5004 unary();
5005 gen_cast(&type);
5007 } else if (tok == '{') {
5008 int saved_nocode_wanted = nocode_wanted;
5009 if (const_wanted)
5010 tcc_error("expected constant");
5011 /* save all registers */
5012 save_regs(0);
5013 /* statement expression : we do not accept break/continue
5014 inside as GCC does. We do retain the nocode_wanted state,
5015 as statement expressions can't ever be entered from the
5016 outside, so any reactivation of code emission (from labels
5017 or loop heads) can be disabled again after the end of it. */
5018 block(NULL, NULL, NULL, NULL, 1);
5019 nocode_wanted = saved_nocode_wanted;
5020 skip(')');
5021 } else {
5022 gexpr();
5023 skip(')');
5025 break;
5026 case '*':
5027 next();
5028 unary();
5029 indir();
5030 break;
5031 case '&':
5032 next();
5033 unary();
5034 /* function names must be treated as function pointers,
5035 except for unary '&' and sizeof. Since we consider that
5036 functions are not lvalues, we only have to handle it
5037 there and in function calls. */
5038 /* arrays can also be used although they are not lvalues */
5039 if ((vtop->type.t & VT_BTYPE) != VT_FUNC &&
5040 !(vtop->type.t & VT_ARRAY))
5041 test_lvalue();
5042 mk_pointer(&vtop->type);
5043 gaddrof();
5044 break;
5045 case '!':
5046 next();
5047 unary();
5048 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5049 gen_cast_s(VT_BOOL);
5050 vtop->c.i = !vtop->c.i;
5051 } else if (vtop->r == VT_CMP) {
5052 vtop->cmp_op ^= 1;
5053 n = vtop->jfalse, vtop->jfalse = vtop->jtrue, vtop->jtrue = n;
5054 } else {
5055 vpushi(0);
5056 gen_op(TOK_EQ);
5058 break;
5059 case '~':
5060 next();
5061 unary();
5062 vpushi(-1);
5063 gen_op('^');
5064 break;
5065 case '+':
5066 next();
5067 unary();
5068 if ((vtop->type.t & VT_BTYPE) == VT_PTR)
5069 tcc_error("pointer not accepted for unary plus");
5070 /* In order to force a cast, we add zero, except for floating point
5071 where we really need a no-op (otherwise -0.0 would be transformed
5072 into +0.0). */
5073 if (!is_float(vtop->type.t)) {
5074 vpushi(0);
5075 gen_op('+');
5077 break;
5078 case TOK_SIZEOF:
5079 case TOK_ALIGNOF1:
5080 case TOK_ALIGNOF2:
5081 case TOK_ALIGNOF3:
5082 t = tok;
5083 next();
5084 in_sizeof++;
5085 expr_type(&type, unary); /* unary() resets in_sizeof to 0 */
5086 s = NULL;
5087 if (vtop[1].r & VT_SYM)
5088 s = vtop[1].sym; /* hack: accessing previous vtop */
5089 size = type_size(&type, &align);
5090 if (s && s->a.aligned)
5091 align = 1 << (s->a.aligned - 1);
5092 if (t == TOK_SIZEOF) {
5093 if (!(type.t & VT_VLA)) {
5094 if (size < 0)
5095 tcc_error("sizeof applied to an incomplete type");
5096 vpushs(size);
5097 } else {
5098 vla_runtime_type_size(&type, &align);
5100 } else {
5101 vpushs(align);
5103 vtop->type.t |= VT_UNSIGNED;
5104 break;
5106 case TOK_builtin_expect:
5107 /* __builtin_expect is a no-op for now */
5108 parse_builtin_params(0, "ee");
5109 vpop();
5110 break;
5111 case TOK_builtin_types_compatible_p:
5112 parse_builtin_params(0, "tt");
5113 vtop[-1].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
5114 vtop[0].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
5115 n = is_compatible_types(&vtop[-1].type, &vtop[0].type);
5116 vtop -= 2;
5117 vpushi(n);
5118 break;
5119 case TOK_builtin_choose_expr:
5121 int64_t c;
5122 next();
5123 skip('(');
5124 c = expr_const64();
5125 skip(',');
5126 if (!c) {
5127 nocode_wanted++;
5129 expr_eq();
5130 if (!c) {
5131 vpop();
5132 nocode_wanted--;
5134 skip(',');
5135 if (c) {
5136 nocode_wanted++;
5138 expr_eq();
5139 if (c) {
5140 vpop();
5141 nocode_wanted--;
5143 skip(')');
5145 break;
5146 case TOK_builtin_constant_p:
5147 parse_builtin_params(1, "e");
5148 n = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
5149 vtop--;
5150 vpushi(n);
5151 break;
5152 case TOK_builtin_frame_address:
5153 case TOK_builtin_return_address:
5155 int tok1 = tok;
5156 int level;
5157 next();
5158 skip('(');
5159 if (tok != TOK_CINT) {
5160 tcc_error("%s only takes positive integers",
5161 tok1 == TOK_builtin_return_address ?
5162 "__builtin_return_address" :
5163 "__builtin_frame_address");
5165 level = (uint32_t)tokc.i;
5166 next();
5167 skip(')');
5168 type.t = VT_VOID;
5169 mk_pointer(&type);
5170 vset(&type, VT_LOCAL, 0); /* local frame */
5171 while (level--) {
5172 mk_pointer(&vtop->type);
5173 indir(); /* -> parent frame */
5175 if (tok1 == TOK_builtin_return_address) {
5176 // assume return address is just above frame pointer on stack
5177 vpushi(PTR_SIZE);
5178 gen_op('+');
5179 mk_pointer(&vtop->type);
5180 indir();
5183 break;
5184 #ifdef TCC_TARGET_X86_64
5185 #ifdef TCC_TARGET_PE
5186 case TOK_builtin_va_start:
5187 parse_builtin_params(0, "ee");
5188 r = vtop->r & VT_VALMASK;
5189 if (r == VT_LLOCAL)
5190 r = VT_LOCAL;
5191 if (r != VT_LOCAL)
5192 tcc_error("__builtin_va_start expects a local variable");
5193 vtop->r = r;
5194 vtop->type = char_pointer_type;
5195 vtop->c.i += 8;
5196 vstore();
5197 break;
5198 #else
5199 case TOK_builtin_va_arg_types:
5200 parse_builtin_params(0, "t");
5201 vpushi(classify_x86_64_va_arg(&vtop->type));
5202 vswap();
5203 vpop();
5204 break;
5205 #endif
5206 #endif
5208 #ifdef TCC_TARGET_ARM64
5209 case TOK___va_start: {
5210 parse_builtin_params(0, "ee");
5211 //xx check types
5212 gen_va_start();
5213 vpushi(0);
5214 vtop->type.t = VT_VOID;
5215 break;
5217 case TOK___va_arg: {
5218 parse_builtin_params(0, "et");
5219 type = vtop->type;
5220 vpop();
5221 //xx check types
5222 gen_va_arg(&type);
5223 vtop->type = type;
5224 break;
5226 case TOK___arm64_clear_cache: {
5227 parse_builtin_params(0, "ee");
5228 gen_clear_cache();
5229 vpushi(0);
5230 vtop->type.t = VT_VOID;
5231 break;
5233 #endif
5234 /* pre operations */
5235 case TOK_INC:
5236 case TOK_DEC:
5237 t = tok;
5238 next();
5239 unary();
5240 inc(0, t);
5241 break;
5242 case '-':
5243 next();
5244 unary();
5245 t = vtop->type.t & VT_BTYPE;
5246 if (is_float(t)) {
5247 /* In IEEE negate(x) isn't subtract(0,x), but rather
5248 subtract(-0, x). */
5249 vpush(&vtop->type);
5250 if (t == VT_FLOAT)
5251 vtop->c.f = -1.0 * 0.0;
5252 else if (t == VT_DOUBLE)
5253 vtop->c.d = -1.0 * 0.0;
5254 else
5255 vtop->c.ld = -1.0 * 0.0;
5256 } else
5257 vpushi(0);
5258 vswap();
5259 gen_op('-');
5260 break;
5261 case TOK_LAND:
5262 if (!gnu_ext)
5263 goto tok_identifier;
5264 next();
5265 /* allow taking the address of a label */
5266 if (tok < TOK_UIDENT)
5267 expect("label identifier");
5268 s = label_find(tok);
5269 if (!s) {
5270 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
5271 } else {
5272 if (s->r == LABEL_DECLARED)
5273 s->r = LABEL_FORWARD;
5275 if (!s->type.t) {
5276 s->type.t = VT_VOID;
5277 mk_pointer(&s->type);
5278 s->type.t |= VT_STATIC;
5280 vpushsym(&s->type, s);
5281 next();
5282 break;
5284 case TOK_GENERIC:
5286 CType controlling_type;
5287 int has_default = 0;
5288 int has_match = 0;
5289 int learn = 0;
5290 TokenString *str = NULL;
5291 int saved_const_wanted = const_wanted;
5293 next();
5294 skip('(');
5295 const_wanted = 0;
5296 expr_type(&controlling_type, expr_eq);
5297 controlling_type.t &= ~(VT_CONSTANT | VT_VOLATILE | VT_ARRAY);
5298 if ((controlling_type.t & VT_BTYPE) == VT_FUNC)
5299 mk_pointer(&controlling_type);
5300 const_wanted = saved_const_wanted;
5301 for (;;) {
5302 learn = 0;
5303 skip(',');
5304 if (tok == TOK_DEFAULT) {
5305 if (has_default)
5306 tcc_error("too many 'default'");
5307 has_default = 1;
5308 if (!has_match)
5309 learn = 1;
5310 next();
5311 } else {
5312 AttributeDef ad_tmp;
5313 int itmp;
5314 CType cur_type;
5316 in_generic++;
5317 parse_btype(&cur_type, &ad_tmp);
5318 in_generic--;
5320 type_decl(&cur_type, &ad_tmp, &itmp, TYPE_ABSTRACT);
5321 if (compare_types(&controlling_type, &cur_type, 0)) {
5322 if (has_match) {
5323 tcc_error("type match twice");
5325 has_match = 1;
5326 learn = 1;
5329 skip(':');
5330 if (learn) {
5331 if (str)
5332 tok_str_free(str);
5333 skip_or_save_block(&str);
5334 } else {
5335 skip_or_save_block(NULL);
5337 if (tok == ')')
5338 break;
5340 if (!str) {
5341 char buf[60];
5342 type_to_str(buf, sizeof buf, &controlling_type, NULL);
5343 tcc_error("type '%s' does not match any association", buf);
5345 begin_macro(str, 1);
5346 next();
5347 expr_eq();
5348 if (tok != TOK_EOF)
5349 expect(",");
5350 end_macro();
5351 next();
5352 break;
5354 // special qnan, snan and infinity values
5355 case TOK___NAN__:
5356 n = 0x7fc00000;
5357 special_math_val:
5358 vpushi(n);
5359 vtop->type.t = VT_FLOAT;
5360 next();
5361 break;
5362 case TOK___SNAN__:
5363 n = 0x7f800001;
5364 goto special_math_val;
5365 case TOK___INF__:
5366 n = 0x7f800000;
5367 goto special_math_val;
5369 default:
5370 tok_identifier:
5371 t = tok;
5372 next();
5373 if (t < TOK_UIDENT)
5374 expect("identifier");
5375 s = sym_find(t);
5376 if (!s || IS_ASM_SYM(s)) {
5377 const char *name = get_tok_str(t, NULL);
5378 if (tok != '(')
5379 tcc_error("'%s' undeclared", name);
5380 /* for simple function calls, we tolerate an undeclared
5381 external reference to an int() function */
5382 if (tcc_state->warn_implicit_function_declaration
5383 #ifdef TCC_TARGET_PE
5384 /* people must be warned about using undeclared WINAPI functions
5385 (which usually start with an uppercase letter) */
5386 || (name[0] >= 'A' && name[0] <= 'Z')
5387 #endif
5389 tcc_warning("implicit declaration of function '%s'", name);
5390 s = external_global_sym(t, &func_old_type);
5393 r = s->r;
5394 /* A symbol that has a register is a local register variable,
5395 which starts out as VT_LOCAL value. */
5396 if ((r & VT_VALMASK) < VT_CONST)
5397 r = (r & ~VT_VALMASK) | VT_LOCAL;
5399 vset(&s->type, r, s->c);
5400 /* Point to s as backpointer (even without r&VT_SYM).
5401 Will be used by at least the x86 inline asm parser for
5402 regvars. */
5403 vtop->sym = s;
5405 if (r & VT_SYM) {
5406 vtop->c.i = 0;
5407 } else if (r == VT_CONST && IS_ENUM_VAL(s->type.t)) {
5408 vtop->c.i = s->enum_val;
5410 break;
5413 /* post operations */
5414 while (1) {
5415 if (tok == TOK_INC || tok == TOK_DEC) {
5416 inc(1, tok);
5417 next();
5418 } else if (tok == '.' || tok == TOK_ARROW || tok == TOK_CDOUBLE) {
5419 int qualifiers, cumofs = 0;
5420 /* field */
5421 if (tok == TOK_ARROW)
5422 indir();
5423 qualifiers = vtop->type.t & (VT_CONSTANT | VT_VOLATILE);
5424 test_lvalue();
5425 gaddrof();
5426 /* expect pointer on structure */
5427 if ((vtop->type.t & VT_BTYPE) != VT_STRUCT)
5428 expect("struct or union");
5429 if (tok == TOK_CDOUBLE)
5430 expect("field name");
5431 next();
5432 if (tok == TOK_CINT || tok == TOK_CUINT)
5433 expect("field name");
5434 s = find_field(&vtop->type, tok, &cumofs);
5435 if (!s)
5436 tcc_error("field not found: %s", get_tok_str(tok & ~SYM_FIELD, &tokc));
5437 /* add field offset to pointer */
5438 vtop->type = char_pointer_type; /* change type to 'char *' */
5439 vpushi(cumofs + s->c);
5440 gen_op('+');
5441 /* change type to field type, and set to lvalue */
5442 vtop->type = s->type;
5443 vtop->type.t |= qualifiers;
5444 /* an array is never an lvalue */
5445 if (!(vtop->type.t & VT_ARRAY)) {
5446 vtop->r |= lvalue_type(vtop->type.t);
5447 #ifdef CONFIG_TCC_BCHECK
5448 /* if bound checking, the referenced pointer must be checked */
5449 if (tcc_state->do_bounds_check && (vtop->r & VT_VALMASK) != VT_LOCAL)
5450 vtop->r |= VT_MUSTBOUND;
5451 #endif
5453 next();
5454 } else if (tok == '[') {
5455 next();
5456 gexpr();
5457 gen_op('+');
5458 indir();
5459 skip(']');
5460 } else if (tok == '(') {
5461 SValue ret;
5462 Sym *sa;
5463 int nb_args, ret_nregs, ret_align, regsize, variadic;
5465 /* function call */
5466 if ((vtop->type.t & VT_BTYPE) != VT_FUNC) {
5467 /* pointer test (no array accepted) */
5468 if ((vtop->type.t & (VT_BTYPE | VT_ARRAY)) == VT_PTR) {
5469 vtop->type = *pointed_type(&vtop->type);
5470 if ((vtop->type.t & VT_BTYPE) != VT_FUNC)
5471 goto error_func;
5472 } else {
5473 error_func:
5474 expect("function pointer");
5476 } else {
5477 vtop->r &= ~VT_LVAL; /* no lvalue */
5479 /* get return type */
5480 s = vtop->type.ref;
5481 next();
5482 sa = s->next; /* first parameter */
5483 nb_args = regsize = 0;
5484 ret.r2 = VT_CONST;
5485 /* compute first implicit argument if a structure is returned */
5486 if ((s->type.t & VT_BTYPE) == VT_STRUCT) {
5487 variadic = (s->f.func_type == FUNC_ELLIPSIS);
5488 ret_nregs = gfunc_sret(&s->type, variadic, &ret.type,
5489 &ret_align, &regsize);
5490 if (!ret_nregs) {
5491 /* get some space for the returned structure */
5492 size = type_size(&s->type, &align);
5493 #ifdef TCC_TARGET_ARM64
5494 /* On arm64, a small struct is returned in registers.
5495 It is much easier to write it to memory if we know
5496 that we are allowed to write some extra bytes, so
5497 round the allocated space up to a power of 2: */
5498 if (size < 16)
5499 while (size & (size - 1))
5500 size = (size | (size - 1)) + 1;
5501 #endif
5502 loc = (loc - size) & -align;
5503 ret.type = s->type;
5504 ret.r = VT_LOCAL | VT_LVAL;
5505 /* pass it as 'int' to avoid structure arg passing
5506 problems */
5507 vseti(VT_LOCAL, loc);
5508 ret.c = vtop->c;
5509 nb_args++;
5511 } else {
5512 ret_nregs = 1;
5513 ret.type = s->type;
5516 if (ret_nregs) {
5517 /* return in register */
5518 if (is_float(ret.type.t)) {
5519 ret.r = reg_fret(ret.type.t);
5520 #ifdef TCC_TARGET_X86_64
5521 if ((ret.type.t & VT_BTYPE) == VT_QFLOAT)
5522 ret.r2 = REG_QRET;
5523 #endif
5524 } else {
5525 #ifndef TCC_TARGET_ARM64
5526 #ifdef TCC_TARGET_X86_64
5527 if ((ret.type.t & VT_BTYPE) == VT_QLONG)
5528 #else
5529 if ((ret.type.t & VT_BTYPE) == VT_LLONG)
5530 #endif
5531 ret.r2 = REG_LRET;
5532 #endif
5533 ret.r = REG_IRET;
5535 ret.c.i = 0;
5537 if (tok != ')') {
5538 for(;;) {
5539 expr_eq();
5540 gfunc_param_typed(s, sa);
5541 nb_args++;
5542 if (sa)
5543 sa = sa->next;
5544 if (tok == ')')
5545 break;
5546 skip(',');
5549 if (sa)
5550 tcc_error("too few arguments to function");
5551 skip(')');
5552 gfunc_call(nb_args);
5554 /* return value */
5555 for (r = ret.r + ret_nregs + !ret_nregs; r-- > ret.r;) {
5556 vsetc(&ret.type, r, &ret.c);
5557 vtop->r2 = ret.r2; /* Loop only happens when r2 is VT_CONST */
5560 /* handle packed struct return */
5561 if (((s->type.t & VT_BTYPE) == VT_STRUCT) && ret_nregs) {
5562 int addr, offset;
5564 size = type_size(&s->type, &align);
5565 /* We're writing whole regs often, make sure there's enough
5566 space. Assume register size is power of 2. */
5567 if (regsize > align)
5568 align = regsize;
5569 loc = (loc - size) & -align;
5570 addr = loc;
5571 offset = 0;
5572 for (;;) {
5573 vset(&ret.type, VT_LOCAL | VT_LVAL, addr + offset);
5574 vswap();
5575 vstore();
5576 vtop--;
5577 if (--ret_nregs == 0)
5578 break;
5579 offset += regsize;
5581 vset(&s->type, VT_LOCAL | VT_LVAL, addr);
5583 if (s->f.func_noreturn)
5584 CODE_OFF();
5585 } else {
5586 break;
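/* Binary operator parsing, one function per precedence level:
   expr_prod ('*' '/' '%'), expr_sum ('+' '-'), expr_shift, expr_cmp,
   expr_cmpeq, expr_and, expr_xor, expr_or, then the short-circuit
   '&&' and '||' below.  Each level parses its operands via the next
   higher level and emits the operation with gen_op(). */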
5591 ST_FUNC void expr_prod(void)
5593 int t;
5595 unary();
5596 while (tok == '*' || tok == '/' || tok == '%') {
5597 t = tok;
5598 next();
5599 unary();
5600 gen_op(t);
5604 ST_FUNC void expr_sum(void)
5606 int t;
5608 expr_prod();
5609 while (tok == '+' || tok == '-') {
5610 t = tok;
5611 next();
5612 expr_prod();
5613 gen_op(t);
5617 static void expr_shift(void)
5619 int t;
5621 expr_sum();
5622 while (tok == TOK_SHL || tok == TOK_SAR) {
5623 t = tok;
5624 next();
5625 expr_sum();
5626 gen_op(t);
5630 static void expr_cmp(void)
5632 int t;
5634 expr_shift();
5635 while ((tok >= TOK_ULE && tok <= TOK_GT) ||
5636 tok == TOK_ULT || tok == TOK_UGE) {
5637 t = tok;
5638 next();
5639 expr_shift();
5640 gen_op(t);
5644 static void expr_cmpeq(void)
5646 int t;
5648 expr_cmp();
5649 while (tok == TOK_EQ || tok == TOK_NE) {
5650 t = tok;
5651 next();
5652 expr_cmp();
5653 gen_op(t);
5657 static void expr_and(void)
5659 expr_cmpeq();
5660 while (tok == '&') {
5661 next();
5662 expr_cmpeq();
5663 gen_op('&');
5667 static void expr_xor(void)
5669 expr_and();
5670 while (tok == '^') {
5671 next();
5672 expr_and();
5673 gen_op('^');
5677 static void expr_or(void)
5679 expr_xor();
5680 while (tok == '|') {
5681 next();
5682 expr_xor();
5683 gen_op('|');
5687 static int condition_3way(void);
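/* Common implementation of the short-circuit operators: 'e_fn'
   parses one operand at the next precedence level, 'e_op' is
   TOK_LAND or TOK_LOR, and 'i' is the value that lets evaluation
   continue (1 for '&&', 0 for '||').  Constant operands are folded
   and code generation is suppressed for operands that can never be
   reached. */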
5689 static void expr_landor(void(*e_fn)(void), int e_op, int i)
5691 int t = 0, cc = 1, f = 0, c;
5692 for(;;) {
5693 c = f ? i : condition_3way();
5694 if (c < 0) {
5695 save_regs(1), cc = 0;
5696 } else if (c != i) {
5697 nocode_wanted++, f = 1;
5699 if (tok != e_op) {
5700 if (cc || f) {
5701 vpop();
5702 vpushi(i ^ f);
5703 gsym(t);
5704 nocode_wanted -= f;
5705 } else {
5706 gvtst_set(i, t);
5708 break;
5710 if (c < 0)
5711 t = gvtst(i, t);
5712 else
5713 vpop();
5714 next();
5715 e_fn();
5719 static void expr_land(void)
5721 expr_or();
5722 if (tok == TOK_LAND)
5723 expr_landor(expr_or, TOK_LAND, 1);
5726 static void expr_lor(void)
5728 expr_land();
5729 if (tok == TOK_LOR)
5730 expr_landor(expr_land, TOK_LOR, 0);
5733 /* Assuming vtop is a value used in a conditional context
5734 (i.e. compared with zero) return 0 if it's false, 1 if
5735 true and -1 if it can't be statically determined. */
5736 static int condition_3way(void)
5738 int c = -1;
5739 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
5740 (!(vtop->r & VT_SYM) || !vtop->sym->a.weak)) {
5741 vdup();
5742 gen_cast_s(VT_BOOL);
5743 c = vtop->c.i;
5744 vpop();
5746 return c;
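/* return non-zero if 'sv' is already boolean-valued (an integer
   constant 0 or 1, or a pending comparison), so that expr_cond can
   combine the jump chains of nested conditionals directly */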
5749 static int is_cond_bool(SValue *sv)
5751 if ((sv->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST
5752 && (sv->type.t & VT_BTYPE) == VT_INT)
5753 return (unsigned)sv->c.i < 2;
5754 if (sv->r == VT_CMP)
5755 return 1;
5756 return 0;
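/* Parse a conditional expression 'c ? a : b' (including the GNU
   'c ?: b' form).  If the condition is a compile-time constant, only
   the selected branch generates code; otherwise both branches are
   converted to a common type following the ISO C rules for the
   conditional operator. */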
5759 static void expr_cond(void)
5761 int tt, u, r1, r2, rc, t1, t2, bt1, bt2, islv, c, g;
5762 SValue sv;
5763 CType type, type1, type2;
5764 int ncw_prev;
5766 expr_lor();
5767 if (tok == '?') {
5768 next();
5769 c = condition_3way();
5770 g = (tok == ':' && gnu_ext);
5771 tt = 0;
5772 if (!g) {
5773 if (c < 0) {
5774 save_regs(1);
5775 tt = gvtst(1, 0);
5776 } else {
5777 vpop();
5779 } else if (c < 0) {
5780 /* needed to avoid having different registers saved in
5781 each branch */
5782 rc = RC_INT;
5783 if (is_float(vtop->type.t)) {
5784 rc = RC_FLOAT;
5785 #ifdef TCC_TARGET_X86_64
5786 if ((vtop->type.t & VT_BTYPE) == VT_LDOUBLE) {
5787 rc = RC_ST0;
5789 #endif
5791 gv(rc);
5792 save_regs(1);
5793 gv_dup();
5794 tt = gvtst(0, 0);
5797 ncw_prev = nocode_wanted;
5798 if (1) {
5799 if (c == 0)
5800 nocode_wanted++;
5801 if (!g)
5802 gexpr();
5804 if (c < 0 && vtop->r == VT_CMP) {
5805 t1 = gvtst(0, 0);
5806 vpushi(0);
5807 gvtst_set(0, t1);
5810 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
5811 mk_pointer(&vtop->type);
5812 type1 = vtop->type;
5813 sv = *vtop; /* save value to handle it later */
5814 vtop--; /* no vpop so that FP stack is not flushed */
5816 if (g) {
5817 u = tt;
5818 } else if (c < 0) {
5819 u = gjmp(0);
5820 gsym(tt);
5821 } else
5822 u = 0;
5824 nocode_wanted = ncw_prev;
5825 if (c == 1)
5826 nocode_wanted++;
5827 skip(':');
5828 expr_cond();
5830 if (c < 0 && is_cond_bool(vtop) && is_cond_bool(&sv)) {
5831 if (sv.r == VT_CMP) {
5832 t1 = sv.jtrue;
5833 t2 = u;
5834 } else {
5835 t1 = gvtst(0, 0);
5836 t2 = gjmp(0);
5837 gsym(u);
5838 vpushv(&sv);
5840 gvtst_set(0, t1);
5841 gvtst_set(1, t2);
5842 nocode_wanted = ncw_prev;
5843 // tcc_warning("two conditions expr_cond");
5844 return;
5847 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
5848 mk_pointer(&vtop->type);
5849 type2=vtop->type;
5850 t1 = type1.t;
5851 bt1 = t1 & VT_BTYPE;
5852 t2 = type2.t;
5853 bt2 = t2 & VT_BTYPE;
5854 type.ref = NULL;
5856 /* cast operands to correct type according to ISOC rules */
5857 if (bt1 == VT_VOID || bt2 == VT_VOID) {
5858 type.t = VT_VOID; /* NOTE: as an extension, we accept void on only one side */
5859 } else if (is_float(bt1) || is_float(bt2)) {
5860 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
5861 type.t = VT_LDOUBLE;
5863 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
5864 type.t = VT_DOUBLE;
5865 } else {
5866 type.t = VT_FLOAT;
5868 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
5869 /* cast to biggest op */
5870 type.t = VT_LLONG | VT_LONG;
5871 if (bt1 == VT_LLONG)
5872 type.t &= t1;
5873 if (bt2 == VT_LLONG)
5874 type.t &= t2;
5875 /* convert to unsigned if it does not fit in a long long */
5876 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
5877 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
5878 type.t |= VT_UNSIGNED;
5879 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
5880 /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
5881 /* If one is a null ptr constant the result type
5882 is the other. */
5883 if (is_null_pointer (vtop)) type = type1;
5884 else if (is_null_pointer (&sv)) type = type2;
5885 else if (bt1 != bt2)
5886 tcc_error("incompatible types in conditional expressions");
5887 else {
5888 CType *pt1 = pointed_type(&type1);
5889 CType *pt2 = pointed_type(&type2);
5890 int pbt1 = pt1->t & VT_BTYPE;
5891 int pbt2 = pt2->t & VT_BTYPE;
5892 int newquals, copied = 0;
5893 /* pointers to void get preferred, otherwise the
5894 pointed-to types minus qualifiers should be compatible */
5895 type = (pbt1 == VT_VOID) ? type1 : type2;
5896 if (pbt1 != VT_VOID && pbt2 != VT_VOID) {
5897 if(!compare_types(pt1, pt2, 1/*unqualif*/))
5898 tcc_warning("pointer type mismatch in conditional expression\n");
5900 /* combine qualifs */
5901 newquals = ((pt1->t | pt2->t) & (VT_CONSTANT | VT_VOLATILE));
5902 if ((~pointed_type(&type)->t & (VT_CONSTANT | VT_VOLATILE))
5903 & newquals)
5905 /* copy the pointer target symbol */
5906 type.ref = sym_push(SYM_FIELD, &type.ref->type,
5907 0, type.ref->c);
5908 copied = 1;
5909 pointed_type(&type)->t |= newquals;
5911 /* pointers to incomplete arrays get converted to
5912 pointers to completed ones if possible */
5913 if (pt1->t & VT_ARRAY
5914 && pt2->t & VT_ARRAY
5915 && pointed_type(&type)->ref->c < 0
5916 && (pt1->ref->c > 0 || pt2->ref->c > 0))
5918 if (!copied)
5919 type.ref = sym_push(SYM_FIELD, &type.ref->type,
5920 0, type.ref->c);
5921 pointed_type(&type)->ref =
5922 sym_push(SYM_FIELD, &pointed_type(&type)->ref->type,
5923 0, pointed_type(&type)->ref->c);
5924 pointed_type(&type)->ref->c =
5925 0 < pt1->ref->c ? pt1->ref->c : pt2->ref->c;
5928 } else if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
5929 /* XXX: test structure compatibility */
5930 type = bt1 == VT_STRUCT ? type1 : type2;
5931 } else {
5932 /* integer operations */
5933 type.t = VT_INT | (VT_LONG & (t1 | t2));
5934 /* convert to unsigned if it does not fit in an integer */
5935 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
5936 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
5937 type.t |= VT_UNSIGNED;
5939 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
5940 that `(expr ? a : b).mem` does not error with "lvalue expected" */
5941 islv = (vtop->r & VT_LVAL) && (sv.r & VT_LVAL) && VT_STRUCT == (type.t & VT_BTYPE);
5943 /* now we convert second operand */
5944 if (c != 1) {
5945 gen_cast(&type);
5946 if (islv) {
5947 mk_pointer(&vtop->type);
5948 gaddrof();
5949 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5950 gaddrof();
5953 rc = RC_INT;
5954 if (is_float(type.t)) {
5955 rc = RC_FLOAT;
5956 #ifdef TCC_TARGET_X86_64
5957 if ((type.t & VT_BTYPE) == VT_LDOUBLE) {
5958 rc = RC_ST0;
5960 #endif
5961 } else if ((type.t & VT_BTYPE) == VT_LLONG) {
5962 /* for long longs, we use fixed registers to avoid having
5963 to handle a complicated move */
5964 rc = RC_IRET;
5967 tt = r2 = 0;
5968 if (c < 0) {
5969 r2 = gv(rc);
5970 tt = gjmp(0);
5972 gsym(u);
5974 /* this is horrible, but we must also convert the first
5975 operand */
5976 if (c != 0) {
5977 *vtop = sv;
5978 gen_cast(&type);
5979 if (islv) {
5980 mk_pointer(&vtop->type);
5981 gaddrof();
5982 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5983 gaddrof();
5986 if (c < 0) {
5987 r1 = gv(rc);
5988 move_reg(r2, r1, type.t);
5989 vtop->r = r2;
5990 gsym(tt);
5993 if (islv)
5994 indir();
5996 nocode_wanted = ncw_prev;
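/* parse an assignment expression: a conditional expression
   optionally followed by '=' or a compound assignment operator
   ('+=', '<<=', ...); assignment is right-associative, hence the
   recursive call for the right-hand side */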
6000 static void expr_eq(void)
6002 int t;
6004 expr_cond();
6005 if (tok == '=' ||
6006 (tok >= TOK_A_MOD && tok <= TOK_A_DIV) ||
6007 tok == TOK_A_XOR || tok == TOK_A_OR ||
6008 tok == TOK_A_SHL || tok == TOK_A_SAR) {
6009 test_lvalue();
6010 t = tok;
6011 next();
6012 if (t == '=') {
6013 expr_eq();
6014 } else {
6015 vdup();
6016 expr_eq();
6017 gen_op(t & 0x7f);
6019 vstore();
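/* parse a full expression including the ',' operator; the value of
   each operand except the last one is discarded */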
6023 ST_FUNC void gexpr(void)
6025 while (1) {
6026 expr_eq();
6027 if (tok != ',')
6028 break;
6029 vpop();
6030 next();
6034 /* parse a constant expression and return value in vtop. */
6035 static void expr_const1(void)
6037 const_wanted++;
6038 nocode_wanted++;
6039 expr_cond();
6040 nocode_wanted--;
6041 const_wanted--;
6044 /* parse an integer constant and return its value. */
6045 static inline int64_t expr_const64(void)
6047 int64_t c;
6048 expr_const1();
6049 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
6050 expect("constant expression");
6051 c = vtop->c.i;
6052 vpop();
6053 return c;
6056 /* parse an integer constant and return its value.
6057 Complain if it doesn't fit 32bit (signed or unsigned). */
6058 ST_FUNC int expr_const(void)
6060 int c;
6061 int64_t wc = expr_const64();
6062 c = wc;
6063 if (c != wc && (unsigned)c != wc)
6064 tcc_error("constant exceeds 32 bit");
6065 return c;
6068 /* ------------------------------------------------------------------------- */
6069 /* return from function */
6071 #ifndef TCC_TARGET_ARM64
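/* generate the code that returns the value on top of the value stack
   from the current function; structures are either packed into
   registers as described by gfunc_sret() or copied through the
   implicit pointer argument located at func_vc */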
6072 static void gfunc_return(CType *func_type)
6074 if ((func_type->t & VT_BTYPE) == VT_STRUCT) {
6075 CType type, ret_type;
6076 int ret_align, ret_nregs, regsize;
6077 ret_nregs = gfunc_sret(func_type, func_var, &ret_type,
6078 &ret_align, &regsize);
6079 if (0 == ret_nregs) {
6080 /* if returning structure, must copy it to implicit
6081 first pointer arg location */
6082 type = *func_type;
6083 mk_pointer(&type);
6084 vset(&type, VT_LOCAL | VT_LVAL, func_vc);
6085 indir();
6086 vswap();
6087 /* copy structure value to pointer */
6088 vstore();
6089 } else {
6090 /* returning structure packed into registers */
6091 int r, size, addr, align;
6092 size = type_size(func_type,&align);
6093 if ((vtop->r != (VT_LOCAL | VT_LVAL) ||
6094 (vtop->c.i & (ret_align-1)))
6095 && (align & (ret_align-1))) {
6096 loc = (loc - size) & -ret_align;
6097 addr = loc;
6098 type = *func_type;
6099 vset(&type, VT_LOCAL | VT_LVAL, addr);
6100 vswap();
6101 vstore();
6102 vpop();
6103 vset(&ret_type, VT_LOCAL | VT_LVAL, addr);
6105 vtop->type = ret_type;
6106 if (is_float(ret_type.t))
6107 r = rc_fret(ret_type.t);
6108 else
6109 r = RC_IRET;
6111 if (ret_nregs == 1)
6112 gv(r);
6113 else {
6114 for (;;) {
6115 vdup();
6116 gv(r);
6117 vpop();
6118 if (--ret_nregs == 0)
6119 break;
6120 /* We assume that when a structure is returned in multiple
6121 registers, their classes are consecutive values of the
6122 sequence s(n) = 2^n */
6123 r <<= 1;
6124 vtop->c.i += regsize;
6128 } else if (is_float(func_type->t)) {
6129 gv(rc_fret(func_type->t));
6130 } else {
6131 gv(RC_IRET);
6133 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
6135 #endif
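/* qsort() comparison function ordering switch cases by their lower
   bound v1, used before emitting the case dispatch */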
6137 static int case_cmp(const void *pa, const void *pb)
6139 int64_t a = (*(struct case_t**) pa)->v1;
6140 int64_t b = (*(struct case_t**) pb)->v1;
6141 return a < b ? -1 : a > b;
6144 static void gtst_addr(int t, int a)
6146 gsym_addr(gvtst(0, t), a);
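/* Emit the dispatch code for the sorted case ranges in 'base': a
   binary search while more than 4 entries remain, then a linear scan
   testing each range [v1, v2] (e.g. "case 3 ... 5:" with the GNU
   range extension).  '*bsym' accumulates the jump taken when no case
   matches. */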
6149 static void gcase(struct case_t **base, int len, int *bsym)
6151 struct case_t *p;
6152 int e;
6153 int ll = (vtop->type.t & VT_BTYPE) == VT_LLONG;
6154 while (len > 4) {
6155 /* binary search */
6156 p = base[len/2];
6157 vdup();
6158 if (ll)
6159 vpushll(p->v2);
6160 else
6161 vpushi(p->v2);
6162 gen_op(TOK_LE);
6163 e = gvtst(1, 0);
6164 vdup();
6165 if (ll)
6166 vpushll(p->v1);
6167 else
6168 vpushi(p->v1);
6169 gen_op(TOK_GE);
6170 gtst_addr(0, p->sym); /* v1 <= x <= v2 */
6171 /* x < v1 */
6172 gcase(base, len/2, bsym);
6173 /* x > v2 */
6174 gsym(e);
6175 e = len/2 + 1;
6176 base += e; len -= e;
6178 /* linear scan */
6179 while (len--) {
6180 p = *base++;
6181 vdup();
6182 if (ll)
6183 vpushll(p->v2);
6184 else
6185 vpushi(p->v2);
6186 if (p->v1 == p->v2) {
6187 gen_op(TOK_EQ);
6188 gtst_addr(0, p->sym);
6189 } else {
6190 gen_op(TOK_LE);
6191 e = gvtst(1, 0);
6192 vdup();
6193 if (ll)
6194 vpushll(p->v1);
6195 else
6196 vpushi(p->v1);
6197 gen_op(TOK_GE);
6198 gtst_addr(0, p->sym);
6199 gsym(e);
6202 *bsym = gjmp(*bsym);
6205 /* call 'func' for each __attribute__((cleanup(func))) */
6206 static void block_cleanup(Sym *lcleanup, int lncleanups)
6208 int jmp = 0;
6209 Sym *g, **pg;
6210 for (pg = &pending_gotos; (g = *pg) && g->c > lncleanups;) {
6211 if (g->prev_tok->r & LABEL_FORWARD) {
6212 Sym *pcl = g->next;
6213 if (!jmp)
6214 jmp = gjmp(0);
6215 gsym(pcl->jnext);
6216 try_call_scope_cleanup(lcleanup);
6217 pcl->jnext = gjmp(0);
6218 if (!lncleanups)
6219 goto remove_pending;
6220 g->c = lncleanups;
6221 pg = &g->prev;
6222 } else {
6223 remove_pending:
6224 *pg = g->prev;
6225 sym_free(g);
6228 gsym(jmp);
6229 try_call_scope_cleanup(lcleanup);
6230 current_cleanups = lcleanup;
6231 ncleanups = lncleanups;
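/* called when the outermost block of a function ends: 'main' gets an
   implicit 'return 0', any other non-void function only triggers a
   warning if control can fall off its end */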
6234 static void check_func_return(void)
6236 if ((func_vt.t & VT_BTYPE) == VT_VOID)
6237 return;
6238 if (!strcmp (funcname, "main")
6239 && (func_vt.t & VT_BTYPE) == VT_INT) {
6240 /* main returns 0 by default */
6241 vpushi(0);
6242 gen_assign_cast(&func_vt);
6243 gfunc_return(&func_vt);
6244 } else {
6245 tcc_warning("function might return no value: '%s'", funcname);
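/* Parse one statement (or a '{...}' compound block) and generate
   code for it.  '*bsym' and '*csym' collect the pending jumps created
   by 'break' and 'continue', with 'bcl'/'ccl' naming the cleanup
   scope to unwind before jumping.  With 'is_expr' the value of the
   last expression statement is kept on the value stack, which is what
   implements GNU statement expressions. */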
6249 static void block(int *bsym, Sym *bcl, int *csym, Sym *ccl, int is_expr)
6251 int a, b, c, d, e, t;
6252 Sym *s;
6254 if (is_expr) {
6255 /* default return value is (void) */
6256 vpushi(0);
6257 vtop->type.t = VT_VOID;
6260 t = tok, next();
6262 if (t == TOK_IF) {
6263 skip('(');
6264 gexpr();
6265 skip(')');
6266 a = gvtst(1, 0);
6267 block(bsym, bcl, csym, ccl, 0);
6268 if (tok == TOK_ELSE) {
6269 d = gjmp(0);
6270 gsym(a);
6271 next();
6272 block(bsym, bcl, csym, ccl, 0);
6273 gsym(d); /* patch else jmp */
6274 } else {
6275 gsym(a);
6278 } else if (t == TOK_WHILE) {
6279 d = gind();
6280 vla_sp_restore();
6281 skip('(');
6282 gexpr();
6283 skip(')');
6284 a = gvtst(1, 0);
6285 b = 0;
6286 block(&a, current_cleanups, &b, current_cleanups, 0);
6287 gjmp_addr(d);
6288 gsym_addr(b, d);
6289 gsym(a);
6291 } else if (t == '{') {
6292 Sym *llabel, *lcleanup;
6293 int block_vla_sp_loc = vla_sp_loc, saved_vlas_in_scope = vlas_in_scope;
6294 int lncleanups = ncleanups;
6296 /* record local declaration stack position */
6297 s = local_stack;
6298 llabel = local_label_stack;
6299 lcleanup = current_cleanups;
6300 ++local_scope;
6302 /* handle local labels declarations */
6303 while (tok == TOK_LABEL) {
6304 do {
6305 next();
6306 if (tok < TOK_UIDENT)
6307 expect("label identifier");
6308 label_push(&local_label_stack, tok, LABEL_DECLARED);
6309 next();
6310 } while (tok == ',');
6311 skip(';');
6314 while (tok != '}') {
6315 decl(VT_LOCAL);
6316 if (tok != '}') {
6317 if (is_expr)
6318 vpop();
6319 block(bsym, bcl, csym, ccl, is_expr);
6323 if (current_cleanups != lcleanup)
6324 block_cleanup(lcleanup, lncleanups);
6326 /* pop locally defined labels */
6327 label_pop(&local_label_stack, llabel, is_expr);
6329 /* In the is_expr case (a statement expression is finished here),
6330 vtop might refer to symbols on the local_stack. Either via the
6331 type or via vtop->sym. We can't pop those nor any that in turn
6332 might be referred to. To make it easier we don't roll back
6333 any symbols in that case; some upper level call to block() will
6334 do that. We do have to remove such symbols from the lookup
6335 tables, though. sym_pop will do that. */
6337 /* pop locally defined symbols */
6338 sym_pop(&local_stack, s, is_expr);
6340 /* Pop VLA frames and restore stack pointer if required */
6341 if (vlas_in_scope > saved_vlas_in_scope) {
6342 vla_sp_loc = saved_vlas_in_scope ? block_vla_sp_loc : vla_sp_root_loc;
6343 vla_sp_restore();
6345 vlas_in_scope = saved_vlas_in_scope;
6347 if (0 == --local_scope && !nocode_wanted)
6348 check_func_return();
6349 next();
6351 } else if (t == TOK_RETURN) {
6352 a = tok != ';';
6353 b = (func_vt.t & VT_BTYPE) != VT_VOID;
6354 if (a)
6355 gexpr(), gen_assign_cast(&func_vt);
6356 try_call_scope_cleanup(NULL);
6357 if (a && b)
6358 gfunc_return(&func_vt);
6359 else if (a)
6360 vtop--;
6361 else if (b)
6362 tcc_warning("'return' with no value.");
6363 skip(';');
6364 /* jump unless last stmt in top-level block */
6365 if (tok != '}' || local_scope != 1)
6366 rsym = gjmp(rsym);
6367 CODE_OFF();
6369 } else if (t == TOK_BREAK) {
6370 /* compute jump */
6371 if (!bsym)
6372 tcc_error("cannot break");
6373 try_call_scope_cleanup(bcl);
6374 *bsym = gjmp(*bsym);
6375 skip(';');
6377 } else if (t == TOK_CONTINUE) {
6378 /* compute jump */
6379 if (!csym)
6380 tcc_error("cannot continue");
6381 try_call_scope_cleanup(ccl);
6382 vla_sp_restore_root();
6383 *csym = gjmp(*csym);
6384 skip(';');
6386 } else if (t == TOK_FOR) {
6387 Sym *lcleanup = current_cleanups;
6388 int lncleanups = ncleanups;
6390 skip('(');
6391 s = local_stack;
6392 ++local_scope;
6393 if (tok != ';') {
6394 /* c99 for-loop init decl? */
6395 if (!decl0(VT_LOCAL, 1, NULL)) {
6396 /* no, regular for-loop init expr */
6397 gexpr();
6398 vpop();
6401 skip(';');
6402 a = b = 0;
6403 c = d = gind();
6404 vla_sp_restore();
6405 if (tok != ';') {
6406 gexpr();
6407 a = gvtst(1, 0);
6409 skip(';');
6410 if (tok != ')') {
6411 e = gjmp(0);
6412 d = gind();
6413 vla_sp_restore();
6414 gexpr();
6415 vpop();
6416 gjmp_addr(c);
6417 gsym(e);
6419 skip(')');
6420 block(&a, current_cleanups, &b, current_cleanups, 0);
6421 gjmp_addr(d);
6422 gsym_addr(b, d);
6423 gsym(a);
6424 --local_scope;
6425 try_call_scope_cleanup(lcleanup);
6426 ncleanups = lncleanups;
6427 current_cleanups = lcleanup;
6428 sym_pop(&local_stack, s, 0);
6430 } else if (t == TOK_DO) {
6431 a = b = 0;
6432 d = gind();
6433 vla_sp_restore();
6434 block(&a, current_cleanups, &b, current_cleanups, 0);
6435 gsym(b);
6436 skip(TOK_WHILE);
6437 skip('(');
6438 gexpr();
6439 skip(')');
6440 skip(';');
6441 c = gvtst(0, 0);
6442 gsym_addr(c, d);
6443 gsym(a);
6445 } else if (t == TOK_SWITCH) {
6446 struct switch_t *saved, sw;
6447 SValue switchval;
6449 skip('(');
6450 gexpr();
6451 skip(')');
6452 switchval = *vtop--;
6454 sw.p = NULL; sw.n = 0; sw.def_sym = 0;
6455 saved = cur_switch;
6456 cur_switch = &sw;
6457 a = 0;
6458 b = gjmp(0); /* jump to first case */
6459 block(&a, current_cleanups, csym, ccl, 0);
6460 a = gjmp(a); /* add implicit break */
6461 /* case lookup */
6462 gsym(b);
6464 qsort(sw.p, sw.n, sizeof(void*), case_cmp);
6465 for (b = 1; b < sw.n; b++)
6466 if (sw.p[b - 1]->v2 >= sw.p[b]->v1)
6467 tcc_error("duplicate case value");
6468 /* Our switch table sorting is signed, so the compared
6469 value needs to be as well when it's 64bit. */
6470 if ((switchval.type.t & VT_BTYPE) == VT_LLONG)
6471 switchval.type.t &= ~VT_UNSIGNED;
6472 vpushv(&switchval);
6473 gv(RC_INT);
6474 d = 0, gcase(sw.p, sw.n, &d);
6475 vpop();
6476 if (sw.def_sym)
6477 gsym_addr(d, sw.def_sym);
6478 else
6479 gsym(d);
6480 /* break label */
6481 gsym(a);
6482 dynarray_reset(&sw.p, &sw.n);
6483 cur_switch = saved;
6485 } else if (t == TOK_CASE) {
6486 struct case_t *cr = tcc_malloc(sizeof(struct case_t));
6487 if (!cur_switch)
6488 expect("switch");
6489 cr->v1 = cr->v2 = expr_const64();
6490 if (gnu_ext && tok == TOK_DOTS) {
6491 next();
6492 cr->v2 = expr_const64();
6493 if (cr->v2 < cr->v1)
6494 tcc_warning("empty case range");
6496 cr->sym = gind();
6497 dynarray_add(&cur_switch->p, &cur_switch->n, cr);
6498 skip(':');
6499 is_expr = 0;
6500 goto block_after_label;
6502 } else if (t == TOK_DEFAULT) {
6503 if (!cur_switch)
6504 expect("switch");
6505 if (cur_switch->def_sym)
6506 tcc_error("too many 'default'");
6507 cur_switch->def_sym = gind();
6508 skip(':');
6509 is_expr = 0;
6510 goto block_after_label;
6512 } else if (t == TOK_GOTO) {
6513 if (tok == '*' && gnu_ext) {
6514 /* computed goto */
6515 next();
6516 gexpr();
6517 if ((vtop->type.t & VT_BTYPE) != VT_PTR)
6518 expect("pointer");
6519 ggoto();
6520 } else if (tok >= TOK_UIDENT) {
6521 s = label_find(tok);
6522 /* put forward definition if needed */
6523 if (!s)
6524 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
6525 else if (s->r == LABEL_DECLARED)
6526 s->r = LABEL_FORWARD;
6528 vla_sp_restore_root();
6529 if (s->r & LABEL_FORWARD) {
6530 /* start new goto chain for cleanups, linked via label->next */
6531 if (current_cleanups) {
6532 sym_push2(&pending_gotos, SYM_FIELD, 0, ncleanups);
6533 pending_gotos->prev_tok = s;
6534 s = sym_push2(&s->next, SYM_FIELD, 0, 0);
6535 pending_gotos->next = s;
6537 s->jnext = gjmp(s->jnext);
6538 } else {
6539 try_call_cleanup_goto(s->cleanupstate);
6540 gjmp_addr(s->jnext);
6542 next();
6544 } else {
6545 expect("label identifier");
6547 skip(';');
6549 } else if (t == TOK_ASM1 || t == TOK_ASM2 || t == TOK_ASM3) {
6550 asm_instr();
6552 } else {
6553 if (tok == ':' && t >= TOK_UIDENT) {
6554 /* label case */
6555 next();
6556 s = label_find(t);
6557 if (s) {
6558 if (s->r == LABEL_DEFINED)
6559 tcc_error("duplicate label '%s'", get_tok_str(s->v, NULL));
6560 s->r = LABEL_DEFINED;
6561 if (s->next) {
6562 Sym *pcl; /* pending cleanup goto */
6563 for (pcl = s->next; pcl; pcl = pcl->prev)
6564 gsym(pcl->jnext);
6565 sym_pop(&s->next, NULL, 0);
6566 } else
6567 gsym(s->jnext);
6568 } else {
6569 s = label_push(&global_label_stack, t, LABEL_DEFINED);
6571 s->jnext = gind();
6572 s->cleanupstate = current_cleanups;
6574 block_after_label:
6575 vla_sp_restore();
6576 /* we accept this, but it is a mistake */
6577 if (tok == '}') {
6578 tcc_warning("deprecated use of label at end of compound statement");
6579 } else {
6580 if (is_expr)
6581 vpop();
6582 block(bsym, bcl, csym, ccl, is_expr);
6585 } else {
6586 /* expression case */
6587 if (t != ';') {
6588 unget_tok(t);
6589 if (is_expr) {
6590 vpop();
6591 gexpr();
6592 } else {
6593 gexpr();
6594 vpop();
6596 skip(';');
6602 /* This skips over a stream of tokens containing balanced {} and ()
6603 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
6604 with a '{'). If STR then allocates and stores the skipped tokens
6605 in *STR. This doesn't check if () and {} are nested correctly,
6606 i.e. "({)}" is accepted. */
6607 static void skip_or_save_block(TokenString **str)
6609 int braces = tok == '{';
6610 int level = 0;
6611 if (str)
6612 *str = tok_str_alloc();
6614 while ((level > 0 || (tok != '}' && tok != ',' && tok != ';' && tok != ')'))) {
6615 int t;
6616 if (tok == TOK_EOF) {
6617 if (str || level > 0)
6618 tcc_error("unexpected end of file");
6619 else
6620 break;
6622 if (str)
6623 tok_str_add_tok(*str);
6624 t = tok;
6625 next();
6626 if (t == '{' || t == '(') {
6627 level++;
6628 } else if (t == '}' || t == ')') {
6629 level--;
6630 if (level == 0 && braces && t == '}')
6631 break;
6634 if (str) {
6635 tok_str_add(*str, -1);
6636 tok_str_add(*str, 0);
6640 #define EXPR_CONST 1
6641 #define EXPR_ANY 2
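/* Parse one initializer element.  With EXPR_CONST the element must
   be usable as a static initializer (compound literals are then
   allocated globally); EXPR_ANY accepts any assignment expression. */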
6643 static void parse_init_elem(int expr_type)
6645 int saved_global_expr;
6646 switch(expr_type) {
6647 case EXPR_CONST:
6648 /* compound literals must be allocated globally in this case */
6649 saved_global_expr = global_expr;
6650 global_expr = 1;
6651 expr_const1();
6652 global_expr = saved_global_expr;
6653 /* NOTE: symbols are accepted, as well as lvalues for anon symbols
6654 (compound literals). */
6655 if (((vtop->r & (VT_VALMASK | VT_LVAL)) != VT_CONST
6656 && ((vtop->r & (VT_SYM|VT_LVAL)) != (VT_SYM|VT_LVAL)
6657 || vtop->sym->v < SYM_FIRST_ANOM))
6658 #ifdef TCC_TARGET_PE
6659 || ((vtop->r & VT_SYM) && vtop->sym->a.dllimport)
6660 #endif
6662 tcc_error("initializer element is not constant");
6663 break;
6664 case EXPR_ANY:
6665 expr_eq();
6666 break;
6670 /* put zeros for variable based init */
6671 static void init_putz(Section *sec, unsigned long c, int size)
6673 if (sec) {
6674 /* nothing to do because globals are already set to zero */
6675 } else {
6676 vpush_global_sym(&func_old_type, TOK_memset);
6677 vseti(VT_LOCAL, c);
6678 #ifdef TCC_TARGET_ARM
6679 vpushs(size);
6680 vpushi(0);
6681 #else
6682 vpushi(0);
6683 vpushs(size);
6684 #endif
6685 gfunc_call(3);
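/* For illustration (hypothetical declaration): for a local

       int a[8] = { 1, 2 };

   decl_initializer() stores the two elements and then calls init_putz() for
   the remaining 24 bytes, emitting the memset call built above; for the same
   declaration at file scope nothing is emitted because static storage is
   already zero-initialized. */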
6689 #define DIF_FIRST 1
6690 #define DIF_SIZE_ONLY 2
6691 #define DIF_HAVE_ELEM 4
6693 /* 'type' is the array or struct type. 'c' is the array or struct
6694 address. 'cur_field' points to the current field; for arrays its 'c'
6695 member contains the current start index. 'flags' is as in
6696 decl_initializer. 'al' contains the already initialized length of the
6697 current container (starting at c). The return value is the new length of
6698 that container. */
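/* For illustration (hypothetical declarations), decl_designator() handles
   forms such as

       int t[10] = { [2] = 1, [5 ... 7] = 3 };
       struct point { int x, y; } p = { .y = 2 };

   The GNU range designator sets nb_elems > 1, so the single stored element
   is replicated over the whole range further below. */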
6699 static int decl_designator(CType *type, Section *sec, unsigned long c,
6700 Sym **cur_field, int flags, int al)
6702 Sym *s, *f;
6703 int index, index_last, align, l, nb_elems, elem_size;
6704 unsigned long corig = c;
6706 elem_size = 0;
6707 nb_elems = 1;
6709 if (flags & DIF_HAVE_ELEM)
6710 goto no_designator;
6712 if (gnu_ext && tok >= TOK_UIDENT) {
6713 l = tok, next();
6714 if (tok == ':')
6715 goto struct_field;
6716 unget_tok(l);
6719 /* NOTE: we only support ranges for last designator */
6720 while (nb_elems == 1 && (tok == '[' || tok == '.')) {
6721 if (tok == '[') {
6722 if (!(type->t & VT_ARRAY))
6723 expect("array type");
6724 next();
6725 index = index_last = expr_const();
6726 if (tok == TOK_DOTS && gnu_ext) {
6727 next();
6728 index_last = expr_const();
6730 skip(']');
6731 s = type->ref;
6732 if (index < 0 || (s->c >= 0 && index_last >= s->c) ||
6733 index_last < index)
6734 tcc_error("invalid index");
6735 if (cur_field)
6736 (*cur_field)->c = index_last;
6737 type = pointed_type(type);
6738 elem_size = type_size(type, &align);
6739 c += index * elem_size;
6740 nb_elems = index_last - index + 1;
6741 } else {
6742 int cumofs = 0;
6743 next();
6744 l = tok;
6745 struct_field:
6746 next();
6747 if ((type->t & VT_BTYPE) != VT_STRUCT)
6748 expect("struct/union type");
6749 f = find_field(type, l, &cumofs);
6750 if (!f)
6751 expect("field");
6752 if (cur_field)
6753 *cur_field = f;
6754 type = &f->type;
6755 c += cumofs + f->c;
6757 cur_field = NULL;
6759 if (!cur_field) {
6760 if (tok == '=') {
6761 next();
6762 } else if (!gnu_ext) {
6763 expect("=");
6765 } else {
6766 no_designator:
6767 if (type->t & VT_ARRAY) {
6768 index = (*cur_field)->c;
6769 if (type->ref->c >= 0 && index >= type->ref->c)
6770 tcc_error("index too large");
6771 type = pointed_type(type);
6772 c += index * type_size(type, &align);
6773 } else {
6774 f = *cur_field;
6775 while (f && (f->v & SYM_FIRST_ANOM) && (f->type.t & VT_BITFIELD))
6776 *cur_field = f = f->next;
6777 if (!f)
6778 tcc_error("too many field init");
6779 type = &f->type;
6780 c += f->c;
6783 /* must put zero in holes (note that doing it that way
6784 ensures that it even works with designators) */
6785 if (!(flags & DIF_SIZE_ONLY) && c - corig > al)
6786 init_putz(sec, corig + al, c - corig - al);
6787 decl_initializer(type, sec, c, flags & ~DIF_FIRST);
6789 /* XXX: make it more general */
6790 if (!(flags & DIF_SIZE_ONLY) && nb_elems > 1) {
6791 unsigned long c_end;
6792 uint8_t *src, *dst;
6793 int i;
6795 if (!sec) {
6796 vset(type, VT_LOCAL|VT_LVAL, c);
6797 for (i = 1; i < nb_elems; i++) {
6798 vset(type, VT_LOCAL|VT_LVAL, c + elem_size * i);
6799 vswap();
6800 vstore();
6802 vpop();
6803 } else if (!NODATA_WANTED) {
6804 c_end = c + nb_elems * elem_size;
6805 if (c_end > sec->data_allocated)
6806 section_realloc(sec, c_end);
6807 src = sec->data + c;
6808 dst = src;
6809 for(i = 1; i < nb_elems; i++) {
6810 dst += elem_size;
6811 memcpy(dst, src, elem_size);
6815 c += nb_elems * type_size(type, &align);
6816 if (c - corig > al)
6817 al = c - corig;
6818 return al;
6821 /* store a value or an expression directly in global data or in a local array */
6822 static void init_putv(CType *type, Section *sec, unsigned long c)
6824 int bt;
6825 void *ptr;
6826 CType dtype;
6828 dtype = *type;
6829 dtype.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
6831 if (sec) {
6832 int size, align;
6833 /* XXX: not portable */
6834 /* XXX: generate error if incorrect relocation */
6835 gen_assign_cast(&dtype);
6836 bt = type->t & VT_BTYPE;
6838 if ((vtop->r & VT_SYM)
6839 && bt != VT_PTR
6840 && bt != VT_FUNC
6841 && (bt != (PTR_SIZE == 8 ? VT_LLONG : VT_INT)
6842 || (type->t & VT_BITFIELD))
6843 && !((vtop->r & VT_CONST) && vtop->sym->v >= SYM_FIRST_ANOM)
6845 tcc_error("initializer element is not computable at load time");
6847 if (NODATA_WANTED) {
6848 vtop--;
6849 return;
6852 size = type_size(type, &align);
6853 section_reserve(sec, c + size);
6854 ptr = sec->data + c;
6856 /* XXX: make code faster ? */
6857 if ((vtop->r & (VT_SYM|VT_CONST)) == (VT_SYM|VT_CONST) &&
6858 vtop->sym->v >= SYM_FIRST_ANOM &&
6859 /* XXX This rejects compound literals like
6860 '(void *){ptr}'. The problem is that '&sym' is
6861 represented the same way, which would be ruled out
6862 by the SYM_FIRST_ANOM check above, but also '"string"'
6863 in 'char *p = "string"' is represented the same
6864 with the type being VT_PTR and the symbol being an
6865 anonymous one. That is, there's no difference in vtop
6866 between '(void *){x}' and '&(void *){x}'. Ignore
6867 pointer typed entities here. Hopefully no real code
6868 will ever use compound literals with scalar type. */
6869 (vtop->type.t & VT_BTYPE) != VT_PTR) {
6870 /* These come from compound literals, memcpy stuff over. */
6871 Section *ssec;
6872 ElfSym *esym;
6873 ElfW_Rel *rel;
6874 esym = elfsym(vtop->sym);
6875 ssec = tcc_state->sections[esym->st_shndx];
6876 memmove (ptr, ssec->data + esym->st_value, size);
6877 if (ssec->reloc) {
6878 /* We need to copy over all memory contents, and that
6879 includes relocations. Use the fact that relocs are
6880 created in order, so look from the end of relocs
6881 until we hit one before the copied region. */
6882 int num_relocs = ssec->reloc->data_offset / sizeof(*rel);
6883 rel = (ElfW_Rel*)(ssec->reloc->data + ssec->reloc->data_offset);
6884 while (num_relocs--) {
6885 rel--;
6886 if (rel->r_offset >= esym->st_value + size)
6887 continue;
6888 if (rel->r_offset < esym->st_value)
6889 break;
6890 /* Note: if the same fields are initialized multiple
6891 times (possible with designators) then we possibly
6892 add multiple relocations for the same offset here.
6893 That would lead to wrong code; the last reloc needs
6894 to win. We clean this up later after the whole
6895 initializer is parsed. */
6896 put_elf_reloca(symtab_section, sec,
6897 c + rel->r_offset - esym->st_value,
6898 ELFW(R_TYPE)(rel->r_info),
6899 ELFW(R_SYM)(rel->r_info),
6900 #if PTR_SIZE == 8
6901 rel->r_addend
6902 #else
6903 0
6904 #endif
6905 );
6908 } else {
6909 if (type->t & VT_BITFIELD) {
6910 int bit_pos, bit_size, bits, n;
6911 unsigned char *p, v, m;
6912 bit_pos = BIT_POS(vtop->type.t);
6913 bit_size = BIT_SIZE(vtop->type.t);
6914 p = (unsigned char*)ptr + (bit_pos >> 3);
6915 bit_pos &= 7, bits = 0;
6916 while (bit_size) {
6917 n = 8 - bit_pos;
6918 if (n > bit_size)
6919 n = bit_size;
6920 v = vtop->c.i >> bits << bit_pos;
6921 m = ((1 << n) - 1) << bit_pos;
6922 *p = (*p & ~m) | (v & m);
6923 bits += n, bit_size -= n, bit_pos = 0, ++p;
6925 } else
6926 switch(bt) {
6927 /* XXX: when cross-compiling we assume that each type has the
6928 same representation on host and target, which is likely to
6929 be wrong in the case of long double */
6930 case VT_BOOL:
6931 vtop->c.i = vtop->c.i != 0;
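/* fall through: the normalized bool is stored like a byte */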
6932 case VT_BYTE:
6933 *(char *)ptr |= vtop->c.i;
6934 break;
6935 case VT_SHORT:
6936 *(short *)ptr |= vtop->c.i;
6937 break;
6938 case VT_FLOAT:
6939 *(float*)ptr = vtop->c.f;
6940 break;
6941 case VT_DOUBLE:
6942 *(double *)ptr = vtop->c.d;
6943 break;
6944 case VT_LDOUBLE:
6945 #if defined TCC_IS_NATIVE_387
6946 if (sizeof (long double) >= 10) /* zero pad ten-byte LD */
6947 memcpy(ptr, &vtop->c.ld, 10);
6948 #ifdef __TINYC__
6949 else if (sizeof (long double) == sizeof (double))
6950 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr) : "m" (vtop->c.ld));
6951 #endif
6952 else if (vtop->c.ld == 0.0)
6953 ;
6954 else
6955 #endif
6956 if (sizeof(long double) == LDOUBLE_SIZE)
6957 *(long double*)ptr = vtop->c.ld;
6958 else if (sizeof(double) == LDOUBLE_SIZE)
6959 *(double *)ptr = (double)vtop->c.ld;
6960 else
6961 tcc_error("can't cross compile long double constants");
6962 break;
6963 #if PTR_SIZE != 8
6964 case VT_LLONG:
6965 *(long long *)ptr |= vtop->c.i;
6966 break;
6967 #else
6968 case VT_LLONG:
6969 #endif
6970 case VT_PTR:
6972 addr_t val = vtop->c.i;
6973 #if PTR_SIZE == 8
6974 if (vtop->r & VT_SYM)
6975 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
6976 else
6977 *(addr_t *)ptr |= val;
6978 #else
6979 if (vtop->r & VT_SYM)
6980 greloc(sec, vtop->sym, c, R_DATA_PTR);
6981 *(addr_t *)ptr |= val;
6982 #endif
6983 break;
6985 default:
6987 int val = vtop->c.i;
6988 #if PTR_SIZE == 8
6989 if (vtop->r & VT_SYM)
6990 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
6991 else
6992 *(int *)ptr |= val;
6993 #else
6994 if (vtop->r & VT_SYM)
6995 greloc(sec, vtop->sym, c, R_DATA_PTR);
6996 *(int *)ptr |= val;
6997 #endif
6998 break;
7002 vtop--;
7003 } else {
7004 vset(&dtype, VT_LOCAL|VT_LVAL, c);
7005 vswap();
7006 vstore();
7007 vpop();
7011 /* 'type' contains the type and storage info. 'c' is the offset of the
7012 object in section 'sec'. If 'sec' is NULL, it means stack based
7013 allocation. 'flags & DIF_FIRST' is true if the array '{' must be read
7014 (multi dimension implicit array init handling). 'flags & DIF_SIZE_ONLY' is
7015 true if size-only evaluation is wanted (only for arrays). */
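/* For illustration (hypothetical declarations), this handles forms such as

       int m[][3] = { 1, 2, 3, 4 };   a DIF_SIZE_ONLY pre-pass is expected
                                      to fix the outer dimension to 2
       char s[] = "abc";              the string is copied byte-wise and the
                                      trailing '\0' stored when there is room
*/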
7016 static void decl_initializer(CType *type, Section *sec, unsigned long c,
7017 int flags)
7019 int len, n, no_oblock, nb, i;
7020 int size1, align1;
7021 Sym *s, *f;
7022 Sym indexsym;
7023 CType *t1;
7025 if (!(flags & DIF_HAVE_ELEM) && tok != '{' &&
7026 /* In case of strings we have special handling for arrays, so
7027 don't consume them as initializer value (which would commit them
7028 to some anonymous symbol). */
7029 tok != TOK_LSTR && tok != TOK_STR &&
7030 !(flags & DIF_SIZE_ONLY)) {
7031 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
7032 flags |= DIF_HAVE_ELEM;
7035 if ((flags & DIF_HAVE_ELEM) &&
7036 !(type->t & VT_ARRAY) &&
7037 /* Compare unqualified types here to strip toplevel qualifiers:
7038 the source type might have VT_CONSTANT set, which is
7039 of course assignable to non-const elements. */
7040 is_compatible_unqualified_types(type, &vtop->type)) {
7041 init_putv(type, sec, c);
7042 } else if (type->t & VT_ARRAY) {
7043 s = type->ref;
7044 n = s->c;
7045 t1 = pointed_type(type);
7046 size1 = type_size(t1, &align1);
7048 no_oblock = 1;
7049 if (((flags & DIF_FIRST) && tok != TOK_LSTR && tok != TOK_STR) ||
7050 tok == '{') {
7051 if (tok != '{')
7052 tcc_error("character array initializer must be a literal,"
7053 " optionally enclosed in braces");
7054 skip('{');
7055 no_oblock = 0;
7058 /* only parse strings here if the type is correct (otherwise handle
7059 them as ((w)char *) expressions) */
7060 if ((tok == TOK_LSTR &&
7061 #ifdef TCC_TARGET_PE
7062 (t1->t & VT_BTYPE) == VT_SHORT && (t1->t & VT_UNSIGNED)
7063 #else
7064 (t1->t & VT_BTYPE) == VT_INT
7065 #endif
7066 ) || (tok == TOK_STR && (t1->t & VT_BTYPE) == VT_BYTE)) {
7067 len = 0;
7068 while (tok == TOK_STR || tok == TOK_LSTR) {
7069 int cstr_len, ch;
7071 /* compute maximum number of chars wanted */
7072 if (tok == TOK_STR)
7073 cstr_len = tokc.str.size;
7074 else
7075 cstr_len = tokc.str.size / sizeof(nwchar_t);
7076 cstr_len--;
7077 nb = cstr_len;
7078 if (n >= 0 && nb > (n - len))
7079 nb = n - len;
7080 if (!(flags & DIF_SIZE_ONLY)) {
7081 if (cstr_len > nb)
7082 tcc_warning("initializer-string for array is too long");
7083 /* in order to go faster for the common case (char
7084 string in a global variable), we handle it
7085 specially */
7086 if (sec && tok == TOK_STR && size1 == 1) {
7087 if (!NODATA_WANTED)
7088 memcpy(sec->data + c + len, tokc.str.data, nb);
7089 } else {
7090 for(i=0;i<nb;i++) {
7091 if (tok == TOK_STR)
7092 ch = ((unsigned char *)tokc.str.data)[i];
7093 else
7094 ch = ((nwchar_t *)tokc.str.data)[i];
7095 vpushi(ch);
7096 init_putv(t1, sec, c + (len + i) * size1);
7100 len += nb;
7101 next();
7103 /* only add trailing zero if enough storage (no
7104 warning in this case since it is standard) */
7105 if (n < 0 || len < n) {
7106 if (!(flags & DIF_SIZE_ONLY)) {
7107 vpushi(0);
7108 init_putv(t1, sec, c + (len * size1));
7110 len++;
7112 len *= size1;
7113 } else {
7114 indexsym.c = 0;
7115 f = &indexsym;
7117 do_init_list:
7118 len = 0;
7119 while (tok != '}' || (flags & DIF_HAVE_ELEM)) {
7120 len = decl_designator(type, sec, c, &f, flags, len);
7121 flags &= ~DIF_HAVE_ELEM;
7122 if (type->t & VT_ARRAY) {
7123 ++indexsym.c;
7124 /* special test for multi dimensional arrays (may not
7125 be strictly correct if designators are used at the
7126 same time) */
7127 if (no_oblock && len >= n*size1)
7128 break;
7129 } else {
7130 if (s->type.t == VT_UNION)
7131 f = NULL;
7132 else
7133 f = f->next;
7134 if (no_oblock && f == NULL)
7135 break;
7138 if (tok == '}')
7139 break;
7140 skip(',');
7143 /* put zeros at the end */
7144 if (!(flags & DIF_SIZE_ONLY) && len < n*size1)
7145 init_putz(sec, c + len, n*size1 - len);
7146 if (!no_oblock)
7147 skip('}');
7148 /* patch type size if needed, which happens only for array types */
7149 if (n < 0)
7150 s->c = size1 == 1 ? len : ((len + size1 - 1)/size1);
7151 } else if ((type->t & VT_BTYPE) == VT_STRUCT) {
7152 size1 = 1;
7153 no_oblock = 1;
7154 if ((flags & DIF_FIRST) || tok == '{') {
7155 skip('{');
7156 no_oblock = 0;
7158 s = type->ref;
7159 f = s->next;
7160 n = s->c;
7161 goto do_init_list;
7162 } else if (tok == '{') {
7163 if (flags & DIF_HAVE_ELEM)
7164 skip(';');
7165 next();
7166 decl_initializer(type, sec, c, flags & ~DIF_HAVE_ELEM);
7167 skip('}');
7168 } else if ((flags & DIF_SIZE_ONLY)) {
7169 /* If we supported only ISO C we wouldn't have to accept calling
7170 this on anything other than an array if DIF_SIZE_ONLY (and even then
7171 only on the outermost level, so no recursion would be needed),
7172 because initializing a flex array member isn't supported.
7173 But GNU C supports it, so we need to recurse even into
7174 subfields of structs and arrays when DIF_SIZE_ONLY is set. */
7175 /* just skip expression */
7176 skip_or_save_block(NULL);
7177 } else {
7178 if (!(flags & DIF_HAVE_ELEM)) {
7179 /* This should happen only when we haven't parsed
7180 the init element above for fear of committing a
7181 string constant to memory too early. */
7182 if (tok != TOK_STR && tok != TOK_LSTR)
7183 expect("string constant");
7184 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
7186 init_putv(type, sec, c);
7190 /* parse an initializer for 'type' if 'has_init' is non-zero, and
7191 allocate space in local or global data space ('r' is either
7192 VT_LOCAL or VT_CONST). If 'v' is non-zero, then an associated
7193 variable 'v' of scope 'scope' is declared before the initializers
7194 are parsed. If 'v' is zero, then a reference to the new object
7195 is pushed on the value stack. If 'has_init' is 2, special parsing
7196 is done to handle string constants. */
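/* For illustration (hypothetical code): a string literal used as an
   expression, e.g. the "hi" in

       char *p = "hi";

   reaches this function from the expression parser with 'v' == 0 and
   'has_init' == 2: an anonymous array is allocated in the data section from
   the string tokens and a reference to it is left on the value stack. */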
7197 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r,
7198 int has_init, int v, int scope)
7200 int size, align, addr;
7201 TokenString *init_str = NULL;
7203 Section *sec;
7204 Sym *flexible_array;
7205 Sym *sym = NULL;
7206 int saved_nocode_wanted = nocode_wanted;
7207 #ifdef CONFIG_TCC_BCHECK
7208 int bcheck;
7209 #endif
7211 /* Always allocate static or global variables */
7212 if (v && (r & VT_VALMASK) == VT_CONST)
7213 nocode_wanted |= 0x80000000;
7215 #ifdef CONFIG_TCC_BCHECK
7216 bcheck = tcc_state->do_bounds_check && !NODATA_WANTED;
7217 #endif
7219 flexible_array = NULL;
7220 if ((type->t & VT_BTYPE) == VT_STRUCT) {
7221 Sym *field = type->ref->next;
7222 if (field) {
7223 while (field->next)
7224 field = field->next;
7225 if (field->type.t & VT_ARRAY && field->type.ref->c < 0)
7226 flexible_array = field;
7230 size = type_size(type, &align);
7231 /* If the size is unknown, we must evaluate it before
7232 evaluating the initializers because
7233 initializers can generate global data too
7234 (e.g. string pointers or ISO C99 compound
7235 literals). It also simplifies the handling
7236 of local initializers */
7237 if (size < 0 || (flexible_array && has_init)) {
7238 if (!has_init)
7239 tcc_error("unknown type size");
7240 /* get all init string */
7241 if (has_init == 2) {
7242 init_str = tok_str_alloc();
7243 /* only get strings */
7244 while (tok == TOK_STR || tok == TOK_LSTR) {
7245 tok_str_add_tok(init_str);
7246 next();
7248 tok_str_add(init_str, -1);
7249 tok_str_add(init_str, 0);
7250 } else {
7251 skip_or_save_block(&init_str);
7253 unget_tok(0);
7255 /* compute size */
7256 begin_macro(init_str, 1);
7257 next();
7258 decl_initializer(type, NULL, 0, DIF_FIRST | DIF_SIZE_ONLY);
7259 /* prepare second initializer parsing */
7260 macro_ptr = init_str->str;
7261 next();
7263 /* if still unknown size, error */
7264 size = type_size(type, &align);
7265 if (size < 0)
7266 tcc_error("unknown type size");
7268 /* If there's a flexible array member and it was used in the
7269 initializer, adjust the size. */
7270 if (flexible_array &&
7271 flexible_array->type.ref->c > 0)
7272 size += flexible_array->type.ref->c
7273 * pointed_size(&flexible_array->type);
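/* For illustration (hypothetical types): given

       struct vec { int n; int data[]; };
       static struct vec v = { 2, { 10, 20 } };

   the size-only pass sets the element count of 'data' to 2, so 'size' grows
   here by 2 * sizeof(int) beyond sizeof(struct vec). */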
7274 /* take into account specified alignment if bigger */
7275 if (ad->a.aligned) {
7276 int speca = 1 << (ad->a.aligned - 1);
7277 if (speca > align)
7278 align = speca;
7279 } else if (ad->a.packed) {
7280 align = 1;
7283 if (!v && NODATA_WANTED)
7284 size = 0, align = 1;
7286 if ((r & VT_VALMASK) == VT_LOCAL) {
7287 sec = NULL;
7288 #ifdef CONFIG_TCC_BCHECK
7289 if (bcheck && (type->t & VT_ARRAY)) {
7290 loc--;
7292 #endif
7293 loc = (loc - size) & -align;
7294 addr = loc;
7295 #ifdef CONFIG_TCC_BCHECK
7296 /* handles bounds */
7297 /* XXX: currently, since we do only one pass, we cannot track
7298 '&' operators, so we add only arrays */
7299 if (bcheck && (type->t & VT_ARRAY)) {
7300 addr_t *bounds_ptr;
7301 /* add padding between regions */
7302 loc--;
7303 /* then add local bound info */
7304 bounds_ptr = section_ptr_add(lbounds_section, 2 * sizeof(addr_t));
7305 bounds_ptr[0] = addr;
7306 bounds_ptr[1] = size;
7308 #endif
7309 if (v) {
7310 /* local variable */
7311 #ifdef CONFIG_TCC_ASM
7312 if (ad->asm_label) {
7313 int reg = asm_parse_regvar(ad->asm_label);
7314 if (reg >= 0)
7315 r = (r & ~VT_VALMASK) | reg;
7317 #endif
7318 sym = sym_push(v, type, r, addr);
7319 if (ad->cleanup_func) {
7320 Sym *cls = sym_push2(&all_cleanups, SYM_FIELD | ++ncleanups, 0, 0);
7321 cls->prev_tok = sym;
7322 cls->next = ad->cleanup_func;
7323 cls->ncl = current_cleanups;
7324 current_cleanups = cls;
7327 sym->a = ad->a;
7328 } else {
7329 /* push local reference */
7330 vset(type, r, addr);
7332 } else {
7333 if (v && scope == VT_CONST) {
7334 /* see if the symbol was already defined */
7335 sym = sym_find(v);
7336 if (sym) {
7337 patch_storage(sym, ad, type);
7338 /* we accept several definitions of the same global variable. */
7339 if (!has_init && sym->c && elfsym(sym)->st_shndx != SHN_UNDEF)
7340 goto no_alloc;
7344 /* allocate symbol in corresponding section */
7345 sec = ad->section;
7346 if (!sec) {
7347 if (has_init)
7348 sec = data_section;
7349 else if (tcc_state->nocommon)
7350 sec = bss_section;
7353 if (sec) {
7354 addr = section_add(sec, size, align);
7355 #ifdef CONFIG_TCC_BCHECK
7356 /* add padding if bound check */
7357 if (bcheck)
7358 section_add(sec, 1, 1);
7359 #endif
7360 } else {
7361 addr = align; /* SHN_COMMON is special, symbol value is align */
7362 sec = common_section;
7365 if (v) {
7366 if (!sym) {
7367 sym = sym_push(v, type, r | VT_SYM, 0);
7368 patch_storage(sym, ad, NULL);
7370 /* update symbol definition */
7371 put_extern_sym(sym, sec, addr, size);
7372 } else {
7373 /* push global reference */
7374 vpush_ref(type, sec, addr, size);
7375 sym = vtop->sym;
7376 vtop->r |= r;
7379 #ifdef CONFIG_TCC_BCHECK
7380 /* handle bounds now because the symbol must be defined
7381 before the relocation can be emitted */
7382 if (bcheck) {
7383 addr_t *bounds_ptr;
7385 greloca(bounds_section, sym, bounds_section->data_offset, R_DATA_PTR, 0);
7386 /* then add global bound info */
7387 bounds_ptr = section_ptr_add(bounds_section, 2 * sizeof(addr_t));
7388 bounds_ptr[0] = 0; /* relocated */
7389 bounds_ptr[1] = size;
7391 #endif
7394 if (type->t & VT_VLA) {
7395 int a;
7397 if (NODATA_WANTED)
7398 goto no_alloc;
7400 /* save current stack pointer */
7401 if (vlas_in_scope == 0) {
7402 if (vla_sp_root_loc == -1)
7403 vla_sp_root_loc = (loc -= PTR_SIZE);
7404 gen_vla_sp_save(vla_sp_root_loc);
7407 vla_runtime_type_size(type, &a);
7408 gen_vla_alloc(type, a);
7409 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
7410 /* on _WIN64, because of the function args scratch area, the
7411 result of alloca differs from RSP and is returned in RAX. */
7412 gen_vla_result(addr), addr = (loc -= PTR_SIZE);
7413 #endif
7414 gen_vla_sp_save(addr);
7415 vla_sp_loc = addr;
7416 vlas_in_scope++;
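/* For illustration (hypothetical function): in

       void f(int n) { int a[n]; ... }

   the first VLA in scope saves the stack pointer to vla_sp_root_loc, the
   runtime size comes from vla_runtime_type_size() and the stack is grown by
   gen_vla_alloc(); labels and gotos restore the saved stack pointer via
   vla_sp_restore()/vla_sp_restore_root() (see their uses above). */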
7418 } else if (has_init) {
7419 size_t oldreloc_offset = 0;
7420 if (sec && sec->reloc)
7421 oldreloc_offset = sec->reloc->data_offset;
7422 decl_initializer(type, sec, addr, DIF_FIRST);
7423 if (sec && sec->reloc)
7424 squeeze_multi_relocs(sec, oldreloc_offset);
7425 /* patch flexible array member size back to -1, */
7426 /* for possible subsequent similar declarations */
7427 if (flexible_array)
7428 flexible_array->type.ref->c = -1;
7431 no_alloc:
7432 /* restore parse state if needed */
7433 if (init_str) {
7434 end_macro();
7435 next();
7438 nocode_wanted = saved_nocode_wanted;
7441 /* parse a function defined by symbol 'sym' and generate its code in
7442 'cur_text_section' */
7443 static void gen_function(Sym *sym)
7445 nocode_wanted = 0;
7446 ind = cur_text_section->data_offset;
7447 if (sym->a.aligned) {
7448 size_t newoff = section_add(cur_text_section, 0,
7449 1 << (sym->a.aligned - 1));
7450 gen_fill_nops(newoff - ind);
7452 /* NOTE: we patch the symbol size later */
7453 put_extern_sym(sym, cur_text_section, ind, 0);
7454 funcname = get_tok_str(sym->v, NULL);
7455 func_ind = ind;
7456 /* Initialize VLA state */
7457 vla_sp_loc = -1;
7458 vla_sp_root_loc = -1;
7459 /* put debug symbol */
7460 tcc_debug_funcstart(tcc_state, sym);
7461 /* push a dummy symbol to enable local sym storage */
7462 sym_push2(&local_stack, SYM_FIELD, 0, 0);
7463 local_scope = 1; /* for function parameters */
7464 gfunc_prolog(&sym->type);
7465 reset_local_scope();
7466 rsym = 0;
7467 clear_temp_local_var_list();
7468 block(NULL, NULL, NULL, NULL, 0);
7469 gsym(rsym);
7470 nocode_wanted = 0;
7471 gfunc_epilog();
7472 cur_text_section->data_offset = ind;
7473 label_pop(&global_label_stack, NULL, 0);
7474 /* reset local stack */
7475 reset_local_scope();
7476 sym_pop(&local_stack, NULL, 0);
7477 /* end of function */
7478 /* patch symbol size */
7479 elfsym(sym)->st_size = ind - func_ind;
7480 tcc_debug_funcend(tcc_state, ind - func_ind);
7481 /* It's better to crash than to generate wrong code */
7482 cur_text_section = NULL;
7483 funcname = ""; /* for safety */
7484 func_vt.t = VT_VOID; /* for safety */
7485 func_var = 0; /* for safety */
7486 ind = 0; /* for safety */
7487 nocode_wanted = 0x80000000;
7488 check_vstack();
7491 static void gen_inline_functions(TCCState *s)
7493 Sym *sym;
7494 int inline_generated, i, ln;
7495 struct InlineFunc *fn;
7497 ln = file->line_num;
7498 /* iterate while inline functions are referenced */
7499 do {
7500 inline_generated = 0;
7501 for (i = 0; i < s->nb_inline_fns; ++i) {
7502 fn = s->inline_fns[i];
7503 sym = fn->sym;
7504 if (sym && (sym->c || !(sym->type.t & VT_INLINE))) {
7505 /* the function was used or forced (and then not internal):
7506 generate its code and convert it to a normal function */
7507 fn->sym = NULL;
7508 if (file)
7509 pstrcpy(file->filename, sizeof file->filename, fn->filename);
7510 begin_macro(fn->func_str, 1);
7511 next();
7512 cur_text_section = text_section;
7513 gen_function(sym);
7514 end_macro();
7516 inline_generated = 1;
7519 } while (inline_generated);
7520 file->line_num = ln;
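/* For illustration (hypothetical function): a definition like

       static inline int sq(int x) { return x * x; }

   is only token-recorded by decl0() below; the loop above emits its code on
   a later pass only if the symbol was referenced or is no longer inline-only,
   so unused static inline functions produce no output. */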
7523 ST_FUNC void free_inline_functions(TCCState *s)
7525 int i;
7526 /* free tokens of unused inline functions */
7527 for (i = 0; i < s->nb_inline_fns; ++i) {
7528 struct InlineFunc *fn = s->inline_fns[i];
7529 if (fn->sym)
7530 tok_str_free(fn->func_str);
7532 dynarray_reset(&s->inline_fns, &s->nb_inline_fns);
7535 /* 'l' is VT_LOCAL or VT_CONST to define the default storage type, or VT_CMP
7536 if parsing an old style parameter decl list (in which case FUNC_SYM is set) */
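/* For illustration (hypothetical function): the VT_CMP mode is used for
   old style definitions such as

       int add(a, b)
           int a, b;
       { return a + b; }

   where the declarations between ')' and '{' are matched against the
   parameter list of 'func_sym'. */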
7537 static int decl0(int l, int is_for_loop_init, Sym *func_sym)
7539 int v, has_init, r;
7540 CType type, btype;
7541 Sym *sym;
7542 AttributeDef ad, adbase;
7544 while (1) {
7545 if (tok == TOK_STATIC_ASSERT) {
7546 int c;
7548 next();
7549 skip('(');
7550 c = expr_const();
7551 skip(',');
7552 if (c == 0)
7553 tcc_error("%s", get_tok_str(tok, &tokc));
7554 next();
7555 skip(')');
7556 skip(';');
7557 continue;
7559 if (!parse_btype(&btype, &adbase)) {
7560 if (is_for_loop_init)
7561 return 0;
7562 /* skip redundant ';' if not in old parameter decl scope */
7563 if (tok == ';' && l != VT_CMP) {
7564 next();
7565 continue;
7567 if (l != VT_CONST)
7568 break;
7569 if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
7570 /* global asm block */
7571 asm_global_instr();
7572 continue;
7574 if (tok >= TOK_UIDENT) {
7575 /* special test for old K&R protos without explicit int
7576 type. Only accepted when defining global data */
7577 btype.t = VT_INT;
7578 } else {
7579 if (tok != TOK_EOF)
7580 expect("declaration");
7581 break;
7584 if (tok == ';') {
7585 if ((btype.t & VT_BTYPE) == VT_STRUCT) {
7586 int v = btype.ref->v;
7587 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) >= SYM_FIRST_ANOM)
7588 tcc_warning("unnamed struct/union that defines no instances");
7589 next();
7590 continue;
7592 if (IS_ENUM(btype.t)) {
7593 next();
7594 continue;
7597 while (1) { /* iterate thru each declaration */
7598 type = btype;
7599 /* If the base type itself was an array type of unspecified
7600 size (like in 'typedef int arr[]; arr x = {1};') then
7601 we will overwrite the unknown size with the real one for
7602 this decl. We need to unshare the ref symbol holding
7603 that size. */
7604 if ((type.t & VT_ARRAY) && type.ref->c < 0) {
7605 type.ref = sym_push(SYM_FIELD, &type.ref->type, 0, type.ref->c);
7607 ad = adbase;
7608 type_decl(&type, &ad, &v, TYPE_DIRECT);
7609 #if 0
7611 char buf[500];
7612 type_to_str(buf, sizeof(buf), &type, get_tok_str(v, NULL));
7613 printf("type = '%s'\n", buf);
7615 #endif
7616 if ((type.t & VT_BTYPE) == VT_FUNC) {
7617 /* if old style function prototype, we accept a
7618 declaration list */
7619 sym = type.ref;
7620 if (sym->f.func_type == FUNC_OLD && l == VT_CONST)
7621 decl0(VT_CMP, 0, sym);
7622 /* always compile 'extern inline' */
7623 if (type.t & VT_EXTERN)
7624 type.t &= ~VT_INLINE;
7627 if (gnu_ext && (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
7628 ad.asm_label = asm_label_instr();
7629 /* parse one last attribute list, after asm label */
7630 parse_attribute(&ad);
7631 #if 0
7632 /* gcc does not allow __asm__("label") with function definition,
7633 but why not ... */
7634 if (tok == '{')
7635 expect(";");
7636 #endif
7639 #ifdef TCC_TARGET_PE
7640 if (ad.a.dllimport || ad.a.dllexport) {
7641 if (type.t & (VT_STATIC|VT_TYPEDEF))
7642 tcc_error("cannot have dll linkage with static or typedef");
7643 if (ad.a.dllimport) {
7644 if ((type.t & VT_BTYPE) == VT_FUNC)
7645 ad.a.dllimport = 0;
7646 else
7647 type.t |= VT_EXTERN;
7650 #endif
7651 if (tok == '{') {
7652 if (l != VT_CONST)
7653 tcc_error("cannot use local functions");
7654 if ((type.t & VT_BTYPE) != VT_FUNC)
7655 expect("function definition");
7657 /* reject abstract declarators in function definitions;
7658 make old style params without a declaration have int type */
7659 sym = type.ref;
7660 while ((sym = sym->next) != NULL) {
7661 if (!(sym->v & ~SYM_FIELD))
7662 expect("identifier");
7663 if (sym->type.t == VT_VOID)
7664 sym->type = int_type;
7667 /* put function symbol */
7668 type.t &= ~VT_EXTERN;
7669 sym = external_sym(v, &type, 0, &ad);
7670 /* static inline functions are just recorded as a kind
7671 of macro. Their code will be emitted at the end of
7672 the compilation unit only if they are used */
7673 if (sym->type.t & VT_INLINE) {
7674 struct InlineFunc *fn;
7675 const char *filename;
7677 filename = file ? file->filename : "";
7678 fn = tcc_malloc(sizeof *fn + strlen(filename));
7679 strcpy(fn->filename, filename);
7680 fn->sym = sym;
7681 skip_or_save_block(&fn->func_str);
7682 dynarray_add(&tcc_state->inline_fns,
7683 &tcc_state->nb_inline_fns, fn);
7684 } else {
7685 /* compute text section */
7686 cur_text_section = ad.section;
7687 if (!cur_text_section)
7688 cur_text_section = text_section;
7689 gen_function(sym);
7691 break;
7692 } else {
7693 if (l == VT_CMP) {
7694 /* find parameter in function parameter list */
7695 for (sym = func_sym->next; sym; sym = sym->next)
7696 if ((sym->v & ~SYM_FIELD) == v)
7697 goto found;
7698 tcc_error("declaration for parameter '%s' but no such parameter",
7699 get_tok_str(v, NULL));
7700 found:
7701 if (type.t & VT_STORAGE) /* 'register' is okay */
7702 tcc_error("storage class specified for '%s'",
7703 get_tok_str(v, NULL));
7704 if (sym->type.t != VT_VOID)
7705 tcc_error("redefinition of parameter '%s'",
7706 get_tok_str(v, NULL));
7707 convert_parameter_type(&type);
7708 sym->type = type;
7709 } else if (type.t & VT_TYPEDEF) {
7710 /* save typedefed type */
7711 /* XXX: test storage specifiers ? */
7712 sym = sym_find(v);
7713 if (sym && sym->sym_scope == local_scope) {
7714 if (!is_compatible_types(&sym->type, &type)
7715 || !(sym->type.t & VT_TYPEDEF))
7716 tcc_error("incompatible redefinition of '%s'",
7717 get_tok_str(v, NULL));
7718 sym->type = type;
7719 } else {
7720 sym = sym_push(v, &type, 0, 0);
7722 sym->a = ad.a;
7723 sym->f = ad.f;
7724 } else if ((type.t & VT_BTYPE) == VT_VOID
7725 && !(type.t & VT_EXTERN)) {
7726 tcc_error("declaration of void object");
7727 } else {
7728 r = 0;
7729 if ((type.t & VT_BTYPE) == VT_FUNC) {
7730 /* external function definition */
7731 /* specific case for func_call attribute */
7732 type.ref->f = ad.f;
7733 } else if (!(type.t & VT_ARRAY)) {
7734 /* not lvalue if array */
7735 r |= lvalue_type(type.t);
7737 has_init = (tok == '=');
7738 if (has_init && (type.t & VT_VLA))
7739 tcc_error("variable length array cannot be initialized");
7740 if (((type.t & VT_EXTERN) && (!has_init || l != VT_CONST))
7741 || (type.t & VT_BTYPE) == VT_FUNC
7742 /* as with GCC, uninitialized global arrays with no size
7743 are considered extern: */
7744 || ((type.t & VT_ARRAY) && !has_init
7745 && l == VT_CONST && type.ref->c < 0)
7747 /* external variable or function */
7748 type.t |= VT_EXTERN;
7749 sym = external_sym(v, &type, r, &ad);
7750 if (ad.alias_target) {
7751 ElfSym *esym;
7752 Sym *alias_target;
7753 alias_target = sym_find(ad.alias_target);
7754 esym = elfsym(alias_target);
7755 if (!esym)
7756 tcc_error("unsupported forward __alias__ attribute");
7757 put_extern_sym2(sym, esym->st_shndx, esym->st_value, esym->st_size, 0);
7759 } else {
7760 if (type.t & VT_STATIC)
7761 r |= VT_CONST;
7762 else
7763 r |= l;
7764 if (has_init)
7765 next();
7766 else if (l == VT_CONST)
7767 /* uninitialized global variables may be overridden */
7768 type.t |= VT_EXTERN;
7769 decl_initializer_alloc(&type, &ad, r, has_init, v, l);
7772 if (tok != ',') {
7773 if (is_for_loop_init)
7774 return 1;
7775 skip(';');
7776 break;
7778 next();
7782 return 0;
7785 static void decl(int l)
7787 decl0(l, 0, NULL);
7790 /* ------------------------------------------------------------------------- */
7791 #undef gjmp_addr
7792 #undef gjmp
7793 /* ------------------------------------------------------------------------- */