configure: conftest win32: don't translate LF to CRLF
[tinycc.git] / tccgen.c
blobb3123df7ef34eeab36738b8e48f8b7bad9f33bb4
1 /*
2 * TCC - Tiny C Compiler
3 *
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 #include "tcc.h"
22 /********************************************************/
23 /* global variables */
25 /* loc : local variable index
26 ind : output code index
27 rsym: return symbol
28 anon_sym: anonymous symbol index
30 ST_DATA int rsym, anon_sym, ind, loc;
32 ST_DATA Sym *sym_free_first;
33 ST_DATA void **sym_pools;
34 ST_DATA int nb_sym_pools;
36 ST_DATA Sym *global_stack;
37 ST_DATA Sym *local_stack;
38 ST_DATA Sym *define_stack;
39 ST_DATA Sym *global_label_stack;
40 ST_DATA Sym *local_label_stack;
42 static Sym *all_cleanups, *pending_gotos;
43 static int local_scope;
44 static int in_sizeof;
45 static int in_generic;
46 static int section_sym;
48 ST_DATA SValue __vstack[1+VSTACK_SIZE], *vtop, *pvtop;
50 ST_DATA int const_wanted; /* true if constant wanted */
51 ST_DATA int nocode_wanted; /* no code generation wanted */
52 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
53 #define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */
55 /* Automagical code suppression ----> */
56 #define CODE_OFF() (nocode_wanted |= 0x20000000)
57 #define CODE_ON() (nocode_wanted &= ~0x20000000)
59 /* Clear 'nocode_wanted' at label if it was used */
60 ST_FUNC void gsym(int t) { if (t) { gsym_addr(t, ind); CODE_ON(); }}
61 static int gind(void) { CODE_ON(); return ind; }
63 /* Set 'nocode_wanted' after unconditional jumps */
64 static void gjmp_addr_acs(int t) { gjmp_addr(t); CODE_OFF(); }
65 static int gjmp_acs(int t) { t = gjmp(t); CODE_OFF(); return t; }
67 /* These are #undef'd at the end of this file */
68 #define gjmp_addr gjmp_addr_acs
69 #define gjmp gjmp_acs
70 /* <---- */
72 ST_DATA int global_expr; /* true if compound literals must be allocated globally (used during initializers parsing */
73 ST_DATA CType func_vt; /* current function return type (used by return instruction) */
74 ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
75 ST_DATA int func_vc;
76 ST_DATA int last_line_num, last_ind, func_ind; /* debug last line number and pc */
77 ST_DATA const char *funcname;
78 ST_DATA int g_debug;
80 ST_DATA CType char_pointer_type, func_old_type, int_type, size_type, ptrdiff_type;
82 ST_DATA struct switch_t {
83 struct case_t {
84 int64_t v1, v2;
85 int sym;
86 } **p; int n; /* list of case ranges */
87 int def_sym; /* default symbol */
88 int *bsym;
89 struct scope *scope;
90 } *cur_switch; /* current switch */
92 #define MAX_TEMP_LOCAL_VARIABLE_NUMBER 0x4
93 /*list of temporary local variables on the stack in current function. */
94 ST_DATA struct temp_local_variable {
95 int location; //offset on stack. Svalue.c.i
96 short size;
97 short align;
98 } arr_temp_local_vars[MAX_TEMP_LOCAL_VARIABLE_NUMBER];
99 short nb_temp_local_vars;
101 static struct scope {
102 struct scope *prev;
103 struct { int loc, num; } vla;
104 struct { Sym *s; int n; } cl;
105 int *bsym, *csym;
106 Sym *lstk, *llstk;
107 } *cur_scope, *loop_scope, *root_scope;
109 /* ------------------------------------------------------------------------- */
111 static void gen_cast(CType *type);
112 static void gen_cast_s(int t);
113 static inline CType *pointed_type(CType *type);
114 static int is_compatible_types(CType *type1, CType *type2);
115 static int parse_btype(CType *type, AttributeDef *ad);
116 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td);
117 static void parse_expr_type(CType *type);
118 static void init_putv(CType *type, Section *sec, unsigned long c);
119 static void decl_initializer(CType *type, Section *sec, unsigned long c, int flags);
120 static void block(int is_expr);
121 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
122 static void decl(int l);
123 static int decl0(int l, int is_for_loop_init, Sym *);
124 static void expr_eq(void);
125 static void vla_runtime_type_size(CType *type, int *a);
126 static int is_compatible_unqualified_types(CType *type1, CType *type2);
127 static inline int64_t expr_const64(void);
128 static void vpush64(int ty, unsigned long long v);
129 static void vpush(CType *type);
130 static int gvtst(int inv, int t);
131 static void gen_inline_functions(TCCState *s);
132 static void skip_or_save_block(TokenString **str);
133 static void gv_dup(void);
134 static int get_temp_local_var(int size,int align);
135 static void clear_temp_local_var_list();
137 ST_INLN int is_float(int t)
139 int bt;
140 bt = t & VT_BTYPE;
141 return bt == VT_LDOUBLE || bt == VT_DOUBLE || bt == VT_FLOAT || bt == VT_QFLOAT;
144 /* we use our own 'finite' function to avoid potential problems with
145 non standard math libs */
146 /* XXX: endianness dependent */
147 ST_FUNC int ieee_finite(double d)
149 int p[4];
150 memcpy(p, &d, sizeof(double));
151 return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
154 /* compiling intel long double natively */
155 #if (defined __i386__ || defined __x86_64__) \
156 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
157 # define TCC_IS_NATIVE_387
158 #endif
160 ST_FUNC void test_lvalue(void)
162 if (!(vtop->r & VT_LVAL))
163 expect("lvalue");
166 ST_FUNC void check_vstack(void)
168 if (pvtop != vtop)
169 tcc_error("internal compiler error: vstack leak (%d)", vtop - pvtop);
172 /* ------------------------------------------------------------------------- */
173 /* vstack debugging aid */
175 #if 0
176 void pv (const char *lbl, int a, int b)
178 int i;
179 for (i = a; i < a + b; ++i) {
180 SValue *p = &vtop[-i];
181 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
182 lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
185 #endif
187 /* ------------------------------------------------------------------------- */
188 /* start of translation unit info */
189 ST_FUNC void tcc_debug_start(TCCState *s1)
191 if (s1->do_debug) {
192 char buf[512];
194 /* file info: full path + filename */
195 section_sym = put_elf_sym(symtab_section, 0, 0,
196 ELFW(ST_INFO)(STB_LOCAL, STT_SECTION), 0,
197 text_section->sh_num, NULL);
198 getcwd(buf, sizeof(buf));
199 #ifdef _WIN32
200 normalize_slashes(buf);
201 #endif
202 pstrcat(buf, sizeof(buf), "/");
203 put_stabs_r(buf, N_SO, 0, 0,
204 text_section->data_offset, text_section, section_sym);
205 put_stabs_r(file->filename, N_SO, 0, 0,
206 text_section->data_offset, text_section, section_sym);
207 last_ind = 0;
208 last_line_num = 0;
211 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
212 symbols can be safely used */
213 put_elf_sym(symtab_section, 0, 0,
214 ELFW(ST_INFO)(STB_LOCAL, STT_FILE), 0,
215 SHN_ABS, file->filename);
218 /* put end of translation unit info */
219 ST_FUNC void tcc_debug_end(TCCState *s1)
221 if (!s1->do_debug)
222 return;
223 put_stabs_r(NULL, N_SO, 0, 0,
224 text_section->data_offset, text_section, section_sym);
228 /* generate line number info */
229 ST_FUNC void tcc_debug_line(TCCState *s1)
231 if (!s1->do_debug)
232 return;
233 if ((last_line_num != file->line_num || last_ind != ind)) {
234 put_stabn(N_SLINE, 0, file->line_num, ind - func_ind);
235 last_ind = ind;
236 last_line_num = file->line_num;
240 /* put function symbol */
241 ST_FUNC void tcc_debug_funcstart(TCCState *s1, Sym *sym)
243 char buf[512];
245 if (!s1->do_debug)
246 return;
248 /* stabs info */
249 /* XXX: we put here a dummy type */
250 snprintf(buf, sizeof(buf), "%s:%c1",
251 funcname, sym->type.t & VT_STATIC ? 'f' : 'F');
252 put_stabs_r(buf, N_FUN, 0, file->line_num, 0,
253 cur_text_section, sym->c);
254 /* //gr gdb wants a line at the function */
255 put_stabn(N_SLINE, 0, file->line_num, 0);
257 last_ind = 0;
258 last_line_num = 0;
261 /* put function size */
262 ST_FUNC void tcc_debug_funcend(TCCState *s1, int size)
264 if (!s1->do_debug)
265 return;
266 put_stabn(N_FUN, 0, 0, size);
269 /* ------------------------------------------------------------------------- */
270 ST_FUNC int tccgen_compile(TCCState *s1)
272 cur_text_section = NULL;
273 funcname = "";
274 anon_sym = SYM_FIRST_ANOM;
275 section_sym = 0;
276 const_wanted = 0;
277 nocode_wanted = 0x80000000;
278 local_scope = 0;
280 /* define some often used types */
281 int_type.t = VT_INT;
282 char_pointer_type.t = VT_BYTE;
283 mk_pointer(&char_pointer_type);
284 #if PTR_SIZE == 4
285 size_type.t = VT_INT | VT_UNSIGNED;
286 ptrdiff_type.t = VT_INT;
287 #elif LONG_SIZE == 4
288 size_type.t = VT_LLONG | VT_UNSIGNED;
289 ptrdiff_type.t = VT_LLONG;
290 #else
291 size_type.t = VT_LONG | VT_LLONG | VT_UNSIGNED;
292 ptrdiff_type.t = VT_LONG | VT_LLONG;
293 #endif
294 func_old_type.t = VT_FUNC;
295 func_old_type.ref = sym_push(SYM_FIELD, &int_type, 0, 0);
296 func_old_type.ref->f.func_call = FUNC_CDECL;
297 func_old_type.ref->f.func_type = FUNC_OLD;
299 tcc_debug_start(s1);
301 #ifdef TCC_TARGET_ARM
302 arm_init(s1);
303 #endif
305 #ifdef INC_DEBUG
306 printf("%s: **** new file\n", file->filename);
307 #endif
309 parse_flags = PARSE_FLAG_PREPROCESS | PARSE_FLAG_TOK_NUM | PARSE_FLAG_TOK_STR;
310 next();
311 decl(VT_CONST);
312 gen_inline_functions(s1);
313 check_vstack();
314 /* end of translation unit info */
315 tcc_debug_end(s1);
316 return 0;
319 /* ------------------------------------------------------------------------- */
320 ST_FUNC ElfSym *elfsym(Sym *s)
322 if (!s || !s->c)
323 return NULL;
324 return &((ElfSym *)symtab_section->data)[s->c];
327 /* apply storage attributes to Elf symbol */
328 ST_FUNC void update_storage(Sym *sym)
330 ElfSym *esym;
331 int sym_bind, old_sym_bind;
333 esym = elfsym(sym);
334 if (!esym)
335 return;
337 if (sym->a.visibility)
338 esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1))
339 | sym->a.visibility;
341 if (sym->type.t & (VT_STATIC | VT_INLINE))
342 sym_bind = STB_LOCAL;
343 else if (sym->a.weak)
344 sym_bind = STB_WEAK;
345 else
346 sym_bind = STB_GLOBAL;
347 old_sym_bind = ELFW(ST_BIND)(esym->st_info);
348 if (sym_bind != old_sym_bind) {
349 esym->st_info = ELFW(ST_INFO)(sym_bind, ELFW(ST_TYPE)(esym->st_info));
352 #ifdef TCC_TARGET_PE
353 if (sym->a.dllimport)
354 esym->st_other |= ST_PE_IMPORT;
355 if (sym->a.dllexport)
356 esym->st_other |= ST_PE_EXPORT;
357 #endif
359 #if 0
360 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
361 get_tok_str(sym->v, NULL),
362 sym_bind == STB_WEAK ? 'w' : sym_bind == STB_LOCAL ? 'l' : 'g',
363 sym->a.visibility,
364 sym->a.dllexport,
365 sym->a.dllimport
367 #endif
370 /* ------------------------------------------------------------------------- */
371 /* update sym->c so that it points to an external symbol in section
372 'section' with value 'value' */
374 ST_FUNC void put_extern_sym2(Sym *sym, int sh_num,
375 addr_t value, unsigned long size,
376 int can_add_underscore)
378 int sym_type, sym_bind, info, other, t;
379 ElfSym *esym;
380 const char *name;
381 char buf1[256];
382 #ifdef CONFIG_TCC_BCHECK
383 char buf[32];
384 #endif
386 if (!sym->c) {
387 name = get_tok_str(sym->v, NULL);
388 #ifdef CONFIG_TCC_BCHECK
389 if (tcc_state->do_bounds_check) {
390 /* XXX: avoid doing that for statics ? */
391 /* if bound checking is activated, we change some function
392 names by adding the "__bound" prefix */
393 switch(sym->v) {
394 #ifdef TCC_TARGET_PE
395 /* XXX: we rely only on malloc hooks */
396 case TOK_malloc:
397 case TOK_free:
398 case TOK_realloc:
399 case TOK_memalign:
400 case TOK_calloc:
401 #endif
402 case TOK_memcpy:
403 case TOK_memmove:
404 case TOK_memset:
405 case TOK_strlen:
406 case TOK_strcpy:
407 case TOK_alloca:
408 strcpy(buf, "__bound_");
409 strcat(buf, name);
410 name = buf;
411 break;
414 #endif
415 t = sym->type.t;
416 if ((t & VT_BTYPE) == VT_FUNC) {
417 sym_type = STT_FUNC;
418 } else if ((t & VT_BTYPE) == VT_VOID) {
419 sym_type = STT_NOTYPE;
420 } else {
421 sym_type = STT_OBJECT;
423 if (t & (VT_STATIC | VT_INLINE))
424 sym_bind = STB_LOCAL;
425 else
426 sym_bind = STB_GLOBAL;
427 other = 0;
428 #ifdef TCC_TARGET_PE
429 if (sym_type == STT_FUNC && sym->type.ref) {
430 Sym *ref = sym->type.ref;
431 if (ref->a.nodecorate) {
432 can_add_underscore = 0;
434 if (ref->f.func_call == FUNC_STDCALL && can_add_underscore) {
435 sprintf(buf1, "_%s@%d", name, ref->f.func_args * PTR_SIZE);
436 name = buf1;
437 other |= ST_PE_STDCALL;
438 can_add_underscore = 0;
441 #endif
442 if (tcc_state->leading_underscore && can_add_underscore) {
443 buf1[0] = '_';
444 pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
445 name = buf1;
447 if (sym->asm_label)
448 name = get_tok_str(sym->asm_label, NULL);
449 info = ELFW(ST_INFO)(sym_bind, sym_type);
450 sym->c = put_elf_sym(symtab_section, value, size, info, other, sh_num, name);
451 } else {
452 esym = elfsym(sym);
453 esym->st_value = value;
454 esym->st_size = size;
455 esym->st_shndx = sh_num;
457 update_storage(sym);
460 ST_FUNC void put_extern_sym(Sym *sym, Section *section,
461 addr_t value, unsigned long size)
463 int sh_num = section ? section->sh_num : SHN_UNDEF;
464 put_extern_sym2(sym, sh_num, value, size, 1);
467 /* add a new relocation entry to symbol 'sym' in section 's' */
468 ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
469 addr_t addend)
471 int c = 0;
473 if (nocode_wanted && s == cur_text_section)
474 return;
476 if (sym) {
477 if (0 == sym->c)
478 put_extern_sym(sym, NULL, 0, 0);
479 c = sym->c;
482 /* now we can add ELF relocation info */
483 put_elf_reloca(symtab_section, s, offset, type, c, addend);
486 #if PTR_SIZE == 4
487 ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
489 greloca(s, sym, offset, type, 0);
491 #endif
493 /* ------------------------------------------------------------------------- */
494 /* symbol allocator */
495 static Sym *__sym_malloc(void)
497 Sym *sym_pool, *sym, *last_sym;
498 int i;
500 sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
501 dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);
503 last_sym = sym_free_first;
504 sym = sym_pool;
505 for(i = 0; i < SYM_POOL_NB; i++) {
506 sym->next = last_sym;
507 last_sym = sym;
508 sym++;
510 sym_free_first = last_sym;
511 return last_sym;
514 static inline Sym *sym_malloc(void)
516 Sym *sym;
517 #ifndef SYM_DEBUG
518 sym = sym_free_first;
519 if (!sym)
520 sym = __sym_malloc();
521 sym_free_first = sym->next;
522 return sym;
523 #else
524 sym = tcc_malloc(sizeof(Sym));
525 return sym;
526 #endif
529 ST_INLN void sym_free(Sym *sym)
531 #ifndef SYM_DEBUG
532 sym->next = sym_free_first;
533 sym_free_first = sym;
534 #else
535 tcc_free(sym);
536 #endif
539 /* push, without hashing */
540 ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, int c)
542 Sym *s;
544 s = sym_malloc();
545 memset(s, 0, sizeof *s);
546 s->v = v;
547 s->type.t = t;
548 s->c = c;
549 /* add in stack */
550 s->prev = *ps;
551 *ps = s;
552 return s;
555 /* find a symbol and return its associated structure. 's' is the top
556 of the symbol stack */
557 ST_FUNC Sym *sym_find2(Sym *s, int v)
559 while (s) {
560 if (s->v == v)
561 return s;
562 else if (s->v == -1)
563 return NULL;
564 s = s->prev;
566 return NULL;
569 /* structure lookup */
570 ST_INLN Sym *struct_find(int v)
572 v -= TOK_IDENT;
573 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
574 return NULL;
575 return table_ident[v]->sym_struct;
578 /* find an identifier */
579 ST_INLN Sym *sym_find(int v)
581 v -= TOK_IDENT;
582 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
583 return NULL;
584 return table_ident[v]->sym_identifier;
587 static int sym_scope(Sym *s)
589 if (IS_ENUM_VAL (s->type.t))
590 return s->type.ref->sym_scope;
591 else
592 return s->sym_scope;
595 /* push a given symbol on the symbol stack */
596 ST_FUNC Sym *sym_push(int v, CType *type, int r, int c)
598 Sym *s, **ps;
599 TokenSym *ts;
601 if (local_stack)
602 ps = &local_stack;
603 else
604 ps = &global_stack;
605 s = sym_push2(ps, v, type->t, c);
606 s->type.ref = type->ref;
607 s->r = r;
608 /* don't record fields or anonymous symbols */
609 /* XXX: simplify */
610 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
611 /* record symbol in token array */
612 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
613 if (v & SYM_STRUCT)
614 ps = &ts->sym_struct;
615 else
616 ps = &ts->sym_identifier;
617 s->prev_tok = *ps;
618 *ps = s;
619 s->sym_scope = local_scope;
620 if (s->prev_tok && sym_scope(s->prev_tok) == s->sym_scope)
621 tcc_error("redeclaration of '%s'",
622 get_tok_str(v & ~SYM_STRUCT, NULL));
624 return s;
627 /* push a global identifier */
628 ST_FUNC Sym *global_identifier_push(int v, int t, int c)
630 Sym *s, **ps;
631 s = sym_push2(&global_stack, v, t, c);
632 s->r = VT_CONST | VT_SYM;
633 /* don't record anonymous symbol */
634 if (v < SYM_FIRST_ANOM) {
635 ps = &table_ident[v - TOK_IDENT]->sym_identifier;
636 /* modify the top most local identifier, so that sym_identifier will
637 point to 's' when popped; happens when called from inline asm */
638 while (*ps != NULL && (*ps)->sym_scope)
639 ps = &(*ps)->prev_tok;
640 s->prev_tok = *ps;
641 *ps = s;
643 return s;
646 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
647 pop them yet from the list, but do remove them from the token array. */
648 ST_FUNC void sym_pop(Sym **ptop, Sym *b, int keep)
650 Sym *s, *ss, **ps;
651 TokenSym *ts;
652 int v;
654 s = *ptop;
655 while(s != b) {
656 ss = s->prev;
657 v = s->v;
658 /* remove symbol in token array */
659 /* XXX: simplify */
660 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
661 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
662 if (v & SYM_STRUCT)
663 ps = &ts->sym_struct;
664 else
665 ps = &ts->sym_identifier;
666 *ps = s->prev_tok;
668 if (!keep)
669 sym_free(s);
670 s = ss;
672 if (!keep)
673 *ptop = b;
676 /* ------------------------------------------------------------------------- */
677 static void vcheck_cmp(void)
679 /* cannot let cpu flags if other instruction are generated. Also
680 avoid leaving VT_JMP anywhere except on the top of the stack
681 because it would complicate the code generator.
683 Don't do this when nocode_wanted. vtop might come from
684 !nocode_wanted regions (see 88_codeopt.c) and transforming
685 it to a register without actually generating code is wrong
686 as their value might still be used for real. All values
687 we push under nocode_wanted will eventually be popped
688 again, so that the VT_CMP/VT_JMP value will be in vtop
689 when code is unsuppressed again. */
691 if (vtop->r == VT_CMP && !nocode_wanted)
692 gv(RC_INT);
695 static void vsetc(CType *type, int r, CValue *vc)
697 if (vtop >= vstack + (VSTACK_SIZE - 1))
698 tcc_error("memory full (vstack)");
699 vcheck_cmp();
700 vtop++;
701 vtop->type = *type;
702 vtop->r = r;
703 vtop->r2 = VT_CONST;
704 vtop->c = *vc;
705 vtop->sym = NULL;
708 ST_FUNC void vswap(void)
710 SValue tmp;
712 vcheck_cmp();
713 tmp = vtop[0];
714 vtop[0] = vtop[-1];
715 vtop[-1] = tmp;
718 /* pop stack value */
719 ST_FUNC void vpop(void)
721 int v;
722 v = vtop->r & VT_VALMASK;
723 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
724 /* for x86, we need to pop the FP stack */
725 if (v == TREG_ST0) {
726 o(0xd8dd); /* fstp %st(0) */
727 } else
728 #endif
729 if (v == VT_CMP) {
730 /* need to put correct jump if && or || without test */
731 gsym(vtop->jtrue);
732 gsym(vtop->jfalse);
734 vtop--;
737 /* push constant of type "type" with useless value */
738 ST_FUNC void vpush(CType *type)
740 vset(type, VT_CONST, 0);
743 /* push integer constant */
744 ST_FUNC void vpushi(int v)
746 CValue cval;
747 cval.i = v;
748 vsetc(&int_type, VT_CONST, &cval);
751 /* push a pointer sized constant */
752 static void vpushs(addr_t v)
754 CValue cval;
755 cval.i = v;
756 vsetc(&size_type, VT_CONST, &cval);
759 /* push arbitrary 64bit constant */
760 ST_FUNC void vpush64(int ty, unsigned long long v)
762 CValue cval;
763 CType ctype;
764 ctype.t = ty;
765 ctype.ref = NULL;
766 cval.i = v;
767 vsetc(&ctype, VT_CONST, &cval);
770 /* push long long constant */
771 static inline void vpushll(long long v)
773 vpush64(VT_LLONG, v);
776 ST_FUNC void vset(CType *type, int r, int v)
778 CValue cval;
780 cval.i = v;
781 vsetc(type, r, &cval);
784 static void vseti(int r, int v)
786 CType type;
787 type.t = VT_INT;
788 type.ref = NULL;
789 vset(&type, r, v);
792 ST_FUNC void vpushv(SValue *v)
794 if (vtop >= vstack + (VSTACK_SIZE - 1))
795 tcc_error("memory full (vstack)");
796 vtop++;
797 *vtop = *v;
800 static void vdup(void)
802 vpushv(vtop);
805 /* rotate n first stack elements to the bottom
806 I1 ... In -> I2 ... In I1 [top is right]
808 ST_FUNC void vrotb(int n)
810 int i;
811 SValue tmp;
813 vcheck_cmp();
814 tmp = vtop[-n + 1];
815 for(i=-n+1;i!=0;i++)
816 vtop[i] = vtop[i+1];
817 vtop[0] = tmp;
820 /* rotate the n elements before entry e towards the top
821 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
823 ST_FUNC void vrote(SValue *e, int n)
825 int i;
826 SValue tmp;
828 vcheck_cmp();
829 tmp = *e;
830 for(i = 0;i < n - 1; i++)
831 e[-i] = e[-i - 1];
832 e[-n + 1] = tmp;
835 /* rotate n first stack elements to the top
836 I1 ... In -> In I1 ... I(n-1) [top is right]
838 ST_FUNC void vrott(int n)
840 vrote(vtop, n);
843 /* ------------------------------------------------------------------------- */
844 /* vtop->r = VT_CMP means CPU-flags have been set from comparison or test. */
846 /* called from generators to set the result from relational ops */
847 ST_FUNC void vset_VT_CMP(int op)
849 vtop->r = VT_CMP;
850 vtop->cmp_op = op;
851 vtop->jfalse = 0;
852 vtop->jtrue = 0;
855 /* called once before asking generators to load VT_CMP to a register */
856 static void vset_VT_JMP(void)
858 int op = vtop->cmp_op;
859 if (vtop->jtrue || vtop->jfalse) {
860 /* we need to jump to 'mov $0,%R' or 'mov $1,%R' */
861 int inv = op & (op < 2); /* small optimization */
862 vseti(VT_JMP+inv, gvtst(inv, 0));
863 } else {
864 /* otherwise convert flags (rsp. 0/1) to register */
865 vtop->c.i = op;
866 if (op < 2) /* doesn't seem to happen */
867 vtop->r = VT_CONST;
871 /* Set CPU Flags, doesn't yet jump */
872 static void gvtst_set(int inv, int t)
874 int *p;
875 if (vtop->r != VT_CMP) {
876 vpushi(0);
877 gen_op(TOK_NE);
878 if (vtop->r != VT_CMP) /* must be VT_CONST then */
879 vset_VT_CMP(vtop->c.i != 0);
881 p = inv ? &vtop->jfalse : &vtop->jtrue;
882 *p = gjmp_append(*p, t);
885 /* Generate value test
887 * Generate a test for any value (jump, comparison and integers) */
888 static int gvtst(int inv, int t)
890 int op, u, x;
892 gvtst_set(inv, t);
894 t = vtop->jtrue, u = vtop->jfalse;
895 if (inv)
896 x = u, u = t, t = x;
897 op = vtop->cmp_op;
899 /* jump to the wanted target */
900 if (op > 1)
901 t = gjmp_cond(op ^ inv, t);
902 else if (op != inv)
903 t = gjmp(t);
904 /* resolve complementary jumps to here */
905 gsym(u);
907 vtop--;
908 return t;
911 /* ------------------------------------------------------------------------- */
912 /* push a symbol value of TYPE */
913 static inline void vpushsym(CType *type, Sym *sym)
915 CValue cval;
916 cval.i = 0;
917 vsetc(type, VT_CONST | VT_SYM, &cval);
918 vtop->sym = sym;
921 /* Return a static symbol pointing to a section */
922 ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
924 int v;
925 Sym *sym;
927 v = anon_sym++;
928 sym = sym_push(v, type, VT_CONST | VT_SYM, 0);
929 sym->type.t |= VT_STATIC;
930 put_extern_sym(sym, sec, offset, size);
931 return sym;
934 /* push a reference to a section offset by adding a dummy symbol */
935 static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
937 vpushsym(type, get_sym_ref(type, sec, offset, size));
940 /* define a new external reference to a symbol 'v' of type 'u' */
941 ST_FUNC Sym *external_global_sym(int v, CType *type)
943 Sym *s;
945 s = sym_find(v);
946 if (!s) {
947 /* push forward reference */
948 s = global_identifier_push(v, type->t | VT_EXTERN, 0);
949 s->type.ref = type->ref;
950 } else if (IS_ASM_SYM(s)) {
951 s->type.t = type->t | (s->type.t & VT_EXTERN);
952 s->type.ref = type->ref;
953 update_storage(s);
955 return s;
958 /* Merge symbol attributes. */
959 static void merge_symattr(struct SymAttr *sa, struct SymAttr *sa1)
961 if (sa1->aligned && !sa->aligned)
962 sa->aligned = sa1->aligned;
963 sa->packed |= sa1->packed;
964 sa->weak |= sa1->weak;
965 if (sa1->visibility != STV_DEFAULT) {
966 int vis = sa->visibility;
967 if (vis == STV_DEFAULT
968 || vis > sa1->visibility)
969 vis = sa1->visibility;
970 sa->visibility = vis;
972 sa->dllexport |= sa1->dllexport;
973 sa->nodecorate |= sa1->nodecorate;
974 sa->dllimport |= sa1->dllimport;
977 /* Merge function attributes. */
978 static void merge_funcattr(struct FuncAttr *fa, struct FuncAttr *fa1)
980 if (fa1->func_call && !fa->func_call)
981 fa->func_call = fa1->func_call;
982 if (fa1->func_type && !fa->func_type)
983 fa->func_type = fa1->func_type;
984 if (fa1->func_args && !fa->func_args)
985 fa->func_args = fa1->func_args;
988 /* Merge attributes. */
989 static void merge_attr(AttributeDef *ad, AttributeDef *ad1)
991 merge_symattr(&ad->a, &ad1->a);
992 merge_funcattr(&ad->f, &ad1->f);
994 if (ad1->section)
995 ad->section = ad1->section;
996 if (ad1->alias_target)
997 ad->alias_target = ad1->alias_target;
998 if (ad1->asm_label)
999 ad->asm_label = ad1->asm_label;
1000 if (ad1->attr_mode)
1001 ad->attr_mode = ad1->attr_mode;
1004 /* Merge some type attributes. */
1005 static void patch_type(Sym *sym, CType *type)
1007 if (!(type->t & VT_EXTERN) || IS_ENUM_VAL(sym->type.t)) {
1008 if (!(sym->type.t & VT_EXTERN))
1009 tcc_error("redefinition of '%s'", get_tok_str(sym->v, NULL));
1010 sym->type.t &= ~VT_EXTERN;
1013 if (IS_ASM_SYM(sym)) {
1014 /* stay static if both are static */
1015 sym->type.t = type->t & (sym->type.t | ~VT_STATIC);
1016 sym->type.ref = type->ref;
1019 if (!is_compatible_types(&sym->type, type)) {
1020 tcc_error("incompatible types for redefinition of '%s'",
1021 get_tok_str(sym->v, NULL));
1023 } else if ((sym->type.t & VT_BTYPE) == VT_FUNC) {
1024 int static_proto = sym->type.t & VT_STATIC;
1025 /* warn if static follows non-static function declaration */
1026 if ((type->t & VT_STATIC) && !static_proto
1027 /* XXX this test for inline shouldn't be here. Until we
1028 implement gnu-inline mode again it silences a warning for
1029 mingw caused by our workarounds. */
1030 && !((type->t | sym->type.t) & VT_INLINE))
1031 tcc_warning("static storage ignored for redefinition of '%s'",
1032 get_tok_str(sym->v, NULL));
1034 /* set 'inline' if both agree or if one has static */
1035 if ((type->t | sym->type.t) & VT_INLINE) {
1036 if (!((type->t ^ sym->type.t) & VT_INLINE)
1037 || ((type->t | sym->type.t) & VT_STATIC))
1038 static_proto |= VT_INLINE;
1041 if (0 == (type->t & VT_EXTERN)) {
1042 /* put complete type, use static from prototype */
1043 sym->type.t = (type->t & ~(VT_STATIC|VT_INLINE)) | static_proto;
1044 sym->type.ref = type->ref;
1045 } else {
1046 sym->type.t &= ~VT_INLINE | static_proto;
1049 if (sym->type.ref->f.func_type == FUNC_OLD
1050 && type->ref->f.func_type != FUNC_OLD) {
1051 sym->type.ref = type->ref;
1054 } else {
1055 if ((sym->type.t & VT_ARRAY) && type->ref->c >= 0) {
1056 /* set array size if it was omitted in extern declaration */
1057 sym->type.ref->c = type->ref->c;
1059 if ((type->t ^ sym->type.t) & VT_STATIC)
1060 tcc_warning("storage mismatch for redefinition of '%s'",
1061 get_tok_str(sym->v, NULL));
1065 /* Merge some storage attributes. */
1066 static void patch_storage(Sym *sym, AttributeDef *ad, CType *type)
1068 if (type)
1069 patch_type(sym, type);
1071 #ifdef TCC_TARGET_PE
1072 if (sym->a.dllimport != ad->a.dllimport)
1073 tcc_error("incompatible dll linkage for redefinition of '%s'",
1074 get_tok_str(sym->v, NULL));
1075 #endif
1076 merge_symattr(&sym->a, &ad->a);
1077 if (ad->asm_label)
1078 sym->asm_label = ad->asm_label;
1079 update_storage(sym);
1082 /* copy sym to other stack */
1083 static Sym *sym_copy(Sym *s0, Sym **ps)
1085 Sym *s;
1086 s = sym_malloc(), *s = *s0;
1087 s->prev = *ps, *ps = s;
1088 if (s->v < SYM_FIRST_ANOM) {
1089 ps = &table_ident[s->v - TOK_IDENT]->sym_identifier;
1090 s->prev_tok = *ps, *ps = s;
1092 return s;
1095 /* copy a list of syms */
1096 static void sym_copy_ref(Sym *s0, Sym **ps)
1098 Sym *s, **sp = &s0->type.ref;
1099 for (s = *sp, *sp = NULL; s; s = s->next)
1100 sp = &(*sp = sym_copy(s, ps))->next;
1103 /* define a new external reference to a symbol 'v' */
1104 static Sym *external_sym(int v, CType *type, int r, AttributeDef *ad)
1106 Sym *s; int bt;
1108 /* look for global symbol */
1109 s = sym_find(v);
1110 while (s && s->sym_scope)
1111 s = s->prev_tok;
1113 if (!s) {
1114 /* push forward reference */
1115 s = global_identifier_push(v, type->t, 0);
1116 s->r |= r;
1117 s->a = ad->a;
1118 s->asm_label = ad->asm_label;
1119 s->type.ref = type->ref;
1120 bt = s->type.t & (VT_BTYPE|VT_ARRAY);
1121 /* copy type to the global stack also */
1122 if (local_scope && (bt == VT_FUNC || (bt & VT_ARRAY)))
1123 sym_copy_ref(s, &global_stack);
1124 } else {
1125 patch_storage(s, ad, type);
1126 bt = s->type.t & VT_BTYPE;
1128 /* push variables to local scope if any */
1129 if (local_stack && bt != VT_FUNC)
1130 s = sym_copy(s, &local_stack);
1131 return s;
1134 /* push a reference to global symbol v */
1135 ST_FUNC void vpush_global_sym(CType *type, int v)
1137 vpushsym(type, external_global_sym(v, type));
1140 /* save registers up to (vtop - n) stack entry */
1141 ST_FUNC void save_regs(int n)
1143 SValue *p, *p1;
1144 for(p = vstack, p1 = vtop - n; p <= p1; p++)
1145 save_reg(p->r);
1148 /* save r to the memory stack, and mark it as being free */
1149 ST_FUNC void save_reg(int r)
1151 save_reg_upstack(r, 0);
1154 /* save r to the memory stack, and mark it as being free,
1155 if seen up to (vtop - n) stack entry */
1156 ST_FUNC void save_reg_upstack(int r, int n)
1158 int l, saved, size, align;
1159 SValue *p, *p1, sv;
1160 CType *type;
1162 if ((r &= VT_VALMASK) >= VT_CONST)
1163 return;
1164 if (nocode_wanted)
1165 return;
1167 /* modify all stack values */
1168 saved = 0;
1169 l = 0;
1170 for(p = vstack, p1 = vtop - n; p <= p1; p++) {
1171 if ((p->r & VT_VALMASK) == r ||
1172 ((p->type.t & VT_BTYPE) == VT_LLONG && (p->r2 & VT_VALMASK) == r)) {
1173 /* must save value on stack if not already done */
1174 if (!saved) {
1175 /* NOTE: must reload 'r' because r might be equal to r2 */
1176 r = p->r & VT_VALMASK;
1177 /* store register in the stack */
1178 type = &p->type;
1179 if ((p->r & VT_LVAL) ||
1180 (!is_float(type->t) && (type->t & VT_BTYPE) != VT_LLONG))
1181 #if PTR_SIZE == 8
1182 type = &char_pointer_type;
1183 #else
1184 type = &int_type;
1185 #endif
1186 size = type_size(type, &align);
1187 l=get_temp_local_var(size,align);
1188 sv.type.t = type->t;
1189 sv.r = VT_LOCAL | VT_LVAL;
1190 sv.c.i = l;
1191 store(r, &sv);
1192 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1193 /* x86 specific: need to pop fp register ST0 if saved */
1194 if (r == TREG_ST0) {
1195 o(0xd8dd); /* fstp %st(0) */
1197 #endif
1198 #if PTR_SIZE == 4
1199 /* special long long case */
1200 if ((type->t & VT_BTYPE) == VT_LLONG) {
1201 sv.c.i += 4;
1202 store(p->r2, &sv);
1204 #endif
1205 saved = 1;
1207 /* mark that stack entry as being saved on the stack */
1208 if (p->r & VT_LVAL) {
1209 /* also clear the bounded flag because the
1210 relocation address of the function was stored in
1211 p->c.i */
1212 p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
1213 } else {
1214 p->r = lvalue_type(p->type.t) | VT_LOCAL;
1216 p->r2 = VT_CONST;
1217 p->c.i = l;
1222 #ifdef TCC_TARGET_ARM
1223 /* find a register of class 'rc2' with at most one reference on stack.
1224 * If none, call get_reg(rc) */
1225 ST_FUNC int get_reg_ex(int rc, int rc2)
1227 int r;
1228 SValue *p;
1230 for(r=0;r<NB_REGS;r++) {
1231 if (reg_classes[r] & rc2) {
1232 int n;
1233 n=0;
1234 for(p = vstack; p <= vtop; p++) {
1235 if ((p->r & VT_VALMASK) == r ||
1236 (p->r2 & VT_VALMASK) == r)
1237 n++;
1239 if (n <= 1)
1240 return r;
1243 return get_reg(rc);
1245 #endif
1247 /* find a free register of class 'rc'. If none, save one register */
1248 ST_FUNC int get_reg(int rc)
1250 int r;
1251 SValue *p;
1253 /* find a free register */
1254 for(r=0;r<NB_REGS;r++) {
1255 if (reg_classes[r] & rc) {
1256 if (nocode_wanted)
1257 return r;
1258 for(p=vstack;p<=vtop;p++) {
1259 if ((p->r & VT_VALMASK) == r ||
1260 (p->r2 & VT_VALMASK) == r)
1261 goto notfound;
1263 return r;
1265 notfound: ;
1268 /* no register left : free the first one on the stack (VERY
1269 IMPORTANT to start from the bottom to ensure that we don't
1270 spill registers used in gen_opi()) */
1271 for(p=vstack;p<=vtop;p++) {
1272 /* look at second register (if long long) */
1273 r = p->r2 & VT_VALMASK;
1274 if (r < VT_CONST && (reg_classes[r] & rc))
1275 goto save_found;
1276 r = p->r & VT_VALMASK;
1277 if (r < VT_CONST && (reg_classes[r] & rc)) {
1278 save_found:
1279 save_reg(r);
1280 return r;
1283 /* Should never comes here */
1284 return -1;
1287 /* find a free temporary local variable (return the offset on stack) match the size and align. If none, add new temporary stack variable*/
1288 static int get_temp_local_var(int size,int align){
1289 int i;
1290 struct temp_local_variable *temp_var;
1291 int found_var;
1292 SValue *p;
1293 int r;
1294 char free;
1295 char found;
1296 found=0;
1297 for(i=0;i<nb_temp_local_vars;i++){
1298 temp_var=&arr_temp_local_vars[i];
1299 if(temp_var->size<size||align!=temp_var->align){
1300 continue;
1302 /*check if temp_var is free*/
1303 free=1;
1304 for(p=vstack;p<=vtop;p++) {
1305 r=p->r&VT_VALMASK;
1306 if(r==VT_LOCAL||r==VT_LLOCAL){
1307 if(p->c.i==temp_var->location){
1308 free=0;
1309 break;
1313 if(free){
1314 found_var=temp_var->location;
1315 found=1;
1316 break;
1319 if(!found){
1320 loc = (loc - size) & -align;
1321 if(nb_temp_local_vars<MAX_TEMP_LOCAL_VARIABLE_NUMBER){
1322 temp_var=&arr_temp_local_vars[i];
1323 temp_var->location=loc;
1324 temp_var->size=size;
1325 temp_var->align=align;
1326 nb_temp_local_vars++;
1328 found_var=loc;
1330 return found_var;
1333 static void clear_temp_local_var_list(){
1334 nb_temp_local_vars=0;
1337 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1338 if needed */
1339 static void move_reg(int r, int s, int t)
1341 SValue sv;
1343 if (r != s) {
1344 save_reg(r);
1345 sv.type.t = t;
1346 sv.type.ref = NULL;
1347 sv.r = s;
1348 sv.c.i = 0;
1349 load(r, &sv);
1353 /* get address of vtop (vtop MUST BE an lvalue) */
1354 ST_FUNC void gaddrof(void)
1356 vtop->r &= ~VT_LVAL;
1357 /* tricky: if saved lvalue, then we can go back to lvalue */
1358 if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
1359 vtop->r = (vtop->r & ~(VT_VALMASK | VT_LVAL_TYPE)) | VT_LOCAL | VT_LVAL;
1364 #ifdef CONFIG_TCC_BCHECK
1365 /* generate lvalue bound code */
1366 static void gbound(void)
1368 int lval_type;
1369 CType type1;
1371 vtop->r &= ~VT_MUSTBOUND;
1372 /* if lvalue, then use checking code before dereferencing */
1373 if (vtop->r & VT_LVAL) {
1374 /* if not VT_BOUNDED value, then make one */
1375 if (!(vtop->r & VT_BOUNDED)) {
1376 lval_type = vtop->r & (VT_LVAL_TYPE | VT_LVAL);
1377 /* must save type because we must set it to int to get pointer */
1378 type1 = vtop->type;
1379 vtop->type.t = VT_PTR;
1380 gaddrof();
1381 vpushi(0);
1382 gen_bounded_ptr_add();
1383 vtop->r |= lval_type;
1384 vtop->type = type1;
1386 /* then check for dereferencing */
1387 gen_bounded_ptr_deref();
1390 #endif
1392 static void incr_bf_adr(int o)
1394 vtop->type = char_pointer_type;
1395 gaddrof();
1396 vpushi(o);
1397 gen_op('+');
1398 vtop->type.t = (vtop->type.t & ~(VT_BTYPE|VT_DEFSIGN))
1399 | (VT_BYTE|VT_UNSIGNED);
1400 vtop->r = (vtop->r & ~VT_LVAL_TYPE)
1401 | (VT_LVAL_BYTE|VT_LVAL_UNSIGNED|VT_LVAL);
1404 /* single-byte load mode for packed or otherwise unaligned bitfields */
1405 static void load_packed_bf(CType *type, int bit_pos, int bit_size)
1407 int n, o, bits;
1408 save_reg_upstack(vtop->r, 1);
1409 vpush64(type->t & VT_BTYPE, 0); // B X
1410 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
1411 do {
1412 vswap(); // X B
1413 incr_bf_adr(o);
1414 vdup(); // X B B
1415 n = 8 - bit_pos;
1416 if (n > bit_size)
1417 n = bit_size;
1418 if (bit_pos)
1419 vpushi(bit_pos), gen_op(TOK_SHR), bit_pos = 0; // X B Y
1420 if (n < 8)
1421 vpushi((1 << n) - 1), gen_op('&');
1422 gen_cast(type);
1423 if (bits)
1424 vpushi(bits), gen_op(TOK_SHL);
1425 vrotb(3); // B Y X
1426 gen_op('|'); // B X
1427 bits += n, bit_size -= n, o = 1;
1428 } while (bit_size);
1429 vswap(), vpop();
1430 if (!(type->t & VT_UNSIGNED)) {
1431 n = ((type->t & VT_BTYPE) == VT_LLONG ? 64 : 32) - bits;
1432 vpushi(n), gen_op(TOK_SHL);
1433 vpushi(n), gen_op(TOK_SAR);
1437 /* single-byte store mode for packed or otherwise unaligned bitfields */
1438 static void store_packed_bf(int bit_pos, int bit_size)
1440 int bits, n, o, m, c;
1442 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1443 vswap(); // X B
1444 save_reg_upstack(vtop->r, 1);
1445 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
1446 do {
1447 incr_bf_adr(o); // X B
1448 vswap(); //B X
1449 c ? vdup() : gv_dup(); // B V X
1450 vrott(3); // X B V
1451 if (bits)
1452 vpushi(bits), gen_op(TOK_SHR);
1453 if (bit_pos)
1454 vpushi(bit_pos), gen_op(TOK_SHL);
1455 n = 8 - bit_pos;
1456 if (n > bit_size)
1457 n = bit_size;
1458 if (n < 8) {
1459 m = ((1 << n) - 1) << bit_pos;
1460 vpushi(m), gen_op('&'); // X B V1
1461 vpushv(vtop-1); // X B V1 B
1462 vpushi(m & 0x80 ? ~m & 0x7f : ~m);
1463 gen_op('&'); // X B V1 B1
1464 gen_op('|'); // X B V2
1466 vdup(), vtop[-1] = vtop[-2]; // X B B V2
1467 vstore(), vpop(); // X B
1468 bits += n, bit_size -= n, bit_pos = 0, o = 1;
1469 } while (bit_size);
1470 vpop(), vpop();
1473 static int adjust_bf(SValue *sv, int bit_pos, int bit_size)
1475 int t;
1476 if (0 == sv->type.ref)
1477 return 0;
1478 t = sv->type.ref->auxtype;
1479 if (t != -1 && t != VT_STRUCT) {
1480 sv->type.t = (sv->type.t & ~VT_BTYPE) | t;
1481 sv->r = (sv->r & ~VT_LVAL_TYPE) | lvalue_type(sv->type.t);
1483 return t;
1486 /* store vtop a register belonging to class 'rc'. lvalues are
1487 converted to values. Cannot be used if cannot be converted to
1488 register value (such as structures). */
1489 ST_FUNC int gv(int rc)
1491 int r, bit_pos, bit_size, size, align, rc2;
1493 /* NOTE: get_reg can modify vstack[] */
1494 if (vtop->type.t & VT_BITFIELD) {
1495 CType type;
1497 bit_pos = BIT_POS(vtop->type.t);
1498 bit_size = BIT_SIZE(vtop->type.t);
1499 /* remove bit field info to avoid loops */
1500 vtop->type.t &= ~VT_STRUCT_MASK;
1502 type.ref = NULL;
1503 type.t = vtop->type.t & VT_UNSIGNED;
1504 if ((vtop->type.t & VT_BTYPE) == VT_BOOL)
1505 type.t |= VT_UNSIGNED;
1507 r = adjust_bf(vtop, bit_pos, bit_size);
1509 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
1510 type.t |= VT_LLONG;
1511 else
1512 type.t |= VT_INT;
1514 if (r == VT_STRUCT) {
1515 load_packed_bf(&type, bit_pos, bit_size);
1516 } else {
1517 int bits = (type.t & VT_BTYPE) == VT_LLONG ? 64 : 32;
1518 /* cast to int to propagate signedness in following ops */
1519 gen_cast(&type);
1520 /* generate shifts */
1521 vpushi(bits - (bit_pos + bit_size));
1522 gen_op(TOK_SHL);
1523 vpushi(bits - bit_size);
1524 /* NOTE: transformed to SHR if unsigned */
1525 gen_op(TOK_SAR);
1527 r = gv(rc);
1528 } else {
1529 if (is_float(vtop->type.t) &&
1530 (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1531 unsigned long offset;
1532 /* CPUs usually cannot use float constants, so we store them
1533 generically in data segment */
1534 size = type_size(&vtop->type, &align);
1535 if (NODATA_WANTED)
1536 size = 0, align = 1;
1537 offset = section_add(data_section, size, align);
1538 vpush_ref(&vtop->type, data_section, offset, size);
1539 vswap();
1540 init_putv(&vtop->type, data_section, offset);
1541 vtop->r |= VT_LVAL;
1543 #ifdef CONFIG_TCC_BCHECK
1544 if (vtop->r & VT_MUSTBOUND)
1545 gbound();
1546 #endif
1548 r = vtop->r & VT_VALMASK;
1549 rc2 = (rc & RC_FLOAT) ? RC_FLOAT : RC_INT;
1550 #ifndef TCC_TARGET_ARM64
1551 if (rc == RC_IRET)
1552 rc2 = RC_LRET;
1553 #ifdef TCC_TARGET_X86_64
1554 else if (rc == RC_FRET)
1555 rc2 = RC_QRET;
1556 #endif
1557 #endif
1558 /* need to reload if:
1559 - constant
1560 - lvalue (need to dereference pointer)
1561 - already a register, but not in the right class */
1562 if (r >= VT_CONST
1563 || (vtop->r & VT_LVAL)
1564 || !(reg_classes[r] & rc)
1565 #if PTR_SIZE == 8
1566 || ((vtop->type.t & VT_BTYPE) == VT_QLONG && !(reg_classes[vtop->r2] & rc2))
1567 || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT && !(reg_classes[vtop->r2] & rc2))
1568 #else
1569 || ((vtop->type.t & VT_BTYPE) == VT_LLONG && !(reg_classes[vtop->r2] & rc2))
1570 #endif
1573 r = get_reg(rc);
1574 #if PTR_SIZE == 8
1575 if (((vtop->type.t & VT_BTYPE) == VT_QLONG) || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT)) {
1576 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
1577 #else
1578 if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
1579 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
1580 unsigned long long ll;
1581 #endif
1582 int r2, original_type;
1583 original_type = vtop->type.t;
1584 /* two register type load : expand to two words
1585 temporarily */
1586 #if PTR_SIZE == 4
1587 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1588 /* load constant */
1589 ll = vtop->c.i;
1590 vtop->c.i = ll; /* first word */
1591 load(r, vtop);
1592 vtop->r = r; /* save register value */
1593 vpushi(ll >> 32); /* second word */
1594 } else
1595 #endif
1596 if (vtop->r & VT_LVAL) {
1597 /* We do not want to modifier the long long
1598 pointer here, so the safest (and less
1599 efficient) is to save all the other registers
1600 in the stack. XXX: totally inefficient. */
1601 #if 0
1602 save_regs(1);
1603 #else
1604 /* lvalue_save: save only if used further down the stack */
1605 save_reg_upstack(vtop->r, 1);
1606 #endif
1607 /* load from memory */
1608 vtop->type.t = load_type;
1609 load(r, vtop);
1610 vdup();
1611 vtop[-1].r = r; /* save register value */
1612 /* increment pointer to get second word */
1613 vtop->type.t = addr_type;
1614 gaddrof();
1615 vpushi(load_size);
1616 gen_op('+');
1617 vtop->r |= VT_LVAL;
1618 vtop->type.t = load_type;
1619 } else {
1620 /* move registers */
1621 load(r, vtop);
1622 vdup();
1623 vtop[-1].r = r; /* save register value */
1624 vtop->r = vtop[-1].r2;
1626 /* Allocate second register. Here we rely on the fact that
1627 get_reg() tries first to free r2 of an SValue. */
1628 r2 = get_reg(rc2);
1629 load(r2, vtop);
1630 vpop();
1631 /* write second register */
1632 vtop->r2 = r2;
1633 vtop->type.t = original_type;
1634 } else if ((vtop->r & VT_LVAL) && !is_float(vtop->type.t)) {
1635 int t1, t;
1636 /* lvalue of scalar type : need to use lvalue type
1637 because of possible cast */
1638 t = vtop->type.t;
1639 t1 = t;
1640 /* compute memory access type */
1641 if (vtop->r & VT_LVAL_BYTE)
1642 t = VT_BYTE;
1643 else if (vtop->r & VT_LVAL_SHORT)
1644 t = VT_SHORT;
1645 if (vtop->r & VT_LVAL_UNSIGNED)
1646 t |= VT_UNSIGNED;
1647 vtop->type.t = t;
1648 load(r, vtop);
1649 /* restore wanted type */
1650 vtop->type.t = t1;
1651 } else {
1652 if (vtop->r == VT_CMP)
1653 vset_VT_JMP();
1654 /* one register type load */
1655 load(r, vtop);
1658 vtop->r = r;
1659 #ifdef TCC_TARGET_C67
1660 /* uses register pairs for doubles */
1661 if ((vtop->type.t & VT_BTYPE) == VT_DOUBLE)
1662 vtop->r2 = r+1;
1663 #endif
1665 return r;
1668 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1669 ST_FUNC void gv2(int rc1, int rc2)
1671 /* generate more generic register first. But VT_JMP or VT_CMP
1672 values must be generated first in all cases to avoid possible
1673 reload errors */
1674 if (vtop->r != VT_CMP && rc1 <= rc2) {
1675 vswap();
1676 gv(rc1);
1677 vswap();
1678 gv(rc2);
1679 /* test if reload is needed for first register */
1680 if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
1681 vswap();
1682 gv(rc1);
1683 vswap();
1685 } else {
1686 gv(rc2);
1687 vswap();
1688 gv(rc1);
1689 vswap();
1690 /* test if reload is needed for first register */
1691 if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
1692 gv(rc2);
1697 #ifndef TCC_TARGET_ARM64
1698 /* wrapper around RC_FRET to return a register by type */
1699 static int rc_fret(int t)
1701 #ifdef TCC_TARGET_X86_64
1702 if (t == VT_LDOUBLE) {
1703 return RC_ST0;
1705 #endif
1706 return RC_FRET;
1708 #endif
1710 /* wrapper around REG_FRET to return a register by type */
1711 static int reg_fret(int t)
1713 #ifdef TCC_TARGET_X86_64
1714 if (t == VT_LDOUBLE) {
1715 return TREG_ST0;
1717 #endif
1718 return REG_FRET;
1721 #if PTR_SIZE == 4
1722 /* expand 64bit on stack in two ints */
1723 ST_FUNC void lexpand(void)
1725 int u, v;
1726 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1727 v = vtop->r & (VT_VALMASK | VT_LVAL);
1728 if (v == VT_CONST) {
1729 vdup();
1730 vtop[0].c.i >>= 32;
1731 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1732 vdup();
1733 vtop[0].c.i += 4;
1734 } else {
1735 gv(RC_INT);
1736 vdup();
1737 vtop[0].r = vtop[-1].r2;
1738 vtop[0].r2 = vtop[-1].r2 = VT_CONST;
1740 vtop[0].type.t = vtop[-1].type.t = VT_INT | u;
1742 #endif
1744 #if PTR_SIZE == 4
1745 /* build a long long from two ints */
1746 static void lbuild(int t)
1748 gv2(RC_INT, RC_INT);
1749 vtop[-1].r2 = vtop[0].r;
1750 vtop[-1].type.t = t;
1751 vpop();
1753 #endif
1755 /* convert stack entry to register and duplicate its value in another
1756 register */
1757 static void gv_dup(void)
1759 int rc, t, r, r1;
1760 SValue sv;
1762 t = vtop->type.t;
1763 #if PTR_SIZE == 4
1764 if ((t & VT_BTYPE) == VT_LLONG) {
1765 if (t & VT_BITFIELD) {
1766 gv(RC_INT);
1767 t = vtop->type.t;
1769 lexpand();
1770 gv_dup();
1771 vswap();
1772 vrotb(3);
1773 gv_dup();
1774 vrotb(4);
1775 /* stack: H L L1 H1 */
1776 lbuild(t);
1777 vrotb(3);
1778 vrotb(3);
1779 vswap();
1780 lbuild(t);
1781 vswap();
1782 } else
1783 #endif
1785 /* duplicate value */
1786 rc = RC_INT;
1787 sv.type.t = VT_INT;
1788 if (is_float(t)) {
1789 rc = RC_FLOAT;
1790 #ifdef TCC_TARGET_X86_64
1791 if ((t & VT_BTYPE) == VT_LDOUBLE) {
1792 rc = RC_ST0;
1794 #endif
1795 sv.type.t = t;
1797 r = gv(rc);
1798 r1 = get_reg(rc);
1799 sv.r = r;
1800 sv.c.i = 0;
1801 load(r1, &sv); /* move r to r1 */
1802 vdup();
1803 /* duplicates value */
1804 if (r != r1)
1805 vtop->r = r1;
1809 #if PTR_SIZE == 4
1810 /* generate CPU independent (unsigned) long long operations */
1811 static void gen_opl(int op)
1813 int t, a, b, op1, c, i;
1814 int func;
1815 unsigned short reg_iret = REG_IRET;
1816 unsigned short reg_lret = REG_LRET;
1817 SValue tmp;
1819 switch(op) {
1820 case '/':
1821 case TOK_PDIV:
1822 func = TOK___divdi3;
1823 goto gen_func;
1824 case TOK_UDIV:
1825 func = TOK___udivdi3;
1826 goto gen_func;
1827 case '%':
1828 func = TOK___moddi3;
1829 goto gen_mod_func;
1830 case TOK_UMOD:
1831 func = TOK___umoddi3;
1832 gen_mod_func:
1833 #ifdef TCC_ARM_EABI
1834 reg_iret = TREG_R2;
1835 reg_lret = TREG_R3;
1836 #endif
1837 gen_func:
1838 /* call generic long long function */
1839 vpush_global_sym(&func_old_type, func);
1840 vrott(3);
1841 gfunc_call(2);
1842 vpushi(0);
1843 vtop->r = reg_iret;
1844 vtop->r2 = reg_lret;
1845 break;
1846 case '^':
1847 case '&':
1848 case '|':
1849 case '*':
1850 case '+':
1851 case '-':
1852 //pv("gen_opl A",0,2);
1853 t = vtop->type.t;
1854 vswap();
1855 lexpand();
1856 vrotb(3);
1857 lexpand();
1858 /* stack: L1 H1 L2 H2 */
1859 tmp = vtop[0];
1860 vtop[0] = vtop[-3];
1861 vtop[-3] = tmp;
1862 tmp = vtop[-2];
1863 vtop[-2] = vtop[-3];
1864 vtop[-3] = tmp;
1865 vswap();
1866 /* stack: H1 H2 L1 L2 */
1867 //pv("gen_opl B",0,4);
1868 if (op == '*') {
1869 vpushv(vtop - 1);
1870 vpushv(vtop - 1);
1871 gen_op(TOK_UMULL);
1872 lexpand();
1873 /* stack: H1 H2 L1 L2 ML MH */
1874 for(i=0;i<4;i++)
1875 vrotb(6);
1876 /* stack: ML MH H1 H2 L1 L2 */
1877 tmp = vtop[0];
1878 vtop[0] = vtop[-2];
1879 vtop[-2] = tmp;
1880 /* stack: ML MH H1 L2 H2 L1 */
1881 gen_op('*');
1882 vrotb(3);
1883 vrotb(3);
1884 gen_op('*');
1885 /* stack: ML MH M1 M2 */
1886 gen_op('+');
1887 gen_op('+');
1888 } else if (op == '+' || op == '-') {
1889 /* XXX: add non carry method too (for MIPS or alpha) */
1890 if (op == '+')
1891 op1 = TOK_ADDC1;
1892 else
1893 op1 = TOK_SUBC1;
1894 gen_op(op1);
1895 /* stack: H1 H2 (L1 op L2) */
1896 vrotb(3);
1897 vrotb(3);
1898 gen_op(op1 + 1); /* TOK_xxxC2 */
1899 } else {
1900 gen_op(op);
1901 /* stack: H1 H2 (L1 op L2) */
1902 vrotb(3);
1903 vrotb(3);
1904 /* stack: (L1 op L2) H1 H2 */
1905 gen_op(op);
1906 /* stack: (L1 op L2) (H1 op H2) */
1908 /* stack: L H */
1909 lbuild(t);
1910 break;
1911 case TOK_SAR:
1912 case TOK_SHR:
1913 case TOK_SHL:
1914 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1915 t = vtop[-1].type.t;
1916 vswap();
1917 lexpand();
1918 vrotb(3);
1919 /* stack: L H shift */
1920 c = (int)vtop->c.i;
1921 /* constant: simpler */
1922 /* NOTE: all comments are for SHL. the other cases are
1923 done by swapping words */
1924 vpop();
1925 if (op != TOK_SHL)
1926 vswap();
1927 if (c >= 32) {
1928 /* stack: L H */
1929 vpop();
1930 if (c > 32) {
1931 vpushi(c - 32);
1932 gen_op(op);
1934 if (op != TOK_SAR) {
1935 vpushi(0);
1936 } else {
1937 gv_dup();
1938 vpushi(31);
1939 gen_op(TOK_SAR);
1941 vswap();
1942 } else {
1943 vswap();
1944 gv_dup();
1945 /* stack: H L L */
1946 vpushi(c);
1947 gen_op(op);
1948 vswap();
1949 vpushi(32 - c);
1950 if (op == TOK_SHL)
1951 gen_op(TOK_SHR);
1952 else
1953 gen_op(TOK_SHL);
1954 vrotb(3);
1955 /* stack: L L H */
1956 vpushi(c);
1957 if (op == TOK_SHL)
1958 gen_op(TOK_SHL);
1959 else
1960 gen_op(TOK_SHR);
1961 gen_op('|');
1963 if (op != TOK_SHL)
1964 vswap();
1965 lbuild(t);
1966 } else {
1967 /* XXX: should provide a faster fallback on x86 ? */
1968 switch(op) {
1969 case TOK_SAR:
1970 func = TOK___ashrdi3;
1971 goto gen_func;
1972 case TOK_SHR:
1973 func = TOK___lshrdi3;
1974 goto gen_func;
1975 case TOK_SHL:
1976 func = TOK___ashldi3;
1977 goto gen_func;
1980 break;
1981 default:
1982 /* compare operations */
1983 t = vtop->type.t;
1984 vswap();
1985 lexpand();
1986 vrotb(3);
1987 lexpand();
1988 /* stack: L1 H1 L2 H2 */
1989 tmp = vtop[-1];
1990 vtop[-1] = vtop[-2];
1991 vtop[-2] = tmp;
1992 /* stack: L1 L2 H1 H2 */
1993 /* compare high */
1994 op1 = op;
1995 /* when values are equal, we need to compare low words. since
1996 the jump is inverted, we invert the test too. */
1997 if (op1 == TOK_LT)
1998 op1 = TOK_LE;
1999 else if (op1 == TOK_GT)
2000 op1 = TOK_GE;
2001 else if (op1 == TOK_ULT)
2002 op1 = TOK_ULE;
2003 else if (op1 == TOK_UGT)
2004 op1 = TOK_UGE;
2005 a = 0;
2006 b = 0;
2007 gen_op(op1);
2008 if (op == TOK_NE) {
2009 b = gvtst(0, 0);
2010 } else {
2011 a = gvtst(1, 0);
2012 if (op != TOK_EQ) {
2013 /* generate non equal test */
2014 vpushi(0);
2015 vset_VT_CMP(TOK_NE);
2016 b = gvtst(0, 0);
2019 /* compare low. Always unsigned */
2020 op1 = op;
2021 if (op1 == TOK_LT)
2022 op1 = TOK_ULT;
2023 else if (op1 == TOK_LE)
2024 op1 = TOK_ULE;
2025 else if (op1 == TOK_GT)
2026 op1 = TOK_UGT;
2027 else if (op1 == TOK_GE)
2028 op1 = TOK_UGE;
2029 gen_op(op1);
2030 #if 0//def TCC_TARGET_I386
2031 if (op == TOK_NE) { gsym(b); break; }
2032 if (op == TOK_EQ) { gsym(a); break; }
2033 #endif
2034 gvtst_set(1, a);
2035 gvtst_set(0, b);
2036 break;
2039 #endif
2041 static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
2043 uint64_t x = (a >> 63 ? -a : a) / (b >> 63 ? -b : b);
2044 return (a ^ b) >> 63 ? -x : x;
2047 static int gen_opic_lt(uint64_t a, uint64_t b)
2049 return (a ^ (uint64_t)1 << 63) < (b ^ (uint64_t)1 << 63);
2052 /* handle integer constant optimizations and various machine
2053 independent opt */
2054 static void gen_opic(int op)
2056 SValue *v1 = vtop - 1;
2057 SValue *v2 = vtop;
2058 int t1 = v1->type.t & VT_BTYPE;
2059 int t2 = v2->type.t & VT_BTYPE;
2060 int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2061 int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2062 uint64_t l1 = c1 ? v1->c.i : 0;
2063 uint64_t l2 = c2 ? v2->c.i : 0;
2064 int shm = (t1 == VT_LLONG) ? 63 : 31;
2066 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
2067 l1 = ((uint32_t)l1 |
2068 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
2069 if (t2 != VT_LLONG && (PTR_SIZE != 8 || t2 != VT_PTR))
2070 l2 = ((uint32_t)l2 |
2071 (v2->type.t & VT_UNSIGNED ? 0 : -(l2 & 0x80000000)));
2073 if (c1 && c2) {
2074 switch(op) {
2075 case '+': l1 += l2; break;
2076 case '-': l1 -= l2; break;
2077 case '&': l1 &= l2; break;
2078 case '^': l1 ^= l2; break;
2079 case '|': l1 |= l2; break;
2080 case '*': l1 *= l2; break;
2082 case TOK_PDIV:
2083 case '/':
2084 case '%':
2085 case TOK_UDIV:
2086 case TOK_UMOD:
2087 /* if division by zero, generate explicit division */
2088 if (l2 == 0) {
2089 if (const_wanted)
2090 tcc_error("division by zero in constant");
2091 goto general_case;
2093 switch(op) {
2094 default: l1 = gen_opic_sdiv(l1, l2); break;
2095 case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
2096 case TOK_UDIV: l1 = l1 / l2; break;
2097 case TOK_UMOD: l1 = l1 % l2; break;
2099 break;
2100 case TOK_SHL: l1 <<= (l2 & shm); break;
2101 case TOK_SHR: l1 >>= (l2 & shm); break;
2102 case TOK_SAR:
2103 l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
2104 break;
2105 /* tests */
2106 case TOK_ULT: l1 = l1 < l2; break;
2107 case TOK_UGE: l1 = l1 >= l2; break;
2108 case TOK_EQ: l1 = l1 == l2; break;
2109 case TOK_NE: l1 = l1 != l2; break;
2110 case TOK_ULE: l1 = l1 <= l2; break;
2111 case TOK_UGT: l1 = l1 > l2; break;
2112 case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
2113 case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
2114 case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
2115 case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
2116 /* logical */
2117 case TOK_LAND: l1 = l1 && l2; break;
2118 case TOK_LOR: l1 = l1 || l2; break;
2119 default:
2120 goto general_case;
2122 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
2123 l1 = ((uint32_t)l1 |
2124 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
2125 v1->c.i = l1;
2126 vtop--;
2127 } else {
2128 /* if commutative ops, put c2 as constant */
2129 if (c1 && (op == '+' || op == '&' || op == '^' ||
2130 op == '|' || op == '*')) {
2131 vswap();
2132 c2 = c1; //c = c1, c1 = c2, c2 = c;
2133 l2 = l1; //l = l1, l1 = l2, l2 = l;
2135 if (!const_wanted &&
2136 c1 && ((l1 == 0 &&
2137 (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
2138 (l1 == -1 && op == TOK_SAR))) {
2139 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
2140 vtop--;
2141 } else if (!const_wanted &&
2142 c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
2143 (op == '|' &&
2144 (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))) ||
2145 (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
2146 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
2147 if (l2 == 1)
2148 vtop->c.i = 0;
2149 vswap();
2150 vtop--;
2151 } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
2152 op == TOK_PDIV) &&
2153 l2 == 1) ||
2154 ((op == '+' || op == '-' || op == '|' || op == '^' ||
2155 op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
2156 l2 == 0) ||
2157 (op == '&' &&
2158 (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))))) {
2159 /* filter out NOP operations like x*1, x-0, x&-1... */
2160 vtop--;
2161 } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
2162 /* try to use shifts instead of muls or divs */
2163 if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
2164 int n = -1;
2165 while (l2) {
2166 l2 >>= 1;
2167 n++;
2169 vtop->c.i = n;
2170 if (op == '*')
2171 op = TOK_SHL;
2172 else if (op == TOK_PDIV)
2173 op = TOK_SAR;
2174 else
2175 op = TOK_SHR;
2177 goto general_case;
2178 } else if (c2 && (op == '+' || op == '-') &&
2179 (((vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM))
2180 || (vtop[-1].r & (VT_VALMASK | VT_LVAL)) == VT_LOCAL)) {
2181 /* symbol + constant case */
2182 if (op == '-')
2183 l2 = -l2;
2184 l2 += vtop[-1].c.i;
2185 /* The backends can't always deal with addends to symbols
2186 larger than +-1<<31. Don't construct such. */
2187 if ((int)l2 != l2)
2188 goto general_case;
2189 vtop--;
2190 vtop->c.i = l2;
2191 } else {
2192 general_case:
2193 /* call low level op generator */
2194 if (t1 == VT_LLONG || t2 == VT_LLONG ||
2195 (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
2196 gen_opl(op);
2197 else
2198 gen_opi(op);
2203 /* generate a floating point operation with constant propagation */
2204 static void gen_opif(int op)
2206 int c1, c2;
2207 SValue *v1, *v2;
2208 #if defined _MSC_VER && defined _AMD64_
2209 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2210 volatile
2211 #endif
2212 long double f1, f2;
2214 v1 = vtop - 1;
2215 v2 = vtop;
2216 /* currently, we cannot do computations with forward symbols */
2217 c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2218 c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2219 if (c1 && c2) {
2220 if (v1->type.t == VT_FLOAT) {
2221 f1 = v1->c.f;
2222 f2 = v2->c.f;
2223 } else if (v1->type.t == VT_DOUBLE) {
2224 f1 = v1->c.d;
2225 f2 = v2->c.d;
2226 } else {
2227 f1 = v1->c.ld;
2228 f2 = v2->c.ld;
2231 /* NOTE: we only do constant propagation if finite number (not
2232 NaN or infinity) (ANSI spec) */
2233 if (!ieee_finite(f1) || !ieee_finite(f2))
2234 goto general_case;
2236 switch(op) {
2237 case '+': f1 += f2; break;
2238 case '-': f1 -= f2; break;
2239 case '*': f1 *= f2; break;
2240 case '/':
2241 if (f2 == 0.0) {
2242 /* If not in initializer we need to potentially generate
2243 FP exceptions at runtime, otherwise we want to fold. */
2244 if (!const_wanted)
2245 goto general_case;
2247 f1 /= f2;
2248 break;
2249 /* XXX: also handles tests ? */
2250 default:
2251 goto general_case;
2253 /* XXX: overflow test ? */
2254 if (v1->type.t == VT_FLOAT) {
2255 v1->c.f = f1;
2256 } else if (v1->type.t == VT_DOUBLE) {
2257 v1->c.d = f1;
2258 } else {
2259 v1->c.ld = f1;
2261 vtop--;
2262 } else {
2263 general_case:
2264 gen_opf(op);
2268 static int pointed_size(CType *type)
2270 int align;
2271 return type_size(pointed_type(type), &align);
2274 static void vla_runtime_pointed_size(CType *type)
2276 int align;
2277 vla_runtime_type_size(pointed_type(type), &align);
2280 static inline int is_null_pointer(SValue *p)
2282 if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
2283 return 0;
2284 return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
2285 ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
2286 ((p->type.t & VT_BTYPE) == VT_PTR &&
2287 (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0) &&
2288 ((pointed_type(&p->type)->t & VT_BTYPE) == VT_VOID) &&
2289 0 == (pointed_type(&p->type)->t & (VT_CONSTANT | VT_VOLATILE))
2292 static inline int is_integer_btype(int bt)
2294 return (bt == VT_BYTE || bt == VT_SHORT ||
2295 bt == VT_INT || bt == VT_LLONG);
2298 /* check types for comparison or subtraction of pointers */
2299 static void check_comparison_pointer_types(SValue *p1, SValue *p2, int op)
2301 CType *type1, *type2, tmp_type1, tmp_type2;
2302 int bt1, bt2;
2304 /* null pointers are accepted for all comparisons as gcc */
2305 if (is_null_pointer(p1) || is_null_pointer(p2))
2306 return;
2307 type1 = &p1->type;
2308 type2 = &p2->type;
2309 bt1 = type1->t & VT_BTYPE;
2310 bt2 = type2->t & VT_BTYPE;
2311 /* accept comparison between pointer and integer with a warning */
2312 if ((is_integer_btype(bt1) || is_integer_btype(bt2)) && op != '-') {
2313 if (op != TOK_LOR && op != TOK_LAND )
2314 tcc_warning("comparison between pointer and integer");
2315 return;
2318 /* both must be pointers or implicit function pointers */
2319 if (bt1 == VT_PTR) {
2320 type1 = pointed_type(type1);
2321 } else if (bt1 != VT_FUNC)
2322 goto invalid_operands;
2324 if (bt2 == VT_PTR) {
2325 type2 = pointed_type(type2);
2326 } else if (bt2 != VT_FUNC) {
2327 invalid_operands:
2328 tcc_error("invalid operands to binary %s", get_tok_str(op, NULL));
2330 if ((type1->t & VT_BTYPE) == VT_VOID ||
2331 (type2->t & VT_BTYPE) == VT_VOID)
2332 return;
2333 tmp_type1 = *type1;
2334 tmp_type2 = *type2;
2335 tmp_type1.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
2336 tmp_type2.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
2337 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
2338 /* gcc-like error if '-' is used */
2339 if (op == '-')
2340 goto invalid_operands;
2341 else
2342 tcc_warning("comparison of distinct pointer types lacks a cast");
2346 /* generic gen_op: handles types problems */
2347 ST_FUNC void gen_op(int op)
2349 int u, t1, t2, bt1, bt2, t;
2350 CType type1;
2352 redo:
2353 t1 = vtop[-1].type.t;
2354 t2 = vtop[0].type.t;
2355 bt1 = t1 & VT_BTYPE;
2356 bt2 = t2 & VT_BTYPE;
2358 if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
2359 tcc_error("operation on a struct");
2360 } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
2361 if (bt2 == VT_FUNC) {
2362 mk_pointer(&vtop->type);
2363 gaddrof();
2365 if (bt1 == VT_FUNC) {
2366 vswap();
2367 mk_pointer(&vtop->type);
2368 gaddrof();
2369 vswap();
2371 goto redo;
2372 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
2373 /* at least one operand is a pointer */
2374 /* relational op: must be both pointers */
2375 if (op >= TOK_ULT && op <= TOK_LOR) {
2376 check_comparison_pointer_types(vtop - 1, vtop, op);
2377 /* pointers are handled are unsigned */
2378 #if PTR_SIZE == 8
2379 t = VT_LLONG | VT_UNSIGNED;
2380 #else
2381 t = VT_INT | VT_UNSIGNED;
2382 #endif
2383 goto std_op;
2385 /* if both pointers, then it must be the '-' op */
2386 if (bt1 == VT_PTR && bt2 == VT_PTR) {
2387 if (op != '-')
2388 tcc_error("cannot use pointers here");
2389 check_comparison_pointer_types(vtop - 1, vtop, op);
2390 /* XXX: check that types are compatible */
2391 if (vtop[-1].type.t & VT_VLA) {
2392 vla_runtime_pointed_size(&vtop[-1].type);
2393 } else {
2394 vpushi(pointed_size(&vtop[-1].type));
2396 vrott(3);
2397 gen_opic(op);
2398 vtop->type.t = ptrdiff_type.t;
2399 vswap();
2400 gen_op(TOK_PDIV);
2401 } else {
2402 /* exactly one pointer : must be '+' or '-'. */
2403 if (op != '-' && op != '+')
2404 tcc_error("cannot use pointers here");
2405 /* Put pointer as first operand */
2406 if (bt2 == VT_PTR) {
2407 vswap();
2408 t = t1, t1 = t2, t2 = t;
2410 #if PTR_SIZE == 4
2411 if ((vtop[0].type.t & VT_BTYPE) == VT_LLONG)
2412 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2413 gen_cast_s(VT_INT);
2414 #endif
2415 type1 = vtop[-1].type;
2416 type1.t &= ~VT_ARRAY;
2417 if (vtop[-1].type.t & VT_VLA)
2418 vla_runtime_pointed_size(&vtop[-1].type);
2419 else {
2420 u = pointed_size(&vtop[-1].type);
2421 if (u < 0)
2422 tcc_error("unknown array element size");
2423 #if PTR_SIZE == 8
2424 vpushll(u);
2425 #else
2426 /* XXX: cast to int ? (long long case) */
2427 vpushi(u);
2428 #endif
2430 gen_op('*');
2431 #if 0
2432 /* #ifdef CONFIG_TCC_BCHECK
2433 The main reason to removing this code:
2434 #include <stdio.h>
2435 int main ()
2437 int v[10];
2438 int i = 10;
2439 int j = 9;
2440 fprintf(stderr, "v+i-j = %p\n", v+i-j);
2441 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
2443 When this code is on. then the output looks like
2444 v+i-j = 0xfffffffe
2445 v+(i-j) = 0xbff84000
2447 /* if evaluating constant expression, no code should be
2448 generated, so no bound check */
2449 if (tcc_state->do_bounds_check && !const_wanted) {
2450 /* if bounded pointers, we generate a special code to
2451 test bounds */
2452 if (op == '-') {
2453 vpushi(0);
2454 vswap();
2455 gen_op('-');
2457 gen_bounded_ptr_add();
2458 } else
2459 #endif
2461 gen_opic(op);
2463 /* put again type if gen_opic() swaped operands */
2464 vtop->type = type1;
2466 } else if (is_float(bt1) || is_float(bt2)) {
2467 /* compute bigger type and do implicit casts */
2468 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
2469 t = VT_LDOUBLE;
2470 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
2471 t = VT_DOUBLE;
2472 } else {
2473 t = VT_FLOAT;
2475 /* floats can only be used for a few operations */
2476 if (op != '+' && op != '-' && op != '*' && op != '/' &&
2477 (op < TOK_ULT || op > TOK_GT))
2478 tcc_error("invalid operands for binary operation");
2479 goto std_op;
2480 } else if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL) {
2481 t = bt1 == VT_LLONG ? VT_LLONG : VT_INT;
2482 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (t | VT_UNSIGNED))
2483 t |= VT_UNSIGNED;
2484 t |= (VT_LONG & t1);
2485 goto std_op;
2486 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
2487 /* cast to biggest op */
2488 t = VT_LLONG | VT_LONG;
2489 if (bt1 == VT_LLONG)
2490 t &= t1;
2491 if (bt2 == VT_LLONG)
2492 t &= t2;
2493 /* convert to unsigned if it does not fit in a long long */
2494 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
2495 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
2496 t |= VT_UNSIGNED;
2497 goto std_op;
2498 } else {
2499 /* integer operations */
2500 t = VT_INT | (VT_LONG & (t1 | t2));
2501 /* convert to unsigned if it does not fit in an integer */
2502 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
2503 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
2504 t |= VT_UNSIGNED;
2505 std_op:
2506 /* XXX: currently, some unsigned operations are explicit, so
2507 we modify them here */
2508 if (t & VT_UNSIGNED) {
2509 if (op == TOK_SAR)
2510 op = TOK_SHR;
2511 else if (op == '/')
2512 op = TOK_UDIV;
2513 else if (op == '%')
2514 op = TOK_UMOD;
2515 else if (op == TOK_LT)
2516 op = TOK_ULT;
2517 else if (op == TOK_GT)
2518 op = TOK_UGT;
2519 else if (op == TOK_LE)
2520 op = TOK_ULE;
2521 else if (op == TOK_GE)
2522 op = TOK_UGE;
2524 vswap();
2525 type1.t = t;
2526 type1.ref = NULL;
2527 gen_cast(&type1);
2528 vswap();
2529 /* special case for shifts and long long: we keep the shift as
2530 an integer */
2531 if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
2532 type1.t = VT_INT;
2533 gen_cast(&type1);
2534 if (is_float(t))
2535 gen_opif(op);
2536 else
2537 gen_opic(op);
2538 if (op >= TOK_ULT && op <= TOK_GT) {
2539 /* relational op: the result is an int */
2540 vtop->type.t = VT_INT;
2541 } else {
2542 vtop->type.t = t;
2545 // Make sure that we have converted to an rvalue:
2546 if (vtop->r & VT_LVAL)
2547 gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
2550 #ifndef TCC_TARGET_ARM
2551 /* generic itof for unsigned long long case */
2552 static void gen_cvt_itof1(int t)
2554 #ifdef TCC_TARGET_ARM64
2555 gen_cvt_itof(t);
2556 #else
2557 if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
2558 (VT_LLONG | VT_UNSIGNED)) {
2560 if (t == VT_FLOAT)
2561 vpush_global_sym(&func_old_type, TOK___floatundisf);
2562 #if LDOUBLE_SIZE != 8
2563 else if (t == VT_LDOUBLE)
2564 vpush_global_sym(&func_old_type, TOK___floatundixf);
2565 #endif
2566 else
2567 vpush_global_sym(&func_old_type, TOK___floatundidf);
2568 vrott(2);
2569 gfunc_call(1);
2570 vpushi(0);
2571 vtop->r = reg_fret(t);
2572 } else {
2573 gen_cvt_itof(t);
2575 #endif
2577 #endif
2579 /* generic ftoi for unsigned long long case */
2580 static void gen_cvt_ftoi1(int t)
2582 #ifdef TCC_TARGET_ARM64
2583 gen_cvt_ftoi(t);
2584 #else
2585 int st;
2587 if (t == (VT_LLONG | VT_UNSIGNED)) {
2588 /* not handled natively */
2589 st = vtop->type.t & VT_BTYPE;
2590 if (st == VT_FLOAT)
2591 vpush_global_sym(&func_old_type, TOK___fixunssfdi);
2592 #if LDOUBLE_SIZE != 8
2593 else if (st == VT_LDOUBLE)
2594 vpush_global_sym(&func_old_type, TOK___fixunsxfdi);
2595 #endif
2596 else
2597 vpush_global_sym(&func_old_type, TOK___fixunsdfdi);
2598 vrott(2);
2599 gfunc_call(1);
2600 vpushi(0);
2601 vtop->r = REG_IRET;
2602 vtop->r2 = REG_LRET;
2603 } else {
2604 gen_cvt_ftoi(t);
2606 #endif
2609 /* force char or short cast */
2610 static void force_charshort_cast(int t)
2612 int bits, dbt;
2614 /* cannot cast static initializers */
2615 if (STATIC_DATA_WANTED)
2616 return;
2618 dbt = t & VT_BTYPE;
2619 /* XXX: add optimization if lvalue : just change type and offset */
2620 if (dbt == VT_BYTE)
2621 bits = 8;
2622 else
2623 bits = 16;
2624 if (t & VT_UNSIGNED) {
2625 vpushi((1 << bits) - 1);
2626 gen_op('&');
2627 } else {
2628 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
2629 bits = 64 - bits;
2630 else
2631 bits = 32 - bits;
2632 vpushi(bits);
2633 gen_op(TOK_SHL);
2634 /* result must be signed or the SAR is converted to an SHL
2635 This was not the case when "t" was a signed short
2636 and the last value on the stack was an unsigned int */
2637 vtop->type.t &= ~VT_UNSIGNED;
2638 vpushi(bits);
2639 gen_op(TOK_SAR);
2643 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2644 static void gen_cast_s(int t)
2646 CType type;
2647 type.t = t;
2648 type.ref = NULL;
2649 gen_cast(&type);
2652 static void gen_cast(CType *type)
2654 int sbt, dbt, sf, df, c, p;
2656 /* special delayed cast for char/short */
2657 /* XXX: in some cases (multiple cascaded casts), it may still
2658 be incorrect */
2659 if (vtop->r & VT_MUSTCAST) {
2660 vtop->r &= ~VT_MUSTCAST;
2661 force_charshort_cast(vtop->type.t);
2664 /* bitfields first get cast to ints */
2665 if (vtop->type.t & VT_BITFIELD) {
2666 gv(RC_INT);
2669 dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
2670 sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
2672 if (sbt != dbt) {
2673 sf = is_float(sbt);
2674 df = is_float(dbt);
2675 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2676 p = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM);
2677 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
2678 c &= dbt != VT_LDOUBLE;
2679 #endif
2680 if (c) {
2681 /* constant case: we can do it now */
2682 /* XXX: in ISOC, cannot do it if error in convert */
2683 if (sbt == VT_FLOAT)
2684 vtop->c.ld = vtop->c.f;
2685 else if (sbt == VT_DOUBLE)
2686 vtop->c.ld = vtop->c.d;
2688 if (df) {
2689 if ((sbt & VT_BTYPE) == VT_LLONG) {
2690 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
2691 vtop->c.ld = vtop->c.i;
2692 else
2693 vtop->c.ld = -(long double)-vtop->c.i;
2694 } else if(!sf) {
2695 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
2696 vtop->c.ld = (uint32_t)vtop->c.i;
2697 else
2698 vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
2701 if (dbt == VT_FLOAT)
2702 vtop->c.f = (float)vtop->c.ld;
2703 else if (dbt == VT_DOUBLE)
2704 vtop->c.d = (double)vtop->c.ld;
2705 } else if (sf && dbt == (VT_LLONG|VT_UNSIGNED)) {
2706 vtop->c.i = vtop->c.ld;
2707 } else if (sf && dbt == VT_BOOL) {
2708 vtop->c.i = (vtop->c.ld != 0);
2709 } else {
2710 if(sf)
2711 vtop->c.i = vtop->c.ld;
2712 else if (sbt == (VT_LLONG|VT_UNSIGNED))
2714 else if (sbt & VT_UNSIGNED)
2715 vtop->c.i = (uint32_t)vtop->c.i;
2716 #if PTR_SIZE == 8
2717 else if (sbt == VT_PTR)
2719 #endif
2720 else if (sbt != VT_LLONG)
2721 vtop->c.i = ((uint32_t)vtop->c.i |
2722 -(vtop->c.i & 0x80000000));
2724 if (dbt == (VT_LLONG|VT_UNSIGNED))
2726 else if (dbt == VT_BOOL)
2727 vtop->c.i = (vtop->c.i != 0);
2728 #if PTR_SIZE == 8
2729 else if (dbt == VT_PTR)
2731 #endif
2732 else if (dbt != VT_LLONG) {
2733 uint32_t m = ((dbt & VT_BTYPE) == VT_BYTE ? 0xff :
2734 (dbt & VT_BTYPE) == VT_SHORT ? 0xffff :
2735 0xffffffff);
2736 vtop->c.i &= m;
2737 if (!(dbt & VT_UNSIGNED))
2738 vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
2741 } else if (p && dbt == VT_BOOL) {
2742 vtop->r = VT_CONST;
2743 vtop->c.i = 1;
2744 } else {
2745 /* non constant case: generate code */
2746 if (sf && df) {
2747 /* convert from fp to fp */
2748 gen_cvt_ftof(dbt);
2749 } else if (df) {
2750 /* convert int to fp */
2751 gen_cvt_itof1(dbt);
2752 } else if (sf) {
2753 /* convert fp to int */
2754 if (dbt == VT_BOOL) {
2755 vpushi(0);
2756 gen_op(TOK_NE);
2757 } else {
2758 /* we handle char/short/etc... with generic code */
2759 if (dbt != (VT_INT | VT_UNSIGNED) &&
2760 dbt != (VT_LLONG | VT_UNSIGNED) &&
2761 dbt != VT_LLONG)
2762 dbt = VT_INT;
2763 gen_cvt_ftoi1(dbt);
2764 if (dbt == VT_INT && (type->t & (VT_BTYPE | VT_UNSIGNED)) != dbt) {
2765 /* additional cast for char/short... */
2766 vtop->type.t = dbt;
2767 gen_cast(type);
2770 #if PTR_SIZE == 4
2771 } else if ((dbt & VT_BTYPE) == VT_LLONG) {
2772 if ((sbt & VT_BTYPE) != VT_LLONG) {
2773 /* scalar to long long */
2774 /* machine independent conversion */
2775 gv(RC_INT);
2776 /* generate high word */
2777 if (sbt == (VT_INT | VT_UNSIGNED)) {
2778 vpushi(0);
2779 gv(RC_INT);
2780 } else {
2781 if (sbt == VT_PTR) {
2782 /* cast from pointer to int before we apply
2783 shift operation, which pointers don't support*/
2784 gen_cast_s(VT_INT);
2786 gv_dup();
2787 vpushi(31);
2788 gen_op(TOK_SAR);
2790 /* patch second register */
2791 vtop[-1].r2 = vtop->r;
2792 vpop();
2794 #else
2795 } else if ((dbt & VT_BTYPE) == VT_LLONG ||
2796 (dbt & VT_BTYPE) == VT_PTR ||
2797 (dbt & VT_BTYPE) == VT_FUNC) {
2798 if ((sbt & VT_BTYPE) != VT_LLONG &&
2799 (sbt & VT_BTYPE) != VT_PTR &&
2800 (sbt & VT_BTYPE) != VT_FUNC) {
2801 /* need to convert from 32bit to 64bit */
2802 gv(RC_INT);
2803 if (sbt != (VT_INT | VT_UNSIGNED)) {
2804 #if defined(TCC_TARGET_ARM64)
2805 gen_cvt_sxtw();
2806 #elif defined(TCC_TARGET_X86_64)
2807 int r = gv(RC_INT);
2808 /* x86_64 specific: movslq */
2809 o(0x6348);
2810 o(0xc0 + (REG_VALUE(r) << 3) + REG_VALUE(r));
2811 #else
2812 #error
2813 #endif
2816 #endif
2817 } else if (dbt == VT_BOOL) {
2818 /* scalar to bool */
2819 vpushi(0);
2820 gen_op(TOK_NE);
2821 } else if ((dbt & VT_BTYPE) == VT_BYTE ||
2822 (dbt & VT_BTYPE) == VT_SHORT) {
2823 if (sbt == VT_PTR) {
2824 vtop->type.t = VT_INT;
2825 tcc_warning("nonportable conversion from pointer to char/short");
2827 force_charshort_cast(dbt);
2828 } else if ((dbt & VT_BTYPE) == VT_INT) {
2829 /* scalar to int */
2830 if ((sbt & VT_BTYPE) == VT_LLONG) {
2831 #if PTR_SIZE == 4
2832 /* from long long: just take low order word */
2833 lexpand();
2834 vpop();
2835 #else
2836 vpushi(0xffffffff);
2837 vtop->type.t |= VT_UNSIGNED;
2838 gen_op('&');
2839 #endif
2841 /* if lvalue and single word type, nothing to do because
2842 the lvalue already contains the real type size (see
2843 VT_LVAL_xxx constants) */
2846 } else if ((dbt & VT_BTYPE) == VT_PTR && !(vtop->r & VT_LVAL)) {
2847 /* if we are casting between pointer types,
2848 we must update the VT_LVAL_xxx size */
2849 vtop->r = (vtop->r & ~VT_LVAL_TYPE)
2850 | (lvalue_type(type->ref->type.t) & VT_LVAL_TYPE);
2852 vtop->type = *type;
2853 vtop->type.t &= ~ ( VT_CONSTANT | VT_VOLATILE | VT_ARRAY );
2856 /* return type size as known at compile time. Put alignment at 'a' */
2857 ST_FUNC int type_size(CType *type, int *a)
2859 Sym *s;
2860 int bt;
2862 bt = type->t & VT_BTYPE;
2863 if (bt == VT_STRUCT) {
2864 /* struct/union */
2865 s = type->ref;
2866 *a = s->r;
2867 return s->c;
2868 } else if (bt == VT_PTR) {
2869 if (type->t & VT_ARRAY) {
2870 int ts;
2872 s = type->ref;
2873 ts = type_size(&s->type, a);
2875 if (ts < 0 && s->c < 0)
2876 ts = -ts;
2878 return ts * s->c;
2879 } else {
2880 *a = PTR_SIZE;
2881 return PTR_SIZE;
2883 } else if (IS_ENUM(type->t) && type->ref->c < 0) {
2884 return -1; /* incomplete enum */
2885 } else if (bt == VT_LDOUBLE) {
2886 *a = LDOUBLE_ALIGN;
2887 return LDOUBLE_SIZE;
2888 } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
2889 #ifdef TCC_TARGET_I386
2890 #ifdef TCC_TARGET_PE
2891 *a = 8;
2892 #else
2893 *a = 4;
2894 #endif
2895 #elif defined(TCC_TARGET_ARM)
2896 #ifdef TCC_ARM_EABI
2897 *a = 8;
2898 #else
2899 *a = 4;
2900 #endif
2901 #else
2902 *a = 8;
2903 #endif
2904 return 8;
2905 } else if (bt == VT_INT || bt == VT_FLOAT) {
2906 *a = 4;
2907 return 4;
2908 } else if (bt == VT_SHORT) {
2909 *a = 2;
2910 return 2;
2911 } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
2912 *a = 8;
2913 return 16;
2914 } else {
2915 /* char, void, function, _Bool */
2916 *a = 1;
2917 return 1;
2921 /* push type size as known at runtime time on top of value stack. Put
2922 alignment at 'a' */
2923 ST_FUNC void vla_runtime_type_size(CType *type, int *a)
2925 if (type->t & VT_VLA) {
2926 type_size(&type->ref->type, a);
2927 vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
2928 } else {
2929 vpushi(type_size(type, a));
2933 /* return the pointed type of t */
2934 static inline CType *pointed_type(CType *type)
2936 return &type->ref->type;
2939 /* modify type so that its it is a pointer to type. */
2940 ST_FUNC void mk_pointer(CType *type)
2942 Sym *s;
2943 s = sym_push(SYM_FIELD, type, 0, -1);
2944 type->t = VT_PTR | (type->t & VT_STORAGE);
2945 type->ref = s;
2948 /* compare function types. OLD functions match any new functions */
2949 static int is_compatible_func(CType *type1, CType *type2)
2951 Sym *s1, *s2;
2953 s1 = type1->ref;
2954 s2 = type2->ref;
2955 if (s1->f.func_call != s2->f.func_call)
2956 return 0;
2957 if (s1->f.func_type != s2->f.func_type
2958 && s1->f.func_type != FUNC_OLD
2959 && s2->f.func_type != FUNC_OLD)
2960 return 0;
2961 /* we should check the function return type for FUNC_OLD too
2962 but that causes problems with the internally used support
2963 functions such as TOK_memmove */
2964 if (s1->f.func_type == FUNC_OLD && !s1->next)
2965 return 1;
2966 if (s2->f.func_type == FUNC_OLD && !s2->next)
2967 return 1;
2968 for (;;) {
2969 if (!is_compatible_unqualified_types(&s1->type, &s2->type))
2970 return 0;
2971 s1 = s1->next;
2972 s2 = s2->next;
2973 if (!s1)
2974 return !s2;
2975 if (!s2)
2976 return 0;
2980 /* return true if type1 and type2 are the same. If unqualified is
2981 true, qualifiers on the types are ignored.
2983 static int compare_types(CType *type1, CType *type2, int unqualified)
2985 int bt1, t1, t2;
2987 t1 = type1->t & VT_TYPE;
2988 t2 = type2->t & VT_TYPE;
2989 if (unqualified) {
2990 /* strip qualifiers before comparing */
2991 t1 &= ~(VT_CONSTANT | VT_VOLATILE);
2992 t2 &= ~(VT_CONSTANT | VT_VOLATILE);
2995 /* Default Vs explicit signedness only matters for char */
2996 if ((t1 & VT_BTYPE) != VT_BYTE) {
2997 t1 &= ~VT_DEFSIGN;
2998 t2 &= ~VT_DEFSIGN;
3000 /* XXX: bitfields ? */
3001 if (t1 != t2)
3002 return 0;
3004 if ((t1 & VT_ARRAY)
3005 && !(type1->ref->c < 0
3006 || type2->ref->c < 0
3007 || type1->ref->c == type2->ref->c))
3008 return 0;
3010 /* test more complicated cases */
3011 bt1 = t1 & VT_BTYPE;
3012 if (bt1 == VT_PTR) {
3013 type1 = pointed_type(type1);
3014 type2 = pointed_type(type2);
3015 return is_compatible_types(type1, type2);
3016 } else if (bt1 == VT_STRUCT) {
3017 return (type1->ref == type2->ref);
3018 } else if (bt1 == VT_FUNC) {
3019 return is_compatible_func(type1, type2);
3020 } else if (IS_ENUM(type1->t) || IS_ENUM(type2->t)) {
3021 return type1->ref == type2->ref;
3022 } else {
3023 return 1;
3027 /* return true if type1 and type2 are exactly the same (including
3028 qualifiers).
3030 static int is_compatible_types(CType *type1, CType *type2)
3032 return compare_types(type1,type2,0);
3035 /* return true if type1 and type2 are the same (ignoring qualifiers).
3037 static int is_compatible_unqualified_types(CType *type1, CType *type2)
3039 return compare_types(type1,type2,1);
3042 /* print a type. If 'varstr' is not NULL, then the variable is also
3043 printed in the type */
3044 /* XXX: union */
3045 /* XXX: add array and function pointers */
3046 static void type_to_str(char *buf, int buf_size,
3047 CType *type, const char *varstr)
3049 int bt, v, t;
3050 Sym *s, *sa;
3051 char buf1[256];
3052 const char *tstr;
3054 t = type->t;
3055 bt = t & VT_BTYPE;
3056 buf[0] = '\0';
3058 if (t & VT_EXTERN)
3059 pstrcat(buf, buf_size, "extern ");
3060 if (t & VT_STATIC)
3061 pstrcat(buf, buf_size, "static ");
3062 if (t & VT_TYPEDEF)
3063 pstrcat(buf, buf_size, "typedef ");
3064 if (t & VT_INLINE)
3065 pstrcat(buf, buf_size, "inline ");
3066 if (t & VT_VOLATILE)
3067 pstrcat(buf, buf_size, "volatile ");
3068 if (t & VT_CONSTANT)
3069 pstrcat(buf, buf_size, "const ");
3071 if (((t & VT_DEFSIGN) && bt == VT_BYTE)
3072 || ((t & VT_UNSIGNED)
3073 && (bt == VT_SHORT || bt == VT_INT || bt == VT_LLONG)
3074 && !IS_ENUM(t)
3076 pstrcat(buf, buf_size, (t & VT_UNSIGNED) ? "unsigned " : "signed ");
3078 buf_size -= strlen(buf);
3079 buf += strlen(buf);
3081 switch(bt) {
3082 case VT_VOID:
3083 tstr = "void";
3084 goto add_tstr;
3085 case VT_BOOL:
3086 tstr = "_Bool";
3087 goto add_tstr;
3088 case VT_BYTE:
3089 tstr = "char";
3090 goto add_tstr;
3091 case VT_SHORT:
3092 tstr = "short";
3093 goto add_tstr;
3094 case VT_INT:
3095 tstr = "int";
3096 goto maybe_long;
3097 case VT_LLONG:
3098 tstr = "long long";
3099 maybe_long:
3100 if (t & VT_LONG)
3101 tstr = "long";
3102 if (!IS_ENUM(t))
3103 goto add_tstr;
3104 tstr = "enum ";
3105 goto tstruct;
3106 case VT_FLOAT:
3107 tstr = "float";
3108 goto add_tstr;
3109 case VT_DOUBLE:
3110 tstr = "double";
3111 goto add_tstr;
3112 case VT_LDOUBLE:
3113 tstr = "long double";
3114 add_tstr:
3115 pstrcat(buf, buf_size, tstr);
3116 break;
3117 case VT_STRUCT:
3118 tstr = "struct ";
3119 if (IS_UNION(t))
3120 tstr = "union ";
3121 tstruct:
3122 pstrcat(buf, buf_size, tstr);
3123 v = type->ref->v & ~SYM_STRUCT;
3124 if (v >= SYM_FIRST_ANOM)
3125 pstrcat(buf, buf_size, "<anonymous>");
3126 else
3127 pstrcat(buf, buf_size, get_tok_str(v, NULL));
3128 break;
3129 case VT_FUNC:
3130 s = type->ref;
3131 buf1[0]=0;
3132 if (varstr && '*' == *varstr) {
3133 pstrcat(buf1, sizeof(buf1), "(");
3134 pstrcat(buf1, sizeof(buf1), varstr);
3135 pstrcat(buf1, sizeof(buf1), ")");
3137 pstrcat(buf1, buf_size, "(");
3138 sa = s->next;
3139 while (sa != NULL) {
3140 char buf2[256];
3141 type_to_str(buf2, sizeof(buf2), &sa->type, NULL);
3142 pstrcat(buf1, sizeof(buf1), buf2);
3143 sa = sa->next;
3144 if (sa)
3145 pstrcat(buf1, sizeof(buf1), ", ");
3147 if (s->f.func_type == FUNC_ELLIPSIS)
3148 pstrcat(buf1, sizeof(buf1), ", ...");
3149 pstrcat(buf1, sizeof(buf1), ")");
3150 type_to_str(buf, buf_size, &s->type, buf1);
3151 goto no_var;
3152 case VT_PTR:
3153 s = type->ref;
3154 if (t & VT_ARRAY) {
3155 if (varstr && '*' == *varstr)
3156 snprintf(buf1, sizeof(buf1), "(%s)[%d]", varstr, s->c);
3157 else
3158 snprintf(buf1, sizeof(buf1), "%s[%d]", varstr ? varstr : "", s->c);
3159 type_to_str(buf, buf_size, &s->type, buf1);
3160 goto no_var;
3162 pstrcpy(buf1, sizeof(buf1), "*");
3163 if (t & VT_CONSTANT)
3164 pstrcat(buf1, buf_size, "const ");
3165 if (t & VT_VOLATILE)
3166 pstrcat(buf1, buf_size, "volatile ");
3167 if (varstr)
3168 pstrcat(buf1, sizeof(buf1), varstr);
3169 type_to_str(buf, buf_size, &s->type, buf1);
3170 goto no_var;
3172 if (varstr) {
3173 pstrcat(buf, buf_size, " ");
3174 pstrcat(buf, buf_size, varstr);
3176 no_var: ;
3179 /* verify type compatibility to store vtop in 'dt' type, and generate
3180 casts if needed. */
3181 static void gen_assign_cast(CType *dt)
3183 CType *st, *type1, *type2;
3184 char buf1[256], buf2[256];
3185 int dbt, sbt, qualwarn, lvl;
3187 st = &vtop->type; /* source type */
3188 dbt = dt->t & VT_BTYPE;
3189 sbt = st->t & VT_BTYPE;
3190 if (sbt == VT_VOID || dbt == VT_VOID) {
3191 if (sbt == VT_VOID && dbt == VT_VOID)
3192 ; /* It is Ok if both are void */
3193 else
3194 tcc_error("cannot cast from/to void");
3196 if (dt->t & VT_CONSTANT)
3197 tcc_warning("assignment of read-only location");
3198 switch(dbt) {
3199 case VT_PTR:
3200 /* special cases for pointers */
3201 /* '0' can also be a pointer */
3202 if (is_null_pointer(vtop))
3203 break;
3204 /* accept implicit pointer to integer cast with warning */
3205 if (is_integer_btype(sbt)) {
3206 tcc_warning("assignment makes pointer from integer without a cast");
3207 break;
3209 type1 = pointed_type(dt);
3210 if (sbt == VT_PTR)
3211 type2 = pointed_type(st);
3212 else if (sbt == VT_FUNC)
3213 type2 = st; /* a function is implicitly a function pointer */
3214 else
3215 goto error;
3216 if (is_compatible_types(type1, type2))
3217 break;
3218 for (qualwarn = lvl = 0;; ++lvl) {
3219 if (((type2->t & VT_CONSTANT) && !(type1->t & VT_CONSTANT)) ||
3220 ((type2->t & VT_VOLATILE) && !(type1->t & VT_VOLATILE)))
3221 qualwarn = 1;
3222 dbt = type1->t & (VT_BTYPE|VT_LONG);
3223 sbt = type2->t & (VT_BTYPE|VT_LONG);
3224 if (dbt != VT_PTR || sbt != VT_PTR)
3225 break;
3226 type1 = pointed_type(type1);
3227 type2 = pointed_type(type2);
3229 if (!is_compatible_unqualified_types(type1, type2)) {
3230 if ((dbt == VT_VOID || sbt == VT_VOID) && lvl == 0) {
3231 /* void * can match anything */
3232 } else if (dbt == sbt
3233 && is_integer_btype(sbt & VT_BTYPE)
3234 && IS_ENUM(type1->t) + IS_ENUM(type2->t)
3235 + !!((type1->t ^ type2->t) & VT_UNSIGNED) < 2) {
3236 /* Like GCC don't warn by default for merely changes
3237 in pointer target signedness. Do warn for different
3238 base types, though, in particular for unsigned enums
3239 and signed int targets. */
3240 } else {
3241 tcc_warning("assignment from incompatible pointer type");
3242 break;
3245 if (qualwarn)
3246 tcc_warning("assignment discards qualifiers from pointer target type");
3247 break;
3248 case VT_BYTE:
3249 case VT_SHORT:
3250 case VT_INT:
3251 case VT_LLONG:
3252 if (sbt == VT_PTR || sbt == VT_FUNC) {
3253 tcc_warning("assignment makes integer from pointer without a cast");
3254 } else if (sbt == VT_STRUCT) {
3255 goto case_VT_STRUCT;
3257 /* XXX: more tests */
3258 break;
3259 case VT_STRUCT:
3260 case_VT_STRUCT:
3261 if (!is_compatible_unqualified_types(dt, st)) {
3262 error:
3263 type_to_str(buf1, sizeof(buf1), st, NULL);
3264 type_to_str(buf2, sizeof(buf2), dt, NULL);
3265 tcc_error("cannot cast '%s' to '%s'", buf1, buf2);
3267 break;
3269 gen_cast(dt);
3272 /* store vtop in lvalue pushed on stack */
3273 ST_FUNC void vstore(void)
3275 int sbt, dbt, ft, r, t, size, align, bit_size, bit_pos, rc, delayed_cast;
3277 ft = vtop[-1].type.t;
3278 sbt = vtop->type.t & VT_BTYPE;
3279 dbt = ft & VT_BTYPE;
3280 if ((((sbt == VT_INT || sbt == VT_SHORT) && dbt == VT_BYTE) ||
3281 (sbt == VT_INT && dbt == VT_SHORT))
3282 && !(vtop->type.t & VT_BITFIELD)) {
3283 /* optimize char/short casts */
3284 delayed_cast = VT_MUSTCAST;
3285 vtop->type.t = ft & VT_TYPE;
3286 /* XXX: factorize */
3287 if (ft & VT_CONSTANT)
3288 tcc_warning("assignment of read-only location");
3289 } else {
3290 delayed_cast = 0;
3291 if (!(ft & VT_BITFIELD))
3292 gen_assign_cast(&vtop[-1].type);
3295 if (sbt == VT_STRUCT) {
3296 /* if structure, only generate pointer */
3297 /* structure assignment : generate memcpy */
3298 /* XXX: optimize if small size */
3299 size = type_size(&vtop->type, &align);
3301 /* destination */
3302 vswap();
3303 vtop->type.t = VT_PTR;
3304 gaddrof();
3306 /* address of memcpy() */
3307 #ifdef TCC_ARM_EABI
3308 if(!(align & 7))
3309 vpush_global_sym(&func_old_type, TOK_memcpy8);
3310 else if(!(align & 3))
3311 vpush_global_sym(&func_old_type, TOK_memcpy4);
3312 else
3313 #endif
3314 /* Use memmove, rather than memcpy, as dest and src may be same: */
3315 vpush_global_sym(&func_old_type, TOK_memmove);
3317 vswap();
3318 /* source */
3319 vpushv(vtop - 2);
3320 vtop->type.t = VT_PTR;
3321 gaddrof();
3322 /* type size */
3323 vpushi(size);
3324 gfunc_call(3);
3326 /* leave source on stack */
3327 } else if (ft & VT_BITFIELD) {
3328 /* bitfield store handling */
3330 /* save lvalue as expression result (example: s.b = s.a = n;) */
3331 vdup(), vtop[-1] = vtop[-2];
3333 bit_pos = BIT_POS(ft);
3334 bit_size = BIT_SIZE(ft);
3335 /* remove bit field info to avoid loops */
3336 vtop[-1].type.t = ft & ~VT_STRUCT_MASK;
3338 if ((ft & VT_BTYPE) == VT_BOOL) {
3339 gen_cast(&vtop[-1].type);
3340 vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
3343 r = adjust_bf(vtop - 1, bit_pos, bit_size);
3344 if (r == VT_STRUCT) {
3345 gen_cast_s((ft & VT_BTYPE) == VT_LLONG ? VT_LLONG : VT_INT);
3346 store_packed_bf(bit_pos, bit_size);
3347 } else {
3348 unsigned long long mask = (1ULL << bit_size) - 1;
3349 if ((ft & VT_BTYPE) != VT_BOOL) {
3350 /* mask source */
3351 if ((vtop[-1].type.t & VT_BTYPE) == VT_LLONG)
3352 vpushll(mask);
3353 else
3354 vpushi((unsigned)mask);
3355 gen_op('&');
3357 /* shift source */
3358 vpushi(bit_pos);
3359 gen_op(TOK_SHL);
3360 vswap();
3361 /* duplicate destination */
3362 vdup();
3363 vrott(3);
3364 /* load destination, mask and or with source */
3365 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
3366 vpushll(~(mask << bit_pos));
3367 else
3368 vpushi(~((unsigned)mask << bit_pos));
3369 gen_op('&');
3370 gen_op('|');
3371 /* store result */
3372 vstore();
3373 /* ... and discard */
3374 vpop();
3376 } else if (dbt == VT_VOID) {
3377 --vtop;
3378 } else {
3379 #ifdef CONFIG_TCC_BCHECK
3380 /* bound check case */
3381 if (vtop[-1].r & VT_MUSTBOUND) {
3382 vswap();
3383 gbound();
3384 vswap();
3386 #endif
3387 rc = RC_INT;
3388 if (is_float(ft)) {
3389 rc = RC_FLOAT;
3390 #ifdef TCC_TARGET_X86_64
3391 if ((ft & VT_BTYPE) == VT_LDOUBLE) {
3392 rc = RC_ST0;
3393 } else if ((ft & VT_BTYPE) == VT_QFLOAT) {
3394 rc = RC_FRET;
3396 #endif
3398 r = gv(rc); /* generate value */
3399 /* if lvalue was saved on stack, must read it */
3400 if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
3401 SValue sv;
3402 t = get_reg(RC_INT);
3403 #if PTR_SIZE == 8
3404 sv.type.t = VT_PTR;
3405 #else
3406 sv.type.t = VT_INT;
3407 #endif
3408 sv.r = VT_LOCAL | VT_LVAL;
3409 sv.c.i = vtop[-1].c.i;
3410 load(t, &sv);
3411 vtop[-1].r = t | VT_LVAL;
3413 /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
3414 #if PTR_SIZE == 8
3415 if (((ft & VT_BTYPE) == VT_QLONG) || ((ft & VT_BTYPE) == VT_QFLOAT)) {
3416 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
3417 #else
3418 if ((ft & VT_BTYPE) == VT_LLONG) {
3419 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
3420 #endif
3421 vtop[-1].type.t = load_type;
3422 store(r, vtop - 1);
3423 vswap();
3424 /* convert to int to increment easily */
3425 vtop->type.t = addr_type;
3426 gaddrof();
3427 vpushi(load_size);
3428 gen_op('+');
3429 vtop->r |= VT_LVAL;
3430 vswap();
3431 vtop[-1].type.t = load_type;
3432 /* XXX: it works because r2 is spilled last ! */
3433 store(vtop->r2, vtop - 1);
3434 } else {
3435 store(r, vtop - 1);
3438 vswap();
3439 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
3440 vtop->r |= delayed_cast;
3444 /* post defines POST/PRE add. c is the token ++ or -- */
3445 ST_FUNC void inc(int post, int c)
3447 test_lvalue();
3448 vdup(); /* save lvalue */
3449 if (post) {
3450 gv_dup(); /* duplicate value */
3451 vrotb(3);
3452 vrotb(3);
3454 /* add constant */
3455 vpushi(c - TOK_MID);
3456 gen_op('+');
3457 vstore(); /* store value */
3458 if (post)
3459 vpop(); /* if post op, return saved value */
3462 ST_FUNC void parse_mult_str (CString *astr, const char *msg)
3464 /* read the string */
3465 if (tok != TOK_STR)
3466 expect(msg);
3467 cstr_new(astr);
3468 while (tok == TOK_STR) {
3469 /* XXX: add \0 handling too ? */
3470 cstr_cat(astr, tokc.str.data, -1);
3471 next();
3473 cstr_ccat(astr, '\0');
3476 /* If I is >= 1 and a power of two, returns log2(i)+1.
3477 If I is 0 returns 0. */
3478 static int exact_log2p1(int i)
3480 int ret;
3481 if (!i)
3482 return 0;
3483 for (ret = 1; i >= 1 << 8; ret += 8)
3484 i >>= 8;
3485 if (i >= 1 << 4)
3486 ret += 4, i >>= 4;
3487 if (i >= 1 << 2)
3488 ret += 2, i >>= 2;
3489 if (i >= 1 << 1)
3490 ret++;
3491 return ret;
3494 /* Parse __attribute__((...)) GNUC extension. */
3495 static void parse_attribute(AttributeDef *ad)
3497 int t, n;
3498 CString astr;
3500 redo:
3501 if (tok != TOK_ATTRIBUTE1 && tok != TOK_ATTRIBUTE2)
3502 return;
3503 next();
3504 skip('(');
3505 skip('(');
3506 while (tok != ')') {
3507 if (tok < TOK_IDENT)
3508 expect("attribute name");
3509 t = tok;
3510 next();
3511 switch(t) {
3512 case TOK_CLEANUP1:
3513 case TOK_CLEANUP2:
3515 Sym *s;
3517 skip('(');
3518 s = sym_find(tok);
3519 if (!s) {
3520 tcc_warning("implicit declaration of function '%s'",
3521 get_tok_str(tok, &tokc));
3522 s = external_global_sym(tok, &func_old_type);
3524 ad->cleanup_func = s;
3525 next();
3526 skip(')');
3527 break;
3529 case TOK_SECTION1:
3530 case TOK_SECTION2:
3531 skip('(');
3532 parse_mult_str(&astr, "section name");
3533 ad->section = find_section(tcc_state, (char *)astr.data);
3534 skip(')');
3535 cstr_free(&astr);
3536 break;
3537 case TOK_ALIAS1:
3538 case TOK_ALIAS2:
3539 skip('(');
3540 parse_mult_str(&astr, "alias(\"target\")");
3541 ad->alias_target = /* save string as token, for later */
3542 tok_alloc((char*)astr.data, astr.size-1)->tok;
3543 skip(')');
3544 cstr_free(&astr);
3545 break;
3546 case TOK_VISIBILITY1:
3547 case TOK_VISIBILITY2:
3548 skip('(');
3549 parse_mult_str(&astr,
3550 "visibility(\"default|hidden|internal|protected\")");
3551 if (!strcmp (astr.data, "default"))
3552 ad->a.visibility = STV_DEFAULT;
3553 else if (!strcmp (astr.data, "hidden"))
3554 ad->a.visibility = STV_HIDDEN;
3555 else if (!strcmp (astr.data, "internal"))
3556 ad->a.visibility = STV_INTERNAL;
3557 else if (!strcmp (astr.data, "protected"))
3558 ad->a.visibility = STV_PROTECTED;
3559 else
3560 expect("visibility(\"default|hidden|internal|protected\")");
3561 skip(')');
3562 cstr_free(&astr);
3563 break;
3564 case TOK_ALIGNED1:
3565 case TOK_ALIGNED2:
3566 if (tok == '(') {
3567 next();
3568 n = expr_const();
3569 if (n <= 0 || (n & (n - 1)) != 0)
3570 tcc_error("alignment must be a positive power of two");
3571 skip(')');
3572 } else {
3573 n = MAX_ALIGN;
3575 ad->a.aligned = exact_log2p1(n);
3576 if (n != 1 << (ad->a.aligned - 1))
3577 tcc_error("alignment of %d is larger than implemented", n);
3578 break;
3579 case TOK_PACKED1:
3580 case TOK_PACKED2:
3581 ad->a.packed = 1;
3582 break;
3583 case TOK_WEAK1:
3584 case TOK_WEAK2:
3585 ad->a.weak = 1;
3586 break;
3587 case TOK_UNUSED1:
3588 case TOK_UNUSED2:
3589 /* currently, no need to handle it because tcc does not
3590 track unused objects */
3591 break;
3592 case TOK_NORETURN1:
3593 case TOK_NORETURN2:
3594 ad->f.func_noreturn = 1;
3595 break;
3596 case TOK_CDECL1:
3597 case TOK_CDECL2:
3598 case TOK_CDECL3:
3599 ad->f.func_call = FUNC_CDECL;
3600 break;
3601 case TOK_STDCALL1:
3602 case TOK_STDCALL2:
3603 case TOK_STDCALL3:
3604 ad->f.func_call = FUNC_STDCALL;
3605 break;
3606 #ifdef TCC_TARGET_I386
3607 case TOK_REGPARM1:
3608 case TOK_REGPARM2:
3609 skip('(');
3610 n = expr_const();
3611 if (n > 3)
3612 n = 3;
3613 else if (n < 0)
3614 n = 0;
3615 if (n > 0)
3616 ad->f.func_call = FUNC_FASTCALL1 + n - 1;
3617 skip(')');
3618 break;
3619 case TOK_FASTCALL1:
3620 case TOK_FASTCALL2:
3621 case TOK_FASTCALL3:
3622 ad->f.func_call = FUNC_FASTCALLW;
3623 break;
3624 #endif
3625 case TOK_MODE:
3626 skip('(');
3627 switch(tok) {
3628 case TOK_MODE_DI:
3629 ad->attr_mode = VT_LLONG + 1;
3630 break;
3631 case TOK_MODE_QI:
3632 ad->attr_mode = VT_BYTE + 1;
3633 break;
3634 case TOK_MODE_HI:
3635 ad->attr_mode = VT_SHORT + 1;
3636 break;
3637 case TOK_MODE_SI:
3638 case TOK_MODE_word:
3639 ad->attr_mode = VT_INT + 1;
3640 break;
3641 default:
3642 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok, NULL));
3643 break;
3645 next();
3646 skip(')');
3647 break;
3648 case TOK_DLLEXPORT:
3649 ad->a.dllexport = 1;
3650 break;
3651 case TOK_NODECORATE:
3652 ad->a.nodecorate = 1;
3653 break;
3654 case TOK_DLLIMPORT:
3655 ad->a.dllimport = 1;
3656 break;
3657 default:
3658 if (tcc_state->warn_unsupported)
3659 tcc_warning("'%s' attribute ignored", get_tok_str(t, NULL));
3660 /* skip parameters */
3661 if (tok == '(') {
3662 int parenthesis = 0;
3663 do {
3664 if (tok == '(')
3665 parenthesis++;
3666 else if (tok == ')')
3667 parenthesis--;
3668 next();
3669 } while (parenthesis && tok != -1);
3671 break;
3673 if (tok != ',')
3674 break;
3675 next();
3677 skip(')');
3678 skip(')');
3679 goto redo;
3682 static Sym * find_field (CType *type, int v, int *cumofs)
3684 Sym *s = type->ref;
3685 v |= SYM_FIELD;
3686 while ((s = s->next) != NULL) {
3687 if ((s->v & SYM_FIELD) &&
3688 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3689 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3690 Sym *ret = find_field (&s->type, v, cumofs);
3691 if (ret) {
3692 *cumofs += s->c;
3693 return ret;
3696 if (s->v == v)
3697 break;
3699 return s;
3702 static void struct_layout(CType *type, AttributeDef *ad)
3704 int size, align, maxalign, offset, c, bit_pos, bit_size;
3705 int packed, a, bt, prevbt, prev_bit_size;
3706 int pcc = !tcc_state->ms_bitfields;
3707 int pragma_pack = *tcc_state->pack_stack_ptr;
3708 Sym *f;
3710 maxalign = 1;
3711 offset = 0;
3712 c = 0;
3713 bit_pos = 0;
3714 prevbt = VT_STRUCT; /* make it never match */
3715 prev_bit_size = 0;
3717 //#define BF_DEBUG
3719 for (f = type->ref->next; f; f = f->next) {
3720 if (f->type.t & VT_BITFIELD)
3721 bit_size = BIT_SIZE(f->type.t);
3722 else
3723 bit_size = -1;
3724 size = type_size(&f->type, &align);
3725 a = f->a.aligned ? 1 << (f->a.aligned - 1) : 0;
3726 packed = 0;
3728 if (pcc && bit_size == 0) {
3729 /* in pcc mode, packing does not affect zero-width bitfields */
3731 } else {
3732 /* in pcc mode, attribute packed overrides if set. */
3733 if (pcc && (f->a.packed || ad->a.packed))
3734 align = packed = 1;
3736 /* pragma pack overrides align if lesser and packs bitfields always */
3737 if (pragma_pack) {
3738 packed = 1;
3739 if (pragma_pack < align)
3740 align = pragma_pack;
3741 /* in pcc mode pragma pack also overrides individual align */
3742 if (pcc && pragma_pack < a)
3743 a = 0;
3746 /* some individual align was specified */
3747 if (a)
3748 align = a;
3750 if (type->ref->type.t == VT_UNION) {
3751 if (pcc && bit_size >= 0)
3752 size = (bit_size + 7) >> 3;
3753 offset = 0;
3754 if (size > c)
3755 c = size;
3757 } else if (bit_size < 0) {
3758 if (pcc)
3759 c += (bit_pos + 7) >> 3;
3760 c = (c + align - 1) & -align;
3761 offset = c;
3762 if (size > 0)
3763 c += size;
3764 bit_pos = 0;
3765 prevbt = VT_STRUCT;
3766 prev_bit_size = 0;
3768 } else {
3769 /* A bit-field. Layout is more complicated. There are two
3770 options: PCC (GCC) compatible and MS compatible */
3771 if (pcc) {
3772 /* In PCC layout a bit-field is placed adjacent to the
3773 preceding bit-fields, except if:
3774 - it has zero-width
3775 - an individual alignment was given
3776 - it would overflow its base type container and
3777 there is no packing */
3778 if (bit_size == 0) {
3779 new_field:
3780 c = (c + ((bit_pos + 7) >> 3) + align - 1) & -align;
3781 bit_pos = 0;
3782 } else if (f->a.aligned) {
3783 goto new_field;
3784 } else if (!packed) {
3785 int a8 = align * 8;
3786 int ofs = ((c * 8 + bit_pos) % a8 + bit_size + a8 - 1) / a8;
3787 if (ofs > size / align)
3788 goto new_field;
3791 /* in pcc mode, long long bitfields have type int if they fit */
3792 if (size == 8 && bit_size <= 32)
3793 f->type.t = (f->type.t & ~VT_BTYPE) | VT_INT, size = 4;
3795 while (bit_pos >= align * 8)
3796 c += align, bit_pos -= align * 8;
3797 offset = c;
3799 /* In PCC layout named bit-fields influence the alignment
3800 of the containing struct using the base types alignment,
3801 except for packed fields (which here have correct align). */
3802 if (f->v & SYM_FIRST_ANOM
3803 // && bit_size // ??? gcc on ARM/rpi does that
3805 align = 1;
3807 } else {
3808 bt = f->type.t & VT_BTYPE;
3809 if ((bit_pos + bit_size > size * 8)
3810 || (bit_size > 0) == (bt != prevbt)
3812 c = (c + align - 1) & -align;
3813 offset = c;
3814 bit_pos = 0;
3815 /* In MS bitfield mode a bit-field run always uses
3816 at least as many bits as the underlying type.
3817 To start a new run it's also required that this
3818 or the last bit-field had non-zero width. */
3819 if (bit_size || prev_bit_size)
3820 c += size;
3822 /* In MS layout the records alignment is normally
3823 influenced by the field, except for a zero-width
3824 field at the start of a run (but by further zero-width
3825 fields it is again). */
3826 if (bit_size == 0 && prevbt != bt)
3827 align = 1;
3828 prevbt = bt;
3829 prev_bit_size = bit_size;
3832 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
3833 | (bit_pos << VT_STRUCT_SHIFT);
3834 bit_pos += bit_size;
3836 if (align > maxalign)
3837 maxalign = align;
3839 #ifdef BF_DEBUG
3840 printf("set field %s offset %-2d size %-2d align %-2d",
3841 get_tok_str(f->v & ~SYM_FIELD, NULL), offset, size, align);
3842 if (f->type.t & VT_BITFIELD) {
3843 printf(" pos %-2d bits %-2d",
3844 BIT_POS(f->type.t),
3845 BIT_SIZE(f->type.t)
3848 printf("\n");
3849 #endif
3851 f->c = offset;
3852 f->r = 0;
3855 if (pcc)
3856 c += (bit_pos + 7) >> 3;
3858 /* store size and alignment */
3859 a = bt = ad->a.aligned ? 1 << (ad->a.aligned - 1) : 1;
3860 if (a < maxalign)
3861 a = maxalign;
3862 type->ref->r = a;
3863 if (pragma_pack && pragma_pack < maxalign && 0 == pcc) {
3864 /* can happen if individual align for some member was given. In
3865 this case MSVC ignores maxalign when aligning the size */
3866 a = pragma_pack;
3867 if (a < bt)
3868 a = bt;
3870 c = (c + a - 1) & -a;
3871 type->ref->c = c;
3873 #ifdef BF_DEBUG
3874 printf("struct size %-2d align %-2d\n\n", c, a), fflush(stdout);
3875 #endif
3877 /* check whether we can access bitfields by their type */
3878 for (f = type->ref->next; f; f = f->next) {
3879 int s, px, cx, c0;
3880 CType t;
3882 if (0 == (f->type.t & VT_BITFIELD))
3883 continue;
3884 f->type.ref = f;
3885 f->auxtype = -1;
3886 bit_size = BIT_SIZE(f->type.t);
3887 if (bit_size == 0)
3888 continue;
3889 bit_pos = BIT_POS(f->type.t);
3890 size = type_size(&f->type, &align);
3891 if (bit_pos + bit_size <= size * 8 && f->c + size <= c)
3892 continue;
3894 /* try to access the field using a different type */
3895 c0 = -1, s = align = 1;
3896 for (;;) {
3897 px = f->c * 8 + bit_pos;
3898 cx = (px >> 3) & -align;
3899 px = px - (cx << 3);
3900 if (c0 == cx)
3901 break;
3902 s = (px + bit_size + 7) >> 3;
3903 if (s > 4) {
3904 t.t = VT_LLONG;
3905 } else if (s > 2) {
3906 t.t = VT_INT;
3907 } else if (s > 1) {
3908 t.t = VT_SHORT;
3909 } else {
3910 t.t = VT_BYTE;
3912 s = type_size(&t, &align);
3913 c0 = cx;
3916 if (px + bit_size <= s * 8 && cx + s <= c) {
3917 /* update offset and bit position */
3918 f->c = cx;
3919 bit_pos = px;
3920 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
3921 | (bit_pos << VT_STRUCT_SHIFT);
3922 if (s != size)
3923 f->auxtype = t.t;
3924 #ifdef BF_DEBUG
3925 printf("FIX field %s offset %-2d size %-2d align %-2d "
3926 "pos %-2d bits %-2d\n",
3927 get_tok_str(f->v & ~SYM_FIELD, NULL),
3928 cx, s, align, px, bit_size);
3929 #endif
3930 } else {
3931 /* fall back to load/store single-byte wise */
3932 f->auxtype = VT_STRUCT;
3933 #ifdef BF_DEBUG
3934 printf("FIX field %s : load byte-wise\n",
3935 get_tok_str(f->v & ~SYM_FIELD, NULL));
3936 #endif
3941 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
3942 static void struct_decl(CType *type, int u)
3944 int v, c, size, align, flexible;
3945 int bit_size, bsize, bt;
3946 Sym *s, *ss, **ps;
3947 AttributeDef ad, ad1;
3948 CType type1, btype;
3950 memset(&ad, 0, sizeof ad);
3951 next();
3952 parse_attribute(&ad);
3953 if (tok != '{') {
3954 v = tok;
3955 next();
3956 /* struct already defined ? return it */
3957 if (v < TOK_IDENT)
3958 expect("struct/union/enum name");
3959 s = struct_find(v);
3960 if (s && (s->sym_scope == local_scope || tok != '{')) {
3961 if (u == s->type.t)
3962 goto do_decl;
3963 if (u == VT_ENUM && IS_ENUM(s->type.t))
3964 goto do_decl;
3965 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
3967 } else {
3968 v = anon_sym++;
3970 /* Record the original enum/struct/union token. */
3971 type1.t = u == VT_ENUM ? u | VT_INT | VT_UNSIGNED : u;
3972 type1.ref = NULL;
3973 /* we put an undefined size for struct/union */
3974 s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
3975 s->r = 0; /* default alignment is zero as gcc */
3976 do_decl:
3977 type->t = s->type.t;
3978 type->ref = s;
3980 if (tok == '{') {
3981 next();
3982 if (s->c != -1)
3983 tcc_error("struct/union/enum already defined");
3984 s->c = -2;
3985 /* cannot be empty */
3986 /* non empty enums are not allowed */
3987 ps = &s->next;
3988 if (u == VT_ENUM) {
3989 long long ll = 0, pl = 0, nl = 0;
3990 CType t;
3991 t.ref = s;
3992 /* enum symbols have static storage */
3993 t.t = VT_INT|VT_STATIC|VT_ENUM_VAL;
3994 for(;;) {
3995 v = tok;
3996 if (v < TOK_UIDENT)
3997 expect("identifier");
3998 ss = sym_find(v);
3999 if (ss && !local_stack)
4000 tcc_error("redefinition of enumerator '%s'",
4001 get_tok_str(v, NULL));
4002 next();
4003 if (tok == '=') {
4004 next();
4005 ll = expr_const64();
4007 ss = sym_push(v, &t, VT_CONST, 0);
4008 ss->enum_val = ll;
4009 *ps = ss, ps = &ss->next;
4010 if (ll < nl)
4011 nl = ll;
4012 if (ll > pl)
4013 pl = ll;
4014 if (tok != ',')
4015 break;
4016 next();
4017 ll++;
4018 /* NOTE: we accept a trailing comma */
4019 if (tok == '}')
4020 break;
4022 skip('}');
4023 /* set integral type of the enum */
4024 t.t = VT_INT;
4025 if (nl >= 0) {
4026 if (pl != (unsigned)pl)
4027 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4028 t.t |= VT_UNSIGNED;
4029 } else if (pl != (int)pl || nl != (int)nl)
4030 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4031 s->type.t = type->t = t.t | VT_ENUM;
4032 s->c = 0;
4033 /* set type for enum members */
4034 for (ss = s->next; ss; ss = ss->next) {
4035 ll = ss->enum_val;
4036 if (ll == (int)ll) /* default is int if it fits */
4037 continue;
4038 if (t.t & VT_UNSIGNED) {
4039 ss->type.t |= VT_UNSIGNED;
4040 if (ll == (unsigned)ll)
4041 continue;
4043 ss->type.t = (ss->type.t & ~VT_BTYPE)
4044 | (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4046 } else {
4047 c = 0;
4048 flexible = 0;
4049 while (tok != '}') {
4050 if (!parse_btype(&btype, &ad1)) {
4051 skip(';');
4052 continue;
4054 while (1) {
4055 if (flexible)
4056 tcc_error("flexible array member '%s' not at the end of struct",
4057 get_tok_str(v, NULL));
4058 bit_size = -1;
4059 v = 0;
4060 type1 = btype;
4061 if (tok != ':') {
4062 if (tok != ';')
4063 type_decl(&type1, &ad1, &v, TYPE_DIRECT);
4064 if (v == 0) {
4065 if ((type1.t & VT_BTYPE) != VT_STRUCT)
4066 expect("identifier");
4067 else {
4068 int v = btype.ref->v;
4069 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
4070 if (tcc_state->ms_extensions == 0)
4071 expect("identifier");
4075 if (type_size(&type1, &align) < 0) {
4076 if ((u == VT_STRUCT) && (type1.t & VT_ARRAY) && c)
4077 flexible = 1;
4078 else
4079 tcc_error("field '%s' has incomplete type",
4080 get_tok_str(v, NULL));
4082 if ((type1.t & VT_BTYPE) == VT_FUNC ||
4083 (type1.t & VT_BTYPE) == VT_VOID ||
4084 (type1.t & VT_STORAGE))
4085 tcc_error("invalid type for '%s'",
4086 get_tok_str(v, NULL));
4088 if (tok == ':') {
4089 next();
4090 bit_size = expr_const();
4091 /* XXX: handle v = 0 case for messages */
4092 if (bit_size < 0)
4093 tcc_error("negative width in bit-field '%s'",
4094 get_tok_str(v, NULL));
4095 if (v && bit_size == 0)
4096 tcc_error("zero width for bit-field '%s'",
4097 get_tok_str(v, NULL));
4098 parse_attribute(&ad1);
4100 size = type_size(&type1, &align);
4101 if (bit_size >= 0) {
4102 bt = type1.t & VT_BTYPE;
4103 if (bt != VT_INT &&
4104 bt != VT_BYTE &&
4105 bt != VT_SHORT &&
4106 bt != VT_BOOL &&
4107 bt != VT_LLONG)
4108 tcc_error("bitfields must have scalar type");
4109 bsize = size * 8;
4110 if (bit_size > bsize) {
4111 tcc_error("width of '%s' exceeds its type",
4112 get_tok_str(v, NULL));
4113 } else if (bit_size == bsize
4114 && !ad.a.packed && !ad1.a.packed) {
4115 /* no need for bit fields */
4117 } else if (bit_size == 64) {
4118 tcc_error("field width 64 not implemented");
4119 } else {
4120 type1.t = (type1.t & ~VT_STRUCT_MASK)
4121 | VT_BITFIELD
4122 | (bit_size << (VT_STRUCT_SHIFT + 6));
4125 if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
4126 /* Remember we've seen a real field to check
4127 for placement of flexible array member. */
4128 c = 1;
4130 /* If member is a struct or bit-field, enforce
4131 placing into the struct (as anonymous). */
4132 if (v == 0 &&
4133 ((type1.t & VT_BTYPE) == VT_STRUCT ||
4134 bit_size >= 0)) {
4135 v = anon_sym++;
4137 if (v) {
4138 ss = sym_push(v | SYM_FIELD, &type1, 0, 0);
4139 ss->a = ad1.a;
4140 *ps = ss;
4141 ps = &ss->next;
4143 if (tok == ';' || tok == TOK_EOF)
4144 break;
4145 skip(',');
4147 skip(';');
4149 skip('}');
4150 parse_attribute(&ad);
4151 struct_layout(type, &ad);
4156 static void sym_to_attr(AttributeDef *ad, Sym *s)
4158 merge_symattr(&ad->a, &s->a);
4159 merge_funcattr(&ad->f, &s->f);
4162 /* Add type qualifiers to a type. If the type is an array then the qualifiers
4163 are added to the element type, copied because it could be a typedef. */
4164 static void parse_btype_qualify(CType *type, int qualifiers)
4166 while (type->t & VT_ARRAY) {
4167 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
4168 type = &type->ref->type;
4170 type->t |= qualifiers;
4173 /* return 0 if no type declaration. otherwise, return the basic type
4174 and skip it.
4176 static int parse_btype(CType *type, AttributeDef *ad)
4178 int t, u, bt, st, type_found, typespec_found, g, n;
4179 Sym *s;
4180 CType type1;
4182 memset(ad, 0, sizeof(AttributeDef));
4183 type_found = 0;
4184 typespec_found = 0;
4185 t = VT_INT;
4186 bt = st = -1;
4187 type->ref = NULL;
4189 while(1) {
4190 switch(tok) {
4191 case TOK_EXTENSION:
4192 /* currently, we really ignore extension */
4193 next();
4194 continue;
4196 /* basic types */
4197 case TOK_CHAR:
4198 u = VT_BYTE;
4199 basic_type:
4200 next();
4201 basic_type1:
4202 if (u == VT_SHORT || u == VT_LONG) {
4203 if (st != -1 || (bt != -1 && bt != VT_INT))
4204 tmbt: tcc_error("too many basic types");
4205 st = u;
4206 } else {
4207 if (bt != -1 || (st != -1 && u != VT_INT))
4208 goto tmbt;
4209 bt = u;
4211 if (u != VT_INT)
4212 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
4213 typespec_found = 1;
4214 break;
4215 case TOK_VOID:
4216 u = VT_VOID;
4217 goto basic_type;
4218 case TOK_SHORT:
4219 u = VT_SHORT;
4220 goto basic_type;
4221 case TOK_INT:
4222 u = VT_INT;
4223 goto basic_type;
4224 case TOK_ALIGNAS:
4225 { int n;
4226 AttributeDef ad1;
4227 next();
4228 skip('(');
4229 memset(&ad1, 0, sizeof(AttributeDef));
4230 if (parse_btype(&type1, &ad1)) {
4231 type_decl(&type1, &ad1, &n, TYPE_ABSTRACT);
4232 if (ad1.a.aligned)
4233 n = 1 << (ad1.a.aligned - 1);
4234 else
4235 type_size(&type1, &n);
4236 } else {
4237 n = expr_const();
4238 if (n <= 0 || (n & (n - 1)) != 0)
4239 tcc_error("alignment must be a positive power of two");
4241 skip(')');
4242 ad->a.aligned = exact_log2p1(n);
4244 continue;
4245 case TOK_LONG:
4246 if ((t & VT_BTYPE) == VT_DOUBLE) {
4247 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
4248 } else if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
4249 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LLONG;
4250 } else {
4251 u = VT_LONG;
4252 goto basic_type;
4254 next();
4255 break;
4256 #ifdef TCC_TARGET_ARM64
4257 case TOK_UINT128:
4258 /* GCC's __uint128_t appears in some Linux header files. Make it a
4259 synonym for long double to get the size and alignment right. */
4260 u = VT_LDOUBLE;
4261 goto basic_type;
4262 #endif
4263 case TOK_BOOL:
4264 u = VT_BOOL;
4265 goto basic_type;
4266 case TOK_FLOAT:
4267 u = VT_FLOAT;
4268 goto basic_type;
4269 case TOK_DOUBLE:
4270 if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
4271 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
4272 } else {
4273 u = VT_DOUBLE;
4274 goto basic_type;
4276 next();
4277 break;
4278 case TOK_ENUM:
4279 struct_decl(&type1, VT_ENUM);
4280 basic_type2:
4281 u = type1.t;
4282 type->ref = type1.ref;
4283 goto basic_type1;
4284 case TOK_STRUCT:
4285 struct_decl(&type1, VT_STRUCT);
4286 goto basic_type2;
4287 case TOK_UNION:
4288 struct_decl(&type1, VT_UNION);
4289 goto basic_type2;
4291 /* type modifiers */
4292 case TOK_CONST1:
4293 case TOK_CONST2:
4294 case TOK_CONST3:
4295 type->t = t;
4296 parse_btype_qualify(type, VT_CONSTANT);
4297 t = type->t;
4298 next();
4299 break;
4300 case TOK_VOLATILE1:
4301 case TOK_VOLATILE2:
4302 case TOK_VOLATILE3:
4303 type->t = t;
4304 parse_btype_qualify(type, VT_VOLATILE);
4305 t = type->t;
4306 next();
4307 break;
4308 case TOK_SIGNED1:
4309 case TOK_SIGNED2:
4310 case TOK_SIGNED3:
4311 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
4312 tcc_error("signed and unsigned modifier");
4313 t |= VT_DEFSIGN;
4314 next();
4315 typespec_found = 1;
4316 break;
4317 case TOK_REGISTER:
4318 case TOK_AUTO:
4319 case TOK_RESTRICT1:
4320 case TOK_RESTRICT2:
4321 case TOK_RESTRICT3:
4322 next();
4323 break;
4324 case TOK_UNSIGNED:
4325 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
4326 tcc_error("signed and unsigned modifier");
4327 t |= VT_DEFSIGN | VT_UNSIGNED;
4328 next();
4329 typespec_found = 1;
4330 break;
4332 /* storage */
4333 case TOK_EXTERN:
4334 g = VT_EXTERN;
4335 goto storage;
4336 case TOK_STATIC:
4337 g = VT_STATIC;
4338 goto storage;
4339 case TOK_TYPEDEF:
4340 g = VT_TYPEDEF;
4341 goto storage;
4342 storage:
4343 if (t & (VT_EXTERN|VT_STATIC|VT_TYPEDEF) & ~g)
4344 tcc_error("multiple storage classes");
4345 t |= g;
4346 next();
4347 break;
4348 case TOK_INLINE1:
4349 case TOK_INLINE2:
4350 case TOK_INLINE3:
4351 t |= VT_INLINE;
4352 next();
4353 break;
4354 case TOK_NORETURN3:
4355 /* currently, no need to handle it because tcc does not
4356 track unused objects */
4357 next();
4358 break;
4359 /* GNUC attribute */
4360 case TOK_ATTRIBUTE1:
4361 case TOK_ATTRIBUTE2:
4362 parse_attribute(ad);
4363 if (ad->attr_mode) {
4364 u = ad->attr_mode -1;
4365 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
4367 continue;
4368 /* GNUC typeof */
4369 case TOK_TYPEOF1:
4370 case TOK_TYPEOF2:
4371 case TOK_TYPEOF3:
4372 next();
4373 parse_expr_type(&type1);
4374 /* remove all storage modifiers except typedef */
4375 type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
4376 if (type1.ref)
4377 sym_to_attr(ad, type1.ref);
4378 goto basic_type2;
4379 default:
4380 if (typespec_found)
4381 goto the_end;
4382 s = sym_find(tok);
4383 if (!s || !(s->type.t & VT_TYPEDEF))
4384 goto the_end;
4386 n = tok, next();
4387 if (tok == ':' && !in_generic) {
4388 /* ignore if it's a label */
4389 unget_tok(n);
4390 goto the_end;
4393 t &= ~(VT_BTYPE|VT_LONG);
4394 u = t & ~(VT_CONSTANT | VT_VOLATILE), t ^= u;
4395 type->t = (s->type.t & ~VT_TYPEDEF) | u;
4396 type->ref = s->type.ref;
4397 if (t)
4398 parse_btype_qualify(type, t);
4399 t = type->t;
4400 /* get attributes from typedef */
4401 sym_to_attr(ad, s);
4402 typespec_found = 1;
4403 st = bt = -2;
4404 break;
4406 type_found = 1;
4408 the_end:
4409 if (tcc_state->char_is_unsigned) {
4410 if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
4411 t |= VT_UNSIGNED;
4413 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
4414 bt = t & (VT_BTYPE|VT_LONG);
4415 if (bt == VT_LONG)
4416 t |= LONG_SIZE == 8 ? VT_LLONG : VT_INT;
4417 #ifdef TCC_TARGET_PE
4418 if (bt == VT_LDOUBLE)
4419 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_DOUBLE;
4420 #endif
4421 type->t = t;
4422 return type_found;
4425 /* convert a function parameter type (array to pointer and function to
4426 function pointer) */
4427 static inline void convert_parameter_type(CType *pt)
4429 /* remove const and volatile qualifiers (XXX: const could be used
4430 to indicate a const function parameter */
4431 pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
4432 /* array must be transformed to pointer according to ANSI C */
4433 pt->t &= ~VT_ARRAY;
4434 if ((pt->t & VT_BTYPE) == VT_FUNC) {
4435 mk_pointer(pt);
4439 ST_FUNC void parse_asm_str(CString *astr)
4441 skip('(');
4442 parse_mult_str(astr, "string constant");
4445 /* Parse an asm label and return the token */
4446 static int asm_label_instr(void)
4448 int v;
4449 CString astr;
4451 next();
4452 parse_asm_str(&astr);
4453 skip(')');
4454 #ifdef ASM_DEBUG
4455 printf("asm_alias: \"%s\"\n", (char *)astr.data);
4456 #endif
4457 v = tok_alloc(astr.data, astr.size - 1)->tok;
4458 cstr_free(&astr);
4459 return v;
4462 static int post_type(CType *type, AttributeDef *ad, int storage, int td)
4464 int n, l, t1, arg_size, align, unused_align;
4465 Sym **plast, *s, *first;
4466 AttributeDef ad1;
4467 CType pt;
4469 if (tok == '(') {
4470 /* function type, or recursive declarator (return if so) */
4471 next();
4472 if (td && !(td & TYPE_ABSTRACT))
4473 return 0;
4474 if (tok == ')')
4475 l = 0;
4476 else if (parse_btype(&pt, &ad1))
4477 l = FUNC_NEW;
4478 else if (td) {
4479 merge_attr (ad, &ad1);
4480 return 0;
4481 } else
4482 l = FUNC_OLD;
4483 first = NULL;
4484 plast = &first;
4485 arg_size = 0;
4486 if (l) {
4487 for(;;) {
4488 /* read param name and compute offset */
4489 if (l != FUNC_OLD) {
4490 if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
4491 break;
4492 type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT);
4493 if ((pt.t & VT_BTYPE) == VT_VOID)
4494 tcc_error("parameter declared as void");
4495 } else {
4496 n = tok;
4497 if (n < TOK_UIDENT)
4498 expect("identifier");
4499 pt.t = VT_VOID; /* invalid type */
4500 next();
4502 convert_parameter_type(&pt);
4503 arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
4504 s = sym_push(n | SYM_FIELD, &pt, 0, 0);
4505 *plast = s;
4506 plast = &s->next;
4507 if (tok == ')')
4508 break;
4509 skip(',');
4510 if (l == FUNC_NEW && tok == TOK_DOTS) {
4511 l = FUNC_ELLIPSIS;
4512 next();
4513 break;
4515 if (l == FUNC_NEW && !parse_btype(&pt, &ad1))
4516 tcc_error("invalid type");
4518 } else
4519 /* if no parameters, then old type prototype */
4520 l = FUNC_OLD;
4521 skip(')');
4522 /* NOTE: const is ignored in returned type as it has a special
4523 meaning in gcc / C++ */
4524 type->t &= ~VT_CONSTANT;
4525 /* some ancient pre-K&R C allows a function to return an array
4526 and the array brackets to be put after the arguments, such
4527 that "int c()[]" means something like "int[] c()" */
4528 if (tok == '[') {
4529 next();
4530 skip(']'); /* only handle simple "[]" */
4531 mk_pointer(type);
4533 /* we push a anonymous symbol which will contain the function prototype */
4534 ad->f.func_args = arg_size;
4535 ad->f.func_type = l;
4536 s = sym_push(SYM_FIELD, type, 0, 0);
4537 s->a = ad->a;
4538 s->f = ad->f;
4539 s->next = first;
4540 type->t = VT_FUNC;
4541 type->ref = s;
4542 } else if (tok == '[') {
4543 int saved_nocode_wanted = nocode_wanted;
4544 /* array definition */
4545 next();
4546 while (1) {
4547 /* XXX The optional type-quals and static should only be accepted
4548 in parameter decls. The '*' as well, and then even only
4549 in prototypes (not function defs). */
4550 switch (tok) {
4551 case TOK_RESTRICT1: case TOK_RESTRICT2: case TOK_RESTRICT3:
4552 case TOK_CONST1:
4553 case TOK_VOLATILE1:
4554 case TOK_STATIC:
4555 case '*':
4556 next();
4557 continue;
4558 default:
4559 break;
4561 break;
4563 n = -1;
4564 t1 = 0;
4565 if (tok != ']') {
4566 if (!local_stack || (storage & VT_STATIC))
4567 vpushi(expr_const());
4568 else {
4569 /* VLAs (which can only happen with local_stack && !VT_STATIC)
4570 length must always be evaluated, even under nocode_wanted,
4571 so that its size slot is initialized (e.g. under sizeof
4572 or typeof). */
4573 nocode_wanted = 0;
4574 gexpr();
4576 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4577 n = vtop->c.i;
4578 if (n < 0)
4579 tcc_error("invalid array size");
4580 } else {
4581 if (!is_integer_btype(vtop->type.t & VT_BTYPE))
4582 tcc_error("size of variable length array should be an integer");
4583 n = 0;
4584 t1 = VT_VLA;
4587 skip(']');
4588 /* parse next post type */
4589 post_type(type, ad, storage, 0);
4591 if ((type->t & VT_BTYPE) == VT_FUNC)
4592 tcc_error("declaration of an array of functions");
4593 if ((type->t & VT_BTYPE) == VT_VOID
4594 || type_size(type, &unused_align) < 0)
4595 tcc_error("declaration of an array of incomplete type elements");
4597 t1 |= type->t & VT_VLA;
4599 if (t1 & VT_VLA) {
4600 if (n < 0)
4601 tcc_error("need explicit inner array size in VLAs");
4602 loc -= type_size(&int_type, &align);
4603 loc &= -align;
4604 n = loc;
4606 vla_runtime_type_size(type, &align);
4607 gen_op('*');
4608 vset(&int_type, VT_LOCAL|VT_LVAL, n);
4609 vswap();
4610 vstore();
4612 if (n != -1)
4613 vpop();
4614 nocode_wanted = saved_nocode_wanted;
4616 /* we push an anonymous symbol which will contain the array
4617 element type */
4618 s = sym_push(SYM_FIELD, type, 0, n);
4619 type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
4620 type->ref = s;
4622 return 1;
4625 /* Parse a type declarator (except basic type), and return the type
4626 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4627 expected. 'type' should contain the basic type. 'ad' is the
4628 attribute definition of the basic type. It can be modified by
4629 type_decl(). If this (possibly abstract) declarator is a pointer chain
4630 it returns the innermost pointed to type (equals *type, but is a different
4631 pointer), otherwise returns type itself, that's used for recursive calls. */
4632 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td)
4634 CType *post, *ret;
4635 int qualifiers, storage;
4637 /* recursive type, remove storage bits first, apply them later again */
4638 storage = type->t & VT_STORAGE;
4639 type->t &= ~VT_STORAGE;
4640 post = ret = type;
4642 while (tok == '*') {
4643 qualifiers = 0;
4644 redo:
4645 next();
4646 switch(tok) {
4647 case TOK_CONST1:
4648 case TOK_CONST2:
4649 case TOK_CONST3:
4650 qualifiers |= VT_CONSTANT;
4651 goto redo;
4652 case TOK_VOLATILE1:
4653 case TOK_VOLATILE2:
4654 case TOK_VOLATILE3:
4655 qualifiers |= VT_VOLATILE;
4656 goto redo;
4657 case TOK_RESTRICT1:
4658 case TOK_RESTRICT2:
4659 case TOK_RESTRICT3:
4660 goto redo;
4661 /* XXX: clarify attribute handling */
4662 case TOK_ATTRIBUTE1:
4663 case TOK_ATTRIBUTE2:
4664 parse_attribute(ad);
4665 break;
4667 mk_pointer(type);
4668 type->t |= qualifiers;
4669 if (ret == type)
4670 /* innermost pointed to type is the one for the first derivation */
4671 ret = pointed_type(type);
4674 if (tok == '(') {
4675 /* This is possibly a parameter type list for abstract declarators
4676 ('int ()'), use post_type for testing this. */
4677 if (!post_type(type, ad, 0, td)) {
4678 /* It's not, so it's a nested declarator, and the post operations
4679 apply to the innermost pointed to type (if any). */
4680 /* XXX: this is not correct to modify 'ad' at this point, but
4681 the syntax is not clear */
4682 parse_attribute(ad);
4683 post = type_decl(type, ad, v, td);
4684 skip(')');
4685 } else
4686 goto abstract;
4687 } else if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
4688 /* type identifier */
4689 *v = tok;
4690 next();
4691 } else {
4692 abstract:
4693 if (!(td & TYPE_ABSTRACT))
4694 expect("identifier");
4695 *v = 0;
4697 post_type(post, ad, storage, 0);
4698 parse_attribute(ad);
4699 type->t |= storage;
4700 return ret;
4703 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
4704 ST_FUNC int lvalue_type(int t)
4706 int bt, r;
4707 r = VT_LVAL;
4708 bt = t & VT_BTYPE;
4709 if (bt == VT_BYTE || bt == VT_BOOL)
4710 r |= VT_LVAL_BYTE;
4711 else if (bt == VT_SHORT)
4712 r |= VT_LVAL_SHORT;
4713 else
4714 return r;
4715 if (t & VT_UNSIGNED)
4716 r |= VT_LVAL_UNSIGNED;
4717 return r;
4720 /* indirection with full error checking and bound check */
4721 ST_FUNC void indir(void)
4723 if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
4724 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
4725 return;
4726 expect("pointer");
4728 if (vtop->r & VT_LVAL)
4729 gv(RC_INT);
4730 vtop->type = *pointed_type(&vtop->type);
4731 /* Arrays and functions are never lvalues */
4732 if (!(vtop->type.t & VT_ARRAY) && !(vtop->type.t & VT_VLA)
4733 && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
4734 vtop->r |= lvalue_type(vtop->type.t);
4735 /* if bound checking, the referenced pointer must be checked */
4736 #ifdef CONFIG_TCC_BCHECK
4737 if (tcc_state->do_bounds_check)
4738 vtop->r |= VT_MUSTBOUND;
4739 #endif
4743 /* pass a parameter to a function and do type checking and casting */
4744 static void gfunc_param_typed(Sym *func, Sym *arg)
4746 int func_type;
4747 CType type;
4749 func_type = func->f.func_type;
4750 if (func_type == FUNC_OLD ||
4751 (func_type == FUNC_ELLIPSIS && arg == NULL)) {
4752 /* default casting : only need to convert float to double */
4753 if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
4754 gen_cast_s(VT_DOUBLE);
4755 } else if (vtop->type.t & VT_BITFIELD) {
4756 type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
4757 type.ref = vtop->type.ref;
4758 gen_cast(&type);
4760 } else if (arg == NULL) {
4761 tcc_error("too many arguments to function");
4762 } else {
4763 type = arg->type;
4764 type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
4765 gen_assign_cast(&type);
4769 /* parse an expression and return its type without any side effect. */
4770 static void expr_type(CType *type, void (*expr_fn)(void))
4772 nocode_wanted++;
4773 expr_fn();
4774 *type = vtop->type;
4775 vpop();
4776 nocode_wanted--;
4779 /* parse an expression of the form '(type)' or '(expr)' and return its
4780 type */
4781 static void parse_expr_type(CType *type)
4783 int n;
4784 AttributeDef ad;
4786 skip('(');
4787 if (parse_btype(type, &ad)) {
4788 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4789 } else {
4790 expr_type(type, gexpr);
4792 skip(')');
4795 static void parse_type(CType *type)
4797 AttributeDef ad;
4798 int n;
4800 if (!parse_btype(type, &ad)) {
4801 expect("type");
4803 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4806 static void parse_builtin_params(int nc, const char *args)
4808 char c, sep = '(';
4809 CType t;
4810 if (nc)
4811 nocode_wanted++;
4812 next();
4813 while ((c = *args++)) {
4814 skip(sep);
4815 sep = ',';
4816 switch (c) {
4817 case 'e': expr_eq(); continue;
4818 case 't': parse_type(&t); vpush(&t); continue;
4819 default: tcc_error("internal error"); break;
4822 skip(')');
4823 if (nc)
4824 nocode_wanted--;
4827 ST_FUNC void unary(void)
4829 int n, t, align, size, r, sizeof_caller;
4830 CType type;
4831 Sym *s;
4832 AttributeDef ad;
4834 sizeof_caller = in_sizeof;
4835 in_sizeof = 0;
4836 type.ref = NULL;
4837 /* XXX: GCC 2.95.3 does not generate a table although it should be
4838 better here */
4839 tok_next:
4840 switch(tok) {
4841 case TOK_EXTENSION:
4842 next();
4843 goto tok_next;
4844 case TOK_LCHAR:
4845 #ifdef TCC_TARGET_PE
4846 t = VT_SHORT|VT_UNSIGNED;
4847 goto push_tokc;
4848 #endif
4849 case TOK_CINT:
4850 case TOK_CCHAR:
4851 t = VT_INT;
4852 push_tokc:
4853 type.t = t;
4854 vsetc(&type, VT_CONST, &tokc);
4855 next();
4856 break;
4857 case TOK_CUINT:
4858 t = VT_INT | VT_UNSIGNED;
4859 goto push_tokc;
4860 case TOK_CLLONG:
4861 t = VT_LLONG;
4862 goto push_tokc;
4863 case TOK_CULLONG:
4864 t = VT_LLONG | VT_UNSIGNED;
4865 goto push_tokc;
4866 case TOK_CFLOAT:
4867 t = VT_FLOAT;
4868 goto push_tokc;
4869 case TOK_CDOUBLE:
4870 t = VT_DOUBLE;
4871 goto push_tokc;
4872 case TOK_CLDOUBLE:
4873 t = VT_LDOUBLE;
4874 goto push_tokc;
4875 case TOK_CLONG:
4876 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG;
4877 goto push_tokc;
4878 case TOK_CULONG:
4879 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG | VT_UNSIGNED;
4880 goto push_tokc;
4881 case TOK___FUNCTION__:
4882 if (!gnu_ext)
4883 goto tok_identifier;
4884 /* fall thru */
4885 case TOK___FUNC__:
4887 void *ptr;
4888 int len;
4889 /* special function name identifier */
4890 len = strlen(funcname) + 1;
4891 /* generate char[len] type */
4892 type.t = VT_BYTE;
4893 mk_pointer(&type);
4894 type.t |= VT_ARRAY;
4895 type.ref->c = len;
4896 vpush_ref(&type, data_section, data_section->data_offset, len);
4897 if (!NODATA_WANTED) {
4898 ptr = section_ptr_add(data_section, len);
4899 memcpy(ptr, funcname, len);
4901 next();
4903 break;
4904 case TOK_LSTR:
4905 #ifdef TCC_TARGET_PE
4906 t = VT_SHORT | VT_UNSIGNED;
4907 #else
4908 t = VT_INT;
4909 #endif
4910 goto str_init;
4911 case TOK_STR:
4912 /* string parsing */
4913 t = VT_BYTE;
4914 if (tcc_state->char_is_unsigned)
4915 t = VT_BYTE | VT_UNSIGNED;
4916 str_init:
4917 if (tcc_state->warn_write_strings)
4918 t |= VT_CONSTANT;
4919 type.t = t;
4920 mk_pointer(&type);
4921 type.t |= VT_ARRAY;
4922 memset(&ad, 0, sizeof(AttributeDef));
4923 decl_initializer_alloc(&type, &ad, VT_CONST, 2, 0, 0);
4924 break;
4925 case '(':
4926 next();
4927 /* cast ? */
4928 if (parse_btype(&type, &ad)) {
4929 type_decl(&type, &ad, &n, TYPE_ABSTRACT);
4930 skip(')');
4931 /* check ISOC99 compound literal */
4932 if (tok == '{') {
4933 /* data is allocated locally by default */
4934 if (global_expr)
4935 r = VT_CONST;
4936 else
4937 r = VT_LOCAL;
4938 /* all except arrays are lvalues */
4939 if (!(type.t & VT_ARRAY))
4940 r |= lvalue_type(type.t);
4941 memset(&ad, 0, sizeof(AttributeDef));
4942 decl_initializer_alloc(&type, &ad, r, 1, 0, 0);
4943 } else {
4944 if (sizeof_caller) {
4945 vpush(&type);
4946 return;
4948 unary();
4949 gen_cast(&type);
4951 } else if (tok == '{') {
4952 int saved_nocode_wanted = nocode_wanted;
4953 if (const_wanted)
4954 tcc_error("expected constant");
4955 /* save all registers */
4956 save_regs(0);
4957 /* statement expression : we do not accept break/continue
4958 inside as GCC does. We do retain the nocode_wanted state,
4959 as statement expressions can't ever be entered from the
4960 outside, so any reactivation of code emission (from labels
4961 or loop heads) can be disabled again after the end of it. */
4962 block(1);
4963 nocode_wanted = saved_nocode_wanted;
4964 skip(')');
4965 } else {
4966 gexpr();
4967 skip(')');
4969 break;
4970 case '*':
4971 next();
4972 unary();
4973 indir();
4974 break;
4975 case '&':
4976 next();
4977 unary();
4978 /* functions names must be treated as function pointers,
4979 except for unary '&' and sizeof. Since we consider that
4980 functions are not lvalues, we only have to handle it
4981 there and in function calls. */
4982 /* arrays can also be used although they are not lvalues */
4983 if ((vtop->type.t & VT_BTYPE) != VT_FUNC &&
4984 !(vtop->type.t & VT_ARRAY))
4985 test_lvalue();
4986 mk_pointer(&vtop->type);
4987 gaddrof();
4988 break;
4989 case '!':
4990 next();
4991 unary();
4992 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4993 gen_cast_s(VT_BOOL);
4994 vtop->c.i = !vtop->c.i;
4995 } else if (vtop->r == VT_CMP) {
4996 vtop->cmp_op ^= 1;
4997 n = vtop->jfalse, vtop->jfalse = vtop->jtrue, vtop->jtrue = n;
4998 } else {
4999 vpushi(0);
5000 gen_op(TOK_EQ);
5002 break;
5003 case '~':
5004 next();
5005 unary();
5006 vpushi(-1);
5007 gen_op('^');
5008 break;
5009 case '+':
5010 next();
5011 unary();
5012 if ((vtop->type.t & VT_BTYPE) == VT_PTR)
5013 tcc_error("pointer not accepted for unary plus");
5014 /* In order to force cast, we add zero, except for floating point
5015 where we really need an noop (otherwise -0.0 will be transformed
5016 into +0.0). */
5017 if (!is_float(vtop->type.t)) {
5018 vpushi(0);
5019 gen_op('+');
5021 break;
5022 case TOK_SIZEOF:
5023 case TOK_ALIGNOF1:
5024 case TOK_ALIGNOF2:
5025 case TOK_ALIGNOF3:
5026 t = tok;
5027 next();
5028 in_sizeof++;
5029 expr_type(&type, unary); /* Perform a in_sizeof = 0; */
5030 s = NULL;
5031 if (vtop[1].r & VT_SYM)
5032 s = vtop[1].sym; /* hack: accessing previous vtop */
5033 size = type_size(&type, &align);
5034 if (s && s->a.aligned)
5035 align = 1 << (s->a.aligned - 1);
5036 if (t == TOK_SIZEOF) {
5037 if (!(type.t & VT_VLA)) {
5038 if (size < 0)
5039 tcc_error("sizeof applied to an incomplete type");
5040 vpushs(size);
5041 } else {
5042 vla_runtime_type_size(&type, &align);
5044 } else {
5045 vpushs(align);
5047 vtop->type.t |= VT_UNSIGNED;
5048 break;
5050 case TOK_builtin_expect:
5051 /* __builtin_expect is a no-op for now */
5052 parse_builtin_params(0, "ee");
5053 vpop();
5054 break;
5055 case TOK_builtin_types_compatible_p:
5056 parse_builtin_params(0, "tt");
5057 vtop[-1].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
5058 vtop[0].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
5059 n = is_compatible_types(&vtop[-1].type, &vtop[0].type);
5060 vtop -= 2;
5061 vpushi(n);
5062 break;
5063 case TOK_builtin_choose_expr:
5065 int64_t c;
5066 next();
5067 skip('(');
5068 c = expr_const64();
5069 skip(',');
5070 if (!c) {
5071 nocode_wanted++;
5073 expr_eq();
5074 if (!c) {
5075 vpop();
5076 nocode_wanted--;
5078 skip(',');
5079 if (c) {
5080 nocode_wanted++;
5082 expr_eq();
5083 if (c) {
5084 vpop();
5085 nocode_wanted--;
5087 skip(')');
5089 break;
5090 case TOK_builtin_constant_p:
5091 parse_builtin_params(1, "e");
5092 n = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
5093 vtop--;
5094 vpushi(n);
5095 break;
5096 case TOK_builtin_frame_address:
5097 case TOK_builtin_return_address:
5099 int tok1 = tok;
5100 int level;
5101 next();
5102 skip('(');
5103 if (tok != TOK_CINT) {
5104 tcc_error("%s only takes positive integers",
5105 tok1 == TOK_builtin_return_address ?
5106 "__builtin_return_address" :
5107 "__builtin_frame_address");
5109 level = (uint32_t)tokc.i;
5110 next();
5111 skip(')');
5112 type.t = VT_VOID;
5113 mk_pointer(&type);
5114 vset(&type, VT_LOCAL, 0); /* local frame */
5115 while (level--) {
5116 mk_pointer(&vtop->type);
5117 indir(); /* -> parent frame */
5119 if (tok1 == TOK_builtin_return_address) {
5120 // assume return address is just above frame pointer on stack
5121 vpushi(PTR_SIZE);
5122 gen_op('+');
5123 mk_pointer(&vtop->type);
5124 indir();
5127 break;
5128 #ifdef TCC_TARGET_X86_64
5129 #ifdef TCC_TARGET_PE
5130 case TOK_builtin_va_start:
5131 parse_builtin_params(0, "ee");
5132 r = vtop->r & VT_VALMASK;
5133 if (r == VT_LLOCAL)
5134 r = VT_LOCAL;
5135 if (r != VT_LOCAL)
5136 tcc_error("__builtin_va_start expects a local variable");
5137 vtop->r = r;
5138 vtop->type = char_pointer_type;
5139 vtop->c.i += 8;
5140 vstore();
5141 break;
5142 #else
5143 case TOK_builtin_va_arg_types:
5144 parse_builtin_params(0, "t");
5145 vpushi(classify_x86_64_va_arg(&vtop->type));
5146 vswap();
5147 vpop();
5148 break;
5149 #endif
5150 #endif
5152 #ifdef TCC_TARGET_ARM64
5153 case TOK___va_start: {
5154 parse_builtin_params(0, "ee");
5155 //xx check types
5156 gen_va_start();
5157 vpushi(0);
5158 vtop->type.t = VT_VOID;
5159 break;
5161 case TOK___va_arg: {
5162 parse_builtin_params(0, "et");
5163 type = vtop->type;
5164 vpop();
5165 //xx check types
5166 gen_va_arg(&type);
5167 vtop->type = type;
5168 break;
5170 case TOK___arm64_clear_cache: {
5171 parse_builtin_params(0, "ee");
5172 gen_clear_cache();
5173 vpushi(0);
5174 vtop->type.t = VT_VOID;
5175 break;
5177 #endif
5178 /* pre operations */
5179 case TOK_INC:
5180 case TOK_DEC:
5181 t = tok;
5182 next();
5183 unary();
5184 inc(0, t);
5185 break;
5186 case '-':
5187 next();
5188 unary();
5189 t = vtop->type.t & VT_BTYPE;
5190 if (is_float(t)) {
5191 /* In IEEE negate(x) isn't subtract(0,x), but rather
5192 subtract(-0, x). */
5193 vpush(&vtop->type);
5194 if (t == VT_FLOAT)
5195 vtop->c.f = -1.0 * 0.0;
5196 else if (t == VT_DOUBLE)
5197 vtop->c.d = -1.0 * 0.0;
5198 else
5199 vtop->c.ld = -1.0 * 0.0;
5200 } else
5201 vpushi(0);
5202 vswap();
5203 gen_op('-');
5204 break;
5205 case TOK_LAND:
5206 if (!gnu_ext)
5207 goto tok_identifier;
5208 next();
5209 /* allow to take the address of a label */
5210 if (tok < TOK_UIDENT)
5211 expect("label identifier");
5212 s = label_find(tok);
5213 if (!s) {
5214 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
5215 } else {
5216 if (s->r == LABEL_DECLARED)
5217 s->r = LABEL_FORWARD;
5219 if (!s->type.t) {
5220 s->type.t = VT_VOID;
5221 mk_pointer(&s->type);
5222 s->type.t |= VT_STATIC;
5224 vpushsym(&s->type, s);
5225 next();
5226 break;
5228 case TOK_GENERIC:
5230 CType controlling_type;
5231 int has_default = 0;
5232 int has_match = 0;
5233 int learn = 0;
5234 TokenString *str = NULL;
5235 int saved_const_wanted = const_wanted;
5237 next();
5238 skip('(');
5239 const_wanted = 0;
5240 expr_type(&controlling_type, expr_eq);
5241 controlling_type.t &= ~(VT_CONSTANT | VT_VOLATILE | VT_ARRAY);
5242 if ((controlling_type.t & VT_BTYPE) == VT_FUNC)
5243 mk_pointer(&controlling_type);
5244 const_wanted = saved_const_wanted;
5245 for (;;) {
5246 learn = 0;
5247 skip(',');
5248 if (tok == TOK_DEFAULT) {
5249 if (has_default)
5250 tcc_error("too many 'default'");
5251 has_default = 1;
5252 if (!has_match)
5253 learn = 1;
5254 next();
5255 } else {
5256 AttributeDef ad_tmp;
5257 int itmp;
5258 CType cur_type;
5260 in_generic++;
5261 parse_btype(&cur_type, &ad_tmp);
5262 in_generic--;
5264 type_decl(&cur_type, &ad_tmp, &itmp, TYPE_ABSTRACT);
5265 if (compare_types(&controlling_type, &cur_type, 0)) {
5266 if (has_match) {
5267 tcc_error("type match twice");
5269 has_match = 1;
5270 learn = 1;
5273 skip(':');
5274 if (learn) {
5275 if (str)
5276 tok_str_free(str);
5277 skip_or_save_block(&str);
5278 } else {
5279 skip_or_save_block(NULL);
5281 if (tok == ')')
5282 break;
5284 if (!str) {
5285 char buf[60];
5286 type_to_str(buf, sizeof buf, &controlling_type, NULL);
5287 tcc_error("type '%s' does not match any association", buf);
5289 begin_macro(str, 1);
5290 next();
5291 expr_eq();
5292 if (tok != TOK_EOF)
5293 expect(",");
5294 end_macro();
5295 next();
5296 break;
5298 // special qnan , snan and infinity values
5299 case TOK___NAN__:
5300 n = 0x7fc00000;
5301 special_math_val:
5302 vpushi(n);
5303 vtop->type.t = VT_FLOAT;
5304 next();
5305 break;
5306 case TOK___SNAN__:
5307 n = 0x7f800001;
5308 goto special_math_val;
5309 case TOK___INF__:
5310 n = 0x7f800000;
5311 goto special_math_val;
5313 default:
5314 tok_identifier:
5315 t = tok;
5316 next();
5317 if (t < TOK_UIDENT)
5318 expect("identifier");
5319 s = sym_find(t);
5320 if (!s || IS_ASM_SYM(s)) {
5321 const char *name = get_tok_str(t, NULL);
5322 if (tok != '(')
5323 tcc_error("'%s' undeclared", name);
5324 /* for simple function calls, we tolerate undeclared
5325 external reference to int() function */
5326 if (tcc_state->warn_implicit_function_declaration
5327 #ifdef TCC_TARGET_PE
5328 /* people must be warned about using undeclared WINAPI functions
5329 (which usually start with uppercase letter) */
5330 || (name[0] >= 'A' && name[0] <= 'Z')
5331 #endif
5333 tcc_warning("implicit declaration of function '%s'", name);
5334 s = external_global_sym(t, &func_old_type);
5337 r = s->r;
5338 /* A symbol that has a register is a local register variable,
5339 which starts out as VT_LOCAL value. */
5340 if ((r & VT_VALMASK) < VT_CONST)
5341 r = (r & ~VT_VALMASK) | VT_LOCAL;
5343 vset(&s->type, r, s->c);
5344 /* Point to s as backpointer (even without r&VT_SYM).
5345 Will be used by at least the x86 inline asm parser for
5346 regvars. */
5347 vtop->sym = s;
5349 if (r & VT_SYM) {
5350 vtop->c.i = 0;
5351 } else if (r == VT_CONST && IS_ENUM_VAL(s->type.t)) {
5352 vtop->c.i = s->enum_val;
5354 break;
5357 /* post operations */
5358 while (1) {
5359 if (tok == TOK_INC || tok == TOK_DEC) {
5360 inc(1, tok);
5361 next();
5362 } else if (tok == '.' || tok == TOK_ARROW || tok == TOK_CDOUBLE) {
5363 int qualifiers, cumofs = 0;
5364 /* field */
5365 if (tok == TOK_ARROW)
5366 indir();
5367 qualifiers = vtop->type.t & (VT_CONSTANT | VT_VOLATILE);
5368 test_lvalue();
5369 gaddrof();
5370 /* expect pointer on structure */
5371 if ((vtop->type.t & VT_BTYPE) != VT_STRUCT)
5372 expect("struct or union");
5373 if (tok == TOK_CDOUBLE)
5374 expect("field name");
5375 next();
5376 if (tok == TOK_CINT || tok == TOK_CUINT)
5377 expect("field name");
5378 s = find_field(&vtop->type, tok, &cumofs);
5379 if (!s)
5380 tcc_error("field not found: %s", get_tok_str(tok & ~SYM_FIELD, &tokc));
5381 /* add field offset to pointer */
5382 vtop->type = char_pointer_type; /* change type to 'char *' */
5383 vpushi(cumofs + s->c);
5384 gen_op('+');
5385 /* change type to field type, and set to lvalue */
5386 vtop->type = s->type;
5387 vtop->type.t |= qualifiers;
5388 /* an array is never an lvalue */
5389 if (!(vtop->type.t & VT_ARRAY)) {
5390 vtop->r |= lvalue_type(vtop->type.t);
5391 #ifdef CONFIG_TCC_BCHECK
5392 /* if bound checking, the referenced pointer must be checked */
5393 if (tcc_state->do_bounds_check && (vtop->r & VT_VALMASK) != VT_LOCAL)
5394 vtop->r |= VT_MUSTBOUND;
5395 #endif
5397 next();
5398 } else if (tok == '[') {
5399 next();
5400 gexpr();
5401 gen_op('+');
5402 indir();
5403 skip(']');
5404 } else if (tok == '(') {
5405 SValue ret;
5406 Sym *sa;
5407 int nb_args, ret_nregs, ret_align, regsize, variadic;
5409 /* function call */
5410 if ((vtop->type.t & VT_BTYPE) != VT_FUNC) {
5411 /* pointer test (no array accepted) */
5412 if ((vtop->type.t & (VT_BTYPE | VT_ARRAY)) == VT_PTR) {
5413 vtop->type = *pointed_type(&vtop->type);
5414 if ((vtop->type.t & VT_BTYPE) != VT_FUNC)
5415 goto error_func;
5416 } else {
5417 error_func:
5418 expect("function pointer");
5420 } else {
5421 vtop->r &= ~VT_LVAL; /* no lvalue */
5423 /* get return type */
5424 s = vtop->type.ref;
5425 next();
5426 sa = s->next; /* first parameter */
5427 nb_args = regsize = 0;
5428 ret.r2 = VT_CONST;
5429 /* compute first implicit argument if a structure is returned */
5430 if ((s->type.t & VT_BTYPE) == VT_STRUCT) {
5431 variadic = (s->f.func_type == FUNC_ELLIPSIS);
5432 ret_nregs = gfunc_sret(&s->type, variadic, &ret.type,
5433 &ret_align, &regsize);
5434 if (!ret_nregs) {
5435 /* get some space for the returned structure */
5436 size = type_size(&s->type, &align);
5437 #ifdef TCC_TARGET_ARM64
5438 /* On arm64, a small struct is return in registers.
5439 It is much easier to write it to memory if we know
5440 that we are allowed to write some extra bytes, so
5441 round the allocated space up to a power of 2: */
5442 if (size < 16)
5443 while (size & (size - 1))
5444 size = (size | (size - 1)) + 1;
5445 #endif
5446 loc = (loc - size) & -align;
5447 ret.type = s->type;
5448 ret.r = VT_LOCAL | VT_LVAL;
5449 /* pass it as 'int' to avoid structure arg passing
5450 problems */
5451 vseti(VT_LOCAL, loc);
5452 ret.c = vtop->c;
5453 nb_args++;
5455 } else {
5456 ret_nregs = 1;
5457 ret.type = s->type;
5460 if (ret_nregs) {
5461 /* return in register */
5462 if (is_float(ret.type.t)) {
5463 ret.r = reg_fret(ret.type.t);
5464 #ifdef TCC_TARGET_X86_64
5465 if ((ret.type.t & VT_BTYPE) == VT_QFLOAT)
5466 ret.r2 = REG_QRET;
5467 #endif
5468 } else {
5469 #ifndef TCC_TARGET_ARM64
5470 #ifdef TCC_TARGET_X86_64
5471 if ((ret.type.t & VT_BTYPE) == VT_QLONG)
5472 #else
5473 if ((ret.type.t & VT_BTYPE) == VT_LLONG)
5474 #endif
5475 ret.r2 = REG_LRET;
5476 #endif
5477 ret.r = REG_IRET;
5479 ret.c.i = 0;
5481 if (tok != ')') {
5482 for(;;) {
5483 expr_eq();
5484 gfunc_param_typed(s, sa);
5485 nb_args++;
5486 if (sa)
5487 sa = sa->next;
5488 if (tok == ')')
5489 break;
5490 skip(',');
5493 if (sa)
5494 tcc_error("too few arguments to function");
5495 skip(')');
5496 gfunc_call(nb_args);
5498 /* return value */
5499 for (r = ret.r + ret_nregs + !ret_nregs; r-- > ret.r;) {
5500 vsetc(&ret.type, r, &ret.c);
5501 vtop->r2 = ret.r2; /* Loop only happens when r2 is VT_CONST */
5504 /* handle packed struct return */
5505 if (((s->type.t & VT_BTYPE) == VT_STRUCT) && ret_nregs) {
5506 int addr, offset;
5508 size = type_size(&s->type, &align);
5509 /* We're writing whole regs often, make sure there's enough
5510 space. Assume register size is power of 2. */
5511 if (regsize > align)
5512 align = regsize;
5513 loc = (loc - size) & -align;
5514 addr = loc;
5515 offset = 0;
5516 for (;;) {
5517 vset(&ret.type, VT_LOCAL | VT_LVAL, addr + offset);
5518 vswap();
5519 vstore();
5520 vtop--;
5521 if (--ret_nregs == 0)
5522 break;
5523 offset += regsize;
5525 vset(&s->type, VT_LOCAL | VT_LVAL, addr);
5527 if (s->f.func_noreturn)
5528 CODE_OFF();
5529 } else {
5530 break;
5535 ST_FUNC void expr_prod(void)
5537 int t;
5539 unary();
5540 while (tok == '*' || tok == '/' || tok == '%') {
5541 t = tok;
5542 next();
5543 unary();
5544 gen_op(t);
5548 ST_FUNC void expr_sum(void)
5550 int t;
5552 expr_prod();
5553 while (tok == '+' || tok == '-') {
5554 t = tok;
5555 next();
5556 expr_prod();
5557 gen_op(t);
5561 static void expr_shift(void)
5563 int t;
5565 expr_sum();
5566 while (tok == TOK_SHL || tok == TOK_SAR) {
5567 t = tok;
5568 next();
5569 expr_sum();
5570 gen_op(t);
5574 static void expr_cmp(void)
5576 int t;
5578 expr_shift();
5579 while ((tok >= TOK_ULE && tok <= TOK_GT) ||
5580 tok == TOK_ULT || tok == TOK_UGE) {
5581 t = tok;
5582 next();
5583 expr_shift();
5584 gen_op(t);
5588 static void expr_cmpeq(void)
5590 int t;
5592 expr_cmp();
5593 while (tok == TOK_EQ || tok == TOK_NE) {
5594 t = tok;
5595 next();
5596 expr_cmp();
5597 gen_op(t);
5601 static void expr_and(void)
5603 expr_cmpeq();
5604 while (tok == '&') {
5605 next();
5606 expr_cmpeq();
5607 gen_op('&');
5611 static void expr_xor(void)
5613 expr_and();
5614 while (tok == '^') {
5615 next();
5616 expr_and();
5617 gen_op('^');
5621 static void expr_or(void)
5623 expr_xor();
5624 while (tok == '|') {
5625 next();
5626 expr_xor();
5627 gen_op('|');
5631 static int condition_3way(void);
5633 static void expr_landor(void(*e_fn)(void), int e_op, int i)
5635 int t = 0, cc = 1, f = 0, c;
5636 for(;;) {
5637 c = f ? i : condition_3way();
5638 if (c < 0) {
5639 save_regs(1), cc = 0;
5640 } else if (c != i) {
5641 nocode_wanted++, f = 1;
5643 if (tok != e_op) {
5644 if (cc || f) {
5645 vpop();
5646 vpushi(i ^ f);
5647 gsym(t);
5648 nocode_wanted -= f;
5649 } else {
5650 gvtst_set(i, t);
5652 break;
5654 if (c < 0)
5655 t = gvtst(i, t);
5656 else
5657 vpop();
5658 next();
5659 e_fn();
5663 static void expr_land(void)
5665 expr_or();
5666 if (tok == TOK_LAND)
5667 expr_landor(expr_or, TOK_LAND, 1);
5670 static void expr_lor(void)
5672 expr_land();
5673 if (tok == TOK_LOR)
5674 expr_landor(expr_land, TOK_LOR, 0);
5677 /* Assuming vtop is a value used in a conditional context
5678 (i.e. compared with zero) return 0 if it's false, 1 if
5679 true and -1 if it can't be statically determined. */
5680 static int condition_3way(void)
5682 int c = -1;
5683 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
5684 (!(vtop->r & VT_SYM) || !vtop->sym->a.weak)) {
5685 vdup();
5686 gen_cast_s(VT_BOOL);
5687 c = vtop->c.i;
5688 vpop();
5690 return c;
5693 static int is_cond_bool(SValue *sv)
5695 if ((sv->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST
5696 && (sv->type.t & VT_BTYPE) == VT_INT)
5697 return (unsigned)sv->c.i < 2;
5698 if (sv->r == VT_CMP)
5699 return 1;
5700 return 0;
5703 static void expr_cond(void)
5705 int tt, u, r1, r2, rc, t1, t2, bt1, bt2, islv, c, g;
5706 SValue sv;
5707 CType type, type1, type2;
5708 int ncw_prev;
5710 expr_lor();
5711 if (tok == '?') {
5712 next();
5713 c = condition_3way();
5714 g = (tok == ':' && gnu_ext);
5715 tt = 0;
5716 if (!g) {
5717 if (c < 0) {
5718 save_regs(1);
5719 tt = gvtst(1, 0);
5720 } else {
5721 vpop();
5723 } else if (c < 0) {
5724 /* needed to avoid having different registers saved in
5725 each branch */
5726 rc = RC_INT;
5727 if (is_float(vtop->type.t)) {
5728 rc = RC_FLOAT;
5729 #ifdef TCC_TARGET_X86_64
5730 if ((vtop->type.t & VT_BTYPE) == VT_LDOUBLE) {
5731 rc = RC_ST0;
5733 #endif
5735 gv(rc);
5736 save_regs(1);
5737 gv_dup();
5738 tt = gvtst(0, 0);
5741 ncw_prev = nocode_wanted;
5742 if (1) {
5743 if (c == 0)
5744 nocode_wanted++;
5745 if (!g)
5746 gexpr();
5748 if (c < 0 && vtop->r == VT_CMP) {
5749 t1 = gvtst(0, 0);
5750 vpushi(0);
5751 gvtst_set(0, t1);
5754 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
5755 mk_pointer(&vtop->type);
5756 type1 = vtop->type;
5757 sv = *vtop; /* save value to handle it later */
5758 vtop--; /* no vpop so that FP stack is not flushed */
5760 if (g) {
5761 u = tt;
5762 } else if (c < 0) {
5763 u = gjmp(0);
5764 gsym(tt);
5765 } else
5766 u = 0;
5768 nocode_wanted = ncw_prev;
5769 if (c == 1)
5770 nocode_wanted++;
5771 skip(':');
5772 expr_cond();
5774 if (c < 0 && is_cond_bool(vtop) && is_cond_bool(&sv)) {
5775 if (sv.r == VT_CMP) {
5776 t1 = sv.jtrue;
5777 t2 = u;
5778 } else {
5779 t1 = gvtst(0, 0);
5780 t2 = gjmp(0);
5781 gsym(u);
5782 vpushv(&sv);
5784 gvtst_set(0, t1);
5785 gvtst_set(1, t2);
5786 nocode_wanted = ncw_prev;
5787 // tcc_warning("two conditions expr_cond");
5788 return;
5791 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
5792 mk_pointer(&vtop->type);
5793 type2=vtop->type;
5794 t1 = type1.t;
5795 bt1 = t1 & VT_BTYPE;
5796 t2 = type2.t;
5797 bt2 = t2 & VT_BTYPE;
5798 type.ref = NULL;
5800 /* cast operands to correct type according to ISOC rules */
5801 if (bt1 == VT_VOID || bt2 == VT_VOID) {
5802 type.t = VT_VOID; /* NOTE: as an extension, we accept void on only one side */
5803 } else if (is_float(bt1) || is_float(bt2)) {
5804 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
5805 type.t = VT_LDOUBLE;
5807 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
5808 type.t = VT_DOUBLE;
5809 } else {
5810 type.t = VT_FLOAT;
5812 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
5813 /* cast to biggest op */
5814 type.t = VT_LLONG | VT_LONG;
5815 if (bt1 == VT_LLONG)
5816 type.t &= t1;
5817 if (bt2 == VT_LLONG)
5818 type.t &= t2;
5819 /* convert to unsigned if it does not fit in a long long */
5820 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
5821 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
5822 type.t |= VT_UNSIGNED;
5823 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
5824 /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
5825 /* If one is a null ptr constant the result type
5826 is the other. */
5827 if (is_null_pointer (vtop)) type = type1;
5828 else if (is_null_pointer (&sv)) type = type2;
5829 else if (bt1 != bt2)
5830 tcc_error("incompatible types in conditional expressions");
5831 else {
5832 CType *pt1 = pointed_type(&type1);
5833 CType *pt2 = pointed_type(&type2);
5834 int pbt1 = pt1->t & VT_BTYPE;
5835 int pbt2 = pt2->t & VT_BTYPE;
5836 int newquals, copied = 0;
5837 /* pointers to void get preferred, otherwise the
5838 pointed to types minus qualifs should be compatible */
5839 type = (pbt1 == VT_VOID) ? type1 : type2;
5840 if (pbt1 != VT_VOID && pbt2 != VT_VOID) {
5841 if(!compare_types(pt1, pt2, 1/*unqualif*/))
5842 tcc_warning("pointer type mismatch in conditional expression\n");
5844 /* combine qualifs */
5845 newquals = ((pt1->t | pt2->t) & (VT_CONSTANT | VT_VOLATILE));
5846 if ((~pointed_type(&type)->t & (VT_CONSTANT | VT_VOLATILE))
5847 & newquals)
5849 /* copy the pointer target symbol */
5850 type.ref = sym_push(SYM_FIELD, &type.ref->type,
5851 0, type.ref->c);
5852 copied = 1;
5853 pointed_type(&type)->t |= newquals;
5855 /* pointers to incomplete arrays get converted to
5856 pointers to completed ones if possible */
5857 if (pt1->t & VT_ARRAY
5858 && pt2->t & VT_ARRAY
5859 && pointed_type(&type)->ref->c < 0
5860 && (pt1->ref->c > 0 || pt2->ref->c > 0))
5862 if (!copied)
5863 type.ref = sym_push(SYM_FIELD, &type.ref->type,
5864 0, type.ref->c);
5865 pointed_type(&type)->ref =
5866 sym_push(SYM_FIELD, &pointed_type(&type)->ref->type,
5867 0, pointed_type(&type)->ref->c);
5868 pointed_type(&type)->ref->c =
5869 0 < pt1->ref->c ? pt1->ref->c : pt2->ref->c;
5872 } else if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
5873 /* XXX: test structure compatibility */
5874 type = bt1 == VT_STRUCT ? type1 : type2;
5875 } else {
5876 /* integer operations */
5877 type.t = VT_INT | (VT_LONG & (t1 | t2));
5878 /* convert to unsigned if it does not fit in an integer */
5879 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
5880 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
5881 type.t |= VT_UNSIGNED;
5883 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
5884 that `(expr ? a : b).mem` does not error with "lvalue expected" */
5885 islv = (vtop->r & VT_LVAL) && (sv.r & VT_LVAL) && VT_STRUCT == (type.t & VT_BTYPE);
5887 /* now we convert second operand */
5888 if (c != 1) {
5889 gen_cast(&type);
5890 if (islv) {
5891 mk_pointer(&vtop->type);
5892 gaddrof();
5893 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5894 gaddrof();
5897 rc = RC_INT;
5898 if (is_float(type.t)) {
5899 rc = RC_FLOAT;
5900 #ifdef TCC_TARGET_X86_64
5901 if ((type.t & VT_BTYPE) == VT_LDOUBLE) {
5902 rc = RC_ST0;
5904 #endif
5905 } else if ((type.t & VT_BTYPE) == VT_LLONG) {
5906 /* for long longs, we use fixed registers to avoid having
5907 to handle a complicated move */
5908 rc = RC_IRET;
5911 tt = r2 = 0;
5912 if (c < 0) {
5913 r2 = gv(rc);
5914 tt = gjmp(0);
5916 gsym(u);
5918 /* this is horrible, but we must also convert first
5919 operand */
5920 if (c != 0) {
5921 *vtop = sv;
5922 gen_cast(&type);
5923 if (islv) {
5924 mk_pointer(&vtop->type);
5925 gaddrof();
5926 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5927 gaddrof();
5930 if (c < 0) {
5931 r1 = gv(rc);
5932 move_reg(r2, r1, type.t);
5933 vtop->r = r2;
5934 gsym(tt);
5937 if (islv)
5938 indir();
5940 nocode_wanted = ncw_prev;
5944 static void expr_eq(void)
5946 int t;
5948 expr_cond();
5949 if (tok == '=' ||
5950 (tok >= TOK_A_MOD && tok <= TOK_A_DIV) ||
5951 tok == TOK_A_XOR || tok == TOK_A_OR ||
5952 tok == TOK_A_SHL || tok == TOK_A_SAR) {
5953 test_lvalue();
5954 t = tok;
5955 next();
5956 if (t == '=') {
5957 expr_eq();
5958 } else {
5959 vdup();
5960 expr_eq();
5961 gen_op(t & 0x7f);
5963 vstore();
5967 ST_FUNC void gexpr(void)
5969 while (1) {
5970 expr_eq();
5971 if (tok != ',')
5972 break;
5973 vpop();
5974 next();
5978 /* parse a constant expression and return value in vtop. */
5979 static void expr_const1(void)
5981 const_wanted++;
5982 nocode_wanted++;
5983 expr_cond();
5984 nocode_wanted--;
5985 const_wanted--;
5988 /* parse an integer constant and return its value. */
5989 static inline int64_t expr_const64(void)
5991 int64_t c;
5992 expr_const1();
5993 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
5994 expect("constant expression");
5995 c = vtop->c.i;
5996 vpop();
5997 return c;
6000 /* parse an integer constant and return its value.
6001 Complain if it doesn't fit 32bit (signed or unsigned). */
6002 ST_FUNC int expr_const(void)
6004 int c;
6005 int64_t wc = expr_const64();
6006 c = wc;
6007 if (c != wc && (unsigned)c != wc)
6008 tcc_error("constant exceeds 32 bit");
6009 return c;
6012 /* ------------------------------------------------------------------------- */
6013 /* return from function */
6015 #ifndef TCC_TARGET_ARM64
6016 static void gfunc_return(CType *func_type)
6018 if ((func_type->t & VT_BTYPE) == VT_STRUCT) {
6019 CType type, ret_type;
6020 int ret_align, ret_nregs, regsize;
6021 ret_nregs = gfunc_sret(func_type, func_var, &ret_type,
6022 &ret_align, &regsize);
6023 if (0 == ret_nregs) {
6024 /* if returning structure, must copy it to implicit
6025 first pointer arg location */
6026 type = *func_type;
6027 mk_pointer(&type);
6028 vset(&type, VT_LOCAL | VT_LVAL, func_vc);
6029 indir();
6030 vswap();
6031 /* copy structure value to pointer */
6032 vstore();
6033 } else {
6034 /* returning structure packed into registers */
6035 int r, size, addr, align;
6036 size = type_size(func_type,&align);
6037 if ((vtop->r != (VT_LOCAL | VT_LVAL) ||
6038 (vtop->c.i & (ret_align-1)))
6039 && (align & (ret_align-1))) {
6040 loc = (loc - size) & -ret_align;
6041 addr = loc;
6042 type = *func_type;
6043 vset(&type, VT_LOCAL | VT_LVAL, addr);
6044 vswap();
6045 vstore();
6046 vpop();
6047 vset(&ret_type, VT_LOCAL | VT_LVAL, addr);
6049 vtop->type = ret_type;
6050 if (is_float(ret_type.t))
6051 r = rc_fret(ret_type.t);
6052 else
6053 r = RC_IRET;
6055 if (ret_nregs == 1)
6056 gv(r);
6057 else {
6058 for (;;) {
6059 vdup();
6060 gv(r);
6061 vpop();
6062 if (--ret_nregs == 0)
6063 break;
6064 /* We assume that when a structure is returned in multiple
6065 registers, their classes are consecutive values of the
6066 suite s(n) = 2^n */
6067 r <<= 1;
6068 vtop->c.i += regsize;
6072 } else if (is_float(func_type->t)) {
6073 gv(rc_fret(func_type->t));
6074 } else {
6075 gv(RC_IRET);
6077 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
6079 #endif
6081 static void check_func_return(void)
6083 if ((func_vt.t & VT_BTYPE) == VT_VOID)
6084 return;
6085 if (!strcmp (funcname, "main")
6086 && (func_vt.t & VT_BTYPE) == VT_INT) {
6087 /* main returns 0 by default */
6088 vpushi(0);
6089 gen_assign_cast(&func_vt);
6090 gfunc_return(&func_vt);
6091 } else {
6092 tcc_warning("function might return no value: '%s'", funcname);
6096 /* ------------------------------------------------------------------------- */
6097 /* switch/case */
6099 static int case_cmp(const void *pa, const void *pb)
6101 int64_t a = (*(struct case_t**) pa)->v1;
6102 int64_t b = (*(struct case_t**) pb)->v1;
6103 return a < b ? -1 : a > b;
6106 static void gtst_addr(int t, int a)
6108 gsym_addr(gvtst(0, t), a);
6111 static void gcase(struct case_t **base, int len, int *bsym)
6113 struct case_t *p;
6114 int e;
6115 int ll = (vtop->type.t & VT_BTYPE) == VT_LLONG;
6116 while (len > 4) {
6117 /* binary search */
6118 p = base[len/2];
6119 vdup();
6120 if (ll)
6121 vpushll(p->v2);
6122 else
6123 vpushi(p->v2);
6124 gen_op(TOK_LE);
6125 e = gvtst(1, 0);
6126 vdup();
6127 if (ll)
6128 vpushll(p->v1);
6129 else
6130 vpushi(p->v1);
6131 gen_op(TOK_GE);
6132 gtst_addr(0, p->sym); /* v1 <= x <= v2 */
6133 /* x < v1 */
6134 gcase(base, len/2, bsym);
6135 /* x > v2 */
6136 gsym(e);
6137 e = len/2 + 1;
6138 base += e; len -= e;
6140 /* linear scan */
6141 while (len--) {
6142 p = *base++;
6143 vdup();
6144 if (ll)
6145 vpushll(p->v2);
6146 else
6147 vpushi(p->v2);
6148 if (p->v1 == p->v2) {
6149 gen_op(TOK_EQ);
6150 gtst_addr(0, p->sym);
6151 } else {
6152 gen_op(TOK_LE);
6153 e = gvtst(1, 0);
6154 vdup();
6155 if (ll)
6156 vpushll(p->v1);
6157 else
6158 vpushi(p->v1);
6159 gen_op(TOK_GE);
6160 gtst_addr(0, p->sym);
6161 gsym(e);
6164 *bsym = gjmp(*bsym);
6167 /* ------------------------------------------------------------------------- */
6168 /* __attribute__((cleanup(fn))) */
6170 static void try_call_scope_cleanup(Sym *stop)
6172 Sym *cls = cur_scope->cl.s;
6174 for (; cls != stop; cls = cls->ncl) {
6175 Sym *fs = cls->next;
6176 Sym *vs = cls->prev_tok;
6178 vpushsym(&fs->type, fs);
6179 vset(&vs->type, vs->r, vs->c);
6180 vtop->sym = vs;
6181 mk_pointer(&vtop->type);
6182 gaddrof();
6183 gfunc_call(1);
6187 static void try_call_cleanup_goto(Sym *cleanupstate)
6189 Sym *oc, *cc;
6190 int ocd, ccd;
6192 if (!cur_scope->cl.s)
6193 return;
6195 /* search NCA of both cleanup chains given parents and initial depth */
6196 ocd = cleanupstate ? cleanupstate->v & ~SYM_FIELD : 0;
6197 for (ccd = cur_scope->cl.n, oc = cleanupstate; ocd > ccd; --ocd, oc = oc->ncl)
6199 for (cc = cur_scope->cl.s; ccd > ocd; --ccd, cc = cc->ncl)
6201 for (; cc != oc; cc = cc->ncl, oc = oc->ncl, --ccd)
6204 try_call_scope_cleanup(cc);
6207 /* call 'func' for each __attribute__((cleanup(func))) */
6208 static void block_cleanup(struct scope *o)
6210 int jmp = 0;
6211 Sym *g, **pg;
6212 for (pg = &pending_gotos; (g = *pg) && g->c > o->cl.n;) {
6213 if (g->prev_tok->r & LABEL_FORWARD) {
6214 Sym *pcl = g->next;
6215 if (!jmp)
6216 jmp = gjmp(0);
6217 gsym(pcl->jnext);
6218 try_call_scope_cleanup(o->cl.s);
6219 pcl->jnext = gjmp(0);
6220 if (!o->cl.n)
6221 goto remove_pending;
6222 g->c = o->cl.n;
6223 pg = &g->prev;
6224 } else {
6225 remove_pending:
6226 *pg = g->prev;
6227 sym_free(g);
6230 gsym(jmp);
6231 try_call_scope_cleanup(o->cl.s);
6234 /* ------------------------------------------------------------------------- */
6235 /* VLA */
6237 static void vla_restore(int loc)
6239 if (loc)
6240 gen_vla_sp_restore(loc);
6243 static void vla_leave(struct scope *o)
6245 if (o->vla.num < cur_scope->vla.num)
6246 vla_restore(o->vla.loc);
6249 /* ------------------------------------------------------------------------- */
6250 /* local scopes */
6252 void new_scope(struct scope *o)
6254 /* copy and link previous scope */
6255 *o = *cur_scope;
6256 o->prev = cur_scope;
6257 cur_scope = o;
6259 /* record local declaration stack position */
6260 o->lstk = local_stack;
6261 o->llstk = local_label_stack;
6263 ++local_scope;
6266 void prev_scope(struct scope *o, int is_expr)
6268 vla_leave(o->prev);
6270 if (o->cl.s != o->prev->cl.s)
6271 block_cleanup(o->prev);
6273 /* pop locally defined labels */
6274 label_pop(&local_label_stack, o->llstk, is_expr);
6276 /* In the is_expr case (a statement expression is finished here),
6277 vtop might refer to symbols on the local_stack. Either via the
6278 type or via vtop->sym. We can't pop those nor any that in turn
6279 might be referred to. To make it easier we don't roll back
6280 any symbols in that case; some upper level call to block() will
6281 do that. We do have to remove such symbols from the lookup
6282 tables, though. sym_pop will do that. */
6284 /* pop locally defined symbols */
6285 sym_pop(&local_stack, o->lstk, is_expr);
6287 cur_scope = o->prev;
6288 --local_scope;
6291 /* leave a scope via break/continue(/goto) */
6292 void leave_scope(struct scope *o)
6294 if (!o)
6295 return;
6296 try_call_scope_cleanup(o->cl.s);
6297 vla_leave(o);
6300 /* ------------------------------------------------------------------------- */
6301 /* call block from 'for do while' loops */
6303 static void lblock(int *bsym, int *csym)
6305 struct scope *lo = loop_scope, *co = cur_scope;
6306 int *b = co->bsym, *c = co->csym;
6307 if (csym) {
6308 co->csym = csym;
6309 loop_scope = co;
6311 co->bsym = bsym;
6312 block(0);
6313 co->bsym = b;
6314 if (csym) {
6315 co->csym = c;
6316 loop_scope = lo;
6320 static void block(int is_expr)
6322 int a, b, c, d, e, t;
6323 Sym *s;
6325 if (is_expr) {
6326 /* default return value is (void) */
6327 vpushi(0);
6328 vtop->type.t = VT_VOID;
6331 again:
6332 t = tok, next();
6334 if (t == TOK_IF) {
6335 skip('(');
6336 gexpr();
6337 skip(')');
6338 a = gvtst(1, 0);
6339 block(0);
6340 if (tok == TOK_ELSE) {
6341 d = gjmp(0);
6342 gsym(a);
6343 next();
6344 block(0);
6345 gsym(d); /* patch else jmp */
6346 } else {
6347 gsym(a);
6350 } else if (t == TOK_WHILE) {
6351 d = gind();
6352 skip('(');
6353 gexpr();
6354 skip(')');
6355 a = gvtst(1, 0);
6356 b = 0;
6357 lblock(&a, &b);
6358 gjmp_addr(d);
6359 gsym_addr(b, d);
6360 gsym(a);
6362 } else if (t == '{') {
6363 struct scope o;
6364 new_scope(&o);
6366 /* handle local labels declarations */
6367 while (tok == TOK_LABEL) {
6368 do {
6369 next();
6370 if (tok < TOK_UIDENT)
6371 expect("label identifier");
6372 label_push(&local_label_stack, tok, LABEL_DECLARED);
6373 next();
6374 } while (tok == ',');
6375 skip(';');
6378 while (tok != '}') {
6379 decl(VT_LOCAL);
6380 if (tok != '}') {
6381 if (is_expr)
6382 vpop();
6383 block(is_expr);
6387 prev_scope(&o, is_expr);
6389 if (0 == local_scope && !nocode_wanted)
6390 check_func_return();
6391 next();
6393 } else if (t == TOK_RETURN) {
6394 a = tok != ';';
6395 b = (func_vt.t & VT_BTYPE) != VT_VOID;
6396 if (a)
6397 gexpr(), gen_assign_cast(&func_vt);
6398 leave_scope(root_scope);
6399 if (a && b)
6400 gfunc_return(&func_vt);
6401 else if (a)
6402 vtop--;
6403 else if (b)
6404 tcc_warning("'return' with no value.");
6405 skip(';');
6406 /* jump unless last stmt in top-level block */
6407 if (tok != '}' || local_scope != 1)
6408 rsym = gjmp(rsym);
6409 CODE_OFF();
6411 } else if (t == TOK_BREAK) {
6412 /* compute jump */
6413 if (!cur_scope->bsym)
6414 tcc_error("cannot break");
6415 if (!cur_switch || cur_scope->bsym != cur_switch->bsym)
6416 leave_scope(loop_scope);
6417 else
6418 leave_scope(cur_switch->scope);
6419 *cur_scope->bsym = gjmp(*cur_scope->bsym);
6420 skip(';');
6422 } else if (t == TOK_CONTINUE) {
6423 /* compute jump */
6424 if (!cur_scope->csym)
6425 tcc_error("cannot continue");
6426 leave_scope(loop_scope);
6427 *cur_scope->csym = gjmp(*cur_scope->csym);
6428 skip(';');
6430 } else if (t == TOK_FOR) {
6431 struct scope o;
6432 new_scope(&o);
6434 skip('(');
6435 if (tok != ';') {
6436 /* c99 for-loop init decl? */
6437 if (!decl0(VT_LOCAL, 1, NULL)) {
6438 /* no, regular for-loop init expr */
6439 gexpr();
6440 vpop();
6443 skip(';');
6444 a = b = 0;
6445 c = d = gind();
6446 if (tok != ';') {
6447 gexpr();
6448 a = gvtst(1, 0);
6450 skip(';');
6451 if (tok != ')') {
6452 e = gjmp(0);
6453 d = gind();
6454 gexpr();
6455 vpop();
6456 gjmp_addr(c);
6457 gsym(e);
6459 skip(')');
6460 lblock(&a, &b);
6461 gjmp_addr(d);
6462 gsym_addr(b, d);
6463 gsym(a);
6464 prev_scope(&o, 0);
6466 } else if (t == TOK_DO) {
6467 a = b = 0;
6468 d = gind();
6469 lblock(&a, &b);
6470 gsym(b);
6471 skip(TOK_WHILE);
6472 skip('(');
6473 gexpr();
6474 skip(')');
6475 skip(';');
6476 c = gvtst(0, 0);
6477 gsym_addr(c, d);
6478 gsym(a);
6480 } else if (t == TOK_SWITCH) {
6481 struct switch_t *saved, sw;
6482 SValue switchval;
6484 sw.p = NULL;
6485 sw.n = 0;
6486 sw.def_sym = 0;
6487 sw.bsym = &a;
6488 sw.scope = cur_scope;
6490 saved = cur_switch;
6491 cur_switch = &sw;
6493 skip('(');
6494 gexpr();
6495 skip(')');
6496 switchval = *vtop--;
6498 a = 0;
6499 b = gjmp(0); /* jump to first case */
6500 lblock(&a, NULL);
6501 a = gjmp(a); /* add implicit break */
6502 /* case lookup */
6503 gsym(b);
6505 qsort(sw.p, sw.n, sizeof(void*), case_cmp);
6506 for (b = 1; b < sw.n; b++)
6507 if (sw.p[b - 1]->v2 >= sw.p[b]->v1)
6508 tcc_error("duplicate case value");
6510 /* Our switch table sorting is signed, so the compared
6511 value needs to be as well when it's 64bit. */
6512 if ((switchval.type.t & VT_BTYPE) == VT_LLONG)
6513 switchval.type.t &= ~VT_UNSIGNED;
6514 vpushv(&switchval);
6515 gv(RC_INT);
6516 d = 0, gcase(sw.p, sw.n, &d);
6517 vpop();
6518 if (sw.def_sym)
6519 gsym_addr(d, sw.def_sym);
6520 else
6521 gsym(d);
6522 /* break label */
6523 gsym(a);
6525 dynarray_reset(&sw.p, &sw.n);
6526 cur_switch = saved;
6528 } else if (t == TOK_CASE) {
6529 struct case_t *cr = tcc_malloc(sizeof(struct case_t));
6530 if (!cur_switch)
6531 expect("switch");
6532 cr->v1 = cr->v2 = expr_const64();
6533 if (gnu_ext && tok == TOK_DOTS) {
6534 next();
6535 cr->v2 = expr_const64();
6536 if (cr->v2 < cr->v1)
6537 tcc_warning("empty case range");
6539 cr->sym = gind();
6540 dynarray_add(&cur_switch->p, &cur_switch->n, cr);
6541 skip(':');
6542 is_expr = 0;
6543 goto block_after_label;
6545 } else if (t == TOK_DEFAULT) {
6546 if (!cur_switch)
6547 expect("switch");
6548 if (cur_switch->def_sym)
6549 tcc_error("too many 'default'");
6550 cur_switch->def_sym = gind();
6551 skip(':');
6552 is_expr = 0;
6553 goto block_after_label;
6555 } else if (t == TOK_GOTO) {
6556 vla_restore(root_scope->vla.loc);
6557 if (tok == '*' && gnu_ext) {
6558 /* computed goto */
6559 next();
6560 gexpr();
6561 if ((vtop->type.t & VT_BTYPE) != VT_PTR)
6562 expect("pointer");
6563 ggoto();
6565 } else if (tok >= TOK_UIDENT) {
6566 s = label_find(tok);
6567 /* put forward definition if needed */
6568 if (!s)
6569 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
6570 else if (s->r == LABEL_DECLARED)
6571 s->r = LABEL_FORWARD;
6573 if (s->r & LABEL_FORWARD) {
6574 /* start new goto chain for cleanups, linked via label->next */
6575 if (cur_scope->cl.s && !nocode_wanted) {
6576 sym_push2(&pending_gotos, SYM_FIELD, 0, cur_scope->cl.n);
6577 pending_gotos->prev_tok = s;
6578 s = sym_push2(&s->next, SYM_FIELD, 0, 0);
6579 pending_gotos->next = s;
6581 s->jnext = gjmp(s->jnext);
6582 } else {
6583 try_call_cleanup_goto(s->cleanupstate);
6584 gjmp_addr(s->jnext);
6586 next();
6588 } else {
6589 expect("label identifier");
6591 skip(';');
6593 } else if (t == TOK_ASM1 || t == TOK_ASM2 || t == TOK_ASM3) {
6594 asm_instr();
6596 } else {
6597 if (tok == ':' && t >= TOK_UIDENT) {
6598 /* label case */
6599 next();
6600 s = label_find(t);
6601 if (s) {
6602 if (s->r == LABEL_DEFINED)
6603 tcc_error("duplicate label '%s'", get_tok_str(s->v, NULL));
6604 s->r = LABEL_DEFINED;
6605 if (s->next) {
6606 Sym *pcl; /* pending cleanup goto */
6607 for (pcl = s->next; pcl; pcl = pcl->prev)
6608 gsym(pcl->jnext);
6609 sym_pop(&s->next, NULL, 0);
6610 } else
6611 gsym(s->jnext);
6612 } else {
6613 s = label_push(&global_label_stack, t, LABEL_DEFINED);
6615 s->jnext = gind();
6616 s->cleanupstate = cur_scope->cl.s;
6618 block_after_label:
6619 vla_restore(cur_scope->vla.loc);
6620 /* we accept this, but it is a mistake */
6621 if (tok == '}') {
6622 tcc_warning("deprecated use of label at end of compound statement");
6623 } else {
6624 goto again;
6627 } else {
6628 /* expression case */
6629 if (t != ';') {
6630 unget_tok(t);
6631 if (is_expr) {
6632 vpop();
6633 gexpr();
6634 } else {
6635 gexpr();
6636 vpop();
6638 skip(';');
6644 /* This skips over a stream of tokens containing balanced {} and ()
6645 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
6646 with a '{'). If STR then allocates and stores the skipped tokens
6647 in *STR. This doesn't check if () and {} are nested correctly,
6648 i.e. "({)}" is accepted. */
6649 static void skip_or_save_block(TokenString **str)
6651 int braces = tok == '{';
6652 int level = 0;
6653 if (str)
6654 *str = tok_str_alloc();
6656 while ((level > 0 || (tok != '}' && tok != ',' && tok != ';' && tok != ')'))) {
6657 int t;
6658 if (tok == TOK_EOF) {
6659 if (str || level > 0)
6660 tcc_error("unexpected end of file");
6661 else
6662 break;
6664 if (str)
6665 tok_str_add_tok(*str);
6666 t = tok;
6667 next();
6668 if (t == '{' || t == '(') {
6669 level++;
6670 } else if (t == '}' || t == ')') {
6671 level--;
6672 if (level == 0 && braces && t == '}')
6673 break;
6676 if (str) {
6677 tok_str_add(*str, -1);
6678 tok_str_add(*str, 0);
6682 #define EXPR_CONST 1
6683 #define EXPR_ANY 2
6685 static void parse_init_elem(int expr_type)
6687 int saved_global_expr;
6688 switch(expr_type) {
6689 case EXPR_CONST:
6690 /* compound literals must be allocated globally in this case */
6691 saved_global_expr = global_expr;
6692 global_expr = 1;
6693 expr_const1();
6694 global_expr = saved_global_expr;
6695 /* NOTE: symbols are accepted, as well as lvalue for anon symbols
6696 (compound literals). */
6697 if (((vtop->r & (VT_VALMASK | VT_LVAL)) != VT_CONST
6698 && ((vtop->r & (VT_SYM|VT_LVAL)) != (VT_SYM|VT_LVAL)
6699 || vtop->sym->v < SYM_FIRST_ANOM))
6700 #ifdef TCC_TARGET_PE
6701 || ((vtop->r & VT_SYM) && vtop->sym->a.dllimport)
6702 #endif
6704 tcc_error("initializer element is not constant");
6705 break;
6706 case EXPR_ANY:
6707 expr_eq();
6708 break;
6712 /* put zeros for variable based init */
6713 static void init_putz(Section *sec, unsigned long c, int size)
6715 if (sec) {
6716 /* nothing to do because globals are already set to zero */
6717 } else {
6718 vpush_global_sym(&func_old_type, TOK_memset);
6719 vseti(VT_LOCAL, c);
6720 #ifdef TCC_TARGET_ARM
6721 vpushs(size);
6722 vpushi(0);
6723 #else
6724 vpushi(0);
6725 vpushs(size);
6726 #endif
6727 gfunc_call(3);
6731 #define DIF_FIRST 1
6732 #define DIF_SIZE_ONLY 2
6733 #define DIF_HAVE_ELEM 4
6735 /* t is the array or struct type. c is the array or struct
6736 address. cur_field is the pointer to the current
6737 field, for arrays the 'c' member contains the current start
6738 index. 'flags' is as in decl_initializer.
6739 'al' contains the already initialized length of the
6740 current container (starting at c). This returns the new length of that. */
6741 static int decl_designator(CType *type, Section *sec, unsigned long c,
6742 Sym **cur_field, int flags, int al)
6744 Sym *s, *f;
6745 int index, index_last, align, l, nb_elems, elem_size;
6746 unsigned long corig = c;
6748 elem_size = 0;
6749 nb_elems = 1;
6751 if (flags & DIF_HAVE_ELEM)
6752 goto no_designator;
6754 if (gnu_ext && tok >= TOK_UIDENT) {
6755 l = tok, next();
6756 if (tok == ':')
6757 goto struct_field;
6758 unget_tok(l);
6761 /* NOTE: we only support ranges for last designator */
6762 while (nb_elems == 1 && (tok == '[' || tok == '.')) {
6763 if (tok == '[') {
6764 if (!(type->t & VT_ARRAY))
6765 expect("array type");
6766 next();
6767 index = index_last = expr_const();
6768 if (tok == TOK_DOTS && gnu_ext) {
6769 next();
6770 index_last = expr_const();
6772 skip(']');
6773 s = type->ref;
6774 if (index < 0 || (s->c >= 0 && index_last >= s->c) ||
6775 index_last < index)
6776 tcc_error("invalid index");
6777 if (cur_field)
6778 (*cur_field)->c = index_last;
6779 type = pointed_type(type);
6780 elem_size = type_size(type, &align);
6781 c += index * elem_size;
6782 nb_elems = index_last - index + 1;
6783 } else {
6784 int cumofs = 0;
6785 next();
6786 l = tok;
6787 struct_field:
6788 next();
6789 if ((type->t & VT_BTYPE) != VT_STRUCT)
6790 expect("struct/union type");
6791 f = find_field(type, l, &cumofs);
6792 if (!f)
6793 expect("field");
6794 if (cur_field)
6795 *cur_field = f;
6796 type = &f->type;
6797 c += cumofs + f->c;
6799 cur_field = NULL;
6801 if (!cur_field) {
6802 if (tok == '=') {
6803 next();
6804 } else if (!gnu_ext) {
6805 expect("=");
6807 } else {
6808 no_designator:
6809 if (type->t & VT_ARRAY) {
6810 index = (*cur_field)->c;
6811 if (type->ref->c >= 0 && index >= type->ref->c)
6812 tcc_error("index too large");
6813 type = pointed_type(type);
6814 c += index * type_size(type, &align);
6815 } else {
6816 f = *cur_field;
6817 while (f && (f->v & SYM_FIRST_ANOM) && (f->type.t & VT_BITFIELD))
6818 *cur_field = f = f->next;
6819 if (!f)
6820 tcc_error("too many field init");
6821 type = &f->type;
6822 c += f->c;
6825 /* must put zero in holes (note that doing it that way
6826 ensures that it even works with designators) */
6827 if (!(flags & DIF_SIZE_ONLY) && c - corig > al)
6828 init_putz(sec, corig + al, c - corig - al);
6829 decl_initializer(type, sec, c, flags & ~DIF_FIRST);
6831 /* XXX: make it more general */
6832 if (!(flags & DIF_SIZE_ONLY) && nb_elems > 1) {
6833 unsigned long c_end;
6834 uint8_t *src, *dst;
6835 int i;
6837 if (!sec) {
6838 vset(type, VT_LOCAL|VT_LVAL, c);
6839 for (i = 1; i < nb_elems; i++) {
6840 vset(type, VT_LOCAL|VT_LVAL, c + elem_size * i);
6841 vswap();
6842 vstore();
6844 vpop();
6845 } else if (!NODATA_WANTED) {
6846 c_end = c + nb_elems * elem_size;
6847 if (c_end > sec->data_allocated)
6848 section_realloc(sec, c_end);
6849 src = sec->data + c;
6850 dst = src;
6851 for(i = 1; i < nb_elems; i++) {
6852 dst += elem_size;
6853 memcpy(dst, src, elem_size);
6857 c += nb_elems * type_size(type, &align);
6858 if (c - corig > al)
6859 al = c - corig;
6860 return al;
6863 /* store a value or an expression directly in global data or in local array */
6864 static void init_putv(CType *type, Section *sec, unsigned long c)
6866 int bt;
6867 void *ptr;
6868 CType dtype;
6870 dtype = *type;
6871 dtype.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
6873 if (sec) {
6874 int size, align;
6875 /* XXX: not portable */
6876 /* XXX: generate error if incorrect relocation */
6877 gen_assign_cast(&dtype);
6878 bt = type->t & VT_BTYPE;
6880 if ((vtop->r & VT_SYM)
6881 && bt != VT_PTR
6882 && bt != VT_FUNC
6883 && (bt != (PTR_SIZE == 8 ? VT_LLONG : VT_INT)
6884 || (type->t & VT_BITFIELD))
6885 && !((vtop->r & VT_CONST) && vtop->sym->v >= SYM_FIRST_ANOM)
6887 tcc_error("initializer element is not computable at load time");
6889 if (NODATA_WANTED) {
6890 vtop--;
6891 return;
6894 size = type_size(type, &align);
6895 section_reserve(sec, c + size);
6896 ptr = sec->data + c;
6898 /* XXX: make code faster ? */
6899 if ((vtop->r & (VT_SYM|VT_CONST)) == (VT_SYM|VT_CONST) &&
6900 vtop->sym->v >= SYM_FIRST_ANOM &&
6901 /* XXX This rejects compound literals like
6902 '(void *){ptr}'. The problem is that '&sym' is
6903 represented the same way, which would be ruled out
6904 by the SYM_FIRST_ANOM check above, but also '"string"'
6905 in 'char *p = "string"' is represented the same
6906 with the type being VT_PTR and the symbol being an
6907 anonymous one. That is, there's no difference in vtop
6908 between '(void *){x}' and '&(void *){x}'. Ignore
6909 pointer typed entities here. Hopefully no real code
6910 will every use compound literals with scalar type. */
6911 (vtop->type.t & VT_BTYPE) != VT_PTR) {
6912 /* These come from compound literals, memcpy stuff over. */
6913 Section *ssec;
6914 ElfSym *esym;
6915 ElfW_Rel *rel;
6916 esym = elfsym(vtop->sym);
6917 ssec = tcc_state->sections[esym->st_shndx];
6918 memmove (ptr, ssec->data + esym->st_value, size);
6919 if (ssec->reloc) {
6920 /* We need to copy over all memory contents, and that
6921 includes relocations. Use the fact that relocs are
6922 created it order, so look from the end of relocs
6923 until we hit one before the copied region. */
6924 int num_relocs = ssec->reloc->data_offset / sizeof(*rel);
6925 rel = (ElfW_Rel*)(ssec->reloc->data + ssec->reloc->data_offset);
6926 while (num_relocs--) {
6927 rel--;
6928 if (rel->r_offset >= esym->st_value + size)
6929 continue;
6930 if (rel->r_offset < esym->st_value)
6931 break;
6932 /* Note: if the same fields are initialized multiple
6933 times (possible with designators) then we possibly
6934 add multiple relocations for the same offset here.
6935 That would lead to wrong code, the last reloc needs
6936 to win. We clean this up later after the whole
6937 initializer is parsed. */
6938 put_elf_reloca(symtab_section, sec,
6939 c + rel->r_offset - esym->st_value,
6940 ELFW(R_TYPE)(rel->r_info),
6941 ELFW(R_SYM)(rel->r_info),
6942 #if PTR_SIZE == 8
6943 rel->r_addend
6944 #else
6946 #endif
6950 } else {
6951 if (type->t & VT_BITFIELD) {
6952 int bit_pos, bit_size, bits, n;
6953 unsigned char *p, v, m;
6954 bit_pos = BIT_POS(vtop->type.t);
6955 bit_size = BIT_SIZE(vtop->type.t);
6956 p = (unsigned char*)ptr + (bit_pos >> 3);
6957 bit_pos &= 7, bits = 0;
6958 while (bit_size) {
6959 n = 8 - bit_pos;
6960 if (n > bit_size)
6961 n = bit_size;
6962 v = vtop->c.i >> bits << bit_pos;
6963 m = ((1 << n) - 1) << bit_pos;
6964 *p = (*p & ~m) | (v & m);
6965 bits += n, bit_size -= n, bit_pos = 0, ++p;
6967 } else
6968 switch(bt) {
6969 /* XXX: when cross-compiling we assume that each type has the
6970 same representation on host and target, which is likely to
6971 be wrong in the case of long double */
6972 case VT_BOOL:
6973 vtop->c.i = vtop->c.i != 0;
6974 case VT_BYTE:
6975 *(char *)ptr |= vtop->c.i;
6976 break;
6977 case VT_SHORT:
6978 *(short *)ptr |= vtop->c.i;
6979 break;
6980 case VT_FLOAT:
6981 *(float*)ptr = vtop->c.f;
6982 break;
6983 case VT_DOUBLE:
6984 *(double *)ptr = vtop->c.d;
6985 break;
6986 case VT_LDOUBLE:
6987 #if defined TCC_IS_NATIVE_387
6988 if (sizeof (long double) >= 10) /* zero pad ten-byte LD */
6989 memcpy(ptr, &vtop->c.ld, 10);
6990 #ifdef __TINYC__
6991 else if (sizeof (long double) == sizeof (double))
6992 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr) : "m" (vtop->c.ld));
6993 #endif
6994 else if (vtop->c.ld == 0.0)
6996 else
6997 #endif
6998 if (sizeof(long double) == LDOUBLE_SIZE)
6999 *(long double*)ptr = vtop->c.ld;
7000 else if (sizeof(double) == LDOUBLE_SIZE)
7001 *(double *)ptr = (double)vtop->c.ld;
7002 else
7003 tcc_error("can't cross compile long double constants");
7004 break;
7005 #if PTR_SIZE != 8
7006 case VT_LLONG:
7007 *(long long *)ptr |= vtop->c.i;
7008 break;
7009 #else
7010 case VT_LLONG:
7011 #endif
7012 case VT_PTR:
7014 addr_t val = vtop->c.i;
7015 #if PTR_SIZE == 8
7016 if (vtop->r & VT_SYM)
7017 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
7018 else
7019 *(addr_t *)ptr |= val;
7020 #else
7021 if (vtop->r & VT_SYM)
7022 greloc(sec, vtop->sym, c, R_DATA_PTR);
7023 *(addr_t *)ptr |= val;
7024 #endif
7025 break;
7027 default:
7029 int val = vtop->c.i;
7030 #if PTR_SIZE == 8
7031 if (vtop->r & VT_SYM)
7032 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
7033 else
7034 *(int *)ptr |= val;
7035 #else
7036 if (vtop->r & VT_SYM)
7037 greloc(sec, vtop->sym, c, R_DATA_PTR);
7038 *(int *)ptr |= val;
7039 #endif
7040 break;
7044 vtop--;
7045 } else {
7046 vset(&dtype, VT_LOCAL|VT_LVAL, c);
7047 vswap();
7048 vstore();
7049 vpop();
7053 /* 't' contains the type and storage info. 'c' is the offset of the
7054 object in section 'sec'. If 'sec' is NULL, it means stack based
7055 allocation. 'flags & DIF_FIRST' is true if array '{' must be read (multi
7056 dimension implicit array init handling). 'flags & DIF_SIZE_ONLY' is true if
7057 size only evaluation is wanted (only for arrays). */
7058 static void decl_initializer(CType *type, Section *sec, unsigned long c,
7059 int flags)
7061 int len, n, no_oblock, nb, i;
7062 int size1, align1;
7063 Sym *s, *f;
7064 Sym indexsym;
7065 CType *t1;
7067 if (!(flags & DIF_HAVE_ELEM) && tok != '{' &&
7068 /* In case of strings we have special handling for arrays, so
7069 don't consume them as initializer value (which would commit them
7070 to some anonymous symbol). */
7071 tok != TOK_LSTR && tok != TOK_STR &&
7072 !(flags & DIF_SIZE_ONLY)) {
7073 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
7074 flags |= DIF_HAVE_ELEM;
7077 if ((flags & DIF_HAVE_ELEM) &&
7078 !(type->t & VT_ARRAY) &&
7079 /* Use i_c_parameter_t, to strip toplevel qualifiers.
7080 The source type might have VT_CONSTANT set, which is
7081 of course assignable to non-const elements. */
7082 is_compatible_unqualified_types(type, &vtop->type)) {
7083 init_putv(type, sec, c);
7084 } else if (type->t & VT_ARRAY) {
7085 s = type->ref;
7086 n = s->c;
7087 t1 = pointed_type(type);
7088 size1 = type_size(t1, &align1);
7090 no_oblock = 1;
7091 if (((flags & DIF_FIRST) && tok != TOK_LSTR && tok != TOK_STR) ||
7092 tok == '{') {
7093 if (tok != '{')
7094 tcc_error("character array initializer must be a literal,"
7095 " optionally enclosed in braces");
7096 skip('{');
7097 no_oblock = 0;
7100 /* only parse strings here if correct type (otherwise: handle
7101 them as ((w)char *) expressions */
7102 if ((tok == TOK_LSTR &&
7103 #ifdef TCC_TARGET_PE
7104 (t1->t & VT_BTYPE) == VT_SHORT && (t1->t & VT_UNSIGNED)
7105 #else
7106 (t1->t & VT_BTYPE) == VT_INT
7107 #endif
7108 ) || (tok == TOK_STR && (t1->t & VT_BTYPE) == VT_BYTE)) {
7109 len = 0;
7110 while (tok == TOK_STR || tok == TOK_LSTR) {
7111 int cstr_len, ch;
7113 /* compute maximum number of chars wanted */
7114 if (tok == TOK_STR)
7115 cstr_len = tokc.str.size;
7116 else
7117 cstr_len = tokc.str.size / sizeof(nwchar_t);
7118 cstr_len--;
7119 nb = cstr_len;
7120 if (n >= 0 && nb > (n - len))
7121 nb = n - len;
7122 if (!(flags & DIF_SIZE_ONLY)) {
7123 if (cstr_len > nb)
7124 tcc_warning("initializer-string for array is too long");
7125 /* in order to go faster for common case (char
7126 string in global variable, we handle it
7127 specifically */
7128 if (sec && tok == TOK_STR && size1 == 1) {
7129 if (!NODATA_WANTED)
7130 memcpy(sec->data + c + len, tokc.str.data, nb);
7131 } else {
7132 for(i=0;i<nb;i++) {
7133 if (tok == TOK_STR)
7134 ch = ((unsigned char *)tokc.str.data)[i];
7135 else
7136 ch = ((nwchar_t *)tokc.str.data)[i];
7137 vpushi(ch);
7138 init_putv(t1, sec, c + (len + i) * size1);
7142 len += nb;
7143 next();
7145 /* only add trailing zero if enough storage (no
7146 warning in this case since it is standard) */
7147 if (n < 0 || len < n) {
7148 if (!(flags & DIF_SIZE_ONLY)) {
7149 vpushi(0);
7150 init_putv(t1, sec, c + (len * size1));
7152 len++;
7154 len *= size1;
7155 } else {
7156 indexsym.c = 0;
7157 f = &indexsym;
7159 do_init_list:
7160 len = 0;
7161 while (tok != '}' || (flags & DIF_HAVE_ELEM)) {
7162 len = decl_designator(type, sec, c, &f, flags, len);
7163 flags &= ~DIF_HAVE_ELEM;
7164 if (type->t & VT_ARRAY) {
7165 ++indexsym.c;
7166 /* special test for multi dimensional arrays (may not
7167 be strictly correct if designators are used at the
7168 same time) */
7169 if (no_oblock && len >= n*size1)
7170 break;
7171 } else {
7172 if (s->type.t == VT_UNION)
7173 f = NULL;
7174 else
7175 f = f->next;
7176 if (no_oblock && f == NULL)
7177 break;
7180 if (tok == '}')
7181 break;
7182 skip(',');
7185 /* put zeros at the end */
7186 if (!(flags & DIF_SIZE_ONLY) && len < n*size1)
7187 init_putz(sec, c + len, n*size1 - len);
7188 if (!no_oblock)
7189 skip('}');
7190 /* patch type size if needed, which happens only for array types */
7191 if (n < 0)
7192 s->c = size1 == 1 ? len : ((len + size1 - 1)/size1);
7193 } else if ((type->t & VT_BTYPE) == VT_STRUCT) {
7194 size1 = 1;
7195 no_oblock = 1;
7196 if ((flags & DIF_FIRST) || tok == '{') {
7197 skip('{');
7198 no_oblock = 0;
7200 s = type->ref;
7201 f = s->next;
7202 n = s->c;
7203 goto do_init_list;
7204 } else if (tok == '{') {
7205 if (flags & DIF_HAVE_ELEM)
7206 skip(';');
7207 next();
7208 decl_initializer(type, sec, c, flags & ~DIF_HAVE_ELEM);
7209 skip('}');
7210 } else if ((flags & DIF_SIZE_ONLY)) {
7211 /* If we supported only ISO C we wouldn't have to accept calling
7212 this on anything than an array if DIF_SIZE_ONLY (and even then
7213 only on the outermost level, so no recursion would be needed),
7214 because initializing a flex array member isn't supported.
7215 But GNU C supports it, so we need to recurse even into
7216 subfields of structs and arrays when DIF_SIZE_ONLY is set. */
7217 /* just skip expression */
7218 skip_or_save_block(NULL);
7219 } else {
7220 if (!(flags & DIF_HAVE_ELEM)) {
7221 /* This should happen only when we haven't parsed
7222 the init element above for fear of committing a
7223 string constant to memory too early. */
7224 if (tok != TOK_STR && tok != TOK_LSTR)
7225 expect("string constant");
7226 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
7228 init_putv(type, sec, c);
7232 /* parse an initializer for type 't' if 'has_init' is non zero, and
7233 allocate space in local or global data space ('r' is either
7234 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
7235 variable 'v' of scope 'scope' is declared before initializers
7236 are parsed. If 'v' is zero, then a reference to the new object
7237 is put in the value stack. If 'has_init' is 2, a special parsing
7238 is done to handle string constants. */
7239 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r,
7240 int has_init, int v, int scope)
7242 int size, align, addr;
7243 TokenString *init_str = NULL;
7245 Section *sec;
7246 Sym *flexible_array;
7247 Sym *sym = NULL;
7248 int saved_nocode_wanted = nocode_wanted;
7249 #ifdef CONFIG_TCC_BCHECK
7250 int bcheck;
7251 #endif
7253 /* Always allocate static or global variables */
7254 if (v && (r & VT_VALMASK) == VT_CONST)
7255 nocode_wanted |= 0x80000000;
7257 #ifdef CONFIG_TCC_BCHECK
7258 bcheck = tcc_state->do_bounds_check && !NODATA_WANTED;
7259 #endif
7261 flexible_array = NULL;
7262 if ((type->t & VT_BTYPE) == VT_STRUCT) {
7263 Sym *field = type->ref->next;
7264 if (field) {
7265 while (field->next)
7266 field = field->next;
7267 if (field->type.t & VT_ARRAY && field->type.ref->c < 0)
7268 flexible_array = field;
7272 size = type_size(type, &align);
7273 /* If unknown size, we must evaluate it before
7274 evaluating initializers because
7275 initializers can generate global data too
7276 (e.g. string pointers or ISOC99 compound
7277 literals). It also simplifies local
7278 initializers handling */
7279 if (size < 0 || (flexible_array && has_init)) {
7280 if (!has_init)
7281 tcc_error("unknown type size");
7282 /* get all init string */
7283 if (has_init == 2) {
7284 init_str = tok_str_alloc();
7285 /* only get strings */
7286 while (tok == TOK_STR || tok == TOK_LSTR) {
7287 tok_str_add_tok(init_str);
7288 next();
7290 tok_str_add(init_str, -1);
7291 tok_str_add(init_str, 0);
7292 } else {
7293 skip_or_save_block(&init_str);
7295 unget_tok(0);
7297 /* compute size */
7298 begin_macro(init_str, 1);
7299 next();
7300 decl_initializer(type, NULL, 0, DIF_FIRST | DIF_SIZE_ONLY);
7301 /* prepare second initializer parsing */
7302 macro_ptr = init_str->str;
7303 next();
7305 /* if still unknown size, error */
7306 size = type_size(type, &align);
7307 if (size < 0)
7308 tcc_error("unknown type size");
7310 /* If there's a flex member and it was used in the initializer
7311 adjust size. */
7312 if (flexible_array &&
7313 flexible_array->type.ref->c > 0)
7314 size += flexible_array->type.ref->c
7315 * pointed_size(&flexible_array->type);
7316 /* take into account specified alignment if bigger */
7317 if (ad->a.aligned) {
7318 int speca = 1 << (ad->a.aligned - 1);
7319 if (speca > align)
7320 align = speca;
7321 } else if (ad->a.packed) {
7322 align = 1;
7325 if (!v && NODATA_WANTED)
7326 size = 0, align = 1;
7328 if ((r & VT_VALMASK) == VT_LOCAL) {
7329 sec = NULL;
7330 #ifdef CONFIG_TCC_BCHECK
7331 if (bcheck && (type->t & VT_ARRAY)) {
7332 loc--;
7334 #endif
7335 loc = (loc - size) & -align;
7336 addr = loc;
7337 #ifdef CONFIG_TCC_BCHECK
7338 /* handles bounds */
7339 /* XXX: currently, since we do only one pass, we cannot track
7340 '&' operators, so we add only arrays */
7341 if (bcheck && (type->t & VT_ARRAY)) {
7342 addr_t *bounds_ptr;
7343 /* add padding between regions */
7344 loc--;
7345 /* then add local bound info */
7346 bounds_ptr = section_ptr_add(lbounds_section, 2 * sizeof(addr_t));
7347 bounds_ptr[0] = addr;
7348 bounds_ptr[1] = size;
7350 #endif
7351 if (v) {
7352 /* local variable */
7353 #ifdef CONFIG_TCC_ASM
7354 if (ad->asm_label) {
7355 int reg = asm_parse_regvar(ad->asm_label);
7356 if (reg >= 0)
7357 r = (r & ~VT_VALMASK) | reg;
7359 #endif
7360 sym = sym_push(v, type, r, addr);
7361 if (ad->cleanup_func) {
7362 Sym *cls = sym_push2(&all_cleanups,
7363 SYM_FIELD | ++cur_scope->cl.n, 0, 0);
7364 cls->prev_tok = sym;
7365 cls->next = ad->cleanup_func;
7366 cls->ncl = cur_scope->cl.s;
7367 cur_scope->cl.s = cls;
7370 sym->a = ad->a;
7371 } else {
7372 /* push local reference */
7373 vset(type, r, addr);
7375 } else {
7376 if (v && scope == VT_CONST) {
7377 /* see if the symbol was already defined */
7378 sym = sym_find(v);
7379 if (sym) {
7380 patch_storage(sym, ad, type);
7381 /* we accept several definitions of the same global variable. */
7382 if (!has_init && sym->c && elfsym(sym)->st_shndx != SHN_UNDEF)
7383 goto no_alloc;
7387 /* allocate symbol in corresponding section */
7388 sec = ad->section;
7389 if (!sec) {
7390 if (has_init)
7391 sec = data_section;
7392 else if (tcc_state->nocommon)
7393 sec = bss_section;
7396 if (sec) {
7397 addr = section_add(sec, size, align);
7398 #ifdef CONFIG_TCC_BCHECK
7399 /* add padding if bound check */
7400 if (bcheck)
7401 section_add(sec, 1, 1);
7402 #endif
7403 } else {
7404 addr = align; /* SHN_COMMON is special, symbol value is align */
7405 sec = common_section;
7408 if (v) {
7409 if (!sym) {
7410 sym = sym_push(v, type, r | VT_SYM, 0);
7411 patch_storage(sym, ad, NULL);
7413 /* update symbol definition */
7414 put_extern_sym(sym, sec, addr, size);
7415 } else {
7416 /* push global reference */
7417 vpush_ref(type, sec, addr, size);
7418 sym = vtop->sym;
7419 vtop->r |= r;
7422 #ifdef CONFIG_TCC_BCHECK
7423 /* handles bounds now because the symbol must be defined
7424 before for the relocation */
7425 if (bcheck) {
7426 addr_t *bounds_ptr;
7428 greloca(bounds_section, sym, bounds_section->data_offset, R_DATA_PTR, 0);
7429 /* then add global bound info */
7430 bounds_ptr = section_ptr_add(bounds_section, 2 * sizeof(addr_t));
7431 bounds_ptr[0] = 0; /* relocated */
7432 bounds_ptr[1] = size;
7434 #endif
7437 if (type->t & VT_VLA) {
7438 int a;
7440 if (NODATA_WANTED)
7441 goto no_alloc;
7443 /* save current stack pointer */
7444 if (root_scope->vla.loc == 0) {
7445 struct scope *v = cur_scope;
7446 gen_vla_sp_save(loc -= PTR_SIZE);
7447 do v->vla.loc = loc; while ((v = v->prev));
7450 vla_runtime_type_size(type, &a);
7451 gen_vla_alloc(type, a);
7452 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
7453 /* on _WIN64, because of the function args scratch area, the
7454 result of alloca differs from RSP and is returned in RAX. */
7455 gen_vla_result(addr), addr = (loc -= PTR_SIZE);
7456 #endif
7457 gen_vla_sp_save(addr);
7458 cur_scope->vla.loc = addr;
7459 cur_scope->vla.num++;
7461 } else if (has_init) {
7462 size_t oldreloc_offset = 0;
7463 if (sec && sec->reloc)
7464 oldreloc_offset = sec->reloc->data_offset;
7465 decl_initializer(type, sec, addr, DIF_FIRST);
7466 if (sec && sec->reloc)
7467 squeeze_multi_relocs(sec, oldreloc_offset);
7468 /* patch flexible array member size back to -1, */
7469 /* for possible subsequent similar declarations */
7470 if (flexible_array)
7471 flexible_array->type.ref->c = -1;
7474 no_alloc:
7475 /* restore parse state if needed */
7476 if (init_str) {
7477 end_macro();
7478 next();
7481 nocode_wanted = saved_nocode_wanted;
7484 /* parse a function defined by symbol 'sym' and generate its code in
7485 'cur_text_section' */
7486 static void gen_function(Sym *sym)
7488 /* Initialize VLA state */
7489 struct scope f = { 0 };
7490 cur_scope = root_scope = &f;
7492 nocode_wanted = 0;
7493 ind = cur_text_section->data_offset;
7494 if (sym->a.aligned) {
7495 size_t newoff = section_add(cur_text_section, 0,
7496 1 << (sym->a.aligned - 1));
7497 gen_fill_nops(newoff - ind);
7499 /* NOTE: we patch the symbol size later */
7500 put_extern_sym(sym, cur_text_section, ind, 0);
7502 funcname = get_tok_str(sym->v, NULL);
7503 func_ind = ind;
7505 /* put debug symbol */
7506 tcc_debug_funcstart(tcc_state, sym);
7507 /* push a dummy symbol to enable local sym storage */
7508 sym_push2(&local_stack, SYM_FIELD, 0, 0);
7509 local_scope = 1; /* for function parameters */
7510 gfunc_prolog(&sym->type);
7511 local_scope = 0;
7512 rsym = 0;
7513 clear_temp_local_var_list();
7514 block(0);
7515 gsym(rsym);
7516 nocode_wanted = 0;
7517 gfunc_epilog();
7518 cur_text_section->data_offset = ind;
7519 /* reset local stack */
7520 sym_pop(&local_stack, NULL, 0);
7521 local_scope = 0;
7522 label_pop(&global_label_stack, NULL, 0);
7523 sym_pop(&all_cleanups, NULL, 0);
7524 /* patch symbol size */
7525 elfsym(sym)->st_size = ind - func_ind;
7526 /* end of function */
7527 tcc_debug_funcend(tcc_state, ind - func_ind);
7528 /* It's better to crash than to generate wrong code */
7529 cur_text_section = NULL;
7530 funcname = ""; /* for safety */
7531 func_vt.t = VT_VOID; /* for safety */
7532 func_var = 0; /* for safety */
7533 ind = 0; /* for safety */
7534 nocode_wanted = 0x80000000;
7535 check_vstack();
7538 static void gen_inline_functions(TCCState *s)
7540 Sym *sym;
7541 int inline_generated, i, ln;
7542 struct InlineFunc *fn;
7544 ln = file->line_num;
7545 /* iterate while inline function are referenced */
7546 do {
7547 inline_generated = 0;
7548 for (i = 0; i < s->nb_inline_fns; ++i) {
7549 fn = s->inline_fns[i];
7550 sym = fn->sym;
7551 if (sym && (sym->c || !(sym->type.t & VT_INLINE))) {
7552 /* the function was used or forced (and then not internal):
7553 generate its code and convert it to a normal function */
7554 fn->sym = NULL;
7555 if (file)
7556 pstrcpy(file->filename, sizeof file->filename, fn->filename);
7557 begin_macro(fn->func_str, 1);
7558 next();
7559 cur_text_section = text_section;
7560 gen_function(sym);
7561 end_macro();
7563 inline_generated = 1;
7566 } while (inline_generated);
7567 file->line_num = ln;
7570 ST_FUNC void free_inline_functions(TCCState *s)
7572 int i;
7573 /* free tokens of unused inline functions */
7574 for (i = 0; i < s->nb_inline_fns; ++i) {
7575 struct InlineFunc *fn = s->inline_fns[i];
7576 if (fn->sym)
7577 tok_str_free(fn->func_str);
7579 dynarray_reset(&s->inline_fns, &s->nb_inline_fns);
7582 /* 'l' is VT_LOCAL or VT_CONST to define default storage type, or VT_CMP
7583 if parsing old style parameter decl list (and FUNC_SYM is set then) */
7584 static int decl0(int l, int is_for_loop_init, Sym *func_sym)
7586 int v, has_init, r;
7587 CType type, btype;
7588 Sym *sym;
7589 AttributeDef ad, adbase;
7591 while (1) {
7592 if (tok == TOK_STATIC_ASSERT) {
7593 int c;
7595 next();
7596 skip('(');
7597 c = expr_const();
7598 skip(',');
7599 if (c == 0)
7600 tcc_error("%s", get_tok_str(tok, &tokc));
7601 next();
7602 skip(')');
7603 skip(';');
7604 continue;
7606 if (!parse_btype(&btype, &adbase)) {
7607 if (is_for_loop_init)
7608 return 0;
7609 /* skip redundant ';' if not in old parameter decl scope */
7610 if (tok == ';' && l != VT_CMP) {
7611 next();
7612 continue;
7614 if (l != VT_CONST)
7615 break;
7616 if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
7617 /* global asm block */
7618 asm_global_instr();
7619 continue;
7621 if (tok >= TOK_UIDENT) {
7622 /* special test for old K&R protos without explicit int
7623 type. Only accepted when defining global data */
7624 btype.t = VT_INT;
7625 } else {
7626 if (tok != TOK_EOF)
7627 expect("declaration");
7628 break;
7631 if (tok == ';') {
7632 if ((btype.t & VT_BTYPE) == VT_STRUCT) {
7633 int v = btype.ref->v;
7634 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) >= SYM_FIRST_ANOM)
7635 tcc_warning("unnamed struct/union that defines no instances");
7636 next();
7637 continue;
7639 if (IS_ENUM(btype.t)) {
7640 next();
7641 continue;
7644 while (1) { /* iterate thru each declaration */
7645 type = btype;
7646 /* If the base type itself was an array type of unspecified
7647 size (like in 'typedef int arr[]; arr x = {1};') then
7648 we will overwrite the unknown size by the real one for
7649 this decl. We need to unshare the ref symbol holding
7650 that size. */
7651 if ((type.t & VT_ARRAY) && type.ref->c < 0) {
7652 type.ref = sym_push(SYM_FIELD, &type.ref->type, 0, type.ref->c);
7654 ad = adbase;
7655 type_decl(&type, &ad, &v, TYPE_DIRECT);
7656 #if 0
7658 char buf[500];
7659 type_to_str(buf, sizeof(buf), &type, get_tok_str(v, NULL));
7660 printf("type = '%s'\n", buf);
7662 #endif
7663 if ((type.t & VT_BTYPE) == VT_FUNC) {
7664 /* if old style function prototype, we accept a
7665 declaration list */
7666 sym = type.ref;
7667 if (sym->f.func_type == FUNC_OLD && l == VT_CONST)
7668 decl0(VT_CMP, 0, sym);
7669 /* always compile 'extern inline' */
7670 if (type.t & VT_EXTERN)
7671 type.t &= ~VT_INLINE;
7674 if (gnu_ext && (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
7675 ad.asm_label = asm_label_instr();
7676 /* parse one last attribute list, after asm label */
7677 parse_attribute(&ad);
7678 #if 0
7679 /* gcc does not allow __asm__("label") with function definition,
7680 but why not ... */
7681 if (tok == '{')
7682 expect(";");
7683 #endif
7686 #ifdef TCC_TARGET_PE
7687 if (ad.a.dllimport || ad.a.dllexport) {
7688 if (type.t & (VT_STATIC|VT_TYPEDEF))
7689 tcc_error("cannot have dll linkage with static or typedef");
7690 if (ad.a.dllimport) {
7691 if ((type.t & VT_BTYPE) == VT_FUNC)
7692 ad.a.dllimport = 0;
7693 else
7694 type.t |= VT_EXTERN;
7697 #endif
7698 if (tok == '{') {
7699 if (l != VT_CONST)
7700 tcc_error("cannot use local functions");
7701 if ((type.t & VT_BTYPE) != VT_FUNC)
7702 expect("function definition");
7704 /* reject abstract declarators in function definition
7705 make old style params without decl have int type */
7706 sym = type.ref;
7707 while ((sym = sym->next) != NULL) {
7708 if (!(sym->v & ~SYM_FIELD))
7709 expect("identifier");
7710 if (sym->type.t == VT_VOID)
7711 sym->type = int_type;
7714 /* put function symbol */
7715 type.t &= ~VT_EXTERN;
7716 sym = external_sym(v, &type, 0, &ad);
7717 /* static inline functions are just recorded as a kind
7718 of macro. Their code will be emitted at the end of
7719 the compilation unit only if they are used */
7720 if (sym->type.t & VT_INLINE) {
7721 struct InlineFunc *fn;
7722 const char *filename;
7724 filename = file ? file->filename : "";
7725 fn = tcc_malloc(sizeof *fn + strlen(filename));
7726 strcpy(fn->filename, filename);
7727 fn->sym = sym;
7728 skip_or_save_block(&fn->func_str);
7729 dynarray_add(&tcc_state->inline_fns,
7730 &tcc_state->nb_inline_fns, fn);
7731 } else {
7732 /* compute text section */
7733 cur_text_section = ad.section;
7734 if (!cur_text_section)
7735 cur_text_section = text_section;
7736 gen_function(sym);
7738 break;
7739 } else {
7740 if (l == VT_CMP) {
7741 /* find parameter in function parameter list */
7742 for (sym = func_sym->next; sym; sym = sym->next)
7743 if ((sym->v & ~SYM_FIELD) == v)
7744 goto found;
7745 tcc_error("declaration for parameter '%s' but no such parameter",
7746 get_tok_str(v, NULL));
7747 found:
7748 if (type.t & VT_STORAGE) /* 'register' is okay */
7749 tcc_error("storage class specified for '%s'",
7750 get_tok_str(v, NULL));
7751 if (sym->type.t != VT_VOID)
7752 tcc_error("redefinition of parameter '%s'",
7753 get_tok_str(v, NULL));
7754 convert_parameter_type(&type);
7755 sym->type = type;
7756 } else if (type.t & VT_TYPEDEF) {
7757 /* save typedefed type */
7758 /* XXX: test storage specifiers ? */
7759 sym = sym_find(v);
7760 if (sym && sym->sym_scope == local_scope) {
7761 if (!is_compatible_types(&sym->type, &type)
7762 || !(sym->type.t & VT_TYPEDEF))
7763 tcc_error("incompatible redefinition of '%s'",
7764 get_tok_str(v, NULL));
7765 sym->type = type;
7766 } else {
7767 sym = sym_push(v, &type, 0, 0);
7769 sym->a = ad.a;
7770 sym->f = ad.f;
7771 } else if ((type.t & VT_BTYPE) == VT_VOID
7772 && !(type.t & VT_EXTERN)) {
7773 tcc_error("declaration of void object");
7774 } else {
7775 r = 0;
7776 if ((type.t & VT_BTYPE) == VT_FUNC) {
7777 /* external function definition */
7778 /* specific case for func_call attribute */
7779 type.ref->f = ad.f;
7780 } else if (!(type.t & VT_ARRAY)) {
7781 /* not lvalue if array */
7782 r |= lvalue_type(type.t);
7784 has_init = (tok == '=');
7785 if (has_init && (type.t & VT_VLA))
7786 tcc_error("variable length array cannot be initialized");
7787 if (((type.t & VT_EXTERN) && (!has_init || l != VT_CONST))
7788 || (type.t & VT_BTYPE) == VT_FUNC
7789 /* as with GCC, uninitialized global arrays with no size
7790 are considered extern: */
7791 || ((type.t & VT_ARRAY) && !has_init
7792 && l == VT_CONST && type.ref->c < 0)
7794 /* external variable or function */
7795 type.t |= VT_EXTERN;
7796 sym = external_sym(v, &type, r, &ad);
7797 if (ad.alias_target) {
7798 ElfSym *esym;
7799 Sym *alias_target;
7800 alias_target = sym_find(ad.alias_target);
7801 esym = elfsym(alias_target);
7802 if (!esym)
7803 tcc_error("unsupported forward __alias__ attribute");
7804 put_extern_sym2(sym, esym->st_shndx, esym->st_value, esym->st_size, 0);
7806 } else {
7807 if (type.t & VT_STATIC)
7808 r |= VT_CONST;
7809 else
7810 r |= l;
7811 if (has_init)
7812 next();
7813 else if (l == VT_CONST)
7814 /* uninitialized global variables may be overridden */
7815 type.t |= VT_EXTERN;
7816 decl_initializer_alloc(&type, &ad, r, has_init, v, l);
7819 if (tok != ',') {
7820 if (is_for_loop_init)
7821 return 1;
7822 skip(';');
7823 break;
7825 next();
7829 return 0;
7832 static void decl(int l)
7834 decl0(l, 0, NULL);
7837 /* ------------------------------------------------------------------------- */
7838 #undef gjmp_addr
7839 #undef gjmp
7840 /* ------------------------------------------------------------------------- */