work on local extern declarations
[tinycc.git] / tccgen.c
blob 61f090e574a31ea2222d86a80b2904ac897cb930
1 /*
2 * TCC - Tiny C Compiler
3 *
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 #include "tcc.h"
22 /********************************************************/
23 /* global variables */
25 /* loc : local variable index
26 ind : output code index
27 rsym: return symbol
28 anon_sym: anonymous symbol index */
30 ST_DATA int rsym, anon_sym, ind, loc;
32 ST_DATA Sym *sym_free_first;
33 ST_DATA void **sym_pools;
34 ST_DATA int nb_sym_pools;
36 ST_DATA Sym *global_stack;
37 ST_DATA Sym *local_stack;
38 ST_DATA Sym *define_stack;
39 ST_DATA Sym *global_label_stack;
40 ST_DATA Sym *local_label_stack;
42 static Sym *all_cleanups, *current_cleanups, *pending_gotos;
43 static int ncleanups;
45 static int local_scope;
46 static int in_sizeof;
47 static int section_sym;
49 ST_DATA int vlas_in_scope; /* number of VLAs that are currently in scope */
50 ST_DATA int vla_sp_root_loc; /* vla_sp_loc for SP before any VLAs were pushed */
51 ST_DATA int vla_sp_loc; /* stack offset of the variable in which the stack pointer is saved whenever the stack pointer gets modified (e.g. by a VLA) */
53 ST_DATA SValue __vstack[1+VSTACK_SIZE], *vtop, *pvtop;
55 ST_DATA int const_wanted; /* true if constant wanted */
56 ST_DATA int nocode_wanted; /* no code generation wanted */
57 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
58 #define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */
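/* A sketch of the nocode_wanted encoding, as suggested by its uses below:
   it counts suppressed-code regions, so any positive value disables code
   generation (and NODATA_WANTED then suppresses static data as well).
   tccgen_compile() sets it to 0x80000000 while at file scope; that high bit,
   tested by STATIC_DATA_WANTED, means "no code, but static data is still
   wanted" (e.g. for global initializers). */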
59 ST_DATA int global_expr; /* true if compound literals must be allocated globally (used during initializer parsing) */
60 ST_DATA CType func_vt; /* current function return type (used by return instruction) */
61 ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
62 ST_DATA int func_vc;
63 ST_DATA int last_line_num, last_ind, func_ind; /* debug last line number and pc */
64 ST_DATA const char *funcname;
65 ST_DATA int g_debug;
67 ST_DATA CType char_pointer_type, func_old_type, int_type, size_type, ptrdiff_type;
69 ST_DATA struct switch_t {
70 struct case_t {
71 int64_t v1, v2;
72 int sym;
73 } **p; int n; /* list of case ranges */
74 int def_sym; /* default symbol */
75 } *cur_switch; /* current switch */
77 #define MAX_TEMP_LOCAL_VARIABLE_NUMBER 0x4
78 /* list of temporary local variables on the stack in the current function */
79 ST_DATA struct temp_local_variable {
80 int location; // offset on the stack (SValue.c.i)
81 short size;
82 short align;
83 } arr_temp_local_vars[MAX_TEMP_LOCAL_VARIABLE_NUMBER];
84 short nb_temp_local_vars;
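/* These slots form a small reuse pool for register-spill locations:
   save_reg_upstack() asks get_temp_local_var() for a stack slot of a given
   size and alignment, free slots are reused, and clear_temp_local_var_list()
   resets the pool (presumably once per function).  Only
   MAX_TEMP_LOCAL_VARIABLE_NUMBER slots are remembered; further requests just
   take fresh stack space. */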
86 /* ------------------------------------------------------------------------- */
88 static void gen_cast(CType *type);
89 static void gen_cast_s(int t);
90 static inline CType *pointed_type(CType *type);
91 static int is_compatible_types(CType *type1, CType *type2);
92 static int parse_btype(CType *type, AttributeDef *ad);
93 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td);
94 static void parse_expr_type(CType *type);
95 static void init_putv(CType *type, Section *sec, unsigned long c);
96 static void decl_initializer(CType *type, Section *sec, unsigned long c, int flags);
97 static void block(int *bsym, Sym *bcl, int *csym, Sym *ccl, int is_expr);
98 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
99 static void decl(int l);
100 static int decl0(int l, int is_for_loop_init, Sym *);
101 static void expr_eq(void);
102 static void vla_runtime_type_size(CType *type, int *a);
103 static void vla_sp_restore(void);
104 static void vla_sp_restore_root(void);
105 static int is_compatible_unqualified_types(CType *type1, CType *type2);
106 static inline int64_t expr_const64(void);
107 static void vpush64(int ty, unsigned long long v);
108 static void vpush(CType *type);
109 static int gvtst(int inv, int t);
110 static void gen_inline_functions(TCCState *s);
111 static void skip_or_save_block(TokenString **str);
112 static void gv_dup(void);
113 static int get_temp_local_var(int size,int align);
114 static void clear_temp_local_var_list();
117 static void reset_local_scope(void)
119 if (current_cleanups)
120 tcc_error("ICE current_cleanups");
121 sym_pop(&all_cleanups, NULL, 0);
122 local_scope = 0;
125 ST_INLN int is_float(int t)
127 int bt;
128 bt = t & VT_BTYPE;
129 return bt == VT_LDOUBLE || bt == VT_DOUBLE || bt == VT_FLOAT || bt == VT_QFLOAT;
132 /* we use our own 'finite' function to avoid potential problems with
133 non-standard math libs */
134 /* XXX: endianness dependent */
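/* How the check below works (assuming IEEE-754 doubles and the little-endian
   layout implied by the XXX above, where p[1] holds the high 32 bits):
   OR-ing with 0x800fffff forces every bit except the 11 exponent bits to 1.
   If the exponent is all ones (Inf/NaN) the result is 0xffffffff, adding 1
   wraps to 0 and the shift yields 0; for any finite value bit 31 survives the
   carry and the function returns 1. */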
135 ST_FUNC int ieee_finite(double d)
137 int p[4];
138 memcpy(p, &d, sizeof(double));
139 return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
142 /* compiling intel long double natively */
143 #if (defined __i386__ || defined __x86_64__) \
144 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
145 # define TCC_IS_NATIVE_387
146 #endif
148 ST_FUNC void test_lvalue(void)
150 if (!(vtop->r & VT_LVAL))
151 expect("lvalue");
154 ST_FUNC void check_vstack(void)
156 if (pvtop != vtop)
157 tcc_error("internal compiler error: vstack leak (%d)", vtop - pvtop);
160 /* ------------------------------------------------------------------------- */
161 /* vstack debugging aid */
163 #if 0
164 void pv (const char *lbl, int a, int b)
166 int i;
167 for (i = a; i < a + b; ++i) {
168 SValue *p = &vtop[-i];
169 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
170 lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
173 #endif
175 /* ------------------------------------------------------------------------- */
176 /* start of translation unit info */
177 ST_FUNC void tcc_debug_start(TCCState *s1)
179 if (s1->do_debug) {
180 char buf[512];
182 /* file info: full path + filename */
183 section_sym = put_elf_sym(symtab_section, 0, 0,
184 ELFW(ST_INFO)(STB_LOCAL, STT_SECTION), 0,
185 text_section->sh_num, NULL);
186 getcwd(buf, sizeof(buf));
187 #ifdef _WIN32
188 normalize_slashes(buf);
189 #endif
190 pstrcat(buf, sizeof(buf), "/");
191 put_stabs_r(buf, N_SO, 0, 0,
192 text_section->data_offset, text_section, section_sym);
193 put_stabs_r(file->filename, N_SO, 0, 0,
194 text_section->data_offset, text_section, section_sym);
195 last_ind = 0;
196 last_line_num = 0;
199 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
200 symbols can be safely used */
201 put_elf_sym(symtab_section, 0, 0,
202 ELFW(ST_INFO)(STB_LOCAL, STT_FILE), 0,
203 SHN_ABS, file->filename);
206 /* put end of translation unit info */
207 ST_FUNC void tcc_debug_end(TCCState *s1)
209 if (!s1->do_debug)
210 return;
211 put_stabs_r(NULL, N_SO, 0, 0,
212 text_section->data_offset, text_section, section_sym);
216 /* generate line number info */
217 ST_FUNC void tcc_debug_line(TCCState *s1)
219 if (!s1->do_debug)
220 return;
221 if ((last_line_num != file->line_num || last_ind != ind)) {
222 put_stabn(N_SLINE, 0, file->line_num, ind - func_ind);
223 last_ind = ind;
224 last_line_num = file->line_num;
228 /* put function symbol */
229 ST_FUNC void tcc_debug_funcstart(TCCState *s1, Sym *sym)
231 char buf[512];
233 if (!s1->do_debug)
234 return;
236 /* stabs info */
237 /* XXX: we put here a dummy type */
238 snprintf(buf, sizeof(buf), "%s:%c1",
239 funcname, sym->type.t & VT_STATIC ? 'f' : 'F');
240 put_stabs_r(buf, N_FUN, 0, file->line_num, 0,
241 cur_text_section, sym->c);
242 /* //gr gdb wants a line at the function */
243 put_stabn(N_SLINE, 0, file->line_num, 0);
245 last_ind = 0;
246 last_line_num = 0;
249 /* put function size */
250 ST_FUNC void tcc_debug_funcend(TCCState *s1, int size)
252 if (!s1->do_debug)
253 return;
254 put_stabn(N_FUN, 0, 0, size);
257 /* ------------------------------------------------------------------------- */
258 ST_FUNC int tccgen_compile(TCCState *s1)
260 cur_text_section = NULL;
261 funcname = "";
262 anon_sym = SYM_FIRST_ANOM;
263 section_sym = 0;
264 const_wanted = 0;
265 nocode_wanted = 0x80000000;
266 local_scope = 0;
268 /* define some often used types */
269 int_type.t = VT_INT;
270 char_pointer_type.t = VT_BYTE;
271 mk_pointer(&char_pointer_type);
272 #if PTR_SIZE == 4
273 size_type.t = VT_INT | VT_UNSIGNED;
274 ptrdiff_type.t = VT_INT;
275 #elif LONG_SIZE == 4
276 size_type.t = VT_LLONG | VT_UNSIGNED;
277 ptrdiff_type.t = VT_LLONG;
278 #else
279 size_type.t = VT_LONG | VT_LLONG | VT_UNSIGNED;
280 ptrdiff_type.t = VT_LONG | VT_LLONG;
281 #endif
282 func_old_type.t = VT_FUNC;
283 func_old_type.ref = sym_push(SYM_FIELD, &int_type, 0, 0);
284 func_old_type.ref->f.func_call = FUNC_CDECL;
285 func_old_type.ref->f.func_type = FUNC_OLD;
287 tcc_debug_start(s1);
289 #ifdef TCC_TARGET_ARM
290 arm_init(s1);
291 #endif
293 #ifdef INC_DEBUG
294 printf("%s: **** new file\n", file->filename);
295 #endif
297 parse_flags = PARSE_FLAG_PREPROCESS | PARSE_FLAG_TOK_NUM | PARSE_FLAG_TOK_STR;
298 next();
299 decl(VT_CONST);
300 gen_inline_functions(s1);
301 check_vstack();
302 /* end of translation unit info */
303 tcc_debug_end(s1);
304 return 0;
307 /* ------------------------------------------------------------------------- */
308 ST_FUNC ElfSym *elfsym(Sym *s)
310 if (!s || !s->c)
311 return NULL;
312 return &((ElfSym *)symtab_section->data)[s->c];
315 /* apply storage attributes to Elf symbol */
316 ST_FUNC void update_storage(Sym *sym)
318 ElfSym *esym;
319 int sym_bind, old_sym_bind;
321 esym = elfsym(sym);
322 if (!esym)
323 return;
325 if (sym->a.visibility)
326 esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1))
327 | sym->a.visibility;
329 if (sym->type.t & (VT_STATIC | VT_INLINE))
330 sym_bind = STB_LOCAL;
331 else if (sym->a.weak)
332 sym_bind = STB_WEAK;
333 else
334 sym_bind = STB_GLOBAL;
335 old_sym_bind = ELFW(ST_BIND)(esym->st_info);
336 if (sym_bind != old_sym_bind) {
337 esym->st_info = ELFW(ST_INFO)(sym_bind, ELFW(ST_TYPE)(esym->st_info));
340 #ifdef TCC_TARGET_PE
341 if (sym->a.dllimport)
342 esym->st_other |= ST_PE_IMPORT;
343 if (sym->a.dllexport)
344 esym->st_other |= ST_PE_EXPORT;
345 #endif
347 #if 0
348 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
349 get_tok_str(sym->v, NULL),
350 sym_bind == STB_WEAK ? 'w' : sym_bind == STB_LOCAL ? 'l' : 'g',
351 sym->a.visibility,
352 sym->a.dllexport,
353 sym->a.dllimport
355 #endif
358 /* ------------------------------------------------------------------------- */
359 /* update sym->c so that it points to an external symbol in section
360 'section' with value 'value' */
362 ST_FUNC void put_extern_sym2(Sym *sym, int sh_num,
363 addr_t value, unsigned long size,
364 int can_add_underscore)
366 int sym_type, sym_bind, info, other, t;
367 ElfSym *esym;
368 const char *name;
369 char buf1[256];
370 #ifdef CONFIG_TCC_BCHECK
371 char buf[32];
372 #endif
374 if (!sym->c) {
375 name = get_tok_str(sym->v, NULL);
376 #ifdef CONFIG_TCC_BCHECK
377 if (tcc_state->do_bounds_check) {
378 /* XXX: avoid doing that for statics ? */
379 /* if bound checking is activated, we change some function
380 names by adding the "__bound" prefix */
381 switch(sym->v) {
382 #ifdef TCC_TARGET_PE
383 /* XXX: we rely only on malloc hooks */
384 case TOK_malloc:
385 case TOK_free:
386 case TOK_realloc:
387 case TOK_memalign:
388 case TOK_calloc:
389 #endif
390 case TOK_memcpy:
391 case TOK_memmove:
392 case TOK_memset:
393 case TOK_strlen:
394 case TOK_strcpy:
395 case TOK_alloca:
396 strcpy(buf, "__bound_");
397 strcat(buf, name);
398 name = buf;
399 break;
402 #endif
403 t = sym->type.t;
404 if ((t & VT_BTYPE) == VT_FUNC) {
405 sym_type = STT_FUNC;
406 } else if ((t & VT_BTYPE) == VT_VOID) {
407 sym_type = STT_NOTYPE;
408 } else {
409 sym_type = STT_OBJECT;
411 if (t & (VT_STATIC | VT_INLINE))
412 sym_bind = STB_LOCAL;
413 else
414 sym_bind = STB_GLOBAL;
415 other = 0;
416 #ifdef TCC_TARGET_PE
417 if (sym_type == STT_FUNC && sym->type.ref) {
418 Sym *ref = sym->type.ref;
419 if (ref->a.nodecorate) {
420 can_add_underscore = 0;
422 if (ref->f.func_call == FUNC_STDCALL && can_add_underscore) {
423 sprintf(buf1, "_%s@%d", name, ref->f.func_args * PTR_SIZE);
424 name = buf1;
425 other |= ST_PE_STDCALL;
426 can_add_underscore = 0;
429 #endif
430 if (tcc_state->leading_underscore && can_add_underscore) {
431 buf1[0] = '_';
432 pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
433 name = buf1;
435 if (sym->asm_label)
436 name = get_tok_str(sym->asm_label, NULL);
437 info = ELFW(ST_INFO)(sym_bind, sym_type);
438 sym->c = put_elf_sym(symtab_section, value, size, info, other, sh_num, name);
439 } else {
440 esym = elfsym(sym);
441 esym->st_value = value;
442 esym->st_size = size;
443 esym->st_shndx = sh_num;
445 update_storage(sym);
448 ST_FUNC void put_extern_sym(Sym *sym, Section *section,
449 addr_t value, unsigned long size)
451 int sh_num = section ? section->sh_num : SHN_UNDEF;
452 put_extern_sym2(sym, sh_num, value, size, 1);
455 /* add a new relocation entry to symbol 'sym' in section 's' */
456 ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
457 addr_t addend)
459 int c = 0;
461 if (nocode_wanted && s == cur_text_section)
462 return;
464 if (sym) {
465 if (0 == sym->c)
466 put_extern_sym(sym, NULL, 0, 0);
467 c = sym->c;
470 /* now we can add ELF relocation info */
471 put_elf_reloca(symtab_section, s, offset, type, c, addend);
474 #if PTR_SIZE == 4
475 ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
477 greloca(s, sym, offset, type, 0);
479 #endif
481 /* ------------------------------------------------------------------------- */
482 /* symbol allocator */
483 static Sym *__sym_malloc(void)
485 Sym *sym_pool, *sym, *last_sym;
486 int i;
488 sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
489 dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);
491 last_sym = sym_free_first;
492 sym = sym_pool;
493 for(i = 0; i < SYM_POOL_NB; i++) {
494 sym->next = last_sym;
495 last_sym = sym;
496 sym++;
498 sym_free_first = last_sym;
499 return last_sym;
502 static inline Sym *sym_malloc(void)
504 Sym *sym;
505 #ifndef SYM_DEBUG
506 sym = sym_free_first;
507 if (!sym)
508 sym = __sym_malloc();
509 sym_free_first = sym->next;
510 return sym;
511 #else
512 sym = tcc_malloc(sizeof(Sym));
513 return sym;
514 #endif
517 ST_INLN void sym_free(Sym *sym)
519 #ifndef SYM_DEBUG
520 sym->next = sym_free_first;
521 sym_free_first = sym;
522 #else
523 tcc_free(sym);
524 #endif
527 /* push, without hashing */
528 ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, int c)
530 Sym *s;
532 s = sym_malloc();
533 memset(s, 0, sizeof *s);
534 s->v = v;
535 s->type.t = t;
536 s->c = c;
537 /* add in stack */
538 s->prev = *ps;
539 *ps = s;
540 return s;
543 /* find a symbol and return its associated structure. 's' is the top
544 of the symbol stack */
545 ST_FUNC Sym *sym_find2(Sym *s, int v)
547 while (s) {
548 if (s->v == v)
549 return s;
550 else if (s->v == -1)
551 return NULL;
552 s = s->prev;
554 return NULL;
557 /* structure lookup */
558 ST_INLN Sym *struct_find(int v)
560 v -= TOK_IDENT;
561 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
562 return NULL;
563 return table_ident[v]->sym_struct;
566 /* find an identifier */
567 ST_INLN Sym *sym_find(int v)
569 v -= TOK_IDENT;
570 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
571 return NULL;
572 return table_ident[v]->sym_identifier;
575 static int sym_scope(Sym *s)
577 if (IS_ENUM_VAL (s->type.t))
578 return s->type.ref->sym_scope;
579 else
580 return s->sym_scope;
583 /* push a given symbol on the symbol stack */
584 ST_FUNC Sym *sym_push(int v, CType *type, int r, int c)
586 Sym *s, **ps;
587 TokenSym *ts;
589 if (local_stack)
590 ps = &local_stack;
591 else
592 ps = &global_stack;
593 s = sym_push2(ps, v, type->t, c);
594 s->type.ref = type->ref;
595 s->r = r;
596 /* don't record fields or anonymous symbols */
597 /* XXX: simplify */
598 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
599 /* record symbol in token array */
600 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
601 if (v & SYM_STRUCT)
602 ps = &ts->sym_struct;
603 else
604 ps = &ts->sym_identifier;
605 s->prev_tok = *ps;
606 *ps = s;
607 s->sym_scope = local_scope;
608 if (s->prev_tok && sym_scope(s->prev_tok) == s->sym_scope)
609 tcc_error("redeclaration of '%s'",
610 get_tok_str(v & ~SYM_STRUCT, NULL));
612 return s;
615 /* push a global identifier */
616 ST_FUNC Sym *global_identifier_push(int v, int t, int c)
618 Sym *s, **ps;
619 s = sym_push2(&global_stack, v, t, c);
620 s->r = VT_CONST | VT_SYM;
621 /* don't record anonymous symbol */
622 if (v < SYM_FIRST_ANOM) {
623 ps = &table_ident[v - TOK_IDENT]->sym_identifier;
624 /* modify the top most local identifier, so that sym_identifier will
625 point to 's' when popped; happens when called from inline asm */
626 while (*ps != NULL && (*ps)->sym_scope)
627 ps = &(*ps)->prev_tok;
628 s->prev_tok = *ps;
629 *ps = s;
631 return s;
634 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
635 pop them yet from the list, but do remove them from the token array. */
636 ST_FUNC void sym_pop(Sym **ptop, Sym *b, int keep)
638 Sym *s, *ss, **ps;
639 TokenSym *ts;
640 int v;
642 s = *ptop;
643 while(s != b) {
644 ss = s->prev;
645 v = s->v;
646 /* remove symbol in token array */
647 /* XXX: simplify */
648 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
649 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
650 if (v & SYM_STRUCT)
651 ps = &ts->sym_struct;
652 else
653 ps = &ts->sym_identifier;
654 *ps = s->prev_tok;
656 if (!keep)
657 sym_free(s);
658 s = ss;
660 if (!keep)
661 *ptop = b;
664 /* ------------------------------------------------------------------------- */
666 static void vsetc(CType *type, int r, CValue *vc)
668 int v;
670 if (vtop >= vstack + (VSTACK_SIZE - 1))
671 tcc_error("memory full (vstack)");
672 /* cannot leave cpu flags in place if other instructions are generated. Also
673 avoid leaving VT_JMP anywhere except on the top of the stack
674 because it would complicate the code generator.
676 Don't do this when nocode_wanted. vtop might come from
677 !nocode_wanted regions (see 88_codeopt.c) and transforming
678 it to a register without actually generating code is wrong
679 as their value might still be used for real. All values
680 we push under nocode_wanted will eventually be popped
681 again, so that the VT_CMP/VT_JMP value will be in vtop
682 when code is unsuppressed again.
684 Same logic below in vswap(); */
685 if (vtop >= vstack && !nocode_wanted) {
686 v = vtop->r & VT_VALMASK;
687 if (v == VT_CMP || (v & ~1) == VT_JMP)
688 gv(RC_INT);
691 vtop++;
692 vtop->type = *type;
693 vtop->r = r;
694 vtop->r2 = VT_CONST;
695 vtop->c = *vc;
696 vtop->sym = NULL;
699 ST_FUNC void vswap(void)
701 SValue tmp;
702 /* cannot vswap cpu flags. See comment at vsetc() above */
703 if (vtop >= vstack && !nocode_wanted) {
704 int v = vtop->r & VT_VALMASK;
705 if (v == VT_CMP || (v & ~1) == VT_JMP)
706 gv(RC_INT);
708 tmp = vtop[0];
709 vtop[0] = vtop[-1];
710 vtop[-1] = tmp;
713 /* pop stack value */
714 ST_FUNC void vpop(void)
716 int v;
717 v = vtop->r & VT_VALMASK;
718 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
719 /* for x86, we need to pop the FP stack */
720 if (v == TREG_ST0) {
721 o(0xd8dd); /* fstp %st(0) */
722 } else
723 #endif
724 if (v == VT_JMP || v == VT_JMPI) {
725 /* need to put correct jump if && or || without test */
726 gsym(vtop->c.i);
728 vtop--;
731 /* push constant of type "type" with useless value */
732 ST_FUNC void vpush(CType *type)
734 vset(type, VT_CONST, 0);
737 /* push integer constant */
738 ST_FUNC void vpushi(int v)
740 CValue cval;
741 cval.i = v;
742 vsetc(&int_type, VT_CONST, &cval);
745 /* push a pointer sized constant */
746 static void vpushs(addr_t v)
748 CValue cval;
749 cval.i = v;
750 vsetc(&size_type, VT_CONST, &cval);
753 /* push arbitrary 64bit constant */
754 ST_FUNC void vpush64(int ty, unsigned long long v)
756 CValue cval;
757 CType ctype;
758 ctype.t = ty;
759 ctype.ref = NULL;
760 cval.i = v;
761 vsetc(&ctype, VT_CONST, &cval);
764 /* push long long constant */
765 static inline void vpushll(long long v)
767 vpush64(VT_LLONG, v);
770 ST_FUNC void vset(CType *type, int r, int v)
772 CValue cval;
774 cval.i = v;
775 vsetc(type, r, &cval);
778 static void vseti(int r, int v)
780 CType type;
781 type.t = VT_INT;
782 type.ref = NULL;
783 vset(&type, r, v);
786 ST_FUNC void vpushv(SValue *v)
788 if (vtop >= vstack + (VSTACK_SIZE - 1))
789 tcc_error("memory full (vstack)");
790 vtop++;
791 *vtop = *v;
794 static void vdup(void)
796 vpushv(vtop);
799 /* rotate n first stack elements to the bottom
800 I1 ... In -> I2 ... In I1 [top is right] */
802 ST_FUNC void vrotb(int n)
804 int i;
805 SValue tmp;
807 tmp = vtop[-n + 1];
808 for(i=-n+1;i!=0;i++)
809 vtop[i] = vtop[i+1];
810 vtop[0] = tmp;
813 /* rotate the n elements before entry e towards the top
814 I1 ... In ... -> In I1 ... I(n-1) ... [top is right] */
816 ST_FUNC void vrote(SValue *e, int n)
818 int i;
819 SValue tmp;
821 tmp = *e;
822 for(i = 0;i < n - 1; i++)
823 e[-i] = e[-i - 1];
824 e[-n + 1] = tmp;
827 /* rotate n first stack elements to the top
828 I1 ... In -> In I1 ... I(n-1) [top is right] */
830 ST_FUNC void vrott(int n)
832 vrote(vtop, n);
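/* A small worked example for the three rotations above, with the value stack
   written bottom-to-top: after pushing A, B, C (so C is vtop),
     vrotb(3) leaves B C A (the deepest of the three moves to the top) and
     vrott(3) leaves C A B (the top moves below the other two);
   vrotb(n) and vrott(n) undo each other for the same n. */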
835 /* push a symbol value of TYPE */
836 static inline void vpushsym(CType *type, Sym *sym)
838 CValue cval;
839 cval.i = 0;
840 vsetc(type, VT_CONST | VT_SYM, &cval);
841 vtop->sym = sym;
844 /* Return a static symbol pointing to a section */
845 ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
847 int v;
848 Sym *sym;
850 v = anon_sym++;
851 sym = sym_push(v, type, VT_CONST | VT_SYM, 0);
852 sym->type.t |= VT_STATIC;
853 put_extern_sym(sym, sec, offset, size);
854 return sym;
857 /* push a reference to a section offset by adding a dummy symbol */
858 static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
860 vpushsym(type, get_sym_ref(type, sec, offset, size));
863 /* define a new external reference to a symbol 'v' with type 'type' */
864 ST_FUNC Sym *external_global_sym(int v, CType *type)
866 Sym *s;
868 s = sym_find(v);
869 if (!s) {
870 /* push forward reference */
871 s = global_identifier_push(v, type->t | VT_EXTERN, 0);
872 s->type.ref = type->ref;
873 } else if (IS_ASM_SYM(s)) {
874 s->type.t = type->t | (s->type.t & VT_EXTERN);
875 s->type.ref = type->ref;
876 update_storage(s);
878 return s;
881 /* Merge symbol attributes. */
882 static void merge_symattr(struct SymAttr *sa, struct SymAttr *sa1)
884 if (sa1->aligned && !sa->aligned)
885 sa->aligned = sa1->aligned;
886 sa->packed |= sa1->packed;
887 sa->weak |= sa1->weak;
888 if (sa1->visibility != STV_DEFAULT) {
889 int vis = sa->visibility;
890 if (vis == STV_DEFAULT
891 || vis > sa1->visibility)
892 vis = sa1->visibility;
893 sa->visibility = vis;
895 sa->dllexport |= sa1->dllexport;
896 sa->nodecorate |= sa1->nodecorate;
897 sa->dllimport |= sa1->dllimport;
900 /* Merge function attributes. */
901 static void merge_funcattr(struct FuncAttr *fa, struct FuncAttr *fa1)
903 if (fa1->func_call && !fa->func_call)
904 fa->func_call = fa1->func_call;
905 if (fa1->func_type && !fa->func_type)
906 fa->func_type = fa1->func_type;
907 if (fa1->func_args && !fa->func_args)
908 fa->func_args = fa1->func_args;
911 /* Merge attributes. */
912 static void merge_attr(AttributeDef *ad, AttributeDef *ad1)
914 merge_symattr(&ad->a, &ad1->a);
915 merge_funcattr(&ad->f, &ad1->f);
917 if (ad1->section)
918 ad->section = ad1->section;
919 if (ad1->alias_target)
920 ad->alias_target = ad1->alias_target;
921 if (ad1->asm_label)
922 ad->asm_label = ad1->asm_label;
923 if (ad1->attr_mode)
924 ad->attr_mode = ad1->attr_mode;
927 /* Merge some type attributes. */
928 static void patch_type(Sym *sym, CType *type)
930 if (!(type->t & VT_EXTERN) || IS_ENUM_VAL(sym->type.t)) {
931 if (!(sym->type.t & VT_EXTERN))
932 tcc_error("redefinition of '%s'", get_tok_str(sym->v, NULL));
933 sym->type.t &= ~VT_EXTERN;
936 if (IS_ASM_SYM(sym)) {
937 /* stay static if both are static */
938 sym->type.t = type->t & (sym->type.t | ~VT_STATIC);
939 sym->type.ref = type->ref;
942 if (!is_compatible_types(&sym->type, type)) {
943 tcc_error("incompatible types for redefinition of '%s'",
944 get_tok_str(sym->v, NULL));
946 } else if ((sym->type.t & VT_BTYPE) == VT_FUNC) {
947 int static_proto = sym->type.t & VT_STATIC;
948 /* warn if static follows non-static function declaration */
949 if ((type->t & VT_STATIC) && !static_proto
950 /* XXX this test for inline shouldn't be here. Until we
951 implement gnu-inline mode again it silences a warning for
952 mingw caused by our workarounds. */
953 && !((type->t | sym->type.t) & VT_INLINE))
954 tcc_warning("static storage ignored for redefinition of '%s'",
955 get_tok_str(sym->v, NULL));
957 /* set 'inline' if both agree or if one has static */
958 if ((type->t | sym->type.t) & VT_INLINE) {
959 if (!((type->t ^ sym->type.t) & VT_INLINE)
960 || ((type->t | sym->type.t) & VT_STATIC))
961 static_proto |= VT_INLINE;
964 if (0 == (type->t & VT_EXTERN)) {
965 /* put complete type, use static from prototype */
966 sym->type.t = (type->t & ~(VT_STATIC|VT_INLINE)) | static_proto;
967 sym->type.ref = type->ref;
968 } else {
969 sym->type.t &= ~VT_INLINE | static_proto;
972 if (sym->type.ref->f.func_type == FUNC_OLD
973 && type->ref->f.func_type != FUNC_OLD) {
974 sym->type.ref = type->ref;
977 } else {
978 if ((sym->type.t & VT_ARRAY) && type->ref->c >= 0) {
979 /* set array size if it was omitted in extern declaration */
980 sym->type.ref->c = type->ref->c;
982 if ((type->t ^ sym->type.t) & VT_STATIC)
983 tcc_warning("storage mismatch for redefinition of '%s'",
984 get_tok_str(sym->v, NULL));
988 /* Merge some storage attributes. */
989 static void patch_storage(Sym *sym, AttributeDef *ad, CType *type)
991 if (type)
992 patch_type(sym, type);
994 #ifdef TCC_TARGET_PE
995 if (sym->a.dllimport != ad->a.dllimport)
996 tcc_error("incompatible dll linkage for redefinition of '%s'",
997 get_tok_str(sym->v, NULL));
998 #endif
999 merge_symattr(&sym->a, &ad->a);
1000 if (ad->asm_label)
1001 sym->asm_label = ad->asm_label;
1002 update_storage(sym);
1005 /* copy sym to other stack */
1006 static Sym *sym_copy(Sym *s0, Sym **ps)
1008 Sym *s;
1009 s = sym_malloc(), *s = *s0;
1010 s->prev = *ps, *ps = s;
1011 if (s->v < SYM_FIRST_ANOM) {
1012 ps = &table_ident[s->v - TOK_IDENT]->sym_identifier;
1013 s->prev_tok = *ps, *ps = s;
1015 return s;
1018 /* copy a list of syms */
1019 static void sym_copy_ref(Sym *s0, Sym **ps)
1021 Sym *s, **sp = &s0->type.ref;
1022 for (s = *sp, *sp = NULL; s; s = s->next)
1023 sp = &(*sp = sym_copy(s, ps))->next;
1026 /* define a new external reference to a symbol 'v' */
1027 static Sym *external_sym(int v, CType *type, int r, AttributeDef *ad)
1029 Sym *s; int bt;
1031 /* look for global symbol */
1032 s = sym_find(v);
1033 while (s && s->sym_scope)
1034 s = s->prev_tok;
1036 if (!s) {
1037 /* push forward reference */
1038 s = global_identifier_push(v, type->t, 0);
1039 s->r |= r;
1040 s->a = ad->a;
1041 s->asm_label = ad->asm_label;
1042 s->type.ref = type->ref;
1043 bt = s->type.t & (VT_BTYPE|VT_ARRAY);
1044 /* copy type to the global stack also */
1045 if (local_scope && (bt == VT_FUNC || (bt & VT_ARRAY)))
1046 sym_copy_ref(s, &global_stack);
1047 } else {
1048 patch_storage(s, ad, type);
1049 bt = s->type.t & VT_BTYPE;
1051 /* push variables to local scope if any */
1052 if (local_stack && bt != VT_FUNC)
1053 s = sym_copy(s, &local_stack);
1054 return s;
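/* Note on block-scope extern declarations, which the lookup above is written
   for: symbols with a non-zero sym_scope are skipped, so an "extern" inside a
   function always resolves to (or creates) the file-scope symbol and has its
   storage patched there; for objects (not functions) a copy is then pushed on
   the local stack so the name pops with the enclosing block while the global
   symbol keeps the merged type. */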
1057 /* push a reference to global symbol v */
1058 ST_FUNC void vpush_global_sym(CType *type, int v)
1060 vpushsym(type, external_global_sym(v, type));
1063 /* save registers up to (vtop - n) stack entry */
1064 ST_FUNC void save_regs(int n)
1066 SValue *p, *p1;
1067 for(p = vstack, p1 = vtop - n; p <= p1; p++)
1068 save_reg(p->r);
1071 /* save r to the memory stack, and mark it as being free */
1072 ST_FUNC void save_reg(int r)
1074 save_reg_upstack(r, 0);
1077 /* save r to the memory stack, and mark it as being free,
1078 if seen up to (vtop - n) stack entry */
1079 ST_FUNC void save_reg_upstack(int r, int n)
1081 int l, saved, size, align;
1082 SValue *p, *p1, sv;
1083 CType *type;
1085 if ((r &= VT_VALMASK) >= VT_CONST)
1086 return;
1087 if (nocode_wanted)
1088 return;
1090 /* modify all stack values */
1091 saved = 0;
1092 l = 0;
1093 for(p = vstack, p1 = vtop - n; p <= p1; p++) {
1094 if ((p->r & VT_VALMASK) == r ||
1095 ((p->type.t & VT_BTYPE) == VT_LLONG && (p->r2 & VT_VALMASK) == r)) {
1096 /* must save value on stack if not already done */
1097 if (!saved) {
1098 /* NOTE: must reload 'r' because r might be equal to r2 */
1099 r = p->r & VT_VALMASK;
1100 /* store register in the stack */
1101 type = &p->type;
1102 if ((p->r & VT_LVAL) ||
1103 (!is_float(type->t) && (type->t & VT_BTYPE) != VT_LLONG))
1104 #if PTR_SIZE == 8
1105 type = &char_pointer_type;
1106 #else
1107 type = &int_type;
1108 #endif
1109 size = type_size(type, &align);
1110 l=get_temp_local_var(size,align);
1111 sv.type.t = type->t;
1112 sv.r = VT_LOCAL | VT_LVAL;
1113 sv.c.i = l;
1114 store(r, &sv);
1115 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1116 /* x86 specific: need to pop fp register ST0 if saved */
1117 if (r == TREG_ST0) {
1118 o(0xd8dd); /* fstp %st(0) */
1120 #endif
1121 #if PTR_SIZE == 4
1122 /* special long long case */
1123 if ((type->t & VT_BTYPE) == VT_LLONG) {
1124 sv.c.i += 4;
1125 store(p->r2, &sv);
1127 #endif
1128 saved = 1;
1130 /* mark that stack entry as being saved on the stack */
1131 if (p->r & VT_LVAL) {
1132 /* also clear the bounded flag because the
1133 relocation address of the function was stored in
1134 p->c.i */
1135 p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
1136 } else {
1137 p->r = lvalue_type(p->type.t) | VT_LOCAL;
1139 p->r2 = VT_CONST;
1140 p->c.i = l;
1145 #ifdef TCC_TARGET_ARM
1146 /* find a register of class 'rc2' with at most one reference on stack.
1147 * If none, call get_reg(rc) */
1148 ST_FUNC int get_reg_ex(int rc, int rc2)
1150 int r;
1151 SValue *p;
1153 for(r=0;r<NB_REGS;r++) {
1154 if (reg_classes[r] & rc2) {
1155 int n;
1156 n=0;
1157 for(p = vstack; p <= vtop; p++) {
1158 if ((p->r & VT_VALMASK) == r ||
1159 (p->r2 & VT_VALMASK) == r)
1160 n++;
1162 if (n <= 1)
1163 return r;
1166 return get_reg(rc);
1168 #endif
1170 /* find a free register of class 'rc'. If none, save one register */
1171 ST_FUNC int get_reg(int rc)
1173 int r;
1174 SValue *p;
1176 /* find a free register */
1177 for(r=0;r<NB_REGS;r++) {
1178 if (reg_classes[r] & rc) {
1179 if (nocode_wanted)
1180 return r;
1181 for(p=vstack;p<=vtop;p++) {
1182 if ((p->r & VT_VALMASK) == r ||
1183 (p->r2 & VT_VALMASK) == r)
1184 goto notfound;
1186 return r;
1188 notfound: ;
1191 /* no register left : free the first one on the stack (VERY
1192 IMPORTANT to start from the bottom to ensure that we don't
1193 spill registers used in gen_opi()) */
1194 for(p=vstack;p<=vtop;p++) {
1195 /* look at second register (if long long) */
1196 r = p->r2 & VT_VALMASK;
1197 if (r < VT_CONST && (reg_classes[r] & rc))
1198 goto save_found;
1199 r = p->r & VT_VALMASK;
1200 if (r < VT_CONST && (reg_classes[r] & rc)) {
1201 save_found:
1202 save_reg(r);
1203 return r;
1206 /* Should never come here */
1207 return -1;
1210 /* find a free temporary local variable matching the requested size and alignment and return its offset on the stack. If none is free, allocate a new temporary stack variable. */
1211 static int get_temp_local_var(int size,int align){
1212 int i;
1213 struct temp_local_variable *temp_var;
1214 int found_var;
1215 SValue *p;
1216 int r;
1217 char free;
1218 char found;
1219 found=0;
1220 for(i=0;i<nb_temp_local_vars;i++){
1221 temp_var=&arr_temp_local_vars[i];
1222 if(temp_var->size<size||align!=temp_var->align){
1223 continue;
1225 /*check if temp_var is free*/
1226 free=1;
1227 for(p=vstack;p<=vtop;p++) {
1228 r=p->r&VT_VALMASK;
1229 if(r==VT_LOCAL||r==VT_LLOCAL){
1230 if(p->c.i==temp_var->location){
1231 free=0;
1232 break;
1236 if(free){
1237 found_var=temp_var->location;
1238 found=1;
1239 break;
1242 if(!found){
1243 loc = (loc - size) & -align;
1244 if(nb_temp_local_vars<MAX_TEMP_LOCAL_VARIABLE_NUMBER){
1245 temp_var=&arr_temp_local_vars[i];
1246 temp_var->location=loc;
1247 temp_var->size=size;
1248 temp_var->align=align;
1249 nb_temp_local_vars++;
1251 found_var=loc;
1253 return found_var;
1256 static void clear_temp_local_var_list(){
1257 nb_temp_local_vars=0;
1260 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1261 if needed */
1262 static void move_reg(int r, int s, int t)
1264 SValue sv;
1266 if (r != s) {
1267 save_reg(r);
1268 sv.type.t = t;
1269 sv.type.ref = NULL;
1270 sv.r = s;
1271 sv.c.i = 0;
1272 load(r, &sv);
1276 /* get address of vtop (vtop MUST BE an lvalue) */
1277 ST_FUNC void gaddrof(void)
1279 vtop->r &= ~VT_LVAL;
1280 /* tricky: if saved lvalue, then we can go back to lvalue */
1281 if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
1282 vtop->r = (vtop->r & ~(VT_VALMASK | VT_LVAL_TYPE)) | VT_LOCAL | VT_LVAL;
1287 #ifdef CONFIG_TCC_BCHECK
1288 /* generate lvalue bound code */
1289 static void gbound(void)
1291 int lval_type;
1292 CType type1;
1294 vtop->r &= ~VT_MUSTBOUND;
1295 /* if lvalue, then use checking code before dereferencing */
1296 if (vtop->r & VT_LVAL) {
1297 /* if not VT_BOUNDED value, then make one */
1298 if (!(vtop->r & VT_BOUNDED)) {
1299 lval_type = vtop->r & (VT_LVAL_TYPE | VT_LVAL);
1300 /* must save type because we must set it to int to get pointer */
1301 type1 = vtop->type;
1302 vtop->type.t = VT_PTR;
1303 gaddrof();
1304 vpushi(0);
1305 gen_bounded_ptr_add();
1306 vtop->r |= lval_type;
1307 vtop->type = type1;
1309 /* then check for dereferencing */
1310 gen_bounded_ptr_deref();
1313 #endif
1315 static void incr_bf_adr(int o)
1317 vtop->type = char_pointer_type;
1318 gaddrof();
1319 vpushi(o);
1320 gen_op('+');
1321 vtop->type.t = (vtop->type.t & ~(VT_BTYPE|VT_DEFSIGN))
1322 | (VT_BYTE|VT_UNSIGNED);
1323 vtop->r = (vtop->r & ~VT_LVAL_TYPE)
1324 | (VT_LVAL_BYTE|VT_LVAL_UNSIGNED|VT_LVAL);
1327 /* single-byte load mode for packed or otherwise unaligned bitfields */
1328 static void load_packed_bf(CType *type, int bit_pos, int bit_size)
1330 int n, o, bits;
1331 save_reg_upstack(vtop->r, 1);
1332 vpush64(type->t & VT_BTYPE, 0); // B X
1333 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
1334 do {
1335 vswap(); // X B
1336 incr_bf_adr(o);
1337 vdup(); // X B B
1338 n = 8 - bit_pos;
1339 if (n > bit_size)
1340 n = bit_size;
1341 if (bit_pos)
1342 vpushi(bit_pos), gen_op(TOK_SHR), bit_pos = 0; // X B Y
1343 if (n < 8)
1344 vpushi((1 << n) - 1), gen_op('&');
1345 gen_cast(type);
1346 if (bits)
1347 vpushi(bits), gen_op(TOK_SHL);
1348 vrotb(3); // B Y X
1349 gen_op('|'); // B X
1350 bits += n, bit_size -= n, o = 1;
1351 } while (bit_size);
1352 vswap(), vpop();
1353 if (!(type->t & VT_UNSIGNED)) {
1354 n = ((type->t & VT_BTYPE) == VT_LLONG ? 64 : 32) - bits;
1355 vpushi(n), gen_op(TOK_SHL);
1356 vpushi(n), gen_op(TOK_SAR);
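/* A worked example of the loop above (as a sketch): for a signed 10-bit field
   at bit_pos 13, o starts as byte offset 1 and bit_pos becomes 5, so the first
   pass takes the upper 3 bits of byte 1, the second pass takes the low 7 bits
   of byte 2 shifted left by 3, and the final SHL/SAR pair sign-extends the
   assembled 10-bit value. */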
1360 /* single-byte store mode for packed or otherwise unaligned bitfields */
1361 static void store_packed_bf(int bit_pos, int bit_size)
1363 int bits, n, o, m, c;
1365 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1366 vswap(); // X B
1367 save_reg_upstack(vtop->r, 1);
1368 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
1369 do {
1370 incr_bf_adr(o); // X B
1371 vswap(); //B X
1372 c ? vdup() : gv_dup(); // B V X
1373 vrott(3); // X B V
1374 if (bits)
1375 vpushi(bits), gen_op(TOK_SHR);
1376 if (bit_pos)
1377 vpushi(bit_pos), gen_op(TOK_SHL);
1378 n = 8 - bit_pos;
1379 if (n > bit_size)
1380 n = bit_size;
1381 if (n < 8) {
1382 m = ((1 << n) - 1) << bit_pos;
1383 vpushi(m), gen_op('&'); // X B V1
1384 vpushv(vtop-1); // X B V1 B
1385 vpushi(m & 0x80 ? ~m & 0x7f : ~m);
1386 gen_op('&'); // X B V1 B1
1387 gen_op('|'); // X B V2
1389 vdup(), vtop[-1] = vtop[-2]; // X B B V2
1390 vstore(), vpop(); // X B
1391 bits += n, bit_size -= n, bit_pos = 0, o = 1;
1392 } while (bit_size);
1393 vpop(), vpop();
1396 static int adjust_bf(SValue *sv, int bit_pos, int bit_size)
1398 int t;
1399 if (0 == sv->type.ref)
1400 return 0;
1401 t = sv->type.ref->auxtype;
1402 if (t != -1 && t != VT_STRUCT) {
1403 sv->type.t = (sv->type.t & ~VT_BTYPE) | t;
1404 sv->r = (sv->r & ~VT_LVAL_TYPE) | lvalue_type(sv->type.t);
1406 return t;
1409 /* store vtop in a register belonging to class 'rc'. lvalues are
1410 converted to values. Cannot be used if the value cannot be
1411 converted to a register value (such as structures). */
1412 ST_FUNC int gv(int rc)
1414 int r, bit_pos, bit_size, size, align, rc2;
1416 /* NOTE: get_reg can modify vstack[] */
1417 if (vtop->type.t & VT_BITFIELD) {
1418 CType type;
1420 bit_pos = BIT_POS(vtop->type.t);
1421 bit_size = BIT_SIZE(vtop->type.t);
1422 /* remove bit field info to avoid loops */
1423 vtop->type.t &= ~VT_STRUCT_MASK;
1425 type.ref = NULL;
1426 type.t = vtop->type.t & VT_UNSIGNED;
1427 if ((vtop->type.t & VT_BTYPE) == VT_BOOL)
1428 type.t |= VT_UNSIGNED;
1430 r = adjust_bf(vtop, bit_pos, bit_size);
1432 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
1433 type.t |= VT_LLONG;
1434 else
1435 type.t |= VT_INT;
1437 if (r == VT_STRUCT) {
1438 load_packed_bf(&type, bit_pos, bit_size);
1439 } else {
1440 int bits = (type.t & VT_BTYPE) == VT_LLONG ? 64 : 32;
1441 /* cast to int to propagate signedness in following ops */
1442 gen_cast(&type);
1443 /* generate shifts */
1444 vpushi(bits - (bit_pos + bit_size));
1445 gen_op(TOK_SHL);
1446 vpushi(bits - bit_size);
1447 /* NOTE: transformed to SHR if unsigned */
1448 gen_op(TOK_SAR);
1450 r = gv(rc);
1451 } else {
1452 if (is_float(vtop->type.t) &&
1453 (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1454 unsigned long offset;
1455 /* CPUs usually cannot use float constants, so we store them
1456 generically in the data segment */
1457 size = type_size(&vtop->type, &align);
1458 if (NODATA_WANTED)
1459 size = 0, align = 1;
1460 offset = section_add(data_section, size, align);
1461 vpush_ref(&vtop->type, data_section, offset, size);
1462 vswap();
1463 init_putv(&vtop->type, data_section, offset);
1464 vtop->r |= VT_LVAL;
1466 #ifdef CONFIG_TCC_BCHECK
1467 if (vtop->r & VT_MUSTBOUND)
1468 gbound();
1469 #endif
1471 r = vtop->r & VT_VALMASK;
1472 rc2 = (rc & RC_FLOAT) ? RC_FLOAT : RC_INT;
1473 #ifndef TCC_TARGET_ARM64
1474 if (rc == RC_IRET)
1475 rc2 = RC_LRET;
1476 #ifdef TCC_TARGET_X86_64
1477 else if (rc == RC_FRET)
1478 rc2 = RC_QRET;
1479 #endif
1480 #endif
1481 /* need to reload if:
1482 - constant
1483 - lvalue (need to dereference pointer)
1484 - already a register, but not in the right class */
1485 if (r >= VT_CONST
1486 || (vtop->r & VT_LVAL)
1487 || !(reg_classes[r] & rc)
1488 #if PTR_SIZE == 8
1489 || ((vtop->type.t & VT_BTYPE) == VT_QLONG && !(reg_classes[vtop->r2] & rc2))
1490 || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT && !(reg_classes[vtop->r2] & rc2))
1491 #else
1492 || ((vtop->type.t & VT_BTYPE) == VT_LLONG && !(reg_classes[vtop->r2] & rc2))
1493 #endif
1496 r = get_reg(rc);
1497 #if PTR_SIZE == 8
1498 if (((vtop->type.t & VT_BTYPE) == VT_QLONG) || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT)) {
1499 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
1500 #else
1501 if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
1502 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
1503 unsigned long long ll;
1504 #endif
1505 int r2, original_type;
1506 original_type = vtop->type.t;
1507 /* two register type load : expand to two words
1508 temporarily */
1509 #if PTR_SIZE == 4
1510 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1511 /* load constant */
1512 ll = vtop->c.i;
1513 vtop->c.i = ll; /* first word */
1514 load(r, vtop);
1515 vtop->r = r; /* save register value */
1516 vpushi(ll >> 32); /* second word */
1517 } else
1518 #endif
1519 if (vtop->r & VT_LVAL) {
1520 /* We do not want to modify the long long
1521 pointer here, so the safest (and least
1522 efficient) approach is to save all the other registers
1523 on the stack. XXX: totally inefficient. */
1524 #if 0
1525 save_regs(1);
1526 #else
1527 /* lvalue_save: save only if used further down the stack */
1528 save_reg_upstack(vtop->r, 1);
1529 #endif
1530 /* load from memory */
1531 vtop->type.t = load_type;
1532 load(r, vtop);
1533 vdup();
1534 vtop[-1].r = r; /* save register value */
1535 /* increment pointer to get second word */
1536 vtop->type.t = addr_type;
1537 gaddrof();
1538 vpushi(load_size);
1539 gen_op('+');
1540 vtop->r |= VT_LVAL;
1541 vtop->type.t = load_type;
1542 } else {
1543 /* move registers */
1544 load(r, vtop);
1545 vdup();
1546 vtop[-1].r = r; /* save register value */
1547 vtop->r = vtop[-1].r2;
1549 /* Allocate second register. Here we rely on the fact that
1550 get_reg() tries first to free r2 of an SValue. */
1551 r2 = get_reg(rc2);
1552 load(r2, vtop);
1553 vpop();
1554 /* write second register */
1555 vtop->r2 = r2;
1556 vtop->type.t = original_type;
1557 } else if ((vtop->r & VT_LVAL) && !is_float(vtop->type.t)) {
1558 int t1, t;
1559 /* lvalue of scalar type : need to use lvalue type
1560 because of possible cast */
1561 t = vtop->type.t;
1562 t1 = t;
1563 /* compute memory access type */
1564 if (vtop->r & VT_LVAL_BYTE)
1565 t = VT_BYTE;
1566 else if (vtop->r & VT_LVAL_SHORT)
1567 t = VT_SHORT;
1568 if (vtop->r & VT_LVAL_UNSIGNED)
1569 t |= VT_UNSIGNED;
1570 vtop->type.t = t;
1571 load(r, vtop);
1572 /* restore wanted type */
1573 vtop->type.t = t1;
1574 } else {
1575 /* one register type load */
1576 load(r, vtop);
1579 vtop->r = r;
1580 #ifdef TCC_TARGET_C67
1581 /* uses register pairs for doubles */
1582 if ((vtop->type.t & VT_BTYPE) == VT_DOUBLE)
1583 vtop->r2 = r+1;
1584 #endif
1586 return r;
1589 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1590 ST_FUNC void gv2(int rc1, int rc2)
1592 int v;
1594 /* generate more generic register first. But VT_JMP or VT_CMP
1595 values must be generated first in all cases to avoid possible
1596 reload errors */
1597 v = vtop[0].r & VT_VALMASK;
1598 if (v != VT_CMP && (v & ~1) != VT_JMP && rc1 <= rc2) {
1599 vswap();
1600 gv(rc1);
1601 vswap();
1602 gv(rc2);
1603 /* test if reload is needed for first register */
1604 if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
1605 vswap();
1606 gv(rc1);
1607 vswap();
1609 } else {
1610 gv(rc2);
1611 vswap();
1612 gv(rc1);
1613 vswap();
1614 /* test if reload is needed for first register */
1615 if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
1616 gv(rc2);
1621 #ifndef TCC_TARGET_ARM64
1622 /* wrapper around RC_FRET to return a register by type */
1623 static int rc_fret(int t)
1625 #ifdef TCC_TARGET_X86_64
1626 if (t == VT_LDOUBLE) {
1627 return RC_ST0;
1629 #endif
1630 return RC_FRET;
1632 #endif
1634 /* wrapper around REG_FRET to return a register by type */
1635 static int reg_fret(int t)
1637 #ifdef TCC_TARGET_X86_64
1638 if (t == VT_LDOUBLE) {
1639 return TREG_ST0;
1641 #endif
1642 return REG_FRET;
1645 #if PTR_SIZE == 4
1646 /* expand 64bit on stack in two ints */
1647 ST_FUNC void lexpand(void)
1649 int u, v;
1650 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1651 v = vtop->r & (VT_VALMASK | VT_LVAL);
1652 if (v == VT_CONST) {
1653 vdup();
1654 vtop[0].c.i >>= 32;
1655 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1656 vdup();
1657 vtop[0].c.i += 4;
1658 } else {
1659 gv(RC_INT);
1660 vdup();
1661 vtop[0].r = vtop[-1].r2;
1662 vtop[0].r2 = vtop[-1].r2 = VT_CONST;
1664 vtop[0].type.t = vtop[-1].type.t = VT_INT | u;
1666 #endif
1668 #if PTR_SIZE == 4
1669 /* build a long long from two ints */
1670 static void lbuild(int t)
1672 gv2(RC_INT, RC_INT);
1673 vtop[-1].r2 = vtop[0].r;
1674 vtop[-1].type.t = t;
1675 vpop();
1677 #endif
1679 /* convert stack entry to register and duplicate its value in another
1680 register */
1681 static void gv_dup(void)
1683 int rc, t, r, r1;
1684 SValue sv;
1686 t = vtop->type.t;
1687 #if PTR_SIZE == 4
1688 if ((t & VT_BTYPE) == VT_LLONG) {
1689 if (t & VT_BITFIELD) {
1690 gv(RC_INT);
1691 t = vtop->type.t;
1693 lexpand();
1694 gv_dup();
1695 vswap();
1696 vrotb(3);
1697 gv_dup();
1698 vrotb(4);
1699 /* stack: H L L1 H1 */
1700 lbuild(t);
1701 vrotb(3);
1702 vrotb(3);
1703 vswap();
1704 lbuild(t);
1705 vswap();
1706 } else
1707 #endif
1709 /* duplicate value */
1710 rc = RC_INT;
1711 sv.type.t = VT_INT;
1712 if (is_float(t)) {
1713 rc = RC_FLOAT;
1714 #ifdef TCC_TARGET_X86_64
1715 if ((t & VT_BTYPE) == VT_LDOUBLE) {
1716 rc = RC_ST0;
1718 #endif
1719 sv.type.t = t;
1721 r = gv(rc);
1722 r1 = get_reg(rc);
1723 sv.r = r;
1724 sv.c.i = 0;
1725 load(r1, &sv); /* move r to r1 */
1726 vdup();
1727 /* duplicates value */
1728 if (r != r1)
1729 vtop->r = r1;
1733 /* Generate value test
1735 * Generate a test for any value (jump, comparison and integers) */
1736 ST_FUNC int gvtst(int inv, int t)
1738 int v = vtop->r & VT_VALMASK;
1739 if (v != VT_CMP && v != VT_JMP && v != VT_JMPI) {
1740 vpushi(0);
1741 gen_op(TOK_NE);
1743 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1744 /* constant jmp optimization */
1745 if ((vtop->c.i != 0) != inv)
1746 t = gjmp(t);
1747 vtop--;
1748 return t;
1750 return gtst(inv, t);
1753 #if PTR_SIZE == 4
1754 /* generate CPU independent (unsigned) long long operations */
1755 static void gen_opl(int op)
1757 int t, a, b, op1, c, i;
1758 int func;
1759 unsigned short reg_iret = REG_IRET;
1760 unsigned short reg_lret = REG_LRET;
1761 SValue tmp;
1763 switch(op) {
1764 case '/':
1765 case TOK_PDIV:
1766 func = TOK___divdi3;
1767 goto gen_func;
1768 case TOK_UDIV:
1769 func = TOK___udivdi3;
1770 goto gen_func;
1771 case '%':
1772 func = TOK___moddi3;
1773 goto gen_mod_func;
1774 case TOK_UMOD:
1775 func = TOK___umoddi3;
1776 gen_mod_func:
1777 #ifdef TCC_ARM_EABI
1778 reg_iret = TREG_R2;
1779 reg_lret = TREG_R3;
1780 #endif
1781 gen_func:
1782 /* call generic long long function */
1783 vpush_global_sym(&func_old_type, func);
1784 vrott(3);
1785 gfunc_call(2);
1786 vpushi(0);
1787 vtop->r = reg_iret;
1788 vtop->r2 = reg_lret;
1789 break;
1790 case '^':
1791 case '&':
1792 case '|':
1793 case '*':
1794 case '+':
1795 case '-':
1796 //pv("gen_opl A",0,2);
1797 t = vtop->type.t;
1798 vswap();
1799 lexpand();
1800 vrotb(3);
1801 lexpand();
1802 /* stack: L1 H1 L2 H2 */
1803 tmp = vtop[0];
1804 vtop[0] = vtop[-3];
1805 vtop[-3] = tmp;
1806 tmp = vtop[-2];
1807 vtop[-2] = vtop[-3];
1808 vtop[-3] = tmp;
1809 vswap();
1810 /* stack: H1 H2 L1 L2 */
1811 //pv("gen_opl B",0,4);
1812 if (op == '*') {
1813 vpushv(vtop - 1);
1814 vpushv(vtop - 1);
1815 gen_op(TOK_UMULL);
1816 lexpand();
1817 /* stack: H1 H2 L1 L2 ML MH */
1818 for(i=0;i<4;i++)
1819 vrotb(6);
1820 /* stack: ML MH H1 H2 L1 L2 */
1821 tmp = vtop[0];
1822 vtop[0] = vtop[-2];
1823 vtop[-2] = tmp;
1824 /* stack: ML MH H1 L2 H2 L1 */
1825 gen_op('*');
1826 vrotb(3);
1827 vrotb(3);
1828 gen_op('*');
1829 /* stack: ML MH M1 M2 */
1830 gen_op('+');
1831 gen_op('+');
1832 } else if (op == '+' || op == '-') {
1833 /* XXX: add non carry method too (for MIPS or alpha) */
1834 if (op == '+')
1835 op1 = TOK_ADDC1;
1836 else
1837 op1 = TOK_SUBC1;
1838 gen_op(op1);
1839 /* stack: H1 H2 (L1 op L2) */
1840 vrotb(3);
1841 vrotb(3);
1842 gen_op(op1 + 1); /* TOK_xxxC2 */
1843 } else {
1844 gen_op(op);
1845 /* stack: H1 H2 (L1 op L2) */
1846 vrotb(3);
1847 vrotb(3);
1848 /* stack: (L1 op L2) H1 H2 */
1849 gen_op(op);
1850 /* stack: (L1 op L2) (H1 op H2) */
1852 /* stack: L H */
1853 lbuild(t);
1854 break;
1855 case TOK_SAR:
1856 case TOK_SHR:
1857 case TOK_SHL:
1858 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1859 t = vtop[-1].type.t;
1860 vswap();
1861 lexpand();
1862 vrotb(3);
1863 /* stack: L H shift */
1864 c = (int)vtop->c.i;
1865 /* constant: simpler */
1866 /* NOTE: all comments are for SHL. the other cases are
1867 done by swapping words */
1868 vpop();
1869 if (op != TOK_SHL)
1870 vswap();
1871 if (c >= 32) {
1872 /* stack: L H */
1873 vpop();
1874 if (c > 32) {
1875 vpushi(c - 32);
1876 gen_op(op);
1878 if (op != TOK_SAR) {
1879 vpushi(0);
1880 } else {
1881 gv_dup();
1882 vpushi(31);
1883 gen_op(TOK_SAR);
1885 vswap();
1886 } else {
1887 vswap();
1888 gv_dup();
1889 /* stack: H L L */
1890 vpushi(c);
1891 gen_op(op);
1892 vswap();
1893 vpushi(32 - c);
1894 if (op == TOK_SHL)
1895 gen_op(TOK_SHR);
1896 else
1897 gen_op(TOK_SHL);
1898 vrotb(3);
1899 /* stack: L L H */
1900 vpushi(c);
1901 if (op == TOK_SHL)
1902 gen_op(TOK_SHL);
1903 else
1904 gen_op(TOK_SHR);
1905 gen_op('|');
1907 if (op != TOK_SHL)
1908 vswap();
1909 lbuild(t);
1910 } else {
1911 /* XXX: should provide a faster fallback on x86 ? */
1912 switch(op) {
1913 case TOK_SAR:
1914 func = TOK___ashrdi3;
1915 goto gen_func;
1916 case TOK_SHR:
1917 func = TOK___lshrdi3;
1918 goto gen_func;
1919 case TOK_SHL:
1920 func = TOK___ashldi3;
1921 goto gen_func;
1924 break;
1925 default:
1926 /* compare operations */
1927 t = vtop->type.t;
1928 vswap();
1929 lexpand();
1930 vrotb(3);
1931 lexpand();
1932 /* stack: L1 H1 L2 H2 */
1933 tmp = vtop[-1];
1934 vtop[-1] = vtop[-2];
1935 vtop[-2] = tmp;
1936 /* stack: L1 L2 H1 H2 */
1937 /* compare high */
1938 op1 = op;
1939 /* when values are equal, we need to compare low words. since
1940 the jump is inverted, we invert the test too. */
1941 if (op1 == TOK_LT)
1942 op1 = TOK_LE;
1943 else if (op1 == TOK_GT)
1944 op1 = TOK_GE;
1945 else if (op1 == TOK_ULT)
1946 op1 = TOK_ULE;
1947 else if (op1 == TOK_UGT)
1948 op1 = TOK_UGE;
1949 a = 0;
1950 b = 0;
1951 gen_op(op1);
1952 if (op == TOK_NE) {
1953 b = gvtst(0, 0);
1954 } else {
1955 a = gvtst(1, 0);
1956 if (op != TOK_EQ) {
1957 /* generate non equal test */
1958 vpushi(TOK_NE);
1959 vtop->r = VT_CMP;
1960 b = gvtst(0, 0);
1963 /* compare low. Always unsigned */
1964 op1 = op;
1965 if (op1 == TOK_LT)
1966 op1 = TOK_ULT;
1967 else if (op1 == TOK_LE)
1968 op1 = TOK_ULE;
1969 else if (op1 == TOK_GT)
1970 op1 = TOK_UGT;
1971 else if (op1 == TOK_GE)
1972 op1 = TOK_UGE;
1973 gen_op(op1);
1974 a = gvtst(1, a);
1975 gsym(b);
1976 vseti(VT_JMPI, a);
1977 break;
1980 #endif
1982 static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
1984 uint64_t x = (a >> 63 ? -a : a) / (b >> 63 ? -b : b);
1985 return (a ^ b) >> 63 ? -x : x;
1988 static int gen_opic_lt(uint64_t a, uint64_t b)
1990 return (a ^ (uint64_t)1 << 63) < (b ^ (uint64_t)1 << 63);
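/* Both helpers above fold signed operations using the unsigned representation:
   gen_opic_sdiv divides the magnitudes and restores the sign from a^b, while
   gen_opic_lt flips the sign bit of each operand (x ^ 1<<63) so that an
   unsigned '<' yields the signed ordering.  For example, -1 < 1 becomes
   0x7fffffffffffffff < 0x8000000000000001, which is true as required. */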
1993 /* handle integer constant optimizations and various
1994 machine-independent optimizations */
1995 static void gen_opic(int op)
1997 SValue *v1 = vtop - 1;
1998 SValue *v2 = vtop;
1999 int t1 = v1->type.t & VT_BTYPE;
2000 int t2 = v2->type.t & VT_BTYPE;
2001 int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2002 int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2003 uint64_t l1 = c1 ? v1->c.i : 0;
2004 uint64_t l2 = c2 ? v2->c.i : 0;
2005 int shm = (t1 == VT_LLONG) ? 63 : 31;
2007 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
2008 l1 = ((uint32_t)l1 |
2009 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
2010 if (t2 != VT_LLONG && (PTR_SIZE != 8 || t2 != VT_PTR))
2011 l2 = ((uint32_t)l2 |
2012 (v2->type.t & VT_UNSIGNED ? 0 : -(l2 & 0x80000000)));
2014 if (c1 && c2) {
2015 switch(op) {
2016 case '+': l1 += l2; break;
2017 case '-': l1 -= l2; break;
2018 case '&': l1 &= l2; break;
2019 case '^': l1 ^= l2; break;
2020 case '|': l1 |= l2; break;
2021 case '*': l1 *= l2; break;
2023 case TOK_PDIV:
2024 case '/':
2025 case '%':
2026 case TOK_UDIV:
2027 case TOK_UMOD:
2028 /* if division by zero, generate explicit division */
2029 if (l2 == 0) {
2030 if (const_wanted)
2031 tcc_error("division by zero in constant");
2032 goto general_case;
2034 switch(op) {
2035 default: l1 = gen_opic_sdiv(l1, l2); break;
2036 case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
2037 case TOK_UDIV: l1 = l1 / l2; break;
2038 case TOK_UMOD: l1 = l1 % l2; break;
2040 break;
2041 case TOK_SHL: l1 <<= (l2 & shm); break;
2042 case TOK_SHR: l1 >>= (l2 & shm); break;
2043 case TOK_SAR:
2044 l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
2045 break;
2046 /* tests */
2047 case TOK_ULT: l1 = l1 < l2; break;
2048 case TOK_UGE: l1 = l1 >= l2; break;
2049 case TOK_EQ: l1 = l1 == l2; break;
2050 case TOK_NE: l1 = l1 != l2; break;
2051 case TOK_ULE: l1 = l1 <= l2; break;
2052 case TOK_UGT: l1 = l1 > l2; break;
2053 case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
2054 case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
2055 case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
2056 case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
2057 /* logical */
2058 case TOK_LAND: l1 = l1 && l2; break;
2059 case TOK_LOR: l1 = l1 || l2; break;
2060 default:
2061 goto general_case;
2063 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
2064 l1 = ((uint32_t)l1 |
2065 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
2066 v1->c.i = l1;
2067 vtop--;
2068 } else {
2069 /* if commutative ops, put c2 as constant */
2070 if (c1 && (op == '+' || op == '&' || op == '^' ||
2071 op == '|' || op == '*')) {
2072 vswap();
2073 c2 = c1; //c = c1, c1 = c2, c2 = c;
2074 l2 = l1; //l = l1, l1 = l2, l2 = l;
2076 if (!const_wanted &&
2077 c1 && ((l1 == 0 &&
2078 (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
2079 (l1 == -1 && op == TOK_SAR))) {
2080 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
2081 vtop--;
2082 } else if (!const_wanted &&
2083 c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
2084 (op == '|' &&
2085 (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))) ||
2086 (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
2087 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
2088 if (l2 == 1)
2089 vtop->c.i = 0;
2090 vswap();
2091 vtop--;
2092 } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
2093 op == TOK_PDIV) &&
2094 l2 == 1) ||
2095 ((op == '+' || op == '-' || op == '|' || op == '^' ||
2096 op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
2097 l2 == 0) ||
2098 (op == '&' &&
2099 (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))))) {
2100 /* filter out NOP operations like x*1, x-0, x&-1... */
2101 vtop--;
2102 } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
2103 /* try to use shifts instead of muls or divs */
2104 if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
2105 int n = -1;
2106 while (l2) {
2107 l2 >>= 1;
2108 n++;
2110 vtop->c.i = n;
2111 if (op == '*')
2112 op = TOK_SHL;
2113 else if (op == TOK_PDIV)
2114 op = TOK_SAR;
2115 else
2116 op = TOK_SHR;
2118 goto general_case;
2119 } else if (c2 && (op == '+' || op == '-') &&
2120 (((vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM))
2121 || (vtop[-1].r & (VT_VALMASK | VT_LVAL)) == VT_LOCAL)) {
2122 /* symbol + constant case */
2123 if (op == '-')
2124 l2 = -l2;
2125 l2 += vtop[-1].c.i;
2126 /* The backends can't always deal with addends to symbols
2127 larger than +-1<<31. Don't construct such. */
2128 if ((int)l2 != l2)
2129 goto general_case;
2130 vtop--;
2131 vtop->c.i = l2;
2132 } else {
2133 general_case:
2134 /* call low level op generator */
2135 if (t1 == VT_LLONG || t2 == VT_LLONG ||
2136 (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
2137 gen_opl(op);
2138 else
2139 gen_opi(op);
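/* A rough sketch of how this constant folder is typically reached: an
   expression like 1 + 2 amounts to roughly
       vpushi(1); vpushi(2); gen_op('+');
   and since both stack entries are VT_CONST, the c1 && c2 branch above
   replaces them with the single constant 3 instead of emitting any code. */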
2144 /* generate a floating point operation with constant propagation */
2145 static void gen_opif(int op)
2147 int c1, c2;
2148 SValue *v1, *v2;
2149 #if defined _MSC_VER && defined _AMD64_
2150 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2151 volatile
2152 #endif
2153 long double f1, f2;
2155 v1 = vtop - 1;
2156 v2 = vtop;
2157 /* currently, we cannot do computations with forward symbols */
2158 c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2159 c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2160 if (c1 && c2) {
2161 if (v1->type.t == VT_FLOAT) {
2162 f1 = v1->c.f;
2163 f2 = v2->c.f;
2164 } else if (v1->type.t == VT_DOUBLE) {
2165 f1 = v1->c.d;
2166 f2 = v2->c.d;
2167 } else {
2168 f1 = v1->c.ld;
2169 f2 = v2->c.ld;
2172 /* NOTE: we only do constant propagation for finite numbers (not
2173 NaN or infinity), as per the ANSI spec */
2174 if (!ieee_finite(f1) || !ieee_finite(f2))
2175 goto general_case;
2177 switch(op) {
2178 case '+': f1 += f2; break;
2179 case '-': f1 -= f2; break;
2180 case '*': f1 *= f2; break;
2181 case '/':
2182 if (f2 == 0.0) {
2183 /* If not in initializer we need to potentially generate
2184 FP exceptions at runtime, otherwise we want to fold. */
2185 if (!const_wanted)
2186 goto general_case;
2188 f1 /= f2;
2189 break;
2190 /* XXX: also handles tests ? */
2191 default:
2192 goto general_case;
2194 /* XXX: overflow test ? */
2195 if (v1->type.t == VT_FLOAT) {
2196 v1->c.f = f1;
2197 } else if (v1->type.t == VT_DOUBLE) {
2198 v1->c.d = f1;
2199 } else {
2200 v1->c.ld = f1;
2202 vtop--;
2203 } else {
2204 general_case:
2205 gen_opf(op);
2209 static int pointed_size(CType *type)
2211 int align;
2212 return type_size(pointed_type(type), &align);
2215 static void vla_runtime_pointed_size(CType *type)
2217 int align;
2218 vla_runtime_type_size(pointed_type(type), &align);
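/* illustration: the next helper recognizes null pointer constants
   such as 0, 0LL and (void *)0; something like (char *)0 is not
   accepted, since the pointed-to type must be unqualified void */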
2221 static inline int is_null_pointer(SValue *p)
2223 if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
2224 return 0;
2225 return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
2226 ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
2227 ((p->type.t & VT_BTYPE) == VT_PTR &&
2228 (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0) &&
2229 ((pointed_type(&p->type)->t & VT_BTYPE) == VT_VOID) &&
2230 0 == (pointed_type(&p->type)->t & (VT_CONSTANT | VT_VOLATILE))
2233 static inline int is_integer_btype(int bt)
2235 return (bt == VT_BYTE || bt == VT_SHORT ||
2236 bt == VT_INT || bt == VT_LLONG);
2239 /* check types for comparison or subtraction of pointers */
2240 static void check_comparison_pointer_types(SValue *p1, SValue *p2, int op)
2242 CType *type1, *type2, tmp_type1, tmp_type2;
2243 int bt1, bt2;
2245 /* null pointers are accepted for all comparisons, as in gcc */
2246 if (is_null_pointer(p1) || is_null_pointer(p2))
2247 return;
2248 type1 = &p1->type;
2249 type2 = &p2->type;
2250 bt1 = type1->t & VT_BTYPE;
2251 bt2 = type2->t & VT_BTYPE;
2252 /* accept comparison between pointer and integer with a warning */
2253 if ((is_integer_btype(bt1) || is_integer_btype(bt2)) && op != '-') {
2254 if (op != TOK_LOR && op != TOK_LAND )
2255 tcc_warning("comparison between pointer and integer");
2256 return;
2259 /* both must be pointers or implicit function pointers */
2260 if (bt1 == VT_PTR) {
2261 type1 = pointed_type(type1);
2262 } else if (bt1 != VT_FUNC)
2263 goto invalid_operands;
2265 if (bt2 == VT_PTR) {
2266 type2 = pointed_type(type2);
2267 } else if (bt2 != VT_FUNC) {
2268 invalid_operands:
2269 tcc_error("invalid operands to binary %s", get_tok_str(op, NULL));
2271 if ((type1->t & VT_BTYPE) == VT_VOID ||
2272 (type2->t & VT_BTYPE) == VT_VOID)
2273 return;
2274 tmp_type1 = *type1;
2275 tmp_type2 = *type2;
2276 tmp_type1.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
2277 tmp_type2.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
2278 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
2279 /* gcc-like error if '-' is used */
2280 if (op == '-')
2281 goto invalid_operands;
2282 else
2283 tcc_warning("comparison of distinct pointer types lacks a cast");
2287 /* generic gen_op: handles types problems */
2288 ST_FUNC void gen_op(int op)
2290 int u, t1, t2, bt1, bt2, t;
2291 CType type1;
2293 redo:
2294 t1 = vtop[-1].type.t;
2295 t2 = vtop[0].type.t;
2296 bt1 = t1 & VT_BTYPE;
2297 bt2 = t2 & VT_BTYPE;
2299 if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
2300 tcc_error("operation on a struct");
2301 } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
2302 if (bt2 == VT_FUNC) {
2303 mk_pointer(&vtop->type);
2304 gaddrof();
2306 if (bt1 == VT_FUNC) {
2307 vswap();
2308 mk_pointer(&vtop->type);
2309 gaddrof();
2310 vswap();
2312 goto redo;
2313 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
2314 /* at least one operand is a pointer */
2315 /* relational op: must be both pointers */
2316 if (op >= TOK_ULT && op <= TOK_LOR) {
2317 check_comparison_pointer_types(vtop - 1, vtop, op);
2318 /* pointers are handled as unsigned */
2319 #if PTR_SIZE == 8
2320 t = VT_LLONG | VT_UNSIGNED;
2321 #else
2322 t = VT_INT | VT_UNSIGNED;
2323 #endif
2324 goto std_op;
2326 /* if both pointers, then it must be the '-' op */
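/* e.g. for 'int *p, *q;' the expression 'p - q' computes the byte
   difference and then divides it by sizeof(int) via TOK_PDIV below */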
2327 if (bt1 == VT_PTR && bt2 == VT_PTR) {
2328 if (op != '-')
2329 tcc_error("cannot use pointers here");
2330 check_comparison_pointer_types(vtop - 1, vtop, op);
2331 /* XXX: check that types are compatible */
2332 if (vtop[-1].type.t & VT_VLA) {
2333 vla_runtime_pointed_size(&vtop[-1].type);
2334 } else {
2335 vpushi(pointed_size(&vtop[-1].type));
2337 vrott(3);
2338 gen_opic(op);
2339 vtop->type.t = ptrdiff_type.t;
2340 vswap();
2341 gen_op(TOK_PDIV);
2342 } else {
2343 /* exactly one pointer : must be '+' or '-'. */
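/* e.g. for 'int *p;' the expression 'p + 3' multiplies the index 3
   by sizeof(int) before the addition is generated */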
2344 if (op != '-' && op != '+')
2345 tcc_error("cannot use pointers here");
2346 /* Put pointer as first operand */
2347 if (bt2 == VT_PTR) {
2348 vswap();
2349 t = t1, t1 = t2, t2 = t;
2351 #if PTR_SIZE == 4
2352 if ((vtop[0].type.t & VT_BTYPE) == VT_LLONG)
2353 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2354 gen_cast_s(VT_INT);
2355 #endif
2356 type1 = vtop[-1].type;
2357 type1.t &= ~VT_ARRAY;
2358 if (vtop[-1].type.t & VT_VLA)
2359 vla_runtime_pointed_size(&vtop[-1].type);
2360 else {
2361 u = pointed_size(&vtop[-1].type);
2362 if (u < 0)
2363 tcc_error("unknown array element size");
2364 #if PTR_SIZE == 8
2365 vpushll(u);
2366 #else
2367 /* XXX: cast to int ? (long long case) */
2368 vpushi(u);
2369 #endif
2371 gen_op('*');
2372 #if 0
2373 /* #ifdef CONFIG_TCC_BCHECK
2374 The main reason for removing this code:
2375 #include <stdio.h>
2376 int main ()
2378 int v[10];
2379 int i = 10;
2380 int j = 9;
2381 fprintf(stderr, "v+i-j = %p\n", v+i-j);
2382 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
2384 When this code is on, the output looks like:
2385 v+i-j = 0xfffffffe
2386 v+(i-j) = 0xbff84000
2388 /* if evaluating constant expression, no code should be
2389 generated, so no bound check */
2390 if (tcc_state->do_bounds_check && !const_wanted) {
2391 /* if bounded pointers, we generate special code to
2392 test bounds */
2393 if (op == '-') {
2394 vpushi(0);
2395 vswap();
2396 gen_op('-');
2398 gen_bounded_ptr_add();
2399 } else
2400 #endif
2402 gen_opic(op);
2404 /* restore the type in case gen_opic() swapped the operands */
2405 vtop->type = type1;
2407 } else if (is_float(bt1) || is_float(bt2)) {
2408 /* compute bigger type and do implicit casts */
2409 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
2410 t = VT_LDOUBLE;
2411 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
2412 t = VT_DOUBLE;
2413 } else {
2414 t = VT_FLOAT;
2416 /* floats can only be used for a few operations */
2417 if (op != '+' && op != '-' && op != '*' && op != '/' &&
2418 (op < TOK_ULT || op > TOK_GT))
2419 tcc_error("invalid operands for binary operation");
2420 goto std_op;
2421 } else if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL) {
2422 t = bt1 == VT_LLONG ? VT_LLONG : VT_INT;
2423 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (t | VT_UNSIGNED))
2424 t |= VT_UNSIGNED;
2425 t |= (VT_LONG & t1);
2426 goto std_op;
2427 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
2428 /* cast to biggest op */
2429 t = VT_LLONG | VT_LONG;
2430 if (bt1 == VT_LLONG)
2431 t &= t1;
2432 if (bt2 == VT_LLONG)
2433 t &= t2;
2434 /* convert to unsigned if it does not fit in a long long */
2435 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
2436 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
2437 t |= VT_UNSIGNED;
2438 goto std_op;
2439 } else {
2440 /* integer operations */
2441 t = VT_INT | (VT_LONG & (t1 | t2));
2442 /* convert to unsigned if it does not fit in an integer */
2443 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
2444 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
2445 t |= VT_UNSIGNED;
2446 std_op:
2447 /* XXX: currently, some unsigned operations are explicit, so
2448 we modify them here */
2449 if (t & VT_UNSIGNED) {
2450 if (op == TOK_SAR)
2451 op = TOK_SHR;
2452 else if (op == '/')
2453 op = TOK_UDIV;
2454 else if (op == '%')
2455 op = TOK_UMOD;
2456 else if (op == TOK_LT)
2457 op = TOK_ULT;
2458 else if (op == TOK_GT)
2459 op = TOK_UGT;
2460 else if (op == TOK_LE)
2461 op = TOK_ULE;
2462 else if (op == TOK_GE)
2463 op = TOK_UGE;
2465 vswap();
2466 type1.t = t;
2467 type1.ref = NULL;
2468 gen_cast(&type1);
2469 vswap();
2470 /* special case for shifts and long long: we keep the shift as
2471 an integer */
2472 if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
2473 type1.t = VT_INT;
2474 gen_cast(&type1);
2475 if (is_float(t))
2476 gen_opif(op);
2477 else
2478 gen_opic(op);
2479 if (op >= TOK_ULT && op <= TOK_GT) {
2480 /* relational op: the result is an int */
2481 vtop->type.t = VT_INT;
2482 } else {
2483 vtop->type.t = t;
2486 // Make sure that we have converted to an rvalue:
2487 if (vtop->r & VT_LVAL)
2488 gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
2491 #ifndef TCC_TARGET_ARM
2492 /* generic itof for unsigned long long case */
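/* e.g. converting an unsigned long long to double is lowered to a
   call to the runtime helper __floatundidf pushed below */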
2493 static void gen_cvt_itof1(int t)
2495 #ifdef TCC_TARGET_ARM64
2496 gen_cvt_itof(t);
2497 #else
2498 if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
2499 (VT_LLONG | VT_UNSIGNED)) {
2501 if (t == VT_FLOAT)
2502 vpush_global_sym(&func_old_type, TOK___floatundisf);
2503 #if LDOUBLE_SIZE != 8
2504 else if (t == VT_LDOUBLE)
2505 vpush_global_sym(&func_old_type, TOK___floatundixf);
2506 #endif
2507 else
2508 vpush_global_sym(&func_old_type, TOK___floatundidf);
2509 vrott(2);
2510 gfunc_call(1);
2511 vpushi(0);
2512 vtop->r = reg_fret(t);
2513 } else {
2514 gen_cvt_itof(t);
2516 #endif
2518 #endif
2520 /* generic ftoi for unsigned long long case */
2521 static void gen_cvt_ftoi1(int t)
2523 #ifdef TCC_TARGET_ARM64
2524 gen_cvt_ftoi(t);
2525 #else
2526 int st;
2528 if (t == (VT_LLONG | VT_UNSIGNED)) {
2529 /* not handled natively */
2530 st = vtop->type.t & VT_BTYPE;
2531 if (st == VT_FLOAT)
2532 vpush_global_sym(&func_old_type, TOK___fixunssfdi);
2533 #if LDOUBLE_SIZE != 8
2534 else if (st == VT_LDOUBLE)
2535 vpush_global_sym(&func_old_type, TOK___fixunsxfdi);
2536 #endif
2537 else
2538 vpush_global_sym(&func_old_type, TOK___fixunsdfdi);
2539 vrott(2);
2540 gfunc_call(1);
2541 vpushi(0);
2542 vtop->r = REG_IRET;
2543 vtop->r2 = REG_LRET;
2544 } else {
2545 gen_cvt_ftoi(t);
2547 #endif
2550 /* force char or short cast */
2551 static void force_charshort_cast(int t)
2553 int bits, dbt;
2555 /* cannot cast static initializers */
2556 if (STATIC_DATA_WANTED)
2557 return;
2559 dbt = t & VT_BTYPE;
2560 /* XXX: add optimization if lvalue : just change type and offset */
2561 if (dbt == VT_BYTE)
2562 bits = 8;
2563 else
2564 bits = 16;
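/* illustration: unsigned targets are masked, e.g. (unsigned char)x
   becomes x & 0xff; signed targets use a shift pair instead, e.g.
   (signed char)x becomes (x << 24) >> 24 on a 32-bit value, where
   the arithmetic shift (SAR) preserves the sign */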
2565 if (t & VT_UNSIGNED) {
2566 vpushi((1 << bits) - 1);
2567 gen_op('&');
2568 } else {
2569 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
2570 bits = 64 - bits;
2571 else
2572 bits = 32 - bits;
2573 vpushi(bits);
2574 gen_op(TOK_SHL);
2575 /* the result must be signed or the SAR is converted to an SHL.
2576 This was not the case when "t" was a signed short
2577 and the last value on the stack was an unsigned int */
2578 vtop->type.t &= ~VT_UNSIGNED;
2579 vpushi(bits);
2580 gen_op(TOK_SAR);
2584 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2585 static void gen_cast_s(int t)
2587 CType type;
2588 type.t = t;
2589 type.ref = NULL;
2590 gen_cast(&type);
2593 static void gen_cast(CType *type)
2595 int sbt, dbt, sf, df, c, p;
2597 /* special delayed cast for char/short */
2598 /* XXX: in some cases (multiple cascaded casts), it may still
2599 be incorrect */
2600 if (vtop->r & VT_MUSTCAST) {
2601 vtop->r &= ~VT_MUSTCAST;
2602 force_charshort_cast(vtop->type.t);
2605 /* bitfields first get cast to ints */
2606 if (vtop->type.t & VT_BITFIELD) {
2607 gv(RC_INT);
2610 dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
2611 sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
2613 if (sbt != dbt) {
2614 sf = is_float(sbt);
2615 df = is_float(dbt);
2616 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2617 p = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM);
2618 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
2619 c &= dbt != VT_LDOUBLE;
2620 #endif
2621 if (c) {
2622 /* constant case: we can do it now */
2623 /* XXX: in ISOC, cannot do it if error in convert */
2624 if (sbt == VT_FLOAT)
2625 vtop->c.ld = vtop->c.f;
2626 else if (sbt == VT_DOUBLE)
2627 vtop->c.ld = vtop->c.d;
2629 if (df) {
2630 if ((sbt & VT_BTYPE) == VT_LLONG) {
2631 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
2632 vtop->c.ld = vtop->c.i;
2633 else
2634 vtop->c.ld = -(long double)-vtop->c.i;
2635 } else if(!sf) {
2636 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
2637 vtop->c.ld = (uint32_t)vtop->c.i;
2638 else
2639 vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
2642 if (dbt == VT_FLOAT)
2643 vtop->c.f = (float)vtop->c.ld;
2644 else if (dbt == VT_DOUBLE)
2645 vtop->c.d = (double)vtop->c.ld;
2646 } else if (sf && dbt == (VT_LLONG|VT_UNSIGNED)) {
2647 vtop->c.i = vtop->c.ld;
2648 } else if (sf && dbt == VT_BOOL) {
2649 vtop->c.i = (vtop->c.ld != 0);
2650 } else {
2651 if(sf)
2652 vtop->c.i = vtop->c.ld;
2653 else if (sbt == (VT_LLONG|VT_UNSIGNED))
2655 else if (sbt & VT_UNSIGNED)
2656 vtop->c.i = (uint32_t)vtop->c.i;
2657 #if PTR_SIZE == 8
2658 else if (sbt == VT_PTR)
2660 #endif
2661 else if (sbt != VT_LLONG)
2662 vtop->c.i = ((uint32_t)vtop->c.i |
2663 -(vtop->c.i & 0x80000000));
2665 if (dbt == (VT_LLONG|VT_UNSIGNED))
2667 else if (dbt == VT_BOOL)
2668 vtop->c.i = (vtop->c.i != 0);
2669 #if PTR_SIZE == 8
2670 else if (dbt == VT_PTR)
2672 #endif
2673 else if (dbt != VT_LLONG) {
2674 uint32_t m = ((dbt & VT_BTYPE) == VT_BYTE ? 0xff :
2675 (dbt & VT_BTYPE) == VT_SHORT ? 0xffff :
2676 0xffffffff);
2677 vtop->c.i &= m;
2678 if (!(dbt & VT_UNSIGNED))
2679 vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
2682 } else if (p && dbt == VT_BOOL) {
2683 vtop->r = VT_CONST;
2684 vtop->c.i = 1;
2685 } else {
2686 /* non constant case: generate code */
2687 if (sf && df) {
2688 /* convert from fp to fp */
2689 gen_cvt_ftof(dbt);
2690 } else if (df) {
2691 /* convert int to fp */
2692 gen_cvt_itof1(dbt);
2693 } else if (sf) {
2694 /* convert fp to int */
2695 if (dbt == VT_BOOL) {
2696 vpushi(0);
2697 gen_op(TOK_NE);
2698 } else {
2699 /* we handle char/short/etc... with generic code */
2700 if (dbt != (VT_INT | VT_UNSIGNED) &&
2701 dbt != (VT_LLONG | VT_UNSIGNED) &&
2702 dbt != VT_LLONG)
2703 dbt = VT_INT;
2704 gen_cvt_ftoi1(dbt);
2705 if (dbt == VT_INT && (type->t & (VT_BTYPE | VT_UNSIGNED)) != dbt) {
2706 /* additional cast for char/short... */
2707 vtop->type.t = dbt;
2708 gen_cast(type);
2711 #if PTR_SIZE == 4
2712 } else if ((dbt & VT_BTYPE) == VT_LLONG) {
2713 if ((sbt & VT_BTYPE) != VT_LLONG) {
2714 /* scalar to long long */
2715 /* machine independent conversion */
2716 gv(RC_INT);
2717 /* generate high word */
2718 if (sbt == (VT_INT | VT_UNSIGNED)) {
2719 vpushi(0);
2720 gv(RC_INT);
2721 } else {
2722 if (sbt == VT_PTR) {
2723 /* cast from pointer to int before we apply
2724 shift operation, which pointers don't support */
2725 gen_cast_s(VT_INT);
2727 gv_dup();
2728 vpushi(31);
2729 gen_op(TOK_SAR);
2731 /* patch second register */
2732 vtop[-1].r2 = vtop->r;
2733 vpop();
2735 #else
2736 } else if ((dbt & VT_BTYPE) == VT_LLONG ||
2737 (dbt & VT_BTYPE) == VT_PTR ||
2738 (dbt & VT_BTYPE) == VT_FUNC) {
2739 if ((sbt & VT_BTYPE) != VT_LLONG &&
2740 (sbt & VT_BTYPE) != VT_PTR &&
2741 (sbt & VT_BTYPE) != VT_FUNC) {
2742 /* need to convert from 32bit to 64bit */
2743 gv(RC_INT);
2744 if (sbt != (VT_INT | VT_UNSIGNED)) {
2745 #if defined(TCC_TARGET_ARM64)
2746 gen_cvt_sxtw();
2747 #elif defined(TCC_TARGET_X86_64)
2748 int r = gv(RC_INT);
2749 /* x86_64 specific: movslq */
2750 o(0x6348);
2751 o(0xc0 + (REG_VALUE(r) << 3) + REG_VALUE(r));
2752 #else
2753 #error
2754 #endif
2757 #endif
2758 } else if (dbt == VT_BOOL) {
2759 /* scalar to bool */
2760 vpushi(0);
2761 gen_op(TOK_NE);
2762 } else if ((dbt & VT_BTYPE) == VT_BYTE ||
2763 (dbt & VT_BTYPE) == VT_SHORT) {
2764 if (sbt == VT_PTR) {
2765 vtop->type.t = VT_INT;
2766 tcc_warning("nonportable conversion from pointer to char/short");
2768 force_charshort_cast(dbt);
2769 } else if ((dbt & VT_BTYPE) == VT_INT) {
2770 /* scalar to int */
2771 if ((sbt & VT_BTYPE) == VT_LLONG) {
2772 #if PTR_SIZE == 4
2773 /* from long long: just take low order word */
2774 lexpand();
2775 vpop();
2776 #else
2777 vpushi(0xffffffff);
2778 vtop->type.t |= VT_UNSIGNED;
2779 gen_op('&');
2780 #endif
2782 /* if lvalue and single word type, nothing to do because
2783 the lvalue already contains the real type size (see
2784 VT_LVAL_xxx constants) */
2787 } else if ((dbt & VT_BTYPE) == VT_PTR && !(vtop->r & VT_LVAL)) {
2788 /* if we are casting between pointer types,
2789 we must update the VT_LVAL_xxx size */
2790 vtop->r = (vtop->r & ~VT_LVAL_TYPE)
2791 | (lvalue_type(type->ref->type.t) & VT_LVAL_TYPE);
2793 vtop->type = *type;
2794 vtop->type.t &= ~ ( VT_CONSTANT | VT_VOLATILE | VT_ARRAY );
2797 /* return type size as known at compile time. Put alignment at 'a' */
2798 ST_FUNC int type_size(CType *type, int *a)
2800 Sym *s;
2801 int bt;
2803 bt = type->t & VT_BTYPE;
2804 if (bt == VT_STRUCT) {
2805 /* struct/union */
2806 s = type->ref;
2807 *a = s->r;
2808 return s->c;
2809 } else if (bt == VT_PTR) {
2810 if (type->t & VT_ARRAY) {
2811 int ts;
2813 s = type->ref;
2814 ts = type_size(&s->type, a);
2816 if (ts < 0 && s->c < 0)
2817 ts = -ts;
2819 return ts * s->c;
2820 } else {
2821 *a = PTR_SIZE;
2822 return PTR_SIZE;
2824 } else if (IS_ENUM(type->t) && type->ref->c < 0) {
2825 return -1; /* incomplete enum */
2826 } else if (bt == VT_LDOUBLE) {
2827 *a = LDOUBLE_ALIGN;
2828 return LDOUBLE_SIZE;
2829 } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
2830 #ifdef TCC_TARGET_I386
2831 #ifdef TCC_TARGET_PE
2832 *a = 8;
2833 #else
2834 *a = 4;
2835 #endif
2836 #elif defined(TCC_TARGET_ARM)
2837 #ifdef TCC_ARM_EABI
2838 *a = 8;
2839 #else
2840 *a = 4;
2841 #endif
2842 #else
2843 *a = 8;
2844 #endif
2845 return 8;
2846 } else if (bt == VT_INT || bt == VT_FLOAT) {
2847 *a = 4;
2848 return 4;
2849 } else if (bt == VT_SHORT) {
2850 *a = 2;
2851 return 2;
2852 } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
2853 *a = 8;
2854 return 16;
2855 } else {
2856 /* char, void, function, _Bool */
2857 *a = 1;
2858 return 1;
2862 /* push the type size as known at runtime on top of the value stack. Put
2863 alignment at 'a' */
2864 ST_FUNC void vla_runtime_type_size(CType *type, int *a)
2866 if (type->t & VT_VLA) {
2867 type_size(&type->ref->type, a);
2868 vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
2869 } else {
2870 vpushi(type_size(type, a));
2874 static void vla_sp_restore(void) {
2875 if (vlas_in_scope) {
2876 gen_vla_sp_restore(vla_sp_loc);
2880 static void vla_sp_restore_root(void) {
2881 if (vlas_in_scope) {
2882 gen_vla_sp_restore(vla_sp_root_loc);
2886 /* return the pointed type of t */
2887 static inline CType *pointed_type(CType *type)
2889 return &type->ref->type;
2892 /* modify 'type' so that it becomes a pointer to the original type. */
2893 ST_FUNC void mk_pointer(CType *type)
2895 Sym *s;
2896 s = sym_push(SYM_FIELD, type, 0, -1);
2897 type->t = VT_PTR | (type->t & VT_STORAGE);
2898 type->ref = s;
2901 /* compare function types. OLD functions match any new functions */
2902 static int is_compatible_func(CType *type1, CType *type2)
2904 Sym *s1, *s2;
2906 s1 = type1->ref;
2907 s2 = type2->ref;
2908 if (s1->f.func_call != s2->f.func_call)
2909 return 0;
2910 if (s1->f.func_type != s2->f.func_type
2911 && s1->f.func_type != FUNC_OLD
2912 && s2->f.func_type != FUNC_OLD)
2913 return 0;
2914 /* we should check the function return type for FUNC_OLD too
2915 but that causes problems with the internally used support
2916 functions such as TOK_memmove */
2917 if (s1->f.func_type == FUNC_OLD && !s1->next)
2918 return 1;
2919 if (s2->f.func_type == FUNC_OLD && !s2->next)
2920 return 1;
2921 for (;;) {
2922 if (!is_compatible_unqualified_types(&s1->type, &s2->type))
2923 return 0;
2924 s1 = s1->next;
2925 s2 = s2->next;
2926 if (!s1)
2927 return !s2;
2928 if (!s2)
2929 return 0;
2933 /* return true if type1 and type2 are the same. If unqualified is
2934 true, qualifiers on the types are ignored.
2936 static int compare_types(CType *type1, CType *type2, int unqualified)
2938 int bt1, t1, t2;
2940 t1 = type1->t & VT_TYPE;
2941 t2 = type2->t & VT_TYPE;
2942 if (unqualified) {
2943 /* strip qualifiers before comparing */
2944 t1 &= ~(VT_CONSTANT | VT_VOLATILE);
2945 t2 &= ~(VT_CONSTANT | VT_VOLATILE);
2948 /* Default vs. explicit signedness only matters for char */
2949 if ((t1 & VT_BTYPE) != VT_BYTE) {
2950 t1 &= ~VT_DEFSIGN;
2951 t2 &= ~VT_DEFSIGN;
2953 /* XXX: bitfields ? */
2954 if (t1 != t2)
2955 return 0;
2957 if ((t1 & VT_ARRAY)
2958 && !(type1->ref->c < 0
2959 || type2->ref->c < 0
2960 || type1->ref->c == type2->ref->c))
2961 return 0;
2963 /* test more complicated cases */
2964 bt1 = t1 & VT_BTYPE;
2965 if (bt1 == VT_PTR) {
2966 type1 = pointed_type(type1);
2967 type2 = pointed_type(type2);
2968 return is_compatible_types(type1, type2);
2969 } else if (bt1 == VT_STRUCT) {
2970 return (type1->ref == type2->ref);
2971 } else if (bt1 == VT_FUNC) {
2972 return is_compatible_func(type1, type2);
2973 } else if (IS_ENUM(type1->t) || IS_ENUM(type2->t)) {
2974 return type1->ref == type2->ref;
2975 } else {
2976 return 1;
2980 /* return true if type1 and type2 are exactly the same (including
2981 qualifiers).
2983 static int is_compatible_types(CType *type1, CType *type2)
2985 return compare_types(type1,type2,0);
2988 /* return true if type1 and type2 are the same (ignoring qualifiers).
2990 static int is_compatible_unqualified_types(CType *type1, CType *type2)
2992 return compare_types(type1,type2,1);
2995 /* print a type. If 'varstr' is not NULL, then the variable is also
2996 printed in the type */
2997 /* XXX: union */
2998 /* XXX: add array and function pointers */
2999 static void type_to_str(char *buf, int buf_size,
3000 CType *type, const char *varstr)
3002 int bt, v, t;
3003 Sym *s, *sa;
3004 char buf1[256];
3005 const char *tstr;
3007 t = type->t;
3008 bt = t & VT_BTYPE;
3009 buf[0] = '\0';
3011 if (t & VT_EXTERN)
3012 pstrcat(buf, buf_size, "extern ");
3013 if (t & VT_STATIC)
3014 pstrcat(buf, buf_size, "static ");
3015 if (t & VT_TYPEDEF)
3016 pstrcat(buf, buf_size, "typedef ");
3017 if (t & VT_INLINE)
3018 pstrcat(buf, buf_size, "inline ");
3019 if (t & VT_VOLATILE)
3020 pstrcat(buf, buf_size, "volatile ");
3021 if (t & VT_CONSTANT)
3022 pstrcat(buf, buf_size, "const ");
3024 if (((t & VT_DEFSIGN) && bt == VT_BYTE)
3025 || ((t & VT_UNSIGNED)
3026 && (bt == VT_SHORT || bt == VT_INT || bt == VT_LLONG)
3027 && !IS_ENUM(t)
3029 pstrcat(buf, buf_size, (t & VT_UNSIGNED) ? "unsigned " : "signed ");
3031 buf_size -= strlen(buf);
3032 buf += strlen(buf);
3034 switch(bt) {
3035 case VT_VOID:
3036 tstr = "void";
3037 goto add_tstr;
3038 case VT_BOOL:
3039 tstr = "_Bool";
3040 goto add_tstr;
3041 case VT_BYTE:
3042 tstr = "char";
3043 goto add_tstr;
3044 case VT_SHORT:
3045 tstr = "short";
3046 goto add_tstr;
3047 case VT_INT:
3048 tstr = "int";
3049 goto maybe_long;
3050 case VT_LLONG:
3051 tstr = "long long";
3052 maybe_long:
3053 if (t & VT_LONG)
3054 tstr = "long";
3055 if (!IS_ENUM(t))
3056 goto add_tstr;
3057 tstr = "enum ";
3058 goto tstruct;
3059 case VT_FLOAT:
3060 tstr = "float";
3061 goto add_tstr;
3062 case VT_DOUBLE:
3063 tstr = "double";
3064 goto add_tstr;
3065 case VT_LDOUBLE:
3066 tstr = "long double";
3067 add_tstr:
3068 pstrcat(buf, buf_size, tstr);
3069 break;
3070 case VT_STRUCT:
3071 tstr = "struct ";
3072 if (IS_UNION(t))
3073 tstr = "union ";
3074 tstruct:
3075 pstrcat(buf, buf_size, tstr);
3076 v = type->ref->v & ~SYM_STRUCT;
3077 if (v >= SYM_FIRST_ANOM)
3078 pstrcat(buf, buf_size, "<anonymous>");
3079 else
3080 pstrcat(buf, buf_size, get_tok_str(v, NULL));
3081 break;
3082 case VT_FUNC:
3083 s = type->ref;
3084 buf1[0]=0;
3085 if (varstr && '*' == *varstr) {
3086 pstrcat(buf1, sizeof(buf1), "(");
3087 pstrcat(buf1, sizeof(buf1), varstr);
3088 pstrcat(buf1, sizeof(buf1), ")");
3090 pstrcat(buf1, buf_size, "(");
3091 sa = s->next;
3092 while (sa != NULL) {
3093 char buf2[256];
3094 type_to_str(buf2, sizeof(buf2), &sa->type, NULL);
3095 pstrcat(buf1, sizeof(buf1), buf2);
3096 sa = sa->next;
3097 if (sa)
3098 pstrcat(buf1, sizeof(buf1), ", ");
3100 if (s->f.func_type == FUNC_ELLIPSIS)
3101 pstrcat(buf1, sizeof(buf1), ", ...");
3102 pstrcat(buf1, sizeof(buf1), ")");
3103 type_to_str(buf, buf_size, &s->type, buf1);
3104 goto no_var;
3105 case VT_PTR:
3106 s = type->ref;
3107 if (t & VT_ARRAY) {
3108 if (varstr && '*' == *varstr)
3109 snprintf(buf1, sizeof(buf1), "(%s)[%d]", varstr, s->c);
3110 else
3111 snprintf(buf1, sizeof(buf1), "%s[%d]", varstr ? varstr : "", s->c);
3112 type_to_str(buf, buf_size, &s->type, buf1);
3113 goto no_var;
3115 pstrcpy(buf1, sizeof(buf1), "*");
3116 if (t & VT_CONSTANT)
3117 pstrcat(buf1, buf_size, "const ");
3118 if (t & VT_VOLATILE)
3119 pstrcat(buf1, buf_size, "volatile ");
3120 if (varstr)
3121 pstrcat(buf1, sizeof(buf1), varstr);
3122 type_to_str(buf, buf_size, &s->type, buf1);
3123 goto no_var;
3125 if (varstr) {
3126 pstrcat(buf, buf_size, " ");
3127 pstrcat(buf, buf_size, varstr);
3129 no_var: ;
3132 /* verify type compatibility to store vtop in 'dt' type, and generate
3133 casts if needed. */
3134 static void gen_assign_cast(CType *dt)
3136 CType *st, *type1, *type2;
3137 char buf1[256], buf2[256];
3138 int dbt, sbt, qualwarn, lvl;
3140 st = &vtop->type; /* source type */
3141 dbt = dt->t & VT_BTYPE;
3142 sbt = st->t & VT_BTYPE;
3143 if (sbt == VT_VOID || dbt == VT_VOID) {
3144 if (sbt == VT_VOID && dbt == VT_VOID)
3145 ; /* It is Ok if both are void */
3146 else
3147 tcc_error("cannot cast from/to void");
3149 if (dt->t & VT_CONSTANT)
3150 tcc_warning("assignment of read-only location");
3151 switch(dbt) {
3152 case VT_PTR:
3153 /* special cases for pointers */
3154 /* '0' can also be a pointer */
3155 if (is_null_pointer(vtop))
3156 break;
3157 /* accept implicit conversion from integer to pointer with a warning */
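/* e.g. 'int *p = 42;' is accepted, with the warning below */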
3158 if (is_integer_btype(sbt)) {
3159 tcc_warning("assignment makes pointer from integer without a cast");
3160 break;
3162 type1 = pointed_type(dt);
3163 if (sbt == VT_PTR)
3164 type2 = pointed_type(st);
3165 else if (sbt == VT_FUNC)
3166 type2 = st; /* a function is implicitly a function pointer */
3167 else
3168 goto error;
3169 if (is_compatible_types(type1, type2))
3170 break;
3171 for (qualwarn = lvl = 0;; ++lvl) {
3172 if (((type2->t & VT_CONSTANT) && !(type1->t & VT_CONSTANT)) ||
3173 ((type2->t & VT_VOLATILE) && !(type1->t & VT_VOLATILE)))
3174 qualwarn = 1;
3175 dbt = type1->t & (VT_BTYPE|VT_LONG);
3176 sbt = type2->t & (VT_BTYPE|VT_LONG);
3177 if (dbt != VT_PTR || sbt != VT_PTR)
3178 break;
3179 type1 = pointed_type(type1);
3180 type2 = pointed_type(type2);
3182 if (!is_compatible_unqualified_types(type1, type2)) {
3183 if ((dbt == VT_VOID || sbt == VT_VOID) && lvl == 0) {
3184 /* void * can match anything */
3185 } else if (dbt == sbt
3186 && is_integer_btype(sbt & VT_BTYPE)
3187 && IS_ENUM(type1->t) + IS_ENUM(type2->t)
3188 + !!((type1->t ^ type2->t) & VT_UNSIGNED) < 2) {
3189 /* Like GCC, don't warn by default for mere changes
3190 in pointer target signedness. Do warn for different
3191 base types, though, in particular for unsigned enums
3192 and signed int targets. */
3193 } else {
3194 tcc_warning("assignment from incompatible pointer type");
3195 break;
3198 if (qualwarn)
3199 tcc_warning("assignment discards qualifiers from pointer target type");
3200 break;
3201 case VT_BYTE:
3202 case VT_SHORT:
3203 case VT_INT:
3204 case VT_LLONG:
3205 if (sbt == VT_PTR || sbt == VT_FUNC) {
3206 tcc_warning("assignment makes integer from pointer without a cast");
3207 } else if (sbt == VT_STRUCT) {
3208 goto case_VT_STRUCT;
3210 /* XXX: more tests */
3211 break;
3212 case VT_STRUCT:
3213 case_VT_STRUCT:
3214 if (!is_compatible_unqualified_types(dt, st)) {
3215 error:
3216 type_to_str(buf1, sizeof(buf1), st, NULL);
3217 type_to_str(buf2, sizeof(buf2), dt, NULL);
3218 tcc_error("cannot cast '%s' to '%s'", buf1, buf2);
3220 break;
3222 gen_cast(dt);
3225 /* store vtop in lvalue pushed on stack */
3226 ST_FUNC void vstore(void)
3228 int sbt, dbt, ft, r, t, size, align, bit_size, bit_pos, rc, delayed_cast;
3230 ft = vtop[-1].type.t;
3231 sbt = vtop->type.t & VT_BTYPE;
3232 dbt = ft & VT_BTYPE;
3233 if ((((sbt == VT_INT || sbt == VT_SHORT) && dbt == VT_BYTE) ||
3234 (sbt == VT_INT && dbt == VT_SHORT))
3235 && !(vtop->type.t & VT_BITFIELD)) {
3236 /* optimize char/short casts */
3237 delayed_cast = VT_MUSTCAST;
3238 vtop->type.t = ft & VT_TYPE;
3239 /* XXX: factorize */
3240 if (ft & VT_CONSTANT)
3241 tcc_warning("assignment of read-only location");
3242 } else {
3243 delayed_cast = 0;
3244 if (!(ft & VT_BITFIELD))
3245 gen_assign_cast(&vtop[-1].type);
3248 if (sbt == VT_STRUCT) {
3249 /* if structure, only generate pointer */
3250 /* structure assignment : generate memcpy */
3251 /* XXX: optimize if small size */
3252 size = type_size(&vtop->type, &align);
3254 /* destination */
3255 vswap();
3256 vtop->type.t = VT_PTR;
3257 gaddrof();
3259 /* address of memcpy() */
3260 #ifdef TCC_ARM_EABI
3261 if(!(align & 7))
3262 vpush_global_sym(&func_old_type, TOK_memcpy8);
3263 else if(!(align & 3))
3264 vpush_global_sym(&func_old_type, TOK_memcpy4);
3265 else
3266 #endif
3267 /* Use memmove, rather than memcpy, as dest and src may be the same */
3268 vpush_global_sym(&func_old_type, TOK_memmove);
3270 vswap();
3271 /* source */
3272 vpushv(vtop - 2);
3273 vtop->type.t = VT_PTR;
3274 gaddrof();
3275 /* type size */
3276 vpushi(size);
3277 gfunc_call(3);
3279 /* leave source on stack */
3280 } else if (ft & VT_BITFIELD) {
3281 /* bitfield store handling */
3283 /* save lvalue as expression result (example: s.b = s.a = n;) */
3284 vdup(), vtop[-1] = vtop[-2];
3286 bit_pos = BIT_POS(ft);
3287 bit_size = BIT_SIZE(ft);
3288 /* remove bit field info to avoid loops */
3289 vtop[-1].type.t = ft & ~VT_STRUCT_MASK;
3291 if ((ft & VT_BTYPE) == VT_BOOL) {
3292 gen_cast(&vtop[-1].type);
3293 vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
3296 r = adjust_bf(vtop - 1, bit_pos, bit_size);
3297 if (r == VT_STRUCT) {
3298 gen_cast_s((ft & VT_BTYPE) == VT_LLONG ? VT_LLONG : VT_INT);
3299 store_packed_bf(bit_pos, bit_size);
3300 } else {
3301 unsigned long long mask = (1ULL << bit_size) - 1;
3302 if ((ft & VT_BTYPE) != VT_BOOL) {
3303 /* mask source */
3304 if ((vtop[-1].type.t & VT_BTYPE) == VT_LLONG)
3305 vpushll(mask);
3306 else
3307 vpushi((unsigned)mask);
3308 gen_op('&');
3310 /* shift source */
3311 vpushi(bit_pos);
3312 gen_op(TOK_SHL);
3313 vswap();
3314 /* duplicate destination */
3315 vdup();
3316 vrott(3);
3317 /* load destination, mask and or with source */
3318 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
3319 vpushll(~(mask << bit_pos));
3320 else
3321 vpushi(~((unsigned)mask << bit_pos));
3322 gen_op('&');
3323 gen_op('|');
3324 /* store result */
3325 vstore();
3326 /* ... and discard */
3327 vpop();
3329 } else if (dbt == VT_VOID) {
3330 --vtop;
3331 } else {
3332 #ifdef CONFIG_TCC_BCHECK
3333 /* bound check case */
3334 if (vtop[-1].r & VT_MUSTBOUND) {
3335 vswap();
3336 gbound();
3337 vswap();
3339 #endif
3340 rc = RC_INT;
3341 if (is_float(ft)) {
3342 rc = RC_FLOAT;
3343 #ifdef TCC_TARGET_X86_64
3344 if ((ft & VT_BTYPE) == VT_LDOUBLE) {
3345 rc = RC_ST0;
3346 } else if ((ft & VT_BTYPE) == VT_QFLOAT) {
3347 rc = RC_FRET;
3349 #endif
3351 r = gv(rc); /* generate value */
3352 /* if lvalue was saved on stack, must read it */
3353 if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
3354 SValue sv;
3355 t = get_reg(RC_INT);
3356 #if PTR_SIZE == 8
3357 sv.type.t = VT_PTR;
3358 #else
3359 sv.type.t = VT_INT;
3360 #endif
3361 sv.r = VT_LOCAL | VT_LVAL;
3362 sv.c.i = vtop[-1].c.i;
3363 load(t, &sv);
3364 vtop[-1].r = t | VT_LVAL;
3366 /* two word case handling: store second register at word + 4 (or +8 for x86-64) */
3367 #if PTR_SIZE == 8
3368 if (((ft & VT_BTYPE) == VT_QLONG) || ((ft & VT_BTYPE) == VT_QFLOAT)) {
3369 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
3370 #else
3371 if ((ft & VT_BTYPE) == VT_LLONG) {
3372 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
3373 #endif
3374 vtop[-1].type.t = load_type;
3375 store(r, vtop - 1);
3376 vswap();
3377 /* convert to int to increment easily */
3378 vtop->type.t = addr_type;
3379 gaddrof();
3380 vpushi(load_size);
3381 gen_op('+');
3382 vtop->r |= VT_LVAL;
3383 vswap();
3384 vtop[-1].type.t = load_type;
3385 /* XXX: it works because r2 is spilled last ! */
3386 store(vtop->r2, vtop - 1);
3387 } else {
3388 store(r, vtop - 1);
3391 vswap();
3392 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
3393 vtop->r |= delayed_cast;
3397 /* 'post' selects post- vs. pre-increment/decrement; c is the token ++ or -- */
3398 ST_FUNC void inc(int post, int c)
3400 test_lvalue();
3401 vdup(); /* save lvalue */
3402 if (post) {
3403 gv_dup(); /* duplicate value */
3404 vrotb(3);
3405 vrotb(3);
3407 /* add constant */
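/* c - TOK_MID evaluates to +1 for TOK_INC and -1 for TOK_DEC,
   so the same '+' op handles both increment and decrement */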
3408 vpushi(c - TOK_MID);
3409 gen_op('+');
3410 vstore(); /* store value */
3411 if (post)
3412 vpop(); /* if post op, return saved value */
3415 ST_FUNC void parse_mult_str (CString *astr, const char *msg)
3417 /* read the string */
3418 if (tok != TOK_STR)
3419 expect(msg);
3420 cstr_new(astr);
3421 while (tok == TOK_STR) {
3422 /* XXX: add \0 handling too ? */
3423 cstr_cat(astr, tokc.str.data, -1);
3424 next();
3426 cstr_ccat(astr, '\0');
3429 /* If I is >= 1 and a power of two, returns log2(i)+1.
3430 If I is 0 returns 0. */
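/* e.g. exact_log2p1(1) == 1, exact_log2p1(8) == 4, exact_log2p1(16) == 5;
   the +1 encoding lets 0 mean "no alignment specified" in attributes */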
3431 static int exact_log2p1(int i)
3433 int ret;
3434 if (!i)
3435 return 0;
3436 for (ret = 1; i >= 1 << 8; ret += 8)
3437 i >>= 8;
3438 if (i >= 1 << 4)
3439 ret += 4, i >>= 4;
3440 if (i >= 1 << 2)
3441 ret += 2, i >>= 2;
3442 if (i >= 1 << 1)
3443 ret++;
3444 return ret;
3447 /* Parse __attribute__((...)) GNUC extension. */
3448 static void parse_attribute(AttributeDef *ad)
3450 int t, n;
3451 CString astr;
3453 redo:
3454 if (tok != TOK_ATTRIBUTE1 && tok != TOK_ATTRIBUTE2)
3455 return;
3456 next();
3457 skip('(');
3458 skip('(');
3459 while (tok != ')') {
3460 if (tok < TOK_IDENT)
3461 expect("attribute name");
3462 t = tok;
3463 next();
3464 switch(t) {
3465 case TOK_CLEANUP1:
3466 case TOK_CLEANUP2:
3468 Sym *s;
3470 skip('(');
3471 s = sym_find(tok);
3472 if (!s) {
3473 tcc_warning("implicit declaration of function '%s'",
3474 get_tok_str(tok, &tokc));
3475 s = external_global_sym(tok, &func_old_type);
3477 ad->cleanup_func = s;
3478 next();
3479 skip(')');
3480 break;
3482 case TOK_SECTION1:
3483 case TOK_SECTION2:
3484 skip('(');
3485 parse_mult_str(&astr, "section name");
3486 ad->section = find_section(tcc_state, (char *)astr.data);
3487 skip(')');
3488 cstr_free(&astr);
3489 break;
3490 case TOK_ALIAS1:
3491 case TOK_ALIAS2:
3492 skip('(');
3493 parse_mult_str(&astr, "alias(\"target\")");
3494 ad->alias_target = /* save string as token, for later */
3495 tok_alloc((char*)astr.data, astr.size-1)->tok;
3496 skip(')');
3497 cstr_free(&astr);
3498 break;
3499 case TOK_VISIBILITY1:
3500 case TOK_VISIBILITY2:
3501 skip('(');
3502 parse_mult_str(&astr,
3503 "visibility(\"default|hidden|internal|protected\")");
3504 if (!strcmp (astr.data, "default"))
3505 ad->a.visibility = STV_DEFAULT;
3506 else if (!strcmp (astr.data, "hidden"))
3507 ad->a.visibility = STV_HIDDEN;
3508 else if (!strcmp (astr.data, "internal"))
3509 ad->a.visibility = STV_INTERNAL;
3510 else if (!strcmp (astr.data, "protected"))
3511 ad->a.visibility = STV_PROTECTED;
3512 else
3513 expect("visibility(\"default|hidden|internal|protected\")");
3514 skip(')');
3515 cstr_free(&astr);
3516 break;
3517 case TOK_ALIGNED1:
3518 case TOK_ALIGNED2:
3519 if (tok == '(') {
3520 next();
3521 n = expr_const();
3522 if (n <= 0 || (n & (n - 1)) != 0)
3523 tcc_error("alignment must be a positive power of two");
3524 skip(')');
3525 } else {
3526 n = MAX_ALIGN;
3528 ad->a.aligned = exact_log2p1(n);
3529 if (n != 1 << (ad->a.aligned - 1))
3530 tcc_error("alignment of %d is larger than implemented", n);
3531 break;
3532 case TOK_PACKED1:
3533 case TOK_PACKED2:
3534 ad->a.packed = 1;
3535 break;
3536 case TOK_WEAK1:
3537 case TOK_WEAK2:
3538 ad->a.weak = 1;
3539 break;
3540 case TOK_UNUSED1:
3541 case TOK_UNUSED2:
3542 /* currently, no need to handle it because tcc does not
3543 track unused objects */
3544 break;
3545 case TOK_NORETURN1:
3546 case TOK_NORETURN2:
3547 /* currently ignored: tcc does not make use of
3548 'noreturn' information */
3549 break;
3550 case TOK_CDECL1:
3551 case TOK_CDECL2:
3552 case TOK_CDECL3:
3553 ad->f.func_call = FUNC_CDECL;
3554 break;
3555 case TOK_STDCALL1:
3556 case TOK_STDCALL2:
3557 case TOK_STDCALL3:
3558 ad->f.func_call = FUNC_STDCALL;
3559 break;
3560 #ifdef TCC_TARGET_I386
3561 case TOK_REGPARM1:
3562 case TOK_REGPARM2:
3563 skip('(');
3564 n = expr_const();
3565 if (n > 3)
3566 n = 3;
3567 else if (n < 0)
3568 n = 0;
3569 if (n > 0)
3570 ad->f.func_call = FUNC_FASTCALL1 + n - 1;
3571 skip(')');
3572 break;
3573 case TOK_FASTCALL1:
3574 case TOK_FASTCALL2:
3575 case TOK_FASTCALL3:
3576 ad->f.func_call = FUNC_FASTCALLW;
3577 break;
3578 #endif
3579 case TOK_MODE:
3580 skip('(');
3581 switch(tok) {
3582 case TOK_MODE_DI:
3583 ad->attr_mode = VT_LLONG + 1;
3584 break;
3585 case TOK_MODE_QI:
3586 ad->attr_mode = VT_BYTE + 1;
3587 break;
3588 case TOK_MODE_HI:
3589 ad->attr_mode = VT_SHORT + 1;
3590 break;
3591 case TOK_MODE_SI:
3592 case TOK_MODE_word:
3593 ad->attr_mode = VT_INT + 1;
3594 break;
3595 default:
3596 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok, NULL));
3597 break;
3599 next();
3600 skip(')');
3601 break;
3602 case TOK_DLLEXPORT:
3603 ad->a.dllexport = 1;
3604 break;
3605 case TOK_NODECORATE:
3606 ad->a.nodecorate = 1;
3607 break;
3608 case TOK_DLLIMPORT:
3609 ad->a.dllimport = 1;
3610 break;
3611 default:
3612 if (tcc_state->warn_unsupported)
3613 tcc_warning("'%s' attribute ignored", get_tok_str(t, NULL));
3614 /* skip parameters */
3615 if (tok == '(') {
3616 int parenthesis = 0;
3617 do {
3618 if (tok == '(')
3619 parenthesis++;
3620 else if (tok == ')')
3621 parenthesis--;
3622 next();
3623 } while (parenthesis && tok != -1);
3625 break;
3627 if (tok != ',')
3628 break;
3629 next();
3631 skip(')');
3632 skip(')');
3633 goto redo;
3636 static Sym * find_field (CType *type, int v, int *cumofs)
3638 Sym *s = type->ref;
3639 v |= SYM_FIELD;
3640 while ((s = s->next) != NULL) {
3641 if ((s->v & SYM_FIELD) &&
3642 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3643 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3644 Sym *ret = find_field (&s->type, v, cumofs);
3645 if (ret) {
3646 *cumofs += s->c;
3647 return ret;
3650 if (s->v == v)
3651 break;
3653 return s;
3656 static void struct_layout(CType *type, AttributeDef *ad)
3658 int size, align, maxalign, offset, c, bit_pos, bit_size;
3659 int packed, a, bt, prevbt, prev_bit_size;
3660 int pcc = !tcc_state->ms_bitfields;
3661 int pragma_pack = *tcc_state->pack_stack_ptr;
3662 Sym *f;
3664 maxalign = 1;
3665 offset = 0;
3666 c = 0;
3667 bit_pos = 0;
3668 prevbt = VT_STRUCT; /* make it never match */
3669 prev_bit_size = 0;
3671 //#define BF_DEBUG
3673 for (f = type->ref->next; f; f = f->next) {
3674 if (f->type.t & VT_BITFIELD)
3675 bit_size = BIT_SIZE(f->type.t);
3676 else
3677 bit_size = -1;
3678 size = type_size(&f->type, &align);
3679 a = f->a.aligned ? 1 << (f->a.aligned - 1) : 0;
3680 packed = 0;
3682 if (pcc && bit_size == 0) {
3683 /* in pcc mode, packing does not affect zero-width bitfields */
3685 } else {
3686 /* in pcc mode, attribute packed overrides if set. */
3687 if (pcc && (f->a.packed || ad->a.packed))
3688 align = packed = 1;
3690 /* pragma pack overrides align if it is smaller, and always packs bitfields */
3691 if (pragma_pack) {
3692 packed = 1;
3693 if (pragma_pack < align)
3694 align = pragma_pack;
3695 /* in pcc mode pragma pack also overrides individual align */
3696 if (pcc && pragma_pack < a)
3697 a = 0;
3700 /* some individual align was specified */
3701 if (a)
3702 align = a;
3704 if (type->ref->type.t == VT_UNION) {
3705 if (pcc && bit_size >= 0)
3706 size = (bit_size + 7) >> 3;
3707 offset = 0;
3708 if (size > c)
3709 c = size;
3711 } else if (bit_size < 0) {
3712 if (pcc)
3713 c += (bit_pos + 7) >> 3;
3714 c = (c + align - 1) & -align;
3715 offset = c;
3716 if (size > 0)
3717 c += size;
3718 bit_pos = 0;
3719 prevbt = VT_STRUCT;
3720 prev_bit_size = 0;
3722 } else {
3723 /* A bit-field. Layout is more complicated. There are two
3724 options: PCC (GCC) compatible and MS compatible */
3725 if (pcc) {
3726 /* In PCC layout a bit-field is placed adjacent to the
3727 preceding bit-fields, except if:
3728 - it has zero-width
3729 - an individual alignment was given
3730 - it would overflow its base type container and
3731 there is no packing */
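/* illustration: in 'struct { int a:20; int b:20; }' the field b would
   overflow the 12 bits left in the first int container, so it starts
   a new unit at offset 4 and the struct ends up 8 bytes large */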
3732 if (bit_size == 0) {
3733 new_field:
3734 c = (c + ((bit_pos + 7) >> 3) + align - 1) & -align;
3735 bit_pos = 0;
3736 } else if (f->a.aligned) {
3737 goto new_field;
3738 } else if (!packed) {
3739 int a8 = align * 8;
3740 int ofs = ((c * 8 + bit_pos) % a8 + bit_size + a8 - 1) / a8;
3741 if (ofs > size / align)
3742 goto new_field;
3745 /* in pcc mode, long long bitfields have type int if they fit */
3746 if (size == 8 && bit_size <= 32)
3747 f->type.t = (f->type.t & ~VT_BTYPE) | VT_INT, size = 4;
3749 while (bit_pos >= align * 8)
3750 c += align, bit_pos -= align * 8;
3751 offset = c;
3753 /* In PCC layout named bit-fields influence the alignment
3754 of the containing struct using the base type's alignment,
3755 except for packed fields (which already have the correct align here). */
3756 if (f->v & SYM_FIRST_ANOM
3757 // && bit_size // ??? gcc on ARM/rpi does that
3759 align = 1;
3761 } else {
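/* illustration (ms_bitfields): in 'struct { char a:4; int b:4; }' the
   base type changes from char to int, so b starts a new int-sized run
   at offset 4 and the struct becomes 8 bytes */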
3762 bt = f->type.t & VT_BTYPE;
3763 if ((bit_pos + bit_size > size * 8)
3764 || (bit_size > 0) == (bt != prevbt)
3766 c = (c + align - 1) & -align;
3767 offset = c;
3768 bit_pos = 0;
3769 /* In MS bitfield mode a bit-field run always uses
3770 at least as many bits as the underlying type.
3771 To start a new run it's also required that this
3772 or the last bit-field had non-zero width. */
3773 if (bit_size || prev_bit_size)
3774 c += size;
3776 /* In MS layout the record's alignment is normally
3777 influenced by the field, except for a zero-width
3778 field at the start of a run (but by further zero-width
3779 fields it is again). */
3780 if (bit_size == 0 && prevbt != bt)
3781 align = 1;
3782 prevbt = bt;
3783 prev_bit_size = bit_size;
3786 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
3787 | (bit_pos << VT_STRUCT_SHIFT);
3788 bit_pos += bit_size;
3790 if (align > maxalign)
3791 maxalign = align;
3793 #ifdef BF_DEBUG
3794 printf("set field %s offset %-2d size %-2d align %-2d",
3795 get_tok_str(f->v & ~SYM_FIELD, NULL), offset, size, align);
3796 if (f->type.t & VT_BITFIELD) {
3797 printf(" pos %-2d bits %-2d",
3798 BIT_POS(f->type.t),
3799 BIT_SIZE(f->type.t)
3802 printf("\n");
3803 #endif
3805 f->c = offset;
3806 f->r = 0;
3809 if (pcc)
3810 c += (bit_pos + 7) >> 3;
3812 /* store size and alignment */
3813 a = bt = ad->a.aligned ? 1 << (ad->a.aligned - 1) : 1;
3814 if (a < maxalign)
3815 a = maxalign;
3816 type->ref->r = a;
3817 if (pragma_pack && pragma_pack < maxalign && 0 == pcc) {
3818 /* can happen if individual align for some member was given. In
3819 this case MSVC ignores maxalign when aligning the size */
3820 a = pragma_pack;
3821 if (a < bt)
3822 a = bt;
3824 c = (c + a - 1) & -a;
3825 type->ref->c = c;
3827 #ifdef BF_DEBUG
3828 printf("struct size %-2d align %-2d\n\n", c, a), fflush(stdout);
3829 #endif
3831 /* check whether we can access bitfields by their type */
3832 for (f = type->ref->next; f; f = f->next) {
3833 int s, px, cx, c0;
3834 CType t;
3836 if (0 == (f->type.t & VT_BITFIELD))
3837 continue;
3838 f->type.ref = f;
3839 f->auxtype = -1;
3840 bit_size = BIT_SIZE(f->type.t);
3841 if (bit_size == 0)
3842 continue;
3843 bit_pos = BIT_POS(f->type.t);
3844 size = type_size(&f->type, &align);
3845 if (bit_pos + bit_size <= size * 8 && f->c + size <= c)
3846 continue;
3848 /* try to access the field using a different type */
3849 c0 = -1, s = align = 1;
3850 for (;;) {
3851 px = f->c * 8 + bit_pos;
3852 cx = (px >> 3) & -align;
3853 px = px - (cx << 3);
3854 if (c0 == cx)
3855 break;
3856 s = (px + bit_size + 7) >> 3;
3857 if (s > 4) {
3858 t.t = VT_LLONG;
3859 } else if (s > 2) {
3860 t.t = VT_INT;
3861 } else if (s > 1) {
3862 t.t = VT_SHORT;
3863 } else {
3864 t.t = VT_BYTE;
3866 s = type_size(&t, &align);
3867 c0 = cx;
3870 if (px + bit_size <= s * 8 && cx + s <= c) {
3871 /* update offset and bit position */
3872 f->c = cx;
3873 bit_pos = px;
3874 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
3875 | (bit_pos << VT_STRUCT_SHIFT);
3876 if (s != size)
3877 f->auxtype = t.t;
3878 #ifdef BF_DEBUG
3879 printf("FIX field %s offset %-2d size %-2d align %-2d "
3880 "pos %-2d bits %-2d\n",
3881 get_tok_str(f->v & ~SYM_FIELD, NULL),
3882 cx, s, align, px, bit_size);
3883 #endif
3884 } else {
3885 /* fall back to load/store single-byte wise */
3886 f->auxtype = VT_STRUCT;
3887 #ifdef BF_DEBUG
3888 printf("FIX field %s : load byte-wise\n",
3889 get_tok_str(f->v & ~SYM_FIELD, NULL));
3890 #endif
3895 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
3896 static void struct_decl(CType *type, int u)
3898 int v, c, size, align, flexible;
3899 int bit_size, bsize, bt;
3900 Sym *s, *ss, **ps;
3901 AttributeDef ad, ad1;
3902 CType type1, btype;
3904 memset(&ad, 0, sizeof ad);
3905 next();
3906 parse_attribute(&ad);
3907 if (tok != '{') {
3908 v = tok;
3909 next();
3910 /* struct already defined ? return it */
3911 if (v < TOK_IDENT)
3912 expect("struct/union/enum name");
3913 s = struct_find(v);
3914 if (s && (s->sym_scope == local_scope || tok != '{')) {
3915 if (u == s->type.t)
3916 goto do_decl;
3917 if (u == VT_ENUM && IS_ENUM(s->type.t))
3918 goto do_decl;
3919 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
3921 } else {
3922 v = anon_sym++;
3924 /* Record the original enum/struct/union token. */
3925 type1.t = u == VT_ENUM ? u | VT_INT | VT_UNSIGNED : u;
3926 type1.ref = NULL;
3927 /* we put an undefined size for struct/union */
3928 s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
3929 s->r = 0; /* default alignment is zero as gcc */
3930 do_decl:
3931 type->t = s->type.t;
3932 type->ref = s;
3934 if (tok == '{') {
3935 next();
3936 if (s->c != -1)
3937 tcc_error("struct/union/enum already defined");
3938 s->c = -2;
3939 /* cannot be empty */
3940 /* empty enums are not allowed */
3941 ps = &s->next;
3942 if (u == VT_ENUM) {
3943 long long ll = 0, pl = 0, nl = 0;
3944 CType t;
3945 t.ref = s;
3946 /* enum symbols have static storage */
3947 t.t = VT_INT|VT_STATIC|VT_ENUM_VAL;
3948 for(;;) {
3949 v = tok;
3950 if (v < TOK_UIDENT)
3951 expect("identifier");
3952 ss = sym_find(v);
3953 if (ss && !local_stack)
3954 tcc_error("redefinition of enumerator '%s'",
3955 get_tok_str(v, NULL));
3956 next();
3957 if (tok == '=') {
3958 next();
3959 ll = expr_const64();
3961 ss = sym_push(v, &t, VT_CONST, 0);
3962 ss->enum_val = ll;
3963 *ps = ss, ps = &ss->next;
3964 if (ll < nl)
3965 nl = ll;
3966 if (ll > pl)
3967 pl = ll;
3968 if (tok != ',')
3969 break;
3970 next();
3971 ll++;
3972 /* NOTE: we accept a trailing comma */
3973 if (tok == '}')
3974 break;
3976 skip('}');
3977 /* set integral type of the enum */
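/* illustration: 'enum { A = 1 }' gets unsigned int; negative or
   out-of-range values such as 'enum { B = -1, C = 0x80000000 }'
   force a 64-bit integer type below */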
3978 t.t = VT_INT;
3979 if (nl >= 0) {
3980 if (pl != (unsigned)pl)
3981 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
3982 t.t |= VT_UNSIGNED;
3983 } else if (pl != (int)pl || nl != (int)nl)
3984 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
3985 s->type.t = type->t = t.t | VT_ENUM;
3986 s->c = 0;
3987 /* set type for enum members */
3988 for (ss = s->next; ss; ss = ss->next) {
3989 ll = ss->enum_val;
3990 if (ll == (int)ll) /* default is int if it fits */
3991 continue;
3992 if (t.t & VT_UNSIGNED) {
3993 ss->type.t |= VT_UNSIGNED;
3994 if (ll == (unsigned)ll)
3995 continue;
3997 ss->type.t = (ss->type.t & ~VT_BTYPE)
3998 | (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4000 } else {
4001 c = 0;
4002 flexible = 0;
4003 while (tok != '}') {
4004 if (!parse_btype(&btype, &ad1)) {
4005 skip(';');
4006 continue;
4008 while (1) {
4009 if (flexible)
4010 tcc_error("flexible array member '%s' not at the end of struct",
4011 get_tok_str(v, NULL));
4012 bit_size = -1;
4013 v = 0;
4014 type1 = btype;
4015 if (tok != ':') {
4016 if (tok != ';')
4017 type_decl(&type1, &ad1, &v, TYPE_DIRECT);
4018 if (v == 0) {
4019 if ((type1.t & VT_BTYPE) != VT_STRUCT)
4020 expect("identifier");
4021 else {
4022 int v = btype.ref->v;
4023 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
4024 if (tcc_state->ms_extensions == 0)
4025 expect("identifier");
4029 if (type_size(&type1, &align) < 0) {
4030 if ((u == VT_STRUCT) && (type1.t & VT_ARRAY) && c)
4031 flexible = 1;
4032 else
4033 tcc_error("field '%s' has incomplete type",
4034 get_tok_str(v, NULL));
4036 if ((type1.t & VT_BTYPE) == VT_FUNC ||
4037 (type1.t & VT_BTYPE) == VT_VOID ||
4038 (type1.t & VT_STORAGE))
4039 tcc_error("invalid type for '%s'",
4040 get_tok_str(v, NULL));
4042 if (tok == ':') {
4043 next();
4044 bit_size = expr_const();
4045 /* XXX: handle v = 0 case for messages */
4046 if (bit_size < 0)
4047 tcc_error("negative width in bit-field '%s'",
4048 get_tok_str(v, NULL));
4049 if (v && bit_size == 0)
4050 tcc_error("zero width for bit-field '%s'",
4051 get_tok_str(v, NULL));
4052 parse_attribute(&ad1);
4054 size = type_size(&type1, &align);
4055 if (bit_size >= 0) {
4056 bt = type1.t & VT_BTYPE;
4057 if (bt != VT_INT &&
4058 bt != VT_BYTE &&
4059 bt != VT_SHORT &&
4060 bt != VT_BOOL &&
4061 bt != VT_LLONG)
4062 tcc_error("bitfields must have scalar type");
4063 bsize = size * 8;
4064 if (bit_size > bsize) {
4065 tcc_error("width of '%s' exceeds its type",
4066 get_tok_str(v, NULL));
4067 } else if (bit_size == bsize
4068 && !ad.a.packed && !ad1.a.packed) {
4069 /* no need for bit fields */
4071 } else if (bit_size == 64) {
4072 tcc_error("field width 64 not implemented");
4073 } else {
4074 type1.t = (type1.t & ~VT_STRUCT_MASK)
4075 | VT_BITFIELD
4076 | (bit_size << (VT_STRUCT_SHIFT + 6));
4079 if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
4080 /* Remember we've seen a real field to check
4081 for placement of flexible array member. */
4082 c = 1;
4084 /* If member is a struct or bit-field, enforce
4085 placing into the struct (as anonymous). */
4086 if (v == 0 &&
4087 ((type1.t & VT_BTYPE) == VT_STRUCT ||
4088 bit_size >= 0)) {
4089 v = anon_sym++;
4091 if (v) {
4092 ss = sym_push(v | SYM_FIELD, &type1, 0, 0);
4093 ss->a = ad1.a;
4094 *ps = ss;
4095 ps = &ss->next;
4097 if (tok == ';' || tok == TOK_EOF)
4098 break;
4099 skip(',');
4101 skip(';');
4103 skip('}');
4104 parse_attribute(&ad);
4105 struct_layout(type, &ad);
4110 static void sym_to_attr(AttributeDef *ad, Sym *s)
4112 merge_symattr(&ad->a, &s->a);
4113 merge_funcattr(&ad->f, &s->f);
4116 /* Add type qualifiers to a type. If the type is an array then the qualifiers
4117 are added to the element type, copied because it could be a typedef. */
4118 static void parse_btype_qualify(CType *type, int qualifiers)
4120 while (type->t & VT_ARRAY) {
4121 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
4122 type = &type->ref->type;
4124 type->t |= qualifiers;
4127 /* return 0 if no type declaration. otherwise, return the basic type
4128 and skip it.
4130 static int parse_btype(CType *type, AttributeDef *ad)
4132 int t, u, bt, st, type_found, typespec_found, g;
4133 Sym *s;
4134 CType type1;
4136 memset(ad, 0, sizeof(AttributeDef));
4137 type_found = 0;
4138 typespec_found = 0;
4139 t = VT_INT;
4140 bt = st = -1;
4141 type->ref = NULL;
4143 while(1) {
4144 switch(tok) {
4145 case TOK_EXTENSION:
4146 /* currently, we really ignore extension */
4147 next();
4148 continue;
4150 /* basic types */
4151 case TOK_CHAR:
4152 u = VT_BYTE;
4153 basic_type:
4154 next();
4155 basic_type1:
4156 if (u == VT_SHORT || u == VT_LONG) {
4157 if (st != -1 || (bt != -1 && bt != VT_INT))
4158 tmbt: tcc_error("too many basic types");
4159 st = u;
4160 } else {
4161 if (bt != -1 || (st != -1 && u != VT_INT))
4162 goto tmbt;
4163 bt = u;
4165 if (u != VT_INT)
4166 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
4167 typespec_found = 1;
4168 break;
4169 case TOK_VOID:
4170 u = VT_VOID;
4171 goto basic_type;
4172 case TOK_SHORT:
4173 u = VT_SHORT;
4174 goto basic_type;
4175 case TOK_INT:
4176 u = VT_INT;
4177 goto basic_type;
4178 case TOK_ALIGNAS:
4179 { int n;
4180 AttributeDef ad1;
4181 next();
4182 skip('(');
4183 memset(&ad1, 0, sizeof(AttributeDef));
4184 if (parse_btype(&type1, &ad1)) {
4185 type_decl(&type1, &ad1, &n, TYPE_ABSTRACT);
4186 if (ad1.a.aligned)
4187 n = 1 << (ad1.a.aligned - 1);
4188 else
4189 type_size(&type1, &n);
4190 } else {
4191 n = expr_const();
4192 if (n <= 0 || (n & (n - 1)) != 0)
4193 tcc_error("alignment must be a positive power of two");
4195 skip(')');
4196 ad->a.aligned = exact_log2p1(n);
4198 continue;
4199 case TOK_LONG:
4200 if ((t & VT_BTYPE) == VT_DOUBLE) {
4201 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
4202 } else if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
4203 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LLONG;
4204 } else {
4205 u = VT_LONG;
4206 goto basic_type;
4208 next();
4209 break;
4210 #ifdef TCC_TARGET_ARM64
4211 case TOK_UINT128:
4212 /* GCC's __uint128_t appears in some Linux header files. Make it a
4213 synonym for long double to get the size and alignment right. */
4214 u = VT_LDOUBLE;
4215 goto basic_type;
4216 #endif
4217 case TOK_BOOL:
4218 u = VT_BOOL;
4219 goto basic_type;
4220 case TOK_FLOAT:
4221 u = VT_FLOAT;
4222 goto basic_type;
4223 case TOK_DOUBLE:
4224 if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
4225 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
4226 } else {
4227 u = VT_DOUBLE;
4228 goto basic_type;
4230 next();
4231 break;
4232 case TOK_ENUM:
4233 struct_decl(&type1, VT_ENUM);
4234 basic_type2:
4235 u = type1.t;
4236 type->ref = type1.ref;
4237 goto basic_type1;
4238 case TOK_STRUCT:
4239 struct_decl(&type1, VT_STRUCT);
4240 goto basic_type2;
4241 case TOK_UNION:
4242 struct_decl(&type1, VT_UNION);
4243 goto basic_type2;
4245 /* type modifiers */
4246 case TOK_CONST1:
4247 case TOK_CONST2:
4248 case TOK_CONST3:
4249 type->t = t;
4250 parse_btype_qualify(type, VT_CONSTANT);
4251 t = type->t;
4252 next();
4253 break;
4254 case TOK_VOLATILE1:
4255 case TOK_VOLATILE2:
4256 case TOK_VOLATILE3:
4257 type->t = t;
4258 parse_btype_qualify(type, VT_VOLATILE);
4259 t = type->t;
4260 next();
4261 break;
4262 case TOK_SIGNED1:
4263 case TOK_SIGNED2:
4264 case TOK_SIGNED3:
4265 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
4266 tcc_error("signed and unsigned modifier");
4267 t |= VT_DEFSIGN;
4268 next();
4269 typespec_found = 1;
4270 break;
4271 case TOK_REGISTER:
4272 case TOK_AUTO:
4273 case TOK_RESTRICT1:
4274 case TOK_RESTRICT2:
4275 case TOK_RESTRICT3:
4276 next();
4277 break;
4278 case TOK_UNSIGNED:
4279 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
4280 tcc_error("signed and unsigned modifier");
4281 t |= VT_DEFSIGN | VT_UNSIGNED;
4282 next();
4283 typespec_found = 1;
4284 break;
4286 /* storage */
4287 case TOK_EXTERN:
4288 g = VT_EXTERN;
4289 goto storage;
4290 case TOK_STATIC:
4291 g = VT_STATIC;
4292 goto storage;
4293 case TOK_TYPEDEF:
4294 g = VT_TYPEDEF;
4295 goto storage;
4296 storage:
4297 if (t & (VT_EXTERN|VT_STATIC|VT_TYPEDEF) & ~g)
4298 tcc_error("multiple storage classes");
4299 t |= g;
4300 next();
4301 break;
4302 case TOK_INLINE1:
4303 case TOK_INLINE2:
4304 case TOK_INLINE3:
4305 t |= VT_INLINE;
4306 next();
4307 break;
4308 case TOK_NORETURN3:
4309 /* currently ignored: tcc does not make use of
4310 '_Noreturn' information */
4311 next();
4312 break;
4313 /* GNUC attribute */
4314 case TOK_ATTRIBUTE1:
4315 case TOK_ATTRIBUTE2:
4316 parse_attribute(ad);
4317 if (ad->attr_mode) {
4318 u = ad->attr_mode -1;
4319 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
4321 continue;
4322 /* GNUC typeof */
4323 case TOK_TYPEOF1:
4324 case TOK_TYPEOF2:
4325 case TOK_TYPEOF3:
4326 next();
4327 parse_expr_type(&type1);
4328 /* remove all storage modifiers except typedef */
4329 type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
4330 if (type1.ref)
4331 sym_to_attr(ad, type1.ref);
4332 goto basic_type2;
4333 default:
4334 if (typespec_found)
4335 goto the_end;
4336 s = sym_find(tok);
4337 if (!s || !(s->type.t & VT_TYPEDEF))
4338 goto the_end;
4339 t &= ~(VT_BTYPE|VT_LONG);
4340 u = t & ~(VT_CONSTANT | VT_VOLATILE), t ^= u;
4341 type->t = (s->type.t & ~VT_TYPEDEF) | u;
4342 type->ref = s->type.ref;
4343 if (t)
4344 parse_btype_qualify(type, t);
4345 t = type->t;
4346 /* get attributes from typedef */
4347 sym_to_attr(ad, s);
4348 next();
4349 typespec_found = 1;
4350 st = bt = -2;
4351 break;
4353 type_found = 1;
4355 the_end:
4356 if (tcc_state->char_is_unsigned) {
4357 if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
4358 t |= VT_UNSIGNED;
4360 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
4361 bt = t & (VT_BTYPE|VT_LONG);
4362 if (bt == VT_LONG)
4363 t |= LONG_SIZE == 8 ? VT_LLONG : VT_INT;
4364 #ifdef TCC_TARGET_PE
4365 if (bt == VT_LDOUBLE)
4366 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_DOUBLE;
4367 #endif
4368 type->t = t;
4369 return type_found;
4372 /* convert a function parameter type (array to pointer and function to
4373 function pointer) */
4374 static inline void convert_parameter_type(CType *pt)
4376 /* remove const and volatile qualifiers (XXX: const could be used
4377 to indicate a const function parameter) */
4378 pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
4379 /* array must be transformed to pointer according to ANSI C */
4380 pt->t &= ~VT_ARRAY;
4381 if ((pt->t & VT_BTYPE) == VT_FUNC) {
4382 mk_pointer(pt);
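/* Example: for a prototype such as
       void f(int a[10], int g(void));
   the parameter types are adjusted as if it had been written
       void f(int *a, int (*g)(void));
   i.e. array parameters decay to pointers and function parameters become
   function pointers (C99 6.7.5.3). */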
4386 ST_FUNC void parse_asm_str(CString *astr)
4388 skip('(');
4389 parse_mult_str(astr, "string constant");
4392 /* Parse an asm label and return the token */
4393 static int asm_label_instr(void)
4395 int v;
4396 CString astr;
4398 next();
4399 parse_asm_str(&astr);
4400 skip(')');
4401 #ifdef ASM_DEBUG
4402 printf("asm_alias: \"%s\"\n", (char *)astr.data);
4403 #endif
4404 v = tok_alloc(astr.data, astr.size - 1)->tok;
4405 cstr_free(&astr);
4406 return v;
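/* Example: this parses the GCC asm-label extension, e.g. in
       int foo(void) asm("real_foo");
   the string after 'asm' is turned into a token ("real_foo") and returned,
   so the caller can use it as the declaration's assembler name. */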
4409 static int post_type(CType *type, AttributeDef *ad, int storage, int td)
4411 int n, l, t1, arg_size, align, unused_align;
4412 Sym **plast, *s, *first;
4413 AttributeDef ad1;
4414 CType pt;
4416 if (tok == '(') {
4417 /* function type, or recursive declarator (return if so) */
4418 next();
4419 if (td && !(td & TYPE_ABSTRACT))
4420 return 0;
4421 if (tok == ')')
4422 l = 0;
4423 else if (parse_btype(&pt, &ad1))
4424 l = FUNC_NEW;
4425 else if (td) {
4426 merge_attr (ad, &ad1);
4427 return 0;
4428 } else
4429 l = FUNC_OLD;
4430 first = NULL;
4431 plast = &first;
4432 arg_size = 0;
4433 if (l) {
4434 for(;;) {
4435 /* read param name and compute offset */
4436 if (l != FUNC_OLD) {
4437 if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
4438 break;
4439 type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT);
4440 if ((pt.t & VT_BTYPE) == VT_VOID)
4441 tcc_error("parameter declared as void");
4442 } else {
4443 n = tok;
4444 if (n < TOK_UIDENT)
4445 expect("identifier");
4446 pt.t = VT_VOID; /* invalid type */
4447 next();
4449 convert_parameter_type(&pt);
4450 arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
4451 s = sym_push(n | SYM_FIELD, &pt, 0, 0);
4452 *plast = s;
4453 plast = &s->next;
4454 if (tok == ')')
4455 break;
4456 skip(',');
4457 if (l == FUNC_NEW && tok == TOK_DOTS) {
4458 l = FUNC_ELLIPSIS;
4459 next();
4460 break;
4462 if (l == FUNC_NEW && !parse_btype(&pt, &ad1))
4463 tcc_error("invalid type");
4465 } else
4466 /* if no parameters, then old type prototype */
4467 l = FUNC_OLD;
4468 skip(')');
4469 /* NOTE: const is ignored in returned type as it has a special
4470 meaning in gcc / C++ */
4471 type->t &= ~VT_CONSTANT;
4472 /* some ancient pre-K&R C allows a function to return an array
4473 and the array brackets to be put after the arguments, such
4474 that "int c()[]" means something like "int[] c()" */
4475 if (tok == '[') {
4476 next();
4477 skip(']'); /* only handle simple "[]" */
4478 mk_pointer(type);
4480 /* we push an anonymous symbol which will contain the function prototype */
4481 ad->f.func_args = arg_size;
4482 ad->f.func_type = l;
4483 s = sym_push(SYM_FIELD, type, 0, 0);
4484 s->a = ad->a;
4485 s->f = ad->f;
4486 s->next = first;
4487 type->t = VT_FUNC;
4488 type->ref = s;
4489 } else if (tok == '[') {
4490 int saved_nocode_wanted = nocode_wanted;
4491 /* array definition */
4492 next();
4493 while (1) {
4494 /* XXX The optional type-quals and static should only be accepted
4495 in parameter decls. The '*' as well, and then even only
4496 in prototypes (not function defs). */
4497 switch (tok) {
4498 case TOK_RESTRICT1: case TOK_RESTRICT2: case TOK_RESTRICT3:
4499 case TOK_CONST1:
4500 case TOK_VOLATILE1:
4501 case TOK_STATIC:
4502 case '*':
4503 next();
4504 continue;
4505 default:
4506 break;
4508 break;
4510 n = -1;
4511 t1 = 0;
4512 if (tok != ']') {
4513 if (!local_stack || (storage & VT_STATIC))
4514 vpushi(expr_const());
4515 else {
4516 /* VLAs (which can only happen with local_stack && !VT_STATIC)
4517 length must always be evaluated, even under nocode_wanted,
4518 so that its size slot is initialized (e.g. under sizeof
4519 or typeof). */
4520 nocode_wanted = 0;
4521 gexpr();
4523 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4524 n = vtop->c.i;
4525 if (n < 0)
4526 tcc_error("invalid array size");
4527 } else {
4528 if (!is_integer_btype(vtop->type.t & VT_BTYPE))
4529 tcc_error("size of variable length array should be an integer");
4530 n = 0;
4531 t1 = VT_VLA;
4534 skip(']');
4535 /* parse next post type */
4536 post_type(type, ad, storage, 0);
4538 if ((type->t & VT_BTYPE) == VT_FUNC)
4539 tcc_error("declaration of an array of functions");
4540 if ((type->t & VT_BTYPE) == VT_VOID
4541 || type_size(type, &unused_align) < 0)
4542 tcc_error("declaration of an array of incomplete type elements");
4544 t1 |= type->t & VT_VLA;
4546 if (t1 & VT_VLA) {
4547 if (n < 0)
4548 tcc_error("need explicit inner array size in VLAs");
4549 loc -= type_size(&int_type, &align);
4550 loc &= -align;
4551 n = loc;
4553 vla_runtime_type_size(type, &align);
4554 gen_op('*');
4555 vset(&int_type, VT_LOCAL|VT_LVAL, n);
4556 vswap();
4557 vstore();
4559 if (n != -1)
4560 vpop();
4561 nocode_wanted = saved_nocode_wanted;
4563 /* we push an anonymous symbol which will contain the array
4564 element type */
4565 s = sym_push(SYM_FIELD, type, 0, n);
4566 type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
4567 type->ref = s;
4569 return 1;
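/* Rough sketch of what post_type builds for array declarators: for
       int a[2][3];
   each dimension is an anonymous SYM_FIELD symbol whose 'c' field holds
   the element count, chained from the outer array down to the int
   element type.  For a VLA such as
       int v[n];           (local, non-static only)
   the count is not a constant: VT_VLA is set and 'c' instead holds the
   offset of a stack slot where the total size in bytes is stored at
   run time. */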
4572 /* Parse a type declarator (except basic type), and return the type
4573 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4574 expected. 'type' should contain the basic type. 'ad' is the
4575 attribute definition of the basic type. It can be modified by
4576 type_decl(). If this (possibly abstract) declarator is a pointer chain
4577 it returns the innermost pointed to type (equals *type, but is a different
4578 pointer), otherwise returns type itself, that's used for recursive calls. */
4579 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td)
4581 CType *post, *ret;
4582 int qualifiers, storage;
4584 /* recursive type, remove storage bits first, apply them later again */
4585 storage = type->t & VT_STORAGE;
4586 type->t &= ~VT_STORAGE;
4587 post = ret = type;
4589 while (tok == '*') {
4590 qualifiers = 0;
4591 redo:
4592 next();
4593 switch(tok) {
4594 case TOK_CONST1:
4595 case TOK_CONST2:
4596 case TOK_CONST3:
4597 qualifiers |= VT_CONSTANT;
4598 goto redo;
4599 case TOK_VOLATILE1:
4600 case TOK_VOLATILE2:
4601 case TOK_VOLATILE3:
4602 qualifiers |= VT_VOLATILE;
4603 goto redo;
4604 case TOK_RESTRICT1:
4605 case TOK_RESTRICT2:
4606 case TOK_RESTRICT3:
4607 goto redo;
4608 /* XXX: clarify attribute handling */
4609 case TOK_ATTRIBUTE1:
4610 case TOK_ATTRIBUTE2:
4611 parse_attribute(ad);
4612 break;
4614 mk_pointer(type);
4615 type->t |= qualifiers;
4616 if (ret == type)
4617 /* innermost pointed to type is the one for the first derivation */
4618 ret = pointed_type(type);
4621 if (tok == '(') {
4622 /* This is possibly a parameter type list for abstract declarators
4623 ('int ()'), use post_type for testing this. */
4624 if (!post_type(type, ad, 0, td)) {
4625 /* It's not, so it's a nested declarator, and the post operations
4626 apply to the innermost pointed to type (if any). */
4627 /* XXX: this is not correct to modify 'ad' at this point, but
4628 the syntax is not clear */
4629 parse_attribute(ad);
4630 post = type_decl(type, ad, v, td);
4631 skip(')');
4632 } else
4633 goto abstract;
4634 } else if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
4635 /* type identifier */
4636 *v = tok;
4637 next();
4638 } else {
4639 abstract:
4640 if (!(td & TYPE_ABSTRACT))
4641 expect("identifier");
4642 *v = 0;
4644 post_type(post, ad, storage, 0);
4645 parse_attribute(ad);
4646 type->t |= storage;
4647 return ret;
4650 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
4651 ST_FUNC int lvalue_type(int t)
4653 int bt, r;
4654 r = VT_LVAL;
4655 bt = t & VT_BTYPE;
4656 if (bt == VT_BYTE || bt == VT_BOOL)
4657 r |= VT_LVAL_BYTE;
4658 else if (bt == VT_SHORT)
4659 r |= VT_LVAL_SHORT;
4660 else
4661 return r;
4662 if (t & VT_UNSIGNED)
4663 r |= VT_LVAL_UNSIGNED;
4664 return r;
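/* Example: the extra VT_LVAL_xxx bits tell the code generator how wide
   (and whether zero-extended) a load through this lvalue must be:
       lvalue_type(VT_BYTE | VT_UNSIGNED) == VT_LVAL | VT_LVAL_BYTE | VT_LVAL_UNSIGNED
       lvalue_type(VT_SHORT)              == VT_LVAL | VT_LVAL_SHORT
       lvalue_type(VT_INT)                == VT_LVAL */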
4667 /* indirection with full error checking and bound check */
4668 ST_FUNC void indir(void)
4670 if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
4671 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
4672 return;
4673 expect("pointer");
4675 if (vtop->r & VT_LVAL)
4676 gv(RC_INT);
4677 vtop->type = *pointed_type(&vtop->type);
4678 /* Arrays and functions are never lvalues */
4679 if (!(vtop->type.t & VT_ARRAY) && !(vtop->type.t & VT_VLA)
4680 && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
4681 vtop->r |= lvalue_type(vtop->type.t);
4682 /* if bound checking, the referenced pointer must be checked */
4683 #ifdef CONFIG_TCC_BCHECK
4684 if (tcc_state->do_bounds_check)
4685 vtop->r |= VT_MUSTBOUND;
4686 #endif
4690 /* pass a parameter to a function and do type checking and casting */
4691 static void gfunc_param_typed(Sym *func, Sym *arg)
4693 int func_type;
4694 CType type;
4696 func_type = func->f.func_type;
4697 if (func_type == FUNC_OLD ||
4698 (func_type == FUNC_ELLIPSIS && arg == NULL)) {
4699 /* default casting : only need to convert float to double */
4700 if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
4701 gen_cast_s(VT_DOUBLE);
4702 } else if (vtop->type.t & VT_BITFIELD) {
4703 type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
4704 type.ref = vtop->type.ref;
4705 gen_cast(&type);
4707 } else if (arg == NULL) {
4708 tcc_error("too many arguments to function");
4709 } else {
4710 type = arg->type;
4711 type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
4712 gen_assign_cast(&type);
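/* Example: for unprototyped calls and for arguments matching the "..."
   part of a variadic prototype, only the default float -> double
   promotion is applied here; in
       printf("%f\n", 1.0f);
   the float literal is passed as a double.  Arguments matching a
   declared parameter are instead converted with gen_assign_cast(). */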
4716 /* parse an expression and return its type without any side effect. */
4717 static void expr_type(CType *type, void (*expr_fn)(void))
4719 nocode_wanted++;
4720 expr_fn();
4721 *type = vtop->type;
4722 vpop();
4723 nocode_wanted--;
4726 /* parse an expression of the form '(type)' or '(expr)' and return its
4727 type */
4728 static void parse_expr_type(CType *type)
4730 int n;
4731 AttributeDef ad;
4733 skip('(');
4734 if (parse_btype(type, &ad)) {
4735 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4736 } else {
4737 expr_type(type, gexpr);
4739 skip(')');
4742 static void parse_type(CType *type)
4744 AttributeDef ad;
4745 int n;
4747 if (!parse_btype(type, &ad)) {
4748 expect("type");
4750 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4753 static void parse_builtin_params(int nc, const char *args)
4755 char c, sep = '(';
4756 CType t;
4757 if (nc)
4758 nocode_wanted++;
4759 next();
4760 while ((c = *args++)) {
4761 skip(sep);
4762 sep = ',';
4763 switch (c) {
4764 case 'e': expr_eq(); continue;
4765 case 't': parse_type(&t); vpush(&t); continue;
4766 default: tcc_error("internal error"); break;
4769 skip(')');
4770 if (nc)
4771 nocode_wanted--;
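/* Example: 'args' is a tiny format string describing the builtin's
   parameters: 'e' parses one assignment expression, 't' parses a type
   name and pushes it; a non-zero 'nc' parses with code generation
   suppressed.  So "ee" matches __builtin_expect(expr, expr) and "et"
   matches __va_arg(expr, type). */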
4774 static void try_call_scope_cleanup(Sym *stop)
4776 Sym *cls = current_cleanups;
4778 for (; cls != stop; cls = cls->ncl) {
4779 Sym *fs = cls->next;
4780 Sym *vs = cls->prev_tok;
4782 vpushsym(&fs->type, fs);
4783 vset(&vs->type, vs->r, vs->c);
4784 vtop->sym = vs;
4785 mk_pointer(&vtop->type);
4786 gaddrof();
4787 gfunc_call(1);
4791 static void try_call_cleanup_goto(Sym *cleanupstate)
4793 Sym *oc, *cc;
4794 int ocd, ccd;
4796 if (!current_cleanups)
4797 return;
4799 /* search NCA of both cleanup chains given parents and initial depth */
4800 ocd = cleanupstate ? cleanupstate->v & ~SYM_FIELD : 0;
4801 for (ccd = ncleanups, oc = cleanupstate; ocd > ccd; --ocd, oc = oc->ncl)
4803 for (cc = current_cleanups; ccd > ocd; --ccd, cc = cc->ncl)
4805 for (; cc != oc; cc = cc->ncl, oc = oc->ncl, --ccd)
4808 try_call_scope_cleanup(cc);
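/* Example: these helpers emit the calls registered with
   __attribute__((cleanup(fn))).  With
       void unlock(int **p);
       { __attribute__((cleanup(unlock))) int *l = 0; ... }
   leaving the block (normally, via break/continue/return, or via a goto
   that exits it) calls unlock(&l).  For a goto, only the cleanups
   between the goto and the nearest common enclosing scope of goto and
   label are run, which is what try_call_cleanup_goto computes. */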
4811 ST_FUNC void unary(void)
4813 int n, t, align, size, r, sizeof_caller;
4814 CType type;
4815 Sym *s;
4816 AttributeDef ad;
4818 sizeof_caller = in_sizeof;
4819 in_sizeof = 0;
4820 type.ref = NULL;
4821 /* XXX: GCC 2.95.3 does not generate a table although it would be
4822 better here */
4823 tok_next:
4824 switch(tok) {
4825 case TOK_EXTENSION:
4826 next();
4827 goto tok_next;
4828 case TOK_LCHAR:
4829 #ifdef TCC_TARGET_PE
4830 t = VT_SHORT|VT_UNSIGNED;
4831 goto push_tokc;
4832 #endif
4833 case TOK_CINT:
4834 case TOK_CCHAR:
4835 t = VT_INT;
4836 push_tokc:
4837 type.t = t;
4838 vsetc(&type, VT_CONST, &tokc);
4839 next();
4840 break;
4841 case TOK_CUINT:
4842 t = VT_INT | VT_UNSIGNED;
4843 goto push_tokc;
4844 case TOK_CLLONG:
4845 t = VT_LLONG;
4846 goto push_tokc;
4847 case TOK_CULLONG:
4848 t = VT_LLONG | VT_UNSIGNED;
4849 goto push_tokc;
4850 case TOK_CFLOAT:
4851 t = VT_FLOAT;
4852 goto push_tokc;
4853 case TOK_CDOUBLE:
4854 t = VT_DOUBLE;
4855 goto push_tokc;
4856 case TOK_CLDOUBLE:
4857 t = VT_LDOUBLE;
4858 goto push_tokc;
4859 case TOK_CLONG:
4860 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG;
4861 goto push_tokc;
4862 case TOK_CULONG:
4863 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG | VT_UNSIGNED;
4864 goto push_tokc;
4865 case TOK___FUNCTION__:
4866 if (!gnu_ext)
4867 goto tok_identifier;
4868 /* fall thru */
4869 case TOK___FUNC__:
4871 void *ptr;
4872 int len;
4873 /* special function name identifier */
4874 len = strlen(funcname) + 1;
4875 /* generate char[len] type */
4876 type.t = VT_BYTE;
4877 mk_pointer(&type);
4878 type.t |= VT_ARRAY;
4879 type.ref->c = len;
4880 vpush_ref(&type, data_section, data_section->data_offset, len);
4881 if (!NODATA_WANTED) {
4882 ptr = section_ptr_add(data_section, len);
4883 memcpy(ptr, funcname, len);
4885 next();
4887 break;
4888 case TOK_LSTR:
4889 #ifdef TCC_TARGET_PE
4890 t = VT_SHORT | VT_UNSIGNED;
4891 #else
4892 t = VT_INT;
4893 #endif
4894 goto str_init;
4895 case TOK_STR:
4896 /* string parsing */
4897 t = VT_BYTE;
4898 if (tcc_state->char_is_unsigned)
4899 t = VT_BYTE | VT_UNSIGNED;
4900 str_init:
4901 if (tcc_state->warn_write_strings)
4902 t |= VT_CONSTANT;
4903 type.t = t;
4904 mk_pointer(&type);
4905 type.t |= VT_ARRAY;
4906 memset(&ad, 0, sizeof(AttributeDef));
4907 decl_initializer_alloc(&type, &ad, VT_CONST, 2, 0, 0);
4908 break;
4909 case '(':
4910 next();
4911 /* cast ? */
4912 if (parse_btype(&type, &ad)) {
4913 type_decl(&type, &ad, &n, TYPE_ABSTRACT);
4914 skip(')');
4915 /* check ISOC99 compound literal */
4916 if (tok == '{') {
4917 /* data is allocated locally by default */
4918 if (global_expr)
4919 r = VT_CONST;
4920 else
4921 r = VT_LOCAL;
4922 /* all except arrays are lvalues */
4923 if (!(type.t & VT_ARRAY))
4924 r |= lvalue_type(type.t);
4925 memset(&ad, 0, sizeof(AttributeDef));
4926 decl_initializer_alloc(&type, &ad, r, 1, 0, 0);
4927 } else {
4928 if (sizeof_caller) {
4929 vpush(&type);
4930 return;
4932 unary();
4933 gen_cast(&type);
4935 } else if (tok == '{') {
4936 int saved_nocode_wanted = nocode_wanted;
4937 if (const_wanted)
4938 tcc_error("expected constant");
4939 /* save all registers */
4940 save_regs(0);
4941 /* statement expression : we do not accept break/continue
4942 inside as GCC does. We do retain the nocode_wanted state,
4943 as statement expressions can't ever be entered from the
4944 outside, so any reactivation of code emission (from labels
4945 or loop heads) can be disabled again after the end of it. */
4946 block(NULL, NULL, NULL, NULL, 1);
4947 nocode_wanted = saved_nocode_wanted;
4948 skip(')');
4949 } else {
4950 gexpr();
4951 skip(')');
4953 break;
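/* Example: the '{' branch above implements GNU statement expressions:
       int x = ({ int t = f(); t * 2; });
   the value and type of the last expression statement in the braces
   become the value of the whole parenthesized expression. */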
4954 case '*':
4955 next();
4956 unary();
4957 indir();
4958 break;
4959 case '&':
4960 next();
4961 unary();
4962 /* function names must be treated as function pointers,
4963 except for unary '&' and sizeof. Since we consider that
4964 functions are not lvalues, we only have to handle it
4965 there and in function calls. */
4966 /* arrays can also be used although they are not lvalues */
4967 if ((vtop->type.t & VT_BTYPE) != VT_FUNC &&
4968 !(vtop->type.t & VT_ARRAY))
4969 test_lvalue();
4970 mk_pointer(&vtop->type);
4971 gaddrof();
4972 break;
4973 case '!':
4974 next();
4975 unary();
4976 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4977 gen_cast_s(VT_BOOL);
4978 vtop->c.i = !vtop->c.i;
4979 } else if ((vtop->r & VT_VALMASK) == VT_CMP)
4980 vtop->c.i ^= 1;
4981 else {
4982 save_regs(1);
4983 vseti(VT_JMP, gvtst(1, 0));
4985 break;
4986 case '~':
4987 next();
4988 unary();
4989 vpushi(-1);
4990 gen_op('^');
4991 break;
4992 case '+':
4993 next();
4994 unary();
4995 if ((vtop->type.t & VT_BTYPE) == VT_PTR)
4996 tcc_error("pointer not accepted for unary plus");
4997 /* In order to force cast, we add zero, except for floating point
4998 where we really need a noop (otherwise -0.0 will be transformed
4999 into +0.0). */
5000 if (!is_float(vtop->type.t)) {
5001 vpushi(0);
5002 gen_op('+');
5004 break;
5005 case TOK_SIZEOF:
5006 case TOK_ALIGNOF1:
5007 case TOK_ALIGNOF2:
5008 case TOK_ALIGNOF3:
5009 t = tok;
5010 next();
5011 in_sizeof++;
5012 expr_type(&type, unary); /* Performs in_sizeof = 0; */
5013 s = vtop[1].sym; /* hack: accessing previous vtop */
5014 size = type_size(&type, &align);
5015 if (s && s->a.aligned)
5016 align = 1 << (s->a.aligned - 1);
5017 if (t == TOK_SIZEOF) {
5018 if (!(type.t & VT_VLA)) {
5019 if (size < 0)
5020 tcc_error("sizeof applied to an incomplete type");
5021 vpushs(size);
5022 } else {
5023 vla_runtime_type_size(&type, &align);
5025 } else {
5026 vpushs(align);
5028 vtop->type.t |= VT_UNSIGNED;
5029 break;
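/* Example: sizeof/alignof push an unsigned (size_t-like) value.  For a
   VLA the size is not a compile-time constant, so it is read back from
   the runtime size slot:
       int n = 4;
       int a[n];
       size_t s = sizeof a;    computed at run time as n * sizeof(int)
   whereas sizeof(int[4]) folds to a constant. */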
5031 case TOK_builtin_expect:
5032 /* __builtin_expect is a no-op for now */
5033 parse_builtin_params(0, "ee");
5034 vpop();
5035 break;
5036 case TOK_builtin_types_compatible_p:
5037 parse_builtin_params(0, "tt");
5038 vtop[-1].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
5039 vtop[0].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
5040 n = is_compatible_types(&vtop[-1].type, &vtop[0].type);
5041 vtop -= 2;
5042 vpushi(n);
5043 break;
5044 case TOK_builtin_choose_expr:
5046 int64_t c;
5047 next();
5048 skip('(');
5049 c = expr_const64();
5050 skip(',');
5051 if (!c) {
5052 nocode_wanted++;
5054 expr_eq();
5055 if (!c) {
5056 vpop();
5057 nocode_wanted--;
5059 skip(',');
5060 if (c) {
5061 nocode_wanted++;
5063 expr_eq();
5064 if (c) {
5065 vpop();
5066 nocode_wanted--;
5068 skip(')');
5070 break;
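/* Example: for __builtin_choose_expr(C, a, b) the constant C selects
   which operand the whole expression becomes;
       __builtin_choose_expr(1, x, y)
   has the value and type of x.  Both operands are still parsed, but the
   unselected one is parsed with nocode_wanted set and then popped, so no
   code is generated for it. */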
5071 case TOK_builtin_constant_p:
5072 parse_builtin_params(1, "e");
5073 n = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
5074 vtop--;
5075 vpushi(n);
5076 break;
5077 case TOK_builtin_frame_address:
5078 case TOK_builtin_return_address:
5080 int tok1 = tok;
5081 int level;
5082 next();
5083 skip('(');
5084 if (tok != TOK_CINT) {
5085 tcc_error("%s only takes positive integers",
5086 tok1 == TOK_builtin_return_address ?
5087 "__builtin_return_address" :
5088 "__builtin_frame_address");
5090 level = (uint32_t)tokc.i;
5091 next();
5092 skip(')');
5093 type.t = VT_VOID;
5094 mk_pointer(&type);
5095 vset(&type, VT_LOCAL, 0); /* local frame */
5096 while (level--) {
5097 mk_pointer(&vtop->type);
5098 indir(); /* -> parent frame */
5100 if (tok1 == TOK_builtin_return_address) {
5101 // assume return address is just above frame pointer on stack
5102 vpushi(PTR_SIZE);
5103 gen_op('+');
5104 mk_pointer(&vtop->type);
5105 indir();
5108 break;
5109 #ifdef TCC_TARGET_X86_64
5110 #ifdef TCC_TARGET_PE
5111 case TOK_builtin_va_start:
5112 parse_builtin_params(0, "ee");
5113 r = vtop->r & VT_VALMASK;
5114 if (r == VT_LLOCAL)
5115 r = VT_LOCAL;
5116 if (r != VT_LOCAL)
5117 tcc_error("__builtin_va_start expects a local variable");
5118 vtop->r = r;
5119 vtop->type = char_pointer_type;
5120 vtop->c.i += 8;
5121 vstore();
5122 break;
5123 #else
5124 case TOK_builtin_va_arg_types:
5125 parse_builtin_params(0, "t");
5126 vpushi(classify_x86_64_va_arg(&vtop->type));
5127 vswap();
5128 vpop();
5129 break;
5130 #endif
5131 #endif
5133 #ifdef TCC_TARGET_ARM64
5134 case TOK___va_start: {
5135 parse_builtin_params(0, "ee");
5136 //xx check types
5137 gen_va_start();
5138 vpushi(0);
5139 vtop->type.t = VT_VOID;
5140 break;
5142 case TOK___va_arg: {
5143 parse_builtin_params(0, "et");
5144 type = vtop->type;
5145 vpop();
5146 //xx check types
5147 gen_va_arg(&type);
5148 vtop->type = type;
5149 break;
5151 case TOK___arm64_clear_cache: {
5152 parse_builtin_params(0, "ee");
5153 gen_clear_cache();
5154 vpushi(0);
5155 vtop->type.t = VT_VOID;
5156 break;
5158 #endif
5159 /* pre operations */
5160 case TOK_INC:
5161 case TOK_DEC:
5162 t = tok;
5163 next();
5164 unary();
5165 inc(0, t);
5166 break;
5167 case '-':
5168 next();
5169 unary();
5170 t = vtop->type.t & VT_BTYPE;
5171 if (is_float(t)) {
5172 /* In IEEE negate(x) isn't subtract(0,x), but rather
5173 subtract(-0, x). */
5174 vpush(&vtop->type);
5175 if (t == VT_FLOAT)
5176 vtop->c.f = -1.0 * 0.0;
5177 else if (t == VT_DOUBLE)
5178 vtop->c.d = -1.0 * 0.0;
5179 else
5180 vtop->c.ld = -1.0 * 0.0;
5181 } else
5182 vpushi(0);
5183 vswap();
5184 gen_op('-');
5185 break;
5186 case TOK_LAND:
5187 if (!gnu_ext)
5188 goto tok_identifier;
5189 next();
5190 /* allow taking the address of a label */
5191 if (tok < TOK_UIDENT)
5192 expect("label identifier");
5193 s = label_find(tok);
5194 if (!s) {
5195 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
5196 } else {
5197 if (s->r == LABEL_DECLARED)
5198 s->r = LABEL_FORWARD;
5200 if (!s->type.t) {
5201 s->type.t = VT_VOID;
5202 mk_pointer(&s->type);
5203 s->type.t |= VT_STATIC;
5205 vpushsym(&s->type, s);
5206 next();
5207 break;
5209 case TOK_GENERIC:
5211 CType controlling_type;
5212 int has_default = 0;
5213 int has_match = 0;
5214 int learn = 0;
5215 TokenString *str = NULL;
5216 int saved_const_wanted = const_wanted;
5218 next();
5219 skip('(');
5220 const_wanted = 0;
5221 expr_type(&controlling_type, expr_eq);
5222 controlling_type.t &= ~(VT_CONSTANT | VT_VOLATILE | VT_ARRAY);
5223 if ((controlling_type.t & VT_BTYPE) == VT_FUNC)
5224 mk_pointer(&controlling_type);
5225 const_wanted = saved_const_wanted;
5226 for (;;) {
5227 learn = 0;
5228 skip(',');
5229 if (tok == TOK_DEFAULT) {
5230 if (has_default)
5231 tcc_error("too many 'default'");
5232 has_default = 1;
5233 if (!has_match)
5234 learn = 1;
5235 next();
5236 } else {
5237 AttributeDef ad_tmp;
5238 int itmp;
5239 CType cur_type;
5240 parse_btype(&cur_type, &ad_tmp);
5241 type_decl(&cur_type, &ad_tmp, &itmp, TYPE_ABSTRACT);
5242 if (compare_types(&controlling_type, &cur_type, 0)) {
5243 if (has_match) {
5244 tcc_error("type match twice");
5246 has_match = 1;
5247 learn = 1;
5250 skip(':');
5251 if (learn) {
5252 if (str)
5253 tok_str_free(str);
5254 skip_or_save_block(&str);
5255 } else {
5256 skip_or_save_block(NULL);
5258 if (tok == ')')
5259 break;
5261 if (!str) {
5262 char buf[60];
5263 type_to_str(buf, sizeof buf, &controlling_type, NULL);
5264 tcc_error("type '%s' does not match any association", buf);
5266 begin_macro(str, 1);
5267 next();
5268 expr_eq();
5269 if (tok != TOK_EOF)
5270 expect(",");
5271 end_macro();
5272 next();
5273 break;
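/* Example: the controlling type is taken with top-level const/volatile
   stripped and function types decayed to pointers, so with
       const int ci = 0;
       int r = _Generic(ci, int: 1, default: 0);    r == 1
   The tokens of the selected association are saved and re-parsed after
   the closing ')'; duplicate 'default' labels or two matching types are
   rejected. */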
5275 // special qNaN, sNaN and infinity values
5276 case TOK___NAN__:
5277 n = 0x7fc00000;
5278 special_math_val:
5279 vpushi(n);
5280 vtop->type.t = VT_FLOAT;
5281 next();
5282 break;
5283 case TOK___SNAN__:
5284 n = 0x7f800001;
5285 goto special_math_val;
5286 case TOK___INF__:
5287 n = 0x7f800000;
5288 goto special_math_val;
5290 default:
5291 tok_identifier:
5292 t = tok;
5293 next();
5294 if (t < TOK_UIDENT)
5295 expect("identifier");
5296 s = sym_find(t);
5297 if (!s || IS_ASM_SYM(s)) {
5298 const char *name = get_tok_str(t, NULL);
5299 if (tok != '(')
5300 tcc_error("'%s' undeclared", name);
5301 /* for simple function calls, we tolerate an undeclared
5302 external reference to an int() function */
5303 if (tcc_state->warn_implicit_function_declaration
5304 #ifdef TCC_TARGET_PE
5305 /* people must be warned about using undeclared WINAPI functions
5306 (which usually start with an uppercase letter) */
5307 || (name[0] >= 'A' && name[0] <= 'Z')
5308 #endif
5310 tcc_warning("implicit declaration of function '%s'", name);
5311 s = external_global_sym(t, &func_old_type);
5314 r = s->r;
5315 /* A symbol that has a register is a local register variable,
5316 which starts out as a VT_LOCAL value. */
5317 if ((r & VT_VALMASK) < VT_CONST)
5318 r = (r & ~VT_VALMASK) | VT_LOCAL;
5320 vset(&s->type, r, s->c);
5321 /* Point to s as backpointer (even without r&VT_SYM).
5322 Will be used by at least the x86 inline asm parser for
5323 regvars. */
5324 vtop->sym = s;
5326 if (r & VT_SYM) {
5327 vtop->c.i = 0;
5328 } else if (r == VT_CONST && IS_ENUM_VAL(s->type.t)) {
5329 vtop->c.i = s->enum_val;
5331 break;
5334 /* post operations */
5335 while (1) {
5336 if (tok == TOK_INC || tok == TOK_DEC) {
5337 inc(1, tok);
5338 next();
5339 } else if (tok == '.' || tok == TOK_ARROW || tok == TOK_CDOUBLE) {
5340 int qualifiers, cumofs = 0;
5341 /* field */
5342 if (tok == TOK_ARROW)
5343 indir();
5344 qualifiers = vtop->type.t & (VT_CONSTANT | VT_VOLATILE);
5345 test_lvalue();
5346 gaddrof();
5347 /* expect pointer on structure */
5348 if ((vtop->type.t & VT_BTYPE) != VT_STRUCT)
5349 expect("struct or union");
5350 if (tok == TOK_CDOUBLE)
5351 expect("field name");
5352 next();
5353 if (tok == TOK_CINT || tok == TOK_CUINT)
5354 expect("field name");
5355 s = find_field(&vtop->type, tok, &cumofs);
5356 if (!s)
5357 tcc_error("field not found: %s", get_tok_str(tok & ~SYM_FIELD, &tokc));
5358 /* add field offset to pointer */
5359 vtop->type = char_pointer_type; /* change type to 'char *' */
5360 vpushi(cumofs + s->c);
5361 gen_op('+');
5362 /* change type to field type, and set to lvalue */
5363 vtop->type = s->type;
5364 vtop->type.t |= qualifiers;
5365 /* an array is never an lvalue */
5366 if (!(vtop->type.t & VT_ARRAY)) {
5367 vtop->r |= lvalue_type(vtop->type.t);
5368 #ifdef CONFIG_TCC_BCHECK
5369 /* if bound checking, the referenced pointer must be checked */
5370 if (tcc_state->do_bounds_check && (vtop->r & VT_VALMASK) != VT_LOCAL)
5371 vtop->r |= VT_MUSTBOUND;
5372 #endif
5374 next();
5375 } else if (tok == '[') {
5376 next();
5377 gexpr();
5378 gen_op('+');
5379 indir();
5380 skip(']');
5381 } else if (tok == '(') {
5382 SValue ret;
5383 Sym *sa;
5384 int nb_args, ret_nregs, ret_align, regsize, variadic;
5386 /* function call */
5387 if ((vtop->type.t & VT_BTYPE) != VT_FUNC) {
5388 /* pointer test (no array accepted) */
5389 if ((vtop->type.t & (VT_BTYPE | VT_ARRAY)) == VT_PTR) {
5390 vtop->type = *pointed_type(&vtop->type);
5391 if ((vtop->type.t & VT_BTYPE) != VT_FUNC)
5392 goto error_func;
5393 } else {
5394 error_func:
5395 expect("function pointer");
5397 } else {
5398 vtop->r &= ~VT_LVAL; /* no lvalue */
5400 /* get return type */
5401 s = vtop->type.ref;
5402 next();
5403 sa = s->next; /* first parameter */
5404 nb_args = regsize = 0;
5405 ret.r2 = VT_CONST;
5406 /* compute first implicit argument if a structure is returned */
5407 if ((s->type.t & VT_BTYPE) == VT_STRUCT) {
5408 variadic = (s->f.func_type == FUNC_ELLIPSIS);
5409 ret_nregs = gfunc_sret(&s->type, variadic, &ret.type,
5410 &ret_align, &regsize);
5411 if (!ret_nregs) {
5412 /* get some space for the returned structure */
5413 size = type_size(&s->type, &align);
5414 #ifdef TCC_TARGET_ARM64
5415 /* On arm64, a small struct is returned in registers.
5416 It is much easier to write it to memory if we know
5417 that we are allowed to write some extra bytes, so
5418 round the allocated space up to a power of 2: */
5419 if (size < 16)
5420 while (size & (size - 1))
5421 size = (size | (size - 1)) + 1;
5422 #endif
5423 loc = (loc - size) & -align;
5424 ret.type = s->type;
5425 ret.r = VT_LOCAL | VT_LVAL;
5426 /* pass it as 'int' to avoid structure arg passing
5427 problems */
5428 vseti(VT_LOCAL, loc);
5429 ret.c = vtop->c;
5430 nb_args++;
5432 } else {
5433 ret_nregs = 1;
5434 ret.type = s->type;
5437 if (ret_nregs) {
5438 /* return in register */
5439 if (is_float(ret.type.t)) {
5440 ret.r = reg_fret(ret.type.t);
5441 #ifdef TCC_TARGET_X86_64
5442 if ((ret.type.t & VT_BTYPE) == VT_QFLOAT)
5443 ret.r2 = REG_QRET;
5444 #endif
5445 } else {
5446 #ifndef TCC_TARGET_ARM64
5447 #ifdef TCC_TARGET_X86_64
5448 if ((ret.type.t & VT_BTYPE) == VT_QLONG)
5449 #else
5450 if ((ret.type.t & VT_BTYPE) == VT_LLONG)
5451 #endif
5452 ret.r2 = REG_LRET;
5453 #endif
5454 ret.r = REG_IRET;
5456 ret.c.i = 0;
5458 if (tok != ')') {
5459 for(;;) {
5460 expr_eq();
5461 gfunc_param_typed(s, sa);
5462 nb_args++;
5463 if (sa)
5464 sa = sa->next;
5465 if (tok == ')')
5466 break;
5467 skip(',');
5470 if (sa)
5471 tcc_error("too few arguments to function");
5472 skip(')');
5473 gfunc_call(nb_args);
5475 /* return value */
5476 for (r = ret.r + ret_nregs + !ret_nregs; r-- > ret.r;) {
5477 vsetc(&ret.type, r, &ret.c);
5478 vtop->r2 = ret.r2; /* Loop only happens when r2 is VT_CONST */
5481 /* handle packed struct return */
5482 if (((s->type.t & VT_BTYPE) == VT_STRUCT) && ret_nregs) {
5483 int addr, offset;
5485 size = type_size(&s->type, &align);
5486 /* We're writing whole regs often, make sure there's enough
5487 space. Assume register size is power of 2. */
5488 if (regsize > align)
5489 align = regsize;
5490 loc = (loc - size) & -align;
5491 addr = loc;
5492 offset = 0;
5493 for (;;) {
5494 vset(&ret.type, VT_LOCAL | VT_LVAL, addr + offset);
5495 vswap();
5496 vstore();
5497 vtop--;
5498 if (--ret_nregs == 0)
5499 break;
5500 offset += regsize;
5502 vset(&s->type, VT_LOCAL | VT_LVAL, addr);
5504 } else {
5505 break;
5510 ST_FUNC void expr_prod(void)
5512 int t;
5514 unary();
5515 while (tok == '*' || tok == '/' || tok == '%') {
5516 t = tok;
5517 next();
5518 unary();
5519 gen_op(t);
5523 ST_FUNC void expr_sum(void)
5525 int t;
5527 expr_prod();
5528 while (tok == '+' || tok == '-') {
5529 t = tok;
5530 next();
5531 expr_prod();
5532 gen_op(t);
5536 static void expr_shift(void)
5538 int t;
5540 expr_sum();
5541 while (tok == TOK_SHL || tok == TOK_SAR) {
5542 t = tok;
5543 next();
5544 expr_sum();
5545 gen_op(t);
5549 static void expr_cmp(void)
5551 int t;
5553 expr_shift();
5554 while ((tok >= TOK_ULE && tok <= TOK_GT) ||
5555 tok == TOK_ULT || tok == TOK_UGE) {
5556 t = tok;
5557 next();
5558 expr_shift();
5559 gen_op(t);
5563 static void expr_cmpeq(void)
5565 int t;
5567 expr_cmp();
5568 while (tok == TOK_EQ || tok == TOK_NE) {
5569 t = tok;
5570 next();
5571 expr_cmp();
5572 gen_op(t);
5576 static void expr_and(void)
5578 expr_cmpeq();
5579 while (tok == '&') {
5580 next();
5581 expr_cmpeq();
5582 gen_op('&');
5586 static void expr_xor(void)
5588 expr_and();
5589 while (tok == '^') {
5590 next();
5591 expr_and();
5592 gen_op('^');
5596 static void expr_or(void)
5598 expr_xor();
5599 while (tok == '|') {
5600 next();
5601 expr_xor();
5602 gen_op('|');
5606 static void expr_land(void)
5608 expr_or();
5609 if (tok == TOK_LAND) {
5610 int t = 0;
5611 for(;;) {
5612 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5613 gen_cast_s(VT_BOOL);
5614 if (vtop->c.i) {
5615 vpop();
5616 } else {
5617 nocode_wanted++;
5618 while (tok == TOK_LAND) {
5619 next();
5620 expr_or();
5621 vpop();
5623 nocode_wanted--;
5624 if (t)
5625 gsym(t);
5626 gen_cast_s(VT_INT);
5627 break;
5629 } else {
5630 if (!t)
5631 save_regs(1);
5632 t = gvtst(1, t);
5634 if (tok != TOK_LAND) {
5635 if (t)
5636 vseti(VT_JMPI, t);
5637 else
5638 vpushi(1);
5639 break;
5641 next();
5642 expr_or();
5647 static void expr_lor(void)
5649 expr_land();
5650 if (tok == TOK_LOR) {
5651 int t = 0;
5652 for(;;) {
5653 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5654 gen_cast_s(VT_BOOL);
5655 if (!vtop->c.i) {
5656 vpop();
5657 } else {
5658 nocode_wanted++;
5659 while (tok == TOK_LOR) {
5660 next();
5661 expr_land();
5662 vpop();
5664 nocode_wanted--;
5665 if (t)
5666 gsym(t);
5667 gen_cast_s(VT_INT);
5668 break;
5670 } else {
5671 if (!t)
5672 save_regs(1);
5673 t = gvtst(0, t);
5675 if (tok != TOK_LOR) {
5676 if (t)
5677 vseti(VT_JMP, t);
5678 else
5679 vpushi(0);
5680 break;
5682 next();
5683 expr_land();
5688 /* Assuming vtop is a value used in a conditional context
5689 (i.e. compared with zero), return 0 if it's false, 1 if
5690 true, and -1 if it can't be statically determined. */
5691 static int condition_3way(void)
5693 int c = -1;
5694 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
5695 (!(vtop->r & VT_SYM) || !vtop->sym->a.weak)) {
5696 vdup();
5697 gen_cast_s(VT_BOOL);
5698 c = vtop->c.i;
5699 vpop();
5701 return c;
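/* Example: condition_3way() yields 1 for "if (1)", 0 for "if (0)", and
   -1 for "if (x)" with a run-time x or for the address of a weak symbol
   (which may legitimately be null).  Callers use the 0/1 results to
   disable code generation for the dead branch. */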
5704 static void expr_cond(void)
5706 int tt, u, r1, r2, rc, t1, t2, bt1, bt2, islv, c, g;
5707 SValue sv;
5708 CType type, type1, type2;
5710 expr_lor();
5711 if (tok == '?') {
5712 next();
5713 c = condition_3way();
5714 g = (tok == ':' && gnu_ext);
5715 if (c < 0) {
5716 /* needed to avoid having different registers saved in
5717 each branch */
5718 if (is_float(vtop->type.t)) {
5719 rc = RC_FLOAT;
5720 #ifdef TCC_TARGET_X86_64
5721 if ((vtop->type.t & VT_BTYPE) == VT_LDOUBLE) {
5722 rc = RC_ST0;
5724 #endif
5725 } else
5726 rc = RC_INT;
5727 gv(rc);
5728 save_regs(1);
5729 if (g)
5730 gv_dup();
5731 tt = gvtst(1, 0);
5733 } else {
5734 if (!g)
5735 vpop();
5736 tt = 0;
5739 if (1) {
5740 if (c == 0)
5741 nocode_wanted++;
5742 if (!g)
5743 gexpr();
5745 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
5746 mk_pointer(&vtop->type);
5747 type1 = vtop->type;
5748 sv = *vtop; /* save value to handle it later */
5749 vtop--; /* no vpop so that FP stack is not flushed */
5750 skip(':');
5752 u = 0;
5753 if (c < 0)
5754 u = gjmp(0);
5755 gsym(tt);
5757 if (c == 0)
5758 nocode_wanted--;
5759 if (c == 1)
5760 nocode_wanted++;
5761 expr_cond();
5762 if (c == 1)
5763 nocode_wanted--;
5765 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
5766 mk_pointer(&vtop->type);
5767 type2=vtop->type;
5768 t1 = type1.t;
5769 bt1 = t1 & VT_BTYPE;
5770 t2 = type2.t;
5771 bt2 = t2 & VT_BTYPE;
5772 type.ref = NULL;
5775 /* cast operands to correct type according to ISOC rules */
5776 if (bt1 == VT_VOID || bt2 == VT_VOID) {
5777 type.t = VT_VOID; /* NOTE: as an extension, we accept void on only one side */
5778 } else if (is_float(bt1) || is_float(bt2)) {
5779 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
5780 type.t = VT_LDOUBLE;
5782 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
5783 type.t = VT_DOUBLE;
5784 } else {
5785 type.t = VT_FLOAT;
5787 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
5788 /* cast to biggest op */
5789 type.t = VT_LLONG | VT_LONG;
5790 if (bt1 == VT_LLONG)
5791 type.t &= t1;
5792 if (bt2 == VT_LLONG)
5793 type.t &= t2;
5794 /* convert to unsigned if it does not fit in a long long */
5795 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
5796 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
5797 type.t |= VT_UNSIGNED;
5798 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
5799 /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
5800 /* If one is a null ptr constant the result type
5801 is the other. */
5802 if (is_null_pointer (vtop)) type = type1;
5803 else if (is_null_pointer (&sv)) type = type2;
5804 else if (bt1 != bt2)
5805 tcc_error("incompatible types in conditional expressions");
5806 else {
5807 CType *pt1 = pointed_type(&type1);
5808 CType *pt2 = pointed_type(&type2);
5809 int pbt1 = pt1->t & VT_BTYPE;
5810 int pbt2 = pt2->t & VT_BTYPE;
5811 int newquals, copied = 0;
5812 /* pointers to void get preferred, otherwise the
5813 pointed to types minus qualifs should be compatible */
5814 type = (pbt1 == VT_VOID) ? type1 : type2;
5815 if (pbt1 != VT_VOID && pbt2 != VT_VOID) {
5816 if(!compare_types(pt1, pt2, 1/*unqualif*/))
5817 tcc_warning("pointer type mismatch in conditional expression\n");
5819 /* combine qualifs */
5820 newquals = ((pt1->t | pt2->t) & (VT_CONSTANT | VT_VOLATILE));
5821 if ((~pointed_type(&type)->t & (VT_CONSTANT | VT_VOLATILE))
5822 & newquals)
5824 /* copy the pointer target symbol */
5825 type.ref = sym_push(SYM_FIELD, &type.ref->type,
5826 0, type.ref->c);
5827 copied = 1;
5828 pointed_type(&type)->t |= newquals;
5830 /* pointers to incomplete arrays get converted to
5831 pointers to completed ones if possible */
5832 if (pt1->t & VT_ARRAY
5833 && pt2->t & VT_ARRAY
5834 && pointed_type(&type)->ref->c < 0
5835 && (pt1->ref->c > 0 || pt2->ref->c > 0))
5837 if (!copied)
5838 type.ref = sym_push(SYM_FIELD, &type.ref->type,
5839 0, type.ref->c);
5840 pointed_type(&type)->ref =
5841 sym_push(SYM_FIELD, &pointed_type(&type)->ref->type,
5842 0, pointed_type(&type)->ref->c);
5843 pointed_type(&type)->ref->c =
5844 0 < pt1->ref->c ? pt1->ref->c : pt2->ref->c;
5847 } else if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
5848 /* XXX: test structure compatibility */
5849 type = bt1 == VT_STRUCT ? type1 : type2;
5850 } else {
5851 /* integer operations */
5852 type.t = VT_INT | (VT_LONG & (t1 | t2));
5853 /* convert to unsigned if it does not fit in an integer */
5854 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
5855 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
5856 type.t |= VT_UNSIGNED;
5858 /* keep structs as lvalues by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
5859 that `(expr ? a : b).mem` does not error with "lvalue expected" */
5860 islv = (vtop->r & VT_LVAL) && (sv.r & VT_LVAL) && VT_STRUCT == (type.t & VT_BTYPE);
5862 /* now we convert second operand */
5863 if (c != 1) {
5864 gen_cast(&type);
5865 if (islv) {
5866 mk_pointer(&vtop->type);
5867 gaddrof();
5868 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5869 gaddrof();
5872 rc = RC_INT;
5873 if (is_float(type.t)) {
5874 rc = RC_FLOAT;
5875 #ifdef TCC_TARGET_X86_64
5876 if ((type.t & VT_BTYPE) == VT_LDOUBLE) {
5877 rc = RC_ST0;
5879 #endif
5880 } else if ((type.t & VT_BTYPE) == VT_LLONG) {
5881 /* for long longs, we use fixed registers to avoid having
5882 to handle a complicated move */
5883 rc = RC_IRET;
5886 tt = r2 = 0;
5887 if (c < 0) {
5888 r2 = gv(rc);
5889 tt = gjmp(0);
5891 gsym(u);
5893 /* this is horrible, but we must also convert first
5894 operand */
5895 if (c != 0) {
5896 *vtop = sv;
5897 gen_cast(&type);
5898 if (islv) {
5899 mk_pointer(&vtop->type);
5900 gaddrof();
5901 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5902 gaddrof();
5905 if (c < 0 || islv) {
5906 r1 = gv(rc);
5907 move_reg(r2, r1, type.t);
5908 vtop->r = r2;
5909 gsym(tt);
5910 if (islv)
5911 indir();
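/* Examples of the result-type rules implemented above:
       c ? 1 : 2.0     has type double (usual arithmetic conversions)
       c ? 0 : p       has the type of p (0 is a null pointer constant)
       c ? pv : pi     with void *pv, int *pi, has type void *
   and when both operands are struct lvalues the result stays an lvalue
   via the `*(expr ? &a : &b)` rewrite mentioned above. */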
5917 static void expr_eq(void)
5919 int t;
5921 expr_cond();
5922 if (tok == '=' ||
5923 (tok >= TOK_A_MOD && tok <= TOK_A_DIV) ||
5924 tok == TOK_A_XOR || tok == TOK_A_OR ||
5925 tok == TOK_A_SHL || tok == TOK_A_SAR) {
5926 test_lvalue();
5927 t = tok;
5928 next();
5929 if (t == '=') {
5930 expr_eq();
5931 } else {
5932 vdup();
5933 expr_eq();
5934 gen_op(t & 0x7f);
5936 vstore();
5940 ST_FUNC void gexpr(void)
5942 while (1) {
5943 expr_eq();
5944 if (tok != ',')
5945 break;
5946 vpop();
5947 next();
5951 /* parse a constant expression and return value in vtop. */
5952 static void expr_const1(void)
5954 const_wanted++;
5955 nocode_wanted++;
5956 expr_cond();
5957 nocode_wanted--;
5958 const_wanted--;
5961 /* parse an integer constant and return its value. */
5962 static inline int64_t expr_const64(void)
5964 int64_t c;
5965 expr_const1();
5966 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
5967 expect("constant expression");
5968 c = vtop->c.i;
5969 vpop();
5970 return c;
5973 /* parse an integer constant and return its value.
5974 Complain if it doesn't fit 32bit (signed or unsigned). */
5975 ST_FUNC int expr_const(void)
5977 int c;
5978 int64_t wc = expr_const64();
5979 c = wc;
5980 if (c != wc && (unsigned)c != wc)
5981 tcc_error("constant exceeds 32 bit");
5982 return c;
5985 /* return the label token if current token is a label, otherwise
5986 return zero */
5987 static int is_label(void)
5989 int last_tok;
5991 /* fast test first */
5992 if (tok < TOK_UIDENT)
5993 return 0;
5994 /* no need to save tokc because tok is an identifier */
5995 last_tok = tok;
5996 next();
5997 if (tok == ':') {
5998 return last_tok;
5999 } else {
6000 unget_tok(last_tok);
6001 return 0;
6005 #ifndef TCC_TARGET_ARM64
6006 static void gfunc_return(CType *func_type)
6008 if ((func_type->t & VT_BTYPE) == VT_STRUCT) {
6009 CType type, ret_type;
6010 int ret_align, ret_nregs, regsize;
6011 ret_nregs = gfunc_sret(func_type, func_var, &ret_type,
6012 &ret_align, &regsize);
6013 if (0 == ret_nregs) {
6014 /* if returning structure, must copy it to implicit
6015 first pointer arg location */
6016 type = *func_type;
6017 mk_pointer(&type);
6018 vset(&type, VT_LOCAL | VT_LVAL, func_vc);
6019 indir();
6020 vswap();
6021 /* copy structure value to pointer */
6022 vstore();
6023 } else {
6024 /* returning structure packed into registers */
6025 int r, size, addr, align;
6026 size = type_size(func_type,&align);
6027 if ((vtop->r != (VT_LOCAL | VT_LVAL) ||
6028 (vtop->c.i & (ret_align-1)))
6029 && (align & (ret_align-1))) {
6030 loc = (loc - size) & -ret_align;
6031 addr = loc;
6032 type = *func_type;
6033 vset(&type, VT_LOCAL | VT_LVAL, addr);
6034 vswap();
6035 vstore();
6036 vpop();
6037 vset(&ret_type, VT_LOCAL | VT_LVAL, addr);
6039 vtop->type = ret_type;
6040 if (is_float(ret_type.t))
6041 r = rc_fret(ret_type.t);
6042 else
6043 r = RC_IRET;
6045 if (ret_nregs == 1)
6046 gv(r);
6047 else {
6048 for (;;) {
6049 vdup();
6050 gv(r);
6051 vpop();
6052 if (--ret_nregs == 0)
6053 break;
6054 /* We assume that when a structure is returned in multiple
6055 registers, their classes are consecutive values of the
6056 sequence s(n) = 2^n */
6057 r <<= 1;
6058 vtop->c.i += regsize;
6062 } else if (is_float(func_type->t)) {
6063 gv(rc_fret(func_type->t));
6064 } else {
6065 gv(RC_IRET);
6067 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
6069 #endif
6071 static int case_cmp(const void *pa, const void *pb)
6073 int64_t a = (*(struct case_t**) pa)->v1;
6074 int64_t b = (*(struct case_t**) pb)->v1;
6075 return a < b ? -1 : a > b;
6078 static void gcase(struct case_t **base, int len, int *bsym)
6080 struct case_t *p;
6081 int e;
6082 int ll = (vtop->type.t & VT_BTYPE) == VT_LLONG;
6083 gv(RC_INT);
6084 while (len > 4) {
6085 /* binary search */
6086 p = base[len/2];
6087 vdup();
6088 if (ll)
6089 vpushll(p->v2);
6090 else
6091 vpushi(p->v2);
6092 gen_op(TOK_LE);
6093 e = gtst(1, 0);
6094 vdup();
6095 if (ll)
6096 vpushll(p->v1);
6097 else
6098 vpushi(p->v1);
6099 gen_op(TOK_GE);
6100 gtst_addr(0, p->sym); /* v1 <= x <= v2 */
6101 /* x < v1 */
6102 gcase(base, len/2, bsym);
6103 if (cur_switch->def_sym)
6104 gjmp_addr(cur_switch->def_sym);
6105 else
6106 *bsym = gjmp(*bsym);
6107 /* x > v2 */
6108 gsym(e);
6109 e = len/2 + 1;
6110 base += e; len -= e;
6112 /* linear scan */
6113 while (len--) {
6114 p = *base++;
6115 vdup();
6116 if (ll)
6117 vpushll(p->v2);
6118 else
6119 vpushi(p->v2);
6120 if (p->v1 == p->v2) {
6121 gen_op(TOK_EQ);
6122 gtst_addr(0, p->sym);
6123 } else {
6124 gen_op(TOK_LE);
6125 e = gtst(1, 0);
6126 vdup();
6127 if (ll)
6128 vpushll(p->v1);
6129 else
6130 vpushi(p->v1);
6131 gen_op(TOK_GE);
6132 gtst_addr(0, p->sym);
6133 gsym(e);
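/* Example: each 'case' is stored as a [v1,v2] range (v1 == v2 for a
   plain case; the GNU extension "case 1 ... 5:" gives v1=1, v2=5).  The
   sorted list is searched with a binary split while more than 4 entries
   remain, then linearly: a single value is tested with ==, a range with
   v1 <= x && x <= v2, jumping to the case label on a match. */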
6138 static void block(int *bsym, Sym *bcl, int *csym, Sym *ccl, int is_expr)
6140 int a, b, c, d, cond;
6141 Sym *s;
6143 /* generate line number info */
6144 if (tcc_state->do_debug)
6145 tcc_debug_line(tcc_state);
6147 if (is_expr) {
6148 /* default return value is (void) */
6149 vpushi(0);
6150 vtop->type.t = VT_VOID;
6153 if (tok == TOK_IF) {
6154 /* if test */
6155 int saved_nocode_wanted = nocode_wanted;
6156 next();
6157 skip('(');
6158 gexpr();
6159 skip(')');
6160 cond = condition_3way();
6161 if (cond == 1)
6162 a = 0, vpop();
6163 else
6164 a = gvtst(1, 0);
6165 if (cond == 0)
6166 nocode_wanted |= 0x20000000;
6167 block(bsym, bcl, csym, ccl, 0);
6168 if (cond != 1)
6169 nocode_wanted = saved_nocode_wanted;
6170 if (tok == TOK_ELSE) {
6171 next();
6172 d = gjmp(0);
6173 gsym(a);
6174 if (cond == 1)
6175 nocode_wanted |= 0x20000000;
6176 block(bsym, bcl, csym, ccl, 0);
6177 gsym(d); /* patch else jmp */
6178 if (cond != 0)
6179 nocode_wanted = saved_nocode_wanted;
6180 } else
6181 gsym(a);
6182 } else if (tok == TOK_WHILE) {
6183 int saved_nocode_wanted;
6184 nocode_wanted &= ~0x20000000;
6185 next();
6186 d = ind;
6187 vla_sp_restore();
6188 skip('(');
6189 gexpr();
6190 skip(')');
6191 a = gvtst(1, 0);
6192 b = 0;
6193 ++local_scope;
6194 saved_nocode_wanted = nocode_wanted;
6195 block(&a, current_cleanups, &b, current_cleanups, 0);
6196 nocode_wanted = saved_nocode_wanted;
6197 --local_scope;
6198 gjmp_addr(d);
6199 gsym(a);
6200 gsym_addr(b, d);
6201 } else if (tok == '{') {
6202 Sym *llabel, *lcleanup;
6203 int block_vla_sp_loc = vla_sp_loc, saved_vlas_in_scope = vlas_in_scope;
6204 int lncleanups = ncleanups;
6206 next();
6207 /* record local declaration stack position */
6208 s = local_stack;
6209 llabel = local_label_stack;
6210 lcleanup = current_cleanups;
6211 ++local_scope;
6213 /* handle local labels declarations */
6214 while (tok == TOK_LABEL) {
6215 next();
6216 for(;;) {
6217 if (tok < TOK_UIDENT)
6218 expect("label identifier");
6219 label_push(&local_label_stack, tok, LABEL_DECLARED);
6220 next();
6221 if (tok == ',') {
6222 next();
6223 } else {
6224 skip(';');
6225 break;
6229 while (tok != '}') {
6230 if ((a = is_label()))
6231 unget_tok(a);
6232 else
6233 decl(VT_LOCAL);
6234 if (tok != '}') {
6235 if (is_expr)
6236 vpop();
6237 block(bsym, bcl, csym, ccl, is_expr);
6241 if (current_cleanups != lcleanup) {
6242 int jmp = 0;
6243 Sym *g, **pg;
6245 for (pg = &pending_gotos; (g = *pg) && g->c > lncleanups;)
6246 if (g->prev_tok->r & LABEL_FORWARD) {
6247 Sym *pcl = g->next;
6248 if (!jmp)
6249 jmp = gjmp(0);
6250 gsym(pcl->jnext);
6251 try_call_scope_cleanup(lcleanup);
6252 pcl->jnext = gjmp(0);
6253 if (!lncleanups)
6254 goto remove_pending;
6255 g->c = lncleanups;
6256 pg = &g->prev;
6257 } else {
6258 remove_pending:
6259 *pg = g->prev;
6260 sym_free(g);
6262 gsym(jmp);
6263 if (!nocode_wanted) {
6264 try_call_scope_cleanup(lcleanup);
6268 current_cleanups = lcleanup;
6269 ncleanups = lncleanups;
6270 /* pop locally defined labels */
6271 label_pop(&local_label_stack, llabel, is_expr);
6272 /* pop locally defined symbols */
6273 --local_scope;
6274 /* In the is_expr case (a statement expression is finished here),
6275 vtop might refer to symbols on the local_stack. Either via the
6276 type or via vtop->sym. We can't pop those nor any that in turn
6277 might be referred to. To make it easier we don't roll back
6278 any symbols in that case; some upper level call to block() will
6279 do that. We do have to remove such symbols from the lookup
6280 tables, though. sym_pop will do that. */
6281 sym_pop(&local_stack, s, is_expr);
6283 /* Pop VLA frames and restore stack pointer if required */
6284 if (vlas_in_scope > saved_vlas_in_scope) {
6285 vla_sp_loc = saved_vlas_in_scope ? block_vla_sp_loc : vla_sp_root_loc;
6286 vla_sp_restore();
6288 vlas_in_scope = saved_vlas_in_scope;
6290 next();
6291 } else if (tok == TOK_RETURN) {
6292 next();
6293 if (tok != ';') {
6294 gexpr();
6295 gen_assign_cast(&func_vt);
6296 try_call_scope_cleanup(NULL);
6297 if ((func_vt.t & VT_BTYPE) == VT_VOID)
6298 vtop--;
6299 else
6300 gfunc_return(&func_vt);
6301 } else {
6302 try_call_scope_cleanup(NULL);
6304 skip(';');
6305 /* jump unless last stmt in top-level block */
6306 if (tok != '}' || local_scope != 1)
6307 rsym = gjmp(rsym);
6308 nocode_wanted |= 0x20000000;
6309 } else if (tok == TOK_BREAK) {
6310 /* compute jump */
6311 if (!bsym)
6312 tcc_error("cannot break");
6313 try_call_scope_cleanup(bcl);
6314 *bsym = gjmp(*bsym);
6315 next();
6316 skip(';');
6317 nocode_wanted |= 0x20000000;
6318 } else if (tok == TOK_CONTINUE) {
6319 /* compute jump */
6320 if (!csym)
6321 tcc_error("cannot continue");
6322 try_call_scope_cleanup(ccl);
6323 vla_sp_restore_root();
6324 *csym = gjmp(*csym);
6325 next();
6326 skip(';');
6327 nocode_wanted |= 0x20000000;
6328 } else if (tok == TOK_FOR) {
6329 int e;
6330 int saved_nocode_wanted;
6331 Sym *lcleanup = current_cleanups;
6332 int lncleanups = ncleanups;
6334 nocode_wanted &= ~0x20000000;
6335 next();
6336 skip('(');
6337 s = local_stack;
6338 ++local_scope;
6339 if (tok != ';') {
6340 /* c99 for-loop init decl? */
6341 if (!decl0(VT_LOCAL, 1, NULL)) {
6342 /* no, regular for-loop init expr */
6343 gexpr();
6344 vpop();
6347 skip(';');
6348 d = ind;
6349 c = ind;
6350 vla_sp_restore();
6351 a = 0;
6352 b = 0;
6353 if (tok != ';') {
6354 gexpr();
6355 a = gvtst(1, 0);
6357 skip(';');
6358 if (tok != ')') {
6359 e = gjmp(0);
6360 c = ind;
6361 vla_sp_restore();
6362 gexpr();
6363 vpop();
6364 gjmp_addr(d);
6365 gsym(e);
6367 skip(')');
6368 saved_nocode_wanted = nocode_wanted;
6369 block(&a, current_cleanups, &b, current_cleanups, 0);
6370 nocode_wanted = saved_nocode_wanted;
6371 gjmp_addr(c);
6372 gsym(a);
6373 gsym_addr(b, c);
6374 --local_scope;
6375 try_call_scope_cleanup(lcleanup);
6376 ncleanups = lncleanups;
6377 current_cleanups = lcleanup;
6378 sym_pop(&local_stack, s, 0);
6380 } else
6381 if (tok == TOK_DO) {
6382 int saved_nocode_wanted;
6383 nocode_wanted &= ~0x20000000;
6384 next();
6385 a = 0;
6386 b = 0;
6387 d = ind;
6388 vla_sp_restore();
6389 saved_nocode_wanted = nocode_wanted;
6390 block(&a, current_cleanups, &b, current_cleanups, 0);
6391 skip(TOK_WHILE);
6392 skip('(');
6393 gsym(b);
6394 if (b)
6395 nocode_wanted = saved_nocode_wanted;
6396 gexpr();
6397 c = gvtst(0, 0);
6398 gsym_addr(c, d);
6399 nocode_wanted = saved_nocode_wanted;
6400 skip(')');
6401 gsym(a);
6402 skip(';');
6403 } else
6404 if (tok == TOK_SWITCH) {
6405 struct switch_t *saved, sw;
6406 int saved_nocode_wanted = nocode_wanted;
6407 SValue switchval;
6408 next();
6409 skip('(');
6410 gexpr();
6411 skip(')');
6412 switchval = *vtop--;
6413 a = 0;
6414 b = gjmp(0); /* jump to first case */
6415 sw.p = NULL; sw.n = 0; sw.def_sym = 0;
6416 saved = cur_switch;
6417 cur_switch = &sw;
6418 block(&a, current_cleanups, csym, ccl, 0);
6419 nocode_wanted = saved_nocode_wanted;
6420 a = gjmp(a); /* add implicit break */
6421 /* case lookup */
6422 gsym(b);
6423 qsort(sw.p, sw.n, sizeof(void*), case_cmp);
6424 for (b = 1; b < sw.n; b++)
6425 if (sw.p[b - 1]->v2 >= sw.p[b]->v1)
6426 tcc_error("duplicate case value");
6427 /* Our switch table sorting is signed, so the compared
6428 value needs to be as well when it's 64bit. */
6429 if ((switchval.type.t & VT_BTYPE) == VT_LLONG)
6430 switchval.type.t &= ~VT_UNSIGNED;
6431 vpushv(&switchval);
6432 gcase(sw.p, sw.n, &a);
6433 vpop();
6434 if (sw.def_sym)
6435 gjmp_addr(sw.def_sym);
6436 dynarray_reset(&sw.p, &sw.n);
6437 cur_switch = saved;
6438 /* break label */
6439 gsym(a);
6440 } else
6441 if (tok == TOK_CASE) {
6442 struct case_t *cr = tcc_malloc(sizeof(struct case_t));
6443 if (!cur_switch)
6444 expect("switch");
6445 nocode_wanted &= ~0x20000000;
6446 next();
6447 cr->v1 = cr->v2 = expr_const64();
6448 if (gnu_ext && tok == TOK_DOTS) {
6449 next();
6450 cr->v2 = expr_const64();
6451 if (cr->v2 < cr->v1)
6452 tcc_warning("empty case range");
6454 cr->sym = ind;
6455 dynarray_add(&cur_switch->p, &cur_switch->n, cr);
6456 skip(':');
6457 is_expr = 0;
6458 goto block_after_label;
6459 } else
6460 if (tok == TOK_DEFAULT) {
6461 next();
6462 skip(':');
6463 if (!cur_switch)
6464 expect("switch");
6465 if (cur_switch->def_sym)
6466 tcc_error("too many 'default'");
6467 cur_switch->def_sym = ind;
6468 is_expr = 0;
6469 goto block_after_label;
6470 } else
6471 if (tok == TOK_GOTO) {
6472 next();
6473 if (tok == '*' && gnu_ext) {
6474 /* computed goto */
6475 next();
6476 gexpr();
6477 if ((vtop->type.t & VT_BTYPE) != VT_PTR)
6478 expect("pointer");
6479 ggoto();
6480 } else if (tok >= TOK_UIDENT) {
6481 s = label_find(tok);
6482 /* put forward definition if needed */
6483 if (!s)
6484 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
6485 else if (s->r == LABEL_DECLARED)
6486 s->r = LABEL_FORWARD;
6488 vla_sp_restore_root();
6489 if (s->r & LABEL_FORWARD) {
6490 /* start new goto chain for cleanups, linked via label->next */
6491 if (current_cleanups) {
6492 sym_push2(&pending_gotos, SYM_FIELD, 0, ncleanups);
6493 pending_gotos->prev_tok = s;
6494 s = sym_push2(&s->next, SYM_FIELD, 0, 0);
6495 pending_gotos->next = s;
6497 s->jnext = gjmp(s->jnext);
6498 } else {
6499 try_call_cleanup_goto(s->cleanupstate);
6500 gjmp_addr(s->jnext);
6502 next();
6503 } else {
6504 expect("label identifier");
6506 skip(';');
6507 } else if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
6508 asm_instr();
6509 } else {
6510 b = is_label();
6511 if (b) {
6512 /* label case */
6513 next();
6514 s = label_find(b);
6515 if (s) {
6516 if (s->r == LABEL_DEFINED)
6517 tcc_error("duplicate label '%s'", get_tok_str(s->v, NULL));
6518 s->r = LABEL_DEFINED;
6519 if (s->next) {
6520 Sym *pcl; /* pending cleanup goto */
6521 for (pcl = s->next; pcl; pcl = pcl->prev)
6522 gsym(pcl->jnext);
6523 sym_pop(&s->next, NULL, 0);
6524 } else
6525 gsym(s->jnext);
6526 } else {
6527 s = label_push(&global_label_stack, b, LABEL_DEFINED);
6529 s->jnext = ind;
6530 s->cleanupstate = current_cleanups;
6531 vla_sp_restore();
6532 /* we accept this, but it is a mistake */
6533 block_after_label:
6534 nocode_wanted &= ~0x20000000;
6535 if (tok == '}') {
6536 tcc_warning("deprecated use of label at end of compound statement");
6537 } else {
6538 if (is_expr)
6539 vpop();
6540 block(bsym, bcl, csym, ccl, is_expr);
6542 } else {
6543 /* expression case */
6544 if (tok != ';') {
6545 if (is_expr) {
6546 vpop();
6547 gexpr();
6548 } else {
6549 gexpr();
6550 vpop();
6553 skip(';');
6558 /* This skips over a stream of tokens containing balanced {} and ()
6559 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
6560 with a '{'). If STR is non-NULL, the skipped tokens are allocated and stored
6561 in *STR. This doesn't check if () and {} are nested correctly,
6562 i.e. "({)}" is accepted. */
6563 static void skip_or_save_block(TokenString **str)
6565 int braces = tok == '{';
6566 int level = 0;
6567 if (str)
6568 *str = tok_str_alloc();
6570 while ((level > 0 || (tok != '}' && tok != ',' && tok != ';' && tok != ')'))) {
6571 int t;
6572 if (tok == TOK_EOF) {
6573 if (str || level > 0)
6574 tcc_error("unexpected end of file");
6575 else
6576 break;
6578 if (str)
6579 tok_str_add_tok(*str);
6580 t = tok;
6581 next();
6582 if (t == '{' || t == '(') {
6583 level++;
6584 } else if (t == '}' || t == ')') {
6585 level--;
6586 if (level == 0 && braces && t == '}')
6587 break;
6590 if (str) {
6591 tok_str_add(*str, -1);
6592 tok_str_add(*str, 0);
6596 #define EXPR_CONST 1
6597 #define EXPR_ANY 2
6599 static void parse_init_elem(int expr_type)
6601 int saved_global_expr;
6602 switch(expr_type) {
6603 case EXPR_CONST:
6604 /* compound literals must be allocated globally in this case */
6605 saved_global_expr = global_expr;
6606 global_expr = 1;
6607 expr_const1();
6608 global_expr = saved_global_expr;
6609 /* NOTE: symbols are accepted, as well as lvalue for anon symbols
6610 (compound literals). */
6611 if (((vtop->r & (VT_VALMASK | VT_LVAL)) != VT_CONST
6612 && ((vtop->r & (VT_SYM|VT_LVAL)) != (VT_SYM|VT_LVAL)
6613 || vtop->sym->v < SYM_FIRST_ANOM))
6614 #ifdef TCC_TARGET_PE
6615 || ((vtop->r & VT_SYM) && vtop->sym->a.dllimport)
6616 #endif
6618 tcc_error("initializer element is not constant");
6619 break;
6620 case EXPR_ANY:
6621 expr_eq();
6622 break;
6626 /* put zeros for variable based init */
6627 static void init_putz(Section *sec, unsigned long c, int size)
6629 if (sec) {
6630 /* nothing to do because globals are already set to zero */
6631 } else {
6632 vpush_global_sym(&func_old_type, TOK_memset);
6633 vseti(VT_LOCAL, c);
6634 #ifdef TCC_TARGET_ARM
6635 vpushs(size);
6636 vpushi(0);
6637 #else
6638 vpushi(0);
6639 vpushs(size);
6640 #endif
6641 gfunc_call(3);
6645 #define DIF_FIRST 1
6646 #define DIF_SIZE_ONLY 2
6647 #define DIF_HAVE_ELEM 4
6649 /* t is the array or struct type. c is the array or struct
6650 address. cur_field is the pointer to the current
6651 field, for arrays the 'c' member contains the current start
6652 index. 'flags' is as in decl_initializer.
6653 'al' contains the already initialized length of the
6654 current container (starting at c). This returns the new length of that. */
6655 static int decl_designator(CType *type, Section *sec, unsigned long c,
6656 Sym **cur_field, int flags, int al)
6658 Sym *s, *f;
6659 int index, index_last, align, l, nb_elems, elem_size;
6660 unsigned long corig = c;
6662 elem_size = 0;
6663 nb_elems = 1;
6664 if (flags & DIF_HAVE_ELEM)
6665 goto no_designator;
6666 if (gnu_ext && (l = is_label()) != 0)
6667 goto struct_field;
6668 /* NOTE: we only support ranges for the last designator */
6669 while (nb_elems == 1 && (tok == '[' || tok == '.')) {
6670 if (tok == '[') {
6671 if (!(type->t & VT_ARRAY))
6672 expect("array type");
6673 next();
6674 index = index_last = expr_const();
6675 if (tok == TOK_DOTS && gnu_ext) {
6676 next();
6677 index_last = expr_const();
6679 skip(']');
6680 s = type->ref;
6681 if (index < 0 || (s->c >= 0 && index_last >= s->c) ||
6682 index_last < index)
6683 tcc_error("invalid index");
6684 if (cur_field)
6685 (*cur_field)->c = index_last;
6686 type = pointed_type(type);
6687 elem_size = type_size(type, &align);
6688 c += index * elem_size;
6689 nb_elems = index_last - index + 1;
6690 } else {
6691 int cumofs = 0;
6692 next();
6693 l = tok;
6694 struct_field:
6695 next();
6696 if ((type->t & VT_BTYPE) != VT_STRUCT)
6697 expect("struct/union type");
6698 f = find_field(type, l, &cumofs);
6699 if (!f)
6700 expect("field");
6701 if (cur_field)
6702 *cur_field = f;
6703 type = &f->type;
6704 c += cumofs + f->c;
6706 cur_field = NULL;
6708 if (!cur_field) {
6709 if (tok == '=') {
6710 next();
6711 } else if (!gnu_ext) {
6712 expect("=");
6714 } else {
6715 no_designator:
6716 if (type->t & VT_ARRAY) {
6717 index = (*cur_field)->c;
6718 if (type->ref->c >= 0 && index >= type->ref->c)
6719 tcc_error("index too large");
6720 type = pointed_type(type);
6721 c += index * type_size(type, &align);
6722 } else {
6723 f = *cur_field;
6724 while (f && (f->v & SYM_FIRST_ANOM) && (f->type.t & VT_BITFIELD))
6725 *cur_field = f = f->next;
6726 if (!f)
6727 tcc_error("too many field init");
6728 type = &f->type;
6729 c += f->c;
6732 /* must put zero in holes (note that doing it that way
6733 ensures that it even works with designators) */
6734 if (!(flags & DIF_SIZE_ONLY) && c - corig > al)
6735 init_putz(sec, corig + al, c - corig - al);
6736 decl_initializer(type, sec, c, flags & ~DIF_FIRST);
6738 /* XXX: make it more general */
6739 if (!(flags & DIF_SIZE_ONLY) && nb_elems > 1) {
6740 unsigned long c_end;
6741 uint8_t *src, *dst;
6742 int i;
6744 if (!sec) {
6745 vset(type, VT_LOCAL|VT_LVAL, c);
6746 for (i = 1; i < nb_elems; i++) {
6747 vset(type, VT_LOCAL|VT_LVAL, c + elem_size * i);
6748 vswap();
6749 vstore();
6751 vpop();
6752 } else if (!NODATA_WANTED) {
6753 c_end = c + nb_elems * elem_size;
6754 if (c_end > sec->data_allocated)
6755 section_realloc(sec, c_end);
6756 src = sec->data + c;
6757 dst = src;
6758 for(i = 1; i < nb_elems; i++) {
6759 dst += elem_size;
6760 memcpy(dst, src, elem_size);
6764 c += nb_elems * type_size(type, &align);
6765 if (c - corig > al)
6766 al = c - corig;
6767 return al;
6770 /* store a value or an expression directly in global data or in local array */
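/* Example of the static bitfield path below (hedged sketch): for
       static struct { unsigned a:5, b:7; } s = { 3, 66 };
   storing 'b' enters the bitfield loop with bit_pos = 5 and bit_size = 7
   (assuming the usual layout): the first pass writes the low 3 bits of 66
   into the top of byte 0, the second writes the remaining 4 bits into the
   bottom of byte 1, each time masking with 'm' so neighbouring bits keep
   their value. */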
6771 static void init_putv(CType *type, Section *sec, unsigned long c)
6773 int bt;
6774 void *ptr;
6775 CType dtype;
6777 dtype = *type;
6778 dtype.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
6780 if (sec) {
6781 int size, align;
6782 /* XXX: not portable */
6783 /* XXX: generate error if incorrect relocation */
6784 gen_assign_cast(&dtype);
6785 bt = type->t & VT_BTYPE;
6787 if ((vtop->r & VT_SYM)
6788 && bt != VT_PTR
6789 && bt != VT_FUNC
6790 && (bt != (PTR_SIZE == 8 ? VT_LLONG : VT_INT)
6791 || (type->t & VT_BITFIELD))
6792 && !((vtop->r & VT_CONST) && vtop->sym->v >= SYM_FIRST_ANOM)
6794 tcc_error("initializer element is not computable at load time");
6796 if (NODATA_WANTED) {
6797 vtop--;
6798 return;
6801 size = type_size(type, &align);
6802 section_reserve(sec, c + size);
6803 ptr = sec->data + c;
6805 /* XXX: make code faster ? */
6806 if ((vtop->r & (VT_SYM|VT_CONST)) == (VT_SYM|VT_CONST) &&
6807 vtop->sym->v >= SYM_FIRST_ANOM &&
6808 /* XXX This rejects compound literals like
6809 '(void *){ptr}'. The problem is that '&sym' is
6810 represented the same way, which would be ruled out
6811 by the SYM_FIRST_ANOM check above, but also '"string"'
6812 in 'char *p = "string"' is represented the same
6813 with the type being VT_PTR and the symbol being an
6814 anonymous one. That is, there's no difference in vtop
6815 between '(void *){x}' and '&(void *){x}'. Ignore
6816 pointer typed entities here. Hopefully no real code
6817 will ever use compound literals with scalar type. */
6818 (vtop->type.t & VT_BTYPE) != VT_PTR) {
6819 /* These come from compound literals, memcpy stuff over. */
6820 Section *ssec;
6821 ElfSym *esym;
6822 ElfW_Rel *rel;
6823 esym = elfsym(vtop->sym);
6824 ssec = tcc_state->sections[esym->st_shndx];
6825 memmove (ptr, ssec->data + esym->st_value, size);
6826 if (ssec->reloc) {
6827 /* We need to copy over all memory contents, and that
6828 includes relocations. Use the fact that relocs are
6829 created in order, so look from the end of relocs
6830 until we hit one before the copied region. */
6831 int num_relocs = ssec->reloc->data_offset / sizeof(*rel);
6832 rel = (ElfW_Rel*)(ssec->reloc->data + ssec->reloc->data_offset);
6833 while (num_relocs--) {
6834 rel--;
6835 if (rel->r_offset >= esym->st_value + size)
6836 continue;
6837 if (rel->r_offset < esym->st_value)
6838 break;
6839 /* Note: if the same fields are initialized multiple
6840 times (possible with designators) then we possibly
6841 add multiple relocations for the same offset here.
6842 That would lead to wrong code, the last reloc needs
6843 to win. We clean this up later after the whole
6844 initializer is parsed. */
6845 put_elf_reloca(symtab_section, sec,
6846 c + rel->r_offset - esym->st_value,
6847 ELFW(R_TYPE)(rel->r_info),
6848 ELFW(R_SYM)(rel->r_info),
6849 #if PTR_SIZE == 8
6850 rel->r_addend
6851 #else
6852 0
6853 #endif
6854 );
6855 }
6856 }
6857 } else {
6858 if (type->t & VT_BITFIELD) {
6859 int bit_pos, bit_size, bits, n;
6860 unsigned char *p, v, m;
6861 bit_pos = BIT_POS(vtop->type.t);
6862 bit_size = BIT_SIZE(vtop->type.t);
6863 p = (unsigned char*)ptr + (bit_pos >> 3);
6864 bit_pos &= 7, bits = 0;
6865 while (bit_size) {
6866 n = 8 - bit_pos;
6867 if (n > bit_size)
6868 n = bit_size;
6869 v = vtop->c.i >> bits << bit_pos;
6870 m = ((1 << n) - 1) << bit_pos;
6871 *p = (*p & ~m) | (v & m);
6872 bits += n, bit_size -= n, bit_pos = 0, ++p;
6874 } else
6875 switch(bt) {
6876 /* XXX: when cross-compiling we assume that each type has the
6877 same representation on host and target, which is likely to
6878 be wrong in the case of long double */
6879 case VT_BOOL:
6880 vtop->c.i = vtop->c.i != 0; /* fall through */
6881 case VT_BYTE:
6882 *(char *)ptr |= vtop->c.i;
6883 break;
6884 case VT_SHORT:
6885 *(short *)ptr |= vtop->c.i;
6886 break;
6887 case VT_FLOAT:
6888 *(float*)ptr = vtop->c.f;
6889 break;
6890 case VT_DOUBLE:
6891 *(double *)ptr = vtop->c.d;
6892 break;
6893 case VT_LDOUBLE:
6894 #if defined TCC_IS_NATIVE_387
6895 if (sizeof (long double) >= 10) /* zero pad ten-byte LD */
6896 memcpy(ptr, &vtop->c.ld, 10);
6897 #ifdef __TINYC__
6898 else if (sizeof (long double) == sizeof (double))
6899 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr) : "m" (vtop->c.ld));
6900 #endif
6901 else if (vtop->c.ld == 0.0)
6902 ; /* nothing to do: section data is already zero */
6903 else
6904 #endif
6905 if (sizeof(long double) == LDOUBLE_SIZE)
6906 *(long double*)ptr = vtop->c.ld;
6907 else if (sizeof(double) == LDOUBLE_SIZE)
6908 *(double *)ptr = (double)vtop->c.ld;
6909 else
6910 tcc_error("can't cross compile long double constants");
6911 break;
6912 #if PTR_SIZE != 8
6913 case VT_LLONG:
6914 *(long long *)ptr |= vtop->c.i;
6915 break;
6916 #else
6917 case VT_LLONG:
6918 #endif
6919 case VT_PTR:
6921 addr_t val = vtop->c.i;
6922 #if PTR_SIZE == 8
6923 if (vtop->r & VT_SYM)
6924 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
6925 else
6926 *(addr_t *)ptr |= val;
6927 #else
6928 if (vtop->r & VT_SYM)
6929 greloc(sec, vtop->sym, c, R_DATA_PTR);
6930 *(addr_t *)ptr |= val;
6931 #endif
6932 break;
6934 default:
6936 int val = vtop->c.i;
6937 #if PTR_SIZE == 8
6938 if (vtop->r & VT_SYM)
6939 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
6940 else
6941 *(int *)ptr |= val;
6942 #else
6943 if (vtop->r & VT_SYM)
6944 greloc(sec, vtop->sym, c, R_DATA_PTR);
6945 *(int *)ptr |= val;
6946 #endif
6947 break;
6951 vtop--;
6952 } else {
6953 vset(&dtype, VT_LOCAL|VT_LVAL, c);
6954 vswap();
6955 vstore();
6956 vpop();
6960 /* 't' contains the type and storage info. 'c' is the offset of the
6961 object in section 'sec'. If 'sec' is NULL, it means stack based
6962 allocation. 'flags & DIF_FIRST' is true if array '{' must be read (multi
6963 dimension implicit array init handling). 'flags & DIF_SIZE_ONLY' is true if
6964 size only evaluation is wanted (only for arrays). */
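/* Two illustrative declarations (added) that exercise the branches below:
       char s[] = "ab";              string case: the chars are copied
                                     directly, the implicit trailing 0 is
                                     added, and the open array size (s->c)
                                     is patched from the final length
       int m[2][2] = { 1, 2, 3, 4 }; flat list: the inner '{' of each row
                                     may be omitted (the no_oblock logic
                                     stops each row after n elements) */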
6965 static void decl_initializer(CType *type, Section *sec, unsigned long c,
6966 int flags)
6968 int len, n, no_oblock, nb, i;
6969 int size1, align1;
6970 Sym *s, *f;
6971 Sym indexsym;
6972 CType *t1;
6974 if (!(flags & DIF_HAVE_ELEM) && tok != '{' &&
6975 /* In case of strings we have special handling for arrays, so
6976 don't consume them as initializer value (which would commit them
6977 to some anonymous symbol). */
6978 tok != TOK_LSTR && tok != TOK_STR &&
6979 !(flags & DIF_SIZE_ONLY)) {
6980 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
6981 flags |= DIF_HAVE_ELEM;
6984 if ((flags & DIF_HAVE_ELEM) &&
6985 !(type->t & VT_ARRAY) &&
6986 /* Use i_c_parameter_t, to strip toplevel qualifiers.
6987 The source type might have VT_CONSTANT set, which is
6988 of course assignable to non-const elements. */
6989 is_compatible_unqualified_types(type, &vtop->type)) {
6990 init_putv(type, sec, c);
6991 } else if (type->t & VT_ARRAY) {
6992 s = type->ref;
6993 n = s->c;
6994 t1 = pointed_type(type);
6995 size1 = type_size(t1, &align1);
6997 no_oblock = 1;
6998 if (((flags & DIF_FIRST) && tok != TOK_LSTR && tok != TOK_STR) ||
6999 tok == '{') {
7000 if (tok != '{')
7001 tcc_error("character array initializer must be a literal,"
7002 " optionally enclosed in braces");
7003 skip('{');
7004 no_oblock = 0;
7007 /* only parse strings here if correct type (otherwise: handle
7008 them as ((w)char *) expressions) */
7009 if ((tok == TOK_LSTR &&
7010 #ifdef TCC_TARGET_PE
7011 (t1->t & VT_BTYPE) == VT_SHORT && (t1->t & VT_UNSIGNED)
7012 #else
7013 (t1->t & VT_BTYPE) == VT_INT
7014 #endif
7015 ) || (tok == TOK_STR && (t1->t & VT_BTYPE) == VT_BYTE)) {
7016 len = 0;
7017 while (tok == TOK_STR || tok == TOK_LSTR) {
7018 int cstr_len, ch;
7020 /* compute maximum number of chars wanted */
7021 if (tok == TOK_STR)
7022 cstr_len = tokc.str.size;
7023 else
7024 cstr_len = tokc.str.size / sizeof(nwchar_t);
7025 cstr_len--;
7026 nb = cstr_len;
7027 if (n >= 0 && nb > (n - len))
7028 nb = n - len;
7029 if (!(flags & DIF_SIZE_ONLY)) {
7030 if (cstr_len > nb)
7031 tcc_warning("initializer-string for array is too long");
7032 /* in order to go faster for the common case (char
7033 string in a global variable), we handle it
7034 specifically */
7035 if (sec && tok == TOK_STR && size1 == 1) {
7036 if (!NODATA_WANTED)
7037 memcpy(sec->data + c + len, tokc.str.data, nb);
7038 } else {
7039 for(i=0;i<nb;i++) {
7040 if (tok == TOK_STR)
7041 ch = ((unsigned char *)tokc.str.data)[i];
7042 else
7043 ch = ((nwchar_t *)tokc.str.data)[i];
7044 vpushi(ch);
7045 init_putv(t1, sec, c + (len + i) * size1);
7049 len += nb;
7050 next();
7052 /* only add trailing zero if enough storage (no
7053 warning in this case since it is standard) */
7054 if (n < 0 || len < n) {
7055 if (!(flags & DIF_SIZE_ONLY)) {
7056 vpushi(0);
7057 init_putv(t1, sec, c + (len * size1));
7059 len++;
7061 len *= size1;
7062 } else {
7063 indexsym.c = 0;
7064 f = &indexsym;
7066 do_init_list:
7067 len = 0;
7068 while (tok != '}' || (flags & DIF_HAVE_ELEM)) {
7069 len = decl_designator(type, sec, c, &f, flags, len);
7070 flags &= ~DIF_HAVE_ELEM;
7071 if (type->t & VT_ARRAY) {
7072 ++indexsym.c;
7073 /* special test for multi dimensional arrays (may not
7074 be strictly correct if designators are used at the
7075 same time) */
7076 if (no_oblock && len >= n*size1)
7077 break;
7078 } else {
7079 if (s->type.t == VT_UNION)
7080 f = NULL;
7081 else
7082 f = f->next;
7083 if (no_oblock && f == NULL)
7084 break;
7087 if (tok == '}')
7088 break;
7089 skip(',');
7092 /* put zeros at the end */
7093 if (!(flags & DIF_SIZE_ONLY) && len < n*size1)
7094 init_putz(sec, c + len, n*size1 - len);
7095 if (!no_oblock)
7096 skip('}');
7097 /* patch type size if needed, which happens only for array types */
7098 if (n < 0)
7099 s->c = size1 == 1 ? len : ((len + size1 - 1)/size1);
7100 } else if ((type->t & VT_BTYPE) == VT_STRUCT) {
7101 size1 = 1;
7102 no_oblock = 1;
7103 if ((flags & DIF_FIRST) || tok == '{') {
7104 skip('{');
7105 no_oblock = 0;
7107 s = type->ref;
7108 f = s->next;
7109 n = s->c;
7110 goto do_init_list;
7111 } else if (tok == '{') {
7112 if (flags & DIF_HAVE_ELEM)
7113 skip(';');
7114 next();
7115 decl_initializer(type, sec, c, flags & ~DIF_HAVE_ELEM);
7116 skip('}');
7117 } else if ((flags & DIF_SIZE_ONLY)) {
7118 /* If we supported only ISO C we wouldn't have to accept calling
7119 this on anything other than an array if DIF_SIZE_ONLY (and even then
7120 only on the outermost level, so no recursion would be needed),
7121 because initializing a flex array member isn't supported.
7122 But GNU C supports it, so we need to recurse even into
7123 subfields of structs and arrays when DIF_SIZE_ONLY is set. */
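/* Example of the GNU extension mentioned above (hedged):
       struct S { int n; int tail[]; };
       static struct S s = { 1, { 2, 3 } };
   During the DIF_SIZE_ONLY pass the scalar subobjects simply reach the
   skip below, while the element count of 'tail' is recorded so that
   decl_initializer_alloc() can enlarge the object. */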
7124 /* just skip expression */
7125 skip_or_save_block(NULL);
7126 } else {
7127 if (!(flags & DIF_HAVE_ELEM)) {
7128 /* This should happen only when we haven't parsed
7129 the init element above for fear of committing a
7130 string constant to memory too early. */
7131 if (tok != TOK_STR && tok != TOK_LSTR)
7132 expect("string constant");
7133 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
7135 init_putv(type, sec, c);
7139 /* parse an initializer for type 't' if 'has_init' is non zero, and
7140 allocate space in local or global data space ('r' is either
7141 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
7142 variable 'v' of scope 'scope' is declared before initializers
7143 are parsed. If 'v' is zero, then a reference to the new object
7144 is put in the value stack. If 'has_init' is 2, a special parsing
7145 is done to handle string constants. */
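/* Hedged sketch of the two-pass scheme used below when the size is unknown:
       int a[] = { 1, 2, 3 };
   The initializer tokens are saved via skip_or_save_block(), replayed once
   with DIF_SIZE_ONLY to learn that size == 3 * sizeof(int), storage is then
   reserved (stack slot or section), and the same token string is replayed a
   second time to actually store the values. */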
7146 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r,
7147 int has_init, int v, int scope)
7149 int size, align, addr;
7150 TokenString *init_str = NULL;
7152 Section *sec;
7153 Sym *flexible_array;
7154 Sym *sym = NULL;
7155 int saved_nocode_wanted = nocode_wanted;
7156 #ifdef CONFIG_TCC_BCHECK
7157 int bcheck;
7158 #endif
7160 /* Always allocate static or global variables */
7161 if (v && (r & VT_VALMASK) == VT_CONST)
7162 nocode_wanted |= 0x80000000;
7164 #ifdef CONFIG_TCC_BCHECK
7165 bcheck = tcc_state->do_bounds_check && !NODATA_WANTED;
7166 #endif
7168 flexible_array = NULL;
7169 if ((type->t & VT_BTYPE) == VT_STRUCT) {
7170 Sym *field = type->ref->next;
7171 if (field) {
7172 while (field->next)
7173 field = field->next;
7174 if (field->type.t & VT_ARRAY && field->type.ref->c < 0)
7175 flexible_array = field;
7179 size = type_size(type, &align);
7180 /* If unknown size, we must evaluate it before
7181 evaluating initializers because
7182 initializers can generate global data too
7183 (e.g. string pointers or ISOC99 compound
7184 literals). It also simplifies local
7185 initializers handling */
7186 if (size < 0 || (flexible_array && has_init)) {
7187 if (!has_init)
7188 tcc_error("unknown type size");
7189 /* get all init string */
7190 if (has_init == 2) {
7191 init_str = tok_str_alloc();
7192 /* only get strings */
7193 while (tok == TOK_STR || tok == TOK_LSTR) {
7194 tok_str_add_tok(init_str);
7195 next();
7197 tok_str_add(init_str, -1);
7198 tok_str_add(init_str, 0);
7199 } else {
7200 skip_or_save_block(&init_str);
7202 unget_tok(0);
7204 /* compute size */
7205 begin_macro(init_str, 1);
7206 next();
7207 decl_initializer(type, NULL, 0, DIF_FIRST | DIF_SIZE_ONLY);
7208 /* prepare second initializer parsing */
7209 macro_ptr = init_str->str;
7210 next();
7212 /* if still unknown size, error */
7213 size = type_size(type, &align);
7214 if (size < 0)
7215 tcc_error("unknown type size");
7217 /* If there's a flex member and it was used in the initializer,
7218 adjust size. */
7219 if (flexible_array &&
7220 flexible_array->type.ref->c > 0)
7221 size += flexible_array->type.ref->c
7222 * pointed_size(&flexible_array->type);
7223 /* take into account specified alignment if bigger */
7224 if (ad->a.aligned) {
7225 int speca = 1 << (ad->a.aligned - 1);
7226 if (speca > align)
7227 align = speca;
7228 } else if (ad->a.packed) {
7229 align = 1;
7232 if (!v && NODATA_WANTED)
7233 size = 0, align = 1;
7235 if ((r & VT_VALMASK) == VT_LOCAL) {
7236 sec = NULL;
7237 #ifdef CONFIG_TCC_BCHECK
7238 if (bcheck && (type->t & VT_ARRAY)) {
7239 loc--;
7241 #endif
7242 loc = (loc - size) & -align;
7243 addr = loc;
7244 #ifdef CONFIG_TCC_BCHECK
7245 /* handles bounds */
7246 /* XXX: currently, since we do only one pass, we cannot track
7247 '&' operators, so we add only arrays */
7248 if (bcheck && (type->t & VT_ARRAY)) {
7249 addr_t *bounds_ptr;
7250 /* add padding between regions */
7251 loc--;
7252 /* then add local bound info */
7253 bounds_ptr = section_ptr_add(lbounds_section, 2 * sizeof(addr_t));
7254 bounds_ptr[0] = addr;
7255 bounds_ptr[1] = size;
7257 #endif
7258 if (v) {
7259 /* local variable */
7260 #ifdef CONFIG_TCC_ASM
7261 if (ad->asm_label) {
7262 int reg = asm_parse_regvar(ad->asm_label);
7263 if (reg >= 0)
7264 r = (r & ~VT_VALMASK) | reg;
7266 #endif
7267 sym = sym_push(v, type, r, addr);
7268 if (ad->cleanup_func) {
7269 Sym *cls = sym_push2(&all_cleanups, SYM_FIELD | ++ncleanups, 0, 0);
7270 cls->prev_tok = sym;
7271 cls->next = ad->cleanup_func;
7272 cls->ncl = current_cleanups;
7273 current_cleanups = cls;
7276 sym->a = ad->a;
7277 } else {
7278 /* push local reference */
7279 vset(type, r, addr);
7281 } else {
7282 if (v && scope == VT_CONST) {
7283 /* see if the symbol was already defined */
7284 sym = sym_find(v);
7285 if (sym) {
7286 patch_storage(sym, ad, type);
7287 /* we accept several definitions of the same global variable. */
7288 if (!has_init && sym->c && elfsym(sym)->st_shndx != SHN_UNDEF)
7289 goto no_alloc;
7293 /* allocate symbol in corresponding section */
7294 sec = ad->section;
7295 if (!sec) {
7296 if (has_init)
7297 sec = data_section;
7298 else if (tcc_state->nocommon)
7299 sec = bss_section;
7302 if (sec) {
7303 addr = section_add(sec, size, align);
7304 #ifdef CONFIG_TCC_BCHECK
7305 /* add padding if bound check */
7306 if (bcheck)
7307 section_add(sec, 1, 1);
7308 #endif
7309 } else {
7310 addr = align; /* SHN_COMMON is special, symbol value is align */
7311 sec = common_section;
7314 if (v) {
7315 if (!sym) {
7316 sym = sym_push(v, type, r | VT_SYM, 0);
7317 patch_storage(sym, ad, NULL);
7319 /* update symbol definition */
7320 put_extern_sym(sym, sec, addr, size);
7321 } else {
7322 /* push global reference */
7323 vpush_ref(type, sec, addr, size);
7324 sym = vtop->sym;
7325 vtop->r |= r;
7328 #ifdef CONFIG_TCC_BCHECK
7329 /* handle bounds now because the symbol must be defined
7330 before the relocation can reference it */
7331 if (bcheck) {
7332 addr_t *bounds_ptr;
7334 greloca(bounds_section, sym, bounds_section->data_offset, R_DATA_PTR, 0);
7335 /* then add global bound info */
7336 bounds_ptr = section_ptr_add(bounds_section, 2 * sizeof(addr_t));
7337 bounds_ptr[0] = 0; /* relocated */
7338 bounds_ptr[1] = size;
7340 #endif
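/* VLA example (illustrative): for
       void f(int n) { int v[n]; }
   the branch below saves the stack pointer the first time a VLA appears
   (vla_sp_root_loc), calls gen_vla_alloc() to grow the stack by the runtime
   size of 'v', and records the new stack pointer in vla_sp_loc so the
   scope-exit code elsewhere can restore it. */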
7343 if (type->t & VT_VLA) {
7344 int a;
7346 if (NODATA_WANTED)
7347 goto no_alloc;
7349 /* save current stack pointer */
7350 if (vlas_in_scope == 0) {
7351 if (vla_sp_root_loc == -1)
7352 vla_sp_root_loc = (loc -= PTR_SIZE);
7353 gen_vla_sp_save(vla_sp_root_loc);
7356 vla_runtime_type_size(type, &a);
7357 gen_vla_alloc(type, a);
7358 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
7359 /* on _WIN64, because of the function args scratch area, the
7360 result of alloca differs from RSP and is returned in RAX. */
7361 gen_vla_result(addr), addr = (loc -= PTR_SIZE);
7362 #endif
7363 gen_vla_sp_save(addr);
7364 vla_sp_loc = addr;
7365 vlas_in_scope++;
7367 } else if (has_init) {
7368 size_t oldreloc_offset = 0;
7369 if (sec && sec->reloc)
7370 oldreloc_offset = sec->reloc->data_offset;
7371 decl_initializer(type, sec, addr, DIF_FIRST);
7372 if (sec && sec->reloc)
7373 squeeze_multi_relocs(sec, oldreloc_offset);
7374 /* patch flexible array member size back to -1, */
7375 /* for possible subsequent similar declarations */
7376 if (flexible_array)
7377 flexible_array->type.ref->c = -1;
7380 no_alloc:
7381 /* restore parse state if needed */
7382 if (init_str) {
7383 end_macro();
7384 next();
7387 nocode_wanted = saved_nocode_wanted;
7390 /* parse a function defined by symbol 'sym' and generate its code in
7391 'cur_text_section' */
7392 static void gen_function(Sym *sym)
7394 nocode_wanted = 0;
7395 ind = cur_text_section->data_offset;
7396 if (sym->a.aligned) {
7397 size_t newoff = section_add(cur_text_section, 0,
7398 1 << (sym->a.aligned - 1));
7399 gen_fill_nops(newoff - ind);
7401 /* NOTE: we patch the symbol size later */
7402 put_extern_sym(sym, cur_text_section, ind, 0);
7403 funcname = get_tok_str(sym->v, NULL);
7404 func_ind = ind;
7405 /* Initialize VLA state */
7406 vla_sp_loc = -1;
7407 vla_sp_root_loc = -1;
7408 /* put debug symbol */
7409 tcc_debug_funcstart(tcc_state, sym);
7410 /* push a dummy symbol to enable local sym storage */
7411 sym_push2(&local_stack, SYM_FIELD, 0, 0);
7412 local_scope = 1; /* for function parameters */
7413 gfunc_prolog(&sym->type);
7414 reset_local_scope();
7415 rsym = 0;
7416 clear_temp_local_var_list();
7417 block(NULL, NULL, NULL, NULL, 0);
7418 if (!(nocode_wanted & 0x20000000)
7419 && ((func_vt.t & VT_BTYPE) == VT_INT)
7420 && !strcmp (funcname, "main"))
7421 { /* main() falling off the end implicitly returns 0 (C99) */
7422 nocode_wanted = 0;
7423 vpushi(0);
7424 gen_assign_cast(&func_vt);
7425 gfunc_return(&func_vt);
7426 }
7427 nocode_wanted = 0;
7428 gsym(rsym);
7429 gfunc_epilog();
7430 cur_text_section->data_offset = ind;
7431 label_pop(&global_label_stack, NULL, 0);
7432 /* reset local stack */
7433 reset_local_scope();
7434 sym_pop(&local_stack, NULL, 0);
7435 /* end of function */
7436 /* patch symbol size */
7437 elfsym(sym)->st_size = ind - func_ind;
7438 tcc_debug_funcend(tcc_state, ind - func_ind);
7439 /* It's better to crash than to generate wrong code */
7440 cur_text_section = NULL;
7441 funcname = ""; /* for safety */
7442 func_vt.t = VT_VOID; /* for safety */
7443 func_var = 0; /* for safety */
7444 ind = 0; /* for safety */
7445 nocode_wanted = 0x80000000;
7446 check_vstack();
7449 static void gen_inline_functions(TCCState *s)
7451 Sym *sym;
7452 int inline_generated, i, ln;
7453 struct InlineFunc *fn;
7455 ln = file->line_num;
7456 /* iterate while inline functions are referenced */
7457 do {
7458 inline_generated = 0;
7459 for (i = 0; i < s->nb_inline_fns; ++i) {
7460 fn = s->inline_fns[i];
7461 sym = fn->sym;
7462 if (sym && (sym->c || !(sym->type.t & VT_INLINE))) {
7463 /* the function was used or forced (and then not internal):
7464 generate its code and convert it to a normal function */
7465 fn->sym = NULL;
7466 if (file)
7467 pstrcpy(file->filename, sizeof file->filename, fn->filename);
7468 begin_macro(fn->func_str, 1);
7469 next();
7470 cur_text_section = text_section;
7471 gen_function(sym);
7472 end_macro();
7474 inline_generated = 1;
7477 } while (inline_generated);
7478 file->line_num = ln;
7481 ST_FUNC void free_inline_functions(TCCState *s)
7483 int i;
7484 /* free tokens of unused inline functions */
7485 for (i = 0; i < s->nb_inline_fns; ++i) {
7486 struct InlineFunc *fn = s->inline_fns[i];
7487 if (fn->sym)
7488 tok_str_free(fn->func_str);
7490 dynarray_reset(&s->inline_fns, &s->nb_inline_fns);
7493 /* 'l' is VT_LOCAL or VT_CONST to define default storage type, or VT_CMP
7494 if parsing old style parameter decl list (and FUNC_SYM is set then) */
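/* Illustrative example of the VT_CMP (old-style parameter list) mode:
       int add(a, b)
           int a, b;
       { return a + b; }
   After the ')' the code below calls decl0(VT_CMP, 0, sym) so that the
   "int a, b;" declarations patch the parameter types recorded in sym. */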
7495 static int decl0(int l, int is_for_loop_init, Sym *func_sym)
7497 int v, has_init, r;
7498 CType type, btype;
7499 Sym *sym;
7500 AttributeDef ad, adbase;
7502 while (1) {
7503 if (tok == TOK_STATIC_ASSERT) {
7504 int c;
7506 next();
7507 skip('(');
7508 c = expr_const();
7509 skip(',');
7510 if (c == 0)
7511 tcc_error("%s", get_tok_str(tok, &tokc));
7512 next();
7513 skip(')');
7514 skip(';');
7515 continue;
7517 if (!parse_btype(&btype, &adbase)) {
7518 if (is_for_loop_init)
7519 return 0;
7520 /* skip redundant ';' if not in old parameter decl scope */
7521 if (tok == ';' && l != VT_CMP) {
7522 next();
7523 continue;
7525 if (l != VT_CONST)
7526 break;
7527 if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
7528 /* global asm block */
7529 asm_global_instr();
7530 continue;
7532 if (tok >= TOK_UIDENT) {
7533 /* special test for old K&R protos without explicit int
7534 type. Only accepted when defining global data */
7535 btype.t = VT_INT;
7536 } else {
7537 if (tok != TOK_EOF)
7538 expect("declaration");
7539 break;
7542 if (tok == ';') {
7543 if ((btype.t & VT_BTYPE) == VT_STRUCT) {
7544 int v = btype.ref->v;
7545 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) >= SYM_FIRST_ANOM)
7546 tcc_warning("unnamed struct/union that defines no instances");
7547 next();
7548 continue;
7550 if (IS_ENUM(btype.t)) {
7551 next();
7552 continue;
7555 while (1) { /* iterate thru each declaration */
7556 type = btype;
7557 /* If the base type itself was an array type of unspecified
7558 size (like in 'typedef int arr[]; arr x = {1};') then
7559 we will overwrite the unknown size by the real one for
7560 this decl. We need to unshare the ref symbol holding
7561 that size. */
7562 if ((type.t & VT_ARRAY) && type.ref->c < 0) {
7563 type.ref = sym_push(SYM_FIELD, &type.ref->type, 0, type.ref->c);
7565 ad = adbase;
7566 type_decl(&type, &ad, &v, TYPE_DIRECT);
7567 #if 0
7569 char buf[500];
7570 type_to_str(buf, sizeof(buf), &type, get_tok_str(v, NULL));
7571 printf("type = '%s'\n", buf);
7573 #endif
7574 if ((type.t & VT_BTYPE) == VT_FUNC) {
7575 /* if old style function prototype, we accept a
7576 declaration list */
7577 sym = type.ref;
7578 if (sym->f.func_type == FUNC_OLD && l == VT_CONST)
7579 decl0(VT_CMP, 0, sym);
7580 /* always compile 'extern inline' */
7581 if (type.t & VT_EXTERN)
7582 type.t &= ~VT_INLINE;
7585 if (gnu_ext && (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
7586 ad.asm_label = asm_label_instr();
7587 /* parse one last attribute list, after asm label */
7588 parse_attribute(&ad);
7589 #if 0
7590 /* gcc does not allow __asm__("label") with function definition,
7591 but why not ... */
7592 if (tok == '{')
7593 expect(";");
7594 #endif
7597 #ifdef TCC_TARGET_PE
7598 if (ad.a.dllimport || ad.a.dllexport) {
7599 if (type.t & (VT_STATIC|VT_TYPEDEF))
7600 tcc_error("cannot have dll linkage with static or typedef");
7601 if (ad.a.dllimport) {
7602 if ((type.t & VT_BTYPE) == VT_FUNC)
7603 ad.a.dllimport = 0;
7604 else
7605 type.t |= VT_EXTERN;
7608 #endif
7609 if (tok == '{') {
7610 if (l != VT_CONST)
7611 tcc_error("cannot use local functions");
7612 if ((type.t & VT_BTYPE) != VT_FUNC)
7613 expect("function definition");
7615 /* reject abstract declarators in function definition;
7616 make old style params without decl have int type */
7617 sym = type.ref;
7618 while ((sym = sym->next) != NULL) {
7619 if (!(sym->v & ~SYM_FIELD))
7620 expect("identifier");
7621 if (sym->type.t == VT_VOID)
7622 sym->type = int_type;
7625 /* put function symbol */
7626 type.t &= ~VT_EXTERN;
7627 sym = external_sym(v, &type, 0, &ad);
7628 /* static inline functions are just recorded as a kind
7629 of macro. Their code will be emitted at the end of
7630 the compilation unit only if they are used */
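/* Illustrative example (added): for
       static inline int sq(int x) { return x * x; }
   only the body tokens are saved here; gen_inline_functions() emits the
   code later, and only if sq() was actually used. */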
7631 if (sym->type.t & VT_INLINE) {
7632 struct InlineFunc *fn;
7633 const char *filename;
7635 filename = file ? file->filename : "";
7636 fn = tcc_malloc(sizeof *fn + strlen(filename));
7637 strcpy(fn->filename, filename);
7638 fn->sym = sym;
7639 skip_or_save_block(&fn->func_str);
7640 dynarray_add(&tcc_state->inline_fns,
7641 &tcc_state->nb_inline_fns, fn);
7642 } else {
7643 /* compute text section */
7644 cur_text_section = ad.section;
7645 if (!cur_text_section)
7646 cur_text_section = text_section;
7647 gen_function(sym);
7649 break;
7650 } else {
7651 if (l == VT_CMP) {
7652 /* find parameter in function parameter list */
7653 for (sym = func_sym->next; sym; sym = sym->next)
7654 if ((sym->v & ~SYM_FIELD) == v)
7655 goto found;
7656 tcc_error("declaration for parameter '%s' but no such parameter",
7657 get_tok_str(v, NULL));
7658 found:
7659 if (type.t & VT_STORAGE) /* 'register' is okay */
7660 tcc_error("storage class specified for '%s'",
7661 get_tok_str(v, NULL));
7662 if (sym->type.t != VT_VOID)
7663 tcc_error("redefinition of parameter '%s'",
7664 get_tok_str(v, NULL));
7665 convert_parameter_type(&type);
7666 sym->type = type;
7667 } else if (type.t & VT_TYPEDEF) {
7668 /* save typedefed type */
7669 /* XXX: test storage specifiers ? */
7670 sym = sym_find(v);
7671 if (sym && sym->sym_scope == local_scope) {
7672 if (!is_compatible_types(&sym->type, &type)
7673 || !(sym->type.t & VT_TYPEDEF))
7674 tcc_error("incompatible redefinition of '%s'",
7675 get_tok_str(v, NULL));
7676 sym->type = type;
7677 } else {
7678 sym = sym_push(v, &type, 0, 0);
7680 sym->a = ad.a;
7681 sym->f = ad.f;
7682 } else if ((type.t & VT_BTYPE) == VT_VOID
7683 && !(type.t & VT_EXTERN)) {
7684 tcc_error("declaration of void object");
7685 } else {
7686 r = 0;
7687 if ((type.t & VT_BTYPE) == VT_FUNC) {
7688 /* external function definition */
7689 /* specific case for func_call attribute */
7690 type.ref->f = ad.f;
7691 } else if (!(type.t & VT_ARRAY)) {
7692 /* not lvalue if array */
7693 r |= lvalue_type(type.t);
7695 has_init = (tok == '=');
7696 if (has_init && (type.t & VT_VLA))
7697 tcc_error("variable length array cannot be initialized");
7698 if (((type.t & VT_EXTERN) && (!has_init || l != VT_CONST))
7699 || (type.t & VT_BTYPE) == VT_FUNC
7700 /* as with GCC, uninitialized global arrays with no size
7701 are considered extern: */
7702 || ((type.t & VT_ARRAY) && !has_init
7703 && l == VT_CONST && type.ref->c < 0)
7705 /* external variable or function */
7706 type.t |= VT_EXTERN;
7707 sym = external_sym(v, &type, r, &ad);
7708 if (ad.alias_target) {
7709 ElfSym *esym;
7710 Sym *alias_target;
7711 alias_target = sym_find(ad.alias_target);
7712 esym = elfsym(alias_target);
7713 if (!esym)
7714 tcc_error("unsupported forward __alias__ attribute");
7715 put_extern_sym2(sym, esym->st_shndx, esym->st_value, esym->st_size, 0);
7717 } else {
7718 if (type.t & VT_STATIC)
7719 r |= VT_CONST;
7720 else
7721 r |= l;
7722 if (has_init)
7723 next();
7724 else if (l == VT_CONST)
7725 /* uninitialized global variables may be overridden */
7726 type.t |= VT_EXTERN;
7727 decl_initializer_alloc(&type, &ad, r, has_init, v, l);
7730 if (tok != ',') {
7731 if (is_for_loop_init)
7732 return 1;
7733 skip(';');
7734 break;
7736 next();
7740 return 0;
7743 static void decl(int l)
7745 decl0(l, 0, NULL);
7748 /* ------------------------------------------------------------------------- */