Fix another corner case with C/asm symtable
[tinycc.git] / tccgen.c
blob 2d3f2d0454e75c07cfba0eafef7471e4ba057dea
1 /*
2 * TCC - Tiny C Compiler
3 *
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 #include "tcc.h"
23 /********************************************************/
24 /* global variables */
26 /* loc : local variable index
27 ind : output code index
28 rsym: return symbol
 29 anon_sym: anonymous symbol index */
31 ST_DATA int rsym, anon_sym, ind, loc;
33 ST_DATA Sym *sym_free_first;
34 ST_DATA void **sym_pools;
35 ST_DATA int nb_sym_pools;
37 ST_DATA Sym *global_stack;
38 ST_DATA Sym *local_stack;
39 ST_DATA Sym *define_stack;
40 ST_DATA Sym *global_label_stack;
41 ST_DATA Sym *local_label_stack;
42 static int local_scope;
43 static int in_sizeof;
44 static int section_sym;
46 ST_DATA int vlas_in_scope; /* number of VLAs that are currently in scope */
47 ST_DATA int vla_sp_root_loc; /* vla_sp_loc for SP before any VLAs were pushed */
48 ST_DATA int vla_sp_loc; /* Pointer to variable holding location to store stack pointer on the stack when modifying stack pointer */
50 ST_DATA SValue __vstack[1+VSTACK_SIZE], *vtop, *pvtop;
52 ST_DATA int const_wanted; /* true if constant wanted */
53 ST_DATA int nocode_wanted; /* no code generation wanted */
54 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
55 #define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */
 56 ST_DATA int global_expr;  /* true if compound literals must be allocated globally (used during initializer parsing) */
57 ST_DATA CType func_vt; /* current function return type (used by return instruction) */
58 ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
59 ST_DATA int func_vc;
60 ST_DATA int last_line_num, last_ind, func_ind; /* debug last line number and pc */
61 ST_DATA const char *funcname;
62 ST_DATA int g_debug;
64 ST_DATA CType char_pointer_type, func_old_type, int_type, size_type, ptrdiff_type;
66 ST_DATA struct switch_t {
67 struct case_t {
68 int64_t v1, v2;
69 int sym;
70 } **p; int n; /* list of case ranges */
71 int def_sym; /* default symbol */
72 } *cur_switch; /* current switch */
74 /* ------------------------------------------------------------------------- */
76 static void gen_cast(CType *type);
77 static void gen_cast_s(int t);
78 static inline CType *pointed_type(CType *type);
79 static int is_compatible_types(CType *type1, CType *type2);
80 static int parse_btype(CType *type, AttributeDef *ad);
81 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td);
82 static void parse_expr_type(CType *type);
83 static void init_putv(CType *type, Section *sec, unsigned long c);
84 static void decl_initializer(CType *type, Section *sec, unsigned long c, int first, int size_only);
85 static void block(int *bsym, int *csym, int is_expr);
86 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
87 static void decl(int l);
88 static int decl0(int l, int is_for_loop_init, Sym *);
89 static void expr_eq(void);
90 static void vla_runtime_type_size(CType *type, int *a);
91 static void vla_sp_restore(void);
92 static void vla_sp_restore_root(void);
93 static int is_compatible_unqualified_types(CType *type1, CType *type2);
94 static inline int64_t expr_const64(void);
95 static void vpush64(int ty, unsigned long long v);
96 static void vpush(CType *type);
97 static int gvtst(int inv, int t);
98 static void gen_inline_functions(TCCState *s);
99 static void skip_or_save_block(TokenString **str);
100 static void gv_dup(void);
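/* return true if 't' is a floating point type (including VT_QFLOAT) */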
102 ST_INLN int is_float(int t)
104 int bt;
105 bt = t & VT_BTYPE;
106 return bt == VT_LDOUBLE || bt == VT_DOUBLE || bt == VT_FLOAT || bt == VT_QFLOAT;
109 /* we use our own 'finite' function to avoid potential problems with
 110 non-standard math libs */
111 /* XXX: endianness dependent */
112 ST_FUNC int ieee_finite(double d)
114 int p[4];
115 memcpy(p, &d, sizeof(double));
116 return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
119 /* compiling intel long double natively */
120 #if (defined __i386__ || defined __x86_64__) \
121 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
122 # define TCC_IS_NATIVE_387
123 #endif
125 ST_FUNC void test_lvalue(void)
127 if (!(vtop->r & VT_LVAL))
128 expect("lvalue");
131 ST_FUNC void check_vstack(void)
133 if (pvtop != vtop)
134 tcc_error("internal compiler error: vstack leak (%d)", vtop - pvtop);
137 /* ------------------------------------------------------------------------- */
138 /* vstack debugging aid */
140 #if 0
141 void pv (const char *lbl, int a, int b)
143 int i;
144 for (i = a; i < a + b; ++i) {
145 SValue *p = &vtop[-i];
146 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
147 lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
150 #endif
152 /* ------------------------------------------------------------------------- */
153 /* start of translation unit info */
154 ST_FUNC void tcc_debug_start(TCCState *s1)
156 if (s1->do_debug) {
157 char buf[512];
159 /* file info: full path + filename */
160 section_sym = put_elf_sym(symtab_section, 0, 0,
161 ELFW(ST_INFO)(STB_LOCAL, STT_SECTION), 0,
162 text_section->sh_num, NULL);
163 getcwd(buf, sizeof(buf));
164 #ifdef _WIN32
165 normalize_slashes(buf);
166 #endif
167 pstrcat(buf, sizeof(buf), "/");
168 put_stabs_r(buf, N_SO, 0, 0,
169 text_section->data_offset, text_section, section_sym);
170 put_stabs_r(file->filename, N_SO, 0, 0,
171 text_section->data_offset, text_section, section_sym);
172 last_ind = 0;
173 last_line_num = 0;
 176 /* an ELF symbol of type STT_FILE must be emitted so that STB_LOCAL
177 symbols can be safely used */
178 put_elf_sym(symtab_section, 0, 0,
179 ELFW(ST_INFO)(STB_LOCAL, STT_FILE), 0,
180 SHN_ABS, file->filename);
183 /* put end of translation unit info */
184 ST_FUNC void tcc_debug_end(TCCState *s1)
186 if (!s1->do_debug)
187 return;
188 put_stabs_r(NULL, N_SO, 0, 0,
189 text_section->data_offset, text_section, section_sym);
193 /* generate line number info */
194 ST_FUNC void tcc_debug_line(TCCState *s1)
196 if (!s1->do_debug)
197 return;
198 if ((last_line_num != file->line_num || last_ind != ind)) {
199 put_stabn(N_SLINE, 0, file->line_num, ind - func_ind);
200 last_ind = ind;
201 last_line_num = file->line_num;
205 /* put function symbol */
206 ST_FUNC void tcc_debug_funcstart(TCCState *s1, Sym *sym)
208 char buf[512];
210 if (!s1->do_debug)
211 return;
213 /* stabs info */
 214 /* XXX: we put a dummy type here */
215 snprintf(buf, sizeof(buf), "%s:%c1",
216 funcname, sym->type.t & VT_STATIC ? 'f' : 'F');
217 put_stabs_r(buf, N_FUN, 0, file->line_num, 0,
218 cur_text_section, sym->c);
219 /* //gr gdb wants a line at the function */
220 put_stabn(N_SLINE, 0, file->line_num, 0);
222 last_ind = 0;
223 last_line_num = 0;
226 /* put function size */
227 ST_FUNC void tcc_debug_funcend(TCCState *s1, int size)
229 if (!s1->do_debug)
230 return;
231 put_stabn(N_FUN, 0, 0, size);
234 /* ------------------------------------------------------------------------- */
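/* parse and compile the whole translation unit */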
235 ST_FUNC int tccgen_compile(TCCState *s1)
237 cur_text_section = NULL;
238 funcname = "";
239 anon_sym = SYM_FIRST_ANOM;
240 section_sym = 0;
241 const_wanted = 0;
242 nocode_wanted = 0x80000000;
244 /* define some often used types */
245 int_type.t = VT_INT;
246 char_pointer_type.t = VT_BYTE;
247 mk_pointer(&char_pointer_type);
248 #if PTR_SIZE == 4
249 size_type.t = VT_INT | VT_UNSIGNED;
250 ptrdiff_type.t = VT_INT;
251 #elif LONG_SIZE == 4
252 size_type.t = VT_LLONG | VT_UNSIGNED;
253 ptrdiff_type.t = VT_LLONG;
254 #else
255 size_type.t = VT_LONG | VT_LLONG | VT_UNSIGNED;
256 ptrdiff_type.t = VT_LONG | VT_LLONG;
257 #endif
258 func_old_type.t = VT_FUNC;
259 func_old_type.ref = sym_push(SYM_FIELD, &int_type, 0, 0);
260 func_old_type.ref->f.func_call = FUNC_CDECL;
261 func_old_type.ref->f.func_type = FUNC_OLD;
263 tcc_debug_start(s1);
265 #ifdef TCC_TARGET_ARM
266 arm_init(s1);
267 #endif
269 #ifdef INC_DEBUG
270 printf("%s: **** new file\n", file->filename);
271 #endif
273 parse_flags = PARSE_FLAG_PREPROCESS | PARSE_FLAG_TOK_NUM | PARSE_FLAG_TOK_STR;
274 next();
275 decl(VT_CONST);
276 gen_inline_functions(s1);
277 check_vstack();
279 #ifdef CONFIG_TCC_ASM
280 asm_free_labels(s1);
281 #endif
283 /* end of translation unit info */
284 tcc_debug_end(s1);
285 return 0;
288 /* ------------------------------------------------------------------------- */
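/* return the ELF symbol attached to 's', or NULL if it has none yet */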
289 ST_FUNC ElfSym *elfsym(Sym *s)
291 if (!s || !s->c)
292 return NULL;
293 if (s->c == -1)
294 return &tcc_state->esym_dot;
295 else
296 return &((ElfSym *)symtab_section->data)[s->c];
299 /* apply storage attributes to Elf symbol */
301 static void update_storage(Sym *sym)
303 ElfSym *esym = elfsym(sym);
304 if (!esym)
305 return;
306 if (sym->a.visibility)
307 esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1))
308 | sym->a.visibility;
309 if (sym->a.weak)
310 esym->st_info = ELFW(ST_INFO)(STB_WEAK, ELFW(ST_TYPE)(esym->st_info));
311 #ifdef TCC_TARGET_PE
312 if (sym->a.dllimport)
313 esym->st_other |= ST_PE_IMPORT;
314 if (sym->a.dllexport)
315 esym->st_other |= ST_PE_EXPORT;
316 #endif
317 #if 0
318 printf("storage %s: vis=%d weak=%d exp=%d imp=%d\n",
319 get_tok_str(sym->v, NULL),
320 sym->a.visibility,
321 sym->a.weak,
322 sym->a.dllexport,
323 sym->a.dllimport
325 #endif
328 /* ------------------------------------------------------------------------- */
329 /* update sym->c so that it points to an external symbol in section
330 'section' with value 'value' */
332 ST_FUNC void put_extern_sym2(Sym *sym, Section *section,
333 addr_t value, unsigned long size,
334 int can_add_underscore)
336 int sym_type, sym_bind, sh_num, info, other, t;
337 ElfSym *esym;
338 const char *name;
339 char buf1[256];
340 #ifdef CONFIG_TCC_BCHECK
341 char buf[32];
342 #endif
344 if (section == NULL)
345 sh_num = SHN_UNDEF;
346 else if (section == SECTION_ABS)
347 sh_num = SHN_ABS;
348 else
349 sh_num = section->sh_num;
351 if (!sym->c) {
352 name = get_tok_str(sym->v, NULL);
353 #ifdef CONFIG_TCC_BCHECK
354 if (tcc_state->do_bounds_check) {
355 /* XXX: avoid doing that for statics ? */
 356 /* if bounds checking is activated, we change some function
357 names by adding the "__bound" prefix */
358 switch(sym->v) {
359 #ifdef TCC_TARGET_PE
360 /* XXX: we rely only on malloc hooks */
361 case TOK_malloc:
362 case TOK_free:
363 case TOK_realloc:
364 case TOK_memalign:
365 case TOK_calloc:
366 #endif
367 case TOK_memcpy:
368 case TOK_memmove:
369 case TOK_memset:
370 case TOK_strlen:
371 case TOK_strcpy:
372 case TOK_alloca:
373 strcpy(buf, "__bound_");
374 strcat(buf, name);
375 name = buf;
376 break;
379 #endif
380 t = sym->type.t;
381 if ((t & VT_BTYPE) == VT_FUNC) {
382 sym_type = STT_FUNC;
383 } else if ((t & VT_BTYPE) == VT_VOID) {
384 sym_type = STT_NOTYPE;
385 } else {
386 sym_type = STT_OBJECT;
388 if (t & VT_STATIC)
389 sym_bind = STB_LOCAL;
390 else
391 sym_bind = STB_GLOBAL;
392 other = 0;
393 #ifdef TCC_TARGET_PE
394 if (sym_type == STT_FUNC && sym->type.ref) {
395 Sym *ref = sym->type.ref;
396 if (ref->f.func_call == FUNC_STDCALL && can_add_underscore) {
397 sprintf(buf1, "_%s@%d", name, ref->f.func_args * PTR_SIZE);
398 name = buf1;
399 other |= ST_PE_STDCALL;
400 can_add_underscore = 0;
403 #endif
404 if (tcc_state->leading_underscore && can_add_underscore) {
405 buf1[0] = '_';
406 pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
407 name = buf1;
409 if (sym->asm_label)
410 name = get_tok_str(sym->asm_label, NULL);
411 info = ELFW(ST_INFO)(sym_bind, sym_type);
412 sym->c = set_elf_sym(symtab_section, value, size, info, other, sh_num, name);
413 } else {
414 esym = elfsym(sym);
415 esym->st_value = value;
416 esym->st_size = size;
417 esym->st_shndx = sh_num;
419 update_storage(sym);
422 ST_FUNC void put_extern_sym(Sym *sym, Section *section,
423 addr_t value, unsigned long size)
425 put_extern_sym2(sym, section, value, size, 1);
428 /* add a new relocation entry to symbol 'sym' in section 's' */
429 ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
430 addr_t addend)
432 int c = 0;
434 if (nocode_wanted && s == cur_text_section)
435 return;
437 if (sym) {
438 if (0 == sym->c)
439 put_extern_sym(sym, NULL, 0, 0);
440 c = sym->c;
443 /* now we can add ELF relocation info */
444 put_elf_reloca(symtab_section, s, offset, type, c, addend);
447 #if PTR_SIZE == 4
448 ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
450 greloca(s, sym, offset, type, 0);
452 #endif
454 /* ------------------------------------------------------------------------- */
455 /* symbol allocator */
456 static Sym *__sym_malloc(void)
458 Sym *sym_pool, *sym, *last_sym;
459 int i;
461 sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
462 dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);
464 last_sym = sym_free_first;
465 sym = sym_pool;
466 for(i = 0; i < SYM_POOL_NB; i++) {
467 sym->next = last_sym;
468 last_sym = sym;
469 sym++;
471 sym_free_first = last_sym;
472 return last_sym;
475 static inline Sym *sym_malloc(void)
477 Sym *sym;
478 #ifndef SYM_DEBUG
479 sym = sym_free_first;
480 if (!sym)
481 sym = __sym_malloc();
482 sym_free_first = sym->next;
483 return sym;
484 #else
485 sym = tcc_malloc(sizeof(Sym));
486 return sym;
487 #endif
490 ST_INLN void sym_free(Sym *sym)
492 #ifndef SYM_DEBUG
493 sym->next = sym_free_first;
494 sym_free_first = sym;
495 #else
496 tcc_free(sym);
497 #endif
500 /* push, without hashing */
501 ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, int c)
503 Sym *s;
505 s = sym_malloc();
506 memset(s, 0, sizeof *s);
507 s->v = v;
508 s->type.t = t;
509 s->c = c;
510 /* add in stack */
511 s->prev = *ps;
512 *ps = s;
513 return s;
516 /* find a symbol and return its associated structure. 's' is the top
517 of the symbol stack */
518 ST_FUNC Sym *sym_find2(Sym *s, int v)
520 while (s) {
521 if (s->v == v)
522 return s;
523 else if (s->v == -1)
524 return NULL;
525 s = s->prev;
527 return NULL;
530 /* structure lookup */
531 ST_INLN Sym *struct_find(int v)
533 v -= TOK_IDENT;
534 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
535 return NULL;
536 return table_ident[v]->sym_struct;
539 /* find an identifier */
540 ST_INLN Sym *sym_find(int v)
542 v -= TOK_IDENT;
543 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
544 return NULL;
545 return table_ident[v]->sym_identifier;
548 /* push a given symbol on the symbol stack */
549 ST_FUNC Sym *sym_push(int v, CType *type, int r, int c)
551 Sym *s, **ps;
552 TokenSym *ts;
554 if (local_stack)
555 ps = &local_stack;
556 else
557 ps = &global_stack;
558 s = sym_push2(ps, v, type->t, c);
559 s->type.ref = type->ref;
560 s->r = r;
561 /* don't record fields or anonymous symbols */
562 /* XXX: simplify */
563 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
564 /* record symbol in token array */
565 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
566 if (v & SYM_STRUCT)
567 ps = &ts->sym_struct;
568 else
569 ps = &ts->sym_identifier;
570 s->prev_tok = *ps;
571 *ps = s;
572 s->sym_scope = local_scope;
573 if (s->prev_tok && s->prev_tok->sym_scope == s->sym_scope)
574 tcc_error("redeclaration of '%s'",
575 get_tok_str(v & ~SYM_STRUCT, NULL));
577 return s;
580 /* push a global identifier */
581 ST_FUNC Sym *global_identifier_push_1(Sym **ptop, int v, int t, int c)
583 Sym *s, **ps;
584 s = sym_push2(ptop, v, t, c);
585 /* don't record anonymous symbol */
586 if (v < SYM_FIRST_ANOM) {
587 ps = &table_ident[v - TOK_IDENT]->sym_identifier;
 588 /* modify the topmost local identifier, so that
589 sym_identifier will point to 's' when popped */
590 while (*ps != NULL && (*ps)->sym_scope)
591 ps = &(*ps)->prev_tok;
592 s->prev_tok = *ps;
593 *ps = s;
595 return s;
598 static Sym *global_identifier_push(int v, int t, int c)
600 return global_identifier_push_1(&global_stack, v, t, c);
603 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
604 pop them yet from the list, but do remove them from the token array. */
605 ST_FUNC void sym_pop(Sym **ptop, Sym *b, int keep)
607 Sym *s, *ss, **ps;
608 TokenSym *ts;
609 int v;
611 s = *ptop;
612 while(s != b) {
613 ss = s->prev;
614 v = s->v;
615 /* remove symbol in token array */
616 /* XXX: simplify */
617 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
618 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
619 if (v & SYM_STRUCT)
620 ps = &ts->sym_struct;
621 else
622 ps = &ts->sym_identifier;
623 *ps = s->prev_tok;
625 if (!keep)
626 sym_free(s);
627 s = ss;
629 if (!keep)
630 *ptop = b;
633 /* ------------------------------------------------------------------------- */
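/* push a new value (type, register and constant) onto the value stack */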
635 static void vsetc(CType *type, int r, CValue *vc)
637 int v;
639 if (vtop >= vstack + (VSTACK_SIZE - 1))
640 tcc_error("memory full (vstack)");
 641 /* cannot leave the cpu flags live if other instructions are generated. Also
642 avoid leaving VT_JMP anywhere except on the top of the stack
643 because it would complicate the code generator.
645 Don't do this when nocode_wanted. vtop might come from
646 !nocode_wanted regions (see 88_codeopt.c) and transforming
647 it to a register without actually generating code is wrong
 648 as its value might still be used for real. All values
649 we push under nocode_wanted will eventually be popped
650 again, so that the VT_CMP/VT_JMP value will be in vtop
651 when code is unsuppressed again.
653 Same logic below in vswap(); */
654 if (vtop >= vstack && !nocode_wanted) {
655 v = vtop->r & VT_VALMASK;
656 if (v == VT_CMP || (v & ~1) == VT_JMP)
657 gv(RC_INT);
660 vtop++;
661 vtop->type = *type;
662 vtop->r = r;
663 vtop->r2 = VT_CONST;
664 vtop->c = *vc;
665 vtop->sym = NULL;
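/* swap the two top entries of the value stack */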
668 ST_FUNC void vswap(void)
670 SValue tmp;
671 /* cannot vswap cpu flags. See comment at vsetc() above */
672 if (vtop >= vstack && !nocode_wanted) {
673 int v = vtop->r & VT_VALMASK;
674 if (v == VT_CMP || (v & ~1) == VT_JMP)
675 gv(RC_INT);
677 tmp = vtop[0];
678 vtop[0] = vtop[-1];
679 vtop[-1] = tmp;
682 /* pop stack value */
683 ST_FUNC void vpop(void)
685 int v;
686 v = vtop->r & VT_VALMASK;
687 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
688 /* for x86, we need to pop the FP stack */
689 if (v == TREG_ST0) {
690 o(0xd8dd); /* fstp %st(0) */
691 } else
692 #endif
693 if (v == VT_JMP || v == VT_JMPI) {
 694 /* need to resolve the jump correctly if && or || was used without a test */
695 gsym(vtop->c.i);
697 vtop--;
 700 /* push constant of type "type" with an unspecified value */
701 ST_FUNC void vpush(CType *type)
703 vset(type, VT_CONST, 0);
706 /* push integer constant */
707 ST_FUNC void vpushi(int v)
709 CValue cval;
710 cval.i = v;
711 vsetc(&int_type, VT_CONST, &cval);
714 /* push a pointer sized constant */
715 static void vpushs(addr_t v)
717 CValue cval;
718 cval.i = v;
719 vsetc(&size_type, VT_CONST, &cval);
722 /* push arbitrary 64bit constant */
723 ST_FUNC void vpush64(int ty, unsigned long long v)
725 CValue cval;
726 CType ctype;
727 ctype.t = ty;
728 ctype.ref = NULL;
729 cval.i = v;
730 vsetc(&ctype, VT_CONST, &cval);
733 /* push long long constant */
734 static inline void vpushll(long long v)
736 vpush64(VT_LLONG, v);
739 ST_FUNC void vset(CType *type, int r, int v)
741 CValue cval;
743 cval.i = v;
744 vsetc(type, r, &cval);
747 static void vseti(int r, int v)
749 CType type;
750 type.t = VT_INT;
751 type.ref = NULL;
752 vset(&type, r, v);
755 ST_FUNC void vpushv(SValue *v)
757 if (vtop >= vstack + (VSTACK_SIZE - 1))
758 tcc_error("memory full (vstack)");
759 vtop++;
760 *vtop = *v;
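/* duplicate the top entry of the value stack */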
763 static void vdup(void)
765 vpushv(vtop);
768 /* rotate n first stack elements to the bottom
769 I1 ... In -> I2 ... In I1 [top is right]
771 ST_FUNC void vrotb(int n)
773 int i;
774 SValue tmp;
776 tmp = vtop[-n + 1];
777 for(i=-n+1;i!=0;i++)
778 vtop[i] = vtop[i+1];
779 vtop[0] = tmp;
782 /* rotate the n elements before entry e towards the top
783 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
785 ST_FUNC void vrote(SValue *e, int n)
787 int i;
788 SValue tmp;
790 tmp = *e;
791 for(i = 0;i < n - 1; i++)
792 e[-i] = e[-i - 1];
793 e[-n + 1] = tmp;
796 /* rotate n first stack elements to the top
797 I1 ... In -> In I1 ... I(n-1) [top is right]
799 ST_FUNC void vrott(int n)
801 vrote(vtop, n);
804 /* push a symbol value of TYPE */
805 static inline void vpushsym(CType *type, Sym *sym)
807 CValue cval;
808 cval.i = 0;
809 vsetc(type, VT_CONST | VT_SYM, &cval);
810 vtop->sym = sym;
813 /* Return a static symbol pointing to a section */
814 ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
816 int v;
817 Sym *sym;
819 v = anon_sym++;
820 sym = global_identifier_push(v, type->t | VT_STATIC, 0);
821 sym->type.ref = type->ref;
822 sym->r = VT_CONST | VT_SYM;
823 put_extern_sym(sym, sec, offset, size);
824 return sym;
827 /* push a reference to a section offset by adding a dummy symbol */
828 static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
830 vpushsym(type, get_sym_ref(type, sec, offset, size));
 833 /* define a new external reference to a symbol 'v' of type 'type' */
834 ST_FUNC Sym *external_global_sym(int v, CType *type, int r)
836 Sym *s;
838 s = sym_find(v);
839 if (!s) {
840 /* push forward reference */
841 s = global_identifier_push(v, type->t | VT_EXTERN, 0);
842 s->type.ref = type->ref;
843 s->r = r | VT_CONST | VT_SYM;
845 return s;
848 /* Merge some storage attributes. */
849 static void patch_storage(Sym *sym, AttributeDef *ad, CType *type)
851 if (type) {
852 if ((sym->type.t & VT_BTYPE) == VT_VOID) /* from asm */
853 sym->type = *type;
854 else if (!is_compatible_types(&sym->type, type))
855 tcc_error("incompatible types for redefinition of '%s'",
856 get_tok_str(sym->v, NULL));
858 #ifdef TCC_TARGET_PE
859 if (sym->a.dllimport != ad->a.dllimport)
860 tcc_error("incompatible dll linkage for redefinition of '%s'",
861 get_tok_str(sym->v, NULL));
862 #endif
863 sym->a.dllexport |= ad->a.dllexport;
864 sym->a.weak |= ad->a.weak;
865 if (ad->a.visibility) {
866 int vis = sym->a.visibility;
867 int vis2 = ad->a.visibility;
868 if (vis == STV_DEFAULT)
869 vis = vis2;
870 else if (vis2 != STV_DEFAULT)
871 vis = (vis < vis2) ? vis : vis2;
872 sym->a.visibility = vis;
874 if (ad->a.aligned)
875 sym->a.aligned = ad->a.aligned;
876 if (ad->asm_label)
877 sym->asm_label = ad->asm_label;
878 update_storage(sym);
881 /* define a new external reference to a symbol 'v' */
882 static Sym *external_sym(int v, CType *type, int r, AttributeDef *ad)
884 Sym *s;
885 s = sym_find(v);
886 if (!s) {
887 /* push forward reference */
888 s = sym_push(v, type, r | VT_CONST | VT_SYM, 0);
889 s->type.t |= VT_EXTERN;
890 s->a = ad->a;
891 s->sym_scope = 0;
892 } else {
893 if (s->type.ref == func_old_type.ref) {
894 s->type.ref = type->ref;
895 s->r = r | VT_CONST | VT_SYM;
896 s->type.t |= VT_EXTERN;
898 patch_storage(s, ad, type);
900 return s;
903 /* push a reference to global symbol v */
904 ST_FUNC void vpush_global_sym(CType *type, int v)
906 vpushsym(type, external_global_sym(v, type, 0));
909 /* save registers up to (vtop - n) stack entry */
910 ST_FUNC void save_regs(int n)
912 SValue *p, *p1;
913 for(p = vstack, p1 = vtop - n; p <= p1; p++)
914 save_reg(p->r);
917 /* save r to the memory stack, and mark it as being free */
918 ST_FUNC void save_reg(int r)
920 save_reg_upstack(r, 0);
923 /* save r to the memory stack, and mark it as being free,
924 if seen up to (vtop - n) stack entry */
925 ST_FUNC void save_reg_upstack(int r, int n)
927 int l, saved, size, align;
928 SValue *p, *p1, sv;
929 CType *type;
931 if ((r &= VT_VALMASK) >= VT_CONST)
932 return;
933 if (nocode_wanted)
934 return;
936 /* modify all stack values */
937 saved = 0;
938 l = 0;
939 for(p = vstack, p1 = vtop - n; p <= p1; p++) {
940 if ((p->r & VT_VALMASK) == r ||
941 ((p->type.t & VT_BTYPE) == VT_LLONG && (p->r2 & VT_VALMASK) == r)) {
942 /* must save value on stack if not already done */
943 if (!saved) {
944 /* NOTE: must reload 'r' because r might be equal to r2 */
945 r = p->r & VT_VALMASK;
946 /* store register in the stack */
947 type = &p->type;
948 if ((p->r & VT_LVAL) ||
949 (!is_float(type->t) && (type->t & VT_BTYPE) != VT_LLONG))
950 #if PTR_SIZE == 8
951 type = &char_pointer_type;
952 #else
953 type = &int_type;
954 #endif
955 size = type_size(type, &align);
956 loc = (loc - size) & -align;
957 sv.type.t = type->t;
958 sv.r = VT_LOCAL | VT_LVAL;
959 sv.c.i = loc;
960 store(r, &sv);
961 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
962 /* x86 specific: need to pop fp register ST0 if saved */
963 if (r == TREG_ST0) {
964 o(0xd8dd); /* fstp %st(0) */
966 #endif
967 #if PTR_SIZE == 4
968 /* special long long case */
969 if ((type->t & VT_BTYPE) == VT_LLONG) {
970 sv.c.i += 4;
971 store(p->r2, &sv);
973 #endif
974 l = loc;
975 saved = 1;
977 /* mark that stack entry as being saved on the stack */
978 if (p->r & VT_LVAL) {
979 /* also clear the bounded flag because the
980 relocation address of the function was stored in
981 p->c.i */
982 p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
983 } else {
984 p->r = lvalue_type(p->type.t) | VT_LOCAL;
986 p->r2 = VT_CONST;
987 p->c.i = l;
992 #ifdef TCC_TARGET_ARM
993 /* find a register of class 'rc2' with at most one reference on stack.
994 * If none, call get_reg(rc) */
995 ST_FUNC int get_reg_ex(int rc, int rc2)
997 int r;
998 SValue *p;
1000 for(r=0;r<NB_REGS;r++) {
1001 if (reg_classes[r] & rc2) {
1002 int n;
1003 n=0;
1004 for(p = vstack; p <= vtop; p++) {
1005 if ((p->r & VT_VALMASK) == r ||
1006 (p->r2 & VT_VALMASK) == r)
1007 n++;
1009 if (n <= 1)
1010 return r;
1013 return get_reg(rc);
1015 #endif
1017 /* find a free register of class 'rc'. If none, save one register */
1018 ST_FUNC int get_reg(int rc)
1020 int r;
1021 SValue *p;
1023 /* find a free register */
1024 for(r=0;r<NB_REGS;r++) {
1025 if (reg_classes[r] & rc) {
1026 if (nocode_wanted)
1027 return r;
1028 for(p=vstack;p<=vtop;p++) {
1029 if ((p->r & VT_VALMASK) == r ||
1030 (p->r2 & VT_VALMASK) == r)
1031 goto notfound;
1033 return r;
1035 notfound: ;
1038 /* no register left : free the first one on the stack (VERY
1039 IMPORTANT to start from the bottom to ensure that we don't
1040 spill registers used in gen_opi()) */
1041 for(p=vstack;p<=vtop;p++) {
1042 /* look at second register (if long long) */
1043 r = p->r2 & VT_VALMASK;
1044 if (r < VT_CONST && (reg_classes[r] & rc))
1045 goto save_found;
1046 r = p->r & VT_VALMASK;
1047 if (r < VT_CONST && (reg_classes[r] & rc)) {
1048 save_found:
1049 save_reg(r);
1050 return r;
 1053 /* Should never come here */
1054 return -1;
1057 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1058 if needed */
1059 static void move_reg(int r, int s, int t)
1061 SValue sv;
1063 if (r != s) {
1064 save_reg(r);
1065 sv.type.t = t;
1066 sv.type.ref = NULL;
1067 sv.r = s;
1068 sv.c.i = 0;
1069 load(r, &sv);
1073 /* get address of vtop (vtop MUST BE an lvalue) */
1074 ST_FUNC void gaddrof(void)
1076 vtop->r &= ~VT_LVAL;
1077 /* tricky: if saved lvalue, then we can go back to lvalue */
1078 if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
1079 vtop->r = (vtop->r & ~(VT_VALMASK | VT_LVAL_TYPE)) | VT_LOCAL | VT_LVAL;
1084 #ifdef CONFIG_TCC_BCHECK
1085 /* generate lvalue bound code */
1086 static void gbound(void)
1088 int lval_type;
1089 CType type1;
1091 vtop->r &= ~VT_MUSTBOUND;
1092 /* if lvalue, then use checking code before dereferencing */
1093 if (vtop->r & VT_LVAL) {
1094 /* if not VT_BOUNDED value, then make one */
1095 if (!(vtop->r & VT_BOUNDED)) {
1096 lval_type = vtop->r & (VT_LVAL_TYPE | VT_LVAL);
1097 /* must save type because we must set it to int to get pointer */
1098 type1 = vtop->type;
1099 vtop->type.t = VT_PTR;
1100 gaddrof();
1101 vpushi(0);
1102 gen_bounded_ptr_add();
1103 vtop->r |= lval_type;
1104 vtop->type = type1;
1106 /* then check for dereferencing */
1107 gen_bounded_ptr_deref();
1110 #endif
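/* advance the address of the current bit-field word by 'o' bytes and
   retype it as an unsigned byte lvalue */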
1112 static void incr_bf_adr(int o)
1114 vtop->type = char_pointer_type;
1115 gaddrof();
1116 vpushi(o);
1117 gen_op('+');
1118 vtop->type.t = (vtop->type.t & ~(VT_BTYPE|VT_DEFSIGN))
1119 | (VT_BYTE|VT_UNSIGNED);
1120 vtop->r = (vtop->r & ~VT_LVAL_TYPE)
1121 | (VT_LVAL_BYTE|VT_LVAL_UNSIGNED|VT_LVAL);
1124 /* single-byte load mode for packed or otherwise unaligned bitfields */
1125 static void load_packed_bf(CType *type, int bit_pos, int bit_size)
1127 int n, o, bits;
1128 save_reg_upstack(vtop->r, 1);
1129 vpush64(type->t & VT_BTYPE, 0); // B X
1130 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
1131 do {
1132 vswap(); // X B
1133 incr_bf_adr(o);
1134 vdup(); // X B B
1135 n = 8 - bit_pos;
1136 if (n > bit_size)
1137 n = bit_size;
1138 if (bit_pos)
1139 vpushi(bit_pos), gen_op(TOK_SHR), bit_pos = 0; // X B Y
1140 if (n < 8)
1141 vpushi((1 << n) - 1), gen_op('&');
1142 gen_cast(type);
1143 if (bits)
1144 vpushi(bits), gen_op(TOK_SHL);
1145 vrotb(3); // B Y X
1146 gen_op('|'); // B X
1147 bits += n, bit_size -= n, o = 1;
1148 } while (bit_size);
1149 vswap(), vpop();
1150 if (!(type->t & VT_UNSIGNED)) {
1151 n = ((type->t & VT_BTYPE) == VT_LLONG ? 64 : 32) - bits;
1152 vpushi(n), gen_op(TOK_SHL);
1153 vpushi(n), gen_op(TOK_SAR);
1157 /* single-byte store mode for packed or otherwise unaligned bitfields */
1158 static void store_packed_bf(int bit_pos, int bit_size)
1160 int bits, n, o, m, c;
1162 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1163 vswap(); // X B
1164 save_reg_upstack(vtop->r, 1);
1165 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
1166 do {
1167 incr_bf_adr(o); // X B
1168 vswap(); //B X
1169 c ? vdup() : gv_dup(); // B V X
1170 vrott(3); // X B V
1171 if (bits)
1172 vpushi(bits), gen_op(TOK_SHR);
1173 if (bit_pos)
1174 vpushi(bit_pos), gen_op(TOK_SHL);
1175 n = 8 - bit_pos;
1176 if (n > bit_size)
1177 n = bit_size;
1178 if (n < 8) {
1179 m = ((1 << n) - 1) << bit_pos;
1180 vpushi(m), gen_op('&'); // X B V1
1181 vpushv(vtop-1); // X B V1 B
1182 vpushi(m & 0x80 ? ~m & 0x7f : ~m);
1183 gen_op('&'); // X B V1 B1
1184 gen_op('|'); // X B V2
1186 vdup(), vtop[-1] = vtop[-2]; // X B B V2
1187 vstore(), vpop(); // X B
1188 bits += n, bit_size -= n, bit_pos = 0, o = 1;
1189 } while (bit_size);
1190 vpop(), vpop();
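/* if the bit-field has an auxiliary access type recorded in its type ref,
   retype the SValue accordingly; returns that type (VT_STRUCT selects the
   single-byte packed access path above), or 0 when there is no type ref */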
1193 static int adjust_bf(SValue *sv, int bit_pos, int bit_size)
1195 int t;
1196 if (0 == sv->type.ref)
1197 return 0;
1198 t = sv->type.ref->auxtype;
1199 if (t != -1 && t != VT_STRUCT) {
1200 sv->type.t = (sv->type.t & ~VT_BTYPE) | t;
1201 sv->r = (sv->r & ~VT_LVAL_TYPE) | lvalue_type(sv->type.t);
1203 return t;
 1206 /* store vtop in a register belonging to class 'rc'. lvalues are
 1207 converted to values. Cannot be used for values that cannot be
 1208 converted to a register value (such as structures). */
1209 ST_FUNC int gv(int rc)
1211 int r, bit_pos, bit_size, size, align, rc2;
1213 /* NOTE: get_reg can modify vstack[] */
1214 if (vtop->type.t & VT_BITFIELD) {
1215 CType type;
1217 bit_pos = BIT_POS(vtop->type.t);
1218 bit_size = BIT_SIZE(vtop->type.t);
1219 /* remove bit field info to avoid loops */
1220 vtop->type.t &= ~VT_STRUCT_MASK;
1222 type.ref = NULL;
1223 type.t = vtop->type.t & VT_UNSIGNED;
1224 if ((vtop->type.t & VT_BTYPE) == VT_BOOL)
1225 type.t |= VT_UNSIGNED;
1227 r = adjust_bf(vtop, bit_pos, bit_size);
1229 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
1230 type.t |= VT_LLONG;
1231 else
1232 type.t |= VT_INT;
1234 if (r == VT_STRUCT) {
1235 load_packed_bf(&type, bit_pos, bit_size);
1236 } else {
1237 int bits = (type.t & VT_BTYPE) == VT_LLONG ? 64 : 32;
1238 /* cast to int to propagate signedness in following ops */
1239 gen_cast(&type);
1240 /* generate shifts */
1241 vpushi(bits - (bit_pos + bit_size));
1242 gen_op(TOK_SHL);
1243 vpushi(bits - bit_size);
1244 /* NOTE: transformed to SHR if unsigned */
1245 gen_op(TOK_SAR);
1247 r = gv(rc);
1248 } else {
1249 if (is_float(vtop->type.t) &&
1250 (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1251 unsigned long offset;
1252 /* CPUs usually cannot use float constants, so we store them
1253 generically in data segment */
1254 size = type_size(&vtop->type, &align);
1255 if (NODATA_WANTED)
1256 size = 0, align = 1;
1257 offset = section_add(data_section, size, align);
1258 vpush_ref(&vtop->type, data_section, offset, size);
1259 vswap();
1260 init_putv(&vtop->type, data_section, offset);
1261 vtop->r |= VT_LVAL;
1263 #ifdef CONFIG_TCC_BCHECK
1264 if (vtop->r & VT_MUSTBOUND)
1265 gbound();
1266 #endif
1268 r = vtop->r & VT_VALMASK;
1269 rc2 = (rc & RC_FLOAT) ? RC_FLOAT : RC_INT;
1270 #ifndef TCC_TARGET_ARM64
1271 if (rc == RC_IRET)
1272 rc2 = RC_LRET;
1273 #ifdef TCC_TARGET_X86_64
1274 else if (rc == RC_FRET)
1275 rc2 = RC_QRET;
1276 #endif
1277 #endif
1278 /* need to reload if:
1279 - constant
1280 - lvalue (need to dereference pointer)
1281 - already a register, but not in the right class */
1282 if (r >= VT_CONST
1283 || (vtop->r & VT_LVAL)
1284 || !(reg_classes[r] & rc)
1285 #if PTR_SIZE == 8
1286 || ((vtop->type.t & VT_BTYPE) == VT_QLONG && !(reg_classes[vtop->r2] & rc2))
1287 || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT && !(reg_classes[vtop->r2] & rc2))
1288 #else
1289 || ((vtop->type.t & VT_BTYPE) == VT_LLONG && !(reg_classes[vtop->r2] & rc2))
1290 #endif
1293 r = get_reg(rc);
1294 #if PTR_SIZE == 8
1295 if (((vtop->type.t & VT_BTYPE) == VT_QLONG) || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT)) {
1296 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
1297 #else
1298 if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
1299 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
1300 unsigned long long ll;
1301 #endif
1302 int r2, original_type;
1303 original_type = vtop->type.t;
1304 /* two register type load : expand to two words
1305 temporarily */
1306 #if PTR_SIZE == 4
1307 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1308 /* load constant */
1309 ll = vtop->c.i;
1310 vtop->c.i = ll; /* first word */
1311 load(r, vtop);
1312 vtop->r = r; /* save register value */
1313 vpushi(ll >> 32); /* second word */
1314 } else
1315 #endif
1316 if (vtop->r & VT_LVAL) {
 1317 /* We do not want to modify the long long
 1318 pointer here, so the safest (and least
 1319 efficient) way is to save all the other registers
 1320 on the stack. XXX: totally inefficient. */
1321 #if 0
1322 save_regs(1);
1323 #else
1324 /* lvalue_save: save only if used further down the stack */
1325 save_reg_upstack(vtop->r, 1);
1326 #endif
1327 /* load from memory */
1328 vtop->type.t = load_type;
1329 load(r, vtop);
1330 vdup();
1331 vtop[-1].r = r; /* save register value */
1332 /* increment pointer to get second word */
1333 vtop->type.t = addr_type;
1334 gaddrof();
1335 vpushi(load_size);
1336 gen_op('+');
1337 vtop->r |= VT_LVAL;
1338 vtop->type.t = load_type;
1339 } else {
1340 /* move registers */
1341 load(r, vtop);
1342 vdup();
1343 vtop[-1].r = r; /* save register value */
1344 vtop->r = vtop[-1].r2;
1346 /* Allocate second register. Here we rely on the fact that
1347 get_reg() tries first to free r2 of an SValue. */
1348 r2 = get_reg(rc2);
1349 load(r2, vtop);
1350 vpop();
1351 /* write second register */
1352 vtop->r2 = r2;
1353 vtop->type.t = original_type;
1354 } else if ((vtop->r & VT_LVAL) && !is_float(vtop->type.t)) {
1355 int t1, t;
1356 /* lvalue of scalar type : need to use lvalue type
1357 because of possible cast */
1358 t = vtop->type.t;
1359 t1 = t;
1360 /* compute memory access type */
1361 if (vtop->r & VT_LVAL_BYTE)
1362 t = VT_BYTE;
1363 else if (vtop->r & VT_LVAL_SHORT)
1364 t = VT_SHORT;
1365 if (vtop->r & VT_LVAL_UNSIGNED)
1366 t |= VT_UNSIGNED;
1367 vtop->type.t = t;
1368 load(r, vtop);
1369 /* restore wanted type */
1370 vtop->type.t = t1;
1371 } else {
1372 /* one register type load */
1373 load(r, vtop);
1376 vtop->r = r;
1377 #ifdef TCC_TARGET_C67
1378 /* uses register pairs for doubles */
1379 if ((vtop->type.t & VT_BTYPE) == VT_DOUBLE)
1380 vtop->r2 = r+1;
1381 #endif
1383 return r;
1386 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1387 ST_FUNC void gv2(int rc1, int rc2)
1389 int v;
 1391 /* generate the more generic register first. But VT_JMP or VT_CMP
1392 values must be generated first in all cases to avoid possible
1393 reload errors */
1394 v = vtop[0].r & VT_VALMASK;
1395 if (v != VT_CMP && (v & ~1) != VT_JMP && rc1 <= rc2) {
1396 vswap();
1397 gv(rc1);
1398 vswap();
1399 gv(rc2);
1400 /* test if reload is needed for first register */
1401 if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
1402 vswap();
1403 gv(rc1);
1404 vswap();
1406 } else {
1407 gv(rc2);
1408 vswap();
1409 gv(rc1);
1410 vswap();
1411 /* test if reload is needed for first register */
1412 if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
1413 gv(rc2);
1418 #ifndef TCC_TARGET_ARM64
 1419 /* wrapper around RC_FRET to return a register class by type */
1420 static int rc_fret(int t)
1422 #ifdef TCC_TARGET_X86_64
1423 if (t == VT_LDOUBLE) {
1424 return RC_ST0;
1426 #endif
1427 return RC_FRET;
1429 #endif
1431 /* wrapper around REG_FRET to return a register by type */
1432 static int reg_fret(int t)
1434 #ifdef TCC_TARGET_X86_64
1435 if (t == VT_LDOUBLE) {
1436 return TREG_ST0;
1438 #endif
1439 return REG_FRET;
1442 #if PTR_SIZE == 4
 1443 /* expand a 64-bit value on the stack into two ints */
1444 static void lexpand(void)
1446 int u, v;
1447 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1448 v = vtop->r & (VT_VALMASK | VT_LVAL);
1449 if (v == VT_CONST) {
1450 vdup();
1451 vtop[0].c.i >>= 32;
1452 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1453 vdup();
1454 vtop[0].c.i += 4;
1455 } else {
1456 gv(RC_INT);
1457 vdup();
1458 vtop[0].r = vtop[-1].r2;
1459 vtop[0].r2 = vtop[-1].r2 = VT_CONST;
1461 vtop[0].type.t = vtop[-1].type.t = VT_INT | u;
1463 #endif
1465 #ifdef TCC_TARGET_ARM
1466 /* expand long long on stack */
1467 ST_FUNC void lexpand_nr(void)
1469 int u,v;
1471 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1472 vdup();
1473 vtop->r2 = VT_CONST;
1474 vtop->type.t = VT_INT | u;
1475 v=vtop[-1].r & (VT_VALMASK | VT_LVAL);
1476 if (v == VT_CONST) {
1477 vtop[-1].c.i = vtop->c.i;
1478 vtop->c.i = vtop->c.i >> 32;
1479 vtop->r = VT_CONST;
1480 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1481 vtop->c.i += 4;
1482 vtop->r = vtop[-1].r;
1483 } else if (v > VT_CONST) {
1484 vtop--;
1485 lexpand();
1486 } else
1487 vtop->r = vtop[-1].r2;
1488 vtop[-1].r2 = VT_CONST;
1489 vtop[-1].type.t = VT_INT | u;
1491 #endif
1493 #if PTR_SIZE == 4
1494 /* build a long long from two ints */
1495 static void lbuild(int t)
1497 gv2(RC_INT, RC_INT);
1498 vtop[-1].r2 = vtop[0].r;
1499 vtop[-1].type.t = t;
1500 vpop();
1502 #endif
1504 /* convert stack entry to register and duplicate its value in another
1505 register */
1506 static void gv_dup(void)
1508 int rc, t, r, r1;
1509 SValue sv;
1511 t = vtop->type.t;
1512 #if PTR_SIZE == 4
1513 if ((t & VT_BTYPE) == VT_LLONG) {
1514 if (t & VT_BITFIELD) {
1515 gv(RC_INT);
1516 t = vtop->type.t;
1518 lexpand();
1519 gv_dup();
1520 vswap();
1521 vrotb(3);
1522 gv_dup();
1523 vrotb(4);
1524 /* stack: H L L1 H1 */
1525 lbuild(t);
1526 vrotb(3);
1527 vrotb(3);
1528 vswap();
1529 lbuild(t);
1530 vswap();
1531 } else
1532 #endif
1534 /* duplicate value */
1535 rc = RC_INT;
1536 sv.type.t = VT_INT;
1537 if (is_float(t)) {
1538 rc = RC_FLOAT;
1539 #ifdef TCC_TARGET_X86_64
1540 if ((t & VT_BTYPE) == VT_LDOUBLE) {
1541 rc = RC_ST0;
1543 #endif
1544 sv.type.t = t;
1546 r = gv(rc);
1547 r1 = get_reg(rc);
1548 sv.r = r;
1549 sv.c.i = 0;
1550 load(r1, &sv); /* move r to r1 */
1551 vdup();
1552 /* duplicates value */
1553 if (r != r1)
1554 vtop->r = r1;
 1558 /* Generate a value test: a test for any value (jump, comparison and integers) */
1561 ST_FUNC int gvtst(int inv, int t)
1563 int v = vtop->r & VT_VALMASK;
1564 if (v != VT_CMP && v != VT_JMP && v != VT_JMPI) {
1565 vpushi(0);
1566 gen_op(TOK_NE);
1568 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1569 /* constant jmp optimization */
1570 if ((vtop->c.i != 0) != inv)
1571 t = gjmp(t);
1572 vtop--;
1573 return t;
1575 return gtst(inv, t);
1578 #if PTR_SIZE == 4
 1579 /* generate CPU-independent (unsigned) long long operations */
1580 static void gen_opl(int op)
1582 int t, a, b, op1, c, i;
1583 int func;
1584 unsigned short reg_iret = REG_IRET;
1585 unsigned short reg_lret = REG_LRET;
1586 SValue tmp;
1588 switch(op) {
1589 case '/':
1590 case TOK_PDIV:
1591 func = TOK___divdi3;
1592 goto gen_func;
1593 case TOK_UDIV:
1594 func = TOK___udivdi3;
1595 goto gen_func;
1596 case '%':
1597 func = TOK___moddi3;
1598 goto gen_mod_func;
1599 case TOK_UMOD:
1600 func = TOK___umoddi3;
1601 gen_mod_func:
1602 #ifdef TCC_ARM_EABI
1603 reg_iret = TREG_R2;
1604 reg_lret = TREG_R3;
1605 #endif
1606 gen_func:
1607 /* call generic long long function */
1608 vpush_global_sym(&func_old_type, func);
1609 vrott(3);
1610 gfunc_call(2);
1611 vpushi(0);
1612 vtop->r = reg_iret;
1613 vtop->r2 = reg_lret;
1614 break;
1615 case '^':
1616 case '&':
1617 case '|':
1618 case '*':
1619 case '+':
1620 case '-':
1621 //pv("gen_opl A",0,2);
1622 t = vtop->type.t;
1623 vswap();
1624 lexpand();
1625 vrotb(3);
1626 lexpand();
1627 /* stack: L1 H1 L2 H2 */
1628 tmp = vtop[0];
1629 vtop[0] = vtop[-3];
1630 vtop[-3] = tmp;
1631 tmp = vtop[-2];
1632 vtop[-2] = vtop[-3];
1633 vtop[-3] = tmp;
1634 vswap();
1635 /* stack: H1 H2 L1 L2 */
1636 //pv("gen_opl B",0,4);
1637 if (op == '*') {
1638 vpushv(vtop - 1);
1639 vpushv(vtop - 1);
1640 gen_op(TOK_UMULL);
1641 lexpand();
1642 /* stack: H1 H2 L1 L2 ML MH */
1643 for(i=0;i<4;i++)
1644 vrotb(6);
1645 /* stack: ML MH H1 H2 L1 L2 */
1646 tmp = vtop[0];
1647 vtop[0] = vtop[-2];
1648 vtop[-2] = tmp;
1649 /* stack: ML MH H1 L2 H2 L1 */
1650 gen_op('*');
1651 vrotb(3);
1652 vrotb(3);
1653 gen_op('*');
1654 /* stack: ML MH M1 M2 */
1655 gen_op('+');
1656 gen_op('+');
1657 } else if (op == '+' || op == '-') {
1658 /* XXX: add non carry method too (for MIPS or alpha) */
1659 if (op == '+')
1660 op1 = TOK_ADDC1;
1661 else
1662 op1 = TOK_SUBC1;
1663 gen_op(op1);
1664 /* stack: H1 H2 (L1 op L2) */
1665 vrotb(3);
1666 vrotb(3);
1667 gen_op(op1 + 1); /* TOK_xxxC2 */
1668 } else {
1669 gen_op(op);
1670 /* stack: H1 H2 (L1 op L2) */
1671 vrotb(3);
1672 vrotb(3);
1673 /* stack: (L1 op L2) H1 H2 */
1674 gen_op(op);
1675 /* stack: (L1 op L2) (H1 op H2) */
1677 /* stack: L H */
1678 lbuild(t);
1679 break;
1680 case TOK_SAR:
1681 case TOK_SHR:
1682 case TOK_SHL:
1683 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1684 t = vtop[-1].type.t;
1685 vswap();
1686 lexpand();
1687 vrotb(3);
1688 /* stack: L H shift */
1689 c = (int)vtop->c.i;
1690 /* constant: simpler */
 1691 /* NOTE: all comments are for SHL. The other cases are
1692 done by swapping words */
1693 vpop();
1694 if (op != TOK_SHL)
1695 vswap();
1696 if (c >= 32) {
1697 /* stack: L H */
1698 vpop();
1699 if (c > 32) {
1700 vpushi(c - 32);
1701 gen_op(op);
1703 if (op != TOK_SAR) {
1704 vpushi(0);
1705 } else {
1706 gv_dup();
1707 vpushi(31);
1708 gen_op(TOK_SAR);
1710 vswap();
1711 } else {
1712 vswap();
1713 gv_dup();
1714 /* stack: H L L */
1715 vpushi(c);
1716 gen_op(op);
1717 vswap();
1718 vpushi(32 - c);
1719 if (op == TOK_SHL)
1720 gen_op(TOK_SHR);
1721 else
1722 gen_op(TOK_SHL);
1723 vrotb(3);
1724 /* stack: L L H */
1725 vpushi(c);
1726 if (op == TOK_SHL)
1727 gen_op(TOK_SHL);
1728 else
1729 gen_op(TOK_SHR);
1730 gen_op('|');
1732 if (op != TOK_SHL)
1733 vswap();
1734 lbuild(t);
1735 } else {
1736 /* XXX: should provide a faster fallback on x86 ? */
1737 switch(op) {
1738 case TOK_SAR:
1739 func = TOK___ashrdi3;
1740 goto gen_func;
1741 case TOK_SHR:
1742 func = TOK___lshrdi3;
1743 goto gen_func;
1744 case TOK_SHL:
1745 func = TOK___ashldi3;
1746 goto gen_func;
1749 break;
1750 default:
1751 /* compare operations */
1752 t = vtop->type.t;
1753 vswap();
1754 lexpand();
1755 vrotb(3);
1756 lexpand();
1757 /* stack: L1 H1 L2 H2 */
1758 tmp = vtop[-1];
1759 vtop[-1] = vtop[-2];
1760 vtop[-2] = tmp;
1761 /* stack: L1 L2 H1 H2 */
1762 /* compare high */
1763 op1 = op;
1764 /* when values are equal, we need to compare low words. since
1765 the jump is inverted, we invert the test too. */
1766 if (op1 == TOK_LT)
1767 op1 = TOK_LE;
1768 else if (op1 == TOK_GT)
1769 op1 = TOK_GE;
1770 else if (op1 == TOK_ULT)
1771 op1 = TOK_ULE;
1772 else if (op1 == TOK_UGT)
1773 op1 = TOK_UGE;
1774 a = 0;
1775 b = 0;
1776 gen_op(op1);
1777 if (op == TOK_NE) {
1778 b = gvtst(0, 0);
1779 } else {
1780 a = gvtst(1, 0);
1781 if (op != TOK_EQ) {
1782 /* generate non equal test */
1783 vpushi(TOK_NE);
1784 vtop->r = VT_CMP;
1785 b = gvtst(0, 0);
1788 /* compare low. Always unsigned */
1789 op1 = op;
1790 if (op1 == TOK_LT)
1791 op1 = TOK_ULT;
1792 else if (op1 == TOK_LE)
1793 op1 = TOK_ULE;
1794 else if (op1 == TOK_GT)
1795 op1 = TOK_UGT;
1796 else if (op1 == TOK_GE)
1797 op1 = TOK_UGE;
1798 gen_op(op1);
1799 a = gvtst(1, a);
1800 gsym(b);
1801 vseti(VT_JMPI, a);
1802 break;
1805 #endif
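/* signed 64-bit division implemented with unsigned arithmetic
   (the sign is handled explicitly) */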
1807 static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
1809 uint64_t x = (a >> 63 ? -a : a) / (b >> 63 ? -b : b);
1810 return (a ^ b) >> 63 ? -x : x;
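/* signed 64-bit '<' computed by toggling the sign bits and comparing unsigned */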
1813 static int gen_opic_lt(uint64_t a, uint64_t b)
1815 return (a ^ (uint64_t)1 << 63) < (b ^ (uint64_t)1 << 63);
 1818 /* handle integer constant optimizations and various
 1819 machine-independent optimizations */
1820 static void gen_opic(int op)
1822 SValue *v1 = vtop - 1;
1823 SValue *v2 = vtop;
1824 int t1 = v1->type.t & VT_BTYPE;
1825 int t2 = v2->type.t & VT_BTYPE;
1826 int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1827 int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1828 uint64_t l1 = c1 ? v1->c.i : 0;
1829 uint64_t l2 = c2 ? v2->c.i : 0;
1830 int shm = (t1 == VT_LLONG) ? 63 : 31;
1832 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
1833 l1 = ((uint32_t)l1 |
1834 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
1835 if (t2 != VT_LLONG && (PTR_SIZE != 8 || t2 != VT_PTR))
1836 l2 = ((uint32_t)l2 |
1837 (v2->type.t & VT_UNSIGNED ? 0 : -(l2 & 0x80000000)));
1839 if (c1 && c2) {
1840 switch(op) {
1841 case '+': l1 += l2; break;
1842 case '-': l1 -= l2; break;
1843 case '&': l1 &= l2; break;
1844 case '^': l1 ^= l2; break;
1845 case '|': l1 |= l2; break;
1846 case '*': l1 *= l2; break;
1848 case TOK_PDIV:
1849 case '/':
1850 case '%':
1851 case TOK_UDIV:
1852 case TOK_UMOD:
1853 /* if division by zero, generate explicit division */
1854 if (l2 == 0) {
1855 if (const_wanted)
1856 tcc_error("division by zero in constant");
1857 goto general_case;
1859 switch(op) {
1860 default: l1 = gen_opic_sdiv(l1, l2); break;
1861 case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
1862 case TOK_UDIV: l1 = l1 / l2; break;
1863 case TOK_UMOD: l1 = l1 % l2; break;
1865 break;
1866 case TOK_SHL: l1 <<= (l2 & shm); break;
1867 case TOK_SHR: l1 >>= (l2 & shm); break;
1868 case TOK_SAR:
1869 l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
1870 break;
1871 /* tests */
1872 case TOK_ULT: l1 = l1 < l2; break;
1873 case TOK_UGE: l1 = l1 >= l2; break;
1874 case TOK_EQ: l1 = l1 == l2; break;
1875 case TOK_NE: l1 = l1 != l2; break;
1876 case TOK_ULE: l1 = l1 <= l2; break;
1877 case TOK_UGT: l1 = l1 > l2; break;
1878 case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
1879 case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
1880 case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
1881 case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
1882 /* logical */
1883 case TOK_LAND: l1 = l1 && l2; break;
1884 case TOK_LOR: l1 = l1 || l2; break;
1885 default:
1886 goto general_case;
1888 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
1889 l1 = ((uint32_t)l1 |
1890 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
1891 v1->c.i = l1;
1892 vtop--;
1893 } else {
1894 /* if commutative ops, put c2 as constant */
1895 if (c1 && (op == '+' || op == '&' || op == '^' ||
1896 op == '|' || op == '*')) {
1897 vswap();
1898 c2 = c1; //c = c1, c1 = c2, c2 = c;
1899 l2 = l1; //l = l1, l1 = l2, l2 = l;
1901 if (!const_wanted &&
1902 c1 && ((l1 == 0 &&
1903 (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
1904 (l1 == -1 && op == TOK_SAR))) {
1905 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
1906 vtop--;
1907 } else if (!const_wanted &&
1908 c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
1909 (op == '|' &&
1910 (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))) ||
1911 (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
1912 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
1913 if (l2 == 1)
1914 vtop->c.i = 0;
1915 vswap();
1916 vtop--;
1917 } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
1918 op == TOK_PDIV) &&
1919 l2 == 1) ||
1920 ((op == '+' || op == '-' || op == '|' || op == '^' ||
1921 op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
1922 l2 == 0) ||
1923 (op == '&' &&
1924 (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))))) {
1925 /* filter out NOP operations like x*1, x-0, x&-1... */
1926 vtop--;
1927 } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
1928 /* try to use shifts instead of muls or divs */
1929 if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
1930 int n = -1;
1931 while (l2) {
1932 l2 >>= 1;
1933 n++;
1935 vtop->c.i = n;
1936 if (op == '*')
1937 op = TOK_SHL;
1938 else if (op == TOK_PDIV)
1939 op = TOK_SAR;
1940 else
1941 op = TOK_SHR;
1943 goto general_case;
1944 } else if (c2 && (op == '+' || op == '-') &&
1945 (((vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM))
1946 || (vtop[-1].r & (VT_VALMASK | VT_LVAL)) == VT_LOCAL)) {
1947 /* symbol + constant case */
1948 if (op == '-')
1949 l2 = -l2;
1950 l2 += vtop[-1].c.i;
1951 /* The backends can't always deal with addends to symbols
1952 larger than +-1<<31. Don't construct such. */
1953 if ((int)l2 != l2)
1954 goto general_case;
1955 vtop--;
1956 vtop->c.i = l2;
1957 } else {
1958 general_case:
1959 /* call low level op generator */
1960 if (t1 == VT_LLONG || t2 == VT_LLONG ||
1961 (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
1962 gen_opl(op);
1963 else
1964 gen_opi(op);
1969 /* generate a floating point operation with constant propagation */
1970 static void gen_opif(int op)
1972 int c1, c2;
1973 SValue *v1, *v2;
1974 #if defined _MSC_VER && defined _AMD64_
1975 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
1976 volatile
1977 #endif
1978 long double f1, f2;
1980 v1 = vtop - 1;
1981 v2 = vtop;
1982 /* currently, we cannot do computations with forward symbols */
1983 c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1984 c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1985 if (c1 && c2) {
1986 if (v1->type.t == VT_FLOAT) {
1987 f1 = v1->c.f;
1988 f2 = v2->c.f;
1989 } else if (v1->type.t == VT_DOUBLE) {
1990 f1 = v1->c.d;
1991 f2 = v2->c.d;
1992 } else {
1993 f1 = v1->c.ld;
1994 f2 = v2->c.ld;
 1997 /* NOTE: we only do constant propagation for finite numbers (not
1998 NaN or infinity) (ANSI spec) */
1999 if (!ieee_finite(f1) || !ieee_finite(f2))
2000 goto general_case;
2002 switch(op) {
2003 case '+': f1 += f2; break;
2004 case '-': f1 -= f2; break;
2005 case '*': f1 *= f2; break;
2006 case '/':
2007 if (f2 == 0.0) {
2008 if (const_wanted)
2009 tcc_error("division by zero in constant");
2010 goto general_case;
2012 f1 /= f2;
2013 break;
 2014 /* XXX: also handle tests? */
2015 default:
2016 goto general_case;
2018 /* XXX: overflow test ? */
2019 if (v1->type.t == VT_FLOAT) {
2020 v1->c.f = f1;
2021 } else if (v1->type.t == VT_DOUBLE) {
2022 v1->c.d = f1;
2023 } else {
2024 v1->c.ld = f1;
2026 vtop--;
2027 } else {
2028 general_case:
2029 gen_opf(op);
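/* return the size in bytes of the type pointed to by 'type' (negative if unknown) */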
2033 static int pointed_size(CType *type)
2035 int align;
2036 return type_size(pointed_type(type), &align);
2039 static void vla_runtime_pointed_size(CType *type)
2041 int align;
2042 vla_runtime_type_size(pointed_type(type), &align);
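/* return true if 'p' is a null pointer constant */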
2045 static inline int is_null_pointer(SValue *p)
2047 if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
2048 return 0;
2049 return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
2050 ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
2051 ((p->type.t & VT_BTYPE) == VT_PTR &&
2052 (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0));
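/* return true if basic type 'bt' is an integer type */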
2055 static inline int is_integer_btype(int bt)
2057 return (bt == VT_BYTE || bt == VT_SHORT ||
2058 bt == VT_INT || bt == VT_LLONG);
2061 /* check types for comparison or subtraction of pointers */
2062 static void check_comparison_pointer_types(SValue *p1, SValue *p2, int op)
2064 CType *type1, *type2, tmp_type1, tmp_type2;
2065 int bt1, bt2;
 2067 /* null pointers are accepted for all comparisons, as in gcc */
2068 if (is_null_pointer(p1) || is_null_pointer(p2))
2069 return;
2070 type1 = &p1->type;
2071 type2 = &p2->type;
2072 bt1 = type1->t & VT_BTYPE;
2073 bt2 = type2->t & VT_BTYPE;
2074 /* accept comparison between pointer and integer with a warning */
2075 if ((is_integer_btype(bt1) || is_integer_btype(bt2)) && op != '-') {
2076 if (op != TOK_LOR && op != TOK_LAND )
2077 tcc_warning("comparison between pointer and integer");
2078 return;
2081 /* both must be pointers or implicit function pointers */
2082 if (bt1 == VT_PTR) {
2083 type1 = pointed_type(type1);
2084 } else if (bt1 != VT_FUNC)
2085 goto invalid_operands;
2087 if (bt2 == VT_PTR) {
2088 type2 = pointed_type(type2);
2089 } else if (bt2 != VT_FUNC) {
2090 invalid_operands:
2091 tcc_error("invalid operands to binary %s", get_tok_str(op, NULL));
2093 if ((type1->t & VT_BTYPE) == VT_VOID ||
2094 (type2->t & VT_BTYPE) == VT_VOID)
2095 return;
2096 tmp_type1 = *type1;
2097 tmp_type2 = *type2;
2098 tmp_type1.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
2099 tmp_type2.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
2100 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
2101 /* gcc-like error if '-' is used */
2102 if (op == '-')
2103 goto invalid_operands;
2104 else
2105 tcc_warning("comparison of distinct pointer types lacks a cast");
 2109 /* generic gen_op: handles type problems */
2110 ST_FUNC void gen_op(int op)
2112 int u, t1, t2, bt1, bt2, t;
2113 CType type1;
2115 redo:
2116 t1 = vtop[-1].type.t;
2117 t2 = vtop[0].type.t;
2118 bt1 = t1 & VT_BTYPE;
2119 bt2 = t2 & VT_BTYPE;
2121 if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
2122 tcc_error("operation on a struct");
2123 } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
2124 if (bt2 == VT_FUNC) {
2125 mk_pointer(&vtop->type);
2126 gaddrof();
2128 if (bt1 == VT_FUNC) {
2129 vswap();
2130 mk_pointer(&vtop->type);
2131 gaddrof();
2132 vswap();
2134 goto redo;
2135 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
2136 /* at least one operand is a pointer */
 2137 /* relational op: both operands must be pointers */
2138 if (op >= TOK_ULT && op <= TOK_LOR) {
2139 check_comparison_pointer_types(vtop - 1, vtop, op);
 2140 /* pointers are handled as unsigned */
2141 #if PTR_SIZE == 8
2142 t = VT_LLONG | VT_UNSIGNED;
2143 #else
2144 t = VT_INT | VT_UNSIGNED;
2145 #endif
2146 goto std_op;
 2148 /* if both operands are pointers, then it must be the '-' op */
2149 if (bt1 == VT_PTR && bt2 == VT_PTR) {
2150 if (op != '-')
2151 tcc_error("cannot use pointers here");
2152 check_comparison_pointer_types(vtop - 1, vtop, op);
2153 /* XXX: check that types are compatible */
2154 if (vtop[-1].type.t & VT_VLA) {
2155 vla_runtime_pointed_size(&vtop[-1].type);
2156 } else {
2157 vpushi(pointed_size(&vtop[-1].type));
2159 vrott(3);
2160 gen_opic(op);
2161 vtop->type.t = ptrdiff_type.t;
2162 vswap();
2163 gen_op(TOK_PDIV);
2164 } else {
2165 /* exactly one pointer : must be '+' or '-'. */
2166 if (op != '-' && op != '+')
2167 tcc_error("cannot use pointers here");
2168 /* Put pointer as first operand */
2169 if (bt2 == VT_PTR) {
2170 vswap();
2171 t = t1, t1 = t2, t2 = t;
2173 #if PTR_SIZE == 4
2174 if ((vtop[0].type.t & VT_BTYPE) == VT_LLONG)
2175 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2176 gen_cast_s(VT_INT);
2177 #endif
2178 type1 = vtop[-1].type;
2179 type1.t &= ~VT_ARRAY;
2180 if (vtop[-1].type.t & VT_VLA)
2181 vla_runtime_pointed_size(&vtop[-1].type);
2182 else {
2183 u = pointed_size(&vtop[-1].type);
2184 if (u < 0)
2185 tcc_error("unknown array element size");
2186 #if PTR_SIZE == 8
2187 vpushll(u);
2188 #else
2189 /* XXX: cast to int ? (long long case) */
2190 vpushi(u);
2191 #endif
2193 gen_op('*');
2194 #if 0
2195 /* #ifdef CONFIG_TCC_BCHECK
2196 The main reason for removing this code:
2197 #include <stdio.h>
2198 int main ()
2200 int v[10];
2201 int i = 10;
2202 int j = 9;
2203 fprintf(stderr, "v+i-j = %p\n", v+i-j);
2204 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
2206 When this code is enabled, the output looks like
2207 v+i-j = 0xfffffffe
2208 v+(i-j) = 0xbff84000
2210 /* if evaluating constant expression, no code should be
2211 generated, so no bound check */
2212 if (tcc_state->do_bounds_check && !const_wanted) {
2213 /* if bounded pointers, we generate a special code to
2214 test bounds */
2215 if (op == '-') {
2216 vpushi(0);
2217 vswap();
2218 gen_op('-');
2220 gen_bounded_ptr_add();
2221 } else
2222 #endif
2224 gen_opic(op);
2226 /* restore the type in case gen_opic() swapped the operands */
2227 vtop->type = type1;
2229 } else if (is_float(bt1) || is_float(bt2)) {
2230 /* compute bigger type and do implicit casts */
2231 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
2232 t = VT_LDOUBLE;
2233 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
2234 t = VT_DOUBLE;
2235 } else {
2236 t = VT_FLOAT;
2238 /* floats can only be used for a few operations */
2239 if (op != '+' && op != '-' && op != '*' && op != '/' &&
2240 (op < TOK_ULT || op > TOK_GT))
2241 tcc_error("invalid operands for binary operation");
2242 goto std_op;
2243 } else if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL) {
2244 t = bt1 == VT_LLONG ? VT_LLONG : VT_INT;
2245 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (t | VT_UNSIGNED))
2246 t |= VT_UNSIGNED;
2247 t |= (VT_LONG & t1);
2248 goto std_op;
2249 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
2250 /* cast to biggest op */
2251 t = VT_LLONG | VT_LONG;
2252 if (bt1 == VT_LLONG)
2253 t &= t1;
2254 if (bt2 == VT_LLONG)
2255 t &= t2;
2256 /* convert to unsigned if it does not fit in a long long */
2257 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
2258 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
2259 t |= VT_UNSIGNED;
2260 goto std_op;
2261 } else {
2262 /* integer operations */
2263 t = VT_INT | (VT_LONG & (t1 | t2));
2264 /* convert to unsigned if it does not fit in an integer */
2265 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
2266 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
2267 t |= VT_UNSIGNED;
2268 std_op:
2269 /* XXX: currently, some unsigned operations are explicit, so
2270 we modify them here */
2271 if (t & VT_UNSIGNED) {
2272 if (op == TOK_SAR)
2273 op = TOK_SHR;
2274 else if (op == '/')
2275 op = TOK_UDIV;
2276 else if (op == '%')
2277 op = TOK_UMOD;
2278 else if (op == TOK_LT)
2279 op = TOK_ULT;
2280 else if (op == TOK_GT)
2281 op = TOK_UGT;
2282 else if (op == TOK_LE)
2283 op = TOK_ULE;
2284 else if (op == TOK_GE)
2285 op = TOK_UGE;
2287 vswap();
2288 type1.t = t;
2289 type1.ref = NULL;
2290 gen_cast(&type1);
2291 vswap();
2292 /* special case for shifts and long long: we keep the shift as
2293 an integer */
2294 if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
2295 type1.t = VT_INT;
2296 gen_cast(&type1);
2297 if (is_float(t))
2298 gen_opif(op);
2299 else
2300 gen_opic(op);
2301 if (op >= TOK_ULT && op <= TOK_GT) {
2302 /* relational op: the result is an int */
2303 vtop->type.t = VT_INT;
2304 } else {
2305 vtop->type.t = t;
2308 // Make sure that we have converted to an rvalue:
2309 if (vtop->r & VT_LVAL)
2310 gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
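/* A standalone sketch (not tcc internals) of the conversions gen_op()
   implements; element counts assume a 4-byte int, purely for illustration:
   'p + i' is scaled by the element size (the pointed_size()/gen_op('*')
   sequence above), 'q - p' is divided back by it (TOK_PDIV), and mixing
   signed with unsigned selects the unsigned operators (TOK_UDIV, TOK_ULT, ...). */
#if 0
#include <stdio.h>
int main(void)
{
    int a[10];
    int *p = a, *q = a + 7;
    unsigned u = 1;
    int s = -1;
    printf("q - p = %d\n", (int)(q - p));  /* 7 elements, not 28 bytes */
    printf("s < u = %d\n", s < u);         /* 0: s is converted to unsigned first */
    return 0;
}
#endif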
2313 #ifndef TCC_TARGET_ARM
2314 /* generic itof for unsigned long long case */
2315 static void gen_cvt_itof1(int t)
2317 #ifdef TCC_TARGET_ARM64
2318 gen_cvt_itof(t);
2319 #else
2320 if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
2321 (VT_LLONG | VT_UNSIGNED)) {
2323 if (t == VT_FLOAT)
2324 vpush_global_sym(&func_old_type, TOK___floatundisf);
2325 #if LDOUBLE_SIZE != 8
2326 else if (t == VT_LDOUBLE)
2327 vpush_global_sym(&func_old_type, TOK___floatundixf);
2328 #endif
2329 else
2330 vpush_global_sym(&func_old_type, TOK___floatundidf);
2331 vrott(2);
2332 gfunc_call(1);
2333 vpushi(0);
2334 vtop->r = reg_fret(t);
2335 } else {
2336 gen_cvt_itof(t);
2338 #endif
2340 #endif
2342 /* generic ftoi for unsigned long long case */
2343 static void gen_cvt_ftoi1(int t)
2345 #ifdef TCC_TARGET_ARM64
2346 gen_cvt_ftoi(t);
2347 #else
2348 int st;
2350 if (t == (VT_LLONG | VT_UNSIGNED)) {
2351 /* not handled natively */
2352 st = vtop->type.t & VT_BTYPE;
2353 if (st == VT_FLOAT)
2354 vpush_global_sym(&func_old_type, TOK___fixunssfdi);
2355 #if LDOUBLE_SIZE != 8
2356 else if (st == VT_LDOUBLE)
2357 vpush_global_sym(&func_old_type, TOK___fixunsxfdi);
2358 #endif
2359 else
2360 vpush_global_sym(&func_old_type, TOK___fixunsdfdi);
2361 vrott(2);
2362 gfunc_call(1);
2363 vpushi(0);
2364 vtop->r = REG_IRET;
2365 vtop->r2 = REG_LRET;
2366 } else {
2367 gen_cvt_ftoi(t);
2369 #endif
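/* A minimal C sketch of source code that goes through the two helpers above:
   unsigned long long to/from floating point is routed to runtime support
   functions (TOK___floatundidf, TOK___fixunsdfdi, ...) because it is not
   handled natively: */
#if 0
#include <stdio.h>
int main(void)
{
    unsigned long long big = 18446744073709551615ULL;     /* ULLONG_MAX */
    double d = (double)big;                                /* itof path: __floatundidf */
    unsigned long long back = (unsigned long long)1e19;    /* ftoi path: __fixunsdfdi */
    printf("%.0f %llu\n", d, back);
    return 0;
}
#endif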
2372 /* force char or short cast */
2373 static void force_charshort_cast(int t)
2375 int bits, dbt;
2377 /* cannot cast static initializers */
2378 if (STATIC_DATA_WANTED)
2379 return;
2381 dbt = t & VT_BTYPE;
2382 /* XXX: add optimization if lvalue : just change type and offset */
2383 if (dbt == VT_BYTE)
2384 bits = 8;
2385 else
2386 bits = 16;
2387 if (t & VT_UNSIGNED) {
2388 vpushi((1 << bits) - 1);
2389 gen_op('&');
2390 } else {
2391 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
2392 bits = 64 - bits;
2393 else
2394 bits = 32 - bits;
2395 vpushi(bits);
2396 gen_op(TOK_SHL);
2397 /* result must be signed, or the SAR is converted to an SHR.
2398 This was not the case when "t" was a signed short
2399 and the last value on the stack was an unsigned int */
2400 vtop->type.t &= ~VT_UNSIGNED;
2401 vpushi(bits);
2402 gen_op(TOK_SAR);
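/* What the code above generates, written as a plain C sketch ('x' is a
   hypothetical 32-bit int; the shift pair mirrors the emitted SHL/SAR and
   relies on arithmetic right shift, as the generated code does): */
#if 0
unsigned as_uchar(int x)  { return x & 0xff; }           /* unsigned: mask            */
int      as_schar(int x)  { return (x << 24) >> 24; }    /* signed char: SHL then SAR */
int      as_sshort(int x) { return (x << 16) >> 16; }    /* signed short: 32-16 bits  */
#endif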
2406 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2407 static void gen_cast_s(int t)
2409 CType type;
2410 type.t = t;
2411 type.ref = NULL;
2412 gen_cast(&type);
2415 static void gen_cast(CType *type)
2417 int sbt, dbt, sf, df, c, p;
2419 /* special delayed cast for char/short */
2420 /* XXX: in some cases (multiple cascaded casts), it may still
2421 be incorrect */
2422 if (vtop->r & VT_MUSTCAST) {
2423 vtop->r &= ~VT_MUSTCAST;
2424 force_charshort_cast(vtop->type.t);
2427 /* bitfields first get cast to ints */
2428 if (vtop->type.t & VT_BITFIELD) {
2429 gv(RC_INT);
2432 dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
2433 sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
2435 if (sbt != dbt) {
2436 sf = is_float(sbt);
2437 df = is_float(dbt);
2438 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2439 p = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM);
2440 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
2441 c &= dbt != VT_LDOUBLE;
2442 #endif
2443 if (c) {
2444 /* constant case: we can do it now */
2445 /* XXX: in ISOC, cannot do it if error in convert */
2446 if (sbt == VT_FLOAT)
2447 vtop->c.ld = vtop->c.f;
2448 else if (sbt == VT_DOUBLE)
2449 vtop->c.ld = vtop->c.d;
2451 if (df) {
2452 if ((sbt & VT_BTYPE) == VT_LLONG) {
2453 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
2454 vtop->c.ld = vtop->c.i;
2455 else
2456 vtop->c.ld = -(long double)-vtop->c.i;
2457 } else if(!sf) {
2458 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
2459 vtop->c.ld = (uint32_t)vtop->c.i;
2460 else
2461 vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
2464 if (dbt == VT_FLOAT)
2465 vtop->c.f = (float)vtop->c.ld;
2466 else if (dbt == VT_DOUBLE)
2467 vtop->c.d = (double)vtop->c.ld;
2468 } else if (sf && dbt == (VT_LLONG|VT_UNSIGNED)) {
2469 vtop->c.i = vtop->c.ld;
2470 } else if (sf && dbt == VT_BOOL) {
2471 vtop->c.i = (vtop->c.ld != 0);
2472 } else {
2473 if(sf)
2474 vtop->c.i = vtop->c.ld;
2475 else if (sbt == (VT_LLONG|VT_UNSIGNED))
2476 ; /* all 64 bits are already significant: nothing to do */
2477 else if (sbt & VT_UNSIGNED)
2478 vtop->c.i = (uint32_t)vtop->c.i;
2479 #if PTR_SIZE == 8
2480 else if (sbt == VT_PTR)
2481 ; /* pointers already carry the full 64-bit value */
2482 #endif
2483 else if (sbt != VT_LLONG)
2484 vtop->c.i = ((uint32_t)vtop->c.i |
2485 -(vtop->c.i & 0x80000000));
2487 if (dbt == (VT_LLONG|VT_UNSIGNED))
2488 ; /* keep all 64 bits */
2489 else if (dbt == VT_BOOL)
2490 vtop->c.i = (vtop->c.i != 0);
2491 #if PTR_SIZE == 8
2492 else if (dbt == VT_PTR)
2493 ; /* pointer-sized target: keep all 64 bits */
2494 #endif
2495 else if (dbt != VT_LLONG) {
2496 uint32_t m = ((dbt & VT_BTYPE) == VT_BYTE ? 0xff :
2497 (dbt & VT_BTYPE) == VT_SHORT ? 0xffff :
2498 0xffffffff);
2499 vtop->c.i &= m;
2500 if (!(dbt & VT_UNSIGNED))
2501 vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
2504 } else if (p && dbt == VT_BOOL) {
2505 vtop->r = VT_CONST;
2506 vtop->c.i = 1;
2507 } else {
2508 /* non constant case: generate code */
2509 if (sf && df) {
2510 /* convert from fp to fp */
2511 gen_cvt_ftof(dbt);
2512 } else if (df) {
2513 /* convert int to fp */
2514 gen_cvt_itof1(dbt);
2515 } else if (sf) {
2516 /* convert fp to int */
2517 if (dbt == VT_BOOL) {
2518 vpushi(0);
2519 gen_op(TOK_NE);
2520 } else {
2521 /* we handle char/short/etc... with generic code */
2522 if (dbt != (VT_INT | VT_UNSIGNED) &&
2523 dbt != (VT_LLONG | VT_UNSIGNED) &&
2524 dbt != VT_LLONG)
2525 dbt = VT_INT;
2526 gen_cvt_ftoi1(dbt);
2527 if (dbt == VT_INT && (type->t & (VT_BTYPE | VT_UNSIGNED)) != dbt) {
2528 /* additional cast for char/short... */
2529 vtop->type.t = dbt;
2530 gen_cast(type);
2533 #if PTR_SIZE == 4
2534 } else if ((dbt & VT_BTYPE) == VT_LLONG) {
2535 if ((sbt & VT_BTYPE) != VT_LLONG) {
2536 /* scalar to long long */
2537 /* machine independent conversion */
2538 gv(RC_INT);
2539 /* generate high word */
2540 if (sbt == (VT_INT | VT_UNSIGNED)) {
2541 vpushi(0);
2542 gv(RC_INT);
2543 } else {
2544 if (sbt == VT_PTR) {
2545 /* cast from pointer to int before we apply
2546 shift operation, which pointers don't support */
2547 gen_cast_s(VT_INT);
2549 gv_dup();
2550 vpushi(31);
2551 gen_op(TOK_SAR);
2553 /* patch second register */
2554 vtop[-1].r2 = vtop->r;
2555 vpop();
2557 #else
2558 } else if ((dbt & VT_BTYPE) == VT_LLONG ||
2559 (dbt & VT_BTYPE) == VT_PTR ||
2560 (dbt & VT_BTYPE) == VT_FUNC) {
2561 if ((sbt & VT_BTYPE) != VT_LLONG &&
2562 (sbt & VT_BTYPE) != VT_PTR &&
2563 (sbt & VT_BTYPE) != VT_FUNC) {
2564 /* need to convert from 32bit to 64bit */
2565 gv(RC_INT);
2566 if (sbt != (VT_INT | VT_UNSIGNED)) {
2567 #if defined(TCC_TARGET_ARM64)
2568 gen_cvt_sxtw();
2569 #elif defined(TCC_TARGET_X86_64)
2570 int r = gv(RC_INT);
2571 /* x86_64 specific: movslq */
2572 o(0x6348);
2573 o(0xc0 + (REG_VALUE(r) << 3) + REG_VALUE(r));
2574 #else
2575 #error
2576 #endif
2579 #endif
2580 } else if (dbt == VT_BOOL) {
2581 /* scalar to bool */
2582 vpushi(0);
2583 gen_op(TOK_NE);
2584 } else if ((dbt & VT_BTYPE) == VT_BYTE ||
2585 (dbt & VT_BTYPE) == VT_SHORT) {
2586 if (sbt == VT_PTR) {
2587 vtop->type.t = VT_INT;
2588 tcc_warning("nonportable conversion from pointer to char/short");
2590 force_charshort_cast(dbt);
2591 #if PTR_SIZE == 4
2592 } else if ((dbt & VT_BTYPE) == VT_INT) {
2593 /* scalar to int */
2594 if ((sbt & VT_BTYPE) == VT_LLONG) {
2595 /* from long long: just take low order word */
2596 lexpand();
2597 vpop();
2599 /* if lvalue and single word type, nothing to do because
2600 the lvalue already contains the real type size (see
2601 VT_LVAL_xxx constants) */
2602 #endif
2605 } else if ((dbt & VT_BTYPE) == VT_PTR && !(vtop->r & VT_LVAL)) {
2606 /* if we are casting between pointer types,
2607 we must update the VT_LVAL_xxx size */
2608 vtop->r = (vtop->r & ~VT_LVAL_TYPE)
2609 | (lvalue_type(type->ref->type.t) & VT_LVAL_TYPE);
2611 vtop->type = *type;
2614 /* return type size as known at compile time. Put alignment at 'a' */
2615 ST_FUNC int type_size(CType *type, int *a)
2617 Sym *s;
2618 int bt;
2620 bt = type->t & VT_BTYPE;
2621 if (bt == VT_STRUCT) {
2622 /* struct/union */
2623 s = type->ref;
2624 *a = s->r;
2625 return s->c;
2626 } else if (bt == VT_PTR) {
2627 if (type->t & VT_ARRAY) {
2628 int ts;
2630 s = type->ref;
2631 ts = type_size(&s->type, a);
2633 if (ts < 0 && s->c < 0)
2634 ts = -ts;
2636 return ts * s->c;
2637 } else {
2638 *a = PTR_SIZE;
2639 return PTR_SIZE;
2641 } else if (IS_ENUM(type->t) && type->ref->c == -1) {
2642 return -1; /* incomplete enum */
2643 } else if (bt == VT_LDOUBLE) {
2644 *a = LDOUBLE_ALIGN;
2645 return LDOUBLE_SIZE;
2646 } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
2647 #ifdef TCC_TARGET_I386
2648 #ifdef TCC_TARGET_PE
2649 *a = 8;
2650 #else
2651 *a = 4;
2652 #endif
2653 #elif defined(TCC_TARGET_ARM)
2654 #ifdef TCC_ARM_EABI
2655 *a = 8;
2656 #else
2657 *a = 4;
2658 #endif
2659 #else
2660 *a = 8;
2661 #endif
2662 return 8;
2663 } else if (bt == VT_INT || bt == VT_FLOAT) {
2664 *a = 4;
2665 return 4;
2666 } else if (bt == VT_SHORT) {
2667 *a = 2;
2668 return 2;
2669 } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
2670 *a = 8;
2671 return 16;
2672 } else {
2673 /* char, void, function, _Bool */
2674 *a = 1;
2675 return 1;
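/* The target-dependent alignment returned for double/long long above can be
   observed from plain C (a sketch: 4 on i386 ELF, 8 on PE/ARM EABI and
   64-bit targets, matching the #ifdef ladder): */
#if 0
#include <stdio.h>
#include <stddef.h>
struct s { char c; long long ll; };
int main(void)
{
    printf("offsetof(struct s, ll) = %d\n", (int)offsetof(struct s, ll));
    return 0;
}
#endif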
2679 /* push type size as known at run time on top of value stack. Put
2680 alignment at 'a' */
2681 ST_FUNC void vla_runtime_type_size(CType *type, int *a)
2683 if (type->t & VT_VLA) {
2684 type_size(&type->ref->type, a);
2685 vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
2686 } else {
2687 vpushi(type_size(type, a));
2691 static void vla_sp_restore(void) {
2692 if (vlas_in_scope) {
2693 gen_vla_sp_restore(vla_sp_loc);
2697 static void vla_sp_restore_root(void) {
2698 if (vlas_in_scope) {
2699 gen_vla_sp_restore(vla_sp_root_loc);
2703 /* return the pointed type of t */
2704 static inline CType *pointed_type(CType *type)
2706 return &type->ref->type;
2709 /* modify type so that it is a pointer to the original type. */
2710 ST_FUNC void mk_pointer(CType *type)
2712 Sym *s;
2713 s = sym_push(SYM_FIELD, type, 0, -1);
2714 type->t = VT_PTR | (type->t & VT_STORAGE);
2715 type->ref = s;
2718 /* compare function types. OLD functions match any new functions */
2719 static int is_compatible_func(CType *type1, CType *type2)
2721 Sym *s1, *s2;
2723 s1 = type1->ref;
2724 s2 = type2->ref;
2725 if (!is_compatible_types(&s1->type, &s2->type))
2726 return 0;
2727 /* check func_call */
2728 if (s1->f.func_call != s2->f.func_call)
2729 return 0;
2730 /* XXX: not complete */
2731 if (s1->f.func_type == FUNC_OLD || s2->f.func_type == FUNC_OLD)
2732 return 1;
2733 if (s1->f.func_type != s2->f.func_type)
2734 return 0;
2735 while (s1 != NULL) {
2736 if (s2 == NULL)
2737 return 0;
2738 if (!is_compatible_unqualified_types(&s1->type, &s2->type))
2739 return 0;
2740 s1 = s1->next;
2741 s2 = s2->next;
2743 if (s2)
2744 return 0;
2745 return 1;
2748 /* return true if type1 and type2 are the same. If unqualified is
2749 true, qualifiers on the types are ignored.
2751 - enums are not checked, as with gcc's __builtin_types_compatible_p()
2753 static int compare_types(CType *type1, CType *type2, int unqualified)
2755 int bt1, t1, t2;
2757 t1 = type1->t & VT_TYPE;
2758 t2 = type2->t & VT_TYPE;
2759 if (unqualified) {
2760 /* strip qualifiers before comparing */
2761 t1 &= ~(VT_CONSTANT | VT_VOLATILE);
2762 t2 &= ~(VT_CONSTANT | VT_VOLATILE);
2765 /* Default vs. explicit signedness only matters for char */
2766 if ((t1 & VT_BTYPE) != VT_BYTE) {
2767 t1 &= ~VT_DEFSIGN;
2768 t2 &= ~VT_DEFSIGN;
2770 /* XXX: bitfields ? */
2771 if (t1 != t2)
2772 return 0;
2773 /* test more complicated cases */
2774 bt1 = t1 & VT_BTYPE;
2775 if (bt1 == VT_PTR) {
2776 type1 = pointed_type(type1);
2777 type2 = pointed_type(type2);
2778 return is_compatible_types(type1, type2);
2779 } else if (bt1 == VT_STRUCT) {
2780 return (type1->ref == type2->ref);
2781 } else if (bt1 == VT_FUNC) {
2782 return is_compatible_func(type1, type2);
2783 } else {
2784 return 1;
2788 /* return true if type1 and type2 are exactly the same (including
2789 qualifiers).
2791 static int is_compatible_types(CType *type1, CType *type2)
2793 return compare_types(type1,type2,0);
2796 /* return true if type1 and type2 are the same (ignoring qualifiers).
2798 static int is_compatible_unqualified_types(CType *type1, CType *type2)
2800 return compare_types(type1,type2,1);
2803 /* print a type. If 'varstr' is not NULL, then the variable is also
2804 printed in the type */
2805 /* XXX: union */
2806 /* XXX: add array and function pointers */
2807 static void type_to_str(char *buf, int buf_size,
2808 CType *type, const char *varstr)
2810 int bt, v, t;
2811 Sym *s, *sa;
2812 char buf1[256];
2813 const char *tstr;
2815 t = type->t;
2816 bt = t & VT_BTYPE;
2817 buf[0] = '\0';
2819 if (t & VT_EXTERN)
2820 pstrcat(buf, buf_size, "extern ");
2821 if (t & VT_STATIC)
2822 pstrcat(buf, buf_size, "static ");
2823 if (t & VT_TYPEDEF)
2824 pstrcat(buf, buf_size, "typedef ");
2825 if (t & VT_INLINE)
2826 pstrcat(buf, buf_size, "inline ");
2827 if (t & VT_VOLATILE)
2828 pstrcat(buf, buf_size, "volatile ");
2829 if (t & VT_CONSTANT)
2830 pstrcat(buf, buf_size, "const ");
2832 if (((t & VT_DEFSIGN) && bt == VT_BYTE)
2833 || ((t & VT_UNSIGNED)
2834 && (bt == VT_SHORT || bt == VT_INT || bt == VT_LLONG)
2835 && !IS_ENUM(t)
2837 pstrcat(buf, buf_size, (t & VT_UNSIGNED) ? "unsigned " : "signed ");
2839 buf_size -= strlen(buf);
2840 buf += strlen(buf);
2842 switch(bt) {
2843 case VT_VOID:
2844 tstr = "void";
2845 goto add_tstr;
2846 case VT_BOOL:
2847 tstr = "_Bool";
2848 goto add_tstr;
2849 case VT_BYTE:
2850 tstr = "char";
2851 goto add_tstr;
2852 case VT_SHORT:
2853 tstr = "short";
2854 goto add_tstr;
2855 case VT_INT:
2856 tstr = "int";
2857 goto maybe_long;
2858 case VT_LLONG:
2859 tstr = "long long";
2860 maybe_long:
2861 if (t & VT_LONG)
2862 tstr = "long";
2863 if (!IS_ENUM(t))
2864 goto add_tstr;
2865 tstr = "enum ";
2866 goto tstruct;
2867 case VT_FLOAT:
2868 tstr = "float";
2869 goto add_tstr;
2870 case VT_DOUBLE:
2871 tstr = "double";
2872 goto add_tstr;
2873 case VT_LDOUBLE:
2874 tstr = "long double";
2875 add_tstr:
2876 pstrcat(buf, buf_size, tstr);
2877 break;
2878 case VT_STRUCT:
2879 tstr = "struct ";
2880 if (IS_UNION(t))
2881 tstr = "union ";
2882 tstruct:
2883 pstrcat(buf, buf_size, tstr);
2884 v = type->ref->v & ~SYM_STRUCT;
2885 if (v >= SYM_FIRST_ANOM)
2886 pstrcat(buf, buf_size, "<anonymous>");
2887 else
2888 pstrcat(buf, buf_size, get_tok_str(v, NULL));
2889 break;
2890 case VT_FUNC:
2891 s = type->ref;
2892 type_to_str(buf, buf_size, &s->type, varstr);
2893 pstrcat(buf, buf_size, "(");
2894 sa = s->next;
2895 while (sa != NULL) {
2896 type_to_str(buf1, sizeof(buf1), &sa->type, NULL);
2897 pstrcat(buf, buf_size, buf1);
2898 sa = sa->next;
2899 if (sa)
2900 pstrcat(buf, buf_size, ", ");
2902 pstrcat(buf, buf_size, ")");
2903 goto no_var;
2904 case VT_PTR:
2905 s = type->ref;
2906 if (t & VT_ARRAY) {
2907 snprintf(buf1, sizeof(buf1), "%s[%d]", varstr ? varstr : "", s->c);
2908 type_to_str(buf, buf_size, &s->type, buf1);
2909 goto no_var;
2911 pstrcpy(buf1, sizeof(buf1), "*");
2912 if (t & VT_CONSTANT)
2913 pstrcat(buf1, buf_size, "const ");
2914 if (t & VT_VOLATILE)
2915 pstrcat(buf1, buf_size, "volatile ");
2916 if (varstr)
2917 pstrcat(buf1, sizeof(buf1), varstr);
2918 type_to_str(buf, buf_size, &s->type, buf1);
2919 goto no_var;
2921 if (varstr) {
2922 pstrcat(buf, buf_size, " ");
2923 pstrcat(buf, buf_size, varstr);
2925 no_var: ;
2928 /* verify type compatibility to store vtop in 'dt' type, and generate
2929 casts if needed. */
2930 static void gen_assign_cast(CType *dt)
2932 CType *st, *type1, *type2;
2933 char buf1[256], buf2[256];
2934 int dbt, sbt;
2936 st = &vtop->type; /* source type */
2937 dbt = dt->t & VT_BTYPE;
2938 sbt = st->t & VT_BTYPE;
2939 if (sbt == VT_VOID || dbt == VT_VOID) {
2940 if (sbt == VT_VOID && dbt == VT_VOID)
2941 ; /*
2942 It is Ok if both are void
2943 A test program:
2944 void func1() {}
2945 void func2() {
2946 return func1();
2948 gcc accepts this program
2950 else
2951 tcc_error("cannot cast from/to void");
2953 if (dt->t & VT_CONSTANT)
2954 tcc_warning("assignment of read-only location");
2955 switch(dbt) {
2956 case VT_PTR:
2957 /* special cases for pointers */
2958 /* '0' can also be a pointer */
2959 if (is_null_pointer(vtop))
2960 goto type_ok;
2961 /* accept implicit pointer to integer cast with warning */
2962 if (is_integer_btype(sbt)) {
2963 tcc_warning("assignment makes pointer from integer without a cast");
2964 goto type_ok;
2966 type1 = pointed_type(dt);
2967 /* a function is implicitly a function pointer */
2968 if (sbt == VT_FUNC) {
2969 if ((type1->t & VT_BTYPE) != VT_VOID &&
2970 !is_compatible_types(pointed_type(dt), st))
2971 tcc_warning("assignment from incompatible pointer type");
2972 goto type_ok;
2974 if (sbt != VT_PTR)
2975 goto error;
2976 type2 = pointed_type(st);
2977 if ((type1->t & VT_BTYPE) == VT_VOID ||
2978 (type2->t & VT_BTYPE) == VT_VOID) {
2979 /* void * can match anything */
2980 } else {
2981 //printf("types %08x %08x\n", type1->t, type2->t);
2982 /* exact type match, except for qualifiers */
2983 if (!is_compatible_unqualified_types(type1, type2)) {
2984 /* Like GCC, don't warn by default for mere changes
2985 in pointer target signedness. Do warn for different
2986 base types, though, in particular for unsigned enums
2987 and signed int targets. */
2988 if ((type1->t & (VT_BTYPE|VT_LONG)) != (type2->t & (VT_BTYPE|VT_LONG))
2989 || IS_ENUM(type1->t) || IS_ENUM(type2->t)
2991 tcc_warning("assignment from incompatible pointer type");
2994 /* check const and volatile */
2995 if ((!(type1->t & VT_CONSTANT) && (type2->t & VT_CONSTANT)) ||
2996 (!(type1->t & VT_VOLATILE) && (type2->t & VT_VOLATILE)))
2997 tcc_warning("assignment discards qualifiers from pointer target type");
2998 break;
2999 case VT_BYTE:
3000 case VT_SHORT:
3001 case VT_INT:
3002 case VT_LLONG:
3003 if (sbt == VT_PTR || sbt == VT_FUNC) {
3004 tcc_warning("assignment makes integer from pointer without a cast");
3005 } else if (sbt == VT_STRUCT) {
3006 goto case_VT_STRUCT;
3008 /* XXX: more tests */
3009 break;
3010 case VT_STRUCT:
3011 case_VT_STRUCT:
3012 if (!is_compatible_unqualified_types(dt, st)) {
3013 error:
3014 type_to_str(buf1, sizeof(buf1), st, NULL);
3015 type_to_str(buf2, sizeof(buf2), dt, NULL);
3016 tcc_error("cannot cast '%s' to '%s'", buf1, buf2);
3018 break;
3020 type_ok:
3021 gen_cast(dt);
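/* A sketch of assignments and the diagnostics gen_assign_cast() gives them
   (messages quoted from the calls above; the names are made up): */
#if 0
struct a { int x; };
struct b { int x; };
void example(void)
{
    int *ip; unsigned int *up = 0; char *cp = 0; const char *ccp = 0;
    struct a sa; struct b sb;
    ip = 42;        /* warning: assignment makes pointer from integer without a cast */
    ip = cp;        /* warning: assignment from incompatible pointer type */
    cp = ccp;       /* warning: assignment discards qualifiers from pointer target type */
    ip = up;        /* accepted silently: only the target signedness differs */
    /* sa = sb; */  /* error: cannot cast 'struct b' to 'struct a' */
    (void)sa; (void)sb;
}
#endif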
3024 /* store vtop in lvalue pushed on stack */
3025 ST_FUNC void vstore(void)
3027 int sbt, dbt, ft, r, t, size, align, bit_size, bit_pos, rc, delayed_cast;
3029 ft = vtop[-1].type.t;
3030 sbt = vtop->type.t & VT_BTYPE;
3031 dbt = ft & VT_BTYPE;
3032 if ((((sbt == VT_INT || sbt == VT_SHORT) && dbt == VT_BYTE) ||
3033 (sbt == VT_INT && dbt == VT_SHORT))
3034 && !(vtop->type.t & VT_BITFIELD)) {
3035 /* optimize char/short casts */
3036 delayed_cast = VT_MUSTCAST;
3037 vtop->type.t = ft & VT_TYPE;
3038 /* XXX: factorize */
3039 if (ft & VT_CONSTANT)
3040 tcc_warning("assignment of read-only location");
3041 } else {
3042 delayed_cast = 0;
3043 if (!(ft & VT_BITFIELD))
3044 gen_assign_cast(&vtop[-1].type);
3047 if (sbt == VT_STRUCT) {
3048 /* if structure, only generate pointer */
3049 /* structure assignment : generate memcpy */
3050 /* XXX: optimize if small size */
3051 size = type_size(&vtop->type, &align);
3053 /* destination */
3054 vswap();
3055 vtop->type.t = VT_PTR;
3056 gaddrof();
3058 /* address of memcpy() */
3059 #ifdef TCC_ARM_EABI
3060 if(!(align & 7))
3061 vpush_global_sym(&func_old_type, TOK_memcpy8);
3062 else if(!(align & 3))
3063 vpush_global_sym(&func_old_type, TOK_memcpy4);
3064 else
3065 #endif
3066 /* Use memmove, rather than memcpy, as dest and src may be the same: */
3067 vpush_global_sym(&func_old_type, TOK_memmove);
3069 vswap();
3070 /* source */
3071 vpushv(vtop - 2);
3072 vtop->type.t = VT_PTR;
3073 gaddrof();
3074 /* type size */
3075 vpushi(size);
3076 gfunc_call(3);
3078 /* leave source on stack */
3079 } else if (ft & VT_BITFIELD) {
3080 /* bitfield store handling */
3082 /* save lvalue as expression result (example: s.b = s.a = n;) */
3083 vdup(), vtop[-1] = vtop[-2];
3085 bit_pos = BIT_POS(ft);
3086 bit_size = BIT_SIZE(ft);
3087 /* remove bit field info to avoid loops */
3088 vtop[-1].type.t = ft & ~VT_STRUCT_MASK;
3090 if ((ft & VT_BTYPE) == VT_BOOL) {
3091 gen_cast(&vtop[-1].type);
3092 vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
3095 r = adjust_bf(vtop - 1, bit_pos, bit_size);
3096 if (r == VT_STRUCT) {
3097 gen_cast_s((ft & VT_BTYPE) == VT_LLONG ? VT_LLONG : VT_INT);
3098 store_packed_bf(bit_pos, bit_size);
3099 } else {
3100 unsigned long long mask = (1ULL << bit_size) - 1;
3101 if ((ft & VT_BTYPE) != VT_BOOL) {
3102 /* mask source */
3103 if ((vtop[-1].type.t & VT_BTYPE) == VT_LLONG)
3104 vpushll(mask);
3105 else
3106 vpushi((unsigned)mask);
3107 gen_op('&');
3109 /* shift source */
3110 vpushi(bit_pos);
3111 gen_op(TOK_SHL);
3112 vswap();
3113 /* duplicate destination */
3114 vdup();
3115 vrott(3);
3116 /* load destination, mask and or with source */
3117 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
3118 vpushll(~(mask << bit_pos));
3119 else
3120 vpushi(~((unsigned)mask << bit_pos));
3121 gen_op('&');
3122 gen_op('|');
3123 /* store result */
3124 vstore();
3125 /* ... and discard */
3126 vpop();
3128 } else if (dbt == VT_VOID) {
3129 --vtop;
3130 } else {
3131 #ifdef CONFIG_TCC_BCHECK
3132 /* bound check case */
3133 if (vtop[-1].r & VT_MUSTBOUND) {
3134 vswap();
3135 gbound();
3136 vswap();
3138 #endif
3139 rc = RC_INT;
3140 if (is_float(ft)) {
3141 rc = RC_FLOAT;
3142 #ifdef TCC_TARGET_X86_64
3143 if ((ft & VT_BTYPE) == VT_LDOUBLE) {
3144 rc = RC_ST0;
3145 } else if ((ft & VT_BTYPE) == VT_QFLOAT) {
3146 rc = RC_FRET;
3148 #endif
3150 r = gv(rc); /* generate value */
3151 /* if lvalue was saved on stack, must read it */
3152 if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
3153 SValue sv;
3154 t = get_reg(RC_INT);
3155 #if PTR_SIZE == 8
3156 sv.type.t = VT_PTR;
3157 #else
3158 sv.type.t = VT_INT;
3159 #endif
3160 sv.r = VT_LOCAL | VT_LVAL;
3161 sv.c.i = vtop[-1].c.i;
3162 load(t, &sv);
3163 vtop[-1].r = t | VT_LVAL;
3165 /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
3166 #if PTR_SIZE == 8
3167 if (((ft & VT_BTYPE) == VT_QLONG) || ((ft & VT_BTYPE) == VT_QFLOAT)) {
3168 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
3169 #else
3170 if ((ft & VT_BTYPE) == VT_LLONG) {
3171 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
3172 #endif
3173 vtop[-1].type.t = load_type;
3174 store(r, vtop - 1);
3175 vswap();
3176 /* convert to int to increment easily */
3177 vtop->type.t = addr_type;
3178 gaddrof();
3179 vpushi(load_size);
3180 gen_op('+');
3181 vtop->r |= VT_LVAL;
3182 vswap();
3183 vtop[-1].type.t = load_type;
3184 /* XXX: it works because r2 is spilled last ! */
3185 store(vtop->r2, vtop - 1);
3186 } else {
3187 store(r, vtop - 1);
3190 vswap();
3191 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
3192 vtop->r |= delayed_cast;
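/* The generic bit-field store above is the usual read-modify-write; as a
   plain C sketch (bit_pos/bit_size are hypothetical and assumed < 32 here;
   the real code switches to 64-bit masks for long long fields): */
#if 0
unsigned store_bitfield(unsigned word, unsigned val, int bit_pos, int bit_size)
{
    unsigned mask = (1u << bit_size) - 1;
    val = (val & mask) << bit_pos;       /* mask and shift the source       */
    word &= ~(mask << bit_pos);          /* clear the destination bits      */
    return word | val;                   /* or in the new bits and store    */
}
#endif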
3196 /* 'post' selects post- vs. pre-increment/decrement. c is the token ++ or -- */
3197 ST_FUNC void inc(int post, int c)
3199 test_lvalue();
3200 vdup(); /* save lvalue */
3201 if (post) {
3202 gv_dup(); /* duplicate value */
3203 vrotb(3);
3204 vrotb(3);
3206 /* add constant */
3207 vpushi(c - TOK_MID);
3208 gen_op('+');
3209 vstore(); /* store value */
3210 if (post)
3211 vpop(); /* if post op, return saved value */
3214 ST_FUNC void parse_mult_str (CString *astr, const char *msg)
3216 /* read the string */
3217 if (tok != TOK_STR)
3218 expect(msg);
3219 cstr_new(astr);
3220 while (tok == TOK_STR) {
3221 /* XXX: add \0 handling too ? */
3222 cstr_cat(astr, tokc.str.data, -1);
3223 next();
3225 cstr_ccat(astr, '\0');
3228 /* If I is >= 1 and a power of two, returns log2(i)+1.
3229 If I is 0 returns 0. */
3230 static int exact_log2p1(int i)
3232 int ret;
3233 if (!i)
3234 return 0;
3235 for (ret = 1; i >= 1 << 8; ret += 8)
3236 i >>= 8;
3237 if (i >= 1 << 4)
3238 ret += 4, i >>= 4;
3239 if (i >= 1 << 2)
3240 ret += 2, i >>= 2;
3241 if (i >= 1 << 1)
3242 ret++;
3243 return ret;
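/* A disabled self-check sketch of the encoding: the aligned(n) attribute is
   stored as exact_log2p1(n) so that 0 can mean "no alignment requested", and
   parse_attribute() below recovers n as 1 << (a.aligned - 1): */
#if 0
#include <assert.h>
static void exact_log2p1_selftest(void)
{
    assert(exact_log2p1(0) == 0);
    assert(exact_log2p1(1) == 1);
    assert(exact_log2p1(2) == 2);
    assert(exact_log2p1(16) == 5);    /* 1 << (5 - 1) == 16 */
}
#endif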
3246 /* Parse __attribute__((...)) GNUC extension. */
3247 static void parse_attribute(AttributeDef *ad)
3249 int t, n;
3250 CString astr;
3252 redo:
3253 if (tok != TOK_ATTRIBUTE1 && tok != TOK_ATTRIBUTE2)
3254 return;
3255 next();
3256 skip('(');
3257 skip('(');
3258 while (tok != ')') {
3259 if (tok < TOK_IDENT)
3260 expect("attribute name");
3261 t = tok;
3262 next();
3263 switch(t) {
3264 case TOK_SECTION1:
3265 case TOK_SECTION2:
3266 skip('(');
3267 parse_mult_str(&astr, "section name");
3268 ad->section = find_section(tcc_state, (char *)astr.data);
3269 skip(')');
3270 cstr_free(&astr);
3271 break;
3272 case TOK_ALIAS1:
3273 case TOK_ALIAS2:
3274 skip('(');
3275 parse_mult_str(&astr, "alias(\"target\")");
3276 ad->alias_target = /* save string as token, for later */
3277 tok_alloc((char*)astr.data, astr.size-1)->tok;
3278 skip(')');
3279 cstr_free(&astr);
3280 break;
3281 case TOK_VISIBILITY1:
3282 case TOK_VISIBILITY2:
3283 skip('(');
3284 parse_mult_str(&astr,
3285 "visibility(\"default|hidden|internal|protected\")");
3286 if (!strcmp (astr.data, "default"))
3287 ad->a.visibility = STV_DEFAULT;
3288 else if (!strcmp (astr.data, "hidden"))
3289 ad->a.visibility = STV_HIDDEN;
3290 else if (!strcmp (astr.data, "internal"))
3291 ad->a.visibility = STV_INTERNAL;
3292 else if (!strcmp (astr.data, "protected"))
3293 ad->a.visibility = STV_PROTECTED;
3294 else
3295 expect("visibility(\"default|hidden|internal|protected\")");
3296 skip(')');
3297 cstr_free(&astr);
3298 break;
3299 case TOK_ALIGNED1:
3300 case TOK_ALIGNED2:
3301 if (tok == '(') {
3302 next();
3303 n = expr_const();
3304 if (n <= 0 || (n & (n - 1)) != 0)
3305 tcc_error("alignment must be a positive power of two");
3306 skip(')');
3307 } else {
3308 n = MAX_ALIGN;
3310 ad->a.aligned = exact_log2p1(n);
3311 if (n != 1 << (ad->a.aligned - 1))
3312 tcc_error("alignment of %d is larger than implemented", n);
3313 break;
3314 case TOK_PACKED1:
3315 case TOK_PACKED2:
3316 ad->a.packed = 1;
3317 break;
3318 case TOK_WEAK1:
3319 case TOK_WEAK2:
3320 ad->a.weak = 1;
3321 break;
3322 case TOK_UNUSED1:
3323 case TOK_UNUSED2:
3324 /* currently, no need to handle it because tcc does not
3325 track unused objects */
3326 break;
3327 case TOK_NORETURN1:
3328 case TOK_NORETURN2:
3329 /* currently ignored: tcc makes no use of the
3330 noreturn information */
3331 break;
3332 case TOK_CDECL1:
3333 case TOK_CDECL2:
3334 case TOK_CDECL3:
3335 ad->f.func_call = FUNC_CDECL;
3336 break;
3337 case TOK_STDCALL1:
3338 case TOK_STDCALL2:
3339 case TOK_STDCALL3:
3340 ad->f.func_call = FUNC_STDCALL;
3341 break;
3342 #ifdef TCC_TARGET_I386
3343 case TOK_REGPARM1:
3344 case TOK_REGPARM2:
3345 skip('(');
3346 n = expr_const();
3347 if (n > 3)
3348 n = 3;
3349 else if (n < 0)
3350 n = 0;
3351 if (n > 0)
3352 ad->f.func_call = FUNC_FASTCALL1 + n - 1;
3353 skip(')');
3354 break;
3355 case TOK_FASTCALL1:
3356 case TOK_FASTCALL2:
3357 case TOK_FASTCALL3:
3358 ad->f.func_call = FUNC_FASTCALLW;
3359 break;
3360 #endif
3361 case TOK_MODE:
3362 skip('(');
3363 switch(tok) {
3364 case TOK_MODE_DI:
3365 ad->attr_mode = VT_LLONG + 1;
3366 break;
3367 case TOK_MODE_QI:
3368 ad->attr_mode = VT_BYTE + 1;
3369 break;
3370 case TOK_MODE_HI:
3371 ad->attr_mode = VT_SHORT + 1;
3372 break;
3373 case TOK_MODE_SI:
3374 case TOK_MODE_word:
3375 ad->attr_mode = VT_INT + 1;
3376 break;
3377 default:
3378 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok, NULL));
3379 break;
3381 next();
3382 skip(')');
3383 break;
3384 case TOK_DLLEXPORT:
3385 ad->a.dllexport = 1;
3386 break;
3387 case TOK_DLLIMPORT:
3388 ad->a.dllimport = 1;
3389 break;
3390 default:
3391 if (tcc_state->warn_unsupported)
3392 tcc_warning("'%s' attribute ignored", get_tok_str(t, NULL));
3393 /* skip parameters */
3394 if (tok == '(') {
3395 int parenthesis = 0;
3396 do {
3397 if (tok == '(')
3398 parenthesis++;
3399 else if (tok == ')')
3400 parenthesis--;
3401 next();
3402 } while (parenthesis && tok != -1);
3404 break;
3406 if (tok != ',')
3407 break;
3408 next();
3410 skip(')');
3411 skip(')');
3412 goto redo;
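/* Declarations exercising the attributes recognized above (a sketch; the
   section name and identifiers are made up, regparm is i386 only): */
#if 0
static int counter __attribute__((section(".mydata"), aligned(16)));
struct hdr { char tag; int value; } __attribute__((packed));
int fast_add(int a, int b) __attribute__((regparm(2)));
typedef int di_t __attribute__((mode(DI)));              /* mode(DI): 64-bit integer */
void fatal(const char *msg) __attribute__((noreturn));   /* parsed, then ignored     */
#endif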
3415 static Sym * find_field (CType *type, int v)
3417 Sym *s = type->ref;
3418 v |= SYM_FIELD;
3419 while ((s = s->next) != NULL) {
3420 if ((s->v & SYM_FIELD) &&
3421 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3422 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3423 Sym *ret = find_field (&s->type, v);
3424 if (ret)
3425 return ret;
3427 if (s->v == v)
3428 break;
3430 return s;
3433 static void struct_add_offset (Sym *s, int offset)
3435 while ((s = s->next) != NULL) {
3436 if ((s->v & SYM_FIELD) &&
3437 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3438 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3439 struct_add_offset(s->type.ref, offset);
3440 } else
3441 s->c += offset;
3445 static void struct_layout(CType *type, AttributeDef *ad)
3447 int size, align, maxalign, offset, c, bit_pos, bit_size;
3448 int packed, a, bt, prevbt, prev_bit_size;
3449 int pcc = !tcc_state->ms_bitfields;
3450 int pragma_pack = *tcc_state->pack_stack_ptr;
3451 Sym *f;
3453 maxalign = 1;
3454 offset = 0;
3455 c = 0;
3456 bit_pos = 0;
3457 prevbt = VT_STRUCT; /* make it never match */
3458 prev_bit_size = 0;
3460 //#define BF_DEBUG
3462 for (f = type->ref->next; f; f = f->next) {
3463 if (f->type.t & VT_BITFIELD)
3464 bit_size = BIT_SIZE(f->type.t);
3465 else
3466 bit_size = -1;
3467 size = type_size(&f->type, &align);
3468 a = f->a.aligned ? 1 << (f->a.aligned - 1) : 0;
3469 packed = 0;
3471 if (pcc && bit_size == 0) {
3472 /* in pcc mode, packing does not affect zero-width bitfields */
3474 } else {
3475 /* in pcc mode, attribute packed overrides if set. */
3476 if (pcc && (f->a.packed || ad->a.packed))
3477 align = packed = 1;
3479 /* pragma pack overrides align if smaller, and always packs bitfields */
3480 if (pragma_pack) {
3481 packed = 1;
3482 if (pragma_pack < align)
3483 align = pragma_pack;
3484 /* in pcc mode pragma pack also overrides individual align */
3485 if (pcc && pragma_pack < a)
3486 a = 0;
3489 /* some individual align was specified */
3490 if (a)
3491 align = a;
3493 if (type->ref->type.t == VT_UNION) {
3494 if (pcc && bit_size >= 0)
3495 size = (bit_size + 7) >> 3;
3496 offset = 0;
3497 if (size > c)
3498 c = size;
3500 } else if (bit_size < 0) {
3501 if (pcc)
3502 c += (bit_pos + 7) >> 3;
3503 c = (c + align - 1) & -align;
3504 offset = c;
3505 if (size > 0)
3506 c += size;
3507 bit_pos = 0;
3508 prevbt = VT_STRUCT;
3509 prev_bit_size = 0;
3511 } else {
3512 /* A bit-field. Layout is more complicated. There are two
3513 options: PCC (GCC) compatible and MS compatible */
3514 if (pcc) {
3515 /* In PCC layout a bit-field is placed adjacent to the
3516 preceding bit-fields, except if:
3517 - it has zero-width
3518 - an individual alignment was given
3519 - it would overflow its base type container and
3520 there is no packing */
3521 if (bit_size == 0) {
3522 new_field:
3523 c = (c + ((bit_pos + 7) >> 3) + align - 1) & -align;
3524 bit_pos = 0;
3525 } else if (f->a.aligned) {
3526 goto new_field;
3527 } else if (!packed) {
3528 int a8 = align * 8;
3529 int ofs = ((c * 8 + bit_pos) % a8 + bit_size + a8 - 1) / a8;
3530 if (ofs > size / align)
3531 goto new_field;
3534 /* in pcc mode, long long bitfields have type int if they fit */
3535 if (size == 8 && bit_size <= 32)
3536 f->type.t = (f->type.t & ~VT_BTYPE) | VT_INT, size = 4;
3538 while (bit_pos >= align * 8)
3539 c += align, bit_pos -= align * 8;
3540 offset = c;
3542 /* In PCC layout named bit-fields influence the alignment
3543 of the containing struct using the base type's alignment,
3544 except for packed fields (which here have correct align). */
3545 if (f->v & SYM_FIRST_ANOM
3546 // && bit_size // ??? gcc on ARM/rpi does that
3548 align = 1;
3550 } else {
3551 bt = f->type.t & VT_BTYPE;
3552 if ((bit_pos + bit_size > size * 8)
3553 || (bit_size > 0) == (bt != prevbt)
3555 c = (c + align - 1) & -align;
3556 offset = c;
3557 bit_pos = 0;
3558 /* In MS bitfield mode a bit-field run always uses
3559 at least as many bits as the underlying type.
3560 To start a new run it's also required that this
3561 or the last bit-field had non-zero width. */
3562 if (bit_size || prev_bit_size)
3563 c += size;
3565 /* In MS layout the record's alignment is normally
3566 influenced by the field, except for a zero-width
3567 field at the start of a run (but by further zero-width
3568 fields it is again). */
3569 if (bit_size == 0 && prevbt != bt)
3570 align = 1;
3571 prevbt = bt;
3572 prev_bit_size = bit_size;
3575 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
3576 | (bit_pos << VT_STRUCT_SHIFT);
3577 bit_pos += bit_size;
3579 if (align > maxalign)
3580 maxalign = align;
3582 #ifdef BF_DEBUG
3583 printf("set field %s offset %-2d size %-2d align %-2d",
3584 get_tok_str(f->v & ~SYM_FIELD, NULL), offset, size, align);
3585 if (f->type.t & VT_BITFIELD) {
3586 printf(" pos %-2d bits %-2d",
3587 BIT_POS(f->type.t),
3588 BIT_SIZE(f->type.t)
3591 printf("\n");
3592 #endif
3594 if (f->v & SYM_FIRST_ANOM && (f->type.t & VT_BTYPE) == VT_STRUCT) {
3595 Sym *ass;
3596 /* An anonymous struct/union. Adjust member offsets
3597 to reflect the real offset of our containing struct.
3598 Also set the offset of this anon member inside
3599 the outer struct to be zero. Via this it
3600 works when accessing the field offset directly
3601 (from base object), as well as when recursing
3602 members in initializer handling. */
3603 int v2 = f->type.ref->v;
3604 if (!(v2 & SYM_FIELD) &&
3605 (v2 & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
3606 Sym **pps;
3607 /* This happens only with MS extensions. The
3608 anon member has a named struct type, so it
3609 potentially is shared with other references.
3610 We need to unshare members so we can modify
3611 them. */
3612 ass = f->type.ref;
3613 f->type.ref = sym_push(anon_sym++ | SYM_FIELD,
3614 &f->type.ref->type, 0,
3615 f->type.ref->c);
3616 pps = &f->type.ref->next;
3617 while ((ass = ass->next) != NULL) {
3618 *pps = sym_push(ass->v, &ass->type, 0, ass->c);
3619 pps = &((*pps)->next);
3621 *pps = NULL;
3623 struct_add_offset(f->type.ref, offset);
3624 f->c = 0;
3625 } else {
3626 f->c = offset;
3629 f->r = 0;
3632 if (pcc)
3633 c += (bit_pos + 7) >> 3;
3635 /* store size and alignment */
3636 a = bt = ad->a.aligned ? 1 << (ad->a.aligned - 1) : 1;
3637 if (a < maxalign)
3638 a = maxalign;
3639 type->ref->r = a;
3640 if (pragma_pack && pragma_pack < maxalign && 0 == pcc) {
3641 /* can happen if individual align for some member was given. In
3642 this case MSVC ignores maxalign when aligning the size */
3643 a = pragma_pack;
3644 if (a < bt)
3645 a = bt;
3647 c = (c + a - 1) & -a;
3648 type->ref->c = c;
3650 #ifdef BF_DEBUG
3651 printf("struct size %-2d align %-2d\n\n", c, a), fflush(stdout);
3652 #endif
3654 /* check whether we can access bitfields by their type */
3655 for (f = type->ref->next; f; f = f->next) {
3656 int s, px, cx, c0;
3657 CType t;
3659 if (0 == (f->type.t & VT_BITFIELD))
3660 continue;
3661 f->type.ref = f;
3662 f->auxtype = -1;
3663 bit_size = BIT_SIZE(f->type.t);
3664 if (bit_size == 0)
3665 continue;
3666 bit_pos = BIT_POS(f->type.t);
3667 size = type_size(&f->type, &align);
3668 if (bit_pos + bit_size <= size * 8 && f->c + size <= c)
3669 continue;
3671 /* try to access the field using a different type */
3672 c0 = -1, s = align = 1;
3673 for (;;) {
3674 px = f->c * 8 + bit_pos;
3675 cx = (px >> 3) & -align;
3676 px = px - (cx << 3);
3677 if (c0 == cx)
3678 break;
3679 s = (px + bit_size + 7) >> 3;
3680 if (s > 4) {
3681 t.t = VT_LLONG;
3682 } else if (s > 2) {
3683 t.t = VT_INT;
3684 } else if (s > 1) {
3685 t.t = VT_SHORT;
3686 } else {
3687 t.t = VT_BYTE;
3689 s = type_size(&t, &align);
3690 c0 = cx;
3693 if (px + bit_size <= s * 8 && cx + s <= c) {
3694 /* update offset and bit position */
3695 f->c = cx;
3696 bit_pos = px;
3697 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
3698 | (bit_pos << VT_STRUCT_SHIFT);
3699 if (s != size)
3700 f->auxtype = t.t;
3701 #ifdef BF_DEBUG
3702 printf("FIX field %s offset %-2d size %-2d align %-2d "
3703 "pos %-2d bits %-2d\n",
3704 get_tok_str(f->v & ~SYM_FIELD, NULL),
3705 cx, s, align, px, bit_size);
3706 #endif
3707 } else {
3708 /* fall back to load/store single-byte wise */
3709 f->auxtype = VT_STRUCT;
3710 #ifdef BF_DEBUG
3711 printf("FIX field %s : load byte-wise\n",
3712 get_tok_str(f->v & ~SYM_FIELD, NULL));
3713 #endif
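/* Two structs illustrating the bit-field rules implemented above (a sketch;
   sizes assume a 4-byte int and the default PCC/GCC layout, i.e.
   tcc_state->ms_bitfields == 0): */
#if 0
struct adjacent { unsigned a:4, b:4; };    /* share one int: sizeof == 4       */
struct split    { unsigned a:4;
                  unsigned   :0;           /* zero width starts a new unit ... */
                  unsigned b:4; };         /* ... so sizeof == 8               */
#endif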
3718 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
3719 static void struct_decl(CType *type, int u)
3721 int v, c, size, align, flexible;
3722 int bit_size, bsize, bt;
3723 Sym *s, *ss, **ps;
3724 AttributeDef ad, ad1;
3725 CType type1, btype;
3727 memset(&ad, 0, sizeof ad);
3728 next();
3729 parse_attribute(&ad);
3730 if (tok != '{') {
3731 v = tok;
3732 next();
3733 /* struct already defined ? return it */
3734 if (v < TOK_IDENT)
3735 expect("struct/union/enum name");
3736 s = struct_find(v);
3737 if (s && (s->sym_scope == local_scope || tok != '{')) {
3738 if (u == s->type.t)
3739 goto do_decl;
3740 if (u == VT_ENUM && IS_ENUM(s->type.t))
3741 goto do_decl;
3742 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
3744 } else {
3745 v = anon_sym++;
3747 /* Record the original enum/struct/union token. */
3748 type1.t = u == VT_ENUM ? u | VT_INT | VT_UNSIGNED : u;
3749 type1.ref = NULL;
3750 /* we put an undefined size for struct/union */
3751 s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
3752 s->r = 0; /* default alignment is zero as gcc */
3753 do_decl:
3754 type->t = s->type.t;
3755 type->ref = s;
3757 if (tok == '{') {
3758 next();
3759 if (s->c != -1)
3760 tcc_error("struct/union/enum already defined");
3761 /* cannot be empty */
3762 /* empty enums are not allowed */
3763 ps = &s->next;
3764 if (u == VT_ENUM) {
3765 long long ll = 0, pl = 0, nl = 0;
3766 CType t;
3767 t.ref = s;
3768 /* enum symbols have static storage */
3769 t.t = VT_INT|VT_STATIC|VT_ENUM_VAL;
3770 for(;;) {
3771 v = tok;
3772 if (v < TOK_UIDENT)
3773 expect("identifier");
3774 ss = sym_find(v);
3775 if (ss && !local_stack)
3776 tcc_error("redefinition of enumerator '%s'",
3777 get_tok_str(v, NULL));
3778 next();
3779 if (tok == '=') {
3780 next();
3781 ll = expr_const64();
3783 ss = sym_push(v, &t, VT_CONST, 0);
3784 ss->enum_val = ll;
3785 *ps = ss, ps = &ss->next;
3786 if (ll < nl)
3787 nl = ll;
3788 if (ll > pl)
3789 pl = ll;
3790 if (tok != ',')
3791 break;
3792 next();
3793 ll++;
3794 /* NOTE: we accept a trailing comma */
3795 if (tok == '}')
3796 break;
3798 skip('}');
3799 /* set integral type of the enum */
3800 t.t = VT_INT;
3801 if (nl >= 0) {
3802 if (pl != (unsigned)pl)
3803 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
3804 t.t |= VT_UNSIGNED;
3805 } else if (pl != (int)pl || nl != (int)nl)
3806 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
3807 s->type.t = type->t = t.t | VT_ENUM;
3808 s->c = 0;
3809 /* set type for enum members */
3810 for (ss = s->next; ss; ss = ss->next) {
3811 ll = ss->enum_val;
3812 if (ll == (int)ll) /* default is int if it fits */
3813 continue;
3814 if (t.t & VT_UNSIGNED) {
3815 ss->type.t |= VT_UNSIGNED;
3816 if (ll == (unsigned)ll)
3817 continue;
3819 ss->type.t = (ss->type.t & ~VT_BTYPE)
3820 | (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
3822 } else {
3823 c = 0;
3824 flexible = 0;
3825 while (tok != '}') {
3826 if (!parse_btype(&btype, &ad1)) {
3827 skip(';');
3828 continue;
3830 while (1) {
3831 if (flexible)
3832 tcc_error("flexible array member '%s' not at the end of struct",
3833 get_tok_str(v, NULL));
3834 bit_size = -1;
3835 v = 0;
3836 type1 = btype;
3837 if (tok != ':') {
3838 if (tok != ';')
3839 type_decl(&type1, &ad1, &v, TYPE_DIRECT);
3840 if (v == 0) {
3841 if ((type1.t & VT_BTYPE) != VT_STRUCT)
3842 expect("identifier");
3843 else {
3844 int v = btype.ref->v;
3845 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
3846 if (tcc_state->ms_extensions == 0)
3847 expect("identifier");
3851 if (type_size(&type1, &align) < 0) {
3852 if ((u == VT_STRUCT) && (type1.t & VT_ARRAY) && c)
3853 flexible = 1;
3854 else
3855 tcc_error("field '%s' has incomplete type",
3856 get_tok_str(v, NULL));
3858 if ((type1.t & VT_BTYPE) == VT_FUNC ||
3859 (type1.t & VT_STORAGE))
3860 tcc_error("invalid type for '%s'",
3861 get_tok_str(v, NULL));
3863 if (tok == ':') {
3864 next();
3865 bit_size = expr_const();
3866 /* XXX: handle v = 0 case for messages */
3867 if (bit_size < 0)
3868 tcc_error("negative width in bit-field '%s'",
3869 get_tok_str(v, NULL));
3870 if (v && bit_size == 0)
3871 tcc_error("zero width for bit-field '%s'",
3872 get_tok_str(v, NULL));
3873 parse_attribute(&ad1);
3875 size = type_size(&type1, &align);
3876 if (bit_size >= 0) {
3877 bt = type1.t & VT_BTYPE;
3878 if (bt != VT_INT &&
3879 bt != VT_BYTE &&
3880 bt != VT_SHORT &&
3881 bt != VT_BOOL &&
3882 bt != VT_LLONG)
3883 tcc_error("bitfields must have scalar type");
3884 bsize = size * 8;
3885 if (bit_size > bsize) {
3886 tcc_error("width of '%s' exceeds its type",
3887 get_tok_str(v, NULL));
3888 } else if (bit_size == bsize
3889 && !ad.a.packed && !ad1.a.packed) {
3890 /* no need for bit fields */
3892 } else if (bit_size == 64) {
3893 tcc_error("field width 64 not implemented");
3894 } else {
3895 type1.t = (type1.t & ~VT_STRUCT_MASK)
3896 | VT_BITFIELD
3897 | (bit_size << (VT_STRUCT_SHIFT + 6));
3900 if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
3901 /* Remember we've seen a real field to check
3902 for placement of flexible array member. */
3903 c = 1;
3905 /* If member is a struct or bit-field, enforce
3906 placing into the struct (as anonymous). */
3907 if (v == 0 &&
3908 ((type1.t & VT_BTYPE) == VT_STRUCT ||
3909 bit_size >= 0)) {
3910 v = anon_sym++;
3912 if (v) {
3913 ss = sym_push(v | SYM_FIELD, &type1, 0, 0);
3914 ss->a = ad1.a;
3915 *ps = ss;
3916 ps = &ss->next;
3918 if (tok == ';' || tok == TOK_EOF)
3919 break;
3920 skip(',');
3922 skip(';');
3924 skip('}');
3925 parse_attribute(&ad);
3926 struct_layout(type, &ad);
3931 static void sym_to_attr(AttributeDef *ad, Sym *s)
3933 if (s->a.aligned && 0 == ad->a.aligned)
3934 ad->a.aligned = s->a.aligned;
3935 if (s->f.func_call && 0 == ad->f.func_call)
3936 ad->f.func_call = s->f.func_call;
3937 if (s->f.func_type && 0 == ad->f.func_type)
3938 ad->f.func_type = s->f.func_type;
3939 if (s->a.packed)
3940 ad->a.packed = 1;
3943 /* Add type qualifiers to a type. If the type is an array then the qualifiers
3944 are added to the element type, copied because it could be a typedef. */
3945 static void parse_btype_qualify(CType *type, int qualifiers)
3947 while (type->t & VT_ARRAY) {
3948 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
3949 type = &type->ref->type;
3951 type->t |= qualifiers;
3954 /* return 0 if no type declaration. otherwise, return the basic type
3955 and skip it.
3957 static int parse_btype(CType *type, AttributeDef *ad)
3959 int t, u, bt, st, type_found, typespec_found, g;
3960 Sym *s;
3961 CType type1;
3963 memset(ad, 0, sizeof(AttributeDef));
3964 type_found = 0;
3965 typespec_found = 0;
3966 t = VT_INT;
3967 bt = st = -1;
3968 type->ref = NULL;
3970 while(1) {
3971 switch(tok) {
3972 case TOK_EXTENSION:
3973 /* currently, we really ignore extension */
3974 next();
3975 continue;
3977 /* basic types */
3978 case TOK_CHAR:
3979 u = VT_BYTE;
3980 basic_type:
3981 next();
3982 basic_type1:
3983 if (u == VT_SHORT || u == VT_LONG) {
3984 if (st != -1 || (bt != -1 && bt != VT_INT))
3985 tmbt: tcc_error("too many basic types");
3986 st = u;
3987 } else {
3988 if (bt != -1 || (st != -1 && u != VT_INT))
3989 goto tmbt;
3990 bt = u;
3992 if (u != VT_INT)
3993 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
3994 typespec_found = 1;
3995 break;
3996 case TOK_VOID:
3997 u = VT_VOID;
3998 goto basic_type;
3999 case TOK_SHORT:
4000 u = VT_SHORT;
4001 goto basic_type;
4002 case TOK_INT:
4003 u = VT_INT;
4004 goto basic_type;
4005 case TOK_LONG:
4006 if ((t & VT_BTYPE) == VT_DOUBLE) {
4007 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
4008 } else if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
4009 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LLONG;
4010 } else {
4011 u = VT_LONG;
4012 goto basic_type;
4014 next();
4015 break;
4016 #ifdef TCC_TARGET_ARM64
4017 case TOK_UINT128:
4018 /* GCC's __uint128_t appears in some Linux header files. Make it a
4019 synonym for long double to get the size and alignment right. */
4020 u = VT_LDOUBLE;
4021 goto basic_type;
4022 #endif
4023 case TOK_BOOL:
4024 u = VT_BOOL;
4025 goto basic_type;
4026 case TOK_FLOAT:
4027 u = VT_FLOAT;
4028 goto basic_type;
4029 case TOK_DOUBLE:
4030 if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
4031 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
4032 } else {
4033 u = VT_DOUBLE;
4034 goto basic_type;
4036 next();
4037 break;
4038 case TOK_ENUM:
4039 struct_decl(&type1, VT_ENUM);
4040 basic_type2:
4041 u = type1.t;
4042 type->ref = type1.ref;
4043 goto basic_type1;
4044 case TOK_STRUCT:
4045 struct_decl(&type1, VT_STRUCT);
4046 goto basic_type2;
4047 case TOK_UNION:
4048 struct_decl(&type1, VT_UNION);
4049 goto basic_type2;
4051 /* type modifiers */
4052 case TOK_CONST1:
4053 case TOK_CONST2:
4054 case TOK_CONST3:
4055 type->t = t;
4056 parse_btype_qualify(type, VT_CONSTANT);
4057 t = type->t;
4058 next();
4059 break;
4060 case TOK_VOLATILE1:
4061 case TOK_VOLATILE2:
4062 case TOK_VOLATILE3:
4063 type->t = t;
4064 parse_btype_qualify(type, VT_VOLATILE);
4065 t = type->t;
4066 next();
4067 break;
4068 case TOK_SIGNED1:
4069 case TOK_SIGNED2:
4070 case TOK_SIGNED3:
4071 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
4072 tcc_error("signed and unsigned modifier");
4073 t |= VT_DEFSIGN;
4074 next();
4075 typespec_found = 1;
4076 break;
4077 case TOK_REGISTER:
4078 case TOK_AUTO:
4079 case TOK_RESTRICT1:
4080 case TOK_RESTRICT2:
4081 case TOK_RESTRICT3:
4082 next();
4083 break;
4084 case TOK_UNSIGNED:
4085 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
4086 tcc_error("signed and unsigned modifier");
4087 t |= VT_DEFSIGN | VT_UNSIGNED;
4088 next();
4089 typespec_found = 1;
4090 break;
4092 /* storage */
4093 case TOK_EXTERN:
4094 g = VT_EXTERN;
4095 goto storage;
4096 case TOK_STATIC:
4097 g = VT_STATIC;
4098 goto storage;
4099 case TOK_TYPEDEF:
4100 g = VT_TYPEDEF;
4101 goto storage;
4102 storage:
4103 if (t & (VT_EXTERN|VT_STATIC|VT_TYPEDEF) & ~g)
4104 tcc_error("multiple storage classes");
4105 t |= g;
4106 next();
4107 break;
4108 case TOK_INLINE1:
4109 case TOK_INLINE2:
4110 case TOK_INLINE3:
4111 t |= VT_INLINE;
4112 next();
4113 break;
4115 /* GNUC attribute */
4116 case TOK_ATTRIBUTE1:
4117 case TOK_ATTRIBUTE2:
4118 parse_attribute(ad);
4119 if (ad->attr_mode) {
4120 u = ad->attr_mode -1;
4121 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
4123 break;
4124 /* GNUC typeof */
4125 case TOK_TYPEOF1:
4126 case TOK_TYPEOF2:
4127 case TOK_TYPEOF3:
4128 next();
4129 parse_expr_type(&type1);
4130 /* remove all storage modifiers except typedef */
4131 type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
4132 if (type1.ref)
4133 sym_to_attr(ad, type1.ref);
4134 goto basic_type2;
4135 default:
4136 if (typespec_found)
4137 goto the_end;
4138 s = sym_find(tok);
4139 if (!s || !(s->type.t & VT_TYPEDEF))
4140 goto the_end;
4141 t &= ~(VT_BTYPE|VT_LONG);
4142 u = t & ~(VT_CONSTANT | VT_VOLATILE), t ^= u;
4143 type->t = (s->type.t & ~VT_TYPEDEF) | u;
4144 type->ref = s->type.ref;
4145 if (t)
4146 parse_btype_qualify(type, t);
4147 t = type->t;
4148 /* get attributes from typedef */
4149 sym_to_attr(ad, s);
4150 next();
4151 typespec_found = 1;
4152 st = bt = -2;
4153 break;
4155 type_found = 1;
4157 the_end:
4158 if (tcc_state->char_is_unsigned) {
4159 if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
4160 t |= VT_UNSIGNED;
4162 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
4163 bt = t & (VT_BTYPE|VT_LONG);
4164 if (bt == VT_LONG)
4165 t |= LONG_SIZE == 8 ? VT_LLONG : VT_INT;
4166 #ifdef TCC_TARGET_PE
4167 if (bt == VT_LDOUBLE)
4168 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_DOUBLE;
4169 #endif
4170 type->t = t;
4171 return type_found;
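/* Type-specifier combinations accepted by the loop above (a sketch; each
   declaration notes the internal type it ends up with): */
#if 0
unsigned long long ull;        /* VT_LLONG | VT_UNSIGNED                          */
long double ld;                /* "long" + "double" -> VT_LDOUBLE
                                  (mapped to plain double on PE targets)          */
signed char sc;                /* VT_BYTE with VT_DEFSIGN set                     */
const volatile int cvi = 0;    /* qualifiers merged via parse_btype_qualify()     */
typedef unsigned myuint;
const myuint cu = 0;           /* typedef plus an extra qualifier                 */
#endif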
4174 /* convert a function parameter type (array to pointer and function to
4175 function pointer) */
4176 static inline void convert_parameter_type(CType *pt)
4178 /* remove const and volatile qualifiers (XXX: const could be used
4179 to indicate a const function parameter) */
4180 pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
4181 /* array must be transformed to pointer according to ANSI C */
4182 pt->t &= ~VT_ARRAY;
4183 if ((pt->t & VT_BTYPE) == VT_FUNC) {
4184 mk_pointer(pt);
4188 ST_FUNC void parse_asm_str(CString *astr)
4190 skip('(');
4191 parse_mult_str(astr, "string constant");
4194 /* Parse an asm label and return the token */
4195 static int asm_label_instr(void)
4197 int v;
4198 CString astr;
4200 next();
4201 parse_asm_str(&astr);
4202 skip(')');
4203 #ifdef ASM_DEBUG
4204 printf("asm_alias: \"%s\"\n", (char *)astr.data);
4205 #endif
4206 v = tok_alloc(astr.data, astr.size - 1)->tok;
4207 cstr_free(&astr);
4208 return v;
4211 static int post_type(CType *type, AttributeDef *ad, int storage, int td)
4213 int n, l, t1, arg_size, align;
4214 Sym **plast, *s, *first;
4215 AttributeDef ad1;
4216 CType pt;
4218 if (tok == '(') {
4219 /* function type, or recursive declarator (return if so) */
4220 next();
4221 if (td && !(td & TYPE_ABSTRACT))
4222 return 0;
4223 if (tok == ')')
4224 l = 0;
4225 else if (parse_btype(&pt, &ad1))
4226 l = FUNC_NEW;
4227 else if (td)
4228 return 0;
4229 else
4230 l = FUNC_OLD;
4231 first = NULL;
4232 plast = &first;
4233 arg_size = 0;
4234 if (l) {
4235 for(;;) {
4236 /* read param name and compute offset */
4237 if (l != FUNC_OLD) {
4238 if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
4239 break;
4240 type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT);
4241 if ((pt.t & VT_BTYPE) == VT_VOID)
4242 tcc_error("parameter declared as void");
4243 arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
4244 } else {
4245 n = tok;
4246 if (n < TOK_UIDENT)
4247 expect("identifier");
4248 pt.t = VT_VOID; /* invalid type */
4249 next();
4251 convert_parameter_type(&pt);
4252 s = sym_push(n | SYM_FIELD, &pt, 0, 0);
4253 *plast = s;
4254 plast = &s->next;
4255 if (tok == ')')
4256 break;
4257 skip(',');
4258 if (l == FUNC_NEW && tok == TOK_DOTS) {
4259 l = FUNC_ELLIPSIS;
4260 next();
4261 break;
4263 if (l == FUNC_NEW && !parse_btype(&pt, &ad1))
4264 tcc_error("invalid type");
4266 } else
4267 /* if no parameters, then old type prototype */
4268 l = FUNC_OLD;
4269 skip(')');
4270 /* NOTE: const is ignored in returned type as it has a special
4271 meaning in gcc / C++ */
4272 type->t &= ~VT_CONSTANT;
4273 /* some ancient pre-K&R C allows a function to return an array
4274 and the array brackets to be put after the arguments, such
4275 that "int c()[]" means something like "int[] c()" */
4276 if (tok == '[') {
4277 next();
4278 skip(']'); /* only handle simple "[]" */
4279 mk_pointer(type);
4281 /* we push an anonymous symbol which will contain the function prototype */
4282 ad->f.func_args = arg_size;
4283 ad->f.func_type = l;
4284 s = sym_push(SYM_FIELD, type, 0, 0);
4285 s->a = ad->a;
4286 s->f = ad->f;
4287 s->next = first;
4288 type->t = VT_FUNC;
4289 type->ref = s;
4290 } else if (tok == '[') {
4291 int saved_nocode_wanted = nocode_wanted;
4292 /* array definition */
4293 next();
4294 if (tok == TOK_RESTRICT1)
4295 next();
4296 n = -1;
4297 t1 = 0;
4298 if (tok != ']') {
4299 if (!local_stack || (storage & VT_STATIC))
4300 vpushi(expr_const());
4301 else {
4302 /* VLAs (which can only happen with local_stack && !VT_STATIC)
4303 length must always be evaluated, even under nocode_wanted,
4304 so that its size slot is initialized (e.g. under sizeof
4305 or typeof). */
4306 nocode_wanted = 0;
4307 gexpr();
4309 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4310 n = vtop->c.i;
4311 if (n < 0)
4312 tcc_error("invalid array size");
4313 } else {
4314 if (!is_integer_btype(vtop->type.t & VT_BTYPE))
4315 tcc_error("size of variable length array should be an integer");
4316 t1 = VT_VLA;
4319 skip(']');
4320 /* parse next post type */
4321 post_type(type, ad, storage, 0);
4322 if (type->t == VT_FUNC)
4323 tcc_error("declaration of an array of functions");
4324 t1 |= type->t & VT_VLA;
4326 if (t1 & VT_VLA) {
4327 loc -= type_size(&int_type, &align);
4328 loc &= -align;
4329 n = loc;
4331 vla_runtime_type_size(type, &align);
4332 gen_op('*');
4333 vset(&int_type, VT_LOCAL|VT_LVAL, n);
4334 vswap();
4335 vstore();
4337 if (n != -1)
4338 vpop();
4339 nocode_wanted = saved_nocode_wanted;
4341 /* we push an anonymous symbol which will contain the array
4342 element type */
4343 s = sym_push(SYM_FIELD, type, 0, n);
4344 type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
4345 type->ref = s;
4347 return 1;
4350 /* Parse a type declarator (except basic type), and return the type
4351 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4352 expected. 'type' should contain the basic type. 'ad' is the
4353 attribute definition of the basic type. It can be modified by
4354 type_decl(). If this (possibly abstract) declarator is a pointer chain
4355 it returns the innermost pointed to type (equals *type, but is a different
4356 pointer); otherwise it returns 'type' itself, which is used for recursive calls. */
4357 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td)
4359 CType *post, *ret;
4360 int qualifiers, storage;
4362 /* recursive type, remove storage bits first, apply them later again */
4363 storage = type->t & VT_STORAGE;
4364 type->t &= ~VT_STORAGE;
4365 post = ret = type;
4367 while (tok == '*') {
4368 qualifiers = 0;
4369 redo:
4370 next();
4371 switch(tok) {
4372 case TOK_CONST1:
4373 case TOK_CONST2:
4374 case TOK_CONST3:
4375 qualifiers |= VT_CONSTANT;
4376 goto redo;
4377 case TOK_VOLATILE1:
4378 case TOK_VOLATILE2:
4379 case TOK_VOLATILE3:
4380 qualifiers |= VT_VOLATILE;
4381 goto redo;
4382 case TOK_RESTRICT1:
4383 case TOK_RESTRICT2:
4384 case TOK_RESTRICT3:
4385 goto redo;
4386 /* XXX: clarify attribute handling */
4387 case TOK_ATTRIBUTE1:
4388 case TOK_ATTRIBUTE2:
4389 parse_attribute(ad);
4390 break;
4392 mk_pointer(type);
4393 type->t |= qualifiers;
4394 if (ret == type)
4395 /* innermost pointed to type is the one for the first derivation */
4396 ret = pointed_type(type);
4399 if (tok == '(') {
4400 /* This is possibly a parameter type list for abstract declarators
4401 ('int ()'); use post_type to test for this. */
4402 if (!post_type(type, ad, 0, td)) {
4403 /* It's not, so it's a nested declarator, and the post operations
4404 apply to the innermost pointed to type (if any). */
4405 /* XXX: it is not correct to modify 'ad' at this point, but
4406 the syntax is not clear */
4407 parse_attribute(ad);
4408 post = type_decl(type, ad, v, td);
4409 skip(')');
4411 } else if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
4412 /* type identifier */
4413 *v = tok;
4414 next();
4415 } else {
4416 if (!(td & TYPE_ABSTRACT))
4417 expect("identifier");
4418 *v = 0;
4420 post_type(post, ad, storage, 0);
4421 parse_attribute(ad);
4422 type->t |= storage;
4423 return ret;
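/* Illustrative declarator forms handled by type_decl() (names hypothetical):

       const char *volatile *p;   // pointer chain with qualifiers
       int (*fp)(void);           // nested declarator: pointer to function
       int (*ap)[10];             // nested declarator: pointer to array

   Per the comment above, for the pointer chain the returned CType points at
   the innermost pointed-to type, here "const char". */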
4426 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
4427 ST_FUNC int lvalue_type(int t)
4429 int bt, r;
4430 r = VT_LVAL;
4431 bt = t & VT_BTYPE;
4432 if (bt == VT_BYTE || bt == VT_BOOL)
4433 r |= VT_LVAL_BYTE;
4434 else if (bt == VT_SHORT)
4435 r |= VT_LVAL_SHORT;
4436 else
4437 return r;
4438 if (t & VT_UNSIGNED)
4439 r |= VT_LVAL_UNSIGNED;
4440 return r;
4443 /* indirection with full error checking and bound check */
4444 ST_FUNC void indir(void)
4446 if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
4447 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
4448 return;
4449 expect("pointer");
4451 if (vtop->r & VT_LVAL)
4452 gv(RC_INT);
4453 vtop->type = *pointed_type(&vtop->type);
4454 /* Arrays and functions are never lvalues */
4455 if (!(vtop->type.t & VT_ARRAY) && !(vtop->type.t & VT_VLA)
4456 && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
4457 vtop->r |= lvalue_type(vtop->type.t);
4458 /* if bound checking, the referenced pointer must be checked */
4459 #ifdef CONFIG_TCC_BCHECK
4460 if (tcc_state->do_bounds_check)
4461 vtop->r |= VT_MUSTBOUND;
4462 #endif
4466 /* pass a parameter to a function and do type checking and casting */
4467 static void gfunc_param_typed(Sym *func, Sym *arg)
4469 int func_type;
4470 CType type;
4472 func_type = func->f.func_type;
4473 if (func_type == FUNC_OLD ||
4474 (func_type == FUNC_ELLIPSIS && arg == NULL)) {
4475 /* default casting : only need to convert float to double */
4476 if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
4477 gen_cast_s(VT_DOUBLE);
4478 } else if (vtop->type.t & VT_BITFIELD) {
4479 type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
4480 type.ref = vtop->type.ref;
4481 gen_cast(&type);
4483 } else if (arg == NULL) {
4484 tcc_error("too many arguments to function");
4485 } else {
4486 type = arg->type;
4487 type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
4488 gen_assign_cast(&type);
4492 /* parse an expression and return its type without any side effect. */
4493 static void expr_type(CType *type, void (*expr_fn)(void))
4495 nocode_wanted++;
4496 expr_fn();
4497 *type = vtop->type;
4498 vpop();
4499 nocode_wanted--;
4502 /* parse an expression of the form '(type)' or '(expr)' and return its
4503 type */
4504 static void parse_expr_type(CType *type)
4506 int n;
4507 AttributeDef ad;
4509 skip('(');
4510 if (parse_btype(type, &ad)) {
4511 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4512 } else {
4513 expr_type(type, gexpr);
4515 skip(')');
4518 static void parse_type(CType *type)
4520 AttributeDef ad;
4521 int n;
4523 if (!parse_btype(type, &ad)) {
4524 expect("type");
4526 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4529 static void parse_builtin_params(int nc, const char *args)
4531 char c, sep = '(';
4532 CType t;
4533 if (nc)
4534 nocode_wanted++;
4535 next();
4536 while ((c = *args++)) {
4537 skip(sep);
4538 sep = ',';
4539 switch (c) {
4540 case 'e': expr_eq(); continue;
4541 case 't': parse_type(&t); vpush(&t); continue;
4542 default: tcc_error("internal error"); break;
4545 skip(')');
4546 if (nc)
4547 nocode_wanted--;
4550 ST_FUNC void unary(void)
4552 int n, t, align, size, r, sizeof_caller;
4553 CType type;
4554 Sym *s;
4555 AttributeDef ad;
4557 sizeof_caller = in_sizeof;
4558 in_sizeof = 0;
4559 type.ref = NULL;
4560 /* XXX: GCC 2.95.3 does not generate a table although it would be
4561 better here */
4562 tok_next:
4563 switch(tok) {
4564 case TOK_EXTENSION:
4565 next();
4566 goto tok_next;
4567 case TOK_LCHAR:
4568 #ifdef TCC_TARGET_PE
4569 t = VT_SHORT|VT_UNSIGNED;
4570 goto push_tokc;
4571 #endif
4572 case TOK_CINT:
4573 case TOK_CCHAR:
4574 t = VT_INT;
4575 push_tokc:
4576 type.t = t;
4577 vsetc(&type, VT_CONST, &tokc);
4578 next();
4579 break;
4580 case TOK_CUINT:
4581 t = VT_INT | VT_UNSIGNED;
4582 goto push_tokc;
4583 case TOK_CLLONG:
4584 t = VT_LLONG;
4585 goto push_tokc;
4586 case TOK_CULLONG:
4587 t = VT_LLONG | VT_UNSIGNED;
4588 goto push_tokc;
4589 case TOK_CFLOAT:
4590 t = VT_FLOAT;
4591 goto push_tokc;
4592 case TOK_CDOUBLE:
4593 t = VT_DOUBLE;
4594 goto push_tokc;
4595 case TOK_CLDOUBLE:
4596 t = VT_LDOUBLE;
4597 goto push_tokc;
4598 case TOK_CLONG:
4599 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG;
4600 goto push_tokc;
4601 case TOK_CULONG:
4602 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG | VT_UNSIGNED;
4603 goto push_tokc;
4604 case TOK___FUNCTION__:
4605 if (!gnu_ext)
4606 goto tok_identifier;
4607 /* fall thru */
4608 case TOK___FUNC__:
4610 void *ptr;
4611 int len;
4612 /* special function name identifier */
4613 len = strlen(funcname) + 1;
4614 /* generate char[len] type */
4615 type.t = VT_BYTE;
4616 mk_pointer(&type);
4617 type.t |= VT_ARRAY;
4618 type.ref->c = len;
4619 vpush_ref(&type, data_section, data_section->data_offset, len);
4620 if (!NODATA_WANTED) {
4621 ptr = section_ptr_add(data_section, len);
4622 memcpy(ptr, funcname, len);
4624 next();
4626 break;
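/* Example of the __func__ / __FUNCTION__ handling above (the function name
   "report" is hypothetical): the name is emitted as a char array into the
   data section and a reference to it is pushed:

       void report(void)
       {
           const char *who = __func__;   // points to "report"
       }
*/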
4627 case TOK_LSTR:
4628 #ifdef TCC_TARGET_PE
4629 t = VT_SHORT | VT_UNSIGNED;
4630 #else
4631 t = VT_INT;
4632 #endif
4633 goto str_init;
4634 case TOK_STR:
4635 /* string parsing */
4636 t = VT_BYTE;
4637 if (tcc_state->char_is_unsigned)
4638 t = VT_BYTE | VT_UNSIGNED;
4639 str_init:
4640 if (tcc_state->warn_write_strings)
4641 t |= VT_CONSTANT;
4642 type.t = t;
4643 mk_pointer(&type);
4644 type.t |= VT_ARRAY;
4645 memset(&ad, 0, sizeof(AttributeDef));
4646 decl_initializer_alloc(&type, &ad, VT_CONST, 2, 0, 0);
4647 break;
4648 case '(':
4649 next();
4650 /* cast ? */
4651 if (parse_btype(&type, &ad)) {
4652 type_decl(&type, &ad, &n, TYPE_ABSTRACT);
4653 skip(')');
4654 /* check ISOC99 compound literal */
4655 if (tok == '{') {
4656 /* data is allocated locally by default */
4657 if (global_expr)
4658 r = VT_CONST;
4659 else
4660 r = VT_LOCAL;
4661 /* all except arrays are lvalues */
4662 if (!(type.t & VT_ARRAY))
4663 r |= lvalue_type(type.t);
4664 memset(&ad, 0, sizeof(AttributeDef));
4665 decl_initializer_alloc(&type, &ad, r, 1, 0, 0);
4666 } else {
4667 if (sizeof_caller) {
4668 vpush(&type);
4669 return;
4671 unary();
4672 gen_cast(&type);
4674 } else if (tok == '{') {
4675 int saved_nocode_wanted = nocode_wanted;
4676 if (const_wanted)
4677 tcc_error("expected constant");
4678 /* save all registers */
4679 save_regs(0);
4680 /* statement expression : we do not accept break/continue
4681 inside as GCC does. We do retain the nocode_wanted state,
4682 as statement expressions can't ever be entered from the
4683 outside, so any reactivation of code emission (from labels
4684 or loop heads) can be disabled again after the end of it. */
4685 block(NULL, NULL, 1);
4686 nocode_wanted = saved_nocode_wanted;
4687 skip(')');
4688 } else {
4689 gexpr();
4690 skip(')');
4692 break;
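/* The '(' case above covers three distinct forms (illustrative only, all
   names hypothetical):

       double d = (double)1;              // cast
       int *p = (int[]){1, 2, 3};         // ISOC99 compound literal
       int m = ({ int t = 2; t * t; });   // GNU statement expression
*/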
4693 case '*':
4694 next();
4695 unary();
4696 indir();
4697 break;
4698 case '&':
4699 next();
4700 unary();
4701 /* function names must be treated as function pointers,
4702 except for unary '&' and sizeof. Since we consider that
4703 functions are not lvalues, we only have to handle it
4704 there and in function calls. */
4705 /* arrays can also be used although they are not lvalues */
4706 if ((vtop->type.t & VT_BTYPE) != VT_FUNC &&
4707 !(vtop->type.t & VT_ARRAY))
4708 test_lvalue();
4709 mk_pointer(&vtop->type);
4710 gaddrof();
4711 break;
4712 case '!':
4713 next();
4714 unary();
4715 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4716 gen_cast_s(VT_BOOL);
4717 vtop->c.i = !vtop->c.i;
4718 } else if ((vtop->r & VT_VALMASK) == VT_CMP)
4719 vtop->c.i ^= 1;
4720 else {
4721 save_regs(1);
4722 vseti(VT_JMP, gvtst(1, 0));
4724 break;
4725 case '~':
4726 next();
4727 unary();
4728 vpushi(-1);
4729 gen_op('^');
4730 break;
4731 case '+':
4732 next();
4733 unary();
4734 if ((vtop->type.t & VT_BTYPE) == VT_PTR)
4735 tcc_error("pointer not accepted for unary plus");
4736 /* In order to force a cast, we add zero, except for floating point
4737 where we really need a noop (otherwise -0.0 would be transformed
4738 into +0.0). */
4739 if (!is_float(vtop->type.t)) {
4740 vpushi(0);
4741 gen_op('+');
4743 break;
4744 case TOK_SIZEOF:
4745 case TOK_ALIGNOF1:
4746 case TOK_ALIGNOF2:
4747 t = tok;
4748 next();
4749 in_sizeof++;
4750 expr_type(&type, unary); /* unary() resets in_sizeof to 0 */
4751 s = vtop[1].sym; /* hack: accessing previous vtop */
4752 size = type_size(&type, &align);
4753 if (s && s->a.aligned)
4754 align = 1 << (s->a.aligned - 1);
4755 if (t == TOK_SIZEOF) {
4756 if (!(type.t & VT_VLA)) {
4757 if (size < 0)
4758 tcc_error("sizeof applied to an incomplete type");
4759 vpushs(size);
4760 } else {
4761 vla_runtime_type_size(&type, &align);
4763 } else {
4764 vpushs(align);
4766 vtop->type.t |= VT_UNSIGNED;
4767 break;
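/* sizeof/__alignof__ examples for the case above (struct s and f are
   hypothetical): constant sizes are pushed directly, VLA sizes are
   computed at run time:

       struct s { char c; double d; };
       int a = sizeof(struct s);      // compile-time constant
       int b = __alignof__(struct s); // alignment of the type

       void f(int n)
       {
           int v[n];
           int c = sizeof v;          // VLA: evaluated at run time
       }
*/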
4769 case TOK_builtin_expect:
4770 /* __builtin_expect is a no-op for now */
4771 parse_builtin_params(0, "ee");
4772 vpop();
4773 break;
4774 case TOK_builtin_types_compatible_p:
4775 parse_builtin_params(0, "tt");
4776 vtop[-1].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
4777 vtop[0].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
4778 n = is_compatible_types(&vtop[-1].type, &vtop[0].type);
4779 vtop -= 2;
4780 vpushi(n);
4781 break;
4782 case TOK_builtin_choose_expr:
4784 int64_t c;
4785 next();
4786 skip('(');
4787 c = expr_const64();
4788 skip(',');
4789 if (!c) {
4790 nocode_wanted++;
4792 expr_eq();
4793 if (!c) {
4794 vpop();
4795 nocode_wanted--;
4797 skip(',');
4798 if (c) {
4799 nocode_wanted++;
4801 expr_eq();
4802 if (c) {
4803 vpop();
4804 nocode_wanted--;
4806 skip(')');
4808 break;
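/* __builtin_choose_expr: the first argument must be a constant expression;
   it selects which of the two remaining expressions is compiled, while the
   other one is parsed under nocode_wanted and discarded.  For instance:

       int wide = __builtin_choose_expr(sizeof(long) == 8, 1, 0);
*/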
4809 case TOK_builtin_constant_p:
4810 parse_builtin_params(1, "e");
4811 n = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
4812 vtop--;
4813 vpushi(n);
4814 break;
4815 case TOK_builtin_frame_address:
4816 case TOK_builtin_return_address:
4818 int tok1 = tok;
4819 int level;
4820 next();
4821 skip('(');
4822 if (tok != TOK_CINT) {
4823 tcc_error("%s only takes positive integers",
4824 tok1 == TOK_builtin_return_address ?
4825 "__builtin_return_address" :
4826 "__builtin_frame_address");
4828 level = (uint32_t)tokc.i;
4829 next();
4830 skip(')');
4831 type.t = VT_VOID;
4832 mk_pointer(&type);
4833 vset(&type, VT_LOCAL, 0); /* local frame */
4834 while (level--) {
4835 mk_pointer(&vtop->type);
4836 indir(); /* -> parent frame */
4838 if (tok1 == TOK_builtin_return_address) {
4839 // assume return address is just above frame pointer on stack
4840 vpushi(PTR_SIZE);
4841 gen_op('+');
4842 mk_pointer(&vtop->type);
4843 indir();
4846 break;
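/* Both builtins above require a literal integer level; level 0 refers to
   the current function.  Illustrative usage:

       void *fp  = __builtin_frame_address(0);    // current frame
       void *ret = __builtin_return_address(0);   // caller's return address
*/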
4847 #ifdef TCC_TARGET_X86_64
4848 #ifdef TCC_TARGET_PE
4849 case TOK_builtin_va_start:
4850 parse_builtin_params(0, "ee");
4851 r = vtop->r & VT_VALMASK;
4852 if (r == VT_LLOCAL)
4853 r = VT_LOCAL;
4854 if (r != VT_LOCAL)
4855 tcc_error("__builtin_va_start expects a local variable");
4856 vtop->r = r;
4857 vtop->type = char_pointer_type;
4858 vtop->c.i += 8;
4859 vstore();
4860 break;
4861 #else
4862 case TOK_builtin_va_arg_types:
4863 parse_builtin_params(0, "t");
4864 vpushi(classify_x86_64_va_arg(&vtop->type));
4865 vswap();
4866 vpop();
4867 break;
4868 #endif
4869 #endif
4871 #ifdef TCC_TARGET_ARM64
4872 case TOK___va_start: {
4873 parse_builtin_params(0, "ee");
4874 //xx check types
4875 gen_va_start();
4876 vpushi(0);
4877 vtop->type.t = VT_VOID;
4878 break;
4880 case TOK___va_arg: {
4881 parse_builtin_params(0, "et");
4882 type = vtop->type;
4883 vpop();
4884 //xx check types
4885 gen_va_arg(&type);
4886 vtop->type = type;
4887 break;
4889 case TOK___arm64_clear_cache: {
4890 parse_builtin_params(0, "ee");
4891 gen_clear_cache();
4892 vpushi(0);
4893 vtop->type.t = VT_VOID;
4894 break;
4896 #endif
4897 /* pre operations */
4898 case TOK_INC:
4899 case TOK_DEC:
4900 t = tok;
4901 next();
4902 unary();
4903 inc(0, t);
4904 break;
4905 case '-':
4906 next();
4907 unary();
4908 t = vtop->type.t & VT_BTYPE;
4909 if (is_float(t)) {
4910 /* In IEEE negate(x) isn't subtract(0,x), but rather
4911 subtract(-0, x). */
4912 vpush(&vtop->type);
4913 if (t == VT_FLOAT)
4914 vtop->c.f = -1.0 * 0.0;
4915 else if (t == VT_DOUBLE)
4916 vtop->c.d = -1.0 * 0.0;
4917 else
4918 vtop->c.ld = -1.0 * 0.0;
4919 } else
4920 vpushi(0);
4921 vswap();
4922 gen_op('-');
4923 break;
4924 case TOK_LAND:
4925 if (!gnu_ext)
4926 goto tok_identifier;
4927 next();
4928 /* allow taking the address of a label */
4929 if (tok < TOK_UIDENT)
4930 expect("label identifier");
4931 s = label_find(tok);
4932 if (!s) {
4933 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
4934 } else {
4935 if (s->r == LABEL_DECLARED)
4936 s->r = LABEL_FORWARD;
4938 if (!s->type.t) {
4939 s->type.t = VT_VOID;
4940 mk_pointer(&s->type);
4941 s->type.t |= VT_STATIC;
4943 vpushsym(&s->type, s);
4944 next();
4945 break;
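/* GNU extension handled above: '&&label' yields the address of a label,
   usually combined with computed goto (see the 'goto *' handling in
   block()).  Illustrative sketch, names hypothetical:

       void f(int i)
       {
           void *p = i ? &&odd : &&even;
           goto *p;
       even: return;
       odd:  return;
       }
*/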
4947 case TOK_GENERIC:
4949 CType controlling_type;
4950 int has_default = 0;
4951 int has_match = 0;
4952 int learn = 0;
4953 TokenString *str = NULL;
4955 next();
4956 skip('(');
4957 expr_type(&controlling_type, expr_eq);
4958 controlling_type.t &= ~(VT_CONSTANT | VT_VOLATILE | VT_ARRAY);
4959 for (;;) {
4960 learn = 0;
4961 skip(',');
4962 if (tok == TOK_DEFAULT) {
4963 if (has_default)
4964 tcc_error("too many 'default'");
4965 has_default = 1;
4966 if (!has_match)
4967 learn = 1;
4968 next();
4969 } else {
4970 AttributeDef ad_tmp;
4971 int itmp;
4972 CType cur_type;
4973 parse_btype(&cur_type, &ad_tmp);
4974 type_decl(&cur_type, &ad_tmp, &itmp, TYPE_ABSTRACT);
4975 if (compare_types(&controlling_type, &cur_type, 0)) {
4976 if (has_match) {
4977 tcc_error("type match twice");
4979 has_match = 1;
4980 learn = 1;
4983 skip(':');
4984 if (learn) {
4985 if (str)
4986 tok_str_free(str);
4987 skip_or_save_block(&str);
4988 } else {
4989 skip_or_save_block(NULL);
4991 if (tok == ')')
4992 break;
4994 if (!str) {
4995 char buf[60];
4996 type_to_str(buf, sizeof buf, &controlling_type, NULL);
4997 tcc_error("type '%s' does not match any association", buf);
4999 begin_macro(str, 1);
5000 next();
5001 expr_eq();
5002 if (tok != TOK_EOF)
5003 expect(",");
5004 end_macro();
5005 next();
5006 break;
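/* _Generic example for the association matching above (the macro name is
   hypothetical).  Top-level qualifiers and array-ness of the controlling
   type are stripped before comparing against each association:

       #define typename(x) _Generic((x), int: "int", float: "float", default: "other")

       const char *s = typename(1.0f);   // selects the float association
*/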
5008 // special qnan , snan and infinity values
5009 case TOK___NAN__:
5010 vpush64(VT_DOUBLE, 0x7ff8000000000000ULL);
5011 next();
5012 break;
5013 case TOK___SNAN__:
5014 vpush64(VT_DOUBLE, 0x7ff0000000000001ULL);
5015 next();
5016 break;
5017 case TOK___INF__:
5018 vpush64(VT_DOUBLE, 0x7ff0000000000000ULL);
5019 next();
5020 break;
5022 default:
5023 tok_identifier:
5024 t = tok;
5025 next();
5026 if (t < TOK_UIDENT)
5027 expect("identifier");
5028 s = sym_find(t);
5029 if (!s) {
5030 const char *name = get_tok_str(t, NULL);
5031 if (tok != '(')
5032 tcc_error("'%s' undeclared", name);
5033 /* for simple function calls, we tolerate an undeclared
5034 external reference to an int() function */
5035 if (tcc_state->warn_implicit_function_declaration
5036 #ifdef TCC_TARGET_PE
5037 /* people must be warned about using undeclared WINAPI functions
5038 (which usually start with uppercase letter) */
5039 || (name[0] >= 'A' && name[0] <= 'Z')
5040 #endif
5042 tcc_warning("implicit declaration of function '%s'", name);
5043 s = external_global_sym(t, &func_old_type, 0);
5046 r = s->r;
5047 /* A symbol that has a register is a local register variable,
5048 which starts out as a VT_LOCAL value. */
5049 if ((r & VT_VALMASK) < VT_CONST)
5050 r = (r & ~VT_VALMASK) | VT_LOCAL;
5052 vset(&s->type, r, s->c);
5053 /* Point to s as backpointer (even without r&VT_SYM).
5054 Will be used by at least the x86 inline asm parser for
5055 regvars. */
5056 vtop->sym = s;
5058 if (r & VT_SYM) {
5059 vtop->c.i = 0;
5060 } else if (r == VT_CONST && IS_ENUM_VAL(s->type.t)) {
5061 vtop->c.i = s->enum_val;
5063 break;
5066 /* post operations */
5067 while (1) {
5068 if (tok == TOK_INC || tok == TOK_DEC) {
5069 inc(1, tok);
5070 next();
5071 } else if (tok == '.' || tok == TOK_ARROW || tok == TOK_CDOUBLE) {
5072 int qualifiers;
5073 /* field */
5074 if (tok == TOK_ARROW)
5075 indir();
5076 qualifiers = vtop->type.t & (VT_CONSTANT | VT_VOLATILE);
5077 test_lvalue();
5078 gaddrof();
5079 /* expect pointer on structure */
5080 if ((vtop->type.t & VT_BTYPE) != VT_STRUCT)
5081 expect("struct or union");
5082 if (tok == TOK_CDOUBLE)
5083 expect("field name");
5084 next();
5085 if (tok == TOK_CINT || tok == TOK_CUINT)
5086 expect("field name");
5087 s = find_field(&vtop->type, tok);
5088 if (!s)
5089 tcc_error("field not found: %s", get_tok_str(tok & ~SYM_FIELD, &tokc));
5090 /* add field offset to pointer */
5091 vtop->type = char_pointer_type; /* change type to 'char *' */
5092 vpushi(s->c);
5093 gen_op('+');
5094 /* change type to field type, and set to lvalue */
5095 vtop->type = s->type;
5096 vtop->type.t |= qualifiers;
5097 /* an array is never an lvalue */
5098 if (!(vtop->type.t & VT_ARRAY)) {
5099 vtop->r |= lvalue_type(vtop->type.t);
5100 #ifdef CONFIG_TCC_BCHECK
5101 /* if bound checking, the referenced pointer must be checked */
5102 if (tcc_state->do_bounds_check && (vtop->r & VT_VALMASK) != VT_LOCAL)
5103 vtop->r |= VT_MUSTBOUND;
5104 #endif
5106 next();
5107 } else if (tok == '[') {
5108 next();
5109 gexpr();
5110 gen_op('+');
5111 indir();
5112 skip(']');
5113 } else if (tok == '(') {
5114 SValue ret;
5115 Sym *sa;
5116 int nb_args, ret_nregs, ret_align, regsize, variadic;
5118 /* function call */
5119 if ((vtop->type.t & VT_BTYPE) != VT_FUNC) {
5120 /* pointer test (no array accepted) */
5121 if ((vtop->type.t & (VT_BTYPE | VT_ARRAY)) == VT_PTR) {
5122 vtop->type = *pointed_type(&vtop->type);
5123 if ((vtop->type.t & VT_BTYPE) != VT_FUNC)
5124 goto error_func;
5125 } else {
5126 error_func:
5127 expect("function pointer");
5129 } else {
5130 vtop->r &= ~VT_LVAL; /* no lvalue */
5132 /* get return type */
5133 s = vtop->type.ref;
5134 next();
5135 sa = s->next; /* first parameter */
5136 nb_args = regsize = 0;
5137 ret.r2 = VT_CONST;
5138 /* compute first implicit argument if a structure is returned */
5139 if ((s->type.t & VT_BTYPE) == VT_STRUCT) {
5140 variadic = (s->f.func_type == FUNC_ELLIPSIS);
5141 ret_nregs = gfunc_sret(&s->type, variadic, &ret.type,
5142 &ret_align, &regsize);
5143 if (!ret_nregs) {
5144 /* get some space for the returned structure */
5145 size = type_size(&s->type, &align);
5146 #ifdef TCC_TARGET_ARM64
5147 /* On arm64, a small struct is returned in registers.
5148 It is much easier to write it to memory if we know
5149 that we are allowed to write some extra bytes, so
5150 round the allocated space up to a power of 2: */
5151 if (size < 16)
5152 while (size & (size - 1))
5153 size = (size | (size - 1)) + 1;
5154 #endif
5155 loc = (loc - size) & -align;
5156 ret.type = s->type;
5157 ret.r = VT_LOCAL | VT_LVAL;
5158 /* pass it as 'int' to avoid structure arg passing
5159 problems */
5160 vseti(VT_LOCAL, loc);
5161 ret.c = vtop->c;
5162 nb_args++;
5164 } else {
5165 ret_nregs = 1;
5166 ret.type = s->type;
5169 if (ret_nregs) {
5170 /* return in register */
5171 if (is_float(ret.type.t)) {
5172 ret.r = reg_fret(ret.type.t);
5173 #ifdef TCC_TARGET_X86_64
5174 if ((ret.type.t & VT_BTYPE) == VT_QFLOAT)
5175 ret.r2 = REG_QRET;
5176 #endif
5177 } else {
5178 #ifndef TCC_TARGET_ARM64
5179 #ifdef TCC_TARGET_X86_64
5180 if ((ret.type.t & VT_BTYPE) == VT_QLONG)
5181 #else
5182 if ((ret.type.t & VT_BTYPE) == VT_LLONG)
5183 #endif
5184 ret.r2 = REG_LRET;
5185 #endif
5186 ret.r = REG_IRET;
5188 ret.c.i = 0;
5190 if (tok != ')') {
5191 for(;;) {
5192 expr_eq();
5193 gfunc_param_typed(s, sa);
5194 nb_args++;
5195 if (sa)
5196 sa = sa->next;
5197 if (tok == ')')
5198 break;
5199 skip(',');
5202 if (sa)
5203 tcc_error("too few arguments to function");
5204 skip(')');
5205 gfunc_call(nb_args);
5207 /* return value */
5208 for (r = ret.r + ret_nregs + !ret_nregs; r-- > ret.r;) {
5209 vsetc(&ret.type, r, &ret.c);
5210 vtop->r2 = ret.r2; /* Loop only happens when r2 is VT_CONST */
5213 /* handle packed struct return */
5214 if (((s->type.t & VT_BTYPE) == VT_STRUCT) && ret_nregs) {
5215 int addr, offset;
5217 size = type_size(&s->type, &align);
5218 /* We're writing whole regs often, make sure there's enough
5219 space. Assume register size is power of 2. */
5220 if (regsize > align)
5221 align = regsize;
5222 loc = (loc - size) & -align;
5223 addr = loc;
5224 offset = 0;
5225 for (;;) {
5226 vset(&ret.type, VT_LOCAL | VT_LVAL, addr + offset);
5227 vswap();
5228 vstore();
5229 vtop--;
5230 if (--ret_nregs == 0)
5231 break;
5232 offset += regsize;
5234 vset(&s->type, VT_LOCAL | VT_LVAL, addr);
5236 } else {
5237 break;
5242 ST_FUNC void expr_prod(void)
5244 int t;
5246 unary();
5247 while (tok == '*' || tok == '/' || tok == '%') {
5248 t = tok;
5249 next();
5250 unary();
5251 gen_op(t);
5255 ST_FUNC void expr_sum(void)
5257 int t;
5259 expr_prod();
5260 while (tok == '+' || tok == '-') {
5261 t = tok;
5262 next();
5263 expr_prod();
5264 gen_op(t);
5268 static void expr_shift(void)
5270 int t;
5272 expr_sum();
5273 while (tok == TOK_SHL || tok == TOK_SAR) {
5274 t = tok;
5275 next();
5276 expr_sum();
5277 gen_op(t);
5281 static void expr_cmp(void)
5283 int t;
5285 expr_shift();
5286 while ((tok >= TOK_ULE && tok <= TOK_GT) ||
5287 tok == TOK_ULT || tok == TOK_UGE) {
5288 t = tok;
5289 next();
5290 expr_shift();
5291 gen_op(t);
5295 static void expr_cmpeq(void)
5297 int t;
5299 expr_cmp();
5300 while (tok == TOK_EQ || tok == TOK_NE) {
5301 t = tok;
5302 next();
5303 expr_cmp();
5304 gen_op(t);
5308 static void expr_and(void)
5310 expr_cmpeq();
5311 while (tok == '&') {
5312 next();
5313 expr_cmpeq();
5314 gen_op('&');
5318 static void expr_xor(void)
5320 expr_and();
5321 while (tok == '^') {
5322 next();
5323 expr_and();
5324 gen_op('^');
5328 static void expr_or(void)
5330 expr_xor();
5331 while (tok == '|') {
5332 next();
5333 expr_xor();
5334 gen_op('|');
5338 static void expr_land(void)
5340 expr_or();
5341 if (tok == TOK_LAND) {
5342 int t = 0;
5343 for(;;) {
5344 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5345 gen_cast_s(VT_BOOL);
5346 if (vtop->c.i) {
5347 vpop();
5348 } else {
5349 nocode_wanted++;
5350 while (tok == TOK_LAND) {
5351 next();
5352 expr_or();
5353 vpop();
5355 nocode_wanted--;
5356 if (t)
5357 gsym(t);
5358 gen_cast_s(VT_INT);
5359 break;
5361 } else {
5362 if (!t)
5363 save_regs(1);
5364 t = gvtst(1, t);
5366 if (tok != TOK_LAND) {
5367 if (t)
5368 vseti(VT_JMPI, t);
5369 else
5370 vpushi(1);
5371 break;
5373 next();
5374 expr_or();
5379 static void expr_lor(void)
5381 expr_land();
5382 if (tok == TOK_LOR) {
5383 int t = 0;
5384 for(;;) {
5385 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5386 gen_cast_s(VT_BOOL);
5387 if (!vtop->c.i) {
5388 vpop();
5389 } else {
5390 nocode_wanted++;
5391 while (tok == TOK_LOR) {
5392 next();
5393 expr_land();
5394 vpop();
5396 nocode_wanted--;
5397 if (t)
5398 gsym(t);
5399 gen_cast_s(VT_INT);
5400 break;
5402 } else {
5403 if (!t)
5404 save_regs(1);
5405 t = gvtst(0, t);
5407 if (tok != TOK_LOR) {
5408 if (t)
5409 vseti(VT_JMP, t);
5410 else
5411 vpushi(0);
5412 break;
5414 next();
5415 expr_land();
5420 /* Assuming vtop is a value used in a conditional context
5421 (i.e. compared with zero) return 0 if it's false, 1 if
5422 true and -1 if it can't be statically determined. */
5423 static int condition_3way(void)
5425 int c = -1;
5426 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
5427 (!(vtop->r & VT_SYM) || !vtop->sym->a.weak)) {
5428 vdup();
5429 gen_cast_s(VT_BOOL);
5430 c = vtop->c.i;
5431 vpop();
5433 return c;
5436 static void expr_cond(void)
5438 int tt, u, r1, r2, rc, t1, t2, bt1, bt2, islv, c, g;
5439 SValue sv;
5440 CType type, type1, type2;
5442 expr_lor();
5443 if (tok == '?') {
5444 next();
5445 c = condition_3way();
5446 g = (tok == ':' && gnu_ext);
5447 if (c < 0) {
5448 /* needed to avoid having different registers saved in
5449 each branch */
5450 if (is_float(vtop->type.t)) {
5451 rc = RC_FLOAT;
5452 #ifdef TCC_TARGET_X86_64
5453 if ((vtop->type.t & VT_BTYPE) == VT_LDOUBLE) {
5454 rc = RC_ST0;
5456 #endif
5457 } else
5458 rc = RC_INT;
5459 gv(rc);
5460 save_regs(1);
5461 if (g)
5462 gv_dup();
5463 tt = gvtst(1, 0);
5465 } else {
5466 if (!g)
5467 vpop();
5468 tt = 0;
5471 if (1) {
5472 if (c == 0)
5473 nocode_wanted++;
5474 if (!g)
5475 gexpr();
5477 type1 = vtop->type;
5478 sv = *vtop; /* save value to handle it later */
5479 vtop--; /* no vpop so that FP stack is not flushed */
5480 skip(':');
5482 u = 0;
5483 if (c < 0)
5484 u = gjmp(0);
5485 gsym(tt);
5487 if (c == 0)
5488 nocode_wanted--;
5489 if (c == 1)
5490 nocode_wanted++;
5491 expr_cond();
5492 if (c == 1)
5493 nocode_wanted--;
5495 type2 = vtop->type;
5496 t1 = type1.t;
5497 bt1 = t1 & VT_BTYPE;
5498 t2 = type2.t;
5499 bt2 = t2 & VT_BTYPE;
5500 type.ref = NULL;
5502 /* cast operands to correct type according to ISOC rules */
5503 if (is_float(bt1) || is_float(bt2)) {
5504 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
5505 type.t = VT_LDOUBLE;
5507 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
5508 type.t = VT_DOUBLE;
5509 } else {
5510 type.t = VT_FLOAT;
5512 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
5513 /* cast to biggest op */
5514 type.t = VT_LLONG | VT_LONG;
5515 if (bt1 == VT_LLONG)
5516 type.t &= t1;
5517 if (bt2 == VT_LLONG)
5518 type.t &= t2;
5519 /* convert to unsigned if it does not fit in a long long */
5520 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
5521 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
5522 type.t |= VT_UNSIGNED;
5523 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
5524 /* If one is a null ptr constant the result type
5525 is the other. */
5526 if (is_null_pointer (vtop))
5527 type = type1;
5528 else if (is_null_pointer (&sv))
5529 type = type2;
5530 /* XXX: test pointer compatibility, C99 has more elaborate
5531 rules here. */
5532 else
5533 type = type1;
5534 } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
5535 /* XXX: test function pointer compatibility */
5536 type = bt1 == VT_FUNC ? type1 : type2;
5537 } else if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
5538 /* XXX: test structure compatibility */
5539 type = bt1 == VT_STRUCT ? type1 : type2;
5540 } else if (bt1 == VT_VOID || bt2 == VT_VOID) {
5541 /* NOTE: as an extension, we accept void on only one side */
5542 type.t = VT_VOID;
5543 } else {
5544 /* integer operations */
5545 type.t = VT_INT | (VT_LONG & (t1 | t2));
5546 /* convert to unsigned if it does not fit in an integer */
5547 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
5548 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
5549 type.t |= VT_UNSIGNED;
5551 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
5552 that `(expr ? a : b).mem` does not error with "lvalue expected" */
5553 islv = (vtop->r & VT_LVAL) && (sv.r & VT_LVAL) && VT_STRUCT == (type.t & VT_BTYPE);
5554 islv &= c < 0;
5556 /* now we convert second operand */
5557 if (c != 1) {
5558 gen_cast(&type);
5559 if (islv) {
5560 mk_pointer(&vtop->type);
5561 gaddrof();
5562 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5563 gaddrof();
5566 rc = RC_INT;
5567 if (is_float(type.t)) {
5568 rc = RC_FLOAT;
5569 #ifdef TCC_TARGET_X86_64
5570 if ((type.t & VT_BTYPE) == VT_LDOUBLE) {
5571 rc = RC_ST0;
5573 #endif
5574 } else if ((type.t & VT_BTYPE) == VT_LLONG) {
5575 /* for long longs, we use fixed registers to avoid having
5576 to handle a complicated move */
5577 rc = RC_IRET;
5580 tt = r2 = 0;
5581 if (c < 0) {
5582 r2 = gv(rc);
5583 tt = gjmp(0);
5585 gsym(u);
5587 /* this is horrible, but we must also convert first
5588 operand */
5589 if (c != 0) {
5590 *vtop = sv;
5591 gen_cast(&type);
5592 if (islv) {
5593 mk_pointer(&vtop->type);
5594 gaddrof();
5595 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5596 gaddrof();
5599 if (c < 0) {
5600 r1 = gv(rc);
5601 move_reg(r2, r1, type.t);
5602 vtop->r = r2;
5603 gsym(tt);
5604 if (islv)
5605 indir();
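/* Illustration of the struct-lvalue transformation described above (struct
   pt and the variables are hypothetical):

       struct pt { int x, y; } a, b;
       int c;
       int x = (c ? a : b).x;   // accepted because the conditional is
                                // rewritten as *(c ? &a : &b)
*/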
5611 static void expr_eq(void)
5613 int t;
5615 expr_cond();
5616 if (tok == '=' ||
5617 (tok >= TOK_A_MOD && tok <= TOK_A_DIV) ||
5618 tok == TOK_A_XOR || tok == TOK_A_OR ||
5619 tok == TOK_A_SHL || tok == TOK_A_SAR) {
5620 test_lvalue();
5621 t = tok;
5622 next();
5623 if (t == '=') {
5624 expr_eq();
5625 } else {
5626 vdup();
5627 expr_eq();
5628 gen_op(t & 0x7f);
5630 vstore();
5634 ST_FUNC void gexpr(void)
5636 while (1) {
5637 expr_eq();
5638 if (tok != ',')
5639 break;
5640 vpop();
5641 next();
5645 /* parse a constant expression and return value in vtop. */
5646 static void expr_const1(void)
5648 const_wanted++;
5649 nocode_wanted++;
5650 expr_cond();
5651 nocode_wanted--;
5652 const_wanted--;
5655 /* parse an integer constant and return its value. */
5656 static inline int64_t expr_const64(void)
5658 int64_t c;
5659 expr_const1();
5660 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
5661 expect("constant expression");
5662 c = vtop->c.i;
5663 vpop();
5664 return c;
5667 /* parse an integer constant and return its value.
5668 Complain if it doesn't fit 32bit (signed or unsigned). */
5669 ST_FUNC int expr_const(void)
5671 int c;
5672 int64_t wc = expr_const64();
5673 c = wc;
5674 if (c != wc && (unsigned)c != wc)
5675 tcc_error("constant exceeds 32 bit");
5676 return c;
5679 /* return the label token if current token is a label, otherwise
5680 return zero */
5681 static int is_label(void)
5683 int last_tok;
5685 /* fast test first */
5686 if (tok < TOK_UIDENT)
5687 return 0;
5688 /* no need to save tokc because tok is an identifier */
5689 last_tok = tok;
5690 next();
5691 if (tok == ':') {
5692 return last_tok;
5693 } else {
5694 unget_tok(last_tok);
5695 return 0;
5699 #ifndef TCC_TARGET_ARM64
5700 static void gfunc_return(CType *func_type)
5702 if ((func_type->t & VT_BTYPE) == VT_STRUCT) {
5703 CType type, ret_type;
5704 int ret_align, ret_nregs, regsize;
5705 ret_nregs = gfunc_sret(func_type, func_var, &ret_type,
5706 &ret_align, &regsize);
5707 if (0 == ret_nregs) {
5708 /* if returning structure, must copy it to implicit
5709 first pointer arg location */
5710 type = *func_type;
5711 mk_pointer(&type);
5712 vset(&type, VT_LOCAL | VT_LVAL, func_vc);
5713 indir();
5714 vswap();
5715 /* copy structure value to pointer */
5716 vstore();
5717 } else {
5718 /* returning structure packed into registers */
5719 int r, size, addr, align;
5720 size = type_size(func_type,&align);
5721 if ((vtop->r != (VT_LOCAL | VT_LVAL) ||
5722 (vtop->c.i & (ret_align-1)))
5723 && (align & (ret_align-1))) {
5724 loc = (loc - size) & -ret_align;
5725 addr = loc;
5726 type = *func_type;
5727 vset(&type, VT_LOCAL | VT_LVAL, addr);
5728 vswap();
5729 vstore();
5730 vpop();
5731 vset(&ret_type, VT_LOCAL | VT_LVAL, addr);
5733 vtop->type = ret_type;
5734 if (is_float(ret_type.t))
5735 r = rc_fret(ret_type.t);
5736 else
5737 r = RC_IRET;
5739 if (ret_nregs == 1)
5740 gv(r);
5741 else {
5742 for (;;) {
5743 vdup();
5744 gv(r);
5745 vpop();
5746 if (--ret_nregs == 0)
5747 break;
5748 /* We assume that when a structure is returned in multiple
5749 registers, their classes are consecutive values of the
5750 sequence s(n) = 2^n */
5751 r <<= 1;
5752 vtop->c.i += regsize;
5756 } else if (is_float(func_type->t)) {
5757 gv(rc_fret(func_type->t));
5758 } else {
5759 gv(RC_IRET);
5761 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
5763 #endif
5765 static int case_cmp(const void *pa, const void *pb)
5767 int64_t a = (*(struct case_t**) pa)->v1;
5768 int64_t b = (*(struct case_t**) pb)->v1;
5769 return a < b ? -1 : a > b;
5772 static void gcase(struct case_t **base, int len, int *bsym)
5774 struct case_t *p;
5775 int e;
5776 int ll = (vtop->type.t & VT_BTYPE) == VT_LLONG;
5777 gv(RC_INT);
5778 while (len > 4) {
5779 /* binary search */
5780 p = base[len/2];
5781 vdup();
5782 if (ll)
5783 vpushll(p->v2);
5784 else
5785 vpushi(p->v2);
5786 gen_op(TOK_LE);
5787 e = gtst(1, 0);
5788 vdup();
5789 if (ll)
5790 vpushll(p->v1);
5791 else
5792 vpushi(p->v1);
5793 gen_op(TOK_GE);
5794 gtst_addr(0, p->sym); /* v1 <= x <= v2 */
5795 /* x < v1 */
5796 gcase(base, len/2, bsym);
5797 if (cur_switch->def_sym)
5798 gjmp_addr(cur_switch->def_sym);
5799 else
5800 *bsym = gjmp(*bsym);
5801 /* x > v2 */
5802 gsym(e);
5803 e = len/2 + 1;
5804 base += e; len -= e;
5806 /* linear scan */
5807 while (len--) {
5808 p = *base++;
5809 vdup();
5810 if (ll)
5811 vpushll(p->v2);
5812 else
5813 vpushi(p->v2);
5814 if (p->v1 == p->v2) {
5815 gen_op(TOK_EQ);
5816 gtst_addr(0, p->sym);
5817 } else {
5818 gen_op(TOK_LE);
5819 e = gtst(1, 0);
5820 vdup();
5821 if (ll)
5822 vpushll(p->v1);
5823 else
5824 vpushi(p->v1);
5825 gen_op(TOK_GE);
5826 gtst_addr(0, p->sym);
5827 gsym(e);
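/* gcase() dispatches the case values: with more than four (sorted) cases a
   binary search over the v1/v2 ranges is generated, the rest is a linear
   scan.  A GNU case range becomes a single case_t with v1 < v2, e.g.:

       switch (n) {               // n is hypothetical
       case 1 ... 9:  return 1;   // one case_t { v1 = 1, v2 = 9 }
       case 10:       return 2;   // v1 == v2
       default:       return 0;
       }
*/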
5832 static void block(int *bsym, int *csym, int is_expr)
5834 int a, b, c, d, cond;
5835 Sym *s;
5837 /* generate line number info */
5838 if (tcc_state->do_debug)
5839 tcc_debug_line(tcc_state);
5841 if (is_expr) {
5842 /* default return value is (void) */
5843 vpushi(0);
5844 vtop->type.t = VT_VOID;
5847 if (tok == TOK_IF) {
5848 /* if test */
5849 int saved_nocode_wanted = nocode_wanted;
5850 next();
5851 skip('(');
5852 gexpr();
5853 skip(')');
5854 cond = condition_3way();
5855 if (cond == 1)
5856 a = 0, vpop();
5857 else
5858 a = gvtst(1, 0);
5859 if (cond == 0)
5860 nocode_wanted |= 0x20000000;
5861 block(bsym, csym, 0);
5862 if (cond != 1)
5863 nocode_wanted = saved_nocode_wanted;
5864 c = tok;
5865 if (c == TOK_ELSE) {
5866 next();
5867 d = gjmp(0);
5868 gsym(a);
5869 if (cond == 1)
5870 nocode_wanted |= 0x20000000;
5871 block(bsym, csym, 0);
5872 gsym(d); /* patch else jmp */
5873 if (cond != 0)
5874 nocode_wanted = saved_nocode_wanted;
5875 } else
5876 gsym(a);
5877 } else if (tok == TOK_WHILE) {
5878 int saved_nocode_wanted;
5879 nocode_wanted &= ~0x20000000;
5880 next();
5881 d = ind;
5882 vla_sp_restore();
5883 skip('(');
5884 gexpr();
5885 skip(')');
5886 a = gvtst(1, 0);
5887 b = 0;
5888 ++local_scope;
5889 saved_nocode_wanted = nocode_wanted;
5890 block(&a, &b, 0);
5891 nocode_wanted = saved_nocode_wanted;
5892 --local_scope;
5893 gjmp_addr(d);
5894 gsym(a);
5895 gsym_addr(b, d);
5896 } else if (tok == '{') {
5897 Sym *llabel;
5898 int block_vla_sp_loc = vla_sp_loc, saved_vlas_in_scope = vlas_in_scope;
5900 next();
5901 /* record local declaration stack position */
5902 s = local_stack;
5903 llabel = local_label_stack;
5904 ++local_scope;
5906 /* handle local labels declarations */
5907 if (tok == TOK_LABEL) {
5908 next();
5909 for(;;) {
5910 if (tok < TOK_UIDENT)
5911 expect("label identifier");
5912 label_push(&local_label_stack, tok, LABEL_DECLARED);
5913 next();
5914 if (tok == ',') {
5915 next();
5916 } else {
5917 skip(';');
5918 break;
5922 while (tok != '}') {
5923 if ((a = is_label()))
5924 unget_tok(a);
5925 else
5926 decl(VT_LOCAL);
5927 if (tok != '}') {
5928 if (is_expr)
5929 vpop();
5930 block(bsym, csym, is_expr);
5933 /* pop locally defined labels */
5934 label_pop(&local_label_stack, llabel, is_expr);
5935 /* pop locally defined symbols */
5936 --local_scope;
5937 /* In the is_expr case (a statement expression is finished here),
5938 vtop might refer to symbols on the local_stack. Either via the
5939 type or via vtop->sym. We can't pop those nor any that in turn
5940 might be referred to. To make it easier we don't roll back
5941 any symbols in that case; some upper level call to block() will
5942 do that. We do have to remove such symbols from the lookup
5943 tables, though. sym_pop will do that. */
5944 sym_pop(&local_stack, s, is_expr);
5946 /* Pop VLA frames and restore stack pointer if required */
5947 if (vlas_in_scope > saved_vlas_in_scope) {
5948 vla_sp_loc = saved_vlas_in_scope ? block_vla_sp_loc : vla_sp_root_loc;
5949 vla_sp_restore();
5951 vlas_in_scope = saved_vlas_in_scope;
5953 next();
5954 } else if (tok == TOK_RETURN) {
5955 next();
5956 if (tok != ';') {
5957 gexpr();
5958 gen_assign_cast(&func_vt);
5959 if ((func_vt.t & VT_BTYPE) == VT_VOID)
5960 vtop--;
5961 else
5962 gfunc_return(&func_vt);
5964 skip(';');
5965 /* jump unless last stmt in top-level block */
5966 if (tok != '}' || local_scope != 1)
5967 rsym = gjmp(rsym);
5968 nocode_wanted |= 0x20000000;
5969 } else if (tok == TOK_BREAK) {
5970 /* compute jump */
5971 if (!bsym)
5972 tcc_error("cannot break");
5973 *bsym = gjmp(*bsym);
5974 next();
5975 skip(';');
5976 nocode_wanted |= 0x20000000;
5977 } else if (tok == TOK_CONTINUE) {
5978 /* compute jump */
5979 if (!csym)
5980 tcc_error("cannot continue");
5981 vla_sp_restore_root();
5982 *csym = gjmp(*csym);
5983 next();
5984 skip(';');
5985 } else if (tok == TOK_FOR) {
5986 int e;
5987 int saved_nocode_wanted;
5988 nocode_wanted &= ~0x20000000;
5989 next();
5990 skip('(');
5991 s = local_stack;
5992 ++local_scope;
5993 if (tok != ';') {
5994 /* c99 for-loop init decl? */
5995 if (!decl0(VT_LOCAL, 1, NULL)) {
5996 /* no, regular for-loop init expr */
5997 gexpr();
5998 vpop();
6001 skip(';');
6002 d = ind;
6003 c = ind;
6004 vla_sp_restore();
6005 a = 0;
6006 b = 0;
6007 if (tok != ';') {
6008 gexpr();
6009 a = gvtst(1, 0);
6011 skip(';');
6012 if (tok != ')') {
6013 e = gjmp(0);
6014 c = ind;
6015 vla_sp_restore();
6016 gexpr();
6017 vpop();
6018 gjmp_addr(d);
6019 gsym(e);
6021 skip(')');
6022 saved_nocode_wanted = nocode_wanted;
6023 block(&a, &b, 0);
6024 nocode_wanted = saved_nocode_wanted;
6025 gjmp_addr(c);
6026 gsym(a);
6027 gsym_addr(b, c);
6028 --local_scope;
6029 sym_pop(&local_stack, s, 0);
6031 } else
6032 if (tok == TOK_DO) {
6033 int saved_nocode_wanted;
6034 nocode_wanted &= ~0x20000000;
6035 next();
6036 a = 0;
6037 b = 0;
6038 d = ind;
6039 vla_sp_restore();
6040 saved_nocode_wanted = nocode_wanted;
6041 block(&a, &b, 0);
6042 skip(TOK_WHILE);
6043 skip('(');
6044 gsym(b);
6045 gexpr();
6046 c = gvtst(0, 0);
6047 gsym_addr(c, d);
6048 nocode_wanted = saved_nocode_wanted;
6049 skip(')');
6050 gsym(a);
6051 skip(';');
6052 } else
6053 if (tok == TOK_SWITCH) {
6054 struct switch_t *saved, sw;
6055 int saved_nocode_wanted = nocode_wanted;
6056 SValue switchval;
6057 next();
6058 skip('(');
6059 gexpr();
6060 skip(')');
6061 switchval = *vtop--;
6062 a = 0;
6063 b = gjmp(0); /* jump to first case */
6064 sw.p = NULL; sw.n = 0; sw.def_sym = 0;
6065 saved = cur_switch;
6066 cur_switch = &sw;
6067 block(&a, csym, 0);
6068 nocode_wanted = saved_nocode_wanted;
6069 a = gjmp(a); /* add implicit break */
6070 /* case lookup */
6071 gsym(b);
6072 qsort(sw.p, sw.n, sizeof(void*), case_cmp);
6073 for (b = 1; b < sw.n; b++)
6074 if (sw.p[b - 1]->v2 >= sw.p[b]->v1)
6075 tcc_error("duplicate case value");
6076 /* Our switch table sorting is signed, so the compared
6077 value needs to be as well when it's 64bit. */
6078 if ((switchval.type.t & VT_BTYPE) == VT_LLONG)
6079 switchval.type.t &= ~VT_UNSIGNED;
6080 vpushv(&switchval);
6081 gcase(sw.p, sw.n, &a);
6082 vpop();
6083 if (sw.def_sym)
6084 gjmp_addr(sw.def_sym);
6085 dynarray_reset(&sw.p, &sw.n);
6086 cur_switch = saved;
6087 /* break label */
6088 gsym(a);
6089 } else
6090 if (tok == TOK_CASE) {
6091 struct case_t *cr = tcc_malloc(sizeof(struct case_t));
6092 if (!cur_switch)
6093 expect("switch");
6094 nocode_wanted &= ~0x20000000;
6095 next();
6096 cr->v1 = cr->v2 = expr_const64();
6097 if (gnu_ext && tok == TOK_DOTS) {
6098 next();
6099 cr->v2 = expr_const64();
6100 if (cr->v2 < cr->v1)
6101 tcc_warning("empty case range");
6103 cr->sym = ind;
6104 dynarray_add(&cur_switch->p, &cur_switch->n, cr);
6105 skip(':');
6106 is_expr = 0;
6107 goto block_after_label;
6108 } else
6109 if (tok == TOK_DEFAULT) {
6110 next();
6111 skip(':');
6112 if (!cur_switch)
6113 expect("switch");
6114 if (cur_switch->def_sym)
6115 tcc_error("too many 'default'");
6116 cur_switch->def_sym = ind;
6117 is_expr = 0;
6118 goto block_after_label;
6119 } else
6120 if (tok == TOK_GOTO) {
6121 next();
6122 if (tok == '*' && gnu_ext) {
6123 /* computed goto */
6124 next();
6125 gexpr();
6126 if ((vtop->type.t & VT_BTYPE) != VT_PTR)
6127 expect("pointer");
6128 ggoto();
6129 } else if (tok >= TOK_UIDENT) {
6130 s = label_find(tok);
6131 /* put forward definition if needed */
6132 if (!s) {
6133 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
6134 } else {
6135 if (s->r == LABEL_DECLARED)
6136 s->r = LABEL_FORWARD;
6138 vla_sp_restore_root();
6139 if (s->r & LABEL_FORWARD)
6140 s->jnext = gjmp(s->jnext);
6141 else
6142 gjmp_addr(s->jnext);
6143 next();
6144 } else {
6145 expect("label identifier");
6147 skip(';');
6148 } else if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
6149 asm_instr();
6150 } else {
6151 b = is_label();
6152 if (b) {
6153 /* label case */
6154 next();
6155 s = label_find(b);
6156 if (s) {
6157 if (s->r == LABEL_DEFINED)
6158 tcc_error("duplicate label '%s'", get_tok_str(s->v, NULL));
6159 gsym(s->jnext);
6160 s->r = LABEL_DEFINED;
6161 } else {
6162 s = label_push(&global_label_stack, b, LABEL_DEFINED);
6164 s->jnext = ind;
6165 vla_sp_restore();
6166 /* we accept this, but it is a mistake */
6167 block_after_label:
6168 nocode_wanted &= ~0x20000000;
6169 if (tok == '}') {
6170 tcc_warning("deprecated use of label at end of compound statement");
6171 } else {
6172 if (is_expr)
6173 vpop();
6174 block(bsym, csym, is_expr);
6176 } else {
6177 /* expression case */
6178 if (tok != ';') {
6179 if (is_expr) {
6180 vpop();
6181 gexpr();
6182 } else {
6183 gexpr();
6184 vpop();
6187 skip(';');
6192 /* This skips over a stream of tokens containing balanced {} and ()
6193 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
6194 with a '{'). If STR is non-NULL, allocates and stores the skipped tokens
6195 in *STR. This doesn't check whether () and {} are nested correctly,
6196 i.e. "({)}" is accepted. */
6197 static void skip_or_save_block(TokenString **str)
6199 int braces = tok == '{';
6200 int level = 0;
6201 if (str)
6202 *str = tok_str_alloc();
6204 while ((level > 0 || (tok != '}' && tok != ',' && tok != ';' && tok != ')'))) {
6205 int t;
6206 if (tok == TOK_EOF) {
6207 if (str || level > 0)
6208 tcc_error("unexpected end of file");
6209 else
6210 break;
6212 if (str)
6213 tok_str_add_tok(*str);
6214 t = tok;
6215 next();
6216 if (t == '{' || t == '(') {
6217 level++;
6218 } else if (t == '}' || t == ')') {
6219 level--;
6220 if (level == 0 && braces && t == '}')
6221 break;
6224 if (str) {
6225 tok_str_add(*str, -1);
6226 tok_str_add(*str, 0);
6230 #define EXPR_CONST 1
6231 #define EXPR_ANY 2
6233 static void parse_init_elem(int expr_type)
6235 int saved_global_expr;
6236 switch(expr_type) {
6237 case EXPR_CONST:
6238 /* compound literals must be allocated globally in this case */
6239 saved_global_expr = global_expr;
6240 global_expr = 1;
6241 expr_const1();
6242 global_expr = saved_global_expr;
6243 /* NOTE: symbols are accepted, as well as lvalue for anon symbols
6244 (compound literals). */
6245 if (((vtop->r & (VT_VALMASK | VT_LVAL)) != VT_CONST
6246 && ((vtop->r & (VT_SYM|VT_LVAL)) != (VT_SYM|VT_LVAL)
6247 || vtop->sym->v < SYM_FIRST_ANOM))
6248 #ifdef TCC_TARGET_PE
6249 || ((vtop->r & VT_SYM) && vtop->sym->a.dllimport)
6250 #endif
6252 tcc_error("initializer element is not constant");
6253 break;
6254 case EXPR_ANY:
6255 expr_eq();
6256 break;
6260 /* put zeros for variable based init */
6261 static void init_putz(Section *sec, unsigned long c, int size)
6263 if (sec) {
6264 /* nothing to do because globals are already set to zero */
6265 } else {
6266 vpush_global_sym(&func_old_type, TOK_memset);
6267 vseti(VT_LOCAL, c);
6268 #ifdef TCC_TARGET_ARM
6269 vpushs(size);
6270 vpushi(0);
6271 #else
6272 vpushi(0);
6273 vpushs(size);
6274 #endif
6275 gfunc_call(3);
6279 /* t is the array or struct type. c is the array or struct
6280 address. cur_field is the pointer to the current
6281 field, for arrays the 'c' member contains the current start
6282 index. 'size_only' is true if only size info is needed (only used
6283 in arrays). al contains the already initialized length of the
6284 current container (starting at c). This returns the new length of that. */
6285 static int decl_designator(CType *type, Section *sec, unsigned long c,
6286 Sym **cur_field, int size_only, int al)
6288 Sym *s, *f;
6289 int index, index_last, align, l, nb_elems, elem_size;
6290 unsigned long corig = c;
6292 elem_size = 0;
6293 nb_elems = 1;
6294 if (gnu_ext && (l = is_label()) != 0)
6295 goto struct_field;
6296 /* NOTE: we only support ranges for last designator */
6297 while (nb_elems == 1 && (tok == '[' || tok == '.')) {
6298 if (tok == '[') {
6299 if (!(type->t & VT_ARRAY))
6300 expect("array type");
6301 next();
6302 index = index_last = expr_const();
6303 if (tok == TOK_DOTS && gnu_ext) {
6304 next();
6305 index_last = expr_const();
6307 skip(']');
6308 s = type->ref;
6309 if (index < 0 || (s->c >= 0 && index_last >= s->c) ||
6310 index_last < index)
6311 tcc_error("invalid index");
6312 if (cur_field)
6313 (*cur_field)->c = index_last;
6314 type = pointed_type(type);
6315 elem_size = type_size(type, &align);
6316 c += index * elem_size;
6317 nb_elems = index_last - index + 1;
6318 } else {
6319 next();
6320 l = tok;
6321 struct_field:
6322 next();
6323 if ((type->t & VT_BTYPE) != VT_STRUCT)
6324 expect("struct/union type");
6325 f = find_field(type, l);
6326 if (!f)
6327 expect("field");
6328 if (cur_field)
6329 *cur_field = f;
6330 type = &f->type;
6331 c += f->c;
6333 cur_field = NULL;
6335 if (!cur_field) {
6336 if (tok == '=') {
6337 next();
6338 } else if (!gnu_ext) {
6339 expect("=");
6341 } else {
6342 if (type->t & VT_ARRAY) {
6343 index = (*cur_field)->c;
6344 if (type->ref->c >= 0 && index >= type->ref->c)
6345 tcc_error("index too large");
6346 type = pointed_type(type);
6347 c += index * type_size(type, &align);
6348 } else {
6349 f = *cur_field;
6350 while (f && (f->v & SYM_FIRST_ANOM) && (f->type.t & VT_BITFIELD))
6351 *cur_field = f = f->next;
6352 if (!f)
6353 tcc_error("too many field init");
6354 type = &f->type;
6355 c += f->c;
6358 /* must put zero in holes (note that doing it that way
6359 ensures that it even works with designators) */
6360 if (!size_only && c - corig > al)
6361 init_putz(sec, corig + al, c - corig - al);
6362 decl_initializer(type, sec, c, 0, size_only);
6364 /* XXX: make it more general */
6365 if (!size_only && nb_elems > 1) {
6366 unsigned long c_end;
6367 uint8_t *src, *dst;
6368 int i;
6370 if (!sec) {
6371 vset(type, VT_LOCAL|VT_LVAL, c);
6372 for (i = 1; i < nb_elems; i++) {
6373 vset(type, VT_LOCAL|VT_LVAL, c + elem_size * i);
6374 vswap();
6375 vstore();
6377 vpop();
6378 } else if (!NODATA_WANTED) {
6379 c_end = c + nb_elems * elem_size;
6380 if (c_end > sec->data_allocated)
6381 section_realloc(sec, c_end);
6382 src = sec->data + c;
6383 dst = src;
6384 for(i = 1; i < nb_elems; i++) {
6385 dst += elem_size;
6386 memcpy(dst, src, elem_size);
6390 c += nb_elems * type_size(type, &align);
6391 if (c - corig > al)
6392 al = c - corig;
6393 return al;
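/* Designator forms handled above (all names hypothetical):

       struct point { int x, y; };
       struct point p = { .y = 2, .x = 1 };     // field designators
       int v[8] = { [2] = 5, [4 ... 6] = 7 };   // GNU range designator:
                                                // elements 4, 5 and 6 get 7
*/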
6396 /* store a value or an expression directly in global data or in local array */
6397 static void init_putv(CType *type, Section *sec, unsigned long c)
6399 int bt;
6400 void *ptr;
6401 CType dtype;
6403 dtype = *type;
6404 dtype.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
6406 if (sec) {
6407 int size, align;
6408 /* XXX: not portable */
6409 /* XXX: generate error if incorrect relocation */
6410 gen_assign_cast(&dtype);
6411 bt = type->t & VT_BTYPE;
6413 if ((vtop->r & VT_SYM)
6414 && bt != VT_PTR
6415 && bt != VT_FUNC
6416 && (bt != (PTR_SIZE == 8 ? VT_LLONG : VT_INT)
6417 || (type->t & VT_BITFIELD))
6418 && !((vtop->r & VT_CONST) && vtop->sym->v >= SYM_FIRST_ANOM)
6420 tcc_error("initializer element is not computable at load time");
6422 if (NODATA_WANTED) {
6423 vtop--;
6424 return;
6427 size = type_size(type, &align);
6428 section_reserve(sec, c + size);
6429 ptr = sec->data + c;
6431 /* XXX: make code faster ? */
6432 if ((vtop->r & (VT_SYM|VT_CONST)) == (VT_SYM|VT_CONST) &&
6433 vtop->sym->v >= SYM_FIRST_ANOM &&
6434 /* XXX This rejects compound literals like
6435 '(void *){ptr}'. The problem is that '&sym' is
6436 represented the same way, which would be ruled out
6437 by the SYM_FIRST_ANOM check above, but also '"string"'
6438 in 'char *p = "string"' is represented the same
6439 with the type being VT_PTR and the symbol being an
6440 anonymous one. That is, there's no difference in vtop
6441 between '(void *){x}' and '&(void *){x}'. Ignore
6442 pointer typed entities here. Hopefully no real code
6443 will ever use compound literals with scalar type. */
6444 (vtop->type.t & VT_BTYPE) != VT_PTR) {
6445 /* These come from compound literals, memcpy stuff over. */
6446 Section *ssec;
6447 ElfSym *esym;
6448 ElfW_Rel *rel;
6449 esym = elfsym(vtop->sym);
6450 ssec = tcc_state->sections[esym->st_shndx];
6451 memmove (ptr, ssec->data + esym->st_value, size);
6452 if (ssec->reloc) {
6453 /* We need to copy over all memory contents, and that
6454 includes relocations. Use the fact that relocs are
6455 created in order, so look from the end of relocs
6456 until we hit one before the copied region. */
6457 int num_relocs = ssec->reloc->data_offset / sizeof(*rel);
6458 rel = (ElfW_Rel*)(ssec->reloc->data + ssec->reloc->data_offset);
6459 while (num_relocs--) {
6460 rel--;
6461 if (rel->r_offset >= esym->st_value + size)
6462 continue;
6463 if (rel->r_offset < esym->st_value)
6464 break;
6465 /* Note: if the same fields are initialized multiple
6466 times (possible with designators) then we possibly
6467 add multiple relocations for the same offset here.
6468 That would lead to wrong code, the last reloc needs
6469 to win. We clean this up later after the whole
6470 initializer is parsed. */
6471 put_elf_reloca(symtab_section, sec,
6472 c + rel->r_offset - esym->st_value,
6473 ELFW(R_TYPE)(rel->r_info),
6474 ELFW(R_SYM)(rel->r_info),
6475 #if PTR_SIZE == 8
6476 rel->r_addend
6477 #else
6479 #endif
6483 } else {
6484 if (type->t & VT_BITFIELD) {
6485 int bit_pos, bit_size, bits, n;
6486 unsigned char *p, v, m;
6487 bit_pos = BIT_POS(vtop->type.t);
6488 bit_size = BIT_SIZE(vtop->type.t);
6489 p = (unsigned char*)ptr + (bit_pos >> 3);
6490 bit_pos &= 7, bits = 0;
6491 while (bit_size) {
6492 n = 8 - bit_pos;
6493 if (n > bit_size)
6494 n = bit_size;
6495 v = vtop->c.i >> bits << bit_pos;
6496 m = ((1 << n) - 1) << bit_pos;
6497 *p = (*p & ~m) | (v & m);
6498 bits += n, bit_size -= n, bit_pos = 0, ++p;
6500 } else
6501 switch(bt) {
6502 /* XXX: when cross-compiling we assume that each type has the
6503 same representation on host and target, which is likely to
6504 be wrong in the case of long double */
6505 case VT_BOOL:
6506 vtop->c.i = vtop->c.i != 0;
6507 case VT_BYTE:
6508 *(char *)ptr |= vtop->c.i;
6509 break;
6510 case VT_SHORT:
6511 *(short *)ptr |= vtop->c.i;
6512 break;
6513 case VT_FLOAT:
6514 *(float*)ptr = vtop->c.f;
6515 break;
6516 case VT_DOUBLE:
6517 *(double *)ptr = vtop->c.d;
6518 break;
6519 case VT_LDOUBLE:
6520 #if defined TCC_IS_NATIVE_387
6521 if (sizeof (long double) >= 10) /* zero pad ten-byte LD */
6522 memcpy(ptr, &vtop->c.ld, 10);
6523 #ifdef __TINYC__
6524 else if (sizeof (long double) == sizeof (double))
6525 __asm__("fldl %1\nfstpt %0\n" : "=m" (ptr) : "m" (vtop->c.ld));
6526 #endif
6527 else
6528 #endif
6529 if (sizeof(long double) == LDOUBLE_SIZE)
6530 *(long double*)ptr = vtop->c.ld;
6531 else if (sizeof(double) == LDOUBLE_SIZE)
6532 *(double *)ptr = (double)vtop->c.ld;
6533 else
6534 tcc_error("can't cross compile long double constants");
6535 break;
6536 #if PTR_SIZE != 8
6537 case VT_LLONG:
6538 *(long long *)ptr |= vtop->c.i;
6539 break;
6540 #else
6541 case VT_LLONG:
6542 #endif
6543 case VT_PTR:
6545 addr_t val = vtop->c.i;
6546 #if PTR_SIZE == 8
6547 if (vtop->r & VT_SYM)
6548 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
6549 else
6550 *(addr_t *)ptr |= val;
6551 #else
6552 if (vtop->r & VT_SYM)
6553 greloc(sec, vtop->sym, c, R_DATA_PTR);
6554 *(addr_t *)ptr |= val;
6555 #endif
6556 break;
6558 default:
6560 int val = vtop->c.i;
6561 #if PTR_SIZE == 8
6562 if (vtop->r & VT_SYM)
6563 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
6564 else
6565 *(int *)ptr |= val;
6566 #else
6567 if (vtop->r & VT_SYM)
6568 greloc(sec, vtop->sym, c, R_DATA_PTR);
6569 *(int *)ptr |= val;
6570 #endif
6571 break;
6575 vtop--;
6576 } else {
6577 vset(&dtype, VT_LOCAL|VT_LVAL, c);
6578 vswap();
6579 vstore();
6580 vpop();
6584 /* 't' contains the type and storage info. 'c' is the offset of the
6585 object in section 'sec'. If 'sec' is NULL, it means stack based
6586 allocation. 'first' is true if array '{' must be read (multi
6587 dimension implicit array init handling). 'size_only' is true if
6588 size only evaluation is wanted (only for arrays). */
6589 static void decl_initializer(CType *type, Section *sec, unsigned long c,
6590 int first, int size_only)
6592 int len, n, no_oblock, nb, i;
6593 int size1, align1;
6594 int have_elem;
6595 Sym *s, *f;
6596 Sym indexsym;
6597 CType *t1;
6599 /* If we currently are at an '}' or ',' we have read an initializer
6600 element in one of our callers, and not yet consumed it. */
6601 have_elem = tok == '}' || tok == ',';
6602 if (!have_elem && tok != '{' &&
6603 /* In case of strings we have special handling for arrays, so
6604 don't consume them as initializer value (which would commit them
6605 to some anonymous symbol). */
6606 tok != TOK_LSTR && tok != TOK_STR &&
6607 !size_only) {
6608 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
6609 have_elem = 1;
6612 if (have_elem &&
6613 !(type->t & VT_ARRAY) &&
6614 /* Use i_c_parameter_t, to strip toplevel qualifiers.
6615 The source type might have VT_CONSTANT set, which is
6616 of course assignable to non-const elements. */
6617 is_compatible_unqualified_types(type, &vtop->type)) {
6618 init_putv(type, sec, c);
6619 } else if (type->t & VT_ARRAY) {
6620 s = type->ref;
6621 n = s->c;
6622 t1 = pointed_type(type);
6623 size1 = type_size(t1, &align1);
6625 no_oblock = 1;
6626 if ((first && tok != TOK_LSTR && tok != TOK_STR) ||
6627 tok == '{') {
6628 if (tok != '{')
6629 tcc_error("character array initializer must be a literal,"
6630 " optionally enclosed in braces");
6631 skip('{');
6632 no_oblock = 0;
6635 /* only parse strings here if correct type (otherwise: handle
6636 them as ((w)char *) expressions) */
6637 if ((tok == TOK_LSTR &&
6638 #ifdef TCC_TARGET_PE
6639 (t1->t & VT_BTYPE) == VT_SHORT && (t1->t & VT_UNSIGNED)
6640 #else
6641 (t1->t & VT_BTYPE) == VT_INT
6642 #endif
6643 ) || (tok == TOK_STR && (t1->t & VT_BTYPE) == VT_BYTE)) {
6644 len = 0;
6645 while (tok == TOK_STR || tok == TOK_LSTR) {
6646 int cstr_len, ch;
6648 /* compute maximum number of chars wanted */
6649 if (tok == TOK_STR)
6650 cstr_len = tokc.str.size;
6651 else
6652 cstr_len = tokc.str.size / sizeof(nwchar_t);
6653 cstr_len--;
6654 nb = cstr_len;
6655 if (n >= 0 && nb > (n - len))
6656 nb = n - len;
6657 if (!size_only) {
6658 if (cstr_len > nb)
6659 tcc_warning("initializer-string for array is too long");
6660 /* in order to go faster for the common case (char
6661 string in a global variable), we handle it
6662 specifically */
6663 if (sec && tok == TOK_STR && size1 == 1) {
6664 if (!NODATA_WANTED)
6665 memcpy(sec->data + c + len, tokc.str.data, nb);
6666 } else {
6667 for(i=0;i<nb;i++) {
6668 if (tok == TOK_STR)
6669 ch = ((unsigned char *)tokc.str.data)[i];
6670 else
6671 ch = ((nwchar_t *)tokc.str.data)[i];
6672 vpushi(ch);
6673 init_putv(t1, sec, c + (len + i) * size1);
6677 len += nb;
6678 next();
6680 /* only add trailing zero if enough storage (no
6681 warning in this case since it is standard) */
6682 if (n < 0 || len < n) {
6683 if (!size_only) {
6684 vpushi(0);
6685 init_putv(t1, sec, c + (len * size1));
6687 len++;
6689 len *= size1;
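/* Editor's example (not from tccgen.c) of the cases just handled:

       char ok[8]    = "abc";   // fits: 'a' 'b' 'c' '\0', rest zero-filled later
       char tight[3] = "abc";   // exactly fills the array: no trailing '\0'
                                // and no warning, since ISO C allows this
       char small[2] = "abc";   // too long: the tcc_warning() above fires

   Array names are invented. */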
6690 } else {
6691 indexsym.c = 0;
6692 f = &indexsym;
6694 do_init_list:
6695 len = 0;
6696 while (tok != '}' || have_elem) {
6697 len = decl_designator(type, sec, c, &f, size_only, len);
6698 have_elem = 0;
6699 if (type->t & VT_ARRAY) {
6700 ++indexsym.c;
6701 /* special test for multi dimensional arrays (may not
6702 be strictly correct if designators are used at the
6703 same time) */
6704 if (no_oblock && len >= n*size1)
6705 break;
6706 } else {
6707 if (s->type.t == VT_UNION)
6708 f = NULL;
6709 else
6710 f = f->next;
6711 if (no_oblock && f == NULL)
6712 break;
6715 if (tok == '}')
6716 break;
6717 skip(',');
6720 /* put zeros at the end */
6721 if (!size_only && len < n*size1)
6722 init_putz(sec, c + len, n*size1 - len);
6723 if (!no_oblock)
6724 skip('}');
6725 /* patch type size if needed, which happens only for array types */
6726 if (n < 0)
6727 s->c = size1 == 1 ? len : ((len + size1 - 1)/size1);
6728 } else if ((type->t & VT_BTYPE) == VT_STRUCT) {
6729 size1 = 1;
6730 no_oblock = 1;
6731 if (first || tok == '{') {
6732 skip('{');
6733 no_oblock = 0;
6735 s = type->ref;
6736 f = s->next;
6737 n = s->c;
6738 goto do_init_list;
6739 } else if (tok == '{') {
6740 next();
6741 decl_initializer(type, sec, c, first, size_only);
6742 skip('}');
6743 } else if (size_only) {
6744 /* If we supported only ISO C we wouldn't have to accept calling
6745 this with size_only==1 on anything other than an array (and even
6746 then only at the outermost level, so no recursion would be needed),
6747 because initializing a flexible array member isn't supported.
6748 But GNU C supports it, so we need to recurse even into
6749 subfields of structs and arrays when size_only is set. */
6750 /* just skip expression */
6751 skip_or_save_block(NULL);
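/* Editor's example (not from tccgen.c) of the GNU extension that forces the
   recursion described above: a flexible array member with an initializer,
   whose length can only be found by walking into the sub-initializer.

       struct msg { int len; char body[]; };
       struct msg hello = { 5, "hello" };   // GNU C: body is sized from the string

   'msg' and 'hello' are invented names. */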
6752 } else {
6753 if (!have_elem) {
6754 /* This should happen only when we haven't parsed
6755 the init element above for fear of committing a
6756 string constant to memory too early. */
6757 if (tok != TOK_STR && tok != TOK_LSTR)
6758 expect("string constant");
6759 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
6761 init_putv(type, sec, c);
6765 /* parse an initializer for 'type' if 'has_init' is non-zero, and
6766 allocate space in local or global data space ('r' is either
6767 VT_LOCAL or VT_CONST). If 'v' is non-zero, an associated
6768 variable 'v' of scope 'scope' is declared before the initializers
6769 are parsed. If 'v' is zero, a reference to the new object is
6770 pushed on the value stack. If 'has_init' is 2, special parsing
6771 is done to handle string constants. */
6772 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r,
6773 int has_init, int v, int scope)
6775 int size, align, addr;
6776 TokenString *init_str = NULL;
6778 Section *sec;
6779 Sym *flexible_array;
6780 Sym *sym = NULL;
6781 int saved_nocode_wanted = nocode_wanted;
6782 #ifdef CONFIG_TCC_BCHECK
6783 int bcheck = tcc_state->do_bounds_check && !NODATA_WANTED;
6784 #endif
6786 if (type->t & VT_STATIC)
6787 nocode_wanted |= NODATA_WANTED ? 0x40000000 : 0x80000000;
6789 flexible_array = NULL;
6790 if ((type->t & VT_BTYPE) == VT_STRUCT) {
6791 Sym *field = type->ref->next;
6792 if (field) {
6793 while (field->next)
6794 field = field->next;
6795 if (field->type.t & VT_ARRAY && field->type.ref->c < 0)
6796 flexible_array = field;
6800 size = type_size(type, &align);
6801 /* If the size is unknown, we must evaluate it before
6802 evaluating the initializers, because
6803 initializers can generate global data too
6804 (e.g. string pointers or ISO C99 compound
6805 literals). It also simplifies the handling
6806 of local initializers. */
6807 if (size < 0 || (flexible_array && has_init)) {
6808 if (!has_init)
6809 tcc_error("unknown type size");
6810 /* get all init string */
6811 if (has_init == 2) {
6812 init_str = tok_str_alloc();
6813 /* only get strings */
6814 while (tok == TOK_STR || tok == TOK_LSTR) {
6815 tok_str_add_tok(init_str);
6816 next();
6818 tok_str_add(init_str, -1);
6819 tok_str_add(init_str, 0);
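/* Editor's example (not from tccgen.c): the has_init == 2 path above saves
   only adjacent string literals, e.g. for

       char banner[] = "tiny" "cc";

   The saved tokens are replayed twice: once below to size the array, and
   once later to emit the data.  'banner' is an invented name. */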
6820 } else {
6821 skip_or_save_block(&init_str);
6823 unget_tok(0);
6825 /* compute size */
6826 begin_macro(init_str, 1);
6827 next();
6828 decl_initializer(type, NULL, 0, 1, 1);
6829 /* prepare second initializer parsing */
6830 macro_ptr = init_str->str;
6831 next();
6833 /* if still unknown size, error */
6834 size = type_size(type, &align);
6835 if (size < 0)
6836 tcc_error("unknown type size");
6838 /* If there's a flexible array member and it was used in the
6839 initializer, adjust the size. */
6840 if (flexible_array &&
6841 flexible_array->type.ref->c > 0)
6842 size += flexible_array->type.ref->c
6843 * pointed_size(&flexible_array->type);
6844 /* take into account specified alignment if bigger */
6845 if (ad->a.aligned) {
6846 int speca = 1 << (ad->a.aligned - 1);
6847 if (speca > align)
6848 align = speca;
6849 } else if (ad->a.packed) {
6850 align = 1;
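/* Editor's example (not from tccgen.c): ad->a.aligned stores log2(alignment)+1,
   hence the shift above.  A declaration such as

       int buf[4] __attribute__((aligned(16)));   // stored as 5; 1 << 4 == 16

   raises 'align' to 16 here, while a packed attribute forces it to 1.
   'buf' is an invented name. */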
6853 if (NODATA_WANTED)
6854 size = 0, align = 1;
6856 if ((r & VT_VALMASK) == VT_LOCAL) {
6857 sec = NULL;
6858 #ifdef CONFIG_TCC_BCHECK
6859 if (bcheck && (type->t & VT_ARRAY)) {
6860 loc--;
6862 #endif
6863 loc = (loc - size) & -align;
6864 addr = loc;
6865 #ifdef CONFIG_TCC_BCHECK
6866 /* handles bounds */
6867 /* XXX: currently, since we do only one pass, we cannot track
6868 '&' operators, so we add only arrays */
6869 if (bcheck && (type->t & VT_ARRAY)) {
6870 addr_t *bounds_ptr;
6871 /* add padding between regions */
6872 loc--;
6873 /* then add local bound info */
6874 bounds_ptr = section_ptr_add(lbounds_section, 2 * sizeof(addr_t));
6875 bounds_ptr[0] = addr;
6876 bounds_ptr[1] = size;
6878 #endif
6879 if (v) {
6880 /* local variable */
6881 #ifdef CONFIG_TCC_ASM
6882 if (ad->asm_label) {
6883 int reg = asm_parse_regvar(ad->asm_label);
6884 if (reg >= 0)
6885 r = (r & ~VT_VALMASK) | reg;
6887 #endif
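/* Editor's example (not from tccgen.c) of the GNU register-variable syntax
   handled above, which pins a local to a register instead of a stack slot:

       register int counter asm("esi");   // register name is target-specific

   'counter' is an invented name. */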
6888 sym = sym_push(v, type, r, addr);
6889 sym->a = ad->a;
6890 } else {
6891 /* push local reference */
6892 vset(type, r, addr);
6894 } else {
6895 if (v && scope == VT_CONST) {
6896 /* see if the symbol was already defined */
6897 sym = sym_find(v);
6898 if (sym) {
6899 patch_storage(sym, ad, type);
6900 if (sym->type.t & VT_EXTERN) {
6901 /* if the variable is extern, it was not allocated */
6902 sym->type.t &= ~VT_EXTERN;
6903 /* set array size if it was omitted in extern
6904 declaration */
6905 if ((sym->type.t & VT_ARRAY) &&
6906 sym->type.ref->c < 0 &&
6907 type->ref->c >= 0)
6908 sym->type.ref->c = type->ref->c;
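/* Editor's example (not from tccgen.c) of the size patching just above:

       extern int table[];      // size unknown: type.ref->c < 0
       int table[8];            // this definition fills in the missing size

   'table' is an invented name. */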
6909 } else if (!has_init) {
6910 /* we accept several definitions of the same
6911 global variable. this is tricky, because we
6912 must play with the SHN_COMMON type of the symbol */
6913 /* no init data, we won't add more to the symbol */
6914 goto no_alloc;
6915 } else if (sym->c) {
6916 ElfSym *esym = elfsym(sym);
6917 if (esym->st_shndx == data_section->sh_num)
6918 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
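/* Editor's example (not from tccgen.c) of the cases distinguished above,
   all in one translation unit:

       int x;        // tentative definition, ends up as SHN_COMMON
       int x;        // accepted again: no initializer, so goto no_alloc
       int x = 1;    // real definition, allocated in the data section
       int x = 2;    // rejected above: "redefinition of 'x'"

   'x' is an invented name. */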
6923 /* allocate symbol in corresponding section */
6924 sec = ad->section;
6925 if (!sec) {
6926 if (has_init)
6927 sec = data_section;
6928 else if (tcc_state->nocommon)
6929 sec = bss_section;
6932 if (sec) {
6933 addr = section_add(sec, size, align);
6934 #ifdef CONFIG_TCC_BCHECK
6935 /* add padding if bound check */
6936 if (bcheck)
6937 section_add(sec, 1, 1);
6938 #endif
6939 } else {
6940 addr = align; /* SHN_COMMON is special, symbol value is align */
6941 sec = common_section;
6944 if (v) {
6945 if (!sym) {
6946 sym = sym_push(v, type, r | VT_SYM, 0);
6947 patch_storage(sym, ad, NULL);
6949 /* Local statics have a scope until now (for
6950 warnings), remove it here. */
6951 sym->sym_scope = 0;
6952 /* update symbol definition */
6953 put_extern_sym(sym, sec, addr, size);
6954 } else {
6955 /* push global reference */
6956 sym = get_sym_ref(type, sec, addr, size);
6957 vpushsym(type, sym);
6958 vtop->r |= r;
6961 #ifdef CONFIG_TCC_BCHECK
6962 /* handle bounds now because the symbol must already be
6963 defined when the relocation is emitted */
6964 if (bcheck) {
6965 addr_t *bounds_ptr;
6967 greloca(bounds_section, sym, bounds_section->data_offset, R_DATA_PTR, 0);
6968 /* then add global bound info */
6969 bounds_ptr = section_ptr_add(bounds_section, 2 * sizeof(addr_t));
6970 bounds_ptr[0] = 0; /* relocated */
6971 bounds_ptr[1] = size;
6973 #endif
6976 if (type->t & VT_VLA) {
6977 int a;
6979 if (NODATA_WANTED)
6980 goto no_alloc;
6982 /* save current stack pointer */
6983 if (vlas_in_scope == 0) {
6984 if (vla_sp_root_loc == -1)
6985 vla_sp_root_loc = (loc -= PTR_SIZE);
6986 gen_vla_sp_save(vla_sp_root_loc);
6989 vla_runtime_type_size(type, &a);
6990 gen_vla_alloc(type, a);
6991 gen_vla_sp_save(addr);
6992 vla_sp_loc = addr;
6993 vlas_in_scope++;
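/* Editor's example (not from tccgen.c) of what the VLA branch above compiles:

       void f(int n)
       {
           int a[n];    // size computed at run time; the stack pointer was
           a[0] = 0;    // saved above so it can be restored on scope exit
       }

   'f', 'n' and 'a' are invented names. */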
6995 } else if (has_init) {
6996 size_t oldreloc_offset = 0;
6997 if (sec && sec->reloc)
6998 oldreloc_offset = sec->reloc->data_offset;
6999 decl_initializer(type, sec, addr, 1, 0);
7000 if (sec && sec->reloc)
7001 squeeze_multi_relocs(sec, oldreloc_offset);
7002 /* patch flexible array member size back to -1, */
7003 /* for possible subsequent similar declarations */
7004 if (flexible_array)
7005 flexible_array->type.ref->c = -1;
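/* Editor's note (not from tccgen.c): the reset to -1 above matters because
   the same struct type may be initialized again with a different length:

       struct msg a = { 1, "x" };        // sizes body for "x"
       struct msg b = { 6, "longer" };   // must start again from size -1

   reusing the invented 'struct msg' from the earlier illustration. */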
7008 no_alloc:
7009 /* restore parse state if needed */
7010 if (init_str) {
7011 end_macro();
7012 next();
7015 nocode_wanted = saved_nocode_wanted;
7018 /* parse a function defined by symbol 'sym' and generate its code in
7019 'cur_text_section' */
7020 static void gen_function(Sym *sym)
7022 nocode_wanted = 0;
7023 ind = cur_text_section->data_offset;
7024 /* NOTE: we patch the symbol size later */
7025 put_extern_sym(sym, cur_text_section, ind, 0);
7026 funcname = get_tok_str(sym->v, NULL);
7027 func_ind = ind;
7028 /* Initialize VLA state */
7029 vla_sp_loc = -1;
7030 vla_sp_root_loc = -1;
7031 /* put debug symbol */
7032 tcc_debug_funcstart(tcc_state, sym);
7033 /* push a dummy symbol to enable local sym storage */
7034 sym_push2(&local_stack, SYM_FIELD, 0, 0);
7035 local_scope = 1; /* for function parameters */
7036 gfunc_prolog(&sym->type);
7037 local_scope = 0;
7038 rsym = 0;
7039 block(NULL, NULL, 0);
7040 nocode_wanted = 0;
7041 gsym(rsym);
7042 gfunc_epilog();
7043 cur_text_section->data_offset = ind;
7044 label_pop(&global_label_stack, NULL, 0);
7045 /* reset local stack */
7046 local_scope = 0;
7047 sym_pop(&local_stack, NULL, 0);
7048 /* end of function */
7049 /* patch symbol size */
7050 elfsym(sym)->st_size = ind - func_ind;
7051 tcc_debug_funcend(tcc_state, ind - func_ind);
7052 /* It's better to crash than to generate wrong code */
7053 cur_text_section = NULL;
7054 funcname = ""; /* for safety */
7055 func_vt.t = VT_VOID; /* for safety */
7056 func_var = 0; /* for safety */
7057 ind = 0; /* for safety */
7058 nocode_wanted = 0x80000000;
7059 check_vstack();
7062 static void gen_inline_functions(TCCState *s)
7064 Sym *sym;
7065 int inline_generated, i, ln;
7066 struct InlineFunc *fn;
7068 ln = file->line_num;
7069 /* iterate while inline functions are referenced */
7070 do {
7071 inline_generated = 0;
7072 for (i = 0; i < s->nb_inline_fns; ++i) {
7073 fn = s->inline_fns[i];
7074 sym = fn->sym;
7075 if (sym && sym->c) {
7076 /* the function was used: generate its code and
7077 convert it to a normal function */
7078 fn->sym = NULL;
7079 if (file)
7080 pstrcpy(file->filename, sizeof file->filename, fn->filename);
7081 sym->type.t &= ~VT_INLINE;
7083 begin_macro(fn->func_str, 1);
7084 next();
7085 cur_text_section = text_section;
7086 gen_function(sym);
7087 end_macro();
7089 inline_generated = 1;
7092 } while (inline_generated);
7093 file->line_num = ln;
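/* Editor's example (not from tccgen.c): the loop above only emits code for
   static inline functions that were actually referenced.

       static inline int sq(int x) { return x * x; }   // recorded, not emitted
       int use_it(void) { return sq(3); }              // reference: sq's body
                                                       // is generated here

   A static inline that is never used produces no code at all.
   'sq' and 'use_it' are invented names. */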
7096 ST_FUNC void free_inline_functions(TCCState *s)
7098 int i;
7099 /* free tokens of unused inline functions */
7100 for (i = 0; i < s->nb_inline_fns; ++i) {
7101 struct InlineFunc *fn = s->inline_fns[i];
7102 if (fn->sym)
7103 tok_str_free(fn->func_str);
7105 dynarray_reset(&s->inline_fns, &s->nb_inline_fns);
7108 /* 'l' is VT_LOCAL or VT_CONST to define the default storage type, or VT_CMP
7109 if parsing an old-style parameter decl list (in which case 'func_sym' is set) */
7110 static int decl0(int l, int is_for_loop_init, Sym *func_sym)
7112 int v, has_init, r;
7113 CType type, btype;
7114 Sym *sym;
7115 AttributeDef ad;
7117 while (1) {
7118 if (!parse_btype(&btype, &ad)) {
7119 if (is_for_loop_init)
7120 return 0;
7121 /* skip redundant ';' if not in old parameter decl scope */
7122 if (tok == ';' && l != VT_CMP) {
7123 next();
7124 continue;
7126 if (l != VT_CONST)
7127 break;
7128 if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
7129 /* global asm block */
7130 asm_global_instr();
7131 continue;
7133 if (tok >= TOK_UIDENT) {
7134 /* special test for old K&R protos without explicit int
7135 type. Only accepted when defining global data */
7136 btype.t = VT_INT;
7137 } else {
7138 if (tok != TOK_EOF)
7139 expect("declaration");
7140 break;
7143 if (tok == ';') {
7144 if ((btype.t & VT_BTYPE) == VT_STRUCT) {
7145 int v = btype.ref->v;
7146 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) >= SYM_FIRST_ANOM)
7147 tcc_warning("unnamed struct/union that defines no instances");
7148 next();
7149 continue;
7151 if (IS_ENUM(btype.t)) {
7152 next();
7153 continue;
7156 while (1) { /* iterate thru each declaration */
7157 type = btype;
7158 /* If the base type itself was an array type of unspecified
7159 size (like in 'typedef int arr[]; arr x = {1};') then
7160 we will overwrite the unknown size by the real one for
7161 this decl. We need to unshare the ref symbol holding
7162 that size. */
7163 if ((type.t & VT_ARRAY) && type.ref->c < 0) {
7164 type.ref = sym_push(SYM_FIELD, &type.ref->type, 0, type.ref->c);
7166 type_decl(&type, &ad, &v, TYPE_DIRECT);
7167 #if 0
7169 char buf[500];
7170 type_to_str(buf, sizeof(buf), &type, get_tok_str(v, NULL));
7171 printf("type = '%s'\n", buf);
7173 #endif
7174 if ((type.t & VT_BTYPE) == VT_FUNC) {
7175 if ((type.t & VT_STATIC) && (l == VT_LOCAL)) {
7176 tcc_error("function without file scope cannot be static");
7178 /* if old style function prototype, we accept a
7179 declaration list */
7180 sym = type.ref;
7181 if (sym->f.func_type == FUNC_OLD && l == VT_CONST)
7182 decl0(VT_CMP, 0, sym);
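/* Editor's example (not from tccgen.c) of the old-style definition whose
   parameter declaration list is parsed by the recursive decl0(VT_CMP, ...)
   call above:

       int add(a, b)    // K&R parameter list
           int a;       // each declaration is matched against a parameter
           char *b;     // symbol of 'add' in the VT_CMP branch further down
       {
           return a + (b != 0);
       }

   'add', 'a' and 'b' are invented names. */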
7185 if (gnu_ext && (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
7186 ad.asm_label = asm_label_instr();
7187 /* parse one last attribute list, after asm label */
7188 parse_attribute(&ad);
7189 if (tok == '{')
7190 expect(";");
7193 #ifdef TCC_TARGET_PE
7194 if (ad.a.dllimport || ad.a.dllexport) {
7195 if (type.t & (VT_STATIC|VT_TYPEDEF))
7196 tcc_error("cannot have dll linkage with static or typedef");
7197 if (ad.a.dllimport) {
7198 if ((type.t & VT_BTYPE) == VT_FUNC)
7199 ad.a.dllimport = 0;
7200 else
7201 type.t |= VT_EXTERN;
7204 #endif
7205 if (tok == '{') {
7206 if (l != VT_CONST)
7207 tcc_error("cannot use local functions");
7208 if ((type.t & VT_BTYPE) != VT_FUNC)
7209 expect("function definition");
7211 /* reject abstract declarators in a function definition;
7212 make old-style params without a declaration default to int */
7213 sym = type.ref;
7214 while ((sym = sym->next) != NULL) {
7215 if (!(sym->v & ~SYM_FIELD))
7216 expect("identifier");
7217 if (sym->type.t == VT_VOID)
7218 sym->type = int_type;
7221 /* XXX: cannot do better now: convert extern inline to static inline */
7222 if ((type.t & (VT_EXTERN | VT_INLINE)) == (VT_EXTERN | VT_INLINE))
7223 type.t = (type.t & ~VT_EXTERN) | VT_STATIC;
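/* Editor's note with an example (not from tccgen.c): the conversion above
   means a GNU89-style

       extern inline int twice(int x) { return 2 * x; }

   is compiled as if it were 'static inline', so no externally visible
   definition is emitted from this unit.  'twice' is an invented name. */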
7225 sym = sym_find(v);
7226 if (sym) {
7227 Sym *ref;
7228 /* If type is VT_VOID the symbol was created by tccasm
7229 first, and we see the first reference from C now. */
7230 if ((sym->type.t & VT_BTYPE) == VT_VOID)
7231 sym->type = type;
7233 if ((sym->type.t & VT_BTYPE) != VT_FUNC)
7234 goto func_error1;
7236 ref = sym->type.ref;
7238 /* use func_call from prototype if not defined */
7239 if (ref->f.func_call != FUNC_CDECL
7240 && type.ref->f.func_call == FUNC_CDECL)
7241 type.ref->f.func_call = ref->f.func_call;
7243 /* use static from prototype */
7244 if (sym->type.t & VT_STATIC)
7245 type.t = (type.t & ~VT_EXTERN) | VT_STATIC;
7247 /* If the definition has no visibility use the
7248 one from prototype. */
7249 if (!type.ref->a.visibility)
7250 type.ref->a.visibility = ref->a.visibility;
7251 /* apply other storage attributes from prototype */
7252 type.ref->a.dllexport |= ref->a.dllexport;
7253 type.ref->a.weak |= ref->a.weak;
7255 if (!is_compatible_types(&sym->type, &type)) {
7256 func_error1:
7257 tcc_error("incompatible types for redefinition of '%s'",
7258 get_tok_str(v, NULL));
7260 if (ref->f.func_body)
7261 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
7262 /* if symbol is already defined, then put complete type */
7263 sym->type = type;
7265 } else {
7266 /* put function symbol */
7267 sym = global_identifier_push(v, type.t, 0);
7268 sym->type.ref = type.ref;
7271 sym->type.ref->f.func_body = 1;
7272 sym->r = VT_SYM | VT_CONST;
7273 patch_storage(sym, &ad, NULL);
7275 /* static inline functions are just recorded as a kind
7276 of macro. Their code will be emitted at the end of
7277 the compilation unit only if they are used */
7278 if ((type.t & (VT_INLINE | VT_STATIC)) ==
7279 (VT_INLINE | VT_STATIC)) {
7280 struct InlineFunc *fn;
7281 const char *filename;
7283 filename = file ? file->filename : "";
7284 fn = tcc_malloc(sizeof *fn + strlen(filename));
7285 strcpy(fn->filename, filename);
7286 fn->sym = sym;
7287 skip_or_save_block(&fn->func_str);
7288 dynarray_add(&tcc_state->inline_fns,
7289 &tcc_state->nb_inline_fns, fn);
7290 } else {
7291 /* compute text section */
7292 cur_text_section = ad.section;
7293 if (!cur_text_section)
7294 cur_text_section = text_section;
7295 gen_function(sym);
7297 break;
7298 } else {
7299 if (l == VT_CMP) {
7300 /* find parameter in function parameter list */
7301 for (sym = func_sym->next; sym; sym = sym->next)
7302 if ((sym->v & ~SYM_FIELD) == v)
7303 goto found;
7304 tcc_error("declaration for parameter '%s' but no such parameter",
7305 get_tok_str(v, NULL));
7306 found:
7307 if (type.t & VT_STORAGE) /* 'register' is okay */
7308 tcc_error("storage class specified for '%s'",
7309 get_tok_str(v, NULL));
7310 if (sym->type.t != VT_VOID)
7311 tcc_error("redefinition of parameter '%s'",
7312 get_tok_str(v, NULL));
7313 convert_parameter_type(&type);
7314 sym->type = type;
7315 } else if (type.t & VT_TYPEDEF) {
7316 /* save typedefed type */
7317 /* XXX: test storage specifiers ? */
7318 sym = sym_find(v);
7319 if (sym && sym->sym_scope == local_scope) {
7320 if (!is_compatible_types(&sym->type, &type)
7321 || !(sym->type.t & VT_TYPEDEF))
7322 tcc_error("incompatible redefinition of '%s'",
7323 get_tok_str(v, NULL));
7324 sym->type = type;
7325 } else {
7326 sym = sym_push(v, &type, 0, 0);
7328 sym->a = ad.a;
7329 sym->f = ad.f;
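/* Editor's example (not from tccgen.c) of the typedef check above:

       typedef unsigned long word;
       typedef unsigned long word;   // accepted: same scope, compatible type
       typedef int word;             // rejected: "incompatible redefinition"

   'word' is an invented name. */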
7330 } else {
7331 r = 0;
7332 if ((type.t & VT_BTYPE) == VT_FUNC) {
7333 /* external function definition */
7334 /* specific case for func_call attribute */
7335 type.ref->f = ad.f;
7336 } else if (!(type.t & VT_ARRAY)) {
7337 /* not lvalue if array */
7338 r |= lvalue_type(type.t);
7340 has_init = (tok == '=');
7341 if (has_init && (type.t & VT_VLA))
7342 tcc_error("variable length array cannot be initialized");
7343 if (((type.t & VT_EXTERN) && (!has_init || l != VT_CONST)) ||
7344 ((type.t & VT_BTYPE) == VT_FUNC) ||
7345 ((type.t & VT_ARRAY) && (type.t & VT_STATIC) &&
7346 !has_init && l == VT_CONST && type.ref->c < 0)) {
7347 /* external variable or function */
7348 /* NOTE: like GCC, uninitialized global static
7349 arrays of unspecified size are treated as
7350 extern */
7351 sym = external_sym(v, &type, r, &ad);
7352 if (ad.alias_target) {
7353 Section tsec;
7354 ElfSym *esym;
7355 Sym *alias_target;
7356 alias_target = sym_find(ad.alias_target);
7357 esym = elfsym(alias_target);
7358 if (!esym)
7359 tcc_error("unsupported forward __alias__ attribute");
7360 tsec.sh_num = esym->st_shndx;
7361 /* Local statics have a scope until now (for
7362 warnings), remove it here. */
7363 sym->sym_scope = 0;
7364 put_extern_sym2(sym, &tsec, esym->st_value, esym->st_size, 0);
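/* Editor's example (not from tccgen.c) of the alias handling above; the
   target must already be defined, hence the "forward __alias__" error:

       int real_impl(void) { return 42; }
       int shim(void) __attribute__((alias("real_impl")));  // same address as
                                                             // real_impl

   'real_impl' and 'shim' are invented names. */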
7366 } else {
7367 if (type.t & VT_STATIC)
7368 r |= VT_CONST;
7369 else
7370 r |= l;
7371 if (has_init)
7372 next();
7373 decl_initializer_alloc(&type, &ad, r, has_init, v, l);
7376 if (tok != ',') {
7377 if (is_for_loop_init)
7378 return 1;
7379 skip(';');
7380 break;
7382 next();
7384 ad.a.aligned = 0;
7387 return 0;
7390 static void decl(int l)
7392 decl0(l, 0, NULL);
7395 /* ------------------------------------------------------------------------- */