Implement __attribute__((nodecorate))
[tinycc.git] / tccgen.c
1 /*
2 * TCC - Tiny C Compiler
3 *
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 #include "tcc.h"
22 /********************************************************/
23 /* global variables */
25 /* loc : local variable index
26 ind : output code index
27 rsym: return symbol
28 anon_sym: anonymous symbol index
30 ST_DATA int rsym, anon_sym, ind, loc;
32 ST_DATA Sym *sym_free_first;
33 ST_DATA void **sym_pools;
34 ST_DATA int nb_sym_pools;
36 ST_DATA Sym *global_stack;
37 ST_DATA Sym *local_stack;
38 ST_DATA Sym *define_stack;
39 ST_DATA Sym *global_label_stack;
40 ST_DATA Sym *local_label_stack;
41 static int local_scope;
42 static int in_sizeof;
43 static int section_sym;
45 ST_DATA int vlas_in_scope; /* number of VLAs that are currently in scope */
46 ST_DATA int vla_sp_root_loc; /* vla_sp_loc for SP before any VLAs were pushed */
47 ST_DATA int vla_sp_loc; /* offset of the stack slot in which the stack pointer is saved when it is modified (for VLAs) */
49 ST_DATA SValue __vstack[1+VSTACK_SIZE], *vtop, *pvtop;
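/* __vstack is the value stack used by the code generator: vtop points at
   the current top entry, and pvtop holds the expected top so that
   check_vstack() can report a leak when the two differ */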
51 ST_DATA int const_wanted; /* true if constant wanted */
52 ST_DATA int nocode_wanted; /* no code generation wanted */
53 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
54 #define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */
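/* note: nocode_wanted also doubles as a marker for file scope: it is set
   to 0x80000000 in tccgen_compile(), which is negative as an int on the
   usual two's-complement targets, so NODATA_WANTED stays false there
   (static initializers are still emitted) while STATIC_DATA_WANTED is true */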
55 ST_DATA int global_expr; /* true if compound literals must be allocated globally (used during initializer parsing) */
56 ST_DATA CType func_vt; /* current function return type (used by return instruction) */
57 ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
58 ST_DATA int func_vc;
59 ST_DATA int last_line_num, last_ind, func_ind; /* debug last line number and pc */
60 ST_DATA const char *funcname;
61 ST_DATA int g_debug;
63 ST_DATA CType char_pointer_type, func_old_type, int_type, size_type, ptrdiff_type;
65 ST_DATA struct switch_t {
66 struct case_t {
67 int64_t v1, v2;
68 int sym;
69 } **p; int n; /* list of case ranges */
70 int def_sym; /* default symbol */
71 } *cur_switch; /* current switch */
73 /* ------------------------------------------------------------------------- */
75 static void gen_cast(CType *type);
76 static void gen_cast_s(int t);
77 static inline CType *pointed_type(CType *type);
78 static int is_compatible_types(CType *type1, CType *type2);
79 static int parse_btype(CType *type, AttributeDef *ad);
80 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td);
81 static void parse_expr_type(CType *type);
82 static void init_putv(CType *type, Section *sec, unsigned long c);
83 static void decl_initializer(CType *type, Section *sec, unsigned long c, int first, int size_only);
84 static void block(int *bsym, int *csym, int is_expr);
85 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
86 static void decl(int l);
87 static int decl0(int l, int is_for_loop_init, Sym *);
88 static void expr_eq(void);
89 static void vla_runtime_type_size(CType *type, int *a);
90 static void vla_sp_restore(void);
91 static void vla_sp_restore_root(void);
92 static int is_compatible_unqualified_types(CType *type1, CType *type2);
93 static inline int64_t expr_const64(void);
94 static void vpush64(int ty, unsigned long long v);
95 static void vpush(CType *type);
96 static int gvtst(int inv, int t);
97 static void gen_inline_functions(TCCState *s);
98 static void skip_or_save_block(TokenString **str);
99 static void gv_dup(void);
101 ST_INLN int is_float(int t)
103 int bt;
104 bt = t & VT_BTYPE;
105 return bt == VT_LDOUBLE || bt == VT_DOUBLE || bt == VT_FLOAT || bt == VT_QFLOAT;
108 /* we use our own 'finite' function to avoid potential problems with
109 non-standard math libs */
110 /* XXX: endianness dependent */
111 ST_FUNC int ieee_finite(double d)
113 int p[4];
114 memcpy(p, &d, sizeof(double));
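    /* assuming a little-endian double, p[1] holds the sign and exponent
       bits; or-ing with 0x800fffff below sets every bit except the
       exponent, so the +1 wraps to 0 (and the shift yields 0) exactly
       when the exponent is all ones, i.e. for Inf and NaN */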
115 return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
118 /* compiling intel long double natively */
119 #if (defined __i386__ || defined __x86_64__) \
120 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
121 # define TCC_IS_NATIVE_387
122 #endif
124 ST_FUNC void test_lvalue(void)
126 if (!(vtop->r & VT_LVAL))
127 expect("lvalue");
130 ST_FUNC void check_vstack(void)
132 if (pvtop != vtop)
133 tcc_error("internal compiler error: vstack leak (%d)", vtop - pvtop);
136 /* ------------------------------------------------------------------------- */
137 /* vstack debugging aid */
139 #if 0
140 void pv (const char *lbl, int a, int b)
142 int i;
143 for (i = a; i < a + b; ++i) {
144 SValue *p = &vtop[-i];
145 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
146 lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
149 #endif
151 /* ------------------------------------------------------------------------- */
152 /* start of translation unit info */
153 ST_FUNC void tcc_debug_start(TCCState *s1)
155 if (s1->do_debug) {
156 char buf[512];
158 /* file info: full path + filename */
159 section_sym = put_elf_sym(symtab_section, 0, 0,
160 ELFW(ST_INFO)(STB_LOCAL, STT_SECTION), 0,
161 text_section->sh_num, NULL);
162 getcwd(buf, sizeof(buf));
163 #ifdef _WIN32
164 normalize_slashes(buf);
165 #endif
166 pstrcat(buf, sizeof(buf), "/");
167 put_stabs_r(buf, N_SO, 0, 0,
168 text_section->data_offset, text_section, section_sym);
169 put_stabs_r(file->filename, N_SO, 0, 0,
170 text_section->data_offset, text_section, section_sym);
171 last_ind = 0;
172 last_line_num = 0;
175 /* an ELF symbol of type STT_FILE must be emitted so that STB_LOCAL
176 symbols can be safely used */
177 put_elf_sym(symtab_section, 0, 0,
178 ELFW(ST_INFO)(STB_LOCAL, STT_FILE), 0,
179 SHN_ABS, file->filename);
182 /* put end of translation unit info */
183 ST_FUNC void tcc_debug_end(TCCState *s1)
185 if (!s1->do_debug)
186 return;
187 put_stabs_r(NULL, N_SO, 0, 0,
188 text_section->data_offset, text_section, section_sym);
192 /* generate line number info */
193 ST_FUNC void tcc_debug_line(TCCState *s1)
195 if (!s1->do_debug)
196 return;
197 if ((last_line_num != file->line_num || last_ind != ind)) {
198 put_stabn(N_SLINE, 0, file->line_num, ind - func_ind);
199 last_ind = ind;
200 last_line_num = file->line_num;
204 /* put function symbol */
205 ST_FUNC void tcc_debug_funcstart(TCCState *s1, Sym *sym)
207 char buf[512];
209 if (!s1->do_debug)
210 return;
212 /* stabs info */
213 /* XXX: we put here a dummy type */
214 snprintf(buf, sizeof(buf), "%s:%c1",
215 funcname, sym->type.t & VT_STATIC ? 'f' : 'F');
216 put_stabs_r(buf, N_FUN, 0, file->line_num, 0,
217 cur_text_section, sym->c);
218 /* //gr gdb wants a line at the function */
219 put_stabn(N_SLINE, 0, file->line_num, 0);
221 last_ind = 0;
222 last_line_num = 0;
225 /* put function size */
226 ST_FUNC void tcc_debug_funcend(TCCState *s1, int size)
228 if (!s1->do_debug)
229 return;
230 put_stabn(N_FUN, 0, 0, size);
233 /* ------------------------------------------------------------------------- */
234 ST_FUNC int tccgen_compile(TCCState *s1)
236 cur_text_section = NULL;
237 funcname = "";
238 anon_sym = SYM_FIRST_ANOM;
239 section_sym = 0;
240 const_wanted = 0;
241 nocode_wanted = 0x80000000;
243 /* define some often used types */
244 int_type.t = VT_INT;
245 char_pointer_type.t = VT_BYTE;
246 mk_pointer(&char_pointer_type);
247 #if PTR_SIZE == 4
248 size_type.t = VT_INT | VT_UNSIGNED;
249 ptrdiff_type.t = VT_INT;
250 #elif LONG_SIZE == 4
251 size_type.t = VT_LLONG | VT_UNSIGNED;
252 ptrdiff_type.t = VT_LLONG;
253 #else
254 size_type.t = VT_LONG | VT_LLONG | VT_UNSIGNED;
255 ptrdiff_type.t = VT_LONG | VT_LLONG;
256 #endif
257 func_old_type.t = VT_FUNC;
258 func_old_type.ref = sym_push(SYM_FIELD, &int_type, 0, 0);
259 func_old_type.ref->f.func_call = FUNC_CDECL;
260 func_old_type.ref->f.func_type = FUNC_OLD;
262 tcc_debug_start(s1);
264 #ifdef TCC_TARGET_ARM
265 arm_init(s1);
266 #endif
268 #ifdef INC_DEBUG
269 printf("%s: **** new file\n", file->filename);
270 #endif
272 parse_flags = PARSE_FLAG_PREPROCESS | PARSE_FLAG_TOK_NUM | PARSE_FLAG_TOK_STR;
273 next();
274 decl(VT_CONST);
275 gen_inline_functions(s1);
276 check_vstack();
277 /* end of translation unit info */
278 tcc_debug_end(s1);
279 return 0;
282 /* ------------------------------------------------------------------------- */
283 ST_FUNC ElfSym *elfsym(Sym *s)
285 if (!s || !s->c)
286 return NULL;
287 return &((ElfSym *)symtab_section->data)[s->c];
290 /* apply storage attributes to Elf symbol */
291 ST_FUNC void update_storage(Sym *sym)
293 ElfSym *esym;
294 int sym_bind, old_sym_bind;
296 esym = elfsym(sym);
297 if (!esym)
298 return;
300 if (sym->a.visibility)
301 esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1))
302 | sym->a.visibility;
304 if (sym->type.t & VT_STATIC)
305 sym_bind = STB_LOCAL;
306 else if (sym->a.weak)
307 sym_bind = STB_WEAK;
308 else
309 sym_bind = STB_GLOBAL;
310 old_sym_bind = ELFW(ST_BIND)(esym->st_info);
311 if (sym_bind != old_sym_bind) {
312 esym->st_info = ELFW(ST_INFO)(sym_bind, ELFW(ST_TYPE)(esym->st_info));
315 #ifdef TCC_TARGET_PE
316 if (sym->a.dllimport)
317 esym->st_other |= ST_PE_IMPORT;
318 if (sym->a.dllexport)
319 esym->st_other |= ST_PE_EXPORT;
320 #endif
322 #if 0
323 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
324 get_tok_str(sym->v, NULL),
325 sym_bind == STB_WEAK ? 'w' : sym_bind == STB_LOCAL ? 'l' : 'g',
326 sym->a.visibility,
327 sym->a.dllexport,
328 sym->a.dllimport
330 #endif
333 /* ------------------------------------------------------------------------- */
334 /* update sym->c so that it points to an external symbol in section
335 'section' with value 'value' */
337 ST_FUNC void put_extern_sym2(Sym *sym, int sh_num,
338 addr_t value, unsigned long size,
339 int can_add_underscore)
341 int sym_type, sym_bind, info, other, t;
342 ElfSym *esym;
343 const char *name;
344 char buf1[256];
345 #ifdef CONFIG_TCC_BCHECK
346 char buf[32];
347 #endif
349 if (!sym->c) {
350 name = get_tok_str(sym->v, NULL);
351 #ifdef CONFIG_TCC_BCHECK
352 if (tcc_state->do_bounds_check) {
353 /* XXX: avoid doing that for statics ? */
354 /* if bounds checking is activated, we change some function
355 names by adding the "__bound" prefix */
356 switch(sym->v) {
357 #ifdef TCC_TARGET_PE
358 /* XXX: we rely only on malloc hooks */
359 case TOK_malloc:
360 case TOK_free:
361 case TOK_realloc:
362 case TOK_memalign:
363 case TOK_calloc:
364 #endif
365 case TOK_memcpy:
366 case TOK_memmove:
367 case TOK_memset:
368 case TOK_strlen:
369 case TOK_strcpy:
370 case TOK_alloca:
371 strcpy(buf, "__bound_");
372 strcat(buf, name);
373 name = buf;
374 break;
377 #endif
378 t = sym->type.t;
379 if ((t & VT_BTYPE) == VT_FUNC) {
380 sym_type = STT_FUNC;
381 } else if ((t & VT_BTYPE) == VT_VOID) {
382 sym_type = STT_NOTYPE;
383 } else {
384 sym_type = STT_OBJECT;
386 if (t & VT_STATIC)
387 sym_bind = STB_LOCAL;
388 else
389 sym_bind = STB_GLOBAL;
390 other = 0;
391 #ifdef TCC_TARGET_PE
392 if (sym_type == STT_FUNC && sym->type.ref) {
393 Sym *ref = sym->type.ref;
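        /* for instance, a PE-target declaration along the lines of
               int f(int) __attribute__((nodecorate));
           keeps the plain symbol name "f": neither the leading underscore
           nor the stdcall "@N" suffix below is added */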
394 if (ref->a.nodecorate) {
395 can_add_underscore = 0;
397 if (ref->f.func_call == FUNC_STDCALL && can_add_underscore) {
398 sprintf(buf1, "_%s@%d", name, ref->f.func_args * PTR_SIZE);
399 name = buf1;
400 other |= ST_PE_STDCALL;
401 can_add_underscore = 0;
404 #endif
405 if (tcc_state->leading_underscore && can_add_underscore) {
406 buf1[0] = '_';
407 pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
408 name = buf1;
410 if (sym->asm_label)
411 name = get_tok_str(sym->asm_label, NULL);
412 info = ELFW(ST_INFO)(sym_bind, sym_type);
413 sym->c = put_elf_sym(symtab_section, value, size, info, other, sh_num, name);
414 } else {
415 esym = elfsym(sym);
416 esym->st_value = value;
417 esym->st_size = size;
418 esym->st_shndx = sh_num;
420 update_storage(sym);
423 ST_FUNC void put_extern_sym(Sym *sym, Section *section,
424 addr_t value, unsigned long size)
426 int sh_num = section ? section->sh_num : SHN_UNDEF;
427 put_extern_sym2(sym, sh_num, value, size, 1);
430 /* add a new relocation entry to symbol 'sym' in section 's' */
431 ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
432 addr_t addend)
434 int c = 0;
436 if (nocode_wanted && s == cur_text_section)
437 return;
439 if (sym) {
440 if (0 == sym->c)
441 put_extern_sym(sym, NULL, 0, 0);
442 c = sym->c;
445 /* now we can add ELF relocation info */
446 put_elf_reloca(symtab_section, s, offset, type, c, addend);
449 #if PTR_SIZE == 4
450 ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
452 greloca(s, sym, offset, type, 0);
454 #endif
456 /* ------------------------------------------------------------------------- */
457 /* symbol allocator */
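/* Sym structures are carved out of pools of SYM_POOL_NB entries; freed
   symbols are threaded onto the sym_free_first list so that sym_malloc()
   and sym_free() normally avoid calling the allocator (with SYM_DEBUG
   defined, every Sym is instead malloc'd and freed individually) */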
458 static Sym *__sym_malloc(void)
460 Sym *sym_pool, *sym, *last_sym;
461 int i;
463 sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
464 dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);
466 last_sym = sym_free_first;
467 sym = sym_pool;
468 for(i = 0; i < SYM_POOL_NB; i++) {
469 sym->next = last_sym;
470 last_sym = sym;
471 sym++;
473 sym_free_first = last_sym;
474 return last_sym;
477 static inline Sym *sym_malloc(void)
479 Sym *sym;
480 #ifndef SYM_DEBUG
481 sym = sym_free_first;
482 if (!sym)
483 sym = __sym_malloc();
484 sym_free_first = sym->next;
485 return sym;
486 #else
487 sym = tcc_malloc(sizeof(Sym));
488 return sym;
489 #endif
492 ST_INLN void sym_free(Sym *sym)
494 #ifndef SYM_DEBUG
495 sym->next = sym_free_first;
496 sym_free_first = sym;
497 #else
498 tcc_free(sym);
499 #endif
502 /* push, without hashing */
503 ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, int c)
505 Sym *s;
507 s = sym_malloc();
508 memset(s, 0, sizeof *s);
509 s->v = v;
510 s->type.t = t;
511 s->c = c;
512 /* add in stack */
513 s->prev = *ps;
514 *ps = s;
515 return s;
518 /* find a symbol and return its associated structure. 's' is the top
519 of the symbol stack */
520 ST_FUNC Sym *sym_find2(Sym *s, int v)
522 while (s) {
523 if (s->v == v)
524 return s;
525 else if (s->v == -1)
526 return NULL;
527 s = s->prev;
529 return NULL;
532 /* structure lookup */
533 ST_INLN Sym *struct_find(int v)
535 v -= TOK_IDENT;
536 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
537 return NULL;
538 return table_ident[v]->sym_struct;
541 /* find an identifier */
542 ST_INLN Sym *sym_find(int v)
544 v -= TOK_IDENT;
545 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
546 return NULL;
547 return table_ident[v]->sym_identifier;
550 /* push a given symbol on the symbol stack */
551 ST_FUNC Sym *sym_push(int v, CType *type, int r, int c)
553 Sym *s, **ps;
554 TokenSym *ts;
556 if (local_stack)
557 ps = &local_stack;
558 else
559 ps = &global_stack;
560 s = sym_push2(ps, v, type->t, c);
561 s->type.ref = type->ref;
562 s->r = r;
563 /* don't record fields or anonymous symbols */
564 /* XXX: simplify */
565 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
566 /* record symbol in token array */
567 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
568 if (v & SYM_STRUCT)
569 ps = &ts->sym_struct;
570 else
571 ps = &ts->sym_identifier;
572 s->prev_tok = *ps;
573 *ps = s;
574 s->sym_scope = local_scope;
575 if (s->prev_tok && s->prev_tok->sym_scope == s->sym_scope)
576 tcc_error("redeclaration of '%s'",
577 get_tok_str(v & ~SYM_STRUCT, NULL));
579 return s;
582 /* push a global identifier */
583 ST_FUNC Sym *global_identifier_push(int v, int t, int c)
585 Sym *s, **ps;
586 s = sym_push2(&global_stack, v, t, c);
587 /* don't record anonymous symbol */
588 if (v < SYM_FIRST_ANOM) {
589 ps = &table_ident[v - TOK_IDENT]->sym_identifier;
590 /* modify the topmost local identifier, so that
591 sym_identifier will point to 's' when popped */
592 while (*ps != NULL && (*ps)->sym_scope)
593 ps = &(*ps)->prev_tok;
594 s->prev_tok = *ps;
595 *ps = s;
597 return s;
600 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
601 pop them yet from the list, but do remove them from the token array. */
602 ST_FUNC void sym_pop(Sym **ptop, Sym *b, int keep)
604 Sym *s, *ss, **ps;
605 TokenSym *ts;
606 int v;
608 s = *ptop;
609 while(s != b) {
610 ss = s->prev;
611 v = s->v;
612 /* remove symbol in token array */
613 /* XXX: simplify */
614 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
615 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
616 if (v & SYM_STRUCT)
617 ps = &ts->sym_struct;
618 else
619 ps = &ts->sym_identifier;
620 *ps = s->prev_tok;
622 if (!keep)
623 sym_free(s);
624 s = ss;
626 if (!keep)
627 *ptop = b;
630 /* ------------------------------------------------------------------------- */
632 static void vsetc(CType *type, int r, CValue *vc)
634 int v;
636 if (vtop >= vstack + (VSTACK_SIZE - 1))
637 tcc_error("memory full (vstack)");
638 /* cannot leave values in the cpu flags if other instructions are generated. Also
639 avoid leaving VT_JMP anywhere except on the top of the stack
640 because it would complicate the code generator.
642 Don't do this when nocode_wanted. vtop might come from
643 !nocode_wanted regions (see 88_codeopt.c) and transforming
644 it to a register without actually generating code is wrong
645 as their value might still be used for real. All values
646 we push under nocode_wanted will eventually be popped
647 again, so that the VT_CMP/VT_JMP value will be in vtop
648 when code is unsuppressed again.
650 Same logic below in vswap(); */
651 if (vtop >= vstack && !nocode_wanted) {
652 v = vtop->r & VT_VALMASK;
653 if (v == VT_CMP || (v & ~1) == VT_JMP)
654 gv(RC_INT);
657 vtop++;
658 vtop->type = *type;
659 vtop->r = r;
660 vtop->r2 = VT_CONST;
661 vtop->c = *vc;
662 vtop->sym = NULL;
665 ST_FUNC void vswap(void)
667 SValue tmp;
668 /* cannot vswap cpu flags. See comment at vsetc() above */
669 if (vtop >= vstack && !nocode_wanted) {
670 int v = vtop->r & VT_VALMASK;
671 if (v == VT_CMP || (v & ~1) == VT_JMP)
672 gv(RC_INT);
674 tmp = vtop[0];
675 vtop[0] = vtop[-1];
676 vtop[-1] = tmp;
679 /* pop stack value */
680 ST_FUNC void vpop(void)
682 int v;
683 v = vtop->r & VT_VALMASK;
684 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
685 /* for x86, we need to pop the FP stack */
686 if (v == TREG_ST0) {
687 o(0xd8dd); /* fstp %st(0) */
688 } else
689 #endif
690 if (v == VT_JMP || v == VT_JMPI) {
691 /* need to resolve the pending jump from && or || used without a test */
692 gsym(vtop->c.i);
694 vtop--;
698 /* push a constant of type "type" with an unspecified value */
698 ST_FUNC void vpush(CType *type)
700 vset(type, VT_CONST, 0);
703 /* push integer constant */
704 ST_FUNC void vpushi(int v)
706 CValue cval;
707 cval.i = v;
708 vsetc(&int_type, VT_CONST, &cval);
711 /* push a pointer sized constant */
712 static void vpushs(addr_t v)
714 CValue cval;
715 cval.i = v;
716 vsetc(&size_type, VT_CONST, &cval);
719 /* push arbitrary 64bit constant */
720 ST_FUNC void vpush64(int ty, unsigned long long v)
722 CValue cval;
723 CType ctype;
724 ctype.t = ty;
725 ctype.ref = NULL;
726 cval.i = v;
727 vsetc(&ctype, VT_CONST, &cval);
730 /* push long long constant */
731 static inline void vpushll(long long v)
733 vpush64(VT_LLONG, v);
736 ST_FUNC void vset(CType *type, int r, int v)
738 CValue cval;
740 cval.i = v;
741 vsetc(type, r, &cval);
744 static void vseti(int r, int v)
746 CType type;
747 type.t = VT_INT;
748 type.ref = NULL;
749 vset(&type, r, v);
752 ST_FUNC void vpushv(SValue *v)
754 if (vtop >= vstack + (VSTACK_SIZE - 1))
755 tcc_error("memory full (vstack)");
756 vtop++;
757 *vtop = *v;
760 static void vdup(void)
762 vpushv(vtop);
765 /* rotate n first stack elements to the bottom
766 I1 ... In -> I2 ... In I1 [top is right]
768 ST_FUNC void vrotb(int n)
770 int i;
771 SValue tmp;
773 tmp = vtop[-n + 1];
774 for(i=-n+1;i!=0;i++)
775 vtop[i] = vtop[i+1];
776 vtop[0] = tmp;
779 /* rotate the n elements before entry e towards the top
780 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
782 ST_FUNC void vrote(SValue *e, int n)
784 int i;
785 SValue tmp;
787 tmp = *e;
788 for(i = 0;i < n - 1; i++)
789 e[-i] = e[-i - 1];
790 e[-n + 1] = tmp;
793 /* rotate n first stack elements to the top
794 I1 ... In -> In I1 ... I(n-1) [top is right]
796 ST_FUNC void vrott(int n)
798 vrote(vtop, n);
801 /* push a symbol value of TYPE */
802 static inline void vpushsym(CType *type, Sym *sym)
804 CValue cval;
805 cval.i = 0;
806 vsetc(type, VT_CONST | VT_SYM, &cval);
807 vtop->sym = sym;
810 /* Return a static symbol pointing to a section */
811 ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
813 int v;
814 Sym *sym;
816 v = anon_sym++;
817 sym = global_identifier_push(v, type->t | VT_STATIC, 0);
818 sym->type.ref = type->ref;
819 sym->r = VT_CONST | VT_SYM;
820 put_extern_sym(sym, sec, offset, size);
821 return sym;
824 /* push a reference to a section offset by adding a dummy symbol */
825 static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
827 vpushsym(type, get_sym_ref(type, sec, offset, size));
830 /* define a new external reference to a symbol 'v' of type 'u' */
831 ST_FUNC Sym *external_global_sym(int v, CType *type, int r)
833 Sym *s;
835 s = sym_find(v);
836 if (!s) {
837 /* push forward reference */
838 s = global_identifier_push(v, type->t | VT_EXTERN, 0);
839 s->type.ref = type->ref;
840 s->r = r | VT_CONST | VT_SYM;
841 } else if (IS_ASM_SYM(s)) {
842 s->type.t = type->t | (s->type.t & VT_EXTERN);
843 s->type.ref = type->ref;
844 update_storage(s);
846 return s;
849 /* Merge some type attributes. */
850 static void patch_type(Sym *sym, CType *type)
852 if (!(type->t & VT_EXTERN)) {
853 if (!(sym->type.t & VT_EXTERN))
854 tcc_error("redefinition of '%s'", get_tok_str(sym->v, NULL));
855 sym->type.t &= ~VT_EXTERN;
858 if (IS_ASM_SYM(sym)) {
859 /* stay static if both are static */
860 sym->type.t = type->t & (sym->type.t | ~VT_STATIC);
861 sym->type.ref = type->ref;
864 if (!is_compatible_types(&sym->type, type)) {
865 tcc_error("incompatible types for redefinition of '%s'",
866 get_tok_str(sym->v, NULL));
868 } else if ((sym->type.t & VT_BTYPE) == VT_FUNC) {
869 int static_proto = sym->type.t & VT_STATIC;
870 /* warn if static follows non-static function declaration */
871 if ((type->t & VT_STATIC) && !static_proto && !(type->t & VT_INLINE))
872 tcc_warning("static storage ignored for redefinition of '%s'",
873 get_tok_str(sym->v, NULL));
875 if (0 == (type->t & VT_EXTERN)) {
876 /* put complete type, use static from prototype */
877 sym->type.t = (type->t & ~VT_STATIC) | static_proto;
878 if (type->t & VT_INLINE)
879 sym->type.t = type->t;
880 sym->type.ref = type->ref;
883 } else {
884 if ((sym->type.t & VT_ARRAY) && type->ref->c >= 0) {
885 /* set array size if it was omitted in extern declaration */
886 if (sym->type.ref->c < 0)
887 sym->type.ref->c = type->ref->c;
888 else if (sym->type.ref->c != type->ref->c)
889 tcc_error("conflicting type for '%s'", get_tok_str(sym->v, NULL));
891 if ((type->t ^ sym->type.t) & VT_STATIC)
892 tcc_warning("storage mismatch for redefinition of '%s'",
893 get_tok_str(sym->v, NULL));
898 /* Merge some storage attributes. */
899 static void patch_storage(Sym *sym, AttributeDef *ad, CType *type)
901 if (type)
902 patch_type(sym, type);
904 #ifdef TCC_TARGET_PE
905 if (sym->a.dllimport != ad->a.dllimport)
906 tcc_error("incompatible dll linkage for redefinition of '%s'",
907 get_tok_str(sym->v, NULL));
908 sym->a.dllexport |= ad->a.dllexport;
909 #endif
910 sym->a.weak |= ad->a.weak;
911 if (ad->a.visibility) {
912 int vis = sym->a.visibility;
913 int vis2 = ad->a.visibility;
914 if (vis == STV_DEFAULT)
915 vis = vis2;
916 else if (vis2 != STV_DEFAULT)
917 vis = (vis < vis2) ? vis : vis2;
918 sym->a.visibility = vis;
920 if (ad->a.aligned)
921 sym->a.aligned = ad->a.aligned;
922 if (ad->asm_label)
923 sym->asm_label = ad->asm_label;
924 update_storage(sym);
927 /* define a new external reference to a symbol 'v' */
928 static Sym *external_sym(int v, CType *type, int r, AttributeDef *ad)
930 Sym *s;
931 s = sym_find(v);
932 if (!s) {
933 /* push forward reference */
934 s = sym_push(v, type, r | VT_CONST | VT_SYM, 0);
935 s->type.t |= VT_EXTERN;
936 s->a = ad->a;
937 s->sym_scope = 0;
938 } else {
939 if (s->type.ref == func_old_type.ref) {
940 s->type.ref = type->ref;
941 s->r = r | VT_CONST | VT_SYM;
942 s->type.t |= VT_EXTERN;
944 patch_storage(s, ad, type);
946 return s;
949 /* push a reference to global symbol v */
950 ST_FUNC void vpush_global_sym(CType *type, int v)
952 vpushsym(type, external_global_sym(v, type, 0));
955 /* save registers up to (vtop - n) stack entry */
956 ST_FUNC void save_regs(int n)
958 SValue *p, *p1;
959 for(p = vstack, p1 = vtop - n; p <= p1; p++)
960 save_reg(p->r);
963 /* save r to the memory stack, and mark it as being free */
964 ST_FUNC void save_reg(int r)
966 save_reg_upstack(r, 0);
969 /* save r to the memory stack, and mark it as being free,
970 if seen up to (vtop - n) stack entry */
971 ST_FUNC void save_reg_upstack(int r, int n)
973 int l, saved, size, align;
974 SValue *p, *p1, sv;
975 CType *type;
977 if ((r &= VT_VALMASK) >= VT_CONST)
978 return;
979 if (nocode_wanted)
980 return;
982 /* modify all stack values */
983 saved = 0;
984 l = 0;
985 for(p = vstack, p1 = vtop - n; p <= p1; p++) {
986 if ((p->r & VT_VALMASK) == r ||
987 ((p->type.t & VT_BTYPE) == VT_LLONG && (p->r2 & VT_VALMASK) == r)) {
988 /* must save value on stack if not already done */
989 if (!saved) {
990 /* NOTE: must reload 'r' because r might be equal to r2 */
991 r = p->r & VT_VALMASK;
992 /* store register in the stack */
993 type = &p->type;
994 if ((p->r & VT_LVAL) ||
995 (!is_float(type->t) && (type->t & VT_BTYPE) != VT_LLONG))
996 #if PTR_SIZE == 8
997 type = &char_pointer_type;
998 #else
999 type = &int_type;
1000 #endif
1001 size = type_size(type, &align);
1002 loc = (loc - size) & -align;
1003 sv.type.t = type->t;
1004 sv.r = VT_LOCAL | VT_LVAL;
1005 sv.c.i = loc;
1006 store(r, &sv);
1007 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1008 /* x86 specific: need to pop fp register ST0 if saved */
1009 if (r == TREG_ST0) {
1010 o(0xd8dd); /* fstp %st(0) */
1012 #endif
1013 #if PTR_SIZE == 4
1014 /* special long long case */
1015 if ((type->t & VT_BTYPE) == VT_LLONG) {
1016 sv.c.i += 4;
1017 store(p->r2, &sv);
1019 #endif
1020 l = loc;
1021 saved = 1;
1023 /* mark that stack entry as being saved on the stack */
1024 if (p->r & VT_LVAL) {
1025 /* also clear the bounded flag because the
1026 relocation address of the function was stored in
1027 p->c.i */
1028 p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
1029 } else {
1030 p->r = lvalue_type(p->type.t) | VT_LOCAL;
1032 p->r2 = VT_CONST;
1033 p->c.i = l;
1038 #ifdef TCC_TARGET_ARM
1039 /* find a register of class 'rc2' with at most one reference on stack.
1040 * If none, call get_reg(rc) */
1041 ST_FUNC int get_reg_ex(int rc, int rc2)
1043 int r;
1044 SValue *p;
1046 for(r=0;r<NB_REGS;r++) {
1047 if (reg_classes[r] & rc2) {
1048 int n;
1049 n=0;
1050 for(p = vstack; p <= vtop; p++) {
1051 if ((p->r & VT_VALMASK) == r ||
1052 (p->r2 & VT_VALMASK) == r)
1053 n++;
1055 if (n <= 1)
1056 return r;
1059 return get_reg(rc);
1061 #endif
1063 /* find a free register of class 'rc'. If none, save one register */
1064 ST_FUNC int get_reg(int rc)
1066 int r;
1067 SValue *p;
1069 /* find a free register */
1070 for(r=0;r<NB_REGS;r++) {
1071 if (reg_classes[r] & rc) {
1072 if (nocode_wanted)
1073 return r;
1074 for(p=vstack;p<=vtop;p++) {
1075 if ((p->r & VT_VALMASK) == r ||
1076 (p->r2 & VT_VALMASK) == r)
1077 goto notfound;
1079 return r;
1081 notfound: ;
1084 /* no register left : free the first one on the stack (VERY
1085 IMPORTANT to start from the bottom to ensure that we don't
1086 spill registers used in gen_opi()) */
1087 for(p=vstack;p<=vtop;p++) {
1088 /* look at second register (if long long) */
1089 r = p->r2 & VT_VALMASK;
1090 if (r < VT_CONST && (reg_classes[r] & rc))
1091 goto save_found;
1092 r = p->r & VT_VALMASK;
1093 if (r < VT_CONST && (reg_classes[r] & rc)) {
1094 save_found:
1095 save_reg(r);
1096 return r;
1099 /* Should never come here */
1100 return -1;
1103 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1104 if needed */
1105 static void move_reg(int r, int s, int t)
1107 SValue sv;
1109 if (r != s) {
1110 save_reg(r);
1111 sv.type.t = t;
1112 sv.type.ref = NULL;
1113 sv.r = s;
1114 sv.c.i = 0;
1115 load(r, &sv);
1119 /* get address of vtop (vtop MUST BE an lvalue) */
1120 ST_FUNC void gaddrof(void)
1122 vtop->r &= ~VT_LVAL;
1123 /* tricky: if saved lvalue, then we can go back to lvalue */
1124 if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
1125 vtop->r = (vtop->r & ~(VT_VALMASK | VT_LVAL_TYPE)) | VT_LOCAL | VT_LVAL;
1130 #ifdef CONFIG_TCC_BCHECK
1131 /* generate lvalue bound code */
1132 static void gbound(void)
1134 int lval_type;
1135 CType type1;
1137 vtop->r &= ~VT_MUSTBOUND;
1138 /* if lvalue, then use checking code before dereferencing */
1139 if (vtop->r & VT_LVAL) {
1140 /* if not VT_BOUNDED value, then make one */
1141 if (!(vtop->r & VT_BOUNDED)) {
1142 lval_type = vtop->r & (VT_LVAL_TYPE | VT_LVAL);
1143 /* must save type because we must set it to int to get pointer */
1144 type1 = vtop->type;
1145 vtop->type.t = VT_PTR;
1146 gaddrof();
1147 vpushi(0);
1148 gen_bounded_ptr_add();
1149 vtop->r |= lval_type;
1150 vtop->type = type1;
1152 /* then check for dereferencing */
1153 gen_bounded_ptr_deref();
1156 #endif
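/* advance the address in vtop by 'o' bytes and retype it as an unsigned
   char lvalue; helper for the byte-wise bitfield accesses below */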
1158 static void incr_bf_adr(int o)
1160 vtop->type = char_pointer_type;
1161 gaddrof();
1162 vpushi(o);
1163 gen_op('+');
1164 vtop->type.t = (vtop->type.t & ~(VT_BTYPE|VT_DEFSIGN))
1165 | (VT_BYTE|VT_UNSIGNED);
1166 vtop->r = (vtop->r & ~VT_LVAL_TYPE)
1167 | (VT_LVAL_BYTE|VT_LVAL_UNSIGNED|VT_LVAL);
1170 /* single-byte load mode for packed or otherwise unaligned bitfields */
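/* the value is assembled byte by byte: each pass extracts the bits that
   live in the current byte, shifts them into place and or's them into an
   accumulator; the result is then sign-extended for signed bitfields */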
1171 static void load_packed_bf(CType *type, int bit_pos, int bit_size)
1173 int n, o, bits;
1174 save_reg_upstack(vtop->r, 1);
1175 vpush64(type->t & VT_BTYPE, 0); // B X
1176 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
1177 do {
1178 vswap(); // X B
1179 incr_bf_adr(o);
1180 vdup(); // X B B
1181 n = 8 - bit_pos;
1182 if (n > bit_size)
1183 n = bit_size;
1184 if (bit_pos)
1185 vpushi(bit_pos), gen_op(TOK_SHR), bit_pos = 0; // X B Y
1186 if (n < 8)
1187 vpushi((1 << n) - 1), gen_op('&');
1188 gen_cast(type);
1189 if (bits)
1190 vpushi(bits), gen_op(TOK_SHL);
1191 vrotb(3); // B Y X
1192 gen_op('|'); // B X
1193 bits += n, bit_size -= n, o = 1;
1194 } while (bit_size);
1195 vswap(), vpop();
1196 if (!(type->t & VT_UNSIGNED)) {
1197 n = ((type->t & VT_BTYPE) == VT_LLONG ? 64 : 32) - bits;
1198 vpushi(n), gen_op(TOK_SHL);
1199 vpushi(n), gen_op(TOK_SAR);
1203 /* single-byte store mode for packed or otherwise unaligned bitfields */
1204 static void store_packed_bf(int bit_pos, int bit_size)
1206 int bits, n, o, m, c;
1208 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1209 vswap(); // X B
1210 save_reg_upstack(vtop->r, 1);
1211 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
1212 do {
1213 incr_bf_adr(o); // X B
1214 vswap(); //B X
1215 c ? vdup() : gv_dup(); // B V X
1216 vrott(3); // X B V
1217 if (bits)
1218 vpushi(bits), gen_op(TOK_SHR);
1219 if (bit_pos)
1220 vpushi(bit_pos), gen_op(TOK_SHL);
1221 n = 8 - bit_pos;
1222 if (n > bit_size)
1223 n = bit_size;
1224 if (n < 8) {
1225 m = ((1 << n) - 1) << bit_pos;
1226 vpushi(m), gen_op('&'); // X B V1
1227 vpushv(vtop-1); // X B V1 B
1228 vpushi(m & 0x80 ? ~m & 0x7f : ~m);
1229 gen_op('&'); // X B V1 B1
1230 gen_op('|'); // X B V2
1232 vdup(), vtop[-1] = vtop[-2]; // X B B V2
1233 vstore(), vpop(); // X B
1234 bits += n, bit_size -= n, bit_pos = 0, o = 1;
1235 } while (bit_size);
1236 vpop(), vpop();
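/* choose the access type for a bitfield from the auxiliary type recorded by
   the struct layout code: if an ordinary auxtype is present the SValue is
   retyped to it; the return value is that auxtype, VT_STRUCT to select the
   byte-wise packed path, -1 when nothing was recorded, or 0 without a ref */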
1239 static int adjust_bf(SValue *sv, int bit_pos, int bit_size)
1241 int t;
1242 if (0 == sv->type.ref)
1243 return 0;
1244 t = sv->type.ref->auxtype;
1245 if (t != -1 && t != VT_STRUCT) {
1246 sv->type.t = (sv->type.t & ~VT_BTYPE) | t;
1247 sv->r = (sv->r & ~VT_LVAL_TYPE) | lvalue_type(sv->type.t);
1249 return t;
1252 /* store vtop in a register belonging to class 'rc'. lvalues are
1253 converted to values. Cannot be used if the value cannot be
1254 converted to a register value (such as structures). */
1255 ST_FUNC int gv(int rc)
1257 int r, bit_pos, bit_size, size, align, rc2;
1259 /* NOTE: get_reg can modify vstack[] */
1260 if (vtop->type.t & VT_BITFIELD) {
1261 CType type;
1263 bit_pos = BIT_POS(vtop->type.t);
1264 bit_size = BIT_SIZE(vtop->type.t);
1265 /* remove bit field info to avoid loops */
1266 vtop->type.t &= ~VT_STRUCT_MASK;
1268 type.ref = NULL;
1269 type.t = vtop->type.t & VT_UNSIGNED;
1270 if ((vtop->type.t & VT_BTYPE) == VT_BOOL)
1271 type.t |= VT_UNSIGNED;
1273 r = adjust_bf(vtop, bit_pos, bit_size);
1275 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
1276 type.t |= VT_LLONG;
1277 else
1278 type.t |= VT_INT;
1280 if (r == VT_STRUCT) {
1281 load_packed_bf(&type, bit_pos, bit_size);
1282 } else {
1283 int bits = (type.t & VT_BTYPE) == VT_LLONG ? 64 : 32;
1284 /* cast to int to propagate signedness in following ops */
1285 gen_cast(&type);
1286 /* generate shifts */
1287 vpushi(bits - (bit_pos + bit_size));
1288 gen_op(TOK_SHL);
1289 vpushi(bits - bit_size);
1290 /* NOTE: transformed to SHR if unsigned */
1291 gen_op(TOK_SAR);
1293 r = gv(rc);
1294 } else {
1295 if (is_float(vtop->type.t) &&
1296 (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1297 unsigned long offset;
1298 /* CPUs usually cannot use float constants, so we store them
1299 generically in data segment */
1300 size = type_size(&vtop->type, &align);
1301 if (NODATA_WANTED)
1302 size = 0, align = 1;
1303 offset = section_add(data_section, size, align);
1304 vpush_ref(&vtop->type, data_section, offset, size);
1305 vswap();
1306 init_putv(&vtop->type, data_section, offset);
1307 vtop->r |= VT_LVAL;
1309 #ifdef CONFIG_TCC_BCHECK
1310 if (vtop->r & VT_MUSTBOUND)
1311 gbound();
1312 #endif
1314 r = vtop->r & VT_VALMASK;
1315 rc2 = (rc & RC_FLOAT) ? RC_FLOAT : RC_INT;
1316 #ifndef TCC_TARGET_ARM64
1317 if (rc == RC_IRET)
1318 rc2 = RC_LRET;
1319 #ifdef TCC_TARGET_X86_64
1320 else if (rc == RC_FRET)
1321 rc2 = RC_QRET;
1322 #endif
1323 #endif
1324 /* need to reload if:
1325 - constant
1326 - lvalue (need to dereference pointer)
1327 - already a register, but not in the right class */
1328 if (r >= VT_CONST
1329 || (vtop->r & VT_LVAL)
1330 || !(reg_classes[r] & rc)
1331 #if PTR_SIZE == 8
1332 || ((vtop->type.t & VT_BTYPE) == VT_QLONG && !(reg_classes[vtop->r2] & rc2))
1333 || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT && !(reg_classes[vtop->r2] & rc2))
1334 #else
1335 || ((vtop->type.t & VT_BTYPE) == VT_LLONG && !(reg_classes[vtop->r2] & rc2))
1336 #endif
1339 r = get_reg(rc);
1340 #if PTR_SIZE == 8
1341 if (((vtop->type.t & VT_BTYPE) == VT_QLONG) || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT)) {
1342 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
1343 #else
1344 if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
1345 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
1346 unsigned long long ll;
1347 #endif
1348 int r2, original_type;
1349 original_type = vtop->type.t;
1350 /* two register type load : expand to two words
1351 temporarily */
1352 #if PTR_SIZE == 4
1353 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1354 /* load constant */
1355 ll = vtop->c.i;
1356 vtop->c.i = ll; /* first word */
1357 load(r, vtop);
1358 vtop->r = r; /* save register value */
1359 vpushi(ll >> 32); /* second word */
1360 } else
1361 #endif
1362 if (vtop->r & VT_LVAL) {
1363 /* We do not want to modify the long long
1364 pointer here, so the safest (and least
1365 efficient) approach is to save all the other registers
1366 on the stack. XXX: totally inefficient. */
1367 #if 0
1368 save_regs(1);
1369 #else
1370 /* lvalue_save: save only if used further down the stack */
1371 save_reg_upstack(vtop->r, 1);
1372 #endif
1373 /* load from memory */
1374 vtop->type.t = load_type;
1375 load(r, vtop);
1376 vdup();
1377 vtop[-1].r = r; /* save register value */
1378 /* increment pointer to get second word */
1379 vtop->type.t = addr_type;
1380 gaddrof();
1381 vpushi(load_size);
1382 gen_op('+');
1383 vtop->r |= VT_LVAL;
1384 vtop->type.t = load_type;
1385 } else {
1386 /* move registers */
1387 load(r, vtop);
1388 vdup();
1389 vtop[-1].r = r; /* save register value */
1390 vtop->r = vtop[-1].r2;
1392 /* Allocate second register. Here we rely on the fact that
1393 get_reg() tries first to free r2 of an SValue. */
1394 r2 = get_reg(rc2);
1395 load(r2, vtop);
1396 vpop();
1397 /* write second register */
1398 vtop->r2 = r2;
1399 vtop->type.t = original_type;
1400 } else if ((vtop->r & VT_LVAL) && !is_float(vtop->type.t)) {
1401 int t1, t;
1402 /* lvalue of scalar type : need to use lvalue type
1403 because of possible cast */
1404 t = vtop->type.t;
1405 t1 = t;
1406 /* compute memory access type */
1407 if (vtop->r & VT_LVAL_BYTE)
1408 t = VT_BYTE;
1409 else if (vtop->r & VT_LVAL_SHORT)
1410 t = VT_SHORT;
1411 if (vtop->r & VT_LVAL_UNSIGNED)
1412 t |= VT_UNSIGNED;
1413 vtop->type.t = t;
1414 load(r, vtop);
1415 /* restore wanted type */
1416 vtop->type.t = t1;
1417 } else {
1418 /* one register type load */
1419 load(r, vtop);
1422 vtop->r = r;
1423 #ifdef TCC_TARGET_C67
1424 /* uses register pairs for doubles */
1425 if ((vtop->type.t & VT_BTYPE) == VT_DOUBLE)
1426 vtop->r2 = r+1;
1427 #endif
1429 return r;
1432 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1433 ST_FUNC void gv2(int rc1, int rc2)
1435 int v;
1437 /* generate more generic register first. But VT_JMP or VT_CMP
1438 values must be generated first in all cases to avoid possible
1439 reload errors */
1440 v = vtop[0].r & VT_VALMASK;
1441 if (v != VT_CMP && (v & ~1) != VT_JMP && rc1 <= rc2) {
1442 vswap();
1443 gv(rc1);
1444 vswap();
1445 gv(rc2);
1446 /* test if reload is needed for first register */
1447 if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
1448 vswap();
1449 gv(rc1);
1450 vswap();
1452 } else {
1453 gv(rc2);
1454 vswap();
1455 gv(rc1);
1456 vswap();
1457 /* test if reload is needed for first register */
1458 if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
1459 gv(rc2);
1464 #ifndef TCC_TARGET_ARM64
1465 /* wrapper around RC_FRET to return a register by type */
1466 static int rc_fret(int t)
1468 #ifdef TCC_TARGET_X86_64
1469 if (t == VT_LDOUBLE) {
1470 return RC_ST0;
1472 #endif
1473 return RC_FRET;
1475 #endif
1477 /* wrapper around REG_FRET to return a register by type */
1478 static int reg_fret(int t)
1480 #ifdef TCC_TARGET_X86_64
1481 if (t == VT_LDOUBLE) {
1482 return TREG_ST0;
1484 #endif
1485 return REG_FRET;
1488 #if PTR_SIZE == 4
1489 /* expand a 64-bit value on the stack into two ints */
1490 static void lexpand(void)
1492 int u, v;
1493 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1494 v = vtop->r & (VT_VALMASK | VT_LVAL);
1495 if (v == VT_CONST) {
1496 vdup();
1497 vtop[0].c.i >>= 32;
1498 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1499 vdup();
1500 vtop[0].c.i += 4;
1501 } else {
1502 gv(RC_INT);
1503 vdup();
1504 vtop[0].r = vtop[-1].r2;
1505 vtop[0].r2 = vtop[-1].r2 = VT_CONST;
1507 vtop[0].type.t = vtop[-1].type.t = VT_INT | u;
1509 #endif
1511 #ifdef TCC_TARGET_ARM
1512 /* expand long long on stack */
1513 ST_FUNC void lexpand_nr(void)
1515 int u,v;
1517 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1518 vdup();
1519 vtop->r2 = VT_CONST;
1520 vtop->type.t = VT_INT | u;
1521 v=vtop[-1].r & (VT_VALMASK | VT_LVAL);
1522 if (v == VT_CONST) {
1523 vtop[-1].c.i = vtop->c.i;
1524 vtop->c.i = vtop->c.i >> 32;
1525 vtop->r = VT_CONST;
1526 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1527 vtop->c.i += 4;
1528 vtop->r = vtop[-1].r;
1529 } else if (v > VT_CONST) {
1530 vtop--;
1531 lexpand();
1532 } else
1533 vtop->r = vtop[-1].r2;
1534 vtop[-1].r2 = VT_CONST;
1535 vtop[-1].type.t = VT_INT | u;
1537 #endif
1539 #if PTR_SIZE == 4
1540 /* build a long long from two ints */
1541 static void lbuild(int t)
1543 gv2(RC_INT, RC_INT);
1544 vtop[-1].r2 = vtop[0].r;
1545 vtop[-1].type.t = t;
1546 vpop();
1548 #endif
1550 /* convert stack entry to register and duplicate its value in another
1551 register */
1552 static void gv_dup(void)
1554 int rc, t, r, r1;
1555 SValue sv;
1557 t = vtop->type.t;
1558 #if PTR_SIZE == 4
1559 if ((t & VT_BTYPE) == VT_LLONG) {
1560 if (t & VT_BITFIELD) {
1561 gv(RC_INT);
1562 t = vtop->type.t;
1564 lexpand();
1565 gv_dup();
1566 vswap();
1567 vrotb(3);
1568 gv_dup();
1569 vrotb(4);
1570 /* stack: H L L1 H1 */
1571 lbuild(t);
1572 vrotb(3);
1573 vrotb(3);
1574 vswap();
1575 lbuild(t);
1576 vswap();
1577 } else
1578 #endif
1580 /* duplicate value */
1581 rc = RC_INT;
1582 sv.type.t = VT_INT;
1583 if (is_float(t)) {
1584 rc = RC_FLOAT;
1585 #ifdef TCC_TARGET_X86_64
1586 if ((t & VT_BTYPE) == VT_LDOUBLE) {
1587 rc = RC_ST0;
1589 #endif
1590 sv.type.t = t;
1592 r = gv(rc);
1593 r1 = get_reg(rc);
1594 sv.r = r;
1595 sv.c.i = 0;
1596 load(r1, &sv); /* move r to r1 */
1597 vdup();
1598 /* duplicates value */
1599 if (r != r1)
1600 vtop->r = r1;
1604 /* Generate value test
1606 * Generate a test for any value (jumps, comparisons and integers) */
1607 ST_FUNC int gvtst(int inv, int t)
1609 int v = vtop->r & VT_VALMASK;
1610 if (v != VT_CMP && v != VT_JMP && v != VT_JMPI) {
1611 vpushi(0);
1612 gen_op(TOK_NE);
1614 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1615 /* constant jmp optimization */
1616 if ((vtop->c.i != 0) != inv)
1617 t = gjmp(t);
1618 vtop--;
1619 return t;
1621 return gtst(inv, t);
1624 #if PTR_SIZE == 4
1625 /* generate CPU independent (unsigned) long long operations */
1626 static void gen_opl(int op)
1628 int t, a, b, op1, c, i;
1629 int func;
1630 unsigned short reg_iret = REG_IRET;
1631 unsigned short reg_lret = REG_LRET;
1632 SValue tmp;
1634 switch(op) {
1635 case '/':
1636 case TOK_PDIV:
1637 func = TOK___divdi3;
1638 goto gen_func;
1639 case TOK_UDIV:
1640 func = TOK___udivdi3;
1641 goto gen_func;
1642 case '%':
1643 func = TOK___moddi3;
1644 goto gen_mod_func;
1645 case TOK_UMOD:
1646 func = TOK___umoddi3;
1647 gen_mod_func:
1648 #ifdef TCC_ARM_EABI
1649 reg_iret = TREG_R2;
1650 reg_lret = TREG_R3;
1651 #endif
1652 gen_func:
1653 /* call generic long long function */
1654 vpush_global_sym(&func_old_type, func);
1655 vrott(3);
1656 gfunc_call(2);
1657 vpushi(0);
1658 vtop->r = reg_iret;
1659 vtop->r2 = reg_lret;
1660 break;
1661 case '^':
1662 case '&':
1663 case '|':
1664 case '*':
1665 case '+':
1666 case '-':
1667 //pv("gen_opl A",0,2);
1668 t = vtop->type.t;
1669 vswap();
1670 lexpand();
1671 vrotb(3);
1672 lexpand();
1673 /* stack: L1 H1 L2 H2 */
1674 tmp = vtop[0];
1675 vtop[0] = vtop[-3];
1676 vtop[-3] = tmp;
1677 tmp = vtop[-2];
1678 vtop[-2] = vtop[-3];
1679 vtop[-3] = tmp;
1680 vswap();
1681 /* stack: H1 H2 L1 L2 */
1682 //pv("gen_opl B",0,4);
1683 if (op == '*') {
1684 vpushv(vtop - 1);
1685 vpushv(vtop - 1);
1686 gen_op(TOK_UMULL);
1687 lexpand();
1688 /* stack: H1 H2 L1 L2 ML MH */
1689 for(i=0;i<4;i++)
1690 vrotb(6);
1691 /* stack: ML MH H1 H2 L1 L2 */
1692 tmp = vtop[0];
1693 vtop[0] = vtop[-2];
1694 vtop[-2] = tmp;
1695 /* stack: ML MH H1 L2 H2 L1 */
1696 gen_op('*');
1697 vrotb(3);
1698 vrotb(3);
1699 gen_op('*');
1700 /* stack: ML MH M1 M2 */
1701 gen_op('+');
1702 gen_op('+');
1703 } else if (op == '+' || op == '-') {
1704 /* XXX: add non carry method too (for MIPS or alpha) */
1705 if (op == '+')
1706 op1 = TOK_ADDC1;
1707 else
1708 op1 = TOK_SUBC1;
1709 gen_op(op1);
1710 /* stack: H1 H2 (L1 op L2) */
1711 vrotb(3);
1712 vrotb(3);
1713 gen_op(op1 + 1); /* TOK_xxxC2 */
1714 } else {
1715 gen_op(op);
1716 /* stack: H1 H2 (L1 op L2) */
1717 vrotb(3);
1718 vrotb(3);
1719 /* stack: (L1 op L2) H1 H2 */
1720 gen_op(op);
1721 /* stack: (L1 op L2) (H1 op H2) */
1723 /* stack: L H */
1724 lbuild(t);
1725 break;
1726 case TOK_SAR:
1727 case TOK_SHR:
1728 case TOK_SHL:
1729 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1730 t = vtop[-1].type.t;
1731 vswap();
1732 lexpand();
1733 vrotb(3);
1734 /* stack: L H shift */
1735 c = (int)vtop->c.i;
1736 /* constant: simpler */
1737 /* NOTE: all comments are for SHL. the other cases are
1738 done by swapping words */
1739 vpop();
1740 if (op != TOK_SHL)
1741 vswap();
1742 if (c >= 32) {
1743 /* stack: L H */
1744 vpop();
1745 if (c > 32) {
1746 vpushi(c - 32);
1747 gen_op(op);
1749 if (op != TOK_SAR) {
1750 vpushi(0);
1751 } else {
1752 gv_dup();
1753 vpushi(31);
1754 gen_op(TOK_SAR);
1756 vswap();
1757 } else {
1758 vswap();
1759 gv_dup();
1760 /* stack: H L L */
1761 vpushi(c);
1762 gen_op(op);
1763 vswap();
1764 vpushi(32 - c);
1765 if (op == TOK_SHL)
1766 gen_op(TOK_SHR);
1767 else
1768 gen_op(TOK_SHL);
1769 vrotb(3);
1770 /* stack: L L H */
1771 vpushi(c);
1772 if (op == TOK_SHL)
1773 gen_op(TOK_SHL);
1774 else
1775 gen_op(TOK_SHR);
1776 gen_op('|');
1778 if (op != TOK_SHL)
1779 vswap();
1780 lbuild(t);
1781 } else {
1782 /* XXX: should provide a faster fallback on x86 ? */
1783 switch(op) {
1784 case TOK_SAR:
1785 func = TOK___ashrdi3;
1786 goto gen_func;
1787 case TOK_SHR:
1788 func = TOK___lshrdi3;
1789 goto gen_func;
1790 case TOK_SHL:
1791 func = TOK___ashldi3;
1792 goto gen_func;
1795 break;
1796 default:
1797 /* compare operations */
1798 t = vtop->type.t;
1799 vswap();
1800 lexpand();
1801 vrotb(3);
1802 lexpand();
1803 /* stack: L1 H1 L2 H2 */
1804 tmp = vtop[-1];
1805 vtop[-1] = vtop[-2];
1806 vtop[-2] = tmp;
1807 /* stack: L1 L2 H1 H2 */
1808 /* compare high */
1809 op1 = op;
1810 /* when values are equal, we need to compare low words. since
1811 the jump is inverted, we invert the test too. */
1812 if (op1 == TOK_LT)
1813 op1 = TOK_LE;
1814 else if (op1 == TOK_GT)
1815 op1 = TOK_GE;
1816 else if (op1 == TOK_ULT)
1817 op1 = TOK_ULE;
1818 else if (op1 == TOK_UGT)
1819 op1 = TOK_UGE;
1820 a = 0;
1821 b = 0;
1822 gen_op(op1);
1823 if (op == TOK_NE) {
1824 b = gvtst(0, 0);
1825 } else {
1826 a = gvtst(1, 0);
1827 if (op != TOK_EQ) {
1828 /* generate non equal test */
1829 vpushi(TOK_NE);
1830 vtop->r = VT_CMP;
1831 b = gvtst(0, 0);
1834 /* compare low. Always unsigned */
1835 op1 = op;
1836 if (op1 == TOK_LT)
1837 op1 = TOK_ULT;
1838 else if (op1 == TOK_LE)
1839 op1 = TOK_ULE;
1840 else if (op1 == TOK_GT)
1841 op1 = TOK_UGT;
1842 else if (op1 == TOK_GE)
1843 op1 = TOK_UGE;
1844 gen_op(op1);
1845 a = gvtst(1, a);
1846 gsym(b);
1847 vseti(VT_JMPI, a);
1848 break;
1851 #endif
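/* the two helpers below emulate signed 64-bit operations on the unsigned
   values used for constant folding: gen_opic_sdiv strips the signs, divides,
   and restores the sign from a^b; gen_opic_lt flips the sign bits so that an
   unsigned comparison yields the signed ordering */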
1853 static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
1855 uint64_t x = (a >> 63 ? -a : a) / (b >> 63 ? -b : b);
1856 return (a ^ b) >> 63 ? -x : x;
1859 static int gen_opic_lt(uint64_t a, uint64_t b)
1861 return (a ^ (uint64_t)1 << 63) < (b ^ (uint64_t)1 << 63);
1864 /* handle integer constant folding and various machine-
1865 independent optimizations */
1866 static void gen_opic(int op)
1868 SValue *v1 = vtop - 1;
1869 SValue *v2 = vtop;
1870 int t1 = v1->type.t & VT_BTYPE;
1871 int t2 = v2->type.t & VT_BTYPE;
1872 int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1873 int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1874 uint64_t l1 = c1 ? v1->c.i : 0;
1875 uint64_t l2 = c2 ? v2->c.i : 0;
1876 int shm = (t1 == VT_LLONG) ? 63 : 31;
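    /* bring 32-bit operands to a canonical 64-bit form: zero-extend if
       unsigned, sign-extend otherwise, so the folding below can operate
       uniformly on uint64_t values */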
1878 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
1879 l1 = ((uint32_t)l1 |
1880 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
1881 if (t2 != VT_LLONG && (PTR_SIZE != 8 || t2 != VT_PTR))
1882 l2 = ((uint32_t)l2 |
1883 (v2->type.t & VT_UNSIGNED ? 0 : -(l2 & 0x80000000)));
1885 if (c1 && c2) {
1886 switch(op) {
1887 case '+': l1 += l2; break;
1888 case '-': l1 -= l2; break;
1889 case '&': l1 &= l2; break;
1890 case '^': l1 ^= l2; break;
1891 case '|': l1 |= l2; break;
1892 case '*': l1 *= l2; break;
1894 case TOK_PDIV:
1895 case '/':
1896 case '%':
1897 case TOK_UDIV:
1898 case TOK_UMOD:
1899 /* if division by zero, generate explicit division */
1900 if (l2 == 0) {
1901 if (const_wanted)
1902 tcc_error("division by zero in constant");
1903 goto general_case;
1905 switch(op) {
1906 default: l1 = gen_opic_sdiv(l1, l2); break;
1907 case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
1908 case TOK_UDIV: l1 = l1 / l2; break;
1909 case TOK_UMOD: l1 = l1 % l2; break;
1911 break;
1912 case TOK_SHL: l1 <<= (l2 & shm); break;
1913 case TOK_SHR: l1 >>= (l2 & shm); break;
1914 case TOK_SAR:
1915 l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
1916 break;
1917 /* tests */
1918 case TOK_ULT: l1 = l1 < l2; break;
1919 case TOK_UGE: l1 = l1 >= l2; break;
1920 case TOK_EQ: l1 = l1 == l2; break;
1921 case TOK_NE: l1 = l1 != l2; break;
1922 case TOK_ULE: l1 = l1 <= l2; break;
1923 case TOK_UGT: l1 = l1 > l2; break;
1924 case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
1925 case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
1926 case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
1927 case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
1928 /* logical */
1929 case TOK_LAND: l1 = l1 && l2; break;
1930 case TOK_LOR: l1 = l1 || l2; break;
1931 default:
1932 goto general_case;
1934 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
1935 l1 = ((uint32_t)l1 |
1936 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
1937 v1->c.i = l1;
1938 vtop--;
1939 } else {
1940 /* if commutative ops, put c2 as constant */
1941 if (c1 && (op == '+' || op == '&' || op == '^' ||
1942 op == '|' || op == '*')) {
1943 vswap();
1944 c2 = c1; //c = c1, c1 = c2, c2 = c;
1945 l2 = l1; //l = l1, l1 = l2, l2 = l;
1947 if (!const_wanted &&
1948 c1 && ((l1 == 0 &&
1949 (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
1950 (l1 == -1 && op == TOK_SAR))) {
1951 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
1952 vtop--;
1953 } else if (!const_wanted &&
1954 c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
1955 (op == '|' &&
1956 (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))) ||
1957 (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
1958 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
1959 if (l2 == 1)
1960 vtop->c.i = 0;
1961 vswap();
1962 vtop--;
1963 } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
1964 op == TOK_PDIV) &&
1965 l2 == 1) ||
1966 ((op == '+' || op == '-' || op == '|' || op == '^' ||
1967 op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
1968 l2 == 0) ||
1969 (op == '&' &&
1970 (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))))) {
1971 /* filter out NOP operations like x*1, x-0, x&-1... */
1972 vtop--;
1973 } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
1974 /* try to use shifts instead of muls or divs */
1975 if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
1976 int n = -1;
1977 while (l2) {
1978 l2 >>= 1;
1979 n++;
1981 vtop->c.i = n;
1982 if (op == '*')
1983 op = TOK_SHL;
1984 else if (op == TOK_PDIV)
1985 op = TOK_SAR;
1986 else
1987 op = TOK_SHR;
1989 goto general_case;
1990 } else if (c2 && (op == '+' || op == '-') &&
1991 (((vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM))
1992 || (vtop[-1].r & (VT_VALMASK | VT_LVAL)) == VT_LOCAL)) {
1993 /* symbol + constant case */
1994 if (op == '-')
1995 l2 = -l2;
1996 l2 += vtop[-1].c.i;
1997 /* The backends can't always deal with addends to symbols
1998 larger than +-1<<31. Don't construct such. */
1999 if ((int)l2 != l2)
2000 goto general_case;
2001 vtop--;
2002 vtop->c.i = l2;
2003 } else {
2004 general_case:
2005 /* call low level op generator */
2006 if (t1 == VT_LLONG || t2 == VT_LLONG ||
2007 (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
2008 gen_opl(op);
2009 else
2010 gen_opi(op);
2015 /* generate a floating point operation with constant propagation */
2016 static void gen_opif(int op)
2018 int c1, c2;
2019 SValue *v1, *v2;
2020 #if defined _MSC_VER && defined _AMD64_
2021 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2022 volatile
2023 #endif
2024 long double f1, f2;
2026 v1 = vtop - 1;
2027 v2 = vtop;
2028 /* currently, we cannot do computations with forward symbols */
2029 c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2030 c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2031 if (c1 && c2) {
2032 if (v1->type.t == VT_FLOAT) {
2033 f1 = v1->c.f;
2034 f2 = v2->c.f;
2035 } else if (v1->type.t == VT_DOUBLE) {
2036 f1 = v1->c.d;
2037 f2 = v2->c.d;
2038 } else {
2039 f1 = v1->c.ld;
2040 f2 = v2->c.ld;
2043 /* NOTE: we only do constant propagation if the operands are finite
2044 numbers (not NaN or infinity) (ANSI spec) */
2045 if (!ieee_finite(f1) || !ieee_finite(f2))
2046 goto general_case;
2048 switch(op) {
2049 case '+': f1 += f2; break;
2050 case '-': f1 -= f2; break;
2051 case '*': f1 *= f2; break;
2052 case '/':
2053 if (f2 == 0.0) {
2054 /* If not in initializer we need to potentially generate
2055 FP exceptions at runtime, otherwise we want to fold. */
2056 if (!const_wanted)
2057 goto general_case;
2059 f1 /= f2;
2060 break;
2061 /* XXX: also handle tests ? */
2062 default:
2063 goto general_case;
2065 /* XXX: overflow test ? */
2066 if (v1->type.t == VT_FLOAT) {
2067 v1->c.f = f1;
2068 } else if (v1->type.t == VT_DOUBLE) {
2069 v1->c.d = f1;
2070 } else {
2071 v1->c.ld = f1;
2073 vtop--;
2074 } else {
2075 general_case:
2076 gen_opf(op);
2080 static int pointed_size(CType *type)
2082 int align;
2083 return type_size(pointed_type(type), &align);
2086 static void vla_runtime_pointed_size(CType *type)
2088 int align;
2089 vla_runtime_type_size(pointed_type(type), &align);
2092 static inline int is_null_pointer(SValue *p)
2094 if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
2095 return 0;
2096 return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
2097 ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
2098 ((p->type.t & VT_BTYPE) == VT_PTR &&
2099 (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0));
2102 static inline int is_integer_btype(int bt)
2104 return (bt == VT_BYTE || bt == VT_SHORT ||
2105 bt == VT_INT || bt == VT_LLONG);
2108 /* check types for comparison or subtraction of pointers */
2109 static void check_comparison_pointer_types(SValue *p1, SValue *p2, int op)
2111 CType *type1, *type2, tmp_type1, tmp_type2;
2112 int bt1, bt2;
2114 /* null pointers are accepted for all comparisons, as in gcc */
2115 if (is_null_pointer(p1) || is_null_pointer(p2))
2116 return;
2117 type1 = &p1->type;
2118 type2 = &p2->type;
2119 bt1 = type1->t & VT_BTYPE;
2120 bt2 = type2->t & VT_BTYPE;
2121 /* accept comparison between pointer and integer with a warning */
2122 if ((is_integer_btype(bt1) || is_integer_btype(bt2)) && op != '-') {
2123 if (op != TOK_LOR && op != TOK_LAND )
2124 tcc_warning("comparison between pointer and integer");
2125 return;
2128 /* both must be pointers or implicit function pointers */
2129 if (bt1 == VT_PTR) {
2130 type1 = pointed_type(type1);
2131 } else if (bt1 != VT_FUNC)
2132 goto invalid_operands;
2134 if (bt2 == VT_PTR) {
2135 type2 = pointed_type(type2);
2136 } else if (bt2 != VT_FUNC) {
2137 invalid_operands:
2138 tcc_error("invalid operands to binary %s", get_tok_str(op, NULL));
2140 if ((type1->t & VT_BTYPE) == VT_VOID ||
2141 (type2->t & VT_BTYPE) == VT_VOID)
2142 return;
2143 tmp_type1 = *type1;
2144 tmp_type2 = *type2;
2145 tmp_type1.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
2146 tmp_type2.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
2147 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
2148 /* gcc-like error if '-' is used */
2149 if (op == '-')
2150 goto invalid_operands;
2151 else
2152 tcc_warning("comparison of distinct pointer types lacks a cast");
2156 /* generic gen_op: handles type problems */
2157 ST_FUNC void gen_op(int op)
2159 int u, t1, t2, bt1, bt2, t;
2160 CType type1;
2162 redo:
2163 t1 = vtop[-1].type.t;
2164 t2 = vtop[0].type.t;
2165 bt1 = t1 & VT_BTYPE;
2166 bt2 = t2 & VT_BTYPE;
2168 if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
2169 tcc_error("operation on a struct");
2170 } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
2171 if (bt2 == VT_FUNC) {
2172 mk_pointer(&vtop->type);
2173 gaddrof();
2175 if (bt1 == VT_FUNC) {
2176 vswap();
2177 mk_pointer(&vtop->type);
2178 gaddrof();
2179 vswap();
2181 goto redo;
2182 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
2183 /* at least one operand is a pointer */
2184 /* relational op: must be both pointers */
2185 if (op >= TOK_ULT && op <= TOK_LOR) {
2186 check_comparison_pointer_types(vtop - 1, vtop, op);
2187 /* pointers are handled as unsigned */
2188 #if PTR_SIZE == 8
2189 t = VT_LLONG | VT_UNSIGNED;
2190 #else
2191 t = VT_INT | VT_UNSIGNED;
2192 #endif
2193 goto std_op;
2195 /* if both pointers, then it must be the '-' op */
2196 if (bt1 == VT_PTR && bt2 == VT_PTR) {
2197 if (op != '-')
2198 tcc_error("cannot use pointers here");
2199 check_comparison_pointer_types(vtop - 1, vtop, op);
2200 /* XXX: check that types are compatible */
2201 if (vtop[-1].type.t & VT_VLA) {
2202 vla_runtime_pointed_size(&vtop[-1].type);
2203 } else {
2204 vpushi(pointed_size(&vtop[-1].type));
2206 vrott(3);
2207 gen_opic(op);
2208 vtop->type.t = ptrdiff_type.t;
2209 vswap();
2210 gen_op(TOK_PDIV);
2211 } else {
2212 /* exactly one pointer : must be '+' or '-'. */
2213 if (op != '-' && op != '+')
2214 tcc_error("cannot use pointers here");
2215 /* Put pointer as first operand */
2216 if (bt2 == VT_PTR) {
2217 vswap();
2218 t = t1, t1 = t2, t2 = t;
2220 #if PTR_SIZE == 4
2221 if ((vtop[0].type.t & VT_BTYPE) == VT_LLONG)
2222 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2223 gen_cast_s(VT_INT);
2224 #endif
2225 type1 = vtop[-1].type;
2226 type1.t &= ~VT_ARRAY;
2227 if (vtop[-1].type.t & VT_VLA)
2228 vla_runtime_pointed_size(&vtop[-1].type);
2229 else {
2230 u = pointed_size(&vtop[-1].type);
2231 if (u < 0)
2232 tcc_error("unknown array element size");
2233 #if PTR_SIZE == 8
2234 vpushll(u);
2235 #else
2236 /* XXX: cast to int ? (long long case) */
2237 vpushi(u);
2238 #endif
2240 gen_op('*');
2241 #if 0
2242 /* #ifdef CONFIG_TCC_BCHECK
2243 The main reason for removing this code:
2244 #include <stdio.h>
2245 int main ()
2247 int v[10];
2248 int i = 10;
2249 int j = 9;
2250 fprintf(stderr, "v+i-j = %p\n", v+i-j);
2251 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
2253 When this code is enabled, the output looks like
2254 v+i-j = 0xfffffffe
2255 v+(i-j) = 0xbff84000
2257 /* if evaluating constant expression, no code should be
2258 generated, so no bound check */
2259 if (tcc_state->do_bounds_check && !const_wanted) {
2260 /* if bounded pointers, we generate a special code to
2261 test bounds */
2262 if (op == '-') {
2263 vpushi(0);
2264 vswap();
2265 gen_op('-');
2267 gen_bounded_ptr_add();
2268 } else
2269 #endif
2271 gen_opic(op);
2273 /* restore the type in case gen_opic() swapped operands */
2274 vtop->type = type1;
2276 } else if (is_float(bt1) || is_float(bt2)) {
2277 /* compute bigger type and do implicit casts */
2278 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
2279 t = VT_LDOUBLE;
2280 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
2281 t = VT_DOUBLE;
2282 } else {
2283 t = VT_FLOAT;
2285 /* floats can only be used for a few operations */
2286 if (op != '+' && op != '-' && op != '*' && op != '/' &&
2287 (op < TOK_ULT || op > TOK_GT))
2288 tcc_error("invalid operands for binary operation");
2289 goto std_op;
2290 } else if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL) {
2291 t = bt1 == VT_LLONG ? VT_LLONG : VT_INT;
2292 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (t | VT_UNSIGNED))
2293 t |= VT_UNSIGNED;
2294 t |= (VT_LONG & t1);
2295 goto std_op;
2296 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
2297 /* cast to biggest op */
2298 t = VT_LLONG | VT_LONG;
2299 if (bt1 == VT_LLONG)
2300 t &= t1;
2301 if (bt2 == VT_LLONG)
2302 t &= t2;
2303 /* convert to unsigned if it does not fit in a long long */
2304 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
2305 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
2306 t |= VT_UNSIGNED;
2307 goto std_op;
2308 } else {
2309 /* integer operations */
2310 t = VT_INT | (VT_LONG & (t1 | t2));
2311 /* convert to unsigned if it does not fit in an integer */
2312 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
2313 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
2314 t |= VT_UNSIGNED;
2315 std_op:
2316 /* XXX: currently, some unsigned operations are explicit, so
2317 we modify them here */
2318 if (t & VT_UNSIGNED) {
2319 if (op == TOK_SAR)
2320 op = TOK_SHR;
2321 else if (op == '/')
2322 op = TOK_UDIV;
2323 else if (op == '%')
2324 op = TOK_UMOD;
2325 else if (op == TOK_LT)
2326 op = TOK_ULT;
2327 else if (op == TOK_GT)
2328 op = TOK_UGT;
2329 else if (op == TOK_LE)
2330 op = TOK_ULE;
2331 else if (op == TOK_GE)
2332 op = TOK_UGE;
2334 vswap();
2335 type1.t = t;
2336 type1.ref = NULL;
2337 gen_cast(&type1);
2338 vswap();
2339 /* special case for shifts and long long: we keep the shift as
2340 an integer */
2341 if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
2342 type1.t = VT_INT;
2343 gen_cast(&type1);
2344 if (is_float(t))
2345 gen_opif(op);
2346 else
2347 gen_opic(op);
2348 if (op >= TOK_ULT && op <= TOK_GT) {
2349 /* relational op: the result is an int */
2350 vtop->type.t = VT_INT;
2351 } else {
2352 vtop->type.t = t;
2355 // Make sure that we have converted to an rvalue:
2356 if (vtop->r & VT_LVAL)
2357 gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
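/* For illustration, the pointer paths above scale the integer operand by
   the element size for '+'/'-' and divide a pointer difference back by it
   (TOK_PDIV), so that, assuming 4-byte int:

       int v[10];
       int *p = v + 3;   // offset emitted as 3 * sizeof(int)
       long d = p - v;   // byte difference divided back, d == 3

   Relational operators on pointers compare the unsigned representation. */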
2360 #ifndef TCC_TARGET_ARM
2361 /* generic itof for unsigned long long case */
2362 static void gen_cvt_itof1(int t)
2364 #ifdef TCC_TARGET_ARM64
2365 gen_cvt_itof(t);
2366 #else
2367 if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
2368 (VT_LLONG | VT_UNSIGNED)) {
2370 if (t == VT_FLOAT)
2371 vpush_global_sym(&func_old_type, TOK___floatundisf);
2372 #if LDOUBLE_SIZE != 8
2373 else if (t == VT_LDOUBLE)
2374 vpush_global_sym(&func_old_type, TOK___floatundixf);
2375 #endif
2376 else
2377 vpush_global_sym(&func_old_type, TOK___floatundidf);
2378 vrott(2);
2379 gfunc_call(1);
2380 vpushi(0);
2381 vtop->r = reg_fret(t);
2382 } else {
2383 gen_cvt_itof(t);
2385 #endif
2387 #endif
2389 /* generic ftoi for unsigned long long case */
2390 static void gen_cvt_ftoi1(int t)
2392 #ifdef TCC_TARGET_ARM64
2393 gen_cvt_ftoi(t);
2394 #else
2395 int st;
2397 if (t == (VT_LLONG | VT_UNSIGNED)) {
2398 /* not handled natively */
2399 st = vtop->type.t & VT_BTYPE;
2400 if (st == VT_FLOAT)
2401 vpush_global_sym(&func_old_type, TOK___fixunssfdi);
2402 #if LDOUBLE_SIZE != 8
2403 else if (st == VT_LDOUBLE)
2404 vpush_global_sym(&func_old_type, TOK___fixunsxfdi);
2405 #endif
2406 else
2407 vpush_global_sym(&func_old_type, TOK___fixunsdfdi);
2408 vrott(2);
2409 gfunc_call(1);
2410 vpushi(0);
2411 vtop->r = REG_IRET;
2412 vtop->r2 = REG_LRET;
2413 } else {
2414 gen_cvt_ftoi(t);
2416 #endif
2419 /* force char or short cast */
2420 static void force_charshort_cast(int t)
2422 int bits, dbt;
2424 /* cannot cast static initializers */
2425 if (STATIC_DATA_WANTED)
2426 return;
2428 dbt = t & VT_BTYPE;
2429 /* XXX: add optimization if lvalue : just change type and offset */
2430 if (dbt == VT_BYTE)
2431 bits = 8;
2432 else
2433 bits = 16;
2434 if (t & VT_UNSIGNED) {
2435 vpushi((1 << bits) - 1);
2436 gen_op('&');
2437 } else {
2438 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
2439 bits = 64 - bits;
2440 else
2441 bits = 32 - bits;
2442 vpushi(bits);
2443 gen_op(TOK_SHL);
2444 /* result must be signed, or the SAR is converted to an SHL.
2445 This was not the case when "t" was a signed short
2446 and the last value on the stack was an unsigned int */
2447 vtop->type.t &= ~VT_UNSIGNED;
2448 vpushi(bits);
2449 gen_op(TOK_SAR);
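/* For illustration, the delayed char/short cast above is realized with
   plain ALU operations, roughly:

       unsigned char uc = x;  // x & 0xff
       short ss = x;          // (x << 16) >> 16, arithmetic shift right

   i.e. a mask for unsigned targets and a shift-left/arithmetic-shift-right
   pair (by 32 - width, or 64 - width for long long operands) for signed
   ones. */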
2453 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2454 static void gen_cast_s(int t)
2456 CType type;
2457 type.t = t;
2458 type.ref = NULL;
2459 gen_cast(&type);
2462 static void gen_cast(CType *type)
2464 int sbt, dbt, sf, df, c, p;
2466 /* special delayed cast for char/short */
2467 /* XXX: in some cases (multiple cascaded casts), it may still
2468 be incorrect */
2469 if (vtop->r & VT_MUSTCAST) {
2470 vtop->r &= ~VT_MUSTCAST;
2471 force_charshort_cast(vtop->type.t);
2474 /* bitfields first get cast to ints */
2475 if (vtop->type.t & VT_BITFIELD) {
2476 gv(RC_INT);
2479 dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
2480 sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
2482 if (sbt != dbt) {
2483 sf = is_float(sbt);
2484 df = is_float(dbt);
2485 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2486 p = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM);
2487 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
2488 c &= dbt != VT_LDOUBLE;
2489 #endif
2490 if (c) {
2491 /* constant case: we can do it now */
2492 /* XXX: in ISOC, cannot do it if error in convert */
2493 if (sbt == VT_FLOAT)
2494 vtop->c.ld = vtop->c.f;
2495 else if (sbt == VT_DOUBLE)
2496 vtop->c.ld = vtop->c.d;
2498 if (df) {
2499 if ((sbt & VT_BTYPE) == VT_LLONG) {
2500 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
2501 vtop->c.ld = vtop->c.i;
2502 else
2503 vtop->c.ld = -(long double)-vtop->c.i;
2504 } else if(!sf) {
2505 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
2506 vtop->c.ld = (uint32_t)vtop->c.i;
2507 else
2508 vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
2511 if (dbt == VT_FLOAT)
2512 vtop->c.f = (float)vtop->c.ld;
2513 else if (dbt == VT_DOUBLE)
2514 vtop->c.d = (double)vtop->c.ld;
2515 } else if (sf && dbt == (VT_LLONG|VT_UNSIGNED)) {
2516 vtop->c.i = vtop->c.ld;
2517 } else if (sf && dbt == VT_BOOL) {
2518 vtop->c.i = (vtop->c.ld != 0);
2519 } else {
2520 if(sf)
2521 vtop->c.i = vtop->c.ld;
2522 else if (sbt == (VT_LLONG|VT_UNSIGNED))
2524 else if (sbt & VT_UNSIGNED)
2525 vtop->c.i = (uint32_t)vtop->c.i;
2526 #if PTR_SIZE == 8
2527 else if (sbt == VT_PTR)
2529 #endif
2530 else if (sbt != VT_LLONG)
2531 vtop->c.i = ((uint32_t)vtop->c.i |
2532 -(vtop->c.i & 0x80000000));
2534 if (dbt == (VT_LLONG|VT_UNSIGNED))
2536 else if (dbt == VT_BOOL)
2537 vtop->c.i = (vtop->c.i != 0);
2538 #if PTR_SIZE == 8
2539 else if (dbt == VT_PTR)
2541 #endif
2542 else if (dbt != VT_LLONG) {
2543 uint32_t m = ((dbt & VT_BTYPE) == VT_BYTE ? 0xff :
2544 (dbt & VT_BTYPE) == VT_SHORT ? 0xffff :
2545 0xffffffff);
2546 vtop->c.i &= m;
2547 if (!(dbt & VT_UNSIGNED))
2548 vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
2551 } else if (p && dbt == VT_BOOL) {
2552 vtop->r = VT_CONST;
2553 vtop->c.i = 1;
2554 } else {
2555 /* non constant case: generate code */
2556 if (sf && df) {
2557 /* convert from fp to fp */
2558 gen_cvt_ftof(dbt);
2559 } else if (df) {
2560 /* convert int to fp */
2561 gen_cvt_itof1(dbt);
2562 } else if (sf) {
2563 /* convert fp to int */
2564 if (dbt == VT_BOOL) {
2565 vpushi(0);
2566 gen_op(TOK_NE);
2567 } else {
2568 /* we handle char/short/etc... with generic code */
2569 if (dbt != (VT_INT | VT_UNSIGNED) &&
2570 dbt != (VT_LLONG | VT_UNSIGNED) &&
2571 dbt != VT_LLONG)
2572 dbt = VT_INT;
2573 gen_cvt_ftoi1(dbt);
2574 if (dbt == VT_INT && (type->t & (VT_BTYPE | VT_UNSIGNED)) != dbt) {
2575 /* additional cast for char/short... */
2576 vtop->type.t = dbt;
2577 gen_cast(type);
2580 #if PTR_SIZE == 4
2581 } else if ((dbt & VT_BTYPE) == VT_LLONG) {
2582 if ((sbt & VT_BTYPE) != VT_LLONG) {
2583 /* scalar to long long */
2584 /* machine independent conversion */
2585 gv(RC_INT);
2586 /* generate high word */
2587 if (sbt == (VT_INT | VT_UNSIGNED)) {
2588 vpushi(0);
2589 gv(RC_INT);
2590 } else {
2591 if (sbt == VT_PTR) {
2592 /* cast from pointer to int before we apply
2593 the shift operation, which pointers don't support */
2594 gen_cast_s(VT_INT);
2596 gv_dup();
2597 vpushi(31);
2598 gen_op(TOK_SAR);
2600 /* patch second register */
2601 vtop[-1].r2 = vtop->r;
2602 vpop();
2604 #else
2605 } else if ((dbt & VT_BTYPE) == VT_LLONG ||
2606 (dbt & VT_BTYPE) == VT_PTR ||
2607 (dbt & VT_BTYPE) == VT_FUNC) {
2608 if ((sbt & VT_BTYPE) != VT_LLONG &&
2609 (sbt & VT_BTYPE) != VT_PTR &&
2610 (sbt & VT_BTYPE) != VT_FUNC) {
2611 /* need to convert from 32bit to 64bit */
2612 gv(RC_INT);
2613 if (sbt != (VT_INT | VT_UNSIGNED)) {
2614 #if defined(TCC_TARGET_ARM64)
2615 gen_cvt_sxtw();
2616 #elif defined(TCC_TARGET_X86_64)
2617 int r = gv(RC_INT);
2618 /* x86_64 specific: movslq */
2619 o(0x6348);
2620 o(0xc0 + (REG_VALUE(r) << 3) + REG_VALUE(r));
2621 #else
2622 #error
2623 #endif
2626 #endif
2627 } else if (dbt == VT_BOOL) {
2628 /* scalar to bool */
2629 vpushi(0);
2630 gen_op(TOK_NE);
2631 } else if ((dbt & VT_BTYPE) == VT_BYTE ||
2632 (dbt & VT_BTYPE) == VT_SHORT) {
2633 if (sbt == VT_PTR) {
2634 vtop->type.t = VT_INT;
2635 tcc_warning("nonportable conversion from pointer to char/short");
2637 force_charshort_cast(dbt);
2638 } else if ((dbt & VT_BTYPE) == VT_INT) {
2639 /* scalar to int */
2640 if ((sbt & VT_BTYPE) == VT_LLONG) {
2641 #if PTR_SIZE == 4
2642 /* from long long: just take low order word */
2643 lexpand();
2644 vpop();
2645 #else
2646 vpushi(0xffffffff);
2647 vtop->type.t |= VT_UNSIGNED;
2648 gen_op('&');
2649 #endif
2651 /* if lvalue and single word type, nothing to do because
2652 the lvalue already contains the real type size (see
2653 VT_LVAL_xxx constants) */
2656 } else if ((dbt & VT_BTYPE) == VT_PTR && !(vtop->r & VT_LVAL)) {
2657 /* if we are casting between pointer types,
2658 we must update the VT_LVAL_xxx size */
2659 vtop->r = (vtop->r & ~VT_LVAL_TYPE)
2660 | (lvalue_type(type->ref->type.t) & VT_LVAL_TYPE);
2662 vtop->type = *type;
2665 /* return type size as known at compile time. Put alignment at 'a' */
2666 ST_FUNC int type_size(CType *type, int *a)
2668 Sym *s;
2669 int bt;
2671 bt = type->t & VT_BTYPE;
2672 if (bt == VT_STRUCT) {
2673 /* struct/union */
2674 s = type->ref;
2675 *a = s->r;
2676 return s->c;
2677 } else if (bt == VT_PTR) {
2678 if (type->t & VT_ARRAY) {
2679 int ts;
2681 s = type->ref;
2682 ts = type_size(&s->type, a);
2684 if (ts < 0 && s->c < 0)
2685 ts = -ts;
2687 return ts * s->c;
2688 } else {
2689 *a = PTR_SIZE;
2690 return PTR_SIZE;
2692 } else if (IS_ENUM(type->t) && type->ref->c == -1) {
2693 return -1; /* incomplete enum */
2694 } else if (bt == VT_LDOUBLE) {
2695 *a = LDOUBLE_ALIGN;
2696 return LDOUBLE_SIZE;
2697 } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
2698 #ifdef TCC_TARGET_I386
2699 #ifdef TCC_TARGET_PE
2700 *a = 8;
2701 #else
2702 *a = 4;
2703 #endif
2704 #elif defined(TCC_TARGET_ARM)
2705 #ifdef TCC_ARM_EABI
2706 *a = 8;
2707 #else
2708 *a = 4;
2709 #endif
2710 #else
2711 *a = 8;
2712 #endif
2713 return 8;
2714 } else if (bt == VT_INT || bt == VT_FLOAT) {
2715 *a = 4;
2716 return 4;
2717 } else if (bt == VT_SHORT) {
2718 *a = 2;
2719 return 2;
2720 } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
2721 *a = 8;
2722 return 16;
2723 } else {
2724 /* char, void, function, _Bool */
2725 *a = 1;
2726 return 1;
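/* For illustration, assuming a typical ILP32 or LP64 target:

       type_size of int         -> 4, alignment 4
       type_size of long long   -> 8, alignment 4 or 8 (see #ifdefs above)
       type_size of short       -> 2, alignment 2
       type_size of char/_Bool  -> 1, alignment 1
       type_size of T[N]        -> N * type_size(T), alignment of T */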
2730 /* push type size as known at runtime on top of value stack. Put
2731 alignment at 'a' */
2732 ST_FUNC void vla_runtime_type_size(CType *type, int *a)
2734 if (type->t & VT_VLA) {
2735 type_size(&type->ref->type, a);
2736 vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
2737 } else {
2738 vpushi(type_size(type, a));
2742 static void vla_sp_restore(void) {
2743 if (vlas_in_scope) {
2744 gen_vla_sp_restore(vla_sp_loc);
2748 static void vla_sp_restore_root(void) {
2749 if (vlas_in_scope) {
2750 gen_vla_sp_restore(vla_sp_root_loc);
2754 /* return the pointed type of t */
2755 static inline CType *pointed_type(CType *type)
2757 return &type->ref->type;
2760 /* modify type so that it is a pointer to the original type */
2761 ST_FUNC void mk_pointer(CType *type)
2763 Sym *s;
2764 s = sym_push(SYM_FIELD, type, 0, -1);
2765 type->t = VT_PTR | (type->t & VT_STORAGE);
2766 type->ref = s;
2769 /* compare function types. OLD functions match any new functions */
2770 static int is_compatible_func(CType *type1, CType *type2)
2772 Sym *s1, *s2;
2774 s1 = type1->ref;
2775 s2 = type2->ref;
2776 if (!is_compatible_types(&s1->type, &s2->type))
2777 return 0;
2778 /* check func_call */
2779 if (s1->f.func_call != s2->f.func_call)
2780 return 0;
2781 /* XXX: not complete */
2782 if (s1->f.func_type == FUNC_OLD || s2->f.func_type == FUNC_OLD)
2783 return 1;
2784 if (s1->f.func_type != s2->f.func_type)
2785 return 0;
2786 while (s1 != NULL) {
2787 if (s2 == NULL)
2788 return 0;
2789 if (!is_compatible_unqualified_types(&s1->type, &s2->type))
2790 return 0;
2791 s1 = s1->next;
2792 s2 = s2->next;
2794 if (s2)
2795 return 0;
2796 return 1;
2799 /* return true if type1 and type2 are the same. If unqualified is
2800 true, qualifiers on the types are ignored.
2802 static int compare_types(CType *type1, CType *type2, int unqualified)
2804 int bt1, t1, t2;
2806 t1 = type1->t & VT_TYPE;
2807 t2 = type2->t & VT_TYPE;
2808 if (unqualified) {
2809 /* strip qualifiers before comparing */
2810 t1 &= ~(VT_CONSTANT | VT_VOLATILE);
2811 t2 &= ~(VT_CONSTANT | VT_VOLATILE);
2814 /* Default vs. explicit signedness only matters for char */
2815 if ((t1 & VT_BTYPE) != VT_BYTE) {
2816 t1 &= ~VT_DEFSIGN;
2817 t2 &= ~VT_DEFSIGN;
2819 /* XXX: bitfields ? */
2820 if (t1 != t2)
2821 return 0;
2822 /* test more complicated cases */
2823 bt1 = t1 & VT_BTYPE;
2824 if (bt1 == VT_PTR) {
2825 type1 = pointed_type(type1);
2826 type2 = pointed_type(type2);
2827 return is_compatible_types(type1, type2);
2828 } else if (bt1 == VT_STRUCT) {
2829 return (type1->ref == type2->ref);
2830 } else if (bt1 == VT_FUNC) {
2831 return is_compatible_func(type1, type2);
2832 } else if (IS_ENUM(type1->t) || IS_ENUM(type2->t)) {
2833 return type1->ref == type2->ref;
2834 } else {
2835 return 1;
2839 /* return true if type1 and type2 are exactly the same (including
2840 qualifiers).
2842 static int is_compatible_types(CType *type1, CType *type2)
2844 return compare_types(type1,type2,0);
2847 /* return true if type1 and type2 are the same (ignoring qualifiers).
2849 static int is_compatible_unqualified_types(CType *type1, CType *type2)
2851 return compare_types(type1,type2,1);
2854 /* print a type. If 'varstr' is not NULL, then the variable is also
2855 printed in the type */
2856 /* XXX: union */
2857 /* XXX: add array and function pointers */
2858 static void type_to_str(char *buf, int buf_size,
2859 CType *type, const char *varstr)
2861 int bt, v, t;
2862 Sym *s, *sa;
2863 char buf1[256];
2864 const char *tstr;
2866 t = type->t;
2867 bt = t & VT_BTYPE;
2868 buf[0] = '\0';
2870 if (t & VT_EXTERN)
2871 pstrcat(buf, buf_size, "extern ");
2872 if (t & VT_STATIC)
2873 pstrcat(buf, buf_size, "static ");
2874 if (t & VT_TYPEDEF)
2875 pstrcat(buf, buf_size, "typedef ");
2876 if (t & VT_INLINE)
2877 pstrcat(buf, buf_size, "inline ");
2878 if (t & VT_VOLATILE)
2879 pstrcat(buf, buf_size, "volatile ");
2880 if (t & VT_CONSTANT)
2881 pstrcat(buf, buf_size, "const ");
2883 if (((t & VT_DEFSIGN) && bt == VT_BYTE)
2884 || ((t & VT_UNSIGNED)
2885 && (bt == VT_SHORT || bt == VT_INT || bt == VT_LLONG)
2886 && !IS_ENUM(t)
2888 pstrcat(buf, buf_size, (t & VT_UNSIGNED) ? "unsigned " : "signed ");
2890 buf_size -= strlen(buf);
2891 buf += strlen(buf);
2893 switch(bt) {
2894 case VT_VOID:
2895 tstr = "void";
2896 goto add_tstr;
2897 case VT_BOOL:
2898 tstr = "_Bool";
2899 goto add_tstr;
2900 case VT_BYTE:
2901 tstr = "char";
2902 goto add_tstr;
2903 case VT_SHORT:
2904 tstr = "short";
2905 goto add_tstr;
2906 case VT_INT:
2907 tstr = "int";
2908 goto maybe_long;
2909 case VT_LLONG:
2910 tstr = "long long";
2911 maybe_long:
2912 if (t & VT_LONG)
2913 tstr = "long";
2914 if (!IS_ENUM(t))
2915 goto add_tstr;
2916 tstr = "enum ";
2917 goto tstruct;
2918 case VT_FLOAT:
2919 tstr = "float";
2920 goto add_tstr;
2921 case VT_DOUBLE:
2922 tstr = "double";
2923 goto add_tstr;
2924 case VT_LDOUBLE:
2925 tstr = "long double";
2926 add_tstr:
2927 pstrcat(buf, buf_size, tstr);
2928 break;
2929 case VT_STRUCT:
2930 tstr = "struct ";
2931 if (IS_UNION(t))
2932 tstr = "union ";
2933 tstruct:
2934 pstrcat(buf, buf_size, tstr);
2935 v = type->ref->v & ~SYM_STRUCT;
2936 if (v >= SYM_FIRST_ANOM)
2937 pstrcat(buf, buf_size, "<anonymous>");
2938 else
2939 pstrcat(buf, buf_size, get_tok_str(v, NULL));
2940 break;
2941 case VT_FUNC:
2942 s = type->ref;
2943 buf1[0]=0;
2944 if (varstr && '*' == *varstr) {
2945 pstrcat(buf1, sizeof(buf1), "(");
2946 pstrcat(buf1, sizeof(buf1), varstr);
2947 pstrcat(buf1, sizeof(buf1), ")");
2949 pstrcat(buf1, sizeof(buf1), "(");
2950 sa = s->next;
2951 while (sa != NULL) {
2952 char buf2[256];
2953 type_to_str(buf2, sizeof(buf2), &sa->type, NULL);
2954 pstrcat(buf1, sizeof(buf1), buf2);
2955 sa = sa->next;
2956 if (sa)
2957 pstrcat(buf1, sizeof(buf1), ", ");
2959 if (s->f.func_type == FUNC_ELLIPSIS)
2960 pstrcat(buf1, sizeof(buf1), ", ...");
2961 pstrcat(buf1, sizeof(buf1), ")");
2962 type_to_str(buf, buf_size, &s->type, buf1);
2963 goto no_var;
2964 case VT_PTR:
2965 s = type->ref;
2966 if (t & VT_ARRAY) {
2967 snprintf(buf1, sizeof(buf1), "%s[%d]", varstr ? varstr : "", s->c);
2968 type_to_str(buf, buf_size, &s->type, buf1);
2969 goto no_var;
2971 pstrcpy(buf1, sizeof(buf1), "*");
2972 if (t & VT_CONSTANT)
2973 pstrcat(buf1, sizeof(buf1), "const ");
2974 if (t & VT_VOLATILE)
2975 pstrcat(buf1, sizeof(buf1), "volatile ");
2976 if (varstr)
2977 pstrcat(buf1, sizeof(buf1), varstr);
2978 type_to_str(buf, buf_size, &s->type, buf1);
2979 goto no_var;
2981 if (varstr) {
2982 pstrcat(buf, buf_size, " ");
2983 pstrcat(buf, buf_size, varstr);
2985 no_var: ;
2988 /* verify type compatibility to store vtop in 'dt' type, and generate
2989 casts if needed. */
2990 static void gen_assign_cast(CType *dt)
2992 CType *st, *type1, *type2;
2993 char buf1[256], buf2[256];
2994 int dbt, sbt, qualwarn, lvl;
2996 st = &vtop->type; /* source type */
2997 dbt = dt->t & VT_BTYPE;
2998 sbt = st->t & VT_BTYPE;
2999 if (sbt == VT_VOID || dbt == VT_VOID) {
3000 if (sbt == VT_VOID && dbt == VT_VOID)
3001 ; /* it is OK if both are void */
3002 else
3003 tcc_error("cannot cast from/to void");
3005 if (dt->t & VT_CONSTANT)
3006 tcc_warning("assignment of read-only location");
3007 switch(dbt) {
3008 case VT_PTR:
3009 /* special cases for pointers */
3010 /* '0' can also be a pointer */
3011 if (is_null_pointer(vtop))
3012 break;
3013 /* accept implicit pointer to integer cast with warning */
3014 if (is_integer_btype(sbt)) {
3015 tcc_warning("assignment makes pointer from integer without a cast");
3016 break;
3018 type1 = pointed_type(dt);
3019 if (sbt == VT_PTR)
3020 type2 = pointed_type(st);
3021 else if (sbt == VT_FUNC)
3022 type2 = st; /* a function is implicitly a function pointer */
3023 else
3024 goto error;
3025 if (is_compatible_types(type1, type2))
3026 break;
3027 for (qualwarn = lvl = 0;; ++lvl) {
3028 if (((type2->t & VT_CONSTANT) && !(type1->t & VT_CONSTANT)) ||
3029 ((type2->t & VT_VOLATILE) && !(type1->t & VT_VOLATILE)))
3030 qualwarn = 1;
3031 dbt = type1->t & (VT_BTYPE|VT_LONG);
3032 sbt = type2->t & (VT_BTYPE|VT_LONG);
3033 if (dbt != VT_PTR || sbt != VT_PTR)
3034 break;
3035 type1 = pointed_type(type1);
3036 type2 = pointed_type(type2);
3038 if (!is_compatible_unqualified_types(type1, type2)) {
3039 if ((dbt == VT_VOID || sbt == VT_VOID) && lvl == 0) {
3040 /* void * can match anything */
3041 } else if (dbt == sbt
3042 && is_integer_btype(sbt & VT_BTYPE)
3043 && IS_ENUM(type1->t) + IS_ENUM(type2->t)
3044 + !!((type1->t ^ type2->t) & VT_UNSIGNED) < 2) {
3045 /* Like GCC, don't warn by default for mere changes
3046 in pointer target signedness. Do warn for different
3047 base types, though, in particular for unsigned enums
3048 and signed int targets. */
3049 } else {
3050 tcc_warning("assignment from incompatible pointer type");
3051 break;
3054 if (qualwarn)
3055 tcc_warning("assignment discards qualifiers from pointer target type");
3056 break;
3057 case VT_BYTE:
3058 case VT_SHORT:
3059 case VT_INT:
3060 case VT_LLONG:
3061 if (sbt == VT_PTR || sbt == VT_FUNC) {
3062 tcc_warning("assignment makes integer from pointer without a cast");
3063 } else if (sbt == VT_STRUCT) {
3064 goto case_VT_STRUCT;
3066 /* XXX: more tests */
3067 break;
3068 case VT_STRUCT:
3069 case_VT_STRUCT:
3070 if (!is_compatible_unqualified_types(dt, st)) {
3071 error:
3072 type_to_str(buf1, sizeof(buf1), st, NULL);
3073 type_to_str(buf2, sizeof(buf2), dt, NULL);
3074 tcc_error("cannot cast '%s' to '%s'", buf1, buf2);
3076 break;
3078 gen_cast(dt);
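/* For illustration, assignments and the diagnostics produced by the checks
   above (a sketch, not an exhaustive list):

       int *p, n;
       const int *q;
       p = 0;   // ok, null pointer constant
       p = n;   // warning: assignment makes pointer from integer without a cast
       n = p;   // warning: assignment makes integer from pointer without a cast
       q = p;   // ok, adds qualifiers
       p = q;   // warning: assignment discards qualifiers from pointer target type */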
3081 /* store vtop in lvalue pushed on stack */
3082 ST_FUNC void vstore(void)
3084 int sbt, dbt, ft, r, t, size, align, bit_size, bit_pos, rc, delayed_cast;
3086 ft = vtop[-1].type.t;
3087 sbt = vtop->type.t & VT_BTYPE;
3088 dbt = ft & VT_BTYPE;
3089 if ((((sbt == VT_INT || sbt == VT_SHORT) && dbt == VT_BYTE) ||
3090 (sbt == VT_INT && dbt == VT_SHORT))
3091 && !(vtop->type.t & VT_BITFIELD)) {
3092 /* optimize char/short casts */
3093 delayed_cast = VT_MUSTCAST;
3094 vtop->type.t = ft & VT_TYPE;
3095 /* XXX: factorize */
3096 if (ft & VT_CONSTANT)
3097 tcc_warning("assignment of read-only location");
3098 } else {
3099 delayed_cast = 0;
3100 if (!(ft & VT_BITFIELD))
3101 gen_assign_cast(&vtop[-1].type);
3104 if (sbt == VT_STRUCT) {
3105 /* if structure, only generate pointer */
3106 /* structure assignment : generate memcpy */
3107 /* XXX: optimize if small size */
3108 size = type_size(&vtop->type, &align);
3110 /* destination */
3111 vswap();
3112 vtop->type.t = VT_PTR;
3113 gaddrof();
3115 /* address of memcpy() */
3116 #ifdef TCC_ARM_EABI
3117 if(!(align & 7))
3118 vpush_global_sym(&func_old_type, TOK_memcpy8);
3119 else if(!(align & 3))
3120 vpush_global_sym(&func_old_type, TOK_memcpy4);
3121 else
3122 #endif
3123 /* Use memmove, rather than memcpy, as dest and src may be the same: */
3124 vpush_global_sym(&func_old_type, TOK_memmove);
3126 vswap();
3127 /* source */
3128 vpushv(vtop - 2);
3129 vtop->type.t = VT_PTR;
3130 gaddrof();
3131 /* type size */
3132 vpushi(size);
3133 gfunc_call(3);
3135 /* leave source on stack */
3136 } else if (ft & VT_BITFIELD) {
3137 /* bitfield store handling */
3139 /* save lvalue as expression result (example: s.b = s.a = n;) */
3140 vdup(), vtop[-1] = vtop[-2];
3142 bit_pos = BIT_POS(ft);
3143 bit_size = BIT_SIZE(ft);
3144 /* remove bit field info to avoid loops */
3145 vtop[-1].type.t = ft & ~VT_STRUCT_MASK;
3147 if ((ft & VT_BTYPE) == VT_BOOL) {
3148 gen_cast(&vtop[-1].type);
3149 vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
3152 r = adjust_bf(vtop - 1, bit_pos, bit_size);
3153 if (r == VT_STRUCT) {
3154 gen_cast_s((ft & VT_BTYPE) == VT_LLONG ? VT_LLONG : VT_INT);
3155 store_packed_bf(bit_pos, bit_size);
3156 } else {
3157 unsigned long long mask = (1ULL << bit_size) - 1;
3158 if ((ft & VT_BTYPE) != VT_BOOL) {
3159 /* mask source */
3160 if ((vtop[-1].type.t & VT_BTYPE) == VT_LLONG)
3161 vpushll(mask);
3162 else
3163 vpushi((unsigned)mask);
3164 gen_op('&');
3166 /* shift source */
3167 vpushi(bit_pos);
3168 gen_op(TOK_SHL);
3169 vswap();
3170 /* duplicate destination */
3171 vdup();
3172 vrott(3);
3173 /* load destination, mask and or with source */
3174 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
3175 vpushll(~(mask << bit_pos));
3176 else
3177 vpushi(~((unsigned)mask << bit_pos));
3178 gen_op('&');
3179 gen_op('|');
3180 /* store result */
3181 vstore();
3182 /* ... and discard */
3183 vpop();
3185 } else if (dbt == VT_VOID) {
3186 --vtop;
3187 } else {
3188 #ifdef CONFIG_TCC_BCHECK
3189 /* bound check case */
3190 if (vtop[-1].r & VT_MUSTBOUND) {
3191 vswap();
3192 gbound();
3193 vswap();
3195 #endif
3196 rc = RC_INT;
3197 if (is_float(ft)) {
3198 rc = RC_FLOAT;
3199 #ifdef TCC_TARGET_X86_64
3200 if ((ft & VT_BTYPE) == VT_LDOUBLE) {
3201 rc = RC_ST0;
3202 } else if ((ft & VT_BTYPE) == VT_QFLOAT) {
3203 rc = RC_FRET;
3205 #endif
3207 r = gv(rc); /* generate value */
3208 /* if lvalue was saved on stack, must read it */
3209 if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
3210 SValue sv;
3211 t = get_reg(RC_INT);
3212 #if PTR_SIZE == 8
3213 sv.type.t = VT_PTR;
3214 #else
3215 sv.type.t = VT_INT;
3216 #endif
3217 sv.r = VT_LOCAL | VT_LVAL;
3218 sv.c.i = vtop[-1].c.i;
3219 load(t, &sv);
3220 vtop[-1].r = t | VT_LVAL;
3222 /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
3223 #if PTR_SIZE == 8
3224 if (((ft & VT_BTYPE) == VT_QLONG) || ((ft & VT_BTYPE) == VT_QFLOAT)) {
3225 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
3226 #else
3227 if ((ft & VT_BTYPE) == VT_LLONG) {
3228 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
3229 #endif
3230 vtop[-1].type.t = load_type;
3231 store(r, vtop - 1);
3232 vswap();
3233 /* convert to int to increment easily */
3234 vtop->type.t = addr_type;
3235 gaddrof();
3236 vpushi(load_size);
3237 gen_op('+');
3238 vtop->r |= VT_LVAL;
3239 vswap();
3240 vtop[-1].type.t = load_type;
3241 /* XXX: it works because r2 is spilled last ! */
3242 store(vtop->r2, vtop - 1);
3243 } else {
3244 store(r, vtop - 1);
3247 vswap();
3248 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
3249 vtop->r |= delayed_cast;
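/* For illustration, the kinds of stores the branches above generate:

       struct S { int m[8]; } a, b;
       a = b;                       // becomes a call to memmove(&a, &b, sizeof a)
       struct B { int x : 5; } s;
       int n = 3;
       s.x = n;                     // mask, shift and OR into the containing word
       char c = n;                  // plain store; truncation via delayed VT_MUSTCAST */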
3253 /* handle pre/post increment and decrement; 'post' selects the post form, c is the token ++ or -- */
3254 ST_FUNC void inc(int post, int c)
3256 test_lvalue();
3257 vdup(); /* save lvalue */
3258 if (post) {
3259 gv_dup(); /* duplicate value */
3260 vrotb(3);
3261 vrotb(3);
3263 /* add constant */
3264 vpushi(c - TOK_MID);
3265 gen_op('+');
3266 vstore(); /* store value */
3267 if (post)
3268 vpop(); /* if post op, return saved value */
3271 ST_FUNC void parse_mult_str (CString *astr, const char *msg)
3273 /* read the string */
3274 if (tok != TOK_STR)
3275 expect(msg);
3276 cstr_new(astr);
3277 while (tok == TOK_STR) {
3278 /* XXX: add \0 handling too ? */
3279 cstr_cat(astr, tokc.str.data, -1);
3280 next();
3282 cstr_ccat(astr, '\0');
3285 /* If i is >= 1 and a power of two, returns log2(i)+1.
3286 If i is 0, returns 0. */
3287 static int exact_log2p1(int i)
3289 int ret;
3290 if (!i)
3291 return 0;
3292 for (ret = 1; i >= 1 << 8; ret += 8)
3293 i >>= 8;
3294 if (i >= 1 << 4)
3295 ret += 4, i >>= 4;
3296 if (i >= 1 << 2)
3297 ret += 2, i >>= 2;
3298 if (i >= 1 << 1)
3299 ret++;
3300 return ret;
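/* For illustration:

       exact_log2p1(0)  == 0
       exact_log2p1(1)  == 1
       exact_log2p1(8)  == 4
       exact_log2p1(16) == 5   // __attribute__((aligned(16))) stores 5 */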
3303 /* Parse __attribute__((...)) GNUC extension. */
3304 static void parse_attribute(AttributeDef *ad)
3306 int t, n;
3307 CString astr;
3309 redo:
3310 if (tok != TOK_ATTRIBUTE1 && tok != TOK_ATTRIBUTE2)
3311 return;
3312 next();
3313 skip('(');
3314 skip('(');
3315 while (tok != ')') {
3316 if (tok < TOK_IDENT)
3317 expect("attribute name");
3318 t = tok;
3319 next();
3320 switch(t) {
3321 case TOK_SECTION1:
3322 case TOK_SECTION2:
3323 skip('(');
3324 parse_mult_str(&astr, "section name");
3325 ad->section = find_section(tcc_state, (char *)astr.data);
3326 skip(')');
3327 cstr_free(&astr);
3328 break;
3329 case TOK_ALIAS1:
3330 case TOK_ALIAS2:
3331 skip('(');
3332 parse_mult_str(&astr, "alias(\"target\")");
3333 ad->alias_target = /* save string as token, for later */
3334 tok_alloc((char*)astr.data, astr.size-1)->tok;
3335 skip(')');
3336 cstr_free(&astr);
3337 break;
3338 case TOK_VISIBILITY1:
3339 case TOK_VISIBILITY2:
3340 skip('(');
3341 parse_mult_str(&astr,
3342 "visibility(\"default|hidden|internal|protected\")");
3343 if (!strcmp (astr.data, "default"))
3344 ad->a.visibility = STV_DEFAULT;
3345 else if (!strcmp (astr.data, "hidden"))
3346 ad->a.visibility = STV_HIDDEN;
3347 else if (!strcmp (astr.data, "internal"))
3348 ad->a.visibility = STV_INTERNAL;
3349 else if (!strcmp (astr.data, "protected"))
3350 ad->a.visibility = STV_PROTECTED;
3351 else
3352 expect("visibility(\"default|hidden|internal|protected\")");
3353 skip(')');
3354 cstr_free(&astr);
3355 break;
3356 case TOK_ALIGNED1:
3357 case TOK_ALIGNED2:
3358 if (tok == '(') {
3359 next();
3360 n = expr_const();
3361 if (n <= 0 || (n & (n - 1)) != 0)
3362 tcc_error("alignment must be a positive power of two");
3363 skip(')');
3364 } else {
3365 n = MAX_ALIGN;
3367 ad->a.aligned = exact_log2p1(n);
3368 if (n != 1 << (ad->a.aligned - 1))
3369 tcc_error("alignment of %d is larger than implemented", n);
3370 break;
3371 case TOK_PACKED1:
3372 case TOK_PACKED2:
3373 ad->a.packed = 1;
3374 break;
3375 case TOK_WEAK1:
3376 case TOK_WEAK2:
3377 ad->a.weak = 1;
3378 break;
3379 case TOK_UNUSED1:
3380 case TOK_UNUSED2:
3381 /* currently, no need to handle it because tcc does not
3382 track unused objects */
3383 break;
3384 case TOK_NORETURN1:
3385 case TOK_NORETURN2:
3386 /* currently ignored: tcc does not make use of
3387 the noreturn information */
3388 break;
3389 case TOK_CDECL1:
3390 case TOK_CDECL2:
3391 case TOK_CDECL3:
3392 ad->f.func_call = FUNC_CDECL;
3393 break;
3394 case TOK_STDCALL1:
3395 case TOK_STDCALL2:
3396 case TOK_STDCALL3:
3397 ad->f.func_call = FUNC_STDCALL;
3398 break;
3399 #ifdef TCC_TARGET_I386
3400 case TOK_REGPARM1:
3401 case TOK_REGPARM2:
3402 skip('(');
3403 n = expr_const();
3404 if (n > 3)
3405 n = 3;
3406 else if (n < 0)
3407 n = 0;
3408 if (n > 0)
3409 ad->f.func_call = FUNC_FASTCALL1 + n - 1;
3410 skip(')');
3411 break;
3412 case TOK_FASTCALL1:
3413 case TOK_FASTCALL2:
3414 case TOK_FASTCALL3:
3415 ad->f.func_call = FUNC_FASTCALLW;
3416 break;
3417 #endif
3418 case TOK_MODE:
3419 skip('(');
3420 switch(tok) {
3421 case TOK_MODE_DI:
3422 ad->attr_mode = VT_LLONG + 1;
3423 break;
3424 case TOK_MODE_QI:
3425 ad->attr_mode = VT_BYTE + 1;
3426 break;
3427 case TOK_MODE_HI:
3428 ad->attr_mode = VT_SHORT + 1;
3429 break;
3430 case TOK_MODE_SI:
3431 case TOK_MODE_word:
3432 ad->attr_mode = VT_INT + 1;
3433 break;
3434 default:
3435 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok, NULL));
3436 break;
3438 next();
3439 skip(')');
3440 break;
3441 case TOK_DLLEXPORT:
3442 ad->a.dllexport = 1;
3443 break;
3444 case TOK_NODECORATE:
3445 ad->a.nodecorate = 1;
3446 break;
3447 case TOK_DLLIMPORT:
3448 ad->a.dllimport = 1;
3449 break;
3450 default:
3451 if (tcc_state->warn_unsupported)
3452 tcc_warning("'%s' attribute ignored", get_tok_str(t, NULL));
3453 /* skip parameters */
3454 if (tok == '(') {
3455 int parenthesis = 0;
3456 do {
3457 if (tok == '(')
3458 parenthesis++;
3459 else if (tok == ')')
3460 parenthesis--;
3461 next();
3462 } while (parenthesis && tok != -1);
3464 break;
3466 if (tok != ',')
3467 break;
3468 next();
3470 skip(')');
3471 skip(')');
3472 goto redo;
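/* For illustration, attribute spellings the loop above understands
   (a sketch, not an exhaustive list):

       static int buf[64] __attribute__((aligned(16), unused));
       void die(const char *msg) __attribute__((noreturn));
       int var __attribute__((section(".mydata")));
       void win_fn(int) __attribute__((stdcall, dllexport, nodecorate));

   Unknown attributes are skipped, with a warning when warn_unsupported
   is enabled. */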
3475 static Sym * find_field (CType *type, int v)
3477 Sym *s = type->ref;
3478 v |= SYM_FIELD;
3479 while ((s = s->next) != NULL) {
3480 if ((s->v & SYM_FIELD) &&
3481 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3482 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3483 Sym *ret = find_field (&s->type, v);
3484 if (ret)
3485 return ret;
3487 if (s->v == v)
3488 break;
3490 return s;
3493 static void struct_add_offset (Sym *s, int offset)
3495 while ((s = s->next) != NULL) {
3496 if ((s->v & SYM_FIELD) &&
3497 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3498 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3499 struct_add_offset(s->type.ref, offset);
3500 } else
3501 s->c += offset;
3505 static void struct_layout(CType *type, AttributeDef *ad)
3507 int size, align, maxalign, offset, c, bit_pos, bit_size;
3508 int packed, a, bt, prevbt, prev_bit_size;
3509 int pcc = !tcc_state->ms_bitfields;
3510 int pragma_pack = *tcc_state->pack_stack_ptr;
3511 Sym *f;
3513 maxalign = 1;
3514 offset = 0;
3515 c = 0;
3516 bit_pos = 0;
3517 prevbt = VT_STRUCT; /* make it never match */
3518 prev_bit_size = 0;
3520 //#define BF_DEBUG
3522 for (f = type->ref->next; f; f = f->next) {
3523 if (f->type.t & VT_BITFIELD)
3524 bit_size = BIT_SIZE(f->type.t);
3525 else
3526 bit_size = -1;
3527 size = type_size(&f->type, &align);
3528 a = f->a.aligned ? 1 << (f->a.aligned - 1) : 0;
3529 packed = 0;
3531 if (pcc && bit_size == 0) {
3532 /* in pcc mode, packing does not affect zero-width bitfields */
3534 } else {
3535 /* in pcc mode, attribute packed overrides if set. */
3536 if (pcc && (f->a.packed || ad->a.packed))
3537 align = packed = 1;
3539 /* pragma pack overrides align if lesser and packs bitfields always */
3540 if (pragma_pack) {
3541 packed = 1;
3542 if (pragma_pack < align)
3543 align = pragma_pack;
3544 /* in pcc mode pragma pack also overrides individual align */
3545 if (pcc && pragma_pack < a)
3546 a = 0;
3549 /* some individual align was specified */
3550 if (a)
3551 align = a;
3553 if (type->ref->type.t == VT_UNION) {
3554 if (pcc && bit_size >= 0)
3555 size = (bit_size + 7) >> 3;
3556 offset = 0;
3557 if (size > c)
3558 c = size;
3560 } else if (bit_size < 0) {
3561 if (pcc)
3562 c += (bit_pos + 7) >> 3;
3563 c = (c + align - 1) & -align;
3564 offset = c;
3565 if (size > 0)
3566 c += size;
3567 bit_pos = 0;
3568 prevbt = VT_STRUCT;
3569 prev_bit_size = 0;
3571 } else {
3572 /* A bit-field. Layout is more complicated. There are two
3573 options: PCC (GCC) compatible and MS compatible */
3574 if (pcc) {
3575 /* In PCC layout a bit-field is placed adjacent to the
3576 preceding bit-fields, except if:
3577 - it has zero-width
3578 - an individual alignment was given
3579 - it would overflow its base type container and
3580 there is no packing */
3581 if (bit_size == 0) {
3582 new_field:
3583 c = (c + ((bit_pos + 7) >> 3) + align - 1) & -align;
3584 bit_pos = 0;
3585 } else if (f->a.aligned) {
3586 goto new_field;
3587 } else if (!packed) {
3588 int a8 = align * 8;
3589 int ofs = ((c * 8 + bit_pos) % a8 + bit_size + a8 - 1) / a8;
3590 if (ofs > size / align)
3591 goto new_field;
3594 /* in pcc mode, long long bitfields have type int if they fit */
3595 if (size == 8 && bit_size <= 32)
3596 f->type.t = (f->type.t & ~VT_BTYPE) | VT_INT, size = 4;
3598 while (bit_pos >= align * 8)
3599 c += align, bit_pos -= align * 8;
3600 offset = c;
3602 /* In PCC layout named bit-fields influence the alignment
3603 of the containing struct using the base type's alignment,
3604 except for packed fields (which here have correct align). */
3605 if (f->v & SYM_FIRST_ANOM
3606 // && bit_size // ??? gcc on ARM/rpi does that
3608 align = 1;
3610 } else {
3611 bt = f->type.t & VT_BTYPE;
3612 if ((bit_pos + bit_size > size * 8)
3613 || (bit_size > 0) == (bt != prevbt)
3615 c = (c + align - 1) & -align;
3616 offset = c;
3617 bit_pos = 0;
3618 /* In MS bitfield mode a bit-field run always uses
3619 at least as many bits as the underlying type.
3620 To start a new run it's also required that this
3621 or the last bit-field had non-zero width. */
3622 if (bit_size || prev_bit_size)
3623 c += size;
3625 /* In MS layout the record's alignment is normally
3626 influenced by the field, except for a zero-width
3627 field at the start of a run (it is influenced again
3628 by subsequent zero-width fields). */
3629 if (bit_size == 0 && prevbt != bt)
3630 align = 1;
3631 prevbt = bt;
3632 prev_bit_size = bit_size;
3635 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
3636 | (bit_pos << VT_STRUCT_SHIFT);
3637 bit_pos += bit_size;
3639 if (align > maxalign)
3640 maxalign = align;
3642 #ifdef BF_DEBUG
3643 printf("set field %s offset %-2d size %-2d align %-2d",
3644 get_tok_str(f->v & ~SYM_FIELD, NULL), offset, size, align);
3645 if (f->type.t & VT_BITFIELD) {
3646 printf(" pos %-2d bits %-2d",
3647 BIT_POS(f->type.t),
3648 BIT_SIZE(f->type.t)
3651 printf("\n");
3652 #endif
3654 if (f->v & SYM_FIRST_ANOM && (f->type.t & VT_BTYPE) == VT_STRUCT) {
3655 Sym *ass;
3656 /* An anonymous struct/union. Adjust member offsets
3657 to reflect the real offset of our containing struct.
3658 Also set the offset of this anon member inside
3659 the outer struct to be zero. This way it works
3660 both when accessing the field offset directly
3661 (from the base object) and when recursing into
3662 members during initializer handling. */
3663 int v2 = f->type.ref->v;
3664 if (!(v2 & SYM_FIELD) &&
3665 (v2 & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
3666 Sym **pps;
3667 /* This happens only with MS extensions. The
3668 anon member has a named struct type, so it
3669 potentially is shared with other references.
3670 We need to unshare members so we can modify
3671 them. */
3672 ass = f->type.ref;
3673 f->type.ref = sym_push(anon_sym++ | SYM_FIELD,
3674 &f->type.ref->type, 0,
3675 f->type.ref->c);
3676 pps = &f->type.ref->next;
3677 while ((ass = ass->next) != NULL) {
3678 *pps = sym_push(ass->v, &ass->type, 0, ass->c);
3679 pps = &((*pps)->next);
3681 *pps = NULL;
3683 struct_add_offset(f->type.ref, offset);
3684 f->c = 0;
3685 } else {
3686 f->c = offset;
3689 f->r = 0;
3692 if (pcc)
3693 c += (bit_pos + 7) >> 3;
3695 /* store size and alignment */
3696 a = bt = ad->a.aligned ? 1 << (ad->a.aligned - 1) : 1;
3697 if (a < maxalign)
3698 a = maxalign;
3699 type->ref->r = a;
3700 if (pragma_pack && pragma_pack < maxalign && 0 == pcc) {
3701 /* can happen if individual align for some member was given. In
3702 this case MSVC ignores maxalign when aligning the size */
3703 a = pragma_pack;
3704 if (a < bt)
3705 a = bt;
3707 c = (c + a - 1) & -a;
3708 type->ref->c = c;
3710 #ifdef BF_DEBUG
3711 printf("struct size %-2d align %-2d\n\n", c, a), fflush(stdout);
3712 #endif
3714 /* check whether we can access bitfields by their type */
3715 for (f = type->ref->next; f; f = f->next) {
3716 int s, px, cx, c0;
3717 CType t;
3719 if (0 == (f->type.t & VT_BITFIELD))
3720 continue;
3721 f->type.ref = f;
3722 f->auxtype = -1;
3723 bit_size = BIT_SIZE(f->type.t);
3724 if (bit_size == 0)
3725 continue;
3726 bit_pos = BIT_POS(f->type.t);
3727 size = type_size(&f->type, &align);
3728 if (bit_pos + bit_size <= size * 8 && f->c + size <= c)
3729 continue;
3731 /* try to access the field using a different type */
3732 c0 = -1, s = align = 1;
3733 for (;;) {
3734 px = f->c * 8 + bit_pos;
3735 cx = (px >> 3) & -align;
3736 px = px - (cx << 3);
3737 if (c0 == cx)
3738 break;
3739 s = (px + bit_size + 7) >> 3;
3740 if (s > 4) {
3741 t.t = VT_LLONG;
3742 } else if (s > 2) {
3743 t.t = VT_INT;
3744 } else if (s > 1) {
3745 t.t = VT_SHORT;
3746 } else {
3747 t.t = VT_BYTE;
3749 s = type_size(&t, &align);
3750 c0 = cx;
3753 if (px + bit_size <= s * 8 && cx + s <= c) {
3754 /* update offset and bit position */
3755 f->c = cx;
3756 bit_pos = px;
3757 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
3758 | (bit_pos << VT_STRUCT_SHIFT);
3759 if (s != size)
3760 f->auxtype = t.t;
3761 #ifdef BF_DEBUG
3762 printf("FIX field %s offset %-2d size %-2d align %-2d "
3763 "pos %-2d bits %-2d\n",
3764 get_tok_str(f->v & ~SYM_FIELD, NULL),
3765 cx, s, align, px, bit_size);
3766 #endif
3767 } else {
3768 /* fall back to load/store single-byte wise */
3769 f->auxtype = VT_STRUCT;
3770 #ifdef BF_DEBUG
3771 printf("FIX field %s : load byte-wise\n",
3772 get_tok_str(f->v & ~SYM_FIELD, NULL));
3773 #endif
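/* For illustration, the effect of the two bit-field layout modes above,
   assuming 8-bit char and 32-bit int:

       struct s { char a : 3; int b : 10; };

   In PCC/GCC mode 'b' is packed adjacent to 'a' in the same word, so
   sizeof(struct s) == 4.  In MS mode a new int-sized run is started
   because the base type changes, so sizeof(struct s) == 8. */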
3778 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
3779 static void struct_decl(CType *type, int u)
3781 int v, c, size, align, flexible;
3782 int bit_size, bsize, bt;
3783 Sym *s, *ss, **ps;
3784 AttributeDef ad, ad1;
3785 CType type1, btype;
3787 memset(&ad, 0, sizeof ad);
3788 next();
3789 parse_attribute(&ad);
3790 if (tok != '{') {
3791 v = tok;
3792 next();
3793 /* struct already defined ? return it */
3794 if (v < TOK_IDENT)
3795 expect("struct/union/enum name");
3796 s = struct_find(v);
3797 if (s && (s->sym_scope == local_scope || tok != '{')) {
3798 if (u == s->type.t)
3799 goto do_decl;
3800 if (u == VT_ENUM && IS_ENUM(s->type.t))
3801 goto do_decl;
3802 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
3804 } else {
3805 v = anon_sym++;
3807 /* Record the original enum/struct/union token. */
3808 type1.t = u == VT_ENUM ? u | VT_INT | VT_UNSIGNED : u;
3809 type1.ref = NULL;
3810 /* we put an undefined size for struct/union */
3811 s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
3812 s->r = 0; /* default alignment is zero as gcc */
3813 do_decl:
3814 type->t = s->type.t;
3815 type->ref = s;
3817 if (tok == '{') {
3818 next();
3819 if (s->c != -1)
3820 tcc_error("struct/union/enum already defined");
3821 /* cannot be empty */
3822 /* empty enums are not allowed */
3823 ps = &s->next;
3824 if (u == VT_ENUM) {
3825 long long ll = 0, pl = 0, nl = 0;
3826 CType t;
3827 t.ref = s;
3828 /* enum symbols have static storage */
3829 t.t = VT_INT|VT_STATIC|VT_ENUM_VAL;
3830 for(;;) {
3831 v = tok;
3832 if (v < TOK_UIDENT)
3833 expect("identifier");
3834 ss = sym_find(v);
3835 if (ss && !local_stack)
3836 tcc_error("redefinition of enumerator '%s'",
3837 get_tok_str(v, NULL));
3838 next();
3839 if (tok == '=') {
3840 next();
3841 ll = expr_const64();
3843 ss = sym_push(v, &t, VT_CONST, 0);
3844 ss->enum_val = ll;
3845 *ps = ss, ps = &ss->next;
3846 if (ll < nl)
3847 nl = ll;
3848 if (ll > pl)
3849 pl = ll;
3850 if (tok != ',')
3851 break;
3852 next();
3853 ll++;
3854 /* NOTE: we accept a trailing comma */
3855 if (tok == '}')
3856 break;
3858 skip('}');
3859 /* set integral type of the enum */
3860 t.t = VT_INT;
3861 if (nl >= 0) {
3862 if (pl != (unsigned)pl)
3863 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
3864 t.t |= VT_UNSIGNED;
3865 } else if (pl != (int)pl || nl != (int)nl)
3866 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
3867 s->type.t = type->t = t.t | VT_ENUM;
3868 s->c = 0;
3869 /* set type for enum members */
3870 for (ss = s->next; ss; ss = ss->next) {
3871 ll = ss->enum_val;
3872 if (ll == (int)ll) /* default is int if it fits */
3873 continue;
3874 if (t.t & VT_UNSIGNED) {
3875 ss->type.t |= VT_UNSIGNED;
3876 if (ll == (unsigned)ll)
3877 continue;
3879 ss->type.t = (ss->type.t & ~VT_BTYPE)
3880 | (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
3882 } else {
3883 c = 0;
3884 flexible = 0;
3885 while (tok != '}') {
3886 if (!parse_btype(&btype, &ad1)) {
3887 skip(';');
3888 continue;
3890 while (1) {
3891 if (flexible)
3892 tcc_error("flexible array member '%s' not at the end of struct",
3893 get_tok_str(v, NULL));
3894 bit_size = -1;
3895 v = 0;
3896 type1 = btype;
3897 if (tok != ':') {
3898 if (tok != ';')
3899 type_decl(&type1, &ad1, &v, TYPE_DIRECT);
3900 if (v == 0) {
3901 if ((type1.t & VT_BTYPE) != VT_STRUCT)
3902 expect("identifier");
3903 else {
3904 int v = btype.ref->v;
3905 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
3906 if (tcc_state->ms_extensions == 0)
3907 expect("identifier");
3911 if (type_size(&type1, &align) < 0) {
3912 if ((u == VT_STRUCT) && (type1.t & VT_ARRAY) && c)
3913 flexible = 1;
3914 else
3915 tcc_error("field '%s' has incomplete type",
3916 get_tok_str(v, NULL));
3918 if ((type1.t & VT_BTYPE) == VT_FUNC ||
3919 (type1.t & VT_STORAGE))
3920 tcc_error("invalid type for '%s'",
3921 get_tok_str(v, NULL));
3923 if (tok == ':') {
3924 next();
3925 bit_size = expr_const();
3926 /* XXX: handle v = 0 case for messages */
3927 if (bit_size < 0)
3928 tcc_error("negative width in bit-field '%s'",
3929 get_tok_str(v, NULL));
3930 if (v && bit_size == 0)
3931 tcc_error("zero width for bit-field '%s'",
3932 get_tok_str(v, NULL));
3933 parse_attribute(&ad1);
3935 size = type_size(&type1, &align);
3936 if (bit_size >= 0) {
3937 bt = type1.t & VT_BTYPE;
3938 if (bt != VT_INT &&
3939 bt != VT_BYTE &&
3940 bt != VT_SHORT &&
3941 bt != VT_BOOL &&
3942 bt != VT_LLONG)
3943 tcc_error("bitfields must have scalar type");
3944 bsize = size * 8;
3945 if (bit_size > bsize) {
3946 tcc_error("width of '%s' exceeds its type",
3947 get_tok_str(v, NULL));
3948 } else if (bit_size == bsize
3949 && !ad.a.packed && !ad1.a.packed) {
3950 /* no need for bit fields */
3952 } else if (bit_size == 64) {
3953 tcc_error("field width 64 not implemented");
3954 } else {
3955 type1.t = (type1.t & ~VT_STRUCT_MASK)
3956 | VT_BITFIELD
3957 | (bit_size << (VT_STRUCT_SHIFT + 6));
3960 if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
3961 /* Remember we've seen a real field to check
3962 for placement of flexible array member. */
3963 c = 1;
3965 /* If member is a struct or bit-field, enforce
3966 placing into the struct (as anonymous). */
3967 if (v == 0 &&
3968 ((type1.t & VT_BTYPE) == VT_STRUCT ||
3969 bit_size >= 0)) {
3970 v = anon_sym++;
3972 if (v) {
3973 ss = sym_push(v | SYM_FIELD, &type1, 0, 0);
3974 ss->a = ad1.a;
3975 *ps = ss;
3976 ps = &ss->next;
3978 if (tok == ';' || tok == TOK_EOF)
3979 break;
3980 skip(',');
3982 skip(';');
3984 skip('}');
3985 parse_attribute(&ad);
3986 struct_layout(type, &ad);
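/* For illustration, the integral type chosen for an enum by the code
   above, assuming 32-bit int:

       enum e1 { A = 1 };               // fits in int          -> int
       enum e2 { B = 0x80000000 };      // positive, > INT_MAX  -> unsigned int
       enum e3 { C = 0x100000000LL };   // needs 64 bits        -> unsigned 64-bit */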
3991 static void sym_to_attr(AttributeDef *ad, Sym *s)
3993 if (s->a.aligned && 0 == ad->a.aligned)
3994 ad->a.aligned = s->a.aligned;
3995 if (s->f.func_call && 0 == ad->f.func_call)
3996 ad->f.func_call = s->f.func_call;
3997 if (s->f.func_type && 0 == ad->f.func_type)
3998 ad->f.func_type = s->f.func_type;
3999 if (s->a.packed)
4000 ad->a.packed = 1;
4003 /* Add type qualifiers to a type. If the type is an array then the qualifiers
4004 are added to the element type, copied because it could be a typedef. */
4005 static void parse_btype_qualify(CType *type, int qualifiers)
4007 while (type->t & VT_ARRAY) {
4008 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
4009 type = &type->ref->type;
4011 type->t |= qualifiers;
4014 /* return 0 if no type declaration. otherwise, return the basic type
4015 and skip it.
4017 static int parse_btype(CType *type, AttributeDef *ad)
4019 int t, u, bt, st, type_found, typespec_found, g;
4020 Sym *s;
4021 CType type1;
4023 memset(ad, 0, sizeof(AttributeDef));
4024 type_found = 0;
4025 typespec_found = 0;
4026 t = VT_INT;
4027 bt = st = -1;
4028 type->ref = NULL;
4030 while(1) {
4031 switch(tok) {
4032 case TOK_EXTENSION:
4033 /* currently, we really ignore extension */
4034 next();
4035 continue;
4037 /* basic types */
4038 case TOK_CHAR:
4039 u = VT_BYTE;
4040 basic_type:
4041 next();
4042 basic_type1:
4043 if (u == VT_SHORT || u == VT_LONG) {
4044 if (st != -1 || (bt != -1 && bt != VT_INT))
4045 tmbt: tcc_error("too many basic types");
4046 st = u;
4047 } else {
4048 if (bt != -1 || (st != -1 && u != VT_INT))
4049 goto tmbt;
4050 bt = u;
4052 if (u != VT_INT)
4053 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
4054 typespec_found = 1;
4055 break;
4056 case TOK_VOID:
4057 u = VT_VOID;
4058 goto basic_type;
4059 case TOK_SHORT:
4060 u = VT_SHORT;
4061 goto basic_type;
4062 case TOK_INT:
4063 u = VT_INT;
4064 goto basic_type;
4065 case TOK_LONG:
4066 if ((t & VT_BTYPE) == VT_DOUBLE) {
4067 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
4068 } else if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
4069 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LLONG;
4070 } else {
4071 u = VT_LONG;
4072 goto basic_type;
4074 next();
4075 break;
4076 #ifdef TCC_TARGET_ARM64
4077 case TOK_UINT128:
4078 /* GCC's __uint128_t appears in some Linux header files. Make it a
4079 synonym for long double to get the size and alignment right. */
4080 u = VT_LDOUBLE;
4081 goto basic_type;
4082 #endif
4083 case TOK_BOOL:
4084 u = VT_BOOL;
4085 goto basic_type;
4086 case TOK_FLOAT:
4087 u = VT_FLOAT;
4088 goto basic_type;
4089 case TOK_DOUBLE:
4090 if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
4091 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
4092 } else {
4093 u = VT_DOUBLE;
4094 goto basic_type;
4096 next();
4097 break;
4098 case TOK_ENUM:
4099 struct_decl(&type1, VT_ENUM);
4100 basic_type2:
4101 u = type1.t;
4102 type->ref = type1.ref;
4103 goto basic_type1;
4104 case TOK_STRUCT:
4105 struct_decl(&type1, VT_STRUCT);
4106 goto basic_type2;
4107 case TOK_UNION:
4108 struct_decl(&type1, VT_UNION);
4109 goto basic_type2;
4111 /* type modifiers */
4112 case TOK_CONST1:
4113 case TOK_CONST2:
4114 case TOK_CONST3:
4115 type->t = t;
4116 parse_btype_qualify(type, VT_CONSTANT);
4117 t = type->t;
4118 next();
4119 break;
4120 case TOK_VOLATILE1:
4121 case TOK_VOLATILE2:
4122 case TOK_VOLATILE3:
4123 type->t = t;
4124 parse_btype_qualify(type, VT_VOLATILE);
4125 t = type->t;
4126 next();
4127 break;
4128 case TOK_SIGNED1:
4129 case TOK_SIGNED2:
4130 case TOK_SIGNED3:
4131 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
4132 tcc_error("signed and unsigned modifier");
4133 t |= VT_DEFSIGN;
4134 next();
4135 typespec_found = 1;
4136 break;
4137 case TOK_REGISTER:
4138 case TOK_AUTO:
4139 case TOK_RESTRICT1:
4140 case TOK_RESTRICT2:
4141 case TOK_RESTRICT3:
4142 next();
4143 break;
4144 case TOK_UNSIGNED:
4145 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
4146 tcc_error("signed and unsigned modifier");
4147 t |= VT_DEFSIGN | VT_UNSIGNED;
4148 next();
4149 typespec_found = 1;
4150 break;
4152 /* storage */
4153 case TOK_EXTERN:
4154 g = VT_EXTERN;
4155 goto storage;
4156 case TOK_STATIC:
4157 g = VT_STATIC;
4158 goto storage;
4159 case TOK_TYPEDEF:
4160 g = VT_TYPEDEF;
4161 goto storage;
4162 storage:
4163 if (t & (VT_EXTERN|VT_STATIC|VT_TYPEDEF) & ~g)
4164 tcc_error("multiple storage classes");
4165 t |= g;
4166 next();
4167 break;
4168 case TOK_INLINE1:
4169 case TOK_INLINE2:
4170 case TOK_INLINE3:
4171 t |= VT_INLINE;
4172 next();
4173 break;
4175 /* GNUC attribute */
4176 case TOK_ATTRIBUTE1:
4177 case TOK_ATTRIBUTE2:
4178 parse_attribute(ad);
4179 if (ad->attr_mode) {
4180 u = ad->attr_mode -1;
4181 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
4183 break;
4184 /* GNUC typeof */
4185 case TOK_TYPEOF1:
4186 case TOK_TYPEOF2:
4187 case TOK_TYPEOF3:
4188 next();
4189 parse_expr_type(&type1);
4190 /* remove all storage modifiers except typedef */
4191 type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
4192 if (type1.ref)
4193 sym_to_attr(ad, type1.ref);
4194 goto basic_type2;
4195 default:
4196 if (typespec_found)
4197 goto the_end;
4198 s = sym_find(tok);
4199 if (!s || !(s->type.t & VT_TYPEDEF))
4200 goto the_end;
4201 t &= ~(VT_BTYPE|VT_LONG);
4202 u = t & ~(VT_CONSTANT | VT_VOLATILE), t ^= u;
4203 type->t = (s->type.t & ~VT_TYPEDEF) | u;
4204 type->ref = s->type.ref;
4205 if (t)
4206 parse_btype_qualify(type, t);
4207 t = type->t;
4208 /* get attributes from typedef */
4209 sym_to_attr(ad, s);
4210 next();
4211 typespec_found = 1;
4212 st = bt = -2;
4213 break;
4215 type_found = 1;
4217 the_end:
4218 if (tcc_state->char_is_unsigned) {
4219 if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
4220 t |= VT_UNSIGNED;
4222 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
4223 bt = t & (VT_BTYPE|VT_LONG);
4224 if (bt == VT_LONG)
4225 t |= LONG_SIZE == 8 ? VT_LLONG : VT_INT;
4226 #ifdef TCC_TARGET_PE
4227 if (bt == VT_LDOUBLE)
4228 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_DOUBLE;
4229 #endif
4230 type->t = t;
4231 return type_found;
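/* For illustration, a few specifier sequences and how the loop above folds
   them into the single 't' word:

       unsigned long long x;   // VT_LLONG | VT_UNSIGNED | VT_DEFSIGN
       long double d;          // VT_LDOUBLE (downgraded to double on PE targets)
       const volatile int c;   // VT_INT plus VT_CONSTANT | VT_VOLATILE
       static inline int f();  // VT_STATIC | VT_INLINE storage bits */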
4234 /* convert a function parameter type (array to pointer and function to
4235 function pointer) */
4236 static inline void convert_parameter_type(CType *pt)
4238 /* remove const and volatile qualifiers (XXX: const could be used
4239 to indicate a const function parameter) */
4240 pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
4241 /* array must be transformed to pointer according to ANSI C */
4242 pt->t &= ~VT_ARRAY;
4243 if ((pt->t & VT_BTYPE) == VT_FUNC) {
4244 mk_pointer(pt);
4248 ST_FUNC void parse_asm_str(CString *astr)
4250 skip('(');
4251 parse_mult_str(astr, "string constant");
4254 /* Parse an asm label and return the token */
4255 static int asm_label_instr(void)
4257 int v;
4258 CString astr;
4260 next();
4261 parse_asm_str(&astr);
4262 skip(')');
4263 #ifdef ASM_DEBUG
4264 printf("asm_alias: \"%s\"\n", (char *)astr.data);
4265 #endif
4266 v = tok_alloc(astr.data, astr.size - 1)->tok;
4267 cstr_free(&astr);
4268 return v;
4271 static int post_type(CType *type, AttributeDef *ad, int storage, int td)
4273 int n, l, t1, arg_size, align;
4274 Sym **plast, *s, *first;
4275 AttributeDef ad1;
4276 CType pt;
4278 if (tok == '(') {
4279 /* function type, or recursive declarator (return if so) */
4280 next();
4281 if (td && !(td & TYPE_ABSTRACT))
4282 return 0;
4283 if (tok == ')')
4284 l = 0;
4285 else if (parse_btype(&pt, &ad1))
4286 l = FUNC_NEW;
4287 else if (td)
4288 return 0;
4289 else
4290 l = FUNC_OLD;
4291 first = NULL;
4292 plast = &first;
4293 arg_size = 0;
4294 if (l) {
4295 for(;;) {
4296 /* read param name and compute offset */
4297 if (l != FUNC_OLD) {
4298 if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
4299 break;
4300 type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT);
4301 if ((pt.t & VT_BTYPE) == VT_VOID)
4302 tcc_error("parameter declared as void");
4303 } else {
4304 n = tok;
4305 if (n < TOK_UIDENT)
4306 expect("identifier");
4307 pt.t = VT_VOID; /* invalid type */
4308 next();
4310 convert_parameter_type(&pt);
4311 arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
4312 s = sym_push(n | SYM_FIELD, &pt, 0, 0);
4313 *plast = s;
4314 plast = &s->next;
4315 if (tok == ')')
4316 break;
4317 skip(',');
4318 if (l == FUNC_NEW && tok == TOK_DOTS) {
4319 l = FUNC_ELLIPSIS;
4320 next();
4321 break;
4323 if (l == FUNC_NEW && !parse_btype(&pt, &ad1))
4324 tcc_error("invalid type");
4326 } else
4327 /* if no parameters, then old type prototype */
4328 l = FUNC_OLD;
4329 skip(')');
4330 /* NOTE: const is ignored in returned type as it has a special
4331 meaning in gcc / C++ */
4332 type->t &= ~VT_CONSTANT;
4333 /* some ancient pre-K&R C allows a function to return an array
4334 and the array brackets to be put after the arguments, such
4335 that "int c()[]" means something like "int[] c()" */
4336 if (tok == '[') {
4337 next();
4338 skip(']'); /* only handle simple "[]" */
4339 mk_pointer(type);
4341 /* we push an anonymous symbol which will contain the function prototype */
4342 ad->f.func_args = arg_size;
4343 ad->f.func_type = l;
4344 s = sym_push(SYM_FIELD, type, 0, 0);
4345 s->a = ad->a;
4346 s->f = ad->f;
4347 s->next = first;
4348 type->t = VT_FUNC;
4349 type->ref = s;
4350 } else if (tok == '[') {
4351 int saved_nocode_wanted = nocode_wanted;
4352 /* array definition */
4353 next();
4354 while (1) {
4355 /* XXX The optional type-quals and static should only be accepted
4356 in parameter decls. The '*' as well, and then even only
4357 in prototypes (not function defs). */
4358 switch (tok) {
4359 case TOK_RESTRICT1: case TOK_RESTRICT2: case TOK_RESTRICT3:
4360 case TOK_CONST1:
4361 case TOK_VOLATILE1:
4362 case TOK_STATIC:
4363 case '*':
4364 next();
4365 continue;
4366 default:
4367 break;
4369 break;
4371 n = -1;
4372 t1 = 0;
4373 if (tok != ']') {
4374 if (!local_stack || (storage & VT_STATIC))
4375 vpushi(expr_const());
4376 else {
4377 /* The length of a VLA (which can only occur with local_stack
4378 && !VT_STATIC) must always be evaluated, even under nocode_wanted,
4379 so that its size slot is initialized (e.g. under sizeof
4380 or typeof). */
4381 nocode_wanted = 0;
4382 gexpr();
4384 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4385 n = vtop->c.i;
4386 if (n < 0)
4387 tcc_error("invalid array size");
4388 } else {
4389 if (!is_integer_btype(vtop->type.t & VT_BTYPE))
4390 tcc_error("size of variable length array should be an integer");
4391 t1 = VT_VLA;
4394 skip(']');
4395 /* parse next post type */
4396 post_type(type, ad, storage, 0);
4397 if (type->t == VT_FUNC)
4398 tcc_error("declaration of an array of functions");
4399 t1 |= type->t & VT_VLA;
4401 if (t1 & VT_VLA) {
4402 loc -= type_size(&int_type, &align);
4403 loc &= -align;
4404 n = loc;
4406 vla_runtime_type_size(type, &align);
4407 gen_op('*');
4408 vset(&int_type, VT_LOCAL|VT_LVAL, n);
4409 vswap();
4410 vstore();
4412 if (n != -1)
4413 vpop();
4414 nocode_wanted = saved_nocode_wanted;
4416 /* we push an anonymous symbol which will contain the array
4417 element type */
4418 s = sym_push(SYM_FIELD, type, 0, n);
4419 type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
4420 type->ref = s;
4422 return 1;
4425 /* Parse a type declarator (except basic type), and return the type
4426 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4427 expected. 'type' should contain the basic type. 'ad' is the
4428 attribute definition of the basic type. It can be modified by
4429 type_decl(). If this (possibly abstract) declarator is a pointer chain
4430 it returns the innermost pointed-to type (equals *type, but is a different
4431 pointer), otherwise it returns type itself; the result is used for recursive calls. */
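/* Illustrative example (added for clarity, not in the original source):
   for the declaration
       char *(*p)[4];
   the leading '*' is consumed here, the '(' is found not to start a
   parameter list, so type_decl() recurses to parse '*p', and the
   trailing '[4]' is then applied via post_type() to the innermost
   pointed-to type, giving "pointer to array[4] of pointer to char". */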
4432 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td)
4434 CType *post, *ret;
4435 int qualifiers, storage;
4437 /* recursive type, remove storage bits first, apply them later again */
4438 storage = type->t & VT_STORAGE;
4439 type->t &= ~VT_STORAGE;
4440 post = ret = type;
4442 while (tok == '*') {
4443 qualifiers = 0;
4444 redo:
4445 next();
4446 switch(tok) {
4447 case TOK_CONST1:
4448 case TOK_CONST2:
4449 case TOK_CONST3:
4450 qualifiers |= VT_CONSTANT;
4451 goto redo;
4452 case TOK_VOLATILE1:
4453 case TOK_VOLATILE2:
4454 case TOK_VOLATILE3:
4455 qualifiers |= VT_VOLATILE;
4456 goto redo;
4457 case TOK_RESTRICT1:
4458 case TOK_RESTRICT2:
4459 case TOK_RESTRICT3:
4460 goto redo;
4461 /* XXX: clarify attribute handling */
4462 case TOK_ATTRIBUTE1:
4463 case TOK_ATTRIBUTE2:
4464 parse_attribute(ad);
4465 break;
4467 mk_pointer(type);
4468 type->t |= qualifiers;
4469 if (ret == type)
4470 /* innermost pointed to type is the one for the first derivation */
4471 ret = pointed_type(type);
4474 if (tok == '(') {
4475 /* This is possibly a parameter type list for abstract declarators
4476 ('int ()'), use post_type for testing this. */
4477 if (!post_type(type, ad, 0, td)) {
4478 /* It's not, so it's a nested declarator, and the post operations
4479 apply to the innermost pointed to type (if any). */
4480 /* XXX: this is not correct to modify 'ad' at this point, but
4481 the syntax is not clear */
4482 parse_attribute(ad);
4483 post = type_decl(type, ad, v, td);
4484 skip(')');
4486 } else if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
4487 /* type identifier */
4488 *v = tok;
4489 next();
4490 } else {
4491 if (!(td & TYPE_ABSTRACT))
4492 expect("identifier");
4493 *v = 0;
4495 post_type(post, ad, storage, 0);
4496 parse_attribute(ad);
4497 type->t |= storage;
4498 return ret;
4501 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
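/* Illustrative example (added for clarity, not in the original source):
   for 'unsigned short' this yields VT_LVAL | VT_LVAL_SHORT | VT_LVAL_UNSIGNED,
   while for 'int' and wider types plain VT_LVAL is sufficient. */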
4502 ST_FUNC int lvalue_type(int t)
4504 int bt, r;
4505 r = VT_LVAL;
4506 bt = t & VT_BTYPE;
4507 if (bt == VT_BYTE || bt == VT_BOOL)
4508 r |= VT_LVAL_BYTE;
4509 else if (bt == VT_SHORT)
4510 r |= VT_LVAL_SHORT;
4511 else
4512 return r;
4513 if (t & VT_UNSIGNED)
4514 r |= VT_LVAL_UNSIGNED;
4515 return r;
4518 /* indirection with full error checking and bounds check */
4519 ST_FUNC void indir(void)
4521 if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
4522 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
4523 return;
4524 expect("pointer");
4526 if (vtop->r & VT_LVAL)
4527 gv(RC_INT);
4528 vtop->type = *pointed_type(&vtop->type);
4529 /* Arrays and functions are never lvalues */
4530 if (!(vtop->type.t & VT_ARRAY) && !(vtop->type.t & VT_VLA)
4531 && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
4532 vtop->r |= lvalue_type(vtop->type.t);
4533 /* if bound checking, the referenced pointer must be checked */
4534 #ifdef CONFIG_TCC_BCHECK
4535 if (tcc_state->do_bounds_check)
4536 vtop->r |= VT_MUSTBOUND;
4537 #endif
4541 /* pass a parameter to a function and do type checking and casting */
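/* Illustrative example (added for clarity, not in the original source):
       int printf(const char *, ...);
       float f = 1.5f;
       printf("%f", f);
   here 'f' falls in the '...' part, so only the default float -> double
   promotion is applied; for declared parameters the argument is instead
   cast to the parameter's type via gen_assign_cast(). */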
4542 static void gfunc_param_typed(Sym *func, Sym *arg)
4544 int func_type;
4545 CType type;
4547 func_type = func->f.func_type;
4548 if (func_type == FUNC_OLD ||
4549 (func_type == FUNC_ELLIPSIS && arg == NULL)) {
4550 /* default casting : only need to convert float to double */
4551 if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
4552 gen_cast_s(VT_DOUBLE);
4553 } else if (vtop->type.t & VT_BITFIELD) {
4554 type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
4555 type.ref = vtop->type.ref;
4556 gen_cast(&type);
4558 } else if (arg == NULL) {
4559 tcc_error("too many arguments to function");
4560 } else {
4561 type = arg->type;
4562 type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
4563 gen_assign_cast(&type);
4567 /* parse an expression and return its type without any side effect. */
4568 static void expr_type(CType *type, void (*expr_fn)(void))
4570 nocode_wanted++;
4571 expr_fn();
4572 *type = vtop->type;
4573 vpop();
4574 nocode_wanted--;
4577 /* parse an expression of the form '(type)' or '(expr)' and return its
4578 type */
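/* Illustrative example (added for clarity, not in the original source):
   '(int *)' takes the type branch and is parsed as an abstract
   declarator, while '(x + 1)' takes the expression branch: it is parsed
   under nocode_wanted, so only its type is recorded and no code is
   generated. */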
4579 static void parse_expr_type(CType *type)
4581 int n;
4582 AttributeDef ad;
4584 skip('(');
4585 if (parse_btype(type, &ad)) {
4586 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4587 } else {
4588 expr_type(type, gexpr);
4590 skip(')');
4593 static void parse_type(CType *type)
4595 AttributeDef ad;
4596 int n;
4598 if (!parse_btype(type, &ad)) {
4599 expect("type");
4601 type_decl(type, &ad, &n, TYPE_ABSTRACT);
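/* Illustrative example (added for clarity, not in the original source):
   the 'args' string lists the expected arguments, one character each:
   'e' = expression, 't' = type name.  For instance
       parse_builtin_params(0, "et");
   parses "(expr, type)", and a non-zero 'nc' wraps the parsing in
   nocode_wanted so no code is generated for the arguments. */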
4604 static void parse_builtin_params(int nc, const char *args)
4606 char c, sep = '(';
4607 CType t;
4608 if (nc)
4609 nocode_wanted++;
4610 next();
4611 while ((c = *args++)) {
4612 skip(sep);
4613 sep = ',';
4614 switch (c) {
4615 case 'e': expr_eq(); continue;
4616 case 't': parse_type(&t); vpush(&t); continue;
4617 default: tcc_error("internal error"); break;
4620 skip(')');
4621 if (nc)
4622 nocode_wanted--;
4625 ST_FUNC void unary(void)
4627 int n, t, align, size, r, sizeof_caller;
4628 CType type;
4629 Sym *s;
4630 AttributeDef ad;
4632 sizeof_caller = in_sizeof;
4633 in_sizeof = 0;
4634 type.ref = NULL;
4635 /* XXX: GCC 2.95.3 does not generate a table although it would be
4636 better here */
4637 tok_next:
4638 switch(tok) {
4639 case TOK_EXTENSION:
4640 next();
4641 goto tok_next;
4642 case TOK_LCHAR:
4643 #ifdef TCC_TARGET_PE
4644 t = VT_SHORT|VT_UNSIGNED;
4645 goto push_tokc;
4646 #endif
4647 case TOK_CINT:
4648 case TOK_CCHAR:
4649 t = VT_INT;
4650 push_tokc:
4651 type.t = t;
4652 vsetc(&type, VT_CONST, &tokc);
4653 next();
4654 break;
4655 case TOK_CUINT:
4656 t = VT_INT | VT_UNSIGNED;
4657 goto push_tokc;
4658 case TOK_CLLONG:
4659 t = VT_LLONG;
4660 goto push_tokc;
4661 case TOK_CULLONG:
4662 t = VT_LLONG | VT_UNSIGNED;
4663 goto push_tokc;
4664 case TOK_CFLOAT:
4665 t = VT_FLOAT;
4666 goto push_tokc;
4667 case TOK_CDOUBLE:
4668 t = VT_DOUBLE;
4669 goto push_tokc;
4670 case TOK_CLDOUBLE:
4671 t = VT_LDOUBLE;
4672 goto push_tokc;
4673 case TOK_CLONG:
4674 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG;
4675 goto push_tokc;
4676 case TOK_CULONG:
4677 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG | VT_UNSIGNED;
4678 goto push_tokc;
4679 case TOK___FUNCTION__:
4680 if (!gnu_ext)
4681 goto tok_identifier;
4682 /* fall thru */
4683 case TOK___FUNC__:
4685 void *ptr;
4686 int len;
4687 /* special function name identifier */
4688 len = strlen(funcname) + 1;
4689 /* generate char[len] type */
4690 type.t = VT_BYTE;
4691 mk_pointer(&type);
4692 type.t |= VT_ARRAY;
4693 type.ref->c = len;
4694 vpush_ref(&type, data_section, data_section->data_offset, len);
4695 if (!NODATA_WANTED) {
4696 ptr = section_ptr_add(data_section, len);
4697 memcpy(ptr, funcname, len);
4699 next();
4701 break;
4702 case TOK_LSTR:
4703 #ifdef TCC_TARGET_PE
4704 t = VT_SHORT | VT_UNSIGNED;
4705 #else
4706 t = VT_INT;
4707 #endif
4708 goto str_init;
4709 case TOK_STR:
4710 /* string parsing */
4711 t = VT_BYTE;
4712 if (tcc_state->char_is_unsigned)
4713 t = VT_BYTE | VT_UNSIGNED;
4714 str_init:
4715 if (tcc_state->warn_write_strings)
4716 t |= VT_CONSTANT;
4717 type.t = t;
4718 mk_pointer(&type);
4719 type.t |= VT_ARRAY;
4720 memset(&ad, 0, sizeof(AttributeDef));
4721 decl_initializer_alloc(&type, &ad, VT_CONST, 2, 0, 0);
4722 break;
4723 case '(':
4724 next();
4725 /* cast ? */
4726 if (parse_btype(&type, &ad)) {
4727 type_decl(&type, &ad, &n, TYPE_ABSTRACT);
4728 skip(')');
4729 /* check ISOC99 compound literal */
4730 if (tok == '{') {
4731 /* data is allocated locally by default */
4732 if (global_expr)
4733 r = VT_CONST;
4734 else
4735 r = VT_LOCAL;
4736 /* all except arrays are lvalues */
4737 if (!(type.t & VT_ARRAY))
4738 r |= lvalue_type(type.t);
4739 memset(&ad, 0, sizeof(AttributeDef));
4740 decl_initializer_alloc(&type, &ad, r, 1, 0, 0);
4741 } else {
4742 if (sizeof_caller) {
4743 vpush(&type);
4744 return;
4746 unary();
4747 gen_cast(&type);
4749 } else if (tok == '{') {
4750 int saved_nocode_wanted = nocode_wanted;
4751 if (const_wanted)
4752 tcc_error("expected constant");
4753 /* save all registers */
4754 save_regs(0);
4755 /* statement expression : we do not accept break/continue
4756 inside as GCC does. We do retain the nocode_wanted state,
4757 as statement expressions can't ever be entered from the
4758 outside, so any reactivation of code emission (from labels
4759 or loop heads) can be disabled again after the end of it. */
4760 block(NULL, NULL, 1);
4761 nocode_wanted = saved_nocode_wanted;
4762 skip(')');
4763 } else {
4764 gexpr();
4765 skip(')');
4767 break;
4768 case '*':
4769 next();
4770 unary();
4771 indir();
4772 break;
4773 case '&':
4774 next();
4775 unary();
4776 /* function names must be treated as function pointers,
4777 except for unary '&' and sizeof. Since we consider that
4778 functions are not lvalues, we only have to handle it
4779 there and in function calls. */
4780 /* arrays can also be used although they are not lvalues */
4781 if ((vtop->type.t & VT_BTYPE) != VT_FUNC &&
4782 !(vtop->type.t & VT_ARRAY))
4783 test_lvalue();
4784 mk_pointer(&vtop->type);
4785 gaddrof();
4786 break;
4787 case '!':
4788 next();
4789 unary();
4790 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4791 gen_cast_s(VT_BOOL);
4792 vtop->c.i = !vtop->c.i;
4793 } else if ((vtop->r & VT_VALMASK) == VT_CMP)
4794 vtop->c.i ^= 1;
4795 else {
4796 save_regs(1);
4797 vseti(VT_JMP, gvtst(1, 0));
4799 break;
4800 case '~':
4801 next();
4802 unary();
4803 vpushi(-1);
4804 gen_op('^');
4805 break;
4806 case '+':
4807 next();
4808 unary();
4809 if ((vtop->type.t & VT_BTYPE) == VT_PTR)
4810 tcc_error("pointer not accepted for unary plus");
4811 /* In order to force a cast, we add zero, except for floating point
4812 where we really need a no-op (otherwise -0.0 would be transformed
4813 into +0.0). */
4814 if (!is_float(vtop->type.t)) {
4815 vpushi(0);
4816 gen_op('+');
4818 break;
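/* Illustrative example (added for clarity, not in the original source):
   for 'int a[10];' a sizeof(a) pushes a compile-time constant, while for
   a VLA such as 'int n = 5; char v[n];' sizeof(v) computes the size at
   run time via vla_runtime_type_size().  _Alignof/__alignof__ take the
   same path but push the alignment instead; the result is unsigned. */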
4819 case TOK_SIZEOF:
4820 case TOK_ALIGNOF1:
4821 case TOK_ALIGNOF2:
4822 t = tok;
4823 next();
4824 in_sizeof++;
4825 expr_type(&type, unary); /* performs in_sizeof = 0 as a side effect */
4826 s = vtop[1].sym; /* hack: accessing previous vtop */
4827 size = type_size(&type, &align);
4828 if (s && s->a.aligned)
4829 align = 1 << (s->a.aligned - 1);
4830 if (t == TOK_SIZEOF) {
4831 if (!(type.t & VT_VLA)) {
4832 if (size < 0)
4833 tcc_error("sizeof applied to an incomplete type");
4834 vpushs(size);
4835 } else {
4836 vla_runtime_type_size(&type, &align);
4838 } else {
4839 vpushs(align);
4841 vtop->type.t |= VT_UNSIGNED;
4842 break;
4844 case TOK_builtin_expect:
4845 /* __builtin_expect is a no-op for now */
4846 parse_builtin_params(0, "ee");
4847 vpop();
4848 break;
4849 case TOK_builtin_types_compatible_p:
4850 parse_builtin_params(0, "tt");
4851 vtop[-1].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
4852 vtop[0].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
4853 n = is_compatible_types(&vtop[-1].type, &vtop[0].type);
4854 vtop -= 2;
4855 vpushi(n);
4856 break;
4857 case TOK_builtin_choose_expr:
4859 int64_t c;
4860 next();
4861 skip('(');
4862 c = expr_const64();
4863 skip(',');
4864 if (!c) {
4865 nocode_wanted++;
4867 expr_eq();
4868 if (!c) {
4869 vpop();
4870 nocode_wanted--;
4872 skip(',');
4873 if (c) {
4874 nocode_wanted++;
4876 expr_eq();
4877 if (c) {
4878 vpop();
4879 nocode_wanted--;
4881 skip(')');
4883 break;
4884 case TOK_builtin_constant_p:
4885 parse_builtin_params(1, "e");
4886 n = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
4887 vtop--;
4888 vpushi(n);
4889 break;
4890 case TOK_builtin_frame_address:
4891 case TOK_builtin_return_address:
4893 int tok1 = tok;
4894 int level;
4895 next();
4896 skip('(');
4897 if (tok != TOK_CINT) {
4898 tcc_error("%s only takes positive integers",
4899 tok1 == TOK_builtin_return_address ?
4900 "__builtin_return_address" :
4901 "__builtin_frame_address");
4903 level = (uint32_t)tokc.i;
4904 next();
4905 skip(')');
4906 type.t = VT_VOID;
4907 mk_pointer(&type);
4908 vset(&type, VT_LOCAL, 0); /* local frame */
4909 while (level--) {
4910 mk_pointer(&vtop->type);
4911 indir(); /* -> parent frame */
4913 if (tok1 == TOK_builtin_return_address) {
4914 // assume return address is just above frame pointer on stack
4915 vpushi(PTR_SIZE);
4916 gen_op('+');
4917 mk_pointer(&vtop->type);
4918 indir();
4921 break;
4922 #ifdef TCC_TARGET_X86_64
4923 #ifdef TCC_TARGET_PE
4924 case TOK_builtin_va_start:
4925 parse_builtin_params(0, "ee");
4926 r = vtop->r & VT_VALMASK;
4927 if (r == VT_LLOCAL)
4928 r = VT_LOCAL;
4929 if (r != VT_LOCAL)
4930 tcc_error("__builtin_va_start expects a local variable");
4931 vtop->r = r;
4932 vtop->type = char_pointer_type;
4933 vtop->c.i += 8;
4934 vstore();
4935 break;
4936 #else
4937 case TOK_builtin_va_arg_types:
4938 parse_builtin_params(0, "t");
4939 vpushi(classify_x86_64_va_arg(&vtop->type));
4940 vswap();
4941 vpop();
4942 break;
4943 #endif
4944 #endif
4946 #ifdef TCC_TARGET_ARM64
4947 case TOK___va_start: {
4948 parse_builtin_params(0, "ee");
4949 //xx check types
4950 gen_va_start();
4951 vpushi(0);
4952 vtop->type.t = VT_VOID;
4953 break;
4955 case TOK___va_arg: {
4956 parse_builtin_params(0, "et");
4957 type = vtop->type;
4958 vpop();
4959 //xx check types
4960 gen_va_arg(&type);
4961 vtop->type = type;
4962 break;
4964 case TOK___arm64_clear_cache: {
4965 parse_builtin_params(0, "ee");
4966 gen_clear_cache();
4967 vpushi(0);
4968 vtop->type.t = VT_VOID;
4969 break;
4971 #endif
4972 /* pre operations */
4973 case TOK_INC:
4974 case TOK_DEC:
4975 t = tok;
4976 next();
4977 unary();
4978 inc(0, t);
4979 break;
4980 case '-':
4981 next();
4982 unary();
4983 t = vtop->type.t & VT_BTYPE;
4984 if (is_float(t)) {
4985 /* In IEEE negate(x) isn't subtract(0,x), but rather
4986 subtract(-0, x). */
4987 vpush(&vtop->type);
4988 if (t == VT_FLOAT)
4989 vtop->c.f = -1.0 * 0.0;
4990 else if (t == VT_DOUBLE)
4991 vtop->c.d = -1.0 * 0.0;
4992 else
4993 vtop->c.ld = -1.0 * 0.0;
4994 } else
4995 vpushi(0);
4996 vswap();
4997 gen_op('-');
4998 break;
4999 case TOK_LAND:
5000 if (!gnu_ext)
5001 goto tok_identifier;
5002 next();
5003 /* allow taking the address of a label */
5004 if (tok < TOK_UIDENT)
5005 expect("label identifier");
5006 s = label_find(tok);
5007 if (!s) {
5008 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
5009 } else {
5010 if (s->r == LABEL_DECLARED)
5011 s->r = LABEL_FORWARD;
5013 if (!s->type.t) {
5014 s->type.t = VT_VOID;
5015 mk_pointer(&s->type);
5016 s->type.t |= VT_STATIC;
5018 vpushsym(&s->type, s);
5019 next();
5020 break;
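/* Illustrative example (added for clarity, not in the original source):
       _Generic(x, int: "is int", float: "is float", default: "other")
   the controlling expression's type is computed without side effects,
   the matching (or default) association's expression is saved with
   skip_or_save_block() and re-parsed afterwards, while the other
   associations are skipped. */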
5022 case TOK_GENERIC:
5024 CType controlling_type;
5025 int has_default = 0;
5026 int has_match = 0;
5027 int learn = 0;
5028 TokenString *str = NULL;
5030 next();
5031 skip('(');
5032 expr_type(&controlling_type, expr_eq);
5033 controlling_type.t &= ~(VT_CONSTANT | VT_VOLATILE | VT_ARRAY);
5034 if ((controlling_type.t & VT_BTYPE) == VT_FUNC)
5035 mk_pointer(&controlling_type);
5036 for (;;) {
5037 learn = 0;
5038 skip(',');
5039 if (tok == TOK_DEFAULT) {
5040 if (has_default)
5041 tcc_error("too many 'default'");
5042 has_default = 1;
5043 if (!has_match)
5044 learn = 1;
5045 next();
5046 } else {
5047 AttributeDef ad_tmp;
5048 int itmp;
5049 CType cur_type;
5050 parse_btype(&cur_type, &ad_tmp);
5051 type_decl(&cur_type, &ad_tmp, &itmp, TYPE_ABSTRACT);
5052 if (compare_types(&controlling_type, &cur_type, 0)) {
5053 if (has_match) {
5054 tcc_error("type match twice");
5056 has_match = 1;
5057 learn = 1;
5060 skip(':');
5061 if (learn) {
5062 if (str)
5063 tok_str_free(str);
5064 skip_or_save_block(&str);
5065 } else {
5066 skip_or_save_block(NULL);
5068 if (tok == ')')
5069 break;
5071 if (!str) {
5072 char buf[60];
5073 type_to_str(buf, sizeof buf, &controlling_type, NULL);
5074 tcc_error("type '%s' does not match any association", buf);
5076 begin_macro(str, 1);
5077 next();
5078 expr_eq();
5079 if (tok != TOK_EOF)
5080 expect(",");
5081 end_macro();
5082 next();
5083 break;
5085 // special qnan, snan and infinity values
5086 case TOK___NAN__:
5087 n = 0x7fc00000;
5088 special_math_val:
5089 vpushi(n);
5090 vtop->type.t = VT_FLOAT;
5091 next();
5092 break;
5093 case TOK___SNAN__:
5094 n = 0x7f800001;
5095 goto special_math_val;
5096 case TOK___INF__:
5097 n = 0x7f800000;
5098 goto special_math_val;
5100 default:
5101 tok_identifier:
5102 t = tok;
5103 next();
5104 if (t < TOK_UIDENT)
5105 expect("identifier");
5106 s = sym_find(t);
5107 if (!s || IS_ASM_SYM(s)) {
5108 const char *name = get_tok_str(t, NULL);
5109 if (tok != '(')
5110 tcc_error("'%s' undeclared", name);
5111 /* for simple function calls, we tolerate an undeclared
5112 external reference to an int() function */
5113 if (tcc_state->warn_implicit_function_declaration
5114 #ifdef TCC_TARGET_PE
5115 /* people must be warned about using undeclared WINAPI functions
5116 (which usually start with an uppercase letter) */
5117 || (name[0] >= 'A' && name[0] <= 'Z')
5118 #endif
5120 tcc_warning("implicit declaration of function '%s'", name);
5121 s = external_global_sym(t, &func_old_type, 0);
5124 r = s->r;
5125 /* A symbol that has a register is a local register variable,
5126 which starts out as a VT_LOCAL value. */
5127 if ((r & VT_VALMASK) < VT_CONST)
5128 r = (r & ~VT_VALMASK) | VT_LOCAL;
5130 vset(&s->type, r, s->c);
5131 /* Point to s as backpointer (even without r&VT_SYM).
5132 Will be used by at least the x86 inline asm parser for
5133 regvars. */
5134 vtop->sym = s;
5136 if (r & VT_SYM) {
5137 vtop->c.i = 0;
5138 } else if (r == VT_CONST && IS_ENUM_VAL(s->type.t)) {
5139 vtop->c.i = s->enum_val;
5141 break;
5144 /* post operations */
5145 while (1) {
5146 if (tok == TOK_INC || tok == TOK_DEC) {
5147 inc(1, tok);
5148 next();
5149 } else if (tok == '.' || tok == TOK_ARROW || tok == TOK_CDOUBLE) {
5150 int qualifiers;
5151 /* field */
5152 if (tok == TOK_ARROW)
5153 indir();
5154 qualifiers = vtop->type.t & (VT_CONSTANT | VT_VOLATILE);
5155 test_lvalue();
5156 gaddrof();
5157 /* expect pointer on structure */
5158 if ((vtop->type.t & VT_BTYPE) != VT_STRUCT)
5159 expect("struct or union");
5160 if (tok == TOK_CDOUBLE)
5161 expect("field name");
5162 next();
5163 if (tok == TOK_CINT || tok == TOK_CUINT)
5164 expect("field name");
5165 s = find_field(&vtop->type, tok);
5166 if (!s)
5167 tcc_error("field not found: %s", get_tok_str(tok & ~SYM_FIELD, &tokc));
5168 /* add field offset to pointer */
5169 vtop->type = char_pointer_type; /* change type to 'char *' */
5170 vpushi(s->c);
5171 gen_op('+');
5172 /* change type to field type, and set to lvalue */
5173 vtop->type = s->type;
5174 vtop->type.t |= qualifiers;
5175 /* an array is never an lvalue */
5176 if (!(vtop->type.t & VT_ARRAY)) {
5177 vtop->r |= lvalue_type(vtop->type.t);
5178 #ifdef CONFIG_TCC_BCHECK
5179 /* if bound checking, the referenced pointer must be checked */
5180 if (tcc_state->do_bounds_check && (vtop->r & VT_VALMASK) != VT_LOCAL)
5181 vtop->r |= VT_MUSTBOUND;
5182 #endif
5184 next();
5185 } else if (tok == '[') {
5186 next();
5187 gexpr();
5188 gen_op('+');
5189 indir();
5190 skip(']');
5191 } else if (tok == '(') {
5192 SValue ret;
5193 Sym *sa;
5194 int nb_args, ret_nregs, ret_align, regsize, variadic;
5196 /* function call */
5197 if ((vtop->type.t & VT_BTYPE) != VT_FUNC) {
5198 /* pointer test (no array accepted) */
5199 if ((vtop->type.t & (VT_BTYPE | VT_ARRAY)) == VT_PTR) {
5200 vtop->type = *pointed_type(&vtop->type);
5201 if ((vtop->type.t & VT_BTYPE) != VT_FUNC)
5202 goto error_func;
5203 } else {
5204 error_func:
5205 expect("function pointer");
5207 } else {
5208 vtop->r &= ~VT_LVAL; /* no lvalue */
5210 /* get return type */
5211 s = vtop->type.ref;
5212 next();
5213 sa = s->next; /* first parameter */
5214 nb_args = regsize = 0;
5215 ret.r2 = VT_CONST;
5216 /* compute first implicit argument if a structure is returned */
5217 if ((s->type.t & VT_BTYPE) == VT_STRUCT) {
5218 variadic = (s->f.func_type == FUNC_ELLIPSIS);
5219 ret_nregs = gfunc_sret(&s->type, variadic, &ret.type,
5220 &ret_align, &regsize);
5221 if (!ret_nregs) {
5222 /* get some space for the returned structure */
5223 size = type_size(&s->type, &align);
5224 #ifdef TCC_TARGET_ARM64
5225 /* On arm64, a small struct is returned in registers.
5226 It is much easier to write it to memory if we know
5227 that we are allowed to write some extra bytes, so
5228 round the allocated space up to a power of 2: */
5229 if (size < 16)
5230 while (size & (size - 1))
5231 size = (size | (size - 1)) + 1;
5232 #endif
5233 loc = (loc - size) & -align;
5234 ret.type = s->type;
5235 ret.r = VT_LOCAL | VT_LVAL;
5236 /* pass it as 'int' to avoid structure arg passing
5237 problems */
5238 vseti(VT_LOCAL, loc);
5239 ret.c = vtop->c;
5240 nb_args++;
5242 } else {
5243 ret_nregs = 1;
5244 ret.type = s->type;
5247 if (ret_nregs) {
5248 /* return in register */
5249 if (is_float(ret.type.t)) {
5250 ret.r = reg_fret(ret.type.t);
5251 #ifdef TCC_TARGET_X86_64
5252 if ((ret.type.t & VT_BTYPE) == VT_QFLOAT)
5253 ret.r2 = REG_QRET;
5254 #endif
5255 } else {
5256 #ifndef TCC_TARGET_ARM64
5257 #ifdef TCC_TARGET_X86_64
5258 if ((ret.type.t & VT_BTYPE) == VT_QLONG)
5259 #else
5260 if ((ret.type.t & VT_BTYPE) == VT_LLONG)
5261 #endif
5262 ret.r2 = REG_LRET;
5263 #endif
5264 ret.r = REG_IRET;
5266 ret.c.i = 0;
5268 if (tok != ')') {
5269 for(;;) {
5270 expr_eq();
5271 gfunc_param_typed(s, sa);
5272 nb_args++;
5273 if (sa)
5274 sa = sa->next;
5275 if (tok == ')')
5276 break;
5277 skip(',');
5280 if (sa)
5281 tcc_error("too few arguments to function");
5282 skip(')');
5283 gfunc_call(nb_args);
5285 /* return value */
5286 for (r = ret.r + ret_nregs + !ret_nregs; r-- > ret.r;) {
5287 vsetc(&ret.type, r, &ret.c);
5288 vtop->r2 = ret.r2; /* Loop only happens when r2 is VT_CONST */
5291 /* handle packed struct return */
5292 if (((s->type.t & VT_BTYPE) == VT_STRUCT) && ret_nregs) {
5293 int addr, offset;
5295 size = type_size(&s->type, &align);
5296 /* We're often writing whole registers, so make sure there's enough
5297 space. Assume the register size is a power of 2. */
5298 if (regsize > align)
5299 align = regsize;
5300 loc = (loc - size) & -align;
5301 addr = loc;
5302 offset = 0;
5303 for (;;) {
5304 vset(&ret.type, VT_LOCAL | VT_LVAL, addr + offset);
5305 vswap();
5306 vstore();
5307 vtop--;
5308 if (--ret_nregs == 0)
5309 break;
5310 offset += regsize;
5312 vset(&s->type, VT_LOCAL | VT_LVAL, addr);
5314 } else {
5315 break;
5320 ST_FUNC void expr_prod(void)
5322 int t;
5324 unary();
5325 while (tok == '*' || tok == '/' || tok == '%') {
5326 t = tok;
5327 next();
5328 unary();
5329 gen_op(t);
5333 ST_FUNC void expr_sum(void)
5335 int t;
5337 expr_prod();
5338 while (tok == '+' || tok == '-') {
5339 t = tok;
5340 next();
5341 expr_prod();
5342 gen_op(t);
5346 static void expr_shift(void)
5348 int t;
5350 expr_sum();
5351 while (tok == TOK_SHL || tok == TOK_SAR) {
5352 t = tok;
5353 next();
5354 expr_sum();
5355 gen_op(t);
5359 static void expr_cmp(void)
5361 int t;
5363 expr_shift();
5364 while ((tok >= TOK_ULE && tok <= TOK_GT) ||
5365 tok == TOK_ULT || tok == TOK_UGE) {
5366 t = tok;
5367 next();
5368 expr_shift();
5369 gen_op(t);
5373 static void expr_cmpeq(void)
5375 int t;
5377 expr_cmp();
5378 while (tok == TOK_EQ || tok == TOK_NE) {
5379 t = tok;
5380 next();
5381 expr_cmp();
5382 gen_op(t);
5386 static void expr_and(void)
5388 expr_cmpeq();
5389 while (tok == '&') {
5390 next();
5391 expr_cmpeq();
5392 gen_op('&');
5396 static void expr_xor(void)
5398 expr_and();
5399 while (tok == '^') {
5400 next();
5401 expr_and();
5402 gen_op('^');
5406 static void expr_or(void)
5408 expr_xor();
5409 while (tok == '|') {
5410 next();
5411 expr_xor();
5412 gen_op('|');
5416 static void expr_land(void)
5418 expr_or();
5419 if (tok == TOK_LAND) {
5420 int t = 0;
5421 for(;;) {
5422 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5423 gen_cast_s(VT_BOOL);
5424 if (vtop->c.i) {
5425 vpop();
5426 } else {
5427 nocode_wanted++;
5428 while (tok == TOK_LAND) {
5429 next();
5430 expr_or();
5431 vpop();
5433 nocode_wanted--;
5434 if (t)
5435 gsym(t);
5436 gen_cast_s(VT_INT);
5437 break;
5439 } else {
5440 if (!t)
5441 save_regs(1);
5442 t = gvtst(1, t);
5444 if (tok != TOK_LAND) {
5445 if (t)
5446 vseti(VT_JMPI, t);
5447 else
5448 vpushi(1);
5449 break;
5451 next();
5452 expr_or();
5457 static void expr_lor(void)
5459 expr_land();
5460 if (tok == TOK_LOR) {
5461 int t = 0;
5462 for(;;) {
5463 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5464 gen_cast_s(VT_BOOL);
5465 if (!vtop->c.i) {
5466 vpop();
5467 } else {
5468 nocode_wanted++;
5469 while (tok == TOK_LOR) {
5470 next();
5471 expr_land();
5472 vpop();
5474 nocode_wanted--;
5475 if (t)
5476 gsym(t);
5477 gen_cast_s(VT_INT);
5478 break;
5480 } else {
5481 if (!t)
5482 save_regs(1);
5483 t = gvtst(0, t);
5485 if (tok != TOK_LOR) {
5486 if (t)
5487 vseti(VT_JMP, t);
5488 else
5489 vpushi(0);
5490 break;
5492 next();
5493 expr_land();
5498 /* Assuming vtop is a value used in a conditional context
5499 (i.e. compared with zero) return 0 if it's false, 1 if
5500 true and -1 if it can't be statically determined. */
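/* Illustrative examples (added for clarity, not in the original source):
   'if (0)' yields 0 (branch statically dead), 'if (3)' yields 1
   (statically taken), and 'if (x < y)' yields -1 (must be tested at
   run time; weak symbols are also treated as not statically known). */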
5501 static int condition_3way(void)
5503 int c = -1;
5504 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
5505 (!(vtop->r & VT_SYM) || !vtop->sym->a.weak)) {
5506 vdup();
5507 gen_cast_s(VT_BOOL);
5508 c = vtop->c.i;
5509 vpop();
5511 return c;
5514 static void expr_cond(void)
5516 int tt, u, r1, r2, rc, t1, t2, bt1, bt2, islv, c, g;
5517 SValue sv;
5518 CType type, type1, type2;
5520 expr_lor();
5521 if (tok == '?') {
5522 next();
5523 c = condition_3way();
5524 g = (tok == ':' && gnu_ext);
5525 if (c < 0) {
5526 /* needed to avoid having different registers saved in
5527 each branch */
5528 if (is_float(vtop->type.t)) {
5529 rc = RC_FLOAT;
5530 #ifdef TCC_TARGET_X86_64
5531 if ((vtop->type.t & VT_BTYPE) == VT_LDOUBLE) {
5532 rc = RC_ST0;
5534 #endif
5535 } else
5536 rc = RC_INT;
5537 gv(rc);
5538 save_regs(1);
5539 if (g)
5540 gv_dup();
5541 tt = gvtst(1, 0);
5543 } else {
5544 if (!g)
5545 vpop();
5546 tt = 0;
5549 if (1) {
5550 if (c == 0)
5551 nocode_wanted++;
5552 if (!g)
5553 gexpr();
5555 type1 = vtop->type;
5556 sv = *vtop; /* save value to handle it later */
5557 vtop--; /* no vpop so that FP stack is not flushed */
5558 skip(':');
5560 u = 0;
5561 if (c < 0)
5562 u = gjmp(0);
5563 gsym(tt);
5565 if (c == 0)
5566 nocode_wanted--;
5567 if (c == 1)
5568 nocode_wanted++;
5569 expr_cond();
5570 if (c == 1)
5571 nocode_wanted--;
5573 type2 = vtop->type;
5574 t1 = type1.t;
5575 bt1 = t1 & VT_BTYPE;
5576 t2 = type2.t;
5577 bt2 = t2 & VT_BTYPE;
5578 type.ref = NULL;
5580 /* cast operands to correct type according to ISOC rules */
5581 if (is_float(bt1) || is_float(bt2)) {
5582 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
5583 type.t = VT_LDOUBLE;
5585 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
5586 type.t = VT_DOUBLE;
5587 } else {
5588 type.t = VT_FLOAT;
5590 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
5591 /* cast to biggest op */
5592 type.t = VT_LLONG | VT_LONG;
5593 if (bt1 == VT_LLONG)
5594 type.t &= t1;
5595 if (bt2 == VT_LLONG)
5596 type.t &= t2;
5597 /* convert to unsigned if it does not fit in a long long */
5598 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
5599 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
5600 type.t |= VT_UNSIGNED;
5601 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
5602 /* If one is a null ptr constant the result type
5603 is the other. */
5604 if (is_null_pointer (vtop))
5605 type = type1;
5606 else if (is_null_pointer (&sv))
5607 type = type2;
5608 /* XXX: test pointer compatibility, C99 has more elaborate
5609 rules here. */
5610 else
5611 type = type1;
5612 } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
5613 /* XXX: test function pointer compatibility */
5614 type = bt1 == VT_FUNC ? type1 : type2;
5615 } else if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
5616 /* XXX: test structure compatibility */
5617 type = bt1 == VT_STRUCT ? type1 : type2;
5618 } else if (bt1 == VT_VOID || bt2 == VT_VOID) {
5619 /* NOTE: as an extension, we accept void on only one side */
5620 type.t = VT_VOID;
5621 } else {
5622 /* integer operations */
5623 type.t = VT_INT | (VT_LONG & (t1 | t2));
5624 /* convert to unsigned if it does not fit in an integer */
5625 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
5626 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
5627 type.t |= VT_UNSIGNED;
5629 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
5630 that `(expr ? a : b).mem` does not error with "lvalue expected" */
5631 islv = (vtop->r & VT_LVAL) && (sv.r & VT_LVAL) && VT_STRUCT == (type.t & VT_BTYPE);
5632 islv &= c < 0;
5634 /* now we convert second operand */
5635 if (c != 1) {
5636 gen_cast(&type);
5637 if (islv) {
5638 mk_pointer(&vtop->type);
5639 gaddrof();
5640 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5641 gaddrof();
5644 rc = RC_INT;
5645 if (is_float(type.t)) {
5646 rc = RC_FLOAT;
5647 #ifdef TCC_TARGET_X86_64
5648 if ((type.t & VT_BTYPE) == VT_LDOUBLE) {
5649 rc = RC_ST0;
5651 #endif
5652 } else if ((type.t & VT_BTYPE) == VT_LLONG) {
5653 /* for long longs, we use fixed registers to avoid having
5654 to handle a complicated move */
5655 rc = RC_IRET;
5658 tt = r2 = 0;
5659 if (c < 0) {
5660 r2 = gv(rc);
5661 tt = gjmp(0);
5663 gsym(u);
5665 /* this is horrible, but we must also convert first
5666 operand */
5667 if (c != 0) {
5668 *vtop = sv;
5669 gen_cast(&type);
5670 if (islv) {
5671 mk_pointer(&vtop->type);
5672 gaddrof();
5673 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5674 gaddrof();
5677 if (c < 0) {
5678 r1 = gv(rc);
5679 move_reg(r2, r1, type.t);
5680 vtop->r = r2;
5681 gsym(tt);
5682 if (islv)
5683 indir();
5689 static void expr_eq(void)
5691 int t;
5693 expr_cond();
5694 if (tok == '=' ||
5695 (tok >= TOK_A_MOD && tok <= TOK_A_DIV) ||
5696 tok == TOK_A_XOR || tok == TOK_A_OR ||
5697 tok == TOK_A_SHL || tok == TOK_A_SAR) {
5698 test_lvalue();
5699 t = tok;
5700 next();
5701 if (t == '=') {
5702 expr_eq();
5703 } else {
5704 vdup();
5705 expr_eq();
5706 gen_op(t & 0x7f);
5708 vstore();
5712 ST_FUNC void gexpr(void)
5714 while (1) {
5715 expr_eq();
5716 if (tok != ',')
5717 break;
5718 vpop();
5719 next();
5723 /* parse a constant expression and return value in vtop. */
5724 static void expr_const1(void)
5726 const_wanted++;
5727 nocode_wanted++;
5728 expr_cond();
5729 nocode_wanted--;
5730 const_wanted--;
5733 /* parse an integer constant and return its value. */
5734 static inline int64_t expr_const64(void)
5736 int64_t c;
5737 expr_const1();
5738 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
5739 expect("constant expression");
5740 c = vtop->c.i;
5741 vpop();
5742 return c;
5745 /* parse an integer constant and return its value.
5746 Complain if it doesn't fit in 32 bits (signed or unsigned). */
5747 ST_FUNC int expr_const(void)
5749 int c;
5750 int64_t wc = expr_const64();
5751 c = wc;
5752 if (c != wc && (unsigned)c != wc)
5753 tcc_error("constant exceeds 32 bit");
5754 return c;
5757 /* return the label token if current token is a label, otherwise
5758 return zero */
5759 static int is_label(void)
5761 int last_tok;
5763 /* fast test first */
5764 if (tok < TOK_UIDENT)
5765 return 0;
5766 /* no need to save tokc because tok is an identifier */
5767 last_tok = tok;
5768 next();
5769 if (tok == ':') {
5770 return last_tok;
5771 } else {
5772 unget_tok(last_tok);
5773 return 0;
5777 #ifndef TCC_TARGET_ARM64
5778 static void gfunc_return(CType *func_type)
5780 if ((func_type->t & VT_BTYPE) == VT_STRUCT) {
5781 CType type, ret_type;
5782 int ret_align, ret_nregs, regsize;
5783 ret_nregs = gfunc_sret(func_type, func_var, &ret_type,
5784 &ret_align, &regsize);
5785 if (0 == ret_nregs) {
5786 /* if returning structure, must copy it to implicit
5787 first pointer arg location */
5788 type = *func_type;
5789 mk_pointer(&type);
5790 vset(&type, VT_LOCAL | VT_LVAL, func_vc);
5791 indir();
5792 vswap();
5793 /* copy structure value to pointer */
5794 vstore();
5795 } else {
5796 /* returning structure packed into registers */
5797 int r, size, addr, align;
5798 size = type_size(func_type,&align);
5799 if ((vtop->r != (VT_LOCAL | VT_LVAL) ||
5800 (vtop->c.i & (ret_align-1)))
5801 && (align & (ret_align-1))) {
5802 loc = (loc - size) & -ret_align;
5803 addr = loc;
5804 type = *func_type;
5805 vset(&type, VT_LOCAL | VT_LVAL, addr);
5806 vswap();
5807 vstore();
5808 vpop();
5809 vset(&ret_type, VT_LOCAL | VT_LVAL, addr);
5811 vtop->type = ret_type;
5812 if (is_float(ret_type.t))
5813 r = rc_fret(ret_type.t);
5814 else
5815 r = RC_IRET;
5817 if (ret_nregs == 1)
5818 gv(r);
5819 else {
5820 for (;;) {
5821 vdup();
5822 gv(r);
5823 vpop();
5824 if (--ret_nregs == 0)
5825 break;
5826 /* We assume that when a structure is returned in multiple
5827 registers, their classes are consecutive values of the
5828 sequence s(n) = 2^n */
5829 r <<= 1;
5830 vtop->c.i += regsize;
5834 } else if (is_float(func_type->t)) {
5835 gv(rc_fret(func_type->t));
5836 } else {
5837 gv(RC_IRET);
5839 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
5841 #endif
5843 static int case_cmp(const void *pa, const void *pb)
5845 int64_t a = (*(struct case_t**) pa)->v1;
5846 int64_t b = (*(struct case_t**) pb)->v1;
5847 return a < b ? -1 : a > b;
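/* Illustrative sketch (added for clarity, not in the original source):
   given the sorted case list {1, 3, 5..9, 12} and switch value x,
   gcase() bisects on a middle entry: it tests x <= upper bound and
   x >= lower bound of that entry (two tests so GNU case ranges work),
   recurses on the lower half for x below the entry and loops on the
   upper half; with four or fewer cases left it uses a linear scan. */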
5850 static void gcase(struct case_t **base, int len, int *bsym)
5852 struct case_t *p;
5853 int e;
5854 int ll = (vtop->type.t & VT_BTYPE) == VT_LLONG;
5855 gv(RC_INT);
5856 while (len > 4) {
5857 /* binary search */
5858 p = base[len/2];
5859 vdup();
5860 if (ll)
5861 vpushll(p->v2);
5862 else
5863 vpushi(p->v2);
5864 gen_op(TOK_LE);
5865 e = gtst(1, 0);
5866 vdup();
5867 if (ll)
5868 vpushll(p->v1);
5869 else
5870 vpushi(p->v1);
5871 gen_op(TOK_GE);
5872 gtst_addr(0, p->sym); /* v1 <= x <= v2 */
5873 /* x < v1 */
5874 gcase(base, len/2, bsym);
5875 if (cur_switch->def_sym)
5876 gjmp_addr(cur_switch->def_sym);
5877 else
5878 *bsym = gjmp(*bsym);
5879 /* x > v2 */
5880 gsym(e);
5881 e = len/2 + 1;
5882 base += e; len -= e;
5884 /* linear scan */
5885 while (len--) {
5886 p = *base++;
5887 vdup();
5888 if (ll)
5889 vpushll(p->v2);
5890 else
5891 vpushi(p->v2);
5892 if (p->v1 == p->v2) {
5893 gen_op(TOK_EQ);
5894 gtst_addr(0, p->sym);
5895 } else {
5896 gen_op(TOK_LE);
5897 e = gtst(1, 0);
5898 vdup();
5899 if (ll)
5900 vpushll(p->v1);
5901 else
5902 vpushi(p->v1);
5903 gen_op(TOK_GE);
5904 gtst_addr(0, p->sym);
5905 gsym(e);
5910 static void block(int *bsym, int *csym, int is_expr)
5912 int a, b, c, d, cond;
5913 Sym *s;
5915 /* generate line number info */
5916 if (tcc_state->do_debug)
5917 tcc_debug_line(tcc_state);
5919 if (is_expr) {
5920 /* default return value is (void) */
5921 vpushi(0);
5922 vtop->type.t = VT_VOID;
5925 if (tok == TOK_IF) {
5926 /* if test */
5927 int saved_nocode_wanted = nocode_wanted;
5928 next();
5929 skip('(');
5930 gexpr();
5931 skip(')');
5932 cond = condition_3way();
5933 if (cond == 1)
5934 a = 0, vpop();
5935 else
5936 a = gvtst(1, 0);
5937 if (cond == 0)
5938 nocode_wanted |= 0x20000000;
5939 block(bsym, csym, 0);
5940 if (cond != 1)
5941 nocode_wanted = saved_nocode_wanted;
5942 c = tok;
5943 if (c == TOK_ELSE) {
5944 next();
5945 d = gjmp(0);
5946 gsym(a);
5947 if (cond == 1)
5948 nocode_wanted |= 0x20000000;
5949 block(bsym, csym, 0);
5950 gsym(d); /* patch else jmp */
5951 if (cond != 0)
5952 nocode_wanted = saved_nocode_wanted;
5953 } else
5954 gsym(a);
5955 } else if (tok == TOK_WHILE) {
5956 int saved_nocode_wanted;
5957 nocode_wanted &= ~0x20000000;
5958 next();
5959 d = ind;
5960 vla_sp_restore();
5961 skip('(');
5962 gexpr();
5963 skip(')');
5964 a = gvtst(1, 0);
5965 b = 0;
5966 ++local_scope;
5967 saved_nocode_wanted = nocode_wanted;
5968 block(&a, &b, 0);
5969 nocode_wanted = saved_nocode_wanted;
5970 --local_scope;
5971 gjmp_addr(d);
5972 gsym(a);
5973 gsym_addr(b, d);
5974 } else if (tok == '{') {
5975 Sym *llabel;
5976 int block_vla_sp_loc = vla_sp_loc, saved_vlas_in_scope = vlas_in_scope;
5978 next();
5979 /* record local declaration stack position */
5980 s = local_stack;
5981 llabel = local_label_stack;
5982 ++local_scope;
5984 /* handle local labels declarations */
5985 if (tok == TOK_LABEL) {
5986 next();
5987 for(;;) {
5988 if (tok < TOK_UIDENT)
5989 expect("label identifier");
5990 label_push(&local_label_stack, tok, LABEL_DECLARED);
5991 next();
5992 if (tok == ',') {
5993 next();
5994 } else {
5995 skip(';');
5996 break;
6000 while (tok != '}') {
6001 if ((a = is_label()))
6002 unget_tok(a);
6003 else
6004 decl(VT_LOCAL);
6005 if (tok != '}') {
6006 if (is_expr)
6007 vpop();
6008 block(bsym, csym, is_expr);
6011 /* pop locally defined labels */
6012 label_pop(&local_label_stack, llabel, is_expr);
6013 /* pop locally defined symbols */
6014 --local_scope;
6015 /* In the is_expr case (a statement expression is finished here),
6016 vtop might refer to symbols on the local_stack. Either via the
6017 type or via vtop->sym. We can't pop those nor any that in turn
6018 might be referred to. To make it easier we don't roll back
6019 any symbols in that case; some upper level call to block() will
6020 do that. We do have to remove such symbols from the lookup
6021 tables, though. sym_pop will do that. */
6022 sym_pop(&local_stack, s, is_expr);
6024 /* Pop VLA frames and restore stack pointer if required */
6025 if (vlas_in_scope > saved_vlas_in_scope) {
6026 vla_sp_loc = saved_vlas_in_scope ? block_vla_sp_loc : vla_sp_root_loc;
6027 vla_sp_restore();
6029 vlas_in_scope = saved_vlas_in_scope;
6031 next();
6032 } else if (tok == TOK_RETURN) {
6033 next();
6034 if (tok != ';') {
6035 gexpr();
6036 gen_assign_cast(&func_vt);
6037 if ((func_vt.t & VT_BTYPE) == VT_VOID)
6038 vtop--;
6039 else
6040 gfunc_return(&func_vt);
6042 skip(';');
6043 /* jump unless last stmt in top-level block */
6044 if (tok != '}' || local_scope != 1)
6045 rsym = gjmp(rsym);
6046 nocode_wanted |= 0x20000000;
6047 } else if (tok == TOK_BREAK) {
6048 /* compute jump */
6049 if (!bsym)
6050 tcc_error("cannot break");
6051 *bsym = gjmp(*bsym);
6052 next();
6053 skip(';');
6054 nocode_wanted |= 0x20000000;
6055 } else if (tok == TOK_CONTINUE) {
6056 /* compute jump */
6057 if (!csym)
6058 tcc_error("cannot continue");
6059 vla_sp_restore_root();
6060 *csym = gjmp(*csym);
6061 next();
6062 skip(';');
6063 } else if (tok == TOK_FOR) {
6064 int e;
6065 int saved_nocode_wanted;
6066 nocode_wanted &= ~0x20000000;
6067 next();
6068 skip('(');
6069 s = local_stack;
6070 ++local_scope;
6071 if (tok != ';') {
6072 /* c99 for-loop init decl? */
6073 if (!decl0(VT_LOCAL, 1, NULL)) {
6074 /* no, regular for-loop init expr */
6075 gexpr();
6076 vpop();
6079 skip(';');
6080 d = ind;
6081 c = ind;
6082 vla_sp_restore();
6083 a = 0;
6084 b = 0;
6085 if (tok != ';') {
6086 gexpr();
6087 a = gvtst(1, 0);
6089 skip(';');
6090 if (tok != ')') {
6091 e = gjmp(0);
6092 c = ind;
6093 vla_sp_restore();
6094 gexpr();
6095 vpop();
6096 gjmp_addr(d);
6097 gsym(e);
6099 skip(')');
6100 saved_nocode_wanted = nocode_wanted;
6101 block(&a, &b, 0);
6102 nocode_wanted = saved_nocode_wanted;
6103 gjmp_addr(c);
6104 gsym(a);
6105 gsym_addr(b, c);
6106 --local_scope;
6107 sym_pop(&local_stack, s, 0);
6109 } else
6110 if (tok == TOK_DO) {
6111 int saved_nocode_wanted;
6112 nocode_wanted &= ~0x20000000;
6113 next();
6114 a = 0;
6115 b = 0;
6116 d = ind;
6117 vla_sp_restore();
6118 saved_nocode_wanted = nocode_wanted;
6119 block(&a, &b, 0);
6120 skip(TOK_WHILE);
6121 skip('(');
6122 gsym(b);
6123 if (b)
6124 nocode_wanted = saved_nocode_wanted;
6125 gexpr();
6126 c = gvtst(0, 0);
6127 gsym_addr(c, d);
6128 nocode_wanted = saved_nocode_wanted;
6129 skip(')');
6130 gsym(a);
6131 skip(';');
6132 } else
6133 if (tok == TOK_SWITCH) {
6134 struct switch_t *saved, sw;
6135 int saved_nocode_wanted = nocode_wanted;
6136 SValue switchval;
6137 next();
6138 skip('(');
6139 gexpr();
6140 skip(')');
6141 switchval = *vtop--;
6142 a = 0;
6143 b = gjmp(0); /* jump to first case */
6144 sw.p = NULL; sw.n = 0; sw.def_sym = 0;
6145 saved = cur_switch;
6146 cur_switch = &sw;
6147 block(&a, csym, 0);
6148 nocode_wanted = saved_nocode_wanted;
6149 a = gjmp(a); /* add implicit break */
6150 /* case lookup */
6151 gsym(b);
6152 qsort(sw.p, sw.n, sizeof(void*), case_cmp);
6153 for (b = 1; b < sw.n; b++)
6154 if (sw.p[b - 1]->v2 >= sw.p[b]->v1)
6155 tcc_error("duplicate case value");
6156 /* Our switch table sorting is signed, so the compared
6157 value needs to be as well when it's 64bit. */
6158 if ((switchval.type.t & VT_BTYPE) == VT_LLONG)
6159 switchval.type.t &= ~VT_UNSIGNED;
6160 vpushv(&switchval);
6161 gcase(sw.p, sw.n, &a);
6162 vpop();
6163 if (sw.def_sym)
6164 gjmp_addr(sw.def_sym);
6165 dynarray_reset(&sw.p, &sw.n);
6166 cur_switch = saved;
6167 /* break label */
6168 gsym(a);
6169 } else
6170 if (tok == TOK_CASE) {
6171 struct case_t *cr = tcc_malloc(sizeof(struct case_t));
6172 if (!cur_switch)
6173 expect("switch");
6174 nocode_wanted &= ~0x20000000;
6175 next();
6176 cr->v1 = cr->v2 = expr_const64();
6177 if (gnu_ext && tok == TOK_DOTS) {
6178 next();
6179 cr->v2 = expr_const64();
6180 if (cr->v2 < cr->v1)
6181 tcc_warning("empty case range");
6183 cr->sym = ind;
6184 dynarray_add(&cur_switch->p, &cur_switch->n, cr);
6185 skip(':');
6186 is_expr = 0;
6187 goto block_after_label;
6188 } else
6189 if (tok == TOK_DEFAULT) {
6190 next();
6191 skip(':');
6192 if (!cur_switch)
6193 expect("switch");
6194 if (cur_switch->def_sym)
6195 tcc_error("too many 'default'");
6196 cur_switch->def_sym = ind;
6197 is_expr = 0;
6198 goto block_after_label;
6199 } else
6200 if (tok == TOK_GOTO) {
6201 next();
6202 if (tok == '*' && gnu_ext) {
6203 /* computed goto */
6204 next();
6205 gexpr();
6206 if ((vtop->type.t & VT_BTYPE) != VT_PTR)
6207 expect("pointer");
6208 ggoto();
6209 } else if (tok >= TOK_UIDENT) {
6210 s = label_find(tok);
6211 /* put forward definition if needed */
6212 if (!s) {
6213 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
6214 } else {
6215 if (s->r == LABEL_DECLARED)
6216 s->r = LABEL_FORWARD;
6218 vla_sp_restore_root();
6219 if (s->r & LABEL_FORWARD)
6220 s->jnext = gjmp(s->jnext);
6221 else
6222 gjmp_addr(s->jnext);
6223 next();
6224 } else {
6225 expect("label identifier");
6227 skip(';');
6228 } else if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
6229 asm_instr();
6230 } else {
6231 b = is_label();
6232 if (b) {
6233 /* label case */
6234 next();
6235 s = label_find(b);
6236 if (s) {
6237 if (s->r == LABEL_DEFINED)
6238 tcc_error("duplicate label '%s'", get_tok_str(s->v, NULL));
6239 gsym(s->jnext);
6240 s->r = LABEL_DEFINED;
6241 } else {
6242 s = label_push(&global_label_stack, b, LABEL_DEFINED);
6244 s->jnext = ind;
6245 vla_sp_restore();
6246 /* we accept this, but it is a mistake */
6247 block_after_label:
6248 nocode_wanted &= ~0x20000000;
6249 if (tok == '}') {
6250 tcc_warning("deprecated use of label at end of compound statement");
6251 } else {
6252 if (is_expr)
6253 vpop();
6254 block(bsym, csym, is_expr);
6256 } else {
6257 /* expression case */
6258 if (tok != ';') {
6259 if (is_expr) {
6260 vpop();
6261 gexpr();
6262 } else {
6263 gexpr();
6264 vpop();
6267 skip(';');
6272 /* This skips over a stream of tokens containing balanced {} and ()
6273 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
6274 with a '{'). If STR is non-NULL, the skipped tokens are allocated and
6275 stored in *STR. This doesn't check if () and {} are nested correctly,
6276 i.e. "({)}" is accepted. */
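/* Illustrative usage (added for clarity, not in the original source):
   the _Generic() handling above saves the selected association's
   expression this way (str != NULL) for later re-parsing, and discards
   the non-matching associations by passing str == NULL. */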
6277 static void skip_or_save_block(TokenString **str)
6279 int braces = tok == '{';
6280 int level = 0;
6281 if (str)
6282 *str = tok_str_alloc();
6284 while ((level > 0 || (tok != '}' && tok != ',' && tok != ';' && tok != ')'))) {
6285 int t;
6286 if (tok == TOK_EOF) {
6287 if (str || level > 0)
6288 tcc_error("unexpected end of file");
6289 else
6290 break;
6292 if (str)
6293 tok_str_add_tok(*str);
6294 t = tok;
6295 next();
6296 if (t == '{' || t == '(') {
6297 level++;
6298 } else if (t == '}' || t == ')') {
6299 level--;
6300 if (level == 0 && braces && t == '}')
6301 break;
6304 if (str) {
6305 tok_str_add(*str, -1);
6306 tok_str_add(*str, 0);
6310 #define EXPR_CONST 1
6311 #define EXPR_ANY 2
6313 static void parse_init_elem(int expr_type)
6315 int saved_global_expr;
6316 switch(expr_type) {
6317 case EXPR_CONST:
6318 /* compound literals must be allocated globally in this case */
6319 saved_global_expr = global_expr;
6320 global_expr = 1;
6321 expr_const1();
6322 global_expr = saved_global_expr;
6323 /* NOTE: symbols are accepted, as well as lvalue for anon symbols
6324 (compound literals). */
6325 if (((vtop->r & (VT_VALMASK | VT_LVAL)) != VT_CONST
6326 && ((vtop->r & (VT_SYM|VT_LVAL)) != (VT_SYM|VT_LVAL)
6327 || vtop->sym->v < SYM_FIRST_ANOM))
6328 #ifdef TCC_TARGET_PE
6329 || ((vtop->r & VT_SYM) && vtop->sym->a.dllimport)
6330 #endif
6332 tcc_error("initializer element is not constant");
6333 break;
6334 case EXPR_ANY:
6335 expr_eq();
6336 break;
6340 /* put zeros for variable based init */
6341 static void init_putz(Section *sec, unsigned long c, int size)
6343 if (sec) {
6344 /* nothing to do because globals are already set to zero */
6345 } else {
6346 vpush_global_sym(&func_old_type, TOK_memset);
6347 vseti(VT_LOCAL, c);
6348 #ifdef TCC_TARGET_ARM
6349 vpushs(size);
6350 vpushi(0);
6351 #else
6352 vpushi(0);
6353 vpushs(size);
6354 #endif
6355 gfunc_call(3);
6359 /* 'type' is the array or struct type. 'c' is the array or struct
6360 address. cur_field is the pointer to the current
6361 field, for arrays the 'c' member contains the current start
6362 index. 'size_only' is true if only size info is needed (only used
6363 in arrays). al contains the already initialized length of the
6364 current container (starting at c). This returns the new length of that. */
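/* Illustrative example (added for clarity, not in the original source):
       struct P { int x, y; };
       struct P a[4] = { [2].y = 7, [0] = { 1, 2 } };
   each designator ('[2].y', '[0]') repositions the current offset 'c'
   inside the object before the value is stored, and any holes skipped
   over are zero-filled via init_putz(). */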
6365 static int decl_designator(CType *type, Section *sec, unsigned long c,
6366 Sym **cur_field, int size_only, int al)
6368 Sym *s, *f;
6369 int index, index_last, align, l, nb_elems, elem_size;
6370 unsigned long corig = c;
6372 elem_size = 0;
6373 nb_elems = 1;
6374 if (gnu_ext && (l = is_label()) != 0)
6375 goto struct_field;
6376 /* NOTE: we only support ranges for the last designator */
6377 while (nb_elems == 1 && (tok == '[' || tok == '.')) {
6378 if (tok == '[') {
6379 if (!(type->t & VT_ARRAY))
6380 expect("array type");
6381 next();
6382 index = index_last = expr_const();
6383 if (tok == TOK_DOTS && gnu_ext) {
6384 next();
6385 index_last = expr_const();
6387 skip(']');
6388 s = type->ref;
6389 if (index < 0 || (s->c >= 0 && index_last >= s->c) ||
6390 index_last < index)
6391 tcc_error("invalid index");
6392 if (cur_field)
6393 (*cur_field)->c = index_last;
6394 type = pointed_type(type);
6395 elem_size = type_size(type, &align);
6396 c += index * elem_size;
6397 nb_elems = index_last - index + 1;
6398 } else {
6399 next();
6400 l = tok;
6401 struct_field:
6402 next();
6403 if ((type->t & VT_BTYPE) != VT_STRUCT)
6404 expect("struct/union type");
6405 f = find_field(type, l);
6406 if (!f)
6407 expect("field");
6408 if (cur_field)
6409 *cur_field = f;
6410 type = &f->type;
6411 c += f->c;
6413 cur_field = NULL;
6415 if (!cur_field) {
6416 if (tok == '=') {
6417 next();
6418 } else if (!gnu_ext) {
6419 expect("=");
6421 } else {
6422 if (type->t & VT_ARRAY) {
6423 index = (*cur_field)->c;
6424 if (type->ref->c >= 0 && index >= type->ref->c)
6425 tcc_error("index too large");
6426 type = pointed_type(type);
6427 c += index * type_size(type, &align);
6428 } else {
6429 f = *cur_field;
6430 while (f && (f->v & SYM_FIRST_ANOM) && (f->type.t & VT_BITFIELD))
6431 *cur_field = f = f->next;
6432 if (!f)
6433 tcc_error("too many field init");
6434 type = &f->type;
6435 c += f->c;
6438 /* must put zero in holes (note that doing it that way
6439 ensures that it even works with designators) */
6440 if (!size_only && c - corig > al)
6441 init_putz(sec, corig + al, c - corig - al);
6442 decl_initializer(type, sec, c, 0, size_only);
6444 /* XXX: make it more general */
6445 if (!size_only && nb_elems > 1) {
6446 unsigned long c_end;
6447 uint8_t *src, *dst;
6448 int i;
6450 if (!sec) {
6451 vset(type, VT_LOCAL|VT_LVAL, c);
6452 for (i = 1; i < nb_elems; i++) {
6453 vset(type, VT_LOCAL|VT_LVAL, c + elem_size * i);
6454 vswap();
6455 vstore();
6457 vpop();
6458 } else if (!NODATA_WANTED) {
6459 c_end = c + nb_elems * elem_size;
6460 if (c_end > sec->data_allocated)
6461 section_realloc(sec, c_end);
6462 src = sec->data + c;
6463 dst = src;
6464 for(i = 1; i < nb_elems; i++) {
6465 dst += elem_size;
6466 memcpy(dst, src, elem_size);
6470 c += nb_elems * type_size(type, &align);
6471 if (c - corig > al)
6472 al = c - corig;
6473 return al;
6476 /* store a value or an expression directly in global data or in local array */
6477 static void init_putv(CType *type, Section *sec, unsigned long c)
6479 int bt;
6480 void *ptr;
6481 CType dtype;
6483 dtype = *type;
6484 dtype.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
6486 if (sec) {
6487 int size, align;
6488 /* XXX: not portable */
6489 /* XXX: generate error if incorrect relocation */
6490 gen_assign_cast(&dtype);
6491 bt = type->t & VT_BTYPE;
6493 if ((vtop->r & VT_SYM)
6494 && bt != VT_PTR
6495 && bt != VT_FUNC
6496 && (bt != (PTR_SIZE == 8 ? VT_LLONG : VT_INT)
6497 || (type->t & VT_BITFIELD))
6498 && !((vtop->r & VT_CONST) && vtop->sym->v >= SYM_FIRST_ANOM)
6500 tcc_error("initializer element is not computable at load time");
6502 if (NODATA_WANTED) {
6503 vtop--;
6504 return;
6507 size = type_size(type, &align);
6508 section_reserve(sec, c + size);
6509 ptr = sec->data + c;
6511 /* XXX: make code faster ? */
6512 if ((vtop->r & (VT_SYM|VT_CONST)) == (VT_SYM|VT_CONST) &&
6513 vtop->sym->v >= SYM_FIRST_ANOM &&
6514 /* XXX This rejects compound literals like
6515 '(void *){ptr}'. The problem is that '&sym' is
6516 represented the same way, which would be ruled out
6517 by the SYM_FIRST_ANOM check above, but also '"string"'
6518 in 'char *p = "string"' is represented the same
6519 with the type being VT_PTR and the symbol being an
6520 anonymous one. That is, there's no difference in vtop
6521 between '(void *){x}' and '&(void *){x}'. Ignore
6522 pointer typed entities here. Hopefully no real code
6523 will ever use compound literals with scalar type. */
6524 (vtop->type.t & VT_BTYPE) != VT_PTR) {
6525 /* These come from compound literals, memcpy stuff over. */
6526 Section *ssec;
6527 ElfSym *esym;
6528 ElfW_Rel *rel;
6529 esym = elfsym(vtop->sym);
6530 ssec = tcc_state->sections[esym->st_shndx];
6531 memmove (ptr, ssec->data + esym->st_value, size);
6532 if (ssec->reloc) {
6533 /* We need to copy over all memory contents, and that
6534 includes relocations. Use the fact that relocs are
6535 created in order, so look from the end of relocs
6536 until we hit one before the copied region. */
6537 int num_relocs = ssec->reloc->data_offset / sizeof(*rel);
6538 rel = (ElfW_Rel*)(ssec->reloc->data + ssec->reloc->data_offset);
6539 while (num_relocs--) {
6540 rel--;
6541 if (rel->r_offset >= esym->st_value + size)
6542 continue;
6543 if (rel->r_offset < esym->st_value)
6544 break;
6545 /* Note: if the same fields are initialized multiple
6546 times (possible with designators) then we possibly
6547 add multiple relocations for the same offset here.
6548 That would lead to wrong code; the last reloc needs
6549 to win. We clean this up later after the whole
6550 initializer is parsed. */
6551 put_elf_reloca(symtab_section, sec,
6552 c + rel->r_offset - esym->st_value,
6553 ELFW(R_TYPE)(rel->r_info),
6554 ELFW(R_SYM)(rel->r_info),
6555 #if PTR_SIZE == 8
6556 rel->r_addend
6557 #else
6559 #endif
6563 } else {
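/* Illustrative example (added for clarity, not in the original source):
       struct { unsigned a:3, b:5; } g = { 5, 17 };
   each bitfield value below is masked and merged into the data section
   byte by byte, so fields sharing a byte do not overwrite each other. */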
6564 if (type->t & VT_BITFIELD) {
6565 int bit_pos, bit_size, bits, n;
6566 unsigned char *p, v, m;
6567 bit_pos = BIT_POS(vtop->type.t);
6568 bit_size = BIT_SIZE(vtop->type.t);
6569 p = (unsigned char*)ptr + (bit_pos >> 3);
6570 bit_pos &= 7, bits = 0;
6571 while (bit_size) {
6572 n = 8 - bit_pos;
6573 if (n > bit_size)
6574 n = bit_size;
6575 v = vtop->c.i >> bits << bit_pos;
6576 m = ((1 << n) - 1) << bit_pos;
6577 *p = (*p & ~m) | (v & m);
6578 bits += n, bit_size -= n, bit_pos = 0, ++p;
6580 } else
6581 switch(bt) {
6582 /* XXX: when cross-compiling we assume that each type has the
6583 same representation on host and target, which is likely to
6584 be wrong in the case of long double */
6585 case VT_BOOL:
6586 vtop->c.i = vtop->c.i != 0;
6587 case VT_BYTE:
6588 *(char *)ptr |= vtop->c.i;
6589 break;
6590 case VT_SHORT:
6591 *(short *)ptr |= vtop->c.i;
6592 break;
6593 case VT_FLOAT:
6594 *(float*)ptr = vtop->c.f;
6595 break;
6596 case VT_DOUBLE:
6597 *(double *)ptr = vtop->c.d;
6598 break;
6599 case VT_LDOUBLE:
6600 #if defined TCC_IS_NATIVE_387
6601 if (sizeof (long double) >= 10) /* zero pad ten-byte LD */
6602 memcpy(ptr, &vtop->c.ld, 10);
6603 #ifdef __TINYC__
6604 else if (sizeof (long double) == sizeof (double))
6605 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr) : "m" (vtop->c.ld));
6606 #endif
6607 else if (vtop->c.ld == 0.0)
6609 else
6610 #endif
6611 if (sizeof(long double) == LDOUBLE_SIZE)
6612 *(long double*)ptr = vtop->c.ld;
6613 else if (sizeof(double) == LDOUBLE_SIZE)
6614 *(double *)ptr = (double)vtop->c.ld;
6615 else
6616 tcc_error("can't cross compile long double constants");
6617 break;
6618 #if PTR_SIZE != 8
6619 case VT_LLONG:
6620 *(long long *)ptr |= vtop->c.i;
6621 break;
6622 #else
6623 case VT_LLONG:
6624 #endif
6625 case VT_PTR:
6627 addr_t val = vtop->c.i;
6628 #if PTR_SIZE == 8
6629 if (vtop->r & VT_SYM)
6630 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
6631 else
6632 *(addr_t *)ptr |= val;
6633 #else
6634 if (vtop->r & VT_SYM)
6635 greloc(sec, vtop->sym, c, R_DATA_PTR);
6636 *(addr_t *)ptr |= val;
6637 #endif
6638 break;
6640 default:
6642 int val = vtop->c.i;
6643 #if PTR_SIZE == 8
6644 if (vtop->r & VT_SYM)
6645 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
6646 else
6647 *(int *)ptr |= val;
6648 #else
6649 if (vtop->r & VT_SYM)
6650 greloc(sec, vtop->sym, c, R_DATA_PTR);
6651 *(int *)ptr |= val;
6652 #endif
6653 break;
6657 vtop--;
6658 } else {
6659 vset(&dtype, VT_LOCAL|VT_LVAL, c);
6660 vswap();
6661 vstore();
6662 vpop();
6666 /* 'type' contains the type and storage info. 'c' is the offset of the
6667 object in section 'sec'. If 'sec' is NULL, it means stack based
6668 allocation. 'first' is true if array '{' must be read (multi
6669 dimension implicit array init handling). 'size_only' is true if
6670 size only evaluation is wanted (only for arrays). */
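/* Illustrative examples (added for clarity, not in the original source):
       char s[] = "hi";                    string copied, length deduced
       int m[2][2] = { {1, 2}, {3, 4} };   braces handled recursively
       int x, *p = &x;                     scalar, single init_putv() store
   With 'size_only' set the initializer is only scanned to determine the
   array length and nothing is emitted. */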
6671 static void decl_initializer(CType *type, Section *sec, unsigned long c,
6672 int first, int size_only)
6674 int len, n, no_oblock, nb, i;
6675 int size1, align1;
6676 int have_elem;
6677 Sym *s, *f;
6678 Sym indexsym;
6679 CType *t1;
6681 /* If we currently are at a '}' or ',', we have read an initializer
6682 element in one of our callers and have not yet consumed it. */
6683 have_elem = tok == '}' || tok == ',';
6684 if (!have_elem && tok != '{' &&
6685 /* In case of strings we have special handling for arrays, so
6686 don't consume them as initializer value (which would commit them
6687 to some anonymous symbol). */
6688 tok != TOK_LSTR && tok != TOK_STR &&
6689 !size_only) {
6690 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
6691 have_elem = 1;
6694 if (have_elem &&
6695 !(type->t & VT_ARRAY) &&
6696 /* Compare unqualified types so that toplevel qualifiers are
6697 stripped: the source type might have VT_CONSTANT set, which is
6698 of course still assignable to non-const elements. */
6699 is_compatible_unqualified_types(type, &vtop->type)) {
6700 init_putv(type, sec, c);
6701 } else if (type->t & VT_ARRAY) {
6702 s = type->ref;
6703 n = s->c;
6704 t1 = pointed_type(type);
6705 size1 = type_size(t1, &align1);
6707 no_oblock = 1;
6708 if ((first && tok != TOK_LSTR && tok != TOK_STR) ||
6709 tok == '{') {
6710 if (tok != '{')
6711 tcc_error("character array initializer must be a literal,"
6712 " optionally enclosed in braces");
6713 skip('{');
6714 no_oblock = 0;
6717 /* only parse strings here if the element type matches (otherwise
6718 they are handled as plain (w)char * expressions) */
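/* e.g. 'char s[] = "ab";' (or a wide string for the matching wide
   element type) is handled right here; in 'char *p = "ab";' the string
   is instead parsed as an expression and committed to an anonymous
   symbol, and 'p' is initialized with its address (example added for
   clarity). */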
6719 if ((tok == TOK_LSTR &&
6720 #ifdef TCC_TARGET_PE
6721 (t1->t & VT_BTYPE) == VT_SHORT && (t1->t & VT_UNSIGNED)
6722 #else
6723 (t1->t & VT_BTYPE) == VT_INT
6724 #endif
6725 ) || (tok == TOK_STR && (t1->t & VT_BTYPE) == VT_BYTE)) {
6726 len = 0;
6727 while (tok == TOK_STR || tok == TOK_LSTR) {
6728 int cstr_len, ch;
6730 /* compute maximum number of chars wanted */
6731 if (tok == TOK_STR)
6732 cstr_len = tokc.str.size;
6733 else
6734 cstr_len = tokc.str.size / sizeof(nwchar_t);
6735 cstr_len--;
6736 nb = cstr_len;
6737 if (n >= 0 && nb > (n - len))
6738 nb = n - len;
6739 if (!size_only) {
6740 if (cstr_len > nb)
6741 tcc_warning("initializer-string for array is too long");
6742 /* fast path for the common case (char string initializing
6743 a global variable): copy the bytes directly into the
6744 section data */
6745 if (sec && tok == TOK_STR && size1 == 1) {
6746 if (!NODATA_WANTED)
6747 memcpy(sec->data + c + len, tokc.str.data, nb);
6748 } else {
6749 for(i=0;i<nb;i++) {
6750 if (tok == TOK_STR)
6751 ch = ((unsigned char *)tokc.str.data)[i];
6752 else
6753 ch = ((nwchar_t *)tokc.str.data)[i];
6754 vpushi(ch);
6755 init_putv(t1, sec, c + (len + i) * size1);
6759 len += nb;
6760 next();
6762 /* only add trailing zero if enough storage (no
6763 warning in this case since it is standard) */
6764 if (n < 0 || len < n) {
6765 if (!size_only) {
6766 vpushi(0);
6767 init_putv(t1, sec, c + (len * size1));
6769 len++;
6771 len *= size1;
6772 } else {
6773 indexsym.c = 0;
6774 f = &indexsym;
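/* Generic brace-enclosed list (note added for clarity):
   decl_designator() handles an optional designator such as '[2] =' or
   '.field =' and then initializes the current element; without a
   designator the position is the running array index ('indexsym.c')
   or the next struct field. */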
6776 do_init_list:
6777 len = 0;
6778 while (tok != '}' || have_elem) {
6779 len = decl_designator(type, sec, c, &f, size_only, len);
6780 have_elem = 0;
6781 if (type->t & VT_ARRAY) {
6782 ++indexsym.c;
6783 /* special test for multi dimensional arrays (may not
6784 be strictly correct if designators are used at the
6785 same time) */
6786 if (no_oblock && len >= n*size1)
6787 break;
6788 } else {
6789 if (s->type.t == VT_UNION)
6790 f = NULL;
6791 else
6792 f = f->next;
6793 if (no_oblock && f == NULL)
6794 break;
6797 if (tok == '}')
6798 break;
6799 skip(',');
6802 /* put zeros at the end */
6803 if (!size_only && len < n*size1)
6804 init_putz(sec, c + len, n*size1 - len);
6805 if (!no_oblock)
6806 skip('}');
6807 /* patch type size if needed, which happens only for array types */
6808 if (n < 0)
6809 s->c = size1 == 1 ? len : ((len + size1 - 1)/size1);
6810 } else if ((type->t & VT_BTYPE) == VT_STRUCT) {
6811 size1 = 1;
6812 no_oblock = 1;
6813 if (first || tok == '{') {
6814 skip('{');
6815 no_oblock = 0;
6817 s = type->ref;
6818 f = s->next;
6819 n = s->c;
6820 goto do_init_list;
6821 } else if (tok == '{') {
6822 next();
6823 decl_initializer(type, sec, c, first, size_only);
6824 skip('}');
6825 } else if (size_only) {
6826 /* If we supported only ISO C we wouldn't have to accept calling
6827 this on anything other than an array with size_only==1 (and even
6828 then only on the outermost level, so no recursion would be needed),
6829 because initializing a flexible array member isn't supported.
6830 But GNU C supports it, so we need to recurse even into
6831 subfields of structs and arrays when size_only is set. */
6832 /* just skip expression */
6833 skip_or_save_block(NULL);
6834 } else {
6835 if (!have_elem) {
6836 /* This should happen only when we haven't parsed
6837 the init element above for fear of committing a
6838 string constant to memory too early. */
6839 if (tok != TOK_STR && tok != TOK_LSTR)
6840 expect("string constant");
6841 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
6843 init_putv(type, sec, c);
6847 /* parse an initializer for type 'type' if 'has_init' is non-zero, and
6848 allocate space in local or global data space ('r' is either
6849 VT_LOCAL or VT_CONST). If 'v' is non-zero, then an associated
6850 variable 'v' of scope 'scope' is declared before the initializers
6851 are parsed. If 'v' is zero, then a reference to the new object
6852 is put on the value stack. If 'has_init' is 2, special parsing
6853 is done to handle string constants. */
6854 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r,
6855 int has_init, int v, int scope)
6857 int size, align, addr;
6858 TokenString *init_str = NULL;
6860 Section *sec;
6861 Sym *flexible_array;
6862 Sym *sym = NULL;
6863 int saved_nocode_wanted = nocode_wanted;
6864 #ifdef CONFIG_TCC_BCHECK
6865 int bcheck;
6866 #endif
6868 /* Always allocate static or global variables */
6869 if (v && (r & VT_VALMASK) == VT_CONST)
6870 nocode_wanted |= 0x80000000;
6872 #ifdef CONFIG_TCC_BCHECK
6873 bcheck = tcc_state->do_bounds_check && !NODATA_WANTED;
6874 #endif
6876 flexible_array = NULL;
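/* A trailing flexible array member, e.g.
       struct S { int n; int data[]; };
   is detected below so that a GNU C initializer like
       static struct S s = { 2, { 3, 4 } };
   can enlarge the allocated object accordingly (example added for
   clarity). */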
6877 if ((type->t & VT_BTYPE) == VT_STRUCT) {
6878 Sym *field = type->ref->next;
6879 if (field) {
6880 while (field->next)
6881 field = field->next;
6882 if (field->type.t & VT_ARRAY && field->type.ref->c < 0)
6883 flexible_array = field;
6887 size = type_size(type, &align);
6888 /* If the size is unknown, we must evaluate it before
6889 evaluating the initializers, because initializers can
6890 generate global data too (e.g. string pointers or ISO C99
6891 compound literals). It also simplifies the handling of
6892 local initializers. */
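/* e.g. for 'int a[] = { 1, 2, 3 };' the initializer is saved as a
   token string, replayed once with sec == NULL just to determine the
   array size, and then replayed again below to emit the actual data
   (example added for clarity). */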
6894 if (size < 0 || (flexible_array && has_init)) {
6895 if (!has_init)
6896 tcc_error("unknown type size");
6897 /* get all init string */
6898 if (has_init == 2) {
6899 init_str = tok_str_alloc();
6900 /* only get strings */
6901 while (tok == TOK_STR || tok == TOK_LSTR) {
6902 tok_str_add_tok(init_str);
6903 next();
6905 tok_str_add(init_str, -1);
6906 tok_str_add(init_str, 0);
6907 } else {
6908 skip_or_save_block(&init_str);
6910 unget_tok(0);
6912 /* compute size */
6913 begin_macro(init_str, 1);
6914 next();
6915 decl_initializer(type, NULL, 0, 1, 1);
6916 /* prepare second initializer parsing */
6917 macro_ptr = init_str->str;
6918 next();
6920 /* if still unknown size, error */
6921 size = type_size(type, &align);
6922 if (size < 0)
6923 tcc_error("unknown type size");
6925 /* If there's a flexible array member and it was used in the
6926 initializer, adjust the size accordingly. */
6927 if (flexible_array &&
6928 flexible_array->type.ref->c > 0)
6929 size += flexible_array->type.ref->c
6930 * pointed_size(&flexible_array->type);
6931 /* take into account specified alignment if bigger */
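/* Note added for clarity: as the shift below shows, 'ad->a.aligned'
   holds log2(alignment) + 1, so e.g. __attribute__((aligned(16)))
   corresponds to ad->a.aligned == 5 and speca == 16. */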
6932 if (ad->a.aligned) {
6933 int speca = 1 << (ad->a.aligned - 1);
6934 if (speca > align)
6935 align = speca;
6936 } else if (ad->a.packed) {
6937 align = 1;
6940 if (!v && NODATA_WANTED)
6941 size = 0, align = 1;
6943 if ((r & VT_VALMASK) == VT_LOCAL) {
6944 sec = NULL;
6945 #ifdef CONFIG_TCC_BCHECK
6946 if (bcheck && (type->t & VT_ARRAY)) {
6947 loc--;
6949 #endif
6950 loc = (loc - size) & -align;
6951 addr = loc;
6952 #ifdef CONFIG_TCC_BCHECK
6953 /* handles bounds */
6954 /* XXX: currently, since we do only one pass, we cannot track
6955 '&' operators, so we add only arrays */
6956 if (bcheck && (type->t & VT_ARRAY)) {
6957 addr_t *bounds_ptr;
6958 /* add padding between regions */
6959 loc--;
6960 /* then add local bound info */
6961 bounds_ptr = section_ptr_add(lbounds_section, 2 * sizeof(addr_t));
6962 bounds_ptr[0] = addr;
6963 bounds_ptr[1] = size;
6965 #endif
6966 if (v) {
6967 /* local variable */
6968 #ifdef CONFIG_TCC_ASM
6969 if (ad->asm_label) {
6970 int reg = asm_parse_regvar(ad->asm_label);
6971 if (reg >= 0)
6972 r = (r & ~VT_VALMASK) | reg;
6974 #endif
6975 sym = sym_push(v, type, r, addr);
6976 sym->a = ad->a;
6977 } else {
6978 /* push local reference */
6979 vset(type, r, addr);
6981 } else {
6982 if (v && scope == VT_CONST) {
6983 /* see if the symbol was already defined */
6984 sym = sym_find(v);
6985 if (sym) {
6986 patch_storage(sym, ad, type);
6987 /* we accept several definitions of the same global variable. */
6988 if (!has_init && sym->c && elfsym(sym)->st_shndx != SHN_UNDEF)
6989 goto no_alloc;
6993 /* allocate symbol in corresponding section */
6994 sec = ad->section;
6995 if (!sec) {
6996 if (has_init)
6997 sec = data_section;
6998 else if (tcc_state->nocommon)
6999 sec = bss_section;
7002 if (sec) {
7003 addr = section_add(sec, size, align);
7004 #ifdef CONFIG_TCC_BCHECK
7005 /* add padding if bound check */
7006 if (bcheck)
7007 section_add(sec, 1, 1);
7008 #endif
7009 } else {
7010 addr = align; /* SHN_COMMON is special, symbol value is align */
7011 sec = common_section;
7014 if (v) {
7015 if (!sym) {
7016 sym = sym_push(v, type, r | VT_SYM, 0);
7017 patch_storage(sym, ad, NULL);
7019 /* Local statics have a scope until now (for
7020 warnings), remove it here. */
7021 sym->sym_scope = 0;
7022 /* update symbol definition */
7023 put_extern_sym(sym, sec, addr, size);
7024 } else {
7025 /* push global reference */
7026 sym = get_sym_ref(type, sec, addr, size);
7027 vpushsym(type, sym);
7028 vtop->r |= r;
7031 #ifdef CONFIG_TCC_BCHECK
7032 /* handle bounds now, because the symbol must already be
7033 defined for the relocation below */
7034 if (bcheck) {
7035 addr_t *bounds_ptr;
7037 greloca(bounds_section, sym, bounds_section->data_offset, R_DATA_PTR, 0);
7038 /* then add global bound info */
7039 bounds_ptr = section_ptr_add(bounds_section, 2 * sizeof(addr_t));
7040 bounds_ptr[0] = 0; /* relocated */
7041 bounds_ptr[1] = size;
7043 #endif
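/* Note added for clarity: VLAs, e.g. 'int a[n];' inside a function,
   are allocated at run time. The first VLA in scope saves the stack
   pointer (vla_sp_root_loc) so it can later be restored; each VLA then
   grows the stack by its runtime size and records the new stack
   pointer in its own slot (vla_sp_loc). */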
7046 if (type->t & VT_VLA) {
7047 int a;
7049 if (NODATA_WANTED)
7050 goto no_alloc;
7052 /* save current stack pointer */
7053 if (vlas_in_scope == 0) {
7054 if (vla_sp_root_loc == -1)
7055 vla_sp_root_loc = (loc -= PTR_SIZE);
7056 gen_vla_sp_save(vla_sp_root_loc);
7059 vla_runtime_type_size(type, &a);
7060 gen_vla_alloc(type, a);
7061 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
7062 /* on _WIN64, because of the function args scratch area, the
7063 result of alloca differs from RSP and is returned in RAX. */
7064 gen_vla_result(addr), addr = (loc -= PTR_SIZE);
7065 #endif
7066 gen_vla_sp_save(addr);
7067 vla_sp_loc = addr;
7068 vlas_in_scope++;
7070 } else if (has_init) {
7071 size_t oldreloc_offset = 0;
7072 if (sec && sec->reloc)
7073 oldreloc_offset = sec->reloc->data_offset;
7074 decl_initializer(type, sec, addr, 1, 0);
7075 if (sec && sec->reloc)
7076 squeeze_multi_relocs(sec, oldreloc_offset);
7077 /* patch flexible array member size back to -1, */
7078 /* for possible subsequent similar declarations */
7079 if (flexible_array)
7080 flexible_array->type.ref->c = -1;
7083 no_alloc:
7084 /* restore parse state if needed */
7085 if (init_str) {
7086 end_macro();
7087 next();
7090 nocode_wanted = saved_nocode_wanted;
7093 /* parse a function defined by symbol 'sym' and generate its code in
7094 'cur_text_section' */
7095 static void gen_function(Sym *sym)
7097 nocode_wanted = 0;
7098 ind = cur_text_section->data_offset;
7099 if (sym->a.aligned) {
7100 size_t newoff = section_add(cur_text_section, 0,
7101 1 << (sym->a.aligned - 1));
7102 gen_fill_nops(newoff - ind);
7104 /* NOTE: we patch the symbol size later */
7105 put_extern_sym(sym, cur_text_section, ind, 0);
7106 funcname = get_tok_str(sym->v, NULL);
7107 func_ind = ind;
7108 /* Initialize VLA state */
7109 vla_sp_loc = -1;
7110 vla_sp_root_loc = -1;
7111 /* put debug symbol */
7112 tcc_debug_funcstart(tcc_state, sym);
7113 /* push a dummy symbol to enable local sym storage */
7114 sym_push2(&local_stack, SYM_FIELD, 0, 0);
7115 local_scope = 1; /* for function parameters */
7116 gfunc_prolog(&sym->type);
7117 local_scope = 0;
7118 rsym = 0;
7119 block(NULL, NULL, 0);
7120 nocode_wanted = 0;
7121 gsym(rsym);
7122 gfunc_epilog();
7123 cur_text_section->data_offset = ind;
7124 label_pop(&global_label_stack, NULL, 0);
7125 /* reset local stack */
7126 local_scope = 0;
7127 sym_pop(&local_stack, NULL, 0);
7128 /* end of function */
7129 /* patch symbol size */
7130 elfsym(sym)->st_size = ind - func_ind;
7131 tcc_debug_funcend(tcc_state, ind - func_ind);
7132 /* It's better to crash than to generate wrong code */
7133 cur_text_section = NULL;
7134 funcname = ""; /* for safety */
7135 func_vt.t = VT_VOID; /* for safety */
7136 func_var = 0; /* for safety */
7137 ind = 0; /* for safety */
7138 nocode_wanted = 0x80000000;
7139 check_vstack();
7142 static void gen_inline_functions(TCCState *s)
7144 Sym *sym;
7145 int inline_generated, i, ln;
7146 struct InlineFunc *fn;
7148 ln = file->line_num;
7149 /* iterate while inline functions are referenced */
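/* Example (added for clarity): for 'static inline int twice(int x)
   { return x + x; }' only a token string was saved at declaration
   time; its code is emitted here, once, and only if some use made
   sym->c non-zero. Emitting one inline function may reference further
   ones, hence the outer loop. */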
7150 do {
7151 inline_generated = 0;
7152 for (i = 0; i < s->nb_inline_fns; ++i) {
7153 fn = s->inline_fns[i];
7154 sym = fn->sym;
7155 if (sym && sym->c) {
7156 /* the function was used: generate its code and
7157 convert it to a normal function */
7158 fn->sym = NULL;
7159 if (file)
7160 pstrcpy(file->filename, sizeof file->filename, fn->filename);
7161 sym->type.t &= ~VT_INLINE;
7163 begin_macro(fn->func_str, 1);
7164 next();
7165 cur_text_section = text_section;
7166 gen_function(sym);
7167 end_macro();
7169 inline_generated = 1;
7172 } while (inline_generated);
7173 file->line_num = ln;
7176 ST_FUNC void free_inline_functions(TCCState *s)
7178 int i;
7179 /* free tokens of unused inline functions */
7180 for (i = 0; i < s->nb_inline_fns; ++i) {
7181 struct InlineFunc *fn = s->inline_fns[i];
7182 if (fn->sym)
7183 tok_str_free(fn->func_str);
7185 dynarray_reset(&s->inline_fns, &s->nb_inline_fns);
7188 /* 'l' is VT_LOCAL or VT_CONST to define the default storage type, or VT_CMP
7189 if parsing an old style parameter decl list (in which case 'func_sym' is set) */
7190 static int decl0(int l, int is_for_loop_init, Sym *func_sym)
7192 int v, has_init, r;
7193 CType type, btype;
7194 Sym *sym;
7195 AttributeDef ad;
7197 while (1) {
7198 if (!parse_btype(&btype, &ad)) {
7199 if (is_for_loop_init)
7200 return 0;
7201 /* skip redundant ';' if not in old parameter decl scope */
7202 if (tok == ';' && l != VT_CMP) {
7203 next();
7204 continue;
7206 if (l != VT_CONST)
7207 break;
7208 if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
7209 /* global asm block */
7210 asm_global_instr();
7211 continue;
7213 if (tok >= TOK_UIDENT) {
7214 /* special test for old K&R protos without explicit int
7215 type. Only accepted when defining global data */
7216 btype.t = VT_INT;
7217 } else {
7218 if (tok != TOK_EOF)
7219 expect("declaration");
7220 break;
7223 if (tok == ';') {
7224 if ((btype.t & VT_BTYPE) == VT_STRUCT) {
7225 int v = btype.ref->v;
7226 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) >= SYM_FIRST_ANOM)
7227 tcc_warning("unnamed struct/union that defines no instances");
7228 next();
7229 continue;
7231 if (IS_ENUM(btype.t)) {
7232 next();
7233 continue;
7236 while (1) { /* iterate thru each declaration */
7237 type = btype;
7238 /* If the base type itself was an array type of unspecified
7239 size (like in 'typedef int arr[]; arr x = {1};') then
7240 we will overwrite the unknown size by the real one for
7241 this decl. We need to unshare the ref symbol holding
7242 that size. */
7243 if ((type.t & VT_ARRAY) && type.ref->c < 0) {
7244 type.ref = sym_push(SYM_FIELD, &type.ref->type, 0, type.ref->c);
7246 type_decl(&type, &ad, &v, TYPE_DIRECT);
7247 #if 0
7249 char buf[500];
7250 type_to_str(buf, sizeof(buf), &type, get_tok_str(v, NULL));
7251 printf("type = '%s'\n", buf);
7253 #endif
7254 if ((type.t & VT_BTYPE) == VT_FUNC) {
7255 if ((type.t & VT_STATIC) && (l == VT_LOCAL)) {
7256 tcc_error("function without file scope cannot be static");
7258 /* if old style function prototype, we accept a
7259 declaration list */
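/* e.g. 'int f(a, b) int a; char b; { ... }': the declarations between
   ')' and '{' are parsed by the recursive decl0(VT_CMP, ...) call
   below (example added for clarity). */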
7260 sym = type.ref;
7261 if (sym->f.func_type == FUNC_OLD && l == VT_CONST)
7262 decl0(VT_CMP, 0, sym);
7265 if (gnu_ext && (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
7266 ad.asm_label = asm_label_instr();
7267 /* parse one last attribute list, after asm label */
7268 parse_attribute(&ad);
7269 if (tok == '{')
7270 expect(";");
7273 #ifdef TCC_TARGET_PE
7274 if (ad.a.dllimport || ad.a.dllexport) {
7275 if (type.t & (VT_STATIC|VT_TYPEDEF))
7276 tcc_error("cannot have dll linkage with static or typedef");
7277 if (ad.a.dllimport) {
7278 if ((type.t & VT_BTYPE) == VT_FUNC)
7279 ad.a.dllimport = 0;
7280 else
7281 type.t |= VT_EXTERN;
7284 #endif
7285 if (tok == '{') {
7286 if (l != VT_CONST)
7287 tcc_error("cannot use local functions");
7288 if ((type.t & VT_BTYPE) != VT_FUNC)
7289 expect("function definition");
7291 /* reject abstract declarators in function definitions;
7292 make old style params without a decl default to int */
7293 sym = type.ref;
7294 while ((sym = sym->next) != NULL) {
7295 if (!(sym->v & ~SYM_FIELD))
7296 expect("identifier");
7297 if (sym->type.t == VT_VOID)
7298 sym->type = int_type;
7301 /* XXX: cannot do better now: convert extern inline to static inline */
7302 if ((type.t & (VT_EXTERN | VT_INLINE)) == (VT_EXTERN | VT_INLINE))
7303 type.t = (type.t & ~VT_EXTERN) | VT_STATIC;
7305 /* put function symbol */
7306 sym = external_global_sym(v, &type, 0);
7307 type.t &= ~VT_EXTERN;
7308 patch_storage(sym, &ad, &type);
7310 /* static inline functions are just recorded as a kind
7311 of macro. Their code will be emitted at the end of
7312 the compilation unit only if they are used */
7313 if ((type.t & (VT_INLINE | VT_STATIC)) ==
7314 (VT_INLINE | VT_STATIC)) {
7315 struct InlineFunc *fn;
7316 const char *filename;
7318 filename = file ? file->filename : "";
7319 fn = tcc_malloc(sizeof *fn + strlen(filename));
7320 strcpy(fn->filename, filename);
7321 fn->sym = sym;
7322 skip_or_save_block(&fn->func_str);
7323 dynarray_add(&tcc_state->inline_fns,
7324 &tcc_state->nb_inline_fns, fn);
7325 } else {
7326 /* compute text section */
7327 cur_text_section = ad.section;
7328 if (!cur_text_section)
7329 cur_text_section = text_section;
7330 gen_function(sym);
7332 break;
7333 } else {
7334 if (l == VT_CMP) {
7335 /* find parameter in function parameter list */
7336 for (sym = func_sym->next; sym; sym = sym->next)
7337 if ((sym->v & ~SYM_FIELD) == v)
7338 goto found;
7339 tcc_error("declaration for parameter '%s' but no such parameter",
7340 get_tok_str(v, NULL));
7341 found:
7342 if (type.t & VT_STORAGE) /* 'register' is okay */
7343 tcc_error("storage class specified for '%s'",
7344 get_tok_str(v, NULL));
7345 if (sym->type.t != VT_VOID)
7346 tcc_error("redefinition of parameter '%s'",
7347 get_tok_str(v, NULL));
7348 convert_parameter_type(&type);
7349 sym->type = type;
7350 } else if (type.t & VT_TYPEDEF) {
7351 /* save typedefed type */
7352 /* XXX: test storage specifiers ? */
7353 sym = sym_find(v);
7354 if (sym && sym->sym_scope == local_scope) {
7355 if (!is_compatible_types(&sym->type, &type)
7356 || !(sym->type.t & VT_TYPEDEF))
7357 tcc_error("incompatible redefinition of '%s'",
7358 get_tok_str(v, NULL));
7359 sym->type = type;
7360 } else {
7361 sym = sym_push(v, &type, 0, 0);
7363 sym->a = ad.a;
7364 sym->f = ad.f;
7365 } else {
7366 r = 0;
7367 if ((type.t & VT_BTYPE) == VT_FUNC) {
7368 /* external function definition */
7369 /* specific case for func_call attribute */
7370 type.ref->f = ad.f;
7371 } else if (!(type.t & VT_ARRAY)) {
7372 /* not lvalue if array */
7373 r |= lvalue_type(type.t);
7375 has_init = (tok == '=');
7376 if (has_init && (type.t & VT_VLA))
7377 tcc_error("variable length array cannot be initialized");
7378 if (((type.t & VT_EXTERN) && (!has_init || l != VT_CONST)) ||
7379 ((type.t & VT_BTYPE) == VT_FUNC) ||
7380 ((type.t & VT_ARRAY) && (type.t & VT_STATIC) &&
7381 !has_init && l == VT_CONST && type.ref->c < 0)) {
7382 /* external variable or function */
7383 /* NOTE: like GCC, uninitialized global static
7384 arrays of unknown size are considered
7385 extern */
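/* e.g. a file-scope 'static int tbl[];' without an initializer ends
   up here and is treated as if it were extern (example added for
   clarity). */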
7386 type.t |= VT_EXTERN;
7387 sym = external_sym(v, &type, r, &ad);
7388 if (ad.alias_target) {
7389 ElfSym *esym;
7390 Sym *alias_target;
7391 alias_target = sym_find(ad.alias_target);
7392 esym = elfsym(alias_target);
7393 if (!esym)
7394 tcc_error("unsupported forward __alias__ attribute");
7395 /* Local statics have a scope until now (for
7396 warnings), remove it here. */
7397 sym->sym_scope = 0;
7398 put_extern_sym2(sym, esym->st_shndx, esym->st_value, esym->st_size, 0);
7400 } else {
7401 if (type.t & VT_STATIC)
7402 r |= VT_CONST;
7403 else
7404 r |= l;
7405 if (has_init)
7406 next();
7407 else if (l == VT_CONST)
7408 /* uninitialized global variables may be overridden */
7409 type.t |= VT_EXTERN;
7410 decl_initializer_alloc(&type, &ad, r, has_init, v, l);
7413 if (tok != ',') {
7414 if (is_for_loop_init)
7415 return 1;
7416 skip(';');
7417 break;
7419 next();
7421 ad.a.aligned = 0;
7424 return 0;
7427 static void decl(int l)
7429 decl0(l, 0, NULL);
7432 /* ------------------------------------------------------------------------- */