tccgen.c: fix warning for incompatible struct- and function pointers
1 /*
2 * TCC - Tiny C Compiler
3 *
4 * Copyright (c) 2001-2004 Fabrice Bellard
5 *
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
10 *
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
15 *
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
19 */
21 #include "tcc.h"
22 /********************************************************/
23 /* global variables */
25 /* loc : local variable index
26 ind : output code index
27 rsym: return symbol
28 anon_sym: anonymous symbol index
29 */
30 ST_DATA int rsym, anon_sym, ind, loc;
32 ST_DATA Sym *sym_free_first;
33 ST_DATA void **sym_pools;
34 ST_DATA int nb_sym_pools;
36 ST_DATA Sym *global_stack;
37 ST_DATA Sym *local_stack;
38 ST_DATA Sym *define_stack;
39 ST_DATA Sym *global_label_stack;
40 ST_DATA Sym *local_label_stack;
41 static int local_scope;
42 static int in_sizeof;
43 static int section_sym;
45 ST_DATA int vlas_in_scope; /* number of VLAs that are currently in scope */
46 ST_DATA int vla_sp_root_loc; /* vla_sp_loc for SP before any VLAs were pushed */
47 ST_DATA int vla_sp_loc; /* location on the stack of the variable in which the stack pointer is saved when it is modified by VLAs */
49 ST_DATA SValue __vstack[1+VSTACK_SIZE], *vtop, *pvtop;
51 ST_DATA int const_wanted; /* true if constant wanted */
52 ST_DATA int nocode_wanted; /* no code generation wanted */
53 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
54 #define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */
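/* Roughly: nocode_wanted is 0 while code is generated normally inside a
   function, positive while parsing regions whose code must be suppressed,
   and tccgen_compile() starts it at 0x80000000 so that at file scope only
   static data may be emitted (hence the NODATA_WANTED and
   STATIC_DATA_WANTED tests above). */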
55 ST_DATA int global_expr; /* true if compound literals must be allocated globally (used during initializer parsing) */
56 ST_DATA CType func_vt; /* current function return type (used by return instruction) */
57 ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
58 ST_DATA int func_vc;
59 ST_DATA int last_line_num, last_ind, func_ind; /* debug last line number and pc */
60 ST_DATA const char *funcname;
61 ST_DATA int g_debug;
63 ST_DATA CType char_pointer_type, func_old_type, int_type, size_type, ptrdiff_type;
65 ST_DATA struct switch_t {
66 struct case_t {
67 int64_t v1, v2;
68 int sym;
69 } **p; int n; /* list of case ranges */
70 int def_sym; /* default symbol */
71 } *cur_switch; /* current switch */
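/* Illustrative note: with GNU case ranges, a label such as "case 1 ... 5:"
   is recorded as a single case_t with v1 == 1 and v2 == 5; a plain
   "case 3:" simply has v1 == v2 == 3. */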
73 /* ------------------------------------------------------------------------- */
75 static void gen_cast(CType *type);
76 static void gen_cast_s(int t);
77 static inline CType *pointed_type(CType *type);
78 static int is_compatible_types(CType *type1, CType *type2);
79 static int parse_btype(CType *type, AttributeDef *ad);
80 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td);
81 static void parse_expr_type(CType *type);
82 static void init_putv(CType *type, Section *sec, unsigned long c);
83 static void decl_initializer(CType *type, Section *sec, unsigned long c, int first, int size_only);
84 static void block(int *bsym, int *csym, int is_expr);
85 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
86 static void decl(int l);
87 static int decl0(int l, int is_for_loop_init, Sym *);
88 static void expr_eq(void);
89 static void vla_runtime_type_size(CType *type, int *a);
90 static void vla_sp_restore(void);
91 static void vla_sp_restore_root(void);
92 static int is_compatible_unqualified_types(CType *type1, CType *type2);
93 static inline int64_t expr_const64(void);
94 static void vpush64(int ty, unsigned long long v);
95 static void vpush(CType *type);
96 static int gvtst(int inv, int t);
97 static void gen_inline_functions(TCCState *s);
98 static void skip_or_save_block(TokenString **str);
99 static void gv_dup(void);
101 ST_INLN int is_float(int t)
103 int bt;
104 bt = t & VT_BTYPE;
105 return bt == VT_LDOUBLE || bt == VT_DOUBLE || bt == VT_FLOAT || bt == VT_QFLOAT;
108 /* we use our own 'finite' function to avoid potential problems with
109 non-standard math libs */
110 /* XXX: endianness dependent */
111 ST_FUNC int ieee_finite(double d)
113 int p[4];
114 memcpy(p, &d, sizeof(double));
115 return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
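/* Sketch of the bit trick above (assuming the little-endian layout the XXX
   note refers to): p[1] holds the sign, the 11 exponent bits and the top of
   the mantissa. OR-ing with 0x800fffff sets every bit except the exponent,
   so the word becomes 0xffffffff exactly when the exponent is all ones
   (Inf/NaN); adding 1 then wraps to 0 only in that case, and the final
   shift yields 1 for finite values and 0 otherwise. */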
118 /* compiling intel long double natively */
119 #if (defined __i386__ || defined __x86_64__) \
120 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
121 # define TCC_IS_NATIVE_387
122 #endif
124 ST_FUNC void test_lvalue(void)
126 if (!(vtop->r & VT_LVAL))
127 expect("lvalue");
130 ST_FUNC void check_vstack(void)
132 if (pvtop != vtop)
133 tcc_error("internal compiler error: vstack leak (%d)", vtop - pvtop);
136 /* ------------------------------------------------------------------------- */
137 /* vstack debugging aid */
139 #if 0
140 void pv (const char *lbl, int a, int b)
142 int i;
143 for (i = a; i < a + b; ++i) {
144 SValue *p = &vtop[-i];
145 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
146 lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
149 #endif
151 /* ------------------------------------------------------------------------- */
152 /* start of translation unit info */
153 ST_FUNC void tcc_debug_start(TCCState *s1)
155 if (s1->do_debug) {
156 char buf[512];
158 /* file info: full path + filename */
159 section_sym = put_elf_sym(symtab_section, 0, 0,
160 ELFW(ST_INFO)(STB_LOCAL, STT_SECTION), 0,
161 text_section->sh_num, NULL);
162 getcwd(buf, sizeof(buf));
163 #ifdef _WIN32
164 normalize_slashes(buf);
165 #endif
166 pstrcat(buf, sizeof(buf), "/");
167 put_stabs_r(buf, N_SO, 0, 0,
168 text_section->data_offset, text_section, section_sym);
169 put_stabs_r(file->filename, N_SO, 0, 0,
170 text_section->data_offset, text_section, section_sym);
171 last_ind = 0;
172 last_line_num = 0;
175 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
176 symbols can be safely used */
177 put_elf_sym(symtab_section, 0, 0,
178 ELFW(ST_INFO)(STB_LOCAL, STT_FILE), 0,
179 SHN_ABS, file->filename);
182 /* put end of translation unit info */
183 ST_FUNC void tcc_debug_end(TCCState *s1)
185 if (!s1->do_debug)
186 return;
187 put_stabs_r(NULL, N_SO, 0, 0,
188 text_section->data_offset, text_section, section_sym);
192 /* generate line number info */
193 ST_FUNC void tcc_debug_line(TCCState *s1)
195 if (!s1->do_debug)
196 return;
197 if ((last_line_num != file->line_num || last_ind != ind)) {
198 put_stabn(N_SLINE, 0, file->line_num, ind - func_ind);
199 last_ind = ind;
200 last_line_num = file->line_num;
204 /* put function symbol */
205 ST_FUNC void tcc_debug_funcstart(TCCState *s1, Sym *sym)
207 char buf[512];
209 if (!s1->do_debug)
210 return;
212 /* stabs info */
213 /* XXX: we put here a dummy type */
214 snprintf(buf, sizeof(buf), "%s:%c1",
215 funcname, sym->type.t & VT_STATIC ? 'f' : 'F');
216 put_stabs_r(buf, N_FUN, 0, file->line_num, 0,
217 cur_text_section, sym->c);
218 /* //gr gdb wants a line at the function */
219 put_stabn(N_SLINE, 0, file->line_num, 0);
221 last_ind = 0;
222 last_line_num = 0;
225 /* put function size */
226 ST_FUNC void tcc_debug_funcend(TCCState *s1, int size)
228 if (!s1->do_debug)
229 return;
230 put_stabn(N_FUN, 0, 0, size);
233 /* ------------------------------------------------------------------------- */
234 ST_FUNC int tccgen_compile(TCCState *s1)
236 cur_text_section = NULL;
237 funcname = "";
238 anon_sym = SYM_FIRST_ANOM;
239 section_sym = 0;
240 const_wanted = 0;
241 nocode_wanted = 0x80000000;
243 /* define some often used types */
244 int_type.t = VT_INT;
245 char_pointer_type.t = VT_BYTE;
246 mk_pointer(&char_pointer_type);
247 #if PTR_SIZE == 4
248 size_type.t = VT_INT | VT_UNSIGNED;
249 ptrdiff_type.t = VT_INT;
250 #elif LONG_SIZE == 4
251 size_type.t = VT_LLONG | VT_UNSIGNED;
252 ptrdiff_type.t = VT_LLONG;
253 #else
254 size_type.t = VT_LONG | VT_LLONG | VT_UNSIGNED;
255 ptrdiff_type.t = VT_LONG | VT_LLONG;
256 #endif
257 func_old_type.t = VT_FUNC;
258 func_old_type.ref = sym_push(SYM_FIELD, &int_type, 0, 0);
259 func_old_type.ref->f.func_call = FUNC_CDECL;
260 func_old_type.ref->f.func_type = FUNC_OLD;
262 tcc_debug_start(s1);
264 #ifdef TCC_TARGET_ARM
265 arm_init(s1);
266 #endif
268 #ifdef INC_DEBUG
269 printf("%s: **** new file\n", file->filename);
270 #endif
272 parse_flags = PARSE_FLAG_PREPROCESS | PARSE_FLAG_TOK_NUM | PARSE_FLAG_TOK_STR;
273 next();
274 decl(VT_CONST);
275 gen_inline_functions(s1);
276 check_vstack();
277 /* end of translation unit info */
278 tcc_debug_end(s1);
279 return 0;
282 /* ------------------------------------------------------------------------- */
283 ST_FUNC ElfSym *elfsym(Sym *s)
285 if (!s || !s->c)
286 return NULL;
287 return &((ElfSym *)symtab_section->data)[s->c];
290 /* apply storage attributes to Elf symbol */
291 ST_FUNC void update_storage(Sym *sym)
293 ElfSym *esym;
294 int sym_bind, old_sym_bind;
296 esym = elfsym(sym);
297 if (!esym)
298 return;
300 if (sym->a.visibility)
301 esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1))
302 | sym->a.visibility;
304 if (sym->type.t & VT_STATIC)
305 sym_bind = STB_LOCAL;
306 else if (sym->a.weak)
307 sym_bind = STB_WEAK;
308 else
309 sym_bind = STB_GLOBAL;
310 old_sym_bind = ELFW(ST_BIND)(esym->st_info);
311 if (sym_bind != old_sym_bind) {
312 esym->st_info = ELFW(ST_INFO)(sym_bind, ELFW(ST_TYPE)(esym->st_info));
315 #ifdef TCC_TARGET_PE
316 if (sym->a.dllimport)
317 esym->st_other |= ST_PE_IMPORT;
318 if (sym->a.dllexport)
319 esym->st_other |= ST_PE_EXPORT;
320 #endif
322 #if 0
323 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
324 get_tok_str(sym->v, NULL),
325 sym_bind == STB_WEAK ? 'w' : sym_bind == STB_LOCAL ? 'l' : 'g',
326 sym->a.visibility,
327 sym->a.dllexport,
328 sym->a.dllimport
330 #endif
333 /* ------------------------------------------------------------------------- */
334 /* update sym->c so that it points to an external symbol in section
335 'section' with value 'value' */
337 ST_FUNC void put_extern_sym2(Sym *sym, int sh_num,
338 addr_t value, unsigned long size,
339 int can_add_underscore)
341 int sym_type, sym_bind, info, other, t;
342 ElfSym *esym;
343 const char *name;
344 char buf1[256];
345 #ifdef CONFIG_TCC_BCHECK
346 char buf[32];
347 #endif
349 if (!sym->c) {
350 name = get_tok_str(sym->v, NULL);
351 #ifdef CONFIG_TCC_BCHECK
352 if (tcc_state->do_bounds_check) {
353 /* XXX: avoid doing that for statics ? */
354 /* if bound checking is activated, we change some function
355 names by adding the "__bound" prefix */
356 switch(sym->v) {
357 #ifdef TCC_TARGET_PE
358 /* XXX: we rely only on malloc hooks */
359 case TOK_malloc:
360 case TOK_free:
361 case TOK_realloc:
362 case TOK_memalign:
363 case TOK_calloc:
364 #endif
365 case TOK_memcpy:
366 case TOK_memmove:
367 case TOK_memset:
368 case TOK_strlen:
369 case TOK_strcpy:
370 case TOK_alloca:
371 strcpy(buf, "__bound_");
372 strcat(buf, name);
373 name = buf;
374 break;
377 #endif
378 t = sym->type.t;
379 if ((t & VT_BTYPE) == VT_FUNC) {
380 sym_type = STT_FUNC;
381 } else if ((t & VT_BTYPE) == VT_VOID) {
382 sym_type = STT_NOTYPE;
383 } else {
384 sym_type = STT_OBJECT;
386 if (t & VT_STATIC)
387 sym_bind = STB_LOCAL;
388 else
389 sym_bind = STB_GLOBAL;
390 other = 0;
391 #ifdef TCC_TARGET_PE
392 if (sym_type == STT_FUNC && sym->type.ref) {
393 Sym *ref = sym->type.ref;
394 if (ref->f.func_call == FUNC_STDCALL && can_add_underscore) {
395 sprintf(buf1, "_%s@%d", name, ref->f.func_args * PTR_SIZE);
396 name = buf1;
397 other |= ST_PE_STDCALL;
398 can_add_underscore = 0;
401 #endif
402 if (tcc_state->leading_underscore && can_add_underscore) {
403 buf1[0] = '_';
404 pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
405 name = buf1;
407 if (sym->asm_label)
408 name = get_tok_str(sym->asm_label, NULL);
409 info = ELFW(ST_INFO)(sym_bind, sym_type);
410 sym->c = put_elf_sym(symtab_section, value, size, info, other, sh_num, name);
411 } else {
412 esym = elfsym(sym);
413 esym->st_value = value;
414 esym->st_size = size;
415 esym->st_shndx = sh_num;
417 update_storage(sym);
420 ST_FUNC void put_extern_sym(Sym *sym, Section *section,
421 addr_t value, unsigned long size)
423 int sh_num = section ? section->sh_num : SHN_UNDEF;
424 put_extern_sym2(sym, sh_num, value, size, 1);
427 /* add a new relocation entry to symbol 'sym' in section 's' */
428 ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
429 addr_t addend)
431 int c = 0;
433 if (nocode_wanted && s == cur_text_section)
434 return;
436 if (sym) {
437 if (0 == sym->c)
438 put_extern_sym(sym, NULL, 0, 0);
439 c = sym->c;
442 /* now we can add ELF relocation info */
443 put_elf_reloca(symtab_section, s, offset, type, c, addend);
446 #if PTR_SIZE == 4
447 ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
449 greloca(s, sym, offset, type, 0);
451 #endif
453 /* ------------------------------------------------------------------------- */
454 /* symbol allocator */
455 static Sym *__sym_malloc(void)
457 Sym *sym_pool, *sym, *last_sym;
458 int i;
460 sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
461 dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);
463 last_sym = sym_free_first;
464 sym = sym_pool;
465 for(i = 0; i < SYM_POOL_NB; i++) {
466 sym->next = last_sym;
467 last_sym = sym;
468 sym++;
470 sym_free_first = last_sym;
471 return last_sym;
474 static inline Sym *sym_malloc(void)
476 Sym *sym;
477 #ifndef SYM_DEBUG
478 sym = sym_free_first;
479 if (!sym)
480 sym = __sym_malloc();
481 sym_free_first = sym->next;
482 return sym;
483 #else
484 sym = tcc_malloc(sizeof(Sym));
485 return sym;
486 #endif
489 ST_INLN void sym_free(Sym *sym)
491 #ifndef SYM_DEBUG
492 sym->next = sym_free_first;
493 sym_free_first = sym;
494 #else
495 tcc_free(sym);
496 #endif
499 /* push, without hashing */
500 ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, int c)
502 Sym *s;
504 s = sym_malloc();
505 memset(s, 0, sizeof *s);
506 s->v = v;
507 s->type.t = t;
508 s->c = c;
509 /* add in stack */
510 s->prev = *ps;
511 *ps = s;
512 return s;
515 /* find a symbol and return its associated structure. 's' is the top
516 of the symbol stack */
517 ST_FUNC Sym *sym_find2(Sym *s, int v)
519 while (s) {
520 if (s->v == v)
521 return s;
522 else if (s->v == -1)
523 return NULL;
524 s = s->prev;
526 return NULL;
529 /* structure lookup */
530 ST_INLN Sym *struct_find(int v)
532 v -= TOK_IDENT;
533 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
534 return NULL;
535 return table_ident[v]->sym_struct;
538 /* find an identifier */
539 ST_INLN Sym *sym_find(int v)
541 v -= TOK_IDENT;
542 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
543 return NULL;
544 return table_ident[v]->sym_identifier;
547 /* push a given symbol on the symbol stack */
548 ST_FUNC Sym *sym_push(int v, CType *type, int r, int c)
550 Sym *s, **ps;
551 TokenSym *ts;
553 if (local_stack)
554 ps = &local_stack;
555 else
556 ps = &global_stack;
557 s = sym_push2(ps, v, type->t, c);
558 s->type.ref = type->ref;
559 s->r = r;
560 /* don't record fields or anonymous symbols */
561 /* XXX: simplify */
562 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
563 /* record symbol in token array */
564 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
565 if (v & SYM_STRUCT)
566 ps = &ts->sym_struct;
567 else
568 ps = &ts->sym_identifier;
569 s->prev_tok = *ps;
570 *ps = s;
571 s->sym_scope = local_scope;
572 if (s->prev_tok && s->prev_tok->sym_scope == s->sym_scope)
573 tcc_error("redeclaration of '%s'",
574 get_tok_str(v & ~SYM_STRUCT, NULL));
576 return s;
579 /* push a global identifier */
580 ST_FUNC Sym *global_identifier_push(int v, int t, int c)
582 Sym *s, **ps;
583 s = sym_push2(&global_stack, v, t, c);
584 /* don't record anonymous symbol */
585 if (v < SYM_FIRST_ANOM) {
586 ps = &table_ident[v - TOK_IDENT]->sym_identifier;
587 /* modify the top most local identifier, so that
588 sym_identifier will point to 's' when popped */
589 while (*ps != NULL && (*ps)->sym_scope)
590 ps = &(*ps)->prev_tok;
591 s->prev_tok = *ps;
592 *ps = s;
594 return s;
597 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
598 pop them yet from the list, but do remove them from the token array. */
599 ST_FUNC void sym_pop(Sym **ptop, Sym *b, int keep)
601 Sym *s, *ss, **ps;
602 TokenSym *ts;
603 int v;
605 s = *ptop;
606 while(s != b) {
607 ss = s->prev;
608 v = s->v;
609 /* remove symbol in token array */
610 /* XXX: simplify */
611 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
612 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
613 if (v & SYM_STRUCT)
614 ps = &ts->sym_struct;
615 else
616 ps = &ts->sym_identifier;
617 *ps = s->prev_tok;
619 if (!keep)
620 sym_free(s);
621 s = ss;
623 if (!keep)
624 *ptop = b;
627 /* ------------------------------------------------------------------------- */
629 static void vsetc(CType *type, int r, CValue *vc)
631 int v;
633 if (vtop >= vstack + (VSTACK_SIZE - 1))
634 tcc_error("memory full (vstack)");
635 /* we cannot leave values in the CPU flags if other instructions are generated. Also
636 avoid leaving VT_JMP anywhere except on the top of the stack
637 because it would complicate the code generator.
639 Don't do this when nocode_wanted. vtop might come from
640 !nocode_wanted regions (see 88_codeopt.c) and transforming
641 it to a register without actually generating code is wrong
642 as their value might still be used for real. All values
643 we push under nocode_wanted will eventually be popped
644 again, so that the VT_CMP/VT_JMP value will be in vtop
645 when code is unsuppressed again.
647 Same logic below in vswap(); */
648 if (vtop >= vstack && !nocode_wanted) {
649 v = vtop->r & VT_VALMASK;
650 if (v == VT_CMP || (v & ~1) == VT_JMP)
651 gv(RC_INT);
654 vtop++;
655 vtop->type = *type;
656 vtop->r = r;
657 vtop->r2 = VT_CONST;
658 vtop->c = *vc;
659 vtop->sym = NULL;
662 ST_FUNC void vswap(void)
664 SValue tmp;
665 /* cannot vswap cpu flags. See comment at vsetc() above */
666 if (vtop >= vstack && !nocode_wanted) {
667 int v = vtop->r & VT_VALMASK;
668 if (v == VT_CMP || (v & ~1) == VT_JMP)
669 gv(RC_INT);
671 tmp = vtop[0];
672 vtop[0] = vtop[-1];
673 vtop[-1] = tmp;
676 /* pop stack value */
677 ST_FUNC void vpop(void)
679 int v;
680 v = vtop->r & VT_VALMASK;
681 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
682 /* for x86, we need to pop the FP stack */
683 if (v == TREG_ST0) {
684 o(0xd8dd); /* fstp %st(0) */
685 } else
686 #endif
687 if (v == VT_JMP || v == VT_JMPI) {
688 /* need to put correct jump if && or || without test */
689 gsym(vtop->c.i);
691 vtop--;
694 /* push constant of type "type" with useless value */
695 ST_FUNC void vpush(CType *type)
697 vset(type, VT_CONST, 0);
700 /* push integer constant */
701 ST_FUNC void vpushi(int v)
703 CValue cval;
704 cval.i = v;
705 vsetc(&int_type, VT_CONST, &cval);
708 /* push a pointer sized constant */
709 static void vpushs(addr_t v)
711 CValue cval;
712 cval.i = v;
713 vsetc(&size_type, VT_CONST, &cval);
716 /* push arbitrary 64bit constant */
717 ST_FUNC void vpush64(int ty, unsigned long long v)
719 CValue cval;
720 CType ctype;
721 ctype.t = ty;
722 ctype.ref = NULL;
723 cval.i = v;
724 vsetc(&ctype, VT_CONST, &cval);
727 /* push long long constant */
728 static inline void vpushll(long long v)
730 vpush64(VT_LLONG, v);
733 ST_FUNC void vset(CType *type, int r, int v)
735 CValue cval;
737 cval.i = v;
738 vsetc(type, r, &cval);
741 static void vseti(int r, int v)
743 CType type;
744 type.t = VT_INT;
745 type.ref = NULL;
746 vset(&type, r, v);
749 ST_FUNC void vpushv(SValue *v)
751 if (vtop >= vstack + (VSTACK_SIZE - 1))
752 tcc_error("memory full (vstack)");
753 vtop++;
754 *vtop = *v;
757 static void vdup(void)
759 vpushv(vtop);
762 /* rotate n first stack elements to the bottom
763 I1 ... In -> I2 ... In I1 [top is right]
764 */
765 ST_FUNC void vrotb(int n)
767 int i;
768 SValue tmp;
770 tmp = vtop[-n + 1];
771 for(i=-n+1;i!=0;i++)
772 vtop[i] = vtop[i+1];
773 vtop[0] = tmp;
776 /* rotate the n elements before entry e towards the top
777 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
778 */
779 ST_FUNC void vrote(SValue *e, int n)
781 int i;
782 SValue tmp;
784 tmp = *e;
785 for(i = 0;i < n - 1; i++)
786 e[-i] = e[-i - 1];
787 e[-n + 1] = tmp;
790 /* rotate n first stack elements to the top
791 I1 ... In -> In I1 ... I(n-1) [top is right]
792 */
793 ST_FUNC void vrott(int n)
795 vrote(vtop, n);
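/* Illustrative example for the three rotations above: with the values
   A B C on the value stack (C on top, at vtop), vrotb(3) gives B C A and
   vrott(3) gives C A B, matching the diagrams in the comments. */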
798 /* push a symbol value of TYPE */
799 static inline void vpushsym(CType *type, Sym *sym)
801 CValue cval;
802 cval.i = 0;
803 vsetc(type, VT_CONST | VT_SYM, &cval);
804 vtop->sym = sym;
807 /* Return a static symbol pointing to a section */
808 ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
810 int v;
811 Sym *sym;
813 v = anon_sym++;
814 sym = global_identifier_push(v, type->t | VT_STATIC, 0);
815 sym->type.ref = type->ref;
816 sym->r = VT_CONST | VT_SYM;
817 put_extern_sym(sym, sec, offset, size);
818 return sym;
821 /* push a reference to a section offset by adding a dummy symbol */
822 static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
824 vpushsym(type, get_sym_ref(type, sec, offset, size));
827 /* define a new external reference to a symbol 'v' of type 'u' */
828 ST_FUNC Sym *external_global_sym(int v, CType *type, int r)
830 Sym *s;
832 s = sym_find(v);
833 if (!s) {
834 /* push forward reference */
835 s = global_identifier_push(v, type->t | VT_EXTERN, 0);
836 s->type.ref = type->ref;
837 s->r = r | VT_CONST | VT_SYM;
838 } else if (IS_ASM_SYM(s)) {
839 s->type.t = type->t | (s->type.t & VT_EXTERN);
840 s->type.ref = type->ref;
841 update_storage(s);
843 return s;
846 /* Merge some type attributes. */
847 static void patch_type(Sym *sym, CType *type)
849 if (!(type->t & VT_EXTERN)) {
850 if (!(sym->type.t & VT_EXTERN))
851 tcc_error("redefinition of '%s'", get_tok_str(sym->v, NULL));
852 sym->type.t &= ~VT_EXTERN;
855 if (IS_ASM_SYM(sym)) {
856 /* stay static if both are static */
857 sym->type.t = type->t & (sym->type.t | ~VT_STATIC);
858 sym->type.ref = type->ref;
861 if (!is_compatible_types(&sym->type, type)) {
862 tcc_error("incompatible types for redefinition of '%s'",
863 get_tok_str(sym->v, NULL));
865 } else if ((sym->type.t & VT_BTYPE) == VT_FUNC) {
866 int static_proto = sym->type.t & VT_STATIC;
867 /* warn if static follows non-static function declaration */
868 if ((type->t & VT_STATIC) && !static_proto && !(type->t & VT_INLINE))
869 tcc_warning("static storage ignored for redefinition of '%s'",
870 get_tok_str(sym->v, NULL));
872 if (0 == (type->t & VT_EXTERN)) {
873 /* put complete type, use static from prototype */
874 sym->type.t = (type->t & ~VT_STATIC) | static_proto;
875 if (type->t & VT_INLINE)
876 sym->type.t = type->t;
877 sym->type.ref = type->ref;
880 } else {
881 if ((sym->type.t & VT_ARRAY) && type->ref->c >= 0) {
882 /* set array size if it was omitted in extern declaration */
883 if (sym->type.ref->c < 0)
884 sym->type.ref->c = type->ref->c;
885 else if (sym->type.ref->c != type->ref->c)
886 tcc_error("conflicting type for '%s'", get_tok_str(sym->v, NULL));
888 if ((type->t ^ sym->type.t) & VT_STATIC)
889 tcc_warning("storage mismatch for redefinition of '%s'",
890 get_tok_str(sym->v, NULL));
895 /* Merge some storage attributes. */
896 static void patch_storage(Sym *sym, AttributeDef *ad, CType *type)
898 if (type)
899 patch_type(sym, type);
901 #ifdef TCC_TARGET_PE
902 if (sym->a.dllimport != ad->a.dllimport)
903 tcc_error("incompatible dll linkage for redefinition of '%s'",
904 get_tok_str(sym->v, NULL));
905 sym->a.dllexport |= ad->a.dllexport;
906 #endif
907 sym->a.weak |= ad->a.weak;
908 if (ad->a.visibility) {
909 int vis = sym->a.visibility;
910 int vis2 = ad->a.visibility;
911 if (vis == STV_DEFAULT)
912 vis = vis2;
913 else if (vis2 != STV_DEFAULT)
914 vis = (vis < vis2) ? vis : vis2;
915 sym->a.visibility = vis;
917 if (ad->a.aligned)
918 sym->a.aligned = ad->a.aligned;
919 if (ad->asm_label)
920 sym->asm_label = ad->asm_label;
921 update_storage(sym);
924 /* define a new external reference to a symbol 'v' */
925 static Sym *external_sym(int v, CType *type, int r, AttributeDef *ad)
927 Sym *s;
928 s = sym_find(v);
929 if (!s) {
930 /* push forward reference */
931 s = sym_push(v, type, r | VT_CONST | VT_SYM, 0);
932 s->type.t |= VT_EXTERN;
933 s->a = ad->a;
934 s->sym_scope = 0;
935 } else {
936 if (s->type.ref == func_old_type.ref) {
937 s->type.ref = type->ref;
938 s->r = r | VT_CONST | VT_SYM;
939 s->type.t |= VT_EXTERN;
941 patch_storage(s, ad, type);
943 return s;
946 /* push a reference to global symbol v */
947 ST_FUNC void vpush_global_sym(CType *type, int v)
949 vpushsym(type, external_global_sym(v, type, 0));
952 /* save registers up to (vtop - n) stack entry */
953 ST_FUNC void save_regs(int n)
955 SValue *p, *p1;
956 for(p = vstack, p1 = vtop - n; p <= p1; p++)
957 save_reg(p->r);
960 /* save r to the memory stack, and mark it as being free */
961 ST_FUNC void save_reg(int r)
963 save_reg_upstack(r, 0);
966 /* save r to the memory stack, and mark it as being free,
967 if seen up to (vtop - n) stack entry */
968 ST_FUNC void save_reg_upstack(int r, int n)
970 int l, saved, size, align;
971 SValue *p, *p1, sv;
972 CType *type;
974 if ((r &= VT_VALMASK) >= VT_CONST)
975 return;
976 if (nocode_wanted)
977 return;
979 /* modify all stack values */
980 saved = 0;
981 l = 0;
982 for(p = vstack, p1 = vtop - n; p <= p1; p++) {
983 if ((p->r & VT_VALMASK) == r ||
984 ((p->type.t & VT_BTYPE) == VT_LLONG && (p->r2 & VT_VALMASK) == r)) {
985 /* must save value on stack if not already done */
986 if (!saved) {
987 /* NOTE: must reload 'r' because r might be equal to r2 */
988 r = p->r & VT_VALMASK;
989 /* store register in the stack */
990 type = &p->type;
991 if ((p->r & VT_LVAL) ||
992 (!is_float(type->t) && (type->t & VT_BTYPE) != VT_LLONG))
993 #if PTR_SIZE == 8
994 type = &char_pointer_type;
995 #else
996 type = &int_type;
997 #endif
998 size = type_size(type, &align);
999 loc = (loc - size) & -align;
1000 sv.type.t = type->t;
1001 sv.r = VT_LOCAL | VT_LVAL;
1002 sv.c.i = loc;
1003 store(r, &sv);
1004 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1005 /* x86 specific: need to pop fp register ST0 if saved */
1006 if (r == TREG_ST0) {
1007 o(0xd8dd); /* fstp %st(0) */
1009 #endif
1010 #if PTR_SIZE == 4
1011 /* special long long case */
1012 if ((type->t & VT_BTYPE) == VT_LLONG) {
1013 sv.c.i += 4;
1014 store(p->r2, &sv);
1016 #endif
1017 l = loc;
1018 saved = 1;
1020 /* mark that stack entry as being saved on the stack */
1021 if (p->r & VT_LVAL) {
1022 /* also clear the bounded flag because the
1023 relocation address of the function was stored in
1024 p->c.i */
1025 p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
1026 } else {
1027 p->r = lvalue_type(p->type.t) | VT_LOCAL;
1029 p->r2 = VT_CONST;
1030 p->c.i = l;
1035 #ifdef TCC_TARGET_ARM
1036 /* find a register of class 'rc2' with at most one reference on stack.
1037 * If none, call get_reg(rc) */
1038 ST_FUNC int get_reg_ex(int rc, int rc2)
1040 int r;
1041 SValue *p;
1043 for(r=0;r<NB_REGS;r++) {
1044 if (reg_classes[r] & rc2) {
1045 int n;
1046 n=0;
1047 for(p = vstack; p <= vtop; p++) {
1048 if ((p->r & VT_VALMASK) == r ||
1049 (p->r2 & VT_VALMASK) == r)
1050 n++;
1052 if (n <= 1)
1053 return r;
1056 return get_reg(rc);
1058 #endif
1060 /* find a free register of class 'rc'. If none, save one register */
1061 ST_FUNC int get_reg(int rc)
1063 int r;
1064 SValue *p;
1066 /* find a free register */
1067 for(r=0;r<NB_REGS;r++) {
1068 if (reg_classes[r] & rc) {
1069 if (nocode_wanted)
1070 return r;
1071 for(p=vstack;p<=vtop;p++) {
1072 if ((p->r & VT_VALMASK) == r ||
1073 (p->r2 & VT_VALMASK) == r)
1074 goto notfound;
1076 return r;
1078 notfound: ;
1081 /* no register left : free the first one on the stack (VERY
1082 IMPORTANT to start from the bottom to ensure that we don't
1083 spill registers used in gen_opi()) */
1084 for(p=vstack;p<=vtop;p++) {
1085 /* look at second register (if long long) */
1086 r = p->r2 & VT_VALMASK;
1087 if (r < VT_CONST && (reg_classes[r] & rc))
1088 goto save_found;
1089 r = p->r & VT_VALMASK;
1090 if (r < VT_CONST && (reg_classes[r] & rc)) {
1091 save_found:
1092 save_reg(r);
1093 return r;
1096 /* Should never come here */
1097 return -1;
1100 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1101 if needed */
1102 static void move_reg(int r, int s, int t)
1104 SValue sv;
1106 if (r != s) {
1107 save_reg(r);
1108 sv.type.t = t;
1109 sv.type.ref = NULL;
1110 sv.r = s;
1111 sv.c.i = 0;
1112 load(r, &sv);
1116 /* get address of vtop (vtop MUST BE an lvalue) */
1117 ST_FUNC void gaddrof(void)
1119 vtop->r &= ~VT_LVAL;
1120 /* tricky: if saved lvalue, then we can go back to lvalue */
1121 if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
1122 vtop->r = (vtop->r & ~(VT_VALMASK | VT_LVAL_TYPE)) | VT_LOCAL | VT_LVAL;
1127 #ifdef CONFIG_TCC_BCHECK
1128 /* generate lvalue bound code */
1129 static void gbound(void)
1131 int lval_type;
1132 CType type1;
1134 vtop->r &= ~VT_MUSTBOUND;
1135 /* if lvalue, then use checking code before dereferencing */
1136 if (vtop->r & VT_LVAL) {
1137 /* if not VT_BOUNDED value, then make one */
1138 if (!(vtop->r & VT_BOUNDED)) {
1139 lval_type = vtop->r & (VT_LVAL_TYPE | VT_LVAL);
1140 /* must save type because we must set it to int to get pointer */
1141 type1 = vtop->type;
1142 vtop->type.t = VT_PTR;
1143 gaddrof();
1144 vpushi(0);
1145 gen_bounded_ptr_add();
1146 vtop->r |= lval_type;
1147 vtop->type = type1;
1149 /* then check for dereferencing */
1150 gen_bounded_ptr_deref();
1153 #endif
1155 static void incr_bf_adr(int o)
1157 vtop->type = char_pointer_type;
1158 gaddrof();
1159 vpushi(o);
1160 gen_op('+');
1161 vtop->type.t = (vtop->type.t & ~(VT_BTYPE|VT_DEFSIGN))
1162 | (VT_BYTE|VT_UNSIGNED);
1163 vtop->r = (vtop->r & ~VT_LVAL_TYPE)
1164 | (VT_LVAL_BYTE|VT_LVAL_UNSIGNED|VT_LVAL);
1167 /* single-byte load mode for packed or otherwise unaligned bitfields */
1168 static void load_packed_bf(CType *type, int bit_pos, int bit_size)
1170 int n, o, bits;
1171 save_reg_upstack(vtop->r, 1);
1172 vpush64(type->t & VT_BTYPE, 0); // B X
1173 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
1174 do {
1175 vswap(); // X B
1176 incr_bf_adr(o);
1177 vdup(); // X B B
1178 n = 8 - bit_pos;
1179 if (n > bit_size)
1180 n = bit_size;
1181 if (bit_pos)
1182 vpushi(bit_pos), gen_op(TOK_SHR), bit_pos = 0; // X B Y
1183 if (n < 8)
1184 vpushi((1 << n) - 1), gen_op('&');
1185 gen_cast(type);
1186 if (bits)
1187 vpushi(bits), gen_op(TOK_SHL);
1188 vrotb(3); // B Y X
1189 gen_op('|'); // B X
1190 bits += n, bit_size -= n, o = 1;
1191 } while (bit_size);
1192 vswap(), vpop();
1193 if (!(type->t & VT_UNSIGNED)) {
1194 n = ((type->t & VT_BTYPE) == VT_LLONG ? 64 : 32) - bits;
1195 vpushi(n), gen_op(TOK_SHL);
1196 vpushi(n), gen_op(TOK_SAR);
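/* Worked example (informal) for load_packed_bf(): a 5-bit field at
   bit_pos 6 starts in byte 0 and spills into byte 1. The first loop
   iteration loads byte 0, shifts it right by 6 and masks the 2 bits that
   belong to the field; the second loads byte 1, masks its low 3 bits,
   shifts them left by 2 and ORs them in; the final SHL/SAR pair then sign
   extends the 5-bit result when the field type is signed. */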
1200 /* single-byte store mode for packed or otherwise unaligned bitfields */
1201 static void store_packed_bf(int bit_pos, int bit_size)
1203 int bits, n, o, m, c;
1205 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1206 vswap(); // X B
1207 save_reg_upstack(vtop->r, 1);
1208 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
1209 do {
1210 incr_bf_adr(o); // X B
1211 vswap(); //B X
1212 c ? vdup() : gv_dup(); // B V X
1213 vrott(3); // X B V
1214 if (bits)
1215 vpushi(bits), gen_op(TOK_SHR);
1216 if (bit_pos)
1217 vpushi(bit_pos), gen_op(TOK_SHL);
1218 n = 8 - bit_pos;
1219 if (n > bit_size)
1220 n = bit_size;
1221 if (n < 8) {
1222 m = ((1 << n) - 1) << bit_pos;
1223 vpushi(m), gen_op('&'); // X B V1
1224 vpushv(vtop-1); // X B V1 B
1225 vpushi(m & 0x80 ? ~m & 0x7f : ~m);
1226 gen_op('&'); // X B V1 B1
1227 gen_op('|'); // X B V2
1229 vdup(), vtop[-1] = vtop[-2]; // X B B V2
1230 vstore(), vpop(); // X B
1231 bits += n, bit_size -= n, bit_pos = 0, o = 1;
1232 } while (bit_size);
1233 vpop(), vpop();
1236 static int adjust_bf(SValue *sv, int bit_pos, int bit_size)
1238 int t;
1239 if (0 == sv->type.ref)
1240 return 0;
1241 t = sv->type.ref->auxtype;
1242 if (t != -1 && t != VT_STRUCT) {
1243 sv->type.t = (sv->type.t & ~VT_BTYPE) | t;
1244 sv->r = (sv->r & ~VT_LVAL_TYPE) | lvalue_type(sv->type.t);
1246 return t;
1249 /* store vtop in a register belonging to class 'rc'. lvalues are
1250 converted to values. Cannot be used if the value cannot be converted
1251 to a register value (such as structures). */
1252 ST_FUNC int gv(int rc)
1254 int r, bit_pos, bit_size, size, align, rc2;
1256 /* NOTE: get_reg can modify vstack[] */
1257 if (vtop->type.t & VT_BITFIELD) {
1258 CType type;
1260 bit_pos = BIT_POS(vtop->type.t);
1261 bit_size = BIT_SIZE(vtop->type.t);
1262 /* remove bit field info to avoid loops */
1263 vtop->type.t &= ~VT_STRUCT_MASK;
1265 type.ref = NULL;
1266 type.t = vtop->type.t & VT_UNSIGNED;
1267 if ((vtop->type.t & VT_BTYPE) == VT_BOOL)
1268 type.t |= VT_UNSIGNED;
1270 r = adjust_bf(vtop, bit_pos, bit_size);
1272 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
1273 type.t |= VT_LLONG;
1274 else
1275 type.t |= VT_INT;
1277 if (r == VT_STRUCT) {
1278 load_packed_bf(&type, bit_pos, bit_size);
1279 } else {
1280 int bits = (type.t & VT_BTYPE) == VT_LLONG ? 64 : 32;
1281 /* cast to int to propagate signedness in following ops */
1282 gen_cast(&type);
1283 /* generate shifts */
1284 vpushi(bits - (bit_pos + bit_size));
1285 gen_op(TOK_SHL);
1286 vpushi(bits - bit_size);
1287 /* NOTE: transformed to SHR if unsigned */
1288 gen_op(TOK_SAR);
1290 r = gv(rc);
1291 } else {
1292 if (is_float(vtop->type.t) &&
1293 (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1294 unsigned long offset;
1295 /* CPUs usually cannot use float constants, so we store them
1296 generically in data segment */
1297 size = type_size(&vtop->type, &align);
1298 if (NODATA_WANTED)
1299 size = 0, align = 1;
1300 offset = section_add(data_section, size, align);
1301 vpush_ref(&vtop->type, data_section, offset, size);
1302 vswap();
1303 init_putv(&vtop->type, data_section, offset);
1304 vtop->r |= VT_LVAL;
1306 #ifdef CONFIG_TCC_BCHECK
1307 if (vtop->r & VT_MUSTBOUND)
1308 gbound();
1309 #endif
1311 r = vtop->r & VT_VALMASK;
1312 rc2 = (rc & RC_FLOAT) ? RC_FLOAT : RC_INT;
1313 #ifndef TCC_TARGET_ARM64
1314 if (rc == RC_IRET)
1315 rc2 = RC_LRET;
1316 #ifdef TCC_TARGET_X86_64
1317 else if (rc == RC_FRET)
1318 rc2 = RC_QRET;
1319 #endif
1320 #endif
1321 /* need to reload if:
1322 - constant
1323 - lvalue (need to dereference pointer)
1324 - already a register, but not in the right class */
1325 if (r >= VT_CONST
1326 || (vtop->r & VT_LVAL)
1327 || !(reg_classes[r] & rc)
1328 #if PTR_SIZE == 8
1329 || ((vtop->type.t & VT_BTYPE) == VT_QLONG && !(reg_classes[vtop->r2] & rc2))
1330 || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT && !(reg_classes[vtop->r2] & rc2))
1331 #else
1332 || ((vtop->type.t & VT_BTYPE) == VT_LLONG && !(reg_classes[vtop->r2] & rc2))
1333 #endif
1336 r = get_reg(rc);
1337 #if PTR_SIZE == 8
1338 if (((vtop->type.t & VT_BTYPE) == VT_QLONG) || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT)) {
1339 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
1340 #else
1341 if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
1342 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
1343 unsigned long long ll;
1344 #endif
1345 int r2, original_type;
1346 original_type = vtop->type.t;
1347 /* two register type load : expand to two words
1348 temporarily */
1349 #if PTR_SIZE == 4
1350 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1351 /* load constant */
1352 ll = vtop->c.i;
1353 vtop->c.i = ll; /* first word */
1354 load(r, vtop);
1355 vtop->r = r; /* save register value */
1356 vpushi(ll >> 32); /* second word */
1357 } else
1358 #endif
1359 if (vtop->r & VT_LVAL) {
1360 /* We do not want to modify the long long
1361 pointer here, so the safest (and least
1362 efficient) approach is to save all the other
1363 registers on the stack. XXX: totally inefficient. */
1364 #if 0
1365 save_regs(1);
1366 #else
1367 /* lvalue_save: save only if used further down the stack */
1368 save_reg_upstack(vtop->r, 1);
1369 #endif
1370 /* load from memory */
1371 vtop->type.t = load_type;
1372 load(r, vtop);
1373 vdup();
1374 vtop[-1].r = r; /* save register value */
1375 /* increment pointer to get second word */
1376 vtop->type.t = addr_type;
1377 gaddrof();
1378 vpushi(load_size);
1379 gen_op('+');
1380 vtop->r |= VT_LVAL;
1381 vtop->type.t = load_type;
1382 } else {
1383 /* move registers */
1384 load(r, vtop);
1385 vdup();
1386 vtop[-1].r = r; /* save register value */
1387 vtop->r = vtop[-1].r2;
1389 /* Allocate second register. Here we rely on the fact that
1390 get_reg() tries first to free r2 of an SValue. */
1391 r2 = get_reg(rc2);
1392 load(r2, vtop);
1393 vpop();
1394 /* write second register */
1395 vtop->r2 = r2;
1396 vtop->type.t = original_type;
1397 } else if ((vtop->r & VT_LVAL) && !is_float(vtop->type.t)) {
1398 int t1, t;
1399 /* lvalue of scalar type : need to use lvalue type
1400 because of possible cast */
1401 t = vtop->type.t;
1402 t1 = t;
1403 /* compute memory access type */
1404 if (vtop->r & VT_LVAL_BYTE)
1405 t = VT_BYTE;
1406 else if (vtop->r & VT_LVAL_SHORT)
1407 t = VT_SHORT;
1408 if (vtop->r & VT_LVAL_UNSIGNED)
1409 t |= VT_UNSIGNED;
1410 vtop->type.t = t;
1411 load(r, vtop);
1412 /* restore wanted type */
1413 vtop->type.t = t1;
1414 } else {
1415 /* one register type load */
1416 load(r, vtop);
1419 vtop->r = r;
1420 #ifdef TCC_TARGET_C67
1421 /* uses register pairs for doubles */
1422 if ((vtop->type.t & VT_BTYPE) == VT_DOUBLE)
1423 vtop->r2 = r+1;
1424 #endif
1426 return r;
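/* Typical use of gv() (informal sketch): after e.g. vpushi(42) the value
   only exists as a VT_CONST stack entry; r = gv(RC_INT) picks a free
   integer register, emits the load and rewrites vtop so that vtop->r == r
   (with r2 holding the high part for two-register types, as handled
   above). */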
1429 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1430 ST_FUNC void gv2(int rc1, int rc2)
1432 int v;
1434 /* generate more generic register first. But VT_JMP or VT_CMP
1435 values must be generated first in all cases to avoid possible
1436 reload errors */
1437 v = vtop[0].r & VT_VALMASK;
1438 if (v != VT_CMP && (v & ~1) != VT_JMP && rc1 <= rc2) {
1439 vswap();
1440 gv(rc1);
1441 vswap();
1442 gv(rc2);
1443 /* test if reload is needed for first register */
1444 if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
1445 vswap();
1446 gv(rc1);
1447 vswap();
1449 } else {
1450 gv(rc2);
1451 vswap();
1452 gv(rc1);
1453 vswap();
1454 /* test if reload is needed for first register */
1455 if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
1456 gv(rc2);
1461 #ifndef TCC_TARGET_ARM64
1462 /* wrapper around RC_FRET to return a register by type */
1463 static int rc_fret(int t)
1465 #ifdef TCC_TARGET_X86_64
1466 if (t == VT_LDOUBLE) {
1467 return RC_ST0;
1469 #endif
1470 return RC_FRET;
1472 #endif
1474 /* wrapper around REG_FRET to return a register by type */
1475 static int reg_fret(int t)
1477 #ifdef TCC_TARGET_X86_64
1478 if (t == VT_LDOUBLE) {
1479 return TREG_ST0;
1481 #endif
1482 return REG_FRET;
1485 #if PTR_SIZE == 4
1486 /* expand 64bit on stack in two ints */
1487 static void lexpand(void)
1489 int u, v;
1490 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1491 v = vtop->r & (VT_VALMASK | VT_LVAL);
1492 if (v == VT_CONST) {
1493 vdup();
1494 vtop[0].c.i >>= 32;
1495 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1496 vdup();
1497 vtop[0].c.i += 4;
1498 } else {
1499 gv(RC_INT);
1500 vdup();
1501 vtop[0].r = vtop[-1].r2;
1502 vtop[0].r2 = vtop[-1].r2 = VT_CONST;
1504 vtop[0].type.t = vtop[-1].type.t = VT_INT | u;
1506 #endif
1508 #ifdef TCC_TARGET_ARM
1509 /* expand long long on stack */
1510 ST_FUNC void lexpand_nr(void)
1512 int u,v;
1514 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1515 vdup();
1516 vtop->r2 = VT_CONST;
1517 vtop->type.t = VT_INT | u;
1518 v=vtop[-1].r & (VT_VALMASK | VT_LVAL);
1519 if (v == VT_CONST) {
1520 vtop[-1].c.i = vtop->c.i;
1521 vtop->c.i = vtop->c.i >> 32;
1522 vtop->r = VT_CONST;
1523 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1524 vtop->c.i += 4;
1525 vtop->r = vtop[-1].r;
1526 } else if (v > VT_CONST) {
1527 vtop--;
1528 lexpand();
1529 } else
1530 vtop->r = vtop[-1].r2;
1531 vtop[-1].r2 = VT_CONST;
1532 vtop[-1].type.t = VT_INT | u;
1534 #endif
1536 #if PTR_SIZE == 4
1537 /* build a long long from two ints */
1538 static void lbuild(int t)
1540 gv2(RC_INT, RC_INT);
1541 vtop[-1].r2 = vtop[0].r;
1542 vtop[-1].type.t = t;
1543 vpop();
1545 #endif
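/* Informal example for lexpand()/lbuild() on 32-bit targets: a 64-bit
   constant such as 0x100000002 is expanded into two VT_INT entries, the
   low word 0x00000002 below and the high word 0x00000001 on top; lbuild()
   later merges such a pair back into a single entry whose r/r2 hold the
   low/high registers. */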
1547 /* convert stack entry to register and duplicate its value in another
1548 register */
1549 static void gv_dup(void)
1551 int rc, t, r, r1;
1552 SValue sv;
1554 t = vtop->type.t;
1555 #if PTR_SIZE == 4
1556 if ((t & VT_BTYPE) == VT_LLONG) {
1557 if (t & VT_BITFIELD) {
1558 gv(RC_INT);
1559 t = vtop->type.t;
1561 lexpand();
1562 gv_dup();
1563 vswap();
1564 vrotb(3);
1565 gv_dup();
1566 vrotb(4);
1567 /* stack: H L L1 H1 */
1568 lbuild(t);
1569 vrotb(3);
1570 vrotb(3);
1571 vswap();
1572 lbuild(t);
1573 vswap();
1574 } else
1575 #endif
1577 /* duplicate value */
1578 rc = RC_INT;
1579 sv.type.t = VT_INT;
1580 if (is_float(t)) {
1581 rc = RC_FLOAT;
1582 #ifdef TCC_TARGET_X86_64
1583 if ((t & VT_BTYPE) == VT_LDOUBLE) {
1584 rc = RC_ST0;
1586 #endif
1587 sv.type.t = t;
1589 r = gv(rc);
1590 r1 = get_reg(rc);
1591 sv.r = r;
1592 sv.c.i = 0;
1593 load(r1, &sv); /* move r to r1 */
1594 vdup();
1595 /* duplicates value */
1596 if (r != r1)
1597 vtop->r = r1;
1601 /* Generate value test
1603 * Generate a test for any value (jump, comparison and integers) */
1604 ST_FUNC int gvtst(int inv, int t)
1606 int v = vtop->r & VT_VALMASK;
1607 if (v != VT_CMP && v != VT_JMP && v != VT_JMPI) {
1608 vpushi(0);
1609 gen_op(TOK_NE);
1611 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1612 /* constant jmp optimization */
1613 if ((vtop->c.i != 0) != inv)
1614 t = gjmp(t);
1615 vtop--;
1616 return t;
1618 return gtst(inv, t);
1621 #if PTR_SIZE == 4
1622 /* generate CPU independent (unsigned) long long operations */
1623 static void gen_opl(int op)
1625 int t, a, b, op1, c, i;
1626 int func;
1627 unsigned short reg_iret = REG_IRET;
1628 unsigned short reg_lret = REG_LRET;
1629 SValue tmp;
1631 switch(op) {
1632 case '/':
1633 case TOK_PDIV:
1634 func = TOK___divdi3;
1635 goto gen_func;
1636 case TOK_UDIV:
1637 func = TOK___udivdi3;
1638 goto gen_func;
1639 case '%':
1640 func = TOK___moddi3;
1641 goto gen_mod_func;
1642 case TOK_UMOD:
1643 func = TOK___umoddi3;
1644 gen_mod_func:
1645 #ifdef TCC_ARM_EABI
1646 reg_iret = TREG_R2;
1647 reg_lret = TREG_R3;
1648 #endif
1649 gen_func:
1650 /* call generic long long function */
1651 vpush_global_sym(&func_old_type, func);
1652 vrott(3);
1653 gfunc_call(2);
1654 vpushi(0);
1655 vtop->r = reg_iret;
1656 vtop->r2 = reg_lret;
1657 break;
1658 case '^':
1659 case '&':
1660 case '|':
1661 case '*':
1662 case '+':
1663 case '-':
1664 //pv("gen_opl A",0,2);
1665 t = vtop->type.t;
1666 vswap();
1667 lexpand();
1668 vrotb(3);
1669 lexpand();
1670 /* stack: L1 H1 L2 H2 */
1671 tmp = vtop[0];
1672 vtop[0] = vtop[-3];
1673 vtop[-3] = tmp;
1674 tmp = vtop[-2];
1675 vtop[-2] = vtop[-3];
1676 vtop[-3] = tmp;
1677 vswap();
1678 /* stack: H1 H2 L1 L2 */
1679 //pv("gen_opl B",0,4);
1680 if (op == '*') {
1681 vpushv(vtop - 1);
1682 vpushv(vtop - 1);
1683 gen_op(TOK_UMULL);
1684 lexpand();
1685 /* stack: H1 H2 L1 L2 ML MH */
1686 for(i=0;i<4;i++)
1687 vrotb(6);
1688 /* stack: ML MH H1 H2 L1 L2 */
1689 tmp = vtop[0];
1690 vtop[0] = vtop[-2];
1691 vtop[-2] = tmp;
1692 /* stack: ML MH H1 L2 H2 L1 */
1693 gen_op('*');
1694 vrotb(3);
1695 vrotb(3);
1696 gen_op('*');
1697 /* stack: ML MH M1 M2 */
1698 gen_op('+');
1699 gen_op('+');
1700 } else if (op == '+' || op == '-') {
1701 /* XXX: add non carry method too (for MIPS or alpha) */
1702 if (op == '+')
1703 op1 = TOK_ADDC1;
1704 else
1705 op1 = TOK_SUBC1;
1706 gen_op(op1);
1707 /* stack: H1 H2 (L1 op L2) */
1708 vrotb(3);
1709 vrotb(3);
1710 gen_op(op1 + 1); /* TOK_xxxC2 */
1711 } else {
1712 gen_op(op);
1713 /* stack: H1 H2 (L1 op L2) */
1714 vrotb(3);
1715 vrotb(3);
1716 /* stack: (L1 op L2) H1 H2 */
1717 gen_op(op);
1718 /* stack: (L1 op L2) (H1 op H2) */
1720 /* stack: L H */
1721 lbuild(t);
1722 break;
1723 case TOK_SAR:
1724 case TOK_SHR:
1725 case TOK_SHL:
1726 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1727 t = vtop[-1].type.t;
1728 vswap();
1729 lexpand();
1730 vrotb(3);
1731 /* stack: L H shift */
1732 c = (int)vtop->c.i;
1733 /* constant: simpler */
1734 /* NOTE: all comments are for SHL. the other cases are
1735 done by swapping words */
1736 vpop();
1737 if (op != TOK_SHL)
1738 vswap();
1739 if (c >= 32) {
1740 /* stack: L H */
1741 vpop();
1742 if (c > 32) {
1743 vpushi(c - 32);
1744 gen_op(op);
1746 if (op != TOK_SAR) {
1747 vpushi(0);
1748 } else {
1749 gv_dup();
1750 vpushi(31);
1751 gen_op(TOK_SAR);
1753 vswap();
1754 } else {
1755 vswap();
1756 gv_dup();
1757 /* stack: H L L */
1758 vpushi(c);
1759 gen_op(op);
1760 vswap();
1761 vpushi(32 - c);
1762 if (op == TOK_SHL)
1763 gen_op(TOK_SHR);
1764 else
1765 gen_op(TOK_SHL);
1766 vrotb(3);
1767 /* stack: L L H */
1768 vpushi(c);
1769 if (op == TOK_SHL)
1770 gen_op(TOK_SHL);
1771 else
1772 gen_op(TOK_SHR);
1773 gen_op('|');
1775 if (op != TOK_SHL)
1776 vswap();
1777 lbuild(t);
1778 } else {
1779 /* XXX: should provide a faster fallback on x86 ? */
1780 switch(op) {
1781 case TOK_SAR:
1782 func = TOK___ashrdi3;
1783 goto gen_func;
1784 case TOK_SHR:
1785 func = TOK___lshrdi3;
1786 goto gen_func;
1787 case TOK_SHL:
1788 func = TOK___ashldi3;
1789 goto gen_func;
1792 break;
1793 default:
1794 /* compare operations */
1795 t = vtop->type.t;
1796 vswap();
1797 lexpand();
1798 vrotb(3);
1799 lexpand();
1800 /* stack: L1 H1 L2 H2 */
1801 tmp = vtop[-1];
1802 vtop[-1] = vtop[-2];
1803 vtop[-2] = tmp;
1804 /* stack: L1 L2 H1 H2 */
1805 /* compare high */
1806 op1 = op;
1807 /* when values are equal, we need to compare low words. since
1808 the jump is inverted, we invert the test too. */
1809 if (op1 == TOK_LT)
1810 op1 = TOK_LE;
1811 else if (op1 == TOK_GT)
1812 op1 = TOK_GE;
1813 else if (op1 == TOK_ULT)
1814 op1 = TOK_ULE;
1815 else if (op1 == TOK_UGT)
1816 op1 = TOK_UGE;
1817 a = 0;
1818 b = 0;
1819 gen_op(op1);
1820 if (op == TOK_NE) {
1821 b = gvtst(0, 0);
1822 } else {
1823 a = gvtst(1, 0);
1824 if (op != TOK_EQ) {
1825 /* generate non equal test */
1826 vpushi(TOK_NE);
1827 vtop->r = VT_CMP;
1828 b = gvtst(0, 0);
1831 /* compare low. Always unsigned */
1832 op1 = op;
1833 if (op1 == TOK_LT)
1834 op1 = TOK_ULT;
1835 else if (op1 == TOK_LE)
1836 op1 = TOK_ULE;
1837 else if (op1 == TOK_GT)
1838 op1 = TOK_UGT;
1839 else if (op1 == TOK_GE)
1840 op1 = TOK_UGE;
1841 gen_op(op1);
1842 a = gvtst(1, a);
1843 gsym(b);
1844 vseti(VT_JMPI, a);
1845 break;
1848 #endif
1850 static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
1852 uint64_t x = (a >> 63 ? -a : a) / (b >> 63 ? -b : b);
1853 return (a ^ b) >> 63 ? -x : x;
1856 static int gen_opic_lt(uint64_t a, uint64_t b)
1858 return (a ^ (uint64_t)1 << 63) < (b ^ (uint64_t)1 << 63);
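/* Both helpers above implement signed 64-bit semantics with unsigned
   arithmetic: gen_opic_sdiv() negates operands whose sign bit is set and
   restores the sign of the quotient from a ^ b, while gen_opic_lt() flips
   the sign bit of both operands so that the unsigned '<' yields the signed
   ordering. A small self-test sketch, kept disabled like the other #if 0
   aids in this file: */
#if 0
static void test_opic_helpers(void)
{
    /* C99 truncating division: -7 / 2 == -3 */
    if (gen_opic_sdiv((uint64_t)-7, 2) != (uint64_t)-3)
        printf("gen_opic_sdiv mismatch\n");
    /* signed ordering: -1 < 0 */
    if (gen_opic_lt((uint64_t)-1, 0) != 1)
        printf("gen_opic_lt mismatch\n");
}
#endif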
1861 /* handle integer constant optimizations and various machine
1862 independent opt */
1863 static void gen_opic(int op)
1865 SValue *v1 = vtop - 1;
1866 SValue *v2 = vtop;
1867 int t1 = v1->type.t & VT_BTYPE;
1868 int t2 = v2->type.t & VT_BTYPE;
1869 int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1870 int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1871 uint64_t l1 = c1 ? v1->c.i : 0;
1872 uint64_t l2 = c2 ? v2->c.i : 0;
1873 int shm = (t1 == VT_LLONG) ? 63 : 31;
1875 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
1876 l1 = ((uint32_t)l1 |
1877 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
1878 if (t2 != VT_LLONG && (PTR_SIZE != 8 || t2 != VT_PTR))
1879 l2 = ((uint32_t)l2 |
1880 (v2->type.t & VT_UNSIGNED ? 0 : -(l2 & 0x80000000)));
1882 if (c1 && c2) {
1883 switch(op) {
1884 case '+': l1 += l2; break;
1885 case '-': l1 -= l2; break;
1886 case '&': l1 &= l2; break;
1887 case '^': l1 ^= l2; break;
1888 case '|': l1 |= l2; break;
1889 case '*': l1 *= l2; break;
1891 case TOK_PDIV:
1892 case '/':
1893 case '%':
1894 case TOK_UDIV:
1895 case TOK_UMOD:
1896 /* if division by zero, generate explicit division */
1897 if (l2 == 0) {
1898 if (const_wanted)
1899 tcc_error("division by zero in constant");
1900 goto general_case;
1902 switch(op) {
1903 default: l1 = gen_opic_sdiv(l1, l2); break;
1904 case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
1905 case TOK_UDIV: l1 = l1 / l2; break;
1906 case TOK_UMOD: l1 = l1 % l2; break;
1908 break;
1909 case TOK_SHL: l1 <<= (l2 & shm); break;
1910 case TOK_SHR: l1 >>= (l2 & shm); break;
1911 case TOK_SAR:
1912 l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
1913 break;
1914 /* tests */
1915 case TOK_ULT: l1 = l1 < l2; break;
1916 case TOK_UGE: l1 = l1 >= l2; break;
1917 case TOK_EQ: l1 = l1 == l2; break;
1918 case TOK_NE: l1 = l1 != l2; break;
1919 case TOK_ULE: l1 = l1 <= l2; break;
1920 case TOK_UGT: l1 = l1 > l2; break;
1921 case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
1922 case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
1923 case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
1924 case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
1925 /* logical */
1926 case TOK_LAND: l1 = l1 && l2; break;
1927 case TOK_LOR: l1 = l1 || l2; break;
1928 default:
1929 goto general_case;
1931 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
1932 l1 = ((uint32_t)l1 |
1933 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
1934 v1->c.i = l1;
1935 vtop--;
1936 } else {
1937 /* if commutative ops, put c2 as constant */
1938 if (c1 && (op == '+' || op == '&' || op == '^' ||
1939 op == '|' || op == '*')) {
1940 vswap();
1941 c2 = c1; //c = c1, c1 = c2, c2 = c;
1942 l2 = l1; //l = l1, l1 = l2, l2 = l;
1944 if (!const_wanted &&
1945 c1 && ((l1 == 0 &&
1946 (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
1947 (l1 == -1 && op == TOK_SAR))) {
1948 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
1949 vtop--;
1950 } else if (!const_wanted &&
1951 c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
1952 (op == '|' &&
1953 (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))) ||
1954 (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
1955 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
1956 if (l2 == 1)
1957 vtop->c.i = 0;
1958 vswap();
1959 vtop--;
1960 } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
1961 op == TOK_PDIV) &&
1962 l2 == 1) ||
1963 ((op == '+' || op == '-' || op == '|' || op == '^' ||
1964 op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
1965 l2 == 0) ||
1966 (op == '&' &&
1967 (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))))) {
1968 /* filter out NOP operations like x*1, x-0, x&-1... */
1969 vtop--;
1970 } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
1971 /* try to use shifts instead of muls or divs */
1972 if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
1973 int n = -1;
1974 while (l2) {
1975 l2 >>= 1;
1976 n++;
1978 vtop->c.i = n;
1979 if (op == '*')
1980 op = TOK_SHL;
1981 else if (op == TOK_PDIV)
1982 op = TOK_SAR;
1983 else
1984 op = TOK_SHR;
1986 goto general_case;
1987 } else if (c2 && (op == '+' || op == '-') &&
1988 (((vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM))
1989 || (vtop[-1].r & (VT_VALMASK | VT_LVAL)) == VT_LOCAL)) {
1990 /* symbol + constant case */
1991 if (op == '-')
1992 l2 = -l2;
1993 l2 += vtop[-1].c.i;
1994 /* The backends can't always deal with addends to symbols
1995 larger than +-1<<31. Don't construct such. */
1996 if ((int)l2 != l2)
1997 goto general_case;
1998 vtop--;
1999 vtop->c.i = l2;
2000 } else {
2001 general_case:
2002 /* call low level op generator */
2003 if (t1 == VT_LLONG || t2 == VT_LLONG ||
2004 (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
2005 gen_opl(op);
2006 else
2007 gen_opi(op);
2012 /* generate a floating point operation with constant propagation */
2013 static void gen_opif(int op)
2015 int c1, c2;
2016 SValue *v1, *v2;
2017 #if defined _MSC_VER && defined _AMD64_
2018 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2019 volatile
2020 #endif
2021 long double f1, f2;
2023 v1 = vtop - 1;
2024 v2 = vtop;
2025 /* currently, we cannot do computations with forward symbols */
2026 c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2027 c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2028 if (c1 && c2) {
2029 if (v1->type.t == VT_FLOAT) {
2030 f1 = v1->c.f;
2031 f2 = v2->c.f;
2032 } else if (v1->type.t == VT_DOUBLE) {
2033 f1 = v1->c.d;
2034 f2 = v2->c.d;
2035 } else {
2036 f1 = v1->c.ld;
2037 f2 = v2->c.ld;
2040 /* NOTE: we only do constant propagation if finite number (not
2041 NaN or infinity) (ANSI spec) */
2042 if (!ieee_finite(f1) || !ieee_finite(f2))
2043 goto general_case;
2045 switch(op) {
2046 case '+': f1 += f2; break;
2047 case '-': f1 -= f2; break;
2048 case '*': f1 *= f2; break;
2049 case '/':
2050 if (f2 == 0.0) {
2051 /* If not in initializer we need to potentially generate
2052 FP exceptions at runtime, otherwise we want to fold. */
2053 if (!const_wanted)
2054 goto general_case;
2056 f1 /= f2;
2057 break;
2058 /* XXX: also handles tests ? */
2059 default:
2060 goto general_case;
2062 /* XXX: overflow test ? */
2063 if (v1->type.t == VT_FLOAT) {
2064 v1->c.f = f1;
2065 } else if (v1->type.t == VT_DOUBLE) {
2066 v1->c.d = f1;
2067 } else {
2068 v1->c.ld = f1;
2070 vtop--;
2071 } else {
2072 general_case:
2073 gen_opf(op);
2077 static int pointed_size(CType *type)
2079 int align;
2080 return type_size(pointed_type(type), &align);
2083 static void vla_runtime_pointed_size(CType *type)
2085 int align;
2086 vla_runtime_type_size(pointed_type(type), &align);
2089 static inline int is_null_pointer(SValue *p)
2091 if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
2092 return 0;
2093 return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
2094 ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
2095 ((p->type.t & VT_BTYPE) == VT_PTR &&
2096 (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0));
2099 static inline int is_integer_btype(int bt)
2101 return (bt == VT_BYTE || bt == VT_SHORT ||
2102 bt == VT_INT || bt == VT_LLONG);
2105 /* check types for comparison or subtraction of pointers */
2106 static void check_comparison_pointer_types(SValue *p1, SValue *p2, int op)
2108 CType *type1, *type2, tmp_type1, tmp_type2;
2109 int bt1, bt2;
2111 /* null pointers are accepted for all comparisons, as in gcc */
2112 if (is_null_pointer(p1) || is_null_pointer(p2))
2113 return;
2114 type1 = &p1->type;
2115 type2 = &p2->type;
2116 bt1 = type1->t & VT_BTYPE;
2117 bt2 = type2->t & VT_BTYPE;
2118 /* accept comparison between pointer and integer with a warning */
2119 if ((is_integer_btype(bt1) || is_integer_btype(bt2)) && op != '-') {
2120 if (op != TOK_LOR && op != TOK_LAND )
2121 tcc_warning("comparison between pointer and integer");
2122 return;
2125 /* both must be pointers or implicit function pointers */
2126 if (bt1 == VT_PTR) {
2127 type1 = pointed_type(type1);
2128 } else if (bt1 != VT_FUNC)
2129 goto invalid_operands;
2131 if (bt2 == VT_PTR) {
2132 type2 = pointed_type(type2);
2133 } else if (bt2 != VT_FUNC) {
2134 invalid_operands:
2135 tcc_error("invalid operands to binary %s", get_tok_str(op, NULL));
2137 if ((type1->t & VT_BTYPE) == VT_VOID ||
2138 (type2->t & VT_BTYPE) == VT_VOID)
2139 return;
2140 tmp_type1 = *type1;
2141 tmp_type2 = *type2;
2142 tmp_type1.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
2143 tmp_type2.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
2144 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
2145 /* gcc-like error if '-' is used */
2146 if (op == '-')
2147 goto invalid_operands;
2148 else
2149 tcc_warning("comparison of distinct pointer types lacks a cast");
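/* Examples of what the check above does (informal): comparing any pointer
   against a null pointer constant is accepted silently; "p < 5" only
   triggers the pointer/integer warning; comparing an int* with a char*
   gives the "distinct pointer types" warning; subtracting such
   incompatible pointers with '-' is a hard error. */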
2153 /* generic gen_op: handles types problems */
2154 ST_FUNC void gen_op(int op)
2156 int u, t1, t2, bt1, bt2, t;
2157 CType type1;
2159 redo:
2160 t1 = vtop[-1].type.t;
2161 t2 = vtop[0].type.t;
2162 bt1 = t1 & VT_BTYPE;
2163 bt2 = t2 & VT_BTYPE;
2165 if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
2166 tcc_error("operation on a struct");
2167 } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
2168 if (bt2 == VT_FUNC) {
2169 mk_pointer(&vtop->type);
2170 gaddrof();
2172 if (bt1 == VT_FUNC) {
2173 vswap();
2174 mk_pointer(&vtop->type);
2175 gaddrof();
2176 vswap();
2178 goto redo;
2179 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
2180 /* at least one operand is a pointer */
2181 /* relational op: must be both pointers */
2182 if (op >= TOK_ULT && op <= TOK_LOR) {
2183 check_comparison_pointer_types(vtop - 1, vtop, op);
2184 /* pointers are handled as unsigned */
2185 #if PTR_SIZE == 8
2186 t = VT_LLONG | VT_UNSIGNED;
2187 #else
2188 t = VT_INT | VT_UNSIGNED;
2189 #endif
2190 goto std_op;
2192 /* if both are pointers, the operator must be '-' */
2193 if (bt1 == VT_PTR && bt2 == VT_PTR) {
2194 if (op != '-')
2195 tcc_error("cannot use pointers here");
2196 check_comparison_pointer_types(vtop - 1, vtop, op);
2197 /* XXX: check that types are compatible */
2198 if (vtop[-1].type.t & VT_VLA) {
2199 vla_runtime_pointed_size(&vtop[-1].type);
2200 } else {
2201 vpushi(pointed_size(&vtop[-1].type));
2203 vrott(3);
2204 gen_opic(op);
2205 vtop->type.t = ptrdiff_type.t;
2206 vswap();
2207 gen_op(TOK_PDIV);
2208 } else {
2209 /* exactly one pointer : must be '+' or '-'. */
2210 if (op != '-' && op != '+')
2211 tcc_error("cannot use pointers here");
2212 /* Put pointer as first operand */
2213 if (bt2 == VT_PTR) {
2214 vswap();
2215 t = t1, t1 = t2, t2 = t;
2217 #if PTR_SIZE == 4
2218 if ((vtop[0].type.t & VT_BTYPE) == VT_LLONG)
2219 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2220 gen_cast_s(VT_INT);
2221 #endif
2222 type1 = vtop[-1].type;
2223 type1.t &= ~VT_ARRAY;
2224 if (vtop[-1].type.t & VT_VLA)
2225 vla_runtime_pointed_size(&vtop[-1].type);
2226 else {
2227 u = pointed_size(&vtop[-1].type);
2228 if (u < 0)
2229 tcc_error("unknown array element size");
2230 #if PTR_SIZE == 8
2231 vpushll(u);
2232 #else
2233 /* XXX: cast to int ? (long long case) */
2234 vpushi(u);
2235 #endif
2237 gen_op('*');
2238 #if 0
2239 /* #ifdef CONFIG_TCC_BCHECK
2240 The main reason for removing this code:
2241 #include <stdio.h>
2242 int main ()
2244 int v[10];
2245 int i = 10;
2246 int j = 9;
2247 fprintf(stderr, "v+i-j = %p\n", v+i-j);
2248 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
2250 When this code is enabled, the output looks like
2251 v+i-j = 0xfffffffe
2252 v+(i-j) = 0xbff84000
2254 /* if evaluating constant expression, no code should be
2255 generated, so no bound check */
2256 if (tcc_state->do_bounds_check && !const_wanted) {
2257 /* if bounded pointers, we generate a special code to
2258 test bounds */
2259 if (op == '-') {
2260 vpushi(0);
2261 vswap();
2262 gen_op('-');
2264 gen_bounded_ptr_add();
2265 } else
2266 #endif
2268 gen_opic(op);
2270 /* restore the type in case gen_opic() swapped the operands */
2271 vtop->type = type1;
2273 } else if (is_float(bt1) || is_float(bt2)) {
2274 /* compute bigger type and do implicit casts */
2275 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
2276 t = VT_LDOUBLE;
2277 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
2278 t = VT_DOUBLE;
2279 } else {
2280 t = VT_FLOAT;
2282 /* floats can only be used for a few operations */
2283 if (op != '+' && op != '-' && op != '*' && op != '/' &&
2284 (op < TOK_ULT || op > TOK_GT))
2285 tcc_error("invalid operands for binary operation");
2286 goto std_op;
2287 } else if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL) {
2288 t = bt1 == VT_LLONG ? VT_LLONG : VT_INT;
2289 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (t | VT_UNSIGNED))
2290 t |= VT_UNSIGNED;
2291 t |= (VT_LONG & t1);
2292 goto std_op;
2293 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
2294 /* cast to biggest op */
2295 t = VT_LLONG | VT_LONG;
2296 if (bt1 == VT_LLONG)
2297 t &= t1;
2298 if (bt2 == VT_LLONG)
2299 t &= t2;
2300 /* convert to unsigned if it does not fit in a long long */
2301 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
2302 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
2303 t |= VT_UNSIGNED;
2304 goto std_op;
2305 } else {
2306 /* integer operations */
2307 t = VT_INT | (VT_LONG & (t1 | t2));
2308 /* convert to unsigned if it does not fit in an integer */
2309 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
2310 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
2311 t |= VT_UNSIGNED;
2312 std_op:
2313 /* XXX: currently, some unsigned operations are explicit, so
2314 we modify them here */
2315 if (t & VT_UNSIGNED) {
2316 if (op == TOK_SAR)
2317 op = TOK_SHR;
2318 else if (op == '/')
2319 op = TOK_UDIV;
2320 else if (op == '%')
2321 op = TOK_UMOD;
2322 else if (op == TOK_LT)
2323 op = TOK_ULT;
2324 else if (op == TOK_GT)
2325 op = TOK_UGT;
2326 else if (op == TOK_LE)
2327 op = TOK_ULE;
2328 else if (op == TOK_GE)
2329 op = TOK_UGE;
2331 vswap();
2332 type1.t = t;
2333 type1.ref = NULL;
2334 gen_cast(&type1);
2335 vswap();
2336 /* special case for shifts and long long: we keep the shift as
2337 an integer */
2338 if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
2339 type1.t = VT_INT;
2340 gen_cast(&type1);
2341 if (is_float(t))
2342 gen_opif(op);
2343 else
2344 gen_opic(op);
2345 if (op >= TOK_ULT && op <= TOK_GT) {
2346 /* relational op: the result is an int */
2347 vtop->type.t = VT_INT;
2348 } else {
2349 vtop->type.t = t;
2352 // Make sure that we have converted to an rvalue:
2353 if (vtop->r & VT_LVAL)
2354 gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
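/* Illustration (a sketch with hypothetical operands, assuming 4-byte int):
   what the branches above amount to at the C level:

       int a[10], *p = a, *q = a + 3, n = 2;
       p + n;    // scaled: the address advances by n * sizeof(int)
       q - p;    // gen_opic('-') followed by TOK_PDIV by sizeof(int)  ->  3

       1u < -1   // true: the int operand is converted to unsigned and
                 // TOK_LT is rewritten to TOK_ULT under std_op above */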
2357 #ifndef TCC_TARGET_ARM
2358 /* generic itof for unsigned long long case */
2359 static void gen_cvt_itof1(int t)
2361 #ifdef TCC_TARGET_ARM64
2362 gen_cvt_itof(t);
2363 #else
2364 if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
2365 (VT_LLONG | VT_UNSIGNED)) {
2367 if (t == VT_FLOAT)
2368 vpush_global_sym(&func_old_type, TOK___floatundisf);
2369 #if LDOUBLE_SIZE != 8
2370 else if (t == VT_LDOUBLE)
2371 vpush_global_sym(&func_old_type, TOK___floatundixf);
2372 #endif
2373 else
2374 vpush_global_sym(&func_old_type, TOK___floatundidf);
2375 vrott(2);
2376 gfunc_call(1);
2377 vpushi(0);
2378 vtop->r = reg_fret(t);
2379 } else {
2380 gen_cvt_itof(t);
2382 #endif
2384 #endif
2386 /* generic ftoi for unsigned long long case */
2387 static void gen_cvt_ftoi1(int t)
2389 #ifdef TCC_TARGET_ARM64
2390 gen_cvt_ftoi(t);
2391 #else
2392 int st;
2394 if (t == (VT_LLONG | VT_UNSIGNED)) {
2395 /* not handled natively */
2396 st = vtop->type.t & VT_BTYPE;
2397 if (st == VT_FLOAT)
2398 vpush_global_sym(&func_old_type, TOK___fixunssfdi);
2399 #if LDOUBLE_SIZE != 8
2400 else if (st == VT_LDOUBLE)
2401 vpush_global_sym(&func_old_type, TOK___fixunsxfdi);
2402 #endif
2403 else
2404 vpush_global_sym(&func_old_type, TOK___fixunsdfdi);
2405 vrott(2);
2406 gfunc_call(1);
2407 vpushi(0);
2408 vtop->r = REG_IRET;
2409 vtop->r2 = REG_LRET;
2410 } else {
2411 gen_cvt_ftoi(t);
2413 #endif
2416 /* force char or short cast */
2417 static void force_charshort_cast(int t)
2419 int bits, dbt;
2421 /* cannot cast static initializers */
2422 if (STATIC_DATA_WANTED)
2423 return;
2425 dbt = t & VT_BTYPE;
2426 /* XXX: add optimization if lvalue : just change type and offset */
2427 if (dbt == VT_BYTE)
2428 bits = 8;
2429 else
2430 bits = 16;
2431 if (t & VT_UNSIGNED) {
2432 vpushi((1 << bits) - 1);
2433 gen_op('&');
2434 } else {
2435 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
2436 bits = 64 - bits;
2437 else
2438 bits = 32 - bits;
2439 vpushi(bits);
2440 gen_op(TOK_SHL);
2441 /* the result must be signed, otherwise the SAR would be converted to an SHR.
2442 This was not the case when "t" was a signed short
2443 and the last value on the stack was an unsigned int */
2444 vtop->type.t &= ~VT_UNSIGNED;
2445 vpushi(bits);
2446 gen_op(TOK_SAR);
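/* Illustration (rough sketch, assuming a two's-complement target with
   32-bit int): the delayed char/short cast above is equivalent to the
   usual mask/shift idioms

       (unsigned char)x  ->  x & 0xff
       (signed char)x    ->  (x << 24) >> 24    // SHL, then SAR

   which is why an unsigned target type uses '&' and a signed one SHL/SAR. */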
2450 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2451 static void gen_cast_s(int t)
2453 CType type;
2454 type.t = t;
2455 type.ref = NULL;
2456 gen_cast(&type);
2459 static void gen_cast(CType *type)
2461 int sbt, dbt, sf, df, c, p;
2463 /* special delayed cast for char/short */
2464 /* XXX: in some cases (multiple cascaded casts), it may still
2465 be incorrect */
2466 if (vtop->r & VT_MUSTCAST) {
2467 vtop->r &= ~VT_MUSTCAST;
2468 force_charshort_cast(vtop->type.t);
2471 /* bitfields first get cast to ints */
2472 if (vtop->type.t & VT_BITFIELD) {
2473 gv(RC_INT);
2476 dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
2477 sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
2479 if (sbt != dbt) {
2480 sf = is_float(sbt);
2481 df = is_float(dbt);
2482 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2483 p = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM);
2484 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
2485 c &= dbt != VT_LDOUBLE;
2486 #endif
2487 if (c) {
2488 /* constant case: we can do it now */
2489 /* XXX: in ISOC, cannot do it if error in convert */
2490 if (sbt == VT_FLOAT)
2491 vtop->c.ld = vtop->c.f;
2492 else if (sbt == VT_DOUBLE)
2493 vtop->c.ld = vtop->c.d;
2495 if (df) {
2496 if ((sbt & VT_BTYPE) == VT_LLONG) {
2497 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
2498 vtop->c.ld = vtop->c.i;
2499 else
2500 vtop->c.ld = -(long double)-vtop->c.i;
2501 } else if(!sf) {
2502 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
2503 vtop->c.ld = (uint32_t)vtop->c.i;
2504 else
2505 vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
2508 if (dbt == VT_FLOAT)
2509 vtop->c.f = (float)vtop->c.ld;
2510 else if (dbt == VT_DOUBLE)
2511 vtop->c.d = (double)vtop->c.ld;
2512 } else if (sf && dbt == (VT_LLONG|VT_UNSIGNED)) {
2513 vtop->c.i = vtop->c.ld;
2514 } else if (sf && dbt == VT_BOOL) {
2515 vtop->c.i = (vtop->c.ld != 0);
2516 } else {
2517 if(sf)
2518 vtop->c.i = vtop->c.ld;
2519 else if (sbt == (VT_LLONG|VT_UNSIGNED))
2521 else if (sbt & VT_UNSIGNED)
2522 vtop->c.i = (uint32_t)vtop->c.i;
2523 #if PTR_SIZE == 8
2524 else if (sbt == VT_PTR)
2526 #endif
2527 else if (sbt != VT_LLONG)
2528 vtop->c.i = ((uint32_t)vtop->c.i |
2529 -(vtop->c.i & 0x80000000));
2531 if (dbt == (VT_LLONG|VT_UNSIGNED))
2533 else if (dbt == VT_BOOL)
2534 vtop->c.i = (vtop->c.i != 0);
2535 #if PTR_SIZE == 8
2536 else if (dbt == VT_PTR)
2538 #endif
2539 else if (dbt != VT_LLONG) {
2540 uint32_t m = ((dbt & VT_BTYPE) == VT_BYTE ? 0xff :
2541 (dbt & VT_BTYPE) == VT_SHORT ? 0xffff :
2542 0xffffffff);
2543 vtop->c.i &= m;
2544 if (!(dbt & VT_UNSIGNED))
2545 vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
2548 } else if (p && dbt == VT_BOOL) {
2549 vtop->r = VT_CONST;
2550 vtop->c.i = 1;
2551 } else {
2552 /* non constant case: generate code */
2553 if (sf && df) {
2554 /* convert from fp to fp */
2555 gen_cvt_ftof(dbt);
2556 } else if (df) {
2557 /* convert int to fp */
2558 gen_cvt_itof1(dbt);
2559 } else if (sf) {
2560 /* convert fp to int */
2561 if (dbt == VT_BOOL) {
2562 vpushi(0);
2563 gen_op(TOK_NE);
2564 } else {
2565 /* we handle char/short/etc... with generic code */
2566 if (dbt != (VT_INT | VT_UNSIGNED) &&
2567 dbt != (VT_LLONG | VT_UNSIGNED) &&
2568 dbt != VT_LLONG)
2569 dbt = VT_INT;
2570 gen_cvt_ftoi1(dbt);
2571 if (dbt == VT_INT && (type->t & (VT_BTYPE | VT_UNSIGNED)) != dbt) {
2572 /* additional cast for char/short... */
2573 vtop->type.t = dbt;
2574 gen_cast(type);
2577 #if PTR_SIZE == 4
2578 } else if ((dbt & VT_BTYPE) == VT_LLONG) {
2579 if ((sbt & VT_BTYPE) != VT_LLONG) {
2580 /* scalar to long long */
2581 /* machine independent conversion */
2582 gv(RC_INT);
2583 /* generate high word */
2584 if (sbt == (VT_INT | VT_UNSIGNED)) {
2585 vpushi(0);
2586 gv(RC_INT);
2587 } else {
2588 if (sbt == VT_PTR) {
2589 /* cast from pointer to int before we apply
2590 shift operation, which pointers don't support */
2591 gen_cast_s(VT_INT);
2593 gv_dup();
2594 vpushi(31);
2595 gen_op(TOK_SAR);
2597 /* patch second register */
2598 vtop[-1].r2 = vtop->r;
2599 vpop();
2601 #else
2602 } else if ((dbt & VT_BTYPE) == VT_LLONG ||
2603 (dbt & VT_BTYPE) == VT_PTR ||
2604 (dbt & VT_BTYPE) == VT_FUNC) {
2605 if ((sbt & VT_BTYPE) != VT_LLONG &&
2606 (sbt & VT_BTYPE) != VT_PTR &&
2607 (sbt & VT_BTYPE) != VT_FUNC) {
2608 /* need to convert from 32bit to 64bit */
2609 gv(RC_INT);
2610 if (sbt != (VT_INT | VT_UNSIGNED)) {
2611 #if defined(TCC_TARGET_ARM64)
2612 gen_cvt_sxtw();
2613 #elif defined(TCC_TARGET_X86_64)
2614 int r = gv(RC_INT);
2615 /* x86_64 specific: movslq */
2616 o(0x6348);
2617 o(0xc0 + (REG_VALUE(r) << 3) + REG_VALUE(r));
2618 #else
2619 #error
2620 #endif
2623 #endif
2624 } else if (dbt == VT_BOOL) {
2625 /* scalar to bool */
2626 vpushi(0);
2627 gen_op(TOK_NE);
2628 } else if ((dbt & VT_BTYPE) == VT_BYTE ||
2629 (dbt & VT_BTYPE) == VT_SHORT) {
2630 if (sbt == VT_PTR) {
2631 vtop->type.t = VT_INT;
2632 tcc_warning("nonportable conversion from pointer to char/short");
2634 force_charshort_cast(dbt);
2635 } else if ((dbt & VT_BTYPE) == VT_INT) {
2636 /* scalar to int */
2637 if ((sbt & VT_BTYPE) == VT_LLONG) {
2638 #if PTR_SIZE == 4
2639 /* from long long: just take low order word */
2640 lexpand();
2641 vpop();
2642 #else
2643 vpushi(0xffffffff);
2644 vtop->type.t |= VT_UNSIGNED;
2645 gen_op('&');
2646 #endif
2648 /* if lvalue and single word type, nothing to do because
2649 the lvalue already contains the real type size (see
2650 VT_LVAL_xxx constants) */
2653 } else if ((dbt & VT_BTYPE) == VT_PTR && !(vtop->r & VT_LVAL)) {
2654 /* if we are casting between pointer types,
2655 we must update the VT_LVAL_xxx size */
2656 vtop->r = (vtop->r & ~VT_LVAL_TYPE)
2657 | (lvalue_type(type->ref->type.t) & VT_LVAL_TYPE);
2659 vtop->type = *type;
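/* Illustration (a sketch with hypothetical constants, assuming a
   two's-complement target): the constant folding performed in the 'c'
   branch above:

       (signed char)0x181  ->  masked to 0x81, then sign-extended  ->  -127
       (unsigned short)-1  ->  0xffff
       (_Bool)3.5          ->  1   (any nonzero value becomes 1)

   Non-constant operands instead fall through to gen_cvt_*() and emit code. */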
2662 /* return type size as known at compile time. Put alignment at 'a' */
2663 ST_FUNC int type_size(CType *type, int *a)
2665 Sym *s;
2666 int bt;
2668 bt = type->t & VT_BTYPE;
2669 if (bt == VT_STRUCT) {
2670 /* struct/union */
2671 s = type->ref;
2672 *a = s->r;
2673 return s->c;
2674 } else if (bt == VT_PTR) {
2675 if (type->t & VT_ARRAY) {
2676 int ts;
2678 s = type->ref;
2679 ts = type_size(&s->type, a);
2681 if (ts < 0 && s->c < 0)
2682 ts = -ts;
2684 return ts * s->c;
2685 } else {
2686 *a = PTR_SIZE;
2687 return PTR_SIZE;
2689 } else if (IS_ENUM(type->t) && type->ref->c == -1) {
2690 return -1; /* incomplete enum */
2691 } else if (bt == VT_LDOUBLE) {
2692 *a = LDOUBLE_ALIGN;
2693 return LDOUBLE_SIZE;
2694 } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
2695 #ifdef TCC_TARGET_I386
2696 #ifdef TCC_TARGET_PE
2697 *a = 8;
2698 #else
2699 *a = 4;
2700 #endif
2701 #elif defined(TCC_TARGET_ARM)
2702 #ifdef TCC_ARM_EABI
2703 *a = 8;
2704 #else
2705 *a = 4;
2706 #endif
2707 #else
2708 *a = 8;
2709 #endif
2710 return 8;
2711 } else if (bt == VT_INT || bt == VT_FLOAT) {
2712 *a = 4;
2713 return 4;
2714 } else if (bt == VT_SHORT) {
2715 *a = 2;
2716 return 2;
2717 } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
2718 *a = 8;
2719 return 16;
2720 } else {
2721 /* char, void, function, _Bool */
2722 *a = 1;
2723 return 1;
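/* Illustration (a sketch; exact values follow the #ifdefs above, alignment
   is written to *a):

       int             ->  size 4, align 4
       long long       ->  size 8; align 4 on i386 (non-PE) and ARM without
                           EABI, align 8 otherwise
       incomplete enum ->  -1 (size not yet known)
       struct/union    ->  size and alignment previously computed by struct_layout() */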
2727 /* push type size as known at runtime on top of value stack. Put
2728 alignment at 'a' */
2729 ST_FUNC void vla_runtime_type_size(CType *type, int *a)
2731 if (type->t & VT_VLA) {
2732 type_size(&type->ref->type, a);
2733 vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
2734 } else {
2735 vpushi(type_size(type, a));
2739 static void vla_sp_restore(void) {
2740 if (vlas_in_scope) {
2741 gen_vla_sp_restore(vla_sp_loc);
2745 static void vla_sp_restore_root(void) {
2746 if (vlas_in_scope) {
2747 gen_vla_sp_restore(vla_sp_root_loc);
2751 /* return the pointed type of t */
2752 static inline CType *pointed_type(CType *type)
2754 return &type->ref->type;
2757 /* modify type so that it is a pointer to type. */
2758 ST_FUNC void mk_pointer(CType *type)
2760 Sym *s;
2761 s = sym_push(SYM_FIELD, type, 0, -1);
2762 type->t = VT_PTR | (type->t & VT_STORAGE);
2763 type->ref = s;
2766 /* compare function types. OLD functions match any new functions */
2767 static int is_compatible_func(CType *type1, CType *type2)
2769 Sym *s1, *s2;
2771 s1 = type1->ref;
2772 s2 = type2->ref;
2773 if (!is_compatible_types(&s1->type, &s2->type))
2774 return 0;
2775 /* check func_call */
2776 if (s1->f.func_call != s2->f.func_call)
2777 return 0;
2778 /* XXX: not complete */
2779 if (s1->f.func_type == FUNC_OLD || s2->f.func_type == FUNC_OLD)
2780 return 1;
2781 if (s1->f.func_type != s2->f.func_type)
2782 return 0;
2783 while (s1 != NULL) {
2784 if (s2 == NULL)
2785 return 0;
2786 if (!is_compatible_unqualified_types(&s1->type, &s2->type))
2787 return 0;
2788 s1 = s1->next;
2789 s2 = s2->next;
2791 if (s2)
2792 return 0;
2793 return 1;
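/* Illustration (hypothetical declarations): because FUNC_OLD matches
   anything, the pair

       int f();                // old-style, FUNC_OLD
       int f(int x, char *s);  // prototyped

   compares as compatible, while "int g(int)" vs "int g(double)" differ in a
   parameter type and compare as incompatible. */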
2796 /* return true if type1 and type2 are the same. If unqualified is
2797 true, qualifiers on the types are ignored.
2799 static int compare_types(CType *type1, CType *type2, int unqualified)
2801 int bt1, t1, t2;
2803 t1 = type1->t & VT_TYPE;
2804 t2 = type2->t & VT_TYPE;
2805 if (unqualified) {
2806 /* strip qualifiers before comparing */
2807 t1 &= ~(VT_CONSTANT | VT_VOLATILE);
2808 t2 &= ~(VT_CONSTANT | VT_VOLATILE);
2811 /* Default vs. explicit signedness only matters for char */
2812 if ((t1 & VT_BTYPE) != VT_BYTE) {
2813 t1 &= ~VT_DEFSIGN;
2814 t2 &= ~VT_DEFSIGN;
2816 /* XXX: bitfields ? */
2817 if (t1 != t2)
2818 return 0;
2819 /* test more complicated cases */
2820 bt1 = t1 & VT_BTYPE;
2821 if (bt1 == VT_PTR) {
2822 type1 = pointed_type(type1);
2823 type2 = pointed_type(type2);
2824 return is_compatible_types(type1, type2);
2825 } else if (bt1 == VT_STRUCT) {
2826 return (type1->ref == type2->ref);
2827 } else if (bt1 == VT_FUNC) {
2828 return is_compatible_func(type1, type2);
2829 } else if (IS_ENUM(type1->t) || IS_ENUM(type2->t)) {
2830 return type1->ref == type2->ref;
2831 } else {
2832 return 1;
2836 /* return true if type1 and type2 are exactly the same (including
2837 qualifiers).
2839 static int is_compatible_types(CType *type1, CType *type2)
2841 return compare_types(type1,type2,0);
2844 /* return true if type1 and type2 are the same (ignoring qualifiers).
2846 static int is_compatible_unqualified_types(CType *type1, CType *type2)
2848 return compare_types(type1,type2,1);
2851 /* print a type. If 'varstr' is not NULL, then the variable is also
2852 printed in the type */
2853 /* XXX: union */
2854 /* XXX: add array and function pointers */
2855 static void type_to_str(char *buf, int buf_size,
2856 CType *type, const char *varstr)
2858 int bt, v, t;
2859 Sym *s, *sa;
2860 char buf1[256];
2861 const char *tstr;
2863 t = type->t;
2864 bt = t & VT_BTYPE;
2865 buf[0] = '\0';
2867 if (t & VT_EXTERN)
2868 pstrcat(buf, buf_size, "extern ");
2869 if (t & VT_STATIC)
2870 pstrcat(buf, buf_size, "static ");
2871 if (t & VT_TYPEDEF)
2872 pstrcat(buf, buf_size, "typedef ");
2873 if (t & VT_INLINE)
2874 pstrcat(buf, buf_size, "inline ");
2875 if (t & VT_VOLATILE)
2876 pstrcat(buf, buf_size, "volatile ");
2877 if (t & VT_CONSTANT)
2878 pstrcat(buf, buf_size, "const ");
2880 if (((t & VT_DEFSIGN) && bt == VT_BYTE)
2881 || ((t & VT_UNSIGNED)
2882 && (bt == VT_SHORT || bt == VT_INT || bt == VT_LLONG)
2883 && !IS_ENUM(t)
2885 pstrcat(buf, buf_size, (t & VT_UNSIGNED) ? "unsigned " : "signed ");
2887 buf_size -= strlen(buf);
2888 buf += strlen(buf);
2890 switch(bt) {
2891 case VT_VOID:
2892 tstr = "void";
2893 goto add_tstr;
2894 case VT_BOOL:
2895 tstr = "_Bool";
2896 goto add_tstr;
2897 case VT_BYTE:
2898 tstr = "char";
2899 goto add_tstr;
2900 case VT_SHORT:
2901 tstr = "short";
2902 goto add_tstr;
2903 case VT_INT:
2904 tstr = "int";
2905 goto maybe_long;
2906 case VT_LLONG:
2907 tstr = "long long";
2908 maybe_long:
2909 if (t & VT_LONG)
2910 tstr = "long";
2911 if (!IS_ENUM(t))
2912 goto add_tstr;
2913 tstr = "enum ";
2914 goto tstruct;
2915 case VT_FLOAT:
2916 tstr = "float";
2917 goto add_tstr;
2918 case VT_DOUBLE:
2919 tstr = "double";
2920 goto add_tstr;
2921 case VT_LDOUBLE:
2922 tstr = "long double";
2923 add_tstr:
2924 pstrcat(buf, buf_size, tstr);
2925 break;
2926 case VT_STRUCT:
2927 tstr = "struct ";
2928 if (IS_UNION(t))
2929 tstr = "union ";
2930 tstruct:
2931 pstrcat(buf, buf_size, tstr);
2932 v = type->ref->v & ~SYM_STRUCT;
2933 if (v >= SYM_FIRST_ANOM)
2934 pstrcat(buf, buf_size, "<anonymous>");
2935 else
2936 pstrcat(buf, buf_size, get_tok_str(v, NULL));
2937 break;
2938 case VT_FUNC:
2939 s = type->ref;
2940 buf1[0]=0;
2941 if (varstr && '*' == *varstr) {
2942 pstrcat(buf1, sizeof(buf1), "(");
2943 pstrcat(buf1, sizeof(buf1), varstr);
2944 pstrcat(buf1, sizeof(buf1), ")");
2946 pstrcat(buf1, sizeof(buf1), "(");
2947 sa = s->next;
2948 while (sa != NULL) {
2949 char buf2[256];
2950 type_to_str(buf2, sizeof(buf2), &sa->type, NULL);
2951 pstrcat(buf1, sizeof(buf1), buf2);
2952 sa = sa->next;
2953 if (sa)
2954 pstrcat(buf1, sizeof(buf1), ", ");
2956 if (s->f.func_type == FUNC_ELLIPSIS)
2957 pstrcat(buf1, sizeof(buf1), ", ...");
2958 pstrcat(buf1, sizeof(buf1), ")");
2959 type_to_str(buf, buf_size, &s->type, buf1);
2960 goto no_var;
2961 case VT_PTR:
2962 s = type->ref;
2963 if (t & VT_ARRAY) {
2964 snprintf(buf1, sizeof(buf1), "%s[%d]", varstr ? varstr : "", s->c);
2965 type_to_str(buf, buf_size, &s->type, buf1);
2966 goto no_var;
2968 pstrcpy(buf1, sizeof(buf1), "*");
2969 if (t & VT_CONSTANT)
2970 pstrcat(buf1, sizeof(buf1), "const ");
2971 if (t & VT_VOLATILE)
2972 pstrcat(buf1, sizeof(buf1), "volatile ");
2973 if (varstr)
2974 pstrcat(buf1, sizeof(buf1), varstr);
2975 type_to_str(buf, buf_size, &s->type, buf1);
2976 goto no_var;
2978 if (varstr) {
2979 pstrcat(buf, buf_size, " ");
2980 pstrcat(buf, buf_size, varstr);
2982 no_var: ;
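/* Illustration (a sketch; examples are hypothetical): roughly the strings
   produced, as used by the diagnostics in gen_assign_cast() below:

       pointer to const char          ->  "const char *"
       pointer to int(char *) function->  "int (*)(char *)"
       anonymous struct               ->  "struct <anonymous>" */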
2985 /* verify type compatibility to store vtop in 'dt' type, and generate
2986 casts if needed. */
2987 static void gen_assign_cast(CType *dt)
2989 CType *st, *type1, *type2;
2990 char buf1[256], buf2[256];
2991 int dbt, sbt, qualwarn, lvl;
2993 st = &vtop->type; /* source type */
2994 dbt = dt->t & VT_BTYPE;
2995 sbt = st->t & VT_BTYPE;
2996 if (sbt == VT_VOID || dbt == VT_VOID) {
2997 if (sbt == VT_VOID && dbt == VT_VOID)
2998 ; /* It is Ok if both are void */
2999 else
3000 tcc_error("cannot cast from/to void");
3002 if (dt->t & VT_CONSTANT)
3003 tcc_warning("assignment of read-only location");
3004 switch(dbt) {
3005 case VT_PTR:
3006 /* special cases for pointers */
3007 /* '0' can also be a pointer */
3008 if (is_null_pointer(vtop))
3009 break;
3010 /* accept implicit pointer to integer cast with warning */
3011 if (is_integer_btype(sbt)) {
3012 tcc_warning("assignment makes pointer from integer without a cast");
3013 break;
3015 type1 = pointed_type(dt);
3016 if (sbt == VT_PTR)
3017 type2 = pointed_type(st);
3018 else if (sbt == VT_FUNC)
3019 type2 = st; /* a function is implicitly a function pointer */
3020 else
3021 goto error;
3022 if (is_compatible_types(type1, type2))
3023 break;
3024 for (qualwarn = lvl = 0;; ++lvl) {
3025 if (((type2->t & VT_CONSTANT) && !(type1->t & VT_CONSTANT)) ||
3026 ((type2->t & VT_VOLATILE) && !(type1->t & VT_VOLATILE)))
3027 qualwarn = 1;
3028 dbt = type1->t & (VT_BTYPE|VT_LONG);
3029 sbt = type2->t & (VT_BTYPE|VT_LONG);
3030 if (dbt != VT_PTR || sbt != VT_PTR)
3031 break;
3032 type1 = pointed_type(type1);
3033 type2 = pointed_type(type2);
3035 if (!is_compatible_unqualified_types(type1, type2)) {
3036 if ((dbt == VT_VOID || sbt == VT_VOID) && lvl == 0) {
3037 /* void * can match anything */
3038 } else if (dbt == sbt
3039 && is_integer_btype(sbt & VT_BTYPE)
3040 && IS_ENUM(type1->t) + IS_ENUM(type2->t)
3041 + !!((type1->t ^ type2->t) & VT_UNSIGNED) < 2) {
3042 /* Like GCC, don't warn by default for mere changes
3043 in pointer target signedness. Do warn for different
3044 base types, though, in particular for unsigned enums
3045 and signed int targets. */
3046 } else {
3047 tcc_warning("assignment from incompatible pointer type");
3048 break;
3051 if (qualwarn)
3052 tcc_warning("assignment discards qualifiers from pointer target type");
3053 break;
3054 case VT_BYTE:
3055 case VT_SHORT:
3056 case VT_INT:
3057 case VT_LLONG:
3058 if (sbt == VT_PTR || sbt == VT_FUNC) {
3059 tcc_warning("assignment makes integer from pointer without a cast");
3060 } else if (sbt == VT_STRUCT) {
3061 goto case_VT_STRUCT;
3063 /* XXX: more tests */
3064 break;
3065 case VT_STRUCT:
3066 case_VT_STRUCT:
3067 if (!is_compatible_unqualified_types(dt, st)) {
3068 error:
3069 type_to_str(buf1, sizeof(buf1), st, NULL);
3070 type_to_str(buf2, sizeof(buf2), dt, NULL);
3071 tcc_error("cannot cast '%s' to '%s'", buf1, buf2);
3073 break;
3075 gen_cast(dt);
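/* Illustration (a sketch with hypothetical declarations, not exhaustive):
   typical assignments and the diagnostics the checks above produce:

       int *p; char *q; int n; struct A a; struct B b;
       p = 0;   // ok: null pointer constant
       p = n;   // "assignment makes pointer from integer without a cast"
       n = p;   // "assignment makes integer from pointer without a cast"
       p = q;   // "assignment from incompatible pointer type"
       a = b;   // "cannot cast 'struct B' to 'struct A'" (error) */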
3078 /* store vtop in lvalue pushed on stack */
3079 ST_FUNC void vstore(void)
3081 int sbt, dbt, ft, r, t, size, align, bit_size, bit_pos, rc, delayed_cast;
3083 ft = vtop[-1].type.t;
3084 sbt = vtop->type.t & VT_BTYPE;
3085 dbt = ft & VT_BTYPE;
3086 if ((((sbt == VT_INT || sbt == VT_SHORT) && dbt == VT_BYTE) ||
3087 (sbt == VT_INT && dbt == VT_SHORT))
3088 && !(vtop->type.t & VT_BITFIELD)) {
3089 /* optimize char/short casts */
3090 delayed_cast = VT_MUSTCAST;
3091 vtop->type.t = ft & VT_TYPE;
3092 /* XXX: factorize */
3093 if (ft & VT_CONSTANT)
3094 tcc_warning("assignment of read-only location");
3095 } else {
3096 delayed_cast = 0;
3097 if (!(ft & VT_BITFIELD))
3098 gen_assign_cast(&vtop[-1].type);
3101 if (sbt == VT_STRUCT) {
3102 /* if structure, only generate pointer */
3103 /* structure assignment : generate memcpy */
3104 /* XXX: optimize if small size */
3105 size = type_size(&vtop->type, &align);
3107 /* destination */
3108 vswap();
3109 vtop->type.t = VT_PTR;
3110 gaddrof();
3112 /* address of memcpy() */
3113 #ifdef TCC_ARM_EABI
3114 if(!(align & 7))
3115 vpush_global_sym(&func_old_type, TOK_memcpy8);
3116 else if(!(align & 3))
3117 vpush_global_sym(&func_old_type, TOK_memcpy4);
3118 else
3119 #endif
3120 /* Use memmove, rather than memcpy, as dest and src may be the same: */
3121 vpush_global_sym(&func_old_type, TOK_memmove);
3123 vswap();
3124 /* source */
3125 vpushv(vtop - 2);
3126 vtop->type.t = VT_PTR;
3127 gaddrof();
3128 /* type size */
3129 vpushi(size);
3130 gfunc_call(3);
3132 /* leave source on stack */
3133 } else if (ft & VT_BITFIELD) {
3134 /* bitfield store handling */
3136 /* save lvalue as expression result (example: s.b = s.a = n;) */
3137 vdup(), vtop[-1] = vtop[-2];
3139 bit_pos = BIT_POS(ft);
3140 bit_size = BIT_SIZE(ft);
3141 /* remove bit field info to avoid loops */
3142 vtop[-1].type.t = ft & ~VT_STRUCT_MASK;
3144 if ((ft & VT_BTYPE) == VT_BOOL) {
3145 gen_cast(&vtop[-1].type);
3146 vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
3149 r = adjust_bf(vtop - 1, bit_pos, bit_size);
3150 if (r == VT_STRUCT) {
3151 gen_cast_s((ft & VT_BTYPE) == VT_LLONG ? VT_LLONG : VT_INT);
3152 store_packed_bf(bit_pos, bit_size);
3153 } else {
3154 unsigned long long mask = (1ULL << bit_size) - 1;
3155 if ((ft & VT_BTYPE) != VT_BOOL) {
3156 /* mask source */
3157 if ((vtop[-1].type.t & VT_BTYPE) == VT_LLONG)
3158 vpushll(mask);
3159 else
3160 vpushi((unsigned)mask);
3161 gen_op('&');
3163 /* shift source */
3164 vpushi(bit_pos);
3165 gen_op(TOK_SHL);
3166 vswap();
3167 /* duplicate destination */
3168 vdup();
3169 vrott(3);
3170 /* load destination, mask and or with source */
3171 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
3172 vpushll(~(mask << bit_pos));
3173 else
3174 vpushi(~((unsigned)mask << bit_pos));
3175 gen_op('&');
3176 gen_op('|');
3177 /* store result */
3178 vstore();
3179 /* ... and discard */
3180 vpop();
3182 } else if (dbt == VT_VOID) {
3183 --vtop;
3184 } else {
3185 #ifdef CONFIG_TCC_BCHECK
3186 /* bound check case */
3187 if (vtop[-1].r & VT_MUSTBOUND) {
3188 vswap();
3189 gbound();
3190 vswap();
3192 #endif
3193 rc = RC_INT;
3194 if (is_float(ft)) {
3195 rc = RC_FLOAT;
3196 #ifdef TCC_TARGET_X86_64
3197 if ((ft & VT_BTYPE) == VT_LDOUBLE) {
3198 rc = RC_ST0;
3199 } else if ((ft & VT_BTYPE) == VT_QFLOAT) {
3200 rc = RC_FRET;
3202 #endif
3204 r = gv(rc); /* generate value */
3205 /* if lvalue was saved on stack, must read it */
3206 if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
3207 SValue sv;
3208 t = get_reg(RC_INT);
3209 #if PTR_SIZE == 8
3210 sv.type.t = VT_PTR;
3211 #else
3212 sv.type.t = VT_INT;
3213 #endif
3214 sv.r = VT_LOCAL | VT_LVAL;
3215 sv.c.i = vtop[-1].c.i;
3216 load(t, &sv);
3217 vtop[-1].r = t | VT_LVAL;
3219 /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
3220 #if PTR_SIZE == 8
3221 if (((ft & VT_BTYPE) == VT_QLONG) || ((ft & VT_BTYPE) == VT_QFLOAT)) {
3222 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
3223 #else
3224 if ((ft & VT_BTYPE) == VT_LLONG) {
3225 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
3226 #endif
3227 vtop[-1].type.t = load_type;
3228 store(r, vtop - 1);
3229 vswap();
3230 /* convert to int to increment easily */
3231 vtop->type.t = addr_type;
3232 gaddrof();
3233 vpushi(load_size);
3234 gen_op('+');
3235 vtop->r |= VT_LVAL;
3236 vswap();
3237 vtop[-1].type.t = load_type;
3238 /* XXX: it works because r2 is spilled last ! */
3239 store(vtop->r2, vtop - 1);
3240 } else {
3241 store(r, vtop - 1);
3244 vswap();
3245 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
3246 vtop->r |= delayed_cast;
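/* Illustration (sketch): the masked read-modify-write emitted above for a
   regular (non-packed) bit-field store corresponds to

       word = (word & ~(mask << bit_pos)) | ((value & mask) << bit_pos);

   with mask = (1ULL << bit_size) - 1, while a struct assignment is lowered
   into a memmove() call taking the struct size as its third argument. */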
3250 /* 'post' selects post- vs. pre-increment/decrement. c is the token ++ or -- */
3251 ST_FUNC void inc(int post, int c)
3253 test_lvalue();
3254 vdup(); /* save lvalue */
3255 if (post) {
3256 gv_dup(); /* duplicate value */
3257 vrotb(3);
3258 vrotb(3);
3260 /* add constant */
3261 vpushi(c - TOK_MID);
3262 gen_op('+');
3263 vstore(); /* store value */
3264 if (post)
3265 vpop(); /* if post op, return saved value */
3268 ST_FUNC void parse_mult_str (CString *astr, const char *msg)
3270 /* read the string */
3271 if (tok != TOK_STR)
3272 expect(msg);
3273 cstr_new(astr);
3274 while (tok == TOK_STR) {
3275 /* XXX: add \0 handling too ? */
3276 cstr_cat(astr, tokc.str.data, -1);
3277 next();
3279 cstr_ccat(astr, '\0');
3282 /* If I is >= 1 and a power of two, returns log2(i)+1.
3283 If I is 0 returns 0. */
3284 static int exact_log2p1(int i)
3286 int ret;
3287 if (!i)
3288 return 0;
3289 for (ret = 1; i >= 1 << 8; ret += 8)
3290 i >>= 8;
3291 if (i >= 1 << 4)
3292 ret += 4, i >>= 4;
3293 if (i >= 1 << 2)
3294 ret += 2, i >>= 2;
3295 if (i >= 1 << 1)
3296 ret++;
3297 return ret;
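/* Illustration: some values of exact_log2p1(), which lets the 'aligned'
   attribute be stored as log2(alignment) + 1 so that 0 can mean "not set":

       exact_log2p1(0)  == 0
       exact_log2p1(1)  == 1
       exact_log2p1(8)  == 4
       exact_log2p1(16) == 5 */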
3300 /* Parse __attribute__((...)) GNUC extension. */
3301 static void parse_attribute(AttributeDef *ad)
3303 int t, n;
3304 CString astr;
3306 redo:
3307 if (tok != TOK_ATTRIBUTE1 && tok != TOK_ATTRIBUTE2)
3308 return;
3309 next();
3310 skip('(');
3311 skip('(');
3312 while (tok != ')') {
3313 if (tok < TOK_IDENT)
3314 expect("attribute name");
3315 t = tok;
3316 next();
3317 switch(t) {
3318 case TOK_SECTION1:
3319 case TOK_SECTION2:
3320 skip('(');
3321 parse_mult_str(&astr, "section name");
3322 ad->section = find_section(tcc_state, (char *)astr.data);
3323 skip(')');
3324 cstr_free(&astr);
3325 break;
3326 case TOK_ALIAS1:
3327 case TOK_ALIAS2:
3328 skip('(');
3329 parse_mult_str(&astr, "alias(\"target\")");
3330 ad->alias_target = /* save string as token, for later */
3331 tok_alloc((char*)astr.data, astr.size-1)->tok;
3332 skip(')');
3333 cstr_free(&astr);
3334 break;
3335 case TOK_VISIBILITY1:
3336 case TOK_VISIBILITY2:
3337 skip('(');
3338 parse_mult_str(&astr,
3339 "visibility(\"default|hidden|internal|protected\")");
3340 if (!strcmp (astr.data, "default"))
3341 ad->a.visibility = STV_DEFAULT;
3342 else if (!strcmp (astr.data, "hidden"))
3343 ad->a.visibility = STV_HIDDEN;
3344 else if (!strcmp (astr.data, "internal"))
3345 ad->a.visibility = STV_INTERNAL;
3346 else if (!strcmp (astr.data, "protected"))
3347 ad->a.visibility = STV_PROTECTED;
3348 else
3349 expect("visibility(\"default|hidden|internal|protected\")");
3350 skip(')');
3351 cstr_free(&astr);
3352 break;
3353 case TOK_ALIGNED1:
3354 case TOK_ALIGNED2:
3355 if (tok == '(') {
3356 next();
3357 n = expr_const();
3358 if (n <= 0 || (n & (n - 1)) != 0)
3359 tcc_error("alignment must be a positive power of two");
3360 skip(')');
3361 } else {
3362 n = MAX_ALIGN;
3364 ad->a.aligned = exact_log2p1(n);
3365 if (n != 1 << (ad->a.aligned - 1))
3366 tcc_error("alignment of %d is larger than implemented", n);
3367 break;
3368 case TOK_PACKED1:
3369 case TOK_PACKED2:
3370 ad->a.packed = 1;
3371 break;
3372 case TOK_WEAK1:
3373 case TOK_WEAK2:
3374 ad->a.weak = 1;
3375 break;
3376 case TOK_UNUSED1:
3377 case TOK_UNUSED2:
3378 /* currently, no need to handle it because tcc does not
3379 track unused objects */
3380 break;
3381 case TOK_NORETURN1:
3382 case TOK_NORETURN2:
3383 /* currently ignored: tcc makes no use of the
3384 noreturn information */
3385 break;
3386 case TOK_CDECL1:
3387 case TOK_CDECL2:
3388 case TOK_CDECL3:
3389 ad->f.func_call = FUNC_CDECL;
3390 break;
3391 case TOK_STDCALL1:
3392 case TOK_STDCALL2:
3393 case TOK_STDCALL3:
3394 ad->f.func_call = FUNC_STDCALL;
3395 break;
3396 #ifdef TCC_TARGET_I386
3397 case TOK_REGPARM1:
3398 case TOK_REGPARM2:
3399 skip('(');
3400 n = expr_const();
3401 if (n > 3)
3402 n = 3;
3403 else if (n < 0)
3404 n = 0;
3405 if (n > 0)
3406 ad->f.func_call = FUNC_FASTCALL1 + n - 1;
3407 skip(')');
3408 break;
3409 case TOK_FASTCALL1:
3410 case TOK_FASTCALL2:
3411 case TOK_FASTCALL3:
3412 ad->f.func_call = FUNC_FASTCALLW;
3413 break;
3414 #endif
3415 case TOK_MODE:
3416 skip('(');
3417 switch(tok) {
3418 case TOK_MODE_DI:
3419 ad->attr_mode = VT_LLONG + 1;
3420 break;
3421 case TOK_MODE_QI:
3422 ad->attr_mode = VT_BYTE + 1;
3423 break;
3424 case TOK_MODE_HI:
3425 ad->attr_mode = VT_SHORT + 1;
3426 break;
3427 case TOK_MODE_SI:
3428 case TOK_MODE_word:
3429 ad->attr_mode = VT_INT + 1;
3430 break;
3431 default:
3432 tcc_warning("__mode__(%s) not supported", get_tok_str(tok, NULL));
3433 break;
3435 next();
3436 skip(')');
3437 break;
3438 case TOK_DLLEXPORT:
3439 ad->a.dllexport = 1;
3440 break;
3441 case TOK_DLLIMPORT:
3442 ad->a.dllimport = 1;
3443 break;
3444 default:
3445 if (tcc_state->warn_unsupported)
3446 tcc_warning("'%s' attribute ignored", get_tok_str(t, NULL));
3447 /* skip parameters */
3448 if (tok == '(') {
3449 int parenthesis = 0;
3450 do {
3451 if (tok == '(')
3452 parenthesis++;
3453 else if (tok == ')')
3454 parenthesis--;
3455 next();
3456 } while (parenthesis && tok != -1);
3458 break;
3460 if (tok != ',')
3461 break;
3462 next();
3464 skip(')');
3465 skip(')');
3466 goto redo;
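/* Illustration (hypothetical declarations): attribute spellings handled by
   the parser above, e.g.

       int x __attribute__((aligned(16)));
       struct __attribute__((packed)) S { char c; int i; };
       typedef int di __attribute__((mode(DI)));  // becomes long long via attr_mode

   Attributes not listed above are skipped; with warn_unsupported set, an
   "attribute ignored" warning is emitted. */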
3469 static Sym * find_field (CType *type, int v)
3471 Sym *s = type->ref;
3472 v |= SYM_FIELD;
3473 while ((s = s->next) != NULL) {
3474 if ((s->v & SYM_FIELD) &&
3475 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3476 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3477 Sym *ret = find_field (&s->type, v);
3478 if (ret)
3479 return ret;
3481 if (s->v == v)
3482 break;
3484 return s;
3487 static void struct_add_offset (Sym *s, int offset)
3489 while ((s = s->next) != NULL) {
3490 if ((s->v & SYM_FIELD) &&
3491 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3492 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3493 struct_add_offset(s->type.ref, offset);
3494 } else
3495 s->c += offset;
3499 static void struct_layout(CType *type, AttributeDef *ad)
3501 int size, align, maxalign, offset, c, bit_pos, bit_size;
3502 int packed, a, bt, prevbt, prev_bit_size;
3503 int pcc = !tcc_state->ms_bitfields;
3504 int pragma_pack = *tcc_state->pack_stack_ptr;
3505 Sym *f;
3507 maxalign = 1;
3508 offset = 0;
3509 c = 0;
3510 bit_pos = 0;
3511 prevbt = VT_STRUCT; /* make it never match */
3512 prev_bit_size = 0;
3514 //#define BF_DEBUG
3516 for (f = type->ref->next; f; f = f->next) {
3517 if (f->type.t & VT_BITFIELD)
3518 bit_size = BIT_SIZE(f->type.t);
3519 else
3520 bit_size = -1;
3521 size = type_size(&f->type, &align);
3522 a = f->a.aligned ? 1 << (f->a.aligned - 1) : 0;
3523 packed = 0;
3525 if (pcc && bit_size == 0) {
3526 /* in pcc mode, packing does not affect zero-width bitfields */
3528 } else {
3529 /* in pcc mode, attribute packed overrides if set. */
3530 if (pcc && (f->a.packed || ad->a.packed))
3531 align = packed = 1;
3533 /* pragma pack overrides align if it is smaller, and always packs bitfields */
3534 if (pragma_pack) {
3535 packed = 1;
3536 if (pragma_pack < align)
3537 align = pragma_pack;
3538 /* in pcc mode pragma pack also overrides individual align */
3539 if (pcc && pragma_pack < a)
3540 a = 0;
3543 /* some individual align was specified */
3544 if (a)
3545 align = a;
3547 if (type->ref->type.t == VT_UNION) {
3548 if (pcc && bit_size >= 0)
3549 size = (bit_size + 7) >> 3;
3550 offset = 0;
3551 if (size > c)
3552 c = size;
3554 } else if (bit_size < 0) {
3555 if (pcc)
3556 c += (bit_pos + 7) >> 3;
3557 c = (c + align - 1) & -align;
3558 offset = c;
3559 if (size > 0)
3560 c += size;
3561 bit_pos = 0;
3562 prevbt = VT_STRUCT;
3563 prev_bit_size = 0;
3565 } else {
3566 /* A bit-field. Layout is more complicated. There are two
3567 options: PCC (GCC) compatible and MS compatible */
3568 if (pcc) {
3569 /* In PCC layout a bit-field is placed adjacent to the
3570 preceding bit-fields, except if:
3571 - it has zero-width
3572 - an individual alignment was given
3573 - it would overflow its base type container and
3574 there is no packing */
3575 if (bit_size == 0) {
3576 new_field:
3577 c = (c + ((bit_pos + 7) >> 3) + align - 1) & -align;
3578 bit_pos = 0;
3579 } else if (f->a.aligned) {
3580 goto new_field;
3581 } else if (!packed) {
3582 int a8 = align * 8;
3583 int ofs = ((c * 8 + bit_pos) % a8 + bit_size + a8 - 1) / a8;
3584 if (ofs > size / align)
3585 goto new_field;
3588 /* in pcc mode, long long bitfields have type int if they fit */
3589 if (size == 8 && bit_size <= 32)
3590 f->type.t = (f->type.t & ~VT_BTYPE) | VT_INT, size = 4;
3592 while (bit_pos >= align * 8)
3593 c += align, bit_pos -= align * 8;
3594 offset = c;
3596 /* In PCC layout named bit-fields influence the alignment
3597 of the containing struct using the base type's alignment,
3598 except for packed fields (which here have correct align). */
3599 if (f->v & SYM_FIRST_ANOM
3600 // && bit_size // ??? gcc on ARM/rpi does that
3602 align = 1;
3604 } else {
3605 bt = f->type.t & VT_BTYPE;
3606 if ((bit_pos + bit_size > size * 8)
3607 || (bit_size > 0) == (bt != prevbt)
3609 c = (c + align - 1) & -align;
3610 offset = c;
3611 bit_pos = 0;
3612 /* In MS bitfield mode a bit-field run always uses
3613 at least as many bits as the underlying type.
3614 To start a new run it's also required that this
3615 or the last bit-field had non-zero width. */
3616 if (bit_size || prev_bit_size)
3617 c += size;
3619 /* In MS layout the record's alignment is normally
3620 influenced by the field, except for a zero-width
3621 field at the start of a run (further zero-width
3622 fields do influence it again). */
3623 if (bit_size == 0 && prevbt != bt)
3624 align = 1;
3625 prevbt = bt;
3626 prev_bit_size = bit_size;
3629 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
3630 | (bit_pos << VT_STRUCT_SHIFT);
3631 bit_pos += bit_size;
3633 if (align > maxalign)
3634 maxalign = align;
3636 #ifdef BF_DEBUG
3637 printf("set field %s offset %-2d size %-2d align %-2d",
3638 get_tok_str(f->v & ~SYM_FIELD, NULL), offset, size, align);
3639 if (f->type.t & VT_BITFIELD) {
3640 printf(" pos %-2d bits %-2d",
3641 BIT_POS(f->type.t),
3642 BIT_SIZE(f->type.t)
3645 printf("\n");
3646 #endif
3648 if (f->v & SYM_FIRST_ANOM && (f->type.t & VT_BTYPE) == VT_STRUCT) {
3649 Sym *ass;
3650 /* An anonymous struct/union. Adjust member offsets
3651 to reflect the real offset of our containing struct.
3652 Also set the offset of this anon member inside
3653 the outer struct to be zero. Via this it
3654 works when accessing the field offset directly
3655 (from base object), as well as when recursing
3656 members in initializer handling. */
3657 int v2 = f->type.ref->v;
3658 if (!(v2 & SYM_FIELD) &&
3659 (v2 & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
3660 Sym **pps;
3661 /* This happens only with MS extensions. The
3662 anon member has a named struct type, so it
3663 potentially is shared with other references.
3664 We need to unshare members so we can modify
3665 them. */
3666 ass = f->type.ref;
3667 f->type.ref = sym_push(anon_sym++ | SYM_FIELD,
3668 &f->type.ref->type, 0,
3669 f->type.ref->c);
3670 pps = &f->type.ref->next;
3671 while ((ass = ass->next) != NULL) {
3672 *pps = sym_push(ass->v, &ass->type, 0, ass->c);
3673 pps = &((*pps)->next);
3675 *pps = NULL;
3677 struct_add_offset(f->type.ref, offset);
3678 f->c = 0;
3679 } else {
3680 f->c = offset;
3683 f->r = 0;
3686 if (pcc)
3687 c += (bit_pos + 7) >> 3;
3689 /* store size and alignment */
3690 a = bt = ad->a.aligned ? 1 << (ad->a.aligned - 1) : 1;
3691 if (a < maxalign)
3692 a = maxalign;
3693 type->ref->r = a;
3694 if (pragma_pack && pragma_pack < maxalign && 0 == pcc) {
3695 /* can happen if individual align for some member was given. In
3696 this case MSVC ignores maxalign when aligning the size */
3697 a = pragma_pack;
3698 if (a < bt)
3699 a = bt;
3701 c = (c + a - 1) & -a;
3702 type->ref->c = c;
3704 #ifdef BF_DEBUG
3705 printf("struct size %-2d align %-2d\n\n", c, a), fflush(stdout);
3706 #endif
3708 /* check whether we can access bitfields by their type */
3709 for (f = type->ref->next; f; f = f->next) {
3710 int s, px, cx, c0;
3711 CType t;
3713 if (0 == (f->type.t & VT_BITFIELD))
3714 continue;
3715 f->type.ref = f;
3716 f->auxtype = -1;
3717 bit_size = BIT_SIZE(f->type.t);
3718 if (bit_size == 0)
3719 continue;
3720 bit_pos = BIT_POS(f->type.t);
3721 size = type_size(&f->type, &align);
3722 if (bit_pos + bit_size <= size * 8 && f->c + size <= c)
3723 continue;
3725 /* try to access the field using a different type */
3726 c0 = -1, s = align = 1;
3727 for (;;) {
3728 px = f->c * 8 + bit_pos;
3729 cx = (px >> 3) & -align;
3730 px = px - (cx << 3);
3731 if (c0 == cx)
3732 break;
3733 s = (px + bit_size + 7) >> 3;
3734 if (s > 4) {
3735 t.t = VT_LLONG;
3736 } else if (s > 2) {
3737 t.t = VT_INT;
3738 } else if (s > 1) {
3739 t.t = VT_SHORT;
3740 } else {
3741 t.t = VT_BYTE;
3743 s = type_size(&t, &align);
3744 c0 = cx;
3747 if (px + bit_size <= s * 8 && cx + s <= c) {
3748 /* update offset and bit position */
3749 f->c = cx;
3750 bit_pos = px;
3751 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
3752 | (bit_pos << VT_STRUCT_SHIFT);
3753 if (s != size)
3754 f->auxtype = t.t;
3755 #ifdef BF_DEBUG
3756 printf("FIX field %s offset %-2d size %-2d align %-2d "
3757 "pos %-2d bits %-2d\n",
3758 get_tok_str(f->v & ~SYM_FIELD, NULL),
3759 cx, s, align, px, bit_size);
3760 #endif
3761 } else {
3762 /* fall back to load/store single-byte wise */
3763 f->auxtype = VT_STRUCT;
3764 #ifdef BF_DEBUG
3765 printf("FIX field %s : load byte-wise\n",
3766 get_tok_str(f->v & ~SYM_FIELD, NULL));
3767 #endif
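/* Illustration (a sketch with a hypothetical struct, assuming 32-bit int;
   exact sizes are target dependent): one case where the PCC (gcc-compatible)
   and MS branches above differ:

       struct S { int a : 4; char b : 4; };
       // pcc mode (default):   both fields share one container, sizeof == 4
       // ms_bitfields mode:    'b' starts a new unit of its own type, sizeof == 8
*/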
3772 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
3773 static void struct_decl(CType *type, int u)
3775 int v, c, size, align, flexible;
3776 int bit_size, bsize, bt;
3777 Sym *s, *ss, **ps;
3778 AttributeDef ad, ad1;
3779 CType type1, btype;
3781 memset(&ad, 0, sizeof ad);
3782 next();
3783 parse_attribute(&ad);
3784 if (tok != '{') {
3785 v = tok;
3786 next();
3787 /* struct already defined ? return it */
3788 if (v < TOK_IDENT)
3789 expect("struct/union/enum name");
3790 s = struct_find(v);
3791 if (s && (s->sym_scope == local_scope || tok != '{')) {
3792 if (u == s->type.t)
3793 goto do_decl;
3794 if (u == VT_ENUM && IS_ENUM(s->type.t))
3795 goto do_decl;
3796 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
3798 } else {
3799 v = anon_sym++;
3801 /* Record the original enum/struct/union token. */
3802 type1.t = u == VT_ENUM ? u | VT_INT | VT_UNSIGNED : u;
3803 type1.ref = NULL;
3804 /* we put an undefined size for struct/union */
3805 s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
3806 s->r = 0; /* default alignment is zero as gcc */
3807 do_decl:
3808 type->t = s->type.t;
3809 type->ref = s;
3811 if (tok == '{') {
3812 next();
3813 if (s->c != -1)
3814 tcc_error("struct/union/enum already defined");
3815 /* cannot be empty */
3816 /* empty enums are not allowed */
3817 ps = &s->next;
3818 if (u == VT_ENUM) {
3819 long long ll = 0, pl = 0, nl = 0;
3820 CType t;
3821 t.ref = s;
3822 /* enum symbols have static storage */
3823 t.t = VT_INT|VT_STATIC|VT_ENUM_VAL;
3824 for(;;) {
3825 v = tok;
3826 if (v < TOK_UIDENT)
3827 expect("identifier");
3828 ss = sym_find(v);
3829 if (ss && !local_stack)
3830 tcc_error("redefinition of enumerator '%s'",
3831 get_tok_str(v, NULL));
3832 next();
3833 if (tok == '=') {
3834 next();
3835 ll = expr_const64();
3837 ss = sym_push(v, &t, VT_CONST, 0);
3838 ss->enum_val = ll;
3839 *ps = ss, ps = &ss->next;
3840 if (ll < nl)
3841 nl = ll;
3842 if (ll > pl)
3843 pl = ll;
3844 if (tok != ',')
3845 break;
3846 next();
3847 ll++;
3848 /* NOTE: we accept a trailing comma */
3849 if (tok == '}')
3850 break;
3852 skip('}');
3853 /* set integral type of the enum */
3854 t.t = VT_INT;
3855 if (nl >= 0) {
3856 if (pl != (unsigned)pl)
3857 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
3858 t.t |= VT_UNSIGNED;
3859 } else if (pl != (int)pl || nl != (int)nl)
3860 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
3861 s->type.t = type->t = t.t | VT_ENUM;
3862 s->c = 0;
3863 /* set type for enum members */
3864 for (ss = s->next; ss; ss = ss->next) {
3865 ll = ss->enum_val;
3866 if (ll == (int)ll) /* default is int if it fits */
3867 continue;
3868 if (t.t & VT_UNSIGNED) {
3869 ss->type.t |= VT_UNSIGNED;
3870 if (ll == (unsigned)ll)
3871 continue;
3873 ss->type.t = (ss->type.t & ~VT_BTYPE)
3874 | (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
3876 } else {
3877 c = 0;
3878 flexible = 0;
3879 while (tok != '}') {
3880 if (!parse_btype(&btype, &ad1)) {
3881 skip(';');
3882 continue;
3884 while (1) {
3885 if (flexible)
3886 tcc_error("flexible array member '%s' not at the end of struct",
3887 get_tok_str(v, NULL));
3888 bit_size = -1;
3889 v = 0;
3890 type1 = btype;
3891 if (tok != ':') {
3892 if (tok != ';')
3893 type_decl(&type1, &ad1, &v, TYPE_DIRECT);
3894 if (v == 0) {
3895 if ((type1.t & VT_BTYPE) != VT_STRUCT)
3896 expect("identifier");
3897 else {
3898 int v = btype.ref->v;
3899 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
3900 if (tcc_state->ms_extensions == 0)
3901 expect("identifier");
3905 if (type_size(&type1, &align) < 0) {
3906 if ((u == VT_STRUCT) && (type1.t & VT_ARRAY) && c)
3907 flexible = 1;
3908 else
3909 tcc_error("field '%s' has incomplete type",
3910 get_tok_str(v, NULL));
3912 if ((type1.t & VT_BTYPE) == VT_FUNC ||
3913 (type1.t & VT_STORAGE))
3914 tcc_error("invalid type for '%s'",
3915 get_tok_str(v, NULL));
3917 if (tok == ':') {
3918 next();
3919 bit_size = expr_const();
3920 /* XXX: handle v = 0 case for messages */
3921 if (bit_size < 0)
3922 tcc_error("negative width in bit-field '%s'",
3923 get_tok_str(v, NULL));
3924 if (v && bit_size == 0)
3925 tcc_error("zero width for bit-field '%s'",
3926 get_tok_str(v, NULL));
3927 parse_attribute(&ad1);
3929 size = type_size(&type1, &align);
3930 if (bit_size >= 0) {
3931 bt = type1.t & VT_BTYPE;
3932 if (bt != VT_INT &&
3933 bt != VT_BYTE &&
3934 bt != VT_SHORT &&
3935 bt != VT_BOOL &&
3936 bt != VT_LLONG)
3937 tcc_error("bitfields must have scalar type");
3938 bsize = size * 8;
3939 if (bit_size > bsize) {
3940 tcc_error("width of '%s' exceeds its type",
3941 get_tok_str(v, NULL));
3942 } else if (bit_size == bsize
3943 && !ad.a.packed && !ad1.a.packed) {
3944 /* no need for bit fields */
3946 } else if (bit_size == 64) {
3947 tcc_error("field width 64 not implemented");
3948 } else {
3949 type1.t = (type1.t & ~VT_STRUCT_MASK)
3950 | VT_BITFIELD
3951 | (bit_size << (VT_STRUCT_SHIFT + 6));
3954 if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
3955 /* Remember we've seen a real field to check
3956 for placement of flexible array member. */
3957 c = 1;
3959 /* If member is a struct or bit-field, enforce
3960 placing into the struct (as anonymous). */
3961 if (v == 0 &&
3962 ((type1.t & VT_BTYPE) == VT_STRUCT ||
3963 bit_size >= 0)) {
3964 v = anon_sym++;
3966 if (v) {
3967 ss = sym_push(v | SYM_FIELD, &type1, 0, 0);
3968 ss->a = ad1.a;
3969 *ps = ss;
3970 ps = &ss->next;
3972 if (tok == ';' || tok == TOK_EOF)
3973 break;
3974 skip(',');
3976 skip(';');
3978 skip('}');
3979 parse_attribute(&ad);
3980 struct_layout(type, &ad);
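/* Illustration (hypothetical enums; the 64-bit case depends on LONG_SIZE):
   how the enum handling above selects the integral type:

       enum E1 { A = 1, B };                // fits in int            -> int
       enum E2 { C = 0x80000000 };          // positive, > INT_MAX    -> unsigned int
       enum E3 { D = -1, E = 0x100000000 }; // needs 64 bits          -> long long

   Individual enumerators keep type int whenever their value fits in int. */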
3985 static void sym_to_attr(AttributeDef *ad, Sym *s)
3987 if (s->a.aligned && 0 == ad->a.aligned)
3988 ad->a.aligned = s->a.aligned;
3989 if (s->f.func_call && 0 == ad->f.func_call)
3990 ad->f.func_call = s->f.func_call;
3991 if (s->f.func_type && 0 == ad->f.func_type)
3992 ad->f.func_type = s->f.func_type;
3993 if (s->a.packed)
3994 ad->a.packed = 1;
3997 /* Add type qualifiers to a type. If the type is an array then the qualifiers
3998 are added to the element type, copied because it could be a typedef. */
3999 static void parse_btype_qualify(CType *type, int qualifiers)
4001 while (type->t & VT_ARRAY) {
4002 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
4003 type = &type->ref->type;
4005 type->t |= qualifiers;
4008 /* return 0 if no type declaration. otherwise, return the basic type
4009 and skip it.
4011 static int parse_btype(CType *type, AttributeDef *ad)
4013 int t, u, bt, st, type_found, typespec_found, g;
4014 Sym *s;
4015 CType type1;
4017 memset(ad, 0, sizeof(AttributeDef));
4018 type_found = 0;
4019 typespec_found = 0;
4020 t = VT_INT;
4021 bt = st = -1;
4022 type->ref = NULL;
4024 while(1) {
4025 switch(tok) {
4026 case TOK_EXTENSION:
4027 /* currently, we really ignore extension */
4028 next();
4029 continue;
4031 /* basic types */
4032 case TOK_CHAR:
4033 u = VT_BYTE;
4034 basic_type:
4035 next();
4036 basic_type1:
4037 if (u == VT_SHORT || u == VT_LONG) {
4038 if (st != -1 || (bt != -1 && bt != VT_INT))
4039 tmbt: tcc_error("too many basic types");
4040 st = u;
4041 } else {
4042 if (bt != -1 || (st != -1 && u != VT_INT))
4043 goto tmbt;
4044 bt = u;
4046 if (u != VT_INT)
4047 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
4048 typespec_found = 1;
4049 break;
4050 case TOK_VOID:
4051 u = VT_VOID;
4052 goto basic_type;
4053 case TOK_SHORT:
4054 u = VT_SHORT;
4055 goto basic_type;
4056 case TOK_INT:
4057 u = VT_INT;
4058 goto basic_type;
4059 case TOK_LONG:
4060 if ((t & VT_BTYPE) == VT_DOUBLE) {
4061 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
4062 } else if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
4063 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LLONG;
4064 } else {
4065 u = VT_LONG;
4066 goto basic_type;
4068 next();
4069 break;
4070 #ifdef TCC_TARGET_ARM64
4071 case TOK_UINT128:
4072 /* GCC's __uint128_t appears in some Linux header files. Make it a
4073 synonym for long double to get the size and alignment right. */
4074 u = VT_LDOUBLE;
4075 goto basic_type;
4076 #endif
4077 case TOK_BOOL:
4078 u = VT_BOOL;
4079 goto basic_type;
4080 case TOK_FLOAT:
4081 u = VT_FLOAT;
4082 goto basic_type;
4083 case TOK_DOUBLE:
4084 if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
4085 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
4086 } else {
4087 u = VT_DOUBLE;
4088 goto basic_type;
4090 next();
4091 break;
4092 case TOK_ENUM:
4093 struct_decl(&type1, VT_ENUM);
4094 basic_type2:
4095 u = type1.t;
4096 type->ref = type1.ref;
4097 goto basic_type1;
4098 case TOK_STRUCT:
4099 struct_decl(&type1, VT_STRUCT);
4100 goto basic_type2;
4101 case TOK_UNION:
4102 struct_decl(&type1, VT_UNION);
4103 goto basic_type2;
4105 /* type modifiers */
4106 case TOK_CONST1:
4107 case TOK_CONST2:
4108 case TOK_CONST3:
4109 type->t = t;
4110 parse_btype_qualify(type, VT_CONSTANT);
4111 t = type->t;
4112 next();
4113 break;
4114 case TOK_VOLATILE1:
4115 case TOK_VOLATILE2:
4116 case TOK_VOLATILE3:
4117 type->t = t;
4118 parse_btype_qualify(type, VT_VOLATILE);
4119 t = type->t;
4120 next();
4121 break;
4122 case TOK_SIGNED1:
4123 case TOK_SIGNED2:
4124 case TOK_SIGNED3:
4125 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
4126 tcc_error("signed and unsigned modifier");
4127 t |= VT_DEFSIGN;
4128 next();
4129 typespec_found = 1;
4130 break;
4131 case TOK_REGISTER:
4132 case TOK_AUTO:
4133 case TOK_RESTRICT1:
4134 case TOK_RESTRICT2:
4135 case TOK_RESTRICT3:
4136 next();
4137 break;
4138 case TOK_UNSIGNED:
4139 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
4140 tcc_error("signed and unsigned modifier");
4141 t |= VT_DEFSIGN | VT_UNSIGNED;
4142 next();
4143 typespec_found = 1;
4144 break;
4146 /* storage */
4147 case TOK_EXTERN:
4148 g = VT_EXTERN;
4149 goto storage;
4150 case TOK_STATIC:
4151 g = VT_STATIC;
4152 goto storage;
4153 case TOK_TYPEDEF:
4154 g = VT_TYPEDEF;
4155 goto storage;
4156 storage:
4157 if (t & (VT_EXTERN|VT_STATIC|VT_TYPEDEF) & ~g)
4158 tcc_error("multiple storage classes");
4159 t |= g;
4160 next();
4161 break;
4162 case TOK_INLINE1:
4163 case TOK_INLINE2:
4164 case TOK_INLINE3:
4165 t |= VT_INLINE;
4166 next();
4167 break;
4169 /* GNUC attribute */
4170 case TOK_ATTRIBUTE1:
4171 case TOK_ATTRIBUTE2:
4172 parse_attribute(ad);
4173 if (ad->attr_mode) {
4174 u = ad->attr_mode -1;
4175 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
4177 break;
4178 /* GNUC typeof */
4179 case TOK_TYPEOF1:
4180 case TOK_TYPEOF2:
4181 case TOK_TYPEOF3:
4182 next();
4183 parse_expr_type(&type1);
4184 /* remove all storage modifiers except typedef */
4185 type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
4186 if (type1.ref)
4187 sym_to_attr(ad, type1.ref);
4188 goto basic_type2;
4189 default:
4190 if (typespec_found)
4191 goto the_end;
4192 s = sym_find(tok);
4193 if (!s || !(s->type.t & VT_TYPEDEF))
4194 goto the_end;
4195 t &= ~(VT_BTYPE|VT_LONG);
4196 u = t & ~(VT_CONSTANT | VT_VOLATILE), t ^= u;
4197 type->t = (s->type.t & ~VT_TYPEDEF) | u;
4198 type->ref = s->type.ref;
4199 if (t)
4200 parse_btype_qualify(type, t);
4201 t = type->t;
4202 /* get attributes from typedef */
4203 sym_to_attr(ad, s);
4204 next();
4205 typespec_found = 1;
4206 st = bt = -2;
4207 break;
4209 type_found = 1;
4211 the_end:
4212 if (tcc_state->char_is_unsigned) {
4213 if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
4214 t |= VT_UNSIGNED;
4216 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
4217 bt = t & (VT_BTYPE|VT_LONG);
4218 if (bt == VT_LONG)
4219 t |= LONG_SIZE == 8 ? VT_LLONG : VT_INT;
4220 #ifdef TCC_TARGET_PE
4221 if (bt == VT_LDOUBLE)
4222 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_DOUBLE;
4223 #endif
4224 type->t = t;
4225 return type_found;
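/* Illustration (a sketch with hypothetical declarations): how the specifier
   accumulation above maps onto the VT_* bits:

       unsigned short s;  // VT_SHORT | VT_DEFSIGN | VT_UNSIGNED
       long x;            // VT_LONG modifier, resolved at the_end to int or long long
       long long y;       // VT_LLONG
       long double d;     // VT_LDOUBLE (folded to VT_DOUBLE on PE targets)
       const int *cp;     // 'const' applied to the base type via parse_btype_qualify() */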
4228 /* convert a function parameter type (array to pointer and function to
4229 function pointer) */
4230 static inline void convert_parameter_type(CType *pt)
4232 /* remove const and volatile qualifiers (XXX: const could be used
4233 to indicate a const function parameter) */
4234 pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
4235 /* array must be transformed to pointer according to ANSI C */
4236 pt->t &= ~VT_ARRAY;
4237 if ((pt->t & VT_BTYPE) == VT_FUNC) {
4238 mk_pointer(pt);
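/* Illustration (hypothetical prototype): the usual C parameter adjustments
   performed above:

       void f(int a[10], int g(void), const int c);
       // is treated as:  int *a,  int (*g)(void),  int c  (qualifiers stripped) */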
4242 ST_FUNC void parse_asm_str(CString *astr)
4244 skip('(');
4245 parse_mult_str(astr, "string constant");
4248 /* Parse an asm label and return the token */
4249 static int asm_label_instr(void)
4251 int v;
4252 CString astr;
4254 next();
4255 parse_asm_str(&astr);
4256 skip(')');
4257 #ifdef ASM_DEBUG
4258 printf("asm_alias: \"%s\"\n", (char *)astr.data);
4259 #endif
4260 v = tok_alloc(astr.data, astr.size - 1)->tok;
4261 cstr_free(&astr);
4262 return v;
4265 static int post_type(CType *type, AttributeDef *ad, int storage, int td)
4267 int n, l, t1, arg_size, align;
4268 Sym **plast, *s, *first;
4269 AttributeDef ad1;
4270 CType pt;
4272 if (tok == '(') {
4273 /* function type, or recursive declarator (return if so) */
4274 next();
4275 if (td && !(td & TYPE_ABSTRACT))
4276 return 0;
4277 if (tok == ')')
4278 l = 0;
4279 else if (parse_btype(&pt, &ad1))
4280 l = FUNC_NEW;
4281 else if (td)
4282 return 0;
4283 else
4284 l = FUNC_OLD;
4285 first = NULL;
4286 plast = &first;
4287 arg_size = 0;
4288 if (l) {
4289 for(;;) {
4290 /* read param name and compute offset */
4291 if (l != FUNC_OLD) {
4292 if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
4293 break;
4294 type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT);
4295 if ((pt.t & VT_BTYPE) == VT_VOID)
4296 tcc_error("parameter declared as void");
4297 arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
4298 } else {
4299 n = tok;
4300 if (n < TOK_UIDENT)
4301 expect("identifier");
4302 pt.t = VT_VOID; /* invalid type */
4303 next();
4305 convert_parameter_type(&pt);
4306 s = sym_push(n | SYM_FIELD, &pt, 0, 0);
4307 *plast = s;
4308 plast = &s->next;
4309 if (tok == ')')
4310 break;
4311 skip(',');
4312 if (l == FUNC_NEW && tok == TOK_DOTS) {
4313 l = FUNC_ELLIPSIS;
4314 next();
4315 break;
4317 if (l == FUNC_NEW && !parse_btype(&pt, &ad1))
4318 tcc_error("invalid type");
4320 } else
4321 /* if no parameters, then old type prototype */
4322 l = FUNC_OLD;
4323 skip(')');
4324 /* NOTE: const is ignored in returned type as it has a special
4325 meaning in gcc / C++ */
4326 type->t &= ~VT_CONSTANT;
4327 /* some ancient pre-K&R C allows a function to return an array
4328 and the array brackets to be put after the arguments, such
4329 that "int c()[]" means something like "int[] c()" */
4330 if (tok == '[') {
4331 next();
4332 skip(']'); /* only handle simple "[]" */
4333 mk_pointer(type);
4335 /* we push an anonymous symbol which will contain the function prototype */
4336 ad->f.func_args = arg_size;
4337 ad->f.func_type = l;
4338 s = sym_push(SYM_FIELD, type, 0, 0);
4339 s->a = ad->a;
4340 s->f = ad->f;
4341 s->next = first;
4342 type->t = VT_FUNC;
4343 type->ref = s;
4344 } else if (tok == '[') {
4345 int saved_nocode_wanted = nocode_wanted;
4346 /* array definition */
4347 next();
4348 while (1) {
4349 /* XXX The optional type-quals and static should only be accepted
4350 in parameter decls. The '*' as well, and then even only
4351 in prototypes (not function defs). */
4352 switch (tok) {
4353 case TOK_RESTRICT1: case TOK_RESTRICT2: case TOK_RESTRICT3:
4354 case TOK_CONST1:
4355 case TOK_VOLATILE1:
4356 case TOK_STATIC:
4357 case '*':
4358 next();
4359 continue;
4360 default:
4361 break;
4363 break;
4365 n = -1;
4366 t1 = 0;
4367 if (tok != ']') {
4368 if (!local_stack || (storage & VT_STATIC))
4369 vpushi(expr_const());
4370 else {
4371 /* The length of a VLA (which can only occur with local_stack && !VT_STATIC)
4372 must always be evaluated, even under nocode_wanted,
4373 so that its size slot is initialized (e.g. under sizeof
4374 or typeof). */
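/* For example, in 'sizeof (char[n])' the expression n is still evaluated here so that the VLA's size slot is written. */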
4375 nocode_wanted = 0;
4376 gexpr();
4378 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4379 n = vtop->c.i;
4380 if (n < 0)
4381 tcc_error("invalid array size");
4382 } else {
4383 if (!is_integer_btype(vtop->type.t & VT_BTYPE))
4384 tcc_error("size of variable length array should be an integer");
4385 t1 = VT_VLA;
4388 skip(']');
4389 /* parse next post type */
4390 post_type(type, ad, storage, 0);
4391 if (type->t == VT_FUNC)
4392 tcc_error("declaration of an array of functions");
4393 t1 |= type->t & VT_VLA;
4395 if (t1 & VT_VLA) {
4396 loc -= type_size(&int_type, &align);
4397 loc &= -align;
4398 n = loc;
4400 vla_runtime_type_size(type, &align);
4401 gen_op('*');
4402 vset(&int_type, VT_LOCAL|VT_LVAL, n);
4403 vswap();
4404 vstore();
4406 if (n != -1)
4407 vpop();
4408 nocode_wanted = saved_nocode_wanted;
4410 /* we push an anonymous symbol which will contain the array
4411 element type */
4412 s = sym_push(SYM_FIELD, type, 0, n);
4413 type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
4414 type->ref = s;
4416 return 1;
4419 /* Parse a type declarator (except basic type), and return the type
4420 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4421 expected. 'type' should contain the basic type. 'ad' is the
4422 attribute definition of the basic type. It can be modified by
4423 type_decl(). If this (possibly abstract) declarator is a pointer chain
4424 it returns the innermost pointed to type (equals *type, but is a different
4425 pointer); otherwise it returns 'type' itself. That is used for recursive calls. */
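/* For example, with 'int **p' the returned pointer refers to the inner 'int' type node stored in the pointer chain, while *type describes 'int **'. */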
4426 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td)
4428 CType *post, *ret;
4429 int qualifiers, storage;
4431 /* recursive type, remove storage bits first, apply them later again */
4432 storage = type->t & VT_STORAGE;
4433 type->t &= ~VT_STORAGE;
4434 post = ret = type;
4436 while (tok == '*') {
4437 qualifiers = 0;
4438 redo:
4439 next();
4440 switch(tok) {
4441 case TOK_CONST1:
4442 case TOK_CONST2:
4443 case TOK_CONST3:
4444 qualifiers |= VT_CONSTANT;
4445 goto redo;
4446 case TOK_VOLATILE1:
4447 case TOK_VOLATILE2:
4448 case TOK_VOLATILE3:
4449 qualifiers |= VT_VOLATILE;
4450 goto redo;
4451 case TOK_RESTRICT1:
4452 case TOK_RESTRICT2:
4453 case TOK_RESTRICT3:
4454 goto redo;
4455 /* XXX: clarify attribute handling */
4456 case TOK_ATTRIBUTE1:
4457 case TOK_ATTRIBUTE2:
4458 parse_attribute(ad);
4459 break;
4461 mk_pointer(type);
4462 type->t |= qualifiers;
4463 if (ret == type)
4464 /* innermost pointed to type is the one for the first derivation */
4465 ret = pointed_type(type);
4468 if (tok == '(') {
4469 /* This is possibly a parameter type list for abstract declarators
4470 ('int ()'), use post_type for testing this. */
4471 if (!post_type(type, ad, 0, td)) {
4472 /* It's not, so it's a nested declarator, and the post operations
4473 apply to the innermost pointed to type (if any). */
4474 /* XXX: it is not correct to modify 'ad' at this point, but
4475 the syntax is not clear */
4476 parse_attribute(ad);
4477 post = type_decl(type, ad, v, td);
4478 skip(')');
4480 } else if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
4481 /* type identifier */
4482 *v = tok;
4483 next();
4484 } else {
4485 if (!(td & TYPE_ABSTRACT))
4486 expect("identifier");
4487 *v = 0;
4489 post_type(post, ad, storage, 0);
4490 parse_attribute(ad);
4491 type->t |= storage;
4492 return ret;
4495 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
4496 ST_FUNC int lvalue_type(int t)
4498 int bt, r;
4499 r = VT_LVAL;
4500 bt = t & VT_BTYPE;
4501 if (bt == VT_BYTE || bt == VT_BOOL)
4502 r |= VT_LVAL_BYTE;
4503 else if (bt == VT_SHORT)
4504 r |= VT_LVAL_SHORT;
4505 else
4506 return r;
4507 if (t & VT_UNSIGNED)
4508 r |= VT_LVAL_UNSIGNED;
4509 return r;
4512 /* indirection with full error checking and bound check */
4513 ST_FUNC void indir(void)
4515 if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
4516 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
4517 return;
4518 expect("pointer");
4520 if (vtop->r & VT_LVAL)
4521 gv(RC_INT);
4522 vtop->type = *pointed_type(&vtop->type);
4523 /* Arrays and functions are never lvalues */
4524 if (!(vtop->type.t & VT_ARRAY) && !(vtop->type.t & VT_VLA)
4525 && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
4526 vtop->r |= lvalue_type(vtop->type.t);
4527 /* if bound checking, the referenced pointer must be checked */
4528 #ifdef CONFIG_TCC_BCHECK
4529 if (tcc_state->do_bounds_check)
4530 vtop->r |= VT_MUSTBOUND;
4531 #endif
4535 /* pass a parameter to a function and do type checking and casting */
4536 static void gfunc_param_typed(Sym *func, Sym *arg)
4538 int func_type;
4539 CType type;
4541 func_type = func->f.func_type;
4542 if (func_type == FUNC_OLD ||
4543 (func_type == FUNC_ELLIPSIS && arg == NULL)) {
4544 /* default casting : only need to convert float to double */
4545 if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
4546 gen_cast_s(VT_DOUBLE);
4547 } else if (vtop->type.t & VT_BITFIELD) {
4548 type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
4549 type.ref = vtop->type.ref;
4550 gen_cast(&type);
4552 } else if (arg == NULL) {
4553 tcc_error("too many arguments to function");
4554 } else {
4555 type = arg->type;
4556 type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
4557 gen_assign_cast(&type);
4561 /* parse an expression and return its type without any side effect. */
4562 static void expr_type(CType *type, void (*expr_fn)(void))
4564 nocode_wanted++;
4565 expr_fn();
4566 *type = vtop->type;
4567 vpop();
4568 nocode_wanted--;
4571 /* parse an expression of the form '(type)' or '(expr)' and return its
4572 type */
4573 static void parse_expr_type(CType *type)
4575 int n;
4576 AttributeDef ad;
4578 skip('(');
4579 if (parse_btype(type, &ad)) {
4580 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4581 } else {
4582 expr_type(type, gexpr);
4584 skip(')');
4587 static void parse_type(CType *type)
4589 AttributeDef ad;
4590 int n;
4592 if (!parse_btype(type, &ad)) {
4593 expect("type");
4595 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4598 static void parse_builtin_params(int nc, const char *args)
4600 char c, sep = '(';
4601 CType t;
4602 if (nc)
4603 nocode_wanted++;
4604 next();
4605 while ((c = *args++)) {
4606 skip(sep);
4607 sep = ',';
4608 switch (c) {
4609 case 'e': expr_eq(); continue;
4610 case 't': parse_type(&t); vpush(&t); continue;
4611 default: tcc_error("internal error"); break;
4614 skip(')');
4615 if (nc)
4616 nocode_wanted--;
4619 ST_FUNC void unary(void)
4621 int n, t, align, size, r, sizeof_caller;
4622 CType type;
4623 Sym *s;
4624 AttributeDef ad;
4626 sizeof_caller = in_sizeof;
4627 in_sizeof = 0;
4628 type.ref = NULL;
4629 /* XXX: GCC 2.95.3 does not generate a table although it should be
4630 better here */
4631 tok_next:
4632 switch(tok) {
4633 case TOK_EXTENSION:
4634 next();
4635 goto tok_next;
4636 case TOK_LCHAR:
4637 #ifdef TCC_TARGET_PE
4638 t = VT_SHORT|VT_UNSIGNED;
4639 goto push_tokc;
4640 #endif
4641 case TOK_CINT:
4642 case TOK_CCHAR:
4643 t = VT_INT;
4644 push_tokc:
4645 type.t = t;
4646 vsetc(&type, VT_CONST, &tokc);
4647 next();
4648 break;
4649 case TOK_CUINT:
4650 t = VT_INT | VT_UNSIGNED;
4651 goto push_tokc;
4652 case TOK_CLLONG:
4653 t = VT_LLONG;
4654 goto push_tokc;
4655 case TOK_CULLONG:
4656 t = VT_LLONG | VT_UNSIGNED;
4657 goto push_tokc;
4658 case TOK_CFLOAT:
4659 t = VT_FLOAT;
4660 goto push_tokc;
4661 case TOK_CDOUBLE:
4662 t = VT_DOUBLE;
4663 goto push_tokc;
4664 case TOK_CLDOUBLE:
4665 t = VT_LDOUBLE;
4666 goto push_tokc;
4667 case TOK_CLONG:
4668 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG;
4669 goto push_tokc;
4670 case TOK_CULONG:
4671 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG | VT_UNSIGNED;
4672 goto push_tokc;
4673 case TOK___FUNCTION__:
4674 if (!gnu_ext)
4675 goto tok_identifier;
4676 /* fall thru */
4677 case TOK___FUNC__:
4679 void *ptr;
4680 int len;
4681 /* special function name identifier */
4682 len = strlen(funcname) + 1;
4683 /* generate char[len] type */
4684 type.t = VT_BYTE;
4685 mk_pointer(&type);
4686 type.t |= VT_ARRAY;
4687 type.ref->c = len;
4688 vpush_ref(&type, data_section, data_section->data_offset, len);
4689 if (!NODATA_WANTED) {
4690 ptr = section_ptr_add(data_section, len);
4691 memcpy(ptr, funcname, len);
4693 next();
4695 break;
4696 case TOK_LSTR:
4697 #ifdef TCC_TARGET_PE
4698 t = VT_SHORT | VT_UNSIGNED;
4699 #else
4700 t = VT_INT;
4701 #endif
4702 goto str_init;
4703 case TOK_STR:
4704 /* string parsing */
4705 t = VT_BYTE;
4706 if (tcc_state->char_is_unsigned)
4707 t = VT_BYTE | VT_UNSIGNED;
4708 str_init:
4709 if (tcc_state->warn_write_strings)
4710 t |= VT_CONSTANT;
4711 type.t = t;
4712 mk_pointer(&type);
4713 type.t |= VT_ARRAY;
4714 memset(&ad, 0, sizeof(AttributeDef));
4715 decl_initializer_alloc(&type, &ad, VT_CONST, 2, 0, 0);
4716 break;
4717 case '(':
4718 next();
4719 /* cast ? */
4720 if (parse_btype(&type, &ad)) {
4721 type_decl(&type, &ad, &n, TYPE_ABSTRACT);
4722 skip(')');
4723 /* check ISOC99 compound literal */
4724 if (tok == '{') {
4725 /* data is allocated locally by default */
4726 if (global_expr)
4727 r = VT_CONST;
4728 else
4729 r = VT_LOCAL;
4730 /* all except arrays are lvalues */
4731 if (!(type.t & VT_ARRAY))
4732 r |= lvalue_type(type.t);
4733 memset(&ad, 0, sizeof(AttributeDef));
4734 decl_initializer_alloc(&type, &ad, r, 1, 0, 0);
4735 } else {
4736 if (sizeof_caller) {
4737 vpush(&type);
4738 return;
4740 unary();
4741 gen_cast(&type);
4743 } else if (tok == '{') {
4744 int saved_nocode_wanted = nocode_wanted;
4745 if (const_wanted)
4746 tcc_error("expected constant");
4747 /* save all registers */
4748 save_regs(0);
4749 /* statement expression : we do not accept break/continue
4750 inside as GCC does. We do retain the nocode_wanted state,
4751 as statement expressions can't ever be entered from the
4752 outside, so any reactivation of code emission (from labels
4753 or loop heads) can be disabled again after the end of it. */
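/* For example, a GNU statement expression such as '({ int i = f(); i + 1; })' is parsed here; its value is that of the last expression statement. */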
4754 block(NULL, NULL, 1);
4755 nocode_wanted = saved_nocode_wanted;
4756 skip(')');
4757 } else {
4758 gexpr();
4759 skip(')');
4761 break;
4762 case '*':
4763 next();
4764 unary();
4765 indir();
4766 break;
4767 case '&':
4768 next();
4769 unary();
4770 /* function names must be treated as function pointers,
4771 except for unary '&' and sizeof. Since we consider that
4772 functions are not lvalues, we only have to handle it
4773 there and in function calls. */
4774 /* arrays can also be used although they are not lvalues */
4775 if ((vtop->type.t & VT_BTYPE) != VT_FUNC &&
4776 !(vtop->type.t & VT_ARRAY))
4777 test_lvalue();
4778 mk_pointer(&vtop->type);
4779 gaddrof();
4780 break;
4781 case '!':
4782 next();
4783 unary();
4784 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4785 gen_cast_s(VT_BOOL);
4786 vtop->c.i = !vtop->c.i;
4787 } else if ((vtop->r & VT_VALMASK) == VT_CMP)
4788 vtop->c.i ^= 1;
4789 else {
4790 save_regs(1);
4791 vseti(VT_JMP, gvtst(1, 0));
4793 break;
4794 case '~':
4795 next();
4796 unary();
4797 vpushi(-1);
4798 gen_op('^');
4799 break;
4800 case '+':
4801 next();
4802 unary();
4803 if ((vtop->type.t & VT_BTYPE) == VT_PTR)
4804 tcc_error("pointer not accepted for unary plus");
4805 /* In order to force cast, we add zero, except for floating point
4806 where we really need a no-op (otherwise -0.0 will be transformed
4807 into +0.0). */
4808 if (!is_float(vtop->type.t)) {
4809 vpushi(0);
4810 gen_op('+');
4812 break;
4813 case TOK_SIZEOF:
4814 case TOK_ALIGNOF1:
4815 case TOK_ALIGNOF2:
4816 t = tok;
4817 next();
4818 in_sizeof++;
4819 expr_type(&type, unary); /* this performs in_sizeof = 0; */
4820 s = vtop[1].sym; /* hack: accessing previous vtop */
4821 size = type_size(&type, &align);
4822 if (s && s->a.aligned)
4823 align = 1 << (s->a.aligned - 1);
4824 if (t == TOK_SIZEOF) {
4825 if (!(type.t & VT_VLA)) {
4826 if (size < 0)
4827 tcc_error("sizeof applied to an incomplete type");
4828 vpushs(size);
4829 } else {
4830 vla_runtime_type_size(&type, &align);
4832 } else {
4833 vpushs(align);
4835 vtop->type.t |= VT_UNSIGNED;
4836 break;
4838 case TOK_builtin_expect:
4839 /* __builtin_expect is a no-op for now */
4840 parse_builtin_params(0, "ee");
4841 vpop();
4842 break;
4843 case TOK_builtin_types_compatible_p:
4844 parse_builtin_params(0, "tt");
4845 vtop[-1].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
4846 vtop[0].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
4847 n = is_compatible_types(&vtop[-1].type, &vtop[0].type);
4848 vtop -= 2;
4849 vpushi(n);
4850 break;
4851 case TOK_builtin_choose_expr:
4853 int64_t c;
4854 next();
4855 skip('(');
4856 c = expr_const64();
4857 skip(',');
4858 if (!c) {
4859 nocode_wanted++;
4861 expr_eq();
4862 if (!c) {
4863 vpop();
4864 nocode_wanted--;
4866 skip(',');
4867 if (c) {
4868 nocode_wanted++;
4870 expr_eq();
4871 if (c) {
4872 vpop();
4873 nocode_wanted--;
4875 skip(')');
4877 break;
4878 case TOK_builtin_constant_p:
4879 parse_builtin_params(1, "e");
4880 n = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
4881 vtop--;
4882 vpushi(n);
4883 break;
4884 case TOK_builtin_frame_address:
4885 case TOK_builtin_return_address:
4887 int tok1 = tok;
4888 int level;
4889 next();
4890 skip('(');
4891 if (tok != TOK_CINT) {
4892 tcc_error("%s only takes positive integers",
4893 tok1 == TOK_builtin_return_address ?
4894 "__builtin_return_address" :
4895 "__builtin_frame_address");
4897 level = (uint32_t)tokc.i;
4898 next();
4899 skip(')');
4900 type.t = VT_VOID;
4901 mk_pointer(&type);
4902 vset(&type, VT_LOCAL, 0); /* local frame */
4903 while (level--) {
4904 mk_pointer(&vtop->type);
4905 indir(); /* -> parent frame */
4907 if (tok1 == TOK_builtin_return_address) {
4908 // assume return address is just above frame pointer on stack
4909 vpushi(PTR_SIZE);
4910 gen_op('+');
4911 mk_pointer(&vtop->type);
4912 indir();
4915 break;
4916 #ifdef TCC_TARGET_X86_64
4917 #ifdef TCC_TARGET_PE
4918 case TOK_builtin_va_start:
4919 parse_builtin_params(0, "ee");
4920 r = vtop->r & VT_VALMASK;
4921 if (r == VT_LLOCAL)
4922 r = VT_LOCAL;
4923 if (r != VT_LOCAL)
4924 tcc_error("__builtin_va_start expects a local variable");
4925 vtop->r = r;
4926 vtop->type = char_pointer_type;
4927 vtop->c.i += 8;
4928 vstore();
4929 break;
4930 #else
4931 case TOK_builtin_va_arg_types:
4932 parse_builtin_params(0, "t");
4933 vpushi(classify_x86_64_va_arg(&vtop->type));
4934 vswap();
4935 vpop();
4936 break;
4937 #endif
4938 #endif
4940 #ifdef TCC_TARGET_ARM64
4941 case TOK___va_start: {
4942 parse_builtin_params(0, "ee");
4943 //xx check types
4944 gen_va_start();
4945 vpushi(0);
4946 vtop->type.t = VT_VOID;
4947 break;
4949 case TOK___va_arg: {
4950 parse_builtin_params(0, "et");
4951 type = vtop->type;
4952 vpop();
4953 //xx check types
4954 gen_va_arg(&type);
4955 vtop->type = type;
4956 break;
4958 case TOK___arm64_clear_cache: {
4959 parse_builtin_params(0, "ee");
4960 gen_clear_cache();
4961 vpushi(0);
4962 vtop->type.t = VT_VOID;
4963 break;
4965 #endif
4966 /* pre operations */
4967 case TOK_INC:
4968 case TOK_DEC:
4969 t = tok;
4970 next();
4971 unary();
4972 inc(0, t);
4973 break;
4974 case '-':
4975 next();
4976 unary();
4977 t = vtop->type.t & VT_BTYPE;
4978 if (is_float(t)) {
4979 /* In IEEE negate(x) isn't subtract(0,x), but rather
4980 subtract(-0, x). */
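/* e.g. negating +0.0 must yield -0.0: 0.0 - 0.0 gives +0.0, while -0.0 - 0.0 gives -0.0 */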
4981 vpush(&vtop->type);
4982 if (t == VT_FLOAT)
4983 vtop->c.f = -1.0 * 0.0;
4984 else if (t == VT_DOUBLE)
4985 vtop->c.d = -1.0 * 0.0;
4986 else
4987 vtop->c.ld = -1.0 * 0.0;
4988 } else
4989 vpushi(0);
4990 vswap();
4991 gen_op('-');
4992 break;
4993 case TOK_LAND:
4994 if (!gnu_ext)
4995 goto tok_identifier;
4996 next();
4997 /* allow taking the address of a label */
4998 if (tok < TOK_UIDENT)
4999 expect("label identifier");
5000 s = label_find(tok);
5001 if (!s) {
5002 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
5003 } else {
5004 if (s->r == LABEL_DECLARED)
5005 s->r = LABEL_FORWARD;
5007 if (!s->type.t) {
5008 s->type.t = VT_VOID;
5009 mk_pointer(&s->type);
5010 s->type.t |= VT_STATIC;
5012 vpushsym(&s->type, s);
5013 next();
5014 break;
5016 case TOK_GENERIC:
5018 CType controlling_type;
5019 int has_default = 0;
5020 int has_match = 0;
5021 int learn = 0;
5022 TokenString *str = NULL;
5024 next();
5025 skip('(');
5026 expr_type(&controlling_type, expr_eq);
5027 controlling_type.t &= ~(VT_CONSTANT | VT_VOLATILE | VT_ARRAY);
5028 if ((controlling_type.t & VT_BTYPE) == VT_FUNC)
5029 mk_pointer(&controlling_type);
5030 for (;;) {
5031 learn = 0;
5032 skip(',');
5033 if (tok == TOK_DEFAULT) {
5034 if (has_default)
5035 tcc_error("too many 'default'");
5036 has_default = 1;
5037 if (!has_match)
5038 learn = 1;
5039 next();
5040 } else {
5041 AttributeDef ad_tmp;
5042 int itmp;
5043 CType cur_type;
5044 parse_btype(&cur_type, &ad_tmp);
5045 type_decl(&cur_type, &ad_tmp, &itmp, TYPE_ABSTRACT);
5046 if (compare_types(&controlling_type, &cur_type, 0)) {
5047 if (has_match) {
5048 tcc_error("type match twice");
5050 has_match = 1;
5051 learn = 1;
5054 skip(':');
5055 if (learn) {
5056 if (str)
5057 tok_str_free(str);
5058 skip_or_save_block(&str);
5059 } else {
5060 skip_or_save_block(NULL);
5062 if (tok == ')')
5063 break;
5065 if (!str) {
5066 char buf[60];
5067 type_to_str(buf, sizeof buf, &controlling_type, NULL);
5068 tcc_error("type '%s' does not match any association", buf);
5070 begin_macro(str, 1);
5071 next();
5072 expr_eq();
5073 if (tok != TOK_EOF)
5074 expect(",");
5075 end_macro();
5076 next();
5077 break;
5079 // special qnan , snan and infinity values
5080 case TOK___NAN__:
5081 n = 0x7fc00000;
5082 special_math_val:
5083 vpushi(n);
5084 vtop->type.t = VT_FLOAT;
5085 next();
5086 break;
5087 case TOK___SNAN__:
5088 n = 0x7f800001;
5089 goto special_math_val;
5090 case TOK___INF__:
5091 n = 0x7f800000;
5092 goto special_math_val;
5094 default:
5095 tok_identifier:
5096 t = tok;
5097 next();
5098 if (t < TOK_UIDENT)
5099 expect("identifier");
5100 s = sym_find(t);
5101 if (!s || IS_ASM_SYM(s)) {
5102 const char *name = get_tok_str(t, NULL);
5103 if (tok != '(')
5104 tcc_error("'%s' undeclared", name);
5105 /* for simple function calls, we tolerate undeclared
5106 external reference to an int() function */
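/* i.e. the call is compiled as if the function had been declared with the old-style prototype 'int ()' */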
5107 if (tcc_state->warn_implicit_function_declaration
5108 #ifdef TCC_TARGET_PE
5109 /* people must be warned about using undeclared WINAPI functions
5110 (which usually start with an uppercase letter) */
5111 || (name[0] >= 'A' && name[0] <= 'Z')
5112 #endif
5114 tcc_warning("implicit declaration of function '%s'", name);
5115 s = external_global_sym(t, &func_old_type, 0);
5118 r = s->r;
5119 /* A symbol that has a register is a local register variable,
5120 which starts out as VT_LOCAL value. */
5121 if ((r & VT_VALMASK) < VT_CONST)
5122 r = (r & ~VT_VALMASK) | VT_LOCAL;
5124 vset(&s->type, r, s->c);
5125 /* Point to s as backpointer (even without r&VT_SYM).
5126 Will be used by at least the x86 inline asm parser for
5127 regvars. */
5128 vtop->sym = s;
5130 if (r & VT_SYM) {
5131 vtop->c.i = 0;
5132 } else if (r == VT_CONST && IS_ENUM_VAL(s->type.t)) {
5133 vtop->c.i = s->enum_val;
5135 break;
5138 /* post operations */
5139 while (1) {
5140 if (tok == TOK_INC || tok == TOK_DEC) {
5141 inc(1, tok);
5142 next();
5143 } else if (tok == '.' || tok == TOK_ARROW || tok == TOK_CDOUBLE) {
5144 int qualifiers;
5145 /* field */
5146 if (tok == TOK_ARROW)
5147 indir();
5148 qualifiers = vtop->type.t & (VT_CONSTANT | VT_VOLATILE);
5149 test_lvalue();
5150 gaddrof();
5151 /* expect pointer on structure */
5152 if ((vtop->type.t & VT_BTYPE) != VT_STRUCT)
5153 expect("struct or union");
5154 if (tok == TOK_CDOUBLE)
5155 expect("field name");
5156 next();
5157 if (tok == TOK_CINT || tok == TOK_CUINT)
5158 expect("field name");
5159 s = find_field(&vtop->type, tok);
5160 if (!s)
5161 tcc_error("field not found: %s", get_tok_str(tok & ~SYM_FIELD, &tokc));
5162 /* add field offset to pointer */
5163 vtop->type = char_pointer_type; /* change type to 'char *' */
5164 vpushi(s->c);
5165 gen_op('+');
5166 /* change type to field type, and set to lvalue */
5167 vtop->type = s->type;
5168 vtop->type.t |= qualifiers;
5169 /* an array is never an lvalue */
5170 if (!(vtop->type.t & VT_ARRAY)) {
5171 vtop->r |= lvalue_type(vtop->type.t);
5172 #ifdef CONFIG_TCC_BCHECK
5173 /* if bound checking, the referenced pointer must be checked */
5174 if (tcc_state->do_bounds_check && (vtop->r & VT_VALMASK) != VT_LOCAL)
5175 vtop->r |= VT_MUSTBOUND;
5176 #endif
5178 next();
5179 } else if (tok == '[') {
5180 next();
5181 gexpr();
5182 gen_op('+');
5183 indir();
5184 skip(']');
5185 } else if (tok == '(') {
5186 SValue ret;
5187 Sym *sa;
5188 int nb_args, ret_nregs, ret_align, regsize, variadic;
5190 /* function call */
5191 if ((vtop->type.t & VT_BTYPE) != VT_FUNC) {
5192 /* pointer test (no array accepted) */
5193 if ((vtop->type.t & (VT_BTYPE | VT_ARRAY)) == VT_PTR) {
5194 vtop->type = *pointed_type(&vtop->type);
5195 if ((vtop->type.t & VT_BTYPE) != VT_FUNC)
5196 goto error_func;
5197 } else {
5198 error_func:
5199 expect("function pointer");
5201 } else {
5202 vtop->r &= ~VT_LVAL; /* no lvalue */
5204 /* get return type */
5205 s = vtop->type.ref;
5206 next();
5207 sa = s->next; /* first parameter */
5208 nb_args = regsize = 0;
5209 ret.r2 = VT_CONST;
5210 /* compute first implicit argument if a structure is returned */
5211 if ((s->type.t & VT_BTYPE) == VT_STRUCT) {
5212 variadic = (s->f.func_type == FUNC_ELLIPSIS);
5213 ret_nregs = gfunc_sret(&s->type, variadic, &ret.type,
5214 &ret_align, &regsize);
5215 if (!ret_nregs) {
5216 /* get some space for the returned structure */
5217 size = type_size(&s->type, &align);
5218 #ifdef TCC_TARGET_ARM64
5219 /* On arm64, a small struct is returned in registers.
5220 It is much easier to write it to memory if we know
5221 that we are allowed to write some extra bytes, so
5222 round the allocated space up to a power of 2: */
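/* e.g. a 12-byte struct gets 16 bytes of local space here */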
5223 if (size < 16)
5224 while (size & (size - 1))
5225 size = (size | (size - 1)) + 1;
5226 #endif
5227 loc = (loc - size) & -align;
5228 ret.type = s->type;
5229 ret.r = VT_LOCAL | VT_LVAL;
5230 /* pass it as 'int' to avoid structure arg passing
5231 problems */
5232 vseti(VT_LOCAL, loc);
5233 ret.c = vtop->c;
5234 nb_args++;
5236 } else {
5237 ret_nregs = 1;
5238 ret.type = s->type;
5241 if (ret_nregs) {
5242 /* return in register */
5243 if (is_float(ret.type.t)) {
5244 ret.r = reg_fret(ret.type.t);
5245 #ifdef TCC_TARGET_X86_64
5246 if ((ret.type.t & VT_BTYPE) == VT_QFLOAT)
5247 ret.r2 = REG_QRET;
5248 #endif
5249 } else {
5250 #ifndef TCC_TARGET_ARM64
5251 #ifdef TCC_TARGET_X86_64
5252 if ((ret.type.t & VT_BTYPE) == VT_QLONG)
5253 #else
5254 if ((ret.type.t & VT_BTYPE) == VT_LLONG)
5255 #endif
5256 ret.r2 = REG_LRET;
5257 #endif
5258 ret.r = REG_IRET;
5260 ret.c.i = 0;
5262 if (tok != ')') {
5263 for(;;) {
5264 expr_eq();
5265 gfunc_param_typed(s, sa);
5266 nb_args++;
5267 if (sa)
5268 sa = sa->next;
5269 if (tok == ')')
5270 break;
5271 skip(',');
5274 if (sa)
5275 tcc_error("too few arguments to function");
5276 skip(')');
5277 gfunc_call(nb_args);
5279 /* return value */
5280 for (r = ret.r + ret_nregs + !ret_nregs; r-- > ret.r;) {
5281 vsetc(&ret.type, r, &ret.c);
5282 vtop->r2 = ret.r2; /* Loop only happens when r2 is VT_CONST */
5285 /* handle packed struct return */
5286 if (((s->type.t & VT_BTYPE) == VT_STRUCT) && ret_nregs) {
5287 int addr, offset;
5289 size = type_size(&s->type, &align);
5290 /* We're writing whole regs often, make sure there's enough
5291 space. Assume register size is power of 2. */
5292 if (regsize > align)
5293 align = regsize;
5294 loc = (loc - size) & -align;
5295 addr = loc;
5296 offset = 0;
5297 for (;;) {
5298 vset(&ret.type, VT_LOCAL | VT_LVAL, addr + offset);
5299 vswap();
5300 vstore();
5301 vtop--;
5302 if (--ret_nregs == 0)
5303 break;
5304 offset += regsize;
5306 vset(&s->type, VT_LOCAL | VT_LVAL, addr);
5308 } else {
5309 break;
5314 ST_FUNC void expr_prod(void)
5316 int t;
5318 unary();
5319 while (tok == '*' || tok == '/' || tok == '%') {
5320 t = tok;
5321 next();
5322 unary();
5323 gen_op(t);
5327 ST_FUNC void expr_sum(void)
5329 int t;
5331 expr_prod();
5332 while (tok == '+' || tok == '-') {
5333 t = tok;
5334 next();
5335 expr_prod();
5336 gen_op(t);
5340 static void expr_shift(void)
5342 int t;
5344 expr_sum();
5345 while (tok == TOK_SHL || tok == TOK_SAR) {
5346 t = tok;
5347 next();
5348 expr_sum();
5349 gen_op(t);
5353 static void expr_cmp(void)
5355 int t;
5357 expr_shift();
5358 while ((tok >= TOK_ULE && tok <= TOK_GT) ||
5359 tok == TOK_ULT || tok == TOK_UGE) {
5360 t = tok;
5361 next();
5362 expr_shift();
5363 gen_op(t);
5367 static void expr_cmpeq(void)
5369 int t;
5371 expr_cmp();
5372 while (tok == TOK_EQ || tok == TOK_NE) {
5373 t = tok;
5374 next();
5375 expr_cmp();
5376 gen_op(t);
5380 static void expr_and(void)
5382 expr_cmpeq();
5383 while (tok == '&') {
5384 next();
5385 expr_cmpeq();
5386 gen_op('&');
5390 static void expr_xor(void)
5392 expr_and();
5393 while (tok == '^') {
5394 next();
5395 expr_and();
5396 gen_op('^');
5400 static void expr_or(void)
5402 expr_xor();
5403 while (tok == '|') {
5404 next();
5405 expr_xor();
5406 gen_op('|');
5410 static void expr_land(void)
5412 expr_or();
5413 if (tok == TOK_LAND) {
5414 int t = 0;
5415 for(;;) {
5416 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5417 gen_cast_s(VT_BOOL);
5418 if (vtop->c.i) {
5419 vpop();
5420 } else {
5421 nocode_wanted++;
5422 while (tok == TOK_LAND) {
5423 next();
5424 expr_or();
5425 vpop();
5427 nocode_wanted--;
5428 if (t)
5429 gsym(t);
5430 gen_cast_s(VT_INT);
5431 break;
5433 } else {
5434 if (!t)
5435 save_regs(1);
5436 t = gvtst(1, t);
5438 if (tok != TOK_LAND) {
5439 if (t)
5440 vseti(VT_JMPI, t);
5441 else
5442 vpushi(1);
5443 break;
5445 next();
5446 expr_or();
5451 static void expr_lor(void)
5453 expr_land();
5454 if (tok == TOK_LOR) {
5455 int t = 0;
5456 for(;;) {
5457 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5458 gen_cast_s(VT_BOOL);
5459 if (!vtop->c.i) {
5460 vpop();
5461 } else {
5462 nocode_wanted++;
5463 while (tok == TOK_LOR) {
5464 next();
5465 expr_land();
5466 vpop();
5468 nocode_wanted--;
5469 if (t)
5470 gsym(t);
5471 gen_cast_s(VT_INT);
5472 break;
5474 } else {
5475 if (!t)
5476 save_regs(1);
5477 t = gvtst(0, t);
5479 if (tok != TOK_LOR) {
5480 if (t)
5481 vseti(VT_JMP, t);
5482 else
5483 vpushi(0);
5484 break;
5486 next();
5487 expr_land();
5492 /* Assuming vtop is a value used in a conditional context
5493 (i.e. compared with zero) return 0 if it's false, 1 if
5494 true and -1 if it can't be statically determined. */
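/* e.g. a constant 0 yields 0, a non-zero constant yields 1, and a run-time value yields -1 */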
5495 static int condition_3way(void)
5497 int c = -1;
5498 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
5499 (!(vtop->r & VT_SYM) || !vtop->sym->a.weak)) {
5500 vdup();
5501 gen_cast_s(VT_BOOL);
5502 c = vtop->c.i;
5503 vpop();
5505 return c;
5508 static void expr_cond(void)
5510 int tt, u, r1, r2, rc, t1, t2, bt1, bt2, islv, c, g;
5511 SValue sv;
5512 CType type, type1, type2;
5514 expr_lor();
5515 if (tok == '?') {
5516 next();
5517 c = condition_3way();
5518 g = (tok == ':' && gnu_ext);
5519 if (c < 0) {
5520 /* needed to avoid having different registers saved in
5521 each branch */
5522 if (is_float(vtop->type.t)) {
5523 rc = RC_FLOAT;
5524 #ifdef TCC_TARGET_X86_64
5525 if ((vtop->type.t & VT_BTYPE) == VT_LDOUBLE) {
5526 rc = RC_ST0;
5528 #endif
5529 } else
5530 rc = RC_INT;
5531 gv(rc);
5532 save_regs(1);
5533 if (g)
5534 gv_dup();
5535 tt = gvtst(1, 0);
5537 } else {
5538 if (!g)
5539 vpop();
5540 tt = 0;
5543 if (1) {
5544 if (c == 0)
5545 nocode_wanted++;
5546 if (!g)
5547 gexpr();
5549 type1 = vtop->type;
5550 sv = *vtop; /* save value to handle it later */
5551 vtop--; /* no vpop so that FP stack is not flushed */
5552 skip(':');
5554 u = 0;
5555 if (c < 0)
5556 u = gjmp(0);
5557 gsym(tt);
5559 if (c == 0)
5560 nocode_wanted--;
5561 if (c == 1)
5562 nocode_wanted++;
5563 expr_cond();
5564 if (c == 1)
5565 nocode_wanted--;
5567 type2 = vtop->type;
5568 t1 = type1.t;
5569 bt1 = t1 & VT_BTYPE;
5570 t2 = type2.t;
5571 bt2 = t2 & VT_BTYPE;
5572 type.ref = NULL;
5574 /* cast operands to correct type according to ISOC rules */
5575 if (is_float(bt1) || is_float(bt2)) {
5576 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
5577 type.t = VT_LDOUBLE;
5579 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
5580 type.t = VT_DOUBLE;
5581 } else {
5582 type.t = VT_FLOAT;
5584 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
5585 /* cast to biggest op */
5586 type.t = VT_LLONG | VT_LONG;
5587 if (bt1 == VT_LLONG)
5588 type.t &= t1;
5589 if (bt2 == VT_LLONG)
5590 type.t &= t2;
5591 /* convert to unsigned if it does not fit in a long long */
5592 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
5593 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
5594 type.t |= VT_UNSIGNED;
5595 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
5596 /* If one is a null ptr constant the result type
5597 is the other. */
5598 if (is_null_pointer (vtop))
5599 type = type1;
5600 else if (is_null_pointer (&sv))
5601 type = type2;
5602 /* XXX: test pointer compatibility, C99 has more elaborate
5603 rules here. */
5604 else
5605 type = type1;
5606 } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
5607 /* XXX: test function pointer compatibility */
5608 type = bt1 == VT_FUNC ? type1 : type2;
5609 } else if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
5610 /* XXX: test structure compatibility */
5611 type = bt1 == VT_STRUCT ? type1 : type2;
5612 } else if (bt1 == VT_VOID || bt2 == VT_VOID) {
5613 /* NOTE: as an extension, we accept void on only one side */
5614 type.t = VT_VOID;
5615 } else {
5616 /* integer operations */
5617 type.t = VT_INT | (VT_LONG & (t1 | t2));
5618 /* convert to unsigned if it does not fit in an integer */
5619 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
5620 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
5621 type.t |= VT_UNSIGNED;
5623 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
5624 that `(expr ? a : b).mem` does not error with "lvalue expected" */
5625 islv = (vtop->r & VT_LVAL) && (sv.r & VT_LVAL) && VT_STRUCT == (type.t & VT_BTYPE);
5626 islv &= c < 0;
5628 /* now we convert second operand */
5629 if (c != 1) {
5630 gen_cast(&type);
5631 if (islv) {
5632 mk_pointer(&vtop->type);
5633 gaddrof();
5634 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5635 gaddrof();
5638 rc = RC_INT;
5639 if (is_float(type.t)) {
5640 rc = RC_FLOAT;
5641 #ifdef TCC_TARGET_X86_64
5642 if ((type.t & VT_BTYPE) == VT_LDOUBLE) {
5643 rc = RC_ST0;
5645 #endif
5646 } else if ((type.t & VT_BTYPE) == VT_LLONG) {
5647 /* for long longs, we use fixed registers to avoid having
5648 to handle a complicated move */
5649 rc = RC_IRET;
5652 tt = r2 = 0;
5653 if (c < 0) {
5654 r2 = gv(rc);
5655 tt = gjmp(0);
5657 gsym(u);
5659 /* this is horrible, but we must also convert first
5660 operand */
5661 if (c != 0) {
5662 *vtop = sv;
5663 gen_cast(&type);
5664 if (islv) {
5665 mk_pointer(&vtop->type);
5666 gaddrof();
5667 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5668 gaddrof();
5671 if (c < 0) {
5672 r1 = gv(rc);
5673 move_reg(r2, r1, type.t);
5674 vtop->r = r2;
5675 gsym(tt);
5676 if (islv)
5677 indir();
5683 static void expr_eq(void)
5685 int t;
5687 expr_cond();
5688 if (tok == '=' ||
5689 (tok >= TOK_A_MOD && tok <= TOK_A_DIV) ||
5690 tok == TOK_A_XOR || tok == TOK_A_OR ||
5691 tok == TOK_A_SHL || tok == TOK_A_SAR) {
5692 test_lvalue();
5693 t = tok;
5694 next();
5695 if (t == '=') {
5696 expr_eq();
5697 } else {
5698 vdup();
5699 expr_eq();
5700 gen_op(t & 0x7f);
5702 vstore();
5706 ST_FUNC void gexpr(void)
5708 while (1) {
5709 expr_eq();
5710 if (tok != ',')
5711 break;
5712 vpop();
5713 next();
5717 /* parse a constant expression and return value in vtop. */
5718 static void expr_const1(void)
5720 const_wanted++;
5721 nocode_wanted++;
5722 expr_cond();
5723 nocode_wanted--;
5724 const_wanted--;
5727 /* parse an integer constant and return its value. */
5728 static inline int64_t expr_const64(void)
5730 int64_t c;
5731 expr_const1();
5732 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
5733 expect("constant expression");
5734 c = vtop->c.i;
5735 vpop();
5736 return c;
5739 /* parse an integer constant and return its value.
5740 Complain if it doesn't fit 32bit (signed or unsigned). */
5741 ST_FUNC int expr_const(void)
5743 int c;
5744 int64_t wc = expr_const64();
5745 c = wc;
5746 if (c != wc && (unsigned)c != wc)
5747 tcc_error("constant exceeds 32 bit");
5748 return c;
5751 /* return the label token if current token is a label, otherwise
5752 return zero */
5753 static int is_label(void)
5755 int last_tok;
5757 /* fast test first */
5758 if (tok < TOK_UIDENT)
5759 return 0;
5760 /* no need to save tokc because tok is an identifier */
5761 last_tok = tok;
5762 next();
5763 if (tok == ':') {
5764 return last_tok;
5765 } else {
5766 unget_tok(last_tok);
5767 return 0;
5771 #ifndef TCC_TARGET_ARM64
5772 static void gfunc_return(CType *func_type)
5774 if ((func_type->t & VT_BTYPE) == VT_STRUCT) {
5775 CType type, ret_type;
5776 int ret_align, ret_nregs, regsize;
5777 ret_nregs = gfunc_sret(func_type, func_var, &ret_type,
5778 &ret_align, &regsize);
5779 if (0 == ret_nregs) {
5780 /* if returning structure, must copy it to implicit
5781 first pointer arg location */
5782 type = *func_type;
5783 mk_pointer(&type);
5784 vset(&type, VT_LOCAL | VT_LVAL, func_vc);
5785 indir();
5786 vswap();
5787 /* copy structure value to pointer */
5788 vstore();
5789 } else {
5790 /* returning structure packed into registers */
5791 int r, size, addr, align;
5792 size = type_size(func_type,&align);
5793 if ((vtop->r != (VT_LOCAL | VT_LVAL) ||
5794 (vtop->c.i & (ret_align-1)))
5795 && (align & (ret_align-1))) {
5796 loc = (loc - size) & -ret_align;
5797 addr = loc;
5798 type = *func_type;
5799 vset(&type, VT_LOCAL | VT_LVAL, addr);
5800 vswap();
5801 vstore();
5802 vpop();
5803 vset(&ret_type, VT_LOCAL | VT_LVAL, addr);
5805 vtop->type = ret_type;
5806 if (is_float(ret_type.t))
5807 r = rc_fret(ret_type.t);
5808 else
5809 r = RC_IRET;
5811 if (ret_nregs == 1)
5812 gv(r);
5813 else {
5814 for (;;) {
5815 vdup();
5816 gv(r);
5817 vpop();
5818 if (--ret_nregs == 0)
5819 break;
5820 /* We assume that when a structure is returned in multiple
5821 registers, their classes are consecutive values of the
5822 sequence s(n) = 2^n */
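/* i.e. the register class r is doubled (r <<= 1) for each successive part */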
5823 r <<= 1;
5824 vtop->c.i += regsize;
5828 } else if (is_float(func_type->t)) {
5829 gv(rc_fret(func_type->t));
5830 } else {
5831 gv(RC_IRET);
5833 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
5835 #endif
5837 static int case_cmp(const void *pa, const void *pb)
5839 int64_t a = (*(struct case_t**) pa)->v1;
5840 int64_t b = (*(struct case_t**) pb)->v1;
5841 return a < b ? -1 : a > b;
5844 static void gcase(struct case_t **base, int len, int *bsym)
5846 struct case_t *p;
5847 int e;
5848 int ll = (vtop->type.t & VT_BTYPE) == VT_LLONG;
5849 gv(RC_INT);
5850 while (len > 4) {
5851 /* binary search */
5852 p = base[len/2];
5853 vdup();
5854 if (ll)
5855 vpushll(p->v2);
5856 else
5857 vpushi(p->v2);
5858 gen_op(TOK_LE);
5859 e = gtst(1, 0);
5860 vdup();
5861 if (ll)
5862 vpushll(p->v1);
5863 else
5864 vpushi(p->v1);
5865 gen_op(TOK_GE);
5866 gtst_addr(0, p->sym); /* v1 <= x <= v2 */
5867 /* x < v1 */
5868 gcase(base, len/2, bsym);
5869 if (cur_switch->def_sym)
5870 gjmp_addr(cur_switch->def_sym);
5871 else
5872 *bsym = gjmp(*bsym);
5873 /* x > v2 */
5874 gsym(e);
5875 e = len/2 + 1;
5876 base += e; len -= e;
5878 /* linear scan */
5879 while (len--) {
5880 p = *base++;
5881 vdup();
5882 if (ll)
5883 vpushll(p->v2);
5884 else
5885 vpushi(p->v2);
5886 if (p->v1 == p->v2) {
5887 gen_op(TOK_EQ);
5888 gtst_addr(0, p->sym);
5889 } else {
5890 gen_op(TOK_LE);
5891 e = gtst(1, 0);
5892 vdup();
5893 if (ll)
5894 vpushll(p->v1);
5895 else
5896 vpushi(p->v1);
5897 gen_op(TOK_GE);
5898 gtst_addr(0, p->sym);
5899 gsym(e);
5904 static void block(int *bsym, int *csym, int is_expr)
5906 int a, b, c, d, cond;
5907 Sym *s;
5909 /* generate line number info */
5910 if (tcc_state->do_debug)
5911 tcc_debug_line(tcc_state);
5913 if (is_expr) {
5914 /* default return value is (void) */
5915 vpushi(0);
5916 vtop->type.t = VT_VOID;
5919 if (tok == TOK_IF) {
5920 /* if test */
5921 int saved_nocode_wanted = nocode_wanted;
5922 next();
5923 skip('(');
5924 gexpr();
5925 skip(')');
5926 cond = condition_3way();
5927 if (cond == 1)
5928 a = 0, vpop();
5929 else
5930 a = gvtst(1, 0);
5931 if (cond == 0)
5932 nocode_wanted |= 0x20000000;
5933 block(bsym, csym, 0);
5934 if (cond != 1)
5935 nocode_wanted = saved_nocode_wanted;
5936 c = tok;
5937 if (c == TOK_ELSE) {
5938 next();
5939 d = gjmp(0);
5940 gsym(a);
5941 if (cond == 1)
5942 nocode_wanted |= 0x20000000;
5943 block(bsym, csym, 0);
5944 gsym(d); /* patch else jmp */
5945 if (cond != 0)
5946 nocode_wanted = saved_nocode_wanted;
5947 } else
5948 gsym(a);
5949 } else if (tok == TOK_WHILE) {
5950 int saved_nocode_wanted;
5951 nocode_wanted &= ~0x20000000;
5952 next();
5953 d = ind;
5954 vla_sp_restore();
5955 skip('(');
5956 gexpr();
5957 skip(')');
5958 a = gvtst(1, 0);
5959 b = 0;
5960 ++local_scope;
5961 saved_nocode_wanted = nocode_wanted;
5962 block(&a, &b, 0);
5963 nocode_wanted = saved_nocode_wanted;
5964 --local_scope;
5965 gjmp_addr(d);
5966 gsym(a);
5967 gsym_addr(b, d);
5968 } else if (tok == '{') {
5969 Sym *llabel;
5970 int block_vla_sp_loc = vla_sp_loc, saved_vlas_in_scope = vlas_in_scope;
5972 next();
5973 /* record local declaration stack position */
5974 s = local_stack;
5975 llabel = local_label_stack;
5976 ++local_scope;
5978 /* handle local labels declarations */
5979 if (tok == TOK_LABEL) {
5980 next();
5981 for(;;) {
5982 if (tok < TOK_UIDENT)
5983 expect("label identifier");
5984 label_push(&local_label_stack, tok, LABEL_DECLARED);
5985 next();
5986 if (tok == ',') {
5987 next();
5988 } else {
5989 skip(';');
5990 break;
5994 while (tok != '}') {
5995 if ((a = is_label()))
5996 unget_tok(a);
5997 else
5998 decl(VT_LOCAL);
5999 if (tok != '}') {
6000 if (is_expr)
6001 vpop();
6002 block(bsym, csym, is_expr);
6005 /* pop locally defined labels */
6006 label_pop(&local_label_stack, llabel, is_expr);
6007 /* pop locally defined symbols */
6008 --local_scope;
6009 /* In the is_expr case (a statement expression is finished here),
6010 vtop might refer to symbols on the local_stack. Either via the
6011 type or via vtop->sym. We can't pop those nor any that in turn
6012 might be referred to. To make it easier we don't roll back
6013 any symbols in that case; some upper level call to block() will
6014 do that. We do have to remove such symbols from the lookup
6015 tables, though. sym_pop will do that. */
6016 sym_pop(&local_stack, s, is_expr);
6018 /* Pop VLA frames and restore stack pointer if required */
6019 if (vlas_in_scope > saved_vlas_in_scope) {
6020 vla_sp_loc = saved_vlas_in_scope ? block_vla_sp_loc : vla_sp_root_loc;
6021 vla_sp_restore();
6023 vlas_in_scope = saved_vlas_in_scope;
6025 next();
6026 } else if (tok == TOK_RETURN) {
6027 next();
6028 if (tok != ';') {
6029 gexpr();
6030 gen_assign_cast(&func_vt);
6031 if ((func_vt.t & VT_BTYPE) == VT_VOID)
6032 vtop--;
6033 else
6034 gfunc_return(&func_vt);
6036 skip(';');
6037 /* jump unless last stmt in top-level block */
6038 if (tok != '}' || local_scope != 1)
6039 rsym = gjmp(rsym);
6040 nocode_wanted |= 0x20000000;
6041 } else if (tok == TOK_BREAK) {
6042 /* compute jump */
6043 if (!bsym)
6044 tcc_error("cannot break");
6045 *bsym = gjmp(*bsym);
6046 next();
6047 skip(';');
6048 nocode_wanted |= 0x20000000;
6049 } else if (tok == TOK_CONTINUE) {
6050 /* compute jump */
6051 if (!csym)
6052 tcc_error("cannot continue");
6053 vla_sp_restore_root();
6054 *csym = gjmp(*csym);
6055 next();
6056 skip(';');
6057 } else if (tok == TOK_FOR) {
6058 int e;
6059 int saved_nocode_wanted;
6060 nocode_wanted &= ~0x20000000;
6061 next();
6062 skip('(');
6063 s = local_stack;
6064 ++local_scope;
6065 if (tok != ';') {
6066 /* c99 for-loop init decl? */
6067 if (!decl0(VT_LOCAL, 1, NULL)) {
6068 /* no, regular for-loop init expr */
6069 gexpr();
6070 vpop();
6073 skip(';');
6074 d = ind;
6075 c = ind;
6076 vla_sp_restore();
6077 a = 0;
6078 b = 0;
6079 if (tok != ';') {
6080 gexpr();
6081 a = gvtst(1, 0);
6083 skip(';');
6084 if (tok != ')') {
6085 e = gjmp(0);
6086 c = ind;
6087 vla_sp_restore();
6088 gexpr();
6089 vpop();
6090 gjmp_addr(d);
6091 gsym(e);
6093 skip(')');
6094 saved_nocode_wanted = nocode_wanted;
6095 block(&a, &b, 0);
6096 nocode_wanted = saved_nocode_wanted;
6097 gjmp_addr(c);
6098 gsym(a);
6099 gsym_addr(b, c);
6100 --local_scope;
6101 sym_pop(&local_stack, s, 0);
6103 } else
6104 if (tok == TOK_DO) {
6105 int saved_nocode_wanted;
6106 nocode_wanted &= ~0x20000000;
6107 next();
6108 a = 0;
6109 b = 0;
6110 d = ind;
6111 vla_sp_restore();
6112 saved_nocode_wanted = nocode_wanted;
6113 block(&a, &b, 0);
6114 skip(TOK_WHILE);
6115 skip('(');
6116 gsym(b);
6117 if (b)
6118 nocode_wanted = saved_nocode_wanted;
6119 gexpr();
6120 c = gvtst(0, 0);
6121 gsym_addr(c, d);
6122 nocode_wanted = saved_nocode_wanted;
6123 skip(')');
6124 gsym(a);
6125 skip(';');
6126 } else
6127 if (tok == TOK_SWITCH) {
6128 struct switch_t *saved, sw;
6129 int saved_nocode_wanted = nocode_wanted;
6130 SValue switchval;
6131 next();
6132 skip('(');
6133 gexpr();
6134 skip(')');
6135 switchval = *vtop--;
6136 a = 0;
6137 b = gjmp(0); /* jump to first case */
6138 sw.p = NULL; sw.n = 0; sw.def_sym = 0;
6139 saved = cur_switch;
6140 cur_switch = &sw;
6141 block(&a, csym, 0);
6142 nocode_wanted = saved_nocode_wanted;
6143 a = gjmp(a); /* add implicit break */
6144 /* case lookup */
6145 gsym(b);
6146 qsort(sw.p, sw.n, sizeof(void*), case_cmp);
6147 for (b = 1; b < sw.n; b++)
6148 if (sw.p[b - 1]->v2 >= sw.p[b]->v1)
6149 tcc_error("duplicate case value");
6150 /* Our switch table sorting is signed, so the compared
6151 value needs to be as well when it's 64bit. */
6152 if ((switchval.type.t & VT_BTYPE) == VT_LLONG)
6153 switchval.type.t &= ~VT_UNSIGNED;
6154 vpushv(&switchval);
6155 gcase(sw.p, sw.n, &a);
6156 vpop();
6157 if (sw.def_sym)
6158 gjmp_addr(sw.def_sym);
6159 dynarray_reset(&sw.p, &sw.n);
6160 cur_switch = saved;
6161 /* break label */
6162 gsym(a);
6163 } else
6164 if (tok == TOK_CASE) {
6165 struct case_t *cr = tcc_malloc(sizeof(struct case_t));
6166 if (!cur_switch)
6167 expect("switch");
6168 nocode_wanted &= ~0x20000000;
6169 next();
6170 cr->v1 = cr->v2 = expr_const64();
6171 if (gnu_ext && tok == TOK_DOTS) {
6172 next();
6173 cr->v2 = expr_const64();
6174 if (cr->v2 < cr->v1)
6175 tcc_warning("empty case range");
6177 cr->sym = ind;
6178 dynarray_add(&cur_switch->p, &cur_switch->n, cr);
6179 skip(':');
6180 is_expr = 0;
6181 goto block_after_label;
6182 } else
6183 if (tok == TOK_DEFAULT) {
6184 next();
6185 skip(':');
6186 if (!cur_switch)
6187 expect("switch");
6188 if (cur_switch->def_sym)
6189 tcc_error("too many 'default'");
6190 cur_switch->def_sym = ind;
6191 is_expr = 0;
6192 goto block_after_label;
6193 } else
6194 if (tok == TOK_GOTO) {
6195 next();
6196 if (tok == '*' && gnu_ext) {
6197 /* computed goto */
6198 next();
6199 gexpr();
6200 if ((vtop->type.t & VT_BTYPE) != VT_PTR)
6201 expect("pointer");
6202 ggoto();
6203 } else if (tok >= TOK_UIDENT) {
6204 s = label_find(tok);
6205 /* put forward definition if needed */
6206 if (!s) {
6207 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
6208 } else {
6209 if (s->r == LABEL_DECLARED)
6210 s->r = LABEL_FORWARD;
6212 vla_sp_restore_root();
6213 if (s->r & LABEL_FORWARD)
6214 s->jnext = gjmp(s->jnext);
6215 else
6216 gjmp_addr(s->jnext);
6217 next();
6218 } else {
6219 expect("label identifier");
6221 skip(';');
6222 } else if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
6223 asm_instr();
6224 } else {
6225 b = is_label();
6226 if (b) {
6227 /* label case */
6228 next();
6229 s = label_find(b);
6230 if (s) {
6231 if (s->r == LABEL_DEFINED)
6232 tcc_error("duplicate label '%s'", get_tok_str(s->v, NULL));
6233 gsym(s->jnext);
6234 s->r = LABEL_DEFINED;
6235 } else {
6236 s = label_push(&global_label_stack, b, LABEL_DEFINED);
6238 s->jnext = ind;
6239 vla_sp_restore();
6240 /* we accept this, but it is a mistake */
6241 block_after_label:
6242 nocode_wanted &= ~0x20000000;
6243 if (tok == '}') {
6244 tcc_warning("deprecated use of label at end of compound statement");
6245 } else {
6246 if (is_expr)
6247 vpop();
6248 block(bsym, csym, is_expr);
6250 } else {
6251 /* expression case */
6252 if (tok != ';') {
6253 if (is_expr) {
6254 vpop();
6255 gexpr();
6256 } else {
6257 gexpr();
6258 vpop();
6261 skip(';');
6266 /* This skips over a stream of tokens containing balanced {} and ()
6267 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
6268 with a '{'). If STR then allocates and stores the skipped tokens
6269 in *STR. This doesn't check if () and {} are nested correctly,
6270 i.e. "({)}" is accepted. */
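/* This is used e.g. by the _Generic handling above to save the tokens of the selected association and to skip the other ones. */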
6271 static void skip_or_save_block(TokenString **str)
6273 int braces = tok == '{';
6274 int level = 0;
6275 if (str)
6276 *str = tok_str_alloc();
6278 while ((level > 0 || (tok != '}' && tok != ',' && tok != ';' && tok != ')'))) {
6279 int t;
6280 if (tok == TOK_EOF) {
6281 if (str || level > 0)
6282 tcc_error("unexpected end of file");
6283 else
6284 break;
6286 if (str)
6287 tok_str_add_tok(*str);
6288 t = tok;
6289 next();
6290 if (t == '{' || t == '(') {
6291 level++;
6292 } else if (t == '}' || t == ')') {
6293 level--;
6294 if (level == 0 && braces && t == '}')
6295 break;
6298 if (str) {
6299 tok_str_add(*str, -1);
6300 tok_str_add(*str, 0);
6304 #define EXPR_CONST 1
6305 #define EXPR_ANY 2
6307 static void parse_init_elem(int expr_type)
6309 int saved_global_expr;
6310 switch(expr_type) {
6311 case EXPR_CONST:
6312 /* compound literals must be allocated globally in this case */
6313 saved_global_expr = global_expr;
6314 global_expr = 1;
6315 expr_const1();
6316 global_expr = saved_global_expr;
6317 /* NOTE: symbols are accepted, as well as lvalues for anonymous symbols
6318 (compound literals). */
6319 if (((vtop->r & (VT_VALMASK | VT_LVAL)) != VT_CONST
6320 && ((vtop->r & (VT_SYM|VT_LVAL)) != (VT_SYM|VT_LVAL)
6321 || vtop->sym->v < SYM_FIRST_ANOM))
6322 #ifdef TCC_TARGET_PE
6323 || ((vtop->r & VT_SYM) && vtop->sym->a.dllimport)
6324 #endif
6326 tcc_error("initializer element is not constant");
6327 break;
6328 case EXPR_ANY:
6329 expr_eq();
6330 break;
6334 /* put zeros for variable based init */
6335 static void init_putz(Section *sec, unsigned long c, int size)
6337 if (sec) {
6338 /* nothing to do because globals are already set to zero */
6339 } else {
6340 vpush_global_sym(&func_old_type, TOK_memset);
6341 vseti(VT_LOCAL, c);
6342 #ifdef TCC_TARGET_ARM
6343 vpushs(size);
6344 vpushi(0);
6345 #else
6346 vpushi(0);
6347 vpushs(size);
6348 #endif
6349 gfunc_call(3);
6353 /* 'type' is the array or struct type. 'c' is the array or struct
6354 address. cur_field is the pointer to the current
6355 field, for arrays the 'c' member contains the current start
6356 index. 'size_only' is true if only size info is needed (only used
6357 in arrays). al contains the already initialized length of the
6358 current container (starting at c). This returns the new length of that. */
6359 static int decl_designator(CType *type, Section *sec, unsigned long c,
6360 Sym **cur_field, int size_only, int al)
6362 Sym *s, *f;
6363 int index, index_last, align, l, nb_elems, elem_size;
6364 unsigned long corig = c;
6366 elem_size = 0;
6367 nb_elems = 1;
6368 if (gnu_ext && (l = is_label()) != 0)
6369 goto struct_field;
6370 /* NOTE: we only support ranges for last designator */
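/* e.g. the GNU range designator '[2 ... 4] = v' initializes indices 2 through 4 */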
6371 while (nb_elems == 1 && (tok == '[' || tok == '.')) {
6372 if (tok == '[') {
6373 if (!(type->t & VT_ARRAY))
6374 expect("array type");
6375 next();
6376 index = index_last = expr_const();
6377 if (tok == TOK_DOTS && gnu_ext) {
6378 next();
6379 index_last = expr_const();
6381 skip(']');
6382 s = type->ref;
6383 if (index < 0 || (s->c >= 0 && index_last >= s->c) ||
6384 index_last < index)
6385 tcc_error("invalid index");
6386 if (cur_field)
6387 (*cur_field)->c = index_last;
6388 type = pointed_type(type);
6389 elem_size = type_size(type, &align);
6390 c += index * elem_size;
6391 nb_elems = index_last - index + 1;
6392 } else {
6393 next();
6394 l = tok;
6395 struct_field:
6396 next();
6397 if ((type->t & VT_BTYPE) != VT_STRUCT)
6398 expect("struct/union type");
6399 f = find_field(type, l);
6400 if (!f)
6401 expect("field");
6402 if (cur_field)
6403 *cur_field = f;
6404 type = &f->type;
6405 c += f->c;
6407 cur_field = NULL;
6409 if (!cur_field) {
6410 if (tok == '=') {
6411 next();
6412 } else if (!gnu_ext) {
6413 expect("=");
6415 } else {
6416 if (type->t & VT_ARRAY) {
6417 index = (*cur_field)->c;
6418 if (type->ref->c >= 0 && index >= type->ref->c)
6419 tcc_error("index too large");
6420 type = pointed_type(type);
6421 c += index * type_size(type, &align);
6422 } else {
6423 f = *cur_field;
6424 while (f && (f->v & SYM_FIRST_ANOM) && (f->type.t & VT_BITFIELD))
6425 *cur_field = f = f->next;
6426 if (!f)
6427 tcc_error("too many field init");
6428 type = &f->type;
6429 c += f->c;
6432 /* must put zero in holes (note that doing it that way
6433 ensures that it even works with designators) */
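/* e.g. for 'int a[4] = { [3] = 1 };' the elements before index 3 are zero-filled here (locals only; static storage is already zero) */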
6434 if (!size_only && c - corig > al)
6435 init_putz(sec, corig + al, c - corig - al);
6436 decl_initializer(type, sec, c, 0, size_only);
6438 /* XXX: make it more general */
6439 if (!size_only && nb_elems > 1) {
6440 unsigned long c_end;
6441 uint8_t *src, *dst;
6442 int i;
6444 if (!sec) {
6445 vset(type, VT_LOCAL|VT_LVAL, c);
6446 for (i = 1; i < nb_elems; i++) {
6447 vset(type, VT_LOCAL|VT_LVAL, c + elem_size * i);
6448 vswap();
6449 vstore();
6451 vpop();
6452 } else if (!NODATA_WANTED) {
6453 c_end = c + nb_elems * elem_size;
6454 if (c_end > sec->data_allocated)
6455 section_realloc(sec, c_end);
6456 src = sec->data + c;
6457 dst = src;
6458 for(i = 1; i < nb_elems; i++) {
6459 dst += elem_size;
6460 memcpy(dst, src, elem_size);
6464 c += nb_elems * type_size(type, &align);
6465 if (c - corig > al)
6466 al = c - corig;
6467 return al;
6470 /* store a value or an expression directly in global data or in local array */
6471 static void init_putv(CType *type, Section *sec, unsigned long c)
6473 int bt;
6474 void *ptr;
6475 CType dtype;
6477 dtype = *type;
6478 dtype.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
6480 if (sec) {
6481 int size, align;
6482 /* XXX: not portable */
6483 /* XXX: generate error if incorrect relocation */
6484 gen_assign_cast(&dtype);
6485 bt = type->t & VT_BTYPE;
6487 if ((vtop->r & VT_SYM)
6488 && bt != VT_PTR
6489 && bt != VT_FUNC
6490 && (bt != (PTR_SIZE == 8 ? VT_LLONG : VT_INT)
6491 || (type->t & VT_BITFIELD))
6492 && !((vtop->r & VT_CONST) && vtop->sym->v >= SYM_FIRST_ANOM)
6494 tcc_error("initializer element is not computable at load time");
6496 if (NODATA_WANTED) {
6497 vtop--;
6498 return;
6501 size = type_size(type, &align);
6502 section_reserve(sec, c + size);
6503 ptr = sec->data + c;
6505 /* XXX: make code faster ? */
6506 if ((vtop->r & (VT_SYM|VT_CONST)) == (VT_SYM|VT_CONST) &&
6507 vtop->sym->v >= SYM_FIRST_ANOM &&
6508 /* XXX This rejects compound literals like
6509 '(void *){ptr}'. The problem is that '&sym' is
6510 represented the same way, which would be ruled out
6511 by the SYM_FIRST_ANOM check above, but also '"string"'
6512 in 'char *p = "string"' is represented the same
6513 with the type being VT_PTR and the symbol being an
6514 anonymous one. That is, there's no difference in vtop
6515 between '(void *){x}' and '&(void *){x}'. Ignore
6516 pointer typed entities here. Hopefully no real code
6517 will ever use compound literals with scalar type. */
6518 (vtop->type.t & VT_BTYPE) != VT_PTR) {
6519 /* These come from compound literals, memcpy stuff over. */
6520 Section *ssec;
6521 ElfSym *esym;
6522 ElfW_Rel *rel;
6523 esym = elfsym(vtop->sym);
6524 ssec = tcc_state->sections[esym->st_shndx];
6525 memmove (ptr, ssec->data + esym->st_value, size);
6526 if (ssec->reloc) {
6527 /* We need to copy over all memory contents, and that
6528 includes relocations. Use the fact that relocs are
6529 created in order, so look from the end of the relocs
6530 until we hit one before the copied region. */
6531 int num_relocs = ssec->reloc->data_offset / sizeof(*rel);
6532 rel = (ElfW_Rel*)(ssec->reloc->data + ssec->reloc->data_offset);
6533 while (num_relocs--) {
6534 rel--;
6535 if (rel->r_offset >= esym->st_value + size)
6536 continue;
6537 if (rel->r_offset < esym->st_value)
6538 break;
6539 /* Note: if the same fields are initialized multiple
6540 times (possible with designators) then we possibly
6541 add multiple relocations for the same offset here.
6542 That would lead to wrong code, the last reloc needs
6543 to win. We clean this up later after the whole
6544 initializer is parsed. */
6545 put_elf_reloca(symtab_section, sec,
6546 c + rel->r_offset - esym->st_value,
6547 ELFW(R_TYPE)(rel->r_info),
6548 ELFW(R_SYM)(rel->r_info),
6549 #if PTR_SIZE == 8
6550 rel->r_addend
6551 #else
6553 #endif
6557 } else {
6558 if (type->t & VT_BITFIELD) {
6559 int bit_pos, bit_size, bits, n;
6560 unsigned char *p, v, m;
6561 bit_pos = BIT_POS(vtop->type.t);
6562 bit_size = BIT_SIZE(vtop->type.t);
6563 p = (unsigned char*)ptr + (bit_pos >> 3);
6564 bit_pos &= 7, bits = 0;
6565 while (bit_size) {
6566 n = 8 - bit_pos;
6567 if (n > bit_size)
6568 n = bit_size;
6569 v = vtop->c.i >> bits << bit_pos;
6570 m = ((1 << n) - 1) << bit_pos;
6571 *p = (*p & ~m) | (v & m);
6572 bits += n, bit_size -= n, bit_pos = 0, ++p;
6574 } else
6575 switch(bt) {
6576 /* XXX: when cross-compiling we assume that each type has the
6577 same representation on host and target, which is likely to
6578 be wrong in the case of long double */
6579 case VT_BOOL:
6580 vtop->c.i = vtop->c.i != 0;
6581 case VT_BYTE:
6582 *(char *)ptr |= vtop->c.i;
6583 break;
6584 case VT_SHORT:
6585 *(short *)ptr |= vtop->c.i;
6586 break;
6587 case VT_FLOAT:
6588 *(float*)ptr = vtop->c.f;
6589 break;
6590 case VT_DOUBLE:
6591 *(double *)ptr = vtop->c.d;
6592 break;
6593 case VT_LDOUBLE:
6594 #if defined TCC_IS_NATIVE_387
6595 if (sizeof (long double) >= 10) /* zero pad ten-byte LD */
6596 memcpy(ptr, &vtop->c.ld, 10);
6597 #ifdef __TINYC__
6598 else if (sizeof (long double) == sizeof (double))
6599 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr) : "m" (vtop->c.ld));
6600 #endif
6601 else if (vtop->c.ld == 0.0)
6603 else
6604 #endif
6605 if (sizeof(long double) == LDOUBLE_SIZE)
6606 *(long double*)ptr = vtop->c.ld;
6607 else if (sizeof(double) == LDOUBLE_SIZE)
6608 *(double *)ptr = (double)vtop->c.ld;
6609 else
6610 tcc_error("can't cross compile long double constants");
6611 break;
6612 #if PTR_SIZE != 8
6613 case VT_LLONG:
6614 *(long long *)ptr |= vtop->c.i;
6615 break;
6616 #else
6617 case VT_LLONG:
6618 #endif
6619 case VT_PTR:
6621 addr_t val = vtop->c.i;
6622 #if PTR_SIZE == 8
6623 if (vtop->r & VT_SYM)
6624 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
6625 else
6626 *(addr_t *)ptr |= val;
6627 #else
6628 if (vtop->r & VT_SYM)
6629 greloc(sec, vtop->sym, c, R_DATA_PTR);
6630 *(addr_t *)ptr |= val;
6631 #endif
6632 break;
6634 default:
6636 int val = vtop->c.i;
6637 #if PTR_SIZE == 8
6638 if (vtop->r & VT_SYM)
6639 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
6640 else
6641 *(int *)ptr |= val;
6642 #else
6643 if (vtop->r & VT_SYM)
6644 greloc(sec, vtop->sym, c, R_DATA_PTR);
6645 *(int *)ptr |= val;
6646 #endif
6647 break;
6651 vtop--;
6652 } else {
6653 vset(&dtype, VT_LOCAL|VT_LVAL, c);
6654 vswap();
6655 vstore();
6656 vpop();
6660 /* 't' contains the type and storage info. 'c' is the offset of the
6661 object in section 'sec'. If 'sec' is NULL, it means stack based
6662 allocation. 'first' is true if array '{' must be read (multi
6663 dimension implicit array init handling). 'size_only' is true if
6664 size only evaluation is wanted (only for arrays). */
6665 static void decl_initializer(CType *type, Section *sec, unsigned long c,
6666 int first, int size_only)
6667 {
6668 int len, n, no_oblock, nb, i;
6669 int size1, align1;
6670 int have_elem;
6671 Sym *s, *f;
6672 Sym indexsym;
6673 CType *t1;
6675 /* If we currently are at an '}' or ',' we have read an initializer
6676 element in one of our callers, and not yet consumed it. */
6677 have_elem = tok == '}' || tok == ',';
6678 if (!have_elem && tok != '{' &&
6679 /* In case of strings we have special handling for arrays, so
6680 don't consume them as initializer value (which would commit them
6681 to some anonymous symbol). */
6682 tok != TOK_LSTR && tok != TOK_STR &&
6683 !size_only) {
6684 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
6685 have_elem = 1;
6686 }
6688 if (have_elem &&
6689 !(type->t & VT_ARRAY) &&
6690 /* Use i_c_parameter_t, to strip toplevel qualifiers.
6691 The source type might have VT_CONSTANT set, which is
6692 of course assignable to non-const elements. */
6693 is_compatible_unqualified_types(type, &vtop->type)) {
6694 init_putv(type, sec, c);
6695 } else if (type->t & VT_ARRAY) {
6696 s = type->ref;
6697 n = s->c;
6698 t1 = pointed_type(type);
6699 size1 = type_size(t1, &align1);
6701 no_oblock = 1;
6702 if ((first && tok != TOK_LSTR && tok != TOK_STR) ||
6703 tok == '{') {
6704 if (tok != '{')
6705 tcc_error("character array initializer must be a literal,"
6706 " optionally enclosed in braces");
6707 skip('{');
6708 no_oblock = 0;
6709 }
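/* For illustration (assumed examples): 'char s[] = "hi";' and
   'char s[] = { "hi" };' both pass this check, while something like
   'char s[] = 1;' triggers the error above. */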
6711 /* only parse strings here if correct type (otherwise: handle
6712 them as ((w)char *) expressions) */
6713 if ((tok == TOK_LSTR &&
6714 #ifdef TCC_TARGET_PE
6715 (t1->t & VT_BTYPE) == VT_SHORT && (t1->t & VT_UNSIGNED)
6716 #else
6717 (t1->t & VT_BTYPE) == VT_INT
6718 #endif
6719 ) || (tok == TOK_STR && (t1->t & VT_BTYPE) == VT_BYTE)) {
6720 len = 0;
6721 while (tok == TOK_STR || tok == TOK_LSTR) {
6722 int cstr_len, ch;
6724 /* compute maximum number of chars wanted */
6725 if (tok == TOK_STR)
6726 cstr_len = tokc.str.size;
6727 else
6728 cstr_len = tokc.str.size / sizeof(nwchar_t);
6729 cstr_len--;
6730 nb = cstr_len;
6731 if (n >= 0 && nb > (n - len))
6732 nb = n - len;
6733 if (!size_only) {
6734 if (cstr_len > nb)
6735 tcc_warning("initializer-string for array is too long");
6736 /* in order to go faster for the common case (a char
6737 string in a global variable), we handle it
6738 specifically */
6739 if (sec && tok == TOK_STR && size1 == 1) {
6740 if (!NODATA_WANTED)
6741 memcpy(sec->data + c + len, tokc.str.data, nb);
6742 } else {
6743 for(i=0;i<nb;i++) {
6744 if (tok == TOK_STR)
6745 ch = ((unsigned char *)tokc.str.data)[i];
6746 else
6747 ch = ((nwchar_t *)tokc.str.data)[i];
6748 vpushi(ch);
6749 init_putv(t1, sec, c + (len + i) * size1);
6750 }
6751 }
6752 }
6753 len += nb;
6754 next();
6755 }
6756 /* only add trailing zero if enough storage (no
6757 warning in this case since it is standard) */
6758 if (n < 0 || len < n) {
6759 if (!size_only) {
6760 vpushi(0);
6761 init_putv(t1, sec, c + (len * size1));
6762 }
6763 len++;
6764 }
6765 len *= size1;
6766 } else {
6767 indexsym.c = 0;
6768 f = &indexsym;
6770 do_init_list:
6771 len = 0;
6772 while (tok != '}' || have_elem) {
6773 len = decl_designator(type, sec, c, &f, size_only, len);
6774 have_elem = 0;
6775 if (type->t & VT_ARRAY) {
6776 ++indexsym.c;
6777 /* special test for multi dimensional arrays (may not
6778 be strictly correct if designators are used at the
6779 same time) */
6780 if (no_oblock && len >= n*size1)
6781 break;
6782 } else {
6783 if (s->type.t == VT_UNION)
6784 f = NULL;
6785 else
6786 f = f->next;
6787 if (no_oblock && f == NULL)
6788 break;
6789 }
6791 if (tok == '}')
6792 break;
6793 skip(',');
6794 }
6795 }
6796 /* put zeros at the end */
6797 if (!size_only && len < n*size1)
6798 init_putz(sec, c + len, n*size1 - len);
6799 if (!no_oblock)
6800 skip('}');
6801 /* patch type size if needed, which happens only for array types */
6802 if (n < 0)
6803 s->c = size1 == 1 ? len : ((len + size1 - 1)/size1);
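/* e.g. (illustrative) 'int b[] = { 1, 2, 3 };' enters with n == -1
   and leaves with s->c patched to 3. */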
6804 } else if ((type->t & VT_BTYPE) == VT_STRUCT) {
6805 size1 = 1;
6806 no_oblock = 1;
6807 if (first || tok == '{') {
6808 skip('{');
6809 no_oblock = 0;
6810 }
6811 s = type->ref;
6812 f = s->next;
6813 n = s->c;
6814 goto do_init_list;
6815 } else if (tok == '{') {
6816 next();
6817 decl_initializer(type, sec, c, first, size_only);
6818 skip('}');
6819 } else if (size_only) {
6820 /* If we supported only ISO C we wouldn't have to accept calling
6821 this on anything other than an array with size_only==1 (and even then
6822 only on the outermost level, so no recursion would be needed),
6823 because initializing a flex array member isn't supported.
6824 But GNU C supports it, so we need to recurse even into
6825 subfields of structs and arrays when size_only is set. */
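/* Hypothetical GNU C input that forces this recursion:
   struct S { int n; int tail[]; };
   static struct S s = { 1, { 2, 3 } };
   The size of 's' is only known after the { 2, 3 } part is scanned. */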
6826 /* just skip expression */
6827 skip_or_save_block(NULL);
6828 } else {
6829 if (!have_elem) {
6830 /* This should happen only when we haven't parsed
6831 the init element above for fear of committing a
6832 string constant to memory too early. */
6833 if (tok != TOK_STR && tok != TOK_LSTR)
6834 expect("string constant");
6835 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
6836 }
6837 init_putv(type, sec, c);
6838 }
6839 }
6841 /* parse an initializer for type 't' if 'has_init' is non zero, and
6842 allocate space in local or global data space ('r' is either
6843 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
6844 variable 'v' of scope 'scope' is declared before initializers
6845 are parsed. If 'v' is zero, then a reference to the new object
6846 is put in the value stack. If 'has_init' is 2, a special parsing
6847 is done to handle string constants. */
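/* Illustrative call situations (assumed): a local 'int x = 1;' arrives
   here with r == VT_LOCAL and is allocated on the stack (sec == NULL),
   while a file-scope 'static int y = 2;' is allocated in data_section. */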
6848 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r,
6849 int has_init, int v, int scope)
6850 {
6851 int size, align, addr;
6852 TokenString *init_str = NULL;
6854 Section *sec;
6855 Sym *flexible_array;
6856 Sym *sym = NULL;
6857 int saved_nocode_wanted = nocode_wanted;
6858 #ifdef CONFIG_TCC_BCHECK
6859 int bcheck = tcc_state->do_bounds_check && !NODATA_WANTED;
6860 #endif
6862 if (type->t & VT_STATIC)
6863 nocode_wanted |= NODATA_WANTED ? 0x40000000 : 0x80000000;
6865 flexible_array = NULL;
6866 if ((type->t & VT_BTYPE) == VT_STRUCT) {
6867 Sym *field = type->ref->next;
6868 if (field) {
6869 while (field->next)
6870 field = field->next;
6871 if (field->type.t & VT_ARRAY && field->type.ref->c < 0)
6872 flexible_array = field;
6873 }
6874 }
6876 size = type_size(type, &align);
6877 /* If unknown size, we must evaluate it before
6878 evaluating initializers because
6879 initializers can generate global data too
6880 (e.g. string pointers or ISOC99 compound
6881 literals). It also simplifies local
6882 initializers handling */
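/* e.g. (illustrative) 'char msg[] = "hello";' takes this path: the
   initializer tokens are saved, replayed once with size_only set to
   compute the size, then replayed again later for the real stores. */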
6883 if (size < 0 || (flexible_array && has_init)) {
6884 if (!has_init)
6885 tcc_error("unknown type size");
6886 /* get all init string */
6887 if (has_init == 2) {
6888 init_str = tok_str_alloc();
6889 /* only get strings */
6890 while (tok == TOK_STR || tok == TOK_LSTR) {
6891 tok_str_add_tok(init_str);
6892 next();
6893 }
6894 tok_str_add(init_str, -1);
6895 tok_str_add(init_str, 0);
6896 } else {
6897 skip_or_save_block(&init_str);
6898 }
6899 unget_tok(0);
6901 /* compute size */
6902 begin_macro(init_str, 1);
6903 next();
6904 decl_initializer(type, NULL, 0, 1, 1);
6905 /* prepare second initializer parsing */
6906 macro_ptr = init_str->str;
6907 next();
6909 /* if still unknown size, error */
6910 size = type_size(type, &align);
6911 if (size < 0)
6912 tcc_error("unknown type size");
6914 /* If there's a flex member and it was used in the initializer
6915 adjust size. */
6916 if (flexible_array &&
6917 flexible_array->type.ref->c > 0)
6918 size += flexible_array->type.ref->c
6919 * pointed_size(&flexible_array->type);
6920 /* take into account specified alignment if bigger */
6921 if (ad->a.aligned) {
6922 int speca = 1 << (ad->a.aligned - 1);
6923 if (speca > align)
6924 align = speca;
6925 } else if (ad->a.packed) {
6926 align = 1;
6927 }
6929 if (NODATA_WANTED)
6930 size = 0, align = 1;
6932 if ((r & VT_VALMASK) == VT_LOCAL) {
6933 sec = NULL;
6934 #ifdef CONFIG_TCC_BCHECK
6935 if (bcheck && (type->t & VT_ARRAY)) {
6936 loc--;
6937 }
6938 #endif
6939 loc = (loc - size) & -align;
6940 addr = loc;
6941 #ifdef CONFIG_TCC_BCHECK
6942 /* handles bounds */
6943 /* XXX: currently, since we do only one pass, we cannot track
6944 '&' operators, so we add only arrays */
6945 if (bcheck && (type->t & VT_ARRAY)) {
6946 addr_t *bounds_ptr;
6947 /* add padding between regions */
6948 loc--;
6949 /* then add local bound info */
6950 bounds_ptr = section_ptr_add(lbounds_section, 2 * sizeof(addr_t));
6951 bounds_ptr[0] = addr;
6952 bounds_ptr[1] = size;
6953 }
6954 #endif
6955 if (v) {
6956 /* local variable */
6957 #ifdef CONFIG_TCC_ASM
6958 if (ad->asm_label) {
6959 int reg = asm_parse_regvar(ad->asm_label);
6960 if (reg >= 0)
6961 r = (r & ~VT_VALMASK) | reg;
6962 }
6963 #endif
6964 sym = sym_push(v, type, r, addr);
6965 sym->a = ad->a;
6966 } else {
6967 /* push local reference */
6968 vset(type, r, addr);
6969 }
6970 } else {
6971 if (v && scope == VT_CONST) {
6972 /* see if the symbol was already defined */
6973 sym = sym_find(v);
6974 if (sym) {
6975 patch_storage(sym, ad, type);
6976 /* we accept several definitions of the same global variable. */
6977 if (!has_init && sym->c && elfsym(sym)->st_shndx != SHN_UNDEF)
6978 goto no_alloc;
6979 }
6980 }
6982 /* allocate symbol in corresponding section */
6983 sec = ad->section;
6984 if (!sec) {
6985 if (has_init)
6986 sec = data_section;
6987 else if (tcc_state->nocommon)
6988 sec = bss_section;
6989 }
6991 if (sec) {
6992 addr = section_add(sec, size, align);
6993 #ifdef CONFIG_TCC_BCHECK
6994 /* add padding if bound check */
6995 if (bcheck)
6996 section_add(sec, 1, 1);
6997 #endif
6998 } else {
6999 addr = align; /* SHN_COMMON is special, symbol value is align */
7000 sec = common_section;
7001 }
7003 if (v) {
7004 if (!sym) {
7005 sym = sym_push(v, type, r | VT_SYM, 0);
7006 patch_storage(sym, ad, NULL);
7007 }
7008 /* Local statics have a scope until now (for
7009 warnings), remove it here. */
7010 sym->sym_scope = 0;
7011 /* update symbol definition */
7012 put_extern_sym(sym, sec, addr, size);
7013 } else {
7014 /* push global reference */
7015 sym = get_sym_ref(type, sec, addr, size);
7016 vpushsym(type, sym);
7017 vtop->r |= r;
7018 }
7020 #ifdef CONFIG_TCC_BCHECK
7021 /* handles bounds now because the symbol must be defined
7022 before for the relocation */
7023 if (bcheck) {
7024 addr_t *bounds_ptr;
7026 greloca(bounds_section, sym, bounds_section->data_offset, R_DATA_PTR, 0);
7027 /* then add global bound info */
7028 bounds_ptr = section_ptr_add(bounds_section, 2 * sizeof(addr_t));
7029 bounds_ptr[0] = 0; /* relocated */
7030 bounds_ptr[1] = size;
7031 }
7032 #endif
7033 }
7035 if (type->t & VT_VLA) {
7036 int a;
7038 if (NODATA_WANTED)
7039 goto no_alloc;
7041 /* save current stack pointer */
7042 if (vlas_in_scope == 0) {
7043 if (vla_sp_root_loc == -1)
7044 vla_sp_root_loc = (loc -= PTR_SIZE);
7045 gen_vla_sp_save(vla_sp_root_loc);
7046 }
7048 vla_runtime_type_size(type, &a);
7049 gen_vla_alloc(type, a);
7050 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
7051 /* on _WIN64, because of the function args scratch area, the
7052 result of alloca differs from RSP and is returned in RAX. */
7053 gen_vla_result(addr), addr = (loc -= PTR_SIZE);
7054 #endif
7055 gen_vla_sp_save(addr);
7056 vla_sp_loc = addr;
7057 vlas_in_scope++;
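/* Illustrative source (assumed): 'void f(int n) { char buf[n]; ... }'
   ends up here; the stack pointer is saved once per scope so that
   leaving the scope can drop all VLAs allocated inside it. */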
7059 } else if (has_init) {
7060 size_t oldreloc_offset = 0;
7061 if (sec && sec->reloc)
7062 oldreloc_offset = sec->reloc->data_offset;
7063 decl_initializer(type, sec, addr, 1, 0);
7064 if (sec && sec->reloc)
7065 squeeze_multi_relocs(sec, oldreloc_offset);
7066 /* patch flexible array member size back to -1, */
7067 /* for possible subsequent similar declarations */
7068 if (flexible_array)
7069 flexible_array->type.ref->c = -1;
7070 }
7072 no_alloc:
7073 /* restore parse state if needed */
7074 if (init_str) {
7075 end_macro();
7076 next();
7077 }
7079 nocode_wanted = saved_nocode_wanted;
7080 }
7082 /* parse a function defined by symbol 'sym' and generate its code in
7083 'cur_text_section' */
7084 static void gen_function(Sym *sym)
7085 {
7086 nocode_wanted = 0;
7087 ind = cur_text_section->data_offset;
7088 if (sym->a.aligned) {
7089 size_t newoff = section_add(cur_text_section, 0,
7090 1 << (sym->a.aligned - 1));
7091 if (ind != newoff)
7092 gen_fill_nops(newoff - ind);
7093 ind = newoff;
7094 }
7095 /* NOTE: we patch the symbol size later */
7096 put_extern_sym(sym, cur_text_section, ind, 0);
7097 funcname = get_tok_str(sym->v, NULL);
7098 func_ind = ind;
7099 /* Initialize VLA state */
7100 vla_sp_loc = -1;
7101 vla_sp_root_loc = -1;
7102 /* put debug symbol */
7103 tcc_debug_funcstart(tcc_state, sym);
7104 /* push a dummy symbol to enable local sym storage */
7105 sym_push2(&local_stack, SYM_FIELD, 0, 0);
7106 local_scope = 1; /* for function parameters */
7107 gfunc_prolog(&sym->type);
7108 local_scope = 0;
7109 rsym = 0;
7110 block(NULL, NULL, 0);
7111 nocode_wanted = 0;
7112 gsym(rsym);
7113 gfunc_epilog();
7114 cur_text_section->data_offset = ind;
7115 label_pop(&global_label_stack, NULL, 0);
7116 /* reset local stack */
7117 local_scope = 0;
7118 sym_pop(&local_stack, NULL, 0);
7119 /* end of function */
7120 /* patch symbol size */
7121 elfsym(sym)->st_size = ind - func_ind;
7122 tcc_debug_funcend(tcc_state, ind - func_ind);
7123 /* It's better to crash than to generate wrong code */
7124 cur_text_section = NULL;
7125 funcname = ""; /* for safety */
7126 func_vt.t = VT_VOID; /* for safety */
7127 func_var = 0; /* for safety */
7128 ind = 0; /* for safety */
7129 nocode_wanted = 0x80000000;
7130 check_vstack();
7131 }
7133 static void gen_inline_functions(TCCState *s)
7134 {
7135 Sym *sym;
7136 int inline_generated, i, ln;
7137 struct InlineFunc *fn;
7139 ln = file->line_num;
7140 /* iterate while inline functions are referenced */
7141 do {
7142 inline_generated = 0;
7143 for (i = 0; i < s->nb_inline_fns; ++i) {
7144 fn = s->inline_fns[i];
7145 sym = fn->sym;
7146 if (sym && sym->c) {
7147 /* the function was used: generate its code and
7148 convert it to a normal function */
7149 fn->sym = NULL;
7150 if (file)
7151 pstrcpy(file->filename, sizeof file->filename, fn->filename);
7152 sym->type.t &= ~VT_INLINE;
7154 begin_macro(fn->func_str, 1);
7155 next();
7156 cur_text_section = text_section;
7157 gen_function(sym);
7158 end_macro();
7160 inline_generated = 1;
7161 }
7162 }
7163 } while (inline_generated);
7164 file->line_num = ln;
7165 }
7167 ST_FUNC void free_inline_functions(TCCState *s)
7168 {
7169 int i;
7170 /* free tokens of unused inline functions */
7171 for (i = 0; i < s->nb_inline_fns; ++i) {
7172 struct InlineFunc *fn = s->inline_fns[i];
7173 if (fn->sym)
7174 tok_str_free(fn->func_str);
7175 }
7176 dynarray_reset(&s->inline_fns, &s->nb_inline_fns);
7177 }
7179 /* 'l' is VT_LOCAL or VT_CONST to define default storage type, or VT_CMP
7180 if parsing old style parameter decl list (and FUNC_SYM is set then) */
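/* Hypothetical old-style definition that triggers the l == VT_CMP
   parameter declaration list:
   int add(a, b)
   int a;
   char b;
   { return a + b; } */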
7181 static int decl0(int l, int is_for_loop_init, Sym *func_sym)
7182 {
7183 int v, has_init, r;
7184 CType type, btype;
7185 Sym *sym;
7186 AttributeDef ad;
7188 while (1) {
7189 if (!parse_btype(&btype, &ad)) {
7190 if (is_for_loop_init)
7191 return 0;
7192 /* skip redundant ';' if not in old parameter decl scope */
7193 if (tok == ';' && l != VT_CMP) {
7194 next();
7195 continue;
7196 }
7197 if (l != VT_CONST)
7198 break;
7199 if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
7200 /* global asm block */
7201 asm_global_instr();
7202 continue;
7203 }
7204 if (tok >= TOK_UIDENT) {
7205 /* special test for old K&R protos without explicit int
7206 type. Only accepted when defining global data */
7207 btype.t = VT_INT;
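/* e.g. (illustrative) a file-scope 'main() { return 0; }' with no
   return type lands here and gets an implicit 'int'. */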
7208 } else {
7209 if (tok != TOK_EOF)
7210 expect("declaration");
7211 break;
7212 }
7213 }
7214 if (tok == ';') {
7215 if ((btype.t & VT_BTYPE) == VT_STRUCT) {
7216 int v = btype.ref->v;
7217 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) >= SYM_FIRST_ANOM)
7218 tcc_warning("unnamed struct/union that defines no instances");
7219 next();
7220 continue;
7221 }
7222 if (IS_ENUM(btype.t)) {
7223 next();
7224 continue;
7225 }
7226 }
7227 while (1) { /* iterate thru each declaration */
7228 type = btype;
7229 /* If the base type itself was an array type of unspecified
7230 size (like in 'typedef int arr[]; arr x = {1};') then
7231 we will overwrite the unknown size by the real one for
7232 this decl. We need to unshare the ref symbol holding
7233 that size. */
7234 if ((type.t & VT_ARRAY) && type.ref->c < 0) {
7235 type.ref = sym_push(SYM_FIELD, &type.ref->type, 0, type.ref->c);
7236 }
7237 type_decl(&type, &ad, &v, TYPE_DIRECT);
7238 #if 0
7239 {
7240 char buf[500];
7241 type_to_str(buf, sizeof(buf), &type, get_tok_str(v, NULL));
7242 printf("type = '%s'\n", buf);
7243 }
7244 #endif
7245 if ((type.t & VT_BTYPE) == VT_FUNC) {
7246 if ((type.t & VT_STATIC) && (l == VT_LOCAL)) {
7247 tcc_error("function without file scope cannot be static");
7249 /* if old style function prototype, we accept a
7250 declaration list */
7251 sym = type.ref;
7252 if (sym->f.func_type == FUNC_OLD && l == VT_CONST)
7253 decl0(VT_CMP, 0, sym);
7254 }
7256 if (gnu_ext && (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
7257 ad.asm_label = asm_label_instr();
7258 /* parse one last attribute list, after asm label */
7259 parse_attribute(&ad);
7260 if (tok == '{')
7261 expect(";");
7264 #ifdef TCC_TARGET_PE
7265 if (ad.a.dllimport || ad.a.dllexport) {
7266 if (type.t & (VT_STATIC|VT_TYPEDEF))
7267 tcc_error("cannot have dll linkage with static or typedef");
7268 if (ad.a.dllimport) {
7269 if ((type.t & VT_BTYPE) == VT_FUNC)
7270 ad.a.dllimport = 0;
7271 else
7272 type.t |= VT_EXTERN;
7273 }
7274 }
7275 #endif
7276 if (tok == '{') {
7277 if (l != VT_CONST)
7278 tcc_error("cannot use local functions");
7279 if ((type.t & VT_BTYPE) != VT_FUNC)
7280 expect("function definition");
7282 /* reject abstract declarators in function definitions, and
7283 make old style params without decl have int type */
7284 sym = type.ref;
7285 while ((sym = sym->next) != NULL) {
7286 if (!(sym->v & ~SYM_FIELD))
7287 expect("identifier");
7288 if (sym->type.t == VT_VOID)
7289 sym->type = int_type;
7290 }
7292 /* XXX: cannot do better now: convert extern inline to static inline */
7293 if ((type.t & (VT_EXTERN | VT_INLINE)) == (VT_EXTERN | VT_INLINE))
7294 type.t = (type.t & ~VT_EXTERN) | VT_STATIC;
7296 /* put function symbol */
7297 sym = external_global_sym(v, &type, 0);
7298 type.t &= ~VT_EXTERN;
7299 patch_storage(sym, &ad, &type);
7301 /* static inline functions are just recorded as a kind
7302 of macro. Their code will be emitted at the end of
7303 the compilation unit only if they are used */
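/* Illustrative (assumed): 'static inline int sq(int x) { return x*x; }'
   is only saved as a token string here; gen_inline_functions() emits
   its code at the end of the unit, and only if sq() was referenced. */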
7304 if ((type.t & (VT_INLINE | VT_STATIC)) ==
7305 (VT_INLINE | VT_STATIC)) {
7306 struct InlineFunc *fn;
7307 const char *filename;
7309 filename = file ? file->filename : "";
7310 fn = tcc_malloc(sizeof *fn + strlen(filename));
7311 strcpy(fn->filename, filename);
7312 fn->sym = sym;
7313 skip_or_save_block(&fn->func_str);
7314 dynarray_add(&tcc_state->inline_fns,
7315 &tcc_state->nb_inline_fns, fn);
7316 } else {
7317 /* compute text section */
7318 cur_text_section = ad.section;
7319 if (!cur_text_section)
7320 cur_text_section = text_section;
7321 gen_function(sym);
7322 }
7323 break;
7324 } else {
7325 if (l == VT_CMP) {
7326 /* find parameter in function parameter list */
7327 for (sym = func_sym->next; sym; sym = sym->next)
7328 if ((sym->v & ~SYM_FIELD) == v)
7329 goto found;
7330 tcc_error("declaration for parameter '%s' but no such parameter",
7331 get_tok_str(v, NULL));
7332 found:
7333 if (type.t & VT_STORAGE) /* 'register' is okay */
7334 tcc_error("storage class specified for '%s'",
7335 get_tok_str(v, NULL));
7336 if (sym->type.t != VT_VOID)
7337 tcc_error("redefinition of parameter '%s'",
7338 get_tok_str(v, NULL));
7339 convert_parameter_type(&type);
7340 sym->type = type;
7341 } else if (type.t & VT_TYPEDEF) {
7342 /* save typedefed type */
7343 /* XXX: test storage specifiers ? */
7344 sym = sym_find(v);
7345 if (sym && sym->sym_scope == local_scope) {
7346 if (!is_compatible_types(&sym->type, &type)
7347 || !(sym->type.t & VT_TYPEDEF))
7348 tcc_error("incompatible redefinition of '%s'",
7349 get_tok_str(v, NULL));
7350 sym->type = type;
7351 } else {
7352 sym = sym_push(v, &type, 0, 0);
7353 }
7354 sym->a = ad.a;
7355 sym->f = ad.f;
7356 } else {
7357 r = 0;
7358 if ((type.t & VT_BTYPE) == VT_FUNC) {
7359 /* external function definition */
7360 /* specific case for func_call attribute */
7361 type.ref->f = ad.f;
7362 } else if (!(type.t & VT_ARRAY)) {
7363 /* not lvalue if array */
7364 r |= lvalue_type(type.t);
7365 }
7366 has_init = (tok == '=');
7367 if (has_init && (type.t & VT_VLA))
7368 tcc_error("variable length array cannot be initialized");
7369 if (((type.t & VT_EXTERN) && (!has_init || l != VT_CONST)) ||
7370 ((type.t & VT_BTYPE) == VT_FUNC) ||
7371 ((type.t & VT_ARRAY) && (type.t & VT_STATIC) &&
7372 !has_init && l == VT_CONST && type.ref->c < 0)) {
7373 /* external variable or function */
7374 /* NOTE: as with GCC, uninitialized global static
7375 arrays of zero size are considered
7376 extern */
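/* e.g. (illustrative) a file-scope 'static int tab[];' without an
   initializer is handled like an extern declaration instead of being
   allocated here. */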
7377 type.t |= VT_EXTERN;
7378 sym = external_sym(v, &type, r, &ad);
7379 if (ad.alias_target) {
7380 ElfSym *esym;
7381 Sym *alias_target;
7382 alias_target = sym_find(ad.alias_target);
7383 esym = elfsym(alias_target);
7384 if (!esym)
7385 tcc_error("unsupported forward __alias__ attribute");
7386 /* Local statics have a scope until now (for
7387 warnings), remove it here. */
7388 sym->sym_scope = 0;
7389 put_extern_sym2(sym, esym->st_shndx, esym->st_value, esym->st_size, 0);
7390 }
7391 } else {
7392 if (type.t & VT_STATIC)
7393 r |= VT_CONST;
7394 else
7395 r |= l;
7396 if (has_init)
7397 next();
7398 else if (l == VT_CONST)
7399 /* uninitialized global variables may be overridden */
7400 type.t |= VT_EXTERN;
7401 decl_initializer_alloc(&type, &ad, r, has_init, v, l);
7402 }
7403 }
7404 if (tok != ',') {
7405 if (is_for_loop_init)
7406 return 1;
7407 skip(';');
7408 break;
7409 }
7410 next();
7411 }
7412 ad.a.aligned = 0;
7413 }
7414 }
7415 return 0;
7416 }
7418 static void decl(int l)
7419 {
7420 decl0(l, 0, NULL);
7421 }
7423 /* ------------------------------------------------------------------------- */