Always allow ({ }) in the ctrl-expr of _Generic
[tinycc.git] / tccgen.c
blob 0d686961da2a67082421b8a1316cef6b0fcfd5e3
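The change recorded in this revision concerns the controlling expression of C11 _Generic: a GNU statement expression of the form ({ ... }) is now always accepted there. A minimal, hypothetical sketch of the kind of code this enables (illustrative only, not part of the file below):

/* hypothetical test case: a statement expression as the ctrl-expr of _Generic */
int main(void)
{
    const char *s = _Generic(({ int i = 1; i; }),
                             int: "int",
                             double: "double",
                             default: "other");
    return s[0] == 'i' ? 0 : 1;   /* the int association is selected */
}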
1 /*
2 * TCC - Tiny C Compiler
3 *
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 #include "tcc.h"
22 /********************************************************/
23 /* global variables */
25 /* loc : local variable index
26 ind : output code index
27 rsym: return symbol
28 anon_sym: anonymous symbol index
30 ST_DATA int rsym, anon_sym, ind, loc;
32 ST_DATA Sym *sym_free_first;
33 ST_DATA void **sym_pools;
34 ST_DATA int nb_sym_pools;
36 ST_DATA Sym *global_stack;
37 ST_DATA Sym *local_stack;
38 ST_DATA Sym *define_stack;
39 ST_DATA Sym *global_label_stack;
40 ST_DATA Sym *local_label_stack;
41 static int local_scope;
42 static int in_sizeof;
43 static int section_sym;
45 ST_DATA int vlas_in_scope; /* number of VLAs that are currently in scope */
46 ST_DATA int vla_sp_root_loc; /* vla_sp_loc for SP before any VLAs were pushed */
47 ST_DATA int vla_sp_loc; /* Pointer to variable holding location to store stack pointer on the stack when modifying stack pointer */
49 ST_DATA SValue __vstack[1+VSTACK_SIZE], *vtop, *pvtop;
51 ST_DATA int const_wanted; /* true if constant wanted */
52 ST_DATA int nocode_wanted; /* no code generation wanted */
53 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
54 #define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */
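/* (informal reading of the two macros above: NODATA_WANTED is true only while
   nocode_wanted is strictly positive, i.e. presumably inside suppressed code
   regions within functions; the file-scope value 0x80000000 set in
   tccgen_compile() is negative as a signed int, so static data is still
   emitted at file scope, which is the case STATIC_DATA_WANTED tests via the
   two top bits) */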
55 ST_DATA int global_expr; /* true if compound literals must be allocated globally (used during initializer parsing) */
56 ST_DATA CType func_vt; /* current function return type (used by return instruction) */
57 ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
58 ST_DATA int func_vc;
59 ST_DATA int last_line_num, last_ind, func_ind; /* debug last line number and pc */
60 ST_DATA const char *funcname;
61 ST_DATA int g_debug;
63 ST_DATA CType char_pointer_type, func_old_type, int_type, size_type, ptrdiff_type;
65 ST_DATA struct switch_t {
66 struct case_t {
67 int64_t v1, v2;
68 int sym;
69 } **p; int n; /* list of case ranges */
70 int def_sym; /* default symbol */
71 } *cur_switch; /* current switch */
73 /* ------------------------------------------------------------------------- */
75 static void gen_cast(CType *type);
76 static void gen_cast_s(int t);
77 static inline CType *pointed_type(CType *type);
78 static int is_compatible_types(CType *type1, CType *type2);
79 static int parse_btype(CType *type, AttributeDef *ad);
80 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td);
81 static void parse_expr_type(CType *type);
82 static void init_putv(CType *type, Section *sec, unsigned long c);
83 static void decl_initializer(CType *type, Section *sec, unsigned long c, int first, int size_only);
84 static void block(int *bsym, int *csym, int is_expr);
85 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
86 static void decl(int l);
87 static int decl0(int l, int is_for_loop_init, Sym *);
88 static void expr_eq(void);
89 static void vla_runtime_type_size(CType *type, int *a);
90 static void vla_sp_restore(void);
91 static void vla_sp_restore_root(void);
92 static int is_compatible_unqualified_types(CType *type1, CType *type2);
93 static inline int64_t expr_const64(void);
94 static void vpush64(int ty, unsigned long long v);
95 static void vpush(CType *type);
96 static int gvtst(int inv, int t);
97 static void gen_inline_functions(TCCState *s);
98 static void skip_or_save_block(TokenString **str);
99 static void gv_dup(void);
101 ST_INLN int is_float(int t)
103 int bt;
104 bt = t & VT_BTYPE;
105 return bt == VT_LDOUBLE || bt == VT_DOUBLE || bt == VT_FLOAT || bt == VT_QFLOAT;
108 /* we use our own 'finite' function to avoid potential problems with
109 non standard math libs */
110 /* XXX: endianness dependent */
111 ST_FUNC int ieee_finite(double d)
113 int p[4];
114 memcpy(p, &d, sizeof(double));
115 return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
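/* (how the trick above works, assuming little-endian storage: p[1] holds the
   sign bit, the 11 exponent bits and the top mantissa bits; OR-ing with
   0x800fffff sets every bit except the exponent field, so the "+ 1" carries
   out through bit 31 only when the exponent is all ones, i.e. for Inf/NaN;
   ">> 31" therefore yields 1 for finite values and 0 otherwise) */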
118 /* compiling intel long double natively */
119 #if (defined __i386__ || defined __x86_64__) \
120 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
121 # define TCC_IS_NATIVE_387
122 #endif
124 ST_FUNC void test_lvalue(void)
126 if (!(vtop->r & VT_LVAL))
127 expect("lvalue");
130 ST_FUNC void check_vstack(void)
132 if (pvtop != vtop)
133 tcc_error("internal compiler error: vstack leak (%d)", vtop - pvtop);
136 /* ------------------------------------------------------------------------- */
137 /* vstack debugging aid */
139 #if 0
140 void pv (const char *lbl, int a, int b)
142 int i;
143 for (i = a; i < a + b; ++i) {
144 SValue *p = &vtop[-i];
145 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
146 lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
149 #endif
151 /* ------------------------------------------------------------------------- */
152 /* start of translation unit info */
153 ST_FUNC void tcc_debug_start(TCCState *s1)
155 if (s1->do_debug) {
156 char buf[512];
158 /* file info: full path + filename */
159 section_sym = put_elf_sym(symtab_section, 0, 0,
160 ELFW(ST_INFO)(STB_LOCAL, STT_SECTION), 0,
161 text_section->sh_num, NULL);
162 getcwd(buf, sizeof(buf));
163 #ifdef _WIN32
164 normalize_slashes(buf);
165 #endif
166 pstrcat(buf, sizeof(buf), "/");
167 put_stabs_r(buf, N_SO, 0, 0,
168 text_section->data_offset, text_section, section_sym);
169 put_stabs_r(file->filename, N_SO, 0, 0,
170 text_section->data_offset, text_section, section_sym);
171 last_ind = 0;
172 last_line_num = 0;
175 /* an ELF symbol of type STT_FILE must be emitted so that STB_LOCAL
176 symbols can be used safely */
177 put_elf_sym(symtab_section, 0, 0,
178 ELFW(ST_INFO)(STB_LOCAL, STT_FILE), 0,
179 SHN_ABS, file->filename);
182 /* put end of translation unit info */
183 ST_FUNC void tcc_debug_end(TCCState *s1)
185 if (!s1->do_debug)
186 return;
187 put_stabs_r(NULL, N_SO, 0, 0,
188 text_section->data_offset, text_section, section_sym);
192 /* generate line number info */
193 ST_FUNC void tcc_debug_line(TCCState *s1)
195 if (!s1->do_debug)
196 return;
197 if ((last_line_num != file->line_num || last_ind != ind)) {
198 put_stabn(N_SLINE, 0, file->line_num, ind - func_ind);
199 last_ind = ind;
200 last_line_num = file->line_num;
204 /* put function symbol */
205 ST_FUNC void tcc_debug_funcstart(TCCState *s1, Sym *sym)
207 char buf[512];
209 if (!s1->do_debug)
210 return;
212 /* stabs info */
213 /* XXX: we put here a dummy type */
214 snprintf(buf, sizeof(buf), "%s:%c1",
215 funcname, sym->type.t & VT_STATIC ? 'f' : 'F');
216 put_stabs_r(buf, N_FUN, 0, file->line_num, 0,
217 cur_text_section, sym->c);
218 /* //gr gdb wants a line at the function */
219 put_stabn(N_SLINE, 0, file->line_num, 0);
221 last_ind = 0;
222 last_line_num = 0;
225 /* put function size */
226 ST_FUNC void tcc_debug_funcend(TCCState *s1, int size)
228 if (!s1->do_debug)
229 return;
230 put_stabn(N_FUN, 0, 0, size);
233 /* ------------------------------------------------------------------------- */
234 ST_FUNC int tccgen_compile(TCCState *s1)
236 cur_text_section = NULL;
237 funcname = "";
238 anon_sym = SYM_FIRST_ANOM;
239 section_sym = 0;
240 const_wanted = 0;
241 nocode_wanted = 0x80000000;
243 /* define some often used types */
244 int_type.t = VT_INT;
245 char_pointer_type.t = VT_BYTE;
246 mk_pointer(&char_pointer_type);
247 #if PTR_SIZE == 4
248 size_type.t = VT_INT | VT_UNSIGNED;
249 ptrdiff_type.t = VT_INT;
250 #elif LONG_SIZE == 4
251 size_type.t = VT_LLONG | VT_UNSIGNED;
252 ptrdiff_type.t = VT_LLONG;
253 #else
254 size_type.t = VT_LONG | VT_LLONG | VT_UNSIGNED;
255 ptrdiff_type.t = VT_LONG | VT_LLONG;
256 #endif
257 func_old_type.t = VT_FUNC;
258 func_old_type.ref = sym_push(SYM_FIELD, &int_type, 0, 0);
259 func_old_type.ref->f.func_call = FUNC_CDECL;
260 func_old_type.ref->f.func_type = FUNC_OLD;
262 tcc_debug_start(s1);
264 #ifdef TCC_TARGET_ARM
265 arm_init(s1);
266 #endif
268 #ifdef INC_DEBUG
269 printf("%s: **** new file\n", file->filename);
270 #endif
272 parse_flags = PARSE_FLAG_PREPROCESS | PARSE_FLAG_TOK_NUM | PARSE_FLAG_TOK_STR;
273 next();
274 decl(VT_CONST);
275 gen_inline_functions(s1);
276 check_vstack();
277 /* end of translation unit info */
278 tcc_debug_end(s1);
279 return 0;
282 /* ------------------------------------------------------------------------- */
283 ST_FUNC ElfSym *elfsym(Sym *s)
285 if (!s || !s->c)
286 return NULL;
287 return &((ElfSym *)symtab_section->data)[s->c];
290 /* apply storage attributes to Elf symbol */
291 ST_FUNC void update_storage(Sym *sym)
293 ElfSym *esym;
294 int sym_bind, old_sym_bind;
296 esym = elfsym(sym);
297 if (!esym)
298 return;
300 if (sym->a.visibility)
301 esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1))
302 | sym->a.visibility;
304 if (sym->type.t & VT_STATIC)
305 sym_bind = STB_LOCAL;
306 else if (sym->a.weak)
307 sym_bind = STB_WEAK;
308 else
309 sym_bind = STB_GLOBAL;
310 old_sym_bind = ELFW(ST_BIND)(esym->st_info);
311 if (sym_bind != old_sym_bind) {
312 esym->st_info = ELFW(ST_INFO)(sym_bind, ELFW(ST_TYPE)(esym->st_info));
315 #ifdef TCC_TARGET_PE
316 if (sym->a.dllimport)
317 esym->st_other |= ST_PE_IMPORT;
318 if (sym->a.dllexport)
319 esym->st_other |= ST_PE_EXPORT;
320 #endif
322 #if 0
323 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
324 get_tok_str(sym->v, NULL),
325 sym_bind == STB_WEAK ? 'w' : sym_bind == STB_LOCAL ? 'l' : 'g',
326 sym->a.visibility,
327 sym->a.dllexport,
328 sym->a.dllimport
330 #endif
333 /* ------------------------------------------------------------------------- */
334 /* update sym->c so that it points to an external symbol in section
335 'section' with value 'value' */
337 ST_FUNC void put_extern_sym2(Sym *sym, int sh_num,
338 addr_t value, unsigned long size,
339 int can_add_underscore)
341 int sym_type, sym_bind, info, other, t;
342 ElfSym *esym;
343 const char *name;
344 char buf1[256];
345 #ifdef CONFIG_TCC_BCHECK
346 char buf[32];
347 #endif
349 if (!sym->c) {
350 name = get_tok_str(sym->v, NULL);
351 #ifdef CONFIG_TCC_BCHECK
352 if (tcc_state->do_bounds_check) {
353 /* XXX: avoid doing that for statics ? */
354 /* if bound checking is activated, we change some function
355 names by adding the "__bound" prefix */
356 switch(sym->v) {
357 #ifdef TCC_TARGET_PE
358 /* XXX: we rely only on malloc hooks */
359 case TOK_malloc:
360 case TOK_free:
361 case TOK_realloc:
362 case TOK_memalign:
363 case TOK_calloc:
364 #endif
365 case TOK_memcpy:
366 case TOK_memmove:
367 case TOK_memset:
368 case TOK_strlen:
369 case TOK_strcpy:
370 case TOK_alloca:
371 strcpy(buf, "__bound_");
372 strcat(buf, name);
373 name = buf;
374 break;
377 #endif
378 t = sym->type.t;
379 if ((t & VT_BTYPE) == VT_FUNC) {
380 sym_type = STT_FUNC;
381 } else if ((t & VT_BTYPE) == VT_VOID) {
382 sym_type = STT_NOTYPE;
383 } else {
384 sym_type = STT_OBJECT;
386 if (t & VT_STATIC)
387 sym_bind = STB_LOCAL;
388 else
389 sym_bind = STB_GLOBAL;
390 other = 0;
391 #ifdef TCC_TARGET_PE
392 if (sym_type == STT_FUNC && sym->type.ref) {
393 Sym *ref = sym->type.ref;
394 if (ref->a.nodecorate) {
395 can_add_underscore = 0;
397 if (ref->f.func_call == FUNC_STDCALL && can_add_underscore) {
398 sprintf(buf1, "_%s@%d", name, ref->f.func_args * PTR_SIZE);
399 name = buf1;
400 other |= ST_PE_STDCALL;
401 can_add_underscore = 0;
404 #endif
405 if (tcc_state->leading_underscore && can_add_underscore) {
406 buf1[0] = '_';
407 pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
408 name = buf1;
410 if (sym->asm_label)
411 name = get_tok_str(sym->asm_label, NULL);
412 info = ELFW(ST_INFO)(sym_bind, sym_type);
413 sym->c = put_elf_sym(symtab_section, value, size, info, other, sh_num, name);
414 } else {
415 esym = elfsym(sym);
416 esym->st_value = value;
417 esym->st_size = size;
418 esym->st_shndx = sh_num;
420 update_storage(sym);
423 ST_FUNC void put_extern_sym(Sym *sym, Section *section,
424 addr_t value, unsigned long size)
426 int sh_num = section ? section->sh_num : SHN_UNDEF;
427 put_extern_sym2(sym, sh_num, value, size, 1);
430 /* add a new relocation entry to symbol 'sym' in section 's' */
431 ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
432 addr_t addend)
434 int c = 0;
436 if (nocode_wanted && s == cur_text_section)
437 return;
439 if (sym) {
440 if (0 == sym->c)
441 put_extern_sym(sym, NULL, 0, 0);
442 c = sym->c;
445 /* now we can add ELF relocation info */
446 put_elf_reloca(symtab_section, s, offset, type, c, addend);
449 #if PTR_SIZE == 4
450 ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
452 greloca(s, sym, offset, type, 0);
454 #endif
456 /* ------------------------------------------------------------------------- */
457 /* symbol allocator */
458 static Sym *__sym_malloc(void)
460 Sym *sym_pool, *sym, *last_sym;
461 int i;
463 sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
464 dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);
466 last_sym = sym_free_first;
467 sym = sym_pool;
468 for(i = 0; i < SYM_POOL_NB; i++) {
469 sym->next = last_sym;
470 last_sym = sym;
471 sym++;
473 sym_free_first = last_sym;
474 return last_sym;
477 static inline Sym *sym_malloc(void)
479 Sym *sym;
480 #ifndef SYM_DEBUG
481 sym = sym_free_first;
482 if (!sym)
483 sym = __sym_malloc();
484 sym_free_first = sym->next;
485 return sym;
486 #else
487 sym = tcc_malloc(sizeof(Sym));
488 return sym;
489 #endif
492 ST_INLN void sym_free(Sym *sym)
494 #ifndef SYM_DEBUG
495 sym->next = sym_free_first;
496 sym_free_first = sym;
497 #else
498 tcc_free(sym);
499 #endif
502 /* push, without hashing */
503 ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, int c)
505 Sym *s;
507 s = sym_malloc();
508 memset(s, 0, sizeof *s);
509 s->v = v;
510 s->type.t = t;
511 s->c = c;
512 /* add in stack */
513 s->prev = *ps;
514 *ps = s;
515 return s;
518 /* find a symbol and return its associated structure. 's' is the top
519 of the symbol stack */
520 ST_FUNC Sym *sym_find2(Sym *s, int v)
522 while (s) {
523 if (s->v == v)
524 return s;
525 else if (s->v == -1)
526 return NULL;
527 s = s->prev;
529 return NULL;
532 /* structure lookup */
533 ST_INLN Sym *struct_find(int v)
535 v -= TOK_IDENT;
536 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
537 return NULL;
538 return table_ident[v]->sym_struct;
541 /* find an identifier */
542 ST_INLN Sym *sym_find(int v)
544 v -= TOK_IDENT;
545 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
546 return NULL;
547 return table_ident[v]->sym_identifier;
550 /* push a given symbol on the symbol stack */
551 ST_FUNC Sym *sym_push(int v, CType *type, int r, int c)
553 Sym *s, **ps;
554 TokenSym *ts;
556 if (local_stack)
557 ps = &local_stack;
558 else
559 ps = &global_stack;
560 s = sym_push2(ps, v, type->t, c);
561 s->type.ref = type->ref;
562 s->r = r;
563 /* don't record fields or anonymous symbols */
564 /* XXX: simplify */
565 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
566 /* record symbol in token array */
567 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
568 if (v & SYM_STRUCT)
569 ps = &ts->sym_struct;
570 else
571 ps = &ts->sym_identifier;
572 s->prev_tok = *ps;
573 *ps = s;
574 s->sym_scope = local_scope;
575 if (s->prev_tok && s->prev_tok->sym_scope == s->sym_scope)
576 tcc_error("redeclaration of '%s'",
577 get_tok_str(v & ~SYM_STRUCT, NULL));
579 return s;
582 /* push a global identifier */
583 ST_FUNC Sym *global_identifier_push(int v, int t, int c)
585 Sym *s, **ps;
586 s = sym_push2(&global_stack, v, t, c);
587 /* don't record anonymous symbol */
588 if (v < SYM_FIRST_ANOM) {
589 ps = &table_ident[v - TOK_IDENT]->sym_identifier;
590 /* modify the topmost local identifier, so that
591 sym_identifier will point to 's' when popped */
592 while (*ps != NULL && (*ps)->sym_scope)
593 ps = &(*ps)->prev_tok;
594 s->prev_tok = *ps;
595 *ps = s;
597 return s;
600 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
601 pop them yet from the list, but do remove them from the token array. */
602 ST_FUNC void sym_pop(Sym **ptop, Sym *b, int keep)
604 Sym *s, *ss, **ps;
605 TokenSym *ts;
606 int v;
608 s = *ptop;
609 while(s != b) {
610 ss = s->prev;
611 v = s->v;
612 /* remove symbol in token array */
613 /* XXX: simplify */
614 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
615 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
616 if (v & SYM_STRUCT)
617 ps = &ts->sym_struct;
618 else
619 ps = &ts->sym_identifier;
620 *ps = s->prev_tok;
622 if (!keep)
623 sym_free(s);
624 s = ss;
626 if (!keep)
627 *ptop = b;
630 /* ------------------------------------------------------------------------- */
632 static void vsetc(CType *type, int r, CValue *vc)
634 int v;
636 if (vtop >= vstack + (VSTACK_SIZE - 1))
637 tcc_error("memory full (vstack)");
638 /* cannot leave cpu flags live if other instructions are generated. Also
639 avoid leaving VT_JMP anywhere except on the top of the stack
640 because it would complicate the code generator.
642 Don't do this when nocode_wanted. vtop might come from
643 !nocode_wanted regions (see 88_codeopt.c) and transforming
644 it to a register without actually generating code is wrong
645 as its value might still be used for real. All values
646 we push under nocode_wanted will eventually be popped
647 again, so that the VT_CMP/VT_JMP value will be in vtop
648 when code is unsuppressed again.
650 Same logic below in vswap(); */
651 if (vtop >= vstack && !nocode_wanted) {
652 v = vtop->r & VT_VALMASK;
653 if (v == VT_CMP || (v & ~1) == VT_JMP)
654 gv(RC_INT);
657 vtop++;
658 vtop->type = *type;
659 vtop->r = r;
660 vtop->r2 = VT_CONST;
661 vtop->c = *vc;
662 vtop->sym = NULL;
665 ST_FUNC void vswap(void)
667 SValue tmp;
668 /* cannot vswap cpu flags. See comment at vsetc() above */
669 if (vtop >= vstack && !nocode_wanted) {
670 int v = vtop->r & VT_VALMASK;
671 if (v == VT_CMP || (v & ~1) == VT_JMP)
672 gv(RC_INT);
674 tmp = vtop[0];
675 vtop[0] = vtop[-1];
676 vtop[-1] = tmp;
679 /* pop stack value */
680 ST_FUNC void vpop(void)
682 int v;
683 v = vtop->r & VT_VALMASK;
684 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
685 /* for x86, we need to pop the FP stack */
686 if (v == TREG_ST0) {
687 o(0xd8dd); /* fstp %st(0) */
688 } else
689 #endif
690 if (v == VT_JMP || v == VT_JMPI) {
691 /* need to put correct jump if && or || without test */
692 gsym(vtop->c.i);
694 vtop--;
697 /* push constant of type "type" with useless value */
698 ST_FUNC void vpush(CType *type)
700 vset(type, VT_CONST, 0);
703 /* push integer constant */
704 ST_FUNC void vpushi(int v)
706 CValue cval;
707 cval.i = v;
708 vsetc(&int_type, VT_CONST, &cval);
711 /* push a pointer sized constant */
712 static void vpushs(addr_t v)
714 CValue cval;
715 cval.i = v;
716 vsetc(&size_type, VT_CONST, &cval);
719 /* push arbitrary 64bit constant */
720 ST_FUNC void vpush64(int ty, unsigned long long v)
722 CValue cval;
723 CType ctype;
724 ctype.t = ty;
725 ctype.ref = NULL;
726 cval.i = v;
727 vsetc(&ctype, VT_CONST, &cval);
730 /* push long long constant */
731 static inline void vpushll(long long v)
733 vpush64(VT_LLONG, v);
736 ST_FUNC void vset(CType *type, int r, int v)
738 CValue cval;
740 cval.i = v;
741 vsetc(type, r, &cval);
744 static void vseti(int r, int v)
746 CType type;
747 type.t = VT_INT;
748 type.ref = NULL;
749 vset(&type, r, v);
752 ST_FUNC void vpushv(SValue *v)
754 if (vtop >= vstack + (VSTACK_SIZE - 1))
755 tcc_error("memory full (vstack)");
756 vtop++;
757 *vtop = *v;
760 static void vdup(void)
762 vpushv(vtop);
765 /* rotate n first stack elements to the bottom
766 I1 ... In -> I2 ... In I1 [top is right]
768 ST_FUNC void vrotb(int n)
770 int i;
771 SValue tmp;
773 tmp = vtop[-n + 1];
774 for(i=-n+1;i!=0;i++)
775 vtop[i] = vtop[i+1];
776 vtop[0] = tmp;
779 /* rotate the n elements before entry e towards the top
780 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
782 ST_FUNC void vrote(SValue *e, int n)
784 int i;
785 SValue tmp;
787 tmp = *e;
788 for(i = 0;i < n - 1; i++)
789 e[-i] = e[-i - 1];
790 e[-n + 1] = tmp;
793 /* rotate n first stack elements to the top
794 I1 ... In -> In I1 ... I(n-1) [top is right]
796 ST_FUNC void vrott(int n)
798 vrote(vtop, n);
801 /* push a symbol value of TYPE */
802 static inline void vpushsym(CType *type, Sym *sym)
804 CValue cval;
805 cval.i = 0;
806 vsetc(type, VT_CONST | VT_SYM, &cval);
807 vtop->sym = sym;
810 /* Return a static symbol pointing to a section */
811 ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
813 int v;
814 Sym *sym;
816 v = anon_sym++;
817 sym = global_identifier_push(v, type->t | VT_STATIC, 0);
818 sym->type.ref = type->ref;
819 sym->r = VT_CONST | VT_SYM;
820 put_extern_sym(sym, sec, offset, size);
821 return sym;
824 /* push a reference to a section offset by adding a dummy symbol */
825 static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
827 vpushsym(type, get_sym_ref(type, sec, offset, size));
830 /* define a new external reference to a symbol 'v' of the given type */
831 ST_FUNC Sym *external_global_sym(int v, CType *type, int r)
833 Sym *s;
835 s = sym_find(v);
836 if (!s) {
837 /* push forward reference */
838 s = global_identifier_push(v, type->t | VT_EXTERN, 0);
839 s->type.ref = type->ref;
840 s->r = r | VT_CONST | VT_SYM;
841 } else if (IS_ASM_SYM(s)) {
842 s->type.t = type->t | (s->type.t & VT_EXTERN);
843 s->type.ref = type->ref;
844 update_storage(s);
846 return s;
849 /* Merge some type attributes. */
850 static void patch_type(Sym *sym, CType *type)
852 if (!(type->t & VT_EXTERN)) {
853 if (!(sym->type.t & VT_EXTERN))
854 tcc_error("redefinition of '%s'", get_tok_str(sym->v, NULL));
855 sym->type.t &= ~VT_EXTERN;
858 if (IS_ASM_SYM(sym)) {
859 /* stay static if both are static */
860 sym->type.t = type->t & (sym->type.t | ~VT_STATIC);
861 sym->type.ref = type->ref;
864 if (!is_compatible_types(&sym->type, type)) {
865 tcc_error("incompatible types for redefinition of '%s'",
866 get_tok_str(sym->v, NULL));
868 } else if ((sym->type.t & VT_BTYPE) == VT_FUNC) {
869 int static_proto = sym->type.t & VT_STATIC;
870 /* warn if static follows non-static function declaration */
871 if ((type->t & VT_STATIC) && !static_proto && !(type->t & VT_INLINE))
872 tcc_warning("static storage ignored for redefinition of '%s'",
873 get_tok_str(sym->v, NULL));
875 if (0 == (type->t & VT_EXTERN)) {
876 /* put complete type, use static from prototype */
877 sym->type.t = (type->t & ~VT_STATIC) | static_proto;
878 if (type->t & VT_INLINE)
879 sym->type.t = type->t;
880 sym->type.ref = type->ref;
883 } else {
884 if ((sym->type.t & VT_ARRAY) && type->ref->c >= 0) {
885 /* set array size if it was omitted in extern declaration */
886 if (sym->type.ref->c < 0)
887 sym->type.ref->c = type->ref->c;
888 else if (sym->type.ref->c != type->ref->c)
889 tcc_error("conflicting type for '%s'", get_tok_str(sym->v, NULL));
891 if ((type->t ^ sym->type.t) & VT_STATIC)
892 tcc_warning("storage mismatch for redefinition of '%s'",
893 get_tok_str(sym->v, NULL));
898 /* Merge some storage attributes. */
899 static void patch_storage(Sym *sym, AttributeDef *ad, CType *type)
901 if (type)
902 patch_type(sym, type);
904 #ifdef TCC_TARGET_PE
905 if (sym->a.dllimport != ad->a.dllimport)
906 tcc_error("incompatible dll linkage for redefinition of '%s'",
907 get_tok_str(sym->v, NULL));
908 sym->a.dllexport |= ad->a.dllexport;
909 #endif
910 sym->a.weak |= ad->a.weak;
911 if (ad->a.visibility) {
912 int vis = sym->a.visibility;
913 int vis2 = ad->a.visibility;
914 if (vis == STV_DEFAULT)
915 vis = vis2;
916 else if (vis2 != STV_DEFAULT)
917 vis = (vis < vis2) ? vis : vis2;
918 sym->a.visibility = vis;
920 if (ad->a.aligned)
921 sym->a.aligned = ad->a.aligned;
922 if (ad->asm_label)
923 sym->asm_label = ad->asm_label;
924 update_storage(sym);
927 /* define a new external reference to a symbol 'v' */
928 static Sym *external_sym(int v, CType *type, int r, AttributeDef *ad)
930 Sym *s;
931 s = sym_find(v);
932 if (!s) {
933 /* push forward reference */
934 s = sym_push(v, type, r | VT_CONST | VT_SYM, 0);
935 s->type.t |= VT_EXTERN;
936 s->a = ad->a;
937 s->sym_scope = 0;
938 } else {
939 if (s->type.ref == func_old_type.ref) {
940 s->type.ref = type->ref;
941 s->r = r | VT_CONST | VT_SYM;
942 s->type.t |= VT_EXTERN;
944 patch_storage(s, ad, type);
946 return s;
949 /* push a reference to global symbol v */
950 ST_FUNC void vpush_global_sym(CType *type, int v)
952 vpushsym(type, external_global_sym(v, type, 0));
955 /* save registers up to (vtop - n) stack entry */
956 ST_FUNC void save_regs(int n)
958 SValue *p, *p1;
959 for(p = vstack, p1 = vtop - n; p <= p1; p++)
960 save_reg(p->r);
963 /* save r to the memory stack, and mark it as being free */
964 ST_FUNC void save_reg(int r)
966 save_reg_upstack(r, 0);
969 /* save r to the memory stack, and mark it as being free,
970 if seen up to (vtop - n) stack entry */
971 ST_FUNC void save_reg_upstack(int r, int n)
973 int l, saved, size, align;
974 SValue *p, *p1, sv;
975 CType *type;
977 if ((r &= VT_VALMASK) >= VT_CONST)
978 return;
979 if (nocode_wanted)
980 return;
982 /* modify all stack values */
983 saved = 0;
984 l = 0;
985 for(p = vstack, p1 = vtop - n; p <= p1; p++) {
986 if ((p->r & VT_VALMASK) == r ||
987 ((p->type.t & VT_BTYPE) == VT_LLONG && (p->r2 & VT_VALMASK) == r)) {
988 /* must save value on stack if not already done */
989 if (!saved) {
990 /* NOTE: must reload 'r' because r might be equal to r2 */
991 r = p->r & VT_VALMASK;
992 /* store register in the stack */
993 type = &p->type;
994 if ((p->r & VT_LVAL) ||
995 (!is_float(type->t) && (type->t & VT_BTYPE) != VT_LLONG))
996 #if PTR_SIZE == 8
997 type = &char_pointer_type;
998 #else
999 type = &int_type;
1000 #endif
1001 size = type_size(type, &align);
1002 loc = (loc - size) & -align;
1003 sv.type.t = type->t;
1004 sv.r = VT_LOCAL | VT_LVAL;
1005 sv.c.i = loc;
1006 store(r, &sv);
1007 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1008 /* x86 specific: need to pop fp register ST0 if saved */
1009 if (r == TREG_ST0) {
1010 o(0xd8dd); /* fstp %st(0) */
1012 #endif
1013 #if PTR_SIZE == 4
1014 /* special long long case */
1015 if ((type->t & VT_BTYPE) == VT_LLONG) {
1016 sv.c.i += 4;
1017 store(p->r2, &sv);
1019 #endif
1020 l = loc;
1021 saved = 1;
1023 /* mark that stack entry as being saved on the stack */
1024 if (p->r & VT_LVAL) {
1025 /* also clear the bounded flag because the
1026 relocation address of the function was stored in
1027 p->c.i */
1028 p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
1029 } else {
1030 p->r = lvalue_type(p->type.t) | VT_LOCAL;
1032 p->r2 = VT_CONST;
1033 p->c.i = l;
1038 #ifdef TCC_TARGET_ARM
1039 /* find a register of class 'rc2' with at most one reference on stack.
1040 * If none, call get_reg(rc) */
1041 ST_FUNC int get_reg_ex(int rc, int rc2)
1043 int r;
1044 SValue *p;
1046 for(r=0;r<NB_REGS;r++) {
1047 if (reg_classes[r] & rc2) {
1048 int n;
1049 n=0;
1050 for(p = vstack; p <= vtop; p++) {
1051 if ((p->r & VT_VALMASK) == r ||
1052 (p->r2 & VT_VALMASK) == r)
1053 n++;
1055 if (n <= 1)
1056 return r;
1059 return get_reg(rc);
1061 #endif
1063 /* find a free register of class 'rc'. If none, save one register */
1064 ST_FUNC int get_reg(int rc)
1066 int r;
1067 SValue *p;
1069 /* find a free register */
1070 for(r=0;r<NB_REGS;r++) {
1071 if (reg_classes[r] & rc) {
1072 if (nocode_wanted)
1073 return r;
1074 for(p=vstack;p<=vtop;p++) {
1075 if ((p->r & VT_VALMASK) == r ||
1076 (p->r2 & VT_VALMASK) == r)
1077 goto notfound;
1079 return r;
1081 notfound: ;
1084 /* no register left : free the first one on the stack (VERY
1085 IMPORTANT to start from the bottom to ensure that we don't
1086 spill registers used in gen_opi()) */
1087 for(p=vstack;p<=vtop;p++) {
1088 /* look at second register (if long long) */
1089 r = p->r2 & VT_VALMASK;
1090 if (r < VT_CONST && (reg_classes[r] & rc))
1091 goto save_found;
1092 r = p->r & VT_VALMASK;
1093 if (r < VT_CONST && (reg_classes[r] & rc)) {
1094 save_found:
1095 save_reg(r);
1096 return r;
1099 /* Should never come here */
1100 return -1;
1103 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1104 if needed */
1105 static void move_reg(int r, int s, int t)
1107 SValue sv;
1109 if (r != s) {
1110 save_reg(r);
1111 sv.type.t = t;
1112 sv.type.ref = NULL;
1113 sv.r = s;
1114 sv.c.i = 0;
1115 load(r, &sv);
1119 /* get address of vtop (vtop MUST BE an lvalue) */
1120 ST_FUNC void gaddrof(void)
1122 vtop->r &= ~VT_LVAL;
1123 /* tricky: if saved lvalue, then we can go back to lvalue */
1124 if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
1125 vtop->r = (vtop->r & ~(VT_VALMASK | VT_LVAL_TYPE)) | VT_LOCAL | VT_LVAL;
1130 #ifdef CONFIG_TCC_BCHECK
1131 /* generate lvalue bound code */
1132 static void gbound(void)
1134 int lval_type;
1135 CType type1;
1137 vtop->r &= ~VT_MUSTBOUND;
1138 /* if lvalue, then use checking code before dereferencing */
1139 if (vtop->r & VT_LVAL) {
1140 /* if not VT_BOUNDED value, then make one */
1141 if (!(vtop->r & VT_BOUNDED)) {
1142 lval_type = vtop->r & (VT_LVAL_TYPE | VT_LVAL);
1143 /* must save type because we must set it to int to get pointer */
1144 type1 = vtop->type;
1145 vtop->type.t = VT_PTR;
1146 gaddrof();
1147 vpushi(0);
1148 gen_bounded_ptr_add();
1149 vtop->r |= lval_type;
1150 vtop->type = type1;
1152 /* then check for dereferencing */
1153 gen_bounded_ptr_deref();
1156 #endif
1158 static void incr_bf_adr(int o)
1160 vtop->type = char_pointer_type;
1161 gaddrof();
1162 vpushi(o);
1163 gen_op('+');
1164 vtop->type.t = (vtop->type.t & ~(VT_BTYPE|VT_DEFSIGN))
1165 | (VT_BYTE|VT_UNSIGNED);
1166 vtop->r = (vtop->r & ~VT_LVAL_TYPE)
1167 | (VT_LVAL_BYTE|VT_LVAL_UNSIGNED|VT_LVAL);
1170 /* single-byte load mode for packed or otherwise unaligned bitfields */
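/* (informal example of a field that takes this single-byte path: in a
   hypothetical
       struct __attribute__((packed)) S { char c; int f : 21; };
   the field f starts at byte offset 1 and crosses byte boundaries, so it is
   assembled one byte at a time and then sign- or zero-extended) */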
1171 static void load_packed_bf(CType *type, int bit_pos, int bit_size)
1173 int n, o, bits;
1174 save_reg_upstack(vtop->r, 1);
1175 vpush64(type->t & VT_BTYPE, 0); // B X
1176 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
1177 do {
1178 vswap(); // X B
1179 incr_bf_adr(o);
1180 vdup(); // X B B
1181 n = 8 - bit_pos;
1182 if (n > bit_size)
1183 n = bit_size;
1184 if (bit_pos)
1185 vpushi(bit_pos), gen_op(TOK_SHR), bit_pos = 0; // X B Y
1186 if (n < 8)
1187 vpushi((1 << n) - 1), gen_op('&');
1188 gen_cast(type);
1189 if (bits)
1190 vpushi(bits), gen_op(TOK_SHL);
1191 vrotb(3); // B Y X
1192 gen_op('|'); // B X
1193 bits += n, bit_size -= n, o = 1;
1194 } while (bit_size);
1195 vswap(), vpop();
1196 if (!(type->t & VT_UNSIGNED)) {
1197 n = ((type->t & VT_BTYPE) == VT_LLONG ? 64 : 32) - bits;
1198 vpushi(n), gen_op(TOK_SHL);
1199 vpushi(n), gen_op(TOK_SAR);
1203 /* single-byte store mode for packed or otherwise unaligned bitfields */
1204 static void store_packed_bf(int bit_pos, int bit_size)
1206 int bits, n, o, m, c;
1208 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1209 vswap(); // X B
1210 save_reg_upstack(vtop->r, 1);
1211 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
1212 do {
1213 incr_bf_adr(o); // X B
1214 vswap(); //B X
1215 c ? vdup() : gv_dup(); // B V X
1216 vrott(3); // X B V
1217 if (bits)
1218 vpushi(bits), gen_op(TOK_SHR);
1219 if (bit_pos)
1220 vpushi(bit_pos), gen_op(TOK_SHL);
1221 n = 8 - bit_pos;
1222 if (n > bit_size)
1223 n = bit_size;
1224 if (n < 8) {
1225 m = ((1 << n) - 1) << bit_pos;
1226 vpushi(m), gen_op('&'); // X B V1
1227 vpushv(vtop-1); // X B V1 B
1228 vpushi(m & 0x80 ? ~m & 0x7f : ~m);
1229 gen_op('&'); // X B V1 B1
1230 gen_op('|'); // X B V2
1232 vdup(), vtop[-1] = vtop[-2]; // X B B V2
1233 vstore(), vpop(); // X B
1234 bits += n, bit_size -= n, bit_pos = 0, o = 1;
1235 } while (bit_size);
1236 vpop(), vpop();
1239 static int adjust_bf(SValue *sv, int bit_pos, int bit_size)
1241 int t;
1242 if (0 == sv->type.ref)
1243 return 0;
1244 t = sv->type.ref->auxtype;
1245 if (t != -1 && t != VT_STRUCT) {
1246 sv->type.t = (sv->type.t & ~VT_BTYPE) | t;
1247 sv->r = (sv->r & ~VT_LVAL_TYPE) | lvalue_type(sv->type.t);
1249 return t;
1252 /* store vtop in a register belonging to class 'rc'. lvalues are
1253 converted to values. Cannot be used if cannot be converted to
1254 register value (such as structures). */
1255 ST_FUNC int gv(int rc)
1257 int r, bit_pos, bit_size, size, align, rc2;
1259 /* NOTE: get_reg can modify vstack[] */
1260 if (vtop->type.t & VT_BITFIELD) {
1261 CType type;
1263 bit_pos = BIT_POS(vtop->type.t);
1264 bit_size = BIT_SIZE(vtop->type.t);
1265 /* remove bit field info to avoid loops */
1266 vtop->type.t &= ~VT_STRUCT_MASK;
1268 type.ref = NULL;
1269 type.t = vtop->type.t & VT_UNSIGNED;
1270 if ((vtop->type.t & VT_BTYPE) == VT_BOOL)
1271 type.t |= VT_UNSIGNED;
1273 r = adjust_bf(vtop, bit_pos, bit_size);
1275 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
1276 type.t |= VT_LLONG;
1277 else
1278 type.t |= VT_INT;
1280 if (r == VT_STRUCT) {
1281 load_packed_bf(&type, bit_pos, bit_size);
1282 } else {
1283 int bits = (type.t & VT_BTYPE) == VT_LLONG ? 64 : 32;
1284 /* cast to int to propagate signedness in following ops */
1285 gen_cast(&type);
1286 /* generate shifts */
1287 vpushi(bits - (bit_pos + bit_size));
1288 gen_op(TOK_SHL);
1289 vpushi(bits - bit_size);
1290 /* NOTE: transformed to SHR if unsigned */
1291 gen_op(TOK_SAR);
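/* (e.g. a hypothetical 5-bit signed field at bit position 3 of a 32-bit word
   is shifted left by 32-(3+5)=24, then arithmetically right by 32-5=27,
   which both extracts the field and propagates its sign) */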
1293 r = gv(rc);
1294 } else {
1295 if (is_float(vtop->type.t) &&
1296 (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1297 unsigned long offset;
1298 /* CPUs usually cannot use float constants, so we store them
1299 generically in data segment */
1300 size = type_size(&vtop->type, &align);
1301 if (NODATA_WANTED)
1302 size = 0, align = 1;
1303 offset = section_add(data_section, size, align);
1304 vpush_ref(&vtop->type, data_section, offset, size);
1305 vswap();
1306 init_putv(&vtop->type, data_section, offset);
1307 vtop->r |= VT_LVAL;
1309 #ifdef CONFIG_TCC_BCHECK
1310 if (vtop->r & VT_MUSTBOUND)
1311 gbound();
1312 #endif
1314 r = vtop->r & VT_VALMASK;
1315 rc2 = (rc & RC_FLOAT) ? RC_FLOAT : RC_INT;
1316 #ifndef TCC_TARGET_ARM64
1317 if (rc == RC_IRET)
1318 rc2 = RC_LRET;
1319 #ifdef TCC_TARGET_X86_64
1320 else if (rc == RC_FRET)
1321 rc2 = RC_QRET;
1322 #endif
1323 #endif
1324 /* need to reload if:
1325 - constant
1326 - lvalue (need to dereference pointer)
1327 - already a register, but not in the right class */
1328 if (r >= VT_CONST
1329 || (vtop->r & VT_LVAL)
1330 || !(reg_classes[r] & rc)
1331 #if PTR_SIZE == 8
1332 || ((vtop->type.t & VT_BTYPE) == VT_QLONG && !(reg_classes[vtop->r2] & rc2))
1333 || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT && !(reg_classes[vtop->r2] & rc2))
1334 #else
1335 || ((vtop->type.t & VT_BTYPE) == VT_LLONG && !(reg_classes[vtop->r2] & rc2))
1336 #endif
1339 r = get_reg(rc);
1340 #if PTR_SIZE == 8
1341 if (((vtop->type.t & VT_BTYPE) == VT_QLONG) || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT)) {
1342 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
1343 #else
1344 if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
1345 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
1346 unsigned long long ll;
1347 #endif
1348 int r2, original_type;
1349 original_type = vtop->type.t;
1350 /* two register type load : expand to two words
1351 temporarily */
1352 #if PTR_SIZE == 4
1353 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1354 /* load constant */
1355 ll = vtop->c.i;
1356 vtop->c.i = ll; /* first word */
1357 load(r, vtop);
1358 vtop->r = r; /* save register value */
1359 vpushi(ll >> 32); /* second word */
1360 } else
1361 #endif
1362 if (vtop->r & VT_LVAL) {
1363 /* We do not want to modify the long long
1364 pointer here, so the safest (and least
1365 efficient) approach is to save all the other registers
1366 on the stack. XXX: totally inefficient. */
1367 #if 0
1368 save_regs(1);
1369 #else
1370 /* lvalue_save: save only if used further down the stack */
1371 save_reg_upstack(vtop->r, 1);
1372 #endif
1373 /* load from memory */
1374 vtop->type.t = load_type;
1375 load(r, vtop);
1376 vdup();
1377 vtop[-1].r = r; /* save register value */
1378 /* increment pointer to get second word */
1379 vtop->type.t = addr_type;
1380 gaddrof();
1381 vpushi(load_size);
1382 gen_op('+');
1383 vtop->r |= VT_LVAL;
1384 vtop->type.t = load_type;
1385 } else {
1386 /* move registers */
1387 load(r, vtop);
1388 vdup();
1389 vtop[-1].r = r; /* save register value */
1390 vtop->r = vtop[-1].r2;
1392 /* Allocate second register. Here we rely on the fact that
1393 get_reg() tries first to free r2 of an SValue. */
1394 r2 = get_reg(rc2);
1395 load(r2, vtop);
1396 vpop();
1397 /* write second register */
1398 vtop->r2 = r2;
1399 vtop->type.t = original_type;
1400 } else if ((vtop->r & VT_LVAL) && !is_float(vtop->type.t)) {
1401 int t1, t;
1402 /* lvalue of scalar type : need to use lvalue type
1403 because of possible cast */
1404 t = vtop->type.t;
1405 t1 = t;
1406 /* compute memory access type */
1407 if (vtop->r & VT_LVAL_BYTE)
1408 t = VT_BYTE;
1409 else if (vtop->r & VT_LVAL_SHORT)
1410 t = VT_SHORT;
1411 if (vtop->r & VT_LVAL_UNSIGNED)
1412 t |= VT_UNSIGNED;
1413 vtop->type.t = t;
1414 load(r, vtop);
1415 /* restore wanted type */
1416 vtop->type.t = t1;
1417 } else {
1418 /* one register type load */
1419 load(r, vtop);
1422 vtop->r = r;
1423 #ifdef TCC_TARGET_C67
1424 /* uses register pairs for doubles */
1425 if ((vtop->type.t & VT_BTYPE) == VT_DOUBLE)
1426 vtop->r2 = r+1;
1427 #endif
1429 return r;
1432 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1433 ST_FUNC void gv2(int rc1, int rc2)
1435 int v;
1437 /* generate more generic register first. But VT_JMP or VT_CMP
1438 values must be generated first in all cases to avoid possible
1439 reload errors */
1440 v = vtop[0].r & VT_VALMASK;
1441 if (v != VT_CMP && (v & ~1) != VT_JMP && rc1 <= rc2) {
1442 vswap();
1443 gv(rc1);
1444 vswap();
1445 gv(rc2);
1446 /* test if reload is needed for first register */
1447 if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
1448 vswap();
1449 gv(rc1);
1450 vswap();
1452 } else {
1453 gv(rc2);
1454 vswap();
1455 gv(rc1);
1456 vswap();
1457 /* test if reload is needed for first register */
1458 if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
1459 gv(rc2);
1464 #ifndef TCC_TARGET_ARM64
1465 /* wrapper around RC_FRET to return a register by type */
1466 static int rc_fret(int t)
1468 #ifdef TCC_TARGET_X86_64
1469 if (t == VT_LDOUBLE) {
1470 return RC_ST0;
1472 #endif
1473 return RC_FRET;
1475 #endif
1477 /* wrapper around REG_FRET to return a register by type */
1478 static int reg_fret(int t)
1480 #ifdef TCC_TARGET_X86_64
1481 if (t == VT_LDOUBLE) {
1482 return TREG_ST0;
1484 #endif
1485 return REG_FRET;
1488 #if PTR_SIZE == 4
1489 /* expand 64bit on stack in two ints */
1490 static void lexpand(void)
1492 int u, v;
1493 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1494 v = vtop->r & (VT_VALMASK | VT_LVAL);
1495 if (v == VT_CONST) {
1496 vdup();
1497 vtop[0].c.i >>= 32;
1498 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1499 vdup();
1500 vtop[0].c.i += 4;
1501 } else {
1502 gv(RC_INT);
1503 vdup();
1504 vtop[0].r = vtop[-1].r2;
1505 vtop[0].r2 = vtop[-1].r2 = VT_CONST;
1507 vtop[0].type.t = vtop[-1].type.t = VT_INT | u;
1509 #endif
1511 #ifdef TCC_TARGET_ARM
1512 /* expand long long on stack */
1513 ST_FUNC void lexpand_nr(void)
1515 int u,v;
1517 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1518 vdup();
1519 vtop->r2 = VT_CONST;
1520 vtop->type.t = VT_INT | u;
1521 v=vtop[-1].r & (VT_VALMASK | VT_LVAL);
1522 if (v == VT_CONST) {
1523 vtop[-1].c.i = vtop->c.i;
1524 vtop->c.i = vtop->c.i >> 32;
1525 vtop->r = VT_CONST;
1526 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1527 vtop->c.i += 4;
1528 vtop->r = vtop[-1].r;
1529 } else if (v > VT_CONST) {
1530 vtop--;
1531 lexpand();
1532 } else
1533 vtop->r = vtop[-1].r2;
1534 vtop[-1].r2 = VT_CONST;
1535 vtop[-1].type.t = VT_INT | u;
1537 #endif
1539 #if PTR_SIZE == 4
1540 /* build a long long from two ints */
1541 static void lbuild(int t)
1543 gv2(RC_INT, RC_INT);
1544 vtop[-1].r2 = vtop[0].r;
1545 vtop[-1].type.t = t;
1546 vpop();
1548 #endif
1550 /* convert stack entry to register and duplicate its value in another
1551 register */
1552 static void gv_dup(void)
1554 int rc, t, r, r1;
1555 SValue sv;
1557 t = vtop->type.t;
1558 #if PTR_SIZE == 4
1559 if ((t & VT_BTYPE) == VT_LLONG) {
1560 if (t & VT_BITFIELD) {
1561 gv(RC_INT);
1562 t = vtop->type.t;
1564 lexpand();
1565 gv_dup();
1566 vswap();
1567 vrotb(3);
1568 gv_dup();
1569 vrotb(4);
1570 /* stack: H L L1 H1 */
1571 lbuild(t);
1572 vrotb(3);
1573 vrotb(3);
1574 vswap();
1575 lbuild(t);
1576 vswap();
1577 } else
1578 #endif
1580 /* duplicate value */
1581 rc = RC_INT;
1582 sv.type.t = VT_INT;
1583 if (is_float(t)) {
1584 rc = RC_FLOAT;
1585 #ifdef TCC_TARGET_X86_64
1586 if ((t & VT_BTYPE) == VT_LDOUBLE) {
1587 rc = RC_ST0;
1589 #endif
1590 sv.type.t = t;
1592 r = gv(rc);
1593 r1 = get_reg(rc);
1594 sv.r = r;
1595 sv.c.i = 0;
1596 load(r1, &sv); /* move r to r1 */
1597 vdup();
1598 /* duplicates value */
1599 if (r != r1)
1600 vtop->r = r1;
1604 /* Generate value test
1606 * Generate a test for any value (jump, comparison and integers) */
1607 ST_FUNC int gvtst(int inv, int t)
1609 int v = vtop->r & VT_VALMASK;
1610 if (v != VT_CMP && v != VT_JMP && v != VT_JMPI) {
1611 vpushi(0);
1612 gen_op(TOK_NE);
1614 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1615 /* constant jmp optimization */
1616 if ((vtop->c.i != 0) != inv)
1617 t = gjmp(t);
1618 vtop--;
1619 return t;
1621 return gtst(inv, t);
1624 #if PTR_SIZE == 4
1625 /* generate CPU independent (unsigned) long long operations */
1626 static void gen_opl(int op)
1628 int t, a, b, op1, c, i;
1629 int func;
1630 unsigned short reg_iret = REG_IRET;
1631 unsigned short reg_lret = REG_LRET;
1632 SValue tmp;
1634 switch(op) {
1635 case '/':
1636 case TOK_PDIV:
1637 func = TOK___divdi3;
1638 goto gen_func;
1639 case TOK_UDIV:
1640 func = TOK___udivdi3;
1641 goto gen_func;
1642 case '%':
1643 func = TOK___moddi3;
1644 goto gen_mod_func;
1645 case TOK_UMOD:
1646 func = TOK___umoddi3;
1647 gen_mod_func:
1648 #ifdef TCC_ARM_EABI
1649 reg_iret = TREG_R2;
1650 reg_lret = TREG_R3;
1651 #endif
1652 gen_func:
1653 /* call generic long long function */
1654 vpush_global_sym(&func_old_type, func);
1655 vrott(3);
1656 gfunc_call(2);
1657 vpushi(0);
1658 vtop->r = reg_iret;
1659 vtop->r2 = reg_lret;
1660 break;
1661 case '^':
1662 case '&':
1663 case '|':
1664 case '*':
1665 case '+':
1666 case '-':
1667 //pv("gen_opl A",0,2);
1668 t = vtop->type.t;
1669 vswap();
1670 lexpand();
1671 vrotb(3);
1672 lexpand();
1673 /* stack: L1 H1 L2 H2 */
1674 tmp = vtop[0];
1675 vtop[0] = vtop[-3];
1676 vtop[-3] = tmp;
1677 tmp = vtop[-2];
1678 vtop[-2] = vtop[-3];
1679 vtop[-3] = tmp;
1680 vswap();
1681 /* stack: H1 H2 L1 L2 */
1682 //pv("gen_opl B",0,4);
1683 if (op == '*') {
1684 vpushv(vtop - 1);
1685 vpushv(vtop - 1);
1686 gen_op(TOK_UMULL);
1687 lexpand();
1688 /* stack: H1 H2 L1 L2 ML MH */
1689 for(i=0;i<4;i++)
1690 vrotb(6);
1691 /* stack: ML MH H1 H2 L1 L2 */
1692 tmp = vtop[0];
1693 vtop[0] = vtop[-2];
1694 vtop[-2] = tmp;
1695 /* stack: ML MH H1 L2 H2 L1 */
1696 gen_op('*');
1697 vrotb(3);
1698 vrotb(3);
1699 gen_op('*');
1700 /* stack: ML MH M1 M2 */
1701 gen_op('+');
1702 gen_op('+');
1703 } else if (op == '+' || op == '-') {
1704 /* XXX: add non carry method too (for MIPS or alpha) */
1705 if (op == '+')
1706 op1 = TOK_ADDC1;
1707 else
1708 op1 = TOK_SUBC1;
1709 gen_op(op1);
1710 /* stack: H1 H2 (L1 op L2) */
1711 vrotb(3);
1712 vrotb(3);
1713 gen_op(op1 + 1); /* TOK_xxxC2 */
1714 } else {
1715 gen_op(op);
1716 /* stack: H1 H2 (L1 op L2) */
1717 vrotb(3);
1718 vrotb(3);
1719 /* stack: (L1 op L2) H1 H2 */
1720 gen_op(op);
1721 /* stack: (L1 op L2) (H1 op H2) */
1723 /* stack: L H */
1724 lbuild(t);
1725 break;
1726 case TOK_SAR:
1727 case TOK_SHR:
1728 case TOK_SHL:
1729 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1730 t = vtop[-1].type.t;
1731 vswap();
1732 lexpand();
1733 vrotb(3);
1734 /* stack: L H shift */
1735 c = (int)vtop->c.i;
1736 /* constant: simpler */
1737 /* NOTE: all comments are for SHL. the other cases are
1738 done by swapping words */
1739 vpop();
1740 if (op != TOK_SHL)
1741 vswap();
1742 if (c >= 32) {
1743 /* stack: L H */
1744 vpop();
1745 if (c > 32) {
1746 vpushi(c - 32);
1747 gen_op(op);
1749 if (op != TOK_SAR) {
1750 vpushi(0);
1751 } else {
1752 gv_dup();
1753 vpushi(31);
1754 gen_op(TOK_SAR);
1756 vswap();
1757 } else {
1758 vswap();
1759 gv_dup();
1760 /* stack: H L L */
1761 vpushi(c);
1762 gen_op(op);
1763 vswap();
1764 vpushi(32 - c);
1765 if (op == TOK_SHL)
1766 gen_op(TOK_SHR);
1767 else
1768 gen_op(TOK_SHL);
1769 vrotb(3);
1770 /* stack: L L H */
1771 vpushi(c);
1772 if (op == TOK_SHL)
1773 gen_op(TOK_SHL);
1774 else
1775 gen_op(TOK_SHR);
1776 gen_op('|');
1778 if (op != TOK_SHL)
1779 vswap();
1780 lbuild(t);
1781 } else {
1782 /* XXX: should provide a faster fallback on x86 ? */
1783 switch(op) {
1784 case TOK_SAR:
1785 func = TOK___ashrdi3;
1786 goto gen_func;
1787 case TOK_SHR:
1788 func = TOK___lshrdi3;
1789 goto gen_func;
1790 case TOK_SHL:
1791 func = TOK___ashldi3;
1792 goto gen_func;
1795 break;
1796 default:
1797 /* compare operations */
1798 t = vtop->type.t;
1799 vswap();
1800 lexpand();
1801 vrotb(3);
1802 lexpand();
1803 /* stack: L1 H1 L2 H2 */
1804 tmp = vtop[-1];
1805 vtop[-1] = vtop[-2];
1806 vtop[-2] = tmp;
1807 /* stack: L1 L2 H1 H2 */
1808 /* compare high */
1809 op1 = op;
1810 /* when values are equal, we need to compare low words. since
1811 the jump is inverted, we invert the test too. */
1812 if (op1 == TOK_LT)
1813 op1 = TOK_LE;
1814 else if (op1 == TOK_GT)
1815 op1 = TOK_GE;
1816 else if (op1 == TOK_ULT)
1817 op1 = TOK_ULE;
1818 else if (op1 == TOK_UGT)
1819 op1 = TOK_UGE;
1820 a = 0;
1821 b = 0;
1822 gen_op(op1);
1823 if (op == TOK_NE) {
1824 b = gvtst(0, 0);
1825 } else {
1826 a = gvtst(1, 0);
1827 if (op != TOK_EQ) {
1828 /* generate non equal test */
1829 vpushi(TOK_NE);
1830 vtop->r = VT_CMP;
1831 b = gvtst(0, 0);
1834 /* compare low. Always unsigned */
1835 op1 = op;
1836 if (op1 == TOK_LT)
1837 op1 = TOK_ULT;
1838 else if (op1 == TOK_LE)
1839 op1 = TOK_ULE;
1840 else if (op1 == TOK_GT)
1841 op1 = TOK_UGT;
1842 else if (op1 == TOK_GE)
1843 op1 = TOK_UGE;
1844 gen_op(op1);
1845 a = gvtst(1, a);
1846 gsym(b);
1847 vseti(VT_JMPI, a);
1848 break;
1851 #endif
1853 static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
1855 uint64_t x = (a >> 63 ? -a : a) / (b >> 63 ? -b : b);
1856 return (a ^ b) >> 63 ? -x : x;
1859 static int gen_opic_lt(uint64_t a, uint64_t b)
1861 return (a ^ (uint64_t)1 << 63) < (b ^ (uint64_t)1 << 63);
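/* (informal note: the two helpers above fold signed operations using only
   unsigned arithmetic so the folding stays well defined in C:
   gen_opic_sdiv divides the magnitudes and restores the sign from a^b, and
   gen_opic_lt flips the sign bit of both operands, which maps signed order
   onto unsigned order) */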
1864 /* handle integer constant optimizations and various machine
1865 independent opt */
1866 static void gen_opic(int op)
1868 SValue *v1 = vtop - 1;
1869 SValue *v2 = vtop;
1870 int t1 = v1->type.t & VT_BTYPE;
1871 int t2 = v2->type.t & VT_BTYPE;
1872 int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1873 int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1874 uint64_t l1 = c1 ? v1->c.i : 0;
1875 uint64_t l2 = c2 ? v2->c.i : 0;
1876 int shm = (t1 == VT_LLONG) ? 63 : 31;
1878 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
1879 l1 = ((uint32_t)l1 |
1880 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
1881 if (t2 != VT_LLONG && (PTR_SIZE != 8 || t2 != VT_PTR))
1882 l2 = ((uint32_t)l2 |
1883 (v2->type.t & VT_UNSIGNED ? 0 : -(l2 & 0x80000000)));
1885 if (c1 && c2) {
1886 switch(op) {
1887 case '+': l1 += l2; break;
1888 case '-': l1 -= l2; break;
1889 case '&': l1 &= l2; break;
1890 case '^': l1 ^= l2; break;
1891 case '|': l1 |= l2; break;
1892 case '*': l1 *= l2; break;
1894 case TOK_PDIV:
1895 case '/':
1896 case '%':
1897 case TOK_UDIV:
1898 case TOK_UMOD:
1899 /* if division by zero, generate explicit division */
1900 if (l2 == 0) {
1901 if (const_wanted)
1902 tcc_error("division by zero in constant");
1903 goto general_case;
1905 switch(op) {
1906 default: l1 = gen_opic_sdiv(l1, l2); break;
1907 case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
1908 case TOK_UDIV: l1 = l1 / l2; break;
1909 case TOK_UMOD: l1 = l1 % l2; break;
1911 break;
1912 case TOK_SHL: l1 <<= (l2 & shm); break;
1913 case TOK_SHR: l1 >>= (l2 & shm); break;
1914 case TOK_SAR:
1915 l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
1916 break;
1917 /* tests */
1918 case TOK_ULT: l1 = l1 < l2; break;
1919 case TOK_UGE: l1 = l1 >= l2; break;
1920 case TOK_EQ: l1 = l1 == l2; break;
1921 case TOK_NE: l1 = l1 != l2; break;
1922 case TOK_ULE: l1 = l1 <= l2; break;
1923 case TOK_UGT: l1 = l1 > l2; break;
1924 case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
1925 case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
1926 case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
1927 case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
1928 /* logical */
1929 case TOK_LAND: l1 = l1 && l2; break;
1930 case TOK_LOR: l1 = l1 || l2; break;
1931 default:
1932 goto general_case;
1934 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
1935 l1 = ((uint32_t)l1 |
1936 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
1937 v1->c.i = l1;
1938 vtop--;
1939 } else {
1940 /* if commutative ops, put c2 as constant */
1941 if (c1 && (op == '+' || op == '&' || op == '^' ||
1942 op == '|' || op == '*')) {
1943 vswap();
1944 c2 = c1; //c = c1, c1 = c2, c2 = c;
1945 l2 = l1; //l = l1, l1 = l2, l2 = l;
1947 if (!const_wanted &&
1948 c1 && ((l1 == 0 &&
1949 (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
1950 (l1 == -1 && op == TOK_SAR))) {
1951 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
1952 vtop--;
1953 } else if (!const_wanted &&
1954 c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
1955 (op == '|' &&
1956 (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))) ||
1957 (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
1958 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
1959 if (l2 == 1)
1960 vtop->c.i = 0;
1961 vswap();
1962 vtop--;
1963 } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
1964 op == TOK_PDIV) &&
1965 l2 == 1) ||
1966 ((op == '+' || op == '-' || op == '|' || op == '^' ||
1967 op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
1968 l2 == 0) ||
1969 (op == '&' &&
1970 (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))))) {
1971 /* filter out NOP operations like x*1, x-0, x&-1... */
1972 vtop--;
1973 } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
1974 /* try to use shifts instead of muls or divs */
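/* (e.g. a constant multiplier of 8 becomes a left shift by 3, and an
   unsigned division by 8 becomes a right shift by 3) */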
1975 if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
1976 int n = -1;
1977 while (l2) {
1978 l2 >>= 1;
1979 n++;
1981 vtop->c.i = n;
1982 if (op == '*')
1983 op = TOK_SHL;
1984 else if (op == TOK_PDIV)
1985 op = TOK_SAR;
1986 else
1987 op = TOK_SHR;
1989 goto general_case;
1990 } else if (c2 && (op == '+' || op == '-') &&
1991 (((vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM))
1992 || (vtop[-1].r & (VT_VALMASK | VT_LVAL)) == VT_LOCAL)) {
1993 /* symbol + constant case */
1994 if (op == '-')
1995 l2 = -l2;
1996 l2 += vtop[-1].c.i;
1997 /* The backends can't always deal with addends to symbols
1998 larger than +-1<<31. Don't construct such. */
1999 if ((int)l2 != l2)
2000 goto general_case;
2001 vtop--;
2002 vtop->c.i = l2;
2003 } else {
2004 general_case:
2005 /* call low level op generator */
2006 if (t1 == VT_LLONG || t2 == VT_LLONG ||
2007 (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
2008 gen_opl(op);
2009 else
2010 gen_opi(op);
2015 /* generate a floating point operation with constant propagation */
2016 static void gen_opif(int op)
2018 int c1, c2;
2019 SValue *v1, *v2;
2020 #if defined _MSC_VER && defined _AMD64_
2021 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2022 volatile
2023 #endif
2024 long double f1, f2;
2026 v1 = vtop - 1;
2027 v2 = vtop;
2028 /* currently, we cannot do computations with forward symbols */
2029 c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2030 c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2031 if (c1 && c2) {
2032 if (v1->type.t == VT_FLOAT) {
2033 f1 = v1->c.f;
2034 f2 = v2->c.f;
2035 } else if (v1->type.t == VT_DOUBLE) {
2036 f1 = v1->c.d;
2037 f2 = v2->c.d;
2038 } else {
2039 f1 = v1->c.ld;
2040 f2 = v2->c.ld;
2043 /* NOTE: we only do constant propagation for finite numbers (not
2044 NaN or infinity), per the ANSI spec */
2045 if (!ieee_finite(f1) || !ieee_finite(f2))
2046 goto general_case;
2048 switch(op) {
2049 case '+': f1 += f2; break;
2050 case '-': f1 -= f2; break;
2051 case '*': f1 *= f2; break;
2052 case '/':
2053 if (f2 == 0.0) {
2054 /* If not in initializer we need to potentially generate
2055 FP exceptions at runtime, otherwise we want to fold. */
2056 if (!const_wanted)
2057 goto general_case;
2059 f1 /= f2;
2060 break;
2061 /* XXX: also handles tests ? */
2062 default:
2063 goto general_case;
2065 /* XXX: overflow test ? */
2066 if (v1->type.t == VT_FLOAT) {
2067 v1->c.f = f1;
2068 } else if (v1->type.t == VT_DOUBLE) {
2069 v1->c.d = f1;
2070 } else {
2071 v1->c.ld = f1;
2073 vtop--;
2074 } else {
2075 general_case:
2076 gen_opf(op);
2080 static int pointed_size(CType *type)
2082 int align;
2083 return type_size(pointed_type(type), &align);
2086 static void vla_runtime_pointed_size(CType *type)
2088 int align;
2089 vla_runtime_type_size(pointed_type(type), &align);
2092 static inline int is_null_pointer(SValue *p)
2094 if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
2095 return 0;
2096 return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
2097 ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
2098 ((p->type.t & VT_BTYPE) == VT_PTR &&
2099 (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0));
2102 static inline int is_integer_btype(int bt)
2104 return (bt == VT_BYTE || bt == VT_SHORT ||
2105 bt == VT_INT || bt == VT_LLONG);
2108 /* check types for comparison or subtraction of pointers */
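/* (informal example: comparing a pointer with a plain integer, as in
   "p == 5" for a "char *p", only draws a warning below, while subtracting
   pointers of incompatible types is rejected as an error, mirroring gcc) */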
2109 static void check_comparison_pointer_types(SValue *p1, SValue *p2, int op)
2111 CType *type1, *type2, tmp_type1, tmp_type2;
2112 int bt1, bt2;
2114 /* null pointers are accepted for all comparisons, as in gcc */
2115 if (is_null_pointer(p1) || is_null_pointer(p2))
2116 return;
2117 type1 = &p1->type;
2118 type2 = &p2->type;
2119 bt1 = type1->t & VT_BTYPE;
2120 bt2 = type2->t & VT_BTYPE;
2121 /* accept comparison between pointer and integer with a warning */
2122 if ((is_integer_btype(bt1) || is_integer_btype(bt2)) && op != '-') {
2123 if (op != TOK_LOR && op != TOK_LAND )
2124 tcc_warning("comparison between pointer and integer");
2125 return;
2128 /* both must be pointers or implicit function pointers */
2129 if (bt1 == VT_PTR) {
2130 type1 = pointed_type(type1);
2131 } else if (bt1 != VT_FUNC)
2132 goto invalid_operands;
2134 if (bt2 == VT_PTR) {
2135 type2 = pointed_type(type2);
2136 } else if (bt2 != VT_FUNC) {
2137 invalid_operands:
2138 tcc_error("invalid operands to binary %s", get_tok_str(op, NULL));
2140 if ((type1->t & VT_BTYPE) == VT_VOID ||
2141 (type2->t & VT_BTYPE) == VT_VOID)
2142 return;
2143 tmp_type1 = *type1;
2144 tmp_type2 = *type2;
2145 tmp_type1.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
2146 tmp_type2.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
2147 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
2148 /* gcc-like error if '-' is used */
2149 if (op == '-')
2150 goto invalid_operands;
2151 else
2152 tcc_warning("comparison of distinct pointer types lacks a cast");
2156 /* generic gen_op: handles type problems */
2157 ST_FUNC void gen_op(int op)
2159 int u, t1, t2, bt1, bt2, t;
2160 CType type1;
2162 redo:
2163 t1 = vtop[-1].type.t;
2164 t2 = vtop[0].type.t;
2165 bt1 = t1 & VT_BTYPE;
2166 bt2 = t2 & VT_BTYPE;
2168 if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
2169 tcc_error("operation on a struct");
2170 } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
2171 if (bt2 == VT_FUNC) {
2172 mk_pointer(&vtop->type);
2173 gaddrof();
2175 if (bt1 == VT_FUNC) {
2176 vswap();
2177 mk_pointer(&vtop->type);
2178 gaddrof();
2179 vswap();
2181 goto redo;
2182 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
2183 /* at least one operand is a pointer */
2184 /* relational op: must be both pointers */
2185 if (op >= TOK_ULT && op <= TOK_LOR) {
2186 check_comparison_pointer_types(vtop - 1, vtop, op);
2187 /* pointers are handled as unsigned */
2188 #if PTR_SIZE == 8
2189 t = VT_LLONG | VT_UNSIGNED;
2190 #else
2191 t = VT_INT | VT_UNSIGNED;
2192 #endif
2193 goto std_op;
2195 /* if both pointers, then it must be the '-' op */
2196 if (bt1 == VT_PTR && bt2 == VT_PTR) {
2197 if (op != '-')
2198 tcc_error("cannot use pointers here");
2199 check_comparison_pointer_types(vtop - 1, vtop, op);
2200 /* XXX: check that types are compatible */
2201 if (vtop[-1].type.t & VT_VLA) {
2202 vla_runtime_pointed_size(&vtop[-1].type);
2203 } else {
2204 vpushi(pointed_size(&vtop[-1].type));
2206 vrott(3);
2207 gen_opic(op);
2208 vtop->type.t = ptrdiff_type.t;
2209 vswap();
2210 gen_op(TOK_PDIV);
2211 } else {
2212 /* exactly one pointer : must be '+' or '-'. */
2213 if (op != '-' && op != '+')
2214 tcc_error("cannot use pointers here");
2215 /* Put pointer as first operand */
2216 if (bt2 == VT_PTR) {
2217 vswap();
2218 t = t1, t1 = t2, t2 = t;
2220 #if PTR_SIZE == 4
2221 if ((vtop[0].type.t & VT_BTYPE) == VT_LLONG)
2222 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2223 gen_cast_s(VT_INT);
2224 #endif
2225 type1 = vtop[-1].type;
2226 type1.t &= ~VT_ARRAY;
2227 if (vtop[-1].type.t & VT_VLA)
2228 vla_runtime_pointed_size(&vtop[-1].type);
2229 else {
2230 u = pointed_size(&vtop[-1].type);
2231 if (u < 0)
2232 tcc_error("unknown array element size");
2233 #if PTR_SIZE == 8
2234 vpushll(u);
2235 #else
2236 /* XXX: cast to int ? (long long case) */
2237 vpushi(u);
2238 #endif
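/* The element size just pushed is multiplied with the integer index
   by the gen_op('*') below, e.g. for "int *p; p + 3" the offset
   becomes 3 * 4 bytes (assuming a 4-byte int); the scaled offset is
   then added to the pointer. */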
2240 gen_op('*');
2241 #if 0
2242 /* #ifdef CONFIG_TCC_BCHECK
2243 The main reason for removing this code:
2244 #include <stdio.h>
2245 int main ()
2247 int v[10];
2248 int i = 10;
2249 int j = 9;
2250 fprintf(stderr, "v+i-j = %p\n", v+i-j);
2251 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
2253 When this code is on, the output looks like
2254 v+i-j = 0xfffffffe
2255 v+(i-j) = 0xbff84000
2257 /* if evaluating constant expression, no code should be
2258 generated, so no bound check */
2259 if (tcc_state->do_bounds_check && !const_wanted) {
2260 /* if bounded pointers, we generate a special code to
2261 test bounds */
2262 if (op == '-') {
2263 vpushi(0);
2264 vswap();
2265 gen_op('-');
2267 gen_bounded_ptr_add();
2268 } else
2269 #endif
2271 gen_opic(op);
2273 /* put the type back in case gen_opic() swapped the operands */
2274 vtop->type = type1;
2276 } else if (is_float(bt1) || is_float(bt2)) {
2277 /* compute bigger type and do implicit casts */
2278 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
2279 t = VT_LDOUBLE;
2280 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
2281 t = VT_DOUBLE;
2282 } else {
2283 t = VT_FLOAT;
2285 /* floats can only be used for a few operations */
2286 if (op != '+' && op != '-' && op != '*' && op != '/' &&
2287 (op < TOK_ULT || op > TOK_GT))
2288 tcc_error("invalid operands for binary operation");
2289 goto std_op;
2290 } else if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL) {
2291 t = bt1 == VT_LLONG ? VT_LLONG : VT_INT;
2292 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (t | VT_UNSIGNED))
2293 t |= VT_UNSIGNED;
2294 t |= (VT_LONG & t1);
2295 goto std_op;
2296 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
2297 /* cast to the biggest operand type */
2298 t = VT_LLONG | VT_LONG;
2299 if (bt1 == VT_LLONG)
2300 t &= t1;
2301 if (bt2 == VT_LLONG)
2302 t &= t2;
2303 /* convert to unsigned if it does not fit in a long long */
2304 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
2305 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
2306 t |= VT_UNSIGNED;
2307 goto std_op;
2308 } else {
2309 /* integer operations */
2310 t = VT_INT | (VT_LONG & (t1 | t2));
2311 /* convert to unsigned if it does not fit in an integer */
2312 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
2313 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
2314 t |= VT_UNSIGNED;
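/* e.g. for "unsigned int u; int i;" the expression "u + i" reaches
   this point with t == (VT_INT | VT_UNSIGNED), so both operands are
   converted to unsigned int below, as the usual arithmetic
   conversions require. */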
2315 std_op:
2316 /* XXX: currently, some unsigned operations are explicit, so
2317 we modify them here */
2318 if (t & VT_UNSIGNED) {
2319 if (op == TOK_SAR)
2320 op = TOK_SHR;
2321 else if (op == '/')
2322 op = TOK_UDIV;
2323 else if (op == '%')
2324 op = TOK_UMOD;
2325 else if (op == TOK_LT)
2326 op = TOK_ULT;
2327 else if (op == TOK_GT)
2328 op = TOK_UGT;
2329 else if (op == TOK_LE)
2330 op = TOK_ULE;
2331 else if (op == TOK_GE)
2332 op = TOK_UGE;
2334 vswap();
2335 type1.t = t;
2336 type1.ref = NULL;
2337 gen_cast(&type1);
2338 vswap();
2339 /* special case for shifts and long long: we keep the shift as
2340 an integer */
2341 if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
2342 type1.t = VT_INT;
2343 gen_cast(&type1);
2344 if (is_float(t))
2345 gen_opif(op);
2346 else
2347 gen_opic(op);
2348 if (op >= TOK_ULT && op <= TOK_GT) {
2349 /* relational op: the result is an int */
2350 vtop->type.t = VT_INT;
2351 } else {
2352 vtop->type.t = t;
2355 // Make sure that we have converted to an rvalue:
2356 if (vtop->r & VT_LVAL)
2357 gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
2360 #ifndef TCC_TARGET_ARM
2361 /* generic itof for unsigned long long case */
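/* e.g. "(double)x" with "unsigned long long x" has no direct signed
   conversion, so on most targets it is lowered below into a call to
   the runtime helper __floatundidf (__floatundisf/__floatundixf for
   float/long double). */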
2362 static void gen_cvt_itof1(int t)
2364 #ifdef TCC_TARGET_ARM64
2365 gen_cvt_itof(t);
2366 #else
2367 if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
2368 (VT_LLONG | VT_UNSIGNED)) {
2370 if (t == VT_FLOAT)
2371 vpush_global_sym(&func_old_type, TOK___floatundisf);
2372 #if LDOUBLE_SIZE != 8
2373 else if (t == VT_LDOUBLE)
2374 vpush_global_sym(&func_old_type, TOK___floatundixf);
2375 #endif
2376 else
2377 vpush_global_sym(&func_old_type, TOK___floatundidf);
2378 vrott(2);
2379 gfunc_call(1);
2380 vpushi(0);
2381 vtop->r = reg_fret(t);
2382 } else {
2383 gen_cvt_itof(t);
2385 #endif
2387 #endif
2389 /* generic ftoi for unsigned long long case */
2390 static void gen_cvt_ftoi1(int t)
2392 #ifdef TCC_TARGET_ARM64
2393 gen_cvt_ftoi(t);
2394 #else
2395 int st;
2397 if (t == (VT_LLONG | VT_UNSIGNED)) {
2398 /* not handled natively */
2399 st = vtop->type.t & VT_BTYPE;
2400 if (st == VT_FLOAT)
2401 vpush_global_sym(&func_old_type, TOK___fixunssfdi);
2402 #if LDOUBLE_SIZE != 8
2403 else if (st == VT_LDOUBLE)
2404 vpush_global_sym(&func_old_type, TOK___fixunsxfdi);
2405 #endif
2406 else
2407 vpush_global_sym(&func_old_type, TOK___fixunsdfdi);
2408 vrott(2);
2409 gfunc_call(1);
2410 vpushi(0);
2411 vtop->r = REG_IRET;
2412 vtop->r2 = REG_LRET;
2413 } else {
2414 gen_cvt_ftoi(t);
2416 #endif
2419 /* force char or short cast */
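/* Illustrative examples of the delayed cast performed below:
   truncating an int to unsigned char masks with 0xff, while
   truncating to signed char shifts left by 24 and back with an
   arithmetic shift so that the sign bit is propagated. */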
2420 static void force_charshort_cast(int t)
2422 int bits, dbt;
2424 /* cannot cast static initializers */
2425 if (STATIC_DATA_WANTED)
2426 return;
2428 dbt = t & VT_BTYPE;
2429 /* XXX: add optimization if lvalue : just change type and offset */
2430 if (dbt == VT_BYTE)
2431 bits = 8;
2432 else
2433 bits = 16;
2434 if (t & VT_UNSIGNED) {
2435 vpushi((1 << bits) - 1);
2436 gen_op('&');
2437 } else {
2438 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
2439 bits = 64 - bits;
2440 else
2441 bits = 32 - bits;
2442 vpushi(bits);
2443 gen_op(TOK_SHL);
2444 /* result must be signed or the SAR is converted to an SHL.
2445 This was not the case when "t" was a signed short
2446 and the last value on the stack was an unsigned int */
2447 vtop->type.t &= ~VT_UNSIGNED;
2448 vpushi(bits);
2449 gen_op(TOK_SAR);
2453 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2454 static void gen_cast_s(int t)
2456 CType type;
2457 type.t = t;
2458 type.ref = NULL;
2459 gen_cast(&type);
2462 static void gen_cast(CType *type)
2464 int sbt, dbt, sf, df, c, p;
2466 /* special delayed cast for char/short */
2467 /* XXX: in some cases (multiple cascaded casts), it may still
2468 be incorrect */
2469 if (vtop->r & VT_MUSTCAST) {
2470 vtop->r &= ~VT_MUSTCAST;
2471 force_charshort_cast(vtop->type.t);
2474 /* bitfields first get cast to ints */
2475 if (vtop->type.t & VT_BITFIELD) {
2476 gv(RC_INT);
2479 dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
2480 sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
2482 if (sbt != dbt) {
2483 sf = is_float(sbt);
2484 df = is_float(dbt);
2485 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2486 p = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM);
2487 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
2488 c &= dbt != VT_LDOUBLE;
2489 #endif
2490 if (c) {
2491 /* constant case: we can do it now */
2492 /* XXX: in ISO C, we cannot do it if the conversion raises an error */
2493 if (sbt == VT_FLOAT)
2494 vtop->c.ld = vtop->c.f;
2495 else if (sbt == VT_DOUBLE)
2496 vtop->c.ld = vtop->c.d;
2498 if (df) {
2499 if ((sbt & VT_BTYPE) == VT_LLONG) {
2500 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
2501 vtop->c.ld = vtop->c.i;
2502 else
2503 vtop->c.ld = -(long double)-vtop->c.i;
2504 } else if(!sf) {
2505 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
2506 vtop->c.ld = (uint32_t)vtop->c.i;
2507 else
2508 vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
2511 if (dbt == VT_FLOAT)
2512 vtop->c.f = (float)vtop->c.ld;
2513 else if (dbt == VT_DOUBLE)
2514 vtop->c.d = (double)vtop->c.ld;
2515 } else if (sf && dbt == (VT_LLONG|VT_UNSIGNED)) {
2516 vtop->c.i = vtop->c.ld;
2517 } else if (sf && dbt == VT_BOOL) {
2518 vtop->c.i = (vtop->c.ld != 0);
2519 } else {
2520 if(sf)
2521 vtop->c.i = vtop->c.ld;
2522 else if (sbt == (VT_LLONG|VT_UNSIGNED))
2524 else if (sbt & VT_UNSIGNED)
2525 vtop->c.i = (uint32_t)vtop->c.i;
2526 #if PTR_SIZE == 8
2527 else if (sbt == VT_PTR)
2529 #endif
2530 else if (sbt != VT_LLONG)
2531 vtop->c.i = ((uint32_t)vtop->c.i |
2532 -(vtop->c.i & 0x80000000));
2534 if (dbt == (VT_LLONG|VT_UNSIGNED))
2536 else if (dbt == VT_BOOL)
2537 vtop->c.i = (vtop->c.i != 0);
2538 #if PTR_SIZE == 8
2539 else if (dbt == VT_PTR)
2541 #endif
2542 else if (dbt != VT_LLONG) {
2543 uint32_t m = ((dbt & VT_BTYPE) == VT_BYTE ? 0xff :
2544 (dbt & VT_BTYPE) == VT_SHORT ? 0xffff :
2545 0xffffffff);
2546 vtop->c.i &= m;
2547 if (!(dbt & VT_UNSIGNED))
2548 vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
2551 } else if (p && dbt == VT_BOOL) {
2552 vtop->r = VT_CONST;
2553 vtop->c.i = 1;
2554 } else {
2555 /* non constant case: generate code */
2556 if (sf && df) {
2557 /* convert from fp to fp */
2558 gen_cvt_ftof(dbt);
2559 } else if (df) {
2560 /* convert int to fp */
2561 gen_cvt_itof1(dbt);
2562 } else if (sf) {
2563 /* convert fp to int */
2564 if (dbt == VT_BOOL) {
2565 vpushi(0);
2566 gen_op(TOK_NE);
2567 } else {
2568 /* we handle char/short/etc... with generic code */
2569 if (dbt != (VT_INT | VT_UNSIGNED) &&
2570 dbt != (VT_LLONG | VT_UNSIGNED) &&
2571 dbt != VT_LLONG)
2572 dbt = VT_INT;
2573 gen_cvt_ftoi1(dbt);
2574 if (dbt == VT_INT && (type->t & (VT_BTYPE | VT_UNSIGNED)) != dbt) {
2575 /* additional cast for char/short... */
2576 vtop->type.t = dbt;
2577 gen_cast(type);
2580 #if PTR_SIZE == 4
2581 } else if ((dbt & VT_BTYPE) == VT_LLONG) {
2582 if ((sbt & VT_BTYPE) != VT_LLONG) {
2583 /* scalar to long long */
2584 /* machine independent conversion */
2585 gv(RC_INT);
2586 /* generate high word */
2587 if (sbt == (VT_INT | VT_UNSIGNED)) {
2588 vpushi(0);
2589 gv(RC_INT);
2590 } else {
2591 if (sbt == VT_PTR) {
2592 /* cast from pointer to int before we apply
2593 shift operation, which pointers don't support */
2594 gen_cast_s(VT_INT);
2596 gv_dup();
2597 vpushi(31);
2598 gen_op(TOK_SAR);
2600 /* patch second register */
2601 vtop[-1].r2 = vtop->r;
2602 vpop();
2604 #else
2605 } else if ((dbt & VT_BTYPE) == VT_LLONG ||
2606 (dbt & VT_BTYPE) == VT_PTR ||
2607 (dbt & VT_BTYPE) == VT_FUNC) {
2608 if ((sbt & VT_BTYPE) != VT_LLONG &&
2609 (sbt & VT_BTYPE) != VT_PTR &&
2610 (sbt & VT_BTYPE) != VT_FUNC) {
2611 /* need to convert from 32bit to 64bit */
2612 gv(RC_INT);
2613 if (sbt != (VT_INT | VT_UNSIGNED)) {
2614 #if defined(TCC_TARGET_ARM64)
2615 gen_cvt_sxtw();
2616 #elif defined(TCC_TARGET_X86_64)
2617 int r = gv(RC_INT);
2618 /* x86_64 specific: movslq */
2619 o(0x6348);
2620 o(0xc0 + (REG_VALUE(r) << 3) + REG_VALUE(r));
2621 #else
2622 #error
2623 #endif
2626 #endif
2627 } else if (dbt == VT_BOOL) {
2628 /* scalar to bool */
2629 vpushi(0);
2630 gen_op(TOK_NE);
2631 } else if ((dbt & VT_BTYPE) == VT_BYTE ||
2632 (dbt & VT_BTYPE) == VT_SHORT) {
2633 if (sbt == VT_PTR) {
2634 vtop->type.t = VT_INT;
2635 tcc_warning("nonportable conversion from pointer to char/short");
2637 force_charshort_cast(dbt);
2638 } else if ((dbt & VT_BTYPE) == VT_INT) {
2639 /* scalar to int */
2640 if ((sbt & VT_BTYPE) == VT_LLONG) {
2641 #if PTR_SIZE == 4
2642 /* from long long: just take low order word */
2643 lexpand();
2644 vpop();
2645 #else
2646 vpushi(0xffffffff);
2647 vtop->type.t |= VT_UNSIGNED;
2648 gen_op('&');
2649 #endif
2651 /* if lvalue and single word type, nothing to do because
2652 the lvalue already contains the real type size (see
2653 VT_LVAL_xxx constants) */
2656 } else if ((dbt & VT_BTYPE) == VT_PTR && !(vtop->r & VT_LVAL)) {
2657 /* if we are casting between pointer types,
2658 we must update the VT_LVAL_xxx size */
2659 vtop->r = (vtop->r & ~VT_LVAL_TYPE)
2660 | (lvalue_type(type->ref->type.t) & VT_LVAL_TYPE);
2662 vtop->type = *type;
2665 /* return type size as known at compile time. Put alignment at 'a' */
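/* e.g. this returns 4 (align 4) for int, PTR_SIZE for pointers,
   element size times element count for complete arrays, and a
   negative value for incomplete types such as "int []" or a
   not-yet-defined struct. */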
2666 ST_FUNC int type_size(CType *type, int *a)
2668 Sym *s;
2669 int bt;
2671 bt = type->t & VT_BTYPE;
2672 if (bt == VT_STRUCT) {
2673 /* struct/union */
2674 s = type->ref;
2675 *a = s->r;
2676 return s->c;
2677 } else if (bt == VT_PTR) {
2678 if (type->t & VT_ARRAY) {
2679 int ts;
2681 s = type->ref;
2682 ts = type_size(&s->type, a);
2684 if (ts < 0 && s->c < 0)
2685 ts = -ts;
2687 return ts * s->c;
2688 } else {
2689 *a = PTR_SIZE;
2690 return PTR_SIZE;
2692 } else if (IS_ENUM(type->t) && type->ref->c == -1) {
2693 return -1; /* incomplete enum */
2694 } else if (bt == VT_LDOUBLE) {
2695 *a = LDOUBLE_ALIGN;
2696 return LDOUBLE_SIZE;
2697 } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
2698 #ifdef TCC_TARGET_I386
2699 #ifdef TCC_TARGET_PE
2700 *a = 8;
2701 #else
2702 *a = 4;
2703 #endif
2704 #elif defined(TCC_TARGET_ARM)
2705 #ifdef TCC_ARM_EABI
2706 *a = 8;
2707 #else
2708 *a = 4;
2709 #endif
2710 #else
2711 *a = 8;
2712 #endif
2713 return 8;
2714 } else if (bt == VT_INT || bt == VT_FLOAT) {
2715 *a = 4;
2716 return 4;
2717 } else if (bt == VT_SHORT) {
2718 *a = 2;
2719 return 2;
2720 } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
2721 *a = 8;
2722 return 16;
2723 } else {
2724 /* char, void, function, _Bool */
2725 *a = 1;
2726 return 1;
2730 /* push type size as known at run time on top of the value stack. Put
2731 alignment at 'a' */
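/* For a VLA such as "int a[n]" the size is not a compile-time
   constant; it was stored in a local stack slot (at type->ref->c)
   when the declarator was parsed, and that slot is pushed as an
   lvalue here instead of an immediate. */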
2732 ST_FUNC void vla_runtime_type_size(CType *type, int *a)
2734 if (type->t & VT_VLA) {
2735 type_size(&type->ref->type, a);
2736 vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
2737 } else {
2738 vpushi(type_size(type, a));
2742 static void vla_sp_restore(void) {
2743 if (vlas_in_scope) {
2744 gen_vla_sp_restore(vla_sp_loc);
2748 static void vla_sp_restore_root(void) {
2749 if (vlas_in_scope) {
2750 gen_vla_sp_restore(vla_sp_root_loc);
2754 /* return the pointed-to type of t */
2755 static inline CType *pointed_type(CType *type)
2757 return &type->ref->type;
2760 /* modify type so that it becomes a pointer to the original type. */
2761 ST_FUNC void mk_pointer(CType *type)
2763 Sym *s;
2764 s = sym_push(SYM_FIELD, type, 0, -1);
2765 type->t = VT_PTR | (type->t & VT_STORAGE);
2766 type->ref = s;
2769 /* compare function types. OLD functions match any new functions */
2770 static int is_compatible_func(CType *type1, CType *type2)
2772 Sym *s1, *s2;
2774 s1 = type1->ref;
2775 s2 = type2->ref;
2776 if (!is_compatible_types(&s1->type, &s2->type))
2777 return 0;
2778 /* check func_call */
2779 if (s1->f.func_call != s2->f.func_call)
2780 return 0;
2781 /* XXX: not complete */
2782 if (s1->f.func_type == FUNC_OLD || s2->f.func_type == FUNC_OLD)
2783 return 1;
2784 if (s1->f.func_type != s2->f.func_type)
2785 return 0;
2786 while (s1 != NULL) {
2787 if (s2 == NULL)
2788 return 0;
2789 if (!is_compatible_unqualified_types(&s1->type, &s2->type))
2790 return 0;
2791 s1 = s1->next;
2792 s2 = s2->next;
2794 if (s2)
2795 return 0;
2796 return 1;
2799 /* return true if type1 and type2 are the same. If unqualified is
2800 true, qualifiers on the types are ignored.
2802 static int compare_types(CType *type1, CType *type2, int unqualified)
2804 int bt1, t1, t2;
2806 t1 = type1->t & VT_TYPE;
2807 t2 = type2->t & VT_TYPE;
2808 if (unqualified) {
2809 /* strip qualifiers before comparing */
2810 t1 &= ~(VT_CONSTANT | VT_VOLATILE);
2811 t2 &= ~(VT_CONSTANT | VT_VOLATILE);
2814 /* Default vs. explicit signedness only matters for char */
2815 if ((t1 & VT_BTYPE) != VT_BYTE) {
2816 t1 &= ~VT_DEFSIGN;
2817 t2 &= ~VT_DEFSIGN;
2819 /* XXX: bitfields ? */
2820 if (t1 != t2)
2821 return 0;
2822 /* test more complicated cases */
2823 bt1 = t1 & (VT_BTYPE | (unqualified ? 0 : VT_ARRAY) );
2824 if (bt1 == VT_PTR) {
2825 type1 = pointed_type(type1);
2826 type2 = pointed_type(type2);
2827 return is_compatible_types(type1, type2);
2828 } else if (bt1 & VT_ARRAY) {
2829 return type1->ref->c < 0 || type2->ref->c < 0
2830 || type1->ref->c == type2->ref->c;
2831 } else if (bt1 == VT_STRUCT) {
2832 return (type1->ref == type2->ref);
2833 } else if (bt1 == VT_FUNC) {
2834 return is_compatible_func(type1, type2);
2835 } else if (IS_ENUM(type1->t) || IS_ENUM(type2->t)) {
2836 return type1->ref == type2->ref;
2837 } else {
2838 return 1;
2842 /* return true if type1 and type2 are exactly the same (including
2843 qualifiers).
2845 static int is_compatible_types(CType *type1, CType *type2)
2847 return compare_types(type1,type2,0);
2850 /* return true if type1 and type2 are the same (ignoring qualifiers).
2852 static int is_compatible_unqualified_types(CType *type1, CType *type2)
2854 return compare_types(type1,type2,1);
2857 /* print a type. If 'varstr' is not NULL, then the variable is also
2858 printed in the type */
2859 /* XXX: union */
2860 /* XXX: add array and function pointers */
2861 static void type_to_str(char *buf, int buf_size,
2862 CType *type, const char *varstr)
2864 int bt, v, t;
2865 Sym *s, *sa;
2866 char buf1[256];
2867 const char *tstr;
2869 t = type->t;
2870 bt = t & VT_BTYPE;
2871 buf[0] = '\0';
2873 if (t & VT_EXTERN)
2874 pstrcat(buf, buf_size, "extern ");
2875 if (t & VT_STATIC)
2876 pstrcat(buf, buf_size, "static ");
2877 if (t & VT_TYPEDEF)
2878 pstrcat(buf, buf_size, "typedef ");
2879 if (t & VT_INLINE)
2880 pstrcat(buf, buf_size, "inline ");
2881 if (t & VT_VOLATILE)
2882 pstrcat(buf, buf_size, "volatile ");
2883 if (t & VT_CONSTANT)
2884 pstrcat(buf, buf_size, "const ");
2886 if (((t & VT_DEFSIGN) && bt == VT_BYTE)
2887 || ((t & VT_UNSIGNED)
2888 && (bt == VT_SHORT || bt == VT_INT || bt == VT_LLONG)
2889 && !IS_ENUM(t)
2891 pstrcat(buf, buf_size, (t & VT_UNSIGNED) ? "unsigned " : "signed ");
2893 buf_size -= strlen(buf);
2894 buf += strlen(buf);
2896 switch(bt) {
2897 case VT_VOID:
2898 tstr = "void";
2899 goto add_tstr;
2900 case VT_BOOL:
2901 tstr = "_Bool";
2902 goto add_tstr;
2903 case VT_BYTE:
2904 tstr = "char";
2905 goto add_tstr;
2906 case VT_SHORT:
2907 tstr = "short";
2908 goto add_tstr;
2909 case VT_INT:
2910 tstr = "int";
2911 goto maybe_long;
2912 case VT_LLONG:
2913 tstr = "long long";
2914 maybe_long:
2915 if (t & VT_LONG)
2916 tstr = "long";
2917 if (!IS_ENUM(t))
2918 goto add_tstr;
2919 tstr = "enum ";
2920 goto tstruct;
2921 case VT_FLOAT:
2922 tstr = "float";
2923 goto add_tstr;
2924 case VT_DOUBLE:
2925 tstr = "double";
2926 goto add_tstr;
2927 case VT_LDOUBLE:
2928 tstr = "long double";
2929 add_tstr:
2930 pstrcat(buf, buf_size, tstr);
2931 break;
2932 case VT_STRUCT:
2933 tstr = "struct ";
2934 if (IS_UNION(t))
2935 tstr = "union ";
2936 tstruct:
2937 pstrcat(buf, buf_size, tstr);
2938 v = type->ref->v & ~SYM_STRUCT;
2939 if (v >= SYM_FIRST_ANOM)
2940 pstrcat(buf, buf_size, "<anonymous>");
2941 else
2942 pstrcat(buf, buf_size, get_tok_str(v, NULL));
2943 break;
2944 case VT_FUNC:
2945 s = type->ref;
2946 buf1[0]=0;
2947 if (varstr && '*' == *varstr) {
2948 pstrcat(buf1, sizeof(buf1), "(");
2949 pstrcat(buf1, sizeof(buf1), varstr);
2950 pstrcat(buf1, sizeof(buf1), ")");
2952 pstrcat(buf1, sizeof(buf1), "(");
2953 sa = s->next;
2954 while (sa != NULL) {
2955 char buf2[256];
2956 type_to_str(buf2, sizeof(buf2), &sa->type, NULL);
2957 pstrcat(buf1, sizeof(buf1), buf2);
2958 sa = sa->next;
2959 if (sa)
2960 pstrcat(buf1, sizeof(buf1), ", ");
2962 if (s->f.func_type == FUNC_ELLIPSIS)
2963 pstrcat(buf1, sizeof(buf1), ", ...");
2964 pstrcat(buf1, sizeof(buf1), ")");
2965 type_to_str(buf, buf_size, &s->type, buf1);
2966 goto no_var;
2967 case VT_PTR:
2968 s = type->ref;
2969 if (t & VT_ARRAY) {
2970 if (varstr && '*' == *varstr)
2971 snprintf(buf1, sizeof(buf1), "(%s)[%d]", varstr, s->c);
2972 else
2973 snprintf(buf1, sizeof(buf1), "%s[%d]", varstr ? varstr : "", s->c);
2974 type_to_str(buf, buf_size, &s->type, buf1);
2975 goto no_var;
2977 pstrcpy(buf1, sizeof(buf1), "*");
2978 if (t & VT_CONSTANT)
2979 pstrcat(buf1, sizeof(buf1), "const ");
2980 if (t & VT_VOLATILE)
2981 pstrcat(buf1, sizeof(buf1), "volatile ");
2982 if (varstr)
2983 pstrcat(buf1, sizeof(buf1), varstr);
2984 type_to_str(buf, buf_size, &s->type, buf1);
2985 goto no_var;
2987 if (varstr) {
2988 pstrcat(buf, buf_size, " ");
2989 pstrcat(buf, buf_size, varstr);
2991 no_var: ;
2994 /* verify type compatibility to store vtop in 'dt' type, and generate
2995 casts if needed. */
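/* Illustrative examples of the diagnostics produced below:
     char *p = 1;                   -> warning: makes pointer from integer without a cast
     const char *s; char *p2 = s;   -> warning: discards qualifiers from pointer target type
     int i = p;                     -> warning: makes integer from pointer without a cast
     incompatible struct assignment -> error: cannot cast '...' to '...' */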
2996 static void gen_assign_cast(CType *dt)
2998 CType *st, *type1, *type2;
2999 char buf1[256], buf2[256];
3000 int dbt, sbt, qualwarn, lvl;
3002 st = &vtop->type; /* source type */
3003 dbt = dt->t & VT_BTYPE;
3004 sbt = st->t & VT_BTYPE;
3005 if (sbt == VT_VOID || dbt == VT_VOID) {
3006 if (sbt == VT_VOID && dbt == VT_VOID)
3007 ; /* It is Ok if both are void */
3008 else
3009 tcc_error("cannot cast from/to void");
3011 if (dt->t & VT_CONSTANT)
3012 tcc_warning("assignment of read-only location");
3013 switch(dbt) {
3014 case VT_PTR:
3015 /* special cases for pointers */
3016 /* '0' can also be a pointer */
3017 if (is_null_pointer(vtop))
3018 break;
3019 /* accept implicit integer to pointer conversion with a warning */
3020 if (is_integer_btype(sbt)) {
3021 tcc_warning("assignment makes pointer from integer without a cast");
3022 break;
3024 type1 = pointed_type(dt);
3025 if (sbt == VT_PTR)
3026 type2 = pointed_type(st);
3027 else if (sbt == VT_FUNC)
3028 type2 = st; /* a function is implicitly a function pointer */
3029 else
3030 goto error;
3031 if (is_compatible_types(type1, type2))
3032 break;
3033 for (qualwarn = lvl = 0;; ++lvl) {
3034 if (((type2->t & VT_CONSTANT) && !(type1->t & VT_CONSTANT)) ||
3035 ((type2->t & VT_VOLATILE) && !(type1->t & VT_VOLATILE)))
3036 qualwarn = 1;
3037 dbt = type1->t & (VT_BTYPE|VT_LONG);
3038 sbt = type2->t & (VT_BTYPE|VT_LONG);
3039 if (dbt != VT_PTR || sbt != VT_PTR)
3040 break;
3041 type1 = pointed_type(type1);
3042 type2 = pointed_type(type2);
3044 if (!is_compatible_unqualified_types(type1, type2)) {
3045 if ((dbt == VT_VOID || sbt == VT_VOID) && lvl == 0) {
3046 /* void * can match anything */
3047 } else if (dbt == sbt
3048 && is_integer_btype(sbt & VT_BTYPE)
3049 && IS_ENUM(type1->t) + IS_ENUM(type2->t)
3050 + !!((type1->t ^ type2->t) & VT_UNSIGNED) < 2) {
3051 /* Like GCC, don't warn by default for mere changes
3052 in pointer target signedness. Do warn for different
3053 base types, though, in particular for unsigned enums
3054 and signed int targets. */
3055 } else {
3056 tcc_warning("assignment from incompatible pointer type");
3057 break;
3060 if (qualwarn)
3061 tcc_warning("assignment discards qualifiers from pointer target type");
3062 break;
3063 case VT_BYTE:
3064 case VT_SHORT:
3065 case VT_INT:
3066 case VT_LLONG:
3067 if (sbt == VT_PTR || sbt == VT_FUNC) {
3068 tcc_warning("assignment makes integer from pointer without a cast");
3069 } else if (sbt == VT_STRUCT) {
3070 goto case_VT_STRUCT;
3072 /* XXX: more tests */
3073 break;
3074 case VT_STRUCT:
3075 case_VT_STRUCT:
3076 if (!is_compatible_unqualified_types(dt, st)) {
3077 error:
3078 type_to_str(buf1, sizeof(buf1), st, NULL);
3079 type_to_str(buf2, sizeof(buf2), dt, NULL);
3080 tcc_error("cannot cast '%s' to '%s'", buf1, buf2);
3082 break;
3084 gen_cast(dt);
3087 /* store vtop in lvalue pushed on stack */
3088 ST_FUNC void vstore(void)
3090 int sbt, dbt, ft, r, t, size, align, bit_size, bit_pos, rc, delayed_cast;
3092 ft = vtop[-1].type.t;
3093 sbt = vtop->type.t & VT_BTYPE;
3094 dbt = ft & VT_BTYPE;
3095 if ((((sbt == VT_INT || sbt == VT_SHORT) && dbt == VT_BYTE) ||
3096 (sbt == VT_INT && dbt == VT_SHORT))
3097 && !(vtop->type.t & VT_BITFIELD)) {
3098 /* optimize char/short casts */
3099 delayed_cast = VT_MUSTCAST;
3100 vtop->type.t = ft & VT_TYPE;
3101 /* XXX: factorize */
3102 if (ft & VT_CONSTANT)
3103 tcc_warning("assignment of read-only location");
3104 } else {
3105 delayed_cast = 0;
3106 if (!(ft & VT_BITFIELD))
3107 gen_assign_cast(&vtop[-1].type);
3110 if (sbt == VT_STRUCT) {
3111 /* if structure, only generate pointer */
3112 /* structure assignment : generate memcpy */
3113 /* XXX: optimize if small size */
3114 size = type_size(&vtop->type, &align);
3116 /* destination */
3117 vswap();
3118 vtop->type.t = VT_PTR;
3119 gaddrof();
3121 /* address of memcpy() */
3122 #ifdef TCC_ARM_EABI
3123 if(!(align & 7))
3124 vpush_global_sym(&func_old_type, TOK_memcpy8);
3125 else if(!(align & 3))
3126 vpush_global_sym(&func_old_type, TOK_memcpy4);
3127 else
3128 #endif
3129 /* Use memmove, rather than memcpy, as dest and src may be the same: */
3130 vpush_global_sym(&func_old_type, TOK_memmove);
3132 vswap();
3133 /* source */
3134 vpushv(vtop - 2);
3135 vtop->type.t = VT_PTR;
3136 gaddrof();
3137 /* type size */
3138 vpushi(size);
3139 gfunc_call(3);
3141 /* leave source on stack */
3142 } else if (ft & VT_BITFIELD) {
3143 /* bitfield store handling */
3145 /* save lvalue as expression result (example: s.b = s.a = n;) */
3146 vdup(), vtop[-1] = vtop[-2];
3148 bit_pos = BIT_POS(ft);
3149 bit_size = BIT_SIZE(ft);
3150 /* remove bit field info to avoid loops */
3151 vtop[-1].type.t = ft & ~VT_STRUCT_MASK;
3153 if ((ft & VT_BTYPE) == VT_BOOL) {
3154 gen_cast(&vtop[-1].type);
3155 vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
3158 r = adjust_bf(vtop - 1, bit_pos, bit_size);
3159 if (r == VT_STRUCT) {
3160 gen_cast_s((ft & VT_BTYPE) == VT_LLONG ? VT_LLONG : VT_INT);
3161 store_packed_bf(bit_pos, bit_size);
3162 } else {
3163 unsigned long long mask = (1ULL << bit_size) - 1;
3164 if ((ft & VT_BTYPE) != VT_BOOL) {
3165 /* mask source */
3166 if ((vtop[-1].type.t & VT_BTYPE) == VT_LLONG)
3167 vpushll(mask);
3168 else
3169 vpushi((unsigned)mask);
3170 gen_op('&');
3172 /* shift source */
3173 vpushi(bit_pos);
3174 gen_op(TOK_SHL);
3175 vswap();
3176 /* duplicate destination */
3177 vdup();
3178 vrott(3);
3179 /* load destination, mask and or with source */
3180 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
3181 vpushll(~(mask << bit_pos));
3182 else
3183 vpushi(~((unsigned)mask << bit_pos));
3184 gen_op('&');
3185 gen_op('|');
3186 /* store result */
3187 vstore();
3188 /* ... and discard */
3189 vpop();
3191 } else if (dbt == VT_VOID) {
3192 --vtop;
3193 } else {
3194 #ifdef CONFIG_TCC_BCHECK
3195 /* bound check case */
3196 if (vtop[-1].r & VT_MUSTBOUND) {
3197 vswap();
3198 gbound();
3199 vswap();
3201 #endif
3202 rc = RC_INT;
3203 if (is_float(ft)) {
3204 rc = RC_FLOAT;
3205 #ifdef TCC_TARGET_X86_64
3206 if ((ft & VT_BTYPE) == VT_LDOUBLE) {
3207 rc = RC_ST0;
3208 } else if ((ft & VT_BTYPE) == VT_QFLOAT) {
3209 rc = RC_FRET;
3211 #endif
3213 r = gv(rc); /* generate value */
3214 /* if lvalue was saved on stack, must read it */
3215 if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
3216 SValue sv;
3217 t = get_reg(RC_INT);
3218 #if PTR_SIZE == 8
3219 sv.type.t = VT_PTR;
3220 #else
3221 sv.type.t = VT_INT;
3222 #endif
3223 sv.r = VT_LOCAL | VT_LVAL;
3224 sv.c.i = vtop[-1].c.i;
3225 load(t, &sv);
3226 vtop[-1].r = t | VT_LVAL;
3228 /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
3229 #if PTR_SIZE == 8
3230 if (((ft & VT_BTYPE) == VT_QLONG) || ((ft & VT_BTYPE) == VT_QFLOAT)) {
3231 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
3232 #else
3233 if ((ft & VT_BTYPE) == VT_LLONG) {
3234 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
3235 #endif
3236 vtop[-1].type.t = load_type;
3237 store(r, vtop - 1);
3238 vswap();
3239 /* convert to int to increment easily */
3240 vtop->type.t = addr_type;
3241 gaddrof();
3242 vpushi(load_size);
3243 gen_op('+');
3244 vtop->r |= VT_LVAL;
3245 vswap();
3246 vtop[-1].type.t = load_type;
3247 /* XXX: it works because r2 is spilled last ! */
3248 store(vtop->r2, vtop - 1);
3249 } else {
3250 store(r, vtop - 1);
3253 vswap();
3254 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
3255 vtop->r |= delayed_cast;
3259 /* post defines POST/PRE add. c is the token ++ or -- */
3260 ST_FUNC void inc(int post, int c)
3262 test_lvalue();
3263 vdup(); /* save lvalue */
3264 if (post) {
3265 gv_dup(); /* duplicate value */
3266 vrotb(3);
3267 vrotb(3);
3269 /* add constant */
3270 vpushi(c - TOK_MID);
3271 gen_op('+');
3272 vstore(); /* store value */
3273 if (post)
3274 vpop(); /* if post op, return saved value */
3277 ST_FUNC void parse_mult_str (CString *astr, const char *msg)
3279 /* read the string */
3280 if (tok != TOK_STR)
3281 expect(msg);
3282 cstr_new(astr);
3283 while (tok == TOK_STR) {
3284 /* XXX: add \0 handling too ? */
3285 cstr_cat(astr, tokc.str.data, -1);
3286 next();
3288 cstr_ccat(astr, '\0');
3291 /* If I is >= 1 and a power of two, returns log2(i)+1.
3292 If I is 0 returns 0. */
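/* e.g. exact_log2p1(1) == 1, exact_log2p1(8) == 4,
   exact_log2p1(4096) == 13; this is used below to store alignments
   compactly as log2(align) + 1. */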
3293 static int exact_log2p1(int i)
3295 int ret;
3296 if (!i)
3297 return 0;
3298 for (ret = 1; i >= 1 << 8; ret += 8)
3299 i >>= 8;
3300 if (i >= 1 << 4)
3301 ret += 4, i >>= 4;
3302 if (i >= 1 << 2)
3303 ret += 2, i >>= 2;
3304 if (i >= 1 << 1)
3305 ret++;
3306 return ret;
3309 /* Parse __attribute__((...)) GNUC extension. */
3310 static void parse_attribute(AttributeDef *ad)
3312 int t, n;
3313 CString astr;
3315 redo:
3316 if (tok != TOK_ATTRIBUTE1 && tok != TOK_ATTRIBUTE2)
3317 return;
3318 next();
3319 skip('(');
3320 skip('(');
3321 while (tok != ')') {
3322 if (tok < TOK_IDENT)
3323 expect("attribute name");
3324 t = tok;
3325 next();
3326 switch(t) {
3327 case TOK_SECTION1:
3328 case TOK_SECTION2:
3329 skip('(');
3330 parse_mult_str(&astr, "section name");
3331 ad->section = find_section(tcc_state, (char *)astr.data);
3332 skip(')');
3333 cstr_free(&astr);
3334 break;
3335 case TOK_ALIAS1:
3336 case TOK_ALIAS2:
3337 skip('(');
3338 parse_mult_str(&astr, "alias(\"target\")");
3339 ad->alias_target = /* save string as token, for later */
3340 tok_alloc((char*)astr.data, astr.size-1)->tok;
3341 skip(')');
3342 cstr_free(&astr);
3343 break;
3344 case TOK_VISIBILITY1:
3345 case TOK_VISIBILITY2:
3346 skip('(');
3347 parse_mult_str(&astr,
3348 "visibility(\"default|hidden|internal|protected\")");
3349 if (!strcmp (astr.data, "default"))
3350 ad->a.visibility = STV_DEFAULT;
3351 else if (!strcmp (astr.data, "hidden"))
3352 ad->a.visibility = STV_HIDDEN;
3353 else if (!strcmp (astr.data, "internal"))
3354 ad->a.visibility = STV_INTERNAL;
3355 else if (!strcmp (astr.data, "protected"))
3356 ad->a.visibility = STV_PROTECTED;
3357 else
3358 expect("visibility(\"default|hidden|internal|protected\")");
3359 skip(')');
3360 cstr_free(&astr);
3361 break;
3362 case TOK_ALIGNED1:
3363 case TOK_ALIGNED2:
3364 if (tok == '(') {
3365 next();
3366 n = expr_const();
3367 if (n <= 0 || (n & (n - 1)) != 0)
3368 tcc_error("alignment must be a positive power of two");
3369 skip(')');
3370 } else {
3371 n = MAX_ALIGN;
3373 ad->a.aligned = exact_log2p1(n);
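/* e.g. __attribute__((aligned(16))) stores exact_log2p1(16) == 5
   here, which struct_layout() later decodes as 1 << (5 - 1) == 16. */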
3374 if (n != 1 << (ad->a.aligned - 1))
3375 tcc_error("alignment of %d is larger than implemented", n);
3376 break;
3377 case TOK_PACKED1:
3378 case TOK_PACKED2:
3379 ad->a.packed = 1;
3380 break;
3381 case TOK_WEAK1:
3382 case TOK_WEAK2:
3383 ad->a.weak = 1;
3384 break;
3385 case TOK_UNUSED1:
3386 case TOK_UNUSED2:
3387 /* currently, no need to handle it because tcc does not
3388 track unused objects */
3389 break;
3390 case TOK_NORETURN1:
3391 case TOK_NORETURN2:
3392 /* currently ignored: tcc does not make use of the
3393 noreturn information */
3394 break;
3395 case TOK_CDECL1:
3396 case TOK_CDECL2:
3397 case TOK_CDECL3:
3398 ad->f.func_call = FUNC_CDECL;
3399 break;
3400 case TOK_STDCALL1:
3401 case TOK_STDCALL2:
3402 case TOK_STDCALL3:
3403 ad->f.func_call = FUNC_STDCALL;
3404 break;
3405 #ifdef TCC_TARGET_I386
3406 case TOK_REGPARM1:
3407 case TOK_REGPARM2:
3408 skip('(');
3409 n = expr_const();
3410 if (n > 3)
3411 n = 3;
3412 else if (n < 0)
3413 n = 0;
3414 if (n > 0)
3415 ad->f.func_call = FUNC_FASTCALL1 + n - 1;
3416 skip(')');
3417 break;
3418 case TOK_FASTCALL1:
3419 case TOK_FASTCALL2:
3420 case TOK_FASTCALL3:
3421 ad->f.func_call = FUNC_FASTCALLW;
3422 break;
3423 #endif
3424 case TOK_MODE:
3425 skip('(');
3426 switch(tok) {
3427 case TOK_MODE_DI:
3428 ad->attr_mode = VT_LLONG + 1;
3429 break;
3430 case TOK_MODE_QI:
3431 ad->attr_mode = VT_BYTE + 1;
3432 break;
3433 case TOK_MODE_HI:
3434 ad->attr_mode = VT_SHORT + 1;
3435 break;
3436 case TOK_MODE_SI:
3437 case TOK_MODE_word:
3438 ad->attr_mode = VT_INT + 1;
3439 break;
3440 default:
3441 tcc_warning("__mode__(%s) not supported", get_tok_str(tok, NULL));
3442 break;
3444 next();
3445 skip(')');
3446 break;
3447 case TOK_DLLEXPORT:
3448 ad->a.dllexport = 1;
3449 break;
3450 case TOK_NODECORATE:
3451 ad->a.nodecorate = 1;
3452 break;
3453 case TOK_DLLIMPORT:
3454 ad->a.dllimport = 1;
3455 break;
3456 default:
3457 if (tcc_state->warn_unsupported)
3458 tcc_warning("'%s' attribute ignored", get_tok_str(t, NULL));
3459 /* skip parameters */
3460 if (tok == '(') {
3461 int parenthesis = 0;
3462 do {
3463 if (tok == '(')
3464 parenthesis++;
3465 else if (tok == ')')
3466 parenthesis--;
3467 next();
3468 } while (parenthesis && tok != -1);
3470 break;
3472 if (tok != ',')
3473 break;
3474 next();
3476 skip(')');
3477 skip(')');
3478 goto redo;
3481 static Sym * find_field (CType *type, int v)
3483 Sym *s = type->ref;
3484 v |= SYM_FIELD;
3485 while ((s = s->next) != NULL) {
3486 if ((s->v & SYM_FIELD) &&
3487 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3488 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3489 Sym *ret = find_field (&s->type, v);
3490 if (ret)
3491 return ret;
3493 if (s->v == v)
3494 break;
3496 return s;
3499 static void struct_add_offset (Sym *s, int offset)
3501 while ((s = s->next) != NULL) {
3502 if ((s->v & SYM_FIELD) &&
3503 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3504 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3505 struct_add_offset(s->type.ref, offset);
3506 } else
3507 s->c += offset;
3511 static void struct_layout(CType *type, AttributeDef *ad)
3513 int size, align, maxalign, offset, c, bit_pos, bit_size;
3514 int packed, a, bt, prevbt, prev_bit_size;
3515 int pcc = !tcc_state->ms_bitfields;
3516 int pragma_pack = *tcc_state->pack_stack_ptr;
3517 Sym *f;
3519 maxalign = 1;
3520 offset = 0;
3521 c = 0;
3522 bit_pos = 0;
3523 prevbt = VT_STRUCT; /* make it never match */
3524 prev_bit_size = 0;
3526 //#define BF_DEBUG
3528 for (f = type->ref->next; f; f = f->next) {
3529 if (f->type.t & VT_BITFIELD)
3530 bit_size = BIT_SIZE(f->type.t);
3531 else
3532 bit_size = -1;
3533 size = type_size(&f->type, &align);
3534 a = f->a.aligned ? 1 << (f->a.aligned - 1) : 0;
3535 packed = 0;
3537 if (pcc && bit_size == 0) {
3538 /* in pcc mode, packing does not affect zero-width bitfields */
3540 } else {
3541 /* in pcc mode, attribute packed overrides if set. */
3542 if (pcc && (f->a.packed || ad->a.packed))
3543 align = packed = 1;
3545 /* pragma pack overrides align if it is smaller, and always packs bitfields */
3546 if (pragma_pack) {
3547 packed = 1;
3548 if (pragma_pack < align)
3549 align = pragma_pack;
3550 /* in pcc mode pragma pack also overrides individual align */
3551 if (pcc && pragma_pack < a)
3552 a = 0;
3555 /* some individual align was specified */
3556 if (a)
3557 align = a;
3559 if (type->ref->type.t == VT_UNION) {
3560 if (pcc && bit_size >= 0)
3561 size = (bit_size + 7) >> 3;
3562 offset = 0;
3563 if (size > c)
3564 c = size;
3566 } else if (bit_size < 0) {
3567 if (pcc)
3568 c += (bit_pos + 7) >> 3;
3569 c = (c + align - 1) & -align;
3570 offset = c;
3571 if (size > 0)
3572 c += size;
3573 bit_pos = 0;
3574 prevbt = VT_STRUCT;
3575 prev_bit_size = 0;
3577 } else {
3578 /* A bit-field. Layout is more complicated. There are two
3579 options: PCC (GCC) compatible and MS compatible */
3580 if (pcc) {
3581 /* In PCC layout a bit-field is placed adjacent to the
3582 preceding bit-fields, except if:
3583 - it has zero-width
3584 - an individual alignment was given
3585 - it would overflow its base type container and
3586 there is no packing */
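/* Worked example for the overflow rule above (4-byte int, no
   packing): in "struct { int a:20; int b:16; }" the field "b" would
   end at bit 36 of a's 4-byte container, so it starts a new unit at
   offset 4 and the struct ends up 8 bytes large. */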
3587 if (bit_size == 0) {
3588 new_field:
3589 c = (c + ((bit_pos + 7) >> 3) + align - 1) & -align;
3590 bit_pos = 0;
3591 } else if (f->a.aligned) {
3592 goto new_field;
3593 } else if (!packed) {
3594 int a8 = align * 8;
3595 int ofs = ((c * 8 + bit_pos) % a8 + bit_size + a8 - 1) / a8;
3596 if (ofs > size / align)
3597 goto new_field;
3600 /* in pcc mode, long long bitfields have type int if they fit */
3601 if (size == 8 && bit_size <= 32)
3602 f->type.t = (f->type.t & ~VT_BTYPE) | VT_INT, size = 4;
3604 while (bit_pos >= align * 8)
3605 c += align, bit_pos -= align * 8;
3606 offset = c;
3608 /* In PCC layout named bit-fields influence the alignment
3609 of the containing struct using the base type's alignment,
3610 except for packed fields (which here have correct align). */
3611 if (f->v & SYM_FIRST_ANOM
3612 // && bit_size // ??? gcc on ARM/rpi does that
3614 align = 1;
3616 } else {
3617 bt = f->type.t & VT_BTYPE;
3618 if ((bit_pos + bit_size > size * 8)
3619 || (bit_size > 0) == (bt != prevbt)
3621 c = (c + align - 1) & -align;
3622 offset = c;
3623 bit_pos = 0;
3624 /* In MS bitfield mode a bit-field run always uses
3625 at least as many bits as the underlying type.
3626 To start a new run it's also required that this
3627 or the last bit-field had non-zero width. */
3628 if (bit_size || prev_bit_size)
3629 c += size;
3631 /* In MS layout the record's alignment is normally
3632 influenced by the field, except for a zero-width
3633 field at the start of a run (but by further zero-width
3634 fields it is again). */
3635 if (bit_size == 0 && prevbt != bt)
3636 align = 1;
3637 prevbt = bt;
3638 prev_bit_size = bit_size;
3641 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
3642 | (bit_pos << VT_STRUCT_SHIFT);
3643 bit_pos += bit_size;
3645 if (align > maxalign)
3646 maxalign = align;
3648 #ifdef BF_DEBUG
3649 printf("set field %s offset %-2d size %-2d align %-2d",
3650 get_tok_str(f->v & ~SYM_FIELD, NULL), offset, size, align);
3651 if (f->type.t & VT_BITFIELD) {
3652 printf(" pos %-2d bits %-2d",
3653 BIT_POS(f->type.t),
3654 BIT_SIZE(f->type.t)
3657 printf("\n");
3658 #endif
3660 if (f->v & SYM_FIRST_ANOM && (f->type.t & VT_BTYPE) == VT_STRUCT) {
3661 Sym *ass;
3662 /* An anonymous struct/union. Adjust member offsets
3663 to reflect the real offset of our containing struct.
3664 Also set the offset of this anon member inside
3665 the outer struct to be zero. This way it works both
3666 when accessing the field offset directly
3667 (from the base object) and when recursing into
3668 members during initializer handling. */
3669 int v2 = f->type.ref->v;
3670 if (!(v2 & SYM_FIELD) &&
3671 (v2 & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
3672 Sym **pps;
3673 /* This happens only with MS extensions. The
3674 anon member has a named struct type, so it
3675 potentially is shared with other references.
3676 We need to unshare members so we can modify
3677 them. */
3678 ass = f->type.ref;
3679 f->type.ref = sym_push(anon_sym++ | SYM_FIELD,
3680 &f->type.ref->type, 0,
3681 f->type.ref->c);
3682 pps = &f->type.ref->next;
3683 while ((ass = ass->next) != NULL) {
3684 *pps = sym_push(ass->v, &ass->type, 0, ass->c);
3685 pps = &((*pps)->next);
3687 *pps = NULL;
3689 struct_add_offset(f->type.ref, offset);
3690 f->c = 0;
3691 } else {
3692 f->c = offset;
3695 f->r = 0;
3698 if (pcc)
3699 c += (bit_pos + 7) >> 3;
3701 /* store size and alignment */
3702 a = bt = ad->a.aligned ? 1 << (ad->a.aligned - 1) : 1;
3703 if (a < maxalign)
3704 a = maxalign;
3705 type->ref->r = a;
3706 if (pragma_pack && pragma_pack < maxalign && 0 == pcc) {
3707 /* can happen if individual align for some member was given. In
3708 this case MSVC ignores maxalign when aligning the size */
3709 a = pragma_pack;
3710 if (a < bt)
3711 a = bt;
3713 c = (c + a - 1) & -a;
3714 type->ref->c = c;
3716 #ifdef BF_DEBUG
3717 printf("struct size %-2d align %-2d\n\n", c, a), fflush(stdout);
3718 #endif
3720 /* check whether we can access bitfields by their type */
3721 for (f = type->ref->next; f; f = f->next) {
3722 int s, px, cx, c0;
3723 CType t;
3725 if (0 == (f->type.t & VT_BITFIELD))
3726 continue;
3727 f->type.ref = f;
3728 f->auxtype = -1;
3729 bit_size = BIT_SIZE(f->type.t);
3730 if (bit_size == 0)
3731 continue;
3732 bit_pos = BIT_POS(f->type.t);
3733 size = type_size(&f->type, &align);
3734 if (bit_pos + bit_size <= size * 8 && f->c + size <= c)
3735 continue;
3737 /* try to access the field using a different type */
3738 c0 = -1, s = align = 1;
3739 for (;;) {
3740 px = f->c * 8 + bit_pos;
3741 cx = (px >> 3) & -align;
3742 px = px - (cx << 3);
3743 if (c0 == cx)
3744 break;
3745 s = (px + bit_size + 7) >> 3;
3746 if (s > 4) {
3747 t.t = VT_LLONG;
3748 } else if (s > 2) {
3749 t.t = VT_INT;
3750 } else if (s > 1) {
3751 t.t = VT_SHORT;
3752 } else {
3753 t.t = VT_BYTE;
3755 s = type_size(&t, &align);
3756 c0 = cx;
3759 if (px + bit_size <= s * 8 && cx + s <= c) {
3760 /* update offset and bit position */
3761 f->c = cx;
3762 bit_pos = px;
3763 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
3764 | (bit_pos << VT_STRUCT_SHIFT);
3765 if (s != size)
3766 f->auxtype = t.t;
3767 #ifdef BF_DEBUG
3768 printf("FIX field %s offset %-2d size %-2d align %-2d "
3769 "pos %-2d bits %-2d\n",
3770 get_tok_str(f->v & ~SYM_FIELD, NULL),
3771 cx, s, align, px, bit_size);
3772 #endif
3773 } else {
3774 /* fall back to load/store single-byte wise */
3775 f->auxtype = VT_STRUCT;
3776 #ifdef BF_DEBUG
3777 printf("FIX field %s : load byte-wise\n",
3778 get_tok_str(f->v & ~SYM_FIELD, NULL));
3779 #endif
3784 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
3785 static void struct_decl(CType *type, int u)
3787 int v, c, size, align, flexible;
3788 int bit_size, bsize, bt;
3789 Sym *s, *ss, **ps;
3790 AttributeDef ad, ad1;
3791 CType type1, btype;
3793 memset(&ad, 0, sizeof ad);
3794 next();
3795 parse_attribute(&ad);
3796 if (tok != '{') {
3797 v = tok;
3798 next();
3799 /* struct already defined ? return it */
3800 if (v < TOK_IDENT)
3801 expect("struct/union/enum name");
3802 s = struct_find(v);
3803 if (s && (s->sym_scope == local_scope || tok != '{')) {
3804 if (u == s->type.t)
3805 goto do_decl;
3806 if (u == VT_ENUM && IS_ENUM(s->type.t))
3807 goto do_decl;
3808 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
3810 } else {
3811 v = anon_sym++;
3813 /* Record the original enum/struct/union token. */
3814 type1.t = u == VT_ENUM ? u | VT_INT | VT_UNSIGNED : u;
3815 type1.ref = NULL;
3816 /* we put an undefined size for struct/union */
3817 s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
3818 s->r = 0; /* default alignment is zero, as in gcc */
3819 do_decl:
3820 type->t = s->type.t;
3821 type->ref = s;
3823 if (tok == '{') {
3824 next();
3825 if (s->c != -1)
3826 tcc_error("struct/union/enum already defined");
3827 /* cannot be empty */
3828 /* empty enums are not allowed */
3829 ps = &s->next;
3830 if (u == VT_ENUM) {
3831 long long ll = 0, pl = 0, nl = 0;
3832 CType t;
3833 t.ref = s;
3834 /* enum symbols have static storage */
3835 t.t = VT_INT|VT_STATIC|VT_ENUM_VAL;
3836 for(;;) {
3837 v = tok;
3838 if (v < TOK_UIDENT)
3839 expect("identifier");
3840 ss = sym_find(v);
3841 if (ss && !local_stack)
3842 tcc_error("redefinition of enumerator '%s'",
3843 get_tok_str(v, NULL));
3844 next();
3845 if (tok == '=') {
3846 next();
3847 ll = expr_const64();
3849 ss = sym_push(v, &t, VT_CONST, 0);
3850 ss->enum_val = ll;
3851 *ps = ss, ps = &ss->next;
3852 if (ll < nl)
3853 nl = ll;
3854 if (ll > pl)
3855 pl = ll;
3856 if (tok != ',')
3857 break;
3858 next();
3859 ll++;
3860 /* NOTE: we accept a trailing comma */
3861 if (tok == '}')
3862 break;
3864 skip('}');
3865 /* set integral type of the enum */
3866 t.t = VT_INT;
3867 if (nl >= 0) {
3868 if (pl != (unsigned)pl)
3869 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
3870 t.t |= VT_UNSIGNED;
3871 } else if (pl != (int)pl || nl != (int)nl)
3872 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
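/* e.g. "enum { A = -1 }" keeps plain int, while
   "enum { B = 0x100000000 }" gets an unsigned 64-bit type here
   because its largest value does not fit in unsigned int. */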
3873 s->type.t = type->t = t.t | VT_ENUM;
3874 s->c = 0;
3875 /* set type for enum members */
3876 for (ss = s->next; ss; ss = ss->next) {
3877 ll = ss->enum_val;
3878 if (ll == (int)ll) /* default is int if it fits */
3879 continue;
3880 if (t.t & VT_UNSIGNED) {
3881 ss->type.t |= VT_UNSIGNED;
3882 if (ll == (unsigned)ll)
3883 continue;
3885 ss->type.t = (ss->type.t & ~VT_BTYPE)
3886 | (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
3888 } else {
3889 c = 0;
3890 flexible = 0;
3891 while (tok != '}') {
3892 if (!parse_btype(&btype, &ad1)) {
3893 skip(';');
3894 continue;
3896 while (1) {
3897 if (flexible)
3898 tcc_error("flexible array member '%s' not at the end of struct",
3899 get_tok_str(v, NULL));
3900 bit_size = -1;
3901 v = 0;
3902 type1 = btype;
3903 if (tok != ':') {
3904 if (tok != ';')
3905 type_decl(&type1, &ad1, &v, TYPE_DIRECT);
3906 if (v == 0) {
3907 if ((type1.t & VT_BTYPE) != VT_STRUCT)
3908 expect("identifier");
3909 else {
3910 int v = btype.ref->v;
3911 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
3912 if (tcc_state->ms_extensions == 0)
3913 expect("identifier");
3917 if (type_size(&type1, &align) < 0) {
3918 if ((u == VT_STRUCT) && (type1.t & VT_ARRAY) && c)
3919 flexible = 1;
3920 else
3921 tcc_error("field '%s' has incomplete type",
3922 get_tok_str(v, NULL));
3924 if ((type1.t & VT_BTYPE) == VT_FUNC ||
3925 (type1.t & VT_BTYPE) == VT_VOID ||
3926 (type1.t & VT_STORAGE))
3927 tcc_error("invalid type for '%s'",
3928 get_tok_str(v, NULL));
3930 if (tok == ':') {
3931 next();
3932 bit_size = expr_const();
3933 /* XXX: handle v = 0 case for messages */
3934 if (bit_size < 0)
3935 tcc_error("negative width in bit-field '%s'",
3936 get_tok_str(v, NULL));
3937 if (v && bit_size == 0)
3938 tcc_error("zero width for bit-field '%s'",
3939 get_tok_str(v, NULL));
3940 parse_attribute(&ad1);
3942 size = type_size(&type1, &align);
3943 if (bit_size >= 0) {
3944 bt = type1.t & VT_BTYPE;
3945 if (bt != VT_INT &&
3946 bt != VT_BYTE &&
3947 bt != VT_SHORT &&
3948 bt != VT_BOOL &&
3949 bt != VT_LLONG)
3950 tcc_error("bitfields must have scalar type");
3951 bsize = size * 8;
3952 if (bit_size > bsize) {
3953 tcc_error("width of '%s' exceeds its type",
3954 get_tok_str(v, NULL));
3955 } else if (bit_size == bsize
3956 && !ad.a.packed && !ad1.a.packed) {
3957 /* no need for bit fields */
3959 } else if (bit_size == 64) {
3960 tcc_error("field width 64 not implemented");
3961 } else {
3962 type1.t = (type1.t & ~VT_STRUCT_MASK)
3963 | VT_BITFIELD
3964 | (bit_size << (VT_STRUCT_SHIFT + 6));
3967 if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
3968 /* Remember we've seen a real field to check
3969 for placement of flexible array member. */
3970 c = 1;
3972 /* If member is a struct or bit-field, enforce
3973 placing into the struct (as anonymous). */
3974 if (v == 0 &&
3975 ((type1.t & VT_BTYPE) == VT_STRUCT ||
3976 bit_size >= 0)) {
3977 v = anon_sym++;
3979 if (v) {
3980 ss = sym_push(v | SYM_FIELD, &type1, 0, 0);
3981 ss->a = ad1.a;
3982 *ps = ss;
3983 ps = &ss->next;
3985 if (tok == ';' || tok == TOK_EOF)
3986 break;
3987 skip(',');
3989 skip(';');
3991 skip('}');
3992 parse_attribute(&ad);
3993 struct_layout(type, &ad);
3998 static void sym_to_attr(AttributeDef *ad, Sym *s)
4000 if (s->a.aligned && 0 == ad->a.aligned)
4001 ad->a.aligned = s->a.aligned;
4002 if (s->f.func_call && 0 == ad->f.func_call)
4003 ad->f.func_call = s->f.func_call;
4004 if (s->f.func_type && 0 == ad->f.func_type)
4005 ad->f.func_type = s->f.func_type;
4006 if (s->a.packed)
4007 ad->a.packed = 1;
4010 /* Add type qualifiers to a type. If the type is an array then the qualifiers
4011 are added to the element type, copied because it could be a typedef. */
4012 static void parse_btype_qualify(CType *type, int qualifiers)
4014 while (type->t & VT_ARRAY) {
4015 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
4016 type = &type->ref->type;
4018 type->t |= qualifiers;
4021 /* return 0 if no type declaration. otherwise, return the basic type
4022 and skip it.
4024 static int parse_btype(CType *type, AttributeDef *ad)
4026 int t, u, bt, st, type_found, typespec_found, g;
4027 Sym *s;
4028 CType type1;
4030 memset(ad, 0, sizeof(AttributeDef));
4031 type_found = 0;
4032 typespec_found = 0;
4033 t = VT_INT;
4034 bt = st = -1;
4035 type->ref = NULL;
4037 while(1) {
4038 switch(tok) {
4039 case TOK_EXTENSION:
4040 /* currently, we really ignore extension */
4041 next();
4042 continue;
4044 /* basic types */
4045 case TOK_CHAR:
4046 u = VT_BYTE;
4047 basic_type:
4048 next();
4049 basic_type1:
4050 if (u == VT_SHORT || u == VT_LONG) {
4051 if (st != -1 || (bt != -1 && bt != VT_INT))
4052 tmbt: tcc_error("too many basic types");
4053 st = u;
4054 } else {
4055 if (bt != -1 || (st != -1 && u != VT_INT))
4056 goto tmbt;
4057 bt = u;
4059 if (u != VT_INT)
4060 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
4061 typespec_found = 1;
4062 break;
4063 case TOK_VOID:
4064 u = VT_VOID;
4065 goto basic_type;
4066 case TOK_SHORT:
4067 u = VT_SHORT;
4068 goto basic_type;
4069 case TOK_INT:
4070 u = VT_INT;
4071 goto basic_type;
4072 case TOK_LONG:
4073 if ((t & VT_BTYPE) == VT_DOUBLE) {
4074 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
4075 } else if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
4076 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LLONG;
4077 } else {
4078 u = VT_LONG;
4079 goto basic_type;
4081 next();
4082 break;
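/* e.g. in "long long x" the second "long" lands in the case above
   with VT_LONG already set and upgrades it to VT_LLONG; "long
   double" is combined the same way in the TOK_DOUBLE case below. */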
4083 #ifdef TCC_TARGET_ARM64
4084 case TOK_UINT128:
4085 /* GCC's __uint128_t appears in some Linux header files. Make it a
4086 synonym for long double to get the size and alignment right. */
4087 u = VT_LDOUBLE;
4088 goto basic_type;
4089 #endif
4090 case TOK_BOOL:
4091 u = VT_BOOL;
4092 goto basic_type;
4093 case TOK_FLOAT:
4094 u = VT_FLOAT;
4095 goto basic_type;
4096 case TOK_DOUBLE:
4097 if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
4098 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
4099 } else {
4100 u = VT_DOUBLE;
4101 goto basic_type;
4103 next();
4104 break;
4105 case TOK_ENUM:
4106 struct_decl(&type1, VT_ENUM);
4107 basic_type2:
4108 u = type1.t;
4109 type->ref = type1.ref;
4110 goto basic_type1;
4111 case TOK_STRUCT:
4112 struct_decl(&type1, VT_STRUCT);
4113 goto basic_type2;
4114 case TOK_UNION:
4115 struct_decl(&type1, VT_UNION);
4116 goto basic_type2;
4118 /* type modifiers */
4119 case TOK_CONST1:
4120 case TOK_CONST2:
4121 case TOK_CONST3:
4122 type->t = t;
4123 parse_btype_qualify(type, VT_CONSTANT);
4124 t = type->t;
4125 next();
4126 break;
4127 case TOK_VOLATILE1:
4128 case TOK_VOLATILE2:
4129 case TOK_VOLATILE3:
4130 type->t = t;
4131 parse_btype_qualify(type, VT_VOLATILE);
4132 t = type->t;
4133 next();
4134 break;
4135 case TOK_SIGNED1:
4136 case TOK_SIGNED2:
4137 case TOK_SIGNED3:
4138 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
4139 tcc_error("signed and unsigned modifier");
4140 t |= VT_DEFSIGN;
4141 next();
4142 typespec_found = 1;
4143 break;
4144 case TOK_REGISTER:
4145 case TOK_AUTO:
4146 case TOK_RESTRICT1:
4147 case TOK_RESTRICT2:
4148 case TOK_RESTRICT3:
4149 next();
4150 break;
4151 case TOK_UNSIGNED:
4152 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
4153 tcc_error("signed and unsigned modifier");
4154 t |= VT_DEFSIGN | VT_UNSIGNED;
4155 next();
4156 typespec_found = 1;
4157 break;
4159 /* storage */
4160 case TOK_EXTERN:
4161 g = VT_EXTERN;
4162 goto storage;
4163 case TOK_STATIC:
4164 g = VT_STATIC;
4165 goto storage;
4166 case TOK_TYPEDEF:
4167 g = VT_TYPEDEF;
4168 goto storage;
4169 storage:
4170 if (t & (VT_EXTERN|VT_STATIC|VT_TYPEDEF) & ~g)
4171 tcc_error("multiple storage classes");
4172 t |= g;
4173 next();
4174 break;
4175 case TOK_INLINE1:
4176 case TOK_INLINE2:
4177 case TOK_INLINE3:
4178 t |= VT_INLINE;
4179 next();
4180 break;
4182 /* GNUC attribute */
4183 case TOK_ATTRIBUTE1:
4184 case TOK_ATTRIBUTE2:
4185 parse_attribute(ad);
4186 if (ad->attr_mode) {
4187 u = ad->attr_mode -1;
4188 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
4190 continue;
4191 /* GNUC typeof */
4192 case TOK_TYPEOF1:
4193 case TOK_TYPEOF2:
4194 case TOK_TYPEOF3:
4195 next();
4196 parse_expr_type(&type1);
4197 /* remove all storage modifiers except typedef */
4198 type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
4199 if (type1.ref)
4200 sym_to_attr(ad, type1.ref);
4201 goto basic_type2;
4202 default:
4203 if (typespec_found)
4204 goto the_end;
4205 s = sym_find(tok);
4206 if (!s || !(s->type.t & VT_TYPEDEF))
4207 goto the_end;
4208 t &= ~(VT_BTYPE|VT_LONG);
4209 u = t & ~(VT_CONSTANT | VT_VOLATILE), t ^= u;
4210 type->t = (s->type.t & ~VT_TYPEDEF) | u;
4211 type->ref = s->type.ref;
4212 if (t)
4213 parse_btype_qualify(type, t);
4214 t = type->t;
4215 /* get attributes from typedef */
4216 sym_to_attr(ad, s);
4217 next();
4218 typespec_found = 1;
4219 st = bt = -2;
4220 break;
4222 type_found = 1;
4224 the_end:
4225 if (tcc_state->char_is_unsigned) {
4226 if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
4227 t |= VT_UNSIGNED;
4229 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
4230 bt = t & (VT_BTYPE|VT_LONG);
4231 if (bt == VT_LONG)
4232 t |= LONG_SIZE == 8 ? VT_LLONG : VT_INT;
4233 #ifdef TCC_TARGET_PE
4234 if (bt == VT_LDOUBLE)
4235 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_DOUBLE;
4236 #endif
4237 type->t = t;
4238 return type_found;
4241 /* convert a function parameter type (array to pointer and function to
4242 function pointer) */
4243 static inline void convert_parameter_type(CType *pt)
4245 /* remove const and volatile qualifiers (XXX: const could be used
4246 to indicate a const function parameter) */
4247 pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
4248 /* array must be transformed to pointer according to ANSI C */
4249 pt->t &= ~VT_ARRAY;
4250 if ((pt->t & VT_BTYPE) == VT_FUNC) {
4251 mk_pointer(pt);
4255 ST_FUNC void parse_asm_str(CString *astr)
4257 skip('(');
4258 parse_mult_str(astr, "string constant");
4261 /* Parse an asm label and return the token */
4262 static int asm_label_instr(void)
4264 int v;
4265 CString astr;
4267 next();
4268 parse_asm_str(&astr);
4269 skip(')');
4270 #ifdef ASM_DEBUG
4271 printf("asm_alias: \"%s\"\n", (char *)astr.data);
4272 #endif
4273 v = tok_alloc(astr.data, astr.size - 1)->tok;
4274 cstr_free(&astr);
4275 return v;
4278 static int post_type(CType *type, AttributeDef *ad, int storage, int td)
4280 int n, l, t1, arg_size, align;
4281 Sym **plast, *s, *first;
4282 AttributeDef ad1;
4283 CType pt;
4285 if (tok == '(') {
4286 /* function type, or recursive declarator (return if so) */
4287 next();
4288 if (td && !(td & TYPE_ABSTRACT))
4289 return 0;
4290 if (tok == ')')
4291 l = 0;
4292 else if (parse_btype(&pt, &ad1))
4293 l = FUNC_NEW;
4294 else if (td)
4295 return 0;
4296 else
4297 l = FUNC_OLD;
4298 first = NULL;
4299 plast = &first;
4300 arg_size = 0;
4301 if (l) {
4302 for(;;) {
4303 /* read param name and compute offset */
4304 if (l != FUNC_OLD) {
4305 if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
4306 break;
4307 type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT);
4308 if ((pt.t & VT_BTYPE) == VT_VOID)
4309 tcc_error("parameter declared as void");
4310 } else {
4311 n = tok;
4312 if (n < TOK_UIDENT)
4313 expect("identifier");
4314 pt.t = VT_VOID; /* invalid type */
4315 next();
4317 convert_parameter_type(&pt);
4318 arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
4319 s = sym_push(n | SYM_FIELD, &pt, 0, 0);
4320 *plast = s;
4321 plast = &s->next;
4322 if (tok == ')')
4323 break;
4324 skip(',');
4325 if (l == FUNC_NEW && tok == TOK_DOTS) {
4326 l = FUNC_ELLIPSIS;
4327 next();
4328 break;
4330 if (l == FUNC_NEW && !parse_btype(&pt, &ad1))
4331 tcc_error("invalid type");
4333 } else
4334 /* if no parameters, then old type prototype */
4335 l = FUNC_OLD;
4336 skip(')');
4337 /* NOTE: const is ignored in returned type as it has a special
4338 meaning in gcc / C++ */
4339 type->t &= ~VT_CONSTANT;
4340 /* some ancient pre-K&R C allows a function to return an array
4341 and the array brackets to be put after the arguments, such
4342 that "int c()[]" means something like "int[] c()" */
4343 if (tok == '[') {
4344 next();
4345 skip(']'); /* only handle simple "[]" */
4346 mk_pointer(type);
4348 /* we push an anonymous symbol which will contain the function prototype */
4349 ad->f.func_args = arg_size;
4350 ad->f.func_type = l;
4351 s = sym_push(SYM_FIELD, type, 0, 0);
4352 s->a = ad->a;
4353 s->f = ad->f;
4354 s->next = first;
4355 type->t = VT_FUNC;
4356 type->ref = s;
4357 } else if (tok == '[') {
4358 int saved_nocode_wanted = nocode_wanted;
4359 /* array definition */
4360 next();
4361 while (1) {
4362 /* XXX The optional type-quals and static should only be accepted
4363 in parameter decls. The '*' as well, and then even only
4364 in prototypes (not function defs). */
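/* Illustrative example (not part of the original source) of the C99 forms
   skipped by the switch below when they appear in an array declarator:
       void f(int n, int a[static 10]);    // 'static' plus a size
       void g(int n, double b[restrict]);  // a type qualifier
       void h(int, int x[*]);              // '[*]', valid only in prototypes
   as the XXX note says, they are currently accepted for any array. */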
4365 switch (tok) {
4366 case TOK_RESTRICT1: case TOK_RESTRICT2: case TOK_RESTRICT3:
4367 case TOK_CONST1:
4368 case TOK_VOLATILE1:
4369 case TOK_STATIC:
4370 case '*':
4371 next();
4372 continue;
4373 default:
4374 break;
4376 break;
4378 n = -1;
4379 t1 = 0;
4380 if (tok != ']') {
4381 if (!local_stack || (storage & VT_STATIC))
4382 vpushi(expr_const());
4383 else {
4384 /* The length of a VLA (which can only happen with local_stack &&
4385 !VT_STATIC) must always be evaluated, even under nocode_wanted,
4386 so that its size slot is initialized (e.g. under sizeof
4387 or typeof). */
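/* Illustrative example (not part of the original source):
       int n = 4;
       size_t s = sizeof(int [n]);   // n is still evaluated here
   even though sizeof sets nocode_wanted, the bound expression of the
   VLA type must be emitted so its runtime size slot gets a value. */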
4388 nocode_wanted = 0;
4389 gexpr();
4391 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4392 n = vtop->c.i;
4393 if (n < 0)
4394 tcc_error("invalid array size");
4395 } else {
4396 if (!is_integer_btype(vtop->type.t & VT_BTYPE))
4397 tcc_error("size of variable length array should be an integer");
4398 t1 = VT_VLA;
4401 skip(']');
4402 /* parse next post type */
4403 post_type(type, ad, storage, 0);
4404 if (type->t == VT_FUNC)
4405 tcc_error("declaration of an array of functions");
4406 t1 |= type->t & VT_VLA;
4408 if (t1 & VT_VLA) {
4409 loc -= type_size(&int_type, &align);
4410 loc &= -align;
4411 n = loc;
4413 vla_runtime_type_size(type, &align);
4414 gen_op('*');
4415 vset(&int_type, VT_LOCAL|VT_LVAL, n);
4416 vswap();
4417 vstore();
4419 if (n != -1)
4420 vpop();
4421 nocode_wanted = saved_nocode_wanted;
4423 /* we push an anonymous symbol which will contain the array
4424 element type */
4425 s = sym_push(SYM_FIELD, type, 0, n);
4426 type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
4427 type->ref = s;
4429 return 1;
4432 /* Parse a type declarator (except basic type), and return the type
4433 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4434 expected. 'type' should contain the basic type. 'ad' is the
4435 attribute definition of the basic type. It can be modified by
4436 type_decl(). If this (possibly abstract) declarator is a pointer chain
4437 it returns the innermost pointed to type (equals *type, but is a different
4438 pointer); otherwise it returns 'type' itself. That is used for recursive calls. */
4439 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td)
4441 CType *post, *ret;
4442 int qualifiers, storage;
4444 /* recursive type, remove storage bits first, apply them later again */
4445 storage = type->t & VT_STORAGE;
4446 type->t &= ~VT_STORAGE;
4447 post = ret = type;
4449 while (tok == '*') {
4450 qualifiers = 0;
4451 redo:
4452 next();
4453 switch(tok) {
4454 case TOK_CONST1:
4455 case TOK_CONST2:
4456 case TOK_CONST3:
4457 qualifiers |= VT_CONSTANT;
4458 goto redo;
4459 case TOK_VOLATILE1:
4460 case TOK_VOLATILE2:
4461 case TOK_VOLATILE3:
4462 qualifiers |= VT_VOLATILE;
4463 goto redo;
4464 case TOK_RESTRICT1:
4465 case TOK_RESTRICT2:
4466 case TOK_RESTRICT3:
4467 goto redo;
4468 /* XXX: clarify attribute handling */
4469 case TOK_ATTRIBUTE1:
4470 case TOK_ATTRIBUTE2:
4471 parse_attribute(ad);
4472 break;
4474 mk_pointer(type);
4475 type->t |= qualifiers;
4476 if (ret == type)
4477 /* innermost pointed to type is the one for the first derivation */
4478 ret = pointed_type(type);
4481 if (tok == '(') {
4482 /* This is possibly a parameter type list for abstract declarators
4483 ('int ()'), use post_type for testing this. */
4484 if (!post_type(type, ad, 0, td)) {
4485 /* It's not, so it's a nested declarator, and the post operations
4486 apply to the innermost pointed to type (if any). */
4487 /* XXX: this is not correct to modify 'ad' at this point, but
4488 the syntax is not clear */
4489 parse_attribute(ad);
4490 post = type_decl(type, ad, v, td);
4491 skip(')');
4493 } else if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
4494 /* type identifier */
4495 *v = tok;
4496 next();
4497 } else {
4498 if (!(td & TYPE_ABSTRACT))
4499 expect("identifier");
4500 *v = 0;
4502 post_type(post, ad, storage, 0);
4503 parse_attribute(ad);
4504 type->t |= storage;
4505 return ret;
4508 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
4509 ST_FUNC int lvalue_type(int t)
4511 int bt, r;
4512 r = VT_LVAL;
4513 bt = t & VT_BTYPE;
4514 if (bt == VT_BYTE || bt == VT_BOOL)
4515 r |= VT_LVAL_BYTE;
4516 else if (bt == VT_SHORT)
4517 r |= VT_LVAL_SHORT;
4518 else
4519 return r;
4520 if (t & VT_UNSIGNED)
4521 r |= VT_LVAL_UNSIGNED;
4522 return r;
4525 /* indirection with full error checking and bound check */
4526 ST_FUNC void indir(void)
4528 if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
4529 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
4530 return;
4531 expect("pointer");
4533 if (vtop->r & VT_LVAL)
4534 gv(RC_INT);
4535 vtop->type = *pointed_type(&vtop->type);
4536 /* Arrays and functions are never lvalues */
4537 if (!(vtop->type.t & VT_ARRAY) && !(vtop->type.t & VT_VLA)
4538 && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
4539 vtop->r |= lvalue_type(vtop->type.t);
4540 /* if bound checking, the referenced pointer must be checked */
4541 #ifdef CONFIG_TCC_BCHECK
4542 if (tcc_state->do_bounds_check)
4543 vtop->r |= VT_MUSTBOUND;
4544 #endif
4548 /* pass a parameter to a function and do type checking and casting */
4549 static void gfunc_param_typed(Sym *func, Sym *arg)
4551 int func_type;
4552 CType type;
4554 func_type = func->f.func_type;
4555 if (func_type == FUNC_OLD ||
4556 (func_type == FUNC_ELLIPSIS && arg == NULL)) {
4557 /* default casting : only need to convert float to double */
4558 if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
4559 gen_cast_s(VT_DOUBLE);
4560 } else if (vtop->type.t & VT_BITFIELD) {
4561 type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
4562 type.ref = vtop->type.ref;
4563 gen_cast(&type);
4565 } else if (arg == NULL) {
4566 tcc_error("too many arguments to function");
4567 } else {
4568 type = arg->type;
4569 type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
4570 gen_assign_cast(&type);
4574 /* parse an expression and return its type without any side effect. */
4575 static void expr_type(CType *type, void (*expr_fn)(void))
4577 nocode_wanted++;
4578 expr_fn();
4579 *type = vtop->type;
4580 vpop();
4581 nocode_wanted--;
4584 /* parse an expression of the form '(type)' or '(expr)' and return its
4585 type */
4586 static void parse_expr_type(CType *type)
4588 int n;
4589 AttributeDef ad;
4591 skip('(');
4592 if (parse_btype(type, &ad)) {
4593 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4594 } else {
4595 expr_type(type, gexpr);
4597 skip(')');
4600 static void parse_type(CType *type)
4602 AttributeDef ad;
4603 int n;
4605 if (!parse_btype(type, &ad)) {
4606 expect("type");
4608 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4611 static void parse_builtin_params(int nc, const char *args)
4613 char c, sep = '(';
4614 CType t;
4615 if (nc)
4616 nocode_wanted++;
4617 next();
4618 while ((c = *args++)) {
4619 skip(sep);
4620 sep = ',';
4621 switch (c) {
4622 case 'e': expr_eq(); continue;
4623 case 't': parse_type(&t); vpush(&t); continue;
4624 default: tcc_error("internal error"); break;
4627 skip(')');
4628 if (nc)
4629 nocode_wanted--;
4632 ST_FUNC void unary(void)
4634 int n, t, align, size, r, sizeof_caller;
4635 CType type;
4636 Sym *s;
4637 AttributeDef ad;
4639 sizeof_caller = in_sizeof;
4640 in_sizeof = 0;
4641 type.ref = NULL;
4642 /* XXX: GCC 2.95.3 does not generate a jump table although it would be
4643 better here */
4644 tok_next:
4645 switch(tok) {
4646 case TOK_EXTENSION:
4647 next();
4648 goto tok_next;
4649 case TOK_LCHAR:
4650 #ifdef TCC_TARGET_PE
4651 t = VT_SHORT|VT_UNSIGNED;
4652 goto push_tokc;
4653 #endif
4654 case TOK_CINT:
4655 case TOK_CCHAR:
4656 t = VT_INT;
4657 push_tokc:
4658 type.t = t;
4659 vsetc(&type, VT_CONST, &tokc);
4660 next();
4661 break;
4662 case TOK_CUINT:
4663 t = VT_INT | VT_UNSIGNED;
4664 goto push_tokc;
4665 case TOK_CLLONG:
4666 t = VT_LLONG;
4667 goto push_tokc;
4668 case TOK_CULLONG:
4669 t = VT_LLONG | VT_UNSIGNED;
4670 goto push_tokc;
4671 case TOK_CFLOAT:
4672 t = VT_FLOAT;
4673 goto push_tokc;
4674 case TOK_CDOUBLE:
4675 t = VT_DOUBLE;
4676 goto push_tokc;
4677 case TOK_CLDOUBLE:
4678 t = VT_LDOUBLE;
4679 goto push_tokc;
4680 case TOK_CLONG:
4681 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG;
4682 goto push_tokc;
4683 case TOK_CULONG:
4684 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG | VT_UNSIGNED;
4685 goto push_tokc;
4686 case TOK___FUNCTION__:
4687 if (!gnu_ext)
4688 goto tok_identifier;
4689 /* fall thru */
4690 case TOK___FUNC__:
4692 void *ptr;
4693 int len;
4694 /* special function name identifier */
4695 len = strlen(funcname) + 1;
4696 /* generate char[len] type */
4697 type.t = VT_BYTE;
4698 mk_pointer(&type);
4699 type.t |= VT_ARRAY;
4700 type.ref->c = len;
4701 vpush_ref(&type, data_section, data_section->data_offset, len);
4702 if (!NODATA_WANTED) {
4703 ptr = section_ptr_add(data_section, len);
4704 memcpy(ptr, funcname, len);
4706 next();
4708 break;
4709 case TOK_LSTR:
4710 #ifdef TCC_TARGET_PE
4711 t = VT_SHORT | VT_UNSIGNED;
4712 #else
4713 t = VT_INT;
4714 #endif
4715 goto str_init;
4716 case TOK_STR:
4717 /* string parsing */
4718 t = VT_BYTE;
4719 if (tcc_state->char_is_unsigned)
4720 t = VT_BYTE | VT_UNSIGNED;
4721 str_init:
4722 if (tcc_state->warn_write_strings)
4723 t |= VT_CONSTANT;
4724 type.t = t;
4725 mk_pointer(&type);
4726 type.t |= VT_ARRAY;
4727 memset(&ad, 0, sizeof(AttributeDef));
4728 decl_initializer_alloc(&type, &ad, VT_CONST, 2, 0, 0);
4729 break;
4730 case '(':
4731 next();
4732 /* cast ? */
4733 if (parse_btype(&type, &ad)) {
4734 type_decl(&type, &ad, &n, TYPE_ABSTRACT);
4735 skip(')');
4736 /* check ISOC99 compound literal */
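/* Illustrative example (not part of the original source) of the ISO C99
   compound literals handled by this branch ('struct point' is assumed
   to be declared elsewhere):
       int *p = (int []){1, 2, 3};
       struct point pt = (struct point){ .x = 1, .y = 2 };
*/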
4737 if (tok == '{') {
4738 /* data is allocated locally by default */
4739 if (global_expr)
4740 r = VT_CONST;
4741 else
4742 r = VT_LOCAL;
4743 /* all except arrays are lvalues */
4744 if (!(type.t & VT_ARRAY))
4745 r |= lvalue_type(type.t);
4746 memset(&ad, 0, sizeof(AttributeDef));
4747 decl_initializer_alloc(&type, &ad, r, 1, 0, 0);
4748 } else {
4749 if (sizeof_caller) {
4750 vpush(&type);
4751 return;
4753 unary();
4754 gen_cast(&type);
4756 } else if (tok == '{') {
4757 int saved_nocode_wanted = nocode_wanted;
4758 if (const_wanted)
4759 tcc_error("expected constant");
4760 /* save all registers */
4761 save_regs(0);
4762 /* statement expression: we do not accept break/continue
4763 inside as GCC does. We do retain the nocode_wanted state,
4764 as statement expressions can't ever be entered from the
4765 outside, so any reactivation of code emission (from labels
4766 or loop heads) can be disabled again after the end of it. */
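/* Illustrative example (not part of the original source) of a GNU
   statement expression parsed here (f() is a placeholder):
       int x = ({ int t = f(); t * 2; });   // value of the last statement
*/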
4767 block(NULL, NULL, 1);
4768 nocode_wanted = saved_nocode_wanted;
4769 skip(')');
4770 } else {
4771 gexpr();
4772 skip(')');
4774 break;
4775 case '*':
4776 next();
4777 unary();
4778 indir();
4779 break;
4780 case '&':
4781 next();
4782 unary();
4783 /* function names must be treated as function pointers,
4784 except for unary '&' and sizeof. Since we consider that
4785 functions are not lvalues, we only have to handle it
4786 there and in function calls. */
4787 /* arrays can also be used although they are not lvalues */
4788 if ((vtop->type.t & VT_BTYPE) != VT_FUNC &&
4789 !(vtop->type.t & VT_ARRAY))
4790 test_lvalue();
4791 mk_pointer(&vtop->type);
4792 gaddrof();
4793 break;
4794 case '!':
4795 next();
4796 unary();
4797 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4798 gen_cast_s(VT_BOOL);
4799 vtop->c.i = !vtop->c.i;
4800 } else if ((vtop->r & VT_VALMASK) == VT_CMP)
4801 vtop->c.i ^= 1;
4802 else {
4803 save_regs(1);
4804 vseti(VT_JMP, gvtst(1, 0));
4806 break;
4807 case '~':
4808 next();
4809 unary();
4810 vpushi(-1);
4811 gen_op('^');
4812 break;
4813 case '+':
4814 next();
4815 unary();
4816 if ((vtop->type.t & VT_BTYPE) == VT_PTR)
4817 tcc_error("pointer not accepted for unary plus");
4818 /* In order to force cast, we add zero, except for floating point
4819 where we really need a no-op (otherwise -0.0 will be transformed
4820 into +0.0). */
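/* Illustrative examples (not part of the original source):
       char c = 'a'; ... +c          // promoted to int by adding 0
       double d = -0.0; ... +d       // must stay -0.0, so no 0 is added
*/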
4821 if (!is_float(vtop->type.t)) {
4822 vpushi(0);
4823 gen_op('+');
4825 break;
4826 case TOK_SIZEOF:
4827 case TOK_ALIGNOF1:
4828 case TOK_ALIGNOF2:
4829 t = tok;
4830 next();
4831 in_sizeof++;
4832 expr_type(&type, unary); /* calls unary(), which resets in_sizeof to 0 */
4833 s = vtop[1].sym; /* hack: accessing previous vtop */
4834 size = type_size(&type, &align);
4835 if (s && s->a.aligned)
4836 align = 1 << (s->a.aligned - 1);
4837 if (t == TOK_SIZEOF) {
4838 if (!(type.t & VT_VLA)) {
4839 if (size < 0)
4840 tcc_error("sizeof applied to an incomplete type");
4841 vpushs(size);
4842 } else {
4843 vla_runtime_type_size(&type, &align);
4845 } else {
4846 vpushs(align);
4848 vtop->type.t |= VT_UNSIGNED;
4849 break;
4851 case TOK_builtin_expect:
4852 /* __builtin_expect is a no-op for now */
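/* Illustrative usage (not part of the original source; handle_error() is
   a placeholder):
       if (__builtin_expect(err != 0, 0)) handle_error();
   both arguments are parsed; the hint (second argument) is popped below
   and only the value of the first expression is kept. */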
4853 parse_builtin_params(0, "ee");
4854 vpop();
4855 break;
4856 case TOK_builtin_types_compatible_p:
4857 parse_builtin_params(0, "tt");
4858 vtop[-1].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
4859 vtop[0].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
4860 n = is_compatible_types(&vtop[-1].type, &vtop[0].type);
4861 vtop -= 2;
4862 vpushi(n);
4863 break;
4864 case TOK_builtin_choose_expr:
4866 int64_t c;
4867 next();
4868 skip('(');
4869 c = expr_const64();
4870 skip(',');
4871 if (!c) {
4872 nocode_wanted++;
4874 expr_eq();
4875 if (!c) {
4876 vpop();
4877 nocode_wanted--;
4879 skip(',');
4880 if (c) {
4881 nocode_wanted++;
4883 expr_eq();
4884 if (c) {
4885 vpop();
4886 nocode_wanted--;
4888 skip(')');
4890 break;
4891 case TOK_builtin_constant_p:
4892 parse_builtin_params(1, "e");
4893 n = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
4894 vtop--;
4895 vpushi(n);
4896 break;
4897 case TOK_builtin_frame_address:
4898 case TOK_builtin_return_address:
4900 int tok1 = tok;
4901 int level;
4902 next();
4903 skip('(');
4904 if (tok != TOK_CINT) {
4905 tcc_error("%s only takes positive integers",
4906 tok1 == TOK_builtin_return_address ?
4907 "__builtin_return_address" :
4908 "__builtin_frame_address");
4910 level = (uint32_t)tokc.i;
4911 next();
4912 skip(')');
4913 type.t = VT_VOID;
4914 mk_pointer(&type);
4915 vset(&type, VT_LOCAL, 0); /* local frame */
4916 while (level--) {
4917 mk_pointer(&vtop->type);
4918 indir(); /* -> parent frame */
4920 if (tok1 == TOK_builtin_return_address) {
4921 // assume return address is just above frame pointer on stack
4922 vpushi(PTR_SIZE);
4923 gen_op('+');
4924 mk_pointer(&vtop->type);
4925 indir();
4928 break;
4929 #ifdef TCC_TARGET_X86_64
4930 #ifdef TCC_TARGET_PE
4931 case TOK_builtin_va_start:
4932 parse_builtin_params(0, "ee");
4933 r = vtop->r & VT_VALMASK;
4934 if (r == VT_LLOCAL)
4935 r = VT_LOCAL;
4936 if (r != VT_LOCAL)
4937 tcc_error("__builtin_va_start expects a local variable");
4938 vtop->r = r;
4939 vtop->type = char_pointer_type;
4940 vtop->c.i += 8;
4941 vstore();
4942 break;
4943 #else
4944 case TOK_builtin_va_arg_types:
4945 parse_builtin_params(0, "t");
4946 vpushi(classify_x86_64_va_arg(&vtop->type));
4947 vswap();
4948 vpop();
4949 break;
4950 #endif
4951 #endif
4953 #ifdef TCC_TARGET_ARM64
4954 case TOK___va_start: {
4955 parse_builtin_params(0, "ee");
4956 //xx check types
4957 gen_va_start();
4958 vpushi(0);
4959 vtop->type.t = VT_VOID;
4960 break;
4962 case TOK___va_arg: {
4963 parse_builtin_params(0, "et");
4964 type = vtop->type;
4965 vpop();
4966 //xx check types
4967 gen_va_arg(&type);
4968 vtop->type = type;
4969 break;
4971 case TOK___arm64_clear_cache: {
4972 parse_builtin_params(0, "ee");
4973 gen_clear_cache();
4974 vpushi(0);
4975 vtop->type.t = VT_VOID;
4976 break;
4978 #endif
4979 /* pre operations */
4980 case TOK_INC:
4981 case TOK_DEC:
4982 t = tok;
4983 next();
4984 unary();
4985 inc(0, t);
4986 break;
4987 case '-':
4988 next();
4989 unary();
4990 t = vtop->type.t & VT_BTYPE;
4991 if (is_float(t)) {
4992 /* In IEEE negate(x) isn't subtract(0,x), but rather
4993 subtract(-0, x). */
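/* Illustrative example (not part of the original source):
       double x = 0.0;
       double y = -x;   // must yield -0.0, but 0.0 - 0.0 is +0.0
   hence a -0.0 constant is pushed and (-0.0) - x is computed instead. */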
4994 vpush(&vtop->type);
4995 if (t == VT_FLOAT)
4996 vtop->c.f = -1.0 * 0.0;
4997 else if (t == VT_DOUBLE)
4998 vtop->c.d = -1.0 * 0.0;
4999 else
5000 vtop->c.ld = -1.0 * 0.0;
5001 } else
5002 vpushi(0);
5003 vswap();
5004 gen_op('-');
5005 break;
5006 case TOK_LAND:
5007 if (!gnu_ext)
5008 goto tok_identifier;
5009 next();
5010 /* allow taking the address of a label */
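/* Illustrative example (not part of the original source) of the GNU
   "labels as values" extension handled here:
       void *p = &&done;
       goto *p;            // computed goto, see TOK_GOTO in block()
   done: ;
*/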
5011 if (tok < TOK_UIDENT)
5012 expect("label identifier");
5013 s = label_find(tok);
5014 if (!s) {
5015 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
5016 } else {
5017 if (s->r == LABEL_DECLARED)
5018 s->r = LABEL_FORWARD;
5020 if (!s->type.t) {
5021 s->type.t = VT_VOID;
5022 mk_pointer(&s->type);
5023 s->type.t |= VT_STATIC;
5025 vpushsym(&s->type, s);
5026 next();
5027 break;
5029 case TOK_GENERIC:
5031 CType controlling_type;
5032 int has_default = 0;
5033 int has_match = 0;
5034 int learn = 0;
5035 TokenString *str = NULL;
5036 int saved_const_wanted = const_wanted;
5038 next();
5039 skip('(');
5040 const_wanted = 0;
5041 expr_type(&controlling_type, expr_eq);
5042 controlling_type.t &= ~(VT_CONSTANT | VT_VOLATILE | VT_ARRAY);
5043 if ((controlling_type.t & VT_BTYPE) == VT_FUNC)
5044 mk_pointer(&controlling_type);
5045 const_wanted = saved_const_wanted;
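/* Illustrative example (not part of the original source; x is a
   placeholder expression):
       _Generic(x, int: "int", double: "double", default: "other")
   the controlling expression is only type-checked (no code is
   generated); its type, with top-level qualifiers dropped, arrays
   decayed to pointers and functions converted to function pointers as
   done above, selects one of the associations parsed below. */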
5046 for (;;) {
5047 learn = 0;
5048 skip(',');
5049 if (tok == TOK_DEFAULT) {
5050 if (has_default)
5051 tcc_error("too many 'default'");
5052 has_default = 1;
5053 if (!has_match)
5054 learn = 1;
5055 next();
5056 } else {
5057 AttributeDef ad_tmp;
5058 int itmp;
5059 CType cur_type;
5060 parse_btype(&cur_type, &ad_tmp);
5061 type_decl(&cur_type, &ad_tmp, &itmp, TYPE_ABSTRACT);
5062 if (compare_types(&controlling_type, &cur_type, 0)) {
5063 if (has_match) {
5064 tcc_error("type match twice");
5066 has_match = 1;
5067 learn = 1;
5070 skip(':');
5071 if (learn) {
5072 if (str)
5073 tok_str_free(str);
5074 skip_or_save_block(&str);
5075 } else {
5076 skip_or_save_block(NULL);
5078 if (tok == ')')
5079 break;
5081 if (!str) {
5082 char buf[60];
5083 type_to_str(buf, sizeof buf, &controlling_type, NULL);
5084 tcc_error("type '%s' does not match any association", buf);
5086 begin_macro(str, 1);
5087 next();
5088 expr_eq();
5089 if (tok != TOK_EOF)
5090 expect(",");
5091 end_macro();
5092 next();
5093 break;
5095 // special qnan, snan and infinity values
5096 case TOK___NAN__:
5097 n = 0x7fc00000;
5098 special_math_val:
5099 vpushi(n);
5100 vtop->type.t = VT_FLOAT;
5101 next();
5102 break;
5103 case TOK___SNAN__:
5104 n = 0x7f800001;
5105 goto special_math_val;
5106 case TOK___INF__:
5107 n = 0x7f800000;
5108 goto special_math_val;
5110 default:
5111 tok_identifier:
5112 t = tok;
5113 next();
5114 if (t < TOK_UIDENT)
5115 expect("identifier");
5116 s = sym_find(t);
5117 if (!s || IS_ASM_SYM(s)) {
5118 const char *name = get_tok_str(t, NULL);
5119 if (tok != '(')
5120 tcc_error("'%s' undeclared", name);
5121 /* for simple function calls, we tolerate undeclared
5122 external reference to int() function */
5123 if (tcc_state->warn_implicit_function_declaration
5124 #ifdef TCC_TARGET_PE
5125 /* people must be warned about using undeclared WINAPI functions
5126 (which usually start with an uppercase letter) */
5127 || (name[0] >= 'A' && name[0] <= 'Z')
5128 #endif
5130 tcc_warning("implicit declaration of function '%s'", name);
5131 s = external_global_sym(t, &func_old_type, 0);
5134 r = s->r;
5135 /* A symbol that has a register is a local register variable,
5136 which starts out as VT_LOCAL value. */
5137 if ((r & VT_VALMASK) < VT_CONST)
5138 r = (r & ~VT_VALMASK) | VT_LOCAL;
5140 vset(&s->type, r, s->c);
5141 /* Point to s as backpointer (even without r&VT_SYM).
5142 Will be used by at least the x86 inline asm parser for
5143 regvars. */
5144 vtop->sym = s;
5146 if (r & VT_SYM) {
5147 vtop->c.i = 0;
5148 } else if (r == VT_CONST && IS_ENUM_VAL(s->type.t)) {
5149 vtop->c.i = s->enum_val;
5151 break;
5154 /* post operations */
5155 while (1) {
5156 if (tok == TOK_INC || tok == TOK_DEC) {
5157 inc(1, tok);
5158 next();
5159 } else if (tok == '.' || tok == TOK_ARROW || tok == TOK_CDOUBLE) {
5160 int qualifiers;
5161 /* field */
5162 if (tok == TOK_ARROW)
5163 indir();
5164 qualifiers = vtop->type.t & (VT_CONSTANT | VT_VOLATILE);
5165 test_lvalue();
5166 gaddrof();
5167 /* expect pointer on structure */
5168 if ((vtop->type.t & VT_BTYPE) != VT_STRUCT)
5169 expect("struct or union");
5170 if (tok == TOK_CDOUBLE)
5171 expect("field name");
5172 next();
5173 if (tok == TOK_CINT || tok == TOK_CUINT)
5174 expect("field name");
5175 s = find_field(&vtop->type, tok);
5176 if (!s)
5177 tcc_error("field not found: %s", get_tok_str(tok & ~SYM_FIELD, &tokc));
5178 /* add field offset to pointer */
5179 vtop->type = char_pointer_type; /* change type to 'char *' */
5180 vpushi(s->c);
5181 gen_op('+');
5182 /* change type to field type, and set to lvalue */
5183 vtop->type = s->type;
5184 vtop->type.t |= qualifiers;
5185 /* an array is never an lvalue */
5186 if (!(vtop->type.t & VT_ARRAY)) {
5187 vtop->r |= lvalue_type(vtop->type.t);
5188 #ifdef CONFIG_TCC_BCHECK
5189 /* if bound checking, the referenced pointer must be checked */
5190 if (tcc_state->do_bounds_check && (vtop->r & VT_VALMASK) != VT_LOCAL)
5191 vtop->r |= VT_MUSTBOUND;
5192 #endif
5194 next();
5195 } else if (tok == '[') {
5196 next();
5197 gexpr();
5198 gen_op('+');
5199 indir();
5200 skip(']');
5201 } else if (tok == '(') {
5202 SValue ret;
5203 Sym *sa;
5204 int nb_args, ret_nregs, ret_align, regsize, variadic;
5206 /* function call */
5207 if ((vtop->type.t & VT_BTYPE) != VT_FUNC) {
5208 /* pointer test (no array accepted) */
5209 if ((vtop->type.t & (VT_BTYPE | VT_ARRAY)) == VT_PTR) {
5210 vtop->type = *pointed_type(&vtop->type);
5211 if ((vtop->type.t & VT_BTYPE) != VT_FUNC)
5212 goto error_func;
5213 } else {
5214 error_func:
5215 expect("function pointer");
5217 } else {
5218 vtop->r &= ~VT_LVAL; /* no lvalue */
5220 /* get return type */
5221 s = vtop->type.ref;
5222 next();
5223 sa = s->next; /* first parameter */
5224 nb_args = regsize = 0;
5225 ret.r2 = VT_CONST;
5226 /* compute first implicit argument if a structure is returned */
5227 if ((s->type.t & VT_BTYPE) == VT_STRUCT) {
5228 variadic = (s->f.func_type == FUNC_ELLIPSIS);
5229 ret_nregs = gfunc_sret(&s->type, variadic, &ret.type,
5230 &ret_align, &regsize);
5231 if (!ret_nregs) {
5232 /* get some space for the returned structure */
5233 size = type_size(&s->type, &align);
5234 #ifdef TCC_TARGET_ARM64
5235 /* On arm64, a small struct is returned in registers.
5236 It is much easier to write it to memory if we know
5237 that we are allowed to write some extra bytes, so
5238 round the allocated space up to a power of 2: */
5239 if (size < 16)
5240 while (size & (size - 1))
5241 size = (size | (size - 1)) + 1;
5242 #endif
5243 loc = (loc - size) & -align;
5244 ret.type = s->type;
5245 ret.r = VT_LOCAL | VT_LVAL;
5246 /* pass it as 'int' to avoid structure arg passing
5247 problems */
5248 vseti(VT_LOCAL, loc);
5249 ret.c = vtop->c;
5250 nb_args++;
5252 } else {
5253 ret_nregs = 1;
5254 ret.type = s->type;
5257 if (ret_nregs) {
5258 /* return in register */
5259 if (is_float(ret.type.t)) {
5260 ret.r = reg_fret(ret.type.t);
5261 #ifdef TCC_TARGET_X86_64
5262 if ((ret.type.t & VT_BTYPE) == VT_QFLOAT)
5263 ret.r2 = REG_QRET;
5264 #endif
5265 } else {
5266 #ifndef TCC_TARGET_ARM64
5267 #ifdef TCC_TARGET_X86_64
5268 if ((ret.type.t & VT_BTYPE) == VT_QLONG)
5269 #else
5270 if ((ret.type.t & VT_BTYPE) == VT_LLONG)
5271 #endif
5272 ret.r2 = REG_LRET;
5273 #endif
5274 ret.r = REG_IRET;
5276 ret.c.i = 0;
5278 if (tok != ')') {
5279 for(;;) {
5280 expr_eq();
5281 gfunc_param_typed(s, sa);
5282 nb_args++;
5283 if (sa)
5284 sa = sa->next;
5285 if (tok == ')')
5286 break;
5287 skip(',');
5290 if (sa)
5291 tcc_error("too few arguments to function");
5292 skip(')');
5293 gfunc_call(nb_args);
5295 /* return value */
5296 for (r = ret.r + ret_nregs + !ret_nregs; r-- > ret.r;) {
5297 vsetc(&ret.type, r, &ret.c);
5298 vtop->r2 = ret.r2; /* Loop only happens when r2 is VT_CONST */
5301 /* handle packed struct return */
5302 if (((s->type.t & VT_BTYPE) == VT_STRUCT) && ret_nregs) {
5303 int addr, offset;
5305 size = type_size(&s->type, &align);
5306 /* We're writing whole regs often, make sure there's enough
5307 space. Assume register size is power of 2. */
5308 if (regsize > align)
5309 align = regsize;
5310 loc = (loc - size) & -align;
5311 addr = loc;
5312 offset = 0;
5313 for (;;) {
5314 vset(&ret.type, VT_LOCAL | VT_LVAL, addr + offset);
5315 vswap();
5316 vstore();
5317 vtop--;
5318 if (--ret_nregs == 0)
5319 break;
5320 offset += regsize;
5322 vset(&s->type, VT_LOCAL | VT_LVAL, addr);
5324 } else {
5325 break;
5330 ST_FUNC void expr_prod(void)
5332 int t;
5334 unary();
5335 while (tok == '*' || tok == '/' || tok == '%') {
5336 t = tok;
5337 next();
5338 unary();
5339 gen_op(t);
5343 ST_FUNC void expr_sum(void)
5345 int t;
5347 expr_prod();
5348 while (tok == '+' || tok == '-') {
5349 t = tok;
5350 next();
5351 expr_prod();
5352 gen_op(t);
5356 static void expr_shift(void)
5358 int t;
5360 expr_sum();
5361 while (tok == TOK_SHL || tok == TOK_SAR) {
5362 t = tok;
5363 next();
5364 expr_sum();
5365 gen_op(t);
5369 static void expr_cmp(void)
5371 int t;
5373 expr_shift();
5374 while ((tok >= TOK_ULE && tok <= TOK_GT) ||
5375 tok == TOK_ULT || tok == TOK_UGE) {
5376 t = tok;
5377 next();
5378 expr_shift();
5379 gen_op(t);
5383 static void expr_cmpeq(void)
5385 int t;
5387 expr_cmp();
5388 while (tok == TOK_EQ || tok == TOK_NE) {
5389 t = tok;
5390 next();
5391 expr_cmp();
5392 gen_op(t);
5396 static void expr_and(void)
5398 expr_cmpeq();
5399 while (tok == '&') {
5400 next();
5401 expr_cmpeq();
5402 gen_op('&');
5406 static void expr_xor(void)
5408 expr_and();
5409 while (tok == '^') {
5410 next();
5411 expr_and();
5412 gen_op('^');
5416 static void expr_or(void)
5418 expr_xor();
5419 while (tok == '|') {
5420 next();
5421 expr_xor();
5422 gen_op('|');
5426 static void expr_land(void)
5428 expr_or();
5429 if (tok == TOK_LAND) {
5430 int t = 0;
5431 for(;;) {
5432 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5433 gen_cast_s(VT_BOOL);
5434 if (vtop->c.i) {
5435 vpop();
5436 } else {
5437 nocode_wanted++;
5438 while (tok == TOK_LAND) {
5439 next();
5440 expr_or();
5441 vpop();
5443 nocode_wanted--;
5444 if (t)
5445 gsym(t);
5446 gen_cast_s(VT_INT);
5447 break;
5449 } else {
5450 if (!t)
5451 save_regs(1);
5452 t = gvtst(1, t);
5454 if (tok != TOK_LAND) {
5455 if (t)
5456 vseti(VT_JMPI, t);
5457 else
5458 vpushi(1);
5459 break;
5461 next();
5462 expr_or();
5467 static void expr_lor(void)
5469 expr_land();
5470 if (tok == TOK_LOR) {
5471 int t = 0;
5472 for(;;) {
5473 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5474 gen_cast_s(VT_BOOL);
5475 if (!vtop->c.i) {
5476 vpop();
5477 } else {
5478 nocode_wanted++;
5479 while (tok == TOK_LOR) {
5480 next();
5481 expr_land();
5482 vpop();
5484 nocode_wanted--;
5485 if (t)
5486 gsym(t);
5487 gen_cast_s(VT_INT);
5488 break;
5490 } else {
5491 if (!t)
5492 save_regs(1);
5493 t = gvtst(0, t);
5495 if (tok != TOK_LOR) {
5496 if (t)
5497 vseti(VT_JMP, t);
5498 else
5499 vpushi(0);
5500 break;
5502 next();
5503 expr_land();
5508 /* Assuming vtop is a value used in a conditional context
5509 (i.e. compared with zero) return 0 if it's false, 1 if
5510 true and -1 if it can't be statically determined. */
5511 static int condition_3way(void)
5513 int c = -1;
5514 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
5515 (!(vtop->r & VT_SYM) || !vtop->sym->a.weak)) {
5516 vdup();
5517 gen_cast_s(VT_BOOL);
5518 c = vtop->c.i;
5519 vpop();
5521 return c;
5524 static void expr_cond(void)
5526 int tt, u, r1, r2, rc, t1, t2, bt1, bt2, islv, c, g;
5527 SValue sv;
5528 CType type, type1, type2;
5530 expr_lor();
5531 if (tok == '?') {
5532 next();
5533 c = condition_3way();
5534 g = (tok == ':' && gnu_ext);
5535 if (c < 0) {
5536 /* needed to avoid having different registers saved in
5537 each branch */
5538 if (is_float(vtop->type.t)) {
5539 rc = RC_FLOAT;
5540 #ifdef TCC_TARGET_X86_64
5541 if ((vtop->type.t & VT_BTYPE) == VT_LDOUBLE) {
5542 rc = RC_ST0;
5544 #endif
5545 } else
5546 rc = RC_INT;
5547 gv(rc);
5548 save_regs(1);
5549 if (g)
5550 gv_dup();
5551 tt = gvtst(1, 0);
5553 } else {
5554 if (!g)
5555 vpop();
5556 tt = 0;
5559 if (1) {
5560 if (c == 0)
5561 nocode_wanted++;
5562 if (!g)
5563 gexpr();
5565 type1 = vtop->type;
5566 sv = *vtop; /* save value to handle it later */
5567 vtop--; /* no vpop so that FP stack is not flushed */
5568 skip(':');
5570 u = 0;
5571 if (c < 0)
5572 u = gjmp(0);
5573 gsym(tt);
5575 if (c == 0)
5576 nocode_wanted--;
5577 if (c == 1)
5578 nocode_wanted++;
5579 expr_cond();
5580 if (c == 1)
5581 nocode_wanted--;
5583 type2 = vtop->type;
5584 t1 = type1.t;
5585 bt1 = t1 & VT_BTYPE;
5586 t2 = type2.t;
5587 bt2 = t2 & VT_BTYPE;
5588 type.ref = NULL;
5590 /* cast operands to correct type according to ISOC rules */
5591 if (is_float(bt1) || is_float(bt2)) {
5592 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
5593 type.t = VT_LDOUBLE;
5595 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
5596 type.t = VT_DOUBLE;
5597 } else {
5598 type.t = VT_FLOAT;
5600 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
5601 /* cast to biggest op */
5602 type.t = VT_LLONG | VT_LONG;
5603 if (bt1 == VT_LLONG)
5604 type.t &= t1;
5605 if (bt2 == VT_LLONG)
5606 type.t &= t2;
5607 /* convert to unsigned if it does not fit in a long long */
5608 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
5609 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
5610 type.t |= VT_UNSIGNED;
5611 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
5612 /* If one is a null ptr constant the result type
5613 is the other. */
5614 if (is_null_pointer (vtop))
5615 type = type1;
5616 else if (is_null_pointer (&sv))
5617 type = type2;
5618 /* XXX: test pointer compatibility, C99 has more elaborate
5619 rules here. */
5620 else
5621 type = type1;
5622 } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
5623 /* XXX: test function pointer compatibility */
5624 type = bt1 == VT_FUNC ? type1 : type2;
5625 } else if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
5626 /* XXX: test structure compatibility */
5627 type = bt1 == VT_STRUCT ? type1 : type2;
5628 } else if (bt1 == VT_VOID || bt2 == VT_VOID) {
5629 /* NOTE: as an extension, we accept void on only one side */
5630 type.t = VT_VOID;
5631 } else {
5632 /* integer operations */
5633 type.t = VT_INT | (VT_LONG & (t1 | t2));
5634 /* convert to unsigned if it does not fit in an integer */
5635 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
5636 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
5637 type.t |= VT_UNSIGNED;
5639 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
5640 that `(expr ? a : b).mem` does not error with "lvalue expected" */
5641 islv = (vtop->r & VT_LVAL) && (sv.r & VT_LVAL) && VT_STRUCT == (type.t & VT_BTYPE);
5642 islv &= c < 0;
5644 /* now we convert second operand */
5645 if (c != 1) {
5646 gen_cast(&type);
5647 if (islv) {
5648 mk_pointer(&vtop->type);
5649 gaddrof();
5650 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5651 gaddrof();
5654 rc = RC_INT;
5655 if (is_float(type.t)) {
5656 rc = RC_FLOAT;
5657 #ifdef TCC_TARGET_X86_64
5658 if ((type.t & VT_BTYPE) == VT_LDOUBLE) {
5659 rc = RC_ST0;
5661 #endif
5662 } else if ((type.t & VT_BTYPE) == VT_LLONG) {
5663 /* for long longs, we use fixed registers to avoid having
5664 to handle a complicated move */
5665 rc = RC_IRET;
5668 tt = r2 = 0;
5669 if (c < 0) {
5670 r2 = gv(rc);
5671 tt = gjmp(0);
5673 gsym(u);
5675 /* this is horrible, but we must also convert first
5676 operand */
5677 if (c != 0) {
5678 *vtop = sv;
5679 gen_cast(&type);
5680 if (islv) {
5681 mk_pointer(&vtop->type);
5682 gaddrof();
5683 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5684 gaddrof();
5687 if (c < 0) {
5688 r1 = gv(rc);
5689 move_reg(r2, r1, type.t);
5690 vtop->r = r2;
5691 gsym(tt);
5692 if (islv)
5693 indir();
5699 static void expr_eq(void)
5701 int t;
5703 expr_cond();
5704 if (tok == '=' ||
5705 (tok >= TOK_A_MOD && tok <= TOK_A_DIV) ||
5706 tok == TOK_A_XOR || tok == TOK_A_OR ||
5707 tok == TOK_A_SHL || tok == TOK_A_SAR) {
5708 test_lvalue();
5709 t = tok;
5710 next();
5711 if (t == '=') {
5712 expr_eq();
5713 } else {
5714 vdup();
5715 expr_eq();
5716 gen_op(t & 0x7f);
5718 vstore();
5722 ST_FUNC void gexpr(void)
5724 while (1) {
5725 expr_eq();
5726 if (tok != ',')
5727 break;
5728 vpop();
5729 next();
5733 /* parse a constant expression and return value in vtop. */
5734 static void expr_const1(void)
5736 const_wanted++;
5737 nocode_wanted++;
5738 expr_cond();
5739 nocode_wanted--;
5740 const_wanted--;
5743 /* parse an integer constant and return its value. */
5744 static inline int64_t expr_const64(void)
5746 int64_t c;
5747 expr_const1();
5748 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
5749 expect("constant expression");
5750 c = vtop->c.i;
5751 vpop();
5752 return c;
5755 /* parse an integer constant and return its value.
5756 Complain if it doesn't fit 32bit (signed or unsigned). */
5757 ST_FUNC int expr_const(void)
5759 int c;
5760 int64_t wc = expr_const64();
5761 c = wc;
5762 if (c != wc && (unsigned)c != wc)
5763 tcc_error("constant exceeds 32 bit");
5764 return c;
5767 /* return the label token if current token is a label, otherwise
5768 return zero */
5769 static int is_label(void)
5771 int last_tok;
5773 /* fast test first */
5774 if (tok < TOK_UIDENT)
5775 return 0;
5776 /* no need to save tokc because tok is an identifier */
5777 last_tok = tok;
5778 next();
5779 if (tok == ':') {
5780 return last_tok;
5781 } else {
5782 unget_tok(last_tok);
5783 return 0;
5787 #ifndef TCC_TARGET_ARM64
5788 static void gfunc_return(CType *func_type)
5790 if ((func_type->t & VT_BTYPE) == VT_STRUCT) {
5791 CType type, ret_type;
5792 int ret_align, ret_nregs, regsize;
5793 ret_nregs = gfunc_sret(func_type, func_var, &ret_type,
5794 &ret_align, &regsize);
5795 if (0 == ret_nregs) {
5796 /* if returning structure, must copy it to implicit
5797 first pointer arg location */
5798 type = *func_type;
5799 mk_pointer(&type);
5800 vset(&type, VT_LOCAL | VT_LVAL, func_vc);
5801 indir();
5802 vswap();
5803 /* copy structure value to pointer */
5804 vstore();
5805 } else {
5806 /* returning structure packed into registers */
5807 int r, size, addr, align;
5808 size = type_size(func_type,&align);
5809 if ((vtop->r != (VT_LOCAL | VT_LVAL) ||
5810 (vtop->c.i & (ret_align-1)))
5811 && (align & (ret_align-1))) {
5812 loc = (loc - size) & -ret_align;
5813 addr = loc;
5814 type = *func_type;
5815 vset(&type, VT_LOCAL | VT_LVAL, addr);
5816 vswap();
5817 vstore();
5818 vpop();
5819 vset(&ret_type, VT_LOCAL | VT_LVAL, addr);
5821 vtop->type = ret_type;
5822 if (is_float(ret_type.t))
5823 r = rc_fret(ret_type.t);
5824 else
5825 r = RC_IRET;
5827 if (ret_nregs == 1)
5828 gv(r);
5829 else {
5830 for (;;) {
5831 vdup();
5832 gv(r);
5833 vpop();
5834 if (--ret_nregs == 0)
5835 break;
5836 /* We assume that when a structure is returned in multiple
5837 registers, their classes are consecutive values of the
5838 sequence s(n) = 2^n */
5839 r <<= 1;
5840 vtop->c.i += regsize;
5844 } else if (is_float(func_type->t)) {
5845 gv(rc_fret(func_type->t));
5846 } else {
5847 gv(RC_IRET);
5849 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
5851 #endif
5853 static int case_cmp(const void *pa, const void *pb)
5855 int64_t a = (*(struct case_t**) pa)->v1;
5856 int64_t b = (*(struct case_t**) pb)->v1;
5857 return a < b ? -1 : a > b;
5860 static void gcase(struct case_t **base, int len, int *bsym)
5862 struct case_t *p;
5863 int e;
5864 int ll = (vtop->type.t & VT_BTYPE) == VT_LLONG;
5865 gv(RC_INT);
5866 while (len > 4) {
5867 /* binary search */
5868 p = base[len/2];
5869 vdup();
5870 if (ll)
5871 vpushll(p->v2);
5872 else
5873 vpushi(p->v2);
5874 gen_op(TOK_LE);
5875 e = gtst(1, 0);
5876 vdup();
5877 if (ll)
5878 vpushll(p->v1);
5879 else
5880 vpushi(p->v1);
5881 gen_op(TOK_GE);
5882 gtst_addr(0, p->sym); /* v1 <= x <= v2 */
5883 /* x < v1 */
5884 gcase(base, len/2, bsym);
5885 if (cur_switch->def_sym)
5886 gjmp_addr(cur_switch->def_sym);
5887 else
5888 *bsym = gjmp(*bsym);
5889 /* x > v2 */
5890 gsym(e);
5891 e = len/2 + 1;
5892 base += e; len -= e;
5894 /* linear scan */
5895 while (len--) {
5896 p = *base++;
5897 vdup();
5898 if (ll)
5899 vpushll(p->v2);
5900 else
5901 vpushi(p->v2);
5902 if (p->v1 == p->v2) {
5903 gen_op(TOK_EQ);
5904 gtst_addr(0, p->sym);
5905 } else {
5906 gen_op(TOK_LE);
5907 e = gtst(1, 0);
5908 vdup();
5909 if (ll)
5910 vpushll(p->v1);
5911 else
5912 vpushi(p->v1);
5913 gen_op(TOK_GE);
5914 gtst_addr(0, p->sym);
5915 gsym(e);
5920 static void block(int *bsym, int *csym, int is_expr)
5922 int a, b, c, d, cond;
5923 Sym *s;
5925 /* generate line number info */
5926 if (tcc_state->do_debug)
5927 tcc_debug_line(tcc_state);
5929 if (is_expr) {
5930 /* default return value is (void) */
5931 vpushi(0);
5932 vtop->type.t = VT_VOID;
5935 if (tok == TOK_IF) {
5936 /* if test */
5937 int saved_nocode_wanted = nocode_wanted;
5938 next();
5939 skip('(');
5940 gexpr();
5941 skip(')');
5942 cond = condition_3way();
5943 if (cond == 1)
5944 a = 0, vpop();
5945 else
5946 a = gvtst(1, 0);
5947 if (cond == 0)
5948 nocode_wanted |= 0x20000000;
5949 block(bsym, csym, 0);
5950 if (cond != 1)
5951 nocode_wanted = saved_nocode_wanted;
5952 c = tok;
5953 if (c == TOK_ELSE) {
5954 next();
5955 d = gjmp(0);
5956 gsym(a);
5957 if (cond == 1)
5958 nocode_wanted |= 0x20000000;
5959 block(bsym, csym, 0);
5960 gsym(d); /* patch else jmp */
5961 if (cond != 0)
5962 nocode_wanted = saved_nocode_wanted;
5963 } else
5964 gsym(a);
5965 } else if (tok == TOK_WHILE) {
5966 int saved_nocode_wanted;
5967 nocode_wanted &= ~0x20000000;
5968 next();
5969 d = ind;
5970 vla_sp_restore();
5971 skip('(');
5972 gexpr();
5973 skip(')');
5974 a = gvtst(1, 0);
5975 b = 0;
5976 ++local_scope;
5977 saved_nocode_wanted = nocode_wanted;
5978 block(&a, &b, 0);
5979 nocode_wanted = saved_nocode_wanted;
5980 --local_scope;
5981 gjmp_addr(d);
5982 gsym(a);
5983 gsym_addr(b, d);
5984 } else if (tok == '{') {
5985 Sym *llabel;
5986 int block_vla_sp_loc = vla_sp_loc, saved_vlas_in_scope = vlas_in_scope;
5988 next();
5989 /* record local declaration stack position */
5990 s = local_stack;
5991 llabel = local_label_stack;
5992 ++local_scope;
5994 /* handle local labels declarations */
5995 if (tok == TOK_LABEL) {
5996 next();
5997 for(;;) {
5998 if (tok < TOK_UIDENT)
5999 expect("label identifier");
6000 label_push(&local_label_stack, tok, LABEL_DECLARED);
6001 next();
6002 if (tok == ',') {
6003 next();
6004 } else {
6005 skip(';');
6006 break;
6010 while (tok != '}') {
6011 if ((a = is_label()))
6012 unget_tok(a);
6013 else
6014 decl(VT_LOCAL);
6015 if (tok != '}') {
6016 if (is_expr)
6017 vpop();
6018 block(bsym, csym, is_expr);
6021 /* pop locally defined labels */
6022 label_pop(&local_label_stack, llabel, is_expr);
6023 /* pop locally defined symbols */
6024 --local_scope;
6025 /* In the is_expr case (a statement expression is finished here),
6026 vtop might refer to symbols on the local_stack. Either via the
6027 type or via vtop->sym. We can't pop those nor any that in turn
6028 might be referred to. To make it easier we don't roll back
6029 any symbols in that case; some upper level call to block() will
6030 do that. We do have to remove such symbols from the lookup
6031 tables, though. sym_pop will do that. */
6032 sym_pop(&local_stack, s, is_expr);
6034 /* Pop VLA frames and restore stack pointer if required */
6035 if (vlas_in_scope > saved_vlas_in_scope) {
6036 vla_sp_loc = saved_vlas_in_scope ? block_vla_sp_loc : vla_sp_root_loc;
6037 vla_sp_restore();
6039 vlas_in_scope = saved_vlas_in_scope;
6041 next();
6042 } else if (tok == TOK_RETURN) {
6043 next();
6044 if (tok != ';') {
6045 gexpr();
6046 gen_assign_cast(&func_vt);
6047 if ((func_vt.t & VT_BTYPE) == VT_VOID)
6048 vtop--;
6049 else
6050 gfunc_return(&func_vt);
6052 skip(';');
6053 /* jump unless last stmt in top-level block */
6054 if (tok != '}' || local_scope != 1)
6055 rsym = gjmp(rsym);
6056 nocode_wanted |= 0x20000000;
6057 } else if (tok == TOK_BREAK) {
6058 /* compute jump */
6059 if (!bsym)
6060 tcc_error("cannot break");
6061 *bsym = gjmp(*bsym);
6062 next();
6063 skip(';');
6064 nocode_wanted |= 0x20000000;
6065 } else if (tok == TOK_CONTINUE) {
6066 /* compute jump */
6067 if (!csym)
6068 tcc_error("cannot continue");
6069 vla_sp_restore_root();
6070 *csym = gjmp(*csym);
6071 next();
6072 skip(';');
6073 } else if (tok == TOK_FOR) {
6074 int e;
6075 int saved_nocode_wanted;
6076 nocode_wanted &= ~0x20000000;
6077 next();
6078 skip('(');
6079 s = local_stack;
6080 ++local_scope;
6081 if (tok != ';') {
6082 /* c99 for-loop init decl? */
6083 if (!decl0(VT_LOCAL, 1, NULL)) {
6084 /* no, regular for-loop init expr */
6085 gexpr();
6086 vpop();
6089 skip(';');
6090 d = ind;
6091 c = ind;
6092 vla_sp_restore();
6093 a = 0;
6094 b = 0;
6095 if (tok != ';') {
6096 gexpr();
6097 a = gvtst(1, 0);
6099 skip(';');
6100 if (tok != ')') {
6101 e = gjmp(0);
6102 c = ind;
6103 vla_sp_restore();
6104 gexpr();
6105 vpop();
6106 gjmp_addr(d);
6107 gsym(e);
6109 skip(')');
6110 saved_nocode_wanted = nocode_wanted;
6111 block(&a, &b, 0);
6112 nocode_wanted = saved_nocode_wanted;
6113 gjmp_addr(c);
6114 gsym(a);
6115 gsym_addr(b, c);
6116 --local_scope;
6117 sym_pop(&local_stack, s, 0);
6119 } else
6120 if (tok == TOK_DO) {
6121 int saved_nocode_wanted;
6122 nocode_wanted &= ~0x20000000;
6123 next();
6124 a = 0;
6125 b = 0;
6126 d = ind;
6127 vla_sp_restore();
6128 saved_nocode_wanted = nocode_wanted;
6129 block(&a, &b, 0);
6130 skip(TOK_WHILE);
6131 skip('(');
6132 gsym(b);
6133 if (b)
6134 nocode_wanted = saved_nocode_wanted;
6135 gexpr();
6136 c = gvtst(0, 0);
6137 gsym_addr(c, d);
6138 nocode_wanted = saved_nocode_wanted;
6139 skip(')');
6140 gsym(a);
6141 skip(';');
6142 } else
6143 if (tok == TOK_SWITCH) {
6144 struct switch_t *saved, sw;
6145 int saved_nocode_wanted = nocode_wanted;
6146 SValue switchval;
6147 next();
6148 skip('(');
6149 gexpr();
6150 skip(')');
6151 switchval = *vtop--;
6152 a = 0;
6153 b = gjmp(0); /* jump to first case */
6154 sw.p = NULL; sw.n = 0; sw.def_sym = 0;
6155 saved = cur_switch;
6156 cur_switch = &sw;
6157 block(&a, csym, 0);
6158 nocode_wanted = saved_nocode_wanted;
6159 a = gjmp(a); /* add implicit break */
6160 /* case lookup */
6161 gsym(b);
6162 qsort(sw.p, sw.n, sizeof(void*), case_cmp);
6163 for (b = 1; b < sw.n; b++)
6164 if (sw.p[b - 1]->v2 >= sw.p[b]->v1)
6165 tcc_error("duplicate case value");
6166 /* Our switch table sorting is signed, so the compared
6167 value needs to be as well when it's 64bit. */
6168 if ((switchval.type.t & VT_BTYPE) == VT_LLONG)
6169 switchval.type.t &= ~VT_UNSIGNED;
6170 vpushv(&switchval);
6171 gcase(sw.p, sw.n, &a);
6172 vpop();
6173 if (sw.def_sym)
6174 gjmp_addr(sw.def_sym);
6175 dynarray_reset(&sw.p, &sw.n);
6176 cur_switch = saved;
6177 /* break label */
6178 gsym(a);
6179 } else
6180 if (tok == TOK_CASE) {
6181 struct case_t *cr = tcc_malloc(sizeof(struct case_t));
6182 if (!cur_switch)
6183 expect("switch");
6184 nocode_wanted &= ~0x20000000;
6185 next();
6186 cr->v1 = cr->v2 = expr_const64();
6187 if (gnu_ext && tok == TOK_DOTS) {
6188 next();
6189 cr->v2 = expr_const64();
6190 if (cr->v2 < cr->v1)
6191 tcc_warning("empty case range");
6193 cr->sym = ind;
6194 dynarray_add(&cur_switch->p, &cur_switch->n, cr);
6195 skip(':');
6196 is_expr = 0;
6197 goto block_after_label;
6198 } else
6199 if (tok == TOK_DEFAULT) {
6200 next();
6201 skip(':');
6202 if (!cur_switch)
6203 expect("switch");
6204 if (cur_switch->def_sym)
6205 tcc_error("too many 'default'");
6206 cur_switch->def_sym = ind;
6207 is_expr = 0;
6208 goto block_after_label;
6209 } else
6210 if (tok == TOK_GOTO) {
6211 next();
6212 if (tok == '*' && gnu_ext) {
6213 /* computed goto */
6214 next();
6215 gexpr();
6216 if ((vtop->type.t & VT_BTYPE) != VT_PTR)
6217 expect("pointer");
6218 ggoto();
6219 } else if (tok >= TOK_UIDENT) {
6220 s = label_find(tok);
6221 /* put forward definition if needed */
6222 if (!s) {
6223 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
6224 } else {
6225 if (s->r == LABEL_DECLARED)
6226 s->r = LABEL_FORWARD;
6228 vla_sp_restore_root();
6229 if (s->r & LABEL_FORWARD)
6230 s->jnext = gjmp(s->jnext);
6231 else
6232 gjmp_addr(s->jnext);
6233 next();
6234 } else {
6235 expect("label identifier");
6237 skip(';');
6238 } else if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
6239 asm_instr();
6240 } else {
6241 b = is_label();
6242 if (b) {
6243 /* label case */
6244 next();
6245 s = label_find(b);
6246 if (s) {
6247 if (s->r == LABEL_DEFINED)
6248 tcc_error("duplicate label '%s'", get_tok_str(s->v, NULL));
6249 gsym(s->jnext);
6250 s->r = LABEL_DEFINED;
6251 } else {
6252 s = label_push(&global_label_stack, b, LABEL_DEFINED);
6254 s->jnext = ind;
6255 vla_sp_restore();
6256 /* we accept this, but it is a mistake */
6257 block_after_label:
6258 nocode_wanted &= ~0x20000000;
6259 if (tok == '}') {
6260 tcc_warning("deprecated use of label at end of compound statement");
6261 } else {
6262 if (is_expr)
6263 vpop();
6264 block(bsym, csym, is_expr);
6266 } else {
6267 /* expression case */
6268 if (tok != ';') {
6269 if (is_expr) {
6270 vpop();
6271 gexpr();
6272 } else {
6273 gexpr();
6274 vpop();
6277 skip(';');
6282 /* This skips over a stream of tokens containing balanced {} and ()
6283 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
6284 with a '{'). If STR is non-NULL, the skipped tokens are allocated and
6285 stored in *STR. This doesn't check whether () and {} are nested correctly,
6286 i.e. "({)}" is accepted. */
6287 static void skip_or_save_block(TokenString **str)
6289 int braces = tok == '{';
6290 int level = 0;
6291 if (str)
6292 *str = tok_str_alloc();
6294 while ((level > 0 || (tok != '}' && tok != ',' && tok != ';' && tok != ')'))) {
6295 int t;
6296 if (tok == TOK_EOF) {
6297 if (str || level > 0)
6298 tcc_error("unexpected end of file");
6299 else
6300 break;
6302 if (str)
6303 tok_str_add_tok(*str);
6304 t = tok;
6305 next();
6306 if (t == '{' || t == '(') {
6307 level++;
6308 } else if (t == '}' || t == ')') {
6309 level--;
6310 if (level == 0 && braces && t == '}')
6311 break;
6314 if (str) {
6315 tok_str_add(*str, -1);
6316 tok_str_add(*str, 0);
6320 #define EXPR_CONST 1
6321 #define EXPR_ANY 2
6323 static void parse_init_elem(int expr_type)
6325 int saved_global_expr;
6326 switch(expr_type) {
6327 case EXPR_CONST:
6328 /* compound literals must be allocated globally in this case */
6329 saved_global_expr = global_expr;
6330 global_expr = 1;
6331 expr_const1();
6332 global_expr = saved_global_expr;
6333 /* NOTE: symbols are accepted, as well as lvalue for anon symbols
6334 (compound literals). */
6335 if (((vtop->r & (VT_VALMASK | VT_LVAL)) != VT_CONST
6336 && ((vtop->r & (VT_SYM|VT_LVAL)) != (VT_SYM|VT_LVAL)
6337 || vtop->sym->v < SYM_FIRST_ANOM))
6338 #ifdef TCC_TARGET_PE
6339 || ((vtop->r & VT_SYM) && vtop->sym->a.dllimport)
6340 #endif
6342 tcc_error("initializer element is not constant");
6343 break;
6344 case EXPR_ANY:
6345 expr_eq();
6346 break;
6350 /* put zeros for variable based init */
6351 static void init_putz(Section *sec, unsigned long c, int size)
6353 if (sec) {
6354 /* nothing to do because globals are already set to zero */
6355 } else {
6356 vpush_global_sym(&func_old_type, TOK_memset);
6357 vseti(VT_LOCAL, c);
6358 #ifdef TCC_TARGET_ARM
6359 vpushs(size);
6360 vpushi(0);
6361 #else
6362 vpushi(0);
6363 vpushs(size);
6364 #endif
6365 gfunc_call(3);
6369 /* t is the array or struct type. c is the array or struct
6370 address. cur_field is the pointer to the current
6371 field; for arrays the 'c' member contains the current start
6372 index. 'size_only' is true if only size info is needed (only used
6373 in arrays). al contains the already initialized length of the
6374 current container (starting at c). This returns the new length of that. */
6375 static int decl_designator(CType *type, Section *sec, unsigned long c,
6376 Sym **cur_field, int size_only, int al)
6378 Sym *s, *f;
6379 int index, index_last, align, l, nb_elems, elem_size;
6380 unsigned long corig = c;
6382 elem_size = 0;
6383 nb_elems = 1;
6384 if (gnu_ext && (l = is_label()) != 0)
6385 goto struct_field;
6386 /* NOTE: we only support ranges for last designator */
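/* Illustrative example (not part of the original source) of a GNU range
   designator handled below:
       int a[16] = { [2 ... 5] = 7 };   // initializes a[2] through a[5]
*/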
6387 while (nb_elems == 1 && (tok == '[' || tok == '.')) {
6388 if (tok == '[') {
6389 if (!(type->t & VT_ARRAY))
6390 expect("array type");
6391 next();
6392 index = index_last = expr_const();
6393 if (tok == TOK_DOTS && gnu_ext) {
6394 next();
6395 index_last = expr_const();
6397 skip(']');
6398 s = type->ref;
6399 if (index < 0 || (s->c >= 0 && index_last >= s->c) ||
6400 index_last < index)
6401 tcc_error("invalid index");
6402 if (cur_field)
6403 (*cur_field)->c = index_last;
6404 type = pointed_type(type);
6405 elem_size = type_size(type, &align);
6406 c += index * elem_size;
6407 nb_elems = index_last - index + 1;
6408 } else {
6409 next();
6410 l = tok;
6411 struct_field:
6412 next();
6413 if ((type->t & VT_BTYPE) != VT_STRUCT)
6414 expect("struct/union type");
6415 f = find_field(type, l);
6416 if (!f)
6417 expect("field");
6418 if (cur_field)
6419 *cur_field = f;
6420 type = &f->type;
6421 c += f->c;
6423 cur_field = NULL;
6425 if (!cur_field) {
6426 if (tok == '=') {
6427 next();
6428 } else if (!gnu_ext) {
6429 expect("=");
6431 } else {
6432 if (type->t & VT_ARRAY) {
6433 index = (*cur_field)->c;
6434 if (type->ref->c >= 0 && index >= type->ref->c)
6435 tcc_error("index too large");
6436 type = pointed_type(type);
6437 c += index * type_size(type, &align);
6438 } else {
6439 f = *cur_field;
6440 while (f && (f->v & SYM_FIRST_ANOM) && (f->type.t & VT_BITFIELD))
6441 *cur_field = f = f->next;
6442 if (!f)
6443 tcc_error("too many field init");
6444 type = &f->type;
6445 c += f->c;
6448 /* must put zero in holes (note that doing it that way
6449 ensures that it even works with designators) */
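/* Illustrative example (not part of the original source):
       int a[4] = { [3] = 1 };   // a[0]..a[2] are the holes zeroed here
*/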
6450 if (!size_only && c - corig > al)
6451 init_putz(sec, corig + al, c - corig - al);
6452 decl_initializer(type, sec, c, 0, size_only);
6454 /* XXX: make it more general */
6455 if (!size_only && nb_elems > 1) {
6456 unsigned long c_end;
6457 uint8_t *src, *dst;
6458 int i;
6460 if (!sec) {
6461 vset(type, VT_LOCAL|VT_LVAL, c);
6462 for (i = 1; i < nb_elems; i++) {
6463 vset(type, VT_LOCAL|VT_LVAL, c + elem_size * i);
6464 vswap();
6465 vstore();
6467 vpop();
6468 } else if (!NODATA_WANTED) {
6469 c_end = c + nb_elems * elem_size;
6470 if (c_end > sec->data_allocated)
6471 section_realloc(sec, c_end);
6472 src = sec->data + c;
6473 dst = src;
6474 for(i = 1; i < nb_elems; i++) {
6475 dst += elem_size;
6476 memcpy(dst, src, elem_size);
6480 c += nb_elems * type_size(type, &align);
6481 if (c - corig > al)
6482 al = c - corig;
6483 return al;
6486 /* store a value or an expression directly in global data or in local array */
6487 static void init_putv(CType *type, Section *sec, unsigned long c)
6489 int bt;
6490 void *ptr;
6491 CType dtype;
6493 dtype = *type;
6494 dtype.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
6496 if (sec) {
6497 int size, align;
6498 /* XXX: not portable */
6499 /* XXX: generate error if incorrect relocation */
6500 gen_assign_cast(&dtype);
6501 bt = type->t & VT_BTYPE;
6503 if ((vtop->r & VT_SYM)
6504 && bt != VT_PTR
6505 && bt != VT_FUNC
6506 && (bt != (PTR_SIZE == 8 ? VT_LLONG : VT_INT)
6507 || (type->t & VT_BITFIELD))
6508 && !((vtop->r & VT_CONST) && vtop->sym->v >= SYM_FIRST_ANOM)
6510 tcc_error("initializer element is not computable at load time");
6512 if (NODATA_WANTED) {
6513 vtop--;
6514 return;
6517 size = type_size(type, &align);
6518 section_reserve(sec, c + size);
6519 ptr = sec->data + c;
6521 /* XXX: make code faster ? */
6522 if ((vtop->r & (VT_SYM|VT_CONST)) == (VT_SYM|VT_CONST) &&
6523 vtop->sym->v >= SYM_FIRST_ANOM &&
6524 /* XXX This rejects compound literals like
6525 '(void *){ptr}'. The problem is that '&sym' is
6526 represented the same way, which would be ruled out
6527 by the SYM_FIRST_ANOM check above, but also '"string"'
6528 in 'char *p = "string"' is represented the same
6529 with the type being VT_PTR and the symbol being an
6530 anonymous one. That is, there's no difference in vtop
6531 between '(void *){x}' and '&(void *){x}'. Ignore
6532 pointer typed entities here. Hopefully no real code
6533 will ever use compound literals with scalar types. */
6534 (vtop->type.t & VT_BTYPE) != VT_PTR) {
6535 /* These come from compound literals, memcpy stuff over. */
6536 Section *ssec;
6537 ElfSym *esym;
6538 ElfW_Rel *rel;
6539 esym = elfsym(vtop->sym);
6540 ssec = tcc_state->sections[esym->st_shndx];
6541 memmove (ptr, ssec->data + esym->st_value, size);
6542 if (ssec->reloc) {
6543 /* We need to copy over all memory contents, and that
6544 includes relocations. Use the fact that relocs are
6545 created in order, so look from the end of the relocs
6546 until we hit one before the copied region. */
6547 int num_relocs = ssec->reloc->data_offset / sizeof(*rel);
6548 rel = (ElfW_Rel*)(ssec->reloc->data + ssec->reloc->data_offset);
6549 while (num_relocs--) {
6550 rel--;
6551 if (rel->r_offset >= esym->st_value + size)
6552 continue;
6553 if (rel->r_offset < esym->st_value)
6554 break;
6555 /* Note: if the same fields are initialized multiple
6556 times (possible with designators) then we possibly
6557 add multiple relocations for the same offset here.
6558 That would lead to wrong code; the last reloc needs
6559 to win. We clean this up later after the whole
6560 initializer is parsed. */
6561 put_elf_reloca(symtab_section, sec,
6562 c + rel->r_offset - esym->st_value,
6563 ELFW(R_TYPE)(rel->r_info),
6564 ELFW(R_SYM)(rel->r_info),
6565 #if PTR_SIZE == 8
6566 rel->r_addend
6567 #else
6569 #endif
6573 } else {
6574 if (type->t & VT_BITFIELD) {
6575 int bit_pos, bit_size, bits, n;
6576 unsigned char *p, v, m;
6577 bit_pos = BIT_POS(vtop->type.t);
6578 bit_size = BIT_SIZE(vtop->type.t);
6579 p = (unsigned char*)ptr + (bit_pos >> 3);
6580 bit_pos &= 7, bits = 0;
6581 while (bit_size) {
6582 n = 8 - bit_pos;
6583 if (n > bit_size)
6584 n = bit_size;
6585 v = vtop->c.i >> bits << bit_pos;
6586 m = ((1 << n) - 1) << bit_pos;
6587 *p = (*p & ~m) | (v & m);
6588 bits += n, bit_size -= n, bit_pos = 0, ++p;
6590 } else
6591 switch(bt) {
6592 /* XXX: when cross-compiling we assume that each type has the
6593 same representation on host and target, which is likely to
6594 be wrong in the case of long double */
6595 case VT_BOOL:
6596 vtop->c.i = vtop->c.i != 0;
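/* fall through: a _Bool is then stored like a byte */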
6597 case VT_BYTE:
6598 *(char *)ptr |= vtop->c.i;
6599 break;
6600 case VT_SHORT:
6601 *(short *)ptr |= vtop->c.i;
6602 break;
6603 case VT_FLOAT:
6604 *(float*)ptr = vtop->c.f;
6605 break;
6606 case VT_DOUBLE:
6607 *(double *)ptr = vtop->c.d;
6608 break;
6609 case VT_LDOUBLE:
6610 #if defined TCC_IS_NATIVE_387
6611 if (sizeof (long double) >= 10) /* zero pad ten-byte LD */
6612 memcpy(ptr, &vtop->c.ld, 10);
6613 #ifdef __TINYC__
6614 else if (sizeof (long double) == sizeof (double))
6615 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr) : "m" (vtop->c.ld));
6616 #endif
6617 else if (vtop->c.ld == 0.0)
6619 else
6620 #endif
6621 if (sizeof(long double) == LDOUBLE_SIZE)
6622 *(long double*)ptr = vtop->c.ld;
6623 else if (sizeof(double) == LDOUBLE_SIZE)
6624 *(double *)ptr = (double)vtop->c.ld;
6625 else
6626 tcc_error("can't cross compile long double constants");
6627 break;
6628 #if PTR_SIZE != 8
6629 case VT_LLONG:
6630 *(long long *)ptr |= vtop->c.i;
6631 break;
6632 #else
6633 case VT_LLONG:
6634 #endif
6635 case VT_PTR:
6637 addr_t val = vtop->c.i;
6638 #if PTR_SIZE == 8
6639 if (vtop->r & VT_SYM)
6640 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
6641 else
6642 *(addr_t *)ptr |= val;
6643 #else
6644 if (vtop->r & VT_SYM)
6645 greloc(sec, vtop->sym, c, R_DATA_PTR);
6646 *(addr_t *)ptr |= val;
6647 #endif
6648 break;
6650 default:
6652 int val = vtop->c.i;
6653 #if PTR_SIZE == 8
6654 if (vtop->r & VT_SYM)
6655 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
6656 else
6657 *(int *)ptr |= val;
6658 #else
6659 if (vtop->r & VT_SYM)
6660 greloc(sec, vtop->sym, c, R_DATA_PTR);
6661 *(int *)ptr |= val;
6662 #endif
6663 break;
6664 }
6665 }
6666 }
6667 vtop--;
6668 } else {
6669 vset(&dtype, VT_LOCAL|VT_LVAL, c);
6670 vswap();
6671 vstore();
6672 vpop();
6676 /* 't' contains the type and storage info. 'c' is the offset of the
6677 object in section 'sec'. If 'sec' is NULL, it means stack based
6678 allocation. 'first' is true if array '{' must be read (multi
6679 dimension implicit array init handling). 'size_only' is true if
6680 size only evaluation is wanted (only for arrays). */
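/* For example, with "int a[2][2] = { 1, 2, 3, 4 };" the outermost call
   uses first != 0 (the '{' is required), while the recursive calls for
   each row pass first == 0, which is how the omitted inner braces are
   accepted. */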
6681 static void decl_initializer(CType *type, Section *sec, unsigned long c,
6682 int first, int size_only)
6684 int len, n, no_oblock, nb, i;
6685 int size1, align1;
6686 int have_elem;
6687 Sym *s, *f;
6688 Sym indexsym;
6689 CType *t1;
6691 /* If we are currently at a '}' or ',', we have read an initializer
6692 element in one of our callers but not yet consumed it. */
6693 have_elem = tok == '}' || tok == ',';
6694 if (!have_elem && tok != '{' &&
6695 /* In case of strings we have special handling for arrays, so
6696 don't consume them as initializer value (which would commit them
6697 to some anonymous symbol). */
6698 tok != TOK_LSTR && tok != TOK_STR &&
6699 !size_only) {
6700 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
6701 have_elem = 1;
6704 if (have_elem &&
6705 !(type->t & VT_ARRAY) &&
6706 /* Compare unqualified types, to strip toplevel qualifiers.
6707 The source type might have VT_CONSTANT set, which is
6708 of course assignable to non-const elements. */
6709 is_compatible_unqualified_types(type, &vtop->type)) {
6710 init_putv(type, sec, c);
6711 } else if (type->t & VT_ARRAY) {
6712 s = type->ref;
6713 n = s->c;
6714 t1 = pointed_type(type);
6715 size1 = type_size(t1, &align1);
6717 no_oblock = 1;
6718 if ((first && tok != TOK_LSTR && tok != TOK_STR) ||
6719 tok == '{') {
6720 if (tok != '{')
6721 tcc_error("character array initializer must be a literal,"
6722 " optionally enclosed in braces");
6723 skip('{');
6724 no_oblock = 0;
6727 /* only parse strings here if correct type (otherwise: handle
6728 them as ((w)char *) expressions) */
6729 if ((tok == TOK_LSTR &&
6730 #ifdef TCC_TARGET_PE
6731 (t1->t & VT_BTYPE) == VT_SHORT && (t1->t & VT_UNSIGNED)
6732 #else
6733 (t1->t & VT_BTYPE) == VT_INT
6734 #endif
6735 ) || (tok == TOK_STR && (t1->t & VT_BTYPE) == VT_BYTE)) {
6736 len = 0;
6737 while (tok == TOK_STR || tok == TOK_LSTR) {
6738 int cstr_len, ch;
6740 /* compute maximum number of chars wanted */
6741 if (tok == TOK_STR)
6742 cstr_len = tokc.str.size;
6743 else
6744 cstr_len = tokc.str.size / sizeof(nwchar_t);
6745 cstr_len--;
6746 nb = cstr_len;
6747 if (n >= 0 && nb > (n - len))
6748 nb = n - len;
6749 if (!size_only) {
6750 if (cstr_len > nb)
6751 tcc_warning("initializer-string for array is too long");
6752 /* in order to go faster for the common case (char
6753 string in a global variable), we handle it
6754 specifically */
6755 if (sec && tok == TOK_STR && size1 == 1) {
6756 if (!NODATA_WANTED)
6757 memcpy(sec->data + c + len, tokc.str.data, nb);
6758 } else {
6759 for(i=0;i<nb;i++) {
6760 if (tok == TOK_STR)
6761 ch = ((unsigned char *)tokc.str.data)[i];
6762 else
6763 ch = ((nwchar_t *)tokc.str.data)[i];
6764 vpushi(ch);
6765 init_putv(t1, sec, c + (len + i) * size1);
6769 len += nb;
6770 next();
6772 /* only add trailing zero if enough storage (no
6773 warning in this case since it is standard) */
6774 if (n < 0 || len < n) {
6775 if (!size_only) {
6776 vpushi(0);
6777 init_putv(t1, sec, c + (len * size1));
6779 len++;
6781 len *= size1;
6782 } else {
6783 indexsym.c = 0;
6784 f = &indexsym;
6786 do_init_list:
6787 len = 0;
6788 while (tok != '}' || have_elem) {
6789 len = decl_designator(type, sec, c, &f, size_only, len);
6790 have_elem = 0;
6791 if (type->t & VT_ARRAY) {
6792 ++indexsym.c;
6793 /* special test for multi dimensional arrays (may not
6794 be strictly correct if designators are used at the
6795 same time) */
6796 if (no_oblock && len >= n*size1)
6797 break;
6798 } else {
6799 if (s->type.t == VT_UNION)
6800 f = NULL;
6801 else
6802 f = f->next;
6803 if (no_oblock && f == NULL)
6804 break;
6807 if (tok == '}')
6808 break;
6809 skip(',');
6812 /* put zeros at the end */
6813 if (!size_only && len < n*size1)
6814 init_putz(sec, c + len, n*size1 - len);
6815 if (!no_oblock)
6816 skip('}');
6817 /* patch type size if needed, which happens only for array types */
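/* e.g. char s[] = "hi"; arrives here with len == 3 (the two chars
   plus the trailing zero), so s->c becomes 3 below. */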
6818 if (n < 0)
6819 s->c = size1 == 1 ? len : ((len + size1 - 1)/size1);
6820 } else if ((type->t & VT_BTYPE) == VT_STRUCT) {
6821 size1 = 1;
6822 no_oblock = 1;
6823 if (first || tok == '{') {
6824 skip('{');
6825 no_oblock = 0;
6827 s = type->ref;
6828 f = s->next;
6829 n = s->c;
6830 goto do_init_list;
6831 } else if (tok == '{') {
6832 next();
6833 decl_initializer(type, sec, c, first, size_only);
6834 skip('}');
6835 } else if (size_only) {
6836 /* If we supported only ISO C we wouldn't have to accept calling
6837 this on anything other than an array with size_only==1 (and even then
6838 only on the outermost level, so no recursion would be needed),
6839 because initializing a flex array member isn't supported.
6840 But GNU C supports it, so we need to recurse even into
6841 subfields of structs and arrays when size_only is set. */
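/* A GNU C example that needs this recursion:
       struct s { int n; int tab[]; };
       static struct s v = { 1, { 2, 3 } };
   even in the size-only pass the initializer of 'tab' must be walked
   so that the final object size can be computed. */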
6842 /* just skip expression */
6843 skip_or_save_block(NULL);
6844 } else {
6845 if (!have_elem) {
6846 /* This should happen only when we haven't parsed
6847 the init element above for fear of committing a
6848 string constant to memory too early. */
6849 if (tok != TOK_STR && tok != TOK_LSTR)
6850 expect("string constant");
6851 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
6853 init_putv(type, sec, c);
6857 /* parse an initializer for type 't' if 'has_init' is non zero, and
6858 allocate space in local or global data space ('r' is either
6859 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
6860 variable 'v' of scope 'scope' is declared before initializers
6861 are parsed. If 'v' is zero, then a reference to the new object
6862 is put in the value stack. If 'has_init' is 2, a special parsing
6863 is done to handle string constants. */
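/* has_init == 2 is used when a plain string literal appears as an
   expression, e.g. "ab" "cd": only the adjacent string tokens are
   collected, then re-parsed as the initializer of an anonymous
   char array. */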
6864 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r,
6865 int has_init, int v, int scope)
6867 int size, align, addr;
6868 TokenString *init_str = NULL;
6870 Section *sec;
6871 Sym *flexible_array;
6872 Sym *sym = NULL;
6873 int saved_nocode_wanted = nocode_wanted;
6874 #ifdef CONFIG_TCC_BCHECK
6875 int bcheck;
6876 #endif
6878 /* Always allocate static or global variables */
6879 if (v && (r & VT_VALMASK) == VT_CONST)
6880 nocode_wanted |= 0x80000000;
6882 #ifdef CONFIG_TCC_BCHECK
6883 bcheck = tcc_state->do_bounds_check && !NODATA_WANTED;
6884 #endif
6886 flexible_array = NULL;
6887 if ((type->t & VT_BTYPE) == VT_STRUCT) {
6888 Sym *field = type->ref->next;
6889 if (field) {
6890 while (field->next)
6891 field = field->next;
6892 if (field->type.t & VT_ARRAY && field->type.ref->c < 0)
6893 flexible_array = field;
6897 size = type_size(type, &align);
6898 /* If unknown size, we must evaluate it before
6899 evaluating initializers because
6900 initializers can generate global data too
6901 (e.g. string pointers or ISO C99 compound
6902 literals). It also simplifies the handling
6903 of local initializers. */
6904 if (size < 0 || (flexible_array && has_init)) {
6905 if (!has_init)
6906 tcc_error("unknown type size");
6907 /* get all init string */
6908 if (has_init == 2) {
6909 init_str = tok_str_alloc();
6910 /* only get strings */
6911 while (tok == TOK_STR || tok == TOK_LSTR) {
6912 tok_str_add_tok(init_str);
6913 next();
6915 tok_str_add(init_str, -1);
6916 tok_str_add(init_str, 0);
6917 } else {
6918 skip_or_save_block(&init_str);
6920 unget_tok(0);
6922 /* compute size */
6923 begin_macro(init_str, 1);
6924 next();
6925 decl_initializer(type, NULL, 0, 1, 1);
6926 /* prepare second initializer parsing */
6927 macro_ptr = init_str->str;
6928 next();
6930 /* if still unknown size, error */
6931 size = type_size(type, &align);
6932 if (size < 0)
6933 tcc_error("unknown type size");
6935 /* If there's a flexible array member and it was used in the
6936 initializer, adjust the size. */
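/* e.g. for struct { int n; char d[]; } v = { 1, "abc" }; the pass
   above gives 'd' an element count of 4 (string plus trailing zero),
   so four extra bytes are added to the allocation. */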
6937 if (flexible_array &&
6938 flexible_array->type.ref->c > 0)
6939 size += flexible_array->type.ref->c
6940 * pointed_size(&flexible_array->type);
6941 /* take into account specified alignment if bigger */
6942 if (ad->a.aligned) {
6943 int speca = 1 << (ad->a.aligned - 1);
6944 if (speca > align)
6945 align = speca;
6946 } else if (ad->a.packed) {
6947 align = 1;
6950 if (!v && NODATA_WANTED)
6951 size = 0, align = 1;
6953 if ((r & VT_VALMASK) == VT_LOCAL) {
6954 sec = NULL;
6955 #ifdef CONFIG_TCC_BCHECK
6956 if (bcheck && (type->t & VT_ARRAY)) {
6957 loc--;
6959 #endif
6960 loc = (loc - size) & -align;
6961 addr = loc;
6962 #ifdef CONFIG_TCC_BCHECK
6963 /* handles bounds */
6964 /* XXX: currently, since we do only one pass, we cannot track
6965 '&' operators, so we add only arrays */
6966 if (bcheck && (type->t & VT_ARRAY)) {
6967 addr_t *bounds_ptr;
6968 /* add padding between regions */
6969 loc--;
6970 /* then add local bound info */
6971 bounds_ptr = section_ptr_add(lbounds_section, 2 * sizeof(addr_t));
6972 bounds_ptr[0] = addr;
6973 bounds_ptr[1] = size;
6975 #endif
6976 if (v) {
6977 /* local variable */
6978 #ifdef CONFIG_TCC_ASM
6979 if (ad->asm_label) {
6980 int reg = asm_parse_regvar(ad->asm_label);
6981 if (reg >= 0)
6982 r = (r & ~VT_VALMASK) | reg;
6984 #endif
6985 sym = sym_push(v, type, r, addr);
6986 sym->a = ad->a;
6987 } else {
6988 /* push local reference */
6989 vset(type, r, addr);
6991 } else {
6992 if (v && scope == VT_CONST) {
6993 /* see if the symbol was already defined */
6994 sym = sym_find(v);
6995 if (sym) {
6996 patch_storage(sym, ad, type);
6997 /* we accept several definitions of the same global variable. */
6998 if (!has_init && sym->c && elfsym(sym)->st_shndx != SHN_UNDEF)
6999 goto no_alloc;
7003 /* allocate symbol in corresponding section */
7004 sec = ad->section;
7005 if (!sec) {
7006 if (has_init)
7007 sec = data_section;
7008 else if (tcc_state->nocommon)
7009 sec = bss_section;
7012 if (sec) {
7013 addr = section_add(sec, size, align);
7014 #ifdef CONFIG_TCC_BCHECK
7015 /* add padding if bound check */
7016 if (bcheck)
7017 section_add(sec, 1, 1);
7018 #endif
7019 } else {
7020 addr = align; /* SHN_COMMON is special, symbol value is align */
7021 sec = common_section;
7024 if (v) {
7025 if (!sym) {
7026 sym = sym_push(v, type, r | VT_SYM, 0);
7027 patch_storage(sym, ad, NULL);
7029 /* Local statics have a scope until now (for
7030 warnings), remove it here. */
7031 sym->sym_scope = 0;
7032 /* update symbol definition */
7033 put_extern_sym(sym, sec, addr, size);
7034 } else {
7035 /* push global reference */
7036 sym = get_sym_ref(type, sec, addr, size);
7037 vpushsym(type, sym);
7038 vtop->r |= r;
7041 #ifdef CONFIG_TCC_BCHECK
7042 /* handle bounds now, because the symbol must be defined
7043 before the relocation can refer to it */
7044 if (bcheck) {
7045 addr_t *bounds_ptr;
7047 greloca(bounds_section, sym, bounds_section->data_offset, R_DATA_PTR, 0);
7048 /* then add global bound info */
7049 bounds_ptr = section_ptr_add(bounds_section, 2 * sizeof(addr_t));
7050 bounds_ptr[0] = 0; /* relocated */
7051 bounds_ptr[1] = size;
7053 #endif
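/* Variable length arrays, e.g. "void f(int n) { int a[n]; }": no
   static storage is reserved; the stack pointer is saved when the
   first VLA of the scope appears and the array is then allocated by
   adjusting the stack at run time. */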
7056 if (type->t & VT_VLA) {
7057 int a;
7059 if (NODATA_WANTED)
7060 goto no_alloc;
7062 /* save current stack pointer */
7063 if (vlas_in_scope == 0) {
7064 if (vla_sp_root_loc == -1)
7065 vla_sp_root_loc = (loc -= PTR_SIZE);
7066 gen_vla_sp_save(vla_sp_root_loc);
7069 vla_runtime_type_size(type, &a);
7070 gen_vla_alloc(type, a);
7071 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
7072 /* on _WIN64, because of the function args scratch area, the
7073 result of alloca differs from RSP and is returned in RAX. */
7074 gen_vla_result(addr), addr = (loc -= PTR_SIZE);
7075 #endif
7076 gen_vla_sp_save(addr);
7077 vla_sp_loc = addr;
7078 vlas_in_scope++;
7080 } else if (has_init) {
7081 size_t oldreloc_offset = 0;
7082 if (sec && sec->reloc)
7083 oldreloc_offset = sec->reloc->data_offset;
7084 decl_initializer(type, sec, addr, 1, 0);
7085 if (sec && sec->reloc)
7086 squeeze_multi_relocs(sec, oldreloc_offset);
7087 /* patch flexible array member size back to -1, */
7088 /* for possible subsequent similar declarations */
7089 if (flexible_array)
7090 flexible_array->type.ref->c = -1;
7093 no_alloc:
7094 /* restore parse state if needed */
7095 if (init_str) {
7096 end_macro();
7097 next();
7100 nocode_wanted = saved_nocode_wanted;
7103 /* parse a function defined by symbol 'sym' and generate its code in
7104 'cur_text_section' */
7105 static void gen_function(Sym *sym)
7107 nocode_wanted = 0;
7108 ind = cur_text_section->data_offset;
7109 if (sym->a.aligned) {
7110 size_t newoff = section_add(cur_text_section, 0,
7111 1 << (sym->a.aligned - 1));
7112 gen_fill_nops(newoff - ind);
7114 /* NOTE: we patch the symbol size later */
7115 put_extern_sym(sym, cur_text_section, ind, 0);
7116 funcname = get_tok_str(sym->v, NULL);
7117 func_ind = ind;
7118 /* Initialize VLA state */
7119 vla_sp_loc = -1;
7120 vla_sp_root_loc = -1;
7121 /* put debug symbol */
7122 tcc_debug_funcstart(tcc_state, sym);
7123 /* push a dummy symbol to enable local sym storage */
7124 sym_push2(&local_stack, SYM_FIELD, 0, 0);
7125 local_scope = 1; /* for function parameters */
7126 gfunc_prolog(&sym->type);
7127 local_scope = 0;
7128 rsym = 0;
7129 block(NULL, NULL, 0);
7130 if (!(nocode_wanted & 0x20000000)
7131 && ((func_vt.t & VT_BTYPE) == VT_INT)
7132 && !strcmp (funcname, "main"))
7134 nocode_wanted = 0;
7135 vpushi(0);
7136 gen_assign_cast(&func_vt);
7137 gfunc_return(&func_vt);
7139 nocode_wanted = 0;
7140 gsym(rsym);
7141 gfunc_epilog();
7142 cur_text_section->data_offset = ind;
7143 label_pop(&global_label_stack, NULL, 0);
7144 /* reset local stack */
7145 local_scope = 0;
7146 sym_pop(&local_stack, NULL, 0);
7147 /* end of function */
7148 /* patch symbol size */
7149 elfsym(sym)->st_size = ind - func_ind;
7150 tcc_debug_funcend(tcc_state, ind - func_ind);
7151 /* It's better to crash than to generate wrong code */
7152 cur_text_section = NULL;
7153 funcname = ""; /* for safety */
7154 func_vt.t = VT_VOID; /* for safety */
7155 func_var = 0; /* for safety */
7156 ind = 0; /* for safety */
7157 nocode_wanted = 0x80000000;
7158 check_vstack();
7161 static void gen_inline_functions(TCCState *s)
7163 Sym *sym;
7164 int inline_generated, i, ln;
7165 struct InlineFunc *fn;
7167 ln = file->line_num;
7168 /* iterate while inline functions are referenced */
7169 do {
7170 inline_generated = 0;
7171 for (i = 0; i < s->nb_inline_fns; ++i) {
7172 fn = s->inline_fns[i];
7173 sym = fn->sym;
7174 if (sym && sym->c) {
7175 /* the function was used: generate its code and
7176 convert it to a normal function */
7177 fn->sym = NULL;
7178 if (file)
7179 pstrcpy(file->filename, sizeof file->filename, fn->filename);
7180 sym->type.t &= ~VT_INLINE;
7182 begin_macro(fn->func_str, 1);
7183 next();
7184 cur_text_section = text_section;
7185 gen_function(sym);
7186 end_macro();
7188 inline_generated = 1;
7191 } while (inline_generated);
7192 file->line_num = ln;
7195 ST_FUNC void free_inline_functions(TCCState *s)
7197 int i;
7198 /* free tokens of unused inline functions */
7199 for (i = 0; i < s->nb_inline_fns; ++i) {
7200 struct InlineFunc *fn = s->inline_fns[i];
7201 if (fn->sym)
7202 tok_str_free(fn->func_str);
7204 dynarray_reset(&s->inline_fns, &s->nb_inline_fns);
7207 /* 'l' is VT_LOCAL or VT_CONST to define default storage type, or VT_CMP
7208 if parsing old style parameter decl list (and FUNC_SYM is set then) */
7209 static int decl0(int l, int is_for_loop_init, Sym *func_sym)
7211 int v, has_init, r;
7212 CType type, btype;
7213 Sym *sym;
7214 AttributeDef ad;
7216 while (1) {
7217 if (!parse_btype(&btype, &ad)) {
7218 if (is_for_loop_init)
7219 return 0;
7220 /* skip redundant ';' if not in old parameter decl scope */
7221 if (tok == ';' && l != VT_CMP) {
7222 next();
7223 continue;
7225 if (l != VT_CONST)
7226 break;
7227 if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
7228 /* global asm block */
7229 asm_global_instr();
7230 continue;
7232 if (tok >= TOK_UIDENT) {
7233 /* special test for old K&R protos without explicit int
7234 type. Only accepted when defining global data */
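/* e.g. a file scope "count;" or an old style definition
   "main(argc, argv)" gets the implicit type int here. */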
7235 btype.t = VT_INT;
7236 } else {
7237 if (tok != TOK_EOF)
7238 expect("declaration");
7239 break;
7242 if (tok == ';') {
7243 if ((btype.t & VT_BTYPE) == VT_STRUCT) {
7244 int v = btype.ref->v;
7245 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) >= SYM_FIRST_ANOM)
7246 tcc_warning("unnamed struct/union that defines no instances");
7247 next();
7248 continue;
7250 if (IS_ENUM(btype.t)) {
7251 next();
7252 continue;
7255 while (1) { /* iterate thru each declaration */
7256 type = btype;
7257 /* If the base type itself was an array type of unspecified
7258 size (like in 'typedef int arr[]; arr x = {1};') then
7259 we will overwrite the unknown size by the real one for
7260 this decl. We need to unshare the ref symbol holding
7261 that size. */
7262 if ((type.t & VT_ARRAY) && type.ref->c < 0) {
7263 type.ref = sym_push(SYM_FIELD, &type.ref->type, 0, type.ref->c);
7265 type_decl(&type, &ad, &v, TYPE_DIRECT);
7266 #if 0
7268 char buf[500];
7269 type_to_str(buf, sizeof(buf), &type, get_tok_str(v, NULL));
7270 printf("type = '%s'\n", buf);
7272 #endif
7273 if ((type.t & VT_BTYPE) == VT_FUNC) {
7274 if ((type.t & VT_STATIC) && (l == VT_LOCAL)) {
7275 tcc_error("function without file scope cannot be static");
7277 /* if old style function prototype, we accept a
7278 declaration list */
7279 sym = type.ref;
7280 if (sym->f.func_type == FUNC_OLD && l == VT_CONST)
7281 decl0(VT_CMP, 0, sym);
7284 if (gnu_ext && (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
7285 ad.asm_label = asm_label_instr();
7286 /* parse one last attribute list, after asm label */
7287 parse_attribute(&ad);
7288 if (tok == '{')
7289 expect(";");
7292 #ifdef TCC_TARGET_PE
7293 if (ad.a.dllimport || ad.a.dllexport) {
7294 if (type.t & (VT_STATIC|VT_TYPEDEF))
7295 tcc_error("cannot have dll linkage with static or typedef");
7296 if (ad.a.dllimport) {
7297 if ((type.t & VT_BTYPE) == VT_FUNC)
7298 ad.a.dllimport = 0;
7299 else
7300 type.t |= VT_EXTERN;
7303 #endif
7304 if (tok == '{') {
7305 if (l != VT_CONST)
7306 tcc_error("cannot use local functions");
7307 if ((type.t & VT_BTYPE) != VT_FUNC)
7308 expect("function definition");
7310 /* reject abstract declarators in function definitions;
7311 make old style params without a decl default to int */
7312 sym = type.ref;
7313 while ((sym = sym->next) != NULL) {
7314 if (!(sym->v & ~SYM_FIELD))
7315 expect("identifier");
7316 if (sym->type.t == VT_VOID)
7317 sym->type = int_type;
7320 /* XXX: cannot do better now: convert extern inline to static inline */
7321 if ((type.t & (VT_EXTERN | VT_INLINE)) == (VT_EXTERN | VT_INLINE))
7322 type.t = (type.t & ~VT_EXTERN) | VT_STATIC;
7324 /* put function symbol */
7325 sym = external_global_sym(v, &type, 0);
7326 type.t &= ~VT_EXTERN;
7327 patch_storage(sym, &ad, &type);
7329 /* static inline functions are just recorded as a kind
7330 of macro. Their code will be emitted at the end of
7331 the compilation unit only if they are used */
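/* e.g. "static inline int sq(int x) { return x * x; }" is only
   tokenized and saved here; gen_inline_functions() emits its code
   later, and only if sq() is actually referenced. */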
7332 if ((type.t & (VT_INLINE | VT_STATIC)) ==
7333 (VT_INLINE | VT_STATIC)) {
7334 struct InlineFunc *fn;
7335 const char *filename;
7337 filename = file ? file->filename : "";
7338 fn = tcc_malloc(sizeof *fn + strlen(filename));
7339 strcpy(fn->filename, filename);
7340 fn->sym = sym;
7341 skip_or_save_block(&fn->func_str);
7342 dynarray_add(&tcc_state->inline_fns,
7343 &tcc_state->nb_inline_fns, fn);
7344 } else {
7345 /* compute text section */
7346 cur_text_section = ad.section;
7347 if (!cur_text_section)
7348 cur_text_section = text_section;
7349 gen_function(sym);
7351 break;
7352 } else {
7353 if (l == VT_CMP) {
7354 /* find parameter in function parameter list */
7355 for (sym = func_sym->next; sym; sym = sym->next)
7356 if ((sym->v & ~SYM_FIELD) == v)
7357 goto found;
7358 tcc_error("declaration for parameter '%s' but no such parameter",
7359 get_tok_str(v, NULL));
7360 found:
7361 if (type.t & VT_STORAGE) /* 'register' is okay */
7362 tcc_error("storage class specified for '%s'",
7363 get_tok_str(v, NULL));
7364 if (sym->type.t != VT_VOID)
7365 tcc_error("redefinition of parameter '%s'",
7366 get_tok_str(v, NULL));
7367 convert_parameter_type(&type);
7368 sym->type = type;
7369 } else if (type.t & VT_TYPEDEF) {
7370 /* save typedefed type */
7371 /* XXX: test storage specifiers ? */
7372 sym = sym_find(v);
7373 if (sym && sym->sym_scope == local_scope) {
7374 if (!is_compatible_types(&sym->type, &type)
7375 || !(sym->type.t & VT_TYPEDEF))
7376 tcc_error("incompatible redefinition of '%s'",
7377 get_tok_str(v, NULL));
7378 sym->type = type;
7379 } else {
7380 sym = sym_push(v, &type, 0, 0);
7382 sym->a = ad.a;
7383 sym->f = ad.f;
7384 } else if ((type.t & VT_BTYPE) == VT_VOID
7385 && !(type.t & VT_EXTERN)) {
7386 tcc_error("declaration of void object");
7387 } else {
7388 r = 0;
7389 if ((type.t & VT_BTYPE) == VT_FUNC) {
7390 /* external function definition */
7391 /* specific case for func_call attribute */
7392 type.ref->f = ad.f;
7393 } else if (!(type.t & VT_ARRAY)) {
7394 /* not lvalue if array */
7395 r |= lvalue_type(type.t);
7397 has_init = (tok == '=');
7398 if (has_init && (type.t & VT_VLA))
7399 tcc_error("variable length array cannot be initialized");
7400 if (((type.t & VT_EXTERN) && (!has_init || l != VT_CONST)) ||
7401 ((type.t & VT_BTYPE) == VT_FUNC) ||
7402 ((type.t & VT_ARRAY) && (type.t & VT_STATIC) &&
7403 !has_init && l == VT_CONST && type.ref->c < 0)) {
7404 /* external variable or function */
7405 /* NOTE: like GCC, we treat uninitialized global static
7406 arrays of unknown size as
7407 extern */
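/* e.g. "static int tab[];" (no size, no initializer) is treated
   like an extern declaration here instead of being allocated. */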
7408 type.t |= VT_EXTERN;
7409 sym = external_sym(v, &type, r, &ad);
7410 if (ad.alias_target) {
7411 ElfSym *esym;
7412 Sym *alias_target;
7413 alias_target = sym_find(ad.alias_target);
7414 esym = elfsym(alias_target);
7415 if (!esym)
7416 tcc_error("unsupported forward __alias__ attribute");
7417 /* Local statics have a scope until now (for
7418 warnings), remove it here. */
7419 sym->sym_scope = 0;
7420 put_extern_sym2(sym, esym->st_shndx, esym->st_value, esym->st_size, 0);
7422 } else {
7423 if (type.t & VT_STATIC)
7424 r |= VT_CONST;
7425 else
7426 r |= l;
7427 if (has_init)
7428 next();
7429 else if (l == VT_CONST)
7430 /* uninitialized global variables may be overridden */
7431 type.t |= VT_EXTERN;
7432 decl_initializer_alloc(&type, &ad, r, has_init, v, l);
7435 if (tok != ',') {
7436 if (is_for_loop_init)
7437 return 1;
7438 skip(';');
7439 break;
7441 next();
7443 ad.a.aligned = 0;
7446 return 0;
7449 static void decl(int l)
7451 decl0(l, 0, NULL);
7454 /* ------------------------------------------------------------------------- */