1 /*
2 * TCC - Tiny C Compiler
3 *
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 #include "tcc.h"
22 /********************************************************/
23 /* global variables */
25 /* loc : local variable index
26 ind : output code index
27 rsym: return symbol
28 anon_sym: anonymous symbol index
30 ST_DATA int rsym, anon_sym, ind, loc;
32 ST_DATA Sym *sym_free_first;
33 ST_DATA void **sym_pools;
34 ST_DATA int nb_sym_pools;
36 ST_DATA Sym *global_stack;
37 ST_DATA Sym *local_stack;
38 ST_DATA Sym *define_stack;
39 ST_DATA Sym *global_label_stack;
40 ST_DATA Sym *local_label_stack;
41 static int local_scope;
42 static int in_sizeof;
43 static int section_sym;
45 ST_DATA int vlas_in_scope; /* number of VLAs that are currently in scope */
46 ST_DATA int vla_sp_root_loc; /* vla_sp_loc for SP before any VLAs were pushed */
47 ST_DATA int vla_sp_loc; /* stack location of the variable holding the saved stack pointer, updated when the stack pointer is modified */
49 ST_DATA SValue __vstack[1+VSTACK_SIZE], *vtop, *pvtop;
51 ST_DATA int const_wanted; /* true if constant wanted */
52 ST_DATA int nocode_wanted; /* no code generation wanted */
53 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
54 #define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */
55 ST_DATA int global_expr; /* true if compound literals must be allocated globally (used during initializer parsing) */
56 ST_DATA CType func_vt; /* current function return type (used by return instruction) */
57 ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
58 ST_DATA int func_vc;
59 ST_DATA int last_line_num, last_ind, func_ind; /* debug last line number and pc */
60 ST_DATA const char *funcname;
61 ST_DATA int g_debug;
63 ST_DATA CType char_pointer_type, func_old_type, int_type, size_type, ptrdiff_type;
65 ST_DATA struct switch_t {
66 struct case_t {
67 int64_t v1, v2;
68 int sym;
69 } **p; int n; /* list of case ranges */
70 int def_sym; /* default symbol */
71 } *cur_switch; /* current switch */
73 /* ------------------------------------------------------------------------- */
75 static void gen_cast(CType *type);
76 static void gen_cast_s(int t);
77 static inline CType *pointed_type(CType *type);
78 static int is_compatible_types(CType *type1, CType *type2);
79 static int parse_btype(CType *type, AttributeDef *ad);
80 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td);
81 static void parse_expr_type(CType *type);
82 static void init_putv(CType *type, Section *sec, unsigned long c);
83 static void decl_initializer(CType *type, Section *sec, unsigned long c, int first, int size_only);
84 static void block(int *bsym, int *csym, int is_expr);
85 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
86 static void decl(int l);
87 static int decl0(int l, int is_for_loop_init, Sym *);
88 static void expr_eq(void);
89 static void vla_runtime_type_size(CType *type, int *a);
90 static void vla_sp_restore(void);
91 static void vla_sp_restore_root(void);
92 static int is_compatible_unqualified_types(CType *type1, CType *type2);
93 static inline int64_t expr_const64(void);
94 static void vpush64(int ty, unsigned long long v);
95 static void vpush(CType *type);
96 static int gvtst(int inv, int t);
97 static void gen_inline_functions(TCCState *s);
98 static void skip_or_save_block(TokenString **str);
99 static void gv_dup(void);
101 ST_INLN int is_float(int t)
103 int bt;
104 bt = t & VT_BTYPE;
105 return bt == VT_LDOUBLE || bt == VT_DOUBLE || bt == VT_FLOAT || bt == VT_QFLOAT;
108 /* we use our own 'finite' function to avoid potential problems with
109 non-standard math libs */
110 /* XXX: endianness dependent */
111 ST_FUNC int ieee_finite(double d)
113 int p[4];
114 memcpy(p, &d, sizeof(double));
115 return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
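/* Worked example (a sketch, assuming IEEE 754 doubles with the
   little-endian word order the code above relies on): p[1] holds the
   sign bit, the 11 exponent bits and the top 20 mantissa bits.
   OR-ing with 0x800fffff forces every bit except the exponent bits
   to 1, so p[1] | 0x800fffff == 0xffffffff exactly when the exponent
   is all ones (infinity or NaN); adding 1 then wraps to 0 and the
   shift by 31 yields 0.  For any finite value at least one exponent
   bit is 0, so the sum keeps its top bit set and the result is 1. */
#if 0
ieee_finite(1.0);      /* -> 1 */
ieee_finite(1.0/0.0);  /* -> 0 (infinity) */
ieee_finite(0.0/0.0);  /* -> 0 (NaN) */
#endif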
118 /* compiling intel long double natively */
119 #if (defined __i386__ || defined __x86_64__) \
120 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
121 # define TCC_IS_NATIVE_387
122 #endif
124 ST_FUNC void test_lvalue(void)
126 if (!(vtop->r & VT_LVAL))
127 expect("lvalue");
130 ST_FUNC void check_vstack(void)
132 if (pvtop != vtop)
133 tcc_error("internal compiler error: vstack leak (%d)", vtop - pvtop);
136 /* ------------------------------------------------------------------------- */
137 /* vstack debugging aid */
139 #if 0
140 void pv (const char *lbl, int a, int b)
142 int i;
143 for (i = a; i < a + b; ++i) {
144 SValue *p = &vtop[-i];
145 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
146 lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
149 #endif
151 /* ------------------------------------------------------------------------- */
152 /* start of translation unit info */
153 ST_FUNC void tcc_debug_start(TCCState *s1)
155 if (s1->do_debug) {
156 char buf[512];
158 /* file info: full path + filename */
159 section_sym = put_elf_sym(symtab_section, 0, 0,
160 ELFW(ST_INFO)(STB_LOCAL, STT_SECTION), 0,
161 text_section->sh_num, NULL);
162 getcwd(buf, sizeof(buf));
163 #ifdef _WIN32
164 normalize_slashes(buf);
165 #endif
166 pstrcat(buf, sizeof(buf), "/");
167 put_stabs_r(buf, N_SO, 0, 0,
168 text_section->data_offset, text_section, section_sym);
169 put_stabs_r(file->filename, N_SO, 0, 0,
170 text_section->data_offset, text_section, section_sym);
171 last_ind = 0;
172 last_line_num = 0;
175 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
176 symbols can be safely used */
177 put_elf_sym(symtab_section, 0, 0,
178 ELFW(ST_INFO)(STB_LOCAL, STT_FILE), 0,
179 SHN_ABS, file->filename);
182 /* put end of translation unit info */
183 ST_FUNC void tcc_debug_end(TCCState *s1)
185 if (!s1->do_debug)
186 return;
187 put_stabs_r(NULL, N_SO, 0, 0,
188 text_section->data_offset, text_section, section_sym);
192 /* generate line number info */
193 ST_FUNC void tcc_debug_line(TCCState *s1)
195 if (!s1->do_debug)
196 return;
197 if ((last_line_num != file->line_num || last_ind != ind)) {
198 put_stabn(N_SLINE, 0, file->line_num, ind - func_ind);
199 last_ind = ind;
200 last_line_num = file->line_num;
204 /* put function symbol */
205 ST_FUNC void tcc_debug_funcstart(TCCState *s1, Sym *sym)
207 char buf[512];
209 if (!s1->do_debug)
210 return;
212 /* stabs info */
213 /* XXX: we put a dummy type here */
214 snprintf(buf, sizeof(buf), "%s:%c1",
215 funcname, sym->type.t & VT_STATIC ? 'f' : 'F');
216 put_stabs_r(buf, N_FUN, 0, file->line_num, 0,
217 cur_text_section, sym->c);
218 /* //gr gdb wants a line at the function */
219 put_stabn(N_SLINE, 0, file->line_num, 0);
221 last_ind = 0;
222 last_line_num = 0;
225 /* put function size */
226 ST_FUNC void tcc_debug_funcend(TCCState *s1, int size)
228 if (!s1->do_debug)
229 return;
230 put_stabn(N_FUN, 0, 0, size);
233 /* ------------------------------------------------------------------------- */
234 ST_FUNC int tccgen_compile(TCCState *s1)
236 cur_text_section = NULL;
237 funcname = "";
238 anon_sym = SYM_FIRST_ANOM;
239 section_sym = 0;
240 const_wanted = 0;
241 nocode_wanted = 0x80000000;
243 /* define some often used types */
244 int_type.t = VT_INT;
245 char_pointer_type.t = VT_BYTE;
246 mk_pointer(&char_pointer_type);
247 #if PTR_SIZE == 4
248 size_type.t = VT_INT | VT_UNSIGNED;
249 ptrdiff_type.t = VT_INT;
250 #elif LONG_SIZE == 4
251 size_type.t = VT_LLONG | VT_UNSIGNED;
252 ptrdiff_type.t = VT_LLONG;
253 #else
254 size_type.t = VT_LONG | VT_LLONG | VT_UNSIGNED;
255 ptrdiff_type.t = VT_LONG | VT_LLONG;
256 #endif
257 func_old_type.t = VT_FUNC;
258 func_old_type.ref = sym_push(SYM_FIELD, &int_type, 0, 0);
259 func_old_type.ref->f.func_call = FUNC_CDECL;
260 func_old_type.ref->f.func_type = FUNC_OLD;
262 tcc_debug_start(s1);
264 #ifdef TCC_TARGET_ARM
265 arm_init(s1);
266 #endif
268 #ifdef INC_DEBUG
269 printf("%s: **** new file\n", file->filename);
270 #endif
272 parse_flags = PARSE_FLAG_PREPROCESS | PARSE_FLAG_TOK_NUM | PARSE_FLAG_TOK_STR;
273 next();
274 decl(VT_CONST);
275 gen_inline_functions(s1);
276 check_vstack();
277 /* end of translation unit info */
278 tcc_debug_end(s1);
279 return 0;
282 /* ------------------------------------------------------------------------- */
283 ST_FUNC ElfSym *elfsym(Sym *s)
285 if (!s || !s->c)
286 return NULL;
287 return &((ElfSym *)symtab_section->data)[s->c];
290 /* apply storage attributes to Elf symbol */
291 ST_FUNC void update_storage(Sym *sym)
293 ElfSym *esym;
294 int sym_bind, old_sym_bind;
296 esym = elfsym(sym);
297 if (!esym)
298 return;
300 if (sym->a.visibility)
301 esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1))
302 | sym->a.visibility;
304 if (sym->type.t & VT_STATIC)
305 sym_bind = STB_LOCAL;
306 else if (sym->a.weak)
307 sym_bind = STB_WEAK;
308 else
309 sym_bind = STB_GLOBAL;
310 old_sym_bind = ELFW(ST_BIND)(esym->st_info);
311 if (sym_bind != old_sym_bind) {
312 esym->st_info = ELFW(ST_INFO)(sym_bind, ELFW(ST_TYPE)(esym->st_info));
315 #ifdef TCC_TARGET_PE
316 if (sym->a.dllimport)
317 esym->st_other |= ST_PE_IMPORT;
318 if (sym->a.dllexport)
319 esym->st_other |= ST_PE_EXPORT;
320 #endif
322 #if 0
323 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
324 get_tok_str(sym->v, NULL),
325 sym_bind == STB_WEAK ? 'w' : sym_bind == STB_LOCAL ? 'l' : 'g',
326 sym->a.visibility,
327 sym->a.dllexport,
328 sym->a.dllimport
330 #endif
333 /* ------------------------------------------------------------------------- */
334 /* update sym->c so that it points to an external symbol in section
335 'section' with value 'value' */
337 ST_FUNC void put_extern_sym2(Sym *sym, int sh_num,
338 addr_t value, unsigned long size,
339 int can_add_underscore)
341 int sym_type, sym_bind, info, other, t;
342 ElfSym *esym;
343 const char *name;
344 char buf1[256];
345 #ifdef CONFIG_TCC_BCHECK
346 char buf[32];
347 #endif
349 if (!sym->c) {
350 name = get_tok_str(sym->v, NULL);
351 #ifdef CONFIG_TCC_BCHECK
352 if (tcc_state->do_bounds_check) {
353 /* XXX: avoid doing that for statics ? */
354 /* if bound checking is activated, we change some function
355 names by adding the "__bound" prefix */
356 switch(sym->v) {
357 #ifdef TCC_TARGET_PE
358 /* XXX: we rely only on malloc hooks */
359 case TOK_malloc:
360 case TOK_free:
361 case TOK_realloc:
362 case TOK_memalign:
363 case TOK_calloc:
364 #endif
365 case TOK_memcpy:
366 case TOK_memmove:
367 case TOK_memset:
368 case TOK_strlen:
369 case TOK_strcpy:
370 case TOK_alloca:
371 strcpy(buf, "__bound_");
372 strcat(buf, name);
373 name = buf;
374 break;
377 #endif
378 t = sym->type.t;
379 if ((t & VT_BTYPE) == VT_FUNC) {
380 sym_type = STT_FUNC;
381 } else if ((t & VT_BTYPE) == VT_VOID) {
382 sym_type = STT_NOTYPE;
383 } else {
384 sym_type = STT_OBJECT;
386 if (t & VT_STATIC)
387 sym_bind = STB_LOCAL;
388 else
389 sym_bind = STB_GLOBAL;
390 other = 0;
391 #ifdef TCC_TARGET_PE
392 if (sym_type == STT_FUNC && sym->type.ref) {
393 Sym *ref = sym->type.ref;
394 if (ref->a.nodecorate) {
395 can_add_underscore = 0;
397 if (ref->f.func_call == FUNC_STDCALL && can_add_underscore) {
398 sprintf(buf1, "_%s@%d", name, ref->f.func_args * PTR_SIZE);
399 name = buf1;
400 other |= ST_PE_STDCALL;
401 can_add_underscore = 0;
404 #endif
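/* For example (a sketch, assuming a 32-bit PE target where PTR_SIZE is
   4): a function declared as
       int __attribute__((stdcall)) func(int a, int b);
   has func_args == 2, so it is emitted as "_func@8" above: the name is
   prefixed with '_' and suffixed with '@' plus the byte count of its
   arguments. */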
405 if (tcc_state->leading_underscore && can_add_underscore) {
406 buf1[0] = '_';
407 pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
408 name = buf1;
410 if (sym->asm_label)
411 name = get_tok_str(sym->asm_label, NULL);
412 info = ELFW(ST_INFO)(sym_bind, sym_type);
413 sym->c = put_elf_sym(symtab_section, value, size, info, other, sh_num, name);
414 } else {
415 esym = elfsym(sym);
416 esym->st_value = value;
417 esym->st_size = size;
418 esym->st_shndx = sh_num;
420 update_storage(sym);
423 ST_FUNC void put_extern_sym(Sym *sym, Section *section,
424 addr_t value, unsigned long size)
426 int sh_num = section ? section->sh_num : SHN_UNDEF;
427 put_extern_sym2(sym, sh_num, value, size, 1);
430 /* add a new relocation entry to symbol 'sym' in section 's' */
431 ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
432 addr_t addend)
434 int c = 0;
436 if (nocode_wanted && s == cur_text_section)
437 return;
439 if (sym) {
440 if (0 == sym->c)
441 put_extern_sym(sym, NULL, 0, 0);
442 c = sym->c;
445 /* now we can add ELF relocation info */
446 put_elf_reloca(symtab_section, s, offset, type, c, addend);
449 #if PTR_SIZE == 4
450 ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
452 greloca(s, sym, offset, type, 0);
454 #endif
456 /* ------------------------------------------------------------------------- */
457 /* symbol allocator */
458 static Sym *__sym_malloc(void)
460 Sym *sym_pool, *sym, *last_sym;
461 int i;
463 sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
464 dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);
466 last_sym = sym_free_first;
467 sym = sym_pool;
468 for(i = 0; i < SYM_POOL_NB; i++) {
469 sym->next = last_sym;
470 last_sym = sym;
471 sym++;
473 sym_free_first = last_sym;
474 return last_sym;
477 static inline Sym *sym_malloc(void)
479 Sym *sym;
480 #ifndef SYM_DEBUG
481 sym = sym_free_first;
482 if (!sym)
483 sym = __sym_malloc();
484 sym_free_first = sym->next;
485 return sym;
486 #else
487 sym = tcc_malloc(sizeof(Sym));
488 return sym;
489 #endif
492 ST_INLN void sym_free(Sym *sym)
494 #ifndef SYM_DEBUG
495 sym->next = sym_free_first;
496 sym_free_first = sym;
497 #else
498 tcc_free(sym);
499 #endif
502 /* push, without hashing */
503 ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, int c)
505 Sym *s;
507 s = sym_malloc();
508 memset(s, 0, sizeof *s);
509 s->v = v;
510 s->type.t = t;
511 s->c = c;
512 /* add in stack */
513 s->prev = *ps;
514 *ps = s;
515 return s;
518 /* find a symbol and return its associated structure. 's' is the top
519 of the symbol stack */
520 ST_FUNC Sym *sym_find2(Sym *s, int v)
522 while (s) {
523 if (s->v == v)
524 return s;
525 else if (s->v == -1)
526 return NULL;
527 s = s->prev;
529 return NULL;
532 /* structure lookup */
533 ST_INLN Sym *struct_find(int v)
535 v -= TOK_IDENT;
536 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
537 return NULL;
538 return table_ident[v]->sym_struct;
541 /* find an identifier */
542 ST_INLN Sym *sym_find(int v)
544 v -= TOK_IDENT;
545 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
546 return NULL;
547 return table_ident[v]->sym_identifier;
550 /* push a given symbol on the symbol stack */
551 ST_FUNC Sym *sym_push(int v, CType *type, int r, int c)
553 Sym *s, **ps;
554 TokenSym *ts;
556 if (local_stack)
557 ps = &local_stack;
558 else
559 ps = &global_stack;
560 s = sym_push2(ps, v, type->t, c);
561 s->type.ref = type->ref;
562 s->r = r;
563 /* don't record fields or anonymous symbols */
564 /* XXX: simplify */
565 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
566 /* record symbol in token array */
567 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
568 if (v & SYM_STRUCT)
569 ps = &ts->sym_struct;
570 else
571 ps = &ts->sym_identifier;
572 s->prev_tok = *ps;
573 *ps = s;
574 s->sym_scope = local_scope;
575 if (s->prev_tok && s->prev_tok->sym_scope == s->sym_scope)
576 tcc_error("redeclaration of '%s'",
577 get_tok_str(v & ~SYM_STRUCT, NULL));
579 return s;
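/* e.g. the scope check above rejects a second declaration in the same
   scope but still allows shadowing in a nested one: */
#if 0
void f(void)
{
    int n;
    int n;      /* error: redeclaration of 'n'  */
    { int n; }  /* ok: different (inner) scope  */
}
#endif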
582 /* push a global identifier */
583 ST_FUNC Sym *global_identifier_push(int v, int t, int c)
585 Sym *s, **ps;
586 s = sym_push2(&global_stack, v, t, c);
587 /* don't record anonymous symbol */
588 if (v < SYM_FIRST_ANOM) {
589 ps = &table_ident[v - TOK_IDENT]->sym_identifier;
590 /* modify the topmost local identifier, so that
591 sym_identifier will point to 's' when popped */
592 while (*ps != NULL && (*ps)->sym_scope)
593 ps = &(*ps)->prev_tok;
594 s->prev_tok = *ps;
595 *ps = s;
597 return s;
600 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
601 pop them yet from the list, but do remove them from the token array. */
602 ST_FUNC void sym_pop(Sym **ptop, Sym *b, int keep)
604 Sym *s, *ss, **ps;
605 TokenSym *ts;
606 int v;
608 s = *ptop;
609 while(s != b) {
610 ss = s->prev;
611 v = s->v;
612 /* remove symbol in token array */
613 /* XXX: simplify */
614 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
615 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
616 if (v & SYM_STRUCT)
617 ps = &ts->sym_struct;
618 else
619 ps = &ts->sym_identifier;
620 *ps = s->prev_tok;
622 if (!keep)
623 sym_free(s);
624 s = ss;
626 if (!keep)
627 *ptop = b;
630 /* ------------------------------------------------------------------------- */
632 static void vsetc(CType *type, int r, CValue *vc)
634 int v;
636 if (vtop >= vstack + (VSTACK_SIZE - 1))
637 tcc_error("memory full (vstack)");
638 /* cannot leave cpu flags set if other instructions are generated. Also
639 avoid leaving VT_JMP anywhere except on the top of the stack
640 because it would complicate the code generator.
642 Don't do this when nocode_wanted. vtop might come from
643 !nocode_wanted regions (see 88_codeopt.c) and transforming
644 it to a register without actually generating code is wrong
645 as their value might still be used for real. All values
646 we push under nocode_wanted will eventually be popped
647 again, so that the VT_CMP/VT_JMP value will be in vtop
648 when code is unsuppressed again.
650 Same logic below in vswap(); */
651 if (vtop >= vstack && !nocode_wanted) {
652 v = vtop->r & VT_VALMASK;
653 if (v == VT_CMP || (v & ~1) == VT_JMP)
654 gv(RC_INT);
657 vtop++;
658 vtop->type = *type;
659 vtop->r = r;
660 vtop->r2 = VT_CONST;
661 vtop->c = *vc;
662 vtop->sym = NULL;
665 ST_FUNC void vswap(void)
667 SValue tmp;
668 /* cannot vswap cpu flags. See comment at vsetc() above */
669 if (vtop >= vstack && !nocode_wanted) {
670 int v = vtop->r & VT_VALMASK;
671 if (v == VT_CMP || (v & ~1) == VT_JMP)
672 gv(RC_INT);
674 tmp = vtop[0];
675 vtop[0] = vtop[-1];
676 vtop[-1] = tmp;
679 /* pop stack value */
680 ST_FUNC void vpop(void)
682 int v;
683 v = vtop->r & VT_VALMASK;
684 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
685 /* for x86, we need to pop the FP stack */
686 if (v == TREG_ST0) {
687 o(0xd8dd); /* fstp %st(0) */
688 } else
689 #endif
690 if (v == VT_JMP || v == VT_JMPI) {
691 /* need to put correct jump if && or || without test */
692 gsym(vtop->c.i);
694 vtop--;
697 /* push a constant of type "type" with a dummy value */
698 ST_FUNC void vpush(CType *type)
700 vset(type, VT_CONST, 0);
703 /* push integer constant */
704 ST_FUNC void vpushi(int v)
706 CValue cval;
707 cval.i = v;
708 vsetc(&int_type, VT_CONST, &cval);
711 /* push a pointer sized constant */
712 static void vpushs(addr_t v)
714 CValue cval;
715 cval.i = v;
716 vsetc(&size_type, VT_CONST, &cval);
719 /* push arbitrary 64bit constant */
720 ST_FUNC void vpush64(int ty, unsigned long long v)
722 CValue cval;
723 CType ctype;
724 ctype.t = ty;
725 ctype.ref = NULL;
726 cval.i = v;
727 vsetc(&ctype, VT_CONST, &cval);
730 /* push long long constant */
731 static inline void vpushll(long long v)
733 vpush64(VT_LLONG, v);
736 ST_FUNC void vset(CType *type, int r, int v)
738 CValue cval;
740 cval.i = v;
741 vsetc(type, r, &cval);
744 static void vseti(int r, int v)
746 CType type;
747 type.t = VT_INT;
748 type.ref = NULL;
749 vset(&type, r, v);
752 ST_FUNC void vpushv(SValue *v)
754 if (vtop >= vstack + (VSTACK_SIZE - 1))
755 tcc_error("memory full (vstack)");
756 vtop++;
757 *vtop = *v;
760 static void vdup(void)
762 vpushv(vtop);
765 /* rotate n first stack elements to the bottom
766 I1 ... In -> I2 ... In I1 [top is right]
768 ST_FUNC void vrotb(int n)
770 int i;
771 SValue tmp;
773 tmp = vtop[-n + 1];
774 for(i=-n+1;i!=0;i++)
775 vtop[i] = vtop[i+1];
776 vtop[0] = tmp;
779 /* rotate the n elements before entry e towards the top
780 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
782 ST_FUNC void vrote(SValue *e, int n)
784 int i;
785 SValue tmp;
787 tmp = *e;
788 for(i = 0;i < n - 1; i++)
789 e[-i] = e[-i - 1];
790 e[-n + 1] = tmp;
793 /* rotate n first stack elements to the top
794 I1 ... In -> In I1 ... I(n-1) [top is right]
796 ST_FUNC void vrott(int n)
798 vrote(vtop, n);
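/* Example (top is rightmost): with the vstack holding ... a b c,
   vrotb(3) gives ... b c a (each entry moves one slot toward the
   bottom and the bottom one, a, wraps to the top), while vrott(3)
   gives ... c a b (each entry moves one slot toward the top and the
   old top, c, wraps to the bottom of the three). */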
801 /* push a symbol value of TYPE */
802 static inline void vpushsym(CType *type, Sym *sym)
804 CValue cval;
805 cval.i = 0;
806 vsetc(type, VT_CONST | VT_SYM, &cval);
807 vtop->sym = sym;
810 /* Return a static symbol pointing to a section */
811 ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
813 int v;
814 Sym *sym;
816 v = anon_sym++;
817 sym = global_identifier_push(v, type->t | VT_STATIC, 0);
818 sym->type.ref = type->ref;
819 sym->r = VT_CONST | VT_SYM;
820 put_extern_sym(sym, sec, offset, size);
821 return sym;
824 /* push a reference to a section offset by adding a dummy symbol */
825 static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
827 vpushsym(type, get_sym_ref(type, sec, offset, size));
830 /* define a new external reference to a symbol 'v' of type 'u' */
831 ST_FUNC Sym *external_global_sym(int v, CType *type, int r)
833 Sym *s;
835 s = sym_find(v);
836 if (!s) {
837 /* push forward reference */
838 s = global_identifier_push(v, type->t | VT_EXTERN, 0);
839 s->type.ref = type->ref;
840 s->r = r | VT_CONST | VT_SYM;
841 } else if (IS_ASM_SYM(s)) {
842 s->type.t = type->t | (s->type.t & VT_EXTERN);
843 s->type.ref = type->ref;
844 update_storage(s);
846 return s;
849 /* Merge symbol attributes. */
850 static void merge_symattr(struct SymAttr *sa, struct SymAttr *sa1)
852 if (sa1->aligned && !sa->aligned)
853 sa->aligned = sa1->aligned;
854 sa->packed |= sa1->packed;
855 sa->weak |= sa1->weak;
856 if (sa1->visibility != STV_DEFAULT) {
857 int vis = sa->visibility;
858 if (vis == STV_DEFAULT
859 || vis > sa1->visibility)
860 vis = sa1->visibility;
861 sa->visibility = vis;
863 sa->dllexport |= sa1->dllexport;
864 sa->nodecorate |= sa1->nodecorate;
865 sa->dllimport |= sa1->dllimport;
868 /* Merge function attributes. */
869 static void merge_funcattr(struct FuncAttr *fa, struct FuncAttr *fa1)
871 if (fa1->func_call && !fa->func_call)
872 fa->func_call = fa1->func_call;
873 if (fa1->func_type && !fa->func_type)
874 fa->func_type = fa1->func_type;
875 if (fa1->func_args && !fa->func_args)
876 fa->func_args = fa1->func_args;
879 /* Merge attributes. */
880 static void merge_attr(AttributeDef *ad, AttributeDef *ad1)
882 merge_symattr(&ad->a, &ad1->a);
883 merge_funcattr(&ad->f, &ad1->f);
885 if (ad1->section)
886 ad->section = ad1->section;
887 if (ad1->alias_target)
888 ad->alias_target = ad1->alias_target;
889 if (ad1->asm_label)
890 ad->asm_label = ad1->asm_label;
891 if (ad1->attr_mode)
892 ad->attr_mode = ad1->attr_mode;
895 /* Merge some type attributes. */
896 static void patch_type(Sym *sym, CType *type)
898 if (!(type->t & VT_EXTERN)) {
899 if (!(sym->type.t & VT_EXTERN))
900 tcc_error("redefinition of '%s'", get_tok_str(sym->v, NULL));
901 sym->type.t &= ~VT_EXTERN;
904 if (IS_ASM_SYM(sym)) {
905 /* stay static if both are static */
906 sym->type.t = type->t & (sym->type.t | ~VT_STATIC);
907 sym->type.ref = type->ref;
910 if (!is_compatible_types(&sym->type, type)) {
911 tcc_error("incompatible types for redefinition of '%s'",
912 get_tok_str(sym->v, NULL));
914 } else if ((sym->type.t & VT_BTYPE) == VT_FUNC) {
915 int static_proto = sym->type.t & VT_STATIC;
916 /* warn if static follows non-static function declaration */
917 if ((type->t & VT_STATIC) && !static_proto && !(type->t & VT_INLINE))
918 tcc_warning("static storage ignored for redefinition of '%s'",
919 get_tok_str(sym->v, NULL));
921 if (0 == (type->t & VT_EXTERN)) {
922 /* put complete type, use static from prototype */
923 sym->type.t = (type->t & ~VT_STATIC) | static_proto;
924 if (type->t & VT_INLINE)
925 sym->type.t = type->t;
926 sym->type.ref = type->ref;
929 } else {
930 if ((sym->type.t & VT_ARRAY) && type->ref->c >= 0) {
931 /* set array size if it was omitted in extern declaration */
932 if (sym->type.ref->c < 0)
933 sym->type.ref->c = type->ref->c;
934 else if (sym->type.ref->c != type->ref->c)
935 tcc_error("conflicting type for '%s'", get_tok_str(sym->v, NULL));
937 if ((type->t ^ sym->type.t) & VT_STATIC)
938 tcc_warning("storage mismatch for redefinition of '%s'",
939 get_tok_str(sym->v, NULL));
944 /* Merge some storage attributes. */
945 static void patch_storage(Sym *sym, AttributeDef *ad, CType *type)
947 if (type)
948 patch_type(sym, type);
950 #ifdef TCC_TARGET_PE
951 if (sym->a.dllimport != ad->a.dllimport)
952 tcc_error("incompatible dll linkage for redefinition of '%s'",
953 get_tok_str(sym->v, NULL));
954 #endif
955 merge_symattr(&sym->a, &ad->a);
956 if (ad->asm_label)
957 sym->asm_label = ad->asm_label;
958 update_storage(sym);
961 /* define a new external reference to a symbol 'v' */
962 static Sym *external_sym(int v, CType *type, int r, AttributeDef *ad)
964 Sym *s;
965 s = sym_find(v);
966 if (!s) {
967 /* push forward reference */
968 s = sym_push(v, type, r | VT_CONST | VT_SYM, 0);
969 s->type.t |= VT_EXTERN;
970 s->a = ad->a;
971 s->sym_scope = 0;
972 } else {
973 if (s->type.ref == func_old_type.ref) {
974 s->type.ref = type->ref;
975 s->r = r | VT_CONST | VT_SYM;
976 s->type.t |= VT_EXTERN;
978 patch_storage(s, ad, type);
980 return s;
983 /* push a reference to global symbol v */
984 ST_FUNC void vpush_global_sym(CType *type, int v)
986 vpushsym(type, external_global_sym(v, type, 0));
989 /* save registers up to (vtop - n) stack entry */
990 ST_FUNC void save_regs(int n)
992 SValue *p, *p1;
993 for(p = vstack, p1 = vtop - n; p <= p1; p++)
994 save_reg(p->r);
997 /* save r to the memory stack, and mark it as being free */
998 ST_FUNC void save_reg(int r)
1000 save_reg_upstack(r, 0);
1003 /* save r to the memory stack, and mark it as being free,
1004 if seen up to (vtop - n) stack entry */
1005 ST_FUNC void save_reg_upstack(int r, int n)
1007 int l, saved, size, align;
1008 SValue *p, *p1, sv;
1009 CType *type;
1011 if ((r &= VT_VALMASK) >= VT_CONST)
1012 return;
1013 if (nocode_wanted)
1014 return;
1016 /* modify all stack values */
1017 saved = 0;
1018 l = 0;
1019 for(p = vstack, p1 = vtop - n; p <= p1; p++) {
1020 if ((p->r & VT_VALMASK) == r ||
1021 ((p->type.t & VT_BTYPE) == VT_LLONG && (p->r2 & VT_VALMASK) == r)) {
1022 /* must save value on stack if not already done */
1023 if (!saved) {
1024 /* NOTE: must reload 'r' because r might be equal to r2 */
1025 r = p->r & VT_VALMASK;
1026 /* store register in the stack */
1027 type = &p->type;
1028 if ((p->r & VT_LVAL) ||
1029 (!is_float(type->t) && (type->t & VT_BTYPE) != VT_LLONG))
1030 #if PTR_SIZE == 8
1031 type = &char_pointer_type;
1032 #else
1033 type = &int_type;
1034 #endif
1035 size = type_size(type, &align);
1036 loc = (loc - size) & -align;
1037 sv.type.t = type->t;
1038 sv.r = VT_LOCAL | VT_LVAL;
1039 sv.c.i = loc;
1040 store(r, &sv);
1041 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1042 /* x86 specific: need to pop fp register ST0 if saved */
1043 if (r == TREG_ST0) {
1044 o(0xd8dd); /* fstp %st(0) */
1046 #endif
1047 #if PTR_SIZE == 4
1048 /* special long long case */
1049 if ((type->t & VT_BTYPE) == VT_LLONG) {
1050 sv.c.i += 4;
1051 store(p->r2, &sv);
1053 #endif
1054 l = loc;
1055 saved = 1;
1057 /* mark that stack entry as being saved on the stack */
1058 if (p->r & VT_LVAL) {
1059 /* also clear the bounded flag because the
1060 relocation address of the function was stored in
1061 p->c.i */
1062 p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
1063 } else {
1064 p->r = lvalue_type(p->type.t) | VT_LOCAL;
1066 p->r2 = VT_CONST;
1067 p->c.i = l;
1072 #ifdef TCC_TARGET_ARM
1073 /* find a register of class 'rc2' with at most one reference on stack.
1074 * If none, call get_reg(rc) */
1075 ST_FUNC int get_reg_ex(int rc, int rc2)
1077 int r;
1078 SValue *p;
1080 for(r=0;r<NB_REGS;r++) {
1081 if (reg_classes[r] & rc2) {
1082 int n;
1083 n=0;
1084 for(p = vstack; p <= vtop; p++) {
1085 if ((p->r & VT_VALMASK) == r ||
1086 (p->r2 & VT_VALMASK) == r)
1087 n++;
1089 if (n <= 1)
1090 return r;
1093 return get_reg(rc);
1095 #endif
1097 /* find a free register of class 'rc'. If none, save one register */
1098 ST_FUNC int get_reg(int rc)
1100 int r;
1101 SValue *p;
1103 /* find a free register */
1104 for(r=0;r<NB_REGS;r++) {
1105 if (reg_classes[r] & rc) {
1106 if (nocode_wanted)
1107 return r;
1108 for(p=vstack;p<=vtop;p++) {
1109 if ((p->r & VT_VALMASK) == r ||
1110 (p->r2 & VT_VALMASK) == r)
1111 goto notfound;
1113 return r;
1115 notfound: ;
1118 /* no register left : free the first one on the stack (VERY
1119 IMPORTANT to start from the bottom to ensure that we don't
1120 spill registers used in gen_opi()) */
1121 for(p=vstack;p<=vtop;p++) {
1122 /* look at second register (if long long) */
1123 r = p->r2 & VT_VALMASK;
1124 if (r < VT_CONST && (reg_classes[r] & rc))
1125 goto save_found;
1126 r = p->r & VT_VALMASK;
1127 if (r < VT_CONST && (reg_classes[r] & rc)) {
1128 save_found:
1129 save_reg(r);
1130 return r;
1133 /* Should never come here */
1134 return -1;
1137 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1138 if needed */
1139 static void move_reg(int r, int s, int t)
1141 SValue sv;
1143 if (r != s) {
1144 save_reg(r);
1145 sv.type.t = t;
1146 sv.type.ref = NULL;
1147 sv.r = s;
1148 sv.c.i = 0;
1149 load(r, &sv);
1153 /* get address of vtop (vtop MUST BE an lvalue) */
1154 ST_FUNC void gaddrof(void)
1156 vtop->r &= ~VT_LVAL;
1157 /* tricky: if saved lvalue, then we can go back to lvalue */
1158 if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
1159 vtop->r = (vtop->r & ~(VT_VALMASK | VT_LVAL_TYPE)) | VT_LOCAL | VT_LVAL;
1164 #ifdef CONFIG_TCC_BCHECK
1165 /* generate lvalue bound code */
1166 static void gbound(void)
1168 int lval_type;
1169 CType type1;
1171 vtop->r &= ~VT_MUSTBOUND;
1172 /* if lvalue, then use checking code before dereferencing */
1173 if (vtop->r & VT_LVAL) {
1174 /* if not VT_BOUNDED value, then make one */
1175 if (!(vtop->r & VT_BOUNDED)) {
1176 lval_type = vtop->r & (VT_LVAL_TYPE | VT_LVAL);
1177 /* must save type because we must set it to int to get pointer */
1178 type1 = vtop->type;
1179 vtop->type.t = VT_PTR;
1180 gaddrof();
1181 vpushi(0);
1182 gen_bounded_ptr_add();
1183 vtop->r |= lval_type;
1184 vtop->type = type1;
1186 /* then check for dereferencing */
1187 gen_bounded_ptr_deref();
1190 #endif
1192 static void incr_bf_adr(int o)
1194 vtop->type = char_pointer_type;
1195 gaddrof();
1196 vpushi(o);
1197 gen_op('+');
1198 vtop->type.t = (vtop->type.t & ~(VT_BTYPE|VT_DEFSIGN))
1199 | (VT_BYTE|VT_UNSIGNED);
1200 vtop->r = (vtop->r & ~VT_LVAL_TYPE)
1201 | (VT_LVAL_BYTE|VT_LVAL_UNSIGNED|VT_LVAL);
1204 /* single-byte load mode for packed or otherwise unaligned bitfields */
1205 static void load_packed_bf(CType *type, int bit_pos, int bit_size)
1207 int n, o, bits;
1208 save_reg_upstack(vtop->r, 1);
1209 vpush64(type->t & VT_BTYPE, 0); // B X
1210 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
1211 do {
1212 vswap(); // X B
1213 incr_bf_adr(o);
1214 vdup(); // X B B
1215 n = 8 - bit_pos;
1216 if (n > bit_size)
1217 n = bit_size;
1218 if (bit_pos)
1219 vpushi(bit_pos), gen_op(TOK_SHR), bit_pos = 0; // X B Y
1220 if (n < 8)
1221 vpushi((1 << n) - 1), gen_op('&');
1222 gen_cast(type);
1223 if (bits)
1224 vpushi(bits), gen_op(TOK_SHL);
1225 vrotb(3); // B Y X
1226 gen_op('|'); // B X
1227 bits += n, bit_size -= n, o = 1;
1228 } while (bit_size);
1229 vswap(), vpop();
1230 if (!(type->t & VT_UNSIGNED)) {
1231 n = ((type->t & VT_BTYPE) == VT_LLONG ? 64 : 32) - bits;
1232 vpushi(n), gen_op(TOK_SHL);
1233 vpushi(n), gen_op(TOK_SAR);
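/* Worked example (a hypothetical layout): for
       struct __attribute__((packed)) s { int a:3; int f:12; };
   the field f starts at bit 3, so o = 0 and bit_pos = 3.  The loop
   above first reads byte 0, shifts it right by 3 and keeps 5 bits,
   then reads byte 1 and keeps the remaining 7 bits shifted left by
   5, ORs the two pieces together, and finally the SHL/SAR pair
   (by 32 - 12 = 20) sign-extends the 12-bit result. */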
1237 /* single-byte store mode for packed or otherwise unaligned bitfields */
1238 static void store_packed_bf(int bit_pos, int bit_size)
1240 int bits, n, o, m, c;
1242 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1243 vswap(); // X B
1244 save_reg_upstack(vtop->r, 1);
1245 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
1246 do {
1247 incr_bf_adr(o); // X B
1248 vswap(); //B X
1249 c ? vdup() : gv_dup(); // B V X
1250 vrott(3); // X B V
1251 if (bits)
1252 vpushi(bits), gen_op(TOK_SHR);
1253 if (bit_pos)
1254 vpushi(bit_pos), gen_op(TOK_SHL);
1255 n = 8 - bit_pos;
1256 if (n > bit_size)
1257 n = bit_size;
1258 if (n < 8) {
1259 m = ((1 << n) - 1) << bit_pos;
1260 vpushi(m), gen_op('&'); // X B V1
1261 vpushv(vtop-1); // X B V1 B
1262 vpushi(m & 0x80 ? ~m & 0x7f : ~m);
1263 gen_op('&'); // X B V1 B1
1264 gen_op('|'); // X B V2
1266 vdup(), vtop[-1] = vtop[-2]; // X B B V2
1267 vstore(), vpop(); // X B
1268 bits += n, bit_size -= n, bit_pos = 0, o = 1;
1269 } while (bit_size);
1270 vpop(), vpop();
1273 static int adjust_bf(SValue *sv, int bit_pos, int bit_size)
1275 int t;
1276 if (0 == sv->type.ref)
1277 return 0;
1278 t = sv->type.ref->auxtype;
1279 if (t != -1 && t != VT_STRUCT) {
1280 sv->type.t = (sv->type.t & ~VT_BTYPE) | t;
1281 sv->r = (sv->r & ~VT_LVAL_TYPE) | lvalue_type(sv->type.t);
1283 return t;
1286 /* store vtop in a register belonging to class 'rc'. lvalues are
1287 converted to values. Cannot be used if the value cannot be
1288 converted to a register value (such as structures). */
1289 ST_FUNC int gv(int rc)
1291 int r, bit_pos, bit_size, size, align, rc2;
1293 /* NOTE: get_reg can modify vstack[] */
1294 if (vtop->type.t & VT_BITFIELD) {
1295 CType type;
1297 bit_pos = BIT_POS(vtop->type.t);
1298 bit_size = BIT_SIZE(vtop->type.t);
1299 /* remove bit field info to avoid loops */
1300 vtop->type.t &= ~VT_STRUCT_MASK;
1302 type.ref = NULL;
1303 type.t = vtop->type.t & VT_UNSIGNED;
1304 if ((vtop->type.t & VT_BTYPE) == VT_BOOL)
1305 type.t |= VT_UNSIGNED;
1307 r = adjust_bf(vtop, bit_pos, bit_size);
1309 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
1310 type.t |= VT_LLONG;
1311 else
1312 type.t |= VT_INT;
1314 if (r == VT_STRUCT) {
1315 load_packed_bf(&type, bit_pos, bit_size);
1316 } else {
1317 int bits = (type.t & VT_BTYPE) == VT_LLONG ? 64 : 32;
1318 /* cast to int to propagate signedness in following ops */
1319 gen_cast(&type);
1320 /* generate shifts */
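/* e.g. a bitfield with bit_pos 3 and bit_size 5 in a 32-bit word is
   extracted by shifting left by 32 - (3 + 5) = 24 to drop the bits
   above the field, then right by 32 - 5 = 27 (arithmetically when
   signed, so the sign is propagated) to drop the bits below it. */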
1321 vpushi(bits - (bit_pos + bit_size));
1322 gen_op(TOK_SHL);
1323 vpushi(bits - bit_size);
1324 /* NOTE: transformed to SHR if unsigned */
1325 gen_op(TOK_SAR);
1327 r = gv(rc);
1328 } else {
1329 if (is_float(vtop->type.t) &&
1330 (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1331 unsigned long offset;
1332 /* CPUs usually cannot use float constants, so we store them
1333 generically in data segment */
1334 size = type_size(&vtop->type, &align);
1335 if (NODATA_WANTED)
1336 size = 0, align = 1;
1337 offset = section_add(data_section, size, align);
1338 vpush_ref(&vtop->type, data_section, offset, size);
1339 vswap();
1340 init_putv(&vtop->type, data_section, offset);
1341 vtop->r |= VT_LVAL;
1343 #ifdef CONFIG_TCC_BCHECK
1344 if (vtop->r & VT_MUSTBOUND)
1345 gbound();
1346 #endif
1348 r = vtop->r & VT_VALMASK;
1349 rc2 = (rc & RC_FLOAT) ? RC_FLOAT : RC_INT;
1350 #ifndef TCC_TARGET_ARM64
1351 if (rc == RC_IRET)
1352 rc2 = RC_LRET;
1353 #ifdef TCC_TARGET_X86_64
1354 else if (rc == RC_FRET)
1355 rc2 = RC_QRET;
1356 #endif
1357 #endif
1358 /* need to reload if:
1359 - constant
1360 - lvalue (need to dereference pointer)
1361 - already a register, but not in the right class */
1362 if (r >= VT_CONST
1363 || (vtop->r & VT_LVAL)
1364 || !(reg_classes[r] & rc)
1365 #if PTR_SIZE == 8
1366 || ((vtop->type.t & VT_BTYPE) == VT_QLONG && !(reg_classes[vtop->r2] & rc2))
1367 || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT && !(reg_classes[vtop->r2] & rc2))
1368 #else
1369 || ((vtop->type.t & VT_BTYPE) == VT_LLONG && !(reg_classes[vtop->r2] & rc2))
1370 #endif
1373 r = get_reg(rc);
1374 #if PTR_SIZE == 8
1375 if (((vtop->type.t & VT_BTYPE) == VT_QLONG) || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT)) {
1376 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
1377 #else
1378 if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
1379 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
1380 unsigned long long ll;
1381 #endif
1382 int r2, original_type;
1383 original_type = vtop->type.t;
1384 /* two register type load : expand to two words
1385 temporarily */
1386 #if PTR_SIZE == 4
1387 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1388 /* load constant */
1389 ll = vtop->c.i;
1390 vtop->c.i = ll; /* first word */
1391 load(r, vtop);
1392 vtop->r = r; /* save register value */
1393 vpushi(ll >> 32); /* second word */
1394 } else
1395 #endif
1396 if (vtop->r & VT_LVAL) {
1397 /* We do not want to modify the long long
1398 pointer here, so the safest (and least
1399 efficient) approach is to save all the other
1400 registers on the stack. XXX: totally inefficient. */
1401 #if 0
1402 save_regs(1);
1403 #else
1404 /* lvalue_save: save only if used further down the stack */
1405 save_reg_upstack(vtop->r, 1);
1406 #endif
1407 /* load from memory */
1408 vtop->type.t = load_type;
1409 load(r, vtop);
1410 vdup();
1411 vtop[-1].r = r; /* save register value */
1412 /* increment pointer to get second word */
1413 vtop->type.t = addr_type;
1414 gaddrof();
1415 vpushi(load_size);
1416 gen_op('+');
1417 vtop->r |= VT_LVAL;
1418 vtop->type.t = load_type;
1419 } else {
1420 /* move registers */
1421 load(r, vtop);
1422 vdup();
1423 vtop[-1].r = r; /* save register value */
1424 vtop->r = vtop[-1].r2;
1426 /* Allocate second register. Here we rely on the fact that
1427 get_reg() tries first to free r2 of an SValue. */
1428 r2 = get_reg(rc2);
1429 load(r2, vtop);
1430 vpop();
1431 /* write second register */
1432 vtop->r2 = r2;
1433 vtop->type.t = original_type;
1434 } else if ((vtop->r & VT_LVAL) && !is_float(vtop->type.t)) {
1435 int t1, t;
1436 /* lvalue of scalar type : need to use lvalue type
1437 because of possible cast */
1438 t = vtop->type.t;
1439 t1 = t;
1440 /* compute memory access type */
1441 if (vtop->r & VT_LVAL_BYTE)
1442 t = VT_BYTE;
1443 else if (vtop->r & VT_LVAL_SHORT)
1444 t = VT_SHORT;
1445 if (vtop->r & VT_LVAL_UNSIGNED)
1446 t |= VT_UNSIGNED;
1447 vtop->type.t = t;
1448 load(r, vtop);
1449 /* restore wanted type */
1450 vtop->type.t = t1;
1451 } else {
1452 /* one register type load */
1453 load(r, vtop);
1456 vtop->r = r;
1457 #ifdef TCC_TARGET_C67
1458 /* uses register pairs for doubles */
1459 if ((vtop->type.t & VT_BTYPE) == VT_DOUBLE)
1460 vtop->r2 = r+1;
1461 #endif
1463 return r;
1466 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1467 ST_FUNC void gv2(int rc1, int rc2)
1469 int v;
1471 /* generate the more generic register first. But VT_JMP or VT_CMP
1472 values must be generated first in all cases to avoid possible
1473 reload errors */
1474 v = vtop[0].r & VT_VALMASK;
1475 if (v != VT_CMP && (v & ~1) != VT_JMP && rc1 <= rc2) {
1476 vswap();
1477 gv(rc1);
1478 vswap();
1479 gv(rc2);
1480 /* test if reload is needed for first register */
1481 if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
1482 vswap();
1483 gv(rc1);
1484 vswap();
1486 } else {
1487 gv(rc2);
1488 vswap();
1489 gv(rc1);
1490 vswap();
1491 /* test if reload is needed for first register */
1492 if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
1493 gv(rc2);
1498 #ifndef TCC_TARGET_ARM64
1499 /* wrapper around RC_FRET to return a register by type */
1500 static int rc_fret(int t)
1502 #ifdef TCC_TARGET_X86_64
1503 if (t == VT_LDOUBLE) {
1504 return RC_ST0;
1506 #endif
1507 return RC_FRET;
1509 #endif
1511 /* wrapper around REG_FRET to return a register by type */
1512 static int reg_fret(int t)
1514 #ifdef TCC_TARGET_X86_64
1515 if (t == VT_LDOUBLE) {
1516 return TREG_ST0;
1518 #endif
1519 return REG_FRET;
1522 #if PTR_SIZE == 4
1523 /* expand 64bit on stack in two ints */
1524 static void lexpand(void)
1526 int u, v;
1527 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1528 v = vtop->r & (VT_VALMASK | VT_LVAL);
1529 if (v == VT_CONST) {
1530 vdup();
1531 vtop[0].c.i >>= 32;
1532 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1533 vdup();
1534 vtop[0].c.i += 4;
1535 } else {
1536 gv(RC_INT);
1537 vdup();
1538 vtop[0].r = vtop[-1].r2;
1539 vtop[0].r2 = vtop[-1].r2 = VT_CONST;
1541 vtop[0].type.t = vtop[-1].type.t = VT_INT | u;
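/* e.g. a 64-bit constant 0x1122334455667788 ends up as two VT_INT
   entries: the low word 0x55667788 below and the high word
   0x11223344 on top of the vstack. */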
1543 #endif
1545 #ifdef TCC_TARGET_ARM
1546 /* expand long long on stack */
1547 ST_FUNC void lexpand_nr(void)
1549 int u,v;
1551 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1552 vdup();
1553 vtop->r2 = VT_CONST;
1554 vtop->type.t = VT_INT | u;
1555 v=vtop[-1].r & (VT_VALMASK | VT_LVAL);
1556 if (v == VT_CONST) {
1557 vtop[-1].c.i = vtop->c.i;
1558 vtop->c.i = vtop->c.i >> 32;
1559 vtop->r = VT_CONST;
1560 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1561 vtop->c.i += 4;
1562 vtop->r = vtop[-1].r;
1563 } else if (v > VT_CONST) {
1564 vtop--;
1565 lexpand();
1566 } else
1567 vtop->r = vtop[-1].r2;
1568 vtop[-1].r2 = VT_CONST;
1569 vtop[-1].type.t = VT_INT | u;
1571 #endif
1573 #if PTR_SIZE == 4
1574 /* build a long long from two ints */
1575 static void lbuild(int t)
1577 gv2(RC_INT, RC_INT);
1578 vtop[-1].r2 = vtop[0].r;
1579 vtop[-1].type.t = t;
1580 vpop();
1582 #endif
1584 /* convert stack entry to register and duplicate its value in another
1585 register */
1586 static void gv_dup(void)
1588 int rc, t, r, r1;
1589 SValue sv;
1591 t = vtop->type.t;
1592 #if PTR_SIZE == 4
1593 if ((t & VT_BTYPE) == VT_LLONG) {
1594 if (t & VT_BITFIELD) {
1595 gv(RC_INT);
1596 t = vtop->type.t;
1598 lexpand();
1599 gv_dup();
1600 vswap();
1601 vrotb(3);
1602 gv_dup();
1603 vrotb(4);
1604 /* stack: H L L1 H1 */
1605 lbuild(t);
1606 vrotb(3);
1607 vrotb(3);
1608 vswap();
1609 lbuild(t);
1610 vswap();
1611 } else
1612 #endif
1614 /* duplicate value */
1615 rc = RC_INT;
1616 sv.type.t = VT_INT;
1617 if (is_float(t)) {
1618 rc = RC_FLOAT;
1619 #ifdef TCC_TARGET_X86_64
1620 if ((t & VT_BTYPE) == VT_LDOUBLE) {
1621 rc = RC_ST0;
1623 #endif
1624 sv.type.t = t;
1626 r = gv(rc);
1627 r1 = get_reg(rc);
1628 sv.r = r;
1629 sv.c.i = 0;
1630 load(r1, &sv); /* move r to r1 */
1631 vdup();
1632 /* duplicates value */
1633 if (r != r1)
1634 vtop->r = r1;
1638 /* Generate value test
1640 * Generate a test for any value (jump, comparison and integers) */
1641 ST_FUNC int gvtst(int inv, int t)
1643 int v = vtop->r & VT_VALMASK;
1644 if (v != VT_CMP && v != VT_JMP && v != VT_JMPI) {
1645 vpushi(0);
1646 gen_op(TOK_NE);
1648 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1649 /* constant jmp optimization */
1650 if ((vtop->c.i != 0) != inv)
1651 t = gjmp(t);
1652 vtop--;
1653 return t;
1655 return gtst(inv, t);
1658 #if PTR_SIZE == 4
1659 /* generate CPU independent (unsigned) long long operations */
1660 static void gen_opl(int op)
1662 int t, a, b, op1, c, i;
1663 int func;
1664 unsigned short reg_iret = REG_IRET;
1665 unsigned short reg_lret = REG_LRET;
1666 SValue tmp;
1668 switch(op) {
1669 case '/':
1670 case TOK_PDIV:
1671 func = TOK___divdi3;
1672 goto gen_func;
1673 case TOK_UDIV:
1674 func = TOK___udivdi3;
1675 goto gen_func;
1676 case '%':
1677 func = TOK___moddi3;
1678 goto gen_mod_func;
1679 case TOK_UMOD:
1680 func = TOK___umoddi3;
1681 gen_mod_func:
1682 #ifdef TCC_ARM_EABI
1683 reg_iret = TREG_R2;
1684 reg_lret = TREG_R3;
1685 #endif
1686 gen_func:
1687 /* call generic long long function */
1688 vpush_global_sym(&func_old_type, func);
1689 vrott(3);
1690 gfunc_call(2);
1691 vpushi(0);
1692 vtop->r = reg_iret;
1693 vtop->r2 = reg_lret;
1694 break;
1695 case '^':
1696 case '&':
1697 case '|':
1698 case '*':
1699 case '+':
1700 case '-':
1701 //pv("gen_opl A",0,2);
1702 t = vtop->type.t;
1703 vswap();
1704 lexpand();
1705 vrotb(3);
1706 lexpand();
1707 /* stack: L1 H1 L2 H2 */
1708 tmp = vtop[0];
1709 vtop[0] = vtop[-3];
1710 vtop[-3] = tmp;
1711 tmp = vtop[-2];
1712 vtop[-2] = vtop[-3];
1713 vtop[-3] = tmp;
1714 vswap();
1715 /* stack: H1 H2 L1 L2 */
1716 //pv("gen_opl B",0,4);
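/* For '*', a sketch of the identity used below: writing the operands
   as A = H1*2^32 + L1 and B = H2*2^32 + L2,
     A*B mod 2^64 = L1*L2 + 2^32*(H1*L2 + H2*L1).
   TOK_UMULL produces the full 64-bit product L1*L2 as the pair
   (ML, MH); the two single-word products H1*L2 and H2*L1 are then
   added into the high half MH. */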
1717 if (op == '*') {
1718 vpushv(vtop - 1);
1719 vpushv(vtop - 1);
1720 gen_op(TOK_UMULL);
1721 lexpand();
1722 /* stack: H1 H2 L1 L2 ML MH */
1723 for(i=0;i<4;i++)
1724 vrotb(6);
1725 /* stack: ML MH H1 H2 L1 L2 */
1726 tmp = vtop[0];
1727 vtop[0] = vtop[-2];
1728 vtop[-2] = tmp;
1729 /* stack: ML MH H1 L2 H2 L1 */
1730 gen_op('*');
1731 vrotb(3);
1732 vrotb(3);
1733 gen_op('*');
1734 /* stack: ML MH M1 M2 */
1735 gen_op('+');
1736 gen_op('+');
1737 } else if (op == '+' || op == '-') {
1738 /* XXX: add non carry method too (for MIPS or alpha) */
1739 if (op == '+')
1740 op1 = TOK_ADDC1;
1741 else
1742 op1 = TOK_SUBC1;
1743 gen_op(op1);
1744 /* stack: H1 H2 (L1 op L2) */
1745 vrotb(3);
1746 vrotb(3);
1747 gen_op(op1 + 1); /* TOK_xxxC2 */
1748 } else {
1749 gen_op(op);
1750 /* stack: H1 H2 (L1 op L2) */
1751 vrotb(3);
1752 vrotb(3);
1753 /* stack: (L1 op L2) H1 H2 */
1754 gen_op(op);
1755 /* stack: (L1 op L2) (H1 op H2) */
1757 /* stack: L H */
1758 lbuild(t);
1759 break;
1760 case TOK_SAR:
1761 case TOK_SHR:
1762 case TOK_SHL:
1763 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1764 t = vtop[-1].type.t;
1765 vswap();
1766 lexpand();
1767 vrotb(3);
1768 /* stack: L H shift */
1769 c = (int)vtop->c.i;
1770 /* constant: simpler */
1771 /* NOTE: all comments are for SHL. the other cases are
1772 done by swapping words */
1773 vpop();
1774 if (op != TOK_SHL)
1775 vswap();
1776 if (c >= 32) {
1777 /* stack: L H */
1778 vpop();
1779 if (c > 32) {
1780 vpushi(c - 32);
1781 gen_op(op);
1783 if (op != TOK_SAR) {
1784 vpushi(0);
1785 } else {
1786 gv_dup();
1787 vpushi(31);
1788 gen_op(TOK_SAR);
1790 vswap();
1791 } else {
1792 vswap();
1793 gv_dup();
1794 /* stack: H L L */
1795 vpushi(c);
1796 gen_op(op);
1797 vswap();
1798 vpushi(32 - c);
1799 if (op == TOK_SHL)
1800 gen_op(TOK_SHR);
1801 else
1802 gen_op(TOK_SHL);
1803 vrotb(3);
1804 /* stack: L L H */
1805 vpushi(c);
1806 if (op == TOK_SHL)
1807 gen_op(TOK_SHL);
1808 else
1809 gen_op(TOK_SHR);
1810 gen_op('|');
1812 if (op != TOK_SHL)
1813 vswap();
1814 lbuild(t);
1815 } else {
1816 /* XXX: should provide a faster fallback on x86 ? */
1817 switch(op) {
1818 case TOK_SAR:
1819 func = TOK___ashrdi3;
1820 goto gen_func;
1821 case TOK_SHR:
1822 func = TOK___lshrdi3;
1823 goto gen_func;
1824 case TOK_SHL:
1825 func = TOK___ashldi3;
1826 goto gen_func;
1829 break;
1830 default:
1831 /* compare operations */
1832 t = vtop->type.t;
1833 vswap();
1834 lexpand();
1835 vrotb(3);
1836 lexpand();
1837 /* stack: L1 H1 L2 H2 */
1838 tmp = vtop[-1];
1839 vtop[-1] = vtop[-2];
1840 vtop[-2] = tmp;
1841 /* stack: L1 L2 H1 H2 */
1842 /* compare high */
1843 op1 = op;
1844 /* when values are equal, we need to compare low words. since
1845 the jump is inverted, we invert the test too. */
1846 if (op1 == TOK_LT)
1847 op1 = TOK_LE;
1848 else if (op1 == TOK_GT)
1849 op1 = TOK_GE;
1850 else if (op1 == TOK_ULT)
1851 op1 = TOK_ULE;
1852 else if (op1 == TOK_UGT)
1853 op1 = TOK_UGE;
1854 a = 0;
1855 b = 0;
1856 gen_op(op1);
1857 if (op == TOK_NE) {
1858 b = gvtst(0, 0);
1859 } else {
1860 a = gvtst(1, 0);
1861 if (op != TOK_EQ) {
1862 /* generate non equal test */
1863 vpushi(TOK_NE);
1864 vtop->r = VT_CMP;
1865 b = gvtst(0, 0);
1868 /* compare low. Always unsigned */
1869 op1 = op;
1870 if (op1 == TOK_LT)
1871 op1 = TOK_ULT;
1872 else if (op1 == TOK_LE)
1873 op1 = TOK_ULE;
1874 else if (op1 == TOK_GT)
1875 op1 = TOK_UGT;
1876 else if (op1 == TOK_GE)
1877 op1 = TOK_UGE;
1878 gen_op(op1);
1879 a = gvtst(1, a);
1880 gsym(b);
1881 vseti(VT_JMPI, a);
1882 break;
1885 #endif
1887 static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
1889 uint64_t x = (a >> 63 ? -a : a) / (b >> 63 ? -b : b);
1890 return (a ^ b) >> 63 ? -x : x;
1893 static int gen_opic_lt(uint64_t a, uint64_t b)
1895 return (a ^ (uint64_t)1 << 63) < (b ^ (uint64_t)1 << 63);
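/* A minimal sketch of the two helpers above: gen_opic_sdiv performs a
   signed division with unsigned arithmetic by dividing the absolute
   values and negating the result when the operand signs differ, and
   gen_opic_lt turns a signed comparison into an unsigned one by
   flipping the sign bit of both operands, which maps the signed range
   onto the unsigned range while preserving order. */
#if 0
gen_opic_sdiv(-7, 2);          /* -> (uint64_t)-3                  */
gen_opic_lt((uint64_t)-1, 0);  /* -> 1, since -1 <  0 when signed  */
gen_opic_lt(0, (uint64_t)-1);  /* -> 0, since  0 >= -1 when signed */
#endif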
1898 /* handle integer constant optimizations and various machine
1899 independent opt */
1900 static void gen_opic(int op)
1902 SValue *v1 = vtop - 1;
1903 SValue *v2 = vtop;
1904 int t1 = v1->type.t & VT_BTYPE;
1905 int t2 = v2->type.t & VT_BTYPE;
1906 int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1907 int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1908 uint64_t l1 = c1 ? v1->c.i : 0;
1909 uint64_t l2 = c2 ? v2->c.i : 0;
1910 int shm = (t1 == VT_LLONG) ? 63 : 31;
1912 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
1913 l1 = ((uint32_t)l1 |
1914 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
1915 if (t2 != VT_LLONG && (PTR_SIZE != 8 || t2 != VT_PTR))
1916 l2 = ((uint32_t)l2 |
1917 (v2->type.t & VT_UNSIGNED ? 0 : -(l2 & 0x80000000)));
1919 if (c1 && c2) {
1920 switch(op) {
1921 case '+': l1 += l2; break;
1922 case '-': l1 -= l2; break;
1923 case '&': l1 &= l2; break;
1924 case '^': l1 ^= l2; break;
1925 case '|': l1 |= l2; break;
1926 case '*': l1 *= l2; break;
1928 case TOK_PDIV:
1929 case '/':
1930 case '%':
1931 case TOK_UDIV:
1932 case TOK_UMOD:
1933 /* if division by zero, generate explicit division */
1934 if (l2 == 0) {
1935 if (const_wanted)
1936 tcc_error("division by zero in constant");
1937 goto general_case;
1939 switch(op) {
1940 default: l1 = gen_opic_sdiv(l1, l2); break;
1941 case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
1942 case TOK_UDIV: l1 = l1 / l2; break;
1943 case TOK_UMOD: l1 = l1 % l2; break;
1945 break;
1946 case TOK_SHL: l1 <<= (l2 & shm); break;
1947 case TOK_SHR: l1 >>= (l2 & shm); break;
1948 case TOK_SAR:
1949 l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
1950 break;
1951 /* tests */
1952 case TOK_ULT: l1 = l1 < l2; break;
1953 case TOK_UGE: l1 = l1 >= l2; break;
1954 case TOK_EQ: l1 = l1 == l2; break;
1955 case TOK_NE: l1 = l1 != l2; break;
1956 case TOK_ULE: l1 = l1 <= l2; break;
1957 case TOK_UGT: l1 = l1 > l2; break;
1958 case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
1959 case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
1960 case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
1961 case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
1962 /* logical */
1963 case TOK_LAND: l1 = l1 && l2; break;
1964 case TOK_LOR: l1 = l1 || l2; break;
1965 default:
1966 goto general_case;
1968 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
1969 l1 = ((uint32_t)l1 |
1970 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
1971 v1->c.i = l1;
1972 vtop--;
1973 } else {
1974 /* if commutative ops, put c2 as constant */
1975 if (c1 && (op == '+' || op == '&' || op == '^' ||
1976 op == '|' || op == '*')) {
1977 vswap();
1978 c2 = c1; //c = c1, c1 = c2, c2 = c;
1979 l2 = l1; //l = l1, l1 = l2, l2 = l;
1981 if (!const_wanted &&
1982 c1 && ((l1 == 0 &&
1983 (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
1984 (l1 == -1 && op == TOK_SAR))) {
1985 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
1986 vtop--;
1987 } else if (!const_wanted &&
1988 c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
1989 (op == '|' &&
1990 (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))) ||
1991 (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
1992 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
1993 if (l2 == 1)
1994 vtop->c.i = 0;
1995 vswap();
1996 vtop--;
1997 } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
1998 op == TOK_PDIV) &&
1999 l2 == 1) ||
2000 ((op == '+' || op == '-' || op == '|' || op == '^' ||
2001 op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
2002 l2 == 0) ||
2003 (op == '&' &&
2004 (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))))) {
2005 /* filter out NOP operations like x*1, x-0, x&-1... */
2006 vtop--;
2007 } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
2008 /* try to use shifts instead of muls or divs */
2009 if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
2010 int n = -1;
2011 while (l2) {
2012 l2 >>= 1;
2013 n++;
2015 vtop->c.i = n;
2016 if (op == '*')
2017 op = TOK_SHL;
2018 else if (op == TOK_PDIV)
2019 op = TOK_SAR;
2020 else
2021 op = TOK_SHR;
2023 goto general_case;
2024 } else if (c2 && (op == '+' || op == '-') &&
2025 (((vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM))
2026 || (vtop[-1].r & (VT_VALMASK | VT_LVAL)) == VT_LOCAL)) {
2027 /* symbol + constant case */
2028 if (op == '-')
2029 l2 = -l2;
2030 l2 += vtop[-1].c.i;
2031 /* The backends can't always deal with addends to symbols
2032 larger than +-1<<31. Don't construct such. */
2033 if ((int)l2 != l2)
2034 goto general_case;
2035 vtop--;
2036 vtop->c.i = l2;
2037 } else {
2038 general_case:
2039 /* call low level op generator */
2040 if (t1 == VT_LLONG || t2 == VT_LLONG ||
2041 (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
2042 gen_opl(op);
2043 else
2044 gen_opi(op);
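/* Illustrative examples of the folding done above (a sketch): */
#if 0
int x = 5;
int a = 3 + 4;   /* both operands constant: folded to 7           */
int b = x * 1;   /* NOP operation: filtered out, leaves just x    */
int c = x & -1;  /* likewise dropped                              */
int d = x * 8;   /* strength-reduced to x << 3 (power of two)     */
int e = x / 0;   /* kept as a real division so a runtime trap can */
                 /* still occur (error only if const_wanted)      */
#endif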
2049 /* generate a floating point operation with constant propagation */
2050 static void gen_opif(int op)
2052 int c1, c2;
2053 SValue *v1, *v2;
2054 #if defined _MSC_VER && defined _AMD64_
2055 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2056 volatile
2057 #endif
2058 long double f1, f2;
2060 v1 = vtop - 1;
2061 v2 = vtop;
2062 /* currently, we cannot do computations with forward symbols */
2063 c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2064 c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2065 if (c1 && c2) {
2066 if (v1->type.t == VT_FLOAT) {
2067 f1 = v1->c.f;
2068 f2 = v2->c.f;
2069 } else if (v1->type.t == VT_DOUBLE) {
2070 f1 = v1->c.d;
2071 f2 = v2->c.d;
2072 } else {
2073 f1 = v1->c.ld;
2074 f2 = v2->c.ld;
2077 /* NOTE: we only do constant propagation if the operands are finite
2078 numbers (not NaN or infinity) (ANSI spec) */
2079 if (!ieee_finite(f1) || !ieee_finite(f2))
2080 goto general_case;
2082 switch(op) {
2083 case '+': f1 += f2; break;
2084 case '-': f1 -= f2; break;
2085 case '*': f1 *= f2; break;
2086 case '/':
2087 if (f2 == 0.0) {
2088 /* If not in initializer we need to potentially generate
2089 FP exceptions at runtime, otherwise we want to fold. */
2090 if (!const_wanted)
2091 goto general_case;
2093 f1 /= f2;
2094 break;
2095 /* XXX: also handles tests ? */
2096 default:
2097 goto general_case;
2099 /* XXX: overflow test ? */
2100 if (v1->type.t == VT_FLOAT) {
2101 v1->c.f = f1;
2102 } else if (v1->type.t == VT_DOUBLE) {
2103 v1->c.d = f1;
2104 } else {
2105 v1->c.ld = f1;
2107 vtop--;
2108 } else {
2109 general_case:
2110 gen_opf(op);
2114 static int pointed_size(CType *type)
2116 int align;
2117 return type_size(pointed_type(type), &align);
2120 static void vla_runtime_pointed_size(CType *type)
2122 int align;
2123 vla_runtime_type_size(pointed_type(type), &align);
2126 static inline int is_null_pointer(SValue *p)
2128 if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
2129 return 0;
2130 return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
2131 ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
2132 ((p->type.t & VT_BTYPE) == VT_PTR &&
2133 (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0) &&
2134 ((pointed_type(&p->type)->t & VT_BTYPE) == VT_VOID) &&
2135 0 == (pointed_type(&p->type)->t & (VT_CONSTANT | VT_VOLATILE))
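/* i.e. the constants 0, 0LL and (void *)0 all qualify as null pointer
   constants here, while (const void *)0 does not because of the
   qualifier check on the pointed-to type above. */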
2138 static inline int is_integer_btype(int bt)
2140 return (bt == VT_BYTE || bt == VT_SHORT ||
2141 bt == VT_INT || bt == VT_LLONG);
2144 /* check types for comparison or subtraction of pointers */
2145 static void check_comparison_pointer_types(SValue *p1, SValue *p2, int op)
2147 CType *type1, *type2, tmp_type1, tmp_type2;
2148 int bt1, bt2;
2150 /* null pointers are accepted for all comparisons, as in gcc */
2151 if (is_null_pointer(p1) || is_null_pointer(p2))
2152 return;
2153 type1 = &p1->type;
2154 type2 = &p2->type;
2155 bt1 = type1->t & VT_BTYPE;
2156 bt2 = type2->t & VT_BTYPE;
2157 /* accept comparison between pointer and integer with a warning */
2158 if ((is_integer_btype(bt1) || is_integer_btype(bt2)) && op != '-') {
2159 if (op != TOK_LOR && op != TOK_LAND )
2160 tcc_warning("comparison between pointer and integer");
2161 return;
2164 /* both must be pointers or implicit function pointers */
2165 if (bt1 == VT_PTR) {
2166 type1 = pointed_type(type1);
2167 } else if (bt1 != VT_FUNC)
2168 goto invalid_operands;
2170 if (bt2 == VT_PTR) {
2171 type2 = pointed_type(type2);
2172 } else if (bt2 != VT_FUNC) {
2173 invalid_operands:
2174 tcc_error("invalid operands to binary %s", get_tok_str(op, NULL));
2176 if ((type1->t & VT_BTYPE) == VT_VOID ||
2177 (type2->t & VT_BTYPE) == VT_VOID)
2178 return;
2179 tmp_type1 = *type1;
2180 tmp_type2 = *type2;
2181 tmp_type1.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
2182 tmp_type2.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
2183 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
2184 /* gcc-like error if '-' is used */
2185 if (op == '-')
2186 goto invalid_operands;
2187 else
2188 tcc_warning("comparison of distinct pointer types lacks a cast");
2192 /* generic gen_op: handles types problems */
2193 ST_FUNC void gen_op(int op)
2195 int u, t1, t2, bt1, bt2, t;
2196 CType type1;
2198 redo:
2199 t1 = vtop[-1].type.t;
2200 t2 = vtop[0].type.t;
2201 bt1 = t1 & VT_BTYPE;
2202 bt2 = t2 & VT_BTYPE;
2204 if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
2205 tcc_error("operation on a struct");
2206 } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
2207 if (bt2 == VT_FUNC) {
2208 mk_pointer(&vtop->type);
2209 gaddrof();
2211 if (bt1 == VT_FUNC) {
2212 vswap();
2213 mk_pointer(&vtop->type);
2214 gaddrof();
2215 vswap();
2217 goto redo;
2218 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
2219 /* at least one operand is a pointer */
2220 /* relational op: must be both pointers */
2221 if (op >= TOK_ULT && op <= TOK_LOR) {
2222 check_comparison_pointer_types(vtop - 1, vtop, op);
2223 /* pointers are handled as unsigned */
2224 #if PTR_SIZE == 8
2225 t = VT_LLONG | VT_UNSIGNED;
2226 #else
2227 t = VT_INT | VT_UNSIGNED;
2228 #endif
2229 goto std_op;
2231 /* if both pointers, then it must be the '-' op */
2232 if (bt1 == VT_PTR && bt2 == VT_PTR) {
2233 if (op != '-')
2234 tcc_error("cannot use pointers here");
2235 check_comparison_pointer_types(vtop - 1, vtop, op);
2236 /* XXX: check that types are compatible */
2237 if (vtop[-1].type.t & VT_VLA) {
2238 vla_runtime_pointed_size(&vtop[-1].type);
2239 } else {
2240 vpushi(pointed_size(&vtop[-1].type));
2242 vrott(3);
2243 gen_opic(op);
2244 vtop->type.t = ptrdiff_type.t;
2245 vswap();
2246 gen_op(TOK_PDIV);
2247 } else {
2248 /* exactly one pointer : must be '+' or '-'. */
2249 if (op != '-' && op != '+')
2250 tcc_error("cannot use pointers here");
2251 /* Put pointer as first operand */
2252 if (bt2 == VT_PTR) {
2253 vswap();
2254 t = t1, t1 = t2, t2 = t;
2256 #if PTR_SIZE == 4
2257 if ((vtop[0].type.t & VT_BTYPE) == VT_LLONG)
2258 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2259 gen_cast_s(VT_INT);
2260 #endif
2261 type1 = vtop[-1].type;
2262 type1.t &= ~VT_ARRAY;
2263 if (vtop[-1].type.t & VT_VLA)
2264 vla_runtime_pointed_size(&vtop[-1].type);
2265 else {
2266 u = pointed_size(&vtop[-1].type);
2267 if (u < 0)
2268 tcc_error("unknown array element size");
2269 #if PTR_SIZE == 8
2270 vpushll(u);
2271 #else
2272 /* XXX: cast to int ? (long long case) */
2273 vpushi(u);
2274 #endif
2276 gen_op('*');
2277 #if 0
2278 /* #ifdef CONFIG_TCC_BCHECK
2279 The main reason for removing this code:
2280 #include <stdio.h>
2281 int main ()
2283 int v[10];
2284 int i = 10;
2285 int j = 9;
2286 fprintf(stderr, "v+i-j = %p\n", v+i-j);
2287 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
2289 When this code is enabled, the output looks like
2290 v+i-j = 0xfffffffe
2291 v+(i-j) = 0xbff84000
2293 /* if evaluating constant expression, no code should be
2294 generated, so no bound check */
2295 if (tcc_state->do_bounds_check && !const_wanted) {
2296 /* if bounded pointers, we generate special code to
2297 test bounds */
2298 if (op == '-') {
2299 vpushi(0);
2300 vswap();
2301 gen_op('-');
2303 gen_bounded_ptr_add();
2304 } else
2305 #endif
2307 gen_opic(op);
2309 /* restore the type in case gen_opic() swapped the operands */
2310 vtop->type = type1;
2312 } else if (is_float(bt1) || is_float(bt2)) {
2313 /* compute bigger type and do implicit casts */
2314 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
2315 t = VT_LDOUBLE;
2316 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
2317 t = VT_DOUBLE;
2318 } else {
2319 t = VT_FLOAT;
2321 /* floats can only be used for a few operations */
2322 if (op != '+' && op != '-' && op != '*' && op != '/' &&
2323 (op < TOK_ULT || op > TOK_GT))
2324 tcc_error("invalid operands for binary operation");
2325 goto std_op;
2326 } else if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL) {
2327 t = bt1 == VT_LLONG ? VT_LLONG : VT_INT;
2328 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (t | VT_UNSIGNED))
2329 t |= VT_UNSIGNED;
2330 t |= (VT_LONG & t1);
2331 goto std_op;
2332 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
2333 /* cast to the biggest operand type */
2334 t = VT_LLONG | VT_LONG;
2335 if (bt1 == VT_LLONG)
2336 t &= t1;
2337 if (bt2 == VT_LLONG)
2338 t &= t2;
2339 /* convert to unsigned if it does not fit in a long long */
2340 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
2341 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
2342 t |= VT_UNSIGNED;
2343 goto std_op;
2344 } else {
2345 /* integer operations */
2346 t = VT_INT | (VT_LONG & (t1 | t2));
2347 /* convert to unsigned if it does not fit in an integer */
2348 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
2349 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
2350 t |= VT_UNSIGNED;
2351 std_op:
2352 /* XXX: currently, some unsigned operations are explicit, so
2353 we modify them here */
2354 if (t & VT_UNSIGNED) {
2355 if (op == TOK_SAR)
2356 op = TOK_SHR;
2357 else if (op == '/')
2358 op = TOK_UDIV;
2359 else if (op == '%')
2360 op = TOK_UMOD;
2361 else if (op == TOK_LT)
2362 op = TOK_ULT;
2363 else if (op == TOK_GT)
2364 op = TOK_UGT;
2365 else if (op == TOK_LE)
2366 op = TOK_ULE;
2367 else if (op == TOK_GE)
2368 op = TOK_UGE;
2370 vswap();
2371 type1.t = t;
2372 type1.ref = NULL;
2373 gen_cast(&type1);
2374 vswap();
2375 /* special case for shifts and long long: we keep the shift as
2376 an integer */
2377 if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
2378 type1.t = VT_INT;
2379 gen_cast(&type1);
2380 if (is_float(t))
2381 gen_opif(op);
2382 else
2383 gen_opic(op);
2384 if (op >= TOK_ULT && op <= TOK_GT) {
2385 /* relational op: the result is an int */
2386 vtop->type.t = VT_INT;
2387 } else {
2388 vtop->type.t = t;
2391 // Make sure that we have converted to an rvalue:
2392 if (vtop->r & VT_LVAL)
2393 gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
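/* Illustration (not part of tcc): the pointer-arithmetic lowering done by
   gen_op() above, written out as a sketch.  "p + n" scales n by the element
   size before the integer add, and "q - p" divides the byte distance by the
   element size (TOK_PDIV), yielding a ptrdiff_t-typed result.

       #include <stdio.h>

       int main(void)
       {
           int v[10];
           int *p = v + 2;                 // address of v plus 2 * sizeof(int)
           int *q = v + 7;
           printf("%d\n", (int)(q - p));   // byte distance / sizeof(int) == 5
           return 0;
       }
*/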
2396 #ifndef TCC_TARGET_ARM
2397 /* generic itof for unsigned long long case */
2398 static void gen_cvt_itof1(int t)
2400 #ifdef TCC_TARGET_ARM64
2401 gen_cvt_itof(t);
2402 #else
2403 if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
2404 (VT_LLONG | VT_UNSIGNED)) {
2406 if (t == VT_FLOAT)
2407 vpush_global_sym(&func_old_type, TOK___floatundisf);
2408 #if LDOUBLE_SIZE != 8
2409 else if (t == VT_LDOUBLE)
2410 vpush_global_sym(&func_old_type, TOK___floatundixf);
2411 #endif
2412 else
2413 vpush_global_sym(&func_old_type, TOK___floatundidf);
2414 vrott(2);
2415 gfunc_call(1);
2416 vpushi(0);
2417 vtop->r = reg_fret(t);
2418 } else {
2419 gen_cvt_itof(t);
2421 #endif
2423 #endif
2425 /* generic ftoi for unsigned long long case */
2426 static void gen_cvt_ftoi1(int t)
2428 #ifdef TCC_TARGET_ARM64
2429 gen_cvt_ftoi(t);
2430 #else
2431 int st;
2433 if (t == (VT_LLONG | VT_UNSIGNED)) {
2434 /* not handled natively */
2435 st = vtop->type.t & VT_BTYPE;
2436 if (st == VT_FLOAT)
2437 vpush_global_sym(&func_old_type, TOK___fixunssfdi);
2438 #if LDOUBLE_SIZE != 8
2439 else if (st == VT_LDOUBLE)
2440 vpush_global_sym(&func_old_type, TOK___fixunsxfdi);
2441 #endif
2442 else
2443 vpush_global_sym(&func_old_type, TOK___fixunsdfdi);
2444 vrott(2);
2445 gfunc_call(1);
2446 vpushi(0);
2447 vtop->r = REG_IRET;
2448 vtop->r2 = REG_LRET;
2449 } else {
2450 gen_cvt_ftoi(t);
2452 #endif
2455 /* force char or short cast */
2456 static void force_charshort_cast(int t)
2458 int bits, dbt;
2460 /* cannot cast static initializers */
2461 if (STATIC_DATA_WANTED)
2462 return;
2464 dbt = t & VT_BTYPE;
2465 /* XXX: add optimization if lvalue : just change type and offset */
2466 if (dbt == VT_BYTE)
2467 bits = 8;
2468 else
2469 bits = 16;
2470 if (t & VT_UNSIGNED) {
2471 vpushi((1 << bits) - 1);
2472 gen_op('&');
2473 } else {
2474 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
2475 bits = 64 - bits;
2476 else
2477 bits = 32 - bits;
2478 vpushi(bits);
2479 gen_op(TOK_SHL);
2480 /* the result must be signed, or the SAR would be converted to an SHL.
2481 This was not the case when "t" was a signed short
2482 and the last value on the stack was an unsigned int */
2483 vtop->type.t &= ~VT_UNSIGNED;
2484 vpushi(bits);
2485 gen_op(TOK_SAR);
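/* Illustration (not part of tcc): the effect of the delayed cast above on an
   ordinary value (a sketch, assuming a common two's-complement target).  For
   unsigned destinations the value is masked; for signed destinations the
   generated code shifts left and then arithmetically right, so for a 32-bit
   int the effect of (signed char)x is that of (x << 24) >> 24 with TOK_SAR.

       #include <stdio.h>

       int main(void)
       {
           int x = 0x1ABCD;
           printf("%d\n", x & 0xff);                 // unsigned char view: 205
           printf("%d\n", (signed char)(x & 0xff));  // signed char view:   -51
           return 0;
       }
*/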
2489 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2490 static void gen_cast_s(int t)
2492 CType type;
2493 type.t = t;
2494 type.ref = NULL;
2495 gen_cast(&type);
2498 static void gen_cast(CType *type)
2500 int sbt, dbt, sf, df, c, p;
2502 /* special delayed cast for char/short */
2503 /* XXX: in some cases (multiple cascaded casts), it may still
2504 be incorrect */
2505 if (vtop->r & VT_MUSTCAST) {
2506 vtop->r &= ~VT_MUSTCAST;
2507 force_charshort_cast(vtop->type.t);
2510 /* bitfields first get cast to ints */
2511 if (vtop->type.t & VT_BITFIELD) {
2512 gv(RC_INT);
2515 dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
2516 sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
2518 if (sbt != dbt) {
2519 sf = is_float(sbt);
2520 df = is_float(dbt);
2521 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2522 p = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM);
2523 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
2524 c &= dbt != VT_LDOUBLE;
2525 #endif
2526 if (c) {
2527 /* constant case: we can do it now */
2528 /* XXX: in ISO C, cannot do it if the conversion raises an error */
2529 if (sbt == VT_FLOAT)
2530 vtop->c.ld = vtop->c.f;
2531 else if (sbt == VT_DOUBLE)
2532 vtop->c.ld = vtop->c.d;
2534 if (df) {
2535 if ((sbt & VT_BTYPE) == VT_LLONG) {
2536 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
2537 vtop->c.ld = vtop->c.i;
2538 else
2539 vtop->c.ld = -(long double)-vtop->c.i;
2540 } else if(!sf) {
2541 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
2542 vtop->c.ld = (uint32_t)vtop->c.i;
2543 else
2544 vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
2547 if (dbt == VT_FLOAT)
2548 vtop->c.f = (float)vtop->c.ld;
2549 else if (dbt == VT_DOUBLE)
2550 vtop->c.d = (double)vtop->c.ld;
2551 } else if (sf && dbt == (VT_LLONG|VT_UNSIGNED)) {
2552 vtop->c.i = vtop->c.ld;
2553 } else if (sf && dbt == VT_BOOL) {
2554 vtop->c.i = (vtop->c.ld != 0);
2555 } else {
2556 if(sf)
2557 vtop->c.i = vtop->c.ld;
2558 else if (sbt == (VT_LLONG|VT_UNSIGNED))
2559 ; /* nothing to do: the value is already a full 64-bit constant */
2560 else if (sbt & VT_UNSIGNED)
2561 vtop->c.i = (uint32_t)vtop->c.i;
2562 #if PTR_SIZE == 8
2563 else if (sbt == VT_PTR)
2564 ; /* nothing to do: pointer constants already have full width */
2565 #endif
2566 else if (sbt != VT_LLONG)
2567 vtop->c.i = ((uint32_t)vtop->c.i |
2568 -(vtop->c.i & 0x80000000));
2570 if (dbt == (VT_LLONG|VT_UNSIGNED))
2571 ; /* nothing to do: keep the full 64-bit value */
2572 else if (dbt == VT_BOOL)
2573 vtop->c.i = (vtop->c.i != 0);
2574 #if PTR_SIZE == 8
2575 else if (dbt == VT_PTR)
2576 ; /* nothing to do: pointer destination keeps the value as is */
2577 #endif
2578 else if (dbt != VT_LLONG) {
2579 uint32_t m = ((dbt & VT_BTYPE) == VT_BYTE ? 0xff :
2580 (dbt & VT_BTYPE) == VT_SHORT ? 0xffff :
2581 0xffffffff);
2582 vtop->c.i &= m;
2583 if (!(dbt & VT_UNSIGNED))
2584 vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
2587 } else if (p && dbt == VT_BOOL) {
2588 vtop->r = VT_CONST;
2589 vtop->c.i = 1;
2590 } else {
2591 /* non constant case: generate code */
2592 if (sf && df) {
2593 /* convert from fp to fp */
2594 gen_cvt_ftof(dbt);
2595 } else if (df) {
2596 /* convert int to fp */
2597 gen_cvt_itof1(dbt);
2598 } else if (sf) {
2599 /* convert fp to int */
2600 if (dbt == VT_BOOL) {
2601 vpushi(0);
2602 gen_op(TOK_NE);
2603 } else {
2604 /* we handle char/short/etc... with generic code */
2605 if (dbt != (VT_INT | VT_UNSIGNED) &&
2606 dbt != (VT_LLONG | VT_UNSIGNED) &&
2607 dbt != VT_LLONG)
2608 dbt = VT_INT;
2609 gen_cvt_ftoi1(dbt);
2610 if (dbt == VT_INT && (type->t & (VT_BTYPE | VT_UNSIGNED)) != dbt) {
2611 /* additional cast for char/short... */
2612 vtop->type.t = dbt;
2613 gen_cast(type);
2616 #if PTR_SIZE == 4
2617 } else if ((dbt & VT_BTYPE) == VT_LLONG) {
2618 if ((sbt & VT_BTYPE) != VT_LLONG) {
2619 /* scalar to long long */
2620 /* machine independent conversion */
2621 gv(RC_INT);
2622 /* generate high word */
2623 if (sbt == (VT_INT | VT_UNSIGNED)) {
2624 vpushi(0);
2625 gv(RC_INT);
2626 } else {
2627 if (sbt == VT_PTR) {
2628 /* cast from pointer to int before we apply
2629 shift operation, which pointers don't support */
2630 gen_cast_s(VT_INT);
2632 gv_dup();
2633 vpushi(31);
2634 gen_op(TOK_SAR);
2636 /* patch second register */
2637 vtop[-1].r2 = vtop->r;
2638 vpop();
2640 #else
2641 } else if ((dbt & VT_BTYPE) == VT_LLONG ||
2642 (dbt & VT_BTYPE) == VT_PTR ||
2643 (dbt & VT_BTYPE) == VT_FUNC) {
2644 if ((sbt & VT_BTYPE) != VT_LLONG &&
2645 (sbt & VT_BTYPE) != VT_PTR &&
2646 (sbt & VT_BTYPE) != VT_FUNC) {
2647 /* need to convert from 32bit to 64bit */
2648 gv(RC_INT);
2649 if (sbt != (VT_INT | VT_UNSIGNED)) {
2650 #if defined(TCC_TARGET_ARM64)
2651 gen_cvt_sxtw();
2652 #elif defined(TCC_TARGET_X86_64)
2653 int r = gv(RC_INT);
2654 /* x86_64 specific: movslq */
2655 o(0x6348);
2656 o(0xc0 + (REG_VALUE(r) << 3) + REG_VALUE(r));
2657 #else
2658 #error
2659 #endif
2662 #endif
2663 } else if (dbt == VT_BOOL) {
2664 /* scalar to bool */
2665 vpushi(0);
2666 gen_op(TOK_NE);
2667 } else if ((dbt & VT_BTYPE) == VT_BYTE ||
2668 (dbt & VT_BTYPE) == VT_SHORT) {
2669 if (sbt == VT_PTR) {
2670 vtop->type.t = VT_INT;
2671 tcc_warning("nonportable conversion from pointer to char/short");
2673 force_charshort_cast(dbt);
2674 } else if ((dbt & VT_BTYPE) == VT_INT) {
2675 /* scalar to int */
2676 if ((sbt & VT_BTYPE) == VT_LLONG) {
2677 #if PTR_SIZE == 4
2678 /* from long long: just take low order word */
2679 lexpand();
2680 vpop();
2681 #else
2682 vpushi(0xffffffff);
2683 vtop->type.t |= VT_UNSIGNED;
2684 gen_op('&');
2685 #endif
2687 /* if lvalue and single word type, nothing to do because
2688 the lvalue already contains the real type size (see
2689 VT_LVAL_xxx constants) */
2692 } else if ((dbt & VT_BTYPE) == VT_PTR && !(vtop->r & VT_LVAL)) {
2693 /* if we are casting between pointer types,
2694 we must update the VT_LVAL_xxx size */
2695 vtop->r = (vtop->r & ~VT_LVAL_TYPE)
2696 | (lvalue_type(type->ref->type.t) & VT_LVAL_TYPE);
2698 vtop->type = *type;
2699 vtop->type.t &= ~ ( VT_CONSTANT | VT_VOLATILE | VT_ARRAY );
2702 /* return type size as known at compile time. Put alignment at 'a' */
2703 ST_FUNC int type_size(CType *type, int *a)
2705 Sym *s;
2706 int bt;
2708 bt = type->t & VT_BTYPE;
2709 if (bt == VT_STRUCT) {
2710 /* struct/union */
2711 s = type->ref;
2712 *a = s->r;
2713 return s->c;
2714 } else if (bt == VT_PTR) {
2715 if (type->t & VT_ARRAY) {
2716 int ts;
2718 s = type->ref;
2719 ts = type_size(&s->type, a);
2721 if (ts < 0 && s->c < 0)
2722 ts = -ts;
2724 return ts * s->c;
2725 } else {
2726 *a = PTR_SIZE;
2727 return PTR_SIZE;
2729 } else if (IS_ENUM(type->t) && type->ref->c == -1) {
2730 return -1; /* incomplete enum */
2731 } else if (bt == VT_LDOUBLE) {
2732 *a = LDOUBLE_ALIGN;
2733 return LDOUBLE_SIZE;
2734 } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
2735 #ifdef TCC_TARGET_I386
2736 #ifdef TCC_TARGET_PE
2737 *a = 8;
2738 #else
2739 *a = 4;
2740 #endif
2741 #elif defined(TCC_TARGET_ARM)
2742 #ifdef TCC_ARM_EABI
2743 *a = 8;
2744 #else
2745 *a = 4;
2746 #endif
2747 #else
2748 *a = 8;
2749 #endif
2750 return 8;
2751 } else if (bt == VT_INT || bt == VT_FLOAT) {
2752 *a = 4;
2753 return 4;
2754 } else if (bt == VT_SHORT) {
2755 *a = 2;
2756 return 2;
2757 } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
2758 *a = 8;
2759 return 16;
2760 } else {
2761 /* char, void, function, _Bool */
2762 *a = 1;
2763 return 1;
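/* Illustration (not part of tcc): probing the sizes/alignments computed above
   from user code (a sketch; on typical ABIs the offset of 'd' equals the
   struct alignment of double, i.e. 4 on classic i386 SysV and 8 elsewhere,
   which is exactly what the #ifdef ladder above encodes).

       #include <stdio.h>
       #include <stddef.h>

       struct probe { char c; double d; };

       int main(void)
       {
           printf("sizeof(double) = %zu\n", sizeof(double));
           printf("align(double)  = %zu\n", offsetof(struct probe, d));
           return 0;
       }
*/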
2767 /* push the type size as known at runtime on top of the value stack. Put
2768 alignment at 'a' */
2769 ST_FUNC void vla_runtime_type_size(CType *type, int *a)
2771 if (type->t & VT_VLA) {
2772 type_size(&type->ref->type, a);
2773 vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
2774 } else {
2775 vpushi(type_size(type, a));
2779 static void vla_sp_restore(void) {
2780 if (vlas_in_scope) {
2781 gen_vla_sp_restore(vla_sp_loc);
2785 static void vla_sp_restore_root(void) {
2786 if (vlas_in_scope) {
2787 gen_vla_sp_restore(vla_sp_root_loc);
2791 /* return the pointed type of t */
2792 static inline CType *pointed_type(CType *type)
2794 return &type->ref->type;
2797 /* modify 'type' so that it is a pointer to the original type. */
2798 ST_FUNC void mk_pointer(CType *type)
2800 Sym *s;
2801 s = sym_push(SYM_FIELD, type, 0, -1);
2802 type->t = VT_PTR | (type->t & VT_STORAGE);
2803 type->ref = s;
2806 /* compare function types. OLD functions match any new functions */
2807 static int is_compatible_func(CType *type1, CType *type2)
2809 Sym *s1, *s2;
2811 s1 = type1->ref;
2812 s2 = type2->ref;
2813 if (!is_compatible_types(&s1->type, &s2->type))
2814 return 0;
2815 /* check func_call */
2816 if (s1->f.func_call != s2->f.func_call)
2817 return 0;
2818 /* XXX: not complete */
2819 if (s1->f.func_type == FUNC_OLD || s2->f.func_type == FUNC_OLD)
2820 return 1;
2821 if (s1->f.func_type != s2->f.func_type)
2822 return 0;
2823 while (s1 != NULL) {
2824 if (s2 == NULL)
2825 return 0;
2826 if (!is_compatible_unqualified_types(&s1->type, &s2->type))
2827 return 0;
2828 s1 = s1->next;
2829 s2 = s2->next;
2831 if (s2)
2832 return 0;
2833 return 1;
2836 /* return true if type1 and type2 are the same. If unqualified is
2837 true, qualifiers on the types are ignored.
2839 static int compare_types(CType *type1, CType *type2, int unqualified)
2841 int bt1, t1, t2;
2843 t1 = type1->t & VT_TYPE;
2844 t2 = type2->t & VT_TYPE;
2845 if (unqualified) {
2846 /* strip qualifiers before comparing */
2847 t1 &= ~(VT_CONSTANT | VT_VOLATILE);
2848 t2 &= ~(VT_CONSTANT | VT_VOLATILE);
2851 /* Default vs. explicit signedness only matters for char */
2852 if ((t1 & VT_BTYPE) != VT_BYTE) {
2853 t1 &= ~VT_DEFSIGN;
2854 t2 &= ~VT_DEFSIGN;
2856 /* XXX: bitfields ? */
2857 if (t1 != t2)
2858 return 0;
2859 /* test more complicated cases */
2860 bt1 = t1 & (VT_BTYPE | VT_ARRAY);
2861 if (bt1 == VT_PTR) {
2862 type1 = pointed_type(type1);
2863 type2 = pointed_type(type2);
2864 return is_compatible_types(type1, type2);
2865 } else if (bt1 & VT_ARRAY) {
2866 return type1->ref->c < 0 || type2->ref->c < 0
2867 || type1->ref->c == type2->ref->c;
2868 } else if (bt1 == VT_STRUCT) {
2869 return (type1->ref == type2->ref);
2870 } else if (bt1 == VT_FUNC) {
2871 return is_compatible_func(type1, type2);
2872 } else if (IS_ENUM(type1->t) || IS_ENUM(type2->t)) {
2873 return type1->ref == type2->ref;
2874 } else {
2875 return 1;
2879 /* return true if type1 and type2 are exactly the same (including
2880 qualifiers).
2882 static int is_compatible_types(CType *type1, CType *type2)
2884 return compare_types(type1,type2,0);
2887 /* return true if type1 and type2 are the same (ignoring qualifiers).
2889 static int is_compatible_unqualified_types(CType *type1, CType *type2)
2891 return compare_types(type1,type2,1);
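/* Illustration (not part of tcc): two consequences of the compatibility rules
   above, in source terms (a sketch).

       extern int a[];
       extern int a[10];         // OK: an array of unknown size matches any size

       extern char c;
       // extern signed char c;  // rejected: "char" and "signed char" stay
       //                        // distinct (VT_DEFSIGN is kept for VT_BYTE)
*/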
2894 /* print a type. If 'varstr' is not NULL, then the variable is also
2895 printed in the type */
2896 /* XXX: union */
2897 /* XXX: add array and function pointers */
2898 static void type_to_str(char *buf, int buf_size,
2899 CType *type, const char *varstr)
2901 int bt, v, t;
2902 Sym *s, *sa;
2903 char buf1[256];
2904 const char *tstr;
2906 t = type->t;
2907 bt = t & VT_BTYPE;
2908 buf[0] = '\0';
2910 if (t & VT_EXTERN)
2911 pstrcat(buf, buf_size, "extern ");
2912 if (t & VT_STATIC)
2913 pstrcat(buf, buf_size, "static ");
2914 if (t & VT_TYPEDEF)
2915 pstrcat(buf, buf_size, "typedef ");
2916 if (t & VT_INLINE)
2917 pstrcat(buf, buf_size, "inline ");
2918 if (t & VT_VOLATILE)
2919 pstrcat(buf, buf_size, "volatile ");
2920 if (t & VT_CONSTANT)
2921 pstrcat(buf, buf_size, "const ");
2923 if (((t & VT_DEFSIGN) && bt == VT_BYTE)
2924 || ((t & VT_UNSIGNED)
2925 && (bt == VT_SHORT || bt == VT_INT || bt == VT_LLONG)
2926 && !IS_ENUM(t)
2928 pstrcat(buf, buf_size, (t & VT_UNSIGNED) ? "unsigned " : "signed ");
2930 buf_size -= strlen(buf);
2931 buf += strlen(buf);
2933 switch(bt) {
2934 case VT_VOID:
2935 tstr = "void";
2936 goto add_tstr;
2937 case VT_BOOL:
2938 tstr = "_Bool";
2939 goto add_tstr;
2940 case VT_BYTE:
2941 tstr = "char";
2942 goto add_tstr;
2943 case VT_SHORT:
2944 tstr = "short";
2945 goto add_tstr;
2946 case VT_INT:
2947 tstr = "int";
2948 goto maybe_long;
2949 case VT_LLONG:
2950 tstr = "long long";
2951 maybe_long:
2952 if (t & VT_LONG)
2953 tstr = "long";
2954 if (!IS_ENUM(t))
2955 goto add_tstr;
2956 tstr = "enum ";
2957 goto tstruct;
2958 case VT_FLOAT:
2959 tstr = "float";
2960 goto add_tstr;
2961 case VT_DOUBLE:
2962 tstr = "double";
2963 goto add_tstr;
2964 case VT_LDOUBLE:
2965 tstr = "long double";
2966 add_tstr:
2967 pstrcat(buf, buf_size, tstr);
2968 break;
2969 case VT_STRUCT:
2970 tstr = "struct ";
2971 if (IS_UNION(t))
2972 tstr = "union ";
2973 tstruct:
2974 pstrcat(buf, buf_size, tstr);
2975 v = type->ref->v & ~SYM_STRUCT;
2976 if (v >= SYM_FIRST_ANOM)
2977 pstrcat(buf, buf_size, "<anonymous>");
2978 else
2979 pstrcat(buf, buf_size, get_tok_str(v, NULL));
2980 break;
2981 case VT_FUNC:
2982 s = type->ref;
2983 buf1[0]=0;
2984 if (varstr && '*' == *varstr) {
2985 pstrcat(buf1, sizeof(buf1), "(");
2986 pstrcat(buf1, sizeof(buf1), varstr);
2987 pstrcat(buf1, sizeof(buf1), ")");
2989 pstrcat(buf1, sizeof(buf1), "(");
2990 sa = s->next;
2991 while (sa != NULL) {
2992 char buf2[256];
2993 type_to_str(buf2, sizeof(buf2), &sa->type, NULL);
2994 pstrcat(buf1, sizeof(buf1), buf2);
2995 sa = sa->next;
2996 if (sa)
2997 pstrcat(buf1, sizeof(buf1), ", ");
2999 if (s->f.func_type == FUNC_ELLIPSIS)
3000 pstrcat(buf1, sizeof(buf1), ", ...");
3001 pstrcat(buf1, sizeof(buf1), ")");
3002 type_to_str(buf, buf_size, &s->type, buf1);
3003 goto no_var;
3004 case VT_PTR:
3005 s = type->ref;
3006 if (t & VT_ARRAY) {
3007 if (varstr && '*' == *varstr)
3008 snprintf(buf1, sizeof(buf1), "(%s)[%d]", varstr, s->c);
3009 else
3010 snprintf(buf1, sizeof(buf1), "%s[%d]", varstr ? varstr : "", s->c);
3011 type_to_str(buf, buf_size, &s->type, buf1);
3012 goto no_var;
3014 pstrcpy(buf1, sizeof(buf1), "*");
3015 if (t & VT_CONSTANT)
3016 pstrcat(buf1, sizeof(buf1), "const ");
3017 if (t & VT_VOLATILE)
3018 pstrcat(buf1, sizeof(buf1), "volatile ");
3019 if (varstr)
3020 pstrcat(buf1, sizeof(buf1), varstr);
3021 type_to_str(buf, buf_size, &s->type, buf1);
3022 goto no_var;
3024 if (varstr) {
3025 pstrcat(buf, buf_size, " ");
3026 pstrcat(buf, buf_size, varstr);
3028 no_var: ;
3031 /* verify type compatibility to store vtop in 'dt' type, and generate
3032 casts if needed. */
3033 static void gen_assign_cast(CType *dt)
3035 CType *st, *type1, *type2;
3036 char buf1[256], buf2[256];
3037 int dbt, sbt, qualwarn, lvl;
3039 st = &vtop->type; /* source type */
3040 dbt = dt->t & VT_BTYPE;
3041 sbt = st->t & VT_BTYPE;
3042 if (sbt == VT_VOID || dbt == VT_VOID) {
3043 if (sbt == VT_VOID && dbt == VT_VOID)
3044 ; /* It is Ok if both are void */
3045 else
3046 tcc_error("cannot cast from/to void");
3048 if (dt->t & VT_CONSTANT)
3049 tcc_warning("assignment of read-only location");
3050 switch(dbt) {
3051 case VT_PTR:
3052 /* special cases for pointers */
3053 /* '0' can also be a pointer */
3054 if (is_null_pointer(vtop))
3055 break;
3056 /* accept implicit pointer to integer cast with warning */
3057 if (is_integer_btype(sbt)) {
3058 tcc_warning("assignment makes pointer from integer without a cast");
3059 break;
3061 type1 = pointed_type(dt);
3062 if (sbt == VT_PTR)
3063 type2 = pointed_type(st);
3064 else if (sbt == VT_FUNC)
3065 type2 = st; /* a function is implicitly a function pointer */
3066 else
3067 goto error;
3068 if (is_compatible_types(type1, type2))
3069 break;
3070 for (qualwarn = lvl = 0;; ++lvl) {
3071 if (((type2->t & VT_CONSTANT) && !(type1->t & VT_CONSTANT)) ||
3072 ((type2->t & VT_VOLATILE) && !(type1->t & VT_VOLATILE)))
3073 qualwarn = 1;
3074 dbt = type1->t & (VT_BTYPE|VT_LONG);
3075 sbt = type2->t & (VT_BTYPE|VT_LONG);
3076 if (dbt != VT_PTR || sbt != VT_PTR)
3077 break;
3078 type1 = pointed_type(type1);
3079 type2 = pointed_type(type2);
3081 if (!is_compatible_unqualified_types(type1, type2)) {
3082 if ((dbt == VT_VOID || sbt == VT_VOID) && lvl == 0) {
3083 /* void * can match anything */
3084 } else if (dbt == sbt
3085 && is_integer_btype(sbt & VT_BTYPE)
3086 && IS_ENUM(type1->t) + IS_ENUM(type2->t)
3087 + !!((type1->t ^ type2->t) & VT_UNSIGNED) < 2) {
3088 /* Like GCC, don't warn by default for mere changes
3089 in pointer target signedness. Do warn for different
3090 base types, though, in particular for unsigned enums
3091 and signed int targets. */
3092 } else {
3093 tcc_warning("assignment from incompatible pointer type");
3094 break;
3097 if (qualwarn)
3098 tcc_warning("assignment discards qualifiers from pointer target type");
3099 break;
3100 case VT_BYTE:
3101 case VT_SHORT:
3102 case VT_INT:
3103 case VT_LLONG:
3104 if (sbt == VT_PTR || sbt == VT_FUNC) {
3105 tcc_warning("assignment makes integer from pointer without a cast");
3106 } else if (sbt == VT_STRUCT) {
3107 goto case_VT_STRUCT;
3109 /* XXX: more tests */
3110 break;
3111 case VT_STRUCT:
3112 case_VT_STRUCT:
3113 if (!is_compatible_unqualified_types(dt, st)) {
3114 error:
3115 type_to_str(buf1, sizeof(buf1), st, NULL);
3116 type_to_str(buf2, sizeof(buf2), dt, NULL);
3117 tcc_error("cannot cast '%s' to '%s'", buf1, buf2);
3119 break;
3121 gen_cast(dt);
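/* Illustration (not part of tcc): assignments and the diagnostics chosen by
   gen_assign_cast() above (a sketch; imagine the statements inside a function
   body, the messages are the strings used in the code).

       struct S { int x; } s;
       int i;  int *p;  const int *cp;

       p = i;     // warning: assignment makes pointer from integer without a cast
       i = p;     // warning: assignment makes integer from pointer without a cast
       p = cp;    // warning: assignment discards qualifiers from pointer target type
       p = 0;     // accepted: null pointer constant
       // s = i;  // error: cannot cast 'int' to 'struct S'
*/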
3124 /* store vtop in lvalue pushed on stack */
3125 ST_FUNC void vstore(void)
3127 int sbt, dbt, ft, r, t, size, align, bit_size, bit_pos, rc, delayed_cast;
3129 ft = vtop[-1].type.t;
3130 sbt = vtop->type.t & VT_BTYPE;
3131 dbt = ft & VT_BTYPE;
3132 if ((((sbt == VT_INT || sbt == VT_SHORT) && dbt == VT_BYTE) ||
3133 (sbt == VT_INT && dbt == VT_SHORT))
3134 && !(vtop->type.t & VT_BITFIELD)) {
3135 /* optimize char/short casts */
3136 delayed_cast = VT_MUSTCAST;
3137 vtop->type.t = ft & VT_TYPE;
3138 /* XXX: factorize */
3139 if (ft & VT_CONSTANT)
3140 tcc_warning("assignment of read-only location");
3141 } else {
3142 delayed_cast = 0;
3143 if (!(ft & VT_BITFIELD))
3144 gen_assign_cast(&vtop[-1].type);
3147 if (sbt == VT_STRUCT) {
3148 /* if structure, only generate pointer */
3149 /* structure assignment : generate memcpy */
3150 /* XXX: optimize if small size */
3151 size = type_size(&vtop->type, &align);
3153 /* destination */
3154 vswap();
3155 vtop->type.t = VT_PTR;
3156 gaddrof();
3158 /* address of memcpy() */
3159 #ifdef TCC_ARM_EABI
3160 if(!(align & 7))
3161 vpush_global_sym(&func_old_type, TOK_memcpy8);
3162 else if(!(align & 3))
3163 vpush_global_sym(&func_old_type, TOK_memcpy4);
3164 else
3165 #endif
3166 /* Use memmove, rather than memcpy, as dest and src may be the same: */
3167 vpush_global_sym(&func_old_type, TOK_memmove);
3169 vswap();
3170 /* source */
3171 vpushv(vtop - 2);
3172 vtop->type.t = VT_PTR;
3173 gaddrof();
3174 /* type size */
3175 vpushi(size);
3176 gfunc_call(3);
3178 /* leave source on stack */
3179 } else if (ft & VT_BITFIELD) {
3180 /* bitfield store handling */
3182 /* save lvalue as expression result (example: s.b = s.a = n;) */
3183 vdup(), vtop[-1] = vtop[-2];
3185 bit_pos = BIT_POS(ft);
3186 bit_size = BIT_SIZE(ft);
3187 /* remove bit field info to avoid loops */
3188 vtop[-1].type.t = ft & ~VT_STRUCT_MASK;
3190 if ((ft & VT_BTYPE) == VT_BOOL) {
3191 gen_cast(&vtop[-1].type);
3192 vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
3195 r = adjust_bf(vtop - 1, bit_pos, bit_size);
3196 if (r == VT_STRUCT) {
3197 gen_cast_s((ft & VT_BTYPE) == VT_LLONG ? VT_LLONG : VT_INT);
3198 store_packed_bf(bit_pos, bit_size);
3199 } else {
3200 unsigned long long mask = (1ULL << bit_size) - 1;
3201 if ((ft & VT_BTYPE) != VT_BOOL) {
3202 /* mask source */
3203 if ((vtop[-1].type.t & VT_BTYPE) == VT_LLONG)
3204 vpushll(mask);
3205 else
3206 vpushi((unsigned)mask);
3207 gen_op('&');
3209 /* shift source */
3210 vpushi(bit_pos);
3211 gen_op(TOK_SHL);
3212 vswap();
3213 /* duplicate destination */
3214 vdup();
3215 vrott(3);
3216 /* load destination, mask and or with source */
3217 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
3218 vpushll(~(mask << bit_pos));
3219 else
3220 vpushi(~((unsigned)mask << bit_pos));
3221 gen_op('&');
3222 gen_op('|');
3223 /* store result */
3224 vstore();
3225 /* ... and discard */
3226 vpop();
3228 } else if (dbt == VT_VOID) {
3229 --vtop;
3230 } else {
3231 #ifdef CONFIG_TCC_BCHECK
3232 /* bound check case */
3233 if (vtop[-1].r & VT_MUSTBOUND) {
3234 vswap();
3235 gbound();
3236 vswap();
3238 #endif
3239 rc = RC_INT;
3240 if (is_float(ft)) {
3241 rc = RC_FLOAT;
3242 #ifdef TCC_TARGET_X86_64
3243 if ((ft & VT_BTYPE) == VT_LDOUBLE) {
3244 rc = RC_ST0;
3245 } else if ((ft & VT_BTYPE) == VT_QFLOAT) {
3246 rc = RC_FRET;
3248 #endif
3250 r = gv(rc); /* generate value */
3251 /* if lvalue was saved on stack, must read it */
3252 if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
3253 SValue sv;
3254 t = get_reg(RC_INT);
3255 #if PTR_SIZE == 8
3256 sv.type.t = VT_PTR;
3257 #else
3258 sv.type.t = VT_INT;
3259 #endif
3260 sv.r = VT_LOCAL | VT_LVAL;
3261 sv.c.i = vtop[-1].c.i;
3262 load(t, &sv);
3263 vtop[-1].r = t | VT_LVAL;
3265 /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
3266 #if PTR_SIZE == 8
3267 if (((ft & VT_BTYPE) == VT_QLONG) || ((ft & VT_BTYPE) == VT_QFLOAT)) {
3268 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
3269 #else
3270 if ((ft & VT_BTYPE) == VT_LLONG) {
3271 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
3272 #endif
3273 vtop[-1].type.t = load_type;
3274 store(r, vtop - 1);
3275 vswap();
3276 /* convert to int to increment easily */
3277 vtop->type.t = addr_type;
3278 gaddrof();
3279 vpushi(load_size);
3280 gen_op('+');
3281 vtop->r |= VT_LVAL;
3282 vswap();
3283 vtop[-1].type.t = load_type;
3284 /* XXX: it works because r2 is spilled last ! */
3285 store(vtop->r2, vtop - 1);
3286 } else {
3287 store(r, vtop - 1);
3290 vswap();
3291 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
3292 vtop->r |= delayed_cast;
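/* Illustration (not part of tcc): the read-modify-write sequence generated for
   a bit-field store above, spelled out on a plain unsigned word (a sketch;
   "pos" and "mask" correspond to bit_pos and the (1ULL << bit_size) - 1 mask).

       #include <stdio.h>

       int main(void)
       {
           unsigned word = 0xAAAA5555u;
           unsigned val  = 0x3;                 // value to store
           unsigned pos  = 4, mask = 0xF;       // 4-bit field at bit 4

           word = (word & ~(mask << pos)) | ((val & mask) << pos);
           printf("0x%X\n", word);              // prints 0xAAAA5535
           return 0;
       }
*/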
3296 /* 'post' selects post- vs pre-increment; 'c' is the token ++ or -- */
3297 ST_FUNC void inc(int post, int c)
3299 test_lvalue();
3300 vdup(); /* save lvalue */
3301 if (post) {
3302 gv_dup(); /* duplicate value */
3303 vrotb(3);
3304 vrotb(3);
3306 /* add constant */
3307 vpushi(c - TOK_MID);
3308 gen_op('+');
3309 vstore(); /* store value */
3310 if (post)
3311 vpop(); /* if post op, return saved value */
3314 ST_FUNC void parse_mult_str (CString *astr, const char *msg)
3316 /* read the string */
3317 if (tok != TOK_STR)
3318 expect(msg);
3319 cstr_new(astr);
3320 while (tok == TOK_STR) {
3321 /* XXX: add \0 handling too ? */
3322 cstr_cat(astr, tokc.str.data, -1);
3323 next();
3325 cstr_ccat(astr, '\0');
3328 /* If I is >= 1 and a power of two, returns log2(i)+1.
3329 If I is 0 returns 0. */
3330 static int exact_log2p1(int i)
3332 int ret;
3333 if (!i)
3334 return 0;
3335 for (ret = 1; i >= 1 << 8; ret += 8)
3336 i >>= 8;
3337 if (i >= 1 << 4)
3338 ret += 4, i >>= 4;
3339 if (i >= 1 << 2)
3340 ret += 2, i >>= 2;
3341 if (i >= 1 << 1)
3342 ret++;
3343 return ret;
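/* Worked examples (not part of tcc): exact_log2p1(0) == 0, exact_log2p1(1) == 1,
   exact_log2p1(8) == 4 and exact_log2p1(4096) == 13, i.e. the value stored in
   a.aligned below encodes an alignment of 1 << (aligned - 1) bytes. */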
3346 /* Parse __attribute__((...)) GNUC extension. */
3347 static void parse_attribute(AttributeDef *ad)
3349 int t, n;
3350 CString astr;
3352 redo:
3353 if (tok != TOK_ATTRIBUTE1 && tok != TOK_ATTRIBUTE2)
3354 return;
3355 next();
3356 skip('(');
3357 skip('(');
3358 while (tok != ')') {
3359 if (tok < TOK_IDENT)
3360 expect("attribute name");
3361 t = tok;
3362 next();
3363 switch(t) {
3364 case TOK_SECTION1:
3365 case TOK_SECTION2:
3366 skip('(');
3367 parse_mult_str(&astr, "section name");
3368 ad->section = find_section(tcc_state, (char *)astr.data);
3369 skip(')');
3370 cstr_free(&astr);
3371 break;
3372 case TOK_ALIAS1:
3373 case TOK_ALIAS2:
3374 skip('(');
3375 parse_mult_str(&astr, "alias(\"target\")");
3376 ad->alias_target = /* save string as token, for later */
3377 tok_alloc((char*)astr.data, astr.size-1)->tok;
3378 skip(')');
3379 cstr_free(&astr);
3380 break;
3381 case TOK_VISIBILITY1:
3382 case TOK_VISIBILITY2:
3383 skip('(');
3384 parse_mult_str(&astr,
3385 "visibility(\"default|hidden|internal|protected\")");
3386 if (!strcmp (astr.data, "default"))
3387 ad->a.visibility = STV_DEFAULT;
3388 else if (!strcmp (astr.data, "hidden"))
3389 ad->a.visibility = STV_HIDDEN;
3390 else if (!strcmp (astr.data, "internal"))
3391 ad->a.visibility = STV_INTERNAL;
3392 else if (!strcmp (astr.data, "protected"))
3393 ad->a.visibility = STV_PROTECTED;
3394 else
3395 expect("visibility(\"default|hidden|internal|protected\")");
3396 skip(')');
3397 cstr_free(&astr);
3398 break;
3399 case TOK_ALIGNED1:
3400 case TOK_ALIGNED2:
3401 if (tok == '(') {
3402 next();
3403 n = expr_const();
3404 if (n <= 0 || (n & (n - 1)) != 0)
3405 tcc_error("alignment must be a positive power of two");
3406 skip(')');
3407 } else {
3408 n = MAX_ALIGN;
3410 ad->a.aligned = exact_log2p1(n);
3411 if (n != 1 << (ad->a.aligned - 1))
3412 tcc_error("alignment of %d is larger than implemented", n);
3413 break;
3414 case TOK_PACKED1:
3415 case TOK_PACKED2:
3416 ad->a.packed = 1;
3417 break;
3418 case TOK_WEAK1:
3419 case TOK_WEAK2:
3420 ad->a.weak = 1;
3421 break;
3422 case TOK_UNUSED1:
3423 case TOK_UNUSED2:
3424 /* currently, no need to handle it because tcc does not
3425 track unused objects */
3426 break;
3427 case TOK_NORETURN1:
3428 case TOK_NORETURN2:
3429 /* currently ignored: tcc does not make use of
3430 noreturn information */
3431 break;
3432 case TOK_CDECL1:
3433 case TOK_CDECL2:
3434 case TOK_CDECL3:
3435 ad->f.func_call = FUNC_CDECL;
3436 break;
3437 case TOK_STDCALL1:
3438 case TOK_STDCALL2:
3439 case TOK_STDCALL3:
3440 ad->f.func_call = FUNC_STDCALL;
3441 break;
3442 #ifdef TCC_TARGET_I386
3443 case TOK_REGPARM1:
3444 case TOK_REGPARM2:
3445 skip('(');
3446 n = expr_const();
3447 if (n > 3)
3448 n = 3;
3449 else if (n < 0)
3450 n = 0;
3451 if (n > 0)
3452 ad->f.func_call = FUNC_FASTCALL1 + n - 1;
3453 skip(')');
3454 break;
3455 case TOK_FASTCALL1:
3456 case TOK_FASTCALL2:
3457 case TOK_FASTCALL3:
3458 ad->f.func_call = FUNC_FASTCALLW;
3459 break;
3460 #endif
3461 case TOK_MODE:
3462 skip('(');
3463 switch(tok) {
3464 case TOK_MODE_DI:
3465 ad->attr_mode = VT_LLONG + 1;
3466 break;
3467 case TOK_MODE_QI:
3468 ad->attr_mode = VT_BYTE + 1;
3469 break;
3470 case TOK_MODE_HI:
3471 ad->attr_mode = VT_SHORT + 1;
3472 break;
3473 case TOK_MODE_SI:
3474 case TOK_MODE_word:
3475 ad->attr_mode = VT_INT + 1;
3476 break;
3477 default:
3478 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok, NULL));
3479 break;
3481 next();
3482 skip(')');
3483 break;
3484 case TOK_DLLEXPORT:
3485 ad->a.dllexport = 1;
3486 break;
3487 case TOK_NODECORATE:
3488 ad->a.nodecorate = 1;
3489 break;
3490 case TOK_DLLIMPORT:
3491 ad->a.dllimport = 1;
3492 break;
3493 default:
3494 if (tcc_state->warn_unsupported)
3495 tcc_warning("'%s' attribute ignored", get_tok_str(t, NULL));
3496 /* skip parameters */
3497 if (tok == '(') {
3498 int parenthesis = 0;
3499 do {
3500 if (tok == '(')
3501 parenthesis++;
3502 else if (tok == ')')
3503 parenthesis--;
3504 next();
3505 } while (parenthesis && tok != -1);
3507 break;
3509 if (tok != ',')
3510 break;
3511 next();
3513 skip(')');
3514 skip(')');
3515 goto redo;
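/* Illustration (not part of tcc): attribute spellings handled by the parser
   above, as user code (a sketch; stdcall/dllexport are only meaningful on
   Windows targets, and unknown attributes are skipped with a warning when
   warn_unsupported is enabled).

       static int counter __attribute__((aligned(16), unused));
       void fatal(const char *msg) __attribute__((noreturn));
       typedef int di_int __attribute__((mode(DI)));   // "DI" mode: 64-bit integer
       int entry(void) __attribute__((stdcall, dllexport));
*/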
3518 static Sym * find_field (CType *type, int v)
3520 Sym *s = type->ref;
3521 v |= SYM_FIELD;
3522 while ((s = s->next) != NULL) {
3523 if ((s->v & SYM_FIELD) &&
3524 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3525 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3526 Sym *ret = find_field (&s->type, v);
3527 if (ret)
3528 return ret;
3530 if (s->v == v)
3531 break;
3533 return s;
3536 static void struct_add_offset (Sym *s, int offset)
3538 while ((s = s->next) != NULL) {
3539 if ((s->v & SYM_FIELD) &&
3540 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3541 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3542 struct_add_offset(s->type.ref, offset);
3543 } else
3544 s->c += offset;
3548 static void struct_layout(CType *type, AttributeDef *ad)
3550 int size, align, maxalign, offset, c, bit_pos, bit_size;
3551 int packed, a, bt, prevbt, prev_bit_size;
3552 int pcc = !tcc_state->ms_bitfields;
3553 int pragma_pack = *tcc_state->pack_stack_ptr;
3554 Sym *f;
3556 maxalign = 1;
3557 offset = 0;
3558 c = 0;
3559 bit_pos = 0;
3560 prevbt = VT_STRUCT; /* make it never match */
3561 prev_bit_size = 0;
3563 //#define BF_DEBUG
3565 for (f = type->ref->next; f; f = f->next) {
3566 if (f->type.t & VT_BITFIELD)
3567 bit_size = BIT_SIZE(f->type.t);
3568 else
3569 bit_size = -1;
3570 size = type_size(&f->type, &align);
3571 a = f->a.aligned ? 1 << (f->a.aligned - 1) : 0;
3572 packed = 0;
3574 if (pcc && bit_size == 0) {
3575 /* in pcc mode, packing does not affect zero-width bitfields */
3577 } else {
3578 /* in pcc mode, attribute packed overrides if set. */
3579 if (pcc && (f->a.packed || ad->a.packed))
3580 align = packed = 1;
3582 /* pragma pack overrides align if smaller, and always packs bitfields */
3583 if (pragma_pack) {
3584 packed = 1;
3585 if (pragma_pack < align)
3586 align = pragma_pack;
3587 /* in pcc mode pragma pack also overrides individual align */
3588 if (pcc && pragma_pack < a)
3589 a = 0;
3592 /* some individual align was specified */
3593 if (a)
3594 align = a;
3596 if (type->ref->type.t == VT_UNION) {
3597 if (pcc && bit_size >= 0)
3598 size = (bit_size + 7) >> 3;
3599 offset = 0;
3600 if (size > c)
3601 c = size;
3603 } else if (bit_size < 0) {
3604 if (pcc)
3605 c += (bit_pos + 7) >> 3;
3606 c = (c + align - 1) & -align;
3607 offset = c;
3608 if (size > 0)
3609 c += size;
3610 bit_pos = 0;
3611 prevbt = VT_STRUCT;
3612 prev_bit_size = 0;
3614 } else {
3615 /* A bit-field. Layout is more complicated. There are two
3616 options: PCC (GCC) compatible and MS compatible */
3617 if (pcc) {
3618 /* In PCC layout a bit-field is placed adjacent to the
3619 preceding bit-fields, except if:
3620 - it has zero-width
3621 - an individual alignment was given
3622 - it would overflow its base type container and
3623 there is no packing */
3624 if (bit_size == 0) {
3625 new_field:
3626 c = (c + ((bit_pos + 7) >> 3) + align - 1) & -align;
3627 bit_pos = 0;
3628 } else if (f->a.aligned) {
3629 goto new_field;
3630 } else if (!packed) {
3631 int a8 = align * 8;
3632 int ofs = ((c * 8 + bit_pos) % a8 + bit_size + a8 - 1) / a8;
3633 if (ofs > size / align)
3634 goto new_field;
3637 /* in pcc mode, long long bitfields have type int if they fit */
3638 if (size == 8 && bit_size <= 32)
3639 f->type.t = (f->type.t & ~VT_BTYPE) | VT_INT, size = 4;
3641 while (bit_pos >= align * 8)
3642 c += align, bit_pos -= align * 8;
3643 offset = c;
3645 /* In PCC layout named bit-fields influence the alignment
3646 of the containing struct using the base type's alignment,
3647 except for packed fields (which here have correct align). */
3648 if (f->v & SYM_FIRST_ANOM
3649 // && bit_size // ??? gcc on ARM/rpi does that
3651 align = 1;
3653 } else {
3654 bt = f->type.t & VT_BTYPE;
3655 if ((bit_pos + bit_size > size * 8)
3656 || (bit_size > 0) == (bt != prevbt)
3658 c = (c + align - 1) & -align;
3659 offset = c;
3660 bit_pos = 0;
3661 /* In MS bitfield mode a bit-field run always uses
3662 at least as many bits as the underlying type.
3663 To start a new run it's also required that this
3664 or the last bit-field had non-zero width. */
3665 if (bit_size || prev_bit_size)
3666 c += size;
3668 /* In MS layout the record's alignment is normally
3669 influenced by the field, except for a zero-width
3670 field at the start of a run (further zero-width
3671 fields do influence it again). */
3672 if (bit_size == 0 && prevbt != bt)
3673 align = 1;
3674 prevbt = bt;
3675 prev_bit_size = bit_size;
3678 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
3679 | (bit_pos << VT_STRUCT_SHIFT);
3680 bit_pos += bit_size;
3682 if (align > maxalign)
3683 maxalign = align;
3685 #ifdef BF_DEBUG
3686 printf("set field %s offset %-2d size %-2d align %-2d",
3687 get_tok_str(f->v & ~SYM_FIELD, NULL), offset, size, align);
3688 if (f->type.t & VT_BITFIELD) {
3689 printf(" pos %-2d bits %-2d",
3690 BIT_POS(f->type.t),
3691 BIT_SIZE(f->type.t)
3694 printf("\n");
3695 #endif
3697 if (f->v & SYM_FIRST_ANOM && (f->type.t & VT_BTYPE) == VT_STRUCT) {
3698 Sym *ass;
3699 /* An anonymous struct/union. Adjust member offsets
3700 to reflect the real offset of our containing struct.
3701 Also set the offset of this anon member inside
3702 the outer struct to zero. This way it
3703 works both when accessing the field offset directly
3704 (from the base object) and when recursing into
3705 members during initializer handling. */
3706 int v2 = f->type.ref->v;
3707 if (!(v2 & SYM_FIELD) &&
3708 (v2 & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
3709 Sym **pps;
3710 /* This happens only with MS extensions. The
3711 anon member has a named struct type, so it
3712 potentially is shared with other references.
3713 We need to unshare members so we can modify
3714 them. */
3715 ass = f->type.ref;
3716 f->type.ref = sym_push(anon_sym++ | SYM_FIELD,
3717 &f->type.ref->type, 0,
3718 f->type.ref->c);
3719 pps = &f->type.ref->next;
3720 while ((ass = ass->next) != NULL) {
3721 *pps = sym_push(ass->v, &ass->type, 0, ass->c);
3722 pps = &((*pps)->next);
3724 *pps = NULL;
3726 struct_add_offset(f->type.ref, offset);
3727 f->c = 0;
3728 } else {
3729 f->c = offset;
3732 f->r = 0;
3735 if (pcc)
3736 c += (bit_pos + 7) >> 3;
3738 /* store size and alignment */
3739 a = bt = ad->a.aligned ? 1 << (ad->a.aligned - 1) : 1;
3740 if (a < maxalign)
3741 a = maxalign;
3742 type->ref->r = a;
3743 if (pragma_pack && pragma_pack < maxalign && 0 == pcc) {
3744 /* can happen if individual align for some member was given. In
3745 this case MSVC ignores maxalign when aligning the size */
3746 a = pragma_pack;
3747 if (a < bt)
3748 a = bt;
3750 c = (c + a - 1) & -a;
3751 type->ref->c = c;
3753 #ifdef BF_DEBUG
3754 printf("struct size %-2d align %-2d\n\n", c, a), fflush(stdout);
3755 #endif
3757 /* check whether we can access bitfields by their type */
3758 for (f = type->ref->next; f; f = f->next) {
3759 int s, px, cx, c0;
3760 CType t;
3762 if (0 == (f->type.t & VT_BITFIELD))
3763 continue;
3764 f->type.ref = f;
3765 f->auxtype = -1;
3766 bit_size = BIT_SIZE(f->type.t);
3767 if (bit_size == 0)
3768 continue;
3769 bit_pos = BIT_POS(f->type.t);
3770 size = type_size(&f->type, &align);
3771 if (bit_pos + bit_size <= size * 8 && f->c + size <= c)
3772 continue;
3774 /* try to access the field using a different type */
3775 c0 = -1, s = align = 1;
3776 for (;;) {
3777 px = f->c * 8 + bit_pos;
3778 cx = (px >> 3) & -align;
3779 px = px - (cx << 3);
3780 if (c0 == cx)
3781 break;
3782 s = (px + bit_size + 7) >> 3;
3783 if (s > 4) {
3784 t.t = VT_LLONG;
3785 } else if (s > 2) {
3786 t.t = VT_INT;
3787 } else if (s > 1) {
3788 t.t = VT_SHORT;
3789 } else {
3790 t.t = VT_BYTE;
3792 s = type_size(&t, &align);
3793 c0 = cx;
3796 if (px + bit_size <= s * 8 && cx + s <= c) {
3797 /* update offset and bit position */
3798 f->c = cx;
3799 bit_pos = px;
3800 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
3801 | (bit_pos << VT_STRUCT_SHIFT);
3802 if (s != size)
3803 f->auxtype = t.t;
3804 #ifdef BF_DEBUG
3805 printf("FIX field %s offset %-2d size %-2d align %-2d "
3806 "pos %-2d bits %-2d\n",
3807 get_tok_str(f->v & ~SYM_FIELD, NULL),
3808 cx, s, align, px, bit_size);
3809 #endif
3810 } else {
3811 /* fall back to load/store single-byte wise */
3812 f->auxtype = VT_STRUCT;
3813 #ifdef BF_DEBUG
3814 printf("FIX field %s : load byte-wise\n",
3815 get_tok_str(f->v & ~SYM_FIELD, NULL));
3816 #endif
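/* Illustration (not part of tcc): how the layout rules above play out for a
   few simple structs (a sketch; the sizes assume a typical target with 4-byte,
   4-aligned int and pcc-style bit-field packing).

       #include <stdio.h>

       struct plain  { char c; int i; };                          // size 8, align 4
       struct packed { char c; int i; } __attribute__((packed));  // size 5, align 1
       struct bits   { unsigned a : 3, b : 7, c : 22; };          // 32 bits -> size 4

       int main(void)
       {
           printf("%zu %zu %zu\n", sizeof(struct plain),
                  sizeof(struct packed), sizeof(struct bits));    // prints: 8 5 4
           return 0;
       }
*/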
3821 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
3822 static void struct_decl(CType *type, int u)
3824 int v, c, size, align, flexible;
3825 int bit_size, bsize, bt;
3826 Sym *s, *ss, **ps;
3827 AttributeDef ad, ad1;
3828 CType type1, btype;
3830 memset(&ad, 0, sizeof ad);
3831 next();
3832 parse_attribute(&ad);
3833 if (tok != '{') {
3834 v = tok;
3835 next();
3836 /* struct already defined ? return it */
3837 if (v < TOK_IDENT)
3838 expect("struct/union/enum name");
3839 s = struct_find(v);
3840 if (s && (s->sym_scope == local_scope || tok != '{')) {
3841 if (u == s->type.t)
3842 goto do_decl;
3843 if (u == VT_ENUM && IS_ENUM(s->type.t))
3844 goto do_decl;
3845 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
3847 } else {
3848 v = anon_sym++;
3850 /* Record the original enum/struct/union token. */
3851 type1.t = u == VT_ENUM ? u | VT_INT | VT_UNSIGNED : u;
3852 type1.ref = NULL;
3853 /* we put an undefined size for struct/union */
3854 s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
3855 s->r = 0; /* default alignment is zero, as in gcc */
3856 do_decl:
3857 type->t = s->type.t;
3858 type->ref = s;
3860 if (tok == '{') {
3861 next();
3862 if (s->c != -1)
3863 tcc_error("struct/union/enum already defined");
3864 /* cannot be empty */
3865 /* empty enums are not allowed */
3866 ps = &s->next;
3867 if (u == VT_ENUM) {
3868 long long ll = 0, pl = 0, nl = 0;
3869 CType t;
3870 t.ref = s;
3871 /* enum symbols have static storage */
3872 t.t = VT_INT|VT_STATIC|VT_ENUM_VAL;
3873 for(;;) {
3874 v = tok;
3875 if (v < TOK_UIDENT)
3876 expect("identifier");
3877 ss = sym_find(v);
3878 if (ss && !local_stack)
3879 tcc_error("redefinition of enumerator '%s'",
3880 get_tok_str(v, NULL));
3881 next();
3882 if (tok == '=') {
3883 next();
3884 ll = expr_const64();
3886 ss = sym_push(v, &t, VT_CONST, 0);
3887 ss->enum_val = ll;
3888 *ps = ss, ps = &ss->next;
3889 if (ll < nl)
3890 nl = ll;
3891 if (ll > pl)
3892 pl = ll;
3893 if (tok != ',')
3894 break;
3895 next();
3896 ll++;
3897 /* NOTE: we accept a trailing comma */
3898 if (tok == '}')
3899 break;
3901 skip('}');
3902 /* set integral type of the enum */
3903 t.t = VT_INT;
3904 if (nl >= 0) {
3905 if (pl != (unsigned)pl)
3906 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
3907 t.t |= VT_UNSIGNED;
3908 } else if (pl != (int)pl || nl != (int)nl)
3909 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
3910 s->type.t = type->t = t.t | VT_ENUM;
3911 s->c = 0;
3912 /* set type for enum members */
3913 for (ss = s->next; ss; ss = ss->next) {
3914 ll = ss->enum_val;
3915 if (ll == (int)ll) /* default is int if it fits */
3916 continue;
3917 if (t.t & VT_UNSIGNED) {
3918 ss->type.t |= VT_UNSIGNED;
3919 if (ll == (unsigned)ll)
3920 continue;
3922 ss->type.t = (ss->type.t & ~VT_BTYPE)
3923 | (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
3925 } else {
3926 c = 0;
3927 flexible = 0;
3928 while (tok != '}') {
3929 if (!parse_btype(&btype, &ad1)) {
3930 skip(';');
3931 continue;
3933 while (1) {
3934 if (flexible)
3935 tcc_error("flexible array member '%s' not at the end of struct",
3936 get_tok_str(v, NULL));
3937 bit_size = -1;
3938 v = 0;
3939 type1 = btype;
3940 if (tok != ':') {
3941 if (tok != ';')
3942 type_decl(&type1, &ad1, &v, TYPE_DIRECT);
3943 if (v == 0) {
3944 if ((type1.t & VT_BTYPE) != VT_STRUCT)
3945 expect("identifier");
3946 else {
3947 int v = btype.ref->v;
3948 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
3949 if (tcc_state->ms_extensions == 0)
3950 expect("identifier");
3954 if (type_size(&type1, &align) < 0) {
3955 if ((u == VT_STRUCT) && (type1.t & VT_ARRAY) && c)
3956 flexible = 1;
3957 else
3958 tcc_error("field '%s' has incomplete type",
3959 get_tok_str(v, NULL));
3961 if ((type1.t & VT_BTYPE) == VT_FUNC ||
3962 (type1.t & VT_BTYPE) == VT_VOID ||
3963 (type1.t & VT_STORAGE))
3964 tcc_error("invalid type for '%s'",
3965 get_tok_str(v, NULL));
3967 if (tok == ':') {
3968 next();
3969 bit_size = expr_const();
3970 /* XXX: handle v = 0 case for messages */
3971 if (bit_size < 0)
3972 tcc_error("negative width in bit-field '%s'",
3973 get_tok_str(v, NULL));
3974 if (v && bit_size == 0)
3975 tcc_error("zero width for bit-field '%s'",
3976 get_tok_str(v, NULL));
3977 parse_attribute(&ad1);
3979 size = type_size(&type1, &align);
3980 if (bit_size >= 0) {
3981 bt = type1.t & VT_BTYPE;
3982 if (bt != VT_INT &&
3983 bt != VT_BYTE &&
3984 bt != VT_SHORT &&
3985 bt != VT_BOOL &&
3986 bt != VT_LLONG)
3987 tcc_error("bitfields must have scalar type");
3988 bsize = size * 8;
3989 if (bit_size > bsize) {
3990 tcc_error("width of '%s' exceeds its type",
3991 get_tok_str(v, NULL));
3992 } else if (bit_size == bsize
3993 && !ad.a.packed && !ad1.a.packed) {
3994 /* no need for bit fields */
3996 } else if (bit_size == 64) {
3997 tcc_error("field width 64 not implemented");
3998 } else {
3999 type1.t = (type1.t & ~VT_STRUCT_MASK)
4000 | VT_BITFIELD
4001 | (bit_size << (VT_STRUCT_SHIFT + 6));
4004 if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
4005 /* Remember we've seen a real field to check
4006 for placement of flexible array member. */
4007 c = 1;
4009 /* If member is a struct or bit-field, enforce
4010 placing into the struct (as anonymous). */
4011 if (v == 0 &&
4012 ((type1.t & VT_BTYPE) == VT_STRUCT ||
4013 bit_size >= 0)) {
4014 v = anon_sym++;
4016 if (v) {
4017 ss = sym_push(v | SYM_FIELD, &type1, 0, 0);
4018 ss->a = ad1.a;
4019 *ps = ss;
4020 ps = &ss->next;
4022 if (tok == ';' || tok == TOK_EOF)
4023 break;
4024 skip(',');
4026 skip(';');
4028 skip('}');
4029 parse_attribute(&ad);
4030 struct_layout(type, &ad);
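/* Illustration (not part of tcc): the enum sizing decisions made above, in
   source terms (a sketch; the exact 64-bit type depends on LONG_SIZE).

       enum small { A = 1, B = 2 };        // everything fits int      -> int
       enum neg   { N = -1 };              // negative values, fit int -> int
       enum uhigh { C = 0x80000000 };      // positive, above INT_MAX  -> unsigned int
       enum wide  { D = 0x100000000 };     // needs 64 bits            -> unsigned 64-bit
*/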
4035 static void sym_to_attr(AttributeDef *ad, Sym *s)
4037 merge_symattr(&ad->a, &s->a);
4038 merge_funcattr(&ad->f, &s->f);
4041 /* Add type qualifiers to a type. If the type is an array then the qualifiers
4042 are added to the element type, copied because it could be a typedef. */
4043 static void parse_btype_qualify(CType *type, int qualifiers)
4045 while (type->t & VT_ARRAY) {
4046 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
4047 type = &type->ref->type;
4049 type->t |= qualifiers;
4052 /* return 0 if no type declaration. otherwise, return the basic type
4053 and skip it.
4055 static int parse_btype(CType *type, AttributeDef *ad)
4057 int t, u, bt, st, type_found, typespec_found, g;
4058 Sym *s;
4059 CType type1;
4061 memset(ad, 0, sizeof(AttributeDef));
4062 type_found = 0;
4063 typespec_found = 0;
4064 t = VT_INT;
4065 bt = st = -1;
4066 type->ref = NULL;
4068 while(1) {
4069 switch(tok) {
4070 case TOK_EXTENSION:
4071 /* currently, we simply ignore __extension__ */
4072 next();
4073 continue;
4075 /* basic types */
4076 case TOK_CHAR:
4077 u = VT_BYTE;
4078 basic_type:
4079 next();
4080 basic_type1:
4081 if (u == VT_SHORT || u == VT_LONG) {
4082 if (st != -1 || (bt != -1 && bt != VT_INT))
4083 tmbt: tcc_error("too many basic types");
4084 st = u;
4085 } else {
4086 if (bt != -1 || (st != -1 && u != VT_INT))
4087 goto tmbt;
4088 bt = u;
4090 if (u != VT_INT)
4091 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
4092 typespec_found = 1;
4093 break;
4094 case TOK_VOID:
4095 u = VT_VOID;
4096 goto basic_type;
4097 case TOK_SHORT:
4098 u = VT_SHORT;
4099 goto basic_type;
4100 case TOK_INT:
4101 u = VT_INT;
4102 goto basic_type;
4103 case TOK_LONG:
4104 if ((t & VT_BTYPE) == VT_DOUBLE) {
4105 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
4106 } else if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
4107 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LLONG;
4108 } else {
4109 u = VT_LONG;
4110 goto basic_type;
4112 next();
4113 break;
4114 #ifdef TCC_TARGET_ARM64
4115 case TOK_UINT128:
4116 /* GCC's __uint128_t appears in some Linux header files. Make it a
4117 synonym for long double to get the size and alignment right. */
4118 u = VT_LDOUBLE;
4119 goto basic_type;
4120 #endif
4121 case TOK_BOOL:
4122 u = VT_BOOL;
4123 goto basic_type;
4124 case TOK_FLOAT:
4125 u = VT_FLOAT;
4126 goto basic_type;
4127 case TOK_DOUBLE:
4128 if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
4129 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
4130 } else {
4131 u = VT_DOUBLE;
4132 goto basic_type;
4134 next();
4135 break;
4136 case TOK_ENUM:
4137 struct_decl(&type1, VT_ENUM);
4138 basic_type2:
4139 u = type1.t;
4140 type->ref = type1.ref;
4141 goto basic_type1;
4142 case TOK_STRUCT:
4143 struct_decl(&type1, VT_STRUCT);
4144 goto basic_type2;
4145 case TOK_UNION:
4146 struct_decl(&type1, VT_UNION);
4147 goto basic_type2;
4149 /* type modifiers */
4150 case TOK_CONST1:
4151 case TOK_CONST2:
4152 case TOK_CONST3:
4153 type->t = t;
4154 parse_btype_qualify(type, VT_CONSTANT);
4155 t = type->t;
4156 next();
4157 break;
4158 case TOK_VOLATILE1:
4159 case TOK_VOLATILE2:
4160 case TOK_VOLATILE3:
4161 type->t = t;
4162 parse_btype_qualify(type, VT_VOLATILE);
4163 t = type->t;
4164 next();
4165 break;
4166 case TOK_SIGNED1:
4167 case TOK_SIGNED2:
4168 case TOK_SIGNED3:
4169 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
4170 tcc_error("signed and unsigned modifier");
4171 t |= VT_DEFSIGN;
4172 next();
4173 typespec_found = 1;
4174 break;
4175 case TOK_REGISTER:
4176 case TOK_AUTO:
4177 case TOK_RESTRICT1:
4178 case TOK_RESTRICT2:
4179 case TOK_RESTRICT3:
4180 next();
4181 break;
4182 case TOK_UNSIGNED:
4183 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
4184 tcc_error("signed and unsigned modifier");
4185 t |= VT_DEFSIGN | VT_UNSIGNED;
4186 next();
4187 typespec_found = 1;
4188 break;
4190 /* storage */
4191 case TOK_EXTERN:
4192 g = VT_EXTERN;
4193 goto storage;
4194 case TOK_STATIC:
4195 g = VT_STATIC;
4196 goto storage;
4197 case TOK_TYPEDEF:
4198 g = VT_TYPEDEF;
4199 goto storage;
4200 storage:
4201 if (t & (VT_EXTERN|VT_STATIC|VT_TYPEDEF) & ~g)
4202 tcc_error("multiple storage classes");
4203 t |= g;
4204 next();
4205 break;
4206 case TOK_INLINE1:
4207 case TOK_INLINE2:
4208 case TOK_INLINE3:
4209 t |= VT_INLINE;
4210 next();
4211 break;
4213 /* GNUC attribute */
4214 case TOK_ATTRIBUTE1:
4215 case TOK_ATTRIBUTE2:
4216 parse_attribute(ad);
4217 if (ad->attr_mode) {
4218 u = ad->attr_mode -1;
4219 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
4221 continue;
4222 /* GNUC typeof */
4223 case TOK_TYPEOF1:
4224 case TOK_TYPEOF2:
4225 case TOK_TYPEOF3:
4226 next();
4227 parse_expr_type(&type1);
4228 /* remove all storage modifiers except typedef */
4229 type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
4230 if (type1.ref)
4231 sym_to_attr(ad, type1.ref);
4232 goto basic_type2;
4233 default:
4234 if (typespec_found)
4235 goto the_end;
4236 s = sym_find(tok);
4237 if (!s || !(s->type.t & VT_TYPEDEF))
4238 goto the_end;
4239 t &= ~(VT_BTYPE|VT_LONG);
4240 u = t & ~(VT_CONSTANT | VT_VOLATILE), t ^= u;
4241 type->t = (s->type.t & ~VT_TYPEDEF) | u;
4242 type->ref = s->type.ref;
4243 if (t)
4244 parse_btype_qualify(type, t);
4245 t = type->t;
4246 /* get attributes from typedef */
4247 sym_to_attr(ad, s);
4248 next();
4249 typespec_found = 1;
4250 st = bt = -2;
4251 break;
4253 type_found = 1;
4255 the_end:
4256 if (tcc_state->char_is_unsigned) {
4257 if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
4258 t |= VT_UNSIGNED;
4260 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
4261 bt = t & (VT_BTYPE|VT_LONG);
4262 if (bt == VT_LONG)
4263 t |= LONG_SIZE == 8 ? VT_LLONG : VT_INT;
4264 #ifdef TCC_TARGET_PE
4265 if (bt == VT_LDOUBLE)
4266 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_DOUBLE;
4267 #endif
4268 type->t = t;
4269 return type_found;
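/* Illustration (not part of tcc): declaration specifiers as combined by
   parse_btype() above (a sketch).

       unsigned long long a;     // VT_LLONG | VT_UNSIGNED
       long double b;            // "long" upgrades double to VT_LDOUBLE
       const volatile int c;     // qualifiers folded in via parse_btype_qualify()
       // short long d;          // rejected: "too many basic types"
*/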
4272 /* convert a function parameter type (array to pointer and function to
4273 function pointer) */
4274 static inline void convert_parameter_type(CType *pt)
4276 /* remove const and volatile qualifiers (XXX: const could be used
4277 to indicate a const function parameter) */
4278 pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
4279 /* array must be transformed to pointer according to ANSI C */
4280 pt->t &= ~VT_ARRAY;
4281 if ((pt->t & VT_BTYPE) == VT_FUNC) {
4282 mk_pointer(pt);
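/* Illustration (not part of tcc): the parameter adjustments above, in source
   terms -- array parameters decay to pointers, function parameters to function
   pointers, and top-level const/volatile on the parameter type are dropped.

       void f(int a[10]);      // treated as: void f(int *a)
       void g(int h(void));    // treated as: void g(int (*h)(void))
*/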
4286 ST_FUNC void parse_asm_str(CString *astr)
4288 skip('(');
4289 parse_mult_str(astr, "string constant");
4292 /* Parse an asm label and return the token */
4293 static int asm_label_instr(void)
4295 int v;
4296 CString astr;
4298 next();
4299 parse_asm_str(&astr);
4300 skip(')');
4301 #ifdef ASM_DEBUG
4302 printf("asm_alias: \"%s\"\n", (char *)astr.data);
4303 #endif
4304 v = tok_alloc(astr.data, astr.size - 1)->tok;
4305 cstr_free(&astr);
4306 return v;
4309 static int post_type(CType *type, AttributeDef *ad, int storage, int td)
4311 int n, l, t1, arg_size, align;
4312 Sym **plast, *s, *first;
4313 AttributeDef ad1;
4314 CType pt;
4316 if (tok == '(') {
4317 /* function type, or recursive declarator (return if so) */
4318 next();
4319 if (td && !(td & TYPE_ABSTRACT))
4320 return 0;
4321 if (tok == ')')
4322 l = 0;
4323 else if (parse_btype(&pt, &ad1))
4324 l = FUNC_NEW;
4325 else if (td) {
4326 merge_attr (ad, &ad1);
4327 return 0;
4328 } else
4329 l = FUNC_OLD;
4330 first = NULL;
4331 plast = &first;
4332 arg_size = 0;
4333 if (l) {
4334 for(;;) {
4335 /* read param name and compute offset */
4336 if (l != FUNC_OLD) {
4337 if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
4338 break;
4339 type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT);
4340 if ((pt.t & VT_BTYPE) == VT_VOID)
4341 tcc_error("parameter declared as void");
4342 } else {
4343 n = tok;
4344 if (n < TOK_UIDENT)
4345 expect("identifier");
4346 pt.t = VT_VOID; /* invalid type */
4347 next();
4349 convert_parameter_type(&pt);
4350 arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
4351 s = sym_push(n | SYM_FIELD, &pt, 0, 0);
4352 *plast = s;
4353 plast = &s->next;
4354 if (tok == ')')
4355 break;
4356 skip(',');
4357 if (l == FUNC_NEW && tok == TOK_DOTS) {
4358 l = FUNC_ELLIPSIS;
4359 next();
4360 break;
4362 if (l == FUNC_NEW && !parse_btype(&pt, &ad1))
4363 tcc_error("invalid type");
4365 } else
4366 /* if no parameters, then old type prototype */
4367 l = FUNC_OLD;
4368 skip(')');
4369 /* NOTE: const is ignored in returned type as it has a special
4370 meaning in gcc / C++ */
4371 type->t &= ~VT_CONSTANT;
4372 /* some ancient pre-K&R C allows a function to return an array
4373 and the array brackets to be put after the arguments, such
4374 that "int c()[]" means something like "int[] c()" */
4375 if (tok == '[') {
4376 next();
4377 skip(']'); /* only handle simple "[]" */
4378 mk_pointer(type);
4380 /* we push an anonymous symbol which will contain the function prototype */
4381 ad->f.func_args = arg_size;
4382 ad->f.func_type = l;
4383 s = sym_push(SYM_FIELD, type, 0, 0);
4384 s->a = ad->a;
4385 s->f = ad->f;
4386 s->next = first;
4387 type->t = VT_FUNC;
4388 type->ref = s;
4389 } else if (tok == '[') {
4390 int saved_nocode_wanted = nocode_wanted;
4391 /* array definition */
4392 next();
4393 while (1) {
4394 /* XXX The optional type-quals and static should only be accepted
4395 in parameter decls. The '*' as well, and then even only
4396 in prototypes (not function defs). */
4397 switch (tok) {
4398 case TOK_RESTRICT1: case TOK_RESTRICT2: case TOK_RESTRICT3:
4399 case TOK_CONST1:
4400 case TOK_VOLATILE1:
4401 case TOK_STATIC:
4402 case '*':
4403 next();
4404 continue;
4405 default:
4406 break;
4408 break;
4410 n = -1;
4411 t1 = 0;
4412 if (tok != ']') {
4413 if (!local_stack || (storage & VT_STATIC))
4414 vpushi(expr_const());
4415 else {
4416 /* The length of a VLA (which can only occur with local_stack
4417 && !VT_STATIC) must always be evaluated, even under nocode_wanted,
4418 so that its size slot is initialized (e.g. under sizeof
4419 or typeof). */
4420 nocode_wanted = 0;
4421 gexpr();
4423 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4424 n = vtop->c.i;
4425 if (n < 0)
4426 tcc_error("invalid array size");
4427 } else {
4428 if (!is_integer_btype(vtop->type.t & VT_BTYPE))
4429 tcc_error("size of variable length array should be an integer");
4430 t1 = VT_VLA;
4433 skip(']');
4434 /* parse next post type */
4435 post_type(type, ad, storage, 0);
4436 if (type->t == VT_FUNC)
4437 tcc_error("declaration of an array of functions");
4438 t1 |= type->t & VT_VLA;
4440 if (t1 & VT_VLA) {
4441 loc -= type_size(&int_type, &align);
4442 loc &= -align;
4443 n = loc;
4445 vla_runtime_type_size(type, &align);
4446 gen_op('*');
4447 vset(&int_type, VT_LOCAL|VT_LVAL, n);
4448 vswap();
4449 vstore();
4451 if (n != -1)
4452 vpop();
4453 nocode_wanted = saved_nocode_wanted;
4455 /* we push an anonymous symbol which will contain the array
4456 element type */
4457 s = sym_push(SYM_FIELD, type, 0, n);
4458 type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
4459 type->ref = s;
4461 return 1;
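/* Declarator suffixes handled above include, for illustration:
       int f(int, ...);   - FUNC_ELLIPSIS prototype
       int g();           - FUNC_OLD (unprototyped)
       int a[10];         - fixed-size array
       int v[n];          - VLA, size evaluated at run time (local, non-static only)
*/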
4464 /* Parse a type declarator (except basic type), and return the type
4465 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4466 expected. 'type' should contain the basic type. 'ad' is the
4467 attribute definition of the basic type. It can be modified by
4468 type_decl(). If this (possibly abstract) declarator is a pointer chain
4469 it returns the innermost pointed-to type (equal to *type, but a different
4470 pointer); otherwise it returns 'type' itself. The result is used for recursive calls. */
4471 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td)
4473 CType *post, *ret;
4474 int qualifiers, storage;
4476 /* recursive type, remove storage bits first, apply them later again */
4477 storage = type->t & VT_STORAGE;
4478 type->t &= ~VT_STORAGE;
4479 post = ret = type;
4481 while (tok == '*') {
4482 qualifiers = 0;
4483 redo:
4484 next();
4485 switch(tok) {
4486 case TOK_CONST1:
4487 case TOK_CONST2:
4488 case TOK_CONST3:
4489 qualifiers |= VT_CONSTANT;
4490 goto redo;
4491 case TOK_VOLATILE1:
4492 case TOK_VOLATILE2:
4493 case TOK_VOLATILE3:
4494 qualifiers |= VT_VOLATILE;
4495 goto redo;
4496 case TOK_RESTRICT1:
4497 case TOK_RESTRICT2:
4498 case TOK_RESTRICT3:
4499 goto redo;
4500 /* XXX: clarify attribute handling */
4501 case TOK_ATTRIBUTE1:
4502 case TOK_ATTRIBUTE2:
4503 parse_attribute(ad);
4504 break;
4506 mk_pointer(type);
4507 type->t |= qualifiers;
4508 if (ret == type)
4509 /* innermost pointed to type is the one for the first derivation */
4510 ret = pointed_type(type);
4513 if (tok == '(') {
4514 /* This is possibly a parameter type list for abstract declarators
4515 ('int ()'), use post_type for testing this. */
4516 if (!post_type(type, ad, 0, td)) {
4517 /* It's not, so it's a nested declarator, and the post operations
4518 apply to the innermost pointed to type (if any). */
4519 /* XXX: this is not correct to modify 'ad' at this point, but
4520 the syntax is not clear */
4521 parse_attribute(ad);
4522 post = type_decl(type, ad, v, td);
4523 skip(')');
4525 } else if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
4526 /* type identifier */
4527 *v = tok;
4528 next();
4529 } else {
4530 if (!(td & TYPE_ABSTRACT))
4531 expect("identifier");
4532 *v = 0;
4534 post_type(post, ad, storage, 0);
4535 parse_attribute(ad);
4536 type->t |= storage;
4537 return ret;
4540 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
4541 ST_FUNC int lvalue_type(int t)
4543 int bt, r;
4544 r = VT_LVAL;
4545 bt = t & VT_BTYPE;
4546 if (bt == VT_BYTE || bt == VT_BOOL)
4547 r |= VT_LVAL_BYTE;
4548 else if (bt == VT_SHORT)
4549 r |= VT_LVAL_SHORT;
4550 else
4551 return r;
4552 if (t & VT_UNSIGNED)
4553 r |= VT_LVAL_UNSIGNED;
4554 return r;
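/* Example: for an 'unsigned short' object this returns
   VT_LVAL | VT_LVAL_SHORT | VT_LVAL_UNSIGNED, so the value is accessed
   as a 16-bit quantity, while a plain 'int' yields just VT_LVAL. */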
4557 /* indirection with full error checking and bound check */
4558 ST_FUNC void indir(void)
4560 if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
4561 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
4562 return;
4563 expect("pointer");
4565 if (vtop->r & VT_LVAL)
4566 gv(RC_INT);
4567 vtop->type = *pointed_type(&vtop->type);
4568 /* Arrays and functions are never lvalues */
4569 if (!(vtop->type.t & VT_ARRAY) && !(vtop->type.t & VT_VLA)
4570 && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
4571 vtop->r |= lvalue_type(vtop->type.t);
4572 /* if bound checking, the referenced pointer must be checked */
4573 #ifdef CONFIG_TCC_BCHECK
4574 if (tcc_state->do_bounds_check)
4575 vtop->r |= VT_MUSTBOUND;
4576 #endif
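/* Example: with 'int *p', the expression '*p' replaces the pointer on
   vtop by an 'int' lvalue; applying '*' to a function designator is
   accepted as a no-op (the VT_FUNC early return above). */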
4580 /* pass a parameter to a function and do type checking and casting */
4581 static void gfunc_param_typed(Sym *func, Sym *arg)
4583 int func_type;
4584 CType type;
4586 func_type = func->f.func_type;
4587 if (func_type == FUNC_OLD ||
4588 (func_type == FUNC_ELLIPSIS && arg == NULL)) {
4589 /* default casting : only need to convert float to double */
4590 if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
4591 gen_cast_s(VT_DOUBLE);
4592 } else if (vtop->type.t & VT_BITFIELD) {
4593 type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
4594 type.ref = vtop->type.ref;
4595 gen_cast(&type);
4597 } else if (arg == NULL) {
4598 tcc_error("too many arguments to function");
4599 } else {
4600 type = arg->type;
4601 type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
4602 gen_assign_cast(&type);
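/* Example of the two paths above: calling an unprototyped function as
   'oldfn(1.5f)' promotes the float argument to double, whereas for a
   prototyped 'void fn(float)' the argument is converted to the declared
   parameter type via gen_assign_cast(). */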
4606 /* parse an expression and return its type without any side effect. */
4607 static void expr_type(CType *type, void (*expr_fn)(void))
4609 nocode_wanted++;
4610 expr_fn();
4611 *type = vtop->type;
4612 vpop();
4613 nocode_wanted--;
4616 /* parse an expression of the form '(type)' or '(expr)' and return its
4617 type */
4618 static void parse_expr_type(CType *type)
4620 int n;
4621 AttributeDef ad;
4623 skip('(');
4624 if (parse_btype(type, &ad)) {
4625 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4626 } else {
4627 expr_type(type, gexpr);
4629 skip(')');
4632 static void parse_type(CType *type)
4634 AttributeDef ad;
4635 int n;
4637 if (!parse_btype(type, &ad)) {
4638 expect("type");
4640 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4643 static void parse_builtin_params(int nc, const char *args)
4645 char c, sep = '(';
4646 CType t;
4647 if (nc)
4648 nocode_wanted++;
4649 next();
4650 while ((c = *args++)) {
4651 skip(sep);
4652 sep = ',';
4653 switch (c) {
4654 case 'e': expr_eq(); continue;
4655 case 't': parse_type(&t); vpush(&t); continue;
4656 default: tcc_error("internal error"); break;
4659 skip(')');
4660 if (nc)
4661 nocode_wanted--;
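/* For illustration: 'args' is a per-parameter format string where
   'e' parses an assignment expression and 't' parses a type name;
   e.g. "ee" (used by __builtin_expect below) accepts "(expr, expr)",
   and a non-zero 'nc' parses the arguments without generating code. */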
4664 ST_FUNC void unary(void)
4666 int n, t, align, size, r, sizeof_caller;
4667 CType type;
4668 Sym *s;
4669 AttributeDef ad;
4671 sizeof_caller = in_sizeof;
4672 in_sizeof = 0;
4673 type.ref = NULL;
4674 /* XXX: GCC 2.95.3 does not generate a jump table for this switch
4675 although it would be better here */
4676 tok_next:
4677 switch(tok) {
4678 case TOK_EXTENSION:
4679 next();
4680 goto tok_next;
4681 case TOK_LCHAR:
4682 #ifdef TCC_TARGET_PE
4683 t = VT_SHORT|VT_UNSIGNED;
4684 goto push_tokc;
4685 #endif
4686 case TOK_CINT:
4687 case TOK_CCHAR:
4688 t = VT_INT;
4689 push_tokc:
4690 type.t = t;
4691 vsetc(&type, VT_CONST, &tokc);
4692 next();
4693 break;
4694 case TOK_CUINT:
4695 t = VT_INT | VT_UNSIGNED;
4696 goto push_tokc;
4697 case TOK_CLLONG:
4698 t = VT_LLONG;
4699 goto push_tokc;
4700 case TOK_CULLONG:
4701 t = VT_LLONG | VT_UNSIGNED;
4702 goto push_tokc;
4703 case TOK_CFLOAT:
4704 t = VT_FLOAT;
4705 goto push_tokc;
4706 case TOK_CDOUBLE:
4707 t = VT_DOUBLE;
4708 goto push_tokc;
4709 case TOK_CLDOUBLE:
4710 t = VT_LDOUBLE;
4711 goto push_tokc;
4712 case TOK_CLONG:
4713 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG;
4714 goto push_tokc;
4715 case TOK_CULONG:
4716 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG | VT_UNSIGNED;
4717 goto push_tokc;
4718 case TOK___FUNCTION__:
4719 if (!gnu_ext)
4720 goto tok_identifier;
4721 /* fall thru */
4722 case TOK___FUNC__:
4724 void *ptr;
4725 int len;
4726 /* special function name identifier */
4727 len = strlen(funcname) + 1;
4728 /* generate char[len] type */
4729 type.t = VT_BYTE;
4730 mk_pointer(&type);
4731 type.t |= VT_ARRAY;
4732 type.ref->c = len;
4733 vpush_ref(&type, data_section, data_section->data_offset, len);
4734 if (!NODATA_WANTED) {
4735 ptr = section_ptr_add(data_section, len);
4736 memcpy(ptr, funcname, len);
4738 next();
4740 break;
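/* Example: inside 'int main(void) { puts(__func__); }' the identifier
   __func__ (or the GNU __FUNCTION__) yields a char array initialized
   with "main", materialized in data_section as done above. */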
4741 case TOK_LSTR:
4742 #ifdef TCC_TARGET_PE
4743 t = VT_SHORT | VT_UNSIGNED;
4744 #else
4745 t = VT_INT;
4746 #endif
4747 goto str_init;
4748 case TOK_STR:
4749 /* string parsing */
4750 t = VT_BYTE;
4751 if (tcc_state->char_is_unsigned)
4752 t = VT_BYTE | VT_UNSIGNED;
4753 str_init:
4754 if (tcc_state->warn_write_strings)
4755 t |= VT_CONSTANT;
4756 type.t = t;
4757 mk_pointer(&type);
4758 type.t |= VT_ARRAY;
4759 memset(&ad, 0, sizeof(AttributeDef));
4760 decl_initializer_alloc(&type, &ad, VT_CONST, 2, 0, 0);
4761 break;
4762 case '(':
4763 next();
4764 /* cast ? */
4765 if (parse_btype(&type, &ad)) {
4766 type_decl(&type, &ad, &n, TYPE_ABSTRACT);
4767 skip(')');
4768 /* check ISOC99 compound literal */
4769 if (tok == '{') {
4770 /* data is allocated locally by default */
4771 if (global_expr)
4772 r = VT_CONST;
4773 else
4774 r = VT_LOCAL;
4775 /* all except arrays are lvalues */
4776 if (!(type.t & VT_ARRAY))
4777 r |= lvalue_type(type.t);
4778 memset(&ad, 0, sizeof(AttributeDef));
4779 decl_initializer_alloc(&type, &ad, r, 1, 0, 0);
4780 } else {
4781 if (sizeof_caller) {
4782 vpush(&type);
4783 return;
4785 unary();
4786 gen_cast(&type);
4788 } else if (tok == '{') {
4789 int saved_nocode_wanted = nocode_wanted;
4790 if (const_wanted)
4791 tcc_error("expected constant");
4792 /* save all registers */
4793 save_regs(0);
4794 /* statement expression : we do not accept break/continue
4795 inside as GCC does. We do retain the nocode_wanted state,
4796 as statement expressions can't ever be entered from the
4797 outside, so any reactivation of code emission (from labels
4798 or loop heads) can be disabled again after the end of it. */
4799 block(NULL, NULL, 1);
4800 nocode_wanted = saved_nocode_wanted;
4801 skip(')');
4802 } else {
4803 gexpr();
4804 skip(')');
4806 break;
4807 case '*':
4808 next();
4809 unary();
4810 indir();
4811 break;
4812 case '&':
4813 next();
4814 unary();
4815 /* function names must be treated as function pointers,
4816 except for unary '&' and sizeof. Since we consider that
4817 functions are not lvalues, we only have to handle it
4818 there and in function calls. */
4819 /* arrays can also be used although they are not lvalues */
4820 if ((vtop->type.t & VT_BTYPE) != VT_FUNC &&
4821 !(vtop->type.t & VT_ARRAY))
4822 test_lvalue();
4823 mk_pointer(&vtop->type);
4824 gaddrof();
4825 break;
4826 case '!':
4827 next();
4828 unary();
4829 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4830 gen_cast_s(VT_BOOL);
4831 vtop->c.i = !vtop->c.i;
4832 } else if ((vtop->r & VT_VALMASK) == VT_CMP)
4833 vtop->c.i ^= 1;
4834 else {
4835 save_regs(1);
4836 vseti(VT_JMP, gvtst(1, 0));
4838 break;
4839 case '~':
4840 next();
4841 unary();
4842 vpushi(-1);
4843 gen_op('^');
4844 break;
4845 case '+':
4846 next();
4847 unary();
4848 if ((vtop->type.t & VT_BTYPE) == VT_PTR)
4849 tcc_error("pointer not accepted for unary plus");
4850 /* In order to force a cast, we add zero, except for floating point
4851 where we really need a no-op (otherwise -0.0 would be transformed
4852 into +0.0). */
4853 if (!is_float(vtop->type.t)) {
4854 vpushi(0);
4855 gen_op('+');
4857 break;
4858 case TOK_SIZEOF:
4859 case TOK_ALIGNOF1:
4860 case TOK_ALIGNOF2:
4861 case TOK_ALIGNOF3:
4862 t = tok;
4863 next();
4864 in_sizeof++;
4865 expr_type(&type, unary); /* performs "in_sizeof = 0" via unary() */
4866 s = vtop[1].sym; /* hack: accessing previous vtop */
4867 size = type_size(&type, &align);
4868 if (s && s->a.aligned)
4869 align = 1 << (s->a.aligned - 1);
4870 if (t == TOK_SIZEOF) {
4871 if (!(type.t & VT_VLA)) {
4872 if (size < 0)
4873 tcc_error("sizeof applied to an incomplete type");
4874 vpushs(size);
4875 } else {
4876 vla_runtime_type_size(&type, &align);
4878 } else {
4879 vpushs(align);
4881 vtop->type.t |= VT_UNSIGNED;
4882 break;
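/* Example: 'sizeof (char[n])' with a non-constant 'n' takes the
   vla_runtime_type_size() branch and yields a run-time value, while
   '_Alignof(double)' pushes the constant alignment; in both cases the
   result is made unsigned (size_t-like). */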
4884 case TOK_builtin_expect:
4885 /* __builtin_expect is a no-op for now */
4886 parse_builtin_params(0, "ee");
4887 vpop();
4888 break;
4889 case TOK_builtin_types_compatible_p:
4890 parse_builtin_params(0, "tt");
4891 vtop[-1].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
4892 vtop[0].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
4893 n = is_compatible_types(&vtop[-1].type, &vtop[0].type);
4894 vtop -= 2;
4895 vpushi(n);
4896 break;
4897 case TOK_builtin_choose_expr:
4899 int64_t c;
4900 next();
4901 skip('(');
4902 c = expr_const64();
4903 skip(',');
4904 if (!c) {
4905 nocode_wanted++;
4907 expr_eq();
4908 if (!c) {
4909 vpop();
4910 nocode_wanted--;
4912 skip(',');
4913 if (c) {
4914 nocode_wanted++;
4916 expr_eq();
4917 if (c) {
4918 vpop();
4919 nocode_wanted--;
4921 skip(')');
4923 break;
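/* Example: __builtin_choose_expr(sizeof(long) == 8, a, b) parses both
   branches, but only the one selected by the constant condition
   generates code and leaves a value; the other is parsed under
   nocode_wanted and popped. */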
4924 case TOK_builtin_constant_p:
4925 parse_builtin_params(1, "e");
4926 n = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
4927 vtop--;
4928 vpushi(n);
4929 break;
4930 case TOK_builtin_frame_address:
4931 case TOK_builtin_return_address:
4933 int tok1 = tok;
4934 int level;
4935 next();
4936 skip('(');
4937 if (tok != TOK_CINT) {
4938 tcc_error("%s only takes positive integers",
4939 tok1 == TOK_builtin_return_address ?
4940 "__builtin_return_address" :
4941 "__builtin_frame_address");
4943 level = (uint32_t)tokc.i;
4944 next();
4945 skip(')');
4946 type.t = VT_VOID;
4947 mk_pointer(&type);
4948 vset(&type, VT_LOCAL, 0); /* local frame */
4949 while (level--) {
4950 mk_pointer(&vtop->type);
4951 indir(); /* -> parent frame */
4953 if (tok1 == TOK_builtin_return_address) {
4954 // assume return address is just above frame pointer on stack
4955 vpushi(PTR_SIZE);
4956 gen_op('+');
4957 mk_pointer(&vtop->type);
4958 indir();
4961 break;
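/* Example: __builtin_frame_address(1) starts from the current frame
   (VT_LOCAL, offset 0) and dereferences one saved frame pointer;
   __builtin_return_address(n) additionally reads the word at
   frame + PTR_SIZE, assuming the return address is stored just above
   the saved frame pointer. */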
4962 #ifdef TCC_TARGET_X86_64
4963 #ifdef TCC_TARGET_PE
4964 case TOK_builtin_va_start:
4965 parse_builtin_params(0, "ee");
4966 r = vtop->r & VT_VALMASK;
4967 if (r == VT_LLOCAL)
4968 r = VT_LOCAL;
4969 if (r != VT_LOCAL)
4970 tcc_error("__builtin_va_start expects a local variable");
4971 vtop->r = r;
4972 vtop->type = char_pointer_type;
4973 vtop->c.i += 8;
4974 vstore();
4975 break;
4976 #else
4977 case TOK_builtin_va_arg_types:
4978 parse_builtin_params(0, "t");
4979 vpushi(classify_x86_64_va_arg(&vtop->type));
4980 vswap();
4981 vpop();
4982 break;
4983 #endif
4984 #endif
4986 #ifdef TCC_TARGET_ARM64
4987 case TOK___va_start: {
4988 parse_builtin_params(0, "ee");
4989 //xx check types
4990 gen_va_start();
4991 vpushi(0);
4992 vtop->type.t = VT_VOID;
4993 break;
4995 case TOK___va_arg: {
4996 parse_builtin_params(0, "et");
4997 type = vtop->type;
4998 vpop();
4999 //xx check types
5000 gen_va_arg(&type);
5001 vtop->type = type;
5002 break;
5004 case TOK___arm64_clear_cache: {
5005 parse_builtin_params(0, "ee");
5006 gen_clear_cache();
5007 vpushi(0);
5008 vtop->type.t = VT_VOID;
5009 break;
5011 #endif
5012 /* pre operations */
5013 case TOK_INC:
5014 case TOK_DEC:
5015 t = tok;
5016 next();
5017 unary();
5018 inc(0, t);
5019 break;
5020 case '-':
5021 next();
5022 unary();
5023 t = vtop->type.t & VT_BTYPE;
5024 if (is_float(t)) {
5025 /* In IEEE negate(x) isn't subtract(0,x), but rather
5026 subtract(-0, x). */
5027 vpush(&vtop->type);
5028 if (t == VT_FLOAT)
5029 vtop->c.f = -1.0 * 0.0;
5030 else if (t == VT_DOUBLE)
5031 vtop->c.d = -1.0 * 0.0;
5032 else
5033 vtop->c.ld = -1.0 * 0.0;
5034 } else
5035 vpushi(0);
5036 vswap();
5037 gen_op('-');
5038 break;
5039 case TOK_LAND:
5040 if (!gnu_ext)
5041 goto tok_identifier;
5042 next();
5043 /* allow taking the address of a label */
5044 if (tok < TOK_UIDENT)
5045 expect("label identifier");
5046 s = label_find(tok);
5047 if (!s) {
5048 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
5049 } else {
5050 if (s->r == LABEL_DECLARED)
5051 s->r = LABEL_FORWARD;
5053 if (!s->type.t) {
5054 s->type.t = VT_VOID;
5055 mk_pointer(&s->type);
5056 s->type.t |= VT_STATIC;
5058 vpushsym(&s->type, s);
5059 next();
5060 break;
5062 case TOK_GENERIC:
5064 CType controlling_type;
5065 int has_default = 0;
5066 int has_match = 0;
5067 int learn = 0;
5068 TokenString *str = NULL;
5069 int saved_const_wanted = const_wanted;
5071 next();
5072 skip('(');
5073 const_wanted = 0;
5074 expr_type(&controlling_type, expr_eq);
5075 controlling_type.t &= ~(VT_CONSTANT | VT_VOLATILE | VT_ARRAY);
5076 if ((controlling_type.t & VT_BTYPE) == VT_FUNC)
5077 mk_pointer(&controlling_type);
5078 const_wanted = saved_const_wanted;
5079 for (;;) {
5080 learn = 0;
5081 skip(',');
5082 if (tok == TOK_DEFAULT) {
5083 if (has_default)
5084 tcc_error("too many 'default'");
5085 has_default = 1;
5086 if (!has_match)
5087 learn = 1;
5088 next();
5089 } else {
5090 AttributeDef ad_tmp;
5091 int itmp;
5092 CType cur_type;
5093 parse_btype(&cur_type, &ad_tmp);
5094 type_decl(&cur_type, &ad_tmp, &itmp, TYPE_ABSTRACT);
5095 if (compare_types(&controlling_type, &cur_type, 0)) {
5096 if (has_match) {
5097 tcc_error("type match twice");
5099 has_match = 1;
5100 learn = 1;
5103 skip(':');
5104 if (learn) {
5105 if (str)
5106 tok_str_free(str);
5107 skip_or_save_block(&str);
5108 } else {
5109 skip_or_save_block(NULL);
5111 if (tok == ')')
5112 break;
5114 if (!str) {
5115 char buf[60];
5116 type_to_str(buf, sizeof buf, &controlling_type, NULL);
5117 tcc_error("type '%s' does not match any association", buf);
5119 begin_macro(str, 1);
5120 next();
5121 expr_eq();
5122 if (tok != TOK_EOF)
5123 expect(",");
5124 end_macro();
5125 next();
5126 break;
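/* Example: _Generic(x, int: expr1, default: expr2) skips or saves each
   association with skip_or_save_block(); only the tokens of the
   matching (or default) branch are kept and re-parsed via
   begin_macro()/expr_eq() after the whole selection has been scanned. */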
5128 // special qnan , snan and infinity values
5129 case TOK___NAN__:
5130 n = 0x7fc00000;
5131 special_math_val:
5132 vpushi(n);
5133 vtop->type.t = VT_FLOAT;
5134 next();
5135 break;
5136 case TOK___SNAN__:
5137 n = 0x7f800001;
5138 goto special_math_val;
5139 case TOK___INF__:
5140 n = 0x7f800000;
5141 goto special_math_val;
5143 default:
5144 tok_identifier:
5145 t = tok;
5146 next();
5147 if (t < TOK_UIDENT)
5148 expect("identifier");
5149 s = sym_find(t);
5150 if (!s || IS_ASM_SYM(s)) {
5151 const char *name = get_tok_str(t, NULL);
5152 if (tok != '(')
5153 tcc_error("'%s' undeclared", name);
5154 /* for simple function calls, we tolerate an undeclared
5155 external reference to an int() function */
5156 if (tcc_state->warn_implicit_function_declaration
5157 #ifdef TCC_TARGET_PE
5158 /* people must be warned about using undeclared WINAPI functions
5159 (which usually start with an uppercase letter) */
5160 || (name[0] >= 'A' && name[0] <= 'Z')
5161 #endif
5163 tcc_warning("implicit declaration of function '%s'", name);
5164 s = external_global_sym(t, &func_old_type, 0);
5167 r = s->r;
5168 /* A symbol that has a register is a local register variable,
5169 which starts out as a VT_LOCAL value. */
5170 if ((r & VT_VALMASK) < VT_CONST)
5171 r = (r & ~VT_VALMASK) | VT_LOCAL;
5173 vset(&s->type, r, s->c);
5174 /* Point to s as backpointer (even without r&VT_SYM).
5175 Will be used by at least the x86 inline asm parser for
5176 regvars. */
5177 vtop->sym = s;
5179 if (r & VT_SYM) {
5180 vtop->c.i = 0;
5181 } else if (r == VT_CONST && IS_ENUM_VAL(s->type.t)) {
5182 vtop->c.i = s->enum_val;
5184 break;
5187 /* post operations */
5188 while (1) {
5189 if (tok == TOK_INC || tok == TOK_DEC) {
5190 inc(1, tok);
5191 next();
5192 } else if (tok == '.' || tok == TOK_ARROW || tok == TOK_CDOUBLE) {
5193 int qualifiers;
5194 /* field */
5195 if (tok == TOK_ARROW)
5196 indir();
5197 qualifiers = vtop->type.t & (VT_CONSTANT | VT_VOLATILE);
5198 test_lvalue();
5199 gaddrof();
5200 /* expect pointer on structure */
5201 if ((vtop->type.t & VT_BTYPE) != VT_STRUCT)
5202 expect("struct or union");
5203 if (tok == TOK_CDOUBLE)
5204 expect("field name");
5205 next();
5206 if (tok == TOK_CINT || tok == TOK_CUINT)
5207 expect("field name");
5208 s = find_field(&vtop->type, tok);
5209 if (!s)
5210 tcc_error("field not found: %s", get_tok_str(tok & ~SYM_FIELD, &tokc));
5211 /* add field offset to pointer */
5212 vtop->type = char_pointer_type; /* change type to 'char *' */
5213 vpushi(s->c);
5214 gen_op('+');
5215 /* change type to field type, and set to lvalue */
5216 vtop->type = s->type;
5217 vtop->type.t |= qualifiers;
5218 /* an array is never an lvalue */
5219 if (!(vtop->type.t & VT_ARRAY)) {
5220 vtop->r |= lvalue_type(vtop->type.t);
5221 #ifdef CONFIG_TCC_BCHECK
5222 /* if bound checking, the referenced pointer must be checked */
5223 if (tcc_state->do_bounds_check && (vtop->r & VT_VALMASK) != VT_LOCAL)
5224 vtop->r |= VT_MUSTBOUND;
5225 #endif
5227 next();
5228 } else if (tok == '[') {
5229 next();
5230 gexpr();
5231 gen_op('+');
5232 indir();
5233 skip(']');
5234 } else if (tok == '(') {
5235 SValue ret;
5236 Sym *sa;
5237 int nb_args, ret_nregs, ret_align, regsize, variadic;
5239 /* function call */
5240 if ((vtop->type.t & VT_BTYPE) != VT_FUNC) {
5241 /* pointer test (no array accepted) */
5242 if ((vtop->type.t & (VT_BTYPE | VT_ARRAY)) == VT_PTR) {
5243 vtop->type = *pointed_type(&vtop->type);
5244 if ((vtop->type.t & VT_BTYPE) != VT_FUNC)
5245 goto error_func;
5246 } else {
5247 error_func:
5248 expect("function pointer");
5250 } else {
5251 vtop->r &= ~VT_LVAL; /* no lvalue */
5253 /* get return type */
5254 s = vtop->type.ref;
5255 next();
5256 sa = s->next; /* first parameter */
5257 nb_args = regsize = 0;
5258 ret.r2 = VT_CONST;
5259 /* compute first implicit argument if a structure is returned */
5260 if ((s->type.t & VT_BTYPE) == VT_STRUCT) {
5261 variadic = (s->f.func_type == FUNC_ELLIPSIS);
5262 ret_nregs = gfunc_sret(&s->type, variadic, &ret.type,
5263 &ret_align, &regsize);
5264 if (!ret_nregs) {
5265 /* get some space for the returned structure */
5266 size = type_size(&s->type, &align);
5267 #ifdef TCC_TARGET_ARM64
5268 /* On arm64, a small struct is returned in registers.
5269 It is much easier to write it to memory if we know
5270 that we are allowed to write some extra bytes, so
5271 round the allocated space up to a power of 2: */
5272 if (size < 16)
5273 while (size & (size - 1))
5274 size = (size | (size - 1)) + 1;
5275 #endif
5276 loc = (loc - size) & -align;
5277 ret.type = s->type;
5278 ret.r = VT_LOCAL | VT_LVAL;
5279 /* pass it as 'int' to avoid structure arg passing
5280 problems */
5281 vseti(VT_LOCAL, loc);
5282 ret.c = vtop->c;
5283 nb_args++;
5285 } else {
5286 ret_nregs = 1;
5287 ret.type = s->type;
5290 if (ret_nregs) {
5291 /* return in register */
5292 if (is_float(ret.type.t)) {
5293 ret.r = reg_fret(ret.type.t);
5294 #ifdef TCC_TARGET_X86_64
5295 if ((ret.type.t & VT_BTYPE) == VT_QFLOAT)
5296 ret.r2 = REG_QRET;
5297 #endif
5298 } else {
5299 #ifndef TCC_TARGET_ARM64
5300 #ifdef TCC_TARGET_X86_64
5301 if ((ret.type.t & VT_BTYPE) == VT_QLONG)
5302 #else
5303 if ((ret.type.t & VT_BTYPE) == VT_LLONG)
5304 #endif
5305 ret.r2 = REG_LRET;
5306 #endif
5307 ret.r = REG_IRET;
5309 ret.c.i = 0;
5311 if (tok != ')') {
5312 for(;;) {
5313 expr_eq();
5314 gfunc_param_typed(s, sa);
5315 nb_args++;
5316 if (sa)
5317 sa = sa->next;
5318 if (tok == ')')
5319 break;
5320 skip(',');
5323 if (sa)
5324 tcc_error("too few arguments to function");
5325 skip(')');
5326 gfunc_call(nb_args);
5328 /* return value */
5329 for (r = ret.r + ret_nregs + !ret_nregs; r-- > ret.r;) {
5330 vsetc(&ret.type, r, &ret.c);
5331 vtop->r2 = ret.r2; /* Loop only happens when r2 is VT_CONST */
5334 /* handle packed struct return */
5335 if (((s->type.t & VT_BTYPE) == VT_STRUCT) && ret_nregs) {
5336 int addr, offset;
5338 size = type_size(&s->type, &align);
5339 /* We're writing whole regs often, make sure there's enough
5340 space. Assume register size is power of 2. */
5341 if (regsize > align)
5342 align = regsize;
5343 loc = (loc - size) & -align;
5344 addr = loc;
5345 offset = 0;
5346 for (;;) {
5347 vset(&ret.type, VT_LOCAL | VT_LVAL, addr + offset);
5348 vswap();
5349 vstore();
5350 vtop--;
5351 if (--ret_nregs == 0)
5352 break;
5353 offset += regsize;
5355 vset(&s->type, VT_LOCAL | VT_LVAL, addr);
5357 } else {
5358 break;
5363 ST_FUNC void expr_prod(void)
5365 int t;
5367 unary();
5368 while (tok == '*' || tok == '/' || tok == '%') {
5369 t = tok;
5370 next();
5371 unary();
5372 gen_op(t);
5376 ST_FUNC void expr_sum(void)
5378 int t;
5380 expr_prod();
5381 while (tok == '+' || tok == '-') {
5382 t = tok;
5383 next();
5384 expr_prod();
5385 gen_op(t);
5389 static void expr_shift(void)
5391 int t;
5393 expr_sum();
5394 while (tok == TOK_SHL || tok == TOK_SAR) {
5395 t = tok;
5396 next();
5397 expr_sum();
5398 gen_op(t);
5402 static void expr_cmp(void)
5404 int t;
5406 expr_shift();
5407 while ((tok >= TOK_ULE && tok <= TOK_GT) ||
5408 tok == TOK_ULT || tok == TOK_UGE) {
5409 t = tok;
5410 next();
5411 expr_shift();
5412 gen_op(t);
5416 static void expr_cmpeq(void)
5418 int t;
5420 expr_cmp();
5421 while (tok == TOK_EQ || tok == TOK_NE) {
5422 t = tok;
5423 next();
5424 expr_cmp();
5425 gen_op(t);
5429 static void expr_and(void)
5431 expr_cmpeq();
5432 while (tok == '&') {
5433 next();
5434 expr_cmpeq();
5435 gen_op('&');
5439 static void expr_xor(void)
5441 expr_and();
5442 while (tok == '^') {
5443 next();
5444 expr_and();
5445 gen_op('^');
5449 static void expr_or(void)
5451 expr_xor();
5452 while (tok == '|') {
5453 next();
5454 expr_xor();
5455 gen_op('|');
5459 static void expr_land(void)
5461 expr_or();
5462 if (tok == TOK_LAND) {
5463 int t = 0;
5464 for(;;) {
5465 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5466 gen_cast_s(VT_BOOL);
5467 if (vtop->c.i) {
5468 vpop();
5469 } else {
5470 nocode_wanted++;
5471 while (tok == TOK_LAND) {
5472 next();
5473 expr_or();
5474 vpop();
5476 nocode_wanted--;
5477 if (t)
5478 gsym(t);
5479 gen_cast_s(VT_INT);
5480 break;
5482 } else {
5483 if (!t)
5484 save_regs(1);
5485 t = gvtst(1, t);
5487 if (tok != TOK_LAND) {
5488 if (t)
5489 vseti(VT_JMPI, t);
5490 else
5491 vpushi(1);
5492 break;
5494 next();
5495 expr_or();
5500 static void expr_lor(void)
5502 expr_land();
5503 if (tok == TOK_LOR) {
5504 int t = 0;
5505 for(;;) {
5506 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5507 gen_cast_s(VT_BOOL);
5508 if (!vtop->c.i) {
5509 vpop();
5510 } else {
5511 nocode_wanted++;
5512 while (tok == TOK_LOR) {
5513 next();
5514 expr_land();
5515 vpop();
5517 nocode_wanted--;
5518 if (t)
5519 gsym(t);
5520 gen_cast_s(VT_INT);
5521 break;
5523 } else {
5524 if (!t)
5525 save_regs(1);
5526 t = gvtst(0, t);
5528 if (tok != TOK_LOR) {
5529 if (t)
5530 vseti(VT_JMP, t);
5531 else
5532 vpushi(0);
5533 break;
5535 next();
5536 expr_land();
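/* Note on expr_land()/expr_lor() above: when the left operand is a
   compile-time constant the remaining operands are still parsed, but
   under nocode_wanted, so e.g. '0 && f()' or '1 || f()' emits no call
   to f(); otherwise a chain of gvtst() jumps implements the usual
   short-circuit evaluation. */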
5541 /* Assuming vtop is a value used in a conditional context
5542 (i.e. compared with zero) return 0 if it's false, 1 if
5543 true and -1 if it can't be statically determined. */
5544 static int condition_3way(void)
5546 int c = -1;
5547 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
5548 (!(vtop->r & VT_SYM) || !vtop->sym->a.weak)) {
5549 vdup();
5550 gen_cast_s(VT_BOOL);
5551 c = vtop->c.i;
5552 vpop();
5554 return c;
5557 static void expr_cond(void)
5559 int tt, u, r1, r2, rc, t1, t2, bt1, bt2, islv, c, g;
5560 SValue sv;
5561 CType type, type1, type2;
5563 expr_lor();
5564 if (tok == '?') {
5565 next();
5566 c = condition_3way();
5567 g = (tok == ':' && gnu_ext);
5568 if (c < 0) {
5569 /* needed to avoid having different registers saved in
5570 each branch */
5571 if (is_float(vtop->type.t)) {
5572 rc = RC_FLOAT;
5573 #ifdef TCC_TARGET_X86_64
5574 if ((vtop->type.t & VT_BTYPE) == VT_LDOUBLE) {
5575 rc = RC_ST0;
5577 #endif
5578 } else
5579 rc = RC_INT;
5580 gv(rc);
5581 save_regs(1);
5582 if (g)
5583 gv_dup();
5584 tt = gvtst(1, 0);
5586 } else {
5587 if (!g)
5588 vpop();
5589 tt = 0;
5592 if (1) {
5593 if (c == 0)
5594 nocode_wanted++;
5595 if (!g)
5596 gexpr();
5598 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
5599 mk_pointer(&vtop->type);
5600 type1 = vtop->type;
5601 sv = *vtop; /* save value to handle it later */
5602 vtop--; /* no vpop so that FP stack is not flushed */
5603 skip(':');
5605 u = 0;
5606 if (c < 0)
5607 u = gjmp(0);
5608 gsym(tt);
5610 if (c == 0)
5611 nocode_wanted--;
5612 if (c == 1)
5613 nocode_wanted++;
5614 expr_cond();
5615 if (c == 1)
5616 nocode_wanted--;
5618 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
5619 mk_pointer(&vtop->type);
5620 type2=vtop->type;
5621 t1 = type1.t;
5622 bt1 = t1 & VT_BTYPE;
5623 t2 = type2.t;
5624 bt2 = t2 & VT_BTYPE;
5625 type.ref = NULL;
5628 /* cast operands to correct type according to ISOC rules */
5629 if (bt1 == VT_VOID || bt2 == VT_VOID) {
5630 type.t = VT_VOID; /* NOTE: as an extension, we accept void on only one side */
5631 } else if (is_float(bt1) || is_float(bt2)) {
5632 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
5633 type.t = VT_LDOUBLE;
5635 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
5636 type.t = VT_DOUBLE;
5637 } else {
5638 type.t = VT_FLOAT;
5640 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
5641 /* cast to biggest op */
5642 type.t = VT_LLONG | VT_LONG;
5643 if (bt1 == VT_LLONG)
5644 type.t &= t1;
5645 if (bt2 == VT_LLONG)
5646 type.t &= t2;
5647 /* convert to unsigned if it does not fit in a long long */
5648 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
5649 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
5650 type.t |= VT_UNSIGNED;
5651 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
5652 /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
5653 /* If one is a null ptr constant the result type
5654 is the other. */
5655 if (is_null_pointer (vtop)) type = type1;
5656 else if (is_null_pointer (&sv)) type = type2;
5657 else if (bt1 != bt2)
5658 tcc_error("incompatible types in conditional expressions");
5659 else {
5660 CType *pt1 = pointed_type(&type1);
5661 CType *pt2 = pointed_type(&type2);
5662 int pbt1 = pt1->t & VT_BTYPE;
5663 int pbt2 = pt2->t & VT_BTYPE;
5664 int newquals, copied = 0;
5665 /* pointers to void get preferred, otherwise the
5666 pointed to types minus qualifs should be compatible */
5667 type = (pbt1 == VT_VOID) ? type1 : type2;
5668 if (pbt1 != VT_VOID && pbt2 != VT_VOID) {
5669 if(!compare_types(pt1, pt2, 1/*unqualif*/))
5670 tcc_warning("pointer type mismatch in conditional expression\n");
5672 /* combine qualifs */
5673 newquals = ((pt1->t | pt2->t) & (VT_CONSTANT | VT_VOLATILE));
5674 if ((~pointed_type(&type)->t & (VT_CONSTANT | VT_VOLATILE))
5675 & newquals)
5677 /* copy the pointer target symbol */
5678 type.ref = sym_push(SYM_FIELD, &type.ref->type,
5679 0, type.ref->c);
5680 copied = 1;
5681 pointed_type(&type)->t |= newquals;
5683 /* pointers to incomplete arrays get converted to
5684 pointers to completed ones if possible */
5685 if (pt1->t & VT_ARRAY
5686 && pt2->t & VT_ARRAY
5687 && pointed_type(&type)->ref->c < 0
5688 && (pt1->ref->c > 0 || pt2->ref->c > 0))
5690 if (!copied)
5691 type.ref = sym_push(SYM_FIELD, &type.ref->type,
5692 0, type.ref->c);
5693 pointed_type(&type)->ref =
5694 sym_push(SYM_FIELD, &pointed_type(&type)->ref->type,
5695 0, pointed_type(&type)->ref->c);
5696 pointed_type(&type)->ref->c =
5697 0 < pt1->ref->c ? pt1->ref->c : pt2->ref->c;
5700 } else if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
5701 /* XXX: test structure compatibility */
5702 type = bt1 == VT_STRUCT ? type1 : type2;
5703 } else {
5704 /* integer operations */
5705 type.t = VT_INT | (VT_LONG & (t1 | t2));
5706 /* convert to unsigned if it does not fit in an integer */
5707 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
5708 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
5709 type.t |= VT_UNSIGNED;
5711 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
5712 that `(expr ? a : b).mem` does not error with "lvalue expected" */
5713 islv = (vtop->r & VT_LVAL) && (sv.r & VT_LVAL) && VT_STRUCT == (type.t & VT_BTYPE);
5715 /* now we convert second operand */
5716 if (c != 1) {
5717 gen_cast(&type);
5718 if (islv) {
5719 mk_pointer(&vtop->type);
5720 gaddrof();
5721 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5722 gaddrof();
5725 rc = RC_INT;
5726 if (is_float(type.t)) {
5727 rc = RC_FLOAT;
5728 #ifdef TCC_TARGET_X86_64
5729 if ((type.t & VT_BTYPE) == VT_LDOUBLE) {
5730 rc = RC_ST0;
5732 #endif
5733 } else if ((type.t & VT_BTYPE) == VT_LLONG) {
5734 /* for long longs, we use fixed registers to avoid having
5735 to handle a complicated move */
5736 rc = RC_IRET;
5739 tt = r2 = 0;
5740 if (c < 0) {
5741 r2 = gv(rc);
5742 tt = gjmp(0);
5744 gsym(u);
5746 /* this is horrible, but we must also convert first
5747 operand */
5748 if (c != 0) {
5749 *vtop = sv;
5750 gen_cast(&type);
5751 if (islv) {
5752 mk_pointer(&vtop->type);
5753 gaddrof();
5754 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5755 gaddrof();
5758 if (c < 0 || islv) {
5759 r1 = gv(rc);
5760 move_reg(r2, r1, type.t);
5761 vtop->r = r2;
5762 gsym(tt);
5763 if (islv)
5764 indir();
5770 static void expr_eq(void)
5772 int t;
5774 expr_cond();
5775 if (tok == '=' ||
5776 (tok >= TOK_A_MOD && tok <= TOK_A_DIV) ||
5777 tok == TOK_A_XOR || tok == TOK_A_OR ||
5778 tok == TOK_A_SHL || tok == TOK_A_SAR) {
5779 test_lvalue();
5780 t = tok;
5781 next();
5782 if (t == '=') {
5783 expr_eq();
5784 } else {
5785 vdup();
5786 expr_eq();
5787 gen_op(t & 0x7f);
5789 vstore();
5793 ST_FUNC void gexpr(void)
5795 while (1) {
5796 expr_eq();
5797 if (tok != ',')
5798 break;
5799 vpop();
5800 next();
5804 /* parse a constant expression and return value in vtop. */
5805 static void expr_const1(void)
5807 const_wanted++;
5808 nocode_wanted++;
5809 expr_cond();
5810 nocode_wanted--;
5811 const_wanted--;
5814 /* parse an integer constant and return its value. */
5815 static inline int64_t expr_const64(void)
5817 int64_t c;
5818 expr_const1();
5819 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
5820 expect("constant expression");
5821 c = vtop->c.i;
5822 vpop();
5823 return c;
5826 /* parse an integer constant and return its value.
5827 Complain if it doesn't fit 32bit (signed or unsigned). */
5828 ST_FUNC int expr_const(void)
5830 int c;
5831 int64_t wc = expr_const64();
5832 c = wc;
5833 if (c != wc && (unsigned)c != wc)
5834 tcc_error("constant exceeds 32 bit");
5835 return c;
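/* Example: array sizes for global/static objects in post_type() go
   through expr_const(), so a size that does not fit in 32 bits (signed
   or unsigned) is rejected with "constant exceeds 32 bit", whereas
   case values use the 64-bit expr_const64() directly. */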
5838 /* return the label token if current token is a label, otherwise
5839 return zero */
5840 static int is_label(void)
5842 int last_tok;
5844 /* fast test first */
5845 if (tok < TOK_UIDENT)
5846 return 0;
5847 /* no need to save tokc because tok is an identifier */
5848 last_tok = tok;
5849 next();
5850 if (tok == ':') {
5851 return last_tok;
5852 } else {
5853 unget_tok(last_tok);
5854 return 0;
5858 #ifndef TCC_TARGET_ARM64
5859 static void gfunc_return(CType *func_type)
5861 if ((func_type->t & VT_BTYPE) == VT_STRUCT) {
5862 CType type, ret_type;
5863 int ret_align, ret_nregs, regsize;
5864 ret_nregs = gfunc_sret(func_type, func_var, &ret_type,
5865 &ret_align, &regsize);
5866 if (0 == ret_nregs) {
5867 /* if returning structure, must copy it to implicit
5868 first pointer arg location */
5869 type = *func_type;
5870 mk_pointer(&type);
5871 vset(&type, VT_LOCAL | VT_LVAL, func_vc);
5872 indir();
5873 vswap();
5874 /* copy structure value to pointer */
5875 vstore();
5876 } else {
5877 /* returning structure packed into registers */
5878 int r, size, addr, align;
5879 size = type_size(func_type,&align);
5880 if ((vtop->r != (VT_LOCAL | VT_LVAL) ||
5881 (vtop->c.i & (ret_align-1)))
5882 && (align & (ret_align-1))) {
5883 loc = (loc - size) & -ret_align;
5884 addr = loc;
5885 type = *func_type;
5886 vset(&type, VT_LOCAL | VT_LVAL, addr);
5887 vswap();
5888 vstore();
5889 vpop();
5890 vset(&ret_type, VT_LOCAL | VT_LVAL, addr);
5892 vtop->type = ret_type;
5893 if (is_float(ret_type.t))
5894 r = rc_fret(ret_type.t);
5895 else
5896 r = RC_IRET;
5898 if (ret_nregs == 1)
5899 gv(r);
5900 else {
5901 for (;;) {
5902 vdup();
5903 gv(r);
5904 vpop();
5905 if (--ret_nregs == 0)
5906 break;
5907 /* We assume that when a structure is returned in multiple
5908 registers, their classes are consecutive values of the
5909 sequence s(n) = 2^n */
5910 r <<= 1;
5911 vtop->c.i += regsize;
5915 } else if (is_float(func_type->t)) {
5916 gv(rc_fret(func_type->t));
5917 } else {
5918 gv(RC_IRET);
5920 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
5922 #endif
5924 static int case_cmp(const void *pa, const void *pb)
5926 int64_t a = (*(struct case_t**) pa)->v1;
5927 int64_t b = (*(struct case_t**) pb)->v1;
5928 return a < b ? -1 : a > b;
5931 static void gcase(struct case_t **base, int len, int *bsym)
5933 struct case_t *p;
5934 int e;
5935 int ll = (vtop->type.t & VT_BTYPE) == VT_LLONG;
5936 gv(RC_INT);
5937 while (len > 4) {
5938 /* binary search */
5939 p = base[len/2];
5940 vdup();
5941 if (ll)
5942 vpushll(p->v2);
5943 else
5944 vpushi(p->v2);
5945 gen_op(TOK_LE);
5946 e = gtst(1, 0);
5947 vdup();
5948 if (ll)
5949 vpushll(p->v1);
5950 else
5951 vpushi(p->v1);
5952 gen_op(TOK_GE);
5953 gtst_addr(0, p->sym); /* v1 <= x <= v2 */
5954 /* x < v1 */
5955 gcase(base, len/2, bsym);
5956 if (cur_switch->def_sym)
5957 gjmp_addr(cur_switch->def_sym);
5958 else
5959 *bsym = gjmp(*bsym);
5960 /* x > v2 */
5961 gsym(e);
5962 e = len/2 + 1;
5963 base += e; len -= e;
5965 /* linear scan */
5966 while (len--) {
5967 p = *base++;
5968 vdup();
5969 if (ll)
5970 vpushll(p->v2);
5971 else
5972 vpushi(p->v2);
5973 if (p->v1 == p->v2) {
5974 gen_op(TOK_EQ);
5975 gtst_addr(0, p->sym);
5976 } else {
5977 gen_op(TOK_LE);
5978 e = gtst(1, 0);
5979 vdup();
5980 if (ll)
5981 vpushll(p->v1);
5982 else
5983 vpushi(p->v1);
5984 gen_op(TOK_GE);
5985 gtst_addr(0, p->sym);
5986 gsym(e);
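/* For illustration: each case_t describes a range [v1, v2] (v1 == v2
   for a plain 'case 3:', and e.g. 1 and 5 for the GNU extension
   'case 1 ... 5:'); the sorted ranges are dispatched with a binary
   search while more than four remain and a linear scan for the rest. */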
5991 static void block(int *bsym, int *csym, int is_expr)
5993 int a, b, c, d, cond;
5994 Sym *s;
5996 /* generate line number info */
5997 if (tcc_state->do_debug)
5998 tcc_debug_line(tcc_state);
6000 if (is_expr) {
6001 /* default return value is (void) */
6002 vpushi(0);
6003 vtop->type.t = VT_VOID;
6006 if (tok == TOK_IF) {
6007 /* if test */
6008 int saved_nocode_wanted = nocode_wanted;
6009 next();
6010 skip('(');
6011 gexpr();
6012 skip(')');
6013 cond = condition_3way();
6014 if (cond == 1)
6015 a = 0, vpop();
6016 else
6017 a = gvtst(1, 0);
6018 if (cond == 0)
6019 nocode_wanted |= 0x20000000;
6020 block(bsym, csym, 0);
6021 if (cond != 1)
6022 nocode_wanted = saved_nocode_wanted;
6023 c = tok;
6024 if (c == TOK_ELSE) {
6025 next();
6026 d = gjmp(0);
6027 gsym(a);
6028 if (cond == 1)
6029 nocode_wanted |= 0x20000000;
6030 block(bsym, csym, 0);
6031 gsym(d); /* patch else jmp */
6032 if (cond != 0)
6033 nocode_wanted = saved_nocode_wanted;
6034 } else
6035 gsym(a);
6036 } else if (tok == TOK_WHILE) {
6037 int saved_nocode_wanted;
6038 nocode_wanted &= ~0x20000000;
6039 next();
6040 d = ind;
6041 vla_sp_restore();
6042 skip('(');
6043 gexpr();
6044 skip(')');
6045 a = gvtst(1, 0);
6046 b = 0;
6047 ++local_scope;
6048 saved_nocode_wanted = nocode_wanted;
6049 block(&a, &b, 0);
6050 nocode_wanted = saved_nocode_wanted;
6051 --local_scope;
6052 gjmp_addr(d);
6053 gsym(a);
6054 gsym_addr(b, d);
6055 } else if (tok == '{') {
6056 Sym *llabel;
6057 int block_vla_sp_loc = vla_sp_loc, saved_vlas_in_scope = vlas_in_scope;
6059 next();
6060 /* record local declaration stack position */
6061 s = local_stack;
6062 llabel = local_label_stack;
6063 ++local_scope;
6065 /* handle local labels declarations */
6066 if (tok == TOK_LABEL) {
6067 next();
6068 for(;;) {
6069 if (tok < TOK_UIDENT)
6070 expect("label identifier");
6071 label_push(&local_label_stack, tok, LABEL_DECLARED);
6072 next();
6073 if (tok == ',') {
6074 next();
6075 } else {
6076 skip(';');
6077 break;
6081 while (tok != '}') {
6082 if ((a = is_label()))
6083 unget_tok(a);
6084 else
6085 decl(VT_LOCAL);
6086 if (tok != '}') {
6087 if (is_expr)
6088 vpop();
6089 block(bsym, csym, is_expr);
6092 /* pop locally defined labels */
6093 label_pop(&local_label_stack, llabel, is_expr);
6094 /* pop locally defined symbols */
6095 --local_scope;
6096 /* In the is_expr case (a statement expression is finished here),
6097 vtop might refer to symbols on the local_stack. Either via the
6098 type or via vtop->sym. We can't pop those nor any that in turn
6099 might be referred to. To make it easier we don't roll back
6100 any symbols in that case; some upper level call to block() will
6101 do that. We do have to remove such symbols from the lookup
6102 tables, though. sym_pop will do that. */
6103 sym_pop(&local_stack, s, is_expr);
6105 /* Pop VLA frames and restore stack pointer if required */
6106 if (vlas_in_scope > saved_vlas_in_scope) {
6107 vla_sp_loc = saved_vlas_in_scope ? block_vla_sp_loc : vla_sp_root_loc;
6108 vla_sp_restore();
6110 vlas_in_scope = saved_vlas_in_scope;
6112 next();
6113 } else if (tok == TOK_RETURN) {
6114 next();
6115 if (tok != ';') {
6116 gexpr();
6117 gen_assign_cast(&func_vt);
6118 if ((func_vt.t & VT_BTYPE) == VT_VOID)
6119 vtop--;
6120 else
6121 gfunc_return(&func_vt);
6123 skip(';');
6124 /* jump unless last stmt in top-level block */
6125 if (tok != '}' || local_scope != 1)
6126 rsym = gjmp(rsym);
6127 nocode_wanted |= 0x20000000;
6128 } else if (tok == TOK_BREAK) {
6129 /* compute jump */
6130 if (!bsym)
6131 tcc_error("cannot break");
6132 *bsym = gjmp(*bsym);
6133 next();
6134 skip(';');
6135 nocode_wanted |= 0x20000000;
6136 } else if (tok == TOK_CONTINUE) {
6137 /* compute jump */
6138 if (!csym)
6139 tcc_error("cannot continue");
6140 vla_sp_restore_root();
6141 *csym = gjmp(*csym);
6142 next();
6143 skip(';');
6144 } else if (tok == TOK_FOR) {
6145 int e;
6146 int saved_nocode_wanted;
6147 nocode_wanted &= ~0x20000000;
6148 next();
6149 skip('(');
6150 s = local_stack;
6151 ++local_scope;
6152 if (tok != ';') {
6153 /* c99 for-loop init decl? */
6154 if (!decl0(VT_LOCAL, 1, NULL)) {
6155 /* no, regular for-loop init expr */
6156 gexpr();
6157 vpop();
6160 skip(';');
6161 d = ind;
6162 c = ind;
6163 vla_sp_restore();
6164 a = 0;
6165 b = 0;
6166 if (tok != ';') {
6167 gexpr();
6168 a = gvtst(1, 0);
6170 skip(';');
6171 if (tok != ')') {
6172 e = gjmp(0);
6173 c = ind;
6174 vla_sp_restore();
6175 gexpr();
6176 vpop();
6177 gjmp_addr(d);
6178 gsym(e);
6180 skip(')');
6181 saved_nocode_wanted = nocode_wanted;
6182 block(&a, &b, 0);
6183 nocode_wanted = saved_nocode_wanted;
6184 gjmp_addr(c);
6185 gsym(a);
6186 gsym_addr(b, c);
6187 --local_scope;
6188 sym_pop(&local_stack, s, 0);
6190 } else
6191 if (tok == TOK_DO) {
6192 int saved_nocode_wanted;
6193 nocode_wanted &= ~0x20000000;
6194 next();
6195 a = 0;
6196 b = 0;
6197 d = ind;
6198 vla_sp_restore();
6199 saved_nocode_wanted = nocode_wanted;
6200 block(&a, &b, 0);
6201 skip(TOK_WHILE);
6202 skip('(');
6203 gsym(b);
6204 if (b)
6205 nocode_wanted = saved_nocode_wanted;
6206 gexpr();
6207 c = gvtst(0, 0);
6208 gsym_addr(c, d);
6209 nocode_wanted = saved_nocode_wanted;
6210 skip(')');
6211 gsym(a);
6212 skip(';');
6213 } else
6214 if (tok == TOK_SWITCH) {
6215 struct switch_t *saved, sw;
6216 int saved_nocode_wanted = nocode_wanted;
6217 SValue switchval;
6218 next();
6219 skip('(');
6220 gexpr();
6221 skip(')');
6222 switchval = *vtop--;
6223 a = 0;
6224 b = gjmp(0); /* jump to first case */
6225 sw.p = NULL; sw.n = 0; sw.def_sym = 0;
6226 saved = cur_switch;
6227 cur_switch = &sw;
6228 block(&a, csym, 0);
6229 nocode_wanted = saved_nocode_wanted;
6230 a = gjmp(a); /* add implicit break */
6231 /* case lookup */
6232 gsym(b);
6233 qsort(sw.p, sw.n, sizeof(void*), case_cmp);
6234 for (b = 1; b < sw.n; b++)
6235 if (sw.p[b - 1]->v2 >= sw.p[b]->v1)
6236 tcc_error("duplicate case value");
6237 /* Our switch table sorting is signed, so the compared
6238 value needs to be as well when it's 64bit. */
6239 if ((switchval.type.t & VT_BTYPE) == VT_LLONG)
6240 switchval.type.t &= ~VT_UNSIGNED;
6241 vpushv(&switchval);
6242 gcase(sw.p, sw.n, &a);
6243 vpop();
6244 if (sw.def_sym)
6245 gjmp_addr(sw.def_sym);
6246 dynarray_reset(&sw.p, &sw.n);
6247 cur_switch = saved;
6248 /* break label */
6249 gsym(a);
6250 } else
6251 if (tok == TOK_CASE) {
6252 struct case_t *cr = tcc_malloc(sizeof(struct case_t));
6253 if (!cur_switch)
6254 expect("switch");
6255 nocode_wanted &= ~0x20000000;
6256 next();
6257 cr->v1 = cr->v2 = expr_const64();
6258 if (gnu_ext && tok == TOK_DOTS) {
6259 next();
6260 cr->v2 = expr_const64();
6261 if (cr->v2 < cr->v1)
6262 tcc_warning("empty case range");
6264 cr->sym = ind;
6265 dynarray_add(&cur_switch->p, &cur_switch->n, cr);
6266 skip(':');
6267 is_expr = 0;
6268 goto block_after_label;
6269 } else
6270 if (tok == TOK_DEFAULT) {
6271 next();
6272 skip(':');
6273 if (!cur_switch)
6274 expect("switch");
6275 if (cur_switch->def_sym)
6276 tcc_error("too many 'default'");
6277 cur_switch->def_sym = ind;
6278 is_expr = 0;
6279 goto block_after_label;
6280 } else
6281 if (tok == TOK_GOTO) {
6282 next();
6283 if (tok == '*' && gnu_ext) {
6284 /* computed goto */
6285 next();
6286 gexpr();
6287 if ((vtop->type.t & VT_BTYPE) != VT_PTR)
6288 expect("pointer");
6289 ggoto();
6290 } else if (tok >= TOK_UIDENT) {
6291 s = label_find(tok);
6292 /* put forward definition if needed */
6293 if (!s) {
6294 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
6295 } else {
6296 if (s->r == LABEL_DECLARED)
6297 s->r = LABEL_FORWARD;
6299 vla_sp_restore_root();
6300 if (s->r & LABEL_FORWARD)
6301 s->jnext = gjmp(s->jnext);
6302 else
6303 gjmp_addr(s->jnext);
6304 next();
6305 } else {
6306 expect("label identifier");
6308 skip(';');
6309 } else if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
6310 asm_instr();
6311 } else {
6312 b = is_label();
6313 if (b) {
6314 /* label case */
6315 next();
6316 s = label_find(b);
6317 if (s) {
6318 if (s->r == LABEL_DEFINED)
6319 tcc_error("duplicate label '%s'", get_tok_str(s->v, NULL));
6320 gsym(s->jnext);
6321 s->r = LABEL_DEFINED;
6322 } else {
6323 s = label_push(&global_label_stack, b, LABEL_DEFINED);
6325 s->jnext = ind;
6326 vla_sp_restore();
6327 /* we accept this, but it is a mistake */
6328 block_after_label:
6329 nocode_wanted &= ~0x20000000;
6330 if (tok == '}') {
6331 tcc_warning("deprecated use of label at end of compound statement");
6332 } else {
6333 if (is_expr)
6334 vpop();
6335 block(bsym, csym, is_expr);
6337 } else {
6338 /* expression case */
6339 if (tok != ';') {
6340 if (is_expr) {
6341 vpop();
6342 gexpr();
6343 } else {
6344 gexpr();
6345 vpop();
6348 skip(';');
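/* Note on the 0x20000000 bit used throughout block(): it marks code
   following an unconditional transfer (return, break, a statically
   not-taken 'if' branch, ...) so that no code is emitted for it; it is
   cleared again at loop headers, 'case'/'default' and ordinary labels,
   where control can re-enter. */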
6353 /* This skips over a stream of tokens containing balanced {} and ()
6354 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
6355 with a '{'). If STR is non-NULL, the skipped tokens are allocated
6356 and stored in *STR. This doesn't check if () and {} are nested correctly,
6357 i.e. "({)}" is accepted. */
6358 static void skip_or_save_block(TokenString **str)
6360 int braces = tok == '{';
6361 int level = 0;
6362 if (str)
6363 *str = tok_str_alloc();
6365 while ((level > 0 || (tok != '}' && tok != ',' && tok != ';' && tok != ')'))) {
6366 int t;
6367 if (tok == TOK_EOF) {
6368 if (str || level > 0)
6369 tcc_error("unexpected end of file");
6370 else
6371 break;
6373 if (str)
6374 tok_str_add_tok(*str);
6375 t = tok;
6376 next();
6377 if (t == '{' || t == '(') {
6378 level++;
6379 } else if (t == '}' || t == ')') {
6380 level--;
6381 if (level == 0 && braces && t == '}')
6382 break;
6385 if (str) {
6386 tok_str_add(*str, -1);
6387 tok_str_add(*str, 0);
6391 #define EXPR_CONST 1
6392 #define EXPR_ANY 2
6394 static void parse_init_elem(int expr_type)
6396 int saved_global_expr;
6397 switch(expr_type) {
6398 case EXPR_CONST:
6399 /* compound literals must be allocated globally in this case */
6400 saved_global_expr = global_expr;
6401 global_expr = 1;
6402 expr_const1();
6403 global_expr = saved_global_expr;
6404 /* NOTE: symbols are accepted, as well as lvalues of anonymous symbols
6405 (compound literals). */
6406 if (((vtop->r & (VT_VALMASK | VT_LVAL)) != VT_CONST
6407 && ((vtop->r & (VT_SYM|VT_LVAL)) != (VT_SYM|VT_LVAL)
6408 || vtop->sym->v < SYM_FIRST_ANOM))
6409 #ifdef TCC_TARGET_PE
6410 || ((vtop->r & VT_SYM) && vtop->sym->a.dllimport)
6411 #endif
6413 tcc_error("initializer element is not constant");
6414 break;
6415 case EXPR_ANY:
6416 expr_eq();
6417 break;
6421 /* put zeros for variable based init */
6422 static void init_putz(Section *sec, unsigned long c, int size)
6424 if (sec) {
6425 /* nothing to do because globals are already set to zero */
6426 } else {
6427 vpush_global_sym(&func_old_type, TOK_memset);
6428 vseti(VT_LOCAL, c);
6429 #ifdef TCC_TARGET_ARM
6430 vpushs(size);
6431 vpushi(0);
6432 #else
6433 vpushi(0);
6434 vpushs(size);
6435 #endif
6436 gfunc_call(3);
6440 /* 'type' is the array or struct type. 'c' is the array or struct
6441 address. cur_field is the pointer to the current
6442 field, for arrays the 'c' member contains the current start
6443 index. 'size_only' is true if only size info is needed (only used
6444 in arrays). al contains the already initialized length of the
6445 current container (starting at c). This returns the new length of that. */
6446 static int decl_designator(CType *type, Section *sec, unsigned long c,
6447 Sym **cur_field, int size_only, int al)
6449 Sym *s, *f;
6450 int index, index_last, align, l, nb_elems, elem_size;
6451 unsigned long corig = c;
6453 elem_size = 0;
6454 nb_elems = 1;
6455 if (gnu_ext && (l = is_label()) != 0)
6456 goto struct_field;
6457 /* NOTE: we only support ranges for the last designator */
6458 while (nb_elems == 1 && (tok == '[' || tok == '.')) {
6459 if (tok == '[') {
6460 if (!(type->t & VT_ARRAY))
6461 expect("array type");
6462 next();
6463 index = index_last = expr_const();
6464 if (tok == TOK_DOTS && gnu_ext) {
6465 next();
6466 index_last = expr_const();
6468 skip(']');
6469 s = type->ref;
6470 if (index < 0 || (s->c >= 0 && index_last >= s->c) ||
6471 index_last < index)
6472 tcc_error("invalid index");
6473 if (cur_field)
6474 (*cur_field)->c = index_last;
6475 type = pointed_type(type);
6476 elem_size = type_size(type, &align);
6477 c += index * elem_size;
6478 nb_elems = index_last - index + 1;
6479 } else {
6480 next();
6481 l = tok;
6482 struct_field:
6483 next();
6484 if ((type->t & VT_BTYPE) != VT_STRUCT)
6485 expect("struct/union type");
6486 f = find_field(type, l);
6487 if (!f)
6488 expect("field");
6489 if (cur_field)
6490 *cur_field = f;
6491 type = &f->type;
6492 c += f->c;
6494 cur_field = NULL;
6496 if (!cur_field) {
6497 if (tok == '=') {
6498 next();
6499 } else if (!gnu_ext) {
6500 expect("=");
6502 } else {
6503 if (type->t & VT_ARRAY) {
6504 index = (*cur_field)->c;
6505 if (type->ref->c >= 0 && index >= type->ref->c)
6506 tcc_error("index too large");
6507 type = pointed_type(type);
6508 c += index * type_size(type, &align);
6509 } else {
6510 f = *cur_field;
6511 while (f && (f->v & SYM_FIRST_ANOM) && (f->type.t & VT_BITFIELD))
6512 *cur_field = f = f->next;
6513 if (!f)
6514 tcc_error("too many field init");
6515 type = &f->type;
6516 c += f->c;
6519 /* must put zero in holes (note that doing it that way
6520 ensures that it even works with designators) */
6521 if (!size_only && c - corig > al)
6522 init_putz(sec, corig + al, c - corig - al);
6523 decl_initializer(type, sec, c, 0, size_only);
6525 /* XXX: make it more general */
6526 if (!size_only && nb_elems > 1) {
6527 unsigned long c_end;
6528 uint8_t *src, *dst;
6529 int i;
6531 if (!sec) {
6532 vset(type, VT_LOCAL|VT_LVAL, c);
6533 for (i = 1; i < nb_elems; i++) {
6534 vset(type, VT_LOCAL|VT_LVAL, c + elem_size * i);
6535 vswap();
6536 vstore();
6538 vpop();
6539 } else if (!NODATA_WANTED) {
6540 c_end = c + nb_elems * elem_size;
6541 if (c_end > sec->data_allocated)
6542 section_realloc(sec, c_end);
6543 src = sec->data + c;
6544 dst = src;
6545 for(i = 1; i < nb_elems; i++) {
6546 dst += elem_size;
6547 memcpy(dst, src, elem_size);
6551 c += nb_elems * type_size(type, &align);
6552 if (c - corig > al)
6553 al = c - corig;
6554 return al;
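/* Designator examples handled above (field names are placeholders):
       struct S s = { .b = 2, .a = 1 };        - field designators
       int t[8] = { [2] = 5, [4 ... 6] = 7 };  - index and GNU range designators
   For the GNU range form the element is initialized once and then
   copied over the remaining nb_elems-1 slots (memcpy into the section
   for static data, vset/vstore for locals). */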
6557 /* store a value or an expression directly in global data or in local array */
6558 static void init_putv(CType *type, Section *sec, unsigned long c)
6560 int bt;
6561 void *ptr;
6562 CType dtype;
6564 dtype = *type;
6565 dtype.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
6567 if (sec) {
6568 int size, align;
6569 /* XXX: not portable */
6570 /* XXX: generate error if incorrect relocation */
6571 gen_assign_cast(&dtype);
6572 bt = type->t & VT_BTYPE;
6574 if ((vtop->r & VT_SYM)
6575 && bt != VT_PTR
6576 && bt != VT_FUNC
6577 && (bt != (PTR_SIZE == 8 ? VT_LLONG : VT_INT)
6578 || (type->t & VT_BITFIELD))
6579 && !((vtop->r & VT_CONST) && vtop->sym->v >= SYM_FIRST_ANOM)
6581 tcc_error("initializer element is not computable at load time");
6583 if (NODATA_WANTED) {
6584 vtop--;
6585 return;
6588 size = type_size(type, &align);
6589 section_reserve(sec, c + size);
6590 ptr = sec->data + c;
6592 /* XXX: make code faster ? */
6593 if ((vtop->r & (VT_SYM|VT_CONST)) == (VT_SYM|VT_CONST) &&
6594 vtop->sym->v >= SYM_FIRST_ANOM &&
6595 /* XXX This rejects compound literals like
6596 '(void *){ptr}'. The problem is that '&sym' is
6597 represented the same way, which would be ruled out
6598 by the SYM_FIRST_ANOM check above, but also '"string"'
6599 in 'char *p = "string"' is represented the same
6600 with the type being VT_PTR and the symbol being an
6601 anonymous one. That is, there's no difference in vtop
6602 between '(void *){x}' and '&(void *){x}'. Ignore
6603 pointer typed entities here. Hopefully no real code
6604 will ever use compound literals with scalar type. */
6605 (vtop->type.t & VT_BTYPE) != VT_PTR) {
6606 /* These come from compound literals, memcpy stuff over. */
6607 Section *ssec;
6608 ElfSym *esym;
6609 ElfW_Rel *rel;
6610 esym = elfsym(vtop->sym);
6611 ssec = tcc_state->sections[esym->st_shndx];
6612 memmove (ptr, ssec->data + esym->st_value, size);
6613 if (ssec->reloc) {
6614 /* We need to copy over all memory contents, and that
6615 includes relocations. Use the fact that relocs are
6616 created in order, so look from the end of relocs
6617 until we hit one before the copied region. */
6618 int num_relocs = ssec->reloc->data_offset / sizeof(*rel);
6619 rel = (ElfW_Rel*)(ssec->reloc->data + ssec->reloc->data_offset);
6620 while (num_relocs--) {
6621 rel--;
6622 if (rel->r_offset >= esym->st_value + size)
6623 continue;
6624 if (rel->r_offset < esym->st_value)
6625 break;
6626 /* Note: if the same fields are initialized multiple
6627 times (possible with designators) then we possibly
6628 add multiple relocations for the same offset here.
6629 That would lead to wrong code, the last reloc needs
6630 to win. We clean this up later after the whole
6631 initializer is parsed. */
6632 put_elf_reloca(symtab_section, sec,
6633 c + rel->r_offset - esym->st_value,
6634 ELFW(R_TYPE)(rel->r_info),
6635 ELFW(R_SYM)(rel->r_info),
6636 #if PTR_SIZE == 8
6637 rel->r_addend
6638 #else
6639 0
6640 #endif
6644 } else {
6645 if (type->t & VT_BITFIELD) {
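/* illustrative example (editor's annotation, not from the original
   source): for
       struct { unsigned a : 3, b : 5; } s = { 5, 17 };
   each constant is written into the target bytes under a mask, in
   chunks of at most 8 bits, using the bit position and width taken
   from the value's bit-field type. */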
6646 int bit_pos, bit_size, bits, n;
6647 unsigned char *p, v, m;
6648 bit_pos = BIT_POS(vtop->type.t);
6649 bit_size = BIT_SIZE(vtop->type.t);
6650 p = (unsigned char*)ptr + (bit_pos >> 3);
6651 bit_pos &= 7, bits = 0;
6652 while (bit_size) {
6653 n = 8 - bit_pos;
6654 if (n > bit_size)
6655 n = bit_size;
6656 v = vtop->c.i >> bits << bit_pos;
6657 m = ((1 << n) - 1) << bit_pos;
6658 *p = (*p & ~m) | (v & m);
6659 bits += n, bit_size -= n, bit_pos = 0, ++p;
6661 } else
6662 switch(bt) {
6663 /* XXX: when cross-compiling we assume that each type has the
6664 same representation on host and target, which is likely to
6665 be wrong in the case of long double */
6666 case VT_BOOL:
6667 vtop->c.i = vtop->c.i != 0;
6668 case VT_BYTE:
6669 *(char *)ptr |= vtop->c.i;
6670 break;
6671 case VT_SHORT:
6672 *(short *)ptr |= vtop->c.i;
6673 break;
6674 case VT_FLOAT:
6675 *(float*)ptr = vtop->c.f;
6676 break;
6677 case VT_DOUBLE:
6678 *(double *)ptr = vtop->c.d;
6679 break;
6680 case VT_LDOUBLE:
6681 #if defined TCC_IS_NATIVE_387
6682 if (sizeof (long double) >= 10) /* zero pad ten-byte LD */
6683 memcpy(ptr, &vtop->c.ld, 10);
6684 #ifdef __TINYC__
6685 else if (sizeof (long double) == sizeof (double))
6686 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr) : "m" (vtop->c.ld));
6687 #endif
6688 else if (vtop->c.ld == 0.0)
6689 ;
6690 else
6691 #endif
6692 if (sizeof(long double) == LDOUBLE_SIZE)
6693 *(long double*)ptr = vtop->c.ld;
6694 else if (sizeof(double) == LDOUBLE_SIZE)
6695 *(double *)ptr = (double)vtop->c.ld;
6696 else
6697 tcc_error("can't cross compile long double constants");
6698 break;
6699 #if PTR_SIZE != 8
6700 case VT_LLONG:
6701 *(long long *)ptr |= vtop->c.i;
6702 break;
6703 #else
6704 case VT_LLONG:
6705 #endif
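/* illustrative example (editor's annotation, not from the original
   source): an address constant such as
       static int x;
       static int *p = &x;
   reaches the VT_PTR case with VT_SYM set, so an R_DATA_PTR
   relocation against 'x' is emitted instead of (or, on 32-bit
   targets, in addition to) storing a plain constant. */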
6706 case VT_PTR:
6708 addr_t val = vtop->c.i;
6709 #if PTR_SIZE == 8
6710 if (vtop->r & VT_SYM)
6711 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
6712 else
6713 *(addr_t *)ptr |= val;
6714 #else
6715 if (vtop->r & VT_SYM)
6716 greloc(sec, vtop->sym, c, R_DATA_PTR);
6717 *(addr_t *)ptr |= val;
6718 #endif
6719 break;
6721 default:
6723 int val = vtop->c.i;
6724 #if PTR_SIZE == 8
6725 if (vtop->r & VT_SYM)
6726 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
6727 else
6728 *(int *)ptr |= val;
6729 #else
6730 if (vtop->r & VT_SYM)
6731 greloc(sec, vtop->sym, c, R_DATA_PTR);
6732 *(int *)ptr |= val;
6733 #endif
6734 break;
6738 vtop--;
6739 } else {
6740 vset(&dtype, VT_LOCAL|VT_LVAL, c);
6741 vswap();
6742 vstore();
6743 vpop();
6747 /* 't' contains the type and storage info. 'c' is the offset of the
6748 object in section 'sec'. If 'sec' is NULL, it means stack based
6749 allocation. 'first' is true if array '{' must be read (multi
6750 dimension implicit array init handling). 'size_only' is true if
6751 size-only evaluation is wanted (only for arrays). */
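/* illustrative example (editor's annotation, not from the original
   source): for
       int a[] = { 1, 2, 3 };
   this function is first run with size_only set so the array length
   can be deduced before storage is allocated, and then a second time
   to actually store the values. */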
6752 static void decl_initializer(CType *type, Section *sec, unsigned long c,
6753 int first, int size_only)
6755 int len, n, no_oblock, nb, i;
6756 int size1, align1;
6757 int have_elem;
6758 Sym *s, *f;
6759 Sym indexsym;
6760 CType *t1;
6762 /* If we are currently at a '}' or ',' we have read an initializer
6763 element in one of our callers, and not yet consumed it. */
6764 have_elem = tok == '}' || tok == ',';
6765 if (!have_elem && tok != '{' &&
6766 /* In case of strings we have special handling for arrays, so
6767 don't consume them as an initializer value (which would commit them
6768 to some anonymous symbol). */
6769 tok != TOK_LSTR && tok != TOK_STR &&
6770 !size_only) {
6771 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
6772 have_elem = 1;
6775 if (have_elem &&
6776 !(type->t & VT_ARRAY) &&
6777 /* Use i_c_parameter_t to strip toplevel qualifiers.
6778 The source type might have VT_CONSTANT set, which is
6779 of course assignable to non-const elements. */
6780 is_compatible_unqualified_types(type, &vtop->type)) {
6781 init_putv(type, sec, c);
6782 } else if (type->t & VT_ARRAY) {
6783 s = type->ref;
6784 n = s->c;
6785 t1 = pointed_type(type);
6786 size1 = type_size(t1, &align1);
6788 no_oblock = 1;
6789 if ((first && tok != TOK_LSTR && tok != TOK_STR) ||
6790 tok == '{') {
6791 if (tok != '{')
6792 tcc_error("character array initializer must be a literal,"
6793 " optionally enclosed in braces");
6794 skip('{');
6795 no_oblock = 0;
6798 /* only parse strings here if the type is correct (otherwise handle
6799 them as ((w)char *) expressions) */
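/* illustrative example (editor's annotation, not from the original
   source):
       char s[] = "abc";   the literal is copied into the array here
       char *p = "abc";    the literal is handled as an ordinary
                           (char *) expression further below */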
6800 if ((tok == TOK_LSTR &&
6801 #ifdef TCC_TARGET_PE
6802 (t1->t & VT_BTYPE) == VT_SHORT && (t1->t & VT_UNSIGNED)
6803 #else
6804 (t1->t & VT_BTYPE) == VT_INT
6805 #endif
6806 ) || (tok == TOK_STR && (t1->t & VT_BTYPE) == VT_BYTE)) {
6807 len = 0;
6808 while (tok == TOK_STR || tok == TOK_LSTR) {
6809 int cstr_len, ch;
6811 /* compute maximum number of chars wanted */
6812 if (tok == TOK_STR)
6813 cstr_len = tokc.str.size;
6814 else
6815 cstr_len = tokc.str.size / sizeof(nwchar_t);
6816 cstr_len--;
6817 nb = cstr_len;
6818 if (n >= 0 && nb > (n - len))
6819 nb = n - len;
6820 if (!size_only) {
6821 if (cstr_len > nb)
6822 tcc_warning("initializer-string for array is too long");
6823 /* in order to go faster for the common case (char
6824 string in a global variable) we handle it
6825 specifically */
6826 if (sec && tok == TOK_STR && size1 == 1) {
6827 if (!NODATA_WANTED)
6828 memcpy(sec->data + c + len, tokc.str.data, nb);
6829 } else {
6830 for(i=0;i<nb;i++) {
6831 if (tok == TOK_STR)
6832 ch = ((unsigned char *)tokc.str.data)[i];
6833 else
6834 ch = ((nwchar_t *)tokc.str.data)[i];
6835 vpushi(ch);
6836 init_putv(t1, sec, c + (len + i) * size1);
6840 len += nb;
6841 next();
6843 /* only add trailing zero if enough storage (no
6844 warning in this case since it is standard) */
6845 if (n < 0 || len < n) {
6846 if (!size_only) {
6847 vpushi(0);
6848 init_putv(t1, sec, c + (len * size1));
6850 len++;
6852 len *= size1;
6853 } else {
6854 indexsym.c = 0;
6855 f = &indexsym;
6857 do_init_list:
6858 len = 0;
6859 while (tok != '}' || have_elem) {
6860 len = decl_designator(type, sec, c, &f, size_only, len);
6861 have_elem = 0;
6862 if (type->t & VT_ARRAY) {
6863 ++indexsym.c;
6864 /* special test for multi-dimensional arrays (may not
6865 be strictly correct if designators are used at the
6866 same time) */
6867 if (no_oblock && len >= n*size1)
6868 break;
6869 } else {
6870 if (s->type.t == VT_UNION)
6871 f = NULL;
6872 else
6873 f = f->next;
6874 if (no_oblock && f == NULL)
6875 break;
6878 if (tok == '}')
6879 break;
6880 skip(',');
6883 /* put zeros at the end */
6884 if (!size_only && len < n*size1)
6885 init_putz(sec, c + len, n*size1 - len);
6886 if (!no_oblock)
6887 skip('}');
6888 /* patch type size if needed, which happens only for array types */
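/* e.g. for 'int a[] = { 1, 2, 3 };' n is still < 0 at this point
   and the element count is computed from the number of initializer
   bytes actually written (editor's annotation, not from the
   original source). */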
6889 if (n < 0)
6890 s->c = size1 == 1 ? len : ((len + size1 - 1)/size1);
6891 } else if ((type->t & VT_BTYPE) == VT_STRUCT) {
6892 size1 = 1;
6893 no_oblock = 1;
6894 if (first || tok == '{') {
6895 skip('{');
6896 no_oblock = 0;
6898 s = type->ref;
6899 f = s->next;
6900 n = s->c;
6901 goto do_init_list;
6902 } else if (tok == '{') {
6903 next();
6904 decl_initializer(type, sec, c, first, size_only);
6905 skip('}');
6906 } else if (size_only) {
6907 /* If we supported only ISO C we wouldn't have to accept calling
6908 this on anything other than an array with size_only==1 (and even then
6909 only on the outermost level, so no recursion would be needed),
6910 because initializing a flex array member isn't supported.
6911 But GNU C supports it, so we need to recurse even into
6912 subfields of structs and arrays when size_only is set. */
6913 /* just skip expression */
6914 skip_or_save_block(NULL);
6915 } else {
6916 if (!have_elem) {
6917 /* This should happen only when we haven't parsed
6918 the init element above for fear of committing a
6919 string constant to memory too early. */
6920 if (tok != TOK_STR && tok != TOK_LSTR)
6921 expect("string constant");
6922 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
6924 init_putv(type, sec, c);
6928 /* parse an initializer for type 't' if 'has_init' is non zero, and
6929 allocate space in local or global data space ('r' is either
6930 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
6931 variable 'v' of scope 'scope' is declared before initializers
6932 are parsed. If 'v' is zero, then a reference to the new object
6933 is put on the value stack. If 'has_init' is 2, special parsing
6934 is done to handle string constants. */
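/* illustrative example (editor's annotation, not from the original
   source): an ordinary declaration such as
       int a[] = { 1, 2 };
   arrives here with has_init == 1, while the anonymous object that
   backs a string literal is allocated with has_init == 2 so that
   only string tokens are collected below. */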
6935 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r,
6936 int has_init, int v, int scope)
6938 int size, align, addr;
6939 TokenString *init_str = NULL;
6941 Section *sec;
6942 Sym *flexible_array;
6943 Sym *sym = NULL;
6944 int saved_nocode_wanted = nocode_wanted;
6945 #ifdef CONFIG_TCC_BCHECK
6946 int bcheck;
6947 #endif
6949 /* Always allocate static or global variables */
6950 if (v && (r & VT_VALMASK) == VT_CONST)
6951 nocode_wanted |= 0x80000000;
6953 #ifdef CONFIG_TCC_BCHECK
6954 bcheck = tcc_state->do_bounds_check && !NODATA_WANTED;
6955 #endif
6957 flexible_array = NULL;
6958 if ((type->t & VT_BTYPE) == VT_STRUCT) {
6959 Sym *field = type->ref->next;
6960 if (field) {
6961 while (field->next)
6962 field = field->next;
6963 if (field->type.t & VT_ARRAY && field->type.ref->c < 0)
6964 flexible_array = field;
6968 size = type_size(type, &align);
6969 /* If unknown size, we must evaluate it before
6970 evaluating initializers because
6971 initializers can generate global data too
6972 (e.g. string pointers or ISOC99 compound
6973 literals). It also simplifies local
6974 initializer handling. */
6975 if (size < 0 || (flexible_array && has_init)) {
6976 if (!has_init)
6977 tcc_error("unknown type size");
6978 /* get all init string */
6979 if (has_init == 2) {
6980 init_str = tok_str_alloc();
6981 /* only get strings */
6982 while (tok == TOK_STR || tok == TOK_LSTR) {
6983 tok_str_add_tok(init_str);
6984 next();
6986 tok_str_add(init_str, -1);
6987 tok_str_add(init_str, 0);
6988 } else {
6989 skip_or_save_block(&init_str);
6991 unget_tok(0);
6993 /* compute size */
6994 begin_macro(init_str, 1);
6995 next();
6996 decl_initializer(type, NULL, 0, 1, 1);
6997 /* prepare second initializer parsing */
6998 macro_ptr = init_str->str;
6999 next();
7001 /* if still unknown size, error */
7002 size = type_size(type, &align);
7003 if (size < 0)
7004 tcc_error("unknown type size");
7006 /* If there's a flex member and it was used in the initializer,
7007 adjust the size. */
7008 if (flexible_array &&
7009 flexible_array->type.ref->c > 0)
7010 size += flexible_array->type.ref->c
7011 * pointed_size(&flexible_array->type);
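/* illustrative example (editor's annotation, not from the original
   source): for
       struct S { int n; int data[]; };
       static struct S s = { 2, { 10, 20 } };
   the size-only pass reports 2 elements for the flexible member, so
   2 * sizeof(int) is added to the struct size here. */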
7012 /* take into account specified alignment if bigger */
7013 if (ad->a.aligned) {
7014 int speca = 1 << (ad->a.aligned - 1);
7015 if (speca > align)
7016 align = speca;
7017 } else if (ad->a.packed) {
7018 align = 1;
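/* illustrative example (editor's annotation, not from the original
   source):
       __attribute__((aligned(16))) static char buf[3];
   raises 'align' to 16 here, while __attribute__((packed)) forces
   it down to 1. */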
7021 if (!v && NODATA_WANTED)
7022 size = 0, align = 1;
7024 if ((r & VT_VALMASK) == VT_LOCAL) {
7025 sec = NULL;
7026 #ifdef CONFIG_TCC_BCHECK
7027 if (bcheck && (type->t & VT_ARRAY)) {
7028 loc--;
7030 #endif
7031 loc = (loc - size) & -align;
7032 addr = loc;
7033 #ifdef CONFIG_TCC_BCHECK
7034 /* handles bounds */
7035 /* XXX: currently, since we do only one pass, we cannot track
7036 '&' operators, so we add only arrays */
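/* illustrative example (editor's annotation, not from the original
   source): when compiling with -b, a local array such as
       int arr[8];
   gets one byte of padding on each side plus an (address, size)
   pair in lbounds_section so the runtime checker knows its extent. */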
7037 if (bcheck && (type->t & VT_ARRAY)) {
7038 addr_t *bounds_ptr;
7039 /* add padding between regions */
7040 loc--;
7041 /* then add local bound info */
7042 bounds_ptr = section_ptr_add(lbounds_section, 2 * sizeof(addr_t));
7043 bounds_ptr[0] = addr;
7044 bounds_ptr[1] = size;
7046 #endif
7047 if (v) {
7048 /* local variable */
7049 #ifdef CONFIG_TCC_ASM
7050 if (ad->asm_label) {
7051 int reg = asm_parse_regvar(ad->asm_label);
7052 if (reg >= 0)
7053 r = (r & ~VT_VALMASK) | reg;
7055 #endif
7056 sym = sym_push(v, type, r, addr);
7057 sym->a = ad->a;
7058 } else {
7059 /* push local reference */
7060 vset(type, r, addr);
7062 } else {
7063 if (v && scope == VT_CONST) {
7064 /* see if the symbol was already defined */
7065 sym = sym_find(v);
7066 if (sym) {
7067 patch_storage(sym, ad, type);
7068 /* we accept several definitions of the same global variable. */
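/* e.g. a tentative 'int x;' followed later by 'int x = 3;' at file
   scope is accepted; the initialized definition simply updates the
   same symbol (editor's annotation, not from the original source). */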
7069 if (!has_init && sym->c && elfsym(sym)->st_shndx != SHN_UNDEF)
7070 goto no_alloc;
7074 /* allocate symbol in corresponding section */
7075 sec = ad->section;
7076 if (!sec) {
7077 if (has_init)
7078 sec = data_section;
7079 else if (tcc_state->nocommon)
7080 sec = bss_section;
7083 if (sec) {
7084 addr = section_add(sec, size, align);
7085 #ifdef CONFIG_TCC_BCHECK
7086 /* add padding if bound check */
7087 if (bcheck)
7088 section_add(sec, 1, 1);
7089 #endif
7090 } else {
7091 addr = align; /* SHN_COMMON is special, symbol value is align */
7092 sec = common_section;
7095 if (v) {
7096 if (!sym) {
7097 sym = sym_push(v, type, r | VT_SYM, 0);
7098 patch_storage(sym, ad, NULL);
7100 /* Local statics have a scope until now (for
7101 warnings), remove it here. */
7102 sym->sym_scope = 0;
7103 /* update symbol definition */
7104 put_extern_sym(sym, sec, addr, size);
7105 } else {
7106 /* push global reference */
7107 sym = get_sym_ref(type, sec, addr, size);
7108 vpushsym(type, sym);
7109 vtop->r |= r;
7112 #ifdef CONFIG_TCC_BCHECK
7113 /* handles bounds now because the symbol must be defined
7114 before the relocation can refer to it */
7115 if (bcheck) {
7116 addr_t *bounds_ptr;
7118 greloca(bounds_section, sym, bounds_section->data_offset, R_DATA_PTR, 0);
7119 /* then add global bound info */
7120 bounds_ptr = section_ptr_add(bounds_section, 2 * sizeof(addr_t));
7121 bounds_ptr[0] = 0; /* relocated */
7122 bounds_ptr[1] = size;
7124 #endif
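/* illustrative example (editor's annotation, not from the original
   source): a variable length array such as
       void f(int n) { int a[n]; ... }
   takes the branch below: the stack pointer is saved once per scope,
   the runtime size n * sizeof(int) is computed, and the allocation
   is done by adjusting the stack pointer at run time. */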
7127 if (type->t & VT_VLA) {
7128 int a;
7130 if (NODATA_WANTED)
7131 goto no_alloc;
7133 /* save current stack pointer */
7134 if (vlas_in_scope == 0) {
7135 if (vla_sp_root_loc == -1)
7136 vla_sp_root_loc = (loc -= PTR_SIZE);
7137 gen_vla_sp_save(vla_sp_root_loc);
7140 vla_runtime_type_size(type, &a);
7141 gen_vla_alloc(type, a);
7142 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
7143 /* on _WIN64, because of the function args scratch area, the
7144 result of alloca differs from RSP and is returned in RAX. */
7145 gen_vla_result(addr), addr = (loc -= PTR_SIZE);
7146 #endif
7147 gen_vla_sp_save(addr);
7148 vla_sp_loc = addr;
7149 vlas_in_scope++;
7151 } else if (has_init) {
7152 size_t oldreloc_offset = 0;
7153 if (sec && sec->reloc)
7154 oldreloc_offset = sec->reloc->data_offset;
7155 decl_initializer(type, sec, addr, 1, 0);
7156 if (sec && sec->reloc)
7157 squeeze_multi_relocs(sec, oldreloc_offset);
7158 /* patch the flexible array member size back to -1 for
7159 possible subsequent similar declarations */
7160 if (flexible_array)
7161 flexible_array->type.ref->c = -1;
7164 no_alloc:
7165 /* restore parse state if needed */
7166 if (init_str) {
7167 end_macro();
7168 next();
7171 nocode_wanted = saved_nocode_wanted;
7174 /* parse a function defined by symbol 'sym' and generate its code in
7175 'cur_text_section' */
7176 static void gen_function(Sym *sym)
7178 nocode_wanted = 0;
7179 ind = cur_text_section->data_offset;
7180 if (sym->a.aligned) {
7181 size_t newoff = section_add(cur_text_section, 0,
7182 1 << (sym->a.aligned - 1));
7183 gen_fill_nops(newoff - ind);
7185 /* NOTE: we patch the symbol size later */
7186 put_extern_sym(sym, cur_text_section, ind, 0);
7187 funcname = get_tok_str(sym->v, NULL);
7188 func_ind = ind;
7189 /* Initialize VLA state */
7190 vla_sp_loc = -1;
7191 vla_sp_root_loc = -1;
7192 /* put debug symbol */
7193 tcc_debug_funcstart(tcc_state, sym);
7194 /* push a dummy symbol to enable local sym storage */
7195 sym_push2(&local_stack, SYM_FIELD, 0, 0);
7196 local_scope = 1; /* for function parameters */
7197 gfunc_prolog(&sym->type);
7198 local_scope = 0;
7199 rsym = 0;
7200 block(NULL, NULL, 0);
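/* editor's annotation (not from the original source): per C99
   5.1.2.2.3, reaching the closing brace of
       int main(void) { }
   must behave like 'return 0;', which is why a zero return value is
   synthesized below when control can fall off the end of main. */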
7201 if (!(nocode_wanted & 0x20000000)
7202 && ((func_vt.t & VT_BTYPE) == VT_INT)
7203 && !strcmp (funcname, "main"))
7205 nocode_wanted = 0;
7206 vpushi(0);
7207 gen_assign_cast(&func_vt);
7208 gfunc_return(&func_vt);
7210 nocode_wanted = 0;
7211 gsym(rsym);
7212 gfunc_epilog();
7213 cur_text_section->data_offset = ind;
7214 label_pop(&global_label_stack, NULL, 0);
7215 /* reset local stack */
7216 local_scope = 0;
7217 sym_pop(&local_stack, NULL, 0);
7218 /* end of function */
7219 /* patch symbol size */
7220 elfsym(sym)->st_size = ind - func_ind;
7221 tcc_debug_funcend(tcc_state, ind - func_ind);
7222 /* It's better to crash than to generate wrong code */
7223 cur_text_section = NULL;
7224 funcname = ""; /* for safety */
7225 func_vt.t = VT_VOID; /* for safety */
7226 func_var = 0; /* for safety */
7227 ind = 0; /* for safety */
7228 nocode_wanted = 0x80000000;
7229 check_vstack();
7232 static void gen_inline_functions(TCCState *s)
7234 Sym *sym;
7235 int inline_generated, i, ln;
7236 struct InlineFunc *fn;
7238 ln = file->line_num;
7239 /* iterate while inline functions are referenced */
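/* illustrative example (editor's annotation, not from the original
   source): with
       static inline int sq(int x) { return x * x; }
       static inline int quad(int x) { return sq(x) * sq(x); }
   generating quad() may create the first reference to sq(), so the
   loop repeats until a pass emits no new inline function. */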
7240 do {
7241 inline_generated = 0;
7242 for (i = 0; i < s->nb_inline_fns; ++i) {
7243 fn = s->inline_fns[i];
7244 sym = fn->sym;
7245 if (sym && sym->c) {
7246 /* the function was used: generate its code and
7247 convert it to a normal function */
7248 fn->sym = NULL;
7249 if (file)
7250 pstrcpy(file->filename, sizeof file->filename, fn->filename);
7251 sym->type.t &= ~VT_INLINE;
7253 begin_macro(fn->func_str, 1);
7254 next();
7255 cur_text_section = text_section;
7256 gen_function(sym);
7257 end_macro();
7259 inline_generated = 1;
7262 } while (inline_generated);
7263 file->line_num = ln;
7266 ST_FUNC void free_inline_functions(TCCState *s)
7268 int i;
7269 /* free tokens of unused inline functions */
7270 for (i = 0; i < s->nb_inline_fns; ++i) {
7271 struct InlineFunc *fn = s->inline_fns[i];
7272 if (fn->sym)
7273 tok_str_free(fn->func_str);
7275 dynarray_reset(&s->inline_fns, &s->nb_inline_fns);
7278 /* 'l' is VT_LOCAL or VT_CONST to define the default storage type, or VT_CMP
7279 if parsing an old style parameter decl list (FUNC_SYM is set in that case) */
7280 static int decl0(int l, int is_for_loop_init, Sym *func_sym)
7282 int v, has_init, r;
7283 CType type, btype;
7284 Sym *sym;
7285 AttributeDef ad;
7287 while (1) {
7288 if (!parse_btype(&btype, &ad)) {
7289 if (is_for_loop_init)
7290 return 0;
7291 /* skip redundant ';' if not in old parameter decl scope */
7292 if (tok == ';' && l != VT_CMP) {
7293 next();
7294 continue;
7296 if (l != VT_CONST)
7297 break;
7298 if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
7299 /* global asm block */
7300 asm_global_instr();
7301 continue;
7303 if (tok >= TOK_UIDENT) {
7304 /* special test for old K&R protos without explicit int
7305 type. Only accepted when defining global data */
7306 btype.t = VT_INT;
7307 } else {
7308 if (tok != TOK_EOF)
7309 expect("declaration");
7310 break;
7313 if (tok == ';') {
7314 if ((btype.t & VT_BTYPE) == VT_STRUCT) {
7315 int v = btype.ref->v;
7316 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) >= SYM_FIRST_ANOM)
7317 tcc_warning("unnamed struct/union that defines no instances");
7318 next();
7319 continue;
7321 if (IS_ENUM(btype.t)) {
7322 next();
7323 continue;
7326 while (1) { /* iterate thru each declaration */
7327 type = btype;
7328 /* If the base type itself was an array type of unspecified
7329 size (like in 'typedef int arr[]; arr x = {1};') then
7330 we will overwrite the unknown size with the real one for
7331 this decl. We need to unshare the ref symbol holding
7332 that size. */
7333 if ((type.t & VT_ARRAY) && type.ref->c < 0) {
7334 type.ref = sym_push(SYM_FIELD, &type.ref->type, 0, type.ref->c);
7336 type_decl(&type, &ad, &v, TYPE_DIRECT);
7337 #if 0
7339 char buf[500];
7340 type_to_str(buf, sizeof(buf), &type, get_tok_str(v, NULL));
7341 printf("type = '%s'\n", buf);
7343 #endif
7344 if ((type.t & VT_BTYPE) == VT_FUNC) {
7345 if ((type.t & VT_STATIC) && (l == VT_LOCAL)) {
7346 tcc_error("function without file scope cannot be static");
7348 /* if old style function prototype, we accept a
7349 declaration list */
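/* illustrative example (editor's annotation, not from the original
   source): this covers K&R definitions such as
       int f(a, b)
           int a;
           double b;
       { return a; }
   where decl0() is re-entered with l == VT_CMP to parse the
   parameter declaration list. */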
7350 sym = type.ref;
7351 if (sym->f.func_type == FUNC_OLD && l == VT_CONST)
7352 decl0(VT_CMP, 0, sym);
7355 if (gnu_ext && (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
7356 ad.asm_label = asm_label_instr();
7357 /* parse one last attribute list, after asm label */
7358 parse_attribute(&ad);
7359 if (tok == '{')
7360 expect(";");
7363 #ifdef TCC_TARGET_PE
7364 if (ad.a.dllimport || ad.a.dllexport) {
7365 if (type.t & (VT_STATIC|VT_TYPEDEF))
7366 tcc_error("cannot have dll linkage with static or typedef");
7367 if (ad.a.dllimport) {
7368 if ((type.t & VT_BTYPE) == VT_FUNC)
7369 ad.a.dllimport = 0;
7370 else
7371 type.t |= VT_EXTERN;
7374 #endif
7375 if (tok == '{') {
7376 if (l != VT_CONST)
7377 tcc_error("cannot use local functions");
7378 if ((type.t & VT_BTYPE) != VT_FUNC)
7379 expect("function definition");
7381 /* reject abstract declarators in function definitions and
7382 make old style params without a decl have int type */
7383 sym = type.ref;
7384 while ((sym = sym->next) != NULL) {
7385 if (!(sym->v & ~SYM_FIELD))
7386 expect("identifier");
7387 if (sym->type.t == VT_VOID)
7388 sym->type = int_type;
7391 /* XXX: cannot do better now: convert extern inline to static inline */
7392 if ((type.t & (VT_EXTERN | VT_INLINE)) == (VT_EXTERN | VT_INLINE))
7393 type.t = (type.t & ~VT_EXTERN) | VT_STATIC;
7395 /* put function symbol */
7396 sym = external_global_sym(v, &type, 0);
7397 type.t &= ~VT_EXTERN;
7398 patch_storage(sym, &ad, &type);
7400 /* static inline functions are just recorded as a kind
7401 of macro. Their code will be emitted at the end of
7402 the compilation unit only if they are used */
7403 if ((type.t & (VT_INLINE | VT_STATIC)) ==
7404 (VT_INLINE | VT_STATIC)) {
7405 struct InlineFunc *fn;
7406 const char *filename;
7408 filename = file ? file->filename : "";
7409 fn = tcc_malloc(sizeof *fn + strlen(filename));
7410 strcpy(fn->filename, filename);
7411 fn->sym = sym;
7412 skip_or_save_block(&fn->func_str);
7413 dynarray_add(&tcc_state->inline_fns,
7414 &tcc_state->nb_inline_fns, fn);
7415 } else {
7416 /* compute text section */
7417 cur_text_section = ad.section;
7418 if (!cur_text_section)
7419 cur_text_section = text_section;
7420 gen_function(sym);
7422 break;
7423 } else {
7424 if (l == VT_CMP) {
7425 /* find parameter in function parameter list */
7426 for (sym = func_sym->next; sym; sym = sym->next)
7427 if ((sym->v & ~SYM_FIELD) == v)
7428 goto found;
7429 tcc_error("declaration for parameter '%s' but no such parameter",
7430 get_tok_str(v, NULL));
7431 found:
7432 if (type.t & VT_STORAGE) /* 'register' is okay */
7433 tcc_error("storage class specified for '%s'",
7434 get_tok_str(v, NULL));
7435 if (sym->type.t != VT_VOID)
7436 tcc_error("redefinition of parameter '%s'",
7437 get_tok_str(v, NULL));
7438 convert_parameter_type(&type);
7439 sym->type = type;
7440 } else if (type.t & VT_TYPEDEF) {
7441 /* save typedefed type */
7442 /* XXX: test storage specifiers ? */
7443 sym = sym_find(v);
7444 if (sym && sym->sym_scope == local_scope) {
7445 if (!is_compatible_types(&sym->type, &type)
7446 || !(sym->type.t & VT_TYPEDEF))
7447 tcc_error("incompatible redefinition of '%s'",
7448 get_tok_str(v, NULL));
7449 sym->type = type;
7450 } else {
7451 sym = sym_push(v, &type, 0, 0);
7453 sym->a = ad.a;
7454 sym->f = ad.f;
7455 } else if ((type.t & VT_BTYPE) == VT_VOID
7456 && !(type.t & VT_EXTERN)) {
7457 tcc_error("declaration of void object");
7458 } else {
7459 r = 0;
7460 if ((type.t & VT_BTYPE) == VT_FUNC) {
7461 /* external function definition */
7462 /* specific case for func_call attribute */
7463 type.ref->f = ad.f;
7464 } else if (!(type.t & VT_ARRAY)) {
7465 /* not lvalue if array */
7466 r |= lvalue_type(type.t);
7468 has_init = (tok == '=');
7469 if (has_init && (type.t & VT_VLA))
7470 tcc_error("variable length array cannot be initialized");
7471 if (((type.t & VT_EXTERN) && (!has_init || l != VT_CONST)) ||
7472 ((type.t & VT_BTYPE) == VT_FUNC) ||
7473 ((type.t & VT_ARRAY) && (type.t & VT_STATIC) &&
7474 !has_init && l == VT_CONST && type.ref->c < 0)) {
7475 /* external variable or function */
7476 /* NOTE: as in GCC, uninitialized global static
7477 arrays of unspecified size are considered
7478 extern */
7479 type.t |= VT_EXTERN;
7480 sym = external_sym(v, &type, r, &ad);
7481 if (ad.alias_target) {
7482 ElfSym *esym;
7483 Sym *alias_target;
7484 alias_target = sym_find(ad.alias_target);
7485 esym = elfsym(alias_target);
7486 if (!esym)
7487 tcc_error("unsupported forward __alias__ attribute");
7488 /* Local statics have a scope until now (for
7489 warnings), remove it here. */
7490 sym->sym_scope = 0;
7491 put_extern_sym2(sym, esym->st_shndx, esym->st_value, esym->st_size, 0);
7493 } else {
7494 if (type.t & VT_STATIC)
7495 r |= VT_CONST;
7496 else
7497 r |= l;
7498 if (has_init)
7499 next();
7500 else if (l == VT_CONST)
7501 /* uninitialized global variables may be overridden */
7502 type.t |= VT_EXTERN;
7503 decl_initializer_alloc(&type, &ad, r, has_init, v, l);
7506 if (tok != ',') {
7507 if (is_for_loop_init)
7508 return 1;
7509 skip(';');
7510 break;
7512 next();
7514 ad.a.aligned = 0;
7517 return 0;
7520 static void decl(int l)
7522 decl0(l, 0, NULL);
7525 /* ------------------------------------------------------------------------- */