tccasm: Don't abuse dllexport/dllimport
[tinycc.git] / tccgen.c
blob a86103c9952e60a1d50597b29808af27dbda6901
1 /*
2 * TCC - Tiny C Compiler
3 *
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 #include "tcc.h"
23 /********************************************************/
24 /* global variables */
26 /* loc : local variable index
27 ind : output code index
28 rsym: return symbol
29 anon_sym: anonymous symbol index
31 ST_DATA int rsym, anon_sym, ind, loc;
33 ST_DATA Sym *sym_free_first;
34 ST_DATA void **sym_pools;
35 ST_DATA int nb_sym_pools;
37 ST_DATA Sym *global_stack;
38 ST_DATA Sym *local_stack;
39 ST_DATA Sym *define_stack;
40 ST_DATA Sym *global_label_stack;
41 ST_DATA Sym *local_label_stack;
42 static int local_scope;
43 static int in_sizeof;
44 static int section_sym;
46 ST_DATA int vlas_in_scope; /* number of VLAs that are currently in scope */
47 ST_DATA int vla_sp_root_loc; /* vla_sp_loc for SP before any VLAs were pushed */
48 ST_DATA int vla_sp_loc; /* stack slot (loc offset) where the stack pointer is saved when it is modified for a VLA */
50 ST_DATA SValue __vstack[1+VSTACK_SIZE], *vtop, *pvtop;
52 ST_DATA int const_wanted; /* true if constant wanted */
53 ST_DATA int nocode_wanted; /* no code generation wanted */
54 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
55 #define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */
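/* nocode_wanted doubles as counter and flag word: a positive count
   suppresses both code and static data (NODATA_WANTED), while the
   high bit 0x80000000, set at file scope in tccgen_compile(), only
   suppresses code, so global/static data can still be emitted
   (STATIC_DATA_WANTED). */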
56 ST_DATA int global_expr; /* true if compound literals must be allocated globally (used during initializer parsing) */
57 ST_DATA CType func_vt; /* current function return type (used by return instruction) */
58 ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
59 ST_DATA int func_vc;
60 ST_DATA int last_line_num, last_ind, func_ind; /* debug last line number and pc */
61 ST_DATA const char *funcname;
62 ST_DATA int g_debug;
64 ST_DATA CType char_pointer_type, func_old_type, int_type, size_type, ptrdiff_type;
66 ST_DATA struct switch_t {
67 struct case_t {
68 int64_t v1, v2;
69 int sym;
70 } **p; int n; /* list of case ranges */
71 int def_sym; /* default symbol */
72 } *cur_switch; /* current switch */
74 /* ------------------------------------------------------------------------- */
76 static void gen_cast(CType *type);
77 static void gen_cast_s(int t);
78 static inline CType *pointed_type(CType *type);
79 static int is_compatible_types(CType *type1, CType *type2);
80 static int parse_btype(CType *type, AttributeDef *ad);
81 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td);
82 static void parse_expr_type(CType *type);
83 static void init_putv(CType *type, Section *sec, unsigned long c);
84 static void decl_initializer(CType *type, Section *sec, unsigned long c, int first, int size_only);
85 static void block(int *bsym, int *csym, int is_expr);
86 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
87 static void decl(int l);
88 static int decl0(int l, int is_for_loop_init, Sym *);
89 static void expr_eq(void);
90 static void vla_runtime_type_size(CType *type, int *a);
91 static void vla_sp_restore(void);
92 static void vla_sp_restore_root(void);
93 static int is_compatible_unqualified_types(CType *type1, CType *type2);
94 static inline int64_t expr_const64(void);
95 static void vpush64(int ty, unsigned long long v);
96 static void vpush(CType *type);
97 static int gvtst(int inv, int t);
98 static void gen_inline_functions(TCCState *s);
99 static void skip_or_save_block(TokenString **str);
100 static void gv_dup(void);
102 ST_INLN int is_float(int t)
104 int bt;
105 bt = t & VT_BTYPE;
106 return bt == VT_LDOUBLE || bt == VT_DOUBLE || bt == VT_FLOAT || bt == VT_QFLOAT;
109 /* we use our own 'finite' function to avoid potential problems with
110    non-standard math libs */
111 /* XXX: endianness dependent */
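/* On little-endian targets p[1] holds the high word of the IEEE-754
   double: OR-ing with 0x800fffff sets every bit except the 11
   exponent bits, so the +1 wraps to 0 only when the exponent is all
   ones (Inf/NaN); bit 31 of the sum is therefore 1 exactly for
   finite values. */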
112 ST_FUNC int ieee_finite(double d)
114 int p[4];
115 memcpy(p, &d, sizeof(double));
116 return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
119 /* compiling intel long double natively */
120 #if (defined __i386__ || defined __x86_64__) \
121 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
122 # define TCC_IS_NATIVE_387
123 #endif
125 ST_FUNC void test_lvalue(void)
127 if (!(vtop->r & VT_LVAL))
128 expect("lvalue");
131 ST_FUNC void check_vstack(void)
133 if (pvtop != vtop)
134 tcc_error("internal compiler error: vstack leak (%d)", vtop - pvtop);
137 /* ------------------------------------------------------------------------- */
138 /* vstack debugging aid */
140 #if 0
141 void pv (const char *lbl, int a, int b)
143 int i;
144 for (i = a; i < a + b; ++i) {
145 SValue *p = &vtop[-i];
146 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
147 lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
150 #endif
152 /* ------------------------------------------------------------------------- */
153 /* start of translation unit info */
154 ST_FUNC void tcc_debug_start(TCCState *s1)
156 if (s1->do_debug) {
157 char buf[512];
159 /* file info: full path + filename */
160 section_sym = put_elf_sym(symtab_section, 0, 0,
161 ELFW(ST_INFO)(STB_LOCAL, STT_SECTION), 0,
162 text_section->sh_num, NULL);
163 getcwd(buf, sizeof(buf));
164 #ifdef _WIN32
165 normalize_slashes(buf);
166 #endif
167 pstrcat(buf, sizeof(buf), "/");
168 put_stabs_r(buf, N_SO, 0, 0,
169 text_section->data_offset, text_section, section_sym);
170 put_stabs_r(file->filename, N_SO, 0, 0,
171 text_section->data_offset, text_section, section_sym);
172 last_ind = 0;
173 last_line_num = 0;
176 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
177 symbols can be safely used */
178 put_elf_sym(symtab_section, 0, 0,
179 ELFW(ST_INFO)(STB_LOCAL, STT_FILE), 0,
180 SHN_ABS, file->filename);
183 /* put end of translation unit info */
184 ST_FUNC void tcc_debug_end(TCCState *s1)
186 if (!s1->do_debug)
187 return;
188 put_stabs_r(NULL, N_SO, 0, 0,
189 text_section->data_offset, text_section, section_sym);
193 /* generate line number info */
194 ST_FUNC void tcc_debug_line(TCCState *s1)
196 if (!s1->do_debug)
197 return;
198 if ((last_line_num != file->line_num || last_ind != ind)) {
199 put_stabn(N_SLINE, 0, file->line_num, ind - func_ind);
200 last_ind = ind;
201 last_line_num = file->line_num;
205 /* put function symbol */
206 ST_FUNC void tcc_debug_funcstart(TCCState *s1, Sym *sym)
208 char buf[512];
210 if (!s1->do_debug)
211 return;
213 /* stabs info */
214 /* XXX: we put here a dummy type */
215 snprintf(buf, sizeof(buf), "%s:%c1",
216 funcname, sym->type.t & VT_STATIC ? 'f' : 'F');
217 put_stabs_r(buf, N_FUN, 0, file->line_num, 0,
218 cur_text_section, sym->c);
219     /* gdb wants a line number entry at the start of the function */
220 put_stabn(N_SLINE, 0, file->line_num, 0);
222 last_ind = 0;
223 last_line_num = 0;
226 /* put function size */
227 ST_FUNC void tcc_debug_funcend(TCCState *s1, int size)
229 if (!s1->do_debug)
230 return;
231 put_stabn(N_FUN, 0, 0, size);
234 /* ------------------------------------------------------------------------- */
235 ST_FUNC int tccgen_compile(TCCState *s1)
237 cur_text_section = NULL;
238 funcname = "";
239 anon_sym = SYM_FIRST_ANOM;
240 section_sym = 0;
241 const_wanted = 0;
242 nocode_wanted = 0x80000000;
244 /* define some often used types */
245 int_type.t = VT_INT;
246 char_pointer_type.t = VT_BYTE;
247 mk_pointer(&char_pointer_type);
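    /* pick the representation of size_t/ptrdiff_t according to the
       target data model: 32-bit targets (PTR_SIZE == 4), LLP64
       targets where long stays 32-bit (LONG_SIZE == 4, e.g. Win64),
       and LP64 targets where VT_LONG is carried together with
       VT_LLONG. */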
248 #if PTR_SIZE == 4
249 size_type.t = VT_INT | VT_UNSIGNED;
250 ptrdiff_type.t = VT_INT;
251 #elif LONG_SIZE == 4
252 size_type.t = VT_LLONG | VT_UNSIGNED;
253 ptrdiff_type.t = VT_LLONG;
254 #else
255 size_type.t = VT_LONG | VT_LLONG | VT_UNSIGNED;
256 ptrdiff_type.t = VT_LONG | VT_LLONG;
257 #endif
258 func_old_type.t = VT_FUNC;
259 func_old_type.ref = sym_push(SYM_FIELD, &int_type, 0, 0);
260 func_old_type.ref->f.func_call = FUNC_CDECL;
261 func_old_type.ref->f.func_type = FUNC_OLD;
263 tcc_debug_start(s1);
265 #ifdef TCC_TARGET_ARM
266 arm_init(s1);
267 #endif
269 #ifdef INC_DEBUG
270 printf("%s: **** new file\n", file->filename);
271 #endif
273 parse_flags = PARSE_FLAG_PREPROCESS | PARSE_FLAG_TOK_NUM | PARSE_FLAG_TOK_STR;
274 next();
275 decl(VT_CONST);
276 gen_inline_functions(s1);
277 check_vstack();
279 #ifdef CONFIG_TCC_ASM
280 asm_free_labels(s1);
281 #endif
283 /* end of translation unit info */
284 tcc_debug_end(s1);
285 return 0;
288 /* ------------------------------------------------------------------------- */
289 /* apply storage attributes to Elf symbol */
291 static void update_storage(Sym *sym)
293 ElfW(Sym) *esym;
294 if (0 == sym->c)
295 return;
296 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
297 if (sym->a.visibility)
298 esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1))
299 | sym->a.visibility;
300 if (sym->a.weak)
301 esym->st_info = ELFW(ST_INFO)(STB_WEAK, ELFW(ST_TYPE)(esym->st_info));
302 #ifdef TCC_TARGET_PE
303 if (sym->a.dllimport)
304 esym->st_other |= ST_PE_IMPORT;
305 if (sym->a.dllexport)
306 esym->st_other |= ST_PE_EXPORT;
307 #endif
308 #if 0
309 printf("storage %s: vis=%d weak=%d exp=%d imp=%d\n",
310 get_tok_str(sym->v, NULL),
311 sym->a.visibility,
312 sym->a.weak,
313 sym->a.dllexport,
314 sym->a.dllimport
316 #endif
319 /* ------------------------------------------------------------------------- */
320 /* update sym->c so that it points to an external symbol in section
321 'section' with value 'value' */
323 ST_FUNC void put_extern_sym2(Sym *sym, Section *section,
324 addr_t value, unsigned long size,
325 int can_add_underscore)
327 int sym_type, sym_bind, sh_num, info, other, t;
328 ElfW(Sym) *esym;
329 const char *name;
330 char buf1[256];
331 #ifdef CONFIG_TCC_BCHECK
332 char buf[32];
333 #endif
335 if (section == NULL)
336 sh_num = SHN_UNDEF;
337 else if (section == SECTION_ABS)
338 sh_num = SHN_ABS;
339 else
340 sh_num = section->sh_num;
342 if (!sym->c) {
343 name = get_tok_str(sym->v, NULL);
344 #ifdef CONFIG_TCC_BCHECK
345 if (tcc_state->do_bounds_check) {
346 /* XXX: avoid doing that for statics ? */
347 /* if bound checking is activated, we change some function
348 names by adding the "__bound" prefix */
349 switch(sym->v) {
350 #ifdef TCC_TARGET_PE
351 /* XXX: we rely only on malloc hooks */
352 case TOK_malloc:
353 case TOK_free:
354 case TOK_realloc:
355 case TOK_memalign:
356 case TOK_calloc:
357 #endif
358 case TOK_memcpy:
359 case TOK_memmove:
360 case TOK_memset:
361 case TOK_strlen:
362 case TOK_strcpy:
363 case TOK_alloca:
364 strcpy(buf, "__bound_");
365 strcat(buf, name);
366 name = buf;
367 break;
370 #endif
371 t = sym->type.t;
372 if ((t & VT_BTYPE) == VT_FUNC) {
373 sym_type = STT_FUNC;
374 } else if ((t & VT_BTYPE) == VT_VOID) {
375 sym_type = STT_NOTYPE;
376 } else {
377 sym_type = STT_OBJECT;
379 if (t & VT_STATIC)
380 sym_bind = STB_LOCAL;
381 else
382 sym_bind = STB_GLOBAL;
383 other = 0;
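        /* decorate the symbol name as required by the target: on PE,
           stdcall functions are emitted as "_name@N" with N the size
           of the arguments in bytes; independently, a leading
           underscore is prepended on targets that use
           underscore-prefixed C symbols. */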
384 #ifdef TCC_TARGET_PE
385 if (sym_type == STT_FUNC && sym->type.ref) {
386 Sym *ref = sym->type.ref;
387 if (ref->f.func_call == FUNC_STDCALL && can_add_underscore) {
388 sprintf(buf1, "_%s@%d", name, ref->f.func_args * PTR_SIZE);
389 name = buf1;
390 other |= ST_PE_STDCALL;
391 can_add_underscore = 0;
394 #endif
395 if (tcc_state->leading_underscore && can_add_underscore) {
396 buf1[0] = '_';
397 pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
398 name = buf1;
400 if (sym->asm_label)
401 name = get_tok_str(sym->asm_label, NULL);
402 info = ELFW(ST_INFO)(sym_bind, sym_type);
403 sym->c = set_elf_sym(symtab_section, value, size, info, other, sh_num, name);
404 } else {
405 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
406 esym->st_value = value;
407 esym->st_size = size;
408 esym->st_shndx = sh_num;
410 update_storage(sym);
413 ST_FUNC void put_extern_sym(Sym *sym, Section *section,
414 addr_t value, unsigned long size)
416 put_extern_sym2(sym, section, value, size, 1);
419 /* add a new relocation entry to symbol 'sym' in section 's' */
420 ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
421 addr_t addend)
423 int c = 0;
425 if (nocode_wanted && s == cur_text_section)
426 return;
428 if (sym) {
429 if (0 == sym->c)
430 put_extern_sym(sym, NULL, 0, 0);
431 c = sym->c;
434 /* now we can add ELF relocation info */
435 put_elf_reloca(symtab_section, s, offset, type, c, addend);
438 #if PTR_SIZE == 4
439 ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
441 greloca(s, sym, offset, type, 0);
443 #endif
445 /* ------------------------------------------------------------------------- */
446 /* symbol allocator */
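/* Syms are allocated SYM_POOL_NB at a time and kept on a simple free
   list (sym_free_first); sym_free() just pushes them back.  With
   SYM_DEBUG defined, plain tcc_malloc/tcc_free are used instead so
   that memory checkers can track individual symbols. */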
447 static Sym *__sym_malloc(void)
449 Sym *sym_pool, *sym, *last_sym;
450 int i;
452 sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
453 dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);
455 last_sym = sym_free_first;
456 sym = sym_pool;
457 for(i = 0; i < SYM_POOL_NB; i++) {
458 sym->next = last_sym;
459 last_sym = sym;
460 sym++;
462 sym_free_first = last_sym;
463 return last_sym;
466 static inline Sym *sym_malloc(void)
468 Sym *sym;
469 #ifndef SYM_DEBUG
470 sym = sym_free_first;
471 if (!sym)
472 sym = __sym_malloc();
473 sym_free_first = sym->next;
474 return sym;
475 #else
476 sym = tcc_malloc(sizeof(Sym));
477 return sym;
478 #endif
481 ST_INLN void sym_free(Sym *sym)
483 #ifndef SYM_DEBUG
484 sym->next = sym_free_first;
485 sym_free_first = sym;
486 #else
487 tcc_free(sym);
488 #endif
491 /* push, without hashing */
492 ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, int c)
494 Sym *s;
496 s = sym_malloc();
497 memset(s, 0, sizeof *s);
498 s->v = v;
499 s->type.t = t;
500 s->c = c;
501 /* add in stack */
502 s->prev = *ps;
503 *ps = s;
504 return s;
507 /* find a symbol and return its associated structure. 's' is the top
508 of the symbol stack */
509 ST_FUNC Sym *sym_find2(Sym *s, int v)
511 while (s) {
512 if (s->v == v)
513 return s;
514 else if (s->v == -1)
515 return NULL;
516 s = s->prev;
518 return NULL;
521 /* structure lookup */
522 ST_INLN Sym *struct_find(int v)
524 v -= TOK_IDENT;
525 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
526 return NULL;
527 return table_ident[v]->sym_struct;
530 /* find an identifier */
531 ST_INLN Sym *sym_find(int v)
533 v -= TOK_IDENT;
534 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
535 return NULL;
536 return table_ident[v]->sym_identifier;
539 /* push a given symbol on the symbol stack */
540 ST_FUNC Sym *sym_push(int v, CType *type, int r, int c)
542 Sym *s, **ps;
543 TokenSym *ts;
545 if (local_stack)
546 ps = &local_stack;
547 else
548 ps = &global_stack;
549 s = sym_push2(ps, v, type->t, c);
550 s->type.ref = type->ref;
551 s->r = r;
552 /* don't record fields or anonymous symbols */
553 /* XXX: simplify */
554 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
555 /* record symbol in token array */
556 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
557 if (v & SYM_STRUCT)
558 ps = &ts->sym_struct;
559 else
560 ps = &ts->sym_identifier;
561 s->prev_tok = *ps;
562 *ps = s;
563 s->sym_scope = local_scope;
564 if (s->prev_tok && s->prev_tok->sym_scope == s->sym_scope)
565 tcc_error("redeclaration of '%s'",
566 get_tok_str(v & ~SYM_STRUCT, NULL));
568 return s;
571 /* push a global identifier */
572 ST_FUNC Sym *global_identifier_push(int v, int t, int c)
574 Sym *s, **ps;
575 s = sym_push2(&global_stack, v, t, c);
576 /* don't record anonymous symbol */
577 if (v < SYM_FIRST_ANOM) {
578 ps = &table_ident[v - TOK_IDENT]->sym_identifier;
579 /* modify the top most local identifier, so that
580 sym_identifier will point to 's' when popped */
581 while (*ps != NULL)
582 ps = &(*ps)->prev_tok;
583 s->prev_tok = NULL;
584 *ps = s;
586 return s;
589 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
590 pop them yet from the list, but do remove them from the token array. */
591 ST_FUNC void sym_pop(Sym **ptop, Sym *b, int keep)
593 Sym *s, *ss, **ps;
594 TokenSym *ts;
595 int v;
597 s = *ptop;
598 while(s != b) {
599 ss = s->prev;
600 v = s->v;
601 /* remove symbol in token array */
602 /* XXX: simplify */
603 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
604 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
605 if (v & SYM_STRUCT)
606 ps = &ts->sym_struct;
607 else
608 ps = &ts->sym_identifier;
609 *ps = s->prev_tok;
611 if (!keep)
612 sym_free(s);
613 s = ss;
615 if (!keep)
616 *ptop = b;
619 /* ------------------------------------------------------------------------- */
621 static void vsetc(CType *type, int r, CValue *vc)
623 int v;
625 if (vtop >= vstack + (VSTACK_SIZE - 1))
626 tcc_error("memory full (vstack)");
627     /* cannot leave cpu flags live if other instructions are generated. Also
628 avoid leaving VT_JMP anywhere except on the top of the stack
629 because it would complicate the code generator.
631 Don't do this when nocode_wanted. vtop might come from
632 !nocode_wanted regions (see 88_codeopt.c) and transforming
633 it to a register without actually generating code is wrong
634        as its value might still be used for real. All values
635 we push under nocode_wanted will eventually be popped
636 again, so that the VT_CMP/VT_JMP value will be in vtop
637 when code is unsuppressed again.
639 Same logic below in vswap(); */
640 if (vtop >= vstack && !nocode_wanted) {
641 v = vtop->r & VT_VALMASK;
642 if (v == VT_CMP || (v & ~1) == VT_JMP)
643 gv(RC_INT);
646 vtop++;
647 vtop->type = *type;
648 vtop->r = r;
649 vtop->r2 = VT_CONST;
650 vtop->c = *vc;
651 vtop->sym = NULL;
654 ST_FUNC void vswap(void)
656 SValue tmp;
657 /* cannot vswap cpu flags. See comment at vsetc() above */
658 if (vtop >= vstack && !nocode_wanted) {
659 int v = vtop->r & VT_VALMASK;
660 if (v == VT_CMP || (v & ~1) == VT_JMP)
661 gv(RC_INT);
663 tmp = vtop[0];
664 vtop[0] = vtop[-1];
665 vtop[-1] = tmp;
668 /* pop stack value */
669 ST_FUNC void vpop(void)
671 int v;
672 v = vtop->r & VT_VALMASK;
673 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
674 /* for x86, we need to pop the FP stack */
675 if (v == TREG_ST0) {
676 o(0xd8dd); /* fstp %st(0) */
677 } else
678 #endif
679 if (v == VT_JMP || v == VT_JMPI) {
680 /* need to put correct jump if && or || without test */
681 gsym(vtop->c.i);
683 vtop--;
686 /* push a constant of type "type" with a dummy (unspecified) value */
687 ST_FUNC void vpush(CType *type)
689 vset(type, VT_CONST, 0);
692 /* push integer constant */
693 ST_FUNC void vpushi(int v)
695 CValue cval;
696 cval.i = v;
697 vsetc(&int_type, VT_CONST, &cval);
700 /* push a pointer sized constant */
701 static void vpushs(addr_t v)
703 CValue cval;
704 cval.i = v;
705 vsetc(&size_type, VT_CONST, &cval);
708 /* push arbitrary 64bit constant */
709 ST_FUNC void vpush64(int ty, unsigned long long v)
711 CValue cval;
712 CType ctype;
713 ctype.t = ty;
714 ctype.ref = NULL;
715 cval.i = v;
716 vsetc(&ctype, VT_CONST, &cval);
719 /* push long long constant */
720 static inline void vpushll(long long v)
722 vpush64(VT_LLONG, v);
725 ST_FUNC void vset(CType *type, int r, int v)
727 CValue cval;
729 cval.i = v;
730 vsetc(type, r, &cval);
733 static void vseti(int r, int v)
735 CType type;
736 type.t = VT_INT;
737 type.ref = NULL;
738 vset(&type, r, v);
741 ST_FUNC void vpushv(SValue *v)
743 if (vtop >= vstack + (VSTACK_SIZE - 1))
744 tcc_error("memory full (vstack)");
745 vtop++;
746 *vtop = *v;
749 static void vdup(void)
751 vpushv(vtop);
754 /* rotate n first stack elements to the bottom
755 I1 ... In -> I2 ... In I1 [top is right]
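   e.g. for n = 3 and a stack ... A B C (C on top) the result is ... B C A */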
757 ST_FUNC void vrotb(int n)
759 int i;
760 SValue tmp;
762 tmp = vtop[-n + 1];
763 for(i=-n+1;i!=0;i++)
764 vtop[i] = vtop[i+1];
765 vtop[0] = tmp;
768 /* rotate the n elements before entry e towards the top
769 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
771 ST_FUNC void vrote(SValue *e, int n)
773 int i;
774 SValue tmp;
776 tmp = *e;
777 for(i = 0;i < n - 1; i++)
778 e[-i] = e[-i - 1];
779 e[-n + 1] = tmp;
782 /* rotate n first stack elements to the top
783 I1 ... In -> In I1 ... I(n-1) [top is right]
785 ST_FUNC void vrott(int n)
787 vrote(vtop, n);
790 /* push a symbol value of TYPE */
791 static inline void vpushsym(CType *type, Sym *sym)
793 CValue cval;
794 cval.i = 0;
795 vsetc(type, VT_CONST | VT_SYM, &cval);
796 vtop->sym = sym;
799 /* Return a static symbol pointing to a section */
800 ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
802 int v;
803 Sym *sym;
805 v = anon_sym++;
806 sym = global_identifier_push(v, type->t | VT_STATIC, 0);
807 sym->type.ref = type->ref;
808 sym->r = VT_CONST | VT_SYM;
809 put_extern_sym(sym, sec, offset, size);
810 return sym;
813 /* push a reference to a section offset by adding a dummy symbol */
814 static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
816 vpushsym(type, get_sym_ref(type, sec, offset, size));
819 /* define a new external reference to a symbol 'v' of the given type */
820 ST_FUNC Sym *external_global_sym(int v, CType *type, int r)
822 Sym *s;
824 s = sym_find(v);
825 if (!s) {
826 /* push forward reference */
827 s = global_identifier_push(v, type->t | VT_EXTERN, 0);
828 s->type.ref = type->ref;
829 s->r = r | VT_CONST | VT_SYM;
831 return s;
834 /* Merge some storage attributes. */
835 static void patch_storage(Sym *sym, AttributeDef *ad, CType *type)
837 if (type && !is_compatible_types(&sym->type, type))
838 tcc_error("incompatible types for redefinition of '%s'",
839 get_tok_str(sym->v, NULL));
840 #ifdef TCC_TARGET_PE
841 if (sym->a.dllimport != ad->a.dllimport)
842 tcc_error("incompatible dll linkage for redefinition of '%s'",
843 get_tok_str(sym->v, NULL));
844 #endif
845 sym->a.dllexport |= ad->a.dllexport;
846 sym->a.weak |= ad->a.weak;
847 if (ad->a.visibility) {
848 int vis = sym->a.visibility;
849 int vis2 = ad->a.visibility;
850 if (vis == STV_DEFAULT)
851 vis = vis2;
852 else if (vis2 != STV_DEFAULT)
853 vis = (vis < vis2) ? vis : vis2;
854 sym->a.visibility = vis;
856 if (ad->a.aligned)
857 sym->a.aligned = ad->a.aligned;
858 if (ad->asm_label)
859 sym->asm_label = ad->asm_label;
860 update_storage(sym);
863 /* define a new external reference to a symbol 'v' */
864 static Sym *external_sym(int v, CType *type, int r, AttributeDef *ad)
866 Sym *s;
867 s = sym_find(v);
868 if (!s) {
869 /* push forward reference */
870 s = sym_push(v, type, r | VT_CONST | VT_SYM, 0);
871 s->type.t |= VT_EXTERN;
872 s->a = ad->a;
873 s->sym_scope = 0;
874 } else {
875 if (s->type.ref == func_old_type.ref) {
876 s->type.ref = type->ref;
877 s->r = r | VT_CONST | VT_SYM;
878 s->type.t |= VT_EXTERN;
880 patch_storage(s, ad, type);
882 return s;
885 /* push a reference to global symbol v */
886 ST_FUNC void vpush_global_sym(CType *type, int v)
888 vpushsym(type, external_global_sym(v, type, 0));
891 /* save registers up to (vtop - n) stack entry */
892 ST_FUNC void save_regs(int n)
894 SValue *p, *p1;
895 for(p = vstack, p1 = vtop - n; p <= p1; p++)
896 save_reg(p->r);
899 /* save r to the memory stack, and mark it as being free */
900 ST_FUNC void save_reg(int r)
902 save_reg_upstack(r, 0);
905 /* save r to the memory stack, and mark it as being free,
906 if seen up to (vtop - n) stack entry */
907 ST_FUNC void save_reg_upstack(int r, int n)
909 int l, saved, size, align;
910 SValue *p, *p1, sv;
911 CType *type;
913 if ((r &= VT_VALMASK) >= VT_CONST)
914 return;
915 if (nocode_wanted)
916 return;
918 /* modify all stack values */
919 saved = 0;
920 l = 0;
921 for(p = vstack, p1 = vtop - n; p <= p1; p++) {
922 if ((p->r & VT_VALMASK) == r ||
923 ((p->type.t & VT_BTYPE) == VT_LLONG && (p->r2 & VT_VALMASK) == r)) {
924 /* must save value on stack if not already done */
925 if (!saved) {
926 /* NOTE: must reload 'r' because r might be equal to r2 */
927 r = p->r & VT_VALMASK;
928 /* store register in the stack */
929 type = &p->type;
930 if ((p->r & VT_LVAL) ||
931 (!is_float(type->t) && (type->t & VT_BTYPE) != VT_LLONG))
932 #if PTR_SIZE == 8
933 type = &char_pointer_type;
934 #else
935 type = &int_type;
936 #endif
937 size = type_size(type, &align);
938 loc = (loc - size) & -align;
939 sv.type.t = type->t;
940 sv.r = VT_LOCAL | VT_LVAL;
941 sv.c.i = loc;
942 store(r, &sv);
943 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
944 /* x86 specific: need to pop fp register ST0 if saved */
945 if (r == TREG_ST0) {
946 o(0xd8dd); /* fstp %st(0) */
948 #endif
949 #if PTR_SIZE == 4
950 /* special long long case */
951 if ((type->t & VT_BTYPE) == VT_LLONG) {
952 sv.c.i += 4;
953 store(p->r2, &sv);
955 #endif
956 l = loc;
957 saved = 1;
959 /* mark that stack entry as being saved on the stack */
960 if (p->r & VT_LVAL) {
961 /* also clear the bounded flag because the
962 relocation address of the function was stored in
963 p->c.i */
964 p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
965 } else {
966 p->r = lvalue_type(p->type.t) | VT_LOCAL;
968 p->r2 = VT_CONST;
969 p->c.i = l;
974 #ifdef TCC_TARGET_ARM
975 /* find a register of class 'rc2' with at most one reference on stack.
976 * If none, call get_reg(rc) */
977 ST_FUNC int get_reg_ex(int rc, int rc2)
979 int r;
980 SValue *p;
982 for(r=0;r<NB_REGS;r++) {
983 if (reg_classes[r] & rc2) {
984 int n;
985 n=0;
986 for(p = vstack; p <= vtop; p++) {
987 if ((p->r & VT_VALMASK) == r ||
988 (p->r2 & VT_VALMASK) == r)
989 n++;
991 if (n <= 1)
992 return r;
995 return get_reg(rc);
997 #endif
999 /* find a free register of class 'rc'. If none, save one register */
1000 ST_FUNC int get_reg(int rc)
1002 int r;
1003 SValue *p;
1005 /* find a free register */
1006 for(r=0;r<NB_REGS;r++) {
1007 if (reg_classes[r] & rc) {
1008 if (nocode_wanted)
1009 return r;
1010 for(p=vstack;p<=vtop;p++) {
1011 if ((p->r & VT_VALMASK) == r ||
1012 (p->r2 & VT_VALMASK) == r)
1013 goto notfound;
1015 return r;
1017 notfound: ;
1020 /* no register left : free the first one on the stack (VERY
1021 IMPORTANT to start from the bottom to ensure that we don't
1022 spill registers used in gen_opi()) */
1023 for(p=vstack;p<=vtop;p++) {
1024 /* look at second register (if long long) */
1025 r = p->r2 & VT_VALMASK;
1026 if (r < VT_CONST && (reg_classes[r] & rc))
1027 goto save_found;
1028 r = p->r & VT_VALMASK;
1029 if (r < VT_CONST && (reg_classes[r] & rc)) {
1030 save_found:
1031 save_reg(r);
1032 return r;
1035     /* Should never come here */
1036 return -1;
1039 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1040 if needed */
1041 static void move_reg(int r, int s, int t)
1043 SValue sv;
1045 if (r != s) {
1046 save_reg(r);
1047 sv.type.t = t;
1048 sv.type.ref = NULL;
1049 sv.r = s;
1050 sv.c.i = 0;
1051 load(r, &sv);
1055 /* get address of vtop (vtop MUST BE an lvalue) */
1056 ST_FUNC void gaddrof(void)
1058 vtop->r &= ~VT_LVAL;
1059 /* tricky: if saved lvalue, then we can go back to lvalue */
1060 if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
1061 vtop->r = (vtop->r & ~(VT_VALMASK | VT_LVAL_TYPE)) | VT_LOCAL | VT_LVAL;
1066 #ifdef CONFIG_TCC_BCHECK
1067 /* generate lvalue bound code */
1068 static void gbound(void)
1070 int lval_type;
1071 CType type1;
1073 vtop->r &= ~VT_MUSTBOUND;
1074 /* if lvalue, then use checking code before dereferencing */
1075 if (vtop->r & VT_LVAL) {
1076 /* if not VT_BOUNDED value, then make one */
1077 if (!(vtop->r & VT_BOUNDED)) {
1078 lval_type = vtop->r & (VT_LVAL_TYPE | VT_LVAL);
1079 /* must save type because we must set it to int to get pointer */
1080 type1 = vtop->type;
1081 vtop->type.t = VT_PTR;
1082 gaddrof();
1083 vpushi(0);
1084 gen_bounded_ptr_add();
1085 vtop->r |= lval_type;
1086 vtop->type = type1;
1088 /* then check for dereferencing */
1089 gen_bounded_ptr_deref();
1092 #endif
1094 static void incr_bf_adr(int o)
1096 vtop->type = char_pointer_type;
1097 gaddrof();
1098 vpushi(o);
1099 gen_op('+');
1100 vtop->type.t = (vtop->type.t & ~(VT_BTYPE|VT_DEFSIGN))
1101 | (VT_BYTE|VT_UNSIGNED);
1102 vtop->r = (vtop->r & ~VT_LVAL_TYPE)
1103 | (VT_LVAL_BYTE|VT_LVAL_UNSIGNED|VT_LVAL);
1106 /* single-byte load mode for packed or otherwise unaligned bitfields */
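/* the field is assembled byte by byte: each byte is loaded, shifted
   and masked down to its bits, then OR'ed into an accumulator; for
   signed fields the result is sign-extended at the end with a
   SHL/SAR pair. */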
1107 static void load_packed_bf(CType *type, int bit_pos, int bit_size)
1109 int n, o, bits;
1110 save_reg_upstack(vtop->r, 1);
1111 vpush64(type->t & VT_BTYPE, 0); // B X
1112 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
1113 do {
1114 vswap(); // X B
1115 incr_bf_adr(o);
1116 vdup(); // X B B
1117 n = 8 - bit_pos;
1118 if (n > bit_size)
1119 n = bit_size;
1120 if (bit_pos)
1121 vpushi(bit_pos), gen_op(TOK_SHR), bit_pos = 0; // X B Y
1122 if (n < 8)
1123 vpushi((1 << n) - 1), gen_op('&');
1124 gen_cast(type);
1125 if (bits)
1126 vpushi(bits), gen_op(TOK_SHL);
1127 vrotb(3); // B Y X
1128 gen_op('|'); // B X
1129 bits += n, bit_size -= n, o = 1;
1130 } while (bit_size);
1131 vswap(), vpop();
1132 if (!(type->t & VT_UNSIGNED)) {
1133 n = ((type->t & VT_BTYPE) == VT_LLONG ? 64 : 32) - bits;
1134 vpushi(n), gen_op(TOK_SHL);
1135 vpushi(n), gen_op(TOK_SAR);
1139 /* single-byte store mode for packed or otherwise unaligned bitfields */
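/* read-modify-write one byte at a time: the relevant bits of the new
   value are shifted into position and masked, the old byte is masked
   the other way, and the two parts are OR'ed and stored back. */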
1140 static void store_packed_bf(int bit_pos, int bit_size)
1142 int bits, n, o, m, c;
1144 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1145 vswap(); // X B
1146 save_reg_upstack(vtop->r, 1);
1147 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
1148 do {
1149 incr_bf_adr(o); // X B
1150 vswap(); //B X
1151 c ? vdup() : gv_dup(); // B V X
1152 vrott(3); // X B V
1153 if (bits)
1154 vpushi(bits), gen_op(TOK_SHR);
1155 if (bit_pos)
1156 vpushi(bit_pos), gen_op(TOK_SHL);
1157 n = 8 - bit_pos;
1158 if (n > bit_size)
1159 n = bit_size;
1160 if (n < 8) {
1161 m = ((1 << n) - 1) << bit_pos;
1162 vpushi(m), gen_op('&'); // X B V1
1163 vpushv(vtop-1); // X B V1 B
1164 vpushi(m & 0x80 ? ~m & 0x7f : ~m);
1165 gen_op('&'); // X B V1 B1
1166 gen_op('|'); // X B V2
1168 vdup(), vtop[-1] = vtop[-2]; // X B B V2
1169 vstore(), vpop(); // X B
1170 bits += n, bit_size -= n, bit_pos = 0, o = 1;
1171 } while (bit_size);
1172 vpop(), vpop();
1175 static int adjust_bf(SValue *sv, int bit_pos, int bit_size)
1177 int t;
1178 if (0 == sv->type.ref)
1179 return 0;
1180 t = sv->type.ref->auxtype;
1181 if (t != -1 && t != VT_STRUCT) {
1182 sv->type.t = (sv->type.t & ~VT_BTYPE) | t;
1183 sv->r = (sv->r & ~VT_LVAL_TYPE) | lvalue_type(sv->type.t);
1185 return t;
1188 /* store vtop in a register belonging to class 'rc'. lvalues are
1189    converted to rvalues. Cannot be used if the value cannot be
1190    converted to a register value (such as structures). */
1191 ST_FUNC int gv(int rc)
1193 int r, bit_pos, bit_size, size, align, rc2;
1195 /* NOTE: get_reg can modify vstack[] */
1196 if (vtop->type.t & VT_BITFIELD) {
1197 CType type;
1199 bit_pos = BIT_POS(vtop->type.t);
1200 bit_size = BIT_SIZE(vtop->type.t);
1201 /* remove bit field info to avoid loops */
1202 vtop->type.t &= ~VT_STRUCT_MASK;
1204 type.ref = NULL;
1205 type.t = vtop->type.t & VT_UNSIGNED;
1206 if ((vtop->type.t & VT_BTYPE) == VT_BOOL)
1207 type.t |= VT_UNSIGNED;
1209 r = adjust_bf(vtop, bit_pos, bit_size);
1211 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
1212 type.t |= VT_LLONG;
1213 else
1214 type.t |= VT_INT;
1216 if (r == VT_STRUCT) {
1217 load_packed_bf(&type, bit_pos, bit_size);
1218 } else {
1219 int bits = (type.t & VT_BTYPE) == VT_LLONG ? 64 : 32;
1220 /* cast to int to propagate signedness in following ops */
1221 gen_cast(&type);
1222 /* generate shifts */
1223 vpushi(bits - (bit_pos + bit_size));
1224 gen_op(TOK_SHL);
1225 vpushi(bits - bit_size);
1226 /* NOTE: transformed to SHR if unsigned */
1227 gen_op(TOK_SAR);
1229 r = gv(rc);
1230 } else {
1231 if (is_float(vtop->type.t) &&
1232 (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1233 unsigned long offset;
1234 /* CPUs usually cannot use float constants, so we store them
1235 generically in data segment */
1236 size = type_size(&vtop->type, &align);
1237 if (NODATA_WANTED)
1238 size = 0, align = 1;
1239 offset = section_add(data_section, size, align);
1240 vpush_ref(&vtop->type, data_section, offset, size);
1241 vswap();
1242 init_putv(&vtop->type, data_section, offset);
1243 vtop->r |= VT_LVAL;
1245 #ifdef CONFIG_TCC_BCHECK
1246 if (vtop->r & VT_MUSTBOUND)
1247 gbound();
1248 #endif
1250 r = vtop->r & VT_VALMASK;
1251 rc2 = (rc & RC_FLOAT) ? RC_FLOAT : RC_INT;
1252 #ifndef TCC_TARGET_ARM64
1253 if (rc == RC_IRET)
1254 rc2 = RC_LRET;
1255 #ifdef TCC_TARGET_X86_64
1256 else if (rc == RC_FRET)
1257 rc2 = RC_QRET;
1258 #endif
1259 #endif
1260 /* need to reload if:
1261 - constant
1262 - lvalue (need to dereference pointer)
1263 - already a register, but not in the right class */
1264 if (r >= VT_CONST
1265 || (vtop->r & VT_LVAL)
1266 || !(reg_classes[r] & rc)
1267 #if PTR_SIZE == 8
1268 || ((vtop->type.t & VT_BTYPE) == VT_QLONG && !(reg_classes[vtop->r2] & rc2))
1269 || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT && !(reg_classes[vtop->r2] & rc2))
1270 #else
1271 || ((vtop->type.t & VT_BTYPE) == VT_LLONG && !(reg_classes[vtop->r2] & rc2))
1272 #endif
1275 r = get_reg(rc);
1276 #if PTR_SIZE == 8
1277 if (((vtop->type.t & VT_BTYPE) == VT_QLONG) || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT)) {
1278 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
1279 #else
1280 if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
1281 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
1282 unsigned long long ll;
1283 #endif
1284 int r2, original_type;
1285 original_type = vtop->type.t;
1286 /* two register type load : expand to two words
1287 temporarily */
1288 #if PTR_SIZE == 4
1289 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1290 /* load constant */
1291 ll = vtop->c.i;
1292 vtop->c.i = ll; /* first word */
1293 load(r, vtop);
1294 vtop->r = r; /* save register value */
1295 vpushi(ll >> 32); /* second word */
1296 } else
1297 #endif
1298 if (vtop->r & VT_LVAL) {
1299                 /* We do not want to modify the long long
1300                    pointer here, so the safest (and least
1301 efficient) is to save all the other registers
1302 in the stack. XXX: totally inefficient. */
1303 #if 0
1304 save_regs(1);
1305 #else
1306 /* lvalue_save: save only if used further down the stack */
1307 save_reg_upstack(vtop->r, 1);
1308 #endif
1309 /* load from memory */
1310 vtop->type.t = load_type;
1311 load(r, vtop);
1312 vdup();
1313 vtop[-1].r = r; /* save register value */
1314 /* increment pointer to get second word */
1315 vtop->type.t = addr_type;
1316 gaddrof();
1317 vpushi(load_size);
1318 gen_op('+');
1319 vtop->r |= VT_LVAL;
1320 vtop->type.t = load_type;
1321 } else {
1322 /* move registers */
1323 load(r, vtop);
1324 vdup();
1325 vtop[-1].r = r; /* save register value */
1326 vtop->r = vtop[-1].r2;
1328 /* Allocate second register. Here we rely on the fact that
1329 get_reg() tries first to free r2 of an SValue. */
1330 r2 = get_reg(rc2);
1331 load(r2, vtop);
1332 vpop();
1333 /* write second register */
1334 vtop->r2 = r2;
1335 vtop->type.t = original_type;
1336 } else if ((vtop->r & VT_LVAL) && !is_float(vtop->type.t)) {
1337 int t1, t;
1338 /* lvalue of scalar type : need to use lvalue type
1339 because of possible cast */
1340 t = vtop->type.t;
1341 t1 = t;
1342 /* compute memory access type */
1343 if (vtop->r & VT_LVAL_BYTE)
1344 t = VT_BYTE;
1345 else if (vtop->r & VT_LVAL_SHORT)
1346 t = VT_SHORT;
1347 if (vtop->r & VT_LVAL_UNSIGNED)
1348 t |= VT_UNSIGNED;
1349 vtop->type.t = t;
1350 load(r, vtop);
1351 /* restore wanted type */
1352 vtop->type.t = t1;
1353 } else {
1354 /* one register type load */
1355 load(r, vtop);
1358 vtop->r = r;
1359 #ifdef TCC_TARGET_C67
1360 /* uses register pairs for doubles */
1361 if ((vtop->type.t & VT_BTYPE) == VT_DOUBLE)
1362 vtop->r2 = r+1;
1363 #endif
1365 return r;
1368 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1369 ST_FUNC void gv2(int rc1, int rc2)
1371 int v;
1373 /* generate more generic register first. But VT_JMP or VT_CMP
1374 values must be generated first in all cases to avoid possible
1375 reload errors */
1376 v = vtop[0].r & VT_VALMASK;
1377 if (v != VT_CMP && (v & ~1) != VT_JMP && rc1 <= rc2) {
1378 vswap();
1379 gv(rc1);
1380 vswap();
1381 gv(rc2);
1382 /* test if reload is needed for first register */
1383 if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
1384 vswap();
1385 gv(rc1);
1386 vswap();
1388 } else {
1389 gv(rc2);
1390 vswap();
1391 gv(rc1);
1392 vswap();
1393 /* test if reload is needed for first register */
1394 if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
1395 gv(rc2);
1400 #ifndef TCC_TARGET_ARM64
1401 /* wrapper around RC_FRET to return a register by type */
1402 static int rc_fret(int t)
1404 #ifdef TCC_TARGET_X86_64
1405 if (t == VT_LDOUBLE) {
1406 return RC_ST0;
1408 #endif
1409 return RC_FRET;
1411 #endif
1413 /* wrapper around REG_FRET to return a register by type */
1414 static int reg_fret(int t)
1416 #ifdef TCC_TARGET_X86_64
1417 if (t == VT_LDOUBLE) {
1418 return TREG_ST0;
1420 #endif
1421 return REG_FRET;
1424 #if PTR_SIZE == 4
1425 /* expand 64bit on stack in two ints */
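/* after lexpand() the low 32 bits are in vtop[-1] and the high 32
   bits on top, both typed VT_INT (plus signedness); for lvalues the
   high word is read at offset +4, i.e. little-endian layout. */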
1426 static void lexpand(void)
1428 int u, v;
1429 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1430 v = vtop->r & (VT_VALMASK | VT_LVAL);
1431 if (v == VT_CONST) {
1432 vdup();
1433 vtop[0].c.i >>= 32;
1434 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1435 vdup();
1436 vtop[0].c.i += 4;
1437 } else {
1438 gv(RC_INT);
1439 vdup();
1440 vtop[0].r = vtop[-1].r2;
1441 vtop[0].r2 = vtop[-1].r2 = VT_CONST;
1443 vtop[0].type.t = vtop[-1].type.t = VT_INT | u;
1445 #endif
1447 #ifdef TCC_TARGET_ARM
1448 /* expand long long on stack */
1449 ST_FUNC void lexpand_nr(void)
1451 int u,v;
1453 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1454 vdup();
1455 vtop->r2 = VT_CONST;
1456 vtop->type.t = VT_INT | u;
1457 v=vtop[-1].r & (VT_VALMASK | VT_LVAL);
1458 if (v == VT_CONST) {
1459 vtop[-1].c.i = vtop->c.i;
1460 vtop->c.i = vtop->c.i >> 32;
1461 vtop->r = VT_CONST;
1462 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1463 vtop->c.i += 4;
1464 vtop->r = vtop[-1].r;
1465 } else if (v > VT_CONST) {
1466 vtop--;
1467 lexpand();
1468 } else
1469 vtop->r = vtop[-1].r2;
1470 vtop[-1].r2 = VT_CONST;
1471 vtop[-1].type.t = VT_INT | u;
1473 #endif
1475 #if PTR_SIZE == 4
1476 /* build a long long from two ints */
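/* inverse of lexpand(): expects the low word in vtop[-1] and the
   high word on top, and merges them into one entry using the r/r2
   register pair. */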
1477 static void lbuild(int t)
1479 gv2(RC_INT, RC_INT);
1480 vtop[-1].r2 = vtop[0].r;
1481 vtop[-1].type.t = t;
1482 vpop();
1484 #endif
1486 /* convert stack entry to register and duplicate its value in another
1487 register */
1488 static void gv_dup(void)
1490 int rc, t, r, r1;
1491 SValue sv;
1493 t = vtop->type.t;
1494 #if PTR_SIZE == 4
1495 if ((t & VT_BTYPE) == VT_LLONG) {
1496 if (t & VT_BITFIELD) {
1497 gv(RC_INT);
1498 t = vtop->type.t;
1500 lexpand();
1501 gv_dup();
1502 vswap();
1503 vrotb(3);
1504 gv_dup();
1505 vrotb(4);
1506 /* stack: H L L1 H1 */
1507 lbuild(t);
1508 vrotb(3);
1509 vrotb(3);
1510 vswap();
1511 lbuild(t);
1512 vswap();
1513 } else
1514 #endif
1516 /* duplicate value */
1517 rc = RC_INT;
1518 sv.type.t = VT_INT;
1519 if (is_float(t)) {
1520 rc = RC_FLOAT;
1521 #ifdef TCC_TARGET_X86_64
1522 if ((t & VT_BTYPE) == VT_LDOUBLE) {
1523 rc = RC_ST0;
1525 #endif
1526 sv.type.t = t;
1528 r = gv(rc);
1529 r1 = get_reg(rc);
1530 sv.r = r;
1531 sv.c.i = 0;
1532 load(r1, &sv); /* move r to r1 */
1533 vdup();
1534 /* duplicates value */
1535 if (r != r1)
1536 vtop->r = r1;
1540 /* Generate value test
1542 * Generate a test for any value (jump, comparison and integers) */
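/* The jump emitted is taken when the value is non-zero if inv == 0,
   or when it is zero if inv == 1; the stack entry is popped and the
   resulting jump chain (linked with 't') is returned, to be resolved
   later with gsym(). */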
1543 ST_FUNC int gvtst(int inv, int t)
1545 int v = vtop->r & VT_VALMASK;
1546 if (v != VT_CMP && v != VT_JMP && v != VT_JMPI) {
1547 vpushi(0);
1548 gen_op(TOK_NE);
1550 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1551 /* constant jmp optimization */
1552 if ((vtop->c.i != 0) != inv)
1553 t = gjmp(t);
1554 vtop--;
1555 return t;
1557 return gtst(inv, t);
1560 #if PTR_SIZE == 4
1561 /* generate CPU independent (unsigned) long long operations */
1562 static void gen_opl(int op)
1564 int t, a, b, op1, c, i;
1565 int func;
1566 unsigned short reg_iret = REG_IRET;
1567 unsigned short reg_lret = REG_LRET;
1568 SValue tmp;
1570 switch(op) {
1571 case '/':
1572 case TOK_PDIV:
1573 func = TOK___divdi3;
1574 goto gen_func;
1575 case TOK_UDIV:
1576 func = TOK___udivdi3;
1577 goto gen_func;
1578 case '%':
1579 func = TOK___moddi3;
1580 goto gen_mod_func;
1581 case TOK_UMOD:
1582 func = TOK___umoddi3;
1583 gen_mod_func:
1584 #ifdef TCC_ARM_EABI
1585 reg_iret = TREG_R2;
1586 reg_lret = TREG_R3;
1587 #endif
1588 gen_func:
1589 /* call generic long long function */
1590 vpush_global_sym(&func_old_type, func);
1591 vrott(3);
1592 gfunc_call(2);
1593 vpushi(0);
1594 vtop->r = reg_iret;
1595 vtop->r2 = reg_lret;
1596 break;
1597 case '^':
1598 case '&':
1599 case '|':
1600 case '*':
1601 case '+':
1602 case '-':
1603 //pv("gen_opl A",0,2);
1604 t = vtop->type.t;
1605 vswap();
1606 lexpand();
1607 vrotb(3);
1608 lexpand();
1609 /* stack: L1 H1 L2 H2 */
1610 tmp = vtop[0];
1611 vtop[0] = vtop[-3];
1612 vtop[-3] = tmp;
1613 tmp = vtop[-2];
1614 vtop[-2] = vtop[-3];
1615 vtop[-3] = tmp;
1616 vswap();
1617 /* stack: H1 H2 L1 L2 */
1618 //pv("gen_opl B",0,4);
1619 if (op == '*') {
1620 vpushv(vtop - 1);
1621 vpushv(vtop - 1);
1622 gen_op(TOK_UMULL);
1623 lexpand();
1624 /* stack: H1 H2 L1 L2 ML MH */
1625 for(i=0;i<4;i++)
1626 vrotb(6);
1627 /* stack: ML MH H1 H2 L1 L2 */
1628 tmp = vtop[0];
1629 vtop[0] = vtop[-2];
1630 vtop[-2] = tmp;
1631 /* stack: ML MH H1 L2 H2 L1 */
1632 gen_op('*');
1633 vrotb(3);
1634 vrotb(3);
1635 gen_op('*');
1636 /* stack: ML MH M1 M2 */
1637 gen_op('+');
1638 gen_op('+');
1639 } else if (op == '+' || op == '-') {
1640 /* XXX: add non carry method too (for MIPS or alpha) */
1641 if (op == '+')
1642 op1 = TOK_ADDC1;
1643 else
1644 op1 = TOK_SUBC1;
1645 gen_op(op1);
1646 /* stack: H1 H2 (L1 op L2) */
1647 vrotb(3);
1648 vrotb(3);
1649 gen_op(op1 + 1); /* TOK_xxxC2 */
1650 } else {
1651 gen_op(op);
1652 /* stack: H1 H2 (L1 op L2) */
1653 vrotb(3);
1654 vrotb(3);
1655 /* stack: (L1 op L2) H1 H2 */
1656 gen_op(op);
1657 /* stack: (L1 op L2) (H1 op H2) */
1659 /* stack: L H */
1660 lbuild(t);
1661 break;
1662 case TOK_SAR:
1663 case TOK_SHR:
1664 case TOK_SHL:
1665 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1666 t = vtop[-1].type.t;
1667 vswap();
1668 lexpand();
1669 vrotb(3);
1670 /* stack: L H shift */
1671 c = (int)vtop->c.i;
1672 /* constant: simpler */
1673 /* NOTE: all comments are for SHL. the other cases are
1674 done by swapping words */
1675 vpop();
1676 if (op != TOK_SHL)
1677 vswap();
1678 if (c >= 32) {
1679 /* stack: L H */
1680 vpop();
1681 if (c > 32) {
1682 vpushi(c - 32);
1683 gen_op(op);
1685 if (op != TOK_SAR) {
1686 vpushi(0);
1687 } else {
1688 gv_dup();
1689 vpushi(31);
1690 gen_op(TOK_SAR);
1692 vswap();
1693 } else {
1694 vswap();
1695 gv_dup();
1696 /* stack: H L L */
1697 vpushi(c);
1698 gen_op(op);
1699 vswap();
1700 vpushi(32 - c);
1701 if (op == TOK_SHL)
1702 gen_op(TOK_SHR);
1703 else
1704 gen_op(TOK_SHL);
1705 vrotb(3);
1706 /* stack: L L H */
1707 vpushi(c);
1708 if (op == TOK_SHL)
1709 gen_op(TOK_SHL);
1710 else
1711 gen_op(TOK_SHR);
1712 gen_op('|');
1714 if (op != TOK_SHL)
1715 vswap();
1716 lbuild(t);
1717 } else {
1718 /* XXX: should provide a faster fallback on x86 ? */
1719 switch(op) {
1720 case TOK_SAR:
1721 func = TOK___ashrdi3;
1722 goto gen_func;
1723 case TOK_SHR:
1724 func = TOK___lshrdi3;
1725 goto gen_func;
1726 case TOK_SHL:
1727 func = TOK___ashldi3;
1728 goto gen_func;
1731 break;
1732 default:
1733 /* compare operations */
1734 t = vtop->type.t;
1735 vswap();
1736 lexpand();
1737 vrotb(3);
1738 lexpand();
1739 /* stack: L1 H1 L2 H2 */
1740 tmp = vtop[-1];
1741 vtop[-1] = vtop[-2];
1742 vtop[-2] = tmp;
1743 /* stack: L1 L2 H1 H2 */
1744 /* compare high */
1745 op1 = op;
1746 /* when values are equal, we need to compare low words. since
1747 the jump is inverted, we invert the test too. */
1748 if (op1 == TOK_LT)
1749 op1 = TOK_LE;
1750 else if (op1 == TOK_GT)
1751 op1 = TOK_GE;
1752 else if (op1 == TOK_ULT)
1753 op1 = TOK_ULE;
1754 else if (op1 == TOK_UGT)
1755 op1 = TOK_UGE;
1756 a = 0;
1757 b = 0;
1758 gen_op(op1);
1759 if (op == TOK_NE) {
1760 b = gvtst(0, 0);
1761 } else {
1762 a = gvtst(1, 0);
1763 if (op != TOK_EQ) {
1764 /* generate non equal test */
1765 vpushi(TOK_NE);
1766 vtop->r = VT_CMP;
1767 b = gvtst(0, 0);
1770 /* compare low. Always unsigned */
1771 op1 = op;
1772 if (op1 == TOK_LT)
1773 op1 = TOK_ULT;
1774 else if (op1 == TOK_LE)
1775 op1 = TOK_ULE;
1776 else if (op1 == TOK_GT)
1777 op1 = TOK_UGT;
1778 else if (op1 == TOK_GE)
1779 op1 = TOK_UGE;
1780 gen_op(op1);
1781 a = gvtst(1, a);
1782 gsym(b);
1783 vseti(VT_JMPI, a);
1784 break;
1787 #endif
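/* helpers for 64-bit constant folding: gen_opic_sdiv() does a signed
   division with unsigned operands by stripping and restoring the
   signs, and gen_opic_lt() does a signed '<' by flipping the sign
   bits and comparing unsigned. */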
1789 static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
1791 uint64_t x = (a >> 63 ? -a : a) / (b >> 63 ? -b : b);
1792 return (a ^ b) >> 63 ? -x : x;
1795 static int gen_opic_lt(uint64_t a, uint64_t b)
1797 return (a ^ (uint64_t)1 << 63) < (b ^ (uint64_t)1 << 63);
1800 /* handle integer constant folding and various
1801    machine-independent optimizations */
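/* When both operands are constant the operation is folded at compile
   time; otherwise no-ops such as x+0, x*1 or x&-1 are dropped,
   multiplication/division by a power of two becomes a shift,
   constants are folded into symbol+offset operands, and anything
   else falls back to gen_opl()/gen_opi(). */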
1802 static void gen_opic(int op)
1804 SValue *v1 = vtop - 1;
1805 SValue *v2 = vtop;
1806 int t1 = v1->type.t & VT_BTYPE;
1807 int t2 = v2->type.t & VT_BTYPE;
1808 int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1809 int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1810 uint64_t l1 = c1 ? v1->c.i : 0;
1811 uint64_t l2 = c2 ? v2->c.i : 0;
1812 int shm = (t1 == VT_LLONG) ? 63 : 31;
1814 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
1815 l1 = ((uint32_t)l1 |
1816 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
1817 if (t2 != VT_LLONG && (PTR_SIZE != 8 || t2 != VT_PTR))
1818 l2 = ((uint32_t)l2 |
1819 (v2->type.t & VT_UNSIGNED ? 0 : -(l2 & 0x80000000)));
1821 if (c1 && c2) {
1822 switch(op) {
1823 case '+': l1 += l2; break;
1824 case '-': l1 -= l2; break;
1825 case '&': l1 &= l2; break;
1826 case '^': l1 ^= l2; break;
1827 case '|': l1 |= l2; break;
1828 case '*': l1 *= l2; break;
1830 case TOK_PDIV:
1831 case '/':
1832 case '%':
1833 case TOK_UDIV:
1834 case TOK_UMOD:
1835 /* if division by zero, generate explicit division */
1836 if (l2 == 0) {
1837 if (const_wanted)
1838 tcc_error("division by zero in constant");
1839 goto general_case;
1841 switch(op) {
1842 default: l1 = gen_opic_sdiv(l1, l2); break;
1843 case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
1844 case TOK_UDIV: l1 = l1 / l2; break;
1845 case TOK_UMOD: l1 = l1 % l2; break;
1847 break;
1848 case TOK_SHL: l1 <<= (l2 & shm); break;
1849 case TOK_SHR: l1 >>= (l2 & shm); break;
1850 case TOK_SAR:
1851 l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
1852 break;
1853 /* tests */
1854 case TOK_ULT: l1 = l1 < l2; break;
1855 case TOK_UGE: l1 = l1 >= l2; break;
1856 case TOK_EQ: l1 = l1 == l2; break;
1857 case TOK_NE: l1 = l1 != l2; break;
1858 case TOK_ULE: l1 = l1 <= l2; break;
1859 case TOK_UGT: l1 = l1 > l2; break;
1860 case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
1861 case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
1862 case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
1863 case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
1864 /* logical */
1865 case TOK_LAND: l1 = l1 && l2; break;
1866 case TOK_LOR: l1 = l1 || l2; break;
1867 default:
1868 goto general_case;
1870 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
1871 l1 = ((uint32_t)l1 |
1872 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
1873 v1->c.i = l1;
1874 vtop--;
1875 } else {
1876 /* if commutative ops, put c2 as constant */
1877 if (c1 && (op == '+' || op == '&' || op == '^' ||
1878 op == '|' || op == '*')) {
1879 vswap();
1880 c2 = c1; //c = c1, c1 = c2, c2 = c;
1881 l2 = l1; //l = l1, l1 = l2, l2 = l;
1883 if (!const_wanted &&
1884 c1 && ((l1 == 0 &&
1885 (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
1886 (l1 == -1 && op == TOK_SAR))) {
1887 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
1888 vtop--;
1889 } else if (!const_wanted &&
1890 c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
1891 (op == '|' &&
1892 (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))) ||
1893 (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
1894 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
1895 if (l2 == 1)
1896 vtop->c.i = 0;
1897 vswap();
1898 vtop--;
1899 } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
1900 op == TOK_PDIV) &&
1901 l2 == 1) ||
1902 ((op == '+' || op == '-' || op == '|' || op == '^' ||
1903 op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
1904 l2 == 0) ||
1905 (op == '&' &&
1906 (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))))) {
1907 /* filter out NOP operations like x*1, x-0, x&-1... */
1908 vtop--;
1909 } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
1910 /* try to use shifts instead of muls or divs */
1911 if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
1912 int n = -1;
1913 while (l2) {
1914 l2 >>= 1;
1915 n++;
1917 vtop->c.i = n;
1918 if (op == '*')
1919 op = TOK_SHL;
1920 else if (op == TOK_PDIV)
1921 op = TOK_SAR;
1922 else
1923 op = TOK_SHR;
1925 goto general_case;
1926 } else if (c2 && (op == '+' || op == '-') &&
1927 (((vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM))
1928 || (vtop[-1].r & (VT_VALMASK | VT_LVAL)) == VT_LOCAL)) {
1929 /* symbol + constant case */
1930 if (op == '-')
1931 l2 = -l2;
1932 l2 += vtop[-1].c.i;
1933 /* The backends can't always deal with addends to symbols
1934 larger than +-1<<31. Don't construct such. */
1935 if ((int)l2 != l2)
1936 goto general_case;
1937 vtop--;
1938 vtop->c.i = l2;
1939 } else {
1940 general_case:
1941 /* call low level op generator */
1942 if (t1 == VT_LLONG || t2 == VT_LLONG ||
1943 (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
1944 gen_opl(op);
1945 else
1946 gen_opi(op);
1951 /* generate a floating point operation with constant propagation */
1952 static void gen_opif(int op)
1954 int c1, c2;
1955 SValue *v1, *v2;
1956 #if defined _MSC_VER && defined _AMD64_
1957 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
1958 volatile
1959 #endif
1960 long double f1, f2;
1962 v1 = vtop - 1;
1963 v2 = vtop;
1964 /* currently, we cannot do computations with forward symbols */
1965 c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1966 c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1967 if (c1 && c2) {
1968 if (v1->type.t == VT_FLOAT) {
1969 f1 = v1->c.f;
1970 f2 = v2->c.f;
1971 } else if (v1->type.t == VT_DOUBLE) {
1972 f1 = v1->c.d;
1973 f2 = v2->c.d;
1974 } else {
1975 f1 = v1->c.ld;
1976 f2 = v2->c.ld;
1979 /* NOTE: we only do constant propagation if finite number (not
1980 NaN or infinity) (ANSI spec) */
1981 if (!ieee_finite(f1) || !ieee_finite(f2))
1982 goto general_case;
1984 switch(op) {
1985 case '+': f1 += f2; break;
1986 case '-': f1 -= f2; break;
1987 case '*': f1 *= f2; break;
1988 case '/':
1989 if (f2 == 0.0) {
1990 if (const_wanted)
1991 tcc_error("division by zero in constant");
1992 goto general_case;
1994 f1 /= f2;
1995 break;
1996         /* XXX: also handle tests? */
1997 default:
1998 goto general_case;
2000 /* XXX: overflow test ? */
2001 if (v1->type.t == VT_FLOAT) {
2002 v1->c.f = f1;
2003 } else if (v1->type.t == VT_DOUBLE) {
2004 v1->c.d = f1;
2005 } else {
2006 v1->c.ld = f1;
2008 vtop--;
2009 } else {
2010 general_case:
2011 gen_opf(op);
2015 static int pointed_size(CType *type)
2017 int align;
2018 return type_size(pointed_type(type), &align);
2021 static void vla_runtime_pointed_size(CType *type)
2023 int align;
2024 vla_runtime_type_size(pointed_type(type), &align);
2027 static inline int is_null_pointer(SValue *p)
2029 if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
2030 return 0;
2031 return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
2032 ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
2033 ((p->type.t & VT_BTYPE) == VT_PTR &&
2034 (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0));
2037 static inline int is_integer_btype(int bt)
2039 return (bt == VT_BYTE || bt == VT_SHORT ||
2040 bt == VT_INT || bt == VT_LLONG);
2043 /* check types for comparison or subtraction of pointers */
2044 static void check_comparison_pointer_types(SValue *p1, SValue *p2, int op)
2046 CType *type1, *type2, tmp_type1, tmp_type2;
2047 int bt1, bt2;
2049     /* null pointers are accepted for all comparisons, as in gcc */
2050 if (is_null_pointer(p1) || is_null_pointer(p2))
2051 return;
2052 type1 = &p1->type;
2053 type2 = &p2->type;
2054 bt1 = type1->t & VT_BTYPE;
2055 bt2 = type2->t & VT_BTYPE;
2056 /* accept comparison between pointer and integer with a warning */
2057 if ((is_integer_btype(bt1) || is_integer_btype(bt2)) && op != '-') {
2058 if (op != TOK_LOR && op != TOK_LAND )
2059 tcc_warning("comparison between pointer and integer");
2060 return;
2063 /* both must be pointers or implicit function pointers */
2064 if (bt1 == VT_PTR) {
2065 type1 = pointed_type(type1);
2066 } else if (bt1 != VT_FUNC)
2067 goto invalid_operands;
2069 if (bt2 == VT_PTR) {
2070 type2 = pointed_type(type2);
2071 } else if (bt2 != VT_FUNC) {
2072 invalid_operands:
2073 tcc_error("invalid operands to binary %s", get_tok_str(op, NULL));
2075 if ((type1->t & VT_BTYPE) == VT_VOID ||
2076 (type2->t & VT_BTYPE) == VT_VOID)
2077 return;
2078 tmp_type1 = *type1;
2079 tmp_type2 = *type2;
2080 tmp_type1.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
2081 tmp_type2.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
2082 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
2083 /* gcc-like error if '-' is used */
2084 if (op == '-')
2085 goto invalid_operands;
2086 else
2087 tcc_warning("comparison of distinct pointer types lacks a cast");
2091 /* generic gen_op: handles type problems */
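/* Applies the usual arithmetic conversions to the two top stack
   entries, scales pointer +/- integer by the size of the pointed-to
   type, turns pointer - pointer into a difference divided by the
   element size, and then dispatches to gen_opif()/gen_opic(). */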
2092 ST_FUNC void gen_op(int op)
2094 int u, t1, t2, bt1, bt2, t;
2095 CType type1;
2097 redo:
2098 t1 = vtop[-1].type.t;
2099 t2 = vtop[0].type.t;
2100 bt1 = t1 & VT_BTYPE;
2101 bt2 = t2 & VT_BTYPE;
2103 if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
2104 tcc_error("operation on a struct");
2105 } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
2106 if (bt2 == VT_FUNC) {
2107 mk_pointer(&vtop->type);
2108 gaddrof();
2110 if (bt1 == VT_FUNC) {
2111 vswap();
2112 mk_pointer(&vtop->type);
2113 gaddrof();
2114 vswap();
2116 goto redo;
2117 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
2118 /* at least one operand is a pointer */
2119 /* relational op: must be both pointers */
2120 if (op >= TOK_ULT && op <= TOK_LOR) {
2121 check_comparison_pointer_types(vtop - 1, vtop, op);
2122         /* pointers are handled as unsigned */
2123 #if PTR_SIZE == 8
2124 t = VT_LLONG | VT_UNSIGNED;
2125 #else
2126 t = VT_INT | VT_UNSIGNED;
2127 #endif
2128 goto std_op;
2130 /* if both pointers, then it must be the '-' op */
2131 if (bt1 == VT_PTR && bt2 == VT_PTR) {
2132 if (op != '-')
2133 tcc_error("cannot use pointers here");
2134 check_comparison_pointer_types(vtop - 1, vtop, op);
2135 /* XXX: check that types are compatible */
2136 if (vtop[-1].type.t & VT_VLA) {
2137 vla_runtime_pointed_size(&vtop[-1].type);
2138 } else {
2139 vpushi(pointed_size(&vtop[-1].type));
2141 vrott(3);
2142 gen_opic(op);
2143 vtop->type.t = ptrdiff_type.t;
2144 vswap();
2145 gen_op(TOK_PDIV);
2146 } else {
2147 /* exactly one pointer : must be '+' or '-'. */
2148 if (op != '-' && op != '+')
2149 tcc_error("cannot use pointers here");
2150 /* Put pointer as first operand */
2151 if (bt2 == VT_PTR) {
2152 vswap();
2153 t = t1, t1 = t2, t2 = t;
2155 #if PTR_SIZE == 4
2156 if ((vtop[0].type.t & VT_BTYPE) == VT_LLONG)
2157 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2158 gen_cast_s(VT_INT);
2159 #endif
2160 type1 = vtop[-1].type;
2161 type1.t &= ~VT_ARRAY;
2162 if (vtop[-1].type.t & VT_VLA)
2163 vla_runtime_pointed_size(&vtop[-1].type);
2164 else {
2165 u = pointed_size(&vtop[-1].type);
2166 if (u < 0)
2167 tcc_error("unknown array element size");
2168 #if PTR_SIZE == 8
2169 vpushll(u);
2170 #else
2171 /* XXX: cast to int ? (long long case) */
2172 vpushi(u);
2173 #endif
2175 gen_op('*');
2176 #if 0
2177 /* #ifdef CONFIG_TCC_BCHECK
2178    The main reason for removing this code:
2179 #include <stdio.h>
2180 int main ()
2182 int v[10];
2183 int i = 10;
2184 int j = 9;
2185 fprintf(stderr, "v+i-j = %p\n", v+i-j);
2186 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
2188    When this code is enabled, the output looks like
2189 v+i-j = 0xfffffffe
2190 v+(i-j) = 0xbff84000
2192 /* if evaluating constant expression, no code should be
2193 generated, so no bound check */
2194 if (tcc_state->do_bounds_check && !const_wanted) {
2195 /* if bounded pointers, we generate a special code to
2196 test bounds */
2197 if (op == '-') {
2198 vpushi(0);
2199 vswap();
2200 gen_op('-');
2202 gen_bounded_ptr_add();
2203 } else
2204 #endif
2206 gen_opic(op);
2208 /* restore the type in case gen_opic() swapped the operands */
2209 vtop->type = type1;
2211 } else if (is_float(bt1) || is_float(bt2)) {
2212 /* compute bigger type and do implicit casts */
2213 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
2214 t = VT_LDOUBLE;
2215 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
2216 t = VT_DOUBLE;
2217 } else {
2218 t = VT_FLOAT;
2220 /* floats can only be used for a few operations */
2221 if (op != '+' && op != '-' && op != '*' && op != '/' &&
2222 (op < TOK_ULT || op > TOK_GT))
2223 tcc_error("invalid operands for binary operation");
2224 goto std_op;
2225 } else if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL) {
2226 t = bt1 == VT_LLONG ? VT_LLONG : VT_INT;
2227 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (t | VT_UNSIGNED))
2228 t |= VT_UNSIGNED;
2229 t |= (VT_LONG & t1);
2230 goto std_op;
2231 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
2232 /* cast to biggest op */
2233 t = VT_LLONG | VT_LONG;
2234 if (bt1 == VT_LLONG)
2235 t &= t1;
2236 if (bt2 == VT_LLONG)
2237 t &= t2;
2238 /* convert to unsigned if it does not fit in a long long */
2239 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
2240 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
2241 t |= VT_UNSIGNED;
2242 goto std_op;
2243 } else {
2244 /* integer operations */
2245 t = VT_INT | (VT_LONG & (t1 | t2));
2246 /* convert to unsigned if it does not fit in an integer */
2247 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
2248 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
2249 t |= VT_UNSIGNED;
2250 std_op:
2251 /* XXX: currently, some unsigned operations are explicit, so
2252 we modify them here */
2253 if (t & VT_UNSIGNED) {
2254 if (op == TOK_SAR)
2255 op = TOK_SHR;
2256 else if (op == '/')
2257 op = TOK_UDIV;
2258 else if (op == '%')
2259 op = TOK_UMOD;
2260 else if (op == TOK_LT)
2261 op = TOK_ULT;
2262 else if (op == TOK_GT)
2263 op = TOK_UGT;
2264 else if (op == TOK_LE)
2265 op = TOK_ULE;
2266 else if (op == TOK_GE)
2267 op = TOK_UGE;
2269 vswap();
2270 type1.t = t;
2271 type1.ref = NULL;
2272 gen_cast(&type1);
2273 vswap();
2274 /* special case for shifts and long long: we keep the shift as
2275 an integer */
2276 if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
2277 type1.t = VT_INT;
2278 gen_cast(&type1);
2279 if (is_float(t))
2280 gen_opif(op);
2281 else
2282 gen_opic(op);
2283 if (op >= TOK_ULT && op <= TOK_GT) {
2284 /* relational op: the result is an int */
2285 vtop->type.t = VT_INT;
2286 } else {
2287 vtop->type.t = t;
2290 // Make sure that we have converted to an rvalue:
2291 if (vtop->r & VT_LVAL)
2292 gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
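/* A small worked example of the pointer handling above, assuming
   PTR_SIZE == 4 and a 4-byte int: for "int *p; ... p + 2" the element
   size 4 is pushed and multiplied in, so 8 bytes are added to p; for
   "p - q" with two int pointers the byte difference is computed first
   and then divided by 4 via TOK_PDIV, giving a ptrdiff_t result. */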
2295 #ifndef TCC_TARGET_ARM
2296 /* generic itof for unsigned long long case */
2297 static void gen_cvt_itof1(int t)
2299 #ifdef TCC_TARGET_ARM64
2300 gen_cvt_itof(t);
2301 #else
2302 if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
2303 (VT_LLONG | VT_UNSIGNED)) {
2305 if (t == VT_FLOAT)
2306 vpush_global_sym(&func_old_type, TOK___floatundisf);
2307 #if LDOUBLE_SIZE != 8
2308 else if (t == VT_LDOUBLE)
2309 vpush_global_sym(&func_old_type, TOK___floatundixf);
2310 #endif
2311 else
2312 vpush_global_sym(&func_old_type, TOK___floatundidf);
2313 vrott(2);
2314 gfunc_call(1);
2315 vpushi(0);
2316 vtop->r = reg_fret(t);
2317 } else {
2318 gen_cvt_itof(t);
2320 #endif
2322 #endif
2324 /* generic ftoi for unsigned long long case */
2325 static void gen_cvt_ftoi1(int t)
2327 #ifdef TCC_TARGET_ARM64
2328 gen_cvt_ftoi(t);
2329 #else
2330 int st;
2332 if (t == (VT_LLONG | VT_UNSIGNED)) {
2333 /* not handled natively */
2334 st = vtop->type.t & VT_BTYPE;
2335 if (st == VT_FLOAT)
2336 vpush_global_sym(&func_old_type, TOK___fixunssfdi);
2337 #if LDOUBLE_SIZE != 8
2338 else if (st == VT_LDOUBLE)
2339 vpush_global_sym(&func_old_type, TOK___fixunsxfdi);
2340 #endif
2341 else
2342 vpush_global_sym(&func_old_type, TOK___fixunsdfdi);
2343 vrott(2);
2344 gfunc_call(1);
2345 vpushi(0);
2346 vtop->r = REG_IRET;
2347 vtop->r2 = REG_LRET;
2348 } else {
2349 gen_cvt_ftoi(t);
2351 #endif
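/* Example of the helper calls above: on targets without a native
   unsigned 64-bit conversion, "double d = u;" with an unsigned long
   long u compiles to a call to __floatundidf, and "(unsigned long
   long)d" to a call to __fixunsdfdi; the float and long double
   variants use the corresponding *sf/*xf helpers pushed above. */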
2354 /* force char or short cast */
2355 static void force_charshort_cast(int t)
2357 int bits, dbt;
2359 /* cannot cast static initializers */
2360 if (STATIC_DATA_WANTED)
2361 return;
2363 dbt = t & VT_BTYPE;
2364 /* XXX: add optimization if lvalue : just change type and offset */
2365 if (dbt == VT_BYTE)
2366 bits = 8;
2367 else
2368 bits = 16;
2369 if (t & VT_UNSIGNED) {
2370 vpushi((1 << bits) - 1);
2371 gen_op('&');
2372 } else {
2373 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
2374 bits = 64 - bits;
2375 else
2376 bits = 32 - bits;
2377 vpushi(bits);
2378 gen_op(TOK_SHL);
2379 /* result must be signed or the SAR is converted to an SHR (logical shift).
2380 This was not the case when "t" was a signed short
2381 and the last value on the stack was an unsigned int */
2382 vtop->type.t &= ~VT_UNSIGNED;
2383 vpushi(bits);
2384 gen_op(TOK_SAR);
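/* Worked example of the delayed cast above: truncating the int value
   0x1ff to signed char shifts left by 24 and arithmetic-shifts right
   by 24, yielding -1, the same as (signed char)0x1ff on a two's
   complement target; for unsigned char the 0xff mask yields 255. */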
2388 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2389 static void gen_cast_s(int t)
2391 CType type;
2392 type.t = t;
2393 type.ref = NULL;
2394 gen_cast(&type);
2397 static void gen_cast(CType *type)
2399 int sbt, dbt, sf, df, c, p;
2401 /* special delayed cast for char/short */
2402 /* XXX: in some cases (multiple cascaded casts), it may still
2403 be incorrect */
2404 if (vtop->r & VT_MUSTCAST) {
2405 vtop->r &= ~VT_MUSTCAST;
2406 force_charshort_cast(vtop->type.t);
2409 /* bitfields first get cast to ints */
2410 if (vtop->type.t & VT_BITFIELD) {
2411 gv(RC_INT);
2414 dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
2415 sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
2417 if (sbt != dbt) {
2418 sf = is_float(sbt);
2419 df = is_float(dbt);
2420 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2421 p = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM);
2422 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
2423 c &= dbt != VT_LDOUBLE;
2424 #endif
2425 if (c) {
2426 /* constant case: we can do it now */
2427 /* XXX: in ISOC, cannot do it if error in convert */
2428 if (sbt == VT_FLOAT)
2429 vtop->c.ld = vtop->c.f;
2430 else if (sbt == VT_DOUBLE)
2431 vtop->c.ld = vtop->c.d;
2433 if (df) {
2434 if ((sbt & VT_BTYPE) == VT_LLONG) {
2435 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
2436 vtop->c.ld = vtop->c.i;
2437 else
2438 vtop->c.ld = -(long double)-vtop->c.i;
2439 } else if(!sf) {
2440 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
2441 vtop->c.ld = (uint32_t)vtop->c.i;
2442 else
2443 vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
2446 if (dbt == VT_FLOAT)
2447 vtop->c.f = (float)vtop->c.ld;
2448 else if (dbt == VT_DOUBLE)
2449 vtop->c.d = (double)vtop->c.ld;
2450 } else if (sf && dbt == (VT_LLONG|VT_UNSIGNED)) {
2451 vtop->c.i = vtop->c.ld;
2452 } else if (sf && dbt == VT_BOOL) {
2453 vtop->c.i = (vtop->c.ld != 0);
2454 } else {
2455 if(sf)
2456 vtop->c.i = vtop->c.ld;
2457 else if (sbt == (VT_LLONG|VT_UNSIGNED))
2459 else if (sbt & VT_UNSIGNED)
2460 vtop->c.i = (uint32_t)vtop->c.i;
2461 #if PTR_SIZE == 8
2462 else if (sbt == VT_PTR)
2464 #endif
2465 else if (sbt != VT_LLONG)
2466 vtop->c.i = ((uint32_t)vtop->c.i |
2467 -(vtop->c.i & 0x80000000));
2469 if (dbt == (VT_LLONG|VT_UNSIGNED))
2471 else if (dbt == VT_BOOL)
2472 vtop->c.i = (vtop->c.i != 0);
2473 #if PTR_SIZE == 8
2474 else if (dbt == VT_PTR)
2476 #endif
2477 else if (dbt != VT_LLONG) {
2478 uint32_t m = ((dbt & VT_BTYPE) == VT_BYTE ? 0xff :
2479 (dbt & VT_BTYPE) == VT_SHORT ? 0xffff :
2480 0xffffffff);
2481 vtop->c.i &= m;
2482 if (!(dbt & VT_UNSIGNED))
2483 vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
2486 } else if (p && dbt == VT_BOOL) {
2487 vtop->r = VT_CONST;
2488 vtop->c.i = 1;
2489 } else {
2490 /* non constant case: generate code */
2491 if (sf && df) {
2492 /* convert from fp to fp */
2493 gen_cvt_ftof(dbt);
2494 } else if (df) {
2495 /* convert int to fp */
2496 gen_cvt_itof1(dbt);
2497 } else if (sf) {
2498 /* convert fp to int */
2499 if (dbt == VT_BOOL) {
2500 vpushi(0);
2501 gen_op(TOK_NE);
2502 } else {
2503 /* we handle char/short/etc... with generic code */
2504 if (dbt != (VT_INT | VT_UNSIGNED) &&
2505 dbt != (VT_LLONG | VT_UNSIGNED) &&
2506 dbt != VT_LLONG)
2507 dbt = VT_INT;
2508 gen_cvt_ftoi1(dbt);
2509 if (dbt == VT_INT && (type->t & (VT_BTYPE | VT_UNSIGNED)) != dbt) {
2510 /* additional cast for char/short... */
2511 vtop->type.t = dbt;
2512 gen_cast(type);
2515 #if PTR_SIZE == 4
2516 } else if ((dbt & VT_BTYPE) == VT_LLONG) {
2517 if ((sbt & VT_BTYPE) != VT_LLONG) {
2518 /* scalar to long long */
2519 /* machine independent conversion */
2520 gv(RC_INT);
2521 /* generate high word */
2522 if (sbt == (VT_INT | VT_UNSIGNED)) {
2523 vpushi(0);
2524 gv(RC_INT);
2525 } else {
2526 if (sbt == VT_PTR) {
2527 /* cast from pointer to int before we apply the
2528 shift operation, which pointers don't support */
2529 gen_cast_s(VT_INT);
2531 gv_dup();
2532 vpushi(31);
2533 gen_op(TOK_SAR);
2535 /* patch second register */
2536 vtop[-1].r2 = vtop->r;
2537 vpop();
2539 #else
2540 } else if ((dbt & VT_BTYPE) == VT_LLONG ||
2541 (dbt & VT_BTYPE) == VT_PTR ||
2542 (dbt & VT_BTYPE) == VT_FUNC) {
2543 if ((sbt & VT_BTYPE) != VT_LLONG &&
2544 (sbt & VT_BTYPE) != VT_PTR &&
2545 (sbt & VT_BTYPE) != VT_FUNC) {
2546 /* need to convert from 32bit to 64bit */
2547 gv(RC_INT);
2548 if (sbt != (VT_INT | VT_UNSIGNED)) {
2549 #if defined(TCC_TARGET_ARM64)
2550 gen_cvt_sxtw();
2551 #elif defined(TCC_TARGET_X86_64)
2552 int r = gv(RC_INT);
2553 /* x86_64 specific: movslq */
2554 o(0x6348);
2555 o(0xc0 + (REG_VALUE(r) << 3) + REG_VALUE(r));
2556 #else
2557 #error
2558 #endif
2561 #endif
2562 } else if (dbt == VT_BOOL) {
2563 /* scalar to bool */
2564 vpushi(0);
2565 gen_op(TOK_NE);
2566 } else if ((dbt & VT_BTYPE) == VT_BYTE ||
2567 (dbt & VT_BTYPE) == VT_SHORT) {
2568 if (sbt == VT_PTR) {
2569 vtop->type.t = VT_INT;
2570 tcc_warning("nonportable conversion from pointer to char/short");
2572 force_charshort_cast(dbt);
2573 #if PTR_SIZE == 4
2574 } else if ((dbt & VT_BTYPE) == VT_INT) {
2575 /* scalar to int */
2576 if ((sbt & VT_BTYPE) == VT_LLONG) {
2577 /* from long long: just take low order word */
2578 lexpand();
2579 vpop();
2581 /* if lvalue and single word type, nothing to do because
2582 the lvalue already contains the real type size (see
2583 VT_LVAL_xxx constants) */
2584 #endif
2587 } else if ((dbt & VT_BTYPE) == VT_PTR && !(vtop->r & VT_LVAL)) {
2588 /* if we are casting between pointer types,
2589 we must update the VT_LVAL_xxx size */
2590 vtop->r = (vtop->r & ~VT_LVAL_TYPE)
2591 | (lvalue_type(type->ref->type.t) & VT_LVAL_TYPE);
2593 vtop->type = *type;
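/* Two examples of the constant folding above: casting the constant -1
   to unsigned char masks it with 0xff and, the target being unsigned,
   skips sign extension, leaving 255; casting the floating constant 3.7
   to int stores the truncated value 3 directly in vtop->c.i, so no
   code is emitted for either cast. */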
2596 /* return type size as known at compile time. Put alignment at 'a' */
2597 ST_FUNC int type_size(CType *type, int *a)
2599 Sym *s;
2600 int bt;
2602 bt = type->t & VT_BTYPE;
2603 if (bt == VT_STRUCT) {
2604 /* struct/union */
2605 s = type->ref;
2606 *a = s->r;
2607 return s->c;
2608 } else if (bt == VT_PTR) {
2609 if (type->t & VT_ARRAY) {
2610 int ts;
2612 s = type->ref;
2613 ts = type_size(&s->type, a);
2615 if (ts < 0 && s->c < 0)
2616 ts = -ts;
2618 return ts * s->c;
2619 } else {
2620 *a = PTR_SIZE;
2621 return PTR_SIZE;
2623 } else if (IS_ENUM(type->t) && type->ref->c == -1) {
2624 return -1; /* incomplete enum */
2625 } else if (bt == VT_LDOUBLE) {
2626 *a = LDOUBLE_ALIGN;
2627 return LDOUBLE_SIZE;
2628 } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
2629 #ifdef TCC_TARGET_I386
2630 #ifdef TCC_TARGET_PE
2631 *a = 8;
2632 #else
2633 *a = 4;
2634 #endif
2635 #elif defined(TCC_TARGET_ARM)
2636 #ifdef TCC_ARM_EABI
2637 *a = 8;
2638 #else
2639 *a = 4;
2640 #endif
2641 #else
2642 *a = 8;
2643 #endif
2644 return 8;
2645 } else if (bt == VT_INT || bt == VT_FLOAT) {
2646 *a = 4;
2647 return 4;
2648 } else if (bt == VT_SHORT) {
2649 *a = 2;
2650 return 2;
2651 } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
2652 *a = 8;
2653 return 16;
2654 } else {
2655 /* char, void, function, _Bool */
2656 *a = 1;
2657 return 1;
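/* Sample results, assuming a 4-byte int: "int" gives size 4, align 4;
   "int[10]" gives 40 (element size times s->c); any pointer gives
   PTR_SIZE for both; "double" and "long long" give size 8 but only
   align 4 on non-PE i386 and non-EABI ARM, which is why struct padding
   differs there; an enum without a definition yet gives -1. */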
2661 /* push type size as known at runtime on top of value stack. Put
2662 alignment at 'a' */
2663 ST_FUNC void vla_runtime_type_size(CType *type, int *a)
2665 if (type->t & VT_VLA) {
2666 type_size(&type->ref->type, a);
2667 vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
2668 } else {
2669 vpushi(type_size(type, a));
2673 static void vla_sp_restore(void) {
2674 if (vlas_in_scope) {
2675 gen_vla_sp_restore(vla_sp_loc);
2679 static void vla_sp_restore_root(void) {
2680 if (vlas_in_scope) {
2681 gen_vla_sp_restore(vla_sp_root_loc);
2685 /* return the pointed type of t */
2686 static inline CType *pointed_type(CType *type)
2688 return &type->ref->type;
2691 /* modify type so that it becomes a pointer to the original type. */
2692 ST_FUNC void mk_pointer(CType *type)
2694 Sym *s;
2695 s = sym_push(SYM_FIELD, type, 0, -1);
2696 type->t = VT_PTR | (type->t & VT_STORAGE);
2697 type->ref = s;
2700 /* compare function types. OLD functions match any new functions */
2701 static int is_compatible_func(CType *type1, CType *type2)
2703 Sym *s1, *s2;
2705 s1 = type1->ref;
2706 s2 = type2->ref;
2707 if (!is_compatible_types(&s1->type, &s2->type))
2708 return 0;
2709 /* check func_call */
2710 if (s1->f.func_call != s2->f.func_call)
2711 return 0;
2712 /* XXX: not complete */
2713 if (s1->f.func_type == FUNC_OLD || s2->f.func_type == FUNC_OLD)
2714 return 1;
2715 if (s1->f.func_type != s2->f.func_type)
2716 return 0;
2717 while (s1 != NULL) {
2718 if (s2 == NULL)
2719 return 0;
2720 if (!is_compatible_unqualified_types(&s1->type, &s2->type))
2721 return 0;
2722 s1 = s1->next;
2723 s2 = s2->next;
2725 if (s2)
2726 return 0;
2727 return 1;
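/* Examples of the matching above: the old-style declaration "int f();"
   is compatible with the prototype "int f(int, char *);", while
   "int g(int)" and "int g(short)" are not, and two prototypes that
   differ only in calling convention (e.g. cdecl vs. stdcall set via
   attributes) are rejected by the func_call check. */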
2730 /* return true if type1 and type2 are the same. If unqualified is
2731 true, qualifiers on the types are ignored.
2733 - enums are not checked, as in gcc's __builtin_types_compatible_p()
2735 static int compare_types(CType *type1, CType *type2, int unqualified)
2737 int bt1, t1, t2;
2739 t1 = type1->t & VT_TYPE;
2740 t2 = type2->t & VT_TYPE;
2741 if (unqualified) {
2742 /* strip qualifiers before comparing */
2743 t1 &= ~(VT_CONSTANT | VT_VOLATILE);
2744 t2 &= ~(VT_CONSTANT | VT_VOLATILE);
2747 /* Default vs. explicit signedness only matters for char */
2748 if ((t1 & VT_BTYPE) != VT_BYTE) {
2749 t1 &= ~VT_DEFSIGN;
2750 t2 &= ~VT_DEFSIGN;
2752 /* XXX: bitfields ? */
2753 if (t1 != t2)
2754 return 0;
2755 /* test more complicated cases */
2756 bt1 = t1 & VT_BTYPE;
2757 if (bt1 == VT_PTR) {
2758 type1 = pointed_type(type1);
2759 type2 = pointed_type(type2);
2760 return is_compatible_types(type1, type2);
2761 } else if (bt1 == VT_STRUCT) {
2762 return (type1->ref == type2->ref);
2763 } else if (bt1 == VT_FUNC) {
2764 return is_compatible_func(type1, type2);
2765 } else {
2766 return 1;
2770 /* return true if type1 and type2 are exactly the same (including
2771 qualifiers).
2773 static int is_compatible_types(CType *type1, CType *type2)
2775 return compare_types(type1,type2,0);
2778 /* return true if type1 and type2 are the same (ignoring qualifiers).
2780 static int is_compatible_unqualified_types(CType *type1, CType *type2)
2782 return compare_types(type1,type2,1);
2785 /* print a type. If 'varstr' is not NULL, the variable name is also
2786 printed within the type */
2787 /* XXX: union */
2788 /* XXX: add array and function pointers */
2789 static void type_to_str(char *buf, int buf_size,
2790 CType *type, const char *varstr)
2792 int bt, v, t;
2793 Sym *s, *sa;
2794 char buf1[256];
2795 const char *tstr;
2797 t = type->t;
2798 bt = t & VT_BTYPE;
2799 buf[0] = '\0';
2801 if (t & VT_EXTERN)
2802 pstrcat(buf, buf_size, "extern ");
2803 if (t & VT_STATIC)
2804 pstrcat(buf, buf_size, "static ");
2805 if (t & VT_TYPEDEF)
2806 pstrcat(buf, buf_size, "typedef ");
2807 if (t & VT_INLINE)
2808 pstrcat(buf, buf_size, "inline ");
2809 if (t & VT_VOLATILE)
2810 pstrcat(buf, buf_size, "volatile ");
2811 if (t & VT_CONSTANT)
2812 pstrcat(buf, buf_size, "const ");
2814 if (((t & VT_DEFSIGN) && bt == VT_BYTE)
2815 || ((t & VT_UNSIGNED)
2816 && (bt == VT_SHORT || bt == VT_INT || bt == VT_LLONG)
2817 && !IS_ENUM(t)
2819 pstrcat(buf, buf_size, (t & VT_UNSIGNED) ? "unsigned " : "signed ");
2821 buf_size -= strlen(buf);
2822 buf += strlen(buf);
2824 switch(bt) {
2825 case VT_VOID:
2826 tstr = "void";
2827 goto add_tstr;
2828 case VT_BOOL:
2829 tstr = "_Bool";
2830 goto add_tstr;
2831 case VT_BYTE:
2832 tstr = "char";
2833 goto add_tstr;
2834 case VT_SHORT:
2835 tstr = "short";
2836 goto add_tstr;
2837 case VT_INT:
2838 tstr = "int";
2839 goto maybe_long;
2840 case VT_LLONG:
2841 tstr = "long long";
2842 maybe_long:
2843 if (t & VT_LONG)
2844 tstr = "long";
2845 if (!IS_ENUM(t))
2846 goto add_tstr;
2847 tstr = "enum ";
2848 goto tstruct;
2849 case VT_FLOAT:
2850 tstr = "float";
2851 goto add_tstr;
2852 case VT_DOUBLE:
2853 tstr = "double";
2854 goto add_tstr;
2855 case VT_LDOUBLE:
2856 tstr = "long double";
2857 add_tstr:
2858 pstrcat(buf, buf_size, tstr);
2859 break;
2860 case VT_STRUCT:
2861 tstr = "struct ";
2862 if (IS_UNION(t))
2863 tstr = "union ";
2864 tstruct:
2865 pstrcat(buf, buf_size, tstr);
2866 v = type->ref->v & ~SYM_STRUCT;
2867 if (v >= SYM_FIRST_ANOM)
2868 pstrcat(buf, buf_size, "<anonymous>");
2869 else
2870 pstrcat(buf, buf_size, get_tok_str(v, NULL));
2871 break;
2872 case VT_FUNC:
2873 s = type->ref;
2874 type_to_str(buf, buf_size, &s->type, varstr);
2875 pstrcat(buf, buf_size, "(");
2876 sa = s->next;
2877 while (sa != NULL) {
2878 type_to_str(buf1, sizeof(buf1), &sa->type, NULL);
2879 pstrcat(buf, buf_size, buf1);
2880 sa = sa->next;
2881 if (sa)
2882 pstrcat(buf, buf_size, ", ");
2884 pstrcat(buf, buf_size, ")");
2885 goto no_var;
2886 case VT_PTR:
2887 s = type->ref;
2888 if (t & VT_ARRAY) {
2889 snprintf(buf1, sizeof(buf1), "%s[%d]", varstr ? varstr : "", s->c);
2890 type_to_str(buf, buf_size, &s->type, buf1);
2891 goto no_var;
2893 pstrcpy(buf1, sizeof(buf1), "*");
2894 if (t & VT_CONSTANT)
2895 pstrcat(buf1, buf_size, "const ");
2896 if (t & VT_VOLATILE)
2897 pstrcat(buf1, buf_size, "volatile ");
2898 if (varstr)
2899 pstrcat(buf1, sizeof(buf1), varstr);
2900 type_to_str(buf, buf_size, &s->type, buf1);
2901 goto no_var;
2903 if (varstr) {
2904 pstrcat(buf, buf_size, " ");
2905 pstrcat(buf, buf_size, varstr);
2907 no_var: ;
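/* Example outputs used in diagnostics: an "unsigned char *" with
   varstr "p" prints as "unsigned char *p", "char buf[16]" prints as
   "char buf[16]", and a function such as "int f(int, char)" prints
   its return type, name and parameter list. */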
2910 /* verify type compatibility to store vtop in 'dt' type, and generate
2911 casts if needed. */
2912 static void gen_assign_cast(CType *dt)
2914 CType *st, *type1, *type2;
2915 char buf1[256], buf2[256];
2916 int dbt, sbt;
2918 st = &vtop->type; /* source type */
2919 dbt = dt->t & VT_BTYPE;
2920 sbt = st->t & VT_BTYPE;
2921 if (sbt == VT_VOID || dbt == VT_VOID) {
2922 if (sbt == VT_VOID && dbt == VT_VOID)
2923 ; /*
2924 It is Ok if both are void
2925 A test program:
2926 void func1() {}
2927 void func2() {
2928 return func1();
2930 gcc accepts this program
2932 else
2933 tcc_error("cannot cast from/to void");
2935 if (dt->t & VT_CONSTANT)
2936 tcc_warning("assignment of read-only location");
2937 switch(dbt) {
2938 case VT_PTR:
2939 /* special cases for pointers */
2940 /* '0' can also be a pointer */
2941 if (is_null_pointer(vtop))
2942 goto type_ok;
2943 /* accept implicit pointer to integer cast with warning */
2944 if (is_integer_btype(sbt)) {
2945 tcc_warning("assignment makes pointer from integer without a cast");
2946 goto type_ok;
2948 type1 = pointed_type(dt);
2949 /* a function is implicitly a function pointer */
2950 if (sbt == VT_FUNC) {
2951 if ((type1->t & VT_BTYPE) != VT_VOID &&
2952 !is_compatible_types(pointed_type(dt), st))
2953 tcc_warning("assignment from incompatible pointer type");
2954 goto type_ok;
2956 if (sbt != VT_PTR)
2957 goto error;
2958 type2 = pointed_type(st);
2959 if ((type1->t & VT_BTYPE) == VT_VOID ||
2960 (type2->t & VT_BTYPE) == VT_VOID) {
2961 /* void * can match anything */
2962 } else {
2963 //printf("types %08x %08x\n", type1->t, type2->t);
2964 /* exact type match, except for qualifiers */
2965 if (!is_compatible_unqualified_types(type1, type2)) {
2966 /* Like GCC, don't warn by default for mere changes
2967 in pointer target signedness. Do warn for different
2968 base types, though, in particular for unsigned enums
2969 and signed int targets. */
2970 if ((type1->t & (VT_BTYPE|VT_LONG)) != (type2->t & (VT_BTYPE|VT_LONG))
2971 || IS_ENUM(type1->t) || IS_ENUM(type2->t)
2973 tcc_warning("assignment from incompatible pointer type");
2976 /* check const and volatile */
2977 if ((!(type1->t & VT_CONSTANT) && (type2->t & VT_CONSTANT)) ||
2978 (!(type1->t & VT_VOLATILE) && (type2->t & VT_VOLATILE)))
2979 tcc_warning("assignment discards qualifiers from pointer target type");
2980 break;
2981 case VT_BYTE:
2982 case VT_SHORT:
2983 case VT_INT:
2984 case VT_LLONG:
2985 if (sbt == VT_PTR || sbt == VT_FUNC) {
2986 tcc_warning("assignment makes integer from pointer without a cast");
2987 } else if (sbt == VT_STRUCT) {
2988 goto case_VT_STRUCT;
2990 /* XXX: more tests */
2991 break;
2992 case VT_STRUCT:
2993 case_VT_STRUCT:
2994 if (!is_compatible_unqualified_types(dt, st)) {
2995 error:
2996 type_to_str(buf1, sizeof(buf1), st, NULL);
2997 type_to_str(buf2, sizeof(buf2), dt, NULL);
2998 tcc_error("cannot cast '%s' to '%s'", buf1, buf2);
3000 break;
3002 type_ok:
3003 gen_cast(dt);
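/* Typical diagnostics produced above: "p = 3;" with an int *p warns
   that the assignment makes a pointer from an integer without a cast,
   "i = p;" warns about making an integer from a pointer, assigning a
   const char * to a char * warns that qualifiers are discarded from
   the pointer target type, and assigning between unrelated struct
   types is a hard "cannot cast" error. */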
3006 /* store vtop in lvalue pushed on stack */
3007 ST_FUNC void vstore(void)
3009 int sbt, dbt, ft, r, t, size, align, bit_size, bit_pos, rc, delayed_cast;
3011 ft = vtop[-1].type.t;
3012 sbt = vtop->type.t & VT_BTYPE;
3013 dbt = ft & VT_BTYPE;
3014 if ((((sbt == VT_INT || sbt == VT_SHORT) && dbt == VT_BYTE) ||
3015 (sbt == VT_INT && dbt == VT_SHORT))
3016 && !(vtop->type.t & VT_BITFIELD)) {
3017 /* optimize char/short casts */
3018 delayed_cast = VT_MUSTCAST;
3019 vtop->type.t = ft & VT_TYPE;
3020 /* XXX: factorize */
3021 if (ft & VT_CONSTANT)
3022 tcc_warning("assignment of read-only location");
3023 } else {
3024 delayed_cast = 0;
3025 if (!(ft & VT_BITFIELD))
3026 gen_assign_cast(&vtop[-1].type);
3029 if (sbt == VT_STRUCT) {
3030 /* if structure, only generate pointer */
3031 /* structure assignment : generate memcpy */
3032 /* XXX: optimize if small size */
3033 size = type_size(&vtop->type, &align);
3035 /* destination */
3036 vswap();
3037 vtop->type.t = VT_PTR;
3038 gaddrof();
3040 /* address of memcpy() */
3041 #ifdef TCC_ARM_EABI
3042 if(!(align & 7))
3043 vpush_global_sym(&func_old_type, TOK_memcpy8);
3044 else if(!(align & 3))
3045 vpush_global_sym(&func_old_type, TOK_memcpy4);
3046 else
3047 #endif
3048 /* Use memmove, rather than memcpy, as dest and src may be the same: */
3049 vpush_global_sym(&func_old_type, TOK_memmove);
3051 vswap();
3052 /* source */
3053 vpushv(vtop - 2);
3054 vtop->type.t = VT_PTR;
3055 gaddrof();
3056 /* type size */
3057 vpushi(size);
3058 gfunc_call(3);
3060 /* leave source on stack */
3061 } else if (ft & VT_BITFIELD) {
3062 /* bitfield store handling */
3064 /* save lvalue as expression result (example: s.b = s.a = n;) */
3065 vdup(), vtop[-1] = vtop[-2];
3067 bit_pos = BIT_POS(ft);
3068 bit_size = BIT_SIZE(ft);
3069 /* remove bit field info to avoid loops */
3070 vtop[-1].type.t = ft & ~VT_STRUCT_MASK;
3072 if ((ft & VT_BTYPE) == VT_BOOL) {
3073 gen_cast(&vtop[-1].type);
3074 vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
3077 r = adjust_bf(vtop - 1, bit_pos, bit_size);
3078 if (r == VT_STRUCT) {
3079 gen_cast_s((ft & VT_BTYPE) == VT_LLONG ? VT_LLONG : VT_INT);
3080 store_packed_bf(bit_pos, bit_size);
3081 } else {
3082 unsigned long long mask = (1ULL << bit_size) - 1;
3083 if ((ft & VT_BTYPE) != VT_BOOL) {
3084 /* mask source */
3085 if ((vtop[-1].type.t & VT_BTYPE) == VT_LLONG)
3086 vpushll(mask);
3087 else
3088 vpushi((unsigned)mask);
3089 gen_op('&');
3091 /* shift source */
3092 vpushi(bit_pos);
3093 gen_op(TOK_SHL);
3094 vswap();
3095 /* duplicate destination */
3096 vdup();
3097 vrott(3);
3098 /* load destination, mask and or with source */
3099 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
3100 vpushll(~(mask << bit_pos));
3101 else
3102 vpushi(~((unsigned)mask << bit_pos));
3103 gen_op('&');
3104 gen_op('|');
3105 /* store result */
3106 vstore();
3107 /* ... and discard */
3108 vpop();
3110 } else if (dbt == VT_VOID) {
3111 --vtop;
3112 } else {
3113 #ifdef CONFIG_TCC_BCHECK
3114 /* bound check case */
3115 if (vtop[-1].r & VT_MUSTBOUND) {
3116 vswap();
3117 gbound();
3118 vswap();
3120 #endif
3121 rc = RC_INT;
3122 if (is_float(ft)) {
3123 rc = RC_FLOAT;
3124 #ifdef TCC_TARGET_X86_64
3125 if ((ft & VT_BTYPE) == VT_LDOUBLE) {
3126 rc = RC_ST0;
3127 } else if ((ft & VT_BTYPE) == VT_QFLOAT) {
3128 rc = RC_FRET;
3130 #endif
3132 r = gv(rc); /* generate value */
3133 /* if lvalue was saved on stack, must read it */
3134 if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
3135 SValue sv;
3136 t = get_reg(RC_INT);
3137 #if PTR_SIZE == 8
3138 sv.type.t = VT_PTR;
3139 #else
3140 sv.type.t = VT_INT;
3141 #endif
3142 sv.r = VT_LOCAL | VT_LVAL;
3143 sv.c.i = vtop[-1].c.i;
3144 load(t, &sv);
3145 vtop[-1].r = t | VT_LVAL;
3147 /* two-word case handling: store second register at word + 4 (or +8 for x86-64) */
3148 #if PTR_SIZE == 8
3149 if (((ft & VT_BTYPE) == VT_QLONG) || ((ft & VT_BTYPE) == VT_QFLOAT)) {
3150 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
3151 #else
3152 if ((ft & VT_BTYPE) == VT_LLONG) {
3153 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
3154 #endif
3155 vtop[-1].type.t = load_type;
3156 store(r, vtop - 1);
3157 vswap();
3158 /* convert to int to increment easily */
3159 vtop->type.t = addr_type;
3160 gaddrof();
3161 vpushi(load_size);
3162 gen_op('+');
3163 vtop->r |= VT_LVAL;
3164 vswap();
3165 vtop[-1].type.t = load_type;
3166 /* XXX: it works because r2 is spilled last ! */
3167 store(vtop->r2, vtop - 1);
3168 } else {
3169 store(r, vtop - 1);
3172 vswap();
3173 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
3174 vtop->r |= delayed_cast;
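/* Sketch of the bit-field store above for
   "struct { unsigned x:3, y:5; } s;" (PCC layout, y at bit position 3):
   "s.y = v" masks v with 0x1f, shifts it left by 3, loads the
   containing word, clears it with ~(0x1f << 3), ORs in the shifted
   value and stores the word back; the lvalue duplicated at the start
   of this branch remains as the expression result. */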
3178 /* 'post' selects post- vs. pre-increment/decrement; c is the token ++ or -- */
3179 ST_FUNC void inc(int post, int c)
3181 test_lvalue();
3182 vdup(); /* save lvalue */
3183 if (post) {
3184 gv_dup(); /* duplicate value */
3185 vrotb(3);
3186 vrotb(3);
3188 /* add constant */
3189 vpushi(c - TOK_MID);
3190 gen_op('+');
3191 vstore(); /* store value */
3192 if (post)
3193 vpop(); /* if post op, return saved value */
3196 ST_FUNC void parse_mult_str (CString *astr, const char *msg)
3198 /* read the string */
3199 if (tok != TOK_STR)
3200 expect(msg);
3201 cstr_new(astr);
3202 while (tok == TOK_STR) {
3203 /* XXX: add \0 handling too ? */
3204 cstr_cat(astr, tokc.str.data, -1);
3205 next();
3207 cstr_ccat(astr, '\0');
3210 /* If i is >= 1 and a power of two, returns log2(i)+1
3211 (floor(log2(i))+1 in general). If i is 0, returns 0. */
3212 static int exact_log2p1(int i)
3214 int ret;
3215 if (!i)
3216 return 0;
3217 for (ret = 1; i >= 1 << 8; ret += 8)
3218 i >>= 8;
3219 if (i >= 1 << 4)
3220 ret += 4, i >>= 4;
3221 if (i >= 1 << 2)
3222 ret += 2, i >>= 2;
3223 if (i >= 1 << 1)
3224 ret++;
3225 return ret;
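/* For example, exact_log2p1(0) == 0, exact_log2p1(1) == 1,
   exact_log2p1(8) == 4 and exact_log2p1(4096) == 13; the aligned
   attribute below stores this encoded value in a.aligned and it is
   decoded later as 1 << (aligned - 1). */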
3228 /* Parse __attribute__((...)) GNUC extension. */
3229 static void parse_attribute(AttributeDef *ad)
3231 int t, n;
3232 CString astr;
3234 redo:
3235 if (tok != TOK_ATTRIBUTE1 && tok != TOK_ATTRIBUTE2)
3236 return;
3237 next();
3238 skip('(');
3239 skip('(');
3240 while (tok != ')') {
3241 if (tok < TOK_IDENT)
3242 expect("attribute name");
3243 t = tok;
3244 next();
3245 switch(t) {
3246 case TOK_SECTION1:
3247 case TOK_SECTION2:
3248 skip('(');
3249 parse_mult_str(&astr, "section name");
3250 ad->section = find_section(tcc_state, (char *)astr.data);
3251 skip(')');
3252 cstr_free(&astr);
3253 break;
3254 case TOK_ALIAS1:
3255 case TOK_ALIAS2:
3256 skip('(');
3257 parse_mult_str(&astr, "alias(\"target\")");
3258 ad->alias_target = /* save string as token, for later */
3259 tok_alloc((char*)astr.data, astr.size-1)->tok;
3260 skip(')');
3261 cstr_free(&astr);
3262 break;
3263 case TOK_VISIBILITY1:
3264 case TOK_VISIBILITY2:
3265 skip('(');
3266 parse_mult_str(&astr,
3267 "visibility(\"default|hidden|internal|protected\")");
3268 if (!strcmp (astr.data, "default"))
3269 ad->a.visibility = STV_DEFAULT;
3270 else if (!strcmp (astr.data, "hidden"))
3271 ad->a.visibility = STV_HIDDEN;
3272 else if (!strcmp (astr.data, "internal"))
3273 ad->a.visibility = STV_INTERNAL;
3274 else if (!strcmp (astr.data, "protected"))
3275 ad->a.visibility = STV_PROTECTED;
3276 else
3277 expect("visibility(\"default|hidden|internal|protected\")");
3278 skip(')');
3279 cstr_free(&astr);
3280 break;
3281 case TOK_ALIGNED1:
3282 case TOK_ALIGNED2:
3283 if (tok == '(') {
3284 next();
3285 n = expr_const();
3286 if (n <= 0 || (n & (n - 1)) != 0)
3287 tcc_error("alignment must be a positive power of two");
3288 skip(')');
3289 } else {
3290 n = MAX_ALIGN;
3292 ad->a.aligned = exact_log2p1(n);
3293 if (n != 1 << (ad->a.aligned - 1))
3294 tcc_error("alignment of %d is larger than implemented", n);
3295 break;
3296 case TOK_PACKED1:
3297 case TOK_PACKED2:
3298 ad->a.packed = 1;
3299 break;
3300 case TOK_WEAK1:
3301 case TOK_WEAK2:
3302 ad->a.weak = 1;
3303 break;
3304 case TOK_UNUSED1:
3305 case TOK_UNUSED2:
3306 /* currently, no need to handle it because tcc does not
3307 track unused objects */
3308 break;
3309 case TOK_NORETURN1:
3310 case TOK_NORETURN2:
3311 /* currently, no need to handle it because tcc does not
3312 make use of the noreturn information */
3313 break;
3314 case TOK_CDECL1:
3315 case TOK_CDECL2:
3316 case TOK_CDECL3:
3317 ad->f.func_call = FUNC_CDECL;
3318 break;
3319 case TOK_STDCALL1:
3320 case TOK_STDCALL2:
3321 case TOK_STDCALL3:
3322 ad->f.func_call = FUNC_STDCALL;
3323 break;
3324 #ifdef TCC_TARGET_I386
3325 case TOK_REGPARM1:
3326 case TOK_REGPARM2:
3327 skip('(');
3328 n = expr_const();
3329 if (n > 3)
3330 n = 3;
3331 else if (n < 0)
3332 n = 0;
3333 if (n > 0)
3334 ad->f.func_call = FUNC_FASTCALL1 + n - 1;
3335 skip(')');
3336 break;
3337 case TOK_FASTCALL1:
3338 case TOK_FASTCALL2:
3339 case TOK_FASTCALL3:
3340 ad->f.func_call = FUNC_FASTCALLW;
3341 break;
3342 #endif
3343 case TOK_MODE:
3344 skip('(');
3345 switch(tok) {
3346 case TOK_MODE_DI:
3347 ad->attr_mode = VT_LLONG + 1;
3348 break;
3349 case TOK_MODE_QI:
3350 ad->attr_mode = VT_BYTE + 1;
3351 break;
3352 case TOK_MODE_HI:
3353 ad->attr_mode = VT_SHORT + 1;
3354 break;
3355 case TOK_MODE_SI:
3356 case TOK_MODE_word:
3357 ad->attr_mode = VT_INT + 1;
3358 break;
3359 default:
3360 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok, NULL));
3361 break;
3363 next();
3364 skip(')');
3365 break;
3366 case TOK_DLLEXPORT:
3367 ad->a.dllexport = 1;
3368 break;
3369 case TOK_DLLIMPORT:
3370 ad->a.dllimport = 1;
3371 break;
3372 default:
3373 if (tcc_state->warn_unsupported)
3374 tcc_warning("'%s' attribute ignored", get_tok_str(t, NULL));
3375 /* skip parameters */
3376 if (tok == '(') {
3377 int parenthesis = 0;
3378 do {
3379 if (tok == '(')
3380 parenthesis++;
3381 else if (tok == ')')
3382 parenthesis--;
3383 next();
3384 } while (parenthesis && tok != -1);
3386 break;
3388 if (tok != ',')
3389 break;
3390 next();
3392 skip(')');
3393 skip(')');
3394 goto redo;
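/* Examples accepted by the parser above:
   __attribute__((section(".mytext"), aligned(16)))
   __attribute__((visibility("hidden"), weak))
   __attribute__((mode(DI)))   which turns the base type into a 64-bit int
   Unknown attribute names, including any parenthesized arguments, are
   skipped with a warning when warn_unsupported is enabled. */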
3397 static Sym * find_field (CType *type, int v)
3399 Sym *s = type->ref;
3400 v |= SYM_FIELD;
3401 while ((s = s->next) != NULL) {
3402 if ((s->v & SYM_FIELD) &&
3403 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3404 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3405 Sym *ret = find_field (&s->type, v);
3406 if (ret)
3407 return ret;
3409 if (s->v == v)
3410 break;
3412 return s;
3415 static void struct_add_offset (Sym *s, int offset)
3417 while ((s = s->next) != NULL) {
3418 if ((s->v & SYM_FIELD) &&
3419 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3420 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3421 struct_add_offset(s->type.ref, offset);
3422 } else
3423 s->c += offset;
3427 static void struct_layout(CType *type, AttributeDef *ad)
3429 int size, align, maxalign, offset, c, bit_pos, bit_size;
3430 int packed, a, bt, prevbt, prev_bit_size;
3431 int pcc = !tcc_state->ms_bitfields;
3432 int pragma_pack = *tcc_state->pack_stack_ptr;
3433 Sym *f;
3435 maxalign = 1;
3436 offset = 0;
3437 c = 0;
3438 bit_pos = 0;
3439 prevbt = VT_STRUCT; /* make it never match */
3440 prev_bit_size = 0;
3442 //#define BF_DEBUG
3444 for (f = type->ref->next; f; f = f->next) {
3445 if (f->type.t & VT_BITFIELD)
3446 bit_size = BIT_SIZE(f->type.t);
3447 else
3448 bit_size = -1;
3449 size = type_size(&f->type, &align);
3450 a = f->a.aligned ? 1 << (f->a.aligned - 1) : 0;
3451 packed = 0;
3453 if (pcc && bit_size == 0) {
3454 /* in pcc mode, packing does not affect zero-width bitfields */
3456 } else {
3457 /* in pcc mode, attribute packed overrides if set. */
3458 if (pcc && (f->a.packed || ad->a.packed))
3459 align = packed = 1;
3461 /* pragma pack overrides align if smaller, and always packs bitfields */
3462 if (pragma_pack) {
3463 packed = 1;
3464 if (pragma_pack < align)
3465 align = pragma_pack;
3466 /* in pcc mode pragma pack also overrides individual align */
3467 if (pcc && pragma_pack < a)
3468 a = 0;
3471 /* some individual align was specified */
3472 if (a)
3473 align = a;
3475 if (type->ref->type.t == VT_UNION) {
3476 if (pcc && bit_size >= 0)
3477 size = (bit_size + 7) >> 3;
3478 offset = 0;
3479 if (size > c)
3480 c = size;
3482 } else if (bit_size < 0) {
3483 if (pcc)
3484 c += (bit_pos + 7) >> 3;
3485 c = (c + align - 1) & -align;
3486 offset = c;
3487 if (size > 0)
3488 c += size;
3489 bit_pos = 0;
3490 prevbt = VT_STRUCT;
3491 prev_bit_size = 0;
3493 } else {
3494 /* A bit-field. Layout is more complicated. There are two
3495 options: PCC (GCC) compatible and MS compatible */
3496 if (pcc) {
3497 /* In PCC layout a bit-field is placed adjacent to the
3498 preceding bit-fields, except if:
3499 - it has zero-width
3500 - an individual alignment was given
3501 - it would overflow its base type container and
3502 there is no packing */
3503 if (bit_size == 0) {
3504 new_field:
3505 c = (c + ((bit_pos + 7) >> 3) + align - 1) & -align;
3506 bit_pos = 0;
3507 } else if (f->a.aligned) {
3508 goto new_field;
3509 } else if (!packed) {
3510 int a8 = align * 8;
3511 int ofs = ((c * 8 + bit_pos) % a8 + bit_size + a8 - 1) / a8;
3512 if (ofs > size / align)
3513 goto new_field;
3516 /* in pcc mode, long long bitfields have type int if they fit */
3517 if (size == 8 && bit_size <= 32)
3518 f->type.t = (f->type.t & ~VT_BTYPE) | VT_INT, size = 4;
3520 while (bit_pos >= align * 8)
3521 c += align, bit_pos -= align * 8;
3522 offset = c;
3524 /* In PCC layout named bit-fields influence the alignment
3525 of the containing struct using the base type's alignment,
3526 except for packed fields (which here have correct align). */
3527 if (f->v & SYM_FIRST_ANOM
3528 // && bit_size // ??? gcc on ARM/rpi does that
3530 align = 1;
3532 } else {
3533 bt = f->type.t & VT_BTYPE;
3534 if ((bit_pos + bit_size > size * 8)
3535 || (bit_size > 0) == (bt != prevbt)
3537 c = (c + align - 1) & -align;
3538 offset = c;
3539 bit_pos = 0;
3540 /* In MS bitfield mode a bit-field run always uses
3541 at least as many bits as the underlying type.
3542 To start a new run it's also required that this
3543 or the last bit-field had non-zero width. */
3544 if (bit_size || prev_bit_size)
3545 c += size;
3547 /* In MS layout the record's alignment is normally
3548 influenced by the field, except for a zero-width
3549 field at the start of a run (though subsequent zero-width
3550 fields do influence it again). */
3551 if (bit_size == 0 && prevbt != bt)
3552 align = 1;
3553 prevbt = bt;
3554 prev_bit_size = bit_size;
3557 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
3558 | (bit_pos << VT_STRUCT_SHIFT);
3559 bit_pos += bit_size;
3561 if (align > maxalign)
3562 maxalign = align;
3564 #ifdef BF_DEBUG
3565 printf("set field %s offset %-2d size %-2d align %-2d",
3566 get_tok_str(f->v & ~SYM_FIELD, NULL), offset, size, align);
3567 if (f->type.t & VT_BITFIELD) {
3568 printf(" pos %-2d bits %-2d",
3569 BIT_POS(f->type.t),
3570 BIT_SIZE(f->type.t)
3573 printf("\n");
3574 #endif
3576 if (f->v & SYM_FIRST_ANOM && (f->type.t & VT_BTYPE) == VT_STRUCT) {
3577 Sym *ass;
3578 /* An anonymous struct/union. Adjust member offsets
3579 to reflect the real offset of our containing struct.
3580 Also set the offset of this anon member inside
3581 the outer struct to be zero. Via this it
3582 works when accessing the field offset directly
3583 (from base object), as well as when recursing
3584 members in initializer handling. */
3585 int v2 = f->type.ref->v;
3586 if (!(v2 & SYM_FIELD) &&
3587 (v2 & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
3588 Sym **pps;
3589 /* This happens only with MS extensions. The
3590 anon member has a named struct type, so it
3591 potentially is shared with other references.
3592 We need to unshare members so we can modify
3593 them. */
3594 ass = f->type.ref;
3595 f->type.ref = sym_push(anon_sym++ | SYM_FIELD,
3596 &f->type.ref->type, 0,
3597 f->type.ref->c);
3598 pps = &f->type.ref->next;
3599 while ((ass = ass->next) != NULL) {
3600 *pps = sym_push(ass->v, &ass->type, 0, ass->c);
3601 pps = &((*pps)->next);
3603 *pps = NULL;
3605 struct_add_offset(f->type.ref, offset);
3606 f->c = 0;
3607 } else {
3608 f->c = offset;
3611 f->r = 0;
3614 if (pcc)
3615 c += (bit_pos + 7) >> 3;
3617 /* store size and alignment */
3618 a = bt = ad->a.aligned ? 1 << (ad->a.aligned - 1) : 1;
3619 if (a < maxalign)
3620 a = maxalign;
3621 type->ref->r = a;
3622 if (pragma_pack && pragma_pack < maxalign && 0 == pcc) {
3623 /* can happen if individual align for some member was given. In
3624 this case MSVC ignores maxalign when aligning the size */
3625 a = pragma_pack;
3626 if (a < bt)
3627 a = bt;
3629 c = (c + a - 1) & -a;
3630 type->ref->c = c;
3632 #ifdef BF_DEBUG
3633 printf("struct size %-2d align %-2d\n\n", c, a), fflush(stdout);
3634 #endif
3636 /* check whether we can access bitfields by their type */
3637 for (f = type->ref->next; f; f = f->next) {
3638 int s, px, cx, c0;
3639 CType t;
3641 if (0 == (f->type.t & VT_BITFIELD))
3642 continue;
3643 f->type.ref = f;
3644 f->auxtype = -1;
3645 bit_size = BIT_SIZE(f->type.t);
3646 if (bit_size == 0)
3647 continue;
3648 bit_pos = BIT_POS(f->type.t);
3649 size = type_size(&f->type, &align);
3650 if (bit_pos + bit_size <= size * 8 && f->c + size <= c)
3651 continue;
3653 /* try to access the field using a different type */
3654 c0 = -1, s = align = 1;
3655 for (;;) {
3656 px = f->c * 8 + bit_pos;
3657 cx = (px >> 3) & -align;
3658 px = px - (cx << 3);
3659 if (c0 == cx)
3660 break;
3661 s = (px + bit_size + 7) >> 3;
3662 if (s > 4) {
3663 t.t = VT_LLONG;
3664 } else if (s > 2) {
3665 t.t = VT_INT;
3666 } else if (s > 1) {
3667 t.t = VT_SHORT;
3668 } else {
3669 t.t = VT_BYTE;
3671 s = type_size(&t, &align);
3672 c0 = cx;
3675 if (px + bit_size <= s * 8 && cx + s <= c) {
3676 /* update offset and bit position */
3677 f->c = cx;
3678 bit_pos = px;
3679 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
3680 | (bit_pos << VT_STRUCT_SHIFT);
3681 if (s != size)
3682 f->auxtype = t.t;
3683 #ifdef BF_DEBUG
3684 printf("FIX field %s offset %-2d size %-2d align %-2d "
3685 "pos %-2d bits %-2d\n",
3686 get_tok_str(f->v & ~SYM_FIELD, NULL),
3687 cx, s, align, px, bit_size);
3688 #endif
3689 } else {
3690 /* fall back to load/store single-byte wise */
3691 f->auxtype = VT_STRUCT;
3692 #ifdef BF_DEBUG
3693 printf("FIX field %s : load byte-wise\n",
3694 get_tok_str(f->v & ~SYM_FIELD, NULL));
3695 #endif
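/* A layout example for the two modes handled above (4-byte int
   assumed): for "struct { char a:4; int b:28; }" the PCC/GCC rules
   pack b right after a inside one 4-byte unit, so the struct has size
   4, while the MS rules start a new int-aligned unit for b because
   its base type differs from a's, giving b offset 4 and size 8. */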
3700 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
3701 static void struct_decl(CType *type, int u)
3703 int v, c, size, align, flexible;
3704 int bit_size, bsize, bt;
3705 Sym *s, *ss, **ps;
3706 AttributeDef ad, ad1;
3707 CType type1, btype;
3709 memset(&ad, 0, sizeof ad);
3710 next();
3711 parse_attribute(&ad);
3712 if (tok != '{') {
3713 v = tok;
3714 next();
3715 /* struct already defined ? return it */
3716 if (v < TOK_IDENT)
3717 expect("struct/union/enum name");
3718 s = struct_find(v);
3719 if (s && (s->sym_scope == local_scope || tok != '{')) {
3720 if (u == s->type.t)
3721 goto do_decl;
3722 if (u == VT_ENUM && IS_ENUM(s->type.t))
3723 goto do_decl;
3724 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
3726 } else {
3727 v = anon_sym++;
3729 /* Record the original enum/struct/union token. */
3730 type1.t = u == VT_ENUM ? u | VT_INT | VT_UNSIGNED : u;
3731 type1.ref = NULL;
3732 /* we put an undefined size for struct/union */
3733 s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
3734 s->r = 0; /* default alignment is zero, as in gcc */
3735 do_decl:
3736 type->t = s->type.t;
3737 type->ref = s;
3739 if (tok == '{') {
3740 next();
3741 if (s->c != -1)
3742 tcc_error("struct/union/enum already defined");
3743 /* cannot be empty */
3744 /* empty enums are not allowed */
3745 ps = &s->next;
3746 if (u == VT_ENUM) {
3747 long long ll = 0, pl = 0, nl = 0;
3748 CType t;
3749 t.ref = s;
3750 /* enum symbols have static storage */
3751 t.t = VT_INT|VT_STATIC|VT_ENUM_VAL;
3752 for(;;) {
3753 v = tok;
3754 if (v < TOK_UIDENT)
3755 expect("identifier");
3756 ss = sym_find(v);
3757 if (ss && !local_stack)
3758 tcc_error("redefinition of enumerator '%s'",
3759 get_tok_str(v, NULL));
3760 next();
3761 if (tok == '=') {
3762 next();
3763 ll = expr_const64();
3765 ss = sym_push(v, &t, VT_CONST, 0);
3766 ss->enum_val = ll;
3767 *ps = ss, ps = &ss->next;
3768 if (ll < nl)
3769 nl = ll;
3770 if (ll > pl)
3771 pl = ll;
3772 if (tok != ',')
3773 break;
3774 next();
3775 ll++;
3776 /* NOTE: we accept a trailing comma */
3777 if (tok == '}')
3778 break;
3780 skip('}');
3781 /* set integral type of the enum */
3782 t.t = VT_INT;
3783 if (nl >= 0) {
3784 if (pl != (unsigned)pl)
3785 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
3786 t.t |= VT_UNSIGNED;
3787 } else if (pl != (int)pl || nl != (int)nl)
3788 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
3789 s->type.t = type->t = t.t | VT_ENUM;
3790 s->c = 0;
3791 /* set type for enum members */
3792 for (ss = s->next; ss; ss = ss->next) {
3793 ll = ss->enum_val;
3794 if (ll == (int)ll) /* default is int if it fits */
3795 continue;
3796 if (t.t & VT_UNSIGNED) {
3797 ss->type.t |= VT_UNSIGNED;
3798 if (ll == (unsigned)ll)
3799 continue;
3801 ss->type.t = (ss->type.t & ~VT_BTYPE)
3802 | (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
3804 } else {
3805 c = 0;
3806 flexible = 0;
3807 while (tok != '}') {
3808 if (!parse_btype(&btype, &ad1)) {
3809 skip(';');
3810 continue;
3812 while (1) {
3813 if (flexible)
3814 tcc_error("flexible array member '%s' not at the end of struct",
3815 get_tok_str(v, NULL));
3816 bit_size = -1;
3817 v = 0;
3818 type1 = btype;
3819 if (tok != ':') {
3820 if (tok != ';')
3821 type_decl(&type1, &ad1, &v, TYPE_DIRECT);
3822 if (v == 0) {
3823 if ((type1.t & VT_BTYPE) != VT_STRUCT)
3824 expect("identifier");
3825 else {
3826 int v = btype.ref->v;
3827 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
3828 if (tcc_state->ms_extensions == 0)
3829 expect("identifier");
3833 if (type_size(&type1, &align) < 0) {
3834 if ((u == VT_STRUCT) && (type1.t & VT_ARRAY) && c)
3835 flexible = 1;
3836 else
3837 tcc_error("field '%s' has incomplete type",
3838 get_tok_str(v, NULL));
3840 if ((type1.t & VT_BTYPE) == VT_FUNC ||
3841 (type1.t & VT_STORAGE))
3842 tcc_error("invalid type for '%s'",
3843 get_tok_str(v, NULL));
3845 if (tok == ':') {
3846 next();
3847 bit_size = expr_const();
3848 /* XXX: handle v = 0 case for messages */
3849 if (bit_size < 0)
3850 tcc_error("negative width in bit-field '%s'",
3851 get_tok_str(v, NULL));
3852 if (v && bit_size == 0)
3853 tcc_error("zero width for bit-field '%s'",
3854 get_tok_str(v, NULL));
3855 parse_attribute(&ad1);
3857 size = type_size(&type1, &align);
3858 if (bit_size >= 0) {
3859 bt = type1.t & VT_BTYPE;
3860 if (bt != VT_INT &&
3861 bt != VT_BYTE &&
3862 bt != VT_SHORT &&
3863 bt != VT_BOOL &&
3864 bt != VT_LLONG)
3865 tcc_error("bitfields must have scalar type");
3866 bsize = size * 8;
3867 if (bit_size > bsize) {
3868 tcc_error("width of '%s' exceeds its type",
3869 get_tok_str(v, NULL));
3870 } else if (bit_size == bsize
3871 && !ad.a.packed && !ad1.a.packed) {
3872 /* no need for bit fields */
3874 } else if (bit_size == 64) {
3875 tcc_error("field width 64 not implemented");
3876 } else {
3877 type1.t = (type1.t & ~VT_STRUCT_MASK)
3878 | VT_BITFIELD
3879 | (bit_size << (VT_STRUCT_SHIFT + 6));
3882 if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
3883 /* Remember we've seen a real field to check
3884 for placement of flexible array member. */
3885 c = 1;
3887 /* If member is a struct or bit-field, enforce
3888 placing into the struct (as anonymous). */
3889 if (v == 0 &&
3890 ((type1.t & VT_BTYPE) == VT_STRUCT ||
3891 bit_size >= 0)) {
3892 v = anon_sym++;
3894 if (v) {
3895 ss = sym_push(v | SYM_FIELD, &type1, 0, 0);
3896 ss->a = ad1.a;
3897 *ps = ss;
3898 ps = &ss->next;
3900 if (tok == ';' || tok == TOK_EOF)
3901 break;
3902 skip(',');
3904 skip(';');
3906 skip('}');
3907 parse_attribute(&ad);
3908 struct_layout(type, &ad);
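/* Examples of the enum typing above: "enum { A = -1, B }" stays plain
   int; "enum { C = 0xffffffff }" becomes unsigned int since the value
   fits unsigned but not int; an enumerator that does not fit in 32
   bits promotes the enum (and that enumerator) to a 64-bit type,
   unsigned when no negative values are present. */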
3913 static void sym_to_attr(AttributeDef *ad, Sym *s)
3915 if (s->a.aligned && 0 == ad->a.aligned)
3916 ad->a.aligned = s->a.aligned;
3917 if (s->f.func_call && 0 == ad->f.func_call)
3918 ad->f.func_call = s->f.func_call;
3919 if (s->f.func_type && 0 == ad->f.func_type)
3920 ad->f.func_type = s->f.func_type;
3921 if (s->a.packed)
3922 ad->a.packed = 1;
3925 /* Add type qualifiers to a type. If the type is an array then the qualifiers
3926 are added to the element type, copied because it could be a typedef. */
3927 static void parse_btype_qualify(CType *type, int qualifiers)
3929 while (type->t & VT_ARRAY) {
3930 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
3931 type = &type->ref->type;
3933 type->t |= qualifiers;
3936 /* return 0 if no type declaration. Otherwise, return the basic type
3937 and skip it.
3939 static int parse_btype(CType *type, AttributeDef *ad)
3941 int t, u, bt, st, type_found, typespec_found, g;
3942 Sym *s;
3943 CType type1;
3945 memset(ad, 0, sizeof(AttributeDef));
3946 type_found = 0;
3947 typespec_found = 0;
3948 t = VT_INT;
3949 bt = st = -1;
3950 type->ref = NULL;
3952 while(1) {
3953 switch(tok) {
3954 case TOK_EXTENSION:
3955 /* currently, we simply ignore __extension__ */
3956 next();
3957 continue;
3959 /* basic types */
3960 case TOK_CHAR:
3961 u = VT_BYTE;
3962 basic_type:
3963 next();
3964 basic_type1:
3965 if (u == VT_SHORT || u == VT_LONG) {
3966 if (st != -1 || (bt != -1 && bt != VT_INT))
3967 tmbt: tcc_error("too many basic types");
3968 st = u;
3969 } else {
3970 if (bt != -1 || (st != -1 && u != VT_INT))
3971 goto tmbt;
3972 bt = u;
3974 if (u != VT_INT)
3975 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
3976 typespec_found = 1;
3977 break;
3978 case TOK_VOID:
3979 u = VT_VOID;
3980 goto basic_type;
3981 case TOK_SHORT:
3982 u = VT_SHORT;
3983 goto basic_type;
3984 case TOK_INT:
3985 u = VT_INT;
3986 goto basic_type;
3987 case TOK_LONG:
3988 if ((t & VT_BTYPE) == VT_DOUBLE) {
3989 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
3990 } else if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
3991 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LLONG;
3992 } else {
3993 u = VT_LONG;
3994 goto basic_type;
3996 next();
3997 break;
3998 #ifdef TCC_TARGET_ARM64
3999 case TOK_UINT128:
4000 /* GCC's __uint128_t appears in some Linux header files. Make it a
4001 synonym for long double to get the size and alignment right. */
4002 u = VT_LDOUBLE;
4003 goto basic_type;
4004 #endif
4005 case TOK_BOOL:
4006 u = VT_BOOL;
4007 goto basic_type;
4008 case TOK_FLOAT:
4009 u = VT_FLOAT;
4010 goto basic_type;
4011 case TOK_DOUBLE:
4012 if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
4013 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
4014 } else {
4015 u = VT_DOUBLE;
4016 goto basic_type;
4018 next();
4019 break;
4020 case TOK_ENUM:
4021 struct_decl(&type1, VT_ENUM);
4022 basic_type2:
4023 u = type1.t;
4024 type->ref = type1.ref;
4025 goto basic_type1;
4026 case TOK_STRUCT:
4027 struct_decl(&type1, VT_STRUCT);
4028 goto basic_type2;
4029 case TOK_UNION:
4030 struct_decl(&type1, VT_UNION);
4031 goto basic_type2;
4033 /* type modifiers */
4034 case TOK_CONST1:
4035 case TOK_CONST2:
4036 case TOK_CONST3:
4037 type->t = t;
4038 parse_btype_qualify(type, VT_CONSTANT);
4039 t = type->t;
4040 next();
4041 break;
4042 case TOK_VOLATILE1:
4043 case TOK_VOLATILE2:
4044 case TOK_VOLATILE3:
4045 type->t = t;
4046 parse_btype_qualify(type, VT_VOLATILE);
4047 t = type->t;
4048 next();
4049 break;
4050 case TOK_SIGNED1:
4051 case TOK_SIGNED2:
4052 case TOK_SIGNED3:
4053 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
4054 tcc_error("signed and unsigned modifier");
4055 t |= VT_DEFSIGN;
4056 next();
4057 typespec_found = 1;
4058 break;
4059 case TOK_REGISTER:
4060 case TOK_AUTO:
4061 case TOK_RESTRICT1:
4062 case TOK_RESTRICT2:
4063 case TOK_RESTRICT3:
4064 next();
4065 break;
4066 case TOK_UNSIGNED:
4067 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
4068 tcc_error("signed and unsigned modifier");
4069 t |= VT_DEFSIGN | VT_UNSIGNED;
4070 next();
4071 typespec_found = 1;
4072 break;
4074 /* storage */
4075 case TOK_EXTERN:
4076 g = VT_EXTERN;
4077 goto storage;
4078 case TOK_STATIC:
4079 g = VT_STATIC;
4080 goto storage;
4081 case TOK_TYPEDEF:
4082 g = VT_TYPEDEF;
4083 goto storage;
4084 storage:
4085 if (t & (VT_EXTERN|VT_STATIC|VT_TYPEDEF) & ~g)
4086 tcc_error("multiple storage classes");
4087 t |= g;
4088 next();
4089 break;
4090 case TOK_INLINE1:
4091 case TOK_INLINE2:
4092 case TOK_INLINE3:
4093 t |= VT_INLINE;
4094 next();
4095 break;
4097 /* GNUC attribute */
4098 case TOK_ATTRIBUTE1:
4099 case TOK_ATTRIBUTE2:
4100 parse_attribute(ad);
4101 if (ad->attr_mode) {
4102 u = ad->attr_mode -1;
4103 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
4105 break;
4106 /* GNUC typeof */
4107 case TOK_TYPEOF1:
4108 case TOK_TYPEOF2:
4109 case TOK_TYPEOF3:
4110 next();
4111 parse_expr_type(&type1);
4112 /* remove all storage modifiers except typedef */
4113 type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
4114 if (type1.ref)
4115 sym_to_attr(ad, type1.ref);
4116 goto basic_type2;
4117 default:
4118 if (typespec_found)
4119 goto the_end;
4120 s = sym_find(tok);
4121 if (!s || !(s->type.t & VT_TYPEDEF))
4122 goto the_end;
4123 t &= ~(VT_BTYPE|VT_LONG);
4124 u = t & ~(VT_CONSTANT | VT_VOLATILE), t ^= u;
4125 type->t = (s->type.t & ~VT_TYPEDEF) | u;
4126 type->ref = s->type.ref;
4127 if (t)
4128 parse_btype_qualify(type, t);
4129 t = type->t;
4130 /* get attributes from typedef */
4131 sym_to_attr(ad, s);
4132 next();
4133 typespec_found = 1;
4134 st = bt = -2;
4135 break;
4137 type_found = 1;
4139 the_end:
4140 if (tcc_state->char_is_unsigned) {
4141 if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
4142 t |= VT_UNSIGNED;
4144 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
4145 bt = t & (VT_BTYPE|VT_LONG);
4146 if (bt == VT_LONG)
4147 t |= LONG_SIZE == 8 ? VT_LLONG : VT_INT;
4148 #ifdef TCC_TARGET_PE
4149 if (bt == VT_LDOUBLE)
4150 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_DOUBLE;
4151 #endif
4152 type->t = t;
4153 return type_found;
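/* Examples of specifier combinations handled above: "unsigned long
   long" sets VT_DEFSIGN|VT_UNSIGNED and the second "long" upgrades
   VT_LONG to VT_LLONG; "long double" becomes VT_LDOUBLE (replaced by
   plain double on PE targets at the end); a qualifier applied to a
   typedef'd array type is pushed down to the element type via
   parse_btype_qualify. */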
4156 /* convert a function parameter type (array to pointer and function to
4157 function pointer) */
4158 static inline void convert_parameter_type(CType *pt)
4160 /* remove const and volatile qualifiers (XXX: const could be used
4161 to indicate a const function parameter) */
4162 pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
4163 /* array must be transformed to pointer according to ANSI C */
4164 pt->t &= ~VT_ARRAY;
4165 if ((pt->t & VT_BTYPE) == VT_FUNC) {
4166 mk_pointer(pt);
4170 ST_FUNC void parse_asm_str(CString *astr)
4172 skip('(');
4173 parse_mult_str(astr, "string constant");
4176 /* Parse an asm label and return the token */
4177 static int asm_label_instr(void)
4179 int v;
4180 CString astr;
4182 next();
4183 parse_asm_str(&astr);
4184 skip(')');
4185 #ifdef ASM_DEBUG
4186 printf("asm_alias: \"%s\"\n", (char *)astr.data);
4187 #endif
4188 v = tok_alloc(astr.data, astr.size - 1)->tok;
4189 cstr_free(&astr);
4190 return v;
4193 static int post_type(CType *type, AttributeDef *ad, int storage, int td)
4195 int n, l, t1, arg_size, align;
4196 Sym **plast, *s, *first;
4197 AttributeDef ad1;
4198 CType pt;
4200 if (tok == '(') {
4201 /* function type, or recursive declarator (return if so) */
4202 next();
4203 if (td && !(td & TYPE_ABSTRACT))
4204 return 0;
4205 if (tok == ')')
4206 l = 0;
4207 else if (parse_btype(&pt, &ad1))
4208 l = FUNC_NEW;
4209 else if (td)
4210 return 0;
4211 else
4212 l = FUNC_OLD;
4213 first = NULL;
4214 plast = &first;
4215 arg_size = 0;
4216 if (l) {
4217 for(;;) {
4218 /* read param name and compute offset */
4219 if (l != FUNC_OLD) {
4220 if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
4221 break;
4222 type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT);
4223 if ((pt.t & VT_BTYPE) == VT_VOID)
4224 tcc_error("parameter declared as void");
4225 arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
4226 } else {
4227 n = tok;
4228 if (n < TOK_UIDENT)
4229 expect("identifier");
4230 pt.t = VT_VOID; /* invalid type */
4231 next();
4233 convert_parameter_type(&pt);
4234 s = sym_push(n | SYM_FIELD, &pt, 0, 0);
4235 *plast = s;
4236 plast = &s->next;
4237 if (tok == ')')
4238 break;
4239 skip(',');
4240 if (l == FUNC_NEW && tok == TOK_DOTS) {
4241 l = FUNC_ELLIPSIS;
4242 next();
4243 break;
4245 if (l == FUNC_NEW && !parse_btype(&pt, &ad1))
4246 tcc_error("invalid type");
4248 } else
4249 /* if no parameters, then old type prototype */
4250 l = FUNC_OLD;
4251 skip(')');
4252 /* NOTE: const is ignored in returned type as it has a special
4253 meaning in gcc / C++ */
4254 type->t &= ~VT_CONSTANT;
4255 /* some ancient pre-K&R C allows a function to return an array
4256 and the array brackets to be put after the arguments, such
4257 that "int c()[]" means something like "int[] c()" */
4258 if (tok == '[') {
4259 next();
4260 skip(']'); /* only handle simple "[]" */
4261 mk_pointer(type);
4263 /* we push an anonymous symbol which will contain the function prototype */
4264 ad->f.func_args = arg_size;
4265 ad->f.func_type = l;
4266 s = sym_push(SYM_FIELD, type, 0, 0);
4267 s->a = ad->a;
4268 s->f = ad->f;
4269 s->next = first;
4270 type->t = VT_FUNC;
4271 type->ref = s;
4272 } else if (tok == '[') {
4273 int saved_nocode_wanted = nocode_wanted;
4274 /* array definition */
4275 next();
4276 if (tok == TOK_RESTRICT1)
4277 next();
4278 n = -1;
4279 t1 = 0;
4280 if (tok != ']') {
4281 if (!local_stack || (storage & VT_STATIC))
4282 vpushi(expr_const());
4283 else {
4284 /* VLAs (which can only happen with local_stack && !VT_STATIC)
4285 length must always be evaluated, even under nocode_wanted,
4286 so that its size slot is initialized (e.g. under sizeof
4287 or typeof). */
4288 nocode_wanted = 0;
4289 gexpr();
4291 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4292 n = vtop->c.i;
4293 if (n < 0)
4294 tcc_error("invalid array size");
4295 } else {
4296 if (!is_integer_btype(vtop->type.t & VT_BTYPE))
4297 tcc_error("size of variable length array should be an integer");
4298 t1 = VT_VLA;
4301 skip(']');
4302 /* parse next post type */
4303 post_type(type, ad, storage, 0);
4304 if (type->t == VT_FUNC)
4305 tcc_error("declaration of an array of functions");
4306 t1 |= type->t & VT_VLA;
4308 if (t1 & VT_VLA) {
4309 loc -= type_size(&int_type, &align);
4310 loc &= -align;
4311 n = loc;
4313 vla_runtime_type_size(type, &align);
4314 gen_op('*');
4315 vset(&int_type, VT_LOCAL|VT_LVAL, n);
4316 vswap();
4317 vstore();
4319 if (n != -1)
4320 vpop();
4321 nocode_wanted = saved_nocode_wanted;
4323 /* we push an anonymous symbol which will contain the array
4324 element type */
4325 s = sym_push(SYM_FIELD, type, 0, n);
4326 type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
4327 type->ref = s;
4329 return 1;
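/* Declarator examples for the code above: "int f(void)" yields a
   FUNC_NEW prototype with no parameters, "int f()" yields FUNC_OLD,
   "int f(int, ...)" yields FUNC_ELLIPSIS, and "int a[n]" with a
   non-constant n in local scope becomes a VT_VLA whose byte size is
   computed at run time and kept in a local stack slot. */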
4332 /* Parse a type declarator (except basic type), and return the type
4333 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4334 expected. 'type' should contain the basic type. 'ad' is the
4335 attribute definition of the basic type. It can be modified by
4336 type_decl(). If this (possibly abstract) declarator is a pointer chain
4337 it returns the innermost pointed to type (equals *type, but is a different
4338 pointer), otherwise returns type itself, that's used for recursive calls. */
4339 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td)
4341 CType *post, *ret;
4342 int qualifiers, storage;
4344 /* recursive type, remove storage bits first, apply them later again */
4345 storage = type->t & VT_STORAGE;
4346 type->t &= ~VT_STORAGE;
4347 post = ret = type;
4349 while (tok == '*') {
4350 qualifiers = 0;
4351 redo:
4352 next();
4353 switch(tok) {
4354 case TOK_CONST1:
4355 case TOK_CONST2:
4356 case TOK_CONST3:
4357 qualifiers |= VT_CONSTANT;
4358 goto redo;
4359 case TOK_VOLATILE1:
4360 case TOK_VOLATILE2:
4361 case TOK_VOLATILE3:
4362 qualifiers |= VT_VOLATILE;
4363 goto redo;
4364 case TOK_RESTRICT1:
4365 case TOK_RESTRICT2:
4366 case TOK_RESTRICT3:
4367 goto redo;
4368 /* XXX: clarify attribute handling */
4369 case TOK_ATTRIBUTE1:
4370 case TOK_ATTRIBUTE2:
4371 parse_attribute(ad);
4372 break;
4374 mk_pointer(type);
4375 type->t |= qualifiers;
4376 if (ret == type)
4377 /* innermost pointed to type is the one for the first derivation */
4378 ret = pointed_type(type);
4381 if (tok == '(') {
4382 /* This is possibly a parameter type list for abstract declarators
4383 ('int ()'); use post_type to test this. */
4384 if (!post_type(type, ad, 0, td)) {
4385 /* It's not, so it's a nested declarator, and the post operations
4386 apply to the innermost pointed to type (if any). */
4387 /* XXX: this is not correct to modify 'ad' at this point, but
4388 the syntax is not clear */
4389 parse_attribute(ad);
4390 post = type_decl(type, ad, v, td);
4391 skip(')');
4393 } else if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
4394 /* type identifier */
4395 *v = tok;
4396 next();
4397 } else {
4398 if (!(td & TYPE_ABSTRACT))
4399 expect("identifier");
4400 *v = 0;
4402 post_type(post, ad, storage, 0);
4403 parse_attribute(ad);
4404 type->t |= storage;
4405 return ret;
4408 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
4409 ST_FUNC int lvalue_type(int t)
4411 int bt, r;
4412 r = VT_LVAL;
4413 bt = t & VT_BTYPE;
4414 if (bt == VT_BYTE || bt == VT_BOOL)
4415 r |= VT_LVAL_BYTE;
4416 else if (bt == VT_SHORT)
4417 r |= VT_LVAL_SHORT;
4418 else
4419 return r;
4420 if (t & VT_UNSIGNED)
4421 r |= VT_LVAL_UNSIGNED;
4422 return r;
4425 /* indirection with full error checking and bound check */
4426 ST_FUNC void indir(void)
4428 if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
4429 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
4430 return;
4431 expect("pointer");
4433 if (vtop->r & VT_LVAL)
4434 gv(RC_INT);
4435 vtop->type = *pointed_type(&vtop->type);
4436 /* Arrays and functions are never lvalues */
4437 if (!(vtop->type.t & VT_ARRAY) && !(vtop->type.t & VT_VLA)
4438 && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
4439 vtop->r |= lvalue_type(vtop->type.t);
4440 /* if bound checking, the referenced pointer must be checked */
4441 #ifdef CONFIG_TCC_BCHECK
4442 if (tcc_state->do_bounds_check)
4443 vtop->r |= VT_MUSTBOUND;
4444 #endif
4448 /* pass a parameter to a function and do type checking and casting */
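/* For arguments matching an old-style prototype or the '...' part of
   a variadic one, only the default float -> double promotion is done
   here (the gen_cast_s(VT_DOUBLE) below). A minimal sketch, assuming
   the usual <stdio.h> declaration of printf:

       int printf(const char *fmt, ...);
       float f = 1.5f;
       printf("%f\n", f);   (f is converted to double before the call)
*/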
4449 static void gfunc_param_typed(Sym *func, Sym *arg)
4451 int func_type;
4452 CType type;
4454 func_type = func->f.func_type;
4455 if (func_type == FUNC_OLD ||
4456 (func_type == FUNC_ELLIPSIS && arg == NULL)) {
4457 /* default casting : only need to convert float to double */
4458 if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
4459 gen_cast_s(VT_DOUBLE);
4460 } else if (vtop->type.t & VT_BITFIELD) {
4461 type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
4462 type.ref = vtop->type.ref;
4463 gen_cast(&type);
4465 } else if (arg == NULL) {
4466 tcc_error("too many arguments to function");
4467 } else {
4468 type = arg->type;
4469 type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
4470 gen_assign_cast(&type);
4474 /* parse an expression and return its type without any side effect. */
4475 static void expr_type(CType *type, void (*expr_fn)(void))
4477 nocode_wanted++;
4478 expr_fn();
4479 *type = vtop->type;
4480 vpop();
4481 nocode_wanted--;
4484 /* parse an expression of the form '(type)' or '(expr)' and return its
4485 type */
4486 static void parse_expr_type(CType *type)
4488 int n;
4489 AttributeDef ad;
4491 skip('(');
4492 if (parse_btype(type, &ad)) {
4493 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4494 } else {
4495 expr_type(type, gexpr);
4497 skip(')');
4500 static void parse_type(CType *type)
4502 AttributeDef ad;
4503 int n;
4505 if (!parse_btype(type, &ad)) {
4506 expect("type");
4508 type_decl(type, &ad, &n, TYPE_ABSTRACT);
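/* The 'args' string below encodes the expected builtin arguments:
   'e' parses one assignment expression, 't' parses one type name.
   So parse_builtin_params(0, "ee") reads "(expr, expr)", and
   parse_builtin_params(1, "e") reads "(expr)" with code generation
   suppressed while parsing (nc != 0). */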
4511 static void parse_builtin_params(int nc, const char *args)
4513 char c, sep = '(';
4514 CType t;
4515 if (nc)
4516 nocode_wanted++;
4517 next();
4518 while ((c = *args++)) {
4519 skip(sep);
4520 sep = ',';
4521 switch (c) {
4522 case 'e': expr_eq(); continue;
4523 case 't': parse_type(&t); vpush(&t); continue;
4524 default: tcc_error("internal error"); break;
4527 skip(')');
4528 if (nc)
4529 nocode_wanted--;
4532 ST_FUNC void unary(void)
4534 int n, t, align, size, r, sizeof_caller;
4535 CType type;
4536 Sym *s;
4537 AttributeDef ad;
4539 sizeof_caller = in_sizeof;
4540 in_sizeof = 0;
4541 type.ref = NULL;
4542 /* XXX: GCC 2.95.3 does not generate a jump table here although
4543 it would be better */
4544 tok_next:
4545 switch(tok) {
4546 case TOK_EXTENSION:
4547 next();
4548 goto tok_next;
4549 case TOK_LCHAR:
4550 #ifdef TCC_TARGET_PE
4551 t = VT_SHORT|VT_UNSIGNED;
4552 goto push_tokc;
4553 #endif
4554 case TOK_CINT:
4555 case TOK_CCHAR:
4556 t = VT_INT;
4557 push_tokc:
4558 type.t = t;
4559 vsetc(&type, VT_CONST, &tokc);
4560 next();
4561 break;
4562 case TOK_CUINT:
4563 t = VT_INT | VT_UNSIGNED;
4564 goto push_tokc;
4565 case TOK_CLLONG:
4566 t = VT_LLONG;
4567 goto push_tokc;
4568 case TOK_CULLONG:
4569 t = VT_LLONG | VT_UNSIGNED;
4570 goto push_tokc;
4571 case TOK_CFLOAT:
4572 t = VT_FLOAT;
4573 goto push_tokc;
4574 case TOK_CDOUBLE:
4575 t = VT_DOUBLE;
4576 goto push_tokc;
4577 case TOK_CLDOUBLE:
4578 t = VT_LDOUBLE;
4579 goto push_tokc;
4580 case TOK_CLONG:
4581 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG;
4582 goto push_tokc;
4583 case TOK_CULONG:
4584 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG | VT_UNSIGNED;
4585 goto push_tokc;
4586 case TOK___FUNCTION__:
4587 if (!gnu_ext)
4588 goto tok_identifier;
4589 /* fall thru */
4590 case TOK___FUNC__:
4592 void *ptr;
4593 int len;
4594 /* special function name identifier */
4595 len = strlen(funcname) + 1;
4596 /* generate char[len] type */
4597 type.t = VT_BYTE;
4598 mk_pointer(&type);
4599 type.t |= VT_ARRAY;
4600 type.ref->c = len;
4601 vpush_ref(&type, data_section, data_section->data_offset, len);
4602 if (!NODATA_WANTED) {
4603 ptr = section_ptr_add(data_section, len);
4604 memcpy(ptr, funcname, len);
4606 next();
4608 break;
4609 case TOK_LSTR:
4610 #ifdef TCC_TARGET_PE
4611 t = VT_SHORT | VT_UNSIGNED;
4612 #else
4613 t = VT_INT;
4614 #endif
4615 goto str_init;
4616 case TOK_STR:
4617 /* string parsing */
4618 t = VT_BYTE;
4619 if (tcc_state->char_is_unsigned)
4620 t = VT_BYTE | VT_UNSIGNED;
4621 str_init:
4622 if (tcc_state->warn_write_strings)
4623 t |= VT_CONSTANT;
4624 type.t = t;
4625 mk_pointer(&type);
4626 type.t |= VT_ARRAY;
4627 memset(&ad, 0, sizeof(AttributeDef));
4628 decl_initializer_alloc(&type, &ad, VT_CONST, 2, 0, 0);
4629 break;
4630 case '(':
4631 next();
4632 /* cast ? */
4633 if (parse_btype(&type, &ad)) {
4634 type_decl(&type, &ad, &n, TYPE_ABSTRACT);
4635 skip(')');
4636 /* check ISOC99 compound literal */
4637 if (tok == '{') {
4638 /* data is allocated locally by default */
4639 if (global_expr)
4640 r = VT_CONST;
4641 else
4642 r = VT_LOCAL;
4643 /* all except arrays are lvalues */
4644 if (!(type.t & VT_ARRAY))
4645 r |= lvalue_type(type.t);
4646 memset(&ad, 0, sizeof(AttributeDef));
4647 decl_initializer_alloc(&type, &ad, r, 1, 0, 0);
4648 } else {
4649 if (sizeof_caller) {
4650 vpush(&type);
4651 return;
4653 unary();
4654 gen_cast(&type);
4656 } else if (tok == '{') {
4657 int saved_nocode_wanted = nocode_wanted;
4658 if (const_wanted)
4659 tcc_error("expected constant");
4660 /* save all registers */
4661 save_regs(0);
4662 /* statement expression: unlike GCC, we do not accept break/continue
4663 inside it. We do retain the nocode_wanted state, as statement
4664 expressions can't ever be entered from the outside, so any
4665 reactivation of code emission (from labels or loop heads) can
4666 be disabled again after the end of it. */
4667 block(NULL, NULL, 1);
4668 nocode_wanted = saved_nocode_wanted;
4669 skip(')');
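/* A minimal sketch of the GNU statement expression handled just
   above, assuming some function int f(void) is in scope:

       int x = ({ int t = f(); t + 1; });

   The value of the last expression statement becomes the value of
   the whole construct; in constant-expression context it is rejected
   by the const_wanted check above. */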
4670 } else {
4671 gexpr();
4672 skip(')');
4674 break;
4675 case '*':
4676 next();
4677 unary();
4678 indir();
4679 break;
4680 case '&':
4681 next();
4682 unary();
4683 /* functions names must be treated as function pointers,
4684 except for unary '&' and sizeof. Since we consider that
4685 functions are not lvalues, we only have to handle it
4686 there and in function calls. */
4687 /* arrays can also be used although they are not lvalues */
4688 if ((vtop->type.t & VT_BTYPE) != VT_FUNC &&
4689 !(vtop->type.t & VT_ARRAY))
4690 test_lvalue();
4691 mk_pointer(&vtop->type);
4692 gaddrof();
4693 break;
4694 case '!':
4695 next();
4696 unary();
4697 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4698 gen_cast_s(VT_BOOL);
4699 vtop->c.i = !vtop->c.i;
4700 } else if ((vtop->r & VT_VALMASK) == VT_CMP)
4701 vtop->c.i ^= 1;
4702 else {
4703 save_regs(1);
4704 vseti(VT_JMP, gvtst(1, 0));
4706 break;
4707 case '~':
4708 next();
4709 unary();
4710 vpushi(-1);
4711 gen_op('^');
4712 break;
4713 case '+':
4714 next();
4715 unary();
4716 if ((vtop->type.t & VT_BTYPE) == VT_PTR)
4717 tcc_error("pointer not accepted for unary plus");
4718 /* In order to force cast, we add zero, except for floating point
4719 where we really need a noop (otherwise -0.0 will be transformed
4720 into +0.0). */
4721 if (!is_float(vtop->type.t)) {
4722 vpushi(0);
4723 gen_op('+');
4725 break;
4726 case TOK_SIZEOF:
4727 case TOK_ALIGNOF1:
4728 case TOK_ALIGNOF2:
4729 t = tok;
4730 next();
4731 in_sizeof++;
4732 expr_type(&type, unary); /* the nested unary() resets in_sizeof to 0 */
4733 s = vtop[1].sym; /* hack: accessing previous vtop */
4734 size = type_size(&type, &align);
4735 if (s && s->a.aligned)
4736 align = 1 << (s->a.aligned - 1);
4737 if (t == TOK_SIZEOF) {
4738 if (!(type.t & VT_VLA)) {
4739 if (size < 0)
4740 tcc_error("sizeof applied to an incomplete type");
4741 vpushs(size);
4742 } else {
4743 vla_runtime_type_size(&type, &align);
4745 } else {
4746 vpushs(align);
4748 vtop->type.t |= VT_UNSIGNED;
4749 break;
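/* sizeof examples for the case above (a sketch, using size_t from
   <stddef.h>):

       int a[10];
       size_t s = sizeof a;       constant: 10 * sizeof(int)

       void g(int n)
       {
           char v[n];
           size_t t = sizeof v;   VLA: computed at run time
       }

   In the VLA case the size comes from vla_runtime_type_size()
   instead of a compile-time constant. */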
4751 case TOK_builtin_expect:
4752 /* __builtin_expect is a no-op for now */
4753 parse_builtin_params(0, "ee");
4754 vpop();
4755 break;
4756 case TOK_builtin_types_compatible_p:
4757 parse_builtin_params(0, "tt");
4758 vtop[-1].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
4759 vtop[0].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
4760 n = is_compatible_types(&vtop[-1].type, &vtop[0].type);
4761 vtop -= 2;
4762 vpushi(n);
4763 break;
4764 case TOK_builtin_choose_expr:
4766 int64_t c;
4767 next();
4768 skip('(');
4769 c = expr_const64();
4770 skip(',');
4771 if (!c) {
4772 nocode_wanted++;
4774 expr_eq();
4775 if (!c) {
4776 vpop();
4777 nocode_wanted--;
4779 skip(',');
4780 if (c) {
4781 nocode_wanted++;
4783 expr_eq();
4784 if (c) {
4785 vpop();
4786 nocode_wanted--;
4788 skip(')');
4790 break;
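/* __builtin_choose_expr example for the case above. The constant
   condition selects which expression is parsed normally; the other
   side is parsed under nocode_wanted and then popped. A sketch with
   hypothetical helpers f4()/f8():

       #define PICK(x) __builtin_choose_expr(sizeof(x) == 4, f4(x), f8(x))
*/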
4791 case TOK_builtin_constant_p:
4792 parse_builtin_params(1, "e");
4793 n = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
4794 vtop--;
4795 vpushi(n);
4796 break;
4797 case TOK_builtin_frame_address:
4798 case TOK_builtin_return_address:
4800 int tok1 = tok;
4801 int level;
4802 next();
4803 skip('(');
4804 if (tok != TOK_CINT) {
4805 tcc_error("%s only takes positive integers",
4806 tok1 == TOK_builtin_return_address ?
4807 "__builtin_return_address" :
4808 "__builtin_frame_address");
4810 level = (uint32_t)tokc.i;
4811 next();
4812 skip(')');
4813 type.t = VT_VOID;
4814 mk_pointer(&type);
4815 vset(&type, VT_LOCAL, 0); /* local frame */
4816 while (level--) {
4817 mk_pointer(&vtop->type);
4818 indir(); /* -> parent frame */
4820 if (tok1 == TOK_builtin_return_address) {
4821 // assume return address is just above frame pointer on stack
4822 vpushi(PTR_SIZE);
4823 gen_op('+');
4824 mk_pointer(&vtop->type);
4825 indir();
4828 break;
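/* Examples for the two builtins above:

       void *fp = __builtin_frame_address(0);    current frame
       void *ra = __builtin_return_address(1);   caller's return address

   Each level of the (constant) argument follows one saved frame
   pointer, and the return address is assumed to sit just above the
   frame pointer, as noted in the code. */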
4829 #ifdef TCC_TARGET_X86_64
4830 #ifdef TCC_TARGET_PE
4831 case TOK_builtin_va_start:
4832 parse_builtin_params(0, "ee");
4833 r = vtop->r & VT_VALMASK;
4834 if (r == VT_LLOCAL)
4835 r = VT_LOCAL;
4836 if (r != VT_LOCAL)
4837 tcc_error("__builtin_va_start expects a local variable");
4838 vtop->r = r;
4839 vtop->type = char_pointer_type;
4840 vtop->c.i += 8;
4841 vstore();
4842 break;
4843 #else
4844 case TOK_builtin_va_arg_types:
4845 parse_builtin_params(0, "t");
4846 vpushi(classify_x86_64_va_arg(&vtop->type));
4847 vswap();
4848 vpop();
4849 break;
4850 #endif
4851 #endif
4853 #ifdef TCC_TARGET_ARM64
4854 case TOK___va_start: {
4855 parse_builtin_params(0, "ee");
4856 //xx check types
4857 gen_va_start();
4858 vpushi(0);
4859 vtop->type.t = VT_VOID;
4860 break;
4862 case TOK___va_arg: {
4863 parse_builtin_params(0, "et");
4864 type = vtop->type;
4865 vpop();
4866 //xx check types
4867 gen_va_arg(&type);
4868 vtop->type = type;
4869 break;
4871 case TOK___arm64_clear_cache: {
4872 parse_builtin_params(0, "ee");
4873 gen_clear_cache();
4874 vpushi(0);
4875 vtop->type.t = VT_VOID;
4876 break;
4878 #endif
4879 /* pre operations */
4880 case TOK_INC:
4881 case TOK_DEC:
4882 t = tok;
4883 next();
4884 unary();
4885 inc(0, t);
4886 break;
4887 case '-':
4888 next();
4889 unary();
4890 t = vtop->type.t & VT_BTYPE;
4891 if (is_float(t)) {
4892 /* In IEEE negate(x) isn't subtract(0,x), but rather
4893 subtract(-0, x). */
4894 vpush(&vtop->type);
4895 if (t == VT_FLOAT)
4896 vtop->c.f = -1.0 * 0.0;
4897 else if (t == VT_DOUBLE)
4898 vtop->c.d = -1.0 * 0.0;
4899 else
4900 vtop->c.ld = -1.0 * 0.0;
4901 } else
4902 vpushi(0);
4903 vswap();
4904 gen_op('-');
4905 break;
4906 case TOK_LAND:
4907 if (!gnu_ext)
4908 goto tok_identifier;
4909 next();
4910 /* allow taking the address of a label */
4911 if (tok < TOK_UIDENT)
4912 expect("label identifier");
4913 s = label_find(tok);
4914 if (!s) {
4915 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
4916 } else {
4917 if (s->r == LABEL_DECLARED)
4918 s->r = LABEL_FORWARD;
4920 if (!s->type.t) {
4921 s->type.t = VT_VOID;
4922 mk_pointer(&s->type);
4923 s->type.t |= VT_STATIC;
4925 vpushsym(&s->type, s);
4926 next();
4927 break;
4929 case TOK_GENERIC:
4931 CType controlling_type;
4932 int has_default = 0;
4933 int has_match = 0;
4934 int learn = 0;
4935 TokenString *str = NULL;
4937 next();
4938 skip('(');
4939 expr_type(&controlling_type, expr_eq);
4940 controlling_type.t &= ~(VT_CONSTANT | VT_VOLATILE | VT_ARRAY);
4941 for (;;) {
4942 learn = 0;
4943 skip(',');
4944 if (tok == TOK_DEFAULT) {
4945 if (has_default)
4946 tcc_error("too many 'default'");
4947 has_default = 1;
4948 if (!has_match)
4949 learn = 1;
4950 next();
4951 } else {
4952 AttributeDef ad_tmp;
4953 int itmp;
4954 CType cur_type;
4955 parse_btype(&cur_type, &ad_tmp);
4956 type_decl(&cur_type, &ad_tmp, &itmp, TYPE_ABSTRACT);
4957 if (compare_types(&controlling_type, &cur_type, 0)) {
4958 if (has_match) {
4959 tcc_error("type match twice");
4961 has_match = 1;
4962 learn = 1;
4965 skip(':');
4966 if (learn) {
4967 if (str)
4968 tok_str_free(str);
4969 skip_or_save_block(&str);
4970 } else {
4971 skip_or_save_block(NULL);
4973 if (tok == ')')
4974 break;
4976 if (!str) {
4977 char buf[60];
4978 type_to_str(buf, sizeof buf, &controlling_type, NULL);
4979 tcc_error("type '%s' does not match any association", buf);
4981 begin_macro(str, 1);
4982 next();
4983 expr_eq();
4984 if (tok != TOK_EOF)
4985 expect(",");
4986 end_macro();
4987 next();
4988 break;
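/* C11 _Generic example parsed by the case above (a minimal sketch):

       #define type_name(x) _Generic((x), \
               int:     "int",            \
               double:  "double",         \
               default: "other")

       const char *s = type_name(1.0);    yields "double"

   Only the association matching the unqualified controlling type (or
   the default one) is saved and re-parsed via begin_macro(); the
   other branches are skipped with skip_or_save_block(NULL). */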
4990 // special qnan, snan and infinity values
4991 case TOK___NAN__:
4992 vpush64(VT_DOUBLE, 0x7ff8000000000000ULL);
4993 next();
4994 break;
4995 case TOK___SNAN__:
4996 vpush64(VT_DOUBLE, 0x7ff0000000000001ULL);
4997 next();
4998 break;
4999 case TOK___INF__:
5000 vpush64(VT_DOUBLE, 0x7ff0000000000000ULL);
5001 next();
5002 break;
5004 default:
5005 tok_identifier:
5006 t = tok;
5007 next();
5008 if (t < TOK_UIDENT)
5009 expect("identifier");
5010 s = sym_find(t);
5011 if (!s) {
5012 const char *name = get_tok_str(t, NULL);
5013 if (tok != '(')
5014 tcc_error("'%s' undeclared", name);
5015 /* for simple function calls, we tolerate undeclared
5016 external reference to int() function */
5017 if (tcc_state->warn_implicit_function_declaration
5018 #ifdef TCC_TARGET_PE
5019 /* people must be warned about using undeclared WINAPI functions
5020 (which usually start with an uppercase letter) */
5021 || (name[0] >= 'A' && name[0] <= 'Z')
5022 #endif
5024 tcc_warning("implicit declaration of function '%s'", name);
5025 s = external_global_sym(t, &func_old_type, 0);
5028 r = s->r;
5029 /* A symbol that has a register is a local register variable,
5030 which starts out as a VT_LOCAL value. */
5031 if ((r & VT_VALMASK) < VT_CONST)
5032 r = (r & ~VT_VALMASK) | VT_LOCAL;
5034 vset(&s->type, r, s->c);
5035 /* Point to s as backpointer (even without r&VT_SYM).
5036 Will be used by at least the x86 inline asm parser for
5037 regvars. */
5038 vtop->sym = s;
5040 if (r & VT_SYM) {
5041 vtop->c.i = 0;
5042 } else if (r == VT_CONST && IS_ENUM_VAL(s->type.t)) {
5043 vtop->c.i = s->enum_val;
5045 break;
5048 /* post operations */
5049 while (1) {
5050 if (tok == TOK_INC || tok == TOK_DEC) {
5051 inc(1, tok);
5052 next();
5053 } else if (tok == '.' || tok == TOK_ARROW || tok == TOK_CDOUBLE) {
5054 int qualifiers;
5055 /* field */
5056 if (tok == TOK_ARROW)
5057 indir();
5058 qualifiers = vtop->type.t & (VT_CONSTANT | VT_VOLATILE);
5059 test_lvalue();
5060 gaddrof();
5061 /* expect pointer on structure */
5062 if ((vtop->type.t & VT_BTYPE) != VT_STRUCT)
5063 expect("struct or union");
5064 if (tok == TOK_CDOUBLE)
5065 expect("field name");
5066 next();
5067 if (tok == TOK_CINT || tok == TOK_CUINT)
5068 expect("field name");
5069 s = find_field(&vtop->type, tok);
5070 if (!s)
5071 tcc_error("field not found: %s", get_tok_str(tok & ~SYM_FIELD, &tokc));
5072 /* add field offset to pointer */
5073 vtop->type = char_pointer_type; /* change type to 'char *' */
5074 vpushi(s->c);
5075 gen_op('+');
5076 /* change type to field type, and set to lvalue */
5077 vtop->type = s->type;
5078 vtop->type.t |= qualifiers;
5079 /* an array is never an lvalue */
5080 if (!(vtop->type.t & VT_ARRAY)) {
5081 vtop->r |= lvalue_type(vtop->type.t);
5082 #ifdef CONFIG_TCC_BCHECK
5083 /* if bound checking, the referenced pointer must be checked */
5084 if (tcc_state->do_bounds_check && (vtop->r & VT_VALMASK) != VT_LOCAL)
5085 vtop->r |= VT_MUSTBOUND;
5086 #endif
5088 next();
5089 } else if (tok == '[') {
5090 next();
5091 gexpr();
5092 gen_op('+');
5093 indir();
5094 skip(']');
5095 } else if (tok == '(') {
5096 SValue ret;
5097 Sym *sa;
5098 int nb_args, ret_nregs, ret_align, regsize, variadic;
5100 /* function call */
5101 if ((vtop->type.t & VT_BTYPE) != VT_FUNC) {
5102 /* pointer test (no array accepted) */
5103 if ((vtop->type.t & (VT_BTYPE | VT_ARRAY)) == VT_PTR) {
5104 vtop->type = *pointed_type(&vtop->type);
5105 if ((vtop->type.t & VT_BTYPE) != VT_FUNC)
5106 goto error_func;
5107 } else {
5108 error_func:
5109 expect("function pointer");
5111 } else {
5112 vtop->r &= ~VT_LVAL; /* no lvalue */
5114 /* get return type */
5115 s = vtop->type.ref;
5116 next();
5117 sa = s->next; /* first parameter */
5118 nb_args = regsize = 0;
5119 ret.r2 = VT_CONST;
5120 /* compute first implicit argument if a structure is returned */
5121 if ((s->type.t & VT_BTYPE) == VT_STRUCT) {
5122 variadic = (s->f.func_type == FUNC_ELLIPSIS);
5123 ret_nregs = gfunc_sret(&s->type, variadic, &ret.type,
5124 &ret_align, &regsize);
5125 if (!ret_nregs) {
5126 /* get some space for the returned structure */
5127 size = type_size(&s->type, &align);
5128 #ifdef TCC_TARGET_ARM64
5129 /* On arm64, a small struct is returned in registers.
5130 It is much easier to write it to memory if we know
5131 that we are allowed to write some extra bytes, so
5132 round the allocated space up to a power of 2: */
5133 if (size < 16)
5134 while (size & (size - 1))
5135 size = (size | (size - 1)) + 1;
5136 #endif
5137 loc = (loc - size) & -align;
5138 ret.type = s->type;
5139 ret.r = VT_LOCAL | VT_LVAL;
5140 /* pass it as 'int' to avoid structure arg passing
5141 problems */
5142 vseti(VT_LOCAL, loc);
5143 ret.c = vtop->c;
5144 nb_args++;
5146 } else {
5147 ret_nregs = 1;
5148 ret.type = s->type;
5151 if (ret_nregs) {
5152 /* return in register */
5153 if (is_float(ret.type.t)) {
5154 ret.r = reg_fret(ret.type.t);
5155 #ifdef TCC_TARGET_X86_64
5156 if ((ret.type.t & VT_BTYPE) == VT_QFLOAT)
5157 ret.r2 = REG_QRET;
5158 #endif
5159 } else {
5160 #ifndef TCC_TARGET_ARM64
5161 #ifdef TCC_TARGET_X86_64
5162 if ((ret.type.t & VT_BTYPE) == VT_QLONG)
5163 #else
5164 if ((ret.type.t & VT_BTYPE) == VT_LLONG)
5165 #endif
5166 ret.r2 = REG_LRET;
5167 #endif
5168 ret.r = REG_IRET;
5170 ret.c.i = 0;
5172 if (tok != ')') {
5173 for(;;) {
5174 expr_eq();
5175 gfunc_param_typed(s, sa);
5176 nb_args++;
5177 if (sa)
5178 sa = sa->next;
5179 if (tok == ')')
5180 break;
5181 skip(',');
5184 if (sa)
5185 tcc_error("too few arguments to function");
5186 skip(')');
5187 gfunc_call(nb_args);
5189 /* return value */
5190 for (r = ret.r + ret_nregs + !ret_nregs; r-- > ret.r;) {
5191 vsetc(&ret.type, r, &ret.c);
5192 vtop->r2 = ret.r2; /* Loop only happens when r2 is VT_CONST */
5195 /* handle packed struct return */
5196 if (((s->type.t & VT_BTYPE) == VT_STRUCT) && ret_nregs) {
5197 int addr, offset;
5199 size = type_size(&s->type, &align);
5200 /* We often write whole regs, so make sure there's enough
5201 space. Assume the register size is a power of 2. */
5202 if (regsize > align)
5203 align = regsize;
5204 loc = (loc - size) & -align;
5205 addr = loc;
5206 offset = 0;
5207 for (;;) {
5208 vset(&ret.type, VT_LOCAL | VT_LVAL, addr + offset);
5209 vswap();
5210 vstore();
5211 vtop--;
5212 if (--ret_nregs == 0)
5213 break;
5214 offset += regsize;
5216 vset(&s->type, VT_LOCAL | VT_LVAL, addr);
5218 } else {
5219 break;
5224 ST_FUNC void expr_prod(void)
5226 int t;
5228 unary();
5229 while (tok == '*' || tok == '/' || tok == '%') {
5230 t = tok;
5231 next();
5232 unary();
5233 gen_op(t);
5237 ST_FUNC void expr_sum(void)
5239 int t;
5241 expr_prod();
5242 while (tok == '+' || tok == '-') {
5243 t = tok;
5244 next();
5245 expr_prod();
5246 gen_op(t);
5250 static void expr_shift(void)
5252 int t;
5254 expr_sum();
5255 while (tok == TOK_SHL || tok == TOK_SAR) {
5256 t = tok;
5257 next();
5258 expr_sum();
5259 gen_op(t);
5263 static void expr_cmp(void)
5265 int t;
5267 expr_shift();
5268 while ((tok >= TOK_ULE && tok <= TOK_GT) ||
5269 tok == TOK_ULT || tok == TOK_UGE) {
5270 t = tok;
5271 next();
5272 expr_shift();
5273 gen_op(t);
5277 static void expr_cmpeq(void)
5279 int t;
5281 expr_cmp();
5282 while (tok == TOK_EQ || tok == TOK_NE) {
5283 t = tok;
5284 next();
5285 expr_cmp();
5286 gen_op(t);
5290 static void expr_and(void)
5292 expr_cmpeq();
5293 while (tok == '&') {
5294 next();
5295 expr_cmpeq();
5296 gen_op('&');
5300 static void expr_xor(void)
5302 expr_and();
5303 while (tok == '^') {
5304 next();
5305 expr_and();
5306 gen_op('^');
5310 static void expr_or(void)
5312 expr_xor();
5313 while (tok == '|') {
5314 next();
5315 expr_xor();
5316 gen_op('|');
5320 static void expr_land(void)
5322 expr_or();
5323 if (tok == TOK_LAND) {
5324 int t = 0;
5325 for(;;) {
5326 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5327 gen_cast_s(VT_BOOL);
5328 if (vtop->c.i) {
5329 vpop();
5330 } else {
5331 nocode_wanted++;
5332 while (tok == TOK_LAND) {
5333 next();
5334 expr_or();
5335 vpop();
5337 nocode_wanted--;
5338 if (t)
5339 gsym(t);
5340 gen_cast_s(VT_INT);
5341 break;
5343 } else {
5344 if (!t)
5345 save_regs(1);
5346 t = gvtst(1, t);
5348 if (tok != TOK_LAND) {
5349 if (t)
5350 vseti(VT_JMPI, t);
5351 else
5352 vpushi(1);
5353 break;
5355 next();
5356 expr_or();
5361 static void expr_lor(void)
5363 expr_land();
5364 if (tok == TOK_LOR) {
5365 int t = 0;
5366 for(;;) {
5367 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5368 gen_cast_s(VT_BOOL);
5369 if (!vtop->c.i) {
5370 vpop();
5371 } else {
5372 nocode_wanted++;
5373 while (tok == TOK_LOR) {
5374 next();
5375 expr_land();
5376 vpop();
5378 nocode_wanted--;
5379 if (t)
5380 gsym(t);
5381 gen_cast_s(VT_INT);
5382 break;
5384 } else {
5385 if (!t)
5386 save_regs(1);
5387 t = gvtst(0, t);
5389 if (tok != TOK_LOR) {
5390 if (t)
5391 vseti(VT_JMP, t);
5392 else
5393 vpushi(0);
5394 break;
5396 next();
5397 expr_land();
5402 /* Assuming vtop is a value used in a conditional context
5403 (i.e. compared with zero) return 0 if it's false, 1 if
5404 true and -1 if it can't be statically determined. */
5405 static int condition_3way(void)
5407 int c = -1;
5408 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
5409 (!(vtop->r & VT_SYM) || !vtop->sym->a.weak)) {
5410 vdup();
5411 gen_cast_s(VT_BOOL);
5412 c = vtop->c.i;
5413 vpop();
5415 return c;
5418 static void expr_cond(void)
5420 int tt, u, r1, r2, rc, t1, t2, bt1, bt2, islv, c, g;
5421 SValue sv;
5422 CType type, type1, type2;
5424 expr_lor();
5425 if (tok == '?') {
5426 next();
5427 c = condition_3way();
5428 g = (tok == ':' && gnu_ext);
5429 if (c < 0) {
5430 /* needed to avoid having different registers saved in
5431 each branch */
5432 if (is_float(vtop->type.t)) {
5433 rc = RC_FLOAT;
5434 #ifdef TCC_TARGET_X86_64
5435 if ((vtop->type.t & VT_BTYPE) == VT_LDOUBLE) {
5436 rc = RC_ST0;
5438 #endif
5439 } else
5440 rc = RC_INT;
5441 gv(rc);
5442 save_regs(1);
5443 if (g)
5444 gv_dup();
5445 tt = gvtst(1, 0);
5447 } else {
5448 if (!g)
5449 vpop();
5450 tt = 0;
5453 if (1) {
5454 if (c == 0)
5455 nocode_wanted++;
5456 if (!g)
5457 gexpr();
5459 type1 = vtop->type;
5460 sv = *vtop; /* save value to handle it later */
5461 vtop--; /* no vpop so that FP stack is not flushed */
5462 skip(':');
5464 u = 0;
5465 if (c < 0)
5466 u = gjmp(0);
5467 gsym(tt);
5469 if (c == 0)
5470 nocode_wanted--;
5471 if (c == 1)
5472 nocode_wanted++;
5473 expr_cond();
5474 if (c == 1)
5475 nocode_wanted--;
5477 type2 = vtop->type;
5478 t1 = type1.t;
5479 bt1 = t1 & VT_BTYPE;
5480 t2 = type2.t;
5481 bt2 = t2 & VT_BTYPE;
5482 type.ref = NULL;
5484 /* cast operands to correct type according to ISOC rules */
5485 if (is_float(bt1) || is_float(bt2)) {
5486 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
5487 type.t = VT_LDOUBLE;
5489 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
5490 type.t = VT_DOUBLE;
5491 } else {
5492 type.t = VT_FLOAT;
5494 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
5495 /* cast to biggest op */
5496 type.t = VT_LLONG | VT_LONG;
5497 if (bt1 == VT_LLONG)
5498 type.t &= t1;
5499 if (bt2 == VT_LLONG)
5500 type.t &= t2;
5501 /* convert to unsigned if it does not fit in a long long */
5502 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
5503 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
5504 type.t |= VT_UNSIGNED;
5505 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
5506 /* If one is a null ptr constant the result type
5507 is the other. */
5508 if (is_null_pointer (vtop))
5509 type = type1;
5510 else if (is_null_pointer (&sv))
5511 type = type2;
5512 /* XXX: test pointer compatibility, C99 has more elaborate
5513 rules here. */
5514 else
5515 type = type1;
5516 } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
5517 /* XXX: test function pointer compatibility */
5518 type = bt1 == VT_FUNC ? type1 : type2;
5519 } else if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
5520 /* XXX: test structure compatibility */
5521 type = bt1 == VT_STRUCT ? type1 : type2;
5522 } else if (bt1 == VT_VOID || bt2 == VT_VOID) {
5523 /* NOTE: as an extension, we accept void on only one side */
5524 type.t = VT_VOID;
5525 } else {
5526 /* integer operations */
5527 type.t = VT_INT | (VT_LONG & (t1 | t2));
5528 /* convert to unsigned if it does not fit in an integer */
5529 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
5530 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
5531 type.t |= VT_UNSIGNED;
5533 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
5534 that `(expr ? a : b).mem` does not error with "lvalue expected" */
5535 islv = (vtop->r & VT_LVAL) && (sv.r & VT_LVAL) && VT_STRUCT == (type.t & VT_BTYPE);
5536 islv &= c < 0;
5538 /* now we convert second operand */
5539 if (c != 1) {
5540 gen_cast(&type);
5541 if (islv) {
5542 mk_pointer(&vtop->type);
5543 gaddrof();
5544 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5545 gaddrof();
5548 rc = RC_INT;
5549 if (is_float(type.t)) {
5550 rc = RC_FLOAT;
5551 #ifdef TCC_TARGET_X86_64
5552 if ((type.t & VT_BTYPE) == VT_LDOUBLE) {
5553 rc = RC_ST0;
5555 #endif
5556 } else if ((type.t & VT_BTYPE) == VT_LLONG) {
5557 /* for long longs, we use fixed registers to avoid having
5558 to handle a complicated move */
5559 rc = RC_IRET;
5562 tt = r2 = 0;
5563 if (c < 0) {
5564 r2 = gv(rc);
5565 tt = gjmp(0);
5567 gsym(u);
5569 /* this is horrible, but we must also convert first
5570 operand */
5571 if (c != 0) {
5572 *vtop = sv;
5573 gen_cast(&type);
5574 if (islv) {
5575 mk_pointer(&vtop->type);
5576 gaddrof();
5577 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5578 gaddrof();
5581 if (c < 0) {
5582 r1 = gv(rc);
5583 move_reg(r2, r1, type.t);
5584 vtop->r = r2;
5585 gsym(tt);
5586 if (islv)
5587 indir();
5593 static void expr_eq(void)
5595 int t;
5597 expr_cond();
5598 if (tok == '=' ||
5599 (tok >= TOK_A_MOD && tok <= TOK_A_DIV) ||
5600 tok == TOK_A_XOR || tok == TOK_A_OR ||
5601 tok == TOK_A_SHL || tok == TOK_A_SAR) {
5602 test_lvalue();
5603 t = tok;
5604 next();
5605 if (t == '=') {
5606 expr_eq();
5607 } else {
5608 vdup();
5609 expr_eq();
5610 gen_op(t & 0x7f);
5612 vstore();
5616 ST_FUNC void gexpr(void)
5618 while (1) {
5619 expr_eq();
5620 if (tok != ',')
5621 break;
5622 vpop();
5623 next();
5627 /* parse a constant expression and return value in vtop. */
5628 static void expr_const1(void)
5630 const_wanted++;
5631 nocode_wanted++;
5632 expr_cond();
5633 nocode_wanted--;
5634 const_wanted--;
5637 /* parse an integer constant and return its value. */
5638 static inline int64_t expr_const64(void)
5640 int64_t c;
5641 expr_const1();
5642 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
5643 expect("constant expression");
5644 c = vtop->c.i;
5645 vpop();
5646 return c;
5649 /* parse an integer constant and return its value.
5650 Complain if it doesn't fit 32bit (signed or unsigned). */
5651 ST_FUNC int expr_const(void)
5653 int c;
5654 int64_t wc = expr_const64();
5655 c = wc;
5656 if (c != wc && (unsigned)c != wc)
5657 tcc_error("constant exceeds 32 bit");
5658 return c;
5661 /* return the label token if current token is a label, otherwise
5662 return zero */
5663 static int is_label(void)
5665 int last_tok;
5667 /* fast test first */
5668 if (tok < TOK_UIDENT)
5669 return 0;
5670 /* no need to save tokc because tok is an identifier */
5671 last_tok = tok;
5672 next();
5673 if (tok == ':') {
5674 return last_tok;
5675 } else {
5676 unget_tok(last_tok);
5677 return 0;
5681 #ifndef TCC_TARGET_ARM64
5682 static void gfunc_return(CType *func_type)
5684 if ((func_type->t & VT_BTYPE) == VT_STRUCT) {
5685 CType type, ret_type;
5686 int ret_align, ret_nregs, regsize;
5687 ret_nregs = gfunc_sret(func_type, func_var, &ret_type,
5688 &ret_align, &regsize);
5689 if (0 == ret_nregs) {
5690 /* if returning structure, must copy it to implicit
5691 first pointer arg location */
5692 type = *func_type;
5693 mk_pointer(&type);
5694 vset(&type, VT_LOCAL | VT_LVAL, func_vc);
5695 indir();
5696 vswap();
5697 /* copy structure value to pointer */
5698 vstore();
5699 } else {
5700 /* returning structure packed into registers */
5701 int r, size, addr, align;
5702 size = type_size(func_type,&align);
5703 if ((vtop->r != (VT_LOCAL | VT_LVAL) ||
5704 (vtop->c.i & (ret_align-1)))
5705 && (align & (ret_align-1))) {
5706 loc = (loc - size) & -ret_align;
5707 addr = loc;
5708 type = *func_type;
5709 vset(&type, VT_LOCAL | VT_LVAL, addr);
5710 vswap();
5711 vstore();
5712 vpop();
5713 vset(&ret_type, VT_LOCAL | VT_LVAL, addr);
5715 vtop->type = ret_type;
5716 if (is_float(ret_type.t))
5717 r = rc_fret(ret_type.t);
5718 else
5719 r = RC_IRET;
5721 if (ret_nregs == 1)
5722 gv(r);
5723 else {
5724 for (;;) {
5725 vdup();
5726 gv(r);
5727 vpop();
5728 if (--ret_nregs == 0)
5729 break;
5730 /* We assume that when a structure is returned in multiple
5731 registers, their classes are consecutive values of the
5732 sequence s(n) = 2^n */
5733 r <<= 1;
5734 vtop->c.i += regsize;
5738 } else if (is_float(func_type->t)) {
5739 gv(rc_fret(func_type->t));
5740 } else {
5741 gv(RC_IRET);
5743 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
5745 #endif
5747 static int case_cmp(const void *pa, const void *pb)
5749 int64_t a = (*(struct case_t**) pa)->v1;
5750 int64_t b = (*(struct case_t**) pb)->v1;
5751 return a < b ? -1 : a > b;
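/* gcase() below dispatches a sorted list of case ranges: a binary
   search narrows the interval while more than 4 entries remain, then
   a linear scan handles the rest. Ranges come from the GNU case
   range extension, e.g.:

       switch (c) {
       case '0' ... '9': return 1;    stored as v1 == '0', v2 == '9'
       default:          return 0;
       }
*/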
5754 static void gcase(struct case_t **base, int len, int *bsym)
5756 struct case_t *p;
5757 int e;
5758 int ll = (vtop->type.t & VT_BTYPE) == VT_LLONG;
5759 gv(RC_INT);
5760 while (len > 4) {
5761 /* binary search */
5762 p = base[len/2];
5763 vdup();
5764 if (ll)
5765 vpushll(p->v2);
5766 else
5767 vpushi(p->v2);
5768 gen_op(TOK_LE);
5769 e = gtst(1, 0);
5770 vdup();
5771 if (ll)
5772 vpushll(p->v1);
5773 else
5774 vpushi(p->v1);
5775 gen_op(TOK_GE);
5776 gtst_addr(0, p->sym); /* v1 <= x <= v2 */
5777 /* x < v1 */
5778 gcase(base, len/2, bsym);
5779 if (cur_switch->def_sym)
5780 gjmp_addr(cur_switch->def_sym);
5781 else
5782 *bsym = gjmp(*bsym);
5783 /* x > v2 */
5784 gsym(e);
5785 e = len/2 + 1;
5786 base += e; len -= e;
5788 /* linear scan */
5789 while (len--) {
5790 p = *base++;
5791 vdup();
5792 if (ll)
5793 vpushll(p->v2);
5794 else
5795 vpushi(p->v2);
5796 if (p->v1 == p->v2) {
5797 gen_op(TOK_EQ);
5798 gtst_addr(0, p->sym);
5799 } else {
5800 gen_op(TOK_LE);
5801 e = gtst(1, 0);
5802 vdup();
5803 if (ll)
5804 vpushll(p->v1);
5805 else
5806 vpushi(p->v1);
5807 gen_op(TOK_GE);
5808 gtst_addr(0, p->sym);
5809 gsym(e);
5814 static void block(int *bsym, int *csym, int is_expr)
5816 int a, b, c, d, cond;
5817 Sym *s;
5819 /* generate line number info */
5820 if (tcc_state->do_debug)
5821 tcc_debug_line(tcc_state);
5823 if (is_expr) {
5824 /* default return value is (void) */
5825 vpushi(0);
5826 vtop->type.t = VT_VOID;
5829 if (tok == TOK_IF) {
5830 /* if test */
5831 int saved_nocode_wanted = nocode_wanted;
5832 next();
5833 skip('(');
5834 gexpr();
5835 skip(')');
5836 cond = condition_3way();
5837 if (cond == 1)
5838 a = 0, vpop();
5839 else
5840 a = gvtst(1, 0);
5841 if (cond == 0)
5842 nocode_wanted |= 0x20000000;
5843 block(bsym, csym, 0);
5844 if (cond != 1)
5845 nocode_wanted = saved_nocode_wanted;
5846 c = tok;
5847 if (c == TOK_ELSE) {
5848 next();
5849 d = gjmp(0);
5850 gsym(a);
5851 if (cond == 1)
5852 nocode_wanted |= 0x20000000;
5853 block(bsym, csym, 0);
5854 gsym(d); /* patch else jmp */
5855 if (cond != 0)
5856 nocode_wanted = saved_nocode_wanted;
5857 } else
5858 gsym(a);
5859 } else if (tok == TOK_WHILE) {
5860 int saved_nocode_wanted;
5861 nocode_wanted &= ~0x20000000;
5862 next();
5863 d = ind;
5864 vla_sp_restore();
5865 skip('(');
5866 gexpr();
5867 skip(')');
5868 a = gvtst(1, 0);
5869 b = 0;
5870 ++local_scope;
5871 saved_nocode_wanted = nocode_wanted;
5872 block(&a, &b, 0);
5873 nocode_wanted = saved_nocode_wanted;
5874 --local_scope;
5875 gjmp_addr(d);
5876 gsym(a);
5877 gsym_addr(b, d);
5878 } else if (tok == '{') {
5879 Sym *llabel;
5880 int block_vla_sp_loc = vla_sp_loc, saved_vlas_in_scope = vlas_in_scope;
5882 next();
5883 /* record local declaration stack position */
5884 s = local_stack;
5885 llabel = local_label_stack;
5886 ++local_scope;
5888 /* handle local labels declarations */
5889 if (tok == TOK_LABEL) {
5890 next();
5891 for(;;) {
5892 if (tok < TOK_UIDENT)
5893 expect("label identifier");
5894 label_push(&local_label_stack, tok, LABEL_DECLARED);
5895 next();
5896 if (tok == ',') {
5897 next();
5898 } else {
5899 skip(';');
5900 break;
5904 while (tok != '}') {
5905 if ((a = is_label()))
5906 unget_tok(a);
5907 else
5908 decl(VT_LOCAL);
5909 if (tok != '}') {
5910 if (is_expr)
5911 vpop();
5912 block(bsym, csym, is_expr);
5915 /* pop locally defined labels */
5916 label_pop(&local_label_stack, llabel, is_expr);
5917 /* pop locally defined symbols */
5918 --local_scope;
5919 /* In the is_expr case (a statement expression is finished here),
5920 vtop might refer to symbols on the local_stack. Either via the
5921 type or via vtop->sym. We can't pop those nor any that in turn
5922 might be referred to. To make it easier we don't roll back
5923 any symbols in that case; some upper level call to block() will
5924 do that. We do have to remove such symbols from the lookup
5925 tables, though. sym_pop will do that. */
5926 sym_pop(&local_stack, s, is_expr);
5928 /* Pop VLA frames and restore stack pointer if required */
5929 if (vlas_in_scope > saved_vlas_in_scope) {
5930 vla_sp_loc = saved_vlas_in_scope ? block_vla_sp_loc : vla_sp_root_loc;
5931 vla_sp_restore();
5933 vlas_in_scope = saved_vlas_in_scope;
5935 next();
5936 } else if (tok == TOK_RETURN) {
5937 next();
5938 if (tok != ';') {
5939 gexpr();
5940 gen_assign_cast(&func_vt);
5941 if ((func_vt.t & VT_BTYPE) == VT_VOID)
5942 vtop--;
5943 else
5944 gfunc_return(&func_vt);
5946 skip(';');
5947 /* jump unless last stmt in top-level block */
5948 if (tok != '}' || local_scope != 1)
5949 rsym = gjmp(rsym);
5950 nocode_wanted |= 0x20000000;
5951 } else if (tok == TOK_BREAK) {
5952 /* compute jump */
5953 if (!bsym)
5954 tcc_error("cannot break");
5955 *bsym = gjmp(*bsym);
5956 next();
5957 skip(';');
5958 nocode_wanted |= 0x20000000;
5959 } else if (tok == TOK_CONTINUE) {
5960 /* compute jump */
5961 if (!csym)
5962 tcc_error("cannot continue");
5963 vla_sp_restore_root();
5964 *csym = gjmp(*csym);
5965 next();
5966 skip(';');
5967 } else if (tok == TOK_FOR) {
5968 int e;
5969 int saved_nocode_wanted;
5970 nocode_wanted &= ~0x20000000;
5971 next();
5972 skip('(');
5973 s = local_stack;
5974 ++local_scope;
5975 if (tok != ';') {
5976 /* c99 for-loop init decl? */
5977 if (!decl0(VT_LOCAL, 1, NULL)) {
5978 /* no, regular for-loop init expr */
5979 gexpr();
5980 vpop();
5983 skip(';');
5984 d = ind;
5985 c = ind;
5986 vla_sp_restore();
5987 a = 0;
5988 b = 0;
5989 if (tok != ';') {
5990 gexpr();
5991 a = gvtst(1, 0);
5993 skip(';');
5994 if (tok != ')') {
5995 e = gjmp(0);
5996 c = ind;
5997 vla_sp_restore();
5998 gexpr();
5999 vpop();
6000 gjmp_addr(d);
6001 gsym(e);
6003 skip(')');
6004 saved_nocode_wanted = nocode_wanted;
6005 block(&a, &b, 0);
6006 nocode_wanted = saved_nocode_wanted;
6007 gjmp_addr(c);
6008 gsym(a);
6009 gsym_addr(b, c);
6010 --local_scope;
6011 sym_pop(&local_stack, s, 0);
6013 } else
6014 if (tok == TOK_DO) {
6015 int saved_nocode_wanted;
6016 nocode_wanted &= ~0x20000000;
6017 next();
6018 a = 0;
6019 b = 0;
6020 d = ind;
6021 vla_sp_restore();
6022 saved_nocode_wanted = nocode_wanted;
6023 block(&a, &b, 0);
6024 skip(TOK_WHILE);
6025 skip('(');
6026 gsym(b);
6027 gexpr();
6028 c = gvtst(0, 0);
6029 gsym_addr(c, d);
6030 nocode_wanted = saved_nocode_wanted;
6031 skip(')');
6032 gsym(a);
6033 skip(';');
6034 } else
6035 if (tok == TOK_SWITCH) {
6036 struct switch_t *saved, sw;
6037 int saved_nocode_wanted = nocode_wanted;
6038 SValue switchval;
6039 next();
6040 skip('(');
6041 gexpr();
6042 skip(')');
6043 switchval = *vtop--;
6044 a = 0;
6045 b = gjmp(0); /* jump to first case */
6046 sw.p = NULL; sw.n = 0; sw.def_sym = 0;
6047 saved = cur_switch;
6048 cur_switch = &sw;
6049 block(&a, csym, 0);
6050 nocode_wanted = saved_nocode_wanted;
6051 a = gjmp(a); /* add implicit break */
6052 /* case lookup */
6053 gsym(b);
6054 qsort(sw.p, sw.n, sizeof(void*), case_cmp);
6055 for (b = 1; b < sw.n; b++)
6056 if (sw.p[b - 1]->v2 >= sw.p[b]->v1)
6057 tcc_error("duplicate case value");
6058 /* Our switch table sorting is signed, so the compared
6059 value needs to be as well when it's 64bit. */
6060 if ((switchval.type.t & VT_BTYPE) == VT_LLONG)
6061 switchval.type.t &= ~VT_UNSIGNED;
6062 vpushv(&switchval);
6063 gcase(sw.p, sw.n, &a);
6064 vpop();
6065 if (sw.def_sym)
6066 gjmp_addr(sw.def_sym);
6067 dynarray_reset(&sw.p, &sw.n);
6068 cur_switch = saved;
6069 /* break label */
6070 gsym(a);
6071 } else
6072 if (tok == TOK_CASE) {
6073 struct case_t *cr = tcc_malloc(sizeof(struct case_t));
6074 if (!cur_switch)
6075 expect("switch");
6076 nocode_wanted &= ~0x20000000;
6077 next();
6078 cr->v1 = cr->v2 = expr_const64();
6079 if (gnu_ext && tok == TOK_DOTS) {
6080 next();
6081 cr->v2 = expr_const64();
6082 if (cr->v2 < cr->v1)
6083 tcc_warning("empty case range");
6085 cr->sym = ind;
6086 dynarray_add(&cur_switch->p, &cur_switch->n, cr);
6087 skip(':');
6088 is_expr = 0;
6089 goto block_after_label;
6090 } else
6091 if (tok == TOK_DEFAULT) {
6092 next();
6093 skip(':');
6094 if (!cur_switch)
6095 expect("switch");
6096 if (cur_switch->def_sym)
6097 tcc_error("too many 'default'");
6098 cur_switch->def_sym = ind;
6099 is_expr = 0;
6100 goto block_after_label;
6101 } else
6102 if (tok == TOK_GOTO) {
6103 next();
6104 if (tok == '*' && gnu_ext) {
6105 /* computed goto */
6106 next();
6107 gexpr();
6108 if ((vtop->type.t & VT_BTYPE) != VT_PTR)
6109 expect("pointer");
6110 ggoto();
6111 } else if (tok >= TOK_UIDENT) {
6112 s = label_find(tok);
6113 /* put forward definition if needed */
6114 if (!s) {
6115 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
6116 } else {
6117 if (s->r == LABEL_DECLARED)
6118 s->r = LABEL_FORWARD;
6120 vla_sp_restore_root();
6121 if (s->r & LABEL_FORWARD)
6122 s->jnext = gjmp(s->jnext);
6123 else
6124 gjmp_addr(s->jnext);
6125 next();
6126 } else {
6127 expect("label identifier");
6129 skip(';');
6130 } else if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
6131 asm_instr();
6132 } else {
6133 b = is_label();
6134 if (b) {
6135 /* label case */
6136 next();
6137 s = label_find(b);
6138 if (s) {
6139 if (s->r == LABEL_DEFINED)
6140 tcc_error("duplicate label '%s'", get_tok_str(s->v, NULL));
6141 gsym(s->jnext);
6142 s->r = LABEL_DEFINED;
6143 } else {
6144 s = label_push(&global_label_stack, b, LABEL_DEFINED);
6146 s->jnext = ind;
6147 vla_sp_restore();
6148 /* we accept this, but it is a mistake */
6149 block_after_label:
6150 nocode_wanted &= ~0x20000000;
6151 if (tok == '}') {
6152 tcc_warning("deprecated use of label at end of compound statement");
6153 } else {
6154 if (is_expr)
6155 vpop();
6156 block(bsym, csym, is_expr);
6158 } else {
6159 /* expression case */
6160 if (tok != ';') {
6161 if (is_expr) {
6162 vpop();
6163 gexpr();
6164 } else {
6165 gexpr();
6166 vpop();
6169 skip(';');
6174 /* This skips over a stream of tokens containing balanced {} and ()
6175 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
6176 with a '{'). If STR then allocates and stores the skipped tokens
6177 in *STR. This doesn't check if () and {} are nested correctly,
6178 i.e. "({)}" is accepted. */
6179 static void skip_or_save_block(TokenString **str)
6181 int braces = tok == '{';
6182 int level = 0;
6183 if (str)
6184 *str = tok_str_alloc();
6186 while ((level > 0 || (tok != '}' && tok != ',' && tok != ';' && tok != ')'))) {
6187 int t;
6188 if (tok == TOK_EOF) {
6189 if (str || level > 0)
6190 tcc_error("unexpected end of file");
6191 else
6192 break;
6194 if (str)
6195 tok_str_add_tok(*str);
6196 t = tok;
6197 next();
6198 if (t == '{' || t == '(') {
6199 level++;
6200 } else if (t == '}' || t == ')') {
6201 level--;
6202 if (level == 0 && braces && t == '}')
6203 break;
6206 if (str) {
6207 tok_str_add(*str, -1);
6208 tok_str_add(*str, 0);
6212 #define EXPR_CONST 1
6213 #define EXPR_ANY 2
6215 static void parse_init_elem(int expr_type)
6217 int saved_global_expr;
6218 switch(expr_type) {
6219 case EXPR_CONST:
6220 /* compound literals must be allocated globally in this case */
6221 saved_global_expr = global_expr;
6222 global_expr = 1;
6223 expr_const1();
6224 global_expr = saved_global_expr;
6225 /* NOTE: symbols are accepted, as well as lvalue for anon symbols
6226 (compound literals). */
6227 if (((vtop->r & (VT_VALMASK | VT_LVAL)) != VT_CONST
6228 && ((vtop->r & (VT_SYM|VT_LVAL)) != (VT_SYM|VT_LVAL)
6229 || vtop->sym->v < SYM_FIRST_ANOM))
6230 #ifdef TCC_TARGET_PE
6231 || ((vtop->r & VT_SYM) && vtop->sym->a.dllimport)
6232 #endif
6234 tcc_error("initializer element is not constant");
6235 break;
6236 case EXPR_ANY:
6237 expr_eq();
6238 break;
6242 /* put zeros for variable based init */
6243 static void init_putz(Section *sec, unsigned long c, int size)
6245 if (sec) {
6246 /* nothing to do because globals are already set to zero */
6247 } else {
6248 vpush_global_sym(&func_old_type, TOK_memset);
6249 vseti(VT_LOCAL, c);
6250 #ifdef TCC_TARGET_ARM
6251 vpushs(size);
6252 vpushi(0);
6253 #else
6254 vpushi(0);
6255 vpushs(size);
6256 #endif
6257 gfunc_call(3);
6261 /* t is the array or struct type. c is the array or struct
6262 address. cur_field is the pointer to the current
6263 field, for arrays the 'c' member contains the current start
6264 index. 'size_only' is true if only size info is needed (only used
6265 in arrays). al contains the already initialized length of the
6266 current container (starting at c). This returns the new length of that. */
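/* Designator examples handled here (array index, GNU range, field):

       int a[6] = { [2] = 1, [4 ... 5] = 2 };
       struct point { int x, y; } p = { .y = 3 };

   For a range designator the single parsed initializer is replicated
   over nb_elems entries at the end of this function. */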
6267 static int decl_designator(CType *type, Section *sec, unsigned long c,
6268 Sym **cur_field, int size_only, int al)
6270 Sym *s, *f;
6271 int index, index_last, align, l, nb_elems, elem_size;
6272 unsigned long corig = c;
6274 elem_size = 0;
6275 nb_elems = 1;
6276 if (gnu_ext && (l = is_label()) != 0)
6277 goto struct_field;
6278 /* NOTE: we only support ranges for last designator */
6279 while (nb_elems == 1 && (tok == '[' || tok == '.')) {
6280 if (tok == '[') {
6281 if (!(type->t & VT_ARRAY))
6282 expect("array type");
6283 next();
6284 index = index_last = expr_const();
6285 if (tok == TOK_DOTS && gnu_ext) {
6286 next();
6287 index_last = expr_const();
6289 skip(']');
6290 s = type->ref;
6291 if (index < 0 || (s->c >= 0 && index_last >= s->c) ||
6292 index_last < index)
6293 tcc_error("invalid index");
6294 if (cur_field)
6295 (*cur_field)->c = index_last;
6296 type = pointed_type(type);
6297 elem_size = type_size(type, &align);
6298 c += index * elem_size;
6299 nb_elems = index_last - index + 1;
6300 } else {
6301 next();
6302 l = tok;
6303 struct_field:
6304 next();
6305 if ((type->t & VT_BTYPE) != VT_STRUCT)
6306 expect("struct/union type");
6307 f = find_field(type, l);
6308 if (!f)
6309 expect("field");
6310 if (cur_field)
6311 *cur_field = f;
6312 type = &f->type;
6313 c += f->c;
6315 cur_field = NULL;
6317 if (!cur_field) {
6318 if (tok == '=') {
6319 next();
6320 } else if (!gnu_ext) {
6321 expect("=");
6323 } else {
6324 if (type->t & VT_ARRAY) {
6325 index = (*cur_field)->c;
6326 if (type->ref->c >= 0 && index >= type->ref->c)
6327 tcc_error("index too large");
6328 type = pointed_type(type);
6329 c += index * type_size(type, &align);
6330 } else {
6331 f = *cur_field;
6332 while (f && (f->v & SYM_FIRST_ANOM) && (f->type.t & VT_BITFIELD))
6333 *cur_field = f = f->next;
6334 if (!f)
6335 tcc_error("too many field init");
6336 type = &f->type;
6337 c += f->c;
6340 /* must put zero in holes (note that doing it that way
6341 ensures that it even works with designators) */
6342 if (!size_only && c - corig > al)
6343 init_putz(sec, corig + al, c - corig - al);
6344 decl_initializer(type, sec, c, 0, size_only);
6346 /* XXX: make it more general */
6347 if (!size_only && nb_elems > 1) {
6348 unsigned long c_end;
6349 uint8_t *src, *dst;
6350 int i;
6352 if (!sec) {
6353 vset(type, VT_LOCAL|VT_LVAL, c);
6354 for (i = 1; i < nb_elems; i++) {
6355 vset(type, VT_LOCAL|VT_LVAL, c + elem_size * i);
6356 vswap();
6357 vstore();
6359 vpop();
6360 } else if (!NODATA_WANTED) {
6361 c_end = c + nb_elems * elem_size;
6362 if (c_end > sec->data_allocated)
6363 section_realloc(sec, c_end);
6364 src = sec->data + c;
6365 dst = src;
6366 for(i = 1; i < nb_elems; i++) {
6367 dst += elem_size;
6368 memcpy(dst, src, elem_size);
6372 c += nb_elems * type_size(type, &align);
6373 if (c - corig > al)
6374 al = c - corig;
6375 return al;
6378 /* store a value or an expression directly in global data or in local array */
6379 static void init_putv(CType *type, Section *sec, unsigned long c)
6381 int bt;
6382 void *ptr;
6383 CType dtype;
6385 dtype = *type;
6386 dtype.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
6388 if (sec) {
6389 int size, align;
6390 /* XXX: not portable */
6391 /* XXX: generate error if incorrect relocation */
6392 gen_assign_cast(&dtype);
6393 bt = type->t & VT_BTYPE;
6395 if ((vtop->r & VT_SYM)
6396 && bt != VT_PTR
6397 && bt != VT_FUNC
6398 && (bt != (PTR_SIZE == 8 ? VT_LLONG : VT_INT)
6399 || (type->t & VT_BITFIELD))
6400 && !((vtop->r & VT_CONST) && vtop->sym->v >= SYM_FIRST_ANOM)
6402 tcc_error("initializer element is not computable at load time");
6404 if (NODATA_WANTED) {
6405 vtop--;
6406 return;
6409 size = type_size(type, &align);
6410 section_reserve(sec, c + size);
6411 ptr = sec->data + c;
6413 /* XXX: make code faster ? */
6414 if ((vtop->r & (VT_SYM|VT_CONST)) == (VT_SYM|VT_CONST) &&
6415 vtop->sym->v >= SYM_FIRST_ANOM &&
6416 /* XXX This rejects compound literals like
6417 '(void *){ptr}'. The problem is that '&sym' is
6418 represented the same way, which would be ruled out
6419 by the SYM_FIRST_ANOM check above, but also '"string"'
6420 in 'char *p = "string"' is represented the same
6421 with the type being VT_PTR and the symbol being an
6422 anonymous one. That is, there's no difference in vtop
6423 between '(void *){x}' and '&(void *){x}'. Ignore
6424 pointer typed entities here. Hopefully no real code
6425 will ever use compound literals with a scalar type. */
6426 (vtop->type.t & VT_BTYPE) != VT_PTR) {
6427 /* These come from compound literals, memcpy stuff over. */
6428 Section *ssec;
6429 ElfW(Sym) *esym;
6430 ElfW_Rel *rel;
6431 esym = &((ElfW(Sym) *)symtab_section->data)[vtop->sym->c];
6432 ssec = tcc_state->sections[esym->st_shndx];
6433 memmove (ptr, ssec->data + esym->st_value, size);
6434 if (ssec->reloc) {
6435 /* We need to copy over all memory contents, and that
6436 includes relocations. Use the fact that relocs are
6437 created in order, so look from the end of relocs
6438 until we hit one before the copied region. */
6439 int num_relocs = ssec->reloc->data_offset / sizeof(*rel);
6440 rel = (ElfW_Rel*)(ssec->reloc->data + ssec->reloc->data_offset);
6441 while (num_relocs--) {
6442 rel--;
6443 if (rel->r_offset >= esym->st_value + size)
6444 continue;
6445 if (rel->r_offset < esym->st_value)
6446 break;
6447 /* Note: if the same fields are initialized multiple
6448 times (possible with designators) then we possibly
6449 add multiple relocations for the same offset here.
6450 That would lead to wrong code, the last reloc needs
6451 to win. We clean this up later after the whole
6452 initializer is parsed. */
6453 put_elf_reloca(symtab_section, sec,
6454 c + rel->r_offset - esym->st_value,
6455 ELFW(R_TYPE)(rel->r_info),
6456 ELFW(R_SYM)(rel->r_info),
6457 #if PTR_SIZE == 8
6458 rel->r_addend
6459 #else
6461 #endif
6465 } else {
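/* Otherwise the constant is encoded directly into the section data
   (with a relocation added for symbol-based pointers). Bitfield
   values are packed byte by byte below; e.g. with bit_pos == 6 and
   bit_size == 5 the low two bits go into the current byte and the
   remaining three into the next one. */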
6466 if (type->t & VT_BITFIELD) {
6467 int bit_pos, bit_size, bits, n;
6468 unsigned char *p, v, m;
6469 bit_pos = BIT_POS(vtop->type.t);
6470 bit_size = BIT_SIZE(vtop->type.t);
6471 p = (unsigned char*)ptr + (bit_pos >> 3);
6472 bit_pos &= 7, bits = 0;
6473 while (bit_size) {
6474 n = 8 - bit_pos;
6475 if (n > bit_size)
6476 n = bit_size;
6477 v = vtop->c.i >> bits << bit_pos;
6478 m = ((1 << n) - 1) << bit_pos;
6479 *p = (*p & ~m) | (v & m);
6480 bits += n, bit_size -= n, bit_pos = 0, ++p;
6482 } else
6483 switch(bt) {
6484 /* XXX: when cross-compiling we assume that each type has the
6485 same representation on host and target, which is likely to
6486 be wrong in the case of long double */
6487 case VT_BOOL:
6488 vtop->c.i = vtop->c.i != 0;
6489 case VT_BYTE:
6490 *(char *)ptr |= vtop->c.i;
6491 break;
6492 case VT_SHORT:
6493 *(short *)ptr |= vtop->c.i;
6494 break;
6495 case VT_FLOAT:
6496 *(float*)ptr = vtop->c.f;
6497 break;
6498 case VT_DOUBLE:
6499 *(double *)ptr = vtop->c.d;
6500 break;
6501 case VT_LDOUBLE:
6502 #if defined TCC_IS_NATIVE_387
6503 if (sizeof (long double) >= 10) /* zero pad ten-byte LD */
6504 memcpy(ptr, &vtop->c.ld, 10);
6505 #ifdef __TINYC__
6506 else if (sizeof (long double) == sizeof (double))
6507 __asm__("fldl %1\nfstpt %0\n" : "=m" (ptr) : "m" (vtop->c.ld));
6508 #endif
6509 else
6510 #endif
6511 if (sizeof(long double) == LDOUBLE_SIZE)
6512 *(long double*)ptr = vtop->c.ld;
6513 else if (sizeof(double) == LDOUBLE_SIZE)
6514 *(double *)ptr = (double)vtop->c.ld;
6515 else
6516 tcc_error("can't cross compile long double constants");
6517 break;
6518 #if PTR_SIZE != 8
6519 case VT_LLONG:
6520 *(long long *)ptr |= vtop->c.i;
6521 break;
6522 #else
6523 case VT_LLONG:
6524 #endif
6525 case VT_PTR:
6527 addr_t val = vtop->c.i;
6528 #if PTR_SIZE == 8
6529 if (vtop->r & VT_SYM)
6530 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
6531 else
6532 *(addr_t *)ptr |= val;
6533 #else
6534 if (vtop->r & VT_SYM)
6535 greloc(sec, vtop->sym, c, R_DATA_PTR);
6536 *(addr_t *)ptr |= val;
6537 #endif
6538 break;
6540 default:
6542 int val = vtop->c.i;
6543 #if PTR_SIZE == 8
6544 if (vtop->r & VT_SYM)
6545 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
6546 else
6547 *(int *)ptr |= val;
6548 #else
6549 if (vtop->r & VT_SYM)
6550 greloc(sec, vtop->sym, c, R_DATA_PTR);
6551 *(int *)ptr |= val;
6552 #endif
6553 break;
6557 vtop--;
6558 } else {
6559 vset(&dtype, VT_LOCAL|VT_LVAL, c);
6560 vswap();
6561 vstore();
6562 vpop();
6566 /* 't' contains the type and storage info. 'c' is the offset of the
6567 object in section 'sec'. If 'sec' is NULL, it means stack based
6568 allocation. 'first' is true if array '{' must be read (multi
6569 dimension implicit array init handling). 'size_only' is true if
6570 size only evaluation is wanted (only for arrays). */
6571 static void decl_initializer(CType *type, Section *sec, unsigned long c,
6572 int first, int size_only)
6574 int len, n, no_oblock, nb, i;
6575 int size1, align1;
6576 int have_elem;
6577 Sym *s, *f;
6578 Sym indexsym;
6579 CType *t1;
6581 /* If we currently are at an '}' or ',' we have read an initializer
6582 element in one of our callers, and not yet consumed it. */
6583 have_elem = tok == '}' || tok == ',';
6584 if (!have_elem && tok != '{' &&
6585 /* In case of strings we have special handling for arrays, so
6586 don't consume them as initializer value (which would commit them
6587 to some anonymous symbol). */
6588 tok != TOK_LSTR && tok != TOK_STR &&
6589 !size_only) {
6590 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
6591 have_elem = 1;
6594 if (have_elem &&
6595 !(type->t & VT_ARRAY) &&
6596 /* Use i_c_parameter_t to strip toplevel qualifiers.
6597 The source type might have VT_CONSTANT set, which is
6598 of course assignable to non-const elements. */
6599 is_compatible_unqualified_types(type, &vtop->type)) {
6600 init_putv(type, sec, c);
6601 } else if (type->t & VT_ARRAY) {
6602 s = type->ref;
6603 n = s->c;
6604 t1 = pointed_type(type);
6605 size1 = type_size(t1, &align1);
6607 no_oblock = 1;
6608 if ((first && tok != TOK_LSTR && tok != TOK_STR) ||
6609 tok == '{') {
6610 if (tok != '{')
6611 tcc_error("character array initializer must be a literal,"
6612 " optionally enclosed in braces");
6613 skip('{');
6614 no_oblock = 0;
6617 /* only parse strings here if correct type (otherwise: handle
6618 them as ((w)char *) expressions */
6619 if ((tok == TOK_LSTR &&
6620 #ifdef TCC_TARGET_PE
6621 (t1->t & VT_BTYPE) == VT_SHORT && (t1->t & VT_UNSIGNED)
6622 #else
6623 (t1->t & VT_BTYPE) == VT_INT
6624 #endif
6625 ) || (tok == TOK_STR && (t1->t & VT_BTYPE) == VT_BYTE)) {
6626 len = 0;
6627 while (tok == TOK_STR || tok == TOK_LSTR) {
6628 int cstr_len, ch;
6630 /* compute maximum number of chars wanted */
6631 if (tok == TOK_STR)
6632 cstr_len = tokc.str.size;
6633 else
6634 cstr_len = tokc.str.size / sizeof(nwchar_t);
6635 cstr_len--;
6636 nb = cstr_len;
6637 if (n >= 0 && nb > (n - len))
6638 nb = n - len;
6639 if (!size_only) {
6640 if (cstr_len > nb)
6641 tcc_warning("initializer-string for array is too long");
6642 /* in order to go faster for the common case (char
6643 string in a global variable), we handle it
6644 specifically */
6645 if (sec && tok == TOK_STR && size1 == 1) {
6646 if (!NODATA_WANTED)
6647 memcpy(sec->data + c + len, tokc.str.data, nb);
6648 } else {
6649 for(i=0;i<nb;i++) {
6650 if (tok == TOK_STR)
6651 ch = ((unsigned char *)tokc.str.data)[i];
6652 else
6653 ch = ((nwchar_t *)tokc.str.data)[i];
6654 vpushi(ch);
6655 init_putv(t1, sec, c + (len + i) * size1);
6659 len += nb;
6660 next();
6662 /* only add trailing zero if enough storage (no
6663 warning in this case since it is standard) */
6664 if (n < 0 || len < n) {
6665 if (!size_only) {
6666 vpushi(0);
6667 init_putv(t1, sec, c + (len * size1));
6669 len++;
6671 len *= size1;
6672 } else {
6673 indexsym.c = 0;
6674 f = &indexsym;
6676 do_init_list:
6677 len = 0;
6678 while (tok != '}' || have_elem) {
6679 len = decl_designator(type, sec, c, &f, size_only, len);
6680 have_elem = 0;
6681 if (type->t & VT_ARRAY) {
6682 ++indexsym.c;
6683 /* special test for multi-dimensional arrays (may not
6684 be strictly correct if designators are used at the
6685 same time) */
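/* Added example (hedged): in  int m[2][2] = { 1, 2, 3, 4 };  the inner
   rows have no braces (no_oblock), so this length check is what ends
   each row's initializer after n elements. */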
6686 if (no_oblock && len >= n*size1)
6687 break;
6688 } else {
6689 if (s->type.t == VT_UNION)
6690 f = NULL;
6691 else
6692 f = f->next;
6693 if (no_oblock && f == NULL)
6694 break;
6697 if (tok == '}')
6698 break;
6699 skip(',');
6702 /* put zeros at the end */
6703 if (!size_only && len < n*size1)
6704 init_putz(sec, c + len, n*size1 - len);
6705 if (!no_oblock)
6706 skip('}');
6707 /* patch type size if needed, which happens only for array types */
6708 if (n < 0)
6709 s->c = size1 == 1 ? len : ((len + size1 - 1)/size1);
6710 } else if ((type->t & VT_BTYPE) == VT_STRUCT) {
6711 size1 = 1;
6712 no_oblock = 1;
6713 if (first || tok == '{') {
6714 skip('{');
6715 no_oblock = 0;
6717 s = type->ref;
6718 f = s->next;
6719 n = s->c;
6720 goto do_init_list;
6721 } else if (tok == '{') {
6722 next();
6723 decl_initializer(type, sec, c, first, size_only);
6724 skip('}');
6725 } else if (size_only) {
6726 /* If we supported only ISO C we wouldn't have to accept calling
6727 this on anything other than an array with size_only==1 (and even then
6728 only on the outermost level, so no recursion would be needed),
6729 because initializing a flexible array member isn't supported.
6730 But GNU C supports it, so we need to recurse even into
6731 subfields of structs and arrays when size_only is set. */
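/* Added example (hedged): GNU C (and tcc) accept initializing a flexible
   array member, e.g.
       struct S { int n; int data[]; } s = { 2, { 10, 20 } };
   so even the size-only pass has to walk into such sub-initializers. */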
6732 /* just skip expression */
6733 skip_or_save_block(NULL);
6734 } else {
6735 if (!have_elem) {
6736 /* This should happen only when we haven't parsed
6737 the init element above for fear of committing a
6738 string constant to memory too early. */
6739 if (tok != TOK_STR && tok != TOK_LSTR)
6740 expect("string constant");
6741 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
6743 init_putv(type, sec, c);
6747 /* parse an initializer for type 't' if 'has_init' is non-zero, and
6748 allocate space in local or global data space ('r' is either
6749 VT_LOCAL or VT_CONST). If 'v' is non-zero, then an associated
6750 variable 'v' of scope 'scope' is declared before the initializers
6751 are parsed. If 'v' is zero, then a reference to the new object
6752 is put on the value stack. If 'has_init' is 2, special parsing
6753 is done to handle string constants. */
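/* Added sketch (hedged): a file-scope  int x = 3;  arrives with r=VT_CONST
   and ends up in data_section below, while a local  int x = 3;  arrives
   with r=VT_LOCAL and gets stack space carved out of 'loc' instead of
   any section. */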
6754 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r,
6755 int has_init, int v, int scope)
6757 int size, align, addr;
6758 TokenString *init_str = NULL;
6760 Section *sec;
6761 Sym *flexible_array;
6762 Sym *sym = NULL;
6763 int saved_nocode_wanted = nocode_wanted;
6764 #ifdef CONFIG_TCC_BCHECK
6765 int bcheck = tcc_state->do_bounds_check && !NODATA_WANTED;
6766 #endif
6768 if (type->t & VT_STATIC)
6769 nocode_wanted |= NODATA_WANTED ? 0x40000000 : 0x80000000;
6771 flexible_array = NULL;
6772 if ((type->t & VT_BTYPE) == VT_STRUCT) {
6773 Sym *field = type->ref->next;
6774 if (field) {
6775 while (field->next)
6776 field = field->next;
6777 if (field->type.t & VT_ARRAY && field->type.ref->c < 0)
6778 flexible_array = field;
6782 size = type_size(type, &align);
6783 /* If the size is unknown, we must evaluate it before
6784 evaluating the initializers because
6785 initializers can generate global data too
6786 (e.g. string pointers or ISO C99 compound
6787 literals). It also simplifies the handling of
6788 local initializers */
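/* Added example (hedged):  char *tab[] = { "a", "bb" };  the size of tab
   is unknown, yet parsing its initializer also appends the anonymous
   string constants to the data section; hence the separate size-only
   pass over the saved tokens before any space is allocated for tab. */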
6789 if (size < 0 || (flexible_array && has_init)) {
6790 if (!has_init)
6791 tcc_error("unknown type size");
6792 /* get all init string */
6793 if (has_init == 2) {
6794 init_str = tok_str_alloc();
6795 /* only get strings */
6796 while (tok == TOK_STR || tok == TOK_LSTR) {
6797 tok_str_add_tok(init_str);
6798 next();
6800 tok_str_add(init_str, -1);
6801 tok_str_add(init_str, 0);
6802 } else {
6803 skip_or_save_block(&init_str);
6805 unget_tok(0);
6807 /* compute size */
6808 begin_macro(init_str, 1);
6809 next();
6810 decl_initializer(type, NULL, 0, 1, 1);
6811 /* prepare second initializer parsing */
6812 macro_ptr = init_str->str;
6813 next();
6815 /* if still unknown size, error */
6816 size = type_size(type, &align);
6817 if (size < 0)
6818 tcc_error("unknown type size");
6820 /* If there's a flexible array member and it was used in the
6821 initializer, adjust the size. */
6822 if (flexible_array &&
6823 flexible_array->type.ref->c > 0)
6824 size += flexible_array->type.ref->c
6825 * pointed_size(&flexible_array->type);
6826 /* take into account specified alignment if bigger */
6827 if (ad->a.aligned) {
6828 int speca = 1 << (ad->a.aligned - 1);
6829 if (speca > align)
6830 align = speca;
6831 } else if (ad->a.packed) {
6832 align = 1;
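/* Added example (hedged): for  __attribute__((aligned(16))) static char buf[4];
   the attribute value appears to be stored log2-encoded, so
   1 << (aligned - 1) recovers 16 here and overrides the natural
   alignment; __attribute__((packed)) instead forces align = 1. */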
6835 if (NODATA_WANTED)
6836 size = 0, align = 1;
6838 if ((r & VT_VALMASK) == VT_LOCAL) {
6839 sec = NULL;
6840 #ifdef CONFIG_TCC_BCHECK
6841 if (bcheck && (type->t & VT_ARRAY)) {
6842 loc--;
6844 #endif
6845 loc = (loc - size) & -align;
6846 addr = loc;
6847 #ifdef CONFIG_TCC_BCHECK
6848 /* handles bounds */
6849 /* XXX: currently, since we do only one pass, we cannot track
6850 '&' operators, so we add only arrays */
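/* Added note (hedged): with -b, a local array such as  char buf[8];
   gets a one-byte guard gap (loc--) plus an (address, size) record in
   lbounds_section; a plain scalar whose address is taken later is not
   tracked, as the comment above explains. */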
6851 if (bcheck && (type->t & VT_ARRAY)) {
6852 addr_t *bounds_ptr;
6853 /* add padding between regions */
6854 loc--;
6855 /* then add local bound info */
6856 bounds_ptr = section_ptr_add(lbounds_section, 2 * sizeof(addr_t));
6857 bounds_ptr[0] = addr;
6858 bounds_ptr[1] = size;
6860 #endif
6861 if (v) {
6862 /* local variable */
6863 #ifdef CONFIG_TCC_ASM
6864 if (ad->asm_label) {
6865 int reg = asm_parse_regvar(ad->asm_label);
6866 if (reg >= 0)
6867 r = (r & ~VT_VALMASK) | reg;
6869 #endif
6870 sym = sym_push(v, type, r, addr);
6871 sym->a = ad->a;
6872 } else {
6873 /* push local reference */
6874 vset(type, r, addr);
6876 } else {
6877 if (v && scope == VT_CONST) {
6878 /* see if the symbol was already defined */
6879 sym = sym_find(v);
6880 if (sym) {
6881 patch_storage(sym, ad, type);
6882 if (sym->type.t & VT_EXTERN) {
6883 /* if the variable is extern, it was not allocated */
6884 sym->type.t &= ~VT_EXTERN;
6885 /* set array size if it was omitted in extern
6886 declaration */
6887 if ((sym->type.t & VT_ARRAY) &&
6888 sym->type.ref->c < 0 &&
6889 type->ref->c >= 0)
6890 sym->type.ref->c = type->ref->c;
6891 } else if (!has_init) {
6892 /* we accept several definitions of the same
6893 global variable. This is tricky, because we
6894 must play with the SHN_COMMON type of the symbol */
6895 /* no init data, we won't add more to the symbol */
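/* Added example (hedged): a second tentative definition of the same
   global, e.g.  int counter;  appearing twice in one unit, takes this
   path the second time: the symbol stays common (SHN_COMMON) and no
   additional storage is allocated for it. */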
6896 goto no_alloc;
6897 } else if (sym->c) {
6898 ElfW(Sym) *esym;
6899 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
6900 if (esym->st_shndx == data_section->sh_num)
6901 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
6906 /* allocate symbol in corresponding section */
6907 sec = ad->section;
6908 if (!sec) {
6909 if (has_init)
6910 sec = data_section;
6911 else if (tcc_state->nocommon)
6912 sec = bss_section;
6915 if (sec) {
6916 addr = section_add(sec, size, align);
6917 #ifdef CONFIG_TCC_BCHECK
6918 /* add padding if bound check */
6919 if (bcheck)
6920 section_add(sec, 1, 1);
6921 #endif
6922 } else {
6923 addr = align; /* SHN_COMMON is special, symbol value is align */
6924 sec = common_section;
6927 if (v) {
6928 if (!sym) {
6929 sym = sym_push(v, type, r | VT_SYM, 0);
6930 patch_storage(sym, ad, NULL);
6932 /* Local statics have a scope until now (for
6933 warnings), remove it here. */
6934 sym->sym_scope = 0;
6935 /* update symbol definition */
6936 put_extern_sym(sym, sec, addr, size);
6937 } else {
6938 /* push global reference */
6939 sym = get_sym_ref(type, sec, addr, size);
6940 vpushsym(type, sym);
6941 vtop->r |= r;
6944 #ifdef CONFIG_TCC_BCHECK
6945 /* handle bounds now because the symbol must be defined
6946 before the relocation */
6947 if (bcheck) {
6948 addr_t *bounds_ptr;
6950 greloca(bounds_section, sym, bounds_section->data_offset, R_DATA_PTR, 0);
6951 /* then add global bound info */
6952 bounds_ptr = section_ptr_add(bounds_section, 2 * sizeof(addr_t));
6953 bounds_ptr[0] = 0; /* relocated */
6954 bounds_ptr[1] = size;
6956 #endif
6959 if (type->t & VT_VLA) {
6960 int a;
6962 if (NODATA_WANTED)
6963 goto no_alloc;
6965 /* save current stack pointer */
6966 if (vlas_in_scope == 0) {
6967 if (vla_sp_root_loc == -1)
6968 vla_sp_root_loc = (loc -= PTR_SIZE);
6969 gen_vla_sp_save(vla_sp_root_loc);
6972 vla_runtime_type_size(type, &a);
6973 gen_vla_alloc(type, a);
6974 gen_vla_sp_save(addr);
6975 vla_sp_loc = addr;
6976 vlas_in_scope++;
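/* Added sketch (hedged): for  int n = ...; char buf[n];  the frame's
   stack pointer is saved once before the first VLA (vla_sp_root_loc),
   then an alloca-like adjustment sized by vla_runtime_type_size() is
   generated and the resulting SP is saved again at 'addr' so the scope
   can be unwound later. */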
6978 } else if (has_init) {
6979 size_t oldreloc_offset = 0;
6980 if (sec && sec->reloc)
6981 oldreloc_offset = sec->reloc->data_offset;
6982 decl_initializer(type, sec, addr, 1, 0);
6983 if (sec && sec->reloc)
6984 squeeze_multi_relocs(sec, oldreloc_offset);
6985 /* patch flexible array member size back to -1, */
6986 /* for possible subsequent similar declarations */
6987 if (flexible_array)
6988 flexible_array->type.ref->c = -1;
6991 no_alloc:
6992 /* restore parse state if needed */
6993 if (init_str) {
6994 end_macro();
6995 next();
6998 nocode_wanted = saved_nocode_wanted;
7001 /* parse a function defined by symbol 'sym' and generate its code in
7002 'cur_text_section' */
7003 static void gen_function(Sym *sym)
7005 nocode_wanted = 0;
7006 ind = cur_text_section->data_offset;
7007 /* NOTE: we patch the symbol size later */
7008 put_extern_sym(sym, cur_text_section, ind, 0);
7009 funcname = get_tok_str(sym->v, NULL);
7010 func_ind = ind;
7011 /* Initialize VLA state */
7012 vla_sp_loc = -1;
7013 vla_sp_root_loc = -1;
7014 /* put debug symbol */
7015 tcc_debug_funcstart(tcc_state, sym);
7016 /* push a dummy symbol to enable local sym storage */
7017 sym_push2(&local_stack, SYM_FIELD, 0, 0);
7018 local_scope = 1; /* for function parameters */
7019 gfunc_prolog(&sym->type);
7020 local_scope = 0;
7021 rsym = 0;
7022 block(NULL, NULL, 0);
7023 nocode_wanted = 0;
7024 gsym(rsym);
7025 gfunc_epilog();
7026 cur_text_section->data_offset = ind;
7027 label_pop(&global_label_stack, NULL, 0);
7028 /* reset local stack */
7029 local_scope = 0;
7030 sym_pop(&local_stack, NULL, 0);
7031 /* end of function */
7032 /* patch symbol size */
7033 ((ElfW(Sym) *)symtab_section->data)[sym->c].st_size =
7034 ind - func_ind;
7035 tcc_debug_funcend(tcc_state, ind - func_ind);
7036 /* It's better to crash than to generate wrong code */
7037 cur_text_section = NULL;
7038 funcname = ""; /* for safety */
7039 func_vt.t = VT_VOID; /* for safety */
7040 func_var = 0; /* for safety */
7041 ind = 0; /* for safety */
7042 nocode_wanted = 0x80000000;
7043 check_vstack();
7046 static void gen_inline_functions(TCCState *s)
7048 Sym *sym;
7049 int inline_generated, i, ln;
7050 struct InlineFunc *fn;
7052 ln = file->line_num;
7053 /* iterate while inline functions are referenced */
7054 do {
7055 inline_generated = 0;
7056 for (i = 0; i < s->nb_inline_fns; ++i) {
7057 fn = s->inline_fns[i];
7058 sym = fn->sym;
7059 if (sym && sym->c) {
7060 /* the function was used: generate its code and
7061 convert it to a normal function */
7062 fn->sym = NULL;
7063 if (file)
7064 pstrcpy(file->filename, sizeof file->filename, fn->filename);
7065 sym->type.t &= ~VT_INLINE;
7067 begin_macro(fn->func_str, 1);
7068 next();
7069 cur_text_section = text_section;
7070 gen_function(sym);
7071 end_macro();
7073 inline_generated = 1;
7076 } while (inline_generated);
7077 file->line_num = ln;
7080 ST_FUNC void free_inline_functions(TCCState *s)
7082 int i;
7083 /* free tokens of unused inline functions */
7084 for (i = 0; i < s->nb_inline_fns; ++i) {
7085 struct InlineFunc *fn = s->inline_fns[i];
7086 if (fn->sym)
7087 tok_str_free(fn->func_str);
7089 dynarray_reset(&s->inline_fns, &s->nb_inline_fns);
7092 /* 'l' is VT_LOCAL or VT_CONST to define default storage type, or VT_CMP
7093 if parsing old style parameter decl list (and FUNC_SYM is set then) */
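/* Added example (hedged): the VT_CMP mode handles old-style definitions
   such as
       int f(a, b)
           int a;
           char b;
       { return a + b; }
   where the declarations between ')' and '{' are parsed by the recursive
   decl0(VT_CMP, 0, sym) call further down. */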
7094 static int decl0(int l, int is_for_loop_init, Sym *func_sym)
7096 int v, has_init, r;
7097 CType type, btype;
7098 Sym *sym;
7099 AttributeDef ad;
7101 while (1) {
7102 if (!parse_btype(&btype, &ad)) {
7103 if (is_for_loop_init)
7104 return 0;
7105 /* skip redundant ';' if not in old parameter decl scope */
7106 if (tok == ';' && l != VT_CMP) {
7107 next();
7108 continue;
7110 if (l != VT_CONST)
7111 break;
7112 if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
7113 /* global asm block */
7114 asm_global_instr();
7115 continue;
7117 if (tok >= TOK_UIDENT) {
7118 /* special test for old K&R protos without explicit int
7119 type. Only accepted when defining global data */
7120 btype.t = VT_INT;
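/* Added example (hedged): a K&R definition with no return type, e.g.
       old_fn() { return 0; }
   lands here and is given an implicit int type. */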
7121 } else {
7122 if (tok != TOK_EOF)
7123 expect("declaration");
7124 break;
7127 if (tok == ';') {
7128 if ((btype.t & VT_BTYPE) == VT_STRUCT) {
7129 int v = btype.ref->v;
7130 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) >= SYM_FIRST_ANOM)
7131 tcc_warning("unnamed struct/union that defines no instances");
7132 next();
7133 continue;
7135 if (IS_ENUM(btype.t)) {
7136 next();
7137 continue;
7140 while (1) { /* iterate thru each declaration */
7141 type = btype;
7142 /* If the base type itself was an array type of unspecified
7143 size (like in 'typedef int arr[]; arr x = {1};') then
7144 we will overwrite the unknown size by the real one for
7145 this decl. We need to unshare the ref symbol holding
7146 that size. */
7147 if ((type.t & VT_ARRAY) && type.ref->c < 0) {
7148 type.ref = sym_push(SYM_FIELD, &type.ref->type, 0, type.ref->c);
7150 type_decl(&type, &ad, &v, TYPE_DIRECT);
7151 #if 0
7153 char buf[500];
7154 type_to_str(buf, sizeof(buf), &type, get_tok_str(v, NULL));
7155 printf("type = '%s'\n", buf);
7157 #endif
7158 if ((type.t & VT_BTYPE) == VT_FUNC) {
7159 if ((type.t & VT_STATIC) && (l == VT_LOCAL)) {
7160 tcc_error("function without file scope cannot be static");
7162 /* if old style function prototype, we accept a
7163 declaration list */
7164 sym = type.ref;
7165 if (sym->f.func_type == FUNC_OLD && l == VT_CONST)
7166 decl0(VT_CMP, 0, sym);
7169 if (gnu_ext && (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
7170 ad.asm_label = asm_label_instr();
7171 /* parse one last attribute list, after asm label */
7172 parse_attribute(&ad);
7173 if (tok == '{')
7174 expect(";");
7177 #ifdef TCC_TARGET_PE
7178 if (ad.a.dllimport || ad.a.dllexport) {
7179 if (type.t & (VT_STATIC|VT_TYPEDEF))
7180 tcc_error("cannot have dll linkage with static or typedef");
7181 if (ad.a.dllimport) {
7182 if ((type.t & VT_BTYPE) == VT_FUNC)
7183 ad.a.dllimport = 0;
7184 else
7185 type.t |= VT_EXTERN;
7188 #endif
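/* Added example (hedged): on PE targets,
       __declspec(dllimport) int imported_var;
   is rewritten into an extern declaration here, whereas dllimport on a
   function declaration is simply cleared again. */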
7189 if (tok == '{') {
7190 if (l != VT_CONST)
7191 tcc_error("cannot use local functions");
7192 if ((type.t & VT_BTYPE) != VT_FUNC)
7193 expect("function definition");
7195 /* reject abstract declarators in function definitions;
7196 make old-style params without a declaration have int type */
7197 sym = type.ref;
7198 while ((sym = sym->next) != NULL) {
7199 if (!(sym->v & ~SYM_FIELD))
7200 expect("identifier");
7201 if (sym->type.t == VT_VOID)
7202 sym->type = int_type;
7205 /* XXX: cannot do better now: convert extern inline to static inline */
7206 if ((type.t & (VT_EXTERN | VT_INLINE)) == (VT_EXTERN | VT_INLINE))
7207 type.t = (type.t & ~VT_EXTERN) | VT_STATIC;
7209 sym = sym_find(v);
7210 if (sym) {
7211 Sym *ref;
7212 if ((sym->type.t & VT_BTYPE) != VT_FUNC)
7213 goto func_error1;
7215 ref = sym->type.ref;
7217 /* use func_call from prototype if not defined */
7218 if (ref->f.func_call != FUNC_CDECL
7219 && type.ref->f.func_call == FUNC_CDECL)
7220 type.ref->f.func_call = ref->f.func_call;
7222 /* use static from prototype */
7223 if (sym->type.t & VT_STATIC)
7224 type.t = (type.t & ~VT_EXTERN) | VT_STATIC;
7226 /* If the definition has no visibility, use the
7227 one from the prototype. */
7228 if (!type.ref->a.visibility)
7229 type.ref->a.visibility = ref->a.visibility;
7230 /* apply other storage attributes from prototype */
7231 type.ref->a.dllexport |= ref->a.dllexport;
7232 type.ref->a.weak |= ref->a.weak;
7234 if (!is_compatible_types(&sym->type, &type)) {
7235 func_error1:
7236 tcc_error("incompatible types for redefinition of '%s'",
7237 get_tok_str(v, NULL));
7239 if (ref->f.func_body)
7240 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
7241 /* if symbol is already defined, then put complete type */
7242 sym->type = type;
7244 } else {
7245 /* put function symbol */
7246 sym = global_identifier_push(v, type.t, 0);
7247 sym->type.ref = type.ref;
7250 sym->type.ref->f.func_body = 1;
7251 sym->r = VT_SYM | VT_CONST;
7252 patch_storage(sym, &ad, NULL);
7254 /* static inline functions are just recorded as a kind
7255 of macro. Their code will be emitted at the end of
7256 the compilation unit only if they are used */
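/* Added example (hedged):
       static inline int sq(int x) { return x * x; }
   is only tokenized and recorded here; gen_inline_functions() emits its
   code at the end of the unit, and only if sq was actually referenced
   (sym->c != 0). */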
7257 if ((type.t & (VT_INLINE | VT_STATIC)) ==
7258 (VT_INLINE | VT_STATIC)) {
7259 struct InlineFunc *fn;
7260 const char *filename;
7262 filename = file ? file->filename : "";
7263 fn = tcc_malloc(sizeof *fn + strlen(filename));
7264 strcpy(fn->filename, filename);
7265 fn->sym = sym;
7266 skip_or_save_block(&fn->func_str);
7267 dynarray_add(&tcc_state->inline_fns,
7268 &tcc_state->nb_inline_fns, fn);
7269 } else {
7270 /* compute text section */
7271 cur_text_section = ad.section;
7272 if (!cur_text_section)
7273 cur_text_section = text_section;
7274 gen_function(sym);
7276 break;
7277 } else {
7278 if (l == VT_CMP) {
7279 /* find parameter in function parameter list */
7280 for (sym = func_sym->next; sym; sym = sym->next)
7281 if ((sym->v & ~SYM_FIELD) == v)
7282 goto found;
7283 tcc_error("declaration for parameter '%s' but no such parameter",
7284 get_tok_str(v, NULL));
7285 found:
7286 if (type.t & VT_STORAGE) /* 'register' is okay */
7287 tcc_error("storage class specified for '%s'",
7288 get_tok_str(v, NULL));
7289 if (sym->type.t != VT_VOID)
7290 tcc_error("redefinition of parameter '%s'",
7291 get_tok_str(v, NULL));
7292 convert_parameter_type(&type);
7293 sym->type = type;
7294 } else if (type.t & VT_TYPEDEF) {
7295 /* save typedefed type */
7296 /* XXX: test storage specifiers ? */
7297 sym = sym_find(v);
7298 if (sym && sym->sym_scope == local_scope) {
7299 if (!is_compatible_types(&sym->type, &type)
7300 || !(sym->type.t & VT_TYPEDEF))
7301 tcc_error("incompatible redefinition of '%s'",
7302 get_tok_str(v, NULL));
7303 sym->type = type;
7304 } else {
7305 sym = sym_push(v, &type, 0, 0);
7307 sym->a = ad.a;
7308 sym->f = ad.f;
7309 } else {
7310 r = 0;
7311 if ((type.t & VT_BTYPE) == VT_FUNC) {
7312 /* external function definition */
7313 /* specific case for func_call attribute */
7314 type.ref->f = ad.f;
7315 } else if (!(type.t & VT_ARRAY)) {
7316 /* not lvalue if array */
7317 r |= lvalue_type(type.t);
7319 has_init = (tok == '=');
7320 if (has_init && (type.t & VT_VLA))
7321 tcc_error("variable length array cannot be initialized");
7322 if (((type.t & VT_EXTERN) && (!has_init || l != VT_CONST)) ||
7323 ((type.t & VT_BTYPE) == VT_FUNC) ||
7324 ((type.t & VT_ARRAY) && (type.t & VT_STATIC) &&
7325 !has_init && l == VT_CONST && type.ref->c < 0)) {
7326 /* external variable or function */
7327 /* NOTE: as in GCC, uninitialized global static
7328 arrays of unspecified size are considered
7329 extern */
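/* Added example (hedged):  static int tab[];  with no size and no
   initializer takes this path and behaves like an extern tentative
   declaration until a sized definition or an initializer shows up. */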
7330 sym = external_sym(v, &type, r, &ad);
7331 if (ad.alias_target) {
7332 Section tsec;
7333 ElfW(Sym) *esym;
7334 Sym *alias_target;
7335 alias_target = sym_find(ad.alias_target);
7336 if (!alias_target || !alias_target->c)
7337 tcc_error("unsupported forward __alias__ attribute");
7338 esym = &((ElfW(Sym) *)symtab_section->data)[alias_target->c];
7339 tsec.sh_num = esym->st_shndx;
7340 /* Local statics have a scope until now (for
7341 warnings), remove it here. */
7342 sym->sym_scope = 0;
7343 put_extern_sym2(sym, &tsec, esym->st_value, esym->st_size, 0);
7345 } else {
7346 if (type.t & VT_STATIC)
7347 r |= VT_CONST;
7348 else
7349 r |= l;
7350 if (has_init)
7351 next();
7352 decl_initializer_alloc(&type, &ad, r, has_init, v, l);
7355 if (tok != ',') {
7356 if (is_for_loop_init)
7357 return 1;
7358 skip(';');
7359 break;
7361 next();
7363 ad.a.aligned = 0;
7366 return 0;
7369 static void decl(int l)
7371 decl0(l, 0, NULL);
7374 /* ------------------------------------------------------------------------- */