Don't emit applied .rel sections
[tinycc.git] / tccgen.c
blob 2c365be73a8b557bed3131ca84e4991f04dd086e
1 /*
2 * TCC - Tiny C Compiler
3 *
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 #include "tcc.h"
23 /********************************************************/
24 /* global variables */
26 /* loc : local variable index
27 ind : output code index
28 rsym: return symbol
29    anon_sym: anonymous symbol index */
31 ST_DATA int rsym, anon_sym, ind, loc;
33 ST_DATA Sym *sym_free_first;
34 ST_DATA void **sym_pools;
35 ST_DATA int nb_sym_pools;
37 ST_DATA Sym *global_stack;
38 ST_DATA Sym *local_stack;
39 ST_DATA Sym *define_stack;
40 ST_DATA Sym *global_label_stack;
41 ST_DATA Sym *local_label_stack;
42 static int local_scope;
43 static int in_sizeof;
44 static int section_sym;
46 ST_DATA int vlas_in_scope; /* number of VLAs that are currently in scope */
47 ST_DATA int vla_sp_root_loc; /* vla_sp_loc for SP before any VLAs were pushed */
48 ST_DATA int vla_sp_loc; /* Pointer to variable holding location to store stack pointer on the stack when modifying stack pointer */
50 ST_DATA SValue __vstack[1+VSTACK_SIZE], *vtop, *pvtop;
52 ST_DATA int const_wanted; /* true if constant wanted */
53 ST_DATA int nocode_wanted; /* no code generation wanted */
54 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
55 #define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */
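/* Note on the two macros above: nocode_wanted doubles as a nesting
   counter and a flag word.  Small positive values (the counter part,
   bumped around statements whose code is suppressed) disable code and
   static data output alike, which is what NODATA_WANTED tests.  The
   high bits tested by STATIC_DATA_WANTED (e.g. the 0x80000000 set in
   tccgen_compile() at file scope) mean "no code, but static data is
   still wanted"; such a value is negative as an int, so NODATA_WANTED
   remains false there. */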
56 ST_DATA int global_expr;  /* true if compound literals must be allocated globally (used during initializer parsing) */
57 ST_DATA CType func_vt; /* current function return type (used by return instruction) */
58 ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
59 ST_DATA int func_vc;
60 ST_DATA int last_line_num, last_ind, func_ind; /* debug last line number and pc */
61 ST_DATA const char *funcname;
62 ST_DATA int g_debug;
64 ST_DATA CType char_pointer_type, func_old_type, int_type, size_type, ptrdiff_type;
66 ST_DATA struct switch_t {
67 struct case_t {
68 int64_t v1, v2;
69 int sym;
70 } **p; int n; /* list of case ranges */
71 int def_sym; /* default symbol */
72 } *cur_switch; /* current switch */
74 /* ------------------------------------------------------------------------- */
76 static void gen_cast(CType *type);
77 static void gen_cast_s(int t);
78 static inline CType *pointed_type(CType *type);
79 static int is_compatible_types(CType *type1, CType *type2);
80 static int parse_btype(CType *type, AttributeDef *ad);
81 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td);
82 static void parse_expr_type(CType *type);
83 static void init_putv(CType *type, Section *sec, unsigned long c);
84 static void decl_initializer(CType *type, Section *sec, unsigned long c, int first, int size_only);
85 static void block(int *bsym, int *csym, int is_expr);
86 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
87 static void decl(int l);
88 static int decl0(int l, int is_for_loop_init, Sym *);
89 static void expr_eq(void);
90 static void vla_runtime_type_size(CType *type, int *a);
91 static void vla_sp_restore(void);
92 static void vla_sp_restore_root(void);
93 static int is_compatible_unqualified_types(CType *type1, CType *type2);
94 static inline int64_t expr_const64(void);
95 static void vpush64(int ty, unsigned long long v);
96 static void vpush(CType *type);
97 static int gvtst(int inv, int t);
98 static void gen_inline_functions(TCCState *s);
99 static void skip_or_save_block(TokenString **str);
100 static void gv_dup(void);
102 ST_INLN int is_float(int t)
104 int bt;
105 bt = t & VT_BTYPE;
106 return bt == VT_LDOUBLE || bt == VT_DOUBLE || bt == VT_FLOAT || bt == VT_QFLOAT;
109 /* we use our own 'finite' function to avoid potential problems with
110    non-standard math libs */
111 /* XXX: endianness dependent */
112 ST_FUNC int ieee_finite(double d)
114 int p[4];
115 memcpy(p, &d, sizeof(double));
116 return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
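/* How the bit trick above works (little-endian layout, as the XXX
   notes): p[1] holds the sign bit, the 11 exponent bits and the top 20
   mantissa bits of the double.  OR-ing with 0x800fffff sets every bit
   except the exponent field, so the value becomes 0xffffffff exactly
   when the exponent is all ones (Inf or NaN); adding 1 then wraps to 0
   in that case, while any finite value keeps bit 31 set, so the final
   shift yields 0 for Inf/NaN and 1 for finite numbers. */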
119 /* compiling intel long double natively */
120 #if (defined __i386__ || defined __x86_64__) \
121 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
122 # define TCC_IS_NATIVE_387
123 #endif
125 ST_FUNC void test_lvalue(void)
127 if (!(vtop->r & VT_LVAL))
128 expect("lvalue");
131 ST_FUNC void check_vstack(void)
133 if (pvtop != vtop)
134 tcc_error("internal compiler error: vstack leak (%d)", (int)(vtop - pvtop));
137 /* ------------------------------------------------------------------------- */
138 /* vstack debugging aid */
140 #if 0
141 void pv (const char *lbl, int a, int b)
143 int i;
144 for (i = a; i < a + b; ++i) {
145 SValue *p = &vtop[-i];
146 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
147 lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
150 #endif
152 /* ------------------------------------------------------------------------- */
153 /* start of translation unit info */
154 ST_FUNC void tcc_debug_start(TCCState *s1)
156 if (s1->do_debug) {
157 char buf[512];
159 /* file info: full path + filename */
160 section_sym = put_elf_sym(symtab_section, 0, 0,
161 ELFW(ST_INFO)(STB_LOCAL, STT_SECTION), 0,
162 text_section->sh_num, NULL);
163 getcwd(buf, sizeof(buf));
164 #ifdef _WIN32
165 normalize_slashes(buf);
166 #endif
167 pstrcat(buf, sizeof(buf), "/");
168 put_stabs_r(buf, N_SO, 0, 0,
169 text_section->data_offset, text_section, section_sym);
170 put_stabs_r(file->filename, N_SO, 0, 0,
171 text_section->data_offset, text_section, section_sym);
172 last_ind = 0;
173 last_line_num = 0;
176 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
177 symbols can be safely used */
178 put_elf_sym(symtab_section, 0, 0,
179 ELFW(ST_INFO)(STB_LOCAL, STT_FILE), 0,
180 SHN_ABS, file->filename);
183 /* put end of translation unit info */
184 ST_FUNC void tcc_debug_end(TCCState *s1)
186 if (!s1->do_debug)
187 return;
188 put_stabs_r(NULL, N_SO, 0, 0,
189 text_section->data_offset, text_section, section_sym);
193 /* generate line number info */
194 ST_FUNC void tcc_debug_line(TCCState *s1)
196 if (!s1->do_debug)
197 return;
198 if ((last_line_num != file->line_num || last_ind != ind)) {
199 put_stabn(N_SLINE, 0, file->line_num, ind - func_ind);
200 last_ind = ind;
201 last_line_num = file->line_num;
205 /* put function symbol */
206 ST_FUNC void tcc_debug_funcstart(TCCState *s1, Sym *sym)
208 char buf[512];
210 if (!s1->do_debug)
211 return;
213 /* stabs info */
214 /* XXX: we put here a dummy type */
215 snprintf(buf, sizeof(buf), "%s:%c1",
216 funcname, sym->type.t & VT_STATIC ? 'f' : 'F');
217 put_stabs_r(buf, N_FUN, 0, file->line_num, 0,
218 cur_text_section, sym->c);
219 /* //gr gdb wants a line at the function */
220 put_stabn(N_SLINE, 0, file->line_num, 0);
222 last_ind = 0;
223 last_line_num = 0;
226 /* put function size */
227 ST_FUNC void tcc_debug_funcend(TCCState *s1, int size)
229 if (!s1->do_debug)
230 return;
231 put_stabn(N_FUN, 0, 0, size);
234 /* ------------------------------------------------------------------------- */
235 ST_FUNC int tccgen_compile(TCCState *s1)
237 cur_text_section = NULL;
238 funcname = "";
239 anon_sym = SYM_FIRST_ANOM;
240 section_sym = 0;
241 const_wanted = 0;
242 nocode_wanted = 0x80000000;
244 /* define some often used types */
245 int_type.t = VT_INT;
246 char_pointer_type.t = VT_BYTE;
247 mk_pointer(&char_pointer_type);
248 #if PTR_SIZE == 4
249 size_type.t = VT_INT | VT_UNSIGNED;
250 ptrdiff_type.t = VT_INT;
251 #elif LONG_SIZE == 4
252 size_type.t = VT_LLONG | VT_UNSIGNED;
253 ptrdiff_type.t = VT_LLONG;
254 #else
255 size_type.t = VT_LONG | VT_LLONG | VT_UNSIGNED;
256 ptrdiff_type.t = VT_LONG | VT_LLONG;
257 #endif
258 func_old_type.t = VT_FUNC;
259 func_old_type.ref = sym_push(SYM_FIELD, &int_type, 0, 0);
260 func_old_type.ref->f.func_call = FUNC_CDECL;
261 func_old_type.ref->f.func_type = FUNC_OLD;
263 tcc_debug_start(s1);
265 #ifdef TCC_TARGET_ARM
266 arm_init(s1);
267 #endif
269 #ifdef INC_DEBUG
270 printf("%s: **** new file\n", file->filename);
271 #endif
273 parse_flags = PARSE_FLAG_PREPROCESS | PARSE_FLAG_TOK_NUM | PARSE_FLAG_TOK_STR;
274 next();
275 decl(VT_CONST);
276 gen_inline_functions(s1);
277 check_vstack();
278 /* end of translation unit info */
279 tcc_debug_end(s1);
280 return 0;
283 /* ------------------------------------------------------------------------- */
284 ST_FUNC ElfSym *elfsym(Sym *s)
286 if (!s || !s->c)
287 return NULL;
288 return &((ElfSym *)symtab_section->data)[s->c];
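/* Note: sym->c is the index of the symbol within symtab_section->data
   (an array of ElfSym), with 0 meaning "no ELF symbol emitted yet" --
   hence the NULL return above, and the "if (!sym->c)" path in
   put_extern_sym2() below that allocates the entry on first use. */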
291 /* apply storage attributes to Elf symbol */
292 ST_FUNC void update_storage(Sym *sym)
294 ElfSym *esym;
295 int sym_bind, old_sym_bind;
297 esym = elfsym(sym);
298 if (!esym)
299 return;
301 if (sym->a.visibility)
302 esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1))
303 | sym->a.visibility;
305 if (sym->type.t & VT_STATIC)
306 sym_bind = STB_LOCAL;
307 else if (sym->a.weak)
308 sym_bind = STB_WEAK;
309 else
310 sym_bind = STB_GLOBAL;
311 old_sym_bind = ELFW(ST_BIND)(esym->st_info);
312 if (sym_bind != old_sym_bind) {
313 esym->st_info = ELFW(ST_INFO)(sym_bind, ELFW(ST_TYPE)(esym->st_info));
316 #ifdef TCC_TARGET_PE
317 if (sym->a.dllimport)
318 esym->st_other |= ST_PE_IMPORT;
319 if (sym->a.dllexport)
320 esym->st_other |= ST_PE_EXPORT;
321 #endif
323 #if 0
324 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
325 get_tok_str(sym->v, NULL),
326 sym_bind == STB_WEAK ? 'w' : sym_bind == STB_LOCAL ? 'l' : 'g',
327 sym->a.visibility,
328 sym->a.dllexport,
329 sym->a.dllimport
331 #endif
334 /* ------------------------------------------------------------------------- */
335 /* update sym->c so that it points to an external symbol in section
336 'section' with value 'value' */
338 ST_FUNC void put_extern_sym2(Sym *sym, int sh_num,
339 addr_t value, unsigned long size,
340 int can_add_underscore)
342 int sym_type, sym_bind, info, other, t;
343 ElfSym *esym;
344 const char *name;
345 char buf1[256];
346 #ifdef CONFIG_TCC_BCHECK
347 char buf[32];
348 #endif
350 if (!sym->c) {
351 name = get_tok_str(sym->v, NULL);
352 #ifdef CONFIG_TCC_BCHECK
353 if (tcc_state->do_bounds_check) {
354 /* XXX: avoid doing that for statics ? */
355 /* if bound checking is activated, we change some function
356 names by adding the "__bound" prefix */
357 switch(sym->v) {
358 #ifdef TCC_TARGET_PE
359 /* XXX: we rely only on malloc hooks */
360 case TOK_malloc:
361 case TOK_free:
362 case TOK_realloc:
363 case TOK_memalign:
364 case TOK_calloc:
365 #endif
366 case TOK_memcpy:
367 case TOK_memmove:
368 case TOK_memset:
369 case TOK_strlen:
370 case TOK_strcpy:
371 case TOK_alloca:
372 strcpy(buf, "__bound_");
373 strcat(buf, name);
374 name = buf;
375 break;
378 #endif
379 t = sym->type.t;
380 if ((t & VT_BTYPE) == VT_FUNC) {
381 sym_type = STT_FUNC;
382 } else if ((t & VT_BTYPE) == VT_VOID) {
383 sym_type = STT_NOTYPE;
384 } else {
385 sym_type = STT_OBJECT;
387 if (t & VT_STATIC)
388 sym_bind = STB_LOCAL;
389 else
390 sym_bind = STB_GLOBAL;
391 other = 0;
392 #ifdef TCC_TARGET_PE
393 if (sym_type == STT_FUNC && sym->type.ref) {
394 Sym *ref = sym->type.ref;
395 if (ref->f.func_call == FUNC_STDCALL && can_add_underscore) {
396 sprintf(buf1, "_%s@%d", name, ref->f.func_args * PTR_SIZE);
397 name = buf1;
398 other |= ST_PE_STDCALL;
399 can_add_underscore = 0;
402 #endif
403 if (tcc_state->leading_underscore && can_add_underscore) {
404 buf1[0] = '_';
405 pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
406 name = buf1;
408 if (sym->asm_label)
409 name = get_tok_str(sym->asm_label, NULL);
410 info = ELFW(ST_INFO)(sym_bind, sym_type);
411 sym->c = put_elf_sym(symtab_section, value, size, info, other, sh_num, name);
412 } else {
413 esym = elfsym(sym);
414 esym->st_value = value;
415 esym->st_size = size;
416 esym->st_shndx = sh_num;
418 update_storage(sym);
421 ST_FUNC void put_extern_sym(Sym *sym, Section *section,
422 addr_t value, unsigned long size)
424 int sh_num = section ? section->sh_num : SHN_UNDEF;
425 put_extern_sym2(sym, sh_num, value, size, 1);
428 /* add a new relocation entry to symbol 'sym' in section 's' */
429 ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
430 addr_t addend)
432 int c = 0;
434 if (nocode_wanted && s == cur_text_section)
435 return;
437 if (sym) {
438 if (0 == sym->c)
439 put_extern_sym(sym, NULL, 0, 0);
440 c = sym->c;
443 /* now we can add ELF relocation info */
444 put_elf_reloca(symtab_section, s, offset, type, c, addend);
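/* Note: the nocode_wanted check above drops relocations aimed at the
   current text section while code generation is suppressed; the
   instructions they would patch are never emitted, so recording them
   would only leave dangling relocation entries behind. */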
447 #if PTR_SIZE == 4
448 ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
450 greloca(s, sym, offset, type, 0);
452 #endif
454 /* ------------------------------------------------------------------------- */
455 /* symbol allocator */
456 static Sym *__sym_malloc(void)
458 Sym *sym_pool, *sym, *last_sym;
459 int i;
461 sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
462 dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);
464 last_sym = sym_free_first;
465 sym = sym_pool;
466 for(i = 0; i < SYM_POOL_NB; i++) {
467 sym->next = last_sym;
468 last_sym = sym;
469 sym++;
471 sym_free_first = last_sym;
472 return last_sym;
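/* Symbols are allocated from pools of SYM_POOL_NB entries: each new
   pool is registered in sym_pools (presumably so it can be released
   when the compiler state is freed) and its entries are threaded onto
   the sym_free_first free list through the 'next' field.  sym_malloc()
   and sym_free() below then simply pop and push that list, unless
   SYM_DEBUG routes them through plain tcc_malloc()/tcc_free(). */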
475 static inline Sym *sym_malloc(void)
477 Sym *sym;
478 #ifndef SYM_DEBUG
479 sym = sym_free_first;
480 if (!sym)
481 sym = __sym_malloc();
482 sym_free_first = sym->next;
483 return sym;
484 #else
485 sym = tcc_malloc(sizeof(Sym));
486 return sym;
487 #endif
490 ST_INLN void sym_free(Sym *sym)
492 #ifndef SYM_DEBUG
493 sym->next = sym_free_first;
494 sym_free_first = sym;
495 #else
496 tcc_free(sym);
497 #endif
500 /* push, without hashing */
501 ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, int c)
503 Sym *s;
505 s = sym_malloc();
506 memset(s, 0, sizeof *s);
507 s->v = v;
508 s->type.t = t;
509 s->c = c;
510 /* add in stack */
511 s->prev = *ps;
512 *ps = s;
513 return s;
516 /* find a symbol and return its associated structure. 's' is the top
517 of the symbol stack */
518 ST_FUNC Sym *sym_find2(Sym *s, int v)
520 while (s) {
521 if (s->v == v)
522 return s;
523 else if (s->v == -1)
524 return NULL;
525 s = s->prev;
527 return NULL;
530 /* structure lookup */
531 ST_INLN Sym *struct_find(int v)
533 v -= TOK_IDENT;
534 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
535 return NULL;
536 return table_ident[v]->sym_struct;
539 /* find an identifier */
540 ST_INLN Sym *sym_find(int v)
542 v -= TOK_IDENT;
543 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
544 return NULL;
545 return table_ident[v]->sym_identifier;
548 /* push a given symbol on the symbol stack */
549 ST_FUNC Sym *sym_push(int v, CType *type, int r, int c)
551 Sym *s, **ps;
552 TokenSym *ts;
554 if (local_stack)
555 ps = &local_stack;
556 else
557 ps = &global_stack;
558 s = sym_push2(ps, v, type->t, c);
559 s->type.ref = type->ref;
560 s->r = r;
561 /* don't record fields or anonymous symbols */
562 /* XXX: simplify */
563 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
564 /* record symbol in token array */
565 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
566 if (v & SYM_STRUCT)
567 ps = &ts->sym_struct;
568 else
569 ps = &ts->sym_identifier;
570 s->prev_tok = *ps;
571 *ps = s;
572 s->sym_scope = local_scope;
573 if (s->prev_tok && s->prev_tok->sym_scope == s->sym_scope)
574 tcc_error("redeclaration of '%s'",
575 get_tok_str(v & ~SYM_STRUCT, NULL));
577 return s;
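/* The symbol table is really two structures in one: the global_stack /
   local_stack chains (linked via 'prev') record declaration order so
   whole scopes can be popped, while each TokenSym keeps its own
   sym_identifier and sym_struct chains (linked via 'prev_tok') so that
   lookup by token is constant time.  sym_push() links a symbol into
   both, and the sym_scope comparison above is what detects a
   redeclaration within the same local scope. */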
580 /* push a global identifier */
581 ST_FUNC Sym *global_identifier_push(int v, int t, int c)
583 Sym *s, **ps;
584 s = sym_push2(&global_stack, v, t, c);
585 /* don't record anonymous symbol */
586 if (v < SYM_FIRST_ANOM) {
587 ps = &table_ident[v - TOK_IDENT]->sym_identifier;
588 /* modify the top most local identifier, so that
589 sym_identifier will point to 's' when popped */
590 while (*ps != NULL && (*ps)->sym_scope)
591 ps = &(*ps)->prev_tok;
592 s->prev_tok = *ps;
593 *ps = s;
595 return s;
598 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
599 pop them yet from the list, but do remove them from the token array. */
600 ST_FUNC void sym_pop(Sym **ptop, Sym *b, int keep)
602 Sym *s, *ss, **ps;
603 TokenSym *ts;
604 int v;
606 s = *ptop;
607 while(s != b) {
608 ss = s->prev;
609 v = s->v;
610 /* remove symbol in token array */
611 /* XXX: simplify */
612 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
613 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
614 if (v & SYM_STRUCT)
615 ps = &ts->sym_struct;
616 else
617 ps = &ts->sym_identifier;
618 *ps = s->prev_tok;
620 if (!keep)
621 sym_free(s);
622 s = ss;
624 if (!keep)
625 *ptop = b;
628 /* ------------------------------------------------------------------------- */
630 static void vsetc(CType *type, int r, CValue *vc)
632 int v;
634 if (vtop >= vstack + (VSTACK_SIZE - 1))
635 tcc_error("memory full (vstack)");
636 /* cannot leave cpu flags live if other instructions are generated. Also
637 avoid leaving VT_JMP anywhere except on the top of the stack
638 because it would complicate the code generator.
640 Don't do this when nocode_wanted. vtop might come from
641 !nocode_wanted regions (see 88_codeopt.c) and transforming
642 it to a register without actually generating code is wrong
643    as its value might still be used for real. All values
644 we push under nocode_wanted will eventually be popped
645 again, so that the VT_CMP/VT_JMP value will be in vtop
646 when code is unsuppressed again.
648 Same logic below in vswap(); */
649 if (vtop >= vstack && !nocode_wanted) {
650 v = vtop->r & VT_VALMASK;
651 if (v == VT_CMP || (v & ~1) == VT_JMP)
652 gv(RC_INT);
655 vtop++;
656 vtop->type = *type;
657 vtop->r = r;
658 vtop->r2 = VT_CONST;
659 vtop->c = *vc;
660 vtop->sym = NULL;
663 ST_FUNC void vswap(void)
665 SValue tmp;
666 /* cannot vswap cpu flags. See comment at vsetc() above */
667 if (vtop >= vstack && !nocode_wanted) {
668 int v = vtop->r & VT_VALMASK;
669 if (v == VT_CMP || (v & ~1) == VT_JMP)
670 gv(RC_INT);
672 tmp = vtop[0];
673 vtop[0] = vtop[-1];
674 vtop[-1] = tmp;
677 /* pop stack value */
678 ST_FUNC void vpop(void)
680 int v;
681 v = vtop->r & VT_VALMASK;
682 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
683 /* for x86, we need to pop the FP stack */
684 if (v == TREG_ST0) {
685 o(0xd8dd); /* fstp %st(0) */
686 } else
687 #endif
688 if (v == VT_JMP || v == VT_JMPI) {
689 /* need to put correct jump if && or || without test */
690 gsym(vtop->c.i);
692 vtop--;
695 /* push constant of type "type" with useless value */
696 ST_FUNC void vpush(CType *type)
698 vset(type, VT_CONST, 0);
701 /* push integer constant */
702 ST_FUNC void vpushi(int v)
704 CValue cval;
705 cval.i = v;
706 vsetc(&int_type, VT_CONST, &cval);
709 /* push a pointer sized constant */
710 static void vpushs(addr_t v)
712 CValue cval;
713 cval.i = v;
714 vsetc(&size_type, VT_CONST, &cval);
717 /* push arbitrary 64bit constant */
718 ST_FUNC void vpush64(int ty, unsigned long long v)
720 CValue cval;
721 CType ctype;
722 ctype.t = ty;
723 ctype.ref = NULL;
724 cval.i = v;
725 vsetc(&ctype, VT_CONST, &cval);
728 /* push long long constant */
729 static inline void vpushll(long long v)
731 vpush64(VT_LLONG, v);
734 ST_FUNC void vset(CType *type, int r, int v)
736 CValue cval;
738 cval.i = v;
739 vsetc(type, r, &cval);
742 static void vseti(int r, int v)
744 CType type;
745 type.t = VT_INT;
746 type.ref = NULL;
747 vset(&type, r, v);
750 ST_FUNC void vpushv(SValue *v)
752 if (vtop >= vstack + (VSTACK_SIZE - 1))
753 tcc_error("memory full (vstack)");
754 vtop++;
755 *vtop = *v;
758 static void vdup(void)
760 vpushv(vtop);
763 /* rotate n first stack elements to the bottom
764    I1 ... In -> I2 ... In I1 [top is right] */
766 ST_FUNC void vrotb(int n)
768 int i;
769 SValue tmp;
771 tmp = vtop[-n + 1];
772 for(i=-n+1;i!=0;i++)
773 vtop[i] = vtop[i+1];
774 vtop[0] = tmp;
777 /* rotate the n elements before entry e towards the top
778    I1 ... In ... -> In I1 ... I(n-1) ... [top is right] */
780 ST_FUNC void vrote(SValue *e, int n)
782 int i;
783 SValue tmp;
785 tmp = *e;
786 for(i = 0;i < n - 1; i++)
787 e[-i] = e[-i - 1];
788 e[-n + 1] = tmp;
791 /* rotate n first stack elements to the top
792    I1 ... In -> In I1 ... I(n-1) [top is right] */
794 ST_FUNC void vrott(int n)
796 vrote(vtop, n);
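/* Concrete example (top of stack on the right): starting from  a b c,
   vrotb(3) yields  b c a  (the bottom element of the group ends up on
   top) and vrott(3) yields  c a b  (the top element moves to the bottom
   of the group); vrote(e, n) does the same as vrott(n) but rooted at
   entry 'e' instead of vtop. */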
799 /* push a symbol value of TYPE */
800 static inline void vpushsym(CType *type, Sym *sym)
802 CValue cval;
803 cval.i = 0;
804 vsetc(type, VT_CONST | VT_SYM, &cval);
805 vtop->sym = sym;
808 /* Return a static symbol pointing to a section */
809 ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
811 int v;
812 Sym *sym;
814 v = anon_sym++;
815 sym = global_identifier_push(v, type->t | VT_STATIC, 0);
816 sym->type.ref = type->ref;
817 sym->r = VT_CONST | VT_SYM;
818 put_extern_sym(sym, sec, offset, size);
819 return sym;
822 /* push a reference to a section offset by adding a dummy symbol */
823 static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
825 vpushsym(type, get_sym_ref(type, sec, offset, size));
828 /* define a new external reference to a symbol 'v' of type 'u' */
829 ST_FUNC Sym *external_global_sym(int v, CType *type, int r)
831 Sym *s;
833 s = sym_find(v);
834 if (!s) {
835 /* push forward reference */
836 s = global_identifier_push(v, type->t | VT_EXTERN, 0);
837 s->type.ref = type->ref;
838 s->r = r | VT_CONST | VT_SYM;
839 } else if (IS_ASM_SYM(s)) {
840 s->type.t = type->t | (s->type.t & VT_EXTERN);
841 s->type.ref = type->ref;
842 update_storage(s);
844 return s;
847 /* Merge some type attributes. */
848 static void patch_type(Sym *sym, CType *type)
850 if (!(type->t & VT_EXTERN)) {
851 if (!(sym->type.t & VT_EXTERN))
852 tcc_error("redefinition of '%s'", get_tok_str(sym->v, NULL));
853 sym->type.t &= ~VT_EXTERN;
856 if (IS_ASM_SYM(sym)) {
857 /* stay static if both are static */
858 sym->type.t = type->t & (sym->type.t | ~VT_STATIC);
859 sym->type.ref = type->ref;
862 if (!is_compatible_types(&sym->type, type)) {
863 tcc_error("incompatible types for redefinition of '%s'",
864 get_tok_str(sym->v, NULL));
866 } else if ((sym->type.t & VT_BTYPE) == VT_FUNC) {
867 int static_proto = sym->type.t & VT_STATIC;
868 /* warn if static follows non-static function declaration */
869 if ((type->t & VT_STATIC) && !static_proto && !(type->t & VT_INLINE))
870 tcc_warning("static storage ignored for redefinition of '%s'",
871 get_tok_str(sym->v, NULL));
873 if (0 == (type->t & VT_EXTERN)) {
874 /* put complete type, use static from prototype */
875 sym->type.t = (type->t & ~VT_STATIC) | static_proto;
876 if (type->t & VT_INLINE)
877 sym->type.t = type->t;
878 sym->type.ref = type->ref;
881 } else {
882 if ((sym->type.t & VT_ARRAY) && type->ref->c >= 0) {
883 /* set array size if it was omitted in extern declaration */
884 if (sym->type.ref->c < 0)
885 sym->type.ref->c = type->ref->c;
886 else if (sym->type.ref->c != type->ref->c)
887 tcc_error("conflicting type for '%s'", get_tok_str(sym->v, NULL));
889 if ((type->t ^ sym->type.t) & VT_STATIC)
890 tcc_warning("storage mismatch for redefinition of '%s'",
891 get_tok_str(sym->v, NULL));
896 /* Merge some storage attributes. */
897 static void patch_storage(Sym *sym, AttributeDef *ad, CType *type)
899 if (type)
900 patch_type(sym, type);
902 #ifdef TCC_TARGET_PE
903 if (sym->a.dllimport != ad->a.dllimport)
904 tcc_error("incompatible dll linkage for redefinition of '%s'",
905 get_tok_str(sym->v, NULL));
906 sym->a.dllexport |= ad->a.dllexport;
907 #endif
908 sym->a.weak |= ad->a.weak;
909 if (ad->a.visibility) {
910 int vis = sym->a.visibility;
911 int vis2 = ad->a.visibility;
912 if (vis == STV_DEFAULT)
913 vis = vis2;
914 else if (vis2 != STV_DEFAULT)
915 vis = (vis < vis2) ? vis : vis2;
916 sym->a.visibility = vis;
918 if (ad->a.aligned)
919 sym->a.aligned = ad->a.aligned;
920 if (ad->asm_label)
921 sym->asm_label = ad->asm_label;
922 update_storage(sym);
925 /* define a new external reference to a symbol 'v' */
926 static Sym *external_sym(int v, CType *type, int r, AttributeDef *ad)
928 Sym *s;
929 s = sym_find(v);
930 if (!s) {
931 /* push forward reference */
932 s = sym_push(v, type, r | VT_CONST | VT_SYM, 0);
933 s->type.t |= VT_EXTERN;
934 s->a = ad->a;
935 s->sym_scope = 0;
936 } else {
937 if (s->type.ref == func_old_type.ref) {
938 s->type.ref = type->ref;
939 s->r = r | VT_CONST | VT_SYM;
940 s->type.t |= VT_EXTERN;
942 patch_storage(s, ad, type);
944 return s;
947 /* push a reference to global symbol v */
948 ST_FUNC void vpush_global_sym(CType *type, int v)
950 vpushsym(type, external_global_sym(v, type, 0));
953 /* save registers up to (vtop - n) stack entry */
954 ST_FUNC void save_regs(int n)
956 SValue *p, *p1;
957 for(p = vstack, p1 = vtop - n; p <= p1; p++)
958 save_reg(p->r);
961 /* save r to the memory stack, and mark it as being free */
962 ST_FUNC void save_reg(int r)
964 save_reg_upstack(r, 0);
967 /* save r to the memory stack, and mark it as being free,
968 if seen up to (vtop - n) stack entry */
969 ST_FUNC void save_reg_upstack(int r, int n)
971 int l, saved, size, align;
972 SValue *p, *p1, sv;
973 CType *type;
975 if ((r &= VT_VALMASK) >= VT_CONST)
976 return;
977 if (nocode_wanted)
978 return;
980 /* modify all stack values */
981 saved = 0;
982 l = 0;
983 for(p = vstack, p1 = vtop - n; p <= p1; p++) {
984 if ((p->r & VT_VALMASK) == r ||
985 ((p->type.t & VT_BTYPE) == VT_LLONG && (p->r2 & VT_VALMASK) == r)) {
986 /* must save value on stack if not already done */
987 if (!saved) {
988 /* NOTE: must reload 'r' because r might be equal to r2 */
989 r = p->r & VT_VALMASK;
990 /* store register in the stack */
991 type = &p->type;
992 if ((p->r & VT_LVAL) ||
993 (!is_float(type->t) && (type->t & VT_BTYPE) != VT_LLONG))
994 #if PTR_SIZE == 8
995 type = &char_pointer_type;
996 #else
997 type = &int_type;
998 #endif
999 size = type_size(type, &align);
1000 loc = (loc - size) & -align;
1001 sv.type.t = type->t;
1002 sv.r = VT_LOCAL | VT_LVAL;
1003 sv.c.i = loc;
1004 store(r, &sv);
1005 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1006 /* x86 specific: need to pop fp register ST0 if saved */
1007 if (r == TREG_ST0) {
1008 o(0xd8dd); /* fstp %st(0) */
1010 #endif
1011 #if PTR_SIZE == 4
1012 /* special long long case */
1013 if ((type->t & VT_BTYPE) == VT_LLONG) {
1014 sv.c.i += 4;
1015 store(p->r2, &sv);
1017 #endif
1018 l = loc;
1019 saved = 1;
1021 /* mark that stack entry as being saved on the stack */
1022 if (p->r & VT_LVAL) {
1023 /* also clear the bounded flag because the
1024 relocation address of the function was stored in
1025 p->c.i */
1026 p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
1027 } else {
1028 p->r = lvalue_type(p->type.t) | VT_LOCAL;
1030 p->r2 = VT_CONST;
1031 p->c.i = l;
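/* In short: every vstack entry currently held in register 'r' (or in
   its r2 for the second word of a long long) gets a slot carved out of
   the current frame ('loc' grows downwards, suitably aligned), the
   register is stored there once, and the entries are retagged as
   VT_LOCAL/VT_LLOCAL lvalues referring to that slot so the register can
   be reused. */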
1036 #ifdef TCC_TARGET_ARM
1037 /* find a register of class 'rc2' with at most one reference on stack.
1038 * If none, call get_reg(rc) */
1039 ST_FUNC int get_reg_ex(int rc, int rc2)
1041 int r;
1042 SValue *p;
1044 for(r=0;r<NB_REGS;r++) {
1045 if (reg_classes[r] & rc2) {
1046 int n;
1047 n=0;
1048 for(p = vstack; p <= vtop; p++) {
1049 if ((p->r & VT_VALMASK) == r ||
1050 (p->r2 & VT_VALMASK) == r)
1051 n++;
1053 if (n <= 1)
1054 return r;
1057 return get_reg(rc);
1059 #endif
1061 /* find a free register of class 'rc'. If none, save one register */
1062 ST_FUNC int get_reg(int rc)
1064 int r;
1065 SValue *p;
1067 /* find a free register */
1068 for(r=0;r<NB_REGS;r++) {
1069 if (reg_classes[r] & rc) {
1070 if (nocode_wanted)
1071 return r;
1072 for(p=vstack;p<=vtop;p++) {
1073 if ((p->r & VT_VALMASK) == r ||
1074 (p->r2 & VT_VALMASK) == r)
1075 goto notfound;
1077 return r;
1079 notfound: ;
1082 /* no register left : free the first one on the stack (VERY
1083 IMPORTANT to start from the bottom to ensure that we don't
1084 spill registers used in gen_opi()) */
1085 for(p=vstack;p<=vtop;p++) {
1086 /* look at second register (if long long) */
1087 r = p->r2 & VT_VALMASK;
1088 if (r < VT_CONST && (reg_classes[r] & rc))
1089 goto save_found;
1090 r = p->r & VT_VALMASK;
1091 if (r < VT_CONST && (reg_classes[r] & rc)) {
1092 save_found:
1093 save_reg(r);
1094 return r;
1097 /* Should never come here */
1098 return -1;
1101 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1102 if needed */
1103 static void move_reg(int r, int s, int t)
1105 SValue sv;
1107 if (r != s) {
1108 save_reg(r);
1109 sv.type.t = t;
1110 sv.type.ref = NULL;
1111 sv.r = s;
1112 sv.c.i = 0;
1113 load(r, &sv);
1117 /* get address of vtop (vtop MUST BE an lvalue) */
1118 ST_FUNC void gaddrof(void)
1120 vtop->r &= ~VT_LVAL;
1121 /* tricky: if saved lvalue, then we can go back to lvalue */
1122 if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
1123 vtop->r = (vtop->r & ~(VT_VALMASK | VT_LVAL_TYPE)) | VT_LOCAL | VT_LVAL;
1128 #ifdef CONFIG_TCC_BCHECK
1129 /* generate lvalue bound code */
1130 static void gbound(void)
1132 int lval_type;
1133 CType type1;
1135 vtop->r &= ~VT_MUSTBOUND;
1136 /* if lvalue, then use checking code before dereferencing */
1137 if (vtop->r & VT_LVAL) {
1138 /* if not VT_BOUNDED value, then make one */
1139 if (!(vtop->r & VT_BOUNDED)) {
1140 lval_type = vtop->r & (VT_LVAL_TYPE | VT_LVAL);
1141 /* must save type because we must set it to int to get pointer */
1142 type1 = vtop->type;
1143 vtop->type.t = VT_PTR;
1144 gaddrof();
1145 vpushi(0);
1146 gen_bounded_ptr_add();
1147 vtop->r |= lval_type;
1148 vtop->type = type1;
1150 /* then check for dereferencing */
1151 gen_bounded_ptr_deref();
1154 #endif
1156 static void incr_bf_adr(int o)
1158 vtop->type = char_pointer_type;
1159 gaddrof();
1160 vpushi(o);
1161 gen_op('+');
1162 vtop->type.t = (vtop->type.t & ~(VT_BTYPE|VT_DEFSIGN))
1163 | (VT_BYTE|VT_UNSIGNED);
1164 vtop->r = (vtop->r & ~VT_LVAL_TYPE)
1165 | (VT_LVAL_BYTE|VT_LVAL_UNSIGNED|VT_LVAL);
1168 /* single-byte load mode for packed or otherwise unaligned bitfields */
1169 static void load_packed_bf(CType *type, int bit_pos, int bit_size)
1171 int n, o, bits;
1172 save_reg_upstack(vtop->r, 1);
1173 vpush64(type->t & VT_BTYPE, 0); // B X
1174 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
1175 do {
1176 vswap(); // X B
1177 incr_bf_adr(o);
1178 vdup(); // X B B
1179 n = 8 - bit_pos;
1180 if (n > bit_size)
1181 n = bit_size;
1182 if (bit_pos)
1183 vpushi(bit_pos), gen_op(TOK_SHR), bit_pos = 0; // X B Y
1184 if (n < 8)
1185 vpushi((1 << n) - 1), gen_op('&');
1186 gen_cast(type);
1187 if (bits)
1188 vpushi(bits), gen_op(TOK_SHL);
1189 vrotb(3); // B Y X
1190 gen_op('|'); // B X
1191 bits += n, bit_size -= n, o = 1;
1192 } while (bit_size);
1193 vswap(), vpop();
1194 if (!(type->t & VT_UNSIGNED)) {
1195 n = ((type->t & VT_BTYPE) == VT_LLONG ? 64 : 32) - bits;
1196 vpushi(n), gen_op(TOK_SHL);
1197 vpushi(n), gen_op(TOK_SAR);
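/* Worked example: loading a 10-bit field starting at bit offset 3 of a
   32-bit type.  The first pass reads the byte containing bit 3, shifts
   it right by 3 and masks 5 bits; the second pass reads the next byte,
   masks 5 more bits and shifts them left by 5 before OR-ing them into
   the accumulator.  For a signed field the final SHL/SAR pair by
   32 - 10 = 22 then sign-extends the result. */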
1201 /* single-byte store mode for packed or otherwise unaligned bitfields */
1202 static void store_packed_bf(int bit_pos, int bit_size)
1204 int bits, n, o, m, c;
1206 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1207 vswap(); // X B
1208 save_reg_upstack(vtop->r, 1);
1209 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
1210 do {
1211 incr_bf_adr(o); // X B
1212 vswap(); //B X
1213 c ? vdup() : gv_dup(); // B V X
1214 vrott(3); // X B V
1215 if (bits)
1216 vpushi(bits), gen_op(TOK_SHR);
1217 if (bit_pos)
1218 vpushi(bit_pos), gen_op(TOK_SHL);
1219 n = 8 - bit_pos;
1220 if (n > bit_size)
1221 n = bit_size;
1222 if (n < 8) {
1223 m = ((1 << n) - 1) << bit_pos;
1224 vpushi(m), gen_op('&'); // X B V1
1225 vpushv(vtop-1); // X B V1 B
1226 vpushi(m & 0x80 ? ~m & 0x7f : ~m);
1227 gen_op('&'); // X B V1 B1
1228 gen_op('|'); // X B V2
1230 vdup(), vtop[-1] = vtop[-2]; // X B B V2
1231 vstore(), vpop(); // X B
1232 bits += n, bit_size -= n, bit_pos = 0, o = 1;
1233 } while (bit_size);
1234 vpop(), vpop();
1237 static int adjust_bf(SValue *sv, int bit_pos, int bit_size)
1239 int t;
1240 if (0 == sv->type.ref)
1241 return 0;
1242 t = sv->type.ref->auxtype;
1243 if (t != -1 && t != VT_STRUCT) {
1244 sv->type.t = (sv->type.t & ~VT_BTYPE) | t;
1245 sv->r = (sv->r & ~VT_LVAL_TYPE) | lvalue_type(sv->type.t);
1247 return t;
1250 /* store vtop in a register belonging to class 'rc'. lvalues are
1251    converted to values. Cannot be used if the value cannot be
1252    converted to a register value (such as structures). */
1253 ST_FUNC int gv(int rc)
1255 int r, bit_pos, bit_size, size, align, rc2;
1257 /* NOTE: get_reg can modify vstack[] */
1258 if (vtop->type.t & VT_BITFIELD) {
1259 CType type;
1261 bit_pos = BIT_POS(vtop->type.t);
1262 bit_size = BIT_SIZE(vtop->type.t);
1263 /* remove bit field info to avoid loops */
1264 vtop->type.t &= ~VT_STRUCT_MASK;
1266 type.ref = NULL;
1267 type.t = vtop->type.t & VT_UNSIGNED;
1268 if ((vtop->type.t & VT_BTYPE) == VT_BOOL)
1269 type.t |= VT_UNSIGNED;
1271 r = adjust_bf(vtop, bit_pos, bit_size);
1273 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
1274 type.t |= VT_LLONG;
1275 else
1276 type.t |= VT_INT;
1278 if (r == VT_STRUCT) {
1279 load_packed_bf(&type, bit_pos, bit_size);
1280 } else {
1281 int bits = (type.t & VT_BTYPE) == VT_LLONG ? 64 : 32;
1282 /* cast to int to propagate signedness in following ops */
1283 gen_cast(&type);
1284 /* generate shifts */
1285 vpushi(bits - (bit_pos + bit_size));
1286 gen_op(TOK_SHL);
1287 vpushi(bits - bit_size);
1288 /* NOTE: transformed to SHR if unsigned */
1289 gen_op(TOK_SAR);
1291 r = gv(rc);
1292 } else {
1293 if (is_float(vtop->type.t) &&
1294 (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1295 unsigned long offset;
1296 /* CPUs usually cannot use float constants, so we store them
1297 generically in data segment */
1298 size = type_size(&vtop->type, &align);
1299 if (NODATA_WANTED)
1300 size = 0, align = 1;
1301 offset = section_add(data_section, size, align);
1302 vpush_ref(&vtop->type, data_section, offset, size);
1303 vswap();
1304 init_putv(&vtop->type, data_section, offset);
1305 vtop->r |= VT_LVAL;
1307 #ifdef CONFIG_TCC_BCHECK
1308 if (vtop->r & VT_MUSTBOUND)
1309 gbound();
1310 #endif
1312 r = vtop->r & VT_VALMASK;
1313 rc2 = (rc & RC_FLOAT) ? RC_FLOAT : RC_INT;
1314 #ifndef TCC_TARGET_ARM64
1315 if (rc == RC_IRET)
1316 rc2 = RC_LRET;
1317 #ifdef TCC_TARGET_X86_64
1318 else if (rc == RC_FRET)
1319 rc2 = RC_QRET;
1320 #endif
1321 #endif
1322 /* need to reload if:
1323 - constant
1324 - lvalue (need to dereference pointer)
1325 - already a register, but not in the right class */
1326 if (r >= VT_CONST
1327 || (vtop->r & VT_LVAL)
1328 || !(reg_classes[r] & rc)
1329 #if PTR_SIZE == 8
1330 || ((vtop->type.t & VT_BTYPE) == VT_QLONG && !(reg_classes[vtop->r2] & rc2))
1331 || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT && !(reg_classes[vtop->r2] & rc2))
1332 #else
1333 || ((vtop->type.t & VT_BTYPE) == VT_LLONG && !(reg_classes[vtop->r2] & rc2))
1334 #endif
1337 r = get_reg(rc);
1338 #if PTR_SIZE == 8
1339 if (((vtop->type.t & VT_BTYPE) == VT_QLONG) || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT)) {
1340 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
1341 #else
1342 if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
1343 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
1344 unsigned long long ll;
1345 #endif
1346 int r2, original_type;
1347 original_type = vtop->type.t;
1348 /* two register type load : expand to two words
1349 temporarily */
1350 #if PTR_SIZE == 4
1351 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1352 /* load constant */
1353 ll = vtop->c.i;
1354 vtop->c.i = ll; /* first word */
1355 load(r, vtop);
1356 vtop->r = r; /* save register value */
1357 vpushi(ll >> 32); /* second word */
1358 } else
1359 #endif
1360 if (vtop->r & VT_LVAL) {
1361 /* We do not want to modify the long long
1362    pointer here, so the safest (and least
1363    efficient) approach is to save all the other
1364    registers on the stack. XXX: totally inefficient. */
1365 #if 0
1366 save_regs(1);
1367 #else
1368 /* lvalue_save: save only if used further down the stack */
1369 save_reg_upstack(vtop->r, 1);
1370 #endif
1371 /* load from memory */
1372 vtop->type.t = load_type;
1373 load(r, vtop);
1374 vdup();
1375 vtop[-1].r = r; /* save register value */
1376 /* increment pointer to get second word */
1377 vtop->type.t = addr_type;
1378 gaddrof();
1379 vpushi(load_size);
1380 gen_op('+');
1381 vtop->r |= VT_LVAL;
1382 vtop->type.t = load_type;
1383 } else {
1384 /* move registers */
1385 load(r, vtop);
1386 vdup();
1387 vtop[-1].r = r; /* save register value */
1388 vtop->r = vtop[-1].r2;
1390 /* Allocate second register. Here we rely on the fact that
1391 get_reg() tries first to free r2 of an SValue. */
1392 r2 = get_reg(rc2);
1393 load(r2, vtop);
1394 vpop();
1395 /* write second register */
1396 vtop->r2 = r2;
1397 vtop->type.t = original_type;
1398 } else if ((vtop->r & VT_LVAL) && !is_float(vtop->type.t)) {
1399 int t1, t;
1400 /* lvalue of scalar type : need to use lvalue type
1401 because of possible cast */
1402 t = vtop->type.t;
1403 t1 = t;
1404 /* compute memory access type */
1405 if (vtop->r & VT_LVAL_BYTE)
1406 t = VT_BYTE;
1407 else if (vtop->r & VT_LVAL_SHORT)
1408 t = VT_SHORT;
1409 if (vtop->r & VT_LVAL_UNSIGNED)
1410 t |= VT_UNSIGNED;
1411 vtop->type.t = t;
1412 load(r, vtop);
1413 /* restore wanted type */
1414 vtop->type.t = t1;
1415 } else {
1416 /* one register type load */
1417 load(r, vtop);
1420 vtop->r = r;
1421 #ifdef TCC_TARGET_C67
1422 /* uses register pairs for doubles */
1423 if ((vtop->type.t & VT_BTYPE) == VT_DOUBLE)
1424 vtop->r2 = r+1;
1425 #endif
1427 return r;
1430 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1431 ST_FUNC void gv2(int rc1, int rc2)
1433 int v;
1435 /* generate more generic register first. But VT_JMP or VT_CMP
1436 values must be generated first in all cases to avoid possible
1437 reload errors */
1438 v = vtop[0].r & VT_VALMASK;
1439 if (v != VT_CMP && (v & ~1) != VT_JMP && rc1 <= rc2) {
1440 vswap();
1441 gv(rc1);
1442 vswap();
1443 gv(rc2);
1444 /* test if reload is needed for first register */
1445 if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
1446 vswap();
1447 gv(rc1);
1448 vswap();
1450 } else {
1451 gv(rc2);
1452 vswap();
1453 gv(rc1);
1454 vswap();
1455 /* test if reload is needed for first register */
1456 if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
1457 gv(rc2);
1462 #ifndef TCC_TARGET_ARM64
1463 /* wrapper around RC_FRET to return a register by type */
1464 static int rc_fret(int t)
1466 #ifdef TCC_TARGET_X86_64
1467 if (t == VT_LDOUBLE) {
1468 return RC_ST0;
1470 #endif
1471 return RC_FRET;
1473 #endif
1475 /* wrapper around REG_FRET to return a register by type */
1476 static int reg_fret(int t)
1478 #ifdef TCC_TARGET_X86_64
1479 if (t == VT_LDOUBLE) {
1480 return TREG_ST0;
1482 #endif
1483 return REG_FRET;
1486 #if PTR_SIZE == 4
1487 /* expand 64bit on stack in two ints */
1488 static void lexpand(void)
1490 int u, v;
1491 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1492 v = vtop->r & (VT_VALMASK | VT_LVAL);
1493 if (v == VT_CONST) {
1494 vdup();
1495 vtop[0].c.i >>= 32;
1496 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1497 vdup();
1498 vtop[0].c.i += 4;
1499 } else {
1500 gv(RC_INT);
1501 vdup();
1502 vtop[0].r = vtop[-1].r2;
1503 vtop[0].r2 = vtop[-1].r2 = VT_CONST;
1505 vtop[0].type.t = vtop[-1].type.t = VT_INT | u;
1507 #endif
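/* (32-bit targets only) lexpand() turns one 64-bit stack entry into two
   32-bit ones: afterwards vtop[-1] holds the low word and vtop[0] the
   high word, obtained either by splitting a constant, by addressing the
   word at offset +4 of an lvalue (little-endian layout assumed), or
   from the r/r2 register pair.  lbuild() further down is the inverse
   operation. */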
1509 #ifdef TCC_TARGET_ARM
1510 /* expand long long on stack */
1511 ST_FUNC void lexpand_nr(void)
1513 int u,v;
1515 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1516 vdup();
1517 vtop->r2 = VT_CONST;
1518 vtop->type.t = VT_INT | u;
1519 v=vtop[-1].r & (VT_VALMASK | VT_LVAL);
1520 if (v == VT_CONST) {
1521 vtop[-1].c.i = vtop->c.i;
1522 vtop->c.i = vtop->c.i >> 32;
1523 vtop->r = VT_CONST;
1524 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1525 vtop->c.i += 4;
1526 vtop->r = vtop[-1].r;
1527 } else if (v > VT_CONST) {
1528 vtop--;
1529 lexpand();
1530 } else
1531 vtop->r = vtop[-1].r2;
1532 vtop[-1].r2 = VT_CONST;
1533 vtop[-1].type.t = VT_INT | u;
1535 #endif
1537 #if PTR_SIZE == 4
1538 /* build a long long from two ints */
1539 static void lbuild(int t)
1541 gv2(RC_INT, RC_INT);
1542 vtop[-1].r2 = vtop[0].r;
1543 vtop[-1].type.t = t;
1544 vpop();
1546 #endif
1548 /* convert stack entry to register and duplicate its value in another
1549 register */
1550 static void gv_dup(void)
1552 int rc, t, r, r1;
1553 SValue sv;
1555 t = vtop->type.t;
1556 #if PTR_SIZE == 4
1557 if ((t & VT_BTYPE) == VT_LLONG) {
1558 if (t & VT_BITFIELD) {
1559 gv(RC_INT);
1560 t = vtop->type.t;
1562 lexpand();
1563 gv_dup();
1564 vswap();
1565 vrotb(3);
1566 gv_dup();
1567 vrotb(4);
1568 /* stack: H L L1 H1 */
1569 lbuild(t);
1570 vrotb(3);
1571 vrotb(3);
1572 vswap();
1573 lbuild(t);
1574 vswap();
1575 } else
1576 #endif
1578 /* duplicate value */
1579 rc = RC_INT;
1580 sv.type.t = VT_INT;
1581 if (is_float(t)) {
1582 rc = RC_FLOAT;
1583 #ifdef TCC_TARGET_X86_64
1584 if ((t & VT_BTYPE) == VT_LDOUBLE) {
1585 rc = RC_ST0;
1587 #endif
1588 sv.type.t = t;
1590 r = gv(rc);
1591 r1 = get_reg(rc);
1592 sv.r = r;
1593 sv.c.i = 0;
1594 load(r1, &sv); /* move r to r1 */
1595 vdup();
1596 /* duplicates value */
1597 if (r != r1)
1598 vtop->r = r1;
1602 /* Generate value test
1604 * Generate a test for any value (jump, comparison and integers) */
1605 ST_FUNC int gvtst(int inv, int t)
1607 int v = vtop->r & VT_VALMASK;
1608 if (v != VT_CMP && v != VT_JMP && v != VT_JMPI) {
1609 vpushi(0);
1610 gen_op(TOK_NE);
1612 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1613 /* constant jmp optimization */
1614 if ((vtop->c.i != 0) != inv)
1615 t = gjmp(t);
1616 vtop--;
1617 return t;
1619 return gtst(inv, t);
1622 #if PTR_SIZE == 4
1623 /* generate CPU independent (unsigned) long long operations */
1624 static void gen_opl(int op)
1626 int t, a, b, op1, c, i;
1627 int func;
1628 unsigned short reg_iret = REG_IRET;
1629 unsigned short reg_lret = REG_LRET;
1630 SValue tmp;
1632 switch(op) {
1633 case '/':
1634 case TOK_PDIV:
1635 func = TOK___divdi3;
1636 goto gen_func;
1637 case TOK_UDIV:
1638 func = TOK___udivdi3;
1639 goto gen_func;
1640 case '%':
1641 func = TOK___moddi3;
1642 goto gen_mod_func;
1643 case TOK_UMOD:
1644 func = TOK___umoddi3;
1645 gen_mod_func:
1646 #ifdef TCC_ARM_EABI
1647 reg_iret = TREG_R2;
1648 reg_lret = TREG_R3;
1649 #endif
1650 gen_func:
1651 /* call generic long long function */
1652 vpush_global_sym(&func_old_type, func);
1653 vrott(3);
1654 gfunc_call(2);
1655 vpushi(0);
1656 vtop->r = reg_iret;
1657 vtop->r2 = reg_lret;
1658 break;
1659 case '^':
1660 case '&':
1661 case '|':
1662 case '*':
1663 case '+':
1664 case '-':
1665 //pv("gen_opl A",0,2);
1666 t = vtop->type.t;
1667 vswap();
1668 lexpand();
1669 vrotb(3);
1670 lexpand();
1671 /* stack: L1 H1 L2 H2 */
1672 tmp = vtop[0];
1673 vtop[0] = vtop[-3];
1674 vtop[-3] = tmp;
1675 tmp = vtop[-2];
1676 vtop[-2] = vtop[-3];
1677 vtop[-3] = tmp;
1678 vswap();
1679 /* stack: H1 H2 L1 L2 */
1680 //pv("gen_opl B",0,4);
1681 if (op == '*') {
1682 vpushv(vtop - 1);
1683 vpushv(vtop - 1);
1684 gen_op(TOK_UMULL);
1685 lexpand();
1686 /* stack: H1 H2 L1 L2 ML MH */
1687 for(i=0;i<4;i++)
1688 vrotb(6);
1689 /* stack: ML MH H1 H2 L1 L2 */
1690 tmp = vtop[0];
1691 vtop[0] = vtop[-2];
1692 vtop[-2] = tmp;
1693 /* stack: ML MH H1 L2 H2 L1 */
1694 gen_op('*');
1695 vrotb(3);
1696 vrotb(3);
1697 gen_op('*');
1698 /* stack: ML MH M1 M2 */
1699 gen_op('+');
1700 gen_op('+');
1701 } else if (op == '+' || op == '-') {
1702 /* XXX: add non carry method too (for MIPS or alpha) */
1703 if (op == '+')
1704 op1 = TOK_ADDC1;
1705 else
1706 op1 = TOK_SUBC1;
1707 gen_op(op1);
1708 /* stack: H1 H2 (L1 op L2) */
1709 vrotb(3);
1710 vrotb(3);
1711 gen_op(op1 + 1); /* TOK_xxxC2 */
1712 } else {
1713 gen_op(op);
1714 /* stack: H1 H2 (L1 op L2) */
1715 vrotb(3);
1716 vrotb(3);
1717 /* stack: (L1 op L2) H1 H2 */
1718 gen_op(op);
1719 /* stack: (L1 op L2) (H1 op H2) */
1721 /* stack: L H */
1722 lbuild(t);
1723 break;
1724 case TOK_SAR:
1725 case TOK_SHR:
1726 case TOK_SHL:
1727 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1728 t = vtop[-1].type.t;
1729 vswap();
1730 lexpand();
1731 vrotb(3);
1732 /* stack: L H shift */
1733 c = (int)vtop->c.i;
1734 /* constant: simpler */
1735 /* NOTE: all comments are for SHL. the other cases are
1736 done by swapping words */
1737 vpop();
1738 if (op != TOK_SHL)
1739 vswap();
1740 if (c >= 32) {
1741 /* stack: L H */
1742 vpop();
1743 if (c > 32) {
1744 vpushi(c - 32);
1745 gen_op(op);
1747 if (op != TOK_SAR) {
1748 vpushi(0);
1749 } else {
1750 gv_dup();
1751 vpushi(31);
1752 gen_op(TOK_SAR);
1754 vswap();
1755 } else {
1756 vswap();
1757 gv_dup();
1758 /* stack: H L L */
1759 vpushi(c);
1760 gen_op(op);
1761 vswap();
1762 vpushi(32 - c);
1763 if (op == TOK_SHL)
1764 gen_op(TOK_SHR);
1765 else
1766 gen_op(TOK_SHL);
1767 vrotb(3);
1768 /* stack: L L H */
1769 vpushi(c);
1770 if (op == TOK_SHL)
1771 gen_op(TOK_SHL);
1772 else
1773 gen_op(TOK_SHR);
1774 gen_op('|');
1776 if (op != TOK_SHL)
1777 vswap();
1778 lbuild(t);
1779 } else {
1780 /* XXX: should provide a faster fallback on x86 ? */
1781 switch(op) {
1782 case TOK_SAR:
1783 func = TOK___ashrdi3;
1784 goto gen_func;
1785 case TOK_SHR:
1786 func = TOK___lshrdi3;
1787 goto gen_func;
1788 case TOK_SHL:
1789 func = TOK___ashldi3;
1790 goto gen_func;
1793 break;
1794 default:
1795 /* compare operations */
1796 t = vtop->type.t;
1797 vswap();
1798 lexpand();
1799 vrotb(3);
1800 lexpand();
1801 /* stack: L1 H1 L2 H2 */
1802 tmp = vtop[-1];
1803 vtop[-1] = vtop[-2];
1804 vtop[-2] = tmp;
1805 /* stack: L1 L2 H1 H2 */
1806 /* compare high */
1807 op1 = op;
1808 /* when values are equal, we need to compare low words. since
1809 the jump is inverted, we invert the test too. */
1810 if (op1 == TOK_LT)
1811 op1 = TOK_LE;
1812 else if (op1 == TOK_GT)
1813 op1 = TOK_GE;
1814 else if (op1 == TOK_ULT)
1815 op1 = TOK_ULE;
1816 else if (op1 == TOK_UGT)
1817 op1 = TOK_UGE;
1818 a = 0;
1819 b = 0;
1820 gen_op(op1);
1821 if (op == TOK_NE) {
1822 b = gvtst(0, 0);
1823 } else {
1824 a = gvtst(1, 0);
1825 if (op != TOK_EQ) {
1826 /* generate non equal test */
1827 vpushi(TOK_NE);
1828 vtop->r = VT_CMP;
1829 b = gvtst(0, 0);
1832 /* compare low. Always unsigned */
1833 op1 = op;
1834 if (op1 == TOK_LT)
1835 op1 = TOK_ULT;
1836 else if (op1 == TOK_LE)
1837 op1 = TOK_ULE;
1838 else if (op1 == TOK_GT)
1839 op1 = TOK_UGT;
1840 else if (op1 == TOK_GE)
1841 op1 = TOK_UGE;
1842 gen_op(op1);
1843 a = gvtst(1, a);
1844 gsym(b);
1845 vseti(VT_JMPI, a);
1846 break;
1849 #endif
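/* Summary of the 32-bit lowering above: add/sub and the bitwise ops are
   done word by word (with the carry-propagating TOK_ADDC1/TOK_SUBC1
   pairs for +/-), 64x64 multiplication is built from one unsigned
   32x32->64 multiply plus two cross products, division and modulo call
   the runtime helpers __divdi3/__udivdi3/__moddi3/__umoddi3, shifts by
   a constant are open-coded while variable shifts call
   __ashldi3/__lshrdi3/__ashrdi3, and comparisons test the high words
   first and fall back to an unsigned comparison of the low words when
   those are equal. */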
1851 static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
1853 uint64_t x = (a >> 63 ? -a : a) / (b >> 63 ? -b : b);
1854 return (a ^ b) >> 63 ? -x : x;
1857 static int gen_opic_lt(uint64_t a, uint64_t b)
1859 return (a ^ (uint64_t)1 << 63) < (b ^ (uint64_t)1 << 63);
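/* These two helpers implement signed 64-bit semantics on top of
   unsigned arithmetic: gen_opic_sdiv() divides the magnitudes and
   restores the sign from the XOR of the operands' sign bits, and
   gen_opic_lt() flips bit 63 of both operands so that an unsigned
   comparison yields the signed ordering (INT64_MIN maps to 0,
   INT64_MAX to 0xffffffffffffffff). */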
1862 /* handle integer constant optimizations and various machine
1863 independent opt */
1864 static void gen_opic(int op)
1866 SValue *v1 = vtop - 1;
1867 SValue *v2 = vtop;
1868 int t1 = v1->type.t & VT_BTYPE;
1869 int t2 = v2->type.t & VT_BTYPE;
1870 int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1871 int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1872 uint64_t l1 = c1 ? v1->c.i : 0;
1873 uint64_t l2 = c2 ? v2->c.i : 0;
1874 int shm = (t1 == VT_LLONG) ? 63 : 31;
1876 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
1877 l1 = ((uint32_t)l1 |
1878 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
1879 if (t2 != VT_LLONG && (PTR_SIZE != 8 || t2 != VT_PTR))
1880 l2 = ((uint32_t)l2 |
1881 (v2->type.t & VT_UNSIGNED ? 0 : -(l2 & 0x80000000)));
1883 if (c1 && c2) {
1884 switch(op) {
1885 case '+': l1 += l2; break;
1886 case '-': l1 -= l2; break;
1887 case '&': l1 &= l2; break;
1888 case '^': l1 ^= l2; break;
1889 case '|': l1 |= l2; break;
1890 case '*': l1 *= l2; break;
1892 case TOK_PDIV:
1893 case '/':
1894 case '%':
1895 case TOK_UDIV:
1896 case TOK_UMOD:
1897 /* if division by zero, generate explicit division */
1898 if (l2 == 0) {
1899 if (const_wanted)
1900 tcc_error("division by zero in constant");
1901 goto general_case;
1903 switch(op) {
1904 default: l1 = gen_opic_sdiv(l1, l2); break;
1905 case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
1906 case TOK_UDIV: l1 = l1 / l2; break;
1907 case TOK_UMOD: l1 = l1 % l2; break;
1909 break;
1910 case TOK_SHL: l1 <<= (l2 & shm); break;
1911 case TOK_SHR: l1 >>= (l2 & shm); break;
1912 case TOK_SAR:
1913 l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
1914 break;
1915 /* tests */
1916 case TOK_ULT: l1 = l1 < l2; break;
1917 case TOK_UGE: l1 = l1 >= l2; break;
1918 case TOK_EQ: l1 = l1 == l2; break;
1919 case TOK_NE: l1 = l1 != l2; break;
1920 case TOK_ULE: l1 = l1 <= l2; break;
1921 case TOK_UGT: l1 = l1 > l2; break;
1922 case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
1923 case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
1924 case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
1925 case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
1926 /* logical */
1927 case TOK_LAND: l1 = l1 && l2; break;
1928 case TOK_LOR: l1 = l1 || l2; break;
1929 default:
1930 goto general_case;
1932 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
1933 l1 = ((uint32_t)l1 |
1934 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
1935 v1->c.i = l1;
1936 vtop--;
1937 } else {
1938 /* if commutative ops, put c2 as constant */
1939 if (c1 && (op == '+' || op == '&' || op == '^' ||
1940 op == '|' || op == '*')) {
1941 vswap();
1942 c2 = c1; //c = c1, c1 = c2, c2 = c;
1943 l2 = l1; //l = l1, l1 = l2, l2 = l;
1945 if (!const_wanted &&
1946 c1 && ((l1 == 0 &&
1947 (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
1948 (l1 == -1 && op == TOK_SAR))) {
1949 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
1950 vtop--;
1951 } else if (!const_wanted &&
1952 c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
1953 (op == '|' &&
1954 (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))) ||
1955 (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
1956 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
1957 if (l2 == 1)
1958 vtop->c.i = 0;
1959 vswap();
1960 vtop--;
1961 } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
1962 op == TOK_PDIV) &&
1963 l2 == 1) ||
1964 ((op == '+' || op == '-' || op == '|' || op == '^' ||
1965 op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
1966 l2 == 0) ||
1967 (op == '&' &&
1968 (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))))) {
1969 /* filter out NOP operations like x*1, x-0, x&-1... */
1970 vtop--;
1971 } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
1972 /* try to use shifts instead of muls or divs */
1973 if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
1974 int n = -1;
1975 while (l2) {
1976 l2 >>= 1;
1977 n++;
1979 vtop->c.i = n;
1980 if (op == '*')
1981 op = TOK_SHL;
1982 else if (op == TOK_PDIV)
1983 op = TOK_SAR;
1984 else
1985 op = TOK_SHR;
1987 goto general_case;
1988 } else if (c2 && (op == '+' || op == '-') &&
1989 (((vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM))
1990 || (vtop[-1].r & (VT_VALMASK | VT_LVAL)) == VT_LOCAL)) {
1991 /* symbol + constant case */
1992 if (op == '-')
1993 l2 = -l2;
1994 l2 += vtop[-1].c.i;
1995 /* The backends can't always deal with addends to symbols
1996 larger than +-1<<31. Don't construct such. */
1997 if ((int)l2 != l2)
1998 goto general_case;
1999 vtop--;
2000 vtop->c.i = l2;
2001 } else {
2002 general_case:
2003 /* call low level op generator */
2004 if (t1 == VT_LLONG || t2 == VT_LLONG ||
2005 (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
2006 gen_opl(op);
2007 else
2008 gen_opi(op);
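/* Besides full constant folding, the non-constant paths above perform a
   few machine-independent strength reductions: identities such as x+0,
   x*1 and x&-1 simply drop the constant operand, x*0, x&0, x|-1 and x%1
   reduce to a constant when no constant expression is required,
   multiplications and divisions by a power of two become shifts, and a
   constant added to a symbolic or stack-relative address is folded into
   its addend as long as the result still fits in 32 bits. */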
2013 /* generate a floating point operation with constant propagation */
2014 static void gen_opif(int op)
2016 int c1, c2;
2017 SValue *v1, *v2;
2018 #if defined _MSC_VER && defined _AMD64_
2019 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2020 volatile
2021 #endif
2022 long double f1, f2;
2024 v1 = vtop - 1;
2025 v2 = vtop;
2026 /* currently, we cannot do computations with forward symbols */
2027 c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2028 c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2029 if (c1 && c2) {
2030 if (v1->type.t == VT_FLOAT) {
2031 f1 = v1->c.f;
2032 f2 = v2->c.f;
2033 } else if (v1->type.t == VT_DOUBLE) {
2034 f1 = v1->c.d;
2035 f2 = v2->c.d;
2036 } else {
2037 f1 = v1->c.ld;
2038 f2 = v2->c.ld;
2041 /* NOTE: we only do constant propagation for finite numbers (not
2042    NaN or infinity) (ANSI spec) */
2043 if (!ieee_finite(f1) || !ieee_finite(f2))
2044 goto general_case;
2046 switch(op) {
2047 case '+': f1 += f2; break;
2048 case '-': f1 -= f2; break;
2049 case '*': f1 *= f2; break;
2050 case '/':
2051 if (f2 == 0.0) {
2052 /* If not in initializer we need to potentially generate
2053 FP exceptions at runtime, otherwise we want to fold. */
2054 if (!const_wanted)
2055 goto general_case;
2057 f1 /= f2;
2058 break;
2059 /* XXX: also handles tests ? */
2060 default:
2061 goto general_case;
2063 /* XXX: overflow test ? */
2064 if (v1->type.t == VT_FLOAT) {
2065 v1->c.f = f1;
2066 } else if (v1->type.t == VT_DOUBLE) {
2067 v1->c.d = f1;
2068 } else {
2069 v1->c.ld = f1;
2071 vtop--;
2072 } else {
2073 general_case:
2074 gen_opf(op);
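/* Floating-point folding is deliberately conservative: both operands
   must be finite constants (no NaN/Inf propagation, per the ANSI note
   above), and a constant division by zero is folded only when a
   constant expression is required; otherwise the operation is emitted
   so that any FP exception still occurs at run time. */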
2078 static int pointed_size(CType *type)
2080 int align;
2081 return type_size(pointed_type(type), &align);
2084 static void vla_runtime_pointed_size(CType *type)
2086 int align;
2087 vla_runtime_type_size(pointed_type(type), &align);
2090 static inline int is_null_pointer(SValue *p)
2092 if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
2093 return 0;
2094 return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
2095 ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
2096 ((p->type.t & VT_BTYPE) == VT_PTR &&
2097 (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0));
2100 static inline int is_integer_btype(int bt)
2102 return (bt == VT_BYTE || bt == VT_SHORT ||
2103 bt == VT_INT || bt == VT_LLONG);
2106 /* check types for comparison or subtraction of pointers */
2107 static void check_comparison_pointer_types(SValue *p1, SValue *p2, int op)
2109 CType *type1, *type2, tmp_type1, tmp_type2;
2110 int bt1, bt2;
2112 /* null pointers are accepted for all comparisons, as in gcc */
2113 if (is_null_pointer(p1) || is_null_pointer(p2))
2114 return;
2115 type1 = &p1->type;
2116 type2 = &p2->type;
2117 bt1 = type1->t & VT_BTYPE;
2118 bt2 = type2->t & VT_BTYPE;
2119 /* accept comparison between pointer and integer with a warning */
2120 if ((is_integer_btype(bt1) || is_integer_btype(bt2)) && op != '-') {
2121 if (op != TOK_LOR && op != TOK_LAND )
2122 tcc_warning("comparison between pointer and integer");
2123 return;
2126 /* both must be pointers or implicit function pointers */
2127 if (bt1 == VT_PTR) {
2128 type1 = pointed_type(type1);
2129 } else if (bt1 != VT_FUNC)
2130 goto invalid_operands;
2132 if (bt2 == VT_PTR) {
2133 type2 = pointed_type(type2);
2134 } else if (bt2 != VT_FUNC) {
2135 invalid_operands:
2136 tcc_error("invalid operands to binary %s", get_tok_str(op, NULL));
2138 if ((type1->t & VT_BTYPE) == VT_VOID ||
2139 (type2->t & VT_BTYPE) == VT_VOID)
2140 return;
2141 tmp_type1 = *type1;
2142 tmp_type2 = *type2;
2143 tmp_type1.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
2144 tmp_type2.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
2145 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
2146 /* gcc-like error if '-' is used */
2147 if (op == '-')
2148 goto invalid_operands;
2149 else
2150 tcc_warning("comparison of distinct pointer types lacks a cast");
2154 /* generic gen_op: handles type problems */
2155 ST_FUNC void gen_op(int op)
2157 int u, t1, t2, bt1, bt2, t;
2158 CType type1;
2160 redo:
2161 t1 = vtop[-1].type.t;
2162 t2 = vtop[0].type.t;
2163 bt1 = t1 & VT_BTYPE;
2164 bt2 = t2 & VT_BTYPE;
2166 if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
2167 tcc_error("operation on a struct");
2168 } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
2169 if (bt2 == VT_FUNC) {
2170 mk_pointer(&vtop->type);
2171 gaddrof();
2173 if (bt1 == VT_FUNC) {
2174 vswap();
2175 mk_pointer(&vtop->type);
2176 gaddrof();
2177 vswap();
2179 goto redo;
2180 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
2181 /* at least one operand is a pointer */
2182 /* relational op: must be both pointers */
2183 if (op >= TOK_ULT && op <= TOK_LOR) {
2184 check_comparison_pointer_types(vtop - 1, vtop, op);
2185 /* pointers are handled as unsigned */
2186 #if PTR_SIZE == 8
2187 t = VT_LLONG | VT_UNSIGNED;
2188 #else
2189 t = VT_INT | VT_UNSIGNED;
2190 #endif
2191 goto std_op;
2193 /* if both pointers, then it must be the '-' op */
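/* Worked example (added, not from upstream): for
       int a[10], *p = a, *q = a + 3;
       ptrdiff_t d = q - p;
   the raw byte difference is computed first and then divided by the
   element size via TOK_PDIV below, so d is 3 rather than 12
   (assuming a 4-byte int). */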
2194 if (bt1 == VT_PTR && bt2 == VT_PTR) {
2195 if (op != '-')
2196 tcc_error("cannot use pointers here");
2197 check_comparison_pointer_types(vtop - 1, vtop, op);
2198 /* XXX: check that types are compatible */
2199 if (vtop[-1].type.t & VT_VLA) {
2200 vla_runtime_pointed_size(&vtop[-1].type);
2201 } else {
2202 vpushi(pointed_size(&vtop[-1].type));
2204 vrott(3);
2205 gen_opic(op);
2206 vtop->type.t = ptrdiff_type.t;
2207 vswap();
2208 gen_op(TOK_PDIV);
2209 } else {
2210 /* exactly one pointer : must be '+' or '-'. */
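/* Worked example (added, not from upstream): for
       int *p;
       p + 2;
   the integer operand is multiplied by the element size with
   gen_op('*') below, so the generated addition advances p by
   2 * sizeof(int) bytes. */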
2211 if (op != '-' && op != '+')
2212 tcc_error("cannot use pointers here");
2213 /* Put pointer as first operand */
2214 if (bt2 == VT_PTR) {
2215 vswap();
2216 t = t1, t1 = t2, t2 = t;
2218 #if PTR_SIZE == 4
2219 if ((vtop[0].type.t & VT_BTYPE) == VT_LLONG)
2220 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2221 gen_cast_s(VT_INT);
2222 #endif
2223 type1 = vtop[-1].type;
2224 type1.t &= ~VT_ARRAY;
2225 if (vtop[-1].type.t & VT_VLA)
2226 vla_runtime_pointed_size(&vtop[-1].type);
2227 else {
2228 u = pointed_size(&vtop[-1].type);
2229 if (u < 0)
2230 tcc_error("unknown array element size");
2231 #if PTR_SIZE == 8
2232 vpushll(u);
2233 #else
2234 /* XXX: cast to int ? (long long case) */
2235 vpushi(u);
2236 #endif
2238 gen_op('*');
2239 #if 0
2240 /* #ifdef CONFIG_TCC_BCHECK
2241 The main reason for removing this code:
2242 #include <stdio.h>
2243 int main ()
2245 int v[10];
2246 int i = 10;
2247 int j = 9;
2248 fprintf(stderr, "v+i-j = %p\n", v+i-j);
2249 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
2251 When this code is enabled, the output looks like
2252 v+i-j = 0xfffffffe
2253 v+(i-j) = 0xbff84000
2255 /* if evaluating constant expression, no code should be
2256 generated, so no bound check */
2257 if (tcc_state->do_bounds_check && !const_wanted) {
2258 /* if bounded pointers, we generate a special code to
2259 test bounds */
2260 if (op == '-') {
2261 vpushi(0);
2262 vswap();
2263 gen_op('-');
2265 gen_bounded_ptr_add();
2266 } else
2267 #endif
2269 gen_opic(op);
2271 /* put the type back in case gen_opic() swapped operands */
2272 vtop->type = type1;
2274 } else if (is_float(bt1) || is_float(bt2)) {
2275 /* compute bigger type and do implicit casts */
2276 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
2277 t = VT_LDOUBLE;
2278 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
2279 t = VT_DOUBLE;
2280 } else {
2281 t = VT_FLOAT;
2283 /* floats can only be used for a few operations */
2284 if (op != '+' && op != '-' && op != '*' && op != '/' &&
2285 (op < TOK_ULT || op > TOK_GT))
2286 tcc_error("invalid operands for binary operation");
2287 goto std_op;
2288 } else if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL) {
2289 t = bt1 == VT_LLONG ? VT_LLONG : VT_INT;
2290 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (t | VT_UNSIGNED))
2291 t |= VT_UNSIGNED;
2292 t |= (VT_LONG & t1);
2293 goto std_op;
2294 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
2295 /* cast to biggest op */
2296 t = VT_LLONG | VT_LONG;
2297 if (bt1 == VT_LLONG)
2298 t &= t1;
2299 if (bt2 == VT_LLONG)
2300 t &= t2;
2301 /* convert to unsigned if it does not fit in a long long */
2302 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
2303 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
2304 t |= VT_UNSIGNED;
2305 goto std_op;
2306 } else {
2307 /* integer operations */
2308 t = VT_INT | (VT_LONG & (t1 | t2));
2309 /* convert to unsigned if it does not fit in an integer */
2310 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
2311 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
2312 t |= VT_UNSIGNED;
2313 std_op:
2314 /* XXX: currently, some unsigned operations are explicit, so
2315 we modify them here */
2316 if (t & VT_UNSIGNED) {
2317 if (op == TOK_SAR)
2318 op = TOK_SHR;
2319 else if (op == '/')
2320 op = TOK_UDIV;
2321 else if (op == '%')
2322 op = TOK_UMOD;
2323 else if (op == TOK_LT)
2324 op = TOK_ULT;
2325 else if (op == TOK_GT)
2326 op = TOK_UGT;
2327 else if (op == TOK_LE)
2328 op = TOK_ULE;
2329 else if (op == TOK_GE)
2330 op = TOK_UGE;
2332 vswap();
2333 type1.t = t;
2334 type1.ref = NULL;
2335 gen_cast(&type1);
2336 vswap();
2337 /* special case for shifts and long long: we keep the shift as
2338 an integer */
2339 if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
2340 type1.t = VT_INT;
2341 gen_cast(&type1);
2342 if (is_float(t))
2343 gen_opif(op);
2344 else
2345 gen_opic(op);
2346 if (op >= TOK_ULT && op <= TOK_GT) {
2347 /* relational op: the result is an int */
2348 vtop->type.t = VT_INT;
2349 } else {
2350 vtop->type.t = t;
2353 // Make sure that we have converted to an rvalue:
2354 if (vtop->r & VT_LVAL)
2355 gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
2358 #ifndef TCC_TARGET_ARM
2359 /* generic itof for unsigned long long case */
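/* Illustrative example (added, not from upstream): on targets without a
   native unsigned 64-bit conversion, code such as
       unsigned long long u = 10;
       double d = (double)u;
   results in a call to the helper __floatundidf pushed below
   (__floatundisf for float, __floatundixf for long double). */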
2360 static void gen_cvt_itof1(int t)
2362 #ifdef TCC_TARGET_ARM64
2363 gen_cvt_itof(t);
2364 #else
2365 if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
2366 (VT_LLONG | VT_UNSIGNED)) {
2368 if (t == VT_FLOAT)
2369 vpush_global_sym(&func_old_type, TOK___floatundisf);
2370 #if LDOUBLE_SIZE != 8
2371 else if (t == VT_LDOUBLE)
2372 vpush_global_sym(&func_old_type, TOK___floatundixf);
2373 #endif
2374 else
2375 vpush_global_sym(&func_old_type, TOK___floatundidf);
2376 vrott(2);
2377 gfunc_call(1);
2378 vpushi(0);
2379 vtop->r = reg_fret(t);
2380 } else {
2381 gen_cvt_itof(t);
2383 #endif
2385 #endif
2387 /* generic ftoi for unsigned long long case */
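/* Illustrative example (added, not from upstream): a cast such as
       (unsigned long long)3.5
   is not handled natively here, so a call to __fixunsdfdi (or the
   __fixunssfdi / __fixunsxfdi variants) is emitted and the 64-bit
   result is picked up from REG_IRET/REG_LRET below. */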
2388 static void gen_cvt_ftoi1(int t)
2390 #ifdef TCC_TARGET_ARM64
2391 gen_cvt_ftoi(t);
2392 #else
2393 int st;
2395 if (t == (VT_LLONG | VT_UNSIGNED)) {
2396 /* not handled natively */
2397 st = vtop->type.t & VT_BTYPE;
2398 if (st == VT_FLOAT)
2399 vpush_global_sym(&func_old_type, TOK___fixunssfdi);
2400 #if LDOUBLE_SIZE != 8
2401 else if (st == VT_LDOUBLE)
2402 vpush_global_sym(&func_old_type, TOK___fixunsxfdi);
2403 #endif
2404 else
2405 vpush_global_sym(&func_old_type, TOK___fixunsdfdi);
2406 vrott(2);
2407 gfunc_call(1);
2408 vpushi(0);
2409 vtop->r = REG_IRET;
2410 vtop->r2 = REG_LRET;
2411 } else {
2412 gen_cvt_ftoi(t);
2414 #endif
2417 /* force char or short cast */
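/* Illustrative example (added, not from upstream): truncating an int to
   'unsigned char' is generated as 'v & 0xff', while 'signed char' uses
   the shift pair '(v << 24) >> 24' (arithmetic right shift) so that the
   result is also sign-extended. */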
2418 static void force_charshort_cast(int t)
2420 int bits, dbt;
2422 /* cannot cast static initializers */
2423 if (STATIC_DATA_WANTED)
2424 return;
2426 dbt = t & VT_BTYPE;
2427 /* XXX: add optimization if lvalue : just change type and offset */
2428 if (dbt == VT_BYTE)
2429 bits = 8;
2430 else
2431 bits = 16;
2432 if (t & VT_UNSIGNED) {
2433 vpushi((1 << bits) - 1);
2434 gen_op('&');
2435 } else {
2436 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
2437 bits = 64 - bits;
2438 else
2439 bits = 32 - bits;
2440 vpushi(bits);
2441 gen_op(TOK_SHL);
2442 /* result must be signed or the SAR is converted to an SHR
2443 This was not the case when "t" was a signed short
2444 and the last value on the stack was an unsigned int */
2445 vtop->type.t &= ~VT_UNSIGNED;
2446 vpushi(bits);
2447 gen_op(TOK_SAR);
2451 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2452 static void gen_cast_s(int t)
2454 CType type;
2455 type.t = t;
2456 type.ref = NULL;
2457 gen_cast(&type);
2460 static void gen_cast(CType *type)
2462 int sbt, dbt, sf, df, c, p;
2464 /* special delayed cast for char/short */
2465 /* XXX: in some cases (multiple cascaded casts), it may still
2466 be incorrect */
2467 if (vtop->r & VT_MUSTCAST) {
2468 vtop->r &= ~VT_MUSTCAST;
2469 force_charshort_cast(vtop->type.t);
2472 /* bitfields first get cast to ints */
2473 if (vtop->type.t & VT_BITFIELD) {
2474 gv(RC_INT);
2477 dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
2478 sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
2480 if (sbt != dbt) {
2481 sf = is_float(sbt);
2482 df = is_float(dbt);
2483 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2484 p = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM);
2485 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
2486 c &= dbt != VT_LDOUBLE;
2487 #endif
2488 if (c) {
2489 /* constant case: we can do it now */
2490 /* XXX: in ISOC, cannot do it if error in convert */
2491 if (sbt == VT_FLOAT)
2492 vtop->c.ld = vtop->c.f;
2493 else if (sbt == VT_DOUBLE)
2494 vtop->c.ld = vtop->c.d;
2496 if (df) {
2497 if ((sbt & VT_BTYPE) == VT_LLONG) {
2498 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
2499 vtop->c.ld = vtop->c.i;
2500 else
2501 vtop->c.ld = -(long double)-vtop->c.i;
2502 } else if(!sf) {
2503 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
2504 vtop->c.ld = (uint32_t)vtop->c.i;
2505 else
2506 vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
2509 if (dbt == VT_FLOAT)
2510 vtop->c.f = (float)vtop->c.ld;
2511 else if (dbt == VT_DOUBLE)
2512 vtop->c.d = (double)vtop->c.ld;
2513 } else if (sf && dbt == (VT_LLONG|VT_UNSIGNED)) {
2514 vtop->c.i = vtop->c.ld;
2515 } else if (sf && dbt == VT_BOOL) {
2516 vtop->c.i = (vtop->c.ld != 0);
2517 } else {
2518 if(sf)
2519 vtop->c.i = vtop->c.ld;
2520 else if (sbt == (VT_LLONG|VT_UNSIGNED))
2522 else if (sbt & VT_UNSIGNED)
2523 vtop->c.i = (uint32_t)vtop->c.i;
2524 #if PTR_SIZE == 8
2525 else if (sbt == VT_PTR)
2527 #endif
2528 else if (sbt != VT_LLONG)
2529 vtop->c.i = ((uint32_t)vtop->c.i |
2530 -(vtop->c.i & 0x80000000));
2532 if (dbt == (VT_LLONG|VT_UNSIGNED))
2534 else if (dbt == VT_BOOL)
2535 vtop->c.i = (vtop->c.i != 0);
2536 #if PTR_SIZE == 8
2537 else if (dbt == VT_PTR)
2539 #endif
2540 else if (dbt != VT_LLONG) {
2541 uint32_t m = ((dbt & VT_BTYPE) == VT_BYTE ? 0xff :
2542 (dbt & VT_BTYPE) == VT_SHORT ? 0xffff :
2543 0xffffffff);
2544 vtop->c.i &= m;
2545 if (!(dbt & VT_UNSIGNED))
2546 vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
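/* Worked example (added, not from upstream): folding (signed char)0x1ff
   masks with m = 0xff giving 0xff, then the line above ORs in
   -(0xff & 0x80), i.e. the sign extension, so the folded constant
   becomes -1, matching (signed char)0x1ff. */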
2549 } else if (p && dbt == VT_BOOL) {
2550 vtop->r = VT_CONST;
2551 vtop->c.i = 1;
2552 } else {
2553 /* non constant case: generate code */
2554 if (sf && df) {
2555 /* convert from fp to fp */
2556 gen_cvt_ftof(dbt);
2557 } else if (df) {
2558 /* convert int to fp */
2559 gen_cvt_itof1(dbt);
2560 } else if (sf) {
2561 /* convert fp to int */
2562 if (dbt == VT_BOOL) {
2563 vpushi(0);
2564 gen_op(TOK_NE);
2565 } else {
2566 /* we handle char/short/etc... with generic code */
2567 if (dbt != (VT_INT | VT_UNSIGNED) &&
2568 dbt != (VT_LLONG | VT_UNSIGNED) &&
2569 dbt != VT_LLONG)
2570 dbt = VT_INT;
2571 gen_cvt_ftoi1(dbt);
2572 if (dbt == VT_INT && (type->t & (VT_BTYPE | VT_UNSIGNED)) != dbt) {
2573 /* additional cast for char/short... */
2574 vtop->type.t = dbt;
2575 gen_cast(type);
2578 #if PTR_SIZE == 4
2579 } else if ((dbt & VT_BTYPE) == VT_LLONG) {
2580 if ((sbt & VT_BTYPE) != VT_LLONG) {
2581 /* scalar to long long */
2582 /* machine independent conversion */
2583 gv(RC_INT);
2584 /* generate high word */
2585 if (sbt == (VT_INT | VT_UNSIGNED)) {
2586 vpushi(0);
2587 gv(RC_INT);
2588 } else {
2589 if (sbt == VT_PTR) {
2590 /* cast from pointer to int before we apply
2591 shift operation, which pointers don't support */
2592 gen_cast_s(VT_INT);
2594 gv_dup();
2595 vpushi(31);
2596 gen_op(TOK_SAR);
2598 /* patch second register */
2599 vtop[-1].r2 = vtop->r;
2600 vpop();
2602 #else
2603 } else if ((dbt & VT_BTYPE) == VT_LLONG ||
2604 (dbt & VT_BTYPE) == VT_PTR ||
2605 (dbt & VT_BTYPE) == VT_FUNC) {
2606 if ((sbt & VT_BTYPE) != VT_LLONG &&
2607 (sbt & VT_BTYPE) != VT_PTR &&
2608 (sbt & VT_BTYPE) != VT_FUNC) {
2609 /* need to convert from 32bit to 64bit */
2610 gv(RC_INT);
2611 if (sbt != (VT_INT | VT_UNSIGNED)) {
2612 #if defined(TCC_TARGET_ARM64)
2613 gen_cvt_sxtw();
2614 #elif defined(TCC_TARGET_X86_64)
2615 int r = gv(RC_INT);
2616 /* x86_64 specific: movslq */
2617 o(0x6348);
2618 o(0xc0 + (REG_VALUE(r) << 3) + REG_VALUE(r));
2619 #else
2620 #error
2621 #endif
2624 #endif
2625 } else if (dbt == VT_BOOL) {
2626 /* scalar to bool */
2627 vpushi(0);
2628 gen_op(TOK_NE);
2629 } else if ((dbt & VT_BTYPE) == VT_BYTE ||
2630 (dbt & VT_BTYPE) == VT_SHORT) {
2631 if (sbt == VT_PTR) {
2632 vtop->type.t = VT_INT;
2633 tcc_warning("nonportable conversion from pointer to char/short");
2635 force_charshort_cast(dbt);
2636 #if PTR_SIZE == 4
2637 } else if ((dbt & VT_BTYPE) == VT_INT) {
2638 /* scalar to int */
2639 if ((sbt & VT_BTYPE) == VT_LLONG) {
2640 /* from long long: just take low order word */
2641 lexpand();
2642 vpop();
2644 /* if lvalue and single word type, nothing to do because
2645 the lvalue already contains the real type size (see
2646 VT_LVAL_xxx constants) */
2647 #endif
2650 } else if ((dbt & VT_BTYPE) == VT_PTR && !(vtop->r & VT_LVAL)) {
2651 /* if we are casting between pointer types,
2652 we must update the VT_LVAL_xxx size */
2653 vtop->r = (vtop->r & ~VT_LVAL_TYPE)
2654 | (lvalue_type(type->ref->type.t) & VT_LVAL_TYPE);
2656 vtop->type = *type;
2659 /* return type size as known at compile time. Put alignment at 'a' */
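/* Illustrative example (added, not from upstream): type_size() returns 4
   with *a == 4 for 'int', PTR_SIZE for plain pointers, s->c with
   *a == s->r for a laid-out struct, and -1 for incomplete types such as
   an undefined 'struct foo' or an enum without a body. */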
2660 ST_FUNC int type_size(CType *type, int *a)
2662 Sym *s;
2663 int bt;
2665 bt = type->t & VT_BTYPE;
2666 if (bt == VT_STRUCT) {
2667 /* struct/union */
2668 s = type->ref;
2669 *a = s->r;
2670 return s->c;
2671 } else if (bt == VT_PTR) {
2672 if (type->t & VT_ARRAY) {
2673 int ts;
2675 s = type->ref;
2676 ts = type_size(&s->type, a);
2678 if (ts < 0 && s->c < 0)
2679 ts = -ts;
2681 return ts * s->c;
2682 } else {
2683 *a = PTR_SIZE;
2684 return PTR_SIZE;
2686 } else if (IS_ENUM(type->t) && type->ref->c == -1) {
2687 return -1; /* incomplete enum */
2688 } else if (bt == VT_LDOUBLE) {
2689 *a = LDOUBLE_ALIGN;
2690 return LDOUBLE_SIZE;
2691 } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
2692 #ifdef TCC_TARGET_I386
2693 #ifdef TCC_TARGET_PE
2694 *a = 8;
2695 #else
2696 *a = 4;
2697 #endif
2698 #elif defined(TCC_TARGET_ARM)
2699 #ifdef TCC_ARM_EABI
2700 *a = 8;
2701 #else
2702 *a = 4;
2703 #endif
2704 #else
2705 *a = 8;
2706 #endif
2707 return 8;
2708 } else if (bt == VT_INT || bt == VT_FLOAT) {
2709 *a = 4;
2710 return 4;
2711 } else if (bt == VT_SHORT) {
2712 *a = 2;
2713 return 2;
2714 } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
2715 *a = 8;
2716 return 16;
2717 } else {
2718 /* char, void, function, _Bool */
2719 *a = 1;
2720 return 1;
2724 /* push type size as known at runtime on top of value stack. Put
2725 alignment at 'a' */
2726 ST_FUNC void vla_runtime_type_size(CType *type, int *a)
2728 if (type->t & VT_VLA) {
2729 type_size(&type->ref->type, a);
2730 vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
2731 } else {
2732 vpushi(type_size(type, a));
2736 static void vla_sp_restore(void) {
2737 if (vlas_in_scope) {
2738 gen_vla_sp_restore(vla_sp_loc);
2742 static void vla_sp_restore_root(void) {
2743 if (vlas_in_scope) {
2744 gen_vla_sp_restore(vla_sp_root_loc);
2748 /* return the pointed type of t */
2749 static inline CType *pointed_type(CType *type)
2751 return &type->ref->type;
2754 /* modify type so that it is a pointer to the original type. */
2755 ST_FUNC void mk_pointer(CType *type)
2757 Sym *s;
2758 s = sym_push(SYM_FIELD, type, 0, -1);
2759 type->t = VT_PTR | (type->t & VT_STORAGE);
2760 type->ref = s;
2763 /* compare function types. OLD functions match any new functions */
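/* Illustrative example (added, not from upstream): an old-style
   declaration 'int f();' is considered compatible with the prototype
   'int f(int, char *);', whereas 'int f(int);' and 'int f(char *);'
   are rejected by the parameter-by-parameter walk below. */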
2764 static int is_compatible_func(CType *type1, CType *type2)
2766 Sym *s1, *s2;
2768 s1 = type1->ref;
2769 s2 = type2->ref;
2770 if (!is_compatible_types(&s1->type, &s2->type))
2771 return 0;
2772 /* check func_call */
2773 if (s1->f.func_call != s2->f.func_call)
2774 return 0;
2775 /* XXX: not complete */
2776 if (s1->f.func_type == FUNC_OLD || s2->f.func_type == FUNC_OLD)
2777 return 1;
2778 if (s1->f.func_type != s2->f.func_type)
2779 return 0;
2780 while (s1 != NULL) {
2781 if (s2 == NULL)
2782 return 0;
2783 if (!is_compatible_unqualified_types(&s1->type, &s2->type))
2784 return 0;
2785 s1 = s1->next;
2786 s2 = s2->next;
2788 if (s2)
2789 return 0;
2790 return 1;
2793 /* return true if type1 and type2 are the same. If unqualified is
2794 true, qualifiers on the types are ignored.
2796 - enums are not checked, as with gcc's __builtin_types_compatible_p() */
2798 static int compare_types(CType *type1, CType *type2, int unqualified)
2800 int bt1, t1, t2;
2802 t1 = type1->t & VT_TYPE;
2803 t2 = type2->t & VT_TYPE;
2804 if (unqualified) {
2805 /* strip qualifiers before comparing */
2806 t1 &= ~(VT_CONSTANT | VT_VOLATILE);
2807 t2 &= ~(VT_CONSTANT | VT_VOLATILE);
2810 /* Default vs. explicit signedness only matters for char */
2811 if ((t1 & VT_BTYPE) != VT_BYTE) {
2812 t1 &= ~VT_DEFSIGN;
2813 t2 &= ~VT_DEFSIGN;
2815 /* XXX: bitfields ? */
2816 if (t1 != t2)
2817 return 0;
2818 /* test more complicated cases */
2819 bt1 = t1 & VT_BTYPE;
2820 if (bt1 == VT_PTR) {
2821 type1 = pointed_type(type1);
2822 type2 = pointed_type(type2);
2823 return is_compatible_types(type1, type2);
2824 } else if (bt1 == VT_STRUCT) {
2825 return (type1->ref == type2->ref);
2826 } else if (bt1 == VT_FUNC) {
2827 return is_compatible_func(type1, type2);
2828 } else {
2829 return 1;
2833 /* return true if type1 and type2 are exactly the same (including
2834 qualifiers).
2836 static int is_compatible_types(CType *type1, CType *type2)
2838 return compare_types(type1,type2,0);
2841 /* return true if type1 and type2 are the same (ignoring qualifiers).
2843 static int is_compatible_unqualified_types(CType *type1, CType *type2)
2845 return compare_types(type1,type2,1);
2848 /* print a type. If 'varstr' is not NULL, then the variable is also
2849 printed in the type */
2850 /* XXX: union */
2851 /* XXX: add array and function pointers */
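/* Illustrative example (added, not from upstream): for an 'unsigned int'
   type with varstr "x" this produces "unsigned int x"; for a
   'struct foo *' it recurses through the VT_PTR case and yields
   "struct foo *x". */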
2852 static void type_to_str(char *buf, int buf_size,
2853 CType *type, const char *varstr)
2855 int bt, v, t;
2856 Sym *s, *sa;
2857 char buf1[256];
2858 const char *tstr;
2860 t = type->t;
2861 bt = t & VT_BTYPE;
2862 buf[0] = '\0';
2864 if (t & VT_EXTERN)
2865 pstrcat(buf, buf_size, "extern ");
2866 if (t & VT_STATIC)
2867 pstrcat(buf, buf_size, "static ");
2868 if (t & VT_TYPEDEF)
2869 pstrcat(buf, buf_size, "typedef ");
2870 if (t & VT_INLINE)
2871 pstrcat(buf, buf_size, "inline ");
2872 if (t & VT_VOLATILE)
2873 pstrcat(buf, buf_size, "volatile ");
2874 if (t & VT_CONSTANT)
2875 pstrcat(buf, buf_size, "const ");
2877 if (((t & VT_DEFSIGN) && bt == VT_BYTE)
2878 || ((t & VT_UNSIGNED)
2879 && (bt == VT_SHORT || bt == VT_INT || bt == VT_LLONG)
2880 && !IS_ENUM(t)
2882 pstrcat(buf, buf_size, (t & VT_UNSIGNED) ? "unsigned " : "signed ");
2884 buf_size -= strlen(buf);
2885 buf += strlen(buf);
2887 switch(bt) {
2888 case VT_VOID:
2889 tstr = "void";
2890 goto add_tstr;
2891 case VT_BOOL:
2892 tstr = "_Bool";
2893 goto add_tstr;
2894 case VT_BYTE:
2895 tstr = "char";
2896 goto add_tstr;
2897 case VT_SHORT:
2898 tstr = "short";
2899 goto add_tstr;
2900 case VT_INT:
2901 tstr = "int";
2902 goto maybe_long;
2903 case VT_LLONG:
2904 tstr = "long long";
2905 maybe_long:
2906 if (t & VT_LONG)
2907 tstr = "long";
2908 if (!IS_ENUM(t))
2909 goto add_tstr;
2910 tstr = "enum ";
2911 goto tstruct;
2912 case VT_FLOAT:
2913 tstr = "float";
2914 goto add_tstr;
2915 case VT_DOUBLE:
2916 tstr = "double";
2917 goto add_tstr;
2918 case VT_LDOUBLE:
2919 tstr = "long double";
2920 add_tstr:
2921 pstrcat(buf, buf_size, tstr);
2922 break;
2923 case VT_STRUCT:
2924 tstr = "struct ";
2925 if (IS_UNION(t))
2926 tstr = "union ";
2927 tstruct:
2928 pstrcat(buf, buf_size, tstr);
2929 v = type->ref->v & ~SYM_STRUCT;
2930 if (v >= SYM_FIRST_ANOM)
2931 pstrcat(buf, buf_size, "<anonymous>");
2932 else
2933 pstrcat(buf, buf_size, get_tok_str(v, NULL));
2934 break;
2935 case VT_FUNC:
2936 s = type->ref;
2937 type_to_str(buf, buf_size, &s->type, varstr);
2938 pstrcat(buf, buf_size, "(");
2939 sa = s->next;
2940 while (sa != NULL) {
2941 type_to_str(buf1, sizeof(buf1), &sa->type, NULL);
2942 pstrcat(buf, buf_size, buf1);
2943 sa = sa->next;
2944 if (sa)
2945 pstrcat(buf, buf_size, ", ");
2947 pstrcat(buf, buf_size, ")");
2948 goto no_var;
2949 case VT_PTR:
2950 s = type->ref;
2951 if (t & VT_ARRAY) {
2952 snprintf(buf1, sizeof(buf1), "%s[%d]", varstr ? varstr : "", s->c);
2953 type_to_str(buf, buf_size, &s->type, buf1);
2954 goto no_var;
2956 pstrcpy(buf1, sizeof(buf1), "*");
2957 if (t & VT_CONSTANT)
2958 pstrcat(buf1, buf_size, "const ");
2959 if (t & VT_VOLATILE)
2960 pstrcat(buf1, buf_size, "volatile ");
2961 if (varstr)
2962 pstrcat(buf1, sizeof(buf1), varstr);
2963 type_to_str(buf, buf_size, &s->type, buf1);
2964 goto no_var;
2966 if (varstr) {
2967 pstrcat(buf, buf_size, " ");
2968 pstrcat(buf, buf_size, varstr);
2970 no_var: ;
2973 /* verify type compatibility to store vtop in 'dt' type, and generate
2974 casts if needed. */
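/* Illustrative example (added, not from upstream) of the diagnostics below:
       int i; char *p;
       p = i;           // warning: assignment makes pointer from integer without a cast
       i = p;           // warning: assignment makes integer from pointer without a cast
       p = (char *)i;   // accepted, explicit cast
*/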
2975 static void gen_assign_cast(CType *dt)
2977 CType *st, *type1, *type2;
2978 char buf1[256], buf2[256];
2979 int dbt, sbt;
2981 st = &vtop->type; /* source type */
2982 dbt = dt->t & VT_BTYPE;
2983 sbt = st->t & VT_BTYPE;
2984 if (sbt == VT_VOID || dbt == VT_VOID) {
2985 if (sbt == VT_VOID && dbt == VT_VOID)
2986 ; /*
2987 It is Ok if both are void
2988 A test program:
2989 void func1() {}
2990 void func2() {
2991 return func1();
2993 gcc accepts this program
2995 else
2996 tcc_error("cannot cast from/to void");
2998 if (dt->t & VT_CONSTANT)
2999 tcc_warning("assignment of read-only location");
3000 switch(dbt) {
3001 case VT_PTR:
3002 /* special cases for pointers */
3003 /* '0' can also be a pointer */
3004 if (is_null_pointer(vtop))
3005 goto type_ok;
3006 /* accept implicit pointer to integer cast with warning */
3007 if (is_integer_btype(sbt)) {
3008 tcc_warning("assignment makes pointer from integer without a cast");
3009 goto type_ok;
3011 type1 = pointed_type(dt);
3012 /* a function is implicitly a function pointer */
3013 if (sbt == VT_FUNC) {
3014 if ((type1->t & VT_BTYPE) != VT_VOID &&
3015 !is_compatible_types(pointed_type(dt), st))
3016 tcc_warning("assignment from incompatible pointer type");
3017 goto type_ok;
3019 if (sbt != VT_PTR)
3020 goto error;
3021 type2 = pointed_type(st);
3022 if ((type1->t & VT_BTYPE) == VT_VOID ||
3023 (type2->t & VT_BTYPE) == VT_VOID) {
3024 /* void * can match anything */
3025 } else {
3026 //printf("types %08x %08x\n", type1->t, type2->t);
3027 /* exact type match, except for qualifiers */
3028 if (!is_compatible_unqualified_types(type1, type2)) {
3029 /* Like GCC, don't warn by default for mere changes
3030 in pointer target signedness. Do warn for different
3031 base types, though, in particular for unsigned enums
3032 and signed int targets. */
3033 if ((type1->t & (VT_BTYPE|VT_LONG)) != (type2->t & (VT_BTYPE|VT_LONG))
3034 || IS_ENUM(type1->t) || IS_ENUM(type2->t)
3036 tcc_warning("assignment from incompatible pointer type");
3039 /* check const and volatile */
3040 if ((!(type1->t & VT_CONSTANT) && (type2->t & VT_CONSTANT)) ||
3041 (!(type1->t & VT_VOLATILE) && (type2->t & VT_VOLATILE)))
3042 tcc_warning("assignment discards qualifiers from pointer target type");
3043 break;
3044 case VT_BYTE:
3045 case VT_SHORT:
3046 case VT_INT:
3047 case VT_LLONG:
3048 if (sbt == VT_PTR || sbt == VT_FUNC) {
3049 tcc_warning("assignment makes integer from pointer without a cast");
3050 } else if (sbt == VT_STRUCT) {
3051 goto case_VT_STRUCT;
3053 /* XXX: more tests */
3054 break;
3055 case VT_STRUCT:
3056 case_VT_STRUCT:
3057 if (!is_compatible_unqualified_types(dt, st)) {
3058 error:
3059 type_to_str(buf1, sizeof(buf1), st, NULL);
3060 type_to_str(buf2, sizeof(buf2), dt, NULL);
3061 tcc_error("cannot cast '%s' to '%s'", buf1, buf2);
3063 break;
3065 type_ok:
3066 gen_cast(dt);
3069 /* store vtop in lvalue pushed on stack */
3070 ST_FUNC void vstore(void)
3072 int sbt, dbt, ft, r, t, size, align, bit_size, bit_pos, rc, delayed_cast;
3074 ft = vtop[-1].type.t;
3075 sbt = vtop->type.t & VT_BTYPE;
3076 dbt = ft & VT_BTYPE;
3077 if ((((sbt == VT_INT || sbt == VT_SHORT) && dbt == VT_BYTE) ||
3078 (sbt == VT_INT && dbt == VT_SHORT))
3079 && !(vtop->type.t & VT_BITFIELD)) {
3080 /* optimize char/short casts */
3081 delayed_cast = VT_MUSTCAST;
3082 vtop->type.t = ft & VT_TYPE;
3083 /* XXX: factorize */
3084 if (ft & VT_CONSTANT)
3085 tcc_warning("assignment of read-only location");
3086 } else {
3087 delayed_cast = 0;
3088 if (!(ft & VT_BITFIELD))
3089 gen_assign_cast(&vtop[-1].type);
3092 if (sbt == VT_STRUCT) {
3093 /* if structure, only generate pointer */
3094 /* structure assignment : generate memcpy */
3095 /* XXX: optimize if small size */
3096 size = type_size(&vtop->type, &align);
3098 /* destination */
3099 vswap();
3100 vtop->type.t = VT_PTR;
3101 gaddrof();
3103 /* address of memcpy() */
3104 #ifdef TCC_ARM_EABI
3105 if(!(align & 7))
3106 vpush_global_sym(&func_old_type, TOK_memcpy8);
3107 else if(!(align & 3))
3108 vpush_global_sym(&func_old_type, TOK_memcpy4);
3109 else
3110 #endif
3111 /* Use memmove, rather than memcpy, as dest and src may be the same: */
3112 vpush_global_sym(&func_old_type, TOK_memmove);
3114 vswap();
3115 /* source */
3116 vpushv(vtop - 2);
3117 vtop->type.t = VT_PTR;
3118 gaddrof();
3119 /* type size */
3120 vpushi(size);
3121 gfunc_call(3);
3123 /* leave source on stack */
3124 } else if (ft & VT_BITFIELD) {
3125 /* bitfield store handling */
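/* Worked example (added, not from upstream): for
       struct { unsigned x:3; } s;  s.x = 5;
   the non-packed path below generates, in effect,
       dest = (dest & ~(0x7 << bit_pos)) | ((5 & 0x7) << bit_pos);
   i.e. mask the source, shift it into position, clear the old bits in
   the destination and OR the two together before storing. */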
3127 /* save lvalue as expression result (example: s.b = s.a = n;) */
3128 vdup(), vtop[-1] = vtop[-2];
3130 bit_pos = BIT_POS(ft);
3131 bit_size = BIT_SIZE(ft);
3132 /* remove bit field info to avoid loops */
3133 vtop[-1].type.t = ft & ~VT_STRUCT_MASK;
3135 if ((ft & VT_BTYPE) == VT_BOOL) {
3136 gen_cast(&vtop[-1].type);
3137 vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
3140 r = adjust_bf(vtop - 1, bit_pos, bit_size);
3141 if (r == VT_STRUCT) {
3142 gen_cast_s((ft & VT_BTYPE) == VT_LLONG ? VT_LLONG : VT_INT);
3143 store_packed_bf(bit_pos, bit_size);
3144 } else {
3145 unsigned long long mask = (1ULL << bit_size) - 1;
3146 if ((ft & VT_BTYPE) != VT_BOOL) {
3147 /* mask source */
3148 if ((vtop[-1].type.t & VT_BTYPE) == VT_LLONG)
3149 vpushll(mask);
3150 else
3151 vpushi((unsigned)mask);
3152 gen_op('&');
3154 /* shift source */
3155 vpushi(bit_pos);
3156 gen_op(TOK_SHL);
3157 vswap();
3158 /* duplicate destination */
3159 vdup();
3160 vrott(3);
3161 /* load destination, mask and or with source */
3162 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
3163 vpushll(~(mask << bit_pos));
3164 else
3165 vpushi(~((unsigned)mask << bit_pos));
3166 gen_op('&');
3167 gen_op('|');
3168 /* store result */
3169 vstore();
3170 /* ... and discard */
3171 vpop();
3173 } else if (dbt == VT_VOID) {
3174 --vtop;
3175 } else {
3176 #ifdef CONFIG_TCC_BCHECK
3177 /* bound check case */
3178 if (vtop[-1].r & VT_MUSTBOUND) {
3179 vswap();
3180 gbound();
3181 vswap();
3183 #endif
3184 rc = RC_INT;
3185 if (is_float(ft)) {
3186 rc = RC_FLOAT;
3187 #ifdef TCC_TARGET_X86_64
3188 if ((ft & VT_BTYPE) == VT_LDOUBLE) {
3189 rc = RC_ST0;
3190 } else if ((ft & VT_BTYPE) == VT_QFLOAT) {
3191 rc = RC_FRET;
3193 #endif
3195 r = gv(rc); /* generate value */
3196 /* if lvalue was saved on stack, must read it */
3197 if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
3198 SValue sv;
3199 t = get_reg(RC_INT);
3200 #if PTR_SIZE == 8
3201 sv.type.t = VT_PTR;
3202 #else
3203 sv.type.t = VT_INT;
3204 #endif
3205 sv.r = VT_LOCAL | VT_LVAL;
3206 sv.c.i = vtop[-1].c.i;
3207 load(t, &sv);
3208 vtop[-1].r = t | VT_LVAL;
3210 /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
3211 #if PTR_SIZE == 8
3212 if (((ft & VT_BTYPE) == VT_QLONG) || ((ft & VT_BTYPE) == VT_QFLOAT)) {
3213 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
3214 #else
3215 if ((ft & VT_BTYPE) == VT_LLONG) {
3216 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
3217 #endif
3218 vtop[-1].type.t = load_type;
3219 store(r, vtop - 1);
3220 vswap();
3221 /* convert to int to increment easily */
3222 vtop->type.t = addr_type;
3223 gaddrof();
3224 vpushi(load_size);
3225 gen_op('+');
3226 vtop->r |= VT_LVAL;
3227 vswap();
3228 vtop[-1].type.t = load_type;
3229 /* XXX: it works because r2 is spilled last ! */
3230 store(vtop->r2, vtop - 1);
3231 } else {
3232 store(r, vtop - 1);
3235 vswap();
3236 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
3237 vtop->r |= delayed_cast;
3241 /* post non-zero selects post- vs. pre-increment. c is the token ++ or -- */
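/* Illustrative example (added, not from upstream): for 'a++' the lvalue
   is duplicated and the old value kept below the stored result, so the
   expression yields the pre-increment value; 'c - TOK_MID' pushes +1 for
   ++ and -1 for --, and the same path handles '++a', 'a--' and '--a'. */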
3242 ST_FUNC void inc(int post, int c)
3244 test_lvalue();
3245 vdup(); /* save lvalue */
3246 if (post) {
3247 gv_dup(); /* duplicate value */
3248 vrotb(3);
3249 vrotb(3);
3251 /* add constant */
3252 vpushi(c - TOK_MID);
3253 gen_op('+');
3254 vstore(); /* store value */
3255 if (post)
3256 vpop(); /* if post op, return saved value */
3259 ST_FUNC void parse_mult_str (CString *astr, const char *msg)
3261 /* read the string */
3262 if (tok != TOK_STR)
3263 expect(msg);
3264 cstr_new(astr);
3265 while (tok == TOK_STR) {
3266 /* XXX: add \0 handling too ? */
3267 cstr_cat(astr, tokc.str.data, -1);
3268 next();
3270 cstr_ccat(astr, '\0');
3273 /* If I is >= 1 and a power of two, returns log2(i)+1.
3274 If I is 0 returns 0. */
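/* Worked examples (added, not from upstream): exact_log2p1(0) == 0,
   exact_log2p1(1) == 1, exact_log2p1(16) == 5; the aligned attribute
   handling below stores this encoded value so that '1 << (aligned - 1)'
   recovers the requested alignment. */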
3275 static int exact_log2p1(int i)
3277 int ret;
3278 if (!i)
3279 return 0;
3280 for (ret = 1; i >= 1 << 8; ret += 8)
3281 i >>= 8;
3282 if (i >= 1 << 4)
3283 ret += 4, i >>= 4;
3284 if (i >= 1 << 2)
3285 ret += 2, i >>= 2;
3286 if (i >= 1 << 1)
3287 ret++;
3288 return ret;
3291 /* Parse __attribute__((...)) GNUC extension. */
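/* Illustrative examples (added, not from upstream) of inputs handled here:
       int x __attribute__((aligned(16)));
       void fatal(const char *msg) __attribute__((noreturn));
       static char buf[64] __attribute__((section(".mydata"), unused));
   i.e. a double-parenthesized, comma-separated list of attributes, some
   of which take their own parenthesized arguments. */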
3292 static void parse_attribute(AttributeDef *ad)
3294 int t, n;
3295 CString astr;
3297 redo:
3298 if (tok != TOK_ATTRIBUTE1 && tok != TOK_ATTRIBUTE2)
3299 return;
3300 next();
3301 skip('(');
3302 skip('(');
3303 while (tok != ')') {
3304 if (tok < TOK_IDENT)
3305 expect("attribute name");
3306 t = tok;
3307 next();
3308 switch(t) {
3309 case TOK_SECTION1:
3310 case TOK_SECTION2:
3311 skip('(');
3312 parse_mult_str(&astr, "section name");
3313 ad->section = find_section(tcc_state, (char *)astr.data);
3314 skip(')');
3315 cstr_free(&astr);
3316 break;
3317 case TOK_ALIAS1:
3318 case TOK_ALIAS2:
3319 skip('(');
3320 parse_mult_str(&astr, "alias(\"target\")");
3321 ad->alias_target = /* save string as token, for later */
3322 tok_alloc((char*)astr.data, astr.size-1)->tok;
3323 skip(')');
3324 cstr_free(&astr);
3325 break;
3326 case TOK_VISIBILITY1:
3327 case TOK_VISIBILITY2:
3328 skip('(');
3329 parse_mult_str(&astr,
3330 "visibility(\"default|hidden|internal|protected\")");
3331 if (!strcmp (astr.data, "default"))
3332 ad->a.visibility = STV_DEFAULT;
3333 else if (!strcmp (astr.data, "hidden"))
3334 ad->a.visibility = STV_HIDDEN;
3335 else if (!strcmp (astr.data, "internal"))
3336 ad->a.visibility = STV_INTERNAL;
3337 else if (!strcmp (astr.data, "protected"))
3338 ad->a.visibility = STV_PROTECTED;
3339 else
3340 expect("visibility(\"default|hidden|internal|protected\")");
3341 skip(')');
3342 cstr_free(&astr);
3343 break;
3344 case TOK_ALIGNED1:
3345 case TOK_ALIGNED2:
3346 if (tok == '(') {
3347 next();
3348 n = expr_const();
3349 if (n <= 0 || (n & (n - 1)) != 0)
3350 tcc_error("alignment must be a positive power of two");
3351 skip(')');
3352 } else {
3353 n = MAX_ALIGN;
3355 ad->a.aligned = exact_log2p1(n);
3356 if (n != 1 << (ad->a.aligned - 1))
3357 tcc_error("alignment of %d is larger than implemented", n);
3358 break;
3359 case TOK_PACKED1:
3360 case TOK_PACKED2:
3361 ad->a.packed = 1;
3362 break;
3363 case TOK_WEAK1:
3364 case TOK_WEAK2:
3365 ad->a.weak = 1;
3366 break;
3367 case TOK_UNUSED1:
3368 case TOK_UNUSED2:
3369 /* currently, no need to handle it because tcc does not
3370 track unused objects */
3371 break;
3372 case TOK_NORETURN1:
3373 case TOK_NORETURN2:
3374 /* currently ignored: tcc makes no use of the
3375 noreturn information */
3376 break;
3377 case TOK_CDECL1:
3378 case TOK_CDECL2:
3379 case TOK_CDECL3:
3380 ad->f.func_call = FUNC_CDECL;
3381 break;
3382 case TOK_STDCALL1:
3383 case TOK_STDCALL2:
3384 case TOK_STDCALL3:
3385 ad->f.func_call = FUNC_STDCALL;
3386 break;
3387 #ifdef TCC_TARGET_I386
3388 case TOK_REGPARM1:
3389 case TOK_REGPARM2:
3390 skip('(');
3391 n = expr_const();
3392 if (n > 3)
3393 n = 3;
3394 else if (n < 0)
3395 n = 0;
3396 if (n > 0)
3397 ad->f.func_call = FUNC_FASTCALL1 + n - 1;
3398 skip(')');
3399 break;
3400 case TOK_FASTCALL1:
3401 case TOK_FASTCALL2:
3402 case TOK_FASTCALL3:
3403 ad->f.func_call = FUNC_FASTCALLW;
3404 break;
3405 #endif
3406 case TOK_MODE:
3407 skip('(');
3408 switch(tok) {
3409 case TOK_MODE_DI:
3410 ad->attr_mode = VT_LLONG + 1;
3411 break;
3412 case TOK_MODE_QI:
3413 ad->attr_mode = VT_BYTE + 1;
3414 break;
3415 case TOK_MODE_HI:
3416 ad->attr_mode = VT_SHORT + 1;
3417 break;
3418 case TOK_MODE_SI:
3419 case TOK_MODE_word:
3420 ad->attr_mode = VT_INT + 1;
3421 break;
3422 default:
3423 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok, NULL));
3424 break;
3426 next();
3427 skip(')');
3428 break;
3429 case TOK_DLLEXPORT:
3430 ad->a.dllexport = 1;
3431 break;
3432 case TOK_DLLIMPORT:
3433 ad->a.dllimport = 1;
3434 break;
3435 default:
3436 if (tcc_state->warn_unsupported)
3437 tcc_warning("'%s' attribute ignored", get_tok_str(t, NULL));
3438 /* skip parameters */
3439 if (tok == '(') {
3440 int parenthesis = 0;
3441 do {
3442 if (tok == '(')
3443 parenthesis++;
3444 else if (tok == ')')
3445 parenthesis--;
3446 next();
3447 } while (parenthesis && tok != -1);
3449 break;
3451 if (tok != ',')
3452 break;
3453 next();
3455 skip(')');
3456 skip(')');
3457 goto redo;
3460 static Sym * find_field (CType *type, int v)
3462 Sym *s = type->ref;
3463 v |= SYM_FIELD;
3464 while ((s = s->next) != NULL) {
3465 if ((s->v & SYM_FIELD) &&
3466 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3467 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3468 Sym *ret = find_field (&s->type, v);
3469 if (ret)
3470 return ret;
3472 if (s->v == v)
3473 break;
3475 return s;
3478 static void struct_add_offset (Sym *s, int offset)
3480 while ((s = s->next) != NULL) {
3481 if ((s->v & SYM_FIELD) &&
3482 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3483 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3484 struct_add_offset(s->type.ref, offset);
3485 } else
3486 s->c += offset;
3490 static void struct_layout(CType *type, AttributeDef *ad)
3492 int size, align, maxalign, offset, c, bit_pos, bit_size;
3493 int packed, a, bt, prevbt, prev_bit_size;
3494 int pcc = !tcc_state->ms_bitfields;
3495 int pragma_pack = *tcc_state->pack_stack_ptr;
3496 Sym *f;
3498 maxalign = 1;
3499 offset = 0;
3500 c = 0;
3501 bit_pos = 0;
3502 prevbt = VT_STRUCT; /* make it never match */
3503 prev_bit_size = 0;
3505 //#define BF_DEBUG
3507 for (f = type->ref->next; f; f = f->next) {
3508 if (f->type.t & VT_BITFIELD)
3509 bit_size = BIT_SIZE(f->type.t);
3510 else
3511 bit_size = -1;
3512 size = type_size(&f->type, &align);
3513 a = f->a.aligned ? 1 << (f->a.aligned - 1) : 0;
3514 packed = 0;
3516 if (pcc && bit_size == 0) {
3517 /* in pcc mode, packing does not affect zero-width bitfields */
3519 } else {
3520 /* in pcc mode, attribute packed overrides if set. */
3521 if (pcc && (f->a.packed || ad->a.packed))
3522 align = packed = 1;
3524 /* pragma pack overrides align if smaller, and always packs bitfields */
3525 if (pragma_pack) {
3526 packed = 1;
3527 if (pragma_pack < align)
3528 align = pragma_pack;
3529 /* in pcc mode pragma pack also overrides individual align */
3530 if (pcc && pragma_pack < a)
3531 a = 0;
3534 /* some individual align was specified */
3535 if (a)
3536 align = a;
3538 if (type->ref->type.t == VT_UNION) {
3539 if (pcc && bit_size >= 0)
3540 size = (bit_size + 7) >> 3;
3541 offset = 0;
3542 if (size > c)
3543 c = size;
3545 } else if (bit_size < 0) {
3546 if (pcc)
3547 c += (bit_pos + 7) >> 3;
3548 c = (c + align - 1) & -align;
3549 offset = c;
3550 if (size > 0)
3551 c += size;
3552 bit_pos = 0;
3553 prevbt = VT_STRUCT;
3554 prev_bit_size = 0;
3556 } else {
3557 /* A bit-field. Layout is more complicated. There are two
3558 options: PCC (GCC) compatible and MS compatible */
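/* Worked example (added, not from upstream), PCC layout:
       struct { int a:3; int b:30; };
   'b' would overflow the remaining bits of the first int container, so
   it starts a new 4-byte unit; the struct therefore occupies 8 bytes,
   matching gcc. */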
3559 if (pcc) {
3560 /* In PCC layout a bit-field is placed adjacent to the
3561 preceding bit-fields, except if:
3562 - it has zero-width
3563 - an individual alignment was given
3564 - it would overflow its base type container and
3565 there is no packing */
3566 if (bit_size == 0) {
3567 new_field:
3568 c = (c + ((bit_pos + 7) >> 3) + align - 1) & -align;
3569 bit_pos = 0;
3570 } else if (f->a.aligned) {
3571 goto new_field;
3572 } else if (!packed) {
3573 int a8 = align * 8;
3574 int ofs = ((c * 8 + bit_pos) % a8 + bit_size + a8 - 1) / a8;
3575 if (ofs > size / align)
3576 goto new_field;
3579 /* in pcc mode, long long bitfields have type int if they fit */
3580 if (size == 8 && bit_size <= 32)
3581 f->type.t = (f->type.t & ~VT_BTYPE) | VT_INT, size = 4;
3583 while (bit_pos >= align * 8)
3584 c += align, bit_pos -= align * 8;
3585 offset = c;
3587 /* In PCC layout named bit-fields influence the alignment
3588 of the containing struct using the base type's alignment,
3589 except for packed fields (which here have correct align). */
3590 if (f->v & SYM_FIRST_ANOM
3591 // && bit_size // ??? gcc on ARM/rpi does that
3593 align = 1;
3595 } else {
3596 bt = f->type.t & VT_BTYPE;
3597 if ((bit_pos + bit_size > size * 8)
3598 || (bit_size > 0) == (bt != prevbt)
3600 c = (c + align - 1) & -align;
3601 offset = c;
3602 bit_pos = 0;
3603 /* In MS bitfield mode a bit-field run always uses
3604 at least as many bits as the underlying type.
3605 To start a new run it's also required that this
3606 or the last bit-field had non-zero width. */
3607 if (bit_size || prev_bit_size)
3608 c += size;
3610 /* In MS layout the record's alignment is normally
3611 influenced by the field, except for a zero-width
3612 field at the start of a run (further zero-width
3613 fields do influence it again). */
3614 if (bit_size == 0 && prevbt != bt)
3615 align = 1;
3616 prevbt = bt;
3617 prev_bit_size = bit_size;
3620 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
3621 | (bit_pos << VT_STRUCT_SHIFT);
3622 bit_pos += bit_size;
3624 if (align > maxalign)
3625 maxalign = align;
3627 #ifdef BF_DEBUG
3628 printf("set field %s offset %-2d size %-2d align %-2d",
3629 get_tok_str(f->v & ~SYM_FIELD, NULL), offset, size, align);
3630 if (f->type.t & VT_BITFIELD) {
3631 printf(" pos %-2d bits %-2d",
3632 BIT_POS(f->type.t),
3633 BIT_SIZE(f->type.t)
3636 printf("\n");
3637 #endif
3639 if (f->v & SYM_FIRST_ANOM && (f->type.t & VT_BTYPE) == VT_STRUCT) {
3640 Sym *ass;
3641 /* An anonymous struct/union. Adjust member offsets
3642 to reflect the real offset of our containing struct.
3643 Also set the offset of this anon member inside
3644 the outer struct to be zero. This way it works
3645 both when accessing the field offset directly
3646 (from the base object) and when recursing into
3647 members in initializer handling. */
3648 int v2 = f->type.ref->v;
3649 if (!(v2 & SYM_FIELD) &&
3650 (v2 & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
3651 Sym **pps;
3652 /* This happens only with MS extensions. The
3653 anon member has a named struct type, so it
3654 potentially is shared with other references.
3655 We need to unshare members so we can modify
3656 them. */
3657 ass = f->type.ref;
3658 f->type.ref = sym_push(anon_sym++ | SYM_FIELD,
3659 &f->type.ref->type, 0,
3660 f->type.ref->c);
3661 pps = &f->type.ref->next;
3662 while ((ass = ass->next) != NULL) {
3663 *pps = sym_push(ass->v, &ass->type, 0, ass->c);
3664 pps = &((*pps)->next);
3666 *pps = NULL;
3668 struct_add_offset(f->type.ref, offset);
3669 f->c = 0;
3670 } else {
3671 f->c = offset;
3674 f->r = 0;
3677 if (pcc)
3678 c += (bit_pos + 7) >> 3;
3680 /* store size and alignment */
3681 a = bt = ad->a.aligned ? 1 << (ad->a.aligned - 1) : 1;
3682 if (a < maxalign)
3683 a = maxalign;
3684 type->ref->r = a;
3685 if (pragma_pack && pragma_pack < maxalign && 0 == pcc) {
3686 /* can happen if individual align for some member was given. In
3687 this case MSVC ignores maxalign when aligning the size */
3688 a = pragma_pack;
3689 if (a < bt)
3690 a = bt;
3692 c = (c + a - 1) & -a;
3693 type->ref->c = c;
3695 #ifdef BF_DEBUG
3696 printf("struct size %-2d align %-2d\n\n", c, a), fflush(stdout);
3697 #endif
3699 /* check whether we can access bitfields by their type */
3700 for (f = type->ref->next; f; f = f->next) {
3701 int s, px, cx, c0;
3702 CType t;
3704 if (0 == (f->type.t & VT_BITFIELD))
3705 continue;
3706 f->type.ref = f;
3707 f->auxtype = -1;
3708 bit_size = BIT_SIZE(f->type.t);
3709 if (bit_size == 0)
3710 continue;
3711 bit_pos = BIT_POS(f->type.t);
3712 size = type_size(&f->type, &align);
3713 if (bit_pos + bit_size <= size * 8 && f->c + size <= c)
3714 continue;
3716 /* try to access the field using a different type */
3717 c0 = -1, s = align = 1;
3718 for (;;) {
3719 px = f->c * 8 + bit_pos;
3720 cx = (px >> 3) & -align;
3721 px = px - (cx << 3);
3722 if (c0 == cx)
3723 break;
3724 s = (px + bit_size + 7) >> 3;
3725 if (s > 4) {
3726 t.t = VT_LLONG;
3727 } else if (s > 2) {
3728 t.t = VT_INT;
3729 } else if (s > 1) {
3730 t.t = VT_SHORT;
3731 } else {
3732 t.t = VT_BYTE;
3734 s = type_size(&t, &align);
3735 c0 = cx;
3738 if (px + bit_size <= s * 8 && cx + s <= c) {
3739 /* update offset and bit position */
3740 f->c = cx;
3741 bit_pos = px;
3742 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
3743 | (bit_pos << VT_STRUCT_SHIFT);
3744 if (s != size)
3745 f->auxtype = t.t;
3746 #ifdef BF_DEBUG
3747 printf("FIX field %s offset %-2d size %-2d align %-2d "
3748 "pos %-2d bits %-2d\n",
3749 get_tok_str(f->v & ~SYM_FIELD, NULL),
3750 cx, s, align, px, bit_size);
3751 #endif
3752 } else {
3753 /* fall back to load/store single-byte wise */
3754 f->auxtype = VT_STRUCT;
3755 #ifdef BF_DEBUG
3756 printf("FIX field %s : load byte-wise\n",
3757 get_tok_str(f->v & ~SYM_FIELD, NULL));
3758 #endif
3763 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
3764 static void struct_decl(CType *type, int u)
3766 int v, c, size, align, flexible;
3767 int bit_size, bsize, bt;
3768 Sym *s, *ss, **ps;
3769 AttributeDef ad, ad1;
3770 CType type1, btype;
3772 memset(&ad, 0, sizeof ad);
3773 next();
3774 parse_attribute(&ad);
3775 if (tok != '{') {
3776 v = tok;
3777 next();
3778 /* struct already defined ? return it */
3779 if (v < TOK_IDENT)
3780 expect("struct/union/enum name");
3781 s = struct_find(v);
3782 if (s && (s->sym_scope == local_scope || tok != '{')) {
3783 if (u == s->type.t)
3784 goto do_decl;
3785 if (u == VT_ENUM && IS_ENUM(s->type.t))
3786 goto do_decl;
3787 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
3789 } else {
3790 v = anon_sym++;
3792 /* Record the original enum/struct/union token. */
3793 type1.t = u == VT_ENUM ? u | VT_INT | VT_UNSIGNED : u;
3794 type1.ref = NULL;
3795 /* we put an undefined size for struct/union */
3796 s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
3797 s->r = 0; /* default alignment is zero as gcc */
3798 do_decl:
3799 type->t = s->type.t;
3800 type->ref = s;
3802 if (tok == '{') {
3803 next();
3804 if (s->c != -1)
3805 tcc_error("struct/union/enum already defined");
3806 /* cannot be empty */
3807 /* empty enums are not allowed */
3808 ps = &s->next;
3809 if (u == VT_ENUM) {
3810 long long ll = 0, pl = 0, nl = 0;
3811 CType t;
3812 t.ref = s;
3813 /* enum symbols have static storage */
3814 t.t = VT_INT|VT_STATIC|VT_ENUM_VAL;
3815 for(;;) {
3816 v = tok;
3817 if (v < TOK_UIDENT)
3818 expect("identifier");
3819 ss = sym_find(v);
3820 if (ss && !local_stack)
3821 tcc_error("redefinition of enumerator '%s'",
3822 get_tok_str(v, NULL));
3823 next();
3824 if (tok == '=') {
3825 next();
3826 ll = expr_const64();
3828 ss = sym_push(v, &t, VT_CONST, 0);
3829 ss->enum_val = ll;
3830 *ps = ss, ps = &ss->next;
3831 if (ll < nl)
3832 nl = ll;
3833 if (ll > pl)
3834 pl = ll;
3835 if (tok != ',')
3836 break;
3837 next();
3838 ll++;
3839 /* NOTE: we accept a trailing comma */
3840 if (tok == '}')
3841 break;
3843 skip('}');
3844 /* set integral type of the enum */
3845 t.t = VT_INT;
3846 if (nl >= 0) {
3847 if (pl != (unsigned)pl)
3848 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
3849 t.t |= VT_UNSIGNED;
3850 } else if (pl != (int)pl || nl != (int)nl)
3851 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
3852 s->type.t = type->t = t.t | VT_ENUM;
3853 s->c = 0;
3854 /* set type for enum members */
3855 for (ss = s->next; ss; ss = ss->next) {
3856 ll = ss->enum_val;
3857 if (ll == (int)ll) /* default is int if it fits */
3858 continue;
3859 if (t.t & VT_UNSIGNED) {
3860 ss->type.t |= VT_UNSIGNED;
3861 if (ll == (unsigned)ll)
3862 continue;
3864 ss->type.t = (ss->type.t & ~VT_BTYPE)
3865 | (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
3867 } else {
3868 c = 0;
3869 flexible = 0;
3870 while (tok != '}') {
3871 if (!parse_btype(&btype, &ad1)) {
3872 skip(';');
3873 continue;
3875 while (1) {
3876 if (flexible)
3877 tcc_error("flexible array member '%s' not at the end of struct",
3878 get_tok_str(v, NULL));
3879 bit_size = -1;
3880 v = 0;
3881 type1 = btype;
3882 if (tok != ':') {
3883 if (tok != ';')
3884 type_decl(&type1, &ad1, &v, TYPE_DIRECT);
3885 if (v == 0) {
3886 if ((type1.t & VT_BTYPE) != VT_STRUCT)
3887 expect("identifier");
3888 else {
3889 int v = btype.ref->v;
3890 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
3891 if (tcc_state->ms_extensions == 0)
3892 expect("identifier");
3896 if (type_size(&type1, &align) < 0) {
3897 if ((u == VT_STRUCT) && (type1.t & VT_ARRAY) && c)
3898 flexible = 1;
3899 else
3900 tcc_error("field '%s' has incomplete type",
3901 get_tok_str(v, NULL));
3903 if ((type1.t & VT_BTYPE) == VT_FUNC ||
3904 (type1.t & VT_STORAGE))
3905 tcc_error("invalid type for '%s'",
3906 get_tok_str(v, NULL));
3908 if (tok == ':') {
3909 next();
3910 bit_size = expr_const();
3911 /* XXX: handle v = 0 case for messages */
3912 if (bit_size < 0)
3913 tcc_error("negative width in bit-field '%s'",
3914 get_tok_str(v, NULL));
3915 if (v && bit_size == 0)
3916 tcc_error("zero width for bit-field '%s'",
3917 get_tok_str(v, NULL));
3918 parse_attribute(&ad1);
3920 size = type_size(&type1, &align);
3921 if (bit_size >= 0) {
3922 bt = type1.t & VT_BTYPE;
3923 if (bt != VT_INT &&
3924 bt != VT_BYTE &&
3925 bt != VT_SHORT &&
3926 bt != VT_BOOL &&
3927 bt != VT_LLONG)
3928 tcc_error("bitfields must have scalar type");
3929 bsize = size * 8;
3930 if (bit_size > bsize) {
3931 tcc_error("width of '%s' exceeds its type",
3932 get_tok_str(v, NULL));
3933 } else if (bit_size == bsize
3934 && !ad.a.packed && !ad1.a.packed) {
3935 /* no need for bit fields */
3937 } else if (bit_size == 64) {
3938 tcc_error("field width 64 not implemented");
3939 } else {
3940 type1.t = (type1.t & ~VT_STRUCT_MASK)
3941 | VT_BITFIELD
3942 | (bit_size << (VT_STRUCT_SHIFT + 6));
3945 if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
3946 /* Remember we've seen a real field to check
3947 for placement of flexible array member. */
3948 c = 1;
3950 /* If member is a struct or bit-field, enforce
3951 placing into the struct (as anonymous). */
3952 if (v == 0 &&
3953 ((type1.t & VT_BTYPE) == VT_STRUCT ||
3954 bit_size >= 0)) {
3955 v = anon_sym++;
3957 if (v) {
3958 ss = sym_push(v | SYM_FIELD, &type1, 0, 0);
3959 ss->a = ad1.a;
3960 *ps = ss;
3961 ps = &ss->next;
3963 if (tok == ';' || tok == TOK_EOF)
3964 break;
3965 skip(',');
3967 skip(';');
3969 skip('}');
3970 parse_attribute(&ad);
3971 struct_layout(type, &ad);
3976 static void sym_to_attr(AttributeDef *ad, Sym *s)
3978 if (s->a.aligned && 0 == ad->a.aligned)
3979 ad->a.aligned = s->a.aligned;
3980 if (s->f.func_call && 0 == ad->f.func_call)
3981 ad->f.func_call = s->f.func_call;
3982 if (s->f.func_type && 0 == ad->f.func_type)
3983 ad->f.func_type = s->f.func_type;
3984 if (s->a.packed)
3985 ad->a.packed = 1;
3988 /* Add type qualifiers to a type. If the type is an array then the qualifiers
3989 are added to the element type, copied because it could be a typedef. */
3990 static void parse_btype_qualify(CType *type, int qualifiers)
3992 while (type->t & VT_ARRAY) {
3993 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
3994 type = &type->ref->type;
3996 type->t |= qualifiers;
3999 /* return 0 if no type declaration. Otherwise, return the basic type
4000 and skip it. */
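/* Illustrative example (added, not from upstream): for
       unsigned long long int x;
   the specifiers are folded into a single 't' with VT_LLONG|VT_UNSIGNED
   set, while 'long double' is folded into VT_LDOUBLE by the combination
   of the TOK_LONG and TOK_DOUBLE cases below. */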
4002 static int parse_btype(CType *type, AttributeDef *ad)
4004 int t, u, bt, st, type_found, typespec_found, g;
4005 Sym *s;
4006 CType type1;
4008 memset(ad, 0, sizeof(AttributeDef));
4009 type_found = 0;
4010 typespec_found = 0;
4011 t = VT_INT;
4012 bt = st = -1;
4013 type->ref = NULL;
4015 while(1) {
4016 switch(tok) {
4017 case TOK_EXTENSION:
4018 /* currently, __extension__ is simply ignored */
4019 next();
4020 continue;
4022 /* basic types */
4023 case TOK_CHAR:
4024 u = VT_BYTE;
4025 basic_type:
4026 next();
4027 basic_type1:
4028 if (u == VT_SHORT || u == VT_LONG) {
4029 if (st != -1 || (bt != -1 && bt != VT_INT))
4030 tmbt: tcc_error("too many basic types");
4031 st = u;
4032 } else {
4033 if (bt != -1 || (st != -1 && u != VT_INT))
4034 goto tmbt;
4035 bt = u;
4037 if (u != VT_INT)
4038 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
4039 typespec_found = 1;
4040 break;
4041 case TOK_VOID:
4042 u = VT_VOID;
4043 goto basic_type;
4044 case TOK_SHORT:
4045 u = VT_SHORT;
4046 goto basic_type;
4047 case TOK_INT:
4048 u = VT_INT;
4049 goto basic_type;
4050 case TOK_LONG:
4051 if ((t & VT_BTYPE) == VT_DOUBLE) {
4052 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
4053 } else if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
4054 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LLONG;
4055 } else {
4056 u = VT_LONG;
4057 goto basic_type;
4059 next();
4060 break;
4061 #ifdef TCC_TARGET_ARM64
4062 case TOK_UINT128:
4063 /* GCC's __uint128_t appears in some Linux header files. Make it a
4064 synonym for long double to get the size and alignment right. */
4065 u = VT_LDOUBLE;
4066 goto basic_type;
4067 #endif
4068 case TOK_BOOL:
4069 u = VT_BOOL;
4070 goto basic_type;
4071 case TOK_FLOAT:
4072 u = VT_FLOAT;
4073 goto basic_type;
4074 case TOK_DOUBLE:
4075 if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
4076 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
4077 } else {
4078 u = VT_DOUBLE;
4079 goto basic_type;
4081 next();
4082 break;
4083 case TOK_ENUM:
4084 struct_decl(&type1, VT_ENUM);
4085 basic_type2:
4086 u = type1.t;
4087 type->ref = type1.ref;
4088 goto basic_type1;
4089 case TOK_STRUCT:
4090 struct_decl(&type1, VT_STRUCT);
4091 goto basic_type2;
4092 case TOK_UNION:
4093 struct_decl(&type1, VT_UNION);
4094 goto basic_type2;
4096 /* type modifiers */
4097 case TOK_CONST1:
4098 case TOK_CONST2:
4099 case TOK_CONST3:
4100 type->t = t;
4101 parse_btype_qualify(type, VT_CONSTANT);
4102 t = type->t;
4103 next();
4104 break;
4105 case TOK_VOLATILE1:
4106 case TOK_VOLATILE2:
4107 case TOK_VOLATILE3:
4108 type->t = t;
4109 parse_btype_qualify(type, VT_VOLATILE);
4110 t = type->t;
4111 next();
4112 break;
4113 case TOK_SIGNED1:
4114 case TOK_SIGNED2:
4115 case TOK_SIGNED3:
4116 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
4117 tcc_error("signed and unsigned modifier");
4118 t |= VT_DEFSIGN;
4119 next();
4120 typespec_found = 1;
4121 break;
4122 case TOK_REGISTER:
4123 case TOK_AUTO:
4124 case TOK_RESTRICT1:
4125 case TOK_RESTRICT2:
4126 case TOK_RESTRICT3:
4127 next();
4128 break;
4129 case TOK_UNSIGNED:
4130 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
4131 tcc_error("signed and unsigned modifier");
4132 t |= VT_DEFSIGN | VT_UNSIGNED;
4133 next();
4134 typespec_found = 1;
4135 break;
4137 /* storage */
4138 case TOK_EXTERN:
4139 g = VT_EXTERN;
4140 goto storage;
4141 case TOK_STATIC:
4142 g = VT_STATIC;
4143 goto storage;
4144 case TOK_TYPEDEF:
4145 g = VT_TYPEDEF;
4146 goto storage;
4147 storage:
4148 if (t & (VT_EXTERN|VT_STATIC|VT_TYPEDEF) & ~g)
4149 tcc_error("multiple storage classes");
4150 t |= g;
4151 next();
4152 break;
4153 case TOK_INLINE1:
4154 case TOK_INLINE2:
4155 case TOK_INLINE3:
4156 t |= VT_INLINE;
4157 next();
4158 break;
4160 /* GNUC attribute */
4161 case TOK_ATTRIBUTE1:
4162 case TOK_ATTRIBUTE2:
4163 parse_attribute(ad);
4164 if (ad->attr_mode) {
4165 u = ad->attr_mode -1;
4166 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
4168 break;
4169 /* GNUC typeof */
4170 case TOK_TYPEOF1:
4171 case TOK_TYPEOF2:
4172 case TOK_TYPEOF3:
4173 next();
4174 parse_expr_type(&type1);
4175 /* remove all storage modifiers except typedef */
4176 type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
4177 if (type1.ref)
4178 sym_to_attr(ad, type1.ref);
4179 goto basic_type2;
4180 default:
4181 if (typespec_found)
4182 goto the_end;
4183 s = sym_find(tok);
4184 if (!s || !(s->type.t & VT_TYPEDEF))
4185 goto the_end;
4186 t &= ~(VT_BTYPE|VT_LONG);
4187 u = t & ~(VT_CONSTANT | VT_VOLATILE), t ^= u;
4188 type->t = (s->type.t & ~VT_TYPEDEF) | u;
4189 type->ref = s->type.ref;
4190 if (t)
4191 parse_btype_qualify(type, t);
4192 t = type->t;
4193 /* get attributes from typedef */
4194 sym_to_attr(ad, s);
4195 next();
4196 typespec_found = 1;
4197 st = bt = -2;
4198 break;
4200 type_found = 1;
4202 the_end:
4203 if (tcc_state->char_is_unsigned) {
4204 if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
4205 t |= VT_UNSIGNED;
4207 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
4208 bt = t & (VT_BTYPE|VT_LONG);
4209 if (bt == VT_LONG)
4210 t |= LONG_SIZE == 8 ? VT_LLONG : VT_INT;
4211 #ifdef TCC_TARGET_PE
4212 if (bt == VT_LDOUBLE)
4213 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_DOUBLE;
4214 #endif
4215 type->t = t;
4216 return type_found;
4219 /* convert a function parameter type (array to pointer and function to
4220 function pointer) */
4221 static inline void convert_parameter_type(CType *pt)
4223 /* remove const and volatile qualifiers (XXX: const could be used
4224 to indicate a const function parameter) */
4225 pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
4226 /* array must be transformed to pointer according to ANSI C */
4227 pt->t &= ~VT_ARRAY;
4228 if ((pt->t & VT_BTYPE) == VT_FUNC) {
4229 mk_pointer(pt);
4233 ST_FUNC void parse_asm_str(CString *astr)
4235 skip('(');
4236 parse_mult_str(astr, "string constant");
4239 /* Parse an asm label and return the token */
4240 static int asm_label_instr(void)
4242 int v;
4243 CString astr;
4245 next();
4246 parse_asm_str(&astr);
4247 skip(')');
4248 #ifdef ASM_DEBUG
4249 printf("asm_alias: \"%s\"\n", (char *)astr.data);
4250 #endif
4251 v = tok_alloc(astr.data, astr.size - 1)->tok;
4252 cstr_free(&astr);
4253 return v;
4256 static int post_type(CType *type, AttributeDef *ad, int storage, int td)
4258 int n, l, t1, arg_size, align;
4259 Sym **plast, *s, *first;
4260 AttributeDef ad1;
4261 CType pt;
4263 if (tok == '(') {
4264 /* function type, or recursive declarator (return if so) */
4265 next();
4266 if (td && !(td & TYPE_ABSTRACT))
4267 return 0;
4268 if (tok == ')')
4269 l = 0;
4270 else if (parse_btype(&pt, &ad1))
4271 l = FUNC_NEW;
4272 else if (td)
4273 return 0;
4274 else
4275 l = FUNC_OLD;
4276 first = NULL;
4277 plast = &first;
4278 arg_size = 0;
4279 if (l) {
4280 for(;;) {
4281 /* read param name and compute offset */
4282 if (l != FUNC_OLD) {
4283 if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
4284 break;
4285 type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT);
4286 if ((pt.t & VT_BTYPE) == VT_VOID)
4287 tcc_error("parameter declared as void");
4288 arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
4289 } else {
4290 n = tok;
4291 if (n < TOK_UIDENT)
4292 expect("identifier");
4293 pt.t = VT_VOID; /* invalid type */
4294 next();
4296 convert_parameter_type(&pt);
4297 s = sym_push(n | SYM_FIELD, &pt, 0, 0);
4298 *plast = s;
4299 plast = &s->next;
4300 if (tok == ')')
4301 break;
4302 skip(',');
4303 if (l == FUNC_NEW && tok == TOK_DOTS) {
4304 l = FUNC_ELLIPSIS;
4305 next();
4306 break;
4308 if (l == FUNC_NEW && !parse_btype(&pt, &ad1))
4309 tcc_error("invalid type");
4311 } else
4312 /* if no parameters, then old-style prototype */
4313 l = FUNC_OLD;
4314 skip(')');
4315 /* NOTE: const is ignored in returned type as it has a special
4316 meaning in gcc / C++ */
4317 type->t &= ~VT_CONSTANT;
4318 /* some ancient pre-K&R C allows a function to return an array
4319 and the array brackets to be put after the arguments, such
4320 that "int c()[]" means something like "int[] c()" */
4321 if (tok == '[') {
4322 next();
4323 skip(']'); /* only handle simple "[]" */
4324 mk_pointer(type);
4326 /* we push an anonymous symbol which will contain the function prototype */
4327 ad->f.func_args = arg_size;
4328 ad->f.func_type = l;
4329 s = sym_push(SYM_FIELD, type, 0, 0);
4330 s->a = ad->a;
4331 s->f = ad->f;
4332 s->next = first;
4333 type->t = VT_FUNC;
4334 type->ref = s;
4335 } else if (tok == '[') {
4336 int saved_nocode_wanted = nocode_wanted;
4337 /* array definition */
4338 next();
4339 if (tok == TOK_RESTRICT1)
4340 next();
4341 n = -1;
4342 t1 = 0;
4343 if (tok != ']') {
4344 if (!local_stack || (storage & VT_STATIC))
4345 vpushi(expr_const());
4346 else {
4347 /* For VLAs (which can only happen with local_stack && !VT_STATIC)
4348 the length must always be evaluated, even under nocode_wanted,
4349 so that its size slot is initialized (e.g. under sizeof
4350 or typeof). */
4351 nocode_wanted = 0;
4352 gexpr();
4354 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4355 n = vtop->c.i;
4356 if (n < 0)
4357 tcc_error("invalid array size");
4358 } else {
4359 if (!is_integer_btype(vtop->type.t & VT_BTYPE))
4360 tcc_error("size of variable length array should be an integer");
4361 t1 = VT_VLA;
4364 skip(']');
4365 /* parse next post type */
4366 post_type(type, ad, storage, 0);
4367 if (type->t == VT_FUNC)
4368 tcc_error("declaration of an array of functions");
4369 t1 |= type->t & VT_VLA;
4371 if (t1 & VT_VLA) {
4372 loc -= type_size(&int_type, &align);
4373 loc &= -align;
4374 n = loc;
4376 vla_runtime_type_size(type, &align);
4377 gen_op('*');
4378 vset(&int_type, VT_LOCAL|VT_LVAL, n);
4379 vswap();
4380 vstore();
4382 if (n != -1)
4383 vpop();
4384 nocode_wanted = saved_nocode_wanted;
4386 /* we push an anonymous symbol which will contain the array
4387 element type */
4388 s = sym_push(SYM_FIELD, type, 0, n);
4389 type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
4390 type->ref = s;
4392 return 1;
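/* Illustrative sketch (not part of tcc, not compiled): a few made-up
   declarations of the kind post_type() digests above -- new-style and
   old-style (K&R) parameter lists, a variadic prototype, plain array
   suffixes and a VLA parameter whose length is an expression. */
#if 0
int add(int a, int b);           /* FUNC_NEW: prototyped parameter list    */
int old_add(a, b) int a, b;      /* FUNC_OLD: K&R definition, names only   */
{ return a + b; }
int sum(int n, ...);             /* FUNC_ELLIPSIS: variadic                */
char *names[16];                 /* fixed-size array suffix                */
int scale(int n, int v[n]);      /* VLA parameter: length must be evaluated */
#endif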
4395 /* Parse a type declarator (except basic type), and return the type
4396 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4397 expected. 'type' should contain the basic type. 'ad' is the
4398 attribute definition of the basic type. It can be modified by
4399 type_decl(). If this (possibly abstract) declarator is a pointer chain
4400 it returns the innermost pointed to type (equals *type, but is a different
4401 pointer), otherwise returns type itself, that's used for recursive calls. */
4402 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td)
4404 CType *post, *ret;
4405 int qualifiers, storage;
4407 /* recursive type, remove storage bits first, apply them later again */
4408 storage = type->t & VT_STORAGE;
4409 type->t &= ~VT_STORAGE;
4410 post = ret = type;
4412 while (tok == '*') {
4413 qualifiers = 0;
4414 redo:
4415 next();
4416 switch(tok) {
4417 case TOK_CONST1:
4418 case TOK_CONST2:
4419 case TOK_CONST3:
4420 qualifiers |= VT_CONSTANT;
4421 goto redo;
4422 case TOK_VOLATILE1:
4423 case TOK_VOLATILE2:
4424 case TOK_VOLATILE3:
4425 qualifiers |= VT_VOLATILE;
4426 goto redo;
4427 case TOK_RESTRICT1:
4428 case TOK_RESTRICT2:
4429 case TOK_RESTRICT3:
4430 goto redo;
4431 /* XXX: clarify attribute handling */
4432 case TOK_ATTRIBUTE1:
4433 case TOK_ATTRIBUTE2:
4434 parse_attribute(ad);
4435 break;
4437 mk_pointer(type);
4438 type->t |= qualifiers;
4439 if (ret == type)
4440 /* innermost pointed to type is the one for the first derivation */
4441 ret = pointed_type(type);
4444 if (tok == '(') {
4445 /* This is possibly a parameter type list for abstract declarators
4446 ('int ()'); use post_type to test for that. */
4447 if (!post_type(type, ad, 0, td)) {
4448 /* It's not, so it's a nested declarator, and the post operations
4449 apply to the innermost pointed to type (if any). */
4450 /* XXX: it is not correct to modify 'ad' at this point, but
4451 the syntax is not clear */
4452 parse_attribute(ad);
4453 post = type_decl(type, ad, v, td);
4454 skip(')');
4456 } else if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
4457 /* type identifier */
4458 *v = tok;
4459 next();
4460 } else {
4461 if (!(td & TYPE_ABSTRACT))
4462 expect("identifier");
4463 *v = 0;
4465 post_type(post, ad, storage, 0);
4466 parse_attribute(ad);
4467 type->t |= storage;
4468 return ret;
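/* Illustrative sketch (not part of tcc, not compiled): declarators of
   increasing nesting depth as seen by type_decl(); for a pointer chain
   the function returns the innermost pointed-to type.  The names are
   hypothetical. */
#if 0
const char *volatile p;          /* pointer with qualifiers                */
int (*fp)(void);                 /* nested declarator: pointer to function */
int *(*table[4])(const char *);  /* array of pointers to functions         */
#endif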
4471 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
4472 ST_FUNC int lvalue_type(int t)
4474 int bt, r;
4475 r = VT_LVAL;
4476 bt = t & VT_BTYPE;
4477 if (bt == VT_BYTE || bt == VT_BOOL)
4478 r |= VT_LVAL_BYTE;
4479 else if (bt == VT_SHORT)
4480 r |= VT_LVAL_SHORT;
4481 else
4482 return r;
4483 if (t & VT_UNSIGNED)
4484 r |= VT_LVAL_UNSIGNED;
4485 return r;
4488 /* indirection with full error checking and bound check */
4489 ST_FUNC void indir(void)
4491 if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
4492 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
4493 return;
4494 expect("pointer");
4496 if (vtop->r & VT_LVAL)
4497 gv(RC_INT);
4498 vtop->type = *pointed_type(&vtop->type);
4499 /* Arrays and functions are never lvalues */
4500 if (!(vtop->type.t & VT_ARRAY) && !(vtop->type.t & VT_VLA)
4501 && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
4502 vtop->r |= lvalue_type(vtop->type.t);
4503 /* if bound checking, the referenced pointer must be checked */
4504 #ifdef CONFIG_TCC_BCHECK
4505 if (tcc_state->do_bounds_check)
4506 vtop->r |= VT_MUSTBOUND;
4507 #endif
4511 /* pass a parameter to a function and do type checking and casting */
4512 static void gfunc_param_typed(Sym *func, Sym *arg)
4514 int func_type;
4515 CType type;
4517 func_type = func->f.func_type;
4518 if (func_type == FUNC_OLD ||
4519 (func_type == FUNC_ELLIPSIS && arg == NULL)) {
4520 /* default casting : only need to convert float to double */
4521 if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
4522 gen_cast_s(VT_DOUBLE);
4523 } else if (vtop->type.t & VT_BITFIELD) {
4524 type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
4525 type.ref = vtop->type.ref;
4526 gen_cast(&type);
4528 } else if (arg == NULL) {
4529 tcc_error("too many arguments to function");
4530 } else {
4531 type = arg->type;
4532 type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
4533 gen_assign_cast(&type);
4537 /* parse an expression and return its type without any side effect. */
4538 static void expr_type(CType *type, void (*expr_fn)(void))
4540 nocode_wanted++;
4541 expr_fn();
4542 *type = vtop->type;
4543 vpop();
4544 nocode_wanted--;
4547 /* parse an expression of the form '(type)' or '(expr)' and return its
4548 type */
4549 static void parse_expr_type(CType *type)
4551 int n;
4552 AttributeDef ad;
4554 skip('(');
4555 if (parse_btype(type, &ad)) {
4556 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4557 } else {
4558 expr_type(type, gexpr);
4560 skip(')');
4563 static void parse_type(CType *type)
4565 AttributeDef ad;
4566 int n;
4568 if (!parse_btype(type, &ad)) {
4569 expect("type");
4571 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4574 static void parse_builtin_params(int nc, const char *args)
4576 char c, sep = '(';
4577 CType t;
4578 if (nc)
4579 nocode_wanted++;
4580 next();
4581 while ((c = *args++)) {
4582 skip(sep);
4583 sep = ',';
4584 switch (c) {
4585 case 'e': expr_eq(); continue;
4586 case 't': parse_type(&t); vpush(&t); continue;
4587 default: tcc_error("internal error"); break;
4590 skip(')');
4591 if (nc)
4592 nocode_wanted--;
4595 ST_FUNC void unary(void)
4597 int n, t, align, size, r, sizeof_caller;
4598 CType type;
4599 Sym *s;
4600 AttributeDef ad;
4602 sizeof_caller = in_sizeof;
4603 in_sizeof = 0;
4604 type.ref = NULL;
4605 /* XXX: GCC 2.95.3 does not generate a table although it should be
4606 better here */
4607 tok_next:
4608 switch(tok) {
4609 case TOK_EXTENSION:
4610 next();
4611 goto tok_next;
4612 case TOK_LCHAR:
4613 #ifdef TCC_TARGET_PE
4614 t = VT_SHORT|VT_UNSIGNED;
4615 goto push_tokc;
4616 #endif
4617 case TOK_CINT:
4618 case TOK_CCHAR:
4619 t = VT_INT;
4620 push_tokc:
4621 type.t = t;
4622 vsetc(&type, VT_CONST, &tokc);
4623 next();
4624 break;
4625 case TOK_CUINT:
4626 t = VT_INT | VT_UNSIGNED;
4627 goto push_tokc;
4628 case TOK_CLLONG:
4629 t = VT_LLONG;
4630 goto push_tokc;
4631 case TOK_CULLONG:
4632 t = VT_LLONG | VT_UNSIGNED;
4633 goto push_tokc;
4634 case TOK_CFLOAT:
4635 t = VT_FLOAT;
4636 goto push_tokc;
4637 case TOK_CDOUBLE:
4638 t = VT_DOUBLE;
4639 goto push_tokc;
4640 case TOK_CLDOUBLE:
4641 t = VT_LDOUBLE;
4642 goto push_tokc;
4643 case TOK_CLONG:
4644 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG;
4645 goto push_tokc;
4646 case TOK_CULONG:
4647 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG | VT_UNSIGNED;
4648 goto push_tokc;
4649 case TOK___FUNCTION__:
4650 if (!gnu_ext)
4651 goto tok_identifier;
4652 /* fall thru */
4653 case TOK___FUNC__:
4655 void *ptr;
4656 int len;
4657 /* special function name identifier */
4658 len = strlen(funcname) + 1;
4659 /* generate char[len] type */
4660 type.t = VT_BYTE;
4661 mk_pointer(&type);
4662 type.t |= VT_ARRAY;
4663 type.ref->c = len;
4664 vpush_ref(&type, data_section, data_section->data_offset, len);
4665 if (!NODATA_WANTED) {
4666 ptr = section_ptr_add(data_section, len);
4667 memcpy(ptr, funcname, len);
4669 next();
4671 break;
4672 case TOK_LSTR:
4673 #ifdef TCC_TARGET_PE
4674 t = VT_SHORT | VT_UNSIGNED;
4675 #else
4676 t = VT_INT;
4677 #endif
4678 goto str_init;
4679 case TOK_STR:
4680 /* string parsing */
4681 t = VT_BYTE;
4682 if (tcc_state->char_is_unsigned)
4683 t = VT_BYTE | VT_UNSIGNED;
4684 str_init:
4685 if (tcc_state->warn_write_strings)
4686 t |= VT_CONSTANT;
4687 type.t = t;
4688 mk_pointer(&type);
4689 type.t |= VT_ARRAY;
4690 memset(&ad, 0, sizeof(AttributeDef));
4691 decl_initializer_alloc(&type, &ad, VT_CONST, 2, 0, 0);
4692 break;
4693 case '(':
4694 next();
4695 /* cast ? */
4696 if (parse_btype(&type, &ad)) {
4697 type_decl(&type, &ad, &n, TYPE_ABSTRACT);
4698 skip(')');
4699 /* check ISOC99 compound literal */
4700 if (tok == '{') {
4701 /* data is allocated locally by default */
4702 if (global_expr)
4703 r = VT_CONST;
4704 else
4705 r = VT_LOCAL;
4706 /* all except arrays are lvalues */
4707 if (!(type.t & VT_ARRAY))
4708 r |= lvalue_type(type.t);
4709 memset(&ad, 0, sizeof(AttributeDef));
4710 decl_initializer_alloc(&type, &ad, r, 1, 0, 0);
4711 } else {
4712 if (sizeof_caller) {
4713 vpush(&type);
4714 return;
4716 unary();
4717 gen_cast(&type);
4719 } else if (tok == '{') {
4720 int saved_nocode_wanted = nocode_wanted;
4721 if (const_wanted)
4722 tcc_error("expected constant");
4723 /* save all registers */
4724 save_regs(0);
4725 /* statement expression : we do not accept break/continue
4726 inside as GCC does. We do retain the nocode_wanted state,
4727 as statement expressions can't ever be entered from the
4728 outside, so any reactivation of code emission (from labels
4729 or loop heads) can be disabled again after the end of it. */
4730 block(NULL, NULL, 1);
4731 nocode_wanted = saved_nocode_wanted;
4732 skip(')');
4733 } else {
4734 gexpr();
4735 skip(')');
4737 break;
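/* Illustrative sketch (not part of tcc, not compiled): the three things
   a leading '(' can introduce in the case above -- a cast, an ISO C99
   compound literal, and a GNU statement expression.  Sample code only. */
#if 0
int paren_demo(int x)
{
    double d = (double)x;                 /* cast                        */
    int *q = (int[]){ 1, 2, 3 };          /* compound literal (C99)      */
    int y = ({ int t = x * 2; t + 1; });  /* statement expression (GNU)  */
    return (int)d + q[0] + y;
}
#endif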
4738 case '*':
4739 next();
4740 unary();
4741 indir();
4742 break;
4743 case '&':
4744 next();
4745 unary();
4746 /* function names must be treated as function pointers,
4747 except for unary '&' and sizeof. Since we consider that
4748 functions are not lvalues, we only have to handle it
4749 there and in function calls. */
4750 /* arrays can also be used although they are not lvalues */
4751 if ((vtop->type.t & VT_BTYPE) != VT_FUNC &&
4752 !(vtop->type.t & VT_ARRAY))
4753 test_lvalue();
4754 mk_pointer(&vtop->type);
4755 gaddrof();
4756 break;
4757 case '!':
4758 next();
4759 unary();
4760 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4761 gen_cast_s(VT_BOOL);
4762 vtop->c.i = !vtop->c.i;
4763 } else if ((vtop->r & VT_VALMASK) == VT_CMP)
4764 vtop->c.i ^= 1;
4765 else {
4766 save_regs(1);
4767 vseti(VT_JMP, gvtst(1, 0));
4769 break;
4770 case '~':
4771 next();
4772 unary();
4773 vpushi(-1);
4774 gen_op('^');
4775 break;
4776 case '+':
4777 next();
4778 unary();
4779 if ((vtop->type.t & VT_BTYPE) == VT_PTR)
4780 tcc_error("pointer not accepted for unary plus");
4781 /* In order to force cast, we add zero, except for floating point
4782 where we really need a noop (otherwise -0.0 will be transformed
4783 into +0.0). */
4784 if (!is_float(vtop->type.t)) {
4785 vpushi(0);
4786 gen_op('+');
4788 break;
4789 case TOK_SIZEOF:
4790 case TOK_ALIGNOF1:
4791 case TOK_ALIGNOF2:
4792 t = tok;
4793 next();
4794 in_sizeof++;
4795 expr_type(&type, unary); /* Performs in_sizeof = 0; */
4796 s = vtop[1].sym; /* hack: accessing previous vtop */
4797 size = type_size(&type, &align);
4798 if (s && s->a.aligned)
4799 align = 1 << (s->a.aligned - 1);
4800 if (t == TOK_SIZEOF) {
4801 if (!(type.t & VT_VLA)) {
4802 if (size < 0)
4803 tcc_error("sizeof applied to an incomplete type");
4804 vpushs(size);
4805 } else {
4806 vla_runtime_type_size(&type, &align);
4808 } else {
4809 vpushs(align);
4811 vtop->type.t |= VT_UNSIGNED;
4812 break;
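/* Illustrative sketch (not part of tcc, not compiled): sizeof of an
   ordinary object folds to a constant, sizeof of a VLA needs the
   runtime size computed above, and __alignof__ is the GNU spelling
   handled by TOK_ALIGNOF.  Made-up function. */
#if 0
unsigned long size_demo(int n)
{
    char buf[n];                      /* VLA: size computed at run time */
    struct pair { char a; long b; };
    return sizeof(struct pair) + sizeof buf + __alignof__(long);
}
#endif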
4814 case TOK_builtin_expect:
4815 /* __builtin_expect is a no-op for now */
4816 parse_builtin_params(0, "ee");
4817 vpop();
4818 break;
4819 case TOK_builtin_types_compatible_p:
4820 parse_builtin_params(0, "tt");
4821 vtop[-1].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
4822 vtop[0].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
4823 n = is_compatible_types(&vtop[-1].type, &vtop[0].type);
4824 vtop -= 2;
4825 vpushi(n);
4826 break;
4827 case TOK_builtin_choose_expr:
4829 int64_t c;
4830 next();
4831 skip('(');
4832 c = expr_const64();
4833 skip(',');
4834 if (!c) {
4835 nocode_wanted++;
4837 expr_eq();
4838 if (!c) {
4839 vpop();
4840 nocode_wanted--;
4842 skip(',');
4843 if (c) {
4844 nocode_wanted++;
4846 expr_eq();
4847 if (c) {
4848 vpop();
4849 nocode_wanted--;
4851 skip(')');
4853 break;
4854 case TOK_builtin_constant_p:
4855 parse_builtin_params(1, "e");
4856 n = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
4857 vtop--;
4858 vpushi(n);
4859 break;
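/* Illustrative sketch (not part of tcc, not compiled): typical use of
   the three builtins handled above; the unselected branch of
   __builtin_choose_expr is parsed under nocode_wanted and discarded.
   The macro is invented for the example. */
#if 0
#define FMT_OF(x) __builtin_choose_expr( \
        __builtin_types_compatible_p(__typeof__(x), double), "%f", \
        __builtin_choose_expr(__builtin_constant_p(x), "const %d", "%d"))
#endif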
4860 case TOK_builtin_frame_address:
4861 case TOK_builtin_return_address:
4863 int tok1 = tok;
4864 int level;
4865 next();
4866 skip('(');
4867 if (tok != TOK_CINT) {
4868 tcc_error("%s only takes positive integers",
4869 tok1 == TOK_builtin_return_address ?
4870 "__builtin_return_address" :
4871 "__builtin_frame_address");
4873 level = (uint32_t)tokc.i;
4874 next();
4875 skip(')');
4876 type.t = VT_VOID;
4877 mk_pointer(&type);
4878 vset(&type, VT_LOCAL, 0); /* local frame */
4879 while (level--) {
4880 mk_pointer(&vtop->type);
4881 indir(); /* -> parent frame */
4883 if (tok1 == TOK_builtin_return_address) {
4884 // assume return address is just above frame pointer on stack
4885 vpushi(PTR_SIZE);
4886 gen_op('+');
4887 mk_pointer(&vtop->type);
4888 indir();
4891 break;
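/* Illustrative sketch (not part of tcc, not compiled): both builtins
   take a literal non-negative level, 0 meaning the current frame; the
   code above walks that many saved frame pointers.  Hypothetical use. */
#if 0
void *who_called_me(void)
{
    return __builtin_return_address(0);   /* caller's return address */
}
#endif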
4892 #ifdef TCC_TARGET_X86_64
4893 #ifdef TCC_TARGET_PE
4894 case TOK_builtin_va_start:
4895 parse_builtin_params(0, "ee");
4896 r = vtop->r & VT_VALMASK;
4897 if (r == VT_LLOCAL)
4898 r = VT_LOCAL;
4899 if (r != VT_LOCAL)
4900 tcc_error("__builtin_va_start expects a local variable");
4901 vtop->r = r;
4902 vtop->type = char_pointer_type;
4903 vtop->c.i += 8;
4904 vstore();
4905 break;
4906 #else
4907 case TOK_builtin_va_arg_types:
4908 parse_builtin_params(0, "t");
4909 vpushi(classify_x86_64_va_arg(&vtop->type));
4910 vswap();
4911 vpop();
4912 break;
4913 #endif
4914 #endif
4916 #ifdef TCC_TARGET_ARM64
4917 case TOK___va_start: {
4918 parse_builtin_params(0, "ee");
4919 //xx check types
4920 gen_va_start();
4921 vpushi(0);
4922 vtop->type.t = VT_VOID;
4923 break;
4925 case TOK___va_arg: {
4926 parse_builtin_params(0, "et");
4927 type = vtop->type;
4928 vpop();
4929 //xx check types
4930 gen_va_arg(&type);
4931 vtop->type = type;
4932 break;
4934 case TOK___arm64_clear_cache: {
4935 parse_builtin_params(0, "ee");
4936 gen_clear_cache();
4937 vpushi(0);
4938 vtop->type.t = VT_VOID;
4939 break;
4941 #endif
4942 /* pre operations */
4943 case TOK_INC:
4944 case TOK_DEC:
4945 t = tok;
4946 next();
4947 unary();
4948 inc(0, t);
4949 break;
4950 case '-':
4951 next();
4952 unary();
4953 t = vtop->type.t & VT_BTYPE;
4954 if (is_float(t)) {
4955 /* In IEEE negate(x) isn't subtract(0,x), but rather
4956 subtract(-0, x). */
4957 vpush(&vtop->type);
4958 if (t == VT_FLOAT)
4959 vtop->c.f = -1.0 * 0.0;
4960 else if (t == VT_DOUBLE)
4961 vtop->c.d = -1.0 * 0.0;
4962 else
4963 vtop->c.ld = -1.0 * 0.0;
4964 } else
4965 vpushi(0);
4966 vswap();
4967 gen_op('-');
4968 break;
4969 case TOK_LAND:
4970 if (!gnu_ext)
4971 goto tok_identifier;
4972 next();
4973 /* allow taking the address of a label */
4974 if (tok < TOK_UIDENT)
4975 expect("label identifier");
4976 s = label_find(tok);
4977 if (!s) {
4978 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
4979 } else {
4980 if (s->r == LABEL_DECLARED)
4981 s->r = LABEL_FORWARD;
4983 if (!s->type.t) {
4984 s->type.t = VT_VOID;
4985 mk_pointer(&s->type);
4986 s->type.t |= VT_STATIC;
4988 vpushsym(&s->type, s);
4989 next();
4990 break;
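/* Illustrative sketch (not part of tcc, not compiled): the GNU "labels
   as values" extension implemented above -- '&&label' yields a void *
   that can later feed a computed goto (handled in block()).  Made-up
   dispatcher. */
#if 0
int dispatch(int op)
{
    static void *const table[2] = { &&op_inc, &&op_dec };
    int acc = 10;
    goto *table[op & 1];
op_inc: return acc + 1;
op_dec: return acc - 1;
}
#endif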
4992 case TOK_GENERIC:
4994 CType controlling_type;
4995 int has_default = 0;
4996 int has_match = 0;
4997 int learn = 0;
4998 TokenString *str = NULL;
5000 next();
5001 skip('(');
5002 expr_type(&controlling_type, expr_eq);
5003 controlling_type.t &= ~(VT_CONSTANT | VT_VOLATILE | VT_ARRAY);
5004 for (;;) {
5005 learn = 0;
5006 skip(',');
5007 if (tok == TOK_DEFAULT) {
5008 if (has_default)
5009 tcc_error("too many 'default'");
5010 has_default = 1;
5011 if (!has_match)
5012 learn = 1;
5013 next();
5014 } else {
5015 AttributeDef ad_tmp;
5016 int itmp;
5017 CType cur_type;
5018 parse_btype(&cur_type, &ad_tmp);
5019 type_decl(&cur_type, &ad_tmp, &itmp, TYPE_ABSTRACT);
5020 if (compare_types(&controlling_type, &cur_type, 0)) {
5021 if (has_match) {
5022 tcc_error("type match twice");
5024 has_match = 1;
5025 learn = 1;
5028 skip(':');
5029 if (learn) {
5030 if (str)
5031 tok_str_free(str);
5032 skip_or_save_block(&str);
5033 } else {
5034 skip_or_save_block(NULL);
5036 if (tok == ')')
5037 break;
5039 if (!str) {
5040 char buf[60];
5041 type_to_str(buf, sizeof buf, &controlling_type, NULL);
5042 tcc_error("type '%s' does not match any association", buf);
5044 begin_macro(str, 1);
5045 next();
5046 expr_eq();
5047 if (tok != TOK_EOF)
5048 expect(",");
5049 end_macro();
5050 next();
5051 break;
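/* Illustrative sketch (not part of tcc, not compiled): a C11 _Generic
   selection of the kind parsed above; only the matching association is
   expanded, the others are skipped with skip_or_save_block().  The
   macro name is invented. */
#if 0
#define TYPE_NAME(x) _Generic((x), \
        int: "int",                \
        double: "double",          \
        char *: "string",          \
        default: "other")
#endif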
5053 // special qnan , snan and infinity values
5054 case TOK___NAN__:
5055 n = 0x7fc00000;
5056 special_math_val:
5057 vpushi(n);
5058 vtop->type.t = VT_FLOAT;
5059 next();
5060 break;
5061 case TOK___SNAN__:
5062 n = 0x7f800001;
5063 goto special_math_val;
5064 case TOK___INF__:
5065 n = 0x7f800000;
5066 goto special_math_val;
5068 default:
5069 tok_identifier:
5070 t = tok;
5071 next();
5072 if (t < TOK_UIDENT)
5073 expect("identifier");
5074 s = sym_find(t);
5075 if (!s || IS_ASM_SYM(s)) {
5076 const char *name = get_tok_str(t, NULL);
5077 if (tok != '(')
5078 tcc_error("'%s' undeclared", name);
5079 /* for simple function calls, we tolerate an undeclared
5080 external reference to an int() function */
5081 if (tcc_state->warn_implicit_function_declaration
5082 #ifdef TCC_TARGET_PE
5083 /* people must be warned about using undeclared WINAPI functions
5084 (which usually start with an uppercase letter) */
5085 || (name[0] >= 'A' && name[0] <= 'Z')
5086 #endif
5088 tcc_warning("implicit declaration of function '%s'", name);
5089 s = external_global_sym(t, &func_old_type, 0);
5092 r = s->r;
5093 /* A symbol that has a register is a local register variable,
5094 which starts out as VT_LOCAL value. */
5095 if ((r & VT_VALMASK) < VT_CONST)
5096 r = (r & ~VT_VALMASK) | VT_LOCAL;
5098 vset(&s->type, r, s->c);
5099 /* Point to s as backpointer (even without r&VT_SYM).
5100 Will be used by at least the x86 inline asm parser for
5101 regvars. */
5102 vtop->sym = s;
5104 if (r & VT_SYM) {
5105 vtop->c.i = 0;
5106 } else if (r == VT_CONST && IS_ENUM_VAL(s->type.t)) {
5107 vtop->c.i = s->enum_val;
5109 break;
5112 /* post operations */
5113 while (1) {
5114 if (tok == TOK_INC || tok == TOK_DEC) {
5115 inc(1, tok);
5116 next();
5117 } else if (tok == '.' || tok == TOK_ARROW || tok == TOK_CDOUBLE) {
5118 int qualifiers;
5119 /* field */
5120 if (tok == TOK_ARROW)
5121 indir();
5122 qualifiers = vtop->type.t & (VT_CONSTANT | VT_VOLATILE);
5123 test_lvalue();
5124 gaddrof();
5125 /* expect pointer on structure */
5126 if ((vtop->type.t & VT_BTYPE) != VT_STRUCT)
5127 expect("struct or union");
5128 if (tok == TOK_CDOUBLE)
5129 expect("field name");
5130 next();
5131 if (tok == TOK_CINT || tok == TOK_CUINT)
5132 expect("field name");
5133 s = find_field(&vtop->type, tok);
5134 if (!s)
5135 tcc_error("field not found: %s", get_tok_str(tok & ~SYM_FIELD, &tokc));
5136 /* add field offset to pointer */
5137 vtop->type = char_pointer_type; /* change type to 'char *' */
5138 vpushi(s->c);
5139 gen_op('+');
5140 /* change type to field type, and set to lvalue */
5141 vtop->type = s->type;
5142 vtop->type.t |= qualifiers;
5143 /* an array is never an lvalue */
5144 if (!(vtop->type.t & VT_ARRAY)) {
5145 vtop->r |= lvalue_type(vtop->type.t);
5146 #ifdef CONFIG_TCC_BCHECK
5147 /* if bound checking, the referenced pointer must be checked */
5148 if (tcc_state->do_bounds_check && (vtop->r & VT_VALMASK) != VT_LOCAL)
5149 vtop->r |= VT_MUSTBOUND;
5150 #endif
5152 next();
5153 } else if (tok == '[') {
5154 next();
5155 gexpr();
5156 gen_op('+');
5157 indir();
5158 skip(']');
5159 } else if (tok == '(') {
5160 SValue ret;
5161 Sym *sa;
5162 int nb_args, ret_nregs, ret_align, regsize, variadic;
5164 /* function call */
5165 if ((vtop->type.t & VT_BTYPE) != VT_FUNC) {
5166 /* pointer test (no array accepted) */
5167 if ((vtop->type.t & (VT_BTYPE | VT_ARRAY)) == VT_PTR) {
5168 vtop->type = *pointed_type(&vtop->type);
5169 if ((vtop->type.t & VT_BTYPE) != VT_FUNC)
5170 goto error_func;
5171 } else {
5172 error_func:
5173 expect("function pointer");
5175 } else {
5176 vtop->r &= ~VT_LVAL; /* no lvalue */
5178 /* get return type */
5179 s = vtop->type.ref;
5180 next();
5181 sa = s->next; /* first parameter */
5182 nb_args = regsize = 0;
5183 ret.r2 = VT_CONST;
5184 /* compute first implicit argument if a structure is returned */
5185 if ((s->type.t & VT_BTYPE) == VT_STRUCT) {
5186 variadic = (s->f.func_type == FUNC_ELLIPSIS);
5187 ret_nregs = gfunc_sret(&s->type, variadic, &ret.type,
5188 &ret_align, &regsize);
5189 if (!ret_nregs) {
5190 /* get some space for the returned structure */
5191 size = type_size(&s->type, &align);
5192 #ifdef TCC_TARGET_ARM64
5193 /* On arm64, a small struct is returned in registers.
5194 It is much easier to write it to memory if we know
5195 that we are allowed to write some extra bytes, so
5196 round the allocated space up to a power of 2: */
5197 if (size < 16)
5198 while (size & (size - 1))
5199 size = (size | (size - 1)) + 1;
5200 #endif
5201 loc = (loc - size) & -align;
5202 ret.type = s->type;
5203 ret.r = VT_LOCAL | VT_LVAL;
5204 /* pass it as 'int' to avoid structure arg passing
5205 problems */
5206 vseti(VT_LOCAL, loc);
5207 ret.c = vtop->c;
5208 nb_args++;
5210 } else {
5211 ret_nregs = 1;
5212 ret.type = s->type;
5215 if (ret_nregs) {
5216 /* return in register */
5217 if (is_float(ret.type.t)) {
5218 ret.r = reg_fret(ret.type.t);
5219 #ifdef TCC_TARGET_X86_64
5220 if ((ret.type.t & VT_BTYPE) == VT_QFLOAT)
5221 ret.r2 = REG_QRET;
5222 #endif
5223 } else {
5224 #ifndef TCC_TARGET_ARM64
5225 #ifdef TCC_TARGET_X86_64
5226 if ((ret.type.t & VT_BTYPE) == VT_QLONG)
5227 #else
5228 if ((ret.type.t & VT_BTYPE) == VT_LLONG)
5229 #endif
5230 ret.r2 = REG_LRET;
5231 #endif
5232 ret.r = REG_IRET;
5234 ret.c.i = 0;
5236 if (tok != ')') {
5237 for(;;) {
5238 expr_eq();
5239 gfunc_param_typed(s, sa);
5240 nb_args++;
5241 if (sa)
5242 sa = sa->next;
5243 if (tok == ')')
5244 break;
5245 skip(',');
5248 if (sa)
5249 tcc_error("too few arguments to function");
5250 skip(')');
5251 gfunc_call(nb_args);
5253 /* return value */
5254 for (r = ret.r + ret_nregs + !ret_nregs; r-- > ret.r;) {
5255 vsetc(&ret.type, r, &ret.c);
5256 vtop->r2 = ret.r2; /* Loop only happens when r2 is VT_CONST */
5259 /* handle packed struct return */
5260 if (((s->type.t & VT_BTYPE) == VT_STRUCT) && ret_nregs) {
5261 int addr, offset;
5263 size = type_size(&s->type, &align);
5264 /* We often write whole registers, so make sure there's enough
5265 space. Assume the register size is a power of 2. */
5266 if (regsize > align)
5267 align = regsize;
5268 loc = (loc - size) & -align;
5269 addr = loc;
5270 offset = 0;
5271 for (;;) {
5272 vset(&ret.type, VT_LOCAL | VT_LVAL, addr + offset);
5273 vswap();
5274 vstore();
5275 vtop--;
5276 if (--ret_nregs == 0)
5277 break;
5278 offset += regsize;
5280 vset(&s->type, VT_LOCAL | VT_LVAL, addr);
5282 } else {
5283 break;
5288 ST_FUNC void expr_prod(void)
5290 int t;
5292 unary();
5293 while (tok == '*' || tok == '/' || tok == '%') {
5294 t = tok;
5295 next();
5296 unary();
5297 gen_op(t);
5301 ST_FUNC void expr_sum(void)
5303 int t;
5305 expr_prod();
5306 while (tok == '+' || tok == '-') {
5307 t = tok;
5308 next();
5309 expr_prod();
5310 gen_op(t);
5314 static void expr_shift(void)
5316 int t;
5318 expr_sum();
5319 while (tok == TOK_SHL || tok == TOK_SAR) {
5320 t = tok;
5321 next();
5322 expr_sum();
5323 gen_op(t);
5327 static void expr_cmp(void)
5329 int t;
5331 expr_shift();
5332 while ((tok >= TOK_ULE && tok <= TOK_GT) ||
5333 tok == TOK_ULT || tok == TOK_UGE) {
5334 t = tok;
5335 next();
5336 expr_shift();
5337 gen_op(t);
5341 static void expr_cmpeq(void)
5343 int t;
5345 expr_cmp();
5346 while (tok == TOK_EQ || tok == TOK_NE) {
5347 t = tok;
5348 next();
5349 expr_cmp();
5350 gen_op(t);
5354 static void expr_and(void)
5356 expr_cmpeq();
5357 while (tok == '&') {
5358 next();
5359 expr_cmpeq();
5360 gen_op('&');
5364 static void expr_xor(void)
5366 expr_and();
5367 while (tok == '^') {
5368 next();
5369 expr_and();
5370 gen_op('^');
5374 static void expr_or(void)
5376 expr_xor();
5377 while (tok == '|') {
5378 next();
5379 expr_xor();
5380 gen_op('|');
5384 static void expr_land(void)
5386 expr_or();
5387 if (tok == TOK_LAND) {
5388 int t = 0;
5389 for(;;) {
5390 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5391 gen_cast_s(VT_BOOL);
5392 if (vtop->c.i) {
5393 vpop();
5394 } else {
5395 nocode_wanted++;
5396 while (tok == TOK_LAND) {
5397 next();
5398 expr_or();
5399 vpop();
5401 nocode_wanted--;
5402 if (t)
5403 gsym(t);
5404 gen_cast_s(VT_INT);
5405 break;
5407 } else {
5408 if (!t)
5409 save_regs(1);
5410 t = gvtst(1, t);
5412 if (tok != TOK_LAND) {
5413 if (t)
5414 vseti(VT_JMPI, t);
5415 else
5416 vpushi(1);
5417 break;
5419 next();
5420 expr_or();
5425 static void expr_lor(void)
5427 expr_land();
5428 if (tok == TOK_LOR) {
5429 int t = 0;
5430 for(;;) {
5431 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5432 gen_cast_s(VT_BOOL);
5433 if (!vtop->c.i) {
5434 vpop();
5435 } else {
5436 nocode_wanted++;
5437 while (tok == TOK_LOR) {
5438 next();
5439 expr_land();
5440 vpop();
5442 nocode_wanted--;
5443 if (t)
5444 gsym(t);
5445 gen_cast_s(VT_INT);
5446 break;
5448 } else {
5449 if (!t)
5450 save_regs(1);
5451 t = gvtst(0, t);
5453 if (tok != TOK_LOR) {
5454 if (t)
5455 vseti(VT_JMP, t);
5456 else
5457 vpushi(0);
5458 break;
5460 next();
5461 expr_land();
5466 /* Assuming vtop is a value used in a conditional context
5467 (i.e. compared with zero) return 0 if it's false, 1 if
5468 true and -1 if it can't be statically determined. */
5469 static int condition_3way(void)
5471 int c = -1;
5472 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
5473 (!(vtop->r & VT_SYM) || !vtop->sym->a.weak)) {
5474 vdup();
5475 gen_cast_s(VT_BOOL);
5476 c = vtop->c.i;
5477 vpop();
5479 return c;
5482 static void expr_cond(void)
5484 int tt, u, r1, r2, rc, t1, t2, bt1, bt2, islv, c, g;
5485 SValue sv;
5486 CType type, type1, type2;
5488 expr_lor();
5489 if (tok == '?') {
5490 next();
5491 c = condition_3way();
5492 g = (tok == ':' && gnu_ext);
5493 if (c < 0) {
5494 /* needed to avoid having different registers saved in
5495 each branch */
5496 if (is_float(vtop->type.t)) {
5497 rc = RC_FLOAT;
5498 #ifdef TCC_TARGET_X86_64
5499 if ((vtop->type.t & VT_BTYPE) == VT_LDOUBLE) {
5500 rc = RC_ST0;
5502 #endif
5503 } else
5504 rc = RC_INT;
5505 gv(rc);
5506 save_regs(1);
5507 if (g)
5508 gv_dup();
5509 tt = gvtst(1, 0);
5511 } else {
5512 if (!g)
5513 vpop();
5514 tt = 0;
5517 if (1) {
5518 if (c == 0)
5519 nocode_wanted++;
5520 if (!g)
5521 gexpr();
5523 type1 = vtop->type;
5524 sv = *vtop; /* save value to handle it later */
5525 vtop--; /* no vpop so that FP stack is not flushed */
5526 skip(':');
5528 u = 0;
5529 if (c < 0)
5530 u = gjmp(0);
5531 gsym(tt);
5533 if (c == 0)
5534 nocode_wanted--;
5535 if (c == 1)
5536 nocode_wanted++;
5537 expr_cond();
5538 if (c == 1)
5539 nocode_wanted--;
5541 type2 = vtop->type;
5542 t1 = type1.t;
5543 bt1 = t1 & VT_BTYPE;
5544 t2 = type2.t;
5545 bt2 = t2 & VT_BTYPE;
5546 type.ref = NULL;
5548 /* cast operands to correct type according to ISOC rules */
5549 if (is_float(bt1) || is_float(bt2)) {
5550 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
5551 type.t = VT_LDOUBLE;
5553 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
5554 type.t = VT_DOUBLE;
5555 } else {
5556 type.t = VT_FLOAT;
5558 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
5559 /* cast to biggest op */
5560 type.t = VT_LLONG | VT_LONG;
5561 if (bt1 == VT_LLONG)
5562 type.t &= t1;
5563 if (bt2 == VT_LLONG)
5564 type.t &= t2;
5565 /* convert to unsigned if it does not fit in a long long */
5566 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
5567 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
5568 type.t |= VT_UNSIGNED;
5569 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
5570 /* If one is a null ptr constant the result type
5571 is the other. */
5572 if (is_null_pointer (vtop))
5573 type = type1;
5574 else if (is_null_pointer (&sv))
5575 type = type2;
5576 /* XXX: test pointer compatibility, C99 has more elaborate
5577 rules here. */
5578 else
5579 type = type1;
5580 } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
5581 /* XXX: test function pointer compatibility */
5582 type = bt1 == VT_FUNC ? type1 : type2;
5583 } else if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
5584 /* XXX: test structure compatibility */
5585 type = bt1 == VT_STRUCT ? type1 : type2;
5586 } else if (bt1 == VT_VOID || bt2 == VT_VOID) {
5587 /* NOTE: as an extension, we accept void on only one side */
5588 type.t = VT_VOID;
5589 } else {
5590 /* integer operations */
5591 type.t = VT_INT | (VT_LONG & (t1 | t2));
5592 /* convert to unsigned if it does not fit in an integer */
5593 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
5594 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
5595 type.t |= VT_UNSIGNED;
5597 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
5598 that `(expr ? a : b).mem` does not error with "lvalue expected" */
5599 islv = (vtop->r & VT_LVAL) && (sv.r & VT_LVAL) && VT_STRUCT == (type.t & VT_BTYPE);
5600 islv &= c < 0;
5602 /* now we convert second operand */
5603 if (c != 1) {
5604 gen_cast(&type);
5605 if (islv) {
5606 mk_pointer(&vtop->type);
5607 gaddrof();
5608 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5609 gaddrof();
5612 rc = RC_INT;
5613 if (is_float(type.t)) {
5614 rc = RC_FLOAT;
5615 #ifdef TCC_TARGET_X86_64
5616 if ((type.t & VT_BTYPE) == VT_LDOUBLE) {
5617 rc = RC_ST0;
5619 #endif
5620 } else if ((type.t & VT_BTYPE) == VT_LLONG) {
5621 /* for long longs, we use fixed registers to avoid having
5622 to handle a complicated move */
5623 rc = RC_IRET;
5626 tt = r2 = 0;
5627 if (c < 0) {
5628 r2 = gv(rc);
5629 tt = gjmp(0);
5631 gsym(u);
5633 /* this is horrible, but we must also convert first
5634 operand */
5635 if (c != 0) {
5636 *vtop = sv;
5637 gen_cast(&type);
5638 if (islv) {
5639 mk_pointer(&vtop->type);
5640 gaddrof();
5641 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5642 gaddrof();
5645 if (c < 0) {
5646 r1 = gv(rc);
5647 move_reg(r2, r1, type.t);
5648 vtop->r = r2;
5649 gsym(tt);
5650 if (islv)
5651 indir();
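/* Illustrative sketch (not part of tcc, not compiled): the usual
   arithmetic conversions applied to '?:' operands as implemented
   above -- int vs unsigned gives unsigned, int vs long long gives
   long long, anything vs double gives double.  Made-up checks. */
#if 0
int cond_type_demo(int i, unsigned u, long long ll, double d)
{
    return (i ? i : u) > 0u                       /* unsigned result */
        && sizeof(i ? i : ll) == sizeof(long long)
        && sizeof(i ? i : d) == sizeof(double);
}
#endif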
5657 static void expr_eq(void)
5659 int t;
5661 expr_cond();
5662 if (tok == '=' ||
5663 (tok >= TOK_A_MOD && tok <= TOK_A_DIV) ||
5664 tok == TOK_A_XOR || tok == TOK_A_OR ||
5665 tok == TOK_A_SHL || tok == TOK_A_SAR) {
5666 test_lvalue();
5667 t = tok;
5668 next();
5669 if (t == '=') {
5670 expr_eq();
5671 } else {
5672 vdup();
5673 expr_eq();
5674 gen_op(t & 0x7f);
5676 vstore();
5680 ST_FUNC void gexpr(void)
5682 while (1) {
5683 expr_eq();
5684 if (tok != ',')
5685 break;
5686 vpop();
5687 next();
5691 /* parse a constant expression and return value in vtop. */
5692 static void expr_const1(void)
5694 const_wanted++;
5695 nocode_wanted++;
5696 expr_cond();
5697 nocode_wanted--;
5698 const_wanted--;
5701 /* parse an integer constant and return its value. */
5702 static inline int64_t expr_const64(void)
5704 int64_t c;
5705 expr_const1();
5706 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
5707 expect("constant expression");
5708 c = vtop->c.i;
5709 vpop();
5710 return c;
5713 /* parse an integer constant and return its value.
5714 Complain if it doesn't fit 32bit (signed or unsigned). */
5715 ST_FUNC int expr_const(void)
5717 int c;
5718 int64_t wc = expr_const64();
5719 c = wc;
5720 if (c != wc && (unsigned)c != wc)
5721 tcc_error("constant exceeds 32 bit");
5722 return c;
5725 /* return the label token if current token is a label, otherwise
5726 return zero */
5727 static int is_label(void)
5729 int last_tok;
5731 /* fast test first */
5732 if (tok < TOK_UIDENT)
5733 return 0;
5734 /* no need to save tokc because tok is an identifier */
5735 last_tok = tok;
5736 next();
5737 if (tok == ':') {
5738 return last_tok;
5739 } else {
5740 unget_tok(last_tok);
5741 return 0;
5745 #ifndef TCC_TARGET_ARM64
5746 static void gfunc_return(CType *func_type)
5748 if ((func_type->t & VT_BTYPE) == VT_STRUCT) {
5749 CType type, ret_type;
5750 int ret_align, ret_nregs, regsize;
5751 ret_nregs = gfunc_sret(func_type, func_var, &ret_type,
5752 &ret_align, &regsize);
5753 if (0 == ret_nregs) {
5754 /* if returning structure, must copy it to implicit
5755 first pointer arg location */
5756 type = *func_type;
5757 mk_pointer(&type);
5758 vset(&type, VT_LOCAL | VT_LVAL, func_vc);
5759 indir();
5760 vswap();
5761 /* copy structure value to pointer */
5762 vstore();
5763 } else {
5764 /* returning structure packed into registers */
5765 int r, size, addr, align;
5766 size = type_size(func_type,&align);
5767 if ((vtop->r != (VT_LOCAL | VT_LVAL) ||
5768 (vtop->c.i & (ret_align-1)))
5769 && (align & (ret_align-1))) {
5770 loc = (loc - size) & -ret_align;
5771 addr = loc;
5772 type = *func_type;
5773 vset(&type, VT_LOCAL | VT_LVAL, addr);
5774 vswap();
5775 vstore();
5776 vpop();
5777 vset(&ret_type, VT_LOCAL | VT_LVAL, addr);
5779 vtop->type = ret_type;
5780 if (is_float(ret_type.t))
5781 r = rc_fret(ret_type.t);
5782 else
5783 r = RC_IRET;
5785 if (ret_nregs == 1)
5786 gv(r);
5787 else {
5788 for (;;) {
5789 vdup();
5790 gv(r);
5791 vpop();
5792 if (--ret_nregs == 0)
5793 break;
5794 /* We assume that when a structure is returned in multiple
5795 registers, their classes are consecutive values of the
5796 sequence s(n) = 2^n */
5797 r <<= 1;
5798 vtop->c.i += regsize;
5802 } else if (is_float(func_type->t)) {
5803 gv(rc_fret(func_type->t));
5804 } else {
5805 gv(RC_IRET);
5807 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
5809 #endif
5811 static int case_cmp(const void *pa, const void *pb)
5813 int64_t a = (*(struct case_t**) pa)->v1;
5814 int64_t b = (*(struct case_t**) pb)->v1;
5815 return a < b ? -1 : a > b;
5818 static void gcase(struct case_t **base, int len, int *bsym)
5820 struct case_t *p;
5821 int e;
5822 int ll = (vtop->type.t & VT_BTYPE) == VT_LLONG;
5823 gv(RC_INT);
5824 while (len > 4) {
5825 /* binary search */
5826 p = base[len/2];
5827 vdup();
5828 if (ll)
5829 vpushll(p->v2);
5830 else
5831 vpushi(p->v2);
5832 gen_op(TOK_LE);
5833 e = gtst(1, 0);
5834 vdup();
5835 if (ll)
5836 vpushll(p->v1);
5837 else
5838 vpushi(p->v1);
5839 gen_op(TOK_GE);
5840 gtst_addr(0, p->sym); /* v1 <= x <= v2 */
5841 /* x < v1 */
5842 gcase(base, len/2, bsym);
5843 if (cur_switch->def_sym)
5844 gjmp_addr(cur_switch->def_sym);
5845 else
5846 *bsym = gjmp(*bsym);
5847 /* x > v2 */
5848 gsym(e);
5849 e = len/2 + 1;
5850 base += e; len -= e;
5852 /* linear scan */
5853 while (len--) {
5854 p = *base++;
5855 vdup();
5856 if (ll)
5857 vpushll(p->v2);
5858 else
5859 vpushi(p->v2);
5860 if (p->v1 == p->v2) {
5861 gen_op(TOK_EQ);
5862 gtst_addr(0, p->sym);
5863 } else {
5864 gen_op(TOK_LE);
5865 e = gtst(1, 0);
5866 vdup();
5867 if (ll)
5868 vpushll(p->v1);
5869 else
5870 vpushi(p->v1);
5871 gen_op(TOK_GE);
5872 gtst_addr(0, p->sym);
5873 gsym(e);
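/* Illustrative sketch (not part of tcc, not compiled): a switch using
   the GNU case-range extension 'case lo ... hi:'; gcase() above
   binary-searches the sorted (v1,v2) pairs and falls back to a linear
   scan for short runs.  Hypothetical classifier. */
#if 0
int classify(int c)
{
    switch (c) {
    case '0' ... '9': return 1;     /* digit  */
    case 'a' ... 'z':
    case 'A' ... 'Z': return 2;     /* letter */
    default:          return 0;
    }
}
#endif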
5878 static void block(int *bsym, int *csym, int is_expr)
5880 int a, b, c, d, cond;
5881 Sym *s;
5883 /* generate line number info */
5884 if (tcc_state->do_debug)
5885 tcc_debug_line(tcc_state);
5887 if (is_expr) {
5888 /* default return value is (void) */
5889 vpushi(0);
5890 vtop->type.t = VT_VOID;
5893 if (tok == TOK_IF) {
5894 /* if test */
5895 int saved_nocode_wanted = nocode_wanted;
5896 next();
5897 skip('(');
5898 gexpr();
5899 skip(')');
5900 cond = condition_3way();
5901 if (cond == 1)
5902 a = 0, vpop();
5903 else
5904 a = gvtst(1, 0);
5905 if (cond == 0)
5906 nocode_wanted |= 0x20000000;
5907 block(bsym, csym, 0);
5908 if (cond != 1)
5909 nocode_wanted = saved_nocode_wanted;
5910 c = tok;
5911 if (c == TOK_ELSE) {
5912 next();
5913 d = gjmp(0);
5914 gsym(a);
5915 if (cond == 1)
5916 nocode_wanted |= 0x20000000;
5917 block(bsym, csym, 0);
5918 gsym(d); /* patch else jmp */
5919 if (cond != 0)
5920 nocode_wanted = saved_nocode_wanted;
5921 } else
5922 gsym(a);
5923 } else if (tok == TOK_WHILE) {
5924 int saved_nocode_wanted;
5925 nocode_wanted &= ~0x20000000;
5926 next();
5927 d = ind;
5928 vla_sp_restore();
5929 skip('(');
5930 gexpr();
5931 skip(')');
5932 a = gvtst(1, 0);
5933 b = 0;
5934 ++local_scope;
5935 saved_nocode_wanted = nocode_wanted;
5936 block(&a, &b, 0);
5937 nocode_wanted = saved_nocode_wanted;
5938 --local_scope;
5939 gjmp_addr(d);
5940 gsym(a);
5941 gsym_addr(b, d);
5942 } else if (tok == '{') {
5943 Sym *llabel;
5944 int block_vla_sp_loc = vla_sp_loc, saved_vlas_in_scope = vlas_in_scope;
5946 next();
5947 /* record local declaration stack position */
5948 s = local_stack;
5949 llabel = local_label_stack;
5950 ++local_scope;
5952 /* handle local labels declarations */
5953 if (tok == TOK_LABEL) {
5954 next();
5955 for(;;) {
5956 if (tok < TOK_UIDENT)
5957 expect("label identifier");
5958 label_push(&local_label_stack, tok, LABEL_DECLARED);
5959 next();
5960 if (tok == ',') {
5961 next();
5962 } else {
5963 skip(';');
5964 break;
5968 while (tok != '}') {
5969 if ((a = is_label()))
5970 unget_tok(a);
5971 else
5972 decl(VT_LOCAL);
5973 if (tok != '}') {
5974 if (is_expr)
5975 vpop();
5976 block(bsym, csym, is_expr);
5979 /* pop locally defined labels */
5980 label_pop(&local_label_stack, llabel, is_expr);
5981 /* pop locally defined symbols */
5982 --local_scope;
5983 /* In the is_expr case (a statement expression is finished here),
5984 vtop might refer to symbols on the local_stack. Either via the
5985 type or via vtop->sym. We can't pop those nor any that in turn
5986 might be referred to. To make it easier we don't roll back
5987 any symbols in that case; some upper level call to block() will
5988 do that. We do have to remove such symbols from the lookup
5989 tables, though. sym_pop will do that. */
5990 sym_pop(&local_stack, s, is_expr);
5992 /* Pop VLA frames and restore stack pointer if required */
5993 if (vlas_in_scope > saved_vlas_in_scope) {
5994 vla_sp_loc = saved_vlas_in_scope ? block_vla_sp_loc : vla_sp_root_loc;
5995 vla_sp_restore();
5997 vlas_in_scope = saved_vlas_in_scope;
5999 next();
6000 } else if (tok == TOK_RETURN) {
6001 next();
6002 if (tok != ';') {
6003 gexpr();
6004 gen_assign_cast(&func_vt);
6005 if ((func_vt.t & VT_BTYPE) == VT_VOID)
6006 vtop--;
6007 else
6008 gfunc_return(&func_vt);
6010 skip(';');
6011 /* jump unless last stmt in top-level block */
6012 if (tok != '}' || local_scope != 1)
6013 rsym = gjmp(rsym);
6014 nocode_wanted |= 0x20000000;
6015 } else if (tok == TOK_BREAK) {
6016 /* compute jump */
6017 if (!bsym)
6018 tcc_error("cannot break");
6019 *bsym = gjmp(*bsym);
6020 next();
6021 skip(';');
6022 nocode_wanted |= 0x20000000;
6023 } else if (tok == TOK_CONTINUE) {
6024 /* compute jump */
6025 if (!csym)
6026 tcc_error("cannot continue");
6027 vla_sp_restore_root();
6028 *csym = gjmp(*csym);
6029 next();
6030 skip(';');
6031 } else if (tok == TOK_FOR) {
6032 int e;
6033 int saved_nocode_wanted;
6034 nocode_wanted &= ~0x20000000;
6035 next();
6036 skip('(');
6037 s = local_stack;
6038 ++local_scope;
6039 if (tok != ';') {
6040 /* c99 for-loop init decl? */
6041 if (!decl0(VT_LOCAL, 1, NULL)) {
6042 /* no, regular for-loop init expr */
6043 gexpr();
6044 vpop();
6047 skip(';');
6048 d = ind;
6049 c = ind;
6050 vla_sp_restore();
6051 a = 0;
6052 b = 0;
6053 if (tok != ';') {
6054 gexpr();
6055 a = gvtst(1, 0);
6057 skip(';');
6058 if (tok != ')') {
6059 e = gjmp(0);
6060 c = ind;
6061 vla_sp_restore();
6062 gexpr();
6063 vpop();
6064 gjmp_addr(d);
6065 gsym(e);
6067 skip(')');
6068 saved_nocode_wanted = nocode_wanted;
6069 block(&a, &b, 0);
6070 nocode_wanted = saved_nocode_wanted;
6071 gjmp_addr(c);
6072 gsym(a);
6073 gsym_addr(b, c);
6074 --local_scope;
6075 sym_pop(&local_stack, s, 0);
6077 } else
6078 if (tok == TOK_DO) {
6079 int saved_nocode_wanted;
6080 nocode_wanted &= ~0x20000000;
6081 next();
6082 a = 0;
6083 b = 0;
6084 d = ind;
6085 vla_sp_restore();
6086 saved_nocode_wanted = nocode_wanted;
6087 block(&a, &b, 0);
6088 skip(TOK_WHILE);
6089 skip('(');
6090 gsym(b);
6091 gexpr();
6092 c = gvtst(0, 0);
6093 gsym_addr(c, d);
6094 nocode_wanted = saved_nocode_wanted;
6095 skip(')');
6096 gsym(a);
6097 skip(';');
6098 } else
6099 if (tok == TOK_SWITCH) {
6100 struct switch_t *saved, sw;
6101 int saved_nocode_wanted = nocode_wanted;
6102 SValue switchval;
6103 next();
6104 skip('(');
6105 gexpr();
6106 skip(')');
6107 switchval = *vtop--;
6108 a = 0;
6109 b = gjmp(0); /* jump to first case */
6110 sw.p = NULL; sw.n = 0; sw.def_sym = 0;
6111 saved = cur_switch;
6112 cur_switch = &sw;
6113 block(&a, csym, 0);
6114 nocode_wanted = saved_nocode_wanted;
6115 a = gjmp(a); /* add implicit break */
6116 /* case lookup */
6117 gsym(b);
6118 qsort(sw.p, sw.n, sizeof(void*), case_cmp);
6119 for (b = 1; b < sw.n; b++)
6120 if (sw.p[b - 1]->v2 >= sw.p[b]->v1)
6121 tcc_error("duplicate case value");
6122 /* Our switch table sorting is signed, so the compared
6123 value needs to be as well when it's 64bit. */
6124 if ((switchval.type.t & VT_BTYPE) == VT_LLONG)
6125 switchval.type.t &= ~VT_UNSIGNED;
6126 vpushv(&switchval);
6127 gcase(sw.p, sw.n, &a);
6128 vpop();
6129 if (sw.def_sym)
6130 gjmp_addr(sw.def_sym);
6131 dynarray_reset(&sw.p, &sw.n);
6132 cur_switch = saved;
6133 /* break label */
6134 gsym(a);
6135 } else
6136 if (tok == TOK_CASE) {
6137 struct case_t *cr = tcc_malloc(sizeof(struct case_t));
6138 if (!cur_switch)
6139 expect("switch");
6140 nocode_wanted &= ~0x20000000;
6141 next();
6142 cr->v1 = cr->v2 = expr_const64();
6143 if (gnu_ext && tok == TOK_DOTS) {
6144 next();
6145 cr->v2 = expr_const64();
6146 if (cr->v2 < cr->v1)
6147 tcc_warning("empty case range");
6149 cr->sym = ind;
6150 dynarray_add(&cur_switch->p, &cur_switch->n, cr);
6151 skip(':');
6152 is_expr = 0;
6153 goto block_after_label;
6154 } else
6155 if (tok == TOK_DEFAULT) {
6156 next();
6157 skip(':');
6158 if (!cur_switch)
6159 expect("switch");
6160 if (cur_switch->def_sym)
6161 tcc_error("too many 'default'");
6162 cur_switch->def_sym = ind;
6163 is_expr = 0;
6164 goto block_after_label;
6165 } else
6166 if (tok == TOK_GOTO) {
6167 next();
6168 if (tok == '*' && gnu_ext) {
6169 /* computed goto */
6170 next();
6171 gexpr();
6172 if ((vtop->type.t & VT_BTYPE) != VT_PTR)
6173 expect("pointer");
6174 ggoto();
6175 } else if (tok >= TOK_UIDENT) {
6176 s = label_find(tok);
6177 /* put forward definition if needed */
6178 if (!s) {
6179 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
6180 } else {
6181 if (s->r == LABEL_DECLARED)
6182 s->r = LABEL_FORWARD;
6184 vla_sp_restore_root();
6185 if (s->r & LABEL_FORWARD)
6186 s->jnext = gjmp(s->jnext);
6187 else
6188 gjmp_addr(s->jnext);
6189 next();
6190 } else {
6191 expect("label identifier");
6193 skip(';');
6194 } else if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
6195 asm_instr();
6196 } else {
6197 b = is_label();
6198 if (b) {
6199 /* label case */
6200 next();
6201 s = label_find(b);
6202 if (s) {
6203 if (s->r == LABEL_DEFINED)
6204 tcc_error("duplicate label '%s'", get_tok_str(s->v, NULL));
6205 gsym(s->jnext);
6206 s->r = LABEL_DEFINED;
6207 } else {
6208 s = label_push(&global_label_stack, b, LABEL_DEFINED);
6210 s->jnext = ind;
6211 vla_sp_restore();
6212 /* we accept this, but it is a mistake */
6213 block_after_label:
6214 nocode_wanted &= ~0x20000000;
6215 if (tok == '}') {
6216 tcc_warning("deprecated use of label at end of compound statement");
6217 } else {
6218 if (is_expr)
6219 vpop();
6220 block(bsym, csym, is_expr);
6222 } else {
6223 /* expression case */
6224 if (tok != ';') {
6225 if (is_expr) {
6226 vpop();
6227 gexpr();
6228 } else {
6229 gexpr();
6230 vpop();
6233 skip(';');
6238 /* This skips over a stream of tokens containing balanced {} and ()
6239 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
6240 with a '{'). If STR is non-NULL, the skipped tokens are allocated and stored
6241 in *STR. This doesn't check if () and {} are nested correctly,
6242 i.e. "({)}" is accepted. */
6243 static void skip_or_save_block(TokenString **str)
6245 int braces = tok == '{';
6246 int level = 0;
6247 if (str)
6248 *str = tok_str_alloc();
6250 while ((level > 0 || (tok != '}' && tok != ',' && tok != ';' && tok != ')'))) {
6251 int t;
6252 if (tok == TOK_EOF) {
6253 if (str || level > 0)
6254 tcc_error("unexpected end of file");
6255 else
6256 break;
6258 if (str)
6259 tok_str_add_tok(*str);
6260 t = tok;
6261 next();
6262 if (t == '{' || t == '(') {
6263 level++;
6264 } else if (t == '}' || t == ')') {
6265 level--;
6266 if (level == 0 && braces && t == '}')
6267 break;
6270 if (str) {
6271 tok_str_add(*str, -1);
6272 tok_str_add(*str, 0);
6276 #define EXPR_CONST 1
6277 #define EXPR_ANY 2
6279 static void parse_init_elem(int expr_type)
6281 int saved_global_expr;
6282 switch(expr_type) {
6283 case EXPR_CONST:
6284 /* compound literals must be allocated globally in this case */
6285 saved_global_expr = global_expr;
6286 global_expr = 1;
6287 expr_const1();
6288 global_expr = saved_global_expr;
6289 /* NOTE: symbols are accepted, as well as lvalue for anon symbols
6290 (compound literals). */
6291 if (((vtop->r & (VT_VALMASK | VT_LVAL)) != VT_CONST
6292 && ((vtop->r & (VT_SYM|VT_LVAL)) != (VT_SYM|VT_LVAL)
6293 || vtop->sym->v < SYM_FIRST_ANOM))
6294 #ifdef TCC_TARGET_PE
6295 || ((vtop->r & VT_SYM) && vtop->sym->a.dllimport)
6296 #endif
6298 tcc_error("initializer element is not constant");
6299 break;
6300 case EXPR_ANY:
6301 expr_eq();
6302 break;
6306 /* put zeros for variable based init */
6307 static void init_putz(Section *sec, unsigned long c, int size)
6309 if (sec) {
6310 /* nothing to do because globals are already set to zero */
6311 } else {
6312 vpush_global_sym(&func_old_type, TOK_memset);
6313 vseti(VT_LOCAL, c);
6314 #ifdef TCC_TARGET_ARM
6315 vpushs(size);
6316 vpushi(0);
6317 #else
6318 vpushi(0);
6319 vpushs(size);
6320 #endif
6321 gfunc_call(3);
6325 /* t is the array or struct type. c is the array or struct
6326 address. cur_field is the pointer to the current
6327 field, for arrays the 'c' member contains the current start
6328 index. 'size_only' is true if only size info is needed (only used
6329 in arrays). al contains the already initialized length of the
6330 current container (starting at c). This returns the new length of that. */
6331 static int decl_designator(CType *type, Section *sec, unsigned long c,
6332 Sym **cur_field, int size_only, int al)
6334 Sym *s, *f;
6335 int index, index_last, align, l, nb_elems, elem_size;
6336 unsigned long corig = c;
6338 elem_size = 0;
6339 nb_elems = 1;
6340 if (gnu_ext && (l = is_label()) != 0)
6341 goto struct_field;
6342 /* NOTE: we only support ranges for last designator */
6343 while (nb_elems == 1 && (tok == '[' || tok == '.')) {
6344 if (tok == '[') {
6345 if (!(type->t & VT_ARRAY))
6346 expect("array type");
6347 next();
6348 index = index_last = expr_const();
6349 if (tok == TOK_DOTS && gnu_ext) {
6350 next();
6351 index_last = expr_const();
6353 skip(']');
6354 s = type->ref;
6355 if (index < 0 || (s->c >= 0 && index_last >= s->c) ||
6356 index_last < index)
6357 tcc_error("invalid index");
6358 if (cur_field)
6359 (*cur_field)->c = index_last;
6360 type = pointed_type(type);
6361 elem_size = type_size(type, &align);
6362 c += index * elem_size;
6363 nb_elems = index_last - index + 1;
6364 } else {
6365 next();
6366 l = tok;
6367 struct_field:
6368 next();
6369 if ((type->t & VT_BTYPE) != VT_STRUCT)
6370 expect("struct/union type");
6371 f = find_field(type, l);
6372 if (!f)
6373 expect("field");
6374 if (cur_field)
6375 *cur_field = f;
6376 type = &f->type;
6377 c += f->c;
6379 cur_field = NULL;
6381 if (!cur_field) {
6382 if (tok == '=') {
6383 next();
6384 } else if (!gnu_ext) {
6385 expect("=");
6387 } else {
6388 if (type->t & VT_ARRAY) {
6389 index = (*cur_field)->c;
6390 if (type->ref->c >= 0 && index >= type->ref->c)
6391 tcc_error("index too large");
6392 type = pointed_type(type);
6393 c += index * type_size(type, &align);
6394 } else {
6395 f = *cur_field;
6396 while (f && (f->v & SYM_FIRST_ANOM) && (f->type.t & VT_BITFIELD))
6397 *cur_field = f = f->next;
6398 if (!f)
6399 tcc_error("too many field init");
6400 type = &f->type;
6401 c += f->c;
6404 /* must put zero in holes (note that doing it that way
6405 ensures that it even works with designators) */
6406 if (!size_only && c - corig > al)
6407 init_putz(sec, corig + al, c - corig - al);
6408 decl_initializer(type, sec, c, 0, size_only);
6410 /* XXX: make it more general */
6411 if (!size_only && nb_elems > 1) {
6412 unsigned long c_end;
6413 uint8_t *src, *dst;
6414 int i;
6416 if (!sec) {
6417 vset(type, VT_LOCAL|VT_LVAL, c);
6418 for (i = 1; i < nb_elems; i++) {
6419 vset(type, VT_LOCAL|VT_LVAL, c + elem_size * i);
6420 vswap();
6421 vstore();
6423 vpop();
6424 } else if (!NODATA_WANTED) {
6425 c_end = c + nb_elems * elem_size;
6426 if (c_end > sec->data_allocated)
6427 section_realloc(sec, c_end);
6428 src = sec->data + c;
6429 dst = src;
6430 for(i = 1; i < nb_elems; i++) {
6431 dst += elem_size;
6432 memcpy(dst, src, elem_size);
6436 c += nb_elems * type_size(type, &align);
6437 if (c - corig > al)
6438 al = c - corig;
6439 return al;
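/* Illustrative sketch (not part of tcc, not compiled): the designator
   forms handled above -- '.field', '[index]' and the GNU range
   '[lo ... hi]', which triggers the nb_elems > 1 copy loop.  Example
   data is made up. */
#if 0
struct point { int x, y; };
struct point origin = { .y = 0, .x = 0 };
int small_primes[10] = { [0] = 2, [1] = 3, [2 ... 4] = 5 };
#endif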
6442 /* store a value or an expression directly in global data or in local array */
6443 static void init_putv(CType *type, Section *sec, unsigned long c)
6445 int bt;
6446 void *ptr;
6447 CType dtype;
6449 dtype = *type;
6450 dtype.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
6452 if (sec) {
6453 int size, align;
6454 /* XXX: not portable */
6455 /* XXX: generate error if incorrect relocation */
6456 gen_assign_cast(&dtype);
6457 bt = type->t & VT_BTYPE;
6459 if ((vtop->r & VT_SYM)
6460 && bt != VT_PTR
6461 && bt != VT_FUNC
6462 && (bt != (PTR_SIZE == 8 ? VT_LLONG : VT_INT)
6463 || (type->t & VT_BITFIELD))
6464 && !((vtop->r & VT_CONST) && vtop->sym->v >= SYM_FIRST_ANOM)
6466 tcc_error("initializer element is not computable at load time");
6468 if (NODATA_WANTED) {
6469 vtop--;
6470 return;
6473 size = type_size(type, &align);
6474 section_reserve(sec, c + size);
6475 ptr = sec->data + c;
6477 /* XXX: make code faster ? */
6478 if ((vtop->r & (VT_SYM|VT_CONST)) == (VT_SYM|VT_CONST) &&
6479 vtop->sym->v >= SYM_FIRST_ANOM &&
6480 /* XXX This rejects compound literals like
6481 '(void *){ptr}'. The problem is that '&sym' is
6482 represented the same way, which would be ruled out
6483 by the SYM_FIRST_ANOM check above, but also '"string"'
6484 in 'char *p = "string"' is represented the same
6485 with the type being VT_PTR and the symbol being an
6486 anonymous one. That is, there's no difference in vtop
6487 between '(void *){x}' and '&(void *){x}'. Ignore
6488 pointer typed entities here. Hopefully no real code
6489 will ever use compound literals with scalar type. */
6490 (vtop->type.t & VT_BTYPE) != VT_PTR) {
6491 /* These come from compound literals, memcpy stuff over. */
6492 Section *ssec;
6493 ElfSym *esym;
6494 ElfW_Rel *rel;
6495 esym = elfsym(vtop->sym);
6496 ssec = tcc_state->sections[esym->st_shndx];
6497 memmove (ptr, ssec->data + esym->st_value, size);
6498 if (ssec->reloc) {
6499 /* We need to copy over all memory contents, and that
6500 includes relocations. Use the fact that relocs are
6501 created in order, so look from the end of relocs
6502 until we hit one before the copied region. */
6503 int num_relocs = ssec->reloc->data_offset / sizeof(*rel);
6504 rel = (ElfW_Rel*)(ssec->reloc->data + ssec->reloc->data_offset);
6505 while (num_relocs--) {
6506 rel--;
6507 if (rel->r_offset >= esym->st_value + size)
6508 continue;
6509 if (rel->r_offset < esym->st_value)
6510 break;
6511 /* Note: if the same fields are initialized multiple
6512 times (possible with designators) then we possibly
6513 add multiple relocations for the same offset here.
6514 That would lead to wrong code, the last reloc needs
6515 to win. We clean this up later after the whole
6516 initializer is parsed. */
6517 put_elf_reloca(symtab_section, sec,
6518 c + rel->r_offset - esym->st_value,
6519 ELFW(R_TYPE)(rel->r_info),
6520 ELFW(R_SYM)(rel->r_info),
6521 #if PTR_SIZE == 8
6522 rel->r_addend
6523 #else
6525 #endif
6529 } else {
6530 if (type->t & VT_BITFIELD) {
6531 int bit_pos, bit_size, bits, n;
6532 unsigned char *p, v, m;
6533 bit_pos = BIT_POS(vtop->type.t);
6534 bit_size = BIT_SIZE(vtop->type.t);
6535 p = (unsigned char*)ptr + (bit_pos >> 3);
6536 bit_pos &= 7, bits = 0;
6537 while (bit_size) {
6538 n = 8 - bit_pos;
6539 if (n > bit_size)
6540 n = bit_size;
6541 v = vtop->c.i >> bits << bit_pos;
6542 m = ((1 << n) - 1) << bit_pos;
6543 *p = (*p & ~m) | (v & m);
6544 bits += n, bit_size -= n, bit_pos = 0, ++p;
6546 } else
6547 switch(bt) {
6548 /* XXX: when cross-compiling we assume that each type has the
6549 same representation on host and target, which is likely to
6550 be wrong in the case of long double */
6551 case VT_BOOL:
6552 vtop->c.i = vtop->c.i != 0;
6553 case VT_BYTE:
6554 *(char *)ptr |= vtop->c.i;
6555 break;
6556 case VT_SHORT:
6557 *(short *)ptr |= vtop->c.i;
6558 break;
6559 case VT_FLOAT:
6560 *(float*)ptr = vtop->c.f;
6561 break;
6562 case VT_DOUBLE:
6563 *(double *)ptr = vtop->c.d;
6564 break;
6565 case VT_LDOUBLE:
6566 #if defined TCC_IS_NATIVE_387
6567 if (sizeof (long double) >= 10) /* zero pad ten-byte LD */
6568 memcpy(ptr, &vtop->c.ld, 10);
6569 #ifdef __TINYC__
6570 else if (sizeof (long double) == sizeof (double))
6571 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr) : "m" (vtop->c.ld));
6572 #endif
6573 else if (vtop->c.ld == 0.0)
6575 else
6576 #endif
6577 if (sizeof(long double) == LDOUBLE_SIZE)
6578 *(long double*)ptr = vtop->c.ld;
6579 else if (sizeof(double) == LDOUBLE_SIZE)
6580 *(double *)ptr = (double)vtop->c.ld;
6581 else
6582 tcc_error("can't cross compile long double constants");
6583 break;
6584 #if PTR_SIZE != 8
6585 case VT_LLONG:
6586 *(long long *)ptr |= vtop->c.i;
6587 break;
6588 #else
6589 case VT_LLONG:
6590 #endif
6591 case VT_PTR:
6593 addr_t val = vtop->c.i;
6594 #if PTR_SIZE == 8
6595 if (vtop->r & VT_SYM)
6596 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
6597 else
6598 *(addr_t *)ptr |= val;
6599 #else
6600 if (vtop->r & VT_SYM)
6601 greloc(sec, vtop->sym, c, R_DATA_PTR);
6602 *(addr_t *)ptr |= val;
6603 #endif
6604 break;
6606 default:
6608 int val = vtop->c.i;
6609 #if PTR_SIZE == 8
6610 if (vtop->r & VT_SYM)
6611 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
6612 else
6613 *(int *)ptr |= val;
6614 #else
6615 if (vtop->r & VT_SYM)
6616 greloc(sec, vtop->sym, c, R_DATA_PTR);
6617 *(int *)ptr |= val;
6618 #endif
6619 break;
6623 vtop--;
6624 } else {
6625 vset(&dtype, VT_LOCAL|VT_LVAL, c);
6626 vswap();
6627 vstore();
6628 vpop();
6632 /* 't' contains the type and storage info. 'c' is the offset of the
6633 object in section 'sec'. If 'sec' is NULL, it means stack based
6634 allocation. 'first' is true if array '{' must be read (multi
6635 dimension implicit array init handling). 'size_only' is true if
6636 size only evaluation is wanted (only for arrays). */
6637 static void decl_initializer(CType *type, Section *sec, unsigned long c,
6638 int first, int size_only)
6640 int len, n, no_oblock, nb, i;
6641 int size1, align1;
6642 int have_elem;
6643 Sym *s, *f;
6644 Sym indexsym;
6645 CType *t1;
6647 /* If we are currently at a '}' or ',' we have read an initializer
6648 element in one of our callers, and not yet consumed it. */
6649 have_elem = tok == '}' || tok == ',';
6650 if (!have_elem && tok != '{' &&
6651 /* In case of strings we have special handling for arrays, so
6652 don't consume them as initializer value (which would commit them
6653 to some anonymous symbol). */
6654 tok != TOK_LSTR && tok != TOK_STR &&
6655 !size_only) {
6656 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
6657 have_elem = 1;
6660 if (have_elem &&
6661 !(type->t & VT_ARRAY) &&
6662 /* Use i_c_parameter_t to strip toplevel qualifiers.
6663 The source type might have VT_CONSTANT set, which is
6664 of course assignable to non-const elements. */
6665 is_compatible_unqualified_types(type, &vtop->type)) {
6666 init_putv(type, sec, c);
6667 } else if (type->t & VT_ARRAY) {
6668 s = type->ref;
6669 n = s->c;
6670 t1 = pointed_type(type);
6671 size1 = type_size(t1, &align1);
6673 no_oblock = 1;
6674 if ((first && tok != TOK_LSTR && tok != TOK_STR) ||
6675 tok == '{') {
6676 if (tok != '{')
6677 tcc_error("character array initializer must be a literal,"
6678 " optionally enclosed in braces");
6679 skip('{');
6680 no_oblock = 0;
6681 }
6683 /* only parse strings here if of the correct type (otherwise handle
6684 them as ((w)char *) expressions) */
6685 if ((tok == TOK_LSTR &&
6686 #ifdef TCC_TARGET_PE
6687 (t1->t & VT_BTYPE) == VT_SHORT && (t1->t & VT_UNSIGNED)
6688 #else
6689 (t1->t & VT_BTYPE) == VT_INT
6690 #endif
6691 ) || (tok == TOK_STR && (t1->t & VT_BTYPE) == VT_BYTE)) {
6692 len = 0;
6693 while (tok == TOK_STR || tok == TOK_LSTR) {
6694 int cstr_len, ch;
6696 /* compute maximum number of chars wanted */
6697 if (tok == TOK_STR)
6698 cstr_len = tokc.str.size;
6699 else
6700 cstr_len = tokc.str.size / sizeof(nwchar_t);
6701 cstr_len--;
6702 nb = cstr_len;
6703 if (n >= 0 && nb > (n - len))
6704 nb = n - len;
6705 if (!size_only) {
6706 if (cstr_len > nb)
6707 tcc_warning("initializer-string for array is too long");
6708 /* in order to go faster for the common case (char
6709 string in a global variable), we handle it
6710 specifically */
6711 if (sec && tok == TOK_STR && size1 == 1) {
6712 if (!NODATA_WANTED)
6713 memcpy(sec->data + c + len, tokc.str.data, nb);
6714 } else {
6715 for(i=0;i<nb;i++) {
6716 if (tok == TOK_STR)
6717 ch = ((unsigned char *)tokc.str.data)[i];
6718 else
6719 ch = ((nwchar_t *)tokc.str.data)[i];
6720 vpushi(ch);
6721 init_putv(t1, sec, c + (len + i) * size1);
6722 }
6723 }
6724 }
6725 len += nb;
6726 next();
6727 }
6728 /* only add trailing zero if enough storage (no
6729 warning in this case since it is standard) */
6730 if (n < 0 || len < n) {
6731 if (!size_only) {
6732 vpushi(0);
6733 init_putv(t1, sec, c + (len * size1));
6734 }
6735 len++;
6736 }
6737 len *= size1;
6738 } else {
6739 indexsym.c = 0;
6740 f = &indexsym;
6742 do_init_list:
6743 len = 0;
6744 while (tok != '}' || have_elem) {
6745 len = decl_designator(type, sec, c, &f, size_only, len);
6746 have_elem = 0;
6747 if (type->t & VT_ARRAY) {
6748 ++indexsym.c;
6749 /* special test for multi dimensional arrays (may not
6750 be strictly correct if designators are used at the
6751 same time) */
6752 if (no_oblock && len >= n*size1)
6753 break;
6754 } else {
6755 if (s->type.t == VT_UNION)
6756 f = NULL;
6757 else
6758 f = f->next;
6759 if (no_oblock && f == NULL)
6760 break;
6761 }
6763 if (tok == '}')
6764 break;
6765 skip(',');
6766 }
6768 /* put zeros at the end */
6769 if (!size_only && len < n*size1)
6770 init_putz(sec, c + len, n*size1 - len);
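/* e.g. for 'int a[4] = {1};' the remaining three elements are zeroed here */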
6771 if (!no_oblock)
6772 skip('}');
6773 /* patch type size if needed, which happens only for array types */
6774 if (n < 0)
6775 s->c = size1 == 1 ? len : ((len + size1 - 1)/size1);
6776 } else if ((type->t & VT_BTYPE) == VT_STRUCT) {
6777 size1 = 1;
6778 no_oblock = 1;
6779 if (first || tok == '{') {
6780 skip('{');
6781 no_oblock = 0;
6782 }
6783 s = type->ref;
6784 f = s->next;
6785 n = s->c;
6786 goto do_init_list;
6787 } else if (tok == '{') {
6788 next();
6789 decl_initializer(type, sec, c, first, size_only);
6790 skip('}');
6791 } else if (size_only) {
6792 /* If we supported only ISO C we wouldn't have to accept calling
6793 this on anything other than an array with size_only==1 (and even then
6794 only on the outermost level, so no recursion would be needed),
6795 because initializing a flex array member isn't supported.
6796 But GNU C supports it, so we need to recurse even into
6797 subfields of structs and arrays when size_only is set. */
6798 /* just skip expression */
6799 skip_or_save_block(NULL);
6800 } else {
6801 if (!have_elem) {
6802 /* This should happen only when we haven't parsed
6803 the init element above for fear of committing a
6804 string constant to memory too early. */
6805 if (tok != TOK_STR && tok != TOK_LSTR)
6806 expect("string constant");
6807 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
6808 }
6809 init_putv(type, sec, c);
6810 }
6811 }
6813 /* parse an initializer for type 't' if 'has_init' is non zero, and
6814 allocate space in local or global data space ('r' is either
6815 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
6816 variable 'v' of scope 'scope' is declared before initializers
6817 are parsed. If 'v' is zero, then a reference to the new object
6818 is put in the value stack. If 'has_init' is 2, a special parsing
6819 is done to handle string constants. */
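/* For example, 'static int a[] = {1, 2, 3};' has unknown size at this point:
the initializer tokens are saved below, parsed once with size_only set to
learn the array length, then replayed a second time to emit the data. */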
6820 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r,
6821 int has_init, int v, int scope)
6822 {
6823 int size, align, addr;
6824 TokenString *init_str = NULL;
6826 Section *sec;
6827 Sym *flexible_array;
6828 Sym *sym = NULL;
6829 int saved_nocode_wanted = nocode_wanted;
6830 #ifdef CONFIG_TCC_BCHECK
6831 int bcheck = tcc_state->do_bounds_check && !NODATA_WANTED;
6832 #endif
6834 if (type->t & VT_STATIC)
6835 nocode_wanted |= NODATA_WANTED ? 0x40000000 : 0x80000000;
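/* for a static object the data must still be emitted even where code
generation is suppressed: the sign bit keeps NODATA_WANTED false while
flagging STATIC_DATA_WANTED; if data output is already off, only the
0x40000000 marker is added */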
6837 flexible_array = NULL;
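/* look for a trailing flexible array member, e.g.
'struct S { int n; char data[]; };', whose real size is only known from the
initializer and is added to 'size' further down */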
6838 if ((type->t & VT_BTYPE) == VT_STRUCT) {
6839 Sym *field = type->ref->next;
6840 if (field) {
6841 while (field->next)
6842 field = field->next;
6843 if (field->type.t & VT_ARRAY && field->type.ref->c < 0)
6844 flexible_array = field;
6845 }
6846 }
6848 size = type_size(type, &align);
6849 /* If unknown size, we must evaluate it before
6850 evaluating initializers because
6851 initializers can generate global data too
6852 (e.g. string pointers or ISOC99 compound
6853 literals). It also simplifies local
6854 initializers handling */
6855 if (size < 0 || (flexible_array && has_init)) {
6856 if (!has_init)
6857 tcc_error("unknown type size");
6858 /* get all init string */
6859 if (has_init == 2) {
6860 init_str = tok_str_alloc();
6861 /* only get strings */
6862 while (tok == TOK_STR || tok == TOK_LSTR) {
6863 tok_str_add_tok(init_str);
6864 next();
6865 }
6866 tok_str_add(init_str, -1);
6867 tok_str_add(init_str, 0);
6868 } else {
6869 skip_or_save_block(&init_str);
6870 }
6871 unget_tok(0);
6873 /* compute size */
6874 begin_macro(init_str, 1);
6875 next();
6876 decl_initializer(type, NULL, 0, 1, 1);
6877 /* prepare second initializer parsing */
6878 macro_ptr = init_str->str;
6879 next();
6881 /* if still unknown size, error */
6882 size = type_size(type, &align);
6883 if (size < 0)
6884 tcc_error("unknown type size");
6885 }
6886 /* If there's a flex member and it was used in the initializer
6887 adjust size. */
6888 if (flexible_array &&
6889 flexible_array->type.ref->c > 0)
6890 size += flexible_array->type.ref->c
6891 * pointed_size(&flexible_array->type);
6892 /* take into account specified alignment if bigger */
6893 if (ad->a.aligned) {
6894 int speca = 1 << (ad->a.aligned - 1);
6895 if (speca > align)
6896 align = speca;
6897 } else if (ad->a.packed) {
6898 align = 1;
6899 }
6901 if (NODATA_WANTED)
6902 size = 0, align = 1;
6904 if ((r & VT_VALMASK) == VT_LOCAL) {
6905 sec = NULL;
6906 #ifdef CONFIG_TCC_BCHECK
6907 if (bcheck && (type->t & VT_ARRAY)) {
6908 loc--;
6909 }
6910 #endif
6911 loc = (loc - size) & -align;
6912 addr = loc;
6913 #ifdef CONFIG_TCC_BCHECK
6914 /* handles bounds */
6915 /* XXX: currently, since we do only one pass, we cannot track
6916 '&' operators, so we add only arrays */
6917 if (bcheck && (type->t & VT_ARRAY)) {
6918 addr_t *bounds_ptr;
6919 /* add padding between regions */
6920 loc--;
6921 /* then add local bound info */
6922 bounds_ptr = section_ptr_add(lbounds_section, 2 * sizeof(addr_t));
6923 bounds_ptr[0] = addr;
6924 bounds_ptr[1] = size;
6925 }
6926 #endif
6927 if (v) {
6928 /* local variable */
6929 #ifdef CONFIG_TCC_ASM
6930 if (ad->asm_label) {
6931 int reg = asm_parse_regvar(ad->asm_label);
6932 if (reg >= 0)
6933 r = (r & ~VT_VALMASK) | reg;
6934 }
6935 #endif
6936 sym = sym_push(v, type, r, addr);
6937 sym->a = ad->a;
6938 } else {
6939 /* push local reference */
6940 vset(type, r, addr);
6941 }
6942 } else {
6943 if (v && scope == VT_CONST) {
6944 /* see if the symbol was already defined */
6945 sym = sym_find(v);
6946 if (sym) {
6947 patch_storage(sym, ad, type);
6948 /* we accept several definitions of the same global variable. */
6949 if (!has_init && sym->c && elfsym(sym)->st_shndx != SHN_UNDEF)
6950 goto no_alloc;
6951 }
6952 }
6954 /* allocate symbol in corresponding section */
6955 sec = ad->section;
6956 if (!sec) {
6957 if (has_init)
6958 sec = data_section;
6959 else if (tcc_state->nocommon)
6960 sec = bss_section;
6961 }
6963 if (sec) {
6964 addr = section_add(sec, size, align);
6965 #ifdef CONFIG_TCC_BCHECK
6966 /* add padding if bound check */
6967 if (bcheck)
6968 section_add(sec, 1, 1);
6969 #endif
6970 } else {
6971 addr = align; /* SHN_COMMON is special, symbol value is align */
6972 sec = common_section;
6973 }
6975 if (v) {
6976 if (!sym) {
6977 sym = sym_push(v, type, r | VT_SYM, 0);
6978 patch_storage(sym, ad, NULL);
6979 }
6980 /* Local statics have a scope until now (for
6981 warnings), remove it here. */
6982 sym->sym_scope = 0;
6983 /* update symbol definition */
6984 put_extern_sym(sym, sec, addr, size);
6985 } else {
6986 /* push global reference */
6987 sym = get_sym_ref(type, sec, addr, size);
6988 vpushsym(type, sym);
6989 vtop->r |= r;
6990 }
6992 #ifdef CONFIG_TCC_BCHECK
6993 /* handle bounds now because the symbol must be defined
6994 before the relocation can reference it */
6995 if (bcheck) {
6996 addr_t *bounds_ptr;
6998 greloca(bounds_section, sym, bounds_section->data_offset, R_DATA_PTR, 0);
6999 /* then add global bound info */
7000 bounds_ptr = section_ptr_add(bounds_section, 2 * sizeof(addr_t));
7001 bounds_ptr[0] = 0; /* relocated */
7002 bounds_ptr[1] = size;
7003 }
7004 #endif
7005 }
7007 if (type->t & VT_VLA) {
7008 int a;
7010 if (NODATA_WANTED)
7011 goto no_alloc;
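/* VLA case, e.g. 'char buf[n];': the stack pointer is saved once before the
first VLA comes into scope (vla_sp_root_loc), then each VLA gets a runtime
alloca-style allocation and its own saved stack pointer below */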
7013 /* save current stack pointer */
7014 if (vlas_in_scope == 0) {
7015 if (vla_sp_root_loc == -1)
7016 vla_sp_root_loc = (loc -= PTR_SIZE);
7017 gen_vla_sp_save(vla_sp_root_loc);
7018 }
7020 vla_runtime_type_size(type, &a);
7021 gen_vla_alloc(type, a);
7022 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
7023 /* on _WIN64, because of the function args scratch area, the
7024 result of alloca differs from RSP and is returned in RAX. */
7025 gen_vla_result(addr), addr = (loc -= PTR_SIZE);
7026 #endif
7027 gen_vla_sp_save(addr);
7028 vla_sp_loc = addr;
7029 vlas_in_scope++;
7031 } else if (has_init) {
7032 size_t oldreloc_offset = 0;
7033 if (sec && sec->reloc)
7034 oldreloc_offset = sec->reloc->data_offset;
7035 decl_initializer(type, sec, addr, 1, 0);
7036 if (sec && sec->reloc)
7037 squeeze_multi_relocs(sec, oldreloc_offset);
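/* designated initializers may have written the same bytes (and thus added a
relocation for the same offset) more than once; only the last relocation for
a given offset must survive, so duplicates added since oldreloc_offset are
dropped here */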
7038 /* patch flexible array member size back to -1, */
7039 /* for possible subsequent similar declarations */
7040 if (flexible_array)
7041 flexible_array->type.ref->c = -1;
7042 }
7044 no_alloc:
7045 /* restore parse state if needed */
7046 if (init_str) {
7047 end_macro();
7048 next();
7049 }
7051 nocode_wanted = saved_nocode_wanted;
7052 }
7054 /* parse a function defined by symbol 'sym' and generate its code in
7055 'cur_text_section' */
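/* Roughly: define the function symbol at the current text offset, emit the
prolog, compile the body with block(), emit the epilog, then patch the ELF
symbol size once the final 'ind' is known. */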
7056 static void gen_function(Sym *sym)
7057 {
7058 nocode_wanted = 0;
7059 ind = cur_text_section->data_offset;
7060 /* NOTE: we patch the symbol size later */
7061 put_extern_sym(sym, cur_text_section, ind, 0);
7062 funcname = get_tok_str(sym->v, NULL);
7063 func_ind = ind;
7064 /* Initialize VLA state */
7065 vla_sp_loc = -1;
7066 vla_sp_root_loc = -1;
7067 /* put debug symbol */
7068 tcc_debug_funcstart(tcc_state, sym);
7069 /* push a dummy symbol to enable local sym storage */
7070 sym_push2(&local_stack, SYM_FIELD, 0, 0);
7071 local_scope = 1; /* for function parameters */
7072 gfunc_prolog(&sym->type);
7073 local_scope = 0;
7074 rsym = 0;
7075 block(NULL, NULL, 0);
7076 nocode_wanted = 0;
7077 gsym(rsym);
7078 gfunc_epilog();
7079 cur_text_section->data_offset = ind;
7080 label_pop(&global_label_stack, NULL, 0);
7081 /* reset local stack */
7082 local_scope = 0;
7083 sym_pop(&local_stack, NULL, 0);
7084 /* end of function */
7085 /* patch symbol size */
7086 elfsym(sym)->st_size = ind - func_ind;
7087 tcc_debug_funcend(tcc_state, ind - func_ind);
7088 /* It's better to crash than to generate wrong code */
7089 cur_text_section = NULL;
7090 funcname = ""; /* for safety */
7091 func_vt.t = VT_VOID; /* for safety */
7092 func_var = 0; /* for safety */
7093 ind = 0; /* for safety */
7094 nocode_wanted = 0x80000000;
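/* outside of functions: suppress code generation but keep emitting static
data (the sign bit leaves NODATA_WANTED false) */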
7095 check_vstack();
7096 }
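/* Emit the bodies of 'static inline' functions that were actually
referenced.  Generating one may reference further inline functions, hence
the loop keeps iterating until no new code was produced. */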
7098 static void gen_inline_functions(TCCState *s)
7099 {
7100 Sym *sym;
7101 int inline_generated, i, ln;
7102 struct InlineFunc *fn;
7104 ln = file->line_num;
7105 /* iterate while inline functions are referenced */
7106 do {
7107 inline_generated = 0;
7108 for (i = 0; i < s->nb_inline_fns; ++i) {
7109 fn = s->inline_fns[i];
7110 sym = fn->sym;
7111 if (sym && sym->c) {
7112 /* the function was used: generate its code and
7113 convert it to a normal function */
7114 fn->sym = NULL;
7115 if (file)
7116 pstrcpy(file->filename, sizeof file->filename, fn->filename);
7117 sym->type.t &= ~VT_INLINE;
7119 begin_macro(fn->func_str, 1);
7120 next();
7121 cur_text_section = text_section;
7122 gen_function(sym);
7123 end_macro();
7125 inline_generated = 1;
7126 }
7127 }
7128 } while (inline_generated);
7129 file->line_num = ln;
7130 }
7132 ST_FUNC void free_inline_functions(TCCState *s)
7133 {
7134 int i;
7135 /* free tokens of unused inline functions */
7136 for (i = 0; i < s->nb_inline_fns; ++i) {
7137 struct InlineFunc *fn = s->inline_fns[i];
7138 if (fn->sym)
7139 tok_str_free(fn->func_str);
7140 }
7141 dynarray_reset(&s->inline_fns, &s->nb_inline_fns);
7142 }
7144 /* 'l' is VT_LOCAL or VT_CONST to define default storage type, or VT_CMP
7145 if parsing old style parameter decl list (and FUNC_SYM is set then) */
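/* e.g. a top level 'int x = 1, *p, f(void);' is handled here: the outer loop
reads one base type per declaration, the inner loop below walks the comma
separated declarators */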
7146 static int decl0(int l, int is_for_loop_init, Sym *func_sym)
7147 {
7148 int v, has_init, r;
7149 CType type, btype;
7150 Sym *sym;
7151 AttributeDef ad;
7153 while (1) {
7154 if (!parse_btype(&btype, &ad)) {
7155 if (is_for_loop_init)
7156 return 0;
7157 /* skip redundant ';' if not in old parameter decl scope */
7158 if (tok == ';' && l != VT_CMP) {
7159 next();
7160 continue;
7161 }
7162 if (l != VT_CONST)
7163 break;
7164 if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
7165 /* global asm block */
7166 asm_global_instr();
7167 continue;
7168 }
7169 if (tok >= TOK_UIDENT) {
7170 /* special test for old K&R protos without explicit int
7171 type. Only accepted when defining global data */
7172 btype.t = VT_INT;
7173 } else {
7174 if (tok != TOK_EOF)
7175 expect("declaration");
7176 break;
7177 }
7178 }
7179 if (tok == ';') {
7180 if ((btype.t & VT_BTYPE) == VT_STRUCT) {
7181 int v = btype.ref->v;
7182 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) >= SYM_FIRST_ANOM)
7183 tcc_warning("unnamed struct/union that defines no instances");
7184 next();
7185 continue;
7186 }
7187 if (IS_ENUM(btype.t)) {
7188 next();
7189 continue;
7190 }
7191 }
7192 while (1) { /* iterate thru each declaration */
7193 type = btype;
7194 /* If the base type itself was an array type of unspecified
7195 size (like in 'typedef int arr[]; arr x = {1};') then
7196 we will overwrite the unknown size by the real one for
7197 this decl. We need to unshare the ref symbol holding
7198 that size. */
7199 if ((type.t & VT_ARRAY) && type.ref->c < 0) {
7200 type.ref = sym_push(SYM_FIELD, &type.ref->type, 0, type.ref->c);
7201 }
7202 type_decl(&type, &ad, &v, TYPE_DIRECT);
7203 #if 0
7204 {
7205 char buf[500];
7206 type_to_str(buf, sizeof(buf), &type, get_tok_str(v, NULL));
7207 printf("type = '%s'\n", buf);
7208 }
7209 #endif
7210 if ((type.t & VT_BTYPE) == VT_FUNC) {
7211 if ((type.t & VT_STATIC) && (l == VT_LOCAL)) {
7212 tcc_error("function without file scope cannot be static");
7213 }
7214 /* if old style function prototype, we accept a
7215 declaration list */
7216 sym = type.ref;
7217 if (sym->f.func_type == FUNC_OLD && l == VT_CONST)
7218 decl0(VT_CMP, 0, sym);
7219 }
7221 if (gnu_ext && (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
7222 ad.asm_label = asm_label_instr();
7223 /* parse one last attribute list, after asm label */
7224 parse_attribute(&ad);
7225 if (tok == '{')
7226 expect(";");
7227 }
7229 #ifdef TCC_TARGET_PE
7230 if (ad.a.dllimport || ad.a.dllexport) {
7231 if (type.t & (VT_STATIC|VT_TYPEDEF))
7232 tcc_error("cannot have dll linkage with static or typedef");
7233 if (ad.a.dllimport) {
7234 if ((type.t & VT_BTYPE) == VT_FUNC)
7235 ad.a.dllimport = 0;
7236 else
7237 type.t |= VT_EXTERN;
7238 }
7239 }
7240 #endif
7241 if (tok == '{') {
7242 if (l != VT_CONST)
7243 tcc_error("cannot use local functions");
7244 if ((type.t & VT_BTYPE) != VT_FUNC)
7245 expect("function definition");
7247 /* reject abstract declarators in a function definition;
7248 make old-style params without a declaration have int type */
7249 sym = type.ref;
7250 while ((sym = sym->next) != NULL) {
7251 if (!(sym->v & ~SYM_FIELD))
7252 expect("identifier");
7253 if (sym->type.t == VT_VOID)
7254 sym->type = int_type;
7255 }
7257 /* XXX: cannot do better now: convert 'extern inline' to 'static inline' */
7258 if ((type.t & (VT_EXTERN | VT_INLINE)) == (VT_EXTERN | VT_INLINE))
7259 type.t = (type.t & ~VT_EXTERN) | VT_STATIC;
7261 /* put function symbol */
7262 sym = external_global_sym(v, &type, 0);
7263 type.t &= ~VT_EXTERN;
7264 patch_storage(sym, &ad, &type);
7266 /* static inline functions are just recorded as a kind
7267 of macro. Their code will be emitted at the end of
7268 the compilation unit only if they are used */
7269 if ((type.t & (VT_INLINE | VT_STATIC)) ==
7270 (VT_INLINE | VT_STATIC)) {
7271 struct InlineFunc *fn;
7272 const char *filename;
7274 filename = file ? file->filename : "";
7275 fn = tcc_malloc(sizeof *fn + strlen(filename));
7276 strcpy(fn->filename, filename);
7277 fn->sym = sym;
7278 skip_or_save_block(&fn->func_str);
7279 dynarray_add(&tcc_state->inline_fns,
7280 &tcc_state->nb_inline_fns, fn);
7281 } else {
7282 /* compute text section */
7283 cur_text_section = ad.section;
7284 if (!cur_text_section)
7285 cur_text_section = text_section;
7286 gen_function(sym);
7287 }
7288 break;
7289 } else {
7290 if (l == VT_CMP) {
7291 /* find parameter in function parameter list */
7292 for (sym = func_sym->next; sym; sym = sym->next)
7293 if ((sym->v & ~SYM_FIELD) == v)
7294 goto found;
7295 tcc_error("declaration for parameter '%s' but no such parameter",
7296 get_tok_str(v, NULL));
7297 found:
7298 if (type.t & VT_STORAGE) /* 'register' is okay */
7299 tcc_error("storage class specified for '%s'",
7300 get_tok_str(v, NULL));
7301 if (sym->type.t != VT_VOID)
7302 tcc_error("redefinition of parameter '%s'",
7303 get_tok_str(v, NULL));
7304 convert_parameter_type(&type);
7305 sym->type = type;
7306 } else if (type.t & VT_TYPEDEF) {
7307 /* save typedefed type */
7308 /* XXX: test storage specifiers ? */
7309 sym = sym_find(v);
7310 if (sym && sym->sym_scope == local_scope) {
7311 if (!is_compatible_types(&sym->type, &type)
7312 || !(sym->type.t & VT_TYPEDEF))
7313 tcc_error("incompatible redefinition of '%s'",
7314 get_tok_str(v, NULL));
7315 sym->type = type;
7316 } else {
7317 sym = sym_push(v, &type, 0, 0);
7318 }
7319 sym->a = ad.a;
7320 sym->f = ad.f;
7321 } else {
7322 r = 0;
7323 if ((type.t & VT_BTYPE) == VT_FUNC) {
7324 /* external function definition */
7325 /* specific case for func_call attribute */
7326 type.ref->f = ad.f;
7327 } else if (!(type.t & VT_ARRAY)) {
7328 /* not lvalue if array */
7329 r |= lvalue_type(type.t);
7330 }
7331 has_init = (tok == '=');
7332 if (has_init && (type.t & VT_VLA))
7333 tcc_error("variable length array cannot be initialized");
7334 if (((type.t & VT_EXTERN) && (!has_init || l != VT_CONST)) ||
7335 ((type.t & VT_BTYPE) == VT_FUNC) ||
7336 ((type.t & VT_ARRAY) && (type.t & VT_STATIC) &&
7337 !has_init && l == VT_CONST && type.ref->c < 0)) {
7338 /* external variable or function */
7339 /* NOTE: as in GCC, uninitialized global static
7340 arrays of unspecified size are considered
7341 extern */
7342 type.t |= VT_EXTERN;
7343 sym = external_sym(v, &type, r, &ad);
7344 if (ad.alias_target) {
7345 ElfSym *esym;
7346 Sym *alias_target;
7347 alias_target = sym_find(ad.alias_target);
7348 esym = elfsym(alias_target);
7349 if (!esym)
7350 tcc_error("unsupported forward __alias__ attribute");
7351 /* Local statics have a scope until now (for
7352 warnings), remove it here. */
7353 sym->sym_scope = 0;
7354 put_extern_sym2(sym, esym->st_shndx, esym->st_value, esym->st_size, 0);
7355 }
7356 } else {
7357 if (type.t & VT_STATIC)
7358 r |= VT_CONST;
7359 else
7360 r |= l;
7361 if (has_init)
7362 next();
7363 else if (l == VT_CONST)
7364 /* uninitialized global variables may be overridden */
7365 type.t |= VT_EXTERN;
7366 decl_initializer_alloc(&type, &ad, r, has_init, v, l);
7367 }
7368 }
7369 if (tok != ',') {
7370 if (is_for_loop_init)
7371 return 1;
7372 skip(';');
7373 break;
7374 }
7375 next();
7376 }
7377 ad.a.aligned = 0;
7378 }
7380 return 0;
7381 }
7383 static void decl(int l)
7384 {
7385 decl0(l, 0, NULL);
7386 }
7388 /* ------------------------------------------------------------------------- */