patch type_to_str to handle complex function-ptr decls better
[tinycc.git] / tccgen.c
blob 7ed89acd4e3eeb5e92b3afd8bd1adef640555323
1 /*
2 * TCC - Tiny C Compiler
3 *
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 #include "tcc.h"
22 /********************************************************/
23 /* global variables */
25 /* loc : local variable index
26 ind : output code index
27 rsym: return symbol
28 anon_sym: anonymous symbol index
30 ST_DATA int rsym, anon_sym, ind, loc;
32 ST_DATA Sym *sym_free_first;
33 ST_DATA void **sym_pools;
34 ST_DATA int nb_sym_pools;
36 ST_DATA Sym *global_stack;
37 ST_DATA Sym *local_stack;
38 ST_DATA Sym *define_stack;
39 ST_DATA Sym *global_label_stack;
40 ST_DATA Sym *local_label_stack;
41 static int local_scope;
42 static int in_sizeof;
43 static int section_sym;
45 ST_DATA int vlas_in_scope; /* number of VLAs that are currently in scope */
46 ST_DATA int vla_sp_root_loc; /* vla_sp_loc for SP before any VLAs were pushed */
47 ST_DATA int vla_sp_loc; /* stack location where the current stack pointer is saved before it is modified for a VLA, so it can be restored later */
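/* value stack: vtop points to the current top entry; pvtop records the expected top so check_vstack() can report leaks */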
49 ST_DATA SValue __vstack[1+VSTACK_SIZE], *vtop, *pvtop;
51 ST_DATA int const_wanted; /* true if constant wanted */
52 ST_DATA int nocode_wanted; /* no code generation wanted */
53 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
54 #define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */
55 ST_DATA int global_expr; /* true if compound literals must be allocated globally (used during initializers parsing) */
56 ST_DATA CType func_vt; /* current function return type (used by return instruction) */
57 ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
58 ST_DATA int func_vc;
59 ST_DATA int last_line_num, last_ind, func_ind; /* debug last line number and pc */
60 ST_DATA const char *funcname;
61 ST_DATA int g_debug;
63 ST_DATA CType char_pointer_type, func_old_type, int_type, size_type, ptrdiff_type;
65 ST_DATA struct switch_t {
66 struct case_t {
67 int64_t v1, v2;
68 int sym;
69 } **p; int n; /* list of case ranges */
70 int def_sym; /* default symbol */
71 } *cur_switch; /* current switch */
73 /* ------------------------------------------------------------------------- */
75 static void gen_cast(CType *type);
76 static void gen_cast_s(int t);
77 static inline CType *pointed_type(CType *type);
78 static int is_compatible_types(CType *type1, CType *type2);
79 static int parse_btype(CType *type, AttributeDef *ad);
80 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td);
81 static void parse_expr_type(CType *type);
82 static void init_putv(CType *type, Section *sec, unsigned long c);
83 static void decl_initializer(CType *type, Section *sec, unsigned long c, int first, int size_only);
84 static void block(int *bsym, int *csym, int is_expr);
85 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
86 static void decl(int l);
87 static int decl0(int l, int is_for_loop_init, Sym *);
88 static void expr_eq(void);
89 static void vla_runtime_type_size(CType *type, int *a);
90 static void vla_sp_restore(void);
91 static void vla_sp_restore_root(void);
92 static int is_compatible_unqualified_types(CType *type1, CType *type2);
93 static inline int64_t expr_const64(void);
94 static void vpush64(int ty, unsigned long long v);
95 static void vpush(CType *type);
96 static int gvtst(int inv, int t);
97 static void gen_inline_functions(TCCState *s);
98 static void skip_or_save_block(TokenString **str);
99 static void gv_dup(void);
101 ST_INLN int is_float(int t)
103 int bt;
104 bt = t & VT_BTYPE;
105 return bt == VT_LDOUBLE || bt == VT_DOUBLE || bt == VT_FLOAT || bt == VT_QFLOAT;
108 /* we use our own 'finite' function to avoid potential problems with
109 non standard math libs */
110 /* XXX: endianness dependent */
111 ST_FUNC int ieee_finite(double d)
113 int p[4];
114 memcpy(p, &d, sizeof(double));
115 return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
118 /* compiling intel long double natively */
119 #if (defined __i386__ || defined __x86_64__) \
120 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
121 # define TCC_IS_NATIVE_387
122 #endif
124 ST_FUNC void test_lvalue(void)
126 if (!(vtop->r & VT_LVAL))
127 expect("lvalue");
130 ST_FUNC void check_vstack(void)
132 if (pvtop != vtop)
133 tcc_error("internal compiler error: vstack leak (%d)", vtop - pvtop);
136 /* ------------------------------------------------------------------------- */
137 /* vstack debugging aid */
139 #if 0
140 void pv (const char *lbl, int a, int b)
142 int i;
143 for (i = a; i < a + b; ++i) {
144 SValue *p = &vtop[-i];
145 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
146 lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
149 #endif
151 /* ------------------------------------------------------------------------- */
152 /* start of translation unit info */
153 ST_FUNC void tcc_debug_start(TCCState *s1)
155 if (s1->do_debug) {
156 char buf[512];
158 /* file info: full path + filename */
159 section_sym = put_elf_sym(symtab_section, 0, 0,
160 ELFW(ST_INFO)(STB_LOCAL, STT_SECTION), 0,
161 text_section->sh_num, NULL);
162 getcwd(buf, sizeof(buf));
163 #ifdef _WIN32
164 normalize_slashes(buf);
165 #endif
166 pstrcat(buf, sizeof(buf), "/");
167 put_stabs_r(buf, N_SO, 0, 0,
168 text_section->data_offset, text_section, section_sym);
169 put_stabs_r(file->filename, N_SO, 0, 0,
170 text_section->data_offset, text_section, section_sym);
171 last_ind = 0;
172 last_line_num = 0;
175 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
176 symbols can be safely used */
177 put_elf_sym(symtab_section, 0, 0,
178 ELFW(ST_INFO)(STB_LOCAL, STT_FILE), 0,
179 SHN_ABS, file->filename);
182 /* put end of translation unit info */
183 ST_FUNC void tcc_debug_end(TCCState *s1)
185 if (!s1->do_debug)
186 return;
187 put_stabs_r(NULL, N_SO, 0, 0,
188 text_section->data_offset, text_section, section_sym);
192 /* generate line number info */
193 ST_FUNC void tcc_debug_line(TCCState *s1)
195 if (!s1->do_debug)
196 return;
197 if ((last_line_num != file->line_num || last_ind != ind)) {
198 put_stabn(N_SLINE, 0, file->line_num, ind - func_ind);
199 last_ind = ind;
200 last_line_num = file->line_num;
204 /* put function symbol */
205 ST_FUNC void tcc_debug_funcstart(TCCState *s1, Sym *sym)
207 char buf[512];
209 if (!s1->do_debug)
210 return;
212 /* stabs info */
213 /* XXX: we put here a dummy type */
214 snprintf(buf, sizeof(buf), "%s:%c1",
215 funcname, sym->type.t & VT_STATIC ? 'f' : 'F');
216 put_stabs_r(buf, N_FUN, 0, file->line_num, 0,
217 cur_text_section, sym->c);
218 /* //gr gdb wants a line at the function */
219 put_stabn(N_SLINE, 0, file->line_num, 0);
221 last_ind = 0;
222 last_line_num = 0;
225 /* put function size */
226 ST_FUNC void tcc_debug_funcend(TCCState *s1, int size)
228 if (!s1->do_debug)
229 return;
230 put_stabn(N_FUN, 0, 0, size);
233 /* ------------------------------------------------------------------------- */
234 ST_FUNC int tccgen_compile(TCCState *s1)
236 cur_text_section = NULL;
237 funcname = "";
238 anon_sym = SYM_FIRST_ANOM;
239 section_sym = 0;
240 const_wanted = 0;
241 nocode_wanted = 0x80000000;
243 /* define some often used types */
244 int_type.t = VT_INT;
245 char_pointer_type.t = VT_BYTE;
246 mk_pointer(&char_pointer_type);
247 #if PTR_SIZE == 4
248 size_type.t = VT_INT | VT_UNSIGNED;
249 ptrdiff_type.t = VT_INT;
250 #elif LONG_SIZE == 4
251 size_type.t = VT_LLONG | VT_UNSIGNED;
252 ptrdiff_type.t = VT_LLONG;
253 #else
254 size_type.t = VT_LONG | VT_LLONG | VT_UNSIGNED;
255 ptrdiff_type.t = VT_LONG | VT_LLONG;
256 #endif
257 func_old_type.t = VT_FUNC;
258 func_old_type.ref = sym_push(SYM_FIELD, &int_type, 0, 0);
259 func_old_type.ref->f.func_call = FUNC_CDECL;
260 func_old_type.ref->f.func_type = FUNC_OLD;
262 tcc_debug_start(s1);
264 #ifdef TCC_TARGET_ARM
265 arm_init(s1);
266 #endif
268 #ifdef INC_DEBUG
269 printf("%s: **** new file\n", file->filename);
270 #endif
272 parse_flags = PARSE_FLAG_PREPROCESS | PARSE_FLAG_TOK_NUM | PARSE_FLAG_TOK_STR;
273 next();
274 decl(VT_CONST);
275 gen_inline_functions(s1);
276 check_vstack();
277 /* end of translation unit info */
278 tcc_debug_end(s1);
279 return 0;
282 /* ------------------------------------------------------------------------- */
283 ST_FUNC ElfSym *elfsym(Sym *s)
285 if (!s || !s->c)
286 return NULL;
287 return &((ElfSym *)symtab_section->data)[s->c];
290 /* apply storage attributes to Elf symbol */
291 ST_FUNC void update_storage(Sym *sym)
293 ElfSym *esym;
294 int sym_bind, old_sym_bind;
296 esym = elfsym(sym);
297 if (!esym)
298 return;
300 if (sym->a.visibility)
301 esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1))
302 | sym->a.visibility;
304 if (sym->type.t & VT_STATIC)
305 sym_bind = STB_LOCAL;
306 else if (sym->a.weak)
307 sym_bind = STB_WEAK;
308 else
309 sym_bind = STB_GLOBAL;
310 old_sym_bind = ELFW(ST_BIND)(esym->st_info);
311 if (sym_bind != old_sym_bind) {
312 esym->st_info = ELFW(ST_INFO)(sym_bind, ELFW(ST_TYPE)(esym->st_info));
315 #ifdef TCC_TARGET_PE
316 if (sym->a.dllimport)
317 esym->st_other |= ST_PE_IMPORT;
318 if (sym->a.dllexport)
319 esym->st_other |= ST_PE_EXPORT;
320 #endif
322 #if 0
323 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
324 get_tok_str(sym->v, NULL),
325 sym_bind == STB_WEAK ? 'w' : sym_bind == STB_LOCAL ? 'l' : 'g',
326 sym->a.visibility,
327 sym->a.dllexport,
328 sym->a.dllimport
330 #endif
333 /* ------------------------------------------------------------------------- */
334 /* update sym->c so that it points to an external symbol in section
335 'section' with value 'value' */
337 ST_FUNC void put_extern_sym2(Sym *sym, int sh_num,
338 addr_t value, unsigned long size,
339 int can_add_underscore)
341 int sym_type, sym_bind, info, other, t;
342 ElfSym *esym;
343 const char *name;
344 char buf1[256];
345 #ifdef CONFIG_TCC_BCHECK
346 char buf[32];
347 #endif
349 if (!sym->c) {
350 name = get_tok_str(sym->v, NULL);
351 #ifdef CONFIG_TCC_BCHECK
352 if (tcc_state->do_bounds_check) {
353 /* XXX: avoid doing that for statics ? */
354 /* if bound checking is activated, we change some function
355 names by adding the "__bound" prefix */
356 switch(sym->v) {
357 #ifdef TCC_TARGET_PE
358 /* XXX: we rely only on malloc hooks */
359 case TOK_malloc:
360 case TOK_free:
361 case TOK_realloc:
362 case TOK_memalign:
363 case TOK_calloc:
364 #endif
365 case TOK_memcpy:
366 case TOK_memmove:
367 case TOK_memset:
368 case TOK_strlen:
369 case TOK_strcpy:
370 case TOK_alloca:
371 strcpy(buf, "__bound_");
372 strcat(buf, name);
373 name = buf;
374 break;
377 #endif
378 t = sym->type.t;
379 if ((t & VT_BTYPE) == VT_FUNC) {
380 sym_type = STT_FUNC;
381 } else if ((t & VT_BTYPE) == VT_VOID) {
382 sym_type = STT_NOTYPE;
383 } else {
384 sym_type = STT_OBJECT;
386 if (t & VT_STATIC)
387 sym_bind = STB_LOCAL;
388 else
389 sym_bind = STB_GLOBAL;
390 other = 0;
391 #ifdef TCC_TARGET_PE
392 if (sym_type == STT_FUNC && sym->type.ref) {
393 Sym *ref = sym->type.ref;
394 if (ref->f.func_call == FUNC_STDCALL && can_add_underscore) {
395 sprintf(buf1, "_%s@%d", name, ref->f.func_args * PTR_SIZE);
396 name = buf1;
397 other |= ST_PE_STDCALL;
398 can_add_underscore = 0;
401 #endif
402 if (tcc_state->leading_underscore && can_add_underscore) {
403 buf1[0] = '_';
404 pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
405 name = buf1;
407 if (sym->asm_label)
408 name = get_tok_str(sym->asm_label, NULL);
409 info = ELFW(ST_INFO)(sym_bind, sym_type);
410 sym->c = put_elf_sym(symtab_section, value, size, info, other, sh_num, name);
411 } else {
412 esym = elfsym(sym);
413 esym->st_value = value;
414 esym->st_size = size;
415 esym->st_shndx = sh_num;
417 update_storage(sym);
420 ST_FUNC void put_extern_sym(Sym *sym, Section *section,
421 addr_t value, unsigned long size)
423 int sh_num = section ? section->sh_num : SHN_UNDEF;
424 put_extern_sym2(sym, sh_num, value, size, 1);
427 /* add a new relocation entry to symbol 'sym' in section 's' */
428 ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
429 addr_t addend)
431 int c = 0;
433 if (nocode_wanted && s == cur_text_section)
434 return;
436 if (sym) {
437 if (0 == sym->c)
438 put_extern_sym(sym, NULL, 0, 0);
439 c = sym->c;
442 /* now we can add ELF relocation info */
443 put_elf_reloca(symtab_section, s, offset, type, c, addend);
446 #if PTR_SIZE == 4
447 ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
449 greloca(s, sym, offset, type, 0);
451 #endif
453 /* ------------------------------------------------------------------------- */
454 /* symbol allocator */
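/* allocate a new pool of SYM_POOL_NB symbols and chain them onto the free list */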
455 static Sym *__sym_malloc(void)
457 Sym *sym_pool, *sym, *last_sym;
458 int i;
460 sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
461 dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);
463 last_sym = sym_free_first;
464 sym = sym_pool;
465 for(i = 0; i < SYM_POOL_NB; i++) {
466 sym->next = last_sym;
467 last_sym = sym;
468 sym++;
470 sym_free_first = last_sym;
471 return last_sym;
474 static inline Sym *sym_malloc(void)
476 Sym *sym;
477 #ifndef SYM_DEBUG
478 sym = sym_free_first;
479 if (!sym)
480 sym = __sym_malloc();
481 sym_free_first = sym->next;
482 return sym;
483 #else
484 sym = tcc_malloc(sizeof(Sym));
485 return sym;
486 #endif
489 ST_INLN void sym_free(Sym *sym)
491 #ifndef SYM_DEBUG
492 sym->next = sym_free_first;
493 sym_free_first = sym;
494 #else
495 tcc_free(sym);
496 #endif
499 /* push, without hashing */
500 ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, int c)
502 Sym *s;
504 s = sym_malloc();
505 memset(s, 0, sizeof *s);
506 s->v = v;
507 s->type.t = t;
508 s->c = c;
509 /* add in stack */
510 s->prev = *ps;
511 *ps = s;
512 return s;
515 /* find a symbol and return its associated structure. 's' is the top
516 of the symbol stack */
517 ST_FUNC Sym *sym_find2(Sym *s, int v)
519 while (s) {
520 if (s->v == v)
521 return s;
522 else if (s->v == -1)
523 return NULL;
524 s = s->prev;
526 return NULL;
529 /* structure lookup */
530 ST_INLN Sym *struct_find(int v)
532 v -= TOK_IDENT;
533 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
534 return NULL;
535 return table_ident[v]->sym_struct;
538 /* find an identifier */
539 ST_INLN Sym *sym_find(int v)
541 v -= TOK_IDENT;
542 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
543 return NULL;
544 return table_ident[v]->sym_identifier;
547 /* push a given symbol on the symbol stack */
548 ST_FUNC Sym *sym_push(int v, CType *type, int r, int c)
550 Sym *s, **ps;
551 TokenSym *ts;
553 if (local_stack)
554 ps = &local_stack;
555 else
556 ps = &global_stack;
557 s = sym_push2(ps, v, type->t, c);
558 s->type.ref = type->ref;
559 s->r = r;
560 /* don't record fields or anonymous symbols */
561 /* XXX: simplify */
562 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
563 /* record symbol in token array */
564 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
565 if (v & SYM_STRUCT)
566 ps = &ts->sym_struct;
567 else
568 ps = &ts->sym_identifier;
569 s->prev_tok = *ps;
570 *ps = s;
571 s->sym_scope = local_scope;
572 if (s->prev_tok && s->prev_tok->sym_scope == s->sym_scope)
573 tcc_error("redeclaration of '%s'",
574 get_tok_str(v & ~SYM_STRUCT, NULL));
576 return s;
579 /* push a global identifier */
580 ST_FUNC Sym *global_identifier_push(int v, int t, int c)
582 Sym *s, **ps;
583 s = sym_push2(&global_stack, v, t, c);
584 /* don't record anonymous symbol */
585 if (v < SYM_FIRST_ANOM) {
586 ps = &table_ident[v - TOK_IDENT]->sym_identifier;
587 /* modify the topmost local identifier, so that
588 sym_identifier will point to 's' when popped */
589 while (*ps != NULL && (*ps)->sym_scope)
590 ps = &(*ps)->prev_tok;
591 s->prev_tok = *ps;
592 *ps = s;
594 return s;
597 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
598 pop them yet from the list, but do remove them from the token array. */
599 ST_FUNC void sym_pop(Sym **ptop, Sym *b, int keep)
601 Sym *s, *ss, **ps;
602 TokenSym *ts;
603 int v;
605 s = *ptop;
606 while(s != b) {
607 ss = s->prev;
608 v = s->v;
609 /* remove symbol in token array */
610 /* XXX: simplify */
611 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
612 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
613 if (v & SYM_STRUCT)
614 ps = &ts->sym_struct;
615 else
616 ps = &ts->sym_identifier;
617 *ps = s->prev_tok;
619 if (!keep)
620 sym_free(s);
621 s = ss;
623 if (!keep)
624 *ptop = b;
627 /* ------------------------------------------------------------------------- */
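/* push a new value described by type 'type', register/flag word 'r' and constant 'vc' onto the value stack */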
629 static void vsetc(CType *type, int r, CValue *vc)
631 int v;
633 if (vtop >= vstack + (VSTACK_SIZE - 1))
634 tcc_error("memory full (vstack)");
635 /* cannot leave cpu flags live if other instructions are generated. Also
636 avoid leaving VT_JMP anywhere except on the top of the stack
637 because it would complicate the code generator.
639 Don't do this when nocode_wanted. vtop might come from
640 !nocode_wanted regions (see 88_codeopt.c) and transforming
641 it to a register without actually generating code is wrong
642 as their value might still be used for real. All values
643 we push under nocode_wanted will eventually be popped
644 again, so that the VT_CMP/VT_JMP value will be in vtop
645 when code is unsuppressed again.
647 Same logic below in vswap(); */
648 if (vtop >= vstack && !nocode_wanted) {
649 v = vtop->r & VT_VALMASK;
650 if (v == VT_CMP || (v & ~1) == VT_JMP)
651 gv(RC_INT);
654 vtop++;
655 vtop->type = *type;
656 vtop->r = r;
657 vtop->r2 = VT_CONST;
658 vtop->c = *vc;
659 vtop->sym = NULL;
662 ST_FUNC void vswap(void)
664 SValue tmp;
665 /* cannot vswap cpu flags. See comment at vsetc() above */
666 if (vtop >= vstack && !nocode_wanted) {
667 int v = vtop->r & VT_VALMASK;
668 if (v == VT_CMP || (v & ~1) == VT_JMP)
669 gv(RC_INT);
671 tmp = vtop[0];
672 vtop[0] = vtop[-1];
673 vtop[-1] = tmp;
676 /* pop stack value */
677 ST_FUNC void vpop(void)
679 int v;
680 v = vtop->r & VT_VALMASK;
681 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
682 /* for x86, we need to pop the FP stack */
683 if (v == TREG_ST0) {
684 o(0xd8dd); /* fstp %st(0) */
685 } else
686 #endif
687 if (v == VT_JMP || v == VT_JMPI) {
688 /* need to put correct jump if && or || without test */
689 gsym(vtop->c.i);
691 vtop--;
694 /* push constant of type "type" with useless value */
695 ST_FUNC void vpush(CType *type)
697 vset(type, VT_CONST, 0);
700 /* push integer constant */
701 ST_FUNC void vpushi(int v)
703 CValue cval;
704 cval.i = v;
705 vsetc(&int_type, VT_CONST, &cval);
708 /* push a pointer sized constant */
709 static void vpushs(addr_t v)
711 CValue cval;
712 cval.i = v;
713 vsetc(&size_type, VT_CONST, &cval);
716 /* push arbitrary 64bit constant */
717 ST_FUNC void vpush64(int ty, unsigned long long v)
719 CValue cval;
720 CType ctype;
721 ctype.t = ty;
722 ctype.ref = NULL;
723 cval.i = v;
724 vsetc(&ctype, VT_CONST, &cval);
727 /* push long long constant */
728 static inline void vpushll(long long v)
730 vpush64(VT_LLONG, v);
733 ST_FUNC void vset(CType *type, int r, int v)
735 CValue cval;
737 cval.i = v;
738 vsetc(type, r, &cval);
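/* like vset() but with an int type */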
741 static void vseti(int r, int v)
743 CType type;
744 type.t = VT_INT;
745 type.ref = NULL;
746 vset(&type, r, v);
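/* push a copy of the stack value 'v' */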
749 ST_FUNC void vpushv(SValue *v)
751 if (vtop >= vstack + (VSTACK_SIZE - 1))
752 tcc_error("memory full (vstack)");
753 vtop++;
754 *vtop = *v;
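/* duplicate the value on top of the stack */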
757 static void vdup(void)
759 vpushv(vtop);
762 /* rotate n first stack elements to the bottom
763 I1 ... In -> I2 ... In I1 [top is right]
765 ST_FUNC void vrotb(int n)
767 int i;
768 SValue tmp;
770 tmp = vtop[-n + 1];
771 for(i=-n+1;i!=0;i++)
772 vtop[i] = vtop[i+1];
773 vtop[0] = tmp;
776 /* rotate the n elements before entry e towards the top
777 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
779 ST_FUNC void vrote(SValue *e, int n)
781 int i;
782 SValue tmp;
784 tmp = *e;
785 for(i = 0;i < n - 1; i++)
786 e[-i] = e[-i - 1];
787 e[-n + 1] = tmp;
790 /* rotate n first stack elements to the top
791 I1 ... In -> In I1 ... I(n-1) [top is right]
793 ST_FUNC void vrott(int n)
795 vrote(vtop, n);
798 /* push a symbol value of TYPE */
799 static inline void vpushsym(CType *type, Sym *sym)
801 CValue cval;
802 cval.i = 0;
803 vsetc(type, VT_CONST | VT_SYM, &cval);
804 vtop->sym = sym;
807 /* Return a static symbol pointing to a section */
808 ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
810 int v;
811 Sym *sym;
813 v = anon_sym++;
814 sym = global_identifier_push(v, type->t | VT_STATIC, 0);
815 sym->type.ref = type->ref;
816 sym->r = VT_CONST | VT_SYM;
817 put_extern_sym(sym, sec, offset, size);
818 return sym;
821 /* push a reference to a section offset by adding a dummy symbol */
822 static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
824 vpushsym(type, get_sym_ref(type, sec, offset, size));
827 /* define a new external reference to a symbol 'v' of type 'u' */
828 ST_FUNC Sym *external_global_sym(int v, CType *type, int r)
830 Sym *s;
832 s = sym_find(v);
833 if (!s) {
834 /* push forward reference */
835 s = global_identifier_push(v, type->t | VT_EXTERN, 0);
836 s->type.ref = type->ref;
837 s->r = r | VT_CONST | VT_SYM;
838 } else if (IS_ASM_SYM(s)) {
839 s->type.t = type->t | (s->type.t & VT_EXTERN);
840 s->type.ref = type->ref;
841 update_storage(s);
843 return s;
846 /* Merge some type attributes. */
847 static void patch_type(Sym *sym, CType *type)
849 if (!(type->t & VT_EXTERN)) {
850 if (!(sym->type.t & VT_EXTERN))
851 tcc_error("redefinition of '%s'", get_tok_str(sym->v, NULL));
852 sym->type.t &= ~VT_EXTERN;
855 if (IS_ASM_SYM(sym)) {
856 /* stay static if both are static */
857 sym->type.t = type->t & (sym->type.t | ~VT_STATIC);
858 sym->type.ref = type->ref;
861 if (!is_compatible_types(&sym->type, type)) {
862 tcc_error("incompatible types for redefinition of '%s'",
863 get_tok_str(sym->v, NULL));
865 } else if ((sym->type.t & VT_BTYPE) == VT_FUNC) {
866 int static_proto = sym->type.t & VT_STATIC;
867 /* warn if static follows non-static function declaration */
868 if ((type->t & VT_STATIC) && !static_proto && !(type->t & VT_INLINE))
869 tcc_warning("static storage ignored for redefinition of '%s'",
870 get_tok_str(sym->v, NULL));
872 if (0 == (type->t & VT_EXTERN)) {
873 /* put complete type, use static from prototype */
874 sym->type.t = (type->t & ~VT_STATIC) | static_proto;
875 if (type->t & VT_INLINE)
876 sym->type.t = type->t;
877 sym->type.ref = type->ref;
880 } else {
881 if ((sym->type.t & VT_ARRAY) && type->ref->c >= 0) {
882 /* set array size if it was omitted in extern declaration */
883 if (sym->type.ref->c < 0)
884 sym->type.ref->c = type->ref->c;
885 else if (sym->type.ref->c != type->ref->c)
886 tcc_error("conflicting type for '%s'", get_tok_str(sym->v, NULL));
888 if ((type->t ^ sym->type.t) & VT_STATIC)
889 tcc_warning("storage mismatch for redefinition of '%s'",
890 get_tok_str(sym->v, NULL));
895 /* Merge some storage attributes. */
896 static void patch_storage(Sym *sym, AttributeDef *ad, CType *type)
898 if (type)
899 patch_type(sym, type);
901 #ifdef TCC_TARGET_PE
902 if (sym->a.dllimport != ad->a.dllimport)
903 tcc_error("incompatible dll linkage for redefinition of '%s'",
904 get_tok_str(sym->v, NULL));
905 sym->a.dllexport |= ad->a.dllexport;
906 #endif
907 sym->a.weak |= ad->a.weak;
908 if (ad->a.visibility) {
909 int vis = sym->a.visibility;
910 int vis2 = ad->a.visibility;
911 if (vis == STV_DEFAULT)
912 vis = vis2;
913 else if (vis2 != STV_DEFAULT)
914 vis = (vis < vis2) ? vis : vis2;
915 sym->a.visibility = vis;
917 if (ad->a.aligned)
918 sym->a.aligned = ad->a.aligned;
919 if (ad->asm_label)
920 sym->asm_label = ad->asm_label;
921 update_storage(sym);
924 /* define a new external reference to a symbol 'v' */
925 static Sym *external_sym(int v, CType *type, int r, AttributeDef *ad)
927 Sym *s;
928 s = sym_find(v);
929 if (!s) {
930 /* push forward reference */
931 s = sym_push(v, type, r | VT_CONST | VT_SYM, 0);
932 s->type.t |= VT_EXTERN;
933 s->a = ad->a;
934 s->sym_scope = 0;
935 } else {
936 if (s->type.ref == func_old_type.ref) {
937 s->type.ref = type->ref;
938 s->r = r | VT_CONST | VT_SYM;
939 s->type.t |= VT_EXTERN;
941 patch_storage(s, ad, type);
943 return s;
946 /* push a reference to global symbol v */
947 ST_FUNC void vpush_global_sym(CType *type, int v)
949 vpushsym(type, external_global_sym(v, type, 0));
952 /* save registers up to (vtop - n) stack entry */
953 ST_FUNC void save_regs(int n)
955 SValue *p, *p1;
956 for(p = vstack, p1 = vtop - n; p <= p1; p++)
957 save_reg(p->r);
960 /* save r to the memory stack, and mark it as being free */
961 ST_FUNC void save_reg(int r)
963 save_reg_upstack(r, 0);
966 /* save r to the memory stack, and mark it as being free,
967 if seen up to (vtop - n) stack entry */
968 ST_FUNC void save_reg_upstack(int r, int n)
970 int l, saved, size, align;
971 SValue *p, *p1, sv;
972 CType *type;
974 if ((r &= VT_VALMASK) >= VT_CONST)
975 return;
976 if (nocode_wanted)
977 return;
979 /* modify all stack values */
980 saved = 0;
981 l = 0;
982 for(p = vstack, p1 = vtop - n; p <= p1; p++) {
983 if ((p->r & VT_VALMASK) == r ||
984 ((p->type.t & VT_BTYPE) == VT_LLONG && (p->r2 & VT_VALMASK) == r)) {
985 /* must save value on stack if not already done */
986 if (!saved) {
987 /* NOTE: must reload 'r' because r might be equal to r2 */
988 r = p->r & VT_VALMASK;
989 /* store register in the stack */
990 type = &p->type;
991 if ((p->r & VT_LVAL) ||
992 (!is_float(type->t) && (type->t & VT_BTYPE) != VT_LLONG))
993 #if PTR_SIZE == 8
994 type = &char_pointer_type;
995 #else
996 type = &int_type;
997 #endif
998 size = type_size(type, &align);
999 loc = (loc - size) & -align;
1000 sv.type.t = type->t;
1001 sv.r = VT_LOCAL | VT_LVAL;
1002 sv.c.i = loc;
1003 store(r, &sv);
1004 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1005 /* x86 specific: need to pop fp register ST0 if saved */
1006 if (r == TREG_ST0) {
1007 o(0xd8dd); /* fstp %st(0) */
1009 #endif
1010 #if PTR_SIZE == 4
1011 /* special long long case */
1012 if ((type->t & VT_BTYPE) == VT_LLONG) {
1013 sv.c.i += 4;
1014 store(p->r2, &sv);
1016 #endif
1017 l = loc;
1018 saved = 1;
1020 /* mark that stack entry as being saved on the stack */
1021 if (p->r & VT_LVAL) {
1022 /* also clear the bounded flag because the
1023 relocation address of the function was stored in
1024 p->c.i */
1025 p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
1026 } else {
1027 p->r = lvalue_type(p->type.t) | VT_LOCAL;
1029 p->r2 = VT_CONST;
1030 p->c.i = l;
1035 #ifdef TCC_TARGET_ARM
1036 /* find a register of class 'rc2' with at most one reference on stack.
1037 * If none, call get_reg(rc) */
1038 ST_FUNC int get_reg_ex(int rc, int rc2)
1040 int r;
1041 SValue *p;
1043 for(r=0;r<NB_REGS;r++) {
1044 if (reg_classes[r] & rc2) {
1045 int n;
1046 n=0;
1047 for(p = vstack; p <= vtop; p++) {
1048 if ((p->r & VT_VALMASK) == r ||
1049 (p->r2 & VT_VALMASK) == r)
1050 n++;
1052 if (n <= 1)
1053 return r;
1056 return get_reg(rc);
1058 #endif
1060 /* find a free register of class 'rc'. If none, save one register */
1061 ST_FUNC int get_reg(int rc)
1063 int r;
1064 SValue *p;
1066 /* find a free register */
1067 for(r=0;r<NB_REGS;r++) {
1068 if (reg_classes[r] & rc) {
1069 if (nocode_wanted)
1070 return r;
1071 for(p=vstack;p<=vtop;p++) {
1072 if ((p->r & VT_VALMASK) == r ||
1073 (p->r2 & VT_VALMASK) == r)
1074 goto notfound;
1076 return r;
1078 notfound: ;
1081 /* no register left : free the first one on the stack (VERY
1082 IMPORTANT to start from the bottom to ensure that we don't
1083 spill registers used in gen_opi()) */
1084 for(p=vstack;p<=vtop;p++) {
1085 /* look at second register (if long long) */
1086 r = p->r2 & VT_VALMASK;
1087 if (r < VT_CONST && (reg_classes[r] & rc))
1088 goto save_found;
1089 r = p->r & VT_VALMASK;
1090 if (r < VT_CONST && (reg_classes[r] & rc)) {
1091 save_found:
1092 save_reg(r);
1093 return r;
1096 /* Should never come here */
1097 return -1;
1100 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1101 if needed */
1102 static void move_reg(int r, int s, int t)
1104 SValue sv;
1106 if (r != s) {
1107 save_reg(r);
1108 sv.type.t = t;
1109 sv.type.ref = NULL;
1110 sv.r = s;
1111 sv.c.i = 0;
1112 load(r, &sv);
1116 /* get address of vtop (vtop MUST BE an lvalue) */
1117 ST_FUNC void gaddrof(void)
1119 vtop->r &= ~VT_LVAL;
1120 /* tricky: if saved lvalue, then we can go back to lvalue */
1121 if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
1122 vtop->r = (vtop->r & ~(VT_VALMASK | VT_LVAL_TYPE)) | VT_LOCAL | VT_LVAL;
1127 #ifdef CONFIG_TCC_BCHECK
1128 /* generate lvalue bound code */
1129 static void gbound(void)
1131 int lval_type;
1132 CType type1;
1134 vtop->r &= ~VT_MUSTBOUND;
1135 /* if lvalue, then use checking code before dereferencing */
1136 if (vtop->r & VT_LVAL) {
1137 /* if not VT_BOUNDED value, then make one */
1138 if (!(vtop->r & VT_BOUNDED)) {
1139 lval_type = vtop->r & (VT_LVAL_TYPE | VT_LVAL);
1140 /* must save type because we must set it to int to get pointer */
1141 type1 = vtop->type;
1142 vtop->type.t = VT_PTR;
1143 gaddrof();
1144 vpushi(0);
1145 gen_bounded_ptr_add();
1146 vtop->r |= lval_type;
1147 vtop->type = type1;
1149 /* then check for dereferencing */
1150 gen_bounded_ptr_deref();
1153 #endif
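/* advance the bitfield lvalue on top of the stack by 'o' bytes and retype it as an unsigned byte lvalue */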
1155 static void incr_bf_adr(int o)
1157 vtop->type = char_pointer_type;
1158 gaddrof();
1159 vpushi(o);
1160 gen_op('+');
1161 vtop->type.t = (vtop->type.t & ~(VT_BTYPE|VT_DEFSIGN))
1162 | (VT_BYTE|VT_UNSIGNED);
1163 vtop->r = (vtop->r & ~VT_LVAL_TYPE)
1164 | (VT_LVAL_BYTE|VT_LVAL_UNSIGNED|VT_LVAL);
1167 /* single-byte load mode for packed or otherwise unaligned bitfields */
1168 static void load_packed_bf(CType *type, int bit_pos, int bit_size)
1170 int n, o, bits;
1171 save_reg_upstack(vtop->r, 1);
1172 vpush64(type->t & VT_BTYPE, 0); // B X
1173 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
1174 do {
1175 vswap(); // X B
1176 incr_bf_adr(o);
1177 vdup(); // X B B
1178 n = 8 - bit_pos;
1179 if (n > bit_size)
1180 n = bit_size;
1181 if (bit_pos)
1182 vpushi(bit_pos), gen_op(TOK_SHR), bit_pos = 0; // X B Y
1183 if (n < 8)
1184 vpushi((1 << n) - 1), gen_op('&');
1185 gen_cast(type);
1186 if (bits)
1187 vpushi(bits), gen_op(TOK_SHL);
1188 vrotb(3); // B Y X
1189 gen_op('|'); // B X
1190 bits += n, bit_size -= n, o = 1;
1191 } while (bit_size);
1192 vswap(), vpop();
1193 if (!(type->t & VT_UNSIGNED)) {
1194 n = ((type->t & VT_BTYPE) == VT_LLONG ? 64 : 32) - bits;
1195 vpushi(n), gen_op(TOK_SHL);
1196 vpushi(n), gen_op(TOK_SAR);
1200 /* single-byte store mode for packed or otherwise unaligned bitfields */
1201 static void store_packed_bf(int bit_pos, int bit_size)
1203 int bits, n, o, m, c;
1205 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1206 vswap(); // X B
1207 save_reg_upstack(vtop->r, 1);
1208 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
1209 do {
1210 incr_bf_adr(o); // X B
1211 vswap(); //B X
1212 c ? vdup() : gv_dup(); // B V X
1213 vrott(3); // X B V
1214 if (bits)
1215 vpushi(bits), gen_op(TOK_SHR);
1216 if (bit_pos)
1217 vpushi(bit_pos), gen_op(TOK_SHL);
1218 n = 8 - bit_pos;
1219 if (n > bit_size)
1220 n = bit_size;
1221 if (n < 8) {
1222 m = ((1 << n) - 1) << bit_pos;
1223 vpushi(m), gen_op('&'); // X B V1
1224 vpushv(vtop-1); // X B V1 B
1225 vpushi(m & 0x80 ? ~m & 0x7f : ~m);
1226 gen_op('&'); // X B V1 B1
1227 gen_op('|'); // X B V2
1229 vdup(), vtop[-1] = vtop[-2]; // X B B V2
1230 vstore(), vpop(); // X B
1231 bits += n, bit_size -= n, bit_pos = 0, o = 1;
1232 } while (bit_size);
1233 vpop(), vpop();
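/* adjust a bitfield value to the storage type recorded in type.ref->auxtype, if any; the returned auxtype lets callers detect VT_STRUCT, which selects the packed single-byte access path */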
1236 static int adjust_bf(SValue *sv, int bit_pos, int bit_size)
1238 int t;
1239 if (0 == sv->type.ref)
1240 return 0;
1241 t = sv->type.ref->auxtype;
1242 if (t != -1 && t != VT_STRUCT) {
1243 sv->type.t = (sv->type.t & ~VT_BTYPE) | t;
1244 sv->r = (sv->r & ~VT_LVAL_TYPE) | lvalue_type(sv->type.t);
1246 return t;
1249 /* store vtop in a register belonging to class 'rc'. lvalues are
1250 converted to values. Cannot be used if the value cannot be converted to
1251 register value (such as structures). */
1252 ST_FUNC int gv(int rc)
1254 int r, bit_pos, bit_size, size, align, rc2;
1256 /* NOTE: get_reg can modify vstack[] */
1257 if (vtop->type.t & VT_BITFIELD) {
1258 CType type;
1260 bit_pos = BIT_POS(vtop->type.t);
1261 bit_size = BIT_SIZE(vtop->type.t);
1262 /* remove bit field info to avoid loops */
1263 vtop->type.t &= ~VT_STRUCT_MASK;
1265 type.ref = NULL;
1266 type.t = vtop->type.t & VT_UNSIGNED;
1267 if ((vtop->type.t & VT_BTYPE) == VT_BOOL)
1268 type.t |= VT_UNSIGNED;
1270 r = adjust_bf(vtop, bit_pos, bit_size);
1272 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
1273 type.t |= VT_LLONG;
1274 else
1275 type.t |= VT_INT;
1277 if (r == VT_STRUCT) {
1278 load_packed_bf(&type, bit_pos, bit_size);
1279 } else {
1280 int bits = (type.t & VT_BTYPE) == VT_LLONG ? 64 : 32;
1281 /* cast to int to propagate signedness in following ops */
1282 gen_cast(&type);
1283 /* generate shifts */
1284 vpushi(bits - (bit_pos + bit_size));
1285 gen_op(TOK_SHL);
1286 vpushi(bits - bit_size);
1287 /* NOTE: transformed to SHR if unsigned */
1288 gen_op(TOK_SAR);
1290 r = gv(rc);
1291 } else {
1292 if (is_float(vtop->type.t) &&
1293 (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1294 unsigned long offset;
1295 /* CPUs usually cannot use float constants, so we store them
1296 generically in data segment */
1297 size = type_size(&vtop->type, &align);
1298 if (NODATA_WANTED)
1299 size = 0, align = 1;
1300 offset = section_add(data_section, size, align);
1301 vpush_ref(&vtop->type, data_section, offset, size);
1302 vswap();
1303 init_putv(&vtop->type, data_section, offset);
1304 vtop->r |= VT_LVAL;
1306 #ifdef CONFIG_TCC_BCHECK
1307 if (vtop->r & VT_MUSTBOUND)
1308 gbound();
1309 #endif
1311 r = vtop->r & VT_VALMASK;
1312 rc2 = (rc & RC_FLOAT) ? RC_FLOAT : RC_INT;
1313 #ifndef TCC_TARGET_ARM64
1314 if (rc == RC_IRET)
1315 rc2 = RC_LRET;
1316 #ifdef TCC_TARGET_X86_64
1317 else if (rc == RC_FRET)
1318 rc2 = RC_QRET;
1319 #endif
1320 #endif
1321 /* need to reload if:
1322 - constant
1323 - lvalue (need to dereference pointer)
1324 - already a register, but not in the right class */
1325 if (r >= VT_CONST
1326 || (vtop->r & VT_LVAL)
1327 || !(reg_classes[r] & rc)
1328 #if PTR_SIZE == 8
1329 || ((vtop->type.t & VT_BTYPE) == VT_QLONG && !(reg_classes[vtop->r2] & rc2))
1330 || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT && !(reg_classes[vtop->r2] & rc2))
1331 #else
1332 || ((vtop->type.t & VT_BTYPE) == VT_LLONG && !(reg_classes[vtop->r2] & rc2))
1333 #endif
1336 r = get_reg(rc);
1337 #if PTR_SIZE == 8
1338 if (((vtop->type.t & VT_BTYPE) == VT_QLONG) || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT)) {
1339 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
1340 #else
1341 if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
1342 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
1343 unsigned long long ll;
1344 #endif
1345 int r2, original_type;
1346 original_type = vtop->type.t;
1347 /* two register type load : expand to two words
1348 temporarily */
1349 #if PTR_SIZE == 4
1350 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1351 /* load constant */
1352 ll = vtop->c.i;
1353 vtop->c.i = ll; /* first word */
1354 load(r, vtop);
1355 vtop->r = r; /* save register value */
1356 vpushi(ll >> 32); /* second word */
1357 } else
1358 #endif
1359 if (vtop->r & VT_LVAL) {
1360 /* We do not want to modify the long long
1361 pointer here, so the safest (and least
1362 efficient) is to save all the other registers
1363 in the stack. XXX: totally inefficient. */
1364 #if 0
1365 save_regs(1);
1366 #else
1367 /* lvalue_save: save only if used further down the stack */
1368 save_reg_upstack(vtop->r, 1);
1369 #endif
1370 /* load from memory */
1371 vtop->type.t = load_type;
1372 load(r, vtop);
1373 vdup();
1374 vtop[-1].r = r; /* save register value */
1375 /* increment pointer to get second word */
1376 vtop->type.t = addr_type;
1377 gaddrof();
1378 vpushi(load_size);
1379 gen_op('+');
1380 vtop->r |= VT_LVAL;
1381 vtop->type.t = load_type;
1382 } else {
1383 /* move registers */
1384 load(r, vtop);
1385 vdup();
1386 vtop[-1].r = r; /* save register value */
1387 vtop->r = vtop[-1].r2;
1389 /* Allocate second register. Here we rely on the fact that
1390 get_reg() tries first to free r2 of an SValue. */
1391 r2 = get_reg(rc2);
1392 load(r2, vtop);
1393 vpop();
1394 /* write second register */
1395 vtop->r2 = r2;
1396 vtop->type.t = original_type;
1397 } else if ((vtop->r & VT_LVAL) && !is_float(vtop->type.t)) {
1398 int t1, t;
1399 /* lvalue of scalar type : need to use lvalue type
1400 because of possible cast */
1401 t = vtop->type.t;
1402 t1 = t;
1403 /* compute memory access type */
1404 if (vtop->r & VT_LVAL_BYTE)
1405 t = VT_BYTE;
1406 else if (vtop->r & VT_LVAL_SHORT)
1407 t = VT_SHORT;
1408 if (vtop->r & VT_LVAL_UNSIGNED)
1409 t |= VT_UNSIGNED;
1410 vtop->type.t = t;
1411 load(r, vtop);
1412 /* restore wanted type */
1413 vtop->type.t = t1;
1414 } else {
1415 /* one register type load */
1416 load(r, vtop);
1419 vtop->r = r;
1420 #ifdef TCC_TARGET_C67
1421 /* uses register pairs for doubles */
1422 if ((vtop->type.t & VT_BTYPE) == VT_DOUBLE)
1423 vtop->r2 = r+1;
1424 #endif
1426 return r;
1429 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1430 ST_FUNC void gv2(int rc1, int rc2)
1432 int v;
1434 /* generate more generic register first. But VT_JMP or VT_CMP
1435 values must be generated first in all cases to avoid possible
1436 reload errors */
1437 v = vtop[0].r & VT_VALMASK;
1438 if (v != VT_CMP && (v & ~1) != VT_JMP && rc1 <= rc2) {
1439 vswap();
1440 gv(rc1);
1441 vswap();
1442 gv(rc2);
1443 /* test if reload is needed for first register */
1444 if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
1445 vswap();
1446 gv(rc1);
1447 vswap();
1449 } else {
1450 gv(rc2);
1451 vswap();
1452 gv(rc1);
1453 vswap();
1454 /* test if reload is needed for first register */
1455 if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
1456 gv(rc2);
1461 #ifndef TCC_TARGET_ARM64
1462 /* wrapper around RC_FRET to return a register by type */
1463 static int rc_fret(int t)
1465 #ifdef TCC_TARGET_X86_64
1466 if (t == VT_LDOUBLE) {
1467 return RC_ST0;
1469 #endif
1470 return RC_FRET;
1472 #endif
1474 /* wrapper around REG_FRET to return a register by type */
1475 static int reg_fret(int t)
1477 #ifdef TCC_TARGET_X86_64
1478 if (t == VT_LDOUBLE) {
1479 return TREG_ST0;
1481 #endif
1482 return REG_FRET;
1485 #if PTR_SIZE == 4
1486 /* expand 64bit on stack in two ints */
1487 static void lexpand(void)
1489 int u, v;
1490 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1491 v = vtop->r & (VT_VALMASK | VT_LVAL);
1492 if (v == VT_CONST) {
1493 vdup();
1494 vtop[0].c.i >>= 32;
1495 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1496 vdup();
1497 vtop[0].c.i += 4;
1498 } else {
1499 gv(RC_INT);
1500 vdup();
1501 vtop[0].r = vtop[-1].r2;
1502 vtop[0].r2 = vtop[-1].r2 = VT_CONST;
1504 vtop[0].type.t = vtop[-1].type.t = VT_INT | u;
1506 #endif
1508 #ifdef TCC_TARGET_ARM
1509 /* expand long long on stack */
1510 ST_FUNC void lexpand_nr(void)
1512 int u,v;
1514 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1515 vdup();
1516 vtop->r2 = VT_CONST;
1517 vtop->type.t = VT_INT | u;
1518 v=vtop[-1].r & (VT_VALMASK | VT_LVAL);
1519 if (v == VT_CONST) {
1520 vtop[-1].c.i = vtop->c.i;
1521 vtop->c.i = vtop->c.i >> 32;
1522 vtop->r = VT_CONST;
1523 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1524 vtop->c.i += 4;
1525 vtop->r = vtop[-1].r;
1526 } else if (v > VT_CONST) {
1527 vtop--;
1528 lexpand();
1529 } else
1530 vtop->r = vtop[-1].r2;
1531 vtop[-1].r2 = VT_CONST;
1532 vtop[-1].type.t = VT_INT | u;
1534 #endif
1536 #if PTR_SIZE == 4
1537 /* build a long long from two ints */
1538 static void lbuild(int t)
1540 gv2(RC_INT, RC_INT);
1541 vtop[-1].r2 = vtop[0].r;
1542 vtop[-1].type.t = t;
1543 vpop();
1545 #endif
1547 /* convert stack entry to register and duplicate its value in another
1548 register */
1549 static void gv_dup(void)
1551 int rc, t, r, r1;
1552 SValue sv;
1554 t = vtop->type.t;
1555 #if PTR_SIZE == 4
1556 if ((t & VT_BTYPE) == VT_LLONG) {
1557 if (t & VT_BITFIELD) {
1558 gv(RC_INT);
1559 t = vtop->type.t;
1561 lexpand();
1562 gv_dup();
1563 vswap();
1564 vrotb(3);
1565 gv_dup();
1566 vrotb(4);
1567 /* stack: H L L1 H1 */
1568 lbuild(t);
1569 vrotb(3);
1570 vrotb(3);
1571 vswap();
1572 lbuild(t);
1573 vswap();
1574 } else
1575 #endif
1577 /* duplicate value */
1578 rc = RC_INT;
1579 sv.type.t = VT_INT;
1580 if (is_float(t)) {
1581 rc = RC_FLOAT;
1582 #ifdef TCC_TARGET_X86_64
1583 if ((t & VT_BTYPE) == VT_LDOUBLE) {
1584 rc = RC_ST0;
1586 #endif
1587 sv.type.t = t;
1589 r = gv(rc);
1590 r1 = get_reg(rc);
1591 sv.r = r;
1592 sv.c.i = 0;
1593 load(r1, &sv); /* move r to r1 */
1594 vdup();
1595 /* duplicates value */
1596 if (r != r1)
1597 vtop->r = r1;
1601 /* Generate value test
1603 * Generate a test for any value (jump, comparison and integers) */
1604 ST_FUNC int gvtst(int inv, int t)
1606 int v = vtop->r & VT_VALMASK;
1607 if (v != VT_CMP && v != VT_JMP && v != VT_JMPI) {
1608 vpushi(0);
1609 gen_op(TOK_NE);
1611 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1612 /* constant jmp optimization */
1613 if ((vtop->c.i != 0) != inv)
1614 t = gjmp(t);
1615 vtop--;
1616 return t;
1618 return gtst(inv, t);
1621 #if PTR_SIZE == 4
1622 /* generate CPU independent (unsigned) long long operations */
1623 static void gen_opl(int op)
1625 int t, a, b, op1, c, i;
1626 int func;
1627 unsigned short reg_iret = REG_IRET;
1628 unsigned short reg_lret = REG_LRET;
1629 SValue tmp;
1631 switch(op) {
1632 case '/':
1633 case TOK_PDIV:
1634 func = TOK___divdi3;
1635 goto gen_func;
1636 case TOK_UDIV:
1637 func = TOK___udivdi3;
1638 goto gen_func;
1639 case '%':
1640 func = TOK___moddi3;
1641 goto gen_mod_func;
1642 case TOK_UMOD:
1643 func = TOK___umoddi3;
1644 gen_mod_func:
1645 #ifdef TCC_ARM_EABI
1646 reg_iret = TREG_R2;
1647 reg_lret = TREG_R3;
1648 #endif
1649 gen_func:
1650 /* call generic long long function */
1651 vpush_global_sym(&func_old_type, func);
1652 vrott(3);
1653 gfunc_call(2);
1654 vpushi(0);
1655 vtop->r = reg_iret;
1656 vtop->r2 = reg_lret;
1657 break;
1658 case '^':
1659 case '&':
1660 case '|':
1661 case '*':
1662 case '+':
1663 case '-':
1664 //pv("gen_opl A",0,2);
1665 t = vtop->type.t;
1666 vswap();
1667 lexpand();
1668 vrotb(3);
1669 lexpand();
1670 /* stack: L1 H1 L2 H2 */
1671 tmp = vtop[0];
1672 vtop[0] = vtop[-3];
1673 vtop[-3] = tmp;
1674 tmp = vtop[-2];
1675 vtop[-2] = vtop[-3];
1676 vtop[-3] = tmp;
1677 vswap();
1678 /* stack: H1 H2 L1 L2 */
1679 //pv("gen_opl B",0,4);
1680 if (op == '*') {
1681 vpushv(vtop - 1);
1682 vpushv(vtop - 1);
1683 gen_op(TOK_UMULL);
1684 lexpand();
1685 /* stack: H1 H2 L1 L2 ML MH */
1686 for(i=0;i<4;i++)
1687 vrotb(6);
1688 /* stack: ML MH H1 H2 L1 L2 */
1689 tmp = vtop[0];
1690 vtop[0] = vtop[-2];
1691 vtop[-2] = tmp;
1692 /* stack: ML MH H1 L2 H2 L1 */
1693 gen_op('*');
1694 vrotb(3);
1695 vrotb(3);
1696 gen_op('*');
1697 /* stack: ML MH M1 M2 */
1698 gen_op('+');
1699 gen_op('+');
1700 } else if (op == '+' || op == '-') {
1701 /* XXX: add non carry method too (for MIPS or alpha) */
1702 if (op == '+')
1703 op1 = TOK_ADDC1;
1704 else
1705 op1 = TOK_SUBC1;
1706 gen_op(op1);
1707 /* stack: H1 H2 (L1 op L2) */
1708 vrotb(3);
1709 vrotb(3);
1710 gen_op(op1 + 1); /* TOK_xxxC2 */
1711 } else {
1712 gen_op(op);
1713 /* stack: H1 H2 (L1 op L2) */
1714 vrotb(3);
1715 vrotb(3);
1716 /* stack: (L1 op L2) H1 H2 */
1717 gen_op(op);
1718 /* stack: (L1 op L2) (H1 op H2) */
1720 /* stack: L H */
1721 lbuild(t);
1722 break;
1723 case TOK_SAR:
1724 case TOK_SHR:
1725 case TOK_SHL:
1726 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1727 t = vtop[-1].type.t;
1728 vswap();
1729 lexpand();
1730 vrotb(3);
1731 /* stack: L H shift */
1732 c = (int)vtop->c.i;
1733 /* constant: simpler */
1734 /* NOTE: all comments are for SHL. the other cases are
1735 done by swapping words */
1736 vpop();
1737 if (op != TOK_SHL)
1738 vswap();
1739 if (c >= 32) {
1740 /* stack: L H */
1741 vpop();
1742 if (c > 32) {
1743 vpushi(c - 32);
1744 gen_op(op);
1746 if (op != TOK_SAR) {
1747 vpushi(0);
1748 } else {
1749 gv_dup();
1750 vpushi(31);
1751 gen_op(TOK_SAR);
1753 vswap();
1754 } else {
1755 vswap();
1756 gv_dup();
1757 /* stack: H L L */
1758 vpushi(c);
1759 gen_op(op);
1760 vswap();
1761 vpushi(32 - c);
1762 if (op == TOK_SHL)
1763 gen_op(TOK_SHR);
1764 else
1765 gen_op(TOK_SHL);
1766 vrotb(3);
1767 /* stack: L L H */
1768 vpushi(c);
1769 if (op == TOK_SHL)
1770 gen_op(TOK_SHL);
1771 else
1772 gen_op(TOK_SHR);
1773 gen_op('|');
1775 if (op != TOK_SHL)
1776 vswap();
1777 lbuild(t);
1778 } else {
1779 /* XXX: should provide a faster fallback on x86 ? */
1780 switch(op) {
1781 case TOK_SAR:
1782 func = TOK___ashrdi3;
1783 goto gen_func;
1784 case TOK_SHR:
1785 func = TOK___lshrdi3;
1786 goto gen_func;
1787 case TOK_SHL:
1788 func = TOK___ashldi3;
1789 goto gen_func;
1792 break;
1793 default:
1794 /* compare operations */
1795 t = vtop->type.t;
1796 vswap();
1797 lexpand();
1798 vrotb(3);
1799 lexpand();
1800 /* stack: L1 H1 L2 H2 */
1801 tmp = vtop[-1];
1802 vtop[-1] = vtop[-2];
1803 vtop[-2] = tmp;
1804 /* stack: L1 L2 H1 H2 */
1805 /* compare high */
1806 op1 = op;
1807 /* when values are equal, we need to compare low words. since
1808 the jump is inverted, we invert the test too. */
1809 if (op1 == TOK_LT)
1810 op1 = TOK_LE;
1811 else if (op1 == TOK_GT)
1812 op1 = TOK_GE;
1813 else if (op1 == TOK_ULT)
1814 op1 = TOK_ULE;
1815 else if (op1 == TOK_UGT)
1816 op1 = TOK_UGE;
1817 a = 0;
1818 b = 0;
1819 gen_op(op1);
1820 if (op == TOK_NE) {
1821 b = gvtst(0, 0);
1822 } else {
1823 a = gvtst(1, 0);
1824 if (op != TOK_EQ) {
1825 /* generate non equal test */
1826 vpushi(TOK_NE);
1827 vtop->r = VT_CMP;
1828 b = gvtst(0, 0);
1831 /* compare low. Always unsigned */
1832 op1 = op;
1833 if (op1 == TOK_LT)
1834 op1 = TOK_ULT;
1835 else if (op1 == TOK_LE)
1836 op1 = TOK_ULE;
1837 else if (op1 == TOK_GT)
1838 op1 = TOK_UGT;
1839 else if (op1 == TOK_GE)
1840 op1 = TOK_UGE;
1841 gen_op(op1);
1842 a = gvtst(1, a);
1843 gsym(b);
1844 vseti(VT_JMPI, a);
1845 break;
1848 #endif
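/* signed 64-bit division on raw uint64_t operands, used for constant folding in gen_opic() */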
1850 static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
1852 uint64_t x = (a >> 63 ? -a : a) / (b >> 63 ? -b : b);
1853 return (a ^ b) >> 63 ? -x : x;
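/* signed 64-bit '<' on raw uint64_t operands: flip the sign bits and compare unsigned */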
1856 static int gen_opic_lt(uint64_t a, uint64_t b)
1858 return (a ^ (uint64_t)1 << 63) < (b ^ (uint64_t)1 << 63);
1861 /* handle integer constant optimizations and various machine
1862 independent opt */
1863 static void gen_opic(int op)
1865 SValue *v1 = vtop - 1;
1866 SValue *v2 = vtop;
1867 int t1 = v1->type.t & VT_BTYPE;
1868 int t2 = v2->type.t & VT_BTYPE;
1869 int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1870 int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1871 uint64_t l1 = c1 ? v1->c.i : 0;
1872 uint64_t l2 = c2 ? v2->c.i : 0;
1873 int shm = (t1 == VT_LLONG) ? 63 : 31;
1875 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
1876 l1 = ((uint32_t)l1 |
1877 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
1878 if (t2 != VT_LLONG && (PTR_SIZE != 8 || t2 != VT_PTR))
1879 l2 = ((uint32_t)l2 |
1880 (v2->type.t & VT_UNSIGNED ? 0 : -(l2 & 0x80000000)));
1882 if (c1 && c2) {
1883 switch(op) {
1884 case '+': l1 += l2; break;
1885 case '-': l1 -= l2; break;
1886 case '&': l1 &= l2; break;
1887 case '^': l1 ^= l2; break;
1888 case '|': l1 |= l2; break;
1889 case '*': l1 *= l2; break;
1891 case TOK_PDIV:
1892 case '/':
1893 case '%':
1894 case TOK_UDIV:
1895 case TOK_UMOD:
1896 /* if division by zero, generate explicit division */
1897 if (l2 == 0) {
1898 if (const_wanted)
1899 tcc_error("division by zero in constant");
1900 goto general_case;
1902 switch(op) {
1903 default: l1 = gen_opic_sdiv(l1, l2); break;
1904 case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
1905 case TOK_UDIV: l1 = l1 / l2; break;
1906 case TOK_UMOD: l1 = l1 % l2; break;
1908 break;
1909 case TOK_SHL: l1 <<= (l2 & shm); break;
1910 case TOK_SHR: l1 >>= (l2 & shm); break;
1911 case TOK_SAR:
1912 l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
1913 break;
1914 /* tests */
1915 case TOK_ULT: l1 = l1 < l2; break;
1916 case TOK_UGE: l1 = l1 >= l2; break;
1917 case TOK_EQ: l1 = l1 == l2; break;
1918 case TOK_NE: l1 = l1 != l2; break;
1919 case TOK_ULE: l1 = l1 <= l2; break;
1920 case TOK_UGT: l1 = l1 > l2; break;
1921 case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
1922 case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
1923 case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
1924 case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
1925 /* logical */
1926 case TOK_LAND: l1 = l1 && l2; break;
1927 case TOK_LOR: l1 = l1 || l2; break;
1928 default:
1929 goto general_case;
1931 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
1932 l1 = ((uint32_t)l1 |
1933 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
1934 v1->c.i = l1;
1935 vtop--;
1936 } else {
1937 /* if commutative ops, put c2 as constant */
1938 if (c1 && (op == '+' || op == '&' || op == '^' ||
1939 op == '|' || op == '*')) {
1940 vswap();
1941 c2 = c1; //c = c1, c1 = c2, c2 = c;
1942 l2 = l1; //l = l1, l1 = l2, l2 = l;
1944 if (!const_wanted &&
1945 c1 && ((l1 == 0 &&
1946 (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
1947 (l1 == -1 && op == TOK_SAR))) {
1948 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
1949 vtop--;
1950 } else if (!const_wanted &&
1951 c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
1952 (op == '|' &&
1953 (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))) ||
1954 (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
1955 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
1956 if (l2 == 1)
1957 vtop->c.i = 0;
1958 vswap();
1959 vtop--;
1960 } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
1961 op == TOK_PDIV) &&
1962 l2 == 1) ||
1963 ((op == '+' || op == '-' || op == '|' || op == '^' ||
1964 op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
1965 l2 == 0) ||
1966 (op == '&' &&
1967 (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))))) {
1968 /* filter out NOP operations like x*1, x-0, x&-1... */
1969 vtop--;
1970 } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
1971 /* try to use shifts instead of muls or divs */
1972 if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
1973 int n = -1;
1974 while (l2) {
1975 l2 >>= 1;
1976 n++;
1978 vtop->c.i = n;
1979 if (op == '*')
1980 op = TOK_SHL;
1981 else if (op == TOK_PDIV)
1982 op = TOK_SAR;
1983 else
1984 op = TOK_SHR;
1986 goto general_case;
1987 } else if (c2 && (op == '+' || op == '-') &&
1988 (((vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM))
1989 || (vtop[-1].r & (VT_VALMASK | VT_LVAL)) == VT_LOCAL)) {
1990 /* symbol + constant case */
1991 if (op == '-')
1992 l2 = -l2;
1993 l2 += vtop[-1].c.i;
1994 /* The backends can't always deal with addends to symbols
1995 larger than +-1<<31. Don't construct such. */
1996 if ((int)l2 != l2)
1997 goto general_case;
1998 vtop--;
1999 vtop->c.i = l2;
2000 } else {
2001 general_case:
2002 /* call low level op generator */
2003 if (t1 == VT_LLONG || t2 == VT_LLONG ||
2004 (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
2005 gen_opl(op);
2006 else
2007 gen_opi(op);
2012 /* generate a floating point operation with constant propagation */
2013 static void gen_opif(int op)
2015 int c1, c2;
2016 SValue *v1, *v2;
2017 #if defined _MSC_VER && defined _AMD64_
2018 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2019 volatile
2020 #endif
2021 long double f1, f2;
2023 v1 = vtop - 1;
2024 v2 = vtop;
2025 /* currently, we cannot do computations with forward symbols */
2026 c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2027 c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2028 if (c1 && c2) {
2029 if (v1->type.t == VT_FLOAT) {
2030 f1 = v1->c.f;
2031 f2 = v2->c.f;
2032 } else if (v1->type.t == VT_DOUBLE) {
2033 f1 = v1->c.d;
2034 f2 = v2->c.d;
2035 } else {
2036 f1 = v1->c.ld;
2037 f2 = v2->c.ld;
2040 /* NOTE: we only do constant propagation if the numbers are finite (not
2041 NaN or infinity) (ANSI spec) */
2042 if (!ieee_finite(f1) || !ieee_finite(f2))
2043 goto general_case;
2045 switch(op) {
2046 case '+': f1 += f2; break;
2047 case '-': f1 -= f2; break;
2048 case '*': f1 *= f2; break;
2049 case '/':
2050 if (f2 == 0.0) {
2051 /* If not in initializer we need to potentially generate
2052 FP exceptions at runtime, otherwise we want to fold. */
2053 if (!const_wanted)
2054 goto general_case;
2056 f1 /= f2;
2057 break;
2058 /* XXX: also handles tests ? */
2059 default:
2060 goto general_case;
2062 /* XXX: overflow test ? */
2063 if (v1->type.t == VT_FLOAT) {
2064 v1->c.f = f1;
2065 } else if (v1->type.t == VT_DOUBLE) {
2066 v1->c.d = f1;
2067 } else {
2068 v1->c.ld = f1;
2070 vtop--;
2071 } else {
2072 general_case:
2073 gen_opf(op);
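/* return the size in bytes of the type pointed to by 'type' */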
2077 static int pointed_size(CType *type)
2079 int align;
2080 return type_size(pointed_type(type), &align);
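/* push the runtime size of the VLA element type pointed to by 'type' onto the value stack */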
2083 static void vla_runtime_pointed_size(CType *type)
2085 int align;
2086 vla_runtime_type_size(pointed_type(type), &align);
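/* return true if 'p' is a null pointer constant (integer or pointer constant with value zero) */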
2089 static inline int is_null_pointer(SValue *p)
2091 if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
2092 return 0;
2093 return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
2094 ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
2095 ((p->type.t & VT_BTYPE) == VT_PTR &&
2096 (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0));
2099 static inline int is_integer_btype(int bt)
2101 return (bt == VT_BYTE || bt == VT_SHORT ||
2102 bt == VT_INT || bt == VT_LLONG);
2105 /* check types for comparison or subtraction of pointers */
2106 static void check_comparison_pointer_types(SValue *p1, SValue *p2, int op)
2108 CType *type1, *type2, tmp_type1, tmp_type2;
2109 int bt1, bt2;
2111 /* null pointers are accepted for all comparisons, as gcc does */
2112 if (is_null_pointer(p1) || is_null_pointer(p2))
2113 return;
2114 type1 = &p1->type;
2115 type2 = &p2->type;
2116 bt1 = type1->t & VT_BTYPE;
2117 bt2 = type2->t & VT_BTYPE;
2118 /* accept comparison between pointer and integer with a warning */
2119 if ((is_integer_btype(bt1) || is_integer_btype(bt2)) && op != '-') {
2120 if (op != TOK_LOR && op != TOK_LAND )
2121 tcc_warning("comparison between pointer and integer");
2122 return;
2125 /* both must be pointers or implicit function pointers */
2126 if (bt1 == VT_PTR) {
2127 type1 = pointed_type(type1);
2128 } else if (bt1 != VT_FUNC)
2129 goto invalid_operands;
2131 if (bt2 == VT_PTR) {
2132 type2 = pointed_type(type2);
2133 } else if (bt2 != VT_FUNC) {
2134 invalid_operands:
2135 tcc_error("invalid operands to binary %s", get_tok_str(op, NULL));
2137 if ((type1->t & VT_BTYPE) == VT_VOID ||
2138 (type2->t & VT_BTYPE) == VT_VOID)
2139 return;
2140 tmp_type1 = *type1;
2141 tmp_type2 = *type2;
2142 tmp_type1.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
2143 tmp_type2.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
2144 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
2145 /* gcc-like error if '-' is used */
2146 if (op == '-')
2147 goto invalid_operands;
2148 else
2149 tcc_warning("comparison of distinct pointer types lacks a cast");
2153 /* generic gen_op: handles types problems */
2154 ST_FUNC void gen_op(int op)
2156 int u, t1, t2, bt1, bt2, t;
2157 CType type1;
2159 redo:
2160 t1 = vtop[-1].type.t;
2161 t2 = vtop[0].type.t;
2162 bt1 = t1 & VT_BTYPE;
2163 bt2 = t2 & VT_BTYPE;
2165 if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
2166 tcc_error("operation on a struct");
2167 } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
2168 if (bt2 == VT_FUNC) {
2169 mk_pointer(&vtop->type);
2170 gaddrof();
2172 if (bt1 == VT_FUNC) {
2173 vswap();
2174 mk_pointer(&vtop->type);
2175 gaddrof();
2176 vswap();
2178 goto redo;
2179 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
2180 /* at least one operand is a pointer */
2181 /* relational op: must be both pointers */
2182 if (op >= TOK_ULT && op <= TOK_LOR) {
2183 check_comparison_pointer_types(vtop - 1, vtop, op);
2184 /* pointers are handled as unsigned */
2185 #if PTR_SIZE == 8
2186 t = VT_LLONG | VT_UNSIGNED;
2187 #else
2188 t = VT_INT | VT_UNSIGNED;
2189 #endif
2190 goto std_op;
2192 /* if both pointers, then it must be the '-' op */
2193 if (bt1 == VT_PTR && bt2 == VT_PTR) {
2194 if (op != '-')
2195 tcc_error("cannot use pointers here");
2196 check_comparison_pointer_types(vtop - 1, vtop, op);
2197 /* XXX: check that types are compatible */
2198 if (vtop[-1].type.t & VT_VLA) {
2199 vla_runtime_pointed_size(&vtop[-1].type);
2200 } else {
2201 vpushi(pointed_size(&vtop[-1].type));
2203 vrott(3);
2204 gen_opic(op);
2205 vtop->type.t = ptrdiff_type.t;
2206 vswap();
2207 gen_op(TOK_PDIV);
2208 } else {
2209 /* exactly one pointer : must be '+' or '-'. */
2210 if (op != '-' && op != '+')
2211 tcc_error("cannot use pointers here");
2212 /* Put pointer as first operand */
2213 if (bt2 == VT_PTR) {
2214 vswap();
2215 t = t1, t1 = t2, t2 = t;
2217 #if PTR_SIZE == 4
2218 if ((vtop[0].type.t & VT_BTYPE) == VT_LLONG)
2219 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2220 gen_cast_s(VT_INT);
2221 #endif
2222 type1 = vtop[-1].type;
2223 type1.t &= ~VT_ARRAY;
2224 if (vtop[-1].type.t & VT_VLA)
2225 vla_runtime_pointed_size(&vtop[-1].type);
2226 else {
2227 u = pointed_size(&vtop[-1].type);
2228 if (u < 0)
2229 tcc_error("unknown array element size");
2230 #if PTR_SIZE == 8
2231 vpushll(u);
2232 #else
2233 /* XXX: cast to int ? (long long case) */
2234 vpushi(u);
2235 #endif
2237 gen_op('*');
2238 #if 0
2239 /* #ifdef CONFIG_TCC_BCHECK
2240 The main reason for removing this code:
2241 #include <stdio.h>
2242 int main ()
2243 {
2244 int v[10];
2245 int i = 10;
2246 int j = 9;
2247 fprintf(stderr, "v+i-j = %p\n", v+i-j);
2248 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
2249 }
2250 When this code is on, the output looks like
2251 v+i-j = 0xfffffffe
2252 v+(i-j) = 0xbff84000
2253 */
2254 /* if evaluating constant expression, no code should be
2255 generated, so no bound check */
2256 if (tcc_state->do_bounds_check && !const_wanted) {
2257 /* if bounded pointers, we generate a special code to
2258 test bounds */
2259 if (op == '-') {
2260 vpushi(0);
2261 vswap();
2262 gen_op('-');
2264 gen_bounded_ptr_add();
2265 } else
2266 #endif
2268 gen_opic(op);
2270 /* restore the type in case gen_opic() swapped operands */
2271 vtop->type = type1;
2273 } else if (is_float(bt1) || is_float(bt2)) {
2274 /* compute bigger type and do implicit casts */
2275 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
2276 t = VT_LDOUBLE;
2277 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
2278 t = VT_DOUBLE;
2279 } else {
2280 t = VT_FLOAT;
2282 /* floats can only be used for a few operations */
2283 if (op != '+' && op != '-' && op != '*' && op != '/' &&
2284 (op < TOK_ULT || op > TOK_GT))
2285 tcc_error("invalid operands for binary operation");
2286 goto std_op;
2287 } else if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL) {
2288 t = bt1 == VT_LLONG ? VT_LLONG : VT_INT;
2289 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (t | VT_UNSIGNED))
2290 t |= VT_UNSIGNED;
2291 t |= (VT_LONG & t1);
2292 goto std_op;
2293 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
2294 /* cast to biggest op */
2295 t = VT_LLONG | VT_LONG;
2296 if (bt1 == VT_LLONG)
2297 t &= t1;
2298 if (bt2 == VT_LLONG)
2299 t &= t2;
2300 /* convert to unsigned if it does not fit in a long long */
2301 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
2302 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
2303 t |= VT_UNSIGNED;
2304 goto std_op;
2305 } else {
2306 /* integer operations */
2307 t = VT_INT | (VT_LONG & (t1 | t2));
2308 /* convert to unsigned if it does not fit in an integer */
2309 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
2310 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
2311 t |= VT_UNSIGNED;
2312 std_op:
2313 /* XXX: currently, some unsigned operations are explicit, so
2314 we modify them here */
2315 if (t & VT_UNSIGNED) {
2316 if (op == TOK_SAR)
2317 op = TOK_SHR;
2318 else if (op == '/')
2319 op = TOK_UDIV;
2320 else if (op == '%')
2321 op = TOK_UMOD;
2322 else if (op == TOK_LT)
2323 op = TOK_ULT;
2324 else if (op == TOK_GT)
2325 op = TOK_UGT;
2326 else if (op == TOK_LE)
2327 op = TOK_ULE;
2328 else if (op == TOK_GE)
2329 op = TOK_UGE;
2331 vswap();
2332 type1.t = t;
2333 type1.ref = NULL;
2334 gen_cast(&type1);
2335 vswap();
2336 /* special case for shifts and long long: we keep the shift as
2337 an integer */
2338 if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
2339 type1.t = VT_INT;
2340 gen_cast(&type1);
2341 if (is_float(t))
2342 gen_opif(op);
2343 else
2344 gen_opic(op);
2345 if (op >= TOK_ULT && op <= TOK_GT) {
2346 /* relational op: the result is an int */
2347 vtop->type.t = VT_INT;
2348 } else {
2349 vtop->type.t = t;
2352 // Make sure that we have converted to an rvalue:
2353 if (vtop->r & VT_LVAL)
2354 gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
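/* Worked example (editorial sketch) of the pointer arithmetic above,
   for "int *p, *q; int i;" on a target with 4-byte int:
       p - q  ->  the raw difference is computed with gen_opic('-'),
                  then divided by sizeof(int) via gen_op(TOK_PDIV),
                  giving a ptrdiff_t result;
       p + i  ->  i is first multiplied by sizeof(int) with gen_op('*'),
                  then added to the pointer with gen_opic('+').          */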
2357 #ifndef TCC_TARGET_ARM
2358 /* generic itof for unsigned long long case */
2359 static void gen_cvt_itof1(int t)
2361 #ifdef TCC_TARGET_ARM64
2362 gen_cvt_itof(t);
2363 #else
2364 if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
2365 (VT_LLONG | VT_UNSIGNED)) {
2367 if (t == VT_FLOAT)
2368 vpush_global_sym(&func_old_type, TOK___floatundisf);
2369 #if LDOUBLE_SIZE != 8
2370 else if (t == VT_LDOUBLE)
2371 vpush_global_sym(&func_old_type, TOK___floatundixf);
2372 #endif
2373 else
2374 vpush_global_sym(&func_old_type, TOK___floatundidf);
2375 vrott(2);
2376 gfunc_call(1);
2377 vpushi(0);
2378 vtop->r = reg_fret(t);
2379 } else {
2380 gen_cvt_itof(t);
2382 #endif
2384 #endif
2386 /* generic ftoi for unsigned long long case */
2387 static void gen_cvt_ftoi1(int t)
2389 #ifdef TCC_TARGET_ARM64
2390 gen_cvt_ftoi(t);
2391 #else
2392 int st;
2394 if (t == (VT_LLONG | VT_UNSIGNED)) {
2395 /* not handled natively */
2396 st = vtop->type.t & VT_BTYPE;
2397 if (st == VT_FLOAT)
2398 vpush_global_sym(&func_old_type, TOK___fixunssfdi);
2399 #if LDOUBLE_SIZE != 8
2400 else if (st == VT_LDOUBLE)
2401 vpush_global_sym(&func_old_type, TOK___fixunsxfdi);
2402 #endif
2403 else
2404 vpush_global_sym(&func_old_type, TOK___fixunsdfdi);
2405 vrott(2);
2406 gfunc_call(1);
2407 vpushi(0);
2408 vtop->r = REG_IRET;
2409 vtop->r2 = REG_LRET;
2410 } else {
2411 gen_cvt_ftoi(t);
2413 #endif
2416 /* force char or short cast */
2417 static void force_charshort_cast(int t)
2419 int bits, dbt;
2421 /* cannot cast static initializers */
2422 if (STATIC_DATA_WANTED)
2423 return;
2425 dbt = t & VT_BTYPE;
2426 /* XXX: add optimization if lvalue : just change type and offset */
2427 if (dbt == VT_BYTE)
2428 bits = 8;
2429 else
2430 bits = 16;
2431 if (t & VT_UNSIGNED) {
2432 vpushi((1 << bits) - 1);
2433 gen_op('&');
2434 } else {
2435 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
2436 bits = 64 - bits;
2437 else
2438 bits = 32 - bits;
2439 vpushi(bits);
2440 gen_op(TOK_SHL);
2441 /* the result must be signed, otherwise the SAR is converted to an SHL.
2442 This was not the case when "t" was a signed short
2443 and the last value on the stack was an unsigned int */
2444 vtop->type.t &= ~VT_UNSIGNED;
2445 vpushi(bits);
2446 gen_op(TOK_SAR);
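/* Example (editorial): truncating a 32-bit int on the value stack
       to unsigned char  emits   v & 0xff
       to signed char    emits   (v << 24) >> 24   (TOK_SHL then TOK_SAR)
   so the value is masked or sign-extended in place. */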
2450 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2451 static void gen_cast_s(int t)
2453 CType type;
2454 type.t = t;
2455 type.ref = NULL;
2456 gen_cast(&type);
2459 static void gen_cast(CType *type)
2461 int sbt, dbt, sf, df, c, p;
2463 /* special delayed cast for char/short */
2464 /* XXX: in some cases (multiple cascaded casts), it may still
2465 be incorrect */
2466 if (vtop->r & VT_MUSTCAST) {
2467 vtop->r &= ~VT_MUSTCAST;
2468 force_charshort_cast(vtop->type.t);
2471 /* bitfields first get cast to ints */
2472 if (vtop->type.t & VT_BITFIELD) {
2473 gv(RC_INT);
2476 dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
2477 sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
2479 if (sbt != dbt) {
2480 sf = is_float(sbt);
2481 df = is_float(dbt);
2482 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2483 p = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM);
2484 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
2485 c &= dbt != VT_LDOUBLE;
2486 #endif
2487 if (c) {
2488 /* constant case: we can do it now */
2489 /* XXX: in ISOC, cannot do it if error in convert */
2490 if (sbt == VT_FLOAT)
2491 vtop->c.ld = vtop->c.f;
2492 else if (sbt == VT_DOUBLE)
2493 vtop->c.ld = vtop->c.d;
2495 if (df) {
2496 if ((sbt & VT_BTYPE) == VT_LLONG) {
2497 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
2498 vtop->c.ld = vtop->c.i;
2499 else
2500 vtop->c.ld = -(long double)-vtop->c.i;
2501 } else if(!sf) {
2502 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
2503 vtop->c.ld = (uint32_t)vtop->c.i;
2504 else
2505 vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
2508 if (dbt == VT_FLOAT)
2509 vtop->c.f = (float)vtop->c.ld;
2510 else if (dbt == VT_DOUBLE)
2511 vtop->c.d = (double)vtop->c.ld;
2512 } else if (sf && dbt == (VT_LLONG|VT_UNSIGNED)) {
2513 vtop->c.i = vtop->c.ld;
2514 } else if (sf && dbt == VT_BOOL) {
2515 vtop->c.i = (vtop->c.ld != 0);
2516 } else {
2517 if(sf)
2518 vtop->c.i = vtop->c.ld;
2519 else if (sbt == (VT_LLONG|VT_UNSIGNED))
2521 else if (sbt & VT_UNSIGNED)
2522 vtop->c.i = (uint32_t)vtop->c.i;
2523 #if PTR_SIZE == 8
2524 else if (sbt == VT_PTR)
2526 #endif
2527 else if (sbt != VT_LLONG)
2528 vtop->c.i = ((uint32_t)vtop->c.i |
2529 -(vtop->c.i & 0x80000000));
2531 if (dbt == (VT_LLONG|VT_UNSIGNED))
2533 else if (dbt == VT_BOOL)
2534 vtop->c.i = (vtop->c.i != 0);
2535 #if PTR_SIZE == 8
2536 else if (dbt == VT_PTR)
2538 #endif
2539 else if (dbt != VT_LLONG) {
2540 uint32_t m = ((dbt & VT_BTYPE) == VT_BYTE ? 0xff :
2541 (dbt & VT_BTYPE) == VT_SHORT ? 0xffff :
2542 0xffffffff);
2543 vtop->c.i &= m;
2544 if (!(dbt & VT_UNSIGNED))
2545 vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
2548 } else if (p && dbt == VT_BOOL) {
2549 vtop->r = VT_CONST;
2550 vtop->c.i = 1;
2551 } else {
2552 /* non constant case: generate code */
2553 if (sf && df) {
2554 /* convert from fp to fp */
2555 gen_cvt_ftof(dbt);
2556 } else if (df) {
2557 /* convert int to fp */
2558 gen_cvt_itof1(dbt);
2559 } else if (sf) {
2560 /* convert fp to int */
2561 if (dbt == VT_BOOL) {
2562 vpushi(0);
2563 gen_op(TOK_NE);
2564 } else {
2565 /* we handle char/short/etc... with generic code */
2566 if (dbt != (VT_INT | VT_UNSIGNED) &&
2567 dbt != (VT_LLONG | VT_UNSIGNED) &&
2568 dbt != VT_LLONG)
2569 dbt = VT_INT;
2570 gen_cvt_ftoi1(dbt);
2571 if (dbt == VT_INT && (type->t & (VT_BTYPE | VT_UNSIGNED)) != dbt) {
2572 /* additional cast for char/short... */
2573 vtop->type.t = dbt;
2574 gen_cast(type);
2577 #if PTR_SIZE == 4
2578 } else if ((dbt & VT_BTYPE) == VT_LLONG) {
2579 if ((sbt & VT_BTYPE) != VT_LLONG) {
2580 /* scalar to long long */
2581 /* machine independent conversion */
2582 gv(RC_INT);
2583 /* generate high word */
2584 if (sbt == (VT_INT | VT_UNSIGNED)) {
2585 vpushi(0);
2586 gv(RC_INT);
2587 } else {
2588 if (sbt == VT_PTR) {
2589 /* cast from pointer to int before we apply
2590 the shift operation, which pointers don't support */
2591 gen_cast_s(VT_INT);
2593 gv_dup();
2594 vpushi(31);
2595 gen_op(TOK_SAR);
2597 /* patch second register */
2598 vtop[-1].r2 = vtop->r;
2599 vpop();
2601 #else
2602 } else if ((dbt & VT_BTYPE) == VT_LLONG ||
2603 (dbt & VT_BTYPE) == VT_PTR ||
2604 (dbt & VT_BTYPE) == VT_FUNC) {
2605 if ((sbt & VT_BTYPE) != VT_LLONG &&
2606 (sbt & VT_BTYPE) != VT_PTR &&
2607 (sbt & VT_BTYPE) != VT_FUNC) {
2608 /* need to convert from 32bit to 64bit */
2609 gv(RC_INT);
2610 if (sbt != (VT_INT | VT_UNSIGNED)) {
2611 #if defined(TCC_TARGET_ARM64)
2612 gen_cvt_sxtw();
2613 #elif defined(TCC_TARGET_X86_64)
2614 int r = gv(RC_INT);
2615 /* x86_64 specific: movslq */
2616 o(0x6348);
2617 o(0xc0 + (REG_VALUE(r) << 3) + REG_VALUE(r));
2618 #else
2619 #error
2620 #endif
2623 #endif
2624 } else if (dbt == VT_BOOL) {
2625 /* scalar to bool */
2626 vpushi(0);
2627 gen_op(TOK_NE);
2628 } else if ((dbt & VT_BTYPE) == VT_BYTE ||
2629 (dbt & VT_BTYPE) == VT_SHORT) {
2630 if (sbt == VT_PTR) {
2631 vtop->type.t = VT_INT;
2632 tcc_warning("nonportable conversion from pointer to char/short");
2634 force_charshort_cast(dbt);
2635 } else if ((dbt & VT_BTYPE) == VT_INT) {
2636 /* scalar to int */
2637 if ((sbt & VT_BTYPE) == VT_LLONG) {
2638 #if PTR_SIZE == 4
2639 /* from long long: just take low order word */
2640 lexpand();
2641 vpop();
2642 #else
2643 vpushi(0xffffffff);
2644 vtop->type.t |= VT_UNSIGNED;
2645 gen_op('&');
2646 #endif
2648 /* if lvalue and single word type, nothing to do because
2649 the lvalue already contains the real type size (see
2650 VT_LVAL_xxx constants) */
2653 } else if ((dbt & VT_BTYPE) == VT_PTR && !(vtop->r & VT_LVAL)) {
2654 /* if we are casting between pointer types,
2655 we must update the VT_LVAL_xxx size */
2656 vtop->r = (vtop->r & ~VT_LVAL_TYPE)
2657 | (lvalue_type(type->ref->type.t) & VT_LVAL_TYPE);
2659 vtop->type = *type;
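/* Examples of the constant folding above (editorial sketch):
       (unsigned char)0x1ff  -> folds to 0xff at compile time
       (signed char)0x1ff    -> folds to -1 (mask, then sign-extend)
       (int)3.75             -> folds to 3 when the operand is constant
   non-constant operands fall through to the code-generating branch. */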
2662 /* return type size as known at compile time. Put alignment at 'a' */
2663 ST_FUNC int type_size(CType *type, int *a)
2665 Sym *s;
2666 int bt;
2668 bt = type->t & VT_BTYPE;
2669 if (bt == VT_STRUCT) {
2670 /* struct/union */
2671 s = type->ref;
2672 *a = s->r;
2673 return s->c;
2674 } else if (bt == VT_PTR) {
2675 if (type->t & VT_ARRAY) {
2676 int ts;
2678 s = type->ref;
2679 ts = type_size(&s->type, a);
2681 if (ts < 0 && s->c < 0)
2682 ts = -ts;
2684 return ts * s->c;
2685 } else {
2686 *a = PTR_SIZE;
2687 return PTR_SIZE;
2689 } else if (IS_ENUM(type->t) && type->ref->c == -1) {
2690 return -1; /* incomplete enum */
2691 } else if (bt == VT_LDOUBLE) {
2692 *a = LDOUBLE_ALIGN;
2693 return LDOUBLE_SIZE;
2694 } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
2695 #ifdef TCC_TARGET_I386
2696 #ifdef TCC_TARGET_PE
2697 *a = 8;
2698 #else
2699 *a = 4;
2700 #endif
2701 #elif defined(TCC_TARGET_ARM)
2702 #ifdef TCC_ARM_EABI
2703 *a = 8;
2704 #else
2705 *a = 4;
2706 #endif
2707 #else
2708 *a = 8;
2709 #endif
2710 return 8;
2711 } else if (bt == VT_INT || bt == VT_FLOAT) {
2712 *a = 4;
2713 return 4;
2714 } else if (bt == VT_SHORT) {
2715 *a = 2;
2716 return 2;
2717 } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
2718 *a = 8;
2719 return 16;
2720 } else {
2721 /* char, void, function, _Bool */
2722 *a = 1;
2723 return 1;
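/* Examples (editorial), for a typical 32/64-bit target:
       type_size of "int"      -> 4, with *a = 4
       type_size of "int[10]"  -> 40 (element size * ref->c), with *a = 4
       type_size of a struct   -> ref->c with *a = ref->r, as computed
                                  by struct_layout()
   an incomplete type such as an undefined enum reports -1. */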
2727 /* push type size as known at runtime on top of the value stack. Put
2728 alignment at 'a' */
2729 ST_FUNC void vla_runtime_type_size(CType *type, int *a)
2731 if (type->t & VT_VLA) {
2732 type_size(&type->ref->type, a);
2733 vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
2734 } else {
2735 vpushi(type_size(type, a));
2739 static void vla_sp_restore(void) {
2740 if (vlas_in_scope) {
2741 gen_vla_sp_restore(vla_sp_loc);
2745 static void vla_sp_restore_root(void) {
2746 if (vlas_in_scope) {
2747 gen_vla_sp_restore(vla_sp_root_loc);
2751 /* return the pointed type of t */
2752 static inline CType *pointed_type(CType *type)
2754 return &type->ref->type;
2757 /* modify type so that it is a pointer to the original type. */
2758 ST_FUNC void mk_pointer(CType *type)
2760 Sym *s;
2761 s = sym_push(SYM_FIELD, type, 0, -1);
2762 type->t = VT_PTR | (type->t & VT_STORAGE);
2763 type->ref = s;
2766 /* compare function types. OLD functions match any new functions */
2767 static int is_compatible_func(CType *type1, CType *type2)
2769 Sym *s1, *s2;
2771 s1 = type1->ref;
2772 s2 = type2->ref;
2773 if (!is_compatible_types(&s1->type, &s2->type))
2774 return 0;
2775 /* check func_call */
2776 if (s1->f.func_call != s2->f.func_call)
2777 return 0;
2778 /* XXX: not complete */
2779 if (s1->f.func_type == FUNC_OLD || s2->f.func_type == FUNC_OLD)
2780 return 1;
2781 if (s1->f.func_type != s2->f.func_type)
2782 return 0;
2783 while (s1 != NULL) {
2784 if (s2 == NULL)
2785 return 0;
2786 if (!is_compatible_unqualified_types(&s1->type, &s2->type))
2787 return 0;
2788 s1 = s1->next;
2789 s2 = s2->next;
2791 if (s2)
2792 return 0;
2793 return 1;
2796 /* return true if type1 and type2 are the same. If unqualified is
2797 true, qualifiers on the types are ignored.
2799 - enums are not checked, as with gcc's __builtin_types_compatible_p ()
2800 */
2801 static int compare_types(CType *type1, CType *type2, int unqualified)
2803 int bt1, t1, t2;
2805 t1 = type1->t & VT_TYPE;
2806 t2 = type2->t & VT_TYPE;
2807 if (unqualified) {
2808 /* strip qualifiers before comparing */
2809 t1 &= ~(VT_CONSTANT | VT_VOLATILE);
2810 t2 &= ~(VT_CONSTANT | VT_VOLATILE);
2813 /* Default vs. explicit signedness only matters for char */
2814 if ((t1 & VT_BTYPE) != VT_BYTE) {
2815 t1 &= ~VT_DEFSIGN;
2816 t2 &= ~VT_DEFSIGN;
2818 /* XXX: bitfields ? */
2819 if (t1 != t2)
2820 return 0;
2821 /* test more complicated cases */
2822 bt1 = t1 & VT_BTYPE;
2823 if (bt1 == VT_PTR) {
2824 type1 = pointed_type(type1);
2825 type2 = pointed_type(type2);
2826 return is_compatible_types(type1, type2);
2827 } else if (bt1 == VT_STRUCT) {
2828 return (type1->ref == type2->ref);
2829 } else if (bt1 == VT_FUNC) {
2830 return is_compatible_func(type1, type2);
2831 } else {
2832 return 1;
2836 /* return true if type1 and type2 are exactly the same (including
2837 qualifiers).
2838 */
2839 static int is_compatible_types(CType *type1, CType *type2)
2841 return compare_types(type1,type2,0);
2844 /* return true if type1 and type2 are the same (ignoring qualifiers).
2845 */
2846 static int is_compatible_unqualified_types(CType *type1, CType *type2)
2848 return compare_types(type1,type2,1);
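/* Examples (editorial): with unqualified == 1 only the outermost
   qualifiers are dropped before comparing, so
       "int *const" vs "int *"   -> compatible
       "const int *" vs "int *"  -> not compatible (the pointed-to types
                                    are still compared with qualifiers)
   function types are compared parameter by parameter in
   is_compatible_func(), where an old-style, unprototyped declaration
   matches any parameter list. */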
2851 /* print a type. If 'varstr' is not NULL, then the variable is also
2852 printed in the type */
2853 /* XXX: union */
2854 /* XXX: add array and function pointers */
2855 static void type_to_str(char *buf, int buf_size,
2856 CType *type, const char *varstr)
2858 int bt, v, t;
2859 Sym *s, *sa;
2860 char buf1[256];
2861 const char *tstr;
2863 t = type->t;
2864 bt = t & VT_BTYPE;
2865 buf[0] = '\0';
2867 if (t & VT_EXTERN)
2868 pstrcat(buf, buf_size, "extern ");
2869 if (t & VT_STATIC)
2870 pstrcat(buf, buf_size, "static ");
2871 if (t & VT_TYPEDEF)
2872 pstrcat(buf, buf_size, "typedef ");
2873 if (t & VT_INLINE)
2874 pstrcat(buf, buf_size, "inline ");
2875 if (t & VT_VOLATILE)
2876 pstrcat(buf, buf_size, "volatile ");
2877 if (t & VT_CONSTANT)
2878 pstrcat(buf, buf_size, "const ");
2880 if (((t & VT_DEFSIGN) && bt == VT_BYTE)
2881 || ((t & VT_UNSIGNED)
2882 && (bt == VT_SHORT || bt == VT_INT || bt == VT_LLONG)
2883 && !IS_ENUM(t)
2885 pstrcat(buf, buf_size, (t & VT_UNSIGNED) ? "unsigned " : "signed ");
2887 buf_size -= strlen(buf);
2888 buf += strlen(buf);
2890 switch(bt) {
2891 case VT_VOID:
2892 tstr = "void";
2893 goto add_tstr;
2894 case VT_BOOL:
2895 tstr = "_Bool";
2896 goto add_tstr;
2897 case VT_BYTE:
2898 tstr = "char";
2899 goto add_tstr;
2900 case VT_SHORT:
2901 tstr = "short";
2902 goto add_tstr;
2903 case VT_INT:
2904 tstr = "int";
2905 goto maybe_long;
2906 case VT_LLONG:
2907 tstr = "long long";
2908 maybe_long:
2909 if (t & VT_LONG)
2910 tstr = "long";
2911 if (!IS_ENUM(t))
2912 goto add_tstr;
2913 tstr = "enum ";
2914 goto tstruct;
2915 case VT_FLOAT:
2916 tstr = "float";
2917 goto add_tstr;
2918 case VT_DOUBLE:
2919 tstr = "double";
2920 goto add_tstr;
2921 case VT_LDOUBLE:
2922 tstr = "long double";
2923 add_tstr:
2924 pstrcat(buf, buf_size, tstr);
2925 break;
2926 case VT_STRUCT:
2927 tstr = "struct ";
2928 if (IS_UNION(t))
2929 tstr = "union ";
2930 tstruct:
2931 pstrcat(buf, buf_size, tstr);
2932 v = type->ref->v & ~SYM_STRUCT;
2933 if (v >= SYM_FIRST_ANOM)
2934 pstrcat(buf, buf_size, "<anonymous>");
2935 else
2936 pstrcat(buf, buf_size, get_tok_str(v, NULL));
2937 break;
2938 case VT_FUNC:
2939 s = type->ref;
2940 buf1[0]=0;
2941 if (varstr && '*' == *varstr) {
2942 pstrcat(buf1, sizeof(buf1), "(");
2943 pstrcat(buf1, sizeof(buf1), varstr);
2944 pstrcat(buf1, sizeof(buf1), ")");
2946 pstrcat(buf1, buf_size, "(");
2947 sa = s->next;
2948 while (sa != NULL) {
2949 char buf2[256];
2950 type_to_str(buf2, sizeof(buf2), &sa->type, NULL);
2951 pstrcat(buf1, sizeof(buf1), buf2);
2952 sa = sa->next;
2953 if (sa)
2954 pstrcat(buf1, sizeof(buf1), ", ");
2956 if (s->f.func_type == FUNC_ELLIPSIS)
2957 pstrcat(buf1, sizeof(buf1), ", ...");
2958 pstrcat(buf1, sizeof(buf1), ")");
2959 type_to_str(buf, buf_size, &s->type, buf1);
2960 goto no_var;
2961 case VT_PTR:
2962 s = type->ref;
2963 if (t & VT_ARRAY) {
2964 snprintf(buf1, sizeof(buf1), "%s[%d]", varstr ? varstr : "", s->c);
2965 type_to_str(buf, buf_size, &s->type, buf1);
2966 goto no_var;
2968 pstrcpy(buf1, sizeof(buf1), "*");
2969 if (t & VT_CONSTANT)
2970 pstrcat(buf1, buf_size, "const ");
2971 if (t & VT_VOLATILE)
2972 pstrcat(buf1, buf_size, "volatile ");
2973 if (varstr)
2974 pstrcat(buf1, sizeof(buf1), varstr);
2975 type_to_str(buf, buf_size, &s->type, buf1);
2976 goto no_var;
2978 if (varstr) {
2979 pstrcat(buf, buf_size, " ");
2980 pstrcat(buf, buf_size, varstr);
2982 no_var: ;
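/* Example (editorial sketch) of the VT_FUNC/VT_PTR interplay above:
   printing the type of "int (*fp)(char, float)" first builds "*fp" in the
   VT_PTR case, then the VT_FUNC case wraps it as "(*fp)" and appends the
   parameter list, and finally the return type is prepended, yielding
       int (*fp)(char, float)                                            */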
2985 /* verify type compatibility to store vtop in 'dt' type, and generate
2986 casts if needed. */
2987 static void gen_assign_cast(CType *dt)
2989 CType *st, *type1, *type2;
2990 char buf1[256], buf2[256];
2991 int dbt, sbt;
2993 st = &vtop->type; /* source type */
2994 dbt = dt->t & VT_BTYPE;
2995 sbt = st->t & VT_BTYPE;
2996 if (sbt == VT_VOID || dbt == VT_VOID) {
2997 if (sbt == VT_VOID && dbt == VT_VOID)
2998 ; /*
2999 It is Ok if both are void
3000 A test program:
3001 void func1() {}
3002 void func2() {
3003 return func1();
3004 }
3005 gcc accepts this program
3006 */
3007 else
3008 tcc_error("cannot cast from/to void");
3010 if (dt->t & VT_CONSTANT)
3011 tcc_warning("assignment of read-only location");
3012 switch(dbt) {
3013 case VT_PTR:
3014 /* special cases for pointers */
3015 /* '0' can also be a pointer */
3016 if (is_null_pointer(vtop))
3017 goto type_ok;
3018 /* accept implicit pointer to integer cast with warning */
3019 if (is_integer_btype(sbt)) {
3020 tcc_warning("assignment makes pointer from integer without a cast");
3021 goto type_ok;
3023 type1 = pointed_type(dt);
3024 /* a function is implicitly a function pointer */
3025 if (sbt == VT_FUNC) {
3026 if ((type1->t & VT_BTYPE) != VT_VOID &&
3027 !is_compatible_types(pointed_type(dt), st))
3028 tcc_warning("assignment from incompatible pointer type");
3029 goto type_ok;
3031 if (sbt != VT_PTR)
3032 goto error;
3033 type2 = pointed_type(st);
3034 if ((type1->t & VT_BTYPE) == VT_VOID ||
3035 (type2->t & VT_BTYPE) == VT_VOID) {
3036 /* void * can match anything */
3037 } else {
3038 //printf("types %08x %08x\n", type1->t, type2->t);
3039 /* exact type match, except for qualifiers */
3040 if (!is_compatible_unqualified_types(type1, type2)) {
3041 /* Like GCC, don't warn by default for mere changes
3042 in pointer target signedness. Do warn for different
3043 base types, though, in particular for unsigned enums
3044 and signed int targets. */
3045 if ((type1->t & (VT_BTYPE|VT_LONG)) != (type2->t & (VT_BTYPE|VT_LONG))
3046 || IS_ENUM(type1->t) || IS_ENUM(type2->t)
3048 tcc_warning("assignment from incompatible pointer type");
3051 /* check const and volatile */
3052 if ((!(type1->t & VT_CONSTANT) && (type2->t & VT_CONSTANT)) ||
3053 (!(type1->t & VT_VOLATILE) && (type2->t & VT_VOLATILE)))
3054 tcc_warning("assignment discards qualifiers from pointer target type");
3055 break;
3056 case VT_BYTE:
3057 case VT_SHORT:
3058 case VT_INT:
3059 case VT_LLONG:
3060 if (sbt == VT_PTR || sbt == VT_FUNC) {
3061 tcc_warning("assignment makes integer from pointer without a cast");
3062 } else if (sbt == VT_STRUCT) {
3063 goto case_VT_STRUCT;
3065 /* XXX: more tests */
3066 break;
3067 case VT_STRUCT:
3068 case_VT_STRUCT:
3069 if (!is_compatible_unqualified_types(dt, st)) {
3070 error:
3071 type_to_str(buf1, sizeof(buf1), st, NULL);
3072 type_to_str(buf2, sizeof(buf2), dt, NULL);
3073 tcc_error("cannot cast '%s' to '%s'", buf1, buf2);
3075 break;
3077 type_ok:
3078 gen_cast(dt);
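/* Examples of the diagnostics above (editorial sketch), for "int *p;":
       p = 3;           -> warning: assignment makes pointer from integer
                           without a cast
       int n = p;       -> warning: assignment makes integer from pointer
                           without a cast
       p = (float *)p;  -> warning: assignment from incompatible pointer type
       void *v = p;     -> accepted, void * matches any object pointer     */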
3081 /* store vtop in lvalue pushed on stack */
3082 ST_FUNC void vstore(void)
3084 int sbt, dbt, ft, r, t, size, align, bit_size, bit_pos, rc, delayed_cast;
3086 ft = vtop[-1].type.t;
3087 sbt = vtop->type.t & VT_BTYPE;
3088 dbt = ft & VT_BTYPE;
3089 if ((((sbt == VT_INT || sbt == VT_SHORT) && dbt == VT_BYTE) ||
3090 (sbt == VT_INT && dbt == VT_SHORT))
3091 && !(vtop->type.t & VT_BITFIELD)) {
3092 /* optimize char/short casts */
3093 delayed_cast = VT_MUSTCAST;
3094 vtop->type.t = ft & VT_TYPE;
3095 /* XXX: factorize */
3096 if (ft & VT_CONSTANT)
3097 tcc_warning("assignment of read-only location");
3098 } else {
3099 delayed_cast = 0;
3100 if (!(ft & VT_BITFIELD))
3101 gen_assign_cast(&vtop[-1].type);
3104 if (sbt == VT_STRUCT) {
3105 /* if structure, only generate pointer */
3106 /* structure assignment : generate memcpy */
3107 /* XXX: optimize if small size */
3108 size = type_size(&vtop->type, &align);
3110 /* destination */
3111 vswap();
3112 vtop->type.t = VT_PTR;
3113 gaddrof();
3115 /* address of memcpy() */
3116 #ifdef TCC_ARM_EABI
3117 if(!(align & 7))
3118 vpush_global_sym(&func_old_type, TOK_memcpy8);
3119 else if(!(align & 3))
3120 vpush_global_sym(&func_old_type, TOK_memcpy4);
3121 else
3122 #endif
3123 /* Use memmove, rather than memcpy, as dest and src may be the same: */
3124 vpush_global_sym(&func_old_type, TOK_memmove);
3126 vswap();
3127 /* source */
3128 vpushv(vtop - 2);
3129 vtop->type.t = VT_PTR;
3130 gaddrof();
3131 /* type size */
3132 vpushi(size);
3133 gfunc_call(3);
3135 /* leave source on stack */
3136 } else if (ft & VT_BITFIELD) {
3137 /* bitfield store handling */
3139 /* save lvalue as expression result (example: s.b = s.a = n;) */
3140 vdup(), vtop[-1] = vtop[-2];
3142 bit_pos = BIT_POS(ft);
3143 bit_size = BIT_SIZE(ft);
3144 /* remove bit field info to avoid loops */
3145 vtop[-1].type.t = ft & ~VT_STRUCT_MASK;
3147 if ((ft & VT_BTYPE) == VT_BOOL) {
3148 gen_cast(&vtop[-1].type);
3149 vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
3152 r = adjust_bf(vtop - 1, bit_pos, bit_size);
3153 if (r == VT_STRUCT) {
3154 gen_cast_s((ft & VT_BTYPE) == VT_LLONG ? VT_LLONG : VT_INT);
3155 store_packed_bf(bit_pos, bit_size);
3156 } else {
3157 unsigned long long mask = (1ULL << bit_size) - 1;
3158 if ((ft & VT_BTYPE) != VT_BOOL) {
3159 /* mask source */
3160 if ((vtop[-1].type.t & VT_BTYPE) == VT_LLONG)
3161 vpushll(mask);
3162 else
3163 vpushi((unsigned)mask);
3164 gen_op('&');
3166 /* shift source */
3167 vpushi(bit_pos);
3168 gen_op(TOK_SHL);
3169 vswap();
3170 /* duplicate destination */
3171 vdup();
3172 vrott(3);
3173 /* load destination, mask and or with source */
3174 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
3175 vpushll(~(mask << bit_pos));
3176 else
3177 vpushi(~((unsigned)mask << bit_pos));
3178 gen_op('&');
3179 gen_op('|');
3180 /* store result */
3181 vstore();
3182 /* ... and discard */
3183 vpop();
3185 } else if (dbt == VT_VOID) {
3186 --vtop;
3187 } else {
3188 #ifdef CONFIG_TCC_BCHECK
3189 /* bound check case */
3190 if (vtop[-1].r & VT_MUSTBOUND) {
3191 vswap();
3192 gbound();
3193 vswap();
3195 #endif
3196 rc = RC_INT;
3197 if (is_float(ft)) {
3198 rc = RC_FLOAT;
3199 #ifdef TCC_TARGET_X86_64
3200 if ((ft & VT_BTYPE) == VT_LDOUBLE) {
3201 rc = RC_ST0;
3202 } else if ((ft & VT_BTYPE) == VT_QFLOAT) {
3203 rc = RC_FRET;
3205 #endif
3207 r = gv(rc); /* generate value */
3208 /* if lvalue was saved on stack, must read it */
3209 if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
3210 SValue sv;
3211 t = get_reg(RC_INT);
3212 #if PTR_SIZE == 8
3213 sv.type.t = VT_PTR;
3214 #else
3215 sv.type.t = VT_INT;
3216 #endif
3217 sv.r = VT_LOCAL | VT_LVAL;
3218 sv.c.i = vtop[-1].c.i;
3219 load(t, &sv);
3220 vtop[-1].r = t | VT_LVAL;
3222 /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
3223 #if PTR_SIZE == 8
3224 if (((ft & VT_BTYPE) == VT_QLONG) || ((ft & VT_BTYPE) == VT_QFLOAT)) {
3225 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
3226 #else
3227 if ((ft & VT_BTYPE) == VT_LLONG) {
3228 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
3229 #endif
3230 vtop[-1].type.t = load_type;
3231 store(r, vtop - 1);
3232 vswap();
3233 /* convert to int to increment easily */
3234 vtop->type.t = addr_type;
3235 gaddrof();
3236 vpushi(load_size);
3237 gen_op('+');
3238 vtop->r |= VT_LVAL;
3239 vswap();
3240 vtop[-1].type.t = load_type;
3241 /* XXX: it works because r2 is spilled last ! */
3242 store(vtop->r2, vtop - 1);
3243 } else {
3244 store(r, vtop - 1);
3247 vswap();
3248 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
3249 vtop->r |= delayed_cast;
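/* Worked example of the bit-field branch above (editorial sketch):
   for "struct { int a:3, b:5; } s;" the store "s.b = v" expands to roughly
       dest = (dest & ~(0x1f << 3)) | ((v & 0x1f) << 3);
   i.e. the source is masked to bit_size bits, shifted to bit_pos, and
   merged into the old contents before being stored back. */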
3253 /* post selects post- or pre-increment. c is the token ++ or -- */
3254 ST_FUNC void inc(int post, int c)
3256 test_lvalue();
3257 vdup(); /* save lvalue */
3258 if (post) {
3259 gv_dup(); /* duplicate value */
3260 vrotb(3);
3261 vrotb(3);
3263 /* add constant */
3264 vpushi(c - TOK_MID);
3265 gen_op('+');
3266 vstore(); /* store value */
3267 if (post)
3268 vpop(); /* if post op, return saved value */
3271 ST_FUNC void parse_mult_str (CString *astr, const char *msg)
3273 /* read the string */
3274 if (tok != TOK_STR)
3275 expect(msg);
3276 cstr_new(astr);
3277 while (tok == TOK_STR) {
3278 /* XXX: add \0 handling too ? */
3279 cstr_cat(astr, tokc.str.data, -1);
3280 next();
3282 cstr_ccat(astr, '\0');
3285 /* If I is >= 1 and a power of two, returns log2(i)+1.
3286 If I is 0 returns 0. */
3287 static int exact_log2p1(int i)
3289 int ret;
3290 if (!i)
3291 return 0;
3292 for (ret = 1; i >= 1 << 8; ret += 8)
3293 i >>= 8;
3294 if (i >= 1 << 4)
3295 ret += 4, i >>= 4;
3296 if (i >= 1 << 2)
3297 ret += 2, i >>= 2;
3298 if (i >= 1 << 1)
3299 ret++;
3300 return ret;
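/* Examples (editorial): exact_log2p1(0) == 0, exact_log2p1(1) == 1,
   exact_log2p1(8) == 4, exact_log2p1(16) == 5; for a power of two the
   result n satisfies 1 << (n - 1) == i, which is the encoding used by
   the aligned attribute handling below. */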
3303 /* Parse __attribute__((...)) GNUC extension. */
3304 static void parse_attribute(AttributeDef *ad)
3306 int t, n;
3307 CString astr;
3309 redo:
3310 if (tok != TOK_ATTRIBUTE1 && tok != TOK_ATTRIBUTE2)
3311 return;
3312 next();
3313 skip('(');
3314 skip('(');
3315 while (tok != ')') {
3316 if (tok < TOK_IDENT)
3317 expect("attribute name");
3318 t = tok;
3319 next();
3320 switch(t) {
3321 case TOK_SECTION1:
3322 case TOK_SECTION2:
3323 skip('(');
3324 parse_mult_str(&astr, "section name");
3325 ad->section = find_section(tcc_state, (char *)astr.data);
3326 skip(')');
3327 cstr_free(&astr);
3328 break;
3329 case TOK_ALIAS1:
3330 case TOK_ALIAS2:
3331 skip('(');
3332 parse_mult_str(&astr, "alias(\"target\")");
3333 ad->alias_target = /* save string as token, for later */
3334 tok_alloc((char*)astr.data, astr.size-1)->tok;
3335 skip(')');
3336 cstr_free(&astr);
3337 break;
3338 case TOK_VISIBILITY1:
3339 case TOK_VISIBILITY2:
3340 skip('(');
3341 parse_mult_str(&astr,
3342 "visibility(\"default|hidden|internal|protected\")");
3343 if (!strcmp (astr.data, "default"))
3344 ad->a.visibility = STV_DEFAULT;
3345 else if (!strcmp (astr.data, "hidden"))
3346 ad->a.visibility = STV_HIDDEN;
3347 else if (!strcmp (astr.data, "internal"))
3348 ad->a.visibility = STV_INTERNAL;
3349 else if (!strcmp (astr.data, "protected"))
3350 ad->a.visibility = STV_PROTECTED;
3351 else
3352 expect("visibility(\"default|hidden|internal|protected\")");
3353 skip(')');
3354 cstr_free(&astr);
3355 break;
3356 case TOK_ALIGNED1:
3357 case TOK_ALIGNED2:
3358 if (tok == '(') {
3359 next();
3360 n = expr_const();
3361 if (n <= 0 || (n & (n - 1)) != 0)
3362 tcc_error("alignment must be a positive power of two");
3363 skip(')');
3364 } else {
3365 n = MAX_ALIGN;
3367 ad->a.aligned = exact_log2p1(n);
3368 if (n != 1 << (ad->a.aligned - 1))
3369 tcc_error("alignment of %d is larger than implemented", n);
3370 break;
3371 case TOK_PACKED1:
3372 case TOK_PACKED2:
3373 ad->a.packed = 1;
3374 break;
3375 case TOK_WEAK1:
3376 case TOK_WEAK2:
3377 ad->a.weak = 1;
3378 break;
3379 case TOK_UNUSED1:
3380 case TOK_UNUSED2:
3381 /* currently, no need to handle it because tcc does not
3382 track unused objects */
3383 break;
3384 case TOK_NORETURN1:
3385 case TOK_NORETURN2:
3386 /* currently, no need to handle it because tcc does not
3387 make use of noreturn information */
3388 break;
3389 case TOK_CDECL1:
3390 case TOK_CDECL2:
3391 case TOK_CDECL3:
3392 ad->f.func_call = FUNC_CDECL;
3393 break;
3394 case TOK_STDCALL1:
3395 case TOK_STDCALL2:
3396 case TOK_STDCALL3:
3397 ad->f.func_call = FUNC_STDCALL;
3398 break;
3399 #ifdef TCC_TARGET_I386
3400 case TOK_REGPARM1:
3401 case TOK_REGPARM2:
3402 skip('(');
3403 n = expr_const();
3404 if (n > 3)
3405 n = 3;
3406 else if (n < 0)
3407 n = 0;
3408 if (n > 0)
3409 ad->f.func_call = FUNC_FASTCALL1 + n - 1;
3410 skip(')');
3411 break;
3412 case TOK_FASTCALL1:
3413 case TOK_FASTCALL2:
3414 case TOK_FASTCALL3:
3415 ad->f.func_call = FUNC_FASTCALLW;
3416 break;
3417 #endif
3418 case TOK_MODE:
3419 skip('(');
3420 switch(tok) {
3421 case TOK_MODE_DI:
3422 ad->attr_mode = VT_LLONG + 1;
3423 break;
3424 case TOK_MODE_QI:
3425 ad->attr_mode = VT_BYTE + 1;
3426 break;
3427 case TOK_MODE_HI:
3428 ad->attr_mode = VT_SHORT + 1;
3429 break;
3430 case TOK_MODE_SI:
3431 case TOK_MODE_word:
3432 ad->attr_mode = VT_INT + 1;
3433 break;
3434 default:
3435 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok, NULL));
3436 break;
3438 next();
3439 skip(')');
3440 break;
3441 case TOK_DLLEXPORT:
3442 ad->a.dllexport = 1;
3443 break;
3444 case TOK_DLLIMPORT:
3445 ad->a.dllimport = 1;
3446 break;
3447 default:
3448 if (tcc_state->warn_unsupported)
3449 tcc_warning("'%s' attribute ignored", get_tok_str(t, NULL));
3450 /* skip parameters */
3451 if (tok == '(') {
3452 int parenthesis = 0;
3453 do {
3454 if (tok == '(')
3455 parenthesis++;
3456 else if (tok == ')')
3457 parenthesis--;
3458 next();
3459 } while (parenthesis && tok != -1);
3461 break;
3463 if (tok != ',')
3464 break;
3465 next();
3467 skip(')');
3468 skip(')');
3469 goto redo;
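/* Example (editorial): parsing __attribute__((aligned(16), packed)) leaves
       ad->a.aligned == 5   (since 1 << (5 - 1) == 16, via exact_log2p1)
       ad->a.packed  == 1
   while a bare "aligned" with no argument uses MAX_ALIGN. */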
3472 static Sym * find_field (CType *type, int v)
3474 Sym *s = type->ref;
3475 v |= SYM_FIELD;
3476 while ((s = s->next) != NULL) {
3477 if ((s->v & SYM_FIELD) &&
3478 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3479 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3480 Sym *ret = find_field (&s->type, v);
3481 if (ret)
3482 return ret;
3484 if (s->v == v)
3485 break;
3487 return s;
3490 static void struct_add_offset (Sym *s, int offset)
3492 while ((s = s->next) != NULL) {
3493 if ((s->v & SYM_FIELD) &&
3494 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3495 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3496 struct_add_offset(s->type.ref, offset);
3497 } else
3498 s->c += offset;
3502 static void struct_layout(CType *type, AttributeDef *ad)
3504 int size, align, maxalign, offset, c, bit_pos, bit_size;
3505 int packed, a, bt, prevbt, prev_bit_size;
3506 int pcc = !tcc_state->ms_bitfields;
3507 int pragma_pack = *tcc_state->pack_stack_ptr;
3508 Sym *f;
3510 maxalign = 1;
3511 offset = 0;
3512 c = 0;
3513 bit_pos = 0;
3514 prevbt = VT_STRUCT; /* make it never match */
3515 prev_bit_size = 0;
3517 //#define BF_DEBUG
3519 for (f = type->ref->next; f; f = f->next) {
3520 if (f->type.t & VT_BITFIELD)
3521 bit_size = BIT_SIZE(f->type.t);
3522 else
3523 bit_size = -1;
3524 size = type_size(&f->type, &align);
3525 a = f->a.aligned ? 1 << (f->a.aligned - 1) : 0;
3526 packed = 0;
3528 if (pcc && bit_size == 0) {
3529 /* in pcc mode, packing does not affect zero-width bitfields */
3531 } else {
3532 /* in pcc mode, attribute packed overrides if set. */
3533 if (pcc && (f->a.packed || ad->a.packed))
3534 align = packed = 1;
3536 /* pragma pack overrides align if smaller, and always packs bitfields */
3537 if (pragma_pack) {
3538 packed = 1;
3539 if (pragma_pack < align)
3540 align = pragma_pack;
3541 /* in pcc mode pragma pack also overrides individual align */
3542 if (pcc && pragma_pack < a)
3543 a = 0;
3546 /* some individual align was specified */
3547 if (a)
3548 align = a;
3550 if (type->ref->type.t == VT_UNION) {
3551 if (pcc && bit_size >= 0)
3552 size = (bit_size + 7) >> 3;
3553 offset = 0;
3554 if (size > c)
3555 c = size;
3557 } else if (bit_size < 0) {
3558 if (pcc)
3559 c += (bit_pos + 7) >> 3;
3560 c = (c + align - 1) & -align;
3561 offset = c;
3562 if (size > 0)
3563 c += size;
3564 bit_pos = 0;
3565 prevbt = VT_STRUCT;
3566 prev_bit_size = 0;
3568 } else {
3569 /* A bit-field. Layout is more complicated. There are two
3570 options: PCC (GCC) compatible and MS compatible */
3571 if (pcc) {
3572 /* In PCC layout a bit-field is placed adjacent to the
3573 preceding bit-fields, except if:
3574 - it has zero-width
3575 - an individual alignment was given
3576 - it would overflow its base type container and
3577 there is no packing */
3578 if (bit_size == 0) {
3579 new_field:
3580 c = (c + ((bit_pos + 7) >> 3) + align - 1) & -align;
3581 bit_pos = 0;
3582 } else if (f->a.aligned) {
3583 goto new_field;
3584 } else if (!packed) {
3585 int a8 = align * 8;
3586 int ofs = ((c * 8 + bit_pos) % a8 + bit_size + a8 - 1) / a8;
3587 if (ofs > size / align)
3588 goto new_field;
3591 /* in pcc mode, long long bitfields have type int if they fit */
3592 if (size == 8 && bit_size <= 32)
3593 f->type.t = (f->type.t & ~VT_BTYPE) | VT_INT, size = 4;
3595 while (bit_pos >= align * 8)
3596 c += align, bit_pos -= align * 8;
3597 offset = c;
3599 /* In PCC layout named bit-fields influence the alignment
3600 of the containing struct using the base type's alignment,
3601 except for packed fields (which here have correct align). */
3602 if (f->v & SYM_FIRST_ANOM
3603 // && bit_size // ??? gcc on ARM/rpi does that
3605 align = 1;
3607 } else {
3608 bt = f->type.t & VT_BTYPE;
3609 if ((bit_pos + bit_size > size * 8)
3610 || (bit_size > 0) == (bt != prevbt)
3612 c = (c + align - 1) & -align;
3613 offset = c;
3614 bit_pos = 0;
3615 /* In MS bitfield mode a bit-field run always uses
3616 at least as many bits as the underlying type.
3617 To start a new run it's also required that this
3618 or the last bit-field had non-zero width. */
3619 if (bit_size || prev_bit_size)
3620 c += size;
3622 /* In MS layout the record's alignment is normally
3623 influenced by the field, except for a zero-width
3624 field at the start of a run (but by further zero-width
3625 fields it is again). */
3626 if (bit_size == 0 && prevbt != bt)
3627 align = 1;
3628 prevbt = bt;
3629 prev_bit_size = bit_size;
3632 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
3633 | (bit_pos << VT_STRUCT_SHIFT);
3634 bit_pos += bit_size;
3636 if (align > maxalign)
3637 maxalign = align;
3639 #ifdef BF_DEBUG
3640 printf("set field %s offset %-2d size %-2d align %-2d",
3641 get_tok_str(f->v & ~SYM_FIELD, NULL), offset, size, align);
3642 if (f->type.t & VT_BITFIELD) {
3643 printf(" pos %-2d bits %-2d",
3644 BIT_POS(f->type.t),
3645 BIT_SIZE(f->type.t)
3648 printf("\n");
3649 #endif
3651 if (f->v & SYM_FIRST_ANOM && (f->type.t & VT_BTYPE) == VT_STRUCT) {
3652 Sym *ass;
3653 /* An anonymous struct/union. Adjust member offsets
3654 to reflect the real offset of our containing struct.
3655 Also set the offset of this anon member inside
3656 the outer struct to be zero. Via this it
3657 works when accessing the field offset directly
3658 (from base object), as well as when recursing
3659 members in initializer handling. */
3660 int v2 = f->type.ref->v;
3661 if (!(v2 & SYM_FIELD) &&
3662 (v2 & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
3663 Sym **pps;
3664 /* This happens only with MS extensions. The
3665 anon member has a named struct type, so it
3666 potentially is shared with other references.
3667 We need to unshare members so we can modify
3668 them. */
3669 ass = f->type.ref;
3670 f->type.ref = sym_push(anon_sym++ | SYM_FIELD,
3671 &f->type.ref->type, 0,
3672 f->type.ref->c);
3673 pps = &f->type.ref->next;
3674 while ((ass = ass->next) != NULL) {
3675 *pps = sym_push(ass->v, &ass->type, 0, ass->c);
3676 pps = &((*pps)->next);
3678 *pps = NULL;
3680 struct_add_offset(f->type.ref, offset);
3681 f->c = 0;
3682 } else {
3683 f->c = offset;
3686 f->r = 0;
3689 if (pcc)
3690 c += (bit_pos + 7) >> 3;
3692 /* store size and alignment */
3693 a = bt = ad->a.aligned ? 1 << (ad->a.aligned - 1) : 1;
3694 if (a < maxalign)
3695 a = maxalign;
3696 type->ref->r = a;
3697 if (pragma_pack && pragma_pack < maxalign && 0 == pcc) {
3698 /* can happen if individual align for some member was given. In
3699 this case MSVC ignores maxalign when aligning the size */
3700 a = pragma_pack;
3701 if (a < bt)
3702 a = bt;
3704 c = (c + a - 1) & -a;
3705 type->ref->c = c;
3707 #ifdef BF_DEBUG
3708 printf("struct size %-2d align %-2d\n\n", c, a), fflush(stdout);
3709 #endif
3711 /* check whether we can access bitfields by their type */
3712 for (f = type->ref->next; f; f = f->next) {
3713 int s, px, cx, c0;
3714 CType t;
3716 if (0 == (f->type.t & VT_BITFIELD))
3717 continue;
3718 f->type.ref = f;
3719 f->auxtype = -1;
3720 bit_size = BIT_SIZE(f->type.t);
3721 if (bit_size == 0)
3722 continue;
3723 bit_pos = BIT_POS(f->type.t);
3724 size = type_size(&f->type, &align);
3725 if (bit_pos + bit_size <= size * 8 && f->c + size <= c)
3726 continue;
3728 /* try to access the field using a different type */
3729 c0 = -1, s = align = 1;
3730 for (;;) {
3731 px = f->c * 8 + bit_pos;
3732 cx = (px >> 3) & -align;
3733 px = px - (cx << 3);
3734 if (c0 == cx)
3735 break;
3736 s = (px + bit_size + 7) >> 3;
3737 if (s > 4) {
3738 t.t = VT_LLONG;
3739 } else if (s > 2) {
3740 t.t = VT_INT;
3741 } else if (s > 1) {
3742 t.t = VT_SHORT;
3743 } else {
3744 t.t = VT_BYTE;
3746 s = type_size(&t, &align);
3747 c0 = cx;
3750 if (px + bit_size <= s * 8 && cx + s <= c) {
3751 /* update offset and bit position */
3752 f->c = cx;
3753 bit_pos = px;
3754 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
3755 | (bit_pos << VT_STRUCT_SHIFT);
3756 if (s != size)
3757 f->auxtype = t.t;
3758 #ifdef BF_DEBUG
3759 printf("FIX field %s offset %-2d size %-2d align %-2d "
3760 "pos %-2d bits %-2d\n",
3761 get_tok_str(f->v & ~SYM_FIELD, NULL),
3762 cx, s, align, px, bit_size);
3763 #endif
3764 } else {
3765 /* fall back to load/store single-byte wise */
3766 f->auxtype = VT_STRUCT;
3767 #ifdef BF_DEBUG
3768 printf("FIX field %s : load byte-wise\n",
3769 get_tok_str(f->v & ~SYM_FIELD, NULL));
3770 #endif
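/* Worked examples (editorial sketch) for the default pcc/gcc-compatible
   layout on a target with 4-byte int:
       struct { char c; int i; }         -> c at 0, i at 4, size 8, align 4
       struct __attribute__((packed))
              { char c; int i; }         -> c at 0, i at 1, size 5, align 1
       struct { int a:3, b:5; }          -> both share one int, a at bit 0,
                                            b at bit 3, size 4, align 4    */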
3775 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
3776 static void struct_decl(CType *type, int u)
3778 int v, c, size, align, flexible;
3779 int bit_size, bsize, bt;
3780 Sym *s, *ss, **ps;
3781 AttributeDef ad, ad1;
3782 CType type1, btype;
3784 memset(&ad, 0, sizeof ad);
3785 next();
3786 parse_attribute(&ad);
3787 if (tok != '{') {
3788 v = tok;
3789 next();
3790 /* struct already defined ? return it */
3791 if (v < TOK_IDENT)
3792 expect("struct/union/enum name");
3793 s = struct_find(v);
3794 if (s && (s->sym_scope == local_scope || tok != '{')) {
3795 if (u == s->type.t)
3796 goto do_decl;
3797 if (u == VT_ENUM && IS_ENUM(s->type.t))
3798 goto do_decl;
3799 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
3801 } else {
3802 v = anon_sym++;
3804 /* Record the original enum/struct/union token. */
3805 type1.t = u == VT_ENUM ? u | VT_INT | VT_UNSIGNED : u;
3806 type1.ref = NULL;
3807 /* we put an undefined size for struct/union */
3808 s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
3809 s->r = 0; /* default alignment is zero, as in gcc */
3810 do_decl:
3811 type->t = s->type.t;
3812 type->ref = s;
3814 if (tok == '{') {
3815 next();
3816 if (s->c != -1)
3817 tcc_error("struct/union/enum already defined");
3818 /* cannot be empty */
3819 /* empty enums are not allowed */
3820 ps = &s->next;
3821 if (u == VT_ENUM) {
3822 long long ll = 0, pl = 0, nl = 0;
3823 CType t;
3824 t.ref = s;
3825 /* enum symbols have static storage */
3826 t.t = VT_INT|VT_STATIC|VT_ENUM_VAL;
3827 for(;;) {
3828 v = tok;
3829 if (v < TOK_UIDENT)
3830 expect("identifier");
3831 ss = sym_find(v);
3832 if (ss && !local_stack)
3833 tcc_error("redefinition of enumerator '%s'",
3834 get_tok_str(v, NULL));
3835 next();
3836 if (tok == '=') {
3837 next();
3838 ll = expr_const64();
3840 ss = sym_push(v, &t, VT_CONST, 0);
3841 ss->enum_val = ll;
3842 *ps = ss, ps = &ss->next;
3843 if (ll < nl)
3844 nl = ll;
3845 if (ll > pl)
3846 pl = ll;
3847 if (tok != ',')
3848 break;
3849 next();
3850 ll++;
3851 /* NOTE: we accept a trailing comma */
3852 if (tok == '}')
3853 break;
3855 skip('}');
3856 /* set integral type of the enum */
3857 t.t = VT_INT;
3858 if (nl >= 0) {
3859 if (pl != (unsigned)pl)
3860 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
3861 t.t |= VT_UNSIGNED;
3862 } else if (pl != (int)pl || nl != (int)nl)
3863 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
3864 s->type.t = type->t = t.t | VT_ENUM;
3865 s->c = 0;
3866 /* set type for enum members */
3867 for (ss = s->next; ss; ss = ss->next) {
3868 ll = ss->enum_val;
3869 if (ll == (int)ll) /* default is int if it fits */
3870 continue;
3871 if (t.t & VT_UNSIGNED) {
3872 ss->type.t |= VT_UNSIGNED;
3873 if (ll == (unsigned)ll)
3874 continue;
3876 ss->type.t = (ss->type.t & ~VT_BTYPE)
3877 | (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
3879 } else {
3880 c = 0;
3881 flexible = 0;
3882 while (tok != '}') {
3883 if (!parse_btype(&btype, &ad1)) {
3884 skip(';');
3885 continue;
3887 while (1) {
3888 if (flexible)
3889 tcc_error("flexible array member '%s' not at the end of struct",
3890 get_tok_str(v, NULL));
3891 bit_size = -1;
3892 v = 0;
3893 type1 = btype;
3894 if (tok != ':') {
3895 if (tok != ';')
3896 type_decl(&type1, &ad1, &v, TYPE_DIRECT);
3897 if (v == 0) {
3898 if ((type1.t & VT_BTYPE) != VT_STRUCT)
3899 expect("identifier");
3900 else {
3901 int v = btype.ref->v;
3902 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
3903 if (tcc_state->ms_extensions == 0)
3904 expect("identifier");
3908 if (type_size(&type1, &align) < 0) {
3909 if ((u == VT_STRUCT) && (type1.t & VT_ARRAY) && c)
3910 flexible = 1;
3911 else
3912 tcc_error("field '%s' has incomplete type",
3913 get_tok_str(v, NULL));
3915 if ((type1.t & VT_BTYPE) == VT_FUNC ||
3916 (type1.t & VT_STORAGE))
3917 tcc_error("invalid type for '%s'",
3918 get_tok_str(v, NULL));
3920 if (tok == ':') {
3921 next();
3922 bit_size = expr_const();
3923 /* XXX: handle v = 0 case for messages */
3924 if (bit_size < 0)
3925 tcc_error("negative width in bit-field '%s'",
3926 get_tok_str(v, NULL));
3927 if (v && bit_size == 0)
3928 tcc_error("zero width for bit-field '%s'",
3929 get_tok_str(v, NULL));
3930 parse_attribute(&ad1);
3932 size = type_size(&type1, &align);
3933 if (bit_size >= 0) {
3934 bt = type1.t & VT_BTYPE;
3935 if (bt != VT_INT &&
3936 bt != VT_BYTE &&
3937 bt != VT_SHORT &&
3938 bt != VT_BOOL &&
3939 bt != VT_LLONG)
3940 tcc_error("bitfields must have scalar type");
3941 bsize = size * 8;
3942 if (bit_size > bsize) {
3943 tcc_error("width of '%s' exceeds its type",
3944 get_tok_str(v, NULL));
3945 } else if (bit_size == bsize
3946 && !ad.a.packed && !ad1.a.packed) {
3947 /* no need for bit fields */
3949 } else if (bit_size == 64) {
3950 tcc_error("field width 64 not implemented");
3951 } else {
3952 type1.t = (type1.t & ~VT_STRUCT_MASK)
3953 | VT_BITFIELD
3954 | (bit_size << (VT_STRUCT_SHIFT + 6));
3957 if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
3958 /* Remember we've seen a real field to check
3959 for placement of flexible array member. */
3960 c = 1;
3962 /* If member is a struct or bit-field, enforce
3963 placing into the struct (as anonymous). */
3964 if (v == 0 &&
3965 ((type1.t & VT_BTYPE) == VT_STRUCT ||
3966 bit_size >= 0)) {
3967 v = anon_sym++;
3969 if (v) {
3970 ss = sym_push(v | SYM_FIELD, &type1, 0, 0);
3971 ss->a = ad1.a;
3972 *ps = ss;
3973 ps = &ss->next;
3975 if (tok == ';' || tok == TOK_EOF)
3976 break;
3977 skip(',');
3979 skip(';');
3981 skip('}');
3982 parse_attribute(&ad);
3983 struct_layout(type, &ad);
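/* Examples of the enum typing above (editorial sketch, LONG_SIZE == 8):
       enum { A = -1 }           -> int
       enum { B = 0x80000000 }   -> unsigned int  (fits unsigned, not int)
       enum { C = 0x100000000 }  -> unsigned long (64-bit container)
   members whose value fits in int individually keep type int. */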
3988 static void sym_to_attr(AttributeDef *ad, Sym *s)
3990 if (s->a.aligned && 0 == ad->a.aligned)
3991 ad->a.aligned = s->a.aligned;
3992 if (s->f.func_call && 0 == ad->f.func_call)
3993 ad->f.func_call = s->f.func_call;
3994 if (s->f.func_type && 0 == ad->f.func_type)
3995 ad->f.func_type = s->f.func_type;
3996 if (s->a.packed)
3997 ad->a.packed = 1;
4000 /* Add type qualifiers to a type. If the type is an array then the qualifiers
4001 are added to the element type, copied because it could be a typedef. */
4002 static void parse_btype_qualify(CType *type, int qualifiers)
4004 while (type->t & VT_ARRAY) {
4005 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
4006 type = &type->ref->type;
4008 type->t |= qualifiers;
4011 /* return 0 if no type declaration. otherwise, return the basic type
4012 and skip it.
4013 */
4014 static int parse_btype(CType *type, AttributeDef *ad)
4016 int t, u, bt, st, type_found, typespec_found, g;
4017 Sym *s;
4018 CType type1;
4020 memset(ad, 0, sizeof(AttributeDef));
4021 type_found = 0;
4022 typespec_found = 0;
4023 t = VT_INT;
4024 bt = st = -1;
4025 type->ref = NULL;
4027 while(1) {
4028 switch(tok) {
4029 case TOK_EXTENSION:
4030 /* currently, we really ignore extension */
4031 next();
4032 continue;
4034 /* basic types */
4035 case TOK_CHAR:
4036 u = VT_BYTE;
4037 basic_type:
4038 next();
4039 basic_type1:
4040 if (u == VT_SHORT || u == VT_LONG) {
4041 if (st != -1 || (bt != -1 && bt != VT_INT))
4042 tmbt: tcc_error("too many basic types");
4043 st = u;
4044 } else {
4045 if (bt != -1 || (st != -1 && u != VT_INT))
4046 goto tmbt;
4047 bt = u;
4049 if (u != VT_INT)
4050 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
4051 typespec_found = 1;
4052 break;
4053 case TOK_VOID:
4054 u = VT_VOID;
4055 goto basic_type;
4056 case TOK_SHORT:
4057 u = VT_SHORT;
4058 goto basic_type;
4059 case TOK_INT:
4060 u = VT_INT;
4061 goto basic_type;
4062 case TOK_LONG:
4063 if ((t & VT_BTYPE) == VT_DOUBLE) {
4064 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
4065 } else if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
4066 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LLONG;
4067 } else {
4068 u = VT_LONG;
4069 goto basic_type;
4071 next();
4072 break;
4073 #ifdef TCC_TARGET_ARM64
4074 case TOK_UINT128:
4075 /* GCC's __uint128_t appears in some Linux header files. Make it a
4076 synonym for long double to get the size and alignment right. */
4077 u = VT_LDOUBLE;
4078 goto basic_type;
4079 #endif
4080 case TOK_BOOL:
4081 u = VT_BOOL;
4082 goto basic_type;
4083 case TOK_FLOAT:
4084 u = VT_FLOAT;
4085 goto basic_type;
4086 case TOK_DOUBLE:
4087 if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
4088 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
4089 } else {
4090 u = VT_DOUBLE;
4091 goto basic_type;
4093 next();
4094 break;
4095 case TOK_ENUM:
4096 struct_decl(&type1, VT_ENUM);
4097 basic_type2:
4098 u = type1.t;
4099 type->ref = type1.ref;
4100 goto basic_type1;
4101 case TOK_STRUCT:
4102 struct_decl(&type1, VT_STRUCT);
4103 goto basic_type2;
4104 case TOK_UNION:
4105 struct_decl(&type1, VT_UNION);
4106 goto basic_type2;
4108 /* type modifiers */
4109 case TOK_CONST1:
4110 case TOK_CONST2:
4111 case TOK_CONST3:
4112 type->t = t;
4113 parse_btype_qualify(type, VT_CONSTANT);
4114 t = type->t;
4115 next();
4116 break;
4117 case TOK_VOLATILE1:
4118 case TOK_VOLATILE2:
4119 case TOK_VOLATILE3:
4120 type->t = t;
4121 parse_btype_qualify(type, VT_VOLATILE);
4122 t = type->t;
4123 next();
4124 break;
4125 case TOK_SIGNED1:
4126 case TOK_SIGNED2:
4127 case TOK_SIGNED3:
4128 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
4129 tcc_error("signed and unsigned modifier");
4130 t |= VT_DEFSIGN;
4131 next();
4132 typespec_found = 1;
4133 break;
4134 case TOK_REGISTER:
4135 case TOK_AUTO:
4136 case TOK_RESTRICT1:
4137 case TOK_RESTRICT2:
4138 case TOK_RESTRICT3:
4139 next();
4140 break;
4141 case TOK_UNSIGNED:
4142 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
4143 tcc_error("signed and unsigned modifier");
4144 t |= VT_DEFSIGN | VT_UNSIGNED;
4145 next();
4146 typespec_found = 1;
4147 break;
4149 /* storage */
4150 case TOK_EXTERN:
4151 g = VT_EXTERN;
4152 goto storage;
4153 case TOK_STATIC:
4154 g = VT_STATIC;
4155 goto storage;
4156 case TOK_TYPEDEF:
4157 g = VT_TYPEDEF;
4158 goto storage;
4159 storage:
4160 if (t & (VT_EXTERN|VT_STATIC|VT_TYPEDEF) & ~g)
4161 tcc_error("multiple storage classes");
4162 t |= g;
4163 next();
4164 break;
4165 case TOK_INLINE1:
4166 case TOK_INLINE2:
4167 case TOK_INLINE3:
4168 t |= VT_INLINE;
4169 next();
4170 break;
4172 /* GNUC attribute */
4173 case TOK_ATTRIBUTE1:
4174 case TOK_ATTRIBUTE2:
4175 parse_attribute(ad);
4176 if (ad->attr_mode) {
4177 u = ad->attr_mode -1;
4178 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
4180 break;
4181 /* GNUC typeof */
4182 case TOK_TYPEOF1:
4183 case TOK_TYPEOF2:
4184 case TOK_TYPEOF3:
4185 next();
4186 parse_expr_type(&type1);
4187 /* remove all storage modifiers except typedef */
4188 type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
4189 if (type1.ref)
4190 sym_to_attr(ad, type1.ref);
4191 goto basic_type2;
4192 default:
4193 if (typespec_found)
4194 goto the_end;
4195 s = sym_find(tok);
4196 if (!s || !(s->type.t & VT_TYPEDEF))
4197 goto the_end;
4198 t &= ~(VT_BTYPE|VT_LONG);
4199 u = t & ~(VT_CONSTANT | VT_VOLATILE), t ^= u;
4200 type->t = (s->type.t & ~VT_TYPEDEF) | u;
4201 type->ref = s->type.ref;
4202 if (t)
4203 parse_btype_qualify(type, t);
4204 t = type->t;
4205 /* get attributes from typedef */
4206 sym_to_attr(ad, s);
4207 next();
4208 typespec_found = 1;
4209 st = bt = -2;
4210 break;
4212 type_found = 1;
4214 the_end:
4215 if (tcc_state->char_is_unsigned) {
4216 if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
4217 t |= VT_UNSIGNED;
4219 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
4220 bt = t & (VT_BTYPE|VT_LONG);
4221 if (bt == VT_LONG)
4222 t |= LONG_SIZE == 8 ? VT_LLONG : VT_INT;
4223 #ifdef TCC_TARGET_PE
4224 if (bt == VT_LDOUBLE)
4225 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_DOUBLE;
4226 #endif
4227 type->t = t;
4228 return type_found;
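/* Example (editorial): "unsigned long long" is accumulated as
       TOK_UNSIGNED -> t |= VT_DEFSIGN | VT_UNSIGNED
       TOK_LONG     -> t gains VT_LONG
       TOK_LONG     -> (t & (VT_BTYPE|VT_LONG)) == VT_LONG, so t becomes VT_LLONG
   while a single "long" is only widened to VT_INT or VT_LLONG at the_end,
   depending on LONG_SIZE. */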
4231 /* convert a function parameter type (array to pointer and function to
4232 function pointer) */
4233 static inline void convert_parameter_type(CType *pt)
4235 /* remove const and volatile qualifiers (XXX: const could be used
4236 to indicate a const function parameter) */
4237 pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
4238 /* array must be transformed to pointer according to ANSI C */
4239 pt->t &= ~VT_ARRAY;
4240 if ((pt->t & VT_BTYPE) == VT_FUNC) {
4241 mk_pointer(pt);
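/* Example (editorial): a parameter declared as "int a[10]" is adjusted to
   "int *a", and a parameter declared with a function type becomes a
   pointer to that function type, per the usual C parameter adjustments. */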
4245 ST_FUNC void parse_asm_str(CString *astr)
4247 skip('(');
4248 parse_mult_str(astr, "string constant");
4251 /* Parse an asm label and return the token */
4252 static int asm_label_instr(void)
4254 int v;
4255 CString astr;
4257 next();
4258 parse_asm_str(&astr);
4259 skip(')');
4260 #ifdef ASM_DEBUG
4261 printf("asm_alias: \"%s\"\n", (char *)astr.data);
4262 #endif
4263 v = tok_alloc(astr.data, astr.size - 1)->tok;
4264 cstr_free(&astr);
4265 return v;
4268 static int post_type(CType *type, AttributeDef *ad, int storage, int td)
4270 int n, l, t1, arg_size, align;
4271 Sym **plast, *s, *first;
4272 AttributeDef ad1;
4273 CType pt;
4275 if (tok == '(') {
4276 /* function type, or recursive declarator (return if so) */
4277 next();
4278 if (td && !(td & TYPE_ABSTRACT))
4279 return 0;
4280 if (tok == ')')
4281 l = 0;
4282 else if (parse_btype(&pt, &ad1))
4283 l = FUNC_NEW;
4284 else if (td)
4285 return 0;
4286 else
4287 l = FUNC_OLD;
4288 first = NULL;
4289 plast = &first;
4290 arg_size = 0;
4291 if (l) {
4292 for(;;) {
4293 /* read param name and compute offset */
4294 if (l != FUNC_OLD) {
4295 if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
4296 break;
4297 type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT);
4298 if ((pt.t & VT_BTYPE) == VT_VOID)
4299 tcc_error("parameter declared as void");
4300 arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
4301 } else {
4302 n = tok;
4303 if (n < TOK_UIDENT)
4304 expect("identifier");
4305 pt.t = VT_VOID; /* invalid type */
4306 next();
4308 convert_parameter_type(&pt);
4309 s = sym_push(n | SYM_FIELD, &pt, 0, 0);
4310 *plast = s;
4311 plast = &s->next;
4312 if (tok == ')')
4313 break;
4314 skip(',');
4315 if (l == FUNC_NEW && tok == TOK_DOTS) {
4316 l = FUNC_ELLIPSIS;
4317 next();
4318 break;
4320 if (l == FUNC_NEW && !parse_btype(&pt, &ad1))
4321 tcc_error("invalid type");
4323 } else
4324 /* if no parameters, then old type prototype */
4325 l = FUNC_OLD;
4326 skip(')');
4327 /* NOTE: const is ignored in returned type as it has a special
4328 meaning in gcc / C++ */
4329 type->t &= ~VT_CONSTANT;
4330 /* some ancient pre-K&R C allows a function to return an array
4331 and the array brackets to be put after the arguments, such
4332 that "int c()[]" means something like "int[] c()" */
4333 if (tok == '[') {
4334 next();
4335 skip(']'); /* only handle simple "[]" */
4336 mk_pointer(type);
4338 /* we push an anonymous symbol which will contain the function prototype */
4339 ad->f.func_args = arg_size;
4340 ad->f.func_type = l;
4341 s = sym_push(SYM_FIELD, type, 0, 0);
4342 s->a = ad->a;
4343 s->f = ad->f;
4344 s->next = first;
4345 type->t = VT_FUNC;
4346 type->ref = s;
4347 } else if (tok == '[') {
4348 int saved_nocode_wanted = nocode_wanted;
4349 /* array definition */
4350 next();
4351 if (tok == TOK_RESTRICT1)
4352 next();
4353 n = -1;
4354 t1 = 0;
4355 if (tok != ']') {
4356 if (!local_stack || (storage & VT_STATIC))
4357 vpushi(expr_const());
4358 else {
4359 /* the length of a VLA (which can only happen with local_stack && !VT_STATIC)
4360 must always be evaluated, even under nocode_wanted,
4361 so that its size slot is initialized (e.g. under sizeof
4362 or typeof). */
4363 nocode_wanted = 0;
4364 gexpr();
4366 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4367 n = vtop->c.i;
4368 if (n < 0)
4369 tcc_error("invalid array size");
4370 } else {
4371 if (!is_integer_btype(vtop->type.t & VT_BTYPE))
4372 tcc_error("size of variable length array should be an integer");
4373 t1 = VT_VLA;
4376 skip(']');
4377 /* parse next post type */
4378 post_type(type, ad, storage, 0);
4379 if (type->t == VT_FUNC)
4380 tcc_error("declaration of an array of functions");
4381 t1 |= type->t & VT_VLA;
4383 if (t1 & VT_VLA) {
4384 loc -= type_size(&int_type, &align);
4385 loc &= -align;
4386 n = loc;
4388 vla_runtime_type_size(type, &align);
4389 gen_op('*');
4390 vset(&int_type, VT_LOCAL|VT_LVAL, n);
4391 vswap();
4392 vstore();
4394 if (n != -1)
4395 vpop();
4396 nocode_wanted = saved_nocode_wanted;
4398 /* we push an anonymous symbol which will contain the array
4399 element type */
4400 s = sym_push(SYM_FIELD, type, 0, n);
4401 type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
4402 type->ref = s;
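/* Rough illustration of the VLA handling above: for "int n = 3; int a[n][4];"
   the dimension expression is multiplied at run time by the element size
   (here sizeof(int[4])) and the resulting byte count is stored in the
   int-sized stack slot reserved above; that slot's offset is what gets
   recorded in the pushed symbol's 'c' field, whereas a fixed-size array
   records the element count there instead. */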
4404 return 1;
4407 /* Parse a type declarator (except basic type), and return the type
4408 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4409 expected. 'type' should contain the basic type. 'ad' is the
4410 attribute definition of the basic type. It can be modified by
4411 type_decl(). If this (possibly abstract) declarator is a pointer chain
4412 it returns the innermost pointed-to type (equals *type, but is a different
4413 pointer), otherwise it returns type itself; the result is used for recursive calls. */
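/* Rough illustration of the contract above: for "int **p", 'type' is left
   describing "pointer to pointer to int" and the returned pointer refers to
   the innermost "int".  For a nested declarator such as "int (*fp)(void)",
   the recursive call returns that innermost type so that the "(void)"
   parameter list following the ')' can be applied to it, yielding
   "pointer to function(void) returning int". */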
4414 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td)
4416 CType *post, *ret;
4417 int qualifiers, storage;
4419 /* recursive type, remove storage bits first, apply them later again */
4420 storage = type->t & VT_STORAGE;
4421 type->t &= ~VT_STORAGE;
4422 post = ret = type;
4424 while (tok == '*') {
4425 qualifiers = 0;
4426 redo:
4427 next();
4428 switch(tok) {
4429 case TOK_CONST1:
4430 case TOK_CONST2:
4431 case TOK_CONST3:
4432 qualifiers |= VT_CONSTANT;
4433 goto redo;
4434 case TOK_VOLATILE1:
4435 case TOK_VOLATILE2:
4436 case TOK_VOLATILE3:
4437 qualifiers |= VT_VOLATILE;
4438 goto redo;
4439 case TOK_RESTRICT1:
4440 case TOK_RESTRICT2:
4441 case TOK_RESTRICT3:
4442 goto redo;
4443 /* XXX: clarify attribute handling */
4444 case TOK_ATTRIBUTE1:
4445 case TOK_ATTRIBUTE2:
4446 parse_attribute(ad);
4447 break;
4449 mk_pointer(type);
4450 type->t |= qualifiers;
4451 if (ret == type)
4452 /* innermost pointed to type is the one for the first derivation */
4453 ret = pointed_type(type);
4456 if (tok == '(') {
4457 /* This is possibly a parameter type list for abstract declarators
4458 ('int ()'), use post_type for testing this. */
4459 if (!post_type(type, ad, 0, td)) {
4460 /* It's not, so it's a nested declarator, and the post operations
4461 apply to the innermost pointed to type (if any). */
4462 /* XXX: this is not correct to modify 'ad' at this point, but
4463 the syntax is not clear */
4464 parse_attribute(ad);
4465 post = type_decl(type, ad, v, td);
4466 skip(')');
4468 } else if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
4469 /* type identifier */
4470 *v = tok;
4471 next();
4472 } else {
4473 if (!(td & TYPE_ABSTRACT))
4474 expect("identifier");
4475 *v = 0;
4477 post_type(post, ad, storage, 0);
4478 parse_attribute(ad);
4479 type->t |= storage;
4480 return ret;
4483 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
4484 ST_FUNC int lvalue_type(int t)
4486 int bt, r;
4487 r = VT_LVAL;
4488 bt = t & VT_BTYPE;
4489 if (bt == VT_BYTE || bt == VT_BOOL)
4490 r |= VT_LVAL_BYTE;
4491 else if (bt == VT_SHORT)
4492 r |= VT_LVAL_SHORT;
4493 else
4494 return r;
4495 if (t & VT_UNSIGNED)
4496 r |= VT_LVAL_UNSIGNED;
4497 return r;
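/* For illustration: an "unsigned short" object yields
   VT_LVAL | VT_LVAL_SHORT | VT_LVAL_UNSIGNED, so loads through the lvalue
   use the proper width and zero-extension; plain int or pointer lvalues
   get just VT_LVAL. */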
4500 /* indirection with full error checking and bound check */
4501 ST_FUNC void indir(void)
4503 if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
4504 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
4505 return;
4506 expect("pointer");
4508 if (vtop->r & VT_LVAL)
4509 gv(RC_INT);
4510 vtop->type = *pointed_type(&vtop->type);
4511 /* Arrays and functions are never lvalues */
4512 if (!(vtop->type.t & VT_ARRAY) && !(vtop->type.t & VT_VLA)
4513 && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
4514 vtop->r |= lvalue_type(vtop->type.t);
4515 /* if bound checking, the referenced pointer must be checked */
4516 #ifdef CONFIG_TCC_BCHECK
4517 if (tcc_state->do_bounds_check)
4518 vtop->r |= VT_MUSTBOUND;
4519 #endif
4523 /* pass a parameter to a function and do type checking and casting */
4524 static void gfunc_param_typed(Sym *func, Sym *arg)
4526 int func_type;
4527 CType type;
4529 func_type = func->f.func_type;
4530 if (func_type == FUNC_OLD ||
4531 (func_type == FUNC_ELLIPSIS && arg == NULL)) {
4532 /* default casting : only need to convert float to double */
4533 if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
4534 gen_cast_s(VT_DOUBLE);
4535 } else if (vtop->type.t & VT_BITFIELD) {
4536 type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
4537 type.ref = vtop->type.ref;
4538 gen_cast(&type);
4540 } else if (arg == NULL) {
4541 tcc_error("too many arguments to function");
4542 } else {
4543 type = arg->type;
4544 type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
4545 gen_assign_cast(&type);
4549 /* parse an expression and return its type without any side effect. */
4550 static void expr_type(CType *type, void (*expr_fn)(void))
4552 nocode_wanted++;
4553 expr_fn();
4554 *type = vtop->type;
4555 vpop();
4556 nocode_wanted--;
4559 /* parse an expression of the form '(type)' or '(expr)' and return its
4560 type */
4561 static void parse_expr_type(CType *type)
4563 int n;
4564 AttributeDef ad;
4566 skip('(');
4567 if (parse_btype(type, &ad)) {
4568 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4569 } else {
4570 expr_type(type, gexpr);
4572 skip(')');
4575 static void parse_type(CType *type)
4577 AttributeDef ad;
4578 int n;
4580 if (!parse_btype(type, &ad)) {
4581 expect("type");
4583 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4586 static void parse_builtin_params(int nc, const char *args)
4588 char c, sep = '(';
4589 CType t;
4590 if (nc)
4591 nocode_wanted++;
4592 next();
4593 while ((c = *args++)) {
4594 skip(sep);
4595 sep = ',';
4596 switch (c) {
4597 case 'e': expr_eq(); continue;
4598 case 't': parse_type(&t); vpush(&t); continue;
4599 default: tcc_error("internal error"); break;
4602 skip(')');
4603 if (nc)
4604 nocode_wanted--;
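/* For illustration: the 'args' string encodes the expected builtin
   parameters, 'e' for an assignment expression and 't' for a type name,
   so parse_builtin_params(0, "et") parses "(expr, type)".  A non-zero 'nc'
   parses the arguments under nocode_wanted, i.e. without emitting code. */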
4607 ST_FUNC void unary(void)
4609 int n, t, align, size, r, sizeof_caller;
4610 CType type;
4611 Sym *s;
4612 AttributeDef ad;
4614 sizeof_caller = in_sizeof;
4615 in_sizeof = 0;
4616 type.ref = NULL;
4617 /* XXX: GCC 2.95.3 does not generate a jump table here although it
4618 would be better to do so */
4619 tok_next:
4620 switch(tok) {
4621 case TOK_EXTENSION:
4622 next();
4623 goto tok_next;
4624 case TOK_LCHAR:
4625 #ifdef TCC_TARGET_PE
4626 t = VT_SHORT|VT_UNSIGNED;
4627 goto push_tokc;
4628 #endif
4629 case TOK_CINT:
4630 case TOK_CCHAR:
4631 t = VT_INT;
4632 push_tokc:
4633 type.t = t;
4634 vsetc(&type, VT_CONST, &tokc);
4635 next();
4636 break;
4637 case TOK_CUINT:
4638 t = VT_INT | VT_UNSIGNED;
4639 goto push_tokc;
4640 case TOK_CLLONG:
4641 t = VT_LLONG;
4642 goto push_tokc;
4643 case TOK_CULLONG:
4644 t = VT_LLONG | VT_UNSIGNED;
4645 goto push_tokc;
4646 case TOK_CFLOAT:
4647 t = VT_FLOAT;
4648 goto push_tokc;
4649 case TOK_CDOUBLE:
4650 t = VT_DOUBLE;
4651 goto push_tokc;
4652 case TOK_CLDOUBLE:
4653 t = VT_LDOUBLE;
4654 goto push_tokc;
4655 case TOK_CLONG:
4656 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG;
4657 goto push_tokc;
4658 case TOK_CULONG:
4659 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG | VT_UNSIGNED;
4660 goto push_tokc;
4661 case TOK___FUNCTION__:
4662 if (!gnu_ext)
4663 goto tok_identifier;
4664 /* fall thru */
4665 case TOK___FUNC__:
4667 void *ptr;
4668 int len;
4669 /* special function name identifier */
4670 len = strlen(funcname) + 1;
4671 /* generate char[len] type */
4672 type.t = VT_BYTE;
4673 mk_pointer(&type);
4674 type.t |= VT_ARRAY;
4675 type.ref->c = len;
4676 vpush_ref(&type, data_section, data_section->data_offset, len);
4677 if (!NODATA_WANTED) {
4678 ptr = section_ptr_add(data_section, len);
4679 memcpy(ptr, funcname, len);
4681 next();
4683 break;
4684 case TOK_LSTR:
4685 #ifdef TCC_TARGET_PE
4686 t = VT_SHORT | VT_UNSIGNED;
4687 #else
4688 t = VT_INT;
4689 #endif
4690 goto str_init;
4691 case TOK_STR:
4692 /* string parsing */
4693 t = VT_BYTE;
4694 if (tcc_state->char_is_unsigned)
4695 t = VT_BYTE | VT_UNSIGNED;
4696 str_init:
4697 if (tcc_state->warn_write_strings)
4698 t |= VT_CONSTANT;
4699 type.t = t;
4700 mk_pointer(&type);
4701 type.t |= VT_ARRAY;
4702 memset(&ad, 0, sizeof(AttributeDef));
4703 decl_initializer_alloc(&type, &ad, VT_CONST, 2, 0, 0);
4704 break;
4705 case '(':
4706 next();
4707 /* cast ? */
4708 if (parse_btype(&type, &ad)) {
4709 type_decl(&type, &ad, &n, TYPE_ABSTRACT);
4710 skip(')');
4711 /* check ISOC99 compound literal */
4712 if (tok == '{') {
4713 /* data is allocated locally by default */
4714 if (global_expr)
4715 r = VT_CONST;
4716 else
4717 r = VT_LOCAL;
4718 /* all except arrays are lvalues */
4719 if (!(type.t & VT_ARRAY))
4720 r |= lvalue_type(type.t);
4721 memset(&ad, 0, sizeof(AttributeDef));
4722 decl_initializer_alloc(&type, &ad, r, 1, 0, 0);
4723 } else {
4724 if (sizeof_caller) {
4725 vpush(&type);
4726 return;
4728 unary();
4729 gen_cast(&type);
4731 } else if (tok == '{') {
4732 int saved_nocode_wanted = nocode_wanted;
4733 if (const_wanted)
4734 tcc_error("expected constant");
4735 /* save all registers */
4736 save_regs(0);
4737 /* statement expression : we do not accept break/continue
4738 inside as GCC does. We do retain the nocode_wanted state,
4739 as statement expressions can't ever be entered from the
4740 outside, so any reactivation of code emission (from labels
4741 or loop heads) can be disabled again after the end of it. */
4742 block(NULL, NULL, 1);
4743 nocode_wanted = saved_nocode_wanted;
4744 skip(')');
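/* Rough illustration (GNU statement expression): in
       x = ({ int i = f(); i + 1; });
   the braced block is parsed with is_expr = 1, so the value of its last
   expression statement is left on the value stack and becomes the value
   of the whole parenthesized expression ('f' here is just an example name). */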
4745 } else {
4746 gexpr();
4747 skip(')');
4749 break;
4750 case '*':
4751 next();
4752 unary();
4753 indir();
4754 break;
4755 case '&':
4756 next();
4757 unary();
4758 /* function names must be treated as function pointers,
4759 except for unary '&' and sizeof. Since we consider that
4760 functions are not lvalues, we only have to handle it
4761 there and in function calls. */
4762 /* arrays can also be used although they are not lvalues */
4763 if ((vtop->type.t & VT_BTYPE) != VT_FUNC &&
4764 !(vtop->type.t & VT_ARRAY))
4765 test_lvalue();
4766 mk_pointer(&vtop->type);
4767 gaddrof();
4768 break;
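/* For illustration: "&" accepts function and array operands even though
   they are not lvalues, e.g. with "int a[4];" the declaration
   "int (*p)[4] = &a;" takes the address of the whole array, and "&func"
   yields an ordinary function pointer. */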
4769 case '!':
4770 next();
4771 unary();
4772 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4773 gen_cast_s(VT_BOOL);
4774 vtop->c.i = !vtop->c.i;
4775 } else if ((vtop->r & VT_VALMASK) == VT_CMP)
4776 vtop->c.i ^= 1;
4777 else {
4778 save_regs(1);
4779 vseti(VT_JMP, gvtst(1, 0));
4781 break;
4782 case '~':
4783 next();
4784 unary();
4785 vpushi(-1);
4786 gen_op('^');
4787 break;
4788 case '+':
4789 next();
4790 unary();
4791 if ((vtop->type.t & VT_BTYPE) == VT_PTR)
4792 tcc_error("pointer not accepted for unary plus");
4793 /* In order to force cast, we add zero, except for floating point
4794 where we really need a no-op (otherwise -0.0 will be transformed
4795 into +0.0). */
4796 if (!is_float(vtop->type.t)) {
4797 vpushi(0);
4798 gen_op('+');
4800 break;
4801 case TOK_SIZEOF:
4802 case TOK_ALIGNOF1:
4803 case TOK_ALIGNOF2:
4804 t = tok;
4805 next();
4806 in_sizeof++;
4807 expr_type(&type, unary); /* performs in_sizeof = 0 (unary() resets it on entry) */
4808 s = vtop[1].sym; /* hack: accessing previous vtop */
4809 size = type_size(&type, &align);
4810 if (s && s->a.aligned)
4811 align = 1 << (s->a.aligned - 1);
4812 if (t == TOK_SIZEOF) {
4813 if (!(type.t & VT_VLA)) {
4814 if (size < 0)
4815 tcc_error("sizeof applied to an incomplete type");
4816 vpushs(size);
4817 } else {
4818 vla_runtime_type_size(&type, &align);
4820 } else {
4821 vpushs(align);
4823 vtop->type.t |= VT_UNSIGNED;
4824 break;
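/* For illustration: with "int a[n];" the expression "sizeof a" cannot be a
   compile-time constant, so the size is read from the VLA's runtime size
   slot via vla_runtime_type_size() instead of being pushed as an immediate;
   in every case the result is made unsigned (size_t-like). */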
4826 case TOK_builtin_expect:
4827 /* __builtin_expect is a no-op for now */
4828 parse_builtin_params(0, "ee");
4829 vpop();
4830 break;
4831 case TOK_builtin_types_compatible_p:
4832 parse_builtin_params(0, "tt");
4833 vtop[-1].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
4834 vtop[0].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
4835 n = is_compatible_types(&vtop[-1].type, &vtop[0].type);
4836 vtop -= 2;
4837 vpushi(n);
4838 break;
4839 case TOK_builtin_choose_expr:
4841 int64_t c;
4842 next();
4843 skip('(');
4844 c = expr_const64();
4845 skip(',');
4846 if (!c) {
4847 nocode_wanted++;
4849 expr_eq();
4850 if (!c) {
4851 vpop();
4852 nocode_wanted--;
4854 skip(',');
4855 if (c) {
4856 nocode_wanted++;
4858 expr_eq();
4859 if (c) {
4860 vpop();
4861 nocode_wanted--;
4863 skip(')');
4865 break;
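/* For illustration: "__builtin_choose_expr(1, a, b)" keeps the value and
   type of "a"; "b" is still parsed for syntax checking, but under
   nocode_wanted and with its value popped, so it produces no code.
   The first argument must be an integer constant expression. */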
4866 case TOK_builtin_constant_p:
4867 parse_builtin_params(1, "e");
4868 n = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
4869 vtop--;
4870 vpushi(n);
4871 break;
4872 case TOK_builtin_frame_address:
4873 case TOK_builtin_return_address:
4875 int tok1 = tok;
4876 int level;
4877 next();
4878 skip('(');
4879 if (tok != TOK_CINT) {
4880 tcc_error("%s only takes positive integers",
4881 tok1 == TOK_builtin_return_address ?
4882 "__builtin_return_address" :
4883 "__builtin_frame_address");
4885 level = (uint32_t)tokc.i;
4886 next();
4887 skip(')');
4888 type.t = VT_VOID;
4889 mk_pointer(&type);
4890 vset(&type, VT_LOCAL, 0); /* local frame */
4891 while (level--) {
4892 mk_pointer(&vtop->type);
4893 indir(); /* -> parent frame */
4895 if (tok1 == TOK_builtin_return_address) {
4896 // assume the return address is stored just above the frame pointer on the stack
4897 vpushi(PTR_SIZE);
4898 gen_op('+');
4899 mk_pointer(&vtop->type);
4900 indir();
4903 break;
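/* For illustration: "__builtin_frame_address(0)" is simply the current
   frame pointer, while "__builtin_return_address(1)" follows the saved
   frame pointer once and then loads the word just above it (the layout
   assumed by the comment above).  The level must be a literal integer
   constant. */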
4904 #ifdef TCC_TARGET_X86_64
4905 #ifdef TCC_TARGET_PE
4906 case TOK_builtin_va_start:
4907 parse_builtin_params(0, "ee");
4908 r = vtop->r & VT_VALMASK;
4909 if (r == VT_LLOCAL)
4910 r = VT_LOCAL;
4911 if (r != VT_LOCAL)
4912 tcc_error("__builtin_va_start expects a local variable");
4913 vtop->r = r;
4914 vtop->type = char_pointer_type;
4915 vtop->c.i += 8;
4916 vstore();
4917 break;
4918 #else
4919 case TOK_builtin_va_arg_types:
4920 parse_builtin_params(0, "t");
4921 vpushi(classify_x86_64_va_arg(&vtop->type));
4922 vswap();
4923 vpop();
4924 break;
4925 #endif
4926 #endif
4928 #ifdef TCC_TARGET_ARM64
4929 case TOK___va_start: {
4930 parse_builtin_params(0, "ee");
4931 //xx check types
4932 gen_va_start();
4933 vpushi(0);
4934 vtop->type.t = VT_VOID;
4935 break;
4937 case TOK___va_arg: {
4938 parse_builtin_params(0, "et");
4939 type = vtop->type;
4940 vpop();
4941 //xx check types
4942 gen_va_arg(&type);
4943 vtop->type = type;
4944 break;
4946 case TOK___arm64_clear_cache: {
4947 parse_builtin_params(0, "ee");
4948 gen_clear_cache();
4949 vpushi(0);
4950 vtop->type.t = VT_VOID;
4951 break;
4953 #endif
4954 /* pre operations */
4955 case TOK_INC:
4956 case TOK_DEC:
4957 t = tok;
4958 next();
4959 unary();
4960 inc(0, t);
4961 break;
4962 case '-':
4963 next();
4964 unary();
4965 t = vtop->type.t & VT_BTYPE;
4966 if (is_float(t)) {
4967 /* In IEEE negate(x) isn't subtract(0,x), but rather
4968 subtract(-0, x). */
4969 vpush(&vtop->type);
4970 if (t == VT_FLOAT)
4971 vtop->c.f = -1.0 * 0.0;
4972 else if (t == VT_DOUBLE)
4973 vtop->c.d = -1.0 * 0.0;
4974 else
4975 vtop->c.ld = -1.0 * 0.0;
4976 } else
4977 vpushi(0);
4978 vswap();
4979 gen_op('-');
4980 break;
4981 case TOK_LAND:
4982 if (!gnu_ext)
4983 goto tok_identifier;
4984 next();
4985 /* allow taking the address of a label */
4986 if (tok < TOK_UIDENT)
4987 expect("label identifier");
4988 s = label_find(tok);
4989 if (!s) {
4990 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
4991 } else {
4992 if (s->r == LABEL_DECLARED)
4993 s->r = LABEL_FORWARD;
4995 if (!s->type.t) {
4996 s->type.t = VT_VOID;
4997 mk_pointer(&s->type);
4998 s->type.t |= VT_STATIC;
5000 vpushsym(&s->type, s);
5001 next();
5002 break;
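/* For illustration (GNU extension): "void *p = &&lbl;" pushes the address
   of the label as a static "void *" value; it is normally consumed later
   by a computed goto such as "goto *p;". */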
5004 case TOK_GENERIC:
5006 CType controlling_type;
5007 int has_default = 0;
5008 int has_match = 0;
5009 int learn = 0;
5010 TokenString *str = NULL;
5012 next();
5013 skip('(');
5014 expr_type(&controlling_type, expr_eq);
5015 controlling_type.t &= ~(VT_CONSTANT | VT_VOLATILE | VT_ARRAY);
5016 if ((controlling_type.t & VT_BTYPE) == VT_FUNC)
5017 mk_pointer(&controlling_type);
5018 for (;;) {
5019 learn = 0;
5020 skip(',');
5021 if (tok == TOK_DEFAULT) {
5022 if (has_default)
5023 tcc_error("too many 'default'");
5024 has_default = 1;
5025 if (!has_match)
5026 learn = 1;
5027 next();
5028 } else {
5029 AttributeDef ad_tmp;
5030 int itmp;
5031 CType cur_type;
5032 parse_btype(&cur_type, &ad_tmp);
5033 type_decl(&cur_type, &ad_tmp, &itmp, TYPE_ABSTRACT);
5034 if (compare_types(&controlling_type, &cur_type, 0)) {
5035 if (has_match) {
5036 tcc_error("type match twice");
5038 has_match = 1;
5039 learn = 1;
5042 skip(':');
5043 if (learn) {
5044 if (str)
5045 tok_str_free(str);
5046 skip_or_save_block(&str);
5047 } else {
5048 skip_or_save_block(NULL);
5050 if (tok == ')')
5051 break;
5053 if (!str) {
5054 char buf[60];
5055 type_to_str(buf, sizeof buf, &controlling_type, NULL);
5056 tcc_error("type '%s' does not match any association", buf);
5058 begin_macro(str, 1);
5059 next();
5060 expr_eq();
5061 if (tok != TOK_EOF)
5062 expect(",");
5063 end_macro();
5064 next();
5065 break;
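/* Rough illustration: in "_Generic(x, int: f, unsigned: g, default: h)"
   the controlling type of "x" is taken with top-level qualifiers and the
   array flag stripped and with function types decayed to pointers, each
   association is compared against it, and only the selected (or default)
   expression is re-parsed from the saved token string via begin_macro(). */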
5067 // special qnan, snan and infinity values
5068 case TOK___NAN__:
5069 n = 0x7fc00000;
5070 special_math_val:
5071 vpushi(n);
5072 vtop->type.t = VT_FLOAT;
5073 next();
5074 break;
5075 case TOK___SNAN__:
5076 n = 0x7f800001;
5077 goto special_math_val;
5078 case TOK___INF__:
5079 n = 0x7f800000;
5080 goto special_math_val;
5082 default:
5083 tok_identifier:
5084 t = tok;
5085 next();
5086 if (t < TOK_UIDENT)
5087 expect("identifier");
5088 s = sym_find(t);
5089 if (!s || IS_ASM_SYM(s)) {
5090 const char *name = get_tok_str(t, NULL);
5091 if (tok != '(')
5092 tcc_error("'%s' undeclared", name);
5093 /* for simple function calls, we tolerate an undeclared
5094 external reference to an int() function */
5095 if (tcc_state->warn_implicit_function_declaration
5096 #ifdef TCC_TARGET_PE
5097 /* people must be warned about using undeclared WINAPI functions
5098 (which usually start with an uppercase letter) */
5099 || (name[0] >= 'A' && name[0] <= 'Z')
5100 #endif
5102 tcc_warning("implicit declaration of function '%s'", name);
5103 s = external_global_sym(t, &func_old_type, 0);
5106 r = s->r;
5107 /* A symbol that has a register is a local register variable,
5108 which starts out as VT_LOCAL value. */
5109 if ((r & VT_VALMASK) < VT_CONST)
5110 r = (r & ~VT_VALMASK) | VT_LOCAL;
5112 vset(&s->type, r, s->c);
5113 /* Point to s as backpointer (even without r&VT_SYM).
5114 Will be used by at least the x86 inline asm parser for
5115 regvars. */
5116 vtop->sym = s;
5118 if (r & VT_SYM) {
5119 vtop->c.i = 0;
5120 } else if (r == VT_CONST && IS_ENUM_VAL(s->type.t)) {
5121 vtop->c.i = s->enum_val;
5123 break;
5126 /* post operations */
5127 while (1) {
5128 if (tok == TOK_INC || tok == TOK_DEC) {
5129 inc(1, tok);
5130 next();
5131 } else if (tok == '.' || tok == TOK_ARROW || tok == TOK_CDOUBLE) {
5132 int qualifiers;
5133 /* field */
5134 if (tok == TOK_ARROW)
5135 indir();
5136 qualifiers = vtop->type.t & (VT_CONSTANT | VT_VOLATILE);
5137 test_lvalue();
5138 gaddrof();
5139 /* expect pointer on structure */
5140 if ((vtop->type.t & VT_BTYPE) != VT_STRUCT)
5141 expect("struct or union");
5142 if (tok == TOK_CDOUBLE)
5143 expect("field name");
5144 next();
5145 if (tok == TOK_CINT || tok == TOK_CUINT)
5146 expect("field name");
5147 s = find_field(&vtop->type, tok);
5148 if (!s)
5149 tcc_error("field not found: %s", get_tok_str(tok & ~SYM_FIELD, &tokc));
5150 /* add field offset to pointer */
5151 vtop->type = char_pointer_type; /* change type to 'char *' */
5152 vpushi(s->c);
5153 gen_op('+');
5154 /* change type to field type, and set to lvalue */
5155 vtop->type = s->type;
5156 vtop->type.t |= qualifiers;
5157 /* an array is never an lvalue */
5158 if (!(vtop->type.t & VT_ARRAY)) {
5159 vtop->r |= lvalue_type(vtop->type.t);
5160 #ifdef CONFIG_TCC_BCHECK
5161 /* if bound checking, the referenced pointer must be checked */
5162 if (tcc_state->do_bounds_check && (vtop->r & VT_VALMASK) != VT_LOCAL)
5163 vtop->r |= VT_MUSTBOUND;
5164 #endif
5166 next();
5167 } else if (tok == '[') {
5168 next();
5169 gexpr();
5170 gen_op('+');
5171 indir();
5172 skip(']');
5173 } else if (tok == '(') {
5174 SValue ret;
5175 Sym *sa;
5176 int nb_args, ret_nregs, ret_align, regsize, variadic;
5178 /* function call */
5179 if ((vtop->type.t & VT_BTYPE) != VT_FUNC) {
5180 /* pointer test (no array accepted) */
5181 if ((vtop->type.t & (VT_BTYPE | VT_ARRAY)) == VT_PTR) {
5182 vtop->type = *pointed_type(&vtop->type);
5183 if ((vtop->type.t & VT_BTYPE) != VT_FUNC)
5184 goto error_func;
5185 } else {
5186 error_func:
5187 expect("function pointer");
5189 } else {
5190 vtop->r &= ~VT_LVAL; /* no lvalue */
5192 /* get return type */
5193 s = vtop->type.ref;
5194 next();
5195 sa = s->next; /* first parameter */
5196 nb_args = regsize = 0;
5197 ret.r2 = VT_CONST;
5198 /* compute first implicit argument if a structure is returned */
5199 if ((s->type.t & VT_BTYPE) == VT_STRUCT) {
5200 variadic = (s->f.func_type == FUNC_ELLIPSIS);
5201 ret_nregs = gfunc_sret(&s->type, variadic, &ret.type,
5202 &ret_align, &regsize);
5203 if (!ret_nregs) {
5204 /* get some space for the returned structure */
5205 size = type_size(&s->type, &align);
5206 #ifdef TCC_TARGET_ARM64
5207 /* On arm64, a small struct is returned in registers.
5208 It is much easier to write it to memory if we know
5209 that we are allowed to write some extra bytes, so
5210 round the allocated space up to a power of 2: */
5211 if (size < 16)
5212 while (size & (size - 1))
5213 size = (size | (size - 1)) + 1;
5214 #endif
5215 loc = (loc - size) & -align;
5216 ret.type = s->type;
5217 ret.r = VT_LOCAL | VT_LVAL;
5218 /* pass it as 'int' to avoid structure arg passing
5219 problems */
5220 vseti(VT_LOCAL, loc);
5221 ret.c = vtop->c;
5222 nb_args++;
5224 } else {
5225 ret_nregs = 1;
5226 ret.type = s->type;
5229 if (ret_nregs) {
5230 /* return in register */
5231 if (is_float(ret.type.t)) {
5232 ret.r = reg_fret(ret.type.t);
5233 #ifdef TCC_TARGET_X86_64
5234 if ((ret.type.t & VT_BTYPE) == VT_QFLOAT)
5235 ret.r2 = REG_QRET;
5236 #endif
5237 } else {
5238 #ifndef TCC_TARGET_ARM64
5239 #ifdef TCC_TARGET_X86_64
5240 if ((ret.type.t & VT_BTYPE) == VT_QLONG)
5241 #else
5242 if ((ret.type.t & VT_BTYPE) == VT_LLONG)
5243 #endif
5244 ret.r2 = REG_LRET;
5245 #endif
5246 ret.r = REG_IRET;
5248 ret.c.i = 0;
5250 if (tok != ')') {
5251 for(;;) {
5252 expr_eq();
5253 gfunc_param_typed(s, sa);
5254 nb_args++;
5255 if (sa)
5256 sa = sa->next;
5257 if (tok == ')')
5258 break;
5259 skip(',');
5262 if (sa)
5263 tcc_error("too few arguments to function");
5264 skip(')');
5265 gfunc_call(nb_args);
5267 /* return value */
5268 for (r = ret.r + ret_nregs + !ret_nregs; r-- > ret.r;) {
5269 vsetc(&ret.type, r, &ret.c);
5270 vtop->r2 = ret.r2; /* Loop only happens when r2 is VT_CONST */
5273 /* handle packed struct return */
5274 if (((s->type.t & VT_BTYPE) == VT_STRUCT) && ret_nregs) {
5275 int addr, offset;
5277 size = type_size(&s->type, &align);
5278 /* We're writing whole regs often, make sure there's enough
5279 space. Assume the register size is a power of 2. */
5280 if (regsize > align)
5281 align = regsize;
5282 loc = (loc - size) & -align;
5283 addr = loc;
5284 offset = 0;
5285 for (;;) {
5286 vset(&ret.type, VT_LOCAL | VT_LVAL, addr + offset);
5287 vswap();
5288 vstore();
5289 vtop--;
5290 if (--ret_nregs == 0)
5291 break;
5292 offset += regsize;
5294 vset(&s->type, VT_LOCAL | VT_LVAL, addr);
5296 } else {
5297 break;
5302 ST_FUNC void expr_prod(void)
5304 int t;
5306 unary();
5307 while (tok == '*' || tok == '/' || tok == '%') {
5308 t = tok;
5309 next();
5310 unary();
5311 gen_op(t);
5315 ST_FUNC void expr_sum(void)
5317 int t;
5319 expr_prod();
5320 while (tok == '+' || tok == '-') {
5321 t = tok;
5322 next();
5323 expr_prod();
5324 gen_op(t);
5328 static void expr_shift(void)
5330 int t;
5332 expr_sum();
5333 while (tok == TOK_SHL || tok == TOK_SAR) {
5334 t = tok;
5335 next();
5336 expr_sum();
5337 gen_op(t);
5341 static void expr_cmp(void)
5343 int t;
5345 expr_shift();
5346 while ((tok >= TOK_ULE && tok <= TOK_GT) ||
5347 tok == TOK_ULT || tok == TOK_UGE) {
5348 t = tok;
5349 next();
5350 expr_shift();
5351 gen_op(t);
5355 static void expr_cmpeq(void)
5357 int t;
5359 expr_cmp();
5360 while (tok == TOK_EQ || tok == TOK_NE) {
5361 t = tok;
5362 next();
5363 expr_cmp();
5364 gen_op(t);
5368 static void expr_and(void)
5370 expr_cmpeq();
5371 while (tok == '&') {
5372 next();
5373 expr_cmpeq();
5374 gen_op('&');
5378 static void expr_xor(void)
5380 expr_and();
5381 while (tok == '^') {
5382 next();
5383 expr_and();
5384 gen_op('^');
5388 static void expr_or(void)
5390 expr_xor();
5391 while (tok == '|') {
5392 next();
5393 expr_xor();
5394 gen_op('|');
5398 static void expr_land(void)
5400 expr_or();
5401 if (tok == TOK_LAND) {
5402 int t = 0;
5403 for(;;) {
5404 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5405 gen_cast_s(VT_BOOL);
5406 if (vtop->c.i) {
5407 vpop();
5408 } else {
5409 nocode_wanted++;
5410 while (tok == TOK_LAND) {
5411 next();
5412 expr_or();
5413 vpop();
5415 nocode_wanted--;
5416 if (t)
5417 gsym(t);
5418 gen_cast_s(VT_INT);
5419 break;
5421 } else {
5422 if (!t)
5423 save_regs(1);
5424 t = gvtst(1, t);
5426 if (tok != TOK_LAND) {
5427 if (t)
5428 vseti(VT_JMPI, t);
5429 else
5430 vpushi(1);
5431 break;
5433 next();
5434 expr_or();
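/* For illustration: in "0 && f()" the left operand folds to a false
   constant, so the remaining operands are still parsed (for syntax
   checking) but under nocode_wanted and popped again; no call to f() is
   emitted and the whole expression stays the integer constant 0. */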
5439 static void expr_lor(void)
5441 expr_land();
5442 if (tok == TOK_LOR) {
5443 int t = 0;
5444 for(;;) {
5445 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5446 gen_cast_s(VT_BOOL);
5447 if (!vtop->c.i) {
5448 vpop();
5449 } else {
5450 nocode_wanted++;
5451 while (tok == TOK_LOR) {
5452 next();
5453 expr_land();
5454 vpop();
5456 nocode_wanted--;
5457 if (t)
5458 gsym(t);
5459 gen_cast_s(VT_INT);
5460 break;
5462 } else {
5463 if (!t)
5464 save_regs(1);
5465 t = gvtst(0, t);
5467 if (tok != TOK_LOR) {
5468 if (t)
5469 vseti(VT_JMP, t);
5470 else
5471 vpushi(0);
5472 break;
5474 next();
5475 expr_land();
5480 /* Assuming vtop is a value used in a conditional context
5481 (i.e. compared with zero) return 0 if it's false, 1 if
5482 true and -1 if it can't be statically determined. */
5483 static int condition_3way(void)
5485 int c = -1;
5486 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
5487 (!(vtop->r & VT_SYM) || !vtop->sym->a.weak)) {
5488 vdup();
5489 gen_cast_s(VT_BOOL);
5490 c = vtop->c.i;
5491 vpop();
5493 return c;
5496 static void expr_cond(void)
5498 int tt, u, r1, r2, rc, t1, t2, bt1, bt2, islv, c, g;
5499 SValue sv;
5500 CType type, type1, type2;
5502 expr_lor();
5503 if (tok == '?') {
5504 next();
5505 c = condition_3way();
5506 g = (tok == ':' && gnu_ext);
5507 if (c < 0) {
5508 /* needed to avoid having different registers saved in
5509 each branch */
5510 if (is_float(vtop->type.t)) {
5511 rc = RC_FLOAT;
5512 #ifdef TCC_TARGET_X86_64
5513 if ((vtop->type.t & VT_BTYPE) == VT_LDOUBLE) {
5514 rc = RC_ST0;
5516 #endif
5517 } else
5518 rc = RC_INT;
5519 gv(rc);
5520 save_regs(1);
5521 if (g)
5522 gv_dup();
5523 tt = gvtst(1, 0);
5525 } else {
5526 if (!g)
5527 vpop();
5528 tt = 0;
5531 if (1) {
5532 if (c == 0)
5533 nocode_wanted++;
5534 if (!g)
5535 gexpr();
5537 type1 = vtop->type;
5538 sv = *vtop; /* save value to handle it later */
5539 vtop--; /* no vpop so that FP stack is not flushed */
5540 skip(':');
5542 u = 0;
5543 if (c < 0)
5544 u = gjmp(0);
5545 gsym(tt);
5547 if (c == 0)
5548 nocode_wanted--;
5549 if (c == 1)
5550 nocode_wanted++;
5551 expr_cond();
5552 if (c == 1)
5553 nocode_wanted--;
5555 type2 = vtop->type;
5556 t1 = type1.t;
5557 bt1 = t1 & VT_BTYPE;
5558 t2 = type2.t;
5559 bt2 = t2 & VT_BTYPE;
5560 type.ref = NULL;
5562 /* cast operands to correct type according to ISOC rules */
5563 if (is_float(bt1) || is_float(bt2)) {
5564 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
5565 type.t = VT_LDOUBLE;
5567 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
5568 type.t = VT_DOUBLE;
5569 } else {
5570 type.t = VT_FLOAT;
5572 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
5573 /* cast to biggest op */
5574 type.t = VT_LLONG | VT_LONG;
5575 if (bt1 == VT_LLONG)
5576 type.t &= t1;
5577 if (bt2 == VT_LLONG)
5578 type.t &= t2;
5579 /* convert to unsigned if it does not fit in a long long */
5580 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
5581 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
5582 type.t |= VT_UNSIGNED;
5583 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
5584 /* If one is a null ptr constant the result type
5585 is the other. */
5586 if (is_null_pointer (vtop))
5587 type = type1;
5588 else if (is_null_pointer (&sv))
5589 type = type2;
5590 /* XXX: test pointer compatibility, C99 has more elaborate
5591 rules here. */
5592 else
5593 type = type1;
5594 } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
5595 /* XXX: test function pointer compatibility */
5596 type = bt1 == VT_FUNC ? type1 : type2;
5597 } else if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
5598 /* XXX: test structure compatibility */
5599 type = bt1 == VT_STRUCT ? type1 : type2;
5600 } else if (bt1 == VT_VOID || bt2 == VT_VOID) {
5601 /* NOTE: as an extension, we accept void on only one side */
5602 type.t = VT_VOID;
5603 } else {
5604 /* integer operations */
5605 type.t = VT_INT | (VT_LONG & (t1 | t2));
5606 /* convert to unsigned if it does not fit in an integer */
5607 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
5608 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
5609 type.t |= VT_UNSIGNED;
5611 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
5612 that `(expr ? a : b).mem` does not error with "lvalue expected" */
5613 islv = (vtop->r & VT_LVAL) && (sv.r & VT_LVAL) && VT_STRUCT == (type.t & VT_BTYPE);
5614 islv &= c < 0;
5616 /* now we convert second operand */
5617 if (c != 1) {
5618 gen_cast(&type);
5619 if (islv) {
5620 mk_pointer(&vtop->type);
5621 gaddrof();
5622 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5623 gaddrof();
5626 rc = RC_INT;
5627 if (is_float(type.t)) {
5628 rc = RC_FLOAT;
5629 #ifdef TCC_TARGET_X86_64
5630 if ((type.t & VT_BTYPE) == VT_LDOUBLE) {
5631 rc = RC_ST0;
5633 #endif
5634 } else if ((type.t & VT_BTYPE) == VT_LLONG) {
5635 /* for long longs, we use fixed registers to avoid having
5636 to handle a complicated move */
5637 rc = RC_IRET;
5640 tt = r2 = 0;
5641 if (c < 0) {
5642 r2 = gv(rc);
5643 tt = gjmp(0);
5645 gsym(u);
5647 /* this is horrible, but we must also convert first
5648 operand */
5649 if (c != 0) {
5650 *vtop = sv;
5651 gen_cast(&type);
5652 if (islv) {
5653 mk_pointer(&vtop->type);
5654 gaddrof();
5655 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5656 gaddrof();
5659 if (c < 0) {
5660 r1 = gv(rc);
5661 move_reg(r2, r1, type.t);
5662 vtop->r = r2;
5663 gsym(tt);
5664 if (islv)
5665 indir();
5671 static void expr_eq(void)
5673 int t;
5675 expr_cond();
5676 if (tok == '=' ||
5677 (tok >= TOK_A_MOD && tok <= TOK_A_DIV) ||
5678 tok == TOK_A_XOR || tok == TOK_A_OR ||
5679 tok == TOK_A_SHL || tok == TOK_A_SAR) {
5680 test_lvalue();
5681 t = tok;
5682 next();
5683 if (t == '=') {
5684 expr_eq();
5685 } else {
5686 vdup();
5687 expr_eq();
5688 gen_op(t & 0x7f);
5690 vstore();
5694 ST_FUNC void gexpr(void)
5696 while (1) {
5697 expr_eq();
5698 if (tok != ',')
5699 break;
5700 vpop();
5701 next();
5705 /* parse a constant expression and return value in vtop. */
5706 static void expr_const1(void)
5708 const_wanted++;
5709 nocode_wanted++;
5710 expr_cond();
5711 nocode_wanted--;
5712 const_wanted--;
5715 /* parse an integer constant and return its value. */
5716 static inline int64_t expr_const64(void)
5718 int64_t c;
5719 expr_const1();
5720 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
5721 expect("constant expression");
5722 c = vtop->c.i;
5723 vpop();
5724 return c;
5727 /* parse an integer constant and return its value.
5728 Complain if it doesn't fit 32bit (signed or unsigned). */
5729 ST_FUNC int expr_const(void)
5731 int c;
5732 int64_t wc = expr_const64();
5733 c = wc;
5734 if (c != wc && (unsigned)c != wc)
5735 tcc_error("constant exceeds 32 bit");
5736 return c;
5739 /* return the label token if current token is a label, otherwise
5740 return zero */
5741 static int is_label(void)
5743 int last_tok;
5745 /* fast test first */
5746 if (tok < TOK_UIDENT)
5747 return 0;
5748 /* no need to save tokc because tok is an identifier */
5749 last_tok = tok;
5750 next();
5751 if (tok == ':') {
5752 return last_tok;
5753 } else {
5754 unget_tok(last_tok);
5755 return 0;
5759 #ifndef TCC_TARGET_ARM64
5760 static void gfunc_return(CType *func_type)
5762 if ((func_type->t & VT_BTYPE) == VT_STRUCT) {
5763 CType type, ret_type;
5764 int ret_align, ret_nregs, regsize;
5765 ret_nregs = gfunc_sret(func_type, func_var, &ret_type,
5766 &ret_align, &regsize);
5767 if (0 == ret_nregs) {
5768 /* if returning structure, must copy it to implicit
5769 first pointer arg location */
5770 type = *func_type;
5771 mk_pointer(&type);
5772 vset(&type, VT_LOCAL | VT_LVAL, func_vc);
5773 indir();
5774 vswap();
5775 /* copy structure value to pointer */
5776 vstore();
5777 } else {
5778 /* returning structure packed into registers */
5779 int r, size, addr, align;
5780 size = type_size(func_type,&align);
5781 if ((vtop->r != (VT_LOCAL | VT_LVAL) ||
5782 (vtop->c.i & (ret_align-1)))
5783 && (align & (ret_align-1))) {
5784 loc = (loc - size) & -ret_align;
5785 addr = loc;
5786 type = *func_type;
5787 vset(&type, VT_LOCAL | VT_LVAL, addr);
5788 vswap();
5789 vstore();
5790 vpop();
5791 vset(&ret_type, VT_LOCAL | VT_LVAL, addr);
5793 vtop->type = ret_type;
5794 if (is_float(ret_type.t))
5795 r = rc_fret(ret_type.t);
5796 else
5797 r = RC_IRET;
5799 if (ret_nregs == 1)
5800 gv(r);
5801 else {
5802 for (;;) {
5803 vdup();
5804 gv(r);
5805 vpop();
5806 if (--ret_nregs == 0)
5807 break;
5808 /* We assume that when a structure is returned in multiple
5809 registers, their classes are consecutive values of the
5810 sequence s(n) = 2^n */
5811 r <<= 1;
5812 vtop->c.i += regsize;
5816 } else if (is_float(func_type->t)) {
5817 gv(rc_fret(func_type->t));
5818 } else {
5819 gv(RC_IRET);
5821 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
5823 #endif
5825 static int case_cmp(const void *pa, const void *pb)
5827 int64_t a = (*(struct case_t**) pa)->v1;
5828 int64_t b = (*(struct case_t**) pb)->v1;
5829 return a < b ? -1 : a > b;
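/* Rough illustration of gcase() below: each case_t covers a range
   [v1, v2] (v1 == v2 for a plain "case 3:", the GNU form "case 1 ... 5:"
   gives v1 = 1, v2 = 5).  While more than four entries remain, a binary
   search step over the ranges (sorted by v1) is emitted; the last few are
   handled by a linear chain of compares. */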
5832 static void gcase(struct case_t **base, int len, int *bsym)
5834 struct case_t *p;
5835 int e;
5836 int ll = (vtop->type.t & VT_BTYPE) == VT_LLONG;
5837 gv(RC_INT);
5838 while (len > 4) {
5839 /* binary search */
5840 p = base[len/2];
5841 vdup();
5842 if (ll)
5843 vpushll(p->v2);
5844 else
5845 vpushi(p->v2);
5846 gen_op(TOK_LE);
5847 e = gtst(1, 0);
5848 vdup();
5849 if (ll)
5850 vpushll(p->v1);
5851 else
5852 vpushi(p->v1);
5853 gen_op(TOK_GE);
5854 gtst_addr(0, p->sym); /* v1 <= x <= v2 */
5855 /* x < v1 */
5856 gcase(base, len/2, bsym);
5857 if (cur_switch->def_sym)
5858 gjmp_addr(cur_switch->def_sym);
5859 else
5860 *bsym = gjmp(*bsym);
5861 /* x > v2 */
5862 gsym(e);
5863 e = len/2 + 1;
5864 base += e; len -= e;
5866 /* linear scan */
5867 while (len--) {
5868 p = *base++;
5869 vdup();
5870 if (ll)
5871 vpushll(p->v2);
5872 else
5873 vpushi(p->v2);
5874 if (p->v1 == p->v2) {
5875 gen_op(TOK_EQ);
5876 gtst_addr(0, p->sym);
5877 } else {
5878 gen_op(TOK_LE);
5879 e = gtst(1, 0);
5880 vdup();
5881 if (ll)
5882 vpushll(p->v1);
5883 else
5884 vpushi(p->v1);
5885 gen_op(TOK_GE);
5886 gtst_addr(0, p->sym);
5887 gsym(e);
5892 static void block(int *bsym, int *csym, int is_expr)
5894 int a, b, c, d, cond;
5895 Sym *s;
5897 /* generate line number info */
5898 if (tcc_state->do_debug)
5899 tcc_debug_line(tcc_state);
5901 if (is_expr) {
5902 /* default return value is (void) */
5903 vpushi(0);
5904 vtop->type.t = VT_VOID;
5907 if (tok == TOK_IF) {
5908 /* if test */
5909 int saved_nocode_wanted = nocode_wanted;
5910 next();
5911 skip('(');
5912 gexpr();
5913 skip(')');
5914 cond = condition_3way();
5915 if (cond == 1)
5916 a = 0, vpop();
5917 else
5918 a = gvtst(1, 0);
5919 if (cond == 0)
5920 nocode_wanted |= 0x20000000;
5921 block(bsym, csym, 0);
5922 if (cond != 1)
5923 nocode_wanted = saved_nocode_wanted;
5924 c = tok;
5925 if (c == TOK_ELSE) {
5926 next();
5927 d = gjmp(0);
5928 gsym(a);
5929 if (cond == 1)
5930 nocode_wanted |= 0x20000000;
5931 block(bsym, csym, 0);
5932 gsym(d); /* patch else jmp */
5933 if (cond != 0)
5934 nocode_wanted = saved_nocode_wanted;
5935 } else
5936 gsym(a);
5937 } else if (tok == TOK_WHILE) {
5938 int saved_nocode_wanted;
5939 nocode_wanted &= ~0x20000000;
5940 next();
5941 d = ind;
5942 vla_sp_restore();
5943 skip('(');
5944 gexpr();
5945 skip(')');
5946 a = gvtst(1, 0);
5947 b = 0;
5948 ++local_scope;
5949 saved_nocode_wanted = nocode_wanted;
5950 block(&a, &b, 0);
5951 nocode_wanted = saved_nocode_wanted;
5952 --local_scope;
5953 gjmp_addr(d);
5954 gsym(a);
5955 gsym_addr(b, d);
5956 } else if (tok == '{') {
5957 Sym *llabel;
5958 int block_vla_sp_loc = vla_sp_loc, saved_vlas_in_scope = vlas_in_scope;
5960 next();
5961 /* record local declaration stack position */
5962 s = local_stack;
5963 llabel = local_label_stack;
5964 ++local_scope;
5966 /* handle local labels declarations */
5967 if (tok == TOK_LABEL) {
5968 next();
5969 for(;;) {
5970 if (tok < TOK_UIDENT)
5971 expect("label identifier");
5972 label_push(&local_label_stack, tok, LABEL_DECLARED);
5973 next();
5974 if (tok == ',') {
5975 next();
5976 } else {
5977 skip(';');
5978 break;
5982 while (tok != '}') {
5983 if ((a = is_label()))
5984 unget_tok(a);
5985 else
5986 decl(VT_LOCAL);
5987 if (tok != '}') {
5988 if (is_expr)
5989 vpop();
5990 block(bsym, csym, is_expr);
5993 /* pop locally defined labels */
5994 label_pop(&local_label_stack, llabel, is_expr);
5995 /* pop locally defined symbols */
5996 --local_scope;
5997 /* In the is_expr case (a statement expression is finished here),
5998 vtop might refer to symbols on the local_stack. Either via the
5999 type or via vtop->sym. We can't pop those nor any that in turn
6000 might be referred to. To make it easier we don't roll back
6001 any symbols in that case; some upper level call to block() will
6002 do that. We do have to remove such symbols from the lookup
6003 tables, though. sym_pop will do that. */
6004 sym_pop(&local_stack, s, is_expr);
6006 /* Pop VLA frames and restore stack pointer if required */
6007 if (vlas_in_scope > saved_vlas_in_scope) {
6008 vla_sp_loc = saved_vlas_in_scope ? block_vla_sp_loc : vla_sp_root_loc;
6009 vla_sp_restore();
6011 vlas_in_scope = saved_vlas_in_scope;
6013 next();
6014 } else if (tok == TOK_RETURN) {
6015 next();
6016 if (tok != ';') {
6017 gexpr();
6018 gen_assign_cast(&func_vt);
6019 if ((func_vt.t & VT_BTYPE) == VT_VOID)
6020 vtop--;
6021 else
6022 gfunc_return(&func_vt);
6024 skip(';');
6025 /* jump unless last stmt in top-level block */
6026 if (tok != '}' || local_scope != 1)
6027 rsym = gjmp(rsym);
6028 nocode_wanted |= 0x20000000;
6029 } else if (tok == TOK_BREAK) {
6030 /* compute jump */
6031 if (!bsym)
6032 tcc_error("cannot break");
6033 *bsym = gjmp(*bsym);
6034 next();
6035 skip(';');
6036 nocode_wanted |= 0x20000000;
6037 } else if (tok == TOK_CONTINUE) {
6038 /* compute jump */
6039 if (!csym)
6040 tcc_error("cannot continue");
6041 vla_sp_restore_root();
6042 *csym = gjmp(*csym);
6043 next();
6044 skip(';');
6045 } else if (tok == TOK_FOR) {
6046 int e;
6047 int saved_nocode_wanted;
6048 nocode_wanted &= ~0x20000000;
6049 next();
6050 skip('(');
6051 s = local_stack;
6052 ++local_scope;
6053 if (tok != ';') {
6054 /* c99 for-loop init decl? */
6055 if (!decl0(VT_LOCAL, 1, NULL)) {
6056 /* no, regular for-loop init expr */
6057 gexpr();
6058 vpop();
6061 skip(';');
6062 d = ind;
6063 c = ind;
6064 vla_sp_restore();
6065 a = 0;
6066 b = 0;
6067 if (tok != ';') {
6068 gexpr();
6069 a = gvtst(1, 0);
6071 skip(';');
6072 if (tok != ')') {
6073 e = gjmp(0);
6074 c = ind;
6075 vla_sp_restore();
6076 gexpr();
6077 vpop();
6078 gjmp_addr(d);
6079 gsym(e);
6081 skip(')');
6082 saved_nocode_wanted = nocode_wanted;
6083 block(&a, &b, 0);
6084 nocode_wanted = saved_nocode_wanted;
6085 gjmp_addr(c);
6086 gsym(a);
6087 gsym_addr(b, c);
6088 --local_scope;
6089 sym_pop(&local_stack, s, 0);
6091 } else
6092 if (tok == TOK_DO) {
6093 int saved_nocode_wanted;
6094 nocode_wanted &= ~0x20000000;
6095 next();
6096 a = 0;
6097 b = 0;
6098 d = ind;
6099 vla_sp_restore();
6100 saved_nocode_wanted = nocode_wanted;
6101 block(&a, &b, 0);
6102 skip(TOK_WHILE);
6103 skip('(');
6104 gsym(b);
6105 if (b)
6106 nocode_wanted = saved_nocode_wanted;
6107 gexpr();
6108 c = gvtst(0, 0);
6109 gsym_addr(c, d);
6110 nocode_wanted = saved_nocode_wanted;
6111 skip(')');
6112 gsym(a);
6113 skip(';');
6114 } else
6115 if (tok == TOK_SWITCH) {
6116 struct switch_t *saved, sw;
6117 int saved_nocode_wanted = nocode_wanted;
6118 SValue switchval;
6119 next();
6120 skip('(');
6121 gexpr();
6122 skip(')');
6123 switchval = *vtop--;
6124 a = 0;
6125 b = gjmp(0); /* jump to first case */
6126 sw.p = NULL; sw.n = 0; sw.def_sym = 0;
6127 saved = cur_switch;
6128 cur_switch = &sw;
6129 block(&a, csym, 0);
6130 nocode_wanted = saved_nocode_wanted;
6131 a = gjmp(a); /* add implicit break */
6132 /* case lookup */
6133 gsym(b);
6134 qsort(sw.p, sw.n, sizeof(void*), case_cmp);
6135 for (b = 1; b < sw.n; b++)
6136 if (sw.p[b - 1]->v2 >= sw.p[b]->v1)
6137 tcc_error("duplicate case value");
6138 /* Our switch table sorting is signed, so the compared
6139 value needs to be as well when it's 64bit. */
6140 if ((switchval.type.t & VT_BTYPE) == VT_LLONG)
6141 switchval.type.t &= ~VT_UNSIGNED;
6142 vpushv(&switchval);
6143 gcase(sw.p, sw.n, &a);
6144 vpop();
6145 if (sw.def_sym)
6146 gjmp_addr(sw.def_sym);
6147 dynarray_reset(&sw.p, &sw.n);
6148 cur_switch = saved;
6149 /* break label */
6150 gsym(a);
6151 } else
6152 if (tok == TOK_CASE) {
6153 struct case_t *cr = tcc_malloc(sizeof(struct case_t));
6154 if (!cur_switch)
6155 expect("switch");
6156 nocode_wanted &= ~0x20000000;
6157 next();
6158 cr->v1 = cr->v2 = expr_const64();
6159 if (gnu_ext && tok == TOK_DOTS) {
6160 next();
6161 cr->v2 = expr_const64();
6162 if (cr->v2 < cr->v1)
6163 tcc_warning("empty case range");
6165 cr->sym = ind;
6166 dynarray_add(&cur_switch->p, &cur_switch->n, cr);
6167 skip(':');
6168 is_expr = 0;
6169 goto block_after_label;
6170 } else
6171 if (tok == TOK_DEFAULT) {
6172 next();
6173 skip(':');
6174 if (!cur_switch)
6175 expect("switch");
6176 if (cur_switch->def_sym)
6177 tcc_error("too many 'default'");
6178 cur_switch->def_sym = ind;
6179 is_expr = 0;
6180 goto block_after_label;
6181 } else
6182 if (tok == TOK_GOTO) {
6183 next();
6184 if (tok == '*' && gnu_ext) {
6185 /* computed goto */
6186 next();
6187 gexpr();
6188 if ((vtop->type.t & VT_BTYPE) != VT_PTR)
6189 expect("pointer");
6190 ggoto();
6191 } else if (tok >= TOK_UIDENT) {
6192 s = label_find(tok);
6193 /* put forward definition if needed */
6194 if (!s) {
6195 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
6196 } else {
6197 if (s->r == LABEL_DECLARED)
6198 s->r = LABEL_FORWARD;
6200 vla_sp_restore_root();
6201 if (s->r & LABEL_FORWARD)
6202 s->jnext = gjmp(s->jnext);
6203 else
6204 gjmp_addr(s->jnext);
6205 next();
6206 } else {
6207 expect("label identifier");
6209 skip(';');
6210 } else if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
6211 asm_instr();
6212 } else {
6213 b = is_label();
6214 if (b) {
6215 /* label case */
6216 next();
6217 s = label_find(b);
6218 if (s) {
6219 if (s->r == LABEL_DEFINED)
6220 tcc_error("duplicate label '%s'", get_tok_str(s->v, NULL));
6221 gsym(s->jnext);
6222 s->r = LABEL_DEFINED;
6223 } else {
6224 s = label_push(&global_label_stack, b, LABEL_DEFINED);
6226 s->jnext = ind;
6227 vla_sp_restore();
6228 /* we accept this, but it is a mistake */
6229 block_after_label:
6230 nocode_wanted &= ~0x20000000;
6231 if (tok == '}') {
6232 tcc_warning("deprecated use of label at end of compound statement");
6233 } else {
6234 if (is_expr)
6235 vpop();
6236 block(bsym, csym, is_expr);
6238 } else {
6239 /* expression case */
6240 if (tok != ';') {
6241 if (is_expr) {
6242 vpop();
6243 gexpr();
6244 } else {
6245 gexpr();
6246 vpop();
6249 skip(';');
6254 /* This skips over a stream of tokens containing balanced {} and ()
6255 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
6256 with a '{'). If STR is non-NULL, the skipped tokens are allocated and stored
6257 in *STR. This doesn't check if () and {} are nested correctly,
6258 i.e. "({)}" is accepted. */
6259 static void skip_or_save_block(TokenString **str)
6261 int braces = tok == '{';
6262 int level = 0;
6263 if (str)
6264 *str = tok_str_alloc();
6266 while ((level > 0 || (tok != '}' && tok != ',' && tok != ';' && tok != ')'))) {
6267 int t;
6268 if (tok == TOK_EOF) {
6269 if (str || level > 0)
6270 tcc_error("unexpected end of file");
6271 else
6272 break;
6274 if (str)
6275 tok_str_add_tok(*str);
6276 t = tok;
6277 next();
6278 if (t == '{' || t == '(') {
6279 level++;
6280 } else if (t == '}' || t == ')') {
6281 level--;
6282 if (level == 0 && braces && t == '}')
6283 break;
6286 if (str) {
6287 tok_str_add(*str, -1);
6288 tok_str_add(*str, 0);
6292 #define EXPR_CONST 1
6293 #define EXPR_ANY 2
6295 static void parse_init_elem(int expr_type)
6297 int saved_global_expr;
6298 switch(expr_type) {
6299 case EXPR_CONST:
6300 /* compound literals must be allocated globally in this case */
6301 saved_global_expr = global_expr;
6302 global_expr = 1;
6303 expr_const1();
6304 global_expr = saved_global_expr;
6305 /* NOTE: symbols are accepted, as well as lvalues for anon symbols
6306 (compound literals). */
6307 if (((vtop->r & (VT_VALMASK | VT_LVAL)) != VT_CONST
6308 && ((vtop->r & (VT_SYM|VT_LVAL)) != (VT_SYM|VT_LVAL)
6309 || vtop->sym->v < SYM_FIRST_ANOM))
6310 #ifdef TCC_TARGET_PE
6311 || ((vtop->r & VT_SYM) && vtop->sym->a.dllimport)
6312 #endif
6314 tcc_error("initializer element is not constant");
6315 break;
6316 case EXPR_ANY:
6317 expr_eq();
6318 break;
6322 /* put zeros for variable based init */
6323 static void init_putz(Section *sec, unsigned long c, int size)
6325 if (sec) {
6326 /* nothing to do because globals are already set to zero */
6327 } else {
6328 vpush_global_sym(&func_old_type, TOK_memset);
6329 vseti(VT_LOCAL, c);
6330 #ifdef TCC_TARGET_ARM
6331 vpushs(size);
6332 vpushi(0);
6333 #else
6334 vpushi(0);
6335 vpushs(size);
6336 #endif
6337 gfunc_call(3);
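/* For illustration: for a local aggregate such as
       struct S { int a[8]; } s = { 1 };
   the bytes not covered by initializers are cleared by the emitted call
   above, roughly memset(<local at offset c>, 0, size); static storage
   needs nothing because it already starts out zeroed (note the
   ARM-specific argument order handled by the #ifdef). */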
6341 /* t is the array or struct type. c is the array or struct
6342 address. cur_field is the pointer to the current
6343 field, for arrays the 'c' member contains the current start
6344 index. 'size_only' is true if only size info is needed (only used
6345 in arrays). al contains the already initialized length of the
6346 current container (starting at c). It returns the new length of that container. */
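/* Rough illustration: designators adjust 'c' and '*cur_field' before the
   actual value is parsed, e.g.
       struct S { int a, b; } s = { .b = 2 };
       int t[10] = { [2 ... 4] = 7 };   (GNU range, last designator only)
   Without a designator the next field or next index is used, and any hole
   skipped over is zero-filled via init_putz(). */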
6347 static int decl_designator(CType *type, Section *sec, unsigned long c,
6348 Sym **cur_field, int size_only, int al)
6350 Sym *s, *f;
6351 int index, index_last, align, l, nb_elems, elem_size;
6352 unsigned long corig = c;
6354 elem_size = 0;
6355 nb_elems = 1;
6356 if (gnu_ext && (l = is_label()) != 0)
6357 goto struct_field;
6358 /* NOTE: we only support ranges for last designator */
6359 while (nb_elems == 1 && (tok == '[' || tok == '.')) {
6360 if (tok == '[') {
6361 if (!(type->t & VT_ARRAY))
6362 expect("array type");
6363 next();
6364 index = index_last = expr_const();
6365 if (tok == TOK_DOTS && gnu_ext) {
6366 next();
6367 index_last = expr_const();
6369 skip(']');
6370 s = type->ref;
6371 if (index < 0 || (s->c >= 0 && index_last >= s->c) ||
6372 index_last < index)
6373 tcc_error("invalid index");
6374 if (cur_field)
6375 (*cur_field)->c = index_last;
6376 type = pointed_type(type);
6377 elem_size = type_size(type, &align);
6378 c += index * elem_size;
6379 nb_elems = index_last - index + 1;
6380 } else {
6381 next();
6382 l = tok;
6383 struct_field:
6384 next();
6385 if ((type->t & VT_BTYPE) != VT_STRUCT)
6386 expect("struct/union type");
6387 f = find_field(type, l);
6388 if (!f)
6389 expect("field");
6390 if (cur_field)
6391 *cur_field = f;
6392 type = &f->type;
6393 c += f->c;
6395 cur_field = NULL;
6397 if (!cur_field) {
6398 if (tok == '=') {
6399 next();
6400 } else if (!gnu_ext) {
6401 expect("=");
6403 } else {
6404 if (type->t & VT_ARRAY) {
6405 index = (*cur_field)->c;
6406 if (type->ref->c >= 0 && index >= type->ref->c)
6407 tcc_error("index too large");
6408 type = pointed_type(type);
6409 c += index * type_size(type, &align);
6410 } else {
6411 f = *cur_field;
6412 while (f && (f->v & SYM_FIRST_ANOM) && (f->type.t & VT_BITFIELD))
6413 *cur_field = f = f->next;
6414 if (!f)
6415 tcc_error("too many field init");
6416 type = &f->type;
6417 c += f->c;
6420 /* must put zero in holes (note that doing it that way
6421 ensures that it even works with designators) */
6422 if (!size_only && c - corig > al)
6423 init_putz(sec, corig + al, c - corig - al);
6424 decl_initializer(type, sec, c, 0, size_only);
6426 /* XXX: make it more general */
6427 if (!size_only && nb_elems > 1) {
6428 unsigned long c_end;
6429 uint8_t *src, *dst;
6430 int i;
6432 if (!sec) {
6433 vset(type, VT_LOCAL|VT_LVAL, c);
6434 for (i = 1; i < nb_elems; i++) {
6435 vset(type, VT_LOCAL|VT_LVAL, c + elem_size * i);
6436 vswap();
6437 vstore();
6439 vpop();
6440 } else if (!NODATA_WANTED) {
6441 c_end = c + nb_elems * elem_size;
6442 if (c_end > sec->data_allocated)
6443 section_realloc(sec, c_end);
6444 src = sec->data + c;
6445 dst = src;
6446 for(i = 1; i < nb_elems; i++) {
6447 dst += elem_size;
6448 memcpy(dst, src, elem_size);
6452 c += nb_elems * type_size(type, &align);
6453 if (c - corig > al)
6454 al = c - corig;
6455 return al;
6458 /* store a value or an expression directly in global data or in local array */
6459 static void init_putv(CType *type, Section *sec, unsigned long c)
6461 int bt;
6462 void *ptr;
6463 CType dtype;
6465 dtype = *type;
6466 dtype.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
6468 if (sec) {
6469 int size, align;
6470 /* XXX: not portable */
6471 /* XXX: generate error if incorrect relocation */
6472 gen_assign_cast(&dtype);
6473 bt = type->t & VT_BTYPE;
6475 if ((vtop->r & VT_SYM)
6476 && bt != VT_PTR
6477 && bt != VT_FUNC
6478 && (bt != (PTR_SIZE == 8 ? VT_LLONG : VT_INT)
6479 || (type->t & VT_BITFIELD))
6480 && !((vtop->r & VT_CONST) && vtop->sym->v >= SYM_FIRST_ANOM)
6482 tcc_error("initializer element is not computable at load time");
6484 if (NODATA_WANTED) {
6485 vtop--;
6486 return;
6489 size = type_size(type, &align);
6490 section_reserve(sec, c + size);
6491 ptr = sec->data + c;
6493 /* XXX: make code faster ? */
6494 if ((vtop->r & (VT_SYM|VT_CONST)) == (VT_SYM|VT_CONST) &&
6495 vtop->sym->v >= SYM_FIRST_ANOM &&
6496 /* XXX This rejects compound literals like
6497 '(void *){ptr}'. The problem is that '&sym' is
6498 represented the same way, which would be ruled out
6499 by the SYM_FIRST_ANOM check above, but also '"string"'
6500 in 'char *p = "string"' is represented the same
6501 with the type being VT_PTR and the symbol being an
6502 anonymous one. That is, there's no difference in vtop
6503 between '(void *){x}' and '&(void *){x}'. Ignore
6504 pointer typed entities here. Hopefully no real code
6505 will ever use compound literals with scalar type. */
6506 (vtop->type.t & VT_BTYPE) != VT_PTR) {
6507 /* These come from compound literals, memcpy stuff over. */
6508 Section *ssec;
6509 ElfSym *esym;
6510 ElfW_Rel *rel;
6511 esym = elfsym(vtop->sym);
6512 ssec = tcc_state->sections[esym->st_shndx];
6513 memmove (ptr, ssec->data + esym->st_value, size);
6514 if (ssec->reloc) {
6515 /* We need to copy over all memory contents, and that
6516 includes relocations. Use the fact that relocs are
6517 created in order, so look from the end of relocs
6518 until we hit one before the copied region. */
6519 int num_relocs = ssec->reloc->data_offset / sizeof(*rel);
6520 rel = (ElfW_Rel*)(ssec->reloc->data + ssec->reloc->data_offset);
6521 while (num_relocs--) {
6522 rel--;
6523 if (rel->r_offset >= esym->st_value + size)
6524 continue;
6525 if (rel->r_offset < esym->st_value)
6526 break;
6527 /* Note: if the same fields are initialized multiple
6528 times (possible with designators) then we possibly
6529 add multiple relocations for the same offset here.
6530 That would lead to wrong code, the last reloc needs
6531 to win. We clean this up later after the whole
6532 initializer is parsed. */
6533 put_elf_reloca(symtab_section, sec,
6534 c + rel->r_offset - esym->st_value,
6535 ELFW(R_TYPE)(rel->r_info),
6536 ELFW(R_SYM)(rel->r_info),
6537 #if PTR_SIZE == 8
6538 rel->r_addend
6539 #else
6541 #endif
6545 } else {
6546 if (type->t & VT_BITFIELD) {
6547 int bit_pos, bit_size, bits, n;
6548 unsigned char *p, v, m;
6549 bit_pos = BIT_POS(vtop->type.t);
6550 bit_size = BIT_SIZE(vtop->type.t);
6551 p = (unsigned char*)ptr + (bit_pos >> 3);
6552 bit_pos &= 7, bits = 0;
6553 while (bit_size) {
6554 n = 8 - bit_pos;
6555 if (n > bit_size)
6556 n = bit_size;
6557 v = vtop->c.i >> bits << bit_pos;
6558 m = ((1 << n) - 1) << bit_pos;
6559 *p = (*p & ~m) | (v & m);
6560 bits += n, bit_size -= n, bit_pos = 0, ++p;
6562 } else
6563 switch(bt) {
6564 /* XXX: when cross-compiling we assume that each type has the
6565 same representation on host and target, which is likely to
6566 be wrong in the case of long double */
6567 case VT_BOOL:
6568 vtop->c.i = vtop->c.i != 0;
6569 case VT_BYTE:
6570 *(char *)ptr |= vtop->c.i;
6571 break;
6572 case VT_SHORT:
6573 *(short *)ptr |= vtop->c.i;
6574 break;
6575 case VT_FLOAT:
6576 *(float*)ptr = vtop->c.f;
6577 break;
6578 case VT_DOUBLE:
6579 *(double *)ptr = vtop->c.d;
6580 break;
6581 case VT_LDOUBLE:
6582 #if defined TCC_IS_NATIVE_387
6583 if (sizeof (long double) >= 10) /* zero pad ten-byte LD */
6584 memcpy(ptr, &vtop->c.ld, 10);
6585 #ifdef __TINYC__
6586 else if (sizeof (long double) == sizeof (double))
6587 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr) : "m" (vtop->c.ld));
6588 #endif
6589 else if (vtop->c.ld == 0.0)
6591 else
6592 #endif
6593 if (sizeof(long double) == LDOUBLE_SIZE)
6594 *(long double*)ptr = vtop->c.ld;
6595 else if (sizeof(double) == LDOUBLE_SIZE)
6596 *(double *)ptr = (double)vtop->c.ld;
6597 else
6598 tcc_error("can't cross compile long double constants");
6599 break;
6600 #if PTR_SIZE != 8
6601 case VT_LLONG:
6602 *(long long *)ptr |= vtop->c.i;
6603 break;
6604 #else
6605 case VT_LLONG:
6606 #endif
6607 case VT_PTR:
6609 addr_t val = vtop->c.i;
6610 #if PTR_SIZE == 8
6611 if (vtop->r & VT_SYM)
6612 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
6613 else
6614 *(addr_t *)ptr |= val;
6615 #else
6616 if (vtop->r & VT_SYM)
6617 greloc(sec, vtop->sym, c, R_DATA_PTR);
6618 *(addr_t *)ptr |= val;
6619 #endif
6620 break;
6622 default:
6624 int val = vtop->c.i;
6625 #if PTR_SIZE == 8
6626 if (vtop->r & VT_SYM)
6627 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
6628 else
6629 *(int *)ptr |= val;
6630 #else
6631 if (vtop->r & VT_SYM)
6632 greloc(sec, vtop->sym, c, R_DATA_PTR);
6633 *(int *)ptr |= val;
6634 #endif
6635 break;
6639 vtop--;
6640 } else {
6641 vset(&dtype, VT_LOCAL|VT_LVAL, c);
6642 vswap();
6643 vstore();
6644 vpop();
6648 /* 't' contains the type and storage info. 'c' is the offset of the
6649 object in section 'sec'. If 'sec' is NULL, it means stack based
6650 allocation. 'first' is true if array '{' must be read (multi
6651 dimension implicit array init handling). 'size_only' is true if
6652 size only evaluation is wanted (only for arrays). */
6653 static void decl_initializer(CType *type, Section *sec, unsigned long c,
6654 int first, int size_only)
6656 int len, n, no_oblock, nb, i;
6657 int size1, align1;
6658 int have_elem;
6659 Sym *s, *f;
6660 Sym indexsym;
6661 CType *t1;
6663 /* If we currently are at an '}' or ',' we have read an initializer
6664 element in one of our callers, and not yet consumed it. */
6665 have_elem = tok == '}' || tok == ',';
6666 if (!have_elem && tok != '{' &&
6667 /* In case of strings we have special handling for arrays, so
6668 don't consume them as initializer value (which would commit them
6669 to some anonymous symbol). */
6670 tok != TOK_LSTR && tok != TOK_STR &&
6671 !size_only) {
6672 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
6673 have_elem = 1;
6676 if (have_elem &&
6677 !(type->t & VT_ARRAY) &&
6678 /* Use i_c_parameter_t to strip toplevel qualifiers.
6679 The source type might have VT_CONSTANT set, which is
6680 of course assignable to non-const elements. */
6681 is_compatible_unqualified_types(type, &vtop->type)) {
6682 init_putv(type, sec, c);
6683 } else if (type->t & VT_ARRAY) {
6684 s = type->ref;
6685 n = s->c;
6686 t1 = pointed_type(type);
6687 size1 = type_size(t1, &align1);
6689 no_oblock = 1;
6690 if ((first && tok != TOK_LSTR && tok != TOK_STR) ||
6691 tok == '{') {
6692 if (tok != '{')
6693 tcc_error("character array initializer must be a literal,"
6694 " optionally enclosed in braces");
6695 skip('{');
6696 no_oblock = 0;
6699 /* only parse strings here if correct type (otherwise: handle
6700 them as ((w)char *) expressions) */
6701 if ((tok == TOK_LSTR &&
6702 #ifdef TCC_TARGET_PE
6703 (t1->t & VT_BTYPE) == VT_SHORT && (t1->t & VT_UNSIGNED)
6704 #else
6705 (t1->t & VT_BTYPE) == VT_INT
6706 #endif
6707 ) || (tok == TOK_STR && (t1->t & VT_BTYPE) == VT_BYTE)) {
6708 len = 0;
6709 while (tok == TOK_STR || tok == TOK_LSTR) {
6710 int cstr_len, ch;
6712 /* compute maximum number of chars wanted */
6713 if (tok == TOK_STR)
6714 cstr_len = tokc.str.size;
6715 else
6716 cstr_len = tokc.str.size / sizeof(nwchar_t);
6717 cstr_len--;
6718 nb = cstr_len;
6719 if (n >= 0 && nb > (n - len))
6720 nb = n - len;
6721 if (!size_only) {
6722 if (cstr_len > nb)
6723 tcc_warning("initializer-string for array is too long");
6724 /* in order to go faster for the common case (char
6725 string in a global variable), we handle it
6726 specifically */
6727 if (sec && tok == TOK_STR && size1 == 1) {
6728 if (!NODATA_WANTED)
6729 memcpy(sec->data + c + len, tokc.str.data, nb);
6730 } else {
6731 for(i=0;i<nb;i++) {
6732 if (tok == TOK_STR)
6733 ch = ((unsigned char *)tokc.str.data)[i];
6734 else
6735 ch = ((nwchar_t *)tokc.str.data)[i];
6736 vpushi(ch);
6737 init_putv(t1, sec, c + (len + i) * size1);
6741 len += nb;
6742 next();
6744 /* only add trailing zero if enough storage (no
6745 warning in this case since it is standard) */
6746 if (n < 0 || len < n) {
6747 if (!size_only) {
6748 vpushi(0);
6749 init_putv(t1, sec, c + (len * size1));
6751 len++;
6753 len *= size1;
6754 } else {
6755 indexsym.c = 0;
6756 f = &indexsym;
6758 do_init_list:
6759 len = 0;
6760 while (tok != '}' || have_elem) {
6761 len = decl_designator(type, sec, c, &f, size_only, len);
6762 have_elem = 0;
6763 if (type->t & VT_ARRAY) {
6764 ++indexsym.c;
6765 /* special test for multi dimensional arrays (may not
6766 be strictly correct if designators are used at the
6767 same time) */
6768 if (no_oblock && len >= n*size1)
6769 break;
6770 } else {
6771 if (s->type.t == VT_UNION)
6772 f = NULL;
6773 else
6774 f = f->next;
6775 if (no_oblock && f == NULL)
6776 break;
6779 if (tok == '}')
6780 break;
6781 skip(',');
6784 /* put zeros at the end */
6785 if (!size_only && len < n*size1)
6786 init_putz(sec, c + len, n*size1 - len);
6787 if (!no_oblock)
6788 skip('}');
6789 /* patch type size if needed, which happens only for array types */
6790 if (n < 0)
6791 s->c = size1 == 1 ? len : ((len + size1 - 1)/size1);
6792 } else if ((type->t & VT_BTYPE) == VT_STRUCT) {
6793 size1 = 1;
6794 no_oblock = 1;
6795 if (first || tok == '{') {
6796 skip('{');
6797 no_oblock = 0;
6799 s = type->ref;
6800 f = s->next;
6801 n = s->c;
6802 goto do_init_list;
6803 } else if (tok == '{') {
6804 next();
6805 decl_initializer(type, sec, c, first, size_only);
6806 skip('}');
6807 } else if (size_only) {
6808 /* If we supported only ISO C we wouldn't have to accept calling
6809 this on anything other than an array with size_only==1 (and even then
6810 only on the outermost level, so no recursion would be needed),
6811 because initializing a flex array member isn't supported.
6812 But GNU C supports it, so we need to recurse even into
6813 subfields of structs and arrays when size_only is set. */
6814 /* just skip expression */
6815 skip_or_save_block(NULL);
6816 } else {
6817 if (!have_elem) {
6818 /* This should happen only when we haven't parsed
6819 the init element above for fear of committing a
6820 string constant to memory too early. */
6821 if (tok != TOK_STR && tok != TOK_LSTR)
6822 expect("string constant");
6823 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
6825 init_putv(type, sec, c);
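/* Aside (not part of tcc): C-level initializers exercising the paths of
   decl_initializer() above -- string literals for char arrays (with the
   array size patched from the literal when it is omitted), designators,
   and nested brace lists.  The identifiers below are made up for
   illustration. */
#if 0
#include <assert.h>
struct point { int x, y; };
char msg[] = "hi";                  /* array size patched to 3 (includes '\0') */
char tight[2] = "hi";               /* exactly full: trailing zero dropped, no warning */
int sparse[8] = { 1, [5] = 6, 7 };  /* designator resets the running index */
struct point corners[2] = { { 1, 2 }, { .y = 4 } };

int main(void)
{
    assert(sizeof msg == 3);
    assert(sparse[0] == 1 && sparse[5] == 6 && sparse[6] == 7 && sparse[2] == 0);
    assert(corners[1].x == 0 && corners[1].y == 4);
    return 0;
}
#endif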
6829 /* parse an initializer for type 't' if 'has_init' is non zero, and
6830 allocate space in local or global data space ('r' is either
6831 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
6832 variable 'v' of scope 'scope' is declared before initializers
6833 are parsed. If 'v' is zero, then a reference to the new object
6834 is put in the value stack. If 'has_init' is 2, a special parsing
6835 is done to handle string constants. */
6836 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r,
6837 int has_init, int v, int scope)
6839 int size, align, addr;
6840 TokenString *init_str = NULL;
6842 Section *sec;
6843 Sym *flexible_array;
6844 Sym *sym = NULL;
6845 int saved_nocode_wanted = nocode_wanted;
6846 #ifdef CONFIG_TCC_BCHECK
6847 int bcheck = tcc_state->do_bounds_check && !NODATA_WANTED;
6848 #endif
6850 if (type->t & VT_STATIC)
6851 nocode_wanted |= NODATA_WANTED ? 0x40000000 : 0x80000000;
6853 flexible_array = NULL;
6854 if ((type->t & VT_BTYPE) == VT_STRUCT) {
6855 Sym *field = type->ref->next;
6856 if (field) {
6857 while (field->next)
6858 field = field->next;
6859 if (field->type.t & VT_ARRAY && field->type.ref->c < 0)
6860 flexible_array = field;
6864 size = type_size(type, &align);
6865 /* If unknown size, we must evaluate it before
6866 evaluating initializers because
6867 initializers can generate global data too
6868 (e.g. string pointers or ISOC99 compound
6869 literals). It also simplifies local
6870 initializers handling */
6871 if (size < 0 || (flexible_array && has_init)) {
6872 if (!has_init)
6873 tcc_error("unknown type size");
6874 /* get all init string */
6875 if (has_init == 2) {
6876 init_str = tok_str_alloc();
6877 /* only get strings */
6878 while (tok == TOK_STR || tok == TOK_LSTR) {
6879 tok_str_add_tok(init_str);
6880 next();
6882 tok_str_add(init_str, -1);
6883 tok_str_add(init_str, 0);
6884 } else {
6885 skip_or_save_block(&init_str);
6887 unget_tok(0);
6889 /* compute size */
6890 begin_macro(init_str, 1);
6891 next();
6892 decl_initializer(type, NULL, 0, 1, 1);
6893 /* prepare second initializer parsing */
6894 macro_ptr = init_str->str;
6895 next();
6897 /* if still unknown size, error */
6898 size = type_size(type, &align);
6899 if (size < 0)
6900 tcc_error("unknown type size");
6902 /* If there's a flexible array member and it was used in the
6903 initializer, adjust the size. */
6904 if (flexible_array &&
6905 flexible_array->type.ref->c > 0)
6906 size += flexible_array->type.ref->c
6907 * pointed_size(&flexible_array->type);
6908 /* take into account specified alignment if bigger */
6909 if (ad->a.aligned) {
6910 int speca = 1 << (ad->a.aligned - 1);
6911 if (speca > align)
6912 align = speca;
6913 } else if (ad->a.packed) {
6914 align = 1;
6917 if (NODATA_WANTED)
6918 size = 0, align = 1;
6920 if ((r & VT_VALMASK) == VT_LOCAL) {
6921 sec = NULL;
6922 #ifdef CONFIG_TCC_BCHECK
6923 if (bcheck && (type->t & VT_ARRAY)) {
6924 loc--;
6926 #endif
6927 loc = (loc - size) & -align;
6928 addr = loc;
6929 #ifdef CONFIG_TCC_BCHECK
6930 /* handles bounds */
6931 /* XXX: currently, since we do only one pass, we cannot track
6932 '&' operators, so we add only arrays */
6933 if (bcheck && (type->t & VT_ARRAY)) {
6934 addr_t *bounds_ptr;
6935 /* add padding between regions */
6936 loc--;
6937 /* then add local bound info */
6938 bounds_ptr = section_ptr_add(lbounds_section, 2 * sizeof(addr_t));
6939 bounds_ptr[0] = addr;
6940 bounds_ptr[1] = size;
6942 #endif
6943 if (v) {
6944 /* local variable */
6945 #ifdef CONFIG_TCC_ASM
6946 if (ad->asm_label) {
6947 int reg = asm_parse_regvar(ad->asm_label);
6948 if (reg >= 0)
6949 r = (r & ~VT_VALMASK) | reg;
6951 #endif
6952 sym = sym_push(v, type, r, addr);
6953 sym->a = ad->a;
6954 } else {
6955 /* push local reference */
6956 vset(type, r, addr);
6958 } else {
6959 if (v && scope == VT_CONST) {
6960 /* see if the symbol was already defined */
6961 sym = sym_find(v);
6962 if (sym) {
6963 patch_storage(sym, ad, type);
6964 /* we accept several definitions of the same global variable. */
6965 if (!has_init && sym->c && elfsym(sym)->st_shndx != SHN_UNDEF)
6966 goto no_alloc;
6970 /* allocate symbol in corresponding section */
6971 sec = ad->section;
6972 if (!sec) {
6973 if (has_init)
6974 sec = data_section;
6975 else if (tcc_state->nocommon)
6976 sec = bss_section;
6979 if (sec) {
6980 addr = section_add(sec, size, align);
6981 #ifdef CONFIG_TCC_BCHECK
6982 /* add padding if bound check */
6983 if (bcheck)
6984 section_add(sec, 1, 1);
6985 #endif
6986 } else {
6987 addr = align; /* SHN_COMMON is special, symbol value is align */
6988 sec = common_section;
6991 if (v) {
6992 if (!sym) {
6993 sym = sym_push(v, type, r | VT_SYM, 0);
6994 patch_storage(sym, ad, NULL);
6996 /* Local statics have a scope until now (for
6997 warnings), remove it here. */
6998 sym->sym_scope = 0;
6999 /* update symbol definition */
7000 put_extern_sym(sym, sec, addr, size);
7001 } else {
7002 /* push global reference */
7003 sym = get_sym_ref(type, sec, addr, size);
7004 vpushsym(type, sym);
7005 vtop->r |= r;
7008 #ifdef CONFIG_TCC_BCHECK
7009 /* handles bounds now because the symbol must be defined
7010 before the relocation */
7011 if (bcheck) {
7012 addr_t *bounds_ptr;
7014 greloca(bounds_section, sym, bounds_section->data_offset, R_DATA_PTR, 0);
7015 /* then add global bound info */
7016 bounds_ptr = section_ptr_add(bounds_section, 2 * sizeof(addr_t));
7017 bounds_ptr[0] = 0; /* relocated */
7018 bounds_ptr[1] = size;
7020 #endif
7023 if (type->t & VT_VLA) {
7024 int a;
7026 if (NODATA_WANTED)
7027 goto no_alloc;
7029 /* save current stack pointer */
7030 if (vlas_in_scope == 0) {
7031 if (vla_sp_root_loc == -1)
7032 vla_sp_root_loc = (loc -= PTR_SIZE);
7033 gen_vla_sp_save(vla_sp_root_loc);
7036 vla_runtime_type_size(type, &a);
7037 gen_vla_alloc(type, a);
7038 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
7039 /* on _WIN64, because of the function args scratch area, the
7040 result of alloca differs from RSP and is returned in RAX. */
7041 gen_vla_result(addr), addr = (loc -= PTR_SIZE);
7042 #endif
7043 gen_vla_sp_save(addr);
7044 vla_sp_loc = addr;
7045 vlas_in_scope++;
7047 } else if (has_init) {
7048 size_t oldreloc_offset = 0;
7049 if (sec && sec->reloc)
7050 oldreloc_offset = sec->reloc->data_offset;
7051 decl_initializer(type, sec, addr, 1, 0);
7052 if (sec && sec->reloc)
7053 squeeze_multi_relocs(sec, oldreloc_offset);
7054 /* patch the flexible array member size back to -1
7055 for possible subsequent similar declarations */
7056 if (flexible_array)
7057 flexible_array->type.ref->c = -1;
7060 no_alloc:
7061 /* restore parse state if needed */
7062 if (init_str) {
7063 end_macro();
7064 next();
7067 nocode_wanted = saved_nocode_wanted;
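/* Aside (not part of tcc): the stack slot computation used above,
   'loc = (loc - size) & -align', rounds the (negative) frame offset down
   to a multiple of the alignment; '& -align' clears the low bits because
   align is a power of two.  A worked example, assuming two's complement
   as the code does; the values are made up for illustration. */
#if 0
#include <assert.h>
int main(void)
{
    int loc = -13;                  /* current frame offset */
    int size = 6, align = 8;
    loc = (loc - size) & -align;    /* -19 rounded down to a multiple of 8 */
    assert(loc == -24);
    assert(-loc % align == 0);
    return 0;
}
#endif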
7070 /* parse a function defined by symbol 'sym' and generate its code in
7071 'cur_text_section' */
7072 static void gen_function(Sym *sym)
7074 nocode_wanted = 0;
7075 ind = cur_text_section->data_offset;
7076 /* NOTE: we patch the symbol size later */
7077 put_extern_sym(sym, cur_text_section, ind, 0);
7078 funcname = get_tok_str(sym->v, NULL);
7079 func_ind = ind;
7080 /* Initialize VLA state */
7081 vla_sp_loc = -1;
7082 vla_sp_root_loc = -1;
7083 /* put debug symbol */
7084 tcc_debug_funcstart(tcc_state, sym);
7085 /* push a dummy symbol to enable local sym storage */
7086 sym_push2(&local_stack, SYM_FIELD, 0, 0);
7087 local_scope = 1; /* for function parameters */
7088 gfunc_prolog(&sym->type);
7089 local_scope = 0;
7090 rsym = 0;
7091 block(NULL, NULL, 0);
7092 nocode_wanted = 0;
7093 gsym(rsym);
7094 gfunc_epilog();
7095 cur_text_section->data_offset = ind;
7096 label_pop(&global_label_stack, NULL, 0);
7097 /* reset local stack */
7098 local_scope = 0;
7099 sym_pop(&local_stack, NULL, 0);
7100 /* end of function */
7101 /* patch symbol size */
7102 elfsym(sym)->st_size = ind - func_ind;
7103 tcc_debug_funcend(tcc_state, ind - func_ind);
7104 /* It's better to crash than to generate wrong code */
7105 cur_text_section = NULL;
7106 funcname = ""; /* for safety */
7107 func_vt.t = VT_VOID; /* for safety */
7108 func_var = 0; /* for safety */
7109 ind = 0; /* for safety */
7110 nocode_wanted = 0x80000000;
7111 check_vstack();
7114 static void gen_inline_functions(TCCState *s)
7116 Sym *sym;
7117 int inline_generated, i, ln;
7118 struct InlineFunc *fn;
7120 ln = file->line_num;
7121 /* iterate while inline functions are referenced */
7122 do {
7123 inline_generated = 0;
7124 for (i = 0; i < s->nb_inline_fns; ++i) {
7125 fn = s->inline_fns[i];
7126 sym = fn->sym;
7127 if (sym && sym->c) {
7128 /* the function was used: generate its code and
7129 convert it to a normal function */
7130 fn->sym = NULL;
7131 if (file)
7132 pstrcpy(file->filename, sizeof file->filename, fn->filename);
7133 sym->type.t &= ~VT_INLINE;
7135 begin_macro(fn->func_str, 1);
7136 next();
7137 cur_text_section = text_section;
7138 gen_function(sym);
7139 end_macro();
7141 inline_generated = 1;
7144 } while (inline_generated);
7145 file->line_num = ln;
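/* Aside (not part of tcc): why gen_inline_functions() loops until nothing
   new was generated -- emitting one static inline body can reference
   another static inline function, which then needs emitting too.  The
   functions below are made up for illustration. */
#if 0
static inline int add1(int x) { return x + 1; }
static inline int add2(int x) { return add1(add1(x)); } /* emitting add2 marks add1 as used */
int use_it(int x) { return add2(x); }                    /* first real reference */
#endif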
7148 ST_FUNC void free_inline_functions(TCCState *s)
7150 int i;
7151 /* free tokens of unused inline functions */
7152 for (i = 0; i < s->nb_inline_fns; ++i) {
7153 struct InlineFunc *fn = s->inline_fns[i];
7154 if (fn->sym)
7155 tok_str_free(fn->func_str);
7157 dynarray_reset(&s->inline_fns, &s->nb_inline_fns);
7160 /* 'l' is VT_LOCAL or VT_CONST to define default storage type, or VT_CMP
7161 if parsing old style parameter decl list (and FUNC_SYM is set then) */
7162 static int decl0(int l, int is_for_loop_init, Sym *func_sym)
7164 int v, has_init, r;
7165 CType type, btype;
7166 Sym *sym;
7167 AttributeDef ad;
7169 while (1) {
7170 if (!parse_btype(&btype, &ad)) {
7171 if (is_for_loop_init)
7172 return 0;
7173 /* skip redundant ';' if not in old parameter decl scope */
7174 if (tok == ';' && l != VT_CMP) {
7175 next();
7176 continue;
7178 if (l != VT_CONST)
7179 break;
7180 if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
7181 /* global asm block */
7182 asm_global_instr();
7183 continue;
7185 if (tok >= TOK_UIDENT) {
7186 /* special test for old K&R protos without explicit int
7187 type. Only accepted when defining global data */
7188 btype.t = VT_INT;
7189 } else {
7190 if (tok != TOK_EOF)
7191 expect("declaration");
7192 break;
7195 if (tok == ';') {
7196 if ((btype.t & VT_BTYPE) == VT_STRUCT) {
7197 int v = btype.ref->v;
7198 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) >= SYM_FIRST_ANOM)
7199 tcc_warning("unnamed struct/union that defines no instances");
7200 next();
7201 continue;
7203 if (IS_ENUM(btype.t)) {
7204 next();
7205 continue;
7208 while (1) { /* iterate thru each declaration */
7209 type = btype;
7210 /* If the base type itself was an array type of unspecified
7211 size (like in 'typedef int arr[]; arr x = {1};') then
7212 we will overwrite the unknown size by the real one for
7213 this decl. We need to unshare the ref symbol holding
7214 that size. */
7215 if ((type.t & VT_ARRAY) && type.ref->c < 0) {
7216 type.ref = sym_push(SYM_FIELD, &type.ref->type, 0, type.ref->c);
7218 type_decl(&type, &ad, &v, TYPE_DIRECT);
7219 #if 0
7220 {
7221 char buf[500];
7222 type_to_str(buf, sizeof(buf), &type, get_tok_str(v, NULL));
7223 printf("type = '%s'\n", buf);
7224 }
7225 #endif
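/* (aside: enabling the #if 0 block above prints the declarator just
   parsed, e.g. to see how an involved declaration such as
   'int (*(*fp)(void))[4]' ended up in 'type') */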
7226 if ((type.t & VT_BTYPE) == VT_FUNC) {
7227 if ((type.t & VT_STATIC) && (l == VT_LOCAL)) {
7228 tcc_error("function without file scope cannot be static");
7230 /* if old style function prototype, we accept a
7231 declaration list */
7232 sym = type.ref;
7233 if (sym->f.func_type == FUNC_OLD && l == VT_CONST)
7234 decl0(VT_CMP, 0, sym);
7237 if (gnu_ext && (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
7238 ad.asm_label = asm_label_instr();
7239 /* parse one last attribute list, after asm label */
7240 parse_attribute(&ad);
7241 if (tok == '{')
7242 expect(";");
7245 #ifdef TCC_TARGET_PE
7246 if (ad.a.dllimport || ad.a.dllexport) {
7247 if (type.t & (VT_STATIC|VT_TYPEDEF))
7248 tcc_error("cannot have dll linkage with static or typedef");
7249 if (ad.a.dllimport) {
7250 if ((type.t & VT_BTYPE) == VT_FUNC)
7251 ad.a.dllimport = 0;
7252 else
7253 type.t |= VT_EXTERN;
7256 #endif
7257 if (tok == '{') {
7258 if (l != VT_CONST)
7259 tcc_error("cannot use local functions");
7260 if ((type.t & VT_BTYPE) != VT_FUNC)
7261 expect("function definition");
7263 /* reject abstract declarators in function definition;
7264 make old style params without decl have int type */
7265 sym = type.ref;
7266 while ((sym = sym->next) != NULL) {
7267 if (!(sym->v & ~SYM_FIELD))
7268 expect("identifier");
7269 if (sym->type.t == VT_VOID)
7270 sym->type = int_type;
7273 /* XXX: cannot do better now: convert extern inline to static inline */
7274 if ((type.t & (VT_EXTERN | VT_INLINE)) == (VT_EXTERN | VT_INLINE))
7275 type.t = (type.t & ~VT_EXTERN) | VT_STATIC;
7277 /* put function symbol */
7278 sym = external_global_sym(v, &type, 0);
7279 type.t &= ~VT_EXTERN;
7280 patch_storage(sym, &ad, &type);
7282 /* static inline functions are just recorded as a kind
7283 of macro. Their code will be emitted at the end of
7284 the compilation unit only if they are used */
7285 if ((type.t & (VT_INLINE | VT_STATIC)) ==
7286 (VT_INLINE | VT_STATIC)) {
7287 struct InlineFunc *fn;
7288 const char *filename;
7290 filename = file ? file->filename : "";
7291 fn = tcc_malloc(sizeof *fn + strlen(filename));
7292 strcpy(fn->filename, filename);
7293 fn->sym = sym;
7294 skip_or_save_block(&fn->func_str);
7295 dynarray_add(&tcc_state->inline_fns,
7296 &tcc_state->nb_inline_fns, fn);
7297 } else {
7298 /* compute text section */
7299 cur_text_section = ad.section;
7300 if (!cur_text_section)
7301 cur_text_section = text_section;
7302 gen_function(sym);
7304 break;
7305 } else {
7306 if (l == VT_CMP) {
7307 /* find parameter in function parameter list */
7308 for (sym = func_sym->next; sym; sym = sym->next)
7309 if ((sym->v & ~SYM_FIELD) == v)
7310 goto found;
7311 tcc_error("declaration for parameter '%s' but no such parameter",
7312 get_tok_str(v, NULL));
7313 found:
7314 if (type.t & VT_STORAGE) /* 'register' is okay */
7315 tcc_error("storage class specified for '%s'",
7316 get_tok_str(v, NULL));
7317 if (sym->type.t != VT_VOID)
7318 tcc_error("redefinition of parameter '%s'",
7319 get_tok_str(v, NULL));
7320 convert_parameter_type(&type);
7321 sym->type = type;
7322 } else if (type.t & VT_TYPEDEF) {
7323 /* save typedefed type */
7324 /* XXX: test storage specifiers ? */
7325 sym = sym_find(v);
7326 if (sym && sym->sym_scope == local_scope) {
7327 if (!is_compatible_types(&sym->type, &type)
7328 || !(sym->type.t & VT_TYPEDEF))
7329 tcc_error("incompatible redefinition of '%s'",
7330 get_tok_str(v, NULL));
7331 sym->type = type;
7332 } else {
7333 sym = sym_push(v, &type, 0, 0);
7335 sym->a = ad.a;
7336 sym->f = ad.f;
7337 } else {
7338 r = 0;
7339 if ((type.t & VT_BTYPE) == VT_FUNC) {
7340 /* external function definition */
7341 /* specific case for func_call attribute */
7342 type.ref->f = ad.f;
7343 } else if (!(type.t & VT_ARRAY)) {
7344 /* not lvalue if array */
7345 r |= lvalue_type(type.t);
7347 has_init = (tok == '=');
7348 if (has_init && (type.t & VT_VLA))
7349 tcc_error("variable length array cannot be initialized");
7350 if (((type.t & VT_EXTERN) && (!has_init || l != VT_CONST)) ||
7351 ((type.t & VT_BTYPE) == VT_FUNC) ||
7352 ((type.t & VT_ARRAY) && (type.t & VT_STATIC) &&
7353 !has_init && l == VT_CONST && type.ref->c < 0)) {
7354 /* external variable or function */
7355 /* NOTE: like GCC, uninitialized global static
7356 arrays of zero size are considered
7357 extern */
7358 type.t |= VT_EXTERN;
7359 sym = external_sym(v, &type, r, &ad);
7360 if (ad.alias_target) {
7361 ElfSym *esym;
7362 Sym *alias_target;
7363 alias_target = sym_find(ad.alias_target);
7364 esym = elfsym(alias_target);
7365 if (!esym)
7366 tcc_error("unsupported forward __alias__ attribute");
7367 /* Local statics have a scope until now (for
7368 warnings), remove it here. */
7369 sym->sym_scope = 0;
7370 put_extern_sym2(sym, esym->st_shndx, esym->st_value, esym->st_size, 0);
7372 } else {
7373 if (type.t & VT_STATIC)
7374 r |= VT_CONST;
7375 else
7376 r |= l;
7377 if (has_init)
7378 next();
7379 else if (l == VT_CONST)
7380 /* uninitialized global variables may be overridden */
7381 type.t |= VT_EXTERN;
7382 decl_initializer_alloc(&type, &ad, r, has_init, v, l);
7385 if (tok != ',') {
7386 if (is_for_loop_init)
7387 return 1;
7388 skip(';');
7389 break;
7391 next();
7393 ad.a.aligned = 0;
7396 return 0;
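/* Aside (not part of tcc): the old-style declaration list that decl0()
   handles by re-entering itself with l == VT_CMP.  A parameter left
   undeclared in the list defaults to int, as the code above arranges.
   The function below is made up for illustration. */
#if 0
int scale(x, factor, name)
    int x;
    char *name;        /* 'factor' is not declared, so it gets type int */
{
    (void)name;
    return x * factor;
}
#endif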
7399 static void decl(int l)
7401 decl0(l, 0, NULL);
7404 /* ------------------------------------------------------------------------- */