1 /*
2 * TCC - Tiny C Compiler
3 *
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 #include "tcc.h"
23 /********************************************************/
24 /* global variables */
26 /* loc : local variable index
27 ind : output code index
28 rsym: return symbol
29 anon_sym: anonymous symbol index
31 ST_DATA int rsym, anon_sym, ind, loc;
33 ST_DATA Sym *sym_free_first;
34 ST_DATA void **sym_pools;
35 ST_DATA int nb_sym_pools;
37 ST_DATA Sym *global_stack;
38 ST_DATA Sym *local_stack;
39 ST_DATA Sym *define_stack;
40 ST_DATA Sym *global_label_stack;
41 ST_DATA Sym *local_label_stack;
42 static int local_scope;
43 static int in_sizeof;
44 static int section_sym;
46 ST_DATA int vlas_in_scope; /* number of VLAs that are currently in scope */
47 ST_DATA int vla_sp_root_loc; /* vla_sp_loc for SP before any VLAs were pushed */
48 ST_DATA int vla_sp_loc; /* stack location of the variable holding the saved stack pointer, used when the stack pointer is modified */
50 ST_DATA SValue __vstack[1+VSTACK_SIZE], *vtop, *pvtop;
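/* Editorial note: this array implements the expression value stack.
   vtop always points at the current top SValue; pvtop records the
   expected top so that check_vstack() below can report a "vstack leak"
   when an expression pushes more than it pops.  The extra slot in
   1+VSTACK_SIZE apparently leaves room for vtop to rest one entry below
   the logical stack base while the stack is empty. */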
52 ST_DATA int const_wanted; /* true if constant wanted */
53 ST_DATA int nocode_wanted; /* true if no code generation wanted for an expression */
54 ST_DATA int global_expr; /* true if compound literals must be allocated globally (used during initializers parsing) */
55 ST_DATA CType func_vt; /* current function return type (used by return instruction) */
56 ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
57 ST_DATA int func_vc;
58 ST_DATA int last_line_num, last_ind, func_ind; /* debug last line number and pc */
59 ST_DATA const char *funcname;
61 ST_DATA CType char_pointer_type, func_old_type, int_type, size_type;
63 ST_DATA struct switch_t {
64 struct case_t {
65 int64_t v1, v2;
66 int sym;
67 } **p; int n; /* list of case ranges */
68 int def_sym; /* default symbol */
69 } *cur_switch; /* current switch */
71 /* ------------------------------------------------------------------------- */
73 static void gen_cast(CType *type);
74 static inline CType *pointed_type(CType *type);
75 static int is_compatible_types(CType *type1, CType *type2);
76 static int parse_btype(CType *type, AttributeDef *ad);
77 static void type_decl(CType *type, AttributeDef *ad, int *v, int td);
78 static void parse_expr_type(CType *type);
79 static void decl_initializer(CType *type, Section *sec, unsigned long c, int first, int size_only);
80 static void block(int *bsym, int *csym, int is_expr);
81 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
82 static int decl0(int l, int is_for_loop_init);
83 static void expr_eq(void);
84 static void expr_lor_const(void);
85 static void unary_type(CType *type);
86 static void vla_runtime_type_size(CType *type, int *a);
87 static void vla_sp_restore(void);
88 static void vla_sp_restore_root(void);
89 static int is_compatible_parameter_types(CType *type1, CType *type2);
90 static void expr_type(CType *type);
91 static inline int64_t expr_const64(void);
92 ST_FUNC void vpush64(int ty, unsigned long long v);
93 ST_FUNC void vpush(CType *type);
94 ST_FUNC int gvtst(int inv, int t);
95 ST_FUNC int is_btype_size(int bt);
96 static void gen_inline_functions(TCCState *s);
98 ST_INLN int is_float(int t)
100 int bt;
101 bt = t & VT_BTYPE;
102 return bt == VT_LDOUBLE || bt == VT_DOUBLE || bt == VT_FLOAT || bt == VT_QFLOAT;
105 /* we use our own 'finite' function to avoid potential problems with
106 non-standard math libs */
107 /* XXX: endianness dependent */
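/* Illustrative note (little-endian layout assumed, as the XXX above warns):
   p[1] holds the high 32 bits of the double.  OR-ing with 0x800fffff forces
   every bit except the 11 exponent bits to 1; adding 1 then carries through
   the exponent field only when it is all ones (Inf/NaN), which wraps the
   word to 0.  So the final shift yields 1 for finite values, 0 otherwise:
     1.0      -> high word 0x3ff00000 -> returns 1
     INFINITY -> high word 0x7ff00000 -> returns 0 */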
108 ST_FUNC int ieee_finite(double d)
110 int p[4];
111 memcpy(p, &d, sizeof(double));
112 return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
115 ST_FUNC void test_lvalue(void)
117 if (!(vtop->r & VT_LVAL))
118 expect("lvalue");
121 ST_FUNC void check_vstack(void)
123 if (pvtop != vtop)
124 tcc_error("internal compiler error: vstack leak (%d)", vtop - pvtop);
127 /* ------------------------------------------------------------------------- */
128 /* vstack debugging aid */
130 #if 0
131 void pv (const char *lbl, int a, int b)
133 int i;
134 for (i = a; i < a + b; ++i) {
135 SValue *p = &vtop[-i];
136 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
137 lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
140 #endif
142 /* ------------------------------------------------------------------------- */
143 /* start of translation unit info */
144 ST_FUNC void tcc_debug_start(TCCState *s1)
146 if (s1->do_debug) {
147 char buf[512];
149 /* file info: full path + filename */
150 section_sym = put_elf_sym(symtab_section, 0, 0,
151 ELFW(ST_INFO)(STB_LOCAL, STT_SECTION), 0,
152 text_section->sh_num, NULL);
153 getcwd(buf, sizeof(buf));
154 #ifdef _WIN32
155 normalize_slashes(buf);
156 #endif
157 pstrcat(buf, sizeof(buf), "/");
158 put_stabs_r(buf, N_SO, 0, 0,
159 text_section->data_offset, text_section, section_sym);
160 put_stabs_r(file->filename, N_SO, 0, 0,
161 text_section->data_offset, text_section, section_sym);
162 last_ind = 0;
163 last_line_num = 0;
166 /* an ELF symbol of type STT_FILE must be put so that STB_LOCAL
167 symbols can be safely used */
168 put_elf_sym(symtab_section, 0, 0,
169 ELFW(ST_INFO)(STB_LOCAL, STT_FILE), 0,
170 SHN_ABS, file->filename);
173 /* put end of translation unit info */
174 ST_FUNC void tcc_debug_end(TCCState *s1)
176 if (!s1->do_debug)
177 return;
178 put_stabs_r(NULL, N_SO, 0, 0,
179 text_section->data_offset, text_section, section_sym);
183 /* generate line number info */
184 ST_FUNC void tcc_debug_line(TCCState *s1)
186 if (!s1->do_debug)
187 return;
188 if ((last_line_num != file->line_num || last_ind != ind)) {
189 put_stabn(N_SLINE, 0, file->line_num, ind - func_ind);
190 last_ind = ind;
191 last_line_num = file->line_num;
195 /* put function symbol */
196 ST_FUNC void tcc_debug_funcstart(TCCState *s1, Sym *sym)
198 char buf[512];
200 if (!s1->do_debug)
201 return;
203 /* stabs info */
204 /* XXX: we put here a dummy type */
205 snprintf(buf, sizeof(buf), "%s:%c1",
206 funcname, sym->type.t & VT_STATIC ? 'f' : 'F');
207 put_stabs_r(buf, N_FUN, 0, file->line_num, 0,
208 cur_text_section, sym->c);
209 /* //gr gdb wants a line at the function */
210 put_stabn(N_SLINE, 0, file->line_num, 0);
212 last_ind = 0;
213 last_line_num = 0;
216 /* put function size */
217 ST_FUNC void tcc_debug_funcend(TCCState *s1, int size)
219 if (!s1->do_debug)
220 return;
221 put_stabn(N_FUN, 0, 0, size);
224 /* ------------------------------------------------------------------------- */
225 ST_FUNC void tccgen_start(TCCState *s1)
227 cur_text_section = NULL;
228 funcname = "";
229 anon_sym = SYM_FIRST_ANOM;
230 section_sym = 0;
231 const_wanted = 0;
232 nocode_wanted = 1;
234 /* define some often used types */
235 int_type.t = VT_INT;
236 char_pointer_type.t = VT_BYTE;
237 mk_pointer(&char_pointer_type);
238 #if PTR_SIZE == 4
239 size_type.t = VT_INT;
240 #else
241 size_type.t = VT_LLONG;
242 #endif
243 func_old_type.t = VT_FUNC;
244 func_old_type.ref = sym_push(SYM_FIELD, &int_type, FUNC_CDECL, FUNC_OLD);
246 tcc_debug_start(s1);
248 #ifdef TCC_TARGET_ARM
249 arm_init(s1);
250 #endif
253 ST_FUNC void tccgen_end(TCCState *s1)
255 gen_inline_functions(s1);
256 check_vstack();
257 /* end of translation unit info */
258 tcc_debug_end(s1);
261 /* ------------------------------------------------------------------------- */
262 /* update sym->c so that it points to an external symbol in section
263 'section' with value 'value' */
265 ST_FUNC void put_extern_sym2(Sym *sym, Section *section,
266 addr_t value, unsigned long size,
267 int can_add_underscore)
269 int sym_type, sym_bind, sh_num, info, other;
270 ElfW(Sym) *esym;
271 const char *name;
272 char buf1[256];
274 #ifdef CONFIG_TCC_BCHECK
275 char buf[32];
276 #endif
278 if (section == NULL)
279 sh_num = SHN_UNDEF;
280 else if (section == SECTION_ABS)
281 sh_num = SHN_ABS;
282 else
283 sh_num = section->sh_num;
285 if ((sym->type.t & VT_BTYPE) == VT_FUNC) {
286 sym_type = STT_FUNC;
287 } else if ((sym->type.t & VT_BTYPE) == VT_VOID) {
288 sym_type = STT_NOTYPE;
289 } else {
290 sym_type = STT_OBJECT;
293 if (sym->type.t & VT_STATIC)
294 sym_bind = STB_LOCAL;
295 else {
296 if (sym->type.t & VT_WEAK)
297 sym_bind = STB_WEAK;
298 else
299 sym_bind = STB_GLOBAL;
302 if (!sym->c) {
303 name = get_tok_str(sym->v, NULL);
304 #ifdef CONFIG_TCC_BCHECK
305 if (tcc_state->do_bounds_check) {
306 /* XXX: avoid doing that for statics ? */
307 /* if bound checking is activated, we change some function
308 names by adding the "__bound" prefix */
309 switch(sym->v) {
310 #ifdef TCC_TARGET_PE
311 /* XXX: we rely only on malloc hooks */
312 case TOK_malloc:
313 case TOK_free:
314 case TOK_realloc:
315 case TOK_memalign:
316 case TOK_calloc:
317 #endif
318 case TOK_memcpy:
319 case TOK_memmove:
320 case TOK_memset:
321 case TOK_strlen:
322 case TOK_strcpy:
323 case TOK_alloca:
324 strcpy(buf, "__bound_");
325 strcat(buf, name);
326 name = buf;
327 break;
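/* e.g. a call to memcpy() is thus redirected to __bound_memcpy(),
   which tcc's bounds-checking runtime is expected to provide */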
330 #endif
331 other = 0;
333 #ifdef TCC_TARGET_PE
334 if (sym->type.t & VT_EXPORT)
335 other |= ST_PE_EXPORT;
336 if (sym_type == STT_FUNC && sym->type.ref) {
337 Sym *ref = sym->type.ref;
338 if (ref->a.func_export)
339 other |= ST_PE_EXPORT;
340 if (ref->a.func_call == FUNC_STDCALL && can_add_underscore) {
341 sprintf(buf1, "_%s@%d", name, ref->a.func_args * PTR_SIZE);
342 name = buf1;
343 other |= ST_PE_STDCALL;
344 can_add_underscore = 0;
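/* e.g. a stdcall function 'f' taking two int arguments is
   emitted as '_f@8' on 32-bit PE targets */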
346 } else {
347 if (find_elf_sym(tcc_state->dynsymtab_section, name))
348 other |= ST_PE_IMPORT;
349 if (sym->type.t & VT_IMPORT)
350 other |= ST_PE_IMPORT;
352 #else
353 if (! (sym->type.t & VT_STATIC))
354 other = (sym->type.t & VT_VIS_MASK) >> VT_VIS_SHIFT;
355 #endif
356 if (tcc_state->leading_underscore && can_add_underscore) {
357 buf1[0] = '_';
358 pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
359 name = buf1;
361 if (sym->asm_label) {
362 name = get_tok_str(sym->asm_label, NULL);
364 info = ELFW(ST_INFO)(sym_bind, sym_type);
365 sym->c = set_elf_sym(symtab_section, value, size, info, other, sh_num, name);
366 } else {
367 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
368 esym->st_value = value;
369 esym->st_size = size;
370 esym->st_shndx = sh_num;
374 ST_FUNC void put_extern_sym(Sym *sym, Section *section,
375 addr_t value, unsigned long size)
377 put_extern_sym2(sym, section, value, size, 1);
380 /* add a new relocation entry to symbol 'sym' in section 's' */
381 ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
382 addr_t addend)
384 int c = 0;
386 if (nocode_wanted && s == cur_text_section)
387 return;
389 if (sym) {
390 if (0 == sym->c)
391 put_extern_sym(sym, NULL, 0, 0);
392 c = sym->c;
395 /* now we can add ELF relocation info */
396 put_elf_reloca(symtab_section, s, offset, type, c, addend);
399 ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
401 greloca(s, sym, offset, type, 0);
404 /* ------------------------------------------------------------------------- */
405 /* symbol allocator */
406 static Sym *__sym_malloc(void)
408 Sym *sym_pool, *sym, *last_sym;
409 int i;
411 sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
412 dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);
414 last_sym = sym_free_first;
415 sym = sym_pool;
416 for(i = 0; i < SYM_POOL_NB; i++) {
417 sym->next = last_sym;
418 last_sym = sym;
419 sym++;
421 sym_free_first = last_sym;
422 return last_sym;
425 static inline Sym *sym_malloc(void)
427 Sym *sym;
428 #ifndef SYM_DEBUG
429 sym = sym_free_first;
430 if (!sym)
431 sym = __sym_malloc();
432 sym_free_first = sym->next;
433 return sym;
434 #else
435 sym = tcc_malloc(sizeof(Sym));
436 return sym;
437 #endif
440 ST_INLN void sym_free(Sym *sym)
442 #ifndef SYM_DEBUG
443 sym->next = sym_free_first;
444 sym_free_first = sym;
445 #else
446 tcc_free(sym);
447 #endif
450 /* push, without hashing */
451 ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, long c)
453 Sym *s;
455 s = sym_malloc();
456 s->scope = 0;
457 s->v = v;
458 s->type.t = t;
459 s->type.ref = NULL;
460 #ifdef _WIN64
461 s->d = NULL;
462 #endif
463 s->c = c;
464 s->next = NULL;
465 /* add in stack */
466 s->prev = *ps;
467 *ps = s;
468 return s;
471 /* find a symbol and return its associated structure. 's' is the top
472 of the symbol stack */
473 ST_FUNC Sym *sym_find2(Sym *s, int v)
475 while (s) {
476 if (s->v == v)
477 return s;
478 else if (s->v == -1)
479 return NULL;
480 s = s->prev;
482 return NULL;
485 /* structure lookup */
486 ST_INLN Sym *struct_find(int v)
488 v -= TOK_IDENT;
489 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
490 return NULL;
491 return table_ident[v]->sym_struct;
494 /* find an identifier */
495 ST_INLN Sym *sym_find(int v)
497 v -= TOK_IDENT;
498 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
499 return NULL;
500 return table_ident[v]->sym_identifier;
503 /* push a given symbol on the symbol stack */
504 ST_FUNC Sym *sym_push(int v, CType *type, int r, long c)
506 Sym *s, **ps;
507 TokenSym *ts;
509 if (local_stack)
510 ps = &local_stack;
511 else
512 ps = &global_stack;
513 s = sym_push2(ps, v, type->t, c);
514 s->type.ref = type->ref;
515 s->r = r;
516 /* don't record fields or anonymous symbols */
517 /* XXX: simplify */
518 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
519 /* record symbol in token array */
520 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
521 if (v & SYM_STRUCT)
522 ps = &ts->sym_struct;
523 else
524 ps = &ts->sym_identifier;
525 s->prev_tok = *ps;
526 *ps = s;
527 s->scope = local_scope;
528 if (s->prev_tok && s->prev_tok->scope == s->scope)
529 tcc_error("redeclaration of '%s'",
530 get_tok_str(v & ~SYM_STRUCT, NULL));
532 return s;
535 /* push a global identifier */
536 ST_FUNC Sym *global_identifier_push(int v, int t, long c)
538 Sym *s, **ps;
539 s = sym_push2(&global_stack, v, t, c);
540 /* don't record anonymous symbol */
541 if (v < SYM_FIRST_ANOM) {
542 ps = &table_ident[v - TOK_IDENT]->sym_identifier;
543 /* modify the top most local identifier, so that
544 sym_identifier will point to 's' when popped */
545 while (*ps != NULL)
546 ps = &(*ps)->prev_tok;
547 s->prev_tok = NULL;
548 *ps = s;
550 return s;
553 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
554 pop them yet from the list, but do remove them from the token array. */
555 ST_FUNC void sym_pop(Sym **ptop, Sym *b, int keep)
557 Sym *s, *ss, **ps;
558 TokenSym *ts;
559 int v;
561 s = *ptop;
562 while(s != b) {
563 ss = s->prev;
564 v = s->v;
565 /* remove symbol in token array */
566 /* XXX: simplify */
567 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
568 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
569 if (v & SYM_STRUCT)
570 ps = &ts->sym_struct;
571 else
572 ps = &ts->sym_identifier;
573 *ps = s->prev_tok;
575 if (!keep)
576 sym_free(s);
577 s = ss;
579 if (!keep)
580 *ptop = b;
583 static void weaken_symbol(Sym *sym)
585 sym->type.t |= VT_WEAK;
586 if (sym->c > 0) {
587 int esym_type;
588 ElfW(Sym) *esym;
590 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
591 esym_type = ELFW(ST_TYPE)(esym->st_info);
592 esym->st_info = ELFW(ST_INFO)(STB_WEAK, esym_type);
596 static void apply_visibility(Sym *sym, CType *type)
598 int vis = sym->type.t & VT_VIS_MASK;
599 int vis2 = type->t & VT_VIS_MASK;
600 if (vis == (STV_DEFAULT << VT_VIS_SHIFT))
601 vis = vis2;
602 else if (vis2 == (STV_DEFAULT << VT_VIS_SHIFT))
604 else
605 vis = (vis < vis2) ? vis : vis2;
606 sym->type.t &= ~VT_VIS_MASK;
607 sym->type.t |= vis;
609 if (sym->c > 0) {
610 ElfW(Sym) *esym;
612 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
613 vis >>= VT_VIS_SHIFT;
614 esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1)) | vis;
618 /* ------------------------------------------------------------------------- */
620 static void vsetc(CType *type, int r, CValue *vc)
622 int v;
624 if (vtop >= vstack + (VSTACK_SIZE - 1))
625 tcc_error("memory full (vstack)");
626 /* cannot leave the result in the CPU flags if other instructions are generated. Also
627 avoid leaving VT_JMP anywhere except on the top of the stack
628 because it would complicate the code generator.
630 Don't do this when nocode_wanted. vtop might come from
631 !nocode_wanted regions (see 88_codeopt.c) and transforming
632 it to a register without actually generating code is wrong
633 as their value might still be used for real. All values
634 we push under nocode_wanted will eventually be popped
635 again, so that the VT_CMP/VT_JMP value will be in vtop
636 when code is unsuppressed again.
638 Same logic below in vswap(); */
639 if (vtop >= vstack && !nocode_wanted) {
640 v = vtop->r & VT_VALMASK;
641 if (v == VT_CMP || (v & ~1) == VT_JMP)
642 gv(RC_INT);
645 vtop++;
646 vtop->type = *type;
647 vtop->r = r;
648 vtop->r2 = VT_CONST;
649 vtop->c = *vc;
650 vtop->sym = NULL;
653 ST_FUNC void vswap(void)
655 SValue tmp;
656 /* cannot vswap cpu flags. See comment at vsetc() above */
657 if (vtop >= vstack && !nocode_wanted) {
658 int v = vtop->r & VT_VALMASK;
659 if (v == VT_CMP || (v & ~1) == VT_JMP)
660 gv(RC_INT);
662 tmp = vtop[0];
663 vtop[0] = vtop[-1];
664 vtop[-1] = tmp;
667 /* pop stack value */
668 ST_FUNC void vpop(void)
670 int v;
671 v = vtop->r & VT_VALMASK;
672 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
673 /* for x86, we need to pop the FP stack */
674 if (v == TREG_ST0) {
675 o(0xd8dd); /* fstp %st(0) */
676 } else
677 #endif
678 if (v == VT_JMP || v == VT_JMPI) {
679 /* need to put correct jump if && or || without test */
680 gsym(vtop->c.i);
682 vtop--;
685 /* push constant of type "type" with useless value */
686 ST_FUNC void vpush(CType *type)
688 CValue cval;
689 vsetc(type, VT_CONST, &cval);
692 /* push integer constant */
693 ST_FUNC void vpushi(int v)
695 CValue cval;
696 cval.i = v;
697 vsetc(&int_type, VT_CONST, &cval);
700 /* push a pointer sized constant */
701 static void vpushs(addr_t v)
703 CValue cval;
704 cval.i = v;
705 vsetc(&size_type, VT_CONST, &cval);
708 /* push arbitrary 64bit constant */
709 ST_FUNC void vpush64(int ty, unsigned long long v)
711 CValue cval;
712 CType ctype;
713 ctype.t = ty;
714 ctype.ref = NULL;
715 cval.i = v;
716 vsetc(&ctype, VT_CONST, &cval);
719 /* push long long constant */
720 static inline void vpushll(long long v)
722 vpush64(VT_LLONG, v);
725 ST_FUNC void vset(CType *type, int r, long v)
727 CValue cval;
729 cval.i = v;
730 vsetc(type, r, &cval);
733 static void vseti(int r, int v)
735 CType type;
736 type.t = VT_INT;
737 type.ref = 0;
738 vset(&type, r, v);
741 ST_FUNC void vpushv(SValue *v)
743 if (vtop >= vstack + (VSTACK_SIZE - 1))
744 tcc_error("memory full (vstack)");
745 vtop++;
746 *vtop = *v;
749 static void vdup(void)
751 vpushv(vtop);
754 /* rotate n first stack elements to the bottom
755 I1 ... In -> I2 ... In I1 [top is right]
757 ST_FUNC void vrotb(int n)
759 int i;
760 SValue tmp;
762 tmp = vtop[-n + 1];
763 for(i=-n+1;i!=0;i++)
764 vtop[i] = vtop[i+1];
765 vtop[0] = tmp;
768 /* rotate the n elements before entry e towards the top
769 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
771 ST_FUNC void vrote(SValue *e, int n)
773 int i;
774 SValue tmp;
776 tmp = *e;
777 for(i = 0;i < n - 1; i++)
778 e[-i] = e[-i - 1];
779 e[-n + 1] = tmp;
782 /* rotate n first stack elements to the top
783 I1 ... In -> In I1 ... I(n-1) [top is right]
785 ST_FUNC void vrott(int n)
787 vrote(vtop, n);
790 /* push a symbol value of TYPE */
791 static inline void vpushsym(CType *type, Sym *sym)
793 CValue cval;
794 cval.i = 0;
795 vsetc(type, VT_CONST | VT_SYM, &cval);
796 vtop->sym = sym;
799 /* Return a static symbol pointing to a section */
800 ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
802 int v;
803 Sym *sym;
805 v = anon_sym++;
806 sym = global_identifier_push(v, type->t | VT_STATIC, 0);
807 sym->type.ref = type->ref;
808 sym->r = VT_CONST | VT_SYM;
809 put_extern_sym(sym, sec, offset, size);
810 return sym;
813 /* push a reference to a section offset by adding a dummy symbol */
814 static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
816 vpushsym(type, get_sym_ref(type, sec, offset, size));
819 /* define a new external reference to a symbol 'v' of type 'u' */
820 ST_FUNC Sym *external_global_sym(int v, CType *type, int r)
822 Sym *s;
824 s = sym_find(v);
825 if (!s) {
826 /* push forward reference */
827 s = global_identifier_push(v, type->t | VT_EXTERN, 0);
828 s->type.ref = type->ref;
829 s->r = r | VT_CONST | VT_SYM;
831 return s;
834 /* define a new external reference to a symbol 'v' */
835 static Sym *external_sym(int v, CType *type, int r)
837 Sym *s;
839 s = sym_find(v);
840 if (!s) {
841 /* push forward reference */
842 s = sym_push(v, type, r | VT_CONST | VT_SYM, 0);
843 s->type.t |= VT_EXTERN;
844 } else if (s->type.ref == func_old_type.ref) {
845 s->type.ref = type->ref;
846 s->r = r | VT_CONST | VT_SYM;
847 s->type.t |= VT_EXTERN;
848 } else if (!is_compatible_types(&s->type, type)) {
849 tcc_error("incompatible types for redefinition of '%s'",
850 get_tok_str(v, NULL));
852 /* Merge some storage attributes. */
853 if (type->t & VT_WEAK)
854 weaken_symbol(s);
856 if (type->t & VT_VIS_MASK)
857 apply_visibility(s, type);
859 return s;
862 /* push a reference to global symbol v */
863 ST_FUNC void vpush_global_sym(CType *type, int v)
865 vpushsym(type, external_global_sym(v, type, 0));
868 /* save registers up to (vtop - n) stack entry */
869 ST_FUNC void save_regs(int n)
871 SValue *p, *p1;
872 for(p = vstack, p1 = vtop - n; p <= p1; p++)
873 save_reg(p->r);
876 /* save r to the memory stack, and mark it as being free */
877 ST_FUNC void save_reg(int r)
879 save_reg_upstack(r, 0);
882 /* save r to the memory stack, and mark it as being free,
883 if seen up to (vtop - n) stack entry */
884 ST_FUNC void save_reg_upstack(int r, int n)
886 int l, saved, size, align;
887 SValue *p, *p1, sv;
888 CType *type;
890 if ((r &= VT_VALMASK) >= VT_CONST)
891 return;
892 if (nocode_wanted)
893 return;
895 /* modify all stack values */
896 saved = 0;
897 l = 0;
898 for(p = vstack, p1 = vtop - n; p <= p1; p++) {
899 if ((p->r & VT_VALMASK) == r ||
900 ((p->type.t & VT_BTYPE) == VT_LLONG && (p->r2 & VT_VALMASK) == r)) {
901 /* must save value on stack if not already done */
902 if (!saved) {
903 /* NOTE: must reload 'r' because r might be equal to r2 */
904 r = p->r & VT_VALMASK;
905 /* store register in the stack */
906 type = &p->type;
907 if ((p->r & VT_LVAL) ||
908 (!is_float(type->t) && (type->t & VT_BTYPE) != VT_LLONG))
909 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
910 type = &char_pointer_type;
911 #else
912 type = &int_type;
913 #endif
914 if ((type->t & VT_BTYPE) == VT_FLOAT) {
915 /* cast to DOUBLE to avoid precision loss */
916 type->t = (type->t & ~VT_BTYPE) | VT_DOUBLE;
918 size = type_size(type, &align);
919 loc = (loc - size) & -align;
920 sv.type.t = type->t;
921 sv.r = VT_LOCAL | VT_LVAL;
922 sv.c.i = loc;
923 store(r, &sv);
924 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
925 /* x86 specific: need to pop fp register ST0 if saved */
926 if (r == TREG_ST0) {
927 o(0xd8dd); /* fstp %st(0) */
929 #endif
930 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
931 /* special long long case */
932 if ((type->t & VT_BTYPE) == VT_LLONG) {
933 sv.c.i += 4;
934 store(p->r2, &sv);
936 #endif
937 l = loc;
938 saved = 1;
940 /* mark that stack entry as being saved on the stack */
941 if (p->r & VT_LVAL) {
942 /* also clear the bounded flag because the
943 relocation address of the function was stored in
944 p->c.i */
945 p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
946 } else {
947 p->r = lvalue_type(p->type.t) | VT_LOCAL;
949 p->r2 = VT_CONST;
950 p->c.i = l;
955 #ifdef TCC_TARGET_ARM
956 /* find a register of class 'rc2' with at most one reference on stack.
957 * If none, call get_reg(rc) */
958 ST_FUNC int get_reg_ex(int rc, int rc2)
960 int r;
961 SValue *p;
963 for(r=0;r<NB_REGS;r++) {
964 if (reg_classes[r] & rc2) {
965 int n;
966 n=0;
967 for(p = vstack; p <= vtop; p++) {
968 if ((p->r & VT_VALMASK) == r ||
969 (p->r2 & VT_VALMASK) == r)
970 n++;
972 if (n <= 1)
973 return r;
976 return get_reg(rc);
978 #endif
980 /* find a free register of class 'rc'. If none, save one register */
981 ST_FUNC int get_reg(int rc)
983 int r;
984 SValue *p;
986 /* find a free register */
987 for(r=0;r<NB_REGS;r++) {
988 if (reg_classes[r] & rc) {
989 if (nocode_wanted)
990 return r;
991 for(p=vstack;p<=vtop;p++) {
992 if ((p->r & VT_VALMASK) == r ||
993 (p->r2 & VT_VALMASK) == r)
994 goto notfound;
996 return r;
998 notfound: ;
1001 /* no register left : free the first one on the stack (VERY
1002 IMPORTANT to start from the bottom to ensure that we don't
1003 spill registers used in gen_opi()) */
1004 for(p=vstack;p<=vtop;p++) {
1005 /* look at second register (if long long) */
1006 r = p->r2 & VT_VALMASK;
1007 if (r < VT_CONST && (reg_classes[r] & rc))
1008 goto save_found;
1009 r = p->r & VT_VALMASK;
1010 if (r < VT_CONST && (reg_classes[r] & rc)) {
1011 save_found:
1012 save_reg(r);
1013 return r;
1016 /* Should never get here */
1017 return -1;
1020 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1021 if needed */
1022 static void move_reg(int r, int s, int t)
1024 SValue sv;
1026 if (r != s) {
1027 save_reg(r);
1028 sv.type.t = t;
1029 sv.type.ref = NULL;
1030 sv.r = s;
1031 sv.c.i = 0;
1032 load(r, &sv);
1036 /* get address of vtop (vtop MUST BE an lvalue) */
1037 ST_FUNC void gaddrof(void)
1039 if (vtop->r & VT_REF)
1040 gv(RC_INT);
1041 vtop->r &= ~VT_LVAL;
1042 /* tricky: if saved lvalue, then we can go back to lvalue */
1043 if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
1044 vtop->r = (vtop->r & ~(VT_VALMASK | VT_LVAL_TYPE)) | VT_LOCAL | VT_LVAL;
1049 #ifdef CONFIG_TCC_BCHECK
1050 /* generate lvalue bound code */
1051 static void gbound(void)
1053 int lval_type;
1054 CType type1;
1056 vtop->r &= ~VT_MUSTBOUND;
1057 /* if lvalue, then use checking code before dereferencing */
1058 if (vtop->r & VT_LVAL) {
1059 /* if not VT_BOUNDED value, then make one */
1060 if (!(vtop->r & VT_BOUNDED)) {
1061 lval_type = vtop->r & (VT_LVAL_TYPE | VT_LVAL);
1062 /* must save type because we must set it to int to get pointer */
1063 type1 = vtop->type;
1064 vtop->type.t = VT_PTR;
1065 gaddrof();
1066 vpushi(0);
1067 gen_bounded_ptr_add();
1068 vtop->r |= lval_type;
1069 vtop->type = type1;
1071 /* then check for dereferencing */
1072 gen_bounded_ptr_deref();
1075 #endif
1077 /* store vtop in a register belonging to class 'rc'. lvalues are
1078 converted to values. Cannot be used if the value cannot be
1079 converted to a register value (such as structures). */
1080 ST_FUNC int gv(int rc)
1082 int r, bit_pos, bit_size, size, align, i;
1083 int rc2;
1085 /* NOTE: get_reg can modify vstack[] */
1086 if (vtop->type.t & VT_BITFIELD) {
1087 CType type;
1088 int bits = 32;
1089 bit_pos = (vtop->type.t >> VT_STRUCT_SHIFT) & 0x3f;
1090 bit_size = (vtop->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
1091 /* remove bit field info to avoid loops */
1092 vtop->type.t &= ~VT_BITFIELD & ((1 << VT_STRUCT_SHIFT) - 1);
1093 /* cast to int to propagate signedness in following ops */
1094 if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
1095 type.t = VT_LLONG;
1096 bits = 64;
1097 } else
1098 type.t = VT_INT;
1099 if((vtop->type.t & VT_UNSIGNED) ||
1100 (vtop->type.t & VT_BTYPE) == VT_BOOL)
1101 type.t |= VT_UNSIGNED;
1102 gen_cast(&type);
1103 /* generate shifts */
1104 vpushi(bits - (bit_pos + bit_size));
1105 gen_op(TOK_SHL);
1106 vpushi(bits - bit_size);
1107 /* NOTE: transformed to SHR if unsigned */
1108 gen_op(TOK_SAR);
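/* Worked example (added): for a signed 5-bit field at bit_pos 3 in a
   32-bit word this emits (v << 24) >> 27, i.e. shift the field up to
   the top, then arithmetic-shift it back down so it ends up
   right-aligned and sign-extended (per the NOTE above, the SAR becomes
   an SHR for unsigned fields). */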
1109 r = gv(rc);
1110 } else {
1111 if (is_float(vtop->type.t) &&
1112 (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1113 Sym *sym;
1114 int *ptr;
1115 unsigned long offset;
1116 #if defined(TCC_TARGET_ARM) && !defined(TCC_ARM_VFP)
1117 CValue check;
1118 #endif
1120 /* XXX: unify with initializers handling ? */
1121 /* CPUs usually cannot use float constants, so we store them
1122 generically in the data segment */
1123 size = type_size(&vtop->type, &align);
1124 offset = (data_section->data_offset + align - 1) & -align;
1125 data_section->data_offset = offset;
1126 /* XXX: not portable yet */
1127 #if defined(__i386__) || defined(__x86_64__)
1128 /* Zero pad x87 tenbyte long doubles */
1129 if (size == LDOUBLE_SIZE) {
1130 vtop->c.tab[2] &= 0xffff;
1131 #if LDOUBLE_SIZE == 16
1132 vtop->c.tab[3] = 0;
1133 #endif
1135 #endif
1136 ptr = section_ptr_add(data_section, size);
1137 size = size >> 2;
1138 #if defined(TCC_TARGET_ARM) && !defined(TCC_ARM_VFP)
1139 check.d = 1;
1140 if(check.tab[0])
1141 for(i=0;i<size;i++)
1142 ptr[i] = vtop->c.tab[size-1-i];
1143 else
1144 #endif
1145 for(i=0;i<size;i++)
1146 ptr[i] = vtop->c.tab[i];
1147 sym = get_sym_ref(&vtop->type, data_section, offset, size << 2);
1148 vtop->r |= VT_LVAL | VT_SYM;
1149 vtop->sym = sym;
1150 vtop->c.i = 0;
1152 #ifdef CONFIG_TCC_BCHECK
1153 if (vtop->r & VT_MUSTBOUND)
1154 gbound();
1155 #endif
1157 r = vtop->r & VT_VALMASK;
1158 rc2 = (rc & RC_FLOAT) ? RC_FLOAT : RC_INT;
1159 #ifndef TCC_TARGET_ARM64
1160 if (rc == RC_IRET)
1161 rc2 = RC_LRET;
1162 #ifdef TCC_TARGET_X86_64
1163 else if (rc == RC_FRET)
1164 rc2 = RC_QRET;
1165 #endif
1166 #endif
1168 /* need to reload if:
1169 - constant
1170 - lvalue (need to dereference pointer)
1171 - already a register, but not in the right class */
1172 if (r >= VT_CONST
1173 || (vtop->r & VT_LVAL)
1174 || !(reg_classes[r] & rc)
1175 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1176 || ((vtop->type.t & VT_BTYPE) == VT_QLONG && !(reg_classes[vtop->r2] & rc2))
1177 || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT && !(reg_classes[vtop->r2] & rc2))
1178 #else
1179 || ((vtop->type.t & VT_BTYPE) == VT_LLONG && !(reg_classes[vtop->r2] & rc2))
1180 #endif
1183 r = get_reg(rc);
1184 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1185 if (((vtop->type.t & VT_BTYPE) == VT_QLONG) || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT)) {
1186 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
1187 #else
1188 if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
1189 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
1190 unsigned long long ll;
1191 #endif
1192 int r2, original_type;
1193 original_type = vtop->type.t;
1194 /* two register type load : expand to two words
1195 temporarily */
1196 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1197 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1198 /* load constant */
1199 ll = vtop->c.i;
1200 vtop->c.i = ll; /* first word */
1201 load(r, vtop);
1202 vtop->r = r; /* save register value */
1203 vpushi(ll >> 32); /* second word */
1204 } else
1205 #endif
1206 if (vtop->r & VT_LVAL) {
1207 /* We do not want to modify the long long
1208 pointer here, so the safest (and least
1209 efficient) way is to save all the other registers
1210 on the stack. XXX: totally inefficient. */
1211 #if 0
1212 save_regs(1);
1213 #else
1214 /* lvalue_save: save only if used further down the stack */
1215 save_reg_upstack(vtop->r, 1);
1216 #endif
1217 /* load from memory */
1218 vtop->type.t = load_type;
1219 load(r, vtop);
1220 vdup();
1221 vtop[-1].r = r; /* save register value */
1222 /* increment pointer to get second word */
1223 vtop->type.t = addr_type;
1224 gaddrof();
1225 vpushi(load_size);
1226 gen_op('+');
1227 vtop->r |= VT_LVAL;
1228 vtop->type.t = load_type;
1229 } else {
1230 /* move registers */
1231 load(r, vtop);
1232 vdup();
1233 vtop[-1].r = r; /* save register value */
1234 vtop->r = vtop[-1].r2;
1236 /* Allocate second register. Here we rely on the fact that
1237 get_reg() tries first to free r2 of an SValue. */
1238 r2 = get_reg(rc2);
1239 load(r2, vtop);
1240 vpop();
1241 /* write second register */
1242 vtop->r2 = r2;
1243 vtop->type.t = original_type;
1244 } else if ((vtop->r & VT_LVAL) && !is_float(vtop->type.t)) {
1245 int t1, t;
1246 /* lvalue of scalar type : need to use lvalue type
1247 because of possible cast */
1248 t = vtop->type.t;
1249 t1 = t;
1250 /* compute memory access type */
1251 if (vtop->r & VT_REF)
1252 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1253 t = VT_PTR;
1254 #else
1255 t = VT_INT;
1256 #endif
1257 else if (vtop->r & VT_LVAL_BYTE)
1258 t = VT_BYTE;
1259 else if (vtop->r & VT_LVAL_SHORT)
1260 t = VT_SHORT;
1261 if (vtop->r & VT_LVAL_UNSIGNED)
1262 t |= VT_UNSIGNED;
1263 vtop->type.t = t;
1264 load(r, vtop);
1265 /* restore wanted type */
1266 vtop->type.t = t1;
1267 } else {
1268 /* one register type load */
1269 load(r, vtop);
1272 vtop->r = r;
1273 #ifdef TCC_TARGET_C67
1274 /* uses register pairs for doubles */
1275 if ((vtop->type.t & VT_BTYPE) == VT_DOUBLE)
1276 vtop->r2 = r+1;
1277 #endif
1279 return r;
1282 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1283 ST_FUNC void gv2(int rc1, int rc2)
1285 int v;
1287 /* generate more generic register first. But VT_JMP or VT_CMP
1288 values must be generated first in all cases to avoid possible
1289 reload errors */
1290 v = vtop[0].r & VT_VALMASK;
1291 if (v != VT_CMP && (v & ~1) != VT_JMP && rc1 <= rc2) {
1292 vswap();
1293 gv(rc1);
1294 vswap();
1295 gv(rc2);
1296 /* test if reload is needed for first register */
1297 if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
1298 vswap();
1299 gv(rc1);
1300 vswap();
1302 } else {
1303 gv(rc2);
1304 vswap();
1305 gv(rc1);
1306 vswap();
1307 /* test if reload is needed for first register */
1308 if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
1309 gv(rc2);
1314 #ifndef TCC_TARGET_ARM64
1315 /* wrapper around RC_FRET to return a register by type */
1316 static int rc_fret(int t)
1318 #ifdef TCC_TARGET_X86_64
1319 if (t == VT_LDOUBLE) {
1320 return RC_ST0;
1322 #endif
1323 return RC_FRET;
1325 #endif
1327 /* wrapper around REG_FRET to return a register by type */
1328 static int reg_fret(int t)
1330 #ifdef TCC_TARGET_X86_64
1331 if (t == VT_LDOUBLE) {
1332 return TREG_ST0;
1334 #endif
1335 return REG_FRET;
1338 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1339 /* expand 64bit on stack in two ints */
1340 static void lexpand(void)
1342 int u, v;
1343 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1344 v = vtop->r & (VT_VALMASK | VT_LVAL);
1345 if (v == VT_CONST) {
1346 vdup();
1347 vtop[0].c.i >>= 32;
1348 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1349 vdup();
1350 vtop[0].c.i += 4;
1351 } else {
1352 gv(RC_INT);
1353 vdup();
1354 vtop[0].r = vtop[-1].r2;
1355 vtop[0].r2 = vtop[-1].r2 = VT_CONST;
1357 vtop[0].type.t = vtop[-1].type.t = VT_INT | u;
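/* Example (added): after lexpand() a constant 0x1111222233334444 sits on
   the stack as 0x33334444 (low word, below) with 0x11112222 (high word)
   on top; for an lvalue the duplicated address is advanced by 4 bytes so
   that it designates the high word (little-endian layout). */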
1359 #endif
1361 #ifdef TCC_TARGET_ARM
1362 /* expand long long on stack */
1363 ST_FUNC void lexpand_nr(void)
1365 int u,v;
1367 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1368 vdup();
1369 vtop->r2 = VT_CONST;
1370 vtop->type.t = VT_INT | u;
1371 v=vtop[-1].r & (VT_VALMASK | VT_LVAL);
1372 if (v == VT_CONST) {
1373 vtop[-1].c.i = vtop->c.i;
1374 vtop->c.i = vtop->c.i >> 32;
1375 vtop->r = VT_CONST;
1376 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1377 vtop->c.i += 4;
1378 vtop->r = vtop[-1].r;
1379 } else if (v > VT_CONST) {
1380 vtop--;
1381 lexpand();
1382 } else
1383 vtop->r = vtop[-1].r2;
1384 vtop[-1].r2 = VT_CONST;
1385 vtop[-1].type.t = VT_INT | u;
1387 #endif
1389 #if !defined(TCC_TARGET_X86_64) && !defined(TCC_TARGET_ARM64)
1390 /* build a long long from two ints */
1391 static void lbuild(int t)
1393 gv2(RC_INT, RC_INT);
1394 vtop[-1].r2 = vtop[0].r;
1395 vtop[-1].type.t = t;
1396 vpop();
1398 #endif
1400 /* convert stack entry to register and duplicate its value in another
1401 register */
1402 static void gv_dup(void)
1404 int rc, t, r, r1;
1405 SValue sv;
1407 t = vtop->type.t;
1408 #if !defined(TCC_TARGET_X86_64) && !defined(TCC_TARGET_ARM64)
1409 if ((t & VT_BTYPE) == VT_LLONG) {
1410 lexpand();
1411 gv_dup();
1412 vswap();
1413 vrotb(3);
1414 gv_dup();
1415 vrotb(4);
1416 /* stack: H L L1 H1 */
1417 lbuild(t);
1418 vrotb(3);
1419 vrotb(3);
1420 vswap();
1421 lbuild(t);
1422 vswap();
1423 } else
1424 #endif
1426 /* duplicate value */
1427 rc = RC_INT;
1428 sv.type.t = VT_INT;
1429 if (is_float(t)) {
1430 rc = RC_FLOAT;
1431 #ifdef TCC_TARGET_X86_64
1432 if ((t & VT_BTYPE) == VT_LDOUBLE) {
1433 rc = RC_ST0;
1435 #endif
1436 sv.type.t = t;
1438 r = gv(rc);
1439 r1 = get_reg(rc);
1440 sv.r = r;
1441 sv.c.i = 0;
1442 load(r1, &sv); /* move r to r1 */
1443 vdup();
1444 /* duplicates value */
1445 if (r != r1)
1446 vtop->r = r1;
1450 /* Generate value test
1452 * Generate a test for any value (jump, comparison and integers) */
1453 ST_FUNC int gvtst(int inv, int t)
1455 int v = vtop->r & VT_VALMASK;
1456 if (v != VT_CMP && v != VT_JMP && v != VT_JMPI) {
1457 vpushi(0);
1458 gen_op(TOK_NE);
1460 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1461 /* constant jmp optimization */
1462 if ((vtop->c.i != 0) != inv)
1463 t = gjmp(t);
1464 vtop--;
1465 return t;
1467 return gtst(inv, t);
1470 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1471 /* generate CPU independent (unsigned) long long operations */
1472 static void gen_opl(int op)
1474 int t, a, b, op1, c, i;
1475 int func;
1476 unsigned short reg_iret = REG_IRET;
1477 unsigned short reg_lret = REG_LRET;
1478 SValue tmp;
1480 switch(op) {
1481 case '/':
1482 case TOK_PDIV:
1483 func = TOK___divdi3;
1484 goto gen_func;
1485 case TOK_UDIV:
1486 func = TOK___udivdi3;
1487 goto gen_func;
1488 case '%':
1489 func = TOK___moddi3;
1490 goto gen_mod_func;
1491 case TOK_UMOD:
1492 func = TOK___umoddi3;
1493 gen_mod_func:
1494 #ifdef TCC_ARM_EABI
1495 reg_iret = TREG_R2;
1496 reg_lret = TREG_R3;
1497 #endif
1498 gen_func:
1499 /* call generic long long function */
1500 vpush_global_sym(&func_old_type, func);
1501 vrott(3);
1502 gfunc_call(2);
1503 vpushi(0);
1504 vtop->r = reg_iret;
1505 vtop->r2 = reg_lret;
1506 break;
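/* e.g. on a 32-bit target a signed 64-bit '/' becomes a call to
   __divdi3(a, b); the 64-bit result is picked up from the
   REG_IRET/REG_LRET register pair (R2/R3 for the EABI modulo
   helpers, as set up above). */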
1507 case '^':
1508 case '&':
1509 case '|':
1510 case '*':
1511 case '+':
1512 case '-':
1513 //pv("gen_opl A",0,2);
1514 t = vtop->type.t;
1515 vswap();
1516 lexpand();
1517 vrotb(3);
1518 lexpand();
1519 /* stack: L1 H1 L2 H2 */
1520 tmp = vtop[0];
1521 vtop[0] = vtop[-3];
1522 vtop[-3] = tmp;
1523 tmp = vtop[-2];
1524 vtop[-2] = vtop[-3];
1525 vtop[-3] = tmp;
1526 vswap();
1527 /* stack: H1 H2 L1 L2 */
1528 //pv("gen_opl B",0,4);
1529 if (op == '*') {
1530 vpushv(vtop - 1);
1531 vpushv(vtop - 1);
1532 gen_op(TOK_UMULL);
1533 lexpand();
1534 /* stack: H1 H2 L1 L2 ML MH */
1535 for(i=0;i<4;i++)
1536 vrotb(6);
1537 /* stack: ML MH H1 H2 L1 L2 */
1538 tmp = vtop[0];
1539 vtop[0] = vtop[-2];
1540 vtop[-2] = tmp;
1541 /* stack: ML MH H1 L2 H2 L1 */
1542 gen_op('*');
1543 vrotb(3);
1544 vrotb(3);
1545 gen_op('*');
1546 /* stack: ML MH M1 M2 */
1547 gen_op('+');
1548 gen_op('+');
1549 } else if (op == '+' || op == '-') {
1550 /* XXX: add non carry method too (for MIPS or alpha) */
1551 if (op == '+')
1552 op1 = TOK_ADDC1;
1553 else
1554 op1 = TOK_SUBC1;
1555 gen_op(op1);
1556 /* stack: H1 H2 (L1 op L2) */
1557 vrotb(3);
1558 vrotb(3);
1559 gen_op(op1 + 1); /* TOK_xxxC2 */
1560 } else {
1561 gen_op(op);
1562 /* stack: H1 H2 (L1 op L2) */
1563 vrotb(3);
1564 vrotb(3);
1565 /* stack: (L1 op L2) H1 H2 */
1566 gen_op(op);
1567 /* stack: (L1 op L2) (H1 op H2) */
1569 /* stack: L H */
1570 lbuild(t);
1571 break;
1572 case TOK_SAR:
1573 case TOK_SHR:
1574 case TOK_SHL:
1575 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1576 t = vtop[-1].type.t;
1577 vswap();
1578 lexpand();
1579 vrotb(3);
1580 /* stack: L H shift */
1581 c = (int)vtop->c.i;
1582 /* constant: simpler */
1583 /* NOTE: all comments are for SHL. The other cases are
1584 done by swapping words */
1585 vpop();
1586 if (op != TOK_SHL)
1587 vswap();
1588 if (c >= 32) {
1589 /* stack: L H */
1590 vpop();
1591 if (c > 32) {
1592 vpushi(c - 32);
1593 gen_op(op);
1595 if (op != TOK_SAR) {
1596 vpushi(0);
1597 } else {
1598 gv_dup();
1599 vpushi(31);
1600 gen_op(TOK_SAR);
1602 vswap();
1603 } else {
1604 vswap();
1605 gv_dup();
1606 /* stack: H L L */
1607 vpushi(c);
1608 gen_op(op);
1609 vswap();
1610 vpushi(32 - c);
1611 if (op == TOK_SHL)
1612 gen_op(TOK_SHR);
1613 else
1614 gen_op(TOK_SHL);
1615 vrotb(3);
1616 /* stack: L L H */
1617 vpushi(c);
1618 if (op == TOK_SHL)
1619 gen_op(TOK_SHL);
1620 else
1621 gen_op(TOK_SHR);
1622 gen_op('|');
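/* i.e. for SHL with 0 < c < 32:  low = L << c and
   high = (H << c) | (L >> (32 - c)); SHR/SAR follow the same
   scheme with the words swapped, per the vswap()s above. */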
1624 if (op != TOK_SHL)
1625 vswap();
1626 lbuild(t);
1627 } else {
1628 /* XXX: should provide a faster fallback on x86 ? */
1629 switch(op) {
1630 case TOK_SAR:
1631 func = TOK___ashrdi3;
1632 goto gen_func;
1633 case TOK_SHR:
1634 func = TOK___lshrdi3;
1635 goto gen_func;
1636 case TOK_SHL:
1637 func = TOK___ashldi3;
1638 goto gen_func;
1641 break;
1642 default:
1643 /* compare operations */
1644 t = vtop->type.t;
1645 vswap();
1646 lexpand();
1647 vrotb(3);
1648 lexpand();
1649 /* stack: L1 H1 L2 H2 */
1650 tmp = vtop[-1];
1651 vtop[-1] = vtop[-2];
1652 vtop[-2] = tmp;
1653 /* stack: L1 L2 H1 H2 */
1654 /* compare high */
1655 op1 = op;
1656 /* when values are equal, we need to compare low words. since
1657 the jump is inverted, we invert the test too. */
1658 if (op1 == TOK_LT)
1659 op1 = TOK_LE;
1660 else if (op1 == TOK_GT)
1661 op1 = TOK_GE;
1662 else if (op1 == TOK_ULT)
1663 op1 = TOK_ULE;
1664 else if (op1 == TOK_UGT)
1665 op1 = TOK_UGE;
1666 a = 0;
1667 b = 0;
1668 gen_op(op1);
1669 if (op == TOK_NE) {
1670 b = gvtst(0, 0);
1671 } else {
1672 a = gvtst(1, 0);
1673 if (op != TOK_EQ) {
1674 /* generate non equal test */
1675 vpushi(TOK_NE);
1676 vtop->r = VT_CMP;
1677 b = gvtst(0, 0);
1680 /* compare low. Always unsigned */
1681 op1 = op;
1682 if (op1 == TOK_LT)
1683 op1 = TOK_ULT;
1684 else if (op1 == TOK_LE)
1685 op1 = TOK_ULE;
1686 else if (op1 == TOK_GT)
1687 op1 = TOK_UGT;
1688 else if (op1 == TOK_GE)
1689 op1 = TOK_UGE;
1690 gen_op(op1);
1691 a = gvtst(1, a);
1692 gsym(b);
1693 vseti(VT_JMPI, a);
1694 break;
1697 #endif
1699 static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
1701 uint64_t x = (a >> 63 ? -a : a) / (b >> 63 ? -b : b);
1702 return (a ^ b) >> 63 ? -x : x;
1705 static int gen_opic_lt(uint64_t a, uint64_t b)
1707 return (a ^ (uint64_t)1 << 63) < (b ^ (uint64_t)1 << 63);
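/* Note (added): gen_opic_sdiv emulates signed 64-bit division on unsigned
   operands by dividing the magnitudes and restoring the sign from a ^ b;
   gen_opic_lt flips the sign bit of both operands so that an unsigned
   compare orders them as signed values, e.g. -1 < 0 becomes
   0x7fffffffffffffff < 0x8000000000000000. */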
1710 /* handle integer constant optimizations and various machine
1711 independent opt */
1712 static void gen_opic(int op)
1714 SValue *v1 = vtop - 1;
1715 SValue *v2 = vtop;
1716 int t1 = v1->type.t & VT_BTYPE;
1717 int t2 = v2->type.t & VT_BTYPE;
1718 int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1719 int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1720 uint64_t l1 = c1 ? v1->c.i : 0;
1721 uint64_t l2 = c2 ? v2->c.i : 0;
1722 int shm = (t1 == VT_LLONG) ? 63 : 31;
1724 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
1725 l1 = ((uint32_t)l1 |
1726 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
1727 if (t2 != VT_LLONG && (PTR_SIZE != 8 || t2 != VT_PTR))
1728 l2 = ((uint32_t)l2 |
1729 (v2->type.t & VT_UNSIGNED ? 0 : -(l2 & 0x80000000)));
1731 if (c1 && c2) {
1732 switch(op) {
1733 case '+': l1 += l2; break;
1734 case '-': l1 -= l2; break;
1735 case '&': l1 &= l2; break;
1736 case '^': l1 ^= l2; break;
1737 case '|': l1 |= l2; break;
1738 case '*': l1 *= l2; break;
1740 case TOK_PDIV:
1741 case '/':
1742 case '%':
1743 case TOK_UDIV:
1744 case TOK_UMOD:
1745 /* if division by zero, generate explicit division */
1746 if (l2 == 0) {
1747 if (const_wanted)
1748 tcc_error("division by zero in constant");
1749 goto general_case;
1751 switch(op) {
1752 default: l1 = gen_opic_sdiv(l1, l2); break;
1753 case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
1754 case TOK_UDIV: l1 = l1 / l2; break;
1755 case TOK_UMOD: l1 = l1 % l2; break;
1757 break;
1758 case TOK_SHL: l1 <<= (l2 & shm); break;
1759 case TOK_SHR: l1 >>= (l2 & shm); break;
1760 case TOK_SAR:
1761 l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
1762 break;
1763 /* tests */
1764 case TOK_ULT: l1 = l1 < l2; break;
1765 case TOK_UGE: l1 = l1 >= l2; break;
1766 case TOK_EQ: l1 = l1 == l2; break;
1767 case TOK_NE: l1 = l1 != l2; break;
1768 case TOK_ULE: l1 = l1 <= l2; break;
1769 case TOK_UGT: l1 = l1 > l2; break;
1770 case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
1771 case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
1772 case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
1773 case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
1774 /* logical */
1775 case TOK_LAND: l1 = l1 && l2; break;
1776 case TOK_LOR: l1 = l1 || l2; break;
1777 default:
1778 goto general_case;
1780 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
1781 l1 = ((uint32_t)l1 |
1782 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
1783 v1->c.i = l1;
1784 vtop--;
1785 } else {
1786 /* if commutative ops, put c2 as constant */
1787 if (c1 && (op == '+' || op == '&' || op == '^' ||
1788 op == '|' || op == '*')) {
1789 vswap();
1790 c2 = c1; //c = c1, c1 = c2, c2 = c;
1791 l2 = l1; //l = l1, l1 = l2, l2 = l;
1793 if (!const_wanted &&
1794 c1 && ((l1 == 0 &&
1795 (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
1796 (l1 == -1 && op == TOK_SAR))) {
1797 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
1798 vtop--;
1799 } else if (!const_wanted &&
1800 c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
1801 (l2 == -1 && op == '|') ||
1802 (l2 == 0xffffffff && t2 != VT_LLONG && op == '|') ||
1803 (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
1804 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
1805 if (l2 == 1)
1806 vtop->c.i = 0;
1807 vswap();
1808 vtop--;
1809 } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
1810 op == TOK_PDIV) &&
1811 l2 == 1) ||
1812 ((op == '+' || op == '-' || op == '|' || op == '^' ||
1813 op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
1814 l2 == 0) ||
1815 (op == '&' &&
1816 l2 == -1))) {
1817 /* filter out NOP operations like x*1, x-0, x&-1... */
1818 vtop--;
1819 } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
1820 /* try to use shifts instead of muls or divs */
1821 if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
1822 int n = -1;
1823 while (l2) {
1824 l2 >>= 1;
1825 n++;
1827 vtop->c.i = n;
1828 if (op == '*')
1829 op = TOK_SHL;
1830 else if (op == TOK_PDIV)
1831 op = TOK_SAR;
1832 else
1833 op = TOK_SHR;
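/* e.g. x * 8 becomes x << 3 and an unsigned division by 8 becomes a
   logical shift right by 3.  TOK_PDIV (pointer-difference scaling) may
   use an arithmetic shift because the dividend is an exact multiple of
   the element size; plain signed '/' is not converted since a shift
   would round toward minus infinity. */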
1835 goto general_case;
1836 } else if (c2 && (op == '+' || op == '-') &&
1837 (((vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM))
1838 || (vtop[-1].r & (VT_VALMASK | VT_LVAL)) == VT_LOCAL)) {
1839 /* symbol + constant case */
1840 if (op == '-')
1841 l2 = -l2;
1842 l2 += vtop[-1].c.i;
1843 /* The backends can't always deal with addends to symbols
1844 larger than +-1<<31. Don't construct such. */
1845 if ((int)l2 != l2)
1846 goto general_case;
1847 vtop--;
1848 vtop->c.i = l2;
1849 } else {
1850 general_case:
1851 /* call low level op generator */
1852 if (t1 == VT_LLONG || t2 == VT_LLONG ||
1853 (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
1854 gen_opl(op);
1855 else
1856 gen_opi(op);
1861 /* generate a floating point operation with constant propagation */
1862 static void gen_opif(int op)
1864 int c1, c2;
1865 SValue *v1, *v2;
1866 long double f1, f2;
1868 v1 = vtop - 1;
1869 v2 = vtop;
1870 /* currently, we cannot do computations with forward symbols */
1871 c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1872 c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1873 if (c1 && c2) {
1874 if (v1->type.t == VT_FLOAT) {
1875 f1 = v1->c.f;
1876 f2 = v2->c.f;
1877 } else if (v1->type.t == VT_DOUBLE) {
1878 f1 = v1->c.d;
1879 f2 = v2->c.d;
1880 } else {
1881 f1 = v1->c.ld;
1882 f2 = v2->c.ld;
1885 /* NOTE: we only do constant propagation if finite number (not
1886 NaN or infinity) (ANSI spec) */
1887 if (!ieee_finite(f1) || !ieee_finite(f2))
1888 goto general_case;
1890 switch(op) {
1891 case '+': f1 += f2; break;
1892 case '-': f1 -= f2; break;
1893 case '*': f1 *= f2; break;
1894 case '/':
1895 if (f2 == 0.0) {
1896 if (const_wanted)
1897 tcc_error("division by zero in constant");
1898 goto general_case;
1900 f1 /= f2;
1901 break;
1902 /* XXX: also handles tests ? */
1903 default:
1904 goto general_case;
1906 /* XXX: overflow test ? */
1907 if (v1->type.t == VT_FLOAT) {
1908 v1->c.f = f1;
1909 } else if (v1->type.t == VT_DOUBLE) {
1910 v1->c.d = f1;
1911 } else {
1912 v1->c.ld = f1;
1914 vtop--;
1915 } else {
1916 general_case:
1917 gen_opf(op);
1921 static int pointed_size(CType *type)
1923 int align;
1924 return type_size(pointed_type(type), &align);
1927 static void vla_runtime_pointed_size(CType *type)
1929 int align;
1930 vla_runtime_type_size(pointed_type(type), &align);
1933 static inline int is_null_pointer(SValue *p)
1935 if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
1936 return 0;
1937 return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
1938 ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
1939 ((p->type.t & VT_BTYPE) == VT_PTR &&
1940 (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0));
1943 static inline int is_integer_btype(int bt)
1945 return (bt == VT_BYTE || bt == VT_SHORT ||
1946 bt == VT_INT || bt == VT_LLONG);
1949 /* check types for comparison or subtraction of pointers */
1950 static void check_comparison_pointer_types(SValue *p1, SValue *p2, int op)
1952 CType *type1, *type2, tmp_type1, tmp_type2;
1953 int bt1, bt2;
1955 /* null pointers are accepted for all comparisons as gcc */
1956 if (is_null_pointer(p1) || is_null_pointer(p2))
1957 return;
1958 type1 = &p1->type;
1959 type2 = &p2->type;
1960 bt1 = type1->t & VT_BTYPE;
1961 bt2 = type2->t & VT_BTYPE;
1962 /* accept comparison between pointer and integer with a warning */
1963 if ((is_integer_btype(bt1) || is_integer_btype(bt2)) && op != '-') {
1964 if (op != TOK_LOR && op != TOK_LAND )
1965 tcc_warning("comparison between pointer and integer");
1966 return;
1969 /* both must be pointers or implicit function pointers */
1970 if (bt1 == VT_PTR) {
1971 type1 = pointed_type(type1);
1972 } else if (bt1 != VT_FUNC)
1973 goto invalid_operands;
1975 if (bt2 == VT_PTR) {
1976 type2 = pointed_type(type2);
1977 } else if (bt2 != VT_FUNC) {
1978 invalid_operands:
1979 tcc_error("invalid operands to binary %s", get_tok_str(op, NULL));
1981 if ((type1->t & VT_BTYPE) == VT_VOID ||
1982 (type2->t & VT_BTYPE) == VT_VOID)
1983 return;
1984 tmp_type1 = *type1;
1985 tmp_type2 = *type2;
1986 tmp_type1.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
1987 tmp_type2.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
1988 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
1989 /* gcc-like error if '-' is used */
1990 if (op == '-')
1991 goto invalid_operands;
1992 else
1993 tcc_warning("comparison of distinct pointer types lacks a cast");
1997 /* generic gen_op: handles types problems */
1998 ST_FUNC void gen_op(int op)
2000 int u, t1, t2, bt1, bt2, t;
2001 CType type1;
2003 redo:
2004 t1 = vtop[-1].type.t;
2005 t2 = vtop[0].type.t;
2006 bt1 = t1 & VT_BTYPE;
2007 bt2 = t2 & VT_BTYPE;
2009 if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
2010 tcc_error("operation on a struct");
2011 } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
2012 if (bt2 == VT_FUNC) {
2013 mk_pointer(&vtop->type);
2014 gaddrof();
2016 if (bt1 == VT_FUNC) {
2017 vswap();
2018 mk_pointer(&vtop->type);
2019 gaddrof();
2020 vswap();
2022 goto redo;
2023 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
2024 /* at least one operand is a pointer */
2025 /* relational op: both operands must be pointers */
2026 if (op >= TOK_ULT && op <= TOK_LOR) {
2027 check_comparison_pointer_types(vtop - 1, vtop, op);
2028 /* pointers are handled as unsigned */
2029 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2030 t = VT_LLONG | VT_UNSIGNED;
2031 #else
2032 t = VT_INT | VT_UNSIGNED;
2033 #endif
2034 goto std_op;
2036 /* if both pointers, then it must be the '-' op */
2037 if (bt1 == VT_PTR && bt2 == VT_PTR) {
2038 if (op != '-')
2039 tcc_error("cannot use pointers here");
2040 check_comparison_pointer_types(vtop - 1, vtop, op);
2041 /* XXX: check that types are compatible */
2042 if (vtop[-1].type.t & VT_VLA) {
2043 vla_runtime_pointed_size(&vtop[-1].type);
2044 } else {
2045 vpushi(pointed_size(&vtop[-1].type));
2047 vrott(3);
2048 gen_opic(op);
2049 /* set to integer type */
2050 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2051 vtop->type.t = VT_LLONG;
2052 #else
2053 vtop->type.t = VT_INT;
2054 #endif
2055 vswap();
2056 gen_op(TOK_PDIV);
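/* i.e. p1 - p2 is compiled as (byte difference) / sizeof(*p1), using
   TOK_PDIV so that, for power-of-two element sizes, the division can be
   done with a shift (the difference is an exact multiple of the element
   size). */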
2057 } else {
2058 /* exactly one pointer : must be '+' or '-'. */
2059 if (op != '-' && op != '+')
2060 tcc_error("cannot use pointers here");
2061 /* Put pointer as first operand */
2062 if (bt2 == VT_PTR) {
2063 vswap();
2064 t = t1, t1 = t2, t2 = t;
2066 #if PTR_SIZE == 4
2067 if ((vtop[0].type.t & VT_BTYPE) == VT_LLONG)
2068 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2069 gen_cast(&int_type);
2070 #endif
2071 type1 = vtop[-1].type;
2072 type1.t &= ~VT_ARRAY;
2073 if (vtop[-1].type.t & VT_VLA)
2074 vla_runtime_pointed_size(&vtop[-1].type);
2075 else {
2076 u = pointed_size(&vtop[-1].type);
2077 if (u < 0)
2078 tcc_error("unknown array element size");
2079 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2080 vpushll(u);
2081 #else
2082 /* XXX: cast to int ? (long long case) */
2083 vpushi(u);
2084 #endif
2086 gen_op('*');
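/* i.e. p + n is compiled as p + n * sizeof(*p); for a VLA element type
   the size pushed above is computed at run time by
   vla_runtime_pointed_size(). */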
2087 #if 0
2088 /* #ifdef CONFIG_TCC_BCHECK
2089 The main reason for removing this code:
2090 #include <stdio.h>
2091 int main ()
2093 int v[10];
2094 int i = 10;
2095 int j = 9;
2096 fprintf(stderr, "v+i-j = %p\n", v+i-j);
2097 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
2099 When this code is on, the output looks like
2100 v+i-j = 0xfffffffe
2101 v+(i-j) = 0xbff84000
2103 /* if evaluating constant expression, no code should be
2104 generated, so no bound check */
2105 if (tcc_state->do_bounds_check && !const_wanted) {
2106 /* if bounded pointers, we generate a special code to
2107 test bounds */
2108 if (op == '-') {
2109 vpushi(0);
2110 vswap();
2111 gen_op('-');
2113 gen_bounded_ptr_add();
2114 } else
2115 #endif
2117 gen_opic(op);
2119 /* restore the type in case gen_opic() swapped the operands */
2120 vtop->type = type1;
2122 } else if (is_float(bt1) || is_float(bt2)) {
2123 /* compute bigger type and do implicit casts */
2124 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
2125 t = VT_LDOUBLE;
2126 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
2127 t = VT_DOUBLE;
2128 } else {
2129 t = VT_FLOAT;
2131 /* floats can only be used for a few operations */
2132 if (op != '+' && op != '-' && op != '*' && op != '/' &&
2133 (op < TOK_ULT || op > TOK_GT))
2134 tcc_error("invalid operands for binary operation");
2135 goto std_op;
2136 } else if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL) {
2137 t = bt1 == VT_LLONG ? VT_LLONG : VT_INT;
2138 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (t | VT_UNSIGNED))
2139 t |= VT_UNSIGNED;
2140 goto std_op;
2141 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
2142 /* cast to biggest op */
2143 t = VT_LLONG;
2144 /* convert to unsigned if it does not fit in a long long */
2145 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED) ||
2146 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED))
2147 t |= VT_UNSIGNED;
2148 goto std_op;
2149 } else {
2150 /* integer operations */
2151 t = VT_INT;
2152 /* convert to unsigned if it does not fit in an integer */
2153 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED) ||
2154 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED))
2155 t |= VT_UNSIGNED;
2156 std_op:
2157 /* XXX: currently, some unsigned operations are explicit, so
2158 we modify them here */
2159 if (t & VT_UNSIGNED) {
2160 if (op == TOK_SAR)
2161 op = TOK_SHR;
2162 else if (op == '/')
2163 op = TOK_UDIV;
2164 else if (op == '%')
2165 op = TOK_UMOD;
2166 else if (op == TOK_LT)
2167 op = TOK_ULT;
2168 else if (op == TOK_GT)
2169 op = TOK_UGT;
2170 else if (op == TOK_LE)
2171 op = TOK_ULE;
2172 else if (op == TOK_GE)
2173 op = TOK_UGE;
2175 vswap();
2176 type1.t = t;
2177 gen_cast(&type1);
2178 vswap();
2179 /* special case for shifts and long long: we keep the shift as
2180 an integer */
2181 if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
2182 type1.t = VT_INT;
2183 gen_cast(&type1);
2184 if (is_float(t))
2185 gen_opif(op);
2186 else
2187 gen_opic(op);
2188 if (op >= TOK_ULT && op <= TOK_GT) {
2189 /* relational op: the result is an int */
2190 vtop->type.t = VT_INT;
2191 } else {
2192 vtop->type.t = t;
2195 // Make sure that we have converted to an rvalue:
2196 if (vtop->r & VT_LVAL)
2197 gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
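/* A small illustration of the pointer arithmetic handled above: the
   integer operand is scaled by the pointed-to size before the addition,
   and relational operators always produce a plain int result:

       int v[10], *p = v;
       p = p + 3;                  // 3 is first multiplied by sizeof(int)
       int in_range = p < v + 10;  // comparison result has type int
*/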
2200 #ifndef TCC_TARGET_ARM
2201 /* generic itof for unsigned long long case */
2202 static void gen_cvt_itof1(int t)
2204 #ifdef TCC_TARGET_ARM64
2205 gen_cvt_itof(t);
2206 #else
2207 if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
2208 (VT_LLONG | VT_UNSIGNED)) {
2210 if (t == VT_FLOAT)
2211 vpush_global_sym(&func_old_type, TOK___floatundisf);
2212 #if LDOUBLE_SIZE != 8
2213 else if (t == VT_LDOUBLE)
2214 vpush_global_sym(&func_old_type, TOK___floatundixf);
2215 #endif
2216 else
2217 vpush_global_sym(&func_old_type, TOK___floatundidf);
2218 vrott(2);
2219 gfunc_call(1);
2220 vpushi(0);
2221 vtop->r = reg_fret(t);
2222 } else {
2223 gen_cvt_itof(t);
2225 #endif
2227 #endif
2229 /* generic ftoi for unsigned long long case */
2230 static void gen_cvt_ftoi1(int t)
2232 #ifdef TCC_TARGET_ARM64
2233 gen_cvt_ftoi(t);
2234 #else
2235 int st;
2237 if (t == (VT_LLONG | VT_UNSIGNED)) {
2238 /* not handled natively */
2239 st = vtop->type.t & VT_BTYPE;
2240 if (st == VT_FLOAT)
2241 vpush_global_sym(&func_old_type, TOK___fixunssfdi);
2242 #if LDOUBLE_SIZE != 8
2243 else if (st == VT_LDOUBLE)
2244 vpush_global_sym(&func_old_type, TOK___fixunsxfdi);
2245 #endif
2246 else
2247 vpush_global_sym(&func_old_type, TOK___fixunsdfdi);
2248 vrott(2);
2249 gfunc_call(1);
2250 vpushi(0);
2251 vtop->r = REG_IRET;
2252 vtop->r2 = REG_LRET;
2253 } else {
2254 gen_cvt_ftoi(t);
2256 #endif
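/* Both helpers above exist because several targets have no single native
   instruction for unsigned 64-bit <-> floating point conversion; in that
   case a call to one of the support routines named above (e.g.
   __floatundidf, __fixunsdfdi, typically provided by the runtime support
   library) is emitted instead:

       unsigned long long u = 1ULL << 63;
       double d = (double)u;                          // may call __floatundidf
       unsigned long long b = (unsigned long long)d;  // may call __fixunsdfdi
*/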
2259 /* force char or short cast */
2260 static void force_charshort_cast(int t)
2262 int bits, dbt;
2263 dbt = t & VT_BTYPE;
2264 /* XXX: add optimization if lvalue : just change type and offset */
2265 if (dbt == VT_BYTE)
2266 bits = 8;
2267 else
2268 bits = 16;
2269 if (t & VT_UNSIGNED) {
2270 vpushi((1 << bits) - 1);
2271 gen_op('&');
2272 } else {
2273 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
2274 bits = 64 - bits;
2275 else
2276 bits = 32 - bits;
2277 vpushi(bits);
2278 gen_op(TOK_SHL);
2279 /* the result must be signed, or the SAR would be converted to an SHL.
2280 This was not the case when "t" was a signed short
2281 and the last value on the stack was an unsigned int */
2282 vtop->type.t &= ~VT_UNSIGNED;
2283 vpushi(bits);
2284 gen_op(TOK_SAR);
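/* For example, truncating the value 0x1234F0 to a char-sized result:
   the unsigned path masks, the signed path sign-extends via SHL/SAR:

       0x1234F0 & 0xff                  == 0xF0   (unsigned char: 240)
       (0x1234F0 << 24) >> 24 (signed)  == -16    (signed char)
*/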
2288 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2289 static void gen_cast(CType *type)
2291 int sbt, dbt, sf, df, c, p;
2293 /* special delayed cast for char/short */
2294 /* XXX: in some cases (multiple cascaded casts), it may still
2295 be incorrect */
2296 if (vtop->r & VT_MUSTCAST) {
2297 vtop->r &= ~VT_MUSTCAST;
2298 force_charshort_cast(vtop->type.t);
2301 /* bitfields first get cast to ints */
2302 if (vtop->type.t & VT_BITFIELD) {
2303 gv(RC_INT);
2306 dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
2307 sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
2309 if (sbt != dbt) {
2310 sf = is_float(sbt);
2311 df = is_float(dbt);
2312 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2313 p = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM);
2314 if (c) {
2315 /* constant case: we can do it now */
2316 /* XXX: in ISOC, cannot do it if error in convert */
2317 if (sbt == VT_FLOAT)
2318 vtop->c.ld = vtop->c.f;
2319 else if (sbt == VT_DOUBLE)
2320 vtop->c.ld = vtop->c.d;
2322 if (df) {
2323 if ((sbt & VT_BTYPE) == VT_LLONG) {
2324 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
2325 vtop->c.ld = vtop->c.i;
2326 else
2327 vtop->c.ld = -(long double)-vtop->c.i;
2328 } else if(!sf) {
2329 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
2330 vtop->c.ld = (uint32_t)vtop->c.i;
2331 else
2332 vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
2335 if (dbt == VT_FLOAT)
2336 vtop->c.f = (float)vtop->c.ld;
2337 else if (dbt == VT_DOUBLE)
2338 vtop->c.d = (double)vtop->c.ld;
2339 } else if (sf && dbt == (VT_LLONG|VT_UNSIGNED)) {
2340 vtop->c.i = vtop->c.ld;
2341 } else if (sf && dbt == VT_BOOL) {
2342 vtop->c.i = (vtop->c.ld != 0);
2343 } else {
2344 if(sf)
2345 vtop->c.i = vtop->c.ld;
2346 else if (sbt == (VT_LLONG|VT_UNSIGNED))
2348 else if (sbt & VT_UNSIGNED)
2349 vtop->c.i = (uint32_t)vtop->c.i;
2350 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2351 else if (sbt == VT_PTR)
2353 #endif
2354 else if (sbt != VT_LLONG)
2355 vtop->c.i = ((uint32_t)vtop->c.i |
2356 -(vtop->c.i & 0x80000000));
2358 if (dbt == (VT_LLONG|VT_UNSIGNED))
2360 else if (dbt == VT_BOOL)
2361 vtop->c.i = (vtop->c.i != 0);
2362 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2363 else if (dbt == VT_PTR)
2365 #endif
2366 else if (dbt != VT_LLONG) {
2367 uint32_t m = ((dbt & VT_BTYPE) == VT_BYTE ? 0xff :
2368 (dbt & VT_BTYPE) == VT_SHORT ? 0xffff :
2369 0xffffffff);
2370 vtop->c.i &= m;
2371 if (!(dbt & VT_UNSIGNED))
2372 vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
2375 } else if (p && dbt == VT_BOOL) {
2376 vtop->r = VT_CONST;
2377 vtop->c.i = 1;
2378 } else {
2379 /* non constant case: generate code */
2380 if (sf && df) {
2381 /* convert from fp to fp */
2382 gen_cvt_ftof(dbt);
2383 } else if (df) {
2384 /* convert int to fp */
2385 gen_cvt_itof1(dbt);
2386 } else if (sf) {
2387 /* convert fp to int */
2388 if (dbt == VT_BOOL) {
2389 vpushi(0);
2390 gen_op(TOK_NE);
2391 } else {
2392 if (sbt == VT_FLOAT) {
2393 /* cast to DOUBLE to avoid precision loss */
2394 gen_cvt_ftof(VT_DOUBLE);
2395 vtop->type.t = (vtop->type.t & ~VT_BTYPE) | VT_DOUBLE;
2397 /* we handle char/short/etc... with generic code */
2398 if (dbt != (VT_INT | VT_UNSIGNED) &&
2399 dbt != (VT_LLONG | VT_UNSIGNED) &&
2400 dbt != VT_LLONG)
2401 dbt = VT_INT;
2402 gen_cvt_ftoi1(dbt);
2403 if (dbt == VT_INT && (type->t & (VT_BTYPE | VT_UNSIGNED)) != dbt) {
2404 /* additional cast for char/short... */
2405 vtop->type.t = dbt;
2406 gen_cast(type);
2409 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
2410 } else if ((dbt & VT_BTYPE) == VT_LLONG) {
2411 if ((sbt & VT_BTYPE) != VT_LLONG) {
2412 /* scalar to long long */
2413 /* machine independent conversion */
2414 gv(RC_INT);
2415 /* generate high word */
2416 if (sbt == (VT_INT | VT_UNSIGNED)) {
2417 vpushi(0);
2418 gv(RC_INT);
2419 } else {
2420 if (sbt == VT_PTR) {
2421 /* cast from pointer to int before we apply
2422 the shift operation, which pointers don't support */
2423 gen_cast(&int_type);
2425 gv_dup();
2426 vpushi(31);
2427 gen_op(TOK_SAR);
2429 /* patch second register */
2430 vtop[-1].r2 = vtop->r;
2431 vpop();
2433 #else
2434 } else if ((dbt & VT_BTYPE) == VT_LLONG ||
2435 (dbt & VT_BTYPE) == VT_PTR ||
2436 (dbt & VT_BTYPE) == VT_FUNC) {
2437 if ((sbt & VT_BTYPE) != VT_LLONG &&
2438 (sbt & VT_BTYPE) != VT_PTR &&
2439 (sbt & VT_BTYPE) != VT_FUNC) {
2440 /* need to convert from 32bit to 64bit */
2441 gv(RC_INT);
2442 if (sbt != (VT_INT | VT_UNSIGNED)) {
2443 #if defined(TCC_TARGET_ARM64)
2444 gen_cvt_sxtw();
2445 #elif defined(TCC_TARGET_X86_64)
2446 int r = gv(RC_INT);
2447 /* x86_64 specific: movslq */
2448 o(0x6348);
2449 o(0xc0 + (REG_VALUE(r) << 3) + REG_VALUE(r));
2450 #else
2451 #error
2452 #endif
2455 #endif
2456 } else if (dbt == VT_BOOL) {
2457 /* scalar to bool */
2458 vpushi(0);
2459 gen_op(TOK_NE);
2460 } else if ((dbt & VT_BTYPE) == VT_BYTE ||
2461 (dbt & VT_BTYPE) == VT_SHORT) {
2462 if (sbt == VT_PTR) {
2463 vtop->type.t = VT_INT;
2464 tcc_warning("nonportable conversion from pointer to char/short");
2466 force_charshort_cast(dbt);
2467 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
2468 } else if ((dbt & VT_BTYPE) == VT_INT) {
2469 /* scalar to int */
2470 if ((sbt & VT_BTYPE) == VT_LLONG) {
2471 /* from long long: just take low order word */
2472 lexpand();
2473 vpop();
2475 /* if lvalue and single word type, nothing to do because
2476 the lvalue already contains the real type size (see
2477 VT_LVAL_xxx constants) */
2478 #endif
2481 } else if ((dbt & VT_BTYPE) == VT_PTR && !(vtop->r & VT_LVAL)) {
2482 /* if we are casting between pointer types,
2483 we must update the VT_LVAL_xxx size */
2484 vtop->r = (vtop->r & ~VT_LVAL_TYPE)
2485 | (lvalue_type(type->ref->type.t) & VT_LVAL_TYPE);
2487 vtop->type = *type;
2490 /* return type size as known at compile time. Put alignment at 'a' */
2491 ST_FUNC int type_size(CType *type, int *a)
2493 Sym *s;
2494 int bt;
2496 bt = type->t & VT_BTYPE;
2497 if (bt == VT_STRUCT) {
2498 /* struct/union */
2499 s = type->ref;
2500 *a = s->r;
2501 return s->c;
2502 } else if (bt == VT_PTR) {
2503 if (type->t & VT_ARRAY) {
2504 int ts;
2506 s = type->ref;
2507 ts = type_size(&s->type, a);
2509 if (ts < 0 && s->c < 0)
2510 ts = -ts;
2512 return ts * s->c;
2513 } else {
2514 *a = PTR_SIZE;
2515 return PTR_SIZE;
2517 } else if (bt == VT_LDOUBLE) {
2518 *a = LDOUBLE_ALIGN;
2519 return LDOUBLE_SIZE;
2520 } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
2521 #ifdef TCC_TARGET_I386
2522 #ifdef TCC_TARGET_PE
2523 *a = 8;
2524 #else
2525 *a = 4;
2526 #endif
2527 #elif defined(TCC_TARGET_ARM)
2528 #ifdef TCC_ARM_EABI
2529 *a = 8;
2530 #else
2531 *a = 4;
2532 #endif
2533 #else
2534 *a = 8;
2535 #endif
2536 return 8;
2537 } else if (bt == VT_INT || bt == VT_FLOAT) {
2538 *a = 4;
2539 return 4;
2540 } else if (bt == VT_SHORT) {
2541 *a = 2;
2542 return 2;
2543 } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
2544 *a = 8;
2545 return 16;
2546 } else if (bt == VT_ENUM) {
2547 *a = 4;
2548 /* Enums might be incomplete, so don't just return '4' here. */
2549 return type->ref->c;
2550 } else {
2551 /* char, void, function, _Bool */
2552 *a = 1;
2553 return 1;
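/* Example results on a typical 32-bit target (alignment returned in *a):

       type_size(int)       -> 4,  *a = 4
       type_size(short[8])  -> 16, *a = 2   (element size * element count)
       type_size(long long) -> 8,  *a = 4 on i386 (8 under PE or ARM EABI)
*/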
2557 /* push the type size as known at run time on top of the value stack. Put
2558 alignment at 'a' */
2559 ST_FUNC void vla_runtime_type_size(CType *type, int *a)
2561 if (type->t & VT_VLA) {
2562 type_size(&type->ref->type, a);
2563 vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
2564 } else {
2565 vpushi(type_size(type, a));
2569 static void vla_sp_restore(void) {
2570 if (vlas_in_scope) {
2571 gen_vla_sp_restore(vla_sp_loc);
2575 static void vla_sp_restore_root(void) {
2576 if (vlas_in_scope) {
2577 gen_vla_sp_restore(vla_sp_root_loc);
2581 /* return the pointed type of t */
2582 static inline CType *pointed_type(CType *type)
2584 return &type->ref->type;
2587 /* modify type so that it becomes a pointer to the original type. */
2588 ST_FUNC void mk_pointer(CType *type)
2590 Sym *s;
2591 s = sym_push(SYM_FIELD, type, 0, -1);
2592 type->t = VT_PTR | (type->t & ~VT_TYPE);
2593 type->ref = s;
2596 /* compare function types. OLD functions match any new functions */
2597 static int is_compatible_func(CType *type1, CType *type2)
2599 Sym *s1, *s2;
2601 s1 = type1->ref;
2602 s2 = type2->ref;
2603 if (!is_compatible_types(&s1->type, &s2->type))
2604 return 0;
2605 /* check func_call */
2606 if (s1->a.func_call != s2->a.func_call)
2607 return 0;
2608 /* XXX: not complete */
2609 if (s1->c == FUNC_OLD || s2->c == FUNC_OLD)
2610 return 1;
2611 if (s1->c != s2->c)
2612 return 0;
2613 while (s1 != NULL) {
2614 if (s2 == NULL)
2615 return 0;
2616 if (!is_compatible_parameter_types(&s1->type, &s2->type))
2617 return 0;
2618 s1 = s1->next;
2619 s2 = s2->next;
2621 if (s2)
2622 return 0;
2623 return 1;
2626 /* return true if type1 and type2 are the same. If unqualified is
2627 true, qualifiers on the types are ignored.
2629 - enums are not checked, matching the behaviour of gcc __builtin_types_compatible_p ()
2631 static int compare_types(CType *type1, CType *type2, int unqualified)
2633 int bt1, t1, t2;
2635 t1 = type1->t & VT_TYPE;
2636 t2 = type2->t & VT_TYPE;
2637 if (unqualified) {
2638 /* strip qualifiers before comparing */
2639 t1 &= ~(VT_CONSTANT | VT_VOLATILE);
2640 t2 &= ~(VT_CONSTANT | VT_VOLATILE);
2642 /* Default vs. explicit signedness only matters for char */
2643 if ((t1 & VT_BTYPE) != VT_BYTE) {
2644 t1 &= ~VT_DEFSIGN;
2645 t2 &= ~VT_DEFSIGN;
2647 /* An enum is compatible with (unsigned) int. Ideally we would
2648 store the enum's signedness in type->ref.a.<some_bit> and
2649 only accept unsigned enums with unsigned int and vice versa.
2650 But one of our callers (gen_assign_cast) always strips VT_UNSIGNED
2651 from pointer target types, so we can't add it here either. */
2652 if ((t1 & VT_BTYPE) == VT_ENUM) {
2653 t1 = VT_INT;
2654 if (type1->ref->a.unsigned_enum)
2655 t1 |= VT_UNSIGNED;
2657 if ((t2 & VT_BTYPE) == VT_ENUM) {
2658 t2 = VT_INT;
2659 if (type2->ref->a.unsigned_enum)
2660 t2 |= VT_UNSIGNED;
2662 /* XXX: bitfields ? */
2663 if (t1 != t2)
2664 return 0;
2665 /* test more complicated cases */
2666 bt1 = t1 & VT_BTYPE;
2667 if (bt1 == VT_PTR) {
2668 type1 = pointed_type(type1);
2669 type2 = pointed_type(type2);
2670 return is_compatible_types(type1, type2);
2671 } else if (bt1 == VT_STRUCT) {
2672 return (type1->ref == type2->ref);
2673 } else if (bt1 == VT_FUNC) {
2674 return is_compatible_func(type1, type2);
2675 } else {
2676 return 1;
2680 /* return true if type1 and type2 are exactly the same (including
2681 qualifiers).
2683 static int is_compatible_types(CType *type1, CType *type2)
2685 return compare_types(type1,type2,0);
2688 /* return true if type1 and type2 are the same (ignoring qualifiers).
2690 static int is_compatible_parameter_types(CType *type1, CType *type2)
2692 return compare_types(type1,type2,1);
2695 /* print a type. If 'varstr' is not NULL, then the variable is also
2696 printed in the type */
2697 /* XXX: union */
2698 /* XXX: add array and function pointers */
2699 static void type_to_str(char *buf, int buf_size,
2700 CType *type, const char *varstr)
2702 int bt, v, t;
2703 Sym *s, *sa;
2704 char buf1[256];
2705 const char *tstr;
2707 t = type->t & VT_TYPE;
2708 bt = t & VT_BTYPE;
2709 buf[0] = '\0';
2710 if (t & VT_CONSTANT)
2711 pstrcat(buf, buf_size, "const ");
2712 if (t & VT_VOLATILE)
2713 pstrcat(buf, buf_size, "volatile ");
2714 if ((t & (VT_DEFSIGN | VT_UNSIGNED)) == (VT_DEFSIGN | VT_UNSIGNED))
2715 pstrcat(buf, buf_size, "unsigned ");
2716 else if (t & VT_DEFSIGN)
2717 pstrcat(buf, buf_size, "signed ");
2718 switch(bt) {
2719 case VT_VOID:
2720 tstr = "void";
2721 goto add_tstr;
2722 case VT_BOOL:
2723 tstr = "_Bool";
2724 goto add_tstr;
2725 case VT_BYTE:
2726 tstr = "char";
2727 goto add_tstr;
2728 case VT_SHORT:
2729 tstr = "short";
2730 goto add_tstr;
2731 case VT_INT:
2732 tstr = "int";
2733 goto add_tstr;
2734 case VT_LONG:
2735 tstr = "long";
2736 goto add_tstr;
2737 case VT_LLONG:
2738 tstr = "long long";
2739 goto add_tstr;
2740 case VT_FLOAT:
2741 tstr = "float";
2742 goto add_tstr;
2743 case VT_DOUBLE:
2744 tstr = "double";
2745 goto add_tstr;
2746 case VT_LDOUBLE:
2747 tstr = "long double";
2748 add_tstr:
2749 pstrcat(buf, buf_size, tstr);
2750 break;
2751 case VT_ENUM:
2752 case VT_STRUCT:
2753 if (bt == VT_STRUCT)
2754 tstr = "struct ";
2755 else
2756 tstr = "enum ";
2757 pstrcat(buf, buf_size, tstr);
2758 v = type->ref->v & ~SYM_STRUCT;
2759 if (v >= SYM_FIRST_ANOM)
2760 pstrcat(buf, buf_size, "<anonymous>");
2761 else
2762 pstrcat(buf, buf_size, get_tok_str(v, NULL));
2763 break;
2764 case VT_FUNC:
2765 s = type->ref;
2766 type_to_str(buf, buf_size, &s->type, varstr);
2767 pstrcat(buf, buf_size, "(");
2768 sa = s->next;
2769 while (sa != NULL) {
2770 type_to_str(buf1, sizeof(buf1), &sa->type, NULL);
2771 pstrcat(buf, buf_size, buf1);
2772 sa = sa->next;
2773 if (sa)
2774 pstrcat(buf, buf_size, ", ");
2776 pstrcat(buf, buf_size, ")");
2777 goto no_var;
2778 case VT_PTR:
2779 s = type->ref;
2780 if (t & VT_ARRAY) {
2781 snprintf(buf1, sizeof(buf1), "%s[%ld]", varstr ? varstr : "", s->c);
2782 type_to_str(buf, buf_size, &s->type, buf1);
2783 goto no_var;
2785 pstrcpy(buf1, sizeof(buf1), "*");
2786 if (t & VT_CONSTANT)
2787 pstrcat(buf1, buf_size, "const ");
2788 if (t & VT_VOLATILE)
2789 pstrcat(buf1, buf_size, "volatile ");
2790 if (varstr)
2791 pstrcat(buf1, sizeof(buf1), varstr);
2792 type_to_str(buf, buf_size, &s->type, buf1);
2793 goto no_var;
2795 if (varstr) {
2796 pstrcat(buf, buf_size, " ");
2797 pstrcat(buf, buf_size, varstr);
2799 no_var: ;
2802 /* verify type compatibility to store vtop in 'dt' type, and generate
2803 casts if needed. */
2804 static void gen_assign_cast(CType *dt)
2806 CType *st, *type1, *type2, tmp_type1, tmp_type2;
2807 char buf1[256], buf2[256];
2808 int dbt, sbt;
2810 st = &vtop->type; /* source type */
2811 dbt = dt->t & VT_BTYPE;
2812 sbt = st->t & VT_BTYPE;
2813 if (sbt == VT_VOID || dbt == VT_VOID) {
2814 if (sbt == VT_VOID && dbt == VT_VOID)
2815 ; /*
2816 It is OK if both are void.
2817 A test program:
2818 void func1() {}
2819 void func2() {
2820 return func1();
2822 gcc accepts this program
2824 else
2825 tcc_error("cannot cast from/to void");
2827 if (dt->t & VT_CONSTANT)
2828 tcc_warning("assignment of read-only location");
2829 switch(dbt) {
2830 case VT_PTR:
2831 /* special cases for pointers */
2832 /* '0' can also be a pointer */
2833 if (is_null_pointer(vtop))
2834 goto type_ok;
2835 /* accept implicit pointer to integer cast with warning */
2836 if (is_integer_btype(sbt)) {
2837 tcc_warning("assignment makes pointer from integer without a cast");
2838 goto type_ok;
2840 type1 = pointed_type(dt);
2841 /* a function is implicitly a function pointer */
2842 if (sbt == VT_FUNC) {
2843 if ((type1->t & VT_BTYPE) != VT_VOID &&
2844 !is_compatible_types(pointed_type(dt), st))
2845 tcc_warning("assignment from incompatible pointer type");
2846 goto type_ok;
2848 if (sbt != VT_PTR)
2849 goto error;
2850 type2 = pointed_type(st);
2851 if ((type1->t & VT_BTYPE) == VT_VOID ||
2852 (type2->t & VT_BTYPE) == VT_VOID) {
2853 /* void * can match anything */
2854 } else {
2855 /* exact type match, except for qualifiers */
2856 tmp_type1 = *type1;
2857 tmp_type2 = *type2;
2858 tmp_type1.t &= ~(VT_CONSTANT | VT_VOLATILE);
2859 tmp_type2.t &= ~(VT_CONSTANT | VT_VOLATILE);
2860 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
2861 /* Like GCC, don't warn by default for mere changes
2862 in pointer target signedness. Do warn for different
2863 base types, though, in particular for unsigned enums
2864 and signed int targets. */
2865 if ((tmp_type1.t & (VT_DEFSIGN | VT_UNSIGNED)) !=
2866 (tmp_type2.t & (VT_DEFSIGN | VT_UNSIGNED)) &&
2867 (tmp_type1.t & VT_BTYPE) == (tmp_type2.t & VT_BTYPE))
2869 else
2870 tcc_warning("assignment from incompatible pointer type");
2873 /* check const and volatile */
2874 if ((!(type1->t & VT_CONSTANT) && (type2->t & VT_CONSTANT)) ||
2875 (!(type1->t & VT_VOLATILE) && (type2->t & VT_VOLATILE)))
2876 tcc_warning("assignment discards qualifiers from pointer target type");
2877 break;
2878 case VT_BYTE:
2879 case VT_SHORT:
2880 case VT_INT:
2881 case VT_LLONG:
2882 if (sbt == VT_PTR || sbt == VT_FUNC) {
2883 tcc_warning("assignment makes integer from pointer without a cast");
2884 } else if (sbt == VT_STRUCT) {
2885 goto case_VT_STRUCT;
2887 /* XXX: more tests */
2888 break;
2889 case VT_STRUCT:
2890 case_VT_STRUCT:
2891 tmp_type1 = *dt;
2892 tmp_type2 = *st;
2893 tmp_type1.t &= ~(VT_CONSTANT | VT_VOLATILE);
2894 tmp_type2.t &= ~(VT_CONSTANT | VT_VOLATILE);
2895 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
2896 error:
2897 type_to_str(buf1, sizeof(buf1), st, NULL);
2898 type_to_str(buf2, sizeof(buf2), dt, NULL);
2899 tcc_error("cannot cast '%s' to '%s'", buf1, buf2);
2901 break;
2903 type_ok:
2904 gen_cast(dt);
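/* Typical diagnostics produced by the checks above:

       int *p; p = 3;                 // "assignment makes pointer from integer without a cast"
       int i; char *s; i = s;         // "assignment makes integer from pointer without a cast"
       const char *c; char *t; t = c; // "assignment discards qualifiers from pointer target type"
*/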
2907 /* store vtop in lvalue pushed on stack */
2908 ST_FUNC void vstore(void)
2910 int sbt, dbt, ft, r, t, size, align, bit_size, bit_pos, rc, delayed_cast;
2912 ft = vtop[-1].type.t;
2913 sbt = vtop->type.t & VT_BTYPE;
2914 dbt = ft & VT_BTYPE;
2915 if ((((sbt == VT_INT || sbt == VT_SHORT) && dbt == VT_BYTE) ||
2916 (sbt == VT_INT && dbt == VT_SHORT))
2917 && !(vtop->type.t & VT_BITFIELD)) {
2918 /* optimize char/short casts */
2919 delayed_cast = VT_MUSTCAST;
2920 vtop->type.t = (ft & VT_TYPE & ~VT_BITFIELD &
2921 ((1 << VT_STRUCT_SHIFT) - 1));
2922 /* XXX: factorize */
2923 if (ft & VT_CONSTANT)
2924 tcc_warning("assignment of read-only location");
2925 } else {
2926 delayed_cast = 0;
2927 if (!(ft & VT_BITFIELD))
2928 gen_assign_cast(&vtop[-1].type);
2931 if (sbt == VT_STRUCT) {
2932 /* if structure, only generate pointer */
2933 /* structure assignment : generate memcpy */
2934 /* XXX: optimize if small size */
2935 size = type_size(&vtop->type, &align);
2937 /* destination */
2938 vswap();
2939 vtop->type.t = VT_PTR;
2940 gaddrof();
2942 /* address of memcpy() */
2943 #ifdef TCC_ARM_EABI
2944 if(!(align & 7))
2945 vpush_global_sym(&func_old_type, TOK_memcpy8);
2946 else if(!(align & 3))
2947 vpush_global_sym(&func_old_type, TOK_memcpy4);
2948 else
2949 #endif
2950 /* Use memmove, rather than memcpy, as dest and src may be the same: */
2951 vpush_global_sym(&func_old_type, TOK_memmove);
2953 vswap();
2954 /* source */
2955 vpushv(vtop - 2);
2956 vtop->type.t = VT_PTR;
2957 gaddrof();
2958 /* type size */
2959 vpushi(size);
2960 gfunc_call(3);
2962 /* leave source on stack */
2963 } else if (ft & VT_BITFIELD) {
2964 /* bitfield store handling */
2966 /* save lvalue as expression result (example: s.b = s.a = n;) */
2967 vdup(), vtop[-1] = vtop[-2];
2969 bit_pos = (ft >> VT_STRUCT_SHIFT) & 0x3f;
2970 bit_size = (ft >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
2971 /* remove bit field info to avoid loops */
2972 vtop[-1].type.t = ft & ~VT_BITFIELD & ((1 << VT_STRUCT_SHIFT) - 1);
2974 if((ft & VT_BTYPE) == VT_BOOL) {
2975 gen_cast(&vtop[-1].type);
2976 vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
2979 /* duplicate destination */
2980 vdup();
2981 vtop[-1] = vtop[-2];
2983 /* mask and shift source */
2984 if((ft & VT_BTYPE) != VT_BOOL) {
2985 if((ft & VT_BTYPE) == VT_LLONG) {
2986 vpushll((1ULL << bit_size) - 1ULL);
2987 } else {
2988 vpushi((1 << bit_size) - 1);
2990 gen_op('&');
2992 vpushi(bit_pos);
2993 gen_op(TOK_SHL);
2994 /* load destination, mask and or with source */
2995 vswap();
2996 if((ft & VT_BTYPE) == VT_LLONG) {
2997 vpushll(~(((1ULL << bit_size) - 1ULL) << bit_pos));
2998 } else {
2999 vpushi(~(((1 << bit_size) - 1) << bit_pos));
3001 gen_op('&');
3002 gen_op('|');
3003 /* store result */
3004 vstore();
3005 /* ... and discard */
3006 vpop();
3008 } else {
3009 #ifdef CONFIG_TCC_BCHECK
3010 /* bound check case */
3011 if (vtop[-1].r & VT_MUSTBOUND) {
3012 vswap();
3013 gbound();
3014 vswap();
3016 #endif
3017 rc = RC_INT;
3018 if (is_float(ft)) {
3019 rc = RC_FLOAT;
3020 #ifdef TCC_TARGET_X86_64
3021 if ((ft & VT_BTYPE) == VT_LDOUBLE) {
3022 rc = RC_ST0;
3023 } else if ((ft & VT_BTYPE) == VT_QFLOAT) {
3024 rc = RC_FRET;
3026 #endif
3028 r = gv(rc); /* generate value */
3029 /* if lvalue was saved on stack, must read it */
3030 if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
3031 SValue sv;
3032 t = get_reg(RC_INT);
3033 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
3034 sv.type.t = VT_PTR;
3035 #else
3036 sv.type.t = VT_INT;
3037 #endif
3038 sv.r = VT_LOCAL | VT_LVAL;
3039 sv.c.i = vtop[-1].c.i;
3040 load(t, &sv);
3041 vtop[-1].r = t | VT_LVAL;
3043 /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
3044 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
3045 if (((ft & VT_BTYPE) == VT_QLONG) || ((ft & VT_BTYPE) == VT_QFLOAT)) {
3046 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
3047 #else
3048 if ((ft & VT_BTYPE) == VT_LLONG) {
3049 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
3050 #endif
3051 vtop[-1].type.t = load_type;
3052 store(r, vtop - 1);
3053 vswap();
3054 /* convert to int to increment easily */
3055 vtop->type.t = addr_type;
3056 gaddrof();
3057 vpushi(load_size);
3058 gen_op('+');
3059 vtop->r |= VT_LVAL;
3060 vswap();
3061 vtop[-1].type.t = load_type;
3062 /* XXX: it works because r2 is spilled last ! */
3063 store(vtop->r2, vtop - 1);
3064 } else {
3065 store(r, vtop - 1);
3068 vswap();
3069 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
3070 vtop->r |= delayed_cast;
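/* The bit-field branch above is the usual read-modify-write sequence;
   for a field of width bit_size at offset bit_pos it computes roughly:

       mask = ((1 << bit_size) - 1) << bit_pos;
       dest = (dest & ~mask) | ((src << bit_pos) & mask);

   (the generated code masks the source before shifting, which is
   equivalent) */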
3074 /* handle post/pre increment and decrement. 'post' is true for the postfix form; c is the token ++ or -- */
3075 ST_FUNC void inc(int post, int c)
3077 test_lvalue();
3078 vdup(); /* save lvalue */
3079 if (post) {
3080 gv_dup(); /* duplicate value */
3081 vrotb(3);
3082 vrotb(3);
3084 /* add constant */
3085 vpushi(c - TOK_MID);
3086 gen_op('+');
3087 vstore(); /* store value */
3088 if (post)
3089 vpop(); /* if post op, return saved value */
3092 ST_FUNC void parse_mult_str (CString *astr, const char *msg)
3094 /* read the string */
3095 if (tok != TOK_STR)
3096 expect(msg);
3097 cstr_new(astr);
3098 while (tok == TOK_STR) {
3099 /* XXX: add \0 handling too ? */
3100 cstr_cat(astr, tokc.str.data, -1);
3101 next();
3103 cstr_ccat(astr, '\0');
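/* parse_mult_str() also concatenates adjacent string literals, so for
   example the attribute below yields the single section name ".mydata":

       int x __attribute__((section(".my" "data")));
*/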
3106 /* If I is >= 1 and a power of two, returns log2(i)+1.
3107 If I is 0 returns 0. */
3108 static int exact_log2p1(int i)
3110 int ret;
3111 if (!i)
3112 return 0;
3113 for (ret = 1; i >= 1 << 8; ret += 8)
3114 i >>= 8;
3115 if (i >= 1 << 4)
3116 ret += 4, i >>= 4;
3117 if (i >= 1 << 2)
3118 ret += 2, i >>= 2;
3119 if (i >= 1 << 1)
3120 ret++;
3121 return ret;
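/* Examples: exact_log2p1(0) == 0, exact_log2p1(1) == 1,
   exact_log2p1(8) == 4, exact_log2p1(1 << 20) == 21. It is used below
   to store the aligned(n) attribute value compactly. */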
3124 /* Parse GNUC __attribute__ extension. Currently, the following
3125 extensions are recognized:
3126 - aligned(n) : set data/function alignment.
3127 - packed : force data alignment to 1
3128 - section(x) : generate data/code in this section.
3129 - unused : currently ignored, but may be used someday.
3130 - regparm(n) : pass function parameters in registers (i386 only)
3132 static void parse_attribute(AttributeDef *ad)
3134 int t, n;
3135 CString astr;
3137 while (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2) {
3138 next();
3139 skip('(');
3140 skip('(');
3141 while (tok != ')') {
3142 if (tok < TOK_IDENT)
3143 expect("attribute name");
3144 t = tok;
3145 next();
3146 switch(t) {
3147 case TOK_SECTION1:
3148 case TOK_SECTION2:
3149 skip('(');
3150 parse_mult_str(&astr, "section name");
3151 ad->section = find_section(tcc_state, (char *)astr.data);
3152 skip(')');
3153 cstr_free(&astr);
3154 break;
3155 case TOK_ALIAS1:
3156 case TOK_ALIAS2:
3157 skip('(');
3158 parse_mult_str(&astr, "alias(\"target\")");
3159 ad->alias_target = /* save string as token, for later */
3160 tok_alloc((char*)astr.data, astr.size-1)->tok;
3161 skip(')');
3162 cstr_free(&astr);
3163 break;
3164 case TOK_VISIBILITY1:
3165 case TOK_VISIBILITY2:
3166 skip('(');
3167 parse_mult_str(&astr,
3168 "visibility(\"default|hidden|internal|protected\")");
3169 if (!strcmp (astr.data, "default"))
3170 ad->a.visibility = STV_DEFAULT;
3171 else if (!strcmp (astr.data, "hidden"))
3172 ad->a.visibility = STV_HIDDEN;
3173 else if (!strcmp (astr.data, "internal"))
3174 ad->a.visibility = STV_INTERNAL;
3175 else if (!strcmp (astr.data, "protected"))
3176 ad->a.visibility = STV_PROTECTED;
3177 else
3178 expect("visibility(\"default|hidden|internal|protected\")");
3179 skip(')');
3180 cstr_free(&astr);
3181 break;
3182 case TOK_ALIGNED1:
3183 case TOK_ALIGNED2:
3184 if (tok == '(') {
3185 next();
3186 n = expr_const();
3187 if (n <= 0 || (n & (n - 1)) != 0)
3188 tcc_error("alignment must be a positive power of two");
3189 skip(')');
3190 } else {
3191 n = MAX_ALIGN;
3193 ad->a.aligned = exact_log2p1(n);
3194 if (n != 1 << (ad->a.aligned - 1))
3195 tcc_error("alignment of %d is larger than implemented", n);
3196 break;
3197 case TOK_PACKED1:
3198 case TOK_PACKED2:
3199 ad->a.packed = 1;
3200 break;
3201 case TOK_WEAK1:
3202 case TOK_WEAK2:
3203 ad->a.weak = 1;
3204 break;
3205 case TOK_UNUSED1:
3206 case TOK_UNUSED2:
3207 /* currently, no need to handle it because tcc does not
3208 track unused objects */
3209 break;
3210 case TOK_NORETURN1:
3211 case TOK_NORETURN2:
3212 /* currently, no need to handle it because tcc does not
3213 make use of the noreturn information */
3214 break;
3215 case TOK_CDECL1:
3216 case TOK_CDECL2:
3217 case TOK_CDECL3:
3218 ad->a.func_call = FUNC_CDECL;
3219 break;
3220 case TOK_STDCALL1:
3221 case TOK_STDCALL2:
3222 case TOK_STDCALL3:
3223 ad->a.func_call = FUNC_STDCALL;
3224 break;
3225 #ifdef TCC_TARGET_I386
3226 case TOK_REGPARM1:
3227 case TOK_REGPARM2:
3228 skip('(');
3229 n = expr_const();
3230 if (n > 3)
3231 n = 3;
3232 else if (n < 0)
3233 n = 0;
3234 if (n > 0)
3235 ad->a.func_call = FUNC_FASTCALL1 + n - 1;
3236 skip(')');
3237 break;
3238 case TOK_FASTCALL1:
3239 case TOK_FASTCALL2:
3240 case TOK_FASTCALL3:
3241 ad->a.func_call = FUNC_FASTCALLW;
3242 break;
3243 #endif
3244 case TOK_MODE:
3245 skip('(');
3246 switch(tok) {
3247 case TOK_MODE_DI:
3248 ad->a.mode = VT_LLONG + 1;
3249 break;
3250 case TOK_MODE_QI:
3251 ad->a.mode = VT_BYTE + 1;
3252 break;
3253 case TOK_MODE_HI:
3254 ad->a.mode = VT_SHORT + 1;
3255 break;
3256 case TOK_MODE_SI:
3257 case TOK_MODE_word:
3258 ad->a.mode = VT_INT + 1;
3259 break;
3260 default:
3261 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok, NULL));
3262 break;
3264 next();
3265 skip(')');
3266 break;
3267 case TOK_DLLEXPORT:
3268 ad->a.func_export = 1;
3269 break;
3270 case TOK_DLLIMPORT:
3271 ad->a.func_import = 1;
3272 break;
3273 default:
3274 if (tcc_state->warn_unsupported)
3275 tcc_warning("'%s' attribute ignored", get_tok_str(t, NULL));
3276 /* skip parameters */
3277 if (tok == '(') {
3278 int parenthesis = 0;
3279 do {
3280 if (tok == '(')
3281 parenthesis++;
3282 else if (tok == ')')
3283 parenthesis--;
3284 next();
3285 } while (parenthesis && tok != -1);
3287 break;
3289 if (tok != ',')
3290 break;
3291 next();
3293 skip(')');
3294 skip(')');
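/* A declaration exercising several of the attributes recognized above:

       static int counter __attribute__((aligned(16), unused));
       void fatal(const char *msg) __attribute__((noreturn));

   Attributes that are not recognized are skipped (with a warning when
   tcc_state->warn_unsupported is set), so headers using newer GCC
   attributes still parse. */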
3298 static Sym * find_field (CType *type, int v)
3300 Sym *s = type->ref;
3301 v |= SYM_FIELD;
3302 while ((s = s->next) != NULL) {
3303 if ((s->v & SYM_FIELD) &&
3304 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3305 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3306 Sym *ret = find_field (&s->type, v);
3307 if (ret)
3308 return ret;
3310 if (s->v == v)
3311 break;
3313 return s;
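/* find_field() also descends into anonymous aggregate members, so the
   access below resolves even though 'b' lives in an unnamed struct:

       struct outer { int a; struct { int b; }; } o;
       o.b = 1;    // found by the recursive search above
*/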
3316 static void struct_add_offset (Sym *s, int offset)
3318 while ((s = s->next) != NULL) {
3319 if ((s->v & SYM_FIELD) &&
3320 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3321 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3322 struct_add_offset(s->type.ref, offset);
3323 } else
3324 s->c += offset;
3328 static void struct_layout(CType *type, AttributeDef *ad)
3330 int align, maxalign, offset, c, bit_pos, bt, prevbt, prev_bit_size;
3331 int pcc = !tcc_state->ms_bitfields;
3332 Sym *f;
3333 if (ad->a.aligned)
3334 maxalign = 1 << (ad->a.aligned - 1);
3335 else
3336 maxalign = 1;
3337 offset = 0;
3338 c = 0;
3339 bit_pos = 0;
3340 prevbt = VT_STRUCT; /* make it never match */
3341 prev_bit_size = 0;
3342 for (f = type->ref->next; f; f = f->next) {
3343 int typealign, bit_size;
3344 int size = type_size(&f->type, &typealign);
3345 if (f->type.t & VT_BITFIELD)
3346 bit_size = (f->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
3347 else
3348 bit_size = -1;
3349 if (bit_size == 0 && pcc) {
3350 /* Zero-width bit-fields in PCC mode aren't affected
3351 by any packing (attribute or pragma). */
3352 align = typealign;
3353 } else if (f->r > 1) {
3354 align = f->r;
3355 } else if (ad->a.packed || f->r == 1) {
3356 align = 1;
3357 /* Packed fields or packed records don't let the base type
3358 influence the record's alignment. */
3359 typealign = 1;
3360 } else {
3361 align = typealign;
3363 if (type->ref->type.t != TOK_STRUCT) {
3364 if (pcc && bit_size >= 0)
3365 size = (bit_size + 7) >> 3;
3366 /* Bit position is already zero from our caller. */
3367 offset = 0;
3368 if (size > c)
3369 c = size;
3370 } else if (bit_size < 0) {
3371 int addbytes = pcc ? (bit_pos + 7) >> 3 : 0;
3372 prevbt = VT_STRUCT;
3373 prev_bit_size = 0;
3374 c = (c + addbytes + align - 1) & -align;
3375 offset = c;
3376 if (size > 0)
3377 c += size;
3378 bit_pos = 0;
3379 } else {
3380 /* A bit-field. Layout is more complicated. There are two
3381 options TCC implements: PCC compatible and MS compatible
3382 (PCC compatible is what GCC uses for almost all targets).
3383 In PCC layout the overall size of the struct (in c) is
3384 _excluding_ the current run of bit-fields (that is,
3385 there's at least additional bit_pos bits after c). In
3386 MS layout c does include the current run of bit-fields.
3388 This matters for calculating the natural alignment buckets
3389 in PCC mode. */
3391 /* 'align' will be used to influence the record's alignment,
3392 so it's the max of specified and type alignment, except
3393 in certain cases that depend on the mode. */
3394 if (align < typealign)
3395 align = typealign;
3396 if (pcc) {
3397 /* In PCC layout a non-packed bit-field is placed adjacent
3398 to the preceding bit-fields, except if it would overflow
3399 its container (depending on base type) or it's a zero-width
3400 bit-field. Packed non-zero-width bit-fields always are
3401 placed adjacent. */
3402 int ofs = (c * 8 + bit_pos) % (typealign * 8);
3403 int ofs2 = ofs + bit_size + (typealign * 8) - 1;
3404 if (bit_size == 0 ||
3405 (typealign != 1 &&
3406 (ofs2 / (typealign * 8)) > (size/typealign))) {
3407 c = (c + ((bit_pos + 7) >> 3) + typealign - 1) & -typealign;
3408 bit_pos = 0;
3410 offset = c;
3411 /* In PCC layout named bit-fields influence the alignment
3412 of the containing struct using the base type's alignment,
3413 except for packed fields (which here have correct
3414 align/typealign). */
3415 if ((f->v & SYM_FIRST_ANOM))
3416 align = 1;
3417 } else {
3418 bt = f->type.t & VT_BTYPE;
3419 if ((bit_pos + bit_size > size * 8) ||
3420 (bit_size > 0) == (bt != prevbt)) {
3421 c = (c + typealign - 1) & -typealign;
3422 offset = c;
3423 bit_pos = 0;
3424 /* In MS bitfield mode a bit-field run always uses
3425 at least as many bits as the underlying type.
3426 To start a new run it's also required that this
3427 or the last bit-field had non-zero width. */
3428 if (bit_size || prev_bit_size)
3429 c += size;
3431 /* In MS layout the record's alignment is normally
3432 influenced by the field, except for a zero-width
3433 field at the start of a run (but by further zero-width
3434 fields it is again). */
3435 if (bit_size == 0 && prevbt != bt)
3436 align = 1;
3437 prevbt = bt;
3438 prev_bit_size = bit_size;
3440 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
3441 | (bit_pos << VT_STRUCT_SHIFT);
3442 bit_pos += bit_size;
3443 if (pcc && bit_pos >= size * 8) {
3444 c += size;
3445 bit_pos -= size * 8;
3448 if (align > maxalign)
3449 maxalign = align;
3450 #if 0
3451 printf("set field %s offset=%d c=%d",
3452 get_tok_str(f->v & ~SYM_FIELD, NULL), offset, c);
3453 if (f->type.t & VT_BITFIELD) {
3454 printf(" pos=%d size=%d",
3455 (f->type.t >> VT_STRUCT_SHIFT) & 0x3f,
3456 (f->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f);
3458 printf("\n");
3459 #endif
3461 if (f->v & SYM_FIRST_ANOM && (f->type.t & VT_BTYPE) == VT_STRUCT) {
3462 Sym *ass;
3463 /* An anonymous struct/union. Adjust member offsets
3464 to reflect the real offset of our containing struct.
3465 Also set the offset of this anon member inside
3466 the outer struct to be zero. This way it
3467 works both when accessing the field offset directly
3468 (from the base object) and when recursing into
3469 members in initializer handling. */
3470 int v2 = f->type.ref->v;
3471 if (!(v2 & SYM_FIELD) &&
3472 (v2 & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
3473 Sym **pps;
3474 /* This happens only with MS extensions. The
3475 anon member has a named struct type, so it
3476 potentially is shared with other references.
3477 We need to unshare members so we can modify
3478 them. */
3479 ass = f->type.ref;
3480 f->type.ref = sym_push(anon_sym++ | SYM_FIELD,
3481 &f->type.ref->type, 0,
3482 f->type.ref->c);
3483 pps = &f->type.ref->next;
3484 while ((ass = ass->next) != NULL) {
3485 *pps = sym_push(ass->v, &ass->type, 0, ass->c);
3486 pps = &((*pps)->next);
3488 *pps = NULL;
3490 struct_add_offset(f->type.ref, offset);
3491 f->c = 0;
3492 } else {
3493 f->c = offset;
3496 f->r = 0;
3498 /* store size and alignment */
3499 type->ref->c = (c + (pcc ? (bit_pos + 7) >> 3 : 0)
3500 + maxalign - 1) & -maxalign;
3501 type->ref->r = maxalign;
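/* A layout difference between the two modes handled above, assuming a
   4-byte int and 1-byte char:

       struct s { int a : 4; char b : 2; };

   In PCC/GCC-compatible mode 'b' is packed into the same unit as 'a'
   (sizeof(struct s) == 4); with tcc_state->ms_bitfields set, a new unit
   is started whenever the base type changes, giving sizeof == 8. */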
3504 /* enum/struct/union declaration. u is either VT_ENUM or VT_STRUCT */
3505 static void struct_decl(CType *type, AttributeDef *ad, int u)
3507 int a, v, size, align, flexible, alignoverride;
3508 long c;
3509 int bit_size, bsize, bt;
3510 Sym *s, *ss, **ps;
3511 AttributeDef ad1;
3512 CType type1, btype;
3514 a = tok; /* save decl type */
3515 next();
3516 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
3517 parse_attribute(ad);
3518 if (tok != '{') {
3519 v = tok;
3520 next();
3521 /* struct already defined ? return it */
3522 if (v < TOK_IDENT)
3523 expect("struct/union/enum name");
3524 s = struct_find(v);
3525 if (s && (s->scope == local_scope || (tok != '{' && tok != ';'))) {
3526 if (s->type.t != a)
3527 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
3528 goto do_decl;
3530 } else {
3531 v = anon_sym++;
3533 /* Record the original enum/struct/union token. */
3534 type1.t = a;
3535 type1.ref = NULL;
3536 /* we put an undefined size for struct/union */
3537 s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
3538 s->r = 0; /* default alignment is zero as gcc */
3539 /* put struct/union/enum name in type */
3540 do_decl:
3541 type->t = u;
3542 type->ref = s;
3544 if (tok == '{') {
3545 next();
3546 if (s->c != -1)
3547 tcc_error("struct/union/enum already defined");
3548 /* cannot be empty */
3549 c = 0;
3550 /* empty enums are not allowed */
3551 if (a == TOK_ENUM) {
3552 int seen_neg = 0;
3553 int seen_wide = 0;
3554 for(;;) {
3555 CType *t = &int_type;
3556 v = tok;
3557 if (v < TOK_UIDENT)
3558 expect("identifier");
3559 ss = sym_find(v);
3560 if (ss && !local_stack)
3561 tcc_error("redefinition of enumerator '%s'",
3562 get_tok_str(v, NULL));
3563 next();
3564 if (tok == '=') {
3565 next();
3566 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
3567 c = expr_const64();
3568 #else
3569 /* We really want to support long long enums
3570 on i386 as well, but the Sym structure only
3571 holds a 'long' for associated constants,
3572 and enlarging it would bump its size (no
3573 available padding). So punt for now. */
3574 c = expr_const();
3575 #endif
3577 if (c < 0)
3578 seen_neg = 1;
3579 if (c != (int)c && (unsigned long)c != (unsigned int)c)
3580 seen_wide = 1, t = &size_type;
3581 /* enum symbols have static storage */
3582 ss = sym_push(v, t, VT_CONST, c);
3583 ss->type.t |= VT_STATIC;
3584 if (tok != ',')
3585 break;
3586 next();
3587 c++;
3588 /* NOTE: we accept a trailing comma */
3589 if (tok == '}')
3590 break;
3592 if (!seen_neg)
3593 s->a.unsigned_enum = 1;
3594 s->c = type_size(seen_wide ? &size_type : &int_type, &align);
3595 skip('}');
3596 } else {
3597 ps = &s->next;
3598 flexible = 0;
3599 while (tok != '}') {
3600 if (!parse_btype(&btype, &ad1)) {
3601 skip(';');
3602 continue;
3604 while (1) {
3605 if (flexible)
3606 tcc_error("flexible array member '%s' not at the end of struct",
3607 get_tok_str(v, NULL));
3608 bit_size = -1;
3609 v = 0;
3610 type1 = btype;
3611 if (tok != ':') {
3612 type_decl(&type1, &ad1, &v, TYPE_DIRECT | TYPE_ABSTRACT);
3613 if (v == 0) {
3614 if ((type1.t & VT_BTYPE) != VT_STRUCT)
3615 expect("identifier");
3616 else {
3617 int v = btype.ref->v;
3618 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
3619 if (tcc_state->ms_extensions == 0)
3620 expect("identifier");
3624 if (type_size(&type1, &align) < 0) {
3625 if ((a == TOK_STRUCT) && (type1.t & VT_ARRAY) && c)
3626 flexible = 1;
3627 else
3628 tcc_error("field '%s' has incomplete type",
3629 get_tok_str(v, NULL));
3631 if ((type1.t & VT_BTYPE) == VT_FUNC ||
3632 (type1.t & (VT_TYPEDEF | VT_STATIC | VT_EXTERN | VT_INLINE)))
3633 tcc_error("invalid type for '%s'",
3634 get_tok_str(v, NULL));
3636 if (tok == ':') {
3637 next();
3638 bit_size = expr_const();
3639 /* XXX: handle v = 0 case for messages */
3640 if (bit_size < 0)
3641 tcc_error("negative width in bit-field '%s'",
3642 get_tok_str(v, NULL));
3643 if (v && bit_size == 0)
3644 tcc_error("zero width for bit-field '%s'",
3645 get_tok_str(v, NULL));
3646 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
3647 parse_attribute(&ad1);
3649 size = type_size(&type1, &align);
3650 /* Only remember non-default alignment. */
3651 alignoverride = 0;
3652 if (ad1.a.aligned) {
3653 int speca = 1 << (ad1.a.aligned - 1);
3654 alignoverride = speca;
3655 } else if (ad1.a.packed || ad->a.packed) {
3656 alignoverride = 1;
3657 } else if (*tcc_state->pack_stack_ptr) {
3658 if (align > *tcc_state->pack_stack_ptr)
3659 alignoverride = *tcc_state->pack_stack_ptr;
3661 if (bit_size >= 0) {
3662 bt = type1.t & VT_BTYPE;
3663 if (bt != VT_INT &&
3664 bt != VT_BYTE &&
3665 bt != VT_SHORT &&
3666 bt != VT_BOOL &&
3667 bt != VT_ENUM &&
3668 bt != VT_LLONG)
3669 tcc_error("bitfields must have scalar type");
3670 bsize = size * 8;
3671 if (bit_size > bsize) {
3672 tcc_error("width of '%s' exceeds its type",
3673 get_tok_str(v, NULL));
3674 } else if (bit_size == bsize) {
3675 /* no need for bit fields */
3677 } else {
3678 type1.t |= VT_BITFIELD |
3679 (0 << VT_STRUCT_SHIFT) |
3680 (bit_size << (VT_STRUCT_SHIFT + 6));
3683 if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
3684 /* Remember we've seen a real field to check
3685 for placement of flexible array member. */
3686 c = 1;
3688 /* If member is a struct or bit-field, enforce
3689 placing into the struct (as anonymous). */
3690 if (v == 0 &&
3691 ((type1.t & VT_BTYPE) == VT_STRUCT ||
3692 bit_size >= 0)) {
3693 v = anon_sym++;
3695 if (v) {
3696 ss = sym_push(v | SYM_FIELD, &type1, alignoverride, 0);
3697 *ps = ss;
3698 ps = &ss->next;
3700 if (tok == ';' || tok == TOK_EOF)
3701 break;
3702 skip(',');
3704 skip(';');
3706 skip('}');
3707 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
3708 parse_attribute(ad);
3709 struct_layout(type, ad);
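/* The enum branch above records whether any enumerator is negative
   (seen_neg) or needs more than 32 bits (seen_wide), e.g.:

       enum color { RED, GREEN, BLUE };   // no negative value: unsigned_enum is set
       enum err { E_IO = -5, E_MEM };     // negative value: stays signed
*/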
3714 /* return 1 if basic type is a type size (short, long, long long) */
3715 ST_FUNC int is_btype_size(int bt)
3717 return bt == VT_SHORT || bt == VT_LONG || bt == VT_LLONG;
3720 /* Add type qualifiers to a type. If the type is an array then the qualifiers
3721 are added to the element type, copied because it could be a typedef. */
3722 static void parse_btype_qualify(CType *type, int qualifiers)
3724 while (type->t & VT_ARRAY) {
3725 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
3726 type = &type->ref->type;
3728 type->t |= qualifiers;
3731 /* return 0 if no type declaration was found. Otherwise, parse the basic
3732 type into 'type', skip it, and return 1.
3734 static int parse_btype(CType *type, AttributeDef *ad)
3736 int t, u, bt_size, complete, type_found, typespec_found;
3737 Sym *s;
3738 CType type1;
3740 memset(ad, 0, sizeof(AttributeDef));
3741 complete = 0;
3742 type_found = 0;
3743 typespec_found = 0;
3744 t = 0;
3745 while(1) {
3746 switch(tok) {
3747 case TOK_EXTENSION:
3748 /* currently, we really ignore extension */
3749 next();
3750 continue;
3752 /* basic types */
3753 case TOK_CHAR:
3754 u = VT_BYTE;
3755 basic_type:
3756 next();
3757 basic_type1:
3758 if (complete)
3759 tcc_error("too many basic types");
3760 t |= u;
3761 bt_size = is_btype_size (u & VT_BTYPE);
3762 if (u == VT_INT || (!bt_size && !(t & VT_TYPEDEF)))
3763 complete = 1;
3764 typespec_found = 1;
3765 break;
3766 case TOK_VOID:
3767 u = VT_VOID;
3768 goto basic_type;
3769 case TOK_SHORT:
3770 u = VT_SHORT;
3771 goto basic_type;
3772 case TOK_INT:
3773 u = VT_INT;
3774 goto basic_type;
3775 case TOK_LONG:
3776 next();
3777 if ((t & VT_BTYPE) == VT_DOUBLE) {
3778 #ifndef TCC_TARGET_PE
3779 t = (t & ~VT_BTYPE) | VT_LDOUBLE;
3780 #endif
3781 } else if ((t & VT_BTYPE) == VT_LONG) {
3782 t = (t & ~VT_BTYPE) | VT_LLONG;
3783 } else {
3784 u = VT_LONG;
3785 goto basic_type1;
3787 break;
3788 #ifdef TCC_TARGET_ARM64
3789 case TOK_UINT128:
3790 /* GCC's __uint128_t appears in some Linux header files. Make it a
3791 synonym for long double to get the size and alignment right. */
3792 u = VT_LDOUBLE;
3793 goto basic_type;
3794 #endif
3795 case TOK_BOOL:
3796 u = VT_BOOL;
3797 goto basic_type;
3798 case TOK_FLOAT:
3799 u = VT_FLOAT;
3800 goto basic_type;
3801 case TOK_DOUBLE:
3802 next();
3803 if ((t & VT_BTYPE) == VT_LONG) {
3804 #ifdef TCC_TARGET_PE
3805 t = (t & ~VT_BTYPE) | VT_DOUBLE;
3806 #else
3807 t = (t & ~VT_BTYPE) | VT_LDOUBLE;
3808 #endif
3809 } else {
3810 u = VT_DOUBLE;
3811 goto basic_type1;
3813 break;
3814 case TOK_ENUM:
3815 struct_decl(&type1, ad, VT_ENUM);
3816 basic_type2:
3817 u = type1.t;
3818 type->ref = type1.ref;
3819 goto basic_type1;
3820 case TOK_STRUCT:
3821 case TOK_UNION:
3822 struct_decl(&type1, ad, VT_STRUCT);
3823 goto basic_type2;
3825 /* type modifiers */
3826 case TOK_CONST1:
3827 case TOK_CONST2:
3828 case TOK_CONST3:
3829 type->t = t;
3830 parse_btype_qualify(type, VT_CONSTANT);
3831 t = type->t;
3832 next();
3833 break;
3834 case TOK_VOLATILE1:
3835 case TOK_VOLATILE2:
3836 case TOK_VOLATILE3:
3837 type->t = t;
3838 parse_btype_qualify(type, VT_VOLATILE);
3839 t = type->t;
3840 next();
3841 break;
3842 case TOK_SIGNED1:
3843 case TOK_SIGNED2:
3844 case TOK_SIGNED3:
3845 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
3846 tcc_error("signed and unsigned modifier");
3847 typespec_found = 1;
3848 t |= VT_DEFSIGN;
3849 next();
3850 break;
3851 case TOK_REGISTER:
3852 case TOK_AUTO:
3853 case TOK_RESTRICT1:
3854 case TOK_RESTRICT2:
3855 case TOK_RESTRICT3:
3856 next();
3857 break;
3858 case TOK_UNSIGNED:
3859 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
3860 tcc_error("signed and unsigned modifier");
3861 t |= VT_DEFSIGN | VT_UNSIGNED;
3862 next();
3863 typespec_found = 1;
3864 break;
3866 /* storage */
3867 case TOK_EXTERN:
3868 t |= VT_EXTERN;
3869 next();
3870 break;
3871 case TOK_STATIC:
3872 t |= VT_STATIC;
3873 next();
3874 break;
3875 case TOK_TYPEDEF:
3876 t |= VT_TYPEDEF;
3877 next();
3878 break;
3879 case TOK_INLINE1:
3880 case TOK_INLINE2:
3881 case TOK_INLINE3:
3882 t |= VT_INLINE;
3883 next();
3884 break;
3886 /* GNUC attribute */
3887 case TOK_ATTRIBUTE1:
3888 case TOK_ATTRIBUTE2:
3889 parse_attribute(ad);
3890 if (ad->a.mode) {
3891 u = ad->a.mode -1;
3892 t = (t & ~VT_BTYPE) | u;
3894 break;
3895 /* GNUC typeof */
3896 case TOK_TYPEOF1:
3897 case TOK_TYPEOF2:
3898 case TOK_TYPEOF3:
3899 next();
3900 parse_expr_type(&type1);
3901 /* remove all storage modifiers except typedef */
3902 type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
3903 goto basic_type2;
3904 default:
3905 if (typespec_found)
3906 goto the_end;
3907 s = sym_find(tok);
3908 if (!s || !(s->type.t & VT_TYPEDEF))
3909 goto the_end;
3911 type->t = ((s->type.t & ~VT_TYPEDEF) |
3912 (t & ~(VT_CONSTANT | VT_VOLATILE)));
3913 type->ref = s->type.ref;
3914 if (t & (VT_CONSTANT | VT_VOLATILE))
3915 parse_btype_qualify(type, t & (VT_CONSTANT | VT_VOLATILE));
3916 t = type->t;
3918 if (s->r) {
3919 /* get attributes from typedef */
3920 if (0 == ad->a.aligned)
3921 ad->a.aligned = s->a.aligned;
3922 if (0 == ad->a.func_call)
3923 ad->a.func_call = s->a.func_call;
3924 ad->a.packed |= s->a.packed;
3926 next();
3927 typespec_found = 1;
3928 break;
3930 type_found = 1;
3932 the_end:
3933 if (tcc_state->char_is_unsigned) {
3934 if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
3935 t |= VT_UNSIGNED;
3938 /* 'long' is never used as a type by itself: convert it to int or long long */
3939 if ((t & VT_BTYPE) == VT_LONG)
3940 #if (!defined TCC_TARGET_X86_64 && !defined TCC_TARGET_ARM64) || \
3941 defined TCC_TARGET_PE
3942 t = (t & ~VT_BTYPE) | VT_INT;
3943 #else
3944 t = (t & ~VT_BTYPE) | VT_LLONG;
3945 #endif
3946 type->t = t;
3947 return type_found;
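/* How the basic type accumulates in 't' for a few declarations:

       long double d;         // 'long' then 'double' -> VT_LDOUBLE (VT_DOUBLE under PE)
       unsigned long long u;  // two 'long's -> VT_LLONG, plus VT_DEFSIGN | VT_UNSIGNED
       unsigned char c;       // VT_BYTE | VT_DEFSIGN | VT_UNSIGNED
*/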
3950 /* convert a function parameter type (array to pointer and function to
3951 function pointer) */
3952 static inline void convert_parameter_type(CType *pt)
3954 /* remove const and volatile qualifiers (XXX: const could be used
3955 to indicate a const function parameter) */
3956 pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
3957 /* array must be transformed to pointer according to ANSI C */
3958 pt->t &= ~VT_ARRAY;
3959 if ((pt->t & VT_BTYPE) == VT_FUNC) {
3960 mk_pointer(pt);
3964 ST_FUNC void parse_asm_str(CString *astr)
3966 skip('(');
3967 parse_mult_str(astr, "string constant");
3970 /* Parse an asm label and return the token */
3971 static int asm_label_instr(void)
3973 int v;
3974 CString astr;
3976 next();
3977 parse_asm_str(&astr);
3978 skip(')');
3979 #ifdef ASM_DEBUG
3980 printf("asm_alias: \"%s\"\n", (char *)astr.data);
3981 #endif
3982 v = tok_alloc(astr.data, astr.size - 1)->tok;
3983 cstr_free(&astr);
3984 return v;
3987 static void post_type(CType *type, AttributeDef *ad, int storage)
3989 int n, l, t1, arg_size, align;
3990 Sym **plast, *s, *first;
3991 AttributeDef ad1;
3992 CType pt;
3994 if (tok == '(') {
3995 /* function declaration */
3996 next();
3997 l = 0;
3998 first = NULL;
3999 plast = &first;
4000 arg_size = 0;
4001 if (tok != ')') {
4002 for(;;) {
4003 /* read param name and compute offset */
4004 if (l != FUNC_OLD) {
4005 if (!parse_btype(&pt, &ad1)) {
4006 if (l) {
4007 tcc_error("invalid type");
4008 } else {
4009 l = FUNC_OLD;
4010 goto old_proto;
4013 l = FUNC_NEW;
4014 if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
4015 break;
4016 type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT);
4017 if ((pt.t & VT_BTYPE) == VT_VOID)
4018 tcc_error("parameter declared as void");
4019 arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
4020 } else {
4021 old_proto:
4022 n = tok;
4023 if (n < TOK_UIDENT)
4024 expect("identifier");
4025 pt.t = VT_INT;
4026 next();
4028 convert_parameter_type(&pt);
4029 s = sym_push(n | SYM_FIELD, &pt, 0, 0);
4030 *plast = s;
4031 plast = &s->next;
4032 if (tok == ')')
4033 break;
4034 skip(',');
4035 if (l == FUNC_NEW && tok == TOK_DOTS) {
4036 l = FUNC_ELLIPSIS;
4037 next();
4038 break;
4042 /* if no parameters, then old type prototype */
4043 if (l == 0)
4044 l = FUNC_OLD;
4045 skip(')');
4046 /* NOTE: const is ignored in returned type as it has a special
4047 meaning in gcc / C++ */
4048 type->t &= ~VT_CONSTANT;
4049 /* some ancient pre-K&R C allows a function to return an array
4050 and the array brackets to be put after the arguments, such
4051 that "int c()[]" means something like "int[] c()" */
4052 if (tok == '[') {
4053 next();
4054 skip(']'); /* only handle simple "[]" */
4055 type->t |= VT_PTR;
4057 /* we push an anonymous symbol which will contain the function prototype */
4058 ad->a.func_args = arg_size;
4059 s = sym_push(SYM_FIELD, type, 0, l);
4060 s->a = ad->a;
4061 s->next = first;
4062 type->t = VT_FUNC;
4063 type->ref = s;
4064 } else if (tok == '[') {
4065 int saved_nocode_wanted = nocode_wanted;
4066 /* array definition */
4067 next();
4068 if (tok == TOK_RESTRICT1)
4069 next();
4070 n = -1;
4071 t1 = 0;
4072 if (tok != ']') {
4073 if (!local_stack || (storage & VT_STATIC))
4074 vpushi(expr_const());
4075 else {
4076 /* A VLA length (which can only happen with local_stack && !VT_STATIC)
4077 must always be evaluated, even under nocode_wanted,
4078 so that its size slot is initialized (e.g. under sizeof
4079 or typeof). */
4080 nocode_wanted = 0;
4081 gexpr();
4083 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4084 n = vtop->c.i;
4085 if (n < 0)
4086 tcc_error("invalid array size");
4087 } else {
4088 if (!is_integer_btype(vtop->type.t & VT_BTYPE))
4089 tcc_error("size of variable length array should be an integer");
4090 t1 = VT_VLA;
4093 skip(']');
4094 /* parse next post type */
4095 post_type(type, ad, storage);
4096 if (type->t == VT_FUNC)
4097 tcc_error("declaration of an array of functions");
4098 t1 |= type->t & VT_VLA;
4100 if (t1 & VT_VLA) {
4101 loc -= type_size(&int_type, &align);
4102 loc &= -align;
4103 n = loc;
4105 vla_runtime_type_size(type, &align);
4106 gen_op('*');
4107 vset(&int_type, VT_LOCAL|VT_LVAL, n);
4108 vswap();
4109 vstore();
4111 if (n != -1)
4112 vpop();
4113 nocode_wanted = saved_nocode_wanted;
4115 /* we push an anonymous symbol which will contain the array
4116 element type */
4117 s = sym_push(SYM_FIELD, type, 0, n);
4118 type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
4119 type->ref = s;
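/* Declarator suffixes handled above, for reference:

       int f(int a, ...);   // new-style prototype with FUNC_ELLIPSIS
       int g();             // empty parameter list -> FUNC_OLD prototype
       int v[n];            // non-constant n in block scope -> VT_VLA,
                            // its size is kept in a stack slot at 'loc'
*/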
4123 /* Parse a type declaration (except basic type), and return the type
4124 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4125 expected. 'type' should contain the basic type. 'ad' is the
4126 attribute definition of the basic type. It can be modified by
4127 type_decl().
4129 static void type_decl(CType *type, AttributeDef *ad, int *v, int td)
4131 Sym *s;
4132 CType type1, *type2;
4133 int qualifiers, storage;
4135 while (tok == '*') {
4136 qualifiers = 0;
4137 redo:
4138 next();
4139 switch(tok) {
4140 case TOK_CONST1:
4141 case TOK_CONST2:
4142 case TOK_CONST3:
4143 qualifiers |= VT_CONSTANT;
4144 goto redo;
4145 case TOK_VOLATILE1:
4146 case TOK_VOLATILE2:
4147 case TOK_VOLATILE3:
4148 qualifiers |= VT_VOLATILE;
4149 goto redo;
4150 case TOK_RESTRICT1:
4151 case TOK_RESTRICT2:
4152 case TOK_RESTRICT3:
4153 goto redo;
4154 /* XXX: clarify attribute handling */
4155 case TOK_ATTRIBUTE1:
4156 case TOK_ATTRIBUTE2:
4157 parse_attribute(ad);
4158 break;
4160 mk_pointer(type);
4161 type->t |= qualifiers;
4164 /* recursive type */
4165 /* XXX: incorrect if abstract type for functions (e.g. 'int ()') */
4166 type1.t = 0; /* XXX: same as int */
4167 if (tok == '(') {
4168 next();
4169 /* XXX: it is not correct to modify 'ad' at this point, but
4170 the syntax is not clear */
4171 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
4172 parse_attribute(ad);
4173 type_decl(&type1, ad, v, td);
4174 skip(')');
4175 } else {
4176 /* type identifier */
4177 if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
4178 *v = tok;
4179 next();
4180 } else {
4181 if (!(td & TYPE_ABSTRACT))
4182 expect("identifier");
4183 *v = 0;
4186 storage = type->t & VT_STORAGE;
4187 type->t &= ~VT_STORAGE;
4188 post_type(type, ad, storage);
4189 type->t |= storage;
4190 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
4191 parse_attribute(ad);
4193 if (!type1.t)
4194 return;
4195 /* append type at the end of type1 */
4196 type2 = &type1;
4197 for(;;) {
4198 s = type2->ref;
4199 type2 = &s->type;
4200 if (!type2->t) {
4201 *type2 = *type;
4202 break;
4205 *type = type1;
4208 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
4209 ST_FUNC int lvalue_type(int t)
4211 int bt, r;
4212 r = VT_LVAL;
4213 bt = t & VT_BTYPE;
4214 if (bt == VT_BYTE || bt == VT_BOOL)
4215 r |= VT_LVAL_BYTE;
4216 else if (bt == VT_SHORT)
4217 r |= VT_LVAL_SHORT;
4218 else
4219 return r;
4220 if (t & VT_UNSIGNED)
4221 r |= VT_LVAL_UNSIGNED;
4222 return r;
4225 /* indirection with full error checking and bound check */
4226 ST_FUNC void indir(void)
4228 if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
4229 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
4230 return;
4231 expect("pointer");
4233 if (vtop->r & VT_LVAL)
4234 gv(RC_INT);
4235 vtop->type = *pointed_type(&vtop->type);
4236 /* Arrays and functions are never lvalues */
4237 if (!(vtop->type.t & VT_ARRAY) && !(vtop->type.t & VT_VLA)
4238 && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
4239 vtop->r |= lvalue_type(vtop->type.t);
4240 /* if bound checking, the referenced pointer must be checked */
4241 #ifdef CONFIG_TCC_BCHECK
4242 if (tcc_state->do_bounds_check)
4243 vtop->r |= VT_MUSTBOUND;
4244 #endif
4248 /* pass a parameter to a function and do type checking and casting */
4249 static void gfunc_param_typed(Sym *func, Sym *arg)
4251 int func_type;
4252 CType type;
4254 func_type = func->c;
4255 if (func_type == FUNC_OLD ||
4256 (func_type == FUNC_ELLIPSIS && arg == NULL)) {
4257 /* default casting : only need to convert float to double */
4258 if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
4259 type.t = VT_DOUBLE;
4260 gen_cast(&type);
4261 } else if (vtop->type.t & VT_BITFIELD) {
4262 type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
4263 type.ref = vtop->type.ref;
4264 gen_cast(&type);
4266 } else if (arg == NULL) {
4267 tcc_error("too many arguments to function");
4268 } else {
4269 type = arg->type;
4270 type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
4271 gen_assign_cast(&type);
4275 /* parse an expression of the form '(type)' or '(expr)' and return its
4276 type */
4277 static void parse_expr_type(CType *type)
4279 int n;
4280 AttributeDef ad;
4282 skip('(');
4283 if (parse_btype(type, &ad)) {
4284 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4285 } else {
4286 expr_type(type);
4288 skip(')');
4291 static void parse_type(CType *type)
4293 AttributeDef ad;
4294 int n;
4296 if (!parse_btype(type, &ad)) {
4297 expect("type");
4299 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4302 static void vpush_tokc(int t)
4304 CType type;
4305 type.t = t;
4306 type.ref = 0;
4307 vsetc(&type, VT_CONST, &tokc);
4310 ST_FUNC void unary(void)
4312 int n, t, align, size, r, sizeof_caller;
4313 CType type;
4314 Sym *s;
4315 AttributeDef ad;
4317 sizeof_caller = in_sizeof;
4318 in_sizeof = 0;
4319 /* XXX: GCC 2.95.3 does not generate a jump table for this switch,
4320 although it would be better here */
4321 tok_next:
4322 switch(tok) {
4323 case TOK_EXTENSION:
4324 next();
4325 goto tok_next;
4326 case TOK_CINT:
4327 case TOK_CCHAR:
4328 case TOK_LCHAR:
4329 vpushi(tokc.i);
4330 next();
4331 break;
4332 case TOK_CUINT:
4333 vpush_tokc(VT_INT | VT_UNSIGNED);
4334 next();
4335 break;
4336 case TOK_CLLONG:
4337 vpush_tokc(VT_LLONG);
4338 next();
4339 break;
4340 case TOK_CULLONG:
4341 vpush_tokc(VT_LLONG | VT_UNSIGNED);
4342 next();
4343 break;
4344 case TOK_CFLOAT:
4345 vpush_tokc(VT_FLOAT);
4346 next();
4347 break;
4348 case TOK_CDOUBLE:
4349 vpush_tokc(VT_DOUBLE);
4350 next();
4351 break;
4352 case TOK_CLDOUBLE:
4353 vpush_tokc(VT_LDOUBLE);
4354 next();
4355 break;
4356 case TOK___FUNCTION__:
4357 if (!gnu_ext)
4358 goto tok_identifier;
4359 /* fall thru */
4360 case TOK___FUNC__:
4362 void *ptr;
4363 int len;
4364 /* special function name identifier */
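        /* e.g. 'printf("%s\n", __func__);' - the enclosing function's
           name is copied once into the data section as a char array */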
4365 len = strlen(funcname) + 1;
4366 /* generate char[len] type */
4367 type.t = VT_BYTE;
4368 mk_pointer(&type);
4369 type.t |= VT_ARRAY;
4370 type.ref->c = len;
4371 vpush_ref(&type, data_section, data_section->data_offset, len);
4372 ptr = section_ptr_add(data_section, len);
4373 memcpy(ptr, funcname, len);
4374 next();
4376 break;
4377 case TOK_LSTR:
4378 #ifdef TCC_TARGET_PE
4379 t = VT_SHORT | VT_UNSIGNED;
4380 #else
4381 t = VT_INT;
4382 #endif
4383 goto str_init;
4384 case TOK_STR:
4385 /* string parsing */
4386 t = VT_BYTE;
4387 str_init:
4388 if (tcc_state->warn_write_strings)
4389 t |= VT_CONSTANT;
4390 type.t = t;
4391 mk_pointer(&type);
4392 type.t |= VT_ARRAY;
4393 memset(&ad, 0, sizeof(AttributeDef));
4394 decl_initializer_alloc(&type, &ad, VT_CONST, 2, 0, 0);
4395 break;
4396 case '(':
4397 next();
4398 /* cast ? */
4399 if (parse_btype(&type, &ad)) {
4400 type_decl(&type, &ad, &n, TYPE_ABSTRACT);
4401 skip(')');
4402 /* check ISOC99 compound literal */
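        /* e.g. 'int *p = (int []){1, 2, 3};' - the object is allocated
           locally (VT_LOCAL) by default, or globally (VT_CONST) while a
           constant initializer is being parsed */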
4403 if (tok == '{') {
4404 /* data is allocated locally by default */
4405 if (global_expr)
4406 r = VT_CONST;
4407 else
4408 r = VT_LOCAL;
4409 /* all except arrays are lvalues */
4410 if (!(type.t & VT_ARRAY))
4411 r |= lvalue_type(type.t);
4412 memset(&ad, 0, sizeof(AttributeDef));
4413 decl_initializer_alloc(&type, &ad, r, 1, 0, 0);
4414 } else {
4415 if (sizeof_caller) {
4416 vpush(&type);
4417 return;
4419 unary();
4420 gen_cast(&type);
4422 } else if (tok == '{') {
4423 int saved_nocode_wanted = nocode_wanted;
4424 if (const_wanted)
4425 tcc_error("expected constant");
4426 /* save all registers */
4427 save_regs(0);
4428 /* statement expression: we do not accept break/continue
4429 inside as GCC does. We do retain the nocode_wanted state,
4430 as statement expressions can't ever be entered from the
4431 outside, so any reactivation of code emission (from labels
4432 or loop heads) can be disabled again after the end of it. */
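        /* GNU extension, e.g. 'int x = ({ int t = f(); t * 2; });' -
           the value of the last expression statement is the result */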
4433 block(NULL, NULL, 1);
4434 nocode_wanted = saved_nocode_wanted;
4435 skip(')');
4436 } else {
4437 gexpr();
4438 skip(')');
4440 break;
4441 case '*':
4442 next();
4443 unary();
4444 indir();
4445 break;
4446 case '&':
4447 next();
4448 unary();
4449 /* function names must be treated as function pointers,
4450 except for unary '&' and sizeof. Since we consider that
4451 functions are not lvalues, we only have to handle it
4452 there and in function calls. */
4453 /* arrays can also be used although they are not lvalues */
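        /* e.g. 'int (*fp)(void) = &f;' and 'int (*pa)[4] = &arr;' must
           both be accepted here although 'f' and 'arr' are not lvalues */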
4454 if ((vtop->type.t & VT_BTYPE) != VT_FUNC &&
4455 !(vtop->type.t & VT_ARRAY) && !(vtop->type.t & VT_LLOCAL))
4456 test_lvalue();
4457 mk_pointer(&vtop->type);
4458 gaddrof();
4459 break;
4460 case '!':
4461 next();
4462 unary();
4463 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4464 CType boolean;
4465 boolean.t = VT_BOOL;
4466 gen_cast(&boolean);
4467 vtop->c.i = !vtop->c.i;
4468 } else if ((vtop->r & VT_VALMASK) == VT_CMP)
4469 vtop->c.i ^= 1;
4470 else {
4471 save_regs(1);
4472 vseti(VT_JMP, gvtst(1, 0));
4474 break;
4475 case '~':
4476 next();
4477 unary();
4478 vpushi(-1);
4479 gen_op('^');
4480 break;
4481 case '+':
4482 next();
4483 unary();
4484 if ((vtop->type.t & VT_BTYPE) == VT_PTR)
4485 tcc_error("pointer not accepted for unary plus");
4486 /* In order to force cast, we add zero, except for floating point
4487 where we really need a noop (otherwise -0.0 will be transformed
4488 into +0.0). */
4489 if (!is_float(vtop->type.t)) {
4490 vpushi(0);
4491 gen_op('+');
4493 break;
4494 case TOK_SIZEOF:
4495 case TOK_ALIGNOF1:
4496 case TOK_ALIGNOF2:
4497 t = tok;
4498 next();
4499 in_sizeof++;
4500 unary_type(&type); // unary() resets in_sizeof to 0
4501 size = type_size(&type, &align);
4502 if (t == TOK_SIZEOF) {
4503 if (!(type.t & VT_VLA)) {
4504 if (size < 0)
4505 tcc_error("sizeof applied to an incomplete type");
4506 vpushs(size);
4507 } else {
4508 vla_runtime_type_size(&type, &align);
4510 } else {
4511 vpushs(align);
4513 vtop->type.t |= VT_UNSIGNED;
4514 break;
4516 case TOK_builtin_expect:
4518 /* __builtin_expect is a no-op for now */
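        /* e.g. 'if (__builtin_expect(n == 0, 0)) ...' - only the first
           argument is kept; the expected value is parsed and discarded */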
4519 next();
4520 skip('(');
4521 expr_eq();
4522 skip(',');
4523 nocode_wanted++;
4524 expr_lor_const();
4525 vpop();
4526 nocode_wanted--;
4527 skip(')');
4529 break;
4530 case TOK_builtin_types_compatible_p:
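        /* e.g. '__builtin_types_compatible_p(int, const int)' yields 1:
           top-level qualifiers are stripped before the comparison below */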
4532 CType type1, type2;
4533 next();
4534 skip('(');
4535 parse_type(&type1);
4536 skip(',');
4537 parse_type(&type2);
4538 skip(')');
4539 type1.t &= ~(VT_CONSTANT | VT_VOLATILE);
4540 type2.t &= ~(VT_CONSTANT | VT_VOLATILE);
4541 vpushi(is_compatible_types(&type1, &type2));
4543 break;
4544 case TOK_builtin_choose_expr:
4546 int64_t c;
4547 next();
4548 skip('(');
4549 c = expr_const64();
4550 skip(',');
4551 if (!c) {
4552 nocode_wanted++;
4554 expr_eq();
4555 if (!c) {
4556 vpop();
4557 nocode_wanted--;
4559 skip(',');
4560 if (c) {
4561 nocode_wanted++;
4563 expr_eq();
4564 if (c) {
4565 vpop();
4566 nocode_wanted--;
4568 skip(')');
4570 break;
4571 case TOK_builtin_constant_p:
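        /* e.g. '__builtin_constant_p(2 * 21)' gives 1 while
           '__builtin_constant_p(x)' gives 0 for a non-constant x; the
           argument is evaluated with code generation disabled */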
4573 int res;
4574 next();
4575 skip('(');
4576 nocode_wanted++;
4577 gexpr();
4578 res = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
4579 vpop();
4580 nocode_wanted--;
4581 skip(')');
4582 vpushi(res);
4584 break;
4585 case TOK_builtin_frame_address:
4586 case TOK_builtin_return_address:
4588 int tok1 = tok;
4589 int level;
4590 CType type;
4591 next();
4592 skip('(');
4593 if (tok != TOK_CINT) {
4594 tcc_error("%s only takes positive integers",
4595 tok1 == TOK_builtin_return_address ?
4596 "__builtin_return_address" :
4597 "__builtin_frame_address");
4599 level = (uint32_t)tokc.i;
4600 next();
4601 skip(')');
4602 type.t = VT_VOID;
4603 mk_pointer(&type);
4604 vset(&type, VT_LOCAL, 0); /* local frame */
4605 while (level--) {
4606 mk_pointer(&vtop->type);
4607 indir(); /* -> parent frame */
4609 if (tok1 == TOK_builtin_return_address) {
4610 // assume return address is just above frame pointer on stack
4611 vpushi(PTR_SIZE);
4612 gen_op('+');
4613 mk_pointer(&vtop->type);
4614 indir();
4617 break;
4618 #ifdef TCC_TARGET_X86_64
4619 #ifdef TCC_TARGET_PE
4620 case TOK_builtin_va_start:
4622 next();
4623 skip('(');
4624 expr_eq();
4625 skip(',');
4626 expr_eq();
4627 skip(')');
4628 if ((vtop->r & VT_VALMASK) != VT_LOCAL)
4629 tcc_error("__builtin_va_start expects a local variable");
4630 vtop->r &= ~(VT_LVAL | VT_REF);
4631 vtop->type = char_pointer_type;
4632 vtop->c.i += 8;
4633 vstore();
4635 break;
4636 #else
4637 case TOK_builtin_va_arg_types:
4639 CType type;
4640 next();
4641 skip('(');
4642 parse_type(&type);
4643 skip(')');
4644 vpushi(classify_x86_64_va_arg(&type));
4646 break;
4647 #endif
4648 #endif
4650 #ifdef TCC_TARGET_ARM64
4651 case TOK___va_start: {
4652 next();
4653 skip('(');
4654 expr_eq();
4655 skip(',');
4656 expr_eq();
4657 skip(')');
4658 //xx check types
4659 gen_va_start();
4660 vpushi(0);
4661 vtop->type.t = VT_VOID;
4662 break;
4664 case TOK___va_arg: {
4665 CType type;
4666 next();
4667 skip('(');
4668 expr_eq();
4669 skip(',');
4670 parse_type(&type);
4671 skip(')');
4672 //xx check types
4673 gen_va_arg(&type);
4674 vtop->type = type;
4675 break;
4677 case TOK___arm64_clear_cache: {
4678 next();
4679 skip('(');
4680 expr_eq();
4681 skip(',');
4682 expr_eq();
4683 skip(')');
4684 gen_clear_cache();
4685 vpushi(0);
4686 vtop->type.t = VT_VOID;
4687 break;
4689 #endif
4690 /* pre operations */
4691 case TOK_INC:
4692 case TOK_DEC:
4693 t = tok;
4694 next();
4695 unary();
4696 inc(0, t);
4697 break;
4698 case '-':
4699 next();
4700 unary();
4701 t = vtop->type.t & VT_BTYPE;
4702 if (is_float(t)) {
4703 /* In IEEE negate(x) isn't subtract(0,x), but rather
4704 subtract(-0, x). */
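        /* e.g. '-x' with x == 0.0 must yield -0.0, whereas '0.0 - x'
           would yield +0.0, hence the -0.0 constant pushed below */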
4705 vpush(&vtop->type);
4706 if (t == VT_FLOAT)
4707 vtop->c.f = -1.0 * 0.0;
4708 else if (t == VT_DOUBLE)
4709 vtop->c.d = -1.0 * 0.0;
4710 else
4711 vtop->c.ld = -1.0 * 0.0;
4712 } else
4713 vpushi(0);
4714 vswap();
4715 gen_op('-');
4716 break;
4717 case TOK_LAND:
4718 if (!gnu_ext)
4719 goto tok_identifier;
4720 next();
4721 /* allow taking the address of a label */
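        /* GNU computed goto, e.g. 'void *p = &&out; goto *p; out: ;' -
           the value pushed here has type 'void *' */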
4722 if (tok < TOK_UIDENT)
4723 expect("label identifier");
4724 s = label_find(tok);
4725 if (!s) {
4726 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
4727 } else {
4728 if (s->r == LABEL_DECLARED)
4729 s->r = LABEL_FORWARD;
4731 if (!s->type.t) {
4732 s->type.t = VT_VOID;
4733 mk_pointer(&s->type);
4734 s->type.t |= VT_STATIC;
4736 vpushsym(&s->type, s);
4737 next();
4738 break;
4740 // special qnan , snan and infinity values
4741 case TOK___NAN__:
4742 vpush64(VT_DOUBLE, 0x7ff8000000000000ULL);
4743 next();
4744 break;
4745 case TOK___SNAN__:
4746 vpush64(VT_DOUBLE, 0x7ff0000000000001ULL);
4747 next();
4748 break;
4749 case TOK___INF__:
4750 vpush64(VT_DOUBLE, 0x7ff0000000000000ULL);
4751 next();
4752 break;
4754 default:
4755 tok_identifier:
4756 t = tok;
4757 next();
4758 if (t < TOK_UIDENT)
4759 expect("identifier");
4760 s = sym_find(t);
4761 if (!s) {
4762 const char *name = get_tok_str(t, NULL);
4763 if (tok != '(')
4764 tcc_error("'%s' undeclared", name);
4765 /* for simple function calls, we tolerate an undeclared
4766 external reference to an int() function */
4767 if (tcc_state->warn_implicit_function_declaration
4768 #ifdef TCC_TARGET_PE
4769 /* people must be warned about using undeclared WINAPI functions
4770 (which usually start with an uppercase letter) */
4771 || (name[0] >= 'A' && name[0] <= 'Z')
4772 #endif
4774 tcc_warning("implicit declaration of function '%s'", name);
4775 s = external_global_sym(t, &func_old_type, 0);
4778 r = s->r;
4779 /* A symbol that has a register is a local register variable,
4780 which starts out as VT_LOCAL value. */
4781 if ((r & VT_VALMASK) < VT_CONST)
4782 r = (r & ~VT_VALMASK) | VT_LOCAL;
4784 vset(&s->type, r, s->c);
4785 /* Point to s as backpointer (even without r&VT_SYM).
4786 Will be used by at least the x86 inline asm parser for
4787 regvars. */
4788 vtop->sym = s;
4789 if (vtop->r & VT_SYM) {
4790 vtop->c.i = 0;
4792 break;
4795 /* post operations */
4796 while (1) {
4797 if (tok == TOK_INC || tok == TOK_DEC) {
4798 inc(1, tok);
4799 next();
4800 } else if (tok == '.' || tok == TOK_ARROW || tok == TOK_CDOUBLE) {
4801 int qualifiers;
4802 /* field */
4803 if (tok == TOK_ARROW)
4804 indir();
4805 qualifiers = vtop->type.t & (VT_CONSTANT | VT_VOLATILE);
4806 test_lvalue();
4807 gaddrof();
4808 /* expect pointer on structure */
4809 if ((vtop->type.t & VT_BTYPE) != VT_STRUCT)
4810 expect("struct or union");
4811 if (tok == TOK_CDOUBLE)
4812 expect("field name");
4813 next();
4814 if (tok == TOK_CINT || tok == TOK_CUINT)
4815 expect("field name");
4816 s = find_field(&vtop->type, tok);
4817 if (!s)
4818 tcc_error("field not found: %s", get_tok_str(tok & ~SYM_FIELD, &tokc));
4819 /* add field offset to pointer */
4820 vtop->type = char_pointer_type; /* change type to 'char *' */
4821 vpushi(s->c);
4822 gen_op('+');
4823 /* change type to field type, and set to lvalue */
4824 vtop->type = s->type;
4825 vtop->type.t |= qualifiers;
4826 /* an array is never an lvalue */
4827 if (!(vtop->type.t & VT_ARRAY)) {
4828 vtop->r |= lvalue_type(vtop->type.t);
4829 #ifdef CONFIG_TCC_BCHECK
4830 /* if bound checking, the referenced pointer must be checked */
4831 if (tcc_state->do_bounds_check && (vtop->r & VT_VALMASK) != VT_LOCAL)
4832 vtop->r |= VT_MUSTBOUND;
4833 #endif
4835 next();
4836 } else if (tok == '[') {
4837 next();
4838 gexpr();
4839 gen_op('+');
4840 indir();
4841 skip(']');
4842 } else if (tok == '(') {
4843 SValue ret;
4844 Sym *sa;
4845 int nb_args, ret_nregs, ret_align, regsize, variadic;
4847 /* function call */
4848 if ((vtop->type.t & VT_BTYPE) != VT_FUNC) {
4849 /* pointer test (no array accepted) */
4850 if ((vtop->type.t & (VT_BTYPE | VT_ARRAY)) == VT_PTR) {
4851 vtop->type = *pointed_type(&vtop->type);
4852 if ((vtop->type.t & VT_BTYPE) != VT_FUNC)
4853 goto error_func;
4854 } else {
4855 error_func:
4856 expect("function pointer");
4858 } else {
4859 vtop->r &= ~VT_LVAL; /* no lvalue */
4861 /* get return type */
4862 s = vtop->type.ref;
4863 next();
4864 sa = s->next; /* first parameter */
4865 nb_args = regsize = 0;
4866 ret.r2 = VT_CONST;
4867 /* compute first implicit argument if a structure is returned */
4868 if ((s->type.t & VT_BTYPE) == VT_STRUCT) {
4869 variadic = (s->c == FUNC_ELLIPSIS);
4870 ret_nregs = gfunc_sret(&s->type, variadic, &ret.type,
4871 &ret_align, &regsize);
4872 if (!ret_nregs) {
4873 /* get some space for the returned structure */
4874 size = type_size(&s->type, &align);
4875 #ifdef TCC_TARGET_ARM64
4876 /* On arm64, a small struct is returned in registers.
4877 It is much easier to write it to memory if we know
4878 that we are allowed to write some extra bytes, so
4879 round the allocated space up to a power of 2: */
4880 if (size < 16)
4881 while (size & (size - 1))
4882 size = (size | (size - 1)) + 1;
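                    /* e.g. a 12 byte struct is given 16 bytes of stack
                       space by this rounding */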
4883 #endif
4884 loc = (loc - size) & -align;
4885 ret.type = s->type;
4886 ret.r = VT_LOCAL | VT_LVAL;
4887 /* pass it as 'int' to avoid structure arg passing
4888 problems */
4889 vseti(VT_LOCAL, loc);
4890 ret.c = vtop->c;
4891 nb_args++;
4893 } else {
4894 ret_nregs = 1;
4895 ret.type = s->type;
4898 if (ret_nregs) {
4899 /* return in register */
4900 if (is_float(ret.type.t)) {
4901 ret.r = reg_fret(ret.type.t);
4902 #ifdef TCC_TARGET_X86_64
4903 if ((ret.type.t & VT_BTYPE) == VT_QFLOAT)
4904 ret.r2 = REG_QRET;
4905 #endif
4906 } else {
4907 #ifndef TCC_TARGET_ARM64
4908 #ifdef TCC_TARGET_X86_64
4909 if ((ret.type.t & VT_BTYPE) == VT_QLONG)
4910 #else
4911 if ((ret.type.t & VT_BTYPE) == VT_LLONG)
4912 #endif
4913 ret.r2 = REG_LRET;
4914 #endif
4915 ret.r = REG_IRET;
4917 ret.c.i = 0;
4919 if (tok != ')') {
4920 for(;;) {
4921 expr_eq();
4922 gfunc_param_typed(s, sa);
4923 nb_args++;
4924 if (sa)
4925 sa = sa->next;
4926 if (tok == ')')
4927 break;
4928 skip(',');
4931 if (sa)
4932 tcc_error("too few arguments to function");
4933 skip(')');
4934 gfunc_call(nb_args);
4936 /* return value */
4937 for (r = ret.r + ret_nregs + !ret_nregs; r-- > ret.r;) {
4938 vsetc(&ret.type, r, &ret.c);
4939 vtop->r2 = ret.r2; /* Loop only happens when r2 is VT_CONST */
4942 /* handle packed struct return */
4943 if (((s->type.t & VT_BTYPE) == VT_STRUCT) && ret_nregs) {
4944 int addr, offset;
4946 size = type_size(&s->type, &align);
4947 /* We're writing whole regs often, make sure there's enough
4948 space. Assume register size is a power of 2. */
4949 if (regsize > align)
4950 align = regsize;
4951 loc = (loc - size) & -align;
4952 addr = loc;
4953 offset = 0;
4954 for (;;) {
4955 vset(&ret.type, VT_LOCAL | VT_LVAL, addr + offset);
4956 vswap();
4957 vstore();
4958 vtop--;
4959 if (--ret_nregs == 0)
4960 break;
4961 offset += regsize;
4963 vset(&s->type, VT_LOCAL | VT_LVAL, addr);
4965 } else {
4966 break;
4971 ST_FUNC void expr_prod(void)
4973 int t;
4975 unary();
4976 while (tok == '*' || tok == '/' || tok == '%') {
4977 t = tok;
4978 next();
4979 unary();
4980 gen_op(t);
4984 ST_FUNC void expr_sum(void)
4986 int t;
4988 expr_prod();
4989 while (tok == '+' || tok == '-') {
4990 t = tok;
4991 next();
4992 expr_prod();
4993 gen_op(t);
4997 static void expr_shift(void)
4999 int t;
5001 expr_sum();
5002 while (tok == TOK_SHL || tok == TOK_SAR) {
5003 t = tok;
5004 next();
5005 expr_sum();
5006 gen_op(t);
5010 static void expr_cmp(void)
5012 int t;
5014 expr_shift();
5015 while ((tok >= TOK_ULE && tok <= TOK_GT) ||
5016 tok == TOK_ULT || tok == TOK_UGE) {
5017 t = tok;
5018 next();
5019 expr_shift();
5020 gen_op(t);
5024 static void expr_cmpeq(void)
5026 int t;
5028 expr_cmp();
5029 while (tok == TOK_EQ || tok == TOK_NE) {
5030 t = tok;
5031 next();
5032 expr_cmp();
5033 gen_op(t);
5037 static void expr_and(void)
5039 expr_cmpeq();
5040 while (tok == '&') {
5041 next();
5042 expr_cmpeq();
5043 gen_op('&');
5047 static void expr_xor(void)
5049 expr_and();
5050 while (tok == '^') {
5051 next();
5052 expr_and();
5053 gen_op('^');
5057 static void expr_or(void)
5059 expr_xor();
5060 while (tok == '|') {
5061 next();
5062 expr_xor();
5063 gen_op('|');
5067 /* XXX: fix this mess */
5068 static void expr_land_const(void)
5070 expr_or();
5071 while (tok == TOK_LAND) {
5072 next();
5073 expr_or();
5074 gen_op(TOK_LAND);
5077 static void expr_lor_const(void)
5079 expr_land_const();
5080 while (tok == TOK_LOR) {
5081 next();
5082 expr_land_const();
5083 gen_op(TOK_LOR);
5087 static void expr_land(void)
5089 expr_or();
5090 if (tok == TOK_LAND) {
5091 int t = 0;
5092 for(;;) {
5093 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5094 CType ctb;
5095 ctb.t = VT_BOOL;
5096 gen_cast(&ctb);
5097 if (vtop->c.i) {
5098 vpop();
5099 } else {
5100 nocode_wanted++;
5101 while (tok == TOK_LAND) {
5102 next();
5103 expr_or();
5104 vpop();
5106 nocode_wanted--;
5107 if (t)
5108 gsym(t);
5109 gen_cast(&int_type);
5110 break;
5112 } else {
5113 if (!t)
5114 save_regs(1);
5115 t = gvtst(1, t);
5117 if (tok != TOK_LAND) {
5118 if (t)
5119 vseti(VT_JMPI, t);
5120 else
5121 vpushi(1);
5122 break;
5124 next();
5125 expr_or();
5130 static void expr_lor(void)
5132 expr_land();
5133 if (tok == TOK_LOR) {
5134 int t = 0;
5135 for(;;) {
5136 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5137 CType ctb;
5138 ctb.t = VT_BOOL;
5139 gen_cast(&ctb);
5140 if (!vtop->c.i) {
5141 vpop();
5142 } else {
5143 nocode_wanted++;
5144 while (tok == TOK_LOR) {
5145 next();
5146 expr_land();
5147 vpop();
5149 nocode_wanted--;
5150 if (t)
5151 gsym(t);
5152 gen_cast(&int_type);
5153 break;
5155 } else {
5156 if (!t)
5157 save_regs(1);
5158 t = gvtst(0, t);
5160 if (tok != TOK_LOR) {
5161 if (t)
5162 vseti(VT_JMP, t);
5163 else
5164 vpushi(0);
5165 break;
5167 next();
5168 expr_land();
5173 /* Assuming vtop is a value used in a conditional context
5174 (i.e. compared with zero) return 0 if it's false, 1 if
5175 true and -1 if it can't be statically determined. */
5176 static int condition_3way(void)
5178 int c = -1;
5179 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
5180 (!(vtop->r & VT_SYM) ||
5181 !(vtop->sym->type.t & VT_WEAK))) {
5182 CType boolean;
5183 boolean.t = VT_BOOL;
5184 vdup();
5185 gen_cast(&boolean);
5186 c = vtop->c.i;
5187 vpop();
5189 return c;
5192 static void expr_cond(void)
5194 int tt, u, r1, r2, rc, t1, t2, bt1, bt2, islv, c, g;
5195 SValue sv;
5196 CType type, type1, type2;
5198 expr_lor();
5199 if (tok == '?') {
5200 next();
5201 c = condition_3way();
5202 g = (tok == ':' && gnu_ext);
5203 if (c < 0) {
5204 /* needed to avoid having different registers saved in
5205 each branch */
5206 if (is_float(vtop->type.t)) {
5207 rc = RC_FLOAT;
5208 #ifdef TCC_TARGET_X86_64
5209 if ((vtop->type.t & VT_BTYPE) == VT_LDOUBLE) {
5210 rc = RC_ST0;
5212 #endif
5213 } else
5214 rc = RC_INT;
5215 gv(rc);
5216 save_regs(1);
5217 if (g)
5218 gv_dup();
5219 tt = gvtst(1, 0);
5221 } else {
5222 if (!g)
5223 vpop();
5224 tt = 0;
5227 if (1) {
5228 if (c == 0)
5229 nocode_wanted++;
5230 if (!g)
5231 gexpr();
5233 type1 = vtop->type;
5234 sv = *vtop; /* save value to handle it later */
5235 vtop--; /* no vpop so that FP stack is not flushed */
5236 skip(':');
5238 u = 0;
5239 if (c < 0)
5240 u = gjmp(0);
5241 gsym(tt);
5243 if (c == 0)
5244 nocode_wanted--;
5245 if (c == 1)
5246 nocode_wanted++;
5247 expr_cond();
5248 if (c == 1)
5249 nocode_wanted--;
5251 type2 = vtop->type;
5252 t1 = type1.t;
5253 bt1 = t1 & VT_BTYPE;
5254 t2 = type2.t;
5255 bt2 = t2 & VT_BTYPE;
5256 /* cast operands to correct type according to ISOC rules */
5257 if (is_float(bt1) || is_float(bt2)) {
5258 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
5259 type.t = VT_LDOUBLE;
5261 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
5262 type.t = VT_DOUBLE;
5263 } else {
5264 type.t = VT_FLOAT;
5266 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
5267 /* cast to biggest op */
5268 type.t = VT_LLONG;
5269 /* convert to unsigned if it does not fit in a long long */
5270 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED) ||
5271 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED))
5272 type.t |= VT_UNSIGNED;
5273 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
5274 /* If one is a null ptr constant the result type
5275 is the other. */
5276 if (is_null_pointer (vtop))
5277 type = type1;
5278 else if (is_null_pointer (&sv))
5279 type = type2;
5280 /* XXX: test pointer compatibility, C99 has more elaborate
5281 rules here. */
5282 else
5283 type = type1;
5284 } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
5285 /* XXX: test function pointer compatibility */
5286 type = bt1 == VT_FUNC ? type1 : type2;
5287 } else if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
5288 /* XXX: test structure compatibility */
5289 type = bt1 == VT_STRUCT ? type1 : type2;
5290 } else if (bt1 == VT_VOID || bt2 == VT_VOID) {
5291 /* NOTE: as an extension, we accept void on only one side */
5292 type.t = VT_VOID;
5293 } else {
5294 /* integer operations */
5295 type.t = VT_INT;
5296 /* convert to unsigned if it does not fit in an integer */
5297 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED) ||
5298 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED))
5299 type.t |= VT_UNSIGNED;
5301 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
5302 that `(expr ? a : b).mem` does not error with "lvalue expected" */
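    /* e.g. 'int v = (flag ? s1 : s2).mem;' needs an addressable struct
       value, so both branches are turned into pointers and the result
       is dereferenced again at the end */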
5303 islv = (vtop->r & VT_LVAL) && (sv.r & VT_LVAL) && VT_STRUCT == (type.t & VT_BTYPE);
5304 islv &= c < 0;
5306 /* now we convert second operand */
5307 if (c != 1) {
5308 gen_cast(&type);
5309 if (islv) {
5310 mk_pointer(&vtop->type);
5311 gaddrof();
5312 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5313 gaddrof();
5316 rc = RC_INT;
5317 if (is_float(type.t)) {
5318 rc = RC_FLOAT;
5319 #ifdef TCC_TARGET_X86_64
5320 if ((type.t & VT_BTYPE) == VT_LDOUBLE) {
5321 rc = RC_ST0;
5323 #endif
5324 } else if ((type.t & VT_BTYPE) == VT_LLONG) {
5325 /* for long longs, we use fixed registers to avoid having
5326 to handle a complicated move */
5327 rc = RC_IRET;
5330 tt = r2 = 0;
5331 if (c < 0) {
5332 r2 = gv(rc);
5333 tt = gjmp(0);
5335 gsym(u);
5337 /* this is horrible, but we must also convert first
5338 operand */
5339 if (c != 0) {
5340 *vtop = sv;
5341 gen_cast(&type);
5342 if (islv) {
5343 mk_pointer(&vtop->type);
5344 gaddrof();
5345 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5346 gaddrof();
5349 if (c < 0) {
5350 r1 = gv(rc);
5351 move_reg(r2, r1, type.t);
5352 vtop->r = r2;
5353 gsym(tt);
5354 if (islv)
5355 indir();
5361 static void expr_eq(void)
5363 int t;
5365 expr_cond();
5366 if (tok == '=' ||
5367 (tok >= TOK_A_MOD && tok <= TOK_A_DIV) ||
5368 tok == TOK_A_XOR || tok == TOK_A_OR ||
5369 tok == TOK_A_SHL || tok == TOK_A_SAR) {
5370 test_lvalue();
5371 t = tok;
5372 next();
5373 if (t == '=') {
5374 expr_eq();
5375 } else {
5376 vdup();
5377 expr_eq();
5378 gen_op(t & 0x7f);
5380 vstore();
5384 ST_FUNC void gexpr(void)
5386 while (1) {
5387 expr_eq();
5388 if (tok != ',')
5389 break;
5390 vpop();
5391 next();
5395 /* parse an expression and return its type without any side effect. */
5396 static void expr_type(CType *type)
5399 nocode_wanted++;
5400 gexpr();
5401 *type = vtop->type;
5402 vpop();
5403 nocode_wanted--;
5406 /* parse a unary expression and return its type without any side
5407 effect. */
5408 static void unary_type(CType *type)
5410 nocode_wanted++;
5411 unary();
5412 *type = vtop->type;
5413 vpop();
5414 nocode_wanted--;
5417 /* parse a constant expression and return value in vtop. */
5418 static void expr_const1(void)
5420 const_wanted++;
5421 expr_cond();
5422 const_wanted--;
5425 /* parse an integer constant and return its value. */
5426 static inline int64_t expr_const64(void)
5428 int64_t c;
5429 expr_const1();
5430 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
5431 expect("constant expression");
5432 c = vtop->c.i;
5433 vpop();
5434 return c;
5437 /* parse an integer constant and return its value.
5438 Complain if it doesn't fit 32bit (signed or unsigned). */
5439 ST_FUNC int expr_const(void)
5441 int c;
5442 int64_t wc = expr_const64();
5443 c = wc;
5444 if (c != wc && (unsigned)c != wc)
5445 tcc_error("constant exceeds 32 bit");
5446 return c;
5449 /* return the label token if current token is a label, otherwise
5450 return zero */
5451 static int is_label(void)
5453 int last_tok;
5455 /* fast test first */
5456 if (tok < TOK_UIDENT)
5457 return 0;
5458 /* no need to save tokc because tok is an identifier */
5459 last_tok = tok;
5460 next();
5461 if (tok == ':') {
5462 next();
5463 return last_tok;
5464 } else {
5465 unget_tok(last_tok);
5466 return 0;
5470 static void label_or_decl(int l)
5472 int last_tok;
5474 /* fast test first */
5475 if (tok >= TOK_UIDENT)
5477 /* no need to save tokc because tok is an identifier */
5478 last_tok = tok;
5479 next();
5480 if (tok == ':') {
5481 unget_tok(last_tok);
5482 return;
5484 unget_tok(last_tok);
5486 decl(l);
5489 #ifndef TCC_TARGET_ARM64
5490 static void gfunc_return(CType *func_type)
5492 if ((func_type->t & VT_BTYPE) == VT_STRUCT) {
5493 CType type, ret_type;
5494 int ret_align, ret_nregs, regsize;
5495 ret_nregs = gfunc_sret(func_type, func_var, &ret_type,
5496 &ret_align, &regsize);
5497 if (0 == ret_nregs) {
5498 /* if returning structure, must copy it to implicit
5499 first pointer arg location */
5500 type = *func_type;
5501 mk_pointer(&type);
5502 vset(&type, VT_LOCAL | VT_LVAL, func_vc);
5503 indir();
5504 vswap();
5505 /* copy structure value to pointer */
5506 vstore();
5507 } else {
5508 /* returning structure packed into registers */
5509 int r, size, addr, align;
5510 size = type_size(func_type,&align);
5511 if ((vtop->r != (VT_LOCAL | VT_LVAL) ||
5512 (vtop->c.i & (ret_align-1)))
5513 && (align & (ret_align-1))) {
5514 loc = (loc - size) & -ret_align;
5515 addr = loc;
5516 type = *func_type;
5517 vset(&type, VT_LOCAL | VT_LVAL, addr);
5518 vswap();
5519 vstore();
5520 vpop();
5521 vset(&ret_type, VT_LOCAL | VT_LVAL, addr);
5523 vtop->type = ret_type;
5524 if (is_float(ret_type.t))
5525 r = rc_fret(ret_type.t);
5526 else
5527 r = RC_IRET;
5529 if (ret_nregs == 1)
5530 gv(r);
5531 else {
5532 for (;;) {
5533 vdup();
5534 gv(r);
5535 vpop();
5536 if (--ret_nregs == 0)
5537 break;
5538 /* We assume that when a structure is returned in multiple
5539 registers, their classes are consecutive values of the
5540 sequence s(n) = 2^n */
5541 r <<= 1;
5542 vtop->c.i += regsize;
5546 } else if (is_float(func_type->t)) {
5547 gv(rc_fret(func_type->t));
5548 } else {
5549 gv(RC_IRET);
5551 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
5553 #endif
5555 static int case_cmp(const void *pa, const void *pb)
5557 int64_t a = (*(struct case_t**) pa)->v1;
5558 int64_t b = (*(struct case_t**) pb)->v1;
5559 return a < b ? -1 : a > b;
5562 static void gcase(struct case_t **base, int len, int *bsym)
5564 struct case_t *p;
5565 int e;
5566 int ll = (vtop->type.t & VT_BTYPE) == VT_LLONG;
5567 gv(RC_INT);
5568 while (len > 4) {
5569 /* binary search */
5570 p = base[len/2];
5571 vdup();
5572 if (ll)
5573 vpushll(p->v2);
5574 else
5575 vpushi(p->v2);
5576 gen_op(TOK_LE);
5577 e = gtst(1, 0);
5578 vdup();
5579 if (ll)
5580 vpushll(p->v1);
5581 else
5582 vpushi(p->v1);
5583 gen_op(TOK_GE);
5584 gtst_addr(0, p->sym); /* v1 <= x <= v2 */
5585 /* x < v1 */
5586 gcase(base, len/2, bsym);
5587 if (cur_switch->def_sym)
5588 gjmp_addr(cur_switch->def_sym);
5589 else
5590 *bsym = gjmp(*bsym);
5591 /* x > v2 */
5592 gsym(e);
5593 e = len/2 + 1;
5594 base += e; len -= e;
5596 /* linear scan */
5597 while (len--) {
5598 p = *base++;
5599 vdup();
5600 if (ll)
5601 vpushll(p->v2);
5602 else
5603 vpushi(p->v2);
5604 if (p->v1 == p->v2) {
5605 gen_op(TOK_EQ);
5606 gtst_addr(0, p->sym);
5607 } else {
5608 gen_op(TOK_LE);
5609 e = gtst(1, 0);
5610 vdup();
5611 if (ll)
5612 vpushll(p->v1);
5613 else
5614 vpushi(p->v1);
5615 gen_op(TOK_GE);
5616 gtst_addr(0, p->sym);
5617 gsym(e);
5622 static void block(int *bsym, int *csym, int is_expr)
5624 int a, b, c, d, cond;
5625 Sym *s;
5627 /* generate line number info */
5628 if (tcc_state->do_debug)
5629 tcc_debug_line(tcc_state);
5631 if (is_expr) {
5632 /* default return value is (void) */
5633 vpushi(0);
5634 vtop->type.t = VT_VOID;
5637 if (tok == TOK_IF) {
5638 /* if test */
5639 int saved_nocode_wanted = nocode_wanted;
5640 next();
5641 skip('(');
5642 gexpr();
5643 skip(')');
5644 cond = condition_3way();
5645 if (cond == 1)
5646 a = 0, vpop();
5647 else
5648 a = gvtst(1, 0);
5649 if (cond == 0)
5650 nocode_wanted |= 0x20000000;
5651 block(bsym, csym, 0);
5652 if (cond != 1)
5653 nocode_wanted = saved_nocode_wanted;
5654 c = tok;
5655 if (c == TOK_ELSE) {
5656 next();
5657 d = gjmp(0);
5658 gsym(a);
5659 if (cond == 1)
5660 nocode_wanted |= 0x20000000;
5661 block(bsym, csym, 0);
5662 gsym(d); /* patch else jmp */
5663 if (cond != 0)
5664 nocode_wanted = saved_nocode_wanted;
5665 } else
5666 gsym(a);
5667 } else if (tok == TOK_WHILE) {
5668 int saved_nocode_wanted;
5669 nocode_wanted &= ~0x20000000;
5670 next();
5671 d = ind;
5672 vla_sp_restore();
5673 skip('(');
5674 gexpr();
5675 skip(')');
5676 a = gvtst(1, 0);
5677 b = 0;
5678 ++local_scope;
5679 saved_nocode_wanted = nocode_wanted;
5680 block(&a, &b, 0);
5681 nocode_wanted = saved_nocode_wanted;
5682 --local_scope;
5683 gjmp_addr(d);
5684 gsym(a);
5685 gsym_addr(b, d);
5686 } else if (tok == '{') {
5687 Sym *llabel;
5688 int block_vla_sp_loc = vla_sp_loc, saved_vlas_in_scope = vlas_in_scope;
5690 next();
5691 /* record local declaration stack position */
5692 s = local_stack;
5693 llabel = local_label_stack;
5694 ++local_scope;
5696 /* handle local labels declarations */
5697 if (tok == TOK_LABEL) {
5698 next();
5699 for(;;) {
5700 if (tok < TOK_UIDENT)
5701 expect("label identifier");
5702 label_push(&local_label_stack, tok, LABEL_DECLARED);
5703 next();
5704 if (tok == ',') {
5705 next();
5706 } else {
5707 skip(';');
5708 break;
5712 while (tok != '}') {
5713 label_or_decl(VT_LOCAL);
5714 if (tok != '}') {
5715 if (is_expr)
5716 vpop();
5717 block(bsym, csym, is_expr);
5720 /* pop locally defined labels */
5721 label_pop(&local_label_stack, llabel);
5722 /* pop locally defined symbols */
5723 --local_scope;
5724 /* In the is_expr case (a statement expression is finished here),
5725 vtop might refer to symbols on the local_stack. Either via the
5726 type or via vtop->sym. We can't pop those nor any that in turn
5727 might be referred to. To make it easier we don't roll back
5728 any symbols in that case; some upper level call to block() will
5729 do that. We do have to remove such symbols from the lookup
5730 tables, though. sym_pop will do that. */
5731 sym_pop(&local_stack, s, is_expr);
5733 /* Pop VLA frames and restore stack pointer if required */
5734 if (vlas_in_scope > saved_vlas_in_scope) {
5735 vla_sp_loc = saved_vlas_in_scope ? block_vla_sp_loc : vla_sp_root_loc;
5736 vla_sp_restore();
5738 vlas_in_scope = saved_vlas_in_scope;
5740 next();
5741 } else if (tok == TOK_RETURN) {
5742 next();
5743 if (tok != ';') {
5744 gexpr();
5745 gen_assign_cast(&func_vt);
5746 gfunc_return(&func_vt);
5748 skip(';');
5749 /* jump unless last stmt in top-level block */
5750 if (tok != '}' || local_scope != 1)
5751 rsym = gjmp(rsym);
5752 nocode_wanted |= 0x20000000;
5753 } else if (tok == TOK_BREAK) {
5754 /* compute jump */
5755 if (!bsym)
5756 tcc_error("cannot break");
5757 *bsym = gjmp(*bsym);
5758 next();
5759 skip(';');
5760 nocode_wanted |= 0x20000000;
5761 } else if (tok == TOK_CONTINUE) {
5762 /* compute jump */
5763 if (!csym)
5764 tcc_error("cannot continue");
5765 vla_sp_restore_root();
5766 *csym = gjmp(*csym);
5767 next();
5768 skip(';');
5769 } else if (tok == TOK_FOR) {
5770 int e;
5771 int saved_nocode_wanted;
5772 nocode_wanted &= ~0x20000000;
5773 next();
5774 skip('(');
5775 s = local_stack;
5776 ++local_scope;
5777 if (tok != ';') {
5778 /* c99 for-loop init decl? */
5779 if (!decl0(VT_LOCAL, 1)) {
5780 /* no, regular for-loop init expr */
5781 gexpr();
5782 vpop();
5785 skip(';');
5786 d = ind;
5787 c = ind;
5788 vla_sp_restore();
5789 a = 0;
5790 b = 0;
5791 if (tok != ';') {
5792 gexpr();
5793 a = gvtst(1, 0);
5795 skip(';');
5796 if (tok != ')') {
5797 e = gjmp(0);
5798 c = ind;
5799 vla_sp_restore();
5800 gexpr();
5801 vpop();
5802 gjmp_addr(d);
5803 gsym(e);
5805 skip(')');
5806 saved_nocode_wanted = nocode_wanted;
5807 block(&a, &b, 0);
5808 nocode_wanted = saved_nocode_wanted;
5809 gjmp_addr(c);
5810 gsym(a);
5811 gsym_addr(b, c);
5812 --local_scope;
5813 sym_pop(&local_stack, s, 0);
5815 } else
5816 if (tok == TOK_DO) {
5817 int saved_nocode_wanted;
5818 nocode_wanted &= ~0x20000000;
5819 next();
5820 a = 0;
5821 b = 0;
5822 d = ind;
5823 vla_sp_restore();
5824 saved_nocode_wanted = nocode_wanted;
5825 block(&a, &b, 0);
5826 skip(TOK_WHILE);
5827 skip('(');
5828 gsym(b);
5829 gexpr();
5830 c = gvtst(0, 0);
5831 gsym_addr(c, d);
5832 nocode_wanted = saved_nocode_wanted;
5833 skip(')');
5834 gsym(a);
5835 skip(';');
5836 } else
5837 if (tok == TOK_SWITCH) {
5838 struct switch_t *saved, sw;
5839 int saved_nocode_wanted = nocode_wanted;
5840 SValue switchval;
5841 next();
5842 skip('(');
5843 gexpr();
5844 skip(')');
5845 switchval = *vtop--;
5846 a = 0;
5847 b = gjmp(0); /* jump to first case */
5848 sw.p = NULL; sw.n = 0; sw.def_sym = 0;
5849 saved = cur_switch;
5850 cur_switch = &sw;
5851 block(&a, csym, 0);
5852 nocode_wanted = saved_nocode_wanted;
5853 a = gjmp(a); /* add implicit break */
5854 /* case lookup */
5855 gsym(b);
5856 qsort(sw.p, sw.n, sizeof(void*), case_cmp);
5857 for (b = 1; b < sw.n; b++)
5858 if (sw.p[b - 1]->v2 >= sw.p[b]->v1)
5859 tcc_error("duplicate case value");
5860 /* Our switch table sorting is signed, so the compared
5861 value needs to be as well when it's 64bit. */
5862 if ((switchval.type.t & VT_BTYPE) == VT_LLONG)
5863 switchval.type.t &= ~VT_UNSIGNED;
5864 vpushv(&switchval);
5865 gcase(sw.p, sw.n, &a);
5866 vpop();
5867 if (sw.def_sym)
5868 gjmp_addr(sw.def_sym);
5869 dynarray_reset(&sw.p, &sw.n);
5870 cur_switch = saved;
5871 /* break label */
5872 gsym(a);
5873 } else
5874 if (tok == TOK_CASE) {
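        /* e.g. 'case 3:' or, with gnu_ext, the range form 'case 1 ... 5:' */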
5875 struct case_t *cr = tcc_malloc(sizeof(struct case_t));
5876 if (!cur_switch)
5877 expect("switch");
5878 nocode_wanted &= ~0x20000000;
5879 next();
5880 cr->v1 = cr->v2 = expr_const64();
5881 if (gnu_ext && tok == TOK_DOTS) {
5882 next();
5883 cr->v2 = expr_const64();
5884 if (cr->v2 < cr->v1)
5885 tcc_warning("empty case range");
5887 cr->sym = ind;
5888 dynarray_add(&cur_switch->p, &cur_switch->n, cr);
5889 skip(':');
5890 is_expr = 0;
5891 goto block_after_label;
5892 } else
5893 if (tok == TOK_DEFAULT) {
5894 next();
5895 skip(':');
5896 if (!cur_switch)
5897 expect("switch");
5898 if (cur_switch->def_sym)
5899 tcc_error("too many 'default'");
5900 cur_switch->def_sym = ind;
5901 is_expr = 0;
5902 goto block_after_label;
5903 } else
5904 if (tok == TOK_GOTO) {
5905 next();
5906 if (tok == '*' && gnu_ext) {
5907 /* computed goto */
5908 next();
5909 gexpr();
5910 if ((vtop->type.t & VT_BTYPE) != VT_PTR)
5911 expect("pointer");
5912 ggoto();
5913 } else if (tok >= TOK_UIDENT) {
5914 s = label_find(tok);
5915 /* put forward definition if needed */
5916 if (!s) {
5917 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
5918 } else {
5919 if (s->r == LABEL_DECLARED)
5920 s->r = LABEL_FORWARD;
5922 vla_sp_restore_root();
5923 if (s->r & LABEL_FORWARD)
5924 s->jnext = gjmp(s->jnext);
5925 else
5926 gjmp_addr(s->jnext);
5927 next();
5928 } else {
5929 expect("label identifier");
5931 skip(';');
5932 } else if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
5933 asm_instr();
5934 } else {
5935 b = is_label();
5936 if (b) {
5937 /* label case */
5938 s = label_find(b);
5939 if (s) {
5940 if (s->r == LABEL_DEFINED)
5941 tcc_error("duplicate label '%s'", get_tok_str(s->v, NULL));
5942 gsym(s->jnext);
5943 s->r = LABEL_DEFINED;
5944 } else {
5945 s = label_push(&global_label_stack, b, LABEL_DEFINED);
5947 s->jnext = ind;
5948 vla_sp_restore();
5949 /* we accept this, but it is a mistake */
5950 block_after_label:
5951 nocode_wanted &= ~0x20000000;
5952 if (tok == '}') {
5953 tcc_warning("deprecated use of label at end of compound statement");
5954 } else {
5955 if (is_expr)
5956 vpop();
5957 block(bsym, csym, is_expr);
5959 } else {
5960 /* expression case */
5961 if (tok != ';') {
5962 if (is_expr) {
5963 vpop();
5964 gexpr();
5965 } else {
5966 gexpr();
5967 vpop();
5970 skip(';');
5975 #define EXPR_CONST 1
5976 #define EXPR_ANY 2
5978 static void parse_init_elem(int expr_type)
5980 int saved_global_expr;
5981 switch(expr_type) {
5982 case EXPR_CONST:
5983 /* compound literals must be allocated globally in this case */
5984 saved_global_expr = global_expr;
5985 global_expr = 1;
5986 expr_const1();
5987 global_expr = saved_global_expr;
5988 /* NOTE: symbols are accepted */
5989 if ((vtop->r & (VT_VALMASK | VT_LVAL)) != VT_CONST)
5990 tcc_error("initializer element is not constant");
5991 break;
5992 case EXPR_ANY:
5993 expr_eq();
5994 break;
5998 /* t is the array or struct type. c is the array or struct
5999 address. cur_field is the pointer to the current
6000 value, for arrays the 'c' member contains the current start
6001 index and the 'r' contains the end index (in case of range init).
6002 'size_only' is true if only size info is needed (only used
6003 in arrays) */
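/* e.g. 'int a[8] = { [2] = 1, [4 ... 6] = 2 };' or
   'struct P p = { .y = 3 };' (the range form is a GNU extension) */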
6004 static void decl_designator(CType *type, Section *sec, unsigned long c,
6005 Sym **cur_field, int size_only)
6007 Sym *s, *f;
6008 int notfirst, index, index_last, align, l, nb_elems, elem_size;
6009 CType type1;
6011 notfirst = 0;
6012 elem_size = 0;
6013 nb_elems = 1;
6014 if (gnu_ext && (l = is_label()) != 0)
6015 goto struct_field;
6016 while (tok == '[' || tok == '.') {
6017 if (tok == '[') {
6018 if (!(type->t & VT_ARRAY))
6019 expect("array type");
6020 s = type->ref;
6021 next();
6022 index = expr_const();
6023 if (index < 0 || (s->c >= 0 && index >= s->c))
6024 tcc_error("invalid index");
6025 if (tok == TOK_DOTS && gnu_ext) {
6026 next();
6027 index_last = expr_const();
6028 if (index_last < 0 ||
6029 (s->c >= 0 && index_last >= s->c) ||
6030 index_last < index)
6031 tcc_error("invalid index");
6032 } else {
6033 index_last = index;
6035 skip(']');
6036 if (!notfirst) {
6037 (*cur_field)->c = index;
6038 (*cur_field)->r = index_last;
6040 type = pointed_type(type);
6041 elem_size = type_size(type, &align);
6042 c += index * elem_size;
6043 /* NOTE: we only support ranges for last designator */
6044 nb_elems = index_last - index + 1;
6045 if (nb_elems != 1) {
6046 notfirst = 1;
6047 break;
6049 } else {
6050 next();
6051 l = tok;
6052 next();
6053 struct_field:
6054 if ((type->t & VT_BTYPE) != VT_STRUCT)
6055 expect("struct/union type");
6056 f = find_field(type, l);
6057 if (!f)
6058 expect("field");
6059 if (!notfirst)
6060 *cur_field = f;
6061 /* XXX: fix this mess by using explicit storage field */
6062 type1 = f->type;
6063 type1.t |= (type->t & ~VT_TYPE);
6064 type = &type1;
6065 c += f->c;
6067 notfirst = 1;
6069 if (notfirst) {
6070 if (tok == '=') {
6071 next();
6072 } else {
6073 if (!gnu_ext)
6074 expect("=");
6076 } else {
6077 if (type->t & VT_ARRAY) {
6078 index = (*cur_field)->c;
6079 if (type->ref->c >= 0 && index >= type->ref->c)
6080 tcc_error("index too large");
6081 type = pointed_type(type);
6082 c += index * type_size(type, &align);
6083 } else {
6084 f = *cur_field;
6085 if (!f)
6086 tcc_error("too many field init");
6087 /* XXX: fix this mess by using explicit storage field */
6088 type1 = f->type;
6089 type1.t |= (type->t & ~VT_TYPE);
6090 type = &type1;
6091 c += f->c;
6094 decl_initializer(type, sec, c, 0, size_only);
6096 /* XXX: make it more general */
6097 if (!size_only && nb_elems > 1) {
6098 unsigned long c_end;
6099 uint8_t *src, *dst;
6100 int i;
6102 if (!sec) {
6103 vset(type, VT_LOCAL|VT_LVAL, c);
6104 for (i = 1; i < nb_elems; i++) {
6105 vset(type, VT_LOCAL|VT_LVAL, c + elem_size * i);
6106 vswap();
6107 vstore();
6109 vpop();
6110 } else {
6111 c_end = c + nb_elems * elem_size;
6112 if (c_end > sec->data_allocated)
6113 section_realloc(sec, c_end);
6114 src = sec->data + c;
6115 dst = src;
6116 for(i = 1; i < nb_elems; i++) {
6117 dst += elem_size;
6118 memcpy(dst, src, elem_size);
6124 /* store a value or an expression directly in global data or in local array */
6125 static void init_putv(CType *type, Section *sec, unsigned long c)
6127 int bt, bit_pos, bit_size;
6128 void *ptr;
6129 unsigned long long bit_mask;
6130 CType dtype;
6132 dtype = *type;
6133 dtype.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
6135 if (sec) {
6136 int size, align;
6137 /* XXX: not portable */
6138 /* XXX: generate error if incorrect relocation */
6139 gen_assign_cast(&dtype);
6140 bt = type->t & VT_BTYPE;
6141 size = type_size(type, &align);
6142 if (c + size > sec->data_allocated) {
6143 section_realloc(sec, c + size);
6145 ptr = sec->data + c;
6146 /* XXX: make code faster ? */
6147 if (!(type->t & VT_BITFIELD)) {
6148 bit_pos = 0;
6149 bit_size = PTR_SIZE * 8;
6150 bit_mask = -1LL;
6151 } else {
6152 bit_pos = (vtop->type.t >> VT_STRUCT_SHIFT) & 0x3f;
6153 bit_size = (vtop->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
6154 bit_mask = (1LL << bit_size) - 1;
6156 if ((vtop->r & (VT_SYM|VT_CONST)) == (VT_SYM|VT_CONST) &&
6157 vtop->sym->v >= SYM_FIRST_ANOM &&
6158 /* XXX This rejects compound literals like
6159 '(void *){ptr}'. The problem is that '&sym' is
6160 represented the same way, which would be ruled out
6161 by the SYM_FIRST_ANOM check above, but also '"string"'
6162 in 'char *p = "string"' is represented the same
6163 with the type being VT_PTR and the symbol being an
6164 anonymous one. That is, there's no difference in vtop
6165 between '(void *){x}' and '&(void *){x}'. Ignore
6166 pointer typed entities here. Hopefully no real code
6167 will ever use compound literals with scalar type. */
6168 (vtop->type.t & VT_BTYPE) != VT_PTR) {
6169 /* These come from compound literals, memcpy stuff over. */
6170 Section *ssec;
6171 ElfW(Sym) *esym;
6172 ElfW_Rel *rel;
6173 esym = &((ElfW(Sym) *)symtab_section->data)[vtop->sym->c];
6174 ssec = tcc_state->sections[esym->st_shndx];
6175 memmove (ptr, ssec->data + esym->st_value, size);
6176 if (ssec->reloc) {
6177 /* We need to copy over all memory contents, and that
6178 includes relocations. Use the fact that relocs are
6179 created in order, so look from the end of relocs
6180 until we hit one before the copied region. */
6181 int num_relocs = ssec->reloc->data_offset / sizeof(*rel);
6182 rel = (ElfW_Rel*)(ssec->reloc->data + ssec->reloc->data_offset);
6183 while (num_relocs--) {
6184 rel--;
6185 if (rel->r_offset >= esym->st_value + size)
6186 continue;
6187 if (rel->r_offset < esym->st_value)
6188 break;
6189 /* Note: if the same fields are initialized multiple
6190 times (possible with designators) then we possibly
6191 add multiple relocations for the same offset here.
6192 That would lead to wrong code, the last reloc needs
6193 to win. We clean this up later after the whole
6194 initializer is parsed. */
6195 put_elf_reloca(symtab_section, sec,
6196 c + rel->r_offset - esym->st_value,
6197 ELFW(R_TYPE)(rel->r_info),
6198 ELFW(R_SYM)(rel->r_info),
6199 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
6200 rel->r_addend
6201 #else
6203 #endif
6207 } else {
6208 if ((vtop->r & VT_SYM) &&
6209 (bt == VT_BYTE ||
6210 bt == VT_SHORT ||
6211 bt == VT_DOUBLE ||
6212 bt == VT_LDOUBLE ||
6213 #if PTR_SIZE == 8
6214 (bt == VT_LLONG && bit_size != 64) ||
6215 bt == VT_INT
6216 #else
6217 bt == VT_LLONG ||
6218 (bt == VT_INT && bit_size != 32)
6219 #endif
6221 tcc_error("initializer element is not computable at load time");
6222 switch(bt) {
6223 /* XXX: when cross-compiling we assume that each type has the
6224 same representation on host and target, which is likely to
6225 be wrong in the case of long double */
6226 case VT_BOOL:
6227 vtop->c.i = (vtop->c.i != 0);
6228 case VT_BYTE:
6229 *(char *)ptr |= (vtop->c.i & bit_mask) << bit_pos;
6230 break;
6231 case VT_SHORT:
6232 *(short *)ptr |= (vtop->c.i & bit_mask) << bit_pos;
6233 break;
6234 case VT_DOUBLE:
6235 *(double *)ptr = vtop->c.d;
6236 break;
6237 case VT_LDOUBLE:
6238 if (sizeof(long double) == LDOUBLE_SIZE)
6239 *(long double *)ptr = vtop->c.ld;
6240 else if (sizeof(double) == LDOUBLE_SIZE)
6241 *(double *)ptr = vtop->c.ld;
6242 else
6243 tcc_error("can't cross compile long double constants");
6244 break;
6245 #if PTR_SIZE != 8
6246 case VT_LLONG:
6247 *(long long *)ptr |= (vtop->c.i & bit_mask) << bit_pos;
6248 break;
6249 #else
6250 case VT_LLONG:
6251 #endif
6252 case VT_PTR:
6254 addr_t val = (vtop->c.i & bit_mask) << bit_pos;
6255 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
6256 if (vtop->r & VT_SYM)
6257 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
6258 else
6259 *(addr_t *)ptr |= val;
6260 #else
6261 if (vtop->r & VT_SYM)
6262 greloc(sec, vtop->sym, c, R_DATA_PTR);
6263 *(addr_t *)ptr |= val;
6264 #endif
6265 break;
6267 default:
6269 int val = (vtop->c.i & bit_mask) << bit_pos;
6270 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
6271 if (vtop->r & VT_SYM)
6272 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
6273 else
6274 *(int *)ptr |= val;
6275 #else
6276 if (vtop->r & VT_SYM)
6277 greloc(sec, vtop->sym, c, R_DATA_PTR);
6278 *(int *)ptr |= val;
6279 #endif
6280 break;
6284 vtop--;
6285 } else {
6286 vset(&dtype, VT_LOCAL|VT_LVAL, c);
6287 vswap();
6288 vstore();
6289 vpop();
6293 /* put zeros for variable based init */
6294 static void init_putz(Section *sec, unsigned long c, int size)
6296 if (sec) {
6297 /* nothing to do because globals are already set to zero */
6298 } else {
6299 vpush_global_sym(&func_old_type, TOK_memset);
6300 vseti(VT_LOCAL, c);
6301 #ifdef TCC_TARGET_ARM
6302 vpushs(size);
6303 vpushi(0);
6304 #else
6305 vpushi(0);
6306 vpushs(size);
6307 #endif
6308 gfunc_call(3);
6312 /* 't' contains the type and storage info. 'c' is the offset of the
6313 object in section 'sec'. If 'sec' is NULL, it means stack based
6314 allocation. 'first' is true if array '{' must be read (multi
6315 dimension implicit array init handling). 'size_only' is true if
6316 size only evaluation is wanted (only for arrays). */
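/* e.g. with 'int m[][3] = { {1, 2, 3}, {4, 5, 6} };' the outer dimension
   is found by a first, size-only pass over the initializer before any
   data is emitted */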
6317 static void decl_initializer(CType *type, Section *sec, unsigned long c,
6318 int first, int size_only)
6320 int index, array_length, n, no_oblock, nb, parlevel, parlevel1, i;
6321 int size1, align1;
6322 int have_elem;
6323 Sym *s, *f;
6324 Sym indexsym;
6325 CType *t1;
6327 /* If we currently are at an '}' or ',' we have read an initializer
6328 element in one of our callers, and not yet consumed it. */
6329 have_elem = tok == '}' || tok == ',';
6330 if (!have_elem && tok != '{' &&
6331 /* In case of strings we have special handling for arrays, so
6332 don't consume them as initializer value (which would commit them
6333 to some anonymous symbol). */
6334 tok != TOK_LSTR && tok != TOK_STR &&
6335 !size_only) {
6336 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
6337 have_elem = 1;
6340 if (have_elem &&
6341 !(type->t & VT_ARRAY) &&
6342 /* Use i_c_parameter_t, to strip toplevel qualifiers.
6343 The source type might have VT_CONSTANT set, which is
6344 of course assignable to non-const elements. */
6345 is_compatible_parameter_types(type, &vtop->type)) {
6346 init_putv(type, sec, c);
6347 } else if (type->t & VT_ARRAY) {
6348 s = type->ref;
6349 n = s->c;
6350 array_length = 0;
6351 t1 = pointed_type(type);
6352 size1 = type_size(t1, &align1);
6354 no_oblock = 1;
6355 if ((first && tok != TOK_LSTR && tok != TOK_STR) ||
6356 tok == '{') {
6357 if (tok != '{')
6358 tcc_error("character array initializer must be a literal,"
6359 " optionally enclosed in braces");
6360 skip('{');
6361 no_oblock = 0;
6364 /* only parse strings here if the type is correct (otherwise handle
6365 them as ((w)char *) expressions) */
6366 if ((tok == TOK_LSTR &&
6367 #ifdef TCC_TARGET_PE
6368 (t1->t & VT_BTYPE) == VT_SHORT && (t1->t & VT_UNSIGNED)
6369 #else
6370 (t1->t & VT_BTYPE) == VT_INT
6371 #endif
6372 ) || (tok == TOK_STR && (t1->t & VT_BTYPE) == VT_BYTE)) {
6373 while (tok == TOK_STR || tok == TOK_LSTR) {
6374 int cstr_len, ch;
6376 /* compute maximum number of chars wanted */
6377 if (tok == TOK_STR)
6378 cstr_len = tokc.str.size;
6379 else
6380 cstr_len = tokc.str.size / sizeof(nwchar_t);
6381 cstr_len--;
6382 nb = cstr_len;
6383 if (n >= 0 && nb > (n - array_length))
6384 nb = n - array_length;
6385 if (!size_only) {
6386 if (cstr_len > nb)
6387 tcc_warning("initializer-string for array is too long");
6388 /* in order to go faster for the common case (char
6389 string in a global variable), we handle it
6390 specifically */
6391 if (sec && tok == TOK_STR && size1 == 1) {
6392 memcpy(sec->data + c + array_length, tokc.str.data, nb);
6393 } else {
6394 for(i=0;i<nb;i++) {
6395 if (tok == TOK_STR)
6396 ch = ((unsigned char *)tokc.str.data)[i];
6397 else
6398 ch = ((nwchar_t *)tokc.str.data)[i];
6399 vpushi(ch);
6400 init_putv(t1, sec, c + (array_length + i) * size1);
6404 array_length += nb;
6405 next();
6407 /* only add trailing zero if enough storage (no
6408 warning in this case since it is standard) */
6409 if (n < 0 || array_length < n) {
6410 if (!size_only) {
6411 vpushi(0);
6412 init_putv(t1, sec, c + (array_length * size1));
6414 array_length++;
6416 } else {
6417 indexsym.c = 0;
6418 indexsym.r = 0;
6419 f = &indexsym;
6421 do_init_list:
6422 while (tok != '}' || have_elem) {
6423 decl_designator(type, sec, c, &f, size_only);
6424 have_elem = 0;
6425 index = f->c;
6426 /* must put zero in holes (note that doing it that way
6427 ensures that it even works with designators) */
6428 if (!size_only && array_length < index) {
6429 init_putz(sec, c + array_length * size1,
6430 (index - array_length) * size1);
6432 if (type->t & VT_ARRAY) {
6433 index = indexsym.c = ++indexsym.r;
6434 } else {
6435 index = index + type_size(&f->type, &align1);
6436 if (s->type.t == TOK_UNION)
6437 f = NULL;
6438 else
6439 f = f->next;
6441 if (index > array_length)
6442 array_length = index;
6444 if (type->t & VT_ARRAY) {
6445 /* special test for multi dimensional arrays (may not
6446 be strictly correct if designators are used at the
6447 same time) */
6448 if (no_oblock && index >= n)
6449 break;
6450 } else {
6451 if (no_oblock && f == NULL)
6452 break;
6454 if (tok == '}')
6455 break;
6456 skip(',');
6459 /* put zeros at the end */
6460 if (!size_only && array_length < n) {
6461 init_putz(sec, c + array_length * size1,
6462 (n - array_length) * size1);
6464 if (!no_oblock)
6465 skip('}');
6466 /* patch type size if needed, which happens only for array types */
6467 if (n < 0)
6468 s->c = array_length;
6469 } else if ((type->t & VT_BTYPE) == VT_STRUCT) {
6470 size1 = 1;
6471 no_oblock = 1;
6472 if (first || tok == '{') {
6473 skip('{');
6474 no_oblock = 0;
6476 s = type->ref;
6477 f = s->next;
6478 array_length = 0;
6479 n = s->c;
6480 goto do_init_list;
6481 } else if (tok == '{') {
6482 next();
6483 decl_initializer(type, sec, c, first, size_only);
6484 skip('}');
6485 } else if (size_only) {
6486 /* If we supported only ISO C we wouldn't have to accept calling
6487 this on anything other than an array with size_only==1 (and even then
6488 only on the outermost level, so no recursion would be needed),
6489 because initializing a flex array member isn't supported.
6490 But GNU C supports it, so we need to recurse even into
6491 subfields of structs and arrays when size_only is set. */
6492 /* just skip expression */
6493 parlevel = parlevel1 = 0;
6494 while ((parlevel > 0 || parlevel1 > 0 ||
6495 (tok != '}' && tok != ',')) && tok != -1) {
6496 if (tok == '(')
6497 parlevel++;
6498 else if (tok == ')') {
6499 if (parlevel == 0 && parlevel1 == 0)
6500 break;
6501 parlevel--;
6503 else if (tok == '{')
6504 parlevel1++;
6505 else if (tok == '}') {
6506 if (parlevel == 0 && parlevel1 == 0)
6507 break;
6508 parlevel1--;
6510 next();
6512 } else {
6513 if (!have_elem) {
6514 /* This should happen only when we haven't parsed
6515 the init element above for fear of committing a
6516 string constant to memory too early. */
6517 if (tok != TOK_STR && tok != TOK_LSTR)
6518 expect("string constant");
6519 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
6521 init_putv(type, sec, c);
6525 /* parse an initializer for type 't' if 'has_init' is non zero, and
6526 allocate space in local or global data space ('r' is either
6527 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
6528 variable 'v' of scope 'scope' is declared before initializers
6529 are parsed. If 'v' is zero, then a reference to the new object
6530 is put in the value stack. If 'has_init' is 2, a special parsing
6531 is done to handle string constants. */
6532 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r,
6533 int has_init, int v, int scope)
6535 int size, align, addr, data_offset;
6536 int level;
6537 ParseState saved_parse_state = {0};
6538 TokenString *init_str = NULL;
6539 Section *sec;
6540 Sym *flexible_array;
6542 flexible_array = NULL;
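    /* detect a trailing flexible array member, e.g.
       'struct S { int n; char data[]; };' - its size is adjusted
       from the initializer further below */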
6543 if ((type->t & VT_BTYPE) == VT_STRUCT) {
6544 Sym *field = type->ref->next;
6545 if (field) {
6546 while (field->next)
6547 field = field->next;
6548 if (field->type.t & VT_ARRAY && field->type.ref->c < 0)
6549 flexible_array = field;
6553 size = type_size(type, &align);
6554 /* If unknown size, we must evaluate it before
6555 evaluating initializers because
6556 initializers can generate global data too
6557 (e.g. string pointers or ISOC99 compound
6558 literals). It also simplifies local
6559 initializers handling */
6560 if (size < 0 || (flexible_array && has_init)) {
6561 if (!has_init)
6562 tcc_error("unknown type size");
6563 /* get all init string */
6564 init_str = tok_str_alloc();
6565 if (has_init == 2) {
6566 /* only get strings */
6567 while (tok == TOK_STR || tok == TOK_LSTR) {
6568 tok_str_add_tok(init_str);
6569 next();
6571 } else {
6572 level = 0;
6573 while (level > 0 || (tok != ',' && tok != ';')) {
6574 if (tok < 0)
6575 tcc_error("unexpected end of file in initializer");
6576 tok_str_add_tok(init_str);
6577 if (tok == '{')
6578 level++;
6579 else if (tok == '}') {
6580 level--;
6581 if (level <= 0) {
6582 next();
6583 break;
6586 next();
6589 tok_str_add(init_str, -1);
6590 tok_str_add(init_str, 0);
6592 /* compute size */
6593 save_parse_state(&saved_parse_state);
6595 begin_macro(init_str, 1);
6596 next();
6597 decl_initializer(type, NULL, 0, 1, 1);
6598 /* prepare second initializer parsing */
6599 macro_ptr = init_str->str;
6600 next();
6602 /* if still unknown size, error */
6603 size = type_size(type, &align);
6604 if (size < 0)
6605 tcc_error("unknown type size");
6607 /* If there's a flex member and it was used in the initializer
6608 adjust size. */
6609 if (flexible_array &&
6610 flexible_array->type.ref->c > 0)
6611 size += flexible_array->type.ref->c
6612 * pointed_size(&flexible_array->type);
6613 /* take into account specified alignment if bigger */
6614 if (ad->a.aligned) {
6615 int speca = 1 << (ad->a.aligned - 1);
6616 if (speca > align)
6617 align = speca;
6618 } else if (ad->a.packed) {
6619 align = 1;
6621 if ((r & VT_VALMASK) == VT_LOCAL) {
6622 sec = NULL;
6623 #ifdef CONFIG_TCC_BCHECK
6624 if (tcc_state->do_bounds_check && (type->t & VT_ARRAY)) {
6625 loc--;
6627 #endif
6628 loc = (loc - size) & -align;
6629 addr = loc;
6630 #ifdef CONFIG_TCC_BCHECK
6631 /* handles bounds */
6632 /* XXX: currently, since we do only one pass, we cannot track
6633 '&' operators, so we add only arrays */
6634 if (tcc_state->do_bounds_check && (type->t & VT_ARRAY)) {
6635 addr_t *bounds_ptr;
6636 /* add padding between regions */
6637 loc--;
6638 /* then add local bound info */
6639 bounds_ptr = section_ptr_add(lbounds_section, 2 * sizeof(addr_t));
6640 bounds_ptr[0] = addr;
6641 bounds_ptr[1] = size;
6643 #endif
6644 if (v) {
6645 /* local variable */
6646 #ifdef CONFIG_TCC_ASM
6647 if (ad->asm_label) {
6648 int reg = asm_parse_regvar(ad->asm_label);
6649 if (reg >= 0)
6650 r = (r & ~VT_VALMASK) | reg;
6652 #endif
6653 sym_push(v, type, r, addr);
6654 } else {
6655 /* push local reference */
6656 vset(type, r, addr);
6658 } else {
6659 Sym *sym;
6661 sym = NULL;
6662 if (v && scope == VT_CONST) {
6663 /* see if the symbol was already defined */
6664 sym = sym_find(v);
6665 if (sym) {
6666 if (!is_compatible_types(&sym->type, type))
6667 tcc_error("incompatible types for redefinition of '%s'",
6668 get_tok_str(v, NULL));
6669 if (sym->type.t & VT_EXTERN) {
6670 /* if the variable is extern, it was not allocated */
6671 sym->type.t &= ~VT_EXTERN;
6672 /* set array size if it was omitted in extern
6673 declaration */
6674 if ((sym->type.t & VT_ARRAY) &&
6675 sym->type.ref->c < 0 &&
6676 type->ref->c >= 0)
6677 sym->type.ref->c = type->ref->c;
6678 } else {
6679 /* we accept several definitions of the same
6680 global variable. This is tricky, because we
6681 must play with the SHN_COMMON type of the symbol */
6682 /* XXX: should check if the variable was already
6683 initialized. It is incorrect to initialize it
6684 twice */
6685 /* no init data, we won't add more to the symbol */
6686 if (!has_init)
6687 goto no_alloc;
6692 /* allocate symbol in corresponding section */
6693 sec = ad->section;
6694 if (!sec) {
6695 if (has_init)
6696 sec = data_section;
6697 else if (tcc_state->nocommon)
6698 sec = bss_section;
6700 if (sec) {
6701 data_offset = sec->data_offset;
6702 data_offset = (data_offset + align - 1) & -align;
6703 addr = data_offset;
6704 /* very important to increment the section offset at this point,
6705 because the initializers themselves can allocate new global data */
6706 data_offset += size;
6707 #ifdef CONFIG_TCC_BCHECK
6708 /* add padding if bound check */
6709 if (tcc_state->do_bounds_check)
6710 data_offset++;
6711 #endif
6712 sec->data_offset = data_offset;
6713 /* allocate section space to put the data */
6714 if (sec->sh_type != SHT_NOBITS &&
6715 data_offset > sec->data_allocated)
6716 section_realloc(sec, data_offset);
6717 /* align section if needed */
6718 if (align > sec->sh_addralign)
6719 sec->sh_addralign = align;
6720 } else {
6721 addr = 0; /* avoid warning */
6724 if (v) {
6725 if (scope != VT_CONST || !sym) {
6726 sym = sym_push(v, type, r | VT_SYM, 0);
6727 sym->asm_label = ad->asm_label;
6729 /* update symbol definition */
6730 if (sec) {
6731 put_extern_sym(sym, sec, addr, size);
6732 } else {
6733 ElfW(Sym) *esym;
6734 /* put a common area */
6735 put_extern_sym(sym, NULL, align, size);
6736 /* XXX: find a nicer way */
6737 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
6738 esym->st_shndx = SHN_COMMON;
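/* Illustrative example: a tentative definition such as
       int counter;        (no initializer, not 'extern')
   becomes a SHN_COMMON symbol; the linker later merges the copies from
   several translation units into a single object. */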
6740 } else {
6741 /* push global reference */
6742 sym = get_sym_ref(type, sec, addr, size);
6743 vpushsym(type, sym);
6745 /* patch symbol weakness */
6746 if (type->t & VT_WEAK)
6747 weaken_symbol(sym);
6748 apply_visibility(sym, type);
6749 #ifdef CONFIG_TCC_BCHECK
6750 /* handles bounds now because the symbol must be defined
6751 before the relocation */
6752 if (tcc_state->do_bounds_check) {
6753 addr_t *bounds_ptr;
6755 greloc(bounds_section, sym, bounds_section->data_offset, R_DATA_PTR);
6756 /* then add global bound info */
6757 bounds_ptr = section_ptr_add(bounds_section, 2 * sizeof(addr_t));
6758 bounds_ptr[0] = 0; /* relocated */
6759 bounds_ptr[1] = size;
6761 #endif
6763 if (type->t & VT_VLA) {
6764 int a;
6766 /* save current stack pointer */
6767 if (vlas_in_scope == 0) {
6768 if (vla_sp_root_loc == -1)
6769 vla_sp_root_loc = (loc -= PTR_SIZE);
6770 gen_vla_sp_save(vla_sp_root_loc);
6773 vla_runtime_type_size(type, &a);
6774 gen_vla_alloc(type, a);
6775 gen_vla_sp_save(addr);
6776 vla_sp_loc = addr;
6777 vlas_in_scope++;
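/* Illustrative VLA handled here:
       void f(int n) { int buf[n]; ... }
   the stack pointer is saved before the first VLA of the scope
   (vla_sp_root_loc), then the runtime size of 'buf' is computed and the
   space is allocated on the stack. */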
6778 } else if (has_init) {
6779 size_t oldreloc_offset = 0;
6780 if (sec && sec->reloc)
6781 oldreloc_offset = sec->reloc->data_offset;
6782 decl_initializer(type, sec, addr, 1, 0);
6783 if (sec && sec->reloc)
6784 squeeze_multi_relocs(sec, oldreloc_offset);
6785 /* patch flexible array member size back to -1, */
6786 /* for possible subsequent similar declarations */
6787 if (flexible_array)
6788 flexible_array->type.ref->c = -1;
6790 no_alloc: ;
6791 /* restore parse state if needed */
6792 if (init_str) {
6793 end_macro();
6794 restore_parse_state(&saved_parse_state);
6798 /* parse an old style function declaration list */
6799 /* XXX: check multiple parameters */
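/* Old-style (K&R) definition accepted here, for illustration:
       int f(a, b)
           int a;
           char *b;
       { ... }
   each declaration in the list is matched against a parameter name. */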
6800 static void func_decl_list(Sym *func_sym)
6802 AttributeDef ad;
6803 int v;
6804 Sym *s;
6805 CType btype, type;
6807 /* parse each declaration */
6808 while (tok != '{' && tok != ';' && tok != ',' && tok != TOK_EOF &&
6809 tok != TOK_ASM1 && tok != TOK_ASM2 && tok != TOK_ASM3) {
6810 if (!parse_btype(&btype, &ad))
6811 expect("declaration list");
6812 if (((btype.t & VT_BTYPE) == VT_ENUM ||
6813 (btype.t & VT_BTYPE) == VT_STRUCT) &&
6814 tok == ';') {
6815 /* we accept no variable after */
6816 } else {
6817 for(;;) {
6818 type = btype;
6819 type_decl(&type, &ad, &v, TYPE_DIRECT);
6820 /* find parameter in function parameter list */
6821 s = func_sym->next;
6822 while (s != NULL) {
6823 if ((s->v & ~SYM_FIELD) == v)
6824 goto found;
6825 s = s->next;
6827 tcc_error("declaration for parameter '%s' but no such parameter",
6828 get_tok_str(v, NULL));
6829 found:
6830 /* check that no storage specifier except 'register' was given */
6831 if (type.t & VT_STORAGE)
6832 tcc_error("storage class specified for '%s'", get_tok_str(v, NULL));
6833 convert_parameter_type(&type);
6834 /* we can add the type (NOTE: it could be local to the function) */
6835 s->type = type;
6836 /* accept other parameters */
6837 if (tok == ',')
6838 next();
6839 else
6840 break;
6843 skip(';');
6847 /* parse a function defined by symbol 'sym' and generate its code in
6848 'cur_text_section' */
6849 static void gen_function(Sym *sym)
6851 nocode_wanted = 0;
6852 ind = cur_text_section->data_offset;
6853 /* NOTE: we patch the symbol size later */
6854 put_extern_sym(sym, cur_text_section, ind, 0);
6855 funcname = get_tok_str(sym->v, NULL);
6856 func_ind = ind;
6857 /* Initialize VLA state */
6858 vla_sp_loc = -1;
6859 vla_sp_root_loc = -1;
6860 /* put debug symbol */
6861 tcc_debug_funcstart(tcc_state, sym);
6862 /* push a dummy symbol to enable local sym storage */
6863 sym_push2(&local_stack, SYM_FIELD, 0, 0);
6864 local_scope = 1; /* for function parameters */
6865 gfunc_prolog(&sym->type);
6866 local_scope = 0;
6867 rsym = 0;
6868 block(NULL, NULL, 0);
6869 nocode_wanted = 0;
6870 gsym(rsym);
6871 gfunc_epilog();
6872 cur_text_section->data_offset = ind;
6873 label_pop(&global_label_stack, NULL);
6874 /* reset local stack */
6875 local_scope = 0;
6876 sym_pop(&local_stack, NULL, 0);
6877 /* end of function */
6878 /* patch symbol size */
6879 ((ElfW(Sym) *)symtab_section->data)[sym->c].st_size =
6880 ind - func_ind;
6881 /* patch symbol weakness (this definition overrules any prototype) */
6882 if (sym->type.t & VT_WEAK)
6883 weaken_symbol(sym);
6884 apply_visibility(sym, &sym->type);
6885 tcc_debug_funcend(tcc_state, ind - func_ind);
6886 /* It's better to crash than to generate wrong code */
6887 cur_text_section = NULL;
6888 funcname = ""; /* for safety */
6889 func_vt.t = VT_VOID; /* for safety */
6890 func_var = 0; /* for safety */
6891 ind = 0; /* for safety */
6892 nocode_wanted = 1;
6893 check_vstack();
6896 static void gen_inline_functions(TCCState *s)
6898 Sym *sym;
6899 int inline_generated, i, ln;
6900 struct InlineFunc *fn;
6902 ln = file->line_num;
6903 /* iterate while inline functions are referenced */
6904 for(;;) {
6905 inline_generated = 0;
6906 for (i = 0; i < s->nb_inline_fns; ++i) {
6907 fn = s->inline_fns[i];
6908 sym = fn->sym;
6909 if (sym && sym->c) {
6910 /* the function was used: generate its code and
6911 convert it to a normal function */
6912 fn->sym = NULL;
6913 if (file)
6914 pstrcpy(file->filename, sizeof file->filename, fn->filename);
6915 sym->type.t &= ~VT_INLINE;
6917 begin_macro(fn->func_str, 1);
6918 next();
6919 cur_text_section = text_section;
6920 gen_function(sym);
6921 end_macro();
6923 inline_generated = 1;
6926 if (!inline_generated)
6927 break;
6929 file->line_num = ln;
6932 ST_FUNC void free_inline_functions(TCCState *s)
6934 int i;
6935 /* free tokens of unused inline functions */
6936 for (i = 0; i < s->nb_inline_fns; ++i) {
6937 struct InlineFunc *fn = s->inline_fns[i];
6938 if (fn->sym)
6939 tok_str_free(fn->func_str);
6941 dynarray_reset(&s->inline_fns, &s->nb_inline_fns);
6944 /* 'l' is VT_LOCAL or VT_CONST to define default storage type */
6945 static int decl0(int l, int is_for_loop_init)
6947 int v, has_init, r;
6948 CType type, btype;
6949 Sym *sym;
6950 AttributeDef ad;
6952 while (1) {
6953 if (!parse_btype(&btype, &ad)) {
6954 if (is_for_loop_init)
6955 return 0;
6956 /* skip redundant ';' */
6957 /* XXX: find a more elegant solution */
6958 if (tok == ';') {
6959 next();
6960 continue;
6962 if (l == VT_CONST &&
6963 (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
6964 /* global asm block */
6965 asm_global_instr();
6966 continue;
6968 /* special test for old K&R protos without explicit int
6969 type. Only accepted when defining global data */
6970 if (l == VT_LOCAL || tok < TOK_UIDENT)
6971 break;
6972 btype.t = VT_INT;
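/* e.g. the old K&R forms (illustrative)
       count;              at file scope
       main() { ... }
   are accepted with an implicit 'int' type. */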
6974 if (((btype.t & VT_BTYPE) == VT_ENUM ||
6975 (btype.t & VT_BTYPE) == VT_STRUCT) &&
6976 tok == ';') {
6977 if ((btype.t & VT_BTYPE) == VT_STRUCT) {
6978 int v = btype.ref->v;
6979 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) >= SYM_FIRST_ANOM)
6980 tcc_warning("unnamed struct/union that defines no instances");
6982 next();
6983 continue;
6985 while (1) { /* iterate thru each declaration */
6986 type = btype;
6987 /* If the base type itself was an array type of unspecified
6988 size (like in 'typedef int arr[]; arr x = {1};') then
6989 we will overwrite the unknown size by the real one for
6990 this decl. We need to unshare the ref symbol holding
6991 that size. */
6992 if ((type.t & VT_ARRAY) && type.ref->c < 0) {
6993 type.ref = sym_push(SYM_FIELD, &type.ref->type, 0, type.ref->c);
6995 type_decl(&type, &ad, &v, TYPE_DIRECT);
6996 #if 0
6998 char buf[500];
6999 type_to_str(buf, sizeof(buf), t, get_tok_str(v, NULL));
7000 printf("type = '%s'\n", buf);
7002 #endif
7003 if ((type.t & VT_BTYPE) == VT_FUNC) {
7004 if ((type.t & VT_STATIC) && (l == VT_LOCAL)) {
7005 tcc_error("function without file scope cannot be static");
7007 /* if old style function prototype, we accept a
7008 declaration list */
7009 sym = type.ref;
7010 if (sym->c == FUNC_OLD)
7011 func_decl_list(sym);
7014 if (gnu_ext && (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
7015 ad.asm_label = asm_label_instr();
7016 /* parse one last attribute list, after asm label */
7017 parse_attribute(&ad);
7018 if (tok == '{')
7019 expect(";");
7022 if (ad.a.weak)
7023 type.t |= VT_WEAK;
7024 #ifdef TCC_TARGET_PE
7025 if (ad.a.func_import)
7026 type.t |= VT_IMPORT;
7027 if (ad.a.func_export)
7028 type.t |= VT_EXPORT;
7029 #endif
7030 type.t |= ad.a.visibility << VT_VIS_SHIFT;
7032 if (tok == '{') {
7033 if (l == VT_LOCAL)
7034 tcc_error("cannot use local functions");
7035 if ((type.t & VT_BTYPE) != VT_FUNC)
7036 expect("function definition");
7038 /* reject abstract declarators in function definition */
7039 sym = type.ref;
7040 while ((sym = sym->next) != NULL)
7041 if (!(sym->v & ~SYM_FIELD))
7042 expect("identifier");
7044 /* XXX: cannot do better now: convert extern inline to static inline */
7045 if ((type.t & (VT_EXTERN | VT_INLINE)) == (VT_EXTERN | VT_INLINE))
7046 type.t = (type.t & ~VT_EXTERN) | VT_STATIC;
7048 sym = sym_find(v);
7049 if (sym) {
7050 Sym *ref;
7051 if ((sym->type.t & VT_BTYPE) != VT_FUNC)
7052 goto func_error1;
7054 ref = sym->type.ref;
7056 /* use func_call from prototype if not defined */
7057 if (ref->a.func_call != FUNC_CDECL
7058 && type.ref->a.func_call == FUNC_CDECL)
7059 type.ref->a.func_call = ref->a.func_call;
7061 /* use export from prototype */
7062 if (ref->a.func_export)
7063 type.ref->a.func_export = 1;
7065 /* use static from prototype */
7066 if (sym->type.t & VT_STATIC)
7067 type.t = (type.t & ~VT_EXTERN) | VT_STATIC;
7069 /* If the definition has no visibility use the
7070 one from prototype. */
7071 if (! (type.t & VT_VIS_MASK))
7072 type.t |= sym->type.t & VT_VIS_MASK;
7074 if (!is_compatible_types(&sym->type, &type)) {
7075 func_error1:
7076 tcc_error("incompatible types for redefinition of '%s'",
7077 get_tok_str(v, NULL));
7079 if (ref->a.func_body)
7080 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
7081 /* if symbol is already defined, then put complete type */
7082 sym->type = type;
7084 } else {
7085 /* put function symbol */
7086 sym = global_identifier_push(v, type.t, 0);
7087 sym->type.ref = type.ref;
7090 sym->type.ref->a.func_body = 1;
7091 sym->r = VT_SYM | VT_CONST;
7093 /* static inline functions are just recorded as a kind
7094 of macro. Their code will be emitted at the end of
7095 the compilation unit only if they are used */
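/* e.g.  static inline int sq(int x) { return x * x; }
   is only tokenized into fn->func_str here; gen_inline_functions()
   emits its code at the end of the unit, and only if sq() was used. */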
7096 if ((type.t & (VT_INLINE | VT_STATIC)) ==
7097 (VT_INLINE | VT_STATIC)) {
7098 int block_level;
7099 struct InlineFunc *fn;
7100 const char *filename;
7102 filename = file ? file->filename : "";
7103 fn = tcc_malloc(sizeof *fn + strlen(filename));
7104 strcpy(fn->filename, filename);
7105 fn->sym = sym;
7106 fn->func_str = tok_str_alloc();
7108 block_level = 0;
7109 for(;;) {
7110 int t;
7111 if (tok == TOK_EOF)
7112 tcc_error("unexpected end of file");
7113 tok_str_add_tok(fn->func_str);
7114 t = tok;
7115 next();
7116 if (t == '{') {
7117 block_level++;
7118 } else if (t == '}') {
7119 block_level--;
7120 if (block_level == 0)
7121 break;
7124 tok_str_add(fn->func_str, -1);
7125 tok_str_add(fn->func_str, 0);
7126 dynarray_add(&tcc_state->inline_fns, &tcc_state->nb_inline_fns, fn);
7128 } else {
7129 /* compute text section */
7130 cur_text_section = ad.section;
7131 if (!cur_text_section)
7132 cur_text_section = text_section;
7133 gen_function(sym);
7135 break;
7136 } else {
7137 if (btype.t & VT_TYPEDEF) {
7138 /* save typedefed type */
7139 /* XXX: test storage specifiers ? */
7140 sym = sym_find(v);
7141 if (sym && sym->scope == local_scope) {
7142 if (!is_compatible_types(&sym->type, &type)
7143 || !(sym->type.t & VT_TYPEDEF))
7144 tcc_error("incompatible redefinition of '%s'",
7145 get_tok_str(v, NULL));
7146 sym->type = type;
7147 } else {
7148 sym = sym_push(v, &type, 0, 0);
7150 sym->a = ad.a;
7151 sym->type.t |= VT_TYPEDEF;
7152 } else {
7153 r = 0;
7154 if ((type.t & VT_BTYPE) == VT_FUNC) {
7155 /* external function definition */
7156 /* specific case for func_call attribute */
7157 type.ref->a = ad.a;
7158 } else if (!(type.t & VT_ARRAY)) {
7159 /* not lvalue if array */
7160 r |= lvalue_type(type.t);
7162 has_init = (tok == '=');
7163 if (has_init && (type.t & VT_VLA))
7164 tcc_error("variable length array cannot be initialized");
7165 if ((btype.t & VT_EXTERN) || ((type.t & VT_BTYPE) == VT_FUNC) ||
7166 ((type.t & VT_ARRAY) && (type.t & VT_STATIC) &&
7167 !has_init && l == VT_CONST && type.ref->c < 0)) {
7168 /* external variable or function */
7169 /* NOTE: as in GCC, uninitialized global static
7170 arrays of null size are treated as
7171 extern */
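/* e.g.  static int tab[];    (size still unknown here, illustrative)
   is recorded as an external symbol for now and may be completed by a
   later declaration in the same unit. */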
7172 sym = external_sym(v, &type, r);
7173 sym->asm_label = ad.asm_label;
7175 if (ad.alias_target) {
7176 Section tsec;
7177 ElfW(Sym) *esym;
7178 Sym *alias_target;
7180 alias_target = sym_find(ad.alias_target);
7181 if (!alias_target || !alias_target->c)
7182 tcc_error("unsupported forward __alias__ attribute");
7183 esym = &((ElfW(Sym) *)symtab_section->data)[alias_target->c];
7184 tsec.sh_num = esym->st_shndx;
7185 put_extern_sym2(sym, &tsec, esym->st_value, esym->st_size, 0);
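/* e.g.  int new_fn(void) __attribute__((alias("old_fn")));   (illustrative)
   requires 'old_fn' to be defined already; the alias is emitted with the
   same section, value and size as its target. */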
7187 } else {
7188 type.t |= (btype.t & VT_STATIC); /* Retain "static". */
7189 if (type.t & VT_STATIC)
7190 r |= VT_CONST;
7191 else
7192 r |= l;
7193 if (has_init)
7194 next();
7195 decl_initializer_alloc(&type, &ad, r, has_init, v, l);
7198 if (tok != ',') {
7199 if (is_for_loop_init)
7200 return 1;
7201 skip(';');
7202 break;
7204 next();
7206 ad.a.aligned = 0;
7209 return 0;
7212 ST_FUNC void decl(int l)
7214 decl0(l, 0);
7217 /* ------------------------------------------------------------------------- */