Improve skip_or_save_block
[tinycc.git] / tccgen.c
blob f875e51abe8cb5fd6b3597afc274e8a4ded5c21b
1 /*
2 * TCC - Tiny C Compiler
3 *
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 #include "tcc.h"
23 /********************************************************/
24 /* global variables */
26 /* loc : local variable index
27 ind : output code index
28 rsym: return symbol
29 anon_sym: anonymous symbol index
31 ST_DATA int rsym, anon_sym, ind, loc;
33 ST_DATA Sym *sym_free_first;
34 ST_DATA void **sym_pools;
35 ST_DATA int nb_sym_pools;
37 ST_DATA Sym *global_stack;
38 ST_DATA Sym *local_stack;
39 ST_DATA Sym *define_stack;
40 ST_DATA Sym *global_label_stack;
41 ST_DATA Sym *local_label_stack;
42 static int local_scope;
43 static int in_sizeof;
44 static int section_sym;
46 ST_DATA int vlas_in_scope; /* number of VLAs that are currently in scope */
47 ST_DATA int vla_sp_root_loc; /* vla_sp_loc for SP before any VLAs were pushed */
48 ST_DATA int vla_sp_loc; /* Pointer to variable holding location to store stack pointer on the stack when modifying stack pointer */
50 ST_DATA SValue __vstack[1+VSTACK_SIZE], *vtop, *pvtop;
52 ST_DATA int const_wanted; /* true if constant wanted */
53 ST_DATA int nocode_wanted; /* true if no code generation wanted for an expression */
54 ST_DATA int global_expr; /* true if compound literals must be allocated globally (used during initializer parsing) */
55 ST_DATA CType func_vt; /* current function return type (used by return instruction) */
56 ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
57 ST_DATA int func_vc;
58 ST_DATA int last_line_num, last_ind, func_ind; /* debug last line number and pc */
59 ST_DATA const char *funcname;
61 ST_DATA CType char_pointer_type, func_old_type, int_type, size_type;
63 ST_DATA struct switch_t {
64 struct case_t {
65 int64_t v1, v2;
66 int sym;
67 } **p; int n; /* list of case ranges */
68 int def_sym; /* default symbol */
69 } *cur_switch; /* current switch */
71 /* ------------------------------------------------------------------------- */
73 static void gen_cast(CType *type);
74 static inline CType *pointed_type(CType *type);
75 static int is_compatible_types(CType *type1, CType *type2);
76 static int parse_btype(CType *type, AttributeDef *ad);
77 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td);
78 static void parse_expr_type(CType *type);
79 static void init_putv(CType *type, Section *sec, unsigned long c);
80 static void decl_initializer(CType *type, Section *sec, unsigned long c, int first, int size_only);
81 static void block(int *bsym, int *csym, int is_expr);
82 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
83 static int decl0(int l, int is_for_loop_init, Sym *);
84 static void expr_eq(void);
85 static void vla_runtime_type_size(CType *type, int *a);
86 static void vla_sp_restore(void);
87 static void vla_sp_restore_root(void);
88 static int is_compatible_parameter_types(CType *type1, CType *type2);
89 static inline int64_t expr_const64(void);
90 ST_FUNC void vpush64(int ty, unsigned long long v);
91 ST_FUNC void vpush(CType *type);
92 ST_FUNC int gvtst(int inv, int t);
93 ST_FUNC int is_btype_size(int bt);
94 static void gen_inline_functions(TCCState *s);
96 ST_INLN int is_float(int t)
98 int bt;
99 bt = t & VT_BTYPE;
100 return bt == VT_LDOUBLE || bt == VT_DOUBLE || bt == VT_FLOAT || bt == VT_QFLOAT;
103 /* we use our own 'finite' function to avoid potential problems with
104 non standard math libs */
105 /* XXX: endianness dependent */
106 ST_FUNC int ieee_finite(double d)
108 int p[4];
109 memcpy(p, &d, sizeof(double));
110 return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
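/* Editor's note -- illustrative sketch, not part of tccgen.c: on a
   little-endian host p[1] holds the high 32 bits of the double. OR-ing
   with 0x800fffff sets every bit except the 11 exponent bits, so adding 1
   propagates a carry through bit 31 exactly when the exponent field is all
   ones (Inf/NaN); the final shift therefore yields 0 for non-finite values
   and 1 otherwise. The same test written without the bit trick: */
#if 0
static int ieee_finite_readable(double d)
{
    unsigned long long u;
    memcpy(&u, &d, sizeof u);
    return ((u >> 52) & 0x7ff) != 0x7ff;  /* exponent not all ones */
}
#endif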
113 ST_FUNC void test_lvalue(void)
115 if (!(vtop->r & VT_LVAL))
116 expect("lvalue");
119 ST_FUNC void check_vstack(void)
121 if (pvtop != vtop)
122 tcc_error("internal compiler error: vstack leak (%d)", vtop - pvtop);
125 /* ------------------------------------------------------------------------- */
126 /* vstack debugging aid */
128 #if 0
129 void pv (const char *lbl, int a, int b)
131 int i;
132 for (i = a; i < a + b; ++i) {
133 SValue *p = &vtop[-i];
134 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
135 lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
138 #endif
140 /* ------------------------------------------------------------------------- */
141 /* start of translation unit info */
142 ST_FUNC void tcc_debug_start(TCCState *s1)
144 if (s1->do_debug) {
145 char buf[512];
147 /* file info: full path + filename */
148 section_sym = put_elf_sym(symtab_section, 0, 0,
149 ELFW(ST_INFO)(STB_LOCAL, STT_SECTION), 0,
150 text_section->sh_num, NULL);
151 getcwd(buf, sizeof(buf));
152 #ifdef _WIN32
153 normalize_slashes(buf);
154 #endif
155 pstrcat(buf, sizeof(buf), "/");
156 put_stabs_r(buf, N_SO, 0, 0,
157 text_section->data_offset, text_section, section_sym);
158 put_stabs_r(file->filename, N_SO, 0, 0,
159 text_section->data_offset, text_section, section_sym);
160 last_ind = 0;
161 last_line_num = 0;
164 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
165 symbols can be safely used */
166 put_elf_sym(symtab_section, 0, 0,
167 ELFW(ST_INFO)(STB_LOCAL, STT_FILE), 0,
168 SHN_ABS, file->filename);
171 /* put end of translation unit info */
172 ST_FUNC void tcc_debug_end(TCCState *s1)
174 if (!s1->do_debug)
175 return;
176 put_stabs_r(NULL, N_SO, 0, 0,
177 text_section->data_offset, text_section, section_sym);
181 /* generate line number info */
182 ST_FUNC void tcc_debug_line(TCCState *s1)
184 if (!s1->do_debug)
185 return;
186 if ((last_line_num != file->line_num || last_ind != ind)) {
187 put_stabn(N_SLINE, 0, file->line_num, ind - func_ind);
188 last_ind = ind;
189 last_line_num = file->line_num;
193 /* put function symbol */
194 ST_FUNC void tcc_debug_funcstart(TCCState *s1, Sym *sym)
196 char buf[512];
198 if (!s1->do_debug)
199 return;
201 /* stabs info */
202 /* XXX: we put here a dummy type */
203 snprintf(buf, sizeof(buf), "%s:%c1",
204 funcname, sym->type.t & VT_STATIC ? 'f' : 'F');
205 put_stabs_r(buf, N_FUN, 0, file->line_num, 0,
206 cur_text_section, sym->c);
207 /* //gr gdb wants a line at the function */
208 put_stabn(N_SLINE, 0, file->line_num, 0);
210 last_ind = 0;
211 last_line_num = 0;
214 /* put function size */
215 ST_FUNC void tcc_debug_funcend(TCCState *s1, int size)
217 if (!s1->do_debug)
218 return;
219 put_stabn(N_FUN, 0, 0, size);
222 /* ------------------------------------------------------------------------- */
223 ST_FUNC void tccgen_start(TCCState *s1)
225 cur_text_section = NULL;
226 funcname = "";
227 anon_sym = SYM_FIRST_ANOM;
228 section_sym = 0;
229 const_wanted = 0;
230 nocode_wanted = 1;
232 /* define some often used types */
233 int_type.t = VT_INT;
234 char_pointer_type.t = VT_BYTE;
235 mk_pointer(&char_pointer_type);
236 #if PTR_SIZE == 4
237 size_type.t = VT_INT;
238 #else
239 size_type.t = VT_LLONG;
240 #endif
241 func_old_type.t = VT_FUNC;
242 func_old_type.ref = sym_push(SYM_FIELD, &int_type, FUNC_CDECL, FUNC_OLD);
244 tcc_debug_start(s1);
246 #ifdef TCC_TARGET_ARM
247 arm_init(s1);
248 #endif
251 ST_FUNC void tccgen_end(TCCState *s1)
253 gen_inline_functions(s1);
254 check_vstack();
255 /* end of translation unit info */
256 tcc_debug_end(s1);
259 /* ------------------------------------------------------------------------- */
260 /* apply storage attributes to Elf symbol */
262 static void update_storage(Sym *sym)
264 int t;
265 ElfW(Sym) *esym;
267 if (0 == sym->c)
268 return;
270 t = sym->type.t;
271 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
273 if (t & VT_VIS_MASK)
274 esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1))
275 | ((t & VT_VIS_MASK) >> VT_VIS_SHIFT);
277 if (t & VT_WEAK)
278 esym->st_info = ELFW(ST_INFO)(STB_WEAK, ELFW(ST_TYPE)(esym->st_info));
280 #ifdef TCC_TARGET_PE
281 if (t & VT_EXPORT)
282 esym->st_other |= ST_PE_EXPORT;
283 #endif
286 /* ------------------------------------------------------------------------- */
287 /* update sym->c so that it points to an external symbol in section
288 'section' with value 'value' */
290 ST_FUNC void put_extern_sym2(Sym *sym, Section *section,
291 addr_t value, unsigned long size,
292 int can_add_underscore)
294 int sym_type, sym_bind, sh_num, info, other, t;
295 ElfW(Sym) *esym;
296 const char *name;
297 char buf1[256];
298 #ifdef CONFIG_TCC_BCHECK
299 char buf[32];
300 #endif
302 if (section == NULL)
303 sh_num = SHN_UNDEF;
304 else if (section == SECTION_ABS)
305 sh_num = SHN_ABS;
306 else
307 sh_num = section->sh_num;
309 if (!sym->c) {
310 name = get_tok_str(sym->v, NULL);
311 #ifdef CONFIG_TCC_BCHECK
312 if (tcc_state->do_bounds_check) {
313 /* XXX: avoid doing that for statics ? */
314 /* if bound checking is activated, we change some function
315 names by adding the "__bound" prefix */
316 switch(sym->v) {
317 #ifdef TCC_TARGET_PE
318 /* XXX: we rely only on malloc hooks */
319 case TOK_malloc:
320 case TOK_free:
321 case TOK_realloc:
322 case TOK_memalign:
323 case TOK_calloc:
324 #endif
325 case TOK_memcpy:
326 case TOK_memmove:
327 case TOK_memset:
328 case TOK_strlen:
329 case TOK_strcpy:
330 case TOK_alloca:
331 strcpy(buf, "__bound_");
332 strcat(buf, name);
333 name = buf;
334 break;
337 #endif
338 t = sym->type.t;
339 if ((t & VT_BTYPE) == VT_FUNC) {
340 sym_type = STT_FUNC;
341 } else if ((t & VT_BTYPE) == VT_VOID) {
342 sym_type = STT_NOTYPE;
343 } else {
344 sym_type = STT_OBJECT;
346 if (t & VT_STATIC)
347 sym_bind = STB_LOCAL;
348 else
349 sym_bind = STB_GLOBAL;
350 other = 0;
351 #ifdef TCC_TARGET_PE
352 if (sym_type == STT_FUNC && sym->type.ref) {
353 Sym *ref = sym->type.ref;
354 if (ref->a.func_call == FUNC_STDCALL && can_add_underscore) {
355 sprintf(buf1, "_%s@%d", name, ref->a.func_args * PTR_SIZE);
356 name = buf1;
357 other |= ST_PE_STDCALL;
358 can_add_underscore = 0;
361 if (t & VT_IMPORT)
362 other |= ST_PE_IMPORT;
363 #endif
364 if (tcc_state->leading_underscore && can_add_underscore) {
365 buf1[0] = '_';
366 pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
367 name = buf1;
369 if (sym->asm_label)
370 name = get_tok_str(sym->asm_label, NULL);
371 info = ELFW(ST_INFO)(sym_bind, sym_type);
372 sym->c = set_elf_sym(symtab_section, value, size, info, other, sh_num, name);
373 } else {
374 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
375 esym->st_value = value;
376 esym->st_size = size;
377 esym->st_shndx = sh_num;
379 update_storage(sym);
382 ST_FUNC void put_extern_sym(Sym *sym, Section *section,
383 addr_t value, unsigned long size)
385 put_extern_sym2(sym, section, value, size, 1);
388 /* add a new relocation entry to symbol 'sym' in section 's' */
389 ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
390 addr_t addend)
392 int c = 0;
394 if (nocode_wanted && s == cur_text_section)
395 return;
397 if (sym) {
398 if (0 == sym->c)
399 put_extern_sym(sym, NULL, 0, 0);
400 c = sym->c;
403 /* now we can add ELF relocation info */
404 put_elf_reloca(symtab_section, s, offset, type, c, addend);
407 #if PTR_SIZE == 4
408 ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
410 greloca(s, sym, offset, type, 0);
412 #endif
414 /* ------------------------------------------------------------------------- */
415 /* symbol allocator */
416 static Sym *__sym_malloc(void)
418 Sym *sym_pool, *sym, *last_sym;
419 int i;
421 sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
422 dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);
424 last_sym = sym_free_first;
425 sym = sym_pool;
426 for(i = 0; i < SYM_POOL_NB; i++) {
427 sym->next = last_sym;
428 last_sym = sym;
429 sym++;
431 sym_free_first = last_sym;
432 return last_sym;
435 static inline Sym *sym_malloc(void)
437 Sym *sym;
438 #ifndef SYM_DEBUG
439 sym = sym_free_first;
440 if (!sym)
441 sym = __sym_malloc();
442 sym_free_first = sym->next;
443 return sym;
444 #else
445 sym = tcc_malloc(sizeof(Sym));
446 return sym;
447 #endif
450 ST_INLN void sym_free(Sym *sym)
452 #ifndef SYM_DEBUG
453 sym->next = sym_free_first;
454 sym_free_first = sym;
455 #else
456 tcc_free(sym);
457 #endif
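/* Editor's note -- usage sketch (illustrative, not in the original file):
   symbols are recycled through the intrusive free list above instead of
   being returned to the heap; the pools themselves are only released when
   the compiler state is destroyed. */
#if 0
{
    Sym *s = sym_malloc();   /* pops sym_free_first, growing the pool via
                                __sym_malloc() when the list is empty */
    /* ... fill in s->v, s->type, ... */
    sym_free(s);             /* pushes s back onto sym_free_first */
}
#endif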
460 /* push, without hashing */
461 ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, long c)
463 Sym *s;
465 s = sym_malloc();
466 s->scope = 0;
467 s->v = v;
468 s->type.t = t;
469 s->type.ref = NULL;
470 #ifdef _WIN64
471 s->d = NULL;
472 #endif
473 s->c = c;
474 s->next = NULL;
475 /* add in stack */
476 s->prev = *ps;
477 *ps = s;
478 return s;
481 /* find a symbol and return its associated structure. 's' is the top
482 of the symbol stack */
483 ST_FUNC Sym *sym_find2(Sym *s, int v)
485 while (s) {
486 if (s->v == v)
487 return s;
488 else if (s->v == -1)
489 return NULL;
490 s = s->prev;
492 return NULL;
495 /* structure lookup */
496 ST_INLN Sym *struct_find(int v)
498 v -= TOK_IDENT;
499 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
500 return NULL;
501 return table_ident[v]->sym_struct;
504 /* find an identifier */
505 ST_INLN Sym *sym_find(int v)
507 v -= TOK_IDENT;
508 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
509 return NULL;
510 return table_ident[v]->sym_identifier;
513 /* push a given symbol on the symbol stack */
514 ST_FUNC Sym *sym_push(int v, CType *type, int r, long c)
516 Sym *s, **ps;
517 TokenSym *ts;
519 if (local_stack)
520 ps = &local_stack;
521 else
522 ps = &global_stack;
523 s = sym_push2(ps, v, type->t, c);
524 s->type.ref = type->ref;
525 s->r = r;
526 /* don't record fields or anonymous symbols */
527 /* XXX: simplify */
528 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
529 /* record symbol in token array */
530 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
531 if (v & SYM_STRUCT)
532 ps = &ts->sym_struct;
533 else
534 ps = &ts->sym_identifier;
535 s->prev_tok = *ps;
536 *ps = s;
537 s->scope = local_scope;
538 if (s->prev_tok && s->prev_tok->scope == s->scope)
539 tcc_error("redeclaration of '%s'",
540 get_tok_str(v & ~SYM_STRUCT, NULL));
542 return s;
545 /* push a global identifier */
546 ST_FUNC Sym *global_identifier_push(int v, int t, long c)
548 Sym *s, **ps;
549 s = sym_push2(&global_stack, v, t, c);
550 /* don't record anonymous symbol */
551 if (v < SYM_FIRST_ANOM) {
552 ps = &table_ident[v - TOK_IDENT]->sym_identifier;
553 /* modify the top most local identifier, so that
554 sym_identifier will point to 's' when popped */
555 while (*ps != NULL)
556 ps = &(*ps)->prev_tok;
557 s->prev_tok = NULL;
558 *ps = s;
560 return s;
563 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
564 pop them yet from the list, but do remove them from the token array. */
565 ST_FUNC void sym_pop(Sym **ptop, Sym *b, int keep)
567 Sym *s, *ss, **ps;
568 TokenSym *ts;
569 int v;
571 s = *ptop;
572 while(s != b) {
573 ss = s->prev;
574 v = s->v;
575 /* remove symbol in token array */
576 /* XXX: simplify */
577 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
578 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
579 if (v & SYM_STRUCT)
580 ps = &ts->sym_struct;
581 else
582 ps = &ts->sym_identifier;
583 *ps = s->prev_tok;
585 if (!keep)
586 sym_free(s);
587 s = ss;
589 if (!keep)
590 *ptop = b;
593 /* ------------------------------------------------------------------------- */
595 static void vsetc(CType *type, int r, CValue *vc)
597 int v;
599 if (vtop >= vstack + (VSTACK_SIZE - 1))
600 tcc_error("memory full (vstack)");
601 /* cannot leave cpu flags pending if other instructions are generated. Also
602 avoid leaving VT_JMP anywhere except on the top of the stack
603 because it would complicate the code generator.
605 Don't do this when nocode_wanted. vtop might come from
606 !nocode_wanted regions (see 88_codeopt.c) and transforming
607 it to a register without actually generating code is wrong
608 as their value might still be used for real. All values
609 we push under nocode_wanted will eventually be popped
610 again, so that the VT_CMP/VT_JMP value will be in vtop
611 when code is unsuppressed again.
613 Same logic below in vswap(); */
614 if (vtop >= vstack && !nocode_wanted) {
615 v = vtop->r & VT_VALMASK;
616 if (v == VT_CMP || (v & ~1) == VT_JMP)
617 gv(RC_INT);
620 vtop++;
621 vtop->type = *type;
622 vtop->r = r;
623 vtop->r2 = VT_CONST;
624 vtop->c = *vc;
625 vtop->sym = NULL;
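/* Editor's note -- illustrative usage (assumption, not part of the original
   file): if vtop currently holds a VT_CMP value (live cpu flags), pushing
   anything else goes through vsetc(), which first materialises the flags
   into an integer register via gv(RC_INT) so that later instructions
   cannot clobber them: */
#if 0
gen_op(TOK_LT);   /* comparison leaves a VT_CMP value on vtop */
vpushi(1);        /* vsetc() spills the flags to a register before pushing */
#endif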
628 ST_FUNC void vswap(void)
630 SValue tmp;
631 /* cannot vswap cpu flags. See comment at vsetc() above */
632 if (vtop >= vstack && !nocode_wanted) {
633 int v = vtop->r & VT_VALMASK;
634 if (v == VT_CMP || (v & ~1) == VT_JMP)
635 gv(RC_INT);
637 tmp = vtop[0];
638 vtop[0] = vtop[-1];
639 vtop[-1] = tmp;
642 /* pop stack value */
643 ST_FUNC void vpop(void)
645 int v;
646 v = vtop->r & VT_VALMASK;
647 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
648 /* for x86, we need to pop the FP stack */
649 if (v == TREG_ST0) {
650 o(0xd8dd); /* fstp %st(0) */
651 } else
652 #endif
653 if (v == VT_JMP || v == VT_JMPI) {
654 /* need to put correct jump if && or || without test */
655 gsym(vtop->c.i);
657 vtop--;
660 /* push constant of type "type" with useless value */
661 ST_FUNC void vpush(CType *type)
663 CValue cval;
664 vsetc(type, VT_CONST, &cval);
667 /* push integer constant */
668 ST_FUNC void vpushi(int v)
670 CValue cval;
671 cval.i = v;
672 vsetc(&int_type, VT_CONST, &cval);
675 /* push a pointer sized constant */
676 static void vpushs(addr_t v)
678 CValue cval;
679 cval.i = v;
680 vsetc(&size_type, VT_CONST, &cval);
683 /* push arbitrary 64bit constant */
684 ST_FUNC void vpush64(int ty, unsigned long long v)
686 CValue cval;
687 CType ctype;
688 ctype.t = ty;
689 ctype.ref = NULL;
690 cval.i = v;
691 vsetc(&ctype, VT_CONST, &cval);
694 /* push long long constant */
695 static inline void vpushll(long long v)
697 vpush64(VT_LLONG, v);
700 ST_FUNC void vset(CType *type, int r, long v)
702 CValue cval;
704 cval.i = v;
705 vsetc(type, r, &cval);
708 static void vseti(int r, int v)
710 CType type;
711 type.t = VT_INT;
712 type.ref = 0;
713 vset(&type, r, v);
716 ST_FUNC void vpushv(SValue *v)
718 if (vtop >= vstack + (VSTACK_SIZE - 1))
719 tcc_error("memory full (vstack)");
720 vtop++;
721 *vtop = *v;
724 static void vdup(void)
726 vpushv(vtop);
729 /* rotate n first stack elements to the bottom
730 I1 ... In -> I2 ... In I1 [top is right]
732 ST_FUNC void vrotb(int n)
734 int i;
735 SValue tmp;
737 tmp = vtop[-n + 1];
738 for(i=-n+1;i!=0;i++)
739 vtop[i] = vtop[i+1];
740 vtop[0] = tmp;
743 /* rotate the n elements before entry e towards the top
744 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
746 ST_FUNC void vrote(SValue *e, int n)
748 int i;
749 SValue tmp;
751 tmp = *e;
752 for(i = 0;i < n - 1; i++)
753 e[-i] = e[-i - 1];
754 e[-n + 1] = tmp;
757 /* rotate n first stack elements to the top
758 I1 ... In -> In I1 ... I(n-1) [top is right]
760 ST_FUNC void vrott(int n)
762 vrote(vtop, n);
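/* Editor's note -- worked example (not part of the original file): with the
   value stack holding A B C (C on top), vrotb(3) gives B C A and vrott(3)
   gives C A B.  A minimal standalone model of vrotb over an array whose
   last element plays the role of vtop: */
#if 0
static void rotb_model(int *grp, int n)   /* grp[n-1] is the "top" */
{
    int i, bottom = grp[0];
    for (i = 0; i < n - 1; i++)
        grp[i] = grp[i + 1];
    grp[n - 1] = bottom;                  /* deepest element becomes the top */
}
#endif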
765 /* push a symbol value of TYPE */
766 static inline void vpushsym(CType *type, Sym *sym)
768 CValue cval;
769 cval.i = 0;
770 vsetc(type, VT_CONST | VT_SYM, &cval);
771 vtop->sym = sym;
774 /* Return a static symbol pointing to a section */
775 ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
777 int v;
778 Sym *sym;
780 v = anon_sym++;
781 sym = global_identifier_push(v, type->t | VT_STATIC, 0);
782 sym->type.ref = type->ref;
783 sym->r = VT_CONST | VT_SYM;
784 put_extern_sym(sym, sec, offset, size);
785 return sym;
788 /* push a reference to a section offset by adding a dummy symbol */
789 static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
791 vpushsym(type, get_sym_ref(type, sec, offset, size));
794 /* define a new external reference to a symbol 'v' of type 'u' */
795 ST_FUNC Sym *external_global_sym(int v, CType *type, int r)
797 Sym *s;
799 s = sym_find(v);
800 if (!s) {
801 /* push forward reference */
802 s = global_identifier_push(v, type->t | VT_EXTERN, 0);
803 s->type.ref = type->ref;
804 s->r = r | VT_CONST | VT_SYM;
806 return s;
809 /* Merge some storage attributes. */
810 static void patch_storage(Sym *sym, CType *type)
812 int t;
813 if (!is_compatible_types(&sym->type, type))
814 tcc_error("incompatible types for redefinition of '%s'",
815 get_tok_str(sym->v, NULL));
816 t = type->t;
817 #ifdef TCC_TARGET_PE
818 if ((sym->type.t ^ t) & VT_IMPORT)
819 tcc_error("incompatible dll linkage for redefinition of '%s'",
820 get_tok_str(sym->v, NULL));
821 #endif
822 sym->type.t |= t & (VT_EXPORT|VT_WEAK);
823 if (t & VT_VIS_MASK) {
824 int vis = sym->type.t & VT_VIS_MASK;
825 int vis2 = t & VT_VIS_MASK;
826 if (vis == (STV_DEFAULT << VT_VIS_SHIFT))
827 vis = vis2;
828 else if (vis2 != (STV_DEFAULT << VT_VIS_SHIFT))
829 vis = (vis < vis2) ? vis : vis2;
830 sym->type.t = (sym->type.t & ~VT_VIS_MASK) | vis;
834 /* define a new external reference to a symbol 'v' */
835 static Sym *external_sym(int v, CType *type, int r)
837 Sym *s;
838 s = sym_find(v);
839 if (!s) {
840 /* push forward reference */
841 s = sym_push(v, type, r | VT_CONST | VT_SYM, 0);
842 s->type.t |= VT_EXTERN;
843 } else {
844 if (s->type.ref == func_old_type.ref) {
845 s->type.ref = type->ref;
846 s->r = r | VT_CONST | VT_SYM;
847 s->type.t |= VT_EXTERN;
849 patch_storage(s, type);
850 update_storage(s);
852 return s;
855 /* push a reference to global symbol v */
856 ST_FUNC void vpush_global_sym(CType *type, int v)
858 vpushsym(type, external_global_sym(v, type, 0));
861 /* save registers up to (vtop - n) stack entry */
862 ST_FUNC void save_regs(int n)
864 SValue *p, *p1;
865 for(p = vstack, p1 = vtop - n; p <= p1; p++)
866 save_reg(p->r);
869 /* save r to the memory stack, and mark it as being free */
870 ST_FUNC void save_reg(int r)
872 save_reg_upstack(r, 0);
875 /* save r to the memory stack, and mark it as being free,
876 if seen up to (vtop - n) stack entry */
877 ST_FUNC void save_reg_upstack(int r, int n)
879 int l, saved, size, align;
880 SValue *p, *p1, sv;
881 CType *type;
883 if ((r &= VT_VALMASK) >= VT_CONST)
884 return;
885 if (nocode_wanted)
886 return;
888 /* modify all stack values */
889 saved = 0;
890 l = 0;
891 for(p = vstack, p1 = vtop - n; p <= p1; p++) {
892 if ((p->r & VT_VALMASK) == r ||
893 ((p->type.t & VT_BTYPE) == VT_LLONG && (p->r2 & VT_VALMASK) == r)) {
894 /* must save value on stack if not already done */
895 if (!saved) {
896 /* NOTE: must reload 'r' because r might be equal to r2 */
897 r = p->r & VT_VALMASK;
898 /* store register in the stack */
899 type = &p->type;
900 if ((p->r & VT_LVAL) ||
901 (!is_float(type->t) && (type->t & VT_BTYPE) != VT_LLONG))
902 #if PTR_SIZE == 8
903 type = &char_pointer_type;
904 #else
905 type = &int_type;
906 #endif
907 size = type_size(type, &align);
908 loc = (loc - size) & -align;
909 sv.type.t = type->t;
910 sv.r = VT_LOCAL | VT_LVAL;
911 sv.c.i = loc;
912 store(r, &sv);
913 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
914 /* x86 specific: need to pop fp register ST0 if saved */
915 if (r == TREG_ST0) {
916 o(0xd8dd); /* fstp %st(0) */
918 #endif
919 #if PTR_SIZE == 4
920 /* special long long case */
921 if ((type->t & VT_BTYPE) == VT_LLONG) {
922 sv.c.i += 4;
923 store(p->r2, &sv);
925 #endif
926 l = loc;
927 saved = 1;
929 /* mark that stack entry as being saved on the stack */
930 if (p->r & VT_LVAL) {
931 /* also clear the bounded flag because the
932 relocation address of the function was stored in
933 p->c.i */
934 p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
935 } else {
936 p->r = lvalue_type(p->type.t) | VT_LOCAL;
938 p->r2 = VT_CONST;
939 p->c.i = l;
944 #ifdef TCC_TARGET_ARM
945 /* find a register of class 'rc2' with at most one reference on stack.
946 * If none, call get_reg(rc) */
947 ST_FUNC int get_reg_ex(int rc, int rc2)
949 int r;
950 SValue *p;
952 for(r=0;r<NB_REGS;r++) {
953 if (reg_classes[r] & rc2) {
954 int n;
955 n=0;
956 for(p = vstack; p <= vtop; p++) {
957 if ((p->r & VT_VALMASK) == r ||
958 (p->r2 & VT_VALMASK) == r)
959 n++;
961 if (n <= 1)
962 return r;
965 return get_reg(rc);
967 #endif
969 /* find a free register of class 'rc'. If none, save one register */
970 ST_FUNC int get_reg(int rc)
972 int r;
973 SValue *p;
975 /* find a free register */
976 for(r=0;r<NB_REGS;r++) {
977 if (reg_classes[r] & rc) {
978 if (nocode_wanted)
979 return r;
980 for(p=vstack;p<=vtop;p++) {
981 if ((p->r & VT_VALMASK) == r ||
982 (p->r2 & VT_VALMASK) == r)
983 goto notfound;
985 return r;
987 notfound: ;
990 /* no register left : free the first one on the stack (VERY
991 IMPORTANT to start from the bottom to ensure that we don't
992 spill registers used in gen_opi()) */
993 for(p=vstack;p<=vtop;p++) {
994 /* look at second register (if long long) */
995 r = p->r2 & VT_VALMASK;
996 if (r < VT_CONST && (reg_classes[r] & rc))
997 goto save_found;
998 r = p->r & VT_VALMASK;
999 if (r < VT_CONST && (reg_classes[r] & rc)) {
1000 save_found:
1001 save_reg(r);
1002 return r;
1005 /* Should never come here */
1006 return -1;
1009 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1010 if needed */
1011 static void move_reg(int r, int s, int t)
1013 SValue sv;
1015 if (r != s) {
1016 save_reg(r);
1017 sv.type.t = t;
1018 sv.type.ref = NULL;
1019 sv.r = s;
1020 sv.c.i = 0;
1021 load(r, &sv);
1025 /* get address of vtop (vtop MUST BE an lvalue) */
1026 ST_FUNC void gaddrof(void)
1028 vtop->r &= ~VT_LVAL;
1029 /* tricky: if saved lvalue, then we can go back to lvalue */
1030 if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
1031 vtop->r = (vtop->r & ~(VT_VALMASK | VT_LVAL_TYPE)) | VT_LOCAL | VT_LVAL;
1036 #ifdef CONFIG_TCC_BCHECK
1037 /* generate lvalue bound code */
1038 static void gbound(void)
1040 int lval_type;
1041 CType type1;
1043 vtop->r &= ~VT_MUSTBOUND;
1044 /* if lvalue, then use checking code before dereferencing */
1045 if (vtop->r & VT_LVAL) {
1046 /* if not VT_BOUNDED value, then make one */
1047 if (!(vtop->r & VT_BOUNDED)) {
1048 lval_type = vtop->r & (VT_LVAL_TYPE | VT_LVAL);
1049 /* must save type because we must set it to int to get pointer */
1050 type1 = vtop->type;
1051 vtop->type.t = VT_PTR;
1052 gaddrof();
1053 vpushi(0);
1054 gen_bounded_ptr_add();
1055 vtop->r |= lval_type;
1056 vtop->type = type1;
1058 /* then check for dereferencing */
1059 gen_bounded_ptr_deref();
1062 #endif
1064 /* store vtop in a register belonging to class 'rc'. lvalues are
1065 converted to values. Cannot be used if the value cannot be converted
1066 to a register value (such as structures). */
1067 ST_FUNC int gv(int rc)
1069 int r, bit_pos, bit_size, size, align;
1070 int rc2;
1072 /* NOTE: get_reg can modify vstack[] */
1073 if (vtop->type.t & VT_BITFIELD) {
1074 CType type;
1075 int bits = 32;
1076 bit_pos = (vtop->type.t >> VT_STRUCT_SHIFT) & 0x3f;
1077 bit_size = (vtop->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
1078 /* remove bit field info to avoid loops */
1079 vtop->type.t &= ~VT_BITFIELD & ((1 << VT_STRUCT_SHIFT) - 1);
1080 /* cast to int to propagate signedness in following ops */
1081 if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
1082 type.t = VT_LLONG;
1083 bits = 64;
1084 } else
1085 type.t = VT_INT;
1086 if((vtop->type.t & VT_UNSIGNED) ||
1087 (vtop->type.t & VT_BTYPE) == VT_BOOL ||
1088 (((vtop->type.t & VT_BTYPE) == VT_ENUM) &&
1089 vtop->type.ref->a.unsigned_enum))
1090 type.t |= VT_UNSIGNED;
1091 gen_cast(&type);
1092 /* generate shifts */
1093 vpushi(bits - (bit_pos + bit_size));
1094 gen_op(TOK_SHL);
1095 vpushi(bits - bit_size);
1096 /* NOTE: transformed to SHR if unsigned */
1097 gen_op(TOK_SAR);
1098 r = gv(rc);
1099 } else {
1100 if (is_float(vtop->type.t) &&
1101 (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1102 unsigned long offset;
1103 /* CPUs usually cannot use float constants, so we store them
1104 generically in data segment */
1105 size = type_size(&vtop->type, &align);
1106 offset = section_add(data_section, size, align);
1107 vpush_ref(&vtop->type, data_section, offset, size);
1108 vswap();
1109 init_putv(&vtop->type, data_section, offset);
1110 vtop->r |= VT_LVAL;
1112 #ifdef CONFIG_TCC_BCHECK
1113 if (vtop->r & VT_MUSTBOUND)
1114 gbound();
1115 #endif
1117 r = vtop->r & VT_VALMASK;
1118 rc2 = (rc & RC_FLOAT) ? RC_FLOAT : RC_INT;
1119 #ifndef TCC_TARGET_ARM64
1120 if (rc == RC_IRET)
1121 rc2 = RC_LRET;
1122 #ifdef TCC_TARGET_X86_64
1123 else if (rc == RC_FRET)
1124 rc2 = RC_QRET;
1125 #endif
1126 #endif
1127 /* need to reload if:
1128 - constant
1129 - lvalue (need to dereference pointer)
1130 - already a register, but not in the right class */
1131 if (r >= VT_CONST
1132 || (vtop->r & VT_LVAL)
1133 || !(reg_classes[r] & rc)
1134 #if PTR_SIZE == 8
1135 || ((vtop->type.t & VT_BTYPE) == VT_QLONG && !(reg_classes[vtop->r2] & rc2))
1136 || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT && !(reg_classes[vtop->r2] & rc2))
1137 #else
1138 || ((vtop->type.t & VT_BTYPE) == VT_LLONG && !(reg_classes[vtop->r2] & rc2))
1139 #endif
1142 r = get_reg(rc);
1143 #if PTR_SIZE == 8
1144 if (((vtop->type.t & VT_BTYPE) == VT_QLONG) || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT)) {
1145 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
1146 #else
1147 if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
1148 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
1149 unsigned long long ll;
1150 #endif
1151 int r2, original_type;
1152 original_type = vtop->type.t;
1153 /* two register type load : expand to two words
1154 temporarily */
1155 #if PTR_SIZE == 4
1156 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1157 /* load constant */
1158 ll = vtop->c.i;
1159 vtop->c.i = ll; /* first word */
1160 load(r, vtop);
1161 vtop->r = r; /* save register value */
1162 vpushi(ll >> 32); /* second word */
1163 } else
1164 #endif
1165 if (vtop->r & VT_LVAL) {
1166 /* We do not want to modify the long long
1167 pointer here, so the safest (and least
1168 efficient) option is to save all the other registers
1169 on the stack. XXX: totally inefficient. */
1170 #if 0
1171 save_regs(1);
1172 #else
1173 /* lvalue_save: save only if used further down the stack */
1174 save_reg_upstack(vtop->r, 1);
1175 #endif
1176 /* load from memory */
1177 vtop->type.t = load_type;
1178 load(r, vtop);
1179 vdup();
1180 vtop[-1].r = r; /* save register value */
1181 /* increment pointer to get second word */
1182 vtop->type.t = addr_type;
1183 gaddrof();
1184 vpushi(load_size);
1185 gen_op('+');
1186 vtop->r |= VT_LVAL;
1187 vtop->type.t = load_type;
1188 } else {
1189 /* move registers */
1190 load(r, vtop);
1191 vdup();
1192 vtop[-1].r = r; /* save register value */
1193 vtop->r = vtop[-1].r2;
1195 /* Allocate second register. Here we rely on the fact that
1196 get_reg() tries first to free r2 of an SValue. */
1197 r2 = get_reg(rc2);
1198 load(r2, vtop);
1199 vpop();
1200 /* write second register */
1201 vtop->r2 = r2;
1202 vtop->type.t = original_type;
1203 } else if ((vtop->r & VT_LVAL) && !is_float(vtop->type.t)) {
1204 int t1, t;
1205 /* lvalue of scalar type : need to use lvalue type
1206 because of possible cast */
1207 t = vtop->type.t;
1208 t1 = t;
1209 /* compute memory access type */
1210 if (vtop->r & VT_LVAL_BYTE)
1211 t = VT_BYTE;
1212 else if (vtop->r & VT_LVAL_SHORT)
1213 t = VT_SHORT;
1214 if (vtop->r & VT_LVAL_UNSIGNED)
1215 t |= VT_UNSIGNED;
1216 vtop->type.t = t;
1217 load(r, vtop);
1218 /* restore wanted type */
1219 vtop->type.t = t1;
1220 } else {
1221 /* one register type load */
1222 load(r, vtop);
1225 vtop->r = r;
1226 #ifdef TCC_TARGET_C67
1227 /* uses register pairs for doubles */
1228 if ((vtop->type.t & VT_BTYPE) == VT_DOUBLE)
1229 vtop->r2 = r+1;
1230 #endif
1232 return r;
1235 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1236 ST_FUNC void gv2(int rc1, int rc2)
1238 int v;
1240 /* generate more generic register first. But VT_JMP or VT_CMP
1241 values must be generated first in all cases to avoid possible
1242 reload errors */
1243 v = vtop[0].r & VT_VALMASK;
1244 if (v != VT_CMP && (v & ~1) != VT_JMP && rc1 <= rc2) {
1245 vswap();
1246 gv(rc1);
1247 vswap();
1248 gv(rc2);
1249 /* test if reload is needed for first register */
1250 if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
1251 vswap();
1252 gv(rc1);
1253 vswap();
1255 } else {
1256 gv(rc2);
1257 vswap();
1258 gv(rc1);
1259 vswap();
1260 /* test if reload is needed for first register */
1261 if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
1262 gv(rc2);
1267 #ifndef TCC_TARGET_ARM64
1268 /* wrapper around RC_FRET to return a register by type */
1269 static int rc_fret(int t)
1271 #ifdef TCC_TARGET_X86_64
1272 if (t == VT_LDOUBLE) {
1273 return RC_ST0;
1275 #endif
1276 return RC_FRET;
1278 #endif
1280 /* wrapper around REG_FRET to return a register by type */
1281 static int reg_fret(int t)
1283 #ifdef TCC_TARGET_X86_64
1284 if (t == VT_LDOUBLE) {
1285 return TREG_ST0;
1287 #endif
1288 return REG_FRET;
1291 #if PTR_SIZE == 4
1292 /* expand 64bit on stack in two ints */
1293 static void lexpand(void)
1295 int u, v;
1296 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1297 v = vtop->r & (VT_VALMASK | VT_LVAL);
1298 if (v == VT_CONST) {
1299 vdup();
1300 vtop[0].c.i >>= 32;
1301 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1302 vdup();
1303 vtop[0].c.i += 4;
1304 } else {
1305 gv(RC_INT);
1306 vdup();
1307 vtop[0].r = vtop[-1].r2;
1308 vtop[0].r2 = vtop[-1].r2 = VT_CONST;
1310 vtop[0].type.t = vtop[-1].type.t = VT_INT | u;
1312 #endif
1314 #ifdef TCC_TARGET_ARM
1315 /* expand long long on stack */
1316 ST_FUNC void lexpand_nr(void)
1318 int u,v;
1320 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1321 vdup();
1322 vtop->r2 = VT_CONST;
1323 vtop->type.t = VT_INT | u;
1324 v=vtop[-1].r & (VT_VALMASK | VT_LVAL);
1325 if (v == VT_CONST) {
1326 vtop[-1].c.i = vtop->c.i;
1327 vtop->c.i = vtop->c.i >> 32;
1328 vtop->r = VT_CONST;
1329 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1330 vtop->c.i += 4;
1331 vtop->r = vtop[-1].r;
1332 } else if (v > VT_CONST) {
1333 vtop--;
1334 lexpand();
1335 } else
1336 vtop->r = vtop[-1].r2;
1337 vtop[-1].r2 = VT_CONST;
1338 vtop[-1].type.t = VT_INT | u;
1340 #endif
1342 #if PTR_SIZE == 4
1343 /* build a long long from two ints */
1344 static void lbuild(int t)
1346 gv2(RC_INT, RC_INT);
1347 vtop[-1].r2 = vtop[0].r;
1348 vtop[-1].type.t = t;
1349 vpop();
1351 #endif
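/* Editor's note -- illustrative sketch (assumption, not part of the original
   file): on 32-bit targets a long long occupies two vstack entries /
   register pairs.  lexpand() splits the top value into low and high ints
   (high word on top) and lbuild() recombines them, conceptually: */
#if 0
unsigned long long v = 0x1122334455667788ULL;
unsigned int lo = (unsigned int)v;          /* 0x55667788, lower entry */
unsigned int hi = (unsigned int)(v >> 32);  /* 0x11223344, pushed on top */
#endif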
1353 /* convert stack entry to register and duplicate its value in another
1354 register */
1355 static void gv_dup(void)
1357 int rc, t, r, r1;
1358 SValue sv;
1360 t = vtop->type.t;
1361 #if PTR_SIZE == 4
1362 if ((t & VT_BTYPE) == VT_LLONG) {
1363 lexpand();
1364 gv_dup();
1365 vswap();
1366 vrotb(3);
1367 gv_dup();
1368 vrotb(4);
1369 /* stack: H L L1 H1 */
1370 lbuild(t);
1371 vrotb(3);
1372 vrotb(3);
1373 vswap();
1374 lbuild(t);
1375 vswap();
1376 } else
1377 #endif
1379 /* duplicate value */
1380 rc = RC_INT;
1381 sv.type.t = VT_INT;
1382 if (is_float(t)) {
1383 rc = RC_FLOAT;
1384 #ifdef TCC_TARGET_X86_64
1385 if ((t & VT_BTYPE) == VT_LDOUBLE) {
1386 rc = RC_ST0;
1388 #endif
1389 sv.type.t = t;
1391 r = gv(rc);
1392 r1 = get_reg(rc);
1393 sv.r = r;
1394 sv.c.i = 0;
1395 load(r1, &sv); /* move r to r1 */
1396 vdup();
1397 /* duplicates value */
1398 if (r != r1)
1399 vtop->r = r1;
1403 /* Generate value test
1405 * Generate a test for any value (jump, comparison and integers) */
1406 ST_FUNC int gvtst(int inv, int t)
1408 int v = vtop->r & VT_VALMASK;
1409 if (v != VT_CMP && v != VT_JMP && v != VT_JMPI) {
1410 vpushi(0);
1411 gen_op(TOK_NE);
1413 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1414 /* constant jmp optimization */
1415 if ((vtop->c.i != 0) != inv)
1416 t = gjmp(t);
1417 vtop--;
1418 return t;
1420 return gtst(inv, t);
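/* Editor's note -- illustrative usage (assumption, not part of the original
   file): when the tested value is a compile-time constant the branch is
   resolved right here and no test instruction is emitted, e.g. */
#if 0
vpushi(0);                 /* condition known to be false */
int lbl = gvtst(1, 0);     /* inv=1 ("jump when false"): emits an
                              unconditional gjmp(), so "if (0) ..."
                              always skips its then-branch */
#endif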
1423 #if PTR_SIZE == 4
1424 /* generate CPU independent (unsigned) long long operations */
1425 static void gen_opl(int op)
1427 int t, a, b, op1, c, i;
1428 int func;
1429 unsigned short reg_iret = REG_IRET;
1430 unsigned short reg_lret = REG_LRET;
1431 SValue tmp;
1433 switch(op) {
1434 case '/':
1435 case TOK_PDIV:
1436 func = TOK___divdi3;
1437 goto gen_func;
1438 case TOK_UDIV:
1439 func = TOK___udivdi3;
1440 goto gen_func;
1441 case '%':
1442 func = TOK___moddi3;
1443 goto gen_mod_func;
1444 case TOK_UMOD:
1445 func = TOK___umoddi3;
1446 gen_mod_func:
1447 #ifdef TCC_ARM_EABI
1448 reg_iret = TREG_R2;
1449 reg_lret = TREG_R3;
1450 #endif
1451 gen_func:
1452 /* call generic long long function */
1453 vpush_global_sym(&func_old_type, func);
1454 vrott(3);
1455 gfunc_call(2);
1456 vpushi(0);
1457 vtop->r = reg_iret;
1458 vtop->r2 = reg_lret;
1459 break;
1460 case '^':
1461 case '&':
1462 case '|':
1463 case '*':
1464 case '+':
1465 case '-':
1466 //pv("gen_opl A",0,2);
1467 t = vtop->type.t;
1468 vswap();
1469 lexpand();
1470 vrotb(3);
1471 lexpand();
1472 /* stack: L1 H1 L2 H2 */
1473 tmp = vtop[0];
1474 vtop[0] = vtop[-3];
1475 vtop[-3] = tmp;
1476 tmp = vtop[-2];
1477 vtop[-2] = vtop[-3];
1478 vtop[-3] = tmp;
1479 vswap();
1480 /* stack: H1 H2 L1 L2 */
1481 //pv("gen_opl B",0,4);
1482 if (op == '*') {
1483 vpushv(vtop - 1);
1484 vpushv(vtop - 1);
1485 gen_op(TOK_UMULL);
1486 lexpand();
1487 /* stack: H1 H2 L1 L2 ML MH */
1488 for(i=0;i<4;i++)
1489 vrotb(6);
1490 /* stack: ML MH H1 H2 L1 L2 */
1491 tmp = vtop[0];
1492 vtop[0] = vtop[-2];
1493 vtop[-2] = tmp;
1494 /* stack: ML MH H1 L2 H2 L1 */
1495 gen_op('*');
1496 vrotb(3);
1497 vrotb(3);
1498 gen_op('*');
1499 /* stack: ML MH M1 M2 */
1500 gen_op('+');
1501 gen_op('+');
1502 } else if (op == '+' || op == '-') {
1503 /* XXX: add non carry method too (for MIPS or alpha) */
1504 if (op == '+')
1505 op1 = TOK_ADDC1;
1506 else
1507 op1 = TOK_SUBC1;
1508 gen_op(op1);
1509 /* stack: H1 H2 (L1 op L2) */
1510 vrotb(3);
1511 vrotb(3);
1512 gen_op(op1 + 1); /* TOK_xxxC2 */
1513 } else {
1514 gen_op(op);
1515 /* stack: H1 H2 (L1 op L2) */
1516 vrotb(3);
1517 vrotb(3);
1518 /* stack: (L1 op L2) H1 H2 */
1519 gen_op(op);
1520 /* stack: (L1 op L2) (H1 op H2) */
1522 /* stack: L H */
1523 lbuild(t);
1524 break;
1525 case TOK_SAR:
1526 case TOK_SHR:
1527 case TOK_SHL:
1528 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1529 t = vtop[-1].type.t;
1530 vswap();
1531 lexpand();
1532 vrotb(3);
1533 /* stack: L H shift */
1534 c = (int)vtop->c.i;
1535 /* constant: simpler */
1536 /* NOTE: all comments are for SHL. the other cases are
1537 done by swapping words */
1538 vpop();
1539 if (op != TOK_SHL)
1540 vswap();
1541 if (c >= 32) {
1542 /* stack: L H */
1543 vpop();
1544 if (c > 32) {
1545 vpushi(c - 32);
1546 gen_op(op);
1548 if (op != TOK_SAR) {
1549 vpushi(0);
1550 } else {
1551 gv_dup();
1552 vpushi(31);
1553 gen_op(TOK_SAR);
1555 vswap();
1556 } else {
1557 vswap();
1558 gv_dup();
1559 /* stack: H L L */
1560 vpushi(c);
1561 gen_op(op);
1562 vswap();
1563 vpushi(32 - c);
1564 if (op == TOK_SHL)
1565 gen_op(TOK_SHR);
1566 else
1567 gen_op(TOK_SHL);
1568 vrotb(3);
1569 /* stack: L L H */
1570 vpushi(c);
1571 if (op == TOK_SHL)
1572 gen_op(TOK_SHL);
1573 else
1574 gen_op(TOK_SHR);
1575 gen_op('|');
1577 if (op != TOK_SHL)
1578 vswap();
1579 lbuild(t);
1580 } else {
1581 /* XXX: should provide a faster fallback on x86 ? */
1582 switch(op) {
1583 case TOK_SAR:
1584 func = TOK___ashrdi3;
1585 goto gen_func;
1586 case TOK_SHR:
1587 func = TOK___lshrdi3;
1588 goto gen_func;
1589 case TOK_SHL:
1590 func = TOK___ashldi3;
1591 goto gen_func;
1594 break;
1595 default:
1596 /* compare operations */
1597 t = vtop->type.t;
1598 vswap();
1599 lexpand();
1600 vrotb(3);
1601 lexpand();
1602 /* stack: L1 H1 L2 H2 */
1603 tmp = vtop[-1];
1604 vtop[-1] = vtop[-2];
1605 vtop[-2] = tmp;
1606 /* stack: L1 L2 H1 H2 */
1607 /* compare high */
1608 op1 = op;
1609 /* when values are equal, we need to compare low words. since
1610 the jump is inverted, we invert the test too. */
1611 if (op1 == TOK_LT)
1612 op1 = TOK_LE;
1613 else if (op1 == TOK_GT)
1614 op1 = TOK_GE;
1615 else if (op1 == TOK_ULT)
1616 op1 = TOK_ULE;
1617 else if (op1 == TOK_UGT)
1618 op1 = TOK_UGE;
1619 a = 0;
1620 b = 0;
1621 gen_op(op1);
1622 if (op == TOK_NE) {
1623 b = gvtst(0, 0);
1624 } else {
1625 a = gvtst(1, 0);
1626 if (op != TOK_EQ) {
1627 /* generate non equal test */
1628 vpushi(TOK_NE);
1629 vtop->r = VT_CMP;
1630 b = gvtst(0, 0);
1633 /* compare low. Always unsigned */
1634 op1 = op;
1635 if (op1 == TOK_LT)
1636 op1 = TOK_ULT;
1637 else if (op1 == TOK_LE)
1638 op1 = TOK_ULE;
1639 else if (op1 == TOK_GT)
1640 op1 = TOK_UGT;
1641 else if (op1 == TOK_GE)
1642 op1 = TOK_UGE;
1643 gen_op(op1);
1644 a = gvtst(1, a);
1645 gsym(b);
1646 vseti(VT_JMPI, a);
1647 break;
1650 #endif
1652 static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
1654 uint64_t x = (a >> 63 ? -a : a) / (b >> 63 ? -b : b);
1655 return (a ^ b) >> 63 ? -x : x;
1658 static int gen_opic_lt(uint64_t a, uint64_t b)
1660 return (a ^ (uint64_t)1 << 63) < (b ^ (uint64_t)1 << 63);
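/* Editor's note -- illustrative note (not part of the original file):
   gen_opic_sdiv() performs signed division on uint64_t operands by dividing
   the magnitudes and re-applying the sign of (a ^ b); gen_opic_lt() performs
   a signed comparison by flipping the sign bit of both operands, which maps
   signed order onto unsigned order.  For instance: */
#if 0
assert(gen_opic_lt((uint64_t)-1, 1) == 1);        /* -1 < 1 as signed     */
assert(((uint64_t)-1 < (uint64_t)1) == 0);        /* ...but not unsigned  */
assert(gen_opic_sdiv((uint64_t)-8, 2) == (uint64_t)-4);
#endif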
1663 /* handle integer constant optimizations and various machine
1664 independent opt */
1665 static void gen_opic(int op)
1667 SValue *v1 = vtop - 1;
1668 SValue *v2 = vtop;
1669 int t1 = v1->type.t & VT_BTYPE;
1670 int t2 = v2->type.t & VT_BTYPE;
1671 int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1672 int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1673 uint64_t l1 = c1 ? v1->c.i : 0;
1674 uint64_t l2 = c2 ? v2->c.i : 0;
1675 int shm = (t1 == VT_LLONG) ? 63 : 31;
1677 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
1678 l1 = ((uint32_t)l1 |
1679 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
1680 if (t2 != VT_LLONG && (PTR_SIZE != 8 || t2 != VT_PTR))
1681 l2 = ((uint32_t)l2 |
1682 (v2->type.t & VT_UNSIGNED ? 0 : -(l2 & 0x80000000)));
1684 if (c1 && c2) {
1685 switch(op) {
1686 case '+': l1 += l2; break;
1687 case '-': l1 -= l2; break;
1688 case '&': l1 &= l2; break;
1689 case '^': l1 ^= l2; break;
1690 case '|': l1 |= l2; break;
1691 case '*': l1 *= l2; break;
1693 case TOK_PDIV:
1694 case '/':
1695 case '%':
1696 case TOK_UDIV:
1697 case TOK_UMOD:
1698 /* if division by zero, generate explicit division */
1699 if (l2 == 0) {
1700 if (const_wanted)
1701 tcc_error("division by zero in constant");
1702 goto general_case;
1704 switch(op) {
1705 default: l1 = gen_opic_sdiv(l1, l2); break;
1706 case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
1707 case TOK_UDIV: l1 = l1 / l2; break;
1708 case TOK_UMOD: l1 = l1 % l2; break;
1710 break;
1711 case TOK_SHL: l1 <<= (l2 & shm); break;
1712 case TOK_SHR: l1 >>= (l2 & shm); break;
1713 case TOK_SAR:
1714 l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
1715 break;
1716 /* tests */
1717 case TOK_ULT: l1 = l1 < l2; break;
1718 case TOK_UGE: l1 = l1 >= l2; break;
1719 case TOK_EQ: l1 = l1 == l2; break;
1720 case TOK_NE: l1 = l1 != l2; break;
1721 case TOK_ULE: l1 = l1 <= l2; break;
1722 case TOK_UGT: l1 = l1 > l2; break;
1723 case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
1724 case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
1725 case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
1726 case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
1727 /* logical */
1728 case TOK_LAND: l1 = l1 && l2; break;
1729 case TOK_LOR: l1 = l1 || l2; break;
1730 default:
1731 goto general_case;
1733 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
1734 l1 = ((uint32_t)l1 |
1735 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
1736 v1->c.i = l1;
1737 vtop--;
1738 } else {
1739 /* if commutative ops, put c2 as constant */
1740 if (c1 && (op == '+' || op == '&' || op == '^' ||
1741 op == '|' || op == '*')) {
1742 vswap();
1743 c2 = c1; //c = c1, c1 = c2, c2 = c;
1744 l2 = l1; //l = l1, l1 = l2, l2 = l;
1746 if (!const_wanted &&
1747 c1 && ((l1 == 0 &&
1748 (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
1749 (l1 == -1 && op == TOK_SAR))) {
1750 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
1751 vtop--;
1752 } else if (!const_wanted &&
1753 c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
1754 (l2 == -1 && op == '|') ||
1755 (l2 == 0xffffffff && t2 != VT_LLONG && op == '|') ||
1756 (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
1757 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
1758 if (l2 == 1)
1759 vtop->c.i = 0;
1760 vswap();
1761 vtop--;
1762 } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
1763 op == TOK_PDIV) &&
1764 l2 == 1) ||
1765 ((op == '+' || op == '-' || op == '|' || op == '^' ||
1766 op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
1767 l2 == 0) ||
1768 (op == '&' &&
1769 l2 == -1))) {
1770 /* filter out NOP operations like x*1, x-0, x&-1... */
1771 vtop--;
1772 } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
1773 /* try to use shifts instead of muls or divs */
1774 if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
1775 int n = -1;
1776 while (l2) {
1777 l2 >>= 1;
1778 n++;
1780 vtop->c.i = n;
1781 if (op == '*')
1782 op = TOK_SHL;
1783 else if (op == TOK_PDIV)
1784 op = TOK_SAR;
1785 else
1786 op = TOK_SHR;
1788 goto general_case;
1789 } else if (c2 && (op == '+' || op == '-') &&
1790 (((vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM))
1791 || (vtop[-1].r & (VT_VALMASK | VT_LVAL)) == VT_LOCAL)) {
1792 /* symbol + constant case */
1793 if (op == '-')
1794 l2 = -l2;
1795 l2 += vtop[-1].c.i;
1796 /* The backends can't always deal with addends to symbols
1797 larger than +-1<<31. Don't construct such. */
1798 if ((int)l2 != l2)
1799 goto general_case;
1800 vtop--;
1801 vtop->c.i = l2;
1802 } else {
1803 general_case:
1804 /* call low level op generator */
1805 if (t1 == VT_LLONG || t2 == VT_LLONG ||
1806 (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
1807 gen_opl(op);
1808 else
1809 gen_opi(op);
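/* Editor's note -- worked example (not part of the original file): the
   strength reduction above rewrites multiplications and divisions by a
   power of two as shifts, e.g. "x * 8" becomes "x << 3".  The shift count
   is the index of the single set bit, computed by the small loop: */
#if 0
uint64_t l2 = 8;
int n = -1;
while (l2) { l2 >>= 1; n++; }   /* n == 3, so '*' is replaced by TOK_SHL */
#endif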
1814 /* generate a floating point operation with constant propagation */
1815 static void gen_opif(int op)
1817 int c1, c2;
1818 SValue *v1, *v2;
1819 long double f1, f2;
1821 v1 = vtop - 1;
1822 v2 = vtop;
1823 /* currently, we cannot do computations with forward symbols */
1824 c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1825 c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1826 if (c1 && c2) {
1827 if (v1->type.t == VT_FLOAT) {
1828 f1 = v1->c.f;
1829 f2 = v2->c.f;
1830 } else if (v1->type.t == VT_DOUBLE) {
1831 f1 = v1->c.d;
1832 f2 = v2->c.d;
1833 } else {
1834 f1 = v1->c.ld;
1835 f2 = v2->c.ld;
1838 /* NOTE: we only do constant propagation if finite number (not
1839 NaN or infinity) (ANSI spec) */
1840 if (!ieee_finite(f1) || !ieee_finite(f2))
1841 goto general_case;
1843 switch(op) {
1844 case '+': f1 += f2; break;
1845 case '-': f1 -= f2; break;
1846 case '*': f1 *= f2; break;
1847 case '/':
1848 if (f2 == 0.0) {
1849 if (const_wanted)
1850 tcc_error("division by zero in constant");
1851 goto general_case;
1853 f1 /= f2;
1854 break;
1855 /* XXX: also handles tests ? */
1856 default:
1857 goto general_case;
1859 /* XXX: overflow test ? */
1860 if (v1->type.t == VT_FLOAT) {
1861 v1->c.f = f1;
1862 } else if (v1->type.t == VT_DOUBLE) {
1863 v1->c.d = f1;
1864 } else {
1865 v1->c.ld = f1;
1867 vtop--;
1868 } else {
1869 general_case:
1870 gen_opf(op);
1874 static int pointed_size(CType *type)
1876 int align;
1877 return type_size(pointed_type(type), &align);
1880 static void vla_runtime_pointed_size(CType *type)
1882 int align;
1883 vla_runtime_type_size(pointed_type(type), &align);
1886 static inline int is_null_pointer(SValue *p)
1888 if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
1889 return 0;
1890 return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
1891 ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
1892 ((p->type.t & VT_BTYPE) == VT_PTR &&
1893 (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0));
1896 static inline int is_integer_btype(int bt)
1898 return (bt == VT_BYTE || bt == VT_SHORT ||
1899 bt == VT_INT || bt == VT_LLONG);
1902 /* check types for comparison or subtraction of pointers */
1903 static void check_comparison_pointer_types(SValue *p1, SValue *p2, int op)
1905 CType *type1, *type2, tmp_type1, tmp_type2;
1906 int bt1, bt2;
1908 /* null pointers are accepted for all comparisons, as in gcc */
1909 if (is_null_pointer(p1) || is_null_pointer(p2))
1910 return;
1911 type1 = &p1->type;
1912 type2 = &p2->type;
1913 bt1 = type1->t & VT_BTYPE;
1914 bt2 = type2->t & VT_BTYPE;
1915 /* accept comparison between pointer and integer with a warning */
1916 if ((is_integer_btype(bt1) || is_integer_btype(bt2)) && op != '-') {
1917 if (op != TOK_LOR && op != TOK_LAND )
1918 tcc_warning("comparison between pointer and integer");
1919 return;
1922 /* both must be pointers or implicit function pointers */
1923 if (bt1 == VT_PTR) {
1924 type1 = pointed_type(type1);
1925 } else if (bt1 != VT_FUNC)
1926 goto invalid_operands;
1928 if (bt2 == VT_PTR) {
1929 type2 = pointed_type(type2);
1930 } else if (bt2 != VT_FUNC) {
1931 invalid_operands:
1932 tcc_error("invalid operands to binary %s", get_tok_str(op, NULL));
1934 if ((type1->t & VT_BTYPE) == VT_VOID ||
1935 (type2->t & VT_BTYPE) == VT_VOID)
1936 return;
1937 tmp_type1 = *type1;
1938 tmp_type2 = *type2;
1939 tmp_type1.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
1940 tmp_type2.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
1941 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
1942 /* gcc-like error if '-' is used */
1943 if (op == '-')
1944 goto invalid_operands;
1945 else
1946 tcc_warning("comparison of distinct pointer types lacks a cast");
1950 /* generic gen_op: handles types problems */
1951 ST_FUNC void gen_op(int op)
1953 int u, t1, t2, bt1, bt2, t;
1954 CType type1;
1956 redo:
1957 t1 = vtop[-1].type.t;
1958 t2 = vtop[0].type.t;
1959 bt1 = t1 & VT_BTYPE;
1960 bt2 = t2 & VT_BTYPE;
1962 if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
1963 tcc_error("operation on a struct");
1964 } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
1965 if (bt2 == VT_FUNC) {
1966 mk_pointer(&vtop->type);
1967 gaddrof();
1969 if (bt1 == VT_FUNC) {
1970 vswap();
1971 mk_pointer(&vtop->type);
1972 gaddrof();
1973 vswap();
1975 goto redo;
1976 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
1977 /* at least one operand is a pointer */
1978 /* relational op: must be both pointers */
1979 if (op >= TOK_ULT && op <= TOK_LOR) {
1980 check_comparison_pointer_types(vtop - 1, vtop, op);
1981 /* pointers are handled as unsigned */
1982 #if PTR_SIZE == 8
1983 t = VT_LLONG | VT_UNSIGNED;
1984 #else
1985 t = VT_INT | VT_UNSIGNED;
1986 #endif
1987 goto std_op;
1989 /* if both pointers, then it must be the '-' op */
1990 if (bt1 == VT_PTR && bt2 == VT_PTR) {
1991 if (op != '-')
1992 tcc_error("cannot use pointers here");
1993 check_comparison_pointer_types(vtop - 1, vtop, op);
1994 /* XXX: check that types are compatible */
1995 if (vtop[-1].type.t & VT_VLA) {
1996 vla_runtime_pointed_size(&vtop[-1].type);
1997 } else {
1998 vpushi(pointed_size(&vtop[-1].type));
2000 vrott(3);
2001 gen_opic(op);
2002 /* set to integer type */
2003 #if PTR_SIZE == 8
2004 vtop->type.t = VT_LLONG;
2005 #else
2006 vtop->type.t = VT_INT;
2007 #endif
2008 vswap();
2009 gen_op(TOK_PDIV);
2010 } else {
2011 /* exactly one pointer : must be '+' or '-'. */
2012 if (op != '-' && op != '+')
2013 tcc_error("cannot use pointers here");
2014 /* Put pointer as first operand */
2015 if (bt2 == VT_PTR) {
2016 vswap();
2017 t = t1, t1 = t2, t2 = t;
2019 #if PTR_SIZE == 4
2020 if ((vtop[0].type.t & VT_BTYPE) == VT_LLONG)
2021 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2022 gen_cast(&int_type);
2023 #endif
2024 type1 = vtop[-1].type;
2025 type1.t &= ~VT_ARRAY;
2026 if (vtop[-1].type.t & VT_VLA)
2027 vla_runtime_pointed_size(&vtop[-1].type);
2028 else {
2029 u = pointed_size(&vtop[-1].type);
2030 if (u < 0)
2031 tcc_error("unknown array element size");
2032 #if PTR_SIZE == 8
2033 vpushll(u);
2034 #else
2035 /* XXX: cast to int ? (long long case) */
2036 vpushi(u);
2037 #endif
2039 gen_op('*');
2040 #if 0
2041 /* #ifdef CONFIG_TCC_BCHECK
2042 The main reason for removing this code:
2043 #include <stdio.h>
2044 int main ()
2046 int v[10];
2047 int i = 10;
2048 int j = 9;
2049 fprintf(stderr, "v+i-j = %p\n", v+i-j);
2050 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
2052 When this code is on, the output looks like
2053 v+i-j = 0xfffffffe
2054 v+(i-j) = 0xbff84000
2056 /* if evaluating constant expression, no code should be
2057 generated, so no bound check */
2058 if (tcc_state->do_bounds_check && !const_wanted) {
2059 /* if bounded pointers, we generate a special code to
2060 test bounds */
2061 if (op == '-') {
2062 vpushi(0);
2063 vswap();
2064 gen_op('-');
2066 gen_bounded_ptr_add();
2067 } else
2068 #endif
2070 gen_opic(op);
2072 /* restore the type in case gen_opic() swapped the operands */
2073 vtop->type = type1;
2075 } else if (is_float(bt1) || is_float(bt2)) {
2076 /* compute bigger type and do implicit casts */
2077 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
2078 t = VT_LDOUBLE;
2079 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
2080 t = VT_DOUBLE;
2081 } else {
2082 t = VT_FLOAT;
2084 /* floats can only be used for a few operations */
2085 if (op != '+' && op != '-' && op != '*' && op != '/' &&
2086 (op < TOK_ULT || op > TOK_GT))
2087 tcc_error("invalid operands for binary operation");
2088 goto std_op;
2089 } else if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL) {
2090 t = bt1 == VT_LLONG ? VT_LLONG : VT_INT;
2091 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (t | VT_UNSIGNED))
2092 t |= VT_UNSIGNED;
2093 goto std_op;
2094 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
2095 /* cast to biggest op */
2096 t = VT_LLONG;
2097 /* convert to unsigned if it does not fit in a long long */
2098 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
2099 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
2100 t |= VT_UNSIGNED;
2101 goto std_op;
2102 } else {
2103 /* integer operations */
2104 t = VT_INT;
2105 /* convert to unsigned if it does not fit in an integer */
2106 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
2107 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
2108 t |= VT_UNSIGNED;
2109 std_op:
2110 /* XXX: currently, some unsigned operations are explicit, so
2111 we modify them here */
2112 if (t & VT_UNSIGNED) {
2113 if (op == TOK_SAR)
2114 op = TOK_SHR;
2115 else if (op == '/')
2116 op = TOK_UDIV;
2117 else if (op == '%')
2118 op = TOK_UMOD;
2119 else if (op == TOK_LT)
2120 op = TOK_ULT;
2121 else if (op == TOK_GT)
2122 op = TOK_UGT;
2123 else if (op == TOK_LE)
2124 op = TOK_ULE;
2125 else if (op == TOK_GE)
2126 op = TOK_UGE;
2128 vswap();
2129 type1.t = t;
2130 gen_cast(&type1);
2131 vswap();
2132 /* special case for shifts and long long: we keep the shift as
2133 an integer */
2134 if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
2135 type1.t = VT_INT;
2136 gen_cast(&type1);
2137 if (is_float(t))
2138 gen_opif(op);
2139 else
2140 gen_opic(op);
2141 if (op >= TOK_ULT && op <= TOK_GT) {
2142 /* relational op: the result is an int */
2143 vtop->type.t = VT_INT;
2144 } else {
2145 vtop->type.t = t;
2148 // Make sure that we have converted to an rvalue:
2149 if (vtop->r & VT_LVAL)
2150 gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
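/* Editor's note -- illustrative trace (assumption, not part of the original
   file): for pointer arithmetic gen_op() scales the integer operand by the
   pointed-to size before adding, so with "int *p" the expression "p + i"
   is effectively compiled as: */
#if 0
vpushi(pointed_size(&vtop[-1].type));   /* sizeof(int), e.g. 4            */
gen_op('*');                            /* i * 4                          */
gen_opic('+');                          /* p + i*4; the result keeps the
                                           pointer type saved in type1    */
#endif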
2153 #ifndef TCC_TARGET_ARM
2154 /* generic itof for unsigned long long case */
2155 static void gen_cvt_itof1(int t)
2157 #ifdef TCC_TARGET_ARM64
2158 gen_cvt_itof(t);
2159 #else
2160 if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
2161 (VT_LLONG | VT_UNSIGNED)) {
2163 if (t == VT_FLOAT)
2164 vpush_global_sym(&func_old_type, TOK___floatundisf);
2165 #if LDOUBLE_SIZE != 8
2166 else if (t == VT_LDOUBLE)
2167 vpush_global_sym(&func_old_type, TOK___floatundixf);
2168 #endif
2169 else
2170 vpush_global_sym(&func_old_type, TOK___floatundidf);
2171 vrott(2);
2172 gfunc_call(1);
2173 vpushi(0);
2174 vtop->r = reg_fret(t);
2175 } else {
2176 gen_cvt_itof(t);
2178 #endif
2180 #endif
2182 /* generic ftoi for unsigned long long case */
2183 static void gen_cvt_ftoi1(int t)
2185 #ifdef TCC_TARGET_ARM64
2186 gen_cvt_ftoi(t);
2187 #else
2188 int st;
2190 if (t == (VT_LLONG | VT_UNSIGNED)) {
2191 /* not handled natively */
2192 st = vtop->type.t & VT_BTYPE;
2193 if (st == VT_FLOAT)
2194 vpush_global_sym(&func_old_type, TOK___fixunssfdi);
2195 #if LDOUBLE_SIZE != 8
2196 else if (st == VT_LDOUBLE)
2197 vpush_global_sym(&func_old_type, TOK___fixunsxfdi);
2198 #endif
2199 else
2200 vpush_global_sym(&func_old_type, TOK___fixunsdfdi);
2201 vrott(2);
2202 gfunc_call(1);
2203 vpushi(0);
2204 vtop->r = REG_IRET;
2205 vtop->r2 = REG_LRET;
2206 } else {
2207 gen_cvt_ftoi(t);
2209 #endif
2212 /* force char or short cast */
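/* Illustration of the narrowing performed on the value stack below:
     (unsigned char)x  ->  x & 0xff             (mask, unsigned case)
     (signed char)x    ->  (x << 24) >> 24      (shift pair on a 32-bit value)
   the shift pair sign-extends because the intermediate value is kept signed. */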
2213 static void force_charshort_cast(int t)
2215 int bits, dbt;
2216 dbt = t & VT_BTYPE;
2217 /* XXX: add optimization if lvalue : just change type and offset */
2218 if (dbt == VT_BYTE)
2219 bits = 8;
2220 else
2221 bits = 16;
2222 if (t & VT_UNSIGNED) {
2223 vpushi((1 << bits) - 1);
2224 gen_op('&');
2225 } else {
2226 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
2227 bits = 64 - bits;
2228 else
2229 bits = 32 - bits;
2230 vpushi(bits);
2231 gen_op(TOK_SHL);
2232 /* result must be signed or the SAR is converted to an SHL.
2233 This was not the case when "t" was a signed short
2234 and the last value on the stack was an unsigned int */
2235 vtop->type.t &= ~VT_UNSIGNED;
2236 vpushi(bits);
2237 gen_op(TOK_SAR);
2241 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2242 static void gen_cast(CType *type)
2244 int sbt, dbt, sf, df, c, p;
2246 /* special delayed cast for char/short */
2247 /* XXX: in some cases (multiple cascaded casts), it may still
2248 be incorrect */
2249 if (vtop->r & VT_MUSTCAST) {
2250 vtop->r &= ~VT_MUSTCAST;
2251 force_charshort_cast(vtop->type.t);
2254 /* bitfields first get cast to ints */
2255 if (vtop->type.t & VT_BITFIELD) {
2256 gv(RC_INT);
2259 dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
2260 sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
2262 if (sbt != dbt) {
2263 sf = is_float(sbt);
2264 df = is_float(dbt);
2265 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2266 p = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM);
2267 if (c) {
2268 /* constant case: we can do it now */
2269 /* XXX: in ISOC, cannot do it if error in convert */
2270 if (sbt == VT_FLOAT)
2271 vtop->c.ld = vtop->c.f;
2272 else if (sbt == VT_DOUBLE)
2273 vtop->c.ld = vtop->c.d;
2275 if (df) {
2276 if ((sbt & VT_BTYPE) == VT_LLONG) {
2277 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
2278 vtop->c.ld = vtop->c.i;
2279 else
2280 vtop->c.ld = -(long double)-vtop->c.i;
2281 } else if(!sf) {
2282 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
2283 vtop->c.ld = (uint32_t)vtop->c.i;
2284 else
2285 vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
2288 if (dbt == VT_FLOAT)
2289 vtop->c.f = (float)vtop->c.ld;
2290 else if (dbt == VT_DOUBLE)
2291 vtop->c.d = (double)vtop->c.ld;
2292 } else if (sf && dbt == (VT_LLONG|VT_UNSIGNED)) {
2293 vtop->c.i = vtop->c.ld;
2294 } else if (sf && dbt == VT_BOOL) {
2295 vtop->c.i = (vtop->c.ld != 0);
2296 } else {
2297 if(sf)
2298 vtop->c.i = vtop->c.ld;
2299 else if (sbt == (VT_LLONG|VT_UNSIGNED))
2301 else if (sbt & VT_UNSIGNED)
2302 vtop->c.i = (uint32_t)vtop->c.i;
2303 #if PTR_SIZE == 8
2304 else if (sbt == VT_PTR)
2306 #endif
2307 else if (sbt != VT_LLONG)
2308 vtop->c.i = ((uint32_t)vtop->c.i |
2309 -(vtop->c.i & 0x80000000));
2311 if (dbt == (VT_LLONG|VT_UNSIGNED))
2313 else if (dbt == VT_BOOL)
2314 vtop->c.i = (vtop->c.i != 0);
2315 #if PTR_SIZE == 8
2316 else if (dbt == VT_PTR)
2318 #endif
2319 else if (dbt != VT_LLONG) {
2320 uint32_t m = ((dbt & VT_BTYPE) == VT_BYTE ? 0xff :
2321 (dbt & VT_BTYPE) == VT_SHORT ? 0xffff :
2322 0xffffffff);
2323 vtop->c.i &= m;
2324 if (!(dbt & VT_UNSIGNED))
2325 vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
2328 } else if (p && dbt == VT_BOOL) {
2329 vtop->r = VT_CONST;
2330 vtop->c.i = 1;
2331 } else {
2332 /* non constant case: generate code */
2333 if (sf && df) {
2334 /* convert from fp to fp */
2335 gen_cvt_ftof(dbt);
2336 } else if (df) {
2337 /* convert int to fp */
2338 gen_cvt_itof1(dbt);
2339 } else if (sf) {
2340 /* convert fp to int */
2341 if (dbt == VT_BOOL) {
2342 vpushi(0);
2343 gen_op(TOK_NE);
2344 } else {
2345 /* we handle char/short/etc... with generic code */
2346 if (dbt != (VT_INT | VT_UNSIGNED) &&
2347 dbt != (VT_LLONG | VT_UNSIGNED) &&
2348 dbt != VT_LLONG)
2349 dbt = VT_INT;
2350 gen_cvt_ftoi1(dbt);
2351 if (dbt == VT_INT && (type->t & (VT_BTYPE | VT_UNSIGNED)) != dbt) {
2352 /* additional cast for char/short... */
2353 vtop->type.t = dbt;
2354 gen_cast(type);
2357 #if PTR_SIZE == 4
2358 } else if ((dbt & VT_BTYPE) == VT_LLONG) {
2359 if ((sbt & VT_BTYPE) != VT_LLONG) {
2360 /* scalar to long long */
2361 /* machine independent conversion */
2362 gv(RC_INT);
2363 /* generate high word */
2364 if (sbt == (VT_INT | VT_UNSIGNED)) {
2365 vpushi(0);
2366 gv(RC_INT);
2367 } else {
2368 if (sbt == VT_PTR) {
2369 /* cast from pointer to int before we apply
2370 shift operation, which pointers don't support */
2371 gen_cast(&int_type);
2373 gv_dup();
2374 vpushi(31);
2375 gen_op(TOK_SAR);
2377 /* patch second register */
2378 vtop[-1].r2 = vtop->r;
2379 vpop();
2381 #else
2382 } else if ((dbt & VT_BTYPE) == VT_LLONG ||
2383 (dbt & VT_BTYPE) == VT_PTR ||
2384 (dbt & VT_BTYPE) == VT_FUNC) {
2385 if ((sbt & VT_BTYPE) != VT_LLONG &&
2386 (sbt & VT_BTYPE) != VT_PTR &&
2387 (sbt & VT_BTYPE) != VT_FUNC) {
2388 /* need to convert from 32bit to 64bit */
2389 gv(RC_INT);
2390 if (sbt != (VT_INT | VT_UNSIGNED)) {
2391 #if defined(TCC_TARGET_ARM64)
2392 gen_cvt_sxtw();
2393 #elif defined(TCC_TARGET_X86_64)
2394 int r = gv(RC_INT);
2395 /* x86_64 specific: movslq */
2396 o(0x6348);
2397 o(0xc0 + (REG_VALUE(r) << 3) + REG_VALUE(r));
2398 #else
2399 #error
2400 #endif
2403 #endif
2404 } else if (dbt == VT_BOOL) {
2405 /* scalar to bool */
2406 vpushi(0);
2407 gen_op(TOK_NE);
2408 } else if ((dbt & VT_BTYPE) == VT_BYTE ||
2409 (dbt & VT_BTYPE) == VT_SHORT) {
2410 if (sbt == VT_PTR) {
2411 vtop->type.t = VT_INT;
2412 tcc_warning("nonportable conversion from pointer to char/short");
2414 force_charshort_cast(dbt);
2415 #if PTR_SIZE == 4
2416 } else if ((dbt & VT_BTYPE) == VT_INT) {
2417 /* scalar to int */
2418 if ((sbt & VT_BTYPE) == VT_LLONG) {
2419 /* from long long: just take low order word */
2420 lexpand();
2421 vpop();
2423 /* if lvalue and single word type, nothing to do because
2424 the lvalue already contains the real type size (see
2425 VT_LVAL_xxx constants) */
2426 #endif
2429 } else if ((dbt & VT_BTYPE) == VT_PTR && !(vtop->r & VT_LVAL)) {
2430 /* if we are casting between pointer types,
2431 we must update the VT_LVAL_xxx size */
2432 vtop->r = (vtop->r & ~VT_LVAL_TYPE)
2433 | (lvalue_type(type->ref->type.t) & VT_LVAL_TYPE);
2435 vtop->type = *type;
2438 /* return type size as known at compile time. Put alignment at 'a' */
2439 ST_FUNC int type_size(CType *type, int *a)
2441 Sym *s;
2442 int bt;
2444 bt = type->t & VT_BTYPE;
2445 if (bt == VT_STRUCT) {
2446 /* struct/union */
2447 s = type->ref;
2448 *a = s->r;
2449 return s->c;
2450 } else if (bt == VT_PTR) {
2451 if (type->t & VT_ARRAY) {
2452 int ts;
2454 s = type->ref;
2455 ts = type_size(&s->type, a);
2457 if (ts < 0 && s->c < 0)
2458 ts = -ts;
2460 return ts * s->c;
2461 } else {
2462 *a = PTR_SIZE;
2463 return PTR_SIZE;
2465 } else if (bt == VT_LDOUBLE) {
2466 *a = LDOUBLE_ALIGN;
2467 return LDOUBLE_SIZE;
2468 } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
2469 #ifdef TCC_TARGET_I386
2470 #ifdef TCC_TARGET_PE
2471 *a = 8;
2472 #else
2473 *a = 4;
2474 #endif
2475 #elif defined(TCC_TARGET_ARM)
2476 #ifdef TCC_ARM_EABI
2477 *a = 8;
2478 #else
2479 *a = 4;
2480 #endif
2481 #else
2482 *a = 8;
2483 #endif
2484 return 8;
2485 } else if (bt == VT_INT || bt == VT_FLOAT) {
2486 *a = 4;
2487 return 4;
2488 } else if (bt == VT_SHORT) {
2489 *a = 2;
2490 return 2;
2491 } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
2492 *a = 8;
2493 return 16;
2494 } else if (bt == VT_ENUM) {
2495 *a = 4;
2496 /* Enums might be incomplete, so don't just return '4' here. */
2497 return type->ref->c;
2498 } else {
2499 /* char, void, function, _Bool */
2500 *a = 1;
2501 return 1;
2505 /* push type size as known at run time on top of value stack. Put
2506 alignment at 'a' */
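/* Note (descriptive): for a VLA such as 'int a[n]' the byte size was
   computed when the declarator was parsed (see post_type) and stored in
   a stack slot at offset type->ref->c; here an lvalue for that slot is
   pushed instead of a compile-time constant. */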
2507 ST_FUNC void vla_runtime_type_size(CType *type, int *a)
2509 if (type->t & VT_VLA) {
2510 type_size(&type->ref->type, a);
2511 vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
2512 } else {
2513 vpushi(type_size(type, a));
2517 static void vla_sp_restore(void) {
2518 if (vlas_in_scope) {
2519 gen_vla_sp_restore(vla_sp_loc);
2523 static void vla_sp_restore_root(void) {
2524 if (vlas_in_scope) {
2525 gen_vla_sp_restore(vla_sp_root_loc);
2529 /* return the pointed type of t */
2530 static inline CType *pointed_type(CType *type)
2532 return &type->ref->type;
2535 /* modify type so that it is a pointer to type. */
2536 ST_FUNC void mk_pointer(CType *type)
2538 Sym *s;
2539 s = sym_push(SYM_FIELD, type, 0, -1);
2540 type->t = VT_PTR | (type->t & ~VT_TYPE);
2541 type->ref = s;
2544 /* compare function types. OLD functions match any new functions */
2545 static int is_compatible_func(CType *type1, CType *type2)
2547 Sym *s1, *s2;
2549 s1 = type1->ref;
2550 s2 = type2->ref;
2551 if (!is_compatible_types(&s1->type, &s2->type))
2552 return 0;
2553 /* check func_call */
2554 if (s1->a.func_call != s2->a.func_call)
2555 return 0;
2556 /* XXX: not complete */
2557 if (s1->c == FUNC_OLD || s2->c == FUNC_OLD)
2558 return 1;
2559 if (s1->c != s2->c)
2560 return 0;
2561 while (s1 != NULL) {
2562 if (s2 == NULL)
2563 return 0;
2564 if (!is_compatible_parameter_types(&s1->type, &s2->type))
2565 return 0;
2566 s1 = s1->next;
2567 s2 = s2->next;
2569 if (s2)
2570 return 0;
2571 return 1;
2574 /* return true if type1 and type2 are the same. If unqualified is
2575 true, qualifiers on the types are ignored.
2577 - enums are not checked as gcc __builtin_types_compatible_p ()
2578 */
2579 static int compare_types(CType *type1, CType *type2, int unqualified)
2581 int bt1, t1, t2;
2583 t1 = type1->t & VT_TYPE;
2584 t2 = type2->t & VT_TYPE;
2585 if (unqualified) {
2586 /* strip qualifiers before comparing */
2587 t1 &= ~(VT_CONSTANT | VT_VOLATILE);
2588 t2 &= ~(VT_CONSTANT | VT_VOLATILE);
2590 /* Default vs. explicit signedness only matters for char */
2591 if ((t1 & VT_BTYPE) != VT_BYTE) {
2592 t1 &= ~VT_DEFSIGN;
2593 t2 &= ~VT_DEFSIGN;
2595 /* An enum is compatible with (unsigned) int. Ideally we would
2596 store the enums signedness in type->ref.a.<some_bit> and
2597 only accept unsigned enums with unsigned int and vice versa.
2598 But one of our callers (gen_assign_cast) always strips VT_UNSIGNED
2599 from pointer target types, so we can't add it here either. */
2600 if ((t1 & VT_BTYPE) == VT_ENUM) {
2601 t1 = VT_INT;
2602 if (type1->ref->a.unsigned_enum)
2603 t1 |= VT_UNSIGNED;
2605 if ((t2 & VT_BTYPE) == VT_ENUM) {
2606 t2 = VT_INT;
2607 if (type2->ref->a.unsigned_enum)
2608 t2 |= VT_UNSIGNED;
2610 /* XXX: bitfields ? */
2611 if (t1 != t2)
2612 return 0;
2613 /* test more complicated cases */
2614 bt1 = t1 & VT_BTYPE;
2615 if (bt1 == VT_PTR) {
2616 type1 = pointed_type(type1);
2617 type2 = pointed_type(type2);
2618 return is_compatible_types(type1, type2);
2619 } else if (bt1 == VT_STRUCT) {
2620 return (type1->ref == type2->ref);
2621 } else if (bt1 == VT_FUNC) {
2622 return is_compatible_func(type1, type2);
2623 } else {
2624 return 1;
2628 /* return true if type1 and type2 are exactly the same (including
2629 qualifiers).
2630 */
2631 static int is_compatible_types(CType *type1, CType *type2)
2633 return compare_types(type1,type2,0);
2636 /* return true if type1 and type2 are the same (ignoring qualifiers).
2637 */
2638 static int is_compatible_parameter_types(CType *type1, CType *type2)
2640 return compare_types(type1,type2,1);
2643 /* print a type. If 'varstr' is not NULL, then the variable is also
2644 printed in the type */
2645 /* XXX: union */
2646 /* XXX: add array and function pointers */
2647 static void type_to_str(char *buf, int buf_size,
2648 CType *type, const char *varstr)
2650 int bt, v, t;
2651 Sym *s, *sa;
2652 char buf1[256];
2653 const char *tstr;
2655 t = type->t;
2656 bt = t & VT_BTYPE;
2657 buf[0] = '\0';
2658 if (t & VT_CONSTANT)
2659 pstrcat(buf, buf_size, "const ");
2660 if (t & VT_VOLATILE)
2661 pstrcat(buf, buf_size, "volatile ");
2662 if ((t & (VT_DEFSIGN | VT_UNSIGNED)) == (VT_DEFSIGN | VT_UNSIGNED))
2663 pstrcat(buf, buf_size, "unsigned ");
2664 else if (t & VT_DEFSIGN)
2665 pstrcat(buf, buf_size, "signed ");
2666 if (t & VT_EXTERN)
2667 pstrcat(buf, buf_size, "extern ");
2668 if (t & VT_STATIC)
2669 pstrcat(buf, buf_size, "static ");
2670 if (t & VT_TYPEDEF)
2671 pstrcat(buf, buf_size, "typedef ");
2672 if (t & VT_INLINE)
2673 pstrcat(buf, buf_size, "inline ");
2674 buf_size -= strlen(buf);
2675 buf += strlen(buf);
2676 switch(bt) {
2677 case VT_VOID:
2678 tstr = "void";
2679 goto add_tstr;
2680 case VT_BOOL:
2681 tstr = "_Bool";
2682 goto add_tstr;
2683 case VT_BYTE:
2684 tstr = "char";
2685 goto add_tstr;
2686 case VT_SHORT:
2687 tstr = "short";
2688 goto add_tstr;
2689 case VT_INT:
2690 tstr = "int";
2691 goto add_tstr;
2692 case VT_LONG:
2693 tstr = "long";
2694 goto add_tstr;
2695 case VT_LLONG:
2696 tstr = "long long";
2697 goto add_tstr;
2698 case VT_FLOAT:
2699 tstr = "float";
2700 goto add_tstr;
2701 case VT_DOUBLE:
2702 tstr = "double";
2703 goto add_tstr;
2704 case VT_LDOUBLE:
2705 tstr = "long double";
2706 add_tstr:
2707 pstrcat(buf, buf_size, tstr);
2708 break;
2709 case VT_ENUM:
2710 case VT_STRUCT:
2711 if (bt == VT_STRUCT)
2712 tstr = "struct ";
2713 else
2714 tstr = "enum ";
2715 pstrcat(buf, buf_size, tstr);
2716 v = type->ref->v & ~SYM_STRUCT;
2717 if (v >= SYM_FIRST_ANOM)
2718 pstrcat(buf, buf_size, "<anonymous>");
2719 else
2720 pstrcat(buf, buf_size, get_tok_str(v, NULL));
2721 break;
2722 case VT_FUNC:
2723 s = type->ref;
2724 type_to_str(buf, buf_size, &s->type, varstr);
2725 pstrcat(buf, buf_size, "(");
2726 sa = s->next;
2727 while (sa != NULL) {
2728 type_to_str(buf1, sizeof(buf1), &sa->type, NULL);
2729 pstrcat(buf, buf_size, buf1);
2730 sa = sa->next;
2731 if (sa)
2732 pstrcat(buf, buf_size, ", ");
2734 pstrcat(buf, buf_size, ")");
2735 goto no_var;
2736 case VT_PTR:
2737 s = type->ref;
2738 if (t & VT_ARRAY) {
2739 snprintf(buf1, sizeof(buf1), "%s[%ld]", varstr ? varstr : "", s->c);
2740 type_to_str(buf, buf_size, &s->type, buf1);
2741 goto no_var;
2743 pstrcpy(buf1, sizeof(buf1), "*");
2744 if (t & VT_CONSTANT)
2745 pstrcat(buf1, buf_size, "const ");
2746 if (t & VT_VOLATILE)
2747 pstrcat(buf1, buf_size, "volatile ");
2748 if (varstr)
2749 pstrcat(buf1, sizeof(buf1), varstr);
2750 type_to_str(buf, buf_size, &s->type, buf1);
2751 goto no_var;
2753 if (varstr) {
2754 pstrcat(buf, buf_size, " ");
2755 pstrcat(buf, buf_size, varstr);
2757 no_var: ;
2760 /* verify type compatibility to store vtop in 'dt' type, and generate
2761 casts if needed. */
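/* Illustration of the checks below:
     char *p = 0;          -- accepted silently (null pointer constant)
     char *p = 5;          -- warning: makes pointer from integer
     struct s x; x = 5;    -- hard error: cannot cast 'int' to 'struct s' */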
2762 static void gen_assign_cast(CType *dt)
2764 CType *st, *type1, *type2, tmp_type1, tmp_type2;
2765 char buf1[256], buf2[256];
2766 int dbt, sbt;
2768 st = &vtop->type; /* source type */
2769 dbt = dt->t & VT_BTYPE;
2770 sbt = st->t & VT_BTYPE;
2771 if (sbt == VT_VOID || dbt == VT_VOID) {
2772 if (sbt == VT_VOID && dbt == VT_VOID)
2773 ; /*
2774 It is Ok if both are void
2775 A test program:
2776 void func1() {}
2777 void func2() {
2778 return func1();
2779 }
2780 gcc accepts this program
2781 */
2782 else
2783 tcc_error("cannot cast from/to void");
2785 if (dt->t & VT_CONSTANT)
2786 tcc_warning("assignment of read-only location");
2787 switch(dbt) {
2788 case VT_PTR:
2789 /* special cases for pointers */
2790 /* '0' can also be a pointer */
2791 if (is_null_pointer(vtop))
2792 goto type_ok;
2793 /* accept implicit pointer to integer cast with warning */
2794 if (is_integer_btype(sbt)) {
2795 tcc_warning("assignment makes pointer from integer without a cast");
2796 goto type_ok;
2798 type1 = pointed_type(dt);
2799 /* a function is implicitly a function pointer */
2800 if (sbt == VT_FUNC) {
2801 if ((type1->t & VT_BTYPE) != VT_VOID &&
2802 !is_compatible_types(pointed_type(dt), st))
2803 tcc_warning("assignment from incompatible pointer type");
2804 goto type_ok;
2806 if (sbt != VT_PTR)
2807 goto error;
2808 type2 = pointed_type(st);
2809 if ((type1->t & VT_BTYPE) == VT_VOID ||
2810 (type2->t & VT_BTYPE) == VT_VOID) {
2811 /* void * can match anything */
2812 } else {
2813 /* exact type match, except for qualifiers */
2814 tmp_type1 = *type1;
2815 tmp_type2 = *type2;
2816 tmp_type1.t &= ~(VT_CONSTANT | VT_VOLATILE);
2817 tmp_type2.t &= ~(VT_CONSTANT | VT_VOLATILE);
2818 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
2819 /* Like GCC, don't warn by default for mere changes
2820 in pointer target signedness. Do warn for different
2821 base types, though, in particular for unsigned enums
2822 and signed int targets. */
2823 if ((tmp_type1.t & (VT_DEFSIGN | VT_UNSIGNED)) !=
2824 (tmp_type2.t & (VT_DEFSIGN | VT_UNSIGNED)) &&
2825 (tmp_type1.t & VT_BTYPE) == (tmp_type2.t & VT_BTYPE))
2827 else
2828 tcc_warning("assignment from incompatible pointer type");
2831 /* check const and volatile */
2832 if ((!(type1->t & VT_CONSTANT) && (type2->t & VT_CONSTANT) &&
2833 ((type2->t & VT_BTYPE) != VT_BYTE || tcc_state->warn_write_strings)) ||
2834 (!(type1->t & VT_VOLATILE) && (type2->t & VT_VOLATILE)))
2835 tcc_warning("assignment discards qualifiers from pointer target type");
2836 break;
2837 case VT_BYTE:
2838 case VT_SHORT:
2839 case VT_INT:
2840 case VT_LLONG:
2841 if (sbt == VT_PTR || sbt == VT_FUNC) {
2842 tcc_warning("assignment makes integer from pointer without a cast");
2843 } else if (sbt == VT_STRUCT) {
2844 goto case_VT_STRUCT;
2846 /* XXX: more tests */
2847 break;
2848 case VT_STRUCT:
2849 case_VT_STRUCT:
2850 tmp_type1 = *dt;
2851 tmp_type2 = *st;
2852 tmp_type1.t &= ~(VT_CONSTANT | VT_VOLATILE);
2853 tmp_type2.t &= ~(VT_CONSTANT | VT_VOLATILE);
2854 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
2855 error:
2856 type_to_str(buf1, sizeof(buf1), st, NULL);
2857 type_to_str(buf2, sizeof(buf2), dt, NULL);
2858 tcc_error("cannot cast '%s' to '%s'", buf1, buf2);
2860 break;
2862 type_ok:
2863 gen_cast(dt);
2866 /* store vtop in lvalue pushed on stack */
2867 ST_FUNC void vstore(void)
2869 int sbt, dbt, ft, r, t, size, align, bit_size, bit_pos, rc, delayed_cast;
2871 ft = vtop[-1].type.t;
2872 sbt = vtop->type.t & VT_BTYPE;
2873 dbt = ft & VT_BTYPE;
2874 if ((((sbt == VT_INT || sbt == VT_SHORT) && dbt == VT_BYTE) ||
2875 (sbt == VT_INT && dbt == VT_SHORT))
2876 && !(vtop->type.t & VT_BITFIELD)) {
2877 /* optimize char/short casts */
2878 delayed_cast = VT_MUSTCAST;
2879 vtop->type.t = (ft & VT_TYPE & ~VT_BITFIELD &
2880 ((1 << VT_STRUCT_SHIFT) - 1));
2881 /* XXX: factorize */
2882 if (ft & VT_CONSTANT)
2883 tcc_warning("assignment of read-only location");
2884 } else {
2885 delayed_cast = 0;
2886 if (!(ft & VT_BITFIELD))
2887 gen_assign_cast(&vtop[-1].type);
2890 if (sbt == VT_STRUCT) {
2891 /* if structure, only generate pointer */
2892 /* structure assignment : generate memcpy */
2893 /* XXX: optimize if small size */
2894 size = type_size(&vtop->type, &align);
2896 /* destination */
2897 vswap();
2898 vtop->type.t = VT_PTR;
2899 gaddrof();
2901 /* address of memcpy() */
2902 #ifdef TCC_ARM_EABI
2903 if(!(align & 7))
2904 vpush_global_sym(&func_old_type, TOK_memcpy8);
2905 else if(!(align & 3))
2906 vpush_global_sym(&func_old_type, TOK_memcpy4);
2907 else
2908 #endif
2909 /* Use memmove, rather than memcpy, as dest and src may be the same: */
2910 vpush_global_sym(&func_old_type, TOK_memmove);
2912 vswap();
2913 /* source */
2914 vpushv(vtop - 2);
2915 vtop->type.t = VT_PTR;
2916 gaddrof();
2917 /* type size */
2918 vpushi(size);
2919 gfunc_call(3);
2921 /* leave source on stack */
2922 } else if (ft & VT_BITFIELD) {
2923 /* bitfield store handling */
2925 /* save lvalue as expression result (example: s.b = s.a = n;) */
2926 vdup(), vtop[-1] = vtop[-2];
2928 bit_pos = (ft >> VT_STRUCT_SHIFT) & 0x3f;
2929 bit_size = (ft >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
2930 /* remove bit field info to avoid loops */
2931 vtop[-1].type.t = ft & ~VT_BITFIELD & ((1 << VT_STRUCT_SHIFT) - 1);
2933 if((ft & VT_BTYPE) == VT_BOOL) {
2934 gen_cast(&vtop[-1].type);
2935 vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
2938 /* duplicate destination */
2939 vdup();
2940 vtop[-1] = vtop[-2];
2942 /* mask and shift source */
2943 if((ft & VT_BTYPE) != VT_BOOL) {
2944 if((ft & VT_BTYPE) == VT_LLONG) {
2945 vpushll((1ULL << bit_size) - 1ULL);
2946 } else {
2947 vpushi((1 << bit_size) - 1);
2949 gen_op('&');
2951 vpushi(bit_pos);
2952 gen_op(TOK_SHL);
2953 /* load destination, mask and or with source */
2954 vswap();
2955 if((ft & VT_BTYPE) == VT_LLONG) {
2956 vpushll(~(((1ULL << bit_size) - 1ULL) << bit_pos));
2957 } else {
2958 vpushi(~(((1 << bit_size) - 1) << bit_pos));
2960 gen_op('&');
2961 gen_op('|');
2962 /* store result */
2963 vstore();
2964 /* ... and discard */
2965 vpop();
2967 } else {
2968 #ifdef CONFIG_TCC_BCHECK
2969 /* bound check case */
2970 if (vtop[-1].r & VT_MUSTBOUND) {
2971 vswap();
2972 gbound();
2973 vswap();
2975 #endif
2976 rc = RC_INT;
2977 if (is_float(ft)) {
2978 rc = RC_FLOAT;
2979 #ifdef TCC_TARGET_X86_64
2980 if ((ft & VT_BTYPE) == VT_LDOUBLE) {
2981 rc = RC_ST0;
2982 } else if ((ft & VT_BTYPE) == VT_QFLOAT) {
2983 rc = RC_FRET;
2985 #endif
2987 r = gv(rc); /* generate value */
2988 /* if lvalue was saved on stack, must read it */
2989 if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
2990 SValue sv;
2991 t = get_reg(RC_INT);
2992 #if PTR_SIZE == 8
2993 sv.type.t = VT_PTR;
2994 #else
2995 sv.type.t = VT_INT;
2996 #endif
2997 sv.r = VT_LOCAL | VT_LVAL;
2998 sv.c.i = vtop[-1].c.i;
2999 load(t, &sv);
3000 vtop[-1].r = t | VT_LVAL;
3002 /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
3003 #if PTR_SIZE == 8
3004 if (((ft & VT_BTYPE) == VT_QLONG) || ((ft & VT_BTYPE) == VT_QFLOAT)) {
3005 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
3006 #else
3007 if ((ft & VT_BTYPE) == VT_LLONG) {
3008 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
3009 #endif
3010 vtop[-1].type.t = load_type;
3011 store(r, vtop - 1);
3012 vswap();
3013 /* convert to int to increment easily */
3014 vtop->type.t = addr_type;
3015 gaddrof();
3016 vpushi(load_size);
3017 gen_op('+');
3018 vtop->r |= VT_LVAL;
3019 vswap();
3020 vtop[-1].type.t = load_type;
3021 /* XXX: it works because r2 is spilled last ! */
3022 store(vtop->r2, vtop - 1);
3023 } else {
3024 store(r, vtop - 1);
3027 vswap();
3028 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
3029 vtop->r |= delayed_cast;
3033 /* 'post' selects post- vs pre-increment/decrement; c is the token ++ or -- */
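/* Illustration: for 'i++' the old value is duplicated first and is what
   remains on the value stack as the expression result; for '++i' the
   stored (updated) value is the result.  vpushi(c - TOK_MID) below
   yields +1 for TOK_INC and -1 for TOK_DEC. */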
3034 ST_FUNC void inc(int post, int c)
3036 test_lvalue();
3037 vdup(); /* save lvalue */
3038 if (post) {
3039 gv_dup(); /* duplicate value */
3040 vrotb(3);
3041 vrotb(3);
3043 /* add constant */
3044 vpushi(c - TOK_MID);
3045 gen_op('+');
3046 vstore(); /* store value */
3047 if (post)
3048 vpop(); /* if post op, return saved value */
3051 ST_FUNC void parse_mult_str (CString *astr, const char *msg)
3053 /* read the string */
3054 if (tok != TOK_STR)
3055 expect(msg);
3056 cstr_new(astr);
3057 while (tok == TOK_STR) {
3058 /* XXX: add \0 handling too ? */
3059 cstr_cat(astr, tokc.str.data, -1);
3060 next();
3062 cstr_ccat(astr, '\0');
3065 /* If I is >= 1 and a power of two, returns log2(i)+1.
3066 If I is 0 returns 0. */
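/* Examples: exact_log2p1(1) == 1, exact_log2p1(8) == 4, exact_log2p1(0) == 0. */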
3067 static int exact_log2p1(int i)
3069 int ret;
3070 if (!i)
3071 return 0;
3072 for (ret = 1; i >= 1 << 8; ret += 8)
3073 i >>= 8;
3074 if (i >= 1 << 4)
3075 ret += 4, i >>= 4;
3076 if (i >= 1 << 2)
3077 ret += 2, i >>= 2;
3078 if (i >= 1 << 1)
3079 ret++;
3080 return ret;
3083 /* Parse GNUC __attribute__ extension. Currently, the following
3084 extensions are recognized:
3085 - aligned(n) : set data/function alignment.
3086 - packed : force data alignment to 1
3087 - section(x) : generate data/code in this section.
3088 - unused : currently ignored, but may be used someday.
3089 - regparm(n) : pass function parameters in registers (i386 only)
3090 */
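/* Illustration of accepted syntax (examples only):
     static int counter __attribute__((section(".mydata"), aligned(16)));
     void die(const char *msg) __attribute__((noreturn));
   unknown attributes and their parameters are skipped (see the default
   case below). */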
3091 static void parse_attribute(AttributeDef *ad)
3093 int t, n;
3094 CString astr;
3096 while (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2) {
3097 next();
3098 skip('(');
3099 skip('(');
3100 while (tok != ')') {
3101 if (tok < TOK_IDENT)
3102 expect("attribute name");
3103 t = tok;
3104 next();
3105 switch(t) {
3106 case TOK_SECTION1:
3107 case TOK_SECTION2:
3108 skip('(');
3109 parse_mult_str(&astr, "section name");
3110 ad->section = find_section(tcc_state, (char *)astr.data);
3111 skip(')');
3112 cstr_free(&astr);
3113 break;
3114 case TOK_ALIAS1:
3115 case TOK_ALIAS2:
3116 skip('(');
3117 parse_mult_str(&astr, "alias(\"target\")");
3118 ad->alias_target = /* save string as token, for later */
3119 tok_alloc((char*)astr.data, astr.size-1)->tok;
3120 skip(')');
3121 cstr_free(&astr);
3122 break;
3123 case TOK_VISIBILITY1:
3124 case TOK_VISIBILITY2:
3125 skip('(');
3126 parse_mult_str(&astr,
3127 "visibility(\"default|hidden|internal|protected\")");
3128 if (!strcmp (astr.data, "default"))
3129 ad->a.visibility = STV_DEFAULT;
3130 else if (!strcmp (astr.data, "hidden"))
3131 ad->a.visibility = STV_HIDDEN;
3132 else if (!strcmp (astr.data, "internal"))
3133 ad->a.visibility = STV_INTERNAL;
3134 else if (!strcmp (astr.data, "protected"))
3135 ad->a.visibility = STV_PROTECTED;
3136 else
3137 expect("visibility(\"default|hidden|internal|protected\")");
3138 skip(')');
3139 cstr_free(&astr);
3140 break;
3141 case TOK_ALIGNED1:
3142 case TOK_ALIGNED2:
3143 if (tok == '(') {
3144 next();
3145 n = expr_const();
3146 if (n <= 0 || (n & (n - 1)) != 0)
3147 tcc_error("alignment must be a positive power of two");
3148 skip(')');
3149 } else {
3150 n = MAX_ALIGN;
3152 ad->a.aligned = exact_log2p1(n);
3153 if (n != 1 << (ad->a.aligned - 1))
3154 tcc_error("alignment of %d is larger than implemented", n);
3155 break;
3156 case TOK_PACKED1:
3157 case TOK_PACKED2:
3158 ad->a.packed = 1;
3159 break;
3160 case TOK_WEAK1:
3161 case TOK_WEAK2:
3162 ad->a.weak = 1;
3163 break;
3164 case TOK_UNUSED1:
3165 case TOK_UNUSED2:
3166 /* currently, no need to handle it because tcc does not
3167 track unused objects */
3168 break;
3169 case TOK_NORETURN1:
3170 case TOK_NORETURN2:
3171 /* currently ignored: tcc does not make use of
3172 noreturn information */
3173 break;
3174 case TOK_CDECL1:
3175 case TOK_CDECL2:
3176 case TOK_CDECL3:
3177 ad->a.func_call = FUNC_CDECL;
3178 break;
3179 case TOK_STDCALL1:
3180 case TOK_STDCALL2:
3181 case TOK_STDCALL3:
3182 ad->a.func_call = FUNC_STDCALL;
3183 break;
3184 #ifdef TCC_TARGET_I386
3185 case TOK_REGPARM1:
3186 case TOK_REGPARM2:
3187 skip('(');
3188 n = expr_const();
3189 if (n > 3)
3190 n = 3;
3191 else if (n < 0)
3192 n = 0;
3193 if (n > 0)
3194 ad->a.func_call = FUNC_FASTCALL1 + n - 1;
3195 skip(')');
3196 break;
3197 case TOK_FASTCALL1:
3198 case TOK_FASTCALL2:
3199 case TOK_FASTCALL3:
3200 ad->a.func_call = FUNC_FASTCALLW;
3201 break;
3202 #endif
3203 case TOK_MODE:
3204 skip('(');
3205 switch(tok) {
3206 case TOK_MODE_DI:
3207 ad->a.mode = VT_LLONG + 1;
3208 break;
3209 case TOK_MODE_QI:
3210 ad->a.mode = VT_BYTE + 1;
3211 break;
3212 case TOK_MODE_HI:
3213 ad->a.mode = VT_SHORT + 1;
3214 break;
3215 case TOK_MODE_SI:
3216 case TOK_MODE_word:
3217 ad->a.mode = VT_INT + 1;
3218 break;
3219 default:
3220 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok, NULL));
3221 break;
3223 next();
3224 skip(')');
3225 break;
3226 case TOK_DLLEXPORT:
3227 ad->a.func_export = 1;
3228 break;
3229 case TOK_DLLIMPORT:
3230 ad->a.func_import = 1;
3231 break;
3232 default:
3233 if (tcc_state->warn_unsupported)
3234 tcc_warning("'%s' attribute ignored", get_tok_str(t, NULL));
3235 /* skip parameters */
3236 if (tok == '(') {
3237 int parenthesis = 0;
3238 do {
3239 if (tok == '(')
3240 parenthesis++;
3241 else if (tok == ')')
3242 parenthesis--;
3243 next();
3244 } while (parenthesis && tok != -1);
3246 break;
3248 if (tok != ',')
3249 break;
3250 next();
3252 skip(')');
3253 skip(')');
3257 static Sym * find_field (CType *type, int v)
3259 Sym *s = type->ref;
3260 v |= SYM_FIELD;
3261 while ((s = s->next) != NULL) {
3262 if ((s->v & SYM_FIELD) &&
3263 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3264 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3265 Sym *ret = find_field (&s->type, v);
3266 if (ret)
3267 return ret;
3269 if (s->v == v)
3270 break;
3272 return s;
3275 static void struct_add_offset (Sym *s, int offset)
3277 while ((s = s->next) != NULL) {
3278 if ((s->v & SYM_FIELD) &&
3279 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3280 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3281 struct_add_offset(s->type.ref, offset);
3282 } else
3283 s->c += offset;
3287 static void struct_layout(CType *type, AttributeDef *ad)
3289 int align, maxalign, offset, c, bit_pos, bt, prevbt, prev_bit_size;
3290 int pcc = !tcc_state->ms_bitfields;
3291 int packwarn = tcc_state->warn_gcc_compat;
3292 int typealign, bit_size, size;
3294 Sym *f;
3295 if (ad->a.aligned)
3296 maxalign = 1 << (ad->a.aligned - 1);
3297 else
3298 maxalign = 1;
3299 offset = 0;
3300 c = 0;
3301 bit_pos = 0;
3302 prevbt = VT_STRUCT; /* make it never match */
3303 prev_bit_size = 0;
3304 size = 0;
3306 for (f = type->ref->next; f; f = f->next) {
3307 size = type_size(&f->type, &typealign);
3308 if (f->type.t & VT_BITFIELD)
3309 bit_size = (f->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
3310 else
3311 bit_size = -1;
3312 if (bit_size == 0 && pcc) {
3313 /* Zero-width bit-fields in PCC mode aren't affected
3314 by any packing (attribute or pragma). */
3315 align = typealign;
3316 } else if (f->r > 1) {
3317 align = f->r;
3318 } else if (ad->a.packed || f->r == 1) {
3319 align = 1;
3320 /* Packed fields or packed records don't let the base type
3321 influence the record's alignment. */
3322 typealign = 1;
3323 } else {
3324 align = typealign;
3326 if (type->ref->type.t != TOK_STRUCT) {
3327 if (pcc && bit_size >= 0)
3328 size = (bit_size + 7) >> 3;
3329 /* Bit position is already zero from our caller. */
3330 offset = 0;
3331 if (size > c)
3332 c = size;
3333 } else if (bit_size < 0) {
3334 int addbytes = pcc ? (bit_pos + 7) >> 3 : 0;
3335 prevbt = VT_STRUCT;
3336 prev_bit_size = 0;
3337 c = (c + addbytes + align - 1) & -align;
3338 offset = c;
3339 if (size > 0)
3340 c += size;
3341 bit_pos = 0;
3342 } else {
3343 /* A bit-field. Layout is more complicated. There are two
3344 options TCC implements: PCC compatible and MS compatible
3345 (PCC compatible is what GCC uses for almost all targets).
3346 In PCC layout the overall size of the struct (in c) is
3347 _excluding_ the current run of bit-fields (that is,
3348 there's at least additional bit_pos bits after c). In
3349 MS layout c does include the current run of bit-fields.
3351 This matters for calculating the natural alignment buckets
3352 in PCC mode. */
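/* Illustration: given
       struct s { char x; int y:5; int z:5; };
   while y and z are being placed, the PCC path leaves the byte counter c
   at the last non-bit-field boundary and carries the pending bits in
   bit_pos, whereas the MS path already accounts for the whole 'int'
   container in c as soon as the run starts. */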
3354 /* 'align' will be used to influence records alignment,
3355 so it's the max of specified and type alignment, except
3356 in certain cases that depend on the mode. */
3357 if (align < typealign)
3358 align = typealign;
3359 if (pcc) {
3360 /* In PCC layout a non-packed bit-field is placed adjacent
3361 to the preceding bit-fields, except if it would overflow
3362 its container (depending on base type) or it's a zero-width
3363 bit-field. Packed non-zero-width bit-fields always are
3364 placed adjacent. */
3365 int ofs = (c * 8 + bit_pos) % (typealign * 8);
3366 int ofs2 = ofs + bit_size + (typealign * 8) - 1;
3367 if (bit_size == 0 ||
3368 (typealign != 1 &&
3369 (ofs2 / (typealign * 8)) > (size/typealign))) {
3370 c = (c + ((bit_pos + 7) >> 3) + typealign - 1) & -typealign;
3371 bit_pos = 0;
3372 } else if (bit_pos + bit_size > size * 8) {
3373 c += bit_pos >> 3;
3374 bit_pos &= 7;
3375 if (bit_pos + bit_size > size * 8) {
3376 c += 1, bit_pos = 0;
3377 if ((ad->a.packed || f->r) && packwarn) {
3378 tcc_warning("struct layout not compatible with GCC (internal limitation)");
3379 packwarn = 0;
3383 offset = c;
3384 /* In PCC layout named bit-fields influence the alignment
3385 of the containing struct using the base type's alignment,
3386 except for packed fields (which here have correct
3387 align/typealign). */
3388 if ((f->v & SYM_FIRST_ANOM))
3389 align = 1;
3390 } else {
3391 bt = f->type.t & VT_BTYPE;
3392 if ((bit_pos + bit_size > size * 8) ||
3393 (bit_size > 0) == (bt != prevbt)) {
3394 c = (c + typealign - 1) & -typealign;
3395 offset = c;
3396 bit_pos = 0;
3397 /* In MS bitfield mode a bit-field run always uses
3398 at least as many bits as the underlying type.
3399 To start a new run it's also required that this
3400 or the last bit-field had non-zero width. */
3401 if (bit_size || prev_bit_size)
3402 c += size;
3404 /* In MS layout the record's alignment is normally
3405 influenced by the field, except for a zero-width
3406 field at the start of a run (but by further zero-width
3407 fields it is again). */
3408 if (bit_size == 0 && prevbt != bt)
3409 align = 1;
3410 prevbt = bt;
3411 prev_bit_size = bit_size;
3413 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
3414 | (bit_pos << VT_STRUCT_SHIFT);
3415 bit_pos += bit_size;
3416 if (pcc && bit_pos >= size * 8) {
3417 c += size;
3418 bit_pos -= size * 8;
3421 if (align > maxalign)
3422 maxalign = align;
3423 #if 0
3424 printf("set field %s offset=%d",
3425 get_tok_str(f->v & ~SYM_FIELD, NULL), offset);
3426 if (f->type.t & VT_BITFIELD) {
3427 printf(" pos=%d size=%d",
3428 (f->type.t >> VT_STRUCT_SHIFT) & 0x3f,
3429 (f->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f);
3431 printf("\n");
3432 #endif
3434 if (f->v & SYM_FIRST_ANOM && (f->type.t & VT_BTYPE) == VT_STRUCT) {
3435 Sym *ass;
3436 /* An anonymous struct/union. Adjust member offsets
3437 to reflect the real offset of our containing struct.
3438 Also set the offset of this anon member inside
3439 the outer struct to be zero. Via this it
3440 works when accessing the field offset directly
3441 (from base object), as well as when recursing
3442 members in initializer handling. */
3443 int v2 = f->type.ref->v;
3444 if (!(v2 & SYM_FIELD) &&
3445 (v2 & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
3446 Sym **pps;
3447 /* This happens only with MS extensions. The
3448 anon member has a named struct type, so it
3449 potentially is shared with other references.
3450 We need to unshare members so we can modify
3451 them. */
3452 ass = f->type.ref;
3453 f->type.ref = sym_push(anon_sym++ | SYM_FIELD,
3454 &f->type.ref->type, 0,
3455 f->type.ref->c);
3456 pps = &f->type.ref->next;
3457 while ((ass = ass->next) != NULL) {
3458 *pps = sym_push(ass->v, &ass->type, 0, ass->c);
3459 pps = &((*pps)->next);
3461 *pps = NULL;
3463 struct_add_offset(f->type.ref, offset);
3464 f->c = 0;
3465 } else {
3466 f->c = offset;
3469 f->r = 0;
3471 /* store size and alignment */
3472 type->ref->c = (c + (pcc ? (bit_pos + 7) >> 3 : 0)
3473 + maxalign - 1) & -maxalign;
3474 type->ref->r = maxalign;
3475 if (offset + size > type->ref->c && type->ref->c)
3476 tcc_warning("will touch memory past end of the struct (internal limitation)");
3479 /* enum/struct/union declaration. u is either VT_ENUM or VT_STRUCT */
3480 static void struct_decl(CType *type, AttributeDef *ad, int u)
3482 int a, v, size, align, flexible, alignoverride;
3483 long c;
3484 int bit_size, bsize, bt;
3485 Sym *s, *ss, **ps;
3486 AttributeDef ad1;
3487 CType type1, btype;
3489 a = tok; /* save decl type */
3490 next();
3491 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
3492 parse_attribute(ad);
3493 if (tok != '{') {
3494 v = tok;
3495 next();
3496 /* struct already defined ? return it */
3497 if (v < TOK_IDENT)
3498 expect("struct/union/enum name");
3499 s = struct_find(v);
3500 if (s && (s->scope == local_scope || (tok != '{' && tok != ';'))) {
3501 if (s->type.t != a)
3502 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
3503 goto do_decl;
3505 } else {
3506 v = anon_sym++;
3508 /* Record the original enum/struct/union token. */
3509 type1.t = a;
3510 type1.ref = NULL;
3511 /* we put an undefined size for struct/union */
3512 s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
3513 s->r = 0; /* default alignment is zero as gcc */
3514 /* put struct/union/enum name in type */
3515 do_decl:
3516 type->t = u;
3517 type->ref = s;
3519 if (tok == '{') {
3520 next();
3521 if (s->c != -1)
3522 tcc_error("struct/union/enum already defined");
3523 /* cannot be empty */
3524 c = 0;
3525 /* empty enums are not allowed */
3526 if (a == TOK_ENUM) {
3527 int seen_neg = 0;
3528 int seen_wide = 0;
3529 for(;;) {
3530 CType *t = &int_type;
3531 v = tok;
3532 if (v < TOK_UIDENT)
3533 expect("identifier");
3534 ss = sym_find(v);
3535 if (ss && !local_stack)
3536 tcc_error("redefinition of enumerator '%s'",
3537 get_tok_str(v, NULL));
3538 next();
3539 if (tok == '=') {
3540 next();
3541 #if PTR_SIZE == 8
3542 c = expr_const64();
3543 #else
3544 /* We really want to support long long enums
3545 on i386 as well, but the Sym structure only
3546 holds a 'long' for associated constants,
3547 and enlarging it would bump its size (no
3548 available padding). So punt for now. */
3549 c = expr_const();
3550 #endif
3552 if (c < 0)
3553 seen_neg = 1;
3554 if (c != (int)c && (unsigned long)c != (unsigned int)c)
3555 seen_wide = 1, t = &size_type;
3556 /* enum symbols have static storage */
3557 ss = sym_push(v, t, VT_CONST, c);
3558 ss->type.t |= VT_STATIC;
3559 if (tok != ',')
3560 break;
3561 next();
3562 c++;
3563 /* NOTE: we accept a trailing comma */
3564 if (tok == '}')
3565 break;
3567 if (!seen_neg)
3568 s->a.unsigned_enum = 1;
3569 s->c = type_size(seen_wide ? &size_type : &int_type, &align);
3570 skip('}');
3571 } else {
3572 ps = &s->next;
3573 flexible = 0;
3574 while (tok != '}') {
3575 if (!parse_btype(&btype, &ad1)) {
3576 skip(';');
3577 continue;
3579 while (1) {
3580 if (flexible)
3581 tcc_error("flexible array member '%s' not at the end of struct",
3582 get_tok_str(v, NULL));
3583 bit_size = -1;
3584 v = 0;
3585 type1 = btype;
3586 if (tok != ':') {
3587 if (tok != ';')
3588 type_decl(&type1, &ad1, &v, TYPE_DIRECT);
3589 if (v == 0) {
3590 if ((type1.t & VT_BTYPE) != VT_STRUCT)
3591 expect("identifier");
3592 else {
3593 int v = btype.ref->v;
3594 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
3595 if (tcc_state->ms_extensions == 0)
3596 expect("identifier");
3600 if (type_size(&type1, &align) < 0) {
3601 if ((a == TOK_STRUCT) && (type1.t & VT_ARRAY) && c)
3602 flexible = 1;
3603 else
3604 tcc_error("field '%s' has incomplete type",
3605 get_tok_str(v, NULL));
3607 if ((type1.t & VT_BTYPE) == VT_FUNC ||
3608 (type1.t & VT_STORAGE))
3609 tcc_error("invalid type for '%s'",
3610 get_tok_str(v, NULL));
3612 if (tok == ':') {
3613 next();
3614 bit_size = expr_const();
3615 /* XXX: handle v = 0 case for messages */
3616 if (bit_size < 0)
3617 tcc_error("negative width in bit-field '%s'",
3618 get_tok_str(v, NULL));
3619 if (v && bit_size == 0)
3620 tcc_error("zero width for bit-field '%s'",
3621 get_tok_str(v, NULL));
3622 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
3623 parse_attribute(&ad1);
3625 size = type_size(&type1, &align);
3626 /* Only remember non-default alignment. */
3627 alignoverride = 0;
3628 if (ad1.a.aligned) {
3629 int speca = 1 << (ad1.a.aligned - 1);
3630 alignoverride = speca;
3631 } else if (ad1.a.packed || ad->a.packed) {
3632 alignoverride = 1;
3633 } else if (*tcc_state->pack_stack_ptr) {
3634 if (align >= *tcc_state->pack_stack_ptr)
3635 alignoverride = *tcc_state->pack_stack_ptr;
3637 if (bit_size >= 0) {
3638 bt = type1.t & VT_BTYPE;
3639 if (bt != VT_INT &&
3640 bt != VT_BYTE &&
3641 bt != VT_SHORT &&
3642 bt != VT_BOOL &&
3643 bt != VT_ENUM &&
3644 bt != VT_LLONG)
3645 tcc_error("bitfields must have scalar type");
3646 bsize = size * 8;
3647 if (bit_size > bsize) {
3648 tcc_error("width of '%s' exceeds its type",
3649 get_tok_str(v, NULL));
3650 } else if (bit_size == bsize) {
3651 /* no need for bit fields */
3653 } else {
3654 type1.t |= VT_BITFIELD |
3655 (0 << VT_STRUCT_SHIFT) |
3656 (bit_size << (VT_STRUCT_SHIFT + 6));
3659 if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
3660 /* Remember we've seen a real field to check
3661 for placement of flexible array member. */
3662 c = 1;
3664 /* If member is a struct or bit-field, enforce
3665 placing into the struct (as anonymous). */
3666 if (v == 0 &&
3667 ((type1.t & VT_BTYPE) == VT_STRUCT ||
3668 bit_size >= 0)) {
3669 v = anon_sym++;
3671 if (v) {
3672 ss = sym_push(v | SYM_FIELD, &type1, alignoverride, 0);
3673 *ps = ss;
3674 ps = &ss->next;
3676 if (tok == ';' || tok == TOK_EOF)
3677 break;
3678 skip(',');
3680 skip(';');
3682 skip('}');
3683 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
3684 parse_attribute(ad);
3685 struct_layout(type, ad);
3691 /* return 1 if the basic type is a size modifier (short, long, long long) */
3691 ST_FUNC int is_btype_size(int bt)
3693 return bt == VT_SHORT || bt == VT_LONG || bt == VT_LLONG;
3696 /* Add type qualifiers to a type. If the type is an array then the qualifiers
3697 are added to the element type, copied because it could be a typedef. */
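/* Illustration: with 'typedef int A[4];' the declaration 'const A x;'
   arrives here with an array type; the loop below copies the array Sym
   before descending, so VT_CONSTANT lands on x's element type without
   qualifying the shared typedef. */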
3698 static void parse_btype_qualify(CType *type, int qualifiers)
3700 while (type->t & VT_ARRAY) {
3701 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
3702 type = &type->ref->type;
3704 type->t |= qualifiers;
3707 /* return 0 if no type declaration. otherwise, return the basic type
3708 and skip it.
3709 */
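/* Note (descriptive): the loop below accumulates multi-token specifiers,
   so e.g. 'unsigned long long int', 'long double' or 'static const char'
   each end up as a single (t, ad) pair returned through 'type'. */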
3710 static int parse_btype(CType *type, AttributeDef *ad)
3712 int t, u, bt_size, complete, type_found, typespec_found, g;
3713 Sym *s;
3714 CType type1;
3716 memset(ad, 0, sizeof(AttributeDef));
3717 complete = 0;
3718 type_found = 0;
3719 typespec_found = 0;
3720 t = 0;
3721 while(1) {
3722 switch(tok) {
3723 case TOK_EXTENSION:
3724 /* currently, we really ignore extension */
3725 next();
3726 continue;
3728 /* basic types */
3729 case TOK_CHAR:
3730 u = VT_BYTE;
3731 basic_type:
3732 next();
3733 basic_type1:
3734 if (complete)
3735 tcc_error("too many basic types");
3736 t |= u;
3737 bt_size = is_btype_size (u & VT_BTYPE);
3738 if (u == VT_INT || (!bt_size && !(t & VT_TYPEDEF)))
3739 complete = 1;
3740 typespec_found = 1;
3741 break;
3742 case TOK_VOID:
3743 u = VT_VOID;
3744 goto basic_type;
3745 case TOK_SHORT:
3746 u = VT_SHORT;
3747 goto basic_type;
3748 case TOK_INT:
3749 u = VT_INT;
3750 goto basic_type;
3751 case TOK_LONG:
3752 next();
3753 if ((t & VT_BTYPE) == VT_DOUBLE) {
3754 #ifndef TCC_TARGET_PE
3755 t = (t & ~VT_BTYPE) | VT_LDOUBLE;
3756 #endif
3757 } else if ((t & VT_BTYPE) == VT_LONG) {
3758 t = (t & ~VT_BTYPE) | VT_LLONG;
3759 } else {
3760 u = VT_LONG;
3761 goto basic_type1;
3763 break;
3764 #ifdef TCC_TARGET_ARM64
3765 case TOK_UINT128:
3766 /* GCC's __uint128_t appears in some Linux header files. Make it a
3767 synonym for long double to get the size and alignment right. */
3768 u = VT_LDOUBLE;
3769 goto basic_type;
3770 #endif
3771 case TOK_BOOL:
3772 u = VT_BOOL;
3773 goto basic_type;
3774 case TOK_FLOAT:
3775 u = VT_FLOAT;
3776 goto basic_type;
3777 case TOK_DOUBLE:
3778 next();
3779 if ((t & VT_BTYPE) == VT_LONG) {
3780 #ifdef TCC_TARGET_PE
3781 t = (t & ~VT_BTYPE) | VT_DOUBLE;
3782 #else
3783 t = (t & ~VT_BTYPE) | VT_LDOUBLE;
3784 #endif
3785 } else {
3786 u = VT_DOUBLE;
3787 goto basic_type1;
3789 break;
3790 case TOK_ENUM:
3791 struct_decl(&type1, ad, VT_ENUM);
3792 basic_type2:
3793 u = type1.t;
3794 type->ref = type1.ref;
3795 goto basic_type1;
3796 case TOK_STRUCT:
3797 case TOK_UNION:
3798 struct_decl(&type1, ad, VT_STRUCT);
3799 goto basic_type2;
3801 /* type modifiers */
3802 case TOK_CONST1:
3803 case TOK_CONST2:
3804 case TOK_CONST3:
3805 type->t = t;
3806 parse_btype_qualify(type, VT_CONSTANT);
3807 t = type->t;
3808 next();
3809 break;
3810 case TOK_VOLATILE1:
3811 case TOK_VOLATILE2:
3812 case TOK_VOLATILE3:
3813 type->t = t;
3814 parse_btype_qualify(type, VT_VOLATILE);
3815 t = type->t;
3816 next();
3817 break;
3818 case TOK_SIGNED1:
3819 case TOK_SIGNED2:
3820 case TOK_SIGNED3:
3821 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
3822 tcc_error("signed and unsigned modifier");
3823 typespec_found = 1;
3824 t |= VT_DEFSIGN;
3825 next();
3826 break;
3827 case TOK_REGISTER:
3828 case TOK_AUTO:
3829 case TOK_RESTRICT1:
3830 case TOK_RESTRICT2:
3831 case TOK_RESTRICT3:
3832 next();
3833 break;
3834 case TOK_UNSIGNED:
3835 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
3836 tcc_error("signed and unsigned modifier");
3837 t |= VT_DEFSIGN | VT_UNSIGNED;
3838 next();
3839 typespec_found = 1;
3840 break;
3842 /* storage */
3843 case TOK_EXTERN:
3844 g = VT_EXTERN;
3845 goto storage;
3846 case TOK_STATIC:
3847 g = VT_STATIC;
3848 goto storage;
3849 case TOK_TYPEDEF:
3850 g = VT_TYPEDEF;
3851 goto storage;
3852 storage:
3853 if (t & (VT_EXTERN|VT_STATIC|VT_TYPEDEF) & ~g)
3854 tcc_error("multiple storage classes");
3855 t |= g;
3856 next();
3857 break;
3858 case TOK_INLINE1:
3859 case TOK_INLINE2:
3860 case TOK_INLINE3:
3861 t |= VT_INLINE;
3862 next();
3863 break;
3865 /* GNUC attribute */
3866 case TOK_ATTRIBUTE1:
3867 case TOK_ATTRIBUTE2:
3868 parse_attribute(ad);
3869 if (ad->a.mode) {
3870 u = ad->a.mode -1;
3871 t = (t & ~VT_BTYPE) | u;
3873 break;
3874 /* GNUC typeof */
3875 case TOK_TYPEOF1:
3876 case TOK_TYPEOF2:
3877 case TOK_TYPEOF3:
3878 next();
3879 parse_expr_type(&type1);
3880 /* remove all storage modifiers except typedef */
3881 type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
3882 goto basic_type2;
3883 default:
3884 if (typespec_found)
3885 goto the_end;
3886 s = sym_find(tok);
3887 if (!s || !(s->type.t & VT_TYPEDEF))
3888 goto the_end;
3890 type->t = ((s->type.t & ~VT_TYPEDEF) |
3891 (t & ~(VT_CONSTANT | VT_VOLATILE)));
3892 type->ref = s->type.ref;
3893 if (t & (VT_CONSTANT | VT_VOLATILE))
3894 parse_btype_qualify(type, t & (VT_CONSTANT | VT_VOLATILE));
3895 t = type->t;
3897 if (s->r) {
3898 /* get attributes from typedef */
3899 if (0 == ad->a.aligned)
3900 ad->a.aligned = s->a.aligned;
3901 if (0 == ad->a.func_call)
3902 ad->a.func_call = s->a.func_call;
3903 ad->a.packed |= s->a.packed;
3905 next();
3906 typespec_found = 1;
3907 break;
3909 type_found = 1;
3911 the_end:
3912 if (tcc_state->char_is_unsigned) {
3913 if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
3914 t |= VT_UNSIGNED;
3917 /* long is never used as type */
3918 if ((t & VT_BTYPE) == VT_LONG)
3919 #if PTR_SIZE == 8 && !defined TCC_TARGET_PE
3920 t = (t & ~VT_BTYPE) | VT_LLONG;
3921 #else
3922 t = (t & ~VT_BTYPE) | VT_INT;
3923 #endif
3924 type->t = t;
3925 return type_found;
3928 /* convert a function parameter type (array to pointer and function to
3929 function pointer) */
3930 static inline void convert_parameter_type(CType *pt)
3932 /* remove const and volatile qualifiers (XXX: const could be used
3933 to indicate a const function parameter) */
3934 pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
3935 /* array must be transformed to pointer according to ANSI C */
3936 pt->t &= ~VT_ARRAY;
3937 if ((pt->t & VT_BTYPE) == VT_FUNC) {
3938 mk_pointer(pt);
3942 ST_FUNC void parse_asm_str(CString *astr)
3944 skip('(');
3945 parse_mult_str(astr, "string constant");
3948 /* Parse an asm label and return the token */
3949 static int asm_label_instr(void)
3951 int v;
3952 CString astr;
3954 next();
3955 parse_asm_str(&astr);
3956 skip(')');
3957 #ifdef ASM_DEBUG
3958 printf("asm_alias: \"%s\"\n", (char *)astr.data);
3959 #endif
3960 v = tok_alloc(astr.data, astr.size - 1)->tok;
3961 cstr_free(&astr);
3962 return v;
3965 static int post_type(CType *type, AttributeDef *ad, int storage, int td)
3967 int n, l, t1, arg_size, align;
3968 Sym **plast, *s, *first;
3969 AttributeDef ad1;
3970 CType pt;
3972 if (tok == '(') {
3973 /* function type, or recursive declarator (return if so) */
3974 next();
3975 if (td && !(td & TYPE_ABSTRACT))
3976 return 0;
3977 if (tok == ')')
3978 l = 0;
3979 else if (parse_btype(&pt, &ad1))
3980 l = FUNC_NEW;
3981 else if (td)
3982 return 0;
3983 else
3984 l = FUNC_OLD;
3985 first = NULL;
3986 plast = &first;
3987 arg_size = 0;
3988 if (l) {
3989 for(;;) {
3990 /* read param name and compute offset */
3991 if (l != FUNC_OLD) {
3992 if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
3993 break;
3994 type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT);
3995 if ((pt.t & VT_BTYPE) == VT_VOID)
3996 tcc_error("parameter declared as void");
3997 arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
3998 } else {
3999 n = tok;
4000 if (n < TOK_UIDENT)
4001 expect("identifier");
4002 pt.t = VT_VOID; /* invalid type */
4003 next();
4005 convert_parameter_type(&pt);
4006 s = sym_push(n | SYM_FIELD, &pt, 0, 0);
4007 *plast = s;
4008 plast = &s->next;
4009 if (tok == ')')
4010 break;
4011 skip(',');
4012 if (l == FUNC_NEW && tok == TOK_DOTS) {
4013 l = FUNC_ELLIPSIS;
4014 next();
4015 break;
4017 if (l == FUNC_NEW && !parse_btype(&pt, &ad1))
4018 tcc_error("invalid type");
4020 } else
4021 /* if no parameters, then old type prototype */
4022 l = FUNC_OLD;
4023 skip(')');
4024 /* NOTE: const is ignored in returned type as it has a special
4025 meaning in gcc / C++ */
4026 type->t &= ~VT_CONSTANT;
4027 /* some ancient pre-K&R C allows a function to return an array
4028 and the array brackets to be put after the arguments, such
4029 that "int c()[]" means something like "int[] c()" */
4030 if (tok == '[') {
4031 next();
4032 skip(']'); /* only handle simple "[]" */
4033 mk_pointer(type);
4035 /* we push an anonymous symbol which will contain the function prototype */
4036 ad->a.func_args = arg_size;
4037 s = sym_push(SYM_FIELD, type, 0, l);
4038 s->a = ad->a;
4039 s->next = first;
4040 type->t = VT_FUNC;
4041 type->ref = s;
4042 } else if (tok == '[') {
4043 int saved_nocode_wanted = nocode_wanted;
4044 /* array definition */
4045 next();
4046 if (tok == TOK_RESTRICT1)
4047 next();
4048 n = -1;
4049 t1 = 0;
4050 if (tok != ']') {
4051 if (!local_stack || (storage & VT_STATIC))
4052 vpushi(expr_const());
4053 else {
4054 /* VLAs (which can only happen with local_stack && !VT_STATIC)
4055 length must always be evaluated, even under nocode_wanted,
4056 so that its size slot is initialized (e.g. under sizeof
4057 or typeof). */
4058 nocode_wanted = 0;
4059 gexpr();
4061 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4062 n = vtop->c.i;
4063 if (n < 0)
4064 tcc_error("invalid array size");
4065 } else {
4066 if (!is_integer_btype(vtop->type.t & VT_BTYPE))
4067 tcc_error("size of variable length array should be an integer");
4068 t1 = VT_VLA;
4071 skip(']');
4072 /* parse next post type */
4073 post_type(type, ad, storage, 0);
4074 if (type->t == VT_FUNC)
4075 tcc_error("declaration of an array of functions");
4076 t1 |= type->t & VT_VLA;
4078 if (t1 & VT_VLA) {
4079 loc -= type_size(&int_type, &align);
4080 loc &= -align;
4081 n = loc;
4083 vla_runtime_type_size(type, &align);
4084 gen_op('*');
4085 vset(&int_type, VT_LOCAL|VT_LVAL, n);
4086 vswap();
4087 vstore();
4089 if (n != -1)
4090 vpop();
4091 nocode_wanted = saved_nocode_wanted;
4093 /* we push an anonymous symbol which will contain the array
4094 element type */
4095 s = sym_push(SYM_FIELD, type, 0, n);
4096 type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
4097 type->ref = s;
4099 return 1;
4102 /* Parse a type declarator (except basic type), and return the type
4103 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4104 expected. 'type' should contain the basic type. 'ad' is the
4105 attribute definition of the basic type. It can be modified by
4106 type_decl(). If this (possibly abstract) declarator is a pointer chain
4107 it returns the innermost pointed to type (equals *type, but is a different
4108 pointer), otherwise returns type itself, that's used for recursive calls. */
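/* Illustration: for 'int *p[4]' the '*' handled here makes the working
   type 'pointer to int' and post_type() then wraps it into 'array[4] of
   pointer to int'.  For 'int (*fp)(void)' the nested '(*fp)' declarator
   is parsed recursively and the trailing '(void)' is applied to the
   innermost pointed-to type, giving 'pointer to function returning int'. */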
4109 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td)
4111 CType *post, *ret;
4112 int qualifiers, storage;
4114 /* recursive type, remove storage bits first, apply them later again */
4115 storage = type->t & VT_STORAGE;
4116 type->t &= ~VT_STORAGE;
4117 post = ret = type;
4118 while (tok == '*') {
4119 qualifiers = 0;
4120 redo:
4121 next();
4122 switch(tok) {
4123 case TOK_CONST1:
4124 case TOK_CONST2:
4125 case TOK_CONST3:
4126 qualifiers |= VT_CONSTANT;
4127 goto redo;
4128 case TOK_VOLATILE1:
4129 case TOK_VOLATILE2:
4130 case TOK_VOLATILE3:
4131 qualifiers |= VT_VOLATILE;
4132 goto redo;
4133 case TOK_RESTRICT1:
4134 case TOK_RESTRICT2:
4135 case TOK_RESTRICT3:
4136 goto redo;
4137 /* XXX: clarify attribute handling */
4138 case TOK_ATTRIBUTE1:
4139 case TOK_ATTRIBUTE2:
4140 parse_attribute(ad);
4141 break;
4143 mk_pointer(type);
4144 type->t |= qualifiers;
4145 if (ret == type)
4146 /* innermost pointed to type is the one for the first derivation */
4147 ret = pointed_type(type);
4150 if (tok == '(') {
4151 /* This is possibly a parameter type list for abstract declarators
4152 ('int ()'), use post_type for testing this. */
4153 if (!post_type(type, ad, 0, td)) {
4154 /* It's not, so it's a nested declarator, and the post operations
4155 apply to the innermost pointed to type (if any). */
4156 /* XXX: this is not correct to modify 'ad' at this point, but
4157 the syntax is not clear */
4158 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
4159 parse_attribute(ad);
4160 post = type_decl(type, ad, v, td);
4161 skip(')');
4163 } else if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
4164 /* type identifier */
4165 *v = tok;
4166 next();
4167 } else {
4168 if (!(td & TYPE_ABSTRACT))
4169 expect("identifier");
4170 *v = 0;
4172 post_type(post, ad, storage, 0);
4173 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
4174 parse_attribute(ad);
4175 type->t |= storage;
4176 return ret;
4179 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
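/* Examples: a 'short' lvalue gets VT_LVAL | VT_LVAL_SHORT, an
   'unsigned char' lvalue VT_LVAL | VT_LVAL_BYTE | VT_LVAL_UNSIGNED,
   and word-sized (or larger) types just VT_LVAL. */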
4180 ST_FUNC int lvalue_type(int t)
4182 int bt, r;
4183 r = VT_LVAL;
4184 bt = t & VT_BTYPE;
4185 if (bt == VT_BYTE || bt == VT_BOOL)
4186 r |= VT_LVAL_BYTE;
4187 else if (bt == VT_SHORT)
4188 r |= VT_LVAL_SHORT;
4189 else
4190 return r;
4191 if (t & VT_UNSIGNED)
4192 r |= VT_LVAL_UNSIGNED;
4193 return r;
4196 /* indirection with full error checking and bound check */
4197 ST_FUNC void indir(void)
4199 if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
4200 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
4201 return;
4202 expect("pointer");
4204 if (vtop->r & VT_LVAL)
4205 gv(RC_INT);
4206 vtop->type = *pointed_type(&vtop->type);
4207 /* Arrays and functions are never lvalues */
4208 if (!(vtop->type.t & VT_ARRAY) && !(vtop->type.t & VT_VLA)
4209 && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
4210 vtop->r |= lvalue_type(vtop->type.t);
4211 /* if bound checking, the referenced pointer must be checked */
4212 #ifdef CONFIG_TCC_BCHECK
4213 if (tcc_state->do_bounds_check)
4214 vtop->r |= VT_MUSTBOUND;
4215 #endif
4219 /* pass a parameter to a function and do type checking and casting */
4220 static void gfunc_param_typed(Sym *func, Sym *arg)
4222 int func_type;
4223 CType type;
4225 func_type = func->c;
4226 if (func_type == FUNC_OLD ||
4227 (func_type == FUNC_ELLIPSIS && arg == NULL)) {
4228 /* default casting : only need to convert float to double */
4229 if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
4230 type.t = VT_DOUBLE;
4231 gen_cast(&type);
4232 } else if (vtop->type.t & VT_BITFIELD) {
4233 type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
4234 type.ref = vtop->type.ref;
4235 gen_cast(&type);
4237 } else if (arg == NULL) {
4238 tcc_error("too many arguments to function");
4239 } else {
4240 type = arg->type;
4241 type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
4242 gen_assign_cast(&type);
4246 /* parse an expression and return its type without any side effect.
4247 If UNRY is set we parse a unary expression, otherwise a full one. */
4248 static void expr_type(CType *type, int unry)
4250 nocode_wanted++;
4251 if (unry)
4252 unary();
4253 else
4254 gexpr();
4255 *type = vtop->type;
4256 vpop();
4257 nocode_wanted--;
4260 /* parse an expression of the form '(type)' or '(expr)' and return its
4261 type */
4262 static void parse_expr_type(CType *type)
4264 int n;
4265 AttributeDef ad;
4267 skip('(');
4268 if (parse_btype(type, &ad)) {
4269 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4270 } else {
4271 expr_type(type, 0);
4273 skip(')');
4276 static void parse_type(CType *type)
4278 AttributeDef ad;
4279 int n;
4281 if (!parse_btype(type, &ad)) {
4282 expect("type");
4284 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4287 static void parse_builtin_params(int nc, const char *args)
4289 char c, sep = '(';
4290 CType t;
4291 if (nc)
4292 nocode_wanted++;
4293 next();
4294 while ((c = *args++)) {
4295 skip(sep);
4296 sep = ',';
4297 switch (c) {
4298 case 'e': expr_eq(); continue;
4299 case 't': parse_type(&t); vpush(&t); continue;
4300 default: tcc_error("internal error"); break;
4303 skip(')');
4304 if (nc)
4305 nocode_wanted--;
4308 ST_FUNC void unary(void)
4310 int n, t, align, size, r, sizeof_caller;
4311 CType type;
4312 Sym *s;
4313 AttributeDef ad;
4315 sizeof_caller = in_sizeof;
4316 in_sizeof = 0;
4317 /* XXX: GCC 2.95.3 does not generate a jump table for this switch,
4318 although it would be better here */
4319 tok_next:
4320 switch(tok) {
4321 case TOK_EXTENSION:
4322 next();
4323 goto tok_next;
4324 case TOK_CINT:
4325 case TOK_CCHAR:
4326 case TOK_LCHAR:
4327 t = VT_INT;
4328 push_tokc:
4329 type.t = t;
4330 type.ref = 0;
4331 vsetc(&type, VT_CONST, &tokc);
4332 next();
4333 break;
4334 case TOK_CUINT:
4335 t = VT_INT | VT_UNSIGNED;
4336 goto push_tokc;
4337 case TOK_CLLONG:
4338 t = VT_LLONG;
4339 goto push_tokc;
4340 case TOK_CULLONG:
4341 t = VT_LLONG | VT_UNSIGNED;
4342 goto push_tokc;
4343 case TOK_CFLOAT:
4344 t = VT_FLOAT;
4345 goto push_tokc;
4346 case TOK_CDOUBLE:
4347 t = VT_DOUBLE;
4348 goto push_tokc;
4349 case TOK_CLDOUBLE:
4350 t = VT_LDOUBLE;
4351 goto push_tokc;
4353 case TOK___FUNCTION__:
4354 if (!gnu_ext)
4355 goto tok_identifier;
4356 /* fall thru */
4357 case TOK___FUNC__:
4359 void *ptr;
4360 int len;
4361 /* special function name identifier */
4362 len = strlen(funcname) + 1;
4363 /* generate char[len] type */
4364 type.t = VT_BYTE;
4365 mk_pointer(&type);
4366 type.t |= VT_ARRAY;
4367 type.ref->c = len;
4368 vpush_ref(&type, data_section, data_section->data_offset, len);
4369 ptr = section_ptr_add(data_section, len);
4370 memcpy(ptr, funcname, len);
4371 next();
4373 break;
4374 case TOK_LSTR:
4375 #ifdef TCC_TARGET_PE
4376 t = VT_SHORT | VT_UNSIGNED;
4377 #else
4378 t = VT_INT;
4379 #endif
4380 goto str_init;
4381 case TOK_STR:
4382 /* string parsing */
4383 t = VT_BYTE;
4384 str_init:
4385 t |= VT_CONSTANT;
4386 type.t = t;
4387 mk_pointer(&type);
4388 type.t |= VT_ARRAY;
4389 memset(&ad, 0, sizeof(AttributeDef));
4390 decl_initializer_alloc(&type, &ad, VT_CONST, 2, 0, 0);
4391 break;
4392 case '(':
4393 next();
4394 /* cast ? */
4395 if (parse_btype(&type, &ad)) {
4396 type_decl(&type, &ad, &n, TYPE_ABSTRACT);
4397 skip(')');
4398 /* check ISOC99 compound literal */
4399 if (tok == '{') {
4400 /* data is allocated locally by default */
4401 if (global_expr)
4402 r = VT_CONST;
4403 else
4404 r = VT_LOCAL;
4405 /* all except arrays are lvalues */
4406 if (!(type.t & VT_ARRAY))
4407 r |= lvalue_type(type.t);
4408 memset(&ad, 0, sizeof(AttributeDef));
4409 decl_initializer_alloc(&type, &ad, r, 1, 0, 0);
4410 } else {
4411 if (sizeof_caller) {
4412 vpush(&type);
4413 return;
4415 unary();
4416 gen_cast(&type);
4418 } else if (tok == '{') {
4419 int saved_nocode_wanted = nocode_wanted;
4420 if (const_wanted)
4421 tcc_error("expected constant");
4422 /* save all registers */
4423 save_regs(0);
4424 /* statement expression : we do not accept break/continue
4425 inside as GCC does. We do retain the nocode_wanted state,
4426 as statement expressions can't ever be entered from the
4427 outside, so any reactivation of code emission (from labels
4428 or loop heads) can be disabled again after the end of it. */
4429 block(NULL, NULL, 1);
4430 nocode_wanted = saved_nocode_wanted;
4431 skip(')');
4432 } else {
4433 gexpr();
4434 skip(')');
4436 break;
4437 case '*':
4438 next();
4439 unary();
4440 indir();
4441 break;
4442 case '&':
4443 next();
4444 unary();
4445 /* function names must be treated as function pointers,
4446 except for unary '&' and sizeof. Since we consider that
4447 functions are not lvalues, we only have to handle it
4448 there and in function calls. */
4449 /* arrays can also be used although they are not lvalues */
4450 if ((vtop->type.t & VT_BTYPE) != VT_FUNC &&
4451 !(vtop->type.t & VT_ARRAY))
4452 test_lvalue();
4453 mk_pointer(&vtop->type);
4454 gaddrof();
4455 break;
4456 case '!':
4457 next();
4458 unary();
4459 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4460 CType boolean;
4461 boolean.t = VT_BOOL;
4462 gen_cast(&boolean);
4463 vtop->c.i = !vtop->c.i;
4464 } else if ((vtop->r & VT_VALMASK) == VT_CMP)
4465 vtop->c.i ^= 1;
4466 else {
4467 save_regs(1);
4468 vseti(VT_JMP, gvtst(1, 0));
4470 break;
4471 case '~':
4472 next();
4473 unary();
4474 vpushi(-1);
4475 gen_op('^');
4476 break;
4477 case '+':
4478 next();
4479 unary();
4480 if ((vtop->type.t & VT_BTYPE) == VT_PTR)
4481 tcc_error("pointer not accepted for unary plus");
4482 /* In order to force cast, we add zero, except for floating point
4483 where we really need a no-op (otherwise -0.0 will be transformed
4484 into +0.0). */
4485 if (!is_float(vtop->type.t)) {
4486 vpushi(0);
4487 gen_op('+');
4489 break;
4490 case TOK_SIZEOF:
4491 case TOK_ALIGNOF1:
4492 case TOK_ALIGNOF2:
4493 t = tok;
4494 next();
4495 in_sizeof++;
4496 expr_type(&type, 1); // unary() resets in_sizeof back to 0
4497 size = type_size(&type, &align);
4498 if (t == TOK_SIZEOF) {
4499 if (!(type.t & VT_VLA)) {
4500 if (size < 0)
4501 tcc_error("sizeof applied to an incomplete type");
4502 vpushs(size);
4503 } else {
4504 vla_runtime_type_size(&type, &align);
4506 } else {
4507 vpushs(align);
4509 vtop->type.t |= VT_UNSIGNED;
4510 break;
4512 case TOK_builtin_expect:
4513 /* __builtin_expect is a no-op for now */
4514 parse_builtin_params(0, "ee");
4515 vpop();
4516 break;
4517 case TOK_builtin_types_compatible_p:
4518 parse_builtin_params(0, "tt");
4519 vtop[-1].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
4520 vtop[0].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
4521 n = is_compatible_types(&vtop[-1].type, &vtop[0].type);
4522 vtop -= 2;
4523 vpushi(n);
4524 break;
4525 case TOK_builtin_choose_expr:
4527 int64_t c;
4528 next();
4529 skip('(');
4530 c = expr_const64();
4531 skip(',');
4532 if (!c) {
4533 nocode_wanted++;
4535 expr_eq();
4536 if (!c) {
4537 vpop();
4538 nocode_wanted--;
4540 skip(',');
4541 if (c) {
4542 nocode_wanted++;
4544 expr_eq();
4545 if (c) {
4546 vpop();
4547 nocode_wanted--;
4549 skip(')');
4551 break;
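/* Usage sketch: the first argument must be an integer constant
   expression here (expr_const64), and while both branches are parsed,
   only the selected one generates code, e.g.

       int x = __builtin_choose_expr(sizeof(long) == 8, 64, 32);

   leaves just the chosen constant on the value stack. */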
4552 case TOK_builtin_constant_p:
4553 parse_builtin_params(1, "e");
4554 n = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
4555 vtop--;
4556 vpushi(n);
4557 break;
4558 case TOK_builtin_frame_address:
4559 case TOK_builtin_return_address:
4561 int tok1 = tok;
4562 int level;
4563 CType type;
4564 next();
4565 skip('(');
4566 if (tok != TOK_CINT) {
4567 tcc_error("%s only takes positive integers",
4568 tok1 == TOK_builtin_return_address ?
4569 "__builtin_return_address" :
4570 "__builtin_frame_address");
4572 level = (uint32_t)tokc.i;
4573 next();
4574 skip(')');
4575 type.t = VT_VOID;
4576 mk_pointer(&type);
4577 vset(&type, VT_LOCAL, 0); /* local frame */
4578 while (level--) {
4579 mk_pointer(&vtop->type);
4580 indir(); /* -> parent frame */
4582 if (tok1 == TOK_builtin_return_address) {
4583 // assume return address is just above frame pointer on stack
4584 vpushi(PTR_SIZE);
4585 gen_op('+');
4586 mk_pointer(&vtop->type);
4587 indir();
4590 break;
4591 #ifdef TCC_TARGET_X86_64
4592 #ifdef TCC_TARGET_PE
4593 case TOK_builtin_va_start:
4594 parse_builtin_params(0, "ee");
4595 r = vtop->r & VT_VALMASK;
4596 if (r == VT_LLOCAL)
4597 r = VT_LOCAL;
4598 if (r != VT_LOCAL)
4599 tcc_error("__builtin_va_start expects a local variable");
4600 vtop->r = r;
4601 vtop->type = char_pointer_type;
4602 vtop->c.i += 8;
4603 vstore();
4604 break;
4605 #else
4606 case TOK_builtin_va_arg_types:
4607 parse_builtin_params(0, "t");
4608 vpushi(classify_x86_64_va_arg(&vtop->type));
4609 vswap();
4610 vpop();
4611 break;
4612 #endif
4613 #endif
4615 #ifdef TCC_TARGET_ARM64
4616 case TOK___va_start: {
4617 parse_builtin_params(0, "ee");
4618 //xx check types
4619 gen_va_start();
4620 vpushi(0);
4621 vtop->type.t = VT_VOID;
4622 break;
4624 case TOK___va_arg: {
4625 CType type;
4626 parse_builtin_params(0, "et");
4627 type = vtop->type;
4628 vpop();
4629 //xx check types
4630 gen_va_arg(&type);
4631 vtop->type = type;
4632 break;
4634 case TOK___arm64_clear_cache: {
4635 parse_builtin_params(0, "ee");
4636 gen_clear_cache();
4637 vpushi(0);
4638 vtop->type.t = VT_VOID;
4639 break;
4641 #endif
4642 /* pre operations */
4643 case TOK_INC:
4644 case TOK_DEC:
4645 t = tok;
4646 next();
4647 unary();
4648 inc(0, t);
4649 break;
4650 case '-':
4651 next();
4652 unary();
4653 t = vtop->type.t & VT_BTYPE;
4654 if (is_float(t)) {
4655 /* In IEEE negate(x) isn't subtract(0,x), but rather
4656 subtract(-0, x). */
4657 vpush(&vtop->type);
4658 if (t == VT_FLOAT)
4659 vtop->c.f = -1.0 * 0.0;
4660 else if (t == VT_DOUBLE)
4661 vtop->c.d = -1.0 * 0.0;
4662 else
4663 vtop->c.ld = -1.0 * 0.0;
4664 } else
4665 vpushi(0);
4666 vswap();
4667 gen_op('-');
4668 break;
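/* Worked IEEE example for the float path above: negation must be
   (-0.0) - x rather than 0.0 - x, because 0.0 - 0.0 yields +0.0
   (the wrong sign for -x when x is +0.0) while -0.0 - 0.0 yields
   -0.0.  For integers the generic 0 - x form is exact. */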
4669 case TOK_LAND:
4670 if (!gnu_ext)
4671 goto tok_identifier;
4672 next();
4673 /* allow taking the address of a label */
4674 if (tok < TOK_UIDENT)
4675 expect("label identifier");
4676 s = label_find(tok);
4677 if (!s) {
4678 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
4679 } else {
4680 if (s->r == LABEL_DECLARED)
4681 s->r = LABEL_FORWARD;
4683 if (!s->type.t) {
4684 s->type.t = VT_VOID;
4685 mk_pointer(&s->type);
4686 s->type.t |= VT_STATIC;
4688 vpushsym(&s->type, s);
4689 next();
4690 break;
4692 // special qNaN, sNaN and infinity values
4693 case TOK___NAN__:
4694 vpush64(VT_DOUBLE, 0x7ff8000000000000ULL);
4695 next();
4696 break;
4697 case TOK___SNAN__:
4698 vpush64(VT_DOUBLE, 0x7ff0000000000001ULL);
4699 next();
4700 break;
4701 case TOK___INF__:
4702 vpush64(VT_DOUBLE, 0x7ff0000000000000ULL);
4703 next();
4704 break;
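/* The constants above follow the IEEE-754 binary64 encoding: an
   all-ones exponent with a non-zero mantissa is a NaN (0x7ff8...0000
   has the quiet bit set, 0x7ff0...0001 is a signalling NaN), and an
   all-ones exponent with a zero mantissa (0x7ff0...0000) is
   +infinity. */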
4706 default:
4707 tok_identifier:
4708 t = tok;
4709 next();
4710 if (t < TOK_UIDENT)
4711 expect("identifier");
4712 s = sym_find(t);
4713 if (!s) {
4714 const char *name = get_tok_str(t, NULL);
4715 if (tok != '(')
4716 tcc_error("'%s' undeclared", name);
4717 /* for simple function calls, we tolerate undeclared
4718 external reference to int() function */
4719 if (tcc_state->warn_implicit_function_declaration
4720 #ifdef TCC_TARGET_PE
4721 /* people must be warned about using undeclared WINAPI functions
4722 (which usually start with uppercase letter) */
4723 || (name[0] >= 'A' && name[0] <= 'Z')
4724 #endif
4726 tcc_warning("implicit declaration of function '%s'", name);
4727 s = external_global_sym(t, &func_old_type, 0);
4730 r = s->r;
4731 /* A symbol that has a register is a local register variable,
4732 which starts out as VT_LOCAL value. */
4733 if ((r & VT_VALMASK) < VT_CONST)
4734 r = (r & ~VT_VALMASK) | VT_LOCAL;
4736 vset(&s->type, r, s->c);
4737 /* Point to s as backpointer (even without r&VT_SYM).
4738 Will be used by at least the x86 inline asm parser for
4739 regvars. */
4740 vtop->sym = s;
4741 if (vtop->r & VT_SYM) {
4742 vtop->c.i = 0;
4744 break;
4747 /* post operations */
4748 while (1) {
4749 if (tok == TOK_INC || tok == TOK_DEC) {
4750 inc(1, tok);
4751 next();
4752 } else if (tok == '.' || tok == TOK_ARROW || tok == TOK_CDOUBLE) {
4753 int qualifiers;
4754 /* field */
4755 if (tok == TOK_ARROW)
4756 indir();
4757 qualifiers = vtop->type.t & (VT_CONSTANT | VT_VOLATILE);
4758 test_lvalue();
4759 gaddrof();
4760 /* expect pointer on structure */
4761 if ((vtop->type.t & VT_BTYPE) != VT_STRUCT)
4762 expect("struct or union");
4763 if (tok == TOK_CDOUBLE)
4764 expect("field name");
4765 next();
4766 if (tok == TOK_CINT || tok == TOK_CUINT)
4767 expect("field name");
4768 s = find_field(&vtop->type, tok);
4769 if (!s)
4770 tcc_error("field not found: %s", get_tok_str(tok & ~SYM_FIELD, &tokc));
4771 /* add field offset to pointer */
4772 vtop->type = char_pointer_type; /* change type to 'char *' */
4773 vpushi(s->c);
4774 gen_op('+');
4775 /* change type to field type, and set to lvalue */
4776 vtop->type = s->type;
4777 vtop->type.t |= qualifiers;
4778 /* an array is never an lvalue */
4779 if (!(vtop->type.t & VT_ARRAY)) {
4780 vtop->r |= lvalue_type(vtop->type.t);
4781 #ifdef CONFIG_TCC_BCHECK
4782 /* if bound checking, the referenced pointer must be checked */
4783 if (tcc_state->do_bounds_check && (vtop->r & VT_VALMASK) != VT_LOCAL)
4784 vtop->r |= VT_MUSTBOUND;
4785 #endif
4787 next();
4788 } else if (tok == '[') {
4789 next();
4790 gexpr();
4791 gen_op('+');
4792 indir();
4793 skip(']');
4794 } else if (tok == '(') {
4795 SValue ret;
4796 Sym *sa;
4797 int nb_args, ret_nregs, ret_align, regsize, variadic;
4799 /* function call */
4800 if ((vtop->type.t & VT_BTYPE) != VT_FUNC) {
4801 /* pointer test (no array accepted) */
4802 if ((vtop->type.t & (VT_BTYPE | VT_ARRAY)) == VT_PTR) {
4803 vtop->type = *pointed_type(&vtop->type);
4804 if ((vtop->type.t & VT_BTYPE) != VT_FUNC)
4805 goto error_func;
4806 } else {
4807 error_func:
4808 expect("function pointer");
4810 } else {
4811 vtop->r &= ~VT_LVAL; /* no lvalue */
4813 /* get return type */
4814 s = vtop->type.ref;
4815 next();
4816 sa = s->next; /* first parameter */
4817 nb_args = regsize = 0;
4818 ret.r2 = VT_CONST;
4819 /* compute first implicit argument if a structure is returned */
4820 if ((s->type.t & VT_BTYPE) == VT_STRUCT) {
4821 variadic = (s->c == FUNC_ELLIPSIS);
4822 ret_nregs = gfunc_sret(&s->type, variadic, &ret.type,
4823 &ret_align, &regsize);
4824 if (!ret_nregs) {
4825 /* get some space for the returned structure */
4826 size = type_size(&s->type, &align);
4827 #ifdef TCC_TARGET_ARM64
4828 /* On arm64, a small struct is returned in registers.
4829 It is much easier to write it to memory if we know
4830 that we are allowed to write some extra bytes, so
4831 round the allocated space up to a power of 2: */
4832 if (size < 16)
4833 while (size & (size - 1))
4834 size = (size | (size - 1)) + 1;
4835 #endif
4836 loc = (loc - size) & -align;
4837 ret.type = s->type;
4838 ret.r = VT_LOCAL | VT_LVAL;
4839 /* pass it as 'int' to avoid structure arg passing
4840 problems */
4841 vseti(VT_LOCAL, loc);
4842 ret.c = vtop->c;
4843 nb_args++;
4845 } else {
4846 ret_nregs = 1;
4847 ret.type = s->type;
4850 if (ret_nregs) {
4851 /* return in register */
4852 if (is_float(ret.type.t)) {
4853 ret.r = reg_fret(ret.type.t);
4854 #ifdef TCC_TARGET_X86_64
4855 if ((ret.type.t & VT_BTYPE) == VT_QFLOAT)
4856 ret.r2 = REG_QRET;
4857 #endif
4858 } else {
4859 #ifndef TCC_TARGET_ARM64
4860 #ifdef TCC_TARGET_X86_64
4861 if ((ret.type.t & VT_BTYPE) == VT_QLONG)
4862 #else
4863 if ((ret.type.t & VT_BTYPE) == VT_LLONG)
4864 #endif
4865 ret.r2 = REG_LRET;
4866 #endif
4867 ret.r = REG_IRET;
4869 ret.c.i = 0;
4871 if (tok != ')') {
4872 for(;;) {
4873 expr_eq();
4874 gfunc_param_typed(s, sa);
4875 nb_args++;
4876 if (sa)
4877 sa = sa->next;
4878 if (tok == ')')
4879 break;
4880 skip(',');
4883 if (sa)
4884 tcc_error("too few arguments to function");
4885 skip(')');
4886 gfunc_call(nb_args);
4888 /* return value */
4889 for (r = ret.r + ret_nregs + !ret_nregs; r-- > ret.r;) {
4890 vsetc(&ret.type, r, &ret.c);
4891 vtop->r2 = ret.r2; /* Loop only happens when r2 is VT_CONST */
4894 /* handle packed struct return */
4895 if (((s->type.t & VT_BTYPE) == VT_STRUCT) && ret_nregs) {
4896 int addr, offset;
4898 size = type_size(&s->type, &align);
4899 /* We're often writing whole regs, so make sure there's enough
4900 space. Assume the register size is a power of 2. */
4901 if (regsize > align)
4902 align = regsize;
4903 loc = (loc - size) & -align;
4904 addr = loc;
4905 offset = 0;
4906 for (;;) {
4907 vset(&ret.type, VT_LOCAL | VT_LVAL, addr + offset);
4908 vswap();
4909 vstore();
4910 vtop--;
4911 if (--ret_nregs == 0)
4912 break;
4913 offset += regsize;
4915 vset(&s->type, VT_LOCAL | VT_LVAL, addr);
4917 } else {
4918 break;
4923 ST_FUNC void expr_prod(void)
4925 int t;
4927 unary();
4928 while (tok == '*' || tok == '/' || tok == '%') {
4929 t = tok;
4930 next();
4931 unary();
4932 gen_op(t);
4936 ST_FUNC void expr_sum(void)
4938 int t;
4940 expr_prod();
4941 while (tok == '+' || tok == '-') {
4942 t = tok;
4943 next();
4944 expr_prod();
4945 gen_op(t);
4949 static void expr_shift(void)
4951 int t;
4953 expr_sum();
4954 while (tok == TOK_SHL || tok == TOK_SAR) {
4955 t = tok;
4956 next();
4957 expr_sum();
4958 gen_op(t);
4962 static void expr_cmp(void)
4964 int t;
4966 expr_shift();
4967 while ((tok >= TOK_ULE && tok <= TOK_GT) ||
4968 tok == TOK_ULT || tok == TOK_UGE) {
4969 t = tok;
4970 next();
4971 expr_shift();
4972 gen_op(t);
4976 static void expr_cmpeq(void)
4978 int t;
4980 expr_cmp();
4981 while (tok == TOK_EQ || tok == TOK_NE) {
4982 t = tok;
4983 next();
4984 expr_cmp();
4985 gen_op(t);
4989 static void expr_and(void)
4991 expr_cmpeq();
4992 while (tok == '&') {
4993 next();
4994 expr_cmpeq();
4995 gen_op('&');
4999 static void expr_xor(void)
5001 expr_and();
5002 while (tok == '^') {
5003 next();
5004 expr_and();
5005 gen_op('^');
5009 static void expr_or(void)
5011 expr_xor();
5012 while (tok == '|') {
5013 next();
5014 expr_xor();
5015 gen_op('|');
5019 static void expr_land(void)
5021 expr_or();
5022 if (tok == TOK_LAND) {
5023 int t = 0;
5024 for(;;) {
5025 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5026 CType ctb;
5027 ctb.t = VT_BOOL;
5028 gen_cast(&ctb);
5029 if (vtop->c.i) {
5030 vpop();
5031 } else {
5032 nocode_wanted++;
5033 while (tok == TOK_LAND) {
5034 next();
5035 expr_or();
5036 vpop();
5038 nocode_wanted--;
5039 if (t)
5040 gsym(t);
5041 gen_cast(&int_type);
5042 break;
5044 } else {
5045 if (!t)
5046 save_regs(1);
5047 t = gvtst(1, t);
5049 if (tok != TOK_LAND) {
5050 if (t)
5051 vseti(VT_JMPI, t);
5052 else
5053 vpushi(1);
5054 break;
5056 next();
5057 expr_or();
5062 static void expr_lor(void)
5064 expr_land();
5065 if (tok == TOK_LOR) {
5066 int t = 0;
5067 for(;;) {
5068 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5069 CType ctb;
5070 ctb.t = VT_BOOL;
5071 gen_cast(&ctb);
5072 if (!vtop->c.i) {
5073 vpop();
5074 } else {
5075 nocode_wanted++;
5076 while (tok == TOK_LOR) {
5077 next();
5078 expr_land();
5079 vpop();
5081 nocode_wanted--;
5082 if (t)
5083 gsym(t);
5084 gen_cast(&int_type);
5085 break;
5087 } else {
5088 if (!t)
5089 save_regs(1);
5090 t = gvtst(0, t);
5092 if (tok != TOK_LOR) {
5093 if (t)
5094 vseti(VT_JMP, t);
5095 else
5096 vpushi(0);
5097 break;
5099 next();
5100 expr_land();
5105 /* Assuming vtop is a value used in a conditional context
5106 (i.e. compared with zero) return 0 if it's false, 1 if
5107 true and -1 if it can't be statically determined. */
5108 static int condition_3way(void)
5110 int c = -1;
5111 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
5112 (!(vtop->r & VT_SYM) ||
5113 !(vtop->sym->type.t & VT_WEAK))) {
5114 CType boolean;
5115 boolean.t = VT_BOOL;
5116 vdup();
5117 gen_cast(&boolean);
5118 c = vtop->c.i;
5119 vpop();
5121 return c;
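/* For instance, this returns 1 after parsing 'if (1)', 0 after
   'if (0)', and -1 for a run-time condition such as 'if (x)'.  Weak
   symbols are deliberately treated as not statically known, since they
   may still resolve to address 0 at link time. */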
5124 static void expr_cond(void)
5126 int tt, u, r1, r2, rc, t1, t2, bt1, bt2, islv, c, g;
5127 SValue sv;
5128 CType type, type1, type2;
5130 expr_lor();
5131 if (tok == '?') {
5132 next();
5133 c = condition_3way();
5134 g = (tok == ':' && gnu_ext);
5135 if (c < 0) {
5136 /* needed to avoid having different registers saved in
5137 each branch */
5138 if (is_float(vtop->type.t)) {
5139 rc = RC_FLOAT;
5140 #ifdef TCC_TARGET_X86_64
5141 if ((vtop->type.t & VT_BTYPE) == VT_LDOUBLE) {
5142 rc = RC_ST0;
5144 #endif
5145 } else
5146 rc = RC_INT;
5147 gv(rc);
5148 save_regs(1);
5149 if (g)
5150 gv_dup();
5151 tt = gvtst(1, 0);
5153 } else {
5154 if (!g)
5155 vpop();
5156 tt = 0;
5159 if (1) {
5160 if (c == 0)
5161 nocode_wanted++;
5162 if (!g)
5163 gexpr();
5165 type1 = vtop->type;
5166 sv = *vtop; /* save value to handle it later */
5167 vtop--; /* no vpop so that FP stack is not flushed */
5168 skip(':');
5170 u = 0;
5171 if (c < 0)
5172 u = gjmp(0);
5173 gsym(tt);
5175 if (c == 0)
5176 nocode_wanted--;
5177 if (c == 1)
5178 nocode_wanted++;
5179 expr_cond();
5180 if (c == 1)
5181 nocode_wanted--;
5183 type2 = vtop->type;
5184 t1 = type1.t;
5185 bt1 = t1 & VT_BTYPE;
5186 t2 = type2.t;
5187 bt2 = t2 & VT_BTYPE;
5188 /* cast operands to correct type according to ISOC rules */
5189 if (is_float(bt1) || is_float(bt2)) {
5190 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
5191 type.t = VT_LDOUBLE;
5193 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
5194 type.t = VT_DOUBLE;
5195 } else {
5196 type.t = VT_FLOAT;
5198 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
5199 /* cast to biggest op */
5200 type.t = VT_LLONG;
5201 /* convert to unsigned if it does not fit in a long long */
5202 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
5203 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
5204 type.t |= VT_UNSIGNED;
5205 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
5206 /* If one is a null ptr constant the result type
5207 is the other. */
5208 if (is_null_pointer (vtop))
5209 type = type1;
5210 else if (is_null_pointer (&sv))
5211 type = type2;
5212 /* XXX: test pointer compatibility, C99 has more elaborate
5213 rules here. */
5214 else
5215 type = type1;
5216 } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
5217 /* XXX: test function pointer compatibility */
5218 type = bt1 == VT_FUNC ? type1 : type2;
5219 } else if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
5220 /* XXX: test structure compatibility */
5221 type = bt1 == VT_STRUCT ? type1 : type2;
5222 } else if (bt1 == VT_VOID || bt2 == VT_VOID) {
5223 /* NOTE: as an extension, we accept void on only one side */
5224 type.t = VT_VOID;
5225 } else {
5226 /* integer operations */
5227 type.t = VT_INT;
5228 /* convert to unsigned if it does not fit in an integer */
5229 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
5230 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
5231 type.t |= VT_UNSIGNED;
5233 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
5234 that `(expr ? a : b).mem` does not error with "lvalue expected" */
5235 islv = (vtop->r & VT_LVAL) && (sv.r & VT_LVAL) && VT_STRUCT == (type.t & VT_BTYPE);
5236 islv &= c < 0;
5238 /* now we convert second operand */
5239 if (c != 1) {
5240 gen_cast(&type);
5241 if (islv) {
5242 mk_pointer(&vtop->type);
5243 gaddrof();
5244 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5245 gaddrof();
5248 rc = RC_INT;
5249 if (is_float(type.t)) {
5250 rc = RC_FLOAT;
5251 #ifdef TCC_TARGET_X86_64
5252 if ((type.t & VT_BTYPE) == VT_LDOUBLE) {
5253 rc = RC_ST0;
5255 #endif
5256 } else if ((type.t & VT_BTYPE) == VT_LLONG) {
5257 /* for long longs, we use fixed registers to avoid having
5258 to handle a complicated move */
5259 rc = RC_IRET;
5262 tt = r2 = 0;
5263 if (c < 0) {
5264 r2 = gv(rc);
5265 tt = gjmp(0);
5267 gsym(u);
5269 /* this is horrible, but we must also convert first
5270 operand */
5271 if (c != 0) {
5272 *vtop = sv;
5273 gen_cast(&type);
5274 if (islv) {
5275 mk_pointer(&vtop->type);
5276 gaddrof();
5277 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5278 gaddrof();
5281 if (c < 0) {
5282 r1 = gv(rc);
5283 move_reg(r2, r1, type.t);
5284 vtop->r = r2;
5285 gsym(tt);
5286 if (islv)
5287 indir();
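/* A few concrete instances of the result-type rules above, with 'c' a
   run-time condition and 'p' some object pointer (illustrative only):
       c ? 1 : 2.5        has type double
       c ? 1u : 2LL       has type long long
       c ? p : (void *)0  has the type of p (null pointer constant)
       c ? s1 : s2        keeps struct lvalue-ness via the &/indir trick
                          so that (c ? s1 : s2).member still works. */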
5293 static void expr_eq(void)
5295 int t;
5297 expr_cond();
5298 if (tok == '=' ||
5299 (tok >= TOK_A_MOD && tok <= TOK_A_DIV) ||
5300 tok == TOK_A_XOR || tok == TOK_A_OR ||
5301 tok == TOK_A_SHL || tok == TOK_A_SAR) {
5302 test_lvalue();
5303 t = tok;
5304 next();
5305 if (t == '=') {
5306 expr_eq();
5307 } else {
5308 vdup();
5309 expr_eq();
5310 gen_op(t & 0x7f);
5312 vstore();
5316 ST_FUNC void gexpr(void)
5318 while (1) {
5319 expr_eq();
5320 if (tok != ',')
5321 break;
5322 vpop();
5323 next();
5327 /* parse a constant expression and return value in vtop. */
5328 static void expr_const1(void)
5330 const_wanted++;
5331 expr_cond();
5332 const_wanted--;
5335 /* parse an integer constant and return its value. */
5336 static inline int64_t expr_const64(void)
5338 int64_t c;
5339 expr_const1();
5340 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
5341 expect("constant expression");
5342 c = vtop->c.i;
5343 vpop();
5344 return c;
5347 /* parse an integer constant and return its value.
5348 Complain if it doesn't fit 32bit (signed or unsigned). */
5349 ST_FUNC int expr_const(void)
5351 int c;
5352 int64_t wc = expr_const64();
5353 c = wc;
5354 if (c != wc && (unsigned)c != wc)
5355 tcc_error("constant exceeds 32 bit");
5356 return c;
5359 /* return the label token if current token is a label, otherwise
5360 return zero */
5361 static int is_label(void)
5363 int last_tok;
5365 /* fast test first */
5366 if (tok < TOK_UIDENT)
5367 return 0;
5368 /* no need to save tokc because tok is an identifier */
5369 last_tok = tok;
5370 next();
5371 if (tok == ':') {
5372 return last_tok;
5373 } else {
5374 unget_tok(last_tok);
5375 return 0;
5379 #ifndef TCC_TARGET_ARM64
5380 static void gfunc_return(CType *func_type)
5382 if ((func_type->t & VT_BTYPE) == VT_STRUCT) {
5383 CType type, ret_type;
5384 int ret_align, ret_nregs, regsize;
5385 ret_nregs = gfunc_sret(func_type, func_var, &ret_type,
5386 &ret_align, &regsize);
5387 if (0 == ret_nregs) {
5388 /* if returning structure, must copy it to implicit
5389 first pointer arg location */
5390 type = *func_type;
5391 mk_pointer(&type);
5392 vset(&type, VT_LOCAL | VT_LVAL, func_vc);
5393 indir();
5394 vswap();
5395 /* copy structure value to pointer */
5396 vstore();
5397 } else {
5398 /* returning structure packed into registers */
5399 int r, size, addr, align;
5400 size = type_size(func_type,&align);
5401 if ((vtop->r != (VT_LOCAL | VT_LVAL) ||
5402 (vtop->c.i & (ret_align-1)))
5403 && (align & (ret_align-1))) {
5404 loc = (loc - size) & -ret_align;
5405 addr = loc;
5406 type = *func_type;
5407 vset(&type, VT_LOCAL | VT_LVAL, addr);
5408 vswap();
5409 vstore();
5410 vpop();
5411 vset(&ret_type, VT_LOCAL | VT_LVAL, addr);
5413 vtop->type = ret_type;
5414 if (is_float(ret_type.t))
5415 r = rc_fret(ret_type.t);
5416 else
5417 r = RC_IRET;
5419 if (ret_nregs == 1)
5420 gv(r);
5421 else {
5422 for (;;) {
5423 vdup();
5424 gv(r);
5425 vpop();
5426 if (--ret_nregs == 0)
5427 break;
5428 /* We assume that when a structure is returned in multiple
5429 registers, their classes are consecutive values of the
5430 sequence s(n) = 2^n */
5431 r <<= 1;
5432 vtop->c.i += regsize;
5436 } else if (is_float(func_type->t)) {
5437 gv(rc_fret(func_type->t));
5438 } else {
5439 gv(RC_IRET);
5441 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
5443 #endif
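/* Rough summary of the two struct-return paths above: if gfunc_sret()
   reports zero registers, the caller supplied a hidden pointer and the
   return value is copied through it; otherwise the value is loaded into
   ret_nregs return registers, whose register classes are assumed to
   follow the doubling sequence handled by 'r <<= 1' above. */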
5445 static int case_cmp(const void *pa, const void *pb)
5447 int64_t a = (*(struct case_t**) pa)->v1;
5448 int64_t b = (*(struct case_t**) pb)->v1;
5449 return a < b ? -1 : a > b;
5452 static void gcase(struct case_t **base, int len, int *bsym)
5454 struct case_t *p;
5455 int e;
5456 int ll = (vtop->type.t & VT_BTYPE) == VT_LLONG;
5457 gv(RC_INT);
5458 while (len > 4) {
5459 /* binary search */
5460 p = base[len/2];
5461 vdup();
5462 if (ll)
5463 vpushll(p->v2);
5464 else
5465 vpushi(p->v2);
5466 gen_op(TOK_LE);
5467 e = gtst(1, 0);
5468 vdup();
5469 if (ll)
5470 vpushll(p->v1);
5471 else
5472 vpushi(p->v1);
5473 gen_op(TOK_GE);
5474 gtst_addr(0, p->sym); /* v1 <= x <= v2 */
5475 /* x < v1 */
5476 gcase(base, len/2, bsym);
5477 if (cur_switch->def_sym)
5478 gjmp_addr(cur_switch->def_sym);
5479 else
5480 *bsym = gjmp(*bsym);
5481 /* x > v2 */
5482 gsym(e);
5483 e = len/2 + 1;
5484 base += e; len -= e;
5486 /* linear scan */
5487 while (len--) {
5488 p = *base++;
5489 vdup();
5490 if (ll)
5491 vpushll(p->v2);
5492 else
5493 vpushi(p->v2);
5494 if (p->v1 == p->v2) {
5495 gen_op(TOK_EQ);
5496 gtst_addr(0, p->sym);
5497 } else {
5498 gen_op(TOK_LE);
5499 e = gtst(1, 0);
5500 vdup();
5501 if (ll)
5502 vpushll(p->v1);
5503 else
5504 vpushi(p->v1);
5505 gen_op(TOK_GE);
5506 gtst_addr(0, p->sym);
5507 gsym(e);
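/* Dispatch sketch: for more than four sorted case ranges the list is
   bisected, e.g. a 'switch' with cases 1..8 first tests the middle
   range, recurses into the lower half and continues with the upper
   half, until at most four ranges remain for the final linear scan.
   GNU case ranges ('case 1 ... 3:') are simply entries with v1 != v2. */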
5512 static void block(int *bsym, int *csym, int is_expr)
5514 int a, b, c, d, cond;
5515 Sym *s;
5517 /* generate line number info */
5518 if (tcc_state->do_debug)
5519 tcc_debug_line(tcc_state);
5521 if (is_expr) {
5522 /* default return value is (void) */
5523 vpushi(0);
5524 vtop->type.t = VT_VOID;
5527 if (tok == TOK_IF) {
5528 /* if test */
5529 int saved_nocode_wanted = nocode_wanted;
5530 next();
5531 skip('(');
5532 gexpr();
5533 skip(')');
5534 cond = condition_3way();
5535 if (cond == 1)
5536 a = 0, vpop();
5537 else
5538 a = gvtst(1, 0);
5539 if (cond == 0)
5540 nocode_wanted |= 0x20000000;
5541 block(bsym, csym, 0);
5542 if (cond != 1)
5543 nocode_wanted = saved_nocode_wanted;
5544 c = tok;
5545 if (c == TOK_ELSE) {
5546 next();
5547 d = gjmp(0);
5548 gsym(a);
5549 if (cond == 1)
5550 nocode_wanted |= 0x20000000;
5551 block(bsym, csym, 0);
5552 gsym(d); /* patch else jmp */
5553 if (cond != 0)
5554 nocode_wanted = saved_nocode_wanted;
5555 } else
5556 gsym(a);
5557 } else if (tok == TOK_WHILE) {
5558 int saved_nocode_wanted;
5559 nocode_wanted &= ~0x20000000;
5560 next();
5561 d = ind;
5562 vla_sp_restore();
5563 skip('(');
5564 gexpr();
5565 skip(')');
5566 a = gvtst(1, 0);
5567 b = 0;
5568 ++local_scope;
5569 saved_nocode_wanted = nocode_wanted;
5570 block(&a, &b, 0);
5571 nocode_wanted = saved_nocode_wanted;
5572 --local_scope;
5573 gjmp_addr(d);
5574 gsym(a);
5575 gsym_addr(b, d);
5576 } else if (tok == '{') {
5577 Sym *llabel;
5578 int block_vla_sp_loc = vla_sp_loc, saved_vlas_in_scope = vlas_in_scope;
5580 next();
5581 /* record local declaration stack position */
5582 s = local_stack;
5583 llabel = local_label_stack;
5584 ++local_scope;
5586 /* handle local label declarations */
5587 if (tok == TOK_LABEL) {
5588 next();
5589 for(;;) {
5590 if (tok < TOK_UIDENT)
5591 expect("label identifier");
5592 label_push(&local_label_stack, tok, LABEL_DECLARED);
5593 next();
5594 if (tok == ',') {
5595 next();
5596 } else {
5597 skip(';');
5598 break;
5602 while (tok != '}') {
5603 if ((a = is_label()))
5604 unget_tok(a);
5605 else
5606 decl(VT_LOCAL);
5607 if (tok != '}') {
5608 if (is_expr)
5609 vpop();
5610 block(bsym, csym, is_expr);
5613 /* pop locally defined labels */
5614 label_pop(&local_label_stack, llabel);
5615 /* pop locally defined symbols */
5616 --local_scope;
5617 /* In the is_expr case (a statement expression is finished here),
5618 vtop might refer to symbols on the local_stack. Either via the
5619 type or via vtop->sym. We can't pop those nor any that in turn
5620 might be referred to. To make it easier we don't roll back
5621 any symbols in that case; some upper level call to block() will
5622 do that. We do have to remove such symbols from the lookup
5623 tables, though. sym_pop will do that. */
5624 sym_pop(&local_stack, s, is_expr);
5626 /* Pop VLA frames and restore stack pointer if required */
5627 if (vlas_in_scope > saved_vlas_in_scope) {
5628 vla_sp_loc = saved_vlas_in_scope ? block_vla_sp_loc : vla_sp_root_loc;
5629 vla_sp_restore();
5631 vlas_in_scope = saved_vlas_in_scope;
5633 next();
5634 } else if (tok == TOK_RETURN) {
5635 next();
5636 if (tok != ';') {
5637 gexpr();
5638 gen_assign_cast(&func_vt);
5639 gfunc_return(&func_vt);
5641 skip(';');
5642 /* jump unless last stmt in top-level block */
5643 if (tok != '}' || local_scope != 1)
5644 rsym = gjmp(rsym);
5645 nocode_wanted |= 0x20000000;
5646 } else if (tok == TOK_BREAK) {
5647 /* compute jump */
5648 if (!bsym)
5649 tcc_error("cannot break");
5650 *bsym = gjmp(*bsym);
5651 next();
5652 skip(';');
5653 nocode_wanted |= 0x20000000;
5654 } else if (tok == TOK_CONTINUE) {
5655 /* compute jump */
5656 if (!csym)
5657 tcc_error("cannot continue");
5658 vla_sp_restore_root();
5659 *csym = gjmp(*csym);
5660 next();
5661 skip(';');
5662 } else if (tok == TOK_FOR) {
5663 int e;
5664 int saved_nocode_wanted;
5665 nocode_wanted &= ~0x20000000;
5666 next();
5667 skip('(');
5668 s = local_stack;
5669 ++local_scope;
5670 if (tok != ';') {
5671 /* c99 for-loop init decl? */
5672 if (!decl0(VT_LOCAL, 1, NULL)) {
5673 /* no, regular for-loop init expr */
5674 gexpr();
5675 vpop();
5678 skip(';');
5679 d = ind;
5680 c = ind;
5681 vla_sp_restore();
5682 a = 0;
5683 b = 0;
5684 if (tok != ';') {
5685 gexpr();
5686 a = gvtst(1, 0);
5688 skip(';');
5689 if (tok != ')') {
5690 e = gjmp(0);
5691 c = ind;
5692 vla_sp_restore();
5693 gexpr();
5694 vpop();
5695 gjmp_addr(d);
5696 gsym(e);
5698 skip(')');
5699 saved_nocode_wanted = nocode_wanted;
5700 block(&a, &b, 0);
5701 nocode_wanted = saved_nocode_wanted;
5702 gjmp_addr(c);
5703 gsym(a);
5704 gsym_addr(b, c);
5705 --local_scope;
5706 sym_pop(&local_stack, s, 0);
5708 } else
5709 if (tok == TOK_DO) {
5710 int saved_nocode_wanted;
5711 nocode_wanted &= ~0x20000000;
5712 next();
5713 a = 0;
5714 b = 0;
5715 d = ind;
5716 vla_sp_restore();
5717 saved_nocode_wanted = nocode_wanted;
5718 block(&a, &b, 0);
5719 skip(TOK_WHILE);
5720 skip('(');
5721 gsym(b);
5722 gexpr();
5723 c = gvtst(0, 0);
5724 gsym_addr(c, d);
5725 nocode_wanted = saved_nocode_wanted;
5726 skip(')');
5727 gsym(a);
5728 skip(';');
5729 } else
5730 if (tok == TOK_SWITCH) {
5731 struct switch_t *saved, sw;
5732 int saved_nocode_wanted = nocode_wanted;
5733 SValue switchval;
5734 next();
5735 skip('(');
5736 gexpr();
5737 skip(')');
5738 switchval = *vtop--;
5739 a = 0;
5740 b = gjmp(0); /* jump to first case */
5741 sw.p = NULL; sw.n = 0; sw.def_sym = 0;
5742 saved = cur_switch;
5743 cur_switch = &sw;
5744 block(&a, csym, 0);
5745 nocode_wanted = saved_nocode_wanted;
5746 a = gjmp(a); /* add implicit break */
5747 /* case lookup */
5748 gsym(b);
5749 qsort(sw.p, sw.n, sizeof(void*), case_cmp);
5750 for (b = 1; b < sw.n; b++)
5751 if (sw.p[b - 1]->v2 >= sw.p[b]->v1)
5752 tcc_error("duplicate case value");
5753 /* Our switch table sorting is signed, so the compared
5754 value needs to be as well when it's 64bit. */
5755 if ((switchval.type.t & VT_BTYPE) == VT_LLONG)
5756 switchval.type.t &= ~VT_UNSIGNED;
5757 vpushv(&switchval);
5758 gcase(sw.p, sw.n, &a);
5759 vpop();
5760 if (sw.def_sym)
5761 gjmp_addr(sw.def_sym);
5762 dynarray_reset(&sw.p, &sw.n);
5763 cur_switch = saved;
5764 /* break label */
5765 gsym(a);
5766 } else
5767 if (tok == TOK_CASE) {
5768 struct case_t *cr = tcc_malloc(sizeof(struct case_t));
5769 if (!cur_switch)
5770 expect("switch");
5771 nocode_wanted &= ~0x20000000;
5772 next();
5773 cr->v1 = cr->v2 = expr_const64();
5774 if (gnu_ext && tok == TOK_DOTS) {
5775 next();
5776 cr->v2 = expr_const64();
5777 if (cr->v2 < cr->v1)
5778 tcc_warning("empty case range");
5780 cr->sym = ind;
5781 dynarray_add(&cur_switch->p, &cur_switch->n, cr);
5782 skip(':');
5783 is_expr = 0;
5784 goto block_after_label;
5785 } else
5786 if (tok == TOK_DEFAULT) {
5787 next();
5788 skip(':');
5789 if (!cur_switch)
5790 expect("switch");
5791 if (cur_switch->def_sym)
5792 tcc_error("too many 'default'");
5793 cur_switch->def_sym = ind;
5794 is_expr = 0;
5795 goto block_after_label;
5796 } else
5797 if (tok == TOK_GOTO) {
5798 next();
5799 if (tok == '*' && gnu_ext) {
5800 /* computed goto */
5801 next();
5802 gexpr();
5803 if ((vtop->type.t & VT_BTYPE) != VT_PTR)
5804 expect("pointer");
5805 ggoto();
5806 } else if (tok >= TOK_UIDENT) {
5807 s = label_find(tok);
5808 /* put forward definition if needed */
5809 if (!s) {
5810 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
5811 } else {
5812 if (s->r == LABEL_DECLARED)
5813 s->r = LABEL_FORWARD;
5815 vla_sp_restore_root();
5816 if (s->r & LABEL_FORWARD)
5817 s->jnext = gjmp(s->jnext);
5818 else
5819 gjmp_addr(s->jnext);
5820 next();
5821 } else {
5822 expect("label identifier");
5824 skip(';');
5825 } else if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
5826 asm_instr();
5827 } else {
5828 b = is_label();
5829 if (b) {
5830 /* label case */
5831 next();
5832 s = label_find(b);
5833 if (s) {
5834 if (s->r == LABEL_DEFINED)
5835 tcc_error("duplicate label '%s'", get_tok_str(s->v, NULL));
5836 gsym(s->jnext);
5837 s->r = LABEL_DEFINED;
5838 } else {
5839 s = label_push(&global_label_stack, b, LABEL_DEFINED);
5841 s->jnext = ind;
5842 vla_sp_restore();
5843 /* we accept this, but it is a mistake */
5844 block_after_label:
5845 nocode_wanted &= ~0x20000000;
5846 if (tok == '}') {
5847 tcc_warning("deprecated use of label at end of compound statement");
5848 } else {
5849 if (is_expr)
5850 vpop();
5851 block(bsym, csym, is_expr);
5853 } else {
5854 /* expression case */
5855 if (tok != ';') {
5856 if (is_expr) {
5857 vpop();
5858 gexpr();
5859 } else {
5860 gexpr();
5861 vpop();
5864 skip(';');
5869 /* This skips over a stream of tokens containing balanced {} and ()
5870 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
5871 with a '{'). If STR is given, the skipped tokens are allocated and stored
5872 in *STR. This doesn't check whether () and {} are nested correctly,
5873 i.e. "({)}" is accepted. */
5874 static void skip_or_save_block(TokenString **str)
5876 int braces = tok == '{';
5877 int level = 0;
5878 if (str)
5879 *str = tok_str_alloc();
5881 while ((level > 0 || (tok != '}' && tok != ',' && tok != ';'))) {
5882 int t;
5883 if (tok == TOK_EOF) {
5884 if (str || level > 0)
5885 tcc_error("unexpected end of file");
5886 else
5887 break;
5889 if (str)
5890 tok_str_add_tok(*str);
5891 t = tok;
5892 next();
5893 if (t == '{' || t == '(') {
5894 level++;
5895 } else if (t == '}' || t == ')') {
5896 level--;
5897 if (level == 0 && braces && t == '}')
5898 break;
5901 if (str) {
5902 tok_str_add(*str, -1);
5903 tok_str_add(*str, 0);
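/* Two illustrative inputs (names are placeholders), assuming the scan
   starts at the first token shown:

       { 1, f(a, b), { 2, 3 } } , x    -- the leading '{' is matched, so
                                          everything up to and including
                                          the closing '}' is consumed and
                                          the trailing ',' is left alone;
       1 + g(a, b), x                  -- stops at the first top-level ','.
*/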
5907 #define EXPR_CONST 1
5908 #define EXPR_ANY 2
5910 static void parse_init_elem(int expr_type)
5912 int saved_global_expr;
5913 switch(expr_type) {
5914 case EXPR_CONST:
5915 /* compound literals must be allocated globally in this case */
5916 saved_global_expr = global_expr;
5917 global_expr = 1;
5918 expr_const1();
5919 global_expr = saved_global_expr;
5920 /* NOTE: symbols are accepted, as well as lvalue for anon symbols
5921 (compound literals). */
5922 if (((vtop->r & (VT_VALMASK | VT_LVAL)) != VT_CONST
5923 && ((vtop->r & (VT_SYM|VT_LVAL)) != (VT_SYM|VT_LVAL)
5924 || vtop->sym->v < SYM_FIRST_ANOM))
5925 #ifdef TCC_TARGET_PE
5926 || (vtop->type.t & VT_IMPORT)
5927 #endif
5929 tcc_error("initializer element is not constant");
5930 break;
5931 case EXPR_ANY:
5932 expr_eq();
5933 break;
5937 /* put zeros for variable based init */
5938 static void init_putz(Section *sec, unsigned long c, int size)
5940 if (sec) {
5941 /* nothing to do because globals are already set to zero */
5942 } else {
5943 vpush_global_sym(&func_old_type, TOK_memset);
5944 vseti(VT_LOCAL, c);
5945 #ifdef TCC_TARGET_ARM
5946 vpushs(size);
5947 vpushi(0);
5948 #else
5949 vpushi(0);
5950 vpushs(size);
5951 #endif
5952 gfunc_call(3);
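/* E.g. for a local 'int a[100] = { 1 };' (4-byte int) the one explicit
   element is stored by init_putv() and the remaining 396 bytes are
   cleared by one generated call equivalent to memset(&a[1], 0, 396);
   at file scope nothing needs to be emitted because the data/bss
   sections already start out zeroed. */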
5956 /* t is the array or struct type. c is the array or struct
5957 address. cur_field is the pointer to the current
5958 field, for arrays the 'c' member contains the current start
5959 index. 'size_only' is true if only size info is needed (only used
5960 in arrays). al contains the already initialized length of the
5961 current container (starting at c). This returns the new length of that. */
5962 static int decl_designator(CType *type, Section *sec, unsigned long c,
5963 Sym **cur_field, int size_only, int al)
5965 Sym *s, *f;
5966 int index, index_last, align, l, nb_elems, elem_size;
5967 unsigned long corig = c;
5969 elem_size = 0;
5970 nb_elems = 1;
5971 if (gnu_ext && (l = is_label()) != 0)
5972 goto struct_field;
5973 /* NOTE: we only support ranges for last designator */
5974 while (nb_elems == 1 && (tok == '[' || tok == '.')) {
5975 if (tok == '[') {
5976 if (!(type->t & VT_ARRAY))
5977 expect("array type");
5978 next();
5979 index = index_last = expr_const();
5980 if (tok == TOK_DOTS && gnu_ext) {
5981 next();
5982 index_last = expr_const();
5984 skip(']');
5985 s = type->ref;
5986 if (index < 0 || (s->c >= 0 && index_last >= s->c) ||
5987 index_last < index)
5988 tcc_error("invalid index");
5989 if (cur_field)
5990 (*cur_field)->c = index_last;
5991 type = pointed_type(type);
5992 elem_size = type_size(type, &align);
5993 c += index * elem_size;
5994 nb_elems = index_last - index + 1;
5995 } else {
5996 next();
5997 l = tok;
5998 struct_field:
5999 next();
6000 if ((type->t & VT_BTYPE) != VT_STRUCT)
6001 expect("struct/union type");
6002 f = find_field(type, l);
6003 if (!f)
6004 expect("field");
6005 if (cur_field)
6006 *cur_field = f;
6007 type = &f->type;
6008 c += f->c;
6010 cur_field = NULL;
6012 if (!cur_field) {
6013 if (tok == '=') {
6014 next();
6015 } else if (!gnu_ext) {
6016 expect("=");
6018 } else {
6019 if (type->t & VT_ARRAY) {
6020 index = (*cur_field)->c;
6021 if (type->ref->c >= 0 && index >= type->ref->c)
6022 tcc_error("index too large");
6023 type = pointed_type(type);
6024 c += index * type_size(type, &align);
6025 } else {
6026 f = *cur_field;
6027 while (f && (f->v & SYM_FIRST_ANOM) && (f->type.t & VT_BITFIELD))
6028 *cur_field = f = f->next;
6029 if (!f)
6030 tcc_error("too many field init");
6031 type = &f->type;
6032 c += f->c;
6035 /* must put zero in holes (note that doing it that way
6036 ensures that it even works with designators) */
6037 if (!size_only && c - corig > al)
6038 init_putz(sec, corig + al, c - corig - al);
6039 decl_initializer(type, sec, c, 0, size_only);
6041 /* XXX: make it more general */
6042 if (!size_only && nb_elems > 1) {
6043 unsigned long c_end;
6044 uint8_t *src, *dst;
6045 int i;
6047 if (!sec) {
6048 vset(type, VT_LOCAL|VT_LVAL, c);
6049 for (i = 1; i < nb_elems; i++) {
6050 vset(type, VT_LOCAL|VT_LVAL, c + elem_size * i);
6051 vswap();
6052 vstore();
6054 vpop();
6055 } else {
6056 c_end = c + nb_elems * elem_size;
6057 if (c_end > sec->data_allocated)
6058 section_realloc(sec, c_end);
6059 src = sec->data + c;
6060 dst = src;
6061 for(i = 1; i < nb_elems; i++) {
6062 dst += elem_size;
6063 memcpy(dst, src, elem_size);
6067 c += nb_elems * type_size(type, &align);
6068 if (c - corig > al)
6069 al = c - corig;
6070 return al;
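/* Designator examples handled above ('...' ranges are a GNU extension,
   'struct pt' is just an illustrative type):

       int a[8] = { [2] = 5, [4 ... 6] = 1 };   -- the range entry has
                                                   nb_elems == 3
       struct pt q = { .y = 2, .x = 1 };        -- field designators

   the value written for a range is replicated nb_elems-1 times, by
   repeated vstore() for locals or by memcpy() into the section data
   for globals. */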
6073 /* store a value or an expression directly in global data or in local array */
6074 static void init_putv(CType *type, Section *sec, unsigned long c)
6076 int bt, bit_pos, bit_size;
6077 void *ptr;
6078 unsigned long long bit_mask;
6079 CType dtype;
6081 dtype = *type;
6082 dtype.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
6084 if (sec) {
6085 int size, align;
6086 /* XXX: not portable */
6087 /* XXX: generate error if incorrect relocation */
6088 gen_assign_cast(&dtype);
6089 bt = type->t & VT_BTYPE;
6090 size = type_size(type, &align);
6091 section_reserve(sec, c + size);
6092 ptr = sec->data + c;
6093 /* XXX: make code faster ? */
6094 if (!(type->t & VT_BITFIELD)) {
6095 bit_pos = 0;
6096 bit_size = PTR_SIZE * 8;
6097 bit_mask = -1LL;
6098 } else {
6099 bit_pos = (vtop->type.t >> VT_STRUCT_SHIFT) & 0x3f;
6100 bit_size = (vtop->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
6101 bit_mask = (1LL << bit_size) - 1;
6103 if ((vtop->r & (VT_SYM|VT_CONST)) == (VT_SYM|VT_CONST) &&
6104 vtop->sym->v >= SYM_FIRST_ANOM &&
6105 /* XXX This rejects compound literals like
6106 '(void *){ptr}'. The problem is that '&sym' is
6107 represented the same way, which would be ruled out
6108 by the SYM_FIRST_ANOM check above, but also '"string"'
6109 in 'char *p = "string"' is represented the same
6110 with the type being VT_PTR and the symbol being an
6111 anonymous one. That is, there's no difference in vtop
6112 between '(void *){x}' and '&(void *){x}'. Ignore
6113 pointer typed entities here. Hopefully no real code
6114 will ever use compound literals with scalar type. */
6115 (vtop->type.t & VT_BTYPE) != VT_PTR) {
6116 /* These come from compound literals, memcpy stuff over. */
6117 Section *ssec;
6118 ElfW(Sym) *esym;
6119 ElfW_Rel *rel;
6120 esym = &((ElfW(Sym) *)symtab_section->data)[vtop->sym->c];
6121 ssec = tcc_state->sections[esym->st_shndx];
6122 memmove (ptr, ssec->data + esym->st_value, size);
6123 if (ssec->reloc) {
6124 /* We need to copy over all memory contents, and that
6125 includes relocations. Use the fact that relocs are
6126 created in order, so look from the end of the relocs
6127 until we hit one before the copied region. */
6128 int num_relocs = ssec->reloc->data_offset / sizeof(*rel);
6129 rel = (ElfW_Rel*)(ssec->reloc->data + ssec->reloc->data_offset);
6130 while (num_relocs--) {
6131 rel--;
6132 if (rel->r_offset >= esym->st_value + size)
6133 continue;
6134 if (rel->r_offset < esym->st_value)
6135 break;
6136 /* Note: if the same fields are initialized multiple
6137 times (possible with designators) then we possibly
6138 add multiple relocations for the same offset here.
6139 That would lead to wrong code; the last reloc needs
6140 to win. We clean this up later after the whole
6141 initializer is parsed. */
6142 put_elf_reloca(symtab_section, sec,
6143 c + rel->r_offset - esym->st_value,
6144 ELFW(R_TYPE)(rel->r_info),
6145 ELFW(R_SYM)(rel->r_info),
6146 #if PTR_SIZE == 8
6147 rel->r_addend
6148 #else
6150 #endif
6154 } else {
6155 if ((vtop->r & VT_SYM) &&
6156 (bt == VT_BYTE ||
6157 bt == VT_SHORT ||
6158 bt == VT_DOUBLE ||
6159 bt == VT_LDOUBLE ||
6160 #if PTR_SIZE == 8
6161 (bt == VT_LLONG && bit_size != 64) ||
6162 bt == VT_INT
6163 #else
6164 bt == VT_LLONG ||
6165 (bt == VT_INT && bit_size != 32)
6166 #endif
6168 tcc_error("initializer element is not computable at load time");
6169 switch(bt) {
6170 /* XXX: when cross-compiling we assume that each type has the
6171 same representation on host and target, which is likely to
6172 be wrong in the case of long double */
6173 case VT_BOOL:
6174 vtop->c.i = (vtop->c.i != 0);
6175 case VT_BYTE:
6176 *(char *)ptr |= (vtop->c.i & bit_mask) << bit_pos;
6177 break;
6178 case VT_SHORT:
6179 *(short *)ptr |= (vtop->c.i & bit_mask) << bit_pos;
6180 break;
6181 case VT_FLOAT:
6182 *(float*)ptr = vtop->c.f;
6183 break;
6184 case VT_DOUBLE:
6185 *(double *)ptr = vtop->c.d;
6186 break;
6187 case VT_LDOUBLE:
6188 if (sizeof(long double) == LDOUBLE_SIZE)
6189 *(long double *)ptr = vtop->c.ld;
6190 else if (sizeof(double) == LDOUBLE_SIZE)
6191 *(double *)ptr = (double)vtop->c.ld;
6192 #if (defined __i386__ || defined __x86_64__) && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
6193 else if (sizeof (long double) >= 10)
6194 memcpy(memset(ptr, 0, LDOUBLE_SIZE), &vtop->c.ld, 10);
6195 #ifdef __TINYC__
6196 else if (sizeof (long double) == sizeof (double))
6197 __asm__("fldl %1\nfstpt %0\n" : "=m"
6198 (memset(ptr, 0, LDOUBLE_SIZE), ptr) : "m" (vtop->c.ld));
6199 #endif
6200 #endif
6201 else
6202 tcc_error("can't cross compile long double constants");
6203 break;
6204 #if PTR_SIZE != 8
6205 case VT_LLONG:
6206 *(long long *)ptr |= (vtop->c.i & bit_mask) << bit_pos;
6207 break;
6208 #else
6209 case VT_LLONG:
6210 #endif
6211 case VT_PTR:
6213 addr_t val = (vtop->c.i & bit_mask) << bit_pos;
6214 #if PTR_SIZE == 8
6215 if (vtop->r & VT_SYM)
6216 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
6217 else
6218 *(addr_t *)ptr |= val;
6219 #else
6220 if (vtop->r & VT_SYM)
6221 greloc(sec, vtop->sym, c, R_DATA_PTR);
6222 *(addr_t *)ptr |= val;
6223 #endif
6224 break;
6226 default:
6228 int val = (vtop->c.i & bit_mask) << bit_pos;
6229 #if PTR_SIZE == 8
6230 if (vtop->r & VT_SYM)
6231 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
6232 else
6233 *(int *)ptr |= val;
6234 #else
6235 if (vtop->r & VT_SYM)
6236 greloc(sec, vtop->sym, c, R_DATA_PTR);
6237 *(int *)ptr |= val;
6238 #endif
6239 break;
6243 vtop--;
6244 } else {
6245 vset(&dtype, VT_LOCAL|VT_LVAL, c);
6246 vswap();
6247 vstore();
6248 vpop();
6252 /* 't' contains the type and storage info. 'c' is the offset of the
6253 object in section 'sec'. If 'sec' is NULL, it means stack based
6254 allocation. 'first' is true if array '{' must be read (multi
6255 dimension implicit array init handling). 'size_only' is true if
6256 size only evaluation is wanted (only for arrays). */
6257 static void decl_initializer(CType *type, Section *sec, unsigned long c,
6258 int first, int size_only)
6260 int len, n, no_oblock, nb, i;
6261 int size1, align1;
6262 int have_elem;
6263 Sym *s, *f;
6264 Sym indexsym;
6265 CType *t1;
6267 /* If we currently are at an '}' or ',' we have read an initializer
6268 element in one of our callers, and not yet consumed it. */
6269 have_elem = tok == '}' || tok == ',';
6270 if (!have_elem && tok != '{' &&
6271 /* In case of strings we have special handling for arrays, so
6272 don't consume them as initializer value (which would commit them
6273 to some anonymous symbol). */
6274 tok != TOK_LSTR && tok != TOK_STR &&
6275 !size_only) {
6276 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
6277 have_elem = 1;
6280 if (have_elem &&
6281 !(type->t & VT_ARRAY) &&
6282 /* Use i_c_parameter_t, to strip toplevel qualifiers.
6283 The source type might have VT_CONSTANT set, which is
6284 of course assignable to non-const elements. */
6285 is_compatible_parameter_types(type, &vtop->type)) {
6286 init_putv(type, sec, c);
6287 } else if (type->t & VT_ARRAY) {
6288 s = type->ref;
6289 n = s->c;
6290 t1 = pointed_type(type);
6291 size1 = type_size(t1, &align1);
6293 no_oblock = 1;
6294 if ((first && tok != TOK_LSTR && tok != TOK_STR) ||
6295 tok == '{') {
6296 if (tok != '{')
6297 tcc_error("character array initializer must be a literal,"
6298 " optionally enclosed in braces");
6299 skip('{');
6300 no_oblock = 0;
6303 /* only parse strings here if the type is correct (otherwise: handle
6304 them as ((w)char *) expressions) */
6305 if ((tok == TOK_LSTR &&
6306 #ifdef TCC_TARGET_PE
6307 (t1->t & VT_BTYPE) == VT_SHORT && (t1->t & VT_UNSIGNED)
6308 #else
6309 (t1->t & VT_BTYPE) == VT_INT
6310 #endif
6311 ) || (tok == TOK_STR && (t1->t & VT_BTYPE) == VT_BYTE)) {
6312 len = 0;
6313 while (tok == TOK_STR || tok == TOK_LSTR) {
6314 int cstr_len, ch;
6316 /* compute maximum number of chars wanted */
6317 if (tok == TOK_STR)
6318 cstr_len = tokc.str.size;
6319 else
6320 cstr_len = tokc.str.size / sizeof(nwchar_t);
6321 cstr_len--;
6322 nb = cstr_len;
6323 if (n >= 0 && nb > (n - len))
6324 nb = n - len;
6325 if (!size_only) {
6326 if (cstr_len > nb)
6327 tcc_warning("initializer-string for array is too long");
6328 /* in order to go faster for the common case (char
6329 string in a global variable), we handle it
6330 specifically */
6331 if (sec && tok == TOK_STR && size1 == 1) {
6332 memcpy(sec->data + c + len, tokc.str.data, nb);
6333 } else {
6334 for(i=0;i<nb;i++) {
6335 if (tok == TOK_STR)
6336 ch = ((unsigned char *)tokc.str.data)[i];
6337 else
6338 ch = ((nwchar_t *)tokc.str.data)[i];
6339 vpushi(ch);
6340 init_putv(t1, sec, c + (len + i) * size1);
6344 len += nb;
6345 next();
6347 /* only add trailing zero if enough storage (no
6348 warning in this case since it is standard) */
6349 if (n < 0 || len < n) {
6350 if (!size_only) {
6351 vpushi(0);
6352 init_putv(t1, sec, c + (len * size1));
6354 len++;
6356 len *= size1;
6357 } else {
6358 indexsym.c = 0;
6359 f = &indexsym;
6361 do_init_list:
6362 len = 0;
6363 while (tok != '}' || have_elem) {
6364 len = decl_designator(type, sec, c, &f, size_only, len);
6365 have_elem = 0;
6366 if (type->t & VT_ARRAY) {
6367 ++indexsym.c;
6368 /* special test for multi dimensional arrays (may not
6369 be strictly correct if designators are used at the
6370 same time) */
6371 if (no_oblock && len >= n*size1)
6372 break;
6373 } else {
6374 if (s->type.t == TOK_UNION)
6375 f = NULL;
6376 else
6377 f = f->next;
6378 if (no_oblock && f == NULL)
6379 break;
6382 if (tok == '}')
6383 break;
6384 skip(',');
6387 /* put zeros at the end */
6388 if (!size_only && len < n*size1)
6389 init_putz(sec, c + len, n*size1 - len);
6390 if (!no_oblock)
6391 skip('}');
6392 /* patch type size if needed, which happens only for array types */
6393 if (n < 0)
6394 s->c = size1 == 1 ? len : ((len + size1 - 1)/size1);
6395 } else if ((type->t & VT_BTYPE) == VT_STRUCT) {
6396 size1 = 1;
6397 no_oblock = 1;
6398 if (first || tok == '{') {
6399 skip('{');
6400 no_oblock = 0;
6402 s = type->ref;
6403 f = s->next;
6404 n = s->c;
6405 goto do_init_list;
6406 } else if (tok == '{') {
6407 next();
6408 decl_initializer(type, sec, c, first, size_only);
6409 skip('}');
6410 } else if (size_only) {
6411 /* If we supported only ISO C we wouldn't have to accept calling
6412 this on anything other than an array with size_only==1 (and even then
6413 only on the outermost level, so no recursion would be needed),
6414 because initializing a flex array member isn't supported.
6415 But GNU C supports it, so we need to recurse even into
6416 subfields of structs and arrays when size_only is set. */
6417 /* just skip expression */
6418 skip_or_save_block(NULL);
6419 } else {
6420 if (!have_elem) {
6421 /* This should happen only when we haven't parsed
6422 the init element above for fear of committing a
6423 string constant to memory too early. */
6424 if (tok != TOK_STR && tok != TOK_LSTR)
6425 expect("string constant");
6426 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
6428 init_putv(type, sec, c);
6432 /* parse an initializer for type 't' if 'has_init' is non zero, and
6433 allocate space in local or global data space ('r' is either
6434 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
6435 variable 'v' of scope 'scope' is declared before initializers
6436 are parsed. If 'v' is zero, then a reference to the new object
6437 is put in the value stack. If 'has_init' is 2, a special parsing
6438 is done to handle string constants. */
6439 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r,
6440 int has_init, int v, int scope)
6442 int size, align, addr;
6443 ParseState saved_parse_state = {0};
6444 TokenString *init_str = NULL;
6445 Section *sec;
6446 Sym *flexible_array;
6448 flexible_array = NULL;
6449 if ((type->t & VT_BTYPE) == VT_STRUCT) {
6450 Sym *field = type->ref->next;
6451 if (field) {
6452 while (field->next)
6453 field = field->next;
6454 if (field->type.t & VT_ARRAY && field->type.ref->c < 0)
6455 flexible_array = field;
6459 size = type_size(type, &align);
6460 /* If unknown size, we must evaluate it before
6461 evaluating initializers because
6462 initializers can generate global data too
6463 (e.g. string pointers or ISOC99 compound
6464 literals). It also simplifies local
6465 initializers handling */
6466 if (size < 0 || (flexible_array && has_init)) {
6467 if (!has_init)
6468 tcc_error("unknown type size");
6469 /* get all init string */
6470 if (has_init == 2) {
6471 init_str = tok_str_alloc();
6472 /* only get strings */
6473 while (tok == TOK_STR || tok == TOK_LSTR) {
6474 tok_str_add_tok(init_str);
6475 next();
6477 tok_str_add(init_str, -1);
6478 tok_str_add(init_str, 0);
6479 } else {
6480 skip_or_save_block(&init_str);
6483 /* compute size */
6484 save_parse_state(&saved_parse_state);
6486 begin_macro(init_str, 1);
6487 next();
6488 decl_initializer(type, NULL, 0, 1, 1);
6489 /* prepare second initializer parsing */
6490 macro_ptr = init_str->str;
6491 next();
6493 /* if still unknown size, error */
6494 size = type_size(type, &align);
6495 if (size < 0)
6496 tcc_error("unknown type size");
6498 /* If there's a flex member and it was used in the initializer,
6499 adjust size. */
6500 if (flexible_array &&
6501 flexible_array->type.ref->c > 0)
6502 size += flexible_array->type.ref->c
6503 * pointed_size(&flexible_array->type);
6504 /* take into account specified alignment if bigger */
6505 if (ad->a.aligned) {
6506 int speca = 1 << (ad->a.aligned - 1);
6507 if (speca > align)
6508 align = speca;
6509 } else if (ad->a.packed) {
6510 align = 1;
6512 if ((r & VT_VALMASK) == VT_LOCAL) {
6513 sec = NULL;
6514 #ifdef CONFIG_TCC_BCHECK
6515 if (tcc_state->do_bounds_check && (type->t & VT_ARRAY)) {
6516 loc--;
6518 #endif
6519 loc = (loc - size) & -align;
6520 addr = loc;
6521 #ifdef CONFIG_TCC_BCHECK
6522 /* handles bounds */
6523 /* XXX: currently, since we do only one pass, we cannot track
6524 '&' operators, so we add only arrays */
6525 if (tcc_state->do_bounds_check && (type->t & VT_ARRAY)) {
6526 addr_t *bounds_ptr;
6527 /* add padding between regions */
6528 loc--;
6529 /* then add local bound info */
6530 bounds_ptr = section_ptr_add(lbounds_section, 2 * sizeof(addr_t));
6531 bounds_ptr[0] = addr;
6532 bounds_ptr[1] = size;
6534 #endif
6535 if (v) {
6536 /* local variable */
6537 #ifdef CONFIG_TCC_ASM
6538 if (ad->asm_label) {
6539 int reg = asm_parse_regvar(ad->asm_label);
6540 if (reg >= 0)
6541 r = (r & ~VT_VALMASK) | reg;
6543 #endif
6544 sym_push(v, type, r, addr);
6545 } else {
6546 /* push local reference */
6547 vset(type, r, addr);
6549 } else {
6550 Sym *sym = NULL;
6551 if (v && scope == VT_CONST) {
6552 /* see if the symbol was already defined */
6553 sym = sym_find(v);
6554 if (sym) {
6555 patch_storage(sym, type);
6556 if (sym->type.t & VT_EXTERN) {
6557 /* if the variable is extern, it was not allocated */
6558 sym->type.t &= ~VT_EXTERN;
6559 /* set array size if it was omitted in extern
6560 declaration */
6561 if ((sym->type.t & VT_ARRAY) &&
6562 sym->type.ref->c < 0 &&
6563 type->ref->c >= 0)
6564 sym->type.ref->c = type->ref->c;
6565 } else if (!has_init) {
6566 /* we accept several definitions of the same
6567 global variable. this is tricky, because we
6568 must play with the SHN_COMMON type of the symbol */
6569 /* no init data, we won't add more to the symbol */
6570 update_storage(sym);
6571 goto no_alloc;
6572 } else if (sym->c) {
6573 ElfW(Sym) *esym;
6574 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
6575 if (esym->st_shndx == data_section->sh_num)
6576 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
6581 /* allocate symbol in corresponding section */
6582 sec = ad->section;
6583 if (!sec) {
6584 if (has_init)
6585 sec = data_section;
6586 else if (tcc_state->nocommon)
6587 sec = bss_section;
6590 if (sec) {
6591 addr = section_add(sec, size, align);
6592 #ifdef CONFIG_TCC_BCHECK
6593 /* add padding if bound check */
6594 if (tcc_state->do_bounds_check)
6595 section_add(sec, 1, 1);
6596 #endif
6597 } else {
6598 addr = align; /* SHN_COMMON is special, symbol value is align */
6599 sec = common_section;
6602 if (v) {
6603 if (!sym) {
6604 sym = sym_push(v, type, r | VT_SYM, 0);
6605 sym->asm_label = ad->asm_label;
6607 /* update symbol definition */
6608 put_extern_sym(sym, sec, addr, size);
6609 } else {
6610 /* push global reference */
6611 sym = get_sym_ref(type, sec, addr, size);
6612 vpushsym(type, sym);
6613 vtop->r |= r;
6616 #ifdef CONFIG_TCC_BCHECK
6617 /* handles bounds now because the symbol must be defined
6618 before for the relocation */
6619 if (tcc_state->do_bounds_check) {
6620 addr_t *bounds_ptr;
6622 greloca(bounds_section, sym, bounds_section->data_offset, R_DATA_PTR, 0);
6623 /* then add global bound info */
6624 bounds_ptr = section_ptr_add(bounds_section, 2 * sizeof(addr_t));
6625 bounds_ptr[0] = 0; /* relocated */
6626 bounds_ptr[1] = size;
6628 #endif
6631 if (type->t & VT_VLA) {
6632 int a;
6634 /* save current stack pointer */
6635 if (vlas_in_scope == 0) {
6636 if (vla_sp_root_loc == -1)
6637 vla_sp_root_loc = (loc -= PTR_SIZE);
6638 gen_vla_sp_save(vla_sp_root_loc);
6641 vla_runtime_type_size(type, &a);
6642 gen_vla_alloc(type, a);
6643 gen_vla_sp_save(addr);
6644 vla_sp_loc = addr;
6645 vlas_in_scope++;
6647 } else if (has_init) {
6648 size_t oldreloc_offset = 0;
6649 if (sec && sec->reloc)
6650 oldreloc_offset = sec->reloc->data_offset;
6651 decl_initializer(type, sec, addr, 1, 0);
6652 if (sec && sec->reloc)
6653 squeeze_multi_relocs(sec, oldreloc_offset);
6654 /* patch flexible array member size back to -1, */
6655 /* for possible subsequent similar declarations */
6656 if (flexible_array)
6657 flexible_array->type.ref->c = -1;
6660 no_alloc:
6661 /* restore parse state if needed */
6662 if (init_str) {
6663 end_macro();
6664 restore_parse_state(&saved_parse_state);
6668 /* parse a function defined by symbol 'sym' and generate its code in
6669 'cur_text_section' */
6670 static void gen_function(Sym *sym)
6672 nocode_wanted = 0;
6673 ind = cur_text_section->data_offset;
6674 /* NOTE: we patch the symbol size later */
6675 put_extern_sym(sym, cur_text_section, ind, 0);
6676 funcname = get_tok_str(sym->v, NULL);
6677 func_ind = ind;
6678 /* Initialize VLA state */
6679 vla_sp_loc = -1;
6680 vla_sp_root_loc = -1;
6681 /* put debug symbol */
6682 tcc_debug_funcstart(tcc_state, sym);
6683 /* push a dummy symbol to enable local sym storage */
6684 sym_push2(&local_stack, SYM_FIELD, 0, 0);
6685 local_scope = 1; /* for function parameters */
6686 gfunc_prolog(&sym->type);
6687 local_scope = 0;
6688 rsym = 0;
6689 block(NULL, NULL, 0);
6690 nocode_wanted = 0;
6691 gsym(rsym);
6692 gfunc_epilog();
6693 cur_text_section->data_offset = ind;
6694 label_pop(&global_label_stack, NULL);
6695 /* reset local stack */
6696 local_scope = 0;
6697 sym_pop(&local_stack, NULL, 0);
6698 /* end of function */
6699 /* patch symbol size */
6700 ((ElfW(Sym) *)symtab_section->data)[sym->c].st_size =
6701 ind - func_ind;
6702 tcc_debug_funcend(tcc_state, ind - func_ind);
6703 /* It's better to crash than to generate wrong code */
6704 cur_text_section = NULL;
6705 funcname = ""; /* for safety */
6706 func_vt.t = VT_VOID; /* for safety */
6707 func_var = 0; /* for safety */
6708 ind = 0; /* for safety */
6709 nocode_wanted = 1;
6710 check_vstack();
6713 static void gen_inline_functions(TCCState *s)
6715 Sym *sym;
6716 int inline_generated, i, ln;
6717 struct InlineFunc *fn;
6719 ln = file->line_num;
6720 /* iterate while inline function are referenced */
6721 for(;;) {
6722 inline_generated = 0;
6723 for (i = 0; i < s->nb_inline_fns; ++i) {
6724 fn = s->inline_fns[i];
6725 sym = fn->sym;
6726 if (sym && sym->c) {
6727 /* the function was used: generate its code and
6728 convert it to a normal function */
6729 fn->sym = NULL;
6730 if (file)
6731 pstrcpy(file->filename, sizeof file->filename, fn->filename);
6732 sym->type.t &= ~VT_INLINE;
6734 begin_macro(fn->func_str, 1);
6735 next();
6736 cur_text_section = text_section;
6737 gen_function(sym);
6738 end_macro();
6740 inline_generated = 1;
6743 if (!inline_generated)
6744 break;
6746 file->line_num = ln;
6749 ST_FUNC void free_inline_functions(TCCState *s)
6751 int i;
6752 /* free tokens of unused inline functions */
6753 for (i = 0; i < s->nb_inline_fns; ++i) {
6754 struct InlineFunc *fn = s->inline_fns[i];
6755 if (fn->sym)
6756 tok_str_free(fn->func_str);
6758 dynarray_reset(&s->inline_fns, &s->nb_inline_fns);
6761 /* 'l' is VT_LOCAL or VT_CONST to define default storage type, or VT_CMP
6762 if parsing old style parameter decl list (and FUNC_SYM is set then) */
6763 static int decl0(int l, int is_for_loop_init, Sym *func_sym)
6765 int v, has_init, r;
6766 CType type, btype;
6767 Sym *sym;
6768 AttributeDef ad;
6770 while (1) {
6771 if (!parse_btype(&btype, &ad)) {
6772 if (is_for_loop_init)
6773 return 0;
6774 /* skip redundant ';' if not in old parameter decl scope */
6775 if (tok == ';' && l != VT_CMP) {
6776 next();
6777 continue;
6779 if (l == VT_CONST &&
6780 (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
6781 /* global asm block */
6782 asm_global_instr();
6783 continue;
6785 /* special test for old K&R protos without explicit int
6786 type. Only accepted when defining global data */
6787 if (l != VT_CONST || tok < TOK_UIDENT)
6788 break;
6789 btype.t = VT_INT;
6791 if (((btype.t & VT_BTYPE) == VT_ENUM ||
6792 (btype.t & VT_BTYPE) == VT_STRUCT) &&
6793 tok == ';') {
6794 if ((btype.t & VT_BTYPE) == VT_STRUCT) {
6795 int v = btype.ref->v;
6796 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) >= SYM_FIRST_ANOM)
6797 tcc_warning("unnamed struct/union that defines no instances");
6799 next();
6800 continue;
6802 while (1) { /* iterate thru each declaration */
6803 type = btype;
6804 /* If the base type itself was an array type of unspecified
6805 size (like in 'typedef int arr[]; arr x = {1};') then
6806 we will overwrite the unknown size by the real one for
6807 this decl. We need to unshare the ref symbol holding
6808 that size. */
6809 if ((type.t & VT_ARRAY) && type.ref->c < 0) {
6810 type.ref = sym_push(SYM_FIELD, &type.ref->type, 0, type.ref->c);
6812 type_decl(&type, &ad, &v, TYPE_DIRECT);
6813 #if 0
6815 char buf[500];
6816 type_to_str(buf, sizeof(buf), &type, get_tok_str(v, NULL));
6817 printf("type = '%s'\n", buf);
6819 #endif
6820 if ((type.t & VT_BTYPE) == VT_FUNC) {
6821 if ((type.t & VT_STATIC) && (l == VT_LOCAL)) {
6822 tcc_error("function without file scope cannot be static");
6824 /* if old style function prototype, we accept a
6825 declaration list */
6826 sym = type.ref;
6827 if (sym->c == FUNC_OLD && l == VT_CONST)
6828 decl0(VT_CMP, 0, sym);
6831 if (gnu_ext && (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
6832 ad.asm_label = asm_label_instr();
6833 /* parse one last attribute list, after asm label */
6834 parse_attribute(&ad);
6835 if (tok == '{')
6836 expect(";");
6839 if (ad.a.weak)
6840 type.t |= VT_WEAK;
6841 #ifdef TCC_TARGET_PE
6842 if (ad.a.func_import || ad.a.func_export) {
6843 if (type.t & (VT_STATIC|VT_TYPEDEF))
6844 tcc_error("cannot have dll linkage with static or typedef");
6845 if (ad.a.func_export)
6846 type.t |= VT_EXPORT;
6847 else if ((type.t & VT_BTYPE) != VT_FUNC)
6848 type.t |= VT_IMPORT|VT_EXTERN;
6850 #endif
6851 type.t |= ad.a.visibility << VT_VIS_SHIFT;
6853 if (tok == '{') {
6854 if (l != VT_CONST)
6855 tcc_error("cannot use local functions");
6856 if ((type.t & VT_BTYPE) != VT_FUNC)
6857 expect("function definition");
6859 /* reject abstract declarators in function definition
6860 make old style params without decl have int type */
6861 sym = type.ref;
6862 while ((sym = sym->next) != NULL) {
6863 if (!(sym->v & ~SYM_FIELD))
6864 expect("identifier");
6865 if (sym->type.t == VT_VOID)
6866 sym->type = int_type;
6869 /* XXX: cannot do better now: convert extern line to static inline */
6870 if ((type.t & (VT_EXTERN | VT_INLINE)) == (VT_EXTERN | VT_INLINE))
6871 type.t = (type.t & ~VT_EXTERN) | VT_STATIC;
6873 sym = sym_find(v);
6874 if (sym) {
6875 Sym *ref;
6876 if ((sym->type.t & VT_BTYPE) != VT_FUNC)
6877 goto func_error1;
6879 ref = sym->type.ref;
6881 /* use func_call from prototype if not defined */
6882 if (ref->a.func_call != FUNC_CDECL
6883 && type.ref->a.func_call == FUNC_CDECL)
6884 type.ref->a.func_call = ref->a.func_call;
6886 /* use static from prototype */
6887 if (sym->type.t & VT_STATIC)
6888 type.t = (type.t & ~VT_EXTERN) | VT_STATIC;
6890 /* If the definition has no visibility use the
6891 one from prototype. */
6892 if (! (type.t & VT_VIS_MASK))
6893 type.t |= sym->type.t & VT_VIS_MASK;
6895 /* apply other storage attributes from prototype */
6896 type.t |= sym->type.t & (VT_EXPORT|VT_WEAK);
6898 if (!is_compatible_types(&sym->type, &type)) {
6899 func_error1:
6900 tcc_error("incompatible types for redefinition of '%s'",
6901 get_tok_str(v, NULL));
6903 if (ref->a.func_body)
6904 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
6905 /* if symbol is already defined, then put complete type */
6906 sym->type = type;
6908 } else {
6909 /* put function symbol */
6910 sym = global_identifier_push(v, type.t, 0);
6911 sym->type.ref = type.ref;
6914 sym->type.ref->a.func_body = 1;
6915 sym->r = VT_SYM | VT_CONST;
6917 /* static inline functions are just recorded as a kind
6918 of macro. Their code will be emitted at the end of
6919 the compilation unit only if they are used */
6920 if ((type.t & (VT_INLINE | VT_STATIC)) ==
6921 (VT_INLINE | VT_STATIC)) {
6922 struct InlineFunc *fn;
6923 const char *filename;
6925 filename = file ? file->filename : "";
6926 fn = tcc_malloc(sizeof *fn + strlen(filename));
6927 strcpy(fn->filename, filename);
6928 fn->sym = sym;
6929 skip_or_save_block(&fn->func_str);
6930 dynarray_add(&tcc_state->inline_fns,
6931 &tcc_state->nb_inline_fns, fn);
6932 } else {
6933 /* compute text section */
6934 cur_text_section = ad.section;
6935 if (!cur_text_section)
6936 cur_text_section = text_section;
6937 gen_function(sym);
6939 break;
6940 } else {
6941 if (l == VT_CMP) {
6942 /* find parameter in function parameter list */
6943 for (sym = func_sym->next; sym; sym = sym->next)
6944 if ((sym->v & ~SYM_FIELD) == v)
6945 goto found;
6946 tcc_error("declaration for parameter '%s' but no such parameter",
6947 get_tok_str(v, NULL));
6948 found:
6949 if (type.t & VT_STORAGE) /* 'register' is okay */
6950 tcc_error("storage class specified for '%s'",
6951 get_tok_str(v, NULL));
6952 if (sym->type.t != VT_VOID)
6953 tcc_error("redefinition of parameter '%s'",
6954 get_tok_str(v, NULL));
6955 convert_parameter_type(&type);
6956 sym->type = type;
6957 } else if (type.t & VT_TYPEDEF) {
6958 /* save typedefed type */
6959 /* XXX: test storage specifiers ? */
6960 sym = sym_find(v);
6961 if (sym && sym->scope == local_scope) {
6962 if (!is_compatible_types(&sym->type, &type)
6963 || !(sym->type.t & VT_TYPEDEF))
6964 tcc_error("incompatible redefinition of '%s'",
6965 get_tok_str(v, NULL));
6966 sym->type = type;
6967 } else {
6968 sym = sym_push(v, &type, 0, 0);
6970 sym->a = ad.a;
6971 } else {
6972 r = 0;
6973 if ((type.t & VT_BTYPE) == VT_FUNC) {
6974 /* external function definition */
6975 /* specific case for func_call attribute */
6976 type.ref->a = ad.a;
6977 } else if (!(type.t & VT_ARRAY)) {
6978 /* not lvalue if array */
6979 r |= lvalue_type(type.t);
6981 has_init = (tok == '=');
6982 if (has_init && (type.t & VT_VLA))
6983 tcc_error("variable length array cannot be initialized");
6984 if (((type.t & VT_EXTERN) && (!has_init || l != VT_CONST)) ||
6985 ((type.t & VT_BTYPE) == VT_FUNC) ||
6986 ((type.t & VT_ARRAY) && (type.t & VT_STATIC) &&
6987 !has_init && l == VT_CONST && type.ref->c < 0)) {
6988 /* external variable or function */
6989 /* NOTE: as GCC, uninitialized global static
6990 arrays of null size are considered as
6991 extern */
6992 sym = external_sym(v, &type, r);
6993 sym->asm_label = ad.asm_label;
6994 if (ad.alias_target) {
6995 Section tsec;
6996 ElfW(Sym) *esym;
6997 Sym *alias_target;
6999 alias_target = sym_find(ad.alias_target);
7000 if (!alias_target || !alias_target->c)
7001 tcc_error("unsupported forward __alias__ attribute");
7002 esym = &((ElfW(Sym) *)symtab_section->data)[alias_target->c];
7003 tsec.sh_num = esym->st_shndx;
7004 put_extern_sym2(sym, &tsec, esym->st_value, esym->st_size, 0);
7006 } else {
7007 if (type.t & VT_STATIC)
7008 r |= VT_CONST;
7009 else
7010 r |= l;
7011 if (has_init)
7012 next();
7013 decl_initializer_alloc(&type, &ad, r, has_init, v, l);
7016 if (tok != ',') {
7017 if (is_for_loop_init)
7018 return 1;
7019 skip(';');
7020 break;
7022 next();
7024 ad.a.aligned = 0;
7027 return 0;
7030 ST_FUNC void decl(int l)
7032 decl0(l, 0, NULL);
7035 /* ------------------------------------------------------------------------- */