tccgen: yet another nocode_wanted fix
[tinycc.git] / tccgen.c
blob a0284ff17ed929706518475669bda6f980db8ed7
1 /*
2 * TCC - Tiny C Compiler
3 *
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 #include "tcc.h"
23 /********************************************************/
24 /* global variables */
26 /* loc : local variable index
27 ind : output code index
28 rsym: return symbol
29 anon_sym: anonymous symbol index
31 ST_DATA int rsym, anon_sym, ind, loc;
33 ST_DATA Sym *sym_free_first;
34 ST_DATA void **sym_pools;
35 ST_DATA int nb_sym_pools;
37 ST_DATA Sym *global_stack;
38 ST_DATA Sym *local_stack;
39 ST_DATA Sym *define_stack;
40 ST_DATA Sym *global_label_stack;
41 ST_DATA Sym *local_label_stack;
42 static int local_scope;
43 static int in_sizeof;
44 static int section_sym;
46 ST_DATA int vlas_in_scope; /* number of VLAs that are currently in scope */
47 ST_DATA int vla_sp_root_loc; /* vla_sp_loc for SP before any VLAs were pushed */
48 ST_DATA int vla_sp_loc; /* offset of the on-stack variable where the stack pointer is saved before it is modified for VLAs */
50 ST_DATA SValue __vstack[1+VSTACK_SIZE], *vtop, *pvtop;
52 ST_DATA int const_wanted; /* true if constant wanted */
53 ST_DATA int nocode_wanted; /* true if no code generation wanted for an expression */
54 ST_DATA int global_expr; /* true if compound literals must be allocated globally (used during initializer parsing) */
55 ST_DATA CType func_vt; /* current function return type (used by return instruction) */
56 ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
57 ST_DATA int func_vc;
58 ST_DATA int last_line_num, last_ind, func_ind; /* debug last line number and pc */
59 ST_DATA const char *funcname;
61 ST_DATA CType char_pointer_type, func_old_type, int_type, size_type;
63 ST_DATA struct switch_t {
64 struct case_t {
65 int64_t v1, v2;
66 int sym;
67 } **p; int n; /* list of case ranges */
68 int def_sym; /* default symbol */
69 } *cur_switch; /* current switch */
71 /* ------------------------------------------------------------------------- */
73 static void gen_cast(CType *type);
74 static inline CType *pointed_type(CType *type);
75 static int is_compatible_types(CType *type1, CType *type2);
76 static int parse_btype(CType *type, AttributeDef *ad);
77 static void type_decl(CType *type, AttributeDef *ad, int *v, int td);
78 static void parse_expr_type(CType *type);
79 static void decl_initializer(CType *type, Section *sec, unsigned long c, int first, int size_only);
80 static void block(int *bsym, int *csym, int is_expr);
81 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
82 static int decl0(int l, int is_for_loop_init);
83 static void expr_eq(void);
84 static void expr_lor_const(void);
85 static void unary_type(CType *type);
86 static void vla_runtime_type_size(CType *type, int *a);
87 static void vla_sp_restore(void);
88 static void vla_sp_restore_root(void);
89 static int is_compatible_parameter_types(CType *type1, CType *type2);
90 static void expr_type(CType *type);
91 static inline int64_t expr_const64(void);
92 ST_FUNC void vpush64(int ty, unsigned long long v);
93 ST_FUNC void vpush(CType *type);
94 ST_FUNC int gvtst(int inv, int t);
95 ST_FUNC int is_btype_size(int bt);
96 static void gen_inline_functions(TCCState *s);
98 ST_INLN int is_float(int t)
100 int bt;
101 bt = t & VT_BTYPE;
102 return bt == VT_LDOUBLE || bt == VT_DOUBLE || bt == VT_FLOAT || bt == VT_QFLOAT;
105 /* we use our own 'finite' function to avoid potential problems with
106 non-standard math libs */
107 /* XXX: endianness dependent */
108 ST_FUNC int ieee_finite(double d)
110 int p[4];
111 memcpy(p, &d, sizeof(double));
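    /* finite iff the exponent field is not all ones: OR-ing in the sign and
       mantissa bits and adding 1 overflows to 0 exactly for Inf/NaN, so the
       top bit of the sum is set only for finite values (little-endian word
       order assumed, hence the XXX above) */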
112 return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
115 ST_FUNC void test_lvalue(void)
117 if (!(vtop->r & VT_LVAL))
118 expect("lvalue");
121 ST_FUNC void check_vstack(void)
123 if (pvtop != vtop)
124 tcc_error("internal compiler error: vstack leak (%d)", vtop - pvtop);
127 /* ------------------------------------------------------------------------- */
128 /* vstack debugging aid */
130 #if 0
131 void pv (const char *lbl, int a, int b)
133 int i;
134 for (i = a; i < a + b; ++i) {
135 SValue *p = &vtop[-i];
136 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
137 lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
140 #endif
142 /* ------------------------------------------------------------------------- */
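/* initialize the code generator for a new compilation unit: reset state,
   define often-used types and emit the initial debug/section symbols */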
143 ST_FUNC void tccgen_start(TCCState *s1)
145 cur_text_section = NULL;
146 funcname = "";
147 anon_sym = SYM_FIRST_ANOM;
148 section_sym = 0;
149 const_wanted = 0;
150 nocode_wanted = 1;
152 /* define some often used types */
153 int_type.t = VT_INT;
154 char_pointer_type.t = VT_BYTE;
155 mk_pointer(&char_pointer_type);
156 #if PTR_SIZE == 4
157 size_type.t = VT_INT;
158 #else
159 size_type.t = VT_LLONG;
160 #endif
161 func_old_type.t = VT_FUNC;
162 func_old_type.ref = sym_push(SYM_FIELD, &int_type, FUNC_CDECL, FUNC_OLD);
164 if (s1->do_debug) {
165 char buf[512];
167 /* file info: full path + filename */
168 section_sym = put_elf_sym(symtab_section, 0, 0,
169 ELFW(ST_INFO)(STB_LOCAL, STT_SECTION), 0,
170 text_section->sh_num, NULL);
171 getcwd(buf, sizeof(buf));
172 #ifdef _WIN32
173 normalize_slashes(buf);
174 #endif
175 pstrcat(buf, sizeof(buf), "/");
176 put_stabs_r(buf, N_SO, 0, 0,
177 text_section->data_offset, text_section, section_sym);
178 put_stabs_r(file->filename, N_SO, 0, 0,
179 text_section->data_offset, text_section, section_sym);
181 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
182 symbols can be safely used */
183 put_elf_sym(symtab_section, 0, 0,
184 ELFW(ST_INFO)(STB_LOCAL, STT_FILE), 0,
185 SHN_ABS, file->filename);
187 #ifdef TCC_TARGET_ARM
188 arm_init(s1);
189 #endif
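/* finish code generation for the compilation unit: emit pending inline
   functions, check that the value stack is empty and close the debug info */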
192 ST_FUNC void tccgen_end(TCCState *s1)
194 gen_inline_functions(s1);
195 check_vstack();
196 /* end of translation unit info */
197 if (s1->do_debug) {
198 put_stabs_r(NULL, N_SO, 0, 0,
199 text_section->data_offset, text_section, section_sym);
203 /* ------------------------------------------------------------------------- */
204 /* update sym->c so that it points to an external symbol in section
205 'section' with value 'value' */
207 ST_FUNC void put_extern_sym2(Sym *sym, Section *section,
208 addr_t value, unsigned long size,
209 int can_add_underscore)
211 int sym_type, sym_bind, sh_num, info, other;
212 ElfW(Sym) *esym;
213 const char *name;
214 char buf1[256];
216 #ifdef CONFIG_TCC_BCHECK
217 char buf[32];
218 #endif
220 if (section == NULL)
221 sh_num = SHN_UNDEF;
222 else if (section == SECTION_ABS)
223 sh_num = SHN_ABS;
224 else
225 sh_num = section->sh_num;
227 if ((sym->type.t & VT_BTYPE) == VT_FUNC) {
228 sym_type = STT_FUNC;
229 } else if ((sym->type.t & VT_BTYPE) == VT_VOID) {
230 sym_type = STT_NOTYPE;
231 } else {
232 sym_type = STT_OBJECT;
235 if (sym->type.t & VT_STATIC)
236 sym_bind = STB_LOCAL;
237 else {
238 if (sym->type.t & VT_WEAK)
239 sym_bind = STB_WEAK;
240 else
241 sym_bind = STB_GLOBAL;
244 if (!sym->c) {
245 name = get_tok_str(sym->v, NULL);
246 #ifdef CONFIG_TCC_BCHECK
247 if (tcc_state->do_bounds_check) {
248 /* XXX: avoid doing that for statics ? */
249 /* if bound checking is activated, we change some function
250 names by adding the "__bound" prefix */
251 switch(sym->v) {
252 #ifdef TCC_TARGET_PE
253 /* XXX: we rely only on malloc hooks */
254 case TOK_malloc:
255 case TOK_free:
256 case TOK_realloc:
257 case TOK_memalign:
258 case TOK_calloc:
259 #endif
260 case TOK_memcpy:
261 case TOK_memmove:
262 case TOK_memset:
263 case TOK_strlen:
264 case TOK_strcpy:
265 case TOK_alloca:
266 strcpy(buf, "__bound_");
267 strcat(buf, name);
268 name = buf;
269 break;
272 #endif
273 other = 0;
275 #ifdef TCC_TARGET_PE
276 if (sym->type.t & VT_EXPORT)
277 other |= ST_PE_EXPORT;
278 if (sym_type == STT_FUNC && sym->type.ref) {
279 Sym *ref = sym->type.ref;
280 if (ref->a.func_export)
281 other |= ST_PE_EXPORT;
282 if (ref->a.func_call == FUNC_STDCALL && can_add_underscore) {
283 sprintf(buf1, "_%s@%d", name, ref->a.func_args * PTR_SIZE);
284 name = buf1;
285 other |= ST_PE_STDCALL;
286 can_add_underscore = 0;
288 } else {
289 if (find_elf_sym(tcc_state->dynsymtab_section, name))
290 other |= ST_PE_IMPORT;
291 if (sym->type.t & VT_IMPORT)
292 other |= ST_PE_IMPORT;
294 #else
295 if (! (sym->type.t & VT_STATIC))
296 other = (sym->type.t & VT_VIS_MASK) >> VT_VIS_SHIFT;
297 #endif
298 if (tcc_state->leading_underscore && can_add_underscore) {
299 buf1[0] = '_';
300 pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
301 name = buf1;
303 if (sym->asm_label) {
304 name = get_tok_str(sym->asm_label, NULL);
306 info = ELFW(ST_INFO)(sym_bind, sym_type);
307 sym->c = set_elf_sym(symtab_section, value, size, info, other, sh_num, name);
308 } else {
309 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
310 esym->st_value = value;
311 esym->st_size = size;
312 esym->st_shndx = sh_num;
316 ST_FUNC void put_extern_sym(Sym *sym, Section *section,
317 addr_t value, unsigned long size)
319 put_extern_sym2(sym, section, value, size, 1);
322 /* add a new relocation entry to symbol 'sym' in section 's' */
323 ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
324 addr_t addend)
326 int c = 0;
328 if (nocode_wanted && s == cur_text_section)
329 return;
331 if (sym) {
332 if (0 == sym->c)
333 put_extern_sym(sym, NULL, 0, 0);
334 c = sym->c;
337 /* now we can add ELF relocation info */
338 put_elf_reloca(symtab_section, s, offset, type, c, addend);
341 ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
343 greloca(s, sym, offset, type, 0);
346 /* ------------------------------------------------------------------------- */
347 /* symbol allocator */
348 static Sym *__sym_malloc(void)
350 Sym *sym_pool, *sym, *last_sym;
351 int i;
353 sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
354 dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);
356 last_sym = sym_free_first;
357 sym = sym_pool;
358 for(i = 0; i < SYM_POOL_NB; i++) {
359 sym->next = last_sym;
360 last_sym = sym;
361 sym++;
363 sym_free_first = last_sym;
364 return last_sym;
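/* take a symbol from the free list, refilling it from a new pool if empty
   (or allocate directly when SYM_DEBUG is defined) */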
367 static inline Sym *sym_malloc(void)
369 Sym *sym;
370 #ifndef SYM_DEBUG
371 sym = sym_free_first;
372 if (!sym)
373 sym = __sym_malloc();
374 sym_free_first = sym->next;
375 return sym;
376 #else
377 sym = tcc_malloc(sizeof(Sym));
378 return sym;
379 #endif
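/* return a symbol to the free list (or free it directly with SYM_DEBUG) */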
382 ST_INLN void sym_free(Sym *sym)
384 #ifndef SYM_DEBUG
385 sym->next = sym_free_first;
386 sym_free_first = sym;
387 #else
388 tcc_free(sym);
389 #endif
392 /* push, without hashing */
393 ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, long c)
395 Sym *s;
397 s = sym_malloc();
398 s->scope = 0;
399 s->v = v;
400 s->type.t = t;
401 s->type.ref = NULL;
402 #ifdef _WIN64
403 s->d = NULL;
404 #endif
405 s->c = c;
406 s->next = NULL;
407 /* add in stack */
408 s->prev = *ps;
409 *ps = s;
410 return s;
413 /* find a symbol and return its associated structure. 's' is the top
414 of the symbol stack */
415 ST_FUNC Sym *sym_find2(Sym *s, int v)
417 while (s) {
418 if (s->v == v)
419 return s;
420 else if (s->v == -1)
421 return NULL;
422 s = s->prev;
424 return NULL;
427 /* structure lookup */
428 ST_INLN Sym *struct_find(int v)
430 v -= TOK_IDENT;
431 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
432 return NULL;
433 return table_ident[v]->sym_struct;
436 /* find an identifier */
437 ST_INLN Sym *sym_find(int v)
439 v -= TOK_IDENT;
440 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
441 return NULL;
442 return table_ident[v]->sym_identifier;
445 /* push a given symbol on the symbol stack */
446 ST_FUNC Sym *sym_push(int v, CType *type, int r, long c)
448 Sym *s, **ps;
449 TokenSym *ts;
451 if (local_stack)
452 ps = &local_stack;
453 else
454 ps = &global_stack;
455 s = sym_push2(ps, v, type->t, c);
456 s->type.ref = type->ref;
457 s->r = r;
458 /* don't record fields or anonymous symbols */
459 /* XXX: simplify */
460 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
461 /* record symbol in token array */
462 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
463 if (v & SYM_STRUCT)
464 ps = &ts->sym_struct;
465 else
466 ps = &ts->sym_identifier;
467 s->prev_tok = *ps;
468 *ps = s;
469 s->scope = local_scope;
470 if (s->prev_tok && s->prev_tok->scope == s->scope)
471 tcc_error("redeclaration of '%s'",
472 get_tok_str(v & ~SYM_STRUCT, NULL));
474 return s;
477 /* push a global identifier */
478 ST_FUNC Sym *global_identifier_push(int v, int t, long c)
480 Sym *s, **ps;
481 s = sym_push2(&global_stack, v, t, c);
482 /* don't record anonymous symbol */
483 if (v < SYM_FIRST_ANOM) {
484 ps = &table_ident[v - TOK_IDENT]->sym_identifier;
485 /* modify the top most local identifier, so that
486 sym_identifier will point to 's' when popped */
487 while (*ps != NULL)
488 ps = &(*ps)->prev_tok;
489 s->prev_tok = NULL;
490 *ps = s;
492 return s;
495 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
496 pop them yet from the list, but do remove them from the token array. */
497 ST_FUNC void sym_pop(Sym **ptop, Sym *b, int keep)
499 Sym *s, *ss, **ps;
500 TokenSym *ts;
501 int v;
503 s = *ptop;
504 while(s != b) {
505 ss = s->prev;
506 v = s->v;
507 /* remove symbol in token array */
508 /* XXX: simplify */
509 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
510 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
511 if (v & SYM_STRUCT)
512 ps = &ts->sym_struct;
513 else
514 ps = &ts->sym_identifier;
515 *ps = s->prev_tok;
517 if (!keep)
518 sym_free(s);
519 s = ss;
521 if (!keep)
522 *ptop = b;
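/* mark 'sym' as weak and, if it was already output, downgrade the binding
   of its ELF symbol to STB_WEAK as well */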
525 static void weaken_symbol(Sym *sym)
527 sym->type.t |= VT_WEAK;
528 if (sym->c > 0) {
529 int esym_type;
530 ElfW(Sym) *esym;
532 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
533 esym_type = ELFW(ST_TYPE)(esym->st_info);
534 esym->st_info = ELFW(ST_INFO)(STB_WEAK, esym_type);
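/* merge the visibility requested in 'type' into 'sym', keeping the more
   restrictive one, and update the ELF symbol if it was already output */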
538 static void apply_visibility(Sym *sym, CType *type)
540 int vis = sym->type.t & VT_VIS_MASK;
541 int vis2 = type->t & VT_VIS_MASK;
542 if (vis == (STV_DEFAULT << VT_VIS_SHIFT))
543 vis = vis2;
544 else if (vis2 == (STV_DEFAULT << VT_VIS_SHIFT))
546 else
547 vis = (vis < vis2) ? vis : vis2;
548 sym->type.t &= ~VT_VIS_MASK;
549 sym->type.t |= vis;
551 if (sym->c > 0) {
552 ElfW(Sym) *esym;
554 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
555 vis >>= VT_VIS_SHIFT;
556 esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1)) | vis;
560 /* ------------------------------------------------------------------------- */
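/* push a new value described by type 'type', storage 'r' and constant 'vc'
   onto the value stack */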
562 static void vsetc(CType *type, int r, CValue *vc)
564 int v;
566 if (vtop >= vstack + (VSTACK_SIZE - 1))
567 tcc_error("memory full (vstack)");
568 /* cannot leave cpu flags set if other instructions are generated. Also
569 avoid leaving VT_JMP anywhere except on the top of the stack
570 because it would complicate the code generator.
572 Don't do this when nocode_wanted. vtop might come from
573 !nocode_wanted regions (see 88_codeopt.c) and transforming
574 it to a register without actually generating code is wrong
575 as their value might still be used for real. All values
576 we push under nocode_wanted will eventually be popped
577 again, so that the VT_CMP/VT_JMP value will be in vtop
578 when code is unsuppressed again.
580 Same logic below in vswap(); */
581 if (vtop >= vstack && !nocode_wanted) {
582 v = vtop->r & VT_VALMASK;
583 if (v == VT_CMP || (v & ~1) == VT_JMP)
584 gv(RC_INT);
587 vtop++;
588 vtop->type = *type;
589 vtop->r = r;
590 vtop->r2 = VT_CONST;
591 vtop->c = *vc;
592 vtop->sym = NULL;
595 ST_FUNC void vswap(void)
597 SValue tmp;
598 /* cannot vswap cpu flags. See comment at vsetc() above */
599 if (vtop >= vstack && !nocode_wanted) {
600 int v = vtop->r & VT_VALMASK;
601 if (v == VT_CMP || (v & ~1) == VT_JMP)
602 gv(RC_INT);
604 tmp = vtop[0];
605 vtop[0] = vtop[-1];
606 vtop[-1] = tmp;
609 /* pop stack value */
610 ST_FUNC void vpop(void)
612 int v;
613 v = vtop->r & VT_VALMASK;
614 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
615 /* for x86, we need to pop the FP stack */
616 if (v == TREG_ST0) {
617 o(0xd8dd); /* fstp %st(0) */
618 } else
619 #endif
620 if (v == VT_JMP || v == VT_JMPI) {
621 /* need to put correct jump if && or || without test */
622 gsym(vtop->c.i);
624 vtop--;
627 /* push constant of type "type" with useless value */
628 ST_FUNC void vpush(CType *type)
630 CValue cval;
631 vsetc(type, VT_CONST, &cval);
634 /* push integer constant */
635 ST_FUNC void vpushi(int v)
637 CValue cval;
638 cval.i = v;
639 vsetc(&int_type, VT_CONST, &cval);
642 /* push a pointer sized constant */
643 static void vpushs(addr_t v)
645 CValue cval;
646 cval.i = v;
647 vsetc(&size_type, VT_CONST, &cval);
650 /* push arbitrary 64bit constant */
651 ST_FUNC void vpush64(int ty, unsigned long long v)
653 CValue cval;
654 CType ctype;
655 ctype.t = ty;
656 ctype.ref = NULL;
657 cval.i = v;
658 vsetc(&ctype, VT_CONST, &cval);
661 /* push long long constant */
662 static inline void vpushll(long long v)
664 vpush64(VT_LLONG, v);
667 ST_FUNC void vset(CType *type, int r, long v)
669 CValue cval;
671 cval.i = v;
672 vsetc(type, r, &cval);
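/* push an int-typed value with storage class 'r' and value 'v' */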
675 static void vseti(int r, int v)
677 CType type;
678 type.t = VT_INT;
679 type.ref = 0;
680 vset(&type, r, v);
683 ST_FUNC void vpushv(SValue *v)
685 if (vtop >= vstack + (VSTACK_SIZE - 1))
686 tcc_error("memory full (vstack)");
687 vtop++;
688 *vtop = *v;
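/* duplicate the value on top of the value stack */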
691 static void vdup(void)
693 vpushv(vtop);
696 /* rotate n first stack elements to the bottom
697 I1 ... In -> I2 ... In I1 [top is right]
699 ST_FUNC void vrotb(int n)
701 int i;
702 SValue tmp;
704 tmp = vtop[-n + 1];
705 for(i=-n+1;i!=0;i++)
706 vtop[i] = vtop[i+1];
707 vtop[0] = tmp;
710 /* rotate the n elements before entry e towards the top
711 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
713 ST_FUNC void vrote(SValue *e, int n)
715 int i;
716 SValue tmp;
718 tmp = *e;
719 for(i = 0;i < n - 1; i++)
720 e[-i] = e[-i - 1];
721 e[-n + 1] = tmp;
724 /* rotate n first stack elements to the top
725 I1 ... In -> In I1 ... I(n-1) [top is right]
727 ST_FUNC void vrott(int n)
729 vrote(vtop, n);
732 /* push a symbol value of TYPE */
733 static inline void vpushsym(CType *type, Sym *sym)
735 CValue cval;
736 cval.i = 0;
737 vsetc(type, VT_CONST | VT_SYM, &cval);
738 vtop->sym = sym;
741 /* Return a static symbol pointing to a section */
742 ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
744 int v;
745 Sym *sym;
747 v = anon_sym++;
748 sym = global_identifier_push(v, type->t | VT_STATIC, 0);
749 sym->type.ref = type->ref;
750 sym->r = VT_CONST | VT_SYM;
751 put_extern_sym(sym, sec, offset, size);
752 return sym;
755 /* push a reference to a section offset by adding a dummy symbol */
756 static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
758 vpushsym(type, get_sym_ref(type, sec, offset, size));
761 /* define a new external reference to a symbol 'v' of type 'u' */
762 ST_FUNC Sym *external_global_sym(int v, CType *type, int r)
764 Sym *s;
766 s = sym_find(v);
767 if (!s) {
768 /* push forward reference */
769 s = global_identifier_push(v, type->t | VT_EXTERN, 0);
770 s->type.ref = type->ref;
771 s->r = r | VT_CONST | VT_SYM;
773 return s;
776 /* define a new external reference to a symbol 'v' */
777 static Sym *external_sym(int v, CType *type, int r)
779 Sym *s;
781 s = sym_find(v);
782 if (!s) {
783 /* push forward reference */
784 s = sym_push(v, type, r | VT_CONST | VT_SYM, 0);
785 s->type.t |= VT_EXTERN;
786 } else if (s->type.ref == func_old_type.ref) {
787 s->type.ref = type->ref;
788 s->r = r | VT_CONST | VT_SYM;
789 s->type.t |= VT_EXTERN;
790 } else if (!is_compatible_types(&s->type, type)) {
791 tcc_error("incompatible types for redefinition of '%s'",
792 get_tok_str(v, NULL));
794 /* Merge some storage attributes. */
795 if (type->t & VT_WEAK)
796 weaken_symbol(s);
798 if (type->t & VT_VIS_MASK)
799 apply_visibility(s, type);
801 return s;
804 /* push a reference to global symbol v */
805 ST_FUNC void vpush_global_sym(CType *type, int v)
807 vpushsym(type, external_global_sym(v, type, 0));
810 /* save registers up to (vtop - n) stack entry */
811 ST_FUNC void save_regs(int n)
813 SValue *p, *p1;
814 for(p = vstack, p1 = vtop - n; p <= p1; p++)
815 save_reg(p->r);
818 /* save r to the memory stack, and mark it as being free */
819 ST_FUNC void save_reg(int r)
821 save_reg_upstack(r, 0);
824 /* save r to the memory stack, and mark it as being free,
825 if seen up to (vtop - n) stack entry */
826 ST_FUNC void save_reg_upstack(int r, int n)
828 int l, saved, size, align;
829 SValue *p, *p1, sv;
830 CType *type;
832 if ((r &= VT_VALMASK) >= VT_CONST)
833 return;
834 if (nocode_wanted)
835 return;
837 /* modify all stack values */
838 saved = 0;
839 l = 0;
840 for(p = vstack, p1 = vtop - n; p <= p1; p++) {
841 if ((p->r & VT_VALMASK) == r ||
842 ((p->type.t & VT_BTYPE) == VT_LLONG && (p->r2 & VT_VALMASK) == r)) {
843 /* must save value on stack if not already done */
844 if (!saved) {
845 /* NOTE: must reload 'r' because r might be equal to r2 */
846 r = p->r & VT_VALMASK;
847 /* store register in the stack */
848 type = &p->type;
849 if ((p->r & VT_LVAL) ||
850 (!is_float(type->t) && (type->t & VT_BTYPE) != VT_LLONG))
851 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
852 type = &char_pointer_type;
853 #else
854 type = &int_type;
855 #endif
856 if ((type->t & VT_BTYPE) == VT_FLOAT) {
857 /* cast to DOUBLE to avoid precision loss */
858 type->t = (type->t & ~VT_BTYPE) | VT_DOUBLE;
860 size = type_size(type, &align);
861 loc = (loc - size) & -align;
862 sv.type.t = type->t;
863 sv.r = VT_LOCAL | VT_LVAL;
864 sv.c.i = loc;
865 store(r, &sv);
866 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
867 /* x86 specific: need to pop fp register ST0 if saved */
868 if (r == TREG_ST0) {
869 o(0xd8dd); /* fstp %st(0) */
871 #endif
872 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
873 /* special long long case */
874 if ((type->t & VT_BTYPE) == VT_LLONG) {
875 sv.c.i += 4;
876 store(p->r2, &sv);
878 #endif
879 l = loc;
880 saved = 1;
882 /* mark that stack entry as being saved on the stack */
883 if (p->r & VT_LVAL) {
884 /* also clear the bounded flag because the
885 relocation address of the function was stored in
886 p->c.i */
887 p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
888 } else {
889 p->r = lvalue_type(p->type.t) | VT_LOCAL;
891 p->r2 = VT_CONST;
892 p->c.i = l;
897 #ifdef TCC_TARGET_ARM
898 /* find a register of class 'rc2' with at most one reference on stack.
899 * If none, call get_reg(rc) */
900 ST_FUNC int get_reg_ex(int rc, int rc2)
902 int r;
903 SValue *p;
905 for(r=0;r<NB_REGS;r++) {
906 if (reg_classes[r] & rc2) {
907 int n;
908 n=0;
909 for(p = vstack; p <= vtop; p++) {
910 if ((p->r & VT_VALMASK) == r ||
911 (p->r2 & VT_VALMASK) == r)
912 n++;
914 if (n <= 1)
915 return r;
918 return get_reg(rc);
920 #endif
922 /* find a free register of class 'rc'. If none, save one register */
923 ST_FUNC int get_reg(int rc)
925 int r;
926 SValue *p;
928 /* find a free register */
929 for(r=0;r<NB_REGS;r++) {
930 if (reg_classes[r] & rc) {
931 if (nocode_wanted)
932 return r;
933 for(p=vstack;p<=vtop;p++) {
934 if ((p->r & VT_VALMASK) == r ||
935 (p->r2 & VT_VALMASK) == r)
936 goto notfound;
938 return r;
940 notfound: ;
943 /* no register left : free the first one on the stack (VERY
944 IMPORTANT to start from the bottom to ensure that we don't
945 spill registers used in gen_opi()) */
946 for(p=vstack;p<=vtop;p++) {
947 /* look at second register (if long long) */
948 r = p->r2 & VT_VALMASK;
949 if (r < VT_CONST && (reg_classes[r] & rc))
950 goto save_found;
951 r = p->r & VT_VALMASK;
952 if (r < VT_CONST && (reg_classes[r] & rc)) {
953 save_found:
954 save_reg(r);
955 return r;
958 /* Should never come here */
959 return -1;
962 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
963 if needed */
964 static void move_reg(int r, int s, int t)
966 SValue sv;
968 if (r != s) {
969 save_reg(r);
970 sv.type.t = t;
971 sv.type.ref = NULL;
972 sv.r = s;
973 sv.c.i = 0;
974 load(r, &sv);
978 /* get address of vtop (vtop MUST BE an lvalue) */
979 ST_FUNC void gaddrof(void)
981 if (vtop->r & VT_REF)
982 gv(RC_INT);
983 vtop->r &= ~VT_LVAL;
984 /* tricky: if saved lvalue, then we can go back to lvalue */
985 if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
986 vtop->r = (vtop->r & ~(VT_VALMASK | VT_LVAL_TYPE)) | VT_LOCAL | VT_LVAL;
991 #ifdef CONFIG_TCC_BCHECK
992 /* generate lvalue bound code */
993 static void gbound(void)
995 int lval_type;
996 CType type1;
998 vtop->r &= ~VT_MUSTBOUND;
999 /* if lvalue, then use checking code before dereferencing */
1000 if (vtop->r & VT_LVAL) {
1001 /* if not VT_BOUNDED value, then make one */
1002 if (!(vtop->r & VT_BOUNDED)) {
1003 lval_type = vtop->r & (VT_LVAL_TYPE | VT_LVAL);
1004 /* must save type because we must set it to int to get pointer */
1005 type1 = vtop->type;
1006 vtop->type.t = VT_PTR;
1007 gaddrof();
1008 vpushi(0);
1009 gen_bounded_ptr_add();
1010 vtop->r |= lval_type;
1011 vtop->type = type1;
1013 /* then check for dereferencing */
1014 gen_bounded_ptr_deref();
1017 #endif
1019 /* store vtop in a register belonging to class 'rc'. Lvalues are
1020 converted to values. Cannot be used for values that cannot be
1021 converted to a register value (such as structures). */
1022 ST_FUNC int gv(int rc)
1024 int r, bit_pos, bit_size, size, align, i;
1025 int rc2;
1027 /* NOTE: get_reg can modify vstack[] */
1028 if (vtop->type.t & VT_BITFIELD) {
1029 CType type;
1030 int bits = 32;
1031 bit_pos = (vtop->type.t >> VT_STRUCT_SHIFT) & 0x3f;
1032 bit_size = (vtop->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
1033 /* remove bit field info to avoid loops */
1034 vtop->type.t &= ~VT_BITFIELD & ((1 << VT_STRUCT_SHIFT) - 1);
1035 /* cast to int to propagate signedness in following ops */
1036 if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
1037 type.t = VT_LLONG;
1038 bits = 64;
1039 } else
1040 type.t = VT_INT;
1041 if((vtop->type.t & VT_UNSIGNED) ||
1042 (vtop->type.t & VT_BTYPE) == VT_BOOL)
1043 type.t |= VT_UNSIGNED;
1044 gen_cast(&type);
1045 /* generate shifts */
1046 vpushi(bits - (bit_pos + bit_size));
1047 gen_op(TOK_SHL);
1048 vpushi(bits - bit_size);
1049 /* NOTE: transformed to SHR if unsigned */
1050 gen_op(TOK_SAR);
1051 r = gv(rc);
1052 } else {
1053 if (is_float(vtop->type.t) &&
1054 (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1055 Sym *sym;
1056 int *ptr;
1057 unsigned long offset;
1058 #if defined(TCC_TARGET_ARM) && !defined(TCC_ARM_VFP)
1059 CValue check;
1060 #endif
1062 /* XXX: unify with initializers handling ? */
1063 /* CPUs usually cannot use float constants, so we store them
1064 generically in data segment */
1065 size = type_size(&vtop->type, &align);
1066 offset = (data_section->data_offset + align - 1) & -align;
1067 data_section->data_offset = offset;
1068 /* XXX: not portable yet */
1069 #if defined(__i386__) || defined(__x86_64__)
1070 /* Zero pad x87 tenbyte long doubles */
1071 if (size == LDOUBLE_SIZE) {
1072 vtop->c.tab[2] &= 0xffff;
1073 #if LDOUBLE_SIZE == 16
1074 vtop->c.tab[3] = 0;
1075 #endif
1077 #endif
1078 ptr = section_ptr_add(data_section, size);
1079 size = size >> 2;
1080 #if defined(TCC_TARGET_ARM) && !defined(TCC_ARM_VFP)
1081 check.d = 1;
1082 if(check.tab[0])
1083 for(i=0;i<size;i++)
1084 ptr[i] = vtop->c.tab[size-1-i];
1085 else
1086 #endif
1087 for(i=0;i<size;i++)
1088 ptr[i] = vtop->c.tab[i];
1089 sym = get_sym_ref(&vtop->type, data_section, offset, size << 2);
1090 vtop->r |= VT_LVAL | VT_SYM;
1091 vtop->sym = sym;
1092 vtop->c.i = 0;
1094 #ifdef CONFIG_TCC_BCHECK
1095 if (vtop->r & VT_MUSTBOUND)
1096 gbound();
1097 #endif
1099 r = vtop->r & VT_VALMASK;
1100 rc2 = (rc & RC_FLOAT) ? RC_FLOAT : RC_INT;
1101 #ifndef TCC_TARGET_ARM64
1102 if (rc == RC_IRET)
1103 rc2 = RC_LRET;
1104 #ifdef TCC_TARGET_X86_64
1105 else if (rc == RC_FRET)
1106 rc2 = RC_QRET;
1107 #endif
1108 #endif
1110 /* need to reload if:
1111 - constant
1112 - lvalue (need to dereference pointer)
1113 - already a register, but not in the right class */
1114 if (r >= VT_CONST
1115 || (vtop->r & VT_LVAL)
1116 || !(reg_classes[r] & rc)
1117 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1118 || ((vtop->type.t & VT_BTYPE) == VT_QLONG && !(reg_classes[vtop->r2] & rc2))
1119 || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT && !(reg_classes[vtop->r2] & rc2))
1120 #else
1121 || ((vtop->type.t & VT_BTYPE) == VT_LLONG && !(reg_classes[vtop->r2] & rc2))
1122 #endif
1125 r = get_reg(rc);
1126 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1127 if (((vtop->type.t & VT_BTYPE) == VT_QLONG) || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT)) {
1128 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
1129 #else
1130 if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
1131 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
1132 unsigned long long ll;
1133 #endif
1134 int r2, original_type;
1135 original_type = vtop->type.t;
1136 /* two register type load : expand to two words
1137 temporarily */
1138 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1139 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1140 /* load constant */
1141 ll = vtop->c.i;
1142 vtop->c.i = ll; /* first word */
1143 load(r, vtop);
1144 vtop->r = r; /* save register value */
1145 vpushi(ll >> 32); /* second word */
1146 } else
1147 #endif
1148 if (vtop->r & VT_LVAL) {
1149 /* We do not want to modify the long long
1150 pointer here, so the safest (and least
1151 efficient) approach is to save all the other registers
1152 on the stack. XXX: totally inefficient. */
1153 #if 0
1154 save_regs(1);
1155 #else
1156 /* lvalue_save: save only if used further down the stack */
1157 save_reg_upstack(vtop->r, 1);
1158 #endif
1159 /* load from memory */
1160 vtop->type.t = load_type;
1161 load(r, vtop);
1162 vdup();
1163 vtop[-1].r = r; /* save register value */
1164 /* increment pointer to get second word */
1165 vtop->type.t = addr_type;
1166 gaddrof();
1167 vpushi(load_size);
1168 gen_op('+');
1169 vtop->r |= VT_LVAL;
1170 vtop->type.t = load_type;
1171 } else {
1172 /* move registers */
1173 load(r, vtop);
1174 vdup();
1175 vtop[-1].r = r; /* save register value */
1176 vtop->r = vtop[-1].r2;
1178 /* Allocate second register. Here we rely on the fact that
1179 get_reg() tries first to free r2 of an SValue. */
1180 r2 = get_reg(rc2);
1181 load(r2, vtop);
1182 vpop();
1183 /* write second register */
1184 vtop->r2 = r2;
1185 vtop->type.t = original_type;
1186 } else if ((vtop->r & VT_LVAL) && !is_float(vtop->type.t)) {
1187 int t1, t;
1188 /* lvalue of scalar type : need to use lvalue type
1189 because of possible cast */
1190 t = vtop->type.t;
1191 t1 = t;
1192 /* compute memory access type */
1193 if (vtop->r & VT_REF)
1194 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1195 t = VT_PTR;
1196 #else
1197 t = VT_INT;
1198 #endif
1199 else if (vtop->r & VT_LVAL_BYTE)
1200 t = VT_BYTE;
1201 else if (vtop->r & VT_LVAL_SHORT)
1202 t = VT_SHORT;
1203 if (vtop->r & VT_LVAL_UNSIGNED)
1204 t |= VT_UNSIGNED;
1205 vtop->type.t = t;
1206 load(r, vtop);
1207 /* restore wanted type */
1208 vtop->type.t = t1;
1209 } else {
1210 /* one register type load */
1211 load(r, vtop);
1214 vtop->r = r;
1215 #ifdef TCC_TARGET_C67
1216 /* uses register pairs for doubles */
1217 if ((vtop->type.t & VT_BTYPE) == VT_DOUBLE)
1218 vtop->r2 = r+1;
1219 #endif
1221 return r;
1224 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1225 ST_FUNC void gv2(int rc1, int rc2)
1227 int v;
1229 /* generate more generic register first. But VT_JMP or VT_CMP
1230 values must be generated first in all cases to avoid possible
1231 reload errors */
1232 v = vtop[0].r & VT_VALMASK;
1233 if (v != VT_CMP && (v & ~1) != VT_JMP && rc1 <= rc2) {
1234 vswap();
1235 gv(rc1);
1236 vswap();
1237 gv(rc2);
1238 /* test if reload is needed for first register */
1239 if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
1240 vswap();
1241 gv(rc1);
1242 vswap();
1244 } else {
1245 gv(rc2);
1246 vswap();
1247 gv(rc1);
1248 vswap();
1249 /* test if reload is needed for first register */
1250 if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
1251 gv(rc2);
1256 #ifndef TCC_TARGET_ARM64
1257 /* wrapper around RC_FRET to return a register by type */
1258 static int rc_fret(int t)
1260 #ifdef TCC_TARGET_X86_64
1261 if (t == VT_LDOUBLE) {
1262 return RC_ST0;
1264 #endif
1265 return RC_FRET;
1267 #endif
1269 /* wrapper around REG_FRET to return a register by type */
1270 static int reg_fret(int t)
1272 #ifdef TCC_TARGET_X86_64
1273 if (t == VT_LDOUBLE) {
1274 return TREG_ST0;
1276 #endif
1277 return REG_FRET;
1280 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1281 /* expand 64bit on stack in two ints */
1282 static void lexpand(void)
1284 int u, v;
1285 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1286 v = vtop->r & (VT_VALMASK | VT_LVAL);
1287 if (v == VT_CONST) {
1288 vdup();
1289 vtop[0].c.i >>= 32;
1290 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1291 vdup();
1292 vtop[0].c.i += 4;
1293 } else {
1294 gv(RC_INT);
1295 vdup();
1296 vtop[0].r = vtop[-1].r2;
1297 vtop[0].r2 = vtop[-1].r2 = VT_CONST;
1299 vtop[0].type.t = vtop[-1].type.t = VT_INT | u;
1301 #endif
1303 #ifdef TCC_TARGET_ARM
1304 /* expand long long on stack */
1305 ST_FUNC void lexpand_nr(void)
1307 int u,v;
1309 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1310 vdup();
1311 vtop->r2 = VT_CONST;
1312 vtop->type.t = VT_INT | u;
1313 v=vtop[-1].r & (VT_VALMASK | VT_LVAL);
1314 if (v == VT_CONST) {
1315 vtop[-1].c.i = vtop->c.i;
1316 vtop->c.i = vtop->c.i >> 32;
1317 vtop->r = VT_CONST;
1318 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1319 vtop->c.i += 4;
1320 vtop->r = vtop[-1].r;
1321 } else if (v > VT_CONST) {
1322 vtop--;
1323 lexpand();
1324 } else
1325 vtop->r = vtop[-1].r2;
1326 vtop[-1].r2 = VT_CONST;
1327 vtop[-1].type.t = VT_INT | u;
1329 #endif
1331 #if !defined(TCC_TARGET_X86_64) && !defined(TCC_TARGET_ARM64)
1332 /* build a long long from two ints */
1333 static void lbuild(int t)
1335 gv2(RC_INT, RC_INT);
1336 vtop[-1].r2 = vtop[0].r;
1337 vtop[-1].type.t = t;
1338 vpop();
1340 #endif
1342 /* convert stack entry to register and duplicate its value in another
1343 register */
1344 static void gv_dup(void)
1346 int rc, t, r, r1;
1347 SValue sv;
1349 t = vtop->type.t;
1350 #if !defined(TCC_TARGET_X86_64) && !defined(TCC_TARGET_ARM64)
1351 if ((t & VT_BTYPE) == VT_LLONG) {
1352 lexpand();
1353 gv_dup();
1354 vswap();
1355 vrotb(3);
1356 gv_dup();
1357 vrotb(4);
1358 /* stack: H L L1 H1 */
1359 lbuild(t);
1360 vrotb(3);
1361 vrotb(3);
1362 vswap();
1363 lbuild(t);
1364 vswap();
1365 } else
1366 #endif
1368 /* duplicate value */
1369 rc = RC_INT;
1370 sv.type.t = VT_INT;
1371 if (is_float(t)) {
1372 rc = RC_FLOAT;
1373 #ifdef TCC_TARGET_X86_64
1374 if ((t & VT_BTYPE) == VT_LDOUBLE) {
1375 rc = RC_ST0;
1377 #endif
1378 sv.type.t = t;
1380 r = gv(rc);
1381 r1 = get_reg(rc);
1382 sv.r = r;
1383 sv.c.i = 0;
1384 load(r1, &sv); /* move r to r1 */
1385 vdup();
1386 /* duplicates value */
1387 if (r != r1)
1388 vtop->r = r1;
1392 /* Generate value test
1394 * Generate a test for any value (jump, comparison and integers) */
1395 ST_FUNC int gvtst(int inv, int t)
1397 int v = vtop->r & VT_VALMASK;
1398 if (v != VT_CMP && v != VT_JMP && v != VT_JMPI) {
1399 vpushi(0);
1400 gen_op(TOK_NE);
1402 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1403 /* constant jmp optimization */
1404 if ((vtop->c.i != 0) != inv)
1405 t = gjmp(t);
1406 vtop--;
1407 return t;
1409 return gtst(inv, t);
1412 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1413 /* generate CPU independent (unsigned) long long operations */
1414 static void gen_opl(int op)
1416 int t, a, b, op1, c, i;
1417 int func;
1418 unsigned short reg_iret = REG_IRET;
1419 unsigned short reg_lret = REG_LRET;
1420 SValue tmp;
1422 switch(op) {
1423 case '/':
1424 case TOK_PDIV:
1425 func = TOK___divdi3;
1426 goto gen_func;
1427 case TOK_UDIV:
1428 func = TOK___udivdi3;
1429 goto gen_func;
1430 case '%':
1431 func = TOK___moddi3;
1432 goto gen_mod_func;
1433 case TOK_UMOD:
1434 func = TOK___umoddi3;
1435 gen_mod_func:
1436 #ifdef TCC_ARM_EABI
1437 reg_iret = TREG_R2;
1438 reg_lret = TREG_R3;
1439 #endif
1440 gen_func:
1441 /* call generic long long function */
1442 vpush_global_sym(&func_old_type, func);
1443 vrott(3);
1444 gfunc_call(2);
1445 vpushi(0);
1446 vtop->r = reg_iret;
1447 vtop->r2 = reg_lret;
1448 break;
1449 case '^':
1450 case '&':
1451 case '|':
1452 case '*':
1453 case '+':
1454 case '-':
1455 //pv("gen_opl A",0,2);
1456 t = vtop->type.t;
1457 vswap();
1458 lexpand();
1459 vrotb(3);
1460 lexpand();
1461 /* stack: L1 H1 L2 H2 */
1462 tmp = vtop[0];
1463 vtop[0] = vtop[-3];
1464 vtop[-3] = tmp;
1465 tmp = vtop[-2];
1466 vtop[-2] = vtop[-3];
1467 vtop[-3] = tmp;
1468 vswap();
1469 /* stack: H1 H2 L1 L2 */
1470 //pv("gen_opl B",0,4);
1471 if (op == '*') {
1472 vpushv(vtop - 1);
1473 vpushv(vtop - 1);
1474 gen_op(TOK_UMULL);
1475 lexpand();
1476 /* stack: H1 H2 L1 L2 ML MH */
1477 for(i=0;i<4;i++)
1478 vrotb(6);
1479 /* stack: ML MH H1 H2 L1 L2 */
1480 tmp = vtop[0];
1481 vtop[0] = vtop[-2];
1482 vtop[-2] = tmp;
1483 /* stack: ML MH H1 L2 H2 L1 */
1484 gen_op('*');
1485 vrotb(3);
1486 vrotb(3);
1487 gen_op('*');
1488 /* stack: ML MH M1 M2 */
1489 gen_op('+');
1490 gen_op('+');
1491 } else if (op == '+' || op == '-') {
1492 /* XXX: add non carry method too (for MIPS or alpha) */
1493 if (op == '+')
1494 op1 = TOK_ADDC1;
1495 else
1496 op1 = TOK_SUBC1;
1497 gen_op(op1);
1498 /* stack: H1 H2 (L1 op L2) */
1499 vrotb(3);
1500 vrotb(3);
1501 gen_op(op1 + 1); /* TOK_xxxC2 */
1502 } else {
1503 gen_op(op);
1504 /* stack: H1 H2 (L1 op L2) */
1505 vrotb(3);
1506 vrotb(3);
1507 /* stack: (L1 op L2) H1 H2 */
1508 gen_op(op);
1509 /* stack: (L1 op L2) (H1 op H2) */
1511 /* stack: L H */
1512 lbuild(t);
1513 break;
1514 case TOK_SAR:
1515 case TOK_SHR:
1516 case TOK_SHL:
1517 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1518 t = vtop[-1].type.t;
1519 vswap();
1520 lexpand();
1521 vrotb(3);
1522 /* stack: L H shift */
1523 c = (int)vtop->c.i;
1524 /* constant: simpler */
1525 /* NOTE: all comments are for SHL. The other cases are
1526 done by swapping words */
1527 vpop();
1528 if (op != TOK_SHL)
1529 vswap();
1530 if (c >= 32) {
1531 /* stack: L H */
1532 vpop();
1533 if (c > 32) {
1534 vpushi(c - 32);
1535 gen_op(op);
1537 if (op != TOK_SAR) {
1538 vpushi(0);
1539 } else {
1540 gv_dup();
1541 vpushi(31);
1542 gen_op(TOK_SAR);
1544 vswap();
1545 } else {
1546 vswap();
1547 gv_dup();
1548 /* stack: H L L */
1549 vpushi(c);
1550 gen_op(op);
1551 vswap();
1552 vpushi(32 - c);
1553 if (op == TOK_SHL)
1554 gen_op(TOK_SHR);
1555 else
1556 gen_op(TOK_SHL);
1557 vrotb(3);
1558 /* stack: L L H */
1559 vpushi(c);
1560 if (op == TOK_SHL)
1561 gen_op(TOK_SHL);
1562 else
1563 gen_op(TOK_SHR);
1564 gen_op('|');
1566 if (op != TOK_SHL)
1567 vswap();
1568 lbuild(t);
1569 } else {
1570 /* XXX: should provide a faster fallback on x86 ? */
1571 switch(op) {
1572 case TOK_SAR:
1573 func = TOK___ashrdi3;
1574 goto gen_func;
1575 case TOK_SHR:
1576 func = TOK___lshrdi3;
1577 goto gen_func;
1578 case TOK_SHL:
1579 func = TOK___ashldi3;
1580 goto gen_func;
1583 break;
1584 default:
1585 /* compare operations */
1586 t = vtop->type.t;
1587 vswap();
1588 lexpand();
1589 vrotb(3);
1590 lexpand();
1591 /* stack: L1 H1 L2 H2 */
1592 tmp = vtop[-1];
1593 vtop[-1] = vtop[-2];
1594 vtop[-2] = tmp;
1595 /* stack: L1 L2 H1 H2 */
1596 /* compare high */
1597 op1 = op;
1598 /* when values are equal, we need to compare low words. Since
1599 the jump is inverted, we invert the test too. */
1600 if (op1 == TOK_LT)
1601 op1 = TOK_LE;
1602 else if (op1 == TOK_GT)
1603 op1 = TOK_GE;
1604 else if (op1 == TOK_ULT)
1605 op1 = TOK_ULE;
1606 else if (op1 == TOK_UGT)
1607 op1 = TOK_UGE;
1608 a = 0;
1609 b = 0;
1610 gen_op(op1);
1611 if (op1 != TOK_NE) {
1612 a = gvtst(1, 0);
1614 if (op != TOK_EQ) {
1615 /* generate non equal test */
1616 /* XXX: NOT PORTABLE yet */
1617 if (op1 == TOK_NE) {
1618 b = gvtst(0, 0);
1619 } else {
1620 #if defined(TCC_TARGET_I386)
1621 b = gjmp2(0x850f, 0);
1622 #elif defined(TCC_TARGET_ARM)
1623 b = ind;
1624 o(0x1A000000 | encbranch(ind, 0, 1));
1625 #else
1626 tcc_error("not implemented");
1627 #endif
1630 /* compare low. Always unsigned */
1631 op1 = op;
1632 if (op1 == TOK_LT)
1633 op1 = TOK_ULT;
1634 else if (op1 == TOK_LE)
1635 op1 = TOK_ULE;
1636 else if (op1 == TOK_GT)
1637 op1 = TOK_UGT;
1638 else if (op1 == TOK_GE)
1639 op1 = TOK_UGE;
1640 gen_op(op1);
1641 a = gvtst(1, a);
1642 gsym(b);
1643 vseti(VT_JMPI, a);
1644 break;
1647 #endif
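/* signed 64-bit division implemented on unsigned operands by dividing the
   magnitudes and restoring the sign from the operands' sign bits */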
1649 static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
1651 uint64_t x = (a >> 63 ? -a : a) / (b >> 63 ? -b : b);
1652 return (a ^ b) >> 63 ? -x : x;
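/* signed 64-bit 'less than' on unsigned operands: flipping the sign bit
   maps signed order onto unsigned order */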
1655 static int gen_opic_lt(uint64_t a, uint64_t b)
1657 return (a ^ (uint64_t)1 << 63) < (b ^ (uint64_t)1 << 63);
1660 /* handle integer constant optimizations and various machine
1661 independent opt */
1662 static void gen_opic(int op)
1664 SValue *v1 = vtop - 1;
1665 SValue *v2 = vtop;
1666 int t1 = v1->type.t & VT_BTYPE;
1667 int t2 = v2->type.t & VT_BTYPE;
1668 int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1669 int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1670 uint64_t l1 = c1 ? v1->c.i : 0;
1671 uint64_t l2 = c2 ? v2->c.i : 0;
1672 int shm = (t1 == VT_LLONG) ? 63 : 31;
1674 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
1675 l1 = ((uint32_t)l1 |
1676 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
1677 if (t2 != VT_LLONG && (PTR_SIZE != 8 || t2 != VT_PTR))
1678 l2 = ((uint32_t)l2 |
1679 (v2->type.t & VT_UNSIGNED ? 0 : -(l2 & 0x80000000)));
1681 if (c1 && c2) {
1682 switch(op) {
1683 case '+': l1 += l2; break;
1684 case '-': l1 -= l2; break;
1685 case '&': l1 &= l2; break;
1686 case '^': l1 ^= l2; break;
1687 case '|': l1 |= l2; break;
1688 case '*': l1 *= l2; break;
1690 case TOK_PDIV:
1691 case '/':
1692 case '%':
1693 case TOK_UDIV:
1694 case TOK_UMOD:
1695 /* if division by zero, generate explicit division */
1696 if (l2 == 0) {
1697 if (const_wanted)
1698 tcc_error("division by zero in constant");
1699 goto general_case;
1701 switch(op) {
1702 default: l1 = gen_opic_sdiv(l1, l2); break;
1703 case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
1704 case TOK_UDIV: l1 = l1 / l2; break;
1705 case TOK_UMOD: l1 = l1 % l2; break;
1707 break;
1708 case TOK_SHL: l1 <<= (l2 & shm); break;
1709 case TOK_SHR: l1 >>= (l2 & shm); break;
1710 case TOK_SAR:
1711 l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
1712 break;
1713 /* tests */
1714 case TOK_ULT: l1 = l1 < l2; break;
1715 case TOK_UGE: l1 = l1 >= l2; break;
1716 case TOK_EQ: l1 = l1 == l2; break;
1717 case TOK_NE: l1 = l1 != l2; break;
1718 case TOK_ULE: l1 = l1 <= l2; break;
1719 case TOK_UGT: l1 = l1 > l2; break;
1720 case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
1721 case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
1722 case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
1723 case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
1724 /* logical */
1725 case TOK_LAND: l1 = l1 && l2; break;
1726 case TOK_LOR: l1 = l1 || l2; break;
1727 default:
1728 goto general_case;
1730 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
1731 l1 = ((uint32_t)l1 |
1732 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
1733 v1->c.i = l1;
1734 vtop--;
1735 } else {
1736 /* if commutative ops, put c2 as constant */
1737 if (c1 && (op == '+' || op == '&' || op == '^' ||
1738 op == '|' || op == '*')) {
1739 vswap();
1740 c2 = c1; //c = c1, c1 = c2, c2 = c;
1741 l2 = l1; //l = l1, l1 = l2, l2 = l;
1743 if (!const_wanted &&
1744 c1 && ((l1 == 0 &&
1745 (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
1746 (l1 == -1 && op == TOK_SAR))) {
1747 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
1748 vtop--;
1749 } else if (!const_wanted &&
1750 c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
1751 (l2 == -1 && op == '|') ||
1752 (l2 == 0xffffffff && t2 != VT_LLONG && op == '|') ||
1753 (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
1754 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
1755 if (l2 == 1)
1756 vtop->c.i = 0;
1757 vswap();
1758 vtop--;
1759 } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
1760 op == TOK_PDIV) &&
1761 l2 == 1) ||
1762 ((op == '+' || op == '-' || op == '|' || op == '^' ||
1763 op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
1764 l2 == 0) ||
1765 (op == '&' &&
1766 l2 == -1))) {
1767 /* filter out NOP operations like x*1, x-0, x&-1... */
1768 vtop--;
1769 } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
1770 /* try to use shifts instead of muls or divs */
1771 if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
1772 int n = -1;
1773 while (l2) {
1774 l2 >>= 1;
1775 n++;
1777 vtop->c.i = n;
1778 if (op == '*')
1779 op = TOK_SHL;
1780 else if (op == TOK_PDIV)
1781 op = TOK_SAR;
1782 else
1783 op = TOK_SHR;
1785 goto general_case;
1786 } else if (c2 && (op == '+' || op == '-') &&
1787 (((vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM))
1788 || (vtop[-1].r & (VT_VALMASK | VT_LVAL)) == VT_LOCAL)) {
1789 /* symbol + constant case */
1790 if (op == '-')
1791 l2 = -l2;
1792 l2 += vtop[-1].c.i;
1793 /* The backends can't always deal with addends to symbols
1794 larger than +-1<<31. Don't construct such. */
1795 if ((int)l2 != l2)
1796 goto general_case;
1797 vtop--;
1798 vtop->c.i = l2;
1799 } else {
1800 general_case:
1801 /* call low level op generator */
1802 if (t1 == VT_LLONG || t2 == VT_LLONG ||
1803 (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
1804 gen_opl(op);
1805 else
1806 gen_opi(op);
1811 /* generate a floating point operation with constant propagation */
1812 static void gen_opif(int op)
1814 int c1, c2;
1815 SValue *v1, *v2;
1816 long double f1, f2;
1818 v1 = vtop - 1;
1819 v2 = vtop;
1820 /* currently, we cannot do computations with forward symbols */
1821 c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1822 c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1823 if (c1 && c2) {
1824 if (v1->type.t == VT_FLOAT) {
1825 f1 = v1->c.f;
1826 f2 = v2->c.f;
1827 } else if (v1->type.t == VT_DOUBLE) {
1828 f1 = v1->c.d;
1829 f2 = v2->c.d;
1830 } else {
1831 f1 = v1->c.ld;
1832 f2 = v2->c.ld;
1835 /* NOTE: we only do constant propagation if finite number (not
1836 NaN or infinity) (ANSI spec) */
1837 if (!ieee_finite(f1) || !ieee_finite(f2))
1838 goto general_case;
1840 switch(op) {
1841 case '+': f1 += f2; break;
1842 case '-': f1 -= f2; break;
1843 case '*': f1 *= f2; break;
1844 case '/':
1845 if (f2 == 0.0) {
1846 if (const_wanted)
1847 tcc_error("division by zero in constant");
1848 goto general_case;
1850 f1 /= f2;
1851 break;
1852 /* XXX: also handles tests ? */
1853 default:
1854 goto general_case;
1856 /* XXX: overflow test ? */
1857 if (v1->type.t == VT_FLOAT) {
1858 v1->c.f = f1;
1859 } else if (v1->type.t == VT_DOUBLE) {
1860 v1->c.d = f1;
1861 } else {
1862 v1->c.ld = f1;
1864 vtop--;
1865 } else {
1866 general_case:
1867 gen_opf(op);
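/* return the size in bytes of the type pointed to by 'type' */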
1871 static int pointed_size(CType *type)
1873 int align;
1874 return type_size(pointed_type(type), &align);
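/* like pointed_size(), but for pointed-to VLA types whose size is only
   known at run time */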
1877 static void vla_runtime_pointed_size(CType *type)
1879 int align;
1880 vla_runtime_type_size(pointed_type(type), &align);
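/* return non-zero if 'p' is a null pointer constant */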
1883 static inline int is_null_pointer(SValue *p)
1885 if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
1886 return 0;
1887 return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
1888 ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
1889 ((p->type.t & VT_BTYPE) == VT_PTR &&
1890 (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0));
1893 static inline int is_integer_btype(int bt)
1895 return (bt == VT_BYTE || bt == VT_SHORT ||
1896 bt == VT_INT || bt == VT_LLONG);
1899 /* check types for comparison or subtraction of pointers */
1900 static void check_comparison_pointer_types(SValue *p1, SValue *p2, int op)
1902 CType *type1, *type2, tmp_type1, tmp_type2;
1903 int bt1, bt2;
1905 /* null pointers are accepted for all comparisons, as in gcc */
1906 if (is_null_pointer(p1) || is_null_pointer(p2))
1907 return;
1908 type1 = &p1->type;
1909 type2 = &p2->type;
1910 bt1 = type1->t & VT_BTYPE;
1911 bt2 = type2->t & VT_BTYPE;
1912 /* accept comparison between pointer and integer with a warning */
1913 if ((is_integer_btype(bt1) || is_integer_btype(bt2)) && op != '-') {
1914 if (op != TOK_LOR && op != TOK_LAND )
1915 tcc_warning("comparison between pointer and integer");
1916 return;
1919 /* both must be pointers or implicit function pointers */
1920 if (bt1 == VT_PTR) {
1921 type1 = pointed_type(type1);
1922 } else if (bt1 != VT_FUNC)
1923 goto invalid_operands;
1925 if (bt2 == VT_PTR) {
1926 type2 = pointed_type(type2);
1927 } else if (bt2 != VT_FUNC) {
1928 invalid_operands:
1929 tcc_error("invalid operands to binary %s", get_tok_str(op, NULL));
1931 if ((type1->t & VT_BTYPE) == VT_VOID ||
1932 (type2->t & VT_BTYPE) == VT_VOID)
1933 return;
1934 tmp_type1 = *type1;
1935 tmp_type2 = *type2;
1936 tmp_type1.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
1937 tmp_type2.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
1938 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
1939 /* gcc-like error if '-' is used */
1940 if (op == '-')
1941 goto invalid_operands;
1942 else
1943 tcc_warning("comparison of distinct pointer types lacks a cast");
1947 /* generic gen_op: handles types problems */
1948 ST_FUNC void gen_op(int op)
1950 int u, t1, t2, bt1, bt2, t;
1951 CType type1;
1953 redo:
1954 t1 = vtop[-1].type.t;
1955 t2 = vtop[0].type.t;
1956 bt1 = t1 & VT_BTYPE;
1957 bt2 = t2 & VT_BTYPE;
1959 if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
1960 tcc_error("operation on a struct");
1961 } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
1962 if (bt2 == VT_FUNC) {
1963 mk_pointer(&vtop->type);
1964 gaddrof();
1966 if (bt1 == VT_FUNC) {
1967 vswap();
1968 mk_pointer(&vtop->type);
1969 gaddrof();
1970 vswap();
1972 goto redo;
1973 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
1974 /* at least one operand is a pointer */
1975 /* relational op: both operands must be pointers */
1976 if (op >= TOK_ULT && op <= TOK_LOR) {
1977 check_comparison_pointer_types(vtop - 1, vtop, op);
1978 /* pointers are handled as unsigned */
1979 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1980 t = VT_LLONG | VT_UNSIGNED;
1981 #else
1982 t = VT_INT | VT_UNSIGNED;
1983 #endif
1984 goto std_op;
1986 /* if both pointers, then it must be the '-' op */
1987 if (bt1 == VT_PTR && bt2 == VT_PTR) {
1988 if (op != '-')
1989 tcc_error("cannot use pointers here");
1990 check_comparison_pointer_types(vtop - 1, vtop, op);
1991 /* XXX: check that types are compatible */
1992 if (vtop[-1].type.t & VT_VLA) {
1993 vla_runtime_pointed_size(&vtop[-1].type);
1994 } else {
1995 vpushi(pointed_size(&vtop[-1].type));
1997 vrott(3);
1998 gen_opic(op);
1999 /* set to integer type */
2000 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2001 vtop->type.t = VT_LLONG;
2002 #else
2003 vtop->type.t = VT_INT;
2004 #endif
2005 vswap();
2006 gen_op(TOK_PDIV);
2007 } else {
2008 /* exactly one pointer : must be '+' or '-'. */
2009 if (op != '-' && op != '+')
2010 tcc_error("cannot use pointers here");
2011 /* Put pointer as first operand */
2012 if (bt2 == VT_PTR) {
2013 vswap();
2014 t = t1, t1 = t2, t2 = t;
2016 #if PTR_SIZE == 4
2017 if ((vtop[0].type.t & VT_BTYPE) == VT_LLONG)
2018 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2019 gen_cast(&int_type);
2020 #endif
2021 type1 = vtop[-1].type;
2022 type1.t &= ~VT_ARRAY;
2023 if (vtop[-1].type.t & VT_VLA)
2024 vla_runtime_pointed_size(&vtop[-1].type);
2025 else {
2026 u = pointed_size(&vtop[-1].type);
2027 if (u < 0)
2028 tcc_error("unknown array element size");
2029 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2030 vpushll(u);
2031 #else
2032 /* XXX: cast to int ? (long long case) */
2033 vpushi(u);
2034 #endif
2036 gen_op('*');
2037 #if 0
2038 /* #ifdef CONFIG_TCC_BCHECK
2039 The main reason for removing this code:
2040 #include <stdio.h>
2041 int main ()
2043 int v[10];
2044 int i = 10;
2045 int j = 9;
2046 fprintf(stderr, "v+i-j = %p\n", v+i-j);
2047 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
2049 When this code is enabled, the output looks like
2050 v+i-j = 0xfffffffe
2051 v+(i-j) = 0xbff84000
2053 /* if evaluating constant expression, no code should be
2054 generated, so no bound check */
2055 if (tcc_state->do_bounds_check && !const_wanted) {
2056 /* if bounded pointers, we generate a special code to
2057 test bounds */
2058 if (op == '-') {
2059 vpushi(0);
2060 vswap();
2061 gen_op('-');
2063 gen_bounded_ptr_add();
2064 } else
2065 #endif
2067 gen_opic(op);
2069 /* restore the type in case gen_opic() swapped the operands */
2070 vtop->type = type1;
2072 } else if (is_float(bt1) || is_float(bt2)) {
2073 /* compute bigger type and do implicit casts */
2074 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
2075 t = VT_LDOUBLE;
2076 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
2077 t = VT_DOUBLE;
2078 } else {
2079 t = VT_FLOAT;
2081 /* floats can only be used for a few operations */
2082 if (op != '+' && op != '-' && op != '*' && op != '/' &&
2083 (op < TOK_ULT || op > TOK_GT))
2084 tcc_error("invalid operands for binary operation");
2085 goto std_op;
2086 } else if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL) {
2087 t = bt1 == VT_LLONG ? VT_LLONG : VT_INT;
2088 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (t | VT_UNSIGNED))
2089 t |= VT_UNSIGNED;
2090 goto std_op;
2091 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
2092 /* cast to biggest op */
2093 t = VT_LLONG;
2094 /* convert to unsigned if it does not fit in a long long */
2095 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED) ||
2096 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED))
2097 t |= VT_UNSIGNED;
2098 goto std_op;
2099 } else {
2100 /* integer operations */
2101 t = VT_INT;
2102 /* convert to unsigned if it does not fit in an integer */
2103 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED) ||
2104 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED))
2105 t |= VT_UNSIGNED;
2106 std_op:
2107 /* XXX: currently, some unsigned operations are explicit, so
2108 we modify them here */
2109 if (t & VT_UNSIGNED) {
2110 if (op == TOK_SAR)
2111 op = TOK_SHR;
2112 else if (op == '/')
2113 op = TOK_UDIV;
2114 else if (op == '%')
2115 op = TOK_UMOD;
2116 else if (op == TOK_LT)
2117 op = TOK_ULT;
2118 else if (op == TOK_GT)
2119 op = TOK_UGT;
2120 else if (op == TOK_LE)
2121 op = TOK_ULE;
2122 else if (op == TOK_GE)
2123 op = TOK_UGE;
2125 vswap();
2126 type1.t = t;
2127 gen_cast(&type1);
2128 vswap();
2129 /* special case for shifts and long long: we keep the shift as
2130 an integer */
2131 if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
2132 type1.t = VT_INT;
2133 gen_cast(&type1);
2134 if (is_float(t))
2135 gen_opif(op);
2136 else
2137 gen_opic(op);
2138 if (op >= TOK_ULT && op <= TOK_GT) {
2139 /* relational op: the result is an int */
2140 vtop->type.t = VT_INT;
2141 } else {
2142 vtop->type.t = t;
2145 // Make sure that we have converted to an rvalue:
2146 if (vtop->r & VT_LVAL)
2147 gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
2150 #ifndef TCC_TARGET_ARM
2151 /* generic itof for unsigned long long case */
2152 static void gen_cvt_itof1(int t)
2154 #ifdef TCC_TARGET_ARM64
2155 gen_cvt_itof(t);
2156 #else
2157 if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
2158 (VT_LLONG | VT_UNSIGNED)) {
2160 if (t == VT_FLOAT)
2161 vpush_global_sym(&func_old_type, TOK___floatundisf);
2162 #if LDOUBLE_SIZE != 8
2163 else if (t == VT_LDOUBLE)
2164 vpush_global_sym(&func_old_type, TOK___floatundixf);
2165 #endif
2166 else
2167 vpush_global_sym(&func_old_type, TOK___floatundidf);
2168 vrott(2);
2169 gfunc_call(1);
2170 vpushi(0);
2171 vtop->r = reg_fret(t);
2172 } else {
2173 gen_cvt_itof(t);
2175 #endif
2177 #endif
2179 /* generic ftoi for unsigned long long case */
2180 static void gen_cvt_ftoi1(int t)
2182 #ifdef TCC_TARGET_ARM64
2183 gen_cvt_ftoi(t);
2184 #else
2185 int st;
2187 if (t == (VT_LLONG | VT_UNSIGNED)) {
2188 /* not handled natively */
2189 st = vtop->type.t & VT_BTYPE;
2190 if (st == VT_FLOAT)
2191 vpush_global_sym(&func_old_type, TOK___fixunssfdi);
2192 #if LDOUBLE_SIZE != 8
2193 else if (st == VT_LDOUBLE)
2194 vpush_global_sym(&func_old_type, TOK___fixunsxfdi);
2195 #endif
2196 else
2197 vpush_global_sym(&func_old_type, TOK___fixunsdfdi);
2198 vrott(2);
2199 gfunc_call(1);
2200 vpushi(0);
2201 vtop->r = REG_IRET;
2202 vtop->r2 = REG_LRET;
2203 } else {
2204 gen_cvt_ftoi(t);
2206 #endif
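#if 0
/* Illustrative sketch (not part of tcc; the 'example_*' names are hypothetical):
   the C-level conversions that gen_cvt_itof1()/gen_cvt_ftoi1() above handle.
   On targets without native unsigned-64-bit <-> float instructions they call
   the runtime helpers pushed above (__floatundidf, __fixunsdfdi, ...). */
double example_u64_to_double(unsigned long long u) { return (double)u; }
unsigned long long example_double_to_u64(double d) { return (unsigned long long)d; }
#endif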
2209 /* force char or short cast */
2210 static void force_charshort_cast(int t)
2212 int bits, dbt;
2213 dbt = t & VT_BTYPE;
2214 /* XXX: add optimization if lvalue : just change type and offset */
2215 if (dbt == VT_BYTE)
2216 bits = 8;
2217 else
2218 bits = 16;
2219 if (t & VT_UNSIGNED) {
2220 vpushi((1 << bits) - 1);
2221 gen_op('&');
2222 } else {
2223 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
2224 bits = 64 - bits;
2225 else
2226 bits = 32 - bits;
2227 vpushi(bits);
2228 gen_op(TOK_SHL);
2229 /* result must be signed or the SAR is converted to an SHR.
2230 This was not the case when "t" was a signed short
2231 and the last value on the stack was an unsigned int */
2232 vtop->type.t &= ~VT_UNSIGNED;
2233 vpushi(bits);
2234 gen_op(TOK_SAR);
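#if 0
/* Illustrative sketch (not part of tcc; 'example_truncate' is hypothetical):
   the mask/shift arithmetic force_charshort_cast() emits, written as plain C
   for a 32-bit int and bits < 32.  Unsigned targets are masked, signed targets
   are shifted up and arithmetically shifted back down to sign-extend. */
static int example_truncate(int x, int bits, int is_unsigned)
{
    if (is_unsigned)
        return x & ((1 << bits) - 1);          /* e.g. (unsigned char)x -> x & 0xff */
    return (x << (32 - bits)) >> (32 - bits);  /* e.g. (signed char)x */
}
#endif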
2238 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2239 static void gen_cast(CType *type)
2241 int sbt, dbt, sf, df, c, p;
2243 /* special delayed cast for char/short */
2244 /* XXX: in some cases (multiple cascaded casts), it may still
2245 be incorrect */
2246 if (vtop->r & VT_MUSTCAST) {
2247 vtop->r &= ~VT_MUSTCAST;
2248 force_charshort_cast(vtop->type.t);
2251 /* bitfields first get cast to ints */
2252 if (vtop->type.t & VT_BITFIELD) {
2253 gv(RC_INT);
2256 dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
2257 sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
2259 if (sbt != dbt) {
2260 sf = is_float(sbt);
2261 df = is_float(dbt);
2262 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2263 p = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM);
2264 if (c) {
2265 /* constant case: we can do it now */
2266 /* XXX: in ISOC, cannot do it if error in convert */
2267 if (sbt == VT_FLOAT)
2268 vtop->c.ld = vtop->c.f;
2269 else if (sbt == VT_DOUBLE)
2270 vtop->c.ld = vtop->c.d;
2272 if (df) {
2273 if ((sbt & VT_BTYPE) == VT_LLONG) {
2274 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
2275 vtop->c.ld = vtop->c.i;
2276 else
2277 vtop->c.ld = -(long double)-vtop->c.i;
2278 } else if(!sf) {
2279 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
2280 vtop->c.ld = (uint32_t)vtop->c.i;
2281 else
2282 vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
2285 if (dbt == VT_FLOAT)
2286 vtop->c.f = (float)vtop->c.ld;
2287 else if (dbt == VT_DOUBLE)
2288 vtop->c.d = (double)vtop->c.ld;
2289 } else if (sf && dbt == (VT_LLONG|VT_UNSIGNED)) {
2290 vtop->c.i = vtop->c.ld;
2291 } else if (sf && dbt == VT_BOOL) {
2292 vtop->c.i = (vtop->c.ld != 0);
2293 } else {
2294 if(sf)
2295 vtop->c.i = vtop->c.ld;
2296 else if (sbt == (VT_LLONG|VT_UNSIGNED))
2297 ;
2298 else if (sbt & VT_UNSIGNED)
2299 vtop->c.i = (uint32_t)vtop->c.i;
2300 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2301 else if (sbt == VT_PTR)
2302 ;
2303 #endif
2304 else if (sbt != VT_LLONG)
2305 vtop->c.i = ((uint32_t)vtop->c.i |
2306 -(vtop->c.i & 0x80000000));
2308 if (dbt == (VT_LLONG|VT_UNSIGNED))
2309 ;
2310 else if (dbt == VT_BOOL)
2311 vtop->c.i = (vtop->c.i != 0);
2312 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2313 else if (dbt == VT_PTR)
2315 #endif
2316 else if (dbt != VT_LLONG) {
2317 uint32_t m = ((dbt & VT_BTYPE) == VT_BYTE ? 0xff :
2318 (dbt & VT_BTYPE) == VT_SHORT ? 0xffff :
2319 0xffffffff);
2320 vtop->c.i &= m;
2321 if (!(dbt & VT_UNSIGNED))
2322 vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
2325 } else if (p && dbt == VT_BOOL) {
2326 vtop->r = VT_CONST;
2327 vtop->c.i = 1;
2328 } else {
2329 /* non constant case: generate code */
2330 if (sf && df) {
2331 /* convert from fp to fp */
2332 gen_cvt_ftof(dbt);
2333 } else if (df) {
2334 /* convert int to fp */
2335 gen_cvt_itof1(dbt);
2336 } else if (sf) {
2337 /* convert fp to int */
2338 if (dbt == VT_BOOL) {
2339 vpushi(0);
2340 gen_op(TOK_NE);
2341 } else {
2342 if (sbt == VT_FLOAT) {
2343 /* cast to DOUBLE to avoid precision loss */
2344 gen_cvt_ftof(VT_DOUBLE);
2345 vtop->type.t = (vtop->type.t & ~VT_BTYPE) | VT_DOUBLE;
2347 /* we handle char/short/etc... with generic code */
2348 if (dbt != (VT_INT | VT_UNSIGNED) &&
2349 dbt != (VT_LLONG | VT_UNSIGNED) &&
2350 dbt != VT_LLONG)
2351 dbt = VT_INT;
2352 gen_cvt_ftoi1(dbt);
2353 if (dbt == VT_INT && (type->t & (VT_BTYPE | VT_UNSIGNED)) != dbt) {
2354 /* additional cast for char/short... */
2355 vtop->type.t = dbt;
2356 gen_cast(type);
2359 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
2360 } else if ((dbt & VT_BTYPE) == VT_LLONG) {
2361 if ((sbt & VT_BTYPE) != VT_LLONG) {
2362 /* scalar to long long */
2363 /* machine independent conversion */
2364 gv(RC_INT);
2365 /* generate high word */
2366 if (sbt == (VT_INT | VT_UNSIGNED)) {
2367 vpushi(0);
2368 gv(RC_INT);
2369 } else {
2370 if (sbt == VT_PTR) {
2371 /* cast from pointer to int before we apply
2372 shift operation, which pointers don't support*/
2373 gen_cast(&int_type);
2375 gv_dup();
2376 vpushi(31);
2377 gen_op(TOK_SAR);
2379 /* patch second register */
2380 vtop[-1].r2 = vtop->r;
2381 vpop();
2383 #else
2384 } else if ((dbt & VT_BTYPE) == VT_LLONG ||
2385 (dbt & VT_BTYPE) == VT_PTR ||
2386 (dbt & VT_BTYPE) == VT_FUNC) {
2387 if ((sbt & VT_BTYPE) != VT_LLONG &&
2388 (sbt & VT_BTYPE) != VT_PTR &&
2389 (sbt & VT_BTYPE) != VT_FUNC) {
2390 /* need to convert from 32bit to 64bit */
2391 gv(RC_INT);
2392 if (sbt != (VT_INT | VT_UNSIGNED)) {
2393 #if defined(TCC_TARGET_ARM64)
2394 gen_cvt_sxtw();
2395 #elif defined(TCC_TARGET_X86_64)
2396 int r = gv(RC_INT);
2397 /* x86_64 specific: movslq */
2398 o(0x6348);
2399 o(0xc0 + (REG_VALUE(r) << 3) + REG_VALUE(r));
2400 #else
2401 #error
2402 #endif
2405 #endif
2406 } else if (dbt == VT_BOOL) {
2407 /* scalar to bool */
2408 vpushi(0);
2409 gen_op(TOK_NE);
2410 } else if ((dbt & VT_BTYPE) == VT_BYTE ||
2411 (dbt & VT_BTYPE) == VT_SHORT) {
2412 if (sbt == VT_PTR) {
2413 vtop->type.t = VT_INT;
2414 tcc_warning("nonportable conversion from pointer to char/short");
2416 force_charshort_cast(dbt);
2417 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
2418 } else if ((dbt & VT_BTYPE) == VT_INT) {
2419 /* scalar to int */
2420 if ((sbt & VT_BTYPE) == VT_LLONG) {
2421 /* from long long: just take low order word */
2422 lexpand();
2423 vpop();
2425 /* if lvalue and single word type, nothing to do because
2426 the lvalue already contains the real type size (see
2427 VT_LVAL_xxx constants) */
2428 #endif
2431 } else if ((dbt & VT_BTYPE) == VT_PTR && !(vtop->r & VT_LVAL)) {
2432 /* if we are casting between pointer types,
2433 we must update the VT_LVAL_xxx size */
2434 vtop->r = (vtop->r & ~VT_LVAL_TYPE)
2435 | (lvalue_type(type->ref->type.t) & VT_LVAL_TYPE);
2437 vtop->type = *type;
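#if 0
/* Illustrative sketch (not part of tcc; 'example_const_narrow' is hypothetical):
   what the constant branch of gen_cast() computes for an integer narrowing
   cast, mirroring the "vtop->c.i &= m; ... |= -(...)" lines above.
   For m == 0xff, example_const_narrow(0x1ff, 0xff, 0) yields -1,
   i.e. (signed char)0x1ff. */
static long long example_const_narrow(long long i, uint32_t m, int is_unsigned)
{
    i &= m;                          /* keep only the destination bits */
    if (!is_unsigned)
        i |= -(i & ((m >> 1) + 1));  /* sign-extend from the top kept bit */
    return i;
}
#endif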
2440 /* return type size as known at compile time. Put alignment at 'a' */
2441 ST_FUNC int type_size(CType *type, int *a)
2443 Sym *s;
2444 int bt;
2446 bt = type->t & VT_BTYPE;
2447 if (bt == VT_STRUCT) {
2448 /* struct/union */
2449 s = type->ref;
2450 *a = s->r;
2451 return s->c;
2452 } else if (bt == VT_PTR) {
2453 if (type->t & VT_ARRAY) {
2454 int ts;
2456 s = type->ref;
2457 ts = type_size(&s->type, a);
2459 if (ts < 0 && s->c < 0)
2460 ts = -ts;
2462 return ts * s->c;
2463 } else {
2464 *a = PTR_SIZE;
2465 return PTR_SIZE;
2467 } else if (bt == VT_LDOUBLE) {
2468 *a = LDOUBLE_ALIGN;
2469 return LDOUBLE_SIZE;
2470 } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
2471 #ifdef TCC_TARGET_I386
2472 #ifdef TCC_TARGET_PE
2473 *a = 8;
2474 #else
2475 *a = 4;
2476 #endif
2477 #elif defined(TCC_TARGET_ARM)
2478 #ifdef TCC_ARM_EABI
2479 *a = 8;
2480 #else
2481 *a = 4;
2482 #endif
2483 #else
2484 *a = 8;
2485 #endif
2486 return 8;
2487 } else if (bt == VT_INT || bt == VT_FLOAT) {
2488 *a = 4;
2489 return 4;
2490 } else if (bt == VT_SHORT) {
2491 *a = 2;
2492 return 2;
2493 } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
2494 *a = 8;
2495 return 16;
2496 } else if (bt == VT_ENUM) {
2497 *a = 4;
2498 /* Enums might be incomplete, so don't just return '4' here. */
2499 return type->ref->c;
2500 } else {
2501 /* char, void, function, _Bool */
2502 *a = 1;
2503 return 1;
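#if 0
/* Illustrative sketch (not part of tcc; 'example_advance' is hypothetical):
   the contract of type_size() as used by its callers: the return value is the
   size in bytes (negative for an incomplete type) and the alignment comes back
   through the pointer argument. */
static int example_advance(int offset, CType *t)
{
    int align, size = type_size(t, &align);
    if (size < 0)
        tcc_error("incomplete type");
    offset = (offset + align - 1) & -align;  /* round up to the alignment */
    return offset + size;
}
#endif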
2507 /* push type size as known at run time on top of value stack. Put
2508 alignment at 'a' */
2509 ST_FUNC void vla_runtime_type_size(CType *type, int *a)
2511 if (type->t & VT_VLA) {
2512 type_size(&type->ref->type, a);
2513 vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
2514 } else {
2515 vpushi(type_size(type, a));
2519 static void vla_sp_restore(void) {
2520 if (vlas_in_scope) {
2521 gen_vla_sp_restore(vla_sp_loc);
2525 static void vla_sp_restore_root(void) {
2526 if (vlas_in_scope) {
2527 gen_vla_sp_restore(vla_sp_root_loc);
2531 /* return the pointed type of t */
2532 static inline CType *pointed_type(CType *type)
2534 return &type->ref->type;
2537 /* modify 'type' so that it becomes a pointer to the original type */
2538 ST_FUNC void mk_pointer(CType *type)
2540 Sym *s;
2541 s = sym_push(SYM_FIELD, type, 0, -1);
2542 type->t = VT_PTR | (type->t & ~VT_TYPE);
2543 type->ref = s;
2546 /* compare function types. OLD functions match any new functions */
2547 static int is_compatible_func(CType *type1, CType *type2)
2549 Sym *s1, *s2;
2551 s1 = type1->ref;
2552 s2 = type2->ref;
2553 if (!is_compatible_types(&s1->type, &s2->type))
2554 return 0;
2555 /* check func_call */
2556 if (s1->a.func_call != s2->a.func_call)
2557 return 0;
2558 /* XXX: not complete */
2559 if (s1->c == FUNC_OLD || s2->c == FUNC_OLD)
2560 return 1;
2561 if (s1->c != s2->c)
2562 return 0;
2563 while (s1 != NULL) {
2564 if (s2 == NULL)
2565 return 0;
2566 if (!is_compatible_parameter_types(&s1->type, &s2->type))
2567 return 0;
2568 s1 = s1->next;
2569 s2 = s2->next;
2571 if (s2)
2572 return 0;
2573 return 1;
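#if 0
/* Illustrative sketch (not part of tcc): declarations that is_compatible_func()
   accepts as matching.  An old-style (FUNC_OLD) declaration is compatible with
   any later prototype, while differing calling conventions or parameter lists
   are rejected. */
int example_fn();             /* unprototyped: matches any parameter list */
int example_fn(int, char *);  /* accepted as a redeclaration of the above */
#endif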
2576 /* return true if type1 and type2 are the same. If unqualified is
2577 true, qualifiers on the types are ignored.
2579 - enums are not checked as gcc __builtin_types_compatible_p () */
2581 static int compare_types(CType *type1, CType *type2, int unqualified)
2583 int bt1, t1, t2;
2585 t1 = type1->t & VT_TYPE;
2586 t2 = type2->t & VT_TYPE;
2587 if (unqualified) {
2588 /* strip qualifiers before comparing */
2589 t1 &= ~(VT_CONSTANT | VT_VOLATILE);
2590 t2 &= ~(VT_CONSTANT | VT_VOLATILE);
2592 /* Default vs. explicit signedness only matters for char */
2593 if ((t1 & VT_BTYPE) != VT_BYTE) {
2594 t1 &= ~VT_DEFSIGN;
2595 t2 &= ~VT_DEFSIGN;
2597 /* An enum is compatible with (unsigned) int. Ideally we would
2598 store the enums signedness in type->ref.a.<some_bit> and
2599 only accept unsigned enums with unsigned int and vice versa.
2600 But one of our callers (gen_assign_cast) always strips VT_UNSIGNED
2601 from pointer target types, so we can't add it here either. */
2602 if ((t1 & VT_BTYPE) == VT_ENUM) {
2603 t1 = VT_INT;
2604 if (type1->ref->a.unsigned_enum)
2605 t1 |= VT_UNSIGNED;
2607 if ((t2 & VT_BTYPE) == VT_ENUM) {
2608 t2 = VT_INT;
2609 if (type2->ref->a.unsigned_enum)
2610 t2 |= VT_UNSIGNED;
2612 /* XXX: bitfields ? */
2613 if (t1 != t2)
2614 return 0;
2615 /* test more complicated cases */
2616 bt1 = t1 & VT_BTYPE;
2617 if (bt1 == VT_PTR) {
2618 type1 = pointed_type(type1);
2619 type2 = pointed_type(type2);
2620 return is_compatible_types(type1, type2);
2621 } else if (bt1 == VT_STRUCT) {
2622 return (type1->ref == type2->ref);
2623 } else if (bt1 == VT_FUNC) {
2624 return is_compatible_func(type1, type2);
2625 } else {
2626 return 1;
2630 /* return true if type1 and type2 are exactly the same (including
2631 qualifiers). */
2633 static int is_compatible_types(CType *type1, CType *type2)
2635 return compare_types(type1,type2,0);
2638 /* return true if type1 and type2 are the same (ignoring qualifiers). */
2640 static int is_compatible_parameter_types(CType *type1, CType *type2)
2642 return compare_types(type1,type2,1);
2645 /* print a type. If 'varstr' is not NULL, then the variable is also
2646 printed in the type */
2647 /* XXX: union */
2648 /* XXX: add array and function pointers */
2649 static void type_to_str(char *buf, int buf_size,
2650 CType *type, const char *varstr)
2652 int bt, v, t;
2653 Sym *s, *sa;
2654 char buf1[256];
2655 const char *tstr;
2657 t = type->t & VT_TYPE;
2658 bt = t & VT_BTYPE;
2659 buf[0] = '\0';
2660 if (t & VT_CONSTANT)
2661 pstrcat(buf, buf_size, "const ");
2662 if (t & VT_VOLATILE)
2663 pstrcat(buf, buf_size, "volatile ");
2664 if ((t & (VT_DEFSIGN | VT_UNSIGNED)) == (VT_DEFSIGN | VT_UNSIGNED))
2665 pstrcat(buf, buf_size, "unsigned ");
2666 else if (t & VT_DEFSIGN)
2667 pstrcat(buf, buf_size, "signed ");
2668 switch(bt) {
2669 case VT_VOID:
2670 tstr = "void";
2671 goto add_tstr;
2672 case VT_BOOL:
2673 tstr = "_Bool";
2674 goto add_tstr;
2675 case VT_BYTE:
2676 tstr = "char";
2677 goto add_tstr;
2678 case VT_SHORT:
2679 tstr = "short";
2680 goto add_tstr;
2681 case VT_INT:
2682 tstr = "int";
2683 goto add_tstr;
2684 case VT_LONG:
2685 tstr = "long";
2686 goto add_tstr;
2687 case VT_LLONG:
2688 tstr = "long long";
2689 goto add_tstr;
2690 case VT_FLOAT:
2691 tstr = "float";
2692 goto add_tstr;
2693 case VT_DOUBLE:
2694 tstr = "double";
2695 goto add_tstr;
2696 case VT_LDOUBLE:
2697 tstr = "long double";
2698 add_tstr:
2699 pstrcat(buf, buf_size, tstr);
2700 break;
2701 case VT_ENUM:
2702 case VT_STRUCT:
2703 if (bt == VT_STRUCT)
2704 tstr = "struct ";
2705 else
2706 tstr = "enum ";
2707 pstrcat(buf, buf_size, tstr);
2708 v = type->ref->v & ~SYM_STRUCT;
2709 if (v >= SYM_FIRST_ANOM)
2710 pstrcat(buf, buf_size, "<anonymous>");
2711 else
2712 pstrcat(buf, buf_size, get_tok_str(v, NULL));
2713 break;
2714 case VT_FUNC:
2715 s = type->ref;
2716 type_to_str(buf, buf_size, &s->type, varstr);
2717 pstrcat(buf, buf_size, "(");
2718 sa = s->next;
2719 while (sa != NULL) {
2720 type_to_str(buf1, sizeof(buf1), &sa->type, NULL);
2721 pstrcat(buf, buf_size, buf1);
2722 sa = sa->next;
2723 if (sa)
2724 pstrcat(buf, buf_size, ", ");
2726 pstrcat(buf, buf_size, ")");
2727 goto no_var;
2728 case VT_PTR:
2729 s = type->ref;
2730 if (t & VT_ARRAY) {
2731 snprintf(buf1, sizeof(buf1), "%s[%ld]", varstr ? varstr : "", s->c);
2732 type_to_str(buf, buf_size, &s->type, buf1);
2733 goto no_var;
2735 pstrcpy(buf1, sizeof(buf1), "*");
2736 if (t & VT_CONSTANT)
2737 pstrcat(buf1, buf_size, "const ");
2738 if (t & VT_VOLATILE)
2739 pstrcat(buf1, buf_size, "volatile ");
2740 if (varstr)
2741 pstrcat(buf1, sizeof(buf1), varstr);
2742 type_to_str(buf, buf_size, &s->type, buf1);
2743 goto no_var;
2745 if (varstr) {
2746 pstrcat(buf, buf_size, " ");
2747 pstrcat(buf, buf_size, varstr);
2749 no_var: ;
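#if 0
/* Illustrative sketch (not part of tcc; 'example_print_type' is hypothetical):
   how type_to_str() is typically used.  For a 'const char *p' declaration it
   produces the string "const char *p"; for function types it prints the return
   type followed by the parenthesised parameter list. */
static void example_print_type(CType *t)
{
    char buf[256];
    type_to_str(buf, sizeof(buf), t, NULL);
    printf("%s\n", buf);
}
#endif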
2752 /* verify type compatibility to store vtop in 'dt' type, and generate
2753 casts if needed. */
2754 static void gen_assign_cast(CType *dt)
2756 CType *st, *type1, *type2, tmp_type1, tmp_type2;
2757 char buf1[256], buf2[256];
2758 int dbt, sbt;
2760 st = &vtop->type; /* source type */
2761 dbt = dt->t & VT_BTYPE;
2762 sbt = st->t & VT_BTYPE;
2763 if (sbt == VT_VOID || dbt == VT_VOID) {
2764 if (sbt == VT_VOID && dbt == VT_VOID)
2765 ; /*
2766 It is Ok if both are void
2767 A test program:
2768 void func1() {}
2769 void func2() {
2770 return func1();
2771 }
2772 gcc accepts this program
2773 */
2774 else
2775 tcc_error("cannot cast from/to void");
2777 if (dt->t & VT_CONSTANT)
2778 tcc_warning("assignment of read-only location");
2779 switch(dbt) {
2780 case VT_PTR:
2781 /* special cases for pointers */
2782 /* '0' can also be a pointer */
2783 if (is_null_pointer(vtop))
2784 goto type_ok;
2785 /* accept implicit pointer to integer cast with warning */
2786 if (is_integer_btype(sbt)) {
2787 tcc_warning("assignment makes pointer from integer without a cast");
2788 goto type_ok;
2790 type1 = pointed_type(dt);
2791 /* a function is implicitly a function pointer */
2792 if (sbt == VT_FUNC) {
2793 if ((type1->t & VT_BTYPE) != VT_VOID &&
2794 !is_compatible_types(pointed_type(dt), st))
2795 tcc_warning("assignment from incompatible pointer type");
2796 goto type_ok;
2798 if (sbt != VT_PTR)
2799 goto error;
2800 type2 = pointed_type(st);
2801 if ((type1->t & VT_BTYPE) == VT_VOID ||
2802 (type2->t & VT_BTYPE) == VT_VOID) {
2803 /* void * can match anything */
2804 } else {
2805 /* exact type match, except for qualifiers */
2806 tmp_type1 = *type1;
2807 tmp_type2 = *type2;
2808 tmp_type1.t &= ~(VT_CONSTANT | VT_VOLATILE);
2809 tmp_type2.t &= ~(VT_CONSTANT | VT_VOLATILE);
2810 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
2811 /* Like GCC, don't warn by default for mere changes
2812 in pointer target signedness. Do warn for different
2813 base types, though, in particular for unsigned enums
2814 and signed int targets. */
2815 if ((tmp_type1.t & (VT_DEFSIGN | VT_UNSIGNED)) !=
2816 (tmp_type2.t & (VT_DEFSIGN | VT_UNSIGNED)) &&
2817 (tmp_type1.t & VT_BTYPE) == (tmp_type2.t & VT_BTYPE))
2818 ;
2819 else
2820 tcc_warning("assignment from incompatible pointer type");
2823 /* check const and volatile */
2824 if ((!(type1->t & VT_CONSTANT) && (type2->t & VT_CONSTANT)) ||
2825 (!(type1->t & VT_VOLATILE) && (type2->t & VT_VOLATILE)))
2826 tcc_warning("assignment discards qualifiers from pointer target type");
2827 break;
2828 case VT_BYTE:
2829 case VT_SHORT:
2830 case VT_INT:
2831 case VT_LLONG:
2832 if (sbt == VT_PTR || sbt == VT_FUNC) {
2833 tcc_warning("assignment makes integer from pointer without a cast");
2834 } else if (sbt == VT_STRUCT) {
2835 goto case_VT_STRUCT;
2837 /* XXX: more tests */
2838 break;
2839 case VT_STRUCT:
2840 case_VT_STRUCT:
2841 tmp_type1 = *dt;
2842 tmp_type2 = *st;
2843 tmp_type1.t &= ~(VT_CONSTANT | VT_VOLATILE);
2844 tmp_type2.t &= ~(VT_CONSTANT | VT_VOLATILE);
2845 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
2846 error:
2847 type_to_str(buf1, sizeof(buf1), st, NULL);
2848 type_to_str(buf2, sizeof(buf2), dt, NULL);
2849 tcc_error("cannot cast '%s' to '%s'", buf1, buf2);
2851 break;
2853 type_ok:
2854 gen_cast(dt);
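#if 0
/* Illustrative sketch (not part of tcc): assignments and the diagnostics
   gen_assign_cast() produces for them when compiling code like this. */
void example_assign_diagnostics(void)
{
    int i = 0; int *p; const int *cp = &i; void *vp = &i;
    p = i;   /* "assignment makes pointer from integer without a cast" */
    i = p;   /* "assignment makes integer from pointer without a cast" */
    p = cp;  /* "assignment discards qualifiers from pointer target type" */
    p = vp;  /* accepted: void * matches any pointer type */
}
#endif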
2857 /* store vtop in lvalue pushed on stack */
2858 ST_FUNC void vstore(void)
2860 int sbt, dbt, ft, r, t, size, align, bit_size, bit_pos, rc, delayed_cast;
2862 ft = vtop[-1].type.t;
2863 sbt = vtop->type.t & VT_BTYPE;
2864 dbt = ft & VT_BTYPE;
2865 if ((((sbt == VT_INT || sbt == VT_SHORT) && dbt == VT_BYTE) ||
2866 (sbt == VT_INT && dbt == VT_SHORT))
2867 && !(vtop->type.t & VT_BITFIELD)) {
2868 /* optimize char/short casts */
2869 delayed_cast = VT_MUSTCAST;
2870 vtop->type.t = (ft & VT_TYPE & ~VT_BITFIELD &
2871 ((1 << VT_STRUCT_SHIFT) - 1));
2872 /* XXX: factorize */
2873 if (ft & VT_CONSTANT)
2874 tcc_warning("assignment of read-only location");
2875 } else {
2876 delayed_cast = 0;
2877 if (!(ft & VT_BITFIELD))
2878 gen_assign_cast(&vtop[-1].type);
2881 if (sbt == VT_STRUCT) {
2882 /* if structure, only generate pointer */
2883 /* structure assignment : generate memcpy */
2884 /* XXX: optimize if small size */
2885 size = type_size(&vtop->type, &align);
2887 /* destination */
2888 vswap();
2889 vtop->type.t = VT_PTR;
2890 gaddrof();
2892 /* address of memcpy() */
2893 #ifdef TCC_ARM_EABI
2894 if(!(align & 7))
2895 vpush_global_sym(&func_old_type, TOK_memcpy8);
2896 else if(!(align & 3))
2897 vpush_global_sym(&func_old_type, TOK_memcpy4);
2898 else
2899 #endif
2900 /* Use memmove, rather than memcpy, as dest and src may be the same: */
2901 vpush_global_sym(&func_old_type, TOK_memmove);
2903 vswap();
2904 /* source */
2905 vpushv(vtop - 2);
2906 vtop->type.t = VT_PTR;
2907 gaddrof();
2908 /* type size */
2909 vpushi(size);
2910 gfunc_call(3);
2912 /* leave source on stack */
2913 } else if (ft & VT_BITFIELD) {
2914 /* bitfield store handling */
2916 /* save lvalue as expression result (example: s.b = s.a = n;) */
2917 vdup(), vtop[-1] = vtop[-2];
2919 bit_pos = (ft >> VT_STRUCT_SHIFT) & 0x3f;
2920 bit_size = (ft >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
2921 /* remove bit field info to avoid loops */
2922 vtop[-1].type.t = ft & ~VT_BITFIELD & ((1 << VT_STRUCT_SHIFT) - 1);
2924 if((ft & VT_BTYPE) == VT_BOOL) {
2925 gen_cast(&vtop[-1].type);
2926 vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
2929 /* duplicate destination */
2930 vdup();
2931 vtop[-1] = vtop[-2];
2933 /* mask and shift source */
2934 if((ft & VT_BTYPE) != VT_BOOL) {
2935 if((ft & VT_BTYPE) == VT_LLONG) {
2936 vpushll((1ULL << bit_size) - 1ULL);
2937 } else {
2938 vpushi((1 << bit_size) - 1);
2940 gen_op('&');
2942 vpushi(bit_pos);
2943 gen_op(TOK_SHL);
2944 /* load destination, mask and or with source */
2945 vswap();
2946 if((ft & VT_BTYPE) == VT_LLONG) {
2947 vpushll(~(((1ULL << bit_size) - 1ULL) << bit_pos));
2948 } else {
2949 vpushi(~(((1 << bit_size) - 1) << bit_pos));
2951 gen_op('&');
2952 gen_op('|');
2953 /* store result */
2954 vstore();
2955 /* ... and discard */
2956 vpop();
2958 } else {
2959 #ifdef CONFIG_TCC_BCHECK
2960 /* bound check case */
2961 if (vtop[-1].r & VT_MUSTBOUND) {
2962 vswap();
2963 gbound();
2964 vswap();
2966 #endif
2967 rc = RC_INT;
2968 if (is_float(ft)) {
2969 rc = RC_FLOAT;
2970 #ifdef TCC_TARGET_X86_64
2971 if ((ft & VT_BTYPE) == VT_LDOUBLE) {
2972 rc = RC_ST0;
2973 } else if ((ft & VT_BTYPE) == VT_QFLOAT) {
2974 rc = RC_FRET;
2976 #endif
2978 r = gv(rc); /* generate value */
2979 /* if lvalue was saved on stack, must read it */
2980 if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
2981 SValue sv;
2982 t = get_reg(RC_INT);
2983 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2984 sv.type.t = VT_PTR;
2985 #else
2986 sv.type.t = VT_INT;
2987 #endif
2988 sv.r = VT_LOCAL | VT_LVAL;
2989 sv.c.i = vtop[-1].c.i;
2990 load(t, &sv);
2991 vtop[-1].r = t | VT_LVAL;
2993 /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
2994 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2995 if (((ft & VT_BTYPE) == VT_QLONG) || ((ft & VT_BTYPE) == VT_QFLOAT)) {
2996 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
2997 #else
2998 if ((ft & VT_BTYPE) == VT_LLONG) {
2999 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
3000 #endif
3001 vtop[-1].type.t = load_type;
3002 store(r, vtop - 1);
3003 vswap();
3004 /* convert to int to increment easily */
3005 vtop->type.t = addr_type;
3006 gaddrof();
3007 vpushi(load_size);
3008 gen_op('+');
3009 vtop->r |= VT_LVAL;
3010 vswap();
3011 vtop[-1].type.t = load_type;
3012 /* XXX: it works because r2 is spilled last ! */
3013 store(vtop->r2, vtop - 1);
3014 } else {
3015 store(r, vtop - 1);
3018 vswap();
3019 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
3020 vtop->r |= delayed_cast;
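#if 0
/* Illustrative sketch (not part of tcc; 'example_bitfield_store' is
   hypothetical): the read-modify-write sequence the bit-field branch of
   vstore() generates, written as plain C for a field of width 'bit_size' at
   bit offset 'bit_pos' inside a 32-bit unit (bit_pos + bit_size < 32). */
static unsigned example_bitfield_store(unsigned word, unsigned val,
                                       int bit_pos, int bit_size)
{
    unsigned mask = ((1u << bit_size) - 1u) << bit_pos;
    return (word & ~mask) | ((val << bit_pos) & mask);
}
#endif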
3024 /* pre/post increment and decrement: 'post' is non-zero for the postfix form, c is the token ++ or -- */
3025 ST_FUNC void inc(int post, int c)
3027 test_lvalue();
3028 vdup(); /* save lvalue */
3029 if (post) {
3030 gv_dup(); /* duplicate value */
3031 vrotb(3);
3032 vrotb(3);
3034 /* add constant */
3035 vpushi(c - TOK_MID);
3036 gen_op('+');
3037 vstore(); /* store value */
3038 if (post)
3039 vpop(); /* if post op, return saved value */
3042 ST_FUNC void parse_mult_str (CString *astr, const char *msg)
3044 /* read the string */
3045 if (tok != TOK_STR)
3046 expect(msg);
3047 cstr_new(astr);
3048 while (tok == TOK_STR) {
3049 /* XXX: add \0 handling too ? */
3050 cstr_cat(astr, tokc.str.data, -1);
3051 next();
3053 cstr_ccat(astr, '\0');
3056 /* If I is >= 1 and a power of two, returns log2(i)+1.
3057 If I is 0 returns 0. */
3058 static int exact_log2p1(int i)
3060 int ret;
3061 if (!i)
3062 return 0;
3063 for (ret = 1; i >= 1 << 8; ret += 8)
3064 i >>= 8;
3065 if (i >= 1 << 4)
3066 ret += 4, i >>= 4;
3067 if (i >= 1 << 2)
3068 ret += 2, i >>= 2;
3069 if (i >= 1 << 1)
3070 ret++;
3071 return ret;
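#if 0
/* Illustrative sketch (not part of tcc; 'example_exact_log2p1' is
   hypothetical): exact_log2p1() encodes an alignment as log2(n)+1 so that 0
   can mean "no explicit alignment"; parse_attribute() below stores the result
   in ad->a.aligned and the alignment is recovered as 1 << (aligned - 1). */
static void example_exact_log2p1(void)
{
    int encoded = exact_log2p1(8);    /* == 4 */
    int decoded = 1 << (encoded - 1); /* == 8 again */
    (void)decoded;
}
#endif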
3074 /* Parse GNUC __attribute__ extension. Currently, the following
3075 extensions are recognized:
3076 - aligned(n) : set data/function alignment.
3077 - packed : force data alignment to 1
3078 - section(x) : generate data/code in this section.
3079 - unused : currently ignored, but may be used someday.
3080 - regparm(n) : pass function parameters in registers (i386 only) */
3082 static void parse_attribute(AttributeDef *ad)
3084 int t, n;
3085 CString astr;
3087 while (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2) {
3088 next();
3089 skip('(');
3090 skip('(');
3091 while (tok != ')') {
3092 if (tok < TOK_IDENT)
3093 expect("attribute name");
3094 t = tok;
3095 next();
3096 switch(t) {
3097 case TOK_SECTION1:
3098 case TOK_SECTION2:
3099 skip('(');
3100 parse_mult_str(&astr, "section name");
3101 ad->section = find_section(tcc_state, (char *)astr.data);
3102 skip(')');
3103 cstr_free(&astr);
3104 break;
3105 case TOK_ALIAS1:
3106 case TOK_ALIAS2:
3107 skip('(');
3108 parse_mult_str(&astr, "alias(\"target\")");
3109 ad->alias_target = /* save string as token, for later */
3110 tok_alloc((char*)astr.data, astr.size-1)->tok;
3111 skip(')');
3112 cstr_free(&astr);
3113 break;
3114 case TOK_VISIBILITY1:
3115 case TOK_VISIBILITY2:
3116 skip('(');
3117 parse_mult_str(&astr,
3118 "visibility(\"default|hidden|internal|protected\")");
3119 if (!strcmp (astr.data, "default"))
3120 ad->a.visibility = STV_DEFAULT;
3121 else if (!strcmp (astr.data, "hidden"))
3122 ad->a.visibility = STV_HIDDEN;
3123 else if (!strcmp (astr.data, "internal"))
3124 ad->a.visibility = STV_INTERNAL;
3125 else if (!strcmp (astr.data, "protected"))
3126 ad->a.visibility = STV_PROTECTED;
3127 else
3128 expect("visibility(\"default|hidden|internal|protected\")");
3129 skip(')');
3130 cstr_free(&astr);
3131 break;
3132 case TOK_ALIGNED1:
3133 case TOK_ALIGNED2:
3134 if (tok == '(') {
3135 next();
3136 n = expr_const();
3137 if (n <= 0 || (n & (n - 1)) != 0)
3138 tcc_error("alignment must be a positive power of two");
3139 skip(')');
3140 } else {
3141 n = MAX_ALIGN;
3143 ad->a.aligned = exact_log2p1(n);
3144 if (n != 1 << (ad->a.aligned - 1))
3145 tcc_error("alignment of %d is larger than implemented", n);
3146 break;
3147 case TOK_PACKED1:
3148 case TOK_PACKED2:
3149 ad->a.packed = 1;
3150 break;
3151 case TOK_WEAK1:
3152 case TOK_WEAK2:
3153 ad->a.weak = 1;
3154 break;
3155 case TOK_UNUSED1:
3156 case TOK_UNUSED2:
3157 /* currently, no need to handle it because tcc does not
3158 track unused objects */
3159 break;
3160 case TOK_NORETURN1:
3161 case TOK_NORETURN2:
3162 /* currently, no need to handle it: tcc does not make
3163 use of the noreturn information */
3164 break;
3165 case TOK_CDECL1:
3166 case TOK_CDECL2:
3167 case TOK_CDECL3:
3168 ad->a.func_call = FUNC_CDECL;
3169 break;
3170 case TOK_STDCALL1:
3171 case TOK_STDCALL2:
3172 case TOK_STDCALL3:
3173 ad->a.func_call = FUNC_STDCALL;
3174 break;
3175 #ifdef TCC_TARGET_I386
3176 case TOK_REGPARM1:
3177 case TOK_REGPARM2:
3178 skip('(');
3179 n = expr_const();
3180 if (n > 3)
3181 n = 3;
3182 else if (n < 0)
3183 n = 0;
3184 if (n > 0)
3185 ad->a.func_call = FUNC_FASTCALL1 + n - 1;
3186 skip(')');
3187 break;
3188 case TOK_FASTCALL1:
3189 case TOK_FASTCALL2:
3190 case TOK_FASTCALL3:
3191 ad->a.func_call = FUNC_FASTCALLW;
3192 break;
3193 #endif
3194 case TOK_MODE:
3195 skip('(');
3196 switch(tok) {
3197 case TOK_MODE_DI:
3198 ad->a.mode = VT_LLONG + 1;
3199 break;
3200 case TOK_MODE_QI:
3201 ad->a.mode = VT_BYTE + 1;
3202 break;
3203 case TOK_MODE_HI:
3204 ad->a.mode = VT_SHORT + 1;
3205 break;
3206 case TOK_MODE_SI:
3207 case TOK_MODE_word:
3208 ad->a.mode = VT_INT + 1;
3209 break;
3210 default:
3211 tcc_warning("__mode__(%s) not supported", get_tok_str(tok, NULL));
3212 break;
3214 next();
3215 skip(')');
3216 break;
3217 case TOK_DLLEXPORT:
3218 ad->a.func_export = 1;
3219 break;
3220 case TOK_DLLIMPORT:
3221 ad->a.func_import = 1;
3222 break;
3223 default:
3224 if (tcc_state->warn_unsupported)
3225 tcc_warning("'%s' attribute ignored", get_tok_str(t, NULL));
3226 /* skip parameters */
3227 if (tok == '(') {
3228 int parenthesis = 0;
3229 do {
3230 if (tok == '(')
3231 parenthesis++;
3232 else if (tok == ')')
3233 parenthesis--;
3234 next();
3235 } while (parenthesis && tok != -1);
3237 break;
3239 if (tok != ',')
3240 break;
3241 next();
3243 skip(')');
3244 skip(')');
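#if 0
/* Illustrative sketch (not part of tcc; the 'example_*' declarations are
   hypothetical): attribute spellings the parser above recognizes and where
   the information ends up. */
static int example_var __attribute__((aligned(16), section(".mydata"))); /* ad->a.aligned, ad->section */
void example_noret(void) __attribute__((noreturn));                      /* accepted, currently ignored */
int example_regparm(int a, int b) __attribute__((regparm(2)));           /* i386 only: ad->a.func_call */
#endif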
3248 static Sym * find_field (CType *type, int v)
3250 Sym *s = type->ref;
3251 v |= SYM_FIELD;
3252 while ((s = s->next) != NULL) {
3253 if ((s->v & SYM_FIELD) &&
3254 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3255 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3256 Sym *ret = find_field (&s->type, v);
3257 if (ret)
3258 return ret;
3260 if (s->v == v)
3261 break;
3263 return s;
3266 static void struct_add_offset (Sym *s, int offset)
3268 while ((s = s->next) != NULL) {
3269 if ((s->v & SYM_FIELD) &&
3270 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3271 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3272 struct_add_offset(s->type.ref, offset);
3273 } else
3274 s->c += offset;
3278 static void struct_layout(CType *type, AttributeDef *ad)
3280 int align, maxalign, offset, c, bit_pos, bt, prevbt, prev_bit_size;
3281 int pcc = !tcc_state->ms_bitfields;
3282 Sym *f;
3283 if (ad->a.aligned)
3284 maxalign = 1 << (ad->a.aligned - 1);
3285 else
3286 maxalign = 1;
3287 offset = 0;
3288 c = 0;
3289 bit_pos = 0;
3290 prevbt = VT_STRUCT; /* make it never match */
3291 prev_bit_size = 0;
3292 for (f = type->ref->next; f; f = f->next) {
3293 int typealign, bit_size;
3294 int size = type_size(&f->type, &typealign);
3295 if (f->type.t & VT_BITFIELD)
3296 bit_size = (f->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
3297 else
3298 bit_size = -1;
3299 if (bit_size == 0 && pcc) {
3300 /* Zero-width bit-fields in PCC mode aren't affected
3301 by any packing (attribute or pragma). */
3302 align = typealign;
3303 } else if (f->r > 1) {
3304 align = f->r;
3305 } else if (ad->a.packed || f->r == 1) {
3306 align = 1;
3307 /* Packed fields or packed records don't let the base type
3308 influence the record's alignment. */
3309 typealign = 1;
3310 } else {
3311 align = typealign;
3313 if (type->ref->type.t != TOK_STRUCT) {
3314 if (pcc && bit_size >= 0)
3315 size = (bit_size + 7) >> 3;
3316 /* Bit position is already zero from our caller. */
3317 offset = 0;
3318 if (size > c)
3319 c = size;
3320 } else if (bit_size < 0) {
3321 int addbytes = pcc ? (bit_pos + 7) >> 3 : 0;
3322 prevbt = VT_STRUCT;
3323 prev_bit_size = 0;
3324 c = (c + addbytes + align - 1) & -align;
3325 offset = c;
3326 if (size > 0)
3327 c += size;
3328 bit_pos = 0;
3329 } else {
3330 /* A bit-field. Layout is more complicated. There are two
3331 options TCC implements: PCC compatible and MS compatible
3332 (PCC compatible is what GCC uses for almost all targets).
3333 In PCC layout the overall size of the struct (in c) is
3334 _excluding_ the current run of bit-fields (that is,
3335 there's at least additional bit_pos bits after c). In
3336 MS layout c does include the current run of bit-fields.
3338 This matters for calculating the natural alignment buckets
3339 in PCC mode. */
3341 /* 'align' will be used to influence the record's alignment,
3342 so it's the max of specified and type alignment, except
3343 in certain cases that depend on the mode. */
3344 if (align < typealign)
3345 align = typealign;
3346 if (pcc) {
3347 /* In PCC layout a non-packed bit-field is placed adjacent
3348 to the preceding bit-fields, except if it would overflow
3349 its container (depending on base type) or it's a zero-width
3350 bit-field. Packed non-zero-width bit-fields always are
3351 placed adjacent. */
3352 int ofs = (c * 8 + bit_pos) % (typealign * 8);
3353 int ofs2 = ofs + bit_size + (typealign * 8) - 1;
3354 if (bit_size == 0 ||
3355 (typealign != 1 &&
3356 (ofs2 / (typealign * 8)) > (size/typealign))) {
3357 c = (c + ((bit_pos + 7) >> 3) + typealign - 1) & -typealign;
3358 bit_pos = 0;
3360 offset = c;
3361 /* In PCC layout named bit-fields influence the alignment
3362 of the containing struct using the base type's alignment,
3363 except for packed fields (which here have correct
3364 align/typealign). */
3365 if ((f->v & SYM_FIRST_ANOM))
3366 align = 1;
3367 } else {
3368 bt = f->type.t & VT_BTYPE;
3369 if ((bit_pos + bit_size > size * 8) ||
3370 (bit_size > 0) == (bt != prevbt)) {
3371 c = (c + typealign - 1) & -typealign;
3372 offset = c;
3373 bit_pos = 0;
3374 /* In MS bitfield mode a bit-field run always uses
3375 at least as many bits as the underlying type.
3376 To start a new run it's also required that this
3377 or the last bit-field had non-zero width. */
3378 if (bit_size || prev_bit_size)
3379 c += size;
3381 /* In MS layout the record's alignment is normally
3382 influenced by the field, except for a zero-width
3383 field at the start of a run (but by further zero-width
3384 fields it is again). */
3385 if (bit_size == 0 && prevbt != bt)
3386 align = 1;
3387 prevbt = bt;
3388 prev_bit_size = bit_size;
3390 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
3391 | (bit_pos << VT_STRUCT_SHIFT);
3392 bit_pos += bit_size;
3393 if (pcc && bit_pos >= size * 8) {
3394 c += size;
3395 bit_pos -= size * 8;
3398 if (align > maxalign)
3399 maxalign = align;
3400 #if 0
3401 printf("set field %s offset=%d c=%d",
3402 get_tok_str(f->v & ~SYM_FIELD, NULL), offset, c);
3403 if (f->type.t & VT_BITFIELD) {
3404 printf(" pos=%d size=%d",
3405 (f->type.t >> VT_STRUCT_SHIFT) & 0x3f,
3406 (f->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f);
3408 printf("\n");
3409 #endif
3411 if (f->v & SYM_FIRST_ANOM && (f->type.t & VT_BTYPE) == VT_STRUCT) {
3412 Sym *ass;
3413 /* An anonymous struct/union. Adjust member offsets
3414 to reflect the real offset of our containing struct.
3415 Also set the offset of this anon member inside
3416 the outer struct to be zero. Via this it
3417 works when accessing the field offset directly
3418 (from base object), as well as when recursing
3419 members in initializer handling. */
3420 int v2 = f->type.ref->v;
3421 if (!(v2 & SYM_FIELD) &&
3422 (v2 & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
3423 Sym **pps;
3424 /* This happens only with MS extensions. The
3425 anon member has a named struct type, so it
3426 potentially is shared with other references.
3427 We need to unshare members so we can modify
3428 them. */
3429 ass = f->type.ref;
3430 f->type.ref = sym_push(anon_sym++ | SYM_FIELD,
3431 &f->type.ref->type, 0,
3432 f->type.ref->c);
3433 pps = &f->type.ref->next;
3434 while ((ass = ass->next) != NULL) {
3435 *pps = sym_push(ass->v, &ass->type, 0, ass->c);
3436 pps = &((*pps)->next);
3438 *pps = NULL;
3440 struct_add_offset(f->type.ref, offset);
3441 f->c = 0;
3442 } else {
3443 f->c = offset;
3446 f->r = 0;
3448 /* store size and alignment */
3449 type->ref->c = (c + (pcc ? (bit_pos + 7) >> 3 : 0)
3450 + maxalign - 1) & -maxalign;
3451 type->ref->r = maxalign;
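#if 0
/* Illustrative sketch (not part of tcc): a layout struct_layout() computes in
   PCC (gcc-compatible) mode, assuming a 4-byte, 4-aligned int.  The bit-fields
   are packed into the bits directly after 'c', so the whole struct fits in one
   int-sized unit: sizeof == 4, alignment == 4. */
struct example_bf {
    char c;      /* byte 0 */
    int  x : 3;  /* the 3 bits directly after 'c' */
    int  y : 5;  /* the next 5 bits */
};
#endif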
3454 /* enum/struct/union declaration. u is either VT_ENUM or VT_STRUCT */
3455 static void struct_decl(CType *type, AttributeDef *ad, int u)
3457 int a, v, size, align, flexible, alignoverride;
3458 long c;
3459 int bit_size, bsize, bt;
3460 Sym *s, *ss, **ps;
3461 AttributeDef ad1;
3462 CType type1, btype;
3464 a = tok; /* save decl type */
3465 next();
3466 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
3467 parse_attribute(ad);
3468 if (tok != '{') {
3469 v = tok;
3470 next();
3471 /* struct already defined ? return it */
3472 if (v < TOK_IDENT)
3473 expect("struct/union/enum name");
3474 s = struct_find(v);
3475 if (s && (s->scope == local_scope || (tok != '{' && tok != ';'))) {
3476 if (s->type.t != a)
3477 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
3478 goto do_decl;
3480 } else {
3481 v = anon_sym++;
3483 /* Record the original enum/struct/union token. */
3484 type1.t = a;
3485 type1.ref = NULL;
3486 /* we put an undefined size for struct/union */
3487 s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
3488 s->r = 0; /* default alignment is zero as gcc */
3489 /* put struct/union/enum name in type */
3490 do_decl:
3491 type->t = u;
3492 type->ref = s;
3494 if (tok == '{') {
3495 next();
3496 if (s->c != -1)
3497 tcc_error("struct/union/enum already defined");
3498 /* cannot be empty */
3499 c = 0;
3500 /* empty enums are not allowed */
3501 if (a == TOK_ENUM) {
3502 int seen_neg = 0;
3503 int seen_wide = 0;
3504 for(;;) {
3505 CType *t = &int_type;
3506 v = tok;
3507 if (v < TOK_UIDENT)
3508 expect("identifier");
3509 ss = sym_find(v);
3510 if (ss && !local_stack)
3511 tcc_error("redefinition of enumerator '%s'",
3512 get_tok_str(v, NULL));
3513 next();
3514 if (tok == '=') {
3515 next();
3516 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
3517 c = expr_const64();
3518 #else
3519 /* We really want to support long long enums
3520 on i386 as well, but the Sym structure only
3521 holds a 'long' for associated constants,
3522 and enlarging it would bump its size (no
3523 available padding). So punt for now. */
3524 c = expr_const();
3525 #endif
3527 if (c < 0)
3528 seen_neg = 1;
3529 if (c != (int)c && (unsigned long)c != (unsigned int)c)
3530 seen_wide = 1, t = &size_type;
3531 /* enum symbols have static storage */
3532 ss = sym_push(v, t, VT_CONST, c);
3533 ss->type.t |= VT_STATIC;
3534 if (tok != ',')
3535 break;
3536 next();
3537 c++;
3538 /* NOTE: we accept a trailing comma */
3539 if (tok == '}')
3540 break;
3542 if (!seen_neg)
3543 s->a.unsigned_enum = 1;
3544 s->c = type_size(seen_wide ? &size_type : &int_type, &align);
3545 skip('}');
3546 } else {
3547 ps = &s->next;
3548 flexible = 0;
3549 while (tok != '}') {
3550 if (!parse_btype(&btype, &ad1)) {
3551 skip(';');
3552 continue;
3554 while (1) {
3555 if (flexible)
3556 tcc_error("flexible array member '%s' not at the end of struct",
3557 get_tok_str(v, NULL));
3558 bit_size = -1;
3559 v = 0;
3560 type1 = btype;
3561 if (tok != ':') {
3562 type_decl(&type1, &ad1, &v, TYPE_DIRECT | TYPE_ABSTRACT);
3563 if (v == 0) {
3564 if ((type1.t & VT_BTYPE) != VT_STRUCT)
3565 expect("identifier");
3566 else {
3567 int v = btype.ref->v;
3568 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
3569 if (tcc_state->ms_extensions == 0)
3570 expect("identifier");
3574 if (type_size(&type1, &align) < 0) {
3575 if ((a == TOK_STRUCT) && (type1.t & VT_ARRAY) && c)
3576 flexible = 1;
3577 else
3578 tcc_error("field '%s' has incomplete type",
3579 get_tok_str(v, NULL));
3581 if ((type1.t & VT_BTYPE) == VT_FUNC ||
3582 (type1.t & (VT_TYPEDEF | VT_STATIC | VT_EXTERN | VT_INLINE)))
3583 tcc_error("invalid type for '%s'",
3584 get_tok_str(v, NULL));
3586 if (tok == ':') {
3587 next();
3588 bit_size = expr_const();
3589 /* XXX: handle v = 0 case for messages */
3590 if (bit_size < 0)
3591 tcc_error("negative width in bit-field '%s'",
3592 get_tok_str(v, NULL));
3593 if (v && bit_size == 0)
3594 tcc_error("zero width for bit-field '%s'",
3595 get_tok_str(v, NULL));
3596 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
3597 parse_attribute(&ad1);
3599 size = type_size(&type1, &align);
3600 /* Only remember non-default alignment. */
3601 alignoverride = 0;
3602 if (ad1.a.aligned) {
3603 int speca = 1 << (ad1.a.aligned - 1);
3604 alignoverride = speca;
3605 } else if (ad1.a.packed || ad->a.packed) {
3606 alignoverride = 1;
3607 } else if (*tcc_state->pack_stack_ptr) {
3608 if (align > *tcc_state->pack_stack_ptr)
3609 alignoverride = *tcc_state->pack_stack_ptr;
3611 if (bit_size >= 0) {
3612 bt = type1.t & VT_BTYPE;
3613 if (bt != VT_INT &&
3614 bt != VT_BYTE &&
3615 bt != VT_SHORT &&
3616 bt != VT_BOOL &&
3617 bt != VT_ENUM &&
3618 bt != VT_LLONG)
3619 tcc_error("bitfields must have scalar type");
3620 bsize = size * 8;
3621 if (bit_size > bsize) {
3622 tcc_error("width of '%s' exceeds its type",
3623 get_tok_str(v, NULL));
3624 } else if (bit_size == bsize) {
3625 /* no need for bit fields */
3627 } else {
3628 type1.t |= VT_BITFIELD |
3629 (0 << VT_STRUCT_SHIFT) |
3630 (bit_size << (VT_STRUCT_SHIFT + 6));
3633 if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
3634 /* Remember we've seen a real field to check
3635 for placement of flexible array member. */
3636 c = 1;
3638 /* If member is a struct or bit-field, enforce
3639 placing into the struct (as anonymous). */
3640 if (v == 0 &&
3641 ((type1.t & VT_BTYPE) == VT_STRUCT ||
3642 bit_size >= 0)) {
3643 v = anon_sym++;
3645 if (v) {
3646 ss = sym_push(v | SYM_FIELD, &type1, alignoverride, 0);
3647 *ps = ss;
3648 ps = &ss->next;
3650 if (tok == ';' || tok == TOK_EOF)
3651 break;
3652 skip(',');
3654 skip(';');
3656 skip('}');
3657 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
3658 parse_attribute(ad);
3659 struct_layout(type, ad);
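#if 0
/* Illustrative sketch (not part of tcc): what the enum branch of struct_decl()
   records.  With no negative enumerator, s->a.unsigned_enum is set; on 64-bit
   targets a value that does not fit in 32 bits marks the enum as wide and its
   size becomes that of size_type (a tcc extension, see the i386 comment above). */
enum example_small { EA = 1, EB, EC };
enum example_wide  { EW = 0x100000000LL };
#endif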
3664 /* return 1 if basic type is a type size (short, long, long long) */
3665 ST_FUNC int is_btype_size(int bt)
3667 return bt == VT_SHORT || bt == VT_LONG || bt == VT_LLONG;
3670 /* Add type qualifiers to a type. If the type is an array then the qualifiers
3671 are added to the element type, copied because it could be a typedef. */
3672 static void parse_btype_qualify(CType *type, int qualifiers)
3674 while (type->t & VT_ARRAY) {
3675 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
3676 type = &type->ref->type;
3678 type->t |= qualifiers;
3681 /* return 0 if no type declaration. otherwise, return the basic type
3682 and skip it. */
3684 static int parse_btype(CType *type, AttributeDef *ad)
3686 int t, u, bt_size, complete, type_found, typespec_found;
3687 Sym *s;
3688 CType type1;
3690 memset(ad, 0, sizeof(AttributeDef));
3691 complete = 0;
3692 type_found = 0;
3693 typespec_found = 0;
3694 t = 0;
3695 while(1) {
3696 switch(tok) {
3697 case TOK_EXTENSION:
3698 /* currently, we really ignore extension */
3699 next();
3700 continue;
3702 /* basic types */
3703 case TOK_CHAR:
3704 u = VT_BYTE;
3705 basic_type:
3706 next();
3707 basic_type1:
3708 if (complete)
3709 tcc_error("too many basic types");
3710 t |= u;
3711 bt_size = is_btype_size (u & VT_BTYPE);
3712 if (u == VT_INT || (!bt_size && !(t & VT_TYPEDEF)))
3713 complete = 1;
3714 typespec_found = 1;
3715 break;
3716 case TOK_VOID:
3717 u = VT_VOID;
3718 goto basic_type;
3719 case TOK_SHORT:
3720 u = VT_SHORT;
3721 goto basic_type;
3722 case TOK_INT:
3723 u = VT_INT;
3724 goto basic_type;
3725 case TOK_LONG:
3726 next();
3727 if ((t & VT_BTYPE) == VT_DOUBLE) {
3728 #ifndef TCC_TARGET_PE
3729 t = (t & ~VT_BTYPE) | VT_LDOUBLE;
3730 #endif
3731 } else if ((t & VT_BTYPE) == VT_LONG) {
3732 t = (t & ~VT_BTYPE) | VT_LLONG;
3733 } else {
3734 u = VT_LONG;
3735 goto basic_type1;
3737 break;
3738 #ifdef TCC_TARGET_ARM64
3739 case TOK_UINT128:
3740 /* GCC's __uint128_t appears in some Linux header files. Make it a
3741 synonym for long double to get the size and alignment right. */
3742 u = VT_LDOUBLE;
3743 goto basic_type;
3744 #endif
3745 case TOK_BOOL:
3746 u = VT_BOOL;
3747 goto basic_type;
3748 case TOK_FLOAT:
3749 u = VT_FLOAT;
3750 goto basic_type;
3751 case TOK_DOUBLE:
3752 next();
3753 if ((t & VT_BTYPE) == VT_LONG) {
3754 #ifdef TCC_TARGET_PE
3755 t = (t & ~VT_BTYPE) | VT_DOUBLE;
3756 #else
3757 t = (t & ~VT_BTYPE) | VT_LDOUBLE;
3758 #endif
3759 } else {
3760 u = VT_DOUBLE;
3761 goto basic_type1;
3763 break;
3764 case TOK_ENUM:
3765 struct_decl(&type1, ad, VT_ENUM);
3766 basic_type2:
3767 u = type1.t;
3768 type->ref = type1.ref;
3769 goto basic_type1;
3770 case TOK_STRUCT:
3771 case TOK_UNION:
3772 struct_decl(&type1, ad, VT_STRUCT);
3773 goto basic_type2;
3775 /* type modifiers */
3776 case TOK_CONST1:
3777 case TOK_CONST2:
3778 case TOK_CONST3:
3779 type->t = t;
3780 parse_btype_qualify(type, VT_CONSTANT);
3781 t = type->t;
3782 next();
3783 break;
3784 case TOK_VOLATILE1:
3785 case TOK_VOLATILE2:
3786 case TOK_VOLATILE3:
3787 type->t = t;
3788 parse_btype_qualify(type, VT_VOLATILE);
3789 t = type->t;
3790 next();
3791 break;
3792 case TOK_SIGNED1:
3793 case TOK_SIGNED2:
3794 case TOK_SIGNED3:
3795 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
3796 tcc_error("signed and unsigned modifier");
3797 typespec_found = 1;
3798 t |= VT_DEFSIGN;
3799 next();
3800 break;
3801 case TOK_REGISTER:
3802 case TOK_AUTO:
3803 case TOK_RESTRICT1:
3804 case TOK_RESTRICT2:
3805 case TOK_RESTRICT3:
3806 next();
3807 break;
3808 case TOK_UNSIGNED:
3809 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
3810 tcc_error("signed and unsigned modifier");
3811 t |= VT_DEFSIGN | VT_UNSIGNED;
3812 next();
3813 typespec_found = 1;
3814 break;
3816 /* storage */
3817 case TOK_EXTERN:
3818 t |= VT_EXTERN;
3819 next();
3820 break;
3821 case TOK_STATIC:
3822 t |= VT_STATIC;
3823 next();
3824 break;
3825 case TOK_TYPEDEF:
3826 t |= VT_TYPEDEF;
3827 next();
3828 break;
3829 case TOK_INLINE1:
3830 case TOK_INLINE2:
3831 case TOK_INLINE3:
3832 t |= VT_INLINE;
3833 next();
3834 break;
3836 /* GNUC attribute */
3837 case TOK_ATTRIBUTE1:
3838 case TOK_ATTRIBUTE2:
3839 parse_attribute(ad);
3840 if (ad->a.mode) {
3841 u = ad->a.mode -1;
3842 t = (t & ~VT_BTYPE) | u;
3844 break;
3845 /* GNUC typeof */
3846 case TOK_TYPEOF1:
3847 case TOK_TYPEOF2:
3848 case TOK_TYPEOF3:
3849 next();
3850 parse_expr_type(&type1);
3851 /* remove all storage modifiers except typedef */
3852 type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
3853 goto basic_type2;
3854 default:
3855 if (typespec_found)
3856 goto the_end;
3857 s = sym_find(tok);
3858 if (!s || !(s->type.t & VT_TYPEDEF))
3859 goto the_end;
3861 type->t = ((s->type.t & ~VT_TYPEDEF) |
3862 (t & ~(VT_CONSTANT | VT_VOLATILE)));
3863 type->ref = s->type.ref;
3864 if (t & (VT_CONSTANT | VT_VOLATILE))
3865 parse_btype_qualify(type, t & (VT_CONSTANT | VT_VOLATILE));
3866 t = type->t;
3868 if (s->r) {
3869 /* get attributes from typedef */
3870 if (0 == ad->a.aligned)
3871 ad->a.aligned = s->a.aligned;
3872 if (0 == ad->a.func_call)
3873 ad->a.func_call = s->a.func_call;
3874 ad->a.packed |= s->a.packed;
3876 next();
3877 typespec_found = 1;
3878 break;
3880 type_found = 1;
3882 the_end:
3883 if (tcc_state->char_is_unsigned) {
3884 if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
3885 t |= VT_UNSIGNED;
3888 /* plain 'long' is never kept as a basic type: map it to int or llong */
3889 if ((t & VT_BTYPE) == VT_LONG)
3890 #if (!defined TCC_TARGET_X86_64 && !defined TCC_TARGET_ARM64) || \
3891 defined TCC_TARGET_PE
3892 t = (t & ~VT_BTYPE) | VT_INT;
3893 #else
3894 t = (t & ~VT_BTYPE) | VT_LLONG;
3895 #endif
3896 type->t = t;
3897 return type_found;
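#if 0
/* Illustrative sketch (not part of tcc): specifier sequences parse_btype()
   accepts and the VT_* encodings they end up with (the 'long' cases are
   target-dependent, see the mapping at the end of the function). */
unsigned long long example_a;  /* VT_LLONG | VT_UNSIGNED | VT_DEFSIGN */
long double example_b;         /* VT_LDOUBLE (VT_DOUBLE on PE targets) */
const volatile char example_c; /* VT_BYTE | VT_CONSTANT | VT_VOLATILE  */
signed char example_d;         /* VT_BYTE | VT_DEFSIGN                 */
#endif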
3900 /* convert a function parameter type (array to pointer and function to
3901 function pointer) */
3902 static inline void convert_parameter_type(CType *pt)
3904 /* remove const and volatile qualifiers (XXX: const could be used
3905 to indicate a const function parameter) */
3906 pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
3907 /* array must be transformed to pointer according to ANSI C */
3908 pt->t &= ~VT_ARRAY;
3909 if ((pt->t & VT_BTYPE) == VT_FUNC) {
3910 mk_pointer(pt);
3914 ST_FUNC void parse_asm_str(CString *astr)
3916 skip('(');
3917 parse_mult_str(astr, "string constant");
3920 /* Parse an asm label and return the token */
3921 static int asm_label_instr(void)
3923 int v;
3924 CString astr;
3926 next();
3927 parse_asm_str(&astr);
3928 skip(')');
3929 #ifdef ASM_DEBUG
3930 printf("asm_alias: \"%s\"\n", (char *)astr.data);
3931 #endif
3932 v = tok_alloc(astr.data, astr.size - 1)->tok;
3933 cstr_free(&astr);
3934 return v;
3937 static void post_type(CType *type, AttributeDef *ad, int storage)
3939 int n, l, t1, arg_size, align;
3940 Sym **plast, *s, *first;
3941 AttributeDef ad1;
3942 CType pt;
3944 if (tok == '(') {
3945 /* function declaration */
3946 next();
3947 l = 0;
3948 first = NULL;
3949 plast = &first;
3950 arg_size = 0;
3951 if (tok != ')') {
3952 for(;;) {
3953 /* read param name and compute offset */
3954 if (l != FUNC_OLD) {
3955 if (!parse_btype(&pt, &ad1)) {
3956 if (l) {
3957 tcc_error("invalid type");
3958 } else {
3959 l = FUNC_OLD;
3960 goto old_proto;
3963 l = FUNC_NEW;
3964 if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
3965 break;
3966 type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT);
3967 if ((pt.t & VT_BTYPE) == VT_VOID)
3968 tcc_error("parameter declared as void");
3969 arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
3970 } else {
3971 old_proto:
3972 n = tok;
3973 if (n < TOK_UIDENT)
3974 expect("identifier");
3975 pt.t = VT_INT;
3976 next();
3978 convert_parameter_type(&pt);
3979 s = sym_push(n | SYM_FIELD, &pt, 0, 0);
3980 *plast = s;
3981 plast = &s->next;
3982 if (tok == ')')
3983 break;
3984 skip(',');
3985 if (l == FUNC_NEW && tok == TOK_DOTS) {
3986 l = FUNC_ELLIPSIS;
3987 next();
3988 break;
3992 /* if no parameters, then old type prototype */
3993 if (l == 0)
3994 l = FUNC_OLD;
3995 skip(')');
3996 /* NOTE: const is ignored in returned type as it has a special
3997 meaning in gcc / C++ */
3998 type->t &= ~VT_CONSTANT;
3999 /* some ancient pre-K&R C allows a function to return an array
4000 and the array brackets to be put after the arguments, such
4001 that "int c()[]" means something like "int[] c()" */
4002 if (tok == '[') {
4003 next();
4004 skip(']'); /* only handle simple "[]" */
4005 type->t |= VT_PTR;
4007 /* we push an anonymous symbol which will contain the function prototype */
4008 ad->a.func_args = arg_size;
4009 s = sym_push(SYM_FIELD, type, 0, l);
4010 s->a = ad->a;
4011 s->next = first;
4012 type->t = VT_FUNC;
4013 type->ref = s;
4014 } else if (tok == '[') {
4015 int saved_nocode_wanted = nocode_wanted;
4016 /* array definition */
4017 next();
4018 if (tok == TOK_RESTRICT1)
4019 next();
4020 n = -1;
4021 t1 = 0;
4022 if (tok != ']') {
4023 if (!local_stack || (storage & VT_STATIC))
4024 vpushi(expr_const());
4025 else {
4026 /* VLAs (which can only happen with local_stack && !VT_STATIC)
4027 length must always be evaluated, even under nocode_wanted,
4028 so that its size slot is initialized (e.g. under sizeof
4029 or typeof). */
4030 nocode_wanted = 0;
4031 gexpr();
4033 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4034 n = vtop->c.i;
4035 if (n < 0)
4036 tcc_error("invalid array size");
4037 } else {
4038 if (!is_integer_btype(vtop->type.t & VT_BTYPE))
4039 tcc_error("size of variable length array should be an integer");
4040 t1 = VT_VLA;
4043 skip(']');
4044 /* parse next post type */
4045 post_type(type, ad, storage);
4046 if (type->t == VT_FUNC)
4047 tcc_error("declaration of an array of functions");
4048 t1 |= type->t & VT_VLA;
4050 if (t1 & VT_VLA) {
4051 loc -= type_size(&int_type, &align);
4052 loc &= -align;
4053 n = loc;
4055 vla_runtime_type_size(type, &align);
4056 gen_op('*');
4057 vset(&int_type, VT_LOCAL|VT_LVAL, n);
4058 vswap();
4059 vstore();
4061 if (n != -1)
4062 vpop();
4063 nocode_wanted = saved_nocode_wanted;
4065 /* we push an anonymous symbol which will contain the array
4066 element type */
4067 s = sym_push(SYM_FIELD, type, 0, n);
4068 type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
4069 type->ref = s;
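#if 0
/* Illustrative sketch (not part of tcc): declarator suffixes handled by
   post_type().  Note that for a VLA the size expression is always evaluated,
   even under sizeof/nocode_wanted, so that its stack slot gets initialized. */
int example_proto(int a, ...);   /* FUNC_ELLIPSIS prototype */
int example_old();               /* FUNC_OLD, unprototyped  */
void example_vla(int n) { char buf[n]; (void)buf; /* VT_VLA array */ }
#endif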
4073 /* Parse a type declaration (except basic type), and return the type
4074 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4075 expected. 'type' should contain the basic type. 'ad' is the
4076 attribute definition of the basic type. It can be modified by
4077 type_decl(). */
4079 static void type_decl(CType *type, AttributeDef *ad, int *v, int td)
4081 Sym *s;
4082 CType type1, *type2;
4083 int qualifiers, storage;
4085 while (tok == '*') {
4086 qualifiers = 0;
4087 redo:
4088 next();
4089 switch(tok) {
4090 case TOK_CONST1:
4091 case TOK_CONST2:
4092 case TOK_CONST3:
4093 qualifiers |= VT_CONSTANT;
4094 goto redo;
4095 case TOK_VOLATILE1:
4096 case TOK_VOLATILE2:
4097 case TOK_VOLATILE3:
4098 qualifiers |= VT_VOLATILE;
4099 goto redo;
4100 case TOK_RESTRICT1:
4101 case TOK_RESTRICT2:
4102 case TOK_RESTRICT3:
4103 goto redo;
4104 /* XXX: clarify attribute handling */
4105 case TOK_ATTRIBUTE1:
4106 case TOK_ATTRIBUTE2:
4107 parse_attribute(ad);
4108 break;
4110 mk_pointer(type);
4111 type->t |= qualifiers;
4114 /* recursive type */
4115 /* XXX: incorrect if abstract type for functions (e.g. 'int ()') */
4116 type1.t = 0; /* XXX: same as int */
4117 if (tok == '(') {
4118 next();
4119 /* XXX: this is not correct to modify 'ad' at this point, but
4120 the syntax is not clear */
4121 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
4122 parse_attribute(ad);
4123 type_decl(&type1, ad, v, td);
4124 skip(')');
4125 } else {
4126 /* type identifier */
4127 if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
4128 *v = tok;
4129 next();
4130 } else {
4131 if (!(td & TYPE_ABSTRACT))
4132 expect("identifier");
4133 *v = 0;
4136 storage = type->t & VT_STORAGE;
4137 type->t &= ~VT_STORAGE;
4138 post_type(type, ad, storage);
4139 type->t |= storage;
4140 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
4141 parse_attribute(ad);
4143 if (!type1.t)
4144 return;
4145 /* append type at the end of type1 */
4146 type2 = &type1;
4147 for(;;) {
4148 s = type2->ref;
4149 type2 = &s->type;
4150 if (!type2->t) {
4151 *type2 = *type;
4152 break;
4155 *type = type1;
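#if 0
/* Illustrative sketch (not part of tcc): the recursion in type_decl().  The
   parenthesised inner declarator '(*example_fp)' is parsed first into type1,
   then the outer '(char *, int)' suffix is appended to it, yielding "pointer
   to function returning int".  Qualifiers written after a '*' attach to that
   pointer level. */
int (*example_fp)(char *, int);
const char *volatile example_qp;
#endif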
4158 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
4159 ST_FUNC int lvalue_type(int t)
4161 int bt, r;
4162 r = VT_LVAL;
4163 bt = t & VT_BTYPE;
4164 if (bt == VT_BYTE || bt == VT_BOOL)
4165 r |= VT_LVAL_BYTE;
4166 else if (bt == VT_SHORT)
4167 r |= VT_LVAL_SHORT;
4168 else
4169 return r;
4170 if (t & VT_UNSIGNED)
4171 r |= VT_LVAL_UNSIGNED;
4172 return r;
4175 /* indirection with full error checking and bound check */
4176 ST_FUNC void indir(void)
4178 if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
4179 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
4180 return;
4181 expect("pointer");
4183 if (vtop->r & VT_LVAL)
4184 gv(RC_INT);
4185 vtop->type = *pointed_type(&vtop->type);
4186 /* Arrays and functions are never lvalues */
4187 if (!(vtop->type.t & VT_ARRAY) && !(vtop->type.t & VT_VLA)
4188 && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
4189 vtop->r |= lvalue_type(vtop->type.t);
4190 /* if bound checking, the referenced pointer must be checked */
4191 #ifdef CONFIG_TCC_BCHECK
4192 if (tcc_state->do_bounds_check)
4193 vtop->r |= VT_MUSTBOUND;
4194 #endif
4198 /* pass a parameter to a function and do type checking and casting */
4199 static void gfunc_param_typed(Sym *func, Sym *arg)
4201 int func_type;
4202 CType type;
4204 func_type = func->c;
4205 if (func_type == FUNC_OLD ||
4206 (func_type == FUNC_ELLIPSIS && arg == NULL)) {
4207 /* default casting : only need to convert float to double */
4208 if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
4209 type.t = VT_DOUBLE;
4210 gen_cast(&type);
4211 } else if (vtop->type.t & VT_BITFIELD) {
4212 type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
4213 type.ref = vtop->type.ref;
4214 gen_cast(&type);
4216 } else if (arg == NULL) {
4217 tcc_error("too many arguments to function");
4218 } else {
4219 type = arg->type;
4220 type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
4221 gen_assign_cast(&type);
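#if 0
/* Illustrative sketch (not part of tcc): the default argument promotion
   applied above.  For variadic or unprototyped callees a float argument is
   widened to double before the call; arguments bound to prototyped parameters
   instead go through gen_assign_cast() against the declared type. */
int printf(const char *, ...);
void example_promote(float f) { printf("%f", f); /* f is passed as a double */ }
#endif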
4225 /* parse an expression of the form '(type)' or '(expr)' and return its
4226 type */
4227 static void parse_expr_type(CType *type)
4229 int n;
4230 AttributeDef ad;
4232 skip('(');
4233 if (parse_btype(type, &ad)) {
4234 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4235 } else {
4236 expr_type(type);
4238 skip(')');
4241 static void parse_type(CType *type)
4243 AttributeDef ad;
4244 int n;
4246 if (!parse_btype(type, &ad)) {
4247 expect("type");
4249 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4252 static void vpush_tokc(int t)
4254 CType type;
4255 type.t = t;
4256 type.ref = 0;
4257 vsetc(&type, VT_CONST, &tokc);
4260 ST_FUNC void unary(void)
4262 int n, t, align, size, r, sizeof_caller;
4263 CType type;
4264 Sym *s;
4265 AttributeDef ad;
4267 sizeof_caller = in_sizeof;
4268 in_sizeof = 0;
4269 /* XXX: GCC 2.95.3 does not generate a jump table here although it
4270 would be better */
4271 tok_next:
4272 switch(tok) {
4273 case TOK_EXTENSION:
4274 next();
4275 goto tok_next;
4276 case TOK_CINT:
4277 case TOK_CCHAR:
4278 case TOK_LCHAR:
4279 vpushi(tokc.i);
4280 next();
4281 break;
4282 case TOK_CUINT:
4283 vpush_tokc(VT_INT | VT_UNSIGNED);
4284 next();
4285 break;
4286 case TOK_CLLONG:
4287 vpush_tokc(VT_LLONG);
4288 next();
4289 break;
4290 case TOK_CULLONG:
4291 vpush_tokc(VT_LLONG | VT_UNSIGNED);
4292 next();
4293 break;
4294 case TOK_CFLOAT:
4295 vpush_tokc(VT_FLOAT);
4296 next();
4297 break;
4298 case TOK_CDOUBLE:
4299 vpush_tokc(VT_DOUBLE);
4300 next();
4301 break;
4302 case TOK_CLDOUBLE:
4303 vpush_tokc(VT_LDOUBLE);
4304 next();
4305 break;
4306 case TOK___FUNCTION__:
4307 if (!gnu_ext)
4308 goto tok_identifier;
4309 /* fall thru */
4310 case TOK___FUNC__:
4312 void *ptr;
4313 int len;
4314 /* special function name identifier */
4315 len = strlen(funcname) + 1;
4316 /* generate char[len] type */
4317 type.t = VT_BYTE;
4318 mk_pointer(&type);
4319 type.t |= VT_ARRAY;
4320 type.ref->c = len;
4321 vpush_ref(&type, data_section, data_section->data_offset, len);
4322 ptr = section_ptr_add(data_section, len);
4323 memcpy(ptr, funcname, len);
4324 next();
4326 break;
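/* Illustrative example (not part of this file): the code above stores the
   enclosing function's name as a char array in the data section:

       const char *who(void) { return __func__; }   // yields "who"

   __FUNCTION__ behaves the same, but only when GNU extensions are enabled. */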
4327 case TOK_LSTR:
4328 #ifdef TCC_TARGET_PE
4329 t = VT_SHORT | VT_UNSIGNED;
4330 #else
4331 t = VT_INT;
4332 #endif
4333 goto str_init;
4334 case TOK_STR:
4335 /* string parsing */
4336 t = VT_BYTE;
4337 str_init:
4338 if (tcc_state->warn_write_strings)
4339 t |= VT_CONSTANT;
4340 type.t = t;
4341 mk_pointer(&type);
4342 type.t |= VT_ARRAY;
4343 memset(&ad, 0, sizeof(AttributeDef));
4344 decl_initializer_alloc(&type, &ad, VT_CONST, 2, 0, 0);
4345 break;
4346 case '(':
4347 next();
4348 /* cast ? */
4349 if (parse_btype(&type, &ad)) {
4350 type_decl(&type, &ad, &n, TYPE_ABSTRACT);
4351 skip(')');
4352 /* check ISOC99 compound literal */
4353 if (tok == '{') {
4354 /* data is allocated locally by default */
4355 if (global_expr)
4356 r = VT_CONST;
4357 else
4358 r = VT_LOCAL;
4359 /* all except arrays are lvalues */
4360 if (!(type.t & VT_ARRAY))
4361 r |= lvalue_type(type.t);
4362 memset(&ad, 0, sizeof(AttributeDef));
4363 decl_initializer_alloc(&type, &ad, r, 1, 0, 0);
4364 } else {
4365 if (sizeof_caller) {
4366 vpush(&type);
4367 return;
4369 unary();
4370 gen_cast(&type);
4372 } else if (tok == '{') {
4373 int saved_nocode_wanted = nocode_wanted;
4374 if (const_wanted)
4375 tcc_error("expected constant");
4376 /* save all registers */
4377 save_regs(0);
4378 /* statement expression : we do not accept break/continue
4379 inside as GCC does. We do retain the nocode_wanted state,
4380 as statement expressions can't ever be entered from the
4381 outside, so any reactivation of code emission (from labels
4382 or loop heads) can be disabled again after the end of it. */
4383 block(NULL, NULL, 1);
4384 nocode_wanted = saved_nocode_wanted;
4385 skip(')');
4386 } else {
4387 gexpr();
4388 skip(')');
4390 break;
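/* Illustrative examples (not part of this file) of the three '(' forms
   handled above:

       void paren_demo(void)
       {
           double d = (double)1;               // plain cast
           int   *p = (int[]){1, 2, 3};        // ISO C99 compound literal
           int    x = ({ int t = 2; t * t; }); // GNU statement expression
       }

   A compound literal is allocated locally unless global_expr forces static
   allocation, and a statement expression's value is that of its last
   expression statement. */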
4391 case '*':
4392 next();
4393 unary();
4394 indir();
4395 break;
4396 case '&':
4397 next();
4398 unary();
4399 /* function names must be treated as function pointers,
4400 except for unary '&' and sizeof. Since we consider that
4401 functions are not lvalues, we only have to handle them
4402 here and in function calls. */
4403 /* arrays can also be used although they are not lvalues */
4404 if ((vtop->type.t & VT_BTYPE) != VT_FUNC &&
4405 !(vtop->type.t & VT_ARRAY) && !(vtop->type.t & VT_LLOCAL))
4406 test_lvalue();
4407 mk_pointer(&vtop->type);
4408 gaddrof();
4409 break;
4410 case '!':
4411 next();
4412 unary();
4413 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4414 CType boolean;
4415 boolean.t = VT_BOOL;
4416 gen_cast(&boolean);
4417 vtop->c.i = !vtop->c.i;
4418 } else if ((vtop->r & VT_VALMASK) == VT_CMP)
4419 vtop->c.i ^= 1;
4420 else {
4421 save_regs(1);
4422 vseti(VT_JMP, gvtst(1, 0));
4424 break;
4425 case '~':
4426 next();
4427 unary();
4428 vpushi(-1);
4429 gen_op('^');
4430 break;
4431 case '+':
4432 next();
4433 unary();
4434 if ((vtop->type.t & VT_BTYPE) == VT_PTR)
4435 tcc_error("pointer not accepted for unary plus");
4436 /* In order to force a cast, we add zero, except for floating point
4437 where we really need a no-op (otherwise -0.0 would be transformed
4438 into +0.0). */
4439 if (!is_float(vtop->type.t)) {
4440 vpushi(0);
4441 gen_op('+');
4443 break;
4444 case TOK_SIZEOF:
4445 case TOK_ALIGNOF1:
4446 case TOK_ALIGNOF2:
4447 t = tok;
4448 next();
4449 in_sizeof++;
4450 unary_type(&type); // resets in_sizeof to 0
4451 size = type_size(&type, &align);
4452 if (t == TOK_SIZEOF) {
4453 if (!(type.t & VT_VLA)) {
4454 if (size < 0)
4455 tcc_error("sizeof applied to an incomplete type");
4456 vpushs(size);
4457 } else {
4458 vla_runtime_type_size(&type, &align);
4460 } else {
4461 vpushs(align);
4463 vtop->type.t |= VT_UNSIGNED;
4464 break;
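/* Illustrative example (not part of this file): for a VLA the size is not a
   constant, so the branch above emits a runtime computation through
   vla_runtime_type_size():

       #include <stddef.h>
       size_t vla_size(int n) { int a[n]; return sizeof a; }   // computed at run time

   For complete non-VLA types, sizeof and __alignof__ fold to an unsigned
   constant of size_t type. */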
4466 case TOK_builtin_expect:
4468 /* __builtin_expect is a no-op for now */
4469 next();
4470 skip('(');
4471 expr_eq();
4472 skip(',');
4473 nocode_wanted++;
4474 expr_lor_const();
4475 vpop();
4476 nocode_wanted--;
4477 skip(')');
4479 break;
4480 case TOK_builtin_types_compatible_p:
4482 CType type1, type2;
4483 next();
4484 skip('(');
4485 parse_type(&type1);
4486 skip(',');
4487 parse_type(&type2);
4488 skip(')');
4489 type1.t &= ~(VT_CONSTANT | VT_VOLATILE);
4490 type2.t &= ~(VT_CONSTANT | VT_VOLATILE);
4491 vpushi(is_compatible_types(&type1, &type2));
4493 break;
4494 case TOK_builtin_choose_expr:
4496 int64_t c;
4497 next();
4498 skip('(');
4499 c = expr_const64();
4500 skip(',');
4501 if (!c) {
4502 nocode_wanted++;
4504 expr_eq();
4505 if (!c) {
4506 vpop();
4507 nocode_wanted--;
4509 skip(',');
4510 if (c) {
4511 nocode_wanted++;
4513 expr_eq();
4514 if (c) {
4515 vpop();
4516 nocode_wanted--;
4518 skip(')');
4520 break;
4521 case TOK_builtin_constant_p:
4523 int res;
4524 next();
4525 skip('(');
4526 nocode_wanted++;
4527 gexpr();
4528 res = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
4529 vpop();
4530 nocode_wanted--;
4531 skip(')');
4532 vpushi(res);
4534 break;
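/* Illustrative examples (not part of this file) of the builtins above:

       void builtins_demo(int v)
       {
           int a = __builtin_types_compatible_p(int, const int);     // 1: qualifiers stripped
           int b = __builtin_choose_expr(sizeof(long) == 8, 64, 32); // arm chosen at parse time
           int c = __builtin_constant_p(3 * 7);                      // 1
           int d = __builtin_constant_p(v);                          // 0
       }

   The unselected arm of __builtin_choose_expr is still parsed, but with
   nocode_wanted set so no code is emitted for it. */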
4535 case TOK_builtin_frame_address:
4536 case TOK_builtin_return_address:
4538 int tok1 = tok;
4539 int level;
4540 CType type;
4541 next();
4542 skip('(');
4543 if (tok != TOK_CINT) {
4544 tcc_error("%s only takes positive integers",
4545 tok1 == TOK_builtin_return_address ?
4546 "__builtin_return_address" :
4547 "__builtin_frame_address");
4549 level = (uint32_t)tokc.i;
4550 next();
4551 skip(')');
4552 type.t = VT_VOID;
4553 mk_pointer(&type);
4554 vset(&type, VT_LOCAL, 0); /* local frame */
4555 while (level--) {
4556 mk_pointer(&vtop->type);
4557 indir(); /* -> parent frame */
4559 if (tok1 == TOK_builtin_return_address) {
4560 // assume return address is just above frame pointer on stack
4561 vpushi(PTR_SIZE);
4562 gen_op('+');
4563 mk_pointer(&vtop->type);
4564 indir();
4567 break;
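/* Illustrative example (not part of this file): the level argument must be
   an integer literal; level 0 is the current frame, and each additional
   level follows one saved frame pointer:

       void *caller_pc(void)  { return __builtin_return_address(0); }
       void *this_frame(void) { return __builtin_frame_address(0); }

   The return address is assumed to sit just above the frame pointer, as the
   comment above notes. */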
4568 #ifdef TCC_TARGET_X86_64
4569 #ifdef TCC_TARGET_PE
4570 case TOK_builtin_va_start:
4572 next();
4573 skip('(');
4574 expr_eq();
4575 skip(',');
4576 expr_eq();
4577 skip(')');
4578 if ((vtop->r & VT_VALMASK) != VT_LOCAL)
4579 tcc_error("__builtin_va_start expects a local variable");
4580 vtop->r &= ~(VT_LVAL | VT_REF);
4581 vtop->type = char_pointer_type;
4582 vtop->c.i += 8;
4583 vstore();
4585 break;
4586 #else
4587 case TOK_builtin_va_arg_types:
4589 CType type;
4590 next();
4591 skip('(');
4592 parse_type(&type);
4593 skip(')');
4594 vpushi(classify_x86_64_va_arg(&type));
4596 break;
4597 #endif
4598 #endif
4600 #ifdef TCC_TARGET_ARM64
4601 case TOK___va_start: {
4602 next();
4603 skip('(');
4604 expr_eq();
4605 skip(',');
4606 expr_eq();
4607 skip(')');
4608 //xx check types
4609 gen_va_start();
4610 vpushi(0);
4611 vtop->type.t = VT_VOID;
4612 break;
4614 case TOK___va_arg: {
4615 CType type;
4616 next();
4617 skip('(');
4618 expr_eq();
4619 skip(',');
4620 parse_type(&type);
4621 skip(')');
4622 //xx check types
4623 gen_va_arg(&type);
4624 vtop->type = type;
4625 break;
4627 case TOK___arm64_clear_cache: {
4628 next();
4629 skip('(');
4630 expr_eq();
4631 skip(',');
4632 expr_eq();
4633 skip(')');
4634 gen_clear_cache();
4635 vpushi(0);
4636 vtop->type.t = VT_VOID;
4637 break;
4639 #endif
4640 /* pre operations */
4641 case TOK_INC:
4642 case TOK_DEC:
4643 t = tok;
4644 next();
4645 unary();
4646 inc(0, t);
4647 break;
4648 case '-':
4649 next();
4650 unary();
4651 t = vtop->type.t & VT_BTYPE;
4652 if (is_float(t)) {
4653 /* In IEEE negate(x) isn't subtract(0,x), but rather
4654 subtract(-0, x). */
4655 vpush(&vtop->type);
4656 if (t == VT_FLOAT)
4657 vtop->c.f = -1.0 * 0.0;
4658 else if (t == VT_DOUBLE)
4659 vtop->c.d = -1.0 * 0.0;
4660 else
4661 vtop->c.ld = -1.0 * 0.0;
4662 } else
4663 vpushi(0);
4664 vswap();
4665 gen_op('-');
4666 break;
4667 case TOK_LAND:
4668 if (!gnu_ext)
4669 goto tok_identifier;
4670 next();
4671 /* allow taking the address of a label */
4672 if (tok < TOK_UIDENT)
4673 expect("label identifier");
4674 s = label_find(tok);
4675 if (!s) {
4676 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
4677 } else {
4678 if (s->r == LABEL_DECLARED)
4679 s->r = LABEL_FORWARD;
4681 if (!s->type.t) {
4682 s->type.t = VT_VOID;
4683 mk_pointer(&s->type);
4684 s->type.t |= VT_STATIC;
4686 vpushsym(&s->type, s);
4687 next();
4688 break;
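/* Illustrative example (not part of this file): with GNU extensions, '&&'
   applied to a label name yields its address as a static void pointer; it
   pairs with the computed goto handled in block():

       void spin_demo(void)
       {
           void *target = &&done;   // address of a label
           goto *target;            // computed goto
       done:
           ;
       }
*/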
4690 // special qnan , snan and infinity values
4691 case TOK___NAN__:
4692 vpush64(VT_DOUBLE, 0x7ff8000000000000ULL);
4693 next();
4694 break;
4695 case TOK___SNAN__:
4696 vpush64(VT_DOUBLE, 0x7ff0000000000001ULL);
4697 next();
4698 break;
4699 case TOK___INF__:
4700 vpush64(VT_DOUBLE, 0x7ff0000000000000ULL);
4701 next();
4702 break;
4704 default:
4705 tok_identifier:
4706 t = tok;
4707 next();
4708 if (t < TOK_UIDENT)
4709 expect("identifier");
4710 s = sym_find(t);
4711 if (!s) {
4712 const char *name = get_tok_str(t, NULL);
4713 if (tok != '(')
4714 tcc_error("'%s' undeclared", name);
4715 /* for simple function calls, we tolerate an undeclared
4716 external reference to an int() function */
4717 if (tcc_state->warn_implicit_function_declaration
4718 #ifdef TCC_TARGET_PE
4719 /* people must be warned about using undeclared WINAPI functions
4720 (which usually start with an uppercase letter) */
4721 || (name[0] >= 'A' && name[0] <= 'Z')
4722 #endif
4724 tcc_warning("implicit declaration of function '%s'", name);
4725 s = external_global_sym(t, &func_old_type, 0);
4727 if ((s->type.t & (VT_STATIC | VT_INLINE | VT_BTYPE)) ==
4728 (VT_STATIC | VT_INLINE | VT_FUNC)) {
4729 /* if referencing an inline function, then we generate a
4730 symbol for it if not already done. This has the
4731 effect of generating code for it at the end of the
4732 compilation unit. Inline functions are always
4733 generated in the text section. */
4734 if (!s->c && !nocode_wanted)
4735 put_extern_sym(s, text_section, 0, 0);
4736 r = VT_SYM | VT_CONST;
4737 } else {
4738 r = s->r;
4739 /* A symbol that has a register is a local register variable,
4740 which starts out as VT_LOCAL value. */
4741 if ((r & VT_VALMASK) < VT_CONST)
4742 r = (r & ~VT_VALMASK) | VT_LOCAL;
4744 vset(&s->type, r, s->c);
4745 /* Point to s as backpointer (even without r&VT_SYM).
4746 Will be used by at least the x86 inline asm parser for
4747 regvars. */
4748 vtop->sym = s;
4749 if (vtop->r & VT_SYM) {
4750 vtop->c.i = 0;
4752 break;
4755 /* post operations */
4756 while (1) {
4757 if (tok == TOK_INC || tok == TOK_DEC) {
4758 inc(1, tok);
4759 next();
4760 } else if (tok == '.' || tok == TOK_ARROW || tok == TOK_CDOUBLE) {
4761 int qualifiers;
4762 /* field */
4763 if (tok == TOK_ARROW)
4764 indir();
4765 qualifiers = vtop->type.t & (VT_CONSTANT | VT_VOLATILE);
4766 test_lvalue();
4767 gaddrof();
4768 /* expect pointer on structure */
4769 if ((vtop->type.t & VT_BTYPE) != VT_STRUCT)
4770 expect("struct or union");
4771 if (tok == TOK_CDOUBLE)
4772 expect("field name");
4773 next();
4774 if (tok == TOK_CINT || tok == TOK_CUINT)
4775 expect("field name");
4776 s = find_field(&vtop->type, tok);
4777 if (!s)
4778 tcc_error("field not found: %s", get_tok_str(tok & ~SYM_FIELD, &tokc));
4779 /* add field offset to pointer */
4780 vtop->type = char_pointer_type; /* change type to 'char *' */
4781 vpushi(s->c);
4782 gen_op('+');
4783 /* change type to field type, and set to lvalue */
4784 vtop->type = s->type;
4785 vtop->type.t |= qualifiers;
4786 /* an array is never an lvalue */
4787 if (!(vtop->type.t & VT_ARRAY)) {
4788 vtop->r |= lvalue_type(vtop->type.t);
4789 #ifdef CONFIG_TCC_BCHECK
4790 /* if bound checking, the referenced pointer must be checked */
4791 if (tcc_state->do_bounds_check && (vtop->r & VT_VALMASK) != VT_LOCAL)
4792 vtop->r |= VT_MUSTBOUND;
4793 #endif
4795 next();
4796 } else if (tok == '[') {
4797 next();
4798 gexpr();
4799 gen_op('+');
4800 indir();
4801 skip(']');
4802 } else if (tok == '(') {
4803 SValue ret;
4804 Sym *sa;
4805 int nb_args, ret_nregs, ret_align, regsize, variadic;
4807 /* function call */
4808 if ((vtop->type.t & VT_BTYPE) != VT_FUNC) {
4809 /* pointer test (no array accepted) */
4810 if ((vtop->type.t & (VT_BTYPE | VT_ARRAY)) == VT_PTR) {
4811 vtop->type = *pointed_type(&vtop->type);
4812 if ((vtop->type.t & VT_BTYPE) != VT_FUNC)
4813 goto error_func;
4814 } else {
4815 error_func:
4816 expect("function pointer");
4818 } else {
4819 vtop->r &= ~VT_LVAL; /* no lvalue */
4821 /* get return type */
4822 s = vtop->type.ref;
4823 next();
4824 sa = s->next; /* first parameter */
4825 nb_args = 0;
4826 ret.r2 = VT_CONST;
4827 /* compute first implicit argument if a structure is returned */
4828 if ((s->type.t & VT_BTYPE) == VT_STRUCT) {
4829 variadic = (s->c == FUNC_ELLIPSIS);
4830 ret_nregs = gfunc_sret(&s->type, variadic, &ret.type,
4831 &ret_align, &regsize);
4832 if (!ret_nregs) {
4833 /* get some space for the returned structure */
4834 size = type_size(&s->type, &align);
4835 #ifdef TCC_TARGET_ARM64
4836 /* On arm64, a small struct is returned in registers.
4837 It is much easier to write it to memory if we know
4838 that we are allowed to write some extra bytes, so
4839 round the allocated space up to a power of 2: */
4840 if (size < 16)
4841 while (size & (size - 1))
4842 size = (size | (size - 1)) + 1;
4843 #endif
4844 loc = (loc - size) & -align;
4845 ret.type = s->type;
4846 ret.r = VT_LOCAL | VT_LVAL;
4847 /* pass it as 'int' to avoid structure arg passing
4848 problems */
4849 vseti(VT_LOCAL, loc);
4850 ret.c = vtop->c;
4851 nb_args++;
4853 } else {
4854 ret_nregs = 1;
4855 ret.type = s->type;
4858 if (ret_nregs) {
4859 /* return in register */
4860 if (is_float(ret.type.t)) {
4861 ret.r = reg_fret(ret.type.t);
4862 #ifdef TCC_TARGET_X86_64
4863 if ((ret.type.t & VT_BTYPE) == VT_QFLOAT)
4864 ret.r2 = REG_QRET;
4865 #endif
4866 } else {
4867 #ifndef TCC_TARGET_ARM64
4868 #ifdef TCC_TARGET_X86_64
4869 if ((ret.type.t & VT_BTYPE) == VT_QLONG)
4870 #else
4871 if ((ret.type.t & VT_BTYPE) == VT_LLONG)
4872 #endif
4873 ret.r2 = REG_LRET;
4874 #endif
4875 ret.r = REG_IRET;
4877 ret.c.i = 0;
4879 if (tok != ')') {
4880 for(;;) {
4881 expr_eq();
4882 gfunc_param_typed(s, sa);
4883 nb_args++;
4884 if (sa)
4885 sa = sa->next;
4886 if (tok == ')')
4887 break;
4888 skip(',');
4891 if (sa)
4892 tcc_error("too few arguments to function");
4893 skip(')');
4894 gfunc_call(nb_args);
4896 /* return value */
4897 for (r = ret.r + ret_nregs + !ret_nregs; r-- > ret.r;) {
4898 vsetc(&ret.type, r, &ret.c);
4899 vtop->r2 = ret.r2; /* Loop only happens when r2 is VT_CONST */
4902 /* handle packed struct return */
4903 if (((s->type.t & VT_BTYPE) == VT_STRUCT) && ret_nregs) {
4904 int addr, offset;
4906 size = type_size(&s->type, &align);
4907 /* We often write whole registers, so make sure there's enough
4908 space. Assume the register size is a power of 2. */
4909 if (regsize > align)
4910 align = regsize;
4911 loc = (loc - size) & -align;
4912 addr = loc;
4913 offset = 0;
4914 for (;;) {
4915 vset(&ret.type, VT_LOCAL | VT_LVAL, addr + offset);
4916 vswap();
4917 vstore();
4918 vtop--;
4919 if (--ret_nregs == 0)
4920 break;
4921 offset += regsize;
4923 vset(&s->type, VT_LOCAL | VT_LVAL, addr);
4925 } else {
4926 break;
4931 ST_FUNC void expr_prod(void)
4933 int t;
4935 unary();
4936 while (tok == '*' || tok == '/' || tok == '%') {
4937 t = tok;
4938 next();
4939 unary();
4940 gen_op(t);
4944 ST_FUNC void expr_sum(void)
4946 int t;
4948 expr_prod();
4949 while (tok == '+' || tok == '-') {
4950 t = tok;
4951 next();
4952 expr_prod();
4953 gen_op(t);
4957 static void expr_shift(void)
4959 int t;
4961 expr_sum();
4962 while (tok == TOK_SHL || tok == TOK_SAR) {
4963 t = tok;
4964 next();
4965 expr_sum();
4966 gen_op(t);
4970 static void expr_cmp(void)
4972 int t;
4974 expr_shift();
4975 while ((tok >= TOK_ULE && tok <= TOK_GT) ||
4976 tok == TOK_ULT || tok == TOK_UGE) {
4977 t = tok;
4978 next();
4979 expr_shift();
4980 gen_op(t);
4984 static void expr_cmpeq(void)
4986 int t;
4988 expr_cmp();
4989 while (tok == TOK_EQ || tok == TOK_NE) {
4990 t = tok;
4991 next();
4992 expr_cmp();
4993 gen_op(t);
4997 static void expr_and(void)
4999 expr_cmpeq();
5000 while (tok == '&') {
5001 next();
5002 expr_cmpeq();
5003 gen_op('&');
5007 static void expr_xor(void)
5009 expr_and();
5010 while (tok == '^') {
5011 next();
5012 expr_and();
5013 gen_op('^');
5017 static void expr_or(void)
5019 expr_xor();
5020 while (tok == '|') {
5021 next();
5022 expr_xor();
5023 gen_op('|');
5027 /* XXX: fix this mess */
5028 static void expr_land_const(void)
5030 expr_or();
5031 while (tok == TOK_LAND) {
5032 next();
5033 expr_or();
5034 gen_op(TOK_LAND);
5037 static void expr_lor_const(void)
5039 expr_land_const();
5040 while (tok == TOK_LOR) {
5041 next();
5042 expr_land_const();
5043 gen_op(TOK_LOR);
5047 static void expr_land(void)
5049 expr_or();
5050 if (tok == TOK_LAND) {
5051 int t = 0;
5052 for(;;) {
5053 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5054 CType ctb;
5055 ctb.t = VT_BOOL;
5056 gen_cast(&ctb);
5057 if (vtop->c.i) {
5058 vpop();
5059 } else {
5060 nocode_wanted++;
5061 while (tok == TOK_LAND) {
5062 next();
5063 expr_or();
5064 vpop();
5066 nocode_wanted--;
5067 if (t)
5068 gsym(t);
5069 gen_cast(&int_type);
5070 break;
5072 } else {
5073 if (!t)
5074 save_regs(1);
5075 t = gvtst(1, t);
5077 if (tok != TOK_LAND) {
5078 if (t)
5079 vseti(VT_JMPI, t);
5080 else
5081 vpushi(1);
5082 break;
5084 next();
5085 expr_or();
5090 static void expr_lor(void)
5092 expr_land();
5093 if (tok == TOK_LOR) {
5094 int t = 0;
5095 for(;;) {
5096 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5097 CType ctb;
5098 ctb.t = VT_BOOL;
5099 gen_cast(&ctb);
5100 if (!vtop->c.i) {
5101 vpop();
5102 } else {
5103 nocode_wanted++;
5104 while (tok == TOK_LOR) {
5105 next();
5106 expr_land();
5107 vpop();
5109 nocode_wanted--;
5110 if (t)
5111 gsym(t);
5112 gen_cast(&int_type);
5113 break;
5115 } else {
5116 if (!t)
5117 save_regs(1);
5118 t = gvtst(0, t);
5120 if (tok != TOK_LOR) {
5121 if (t)
5122 vseti(VT_JMP, t);
5123 else
5124 vpushi(0);
5125 break;
5127 next();
5128 expr_land();
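/* Illustrative example (not part of this file): when the left operand of
   '&&' or '||' folds to a constant, the loops above parse the remaining
   operands with nocode_wanted set, so no code is emitted for them:

       extern int expensive_check(void);
       void short_circuit_demo(void)
       {
           int always = 1 || expensive_check();   // expensive_check() is never called
           int never  = 0 && expensive_check();   // likewise
       }

   Non-constant operands go through gvtst(), which chains the conditional
   jumps for the usual short-circuit evaluation. */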
5133 /* Assuming vtop is a value used in a conditional context
5134 (i.e. compared with zero) return 0 if it's false, 1 if
5135 true and -1 if it can't be statically determined. */
5136 static int condition_3way(void)
5138 int c = -1;
5139 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
5140 (!(vtop->r & VT_SYM) ||
5141 !(vtop->sym->type.t & VT_WEAK))) {
5142 CType boolean;
5143 boolean.t = VT_BOOL;
5144 vdup();
5145 gen_cast(&boolean);
5146 c = vtop->c.i;
5147 vpop();
5149 return c;
5152 static void expr_cond(void)
5154 int tt, u, r1, r2, rc, t1, t2, bt1, bt2, islv, c, g;
5155 SValue sv;
5156 CType type, type1, type2;
5158 expr_lor();
5159 if (tok == '?') {
5160 next();
5161 c = condition_3way();
5162 g = (tok == ':' && gnu_ext);
5163 if (c < 0) {
5164 /* needed to avoid having different registers saved in
5165 each branch */
5166 if (is_float(vtop->type.t)) {
5167 rc = RC_FLOAT;
5168 #ifdef TCC_TARGET_X86_64
5169 if ((vtop->type.t & VT_BTYPE) == VT_LDOUBLE) {
5170 rc = RC_ST0;
5172 #endif
5173 } else
5174 rc = RC_INT;
5175 gv(rc);
5176 save_regs(1);
5177 if (g)
5178 gv_dup();
5179 tt = gvtst(1, 0);
5181 } else {
5182 if (!g)
5183 vpop();
5184 tt = 0;
5187 if (1) {
5188 if (c == 0)
5189 nocode_wanted++;
5190 if (!g)
5191 gexpr();
5193 type1 = vtop->type;
5194 sv = *vtop; /* save value to handle it later */
5195 vtop--; /* no vpop so that FP stack is not flushed */
5196 skip(':');
5198 u = 0;
5199 if (c < 0)
5200 u = gjmp(0);
5201 gsym(tt);
5203 if (c == 0)
5204 nocode_wanted--;
5205 if (c == 1)
5206 nocode_wanted++;
5207 expr_cond();
5208 if (c == 1)
5209 nocode_wanted--;
5211 type2 = vtop->type;
5212 t1 = type1.t;
5213 bt1 = t1 & VT_BTYPE;
5214 t2 = type2.t;
5215 bt2 = t2 & VT_BTYPE;
5216 /* cast operands to correct type according to ISOC rules */
5217 if (is_float(bt1) || is_float(bt2)) {
5218 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
5219 type.t = VT_LDOUBLE;
5221 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
5222 type.t = VT_DOUBLE;
5223 } else {
5224 type.t = VT_FLOAT;
5226 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
5227 /* cast to biggest op */
5228 type.t = VT_LLONG;
5229 /* convert to unsigned if it does not fit in a long long */
5230 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED) ||
5231 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED))
5232 type.t |= VT_UNSIGNED;
5233 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
5234 /* If one is a null ptr constant the result type
5235 is the other. */
5236 if (is_null_pointer (vtop))
5237 type = type1;
5238 else if (is_null_pointer (&sv))
5239 type = type2;
5240 /* XXX: test pointer compatibility, C99 has more elaborate
5241 rules here. */
5242 else
5243 type = type1;
5244 } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
5245 /* XXX: test function pointer compatibility */
5246 type = bt1 == VT_FUNC ? type1 : type2;
5247 } else if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
5248 /* XXX: test structure compatibility */
5249 type = bt1 == VT_STRUCT ? type1 : type2;
5250 } else if (bt1 == VT_VOID || bt2 == VT_VOID) {
5251 /* NOTE: as an extension, we accept void on only one side */
5252 type.t = VT_VOID;
5253 } else {
5254 /* integer operations */
5255 type.t = VT_INT;
5256 /* convert to unsigned if it does not fit in an integer */
5257 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED) ||
5258 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED))
5259 type.t |= VT_UNSIGNED;
5261 /* keep struct lvalues by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
5262 that `(expr ? a : b).mem` does not error with "lvalue expected" */
5263 islv = (vtop->r & VT_LVAL) && (sv.r & VT_LVAL) && VT_STRUCT == (type.t & VT_BTYPE);
5264 islv &= c < 0;
5266 /* now we convert second operand */
5267 if (c != 1) {
5268 gen_cast(&type);
5269 if (islv) {
5270 mk_pointer(&vtop->type);
5271 gaddrof();
5272 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5273 gaddrof();
5276 rc = RC_INT;
5277 if (is_float(type.t)) {
5278 rc = RC_FLOAT;
5279 #ifdef TCC_TARGET_X86_64
5280 if ((type.t & VT_BTYPE) == VT_LDOUBLE) {
5281 rc = RC_ST0;
5283 #endif
5284 } else if ((type.t & VT_BTYPE) == VT_LLONG) {
5285 /* for long longs, we use fixed registers to avoid having
5286 to handle a complicated move */
5287 rc = RC_IRET;
5290 tt = r2 = 0;
5291 if (c < 0) {
5292 r2 = gv(rc);
5293 tt = gjmp(0);
5295 gsym(u);
5297 /* this is horrible, but we must also convert first
5298 operand */
5299 if (c != 0) {
5300 *vtop = sv;
5301 gen_cast(&type);
5302 if (islv) {
5303 mk_pointer(&vtop->type);
5304 gaddrof();
5305 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5306 gaddrof();
5309 if (c < 0) {
5310 r1 = gv(rc);
5311 move_reg(r2, r1, type.t);
5312 vtop->r = r2;
5313 gsym(tt);
5314 if (islv)
5315 indir();
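/* Illustrative examples (not part of this file) of the conversion rules
   applied above:

       void cond_demo(void)
       {
           int i = 1;
           unsigned long s1 = sizeof(1 ? i : 2.0);   // sizeof(double): floating type wins
           unsigned long s2 = sizeof(1 ? i : 2LL);   // sizeof(long long): widest integer type
           struct S { int m; } a = {1}, b = {2}, *p = &a;
           int v = (p ? a : b).m;                    // struct result is kept addressable
       }

   A statically known condition still parses both arms, but generates code
   only for the selected one (the other is parsed with nocode_wanted set). */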
5321 static void expr_eq(void)
5323 int t;
5325 expr_cond();
5326 if (tok == '=' ||
5327 (tok >= TOK_A_MOD && tok <= TOK_A_DIV) ||
5328 tok == TOK_A_XOR || tok == TOK_A_OR ||
5329 tok == TOK_A_SHL || tok == TOK_A_SAR) {
5330 test_lvalue();
5331 t = tok;
5332 next();
5333 if (t == '=') {
5334 expr_eq();
5335 } else {
5336 vdup();
5337 expr_eq();
5338 gen_op(t & 0x7f);
5340 vstore();
5344 ST_FUNC void gexpr(void)
5346 while (1) {
5347 expr_eq();
5348 if (tok != ',')
5349 break;
5350 vpop();
5351 next();
5355 /* parse an expression and return its type without any side effect. */
5356 static void expr_type(CType *type)
5359 nocode_wanted++;
5360 gexpr();
5361 *type = vtop->type;
5362 vpop();
5363 nocode_wanted--;
5366 /* parse a unary expression and return its type without any side
5367 effect. */
5368 static void unary_type(CType *type)
5370 nocode_wanted++;
5371 unary();
5372 *type = vtop->type;
5373 vpop();
5374 nocode_wanted--;
5377 /* parse a constant expression and return value in vtop. */
5378 static void expr_const1(void)
5380 const_wanted++;
5381 expr_cond();
5382 const_wanted--;
5385 /* parse an integer constant and return its value. */
5386 static inline int64_t expr_const64(void)
5388 int64_t c;
5389 expr_const1();
5390 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
5391 expect("constant expression");
5392 c = vtop->c.i;
5393 vpop();
5394 return c;
5397 /* parse an integer constant and return its value.
5398 Complain if it doesn't fit 32bit (signed or unsigned). */
5399 ST_FUNC int expr_const(void)
5401 int c;
5402 int64_t wc = expr_const64();
5403 c = wc;
5404 if (c != wc && (unsigned)c != wc)
5405 tcc_error("constant exceeds 32 bit");
5406 return c;
5409 /* return the label token if current token is a label, otherwise
5410 return zero */
5411 static int is_label(void)
5413 int last_tok;
5415 /* fast test first */
5416 if (tok < TOK_UIDENT)
5417 return 0;
5418 /* no need to save tokc because tok is an identifier */
5419 last_tok = tok;
5420 next();
5421 if (tok == ':') {
5422 next();
5423 return last_tok;
5424 } else {
5425 unget_tok(last_tok);
5426 return 0;
5430 static void label_or_decl(int l)
5432 int last_tok;
5434 /* fast test first */
5435 if (tok >= TOK_UIDENT)
5437 /* no need to save tokc because tok is an identifier */
5438 last_tok = tok;
5439 next();
5440 if (tok == ':') {
5441 unget_tok(last_tok);
5442 return;
5444 unget_tok(last_tok);
5446 decl(l);
5449 #ifndef TCC_TARGET_ARM64
5450 static void gfunc_return(CType *func_type)
5452 if ((func_type->t & VT_BTYPE) == VT_STRUCT) {
5453 CType type, ret_type;
5454 int ret_align, ret_nregs, regsize;
5455 ret_nregs = gfunc_sret(func_type, func_var, &ret_type,
5456 &ret_align, &regsize);
5457 if (0 == ret_nregs) {
5458 /* if returning structure, must copy it to implicit
5459 first pointer arg location */
5460 type = *func_type;
5461 mk_pointer(&type);
5462 vset(&type, VT_LOCAL | VT_LVAL, func_vc);
5463 indir();
5464 vswap();
5465 /* copy structure value to pointer */
5466 vstore();
5467 } else {
5468 /* returning structure packed into registers */
5469 int r, size, addr, align;
5470 size = type_size(func_type,&align);
5471 if ((vtop->r != (VT_LOCAL | VT_LVAL) ||
5472 (vtop->c.i & (ret_align-1)))
5473 && (align & (ret_align-1))) {
5474 loc = (loc - size) & -ret_align;
5475 addr = loc;
5476 type = *func_type;
5477 vset(&type, VT_LOCAL | VT_LVAL, addr);
5478 vswap();
5479 vstore();
5480 vpop();
5481 vset(&ret_type, VT_LOCAL | VT_LVAL, addr);
5483 vtop->type = ret_type;
5484 if (is_float(ret_type.t))
5485 r = rc_fret(ret_type.t);
5486 else
5487 r = RC_IRET;
5489 if (ret_nregs == 1)
5490 gv(r);
5491 else {
5492 for (;;) {
5493 vdup();
5494 gv(r);
5495 vpop();
5496 if (--ret_nregs == 0)
5497 break;
5498 /* We assume that when a structure is returned in multiple
5499 registers, their classes are consecutive values of the
5500 sequence s(n) = 2^n */
5501 r <<= 1;
5502 vtop->c.i += regsize;
5506 } else if (is_float(func_type->t)) {
5507 gv(rc_fret(func_type->t));
5508 } else {
5509 gv(RC_IRET);
5511 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
5513 #endif
5515 static int case_cmp(const void *pa, const void *pb)
5517 int64_t a = (*(struct case_t**) pa)->v1;
5518 int64_t b = (*(struct case_t**) pb)->v1;
5519 return a < b ? -1 : a > b;
5522 static void gcase(struct case_t **base, int len, int *bsym)
5524 struct case_t *p;
5525 int e;
5526 int ll = (vtop->type.t & VT_BTYPE) == VT_LLONG;
5527 gv(RC_INT);
5528 while (len > 4) {
5529 /* binary search */
5530 p = base[len/2];
5531 vdup();
5532 if (ll)
5533 vpushll(p->v2);
5534 else
5535 vpushi(p->v2);
5536 gen_op(TOK_LE);
5537 e = gtst(1, 0);
5538 vdup();
5539 if (ll)
5540 vpushll(p->v1);
5541 else
5542 vpushi(p->v1);
5543 gen_op(TOK_GE);
5544 gtst_addr(0, p->sym); /* v1 <= x <= v2 */
5545 /* x < v1 */
5546 gcase(base, len/2, bsym);
5547 if (cur_switch->def_sym)
5548 gjmp_addr(cur_switch->def_sym);
5549 else
5550 *bsym = gjmp(*bsym);
5551 /* x > v2 */
5552 gsym(e);
5553 e = len/2 + 1;
5554 base += e; len -= e;
5556 /* linear scan */
5557 while (len--) {
5558 p = *base++;
5559 vdup();
5560 if (ll)
5561 vpushll(p->v2);
5562 else
5563 vpushi(p->v2);
5564 if (p->v1 == p->v2) {
5565 gen_op(TOK_EQ);
5566 gtst_addr(0, p->sym);
5567 } else {
5568 gen_op(TOK_LE);
5569 e = gtst(1, 0);
5570 vdup();
5571 if (ll)
5572 vpushll(p->v1);
5573 else
5574 vpushi(p->v1);
5575 gen_op(TOK_GE);
5576 gtst_addr(0, p->sym);
5577 gsym(e);
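/* Illustrative example (not part of this file): gcase() also handles the GNU
   case-range extension, which the switch parser in block() records as
   [v1, v2] pairs:

       static char classify(long long n)
       {
           switch (n) {
           case 1 ... 4: return 'a';   // range: matched with two comparisons
           case 5:       return 'b';
           default:      return '?';
           }
       }

   With more than four cases a binary search over the sorted case table is
   emitted before the final linear scan. */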
5582 static void block(int *bsym, int *csym, int is_expr)
5584 int a, b, c, d, cond;
5585 Sym *s;
5587 /* generate line number info */
5588 if (tcc_state->do_debug &&
5589 (last_line_num != file->line_num || last_ind != ind)) {
5590 put_stabn(N_SLINE, 0, file->line_num, ind - func_ind);
5591 last_ind = ind;
5592 last_line_num = file->line_num;
5595 if (is_expr) {
5596 /* default return value is (void) */
5597 vpushi(0);
5598 vtop->type.t = VT_VOID;
5601 if (tok == TOK_IF) {
5602 /* if test */
5603 int saved_nocode_wanted = nocode_wanted;
5604 next();
5605 skip('(');
5606 gexpr();
5607 skip(')');
5608 cond = condition_3way();
5609 if (cond == 1)
5610 a = 0, vpop();
5611 else
5612 a = gvtst(1, 0);
5613 if (cond == 0)
5614 nocode_wanted |= 0x20000000;
5615 block(bsym, csym, 0);
5616 if (cond != 1)
5617 nocode_wanted = saved_nocode_wanted;
5618 c = tok;
5619 if (c == TOK_ELSE) {
5620 next();
5621 d = gjmp(0);
5622 gsym(a);
5623 if (cond == 1)
5624 nocode_wanted |= 0x20000000;
5625 block(bsym, csym, 0);
5626 gsym(d); /* patch else jmp */
5627 if (cond != 0)
5628 nocode_wanted = saved_nocode_wanted;
5629 } else
5630 gsym(a);
5631 } else if (tok == TOK_WHILE) {
5632 int saved_nocode_wanted;
5633 nocode_wanted &= ~0x20000000;
5634 next();
5635 d = ind;
5636 vla_sp_restore();
5637 skip('(');
5638 gexpr();
5639 skip(')');
5640 a = gvtst(1, 0);
5641 b = 0;
5642 ++local_scope;
5643 saved_nocode_wanted = nocode_wanted;
5644 block(&a, &b, 0);
5645 nocode_wanted = saved_nocode_wanted;
5646 --local_scope;
5647 gjmp_addr(d);
5648 gsym(a);
5649 gsym_addr(b, d);
5650 } else if (tok == '{') {
5651 Sym *llabel;
5652 int block_vla_sp_loc = vla_sp_loc, saved_vlas_in_scope = vlas_in_scope;
5654 next();
5655 /* record local declaration stack position */
5656 s = local_stack;
5657 llabel = local_label_stack;
5658 ++local_scope;
5660 /* handle local labels declarations */
5661 if (tok == TOK_LABEL) {
5662 next();
5663 for(;;) {
5664 if (tok < TOK_UIDENT)
5665 expect("label identifier");
5666 label_push(&local_label_stack, tok, LABEL_DECLARED);
5667 next();
5668 if (tok == ',') {
5669 next();
5670 } else {
5671 skip(';');
5672 break;
5676 while (tok != '}') {
5677 label_or_decl(VT_LOCAL);
5678 if (tok != '}') {
5679 if (is_expr)
5680 vpop();
5681 block(bsym, csym, is_expr);
5684 /* pop locally defined labels */
5685 label_pop(&local_label_stack, llabel);
5686 /* pop locally defined symbols */
5687 --local_scope;
5688 /* In the is_expr case (a statement expression is finished here),
5689 vtop might refer to symbols on the local_stack. Either via the
5690 type or via vtop->sym. We can't pop those nor any that in turn
5691 might be referred to. To make it easier we don't roll back
5692 any symbols in that case; some upper level call to block() will
5693 do that. We do have to remove such symbols from the lookup
5694 tables, though. sym_pop will do that. */
5695 sym_pop(&local_stack, s, is_expr);
5697 /* Pop VLA frames and restore stack pointer if required */
5698 if (vlas_in_scope > saved_vlas_in_scope) {
5699 vla_sp_loc = saved_vlas_in_scope ? block_vla_sp_loc : vla_sp_root_loc;
5700 vla_sp_restore();
5702 vlas_in_scope = saved_vlas_in_scope;
5704 next();
5705 } else if (tok == TOK_RETURN) {
5706 next();
5707 if (tok != ';') {
5708 gexpr();
5709 gen_assign_cast(&func_vt);
5710 gfunc_return(&func_vt);
5712 skip(';');
5713 /* jump unless last stmt in top-level block */
5714 if (tok != '}' || local_scope != 1)
5715 rsym = gjmp(rsym);
5716 nocode_wanted |= 0x20000000;
5717 } else if (tok == TOK_BREAK) {
5718 /* compute jump */
5719 if (!bsym)
5720 tcc_error("cannot break");
5721 *bsym = gjmp(*bsym);
5722 next();
5723 skip(';');
5724 nocode_wanted |= 0x20000000;
5725 } else if (tok == TOK_CONTINUE) {
5726 /* compute jump */
5727 if (!csym)
5728 tcc_error("cannot continue");
5729 vla_sp_restore_root();
5730 *csym = gjmp(*csym);
5731 next();
5732 skip(';');
5733 } else if (tok == TOK_FOR) {
5734 int e;
5735 int saved_nocode_wanted;
5736 nocode_wanted &= ~0x20000000;
5737 next();
5738 skip('(');
5739 s = local_stack;
5740 ++local_scope;
5741 if (tok != ';') {
5742 /* c99 for-loop init decl? */
5743 if (!decl0(VT_LOCAL, 1)) {
5744 /* no, regular for-loop init expr */
5745 gexpr();
5746 vpop();
5749 skip(';');
5750 d = ind;
5751 c = ind;
5752 vla_sp_restore();
5753 a = 0;
5754 b = 0;
5755 if (tok != ';') {
5756 gexpr();
5757 a = gvtst(1, 0);
5759 skip(';');
5760 if (tok != ')') {
5761 e = gjmp(0);
5762 c = ind;
5763 vla_sp_restore();
5764 gexpr();
5765 vpop();
5766 gjmp_addr(d);
5767 gsym(e);
5769 skip(')');
5770 saved_nocode_wanted = nocode_wanted;
5771 block(&a, &b, 0);
5772 nocode_wanted = saved_nocode_wanted;
5773 gjmp_addr(c);
5774 gsym(a);
5775 gsym_addr(b, c);
5776 --local_scope;
5777 sym_pop(&local_stack, s, 0);
5779 } else
5780 if (tok == TOK_DO) {
5781 int saved_nocode_wanted;
5782 nocode_wanted &= ~0x20000000;
5783 next();
5784 a = 0;
5785 b = 0;
5786 d = ind;
5787 vla_sp_restore();
5788 saved_nocode_wanted = nocode_wanted;
5789 block(&a, &b, 0);
5790 skip(TOK_WHILE);
5791 skip('(');
5792 gsym(b);
5793 gexpr();
5794 c = gvtst(0, 0);
5795 gsym_addr(c, d);
5796 nocode_wanted = saved_nocode_wanted;
5797 skip(')');
5798 gsym(a);
5799 skip(';');
5800 } else
5801 if (tok == TOK_SWITCH) {
5802 struct switch_t *saved, sw;
5803 int saved_nocode_wanted = nocode_wanted;
5804 SValue switchval;
5805 next();
5806 skip('(');
5807 gexpr();
5808 skip(')');
5809 switchval = *vtop--;
5810 a = 0;
5811 b = gjmp(0); /* jump to first case */
5812 sw.p = NULL; sw.n = 0; sw.def_sym = 0;
5813 saved = cur_switch;
5814 cur_switch = &sw;
5815 block(&a, csym, 0);
5816 nocode_wanted = saved_nocode_wanted;
5817 a = gjmp(a); /* add implicit break */
5818 /* case lookup */
5819 gsym(b);
5820 qsort(sw.p, sw.n, sizeof(void*), case_cmp);
5821 for (b = 1; b < sw.n; b++)
5822 if (sw.p[b - 1]->v2 >= sw.p[b]->v1)
5823 tcc_error("duplicate case value");
5824 /* Our switch table sorting is signed, so the compared
5825 value needs to be as well when it's 64bit. */
5826 if ((switchval.type.t & VT_BTYPE) == VT_LLONG)
5827 switchval.type.t &= ~VT_UNSIGNED;
5828 vpushv(&switchval);
5829 gcase(sw.p, sw.n, &a);
5830 vpop();
5831 if (sw.def_sym)
5832 gjmp_addr(sw.def_sym);
5833 dynarray_reset(&sw.p, &sw.n);
5834 cur_switch = saved;
5835 /* break label */
5836 gsym(a);
5837 } else
5838 if (tok == TOK_CASE) {
5839 struct case_t *cr = tcc_malloc(sizeof(struct case_t));
5840 if (!cur_switch)
5841 expect("switch");
5842 nocode_wanted &= ~0x20000000;
5843 next();
5844 cr->v1 = cr->v2 = expr_const64();
5845 if (gnu_ext && tok == TOK_DOTS) {
5846 next();
5847 cr->v2 = expr_const64();
5848 if (cr->v2 < cr->v1)
5849 tcc_warning("empty case range");
5851 cr->sym = ind;
5852 dynarray_add((void***) &cur_switch->p, &cur_switch->n, cr);
5853 skip(':');
5854 is_expr = 0;
5855 goto block_after_label;
5856 } else
5857 if (tok == TOK_DEFAULT) {
5858 next();
5859 skip(':');
5860 if (!cur_switch)
5861 expect("switch");
5862 if (cur_switch->def_sym)
5863 tcc_error("too many 'default'");
5864 cur_switch->def_sym = ind;
5865 is_expr = 0;
5866 goto block_after_label;
5867 } else
5868 if (tok == TOK_GOTO) {
5869 next();
5870 if (tok == '*' && gnu_ext) {
5871 /* computed goto */
5872 next();
5873 gexpr();
5874 if ((vtop->type.t & VT_BTYPE) != VT_PTR)
5875 expect("pointer");
5876 ggoto();
5877 } else if (tok >= TOK_UIDENT) {
5878 s = label_find(tok);
5879 /* put forward definition if needed */
5880 if (!s) {
5881 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
5882 } else {
5883 if (s->r == LABEL_DECLARED)
5884 s->r = LABEL_FORWARD;
5886 vla_sp_restore_root();
5887 if (s->r & LABEL_FORWARD)
5888 s->jnext = gjmp(s->jnext);
5889 else
5890 gjmp_addr(s->jnext);
5891 next();
5892 } else {
5893 expect("label identifier");
5895 skip(';');
5896 } else if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
5897 asm_instr();
5898 } else {
5899 b = is_label();
5900 if (b) {
5901 /* label case */
5902 s = label_find(b);
5903 if (s) {
5904 if (s->r == LABEL_DEFINED)
5905 tcc_error("duplicate label '%s'", get_tok_str(s->v, NULL));
5906 gsym(s->jnext);
5907 s->r = LABEL_DEFINED;
5908 } else {
5909 s = label_push(&global_label_stack, b, LABEL_DEFINED);
5911 s->jnext = ind;
5912 vla_sp_restore();
5913 /* we accept this, but it is a mistake */
5914 block_after_label:
5915 nocode_wanted &= ~0x20000000;
5916 if (tok == '}') {
5917 tcc_warning("deprecated use of label at end of compound statement");
5918 } else {
5919 if (is_expr)
5920 vpop();
5921 block(bsym, csym, is_expr);
5923 } else {
5924 /* expression case */
5925 if (tok != ';') {
5926 if (is_expr) {
5927 vpop();
5928 gexpr();
5929 } else {
5930 gexpr();
5931 vpop();
5934 skip(';');
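/* Illustrative example (not part of this file): a compound statement may
   begin with __label__ declarations, handled above, which limit a label's
   scope to that block (do_step() is a placeholder):

       extern int do_step(void);
       void retry_demo(void)
       {
           __label__ retry;        // label visible only inside this block
       retry:
           if (!do_step())
               goto retry;
       }
*/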
5939 #define EXPR_CONST 1
5940 #define EXPR_ANY 2
5942 static void parse_init_elem(int expr_type)
5944 int saved_global_expr;
5945 switch(expr_type) {
5946 case EXPR_CONST:
5947 /* compound literals must be allocated globally in this case */
5948 saved_global_expr = global_expr;
5949 global_expr = 1;
5950 expr_const1();
5951 global_expr = saved_global_expr;
5952 /* NOTE: symbols are accepted */
5953 if ((vtop->r & (VT_VALMASK | VT_LVAL)) != VT_CONST)
5954 tcc_error("initializer element is not constant");
5955 break;
5956 case EXPR_ANY:
5957 expr_eq();
5958 break;
5962 /* t is the array or struct type. c is the array or struct
5963 address. cur_field is the pointer to the current
5964 value, for arrays the 'c' member contains the current start
5965 index and the 'r' contains the end index (in case of range init).
5966 'size_only' is true if only size info is needed (only used
5967 in arrays) */
5968 static void decl_designator(CType *type, Section *sec, unsigned long c,
5969 Sym **cur_field, int size_only)
5971 Sym *s, *f;
5972 int notfirst, index, index_last, align, l, nb_elems, elem_size;
5973 CType type1;
5975 notfirst = 0;
5976 elem_size = 0;
5977 nb_elems = 1;
5978 if (gnu_ext && (l = is_label()) != 0)
5979 goto struct_field;
5980 while (tok == '[' || tok == '.') {
5981 if (tok == '[') {
5982 if (!(type->t & VT_ARRAY))
5983 expect("array type");
5984 s = type->ref;
5985 next();
5986 index = expr_const();
5987 if (index < 0 || (s->c >= 0 && index >= s->c))
5988 tcc_error("invalid index");
5989 if (tok == TOK_DOTS && gnu_ext) {
5990 next();
5991 index_last = expr_const();
5992 if (index_last < 0 ||
5993 (s->c >= 0 && index_last >= s->c) ||
5994 index_last < index)
5995 tcc_error("invalid index");
5996 } else {
5997 index_last = index;
5999 skip(']');
6000 if (!notfirst) {
6001 (*cur_field)->c = index;
6002 (*cur_field)->r = index_last;
6004 type = pointed_type(type);
6005 elem_size = type_size(type, &align);
6006 c += index * elem_size;
6007 /* NOTE: we only support ranges for last designator */
6008 nb_elems = index_last - index + 1;
6009 if (nb_elems != 1) {
6010 notfirst = 1;
6011 break;
6013 } else {
6014 next();
6015 l = tok;
6016 next();
6017 struct_field:
6018 if ((type->t & VT_BTYPE) != VT_STRUCT)
6019 expect("struct/union type");
6020 f = find_field(type, l);
6021 if (!f)
6022 expect("field");
6023 if (!notfirst)
6024 *cur_field = f;
6025 /* XXX: fix this mess by using explicit storage field */
6026 type1 = f->type;
6027 type1.t |= (type->t & ~VT_TYPE);
6028 type = &type1;
6029 c += f->c;
6031 notfirst = 1;
6033 if (notfirst) {
6034 if (tok == '=') {
6035 next();
6036 } else {
6037 if (!gnu_ext)
6038 expect("=");
6040 } else {
6041 if (type->t & VT_ARRAY) {
6042 index = (*cur_field)->c;
6043 if (type->ref->c >= 0 && index >= type->ref->c)
6044 tcc_error("index too large");
6045 type = pointed_type(type);
6046 c += index * type_size(type, &align);
6047 } else {
6048 f = *cur_field;
6049 if (!f)
6050 tcc_error("too many field init");
6051 /* XXX: fix this mess by using explicit storage field */
6052 type1 = f->type;
6053 type1.t |= (type->t & ~VT_TYPE);
6054 type = &type1;
6055 c += f->c;
6058 decl_initializer(type, sec, c, 0, size_only);
6060 /* XXX: make it more general */
6061 if (!size_only && nb_elems > 1) {
6062 unsigned long c_end;
6063 uint8_t *src, *dst;
6064 int i;
6066 if (!sec) {
6067 vset(type, VT_LOCAL|VT_LVAL, c);
6068 for (i = 1; i < nb_elems; i++) {
6069 vset(type, VT_LOCAL|VT_LVAL, c + elem_size * i);
6070 vswap();
6071 vstore();
6073 vpop();
6074 } else {
6075 c_end = c + nb_elems * elem_size;
6076 if (c_end > sec->data_allocated)
6077 section_realloc(sec, c_end);
6078 src = sec->data + c;
6079 dst = src;
6080 for(i = 1; i < nb_elems; i++) {
6081 dst += elem_size;
6082 memcpy(dst, src, elem_size);
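/* Illustrative examples (not part of this file) of the designators parsed
   above, including the GNU range form handled via nb_elems:

       struct P { int x, y; };
       struct P p = { .y = 2, .x = 1 };
       int tab[8] = { [2] = 5, [4 ... 6] = 9 };   // range designator

   For a range, the element is initialized once and then copied into the
   remaining slots (memcpy for static storage, vstore for locals). */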
6088 /* store a value or an expression directly in global data or in local array */
6089 static void init_putv(CType *type, Section *sec, unsigned long c)
6091 int bt, bit_pos, bit_size;
6092 void *ptr;
6093 unsigned long long bit_mask;
6094 CType dtype;
6096 dtype = *type;
6097 dtype.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
6099 if (sec) {
6100 int size, align;
6101 /* XXX: not portable */
6102 /* XXX: generate error if incorrect relocation */
6103 gen_assign_cast(&dtype);
6104 bt = type->t & VT_BTYPE;
6105 size = type_size(type, &align);
6106 if (c + size > sec->data_allocated) {
6107 section_realloc(sec, c + size);
6109 ptr = sec->data + c;
6110 /* XXX: make code faster ? */
6111 if (!(type->t & VT_BITFIELD)) {
6112 bit_pos = 0;
6113 bit_size = PTR_SIZE * 8;
6114 bit_mask = -1LL;
6115 } else {
6116 bit_pos = (vtop->type.t >> VT_STRUCT_SHIFT) & 0x3f;
6117 bit_size = (vtop->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
6118 bit_mask = (1LL << bit_size) - 1;
6120 if ((vtop->r & (VT_SYM|VT_CONST)) == (VT_SYM|VT_CONST) &&
6121 vtop->sym->v >= SYM_FIRST_ANOM &&
6122 /* XXX This rejects compound literals like
6123 '(void *){ptr}'. The problem is that '&sym' is
6124 represented the same way, which would be ruled out
6125 by the SYM_FIRST_ANOM check above, but also '"string"'
6126 in 'char *p = "string"' is represented the same
6127 with the type being VT_PTR and the symbol being an
6128 anonymous one. That is, there's no difference in vtop
6129 between '(void *){x}' and '&(void *){x}'. Ignore
6130 pointer typed entities here. Hopefully no real code
6131 will ever use compound literals with scalar types. */
6132 (vtop->type.t & VT_BTYPE) != VT_PTR) {
6133 /* These come from compound literals, memcpy stuff over. */
6134 Section *ssec;
6135 ElfW(Sym) *esym;
6136 ElfW_Rel *rel;
6137 esym = &((ElfW(Sym) *)symtab_section->data)[vtop->sym->c];
6138 ssec = tcc_state->sections[esym->st_shndx];
6139 memmove (ptr, ssec->data + esym->st_value, size);
6140 if (ssec->reloc) {
6141 /* We need to copy over all memory contents, and that
6142 includes relocations. Use the fact that relocs are
6143 created in order, so look from the end of relocs
6144 until we hit one before the copied region. */
6145 int num_relocs = ssec->reloc->data_offset / sizeof(*rel);
6146 rel = (ElfW_Rel*)(ssec->reloc->data + ssec->reloc->data_offset);
6147 while (num_relocs--) {
6148 rel--;
6149 if (rel->r_offset >= esym->st_value + size)
6150 continue;
6151 if (rel->r_offset < esym->st_value)
6152 break;
6153 /* Note: if the same fields are initialized multiple
6154 times (possible with designators) then we possibly
6155 add multiple relocations for the same offset here.
6156 That would lead to wrong code, the last reloc needs
6157 to win. We clean this up later after the whole
6158 initializer is parsed. */
6159 put_elf_reloca(symtab_section, sec,
6160 c + rel->r_offset - esym->st_value,
6161 ELFW(R_TYPE)(rel->r_info),
6162 ELFW(R_SYM)(rel->r_info),
6163 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
6164 rel->r_addend
6165 #else
6167 #endif
6171 } else {
6172 if ((vtop->r & VT_SYM) &&
6173 (bt == VT_BYTE ||
6174 bt == VT_SHORT ||
6175 bt == VT_DOUBLE ||
6176 bt == VT_LDOUBLE ||
6177 #if PTR_SIZE == 8
6178 (bt == VT_LLONG && bit_size != 64) ||
6179 bt == VT_INT
6180 #else
6181 bt == VT_LLONG ||
6182 (bt == VT_INT && bit_size != 32)
6183 #endif
6185 tcc_error("initializer element is not computable at load time");
6186 switch(bt) {
6187 /* XXX: when cross-compiling we assume that each type has the
6188 same representation on host and target, which is likely to
6189 be wrong in the case of long double */
6190 case VT_BOOL:
6191 vtop->c.i = (vtop->c.i != 0);
6192 case VT_BYTE:
6193 *(char *)ptr |= (vtop->c.i & bit_mask) << bit_pos;
6194 break;
6195 case VT_SHORT:
6196 *(short *)ptr |= (vtop->c.i & bit_mask) << bit_pos;
6197 break;
6198 case VT_DOUBLE:
6199 *(double *)ptr = vtop->c.d;
6200 break;
6201 case VT_LDOUBLE:
6202 if (sizeof(long double) == LDOUBLE_SIZE)
6203 *(long double *)ptr = vtop->c.ld;
6204 else if (sizeof(double) == LDOUBLE_SIZE)
6205 *(double *)ptr = vtop->c.ld;
6206 else
6207 tcc_error("can't cross compile long double constants");
6208 break;
6209 #if PTR_SIZE != 8
6210 case VT_LLONG:
6211 *(long long *)ptr |= (vtop->c.i & bit_mask) << bit_pos;
6212 break;
6213 #else
6214 case VT_LLONG:
6215 #endif
6216 case VT_PTR:
6218 addr_t val = (vtop->c.i & bit_mask) << bit_pos;
6219 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
6220 if (vtop->r & VT_SYM)
6221 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
6222 else
6223 *(addr_t *)ptr |= val;
6224 #else
6225 if (vtop->r & VT_SYM)
6226 greloc(sec, vtop->sym, c, R_DATA_PTR);
6227 *(addr_t *)ptr |= val;
6228 #endif
6229 break;
6231 default:
6233 int val = (vtop->c.i & bit_mask) << bit_pos;
6234 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
6235 if (vtop->r & VT_SYM)
6236 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
6237 else
6238 *(int *)ptr |= val;
6239 #else
6240 if (vtop->r & VT_SYM)
6241 greloc(sec, vtop->sym, c, R_DATA_PTR);
6242 *(int *)ptr |= val;
6243 #endif
6244 break;
6248 vtop--;
6249 } else {
6250 vset(&dtype, VT_LOCAL|VT_LVAL, c);
6251 vswap();
6252 vstore();
6253 vpop();
6257 /* put zeros for variable based init */
6258 static void init_putz(Section *sec, unsigned long c, int size)
6260 if (sec) {
6261 /* nothing to do because globals are already set to zero */
6262 } else {
6263 vpush_global_sym(&func_old_type, TOK_memset);
6264 vseti(VT_LOCAL, c);
6265 #ifdef TCC_TARGET_ARM
6266 vpushs(size);
6267 vpushi(0);
6268 #else
6269 vpushi(0);
6270 vpushs(size);
6271 #endif
6272 gfunc_call(3);
6276 /* 't' contains the type and storage info. 'c' is the offset of the
6277 object in section 'sec'. If 'sec' is NULL, it means stack based
6278 allocation. 'first' is true if array '{' must be read (multi
6279 dimension implicit array init handling). 'size_only' is true if
6280 size only evaluation is wanted (only for arrays). */
6281 static void decl_initializer(CType *type, Section *sec, unsigned long c,
6282 int first, int size_only)
6284 int index, array_length, n, no_oblock, nb, parlevel, parlevel1, i;
6285 int size1, align1;
6286 int have_elem;
6287 Sym *s, *f;
6288 Sym indexsym;
6289 CType *t1;
6291 /* If we are currently at a '}' or ',' we have read an initializer
6292 element in one of our callers, and not yet consumed it. */
6293 have_elem = tok == '}' || tok == ',';
6294 if (!have_elem && tok != '{' &&
6295 /* In case of strings we have special handling for arrays, so
6296 don't consume them as initializer value (which would commit them
6297 to some anonymous symbol). */
6298 tok != TOK_LSTR && tok != TOK_STR &&
6299 !size_only) {
6300 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
6301 have_elem = 1;
6304 if (have_elem &&
6305 !(type->t & VT_ARRAY) &&
6306 /* Use i_c_parameter_t, to strip toplevel qualifiers.
6307 The source type might have VT_CONSTANT set, which is
6308 of course assignable to non-const elements. */
6309 is_compatible_parameter_types(type, &vtop->type)) {
6310 init_putv(type, sec, c);
6311 } else if (type->t & VT_ARRAY) {
6312 s = type->ref;
6313 n = s->c;
6314 array_length = 0;
6315 t1 = pointed_type(type);
6316 size1 = type_size(t1, &align1);
6318 no_oblock = 1;
6319 if ((first && tok != TOK_LSTR && tok != TOK_STR) ||
6320 tok == '{') {
6321 if (tok != '{')
6322 tcc_error("character array initializer must be a literal,"
6323 " optionally enclosed in braces");
6324 skip('{');
6325 no_oblock = 0;
6328 /* only parse strings here if correct type (otherwise: handle
6329 them as ((w)char *) expressions) */
6330 if ((tok == TOK_LSTR &&
6331 #ifdef TCC_TARGET_PE
6332 (t1->t & VT_BTYPE) == VT_SHORT && (t1->t & VT_UNSIGNED)
6333 #else
6334 (t1->t & VT_BTYPE) == VT_INT
6335 #endif
6336 ) || (tok == TOK_STR && (t1->t & VT_BTYPE) == VT_BYTE)) {
6337 while (tok == TOK_STR || tok == TOK_LSTR) {
6338 int cstr_len, ch;
6340 /* compute maximum number of chars wanted */
6341 if (tok == TOK_STR)
6342 cstr_len = tokc.str.size;
6343 else
6344 cstr_len = tokc.str.size / sizeof(nwchar_t);
6345 cstr_len--;
6346 nb = cstr_len;
6347 if (n >= 0 && nb > (n - array_length))
6348 nb = n - array_length;
6349 if (!size_only) {
6350 if (cstr_len > nb)
6351 tcc_warning("initializer-string for array is too long");
6352 /* in order to go faster for the common case (a char
6353 string in a global variable), we handle it
6354 specially */
6355 if (sec && tok == TOK_STR && size1 == 1) {
6356 memcpy(sec->data + c + array_length, tokc.str.data, nb);
6357 } else {
6358 for(i=0;i<nb;i++) {
6359 if (tok == TOK_STR)
6360 ch = ((unsigned char *)tokc.str.data)[i];
6361 else
6362 ch = ((nwchar_t *)tokc.str.data)[i];
6363 vpushi(ch);
6364 init_putv(t1, sec, c + (array_length + i) * size1);
6368 array_length += nb;
6369 next();
6371 /* only add trailing zero if enough storage (no
6372 warning in this case since it is standard) */
6373 if (n < 0 || array_length < n) {
6374 if (!size_only) {
6375 vpushi(0);
6376 init_putv(t1, sec, c + (array_length * size1));
6378 array_length++;
6380 } else {
6381 indexsym.c = 0;
6382 indexsym.r = 0;
6383 f = &indexsym;
6385 do_init_list:
6386 while (tok != '}' || have_elem) {
6387 decl_designator(type, sec, c, &f, size_only);
6388 have_elem = 0;
6389 index = f->c;
6390 /* must put zero in holes (note that doing it that way
6391 ensures that it even works with designators) */
6392 if (!size_only && array_length < index) {
6393 init_putz(sec, c + array_length * size1,
6394 (index - array_length) * size1);
6396 if (type->t & VT_ARRAY) {
6397 index = indexsym.c = ++indexsym.r;
6398 } else {
6399 index = index + type_size(&f->type, &align1);
6400 if (s->type.t == TOK_UNION)
6401 f = NULL;
6402 else
6403 f = f->next;
6405 if (index > array_length)
6406 array_length = index;
6408 if (type->t & VT_ARRAY) {
6409 /* special test for multi dimensional arrays (may not
6410 be strictly correct if designators are used at the
6411 same time) */
6412 if (no_oblock && index >= n)
6413 break;
6414 } else {
6415 if (no_oblock && f == NULL)
6416 break;
6418 if (tok == '}')
6419 break;
6420 skip(',');
6423 /* put zeros at the end */
6424 if (!size_only && array_length < n) {
6425 init_putz(sec, c + array_length * size1,
6426 (n - array_length) * size1);
6428 if (!no_oblock)
6429 skip('}');
6430 /* patch type size if needed, which happens only for array types */
6431 if (n < 0)
6432 s->c = array_length;
6433 } else if ((type->t & VT_BTYPE) == VT_STRUCT) {
6434 size1 = 1;
6435 no_oblock = 1;
6436 if (first || tok == '{') {
6437 skip('{');
6438 no_oblock = 0;
6440 s = type->ref;
6441 f = s->next;
6442 array_length = 0;
6443 n = s->c;
6444 goto do_init_list;
6445 } else if (tok == '{') {
6446 next();
6447 decl_initializer(type, sec, c, first, size_only);
6448 skip('}');
6449 } else if (size_only) {
6450 /* If we supported only ISO C we wouldn't have to accept calling
6451 this on anything other than an array with size_only==1 (and even then
6452 only on the outermost level, so no recursion would be needed),
6453 because initializing a flex array member isn't supported.
6454 But GNU C supports it, so we need to recurse even into
6455 subfields of structs and arrays when size_only is set. */
6456 /* just skip expression */
6457 parlevel = parlevel1 = 0;
6458 while ((parlevel > 0 || parlevel1 > 0 ||
6459 (tok != '}' && tok != ',')) && tok != -1) {
6460 if (tok == '(')
6461 parlevel++;
6462 else if (tok == ')') {
6463 if (parlevel == 0 && parlevel1 == 0)
6464 break;
6465 parlevel--;
6467 else if (tok == '{')
6468 parlevel1++;
6469 else if (tok == '}') {
6470 if (parlevel == 0 && parlevel1 == 0)
6471 break;
6472 parlevel1--;
6474 next();
6476 } else {
6477 if (!have_elem) {
6478 /* This should happen only when we haven't parsed
6479 the init element above for fear of committing a
6480 string constant to memory too early. */
6481 if (tok != TOK_STR && tok != TOK_LSTR)
6482 expect("string constant");
6483 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
6485 init_putv(type, sec, c);
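/* Illustrative examples (not part of this file) of the array and string
   handling above:

       char s[4] = "abc";              // the trailing '\0' fits and is stored
       char t[3] = "abc";              // no room for '\0': it is simply dropped
       char u[]  = "hi";               // size patched to 3 from array_length
       int  m[2][2] = { 1, 2, 3, 4 };  // inner braces may be omitted
*/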
6489 /* parse an initializer for type 't' if 'has_init' is non zero, and
6490 allocate space in local or global data space ('r' is either
6491 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
6492 variable 'v' of scope 'scope' is declared before initializers
6493 are parsed. If 'v' is zero, then a reference to the new object
6494 is put in the value stack. If 'has_init' is 2, a special parsing
6495 is done to handle string constants. */
6496 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r,
6497 int has_init, int v, int scope)
6499 int size, align, addr, data_offset;
6500 int level;
6501 ParseState saved_parse_state = {0};
6502 TokenString *init_str = NULL;
6503 Section *sec;
6504 Sym *flexible_array;
6506 flexible_array = NULL;
6507 if ((type->t & VT_BTYPE) == VT_STRUCT) {
6508 Sym *field = type->ref->next;
6509 if (field) {
6510 while (field->next)
6511 field = field->next;
6512 if (field->type.t & VT_ARRAY && field->type.ref->c < 0)
6513 flexible_array = field;
6517 size = type_size(type, &align);
6518 /* If unknown size, we must evaluate it before
6519 evaluating initializers because
6520 initializers can generate global data too
6521 (e.g. string pointers or ISOC99 compound
6522 literals). It also simplifies local
6523 initializer handling */
6524 if (size < 0 || (flexible_array && has_init)) {
6525 if (!has_init)
6526 tcc_error("unknown type size");
6527 /* get all init string */
6528 init_str = tok_str_alloc();
6529 if (has_init == 2) {
6530 /* only get strings */
6531 while (tok == TOK_STR || tok == TOK_LSTR) {
6532 tok_str_add_tok(init_str);
6533 next();
6535 } else {
6536 level = 0;
6537 while (level > 0 || (tok != ',' && tok != ';')) {
6538 if (tok < 0)
6539 tcc_error("unexpected end of file in initializer");
6540 tok_str_add_tok(init_str);
6541 if (tok == '{')
6542 level++;
6543 else if (tok == '}') {
6544 level--;
6545 if (level <= 0) {
6546 next();
6547 break;
6550 next();
6553 tok_str_add(init_str, -1);
6554 tok_str_add(init_str, 0);
6556 /* compute size */
6557 save_parse_state(&saved_parse_state);
6559 begin_macro(init_str, 1);
6560 next();
6561 decl_initializer(type, NULL, 0, 1, 1);
6562 /* prepare second initializer parsing */
6563 macro_ptr = init_str->str;
6564 next();
6566 /* if still unknown size, error */
6567 size = type_size(type, &align);
6568 if (size < 0)
6569 tcc_error("unknown type size");
6571 /* If there's a flex member and it was used in the initializer
6572 adjust size. */
6573 if (flexible_array &&
6574 flexible_array->type.ref->c > 0)
6575 size += flexible_array->type.ref->c
6576 * pointed_size(&flexible_array->type);
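/* Illustrative example (not part of this file): when the last member is a
   flexible array and an initializer is given, the element count found by the
   sizing pass is added to the object's size here:

       struct V { int n; int a[]; };
       static struct V v = { 2, { 10, 20 } };   // size of v includes a[0] and a[1]

   The member's recorded count is reset to -1 after parsing (see further
   below) so later declarations of the same type start fresh. */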
6577 /* take into account specified alignment if bigger */
6578 if (ad->a.aligned) {
6579 int speca = 1 << (ad->a.aligned - 1);
6580 if (speca > align)
6581 align = speca;
6582 } else if (ad->a.packed) {
6583 align = 1;
6585 if ((r & VT_VALMASK) == VT_LOCAL) {
6586 sec = NULL;
6587 #ifdef CONFIG_TCC_BCHECK
6588 if (tcc_state->do_bounds_check && (type->t & VT_ARRAY)) {
6589 loc--;
6591 #endif
6592 loc = (loc - size) & -align;
6593 addr = loc;
6594 #ifdef CONFIG_TCC_BCHECK
6595 /* handles bounds */
6596 /* XXX: currently, since we do only one pass, we cannot track
6597 '&' operators, so we add only arrays */
6598 if (tcc_state->do_bounds_check && (type->t & VT_ARRAY)) {
6599 addr_t *bounds_ptr;
6600 /* add padding between regions */
6601 loc--;
6602 /* then add local bound info */
6603 bounds_ptr = section_ptr_add(lbounds_section, 2 * sizeof(addr_t));
6604 bounds_ptr[0] = addr;
6605 bounds_ptr[1] = size;
6607 #endif
6608 if (v) {
6609 /* local variable */
6610 #ifdef CONFIG_TCC_ASM
6611 if (ad->asm_label) {
6612 int reg = asm_parse_regvar(ad->asm_label);
6613 if (reg >= 0)
6614 r = (r & ~VT_VALMASK) | reg;
6616 #endif
6617 sym_push(v, type, r, addr);
6618 } else {
6619 /* push local reference */
6620 vset(type, r, addr);
6622 } else {
6623 Sym *sym;
6625 sym = NULL;
6626 if (v && scope == VT_CONST) {
6627 /* see if the symbol was already defined */
6628 sym = sym_find(v);
6629 if (sym) {
6630 if (!is_compatible_types(&sym->type, type))
6631 tcc_error("incompatible types for redefinition of '%s'",
6632 get_tok_str(v, NULL));
6633 if (sym->type.t & VT_EXTERN) {
6634 /* if the variable is extern, it was not allocated */
6635 sym->type.t &= ~VT_EXTERN;
6636 /* set array size if it was omitted in extern
6637 declaration */
6638 if ((sym->type.t & VT_ARRAY) &&
6639 sym->type.ref->c < 0 &&
6640 type->ref->c >= 0)
6641 sym->type.ref->c = type->ref->c;
6642 } else {
6643 /* we accept several definitions of the same
6644 global variable. This is tricky, because we
6645 must play with the SHN_COMMON type of the symbol */
6646 /* XXX: should check if the variable was already
6647 initialized. It is incorrect to initialize it
6648 twice */
6649 /* no init data, we won't add more to the symbol */
6650 if (!has_init)
6651 goto no_alloc;
6652 }
6653 }
6654 }
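/* Tentative definitions such as
       int counter;
       int counter;
   rely on this: without an initializer the object becomes an
   SHN_COMMON symbol (see below), and a repeated definition simply
   jumps to no_alloc instead of allocating storage twice. */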
6656 /* allocate symbol in corresponding section */
6657 sec = ad->section;
6658 if (!sec) {
6659 if (has_init)
6660 sec = data_section;
6661 else if (tcc_state->nocommon)
6662 sec = bss_section;
6663 }
6664 if (sec) {
6665 data_offset = sec->data_offset;
6666 data_offset = (data_offset + align - 1) & -align;
6667 addr = data_offset;
6668 /* very important to advance the section's data offset now,
6669 because initializers can themselves create new initializers */
6670 data_offset += size;
6671 #ifdef CONFIG_TCC_BCHECK
6672 /* add padding if bound check */
6673 if (tcc_state->do_bounds_check)
6674 data_offset++;
6675 #endif
6676 sec->data_offset = data_offset;
6677 /* allocate section space to put the data */
6678 if (sec->sh_type != SHT_NOBITS &&
6679 data_offset > sec->data_allocated)
6680 section_realloc(sec, data_offset);
6681 /* align section if needed */
6682 if (align > sec->sh_addralign)
6683 sec->sh_addralign = align;
6684 } else {
6685 addr = 0; /* avoid warning */
6686 }
6688 if (v) {
6689 if (scope != VT_CONST || !sym) {
6690 sym = sym_push(v, type, r | VT_SYM, 0);
6691 sym->asm_label = ad->asm_label;
6692 }
6693 /* update symbol definition */
6694 if (sec) {
6695 put_extern_sym(sym, sec, addr, size);
6696 } else {
6697 ElfW(Sym) *esym;
6698 /* put a common area */
6699 put_extern_sym(sym, NULL, align, size);
6700 /* XXX: find a nicer way */
6701 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
6702 esym->st_shndx = SHN_COMMON;
6703 }
6704 } else {
6705 /* push global reference */
6706 sym = get_sym_ref(type, sec, addr, size);
6707 vpushsym(type, sym);
6708 }
6709 /* patch symbol weakness */
6710 if (type->t & VT_WEAK)
6711 weaken_symbol(sym);
6712 apply_visibility(sym, type);
6713 #ifdef CONFIG_TCC_BCHECK
6714 /* handle bounds now because the symbol must be defined
6715 before the relocation */
6716 if (tcc_state->do_bounds_check) {
6717 addr_t *bounds_ptr;
6719 greloc(bounds_section, sym, bounds_section->data_offset, R_DATA_PTR);
6720 /* then add global bound info */
6721 bounds_ptr = section_ptr_add(bounds_section, 2 * sizeof(addr_t));
6722 bounds_ptr[0] = 0; /* relocated */
6723 bounds_ptr[1] = size;
6724 }
6725 #endif
6727 if (type->t & VT_VLA) {
6728 int a;
6730 /* save current stack pointer */
6731 if (vlas_in_scope == 0) {
6732 if (vla_sp_root_loc == -1)
6733 vla_sp_root_loc = (loc -= PTR_SIZE);
6734 gen_vla_sp_save(vla_sp_root_loc);
6735 }
6737 vla_runtime_type_size(type, &a);
6738 gen_vla_alloc(type, a);
6739 gen_vla_sp_save(addr);
6740 vla_sp_loc = addr;
6741 vlas_in_scope++;
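/* Typical VLA handled above:
       void f(int n) { int buf[n]; ... }
   the stack pointer is saved once per scope, the array is allocated
   by adjusting the stack at run time, and the new stack pointer is
   recorded for later restoration. */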
6742 } else if (has_init) {
6743 size_t oldreloc_offset = 0;
6744 if (sec && sec->reloc)
6745 oldreloc_offset = sec->reloc->data_offset;
6746 decl_initializer(type, sec, addr, 1, 0);
6747 if (sec && sec->reloc)
6748 squeeze_multi_relocs(sec, oldreloc_offset);
6749 /* patch flexible array member size back to -1, */
6750 /* for possible subsequent similar declarations */
6751 if (flexible_array)
6752 flexible_array->type.ref->c = -1;
6753 }
6754 no_alloc: ;
6755 /* restore parse state if needed */
6756 if (init_str) {
6757 end_macro();
6758 restore_parse_state(&saved_parse_state);
6759 }
6760 }
6762 static void put_func_debug(Sym *sym)
6763 {
6764 char buf[512];
6766 /* stabs info */
6767 /* XXX: we put here a dummy type */
6768 snprintf(buf, sizeof(buf), "%s:%c1",
6769 funcname, sym->type.t & VT_STATIC ? 'f' : 'F');
6770 put_stabs_r(buf, N_FUN, 0, file->line_num, 0,
6771 cur_text_section, sym->c);
6772 /* gdb wants a line at the start of the function */
6773 put_stabn(N_SLINE, 0, file->line_num, 0);
6774 last_ind = 0;
6775 last_line_num = 0;
6776 }
6778 /* parse an old-style function declaration list */
6779 /* XXX: check multiple parameters */
6780 static void func_decl_list(Sym *func_sym)
6781 {
6782 AttributeDef ad;
6783 int v;
6784 Sym *s;
6785 CType btype, type;
6787 /* parse each declaration */
6788 while (tok != '{' && tok != ';' && tok != ',' && tok != TOK_EOF &&
6789 tok != TOK_ASM1 && tok != TOK_ASM2 && tok != TOK_ASM3) {
6790 if (!parse_btype(&btype, &ad))
6791 expect("declaration list");
6792 if (((btype.t & VT_BTYPE) == VT_ENUM ||
6793 (btype.t & VT_BTYPE) == VT_STRUCT) &&
6794 tok == ';') {
6795 /* we accept no variable after */
6796 } else {
6797 for(;;) {
6798 type = btype;
6799 type_decl(&type, &ad, &v, TYPE_DIRECT);
6800 /* find parameter in function parameter list */
6801 s = func_sym->next;
6802 while (s != NULL) {
6803 if ((s->v & ~SYM_FIELD) == v)
6804 goto found;
6805 s = s->next;
6806 }
6807 tcc_error("declaration for parameter '%s' but no such parameter",
6808 get_tok_str(v, NULL));
6809 found:
6810 /* check that no storage specifier except 'register' was given */
6811 if (type.t & VT_STORAGE)
6812 tcc_error("storage class specified for '%s'", get_tok_str(v, NULL));
6813 convert_parameter_type(&type);
6814 /* we can add the type (NOTE: it could be local to the function) */
6815 s->type = type;
6816 /* accept other parameters */
6817 if (tok == ',')
6818 next();
6819 else
6820 break;
6821 }
6822 }
6823 skip(';');
6824 }
6825 }
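/* Example of an old-style (K&R) definition handled by func_decl_list() above:
       int add(a, b)
           int a;
           int b;
       {
           return a + b;
       }
   the declarations between the parameter list and the body are
   matched against the parameters recorded in 'func_sym'. */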
6827 /* parse a function defined by symbol 'sym' and generate its code in
6828 'cur_text_section' */
6829 static void gen_function(Sym *sym)
6830 {
6831 nocode_wanted = 0;
6832 ind = cur_text_section->data_offset;
6833 /* NOTE: we patch the symbol size later */
6834 put_extern_sym(sym, cur_text_section, ind, 0);
6835 funcname = get_tok_str(sym->v, NULL);
6836 func_ind = ind;
6837 /* Initialize VLA state */
6838 vla_sp_loc = -1;
6839 vla_sp_root_loc = -1;
6840 /* put debug symbol */
6841 if (tcc_state->do_debug)
6842 put_func_debug(sym);
6844 /* push a dummy symbol to enable local sym storage */
6845 sym_push2(&local_stack, SYM_FIELD, 0, 0);
6846 local_scope = 1; /* for function parameters */
6847 gfunc_prolog(&sym->type);
6848 local_scope = 0;
6850 rsym = 0;
6851 block(NULL, NULL, 0);
6852 nocode_wanted = 0;
6853 gsym(rsym);
6854 gfunc_epilog();
6855 cur_text_section->data_offset = ind;
6856 label_pop(&global_label_stack, NULL);
6857 /* reset local stack */
6858 local_scope = 0;
6859 sym_pop(&local_stack, NULL, 0);
6860 /* end of function */
6861 /* patch symbol size */
6862 ((ElfW(Sym) *)symtab_section->data)[sym->c].st_size =
6863 ind - func_ind;
6864 /* patch symbol weakness (this definition overrules any prototype) */
6865 if (sym->type.t & VT_WEAK)
6866 weaken_symbol(sym);
6867 apply_visibility(sym, &sym->type);
6868 if (tcc_state->do_debug) {
6869 put_stabn(N_FUN, 0, 0, ind - func_ind);
6870 }
6871 /* It's better to crash than to generate wrong code */
6872 cur_text_section = NULL;
6873 funcname = ""; /* for safety */
6874 func_vt.t = VT_VOID; /* for safety */
6875 func_var = 0; /* for safety */
6876 ind = 0; /* for safety */
6877 nocode_wanted = 1;
6878 check_vstack();
6879 }
6881 static void gen_inline_functions(TCCState *s)
6882 {
6883 Sym *sym;
6884 int inline_generated, i, ln;
6885 struct InlineFunc *fn;
6887 ln = file->line_num;
6888 /* iterate while inline functions are referenced */
6889 for(;;) {
6890 inline_generated = 0;
6891 for (i = 0; i < s->nb_inline_fns; ++i) {
6892 fn = s->inline_fns[i];
6893 sym = fn->sym;
6894 if (sym && sym->c) {
6895 /* the function was used: generate its code and
6896 convert it to a normal function */
6897 fn->sym = NULL;
6898 if (file)
6899 pstrcpy(file->filename, sizeof file->filename, fn->filename);
6900 sym->r = VT_SYM | VT_CONST;
6901 sym->type.t &= ~VT_INLINE;
6903 begin_macro(fn->func_str, 1);
6904 next();
6905 cur_text_section = text_section;
6906 gen_function(sym);
6907 end_macro();
6909 inline_generated = 1;
6910 }
6911 }
6912 if (!inline_generated)
6913 break;
6914 }
6915 file->line_num = ln;
6916 }
6918 ST_FUNC void free_inline_functions(TCCState *s)
6919 {
6920 int i;
6921 /* free tokens of unused inline functions */
6922 for (i = 0; i < s->nb_inline_fns; ++i) {
6923 struct InlineFunc *fn = s->inline_fns[i];
6924 if (fn->sym)
6925 tok_str_free(fn->func_str);
6926 }
6927 dynarray_reset(&s->inline_fns, &s->nb_inline_fns);
6928 }
6930 /* 'l' is VT_LOCAL or VT_CONST to define default storage type */
6931 static int decl0(int l, int is_for_loop_init)
6932 {
6933 int v, has_init, r;
6934 CType type, btype;
6935 Sym *sym;
6936 AttributeDef ad;
6938 while (1) {
6939 if (!parse_btype(&btype, &ad)) {
6940 if (is_for_loop_init)
6941 return 0;
6942 /* skip redundant ';' */
6943 /* XXX: find more elegant solution */
6944 if (tok == ';') {
6945 next();
6946 continue;
6947 }
6948 if (l == VT_CONST &&
6949 (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
6950 /* global asm block */
6951 asm_global_instr();
6952 continue;
6953 }
6954 /* special test for old K&R protos without explicit int
6955 type. Only accepted when defining global data */
6956 if (l == VT_LOCAL || tok < TOK_UIDENT)
6957 break;
6958 btype.t = VT_INT;
6959 }
6960 if (((btype.t & VT_BTYPE) == VT_ENUM ||
6961 (btype.t & VT_BTYPE) == VT_STRUCT) &&
6962 tok == ';') {
6963 if ((btype.t & VT_BTYPE) == VT_STRUCT) {
6964 int v = btype.ref->v;
6965 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) >= SYM_FIRST_ANOM)
6966 tcc_warning("unnamed struct/union that defines no instances");
6967 }
6968 next();
6969 continue;
6970 }
6971 while (1) { /* iterate through each declaration */
6972 type = btype;
6973 /* If the base type itself was an array type of unspecified
6974 size (like in 'typedef int arr[]; arr x = {1};') then
6975 we will overwrite the unknown size by the real one for
6976 this decl. We need to unshare the ref symbol holding
6977 that size. */
6978 if ((type.t & VT_ARRAY) && type.ref->c < 0) {
6979 type.ref = sym_push(SYM_FIELD, &type.ref->type, 0, type.ref->c);
6980 }
6981 type_decl(&type, &ad, &v, TYPE_DIRECT);
6982 #if 0
6984 char buf[500];
6985 type_to_str(buf, sizeof(buf), t, get_tok_str(v, NULL));
6986 printf("type = '%s'\n", buf);
6988 #endif
6989 if ((type.t & VT_BTYPE) == VT_FUNC) {
6990 if ((type.t & VT_STATIC) && (l == VT_LOCAL)) {
6991 tcc_error("function without file scope cannot be static");
6992 }
6993 /* if old style function prototype, we accept a
6994 declaration list */
6995 sym = type.ref;
6996 if (sym->c == FUNC_OLD)
6997 func_decl_list(sym);
6998 }
7000 if (gnu_ext && (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
7001 ad.asm_label = asm_label_instr();
7002 /* parse one last attribute list, after asm label */
7003 parse_attribute(&ad);
7004 if (tok == '{')
7005 expect(";");
7006 }
7008 if (ad.a.weak)
7009 type.t |= VT_WEAK;
7010 #ifdef TCC_TARGET_PE
7011 if (ad.a.func_import)
7012 type.t |= VT_IMPORT;
7013 if (ad.a.func_export)
7014 type.t |= VT_EXPORT;
7015 #endif
7016 type.t |= ad.a.visibility << VT_VIS_SHIFT;
7018 if (tok == '{') {
7019 if (l == VT_LOCAL)
7020 tcc_error("cannot use local functions");
7021 if ((type.t & VT_BTYPE) != VT_FUNC)
7022 expect("function definition");
7024 /* reject abstract declarators in function definition */
7025 sym = type.ref;
7026 while ((sym = sym->next) != NULL)
7027 if (!(sym->v & ~SYM_FIELD))
7028 expect("identifier");
7030 /* XXX: cannot do better now: convert extern inline to static inline */
7031 if ((type.t & (VT_EXTERN | VT_INLINE)) == (VT_EXTERN | VT_INLINE))
7032 type.t = (type.t & ~VT_EXTERN) | VT_STATIC;
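/* e.g.
       extern inline int twice(int x) { return 2 * x; }
   is treated here as if it had been declared 'static inline'. */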
7034 sym = sym_find(v);
7035 if (sym) {
7036 Sym *ref;
7037 if ((sym->type.t & VT_BTYPE) != VT_FUNC)
7038 goto func_error1;
7040 ref = sym->type.ref;
7041 if (0 == ref->a.func_proto)
7042 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
7044 /* use func_call from prototype if not defined */
7045 if (ref->a.func_call != FUNC_CDECL
7046 && type.ref->a.func_call == FUNC_CDECL)
7047 type.ref->a.func_call = ref->a.func_call;
7049 /* use export from prototype */
7050 if (ref->a.func_export)
7051 type.ref->a.func_export = 1;
7053 /* use static from prototype */
7054 if (sym->type.t & VT_STATIC)
7055 type.t = (type.t & ~VT_EXTERN) | VT_STATIC;
7057 /* If the definition has no explicit visibility, use the
7058 one from the prototype. */
7059 if (! (type.t & VT_VIS_MASK))
7060 type.t |= sym->type.t & VT_VIS_MASK;
7062 if (!is_compatible_types(&sym->type, &type)) {
7063 func_error1:
7064 tcc_error("incompatible types for redefinition of '%s'",
7065 get_tok_str(v, NULL));
7066 }
7067 type.ref->a.func_proto = 0;
7068 /* if symbol is already defined, then put complete type */
7069 sym->type = type;
7070 } else {
7071 /* put function symbol */
7072 sym = global_identifier_push(v, type.t, 0);
7073 sym->type.ref = type.ref;
7074 }
7076 /* static inline functions are just recorded as a kind
7077 of macro. Their code will be emitted at the end of
7078 the compilation unit only if they are used */
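/* e.g.
       static inline int sq(int x) { return x * x; }
   is only tokenized at this point; gen_inline_functions() compiles
   it later, and only if it is actually referenced. */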
7079 if ((type.t & (VT_INLINE | VT_STATIC)) ==
7080 (VT_INLINE | VT_STATIC)) {
7081 int block_level;
7082 struct InlineFunc *fn;
7083 const char *filename;
7085 filename = file ? file->filename : "";
7086 fn = tcc_malloc(sizeof *fn + strlen(filename));
7087 strcpy(fn->filename, filename);
7088 fn->sym = sym;
7089 fn->func_str = tok_str_alloc();
7091 block_level = 0;
7092 for(;;) {
7093 int t;
7094 if (tok == TOK_EOF)
7095 tcc_error("unexpected end of file");
7096 tok_str_add_tok(fn->func_str);
7097 t = tok;
7098 next();
7099 if (t == '{') {
7100 block_level++;
7101 } else if (t == '}') {
7102 block_level--;
7103 if (block_level == 0)
7104 break;
7105 }
7106 }
7107 tok_str_add(fn->func_str, -1);
7108 tok_str_add(fn->func_str, 0);
7109 dynarray_add((void ***)&tcc_state->inline_fns, &tcc_state->nb_inline_fns, fn);
7111 } else {
7112 /* compute text section */
7113 cur_text_section = ad.section;
7114 if (!cur_text_section)
7115 cur_text_section = text_section;
7116 sym->r = VT_SYM | VT_CONST;
7117 gen_function(sym);
7119 break;
7120 } else {
7121 if (btype.t & VT_TYPEDEF) {
7122 /* save typedefed type */
7123 /* XXX: test storage specifiers ? */
7124 sym = sym_find(v);
7125 if (sym && sym->scope == local_scope) {
7126 if (!is_compatible_types(&sym->type, &type)
7127 || !(sym->type.t & VT_TYPEDEF))
7128 tcc_error("incompatible redefinition of '%s'",
7129 get_tok_str(v, NULL));
7130 sym->type = type;
7131 } else {
7132 sym = sym_push(v, &type, 0, 0);
7133 }
7134 sym->a = ad.a;
7135 sym->type.t |= VT_TYPEDEF;
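/* e.g. at the same scope
       typedef int T;
       typedef int T;     accepted: compatible redefinition
       typedef long T;    rejected: incompatible redefinition
*/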
7136 } else {
7137 r = 0;
7138 if ((type.t & VT_BTYPE) == VT_FUNC) {
7139 /* external function definition */
7140 /* specific case for func_call attribute */
7141 ad.a.func_proto = 1;
7142 type.ref->a = ad.a;
7143 } else if (!(type.t & VT_ARRAY)) {
7144 /* not lvalue if array */
7145 r |= lvalue_type(type.t);
7146 }
7147 has_init = (tok == '=');
7148 if (has_init && (type.t & VT_VLA))
7149 tcc_error("variable length array cannot be initialized");
7150 if ((btype.t & VT_EXTERN) || ((type.t & VT_BTYPE) == VT_FUNC) ||
7151 ((type.t & VT_ARRAY) && (type.t & VT_STATIC) &&
7152 !has_init && l == VT_CONST && type.ref->c < 0)) {
7153 /* external variable or function */
7154 /* NOTE: like GCC, uninitialized global static
7155 arrays of unspecified size are considered
7156 extern */
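/* e.g.
       static int tab[];
   an uninitialized static array of unspecified size is handled like
   an extern declaration (see the NOTE above) via external_sym()
   rather than being allocated here. */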
7157 sym = external_sym(v, &type, r);
7158 sym->asm_label = ad.asm_label;
7160 if (ad.alias_target) {
7161 Section tsec;
7162 ElfW(Sym) *esym;
7163 Sym *alias_target;
7165 alias_target = sym_find(ad.alias_target);
7166 if (!alias_target || !alias_target->c)
7167 tcc_error("unsupported forward __alias__ attribute");
7168 esym = &((ElfW(Sym) *)symtab_section->data)[alias_target->c];
7169 tsec.sh_num = esym->st_shndx;
7170 put_extern_sym2(sym, &tsec, esym->st_value, esym->st_size, 0);
7171 }
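/* Alias example resolved just above (the target must already be
   defined, hence the "forward __alias__" error):
       void impl(void) {}
       void api(void) __attribute__((alias("impl")));
*/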
7172 } else {
7173 type.t |= (btype.t & VT_STATIC); /* Retain "static". */
7174 if (type.t & VT_STATIC)
7175 r |= VT_CONST;
7176 else
7177 r |= l;
7178 if (has_init)
7179 next();
7180 decl_initializer_alloc(&type, &ad, r, has_init, v, l);
7181 }
7182 }
7183 if (tok != ',') {
7184 if (is_for_loop_init)
7185 return 1;
7186 skip(';');
7187 break;
7188 }
7189 next();
7190 }
7191 ad.a.aligned = 0;
7192 }
7193 }
7194 return 0;
7195 }
7197 ST_FUNC void decl(int l)
7198 {
7199 decl0(l, 0);
7200 }
7202 /* ------------------------------------------------------------------------- */