tccgen: factor out gfunc_return
[tinycc.git] / tccgen.c
1 /*
2 * TCC - Tiny C Compiler
3 *
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 #include "tcc.h"
23 /********************************************************/
24 /* global variables */
26 /* loc : local variable index
27 ind : output code index
28 rsym: return symbol
29 anon_sym: anonymous symbol index
31 ST_DATA int rsym, anon_sym, ind, loc;
33 ST_DATA Sym *sym_free_first;
34 ST_DATA void **sym_pools;
35 ST_DATA int nb_sym_pools;
37 ST_DATA Sym *global_stack;
38 ST_DATA Sym *local_stack;
39 ST_DATA Sym *define_stack;
40 ST_DATA Sym *global_label_stack;
41 ST_DATA Sym *local_label_stack;
42 static int local_scope;
43 static int in_sizeof;
44 static int section_sym;
46 ST_DATA int vlas_in_scope; /* number of VLAs that are currently in scope */
47 ST_DATA int vla_sp_root_loc; /* vla_sp_loc for SP before any VLAs were pushed */
48 ST_DATA int vla_sp_loc; /* Pointer to variable holding location to store stack pointer on the stack when modifying stack pointer */
50 ST_DATA SValue __vstack[1+VSTACK_SIZE], *vtop, *pvtop;
52 ST_DATA int const_wanted; /* true if constant wanted */
53 ST_DATA int nocode_wanted; /* true if no code generation wanted for an expression */
54 ST_DATA int global_expr; /* true if compound literals must be allocated globally (used during initializers parsing) */
55 ST_DATA CType func_vt; /* current function return type (used by return instruction) */
56 ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
57 ST_DATA int func_vc;
58 ST_DATA int last_line_num, last_ind, func_ind; /* debug last line number and pc */
59 ST_DATA const char *funcname;
61 ST_DATA CType char_pointer_type, func_old_type, int_type, size_type;
63 ST_DATA struct switch_t {
64 struct case_t {
65 int64_t v1, v2;
66 int sym;
67 } **p; int n; /* list of case ranges */
68 int def_sym; /* default symbol */
69 } *cur_switch; /* current switch */
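/* For illustration: each case_t describes the value range [v1, v2], so a
   plain 'case 3:' is recorded with v1 == v2 == 3 and the GNU range
   extension 'case 1 ... 5:' with v1 == 1, v2 == 5; 'sym' is the jump
   target of that case and 'def_sym' that of the 'default:' label. */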
71 /* ------------------------------------------------------------------------- */
73 static void gen_cast(CType *type);
74 static inline CType *pointed_type(CType *type);
75 static int is_compatible_types(CType *type1, CType *type2);
76 static int parse_btype(CType *type, AttributeDef *ad);
77 static void type_decl(CType *type, AttributeDef *ad, int *v, int td);
78 static void parse_expr_type(CType *type);
79 static void decl_initializer(CType *type, Section *sec, unsigned long c, int first, int size_only);
80 static void block(int *bsym, int *csym, int is_expr);
81 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
82 static int decl0(int l, int is_for_loop_init);
83 static void expr_eq(void);
84 static void expr_lor_const(void);
85 static void unary_type(CType *type);
86 static void vla_runtime_type_size(CType *type, int *a);
87 static void vla_sp_restore(void);
88 static void vla_sp_restore_root(void);
89 static int is_compatible_parameter_types(CType *type1, CType *type2);
90 static void expr_type(CType *type);
91 static inline int64_t expr_const64(void);
92 ST_FUNC void vpush64(int ty, unsigned long long v);
93 ST_FUNC void vpush(CType *type);
94 ST_FUNC int gvtst(int inv, int t);
95 ST_FUNC int is_btype_size(int bt);
96 static void gen_inline_functions(TCCState *s);
98 ST_INLN int is_float(int t)
100 int bt;
101 bt = t & VT_BTYPE;
102 return bt == VT_LDOUBLE || bt == VT_DOUBLE || bt == VT_FLOAT || bt == VT_QFLOAT;
105 /* we use our own 'finite' function to avoid potential problems with
106 non-standard math libs */
107 /* XXX: endianness dependent */
108 ST_FUNC int ieee_finite(double d)
110 int p[4];
111 memcpy(p, &d, sizeof(double));
112 return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
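/* How the bit test above works (little-endian sketch): p[1] holds the high
   word of the IEEE-754 double, i.e. sign | 11 exponent bits | top of the
   mantissa. OR-ing with 0x800fffff forces every bit except the exponent
   field to 1; adding 1 then wraps to 0 exactly when the exponent is all
   ones (Inf/NaN). E.g. for 1.0 the high word is 0x3ff00000:
   0x3ff00000 | 0x800fffff = 0xbfffffff, + 1 = 0xc0000000, >> 31 = 1 (finite);
   for +Inf it is 0x7ff00000: 0xffffffff + 1 wraps to 0, >> 31 = 0. */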
115 ST_FUNC void test_lvalue(void)
117 if (!(vtop->r & VT_LVAL))
118 expect("lvalue");
121 ST_FUNC void check_vstack(void)
123 if (pvtop != vtop)
124 tcc_error("internal compiler error: vstack leak (%d)", vtop - pvtop);
127 /* ------------------------------------------------------------------------- */
128 /* vstack debugging aid */
130 #if 0
131 void pv (const char *lbl, int a, int b)
133 int i;
134 for (i = a; i < a + b; ++i) {
135 SValue *p = &vtop[-i];
136 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
137 lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
140 #endif
142 /* ------------------------------------------------------------------------- */
143 ST_FUNC void tccgen_start(TCCState *s1)
145 cur_text_section = NULL;
146 funcname = "";
147 anon_sym = SYM_FIRST_ANOM;
148 section_sym = 0;
149 const_wanted = 0;
150 nocode_wanted = 1;
152 /* define some often used types */
153 int_type.t = VT_INT;
154 char_pointer_type.t = VT_BYTE;
155 mk_pointer(&char_pointer_type);
156 #if PTR_SIZE == 4
157 size_type.t = VT_INT;
158 #else
159 size_type.t = VT_LLONG;
160 #endif
161 func_old_type.t = VT_FUNC;
162 func_old_type.ref = sym_push(SYM_FIELD, &int_type, FUNC_CDECL, FUNC_OLD);
164 if (s1->do_debug) {
165 char buf[512];
167 /* file info: full path + filename */
168 section_sym = put_elf_sym(symtab_section, 0, 0,
169 ELFW(ST_INFO)(STB_LOCAL, STT_SECTION), 0,
170 text_section->sh_num, NULL);
171 getcwd(buf, sizeof(buf));
172 #ifdef _WIN32
173 normalize_slashes(buf);
174 #endif
175 pstrcat(buf, sizeof(buf), "/");
176 put_stabs_r(buf, N_SO, 0, 0,
177 text_section->data_offset, text_section, section_sym);
178 put_stabs_r(file->filename, N_SO, 0, 0,
179 text_section->data_offset, text_section, section_sym);
181 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
182 symbols can be safely used */
183 put_elf_sym(symtab_section, 0, 0,
184 ELFW(ST_INFO)(STB_LOCAL, STT_FILE), 0,
185 SHN_ABS, file->filename);
187 #ifdef TCC_TARGET_ARM
188 arm_init(s1);
189 #endif
192 ST_FUNC void tccgen_end(TCCState *s1)
194 gen_inline_functions(s1);
195 check_vstack();
196 /* end of translation unit info */
197 if (s1->do_debug) {
198 put_stabs_r(NULL, N_SO, 0, 0,
199 text_section->data_offset, text_section, section_sym);
203 /* ------------------------------------------------------------------------- */
204 /* update sym->c so that it points to an external symbol in section
205 'section' with value 'value' */
207 ST_FUNC void put_extern_sym2(Sym *sym, Section *section,
208 addr_t value, unsigned long size,
209 int can_add_underscore)
211 int sym_type, sym_bind, sh_num, info, other;
212 ElfW(Sym) *esym;
213 const char *name;
214 char buf1[256];
216 #ifdef CONFIG_TCC_BCHECK
217 char buf[32];
218 #endif
220 if (section == NULL)
221 sh_num = SHN_UNDEF;
222 else if (section == SECTION_ABS)
223 sh_num = SHN_ABS;
224 else
225 sh_num = section->sh_num;
227 if ((sym->type.t & VT_BTYPE) == VT_FUNC) {
228 sym_type = STT_FUNC;
229 } else if ((sym->type.t & VT_BTYPE) == VT_VOID) {
230 sym_type = STT_NOTYPE;
231 } else {
232 sym_type = STT_OBJECT;
235 if (sym->type.t & VT_STATIC)
236 sym_bind = STB_LOCAL;
237 else {
238 if (sym->type.t & VT_WEAK)
239 sym_bind = STB_WEAK;
240 else
241 sym_bind = STB_GLOBAL;
244 if (!sym->c) {
245 name = get_tok_str(sym->v, NULL);
246 #ifdef CONFIG_TCC_BCHECK
247 if (tcc_state->do_bounds_check) {
248 /* XXX: avoid doing that for statics ? */
249 /* if bound checking is activated, we change some function
250 names by adding the "__bound" prefix */
251 switch(sym->v) {
252 #ifdef TCC_TARGET_PE
253 /* XXX: we rely only on malloc hooks */
254 case TOK_malloc:
255 case TOK_free:
256 case TOK_realloc:
257 case TOK_memalign:
258 case TOK_calloc:
259 #endif
260 case TOK_memcpy:
261 case TOK_memmove:
262 case TOK_memset:
263 case TOK_strlen:
264 case TOK_strcpy:
265 case TOK_alloca:
266 strcpy(buf, "__bound_");
267 strcat(buf, name);
268 name = buf;
269 break;
272 #endif
273 other = 0;
275 #ifdef TCC_TARGET_PE
276 if (sym->type.t & VT_EXPORT)
277 other |= ST_PE_EXPORT;
278 if (sym_type == STT_FUNC && sym->type.ref) {
279 Sym *ref = sym->type.ref;
280 if (ref->a.func_export)
281 other |= ST_PE_EXPORT;
282 if (ref->a.func_call == FUNC_STDCALL && can_add_underscore) {
283 sprintf(buf1, "_%s@%d", name, ref->a.func_args * PTR_SIZE);
284 name = buf1;
285 other |= ST_PE_STDCALL;
286 can_add_underscore = 0;
288 } else {
289 if (find_elf_sym(tcc_state->dynsymtab_section, name))
290 other |= ST_PE_IMPORT;
291 if (sym->type.t & VT_IMPORT)
292 other |= ST_PE_IMPORT;
294 #else
295 if (! (sym->type.t & VT_STATIC))
296 other = (sym->type.t & VT_VIS_MASK) >> VT_VIS_SHIFT;
297 #endif
298 if (tcc_state->leading_underscore && can_add_underscore) {
299 buf1[0] = '_';
300 pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
301 name = buf1;
303 if (sym->asm_label) {
304 name = get_tok_str(sym->asm_label, NULL);
306 info = ELFW(ST_INFO)(sym_bind, sym_type);
307 sym->c = set_elf_sym(symtab_section, value, size, info, other, sh_num, name);
308 } else {
309 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
310 esym->st_value = value;
311 esym->st_size = size;
312 esym->st_shndx = sh_num;
316 ST_FUNC void put_extern_sym(Sym *sym, Section *section,
317 addr_t value, unsigned long size)
319 put_extern_sym2(sym, section, value, size, 1);
322 /* add a new relocation entry to symbol 'sym' in section 's' */
323 ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
324 addr_t addend)
326 int c = 0;
328 if (nocode_wanted && s == cur_text_section)
329 return;
331 if (sym) {
332 if (0 == sym->c)
333 put_extern_sym(sym, NULL, 0, 0);
334 c = sym->c;
337 /* now we can add ELF relocation info */
338 put_elf_reloca(symtab_section, s, offset, type, c, addend);
341 ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
343 greloca(s, sym, offset, type, 0);
346 /* ------------------------------------------------------------------------- */
347 /* symbol allocator */
348 static Sym *__sym_malloc(void)
350 Sym *sym_pool, *sym, *last_sym;
351 int i;
353 sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
354 dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);
356 last_sym = sym_free_first;
357 sym = sym_pool;
358 for(i = 0; i < SYM_POOL_NB; i++) {
359 sym->next = last_sym;
360 last_sym = sym;
361 sym++;
363 sym_free_first = last_sym;
364 return last_sym;
367 static inline Sym *sym_malloc(void)
369 Sym *sym;
370 #ifndef SYM_DEBUG
371 sym = sym_free_first;
372 if (!sym)
373 sym = __sym_malloc();
374 sym_free_first = sym->next;
375 return sym;
376 #else
377 sym = tcc_malloc(sizeof(Sym));
378 return sym;
379 #endif
382 ST_INLN void sym_free(Sym *sym)
384 #ifndef SYM_DEBUG
385 sym->next = sym_free_first;
386 sym_free_first = sym;
387 #else
388 tcc_free(sym);
389 #endif
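/* Note on the allocator above: symbols are carved out of pools of
   SYM_POOL_NB entries chained on a single free list (sym_free_first),
   so sym_malloc()/sym_free() are cheap pointer swaps; building with
   SYM_DEBUG falls back to plain tcc_malloc()/tcc_free() per symbol,
   presumably so memory checkers can track individual Syms. */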
392 /* push, without hashing */
393 ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, long c)
395 Sym *s;
397 s = sym_malloc();
398 s->scope = 0;
399 s->v = v;
400 s->type.t = t;
401 s->type.ref = NULL;
402 #ifdef _WIN64
403 s->d = NULL;
404 #endif
405 s->c = c;
406 s->next = NULL;
407 /* add in stack */
408 s->prev = *ps;
409 *ps = s;
410 return s;
413 /* find a symbol and return its associated structure. 's' is the top
414 of the symbol stack */
415 ST_FUNC Sym *sym_find2(Sym *s, int v)
417 while (s) {
418 if (s->v == v)
419 return s;
420 else if (s->v == -1)
421 return NULL;
422 s = s->prev;
424 return NULL;
427 /* structure lookup */
428 ST_INLN Sym *struct_find(int v)
430 v -= TOK_IDENT;
431 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
432 return NULL;
433 return table_ident[v]->sym_struct;
436 /* find an identifier */
437 ST_INLN Sym *sym_find(int v)
439 v -= TOK_IDENT;
440 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
441 return NULL;
442 return table_ident[v]->sym_identifier;
445 /* push a given symbol on the symbol stack */
446 ST_FUNC Sym *sym_push(int v, CType *type, int r, long c)
448 Sym *s, **ps;
449 TokenSym *ts;
451 if (local_stack)
452 ps = &local_stack;
453 else
454 ps = &global_stack;
455 s = sym_push2(ps, v, type->t, c);
456 s->type.ref = type->ref;
457 s->r = r;
458 /* don't record fields or anonymous symbols */
459 /* XXX: simplify */
460 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
461 /* record symbol in token array */
462 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
463 if (v & SYM_STRUCT)
464 ps = &ts->sym_struct;
465 else
466 ps = &ts->sym_identifier;
467 s->prev_tok = *ps;
468 *ps = s;
469 s->scope = local_scope;
470 if (s->prev_tok && s->prev_tok->scope == s->scope)
471 tcc_error("redeclaration of '%s'",
472 get_tok_str(v & ~SYM_STRUCT, NULL));
474 return s;
477 /* push a global identifier */
478 ST_FUNC Sym *global_identifier_push(int v, int t, long c)
480 Sym *s, **ps;
481 s = sym_push2(&global_stack, v, t, c);
482 /* don't record anonymous symbol */
483 if (v < SYM_FIRST_ANOM) {
484 ps = &table_ident[v - TOK_IDENT]->sym_identifier;
485 /* modify the top most local identifier, so that
486 sym_identifier will point to 's' when popped */
487 while (*ps != NULL)
488 ps = &(*ps)->prev_tok;
489 s->prev_tok = NULL;
490 *ps = s;
492 return s;
495 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
496 pop them yet from the list, but do remove them from the token array. */
497 ST_FUNC void sym_pop(Sym **ptop, Sym *b, int keep)
499 Sym *s, *ss, **ps;
500 TokenSym *ts;
501 int v;
503 s = *ptop;
504 while(s != b) {
505 ss = s->prev;
506 v = s->v;
507 /* remove symbol in token array */
508 /* XXX: simplify */
509 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
510 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
511 if (v & SYM_STRUCT)
512 ps = &ts->sym_struct;
513 else
514 ps = &ts->sym_identifier;
515 *ps = s->prev_tok;
517 if (!keep)
518 sym_free(s);
519 s = ss;
521 if (!keep)
522 *ptop = b;
525 static void weaken_symbol(Sym *sym)
527 sym->type.t |= VT_WEAK;
528 if (sym->c > 0) {
529 int esym_type;
530 ElfW(Sym) *esym;
532 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
533 esym_type = ELFW(ST_TYPE)(esym->st_info);
534 esym->st_info = ELFW(ST_INFO)(STB_WEAK, esym_type);
538 static void apply_visibility(Sym *sym, CType *type)
540 int vis = sym->type.t & VT_VIS_MASK;
541 int vis2 = type->t & VT_VIS_MASK;
542 if (vis == (STV_DEFAULT << VT_VIS_SHIFT))
543 vis = vis2;
544 else if (vis2 == (STV_DEFAULT << VT_VIS_SHIFT))
545 ;
546 else
547 vis = (vis < vis2) ? vis : vis2;
548 sym->type.t &= ~VT_VIS_MASK;
549 sym->type.t |= vis;
551 if (sym->c > 0) {
552 ElfW(Sym) *esym;
554 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
555 vis >>= VT_VIS_SHIFT;
556 esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1)) | vis;
560 /* ------------------------------------------------------------------------- */
562 static void vsetc(CType *type, int r, CValue *vc)
564 int v;
566 if (vtop >= vstack + (VSTACK_SIZE - 1))
567 tcc_error("memory full (vstack)");
568 /* cannot leave cpu flags pending if other instructions are generated. Also
569 avoid leaving VT_JMP anywhere except on the top of the stack
570 because it would complicate the code generator.
572 Don't do this when nocode_wanted. vtop might come from
573 !nocode_wanted regions (see 88_codeopt.c) and transforming
574 it to a register without actually generating code is wrong
575 as their value might still be used for real. All values
576 we push under nocode_wanted will eventually be popped
577 again, so that the VT_CMP/VT_JMP value will be in vtop
578 when code is unsuppressed again.
580 Same logic below in vswap(); */
581 if (vtop >= vstack && !nocode_wanted) {
582 v = vtop->r & VT_VALMASK;
583 if (v == VT_CMP || (v & ~1) == VT_JMP)
584 gv(RC_INT);
587 vtop++;
588 vtop->type = *type;
589 vtop->r = r;
590 vtop->r2 = VT_CONST;
591 vtop->c = *vc;
592 vtop->sym = NULL;
595 ST_FUNC void vswap(void)
597 SValue tmp;
598 /* cannot vswap cpu flags. See comment at vsetc() above */
599 if (vtop >= vstack && !nocode_wanted) {
600 int v = vtop->r & VT_VALMASK;
601 if (v == VT_CMP || (v & ~1) == VT_JMP)
602 gv(RC_INT);
604 tmp = vtop[0];
605 vtop[0] = vtop[-1];
606 vtop[-1] = tmp;
609 /* pop stack value */
610 ST_FUNC void vpop(void)
612 int v;
613 v = vtop->r & VT_VALMASK;
614 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
615 /* for x86, we need to pop the FP stack */
616 if (v == TREG_ST0) {
617 o(0xd8dd); /* fstp %st(0) */
618 } else
619 #endif
620 if (v == VT_JMP || v == VT_JMPI) {
621 /* need to put correct jump if && or || without test */
622 gsym(vtop->c.i);
624 vtop--;
627 /* push constant of type "type" with useless value */
628 ST_FUNC void vpush(CType *type)
630 CValue cval;
631 vsetc(type, VT_CONST, &cval);
634 /* push integer constant */
635 ST_FUNC void vpushi(int v)
637 CValue cval;
638 cval.i = v;
639 vsetc(&int_type, VT_CONST, &cval);
642 /* push a pointer sized constant */
643 static void vpushs(addr_t v)
645 CValue cval;
646 cval.i = v;
647 vsetc(&size_type, VT_CONST, &cval);
650 /* push arbitrary 64bit constant */
651 ST_FUNC void vpush64(int ty, unsigned long long v)
653 CValue cval;
654 CType ctype;
655 ctype.t = ty;
656 ctype.ref = NULL;
657 cval.i = v;
658 vsetc(&ctype, VT_CONST, &cval);
661 /* push long long constant */
662 static inline void vpushll(long long v)
664 vpush64(VT_LLONG, v);
667 ST_FUNC void vset(CType *type, int r, long v)
669 CValue cval;
671 cval.i = v;
672 vsetc(type, r, &cval);
675 static void vseti(int r, int v)
677 CType type;
678 type.t = VT_INT;
679 type.ref = 0;
680 vset(&type, r, v);
683 ST_FUNC void vpushv(SValue *v)
685 if (vtop >= vstack + (VSTACK_SIZE - 1))
686 tcc_error("memory full (vstack)");
687 vtop++;
688 *vtop = *v;
691 static void vdup(void)
693 vpushv(vtop);
696 /* rotate n first stack elements to the bottom
697 I1 ... In -> I2 ... In I1 [top is right]
699 ST_FUNC void vrotb(int n)
701 int i;
702 SValue tmp;
704 tmp = vtop[-n + 1];
705 for(i=-n+1;i!=0;i++)
706 vtop[i] = vtop[i+1];
707 vtop[0] = tmp;
710 /* rotate the n elements before entry e towards the top
711 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
713 ST_FUNC void vrote(SValue *e, int n)
715 int i;
716 SValue tmp;
718 tmp = *e;
719 for(i = 0;i < n - 1; i++)
720 e[-i] = e[-i - 1];
721 e[-n + 1] = tmp;
724 /* rotate n first stack elements to the top
725 I1 ... In -> In I1 ... I(n-1) [top is right]
727 ST_FUNC void vrott(int n)
729 vrote(vtop, n);
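/* Worked example for the rotation helpers (top of stack on the right):
   if the top three entries are A B C, then vrotb(3) gives B C A (the
   deepest entry A is brought to the top) and vrott(3) gives C A B (the
   former top C is moved to the bottom of the group); vrote(e, n) is
   vrott() generalized to rotate the n entries ending at an arbitrary
   stack slot e instead of at vtop. */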
732 /* push a symbol value of TYPE */
733 static inline void vpushsym(CType *type, Sym *sym)
735 CValue cval;
736 cval.i = 0;
737 vsetc(type, VT_CONST | VT_SYM, &cval);
738 vtop->sym = sym;
741 /* Return a static symbol pointing to a section */
742 ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
744 int v;
745 Sym *sym;
747 v = anon_sym++;
748 sym = global_identifier_push(v, type->t | VT_STATIC, 0);
749 sym->type.ref = type->ref;
750 sym->r = VT_CONST | VT_SYM;
751 put_extern_sym(sym, sec, offset, size);
752 return sym;
755 /* push a reference to a section offset by adding a dummy symbol */
756 static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
758 vpushsym(type, get_sym_ref(type, sec, offset, size));
761 /* define a new external reference to a symbol 'v' of type 'u' */
762 ST_FUNC Sym *external_global_sym(int v, CType *type, int r)
764 Sym *s;
766 s = sym_find(v);
767 if (!s) {
768 /* push forward reference */
769 s = global_identifier_push(v, type->t | VT_EXTERN, 0);
770 s->type.ref = type->ref;
771 s->r = r | VT_CONST | VT_SYM;
773 return s;
776 /* define a new external reference to a symbol 'v' */
777 static Sym *external_sym(int v, CType *type, int r)
779 Sym *s;
781 s = sym_find(v);
782 if (!s) {
783 /* push forward reference */
784 s = sym_push(v, type, r | VT_CONST | VT_SYM, 0);
785 s->type.t |= VT_EXTERN;
786 } else if (s->type.ref == func_old_type.ref) {
787 s->type.ref = type->ref;
788 s->r = r | VT_CONST | VT_SYM;
789 s->type.t |= VT_EXTERN;
790 } else if (!is_compatible_types(&s->type, type)) {
791 tcc_error("incompatible types for redefinition of '%s'",
792 get_tok_str(v, NULL));
794 /* Merge some storage attributes. */
795 if (type->t & VT_WEAK)
796 weaken_symbol(s);
798 if (type->t & VT_VIS_MASK)
799 apply_visibility(s, type);
801 return s;
804 /* push a reference to global symbol v */
805 ST_FUNC void vpush_global_sym(CType *type, int v)
807 vpushsym(type, external_global_sym(v, type, 0));
810 /* save registers up to (vtop - n) stack entry */
811 ST_FUNC void save_regs(int n)
813 SValue *p, *p1;
814 for(p = vstack, p1 = vtop - n; p <= p1; p++)
815 save_reg(p->r);
818 /* save r to the memory stack, and mark it as being free */
819 ST_FUNC void save_reg(int r)
821 save_reg_upstack(r, 0);
824 /* save r to the memory stack, and mark it as being free,
825 if seen up to (vtop - n) stack entry */
826 ST_FUNC void save_reg_upstack(int r, int n)
828 int l, saved, size, align;
829 SValue *p, *p1, sv;
830 CType *type;
832 if ((r &= VT_VALMASK) >= VT_CONST)
833 return;
834 if (nocode_wanted)
835 return;
837 /* modify all stack values */
838 saved = 0;
839 l = 0;
840 for(p = vstack, p1 = vtop - n; p <= p1; p++) {
841 if ((p->r & VT_VALMASK) == r ||
842 ((p->type.t & VT_BTYPE) == VT_LLONG && (p->r2 & VT_VALMASK) == r)) {
843 /* must save value on stack if not already done */
844 if (!saved) {
845 /* NOTE: must reload 'r' because r might be equal to r2 */
846 r = p->r & VT_VALMASK;
847 /* store register in the stack */
848 type = &p->type;
849 if ((p->r & VT_LVAL) ||
850 (!is_float(type->t) && (type->t & VT_BTYPE) != VT_LLONG))
851 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
852 type = &char_pointer_type;
853 #else
854 type = &int_type;
855 #endif
856 if ((type->t & VT_BTYPE) == VT_FLOAT) {
857 /* cast to DOUBLE to avoid precision loss */
858 type->t = (type->t & ~VT_BTYPE) | VT_DOUBLE;
860 size = type_size(type, &align);
861 loc = (loc - size) & -align;
862 sv.type.t = type->t;
863 sv.r = VT_LOCAL | VT_LVAL;
864 sv.c.i = loc;
865 store(r, &sv);
866 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
867 /* x86 specific: need to pop fp register ST0 if saved */
868 if (r == TREG_ST0) {
869 o(0xd8dd); /* fstp %st(0) */
871 #endif
872 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
873 /* special long long case */
874 if ((type->t & VT_BTYPE) == VT_LLONG) {
875 sv.c.i += 4;
876 store(p->r2, &sv);
878 #endif
879 l = loc;
880 saved = 1;
882 /* mark that stack entry as being saved on the stack */
883 if (p->r & VT_LVAL) {
884 /* also clear the bounded flag because the
885 relocation address of the function was stored in
886 p->c.i */
887 p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
888 } else {
889 p->r = lvalue_type(p->type.t) | VT_LOCAL;
891 p->r2 = VT_CONST;
892 p->c.i = l;
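/* Summary of the spill above: a fresh slot is reserved by moving 'loc'
   down (size- and alignment-adjusted), the register is stored there
   once, and every vstack entry that referred to r (directly or via r2
   for long long halves) is rewritten to read that slot instead: plain
   values become VT_LOCAL lvalues, while entries that were already
   lvalues through r become VT_LLOCAL, i.e. an lvalue whose address
   itself lives on the stack. */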
897 #ifdef TCC_TARGET_ARM
898 /* find a register of class 'rc2' with at most one reference on stack.
899 * If none, call get_reg(rc) */
900 ST_FUNC int get_reg_ex(int rc, int rc2)
902 int r;
903 SValue *p;
905 for(r=0;r<NB_REGS;r++) {
906 if (reg_classes[r] & rc2) {
907 int n;
908 n=0;
909 for(p = vstack; p <= vtop; p++) {
910 if ((p->r & VT_VALMASK) == r ||
911 (p->r2 & VT_VALMASK) == r)
912 n++;
914 if (n <= 1)
915 return r;
918 return get_reg(rc);
920 #endif
922 /* find a free register of class 'rc'. If none, save one register */
923 ST_FUNC int get_reg(int rc)
925 int r;
926 SValue *p;
928 /* find a free register */
929 for(r=0;r<NB_REGS;r++) {
930 if (reg_classes[r] & rc) {
931 if (nocode_wanted)
932 return r;
933 for(p=vstack;p<=vtop;p++) {
934 if ((p->r & VT_VALMASK) == r ||
935 (p->r2 & VT_VALMASK) == r)
936 goto notfound;
938 return r;
940 notfound: ;
943 /* no register left : free the first one on the stack (VERY
944 IMPORTANT to start from the bottom to ensure that we don't
945 spill registers used in gen_opi()) */
946 for(p=vstack;p<=vtop;p++) {
947 /* look at second register (if long long) */
948 r = p->r2 & VT_VALMASK;
949 if (r < VT_CONST && (reg_classes[r] & rc))
950 goto save_found;
951 r = p->r & VT_VALMASK;
952 if (r < VT_CONST && (reg_classes[r] & rc)) {
953 save_found:
954 save_reg(r);
955 return r;
958 /* Should never come here */
959 return -1;
962 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
963 if needed */
964 static void move_reg(int r, int s, int t)
966 SValue sv;
968 if (r != s) {
969 save_reg(r);
970 sv.type.t = t;
971 sv.type.ref = NULL;
972 sv.r = s;
973 sv.c.i = 0;
974 load(r, &sv);
978 /* get address of vtop (vtop MUST BE an lvalue) */
979 ST_FUNC void gaddrof(void)
981 if (vtop->r & VT_REF)
982 gv(RC_INT);
983 vtop->r &= ~VT_LVAL;
984 /* tricky: if saved lvalue, then we can go back to lvalue */
985 if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
986 vtop->r = (vtop->r & ~(VT_VALMASK | VT_LVAL_TYPE)) | VT_LOCAL | VT_LVAL;
991 #ifdef CONFIG_TCC_BCHECK
992 /* generate lvalue bound code */
993 static void gbound(void)
995 int lval_type;
996 CType type1;
998 vtop->r &= ~VT_MUSTBOUND;
999 /* if lvalue, then use checking code before dereferencing */
1000 if (vtop->r & VT_LVAL) {
1001 /* if not VT_BOUNDED value, then make one */
1002 if (!(vtop->r & VT_BOUNDED)) {
1003 lval_type = vtop->r & (VT_LVAL_TYPE | VT_LVAL);
1004 /* must save type because we must set it to int to get pointer */
1005 type1 = vtop->type;
1006 vtop->type.t = VT_PTR;
1007 gaddrof();
1008 vpushi(0);
1009 gen_bounded_ptr_add();
1010 vtop->r |= lval_type;
1011 vtop->type = type1;
1013 /* then check for dereferencing */
1014 gen_bounded_ptr_deref();
1017 #endif
1019 /* store vtop in a register belonging to class 'rc'. lvalues are
1020 converted to values. Cannot be used if the value cannot be
1021 converted to a register (such as structures). */
1022 ST_FUNC int gv(int rc)
1024 int r, bit_pos, bit_size, size, align, i;
1025 int rc2;
1027 /* NOTE: get_reg can modify vstack[] */
1028 if (vtop->type.t & VT_BITFIELD) {
1029 CType type;
1030 int bits = 32;
1031 bit_pos = (vtop->type.t >> VT_STRUCT_SHIFT) & 0x3f;
1032 bit_size = (vtop->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
1033 /* remove bit field info to avoid loops */
1034 vtop->type.t &= ~VT_BITFIELD & ((1 << VT_STRUCT_SHIFT) - 1);
1035 /* cast to int to propagate signedness in following ops */
1036 if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
1037 type.t = VT_LLONG;
1038 bits = 64;
1039 } else
1040 type.t = VT_INT;
1041 if((vtop->type.t & VT_UNSIGNED) ||
1042 (vtop->type.t & VT_BTYPE) == VT_BOOL)
1043 type.t |= VT_UNSIGNED;
1044 gen_cast(&type);
1045 /* generate shifts */
1046 vpushi(bits - (bit_pos + bit_size));
1047 gen_op(TOK_SHL);
1048 vpushi(bits - bit_size);
1049 /* NOTE: transformed to SHR if unsigned */
1050 gen_op(TOK_SAR);
1051 r = gv(rc);
1052 } else {
1053 if (is_float(vtop->type.t) &&
1054 (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1055 Sym *sym;
1056 int *ptr;
1057 unsigned long offset;
1058 #if defined(TCC_TARGET_ARM) && !defined(TCC_ARM_VFP)
1059 CValue check;
1060 #endif
1062 /* XXX: unify with initializers handling ? */
1063 /* CPUs usually cannot use float constants, so we store them
1064 generically in data segment */
1065 size = type_size(&vtop->type, &align);
1066 offset = (data_section->data_offset + align - 1) & -align;
1067 data_section->data_offset = offset;
1068 /* XXX: not portable yet */
1069 #if defined(__i386__) || defined(__x86_64__)
1070 /* Zero pad x87 tenbyte long doubles */
1071 if (size == LDOUBLE_SIZE) {
1072 vtop->c.tab[2] &= 0xffff;
1073 #if LDOUBLE_SIZE == 16
1074 vtop->c.tab[3] = 0;
1075 #endif
1077 #endif
1078 ptr = section_ptr_add(data_section, size);
1079 size = size >> 2;
1080 #if defined(TCC_TARGET_ARM) && !defined(TCC_ARM_VFP)
1081 check.d = 1;
1082 if(check.tab[0])
1083 for(i=0;i<size;i++)
1084 ptr[i] = vtop->c.tab[size-1-i];
1085 else
1086 #endif
1087 for(i=0;i<size;i++)
1088 ptr[i] = vtop->c.tab[i];
1089 sym = get_sym_ref(&vtop->type, data_section, offset, size << 2);
1090 vtop->r |= VT_LVAL | VT_SYM;
1091 vtop->sym = sym;
1092 vtop->c.i = 0;
1094 #ifdef CONFIG_TCC_BCHECK
1095 if (vtop->r & VT_MUSTBOUND)
1096 gbound();
1097 #endif
1099 r = vtop->r & VT_VALMASK;
1100 rc2 = (rc & RC_FLOAT) ? RC_FLOAT : RC_INT;
1101 #ifndef TCC_TARGET_ARM64
1102 if (rc == RC_IRET)
1103 rc2 = RC_LRET;
1104 #ifdef TCC_TARGET_X86_64
1105 else if (rc == RC_FRET)
1106 rc2 = RC_QRET;
1107 #endif
1108 #endif
1110 /* need to reload if:
1111 - constant
1112 - lvalue (need to dereference pointer)
1113 - already a register, but not in the right class */
1114 if (r >= VT_CONST
1115 || (vtop->r & VT_LVAL)
1116 || !(reg_classes[r] & rc)
1117 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1118 || ((vtop->type.t & VT_BTYPE) == VT_QLONG && !(reg_classes[vtop->r2] & rc2))
1119 || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT && !(reg_classes[vtop->r2] & rc2))
1120 #else
1121 || ((vtop->type.t & VT_BTYPE) == VT_LLONG && !(reg_classes[vtop->r2] & rc2))
1122 #endif
1125 r = get_reg(rc);
1126 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1127 if (((vtop->type.t & VT_BTYPE) == VT_QLONG) || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT)) {
1128 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
1129 #else
1130 if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
1131 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
1132 unsigned long long ll;
1133 #endif
1134 int r2, original_type;
1135 original_type = vtop->type.t;
1136 /* two register type load : expand to two words
1137 temporarily */
1138 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1139 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1140 /* load constant */
1141 ll = vtop->c.i;
1142 vtop->c.i = ll; /* first word */
1143 load(r, vtop);
1144 vtop->r = r; /* save register value */
1145 vpushi(ll >> 32); /* second word */
1146 } else
1147 #endif
1148 if (vtop->r & VT_LVAL) {
1149 /* We do not want to modify the long long
1150 pointer here, so the safest (and least
1151 efficient) approach is to save all the other
1152 registers on the stack. XXX: totally inefficient. */
1153 #if 0
1154 save_regs(1);
1155 #else
1156 /* lvalue_save: save only if used further down the stack */
1157 save_reg_upstack(vtop->r, 1);
1158 #endif
1159 /* load from memory */
1160 vtop->type.t = load_type;
1161 load(r, vtop);
1162 vdup();
1163 vtop[-1].r = r; /* save register value */
1164 /* increment pointer to get second word */
1165 vtop->type.t = addr_type;
1166 gaddrof();
1167 vpushi(load_size);
1168 gen_op('+');
1169 vtop->r |= VT_LVAL;
1170 vtop->type.t = load_type;
1171 } else {
1172 /* move registers */
1173 load(r, vtop);
1174 vdup();
1175 vtop[-1].r = r; /* save register value */
1176 vtop->r = vtop[-1].r2;
1178 /* Allocate second register. Here we rely on the fact that
1179 get_reg() tries first to free r2 of an SValue. */
1180 r2 = get_reg(rc2);
1181 load(r2, vtop);
1182 vpop();
1183 /* write second register */
1184 vtop->r2 = r2;
1185 vtop->type.t = original_type;
1186 } else if ((vtop->r & VT_LVAL) && !is_float(vtop->type.t)) {
1187 int t1, t;
1188 /* lvalue of scalar type : need to use lvalue type
1189 because of possible cast */
1190 t = vtop->type.t;
1191 t1 = t;
1192 /* compute memory access type */
1193 if (vtop->r & VT_REF)
1194 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1195 t = VT_PTR;
1196 #else
1197 t = VT_INT;
1198 #endif
1199 else if (vtop->r & VT_LVAL_BYTE)
1200 t = VT_BYTE;
1201 else if (vtop->r & VT_LVAL_SHORT)
1202 t = VT_SHORT;
1203 if (vtop->r & VT_LVAL_UNSIGNED)
1204 t |= VT_UNSIGNED;
1205 vtop->type.t = t;
1206 load(r, vtop);
1207 /* restore wanted type */
1208 vtop->type.t = t1;
1209 } else {
1210 /* one register type load */
1211 load(r, vtop);
1214 vtop->r = r;
1215 #ifdef TCC_TARGET_C67
1216 /* uses register pairs for doubles */
1217 if ((vtop->type.t & VT_BTYPE) == VT_DOUBLE)
1218 vtop->r2 = r+1;
1219 #endif
1221 return r;
1224 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1225 ST_FUNC void gv2(int rc1, int rc2)
1227 int v;
1229 /* generate more generic register first. But VT_JMP or VT_CMP
1230 values must be generated first in all cases to avoid possible
1231 reload errors */
1232 v = vtop[0].r & VT_VALMASK;
1233 if (v != VT_CMP && (v & ~1) != VT_JMP && rc1 <= rc2) {
1234 vswap();
1235 gv(rc1);
1236 vswap();
1237 gv(rc2);
1238 /* test if reload is needed for first register */
1239 if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
1240 vswap();
1241 gv(rc1);
1242 vswap();
1244 } else {
1245 gv(rc2);
1246 vswap();
1247 gv(rc1);
1248 vswap();
1249 /* test if reload is needed for first register */
1250 if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
1251 gv(rc2);
1256 #ifndef TCC_TARGET_ARM64
1257 /* wrapper around RC_FRET to return a register by type */
1258 static int rc_fret(int t)
1260 #ifdef TCC_TARGET_X86_64
1261 if (t == VT_LDOUBLE) {
1262 return RC_ST0;
1264 #endif
1265 return RC_FRET;
1267 #endif
1269 /* wrapper around REG_FRET to return a register by type */
1270 static int reg_fret(int t)
1272 #ifdef TCC_TARGET_X86_64
1273 if (t == VT_LDOUBLE) {
1274 return TREG_ST0;
1276 #endif
1277 return REG_FRET;
1280 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1281 /* expand 64bit on stack in two ints */
1282 static void lexpand(void)
1284 int u, v;
1285 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1286 v = vtop->r & (VT_VALMASK | VT_LVAL);
1287 if (v == VT_CONST) {
1288 vdup();
1289 vtop[0].c.i >>= 32;
1290 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1291 vdup();
1292 vtop[0].c.i += 4;
1293 } else {
1294 gv(RC_INT);
1295 vdup();
1296 vtop[0].r = vtop[-1].r2;
1297 vtop[0].r2 = vtop[-1].r2 = VT_CONST;
1299 vtop[0].type.t = vtop[-1].type.t = VT_INT | u;
1301 #endif
1303 #ifdef TCC_TARGET_ARM
1304 /* expand long long on stack */
1305 ST_FUNC void lexpand_nr(void)
1307 int u,v;
1309 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1310 vdup();
1311 vtop->r2 = VT_CONST;
1312 vtop->type.t = VT_INT | u;
1313 v=vtop[-1].r & (VT_VALMASK | VT_LVAL);
1314 if (v == VT_CONST) {
1315 vtop[-1].c.i = vtop->c.i;
1316 vtop->c.i = vtop->c.i >> 32;
1317 vtop->r = VT_CONST;
1318 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1319 vtop->c.i += 4;
1320 vtop->r = vtop[-1].r;
1321 } else if (v > VT_CONST) {
1322 vtop--;
1323 lexpand();
1324 } else
1325 vtop->r = vtop[-1].r2;
1326 vtop[-1].r2 = VT_CONST;
1327 vtop[-1].type.t = VT_INT | u;
1329 #endif
1331 #if !defined(TCC_TARGET_X86_64) && !defined(TCC_TARGET_ARM64)
1332 /* build a long long from two ints */
1333 static void lbuild(int t)
1335 gv2(RC_INT, RC_INT);
1336 vtop[-1].r2 = vtop[0].r;
1337 vtop[-1].type.t = t;
1338 vpop();
1340 #endif
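/* On 32-bit targets a 64-bit value occupies a register pair: 'r' holds
   the low word and 'r2' the high word. lexpand() splits such a stack
   entry into two VT_INT entries (high word on top) so the word-by-word
   code in gen_opl() can work on them; lbuild() undoes the split by
   folding the top (high) register back into the r2 field of the entry
   below it and restoring the 64-bit type. */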
1342 /* convert stack entry to register and duplicate its value in another
1343 register */
1344 static void gv_dup(void)
1346 int rc, t, r, r1;
1347 SValue sv;
1349 t = vtop->type.t;
1350 #if !defined(TCC_TARGET_X86_64) && !defined(TCC_TARGET_ARM64)
1351 if ((t & VT_BTYPE) == VT_LLONG) {
1352 lexpand();
1353 gv_dup();
1354 vswap();
1355 vrotb(3);
1356 gv_dup();
1357 vrotb(4);
1358 /* stack: H L L1 H1 */
1359 lbuild(t);
1360 vrotb(3);
1361 vrotb(3);
1362 vswap();
1363 lbuild(t);
1364 vswap();
1365 } else
1366 #endif
1368 /* duplicate value */
1369 rc = RC_INT;
1370 sv.type.t = VT_INT;
1371 if (is_float(t)) {
1372 rc = RC_FLOAT;
1373 #ifdef TCC_TARGET_X86_64
1374 if ((t & VT_BTYPE) == VT_LDOUBLE) {
1375 rc = RC_ST0;
1377 #endif
1378 sv.type.t = t;
1380 r = gv(rc);
1381 r1 = get_reg(rc);
1382 sv.r = r;
1383 sv.c.i = 0;
1384 load(r1, &sv); /* move r to r1 */
1385 vdup();
1386 /* duplicates value */
1387 if (r != r1)
1388 vtop->r = r1;
1392 /* Generate value test
1394 * Generate a test for any value (jump, comparison and integers) */
1395 ST_FUNC int gvtst(int inv, int t)
1397 int v = vtop->r & VT_VALMASK;
1398 if (v != VT_CMP && v != VT_JMP && v != VT_JMPI) {
1399 vpushi(0);
1400 gen_op(TOK_NE);
1402 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1403 /* constant jmp optimization */
1404 if ((vtop->c.i != 0) != inv)
1405 t = gjmp(t);
1406 vtop--;
1407 return t;
1409 return gtst(inv, t);
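/* Usage sketch: gvtst() turns whatever is on top of the vstack into a
   conditional jump. A value that is neither a comparison flag nor a
   pending jump is first compared against 0 (x != 0); a constant
   condition is folded into either an unconditional jump or no code at
   all. The returned value is a jump chain to be resolved later with
   gsym(); e.g. the if/while handling elsewhere in this file calls
   gvtst(1, 0) to obtain a jump taken when the condition is false. */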
1412 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1413 /* generate CPU independent (unsigned) long long operations */
1414 static void gen_opl(int op)
1416 int t, a, b, op1, c, i;
1417 int func;
1418 unsigned short reg_iret = REG_IRET;
1419 unsigned short reg_lret = REG_LRET;
1420 SValue tmp;
1422 switch(op) {
1423 case '/':
1424 case TOK_PDIV:
1425 func = TOK___divdi3;
1426 goto gen_func;
1427 case TOK_UDIV:
1428 func = TOK___udivdi3;
1429 goto gen_func;
1430 case '%':
1431 func = TOK___moddi3;
1432 goto gen_mod_func;
1433 case TOK_UMOD:
1434 func = TOK___umoddi3;
1435 gen_mod_func:
1436 #ifdef TCC_ARM_EABI
1437 reg_iret = TREG_R2;
1438 reg_lret = TREG_R3;
1439 #endif
1440 gen_func:
1441 /* call generic long long function */
1442 vpush_global_sym(&func_old_type, func);
1443 vrott(3);
1444 gfunc_call(2);
1445 vpushi(0);
1446 vtop->r = reg_iret;
1447 vtop->r2 = reg_lret;
1448 break;
1449 case '^':
1450 case '&':
1451 case '|':
1452 case '*':
1453 case '+':
1454 case '-':
1455 //pv("gen_opl A",0,2);
1456 t = vtop->type.t;
1457 vswap();
1458 lexpand();
1459 vrotb(3);
1460 lexpand();
1461 /* stack: L1 H1 L2 H2 */
1462 tmp = vtop[0];
1463 vtop[0] = vtop[-3];
1464 vtop[-3] = tmp;
1465 tmp = vtop[-2];
1466 vtop[-2] = vtop[-3];
1467 vtop[-3] = tmp;
1468 vswap();
1469 /* stack: H1 H2 L1 L2 */
1470 //pv("gen_opl B",0,4);
1471 if (op == '*') {
1472 vpushv(vtop - 1);
1473 vpushv(vtop - 1);
1474 gen_op(TOK_UMULL);
1475 lexpand();
1476 /* stack: H1 H2 L1 L2 ML MH */
1477 for(i=0;i<4;i++)
1478 vrotb(6);
1479 /* stack: ML MH H1 H2 L1 L2 */
1480 tmp = vtop[0];
1481 vtop[0] = vtop[-2];
1482 vtop[-2] = tmp;
1483 /* stack: ML MH H1 L2 H2 L1 */
1484 gen_op('*');
1485 vrotb(3);
1486 vrotb(3);
1487 gen_op('*');
1488 /* stack: ML MH M1 M2 */
1489 gen_op('+');
1490 gen_op('+');
1491 } else if (op == '+' || op == '-') {
1492 /* XXX: add non carry method too (for MIPS or alpha) */
1493 if (op == '+')
1494 op1 = TOK_ADDC1;
1495 else
1496 op1 = TOK_SUBC1;
1497 gen_op(op1);
1498 /* stack: H1 H2 (L1 op L2) */
1499 vrotb(3);
1500 vrotb(3);
1501 gen_op(op1 + 1); /* TOK_xxxC2 */
1502 } else {
1503 gen_op(op);
1504 /* stack: H1 H2 (L1 op L2) */
1505 vrotb(3);
1506 vrotb(3);
1507 /* stack: (L1 op L2) H1 H2 */
1508 gen_op(op);
1509 /* stack: (L1 op L2) (H1 op H2) */
1511 /* stack: L H */
1512 lbuild(t);
1513 break;
1514 case TOK_SAR:
1515 case TOK_SHR:
1516 case TOK_SHL:
1517 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1518 t = vtop[-1].type.t;
1519 vswap();
1520 lexpand();
1521 vrotb(3);
1522 /* stack: L H shift */
1523 c = (int)vtop->c.i;
1524 /* constant: simpler */
1525 /* NOTE: all comments are for SHL; the other cases are
1526 done by swapping words */
1527 vpop();
1528 if (op != TOK_SHL)
1529 vswap();
1530 if (c >= 32) {
1531 /* stack: L H */
1532 vpop();
1533 if (c > 32) {
1534 vpushi(c - 32);
1535 gen_op(op);
1537 if (op != TOK_SAR) {
1538 vpushi(0);
1539 } else {
1540 gv_dup();
1541 vpushi(31);
1542 gen_op(TOK_SAR);
1544 vswap();
1545 } else {
1546 vswap();
1547 gv_dup();
1548 /* stack: H L L */
1549 vpushi(c);
1550 gen_op(op);
1551 vswap();
1552 vpushi(32 - c);
1553 if (op == TOK_SHL)
1554 gen_op(TOK_SHR);
1555 else
1556 gen_op(TOK_SHL);
1557 vrotb(3);
1558 /* stack: L L H */
1559 vpushi(c);
1560 if (op == TOK_SHL)
1561 gen_op(TOK_SHL);
1562 else
1563 gen_op(TOK_SHR);
1564 gen_op('|');
1566 if (op != TOK_SHL)
1567 vswap();
1568 lbuild(t);
1569 } else {
1570 /* XXX: should provide a faster fallback on x86 ? */
1571 switch(op) {
1572 case TOK_SAR:
1573 func = TOK___ashrdi3;
1574 goto gen_func;
1575 case TOK_SHR:
1576 func = TOK___lshrdi3;
1577 goto gen_func;
1578 case TOK_SHL:
1579 func = TOK___ashldi3;
1580 goto gen_func;
1583 break;
1584 default:
1585 /* compare operations */
1586 t = vtop->type.t;
1587 vswap();
1588 lexpand();
1589 vrotb(3);
1590 lexpand();
1591 /* stack: L1 H1 L2 H2 */
1592 tmp = vtop[-1];
1593 vtop[-1] = vtop[-2];
1594 vtop[-2] = tmp;
1595 /* stack: L1 L2 H1 H2 */
1596 /* compare high */
1597 op1 = op;
1598 /* when values are equal, we need to compare low words. since
1599 the jump is inverted, we invert the test too. */
1600 if (op1 == TOK_LT)
1601 op1 = TOK_LE;
1602 else if (op1 == TOK_GT)
1603 op1 = TOK_GE;
1604 else if (op1 == TOK_ULT)
1605 op1 = TOK_ULE;
1606 else if (op1 == TOK_UGT)
1607 op1 = TOK_UGE;
1608 a = 0;
1609 b = 0;
1610 gen_op(op1);
1611 if (op1 != TOK_NE) {
1612 a = gvtst(1, 0);
1614 if (op != TOK_EQ) {
1615 /* generate non equal test */
1616 /* XXX: NOT PORTABLE yet */
1617 if (a == 0) {
1618 b = gvtst(0, 0);
1619 } else {
1620 #if defined(TCC_TARGET_I386)
1621 b = gjmp2(0x850f, 0);
1622 #elif defined(TCC_TARGET_ARM)
1623 b = ind;
1624 o(0x1A000000 | encbranch(ind, 0, 1));
1625 #elif defined(TCC_TARGET_C67) || defined(TCC_TARGET_ARM64)
1626 tcc_error("not implemented");
1627 #else
1628 #error not supported
1629 #endif
1632 /* compare low. Always unsigned */
1633 op1 = op;
1634 if (op1 == TOK_LT)
1635 op1 = TOK_ULT;
1636 else if (op1 == TOK_LE)
1637 op1 = TOK_ULE;
1638 else if (op1 == TOK_GT)
1639 op1 = TOK_UGT;
1640 else if (op1 == TOK_GE)
1641 op1 = TOK_UGE;
1642 gen_op(op1);
1643 a = gvtst(1, a);
1644 gsym(b);
1645 vseti(VT_JMPI, a);
1646 break;
1649 #endif
1651 static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
1653 uint64_t x = (a >> 63 ? -a : a) / (b >> 63 ? -b : b);
1654 return (a ^ b) >> 63 ? -x : x;
1657 static int gen_opic_lt(uint64_t a, uint64_t b)
1659 return (a ^ (uint64_t)1 << 63) < (b ^ (uint64_t)1 << 63);
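/* Arithmetic behind the two helpers above: gen_opic_sdiv() does signed
   64-bit division with unsigned operations by negating negative
   operands (a >> 63 tests the sign bit), dividing the magnitudes, and
   negating the result when exactly one sign bit was set ((a ^ b) >> 63).
   E.g. -7 / 2: 7 / 2 = 3, signs differ, result -3 (truncation toward
   zero, as C requires). gen_opic_lt() compares signed values with an
   unsigned '<' by XOR-ing both operands with 1 << 63, which maps the
   signed range onto the unsigned range while preserving order:
   -1 becomes 0x7fff...f and 0 becomes 0x8000...0, so -1 < 0 holds. */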
1662 /* handle integer constant operands and various
1663 machine-independent optimizations */
1664 static void gen_opic(int op)
1666 SValue *v1 = vtop - 1;
1667 SValue *v2 = vtop;
1668 int t1 = v1->type.t & VT_BTYPE;
1669 int t2 = v2->type.t & VT_BTYPE;
1670 int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1671 int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1672 uint64_t l1 = c1 ? v1->c.i : 0;
1673 uint64_t l2 = c2 ? v2->c.i : 0;
1674 int shm = (t1 == VT_LLONG) ? 63 : 31;
1676 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
1677 l1 = ((uint32_t)l1 |
1678 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
1679 if (t2 != VT_LLONG && (PTR_SIZE != 8 || t2 != VT_PTR))
1680 l2 = ((uint32_t)l2 |
1681 (v2->type.t & VT_UNSIGNED ? 0 : -(l2 & 0x80000000)));
1683 if (c1 && c2) {
1684 switch(op) {
1685 case '+': l1 += l2; break;
1686 case '-': l1 -= l2; break;
1687 case '&': l1 &= l2; break;
1688 case '^': l1 ^= l2; break;
1689 case '|': l1 |= l2; break;
1690 case '*': l1 *= l2; break;
1692 case TOK_PDIV:
1693 case '/':
1694 case '%':
1695 case TOK_UDIV:
1696 case TOK_UMOD:
1697 /* if division by zero, generate explicit division */
1698 if (l2 == 0) {
1699 if (const_wanted)
1700 tcc_error("division by zero in constant");
1701 goto general_case;
1703 switch(op) {
1704 default: l1 = gen_opic_sdiv(l1, l2); break;
1705 case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
1706 case TOK_UDIV: l1 = l1 / l2; break;
1707 case TOK_UMOD: l1 = l1 % l2; break;
1709 break;
1710 case TOK_SHL: l1 <<= (l2 & shm); break;
1711 case TOK_SHR: l1 >>= (l2 & shm); break;
1712 case TOK_SAR:
1713 l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
1714 break;
1715 /* tests */
1716 case TOK_ULT: l1 = l1 < l2; break;
1717 case TOK_UGE: l1 = l1 >= l2; break;
1718 case TOK_EQ: l1 = l1 == l2; break;
1719 case TOK_NE: l1 = l1 != l2; break;
1720 case TOK_ULE: l1 = l1 <= l2; break;
1721 case TOK_UGT: l1 = l1 > l2; break;
1722 case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
1723 case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
1724 case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
1725 case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
1726 /* logical */
1727 case TOK_LAND: l1 = l1 && l2; break;
1728 case TOK_LOR: l1 = l1 || l2; break;
1729 default:
1730 goto general_case;
1732 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
1733 l1 = ((uint32_t)l1 |
1734 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
1735 v1->c.i = l1;
1736 vtop--;
1737 } else {
1738 /* if commutative ops, put c2 as constant */
1739 if (c1 && (op == '+' || op == '&' || op == '^' ||
1740 op == '|' || op == '*')) {
1741 vswap();
1742 c2 = c1; //c = c1, c1 = c2, c2 = c;
1743 l2 = l1; //l = l1, l1 = l2, l2 = l;
1745 if (!const_wanted &&
1746 c1 && ((l1 == 0 &&
1747 (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
1748 (l1 == -1 && op == TOK_SAR))) {
1749 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
1750 vtop--;
1751 } else if (!const_wanted &&
1752 c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
1753 (l2 == -1 && op == '|') ||
1754 (l2 == 0xffffffff && t2 != VT_LLONG && op == '|') ||
1755 (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
1756 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
1757 if (l2 == 1)
1758 vtop->c.i = 0;
1759 vswap();
1760 vtop--;
1761 } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
1762 op == TOK_PDIV) &&
1763 l2 == 1) ||
1764 ((op == '+' || op == '-' || op == '|' || op == '^' ||
1765 op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
1766 l2 == 0) ||
1767 (op == '&' &&
1768 l2 == -1))) {
1769 /* filter out NOP operations like x*1, x-0, x&-1... */
1770 vtop--;
1771 } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
1772 /* try to use shifts instead of muls or divs */
1773 if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
1774 int n = -1;
1775 while (l2) {
1776 l2 >>= 1;
1777 n++;
1779 vtop->c.i = n;
1780 if (op == '*')
1781 op = TOK_SHL;
1782 else if (op == TOK_PDIV)
1783 op = TOK_SAR;
1784 else
1785 op = TOK_SHR;
1787 goto general_case;
1788 } else if (c2 && (op == '+' || op == '-') &&
1789 (((vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM))
1790 || (vtop[-1].r & (VT_VALMASK | VT_LVAL)) == VT_LOCAL)) {
1791 /* symbol + constant case */
1792 if (op == '-')
1793 l2 = -l2;
1794 l2 += vtop[-1].c.i;
1795 /* The backends can't always deal with addends to symbols
1796 larger than +-1<<31. Don't construct such. */
1797 if ((int)l2 != l2)
1798 goto general_case;
1799 vtop--;
1800 vtop->c.i = l2;
1801 } else {
1802 general_case:
1803 /* call low level op generator */
1804 if (t1 == VT_LLONG || t2 == VT_LLONG ||
1805 (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
1806 gen_opl(op);
1807 else
1808 gen_opi(op);
1813 /* generate a floating point operation with constant propagation */
1814 static void gen_opif(int op)
1816 int c1, c2;
1817 SValue *v1, *v2;
1818 long double f1, f2;
1820 v1 = vtop - 1;
1821 v2 = vtop;
1822 /* currently, we cannot do computations with forward symbols */
1823 c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1824 c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1825 if (c1 && c2) {
1826 if (v1->type.t == VT_FLOAT) {
1827 f1 = v1->c.f;
1828 f2 = v2->c.f;
1829 } else if (v1->type.t == VT_DOUBLE) {
1830 f1 = v1->c.d;
1831 f2 = v2->c.d;
1832 } else {
1833 f1 = v1->c.ld;
1834 f2 = v2->c.ld;
1837 /* NOTE: we only do constant propagation if finite number (not
1838 NaN or infinity) (ANSI spec) */
1839 if (!ieee_finite(f1) || !ieee_finite(f2))
1840 goto general_case;
1842 switch(op) {
1843 case '+': f1 += f2; break;
1844 case '-': f1 -= f2; break;
1845 case '*': f1 *= f2; break;
1846 case '/':
1847 if (f2 == 0.0) {
1848 if (const_wanted)
1849 tcc_error("division by zero in constant");
1850 goto general_case;
1852 f1 /= f2;
1853 break;
1854 /* XXX: also handles tests ? */
1855 default:
1856 goto general_case;
1858 /* XXX: overflow test ? */
1859 if (v1->type.t == VT_FLOAT) {
1860 v1->c.f = f1;
1861 } else if (v1->type.t == VT_DOUBLE) {
1862 v1->c.d = f1;
1863 } else {
1864 v1->c.ld = f1;
1866 vtop--;
1867 } else {
1868 general_case:
1869 gen_opf(op);
1873 static int pointed_size(CType *type)
1875 int align;
1876 return type_size(pointed_type(type), &align);
1879 static void vla_runtime_pointed_size(CType *type)
1881 int align;
1882 vla_runtime_type_size(pointed_type(type), &align);
1885 static inline int is_null_pointer(SValue *p)
1887 if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
1888 return 0;
1889 return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
1890 ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
1891 ((p->type.t & VT_BTYPE) == VT_PTR &&
1892 (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0));
1895 static inline int is_integer_btype(int bt)
1897 return (bt == VT_BYTE || bt == VT_SHORT ||
1898 bt == VT_INT || bt == VT_LLONG);
1901 /* check types for comparison or subtraction of pointers */
1902 static void check_comparison_pointer_types(SValue *p1, SValue *p2, int op)
1904 CType *type1, *type2, tmp_type1, tmp_type2;
1905 int bt1, bt2;
1907 /* null pointers are accepted for all comparisons, as in gcc */
1908 if (is_null_pointer(p1) || is_null_pointer(p2))
1909 return;
1910 type1 = &p1->type;
1911 type2 = &p2->type;
1912 bt1 = type1->t & VT_BTYPE;
1913 bt2 = type2->t & VT_BTYPE;
1914 /* accept comparison between pointer and integer with a warning */
1915 if ((is_integer_btype(bt1) || is_integer_btype(bt2)) && op != '-') {
1916 if (op != TOK_LOR && op != TOK_LAND )
1917 tcc_warning("comparison between pointer and integer");
1918 return;
1921 /* both must be pointers or implicit function pointers */
1922 if (bt1 == VT_PTR) {
1923 type1 = pointed_type(type1);
1924 } else if (bt1 != VT_FUNC)
1925 goto invalid_operands;
1927 if (bt2 == VT_PTR) {
1928 type2 = pointed_type(type2);
1929 } else if (bt2 != VT_FUNC) {
1930 invalid_operands:
1931 tcc_error("invalid operands to binary %s", get_tok_str(op, NULL));
1933 if ((type1->t & VT_BTYPE) == VT_VOID ||
1934 (type2->t & VT_BTYPE) == VT_VOID)
1935 return;
1936 tmp_type1 = *type1;
1937 tmp_type2 = *type2;
1938 tmp_type1.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
1939 tmp_type2.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
1940 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
1941 /* gcc-like error if '-' is used */
1942 if (op == '-')
1943 goto invalid_operands;
1944 else
1945 tcc_warning("comparison of distinct pointer types lacks a cast");
1950 /* generic gen_op: handles type problems */
1950 ST_FUNC void gen_op(int op)
1952 int u, t1, t2, bt1, bt2, t;
1953 CType type1;
1955 redo:
1956 t1 = vtop[-1].type.t;
1957 t2 = vtop[0].type.t;
1958 bt1 = t1 & VT_BTYPE;
1959 bt2 = t2 & VT_BTYPE;
1961 if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
1962 tcc_error("operation on a struct");
1963 } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
1964 if (bt2 == VT_FUNC) {
1965 mk_pointer(&vtop->type);
1966 gaddrof();
1968 if (bt1 == VT_FUNC) {
1969 vswap();
1970 mk_pointer(&vtop->type);
1971 gaddrof();
1972 vswap();
1974 goto redo;
1975 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
1976 /* at least one operand is a pointer */
1977 /* relational op: both operands must be pointers */
1978 if (op >= TOK_ULT && op <= TOK_LOR) {
1979 check_comparison_pointer_types(vtop - 1, vtop, op);
1980 /* pointers are handled as unsigned */
1981 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1982 t = VT_LLONG | VT_UNSIGNED;
1983 #else
1984 t = VT_INT | VT_UNSIGNED;
1985 #endif
1986 goto std_op;
1988 /* if both pointers, then it must be the '-' op */
1989 if (bt1 == VT_PTR && bt2 == VT_PTR) {
1990 if (op != '-')
1991 tcc_error("cannot use pointers here");
1992 check_comparison_pointer_types(vtop - 1, vtop, op);
1993 /* XXX: check that types are compatible */
1994 if (vtop[-1].type.t & VT_VLA) {
1995 vla_runtime_pointed_size(&vtop[-1].type);
1996 } else {
1997 vpushi(pointed_size(&vtop[-1].type));
1999 vrott(3);
2000 gen_opic(op);
2001 /* set to integer type */
2002 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2003 vtop->type.t = VT_LLONG;
2004 #else
2005 vtop->type.t = VT_INT;
2006 #endif
2007 vswap();
2008 gen_op(TOK_PDIV);
2009 } else {
2010 /* exactly one pointer : must be '+' or '-'. */
2011 if (op != '-' && op != '+')
2012 tcc_error("cannot use pointers here");
2013 /* Put pointer as first operand */
2014 if (bt2 == VT_PTR) {
2015 vswap();
2016 t = t1, t1 = t2, t2 = t;
2018 #if PTR_SIZE == 4
2019 if ((vtop[0].type.t & VT_BTYPE) == VT_LLONG)
2020 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2021 gen_cast(&int_type);
2022 #endif
2023 type1 = vtop[-1].type;
2024 type1.t &= ~VT_ARRAY;
2025 if (vtop[-1].type.t & VT_VLA)
2026 vla_runtime_pointed_size(&vtop[-1].type);
2027 else {
2028 u = pointed_size(&vtop[-1].type);
2029 if (u < 0)
2030 tcc_error("unknown array element size");
2031 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2032 vpushll(u);
2033 #else
2034 /* XXX: cast to int ? (long long case) */
2035 vpushi(u);
2036 #endif
2038 gen_op('*');
2039 #if 0
2040 /* #ifdef CONFIG_TCC_BCHECK
2041 The main reason for removing this code:
2042 #include <stdio.h>
2043 int main ()
2045 int v[10];
2046 int i = 10;
2047 int j = 9;
2048 fprintf(stderr, "v+i-j = %p\n", v+i-j);
2049 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
2051 When this code is enabled, the output looks like
2052 v+i-j = 0xfffffffe
2053 v+(i-j) = 0xbff84000
2055 /* if evaluating constant expression, no code should be
2056 generated, so no bound check */
2057 if (tcc_state->do_bounds_check && !const_wanted) {
2058 /* if bounded pointers, we generate a special code to
2059 test bounds */
2060 if (op == '-') {
2061 vpushi(0);
2062 vswap();
2063 gen_op('-');
2065 gen_bounded_ptr_add();
2066 } else
2067 #endif
2069 gen_opic(op);
2071 /* restore the type in case gen_opic() swapped the operands */
2072 vtop->type = type1;
2074 } else if (is_float(bt1) || is_float(bt2)) {
2075 /* compute bigger type and do implicit casts */
2076 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
2077 t = VT_LDOUBLE;
2078 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
2079 t = VT_DOUBLE;
2080 } else {
2081 t = VT_FLOAT;
2083 /* floats can only be used for a few operations */
2084 if (op != '+' && op != '-' && op != '*' && op != '/' &&
2085 (op < TOK_ULT || op > TOK_GT))
2086 tcc_error("invalid operands for binary operation");
2087 goto std_op;
2088 } else if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL) {
2089 t = bt1 == VT_LLONG ? VT_LLONG : VT_INT;
2090 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (t | VT_UNSIGNED))
2091 t |= VT_UNSIGNED;
2092 goto std_op;
2093 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
2094 /* cast to biggest op */
2095 t = VT_LLONG;
2096 /* convert to unsigned if it does not fit in a long long */
2097 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED) ||
2098 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED))
2099 t |= VT_UNSIGNED;
2100 goto std_op;
2101 } else {
2102 /* integer operations */
2103 t = VT_INT;
2104 /* convert to unsigned if it does not fit in an integer */
2105 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED) ||
2106 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED))
2107 t |= VT_UNSIGNED;
2108 std_op:
2109 /* XXX: currently, some unsigned operations are explicit, so
2110 we modify them here */
2111 if (t & VT_UNSIGNED) {
2112 if (op == TOK_SAR)
2113 op = TOK_SHR;
2114 else if (op == '/')
2115 op = TOK_UDIV;
2116 else if (op == '%')
2117 op = TOK_UMOD;
2118 else if (op == TOK_LT)
2119 op = TOK_ULT;
2120 else if (op == TOK_GT)
2121 op = TOK_UGT;
2122 else if (op == TOK_LE)
2123 op = TOK_ULE;
2124 else if (op == TOK_GE)
2125 op = TOK_UGE;
2127 vswap();
2128 type1.t = t;
2129 gen_cast(&type1);
2130 vswap();
2131 /* special case for shifts and long long: we keep the shift as
2132 an integer */
2133 if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
2134 type1.t = VT_INT;
2135 gen_cast(&type1);
2136 if (is_float(t))
2137 gen_opif(op);
2138 else
2139 gen_opic(op);
2140 if (op >= TOK_ULT && op <= TOK_GT) {
2141 /* relational op: the result is an int */
2142 vtop->type.t = VT_INT;
2143 } else {
2144 vtop->type.t = t;
2147 // Make sure that we have converted to an rvalue:
2148 if (vtop->r & VT_LVAL)
2149 gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
2152 #ifndef TCC_TARGET_ARM
2153 /* generic itof for unsigned long long case */
2154 static void gen_cvt_itof1(int t)
2156 #ifdef TCC_TARGET_ARM64
2157 gen_cvt_itof(t);
2158 #else
2159 if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
2160 (VT_LLONG | VT_UNSIGNED)) {
2162 if (t == VT_FLOAT)
2163 vpush_global_sym(&func_old_type, TOK___floatundisf);
2164 #if LDOUBLE_SIZE != 8
2165 else if (t == VT_LDOUBLE)
2166 vpush_global_sym(&func_old_type, TOK___floatundixf);
2167 #endif
2168 else
2169 vpush_global_sym(&func_old_type, TOK___floatundidf);
2170 vrott(2);
2171 gfunc_call(1);
2172 vpushi(0);
2173 vtop->r = reg_fret(t);
2174 } else {
2175 gen_cvt_itof(t);
2177 #endif
2179 #endif
2181 /* generic ftoi for unsigned long long case */
2182 static void gen_cvt_ftoi1(int t)
2184 #ifdef TCC_TARGET_ARM64
2185 gen_cvt_ftoi(t);
2186 #else
2187 int st;
2189 if (t == (VT_LLONG | VT_UNSIGNED)) {
2190 /* not handled natively */
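/* conversion to unsigned 64-bit goes through the __fixuns*di runtime
   helpers; the 64-bit result comes back in REG_IRET/REG_LRET */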
2191 st = vtop->type.t & VT_BTYPE;
2192 if (st == VT_FLOAT)
2193 vpush_global_sym(&func_old_type, TOK___fixunssfdi);
2194 #if LDOUBLE_SIZE != 8
2195 else if (st == VT_LDOUBLE)
2196 vpush_global_sym(&func_old_type, TOK___fixunsxfdi);
2197 #endif
2198 else
2199 vpush_global_sym(&func_old_type, TOK___fixunsdfdi);
2200 vrott(2);
2201 gfunc_call(1);
2202 vpushi(0);
2203 vtop->r = REG_IRET;
2204 vtop->r2 = REG_LRET;
2205 } else {
2206 gen_cvt_ftoi(t);
2208 #endif
2211 /* force char or short cast */
2212 static void force_charshort_cast(int t)
2214 int bits, dbt;
2215 dbt = t & VT_BTYPE;
2216 /* XXX: add optimization if lvalue : just change type and offset */
2217 if (dbt == VT_BYTE)
2218 bits = 8;
2219 else
2220 bits = 16;
2221 if (t & VT_UNSIGNED) {
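/* unsigned destination: simply mask down to the target width */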
2222 vpushi((1 << bits) - 1);
2223 gen_op('&');
2224 } else {
2225 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
2226 bits = 64 - bits;
2227 else
2228 bits = 32 - bits;
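/* signed destination: sign-extend by shifting left, then arithmetic
   shifting right by the same amount */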
2229 vpushi(bits);
2230 gen_op(TOK_SHL);
2231 /* result must be signed, or the SAR is converted to an SHL.
2232 This was not the case when "t" was a signed short
2233 and the last value on the stack was an unsigned int */
2234 vtop->type.t &= ~VT_UNSIGNED;
2235 vpushi(bits);
2236 gen_op(TOK_SAR);
2240 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2241 static void gen_cast(CType *type)
2243 int sbt, dbt, sf, df, c, p;
2245 /* special delayed cast for char/short */
2246 /* XXX: in some cases (multiple cascaded casts), it may still
2247 be incorrect */
2248 if (vtop->r & VT_MUSTCAST) {
2249 vtop->r &= ~VT_MUSTCAST;
2250 force_charshort_cast(vtop->type.t);
2253 /* bitfields first get cast to ints */
2254 if (vtop->type.t & VT_BITFIELD) {
2255 gv(RC_INT);
2258 dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
2259 sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
2261 if (sbt != dbt) {
2262 sf = is_float(sbt);
2263 df = is_float(dbt);
2264 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2265 p = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM);
2266 if (c) {
2267 /* constant case: we can do it now */
2268 /* XXX: in ISOC, cannot do it if error in convert */
2269 if (sbt == VT_FLOAT)
2270 vtop->c.ld = vtop->c.f;
2271 else if (sbt == VT_DOUBLE)
2272 vtop->c.ld = vtop->c.d;
2274 if (df) {
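/* integer constant to FP constant: negative signed sources are
   converted via their negated (non-negative) magnitude */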
2275 if ((sbt & VT_BTYPE) == VT_LLONG) {
2276 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
2277 vtop->c.ld = vtop->c.i;
2278 else
2279 vtop->c.ld = -(long double)-vtop->c.i;
2280 } else if(!sf) {
2281 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
2282 vtop->c.ld = (uint32_t)vtop->c.i;
2283 else
2284 vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
2287 if (dbt == VT_FLOAT)
2288 vtop->c.f = (float)vtop->c.ld;
2289 else if (dbt == VT_DOUBLE)
2290 vtop->c.d = (double)vtop->c.ld;
2291 } else if (sf && dbt == (VT_LLONG|VT_UNSIGNED)) {
2292 vtop->c.i = vtop->c.ld;
2293 } else if (sf && dbt == VT_BOOL) {
2294 vtop->c.i = (vtop->c.ld != 0);
2295 } else {
2296 if(sf)
2297 vtop->c.i = vtop->c.ld;
2298 else if (sbt == (VT_LLONG|VT_UNSIGNED))
2300 else if (sbt & VT_UNSIGNED)
2301 vtop->c.i = (uint32_t)vtop->c.i;
2302 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2303 else if (sbt == VT_PTR)
2305 #endif
2306 else if (sbt != VT_LLONG)
2307 vtop->c.i = ((uint32_t)vtop->c.i |
2308 -(vtop->c.i & 0x80000000));
2310 if (dbt == (VT_LLONG|VT_UNSIGNED))
2312 else if (dbt == VT_BOOL)
2313 vtop->c.i = (vtop->c.i != 0);
2314 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2315 else if (dbt == VT_PTR)
2317 #endif
2318 else if (dbt != VT_LLONG) {
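/* narrower integer destination: mask to the destination width,
   then sign-extend when the destination is signed */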
2319 uint32_t m = ((dbt & VT_BTYPE) == VT_BYTE ? 0xff :
2320 (dbt & VT_BTYPE) == VT_SHORT ? 0xffff :
2321 0xffffffff);
2322 vtop->c.i &= m;
2323 if (!(dbt & VT_UNSIGNED))
2324 vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
2327 } else if (p && dbt == VT_BOOL) {
2328 vtop->r = VT_CONST;
2329 vtop->c.i = 1;
2330 } else {
2331 /* non constant case: generate code */
2332 if (sf && df) {
2333 /* convert from fp to fp */
2334 gen_cvt_ftof(dbt);
2335 } else if (df) {
2336 /* convert int to fp */
2337 gen_cvt_itof1(dbt);
2338 } else if (sf) {
2339 /* convert fp to int */
2340 if (dbt == VT_BOOL) {
2341 vpushi(0);
2342 gen_op(TOK_NE);
2343 } else {
2344 if (sbt == VT_FLOAT) {
2345 /* cast to DOUBLE to avoid precision loss */
2346 gen_cvt_ftof(VT_DOUBLE);
2347 vtop->type.t = (vtop->type.t & ~VT_BTYPE) | VT_DOUBLE;
2349 /* we handle char/short/etc... with generic code */
2350 if (dbt != (VT_INT | VT_UNSIGNED) &&
2351 dbt != (VT_LLONG | VT_UNSIGNED) &&
2352 dbt != VT_LLONG)
2353 dbt = VT_INT;
2354 gen_cvt_ftoi1(dbt);
2355 if (dbt == VT_INT && (type->t & (VT_BTYPE | VT_UNSIGNED)) != dbt) {
2356 /* additional cast for char/short... */
2357 vtop->type.t = dbt;
2358 gen_cast(type);
2361 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
2362 } else if ((dbt & VT_BTYPE) == VT_LLONG) {
2363 if ((sbt & VT_BTYPE) != VT_LLONG) {
2364 /* scalar to long long */
2365 /* machine independent conversion */
2366 gv(RC_INT);
2367 /* generate high word */
2368 if (sbt == (VT_INT | VT_UNSIGNED)) {
2369 vpushi(0);
2370 gv(RC_INT);
2371 } else {
2372 if (sbt == VT_PTR) {
2373 /* cast from pointer to int before we apply
2374 shift operation, which pointers don't support */
2375 gen_cast(&int_type);
2377 gv_dup();
2378 vpushi(31);
2379 gen_op(TOK_SAR);
2381 /* patch second register */
2382 vtop[-1].r2 = vtop->r;
2383 vpop();
2385 #else
2386 } else if ((dbt & VT_BTYPE) == VT_LLONG ||
2387 (dbt & VT_BTYPE) == VT_PTR ||
2388 (dbt & VT_BTYPE) == VT_FUNC) {
2389 if ((sbt & VT_BTYPE) != VT_LLONG &&
2390 (sbt & VT_BTYPE) != VT_PTR &&
2391 (sbt & VT_BTYPE) != VT_FUNC) {
2392 /* need to convert from 32bit to 64bit */
2393 gv(RC_INT);
2394 if (sbt != (VT_INT | VT_UNSIGNED)) {
2395 #if defined(TCC_TARGET_ARM64)
2396 gen_cvt_sxtw();
2397 #elif defined(TCC_TARGET_X86_64)
2398 int r = gv(RC_INT);
2399 /* x86_64 specific: movslq */
2400 o(0x6348);
2401 o(0xc0 + (REG_VALUE(r) << 3) + REG_VALUE(r));
2402 #else
2403 #error
2404 #endif
2407 #endif
2408 } else if (dbt == VT_BOOL) {
2409 /* scalar to bool */
2410 vpushi(0);
2411 gen_op(TOK_NE);
2412 } else if ((dbt & VT_BTYPE) == VT_BYTE ||
2413 (dbt & VT_BTYPE) == VT_SHORT) {
2414 if (sbt == VT_PTR) {
2415 vtop->type.t = VT_INT;
2416 tcc_warning("nonportable conversion from pointer to char/short");
2418 force_charshort_cast(dbt);
2419 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
2420 } else if ((dbt & VT_BTYPE) == VT_INT) {
2421 /* scalar to int */
2422 if ((sbt & VT_BTYPE) == VT_LLONG) {
2423 /* from long long: just take low order word */
2424 lexpand();
2425 vpop();
2427 /* if lvalue and single word type, nothing to do because
2428 the lvalue already contains the real type size (see
2429 VT_LVAL_xxx constants) */
2430 #endif
2433 } else if ((dbt & VT_BTYPE) == VT_PTR && !(vtop->r & VT_LVAL)) {
2434 /* if we are casting between pointer types,
2435 we must update the VT_LVAL_xxx size */
2436 vtop->r = (vtop->r & ~VT_LVAL_TYPE)
2437 | (lvalue_type(type->ref->type.t) & VT_LVAL_TYPE);
2439 vtop->type = *type;
2442 /* return type size as known at compile time. Put alignment at 'a' */
2443 ST_FUNC int type_size(CType *type, int *a)
2445 Sym *s;
2446 int bt;
2448 bt = type->t & VT_BTYPE;
2449 if (bt == VT_STRUCT) {
2450 /* struct/union */
2451 s = type->ref;
2452 *a = s->r;
2453 return s->c;
2454 } else if (bt == VT_PTR) {
2455 if (type->t & VT_ARRAY) {
2456 int ts;
2458 s = type->ref;
2459 ts = type_size(&s->type, a);
2461 if (ts < 0 && s->c < 0)
2462 ts = -ts;
2464 return ts * s->c;
2465 } else {
2466 *a = PTR_SIZE;
2467 return PTR_SIZE;
2469 } else if (bt == VT_LDOUBLE) {
2470 *a = LDOUBLE_ALIGN;
2471 return LDOUBLE_SIZE;
2472 } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
2473 #ifdef TCC_TARGET_I386
2474 #ifdef TCC_TARGET_PE
2475 *a = 8;
2476 #else
2477 *a = 4;
2478 #endif
2479 #elif defined(TCC_TARGET_ARM)
2480 #ifdef TCC_ARM_EABI
2481 *a = 8;
2482 #else
2483 *a = 4;
2484 #endif
2485 #else
2486 *a = 8;
2487 #endif
2488 return 8;
2489 } else if (bt == VT_INT || bt == VT_FLOAT) {
2490 *a = 4;
2491 return 4;
2492 } else if (bt == VT_SHORT) {
2493 *a = 2;
2494 return 2;
2495 } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
2496 *a = 8;
2497 return 16;
2498 } else if (bt == VT_ENUM) {
2499 *a = 4;
2500 /* Enums might be incomplete, so don't just return '4' here. */
2501 return type->ref->c;
2502 } else {
2503 /* char, void, function, _Bool */
2504 *a = 1;
2505 return 1;
2509 /* push type size as known at run time on top of the value stack. Put
2510 alignment at 'a' */
2511 ST_FUNC void vla_runtime_type_size(CType *type, int *a)
2513 if (type->t & VT_VLA) {
2514 type_size(&type->ref->type, a);
2515 vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
2516 } else {
2517 vpushi(type_size(type, a));
2521 static void vla_sp_restore(void) {
2522 if (vlas_in_scope) {
2523 gen_vla_sp_restore(vla_sp_loc);
2527 static void vla_sp_restore_root(void) {
2528 if (vlas_in_scope) {
2529 gen_vla_sp_restore(vla_sp_root_loc);
2533 /* return the pointed type of t */
2534 static inline CType *pointed_type(CType *type)
2536 return &type->ref->type;
2539 /* modify 'type' in place so that it becomes a pointer to the original type */
2540 ST_FUNC void mk_pointer(CType *type)
2542 Sym *s;
2543 s = sym_push(SYM_FIELD, type, 0, -1);
2544 type->t = VT_PTR | (type->t & ~VT_TYPE);
2545 type->ref = s;
2548 /* compare function types. OLD functions match any new functions */
2549 static int is_compatible_func(CType *type1, CType *type2)
2551 Sym *s1, *s2;
2553 s1 = type1->ref;
2554 s2 = type2->ref;
2555 if (!is_compatible_types(&s1->type, &s2->type))
2556 return 0;
2557 /* check func_call */
2558 if (s1->a.func_call != s2->a.func_call)
2559 return 0;
2560 /* XXX: not complete */
2561 if (s1->c == FUNC_OLD || s2->c == FUNC_OLD)
2562 return 1;
2563 if (s1->c != s2->c)
2564 return 0;
2565 while (s1 != NULL) {
2566 if (s2 == NULL)
2567 return 0;
2568 if (!is_compatible_parameter_types(&s1->type, &s2->type))
2569 return 0;
2570 s1 = s1->next;
2571 s2 = s2->next;
2573 if (s2)
2574 return 0;
2575 return 1;
2578 /* return true if type1 and type2 are the same. If unqualified is
2579 true, qualifiers on the types are ignored.
2581 - enums are not checked as gcc __builtin_types_compatible_p ()
2583 static int compare_types(CType *type1, CType *type2, int unqualified)
2585 int bt1, t1, t2;
2587 t1 = type1->t & VT_TYPE;
2588 t2 = type2->t & VT_TYPE;
2589 if (unqualified) {
2590 /* strip qualifiers before comparing */
2591 t1 &= ~(VT_CONSTANT | VT_VOLATILE);
2592 t2 &= ~(VT_CONSTANT | VT_VOLATILE);
2594 /* Default vs. explicit signedness only matters for char */
2595 if ((t1 & VT_BTYPE) != VT_BYTE) {
2596 t1 &= ~VT_DEFSIGN;
2597 t2 &= ~VT_DEFSIGN;
2599 /* An enum is compatible with (unsigned) int. Ideally we would
2600 store the enum's signedness in type->ref.a.<some_bit> and
2601 only accept unsigned enums with unsigned int and vice versa.
2602 But one of our callers (gen_assign_cast) always strips VT_UNSIGNED
2603 from pointer target types, so we can't add it here either. */
2604 if ((t1 & VT_BTYPE) == VT_ENUM) {
2605 t1 = VT_INT;
2606 if (type1->ref->a.unsigned_enum)
2607 t1 |= VT_UNSIGNED;
2609 if ((t2 & VT_BTYPE) == VT_ENUM) {
2610 t2 = VT_INT;
2611 if (type2->ref->a.unsigned_enum)
2612 t2 |= VT_UNSIGNED;
2614 /* XXX: bitfields ? */
2615 if (t1 != t2)
2616 return 0;
2617 /* test more complicated cases */
2618 bt1 = t1 & VT_BTYPE;
2619 if (bt1 == VT_PTR) {
2620 type1 = pointed_type(type1);
2621 type2 = pointed_type(type2);
2622 return is_compatible_types(type1, type2);
2623 } else if (bt1 == VT_STRUCT) {
2624 return (type1->ref == type2->ref);
2625 } else if (bt1 == VT_FUNC) {
2626 return is_compatible_func(type1, type2);
2627 } else {
2628 return 1;
2632 /* return true if type1 and type2 are exactly the same (including
2633 qualifiers).
2635 static int is_compatible_types(CType *type1, CType *type2)
2637 return compare_types(type1,type2,0);
2640 /* return true if type1 and type2 are the same (ignoring qualifiers).
2642 static int is_compatible_parameter_types(CType *type1, CType *type2)
2644 return compare_types(type1,type2,1);
2647 /* print a type. If 'varstr' is not NULL, then the variable is also
2648 printed in the type */
2649 /* XXX: union */
2650 /* XXX: add array and function pointers */
2651 static void type_to_str(char *buf, int buf_size,
2652 CType *type, const char *varstr)
2654 int bt, v, t;
2655 Sym *s, *sa;
2656 char buf1[256];
2657 const char *tstr;
2659 t = type->t & VT_TYPE;
2660 bt = t & VT_BTYPE;
2661 buf[0] = '\0';
2662 if (t & VT_CONSTANT)
2663 pstrcat(buf, buf_size, "const ");
2664 if (t & VT_VOLATILE)
2665 pstrcat(buf, buf_size, "volatile ");
2666 if ((t & (VT_DEFSIGN | VT_UNSIGNED)) == (VT_DEFSIGN | VT_UNSIGNED))
2667 pstrcat(buf, buf_size, "unsigned ");
2668 else if (t & VT_DEFSIGN)
2669 pstrcat(buf, buf_size, "signed ");
2670 switch(bt) {
2671 case VT_VOID:
2672 tstr = "void";
2673 goto add_tstr;
2674 case VT_BOOL:
2675 tstr = "_Bool";
2676 goto add_tstr;
2677 case VT_BYTE:
2678 tstr = "char";
2679 goto add_tstr;
2680 case VT_SHORT:
2681 tstr = "short";
2682 goto add_tstr;
2683 case VT_INT:
2684 tstr = "int";
2685 goto add_tstr;
2686 case VT_LONG:
2687 tstr = "long";
2688 goto add_tstr;
2689 case VT_LLONG:
2690 tstr = "long long";
2691 goto add_tstr;
2692 case VT_FLOAT:
2693 tstr = "float";
2694 goto add_tstr;
2695 case VT_DOUBLE:
2696 tstr = "double";
2697 goto add_tstr;
2698 case VT_LDOUBLE:
2699 tstr = "long double";
2700 add_tstr:
2701 pstrcat(buf, buf_size, tstr);
2702 break;
2703 case VT_ENUM:
2704 case VT_STRUCT:
2705 if (bt == VT_STRUCT)
2706 tstr = "struct ";
2707 else
2708 tstr = "enum ";
2709 pstrcat(buf, buf_size, tstr);
2710 v = type->ref->v & ~SYM_STRUCT;
2711 if (v >= SYM_FIRST_ANOM)
2712 pstrcat(buf, buf_size, "<anonymous>");
2713 else
2714 pstrcat(buf, buf_size, get_tok_str(v, NULL));
2715 break;
2716 case VT_FUNC:
2717 s = type->ref;
2718 type_to_str(buf, buf_size, &s->type, varstr);
2719 pstrcat(buf, buf_size, "(");
2720 sa = s->next;
2721 while (sa != NULL) {
2722 type_to_str(buf1, sizeof(buf1), &sa->type, NULL);
2723 pstrcat(buf, buf_size, buf1);
2724 sa = sa->next;
2725 if (sa)
2726 pstrcat(buf, buf_size, ", ");
2728 pstrcat(buf, buf_size, ")");
2729 goto no_var;
2730 case VT_PTR:
2731 s = type->ref;
2732 if (t & VT_ARRAY) {
2733 snprintf(buf1, sizeof(buf1), "%s[%ld]", varstr ? varstr : "", s->c);
2734 type_to_str(buf, buf_size, &s->type, buf1);
2735 goto no_var;
2737 pstrcpy(buf1, sizeof(buf1), "*");
2738 if (t & VT_CONSTANT)
2739 pstrcat(buf1, buf_size, "const ");
2740 if (t & VT_VOLATILE)
2741 pstrcat(buf1, buf_size, "volatile ");
2742 if (varstr)
2743 pstrcat(buf1, sizeof(buf1), varstr);
2744 type_to_str(buf, buf_size, &s->type, buf1);
2745 goto no_var;
2747 if (varstr) {
2748 pstrcat(buf, buf_size, " ");
2749 pstrcat(buf, buf_size, varstr);
2751 no_var: ;
2754 /* verify type compatibility to store vtop in 'dt' type, and generate
2755 casts if needed. */
2756 static void gen_assign_cast(CType *dt)
2758 CType *st, *type1, *type2, tmp_type1, tmp_type2;
2759 char buf1[256], buf2[256];
2760 int dbt, sbt;
2762 st = &vtop->type; /* source type */
2763 dbt = dt->t & VT_BTYPE;
2764 sbt = st->t & VT_BTYPE;
2765 if (sbt == VT_VOID || dbt == VT_VOID) {
2766 if (sbt == VT_VOID && dbt == VT_VOID)
2767 ; /*
2768 It is OK if both are void
2769 A test program:
2770 void func1() {}
2771 void func2() {
2772 return func1();
2774 gcc accepts this program
2776 else
2777 tcc_error("cannot cast from/to void");
2779 if (dt->t & VT_CONSTANT)
2780 tcc_warning("assignment of read-only location");
2781 switch(dbt) {
2782 case VT_PTR:
2783 /* special cases for pointers */
2784 /* '0' can also be a pointer */
2785 if (is_null_pointer(vtop))
2786 goto type_ok;
2787 /* accept implicit pointer to integer cast with warning */
2788 if (is_integer_btype(sbt)) {
2789 tcc_warning("assignment makes pointer from integer without a cast");
2790 goto type_ok;
2792 type1 = pointed_type(dt);
2793 /* a function is implicitly a function pointer */
2794 if (sbt == VT_FUNC) {
2795 if ((type1->t & VT_BTYPE) != VT_VOID &&
2796 !is_compatible_types(pointed_type(dt), st))
2797 tcc_warning("assignment from incompatible pointer type");
2798 goto type_ok;
2800 if (sbt != VT_PTR)
2801 goto error;
2802 type2 = pointed_type(st);
2803 if ((type1->t & VT_BTYPE) == VT_VOID ||
2804 (type2->t & VT_BTYPE) == VT_VOID) {
2805 /* void * can match anything */
2806 } else {
2807 /* exact type match, except for qualifiers */
2808 tmp_type1 = *type1;
2809 tmp_type2 = *type2;
2810 tmp_type1.t &= ~(VT_CONSTANT | VT_VOLATILE);
2811 tmp_type2.t &= ~(VT_CONSTANT | VT_VOLATILE);
2812 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
2813 /* Like GCC, don't warn by default for mere changes
2814 in pointer target signedness. Do warn for different
2815 base types, though, in particular for unsigned enums
2816 and signed int targets. */
2817 if ((tmp_type1.t & (VT_DEFSIGN | VT_UNSIGNED)) !=
2818 (tmp_type2.t & (VT_DEFSIGN | VT_UNSIGNED)) &&
2819 (tmp_type1.t & VT_BTYPE) == (tmp_type2.t & VT_BTYPE))
2821 else
2822 tcc_warning("assignment from incompatible pointer type");
2825 /* check const and volatile */
2826 if ((!(type1->t & VT_CONSTANT) && (type2->t & VT_CONSTANT)) ||
2827 (!(type1->t & VT_VOLATILE) && (type2->t & VT_VOLATILE)))
2828 tcc_warning("assignment discards qualifiers from pointer target type");
2829 break;
2830 case VT_BYTE:
2831 case VT_SHORT:
2832 case VT_INT:
2833 case VT_LLONG:
2834 if (sbt == VT_PTR || sbt == VT_FUNC) {
2835 tcc_warning("assignment makes integer from pointer without a cast");
2836 } else if (sbt == VT_STRUCT) {
2837 goto case_VT_STRUCT;
2839 /* XXX: more tests */
2840 break;
2841 case VT_STRUCT:
2842 case_VT_STRUCT:
2843 tmp_type1 = *dt;
2844 tmp_type2 = *st;
2845 tmp_type1.t &= ~(VT_CONSTANT | VT_VOLATILE);
2846 tmp_type2.t &= ~(VT_CONSTANT | VT_VOLATILE);
2847 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
2848 error:
2849 type_to_str(buf1, sizeof(buf1), st, NULL);
2850 type_to_str(buf2, sizeof(buf2), dt, NULL);
2851 tcc_error("cannot cast '%s' to '%s'", buf1, buf2);
2853 break;
2855 type_ok:
2856 gen_cast(dt);
2859 /* store vtop in lvalue pushed on stack */
2860 ST_FUNC void vstore(void)
2862 int sbt, dbt, ft, r, t, size, align, bit_size, bit_pos, rc, delayed_cast;
2864 ft = vtop[-1].type.t;
2865 sbt = vtop->type.t & VT_BTYPE;
2866 dbt = ft & VT_BTYPE;
2867 if ((((sbt == VT_INT || sbt == VT_SHORT) && dbt == VT_BYTE) ||
2868 (sbt == VT_INT && dbt == VT_SHORT))
2869 && !(vtop->type.t & VT_BITFIELD)) {
2870 /* optimize char/short casts */
2871 delayed_cast = VT_MUSTCAST;
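/* the truncation is deferred: the VT_MUSTCAST flag makes a later
   gen_cast() call force_charshort_cast() before the value is reused */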
2872 vtop->type.t = (ft & VT_TYPE & ~VT_BITFIELD &
2873 ((1 << VT_STRUCT_SHIFT) - 1));
2874 /* XXX: factorize */
2875 if (ft & VT_CONSTANT)
2876 tcc_warning("assignment of read-only location");
2877 } else {
2878 delayed_cast = 0;
2879 if (!(ft & VT_BITFIELD))
2880 gen_assign_cast(&vtop[-1].type);
2883 if (sbt == VT_STRUCT) {
2884 /* if structure, only generate pointer */
2885 /* structure assignment : generate memcpy */
2886 /* XXX: optimize if small size */
2887 size = type_size(&vtop->type, &align);
2889 /* destination */
2890 vswap();
2891 vtop->type.t = VT_PTR;
2892 gaddrof();
2894 /* address of memcpy() */
2895 #ifdef TCC_ARM_EABI
2896 if(!(align & 7))
2897 vpush_global_sym(&func_old_type, TOK_memcpy8);
2898 else if(!(align & 3))
2899 vpush_global_sym(&func_old_type, TOK_memcpy4);
2900 else
2901 #endif
2902 /* Use memmove, rather than memcpy, as dest and src may be the same: */
2903 vpush_global_sym(&func_old_type, TOK_memmove);
2905 vswap();
2906 /* source */
2907 vpushv(vtop - 2);
2908 vtop->type.t = VT_PTR;
2909 gaddrof();
2910 /* type size */
2911 vpushi(size);
2912 gfunc_call(3);
2914 /* leave source on stack */
2915 } else if (ft & VT_BITFIELD) {
2916 /* bitfield store handling */
2918 /* save lvalue as expression result (example: s.b = s.a = n;) */
2919 vdup(), vtop[-1] = vtop[-2];
2921 bit_pos = (ft >> VT_STRUCT_SHIFT) & 0x3f;
2922 bit_size = (ft >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
2923 /* remove bit field info to avoid loops */
2924 vtop[-1].type.t = ft & ~VT_BITFIELD & ((1 << VT_STRUCT_SHIFT) - 1);
2926 if((ft & VT_BTYPE) == VT_BOOL) {
2927 gen_cast(&vtop[-1].type);
2928 vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
2931 /* duplicate destination */
2932 vdup();
2933 vtop[-1] = vtop[-2];
2935 /* mask and shift source */
2936 if((ft & VT_BTYPE) != VT_BOOL) {
2937 if((ft & VT_BTYPE) == VT_LLONG) {
2938 vpushll((1ULL << bit_size) - 1ULL);
2939 } else {
2940 vpushi((1 << bit_size) - 1);
2942 gen_op('&');
2944 vpushi(bit_pos);
2945 gen_op(TOK_SHL);
2946 /* load destination, mask and or with source */
2947 vswap();
2948 if((ft & VT_BTYPE) == VT_LLONG) {
2949 vpushll(~(((1ULL << bit_size) - 1ULL) << bit_pos));
2950 } else {
2951 vpushi(~(((1 << bit_size) - 1) << bit_pos));
2953 gen_op('&');
2954 gen_op('|');
2955 /* store result */
2956 vstore();
2957 /* ... and discard */
2958 vpop();
2960 } else {
2961 #ifdef CONFIG_TCC_BCHECK
2962 /* bound check case */
2963 if (vtop[-1].r & VT_MUSTBOUND) {
2964 vswap();
2965 gbound();
2966 vswap();
2968 #endif
2969 rc = RC_INT;
2970 if (is_float(ft)) {
2971 rc = RC_FLOAT;
2972 #ifdef TCC_TARGET_X86_64
2973 if ((ft & VT_BTYPE) == VT_LDOUBLE) {
2974 rc = RC_ST0;
2975 } else if ((ft & VT_BTYPE) == VT_QFLOAT) {
2976 rc = RC_FRET;
2978 #endif
2980 r = gv(rc); /* generate value */
2981 /* if lvalue was saved on stack, must read it */
2982 if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
2983 SValue sv;
2984 t = get_reg(RC_INT);
2985 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2986 sv.type.t = VT_PTR;
2987 #else
2988 sv.type.t = VT_INT;
2989 #endif
2990 sv.r = VT_LOCAL | VT_LVAL;
2991 sv.c.i = vtop[-1].c.i;
2992 load(t, &sv);
2993 vtop[-1].r = t | VT_LVAL;
2995 /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
2996 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2997 if (((ft & VT_BTYPE) == VT_QLONG) || ((ft & VT_BTYPE) == VT_QFLOAT)) {
2998 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
2999 #else
3000 if ((ft & VT_BTYPE) == VT_LLONG) {
3001 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
3002 #endif
3003 vtop[-1].type.t = load_type;
3004 store(r, vtop - 1);
3005 vswap();
3006 /* convert to int to increment easily */
3007 vtop->type.t = addr_type;
3008 gaddrof();
3009 vpushi(load_size);
3010 gen_op('+');
3011 vtop->r |= VT_LVAL;
3012 vswap();
3013 vtop[-1].type.t = load_type;
3014 /* XXX: it works because r2 is spilled last ! */
3015 store(vtop->r2, vtop - 1);
3016 } else {
3017 store(r, vtop - 1);
3020 vswap();
3021 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
3022 vtop->r |= delayed_cast;
3026 /* 'post' selects POST vs. PRE increment/decrement; c is the token ++ or -- */
3027 ST_FUNC void inc(int post, int c)
3029 test_lvalue();
3030 vdup(); /* save lvalue */
3031 if (post) {
3032 gv_dup(); /* duplicate value */
3033 vrotb(3);
3034 vrotb(3);
3036 /* add constant */
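/* TOK_INC and TOK_DEC straddle TOK_MID, so c - TOK_MID yields +1 or -1 */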
3037 vpushi(c - TOK_MID);
3038 gen_op('+');
3039 vstore(); /* store value */
3040 if (post)
3041 vpop(); /* if post op, return saved value */
3044 ST_FUNC void parse_mult_str (CString *astr, const char *msg)
3046 /* read the string */
3047 if (tok != TOK_STR)
3048 expect(msg);
3049 cstr_new(astr);
3050 while (tok == TOK_STR) {
3051 /* XXX: add \0 handling too ? */
3052 cstr_cat(astr, tokc.str.data, -1);
3053 next();
3055 cstr_ccat(astr, '\0');
3058 /* For i >= 1, returns the index of the highest set bit plus one
3059 (log2(i)+1 when i is a power of two). If i is 0, returns 0. */
3060 static int exact_log2p1(int i)
3062 int ret;
3063 if (!i)
3064 return 0;
3065 for (ret = 1; i >= 1 << 8; ret += 8)
3066 i >>= 8;
3067 if (i >= 1 << 4)
3068 ret += 4, i >>= 4;
3069 if (i >= 1 << 2)
3070 ret += 2, i >>= 2;
3071 if (i >= 1 << 1)
3072 ret++;
3073 return ret;
3076 /* Parse GNUC __attribute__ extension. Currently, the following
3077 extensions are recognized:
3078 - aligned(n) : set data/function alignment.
3079 - packed : force data alignment to 1
3080 - section(x) : generate data/code in this section.
3081 - unused : currently ignored, but may be used someday.
3082 - regparm(n) : pass function parameters in registers (i386 only)
3084 static void parse_attribute(AttributeDef *ad)
3086 int t, n;
3087 CString astr;
3089 while (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2) {
3090 next();
3091 skip('(');
3092 skip('(');
3093 while (tok != ')') {
3094 if (tok < TOK_IDENT)
3095 expect("attribute name");
3096 t = tok;
3097 next();
3098 switch(t) {
3099 case TOK_SECTION1:
3100 case TOK_SECTION2:
3101 skip('(');
3102 parse_mult_str(&astr, "section name");
3103 ad->section = find_section(tcc_state, (char *)astr.data);
3104 skip(')');
3105 cstr_free(&astr);
3106 break;
3107 case TOK_ALIAS1:
3108 case TOK_ALIAS2:
3109 skip('(');
3110 parse_mult_str(&astr, "alias(\"target\")");
3111 ad->alias_target = /* save string as token, for later */
3112 tok_alloc((char*)astr.data, astr.size-1)->tok;
3113 skip(')');
3114 cstr_free(&astr);
3115 break;
3116 case TOK_VISIBILITY1:
3117 case TOK_VISIBILITY2:
3118 skip('(');
3119 parse_mult_str(&astr,
3120 "visibility(\"default|hidden|internal|protected\")");
3121 if (!strcmp (astr.data, "default"))
3122 ad->a.visibility = STV_DEFAULT;
3123 else if (!strcmp (astr.data, "hidden"))
3124 ad->a.visibility = STV_HIDDEN;
3125 else if (!strcmp (astr.data, "internal"))
3126 ad->a.visibility = STV_INTERNAL;
3127 else if (!strcmp (astr.data, "protected"))
3128 ad->a.visibility = STV_PROTECTED;
3129 else
3130 expect("visibility(\"default|hidden|internal|protected\")");
3131 skip(')');
3132 cstr_free(&astr);
3133 break;
3134 case TOK_ALIGNED1:
3135 case TOK_ALIGNED2:
3136 if (tok == '(') {
3137 next();
3138 n = expr_const();
3139 if (n <= 0 || (n & (n - 1)) != 0)
3140 tcc_error("alignment must be a positive power of two");
3141 skip(')');
3142 } else {
3143 n = MAX_ALIGN;
3145 ad->a.aligned = exact_log2p1(n);
3146 if (n != 1 << (ad->a.aligned - 1))
3147 tcc_error("alignment of %d is larger than implemented", n);
3148 break;
3149 case TOK_PACKED1:
3150 case TOK_PACKED2:
3151 ad->a.packed = 1;
3152 break;
3153 case TOK_WEAK1:
3154 case TOK_WEAK2:
3155 ad->a.weak = 1;
3156 break;
3157 case TOK_UNUSED1:
3158 case TOK_UNUSED2:
3159 /* currently, no need to handle it because tcc does not
3160 track unused objects */
3161 break;
3162 case TOK_NORETURN1:
3163 case TOK_NORETURN2:
3164 /* currently ignored: tcc does not make use of
3165 noreturn information */
3166 break;
3167 case TOK_CDECL1:
3168 case TOK_CDECL2:
3169 case TOK_CDECL3:
3170 ad->a.func_call = FUNC_CDECL;
3171 break;
3172 case TOK_STDCALL1:
3173 case TOK_STDCALL2:
3174 case TOK_STDCALL3:
3175 ad->a.func_call = FUNC_STDCALL;
3176 break;
3177 #ifdef TCC_TARGET_I386
3178 case TOK_REGPARM1:
3179 case TOK_REGPARM2:
3180 skip('(');
3181 n = expr_const();
3182 if (n > 3)
3183 n = 3;
3184 else if (n < 0)
3185 n = 0;
3186 if (n > 0)
3187 ad->a.func_call = FUNC_FASTCALL1 + n - 1;
3188 skip(')');
3189 break;
3190 case TOK_FASTCALL1:
3191 case TOK_FASTCALL2:
3192 case TOK_FASTCALL3:
3193 ad->a.func_call = FUNC_FASTCALLW;
3194 break;
3195 #endif
3196 case TOK_MODE:
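/* the mode is stored as the basic type plus one so that zero can
   mean "no mode attribute" (decoded again in parse_btype) */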
3197 skip('(');
3198 switch(tok) {
3199 case TOK_MODE_DI:
3200 ad->a.mode = VT_LLONG + 1;
3201 break;
3202 case TOK_MODE_QI:
3203 ad->a.mode = VT_BYTE + 1;
3204 break;
3205 case TOK_MODE_HI:
3206 ad->a.mode = VT_SHORT + 1;
3207 break;
3208 case TOK_MODE_SI:
3209 case TOK_MODE_word:
3210 ad->a.mode = VT_INT + 1;
3211 break;
3212 default:
3213 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok, NULL));
3214 break;
3216 next();
3217 skip(')');
3218 break;
3219 case TOK_DLLEXPORT:
3220 ad->a.func_export = 1;
3221 break;
3222 case TOK_DLLIMPORT:
3223 ad->a.func_import = 1;
3224 break;
3225 default:
3226 if (tcc_state->warn_unsupported)
3227 tcc_warning("'%s' attribute ignored", get_tok_str(t, NULL));
3228 /* skip parameters */
3229 if (tok == '(') {
3230 int parenthesis = 0;
3231 do {
3232 if (tok == '(')
3233 parenthesis++;
3234 else if (tok == ')')
3235 parenthesis--;
3236 next();
3237 } while (parenthesis && tok != -1);
3239 break;
3241 if (tok != ',')
3242 break;
3243 next();
3245 skip(')');
3246 skip(')');
3250 static Sym * find_field (CType *type, int v)
3252 Sym *s = type->ref;
3253 v |= SYM_FIELD;
3254 while ((s = s->next) != NULL) {
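/* descend into anonymous struct/union members so that their fields
   are visible from the enclosing struct */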
3255 if ((s->v & SYM_FIELD) &&
3256 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3257 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3258 Sym *ret = find_field (&s->type, v);
3259 if (ret)
3260 return ret;
3262 if (s->v == v)
3263 break;
3265 return s;
3268 static void struct_add_offset (Sym *s, int offset)
3270 while ((s = s->next) != NULL) {
3271 if ((s->v & SYM_FIELD) &&
3272 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3273 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3274 struct_add_offset(s->type.ref, offset);
3275 } else
3276 s->c += offset;
3280 static void struct_layout(CType *type, AttributeDef *ad)
3282 int align, maxalign, offset, c, bit_pos, bt, prevbt, prev_bit_size;
3283 int pcc = !tcc_state->ms_bitfields;
3284 Sym *f;
3285 if (ad->a.aligned)
3286 maxalign = 1 << (ad->a.aligned - 1);
3287 else
3288 maxalign = 1;
3289 offset = 0;
3290 c = 0;
3291 bit_pos = 0;
3292 prevbt = VT_STRUCT; /* make it never match */
3293 prev_bit_size = 0;
3294 for (f = type->ref->next; f; f = f->next) {
3295 int typealign, bit_size;
3296 int size = type_size(&f->type, &typealign);
3297 if (f->type.t & VT_BITFIELD)
3298 bit_size = (f->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
3299 else
3300 bit_size = -1;
3301 if (bit_size == 0 && pcc) {
3302 /* Zero-width bit-fields in PCC mode aren't affected
3303 by any packing (attribute or pragma). */
3304 align = typealign;
3305 } else if (f->r > 1) {
3306 align = f->r;
3307 } else if (ad->a.packed || f->r == 1) {
3308 align = 1;
3309 /* Packed fields or packed records don't let the base type
3310 influence the record's type alignment. */
3311 typealign = 1;
3312 } else {
3313 align = typealign;
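/* not a struct, so it is a union: every member starts at offset 0
   and the overall size is that of the largest member */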
3315 if (type->ref->type.t != TOK_STRUCT) {
3316 if (pcc && bit_size >= 0)
3317 size = (bit_size + 7) >> 3;
3318 /* Bit position is already zero from our caller. */
3319 offset = 0;
3320 if (size > c)
3321 c = size;
3322 } else if (bit_size < 0) {
3323 int addbytes = pcc ? (bit_pos + 7) >> 3 : 0;
3324 prevbt = VT_STRUCT;
3325 prev_bit_size = 0;
3326 c = (c + addbytes + align - 1) & -align;
3327 offset = c;
3328 if (size > 0)
3329 c += size;
3330 bit_pos = 0;
3331 } else {
3332 /* A bit-field. Layout is more complicated. There are two
3333 options TCC implements: PCC compatible and MS compatible
3334 (PCC compatible is what GCC uses for almost all targets).
3335 In PCC layout the overall size of the struct (in c) is
3336 _excluding_ the current run of bit-fields (that is,
3337 there's at least additional bit_pos bits after c). In
3338 MS layout c does include the current run of bit-fields.
3340 This matters for calculating the natural alignment buckets
3341 in PCC mode. */
3343 /* 'align' will be used to influence the record's alignment,
3344 so it's the max of specified and type alignment, except
3345 in certain cases that depend on the mode. */
3346 if (align < typealign)
3347 align = typealign;
3348 if (pcc) {
3349 /* In PCC layout a non-packed bit-field is placed adjacent
3350 to the preceding bit-fields, except if it would overflow
3351 its container (depending on base type) or it's a zero-width
3352 bit-field. Packed non-zero-width bit-fields always are
3353 placed adjacent. */
3354 int ofs = (c * 8 + bit_pos) % (typealign * 8);
3355 int ofs2 = ofs + bit_size + (typealign * 8) - 1;
3356 if (bit_size == 0 ||
3357 (typealign != 1 &&
3358 (ofs2 / (typealign * 8)) > (size/typealign))) {
3359 c = (c + ((bit_pos + 7) >> 3) + typealign - 1) & -typealign;
3360 bit_pos = 0;
3362 offset = c;
3363 /* In PCC layout named bit-fields influence the alignment
3364 of the containing struct using the base type's alignment,
3365 except for packed fields (which here have correct
3366 align/typealign). */
3367 if ((f->v & SYM_FIRST_ANOM))
3368 align = 1;
3369 } else {
3370 bt = f->type.t & VT_BTYPE;
3371 if ((bit_pos + bit_size > size * 8) ||
3372 (bit_size > 0) == (bt != prevbt)) {
3373 c = (c + typealign - 1) & -typealign;
3374 offset = c;
3375 bit_pos = 0;
3376 /* In MS bitfield mode a bit-field run always uses
3377 at least as many bits as the underlying type.
3378 To start a new run it's also required that this
3379 or the last bit-field had non-zero width. */
3380 if (bit_size || prev_bit_size)
3381 c += size;
3383 /* In MS layout the record's alignment is normally
3384 influenced by the field, except for a zero-width
3385 field at the start of a run (but by further zero-width
3386 fields it is again). */
3387 if (bit_size == 0 && prevbt != bt)
3388 align = 1;
3389 prevbt = bt;
3390 prev_bit_size = bit_size;
3392 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
3393 | (bit_pos << VT_STRUCT_SHIFT);
3394 bit_pos += bit_size;
3395 if (pcc && bit_pos >= size * 8) {
3396 c += size;
3397 bit_pos -= size * 8;
3400 if (align > maxalign)
3401 maxalign = align;
3402 #if 0
3403 printf("set field %s offset=%d c=%d",
3404 get_tok_str(f->v & ~SYM_FIELD, NULL), offset, c);
3405 if (f->type.t & VT_BITFIELD) {
3406 printf(" pos=%d size=%d",
3407 (f->type.t >> VT_STRUCT_SHIFT) & 0x3f,
3408 (f->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f);
3410 printf("\n");
3411 #endif
3413 if (f->v & SYM_FIRST_ANOM && (f->type.t & VT_BTYPE) == VT_STRUCT) {
3414 Sym *ass;
3415 /* An anonymous struct/union. Adjust member offsets
3416 to reflect the real offset of our containing struct.
3417 Also set the offset of this anon member inside
3418 the outer struct to be zero. That way it
3419 works when accessing the field offset directly
3420 (from base object), as well as when recursing
3421 into members in initializer handling. */
3422 int v2 = f->type.ref->v;
3423 if (!(v2 & SYM_FIELD) &&
3424 (v2 & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
3425 Sym **pps;
3426 /* This happens only with MS extensions. The
3427 anon member has a named struct type, so it
3428 potentially is shared with other references.
3429 We need to unshare members so we can modify
3430 them. */
3431 ass = f->type.ref;
3432 f->type.ref = sym_push(anon_sym++ | SYM_FIELD,
3433 &f->type.ref->type, 0,
3434 f->type.ref->c);
3435 pps = &f->type.ref->next;
3436 while ((ass = ass->next) != NULL) {
3437 *pps = sym_push(ass->v, &ass->type, 0, ass->c);
3438 pps = &((*pps)->next);
3440 *pps = NULL;
3442 struct_add_offset(f->type.ref, offset);
3443 f->c = 0;
3444 } else {
3445 f->c = offset;
3448 f->r = 0;
3450 /* store size and alignment */
3451 type->ref->c = (c + (pcc ? (bit_pos + 7) >> 3 : 0)
3452 + maxalign - 1) & -maxalign;
3453 type->ref->r = maxalign;
3456 /* enum/struct/union declaration. u is either VT_ENUM or VT_STRUCT */
3457 static void struct_decl(CType *type, AttributeDef *ad, int u)
3459 int a, v, size, align, flexible, alignoverride;
3460 long c;
3461 int bit_size, bsize, bt;
3462 Sym *s, *ss, **ps;
3463 AttributeDef ad1;
3464 CType type1, btype;
3466 a = tok; /* save decl type */
3467 next();
3468 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
3469 parse_attribute(ad);
3470 if (tok != '{') {
3471 v = tok;
3472 next();
3473 /* struct already defined ? return it */
3474 if (v < TOK_IDENT)
3475 expect("struct/union/enum name");
3476 s = struct_find(v);
3477 if (s && (s->scope == local_scope || (tok != '{' && tok != ';'))) {
3478 if (s->type.t != a)
3479 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
3480 goto do_decl;
3482 } else {
3483 v = anon_sym++;
3485 /* Record the original enum/struct/union token. */
3486 type1.t = a;
3487 type1.ref = NULL;
3488 /* we put an undefined size for struct/union */
3489 s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
3490 s->r = 0; /* default alignment is zero, as in gcc */
3491 /* put struct/union/enum name in type */
3492 do_decl:
3493 type->t = u;
3494 type->ref = s;
3496 if (tok == '{') {
3497 next();
3498 if (s->c != -1)
3499 tcc_error("struct/union/enum already defined");
3500 /* cannot be empty */
3501 c = 0;
3502 /* empty enums are not allowed */
3503 if (a == TOK_ENUM) {
3504 int seen_neg = 0;
3505 int seen_wide = 0;
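/* seen_neg: at least one negative enumerator (the enum cannot be
   treated as unsigned); seen_wide: a value that does not fit in
   32 bits (size_type is used instead of int_type) */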
3506 for(;;) {
3507 CType *t = &int_type;
3508 v = tok;
3509 if (v < TOK_UIDENT)
3510 expect("identifier");
3511 ss = sym_find(v);
3512 if (ss && !local_stack)
3513 tcc_error("redefinition of enumerator '%s'",
3514 get_tok_str(v, NULL));
3515 next();
3516 if (tok == '=') {
3517 next();
3518 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
3519 c = expr_const64();
3520 #else
3521 /* We really want to support long long enums
3522 on i386 as well, but the Sym structure only
3523 holds a 'long' for associated constants,
3524 and enlarging it would bump its size (no
3525 available padding). So punt for now. */
3526 c = expr_const();
3527 #endif
3529 if (c < 0)
3530 seen_neg = 1;
3531 if (c != (int)c && (unsigned long)c != (unsigned int)c)
3532 seen_wide = 1, t = &size_type;
3533 /* enum symbols have static storage */
3534 ss = sym_push(v, t, VT_CONST, c);
3535 ss->type.t |= VT_STATIC;
3536 if (tok != ',')
3537 break;
3538 next();
3539 c++;
3540 /* NOTE: we accept a trailing comma */
3541 if (tok == '}')
3542 break;
3544 if (!seen_neg)
3545 s->a.unsigned_enum = 1;
3546 s->c = type_size(seen_wide ? &size_type : &int_type, &align);
3547 skip('}');
3548 } else {
3549 ps = &s->next;
3550 flexible = 0;
3551 while (tok != '}') {
3552 if (!parse_btype(&btype, &ad1)) {
3553 skip(';');
3554 continue;
3556 while (1) {
3557 if (flexible)
3558 tcc_error("flexible array member '%s' not at the end of struct",
3559 get_tok_str(v, NULL));
3560 bit_size = -1;
3561 v = 0;
3562 type1 = btype;
3563 if (tok != ':') {
3564 type_decl(&type1, &ad1, &v, TYPE_DIRECT | TYPE_ABSTRACT);
3565 if (v == 0) {
3566 if ((type1.t & VT_BTYPE) != VT_STRUCT)
3567 expect("identifier");
3568 else {
3569 int v = btype.ref->v;
3570 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
3571 if (tcc_state->ms_extensions == 0)
3572 expect("identifier");
3576 if (type_size(&type1, &align) < 0) {
3577 if ((a == TOK_STRUCT) && (type1.t & VT_ARRAY) && c)
3578 flexible = 1;
3579 else
3580 tcc_error("field '%s' has incomplete type",
3581 get_tok_str(v, NULL));
3583 if ((type1.t & VT_BTYPE) == VT_FUNC ||
3584 (type1.t & (VT_TYPEDEF | VT_STATIC | VT_EXTERN | VT_INLINE)))
3585 tcc_error("invalid type for '%s'",
3586 get_tok_str(v, NULL));
3588 if (tok == ':') {
3589 next();
3590 bit_size = expr_const();
3591 /* XXX: handle v = 0 case for messages */
3592 if (bit_size < 0)
3593 tcc_error("negative width in bit-field '%s'",
3594 get_tok_str(v, NULL));
3595 if (v && bit_size == 0)
3596 tcc_error("zero width for bit-field '%s'",
3597 get_tok_str(v, NULL));
3598 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
3599 parse_attribute(&ad1);
3601 size = type_size(&type1, &align);
3602 /* Only remember non-default alignment. */
3603 alignoverride = 0;
3604 if (ad1.a.aligned) {
3605 int speca = 1 << (ad1.a.aligned - 1);
3606 alignoverride = speca;
3607 } else if (ad1.a.packed || ad->a.packed) {
3608 alignoverride = 1;
3609 } else if (*tcc_state->pack_stack_ptr) {
3610 if (align > *tcc_state->pack_stack_ptr)
3611 alignoverride = *tcc_state->pack_stack_ptr;
3613 if (bit_size >= 0) {
3614 bt = type1.t & VT_BTYPE;
3615 if (bt != VT_INT &&
3616 bt != VT_BYTE &&
3617 bt != VT_SHORT &&
3618 bt != VT_BOOL &&
3619 bt != VT_ENUM &&
3620 bt != VT_LLONG)
3621 tcc_error("bitfields must have scalar type");
3622 bsize = size * 8;
3623 if (bit_size > bsize) {
3624 tcc_error("width of '%s' exceeds its type",
3625 get_tok_str(v, NULL));
3626 } else if (bit_size == bsize) {
3627 /* no need for bit fields */
3629 } else {
3630 type1.t |= VT_BITFIELD |
3631 (0 << VT_STRUCT_SHIFT) |
3632 (bit_size << (VT_STRUCT_SHIFT + 6));
3635 if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
3636 /* Remember we've seen a real field to check
3637 for placement of flexible array member. */
3638 c = 1;
3640 /* If member is a struct or bit-field, enforce
3641 placing into the struct (as anonymous). */
3642 if (v == 0 &&
3643 ((type1.t & VT_BTYPE) == VT_STRUCT ||
3644 bit_size >= 0)) {
3645 v = anon_sym++;
3647 if (v) {
3648 ss = sym_push(v | SYM_FIELD, &type1, alignoverride, 0);
3649 *ps = ss;
3650 ps = &ss->next;
3652 if (tok == ';' || tok == TOK_EOF)
3653 break;
3654 skip(',');
3656 skip(';');
3658 skip('}');
3659 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
3660 parse_attribute(ad);
3661 struct_layout(type, ad);
3666 /* return 1 if the basic type is a size specifier (short, long, long long) */
3667 ST_FUNC int is_btype_size(int bt)
3669 return bt == VT_SHORT || bt == VT_LONG || bt == VT_LLONG;
3672 /* Add type qualifiers to a type. If the type is an array then the qualifiers
3673 are added to the element type, copied because it could be a typedef. */
3674 static void parse_btype_qualify(CType *type, int qualifiers)
3676 while (type->t & VT_ARRAY) {
3677 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
3678 type = &type->ref->type;
3680 type->t |= qualifiers;
3683 /* return 0 if no type declaration. otherwise, return the basic type
3684 and skip it.
3686 static int parse_btype(CType *type, AttributeDef *ad)
3688 int t, u, bt_size, complete, type_found, typespec_found;
3689 Sym *s;
3690 CType type1;
3692 memset(ad, 0, sizeof(AttributeDef));
3693 complete = 0;
3694 type_found = 0;
3695 typespec_found = 0;
3696 t = 0;
3697 while(1) {
3698 switch(tok) {
3699 case TOK_EXTENSION:
3700 /* currently, we really ignore extension */
3701 next();
3702 continue;
3704 /* basic types */
3705 case TOK_CHAR:
3706 u = VT_BYTE;
3707 basic_type:
3708 next();
3709 basic_type1:
3710 if (complete)
3711 tcc_error("too many basic types");
3712 t |= u;
3713 bt_size = is_btype_size (u & VT_BTYPE);
3714 if (u == VT_INT || (!bt_size && !(t & VT_TYPEDEF)))
3715 complete = 1;
3716 typespec_found = 1;
3717 break;
3718 case TOK_VOID:
3719 u = VT_VOID;
3720 goto basic_type;
3721 case TOK_SHORT:
3722 u = VT_SHORT;
3723 goto basic_type;
3724 case TOK_INT:
3725 u = VT_INT;
3726 goto basic_type;
3727 case TOK_LONG:
3728 next();
3729 if ((t & VT_BTYPE) == VT_DOUBLE) {
3730 #ifndef TCC_TARGET_PE
3731 t = (t & ~VT_BTYPE) | VT_LDOUBLE;
3732 #endif
3733 } else if ((t & VT_BTYPE) == VT_LONG) {
3734 t = (t & ~VT_BTYPE) | VT_LLONG;
3735 } else {
3736 u = VT_LONG;
3737 goto basic_type1;
3739 break;
3740 #ifdef TCC_TARGET_ARM64
3741 case TOK_UINT128:
3742 /* GCC's __uint128_t appears in some Linux header files. Make it a
3743 synonym for long double to get the size and alignment right. */
3744 u = VT_LDOUBLE;
3745 goto basic_type;
3746 #endif
3747 case TOK_BOOL:
3748 u = VT_BOOL;
3749 goto basic_type;
3750 case TOK_FLOAT:
3751 u = VT_FLOAT;
3752 goto basic_type;
3753 case TOK_DOUBLE:
3754 next();
3755 if ((t & VT_BTYPE) == VT_LONG) {
3756 #ifdef TCC_TARGET_PE
3757 t = (t & ~VT_BTYPE) | VT_DOUBLE;
3758 #else
3759 t = (t & ~VT_BTYPE) | VT_LDOUBLE;
3760 #endif
3761 } else {
3762 u = VT_DOUBLE;
3763 goto basic_type1;
3765 break;
3766 case TOK_ENUM:
3767 struct_decl(&type1, ad, VT_ENUM);
3768 basic_type2:
3769 u = type1.t;
3770 type->ref = type1.ref;
3771 goto basic_type1;
3772 case TOK_STRUCT:
3773 case TOK_UNION:
3774 struct_decl(&type1, ad, VT_STRUCT);
3775 goto basic_type2;
3777 /* type modifiers */
3778 case TOK_CONST1:
3779 case TOK_CONST2:
3780 case TOK_CONST3:
3781 type->t = t;
3782 parse_btype_qualify(type, VT_CONSTANT);
3783 t = type->t;
3784 next();
3785 break;
3786 case TOK_VOLATILE1:
3787 case TOK_VOLATILE2:
3788 case TOK_VOLATILE3:
3789 type->t = t;
3790 parse_btype_qualify(type, VT_VOLATILE);
3791 t = type->t;
3792 next();
3793 break;
3794 case TOK_SIGNED1:
3795 case TOK_SIGNED2:
3796 case TOK_SIGNED3:
3797 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
3798 tcc_error("signed and unsigned modifier");
3799 typespec_found = 1;
3800 t |= VT_DEFSIGN;
3801 next();
3802 break;
3803 case TOK_REGISTER:
3804 case TOK_AUTO:
3805 case TOK_RESTRICT1:
3806 case TOK_RESTRICT2:
3807 case TOK_RESTRICT3:
3808 next();
3809 break;
3810 case TOK_UNSIGNED:
3811 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
3812 tcc_error("signed and unsigned modifier");
3813 t |= VT_DEFSIGN | VT_UNSIGNED;
3814 next();
3815 typespec_found = 1;
3816 break;
3818 /* storage */
3819 case TOK_EXTERN:
3820 t |= VT_EXTERN;
3821 next();
3822 break;
3823 case TOK_STATIC:
3824 t |= VT_STATIC;
3825 next();
3826 break;
3827 case TOK_TYPEDEF:
3828 t |= VT_TYPEDEF;
3829 next();
3830 break;
3831 case TOK_INLINE1:
3832 case TOK_INLINE2:
3833 case TOK_INLINE3:
3834 t |= VT_INLINE;
3835 next();
3836 break;
3838 /* GNUC attribute */
3839 case TOK_ATTRIBUTE1:
3840 case TOK_ATTRIBUTE2:
3841 parse_attribute(ad);
3842 if (ad->a.mode) {
3843 u = ad->a.mode -1;
3844 t = (t & ~VT_BTYPE) | u;
3846 break;
3847 /* GNUC typeof */
3848 case TOK_TYPEOF1:
3849 case TOK_TYPEOF2:
3850 case TOK_TYPEOF3:
3851 next();
3852 parse_expr_type(&type1);
3853 /* remove all storage modifiers except typedef */
3854 type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
3855 goto basic_type2;
3856 default:
3857 if (typespec_found)
3858 goto the_end;
3859 s = sym_find(tok);
3860 if (!s || !(s->type.t & VT_TYPEDEF))
3861 goto the_end;
3863 type->t = ((s->type.t & ~VT_TYPEDEF) |
3864 (t & ~(VT_CONSTANT | VT_VOLATILE)));
3865 type->ref = s->type.ref;
3866 if (t & (VT_CONSTANT | VT_VOLATILE))
3867 parse_btype_qualify(type, t & (VT_CONSTANT | VT_VOLATILE));
3868 t = type->t;
3870 if (s->r) {
3871 /* get attributes from typedef */
3872 if (0 == ad->a.aligned)
3873 ad->a.aligned = s->a.aligned;
3874 if (0 == ad->a.func_call)
3875 ad->a.func_call = s->a.func_call;
3876 ad->a.packed |= s->a.packed;
3878 next();
3879 typespec_found = 1;
3880 break;
3882 type_found = 1;
3884 the_end:
3885 if (tcc_state->char_is_unsigned) {
3886 if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
3887 t |= VT_UNSIGNED;
3890 /* long is never used as type */
3891 if ((t & VT_BTYPE) == VT_LONG)
3892 #if (!defined TCC_TARGET_X86_64 && !defined TCC_TARGET_ARM64) || \
3893 defined TCC_TARGET_PE
3894 t = (t & ~VT_BTYPE) | VT_INT;
3895 #else
3896 t = (t & ~VT_BTYPE) | VT_LLONG;
3897 #endif
3898 type->t = t;
3899 return type_found;
3902 /* convert a function parameter type (array to pointer and function to
3903 function pointer) */
3904 static inline void convert_parameter_type(CType *pt)
3906 /* remove const and volatile qualifiers (XXX: const could be used
3907 to indicate a const function parameter) */
3908 pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
3909 /* array must be transformed to pointer according to ANSI C */
3910 pt->t &= ~VT_ARRAY;
3911 if ((pt->t & VT_BTYPE) == VT_FUNC) {
3912 mk_pointer(pt);
3916 ST_FUNC void parse_asm_str(CString *astr)
3918 skip('(');
3919 parse_mult_str(astr, "string constant");
3922 /* Parse an asm label and return the token */
3923 static int asm_label_instr(void)
3925 int v;
3926 CString astr;
3928 next();
3929 parse_asm_str(&astr);
3930 skip(')');
3931 #ifdef ASM_DEBUG
3932 printf("asm_alias: \"%s\"\n", (char *)astr.data);
3933 #endif
3934 v = tok_alloc(astr.data, astr.size - 1)->tok;
3935 cstr_free(&astr);
3936 return v;
3939 static void post_type(CType *type, AttributeDef *ad, int storage)
3941 int n, l, t1, arg_size, align;
3942 Sym **plast, *s, *first;
3943 AttributeDef ad1;
3944 CType pt;
3946 if (tok == '(') {
3947 /* function declaration */
3948 next();
3949 l = 0;
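/* l records the prototype kind: 0 (not known yet), FUNC_NEW,
   FUNC_OLD or FUNC_ELLIPSIS */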
3950 first = NULL;
3951 plast = &first;
3952 arg_size = 0;
3953 if (tok != ')') {
3954 for(;;) {
3955 /* read param name and compute offset */
3956 if (l != FUNC_OLD) {
3957 if (!parse_btype(&pt, &ad1)) {
3958 if (l) {
3959 tcc_error("invalid type");
3960 } else {
3961 l = FUNC_OLD;
3962 goto old_proto;
3965 l = FUNC_NEW;
3966 if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
3967 break;
3968 type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT);
3969 if ((pt.t & VT_BTYPE) == VT_VOID)
3970 tcc_error("parameter declared as void");
3971 arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
3972 } else {
3973 old_proto:
3974 n = tok;
3975 if (n < TOK_UIDENT)
3976 expect("identifier");
3977 pt.t = VT_INT;
3978 next();
3980 convert_parameter_type(&pt);
3981 s = sym_push(n | SYM_FIELD, &pt, 0, 0);
3982 *plast = s;
3983 plast = &s->next;
3984 if (tok == ')')
3985 break;
3986 skip(',');
3987 if (l == FUNC_NEW && tok == TOK_DOTS) {
3988 l = FUNC_ELLIPSIS;
3989 next();
3990 break;
3994 /* if no parameters, then old type prototype */
3995 if (l == 0)
3996 l = FUNC_OLD;
3997 skip(')');
3998 /* NOTE: const is ignored in returned type as it has a special
3999 meaning in gcc / C++ */
4000 type->t &= ~VT_CONSTANT;
4001 /* some ancient pre-K&R C allows a function to return an array
4002 and the array brackets to be put after the arguments, such
4003 that "int c()[]" means something like "int[] c()" */
4004 if (tok == '[') {
4005 next();
4006 skip(']'); /* only handle simple "[]" */
4007 type->t |= VT_PTR;
4009 /* we push an anonymous symbol which will contain the function prototype */
4010 ad->a.func_args = arg_size;
4011 s = sym_push(SYM_FIELD, type, 0, l);
4012 s->a = ad->a;
4013 s->next = first;
4014 type->t = VT_FUNC;
4015 type->ref = s;
4016 } else if (tok == '[') {
4017 int saved_nocode_wanted = nocode_wanted;
4018 /* array definition */
4019 next();
4020 if (tok == TOK_RESTRICT1)
4021 next();
4022 n = -1;
4023 t1 = 0;
4024 if (tok != ']') {
4025 if (!local_stack || (storage & VT_STATIC))
4026 vpushi(expr_const());
4027 else {
4028 /* VLAs (which can only happen with local_stack && !VT_STATIC)
4029 length must always be evaluated, even under nocode_wanted,
4030 so that its size slot is initialized (e.g. under sizeof
4031 or typeof). */
4032 nocode_wanted = 0;
4033 gexpr();
4035 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4036 n = vtop->c.i;
4037 if (n < 0)
4038 tcc_error("invalid array size");
4039 } else {
4040 if (!is_integer_btype(vtop->type.t & VT_BTYPE))
4041 tcc_error("size of variable length array should be an integer");
4042 t1 = VT_VLA;
4045 skip(']');
4046 /* parse next post type */
4047 post_type(type, ad, storage);
4048 if (type->t == VT_FUNC)
4049 tcc_error("declaration of an array of functions");
4050 t1 |= type->t & VT_VLA;
4052 if (t1 & VT_VLA) {
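/* reserve a stack slot that will hold this dimension's runtime size;
   the product dimension * element_size is computed and stored below */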
4053 loc -= type_size(&int_type, &align);
4054 loc &= -align;
4055 n = loc;
4057 vla_runtime_type_size(type, &align);
4058 gen_op('*');
4059 vset(&int_type, VT_LOCAL|VT_LVAL, n);
4060 vswap();
4061 vstore();
4063 if (n != -1)
4064 vpop();
4065 nocode_wanted = saved_nocode_wanted;
4067 /* we push an anonymous symbol which will contain the array
4068 element type */
4069 s = sym_push(SYM_FIELD, type, 0, n);
4070 type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
4071 type->ref = s;
4075 /* Parse a type declaration (except basic type), and return the type
4076 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4077 expected. 'type' should contain the basic type. 'ad' is the
4078 attribute definition of the basic type. It can be modified by
4079 type_decl().
4081 static void type_decl(CType *type, AttributeDef *ad, int *v, int td)
4083 Sym *s;
4084 CType type1, *type2;
4085 int qualifiers, storage;
4087 while (tok == '*') {
4088 qualifiers = 0;
4089 redo:
4090 next();
4091 switch(tok) {
4092 case TOK_CONST1:
4093 case TOK_CONST2:
4094 case TOK_CONST3:
4095 qualifiers |= VT_CONSTANT;
4096 goto redo;
4097 case TOK_VOLATILE1:
4098 case TOK_VOLATILE2:
4099 case TOK_VOLATILE3:
4100 qualifiers |= VT_VOLATILE;
4101 goto redo;
4102 case TOK_RESTRICT1:
4103 case TOK_RESTRICT2:
4104 case TOK_RESTRICT3:
4105 goto redo;
4106 /* XXX: clarify attribute handling */
4107 case TOK_ATTRIBUTE1:
4108 case TOK_ATTRIBUTE2:
4109 parse_attribute(ad);
4110 break;
4112 mk_pointer(type);
4113 type->t |= qualifiers;
4116 /* recursive type */
4117 /* XXX: incorrect if abstract type for functions (e.g. 'int ()') */
4118 type1.t = 0; /* XXX: same as int */
4119 if (tok == '(') {
4120 next();
4121 /* XXX: this is not correct to modify 'ad' at this point, but
4122 the syntax is not clear */
4123 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
4124 parse_attribute(ad);
4125 type_decl(&type1, ad, v, td);
4126 skip(')');
4127 } else {
4128 /* type identifier */
4129 if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
4130 *v = tok;
4131 next();
4132 } else {
4133 if (!(td & TYPE_ABSTRACT))
4134 expect("identifier");
4135 *v = 0;
4138 storage = type->t & VT_STORAGE;
4139 type->t &= ~VT_STORAGE;
4140 post_type(type, ad, storage);
4141 type->t |= storage;
4142 if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
4143 parse_attribute(ad);
4145 if (!type1.t)
4146 return;
4147 /* append type at the end of type1 */
4148 type2 = &type1;
4149 for(;;) {
4150 s = type2->ref;
4151 type2 = &s->type;
4152 if (!type2->t) {
4153 *type2 = *type;
4154 break;
4157 *type = type1;
4160 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
4161 ST_FUNC int lvalue_type(int t)
4163 int bt, r;
4164 r = VT_LVAL;
4165 bt = t & VT_BTYPE;
4166 if (bt == VT_BYTE || bt == VT_BOOL)
4167 r |= VT_LVAL_BYTE;
4168 else if (bt == VT_SHORT)
4169 r |= VT_LVAL_SHORT;
4170 else
4171 return r;
4172 if (t & VT_UNSIGNED)
4173 r |= VT_LVAL_UNSIGNED;
4174 return r;
4177 /* indirection with full error checking and bound check */
4178 ST_FUNC void indir(void)
4180 if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
4181 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
4182 return;
4183 expect("pointer");
4185 if (vtop->r & VT_LVAL)
4186 gv(RC_INT);
4187 vtop->type = *pointed_type(&vtop->type);
4188 /* Arrays and functions are never lvalues */
4189 if (!(vtop->type.t & VT_ARRAY) && !(vtop->type.t & VT_VLA)
4190 && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
4191 vtop->r |= lvalue_type(vtop->type.t);
4192 /* if bound checking, the referenced pointer must be checked */
4193 #ifdef CONFIG_TCC_BCHECK
4194 if (tcc_state->do_bounds_check)
4195 vtop->r |= VT_MUSTBOUND;
4196 #endif
4200 /* pass a parameter to a function and do type checking and casting */
4201 static void gfunc_param_typed(Sym *func, Sym *arg)
4203 int func_type;
4204 CType type;
4206 func_type = func->c;
4207 if (func_type == FUNC_OLD ||
4208 (func_type == FUNC_ELLIPSIS && arg == NULL)) {
4209 /* default casting : only need to convert float to double */
4210 if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
4211 type.t = VT_DOUBLE;
4212 gen_cast(&type);
4213 } else if (vtop->type.t & VT_BITFIELD) {
4214 type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
4215 type.ref = vtop->type.ref;
4216 gen_cast(&type);
4218 } else if (arg == NULL) {
4219 tcc_error("too many arguments to function");
4220 } else {
4221 type = arg->type;
4222 type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
4223 gen_assign_cast(&type);
4227 /* parse an expression of the form '(type)' or '(expr)' and return its
4228 type */
4229 static void parse_expr_type(CType *type)
4231 int n;
4232 AttributeDef ad;
4234 skip('(');
4235 if (parse_btype(type, &ad)) {
4236 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4237 } else {
4238 expr_type(type);
4240 skip(')');
4243 static void parse_type(CType *type)
4245 AttributeDef ad;
4246 int n;
4248 if (!parse_btype(type, &ad)) {
4249 expect("type");
4251 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4254 static void vpush_tokc(int t)
4256 CType type;
4257 type.t = t;
4258 type.ref = 0;
4259 vsetc(&type, VT_CONST, &tokc);
4262 ST_FUNC void unary(void)
4264 int n, t, align, size, r, sizeof_caller;
4265 CType type;
4266 Sym *s;
4267 AttributeDef ad;
4269 sizeof_caller = in_sizeof;
4270 in_sizeof = 0;
4271 /* XXX: GCC 2.95.3 does not generate a table for this switch although
4272 it would be better here */
4273 tok_next:
4274 switch(tok) {
4275 case TOK_EXTENSION:
4276 next();
4277 goto tok_next;
4278 case TOK_CINT:
4279 case TOK_CCHAR:
4280 case TOK_LCHAR:
4281 vpushi(tokc.i);
4282 next();
4283 break;
4284 case TOK_CUINT:
4285 vpush_tokc(VT_INT | VT_UNSIGNED);
4286 next();
4287 break;
4288 case TOK_CLLONG:
4289 vpush_tokc(VT_LLONG);
4290 next();
4291 break;
4292 case TOK_CULLONG:
4293 vpush_tokc(VT_LLONG | VT_UNSIGNED);
4294 next();
4295 break;
4296 case TOK_CFLOAT:
4297 vpush_tokc(VT_FLOAT);
4298 next();
4299 break;
4300 case TOK_CDOUBLE:
4301 vpush_tokc(VT_DOUBLE);
4302 next();
4303 break;
4304 case TOK_CLDOUBLE:
4305 vpush_tokc(VT_LDOUBLE);
4306 next();
4307 break;
4308 case TOK___FUNCTION__:
4309 if (!gnu_ext)
4310 goto tok_identifier;
4311 /* fall thru */
4312 case TOK___FUNC__:
4314 void *ptr;
4315 int len;
4316 /* special function name identifier */
4317 len = strlen(funcname) + 1;
4318 /* generate char[len] type */
4319 type.t = VT_BYTE;
4320 mk_pointer(&type);
4321 type.t |= VT_ARRAY;
4322 type.ref->c = len;
4323 vpush_ref(&type, data_section, data_section->data_offset, len);
4324 ptr = section_ptr_add(data_section, len);
4325 memcpy(ptr, funcname, len);
4326 next();
4328 break;
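/* Illustrative example (not part of tcc itself): inside
       void foo(void) { puts(__func__); }
   the code above emits the bytes "foo\0" into the data section and pushes
   a char[4] array value referring to them. */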
4329 case TOK_LSTR:
4330 #ifdef TCC_TARGET_PE
4331 t = VT_SHORT | VT_UNSIGNED;
4332 #else
4333 t = VT_INT;
4334 #endif
4335 goto str_init;
4336 case TOK_STR:
4337 /* string parsing */
4338 t = VT_BYTE;
4339 str_init:
4340 if (tcc_state->warn_write_strings)
4341 t |= VT_CONSTANT;
4342 type.t = t;
4343 mk_pointer(&type);
4344 type.t |= VT_ARRAY;
4345 memset(&ad, 0, sizeof(AttributeDef));
4346 decl_initializer_alloc(&type, &ad, VT_CONST, 2, 0, 0);
4347 break;
4348 case '(':
4349 next();
4350 /* cast ? */
4351 if (parse_btype(&type, &ad)) {
4352 type_decl(&type, &ad, &n, TYPE_ABSTRACT);
4353 skip(')');
4354 /* check ISOC99 compound literal */
4355 if (tok == '{') {
4356 /* data is allocated locally by default */
4357 if (global_expr)
4358 r = VT_CONST;
4359 else
4360 r = VT_LOCAL;
4361 /* all except arrays are lvalues */
4362 if (!(type.t & VT_ARRAY))
4363 r |= lvalue_type(type.t);
4364 memset(&ad, 0, sizeof(AttributeDef));
4365 decl_initializer_alloc(&type, &ad, r, 1, 0, 0);
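/* Illustrative example (not part of tcc itself): an ISO C99 compound
   literal handled by the branch above:
       int *p = (int[]){1, 2, 3};
   At file scope (global_expr) the object is allocated in static storage
   (VT_CONST), otherwise on the stack (VT_LOCAL). */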
4366 } else {
4367 if (sizeof_caller) {
4368 vpush(&type);
4369 return;
4371 unary();
4372 gen_cast(&type);
4374 } else if (tok == '{') {
4375 int saved_nocode_wanted = nocode_wanted;
4376 if (const_wanted)
4377 tcc_error("expected constant");
4378 /* save all registers */
4379 save_regs(0);
4380 /* statement expression: we do not accept break/continue
4381 inside it, matching GCC. We do retain the nocode_wanted state,
4382 as statement expressions can't ever be entered from the
4383 outside, so any reactivation of code emission (from labels
4384 or loop heads) can be disabled again after the end of it. */
4385 block(NULL, NULL, 1);
4386 nocode_wanted = saved_nocode_wanted;
4387 skip(')');
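/* Illustrative example (not part of tcc itself): a GNU statement
   expression parsed by the branch above:
       int y = ({ int t = f(); t * 2; });   // 'f' is hypothetical
   The value of the last expression statement becomes the value of the
   whole construct; break/continue may not jump out of it. */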
4388 } else {
4389 gexpr();
4390 skip(')');
4392 break;
4393 case '*':
4394 next();
4395 unary();
4396 indir();
4397 break;
4398 case '&':
4399 next();
4400 unary();
4401 /* function names must be treated as function pointers,
4402 except for unary '&' and sizeof. Since we consider that
4403 functions are not lvalues, we only have to handle them
4404 here and in function calls. */
4405 /* arrays can also be used although they are not lvalues */
4406 if ((vtop->type.t & VT_BTYPE) != VT_FUNC &&
4407 !(vtop->type.t & VT_ARRAY) && !(vtop->type.t & VT_LLOCAL))
4408 test_lvalue();
4409 mk_pointer(&vtop->type);
4410 gaddrof();
4411 break;
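/* Illustrative example (not part of tcc itself; identifiers hypothetical):
   the special cases above allow taking the address of functions and
   arrays even though they are not lvalues:
       int a[4];
       int (*pa)[4] = &a;
       void (*pf)(void) = &some_func; */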
4412 case '!':
4413 next();
4414 unary();
4415 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4416 CType boolean;
4417 boolean.t = VT_BOOL;
4418 gen_cast(&boolean);
4419 vtop->c.i = !vtop->c.i;
4420 } else if ((vtop->r & VT_VALMASK) == VT_CMP)
4421 vtop->c.i ^= 1;
4422 else {
4423 save_regs(1);
4424 vseti(VT_JMP, gvtst(1, 0));
4426 break;
4427 case '~':
4428 next();
4429 unary();
4430 vpushi(-1);
4431 gen_op('^');
4432 break;
4433 case '+':
4434 next();
4435 unary();
4436 if ((vtop->type.t & VT_BTYPE) == VT_PTR)
4437 tcc_error("pointer not accepted for unary plus");
4438 /* In order to force a cast, we add zero, except for floating point
4439 where we really need a no-op (otherwise -0.0 would be transformed
4440 into +0.0). */
4441 if (!is_float(vtop->type.t)) {
4442 vpushi(0);
4443 gen_op('+');
4445 break;
4446 case TOK_SIZEOF:
4447 case TOK_ALIGNOF1:
4448 case TOK_ALIGNOF2:
4449 t = tok;
4450 next();
4451 in_sizeof++;
4452 unary_type(&type); // unary() called inside resets in_sizeof to 0
4453 size = type_size(&type, &align);
4454 if (t == TOK_SIZEOF) {
4455 if (!(type.t & VT_VLA)) {
4456 if (size < 0)
4457 tcc_error("sizeof applied to an incomplete type");
4458 vpushs(size);
4459 } else {
4460 vla_runtime_type_size(&type, &align);
4462 } else {
4463 vpushs(align);
4465 vtop->type.t |= VT_UNSIGNED;
4466 break;
4468 case TOK_builtin_expect:
4470 /* __builtin_expect is a no-op for now */
4471 next();
4472 skip('(');
4473 expr_eq();
4474 skip(',');
4475 nocode_wanted++;
4476 expr_lor_const();
4477 vpop();
4478 nocode_wanted--;
4479 skip(')');
4481 break;
4482 case TOK_builtin_types_compatible_p:
4484 CType type1, type2;
4485 next();
4486 skip('(');
4487 parse_type(&type1);
4488 skip(',');
4489 parse_type(&type2);
4490 skip(')');
4491 type1.t &= ~(VT_CONSTANT | VT_VOLATILE);
4492 type2.t &= ~(VT_CONSTANT | VT_VOLATILE);
4493 vpushi(is_compatible_types(&type1, &type2));
4495 break;
4496 case TOK_builtin_choose_expr:
4498 int64_t c;
4499 next();
4500 skip('(');
4501 c = expr_const64();
4502 skip(',');
4503 if (!c) {
4504 nocode_wanted++;
4506 expr_eq();
4507 if (!c) {
4508 vpop();
4509 nocode_wanted--;
4511 skip(',');
4512 if (c) {
4513 nocode_wanted++;
4515 expr_eq();
4516 if (c) {
4517 vpop();
4518 nocode_wanted--;
4520 skip(')');
4522 break;
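/* Illustrative example (not part of tcc itself): only the branch selected
   by the constant condition generates code, the other is parsed with
   nocode_wanted set:
       #define abs_val(x) __builtin_choose_expr( \
           __builtin_types_compatible_p(typeof(x), double), fabs(x), abs(x)) */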
4523 case TOK_builtin_constant_p:
4525 int res;
4526 next();
4527 skip('(');
4528 nocode_wanted++;
4529 gexpr();
4530 res = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
4531 vpop();
4532 nocode_wanted--;
4533 skip(')');
4534 vpushi(res);
4536 break;
4537 case TOK_builtin_frame_address:
4538 case TOK_builtin_return_address:
4540 int tok1 = tok;
4541 int level;
4542 CType type;
4543 next();
4544 skip('(');
4545 if (tok != TOK_CINT) {
4546 tcc_error("%s only takes positive integers",
4547 tok1 == TOK_builtin_return_address ?
4548 "__builtin_return_address" :
4549 "__builtin_frame_address");
4551 level = (uint32_t)tokc.i;
4552 next();
4553 skip(')');
4554 type.t = VT_VOID;
4555 mk_pointer(&type);
4556 vset(&type, VT_LOCAL, 0); /* local frame */
4557 while (level--) {
4558 mk_pointer(&vtop->type);
4559 indir(); /* -> parent frame */
4561 if (tok1 == TOK_builtin_return_address) {
4562 // assume return address is just above frame pointer on stack
4563 vpushi(PTR_SIZE);
4564 gen_op('+');
4565 mk_pointer(&vtop->type);
4566 indir();
4569 break;
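/* Illustrative example (not part of tcc itself):
       void *caller_pc  = __builtin_return_address(0);
       void *this_frame = __builtin_frame_address(0);
   A level N walks N saved frame pointers up the call chain, as done by
   the loop above. */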
4570 #ifdef TCC_TARGET_X86_64
4571 #ifdef TCC_TARGET_PE
4572 case TOK_builtin_va_start:
4574 next();
4575 skip('(');
4576 expr_eq();
4577 skip(',');
4578 expr_eq();
4579 skip(')');
4580 if ((vtop->r & VT_VALMASK) != VT_LOCAL)
4581 tcc_error("__builtin_va_start expects a local variable");
4582 vtop->r &= ~(VT_LVAL | VT_REF);
4583 vtop->type = char_pointer_type;
4584 vtop->c.i += 8;
4585 vstore();
4587 break;
4588 #else
4589 case TOK_builtin_va_arg_types:
4591 CType type;
4592 next();
4593 skip('(');
4594 parse_type(&type);
4595 skip(')');
4596 vpushi(classify_x86_64_va_arg(&type));
4598 break;
4599 #endif
4600 #endif
4602 #ifdef TCC_TARGET_ARM64
4603 case TOK___va_start: {
4604 next();
4605 skip('(');
4606 expr_eq();
4607 skip(',');
4608 expr_eq();
4609 skip(')');
4610 //xx check types
4611 gen_va_start();
4612 vpushi(0);
4613 vtop->type.t = VT_VOID;
4614 break;
4616 case TOK___va_arg: {
4617 CType type;
4618 next();
4619 skip('(');
4620 expr_eq();
4621 skip(',');
4622 parse_type(&type);
4623 skip(')');
4624 //xx check types
4625 gen_va_arg(&type);
4626 vtop->type = type;
4627 break;
4629 case TOK___arm64_clear_cache: {
4630 next();
4631 skip('(');
4632 expr_eq();
4633 skip(',');
4634 expr_eq();
4635 skip(')');
4636 gen_clear_cache();
4637 vpushi(0);
4638 vtop->type.t = VT_VOID;
4639 break;
4641 #endif
4642 /* pre operations */
4643 case TOK_INC:
4644 case TOK_DEC:
4645 t = tok;
4646 next();
4647 unary();
4648 inc(0, t);
4649 break;
4650 case '-':
4651 next();
4652 unary();
4653 t = vtop->type.t & VT_BTYPE;
4654 if (is_float(t)) {
4655 /* In IEEE arithmetic negate(x) isn't subtract(0, x), but rather
4656 subtract(-0, x). */
4657 vpush(&vtop->type);
4658 if (t == VT_FLOAT)
4659 vtop->c.f = -1.0 * 0.0;
4660 else if (t == VT_DOUBLE)
4661 vtop->c.d = -1.0 * 0.0;
4662 else
4663 vtop->c.ld = -1.0 * 0.0;
4664 } else
4665 vpushi(0);
4666 vswap();
4667 gen_op('-');
4668 break;
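/* Illustrative numeric note (not part of tcc itself): using 0 - x would
   lose the sign of zero, since 0.0 - 0.0 == +0.0, whereas -0.0 - 0.0
   yields -0.0, which is the required result of -x for x == +0.0. */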
4669 case TOK_LAND:
4670 if (!gnu_ext)
4671 goto tok_identifier;
4672 next();
4673 /* allow taking the address of a label */
4674 if (tok < TOK_UIDENT)
4675 expect("label identifier");
4676 s = label_find(tok);
4677 if (!s) {
4678 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
4679 } else {
4680 if (s->r == LABEL_DECLARED)
4681 s->r = LABEL_FORWARD;
4683 if (!s->type.t) {
4684 s->type.t = VT_VOID;
4685 mk_pointer(&s->type);
4686 s->type.t |= VT_STATIC;
4688 vpushsym(&s->type, s);
4689 next();
4690 break;
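/* Illustrative example (not part of tcc itself): the GNU "labels as
   values" extension accepted above:
       void *p = &&done;
       goto *p;
   done: ; */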
4692 // special qnan , snan and infinity values
4693 case TOK___NAN__:
4694 vpush64(VT_DOUBLE, 0x7ff8000000000000ULL);
4695 next();
4696 break;
4697 case TOK___SNAN__:
4698 vpush64(VT_DOUBLE, 0x7ff0000000000001ULL);
4699 next();
4700 break;
4701 case TOK___INF__:
4702 vpush64(VT_DOUBLE, 0x7ff0000000000000ULL);
4703 next();
4704 break;
4706 default:
4707 tok_identifier:
4708 t = tok;
4709 next();
4710 if (t < TOK_UIDENT)
4711 expect("identifier");
4712 s = sym_find(t);
4713 if (!s) {
4714 const char *name = get_tok_str(t, NULL);
4715 if (tok != '(')
4716 tcc_error("'%s' undeclared", name);
4717 /* for simple function calls, we tolerate an undeclared
4718 external reference to an int() function */
4719 if (tcc_state->warn_implicit_function_declaration
4720 #ifdef TCC_TARGET_PE
4721 /* people must be warned about using undeclared WINAPI functions
4722 (which usually start with an uppercase letter) */
4723 || (name[0] >= 'A' && name[0] <= 'Z')
4724 #endif
4726 tcc_warning("implicit declaration of function '%s'", name);
4727 s = external_global_sym(t, &func_old_type, 0);
4729 if ((s->type.t & (VT_STATIC | VT_INLINE | VT_BTYPE)) ==
4730 (VT_STATIC | VT_INLINE | VT_FUNC)) {
4731 /* if referencing an inline function, then we generate a
4732 symbol for it if not already done. This has the
4733 effect of generating code for it at the end of the
4734 compilation unit. Inline functions are always
4735 generated in the text section. */
4736 if (!s->c && !nocode_wanted)
4737 put_extern_sym(s, text_section, 0, 0);
4738 r = VT_SYM | VT_CONST;
4739 } else {
4740 r = s->r;
4741 /* A symbol that has a register is a local register variable,
4742 which starts out as VT_LOCAL value. */
4743 if ((r & VT_VALMASK) < VT_CONST)
4744 r = (r & ~VT_VALMASK) | VT_LOCAL;
4746 vset(&s->type, r, s->c);
4747 /* Point to s as backpointer (even without r&VT_SYM).
4748 Will be used by at least the x86 inline asm parser for
4749 regvars. */
4750 vtop->sym = s;
4751 if (vtop->r & VT_SYM) {
4752 vtop->c.i = 0;
4754 break;
4757 /* post operations */
4758 while (1) {
4759 if (tok == TOK_INC || tok == TOK_DEC) {
4760 inc(1, tok);
4761 next();
4762 } else if (tok == '.' || tok == TOK_ARROW || tok == TOK_CDOUBLE) {
4763 int qualifiers;
4764 /* field */
4765 if (tok == TOK_ARROW)
4766 indir();
4767 qualifiers = vtop->type.t & (VT_CONSTANT | VT_VOLATILE);
4768 test_lvalue();
4769 gaddrof();
4770 /* expect pointer on structure */
4771 if ((vtop->type.t & VT_BTYPE) != VT_STRUCT)
4772 expect("struct or union");
4773 if (tok == TOK_CDOUBLE)
4774 expect("field name");
4775 next();
4776 if (tok == TOK_CINT || tok == TOK_CUINT)
4777 expect("field name");
4778 s = find_field(&vtop->type, tok);
4779 if (!s)
4780 tcc_error("field not found: %s", get_tok_str(tok & ~SYM_FIELD, &tokc));
4781 /* add field offset to pointer */
4782 vtop->type = char_pointer_type; /* change type to 'char *' */
4783 vpushi(s->c);
4784 gen_op('+');
4785 /* change type to field type, and set to lvalue */
4786 vtop->type = s->type;
4787 vtop->type.t |= qualifiers;
4788 /* an array is never an lvalue */
4789 if (!(vtop->type.t & VT_ARRAY)) {
4790 vtop->r |= lvalue_type(vtop->type.t);
4791 #ifdef CONFIG_TCC_BCHECK
4792 /* if bound checking, the referenced pointer must be checked */
4793 if (tcc_state->do_bounds_check && (vtop->r & VT_VALMASK) != VT_LOCAL)
4794 vtop->r |= VT_MUSTBOUND;
4795 #endif
4797 next();
4798 } else if (tok == '[') {
4799 next();
4800 gexpr();
4801 gen_op('+');
4802 indir();
4803 skip(']');
4804 } else if (tok == '(') {
4805 SValue ret;
4806 Sym *sa;
4807 int nb_args, ret_nregs, ret_align, regsize, variadic;
4809 /* function call */
4810 if ((vtop->type.t & VT_BTYPE) != VT_FUNC) {
4811 /* pointer test (no array accepted) */
4812 if ((vtop->type.t & (VT_BTYPE | VT_ARRAY)) == VT_PTR) {
4813 vtop->type = *pointed_type(&vtop->type);
4814 if ((vtop->type.t & VT_BTYPE) != VT_FUNC)
4815 goto error_func;
4816 } else {
4817 error_func:
4818 expect("function pointer");
4820 } else {
4821 vtop->r &= ~VT_LVAL; /* no lvalue */
4823 /* get return type */
4824 s = vtop->type.ref;
4825 next();
4826 sa = s->next; /* first parameter */
4827 nb_args = 0;
4828 ret.r2 = VT_CONST;
4829 /* compute first implicit argument if a structure is returned */
4830 if ((s->type.t & VT_BTYPE) == VT_STRUCT) {
4831 variadic = (s->c == FUNC_ELLIPSIS);
4832 ret_nregs = gfunc_sret(&s->type, variadic, &ret.type,
4833 &ret_align, &regsize);
4834 if (!ret_nregs) {
4835 /* get some space for the returned structure */
4836 size = type_size(&s->type, &align);
4837 #ifdef TCC_TARGET_ARM64
4838 /* On arm64, a small struct is returned in registers.
4839 It is much easier to write it to memory if we know
4840 that we are allowed to write some extra bytes, so
4841 round the allocated space up to a power of 2: */
4842 if (size < 16)
4843 while (size & (size - 1))
4844 size = (size | (size - 1)) + 1;
4845 #endif
4846 loc = (loc - size) & -align;
4847 ret.type = s->type;
4848 ret.r = VT_LOCAL | VT_LVAL;
4849 /* pass it as 'int' to avoid structure arg passing
4850 problems */
4851 vseti(VT_LOCAL, loc);
4852 ret.c = vtop->c;
4853 nb_args++;
4855 } else {
4856 ret_nregs = 1;
4857 ret.type = s->type;
4860 if (ret_nregs) {
4861 /* return in register */
4862 if (is_float(ret.type.t)) {
4863 ret.r = reg_fret(ret.type.t);
4864 #ifdef TCC_TARGET_X86_64
4865 if ((ret.type.t & VT_BTYPE) == VT_QFLOAT)
4866 ret.r2 = REG_QRET;
4867 #endif
4868 } else {
4869 #ifndef TCC_TARGET_ARM64
4870 #ifdef TCC_TARGET_X86_64
4871 if ((ret.type.t & VT_BTYPE) == VT_QLONG)
4872 #else
4873 if ((ret.type.t & VT_BTYPE) == VT_LLONG)
4874 #endif
4875 ret.r2 = REG_LRET;
4876 #endif
4877 ret.r = REG_IRET;
4879 ret.c.i = 0;
4881 if (tok != ')') {
4882 for(;;) {
4883 expr_eq();
4884 gfunc_param_typed(s, sa);
4885 nb_args++;
4886 if (sa)
4887 sa = sa->next;
4888 if (tok == ')')
4889 break;
4890 skip(',');
4893 if (sa)
4894 tcc_error("too few arguments to function");
4895 skip(')');
4896 gfunc_call(nb_args);
4898 /* return value */
4899 for (r = ret.r + ret_nregs + !ret_nregs; r-- > ret.r;) {
4900 vsetc(&ret.type, r, &ret.c);
4901 vtop->r2 = ret.r2; /* Loop only happens when r2 is VT_CONST */
4904 /* handle packed struct return */
4905 if (((s->type.t & VT_BTYPE) == VT_STRUCT) && ret_nregs) {
4906 int addr, offset;
4908 size = type_size(&s->type, &align);
4909 /* We often write whole registers, so make sure there's enough
4910 space. Assume the register size is a power of 2. */
4911 if (regsize > align)
4912 align = regsize;
4913 loc = (loc - size) & -align;
4914 addr = loc;
4915 offset = 0;
4916 for (;;) {
4917 vset(&ret.type, VT_LOCAL | VT_LVAL, addr + offset);
4918 vswap();
4919 vstore();
4920 vtop--;
4921 if (--ret_nregs == 0)
4922 break;
4923 offset += regsize;
4925 vset(&s->type, VT_LOCAL | VT_LVAL, addr);
4927 } else {
4928 break;
4933 ST_FUNC void expr_prod(void)
4935 int t;
4937 unary();
4938 while (tok == '*' || tok == '/' || tok == '%') {
4939 t = tok;
4940 next();
4941 unary();
4942 gen_op(t);
4946 ST_FUNC void expr_sum(void)
4948 int t;
4950 expr_prod();
4951 while (tok == '+' || tok == '-') {
4952 t = tok;
4953 next();
4954 expr_prod();
4955 gen_op(t);
4959 static void expr_shift(void)
4961 int t;
4963 expr_sum();
4964 while (tok == TOK_SHL || tok == TOK_SAR) {
4965 t = tok;
4966 next();
4967 expr_sum();
4968 gen_op(t);
4972 static void expr_cmp(void)
4974 int t;
4976 expr_shift();
4977 while ((tok >= TOK_ULE && tok <= TOK_GT) ||
4978 tok == TOK_ULT || tok == TOK_UGE) {
4979 t = tok;
4980 next();
4981 expr_shift();
4982 gen_op(t);
4986 static void expr_cmpeq(void)
4988 int t;
4990 expr_cmp();
4991 while (tok == TOK_EQ || tok == TOK_NE) {
4992 t = tok;
4993 next();
4994 expr_cmp();
4995 gen_op(t);
4999 static void expr_and(void)
5001 expr_cmpeq();
5002 while (tok == '&') {
5003 next();
5004 expr_cmpeq();
5005 gen_op('&');
5009 static void expr_xor(void)
5011 expr_and();
5012 while (tok == '^') {
5013 next();
5014 expr_and();
5015 gen_op('^');
5019 static void expr_or(void)
5021 expr_xor();
5022 while (tok == '|') {
5023 next();
5024 expr_xor();
5025 gen_op('|');
5029 /* XXX: fix this mess */
5030 static void expr_land_const(void)
5032 expr_or();
5033 while (tok == TOK_LAND) {
5034 next();
5035 expr_or();
5036 gen_op(TOK_LAND);
5039 static void expr_lor_const(void)
5041 expr_land_const();
5042 while (tok == TOK_LOR) {
5043 next();
5044 expr_land_const();
5045 gen_op(TOK_LOR);
5049 static void expr_land(void)
5051 expr_or();
5052 if (tok == TOK_LAND) {
5053 int t = 0;
5054 for(;;) {
5055 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5056 CType ctb;
5057 ctb.t = VT_BOOL;
5058 gen_cast(&ctb);
5059 if (vtop->c.i) {
5060 vpop();
5061 } else {
5062 nocode_wanted++;
5063 while (tok == TOK_LAND) {
5064 next();
5065 expr_or();
5066 vpop();
5068 nocode_wanted--;
5069 if (t)
5070 gsym(t);
5071 gen_cast(&int_type);
5072 break;
5074 } else {
5075 if (!t)
5076 save_regs(1);
5077 t = gvtst(1, t);
5079 if (tok != TOK_LAND) {
5080 if (t)
5081 vseti(VT_JMPI, t);
5082 else
5083 vpushi(1);
5084 break;
5086 next();
5087 expr_or();
5092 static void expr_lor(void)
5094 expr_land();
5095 if (tok == TOK_LOR) {
5096 int t = 0;
5097 for(;;) {
5098 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5099 CType ctb;
5100 ctb.t = VT_BOOL;
5101 gen_cast(&ctb);
5102 if (!vtop->c.i) {
5103 vpop();
5104 } else {
5105 nocode_wanted++;
5106 while (tok == TOK_LOR) {
5107 next();
5108 expr_land();
5109 vpop();
5111 nocode_wanted--;
5112 if (t)
5113 gsym(t);
5114 gen_cast(&int_type);
5115 break;
5117 } else {
5118 if (!t)
5119 save_regs(1);
5120 t = gvtst(0, t);
5122 if (tok != TOK_LOR) {
5123 if (t)
5124 vseti(VT_JMP, t);
5125 else
5126 vpushi(0);
5127 break;
5129 next();
5130 expr_land();
5135 /* Assuming vtop is a value used in a conditional context
5136 (i.e. compared with zero) return 0 if it's false, 1 if
5137 true and -1 if it can't be statically determined. */
5138 static int condition_3way(void)
5140 int c = -1;
5141 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
5142 (!(vtop->r & VT_SYM) ||
5143 !(vtop->sym->type.t & VT_WEAK))) {
5144 CType boolean;
5145 boolean.t = VT_BOOL;
5146 vdup();
5147 gen_cast(&boolean);
5148 c = vtop->c.i;
5149 vpop();
5151 return c;
5154 static void expr_cond(void)
5156 int tt, u, r1, r2, rc, t1, t2, bt1, bt2, islv, c, g;
5157 SValue sv;
5158 CType type, type1, type2;
5160 expr_lor();
5161 if (tok == '?') {
5162 next();
5163 c = condition_3way();
5164 g = (tok == ':' && gnu_ext);
5165 if (c < 0) {
5166 /* needed to avoid having different registers saved in
5167 each branch */
5168 if (is_float(vtop->type.t)) {
5169 rc = RC_FLOAT;
5170 #ifdef TCC_TARGET_X86_64
5171 if ((vtop->type.t & VT_BTYPE) == VT_LDOUBLE) {
5172 rc = RC_ST0;
5174 #endif
5175 } else
5176 rc = RC_INT;
5177 gv(rc);
5178 save_regs(1);
5179 if (g)
5180 gv_dup();
5181 tt = gvtst(1, 0);
5183 } else {
5184 if (!g)
5185 vpop();
5186 tt = 0;
5189 if (1) {
5190 if (c == 0)
5191 nocode_wanted++;
5192 if (!g)
5193 gexpr();
5195 type1 = vtop->type;
5196 sv = *vtop; /* save value to handle it later */
5197 vtop--; /* no vpop so that FP stack is not flushed */
5198 skip(':');
5200 u = 0;
5201 if (c < 0)
5202 u = gjmp(0);
5203 gsym(tt);
5205 if (c == 0)
5206 nocode_wanted--;
5207 if (c == 1)
5208 nocode_wanted++;
5209 expr_cond();
5210 if (c == 1)
5211 nocode_wanted--;
5213 type2 = vtop->type;
5214 t1 = type1.t;
5215 bt1 = t1 & VT_BTYPE;
5216 t2 = type2.t;
5217 bt2 = t2 & VT_BTYPE;
5218 /* cast operands to correct type according to ISOC rules */
5219 if (is_float(bt1) || is_float(bt2)) {
5220 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
5221 type.t = VT_LDOUBLE;
5223 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
5224 type.t = VT_DOUBLE;
5225 } else {
5226 type.t = VT_FLOAT;
5228 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
5229 /* cast to biggest op */
5230 type.t = VT_LLONG;
5231 /* convert to unsigned if it does not fit in a long long */
5232 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED) ||
5233 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED))
5234 type.t |= VT_UNSIGNED;
5235 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
5236 /* If one is a null ptr constant the result type
5237 is the other. */
5238 if (is_null_pointer (vtop))
5239 type = type1;
5240 else if (is_null_pointer (&sv))
5241 type = type2;
5242 /* XXX: test pointer compatibility, C99 has more elaborate
5243 rules here. */
5244 else
5245 type = type1;
5246 } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
5247 /* XXX: test function pointer compatibility */
5248 type = bt1 == VT_FUNC ? type1 : type2;
5249 } else if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
5250 /* XXX: test structure compatibility */
5251 type = bt1 == VT_STRUCT ? type1 : type2;
5252 } else if (bt1 == VT_VOID || bt2 == VT_VOID) {
5253 /* NOTE: as an extension, we accept void on only one side */
5254 type.t = VT_VOID;
5255 } else {
5256 /* integer operations */
5257 type.t = VT_INT;
5258 /* convert to unsigned if it does not fit in an integer */
5259 if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED) ||
5260 (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED))
5261 type.t |= VT_UNSIGNED;
5263 /* keep structs as lvalues by transforming `(expr ? a : b)` into `*(expr ? &a : &b)` so
5264 that `(expr ? a : b).mem` does not error with "lvalue expected" */
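/* Illustrative example (not part of tcc itself; identifiers hypothetical):
       struct S { int m; } s1, s2;
       int v = (cond ? s1 : s2).m;
   Without the transformation above, the field access code (which takes the
   address of the struct) would report "lvalue expected". */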
5265 islv = (vtop->r & VT_LVAL) && (sv.r & VT_LVAL) && VT_STRUCT == (type.t & VT_BTYPE);
5266 islv &= c < 0;
5268 /* now we convert second operand */
5269 if (c != 1) {
5270 gen_cast(&type);
5271 if (islv) {
5272 mk_pointer(&vtop->type);
5273 gaddrof();
5274 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5275 gaddrof();
5278 rc = RC_INT;
5279 if (is_float(type.t)) {
5280 rc = RC_FLOAT;
5281 #ifdef TCC_TARGET_X86_64
5282 if ((type.t & VT_BTYPE) == VT_LDOUBLE) {
5283 rc = RC_ST0;
5285 #endif
5286 } else if ((type.t & VT_BTYPE) == VT_LLONG) {
5287 /* for long longs, we use fixed registers to avoid having
5288 to handle a complicated move */
5289 rc = RC_IRET;
5292 tt = r2 = 0;
5293 if (c < 0) {
5294 r2 = gv(rc);
5295 tt = gjmp(0);
5297 gsym(u);
5299 /* this is horrible, but we must also convert the first
5300 operand */
5301 if (c != 0) {
5302 *vtop = sv;
5303 gen_cast(&type);
5304 if (islv) {
5305 mk_pointer(&vtop->type);
5306 gaddrof();
5307 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5308 gaddrof();
5311 if (c < 0) {
5312 r1 = gv(rc);
5313 move_reg(r2, r1, type.t);
5314 vtop->r = r2;
5315 gsym(tt);
5316 if (islv)
5317 indir();
5323 static void expr_eq(void)
5325 int t;
5327 expr_cond();
5328 if (tok == '=' ||
5329 (tok >= TOK_A_MOD && tok <= TOK_A_DIV) ||
5330 tok == TOK_A_XOR || tok == TOK_A_OR ||
5331 tok == TOK_A_SHL || tok == TOK_A_SAR) {
5332 test_lvalue();
5333 t = tok;
5334 next();
5335 if (t == '=') {
5336 expr_eq();
5337 } else {
5338 vdup();
5339 expr_eq();
5340 gen_op(t & 0x7f);
5342 vstore();
5346 ST_FUNC void gexpr(void)
5348 while (1) {
5349 expr_eq();
5350 if (tok != ',')
5351 break;
5352 vpop();
5353 next();
5357 /* parse an expression and return its type without any side effect. */
5358 static void expr_type(CType *type)
5361 nocode_wanted++;
5362 gexpr();
5363 *type = vtop->type;
5364 vpop();
5365 nocode_wanted--;
5368 /* parse a unary expression and return its type without any side
5369 effect. */
5370 static void unary_type(CType *type)
5372 nocode_wanted++;
5373 unary();
5374 *type = vtop->type;
5375 vpop();
5376 nocode_wanted--;
5379 /* parse a constant expression and return value in vtop. */
5380 static void expr_const1(void)
5382 const_wanted++;
5383 expr_cond();
5384 const_wanted--;
5387 /* parse an integer constant and return its value. */
5388 static inline int64_t expr_const64(void)
5390 int64_t c;
5391 expr_const1();
5392 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
5393 expect("constant expression");
5394 c = vtop->c.i;
5395 vpop();
5396 return c;
5399 /* parse an integer constant and return its value.
5400 Complain if it doesn't fit 32bit (signed or unsigned). */
5401 ST_FUNC int expr_const(void)
5403 int c;
5404 int64_t wc = expr_const64();
5405 c = wc;
5406 if (c != wc && (unsigned)c != wc)
5407 tcc_error("constant exceeds 32 bit");
5408 return c;
5411 /* return the label token if current token is a label, otherwise
5412 return zero */
5413 static int is_label(void)
5415 int last_tok;
5417 /* fast test first */
5418 if (tok < TOK_UIDENT)
5419 return 0;
5420 /* no need to save tokc because tok is an identifier */
5421 last_tok = tok;
5422 next();
5423 if (tok == ':') {
5424 next();
5425 return last_tok;
5426 } else {
5427 unget_tok(last_tok);
5428 return 0;
5432 static void label_or_decl(int l)
5434 int last_tok;
5436 /* fast test first */
5437 if (tok >= TOK_UIDENT)
5439 /* no need to save tokc because tok is an identifier */
5440 last_tok = tok;
5441 next();
5442 if (tok == ':') {
5443 unget_tok(last_tok);
5444 return;
5446 unget_tok(last_tok);
5448 decl(l);
5451 #ifndef TCC_TARGET_ARM64
5452 static void gfunc_return(CType *func_type)
5454 if ((func_type->t & VT_BTYPE) == VT_STRUCT) {
5455 CType type, ret_type;
5456 int ret_align, ret_nregs, regsize;
5457 ret_nregs = gfunc_sret(func_type, func_var, &ret_type,
5458 &ret_align, &regsize);
5459 if (0 == ret_nregs) {
5460 /* if returning structure, must copy it to implicit
5461 first pointer arg location */
5462 type = *func_type;
5463 mk_pointer(&type);
5464 vset(&type, VT_LOCAL | VT_LVAL, func_vc);
5465 indir();
5466 vswap();
5467 /* copy structure value to pointer */
5468 vstore();
5469 } else {
5470 /* returning structure packed into registers */
5471 int r, size, addr, align;
5472 size = type_size(func_type,&align);
5473 if ((vtop->r != (VT_LOCAL | VT_LVAL) ||
5474 (vtop->c.i & (ret_align-1)))
5475 && (align & (ret_align-1))) {
5476 loc = (loc - size) & -ret_align;
5477 addr = loc;
5478 type = *func_type;
5479 vset(&type, VT_LOCAL | VT_LVAL, addr);
5480 vswap();
5481 vstore();
5482 vpop();
5483 vset(&ret_type, VT_LOCAL | VT_LVAL, addr);
5485 vtop->type = ret_type;
5486 if (is_float(ret_type.t))
5487 r = rc_fret(ret_type.t);
5488 else
5489 r = RC_IRET;
5491 if (ret_nregs == 1)
5492 gv(r);
5493 else {
5494 for (;;) {
5495 vdup();
5496 gv(r);
5497 vpop();
5498 if (--ret_nregs == 0)
5499 break;
5500 /* We assume that when a structure is returned in multiple
5501 registers, their classes are consecutive values of the
5502 sequence s(n) = 2^n */
5503 r <<= 1;
5504 vtop->c.i += regsize;
5508 } else if (is_float(func_type->t)) {
5509 gv(rc_fret(func_type->t));
5510 } else {
5511 gv(RC_IRET);
5513 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
5515 #endif
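/* Illustrative example (not part of tcc itself) of what gfunc_return
   handles for struct-valued returns:
       struct P { long a, b; };
       struct P make(void) { struct P p = {1, 2}; return p; }
   Depending on the target ABI the value is either packed into return
   registers (gfunc_sret != 0) or copied through the hidden pointer
   argument whose location is recorded in func_vc. */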
5517 static int case_cmp(const void *pa, const void *pb)
5519 int64_t a = (*(struct case_t**) pa)->v1;
5520 int64_t b = (*(struct case_t**) pb)->v1;
5521 return a < b ? -1 : a > b;
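/* Illustrative example (not part of tcc itself): gcase() below dispatches
   both single case values and GNU case ranges, e.g.
       switch (c) {
       case '0' ... '9': return 1;
       case ' ':         return 2;
       }
   Long sorted case lists are handled by binary search, short ones by a
   linear scan. */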
5524 static void gcase(struct case_t **base, int len, int *bsym)
5526 struct case_t *p;
5527 int e;
5528 int ll = (vtop->type.t & VT_BTYPE) == VT_LLONG;
5529 gv(RC_INT);
5530 while (len > 4) {
5531 /* binary search */
5532 p = base[len/2];
5533 vdup();
5534 if (ll)
5535 vpushll(p->v2);
5536 else
5537 vpushi(p->v2);
5538 gen_op(TOK_LE);
5539 e = gtst(1, 0);
5540 vdup();
5541 if (ll)
5542 vpushll(p->v1);
5543 else
5544 vpushi(p->v1);
5545 gen_op(TOK_GE);
5546 gtst_addr(0, p->sym); /* v1 <= x <= v2 */
5547 /* x < v1 */
5548 gcase(base, len/2, bsym);
5549 if (cur_switch->def_sym)
5550 gjmp_addr(cur_switch->def_sym);
5551 else
5552 *bsym = gjmp(*bsym);
5553 /* x > v2 */
5554 gsym(e);
5555 e = len/2 + 1;
5556 base += e; len -= e;
5558 /* linear scan */
5559 while (len--) {
5560 p = *base++;
5561 vdup();
5562 if (ll)
5563 vpushll(p->v2);
5564 else
5565 vpushi(p->v2);
5566 if (p->v1 == p->v2) {
5567 gen_op(TOK_EQ);
5568 gtst_addr(0, p->sym);
5569 } else {
5570 gen_op(TOK_LE);
5571 e = gtst(1, 0);
5572 vdup();
5573 if (ll)
5574 vpushll(p->v1);
5575 else
5576 vpushi(p->v1);
5577 gen_op(TOK_GE);
5578 gtst_addr(0, p->sym);
5579 gsym(e);
5584 static void block(int *bsym, int *csym, int is_expr)
5586 int a, b, c, d, cond;
5587 Sym *s;
5589 /* generate line number info */
5590 if (tcc_state->do_debug &&
5591 (last_line_num != file->line_num || last_ind != ind)) {
5592 put_stabn(N_SLINE, 0, file->line_num, ind - func_ind);
5593 last_ind = ind;
5594 last_line_num = file->line_num;
5597 if (is_expr) {
5598 /* default return value is (void) */
5599 vpushi(0);
5600 vtop->type.t = VT_VOID;
5603 if (tok == TOK_IF) {
5604 /* if test */
5605 int saved_nocode_wanted = nocode_wanted;
5606 next();
5607 skip('(');
5608 gexpr();
5609 skip(')');
5610 cond = condition_3way();
5611 if (cond == 1)
5612 a = 0, vpop();
5613 else
5614 a = gvtst(1, 0);
5615 if (cond == 0)
5616 nocode_wanted |= 0x20000000;
5617 block(bsym, csym, 0);
5618 if (cond != 1)
5619 nocode_wanted = saved_nocode_wanted;
5620 c = tok;
5621 if (c == TOK_ELSE) {
5622 next();
5623 d = gjmp(0);
5624 gsym(a);
5625 if (cond == 1)
5626 nocode_wanted |= 0x20000000;
5627 block(bsym, csym, 0);
5628 gsym(d); /* patch else jmp */
5629 if (cond != 0)
5630 nocode_wanted = saved_nocode_wanted;
5631 } else
5632 gsym(a);
5633 } else if (tok == TOK_WHILE) {
5634 int saved_nocode_wanted;
5635 nocode_wanted &= ~0x20000000;
5636 next();
5637 d = ind;
5638 vla_sp_restore();
5639 skip('(');
5640 gexpr();
5641 skip(')');
5642 a = gvtst(1, 0);
5643 b = 0;
5644 ++local_scope;
5645 saved_nocode_wanted = nocode_wanted;
5646 block(&a, &b, 0);
5647 nocode_wanted = saved_nocode_wanted;
5648 --local_scope;
5649 gjmp_addr(d);
5650 gsym(a);
5651 gsym_addr(b, d);
5652 } else if (tok == '{') {
5653 Sym *llabel;
5654 int block_vla_sp_loc = vla_sp_loc, saved_vlas_in_scope = vlas_in_scope;
5656 next();
5657 /* record local declaration stack position */
5658 s = local_stack;
5659 llabel = local_label_stack;
5660 ++local_scope;
5662 /* handle local labels declarations */
5663 if (tok == TOK_LABEL) {
5664 next();
5665 for(;;) {
5666 if (tok < TOK_UIDENT)
5667 expect("label identifier");
5668 label_push(&local_label_stack, tok, LABEL_DECLARED);
5669 next();
5670 if (tok == ',') {
5671 next();
5672 } else {
5673 skip(';');
5674 break;
5678 while (tok != '}') {
5679 label_or_decl(VT_LOCAL);
5680 if (tok != '}') {
5681 if (is_expr)
5682 vpop();
5683 block(bsym, csym, is_expr);
5686 /* pop locally defined labels */
5687 label_pop(&local_label_stack, llabel);
5688 /* pop locally defined symbols */
5689 --local_scope;
5690 /* In the is_expr case (a statement expression is finished here),
5691 vtop might refer to symbols on the local_stack. Either via the
5692 type or via vtop->sym. We can't pop those nor any that in turn
5693 might be referred to. To make it easier we don't roll back
5694 any symbols in that case; some upper level call to block() will
5695 do that. We do have to remove such symbols from the lookup
5696 tables, though. sym_pop will do that. */
5697 sym_pop(&local_stack, s, is_expr);
5699 /* Pop VLA frames and restore stack pointer if required */
5700 if (vlas_in_scope > saved_vlas_in_scope) {
5701 vla_sp_loc = saved_vlas_in_scope ? block_vla_sp_loc : vla_sp_root_loc;
5702 vla_sp_restore();
5704 vlas_in_scope = saved_vlas_in_scope;
5706 next();
5707 } else if (tok == TOK_RETURN) {
5708 next();
5709 if (tok != ';') {
5710 gexpr();
5711 gen_assign_cast(&func_vt);
5712 gfunc_return(&func_vt);
5714 skip(';');
5715 /* jump unless last stmt in top-level block */
5716 if (tok != '}' || local_scope != 1)
5717 rsym = gjmp(rsym);
5718 nocode_wanted |= 0x20000000;
5719 } else if (tok == TOK_BREAK) {
5720 /* compute jump */
5721 if (!bsym)
5722 tcc_error("cannot break");
5723 *bsym = gjmp(*bsym);
5724 next();
5725 skip(';');
5726 nocode_wanted |= 0x20000000;
5727 } else if (tok == TOK_CONTINUE) {
5728 /* compute jump */
5729 if (!csym)
5730 tcc_error("cannot continue");
5731 vla_sp_restore_root();
5732 *csym = gjmp(*csym);
5733 next();
5734 skip(';');
5735 } else if (tok == TOK_FOR) {
5736 int e;
5737 int saved_nocode_wanted;
5738 nocode_wanted &= ~0x20000000;
5739 next();
5740 skip('(');
5741 s = local_stack;
5742 ++local_scope;
5743 if (tok != ';') {
5744 /* c99 for-loop init decl? */
5745 if (!decl0(VT_LOCAL, 1)) {
5746 /* no, regular for-loop init expr */
5747 gexpr();
5748 vpop();
5751 skip(';');
5752 d = ind;
5753 c = ind;
5754 vla_sp_restore();
5755 a = 0;
5756 b = 0;
5757 if (tok != ';') {
5758 gexpr();
5759 a = gvtst(1, 0);
5761 skip(';');
5762 if (tok != ')') {
5763 e = gjmp(0);
5764 c = ind;
5765 vla_sp_restore();
5766 gexpr();
5767 vpop();
5768 gjmp_addr(d);
5769 gsym(e);
5771 skip(')');
5772 saved_nocode_wanted = nocode_wanted;
5773 block(&a, &b, 0);
5774 nocode_wanted = saved_nocode_wanted;
5775 gjmp_addr(c);
5776 gsym(a);
5777 gsym_addr(b, c);
5778 --local_scope;
5779 sym_pop(&local_stack, s, 0);
5781 } else
5782 if (tok == TOK_DO) {
5783 int saved_nocode_wanted;
5784 nocode_wanted &= ~0x20000000;
5785 next();
5786 a = 0;
5787 b = 0;
5788 d = ind;
5789 vla_sp_restore();
5790 saved_nocode_wanted = nocode_wanted;
5791 block(&a, &b, 0);
5792 skip(TOK_WHILE);
5793 skip('(');
5794 gsym(b);
5795 gexpr();
5796 c = gvtst(0, 0);
5797 gsym_addr(c, d);
5798 nocode_wanted = saved_nocode_wanted;
5799 skip(')');
5800 gsym(a);
5801 skip(';');
5802 } else
5803 if (tok == TOK_SWITCH) {
5804 struct switch_t *saved, sw;
5805 int saved_nocode_wanted = nocode_wanted;
5806 SValue switchval;
5807 next();
5808 skip('(');
5809 gexpr();
5810 skip(')');
5811 switchval = *vtop--;
5812 a = 0;
5813 b = gjmp(0); /* jump to first case */
5814 sw.p = NULL; sw.n = 0; sw.def_sym = 0;
5815 saved = cur_switch;
5816 cur_switch = &sw;
5817 block(&a, csym, 0);
5818 nocode_wanted = saved_nocode_wanted;
5819 a = gjmp(a); /* add implicit break */
5820 /* case lookup */
5821 gsym(b);
5822 qsort(sw.p, sw.n, sizeof(void*), case_cmp);
5823 for (b = 1; b < sw.n; b++)
5824 if (sw.p[b - 1]->v2 >= sw.p[b]->v1)
5825 tcc_error("duplicate case value");
5826 /* Our switch table sorting is signed, so the compared
5827 value needs to be as well when it's 64bit. */
5828 if ((switchval.type.t & VT_BTYPE) == VT_LLONG)
5829 switchval.type.t &= ~VT_UNSIGNED;
5830 vpushv(&switchval);
5831 gcase(sw.p, sw.n, &a);
5832 vpop();
5833 if (sw.def_sym)
5834 gjmp_addr(sw.def_sym);
5835 dynarray_reset(&sw.p, &sw.n);
5836 cur_switch = saved;
5837 /* break label */
5838 gsym(a);
5839 } else
5840 if (tok == TOK_CASE) {
5841 struct case_t *cr = tcc_malloc(sizeof(struct case_t));
5842 if (!cur_switch)
5843 expect("switch");
5844 nocode_wanted &= ~0x20000000;
5845 next();
5846 cr->v1 = cr->v2 = expr_const64();
5847 if (gnu_ext && tok == TOK_DOTS) {
5848 next();
5849 cr->v2 = expr_const64();
5850 if (cr->v2 < cr->v1)
5851 tcc_warning("empty case range");
5853 cr->sym = ind;
5854 dynarray_add((void***) &cur_switch->p, &cur_switch->n, cr);
5855 skip(':');
5856 is_expr = 0;
5857 goto block_after_label;
5858 } else
5859 if (tok == TOK_DEFAULT) {
5860 next();
5861 skip(':');
5862 if (!cur_switch)
5863 expect("switch");
5864 if (cur_switch->def_sym)
5865 tcc_error("too many 'default'");
5866 cur_switch->def_sym = ind;
5867 is_expr = 0;
5868 goto block_after_label;
5869 } else
5870 if (tok == TOK_GOTO) {
5871 next();
5872 if (tok == '*' && gnu_ext) {
5873 /* computed goto */
5874 next();
5875 gexpr();
5876 if ((vtop->type.t & VT_BTYPE) != VT_PTR)
5877 expect("pointer");
5878 ggoto();
5879 } else if (tok >= TOK_UIDENT) {
5880 s = label_find(tok);
5881 /* put forward definition if needed */
5882 if (!s) {
5883 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
5884 } else {
5885 if (s->r == LABEL_DECLARED)
5886 s->r = LABEL_FORWARD;
5888 vla_sp_restore_root();
5889 if (s->r & LABEL_FORWARD)
5890 s->jnext = gjmp(s->jnext);
5891 else
5892 gjmp_addr(s->jnext);
5893 next();
5894 } else {
5895 expect("label identifier");
5897 skip(';');
5898 } else if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
5899 asm_instr();
5900 } else {
5901 b = is_label();
5902 if (b) {
5903 /* label case */
5904 s = label_find(b);
5905 if (s) {
5906 if (s->r == LABEL_DEFINED)
5907 tcc_error("duplicate label '%s'", get_tok_str(s->v, NULL));
5908 gsym(s->jnext);
5909 s->r = LABEL_DEFINED;
5910 } else {
5911 s = label_push(&global_label_stack, b, LABEL_DEFINED);
5913 s->jnext = ind;
5914 vla_sp_restore();
5915 /* we accept this, but it is a mistake */
5916 block_after_label:
5917 nocode_wanted &= ~0x20000000;
5918 if (tok == '}') {
5919 tcc_warning("deprecated use of label at end of compound statement");
5920 } else {
5921 if (is_expr)
5922 vpop();
5923 block(bsym, csym, is_expr);
5925 } else {
5926 /* expression case */
5927 if (tok != ';') {
5928 if (is_expr) {
5929 vpop();
5930 gexpr();
5931 } else {
5932 gexpr();
5933 vpop();
5936 skip(';');
5941 #define EXPR_CONST 1
5942 #define EXPR_ANY 2
5944 static void parse_init_elem(int expr_type)
5946 int saved_global_expr;
5947 switch(expr_type) {
5948 case EXPR_CONST:
5949 /* compound literals must be allocated globally in this case */
5950 saved_global_expr = global_expr;
5951 global_expr = 1;
5952 expr_const1();
5953 global_expr = saved_global_expr;
5954 /* NOTE: symbols are accepted */
5955 if ((vtop->r & (VT_VALMASK | VT_LVAL)) != VT_CONST)
5956 tcc_error("initializer element is not constant");
5957 break;
5958 case EXPR_ANY:
5959 expr_eq();
5960 break;
5964 /* t is the array or struct type. c is the array or struct
5965 address. cur_field is the pointer to the current
5966 value; for arrays the 'c' member contains the current start
5967 index and the 'r' member the end index (in case of range init).
5968 'size_only' is true if only size info is needed (only used
5969 for arrays) */
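/* Illustrative example (not part of tcc itself) of the designators parsed
   below, including the GNU range form:
       struct T { int x, y; };
       struct T t  = { .y = 2, .x = 1 };
       int    v[10] = { [2] = 5, [4 ... 6] = 7 }; */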
5970 static void decl_designator(CType *type, Section *sec, unsigned long c,
5971 Sym **cur_field, int size_only)
5973 Sym *s, *f;
5974 int notfirst, index, index_last, align, l, nb_elems, elem_size;
5975 CType type1;
5977 notfirst = 0;
5978 elem_size = 0;
5979 nb_elems = 1;
5980 if (gnu_ext && (l = is_label()) != 0)
5981 goto struct_field;
5982 while (tok == '[' || tok == '.') {
5983 if (tok == '[') {
5984 if (!(type->t & VT_ARRAY))
5985 expect("array type");
5986 s = type->ref;
5987 next();
5988 index = expr_const();
5989 if (index < 0 || (s->c >= 0 && index >= s->c))
5990 tcc_error("invalid index");
5991 if (tok == TOK_DOTS && gnu_ext) {
5992 next();
5993 index_last = expr_const();
5994 if (index_last < 0 ||
5995 (s->c >= 0 && index_last >= s->c) ||
5996 index_last < index)
5997 tcc_error("invalid index");
5998 } else {
5999 index_last = index;
6001 skip(']');
6002 if (!notfirst) {
6003 (*cur_field)->c = index;
6004 (*cur_field)->r = index_last;
6006 type = pointed_type(type);
6007 elem_size = type_size(type, &align);
6008 c += index * elem_size;
6009 /* NOTE: we only support ranges for the last designator */
6010 nb_elems = index_last - index + 1;
6011 if (nb_elems != 1) {
6012 notfirst = 1;
6013 break;
6015 } else {
6016 next();
6017 l = tok;
6018 next();
6019 struct_field:
6020 if ((type->t & VT_BTYPE) != VT_STRUCT)
6021 expect("struct/union type");
6022 f = find_field(type, l);
6023 if (!f)
6024 expect("field");
6025 if (!notfirst)
6026 *cur_field = f;
6027 /* XXX: fix this mess by using explicit storage field */
6028 type1 = f->type;
6029 type1.t |= (type->t & ~VT_TYPE);
6030 type = &type1;
6031 c += f->c;
6033 notfirst = 1;
6035 if (notfirst) {
6036 if (tok == '=') {
6037 next();
6038 } else {
6039 if (!gnu_ext)
6040 expect("=");
6042 } else {
6043 if (type->t & VT_ARRAY) {
6044 index = (*cur_field)->c;
6045 if (type->ref->c >= 0 && index >= type->ref->c)
6046 tcc_error("index too large");
6047 type = pointed_type(type);
6048 c += index * type_size(type, &align);
6049 } else {
6050 f = *cur_field;
6051 if (!f)
6052 tcc_error("too many field init");
6053 /* XXX: fix this mess by using explicit storage field */
6054 type1 = f->type;
6055 type1.t |= (type->t & ~VT_TYPE);
6056 type = &type1;
6057 c += f->c;
6060 decl_initializer(type, sec, c, 0, size_only);
6062 /* XXX: make it more general */
6063 if (!size_only && nb_elems > 1) {
6064 unsigned long c_end;
6065 uint8_t *src, *dst;
6066 int i;
6068 if (!sec) {
6069 vset(type, VT_LOCAL|VT_LVAL, c);
6070 for (i = 1; i < nb_elems; i++) {
6071 vset(type, VT_LOCAL|VT_LVAL, c + elem_size * i);
6072 vswap();
6073 vstore();
6075 vpop();
6076 } else {
6077 c_end = c + nb_elems * elem_size;
6078 if (c_end > sec->data_allocated)
6079 section_realloc(sec, c_end);
6080 src = sec->data + c;
6081 dst = src;
6082 for(i = 1; i < nb_elems; i++) {
6083 dst += elem_size;
6084 memcpy(dst, src, elem_size);
6090 /* store a value or an expression directly in global data or in local array */
6091 static void init_putv(CType *type, Section *sec, unsigned long c)
6093 int bt, bit_pos, bit_size;
6094 void *ptr;
6095 unsigned long long bit_mask;
6096 CType dtype;
6098 dtype = *type;
6099 dtype.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
6101 if (sec) {
6102 int size, align;
6103 /* XXX: not portable */
6104 /* XXX: generate error if incorrect relocation */
6105 gen_assign_cast(&dtype);
6106 bt = type->t & VT_BTYPE;
6107 size = type_size(type, &align);
6108 if (c + size > sec->data_allocated) {
6109 section_realloc(sec, c + size);
6111 ptr = sec->data + c;
6112 /* XXX: make code faster ? */
6113 if (!(type->t & VT_BITFIELD)) {
6114 bit_pos = 0;
6115 bit_size = PTR_SIZE * 8;
6116 bit_mask = -1LL;
6117 } else {
6118 bit_pos = (vtop->type.t >> VT_STRUCT_SHIFT) & 0x3f;
6119 bit_size = (vtop->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
6120 bit_mask = (1LL << bit_size) - 1;
6122 if ((vtop->r & (VT_SYM|VT_CONST)) == (VT_SYM|VT_CONST) &&
6123 vtop->sym->v >= SYM_FIRST_ANOM &&
6124 /* XXX This rejects compound literals like
6125 '(void *){ptr}'. The problem is that '&sym' is
6126 represented the same way, which would be ruled out
6127 by the SYM_FIRST_ANOM check above, but also '"string"'
6128 in 'char *p = "string"' is represented the same
6129 way, with the type being VT_PTR and the symbol being an
6130 anonymous one. That is, there's no difference in vtop
6131 between '(void *){x}' and '&(void *){x}'. Ignore
6132 pointer typed entities here. Hopefully no real code
6133 will ever use compound literals with a scalar type. */
6134 (vtop->type.t & VT_BTYPE) != VT_PTR) {
6135 /* These come from compound literals, memcpy stuff over. */
6136 Section *ssec;
6137 ElfW(Sym) *esym;
6138 ElfW_Rel *rel;
6139 esym = &((ElfW(Sym) *)symtab_section->data)[vtop->sym->c];
6140 ssec = tcc_state->sections[esym->st_shndx];
6141 memmove (ptr, ssec->data + esym->st_value, size);
6142 if (ssec->reloc) {
6143 /* We need to copy over all memory contents, and that
6144 includes relocations. Use the fact that relocs are
6145 created in order, so look from the end of the relocs
6146 until we hit one before the copied region. */
6147 int num_relocs = ssec->reloc->data_offset / sizeof(*rel);
6148 rel = (ElfW_Rel*)(ssec->reloc->data + ssec->reloc->data_offset);
6149 while (num_relocs--) {
6150 rel--;
6151 if (rel->r_offset >= esym->st_value + size)
6152 continue;
6153 if (rel->r_offset < esym->st_value)
6154 break;
6155 /* Note: if the same fields are initialized multiple
6156 times (possible with designators) then we may
6157 add multiple relocations for the same offset here.
6158 That would lead to wrong code; the last reloc needs
6159 to win. We clean this up later, after the whole
6160 initializer is parsed. */
6161 put_elf_reloca(symtab_section, sec,
6162 c + rel->r_offset - esym->st_value,
6163 ELFW(R_TYPE)(rel->r_info),
6164 ELFW(R_SYM)(rel->r_info),
6165 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
6166 rel->r_addend
6167 #else
6169 #endif
6173 } else {
6174 if ((vtop->r & VT_SYM) &&
6175 (bt == VT_BYTE ||
6176 bt == VT_SHORT ||
6177 bt == VT_DOUBLE ||
6178 bt == VT_LDOUBLE ||
6179 #if PTR_SIZE == 8
6180 (bt == VT_LLONG && bit_size != 64) ||
6181 bt == VT_INT
6182 #else
6183 bt == VT_LLONG ||
6184 (bt == VT_INT && bit_size != 32)
6185 #endif
6187 tcc_error("initializer element is not computable at load time");
6188 switch(bt) {
6189 /* XXX: when cross-compiling we assume that each type has the
6190 same representation on host and target, which is likely to
6191 be wrong in the case of long double */
6192 case VT_BOOL:
6193 vtop->c.i = (vtop->c.i != 0);
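/* fall through */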
6194 case VT_BYTE:
6195 *(char *)ptr |= (vtop->c.i & bit_mask) << bit_pos;
6196 break;
6197 case VT_SHORT:
6198 *(short *)ptr |= (vtop->c.i & bit_mask) << bit_pos;
6199 break;
6200 case VT_DOUBLE:
6201 *(double *)ptr = vtop->c.d;
6202 break;
6203 case VT_LDOUBLE:
6204 if (sizeof(long double) == LDOUBLE_SIZE)
6205 *(long double *)ptr = vtop->c.ld;
6206 else if (sizeof(double) == LDOUBLE_SIZE)
6207 *(double *)ptr = vtop->c.ld;
6208 else
6209 tcc_error("can't cross compile long double constants");
6210 break;
6211 #if PTR_SIZE != 8
6212 case VT_LLONG:
6213 *(long long *)ptr |= (vtop->c.i & bit_mask) << bit_pos;
6214 break;
6215 #else
6216 case VT_LLONG:
6217 #endif
6218 case VT_PTR:
6220 addr_t val = (vtop->c.i & bit_mask) << bit_pos;
6221 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
6222 if (vtop->r & VT_SYM)
6223 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
6224 else
6225 *(addr_t *)ptr |= val;
6226 #else
6227 if (vtop->r & VT_SYM)
6228 greloc(sec, vtop->sym, c, R_DATA_PTR);
6229 *(addr_t *)ptr |= val;
6230 #endif
6231 break;
6233 default:
6235 int val = (vtop->c.i & bit_mask) << bit_pos;
6236 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
6237 if (vtop->r & VT_SYM)
6238 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
6239 else
6240 *(int *)ptr |= val;
6241 #else
6242 if (vtop->r & VT_SYM)
6243 greloc(sec, vtop->sym, c, R_DATA_PTR);
6244 *(int *)ptr |= val;
6245 #endif
6246 break;
6250 vtop--;
6251 } else {
6252 vset(&dtype, VT_LOCAL|VT_LVAL, c);
6253 vswap();
6254 vstore();
6255 vpop();
6259 /* put zeros for variable based init */
6260 static void init_putz(Section *sec, unsigned long c, int size)
6262 if (sec) {
6263 /* nothing to do because globals are already set to zero */
6264 } else {
6265 vpush_global_sym(&func_old_type, TOK_memset);
6266 vseti(VT_LOCAL, c);
6267 #ifdef TCC_TARGET_ARM
6268 vpushs(size);
6269 vpushi(0);
6270 #else
6271 vpushi(0);
6272 vpushs(size);
6273 #endif
6274 gfunc_call(3);
6278 /* 't' contains the type and storage info. 'c' is the offset of the
6279 object in section 'sec'. If 'sec' is NULL, it means stack based
6280 allocation. 'first' is true if array '{' must be read (multi
6281 dimension implicit array init handling). 'size_only' is true if
6282 size only evaluation is wanted (only for arrays). */
6283 static void decl_initializer(CType *type, Section *sec, unsigned long c,
6284 int first, int size_only)
6286 int index, array_length, n, no_oblock, nb, parlevel, parlevel1, i;
6287 int size1, align1;
6288 int have_elem;
6289 Sym *s, *f;
6290 Sym indexsym;
6291 CType *t1;
6293 /* If we currently are at an '}' or ',' we have read an initializer
6294 element in one of our callers, and not yet consumed it. */
6295 have_elem = tok == '}' || tok == ',';
6296 if (!have_elem && tok != '{' &&
6297 /* In case of strings we have special handling for arrays, so
6298 don't consume them as initializer value (which would commit them
6299 to some anonymous symbol). */
6300 tok != TOK_LSTR && tok != TOK_STR &&
6301 !size_only) {
6302 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
6303 have_elem = 1;
6306 if (have_elem &&
6307 !(type->t & VT_ARRAY) &&
6308 /* Use i_c_parameter_t to strip toplevel qualifiers.
6309 The source type might have VT_CONSTANT set, which is
6310 of course assignable to non-const elements. */
6311 is_compatible_parameter_types(type, &vtop->type)) {
6312 init_putv(type, sec, c);
6313 } else if (type->t & VT_ARRAY) {
6314 s = type->ref;
6315 n = s->c;
6316 array_length = 0;
6317 t1 = pointed_type(type);
6318 size1 = type_size(t1, &align1);
6320 no_oblock = 1;
6321 if ((first && tok != TOK_LSTR && tok != TOK_STR) ||
6322 tok == '{') {
6323 if (tok != '{')
6324 tcc_error("character array initializer must be a literal,"
6325 " optionally enclosed in braces");
6326 skip('{');
6327 no_oblock = 0;
6330 /* only parse strings here if correct type (otherwise: handle
6331 them as ((w)char *) expressions) */
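/* Illustrative example (not part of tcc itself): initializers handled by
   the string case below:
       char c[] = "hi";     // 3 bytes including the trailing 0
       char d[2] = "hi";    // no room for the trailing 0: valid, no warning
   Wide strings (L"...") follow the same path with nwchar_t elements. */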
6332 if ((tok == TOK_LSTR &&
6333 #ifdef TCC_TARGET_PE
6334 (t1->t & VT_BTYPE) == VT_SHORT && (t1->t & VT_UNSIGNED)
6335 #else
6336 (t1->t & VT_BTYPE) == VT_INT
6337 #endif
6338 ) || (tok == TOK_STR && (t1->t & VT_BTYPE) == VT_BYTE)) {
6339 while (tok == TOK_STR || tok == TOK_LSTR) {
6340 int cstr_len, ch;
6342 /* compute maximum number of chars wanted */
6343 if (tok == TOK_STR)
6344 cstr_len = tokc.str.size;
6345 else
6346 cstr_len = tokc.str.size / sizeof(nwchar_t);
6347 cstr_len--;
6348 nb = cstr_len;
6349 if (n >= 0 && nb > (n - array_length))
6350 nb = n - array_length;
6351 if (!size_only) {
6352 if (cstr_len > nb)
6353 tcc_warning("initializer-string for array is too long");
6354 /* in order to go faster for the common case (char
6355 string in a global variable), we handle it
6356 specifically */
6357 if (sec && tok == TOK_STR && size1 == 1) {
6358 memcpy(sec->data + c + array_length, tokc.str.data, nb);
6359 } else {
6360 for(i=0;i<nb;i++) {
6361 if (tok == TOK_STR)
6362 ch = ((unsigned char *)tokc.str.data)[i];
6363 else
6364 ch = ((nwchar_t *)tokc.str.data)[i];
6365 vpushi(ch);
6366 init_putv(t1, sec, c + (array_length + i) * size1);
6370 array_length += nb;
6371 next();
6373 /* only add trailing zero if enough storage (no
6374 warning in this case since it is standard) */
6375 if (n < 0 || array_length < n) {
6376 if (!size_only) {
6377 vpushi(0);
6378 init_putv(t1, sec, c + (array_length * size1));
6380 array_length++;
6382 } else {
6383 indexsym.c = 0;
6384 indexsym.r = 0;
6385 f = &indexsym;
6387 do_init_list:
6388 while (tok != '}' || have_elem) {
6389 decl_designator(type, sec, c, &f, size_only);
6390 have_elem = 0;
6391 index = f->c;
6392 /* must put zero in holes (note that doing it that way
6393 ensures that it even works with designators) */
6394 if (!size_only && array_length < index) {
6395 init_putz(sec, c + array_length * size1,
6396 (index - array_length) * size1);
6398 if (type->t & VT_ARRAY) {
6399 index = indexsym.c = ++indexsym.r;
6400 } else {
6401 index = index + type_size(&f->type, &align1);
6402 if (s->type.t == TOK_UNION)
6403 f = NULL;
6404 else
6405 f = f->next;
6407 if (index > array_length)
6408 array_length = index;
6410 if (type->t & VT_ARRAY) {
6411 /* special test for multi dimensional arrays (may not
6412 be strictly correct if designators are used at the
6413 same time) */
6414 if (no_oblock && index >= n)
6415 break;
6416 } else {
6417 if (no_oblock && f == NULL)
6418 break;
6420 if (tok == '}')
6421 break;
6422 skip(',');
6425 /* put zeros at the end */
6426 if (!size_only && array_length < n) {
6427 init_putz(sec, c + array_length * size1,
6428 (n - array_length) * size1);
6430 if (!no_oblock)
6431 skip('}');
6432 /* patch type size if needed, which happens only for array types */
6433 if (n < 0)
6434 s->c = array_length;
6435 } else if ((type->t & VT_BTYPE) == VT_STRUCT) {
6436 size1 = 1;
6437 no_oblock = 1;
6438 if (first || tok == '{') {
6439 skip('{');
6440 no_oblock = 0;
6442 s = type->ref;
6443 f = s->next;
6444 array_length = 0;
6445 n = s->c;
6446 goto do_init_list;
6447 } else if (tok == '{') {
6448 next();
6449 decl_initializer(type, sec, c, first, size_only);
6450 skip('}');
6451 } else if (size_only) {
6452 /* If we supported only ISO C we wouldn't have to accept calling
6453 this on anything other than an array with size_only==1 (and even then
6454 only on the outermost level, so no recursion would be needed),
6455 because initializing a flexible array member isn't supported.
6456 But GNU C supports it, so we need to recurse even into
6457 subfields of structs and arrays when size_only is set. */
6458 /* just skip expression */
6459 parlevel = parlevel1 = 0;
6460 while ((parlevel > 0 || parlevel1 > 0 ||
6461 (tok != '}' && tok != ',')) && tok != -1) {
6462 if (tok == '(')
6463 parlevel++;
6464 else if (tok == ')') {
6465 if (parlevel == 0 && parlevel1 == 0)
6466 break;
6467 parlevel--;
6469 else if (tok == '{')
6470 parlevel1++;
6471 else if (tok == '}') {
6472 if (parlevel == 0 && parlevel1 == 0)
6473 break;
6474 parlevel1--;
6476 next();
6478 } else {
6479 if (!have_elem) {
6480 /* This should happen only when we haven't parsed
6481 the init element above for fear of committing a
6482 string constant to memory too early. */
6483 if (tok != TOK_STR && tok != TOK_LSTR)
6484 expect("string constant");
6485 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
6487 init_putv(type, sec, c);
6491 /* parse an initializer for type 't' if 'has_init' is non zero, and
6492 allocate space in local or global data space ('r' is either
6493 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
6494 variable 'v' of scope 'scope' is declared before initializers
6495 are parsed. If 'v' is zero, then a reference to the new object
6496 is put in the value stack. If 'has_init' is 2, a special parsing
6497 is done to handle string constants. */
6498 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r,
6499 int has_init, int v, int scope)
6501 int size, align, addr, data_offset;
6502 int level;
6503 ParseState saved_parse_state = {0};
6504 TokenString *init_str = NULL;
6505 Section *sec;
6506 Sym *flexible_array;
6508 flexible_array = NULL;
6509 if ((type->t & VT_BTYPE) == VT_STRUCT) {
6510 Sym *field = type->ref->next;
6511 if (field) {
6512 while (field->next)
6513 field = field->next;
6514 if (field->type.t & VT_ARRAY && field->type.ref->c < 0)
6515 flexible_array = field;
6519 size = type_size(type, &align);
6520 /* If the size is unknown, we must compute it before
6521 evaluating the initializers because
6522 initializers can generate global data too
6523 (e.g. string pointers or ISOC99 compound
6524 literals). It also simplifies the handling of
6525 local initializers */
6526 if (size < 0 || (flexible_array && has_init)) {
6527 if (!has_init)
6528 tcc_error("unknown type size");
6529 /* get all init string */
6530 init_str = tok_str_alloc();
6531 if (has_init == 2) {
6532 /* only get strings */
6533 while (tok == TOK_STR || tok == TOK_LSTR) {
6534 tok_str_add_tok(init_str);
6535 next();
6537 } else {
6538 level = 0;
6539 while (level > 0 || (tok != ',' && tok != ';')) {
6540 if (tok < 0)
6541 tcc_error("unexpected end of file in initializer");
6542 tok_str_add_tok(init_str);
6543 if (tok == '{')
6544 level++;
6545 else if (tok == '}') {
6546 level--;
6547 if (level <= 0) {
6548 next();
6549 break;
6552 next();
6555 tok_str_add(init_str, -1);
6556 tok_str_add(init_str, 0);
6558 /* compute size */
6559 save_parse_state(&saved_parse_state);
6561 begin_macro(init_str, 1);
6562 next();
6563 decl_initializer(type, NULL, 0, 1, 1);
6564 /* prepare second initializer parsing */
6565 macro_ptr = init_str->str;
6566 next();
6568 /* if still unknown size, error */
6569 size = type_size(type, &align);
6570 if (size < 0)
6571 tcc_error("unknown type size");
6573 /* If there's a flex member and it was used in the initializer
6574 adjust size. */
6575 if (flexible_array &&
6576 flexible_array->type.ref->c > 0)
6577 size += flexible_array->type.ref->c
6578 * pointed_size(&flexible_array->type);
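/* Illustrative example (not part of tcc itself): a C99 flexible array
   member initialized via the GNU extension, whose initializer enlarges
   the object as accounted for above:
       struct V { int n; int data[]; };
       struct V v = { 2, { 10, 20 } };   // size grows by 2 * sizeof(int) */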
6579 /* take into account specified alignment if bigger */
6580 if (ad->a.aligned) {
6581 int speca = 1 << (ad->a.aligned - 1);
6582 if (speca > align)
6583 align = speca;
6584 } else if (ad->a.packed) {
6585 align = 1;
6587 if ((r & VT_VALMASK) == VT_LOCAL) {
6588 sec = NULL;
6589 #ifdef CONFIG_TCC_BCHECK
6590 if (tcc_state->do_bounds_check && (type->t & VT_ARRAY)) {
6591 loc--;
6593 #endif
6594 loc = (loc - size) & -align;
6595 addr = loc;
6596 #ifdef CONFIG_TCC_BCHECK
6597 /* handles bounds */
6598 /* XXX: currently, since we do only one pass, we cannot track
6599 '&' operators, so we add only arrays */
6600 if (tcc_state->do_bounds_check && (type->t & VT_ARRAY)) {
6601 addr_t *bounds_ptr;
6602 /* add padding between regions */
6603 loc--;
6604 /* then add local bound info */
6605 bounds_ptr = section_ptr_add(lbounds_section, 2 * sizeof(addr_t));
6606 bounds_ptr[0] = addr;
6607 bounds_ptr[1] = size;
6609 #endif
6610 if (v) {
6611 /* local variable */
6612 #ifdef CONFIG_TCC_ASM
6613 if (ad->asm_label) {
6614 int reg = asm_parse_regvar(ad->asm_label);
6615 if (reg >= 0)
6616 r = (r & ~VT_VALMASK) | reg;
6618 #endif
6619 sym_push(v, type, r, addr);
6620 } else {
6621 /* push local reference */
6622 vset(type, r, addr);
6624 } else {
6625 Sym *sym;
6627 sym = NULL;
6628 if (v && scope == VT_CONST) {
6629 /* see if the symbol was already defined */
6630 sym = sym_find(v);
6631 if (sym) {
6632 if (!is_compatible_types(&sym->type, type))
6633 tcc_error("incompatible types for redefinition of '%s'",
6634 get_tok_str(v, NULL));
6635 if (sym->type.t & VT_EXTERN) {
6636 /* if the variable is extern, it was not allocated */
6637 sym->type.t &= ~VT_EXTERN;
6638 /* set array size if it was omitted in extern
6639 declaration */
6640 if ((sym->type.t & VT_ARRAY) &&
6641 sym->type.ref->c < 0 &&
6642 type->ref->c >= 0)
6643 sym->type.ref->c = type->ref->c;
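/* Illustrative example (hypothetical user code): the definition supplies
 * the size omitted by an earlier extern declaration:
 *     extern int tab[];   // size unknown, ref->c < 0
 *     int tab[8];         // definition fixes ref->c to 8
 */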
6644 } else {
6645 /* we accept several definitions of the same
6646 global variable. this is tricky, because we
6647 must play with the SHN_COMMON type of the symbol */
6648 /* XXX: should check if the variable was already
6649 initialized. It is incorrect to initialize it
6650 twice */
6651 /* no init data, we won't add more to the symbol */
6652 if (!has_init)
6653 goto no_alloc;
6654 }
6655 }
6656 }
6658 /* allocate symbol in corresponding section */
6659 sec = ad->section;
6660 if (!sec) {
6661 if (has_init)
6662 sec = data_section;
6663 else if (tcc_state->nocommon)
6664 sec = bss_section;
6665 }
6666 if (sec) {
6667 data_offset = sec->data_offset;
6668 data_offset = (data_offset + align - 1) & -align;
6669 addr = data_offset;
6670 /* very important to increment the section offset now, because
6671 parsing the initializer may itself allocate data in this section */
6672 data_offset += size;
6673 #ifdef CONFIG_TCC_BCHECK
6674 /* add padding if bound check */
6675 if (tcc_state->do_bounds_check)
6676 data_offset++;
6677 #endif
6678 sec->data_offset = data_offset;
6679 /* allocate section space to put the data */
6680 if (sec->sh_type != SHT_NOBITS &&
6681 data_offset > sec->data_allocated)
6682 section_realloc(sec, data_offset);
6683 /* align section if needed */
6684 if (align > sec->sh_addralign)
6685 sec->sh_addralign = align;
6686 } else {
6687 addr = 0; /* avoid warning */
6688 }
6690 if (v) {
6691 if (scope != VT_CONST || !sym) {
6692 sym = sym_push(v, type, r | VT_SYM, 0);
6693 sym->asm_label = ad->asm_label;
6694 }
6695 /* update symbol definition */
6696 if (sec) {
6697 put_extern_sym(sym, sec, addr, size);
6698 } else {
6699 ElfW(Sym) *esym;
6700 /* put a common area */
6701 put_extern_sym(sym, NULL, align, size);
6702 /* XXX: find a nicer way */
6703 esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
6704 esym->st_shndx = SHN_COMMON;
6705 }
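/* Illustrative example: unless tcc_state->nocommon is set, tentative
 * definitions such as
 *     int counter;   // in file1.c
 *     int counter;   // in file2.c
 * become SHN_COMMON symbols that the linker merges into one object. */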
6706 } else {
6707 /* push global reference */
6708 sym = get_sym_ref(type, sec, addr, size);
6709 vpushsym(type, sym);
6710 }
6711 /* patch symbol weakness */
6712 if (type->t & VT_WEAK)
6713 weaken_symbol(sym);
6714 apply_visibility(sym, type);
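/* Illustrative example (hypothetical user code): a declaration such as
 *     void hook(void) __attribute__((weak));
 * carries VT_WEAK and has its symbol binding weakened just above. */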
6715 #ifdef CONFIG_TCC_BCHECK
6716 /* handles bounds now because the symbol must be defined
6717 before the relocation can refer to it */
6718 if (tcc_state->do_bounds_check) {
6719 addr_t *bounds_ptr;
6721 greloc(bounds_section, sym, bounds_section->data_offset, R_DATA_PTR);
6722 /* then add global bound info */
6723 bounds_ptr = section_ptr_add(bounds_section, 2 * sizeof(addr_t));
6724 bounds_ptr[0] = 0; /* relocated */
6725 bounds_ptr[1] = size;
6726 }
6727 #endif
6728 }
6729 if (type->t & VT_VLA) {
6730 int a;
6732 /* save current stack pointer */
6733 if (vlas_in_scope == 0) {
6734 if (vla_sp_root_loc == -1)
6735 vla_sp_root_loc = (loc -= PTR_SIZE);
6736 gen_vla_sp_save(vla_sp_root_loc);
6737 }
6739 vla_runtime_type_size(type, &a);
6740 gen_vla_alloc(type, a);
6741 gen_vla_sp_save(addr);
6742 vla_sp_loc = addr;
6743 vlas_in_scope++;
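/* Illustrative example (hypothetical user code): a variable length array
 * takes this path; the stack pointer is saved so it can be restored when
 * the VLA goes out of scope:
 *     void f(int n) { int buf[n]; buf[0] = 0; }
 */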
6744 } else if (has_init) {
6745 size_t oldreloc_offset = 0;
6746 if (sec && sec->reloc)
6747 oldreloc_offset = sec->reloc->data_offset;
6748 decl_initializer(type, sec, addr, 1, 0);
6749 if (sec && sec->reloc)
6750 squeeze_multi_relocs(sec, oldreloc_offset);
6751 /* patch flexible array member size back to -1, */
6752 /* for possible subsequent similar declarations */
6753 if (flexible_array)
6754 flexible_array->type.ref->c = -1;
6755 }
6756 no_alloc: ;
6757 /* restore parse state if needed */
6758 if (init_str) {
6759 end_macro();
6760 restore_parse_state(&saved_parse_state);
6761 }
6762 }
6764 static void put_func_debug(Sym *sym)
6765 {
6766 char buf[512];
6768 /* stabs info */
6769 /* XXX: we put here a dummy type */
6770 snprintf(buf, sizeof(buf), "%s:%c1",
6771 funcname, sym->type.t & VT_STATIC ? 'f' : 'F');
6772 put_stabs_r(buf, N_FUN, 0, file->line_num, 0,
6773 cur_text_section, sym->c);
6774 /* gdb wants a line number entry at the start of the function */
6775 put_stabn(N_SLINE, 0, file->line_num, 0);
6776 last_ind = 0;
6777 last_line_num = 0;
6778 }
6780 /* parse an old style function declaration list */
6781 /* XXX: check multiple parameters */
6782 static void func_decl_list(Sym *func_sym)
6783 {
6784 AttributeDef ad;
6785 int v;
6786 Sym *s;
6787 CType btype, type;
6789 /* parse each declaration */
6790 while (tok != '{' && tok != ';' && tok != ',' && tok != TOK_EOF &&
6791 tok != TOK_ASM1 && tok != TOK_ASM2 && tok != TOK_ASM3) {
6792 if (!parse_btype(&btype, &ad))
6793 expect("declaration list");
6794 if (((btype.t & VT_BTYPE) == VT_ENUM ||
6795 (btype.t & VT_BTYPE) == VT_STRUCT) &&
6796 tok == ';') {
6797 /* we accept no variable after */
6798 } else {
6799 for(;;) {
6800 type = btype;
6801 type_decl(&type, &ad, &v, TYPE_DIRECT);
6802 /* find parameter in function parameter list */
6803 s = func_sym->next;
6804 while (s != NULL) {
6805 if ((s->v & ~SYM_FIELD) == v)
6806 goto found;
6807 s = s->next;
6808 }
6809 tcc_error("declaration for parameter '%s' but no such parameter",
6810 get_tok_str(v, NULL));
6811 found:
6812 /* check that no storage specifier except 'register' was given */
6813 if (type.t & VT_STORAGE)
6814 tcc_error("storage class specified for '%s'", get_tok_str(v, NULL));
6815 convert_parameter_type(&type);
6816 /* we can add the type (NOTE: it could be local to the function) */
6817 s->type = type;
6818 /* accept other parameters */
6819 if (tok == ',')
6820 next();
6821 else
6822 break;
6823 }
6824 }
6825 skip(';');
6826 }
6827 }
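/* Illustrative example (hypothetical user code): an old-style definition
 * whose parameter types come from the declaration list parsed above:
 *     int add(a, b)
 *         int a;
 *         int b;
 *     { return a + b; }
 */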
6829 /* parse a function defined by symbol 'sym' and generate its code in
6830 'cur_text_section' */
6831 static void gen_function(Sym *sym)
6832 {
6833 nocode_wanted = 0;
6834 ind = cur_text_section->data_offset;
6835 /* NOTE: we patch the symbol size later */
6836 put_extern_sym(sym, cur_text_section, ind, 0);
6837 funcname = get_tok_str(sym->v, NULL);
6838 func_ind = ind;
6839 /* Initialize VLA state */
6840 vla_sp_loc = -1;
6841 vla_sp_root_loc = -1;
6842 /* put debug symbol */
6843 if (tcc_state->do_debug)
6844 put_func_debug(sym);
6846 /* push a dummy symbol to enable local sym storage */
6847 sym_push2(&local_stack, SYM_FIELD, 0, 0);
6848 local_scope = 1; /* for function parameters */
6849 gfunc_prolog(&sym->type);
6850 local_scope = 0;
6852 rsym = 0;
6853 block(NULL, NULL, 0);
6854 nocode_wanted = 0;
6855 gsym(rsym);
6856 gfunc_epilog();
6857 cur_text_section->data_offset = ind;
6858 label_pop(&global_label_stack, NULL);
6859 /* reset local stack */
6860 local_scope = 0;
6861 sym_pop(&local_stack, NULL, 0);
6862 /* end of function */
6863 /* patch symbol size */
6864 ((ElfW(Sym) *)symtab_section->data)[sym->c].st_size =
6865 ind - func_ind;
6866 /* patch symbol weakness (this definition overrules any prototype) */
6867 if (sym->type.t & VT_WEAK)
6868 weaken_symbol(sym);
6869 apply_visibility(sym, &sym->type);
6870 if (tcc_state->do_debug) {
6871 put_stabn(N_FUN, 0, 0, ind - func_ind);
6872 }
6873 /* It's better to crash than to generate wrong code */
6874 cur_text_section = NULL;
6875 funcname = ""; /* for safety */
6876 func_vt.t = VT_VOID; /* for safety */
6877 func_var = 0; /* for safety */
6878 ind = 0; /* for safety */
6879 nocode_wanted = 1;
6880 check_vstack();
6881 }
6883 static void gen_inline_functions(TCCState *s)
6884 {
6885 Sym *sym;
6886 int inline_generated, i, ln;
6887 struct InlineFunc *fn;
6889 ln = file->line_num;
6890 /* iterate while inline functions are referenced */
6891 for(;;) {
6892 inline_generated = 0;
6893 for (i = 0; i < s->nb_inline_fns; ++i) {
6894 fn = s->inline_fns[i];
6895 sym = fn->sym;
6896 if (sym && sym->c) {
6897 /* the function was used: generate its code and
6898 convert it to a normal function */
6899 fn->sym = NULL;
6900 if (file)
6901 pstrcpy(file->filename, sizeof file->filename, fn->filename);
6902 sym->r = VT_SYM | VT_CONST;
6903 sym->type.t &= ~VT_INLINE;
6905 begin_macro(fn->func_str, 1);
6906 next();
6907 cur_text_section = text_section;
6908 gen_function(sym);
6909 end_macro();
6911 inline_generated = 1;
6912 }
6913 }
6914 if (!inline_generated)
6915 break;
6916 }
6917 file->line_num = ln;
6918 }
6920 ST_FUNC void free_inline_functions(TCCState *s)
6921 {
6922 int i;
6923 /* free tokens of unused inline functions */
6924 for (i = 0; i < s->nb_inline_fns; ++i) {
6925 struct InlineFunc *fn = s->inline_fns[i];
6926 if (fn->sym)
6927 tok_str_free(fn->func_str);
6928 }
6929 dynarray_reset(&s->inline_fns, &s->nb_inline_fns);
6930 }
6932 /* 'l' is VT_LOCAL or VT_CONST to define default storage type */
6933 static int decl0(int l, int is_for_loop_init)
6934 {
6935 int v, has_init, r;
6936 CType type, btype;
6937 Sym *sym;
6938 AttributeDef ad;
6940 while (1) {
6941 if (!parse_btype(&btype, &ad)) {
6942 if (is_for_loop_init)
6943 return 0;
6944 /* skip redundant ';' */
6945 /* XXX: find more elegant solution */
6946 if (tok == ';') {
6947 next();
6948 continue;
6949 }
6950 if (l == VT_CONST &&
6951 (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
6952 /* global asm block */
6953 asm_global_instr();
6954 continue;
6955 }
6956 /* special test for old K&R protos without explicit int
6957 type. Only accepted when defining global data */
6958 if (l == VT_LOCAL || tok < TOK_UIDENT)
6959 break;
6960 btype.t = VT_INT;
6961 }
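/* Illustrative example (hypothetical user code): an old-style file-scope
 * definition without an explicit type gets the implicit 'int' assigned
 * just above:
 *     main() { return 0; }
 */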
6962 if (((btype.t & VT_BTYPE) == VT_ENUM ||
6963 (btype.t & VT_BTYPE) == VT_STRUCT) &&
6964 tok == ';') {
6965 if ((btype.t & VT_BTYPE) == VT_STRUCT) {
6966 int v = btype.ref->v;
6967 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) >= SYM_FIRST_ANOM)
6968 tcc_warning("unnamed struct/union that defines no instances");
6969 }
6970 next();
6971 continue;
6972 }
6973 while (1) { /* iterate through each declaration */
6974 type = btype;
6975 /* If the base type itself was an array type of unspecified
6976 size (like in 'typedef int arr[]; arr x = {1};') then
6977 we will overwrite the unknown size by the real one for
6978 this decl. We need to unshare the ref symbol holding
6979 that size. */
6980 if ((type.t & VT_ARRAY) && type.ref->c < 0) {
6981 type.ref = sym_push(SYM_FIELD, &type.ref->type, 0, type.ref->c);
6982 }
6983 type_decl(&type, &ad, &v, TYPE_DIRECT);
6984 #if 0
6985 {
6986 char buf[500];
6987 type_to_str(buf, sizeof(buf), t, get_tok_str(v, NULL));
6988 printf("type = '%s'\n", buf);
6989 }
6990 #endif
6991 if ((type.t & VT_BTYPE) == VT_FUNC) {
6992 if ((type.t & VT_STATIC) && (l == VT_LOCAL)) {
6993 tcc_error("function without file scope cannot be static");
6994 }
6995 /* if old style function prototype, we accept a
6996 declaration list */
6997 sym = type.ref;
6998 if (sym->c == FUNC_OLD)
6999 func_decl_list(sym);
7000 }
7002 if (gnu_ext && (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
7003 ad.asm_label = asm_label_instr();
7004 /* parse one last attribute list, after asm label */
7005 parse_attribute(&ad);
7006 if (tok == '{')
7007 expect(";");
7008 }
7010 if (ad.a.weak)
7011 type.t |= VT_WEAK;
7012 #ifdef TCC_TARGET_PE
7013 if (ad.a.func_import)
7014 type.t |= VT_IMPORT;
7015 if (ad.a.func_export)
7016 type.t |= VT_EXPORT;
7017 #endif
7018 type.t |= ad.a.visibility << VT_VIS_SHIFT;
7020 if (tok == '{') {
7021 if (l == VT_LOCAL)
7022 tcc_error("cannot use local functions");
7023 if ((type.t & VT_BTYPE) != VT_FUNC)
7024 expect("function definition");
7026 /* reject abstract declarators in function definition */
7027 sym = type.ref;
7028 while ((sym = sym->next) != NULL)
7029 if (!(sym->v & ~SYM_FIELD))
7030 expect("identifier");
7032 /* XXX: cannot do better now: convert extern inline to static inline */
7033 if ((type.t & (VT_EXTERN | VT_INLINE)) == (VT_EXTERN | VT_INLINE))
7034 type.t = (type.t & ~VT_EXTERN) | VT_STATIC;
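/* Illustrative example (hypothetical user code): after the conversion
 * above, a GNU89-style definition such as
 *     extern inline int twice(int x) { return 2 * x; }
 * is compiled as if it had been declared 'static inline'. */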
7036 sym = sym_find(v);
7037 if (sym) {
7038 Sym *ref;
7039 if ((sym->type.t & VT_BTYPE) != VT_FUNC)
7040 goto func_error1;
7042 ref = sym->type.ref;
7043 if (0 == ref->a.func_proto)
7044 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
7046 /* use func_call from prototype if not defined */
7047 if (ref->a.func_call != FUNC_CDECL
7048 && type.ref->a.func_call == FUNC_CDECL)
7049 type.ref->a.func_call = ref->a.func_call;
7051 /* use export from prototype */
7052 if (ref->a.func_export)
7053 type.ref->a.func_export = 1;
7055 /* use static from prototype */
7056 if (sym->type.t & VT_STATIC)
7057 type.t = (type.t & ~VT_EXTERN) | VT_STATIC;
7059 /* If the definition has no visibility use the
7060 one from prototype. */
7061 if (! (type.t & VT_VIS_MASK))
7062 type.t |= sym->type.t & VT_VIS_MASK;
7064 if (!is_compatible_types(&sym->type, &type)) {
7065 func_error1:
7066 tcc_error("incompatible types for redefinition of '%s'",
7067 get_tok_str(v, NULL));
7068 }
7069 type.ref->a.func_proto = 0;
7070 /* if symbol is already defined, then put complete type */
7071 sym->type = type;
7072 } else {
7073 /* put function symbol */
7074 sym = global_identifier_push(v, type.t, 0);
7075 sym->type.ref = type.ref;
7076 }
7078 /* static inline functions are just recorded as a kind
7079 of macro. Their code will be emitted at the end of
7080 the compilation unit only if they are used */
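/* Illustrative example (hypothetical user code): a definition such as
 *     static inline int sq(int x) { return x * x; }
 * is only tokenized here; gen_inline_functions() emits its code later,
 * and only if some call site actually referenced it. */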
7081 if ((type.t & (VT_INLINE | VT_STATIC)) ==
7082 (VT_INLINE | VT_STATIC)) {
7083 int block_level;
7084 struct InlineFunc *fn;
7085 const char *filename;
7087 filename = file ? file->filename : "";
7088 fn = tcc_malloc(sizeof *fn + strlen(filename));
7089 strcpy(fn->filename, filename);
7090 fn->sym = sym;
7091 fn->func_str = tok_str_alloc();
7093 block_level = 0;
7094 for(;;) {
7095 int t;
7096 if (tok == TOK_EOF)
7097 tcc_error("unexpected end of file");
7098 tok_str_add_tok(fn->func_str);
7099 t = tok;
7100 next();
7101 if (t == '{') {
7102 block_level++;
7103 } else if (t == '}') {
7104 block_level--;
7105 if (block_level == 0)
7106 break;
7107 }
7108 }
7109 tok_str_add(fn->func_str, -1);
7110 tok_str_add(fn->func_str, 0);
7111 dynarray_add((void ***)&tcc_state->inline_fns, &tcc_state->nb_inline_fns, fn);
7113 } else {
7114 /* compute text section */
7115 cur_text_section = ad.section;
7116 if (!cur_text_section)
7117 cur_text_section = text_section;
7118 sym->r = VT_SYM | VT_CONST;
7119 gen_function(sym);
7120 }
7121 break;
7122 } else {
7123 if (btype.t & VT_TYPEDEF) {
7124 /* save typedefed type */
7125 /* XXX: test storage specifiers ? */
7126 sym = sym_find(v);
7127 if (sym && sym->scope == local_scope) {
7128 if (!is_compatible_types(&sym->type, &type)
7129 || !(sym->type.t & VT_TYPEDEF))
7130 tcc_error("incompatible redefinition of '%s'",
7131 get_tok_str(v, NULL));
7132 sym->type = type;
7133 } else {
7134 sym = sym_push(v, &type, 0, 0);
7135 }
7136 sym->a = ad.a;
7137 sym->type.t |= VT_TYPEDEF;
7138 } else {
7139 r = 0;
7140 if ((type.t & VT_BTYPE) == VT_FUNC) {
7141 /* external function definition */
7142 /* specific case for func_call attribute */
7143 ad.a.func_proto = 1;
7144 type.ref->a = ad.a;
7145 } else if (!(type.t & VT_ARRAY)) {
7146 /* not lvalue if array */
7147 r |= lvalue_type(type.t);
7148 }
7149 has_init = (tok == '=');
7150 if (has_init && (type.t & VT_VLA))
7151 tcc_error("variable length array cannot be initialized");
7152 if ((btype.t & VT_EXTERN) || ((type.t & VT_BTYPE) == VT_FUNC) ||
7153 ((type.t & VT_ARRAY) && (type.t & VT_STATIC) &&
7154 !has_init && l == VT_CONST && type.ref->c < 0)) {
7155 /* external variable or function */
7156 /* NOTE: as in GCC, uninitialized global static
7157 arrays of unspecified size are treated as
7158 extern */
7159 sym = external_sym(v, &type, r);
7160 sym->asm_label = ad.asm_label;
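/* Illustrative examples (hypothetical user code) of declarations that
 * reach this branch and are recorded via external_sym() without
 * allocating storage here:
 *     extern int x;        // extern variable
 *     int f(void);         // function prototype
 *     static int tab[];    // static array of unknown size, no init
 */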
7162 if (ad.alias_target) {
7163 Section tsec;
7164 ElfW(Sym) *esym;
7165 Sym *alias_target;
7167 alias_target = sym_find(ad.alias_target);
7168 if (!alias_target || !alias_target->c)
7169 tcc_error("unsupported forward __alias__ attribute");
7170 esym = &((ElfW(Sym) *)symtab_section->data)[alias_target->c];
7171 tsec.sh_num = esym->st_shndx;
7172 put_extern_sym2(sym, &tsec, esym->st_value, esym->st_size, 0);
7173 }
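/* Illustrative example (hypothetical user code): an alias resolved
 * against an already defined target symbol:
 *     int impl(void) { return 1; }
 *     int api(void) __attribute__((alias("impl")));
 * A forward alias (target not yet defined) is rejected above. */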
7174 } else {
7175 type.t |= (btype.t & VT_STATIC); /* Retain "static". */
7176 if (type.t & VT_STATIC)
7177 r |= VT_CONST;
7178 else
7179 r |= l;
7180 if (has_init)
7181 next();
7182 decl_initializer_alloc(&type, &ad, r, has_init, v, l);
7183 }
7184 }
7185 if (tok != ',') {
7186 if (is_for_loop_init)
7187 return 1;
7188 skip(';');
7189 break;
7190 }
7191 next();
7192 }
7193 ad.a.aligned = 0;
7194 }
7195 }
7196 return 0;
7197 }
7199 ST_FUNC void decl(int l)
7200 {
7201 decl0(l, 0);
7202 }
7204 /* ------------------------------------------------------------------------- */