x86-64 codegen: avoid allocating VLA of size 0
[tinycc.git] / tccgen.c
1 /*
2 * TCC - Tiny C Compiler
3 *
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 #include "tcc.h"
22 /********************************************************/
23 /* global variables */
25 /* loc : local variable index
26 ind : output code index
27 rsym: return symbol
28 anon_sym: anonymous symbol index */
30 ST_DATA int rsym, anon_sym, ind, loc;
32 ST_DATA Sym *sym_free_first;
33 ST_DATA void **sym_pools;
34 ST_DATA int nb_sym_pools;
36 ST_DATA Sym *global_stack;
37 ST_DATA Sym *local_stack;
38 ST_DATA Sym *define_stack;
39 ST_DATA Sym *global_label_stack;
40 ST_DATA Sym *local_label_stack;
42 static Sym *all_cleanups, *current_cleanups, *pending_gotos;
43 static int ncleanups;
45 static int local_scope;
46 static int in_sizeof;
47 static int section_sym;
49 ST_DATA int vlas_in_scope; /* number of VLAs that are currently in scope */
50 ST_DATA int vla_sp_root_loc; /* vla_sp_loc for SP before any VLAs were pushed */
51 ST_DATA int vla_sp_loc; /* stack location where the stack pointer is saved so it can be restored after VLA allocations modify it */
53 ST_DATA SValue __vstack[1+VSTACK_SIZE], *vtop, *pvtop;
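/* __vstack is the value stack used throughout code generation: vtop points
   at its current top entry, and pvtop records a reference level that
   check_vstack() compares against to detect value stack leaks. */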
55 ST_DATA int const_wanted; /* true if constant wanted */
56 ST_DATA int nocode_wanted; /* no code generation wanted */
57 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
58 #define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */
59 ST_DATA int global_expr; /* true if compound literals must be allocated globally (used during initializer parsing) */
60 ST_DATA CType func_vt; /* current function return type (used by return instruction) */
61 ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
62 ST_DATA int func_vc;
63 ST_DATA int last_line_num, last_ind, func_ind; /* debug last line number and pc */
64 ST_DATA const char *funcname;
65 ST_DATA int g_debug;
67 ST_DATA CType char_pointer_type, func_old_type, int_type, size_type, ptrdiff_type;
69 ST_DATA struct switch_t {
70 struct case_t {
71 int64_t v1, v2;
72 int sym;
73 } **p; int n; /* list of case ranges */
74 int def_sym; /* default symbol */
75 } *cur_switch; /* current switch */
77 #define MAX_TEMP_LOCAL_VARIABLE_NUMBER 0x4
78 /* list of temporary local variables on the stack in the current function */
79 ST_DATA struct temp_local_variable {
80 int location; // offset on the stack (SValue.c.i)
81 short size;
82 short align;
83 } arr_temp_local_vars[MAX_TEMP_LOCAL_VARIABLE_NUMBER];
84 short nb_temp_local_vars;
86 /* ------------------------------------------------------------------------- */
88 static void gen_cast(CType *type);
89 static void gen_cast_s(int t);
90 static inline CType *pointed_type(CType *type);
91 static int is_compatible_types(CType *type1, CType *type2);
92 static int parse_btype(CType *type, AttributeDef *ad);
93 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td);
94 static void parse_expr_type(CType *type);
95 static void init_putv(CType *type, Section *sec, unsigned long c);
96 static void decl_initializer(CType *type, Section *sec, unsigned long c, int flags);
97 static void block(int *bsym, Sym *bcl, int *csym, Sym *ccl, int is_expr);
98 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
99 static void decl(int l);
100 static int decl0(int l, int is_for_loop_init, Sym *);
101 static void expr_eq(void);
102 static void vla_runtime_type_size(CType *type, int *a);
103 static void vla_sp_restore(void);
104 static void vla_sp_restore_root(void);
105 static int is_compatible_unqualified_types(CType *type1, CType *type2);
106 static inline int64_t expr_const64(void);
107 static void vpush64(int ty, unsigned long long v);
108 static void vpush(CType *type);
109 static int gvtst(int inv, int t);
110 static void gen_inline_functions(TCCState *s);
111 static void skip_or_save_block(TokenString **str);
112 static void gv_dup(void);
113 static int get_temp_local_var(int size,int align);
114 static void clear_temp_local_var_list();
117 static void reset_local_scope(void)
119 if (current_cleanups)
120 tcc_error("ICE current_cleanups");
121 sym_pop(&all_cleanups, NULL, 0);
122 local_scope = 0;
125 ST_INLN int is_float(int t)
127 int bt;
128 bt = t & VT_BTYPE;
129 return bt == VT_LDOUBLE || bt == VT_DOUBLE || bt == VT_FLOAT || bt == VT_QFLOAT;
132 /* we use our own 'finite' function to avoid potential problems with
133 non-standard math libs */
134 /* XXX: endianness dependent */
135 ST_FUNC int ieee_finite(double d)
137 int p[4];
138 memcpy(p, &d, sizeof(double));
139 return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
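/* How the bit trick above works (little-endian word order assumed, as the
   XXX note warns): p[1] holds the sign, the 11 exponent bits and the top
   of the mantissa.  OR-ing with 0x800fffff sets every bit except the
   exponent field, so adding 1 overflows past bit 31 only when the
   exponent is all ones (infinity or NaN); the shift therefore yields 1
   for finite values and 0 otherwise. */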
142 /* compiling intel long double natively */
143 #if (defined __i386__ || defined __x86_64__) \
144 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
145 # define TCC_IS_NATIVE_387
146 #endif
148 ST_FUNC void test_lvalue(void)
150 if (!(vtop->r & VT_LVAL))
151 expect("lvalue");
154 ST_FUNC void check_vstack(void)
156 if (pvtop != vtop)
157 tcc_error("internal compiler error: vstack leak (%d)", vtop - pvtop);
160 /* ------------------------------------------------------------------------- */
161 /* vstack debugging aid */
163 #if 0
164 void pv (const char *lbl, int a, int b)
166 int i;
167 for (i = a; i < a + b; ++i) {
168 SValue *p = &vtop[-i];
169 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
170 lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
173 #endif
175 /* ------------------------------------------------------------------------- */
176 /* start of translation unit info */
177 ST_FUNC void tcc_debug_start(TCCState *s1)
179 if (s1->do_debug) {
180 char buf[512];
182 /* file info: full path + filename */
183 section_sym = put_elf_sym(symtab_section, 0, 0,
184 ELFW(ST_INFO)(STB_LOCAL, STT_SECTION), 0,
185 text_section->sh_num, NULL);
186 getcwd(buf, sizeof(buf));
187 #ifdef _WIN32
188 normalize_slashes(buf);
189 #endif
190 pstrcat(buf, sizeof(buf), "/");
191 put_stabs_r(buf, N_SO, 0, 0,
192 text_section->data_offset, text_section, section_sym);
193 put_stabs_r(file->filename, N_SO, 0, 0,
194 text_section->data_offset, text_section, section_sym);
195 last_ind = 0;
196 last_line_num = 0;
199 /* an ELF symbol of type STT_FILE must be emitted so that STB_LOCAL
200 symbols can be safely used */
201 put_elf_sym(symtab_section, 0, 0,
202 ELFW(ST_INFO)(STB_LOCAL, STT_FILE), 0,
203 SHN_ABS, file->filename);
206 /* put end of translation unit info */
207 ST_FUNC void tcc_debug_end(TCCState *s1)
209 if (!s1->do_debug)
210 return;
211 put_stabs_r(NULL, N_SO, 0, 0,
212 text_section->data_offset, text_section, section_sym);
216 /* generate line number info */
217 ST_FUNC void tcc_debug_line(TCCState *s1)
219 if (!s1->do_debug)
220 return;
221 if ((last_line_num != file->line_num || last_ind != ind)) {
222 put_stabn(N_SLINE, 0, file->line_num, ind - func_ind);
223 last_ind = ind;
224 last_line_num = file->line_num;
228 /* put function symbol */
229 ST_FUNC void tcc_debug_funcstart(TCCState *s1, Sym *sym)
231 char buf[512];
233 if (!s1->do_debug)
234 return;
236 /* stabs info */
237 /* XXX: we put here a dummy type */
238 snprintf(buf, sizeof(buf), "%s:%c1",
239 funcname, sym->type.t & VT_STATIC ? 'f' : 'F');
240 put_stabs_r(buf, N_FUN, 0, file->line_num, 0,
241 cur_text_section, sym->c);
242 /* gdb wants a line number entry at the start of the function */
243 put_stabn(N_SLINE, 0, file->line_num, 0);
245 last_ind = 0;
246 last_line_num = 0;
249 /* put function size */
250 ST_FUNC void tcc_debug_funcend(TCCState *s1, int size)
252 if (!s1->do_debug)
253 return;
254 put_stabn(N_FUN, 0, 0, size);
257 /* ------------------------------------------------------------------------- */
258 ST_FUNC int tccgen_compile(TCCState *s1)
260 cur_text_section = NULL;
261 funcname = "";
262 anon_sym = SYM_FIRST_ANOM;
263 section_sym = 0;
264 const_wanted = 0;
265 nocode_wanted = 0x80000000;
267 /* define some often used types */
268 int_type.t = VT_INT;
269 char_pointer_type.t = VT_BYTE;
270 mk_pointer(&char_pointer_type);
271 #if PTR_SIZE == 4
272 size_type.t = VT_INT | VT_UNSIGNED;
273 ptrdiff_type.t = VT_INT;
274 #elif LONG_SIZE == 4
275 size_type.t = VT_LLONG | VT_UNSIGNED;
276 ptrdiff_type.t = VT_LLONG;
277 #else
278 size_type.t = VT_LONG | VT_LLONG | VT_UNSIGNED;
279 ptrdiff_type.t = VT_LONG | VT_LLONG;
280 #endif
281 func_old_type.t = VT_FUNC;
282 func_old_type.ref = sym_push(SYM_FIELD, &int_type, 0, 0);
283 func_old_type.ref->f.func_call = FUNC_CDECL;
284 func_old_type.ref->f.func_type = FUNC_OLD;
286 tcc_debug_start(s1);
288 #ifdef TCC_TARGET_ARM
289 arm_init(s1);
290 #endif
292 #ifdef INC_DEBUG
293 printf("%s: **** new file\n", file->filename);
294 #endif
296 parse_flags = PARSE_FLAG_PREPROCESS | PARSE_FLAG_TOK_NUM | PARSE_FLAG_TOK_STR;
297 next();
298 decl(VT_CONST);
299 gen_inline_functions(s1);
300 check_vstack();
301 /* end of translation unit info */
302 tcc_debug_end(s1);
303 return 0;
306 /* ------------------------------------------------------------------------- */
307 ST_FUNC ElfSym *elfsym(Sym *s)
309 if (!s || !s->c)
310 return NULL;
311 return &((ElfSym *)symtab_section->data)[s->c];
314 /* apply storage attributes to Elf symbol */
315 ST_FUNC void update_storage(Sym *sym)
317 ElfSym *esym;
318 int sym_bind, old_sym_bind;
320 esym = elfsym(sym);
321 if (!esym)
322 return;
324 if (sym->a.visibility)
325 esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1))
326 | sym->a.visibility;
328 if ((sym->type.t & VT_STATIC)
329 || (sym->type.t & (VT_EXTERN | VT_INLINE)) == VT_INLINE)
330 sym_bind = STB_LOCAL;
331 else if (sym->a.weak)
332 sym_bind = STB_WEAK;
333 else
334 sym_bind = STB_GLOBAL;
335 old_sym_bind = ELFW(ST_BIND)(esym->st_info);
336 if (sym_bind != old_sym_bind) {
337 esym->st_info = ELFW(ST_INFO)(sym_bind, ELFW(ST_TYPE)(esym->st_info));
340 #ifdef TCC_TARGET_PE
341 if (sym->a.dllimport)
342 esym->st_other |= ST_PE_IMPORT;
343 if (sym->a.dllexport)
344 esym->st_other |= ST_PE_EXPORT;
345 #endif
347 #if 0
348 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
349 get_tok_str(sym->v, NULL),
350 sym_bind == STB_WEAK ? 'w' : sym_bind == STB_LOCAL ? 'l' : 'g',
351 sym->a.visibility,
352 sym->a.dllexport,
353 sym->a.dllimport
355 #endif
358 /* ------------------------------------------------------------------------- */
359 /* update sym->c so that it points to an external symbol in section
360 'section' with value 'value' */
362 ST_FUNC void put_extern_sym2(Sym *sym, int sh_num,
363 addr_t value, unsigned long size,
364 int can_add_underscore)
366 int sym_type, sym_bind, info, other, t;
367 ElfSym *esym;
368 const char *name;
369 char buf1[256];
370 #ifdef CONFIG_TCC_BCHECK
371 char buf[32];
372 #endif
374 if (!sym->c) {
375 name = get_tok_str(sym->v, NULL);
376 #ifdef CONFIG_TCC_BCHECK
377 if (tcc_state->do_bounds_check) {
378 /* XXX: avoid doing that for statics ? */
379 /* if bound checking is activated, we change some function
380 names by adding the "__bound" prefix */
381 switch(sym->v) {
382 #ifdef TCC_TARGET_PE
383 /* XXX: we rely only on malloc hooks */
384 case TOK_malloc:
385 case TOK_free:
386 case TOK_realloc:
387 case TOK_memalign:
388 case TOK_calloc:
389 #endif
390 case TOK_memcpy:
391 case TOK_memmove:
392 case TOK_memset:
393 case TOK_strlen:
394 case TOK_strcpy:
395 case TOK_alloca:
396 strcpy(buf, "__bound_");
397 strcat(buf, name);
398 name = buf;
399 break;
402 #endif
403 t = sym->type.t;
404 if ((t & VT_BTYPE) == VT_FUNC) {
405 sym_type = STT_FUNC;
406 } else if ((t & VT_BTYPE) == VT_VOID) {
407 sym_type = STT_NOTYPE;
408 } else {
409 sym_type = STT_OBJECT;
411 if ((t & VT_STATIC) || (t & (VT_EXTERN | VT_INLINE)) == VT_INLINE)
412 sym_bind = STB_LOCAL;
413 else
414 sym_bind = STB_GLOBAL;
415 other = 0;
416 #ifdef TCC_TARGET_PE
417 if (sym_type == STT_FUNC && sym->type.ref) {
418 Sym *ref = sym->type.ref;
419 if (ref->a.nodecorate) {
420 can_add_underscore = 0;
422 if (ref->f.func_call == FUNC_STDCALL && can_add_underscore) {
423 sprintf(buf1, "_%s@%d", name, ref->f.func_args * PTR_SIZE);
424 name = buf1;
425 other |= ST_PE_STDCALL;
426 can_add_underscore = 0;
429 #endif
430 if (tcc_state->leading_underscore && can_add_underscore) {
431 buf1[0] = '_';
432 pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
433 name = buf1;
435 if (sym->asm_label)
436 name = get_tok_str(sym->asm_label, NULL);
437 info = ELFW(ST_INFO)(sym_bind, sym_type);
438 sym->c = put_elf_sym(symtab_section, value, size, info, other, sh_num, name);
439 } else {
440 esym = elfsym(sym);
441 esym->st_value = value;
442 esym->st_size = size;
443 esym->st_shndx = sh_num;
445 update_storage(sym);
448 ST_FUNC void put_extern_sym(Sym *sym, Section *section,
449 addr_t value, unsigned long size)
451 int sh_num = section ? section->sh_num : SHN_UNDEF;
452 put_extern_sym2(sym, sh_num, value, size, 1);
455 /* add a new relocation entry to symbol 'sym' in section 's' */
456 ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
457 addr_t addend)
459 int c = 0;
461 if (nocode_wanted && s == cur_text_section)
462 return;
464 if (sym) {
465 if (0 == sym->c)
466 put_extern_sym(sym, NULL, 0, 0);
467 c = sym->c;
470 /* now we can add ELF relocation info */
471 put_elf_reloca(symtab_section, s, offset, type, c, addend);
474 #if PTR_SIZE == 4
475 ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
477 greloca(s, sym, offset, type, 0);
479 #endif
481 /* ------------------------------------------------------------------------- */
482 /* symbol allocator */
483 static Sym *__sym_malloc(void)
485 Sym *sym_pool, *sym, *last_sym;
486 int i;
488 sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
489 dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);
491 last_sym = sym_free_first;
492 sym = sym_pool;
493 for(i = 0; i < SYM_POOL_NB; i++) {
494 sym->next = last_sym;
495 last_sym = sym;
496 sym++;
498 sym_free_first = last_sym;
499 return last_sym;
502 static inline Sym *sym_malloc(void)
504 Sym *sym;
505 #ifndef SYM_DEBUG
506 sym = sym_free_first;
507 if (!sym)
508 sym = __sym_malloc();
509 sym_free_first = sym->next;
510 return sym;
511 #else
512 sym = tcc_malloc(sizeof(Sym));
513 return sym;
514 #endif
517 ST_INLN void sym_free(Sym *sym)
519 #ifndef SYM_DEBUG
520 sym->next = sym_free_first;
521 sym_free_first = sym;
522 #else
523 tcc_free(sym);
524 #endif
527 /* push, without hashing */
528 ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, int c)
530 Sym *s;
532 s = sym_malloc();
533 memset(s, 0, sizeof *s);
534 s->v = v;
535 s->type.t = t;
536 s->c = c;
537 /* add in stack */
538 s->prev = *ps;
539 *ps = s;
540 return s;
543 /* find a symbol and return its associated structure. 's' is the top
544 of the symbol stack */
545 ST_FUNC Sym *sym_find2(Sym *s, int v)
547 while (s) {
548 if (s->v == v)
549 return s;
550 else if (s->v == -1)
551 return NULL;
552 s = s->prev;
554 return NULL;
557 /* structure lookup */
558 ST_INLN Sym *struct_find(int v)
560 v -= TOK_IDENT;
561 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
562 return NULL;
563 return table_ident[v]->sym_struct;
566 /* find an identifier */
567 ST_INLN Sym *sym_find(int v)
569 v -= TOK_IDENT;
570 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
571 return NULL;
572 return table_ident[v]->sym_identifier;
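/* Note: both lookups above are O(1): each identifier token owns a
   TokenSym with two chains (sym_struct for struct/union/enum tag names,
   sym_identifier for ordinary identifiers); sym_push() links new symbols
   onto those chains and sym_pop() unlinks them again when a scope ends. */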
575 static int sym_scope(Sym *s)
577 if (IS_ENUM_VAL (s->type.t))
578 return s->type.ref->sym_scope;
579 else
580 return s->sym_scope;
583 /* push a given symbol on the symbol stack */
584 ST_FUNC Sym *sym_push(int v, CType *type, int r, int c)
586 Sym *s, **ps;
587 TokenSym *ts;
589 if (local_stack)
590 ps = &local_stack;
591 else
592 ps = &global_stack;
593 s = sym_push2(ps, v, type->t, c);
594 s->type.ref = type->ref;
595 s->r = r;
596 /* don't record fields or anonymous symbols */
597 /* XXX: simplify */
598 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
599 /* record symbol in token array */
600 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
601 if (v & SYM_STRUCT)
602 ps = &ts->sym_struct;
603 else
604 ps = &ts->sym_identifier;
605 s->prev_tok = *ps;
606 *ps = s;
607 s->sym_scope = local_scope;
608 if (s->prev_tok && sym_scope(s->prev_tok) == s->sym_scope)
609 tcc_error("redeclaration of '%s'",
610 get_tok_str(v & ~SYM_STRUCT, NULL));
612 return s;
615 /* push a global identifier */
616 ST_FUNC Sym *global_identifier_push(int v, int t, int c)
618 Sym *s, **ps;
619 s = sym_push2(&global_stack, v, t, c);
620 s->r = VT_CONST | VT_SYM;
621 /* don't record anonymous symbol */
622 if (v < SYM_FIRST_ANOM) {
623 ps = &table_ident[v - TOK_IDENT]->sym_identifier;
624 /* modify the topmost local identifier, so that sym_identifier will
625 point to 's' when popped; happens when called from inline asm */
626 while (*ps != NULL && (*ps)->sym_scope)
627 ps = &(*ps)->prev_tok;
628 s->prev_tok = *ps;
629 *ps = s;
631 return s;
634 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
635 pop them yet from the list, but do remove them from the token array. */
636 ST_FUNC void sym_pop(Sym **ptop, Sym *b, int keep)
638 Sym *s, *ss, **ps;
639 TokenSym *ts;
640 int v;
642 s = *ptop;
643 while(s != b) {
644 ss = s->prev;
645 v = s->v;
646 /* remove symbol in token array */
647 /* XXX: simplify */
648 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
649 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
650 if (v & SYM_STRUCT)
651 ps = &ts->sym_struct;
652 else
653 ps = &ts->sym_identifier;
654 *ps = s->prev_tok;
656 if (!keep)
657 sym_free(s);
658 s = ss;
660 if (!keep)
661 *ptop = b;
664 /* ------------------------------------------------------------------------- */
666 static void vsetc(CType *type, int r, CValue *vc)
668 int v;
670 if (vtop >= vstack + (VSTACK_SIZE - 1))
671 tcc_error("memory full (vstack)");
672 /* cannot leave CPU flags live if other instructions are generated. Also
673 avoid leaving VT_JMP anywhere except on the top of the stack
674 because it would complicate the code generator.
676 Don't do this when nocode_wanted. vtop might come from
677 !nocode_wanted regions (see 88_codeopt.c) and transforming
678 it to a register without actually generating code is wrong
679 as their value might still be used for real. All values
680 we push under nocode_wanted will eventually be popped
681 again, so that the VT_CMP/VT_JMP value will be in vtop
682 when code is unsuppressed again.
684 Same logic below in vswap(); */
685 if (vtop >= vstack && !nocode_wanted) {
686 v = vtop->r & VT_VALMASK;
687 if (v == VT_CMP || (v & ~1) == VT_JMP)
688 gv(RC_INT);
691 vtop++;
692 vtop->type = *type;
693 vtop->r = r;
694 vtop->r2 = VT_CONST;
695 vtop->c = *vc;
696 vtop->sym = NULL;
699 ST_FUNC void vswap(void)
701 SValue tmp;
702 /* cannot vswap cpu flags. See comment at vsetc() above */
703 if (vtop >= vstack && !nocode_wanted) {
704 int v = vtop->r & VT_VALMASK;
705 if (v == VT_CMP || (v & ~1) == VT_JMP)
706 gv(RC_INT);
708 tmp = vtop[0];
709 vtop[0] = vtop[-1];
710 vtop[-1] = tmp;
713 /* pop stack value */
714 ST_FUNC void vpop(void)
716 int v;
717 v = vtop->r & VT_VALMASK;
718 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
719 /* for x86, we need to pop the FP stack */
720 if (v == TREG_ST0) {
721 o(0xd8dd); /* fstp %st(0) */
722 } else
723 #endif
724 if (v == VT_JMP || v == VT_JMPI) {
725 /* need to put correct jump if && or || without test */
726 gsym(vtop->c.i);
728 vtop--;
731 /* push a constant of type "type" with a dummy value */
732 ST_FUNC void vpush(CType *type)
734 vset(type, VT_CONST, 0);
737 /* push integer constant */
738 ST_FUNC void vpushi(int v)
740 CValue cval;
741 cval.i = v;
742 vsetc(&int_type, VT_CONST, &cval);
745 /* push a pointer sized constant */
746 static void vpushs(addr_t v)
748 CValue cval;
749 cval.i = v;
750 vsetc(&size_type, VT_CONST, &cval);
753 /* push arbitrary 64bit constant */
754 ST_FUNC void vpush64(int ty, unsigned long long v)
756 CValue cval;
757 CType ctype;
758 ctype.t = ty;
759 ctype.ref = NULL;
760 cval.i = v;
761 vsetc(&ctype, VT_CONST, &cval);
764 /* push long long constant */
765 static inline void vpushll(long long v)
767 vpush64(VT_LLONG, v);
770 ST_FUNC void vset(CType *type, int r, int v)
772 CValue cval;
774 cval.i = v;
775 vsetc(type, r, &cval);
778 static void vseti(int r, int v)
780 CType type;
781 type.t = VT_INT;
782 type.ref = NULL;
783 vset(&type, r, v);
786 ST_FUNC void vpushv(SValue *v)
788 if (vtop >= vstack + (VSTACK_SIZE - 1))
789 tcc_error("memory full (vstack)");
790 vtop++;
791 *vtop = *v;
794 static void vdup(void)
796 vpushv(vtop);
799 /* rotate the first n stack elements to the bottom
800 I1 ... In -> I2 ... In I1 [top is right] */
802 ST_FUNC void vrotb(int n)
804 int i;
805 SValue tmp;
807 tmp = vtop[-n + 1];
808 for(i=-n+1;i!=0;i++)
809 vtop[i] = vtop[i+1];
810 vtop[0] = tmp;
813 /* rotate the n elements before entry e towards the top
814 I1 ... In ... -> In I1 ... I(n-1) ... [top is right] */
816 ST_FUNC void vrote(SValue *e, int n)
818 int i;
819 SValue tmp;
821 tmp = *e;
822 for(i = 0;i < n - 1; i++)
823 e[-i] = e[-i - 1];
824 e[-n + 1] = tmp;
827 /* rotate the first n stack elements to the top
828 I1 ... In -> In I1 ... I(n-1) [top is right] */
830 ST_FUNC void vrott(int n)
832 vrote(vtop, n);
835 /* push a symbol value of TYPE */
836 static inline void vpushsym(CType *type, Sym *sym)
838 CValue cval;
839 cval.i = 0;
840 vsetc(type, VT_CONST | VT_SYM, &cval);
841 vtop->sym = sym;
844 /* Return a static symbol pointing to a section */
845 ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
847 int v;
848 Sym *sym;
850 v = anon_sym++;
851 sym = sym_push(v, type, VT_CONST | VT_SYM, 0);
852 sym->type.t |= VT_STATIC;
853 put_extern_sym(sym, sec, offset, size);
854 return sym;
857 /* push a reference to a section offset by adding a dummy symbol */
858 static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
860 vpushsym(type, get_sym_ref(type, sec, offset, size));
863 /* define a new external reference to a symbol 'v' of type 'u' */
864 ST_FUNC Sym *external_global_sym(int v, CType *type)
866 Sym *s;
868 s = sym_find(v);
869 if (!s) {
870 /* push forward reference */
871 s = global_identifier_push(v, type->t | VT_EXTERN, 0);
872 s->type.ref = type->ref;
873 } else if (IS_ASM_SYM(s)) {
874 s->type.t = type->t | (s->type.t & VT_EXTERN);
875 s->type.ref = type->ref;
876 update_storage(s);
878 return s;
881 /* Merge symbol attributes. */
882 static void merge_symattr(struct SymAttr *sa, struct SymAttr *sa1)
884 if (sa1->aligned && !sa->aligned)
885 sa->aligned = sa1->aligned;
886 sa->packed |= sa1->packed;
887 sa->weak |= sa1->weak;
888 if (sa1->visibility != STV_DEFAULT) {
889 int vis = sa->visibility;
890 if (vis == STV_DEFAULT
891 || vis > sa1->visibility)
892 vis = sa1->visibility;
893 sa->visibility = vis;
895 sa->dllexport |= sa1->dllexport;
896 sa->nodecorate |= sa1->nodecorate;
897 sa->dllimport |= sa1->dllimport;
900 /* Merge function attributes. */
901 static void merge_funcattr(struct FuncAttr *fa, struct FuncAttr *fa1)
903 if (fa1->func_call && !fa->func_call)
904 fa->func_call = fa1->func_call;
905 if (fa1->func_type && !fa->func_type)
906 fa->func_type = fa1->func_type;
907 if (fa1->func_args && !fa->func_args)
908 fa->func_args = fa1->func_args;
911 /* Merge attributes. */
912 static void merge_attr(AttributeDef *ad, AttributeDef *ad1)
914 merge_symattr(&ad->a, &ad1->a);
915 merge_funcattr(&ad->f, &ad1->f);
917 if (ad1->section)
918 ad->section = ad1->section;
919 if (ad1->alias_target)
920 ad->alias_target = ad1->alias_target;
921 if (ad1->asm_label)
922 ad->asm_label = ad1->asm_label;
923 if (ad1->attr_mode)
924 ad->attr_mode = ad1->attr_mode;
927 /* Merge some type attributes. */
928 static void patch_type(Sym *sym, CType *type)
930 if ((!(type->t & VT_EXTERN) || IS_ENUM_VAL(sym->type.t))
931 && (type->t & VT_BTYPE) != VT_FUNC) {
932 if (!(sym->type.t & VT_EXTERN))
933 tcc_error("redefinition of '%s'", get_tok_str(sym->v, NULL));
934 sym->type.t &= ~VT_EXTERN;
937 if (IS_ASM_SYM(sym)) {
938 /* stay static if both are static */
939 sym->type.t = type->t & (sym->type.t | ~VT_STATIC);
940 sym->type.ref = type->ref;
943 if (!is_compatible_types(&sym->type, type)) {
944 tcc_error("incompatible types for redefinition of '%s'",
945 get_tok_str(sym->v, NULL));
947 } else if ((sym->type.t & VT_BTYPE) == VT_FUNC) {
948 int static_proto = sym->type.t & VT_STATIC;
949 /* warn if static follows non-static function declaration */
950 if ((type->t & VT_STATIC) && !static_proto
951 /* XXX this test for inline shouldn't be here. Until we
952 implement gnu-inline mode again it silences a warning for
953 mingw caused by our workarounds. */
954 && !((type->t | sym->type.t) & VT_INLINE))
955 tcc_warning("static storage ignored for redefinition of '%s'",
956 get_tok_str(sym->v, NULL));
958 /* Force external definition if unequal inline specifier
959 or an explicit extern one. */
960 if ((sym->type.t | type->t) & VT_STATIC) {
961 type->t |= sym->type.t & VT_INLINE;
962 sym->type.t |= type->t & VT_INLINE;
963 } else if (((type->t & VT_INLINE) != (sym->type.t & VT_INLINE)
964 || (type->t | sym->type.t) & VT_EXTERN)
965 && !static_proto) {
966 type->t &= ~VT_INLINE;
967 sym->type.t &= ~VT_INLINE;
969 if (0 == (type->t & VT_EXTERN)) {
970 /* put complete type, use static from prototype, but don't
971 overwrite type.ref, it might contain parameter names */
972 sym->type.t = (type->t & ~VT_STATIC) | static_proto;
974 } else {
975 if ((sym->type.t & VT_ARRAY) && type->ref->c >= 0) {
976 /* set array size if it was omitted in extern declaration */
977 if (sym->type.ref->c < 0)
978 sym->type.ref->c = type->ref->c;
979 else if (sym->type.ref->c != type->ref->c)
980 tcc_error("conflicting type for '%s'", get_tok_str(sym->v, NULL));
982 if ((type->t ^ sym->type.t) & VT_STATIC)
983 tcc_warning("storage mismatch for redefinition of '%s'",
984 get_tok_str(sym->v, NULL));
989 /* Merge some storage attributes. */
990 static void patch_storage(Sym *sym, AttributeDef *ad, CType *type)
992 if (type)
993 patch_type(sym, type);
995 #ifdef TCC_TARGET_PE
996 if (sym->a.dllimport != ad->a.dllimport)
997 tcc_error("incompatible dll linkage for redefinition of '%s'",
998 get_tok_str(sym->v, NULL));
999 #endif
1000 merge_symattr(&sym->a, &ad->a);
1001 if (ad->asm_label)
1002 sym->asm_label = ad->asm_label;
1003 update_storage(sym);
1006 /* define a new external reference to a symbol 'v' */
1007 static Sym *external_sym(int v, CType *type, int r, AttributeDef *ad)
1009 Sym *s;
1010 s = sym_find(v);
1011 if (!s || (!IS_ASM_SYM(s) && !(s->type.t & VT_EXTERN)
1012 && (!(type->t & VT_EXTERN) || s->sym_scope)
1013 && (s->type.t & VT_BTYPE) != VT_FUNC)) {
1014 if (s && !is_compatible_types(&s->type, type))
1015 tcc_error("conflicting types for '%s'", get_tok_str(s->v, NULL));
1016 /* push forward reference */
1017 s = sym_push(v, type, r | VT_CONST | VT_SYM, 0);
1018 s->a = ad->a;
1019 s->asm_label = ad->asm_label;
1020 s->sym_scope = 0;
1021 } else {
1022 if (s->type.ref == func_old_type.ref) {
1023 s->type.ref = type->ref;
1024 s->r = r | VT_CONST | VT_SYM;
1025 s->type.t |= VT_EXTERN;
1027 patch_storage(s, ad, type);
1029 return s;
1032 /* push a reference to global symbol v */
1033 ST_FUNC void vpush_global_sym(CType *type, int v)
1035 vpushsym(type, external_global_sym(v, type));
1038 /* save registers up to (vtop - n) stack entry */
1039 ST_FUNC void save_regs(int n)
1041 SValue *p, *p1;
1042 for(p = vstack, p1 = vtop - n; p <= p1; p++)
1043 save_reg(p->r);
1046 /* save r to the memory stack, and mark it as being free */
1047 ST_FUNC void save_reg(int r)
1049 save_reg_upstack(r, 0);
1052 /* save r to the memory stack, and mark it as being free,
1053 if seen up to (vtop - n) stack entry */
1054 ST_FUNC void save_reg_upstack(int r, int n)
1056 int l, saved, size, align;
1057 SValue *p, *p1, sv;
1058 CType *type;
1060 if ((r &= VT_VALMASK) >= VT_CONST)
1061 return;
1062 if (nocode_wanted)
1063 return;
1065 /* modify all stack values */
1066 saved = 0;
1067 l = 0;
1068 for(p = vstack, p1 = vtop - n; p <= p1; p++) {
1069 if ((p->r & VT_VALMASK) == r ||
1070 ((p->type.t & VT_BTYPE) == VT_LLONG && (p->r2 & VT_VALMASK) == r)) {
1071 /* must save value on stack if not already done */
1072 if (!saved) {
1073 /* NOTE: must reload 'r' because r might be equal to r2 */
1074 r = p->r & VT_VALMASK;
1075 /* store register in the stack */
1076 type = &p->type;
1077 if ((p->r & VT_LVAL) ||
1078 (!is_float(type->t) && (type->t & VT_BTYPE) != VT_LLONG))
1079 #if PTR_SIZE == 8
1080 type = &char_pointer_type;
1081 #else
1082 type = &int_type;
1083 #endif
1084 size = type_size(type, &align);
1085 l=get_temp_local_var(size,align);
1086 sv.type.t = type->t;
1087 sv.r = VT_LOCAL | VT_LVAL;
1088 sv.c.i = l;
1089 store(r, &sv);
1090 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1091 /* x86 specific: need to pop fp register ST0 if saved */
1092 if (r == TREG_ST0) {
1093 o(0xd8dd); /* fstp %st(0) */
1095 #endif
1096 #if PTR_SIZE == 4
1097 /* special long long case */
1098 if ((type->t & VT_BTYPE) == VT_LLONG) {
1099 sv.c.i += 4;
1100 store(p->r2, &sv);
1102 #endif
1103 saved = 1;
1105 /* mark that stack entry as being saved on the stack */
1106 if (p->r & VT_LVAL) {
1107 /* also clear the bounded flag because the
1108 relocation address of the function was stored in
1109 p->c.i */
1110 p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
1111 } else {
1112 p->r = lvalue_type(p->type.t) | VT_LOCAL;
1114 p->r2 = VT_CONST;
1115 p->c.i = l;
1120 #ifdef TCC_TARGET_ARM
1121 /* find a register of class 'rc2' with at most one reference on stack.
1122 * If none, call get_reg(rc) */
1123 ST_FUNC int get_reg_ex(int rc, int rc2)
1125 int r;
1126 SValue *p;
1128 for(r=0;r<NB_REGS;r++) {
1129 if (reg_classes[r] & rc2) {
1130 int n;
1131 n=0;
1132 for(p = vstack; p <= vtop; p++) {
1133 if ((p->r & VT_VALMASK) == r ||
1134 (p->r2 & VT_VALMASK) == r)
1135 n++;
1137 if (n <= 1)
1138 return r;
1141 return get_reg(rc);
1143 #endif
1145 /* find a free register of class 'rc'. If none, save one register */
1146 ST_FUNC int get_reg(int rc)
1148 int r;
1149 SValue *p;
1151 /* find a free register */
1152 for(r=0;r<NB_REGS;r++) {
1153 if (reg_classes[r] & rc) {
1154 if (nocode_wanted)
1155 return r;
1156 for(p=vstack;p<=vtop;p++) {
1157 if ((p->r & VT_VALMASK) == r ||
1158 (p->r2 & VT_VALMASK) == r)
1159 goto notfound;
1161 return r;
1163 notfound: ;
1166 /* no register left : free the first one on the stack (VERY
1167 IMPORTANT to start from the bottom to ensure that we don't
1168 spill registers used in gen_opi()) */
1169 for(p=vstack;p<=vtop;p++) {
1170 /* look at second register (if long long) */
1171 r = p->r2 & VT_VALMASK;
1172 if (r < VT_CONST && (reg_classes[r] & rc))
1173 goto save_found;
1174 r = p->r & VT_VALMASK;
1175 if (r < VT_CONST && (reg_classes[r] & rc)) {
1176 save_found:
1177 save_reg(r);
1178 return r;
1181 /* Should never come here */
1182 return -1;
1185 /* find a free temporary local variable matching the requested size and alignment, and return its stack offset. If none is found, allocate a new temporary stack slot. */
1186 static int get_temp_local_var(int size, int align){
1187 int i;
1188 struct temp_local_variable *temp_var;
1189 int found_var;
1190 SValue *p;
1191 int r;
1192 char free;
1193 char found;
1194 found=0;
1195 for(i=0;i<nb_temp_local_vars;i++){
1196 temp_var=&arr_temp_local_vars[i];
1197 if(temp_var->size<size||align!=temp_var->align){
1198 continue;
1200 /*check if temp_var is free*/
1201 free=1;
1202 for(p=vstack;p<=vtop;p++) {
1203 r=p->r&VT_VALMASK;
1204 if(r==VT_LOCAL||r==VT_LLOCAL){
1205 if(p->c.i==temp_var->location){
1206 free=0;
1207 break;
1211 if(free){
1212 found_var=temp_var->location;
1213 found=1;
1214 break;
1217 if(!found){
1218 loc = (loc - size) & -align;
1219 if(nb_temp_local_vars<MAX_TEMP_LOCAL_VARIABLE_NUMBER){
1220 temp_var=&arr_temp_local_vars[i];
1221 temp_var->location=loc;
1222 temp_var->size=size;
1223 temp_var->align=align;
1224 nb_temp_local_vars++;
1226 found_var=loc;
1228 return found_var;
1231 static void clear_temp_local_var_list(){
1232 nb_temp_local_vars=0;
1235 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1236 if needed */
1237 static void move_reg(int r, int s, int t)
1239 SValue sv;
1241 if (r != s) {
1242 save_reg(r);
1243 sv.type.t = t;
1244 sv.type.ref = NULL;
1245 sv.r = s;
1246 sv.c.i = 0;
1247 load(r, &sv);
1251 /* get address of vtop (vtop MUST BE an lvalue) */
1252 ST_FUNC void gaddrof(void)
1254 vtop->r &= ~VT_LVAL;
1255 /* tricky: if saved lvalue, then we can go back to lvalue */
1256 if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
1257 vtop->r = (vtop->r & ~(VT_VALMASK | VT_LVAL_TYPE)) | VT_LOCAL | VT_LVAL;
1262 #ifdef CONFIG_TCC_BCHECK
1263 /* generate lvalue bound code */
1264 static void gbound(void)
1266 int lval_type;
1267 CType type1;
1269 vtop->r &= ~VT_MUSTBOUND;
1270 /* if lvalue, then use checking code before dereferencing */
1271 if (vtop->r & VT_LVAL) {
1272 /* if not VT_BOUNDED value, then make one */
1273 if (!(vtop->r & VT_BOUNDED)) {
1274 lval_type = vtop->r & (VT_LVAL_TYPE | VT_LVAL);
1275 /* must save type because we must set it to int to get pointer */
1276 type1 = vtop->type;
1277 vtop->type.t = VT_PTR;
1278 gaddrof();
1279 vpushi(0);
1280 gen_bounded_ptr_add();
1281 vtop->r |= lval_type;
1282 vtop->type = type1;
1284 /* then check for dereferencing */
1285 gen_bounded_ptr_deref();
1288 #endif
1290 static void incr_bf_adr(int o)
1292 vtop->type = char_pointer_type;
1293 gaddrof();
1294 vpushi(o);
1295 gen_op('+');
1296 vtop->type.t = (vtop->type.t & ~(VT_BTYPE|VT_DEFSIGN))
1297 | (VT_BYTE|VT_UNSIGNED);
1298 vtop->r = (vtop->r & ~VT_LVAL_TYPE)
1299 | (VT_LVAL_BYTE|VT_LVAL_UNSIGNED|VT_LVAL);
1302 /* single-byte load mode for packed or otherwise unaligned bitfields */
1303 static void load_packed_bf(CType *type, int bit_pos, int bit_size)
1305 int n, o, bits;
1306 save_reg_upstack(vtop->r, 1);
1307 vpush64(type->t & VT_BTYPE, 0); // B X
1308 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
1309 do {
1310 vswap(); // X B
1311 incr_bf_adr(o);
1312 vdup(); // X B B
1313 n = 8 - bit_pos;
1314 if (n > bit_size)
1315 n = bit_size;
1316 if (bit_pos)
1317 vpushi(bit_pos), gen_op(TOK_SHR), bit_pos = 0; // X B Y
1318 if (n < 8)
1319 vpushi((1 << n) - 1), gen_op('&');
1320 gen_cast(type);
1321 if (bits)
1322 vpushi(bits), gen_op(TOK_SHL);
1323 vrotb(3); // B Y X
1324 gen_op('|'); // B X
1325 bits += n, bit_size -= n, o = 1;
1326 } while (bit_size);
1327 vswap(), vpop();
1328 if (!(type->t & VT_UNSIGNED)) {
1329 n = ((type->t & VT_BTYPE) == VT_LLONG ? 64 : 32) - bits;
1330 vpushi(n), gen_op(TOK_SHL);
1331 vpushi(n), gen_op(TOK_SAR);
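/* Worked example (a sketch, not taken from a real struct layout): for a
   signed bitfield with bit_pos = 13 and bit_size = 9 the loop above runs
   twice: the first pass reads the byte at offset 1 and keeps its top 3
   bits (shift right by 5, mask 0x07), the second pass reads the next
   byte and keeps its low 6 bits, shifted left by 3 before being OR-ed
   in; the final SHL/SAR pair then sign-extends the 9-bit result. */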
1335 /* single-byte store mode for packed or otherwise unaligned bitfields */
1336 static void store_packed_bf(int bit_pos, int bit_size)
1338 int bits, n, o, m, c;
1340 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1341 vswap(); // X B
1342 save_reg_upstack(vtop->r, 1);
1343 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
1344 do {
1345 incr_bf_adr(o); // X B
1346 vswap(); //B X
1347 c ? vdup() : gv_dup(); // B V X
1348 vrott(3); // X B V
1349 if (bits)
1350 vpushi(bits), gen_op(TOK_SHR);
1351 if (bit_pos)
1352 vpushi(bit_pos), gen_op(TOK_SHL);
1353 n = 8 - bit_pos;
1354 if (n > bit_size)
1355 n = bit_size;
1356 if (n < 8) {
1357 m = ((1 << n) - 1) << bit_pos;
1358 vpushi(m), gen_op('&'); // X B V1
1359 vpushv(vtop-1); // X B V1 B
1360 vpushi(m & 0x80 ? ~m & 0x7f : ~m);
1361 gen_op('&'); // X B V1 B1
1362 gen_op('|'); // X B V2
1364 vdup(), vtop[-1] = vtop[-2]; // X B B V2
1365 vstore(), vpop(); // X B
1366 bits += n, bit_size -= n, bit_pos = 0, o = 1;
1367 } while (bit_size);
1368 vpop(), vpop();
1371 static int adjust_bf(SValue *sv, int bit_pos, int bit_size)
1373 int t;
1374 if (0 == sv->type.ref)
1375 return 0;
1376 t = sv->type.ref->auxtype;
1377 if (t != -1 && t != VT_STRUCT) {
1378 sv->type.t = (sv->type.t & ~VT_BTYPE) | t;
1379 sv->r = (sv->r & ~VT_LVAL_TYPE) | lvalue_type(sv->type.t);
1381 return t;
1384 /* store vtop in a register belonging to class 'rc'. lvalues are
1385 converted to values. Cannot be used if the value cannot be
1386 converted to a register value (such as structures). */
1387 ST_FUNC int gv(int rc)
1389 int r, bit_pos, bit_size, size, align, rc2;
1391 /* NOTE: get_reg can modify vstack[] */
1392 if (vtop->type.t & VT_BITFIELD) {
1393 CType type;
1395 bit_pos = BIT_POS(vtop->type.t);
1396 bit_size = BIT_SIZE(vtop->type.t);
1397 /* remove bit field info to avoid loops */
1398 vtop->type.t &= ~VT_STRUCT_MASK;
1400 type.ref = NULL;
1401 type.t = vtop->type.t & VT_UNSIGNED;
1402 if ((vtop->type.t & VT_BTYPE) == VT_BOOL)
1403 type.t |= VT_UNSIGNED;
1405 r = adjust_bf(vtop, bit_pos, bit_size);
1407 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
1408 type.t |= VT_LLONG;
1409 else
1410 type.t |= VT_INT;
1412 if (r == VT_STRUCT) {
1413 load_packed_bf(&type, bit_pos, bit_size);
1414 } else {
1415 int bits = (type.t & VT_BTYPE) == VT_LLONG ? 64 : 32;
1416 /* cast to int to propagate signedness in following ops */
1417 gen_cast(&type);
1418 /* generate shifts */
1419 vpushi(bits - (bit_pos + bit_size));
1420 gen_op(TOK_SHL);
1421 vpushi(bits - bit_size);
1422 /* NOTE: transformed to SHR if unsigned */
1423 gen_op(TOK_SAR);
1425 r = gv(rc);
1426 } else {
1427 if (is_float(vtop->type.t) &&
1428 (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1429 unsigned long offset;
1430 /* CPUs usually cannot use float constants, so we store them
1431 generically in data segment */
1432 size = type_size(&vtop->type, &align);
1433 if (NODATA_WANTED)
1434 size = 0, align = 1;
1435 offset = section_add(data_section, size, align);
1436 vpush_ref(&vtop->type, data_section, offset, size);
1437 vswap();
1438 init_putv(&vtop->type, data_section, offset);
1439 vtop->r |= VT_LVAL;
1441 #ifdef CONFIG_TCC_BCHECK
1442 if (vtop->r & VT_MUSTBOUND)
1443 gbound();
1444 #endif
1446 r = vtop->r & VT_VALMASK;
1447 rc2 = (rc & RC_FLOAT) ? RC_FLOAT : RC_INT;
1448 #ifndef TCC_TARGET_ARM64
1449 if (rc == RC_IRET)
1450 rc2 = RC_LRET;
1451 #ifdef TCC_TARGET_X86_64
1452 else if (rc == RC_FRET)
1453 rc2 = RC_QRET;
1454 #endif
1455 #endif
1456 /* need to reload if:
1457 - constant
1458 - lvalue (need to dereference pointer)
1459 - already a register, but not in the right class */
1460 if (r >= VT_CONST
1461 || (vtop->r & VT_LVAL)
1462 || !(reg_classes[r] & rc)
1463 #if PTR_SIZE == 8
1464 || ((vtop->type.t & VT_BTYPE) == VT_QLONG && !(reg_classes[vtop->r2] & rc2))
1465 || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT && !(reg_classes[vtop->r2] & rc2))
1466 #else
1467 || ((vtop->type.t & VT_BTYPE) == VT_LLONG && !(reg_classes[vtop->r2] & rc2))
1468 #endif
1471 r = get_reg(rc);
1472 #if PTR_SIZE == 8
1473 if (((vtop->type.t & VT_BTYPE) == VT_QLONG) || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT)) {
1474 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
1475 #else
1476 if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
1477 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
1478 unsigned long long ll;
1479 #endif
1480 int r2, original_type;
1481 original_type = vtop->type.t;
1482 /* two register type load : expand to two words
1483 temporarily */
1484 #if PTR_SIZE == 4
1485 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1486 /* load constant */
1487 ll = vtop->c.i;
1488 vtop->c.i = ll; /* first word */
1489 load(r, vtop);
1490 vtop->r = r; /* save register value */
1491 vpushi(ll >> 32); /* second word */
1492 } else
1493 #endif
1494 if (vtop->r & VT_LVAL) {
1495 /* We do not want to modify the long long
1496 pointer here, so the safest (and least
1497 efficient) approach is to save all the other
1498 registers on the stack. XXX: totally inefficient. */
1499 #if 0
1500 save_regs(1);
1501 #else
1502 /* lvalue_save: save only if used further down the stack */
1503 save_reg_upstack(vtop->r, 1);
1504 #endif
1505 /* load from memory */
1506 vtop->type.t = load_type;
1507 load(r, vtop);
1508 vdup();
1509 vtop[-1].r = r; /* save register value */
1510 /* increment pointer to get second word */
1511 vtop->type.t = addr_type;
1512 gaddrof();
1513 vpushi(load_size);
1514 gen_op('+');
1515 vtop->r |= VT_LVAL;
1516 vtop->type.t = load_type;
1517 } else {
1518 /* move registers */
1519 load(r, vtop);
1520 vdup();
1521 vtop[-1].r = r; /* save register value */
1522 vtop->r = vtop[-1].r2;
1524 /* Allocate second register. Here we rely on the fact that
1525 get_reg() tries first to free r2 of an SValue. */
1526 r2 = get_reg(rc2);
1527 load(r2, vtop);
1528 vpop();
1529 /* write second register */
1530 vtop->r2 = r2;
1531 vtop->type.t = original_type;
1532 } else if ((vtop->r & VT_LVAL) && !is_float(vtop->type.t)) {
1533 int t1, t;
1534 /* lvalue of scalar type : need to use lvalue type
1535 because of possible cast */
1536 t = vtop->type.t;
1537 t1 = t;
1538 /* compute memory access type */
1539 if (vtop->r & VT_LVAL_BYTE)
1540 t = VT_BYTE;
1541 else if (vtop->r & VT_LVAL_SHORT)
1542 t = VT_SHORT;
1543 if (vtop->r & VT_LVAL_UNSIGNED)
1544 t |= VT_UNSIGNED;
1545 vtop->type.t = t;
1546 load(r, vtop);
1547 /* restore wanted type */
1548 vtop->type.t = t1;
1549 } else {
1550 /* one register type load */
1551 load(r, vtop);
1554 vtop->r = r;
1555 #ifdef TCC_TARGET_C67
1556 /* uses register pairs for doubles */
1557 if ((vtop->type.t & VT_BTYPE) == VT_DOUBLE)
1558 vtop->r2 = r+1;
1559 #endif
1561 return r;
1564 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1565 ST_FUNC void gv2(int rc1, int rc2)
1567 int v;
1569 /* generate more generic register first. But VT_JMP or VT_CMP
1570 values must be generated first in all cases to avoid possible
1571 reload errors */
1572 v = vtop[0].r & VT_VALMASK;
1573 if (v != VT_CMP && (v & ~1) != VT_JMP && rc1 <= rc2) {
1574 vswap();
1575 gv(rc1);
1576 vswap();
1577 gv(rc2);
1578 /* test if reload is needed for first register */
1579 if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
1580 vswap();
1581 gv(rc1);
1582 vswap();
1584 } else {
1585 gv(rc2);
1586 vswap();
1587 gv(rc1);
1588 vswap();
1589 /* test if reload is needed for first register */
1590 if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
1591 gv(rc2);
1596 #ifndef TCC_TARGET_ARM64
1597 /* wrapper around RC_FRET to return a register by type */
1598 static int rc_fret(int t)
1600 #ifdef TCC_TARGET_X86_64
1601 if (t == VT_LDOUBLE) {
1602 return RC_ST0;
1604 #endif
1605 return RC_FRET;
1607 #endif
1609 /* wrapper around REG_FRET to return a register by type */
1610 static int reg_fret(int t)
1612 #ifdef TCC_TARGET_X86_64
1613 if (t == VT_LDOUBLE) {
1614 return TREG_ST0;
1616 #endif
1617 return REG_FRET;
1620 #if PTR_SIZE == 4
1621 /* expand 64bit on stack in two ints */
1622 ST_FUNC void lexpand(void)
1624 int u, v;
1625 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1626 v = vtop->r & (VT_VALMASK | VT_LVAL);
1627 if (v == VT_CONST) {
1628 vdup();
1629 vtop[0].c.i >>= 32;
1630 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1631 vdup();
1632 vtop[0].c.i += 4;
1633 } else {
1634 gv(RC_INT);
1635 vdup();
1636 vtop[0].r = vtop[-1].r2;
1637 vtop[0].r2 = vtop[-1].r2 = VT_CONST;
1639 vtop[0].type.t = vtop[-1].type.t = VT_INT | u;
1641 #endif
1643 #if PTR_SIZE == 4
1644 /* build a long long from two ints */
1645 static void lbuild(int t)
1647 gv2(RC_INT, RC_INT);
1648 vtop[-1].r2 = vtop[0].r;
1649 vtop[-1].type.t = t;
1650 vpop();
1652 #endif
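/* On 32-bit targets a 64-bit value occupies two registers per SValue
   (r holds the low word, r2 the high word).  lexpand() splits the value
   on top of the stack into two 32-bit entries (low word below, high
   word on top) and lbuild() packs such a pair back into a single
   long long entry. */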
1654 /* convert stack entry to register and duplicate its value in another
1655 register */
1656 static void gv_dup(void)
1658 int rc, t, r, r1;
1659 SValue sv;
1661 t = vtop->type.t;
1662 #if PTR_SIZE == 4
1663 if ((t & VT_BTYPE) == VT_LLONG) {
1664 if (t & VT_BITFIELD) {
1665 gv(RC_INT);
1666 t = vtop->type.t;
1668 lexpand();
1669 gv_dup();
1670 vswap();
1671 vrotb(3);
1672 gv_dup();
1673 vrotb(4);
1674 /* stack: H L L1 H1 */
1675 lbuild(t);
1676 vrotb(3);
1677 vrotb(3);
1678 vswap();
1679 lbuild(t);
1680 vswap();
1681 } else
1682 #endif
1684 /* duplicate value */
1685 rc = RC_INT;
1686 sv.type.t = VT_INT;
1687 if (is_float(t)) {
1688 rc = RC_FLOAT;
1689 #ifdef TCC_TARGET_X86_64
1690 if ((t & VT_BTYPE) == VT_LDOUBLE) {
1691 rc = RC_ST0;
1693 #endif
1694 sv.type.t = t;
1696 r = gv(rc);
1697 r1 = get_reg(rc);
1698 sv.r = r;
1699 sv.c.i = 0;
1700 load(r1, &sv); /* move r to r1 */
1701 vdup();
1702 /* duplicates value */
1703 if (r != r1)
1704 vtop->r = r1;
1708 /* Generate value test
1710 * Generate a test for any value (jump, comparison and integers) */
1711 ST_FUNC int gvtst(int inv, int t)
1713 int v = vtop->r & VT_VALMASK;
1714 if (v != VT_CMP && v != VT_JMP && v != VT_JMPI) {
1715 vpushi(0);
1716 gen_op(TOK_NE);
1718 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1719 /* constant jmp optimization */
1720 if ((vtop->c.i != 0) != inv)
1721 t = gjmp(t);
1722 vtop--;
1723 return t;
1725 return gtst(inv, t);
1728 #if PTR_SIZE == 4
1729 /* generate CPU independent (unsigned) long long operations */
1730 static void gen_opl(int op)
1732 int t, a, b, op1, c, i;
1733 int func;
1734 unsigned short reg_iret = REG_IRET;
1735 unsigned short reg_lret = REG_LRET;
1736 SValue tmp;
1738 switch(op) {
1739 case '/':
1740 case TOK_PDIV:
1741 func = TOK___divdi3;
1742 goto gen_func;
1743 case TOK_UDIV:
1744 func = TOK___udivdi3;
1745 goto gen_func;
1746 case '%':
1747 func = TOK___moddi3;
1748 goto gen_mod_func;
1749 case TOK_UMOD:
1750 func = TOK___umoddi3;
1751 gen_mod_func:
1752 #ifdef TCC_ARM_EABI
1753 reg_iret = TREG_R2;
1754 reg_lret = TREG_R3;
1755 #endif
1756 gen_func:
1757 /* call generic long long function */
1758 vpush_global_sym(&func_old_type, func);
1759 vrott(3);
1760 gfunc_call(2);
1761 vpushi(0);
1762 vtop->r = reg_iret;
1763 vtop->r2 = reg_lret;
1764 break;
1765 case '^':
1766 case '&':
1767 case '|':
1768 case '*':
1769 case '+':
1770 case '-':
1771 //pv("gen_opl A",0,2);
1772 t = vtop->type.t;
1773 vswap();
1774 lexpand();
1775 vrotb(3);
1776 lexpand();
1777 /* stack: L1 H1 L2 H2 */
1778 tmp = vtop[0];
1779 vtop[0] = vtop[-3];
1780 vtop[-3] = tmp;
1781 tmp = vtop[-2];
1782 vtop[-2] = vtop[-3];
1783 vtop[-3] = tmp;
1784 vswap();
1785 /* stack: H1 H2 L1 L2 */
1786 //pv("gen_opl B",0,4);
1787 if (op == '*') {
1788 vpushv(vtop - 1);
1789 vpushv(vtop - 1);
1790 gen_op(TOK_UMULL);
1791 lexpand();
1792 /* stack: H1 H2 L1 L2 ML MH */
1793 for(i=0;i<4;i++)
1794 vrotb(6);
1795 /* stack: ML MH H1 H2 L1 L2 */
1796 tmp = vtop[0];
1797 vtop[0] = vtop[-2];
1798 vtop[-2] = tmp;
1799 /* stack: ML MH H1 L2 H2 L1 */
1800 gen_op('*');
1801 vrotb(3);
1802 vrotb(3);
1803 gen_op('*');
1804 /* stack: ML MH M1 M2 */
1805 gen_op('+');
1806 gen_op('+');
1807 } else if (op == '+' || op == '-') {
1808 /* XXX: add non carry method too (for MIPS or alpha) */
1809 if (op == '+')
1810 op1 = TOK_ADDC1;
1811 else
1812 op1 = TOK_SUBC1;
1813 gen_op(op1);
1814 /* stack: H1 H2 (L1 op L2) */
1815 vrotb(3);
1816 vrotb(3);
1817 gen_op(op1 + 1); /* TOK_xxxC2 */
1818 } else {
1819 gen_op(op);
1820 /* stack: H1 H2 (L1 op L2) */
1821 vrotb(3);
1822 vrotb(3);
1823 /* stack: (L1 op L2) H1 H2 */
1824 gen_op(op);
1825 /* stack: (L1 op L2) (H1 op H2) */
1827 /* stack: L H */
1828 lbuild(t);
1829 break;
1830 case TOK_SAR:
1831 case TOK_SHR:
1832 case TOK_SHL:
1833 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1834 t = vtop[-1].type.t;
1835 vswap();
1836 lexpand();
1837 vrotb(3);
1838 /* stack: L H shift */
1839 c = (int)vtop->c.i;
1840 /* constant: simpler */
1841 /* NOTE: all comments are for SHL. the other cases are
1842 done by swapping words */
1843 vpop();
1844 if (op != TOK_SHL)
1845 vswap();
1846 if (c >= 32) {
1847 /* stack: L H */
1848 vpop();
1849 if (c > 32) {
1850 vpushi(c - 32);
1851 gen_op(op);
1853 if (op != TOK_SAR) {
1854 vpushi(0);
1855 } else {
1856 gv_dup();
1857 vpushi(31);
1858 gen_op(TOK_SAR);
1860 vswap();
1861 } else {
1862 vswap();
1863 gv_dup();
1864 /* stack: H L L */
1865 vpushi(c);
1866 gen_op(op);
1867 vswap();
1868 vpushi(32 - c);
1869 if (op == TOK_SHL)
1870 gen_op(TOK_SHR);
1871 else
1872 gen_op(TOK_SHL);
1873 vrotb(3);
1874 /* stack: L L H */
1875 vpushi(c);
1876 if (op == TOK_SHL)
1877 gen_op(TOK_SHL);
1878 else
1879 gen_op(TOK_SHR);
1880 gen_op('|');
1882 if (op != TOK_SHL)
1883 vswap();
1884 lbuild(t);
1885 } else {
1886 /* XXX: should provide a faster fallback on x86 ? */
1887 switch(op) {
1888 case TOK_SAR:
1889 func = TOK___ashrdi3;
1890 goto gen_func;
1891 case TOK_SHR:
1892 func = TOK___lshrdi3;
1893 goto gen_func;
1894 case TOK_SHL:
1895 func = TOK___ashldi3;
1896 goto gen_func;
1899 break;
1900 default:
1901 /* compare operations */
1902 t = vtop->type.t;
1903 vswap();
1904 lexpand();
1905 vrotb(3);
1906 lexpand();
1907 /* stack: L1 H1 L2 H2 */
1908 tmp = vtop[-1];
1909 vtop[-1] = vtop[-2];
1910 vtop[-2] = tmp;
1911 /* stack: L1 L2 H1 H2 */
1912 /* compare high */
1913 op1 = op;
1914 /* when values are equal, we need to compare low words. since
1915 the jump is inverted, we invert the test too. */
1916 if (op1 == TOK_LT)
1917 op1 = TOK_LE;
1918 else if (op1 == TOK_GT)
1919 op1 = TOK_GE;
1920 else if (op1 == TOK_ULT)
1921 op1 = TOK_ULE;
1922 else if (op1 == TOK_UGT)
1923 op1 = TOK_UGE;
1924 a = 0;
1925 b = 0;
1926 gen_op(op1);
1927 if (op == TOK_NE) {
1928 b = gvtst(0, 0);
1929 } else {
1930 a = gvtst(1, 0);
1931 if (op != TOK_EQ) {
1932 /* generate non equal test */
1933 vpushi(TOK_NE);
1934 vtop->r = VT_CMP;
1935 b = gvtst(0, 0);
1938 /* compare low. Always unsigned */
1939 op1 = op;
1940 if (op1 == TOK_LT)
1941 op1 = TOK_ULT;
1942 else if (op1 == TOK_LE)
1943 op1 = TOK_ULE;
1944 else if (op1 == TOK_GT)
1945 op1 = TOK_UGT;
1946 else if (op1 == TOK_GE)
1947 op1 = TOK_UGE;
1948 gen_op(op1);
1949 a = gvtst(1, a);
1950 gsym(b);
1951 vseti(VT_JMPI, a);
1952 break;
1955 #endif
1957 static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
1959 uint64_t x = (a >> 63 ? -a : a) / (b >> 63 ? -b : b);
1960 return (a ^ b) >> 63 ? -x : x;
1963 static int gen_opic_lt(uint64_t a, uint64_t b)
1965 return (a ^ (uint64_t)1 << 63) < (b ^ (uint64_t)1 << 63);
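/* The two helpers above implement 64-bit signed arithmetic on unsigned
   operands: gen_opic_sdiv() negates negative operands, divides the
   magnitudes and restores the sign of the quotient from a ^ b, while
   gen_opic_lt() turns a signed comparison into an unsigned one by
   flipping the sign bit of both operands. */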
1968 /* handle integer constant optimizations and various
1969 machine-independent optimizations */
1970 static void gen_opic(int op)
1972 SValue *v1 = vtop - 1;
1973 SValue *v2 = vtop;
1974 int t1 = v1->type.t & VT_BTYPE;
1975 int t2 = v2->type.t & VT_BTYPE;
1976 int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1977 int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1978 uint64_t l1 = c1 ? v1->c.i : 0;
1979 uint64_t l2 = c2 ? v2->c.i : 0;
1980 int shm = (t1 == VT_LLONG) ? 63 : 31;
1982 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
1983 l1 = ((uint32_t)l1 |
1984 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
1985 if (t2 != VT_LLONG && (PTR_SIZE != 8 || t2 != VT_PTR))
1986 l2 = ((uint32_t)l2 |
1987 (v2->type.t & VT_UNSIGNED ? 0 : -(l2 & 0x80000000)));
1989 if (c1 && c2) {
1990 switch(op) {
1991 case '+': l1 += l2; break;
1992 case '-': l1 -= l2; break;
1993 case '&': l1 &= l2; break;
1994 case '^': l1 ^= l2; break;
1995 case '|': l1 |= l2; break;
1996 case '*': l1 *= l2; break;
1998 case TOK_PDIV:
1999 case '/':
2000 case '%':
2001 case TOK_UDIV:
2002 case TOK_UMOD:
2003 /* if division by zero, generate explicit division */
2004 if (l2 == 0) {
2005 if (const_wanted)
2006 tcc_error("division by zero in constant");
2007 goto general_case;
2009 switch(op) {
2010 default: l1 = gen_opic_sdiv(l1, l2); break;
2011 case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
2012 case TOK_UDIV: l1 = l1 / l2; break;
2013 case TOK_UMOD: l1 = l1 % l2; break;
2015 break;
2016 case TOK_SHL: l1 <<= (l2 & shm); break;
2017 case TOK_SHR: l1 >>= (l2 & shm); break;
2018 case TOK_SAR:
2019 l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
2020 break;
2021 /* tests */
2022 case TOK_ULT: l1 = l1 < l2; break;
2023 case TOK_UGE: l1 = l1 >= l2; break;
2024 case TOK_EQ: l1 = l1 == l2; break;
2025 case TOK_NE: l1 = l1 != l2; break;
2026 case TOK_ULE: l1 = l1 <= l2; break;
2027 case TOK_UGT: l1 = l1 > l2; break;
2028 case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
2029 case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
2030 case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
2031 case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
2032 /* logical */
2033 case TOK_LAND: l1 = l1 && l2; break;
2034 case TOK_LOR: l1 = l1 || l2; break;
2035 default:
2036 goto general_case;
2038 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
2039 l1 = ((uint32_t)l1 |
2040 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
2041 v1->c.i = l1;
2042 vtop--;
2043 } else {
2044 /* if commutative ops, put c2 as constant */
2045 if (c1 && (op == '+' || op == '&' || op == '^' ||
2046 op == '|' || op == '*')) {
2047 vswap();
2048 c2 = c1; //c = c1, c1 = c2, c2 = c;
2049 l2 = l1; //l = l1, l1 = l2, l2 = l;
2051 if (!const_wanted &&
2052 c1 && ((l1 == 0 &&
2053 (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
2054 (l1 == -1 && op == TOK_SAR))) {
2055 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
2056 vtop--;
2057 } else if (!const_wanted &&
2058 c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
2059 (op == '|' &&
2060 (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))) ||
2061 (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
2062 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
2063 if (l2 == 1)
2064 vtop->c.i = 0;
2065 vswap();
2066 vtop--;
2067 } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
2068 op == TOK_PDIV) &&
2069 l2 == 1) ||
2070 ((op == '+' || op == '-' || op == '|' || op == '^' ||
2071 op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
2072 l2 == 0) ||
2073 (op == '&' &&
2074 (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))))) {
2075 /* filter out NOP operations like x*1, x-0, x&-1... */
2076 vtop--;
2077 } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
2078 /* try to use shifts instead of muls or divs */
2079 if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
2080 int n = -1;
2081 while (l2) {
2082 l2 >>= 1;
2083 n++;
2085 vtop->c.i = n;
2086 if (op == '*')
2087 op = TOK_SHL;
2088 else if (op == TOK_PDIV)
2089 op = TOK_SAR;
2090 else
2091 op = TOK_SHR;
2093 goto general_case;
2094 } else if (c2 && (op == '+' || op == '-') &&
2095 (((vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM))
2096 || (vtop[-1].r & (VT_VALMASK | VT_LVAL)) == VT_LOCAL)) {
2097 /* symbol + constant case */
2098 if (op == '-')
2099 l2 = -l2;
2100 l2 += vtop[-1].c.i;
2101 /* The backends can't always deal with addends to symbols
2102 larger than +-1<<31. Don't construct such. */
2103 if ((int)l2 != l2)
2104 goto general_case;
2105 vtop--;
2106 vtop->c.i = l2;
2107 } else {
2108 general_case:
2109 /* call low level op generator */
2110 if (t1 == VT_LLONG || t2 == VT_LLONG ||
2111 (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
2112 gen_opl(op);
2113 else
2114 gen_opi(op);
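/* A small illustration of the constant folding above, kept disabled in
   the style of the other #if 0 debug snippets in this file (the helper
   name is purely illustrative): pushing two integer constants and
   calling gen_op() leaves a single folded constant on the value stack
   and emits no code. */
#if 0
static void gen_opic_demo(void)
{
    vpushi(6);
    vpushi(7);
    gen_op('*');      /* vtop is now the constant 42 (folded, no code emitted) */
    vpushi(3);
    gen_op(TOK_SHL);  /* still a constant: 42 << 3 == 336 */
    vpop();
}
#endif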
2119 /* generate a floating point operation with constant propagation */
2120 static void gen_opif(int op)
2122 int c1, c2;
2123 SValue *v1, *v2;
2124 #if defined _MSC_VER && defined _AMD64_
2125 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2126 volatile
2127 #endif
2128 long double f1, f2;
2130 v1 = vtop - 1;
2131 v2 = vtop;
2132 /* currently, we cannot do computations with forward symbols */
2133 c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2134 c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2135 if (c1 && c2) {
2136 if (v1->type.t == VT_FLOAT) {
2137 f1 = v1->c.f;
2138 f2 = v2->c.f;
2139 } else if (v1->type.t == VT_DOUBLE) {
2140 f1 = v1->c.d;
2141 f2 = v2->c.d;
2142 } else {
2143 f1 = v1->c.ld;
2144 f2 = v2->c.ld;
2147 /* NOTE: we only do constant propagation if finite number (not
2148 NaN or infinity) (ANSI spec) */
2149 if (!ieee_finite(f1) || !ieee_finite(f2))
2150 goto general_case;
2152 switch(op) {
2153 case '+': f1 += f2; break;
2154 case '-': f1 -= f2; break;
2155 case '*': f1 *= f2; break;
2156 case '/':
2157 if (f2 == 0.0) {
2158 /* If not in initializer we need to potentially generate
2159 FP exceptions at runtime, otherwise we want to fold. */
2160 if (!const_wanted)
2161 goto general_case;
2163 f1 /= f2;
2164 break;
2165 /* XXX: also handles tests ? */
2166 default:
2167 goto general_case;
2169 /* XXX: overflow test ? */
2170 if (v1->type.t == VT_FLOAT) {
2171 v1->c.f = f1;
2172 } else if (v1->type.t == VT_DOUBLE) {
2173 v1->c.d = f1;
2174 } else {
2175 v1->c.ld = f1;
2177 vtop--;
2178 } else {
2179 general_case:
2180 gen_opf(op);
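/* Illustrative note (added commentary, not part of the original source):
   with both operands constant and finite, e.g. 1.5 + 2.25, the result 3.75
   is computed here at compile time and no code is emitted.  A division by
   the constant 0.0 is folded only in constant expressions (const_wanted);
   otherwise code is generated so that any FP exception still happens at
   run time.  NaN or infinite operands always fall through to gen_opf(). */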
2184 static int pointed_size(CType *type)
2186 int align;
2187 return type_size(pointed_type(type), &align);
2190 static void vla_runtime_pointed_size(CType *type)
2192 int align;
2193 vla_runtime_type_size(pointed_type(type), &align);
2196 static inline int is_null_pointer(SValue *p)
2198 if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
2199 return 0;
2200 return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
2201 ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
2202 ((p->type.t & VT_BTYPE) == VT_PTR &&
2203 (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0) &&
2204 ((pointed_type(&p->type)->t & VT_BTYPE) == VT_VOID) &&
2205 0 == (pointed_type(&p->type)->t & (VT_CONSTANT | VT_VOLATILE))
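/* Illustrative note (added commentary, not part of the original source):
   expressions recognized as null pointer constants by the test above: the
   integer constants 0 and 0LL, and (void *)0 (a constant pointer to
   unqualified void with value 0).  Something like (char *)0 is not,
   because the pointed-to type is not void. */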
2208 static inline int is_integer_btype(int bt)
2210 return (bt == VT_BYTE || bt == VT_SHORT ||
2211 bt == VT_INT || bt == VT_LLONG);
2214 /* check types for comparison or subtraction of pointers */
2215 static void check_comparison_pointer_types(SValue *p1, SValue *p2, int op)
2217 CType *type1, *type2, tmp_type1, tmp_type2;
2218 int bt1, bt2;
2220 /* null pointers are accepted for all comparisons, as in gcc */
2221 if (is_null_pointer(p1) || is_null_pointer(p2))
2222 return;
2223 type1 = &p1->type;
2224 type2 = &p2->type;
2225 bt1 = type1->t & VT_BTYPE;
2226 bt2 = type2->t & VT_BTYPE;
2227 /* accept comparison between pointer and integer with a warning */
2228 if ((is_integer_btype(bt1) || is_integer_btype(bt2)) && op != '-') {
2229 if (op != TOK_LOR && op != TOK_LAND )
2230 tcc_warning("comparison between pointer and integer");
2231 return;
2234 /* both must be pointers or implicit function pointers */
2235 if (bt1 == VT_PTR) {
2236 type1 = pointed_type(type1);
2237 } else if (bt1 != VT_FUNC)
2238 goto invalid_operands;
2240 if (bt2 == VT_PTR) {
2241 type2 = pointed_type(type2);
2242 } else if (bt2 != VT_FUNC) {
2243 invalid_operands:
2244 tcc_error("invalid operands to binary %s", get_tok_str(op, NULL));
2246 if ((type1->t & VT_BTYPE) == VT_VOID ||
2247 (type2->t & VT_BTYPE) == VT_VOID)
2248 return;
2249 tmp_type1 = *type1;
2250 tmp_type2 = *type2;
2251 tmp_type1.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
2252 tmp_type2.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
2253 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
2254 /* gcc-like error if '-' is used */
2255 if (op == '-')
2256 goto invalid_operands;
2257 else
2258 tcc_warning("comparison of distinct pointer types lacks a cast");
2262 /* generic gen_op: handles types problems */
2263 ST_FUNC void gen_op(int op)
2265 int u, t1, t2, bt1, bt2, t;
2266 CType type1;
2268 redo:
2269 t1 = vtop[-1].type.t;
2270 t2 = vtop[0].type.t;
2271 bt1 = t1 & VT_BTYPE;
2272 bt2 = t2 & VT_BTYPE;
2274 if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
2275 tcc_error("operation on a struct");
2276 } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
2277 if (bt2 == VT_FUNC) {
2278 mk_pointer(&vtop->type);
2279 gaddrof();
2281 if (bt1 == VT_FUNC) {
2282 vswap();
2283 mk_pointer(&vtop->type);
2284 gaddrof();
2285 vswap();
2287 goto redo;
2288 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
2289 /* at least one operand is a pointer */
2290 /* relational op: must be both pointers */
2291 if (op >= TOK_ULT && op <= TOK_LOR) {
2292 check_comparison_pointer_types(vtop - 1, vtop, op);
2293 /* pointers are handled as unsigned */
2294 #if PTR_SIZE == 8
2295 t = VT_LLONG | VT_UNSIGNED;
2296 #else
2297 t = VT_INT | VT_UNSIGNED;
2298 #endif
2299 goto std_op;
2301 /* if both pointers, then it must be the '-' op */
2302 if (bt1 == VT_PTR && bt2 == VT_PTR) {
2303 if (op != '-')
2304 tcc_error("cannot use pointers here");
2305 check_comparison_pointer_types(vtop - 1, vtop, op);
2306 /* XXX: check that types are compatible */
2307 if (vtop[-1].type.t & VT_VLA) {
2308 vla_runtime_pointed_size(&vtop[-1].type);
2309 } else {
2310 vpushi(pointed_size(&vtop[-1].type));
2312 vrott(3);
2313 gen_opic(op);
2314 vtop->type.t = ptrdiff_type.t;
2315 vswap();
2316 gen_op(TOK_PDIV);
2317 } else {
2318 /* exactly one pointer : must be '+' or '-'. */
2319 if (op != '-' && op != '+')
2320 tcc_error("cannot use pointers here");
2321 /* Put pointer as first operand */
2322 if (bt2 == VT_PTR) {
2323 vswap();
2324 t = t1, t1 = t2, t2 = t;
2326 #if PTR_SIZE == 4
2327 if ((vtop[0].type.t & VT_BTYPE) == VT_LLONG)
2328 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2329 gen_cast_s(VT_INT);
2330 #endif
2331 type1 = vtop[-1].type;
2332 type1.t &= ~VT_ARRAY;
2333 if (vtop[-1].type.t & VT_VLA)
2334 vla_runtime_pointed_size(&vtop[-1].type);
2335 else {
2336 u = pointed_size(&vtop[-1].type);
2337 if (u < 0)
2338 tcc_error("unknown array element size");
2339 #if PTR_SIZE == 8
2340 vpushll(u);
2341 #else
2342 /* XXX: cast to int ? (long long case) */
2343 vpushi(u);
2344 #endif
2346 gen_op('*');
2347 #if 0
2348 /* #ifdef CONFIG_TCC_BCHECK
2349 The main reason for removing this code:
2350 #include <stdio.h>
2351 int main ()
2353 int v[10];
2354 int i = 10;
2355 int j = 9;
2356 fprintf(stderr, "v+i-j = %p\n", v+i-j);
2357 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
2359 When this code is on, the output looks like
2360 v+i-j = 0xfffffffe
2361 v+(i-j) = 0xbff84000
2363 /* if evaluating constant expression, no code should be
2364 generated, so no bound check */
2365 if (tcc_state->do_bounds_check && !const_wanted) {
2366 /* if bounded pointers, we generate a special code to
2367 test bounds */
2368 if (op == '-') {
2369 vpushi(0);
2370 vswap();
2371 gen_op('-');
2373 gen_bounded_ptr_add();
2374 } else
2375 #endif
2377 gen_opic(op);
2379 /* put the type back in case gen_opic() swapped operands */
2380 vtop->type = type1;
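/* Illustrative note (added commentary, not part of the original source):
   for 'int *p' (assuming a 4-byte int) the scaling above turns
       p + 3   into  p + 3 * 4       (the index is multiplied by pointed_size)
       q - p   into  (q - p) / 4     (byte difference divided via TOK_PDIV)
   VLA element types go through vla_runtime_pointed_size() so the scale
   factor is pushed as a run-time value instead of a constant. */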
2382 } else if (is_float(bt1) || is_float(bt2)) {
2383 /* compute bigger type and do implicit casts */
2384 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
2385 t = VT_LDOUBLE;
2386 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
2387 t = VT_DOUBLE;
2388 } else {
2389 t = VT_FLOAT;
2391 /* floats can only be used for a few operations */
2392 if (op != '+' && op != '-' && op != '*' && op != '/' &&
2393 (op < TOK_ULT || op > TOK_GT))
2394 tcc_error("invalid operands for binary operation");
2395 goto std_op;
2396 } else if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL) {
2397 t = bt1 == VT_LLONG ? VT_LLONG : VT_INT;
2398 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (t | VT_UNSIGNED))
2399 t |= VT_UNSIGNED;
2400 t |= (VT_LONG & t1);
2401 goto std_op;
2402 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
2403 /* cast to biggest op */
2404 t = VT_LLONG | VT_LONG;
2405 if (bt1 == VT_LLONG)
2406 t &= t1;
2407 if (bt2 == VT_LLONG)
2408 t &= t2;
2409 /* convert to unsigned if it does not fit in a long long */
2410 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
2411 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
2412 t |= VT_UNSIGNED;
2413 goto std_op;
2414 } else {
2415 /* integer operations */
2416 t = VT_INT | (VT_LONG & (t1 | t2));
2417 /* convert to unsigned if it does not fit in an integer */
2418 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
2419 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
2420 t |= VT_UNSIGNED;
2421 std_op:
2422 /* XXX: currently, some unsigned operations are explicit, so
2423 we modify them here */
2424 if (t & VT_UNSIGNED) {
2425 if (op == TOK_SAR)
2426 op = TOK_SHR;
2427 else if (op == '/')
2428 op = TOK_UDIV;
2429 else if (op == '%')
2430 op = TOK_UMOD;
2431 else if (op == TOK_LT)
2432 op = TOK_ULT;
2433 else if (op == TOK_GT)
2434 op = TOK_UGT;
2435 else if (op == TOK_LE)
2436 op = TOK_ULE;
2437 else if (op == TOK_GE)
2438 op = TOK_UGE;
2440 vswap();
2441 type1.t = t;
2442 type1.ref = NULL;
2443 gen_cast(&type1);
2444 vswap();
2445 /* special case for shifts and long long: we keep the shift as
2446 an integer */
2447 if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
2448 type1.t = VT_INT;
2449 gen_cast(&type1);
2450 if (is_float(t))
2451 gen_opif(op);
2452 else
2453 gen_opic(op);
2454 if (op >= TOK_ULT && op <= TOK_GT) {
2455 /* relational op: the result is an int */
2456 vtop->type.t = VT_INT;
2457 } else {
2458 vtop->type.t = t;
2461 // Make sure that we have converted to an rvalue:
2462 if (vtop->r & VT_LVAL)
2463 gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
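/* Illustrative note (added commentary, not part of the original source):
   example of the implicit conversions performed above: for
       unsigned int u; int i;  u < i
   both operands are cast to unsigned int and TOK_LT is replaced by
   TOK_ULT, while the result of any relational operator gets type int.
   For a shift such as ll << i the right operand is kept as an int. */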
2466 #ifndef TCC_TARGET_ARM
2467 /* generic itof for unsigned long long case */
2468 static void gen_cvt_itof1(int t)
2470 #ifdef TCC_TARGET_ARM64
2471 gen_cvt_itof(t);
2472 #else
2473 if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
2474 (VT_LLONG | VT_UNSIGNED)) {
2476 if (t == VT_FLOAT)
2477 vpush_global_sym(&func_old_type, TOK___floatundisf);
2478 #if LDOUBLE_SIZE != 8
2479 else if (t == VT_LDOUBLE)
2480 vpush_global_sym(&func_old_type, TOK___floatundixf);
2481 #endif
2482 else
2483 vpush_global_sym(&func_old_type, TOK___floatundidf);
2484 vrott(2);
2485 gfunc_call(1);
2486 vpushi(0);
2487 vtop->r = reg_fret(t);
2488 } else {
2489 gen_cvt_itof(t);
2491 #endif
2493 #endif
2495 /* generic ftoi for unsigned long long case */
2496 static void gen_cvt_ftoi1(int t)
2498 #ifdef TCC_TARGET_ARM64
2499 gen_cvt_ftoi(t);
2500 #else
2501 int st;
2503 if (t == (VT_LLONG | VT_UNSIGNED)) {
2504 /* not handled natively */
2505 st = vtop->type.t & VT_BTYPE;
2506 if (st == VT_FLOAT)
2507 vpush_global_sym(&func_old_type, TOK___fixunssfdi);
2508 #if LDOUBLE_SIZE != 8
2509 else if (st == VT_LDOUBLE)
2510 vpush_global_sym(&func_old_type, TOK___fixunsxfdi);
2511 #endif
2512 else
2513 vpush_global_sym(&func_old_type, TOK___fixunsdfdi);
2514 vrott(2);
2515 gfunc_call(1);
2516 vpushi(0);
2517 vtop->r = REG_IRET;
2518 vtop->r2 = REG_LRET;
2519 } else {
2520 gen_cvt_ftoi(t);
2522 #endif
2525 /* force char or short cast */
2526 static void force_charshort_cast(int t)
2528 int bits, dbt;
2530 /* cannot cast static initializers */
2531 if (STATIC_DATA_WANTED)
2532 return;
2534 dbt = t & VT_BTYPE;
2535 /* XXX: add optimization if lvalue : just change type and offset */
2536 if (dbt == VT_BYTE)
2537 bits = 8;
2538 else
2539 bits = 16;
2540 if (t & VT_UNSIGNED) {
2541 vpushi((1 << bits) - 1);
2542 gen_op('&');
2543 } else {
2544 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
2545 bits = 64 - bits;
2546 else
2547 bits = 32 - bits;
2548 vpushi(bits);
2549 gen_op(TOK_SHL);
2550 /* the result must be signed, or the SAR is converted to an SHL.
2551 This was not the case when "t" was a signed short
2552 and the last value on the stack was an unsigned int */
2553 vtop->type.t &= ~VT_UNSIGNED;
2554 vpushi(bits);
2555 gen_op(TOK_SAR);
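/* Illustrative note (added commentary, not part of the original source):
   assuming a 32-bit int on the value stack, casting it to
       unsigned char:  value & 0xff          e.g. 0x1ff -> 0xff (255)
       signed char:    (value << 24) >> 24   e.g. 0x1ff -> -1, the sign bit
                                             is replicated by the arithmetic
                                             shift TOK_SAR */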
2559 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2560 static void gen_cast_s(int t)
2562 CType type;
2563 type.t = t;
2564 type.ref = NULL;
2565 gen_cast(&type);
2568 static void gen_cast(CType *type)
2570 int sbt, dbt, sf, df, c, p;
2572 /* special delayed cast for char/short */
2573 /* XXX: in some cases (multiple cascaded casts), it may still
2574 be incorrect */
2575 if (vtop->r & VT_MUSTCAST) {
2576 vtop->r &= ~VT_MUSTCAST;
2577 force_charshort_cast(vtop->type.t);
2580 /* bitfields first get cast to ints */
2581 if (vtop->type.t & VT_BITFIELD) {
2582 gv(RC_INT);
2585 dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
2586 sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
2588 if (sbt != dbt) {
2589 sf = is_float(sbt);
2590 df = is_float(dbt);
2591 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2592 p = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM);
2593 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
2594 c &= dbt != VT_LDOUBLE;
2595 #endif
2596 if (c) {
2597 /* constant case: we can do it now */
2598 /* XXX: in ISO C, cannot do it if there is an error in the conversion */
2599 if (sbt == VT_FLOAT)
2600 vtop->c.ld = vtop->c.f;
2601 else if (sbt == VT_DOUBLE)
2602 vtop->c.ld = vtop->c.d;
2604 if (df) {
2605 if ((sbt & VT_BTYPE) == VT_LLONG) {
2606 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
2607 vtop->c.ld = vtop->c.i;
2608 else
2609 vtop->c.ld = -(long double)-vtop->c.i;
2610 } else if(!sf) {
2611 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
2612 vtop->c.ld = (uint32_t)vtop->c.i;
2613 else
2614 vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
2617 if (dbt == VT_FLOAT)
2618 vtop->c.f = (float)vtop->c.ld;
2619 else if (dbt == VT_DOUBLE)
2620 vtop->c.d = (double)vtop->c.ld;
2621 } else if (sf && dbt == (VT_LLONG|VT_UNSIGNED)) {
2622 vtop->c.i = vtop->c.ld;
2623 } else if (sf && dbt == VT_BOOL) {
2624 vtop->c.i = (vtop->c.ld != 0);
2625 } else {
2626 if(sf)
2627 vtop->c.i = vtop->c.ld;
2628 else if (sbt == (VT_LLONG|VT_UNSIGNED))
2630 else if (sbt & VT_UNSIGNED)
2631 vtop->c.i = (uint32_t)vtop->c.i;
2632 #if PTR_SIZE == 8
2633 else if (sbt == VT_PTR)
2635 #endif
2636 else if (sbt != VT_LLONG)
2637 vtop->c.i = ((uint32_t)vtop->c.i |
2638 -(vtop->c.i & 0x80000000));
2640 if (dbt == (VT_LLONG|VT_UNSIGNED))
2642 else if (dbt == VT_BOOL)
2643 vtop->c.i = (vtop->c.i != 0);
2644 #if PTR_SIZE == 8
2645 else if (dbt == VT_PTR)
2647 #endif
2648 else if (dbt != VT_LLONG) {
2649 uint32_t m = ((dbt & VT_BTYPE) == VT_BYTE ? 0xff :
2650 (dbt & VT_BTYPE) == VT_SHORT ? 0xffff :
2651 0xffffffff);
2652 vtop->c.i &= m;
2653 if (!(dbt & VT_UNSIGNED))
2654 vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
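/* Illustrative note (added commentary, not part of the original source):
   for a constant cast to signed char the mask m is 0xff, so e.g. 0x1ff is
   first reduced to 0xff and then sign-extended via c.i |= -(c.i & 0x80),
   giving -1; 0x17f becomes 127 because its bit 7 is clear. */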
2657 } else if (p && dbt == VT_BOOL) {
2658 vtop->r = VT_CONST;
2659 vtop->c.i = 1;
2660 } else {
2661 /* non constant case: generate code */
2662 if (sf && df) {
2663 /* convert from fp to fp */
2664 gen_cvt_ftof(dbt);
2665 } else if (df) {
2666 /* convert int to fp */
2667 gen_cvt_itof1(dbt);
2668 } else if (sf) {
2669 /* convert fp to int */
2670 if (dbt == VT_BOOL) {
2671 vpushi(0);
2672 gen_op(TOK_NE);
2673 } else {
2674 /* we handle char/short/etc... with generic code */
2675 if (dbt != (VT_INT | VT_UNSIGNED) &&
2676 dbt != (VT_LLONG | VT_UNSIGNED) &&
2677 dbt != VT_LLONG)
2678 dbt = VT_INT;
2679 gen_cvt_ftoi1(dbt);
2680 if (dbt == VT_INT && (type->t & (VT_BTYPE | VT_UNSIGNED)) != dbt) {
2681 /* additional cast for char/short... */
2682 vtop->type.t = dbt;
2683 gen_cast(type);
2686 #if PTR_SIZE == 4
2687 } else if ((dbt & VT_BTYPE) == VT_LLONG) {
2688 if ((sbt & VT_BTYPE) != VT_LLONG) {
2689 /* scalar to long long */
2690 /* machine independent conversion */
2691 gv(RC_INT);
2692 /* generate high word */
2693 if (sbt == (VT_INT | VT_UNSIGNED)) {
2694 vpushi(0);
2695 gv(RC_INT);
2696 } else {
2697 if (sbt == VT_PTR) {
2698 /* cast from pointer to int before we apply
2699 shift operation, which pointers don't support */
2700 gen_cast_s(VT_INT);
2702 gv_dup();
2703 vpushi(31);
2704 gen_op(TOK_SAR);
2706 /* patch second register */
2707 vtop[-1].r2 = vtop->r;
2708 vpop();
2710 #else
2711 } else if ((dbt & VT_BTYPE) == VT_LLONG ||
2712 (dbt & VT_BTYPE) == VT_PTR ||
2713 (dbt & VT_BTYPE) == VT_FUNC) {
2714 if ((sbt & VT_BTYPE) != VT_LLONG &&
2715 (sbt & VT_BTYPE) != VT_PTR &&
2716 (sbt & VT_BTYPE) != VT_FUNC) {
2717 /* need to convert from 32bit to 64bit */
2718 gv(RC_INT);
2719 if (sbt != (VT_INT | VT_UNSIGNED)) {
2720 #if defined(TCC_TARGET_ARM64)
2721 gen_cvt_sxtw();
2722 #elif defined(TCC_TARGET_X86_64)
2723 int r = gv(RC_INT);
2724 /* x86_64 specific: movslq */
2725 o(0x6348);
2726 o(0xc0 + (REG_VALUE(r) << 3) + REG_VALUE(r));
2727 #else
2728 #error
2729 #endif
2732 #endif
2733 } else if (dbt == VT_BOOL) {
2734 /* scalar to bool */
2735 vpushi(0);
2736 gen_op(TOK_NE);
2737 } else if ((dbt & VT_BTYPE) == VT_BYTE ||
2738 (dbt & VT_BTYPE) == VT_SHORT) {
2739 if (sbt == VT_PTR) {
2740 vtop->type.t = VT_INT;
2741 tcc_warning("nonportable conversion from pointer to char/short");
2743 force_charshort_cast(dbt);
2744 } else if ((dbt & VT_BTYPE) == VT_INT) {
2745 /* scalar to int */
2746 if ((sbt & VT_BTYPE) == VT_LLONG) {
2747 #if PTR_SIZE == 4
2748 /* from long long: just take low order word */
2749 lexpand();
2750 vpop();
2751 #else
2752 vpushi(0xffffffff);
2753 vtop->type.t |= VT_UNSIGNED;
2754 gen_op('&');
2755 #endif
2757 /* if lvalue and single word type, nothing to do because
2758 the lvalue already contains the real type size (see
2759 VT_LVAL_xxx constants) */
2762 } else if ((dbt & VT_BTYPE) == VT_PTR && !(vtop->r & VT_LVAL)) {
2763 /* if we are casting between pointer types,
2764 we must update the VT_LVAL_xxx size */
2765 vtop->r = (vtop->r & ~VT_LVAL_TYPE)
2766 | (lvalue_type(type->ref->type.t) & VT_LVAL_TYPE);
2768 vtop->type = *type;
2769 vtop->type.t &= ~ ( VT_CONSTANT | VT_VOLATILE | VT_ARRAY );
2772 /* return type size as known at compile time. Put alignment at 'a' */
2773 ST_FUNC int type_size(CType *type, int *a)
2775 Sym *s;
2776 int bt;
2778 bt = type->t & VT_BTYPE;
2779 if (bt == VT_STRUCT) {
2780 /* struct/union */
2781 s = type->ref;
2782 *a = s->r;
2783 return s->c;
2784 } else if (bt == VT_PTR) {
2785 if (type->t & VT_ARRAY) {
2786 int ts;
2788 s = type->ref;
2789 ts = type_size(&s->type, a);
2791 if (ts < 0 && s->c < 0)
2792 ts = -ts;
2794 return ts * s->c;
2795 } else {
2796 *a = PTR_SIZE;
2797 return PTR_SIZE;
2799 } else if (IS_ENUM(type->t) && type->ref->c < 0) {
2800 return -1; /* incomplete enum */
2801 } else if (bt == VT_LDOUBLE) {
2802 *a = LDOUBLE_ALIGN;
2803 return LDOUBLE_SIZE;
2804 } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
2805 #ifdef TCC_TARGET_I386
2806 #ifdef TCC_TARGET_PE
2807 *a = 8;
2808 #else
2809 *a = 4;
2810 #endif
2811 #elif defined(TCC_TARGET_ARM)
2812 #ifdef TCC_ARM_EABI
2813 *a = 8;
2814 #else
2815 *a = 4;
2816 #endif
2817 #else
2818 *a = 8;
2819 #endif
2820 return 8;
2821 } else if (bt == VT_INT || bt == VT_FLOAT) {
2822 *a = 4;
2823 return 4;
2824 } else if (bt == VT_SHORT) {
2825 *a = 2;
2826 return 2;
2827 } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
2828 *a = 8;
2829 return 16;
2830 } else {
2831 /* char, void, function, _Bool */
2832 *a = 1;
2833 return 1;
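/* Illustrative note (added commentary, not part of the original source):
   a few results of type_size(), assuming a typical 32/64-bit target:
       int a[10]  -> element size 4 * count 10 = 40, alignment 4
       struct S   -> size and alignment come from the Sym (s->c, s->r)
                     computed earlier by struct_layout()
       int *p     -> PTR_SIZE for both size and alignment
   Incomplete types (e.g. an undefined enum) yield a negative size. */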
2837 /* push type size as known at run time on top of the value stack. Put
2838 alignment at 'a' */
2839 ST_FUNC void vla_runtime_type_size(CType *type, int *a)
2841 if (type->t & VT_VLA) {
2842 type_size(&type->ref->type, a);
2843 vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
2844 } else {
2845 vpushi(type_size(type, a));
2849 static void vla_sp_restore(void) {
2850 if (vlas_in_scope) {
2851 gen_vla_sp_restore(vla_sp_loc);
2855 static void vla_sp_restore_root(void) {
2856 if (vlas_in_scope) {
2857 gen_vla_sp_restore(vla_sp_root_loc);
2861 /* return the pointed type of t */
2862 static inline CType *pointed_type(CType *type)
2864 return &type->ref->type;
2867 /* modify type so that it becomes a pointer to the original type. */
2868 ST_FUNC void mk_pointer(CType *type)
2870 Sym *s;
2871 s = sym_push(SYM_FIELD, type, 0, -1);
2872 type->t = VT_PTR | (type->t & VT_STORAGE);
2873 type->ref = s;
2876 /* compare function types. OLD functions match any new functions */
2877 static int is_compatible_func(CType *type1, CType *type2)
2879 Sym *s1, *s2;
2881 s1 = type1->ref;
2882 s2 = type2->ref;
2883 if (!is_compatible_types(&s1->type, &s2->type))
2884 return 0;
2885 /* check func_call */
2886 if (s1->f.func_call != s2->f.func_call)
2887 return 0;
2888 /* XXX: not complete */
2889 if (s1->f.func_type == FUNC_OLD || s2->f.func_type == FUNC_OLD)
2890 return 1;
2891 if (s1->f.func_type != s2->f.func_type)
2892 return 0;
2893 while (s1 != NULL) {
2894 if (s2 == NULL)
2895 return 0;
2896 if (!is_compatible_unqualified_types(&s1->type, &s2->type))
2897 return 0;
2898 s1 = s1->next;
2899 s2 = s2->next;
2901 if (s2)
2902 return 0;
2903 return 1;
2906 /* return true if type1 and type2 are the same. If unqualified is
2907 true, qualifiers on the types are ignored.
2909 static int compare_types(CType *type1, CType *type2, int unqualified)
2911 int bt1, t1, t2;
2913 t1 = type1->t & VT_TYPE;
2914 t2 = type2->t & VT_TYPE;
2915 if (unqualified) {
2916 /* strip qualifiers before comparing */
2917 t1 &= ~(VT_CONSTANT | VT_VOLATILE);
2918 t2 &= ~(VT_CONSTANT | VT_VOLATILE);
2921 /* Default vs. explicit signedness only matters for char */
2922 if ((t1 & VT_BTYPE) != VT_BYTE) {
2923 t1 &= ~VT_DEFSIGN;
2924 t2 &= ~VT_DEFSIGN;
2926 /* XXX: bitfields ? */
2927 if (t1 != t2)
2928 return 0;
2929 /* test more complicated cases */
2930 bt1 = t1 & (VT_BTYPE | VT_ARRAY);
2931 if (bt1 == VT_PTR) {
2932 type1 = pointed_type(type1);
2933 type2 = pointed_type(type2);
2934 return is_compatible_types(type1, type2);
2935 } else if (bt1 & VT_ARRAY) {
2936 return type1->ref->c < 0 || type2->ref->c < 0
2937 || type1->ref->c == type2->ref->c;
2938 } else if (bt1 == VT_STRUCT) {
2939 return (type1->ref == type2->ref);
2940 } else if (bt1 == VT_FUNC) {
2941 return is_compatible_func(type1, type2);
2942 } else if (IS_ENUM(type1->t) || IS_ENUM(type2->t)) {
2943 return type1->ref == type2->ref;
2944 } else {
2945 return 1;
2949 /* return true if type1 and type2 are exactly the same (including
2950 qualifiers).
2952 static int is_compatible_types(CType *type1, CType *type2)
2954 return compare_types(type1,type2,0);
2957 /* return true if type1 and type2 are the same (ignoring qualifiers).
2959 static int is_compatible_unqualified_types(CType *type1, CType *type2)
2961 return compare_types(type1,type2,1);
2964 /* print a type. If 'varstr' is not NULL, then the variable is also
2965 printed in the type */
2966 /* XXX: union */
2967 /* XXX: add array and function pointers */
2968 static void type_to_str(char *buf, int buf_size,
2969 CType *type, const char *varstr)
2971 int bt, v, t;
2972 Sym *s, *sa;
2973 char buf1[256];
2974 const char *tstr;
2976 t = type->t;
2977 bt = t & VT_BTYPE;
2978 buf[0] = '\0';
2980 if (t & VT_EXTERN)
2981 pstrcat(buf, buf_size, "extern ");
2982 if (t & VT_STATIC)
2983 pstrcat(buf, buf_size, "static ");
2984 if (t & VT_TYPEDEF)
2985 pstrcat(buf, buf_size, "typedef ");
2986 if (t & VT_INLINE)
2987 pstrcat(buf, buf_size, "inline ");
2988 if (t & VT_VOLATILE)
2989 pstrcat(buf, buf_size, "volatile ");
2990 if (t & VT_CONSTANT)
2991 pstrcat(buf, buf_size, "const ");
2993 if (((t & VT_DEFSIGN) && bt == VT_BYTE)
2994 || ((t & VT_UNSIGNED)
2995 && (bt == VT_SHORT || bt == VT_INT || bt == VT_LLONG)
2996 && !IS_ENUM(t)
2998 pstrcat(buf, buf_size, (t & VT_UNSIGNED) ? "unsigned " : "signed ");
3000 buf_size -= strlen(buf);
3001 buf += strlen(buf);
3003 switch(bt) {
3004 case VT_VOID:
3005 tstr = "void";
3006 goto add_tstr;
3007 case VT_BOOL:
3008 tstr = "_Bool";
3009 goto add_tstr;
3010 case VT_BYTE:
3011 tstr = "char";
3012 goto add_tstr;
3013 case VT_SHORT:
3014 tstr = "short";
3015 goto add_tstr;
3016 case VT_INT:
3017 tstr = "int";
3018 goto maybe_long;
3019 case VT_LLONG:
3020 tstr = "long long";
3021 maybe_long:
3022 if (t & VT_LONG)
3023 tstr = "long";
3024 if (!IS_ENUM(t))
3025 goto add_tstr;
3026 tstr = "enum ";
3027 goto tstruct;
3028 case VT_FLOAT:
3029 tstr = "float";
3030 goto add_tstr;
3031 case VT_DOUBLE:
3032 tstr = "double";
3033 goto add_tstr;
3034 case VT_LDOUBLE:
3035 tstr = "long double";
3036 add_tstr:
3037 pstrcat(buf, buf_size, tstr);
3038 break;
3039 case VT_STRUCT:
3040 tstr = "struct ";
3041 if (IS_UNION(t))
3042 tstr = "union ";
3043 tstruct:
3044 pstrcat(buf, buf_size, tstr);
3045 v = type->ref->v & ~SYM_STRUCT;
3046 if (v >= SYM_FIRST_ANOM)
3047 pstrcat(buf, buf_size, "<anonymous>");
3048 else
3049 pstrcat(buf, buf_size, get_tok_str(v, NULL));
3050 break;
3051 case VT_FUNC:
3052 s = type->ref;
3053 buf1[0]=0;
3054 if (varstr && '*' == *varstr) {
3055 pstrcat(buf1, sizeof(buf1), "(");
3056 pstrcat(buf1, sizeof(buf1), varstr);
3057 pstrcat(buf1, sizeof(buf1), ")");
3059 pstrcat(buf1, sizeof(buf1), "(");
3060 sa = s->next;
3061 while (sa != NULL) {
3062 char buf2[256];
3063 type_to_str(buf2, sizeof(buf2), &sa->type, NULL);
3064 pstrcat(buf1, sizeof(buf1), buf2);
3065 sa = sa->next;
3066 if (sa)
3067 pstrcat(buf1, sizeof(buf1), ", ");
3069 if (s->f.func_type == FUNC_ELLIPSIS)
3070 pstrcat(buf1, sizeof(buf1), ", ...");
3071 pstrcat(buf1, sizeof(buf1), ")");
3072 type_to_str(buf, buf_size, &s->type, buf1);
3073 goto no_var;
3074 case VT_PTR:
3075 s = type->ref;
3076 if (t & VT_ARRAY) {
3077 if (varstr && '*' == *varstr)
3078 snprintf(buf1, sizeof(buf1), "(%s)[%d]", varstr, s->c);
3079 else
3080 snprintf(buf1, sizeof(buf1), "%s[%d]", varstr ? varstr : "", s->c);
3081 type_to_str(buf, buf_size, &s->type, buf1);
3082 goto no_var;
3084 pstrcpy(buf1, sizeof(buf1), "*");
3085 if (t & VT_CONSTANT)
3086 pstrcat(buf1, sizeof(buf1), "const ");
3087 if (t & VT_VOLATILE)
3088 pstrcat(buf1, sizeof(buf1), "volatile ");
3088 pstrcat(buf1, buf_size, "volatile ");
3089 if (varstr)
3090 pstrcat(buf1, sizeof(buf1), varstr);
3091 type_to_str(buf, buf_size, &s->type, buf1);
3092 goto no_var;
3094 if (varstr) {
3095 pstrcat(buf, buf_size, " ");
3096 pstrcat(buf, buf_size, varstr);
3098 no_var: ;
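/* Illustrative note (added commentary, not part of the original source):
   for a declaration such as 'int (*f)(char)', a call like
       type_to_str(buf, sizeof buf, &f_type, "f");
   (buf and f_type are hypothetical names, f_type holding f's CType) builds
   the string from the inside out: "f" -> "*f" -> "(*f)(char)" and finally
   "int (*f)(char)". */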
3101 /* verify type compatibility to store vtop in 'dt' type, and generate
3102 casts if needed. */
3103 static void gen_assign_cast(CType *dt)
3105 CType *st, *type1, *type2;
3106 char buf1[256], buf2[256];
3107 int dbt, sbt, qualwarn, lvl;
3109 st = &vtop->type; /* source type */
3110 dbt = dt->t & VT_BTYPE;
3111 sbt = st->t & VT_BTYPE;
3112 if (sbt == VT_VOID || dbt == VT_VOID) {
3113 if (sbt == VT_VOID && dbt == VT_VOID)
3114 ; /* It is Ok if both are void */
3115 else
3116 tcc_error("cannot cast from/to void");
3118 if (dt->t & VT_CONSTANT)
3119 tcc_warning("assignment of read-only location");
3120 switch(dbt) {
3121 case VT_PTR:
3122 /* special cases for pointers */
3123 /* '0' can also be a pointer */
3124 if (is_null_pointer(vtop))
3125 break;
3126 /* accept implicit pointer to integer cast with warning */
3127 if (is_integer_btype(sbt)) {
3128 tcc_warning("assignment makes pointer from integer without a cast");
3129 break;
3131 type1 = pointed_type(dt);
3132 if (sbt == VT_PTR)
3133 type2 = pointed_type(st);
3134 else if (sbt == VT_FUNC)
3135 type2 = st; /* a function is implicitly a function pointer */
3136 else
3137 goto error;
3138 if (is_compatible_types(type1, type2))
3139 break;
3140 for (qualwarn = lvl = 0;; ++lvl) {
3141 if (((type2->t & VT_CONSTANT) && !(type1->t & VT_CONSTANT)) ||
3142 ((type2->t & VT_VOLATILE) && !(type1->t & VT_VOLATILE)))
3143 qualwarn = 1;
3144 dbt = type1->t & (VT_BTYPE|VT_LONG);
3145 sbt = type2->t & (VT_BTYPE|VT_LONG);
3146 if (dbt != VT_PTR || sbt != VT_PTR)
3147 break;
3148 type1 = pointed_type(type1);
3149 type2 = pointed_type(type2);
3151 if (!is_compatible_unqualified_types(type1, type2)) {
3152 if ((dbt == VT_VOID || sbt == VT_VOID) && lvl == 0) {
3153 /* void * can match anything */
3154 } else if (dbt == sbt
3155 && is_integer_btype(sbt & VT_BTYPE)
3156 && IS_ENUM(type1->t) + IS_ENUM(type2->t)
3157 + !!((type1->t ^ type2->t) & VT_UNSIGNED) < 2) {
3158 /* Like GCC, don't warn by default for mere changes
3159 in pointer target signedness. Do warn for different
3160 base types, though, in particular for unsigned enums
3161 and signed int targets. */
3162 } else {
3163 tcc_warning("assignment from incompatible pointer type");
3164 break;
3167 if (qualwarn)
3168 tcc_warning("assignment discards qualifiers from pointer target type");
3169 break;
3170 case VT_BYTE:
3171 case VT_SHORT:
3172 case VT_INT:
3173 case VT_LLONG:
3174 if (sbt == VT_PTR || sbt == VT_FUNC) {
3175 tcc_warning("assignment makes integer from pointer without a cast");
3176 } else if (sbt == VT_STRUCT) {
3177 goto case_VT_STRUCT;
3179 /* XXX: more tests */
3180 break;
3181 case VT_STRUCT:
3182 case_VT_STRUCT:
3183 if (!is_compatible_unqualified_types(dt, st)) {
3184 error:
3185 type_to_str(buf1, sizeof(buf1), st, NULL);
3186 type_to_str(buf2, sizeof(buf2), dt, NULL);
3187 tcc_error("cannot cast '%s' to '%s'", buf1, buf2);
3189 break;
3191 gen_cast(dt);
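/* Illustrative note (added commentary, not part of the original source):
   typical diagnostics produced by the checks above:
       char *p = 42;              -> "assignment makes pointer from integer
                                     without a cast"
       char *q = (const char *)p; -> "assignment discards qualifiers from
                                     pointer target type"
       int *r = (float *)p;       -> "assignment from incompatible pointer
                                     type" */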
3194 /* store vtop in lvalue pushed on stack */
3195 ST_FUNC void vstore(void)
3197 int sbt, dbt, ft, r, t, size, align, bit_size, bit_pos, rc, delayed_cast;
3199 ft = vtop[-1].type.t;
3200 sbt = vtop->type.t & VT_BTYPE;
3201 dbt = ft & VT_BTYPE;
3202 if ((((sbt == VT_INT || sbt == VT_SHORT) && dbt == VT_BYTE) ||
3203 (sbt == VT_INT && dbt == VT_SHORT))
3204 && !(vtop->type.t & VT_BITFIELD)) {
3205 /* optimize char/short casts */
3206 delayed_cast = VT_MUSTCAST;
3207 vtop->type.t = ft & VT_TYPE;
3208 /* XXX: factorize */
3209 if (ft & VT_CONSTANT)
3210 tcc_warning("assignment of read-only location");
3211 } else {
3212 delayed_cast = 0;
3213 if (!(ft & VT_BITFIELD))
3214 gen_assign_cast(&vtop[-1].type);
3217 if (sbt == VT_STRUCT) {
3218 /* if structure, only generate pointer */
3219 /* structure assignment : generate memcpy */
3220 /* XXX: optimize if small size */
3221 size = type_size(&vtop->type, &align);
3223 /* destination */
3224 vswap();
3225 vtop->type.t = VT_PTR;
3226 gaddrof();
3228 /* address of memcpy() */
3229 #ifdef TCC_ARM_EABI
3230 if(!(align & 7))
3231 vpush_global_sym(&func_old_type, TOK_memcpy8);
3232 else if(!(align & 3))
3233 vpush_global_sym(&func_old_type, TOK_memcpy4);
3234 else
3235 #endif
3236 /* Use memmove, rather than memcpy, as dest and src may be the same: */
3237 vpush_global_sym(&func_old_type, TOK_memmove);
3239 vswap();
3240 /* source */
3241 vpushv(vtop - 2);
3242 vtop->type.t = VT_PTR;
3243 gaddrof();
3244 /* type size */
3245 vpushi(size);
3246 gfunc_call(3);
3248 /* leave source on stack */
3249 } else if (ft & VT_BITFIELD) {
3250 /* bitfield store handling */
3252 /* save lvalue as expression result (example: s.b = s.a = n;) */
3253 vdup(), vtop[-1] = vtop[-2];
3255 bit_pos = BIT_POS(ft);
3256 bit_size = BIT_SIZE(ft);
3257 /* remove bit field info to avoid loops */
3258 vtop[-1].type.t = ft & ~VT_STRUCT_MASK;
3260 if ((ft & VT_BTYPE) == VT_BOOL) {
3261 gen_cast(&vtop[-1].type);
3262 vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
3265 r = adjust_bf(vtop - 1, bit_pos, bit_size);
3266 if (r == VT_STRUCT) {
3267 gen_cast_s((ft & VT_BTYPE) == VT_LLONG ? VT_LLONG : VT_INT);
3268 store_packed_bf(bit_pos, bit_size);
3269 } else {
3270 unsigned long long mask = (1ULL << bit_size) - 1;
3271 if ((ft & VT_BTYPE) != VT_BOOL) {
3272 /* mask source */
3273 if ((vtop[-1].type.t & VT_BTYPE) == VT_LLONG)
3274 vpushll(mask);
3275 else
3276 vpushi((unsigned)mask);
3277 gen_op('&');
3279 /* shift source */
3280 vpushi(bit_pos);
3281 gen_op(TOK_SHL);
3282 vswap();
3283 /* duplicate destination */
3284 vdup();
3285 vrott(3);
3286 /* load destination, mask and or with source */
3287 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
3288 vpushll(~(mask << bit_pos));
3289 else
3290 vpushi(~((unsigned)mask << bit_pos));
3291 gen_op('&');
3292 gen_op('|');
3293 /* store result */
3294 vstore();
3295 /* ... and discard */
3296 vpop();
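/* Illustrative note (added commentary, not part of the original source):
   the read-modify-write sequence above computes, for a field at bit_pos
   with bit_size bits and mask = (1 << bit_size) - 1:
       dest = (dest & ~(mask << bit_pos)) | ((src & mask) << bit_pos)
   e.g. storing 9 into a field with bit_pos 3, bit_size 5 of a zeroed word
   yields 9 << 3 == 0x48. */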
3298 } else if (dbt == VT_VOID) {
3299 --vtop;
3300 } else {
3301 #ifdef CONFIG_TCC_BCHECK
3302 /* bound check case */
3303 if (vtop[-1].r & VT_MUSTBOUND) {
3304 vswap();
3305 gbound();
3306 vswap();
3308 #endif
3309 rc = RC_INT;
3310 if (is_float(ft)) {
3311 rc = RC_FLOAT;
3312 #ifdef TCC_TARGET_X86_64
3313 if ((ft & VT_BTYPE) == VT_LDOUBLE) {
3314 rc = RC_ST0;
3315 } else if ((ft & VT_BTYPE) == VT_QFLOAT) {
3316 rc = RC_FRET;
3318 #endif
3320 r = gv(rc); /* generate value */
3321 /* if lvalue was saved on stack, must read it */
3322 if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
3323 SValue sv;
3324 t = get_reg(RC_INT);
3325 #if PTR_SIZE == 8
3326 sv.type.t = VT_PTR;
3327 #else
3328 sv.type.t = VT_INT;
3329 #endif
3330 sv.r = VT_LOCAL | VT_LVAL;
3331 sv.c.i = vtop[-1].c.i;
3332 load(t, &sv);
3333 vtop[-1].r = t | VT_LVAL;
3335 /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
3336 #if PTR_SIZE == 8
3337 if (((ft & VT_BTYPE) == VT_QLONG) || ((ft & VT_BTYPE) == VT_QFLOAT)) {
3338 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
3339 #else
3340 if ((ft & VT_BTYPE) == VT_LLONG) {
3341 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
3342 #endif
3343 vtop[-1].type.t = load_type;
3344 store(r, vtop - 1);
3345 vswap();
3346 /* convert to int to increment easily */
3347 vtop->type.t = addr_type;
3348 gaddrof();
3349 vpushi(load_size);
3350 gen_op('+');
3351 vtop->r |= VT_LVAL;
3352 vswap();
3353 vtop[-1].type.t = load_type;
3354 /* XXX: it works because r2 is spilled last ! */
3355 store(vtop->r2, vtop - 1);
3356 } else {
3357 store(r, vtop - 1);
3360 vswap();
3361 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
3362 vtop->r |= delayed_cast;
3366 /* post defines POST/PRE add. c is the token ++ or -- */
3367 ST_FUNC void inc(int post, int c)
3369 test_lvalue();
3370 vdup(); /* save lvalue */
3371 if (post) {
3372 gv_dup(); /* duplicate value */
3373 vrotb(3);
3374 vrotb(3);
3376 /* add constant */
3377 vpushi(c - TOK_MID);
3378 gen_op('+');
3379 vstore(); /* store value */
3380 if (post)
3381 vpop(); /* if post op, return saved value */
3384 ST_FUNC void parse_mult_str (CString *astr, const char *msg)
3386 /* read the string */
3387 if (tok != TOK_STR)
3388 expect(msg);
3389 cstr_new(astr);
3390 while (tok == TOK_STR) {
3391 /* XXX: add \0 handling too ? */
3392 cstr_cat(astr, tokc.str.data, -1);
3393 next();
3395 cstr_ccat(astr, '\0');
3398 /* If i is >= 1, returns floor(log2(i)) + 1, i.e. the 1-based position
3399 of the highest set bit. If i is 0, returns 0. */
3400 static int exact_log2p1(int i)
3402 int ret;
3403 if (!i)
3404 return 0;
3405 for (ret = 1; i >= 1 << 8; ret += 8)
3406 i >>= 8;
3407 if (i >= 1 << 4)
3408 ret += 4, i >>= 4;
3409 if (i >= 1 << 2)
3410 ret += 2, i >>= 2;
3411 if (i >= 1 << 1)
3412 ret++;
3413 return ret;
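/* Illustrative note (added commentary, not part of the original source):
       exact_log2p1(0)  == 0
       exact_log2p1(1)  == 1
       exact_log2p1(16) == 5    (16 == 1 << 4)
   parse_attribute() stores this encoded value in ad->a.aligned. */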
3416 /* Parse __attribute__((...)) GNUC extension. */
3417 static void parse_attribute(AttributeDef *ad)
3419 int t, n;
3420 CString astr;
3422 redo:
3423 if (tok != TOK_ATTRIBUTE1 && tok != TOK_ATTRIBUTE2)
3424 return;
3425 next();
3426 skip('(');
3427 skip('(');
3428 while (tok != ')') {
3429 if (tok < TOK_IDENT)
3430 expect("attribute name");
3431 t = tok;
3432 next();
3433 switch(t) {
3434 case TOK_CLEANUP1:
3435 case TOK_CLEANUP2:
3437 Sym *s;
3439 skip('(');
3440 s = sym_find(tok);
3441 if (!s) {
3442 tcc_warning("implicit declaration of function '%s'",
3443 get_tok_str(tok, &tokc));
3444 s = external_global_sym(tok, &func_old_type);
3446 ad->cleanup_func = s;
3447 next();
3448 skip(')');
3449 break;
3451 case TOK_SECTION1:
3452 case TOK_SECTION2:
3453 skip('(');
3454 parse_mult_str(&astr, "section name");
3455 ad->section = find_section(tcc_state, (char *)astr.data);
3456 skip(')');
3457 cstr_free(&astr);
3458 break;
3459 case TOK_ALIAS1:
3460 case TOK_ALIAS2:
3461 skip('(');
3462 parse_mult_str(&astr, "alias(\"target\")");
3463 ad->alias_target = /* save string as token, for later */
3464 tok_alloc((char*)astr.data, astr.size-1)->tok;
3465 skip(')');
3466 cstr_free(&astr);
3467 break;
3468 case TOK_VISIBILITY1:
3469 case TOK_VISIBILITY2:
3470 skip('(');
3471 parse_mult_str(&astr,
3472 "visibility(\"default|hidden|internal|protected\")");
3473 if (!strcmp (astr.data, "default"))
3474 ad->a.visibility = STV_DEFAULT;
3475 else if (!strcmp (astr.data, "hidden"))
3476 ad->a.visibility = STV_HIDDEN;
3477 else if (!strcmp (astr.data, "internal"))
3478 ad->a.visibility = STV_INTERNAL;
3479 else if (!strcmp (astr.data, "protected"))
3480 ad->a.visibility = STV_PROTECTED;
3481 else
3482 expect("visibility(\"default|hidden|internal|protected\")");
3483 skip(')');
3484 cstr_free(&astr);
3485 break;
3486 case TOK_ALIGNED1:
3487 case TOK_ALIGNED2:
3488 if (tok == '(') {
3489 next();
3490 n = expr_const();
3491 if (n <= 0 || (n & (n - 1)) != 0)
3492 tcc_error("alignment must be a positive power of two");
3493 skip(')');
3494 } else {
3495 n = MAX_ALIGN;
3497 ad->a.aligned = exact_log2p1(n);
3498 if (n != 1 << (ad->a.aligned - 1))
3499 tcc_error("alignment of %d is larger than implemented", n);
3500 break;
3501 case TOK_PACKED1:
3502 case TOK_PACKED2:
3503 ad->a.packed = 1;
3504 break;
3505 case TOK_WEAK1:
3506 case TOK_WEAK2:
3507 ad->a.weak = 1;
3508 break;
3509 case TOK_UNUSED1:
3510 case TOK_UNUSED2:
3511 /* currently, no need to handle it because tcc does not
3512 track unused objects */
3513 break;
3514 case TOK_NORETURN1:
3515 case TOK_NORETURN2:
3516 /* currently, no need to handle it because tcc does not
3517 make use of noreturn information */
3518 break;
3519 case TOK_CDECL1:
3520 case TOK_CDECL2:
3521 case TOK_CDECL3:
3522 ad->f.func_call = FUNC_CDECL;
3523 break;
3524 case TOK_STDCALL1:
3525 case TOK_STDCALL2:
3526 case TOK_STDCALL3:
3527 ad->f.func_call = FUNC_STDCALL;
3528 break;
3529 #ifdef TCC_TARGET_I386
3530 case TOK_REGPARM1:
3531 case TOK_REGPARM2:
3532 skip('(');
3533 n = expr_const();
3534 if (n > 3)
3535 n = 3;
3536 else if (n < 0)
3537 n = 0;
3538 if (n > 0)
3539 ad->f.func_call = FUNC_FASTCALL1 + n - 1;
3540 skip(')');
3541 break;
3542 case TOK_FASTCALL1:
3543 case TOK_FASTCALL2:
3544 case TOK_FASTCALL3:
3545 ad->f.func_call = FUNC_FASTCALLW;
3546 break;
3547 #endif
3548 case TOK_MODE:
3549 skip('(');
3550 switch(tok) {
3551 case TOK_MODE_DI:
3552 ad->attr_mode = VT_LLONG + 1;
3553 break;
3554 case TOK_MODE_QI:
3555 ad->attr_mode = VT_BYTE + 1;
3556 break;
3557 case TOK_MODE_HI:
3558 ad->attr_mode = VT_SHORT + 1;
3559 break;
3560 case TOK_MODE_SI:
3561 case TOK_MODE_word:
3562 ad->attr_mode = VT_INT + 1;
3563 break;
3564 default:
3565 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok, NULL));
3566 break;
3568 next();
3569 skip(')');
3570 break;
3571 case TOK_DLLEXPORT:
3572 ad->a.dllexport = 1;
3573 break;
3574 case TOK_NODECORATE:
3575 ad->a.nodecorate = 1;
3576 break;
3577 case TOK_DLLIMPORT:
3578 ad->a.dllimport = 1;
3579 break;
3580 default:
3581 if (tcc_state->warn_unsupported)
3582 tcc_warning("'%s' attribute ignored", get_tok_str(t, NULL));
3583 /* skip parameters */
3584 if (tok == '(') {
3585 int parenthesis = 0;
3586 do {
3587 if (tok == '(')
3588 parenthesis++;
3589 else if (tok == ')')
3590 parenthesis--;
3591 next();
3592 } while (parenthesis && tok != -1);
3594 break;
3596 if (tok != ',')
3597 break;
3598 next();
3600 skip(')');
3601 skip(')');
3602 goto redo;
3605 static Sym * find_field (CType *type, int v, int *cumofs)
3607 Sym *s = type->ref;
3608 v |= SYM_FIELD;
3609 while ((s = s->next) != NULL) {
3610 if ((s->v & SYM_FIELD) &&
3611 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3612 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3613 Sym *ret = find_field (&s->type, v, cumofs);
3614 if (ret) {
3615 *cumofs += s->c;
3616 return ret;
3619 if (s->v == v)
3620 break;
3622 return s;
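/* Illustrative note (added commentary, not part of the original source):
   for an anonymous-member lookup such as
       struct S { struct { int x; }; int y; } s;   ...   s.x
   the anonymous inner struct is searched recursively and the offset of the
   anonymous field (its Sym's c) is accumulated into *cumofs, so that x is
   addressed relative to the start of S. */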
3625 static void struct_layout(CType *type, AttributeDef *ad)
3627 int size, align, maxalign, offset, c, bit_pos, bit_size;
3628 int packed, a, bt, prevbt, prev_bit_size;
3629 int pcc = !tcc_state->ms_bitfields;
3630 int pragma_pack = *tcc_state->pack_stack_ptr;
3631 Sym *f;
3633 maxalign = 1;
3634 offset = 0;
3635 c = 0;
3636 bit_pos = 0;
3637 prevbt = VT_STRUCT; /* make it never match */
3638 prev_bit_size = 0;
3640 //#define BF_DEBUG
3642 for (f = type->ref->next; f; f = f->next) {
3643 if (f->type.t & VT_BITFIELD)
3644 bit_size = BIT_SIZE(f->type.t);
3645 else
3646 bit_size = -1;
3647 size = type_size(&f->type, &align);
3648 a = f->a.aligned ? 1 << (f->a.aligned - 1) : 0;
3649 packed = 0;
3651 if (pcc && bit_size == 0) {
3652 /* in pcc mode, packing does not affect zero-width bitfields */
3654 } else {
3655 /* in pcc mode, attribute packed overrides if set. */
3656 if (pcc && (f->a.packed || ad->a.packed))
3657 align = packed = 1;
3659 /* pragma pack overrides align if smaller, and always packs bitfields */
3660 if (pragma_pack) {
3661 packed = 1;
3662 if (pragma_pack < align)
3663 align = pragma_pack;
3664 /* in pcc mode pragma pack also overrides individual align */
3665 if (pcc && pragma_pack < a)
3666 a = 0;
3669 /* some individual align was specified */
3670 if (a)
3671 align = a;
3673 if (type->ref->type.t == VT_UNION) {
3674 if (pcc && bit_size >= 0)
3675 size = (bit_size + 7) >> 3;
3676 offset = 0;
3677 if (size > c)
3678 c = size;
3680 } else if (bit_size < 0) {
3681 if (pcc)
3682 c += (bit_pos + 7) >> 3;
3683 c = (c + align - 1) & -align;
3684 offset = c;
3685 if (size > 0)
3686 c += size;
3687 bit_pos = 0;
3688 prevbt = VT_STRUCT;
3689 prev_bit_size = 0;
3691 } else {
3692 /* A bit-field. Layout is more complicated. There are two
3693 options: PCC (GCC) compatible and MS compatible */
3694 if (pcc) {
3695 /* In PCC layout a bit-field is placed adjacent to the
3696 preceding bit-fields, except if:
3697 - it has zero-width
3698 - an individual alignment was given
3699 - it would overflow its base type container and
3700 there is no packing */
3701 if (bit_size == 0) {
3702 new_field:
3703 c = (c + ((bit_pos + 7) >> 3) + align - 1) & -align;
3704 bit_pos = 0;
3705 } else if (f->a.aligned) {
3706 goto new_field;
3707 } else if (!packed) {
3708 int a8 = align * 8;
3709 int ofs = ((c * 8 + bit_pos) % a8 + bit_size + a8 - 1) / a8;
3710 if (ofs > size / align)
3711 goto new_field;
3714 /* in pcc mode, long long bitfields have type int if they fit */
3715 if (size == 8 && bit_size <= 32)
3716 f->type.t = (f->type.t & ~VT_BTYPE) | VT_INT, size = 4;
3718 while (bit_pos >= align * 8)
3719 c += align, bit_pos -= align * 8;
3720 offset = c;
3722 /* In PCC layout named bit-fields influence the alignment
3723 of the containing struct using the base type's alignment,
3724 except for packed fields (which here have correct align). */
3725 if (f->v & SYM_FIRST_ANOM
3726 // && bit_size // ??? gcc on ARM/rpi does that
3728 align = 1;
3730 } else {
3731 bt = f->type.t & VT_BTYPE;
3732 if ((bit_pos + bit_size > size * 8)
3733 || (bit_size > 0) == (bt != prevbt)
3735 c = (c + align - 1) & -align;
3736 offset = c;
3737 bit_pos = 0;
3738 /* In MS bitfield mode a bit-field run always uses
3739 at least as many bits as the underlying type.
3740 To start a new run it's also required that this
3741 or the last bit-field had non-zero width. */
3742 if (bit_size || prev_bit_size)
3743 c += size;
3745 /* In MS layout the record's alignment is normally
3746 influenced by the field, except for a zero-width
3747 field at the start of a run (but by further zero-width
3748 fields it is again). */
3749 if (bit_size == 0 && prevbt != bt)
3750 align = 1;
3751 prevbt = bt;
3752 prev_bit_size = bit_size;
3755 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
3756 | (bit_pos << VT_STRUCT_SHIFT);
3757 bit_pos += bit_size;
3759 if (align > maxalign)
3760 maxalign = align;
3762 #ifdef BF_DEBUG
3763 printf("set field %s offset %-2d size %-2d align %-2d",
3764 get_tok_str(f->v & ~SYM_FIELD, NULL), offset, size, align);
3765 if (f->type.t & VT_BITFIELD) {
3766 printf(" pos %-2d bits %-2d",
3767 BIT_POS(f->type.t),
3768 BIT_SIZE(f->type.t)
3771 printf("\n");
3772 #endif
3774 f->c = offset;
3775 f->r = 0;
3778 if (pcc)
3779 c += (bit_pos + 7) >> 3;
3781 /* store size and alignment */
3782 a = bt = ad->a.aligned ? 1 << (ad->a.aligned - 1) : 1;
3783 if (a < maxalign)
3784 a = maxalign;
3785 type->ref->r = a;
3786 if (pragma_pack && pragma_pack < maxalign && 0 == pcc) {
3787 /* can happen if an individual align was given for some member. In
3788 this case MSVC ignores maxalign when aligning the size */
3789 a = pragma_pack;
3790 if (a < bt)
3791 a = bt;
3793 c = (c + a - 1) & -a;
3794 type->ref->c = c;
3796 #ifdef BF_DEBUG
3797 printf("struct size %-2d align %-2d\n\n", c, a), fflush(stdout);
3798 #endif
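/* Illustrative note (added commentary, not part of the original source):
   in the default PCC/GCC-compatible mode the layout computed above for
       struct { int a:3; int b:5; }
   packs both bit-fields into a single int: a at bit position 0, b at bit
   position 3, giving size 4 and alignment 4.  When tcc_state->ms_bitfields
   is set, a run is restarted whenever the underlying base type changes, so
   mixing e.g. char and int bit-fields can produce a larger struct than in
   PCC mode. */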
3800 /* check whether we can access bitfields by their type */
3801 for (f = type->ref->next; f; f = f->next) {
3802 int s, px, cx, c0;
3803 CType t;
3805 if (0 == (f->type.t & VT_BITFIELD))
3806 continue;
3807 f->type.ref = f;
3808 f->auxtype = -1;
3809 bit_size = BIT_SIZE(f->type.t);
3810 if (bit_size == 0)
3811 continue;
3812 bit_pos = BIT_POS(f->type.t);
3813 size = type_size(&f->type, &align);
3814 if (bit_pos + bit_size <= size * 8 && f->c + size <= c)
3815 continue;
3817 /* try to access the field using a different type */
3818 c0 = -1, s = align = 1;
3819 for (;;) {
3820 px = f->c * 8 + bit_pos;
3821 cx = (px >> 3) & -align;
3822 px = px - (cx << 3);
3823 if (c0 == cx)
3824 break;
3825 s = (px + bit_size + 7) >> 3;
3826 if (s > 4) {
3827 t.t = VT_LLONG;
3828 } else if (s > 2) {
3829 t.t = VT_INT;
3830 } else if (s > 1) {
3831 t.t = VT_SHORT;
3832 } else {
3833 t.t = VT_BYTE;
3835 s = type_size(&t, &align);
3836 c0 = cx;
3839 if (px + bit_size <= s * 8 && cx + s <= c) {
3840 /* update offset and bit position */
3841 f->c = cx;
3842 bit_pos = px;
3843 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
3844 | (bit_pos << VT_STRUCT_SHIFT);
3845 if (s != size)
3846 f->auxtype = t.t;
3847 #ifdef BF_DEBUG
3848 printf("FIX field %s offset %-2d size %-2d align %-2d "
3849 "pos %-2d bits %-2d\n",
3850 get_tok_str(f->v & ~SYM_FIELD, NULL),
3851 cx, s, align, px, bit_size);
3852 #endif
3853 } else {
3854 /* fall back to load/store single-byte wise */
3855 f->auxtype = VT_STRUCT;
3856 #ifdef BF_DEBUG
3857 printf("FIX field %s : load byte-wise\n",
3858 get_tok_str(f->v & ~SYM_FIELD, NULL));
3859 #endif
3864 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
3865 static void struct_decl(CType *type, int u)
3867 int v, c, size, align, flexible;
3868 int bit_size, bsize, bt;
3869 Sym *s, *ss, **ps;
3870 AttributeDef ad, ad1;
3871 CType type1, btype;
3873 memset(&ad, 0, sizeof ad);
3874 next();
3875 parse_attribute(&ad);
3876 if (tok != '{') {
3877 v = tok;
3878 next();
3879 /* struct already defined ? return it */
3880 if (v < TOK_IDENT)
3881 expect("struct/union/enum name");
3882 s = struct_find(v);
3883 if (s && (s->sym_scope == local_scope || tok != '{')) {
3884 if (u == s->type.t)
3885 goto do_decl;
3886 if (u == VT_ENUM && IS_ENUM(s->type.t))
3887 goto do_decl;
3888 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
3890 } else {
3891 v = anon_sym++;
3893 /* Record the original enum/struct/union token. */
3894 type1.t = u == VT_ENUM ? u | VT_INT | VT_UNSIGNED : u;
3895 type1.ref = NULL;
3896 /* we put an undefined size for struct/union */
3897 s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
3898 s->r = 0; /* default alignment is zero, as in gcc */
3899 do_decl:
3900 type->t = s->type.t;
3901 type->ref = s;
3903 if (tok == '{') {
3904 next();
3905 if (s->c != -1)
3906 tcc_error("struct/union/enum already defined");
3907 s->c = -2;
3908 /* cannot be empty */
3909 /* empty enums are not allowed */
3910 ps = &s->next;
3911 if (u == VT_ENUM) {
3912 long long ll = 0, pl = 0, nl = 0;
3913 CType t;
3914 t.ref = s;
3915 /* enum symbols have static storage */
3916 t.t = VT_INT|VT_STATIC|VT_ENUM_VAL;
3917 for(;;) {
3918 v = tok;
3919 if (v < TOK_UIDENT)
3920 expect("identifier");
3921 ss = sym_find(v);
3922 if (ss && !local_stack)
3923 tcc_error("redefinition of enumerator '%s'",
3924 get_tok_str(v, NULL));
3925 next();
3926 if (tok == '=') {
3927 next();
3928 ll = expr_const64();
3930 ss = sym_push(v, &t, VT_CONST, 0);
3931 ss->enum_val = ll;
3932 *ps = ss, ps = &ss->next;
3933 if (ll < nl)
3934 nl = ll;
3935 if (ll > pl)
3936 pl = ll;
3937 if (tok != ',')
3938 break;
3939 next();
3940 ll++;
3941 /* NOTE: we accept a trailing comma */
3942 if (tok == '}')
3943 break;
3945 skip('}');
3946 /* set integral type of the enum */
3947 t.t = VT_INT;
3948 if (nl >= 0) {
3949 if (pl != (unsigned)pl)
3950 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
3951 t.t |= VT_UNSIGNED;
3952 } else if (pl != (int)pl || nl != (int)nl)
3953 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
3954 s->type.t = type->t = t.t | VT_ENUM;
3955 s->c = 0;
3956 /* set type for enum members */
3957 for (ss = s->next; ss; ss = ss->next) {
3958 ll = ss->enum_val;
3959 if (ll == (int)ll) /* default is int if it fits */
3960 continue;
3961 if (t.t & VT_UNSIGNED) {
3962 ss->type.t |= VT_UNSIGNED;
3963 if (ll == (unsigned)ll)
3964 continue;
3966 ss->type.t = (ss->type.t & ~VT_BTYPE)
3967 | (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
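/* Illustrative note (added commentary, not part of the original source):
   examples of the base type chosen above:
       enum { A = 1, B = 2 }              -> unsigned int (no negative value,
                                             everything fits in 32 bits)
       enum { C = -1, D = 0x100000000LL } -> a 64-bit signed type, because
                                             D does not fit in int */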
3969 } else {
3970 c = 0;
3971 flexible = 0;
3972 while (tok != '}') {
3973 if (!parse_btype(&btype, &ad1)) {
3974 skip(';');
3975 continue;
3977 while (1) {
3978 if (flexible)
3979 tcc_error("flexible array member '%s' not at the end of struct",
3980 get_tok_str(v, NULL));
3981 bit_size = -1;
3982 v = 0;
3983 type1 = btype;
3984 if (tok != ':') {
3985 if (tok != ';')
3986 type_decl(&type1, &ad1, &v, TYPE_DIRECT);
3987 if (v == 0) {
3988 if ((type1.t & VT_BTYPE) != VT_STRUCT)
3989 expect("identifier");
3990 else {
3991 int v = btype.ref->v;
3992 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
3993 if (tcc_state->ms_extensions == 0)
3994 expect("identifier");
3998 if (type_size(&type1, &align) < 0) {
3999 if ((u == VT_STRUCT) && (type1.t & VT_ARRAY) && c)
4000 flexible = 1;
4001 else
4002 tcc_error("field '%s' has incomplete type",
4003 get_tok_str(v, NULL));
4005 if ((type1.t & VT_BTYPE) == VT_FUNC ||
4006 (type1.t & VT_BTYPE) == VT_VOID ||
4007 (type1.t & VT_STORAGE))
4008 tcc_error("invalid type for '%s'",
4009 get_tok_str(v, NULL));
4011 if (tok == ':') {
4012 next();
4013 bit_size = expr_const();
4014 /* XXX: handle v = 0 case for messages */
4015 if (bit_size < 0)
4016 tcc_error("negative width in bit-field '%s'",
4017 get_tok_str(v, NULL));
4018 if (v && bit_size == 0)
4019 tcc_error("zero width for bit-field '%s'",
4020 get_tok_str(v, NULL));
4021 parse_attribute(&ad1);
4023 size = type_size(&type1, &align);
4024 if (bit_size >= 0) {
4025 bt = type1.t & VT_BTYPE;
4026 if (bt != VT_INT &&
4027 bt != VT_BYTE &&
4028 bt != VT_SHORT &&
4029 bt != VT_BOOL &&
4030 bt != VT_LLONG)
4031 tcc_error("bitfields must have scalar type");
4032 bsize = size * 8;
4033 if (bit_size > bsize) {
4034 tcc_error("width of '%s' exceeds its type",
4035 get_tok_str(v, NULL));
4036 } else if (bit_size == bsize
4037 && !ad.a.packed && !ad1.a.packed) {
4038 /* no need for bit fields */
4040 } else if (bit_size == 64) {
4041 tcc_error("field width 64 not implemented");
4042 } else {
4043 type1.t = (type1.t & ~VT_STRUCT_MASK)
4044 | VT_BITFIELD
4045 | (bit_size << (VT_STRUCT_SHIFT + 6));
4048 if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
4049 /* Remember we've seen a real field to check
4050 for placement of flexible array member. */
4051 c = 1;
4053 /* If member is a struct or bit-field, enforce
4054 placing into the struct (as anonymous). */
4055 if (v == 0 &&
4056 ((type1.t & VT_BTYPE) == VT_STRUCT ||
4057 bit_size >= 0)) {
4058 v = anon_sym++;
4060 if (v) {
4061 ss = sym_push(v | SYM_FIELD, &type1, 0, 0);
4062 ss->a = ad1.a;
4063 *ps = ss;
4064 ps = &ss->next;
4066 if (tok == ';' || tok == TOK_EOF)
4067 break;
4068 skip(',');
4070 skip(';');
4072 skip('}');
4073 parse_attribute(&ad);
4074 struct_layout(type, &ad);
4079 static void sym_to_attr(AttributeDef *ad, Sym *s)
4081 merge_symattr(&ad->a, &s->a);
4082 merge_funcattr(&ad->f, &s->f);
4085 /* Add type qualifiers to a type. If the type is an array then the qualifiers
4086 are added to the element type, copied because it could be a typedef. */
4087 static void parse_btype_qualify(CType *type, int qualifiers)
4089 while (type->t & VT_ARRAY) {
4090 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
4091 type = &type->ref->type;
4093 type->t |= qualifiers;
4096 /* return 0 if no type declaration. otherwise, return the basic type
4097 and skip it.
4099 static int parse_btype(CType *type, AttributeDef *ad)
4101 int t, u, bt, st, type_found, typespec_found, g;
4102 Sym *s;
4103 CType type1;
4105 memset(ad, 0, sizeof(AttributeDef));
4106 type_found = 0;
4107 typespec_found = 0;
4108 t = VT_INT;
4109 bt = st = -1;
4110 type->ref = NULL;
4112 while(1) {
4113 switch(tok) {
4114 case TOK_EXTENSION:
4115 /* currently, we really ignore extension */
4116 next();
4117 continue;
4119 /* basic types */
4120 case TOK_CHAR:
4121 u = VT_BYTE;
4122 basic_type:
4123 next();
4124 basic_type1:
4125 if (u == VT_SHORT || u == VT_LONG) {
4126 if (st != -1 || (bt != -1 && bt != VT_INT))
4127 tmbt: tcc_error("too many basic types");
4128 st = u;
4129 } else {
4130 if (bt != -1 || (st != -1 && u != VT_INT))
4131 goto tmbt;
4132 bt = u;
4134 if (u != VT_INT)
4135 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
4136 typespec_found = 1;
4137 break;
4138 case TOK_VOID:
4139 u = VT_VOID;
4140 goto basic_type;
4141 case TOK_SHORT:
4142 u = VT_SHORT;
4143 goto basic_type;
4144 case TOK_INT:
4145 u = VT_INT;
4146 goto basic_type;
4147 case TOK_ALIGNAS:
4148 { int n;
4149 AttributeDef ad1;
4150 next();
4151 skip('(');
4152 memset(&ad1, 0, sizeof(AttributeDef));
4153 if (parse_btype(&type1, &ad1)) {
4154 type_decl(&type1, &ad1, &n, TYPE_ABSTRACT);
4155 if (ad1.a.aligned)
4156 n = 1 << (ad1.a.aligned - 1);
4157 else
4158 type_size(&type1, &n);
4159 } else {
4160 n = expr_const();
4161 if (n <= 0 || (n & (n - 1)) != 0)
4162 tcc_error("alignment must be a positive power of two");
4164 skip(')');
4165 ad->a.aligned = exact_log2p1(n);
4167 continue;
4168 case TOK_LONG:
4169 if ((t & VT_BTYPE) == VT_DOUBLE) {
4170 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
4171 } else if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
4172 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LLONG;
4173 } else {
4174 u = VT_LONG;
4175 goto basic_type;
4177 next();
4178 break;
4179 #ifdef TCC_TARGET_ARM64
4180 case TOK_UINT128:
4181 /* GCC's __uint128_t appears in some Linux header files. Make it a
4182 synonym for long double to get the size and alignment right. */
4183 u = VT_LDOUBLE;
4184 goto basic_type;
4185 #endif
4186 case TOK_BOOL:
4187 u = VT_BOOL;
4188 goto basic_type;
4189 case TOK_FLOAT:
4190 u = VT_FLOAT;
4191 goto basic_type;
4192 case TOK_DOUBLE:
4193 if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
4194 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
4195 } else {
4196 u = VT_DOUBLE;
4197 goto basic_type;
4199 next();
4200 break;
4201 case TOK_ENUM:
4202 struct_decl(&type1, VT_ENUM);
4203 basic_type2:
4204 u = type1.t;
4205 type->ref = type1.ref;
4206 goto basic_type1;
4207 case TOK_STRUCT:
4208 struct_decl(&type1, VT_STRUCT);
4209 goto basic_type2;
4210 case TOK_UNION:
4211 struct_decl(&type1, VT_UNION);
4212 goto basic_type2;
4214 /* type modifiers */
4215 case TOK_CONST1:
4216 case TOK_CONST2:
4217 case TOK_CONST3:
4218 type->t = t;
4219 parse_btype_qualify(type, VT_CONSTANT);
4220 t = type->t;
4221 next();
4222 break;
4223 case TOK_VOLATILE1:
4224 case TOK_VOLATILE2:
4225 case TOK_VOLATILE3:
4226 type->t = t;
4227 parse_btype_qualify(type, VT_VOLATILE);
4228 t = type->t;
4229 next();
4230 break;
4231 case TOK_SIGNED1:
4232 case TOK_SIGNED2:
4233 case TOK_SIGNED3:
4234 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
4235 tcc_error("signed and unsigned modifier");
4236 t |= VT_DEFSIGN;
4237 next();
4238 typespec_found = 1;
4239 break;
4240 case TOK_REGISTER:
4241 case TOK_AUTO:
4242 case TOK_RESTRICT1:
4243 case TOK_RESTRICT2:
4244 case TOK_RESTRICT3:
4245 next();
4246 break;
4247 case TOK_UNSIGNED:
4248 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
4249 tcc_error("signed and unsigned modifier");
4250 t |= VT_DEFSIGN | VT_UNSIGNED;
4251 next();
4252 typespec_found = 1;
4253 break;
4255 /* storage */
4256 case TOK_EXTERN:
4257 g = VT_EXTERN;
4258 goto storage;
4259 case TOK_STATIC:
4260 g = VT_STATIC;
4261 goto storage;
4262 case TOK_TYPEDEF:
4263 g = VT_TYPEDEF;
4264 goto storage;
4265 storage:
4266 if (t & (VT_EXTERN|VT_STATIC|VT_TYPEDEF) & ~g)
4267 tcc_error("multiple storage classes");
4268 t |= g;
4269 next();
4270 break;
4271 case TOK_INLINE1:
4272 case TOK_INLINE2:
4273 case TOK_INLINE3:
4274 t |= VT_INLINE;
4275 next();
4276 break;
4277 case TOK_NORETURN3:
4278 /* currently, no need to handle it because tcc does not
4279 make use of noreturn information */
4280 next();
4281 break;
4282 /* GNUC attribute */
4283 case TOK_ATTRIBUTE1:
4284 case TOK_ATTRIBUTE2:
4285 parse_attribute(ad);
4286 if (ad->attr_mode) {
4287 u = ad->attr_mode -1;
4288 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
4290 continue;
4291 /* GNUC typeof */
4292 case TOK_TYPEOF1:
4293 case TOK_TYPEOF2:
4294 case TOK_TYPEOF3:
4295 next();
4296 parse_expr_type(&type1);
4297 /* remove all storage modifiers except typedef */
4298 type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
4299 if (type1.ref)
4300 sym_to_attr(ad, type1.ref);
4301 goto basic_type2;
4302 default:
4303 if (typespec_found)
4304 goto the_end;
4305 s = sym_find(tok);
4306 if (!s || !(s->type.t & VT_TYPEDEF))
4307 goto the_end;
4308 t &= ~(VT_BTYPE|VT_LONG);
4309 u = t & ~(VT_CONSTANT | VT_VOLATILE), t ^= u;
4310 type->t = (s->type.t & ~VT_TYPEDEF) | u;
4311 type->ref = s->type.ref;
4312 if (t)
4313 parse_btype_qualify(type, t);
4314 t = type->t;
4315 /* get attributes from typedef */
4316 sym_to_attr(ad, s);
4317 next();
4318 typespec_found = 1;
4319 st = bt = -2;
4320 break;
4322 type_found = 1;
4324 the_end:
4325 if (tcc_state->char_is_unsigned) {
4326 if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
4327 t |= VT_UNSIGNED;
4329 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
4330 bt = t & (VT_BTYPE|VT_LONG);
4331 if (bt == VT_LONG)
4332 t |= LONG_SIZE == 8 ? VT_LLONG : VT_INT;
4333 #ifdef TCC_TARGET_PE
4334 if (bt == VT_LDOUBLE)
4335 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_DOUBLE;
4336 #endif
4337 type->t = t;
4338 return type_found;
4341 /* convert a function parameter type (array to pointer and function to
4342 function pointer) */
4343 static inline void convert_parameter_type(CType *pt)
4345 /* remove const and volatile qualifiers (XXX: const could be used
4346 to indicate a const function parameter) */
4347 pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
4348 /* array must be transformed to pointer according to ANSI C */
4349 pt->t &= ~VT_ARRAY;
4350 if ((pt->t & VT_BTYPE) == VT_FUNC) {
4351 mk_pointer(pt);
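/* Illustration (not from the original source) of the adjustment done above:
   after convert_parameter_type() the following pairs declare identical
   parameter types:

       void f(int a[10]);     and   void f(int *a);          // array -> pointer
       void g(int h(void));   and   void g(int (*h)(void));  // function -> function pointer
*/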
4355 ST_FUNC void parse_asm_str(CString *astr)
4357 skip('(');
4358 parse_mult_str(astr, "string constant");
4361 /* Parse an asm label and return the token */
4362 static int asm_label_instr(void)
4364 int v;
4365 CString astr;
4367 next();
4368 parse_asm_str(&astr);
4369 skip(')');
4370 #ifdef ASM_DEBUG
4371 printf("asm_alias: \"%s\"\n", (char *)astr.data);
4372 #endif
4373 v = tok_alloc(astr.data, astr.size - 1)->tok;
4374 cstr_free(&astr);
4375 return v;
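/* Illustrative (assumed GNU-style) input for the asm label parsed above:

       extern int counter asm("hw_counter");   // the declaration is emitted under the alias 'hw_counter'
*/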
4378 static int post_type(CType *type, AttributeDef *ad, int storage, int td)
4380 int n, l, t1, arg_size, align;
4381 Sym **plast, *s, *first;
4382 AttributeDef ad1;
4383 CType pt;
4385 if (tok == '(') {
4386 /* function type, or recursive declarator (return if so) */
4387 next();
4388 if (td && !(td & TYPE_ABSTRACT))
4389 return 0;
4390 if (tok == ')')
4391 l = 0;
4392 else if (parse_btype(&pt, &ad1))
4393 l = FUNC_NEW;
4394 else if (td) {
4395 merge_attr (ad, &ad1);
4396 return 0;
4397 } else
4398 l = FUNC_OLD;
4399 first = NULL;
4400 plast = &first;
4401 arg_size = 0;
4402 if (l) {
4403 for(;;) {
4404 /* read param name and compute offset */
4405 if (l != FUNC_OLD) {
4406 if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
4407 break;
4408 type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT);
4409 if ((pt.t & VT_BTYPE) == VT_VOID)
4410 tcc_error("parameter declared as void");
4411 } else {
4412 n = tok;
4413 if (n < TOK_UIDENT)
4414 expect("identifier");
4415 pt.t = VT_VOID; /* invalid type */
4416 next();
4418 convert_parameter_type(&pt);
4419 arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
4420 s = sym_push(n | SYM_FIELD, &pt, 0, 0);
4421 *plast = s;
4422 plast = &s->next;
4423 if (tok == ')')
4424 break;
4425 skip(',');
4426 if (l == FUNC_NEW && tok == TOK_DOTS) {
4427 l = FUNC_ELLIPSIS;
4428 next();
4429 break;
4431 if (l == FUNC_NEW && !parse_btype(&pt, &ad1))
4432 tcc_error("invalid type");
4434 } else
4435 /* if no parameters, then old type prototype */
4436 l = FUNC_OLD;
4437 skip(')');
4438 /* NOTE: const is ignored in returned type as it has a special
4439 meaning in gcc / C++ */
4440 type->t &= ~VT_CONSTANT;
4441 /* some ancient pre-K&R C allows a function to return an array
4442 and the array brackets to be put after the arguments, such
4443 that "int c()[]" means something like "int[] c()" */
4444 if (tok == '[') {
4445 next();
4446 skip(']'); /* only handle simple "[]" */
4447 mk_pointer(type);
4449 /* we push an anonymous symbol which will contain the function prototype */
4450 ad->f.func_args = arg_size;
4451 ad->f.func_type = l;
4452 s = sym_push(SYM_FIELD, type, 0, 0);
4453 s->a = ad->a;
4454 s->f = ad->f;
4455 s->next = first;
4456 type->t = VT_FUNC;
4457 type->ref = s;
4458 } else if (tok == '[') {
4459 int saved_nocode_wanted = nocode_wanted;
4460 /* array definition */
4461 next();
4462 while (1) {
4463 /* XXX The optional type-quals and static should only be accepted
4464 in parameter decls. The '*' as well, and then even only
4465 in prototypes (not function defs). */
4466 switch (tok) {
4467 case TOK_RESTRICT1: case TOK_RESTRICT2: case TOK_RESTRICT3:
4468 case TOK_CONST1:
4469 case TOK_VOLATILE1:
4470 case TOK_STATIC:
4471 case '*':
4472 next();
4473 continue;
4474 default:
4475 break;
4477 break;
4479 n = -1;
4480 t1 = 0;
4481 if (tok != ']') {
4482 if (!local_stack || (storage & VT_STATIC))
4483 vpushi(expr_const());
4484 else {
4485 /* The length of a VLA (which can only happen with local_stack &&
4486 !VT_STATIC) must always be evaluated, even under nocode_wanted,
4487 so that its size slot is initialized (e.g. under sizeof
4488 or typeof). */
4489 nocode_wanted = 0;
4490 gexpr();
4492 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4493 n = vtop->c.i;
4494 if (n < 0)
4495 tcc_error("invalid array size");
4496 } else {
4497 if (!is_integer_btype(vtop->type.t & VT_BTYPE))
4498 tcc_error("size of variable length array should be an integer");
4499 n = 0;
4500 t1 = VT_VLA;
4503 skip(']');
4504 /* parse next post type */
4505 post_type(type, ad, storage, 0);
4506 if (type->t == VT_FUNC)
4507 tcc_error("declaration of an array of functions");
4508 t1 |= type->t & VT_VLA;
4510 if (t1 & VT_VLA) {
4511 if (n < 0)
4512 tcc_error("need explicit inner array size in VLAs");
4513 loc -= type_size(&int_type, &align);
4514 loc &= -align;
4515 n = loc;
4517 vla_runtime_type_size(type, &align);
4518 gen_op('*');
4519 vset(&int_type, VT_LOCAL|VT_LVAL, n);
4520 vswap();
4521 vstore();
4523 if (n != -1)
4524 vpop();
4525 nocode_wanted = saved_nocode_wanted;
4527 /* we push an anonymous symbol which will contain the array
4528 element type */
4529 s = sym_push(SYM_FIELD, type, 0, n);
4530 type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
4531 type->ref = s;
4533 return 1;
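/* Sketch (illustrative only) of a declaration exercising the VT_VLA path
   above:

       void f(int n)
       {
           int a[n];               // length is stored in a stack slot at declaration time
           size_t s = sizeof(a);   // computed at run time via vla_runtime_type_size()
       }
*/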
4536 /* Parse a type declarator (except basic type), and return the type
4537 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4538 expected. 'type' should contain the basic type. 'ad' is the
4539 attribute definition of the basic type. It can be modified by
4540 type_decl(). If this (possibly abstract) declarator is a pointer chain,
4541 it returns the innermost pointed-to type (equal to *type, but a different
4542 pointer); otherwise it returns type itself. That is used for recursive calls. */
4543 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td)
4545 CType *post, *ret;
4546 int qualifiers, storage;
4548 /* recursive type, remove storage bits first, apply them later again */
4549 storage = type->t & VT_STORAGE;
4550 type->t &= ~VT_STORAGE;
4551 post = ret = type;
4553 while (tok == '*') {
4554 qualifiers = 0;
4555 redo:
4556 next();
4557 switch(tok) {
4558 case TOK_CONST1:
4559 case TOK_CONST2:
4560 case TOK_CONST3:
4561 qualifiers |= VT_CONSTANT;
4562 goto redo;
4563 case TOK_VOLATILE1:
4564 case TOK_VOLATILE2:
4565 case TOK_VOLATILE3:
4566 qualifiers |= VT_VOLATILE;
4567 goto redo;
4568 case TOK_RESTRICT1:
4569 case TOK_RESTRICT2:
4570 case TOK_RESTRICT3:
4571 goto redo;
4572 /* XXX: clarify attribute handling */
4573 case TOK_ATTRIBUTE1:
4574 case TOK_ATTRIBUTE2:
4575 parse_attribute(ad);
4576 break;
4578 mk_pointer(type);
4579 type->t |= qualifiers;
4580 if (ret == type)
4581 /* innermost pointed to type is the one for the first derivation */
4582 ret = pointed_type(type);
4585 if (tok == '(') {
4586 /* This is possibly a parameter type list for abstract declarators
4587 ('int ()'), use post_type for testing this. */
4588 if (!post_type(type, ad, 0, td)) {
4589 /* It's not, so it's a nested declarator, and the post operations
4590 apply to the innermost pointed to type (if any). */
4591 /* XXX: this is not correct to modify 'ad' at this point, but
4592 the syntax is not clear */
4593 parse_attribute(ad);
4594 post = type_decl(type, ad, v, td);
4595 skip(')');
4596 } else
4597 goto abstract;
4598 } else if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
4599 /* type identifier */
4600 *v = tok;
4601 next();
4602 } else {
4603 abstract:
4604 if (!(td & TYPE_ABSTRACT))
4605 expect("identifier");
4606 *v = 0;
4608 post_type(post, ad, storage, 0);
4609 parse_attribute(ad);
4610 type->t |= storage;
4611 return ret;
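/* Declarator shapes handled by type_decl() above (illustrative):

       int *p;                 // pointer chain
       const char *const s;    // qualified pointers
       int (*fp)(void);        // nested declarator: pointer to function
       int (*ap)[8];           // pointer to array of 8 int
*/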
4614 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
4615 ST_FUNC int lvalue_type(int t)
4617 int bt, r;
4618 r = VT_LVAL;
4619 bt = t & VT_BTYPE;
4620 if (bt == VT_BYTE || bt == VT_BOOL)
4621 r |= VT_LVAL_BYTE;
4622 else if (bt == VT_SHORT)
4623 r |= VT_LVAL_SHORT;
4624 else
4625 return r;
4626 if (t & VT_UNSIGNED)
4627 r |= VT_LVAL_UNSIGNED;
4628 return r;
4631 /* indirection with full error checking and bound check */
4632 ST_FUNC void indir(void)
4634 if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
4635 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
4636 return;
4637 expect("pointer");
4639 if (vtop->r & VT_LVAL)
4640 gv(RC_INT);
4641 vtop->type = *pointed_type(&vtop->type);
4642 /* Arrays and functions are never lvalues */
4643 if (!(vtop->type.t & VT_ARRAY) && !(vtop->type.t & VT_VLA)
4644 && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
4645 vtop->r |= lvalue_type(vtop->type.t);
4646 /* if bound checking, the referenced pointer must be checked */
4647 #ifdef CONFIG_TCC_BCHECK
4648 if (tcc_state->do_bounds_check)
4649 vtop->r |= VT_MUSTBOUND;
4650 #endif
4654 /* pass a parameter to a function and do type checking and casting */
4655 static void gfunc_param_typed(Sym *func, Sym *arg)
4657 int func_type;
4658 CType type;
4660 func_type = func->f.func_type;
4661 if (func_type == FUNC_OLD ||
4662 (func_type == FUNC_ELLIPSIS && arg == NULL)) {
4663 /* default casting : only need to convert float to double */
4664 if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
4665 gen_cast_s(VT_DOUBLE);
4666 } else if (vtop->type.t & VT_BITFIELD) {
4667 type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
4668 type.ref = vtop->type.ref;
4669 gen_cast(&type);
4671 } else if (arg == NULL) {
4672 tcc_error("too many arguments to function");
4673 } else {
4674 type = arg->type;
4675 type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
4676 gen_assign_cast(&type);
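/* Default promotion applied above for old-style or variadic calls
   (illustrative):

       int printf(const char *, ...);
       float x = 1.5f;
       printf("%f\n", x);      // x is converted to double before the call
*/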
4680 /* parse an expression and return its type without any side effect. */
4681 static void expr_type(CType *type, void (*expr_fn)(void))
4683 nocode_wanted++;
4684 expr_fn();
4685 *type = vtop->type;
4686 vpop();
4687 nocode_wanted--;
4690 /* parse an expression of the form '(type)' or '(expr)' and return its
4691 type */
4692 static void parse_expr_type(CType *type)
4694 int n;
4695 AttributeDef ad;
4697 skip('(');
4698 if (parse_btype(type, &ad)) {
4699 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4700 } else {
4701 expr_type(type, gexpr);
4703 skip(')');
4706 static void parse_type(CType *type)
4708 AttributeDef ad;
4709 int n;
4711 if (!parse_btype(type, &ad)) {
4712 expect("type");
4714 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4717 static void parse_builtin_params(int nc, const char *args)
4719 char c, sep = '(';
4720 CType t;
4721 if (nc)
4722 nocode_wanted++;
4723 next();
4724 while ((c = *args++)) {
4725 skip(sep);
4726 sep = ',';
4727 switch (c) {
4728 case 'e': expr_eq(); continue;
4729 case 't': parse_type(&t); vpush(&t); continue;
4730 default: tcc_error("internal error"); break;
4733 skip(')');
4734 if (nc)
4735 nocode_wanted--;
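/* The 'args' string acts as a tiny signature: 'e' parses one assignment
   expression, 't' parses one type name; 'nc' suppresses code generation.
   Calls elsewhere in this file look like (illustrative):

       parse_builtin_params(0, "ee");   // e.g. __builtin_expect(e1, e2)
       parse_builtin_params(1, "e");    // e.g. __builtin_constant_p(e), no code wanted
*/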
4738 static void try_call_scope_cleanup(Sym *stop)
4740 Sym *cls = current_cleanups;
4742 for (; cls != stop; cls = cls->ncl) {
4743 Sym *fs = cls->next;
4744 Sym *vs = cls->prev_tok;
4746 vpushsym(&fs->type, fs);
4747 vset(&vs->type, vs->r, vs->c);
4748 vtop->sym = vs;
4749 mk_pointer(&vtop->type);
4750 gaddrof();
4751 gfunc_call(1);
4755 static void try_call_cleanup_goto(Sym *cleanupstate)
4757 Sym *oc, *cc;
4758 int ocd, ccd;
4760 if (!current_cleanups)
4761 return;
4763 /* search NCA of both cleanup chains given parents and initial depth */
4764 ocd = cleanupstate ? cleanupstate->v & ~SYM_FIELD : 0;
4765 for (ccd = ncleanups, oc = cleanupstate; ocd > ccd; --ocd, oc = oc->ncl)
4767 for (cc = current_cleanups; ccd > ocd; --ccd, cc = cc->ncl)
4769 for (; cc != oc; cc = cc->ncl, oc = oc->ncl, --ccd)
4772 try_call_scope_cleanup(cc);
4775 ST_FUNC void unary(void)
4777 int n, t, align, size, r, sizeof_caller;
4778 CType type;
4779 Sym *s;
4780 AttributeDef ad;
4782 sizeof_caller = in_sizeof;
4783 in_sizeof = 0;
4784 type.ref = NULL;
4785 /* XXX: GCC 2.95.3 does not generate a table although it should be
4786 better here */
4787 tok_next:
4788 switch(tok) {
4789 case TOK_EXTENSION:
4790 next();
4791 goto tok_next;
4792 case TOK_LCHAR:
4793 #ifdef TCC_TARGET_PE
4794 t = VT_SHORT|VT_UNSIGNED;
4795 goto push_tokc;
4796 #endif
4797 case TOK_CINT:
4798 case TOK_CCHAR:
4799 t = VT_INT;
4800 push_tokc:
4801 type.t = t;
4802 vsetc(&type, VT_CONST, &tokc);
4803 next();
4804 break;
4805 case TOK_CUINT:
4806 t = VT_INT | VT_UNSIGNED;
4807 goto push_tokc;
4808 case TOK_CLLONG:
4809 t = VT_LLONG;
4810 goto push_tokc;
4811 case TOK_CULLONG:
4812 t = VT_LLONG | VT_UNSIGNED;
4813 goto push_tokc;
4814 case TOK_CFLOAT:
4815 t = VT_FLOAT;
4816 goto push_tokc;
4817 case TOK_CDOUBLE:
4818 t = VT_DOUBLE;
4819 goto push_tokc;
4820 case TOK_CLDOUBLE:
4821 t = VT_LDOUBLE;
4822 goto push_tokc;
4823 case TOK_CLONG:
4824 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG;
4825 goto push_tokc;
4826 case TOK_CULONG:
4827 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG | VT_UNSIGNED;
4828 goto push_tokc;
4829 case TOK___FUNCTION__:
4830 if (!gnu_ext)
4831 goto tok_identifier;
4832 /* fall thru */
4833 case TOK___FUNC__:
4835 void *ptr;
4836 int len;
4837 /* special function name identifier */
4838 len = strlen(funcname) + 1;
4839 /* generate char[len] type */
4840 type.t = VT_BYTE;
4841 mk_pointer(&type);
4842 type.t |= VT_ARRAY;
4843 type.ref->c = len;
4844 vpush_ref(&type, data_section, data_section->data_offset, len);
4845 if (!NODATA_WANTED) {
4846 ptr = section_ptr_add(data_section, len);
4847 memcpy(ptr, funcname, len);
4849 next();
4851 break;
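/* Illustrative effect of the code above:

       void f(void) { puts(__func__); }   // prints "f"; the name is stored as a char[] in the data section
*/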
4852 case TOK_LSTR:
4853 #ifdef TCC_TARGET_PE
4854 t = VT_SHORT | VT_UNSIGNED;
4855 #else
4856 t = VT_INT;
4857 #endif
4858 goto str_init;
4859 case TOK_STR:
4860 /* string parsing */
4861 t = VT_BYTE;
4862 if (tcc_state->char_is_unsigned)
4863 t = VT_BYTE | VT_UNSIGNED;
4864 str_init:
4865 if (tcc_state->warn_write_strings)
4866 t |= VT_CONSTANT;
4867 type.t = t;
4868 mk_pointer(&type);
4869 type.t |= VT_ARRAY;
4870 memset(&ad, 0, sizeof(AttributeDef));
4871 decl_initializer_alloc(&type, &ad, VT_CONST, 2, 0, 0);
4872 break;
4873 case '(':
4874 next();
4875 /* cast ? */
4876 if (parse_btype(&type, &ad)) {
4877 type_decl(&type, &ad, &n, TYPE_ABSTRACT);
4878 skip(')');
4879 /* check ISOC99 compound literal */
4880 if (tok == '{') {
4881 /* data is allocated locally by default */
4882 if (global_expr)
4883 r = VT_CONST;
4884 else
4885 r = VT_LOCAL;
4886 /* all except arrays are lvalues */
4887 if (!(type.t & VT_ARRAY))
4888 r |= lvalue_type(type.t);
4889 memset(&ad, 0, sizeof(AttributeDef));
4890 decl_initializer_alloc(&type, &ad, r, 1, 0, 0);
4891 } else {
4892 if (sizeof_caller) {
4893 vpush(&type);
4894 return;
4896 unary();
4897 gen_cast(&type);
4899 } else if (tok == '{') {
4900 int saved_nocode_wanted = nocode_wanted;
4901 if (const_wanted)
4902 tcc_error("expected constant");
4903 /* save all registers */
4904 save_regs(0);
4905 /* statement expression : we do not accept break/continue
4906 inside as GCC does. We do retain the nocode_wanted state,
4907 as statement expressions can't ever be entered from the
4908 outside, so any reactivation of code emission (from labels
4909 or loop heads) can be disabled again after the end of it. */
4910 block(NULL, NULL, NULL, NULL, 1);
4911 nocode_wanted = saved_nocode_wanted;
4912 skip(')');
4913 } else {
4914 gexpr();
4915 skip(')');
4917 break;
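/* Forms dispatched in the '(' case above (illustrative):

       int *p = (int[]){1, 2, 3};           // cast syntax + '{' -> ISOC99 compound literal
       int  y = sizeof (int);               // cast syntax under sizeof: just pushes the type
       int  z = ({ int t = 2; t + 1; });    // GNU statement expression (value of the last statement)
       int  w = (1 + 2);                    // plain parenthesized expression
*/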
4918 case '*':
4919 next();
4920 unary();
4921 indir();
4922 break;
4923 case '&':
4924 next();
4925 unary();
4926 /* function names must be treated as function pointers,
4927 except for unary '&' and sizeof. Since we consider that
4928 functions are not lvalues, we only have to handle it
4929 there and in function calls. */
4930 /* arrays can also be used although they are not lvalues */
4931 if ((vtop->type.t & VT_BTYPE) != VT_FUNC &&
4932 !(vtop->type.t & VT_ARRAY))
4933 test_lvalue();
4934 mk_pointer(&vtop->type);
4935 gaddrof();
4936 break;
4937 case '!':
4938 next();
4939 unary();
4940 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4941 gen_cast_s(VT_BOOL);
4942 vtop->c.i = !vtop->c.i;
4943 } else if ((vtop->r & VT_VALMASK) == VT_CMP)
4944 vtop->c.i ^= 1;
4945 else {
4946 save_regs(1);
4947 vseti(VT_JMP, gvtst(1, 0));
4949 break;
4950 case '~':
4951 next();
4952 unary();
4953 vpushi(-1);
4954 gen_op('^');
4955 break;
4956 case '+':
4957 next();
4958 unary();
4959 if ((vtop->type.t & VT_BTYPE) == VT_PTR)
4960 tcc_error("pointer not accepted for unary plus");
4961 /* In order to force a cast, we add zero, except for floating point
4962 where we really need a noop (otherwise -0.0 would be transformed
4963 into +0.0). */
4964 if (!is_float(vtop->type.t)) {
4965 vpushi(0);
4966 gen_op('+');
4968 break;
4969 case TOK_SIZEOF:
4970 case TOK_ALIGNOF1:
4971 case TOK_ALIGNOF2:
4972 case TOK_ALIGNOF3:
4973 t = tok;
4974 next();
4975 in_sizeof++;
4976 expr_type(&type, unary); /* calling unary() resets in_sizeof to 0 */
4977 s = vtop[1].sym; /* hack: accessing previous vtop */
4978 size = type_size(&type, &align);
4979 if (s && s->a.aligned)
4980 align = 1 << (s->a.aligned - 1);
4981 if (t == TOK_SIZEOF) {
4982 if (!(type.t & VT_VLA)) {
4983 if (size < 0)
4984 tcc_error("sizeof applied to an incomplete type");
4985 vpushs(size);
4986 } else {
4987 vla_runtime_type_size(&type, &align);
4989 } else {
4990 vpushs(align);
4992 vtop->type.t |= VT_UNSIGNED;
4993 break;
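/* The results pushed above are unsigned (size_t-like); for a VLA the size is
   computed at run time (illustrative):

       int n = 8;
       int v[n];
       size_t s = sizeof v;          // run-time value, via vla_runtime_type_size()
       size_t a = __alignof__(int);  // compile-time constant
*/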
4995 case TOK_builtin_expect:
4996 /* __builtin_expect is a no-op for now */
4997 parse_builtin_params(0, "ee");
4998 vpop();
4999 break;
5000 case TOK_builtin_types_compatible_p:
5001 parse_builtin_params(0, "tt");
5002 vtop[-1].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
5003 vtop[0].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
5004 n = is_compatible_types(&vtop[-1].type, &vtop[0].type);
5005 vtop -= 2;
5006 vpushi(n);
5007 break;
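/* Qualifiers are stripped above before comparing, e.g. (illustrative)
   __builtin_types_compatible_p(int, const int) evaluates to 1, while
   __builtin_types_compatible_p(int, double) evaluates to 0. */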
5008 case TOK_builtin_choose_expr:
5010 int64_t c;
5011 next();
5012 skip('(');
5013 c = expr_const64();
5014 skip(',');
5015 if (!c) {
5016 nocode_wanted++;
5018 expr_eq();
5019 if (!c) {
5020 vpop();
5021 nocode_wanted--;
5023 skip(',');
5024 if (c) {
5025 nocode_wanted++;
5027 expr_eq();
5028 if (c) {
5029 vpop();
5030 nocode_wanted--;
5032 skip(')');
5034 break;
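/* Illustrative use of __builtin_choose_expr as parsed above: the constant
   condition selects which branch produces code, e.g.

       int i = __builtin_choose_expr(1, 10, 2.5);   // condition is nonzero: result is 10, with type int
*/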
5035 case TOK_builtin_constant_p:
5036 parse_builtin_params(1, "e");
5037 n = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
5038 vtop--;
5039 vpushi(n);
5040 break;
5041 case TOK_builtin_frame_address:
5042 case TOK_builtin_return_address:
5044 int tok1 = tok;
5045 int level;
5046 next();
5047 skip('(');
5048 if (tok != TOK_CINT) {
5049 tcc_error("%s only takes positive integers",
5050 tok1 == TOK_builtin_return_address ?
5051 "__builtin_return_address" :
5052 "__builtin_frame_address");
5054 level = (uint32_t)tokc.i;
5055 next();
5056 skip(')');
5057 type.t = VT_VOID;
5058 mk_pointer(&type);
5059 vset(&type, VT_LOCAL, 0); /* local frame */
5060 while (level--) {
5061 mk_pointer(&vtop->type);
5062 indir(); /* -> parent frame */
5064 if (tok1 == TOK_builtin_return_address) {
5065 // assume return address is just above frame pointer on stack
5066 vpushi(PTR_SIZE);
5067 gen_op('+');
5068 mk_pointer(&vtop->type);
5069 indir();
5072 break;
5073 #ifdef TCC_TARGET_X86_64
5074 #ifdef TCC_TARGET_PE
5075 case TOK_builtin_va_start:
5076 parse_builtin_params(0, "ee");
5077 r = vtop->r & VT_VALMASK;
5078 if (r == VT_LLOCAL)
5079 r = VT_LOCAL;
5080 if (r != VT_LOCAL)
5081 tcc_error("__builtin_va_start expects a local variable");
5082 vtop->r = r;
5083 vtop->type = char_pointer_type;
5084 vtop->c.i += 8;
5085 vstore();
5086 break;
5087 #else
5088 case TOK_builtin_va_arg_types:
5089 parse_builtin_params(0, "t");
5090 vpushi(classify_x86_64_va_arg(&vtop->type));
5091 vswap();
5092 vpop();
5093 break;
5094 #endif
5095 #endif
5097 #ifdef TCC_TARGET_ARM64
5098 case TOK___va_start: {
5099 parse_builtin_params(0, "ee");
5100 //xx check types
5101 gen_va_start();
5102 vpushi(0);
5103 vtop->type.t = VT_VOID;
5104 break;
5106 case TOK___va_arg: {
5107 parse_builtin_params(0, "et");
5108 type = vtop->type;
5109 vpop();
5110 //xx check types
5111 gen_va_arg(&type);
5112 vtop->type = type;
5113 break;
5115 case TOK___arm64_clear_cache: {
5116 parse_builtin_params(0, "ee");
5117 gen_clear_cache();
5118 vpushi(0);
5119 vtop->type.t = VT_VOID;
5120 break;
5122 #endif
5123 /* pre operations */
5124 case TOK_INC:
5125 case TOK_DEC:
5126 t = tok;
5127 next();
5128 unary();
5129 inc(0, t);
5130 break;
5131 case '-':
5132 next();
5133 unary();
5134 t = vtop->type.t & VT_BTYPE;
5135 if (is_float(t)) {
5136 /* In IEEE negate(x) isn't subtract(0,x), but rather
5137 subtract(-0, x). */
5138 vpush(&vtop->type);
5139 if (t == VT_FLOAT)
5140 vtop->c.f = -1.0 * 0.0;
5141 else if (t == VT_DOUBLE)
5142 vtop->c.d = -1.0 * 0.0;
5143 else
5144 vtop->c.ld = -1.0 * 0.0;
5145 } else
5146 vpushi(0);
5147 vswap();
5148 gen_op('-');
5149 break;
5150 case TOK_LAND:
5151 if (!gnu_ext)
5152 goto tok_identifier;
5153 next();
5154 /* allow taking the address of a label */
5155 if (tok < TOK_UIDENT)
5156 expect("label identifier");
5157 s = label_find(tok);
5158 if (!s) {
5159 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
5160 } else {
5161 if (s->r == LABEL_DECLARED)
5162 s->r = LABEL_FORWARD;
5164 if (!s->type.t) {
5165 s->type.t = VT_VOID;
5166 mk_pointer(&s->type);
5167 s->type.t |= VT_STATIC;
5169 vpushsym(&s->type, s);
5170 next();
5171 break;
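/* GNU extension handled above (illustrative): the address of a label can be
   taken and jumped to later with a computed goto, e.g.

       void *target = &&done;
       goto *target;
   done:
       ;
*/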
5173 case TOK_GENERIC:
5175 CType controlling_type;
5176 int has_default = 0;
5177 int has_match = 0;
5178 int learn = 0;
5179 TokenString *str = NULL;
5180 int saved_const_wanted = const_wanted;
5182 next();
5183 skip('(');
5184 const_wanted = 0;
5185 expr_type(&controlling_type, expr_eq);
5186 controlling_type.t &= ~(VT_CONSTANT | VT_VOLATILE | VT_ARRAY);
5187 if ((controlling_type.t & VT_BTYPE) == VT_FUNC)
5188 mk_pointer(&controlling_type);
5189 const_wanted = saved_const_wanted;
5190 for (;;) {
5191 learn = 0;
5192 skip(',');
5193 if (tok == TOK_DEFAULT) {
5194 if (has_default)
5195 tcc_error("too many 'default'");
5196 has_default = 1;
5197 if (!has_match)
5198 learn = 1;
5199 next();
5200 } else {
5201 AttributeDef ad_tmp;
5202 int itmp;
5203 CType cur_type;
5204 parse_btype(&cur_type, &ad_tmp);
5205 type_decl(&cur_type, &ad_tmp, &itmp, TYPE_ABSTRACT);
5206 if (compare_types(&controlling_type, &cur_type, 0)) {
5207 if (has_match) {
5208 tcc_error("type match twice");
5210 has_match = 1;
5211 learn = 1;
5214 skip(':');
5215 if (learn) {
5216 if (str)
5217 tok_str_free(str);
5218 skip_or_save_block(&str);
5219 } else {
5220 skip_or_save_block(NULL);
5222 if (tok == ')')
5223 break;
5225 if (!str) {
5226 char buf[60];
5227 type_to_str(buf, sizeof buf, &controlling_type, NULL);
5228 tcc_error("type '%s' does not match any association", buf);
5230 begin_macro(str, 1);
5231 next();
5232 expr_eq();
5233 if (tok != TOK_EOF)
5234 expect(",");
5235 end_macro();
5236 next();
5237 break;
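/* C11 generic selection as parsed above (illustrative):

       #define type_name(x) _Generic((x), int: "int", float: "float", default: "other")
       const char *s = type_name(1.0f);   // selects the 'float' association
*/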
5239 // special qnan , snan and infinity values
5240 case TOK___NAN__:
5241 n = 0x7fc00000;
5242 special_math_val:
5243 vpushi(n);
5244 vtop->type.t = VT_FLOAT;
5245 next();
5246 break;
5247 case TOK___SNAN__:
5248 n = 0x7f800001;
5249 goto special_math_val;
5250 case TOK___INF__:
5251 n = 0x7f800000;
5252 goto special_math_val;
5254 default:
5255 tok_identifier:
5256 t = tok;
5257 next();
5258 if (t < TOK_UIDENT)
5259 expect("identifier");
5260 s = sym_find(t);
5261 if (!s || IS_ASM_SYM(s)) {
5262 const char *name = get_tok_str(t, NULL);
5263 if (tok != '(')
5264 tcc_error("'%s' undeclared", name);
5265 /* for simple function calls, we tolerate an undeclared
5266 external reference to an int() function */
5267 if (tcc_state->warn_implicit_function_declaration
5268 #ifdef TCC_TARGET_PE
5269 /* people must be warned about using undeclared WINAPI functions
5270 (which usually start with an uppercase letter) */
5271 || (name[0] >= 'A' && name[0] <= 'Z')
5272 #endif
5274 tcc_warning("implicit declaration of function '%s'", name);
5275 s = external_global_sym(t, &func_old_type);
5278 r = s->r;
5279 /* A symbol that has a register is a local register variable,
5280 which starts out as VT_LOCAL value. */
5281 if ((r & VT_VALMASK) < VT_CONST)
5282 r = (r & ~VT_VALMASK) | VT_LOCAL;
5284 vset(&s->type, r, s->c);
5285 /* Point to s as backpointer (even without r&VT_SYM).
5286 Will be used by at least the x86 inline asm parser for
5287 regvars. */
5288 vtop->sym = s;
5290 if (r & VT_SYM) {
5291 vtop->c.i = 0;
5292 } else if (r == VT_CONST && IS_ENUM_VAL(s->type.t)) {
5293 vtop->c.i = s->enum_val;
5295 break;
5298 /* post operations */
5299 while (1) {
5300 if (tok == TOK_INC || tok == TOK_DEC) {
5301 inc(1, tok);
5302 next();
5303 } else if (tok == '.' || tok == TOK_ARROW || tok == TOK_CDOUBLE) {
5304 int qualifiers, cumofs = 0;
5305 /* field */
5306 if (tok == TOK_ARROW)
5307 indir();
5308 qualifiers = vtop->type.t & (VT_CONSTANT | VT_VOLATILE);
5309 test_lvalue();
5310 gaddrof();
5311 /* expect pointer on structure */
5312 if ((vtop->type.t & VT_BTYPE) != VT_STRUCT)
5313 expect("struct or union");
5314 if (tok == TOK_CDOUBLE)
5315 expect("field name");
5316 next();
5317 if (tok == TOK_CINT || tok == TOK_CUINT)
5318 expect("field name");
5319 s = find_field(&vtop->type, tok, &cumofs);
5320 if (!s)
5321 tcc_error("field not found: %s", get_tok_str(tok & ~SYM_FIELD, &tokc));
5322 /* add field offset to pointer */
5323 vtop->type = char_pointer_type; /* change type to 'char *' */
5324 vpushi(cumofs + s->c);
5325 gen_op('+');
5326 /* change type to field type, and set to lvalue */
5327 vtop->type = s->type;
5328 vtop->type.t |= qualifiers;
5329 /* an array is never an lvalue */
5330 if (!(vtop->type.t & VT_ARRAY)) {
5331 vtop->r |= lvalue_type(vtop->type.t);
5332 #ifdef CONFIG_TCC_BCHECK
5333 /* if bound checking, the referenced pointer must be checked */
5334 if (tcc_state->do_bounds_check && (vtop->r & VT_VALMASK) != VT_LOCAL)
5335 vtop->r |= VT_MUSTBOUND;
5336 #endif
5338 next();
5339 } else if (tok == '[') {
5340 next();
5341 gexpr();
5342 gen_op('+');
5343 indir();
5344 skip(']');
5345 } else if (tok == '(') {
5346 SValue ret;
5347 Sym *sa;
5348 int nb_args, ret_nregs, ret_align, regsize, variadic;
5350 /* function call */
5351 if ((vtop->type.t & VT_BTYPE) != VT_FUNC) {
5352 /* pointer test (no array accepted) */
5353 if ((vtop->type.t & (VT_BTYPE | VT_ARRAY)) == VT_PTR) {
5354 vtop->type = *pointed_type(&vtop->type);
5355 if ((vtop->type.t & VT_BTYPE) != VT_FUNC)
5356 goto error_func;
5357 } else {
5358 error_func:
5359 expect("function pointer");
5361 } else {
5362 vtop->r &= ~VT_LVAL; /* no lvalue */
5364 /* get return type */
5365 s = vtop->type.ref;
5366 next();
5367 sa = s->next; /* first parameter */
5368 nb_args = regsize = 0;
5369 ret.r2 = VT_CONST;
5370 /* compute first implicit argument if a structure is returned */
5371 if ((s->type.t & VT_BTYPE) == VT_STRUCT) {
5372 variadic = (s->f.func_type == FUNC_ELLIPSIS);
5373 ret_nregs = gfunc_sret(&s->type, variadic, &ret.type,
5374 &ret_align, &regsize);
5375 if (!ret_nregs) {
5376 /* get some space for the returned structure */
5377 size = type_size(&s->type, &align);
5378 #ifdef TCC_TARGET_ARM64
5379 /* On arm64, a small struct is returned in registers.
5380 It is much easier to write it to memory if we know
5381 that we are allowed to write some extra bytes, so
5382 round the allocated space up to a power of 2: */
5383 if (size < 16)
5384 while (size & (size - 1))
5385 size = (size | (size - 1)) + 1;
5386 #endif
5387 loc = (loc - size) & -align;
5388 ret.type = s->type;
5389 ret.r = VT_LOCAL | VT_LVAL;
5390 /* pass it as 'int' to avoid structure arg passing
5391 problems */
5392 vseti(VT_LOCAL, loc);
5393 ret.c = vtop->c;
5394 nb_args++;
5396 } else {
5397 ret_nregs = 1;
5398 ret.type = s->type;
5401 if (ret_nregs) {
5402 /* return in register */
5403 if (is_float(ret.type.t)) {
5404 ret.r = reg_fret(ret.type.t);
5405 #ifdef TCC_TARGET_X86_64
5406 if ((ret.type.t & VT_BTYPE) == VT_QFLOAT)
5407 ret.r2 = REG_QRET;
5408 #endif
5409 } else {
5410 #ifndef TCC_TARGET_ARM64
5411 #ifdef TCC_TARGET_X86_64
5412 if ((ret.type.t & VT_BTYPE) == VT_QLONG)
5413 #else
5414 if ((ret.type.t & VT_BTYPE) == VT_LLONG)
5415 #endif
5416 ret.r2 = REG_LRET;
5417 #endif
5418 ret.r = REG_IRET;
5420 ret.c.i = 0;
5422 if (tok != ')') {
5423 for(;;) {
5424 expr_eq();
5425 gfunc_param_typed(s, sa);
5426 nb_args++;
5427 if (sa)
5428 sa = sa->next;
5429 if (tok == ')')
5430 break;
5431 skip(',');
5434 if (sa)
5435 tcc_error("too few arguments to function");
5436 skip(')');
5437 gfunc_call(nb_args);
5439 /* return value */
5440 for (r = ret.r + ret_nregs + !ret_nregs; r-- > ret.r;) {
5441 vsetc(&ret.type, r, &ret.c);
5442 vtop->r2 = ret.r2; /* Loop only happens when r2 is VT_CONST */
5445 /* handle packed struct return */
5446 if (((s->type.t & VT_BTYPE) == VT_STRUCT) && ret_nregs) {
5447 int addr, offset;
5449 size = type_size(&s->type, &align);
5450 /* We're writing whole regs often, make sure there's enough
5451 space. Assume register size is power of 2. */
5452 if (regsize > align)
5453 align = regsize;
5454 loc = (loc - size) & -align;
5455 addr = loc;
5456 offset = 0;
5457 for (;;) {
5458 vset(&ret.type, VT_LOCAL | VT_LVAL, addr + offset);
5459 vswap();
5460 vstore();
5461 vtop--;
5462 if (--ret_nregs == 0)
5463 break;
5464 offset += regsize;
5466 vset(&s->type, VT_LOCAL | VT_LVAL, addr);
5468 } else {
5469 break;
5474 ST_FUNC void expr_prod(void)
5476 int t;
5478 unary();
5479 while (tok == '*' || tok == '/' || tok == '%') {
5480 t = tok;
5481 next();
5482 unary();
5483 gen_op(t);
5487 ST_FUNC void expr_sum(void)
5489 int t;
5491 expr_prod();
5492 while (tok == '+' || tok == '-') {
5493 t = tok;
5494 next();
5495 expr_prod();
5496 gen_op(t);
5500 static void expr_shift(void)
5502 int t;
5504 expr_sum();
5505 while (tok == TOK_SHL || tok == TOK_SAR) {
5506 t = tok;
5507 next();
5508 expr_sum();
5509 gen_op(t);
5513 static void expr_cmp(void)
5515 int t;
5517 expr_shift();
5518 while ((tok >= TOK_ULE && tok <= TOK_GT) ||
5519 tok == TOK_ULT || tok == TOK_UGE) {
5520 t = tok;
5521 next();
5522 expr_shift();
5523 gen_op(t);
5527 static void expr_cmpeq(void)
5529 int t;
5531 expr_cmp();
5532 while (tok == TOK_EQ || tok == TOK_NE) {
5533 t = tok;
5534 next();
5535 expr_cmp();
5536 gen_op(t);
5540 static void expr_and(void)
5542 expr_cmpeq();
5543 while (tok == '&') {
5544 next();
5545 expr_cmpeq();
5546 gen_op('&');
5550 static void expr_xor(void)
5552 expr_and();
5553 while (tok == '^') {
5554 next();
5555 expr_and();
5556 gen_op('^');
5560 static void expr_or(void)
5562 expr_xor();
5563 while (tok == '|') {
5564 next();
5565 expr_xor();
5566 gen_op('|');
5570 static void expr_land(void)
5572 expr_or();
5573 if (tok == TOK_LAND) {
5574 int t = 0;
5575 for(;;) {
5576 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5577 gen_cast_s(VT_BOOL);
5578 if (vtop->c.i) {
5579 vpop();
5580 } else {
5581 nocode_wanted++;
5582 while (tok == TOK_LAND) {
5583 next();
5584 expr_or();
5585 vpop();
5587 nocode_wanted--;
5588 if (t)
5589 gsym(t);
5590 gen_cast_s(VT_INT);
5591 break;
5593 } else {
5594 if (!t)
5595 save_regs(1);
5596 t = gvtst(1, t);
5598 if (tok != TOK_LAND) {
5599 if (t)
5600 vseti(VT_JMPI, t);
5601 else
5602 vpushi(1);
5603 break;
5605 next();
5606 expr_or();
5611 static void expr_lor(void)
5613 expr_land();
5614 if (tok == TOK_LOR) {
5615 int t = 0;
5616 for(;;) {
5617 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5618 gen_cast_s(VT_BOOL);
5619 if (!vtop->c.i) {
5620 vpop();
5621 } else {
5622 nocode_wanted++;
5623 while (tok == TOK_LOR) {
5624 next();
5625 expr_land();
5626 vpop();
5628 nocode_wanted--;
5629 if (t)
5630 gsym(t);
5631 gen_cast_s(VT_INT);
5632 break;
5634 } else {
5635 if (!t)
5636 save_regs(1);
5637 t = gvtst(0, t);
5639 if (tok != TOK_LOR) {
5640 if (t)
5641 vseti(VT_JMP, t);
5642 else
5643 vpushi(0);
5644 break;
5646 next();
5647 expr_land();
5652 /* Assuming vtop is a value used in a conditional context
5653 (i.e. compared with zero) return 0 if it's false, 1 if
5654 true and -1 if it can't be statically determined. */
5655 static int condition_3way(void)
5657 int c = -1;
5658 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
5659 (!(vtop->r & VT_SYM) || !vtop->sym->a.weak)) {
5660 vdup();
5661 gen_cast_s(VT_BOOL);
5662 c = vtop->c.i;
5663 vpop();
5665 return c;
5668 static void expr_cond(void)
5670 int tt, u, r1, r2, rc, t1, t2, bt1, bt2, islv, c, g;
5671 SValue sv;
5672 CType type, type1, type2;
5674 expr_lor();
5675 if (tok == '?') {
5676 next();
5677 c = condition_3way();
5678 g = (tok == ':' && gnu_ext);
5679 if (c < 0) {
5680 /* needed to avoid having different registers saved in
5681 each branch */
5682 if (is_float(vtop->type.t)) {
5683 rc = RC_FLOAT;
5684 #ifdef TCC_TARGET_X86_64
5685 if ((vtop->type.t & VT_BTYPE) == VT_LDOUBLE) {
5686 rc = RC_ST0;
5688 #endif
5689 } else
5690 rc = RC_INT;
5691 gv(rc);
5692 save_regs(1);
5693 if (g)
5694 gv_dup();
5695 tt = gvtst(1, 0);
5697 } else {
5698 if (!g)
5699 vpop();
5700 tt = 0;
5703 if (1) {
5704 if (c == 0)
5705 nocode_wanted++;
5706 if (!g)
5707 gexpr();
5709 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
5710 mk_pointer(&vtop->type);
5711 type1 = vtop->type;
5712 sv = *vtop; /* save value to handle it later */
5713 vtop--; /* no vpop so that FP stack is not flushed */
5714 skip(':');
5716 u = 0;
5717 if (c < 0)
5718 u = gjmp(0);
5719 gsym(tt);
5721 if (c == 0)
5722 nocode_wanted--;
5723 if (c == 1)
5724 nocode_wanted++;
5725 expr_cond();
5726 if (c == 1)
5727 nocode_wanted--;
5729 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
5730 mk_pointer(&vtop->type);
5731 type2=vtop->type;
5732 t1 = type1.t;
5733 bt1 = t1 & VT_BTYPE;
5734 t2 = type2.t;
5735 bt2 = t2 & VT_BTYPE;
5736 type.ref = NULL;
5739 /* cast operands to correct type according to ISOC rules */
5740 if (bt1 == VT_VOID || bt2 == VT_VOID) {
5741 type.t = VT_VOID; /* NOTE: as an extension, we accept void on only one side */
5742 } else if (is_float(bt1) || is_float(bt2)) {
5743 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
5744 type.t = VT_LDOUBLE;
5746 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
5747 type.t = VT_DOUBLE;
5748 } else {
5749 type.t = VT_FLOAT;
5751 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
5752 /* cast to biggest op */
5753 type.t = VT_LLONG | VT_LONG;
5754 if (bt1 == VT_LLONG)
5755 type.t &= t1;
5756 if (bt2 == VT_LLONG)
5757 type.t &= t2;
5758 /* convert to unsigned if it does not fit in a long long */
5759 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
5760 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
5761 type.t |= VT_UNSIGNED;
5762 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
5763 /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
5764 /* If one is a null ptr constant the result type
5765 is the other. */
5766 if (is_null_pointer (vtop)) type = type1;
5767 else if (is_null_pointer (&sv)) type = type2;
5768 else if (bt1 != bt2)
5769 tcc_error("incompatible types in conditional expressions");
5770 else {
5771 CType *pt1 = pointed_type(&type1);
5772 CType *pt2 = pointed_type(&type2);
5773 int pbt1 = pt1->t & VT_BTYPE;
5774 int pbt2 = pt2->t & VT_BTYPE;
5775 int newquals, copied = 0;
5776 /* pointers to void get preferred, otherwise the
5777 pointed to types minus qualifs should be compatible */
5778 type = (pbt1 == VT_VOID) ? type1 : type2;
5779 if (pbt1 != VT_VOID && pbt2 != VT_VOID) {
5780 if(!compare_types(pt1, pt2, 1/*unqualif*/))
5781 tcc_warning("pointer type mismatch in conditional expression\n");
5783 /* combine qualifs */
5784 newquals = ((pt1->t | pt2->t) & (VT_CONSTANT | VT_VOLATILE));
5785 if ((~pointed_type(&type)->t & (VT_CONSTANT | VT_VOLATILE))
5786 & newquals)
5788 /* copy the pointer target symbol */
5789 type.ref = sym_push(SYM_FIELD, &type.ref->type,
5790 0, type.ref->c);
5791 copied = 1;
5792 pointed_type(&type)->t |= newquals;
5794 /* pointers to incomplete arrays get converted to
5795 pointers to completed ones if possible */
5796 if (pt1->t & VT_ARRAY
5797 && pt2->t & VT_ARRAY
5798 && pointed_type(&type)->ref->c < 0
5799 && (pt1->ref->c > 0 || pt2->ref->c > 0))
5801 if (!copied)
5802 type.ref = sym_push(SYM_FIELD, &type.ref->type,
5803 0, type.ref->c);
5804 pointed_type(&type)->ref =
5805 sym_push(SYM_FIELD, &pointed_type(&type)->ref->type,
5806 0, pointed_type(&type)->ref->c);
5807 pointed_type(&type)->ref->c =
5808 0 < pt1->ref->c ? pt1->ref->c : pt2->ref->c;
5811 } else if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
5812 /* XXX: test structure compatibility */
5813 type = bt1 == VT_STRUCT ? type1 : type2;
5814 } else {
5815 /* integer operations */
5816 type.t = VT_INT | (VT_LONG & (t1 | t2));
5817 /* convert to unsigned if it does not fit in an integer */
5818 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
5819 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
5820 type.t |= VT_UNSIGNED;
5822 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
5823 that `(expr ? a : b).mem` does not error with "lvalue expected" */
5824 islv = (vtop->r & VT_LVAL) && (sv.r & VT_LVAL) && VT_STRUCT == (type.t & VT_BTYPE);
5826 /* now we convert second operand */
5827 if (c != 1) {
5828 gen_cast(&type);
5829 if (islv) {
5830 mk_pointer(&vtop->type);
5831 gaddrof();
5832 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5833 gaddrof();
5836 rc = RC_INT;
5837 if (is_float(type.t)) {
5838 rc = RC_FLOAT;
5839 #ifdef TCC_TARGET_X86_64
5840 if ((type.t & VT_BTYPE) == VT_LDOUBLE) {
5841 rc = RC_ST0;
5843 #endif
5844 } else if ((type.t & VT_BTYPE) == VT_LLONG) {
5845 /* for long longs, we use fixed registers to avoid having
5846 to handle a complicated move */
5847 rc = RC_IRET;
5850 tt = r2 = 0;
5851 if (c < 0) {
5852 r2 = gv(rc);
5853 tt = gjmp(0);
5855 gsym(u);
5857 /* this is horrible, but we must also convert first
5858 operand */
5859 if (c != 0) {
5860 *vtop = sv;
5861 gen_cast(&type);
5862 if (islv) {
5863 mk_pointer(&vtop->type);
5864 gaddrof();
5865 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5866 gaddrof();
5869 if (c < 0 || islv) {
5870 r1 = gv(rc);
5871 move_reg(r2, r1, type.t);
5872 vtop->r = r2;
5873 gsym(tt);
5874 if (islv)
5875 indir();
5881 static void expr_eq(void)
5883 int t;
5885 expr_cond();
5886 if (tok == '=' ||
5887 (tok >= TOK_A_MOD && tok <= TOK_A_DIV) ||
5888 tok == TOK_A_XOR || tok == TOK_A_OR ||
5889 tok == TOK_A_SHL || tok == TOK_A_SAR) {
5890 test_lvalue();
5891 t = tok;
5892 next();
5893 if (t == '=') {
5894 expr_eq();
5895 } else {
5896 vdup();
5897 expr_eq();
5898 gen_op(t & 0x7f);
5900 vstore();
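/* Operator precedence is encoded in this chain of mutually calling parsers
   (illustrative summary): unary -> expr_prod (* / %) -> expr_sum (+ -) ->
   expr_shift (<< >>) -> expr_cmp (< <= > >=) -> expr_cmpeq (== !=) ->
   expr_and (&) -> expr_xor (^) -> expr_or (|) -> expr_land (&&) ->
   expr_lor (||) -> expr_cond (?:) -> expr_eq (assignment operators), so
   e.g. 'a = b || c & d' parses as 'a = (b || (c & d))'. */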
5904 ST_FUNC void gexpr(void)
5906 while (1) {
5907 expr_eq();
5908 if (tok != ',')
5909 break;
5910 vpop();
5911 next();
5915 /* parse a constant expression and return value in vtop. */
5916 static void expr_const1(void)
5918 const_wanted++;
5919 nocode_wanted++;
5920 expr_cond();
5921 nocode_wanted--;
5922 const_wanted--;
5925 /* parse an integer constant and return its value. */
5926 static inline int64_t expr_const64(void)
5928 int64_t c;
5929 expr_const1();
5930 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
5931 expect("constant expression");
5932 c = vtop->c.i;
5933 vpop();
5934 return c;
5937 /* parse an integer constant and return its value.
5938 Complain if it doesn't fit 32bit (signed or unsigned). */
5939 ST_FUNC int expr_const(void)
5941 int c;
5942 int64_t wc = expr_const64();
5943 c = wc;
5944 if (c != wc && (unsigned)c != wc)
5945 tcc_error("constant exceeds 32 bit");
5946 return c;
5949 /* return the label token if current token is a label, otherwise
5950 return zero */
5951 static int is_label(void)
5953 int last_tok;
5955 /* fast test first */
5956 if (tok < TOK_UIDENT)
5957 return 0;
5958 /* no need to save tokc because tok is an identifier */
5959 last_tok = tok;
5960 next();
5961 if (tok == ':') {
5962 return last_tok;
5963 } else {
5964 unget_tok(last_tok);
5965 return 0;
5969 #ifndef TCC_TARGET_ARM64
5970 static void gfunc_return(CType *func_type)
5972 if ((func_type->t & VT_BTYPE) == VT_STRUCT) {
5973 CType type, ret_type;
5974 int ret_align, ret_nregs, regsize;
5975 ret_nregs = gfunc_sret(func_type, func_var, &ret_type,
5976 &ret_align, &regsize);
5977 if (0 == ret_nregs) {
5978 /* if returning structure, must copy it to implicit
5979 first pointer arg location */
5980 type = *func_type;
5981 mk_pointer(&type);
5982 vset(&type, VT_LOCAL | VT_LVAL, func_vc);
5983 indir();
5984 vswap();
5985 /* copy structure value to pointer */
5986 vstore();
5987 } else {
5988 /* returning structure packed into registers */
5989 int r, size, addr, align;
5990 size = type_size(func_type,&align);
5991 if ((vtop->r != (VT_LOCAL | VT_LVAL) ||
5992 (vtop->c.i & (ret_align-1)))
5993 && (align & (ret_align-1))) {
5994 loc = (loc - size) & -ret_align;
5995 addr = loc;
5996 type = *func_type;
5997 vset(&type, VT_LOCAL | VT_LVAL, addr);
5998 vswap();
5999 vstore();
6000 vpop();
6001 vset(&ret_type, VT_LOCAL | VT_LVAL, addr);
6003 vtop->type = ret_type;
6004 if (is_float(ret_type.t))
6005 r = rc_fret(ret_type.t);
6006 else
6007 r = RC_IRET;
6009 if (ret_nregs == 1)
6010 gv(r);
6011 else {
6012 for (;;) {
6013 vdup();
6014 gv(r);
6015 vpop();
6016 if (--ret_nregs == 0)
6017 break;
6018 /* We assume that when a structure is returned in multiple
6019 registers, their classes are consecutive values of the
6020 sequence s(n) = 2^n */
6021 r <<= 1;
6022 vtop->c.i += regsize;
6026 } else if (is_float(func_type->t)) {
6027 gv(rc_fret(func_type->t));
6028 } else {
6029 gv(RC_IRET);
6031 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
6033 #endif
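/* Return paths handled above (illustrative): a struct that does not fit the
   register return convention is copied through the hidden pointer argument
   (func_vc) set up by the caller; scalar and small packed-register returns
   go through RC_IRET / rc_fret(), e.g.

       struct big { char b[64]; };
       struct big make_big(void);   // returned via the implicit pointer
       double     make_d(void);     // returned in a floating point register
*/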
6035 static int case_cmp(const void *pa, const void *pb)
6037 int64_t a = (*(struct case_t**) pa)->v1;
6038 int64_t b = (*(struct case_t**) pb)->v1;
6039 return a < b ? -1 : a > b;
6042 static void gcase(struct case_t **base, int len, int *bsym)
6044 struct case_t *p;
6045 int e;
6046 int ll = (vtop->type.t & VT_BTYPE) == VT_LLONG;
6047 gv(RC_INT);
6048 while (len > 4) {
6049 /* binary search */
6050 p = base[len/2];
6051 vdup();
6052 if (ll)
6053 vpushll(p->v2);
6054 else
6055 vpushi(p->v2);
6056 gen_op(TOK_LE);
6057 e = gtst(1, 0);
6058 vdup();
6059 if (ll)
6060 vpushll(p->v1);
6061 else
6062 vpushi(p->v1);
6063 gen_op(TOK_GE);
6064 gtst_addr(0, p->sym); /* v1 <= x <= v2 */
6065 /* x < v1 */
6066 gcase(base, len/2, bsym);
6067 if (cur_switch->def_sym)
6068 gjmp_addr(cur_switch->def_sym);
6069 else
6070 *bsym = gjmp(*bsym);
6071 /* x > v2 */
6072 gsym(e);
6073 e = len/2 + 1;
6074 base += e; len -= e;
6076 /* linear scan */
6077 while (len--) {
6078 p = *base++;
6079 vdup();
6080 if (ll)
6081 vpushll(p->v2);
6082 else
6083 vpushi(p->v2);
6084 if (p->v1 == p->v2) {
6085 gen_op(TOK_EQ);
6086 gtst_addr(0, p->sym);
6087 } else {
6088 gen_op(TOK_LE);
6089 e = gtst(1, 0);
6090 vdup();
6091 if (ll)
6092 vpushll(p->v1);
6093 else
6094 vpushi(p->v1);
6095 gen_op(TOK_GE);
6096 gtst_addr(0, p->sym);
6097 gsym(e);
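/* Dispatch strategy used above (illustrative): case ranges are sorted with
   case_cmp(), more than 4 remaining entries are split by binary search and
   the tail is scanned linearly. A GNU range like

       switch (x) { case 1 ... 3: f(); break; }

   becomes a single {v1 = 1, v2 = 3} entry. */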
6102 static void block(int *bsym, Sym *bcl, int *csym, Sym *ccl, int is_expr)
6104 int a, b, c, d, cond;
6105 Sym *s;
6107 /* generate line number info */
6108 if (tcc_state->do_debug)
6109 tcc_debug_line(tcc_state);
6111 if (is_expr) {
6112 /* default return value is (void) */
6113 vpushi(0);
6114 vtop->type.t = VT_VOID;
6117 if (tok == TOK_IF) {
6118 /* if test */
6119 int saved_nocode_wanted = nocode_wanted;
6120 next();
6121 skip('(');
6122 gexpr();
6123 skip(')');
6124 cond = condition_3way();
6125 if (cond == 1)
6126 a = 0, vpop();
6127 else
6128 a = gvtst(1, 0);
6129 if (cond == 0)
6130 nocode_wanted |= 0x20000000;
6131 block(bsym, bcl, csym, ccl, 0);
6132 if (cond != 1)
6133 nocode_wanted = saved_nocode_wanted;
6134 if (tok == TOK_ELSE) {
6135 next();
6136 d = gjmp(0);
6137 gsym(a);
6138 if (cond == 1)
6139 nocode_wanted |= 0x20000000;
6140 block(bsym, bcl, csym, ccl, 0);
6141 gsym(d); /* patch else jmp */
6142 if (cond != 0)
6143 nocode_wanted = saved_nocode_wanted;
6144 } else
6145 gsym(a);
6146 } else if (tok == TOK_WHILE) {
6147 int saved_nocode_wanted;
6148 nocode_wanted &= ~0x20000000;
6149 next();
6150 d = ind;
6151 vla_sp_restore();
6152 skip('(');
6153 gexpr();
6154 skip(')');
6155 a = gvtst(1, 0);
6156 b = 0;
6157 ++local_scope;
6158 saved_nocode_wanted = nocode_wanted;
6159 block(&a, current_cleanups, &b, current_cleanups, 0);
6160 nocode_wanted = saved_nocode_wanted;
6161 --local_scope;
6162 gjmp_addr(d);
6163 gsym(a);
6164 gsym_addr(b, d);
6165 } else if (tok == '{') {
6166 Sym *llabel, *lcleanup;
6167 int block_vla_sp_loc = vla_sp_loc, saved_vlas_in_scope = vlas_in_scope;
6168 int lncleanups = ncleanups;
6170 next();
6171 /* record local declaration stack position */
6172 s = local_stack;
6173 llabel = local_label_stack;
6174 lcleanup = current_cleanups;
6175 ++local_scope;
6177 /* handle local labels declarations */
6178 while (tok == TOK_LABEL) {
6179 next();
6180 for(;;) {
6181 if (tok < TOK_UIDENT)
6182 expect("label identifier");
6183 label_push(&local_label_stack, tok, LABEL_DECLARED);
6184 next();
6185 if (tok == ',') {
6186 next();
6187 } else {
6188 skip(';');
6189 break;
6193 while (tok != '}') {
6194 if ((a = is_label()))
6195 unget_tok(a);
6196 else
6197 decl(VT_LOCAL);
6198 if (tok != '}') {
6199 if (is_expr)
6200 vpop();
6201 block(bsym, bcl, csym, ccl, is_expr);
6205 if (current_cleanups != lcleanup) {
6206 int jmp = 0;
6207 Sym *g, **pg;
6209 for (pg = &pending_gotos; (g = *pg) && g->c > lncleanups;)
6210 if (g->prev_tok->r & LABEL_FORWARD) {
6211 Sym *pcl = g->next;
6212 if (!jmp)
6213 jmp = gjmp(0);
6214 gsym(pcl->jnext);
6215 try_call_scope_cleanup(lcleanup);
6216 pcl->jnext = gjmp(0);
6217 if (!lncleanups)
6218 goto remove_pending;
6219 g->c = lncleanups;
6220 pg = &g->prev;
6221 } else {
6222 remove_pending:
6223 *pg = g->prev;
6224 sym_free(g);
6226 gsym(jmp);
6227 if (!nocode_wanted) {
6228 try_call_scope_cleanup(lcleanup);
6232 current_cleanups = lcleanup;
6233 ncleanups = lncleanups;
6234 /* pop locally defined labels */
6235 label_pop(&local_label_stack, llabel, is_expr);
6236 /* pop locally defined symbols */
6237 --local_scope;
6238 /* In the is_expr case (a statement expression is finished here),
6239 vtop might refer to symbols on the local_stack. Either via the
6240 type or via vtop->sym. We can't pop those nor any that in turn
6241 might be referred to. To make it easier we don't roll back
6242 any symbols in that case; some upper level call to block() will
6243 do that. We do have to remove such symbols from the lookup
6244 tables, though. sym_pop will do that. */
6245 sym_pop(&local_stack, s, is_expr);
6247 /* Pop VLA frames and restore stack pointer if required */
6248 if (vlas_in_scope > saved_vlas_in_scope) {
6249 vla_sp_loc = saved_vlas_in_scope ? block_vla_sp_loc : vla_sp_root_loc;
6250 vla_sp_restore();
6252 vlas_in_scope = saved_vlas_in_scope;
6254 next();
6255 } else if (tok == TOK_RETURN) {
6256 next();
6257 if (tok != ';') {
6258 gexpr();
6259 gen_assign_cast(&func_vt);
6260 try_call_scope_cleanup(NULL);
6261 if ((func_vt.t & VT_BTYPE) == VT_VOID)
6262 vtop--;
6263 else
6264 gfunc_return(&func_vt);
6265 } else {
6266 try_call_scope_cleanup(NULL);
6268 skip(';');
6269 /* jump unless last stmt in top-level block */
6270 if (tok != '}' || local_scope != 1)
6271 rsym = gjmp(rsym);
6272 nocode_wanted |= 0x20000000;
6273 } else if (tok == TOK_BREAK) {
6274 /* compute jump */
6275 if (!bsym)
6276 tcc_error("cannot break");
6277 try_call_scope_cleanup(bcl);
6278 *bsym = gjmp(*bsym);
6279 next();
6280 skip(';');
6281 nocode_wanted |= 0x20000000;
6282 } else if (tok == TOK_CONTINUE) {
6283 /* compute jump */
6284 if (!csym)
6285 tcc_error("cannot continue");
6286 try_call_scope_cleanup(ccl);
6287 vla_sp_restore_root();
6288 *csym = gjmp(*csym);
6289 next();
6290 skip(';');
6291 nocode_wanted |= 0x20000000;
6292 } else if (tok == TOK_FOR) {
6293 int e;
6294 int saved_nocode_wanted;
6295 Sym *lcleanup = current_cleanups;
6296 int lncleanups = ncleanups;
6298 nocode_wanted &= ~0x20000000;
6299 next();
6300 skip('(');
6301 s = local_stack;
6302 ++local_scope;
6303 if (tok != ';') {
6304 /* c99 for-loop init decl? */
6305 if (!decl0(VT_LOCAL, 1, NULL)) {
6306 /* no, regular for-loop init expr */
6307 gexpr();
6308 vpop();
6311 skip(';');
6312 d = ind;
6313 c = ind;
6314 vla_sp_restore();
6315 a = 0;
6316 b = 0;
6317 if (tok != ';') {
6318 gexpr();
6319 a = gvtst(1, 0);
6321 skip(';');
6322 if (tok != ')') {
6323 e = gjmp(0);
6324 c = ind;
6325 vla_sp_restore();
6326 gexpr();
6327 vpop();
6328 gjmp_addr(d);
6329 gsym(e);
6331 skip(')');
6332 saved_nocode_wanted = nocode_wanted;
6333 block(&a, current_cleanups, &b, current_cleanups, 0);
6334 nocode_wanted = saved_nocode_wanted;
6335 gjmp_addr(c);
6336 gsym(a);
6337 gsym_addr(b, c);
6338 --local_scope;
6339 try_call_scope_cleanup(lcleanup);
6340 ncleanups = lncleanups;
6341 current_cleanups = lcleanup;
6342 sym_pop(&local_stack, s, 0);
6344 } else
6345 if (tok == TOK_DO) {
6346 int saved_nocode_wanted;
6347 nocode_wanted &= ~0x20000000;
6348 next();
6349 a = 0;
6350 b = 0;
6351 d = ind;
6352 vla_sp_restore();
6353 saved_nocode_wanted = nocode_wanted;
6354 block(&a, current_cleanups, &b, current_cleanups, 0);
6355 skip(TOK_WHILE);
6356 skip('(');
6357 gsym(b);
6358 if (b)
6359 nocode_wanted = saved_nocode_wanted;
6360 gexpr();
6361 c = gvtst(0, 0);
6362 gsym_addr(c, d);
6363 nocode_wanted = saved_nocode_wanted;
6364 skip(')');
6365 gsym(a);
6366 skip(';');
6367 } else
6368 if (tok == TOK_SWITCH) {
6369 struct switch_t *saved, sw;
6370 int saved_nocode_wanted = nocode_wanted;
6371 SValue switchval;
6372 next();
6373 skip('(');
6374 gexpr();
6375 skip(')');
6376 switchval = *vtop--;
6377 a = 0;
6378 b = gjmp(0); /* jump to first case */
6379 sw.p = NULL; sw.n = 0; sw.def_sym = 0;
6380 saved = cur_switch;
6381 cur_switch = &sw;
6382 block(&a, current_cleanups, csym, ccl, 0);
6383 nocode_wanted = saved_nocode_wanted;
6384 a = gjmp(a); /* add implicit break */
6385 /* case lookup */
6386 gsym(b);
6387 qsort(sw.p, sw.n, sizeof(void*), case_cmp);
6388 for (b = 1; b < sw.n; b++)
6389 if (sw.p[b - 1]->v2 >= sw.p[b]->v1)
6390 tcc_error("duplicate case value");
6391 /* Our switch table sorting is signed, so the compared
6392 value needs to be as well when it's 64bit. */
6393 if ((switchval.type.t & VT_BTYPE) == VT_LLONG)
6394 switchval.type.t &= ~VT_UNSIGNED;
6395 vpushv(&switchval);
6396 gcase(sw.p, sw.n, &a);
6397 vpop();
6398 if (sw.def_sym)
6399 gjmp_addr(sw.def_sym);
6400 dynarray_reset(&sw.p, &sw.n);
6401 cur_switch = saved;
6402 /* break label */
6403 gsym(a);
6404 } else
6405 if (tok == TOK_CASE) {
6406 struct case_t *cr = tcc_malloc(sizeof(struct case_t));
6407 if (!cur_switch)
6408 expect("switch");
6409 nocode_wanted &= ~0x20000000;
6410 next();
6411 cr->v1 = cr->v2 = expr_const64();
6412 if (gnu_ext && tok == TOK_DOTS) {
6413 next();
6414 cr->v2 = expr_const64();
6415 if (cr->v2 < cr->v1)
6416 tcc_warning("empty case range");
6418 cr->sym = ind;
6419 dynarray_add(&cur_switch->p, &cur_switch->n, cr);
6420 skip(':');
6421 is_expr = 0;
6422 goto block_after_label;
6423 } else
6424 if (tok == TOK_DEFAULT) {
6425 next();
6426 skip(':');
6427 if (!cur_switch)
6428 expect("switch");
6429 if (cur_switch->def_sym)
6430 tcc_error("too many 'default'");
6431 cur_switch->def_sym = ind;
6432 is_expr = 0;
6433 goto block_after_label;
6434 } else
6435 if (tok == TOK_GOTO) {
6436 next();
6437 if (tok == '*' && gnu_ext) {
6438 /* computed goto */
6439 next();
6440 gexpr();
6441 if ((vtop->type.t & VT_BTYPE) != VT_PTR)
6442 expect("pointer");
6443 ggoto();
6444 } else if (tok >= TOK_UIDENT) {
6445 s = label_find(tok);
6446 /* put forward definition if needed */
6447 if (!s)
6448 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
6449 else if (s->r == LABEL_DECLARED)
6450 s->r = LABEL_FORWARD;
6452 vla_sp_restore_root();
6453 if (s->r & LABEL_FORWARD) {
6454 /* start new goto chain for cleanups, linked via label->next */
6455 if (current_cleanups) {
6456 sym_push2(&pending_gotos, SYM_FIELD, 0, ncleanups);
6457 pending_gotos->prev_tok = s;
6458 s = sym_push2(&s->next, SYM_FIELD, 0, 0);
6459 pending_gotos->next = s;
6461 s->jnext = gjmp(s->jnext);
6462 } else {
6463 try_call_cleanup_goto(s->cleanupstate);
6464 gjmp_addr(s->jnext);
6466 next();
6467 } else {
6468 expect("label identifier");
6470 skip(';');
6471 } else if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
6472 asm_instr();
6473 } else {
6474 b = is_label();
6475 if (b) {
6476 /* label case */
6477 next();
6478 s = label_find(b);
6479 if (s) {
6480 if (s->r == LABEL_DEFINED)
6481 tcc_error("duplicate label '%s'", get_tok_str(s->v, NULL));
6482 s->r = LABEL_DEFINED;
6483 if (s->next) {
6484 Sym *pcl; /* pending cleanup goto */
6485 for (pcl = s->next; pcl; pcl = pcl->prev)
6486 gsym(pcl->jnext);
6487 sym_pop(&s->next, NULL, 0);
6488 } else
6489 gsym(s->jnext);
6490 } else {
6491 s = label_push(&global_label_stack, b, LABEL_DEFINED);
6493 s->jnext = ind;
6494 s->cleanupstate = current_cleanups;
6495 vla_sp_restore();
6496 /* we accept this, but it is a mistake */
6497 block_after_label:
6498 nocode_wanted &= ~0x20000000;
6499 if (tok == '}') {
6500 tcc_warning("deprecated use of label at end of compound statement");
6501 } else {
6502 if (is_expr)
6503 vpop();
6504 block(bsym, bcl, csym, ccl, is_expr);
6506 } else {
6507 /* expression case */
6508 if (tok != ';') {
6509 if (is_expr) {
6510 vpop();
6511 gexpr();
6512 } else {
6513 gexpr();
6514 vpop();
6517 skip(';');
6522 /* This skips over a stream of tokens containing balanced {} and ()
6523 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
6524 with a '{'). If STR then allocates and stores the skipped tokens
6525 in *STR. This doesn't check if () and {} are nested correctly,
6526 i.e. "({)}" is accepted. */
6527 static void skip_or_save_block(TokenString **str)
6529 int braces = tok == '{';
6530 int level = 0;
6531 if (str)
6532 *str = tok_str_alloc();
6534 while ((level > 0 || (tok != '}' && tok != ',' && tok != ';' && tok != ')'))) {
6535 int t;
6536 if (tok == TOK_EOF) {
6537 if (str || level > 0)
6538 tcc_error("unexpected end of file");
6539 else
6540 break;
6542 if (str)
6543 tok_str_add_tok(*str);
6544 t = tok;
6545 next();
6546 if (t == '{' || t == '(') {
6547 level++;
6548 } else if (t == '}' || t == ')') {
6549 level--;
6550 if (level == 0 && braces && t == '}')
6551 break;
6554 if (str) {
6555 tok_str_add(*str, -1);
6556 tok_str_add(*str, 0);
6560 #define EXPR_CONST 1
6561 #define EXPR_ANY 2
6563 static void parse_init_elem(int expr_type)
6565 int saved_global_expr;
6566 switch(expr_type) {
6567 case EXPR_CONST:
6568 /* compound literals must be allocated globally in this case */
6569 saved_global_expr = global_expr;
6570 global_expr = 1;
6571 expr_const1();
6572 global_expr = saved_global_expr;
6573 /* NOTE: symbols are accepted, as well as lvalues for anonymous symbols
6574 (compound literals). */
6575 if (((vtop->r & (VT_VALMASK | VT_LVAL)) != VT_CONST
6576 && ((vtop->r & (VT_SYM|VT_LVAL)) != (VT_SYM|VT_LVAL)
6577 || vtop->sym->v < SYM_FIRST_ANOM))
6578 #ifdef TCC_TARGET_PE
6579 || ((vtop->r & VT_SYM) && vtop->sym->a.dllimport)
6580 #endif
6582 tcc_error("initializer element is not constant");
6583 break;
6584 case EXPR_ANY:
6585 expr_eq();
6586 break;
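/* EXPR_CONST is used for static storage initializers (illustrative):

       static int  g;
       static int *p = &g;        // address constant: accepted
       static int  x = rand();    // not a link-time constant: rejected with
                                  // "initializer element is not constant"
*/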
6590 /* put zeros for variable based init */
6591 static void init_putz(Section *sec, unsigned long c, int size)
6593 if (sec) {
6594 /* nothing to do because globals are already set to zero */
6595 } else {
6596 vpush_global_sym(&func_old_type, TOK_memset);
6597 vseti(VT_LOCAL, c);
6598 #ifdef TCC_TARGET_ARM
6599 vpushs(size);
6600 vpushi(0);
6601 #else
6602 vpushi(0);
6603 vpushs(size);
6604 #endif
6605 gfunc_call(3);
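/* For automatic objects, holes left by designators are cleared with the
   memset() call generated above (illustrative):

       int a[8] = { [6] = 1 };    // the skipped leading elements are zeroed via memset()
*/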
6609 #define DIF_FIRST 1
6610 #define DIF_SIZE_ONLY 2
6611 #define DIF_HAVE_ELEM 4
6613 /* t is the array or struct type. c is the array or struct
6614 address. cur_field is the pointer to the current
6615 field; for arrays, the 'c' member contains the current start
6616 index. 'flags' is as in decl_initializer.
6617 'al' contains the already initialized length of the
6618 current container (starting at c). This returns the new length of that. */
6619 static int decl_designator(CType *type, Section *sec, unsigned long c,
6620 Sym **cur_field, int flags, int al)
6622 Sym *s, *f;
6623 int index, index_last, align, l, nb_elems, elem_size;
6624 unsigned long corig = c;
6626 elem_size = 0;
6627 nb_elems = 1;
6628 if (flags & DIF_HAVE_ELEM)
6629 goto no_designator;
6630 if (gnu_ext && (l = is_label()) != 0)
6631 goto struct_field;
6632 /* NOTE: we only support ranges for the last designator */
6633 while (nb_elems == 1 && (tok == '[' || tok == '.')) {
6634 if (tok == '[') {
6635 if (!(type->t & VT_ARRAY))
6636 expect("array type");
6637 next();
6638 index = index_last = expr_const();
6639 if (tok == TOK_DOTS && gnu_ext) {
6640 next();
6641 index_last = expr_const();
6643 skip(']');
6644 s = type->ref;
6645 if (index < 0 || (s->c >= 0 && index_last >= s->c) ||
6646 index_last < index)
6647 tcc_error("invalid index");
6648 if (cur_field)
6649 (*cur_field)->c = index_last;
6650 type = pointed_type(type);
6651 elem_size = type_size(type, &align);
6652 c += index * elem_size;
6653 nb_elems = index_last - index + 1;
6654 } else {
6655 int cumofs = 0;
6656 next();
6657 l = tok;
6658 struct_field:
6659 next();
6660 if ((type->t & VT_BTYPE) != VT_STRUCT)
6661 expect("struct/union type");
6662 f = find_field(type, l, &cumofs);
6663 if (!f)
6664 expect("field");
6665 if (cur_field)
6666 *cur_field = f;
6667 type = &f->type;
6668 c += cumofs + f->c;
6670 cur_field = NULL;
6672 if (!cur_field) {
6673 if (tok == '=') {
6674 next();
6675 } else if (!gnu_ext) {
6676 expect("=");
6678 } else {
6679 no_designator:
6680 if (type->t & VT_ARRAY) {
6681 index = (*cur_field)->c;
6682 if (type->ref->c >= 0 && index >= type->ref->c)
6683 tcc_error("index too large");
6684 type = pointed_type(type);
6685 c += index * type_size(type, &align);
6686 } else {
6687 f = *cur_field;
6688 while (f && (f->v & SYM_FIRST_ANOM) && (f->type.t & VT_BITFIELD))
6689 *cur_field = f = f->next;
6690 if (!f)
6691 tcc_error("too many field init");
6692 type = &f->type;
6693 c += f->c;
6696 /* must put zero in holes (note that doing it that way
6697 ensures that it even works with designators) */
6698 if (!(flags & DIF_SIZE_ONLY) && c - corig > al)
6699 init_putz(sec, corig + al, c - corig - al);
6700 decl_initializer(type, sec, c, flags & ~DIF_FIRST);
6702 /* XXX: make it more general */
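/* Illustrative note: only range designators give nb_elems > 1, e.g.
       int a[10] = { [2 ... 5] = 7 };
   decl_initializer above stored the value once at index 2; the code below
   replicates it to indices 3..5, using vstore for locals or memcpy inside
   the output section for static data. */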
6703 if (!(flags & DIF_SIZE_ONLY) && nb_elems > 1) {
6704 unsigned long c_end;
6705 uint8_t *src, *dst;
6706 int i;
6708 if (!sec) {
6709 vset(type, VT_LOCAL|VT_LVAL, c);
6710 for (i = 1; i < nb_elems; i++) {
6711 vset(type, VT_LOCAL|VT_LVAL, c + elem_size * i);
6712 vswap();
6713 vstore();
6715 vpop();
6716 } else if (!NODATA_WANTED) {
6717 c_end = c + nb_elems * elem_size;
6718 if (c_end > sec->data_allocated)
6719 section_realloc(sec, c_end);
6720 src = sec->data + c;
6721 dst = src;
6722 for(i = 1; i < nb_elems; i++) {
6723 dst += elem_size;
6724 memcpy(dst, src, elem_size);
6728 c += nb_elems * type_size(type, &align);
6729 if (c - corig > al)
6730 al = c - corig;
6731 return al;
6734 /* store a value or an expression directly in global data or in local array */
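/* Illustrative examples (not from the original source):
       static double d = 2.5;   - value bytes written straight into the section
       static void *p = &d;     - emits an R_DATA_PTR relocation for the symbol
   for stack based objects the value is simply stored via vstore below. */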
6735 static void init_putv(CType *type, Section *sec, unsigned long c)
6737 int bt;
6738 void *ptr;
6739 CType dtype;
6741 dtype = *type;
6742 dtype.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
6744 if (sec) {
6745 int size, align;
6746 /* XXX: not portable */
6747 /* XXX: generate error if incorrect relocation */
6748 gen_assign_cast(&dtype);
6749 bt = type->t & VT_BTYPE;
6751 if ((vtop->r & VT_SYM)
6752 && bt != VT_PTR
6753 && bt != VT_FUNC
6754 && (bt != (PTR_SIZE == 8 ? VT_LLONG : VT_INT)
6755 || (type->t & VT_BITFIELD))
6756 && !((vtop->r & VT_CONST) && vtop->sym->v >= SYM_FIRST_ANOM)
6758 tcc_error("initializer element is not computable at load time");
6760 if (NODATA_WANTED) {
6761 vtop--;
6762 return;
6765 size = type_size(type, &align);
6766 section_reserve(sec, c + size);
6767 ptr = sec->data + c;
6769 /* XXX: make code faster ? */
6770 if ((vtop->r & (VT_SYM|VT_CONST)) == (VT_SYM|VT_CONST) &&
6771 vtop->sym->v >= SYM_FIRST_ANOM &&
6772 /* XXX This rejects compound literals like
6773 '(void *){ptr}'. The problem is that '&sym' is
6774 represented the same way, which would be ruled out
6775 by the SYM_FIRST_ANOM check above, but also '"string"'
6776 in 'char *p = "string"' is represented the same
6777 with the type being VT_PTR and the symbol being an
6778 anonymous one. That is, there's no difference in vtop
6779 between '(void *){x}' and '&(void *){x}'. Ignore
6780 pointer typed entities here. Hopefully no real code
6781 will ever use compound literals with scalar type. */
6782 (vtop->type.t & VT_BTYPE) != VT_PTR) {
6783 /* These come from compound literals, memcpy stuff over. */
6784 Section *ssec;
6785 ElfSym *esym;
6786 ElfW_Rel *rel;
6787 esym = elfsym(vtop->sym);
6788 ssec = tcc_state->sections[esym->st_shndx];
6789 memmove (ptr, ssec->data + esym->st_value, size);
6790 if (ssec->reloc) {
6791 /* We need to copy over all memory contents, and that
6792 includes relocations. Use the fact that relocs are
6793 created in order, so look from the end of relocs
6794 until we hit one before the copied region. */
6795 int num_relocs = ssec->reloc->data_offset / sizeof(*rel);
6796 rel = (ElfW_Rel*)(ssec->reloc->data + ssec->reloc->data_offset);
6797 while (num_relocs--) {
6798 rel--;
6799 if (rel->r_offset >= esym->st_value + size)
6800 continue;
6801 if (rel->r_offset < esym->st_value)
6802 break;
6803 /* Note: if the same fields are initialized multiple
6804 times (possible with designators) then we possibly
6805 add multiple relocations for the same offset here.
6806 That would lead to wrong code, the last reloc needs
6807 to win. We clean this up later after the whole
6808 initializer is parsed. */
6809 put_elf_reloca(symtab_section, sec,
6810 c + rel->r_offset - esym->st_value,
6811 ELFW(R_TYPE)(rel->r_info),
6812 ELFW(R_SYM)(rel->r_info),
6813 #if PTR_SIZE == 8
6814 rel->r_addend
6815 #else
6817 #endif
6821 } else {
6822 if (type->t & VT_BITFIELD) {
6823 int bit_pos, bit_size, bits, n;
6824 unsigned char *p, v, m;
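/* e.g. (illustrative): for
       static struct { unsigned a:3, b:6; } s = { 5, 33 };
   field 'b' starts at bit 3 and crosses a byte boundary, so the loop
   below merges its value in two chunks (5 bits, then 1 bit), masking so
   that neighbouring bits already stored are preserved. */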
6825 bit_pos = BIT_POS(vtop->type.t);
6826 bit_size = BIT_SIZE(vtop->type.t);
6827 p = (unsigned char*)ptr + (bit_pos >> 3);
6828 bit_pos &= 7, bits = 0;
6829 while (bit_size) {
6830 n = 8 - bit_pos;
6831 if (n > bit_size)
6832 n = bit_size;
6833 v = vtop->c.i >> bits << bit_pos;
6834 m = ((1 << n) - 1) << bit_pos;
6835 *p = (*p & ~m) | (v & m);
6836 bits += n, bit_size -= n, bit_pos = 0, ++p;
6838 } else
6839 switch(bt) {
6840 /* XXX: when cross-compiling we assume that each type has the
6841 same representation on host and target, which is likely to
6842 be wrong in the case of long double */
6843 case VT_BOOL:
6844 vtop->c.i = vtop->c.i != 0;
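/* fall through: the normalized _Bool value is stored like a plain byte */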
6845 case VT_BYTE:
6846 *(char *)ptr |= vtop->c.i;
6847 break;
6848 case VT_SHORT:
6849 *(short *)ptr |= vtop->c.i;
6850 break;
6851 case VT_FLOAT:
6852 *(float*)ptr = vtop->c.f;
6853 break;
6854 case VT_DOUBLE:
6855 *(double *)ptr = vtop->c.d;
6856 break;
6857 case VT_LDOUBLE:
6858 #if defined TCC_IS_NATIVE_387
6859 if (sizeof (long double) >= 10) /* zero pad ten-byte LD */
6860 memcpy(ptr, &vtop->c.ld, 10);
6861 #ifdef __TINYC__
6862 else if (sizeof (long double) == sizeof (double))
6863 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr) : "m" (vtop->c.ld));
6864 #endif
6865 else if (vtop->c.ld == 0.0)
6867 else
6868 #endif
6869 if (sizeof(long double) == LDOUBLE_SIZE)
6870 *(long double*)ptr = vtop->c.ld;
6871 else if (sizeof(double) == LDOUBLE_SIZE)
6872 *(double *)ptr = (double)vtop->c.ld;
6873 else
6874 tcc_error("can't cross compile long double constants");
6875 break;
6876 #if PTR_SIZE != 8
6877 case VT_LLONG:
6878 *(long long *)ptr |= vtop->c.i;
6879 break;
6880 #else
6881 case VT_LLONG:
6882 #endif
6883 case VT_PTR:
6885 addr_t val = vtop->c.i;
6886 #if PTR_SIZE == 8
6887 if (vtop->r & VT_SYM)
6888 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
6889 else
6890 *(addr_t *)ptr |= val;
6891 #else
6892 if (vtop->r & VT_SYM)
6893 greloc(sec, vtop->sym, c, R_DATA_PTR);
6894 *(addr_t *)ptr |= val;
6895 #endif
6896 break;
6898 default:
6900 int val = vtop->c.i;
6901 #if PTR_SIZE == 8
6902 if (vtop->r & VT_SYM)
6903 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
6904 else
6905 *(int *)ptr |= val;
6906 #else
6907 if (vtop->r & VT_SYM)
6908 greloc(sec, vtop->sym, c, R_DATA_PTR);
6909 *(int *)ptr |= val;
6910 #endif
6911 break;
6915 vtop--;
6916 } else {
6917 vset(&dtype, VT_LOCAL|VT_LVAL, c);
6918 vswap();
6919 vstore();
6920 vpop();
6924 /* 't' contains the type and storage info. 'c' is the offset of the
6925 object in section 'sec'. If 'sec' is NULL, it means stack based
6926 allocation. 'flags & DIF_FIRST' is true if array '{' must be read (multi
6927 dimension implicit array init handling). 'flags & DIF_SIZE_ONLY' is true if
6928 size only evaluation is wanted (only for arrays). */
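/* Illustrative example (not from the original source): for
       int a[] = { 1, 2, 3 };
   a first call with DIF_SIZE_ONLY discovers that the array has 3 elements
   (the size is patched into type->ref->c below), and a second call emits
   the actual data; see decl_initializer_alloc. */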
6929 static void decl_initializer(CType *type, Section *sec, unsigned long c,
6930 int flags)
6932 int len, n, no_oblock, nb, i;
6933 int size1, align1;
6934 Sym *s, *f;
6935 Sym indexsym;
6936 CType *t1;
6938 if (!(flags & DIF_HAVE_ELEM) && tok != '{' &&
6939 /* In case of strings we have special handling for arrays, so
6940 don't consume them as initializer value (which would commit them
6941 to some anonymous symbol). */
6942 tok != TOK_LSTR && tok != TOK_STR &&
6943 !(flags & DIF_SIZE_ONLY)) {
6944 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
6945 flags |= DIF_HAVE_ELEM;
6948 if ((flags & DIF_HAVE_ELEM) &&
6949 !(type->t & VT_ARRAY) &&
6950 /* Use i_c_parameter_t to strip toplevel qualifiers.
6951 The source type might have VT_CONSTANT set, which is
6952 of course assignable to non-const elements. */
6953 is_compatible_unqualified_types(type, &vtop->type)) {
6954 init_putv(type, sec, c);
6955 } else if (type->t & VT_ARRAY) {
6956 s = type->ref;
6957 n = s->c;
6958 t1 = pointed_type(type);
6959 size1 = type_size(t1, &align1);
6961 no_oblock = 1;
6962 if (((flags & DIF_FIRST) && tok != TOK_LSTR && tok != TOK_STR) ||
6963 tok == '{') {
6964 if (tok != '{')
6965 tcc_error("character array initializer must be a literal,"
6966 " optionally enclosed in braces");
6967 skip('{');
6968 no_oblock = 0;
6971 /* only parse strings here if correct type (otherwise: handle
6972 them as ((w)char *) expressions) */
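/* e.g. (illustrative):
       char buf[8] = "hi";     - copied into the array by this branch
       const char *msg = "hi"; - not an array, handled later as a plain
                                 pointer expression via init_putv */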
6973 if ((tok == TOK_LSTR &&
6974 #ifdef TCC_TARGET_PE
6975 (t1->t & VT_BTYPE) == VT_SHORT && (t1->t & VT_UNSIGNED)
6976 #else
6977 (t1->t & VT_BTYPE) == VT_INT
6978 #endif
6979 ) || (tok == TOK_STR && (t1->t & VT_BTYPE) == VT_BYTE)) {
6980 len = 0;
6981 while (tok == TOK_STR || tok == TOK_LSTR) {
6982 int cstr_len, ch;
6984 /* compute maximum number of chars wanted */
6985 if (tok == TOK_STR)
6986 cstr_len = tokc.str.size;
6987 else
6988 cstr_len = tokc.str.size / sizeof(nwchar_t);
6989 cstr_len--;
6990 nb = cstr_len;
6991 if (n >= 0 && nb > (n - len))
6992 nb = n - len;
6993 if (!(flags & DIF_SIZE_ONLY)) {
6994 if (cstr_len > nb)
6995 tcc_warning("initializer-string for array is too long");
6996 /* in order to go faster for the common case (char
6997 string in a global variable), we handle it
6998 specifically */
6999 if (sec && tok == TOK_STR && size1 == 1) {
7000 if (!NODATA_WANTED)
7001 memcpy(sec->data + c + len, tokc.str.data, nb);
7002 } else {
7003 for(i=0;i<nb;i++) {
7004 if (tok == TOK_STR)
7005 ch = ((unsigned char *)tokc.str.data)[i];
7006 else
7007 ch = ((nwchar_t *)tokc.str.data)[i];
7008 vpushi(ch);
7009 init_putv(t1, sec, c + (len + i) * size1);
7013 len += nb;
7014 next();
7016 /* only add trailing zero if enough storage (no
7017 warning in this case since it is standard) */
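/* e.g. (illustrative): char a[4] = "abc"; stores the trailing '\0',
   while char b[3] = "abc"; legally drops it (standard C behaviour). */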
7018 if (n < 0 || len < n) {
7019 if (!(flags & DIF_SIZE_ONLY)) {
7020 vpushi(0);
7021 init_putv(t1, sec, c + (len * size1));
7023 len++;
7025 len *= size1;
7026 } else {
7027 indexsym.c = 0;
7028 f = &indexsym;
7030 do_init_list:
7031 len = 0;
7032 while (tok != '}' || (flags & DIF_HAVE_ELEM)) {
7033 len = decl_designator(type, sec, c, &f, flags, len);
7034 flags &= ~DIF_HAVE_ELEM;
7035 if (type->t & VT_ARRAY) {
7036 ++indexsym.c;
7037 /* special test for multi dimensional arrays (may not
7038 be strictly correct if designators are used at the
7039 same time) */
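/* e.g. (illustrative): int m[2][3] = { 1, 2, 3, 4, 5, 6 };
   the rows have no inner braces, so each row's initializer stops
   here once n*size1 bytes have been consumed. */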
7040 if (no_oblock && len >= n*size1)
7041 break;
7042 } else {
7043 if (s->type.t == VT_UNION)
7044 f = NULL;
7045 else
7046 f = f->next;
7047 if (no_oblock && f == NULL)
7048 break;
7051 if (tok == '}')
7052 break;
7053 skip(',');
7056 /* put zeros at the end */
7057 if (!(flags & DIF_SIZE_ONLY) && len < n*size1)
7058 init_putz(sec, c + len, n*size1 - len);
7059 if (!no_oblock)
7060 skip('}');
7061 /* patch type size if needed, which happens only for array types */
7062 if (n < 0)
7063 s->c = size1 == 1 ? len : ((len + size1 - 1)/size1);
7064 } else if ((type->t & VT_BTYPE) == VT_STRUCT) {
7065 size1 = 1;
7066 no_oblock = 1;
7067 if ((flags & DIF_FIRST) || tok == '{') {
7068 skip('{');
7069 no_oblock = 0;
7071 s = type->ref;
7072 f = s->next;
7073 n = s->c;
7074 goto do_init_list;
7075 } else if (tok == '{') {
7076 if (flags & DIF_HAVE_ELEM)
7077 skip(';');
7078 next();
7079 decl_initializer(type, sec, c, flags & ~DIF_HAVE_ELEM);
7080 skip('}');
7081 } else if ((flags & DIF_SIZE_ONLY)) {
7082 /* If we supported only ISO C we wouldn't have to accept calling
7083 this on anything than an array if DIF_SIZE_ONLY (and even then
7084 only on the outermost level, so no recursion would be needed),
7085 because initializing a flex array member isn't supported.
7086 But GNU C supports it, so we need to recurse even into
7087 subfields of structs and arrays when DIF_SIZE_ONLY is set. */
7088 /* just skip expression */
7089 skip_or_save_block(NULL);
7090 } else {
7091 if (!(flags & DIF_HAVE_ELEM)) {
7092 /* This should happen only when we haven't parsed
7093 the init element above for fear of committing a
7094 string constant to memory too early. */
7095 if (tok != TOK_STR && tok != TOK_LSTR)
7096 expect("string constant");
7097 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
7099 init_putv(type, sec, c);
7103 /* parse an initializer for type 't' if 'has_init' is non zero, and
7104 allocate space in local or global data space ('r' is either
7105 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
7106 variable 'v' of scope 'scope' is declared before initializers
7107 are parsed. If 'v' is zero, then a reference to the new object
7108 is put in the value stack. If 'has_init' is 2, a special parsing
7109 is done to handle string constants. */
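/* Sketch (illustrative, not from the original source) of the two-pass
   scheme used below for objects of unknown size: for
       int a[] = { 1, 2, 3 };
   the initializer tokens are recorded with skip_or_save_block(), replayed
   once with DIF_SIZE_ONLY to compute the size, then replayed a second
   time to generate the actual data. */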
7110 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r,
7111 int has_init, int v, int scope)
7113 int size, align, addr;
7114 TokenString *init_str = NULL;
7116 Section *sec;
7117 Sym *flexible_array;
7118 Sym *sym = NULL;
7119 int saved_nocode_wanted = nocode_wanted;
7120 #ifdef CONFIG_TCC_BCHECK
7121 int bcheck;
7122 #endif
7124 /* Always allocate static or global variables */
7125 if (v && (r & VT_VALMASK) == VT_CONST)
7126 nocode_wanted |= 0x80000000;
7128 #ifdef CONFIG_TCC_BCHECK
7129 bcheck = tcc_state->do_bounds_check && !NODATA_WANTED;
7130 #endif
7132 flexible_array = NULL;
7133 if ((type->t & VT_BTYPE) == VT_STRUCT) {
7134 Sym *field = type->ref->next;
7135 if (field) {
7136 while (field->next)
7137 field = field->next;
7138 if (field->type.t & VT_ARRAY && field->type.ref->c < 0)
7139 flexible_array = field;
7143 size = type_size(type, &align);
7144 /* If unknown size, we must evaluate it before
7145 evaluating initializers because
7146 initializers can generate global data too
7147 (e.g. string pointers or ISO C99 compound
7148 literals). It also simplifies local
7149 initializers handling */
7150 if (size < 0 || (flexible_array && has_init)) {
7151 if (!has_init)
7152 tcc_error("unknown type size");
7153 /* get all init string */
7154 if (has_init == 2) {
7155 init_str = tok_str_alloc();
7156 /* only get strings */
7157 while (tok == TOK_STR || tok == TOK_LSTR) {
7158 tok_str_add_tok(init_str);
7159 next();
7161 tok_str_add(init_str, -1);
7162 tok_str_add(init_str, 0);
7163 } else {
7164 skip_or_save_block(&init_str);
7166 unget_tok(0);
7168 /* compute size */
7169 begin_macro(init_str, 1);
7170 next();
7171 decl_initializer(type, NULL, 0, DIF_FIRST | DIF_SIZE_ONLY);
7172 /* prepare second initializer parsing */
7173 macro_ptr = init_str->str;
7174 next();
7176 /* if still unknown size, error */
7177 size = type_size(type, &align);
7178 if (size < 0)
7179 tcc_error("unknown type size");
7181 /* If there's a flex member and it was used in the initializer
7182 adjust size. */
7183 if (flexible_array &&
7184 flexible_array->type.ref->c > 0)
7185 size += flexible_array->type.ref->c
7186 * pointed_size(&flexible_array->type);
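/* e.g. (illustrative, GNU extension): for
       static struct vec { int n; int v[]; } one = { 1, { 10, 20 } };
   the size pass recorded 2 elements in the flexible member, so two extra
   ints are added to the allocation size here. */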
7187 /* take into account specified alignment if bigger */
7188 if (ad->a.aligned) {
7189 int speca = 1 << (ad->a.aligned - 1);
7190 if (speca > align)
7191 align = speca;
7192 } else if (ad->a.packed) {
7193 align = 1;
7196 if (!v && NODATA_WANTED)
7197 size = 0, align = 1;
7199 if ((r & VT_VALMASK) == VT_LOCAL) {
7200 sec = NULL;
7201 #ifdef CONFIG_TCC_BCHECK
7202 if (bcheck && (type->t & VT_ARRAY)) {
7203 loc--;
7205 #endif
7206 loc = (loc - size) & -align;
7207 addr = loc;
7208 #ifdef CONFIG_TCC_BCHECK
7209 /* handles bounds */
7210 /* XXX: currently, since we do only one pass, we cannot track
7211 '&' operators, so we add only arrays */
7212 if (bcheck && (type->t & VT_ARRAY)) {
7213 addr_t *bounds_ptr;
7214 /* add padding between regions */
7215 loc--;
7216 /* then add local bound info */
7217 bounds_ptr = section_ptr_add(lbounds_section, 2 * sizeof(addr_t));
7218 bounds_ptr[0] = addr;
7219 bounds_ptr[1] = size;
7221 #endif
7222 if (v) {
7223 /* local variable */
7224 #ifdef CONFIG_TCC_ASM
7225 if (ad->asm_label) {
7226 int reg = asm_parse_regvar(ad->asm_label);
7227 if (reg >= 0)
7228 r = (r & ~VT_VALMASK) | reg;
7230 #endif
7231 sym = sym_push(v, type, r, addr);
7232 if (ad->cleanup_func) {
7233 Sym *cls = sym_push2(&all_cleanups, SYM_FIELD | ++ncleanups, 0, 0);
7234 cls->prev_tok = sym;
7235 cls->next = ad->cleanup_func;
7236 cls->ncl = current_cleanups;
7237 current_cleanups = cls;
7240 sym->a = ad->a;
7241 } else {
7242 /* push local reference */
7243 vset(type, r, addr);
7245 } else {
7246 if (v && scope == VT_CONST) {
7247 /* see if the symbol was already defined */
7248 sym = sym_find(v);
7249 if (sym) {
7250 patch_storage(sym, ad, type);
7251 /* we accept several definitions of the same global variable. */
7252 if (!has_init && sym->c && elfsym(sym)->st_shndx != SHN_UNDEF)
7253 goto no_alloc;
7257 /* allocate symbol in corresponding section */
7258 sec = ad->section;
7259 if (!sec) {
7260 if (has_init)
7261 sec = data_section;
7262 else if (tcc_state->nocommon)
7263 sec = bss_section;
7266 if (sec) {
7267 addr = section_add(sec, size, align);
7268 #ifdef CONFIG_TCC_BCHECK
7269 /* add padding if bound check */
7270 if (bcheck)
7271 section_add(sec, 1, 1);
7272 #endif
7273 } else {
7274 addr = align; /* SHN_COMMON is special, symbol value is align */
7275 sec = common_section;
7278 if (v) {
7279 if (!sym) {
7280 sym = sym_push(v, type, r | VT_SYM, 0);
7281 patch_storage(sym, ad, NULL);
7283 /* Local statics have a scope until now (for
7284 warnings), remove it here. */
7285 sym->sym_scope = 0;
7286 /* update symbol definition */
7287 put_extern_sym(sym, sec, addr, size);
7288 } else {
7289 /* push global reference */
7290 vpush_ref(type, sec, addr, size);
7291 sym = vtop->sym;
7292 vtop->r |= r;
7295 #ifdef CONFIG_TCC_BCHECK
7296 /* handles bounds now because the symbol must be defined
7297 before for the relocation */
7298 if (bcheck) {
7299 addr_t *bounds_ptr;
7301 greloca(bounds_section, sym, bounds_section->data_offset, R_DATA_PTR, 0);
7302 /* then add global bound info */
7303 bounds_ptr = section_ptr_add(bounds_section, 2 * sizeof(addr_t));
7304 bounds_ptr[0] = 0; /* relocated */
7305 bounds_ptr[1] = size;
7307 #endif
7310 if (type->t & VT_VLA) {
7311 int a;
7313 if (NODATA_WANTED)
7314 goto no_alloc;
7316 /* save current stack pointer */
7317 if (vlas_in_scope == 0) {
7318 if (vla_sp_root_loc == -1)
7319 vla_sp_root_loc = (loc -= PTR_SIZE);
7320 gen_vla_sp_save(vla_sp_root_loc);
7323 vla_runtime_type_size(type, &a);
7324 gen_vla_alloc(type, a);
7325 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
7326 /* on _WIN64, because of the function args scratch area, the
7327 result of alloca differs from RSP and is returned in RAX. */
7328 gen_vla_result(addr), addr = (loc -= PTR_SIZE);
7329 #endif
7330 gen_vla_sp_save(addr);
7331 vla_sp_loc = addr;
7332 vlas_in_scope++;
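/* e.g. (illustrative): for
       void f(int n) { int buf[n]; buf[0] = n; }
   the stack pointer is saved once per scope in vla_sp_root_loc, then
   gen_vla_alloc() grows the stack by the run-time size of 'buf' and the
   resulting pointer is saved so the block can restore it on exit. */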
7334 } else if (has_init) {
7335 size_t oldreloc_offset = 0;
7336 if (sec && sec->reloc)
7337 oldreloc_offset = sec->reloc->data_offset;
7338 decl_initializer(type, sec, addr, DIF_FIRST);
7339 if (sec && sec->reloc)
7340 squeeze_multi_relocs(sec, oldreloc_offset);
7341 /* patch flexible array member size back to -1, */
7342 /* for possible subsequent similar declarations */
7343 if (flexible_array)
7344 flexible_array->type.ref->c = -1;
7347 no_alloc:
7348 /* restore parse state if needed */
7349 if (init_str) {
7350 end_macro();
7351 next();
7354 nocode_wanted = saved_nocode_wanted;
7357 /* parse a function defined by symbol 'sym' and generate its code in
7358 'cur_text_section' */
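/* Besides emitting the prologue, body and epilogue, this also implements
   the C99 rule that falling off the end of main() behaves like
   'return 0;' (see the "main" special case below). */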
7359 static void gen_function(Sym *sym)
7361 nocode_wanted = 0;
7362 ind = cur_text_section->data_offset;
7363 if (sym->a.aligned) {
7364 size_t newoff = section_add(cur_text_section, 0,
7365 1 << (sym->a.aligned - 1));
7366 gen_fill_nops(newoff - ind);
7368 /* NOTE: we patch the symbol size later */
7369 put_extern_sym(sym, cur_text_section, ind, 0);
7370 funcname = get_tok_str(sym->v, NULL);
7371 func_ind = ind;
7372 /* Initialize VLA state */
7373 vla_sp_loc = -1;
7374 vla_sp_root_loc = -1;
7375 /* put debug symbol */
7376 tcc_debug_funcstart(tcc_state, sym);
7377 /* push a dummy symbol to enable local sym storage */
7378 sym_push2(&local_stack, SYM_FIELD, 0, 0);
7379 local_scope = 1; /* for function parameters */
7380 gfunc_prolog(&sym->type);
7381 reset_local_scope();
7382 rsym = 0;
7383 clear_temp_local_var_list();
7384 block(NULL, NULL, NULL, NULL, 0);
7385 if (!(nocode_wanted & 0x20000000)
7386 && ((func_vt.t & VT_BTYPE) == VT_INT)
7387 && !strcmp (funcname, "main"))
7389 nocode_wanted = 0;
7390 vpushi(0);
7391 gen_assign_cast(&func_vt);
7392 gfunc_return(&func_vt);
7394 nocode_wanted = 0;
7395 gsym(rsym);
7396 gfunc_epilog();
7397 cur_text_section->data_offset = ind;
7398 label_pop(&global_label_stack, NULL, 0);
7399 /* reset local stack */
7400 reset_local_scope();
7401 sym_pop(&local_stack, NULL, 0);
7402 /* end of function */
7403 /* patch symbol size */
7404 elfsym(sym)->st_size = ind - func_ind;
7405 tcc_debug_funcend(tcc_state, ind - func_ind);
7406 /* It's better to crash than to generate wrong code */
7407 cur_text_section = NULL;
7408 funcname = ""; /* for safety */
7409 func_vt.t = VT_VOID; /* for safety */
7410 func_var = 0; /* for safety */
7411 ind = 0; /* for safety */
7412 nocode_wanted = 0x80000000;
7413 check_vstack();
7416 static void gen_inline_functions(TCCState *s)
7418 Sym *sym;
7419 int inline_generated, i, ln;
7420 struct InlineFunc *fn;
7422 ln = file->line_num;
7423 /* iterate while inline functions are referenced */
7424 do {
7425 inline_generated = 0;
7426 for (i = 0; i < s->nb_inline_fns; ++i) {
7427 fn = s->inline_fns[i];
7428 sym = fn->sym;
7429 if (sym && (sym->c || !(sym->type.t & (VT_INLINE | VT_STATIC)) )) {
7430 /* the function was used or forced (and therefore not internal):
7431 generate its code and convert it to a normal function */
7432 fn->sym = NULL;
7433 if (file)
7434 pstrcpy(file->filename, sizeof file->filename, fn->filename);
7435 begin_macro(fn->func_str, 1);
7436 next();
7437 cur_text_section = text_section;
7438 gen_function(sym);
7439 end_macro();
7441 inline_generated = 1;
7444 } while (inline_generated);
7445 file->line_num = ln;
7448 ST_FUNC void free_inline_functions(TCCState *s)
7450 int i;
7451 /* free tokens of unused inline functions */
7452 for (i = 0; i < s->nb_inline_fns; ++i) {
7453 struct InlineFunc *fn = s->inline_fns[i];
7454 if (fn->sym)
7455 tok_str_free(fn->func_str);
7457 dynarray_reset(&s->inline_fns, &s->nb_inline_fns);
7460 /* 'l' is VT_LOCAL or VT_CONST to define default storage type, or VT_CMP
7461 if parsing old style parameter decl list (and FUNC_SYM is set then) */
7462 static int decl0(int l, int is_for_loop_init, Sym *func_sym)
7464 int v, has_init, r;
7465 CType type, btype;
7466 Sym *sym;
7467 AttributeDef ad, adbase;
7469 while (1) {
7470 if (tok == TOK_STATIC_ASSERT) {
7471 int c;
7473 next();
7474 skip('(');
7475 c = expr_const();
7476 skip(',');
7477 if (c == 0)
7478 tcc_error("%s", get_tok_str(tok, &tokc));
7479 next();
7480 skip(')');
7481 skip(';');
7482 continue;
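/* e.g. (illustrative): _Static_assert(sizeof(int) == 4, "int is not 32 bit");
   the constant expression was evaluated above and, if it is 0, the
   message token is reported through tcc_error. */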
7484 if (!parse_btype(&btype, &adbase)) {
7485 if (is_for_loop_init)
7486 return 0;
7487 /* skip redundant ';' if not in old parameter decl scope */
7488 if (tok == ';' && l != VT_CMP) {
7489 next();
7490 continue;
7492 if (l != VT_CONST)
7493 break;
7494 if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
7495 /* global asm block */
7496 asm_global_instr();
7497 continue;
7499 if (tok >= TOK_UIDENT) {
7500 /* special test for old K&R protos without explicit int
7501 type. Only accepted when defining global data */
7502 btype.t = VT_INT;
7503 } else {
7504 if (tok != TOK_EOF)
7505 expect("declaration");
7506 break;
7509 if (tok == ';') {
7510 if ((btype.t & VT_BTYPE) == VT_STRUCT) {
7511 int v = btype.ref->v;
7512 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) >= SYM_FIRST_ANOM)
7513 tcc_warning("unnamed struct/union that defines no instances");
7514 next();
7515 continue;
7517 if (IS_ENUM(btype.t)) {
7518 next();
7519 continue;
7522 while (1) { /* iterate thru each declaration */
7523 type = btype;
7524 /* If the base type itself was an array type of unspecified
7525 size (like in 'typedef int arr[]; arr x = {1};') then
7526 we will overwrite the unknown size by the real one for
7527 this decl. We need to unshare the ref symbol holding
7528 that size. */
7529 if ((type.t & VT_ARRAY) && type.ref->c < 0) {
7530 type.ref = sym_push(SYM_FIELD, &type.ref->type, 0, type.ref->c);
7532 ad = adbase;
7533 type_decl(&type, &ad, &v, TYPE_DIRECT);
7534 #if 0
7536 char buf[500];
7537 type_to_str(buf, sizeof(buf), &type, get_tok_str(v, NULL));
7538 printf("type = '%s'\n", buf);
7540 #endif
7541 if ((type.t & VT_BTYPE) == VT_FUNC) {
7542 if ((type.t & VT_STATIC) && (l == VT_LOCAL)) {
7543 tcc_error("function without file scope cannot be static");
7545 /* if old style function prototype, we accept a
7546 declaration list */
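/* e.g. (illustrative):
       int add(a, b) int a, b; { return a + b; }
   the "int a, b;" declaration list is parsed by the recursive
   decl0(VT_CMP, ...) call below. */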
7547 sym = type.ref;
7548 if (sym->f.func_type == FUNC_OLD && l == VT_CONST)
7549 decl0(VT_CMP, 0, sym);
7552 if (gnu_ext && (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
7553 ad.asm_label = asm_label_instr();
7554 /* parse one last attribute list, after asm label */
7555 parse_attribute(&ad);
7556 if (tok == '{')
7557 expect(";");
7560 #ifdef TCC_TARGET_PE
7561 if (ad.a.dllimport || ad.a.dllexport) {
7562 if (type.t & (VT_STATIC|VT_TYPEDEF))
7563 tcc_error("cannot have dll linkage with static or typedef");
7564 if (ad.a.dllimport) {
7565 if ((type.t & VT_BTYPE) == VT_FUNC)
7566 ad.a.dllimport = 0;
7567 else
7568 type.t |= VT_EXTERN;
7571 #endif
7572 if (tok == '{') {
7573 if (l != VT_CONST)
7574 tcc_error("cannot use local functions");
7575 if ((type.t & VT_BTYPE) != VT_FUNC)
7576 expect("function definition");
7578 /* reject abstract declarators in function definitions;
7579 make old style params without a declared type default to int */
7580 sym = type.ref;
7581 while ((sym = sym->next) != NULL) {
7582 if (!(sym->v & ~SYM_FIELD))
7583 expect("identifier");
7584 if (sym->type.t == VT_VOID)
7585 sym->type = int_type;
7588 /* put function symbol */
7589 sym = external_sym(v, &type, 0, &ad);
7590 /* This is the def, so overwrite any other parameter names
7591 we got from prototypes. */
7592 sym->type.ref = type.ref;
7593 if (sym->c && elfsym(sym)->st_shndx != SHN_UNDEF
7594 && !(elfsym(sym)->st_other & ST_ASM_SET))
7595 tcc_error("redefinition of '%s'", get_tok_str(sym->v, NULL));
7597 /* static inline functions are just recorded as a kind
7598 of macro. Their code will be emitted at the end of
7599 the compilation unit only if they are used */
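/* e.g. (illustrative): for
       static inline int sq(int x) { return x * x; }
   the body is stored as a token string here and only turned into code by
   gen_inline_functions() if sq() is actually referenced. */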
7600 if ((sym->type.t & (VT_INLINE | VT_EXTERN)) == VT_INLINE) {
7601 struct InlineFunc *fn;
7602 const char *filename;
7604 filename = file ? file->filename : "";
7605 fn = tcc_malloc(sizeof *fn + strlen(filename));
7606 strcpy(fn->filename, filename);
7607 fn->sym = sym;
7608 skip_or_save_block(&fn->func_str);
7609 dynarray_add(&tcc_state->inline_fns,
7610 &tcc_state->nb_inline_fns, fn);
7611 } else {
7612 /* compute text section */
7613 cur_text_section = ad.section;
7614 if (!cur_text_section)
7615 cur_text_section = text_section;
7616 gen_function(sym);
7618 break;
7619 } else {
7620 if (l == VT_CMP) {
7621 /* find parameter in function parameter list */
7622 for (sym = func_sym->next; sym; sym = sym->next)
7623 if ((sym->v & ~SYM_FIELD) == v)
7624 goto found;
7625 tcc_error("declaration for parameter '%s' but no such parameter",
7626 get_tok_str(v, NULL));
7627 found:
7628 if (type.t & VT_STORAGE) /* 'register' is okay */
7629 tcc_error("storage class specified for '%s'",
7630 get_tok_str(v, NULL));
7631 if (sym->type.t != VT_VOID)
7632 tcc_error("redefinition of parameter '%s'",
7633 get_tok_str(v, NULL));
7634 convert_parameter_type(&type);
7635 sym->type = type;
7636 } else if (type.t & VT_TYPEDEF) {
7637 /* save typedefed type */
7638 /* XXX: test storage specifiers ? */
7639 sym = sym_find(v);
7640 if (sym && sym->sym_scope == local_scope) {
7641 if (!is_compatible_types(&sym->type, &type)
7642 || !(sym->type.t & VT_TYPEDEF))
7643 tcc_error("incompatible redefinition of '%s'",
7644 get_tok_str(v, NULL));
7645 sym->type = type;
7646 } else {
7647 sym = sym_push(v, &type, 0, 0);
7649 sym->a = ad.a;
7650 sym->f = ad.f;
7651 } else if ((type.t & VT_BTYPE) == VT_VOID
7652 && !(type.t & VT_EXTERN)) {
7653 tcc_error("declaration of void object");
7654 } else {
7655 r = 0;
7656 if ((type.t & VT_BTYPE) == VT_FUNC) {
7657 /* external function definition */
7658 /* specific case for func_call attribute */
7659 type.ref->f = ad.f;
7660 } else if (!(type.t & VT_ARRAY)) {
7661 /* not lvalue if array */
7662 r |= lvalue_type(type.t);
7664 has_init = (tok == '=');
7665 if (has_init && (type.t & VT_VLA))
7666 tcc_error("variable length array cannot be initialized");
7667 if (((type.t & VT_EXTERN) && (!has_init || l != VT_CONST)) ||
7668 ((type.t & VT_BTYPE) == VT_FUNC) ||
7669 ((type.t & VT_ARRAY) && (type.t & VT_STATIC) &&
7670 !has_init && l == VT_CONST && type.ref->c < 0)) {
7671 /* external variable or function */
7672 /* NOTE: as in GCC, uninitialized global static
7673 arrays of zero size are considered
7674 extern */
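/* e.g. (illustrative, roughly how GCC behaves too):
       static int tbl[];              - treated like a tentative declaration
       static int tbl[3] = {1, 2, 3}; - the later completing definition */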
7675 if ((type.t & VT_BTYPE) != VT_FUNC)
7676 type.t |= VT_EXTERN;
7677 sym = external_sym(v, &type, r, &ad);
7678 if (ad.alias_target) {
7679 ElfSym *esym;
7680 Sym *alias_target;
7681 alias_target = sym_find(ad.alias_target);
7682 esym = elfsym(alias_target);
7683 if (!esym)
7684 tcc_error("unsupported forward __alias__ attribute");
7685 /* Local statics have a scope until now (for
7686 warnings), remove it here. */
7687 sym->sym_scope = 0;
7688 put_extern_sym2(sym, esym->st_shndx, esym->st_value, esym->st_size, 0);
7690 } else {
7691 if (type.t & VT_STATIC)
7692 r |= VT_CONST;
7693 else
7694 r |= l;
7695 if (has_init)
7696 next();
7697 else if (l == VT_CONST
7698 && (type.t & VT_BTYPE) != VT_FUNC)
7699 /* uninitialized global variables may be overridden */
7700 type.t |= VT_EXTERN;
7701 decl_initializer_alloc(&type, &ad, r, has_init, v, l);
7704 if (tok != ',') {
7705 if (is_for_loop_init)
7706 return 1;
7707 skip(';');
7708 break;
7710 next();
7714 return 0;
7717 static void decl(int l)
7719 decl0(l, 0, NULL);
7722 /* ------------------------------------------------------------------------- */