[tinycc.git] / tccgen.c
1 /*
2 * TCC - Tiny C Compiler
3 *
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 #include "tcc.h"
22 /********************************************************/
23 /* global variables */
25 /* loc : local variable index
26 ind : output code index
27 rsym: return symbol
28 anon_sym: anonymous symbol index
30 ST_DATA int rsym, anon_sym, ind, loc;
32 ST_DATA Sym *sym_free_first;
33 ST_DATA void **sym_pools;
34 ST_DATA int nb_sym_pools;
36 ST_DATA Sym *global_stack;
37 ST_DATA Sym *local_stack;
38 ST_DATA Sym *define_stack;
39 ST_DATA Sym *global_label_stack;
40 ST_DATA Sym *local_label_stack;
42 static Sym *all_cleanups, *current_cleanups, *pending_gotos;
43 static int ncleanups;
45 static int local_scope;
46 static int in_sizeof;
47 static int section_sym;
49 ST_DATA int vlas_in_scope; /* number of VLAs that are currently in scope */
50 ST_DATA int vla_sp_root_loc; /* vla_sp_loc for SP before any VLAs were pushed */
51 ST_DATA int vla_sp_loc; /* location on the stack used to save the stack pointer when it is modified for VLAs */
53 ST_DATA SValue __vstack[1+VSTACK_SIZE], *vtop, *pvtop;
55 ST_DATA int const_wanted; /* true if constant wanted */
56 ST_DATA int nocode_wanted; /* no code generation wanted */
57 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
58 #define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */
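/* Note (illustrative, not in the original source): nocode_wanted doubles as a
   small state word.  tccgen_compile() below sets it to 0x80000000 at file
   scope: it is then non-zero, so no code is generated, but because the value
   is negative as an int, NODATA_WANTED stays false and static data is still
   emitted; STATIC_DATA_WANTED tests those top bits to recognize this
   "static data only" state.  Any positive value makes NODATA_WANTED true and
   suppresses static data output as well. */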
59 ST_DATA int global_expr; /* true if compound literals must be allocated globally (used during initializer parsing) */
60 ST_DATA CType func_vt; /* current function return type (used by return instruction) */
61 ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
62 ST_DATA int func_vc;
63 ST_DATA int last_line_num, last_ind, func_ind; /* debug last line number and pc */
64 ST_DATA const char *funcname;
65 ST_DATA int g_debug;
67 ST_DATA CType char_pointer_type, func_old_type, int_type, size_type, ptrdiff_type;
69 ST_DATA struct switch_t {
70 struct case_t {
71 int64_t v1, v2;
72 int sym;
73 } **p; int n; /* list of case ranges */
74 int def_sym; /* default symbol */
75 } *cur_switch; /* current switch */
77 #define MAX_TEMP_LOCAL_VARIABLE_NUMBER 0x4
78 /* list of temporary local variables on the stack in the current function */
79 ST_DATA struct temp_local_variable {
80 int location; //offset on stack. Svalue.c.i
81 short size;
82 short align;
83 } arr_temp_local_vars[MAX_TEMP_LOCAL_VARIABLE_NUMBER];
84 short nb_temp_local_vars;
86 /* ------------------------------------------------------------------------- */
88 static void gen_cast(CType *type);
89 static void gen_cast_s(int t);
90 static inline CType *pointed_type(CType *type);
91 static int is_compatible_types(CType *type1, CType *type2);
92 static int parse_btype(CType *type, AttributeDef *ad);
93 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td);
94 static void parse_expr_type(CType *type);
95 static void init_putv(CType *type, Section *sec, unsigned long c);
96 static void decl_initializer(CType *type, Section *sec, unsigned long c, int first, int size_only);
97 static void block(int *bsym, int *csym, int is_expr);
98 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
99 static void decl(int l);
100 static int decl0(int l, int is_for_loop_init, Sym *);
101 static void expr_eq(void);
102 static void vla_runtime_type_size(CType *type, int *a);
103 static void vla_sp_restore(void);
104 static void vla_sp_restore_root(void);
105 static int is_compatible_unqualified_types(CType *type1, CType *type2);
106 static inline int64_t expr_const64(void);
107 static void vpush64(int ty, unsigned long long v);
108 static void vpush(CType *type);
109 static int gvtst(int inv, int t);
110 static void gen_inline_functions(TCCState *s);
111 static void skip_or_save_block(TokenString **str);
112 static void gv_dup(void);
113 static int get_temp_local_var(int size,int align);
114 static void clear_temp_local_var_list();
116 static void incr_local_scope(void)
118 ++local_scope;
121 static void decr_local_scope(void)
123 --local_scope;
126 static void reset_local_scope(void)
128 if (current_cleanups)
129 tcc_error("ICE current_cleanups");
130 sym_pop(&all_cleanups, NULL, 0);
131 local_scope = 0;
134 ST_INLN int is_float(int t)
136 int bt;
137 bt = t & VT_BTYPE;
138 return bt == VT_LDOUBLE || bt == VT_DOUBLE || bt == VT_FLOAT || bt == VT_QFLOAT;
141 /* we use our own 'finite' function to avoid potential problems with
142 non standard math libs */
143 /* XXX: endianness dependent */
144 ST_FUNC int ieee_finite(double d)
146 int p[4];
147 memcpy(p, &d, sizeof(double));
148 return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
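/* Illustration (added comment, not in the original source): on a
   little-endian host p[1] is the high word of the double, laid out as
   sign(1) | exponent(11) | mantissa-high(20).  OR-ing with 0x800fffff forces
   every bit except the exponent field to 1, so the +1 wraps to 0 exactly
   when the exponent is all ones (Inf or NaN); otherwise bit 31 survives and
   the shift yields 1.  E.g. d = 1.0 gives p[1] = 0x3ff00000 -> 1, while
   d = INFINITY gives p[1] = 0x7ff00000 -> 0. */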
151 /* compiling intel long double natively */
152 #if (defined __i386__ || defined __x86_64__) \
153 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
154 # define TCC_IS_NATIVE_387
155 #endif
157 ST_FUNC void test_lvalue(void)
159 if (!(vtop->r & VT_LVAL))
160 expect("lvalue");
163 ST_FUNC void check_vstack(void)
165 if (pvtop != vtop)
166 tcc_error("internal compiler error: vstack leak (%d)", vtop - pvtop);
169 /* ------------------------------------------------------------------------- */
170 /* vstack debugging aid */
172 #if 0
173 void pv (const char *lbl, int a, int b)
175 int i;
176 for (i = a; i < a + b; ++i) {
177 SValue *p = &vtop[-i];
178 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
179 lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
182 #endif
184 /* ------------------------------------------------------------------------- */
185 /* start of translation unit info */
186 ST_FUNC void tcc_debug_start(TCCState *s1)
188 if (s1->do_debug) {
189 char buf[512];
191 /* file info: full path + filename */
192 section_sym = put_elf_sym(symtab_section, 0, 0,
193 ELFW(ST_INFO)(STB_LOCAL, STT_SECTION), 0,
194 text_section->sh_num, NULL);
195 getcwd(buf, sizeof(buf));
196 #ifdef _WIN32
197 normalize_slashes(buf);
198 #endif
199 pstrcat(buf, sizeof(buf), "/");
200 put_stabs_r(buf, N_SO, 0, 0,
201 text_section->data_offset, text_section, section_sym);
202 put_stabs_r(file->filename, N_SO, 0, 0,
203 text_section->data_offset, text_section, section_sym);
204 last_ind = 0;
205 last_line_num = 0;
208 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
209 symbols can be safely used */
210 put_elf_sym(symtab_section, 0, 0,
211 ELFW(ST_INFO)(STB_LOCAL, STT_FILE), 0,
212 SHN_ABS, file->filename);
215 /* put end of translation unit info */
216 ST_FUNC void tcc_debug_end(TCCState *s1)
218 if (!s1->do_debug)
219 return;
220 put_stabs_r(NULL, N_SO, 0, 0,
221 text_section->data_offset, text_section, section_sym);
225 /* generate line number info */
226 ST_FUNC void tcc_debug_line(TCCState *s1)
228 if (!s1->do_debug)
229 return;
230 if ((last_line_num != file->line_num || last_ind != ind)) {
231 put_stabn(N_SLINE, 0, file->line_num, ind - func_ind);
232 last_ind = ind;
233 last_line_num = file->line_num;
237 /* put function symbol */
238 ST_FUNC void tcc_debug_funcstart(TCCState *s1, Sym *sym)
240 char buf[512];
242 if (!s1->do_debug)
243 return;
245 /* stabs info */
246 /* XXX: we put here a dummy type */
247 snprintf(buf, sizeof(buf), "%s:%c1",
248 funcname, sym->type.t & VT_STATIC ? 'f' : 'F');
249 put_stabs_r(buf, N_FUN, 0, file->line_num, 0,
250 cur_text_section, sym->c);
251 /* //gr gdb wants a line at the function */
252 put_stabn(N_SLINE, 0, file->line_num, 0);
254 last_ind = 0;
255 last_line_num = 0;
258 /* put function size */
259 ST_FUNC void tcc_debug_funcend(TCCState *s1, int size)
261 if (!s1->do_debug)
262 return;
263 put_stabn(N_FUN, 0, 0, size);
266 /* ------------------------------------------------------------------------- */
267 ST_FUNC int tccgen_compile(TCCState *s1)
269 cur_text_section = NULL;
270 funcname = "";
271 anon_sym = SYM_FIRST_ANOM;
272 section_sym = 0;
273 const_wanted = 0;
274 nocode_wanted = 0x80000000;
276 /* define some often used types */
277 int_type.t = VT_INT;
278 char_pointer_type.t = VT_BYTE;
279 mk_pointer(&char_pointer_type);
280 #if PTR_SIZE == 4
281 size_type.t = VT_INT | VT_UNSIGNED;
282 ptrdiff_type.t = VT_INT;
283 #elif LONG_SIZE == 4
284 size_type.t = VT_LLONG | VT_UNSIGNED;
285 ptrdiff_type.t = VT_LLONG;
286 #else
287 size_type.t = VT_LONG | VT_LLONG | VT_UNSIGNED;
288 ptrdiff_type.t = VT_LONG | VT_LLONG;
289 #endif
290 func_old_type.t = VT_FUNC;
291 func_old_type.ref = sym_push(SYM_FIELD, &int_type, 0, 0);
292 func_old_type.ref->f.func_call = FUNC_CDECL;
293 func_old_type.ref->f.func_type = FUNC_OLD;
295 tcc_debug_start(s1);
297 #ifdef TCC_TARGET_ARM
298 arm_init(s1);
299 #endif
301 #ifdef INC_DEBUG
302 printf("%s: **** new file\n", file->filename);
303 #endif
305 parse_flags = PARSE_FLAG_PREPROCESS | PARSE_FLAG_TOK_NUM | PARSE_FLAG_TOK_STR;
306 next();
307 decl(VT_CONST);
308 gen_inline_functions(s1);
309 check_vstack();
310 /* end of translation unit info */
311 tcc_debug_end(s1);
312 return 0;
315 /* ------------------------------------------------------------------------- */
316 ST_FUNC ElfSym *elfsym(Sym *s)
318 if (!s || !s->c)
319 return NULL;
320 return &((ElfSym *)symtab_section->data)[s->c];
323 /* apply storage attributes to Elf symbol */
324 ST_FUNC void update_storage(Sym *sym)
326 ElfSym *esym;
327 int sym_bind, old_sym_bind;
329 esym = elfsym(sym);
330 if (!esym)
331 return;
333 if (sym->a.visibility)
334 esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1))
335 | sym->a.visibility;
337 if (sym->type.t & VT_STATIC)
338 sym_bind = STB_LOCAL;
339 else if (sym->a.weak)
340 sym_bind = STB_WEAK;
341 else
342 sym_bind = STB_GLOBAL;
343 old_sym_bind = ELFW(ST_BIND)(esym->st_info);
344 if (sym_bind != old_sym_bind) {
345 esym->st_info = ELFW(ST_INFO)(sym_bind, ELFW(ST_TYPE)(esym->st_info));
348 #ifdef TCC_TARGET_PE
349 if (sym->a.dllimport)
350 esym->st_other |= ST_PE_IMPORT;
351 if (sym->a.dllexport)
352 esym->st_other |= ST_PE_EXPORT;
353 #endif
355 #if 0
356 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
357 get_tok_str(sym->v, NULL),
358 sym_bind == STB_WEAK ? 'w' : sym_bind == STB_LOCAL ? 'l' : 'g',
359 sym->a.visibility,
360 sym->a.dllexport,
361 sym->a.dllimport
363 #endif
366 /* ------------------------------------------------------------------------- */
367 /* update sym->c so that it points to an external symbol in section
368 'section' with value 'value' */
370 ST_FUNC void put_extern_sym2(Sym *sym, int sh_num,
371 addr_t value, unsigned long size,
372 int can_add_underscore)
374 int sym_type, sym_bind, info, other, t;
375 ElfSym *esym;
376 const char *name;
377 char buf1[256];
378 #ifdef CONFIG_TCC_BCHECK
379 char buf[32];
380 #endif
382 if (!sym->c) {
383 name = get_tok_str(sym->v, NULL);
384 #ifdef CONFIG_TCC_BCHECK
385 if (tcc_state->do_bounds_check) {
386 /* XXX: avoid doing that for statics ? */
387 /* if bound checking is activated, we change some function
388 names by adding the "__bound" prefix */
389 switch(sym->v) {
390 #ifdef TCC_TARGET_PE
391 /* XXX: we rely only on malloc hooks */
392 case TOK_malloc:
393 case TOK_free:
394 case TOK_realloc:
395 case TOK_memalign:
396 case TOK_calloc:
397 #endif
398 case TOK_memcpy:
399 case TOK_memmove:
400 case TOK_memset:
401 case TOK_strlen:
402 case TOK_strcpy:
403 case TOK_alloca:
404 strcpy(buf, "__bound_");
405 strcat(buf, name);
406 name = buf;
407 break;
410 #endif
411 t = sym->type.t;
412 if ((t & VT_BTYPE) == VT_FUNC) {
413 sym_type = STT_FUNC;
414 } else if ((t & VT_BTYPE) == VT_VOID) {
415 sym_type = STT_NOTYPE;
416 } else {
417 sym_type = STT_OBJECT;
419 if (t & VT_STATIC)
420 sym_bind = STB_LOCAL;
421 else
422 sym_bind = STB_GLOBAL;
423 other = 0;
424 #ifdef TCC_TARGET_PE
425 if (sym_type == STT_FUNC && sym->type.ref) {
426 Sym *ref = sym->type.ref;
427 if (ref->a.nodecorate) {
428 can_add_underscore = 0;
430 if (ref->f.func_call == FUNC_STDCALL && can_add_underscore) {
431 sprintf(buf1, "_%s@%d", name, ref->f.func_args * PTR_SIZE);
432 name = buf1;
433 other |= ST_PE_STDCALL;
434 can_add_underscore = 0;
437 #endif
438 if (tcc_state->leading_underscore && can_add_underscore) {
439 buf1[0] = '_';
440 pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
441 name = buf1;
443 if (sym->asm_label)
444 name = get_tok_str(sym->asm_label, NULL);
445 info = ELFW(ST_INFO)(sym_bind, sym_type);
446 sym->c = put_elf_sym(symtab_section, value, size, info, other, sh_num, name);
447 } else {
448 esym = elfsym(sym);
449 esym->st_value = value;
450 esym->st_size = size;
451 esym->st_shndx = sh_num;
453 update_storage(sym);
456 ST_FUNC void put_extern_sym(Sym *sym, Section *section,
457 addr_t value, unsigned long size)
459 int sh_num = section ? section->sh_num : SHN_UNDEF;
460 put_extern_sym2(sym, sh_num, value, size, 1);
463 /* add a new relocation entry to symbol 'sym' in section 's' */
464 ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
465 addr_t addend)
467 int c = 0;
469 if (nocode_wanted && s == cur_text_section)
470 return;
472 if (sym) {
473 if (0 == sym->c)
474 put_extern_sym(sym, NULL, 0, 0);
475 c = sym->c;
478 /* now we can add ELF relocation info */
479 put_elf_reloca(symtab_section, s, offset, type, c, addend);
482 #if PTR_SIZE == 4
483 ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
485 greloca(s, sym, offset, type, 0);
487 #endif
489 /* ------------------------------------------------------------------------- */
490 /* symbol allocator */
491 static Sym *__sym_malloc(void)
493 Sym *sym_pool, *sym, *last_sym;
494 int i;
496 sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
497 dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);
499 last_sym = sym_free_first;
500 sym = sym_pool;
501 for(i = 0; i < SYM_POOL_NB; i++) {
502 sym->next = last_sym;
503 last_sym = sym;
504 sym++;
506 sym_free_first = last_sym;
507 return last_sym;
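/* Note (illustrative): each call above carves a pool of SYM_POOL_NB Sym
   objects out of one allocation, threads them onto the sym_free_first free
   list via ->next and returns the head; sym_malloc()/sym_free() below then
   just pop and push single entries, so Sym objects are recycled through this
   list rather than allocated and freed one by one. */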
510 static inline Sym *sym_malloc(void)
512 Sym *sym;
513 #ifndef SYM_DEBUG
514 sym = sym_free_first;
515 if (!sym)
516 sym = __sym_malloc();
517 sym_free_first = sym->next;
518 return sym;
519 #else
520 sym = tcc_malloc(sizeof(Sym));
521 return sym;
522 #endif
525 ST_INLN void sym_free(Sym *sym)
527 #ifndef SYM_DEBUG
528 sym->next = sym_free_first;
529 sym_free_first = sym;
530 #else
531 tcc_free(sym);
532 #endif
535 /* push, without hashing */
536 ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, int c)
538 Sym *s;
540 s = sym_malloc();
541 memset(s, 0, sizeof *s);
542 s->v = v;
543 s->type.t = t;
544 s->c = c;
545 /* add in stack */
546 s->prev = *ps;
547 *ps = s;
548 return s;
551 /* find a symbol and return its associated structure. 's' is the top
552 of the symbol stack */
553 ST_FUNC Sym *sym_find2(Sym *s, int v)
555 while (s) {
556 if (s->v == v)
557 return s;
558 else if (s->v == -1)
559 return NULL;
560 s = s->prev;
562 return NULL;
565 /* structure lookup */
566 ST_INLN Sym *struct_find(int v)
568 v -= TOK_IDENT;
569 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
570 return NULL;
571 return table_ident[v]->sym_struct;
574 /* find an identifier */
575 ST_INLN Sym *sym_find(int v)
577 v -= TOK_IDENT;
578 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
579 return NULL;
580 return table_ident[v]->sym_identifier;
583 /* push a given symbol on the symbol stack */
584 ST_FUNC Sym *sym_push(int v, CType *type, int r, int c)
586 Sym *s, **ps;
587 TokenSym *ts;
589 if (local_stack)
590 ps = &local_stack;
591 else
592 ps = &global_stack;
593 s = sym_push2(ps, v, type->t, c);
594 s->type.ref = type->ref;
595 s->r = r;
596 /* don't record fields or anonymous symbols */
597 /* XXX: simplify */
598 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
599 /* record symbol in token array */
600 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
601 if (v & SYM_STRUCT)
602 ps = &ts->sym_struct;
603 else
604 ps = &ts->sym_identifier;
605 s->prev_tok = *ps;
606 *ps = s;
607 s->sym_scope = local_scope;
608 if (s->prev_tok && s->prev_tok->sym_scope == s->sym_scope)
609 tcc_error("redeclaration of '%s'",
610 get_tok_str(v & ~SYM_STRUCT, NULL));
612 return s;
615 /* push a global identifier */
616 ST_FUNC Sym *global_identifier_push(int v, int t, int c)
618 Sym *s, **ps;
619 s = sym_push2(&global_stack, v, t, c);
620 /* don't record anonymous symbol */
621 if (v < SYM_FIRST_ANOM) {
622 ps = &table_ident[v - TOK_IDENT]->sym_identifier;
623 /* modify the top most local identifier, so that
624 sym_identifier will point to 's' when popped */
625 while (*ps != NULL && (*ps)->sym_scope)
626 ps = &(*ps)->prev_tok;
627 s->prev_tok = *ps;
628 *ps = s;
630 return s;
633 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
634 pop them yet from the list, but do remove them from the token array. */
635 ST_FUNC void sym_pop(Sym **ptop, Sym *b, int keep)
637 Sym *s, *ss, **ps;
638 TokenSym *ts;
639 int v;
641 s = *ptop;
642 while(s != b) {
643 ss = s->prev;
644 v = s->v;
645 /* remove symbol in token array */
646 /* XXX: simplify */
647 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
648 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
649 if (v & SYM_STRUCT)
650 ps = &ts->sym_struct;
651 else
652 ps = &ts->sym_identifier;
653 *ps = s->prev_tok;
655 if (!keep)
656 sym_free(s);
657 s = ss;
659 if (!keep)
660 *ptop = b;
663 /* ------------------------------------------------------------------------- */
665 static void vsetc(CType *type, int r, CValue *vc)
667 int v;
669 if (vtop >= vstack + (VSTACK_SIZE - 1))
670 tcc_error("memory full (vstack)");
671 /* cannot leave cpu flags live if other instructions are generated. Also
672 avoid leaving VT_JMP anywhere except on the top of the stack
673 because it would complicate the code generator.
675 Don't do this when nocode_wanted. vtop might come from
676 !nocode_wanted regions (see 88_codeopt.c) and transforming
677 it to a register without actually generating code is wrong
678 as their value might still be used for real. All values
679 we push under nocode_wanted will eventually be popped
680 again, so that the VT_CMP/VT_JMP value will be in vtop
681 when code is unsuppressed again.
683 Same logic below in vswap(); */
684 if (vtop >= vstack && !nocode_wanted) {
685 v = vtop->r & VT_VALMASK;
686 if (v == VT_CMP || (v & ~1) == VT_JMP)
687 gv(RC_INT);
690 vtop++;
691 vtop->type = *type;
692 vtop->r = r;
693 vtop->r2 = VT_CONST;
694 vtop->c = *vc;
695 vtop->sym = NULL;
698 ST_FUNC void vswap(void)
700 SValue tmp;
701 /* cannot vswap cpu flags. See comment at vsetc() above */
702 if (vtop >= vstack && !nocode_wanted) {
703 int v = vtop->r & VT_VALMASK;
704 if (v == VT_CMP || (v & ~1) == VT_JMP)
705 gv(RC_INT);
707 tmp = vtop[0];
708 vtop[0] = vtop[-1];
709 vtop[-1] = tmp;
712 /* pop stack value */
713 ST_FUNC void vpop(void)
715 int v;
716 v = vtop->r & VT_VALMASK;
717 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
718 /* for x86, we need to pop the FP stack */
719 if (v == TREG_ST0) {
720 o(0xd8dd); /* fstp %st(0) */
721 } else
722 #endif
723 if (v == VT_JMP || v == VT_JMPI) {
724 /* need to put correct jump if && or || without test */
725 gsym(vtop->c.i);
727 vtop--;
730 /* push constant of type "type" with useless value */
731 ST_FUNC void vpush(CType *type)
733 vset(type, VT_CONST, 0);
736 /* push integer constant */
737 ST_FUNC void vpushi(int v)
739 CValue cval;
740 cval.i = v;
741 vsetc(&int_type, VT_CONST, &cval);
744 /* push a pointer sized constant */
745 static void vpushs(addr_t v)
747 CValue cval;
748 cval.i = v;
749 vsetc(&size_type, VT_CONST, &cval);
752 /* push arbitrary 64bit constant */
753 ST_FUNC void vpush64(int ty, unsigned long long v)
755 CValue cval;
756 CType ctype;
757 ctype.t = ty;
758 ctype.ref = NULL;
759 cval.i = v;
760 vsetc(&ctype, VT_CONST, &cval);
763 /* push long long constant */
764 static inline void vpushll(long long v)
766 vpush64(VT_LLONG, v);
769 ST_FUNC void vset(CType *type, int r, int v)
771 CValue cval;
773 cval.i = v;
774 vsetc(type, r, &cval);
777 static void vseti(int r, int v)
779 CType type;
780 type.t = VT_INT;
781 type.ref = NULL;
782 vset(&type, r, v);
785 ST_FUNC void vpushv(SValue *v)
787 if (vtop >= vstack + (VSTACK_SIZE - 1))
788 tcc_error("memory full (vstack)");
789 vtop++;
790 *vtop = *v;
793 static void vdup(void)
795 vpushv(vtop);
798 /* rotate n first stack elements to the bottom
799 I1 ... In -> I2 ... In I1 [top is right]
801 ST_FUNC void vrotb(int n)
803 int i;
804 SValue tmp;
806 tmp = vtop[-n + 1];
807 for(i=-n+1;i!=0;i++)
808 vtop[i] = vtop[i+1];
809 vtop[0] = tmp;
812 /* rotate the n elements before entry e towards the top
813 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
815 ST_FUNC void vrote(SValue *e, int n)
817 int i;
818 SValue tmp;
820 tmp = *e;
821 for(i = 0;i < n - 1; i++)
822 e[-i] = e[-i - 1];
823 e[-n + 1] = tmp;
826 /* rotate n first stack elements to the top
827 I1 ... In -> In I1 ... I(n-1) [top is right]
829 ST_FUNC void vrott(int n)
831 vrote(vtop, n);
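/* Example (illustrative): with the top three value-stack entries being
   A B C from bottom to top, vrotb(3) leaves B C A (the old bottom A wraps
   around to become the new top), while vrott(3) leaves C A B (the old top C
   sinks to the bottom of the group).  vrote(e, n) performs the same rotation
   as vrott(n) but on the n entries ending at 'e' instead of at vtop. */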
834 /* push a symbol value of TYPE */
835 static inline void vpushsym(CType *type, Sym *sym)
837 CValue cval;
838 cval.i = 0;
839 vsetc(type, VT_CONST | VT_SYM, &cval);
840 vtop->sym = sym;
843 /* Return a static symbol pointing to a section */
844 ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
846 int v;
847 Sym *sym;
849 v = anon_sym++;
850 sym = global_identifier_push(v, type->t | VT_STATIC, 0);
851 sym->type.ref = type->ref;
852 sym->r = VT_CONST | VT_SYM;
853 put_extern_sym(sym, sec, offset, size);
854 return sym;
857 /* push a reference to a section offset by adding a dummy symbol */
858 static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
860 vpushsym(type, get_sym_ref(type, sec, offset, size));
863 /* define a new external reference to a symbol 'v' of type 'type' */
864 ST_FUNC Sym *external_global_sym(int v, CType *type, int r)
866 Sym *s;
868 s = sym_find(v);
869 if (!s) {
870 /* push forward reference */
871 s = global_identifier_push(v, type->t | VT_EXTERN, 0);
872 s->type.ref = type->ref;
873 s->r = r | VT_CONST | VT_SYM;
874 } else if (IS_ASM_SYM(s)) {
875 s->type.t = type->t | (s->type.t & VT_EXTERN);
876 s->type.ref = type->ref;
877 update_storage(s);
879 return s;
882 /* Merge symbol attributes. */
883 static void merge_symattr(struct SymAttr *sa, struct SymAttr *sa1)
885 if (sa1->aligned && !sa->aligned)
886 sa->aligned = sa1->aligned;
887 sa->packed |= sa1->packed;
888 sa->weak |= sa1->weak;
889 if (sa1->visibility != STV_DEFAULT) {
890 int vis = sa->visibility;
891 if (vis == STV_DEFAULT
892 || vis > sa1->visibility)
893 vis = sa1->visibility;
894 sa->visibility = vis;
896 sa->dllexport |= sa1->dllexport;
897 sa->nodecorate |= sa1->nodecorate;
898 sa->dllimport |= sa1->dllimport;
901 /* Merge function attributes. */
902 static void merge_funcattr(struct FuncAttr *fa, struct FuncAttr *fa1)
904 if (fa1->func_call && !fa->func_call)
905 fa->func_call = fa1->func_call;
906 if (fa1->func_type && !fa->func_type)
907 fa->func_type = fa1->func_type;
908 if (fa1->func_args && !fa->func_args)
909 fa->func_args = fa1->func_args;
912 /* Merge attributes. */
913 static void merge_attr(AttributeDef *ad, AttributeDef *ad1)
915 merge_symattr(&ad->a, &ad1->a);
916 merge_funcattr(&ad->f, &ad1->f);
918 if (ad1->section)
919 ad->section = ad1->section;
920 if (ad1->alias_target)
921 ad->alias_target = ad1->alias_target;
922 if (ad1->asm_label)
923 ad->asm_label = ad1->asm_label;
924 if (ad1->attr_mode)
925 ad->attr_mode = ad1->attr_mode;
928 /* Merge some type attributes. */
929 static void patch_type(Sym *sym, CType *type)
931 if (!(type->t & VT_EXTERN)) {
932 if (!(sym->type.t & VT_EXTERN))
933 tcc_error("redefinition of '%s'", get_tok_str(sym->v, NULL));
934 sym->type.t &= ~VT_EXTERN;
937 if (IS_ASM_SYM(sym)) {
938 /* stay static if both are static */
939 sym->type.t = type->t & (sym->type.t | ~VT_STATIC);
940 sym->type.ref = type->ref;
943 if (!is_compatible_types(&sym->type, type)) {
944 tcc_error("incompatible types for redefinition of '%s'",
945 get_tok_str(sym->v, NULL));
947 } else if ((sym->type.t & VT_BTYPE) == VT_FUNC) {
948 int static_proto = sym->type.t & VT_STATIC;
949 /* warn if static follows non-static function declaration */
950 if ((type->t & VT_STATIC) && !static_proto && !(type->t & VT_INLINE))
951 tcc_warning("static storage ignored for redefinition of '%s'",
952 get_tok_str(sym->v, NULL));
954 if (0 == (type->t & VT_EXTERN)) {
955 /* put complete type, use static from prototype */
956 sym->type.t = (type->t & ~VT_STATIC) | static_proto;
957 if (type->t & VT_INLINE)
958 sym->type.t = type->t;
959 sym->type.ref = type->ref;
962 } else {
963 if ((sym->type.t & VT_ARRAY) && type->ref->c >= 0) {
964 /* set array size if it was omitted in extern declaration */
965 if (sym->type.ref->c < 0)
966 sym->type.ref->c = type->ref->c;
967 else if (sym->type.ref->c != type->ref->c)
968 tcc_error("conflicting type for '%s'", get_tok_str(sym->v, NULL));
970 if ((type->t ^ sym->type.t) & VT_STATIC)
971 tcc_warning("storage mismatch for redefinition of '%s'",
972 get_tok_str(sym->v, NULL));
977 /* Merge some storage attributes. */
978 static void patch_storage(Sym *sym, AttributeDef *ad, CType *type)
980 if (type)
981 patch_type(sym, type);
983 #ifdef TCC_TARGET_PE
984 if (sym->a.dllimport != ad->a.dllimport)
985 tcc_error("incompatible dll linkage for redefinition of '%s'",
986 get_tok_str(sym->v, NULL));
987 #endif
988 merge_symattr(&sym->a, &ad->a);
989 if (ad->asm_label)
990 sym->asm_label = ad->asm_label;
991 update_storage(sym);
994 /* define a new external reference to a symbol 'v' */
995 static Sym *external_sym(int v, CType *type, int r, AttributeDef *ad)
997 Sym *s;
998 s = sym_find(v);
999 if (!s) {
1000 /* push forward reference */
1001 s = sym_push(v, type, r | VT_CONST | VT_SYM, 0);
1002 s->type.t |= VT_EXTERN;
1003 s->a = ad->a;
1004 s->sym_scope = 0;
1005 } else {
1006 if (s->type.ref == func_old_type.ref) {
1007 s->type.ref = type->ref;
1008 s->r = r | VT_CONST | VT_SYM;
1009 s->type.t |= VT_EXTERN;
1011 patch_storage(s, ad, type);
1013 return s;
1016 /* push a reference to global symbol v */
1017 ST_FUNC void vpush_global_sym(CType *type, int v)
1019 vpushsym(type, external_global_sym(v, type, 0));
1022 /* save registers up to (vtop - n) stack entry */
1023 ST_FUNC void save_regs(int n)
1025 SValue *p, *p1;
1026 for(p = vstack, p1 = vtop - n; p <= p1; p++)
1027 save_reg(p->r);
1030 /* save r to the memory stack, and mark it as being free */
1031 ST_FUNC void save_reg(int r)
1033 save_reg_upstack(r, 0);
1036 /* save r to the memory stack, and mark it as being free,
1037 if seen up to (vtop - n) stack entry */
1038 ST_FUNC void save_reg_upstack(int r, int n)
1040 int l, saved, size, align;
1041 SValue *p, *p1, sv;
1042 CType *type;
1044 if ((r &= VT_VALMASK) >= VT_CONST)
1045 return;
1046 if (nocode_wanted)
1047 return;
1049 /* modify all stack values */
1050 saved = 0;
1051 l = 0;
1052 for(p = vstack, p1 = vtop - n; p <= p1; p++) {
1053 if ((p->r & VT_VALMASK) == r ||
1054 ((p->type.t & VT_BTYPE) == VT_LLONG && (p->r2 & VT_VALMASK) == r)) {
1055 /* must save value on stack if not already done */
1056 if (!saved) {
1057 /* NOTE: must reload 'r' because r might be equal to r2 */
1058 r = p->r & VT_VALMASK;
1059 /* store register in the stack */
1060 type = &p->type;
1061 if ((p->r & VT_LVAL) ||
1062 (!is_float(type->t) && (type->t & VT_BTYPE) != VT_LLONG))
1063 #if PTR_SIZE == 8
1064 type = &char_pointer_type;
1065 #else
1066 type = &int_type;
1067 #endif
1068 size = type_size(type, &align);
1069 l=get_temp_local_var(size,align);
1070 sv.type.t = type->t;
1071 sv.r = VT_LOCAL | VT_LVAL;
1072 sv.c.i = l;
1073 store(r, &sv);
1074 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1075 /* x86 specific: need to pop fp register ST0 if saved */
1076 if (r == TREG_ST0) {
1077 o(0xd8dd); /* fstp %st(0) */
1079 #endif
1080 #if PTR_SIZE == 4
1081 /* special long long case */
1082 if ((type->t & VT_BTYPE) == VT_LLONG) {
1083 sv.c.i += 4;
1084 store(p->r2, &sv);
1086 #endif
1087 saved = 1;
1089 /* mark that stack entry as being saved on the stack */
1090 if (p->r & VT_LVAL) {
1091 /* also clear the bounded flag because the
1092 relocation address of the function was stored in
1093 p->c.i */
1094 p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
1095 } else {
1096 p->r = lvalue_type(p->type.t) | VT_LOCAL;
1098 p->r2 = VT_CONST;
1099 p->c.i = l;
1104 #ifdef TCC_TARGET_ARM
1105 /* find a register of class 'rc2' with at most one reference on stack.
1106 * If none, call get_reg(rc) */
1107 ST_FUNC int get_reg_ex(int rc, int rc2)
1109 int r;
1110 SValue *p;
1112 for(r=0;r<NB_REGS;r++) {
1113 if (reg_classes[r] & rc2) {
1114 int n;
1115 n=0;
1116 for(p = vstack; p <= vtop; p++) {
1117 if ((p->r & VT_VALMASK) == r ||
1118 (p->r2 & VT_VALMASK) == r)
1119 n++;
1121 if (n <= 1)
1122 return r;
1125 return get_reg(rc);
1127 #endif
1129 /* find a free register of class 'rc'. If none, save one register */
1130 ST_FUNC int get_reg(int rc)
1132 int r;
1133 SValue *p;
1135 /* find a free register */
1136 for(r=0;r<NB_REGS;r++) {
1137 if (reg_classes[r] & rc) {
1138 if (nocode_wanted)
1139 return r;
1140 for(p=vstack;p<=vtop;p++) {
1141 if ((p->r & VT_VALMASK) == r ||
1142 (p->r2 & VT_VALMASK) == r)
1143 goto notfound;
1145 return r;
1147 notfound: ;
1150 /* no register left : free the first one on the stack (VERY
1151 IMPORTANT to start from the bottom to ensure that we don't
1152 spill registers used in gen_opi()) */
1153 for(p=vstack;p<=vtop;p++) {
1154 /* look at second register (if long long) */
1155 r = p->r2 & VT_VALMASK;
1156 if (r < VT_CONST && (reg_classes[r] & rc))
1157 goto save_found;
1158 r = p->r & VT_VALMASK;
1159 if (r < VT_CONST && (reg_classes[r] & rc)) {
1160 save_found:
1161 save_reg(r);
1162 return r;
1165 /* Should never come here */
1166 return -1;
1169 /* find a free temporary local variable matching the requested size and alignment and return its stack offset. If none is found, allocate a new temporary stack variable. */
1170 static int get_temp_local_var(int size,int align){
1171 int i;
1172 struct temp_local_variable *temp_var;
1173 int found_var;
1174 SValue *p;
1175 int r;
1176 char free;
1177 char found;
1178 found=0;
1179 for(i=0;i<nb_temp_local_vars;i++){
1180 temp_var=&arr_temp_local_vars[i];
1181 if(temp_var->size<size||align!=temp_var->align){
1182 continue;
1184 /*check if temp_var is free*/
1185 free=1;
1186 for(p=vstack;p<=vtop;p++) {
1187 r=p->r&VT_VALMASK;
1188 if(r==VT_LOCAL||r==VT_LLOCAL){
1189 if(p->c.i==temp_var->location){
1190 free=0;
1191 break;
1195 if(free){
1196 found_var=temp_var->location;
1197 found=1;
1198 break;
1201 if(!found){
1202 loc = (loc - size) & -align;
1203 if(nb_temp_local_vars<MAX_TEMP_LOCAL_VARIABLE_NUMBER){
1204 temp_var=&arr_temp_local_vars[i];
1205 temp_var->location=loc;
1206 temp_var->size=size;
1207 temp_var->align=align;
1208 nb_temp_local_vars++;
1210 found_var=loc;
1212 return found_var;
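/* Note (illustrative): a recorded spill slot is considered free when no
   value still on the value stack refers to its stack offset; only when every
   matching slot is live, or none matches the size/alignment, is new room
   carved out of 'loc'.  At most MAX_TEMP_LOCAL_VARIABLE_NUMBER slots are
   remembered for reuse; clear_temp_local_var_list() below empties the list. */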
1215 static void clear_temp_local_var_list(){
1216 nb_temp_local_vars=0;
1219 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1220 if needed */
1221 static void move_reg(int r, int s, int t)
1223 SValue sv;
1225 if (r != s) {
1226 save_reg(r);
1227 sv.type.t = t;
1228 sv.type.ref = NULL;
1229 sv.r = s;
1230 sv.c.i = 0;
1231 load(r, &sv);
1235 /* get address of vtop (vtop MUST BE an lvalue) */
1236 ST_FUNC void gaddrof(void)
1238 vtop->r &= ~VT_LVAL;
1239 /* tricky: if saved lvalue, then we can go back to lvalue */
1240 if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
1241 vtop->r = (vtop->r & ~(VT_VALMASK | VT_LVAL_TYPE)) | VT_LOCAL | VT_LVAL;
1246 #ifdef CONFIG_TCC_BCHECK
1247 /* generate lvalue bound code */
1248 static void gbound(void)
1250 int lval_type;
1251 CType type1;
1253 vtop->r &= ~VT_MUSTBOUND;
1254 /* if lvalue, then use checking code before dereferencing */
1255 if (vtop->r & VT_LVAL) {
1256 /* if not VT_BOUNDED value, then make one */
1257 if (!(vtop->r & VT_BOUNDED)) {
1258 lval_type = vtop->r & (VT_LVAL_TYPE | VT_LVAL);
1259 /* must save type because we must set it to int to get pointer */
1260 type1 = vtop->type;
1261 vtop->type.t = VT_PTR;
1262 gaddrof();
1263 vpushi(0);
1264 gen_bounded_ptr_add();
1265 vtop->r |= lval_type;
1266 vtop->type = type1;
1268 /* then check for dereferencing */
1269 gen_bounded_ptr_deref();
1272 #endif
1274 static void incr_bf_adr(int o)
1276 vtop->type = char_pointer_type;
1277 gaddrof();
1278 vpushi(o);
1279 gen_op('+');
1280 vtop->type.t = (vtop->type.t & ~(VT_BTYPE|VT_DEFSIGN))
1281 | (VT_BYTE|VT_UNSIGNED);
1282 vtop->r = (vtop->r & ~VT_LVAL_TYPE)
1283 | (VT_LVAL_BYTE|VT_LVAL_UNSIGNED|VT_LVAL);
1286 /* single-byte load mode for packed or otherwise unaligned bitfields */
1287 static void load_packed_bf(CType *type, int bit_pos, int bit_size)
1289 int n, o, bits;
1290 save_reg_upstack(vtop->r, 1);
1291 vpush64(type->t & VT_BTYPE, 0); // B X
1292 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
1293 do {
1294 vswap(); // X B
1295 incr_bf_adr(o);
1296 vdup(); // X B B
1297 n = 8 - bit_pos;
1298 if (n > bit_size)
1299 n = bit_size;
1300 if (bit_pos)
1301 vpushi(bit_pos), gen_op(TOK_SHR), bit_pos = 0; // X B Y
1302 if (n < 8)
1303 vpushi((1 << n) - 1), gen_op('&');
1304 gen_cast(type);
1305 if (bits)
1306 vpushi(bits), gen_op(TOK_SHL);
1307 vrotb(3); // B Y X
1308 gen_op('|'); // B X
1309 bits += n, bit_size -= n, o = 1;
1310 } while (bit_size);
1311 vswap(), vpop();
1312 if (!(type->t & VT_UNSIGNED)) {
1313 n = ((type->t & VT_BTYPE) == VT_LLONG ? 64 : 32) - bits;
1314 vpushi(n), gen_op(TOK_SHL);
1315 vpushi(n), gen_op(TOK_SAR);
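/* Worked example (illustrative): for a signed int bitfield with bit_pos = 6
   and bit_size = 5 the loop runs twice: the first byte is shifted right by 6
   and masked to 2 bits, the next byte is masked to 3 bits and shifted left
   by 2 before being OR-ed in; the final SHL/SAR pair by 32 - 5 = 27 then
   sign-extends the 5-bit result. */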
1319 /* single-byte store mode for packed or otherwise unaligned bitfields */
1320 static void store_packed_bf(int bit_pos, int bit_size)
1322 int bits, n, o, m, c;
1324 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1325 vswap(); // X B
1326 save_reg_upstack(vtop->r, 1);
1327 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
1328 do {
1329 incr_bf_adr(o); // X B
1330 vswap(); //B X
1331 c ? vdup() : gv_dup(); // B V X
1332 vrott(3); // X B V
1333 if (bits)
1334 vpushi(bits), gen_op(TOK_SHR);
1335 if (bit_pos)
1336 vpushi(bit_pos), gen_op(TOK_SHL);
1337 n = 8 - bit_pos;
1338 if (n > bit_size)
1339 n = bit_size;
1340 if (n < 8) {
1341 m = ((1 << n) - 1) << bit_pos;
1342 vpushi(m), gen_op('&'); // X B V1
1343 vpushv(vtop-1); // X B V1 B
1344 vpushi(m & 0x80 ? ~m & 0x7f : ~m);
1345 gen_op('&'); // X B V1 B1
1346 gen_op('|'); // X B V2
1348 vdup(), vtop[-1] = vtop[-2]; // X B B V2
1349 vstore(), vpop(); // X B
1350 bits += n, bit_size -= n, bit_pos = 0, o = 1;
1351 } while (bit_size);
1352 vpop(), vpop();
1355 static int adjust_bf(SValue *sv, int bit_pos, int bit_size)
1357 int t;
1358 if (0 == sv->type.ref)
1359 return 0;
1360 t = sv->type.ref->auxtype;
1361 if (t != -1 && t != VT_STRUCT) {
1362 sv->type.t = (sv->type.t & ~VT_BTYPE) | t;
1363 sv->r = (sv->r & ~VT_LVAL_TYPE) | lvalue_type(sv->type.t);
1365 return t;
1368 /* store vtop in a register belonging to class 'rc'. lvalues are
1369 converted to values. Cannot be used if the value cannot be
1370 converted to a register value (such as structures). */
1371 ST_FUNC int gv(int rc)
1373 int r, bit_pos, bit_size, size, align, rc2;
1375 /* NOTE: get_reg can modify vstack[] */
1376 if (vtop->type.t & VT_BITFIELD) {
1377 CType type;
1379 bit_pos = BIT_POS(vtop->type.t);
1380 bit_size = BIT_SIZE(vtop->type.t);
1381 /* remove bit field info to avoid loops */
1382 vtop->type.t &= ~VT_STRUCT_MASK;
1384 type.ref = NULL;
1385 type.t = vtop->type.t & VT_UNSIGNED;
1386 if ((vtop->type.t & VT_BTYPE) == VT_BOOL)
1387 type.t |= VT_UNSIGNED;
1389 r = adjust_bf(vtop, bit_pos, bit_size);
1391 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
1392 type.t |= VT_LLONG;
1393 else
1394 type.t |= VT_INT;
1396 if (r == VT_STRUCT) {
1397 load_packed_bf(&type, bit_pos, bit_size);
1398 } else {
1399 int bits = (type.t & VT_BTYPE) == VT_LLONG ? 64 : 32;
1400 /* cast to int to propagate signedness in following ops */
1401 gen_cast(&type);
1402 /* generate shifts */
1403 vpushi(bits - (bit_pos + bit_size));
1404 gen_op(TOK_SHL);
1405 vpushi(bits - bit_size);
1406 /* NOTE: transformed to SHR if unsigned */
1407 gen_op(TOK_SAR);
1409 r = gv(rc);
1410 } else {
1411 if (is_float(vtop->type.t) &&
1412 (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1413 unsigned long offset;
1414 /* CPUs usually cannot use float constants, so we store them
1415 generically in data segment */
1416 size = type_size(&vtop->type, &align);
1417 if (NODATA_WANTED)
1418 size = 0, align = 1;
1419 offset = section_add(data_section, size, align);
1420 vpush_ref(&vtop->type, data_section, offset, size);
1421 vswap();
1422 init_putv(&vtop->type, data_section, offset);
1423 vtop->r |= VT_LVAL;
1425 #ifdef CONFIG_TCC_BCHECK
1426 if (vtop->r & VT_MUSTBOUND)
1427 gbound();
1428 #endif
1430 r = vtop->r & VT_VALMASK;
1431 rc2 = (rc & RC_FLOAT) ? RC_FLOAT : RC_INT;
1432 #ifndef TCC_TARGET_ARM64
1433 if (rc == RC_IRET)
1434 rc2 = RC_LRET;
1435 #ifdef TCC_TARGET_X86_64
1436 else if (rc == RC_FRET)
1437 rc2 = RC_QRET;
1438 #endif
1439 #endif
1440 /* need to reload if:
1441 - constant
1442 - lvalue (need to dereference pointer)
1443 - already a register, but not in the right class */
1444 if (r >= VT_CONST
1445 || (vtop->r & VT_LVAL)
1446 || !(reg_classes[r] & rc)
1447 #if PTR_SIZE == 8
1448 || ((vtop->type.t & VT_BTYPE) == VT_QLONG && !(reg_classes[vtop->r2] & rc2))
1449 || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT && !(reg_classes[vtop->r2] & rc2))
1450 #else
1451 || ((vtop->type.t & VT_BTYPE) == VT_LLONG && !(reg_classes[vtop->r2] & rc2))
1452 #endif
1455 r = get_reg(rc);
1456 #if PTR_SIZE == 8
1457 if (((vtop->type.t & VT_BTYPE) == VT_QLONG) || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT)) {
1458 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
1459 #else
1460 if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
1461 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
1462 unsigned long long ll;
1463 #endif
1464 int r2, original_type;
1465 original_type = vtop->type.t;
1466 /* two register type load : expand to two words
1467 temporarily */
1468 #if PTR_SIZE == 4
1469 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1470 /* load constant */
1471 ll = vtop->c.i;
1472 vtop->c.i = ll; /* first word */
1473 load(r, vtop);
1474 vtop->r = r; /* save register value */
1475 vpushi(ll >> 32); /* second word */
1476 } else
1477 #endif
1478 if (vtop->r & VT_LVAL) {
1479 /* We do not want to modify the long long
1480 pointer here, so the safest (and least
1481 efficient) approach is to save all the other registers
1482 on the stack. XXX: totally inefficient. */
1483 #if 0
1484 save_regs(1);
1485 #else
1486 /* lvalue_save: save only if used further down the stack */
1487 save_reg_upstack(vtop->r, 1);
1488 #endif
1489 /* load from memory */
1490 vtop->type.t = load_type;
1491 load(r, vtop);
1492 vdup();
1493 vtop[-1].r = r; /* save register value */
1494 /* increment pointer to get second word */
1495 vtop->type.t = addr_type;
1496 gaddrof();
1497 vpushi(load_size);
1498 gen_op('+');
1499 vtop->r |= VT_LVAL;
1500 vtop->type.t = load_type;
1501 } else {
1502 /* move registers */
1503 load(r, vtop);
1504 vdup();
1505 vtop[-1].r = r; /* save register value */
1506 vtop->r = vtop[-1].r2;
1508 /* Allocate second register. Here we rely on the fact that
1509 get_reg() tries first to free r2 of an SValue. */
1510 r2 = get_reg(rc2);
1511 load(r2, vtop);
1512 vpop();
1513 /* write second register */
1514 vtop->r2 = r2;
1515 vtop->type.t = original_type;
1516 } else if ((vtop->r & VT_LVAL) && !is_float(vtop->type.t)) {
1517 int t1, t;
1518 /* lvalue of scalar type : need to use lvalue type
1519 because of possible cast */
1520 t = vtop->type.t;
1521 t1 = t;
1522 /* compute memory access type */
1523 if (vtop->r & VT_LVAL_BYTE)
1524 t = VT_BYTE;
1525 else if (vtop->r & VT_LVAL_SHORT)
1526 t = VT_SHORT;
1527 if (vtop->r & VT_LVAL_UNSIGNED)
1528 t |= VT_UNSIGNED;
1529 vtop->type.t = t;
1530 load(r, vtop);
1531 /* restore wanted type */
1532 vtop->type.t = t1;
1533 } else {
1534 /* one register type load */
1535 load(r, vtop);
1538 vtop->r = r;
1539 #ifdef TCC_TARGET_C67
1540 /* uses register pairs for doubles */
1541 if ((vtop->type.t & VT_BTYPE) == VT_DOUBLE)
1542 vtop->r2 = r+1;
1543 #endif
1545 return r;
1548 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1549 ST_FUNC void gv2(int rc1, int rc2)
1551 int v;
1553 /* generate more generic register first. But VT_JMP or VT_CMP
1554 values must be generated first in all cases to avoid possible
1555 reload errors */
1556 v = vtop[0].r & VT_VALMASK;
1557 if (v != VT_CMP && (v & ~1) != VT_JMP && rc1 <= rc2) {
1558 vswap();
1559 gv(rc1);
1560 vswap();
1561 gv(rc2);
1562 /* test if reload is needed for first register */
1563 if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
1564 vswap();
1565 gv(rc1);
1566 vswap();
1568 } else {
1569 gv(rc2);
1570 vswap();
1571 gv(rc1);
1572 vswap();
1573 /* test if reload is needed for first register */
1574 if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
1575 gv(rc2);
1580 #ifndef TCC_TARGET_ARM64
1581 /* wrapper around RC_FRET to return a register by type */
1582 static int rc_fret(int t)
1584 #ifdef TCC_TARGET_X86_64
1585 if (t == VT_LDOUBLE) {
1586 return RC_ST0;
1588 #endif
1589 return RC_FRET;
1591 #endif
1593 /* wrapper around REG_FRET to return a register by type */
1594 static int reg_fret(int t)
1596 #ifdef TCC_TARGET_X86_64
1597 if (t == VT_LDOUBLE) {
1598 return TREG_ST0;
1600 #endif
1601 return REG_FRET;
1604 #if PTR_SIZE == 4
1605 /* expand a 64-bit value on the stack into two ints */
1606 ST_FUNC void lexpand(void)
1608 int u, v;
1609 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1610 v = vtop->r & (VT_VALMASK | VT_LVAL);
1611 if (v == VT_CONST) {
1612 vdup();
1613 vtop[0].c.i >>= 32;
1614 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1615 vdup();
1616 vtop[0].c.i += 4;
1617 } else {
1618 gv(RC_INT);
1619 vdup();
1620 vtop[0].r = vtop[-1].r2;
1621 vtop[0].r2 = vtop[-1].r2 = VT_CONST;
1623 vtop[0].type.t = vtop[-1].type.t = VT_INT | u;
1625 #endif
1627 #if PTR_SIZE == 4
1628 /* build a long long from two ints */
1629 static void lbuild(int t)
1631 gv2(RC_INT, RC_INT);
1632 vtop[-1].r2 = vtop[0].r;
1633 vtop[-1].type.t = t;
1634 vpop();
1636 #endif
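/* Note (illustrative, PTR_SIZE == 4 targets only): lexpand() splits a 64-bit
   stack entry into two VT_INT entries, low word below the high word, by
   duplicating the entry and either shifting the constant, bumping an
   lvalue's offset by 4, or exposing the r/r2 register pair; lbuild() is the
   inverse and re-packs two ints into a single long long entry. */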
1638 /* convert stack entry to register and duplicate its value in another
1639 register */
1640 static void gv_dup(void)
1642 int rc, t, r, r1;
1643 SValue sv;
1645 t = vtop->type.t;
1646 #if PTR_SIZE == 4
1647 if ((t & VT_BTYPE) == VT_LLONG) {
1648 if (t & VT_BITFIELD) {
1649 gv(RC_INT);
1650 t = vtop->type.t;
1652 lexpand();
1653 gv_dup();
1654 vswap();
1655 vrotb(3);
1656 gv_dup();
1657 vrotb(4);
1658 /* stack: H L L1 H1 */
1659 lbuild(t);
1660 vrotb(3);
1661 vrotb(3);
1662 vswap();
1663 lbuild(t);
1664 vswap();
1665 } else
1666 #endif
1668 /* duplicate value */
1669 rc = RC_INT;
1670 sv.type.t = VT_INT;
1671 if (is_float(t)) {
1672 rc = RC_FLOAT;
1673 #ifdef TCC_TARGET_X86_64
1674 if ((t & VT_BTYPE) == VT_LDOUBLE) {
1675 rc = RC_ST0;
1677 #endif
1678 sv.type.t = t;
1680 r = gv(rc);
1681 r1 = get_reg(rc);
1682 sv.r = r;
1683 sv.c.i = 0;
1684 load(r1, &sv); /* move r to r1 */
1685 vdup();
1686 /* duplicates value */
1687 if (r != r1)
1688 vtop->r = r1;
1692 /* Generate value test
1694 * Generate a test for any value (jump, comparison and integers) */
1695 ST_FUNC int gvtst(int inv, int t)
1697 int v = vtop->r & VT_VALMASK;
1698 if (v != VT_CMP && v != VT_JMP && v != VT_JMPI) {
1699 vpushi(0);
1700 gen_op(TOK_NE);
1702 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1703 /* constant jmp optimization */
1704 if ((vtop->c.i != 0) != inv)
1705 t = gjmp(t);
1706 vtop--;
1707 return t;
1709 return gtst(inv, t);
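/* Example (illustrative): when the tested value is a compile-time constant,
   no comparison is generated at all: the jump is either emitted
   unconditionally via gjmp() or dropped, which is why 'if (1)' and 'if (0)'
   compile without any runtime test. */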
1712 #if PTR_SIZE == 4
1713 /* generate CPU independent (unsigned) long long operations */
1714 static void gen_opl(int op)
1716 int t, a, b, op1, c, i;
1717 int func;
1718 unsigned short reg_iret = REG_IRET;
1719 unsigned short reg_lret = REG_LRET;
1720 SValue tmp;
1722 switch(op) {
1723 case '/':
1724 case TOK_PDIV:
1725 func = TOK___divdi3;
1726 goto gen_func;
1727 case TOK_UDIV:
1728 func = TOK___udivdi3;
1729 goto gen_func;
1730 case '%':
1731 func = TOK___moddi3;
1732 goto gen_mod_func;
1733 case TOK_UMOD:
1734 func = TOK___umoddi3;
1735 gen_mod_func:
1736 #ifdef TCC_ARM_EABI
1737 reg_iret = TREG_R2;
1738 reg_lret = TREG_R3;
1739 #endif
1740 gen_func:
1741 /* call generic long long function */
1742 vpush_global_sym(&func_old_type, func);
1743 vrott(3);
1744 gfunc_call(2);
1745 vpushi(0);
1746 vtop->r = reg_iret;
1747 vtop->r2 = reg_lret;
1748 break;
1749 case '^':
1750 case '&':
1751 case '|':
1752 case '*':
1753 case '+':
1754 case '-':
1755 //pv("gen_opl A",0,2);
1756 t = vtop->type.t;
1757 vswap();
1758 lexpand();
1759 vrotb(3);
1760 lexpand();
1761 /* stack: L1 H1 L2 H2 */
1762 tmp = vtop[0];
1763 vtop[0] = vtop[-3];
1764 vtop[-3] = tmp;
1765 tmp = vtop[-2];
1766 vtop[-2] = vtop[-3];
1767 vtop[-3] = tmp;
1768 vswap();
1769 /* stack: H1 H2 L1 L2 */
1770 //pv("gen_opl B",0,4);
1771 if (op == '*') {
1772 vpushv(vtop - 1);
1773 vpushv(vtop - 1);
1774 gen_op(TOK_UMULL);
1775 lexpand();
1776 /* stack: H1 H2 L1 L2 ML MH */
1777 for(i=0;i<4;i++)
1778 vrotb(6);
1779 /* stack: ML MH H1 H2 L1 L2 */
1780 tmp = vtop[0];
1781 vtop[0] = vtop[-2];
1782 vtop[-2] = tmp;
1783 /* stack: ML MH H1 L2 H2 L1 */
1784 gen_op('*');
1785 vrotb(3);
1786 vrotb(3);
1787 gen_op('*');
1788 /* stack: ML MH M1 M2 */
1789 gen_op('+');
1790 gen_op('+');
1791 } else if (op == '+' || op == '-') {
1792 /* XXX: add non carry method too (for MIPS or alpha) */
1793 if (op == '+')
1794 op1 = TOK_ADDC1;
1795 else
1796 op1 = TOK_SUBC1;
1797 gen_op(op1);
1798 /* stack: H1 H2 (L1 op L2) */
1799 vrotb(3);
1800 vrotb(3);
1801 gen_op(op1 + 1); /* TOK_xxxC2 */
1802 } else {
1803 gen_op(op);
1804 /* stack: H1 H2 (L1 op L2) */
1805 vrotb(3);
1806 vrotb(3);
1807 /* stack: (L1 op L2) H1 H2 */
1808 gen_op(op);
1809 /* stack: (L1 op L2) (H1 op H2) */
1811 /* stack: L H */
1812 lbuild(t);
1813 break;
1814 case TOK_SAR:
1815 case TOK_SHR:
1816 case TOK_SHL:
1817 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1818 t = vtop[-1].type.t;
1819 vswap();
1820 lexpand();
1821 vrotb(3);
1822 /* stack: L H shift */
1823 c = (int)vtop->c.i;
1824 /* constant: simpler */
1825 /* NOTE: all comments are for SHL. the other cases are
1826 done by swapping words */
1827 vpop();
1828 if (op != TOK_SHL)
1829 vswap();
1830 if (c >= 32) {
1831 /* stack: L H */
1832 vpop();
1833 if (c > 32) {
1834 vpushi(c - 32);
1835 gen_op(op);
1837 if (op != TOK_SAR) {
1838 vpushi(0);
1839 } else {
1840 gv_dup();
1841 vpushi(31);
1842 gen_op(TOK_SAR);
1844 vswap();
1845 } else {
1846 vswap();
1847 gv_dup();
1848 /* stack: H L L */
1849 vpushi(c);
1850 gen_op(op);
1851 vswap();
1852 vpushi(32 - c);
1853 if (op == TOK_SHL)
1854 gen_op(TOK_SHR);
1855 else
1856 gen_op(TOK_SHL);
1857 vrotb(3);
1858 /* stack: L L H */
1859 vpushi(c);
1860 if (op == TOK_SHL)
1861 gen_op(TOK_SHL);
1862 else
1863 gen_op(TOK_SHR);
1864 gen_op('|');
1866 if (op != TOK_SHL)
1867 vswap();
1868 lbuild(t);
1869 } else {
1870 /* XXX: should provide a faster fallback on x86 ? */
1871 switch(op) {
1872 case TOK_SAR:
1873 func = TOK___ashrdi3;
1874 goto gen_func;
1875 case TOK_SHR:
1876 func = TOK___lshrdi3;
1877 goto gen_func;
1878 case TOK_SHL:
1879 func = TOK___ashldi3;
1880 goto gen_func;
1883 break;
1884 default:
1885 /* compare operations */
1886 t = vtop->type.t;
1887 vswap();
1888 lexpand();
1889 vrotb(3);
1890 lexpand();
1891 /* stack: L1 H1 L2 H2 */
1892 tmp = vtop[-1];
1893 vtop[-1] = vtop[-2];
1894 vtop[-2] = tmp;
1895 /* stack: L1 L2 H1 H2 */
1896 /* compare high */
1897 op1 = op;
1898 /* when values are equal, we need to compare low words. since
1899 the jump is inverted, we invert the test too. */
1900 if (op1 == TOK_LT)
1901 op1 = TOK_LE;
1902 else if (op1 == TOK_GT)
1903 op1 = TOK_GE;
1904 else if (op1 == TOK_ULT)
1905 op1 = TOK_ULE;
1906 else if (op1 == TOK_UGT)
1907 op1 = TOK_UGE;
1908 a = 0;
1909 b = 0;
1910 gen_op(op1);
1911 if (op == TOK_NE) {
1912 b = gvtst(0, 0);
1913 } else {
1914 a = gvtst(1, 0);
1915 if (op != TOK_EQ) {
1916 /* generate non equal test */
1917 vpushi(TOK_NE);
1918 vtop->r = VT_CMP;
1919 b = gvtst(0, 0);
1922 /* compare low. Always unsigned */
1923 op1 = op;
1924 if (op1 == TOK_LT)
1925 op1 = TOK_ULT;
1926 else if (op1 == TOK_LE)
1927 op1 = TOK_ULE;
1928 else if (op1 == TOK_GT)
1929 op1 = TOK_UGT;
1930 else if (op1 == TOK_GE)
1931 op1 = TOK_UGE;
1932 gen_op(op1);
1933 a = gvtst(1, a);
1934 gsym(b);
1935 vseti(VT_JMPI, a);
1936 break;
1939 #endif
1941 static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
1943 uint64_t x = (a >> 63 ? -a : a) / (b >> 63 ? -b : b);
1944 return (a ^ b) >> 63 ? -x : x;
1947 static int gen_opic_lt(uint64_t a, uint64_t b)
1949 return (a ^ (uint64_t)1 << 63) < (b ^ (uint64_t)1 << 63);
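/* Examples (illustrative): gen_opic_sdiv() folds signed division on the
   unsigned representation by dividing the magnitudes and restoring the sign,
   so a = -7, b = 2 gives 3 with the sign of a^b applied, i.e. -3 (truncation
   toward zero, as C requires).  gen_opic_lt() flips the sign bit of both
   operands, which maps signed order onto unsigned order: -1 becomes
   0x7fff...ffff and 0 becomes 0x8000...0000, so -1 < 0 holds. */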
1952 /* handle integer constant optimizations and various
1953 machine-independent optimizations */
1954 static void gen_opic(int op)
1956 SValue *v1 = vtop - 1;
1957 SValue *v2 = vtop;
1958 int t1 = v1->type.t & VT_BTYPE;
1959 int t2 = v2->type.t & VT_BTYPE;
1960 int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1961 int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1962 uint64_t l1 = c1 ? v1->c.i : 0;
1963 uint64_t l2 = c2 ? v2->c.i : 0;
1964 int shm = (t1 == VT_LLONG) ? 63 : 31;
1966 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
1967 l1 = ((uint32_t)l1 |
1968 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
1969 if (t2 != VT_LLONG && (PTR_SIZE != 8 || t2 != VT_PTR))
1970 l2 = ((uint32_t)l2 |
1971 (v2->type.t & VT_UNSIGNED ? 0 : -(l2 & 0x80000000)));
1973 if (c1 && c2) {
1974 switch(op) {
1975 case '+': l1 += l2; break;
1976 case '-': l1 -= l2; break;
1977 case '&': l1 &= l2; break;
1978 case '^': l1 ^= l2; break;
1979 case '|': l1 |= l2; break;
1980 case '*': l1 *= l2; break;
1982 case TOK_PDIV:
1983 case '/':
1984 case '%':
1985 case TOK_UDIV:
1986 case TOK_UMOD:
1987 /* if division by zero, generate explicit division */
1988 if (l2 == 0) {
1989 if (const_wanted)
1990 tcc_error("division by zero in constant");
1991 goto general_case;
1993 switch(op) {
1994 default: l1 = gen_opic_sdiv(l1, l2); break;
1995 case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
1996 case TOK_UDIV: l1 = l1 / l2; break;
1997 case TOK_UMOD: l1 = l1 % l2; break;
1999 break;
2000 case TOK_SHL: l1 <<= (l2 & shm); break;
2001 case TOK_SHR: l1 >>= (l2 & shm); break;
2002 case TOK_SAR:
2003 l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
2004 break;
2005 /* tests */
2006 case TOK_ULT: l1 = l1 < l2; break;
2007 case TOK_UGE: l1 = l1 >= l2; break;
2008 case TOK_EQ: l1 = l1 == l2; break;
2009 case TOK_NE: l1 = l1 != l2; break;
2010 case TOK_ULE: l1 = l1 <= l2; break;
2011 case TOK_UGT: l1 = l1 > l2; break;
2012 case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
2013 case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
2014 case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
2015 case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
2016 /* logical */
2017 case TOK_LAND: l1 = l1 && l2; break;
2018 case TOK_LOR: l1 = l1 || l2; break;
2019 default:
2020 goto general_case;
2022 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
2023 l1 = ((uint32_t)l1 |
2024 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
2025 v1->c.i = l1;
2026 vtop--;
2027 } else {
2028 /* if commutative ops, put c2 as constant */
2029 if (c1 && (op == '+' || op == '&' || op == '^' ||
2030 op == '|' || op == '*')) {
2031 vswap();
2032 c2 = c1; //c = c1, c1 = c2, c2 = c;
2033 l2 = l1; //l = l1, l1 = l2, l2 = l;
2035 if (!const_wanted &&
2036 c1 && ((l1 == 0 &&
2037 (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
2038 (l1 == -1 && op == TOK_SAR))) {
2039 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
2040 vtop--;
2041 } else if (!const_wanted &&
2042 c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
2043 (op == '|' &&
2044 (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))) ||
2045 (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
2046 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
2047 if (l2 == 1)
2048 vtop->c.i = 0;
2049 vswap();
2050 vtop--;
2051 } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
2052 op == TOK_PDIV) &&
2053 l2 == 1) ||
2054 ((op == '+' || op == '-' || op == '|' || op == '^' ||
2055 op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
2056 l2 == 0) ||
2057 (op == '&' &&
2058 (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))))) {
2059 /* filter out NOP operations like x*1, x-0, x&-1... */
2060 vtop--;
2061 } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
2062 /* try to use shifts instead of muls or divs */
2063 if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
2064 int n = -1;
2065 while (l2) {
2066 l2 >>= 1;
2067 n++;
2069 vtop->c.i = n;
2070 if (op == '*')
2071 op = TOK_SHL;
2072 else if (op == TOK_PDIV)
2073 op = TOK_SAR;
2074 else
2075 op = TOK_SHR;
2077 goto general_case;
2078 } else if (c2 && (op == '+' || op == '-') &&
2079 (((vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM))
2080 || (vtop[-1].r & (VT_VALMASK | VT_LVAL)) == VT_LOCAL)) {
2081 /* symbol + constant case */
2082 if (op == '-')
2083 l2 = -l2;
2084 l2 += vtop[-1].c.i;
2085 /* The backends can't always deal with addends to symbols
2086 larger than +-1<<31. Don't construct such. */
2087 if ((int)l2 != l2)
2088 goto general_case;
2089 vtop--;
2090 vtop->c.i = l2;
2091 } else {
2092 general_case:
2093 /* call low level op generator */
2094 if (t1 == VT_LLONG || t2 == VT_LLONG ||
2095 (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
2096 gen_opl(op);
2097 else
2098 gen_opi(op);
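/* Example (illustrative): the "shifts instead of muls or divs" branch above
   only fires for power-of-two constants: x * 8 becomes x << 3, an unsigned
   division by 8 (TOK_UDIV) becomes a logical shift right by 3, and the
   TOK_PDIV used to scale pointer differences becomes an arithmetic shift
   right; the constant operand is simply replaced by the shift count. */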
2103 /* generate a floating point operation with constant propagation */
2104 static void gen_opif(int op)
2106 int c1, c2;
2107 SValue *v1, *v2;
2108 #if defined _MSC_VER && defined _AMD64_
2109 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2110 volatile
2111 #endif
2112 long double f1, f2;
2114 v1 = vtop - 1;
2115 v2 = vtop;
2116 /* currently, we cannot do computations with forward symbols */
2117 c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2118 c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2119 if (c1 && c2) {
2120 if (v1->type.t == VT_FLOAT) {
2121 f1 = v1->c.f;
2122 f2 = v2->c.f;
2123 } else if (v1->type.t == VT_DOUBLE) {
2124 f1 = v1->c.d;
2125 f2 = v2->c.d;
2126 } else {
2127 f1 = v1->c.ld;
2128 f2 = v2->c.ld;
2131 /* NOTE: we only do constant propagation if finite number (not
2132 NaN or infinity) (ANSI spec) */
2133 if (!ieee_finite(f1) || !ieee_finite(f2))
2134 goto general_case;
2136 switch(op) {
2137 case '+': f1 += f2; break;
2138 case '-': f1 -= f2; break;
2139 case '*': f1 *= f2; break;
2140 case '/':
2141 if (f2 == 0.0) {
2142 /* If not in initializer we need to potentially generate
2143 FP exceptions at runtime, otherwise we want to fold. */
2144 if (!const_wanted)
2145 goto general_case;
2147 f1 /= f2;
2148 break;
2149 /* XXX: also handles tests ? */
2150 default:
2151 goto general_case;
2153 /* XXX: overflow test ? */
2154 if (v1->type.t == VT_FLOAT) {
2155 v1->c.f = f1;
2156 } else if (v1->type.t == VT_DOUBLE) {
2157 v1->c.d = f1;
2158 } else {
2159 v1->c.ld = f1;
2161 vtop--;
2162 } else {
2163 general_case:
2164 gen_opf(op);
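/* Example (illustrative): with both operands constant, 1.5 + 2.5 is folded
   to 4.0 at compile time.  A division by zero is folded only while parsing a
   constant expression (const_wanted); otherwise the operation is emitted so
   that any FP exception happens at run time, and operands that are already
   NaN or infinite are never folded (ieee_finite() check above). */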
2168 static int pointed_size(CType *type)
2170 int align;
2171 return type_size(pointed_type(type), &align);
2174 static void vla_runtime_pointed_size(CType *type)
2176 int align;
2177 vla_runtime_type_size(pointed_type(type), &align);
2180 static inline int is_null_pointer(SValue *p)
2182 if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
2183 return 0;
2184 return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
2185 ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
2186 ((p->type.t & VT_BTYPE) == VT_PTR &&
2187 (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0) &&
2188 ((pointed_type(&p->type)->t & VT_BTYPE) == VT_VOID) &&
2189 0 == (pointed_type(&p->type)->t & (VT_CONSTANT | VT_VOLATILE))
2192 static inline int is_integer_btype(int bt)
2194 return (bt == VT_BYTE || bt == VT_SHORT ||
2195 bt == VT_INT || bt == VT_LLONG);
2198 /* check types for comparison or subtraction of pointers */
2199 static void check_comparison_pointer_types(SValue *p1, SValue *p2, int op)
2201 CType *type1, *type2, tmp_type1, tmp_type2;
2202 int bt1, bt2;
2204 /* null pointers are accepted for all comparisons, as in gcc */
2205 if (is_null_pointer(p1) || is_null_pointer(p2))
2206 return;
2207 type1 = &p1->type;
2208 type2 = &p2->type;
2209 bt1 = type1->t & VT_BTYPE;
2210 bt2 = type2->t & VT_BTYPE;
2211 /* accept comparison between pointer and integer with a warning */
2212 if ((is_integer_btype(bt1) || is_integer_btype(bt2)) && op != '-') {
2213 if (op != TOK_LOR && op != TOK_LAND )
2214 tcc_warning("comparison between pointer and integer");
2215 return;
2218 /* both must be pointers or implicit function pointers */
2219 if (bt1 == VT_PTR) {
2220 type1 = pointed_type(type1);
2221 } else if (bt1 != VT_FUNC)
2222 goto invalid_operands;
2224 if (bt2 == VT_PTR) {
2225 type2 = pointed_type(type2);
2226 } else if (bt2 != VT_FUNC) {
2227 invalid_operands:
2228 tcc_error("invalid operands to binary %s", get_tok_str(op, NULL));
2230 if ((type1->t & VT_BTYPE) == VT_VOID ||
2231 (type2->t & VT_BTYPE) == VT_VOID)
2232 return;
2233 tmp_type1 = *type1;
2234 tmp_type2 = *type2;
2235 tmp_type1.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
2236 tmp_type2.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
2237 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
2238 /* gcc-like error if '-' is used */
2239 if (op == '-')
2240 goto invalid_operands;
2241 else
2242 tcc_warning("comparison of distinct pointer types lacks a cast");
2246 /* generic gen_op: handles types problems */
2247 ST_FUNC void gen_op(int op)
2249 int u, t1, t2, bt1, bt2, t;
2250 CType type1;
2252 redo:
2253 t1 = vtop[-1].type.t;
2254 t2 = vtop[0].type.t;
2255 bt1 = t1 & VT_BTYPE;
2256 bt2 = t2 & VT_BTYPE;
2258 if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
2259 tcc_error("operation on a struct");
2260 } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
2261 if (bt2 == VT_FUNC) {
2262 mk_pointer(&vtop->type);
2263 gaddrof();
2265 if (bt1 == VT_FUNC) {
2266 vswap();
2267 mk_pointer(&vtop->type);
2268 gaddrof();
2269 vswap();
2271 goto redo;
2272 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
2273 /* at least one operand is a pointer */
2274 /* relational op: must be both pointers */
2275 if (op >= TOK_ULT && op <= TOK_LOR) {
2276 check_comparison_pointer_types(vtop - 1, vtop, op);
2277 /* pointers are handled as unsigned */
2278 #if PTR_SIZE == 8
2279 t = VT_LLONG | VT_UNSIGNED;
2280 #else
2281 t = VT_INT | VT_UNSIGNED;
2282 #endif
2283 goto std_op;
2285 /* if both pointers, then it must be the '-' op */
2286 if (bt1 == VT_PTR && bt2 == VT_PTR) {
2287 if (op != '-')
2288 tcc_error("cannot use pointers here");
2289 check_comparison_pointer_types(vtop - 1, vtop, op);
2290 /* XXX: check that types are compatible */
2291 if (vtop[-1].type.t & VT_VLA) {
2292 vla_runtime_pointed_size(&vtop[-1].type);
2293 } else {
2294 vpushi(pointed_size(&vtop[-1].type));
2296 vrott(3);
2297 gen_opic(op);
2298 vtop->type.t = ptrdiff_type.t;
2299 vswap();
2300 gen_op(TOK_PDIV);
2301 } else {
2302 /* exactly one pointer : must be '+' or '-'. */
2303 if (op != '-' && op != '+')
2304 tcc_error("cannot use pointers here");
2305 /* Put pointer as first operand */
2306 if (bt2 == VT_PTR) {
2307 vswap();
2308 t = t1, t1 = t2, t2 = t;
2310 #if PTR_SIZE == 4
2311 if ((vtop[0].type.t & VT_BTYPE) == VT_LLONG)
2312 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2313 gen_cast_s(VT_INT);
2314 #endif
2315 type1 = vtop[-1].type;
2316 type1.t &= ~VT_ARRAY;
2317 if (vtop[-1].type.t & VT_VLA)
2318 vla_runtime_pointed_size(&vtop[-1].type);
2319 else {
2320 u = pointed_size(&vtop[-1].type);
2321 if (u < 0)
2322 tcc_error("unknown array element size");
2323 #if PTR_SIZE == 8
2324 vpushll(u);
2325 #else
2326 /* XXX: cast to int ? (long long case) */
2327 vpushi(u);
2328 #endif
2330 gen_op('*');
2331 #if 0
2332 /* #ifdef CONFIG_TCC_BCHECK
2333 The main reason for removing this code:
2334 #include <stdio.h>
2335 int main ()
2336 {
2337 int v[10];
2338 int i = 10;
2339 int j = 9;
2340 fprintf(stderr, "v+i-j = %p\n", v+i-j);
2341 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
2342 }
2343 When this code is on, the output looks like
2344 v+i-j = 0xfffffffe
2345 v+(i-j) = 0xbff84000
2347 /* if evaluating constant expression, no code should be
2348 generated, so no bound check */
2349 if (tcc_state->do_bounds_check && !const_wanted) {
2350 /* if bounded pointers, we generate a special code to
2351 test bounds */
2352 if (op == '-') {
2353 vpushi(0);
2354 vswap();
2355 gen_op('-');
2357 gen_bounded_ptr_add();
2358 } else
2359 #endif
2361 gen_opic(op);
2363 /* restore the type in case gen_opic() swapped the operands */
2364 vtop->type = type1;
2366 } else if (is_float(bt1) || is_float(bt2)) {
2367 /* compute bigger type and do implicit casts */
2368 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
2369 t = VT_LDOUBLE;
2370 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
2371 t = VT_DOUBLE;
2372 } else {
2373 t = VT_FLOAT;
2375 /* floats can only be used for a few operations */
2376 if (op != '+' && op != '-' && op != '*' && op != '/' &&
2377 (op < TOK_ULT || op > TOK_GT))
2378 tcc_error("invalid operands for binary operation");
2379 goto std_op;
2380 } else if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL) {
2381 t = bt1 == VT_LLONG ? VT_LLONG : VT_INT;
2382 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (t | VT_UNSIGNED))
2383 t |= VT_UNSIGNED;
2384 t |= (VT_LONG & t1);
2385 goto std_op;
2386 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
2387 /* cast to biggest op */
2388 t = VT_LLONG | VT_LONG;
2389 if (bt1 == VT_LLONG)
2390 t &= t1;
2391 if (bt2 == VT_LLONG)
2392 t &= t2;
2393 /* convert to unsigned if it does not fit in a long long */
2394 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
2395 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
2396 t |= VT_UNSIGNED;
2397 goto std_op;
2398 } else {
2399 /* integer operations */
2400 t = VT_INT | (VT_LONG & (t1 | t2));
2401 /* convert to unsigned if it does not fit in an integer */
2402 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
2403 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
2404 t |= VT_UNSIGNED;
2405 std_op:
2406 /* XXX: currently, some unsigned operations are explicit, so
2407 we modify them here */
2408 if (t & VT_UNSIGNED) {
2409 if (op == TOK_SAR)
2410 op = TOK_SHR;
2411 else if (op == '/')
2412 op = TOK_UDIV;
2413 else if (op == '%')
2414 op = TOK_UMOD;
2415 else if (op == TOK_LT)
2416 op = TOK_ULT;
2417 else if (op == TOK_GT)
2418 op = TOK_UGT;
2419 else if (op == TOK_LE)
2420 op = TOK_ULE;
2421 else if (op == TOK_GE)
2422 op = TOK_UGE;
2424 vswap();
2425 type1.t = t;
2426 type1.ref = NULL;
2427 gen_cast(&type1);
2428 vswap();
2429 /* special case for shifts and long long: we keep the shift as
2430 an integer */
2431 if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
2432 type1.t = VT_INT;
2433 gen_cast(&type1);
2434 if (is_float(t))
2435 gen_opif(op);
2436 else
2437 gen_opic(op);
2438 if (op >= TOK_ULT && op <= TOK_GT) {
2439 /* relational op: the result is an int */
2440 vtop->type.t = VT_INT;
2441 } else {
2442 vtop->type.t = t;
2445 // Make sure that we have converted to an rvalue:
2446 if (vtop->r & VT_LVAL)
2447 gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
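/* Illustrative sketch (editor's addition, not part of tcc): the pointer
   arithmetic handled above scales the integer operand by the pointed-to size
   and divides pointer differences back down, so that: */
#if 0
#include <stddef.h>
ptrdiff_t ptr_arith_demo(int *p)
{
    int *q = p + 3;   /* the 3 is multiplied by sizeof(int) before the add */
    return q - p;     /* byte difference divided by sizeof(int): result is 3 */
}
#endif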
2450 #ifndef TCC_TARGET_ARM
2451 /* generic itof for unsigned long long case */
2452 static void gen_cvt_itof1(int t)
2454 #ifdef TCC_TARGET_ARM64
2455 gen_cvt_itof(t);
2456 #else
2457 if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
2458 (VT_LLONG | VT_UNSIGNED)) {
2460 if (t == VT_FLOAT)
2461 vpush_global_sym(&func_old_type, TOK___floatundisf);
2462 #if LDOUBLE_SIZE != 8
2463 else if (t == VT_LDOUBLE)
2464 vpush_global_sym(&func_old_type, TOK___floatundixf);
2465 #endif
2466 else
2467 vpush_global_sym(&func_old_type, TOK___floatundidf);
2468 vrott(2);
2469 gfunc_call(1);
2470 vpushi(0);
2471 vtop->r = reg_fret(t);
2472 } else {
2473 gen_cvt_itof(t);
2475 #endif
2477 #endif
2479 /* generic ftoi for unsigned long long case */
2480 static void gen_cvt_ftoi1(int t)
2482 #ifdef TCC_TARGET_ARM64
2483 gen_cvt_ftoi(t);
2484 #else
2485 int st;
2487 if (t == (VT_LLONG | VT_UNSIGNED)) {
2488 /* not handled natively */
2489 st = vtop->type.t & VT_BTYPE;
2490 if (st == VT_FLOAT)
2491 vpush_global_sym(&func_old_type, TOK___fixunssfdi);
2492 #if LDOUBLE_SIZE != 8
2493 else if (st == VT_LDOUBLE)
2494 vpush_global_sym(&func_old_type, TOK___fixunsxfdi);
2495 #endif
2496 else
2497 vpush_global_sym(&func_old_type, TOK___fixunsdfdi);
2498 vrott(2);
2499 gfunc_call(1);
2500 vpushi(0);
2501 vtop->r = REG_IRET;
2502 vtop->r2 = REG_LRET;
2503 } else {
2504 gen_cvt_ftoi(t);
2506 #endif
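/* Illustrative sketch (editor's addition, not part of tcc): both helpers
   above fall back to runtime support functions for the unsigned 64-bit
   cases; in user code the following conversions take that path on targets
   without native support: */
#if 0
double u64_to_double(unsigned long long x)   { return (double)x; }             /* __floatundidf */
unsigned long long double_to_u64(double d)   { return (unsigned long long)d; } /* __fixunsdfdi  */
#endif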
2509 /* force char or short cast */
2510 static void force_charshort_cast(int t)
2512 int bits, dbt;
2514 /* cannot cast static initializers */
2515 if (STATIC_DATA_WANTED)
2516 return;
2518 dbt = t & VT_BTYPE;
2519 /* XXX: add optimization if lvalue : just change type and offset */
2520 if (dbt == VT_BYTE)
2521 bits = 8;
2522 else
2523 bits = 16;
2524 if (t & VT_UNSIGNED) {
2525 vpushi((1 << bits) - 1);
2526 gen_op('&');
2527 } else {
2528 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
2529 bits = 64 - bits;
2530 else
2531 bits = 32 - bits;
2532 vpushi(bits);
2533 gen_op(TOK_SHL);
2534 /* the result must be signed, otherwise gen_op() converts the SAR into an SHR.
2535 This was not the case when "t" was a signed short
2536 and the last value on the stack was an unsigned int */
2537 vtop->type.t &= ~VT_UNSIGNED;
2538 vpushi(bits);
2539 gen_op(TOK_SAR);
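/* Illustrative sketch (editor's addition, not part of tcc): the delayed
   char/short narrowing above uses a mask for unsigned targets and a shift
   pair for signed ones, i.e. roughly: */
#if 0
int narrow_demo(int x)
{
    unsigned char u = x;   /* unsigned: x & 0xff                         */
    signed char   s = x;   /* signed:   (x << 24) >> 24 (arithmetic SAR) */
    return u + s;
}
#endif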
2543 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2544 static void gen_cast_s(int t)
2546 CType type;
2547 type.t = t;
2548 type.ref = NULL;
2549 gen_cast(&type);
2552 static void gen_cast(CType *type)
2554 int sbt, dbt, sf, df, c, p;
2556 /* special delayed cast for char/short */
2557 /* XXX: in some cases (multiple cascaded casts), it may still
2558 be incorrect */
2559 if (vtop->r & VT_MUSTCAST) {
2560 vtop->r &= ~VT_MUSTCAST;
2561 force_charshort_cast(vtop->type.t);
2564 /* bitfields first get cast to ints */
2565 if (vtop->type.t & VT_BITFIELD) {
2566 gv(RC_INT);
2569 dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
2570 sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
2572 if (sbt != dbt) {
2573 sf = is_float(sbt);
2574 df = is_float(dbt);
2575 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2576 p = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM);
2577 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
2578 c &= dbt != VT_LDOUBLE;
2579 #endif
2580 if (c) {
2581 /* constant case: we can do it now */
2582 /* XXX: in ISOC, cannot do it if error in convert */
2583 if (sbt == VT_FLOAT)
2584 vtop->c.ld = vtop->c.f;
2585 else if (sbt == VT_DOUBLE)
2586 vtop->c.ld = vtop->c.d;
2588 if (df) {
2589 if ((sbt & VT_BTYPE) == VT_LLONG) {
2590 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
2591 vtop->c.ld = vtop->c.i;
2592 else
2593 vtop->c.ld = -(long double)-vtop->c.i;
2594 } else if(!sf) {
2595 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
2596 vtop->c.ld = (uint32_t)vtop->c.i;
2597 else
2598 vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
2601 if (dbt == VT_FLOAT)
2602 vtop->c.f = (float)vtop->c.ld;
2603 else if (dbt == VT_DOUBLE)
2604 vtop->c.d = (double)vtop->c.ld;
2605 } else if (sf && dbt == (VT_LLONG|VT_UNSIGNED)) {
2606 vtop->c.i = vtop->c.ld;
2607 } else if (sf && dbt == VT_BOOL) {
2608 vtop->c.i = (vtop->c.ld != 0);
2609 } else {
2610 if(sf)
2611 vtop->c.i = vtop->c.ld;
2612 else if (sbt == (VT_LLONG|VT_UNSIGNED))
2614 else if (sbt & VT_UNSIGNED)
2615 vtop->c.i = (uint32_t)vtop->c.i;
2616 #if PTR_SIZE == 8
2617 else if (sbt == VT_PTR)
2619 #endif
2620 else if (sbt != VT_LLONG)
2621 vtop->c.i = ((uint32_t)vtop->c.i |
2622 -(vtop->c.i & 0x80000000));
2624 if (dbt == (VT_LLONG|VT_UNSIGNED))
2626 else if (dbt == VT_BOOL)
2627 vtop->c.i = (vtop->c.i != 0);
2628 #if PTR_SIZE == 8
2629 else if (dbt == VT_PTR)
2631 #endif
2632 else if (dbt != VT_LLONG) {
2633 uint32_t m = ((dbt & VT_BTYPE) == VT_BYTE ? 0xff :
2634 (dbt & VT_BTYPE) == VT_SHORT ? 0xffff :
2635 0xffffffff);
2636 vtop->c.i &= m;
2637 if (!(dbt & VT_UNSIGNED))
2638 vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
2641 } else if (p && dbt == VT_BOOL) {
2642 vtop->r = VT_CONST;
2643 vtop->c.i = 1;
2644 } else {
2645 /* non constant case: generate code */
2646 if (sf && df) {
2647 /* convert from fp to fp */
2648 gen_cvt_ftof(dbt);
2649 } else if (df) {
2650 /* convert int to fp */
2651 gen_cvt_itof1(dbt);
2652 } else if (sf) {
2653 /* convert fp to int */
2654 if (dbt == VT_BOOL) {
2655 vpushi(0);
2656 gen_op(TOK_NE);
2657 } else {
2658 /* we handle char/short/etc... with generic code */
2659 if (dbt != (VT_INT | VT_UNSIGNED) &&
2660 dbt != (VT_LLONG | VT_UNSIGNED) &&
2661 dbt != VT_LLONG)
2662 dbt = VT_INT;
2663 gen_cvt_ftoi1(dbt);
2664 if (dbt == VT_INT && (type->t & (VT_BTYPE | VT_UNSIGNED)) != dbt) {
2665 /* additional cast for char/short... */
2666 vtop->type.t = dbt;
2667 gen_cast(type);
2670 #if PTR_SIZE == 4
2671 } else if ((dbt & VT_BTYPE) == VT_LLONG) {
2672 if ((sbt & VT_BTYPE) != VT_LLONG) {
2673 /* scalar to long long */
2674 /* machine independent conversion */
2675 gv(RC_INT);
2676 /* generate high word */
2677 if (sbt == (VT_INT | VT_UNSIGNED)) {
2678 vpushi(0);
2679 gv(RC_INT);
2680 } else {
2681 if (sbt == VT_PTR) {
2682 /* cast from pointer to int before we apply the
2683 shift operation, which pointers don't support */
2684 gen_cast_s(VT_INT);
2686 gv_dup();
2687 vpushi(31);
2688 gen_op(TOK_SAR);
2690 /* patch second register */
2691 vtop[-1].r2 = vtop->r;
2692 vpop();
2694 #else
2695 } else if ((dbt & VT_BTYPE) == VT_LLONG ||
2696 (dbt & VT_BTYPE) == VT_PTR ||
2697 (dbt & VT_BTYPE) == VT_FUNC) {
2698 if ((sbt & VT_BTYPE) != VT_LLONG &&
2699 (sbt & VT_BTYPE) != VT_PTR &&
2700 (sbt & VT_BTYPE) != VT_FUNC) {
2701 /* need to convert from 32bit to 64bit */
2702 gv(RC_INT);
2703 if (sbt != (VT_INT | VT_UNSIGNED)) {
2704 #if defined(TCC_TARGET_ARM64)
2705 gen_cvt_sxtw();
2706 #elif defined(TCC_TARGET_X86_64)
2707 int r = gv(RC_INT);
2708 /* x86_64 specific: movslq */
2709 o(0x6348);
2710 o(0xc0 + (REG_VALUE(r) << 3) + REG_VALUE(r));
2711 #else
2712 #error
2713 #endif
2716 #endif
2717 } else if (dbt == VT_BOOL) {
2718 /* scalar to bool */
2719 vpushi(0);
2720 gen_op(TOK_NE);
2721 } else if ((dbt & VT_BTYPE) == VT_BYTE ||
2722 (dbt & VT_BTYPE) == VT_SHORT) {
2723 if (sbt == VT_PTR) {
2724 vtop->type.t = VT_INT;
2725 tcc_warning("nonportable conversion from pointer to char/short");
2727 force_charshort_cast(dbt);
2728 } else if ((dbt & VT_BTYPE) == VT_INT) {
2729 /* scalar to int */
2730 if ((sbt & VT_BTYPE) == VT_LLONG) {
2731 #if PTR_SIZE == 4
2732 /* from long long: just take low order word */
2733 lexpand();
2734 vpop();
2735 #else
2736 vpushi(0xffffffff);
2737 vtop->type.t |= VT_UNSIGNED;
2738 gen_op('&');
2739 #endif
2741 /* if lvalue and single word type, nothing to do because
2742 the lvalue already contains the real type size (see
2743 VT_LVAL_xxx constants) */
2746 } else if ((dbt & VT_BTYPE) == VT_PTR && !(vtop->r & VT_LVAL)) {
2747 /* if we are casting between pointer types,
2748 we must update the VT_LVAL_xxx size */
2749 vtop->r = (vtop->r & ~VT_LVAL_TYPE)
2750 | (lvalue_type(type->ref->type.t) & VT_LVAL_TYPE);
2752 vtop->type = *type;
2753 vtop->type.t &= ~ ( VT_CONSTANT | VT_VOLATILE | VT_ARRAY );
2756 /* return type size as known at compile time. Put alignment at 'a' */
2757 ST_FUNC int type_size(CType *type, int *a)
2759 Sym *s;
2760 int bt;
2762 bt = type->t & VT_BTYPE;
2763 if (bt == VT_STRUCT) {
2764 /* struct/union */
2765 s = type->ref;
2766 *a = s->r;
2767 return s->c;
2768 } else if (bt == VT_PTR) {
2769 if (type->t & VT_ARRAY) {
2770 int ts;
2772 s = type->ref;
2773 ts = type_size(&s->type, a);
2775 if (ts < 0 && s->c < 0)
2776 ts = -ts;
2778 return ts * s->c;
2779 } else {
2780 *a = PTR_SIZE;
2781 return PTR_SIZE;
2783 } else if (IS_ENUM(type->t) && type->ref->c == -1) {
2784 return -1; /* incomplete enum */
2785 } else if (bt == VT_LDOUBLE) {
2786 *a = LDOUBLE_ALIGN;
2787 return LDOUBLE_SIZE;
2788 } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
2789 #ifdef TCC_TARGET_I386
2790 #ifdef TCC_TARGET_PE
2791 *a = 8;
2792 #else
2793 *a = 4;
2794 #endif
2795 #elif defined(TCC_TARGET_ARM)
2796 #ifdef TCC_ARM_EABI
2797 *a = 8;
2798 #else
2799 *a = 4;
2800 #endif
2801 #else
2802 *a = 8;
2803 #endif
2804 return 8;
2805 } else if (bt == VT_INT || bt == VT_FLOAT) {
2806 *a = 4;
2807 return 4;
2808 } else if (bt == VT_SHORT) {
2809 *a = 2;
2810 return 2;
2811 } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
2812 *a = 8;
2813 return 16;
2814 } else {
2815 /* char, void, function, _Bool */
2816 *a = 1;
2817 return 1;
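/* Illustrative sketch (editor's addition, not part of tcc): type_size()
   returns the size and stores the alignment in '*a'; as the #ifdefs above
   show, the alignment of 8-byte scalars is target dependent (4 on i386
   non-PE and non-EABI ARM, 8 elsewhere). A quick way to inspect the values
   used on the host: */
#if 0
#include <stdio.h>
int main(void)
{
    printf("double:    size %d align %d\n", (int)sizeof(double), (int)_Alignof(double));
    printf("long long: size %d align %d\n", (int)sizeof(long long), (int)_Alignof(long long));
    printf("short:     size %d align %d\n", (int)sizeof(short), (int)_Alignof(short));
    return 0;
}
#endif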
2821 /* push the type size as known at runtime on top of the value stack. Put
2822 the alignment at 'a' */
2823 ST_FUNC void vla_runtime_type_size(CType *type, int *a)
2825 if (type->t & VT_VLA) {
2826 type_size(&type->ref->type, a);
2827 vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
2828 } else {
2829 vpushi(type_size(type, a));
2833 static void vla_sp_restore(void) {
2834 if (vlas_in_scope) {
2835 gen_vla_sp_restore(vla_sp_loc);
2839 static void vla_sp_restore_root(void) {
2840 if (vlas_in_scope) {
2841 gen_vla_sp_restore(vla_sp_root_loc);
2845 /* return the pointed type of t */
2846 static inline CType *pointed_type(CType *type)
2848 return &type->ref->type;
2851 /* modify 'type' so that it becomes a pointer to the original type. */
2852 ST_FUNC void mk_pointer(CType *type)
2854 Sym *s;
2855 s = sym_push(SYM_FIELD, type, 0, -1);
2856 type->t = VT_PTR | (type->t & VT_STORAGE);
2857 type->ref = s;
2860 /* compare function types. OLD functions match any new functions */
2861 static int is_compatible_func(CType *type1, CType *type2)
2863 Sym *s1, *s2;
2865 s1 = type1->ref;
2866 s2 = type2->ref;
2867 if (!is_compatible_types(&s1->type, &s2->type))
2868 return 0;
2869 /* check func_call */
2870 if (s1->f.func_call != s2->f.func_call)
2871 return 0;
2872 /* XXX: not complete */
2873 if (s1->f.func_type == FUNC_OLD || s2->f.func_type == FUNC_OLD)
2874 return 1;
2875 if (s1->f.func_type != s2->f.func_type)
2876 return 0;
2877 while (s1 != NULL) {
2878 if (s2 == NULL)
2879 return 0;
2880 if (!is_compatible_unqualified_types(&s1->type, &s2->type))
2881 return 0;
2882 s1 = s1->next;
2883 s2 = s2->next;
2885 if (s2)
2886 return 0;
2887 return 1;
2890 /* return true if type1 and type2 are the same. If unqualified is
2891 true, qualifiers on the types are ignored.
2893 static int compare_types(CType *type1, CType *type2, int unqualified)
2895 int bt1, t1, t2;
2897 t1 = type1->t & VT_TYPE;
2898 t2 = type2->t & VT_TYPE;
2899 if (unqualified) {
2900 /* strip qualifiers before comparing */
2901 t1 &= ~(VT_CONSTANT | VT_VOLATILE);
2902 t2 &= ~(VT_CONSTANT | VT_VOLATILE);
2905 /* Default vs. explicit signedness only matters for char */
2906 if ((t1 & VT_BTYPE) != VT_BYTE) {
2907 t1 &= ~VT_DEFSIGN;
2908 t2 &= ~VT_DEFSIGN;
2910 /* XXX: bitfields ? */
2911 if (t1 != t2)
2912 return 0;
2913 /* test more complicated cases */
2914 bt1 = t1 & (VT_BTYPE | VT_ARRAY);
2915 if (bt1 == VT_PTR) {
2916 type1 = pointed_type(type1);
2917 type2 = pointed_type(type2);
2918 return is_compatible_types(type1, type2);
2919 } else if (bt1 & VT_ARRAY) {
2920 return type1->ref->c < 0 || type2->ref->c < 0
2921 || type1->ref->c == type2->ref->c;
2922 } else if (bt1 == VT_STRUCT) {
2923 return (type1->ref == type2->ref);
2924 } else if (bt1 == VT_FUNC) {
2925 return is_compatible_func(type1, type2);
2926 } else if (IS_ENUM(type1->t) || IS_ENUM(type2->t)) {
2927 return type1->ref == type2->ref;
2928 } else {
2929 return 1;
2933 /* return true if type1 and type2 are exactly the same (including
2934 qualifiers).
2936 static int is_compatible_types(CType *type1, CType *type2)
2938 return compare_types(type1,type2,0);
2941 /* return true if type1 and type2 are the same (ignoring qualifiers).
2943 static int is_compatible_unqualified_types(CType *type1, CType *type2)
2945 return compare_types(type1,type2,1);
2948 /* print a type. If 'varstr' is not NULL, then the variable is also
2949 printed in the type */
2950 /* XXX: union */
2951 /* XXX: add array and function pointers */
2952 static void type_to_str(char *buf, int buf_size,
2953 CType *type, const char *varstr)
2955 int bt, v, t;
2956 Sym *s, *sa;
2957 char buf1[256];
2958 const char *tstr;
2960 t = type->t;
2961 bt = t & VT_BTYPE;
2962 buf[0] = '\0';
2964 if (t & VT_EXTERN)
2965 pstrcat(buf, buf_size, "extern ");
2966 if (t & VT_STATIC)
2967 pstrcat(buf, buf_size, "static ");
2968 if (t & VT_TYPEDEF)
2969 pstrcat(buf, buf_size, "typedef ");
2970 if (t & VT_INLINE)
2971 pstrcat(buf, buf_size, "inline ");
2972 if (t & VT_VOLATILE)
2973 pstrcat(buf, buf_size, "volatile ");
2974 if (t & VT_CONSTANT)
2975 pstrcat(buf, buf_size, "const ");
2977 if (((t & VT_DEFSIGN) && bt == VT_BYTE)
2978 || ((t & VT_UNSIGNED)
2979 && (bt == VT_SHORT || bt == VT_INT || bt == VT_LLONG)
2980 && !IS_ENUM(t)
2982 pstrcat(buf, buf_size, (t & VT_UNSIGNED) ? "unsigned " : "signed ");
2984 buf_size -= strlen(buf);
2985 buf += strlen(buf);
2987 switch(bt) {
2988 case VT_VOID:
2989 tstr = "void";
2990 goto add_tstr;
2991 case VT_BOOL:
2992 tstr = "_Bool";
2993 goto add_tstr;
2994 case VT_BYTE:
2995 tstr = "char";
2996 goto add_tstr;
2997 case VT_SHORT:
2998 tstr = "short";
2999 goto add_tstr;
3000 case VT_INT:
3001 tstr = "int";
3002 goto maybe_long;
3003 case VT_LLONG:
3004 tstr = "long long";
3005 maybe_long:
3006 if (t & VT_LONG)
3007 tstr = "long";
3008 if (!IS_ENUM(t))
3009 goto add_tstr;
3010 tstr = "enum ";
3011 goto tstruct;
3012 case VT_FLOAT:
3013 tstr = "float";
3014 goto add_tstr;
3015 case VT_DOUBLE:
3016 tstr = "double";
3017 goto add_tstr;
3018 case VT_LDOUBLE:
3019 tstr = "long double";
3020 add_tstr:
3021 pstrcat(buf, buf_size, tstr);
3022 break;
3023 case VT_STRUCT:
3024 tstr = "struct ";
3025 if (IS_UNION(t))
3026 tstr = "union ";
3027 tstruct:
3028 pstrcat(buf, buf_size, tstr);
3029 v = type->ref->v & ~SYM_STRUCT;
3030 if (v >= SYM_FIRST_ANOM)
3031 pstrcat(buf, buf_size, "<anonymous>");
3032 else
3033 pstrcat(buf, buf_size, get_tok_str(v, NULL));
3034 break;
3035 case VT_FUNC:
3036 s = type->ref;
3037 buf1[0]=0;
3038 if (varstr && '*' == *varstr) {
3039 pstrcat(buf1, sizeof(buf1), "(");
3040 pstrcat(buf1, sizeof(buf1), varstr);
3041 pstrcat(buf1, sizeof(buf1), ")");
3043 pstrcat(buf1, sizeof(buf1), "(");
3044 sa = s->next;
3045 while (sa != NULL) {
3046 char buf2[256];
3047 type_to_str(buf2, sizeof(buf2), &sa->type, NULL);
3048 pstrcat(buf1, sizeof(buf1), buf2);
3049 sa = sa->next;
3050 if (sa)
3051 pstrcat(buf1, sizeof(buf1), ", ");
3053 if (s->f.func_type == FUNC_ELLIPSIS)
3054 pstrcat(buf1, sizeof(buf1), ", ...");
3055 pstrcat(buf1, sizeof(buf1), ")");
3056 type_to_str(buf, buf_size, &s->type, buf1);
3057 goto no_var;
3058 case VT_PTR:
3059 s = type->ref;
3060 if (t & VT_ARRAY) {
3061 if (varstr && '*' == *varstr)
3062 snprintf(buf1, sizeof(buf1), "(%s)[%d]", varstr, s->c);
3063 else
3064 snprintf(buf1, sizeof(buf1), "%s[%d]", varstr ? varstr : "", s->c);
3065 type_to_str(buf, buf_size, &s->type, buf1);
3066 goto no_var;
3068 pstrcpy(buf1, sizeof(buf1), "*");
3069 if (t & VT_CONSTANT)
3070 pstrcat(buf1, sizeof(buf1), "const ");
3071 if (t & VT_VOLATILE)
3072 pstrcat(buf1, sizeof(buf1), "volatile ");
3073 if (varstr)
3074 pstrcat(buf1, sizeof(buf1), varstr);
3075 type_to_str(buf, buf_size, &s->type, buf1);
3076 goto no_var;
3078 if (varstr) {
3079 pstrcat(buf, buf_size, " ");
3080 pstrcat(buf, buf_size, varstr);
3082 no_var: ;
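/* Illustrative sketch (editor's addition, not part of tcc): type_to_str() is
   what renders types for the diagnostics below (e.g. "cannot cast '%s' to
   '%s'"). A hypothetical call for the value on top of the stack: */
#if 0
char buf[256];
type_to_str(buf, sizeof(buf), &vtop->type, NULL); /* e.g. "unsigned int" or "const char *" */
#endif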
3085 /* verify type compatibility to store vtop in 'dt' type, and generate
3086 casts if needed. */
3087 static void gen_assign_cast(CType *dt)
3089 CType *st, *type1, *type2;
3090 char buf1[256], buf2[256];
3091 int dbt, sbt, qualwarn, lvl;
3093 st = &vtop->type; /* source type */
3094 dbt = dt->t & VT_BTYPE;
3095 sbt = st->t & VT_BTYPE;
3096 if (sbt == VT_VOID || dbt == VT_VOID) {
3097 if (sbt == VT_VOID && dbt == VT_VOID)
3098 ; /* It is Ok if both are void */
3099 else
3100 tcc_error("cannot cast from/to void");
3102 if (dt->t & VT_CONSTANT)
3103 tcc_warning("assignment of read-only location");
3104 switch(dbt) {
3105 case VT_PTR:
3106 /* special cases for pointers */
3107 /* '0' can also be a pointer */
3108 if (is_null_pointer(vtop))
3109 break;
3110 /* accept implicit pointer to integer cast with warning */
3111 if (is_integer_btype(sbt)) {
3112 tcc_warning("assignment makes pointer from integer without a cast");
3113 break;
3115 type1 = pointed_type(dt);
3116 if (sbt == VT_PTR)
3117 type2 = pointed_type(st);
3118 else if (sbt == VT_FUNC)
3119 type2 = st; /* a function is implicitly a function pointer */
3120 else
3121 goto error;
3122 if (is_compatible_types(type1, type2))
3123 break;
3124 for (qualwarn = lvl = 0;; ++lvl) {
3125 if (((type2->t & VT_CONSTANT) && !(type1->t & VT_CONSTANT)) ||
3126 ((type2->t & VT_VOLATILE) && !(type1->t & VT_VOLATILE)))
3127 qualwarn = 1;
3128 dbt = type1->t & (VT_BTYPE|VT_LONG);
3129 sbt = type2->t & (VT_BTYPE|VT_LONG);
3130 if (dbt != VT_PTR || sbt != VT_PTR)
3131 break;
3132 type1 = pointed_type(type1);
3133 type2 = pointed_type(type2);
3135 if (!is_compatible_unqualified_types(type1, type2)) {
3136 if ((dbt == VT_VOID || sbt == VT_VOID) && lvl == 0) {
3137 /* void * can match anything */
3138 } else if (dbt == sbt
3139 && is_integer_btype(sbt & VT_BTYPE)
3140 && IS_ENUM(type1->t) + IS_ENUM(type2->t)
3141 + !!((type1->t ^ type2->t) & VT_UNSIGNED) < 2) {
3142 /* Like GCC, don't warn by default for mere changes
3143 in pointer target signedness. Do warn for different
3144 base types, though, in particular for unsigned enums
3145 and signed int targets. */
3146 } else {
3147 tcc_warning("assignment from incompatible pointer type");
3148 break;
3151 if (qualwarn)
3152 tcc_warning("assignment discards qualifiers from pointer target type");
3153 break;
3154 case VT_BYTE:
3155 case VT_SHORT:
3156 case VT_INT:
3157 case VT_LLONG:
3158 if (sbt == VT_PTR || sbt == VT_FUNC) {
3159 tcc_warning("assignment makes integer from pointer without a cast");
3160 } else if (sbt == VT_STRUCT) {
3161 goto case_VT_STRUCT;
3163 /* XXX: more tests */
3164 break;
3165 case VT_STRUCT:
3166 case_VT_STRUCT:
3167 if (!is_compatible_unqualified_types(dt, st)) {
3168 error:
3169 type_to_str(buf1, sizeof(buf1), st, NULL);
3170 type_to_str(buf2, sizeof(buf2), dt, NULL);
3171 tcc_error("cannot cast '%s' to '%s'", buf1, buf2);
3173 break;
3175 gen_cast(dt);
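/* Illustrative sketch (editor's addition, not part of tcc): the checks above
   map onto diagnostics like the following when compiling user code: */
#if 0
void assign_cast_demo(int *ip, const int *cip)
{
    int  *p = 1;    /* "assignment makes pointer from integer without a cast" */
    char *q = ip;   /* "assignment from incompatible pointer type" */
    int  *r = cip;  /* "assignment discards qualifiers from pointer target type" */
    long  n = ip;   /* "assignment makes integer from pointer without a cast" */
    (void)p; (void)q; (void)r; (void)n;
}
#endif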
3178 /* store vtop in lvalue pushed on stack */
3179 ST_FUNC void vstore(void)
3181 int sbt, dbt, ft, r, t, size, align, bit_size, bit_pos, rc, delayed_cast;
3183 ft = vtop[-1].type.t;
3184 sbt = vtop->type.t & VT_BTYPE;
3185 dbt = ft & VT_BTYPE;
3186 if ((((sbt == VT_INT || sbt == VT_SHORT) && dbt == VT_BYTE) ||
3187 (sbt == VT_INT && dbt == VT_SHORT))
3188 && !(vtop->type.t & VT_BITFIELD)) {
3189 /* optimize char/short casts */
3190 delayed_cast = VT_MUSTCAST;
3191 vtop->type.t = ft & VT_TYPE;
3192 /* XXX: factorize */
3193 if (ft & VT_CONSTANT)
3194 tcc_warning("assignment of read-only location");
3195 } else {
3196 delayed_cast = 0;
3197 if (!(ft & VT_BITFIELD))
3198 gen_assign_cast(&vtop[-1].type);
3201 if (sbt == VT_STRUCT) {
3202 /* if structure, only generate pointer */
3203 /* structure assignment : generate memcpy */
3204 /* XXX: optimize if small size */
3205 size = type_size(&vtop->type, &align);
3207 /* destination */
3208 vswap();
3209 vtop->type.t = VT_PTR;
3210 gaddrof();
3212 /* address of memcpy() */
3213 #ifdef TCC_ARM_EABI
3214 if(!(align & 7))
3215 vpush_global_sym(&func_old_type, TOK_memcpy8);
3216 else if(!(align & 3))
3217 vpush_global_sym(&func_old_type, TOK_memcpy4);
3218 else
3219 #endif
3220 /* Use memmove, rather than memcpy, as dest and src may be the same: */
3221 vpush_global_sym(&func_old_type, TOK_memmove);
3223 vswap();
3224 /* source */
3225 vpushv(vtop - 2);
3226 vtop->type.t = VT_PTR;
3227 gaddrof();
3228 /* type size */
3229 vpushi(size);
3230 gfunc_call(3);
3232 /* leave source on stack */
3233 } else if (ft & VT_BITFIELD) {
3234 /* bitfield store handling */
3236 /* save lvalue as expression result (example: s.b = s.a = n;) */
3237 vdup(), vtop[-1] = vtop[-2];
3239 bit_pos = BIT_POS(ft);
3240 bit_size = BIT_SIZE(ft);
3241 /* remove bit field info to avoid loops */
3242 vtop[-1].type.t = ft & ~VT_STRUCT_MASK;
3244 if ((ft & VT_BTYPE) == VT_BOOL) {
3245 gen_cast(&vtop[-1].type);
3246 vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
3249 r = adjust_bf(vtop - 1, bit_pos, bit_size);
3250 if (r == VT_STRUCT) {
3251 gen_cast_s((ft & VT_BTYPE) == VT_LLONG ? VT_LLONG : VT_INT);
3252 store_packed_bf(bit_pos, bit_size);
3253 } else {
3254 unsigned long long mask = (1ULL << bit_size) - 1;
3255 if ((ft & VT_BTYPE) != VT_BOOL) {
3256 /* mask source */
3257 if ((vtop[-1].type.t & VT_BTYPE) == VT_LLONG)
3258 vpushll(mask);
3259 else
3260 vpushi((unsigned)mask);
3261 gen_op('&');
3263 /* shift source */
3264 vpushi(bit_pos);
3265 gen_op(TOK_SHL);
3266 vswap();
3267 /* duplicate destination */
3268 vdup();
3269 vrott(3);
3270 /* load destination, mask and or with source */
3271 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
3272 vpushll(~(mask << bit_pos));
3273 else
3274 vpushi(~((unsigned)mask << bit_pos));
3275 gen_op('&');
3276 gen_op('|');
3277 /* store result */
3278 vstore();
3279 /* ... and discard */
3280 vpop();
3282 } else if (dbt == VT_VOID) {
3283 --vtop;
3284 } else {
3285 #ifdef CONFIG_TCC_BCHECK
3286 /* bound check case */
3287 if (vtop[-1].r & VT_MUSTBOUND) {
3288 vswap();
3289 gbound();
3290 vswap();
3292 #endif
3293 rc = RC_INT;
3294 if (is_float(ft)) {
3295 rc = RC_FLOAT;
3296 #ifdef TCC_TARGET_X86_64
3297 if ((ft & VT_BTYPE) == VT_LDOUBLE) {
3298 rc = RC_ST0;
3299 } else if ((ft & VT_BTYPE) == VT_QFLOAT) {
3300 rc = RC_FRET;
3302 #endif
3304 r = gv(rc); /* generate value */
3305 /* if lvalue was saved on stack, must read it */
3306 if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
3307 SValue sv;
3308 t = get_reg(RC_INT);
3309 #if PTR_SIZE == 8
3310 sv.type.t = VT_PTR;
3311 #else
3312 sv.type.t = VT_INT;
3313 #endif
3314 sv.r = VT_LOCAL | VT_LVAL;
3315 sv.c.i = vtop[-1].c.i;
3316 load(t, &sv);
3317 vtop[-1].r = t | VT_LVAL;
3319 /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
3320 #if PTR_SIZE == 8
3321 if (((ft & VT_BTYPE) == VT_QLONG) || ((ft & VT_BTYPE) == VT_QFLOAT)) {
3322 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
3323 #else
3324 if ((ft & VT_BTYPE) == VT_LLONG) {
3325 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
3326 #endif
3327 vtop[-1].type.t = load_type;
3328 store(r, vtop - 1);
3329 vswap();
3330 /* convert to int to increment easily */
3331 vtop->type.t = addr_type;
3332 gaddrof();
3333 vpushi(load_size);
3334 gen_op('+');
3335 vtop->r |= VT_LVAL;
3336 vswap();
3337 vtop[-1].type.t = load_type;
3338 /* XXX: it works because r2 is spilled last ! */
3339 store(vtop->r2, vtop - 1);
3340 } else {
3341 store(r, vtop - 1);
3344 vswap();
3345 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
3346 vtop->r |= delayed_cast;
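/* Illustrative sketch (editor's addition, not part of tcc): the bit-field
   branch above masks the source, shifts it into place, then merges it with
   the old word; the source-level equivalent for a 5-bit field at bit 3: */
#if 0
unsigned bf_store_demo(unsigned word, unsigned val)
{
    unsigned mask = (1u << 5) - 1;                       /* mask source */
    return (word & ~(mask << 3)) | ((val & mask) << 3);  /* clear old bits, or in new */
}
#endif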
3350 /* 'post' selects post- vs pre-increment; 'c' is the token ++ or -- */
3351 ST_FUNC void inc(int post, int c)
3353 test_lvalue();
3354 vdup(); /* save lvalue */
3355 if (post) {
3356 gv_dup(); /* duplicate value */
3357 vrotb(3);
3358 vrotb(3);
3360 /* add constant */
3361 vpushi(c - TOK_MID);
3362 gen_op('+');
3363 vstore(); /* store value */
3364 if (post)
3365 vpop(); /* if post op, return saved value */
3368 ST_FUNC void parse_mult_str (CString *astr, const char *msg)
3370 /* read the string */
3371 if (tok != TOK_STR)
3372 expect(msg);
3373 cstr_new(astr);
3374 while (tok == TOK_STR) {
3375 /* XXX: add \0 handling too ? */
3376 cstr_cat(astr, tokc.str.data, -1);
3377 next();
3379 cstr_ccat(astr, '\0');
3382 /* If I is >= 1 and a power of two, returns log2(i)+1.
3383 If I is 0 returns 0. */
3384 static int exact_log2p1(int i)
3386 int ret;
3387 if (!i)
3388 return 0;
3389 for (ret = 1; i >= 1 << 8; ret += 8)
3390 i >>= 8;
3391 if (i >= 1 << 4)
3392 ret += 4, i >>= 4;
3393 if (i >= 1 << 2)
3394 ret += 2, i >>= 2;
3395 if (i >= 1 << 1)
3396 ret++;
3397 return ret;
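/* Illustrative sketch (editor's addition, not part of tcc): a few sample
   values, together with how ad->a.aligned is decoded further down
   (1 << (aligned - 1)): */
#if 0
/* exact_log2p1(0)  == 0                       */
/* exact_log2p1(1)  == 1  ->  1 << (1-1) == 1  */
/* exact_log2p1(8)  == 4  ->  1 << (4-1) == 8  */
/* exact_log2p1(16) == 5  ->  1 << (5-1) == 16 */
#endif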
3400 /* Parse __attribute__((...)) GNUC extension. */
3401 static void parse_attribute(AttributeDef *ad)
3403 int t, n;
3404 CString astr;
3406 redo:
3407 if (tok != TOK_ATTRIBUTE1 && tok != TOK_ATTRIBUTE2)
3408 return;
3409 next();
3410 skip('(');
3411 skip('(');
3412 while (tok != ')') {
3413 if (tok < TOK_IDENT)
3414 expect("attribute name");
3415 t = tok;
3416 next();
3417 switch(t) {
3418 case TOK_CLEANUP1:
3419 case TOK_CLEANUP2:
3421 Sym *s;
3423 skip('(');
3424 s = sym_find(tok);
3425 if (!s) {
3426 tcc_warning("implicit declaration of function '%s'",
3427 get_tok_str(tok, &tokc));
3428 s = external_global_sym(tok, &func_old_type, 0);
3430 ad->cleanup_func = s;
3431 next();
3432 skip(')');
3433 break;
3435 case TOK_SECTION1:
3436 case TOK_SECTION2:
3437 skip('(');
3438 parse_mult_str(&astr, "section name");
3439 ad->section = find_section(tcc_state, (char *)astr.data);
3440 skip(')');
3441 cstr_free(&astr);
3442 break;
3443 case TOK_ALIAS1:
3444 case TOK_ALIAS2:
3445 skip('(');
3446 parse_mult_str(&astr, "alias(\"target\")");
3447 ad->alias_target = /* save string as token, for later */
3448 tok_alloc((char*)astr.data, astr.size-1)->tok;
3449 skip(')');
3450 cstr_free(&astr);
3451 break;
3452 case TOK_VISIBILITY1:
3453 case TOK_VISIBILITY2:
3454 skip('(');
3455 parse_mult_str(&astr,
3456 "visibility(\"default|hidden|internal|protected\")");
3457 if (!strcmp (astr.data, "default"))
3458 ad->a.visibility = STV_DEFAULT;
3459 else if (!strcmp (astr.data, "hidden"))
3460 ad->a.visibility = STV_HIDDEN;
3461 else if (!strcmp (astr.data, "internal"))
3462 ad->a.visibility = STV_INTERNAL;
3463 else if (!strcmp (astr.data, "protected"))
3464 ad->a.visibility = STV_PROTECTED;
3465 else
3466 expect("visibility(\"default|hidden|internal|protected\")");
3467 skip(')');
3468 cstr_free(&astr);
3469 break;
3470 case TOK_ALIGNED1:
3471 case TOK_ALIGNED2:
3472 if (tok == '(') {
3473 next();
3474 n = expr_const();
3475 if (n <= 0 || (n & (n - 1)) != 0)
3476 tcc_error("alignment must be a positive power of two");
3477 skip(')');
3478 } else {
3479 n = MAX_ALIGN;
3481 ad->a.aligned = exact_log2p1(n);
3482 if (n != 1 << (ad->a.aligned - 1))
3483 tcc_error("alignment of %d is larger than implemented", n);
3484 break;
3485 case TOK_PACKED1:
3486 case TOK_PACKED2:
3487 ad->a.packed = 1;
3488 break;
3489 case TOK_WEAK1:
3490 case TOK_WEAK2:
3491 ad->a.weak = 1;
3492 break;
3493 case TOK_UNUSED1:
3494 case TOK_UNUSED2:
3495 /* currently, no need to handle it because tcc does not
3496 track unused objects */
3497 break;
3498 case TOK_NORETURN1:
3499 case TOK_NORETURN2:
3500 /* currently ignored: tcc makes no use of
3501 the noreturn information */
3502 break;
3503 case TOK_CDECL1:
3504 case TOK_CDECL2:
3505 case TOK_CDECL3:
3506 ad->f.func_call = FUNC_CDECL;
3507 break;
3508 case TOK_STDCALL1:
3509 case TOK_STDCALL2:
3510 case TOK_STDCALL3:
3511 ad->f.func_call = FUNC_STDCALL;
3512 break;
3513 #ifdef TCC_TARGET_I386
3514 case TOK_REGPARM1:
3515 case TOK_REGPARM2:
3516 skip('(');
3517 n = expr_const();
3518 if (n > 3)
3519 n = 3;
3520 else if (n < 0)
3521 n = 0;
3522 if (n > 0)
3523 ad->f.func_call = FUNC_FASTCALL1 + n - 1;
3524 skip(')');
3525 break;
3526 case TOK_FASTCALL1:
3527 case TOK_FASTCALL2:
3528 case TOK_FASTCALL3:
3529 ad->f.func_call = FUNC_FASTCALLW;
3530 break;
3531 #endif
3532 case TOK_MODE:
3533 skip('(');
3534 switch(tok) {
3535 case TOK_MODE_DI:
3536 ad->attr_mode = VT_LLONG + 1;
3537 break;
3538 case TOK_MODE_QI:
3539 ad->attr_mode = VT_BYTE + 1;
3540 break;
3541 case TOK_MODE_HI:
3542 ad->attr_mode = VT_SHORT + 1;
3543 break;
3544 case TOK_MODE_SI:
3545 case TOK_MODE_word:
3546 ad->attr_mode = VT_INT + 1;
3547 break;
3548 default:
3549 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok, NULL));
3550 break;
3552 next();
3553 skip(')');
3554 break;
3555 case TOK_DLLEXPORT:
3556 ad->a.dllexport = 1;
3557 break;
3558 case TOK_NODECORATE:
3559 ad->a.nodecorate = 1;
3560 break;
3561 case TOK_DLLIMPORT:
3562 ad->a.dllimport = 1;
3563 break;
3564 default:
3565 if (tcc_state->warn_unsupported)
3566 tcc_warning("'%s' attribute ignored", get_tok_str(t, NULL));
3567 /* skip parameters */
3568 if (tok == '(') {
3569 int parenthesis = 0;
3570 do {
3571 if (tok == '(')
3572 parenthesis++;
3573 else if (tok == ')')
3574 parenthesis--;
3575 next();
3576 } while (parenthesis && tok != -1);
3578 break;
3580 if (tok != ',')
3581 break;
3582 next();
3584 skip(')');
3585 skip(')');
3586 goto redo;
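/* Illustrative sketch (editor's addition, not part of tcc): the attributes
   parsed above appear in user code roughly like this (cleanup, section,
   aligned); names such as close_file and ".mydata" are made up for the demo: */
#if 0
#include <stdio.h>
static void close_file(FILE **fp) { if (*fp) fclose(*fp); }

static int counter __attribute__((section(".mydata")));   /* TOK_SECTION1/2 */
struct __attribute__((aligned(16))) blob { char c; };     /* TOK_ALIGNED1/2 */

void attr_demo(const char *name)
{
    /* close_file(&f) runs automatically when f goes out of scope */
    __attribute__((cleanup(close_file))) FILE *f = fopen(name, "r");  /* TOK_CLEANUP1/2 */
    (void)f; (void)counter;
}
#endif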
3589 static Sym * find_field (CType *type, int v)
3591 Sym *s = type->ref;
3592 v |= SYM_FIELD;
3593 while ((s = s->next) != NULL) {
3594 if ((s->v & SYM_FIELD) &&
3595 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3596 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3597 Sym *ret = find_field (&s->type, v);
3598 if (ret)
3599 return ret;
3601 if (s->v == v)
3602 break;
3604 return s;
3607 static void struct_add_offset (Sym *s, int offset)
3609 while ((s = s->next) != NULL) {
3610 if ((s->v & SYM_FIELD) &&
3611 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3612 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3613 struct_add_offset(s->type.ref, offset);
3614 } else
3615 s->c += offset;
3619 static void struct_layout(CType *type, AttributeDef *ad)
3621 int size, align, maxalign, offset, c, bit_pos, bit_size;
3622 int packed, a, bt, prevbt, prev_bit_size;
3623 int pcc = !tcc_state->ms_bitfields;
3624 int pragma_pack = *tcc_state->pack_stack_ptr;
3625 Sym *f;
3627 maxalign = 1;
3628 offset = 0;
3629 c = 0;
3630 bit_pos = 0;
3631 prevbt = VT_STRUCT; /* make it never match */
3632 prev_bit_size = 0;
3634 //#define BF_DEBUG
3636 for (f = type->ref->next; f; f = f->next) {
3637 if (f->type.t & VT_BITFIELD)
3638 bit_size = BIT_SIZE(f->type.t);
3639 else
3640 bit_size = -1;
3641 size = type_size(&f->type, &align);
3642 a = f->a.aligned ? 1 << (f->a.aligned - 1) : 0;
3643 packed = 0;
3645 if (pcc && bit_size == 0) {
3646 /* in pcc mode, packing does not affect zero-width bitfields */
3648 } else {
3649 /* in pcc mode, attribute packed overrides if set. */
3650 if (pcc && (f->a.packed || ad->a.packed))
3651 align = packed = 1;
3653 /* pragma pack overrides the alignment if it is smaller, and always packs bitfields */
3654 if (pragma_pack) {
3655 packed = 1;
3656 if (pragma_pack < align)
3657 align = pragma_pack;
3658 /* in pcc mode pragma pack also overrides individual align */
3659 if (pcc && pragma_pack < a)
3660 a = 0;
3663 /* some individual align was specified */
3664 if (a)
3665 align = a;
3667 if (type->ref->type.t == VT_UNION) {
3668 if (pcc && bit_size >= 0)
3669 size = (bit_size + 7) >> 3;
3670 offset = 0;
3671 if (size > c)
3672 c = size;
3674 } else if (bit_size < 0) {
3675 if (pcc)
3676 c += (bit_pos + 7) >> 3;
3677 c = (c + align - 1) & -align;
3678 offset = c;
3679 if (size > 0)
3680 c += size;
3681 bit_pos = 0;
3682 prevbt = VT_STRUCT;
3683 prev_bit_size = 0;
3685 } else {
3686 /* A bit-field. Layout is more complicated. There are two
3687 options: PCC (GCC) compatible and MS compatible */
3688 if (pcc) {
3689 /* In PCC layout a bit-field is placed adjacent to the
3690 preceding bit-fields, except if:
3691 - it has zero-width
3692 - an individual alignment was given
3693 - it would overflow its base type container and
3694 there is no packing */
3695 if (bit_size == 0) {
3696 new_field:
3697 c = (c + ((bit_pos + 7) >> 3) + align - 1) & -align;
3698 bit_pos = 0;
3699 } else if (f->a.aligned) {
3700 goto new_field;
3701 } else if (!packed) {
3702 int a8 = align * 8;
3703 int ofs = ((c * 8 + bit_pos) % a8 + bit_size + a8 - 1) / a8;
3704 if (ofs > size / align)
3705 goto new_field;
3708 /* in pcc mode, long long bitfields have type int if they fit */
3709 if (size == 8 && bit_size <= 32)
3710 f->type.t = (f->type.t & ~VT_BTYPE) | VT_INT, size = 4;
3712 while (bit_pos >= align * 8)
3713 c += align, bit_pos -= align * 8;
3714 offset = c;
3716 /* In PCC layout named bit-fields influence the alignment
3717 of the containing struct using the base type's alignment,
3718 except for packed fields (which already have the correct alignment here). */
3719 if (f->v & SYM_FIRST_ANOM
3720 // && bit_size // ??? gcc on ARM/rpi does that
3722 align = 1;
3724 } else {
3725 bt = f->type.t & VT_BTYPE;
3726 if ((bit_pos + bit_size > size * 8)
3727 || (bit_size > 0) == (bt != prevbt)
3729 c = (c + align - 1) & -align;
3730 offset = c;
3731 bit_pos = 0;
3732 /* In MS bitfield mode a bit-field run always uses
3733 at least as many bits as the underlying type.
3734 To start a new run it's also required that this
3735 or the last bit-field had non-zero width. */
3736 if (bit_size || prev_bit_size)
3737 c += size;
3739 /* In MS layout the record's alignment is normally
3740 influenced by the field, except for a zero-width
3741 field at the start of a run (further zero-width
3742 fields do influence it again). */
3743 if (bit_size == 0 && prevbt != bt)
3744 align = 1;
3745 prevbt = bt;
3746 prev_bit_size = bit_size;
3749 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
3750 | (bit_pos << VT_STRUCT_SHIFT);
3751 bit_pos += bit_size;
3753 if (align > maxalign)
3754 maxalign = align;
3756 #ifdef BF_DEBUG
3757 printf("set field %s offset %-2d size %-2d align %-2d",
3758 get_tok_str(f->v & ~SYM_FIELD, NULL), offset, size, align);
3759 if (f->type.t & VT_BITFIELD) {
3760 printf(" pos %-2d bits %-2d",
3761 BIT_POS(f->type.t),
3762 BIT_SIZE(f->type.t)
3765 printf("\n");
3766 #endif
3768 if (f->v & SYM_FIRST_ANOM && (f->type.t & VT_BTYPE) == VT_STRUCT) {
3769 Sym *ass;
3770 /* An anonymous struct/union. Adjust member offsets
3771 to reflect the real offset of our containing struct.
3772 Also set the offset of this anon member inside
3773 the outer struct to be zero. This way it works
3774 both when accessing the field offset directly
3775 (from the base object) and when recursing over
3776 members in initializer handling. */
3777 int v2 = f->type.ref->v;
3778 if (!(v2 & SYM_FIELD) &&
3779 (v2 & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
3780 Sym **pps;
3781 /* This happens only with MS extensions. The
3782 anon member has a named struct type, so it
3783 potentially is shared with other references.
3784 We need to unshare members so we can modify
3785 them. */
3786 ass = f->type.ref;
3787 f->type.ref = sym_push(anon_sym++ | SYM_FIELD,
3788 &f->type.ref->type, 0,
3789 f->type.ref->c);
3790 pps = &f->type.ref->next;
3791 while ((ass = ass->next) != NULL) {
3792 *pps = sym_push(ass->v, &ass->type, 0, ass->c);
3793 pps = &((*pps)->next);
3795 *pps = NULL;
3797 struct_add_offset(f->type.ref, offset);
3798 f->c = 0;
3799 } else {
3800 f->c = offset;
3803 f->r = 0;
3806 if (pcc)
3807 c += (bit_pos + 7) >> 3;
3809 /* store size and alignment */
3810 a = bt = ad->a.aligned ? 1 << (ad->a.aligned - 1) : 1;
3811 if (a < maxalign)
3812 a = maxalign;
3813 type->ref->r = a;
3814 if (pragma_pack && pragma_pack < maxalign && 0 == pcc) {
3815 /* can happen if individual align for some member was given. In
3816 this case MSVC ignores maxalign when aligning the size */
3817 a = pragma_pack;
3818 if (a < bt)
3819 a = bt;
3821 c = (c + a - 1) & -a;
3822 type->ref->c = c;
3824 #ifdef BF_DEBUG
3825 printf("struct size %-2d align %-2d\n\n", c, a), fflush(stdout);
3826 #endif
3828 /* check whether we can access bitfields by their type */
3829 for (f = type->ref->next; f; f = f->next) {
3830 int s, px, cx, c0;
3831 CType t;
3833 if (0 == (f->type.t & VT_BITFIELD))
3834 continue;
3835 f->type.ref = f;
3836 f->auxtype = -1;
3837 bit_size = BIT_SIZE(f->type.t);
3838 if (bit_size == 0)
3839 continue;
3840 bit_pos = BIT_POS(f->type.t);
3841 size = type_size(&f->type, &align);
3842 if (bit_pos + bit_size <= size * 8 && f->c + size <= c)
3843 continue;
3845 /* try to access the field using a different type */
3846 c0 = -1, s = align = 1;
3847 for (;;) {
3848 px = f->c * 8 + bit_pos;
3849 cx = (px >> 3) & -align;
3850 px = px - (cx << 3);
3851 if (c0 == cx)
3852 break;
3853 s = (px + bit_size + 7) >> 3;
3854 if (s > 4) {
3855 t.t = VT_LLONG;
3856 } else if (s > 2) {
3857 t.t = VT_INT;
3858 } else if (s > 1) {
3859 t.t = VT_SHORT;
3860 } else {
3861 t.t = VT_BYTE;
3863 s = type_size(&t, &align);
3864 c0 = cx;
3867 if (px + bit_size <= s * 8 && cx + s <= c) {
3868 /* update offset and bit position */
3869 f->c = cx;
3870 bit_pos = px;
3871 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
3872 | (bit_pos << VT_STRUCT_SHIFT);
3873 if (s != size)
3874 f->auxtype = t.t;
3875 #ifdef BF_DEBUG
3876 printf("FIX field %s offset %-2d size %-2d align %-2d "
3877 "pos %-2d bits %-2d\n",
3878 get_tok_str(f->v & ~SYM_FIELD, NULL),
3879 cx, s, align, px, bit_size);
3880 #endif
3881 } else {
3882 /* fall back to load/store single-byte wise */
3883 f->auxtype = VT_STRUCT;
3884 #ifdef BF_DEBUG
3885 printf("FIX field %s : load byte-wise\n",
3886 get_tok_str(f->v & ~SYM_FIELD, NULL));
3887 #endif
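/* Illustrative sketch (editor's addition, not part of tcc): the two layout
   modes above differ for a struct such as this; in PCC/GCC mode the three
   bit-fields typically share one int-sized unit (sizeof == 4), while in MS
   mode (ms_bitfields) a new storage unit is started whenever the base type
   changes, giving a larger struct: */
#if 0
struct bf_demo {
    char  a : 3;
    int   b : 5;
    short c : 4;
};
#endif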
3892 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
3893 static void struct_decl(CType *type, int u)
3895 int v, c, size, align, flexible;
3896 int bit_size, bsize, bt;
3897 Sym *s, *ss, **ps;
3898 AttributeDef ad, ad1;
3899 CType type1, btype;
3901 memset(&ad, 0, sizeof ad);
3902 next();
3903 parse_attribute(&ad);
3904 if (tok != '{') {
3905 v = tok;
3906 next();
3907 /* struct already defined ? return it */
3908 if (v < TOK_IDENT)
3909 expect("struct/union/enum name");
3910 s = struct_find(v);
3911 if (s && (s->sym_scope == local_scope || tok != '{')) {
3912 if (u == s->type.t)
3913 goto do_decl;
3914 if (u == VT_ENUM && IS_ENUM(s->type.t))
3915 goto do_decl;
3916 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
3918 } else {
3919 v = anon_sym++;
3921 /* Record the original enum/struct/union token. */
3922 type1.t = u == VT_ENUM ? u | VT_INT | VT_UNSIGNED : u;
3923 type1.ref = NULL;
3924 /* we put an undefined size for struct/union */
3925 s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
3926 s->r = 0; /* default alignment is zero, as in gcc */
3927 do_decl:
3928 type->t = s->type.t;
3929 type->ref = s;
3931 if (tok == '{') {
3932 next();
3933 if (s->c != -1)
3934 tcc_error("struct/union/enum already defined");
3935 /* cannot be empty */
3936 /* empty enums are not allowed */
3937 ps = &s->next;
3938 if (u == VT_ENUM) {
3939 long long ll = 0, pl = 0, nl = 0;
3940 CType t;
3941 t.ref = s;
3942 /* enum symbols have static storage */
3943 t.t = VT_INT|VT_STATIC|VT_ENUM_VAL;
3944 for(;;) {
3945 v = tok;
3946 if (v < TOK_UIDENT)
3947 expect("identifier");
3948 ss = sym_find(v);
3949 if (ss && !local_stack)
3950 tcc_error("redefinition of enumerator '%s'",
3951 get_tok_str(v, NULL));
3952 next();
3953 if (tok == '=') {
3954 next();
3955 ll = expr_const64();
3957 ss = sym_push(v, &t, VT_CONST, 0);
3958 ss->enum_val = ll;
3959 *ps = ss, ps = &ss->next;
3960 if (ll < nl)
3961 nl = ll;
3962 if (ll > pl)
3963 pl = ll;
3964 if (tok != ',')
3965 break;
3966 next();
3967 ll++;
3968 /* NOTE: we accept a trailing comma */
3969 if (tok == '}')
3970 break;
3972 skip('}');
3973 /* set integral type of the enum */
3974 t.t = VT_INT;
3975 if (nl >= 0) {
3976 if (pl != (unsigned)pl)
3977 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
3978 t.t |= VT_UNSIGNED;
3979 } else if (pl != (int)pl || nl != (int)nl)
3980 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
3981 s->type.t = type->t = t.t | VT_ENUM;
3982 s->c = 0;
3983 /* set type for enum members */
3984 for (ss = s->next; ss; ss = ss->next) {
3985 ll = ss->enum_val;
3986 if (ll == (int)ll) /* default is int if it fits */
3987 continue;
3988 if (t.t & VT_UNSIGNED) {
3989 ss->type.t |= VT_UNSIGNED;
3990 if (ll == (unsigned)ll)
3991 continue;
3993 ss->type.t = (ss->type.t & ~VT_BTYPE)
3994 | (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
3996 } else {
3997 c = 0;
3998 flexible = 0;
3999 while (tok != '}') {
4000 if (!parse_btype(&btype, &ad1)) {
4001 skip(';');
4002 continue;
4004 while (1) {
4005 if (flexible)
4006 tcc_error("flexible array member '%s' not at the end of struct",
4007 get_tok_str(v, NULL));
4008 bit_size = -1;
4009 v = 0;
4010 type1 = btype;
4011 if (tok != ':') {
4012 if (tok != ';')
4013 type_decl(&type1, &ad1, &v, TYPE_DIRECT);
4014 if (v == 0) {
4015 if ((type1.t & VT_BTYPE) != VT_STRUCT)
4016 expect("identifier");
4017 else {
4018 int v = btype.ref->v;
4019 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
4020 if (tcc_state->ms_extensions == 0)
4021 expect("identifier");
4025 if (type_size(&type1, &align) < 0) {
4026 if ((u == VT_STRUCT) && (type1.t & VT_ARRAY) && c)
4027 flexible = 1;
4028 else
4029 tcc_error("field '%s' has incomplete type",
4030 get_tok_str(v, NULL));
4032 if ((type1.t & VT_BTYPE) == VT_FUNC ||
4033 (type1.t & VT_BTYPE) == VT_VOID ||
4034 (type1.t & VT_STORAGE))
4035 tcc_error("invalid type for '%s'",
4036 get_tok_str(v, NULL));
4038 if (tok == ':') {
4039 next();
4040 bit_size = expr_const();
4041 /* XXX: handle v = 0 case for messages */
4042 if (bit_size < 0)
4043 tcc_error("negative width in bit-field '%s'",
4044 get_tok_str(v, NULL));
4045 if (v && bit_size == 0)
4046 tcc_error("zero width for bit-field '%s'",
4047 get_tok_str(v, NULL));
4048 parse_attribute(&ad1);
4050 size = type_size(&type1, &align);
4051 if (bit_size >= 0) {
4052 bt = type1.t & VT_BTYPE;
4053 if (bt != VT_INT &&
4054 bt != VT_BYTE &&
4055 bt != VT_SHORT &&
4056 bt != VT_BOOL &&
4057 bt != VT_LLONG)
4058 tcc_error("bitfields must have scalar type");
4059 bsize = size * 8;
4060 if (bit_size > bsize) {
4061 tcc_error("width of '%s' exceeds its type",
4062 get_tok_str(v, NULL));
4063 } else if (bit_size == bsize
4064 && !ad.a.packed && !ad1.a.packed) {
4065 /* no need for bit fields */
4067 } else if (bit_size == 64) {
4068 tcc_error("field width 64 not implemented");
4069 } else {
4070 type1.t = (type1.t & ~VT_STRUCT_MASK)
4071 | VT_BITFIELD
4072 | (bit_size << (VT_STRUCT_SHIFT + 6));
4075 if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
4076 /* Remember we've seen a real field to check
4077 for placement of flexible array member. */
4078 c = 1;
4080 /* If member is a struct or bit-field, enforce
4081 placing into the struct (as anonymous). */
4082 if (v == 0 &&
4083 ((type1.t & VT_BTYPE) == VT_STRUCT ||
4084 bit_size >= 0)) {
4085 v = anon_sym++;
4087 if (v) {
4088 ss = sym_push(v | SYM_FIELD, &type1, 0, 0);
4089 ss->a = ad1.a;
4090 *ps = ss;
4091 ps = &ss->next;
4093 if (tok == ';' || tok == TOK_EOF)
4094 break;
4095 skip(',');
4097 skip(';');
4099 skip('}');
4100 parse_attribute(&ad);
4101 struct_layout(type, &ad);
4106 static void sym_to_attr(AttributeDef *ad, Sym *s)
4108 merge_symattr(&ad->a, &s->a);
4109 merge_funcattr(&ad->f, &s->f);
4112 /* Add type qualifiers to a type. If the type is an array then the qualifiers
4113 are added to the element type, copied because it could be a typedef. */
4114 static void parse_btype_qualify(CType *type, int qualifiers)
4116 while (type->t & VT_ARRAY) {
4117 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
4118 type = &type->ref->type;
4120 type->t |= qualifiers;
4123 /* return 0 if no type declaration. otherwise, return the basic type
4124 and skip it.
4126 static int parse_btype(CType *type, AttributeDef *ad)
4128 int t, u, bt, st, type_found, typespec_found, g;
4129 Sym *s;
4130 CType type1;
4132 memset(ad, 0, sizeof(AttributeDef));
4133 type_found = 0;
4134 typespec_found = 0;
4135 t = VT_INT;
4136 bt = st = -1;
4137 type->ref = NULL;
4139 while(1) {
4140 switch(tok) {
4141 case TOK_EXTENSION:
4142 /* currently, __extension__ is simply ignored */
4143 next();
4144 continue;
4146 /* basic types */
4147 case TOK_CHAR:
4148 u = VT_BYTE;
4149 basic_type:
4150 next();
4151 basic_type1:
4152 if (u == VT_SHORT || u == VT_LONG) {
4153 if (st != -1 || (bt != -1 && bt != VT_INT))
4154 tmbt: tcc_error("too many basic types");
4155 st = u;
4156 } else {
4157 if (bt != -1 || (st != -1 && u != VT_INT))
4158 goto tmbt;
4159 bt = u;
4161 if (u != VT_INT)
4162 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
4163 typespec_found = 1;
4164 break;
4165 case TOK_VOID:
4166 u = VT_VOID;
4167 goto basic_type;
4168 case TOK_SHORT:
4169 u = VT_SHORT;
4170 goto basic_type;
4171 case TOK_INT:
4172 u = VT_INT;
4173 goto basic_type;
4174 case TOK_LONG:
4175 if ((t & VT_BTYPE) == VT_DOUBLE) {
4176 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
4177 } else if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
4178 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LLONG;
4179 } else {
4180 u = VT_LONG;
4181 goto basic_type;
4183 next();
4184 break;
4185 #ifdef TCC_TARGET_ARM64
4186 case TOK_UINT128:
4187 /* GCC's __uint128_t appears in some Linux header files. Make it a
4188 synonym for long double to get the size and alignment right. */
4189 u = VT_LDOUBLE;
4190 goto basic_type;
4191 #endif
4192 case TOK_BOOL:
4193 u = VT_BOOL;
4194 goto basic_type;
4195 case TOK_FLOAT:
4196 u = VT_FLOAT;
4197 goto basic_type;
4198 case TOK_DOUBLE:
4199 if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
4200 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
4201 } else {
4202 u = VT_DOUBLE;
4203 goto basic_type;
4205 next();
4206 break;
4207 case TOK_ENUM:
4208 struct_decl(&type1, VT_ENUM);
4209 basic_type2:
4210 u = type1.t;
4211 type->ref = type1.ref;
4212 goto basic_type1;
4213 case TOK_STRUCT:
4214 struct_decl(&type1, VT_STRUCT);
4215 goto basic_type2;
4216 case TOK_UNION:
4217 struct_decl(&type1, VT_UNION);
4218 goto basic_type2;
4220 /* type modifiers */
4221 case TOK_CONST1:
4222 case TOK_CONST2:
4223 case TOK_CONST3:
4224 type->t = t;
4225 parse_btype_qualify(type, VT_CONSTANT);
4226 t = type->t;
4227 next();
4228 break;
4229 case TOK_VOLATILE1:
4230 case TOK_VOLATILE2:
4231 case TOK_VOLATILE3:
4232 type->t = t;
4233 parse_btype_qualify(type, VT_VOLATILE);
4234 t = type->t;
4235 next();
4236 break;
4237 case TOK_SIGNED1:
4238 case TOK_SIGNED2:
4239 case TOK_SIGNED3:
4240 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
4241 tcc_error("signed and unsigned modifier");
4242 t |= VT_DEFSIGN;
4243 next();
4244 typespec_found = 1;
4245 break;
4246 case TOK_REGISTER:
4247 case TOK_AUTO:
4248 case TOK_RESTRICT1:
4249 case TOK_RESTRICT2:
4250 case TOK_RESTRICT3:
4251 next();
4252 break;
4253 case TOK_UNSIGNED:
4254 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
4255 tcc_error("signed and unsigned modifier");
4256 t |= VT_DEFSIGN | VT_UNSIGNED;
4257 next();
4258 typespec_found = 1;
4259 break;
4261 /* storage */
4262 case TOK_EXTERN:
4263 g = VT_EXTERN;
4264 goto storage;
4265 case TOK_STATIC:
4266 g = VT_STATIC;
4267 goto storage;
4268 case TOK_TYPEDEF:
4269 g = VT_TYPEDEF;
4270 goto storage;
4271 storage:
4272 if (t & (VT_EXTERN|VT_STATIC|VT_TYPEDEF) & ~g)
4273 tcc_error("multiple storage classes");
4274 t |= g;
4275 next();
4276 break;
4277 case TOK_INLINE1:
4278 case TOK_INLINE2:
4279 case TOK_INLINE3:
4280 t |= VT_INLINE;
4281 next();
4282 break;
4284 /* GNUC attribute */
4285 case TOK_ATTRIBUTE1:
4286 case TOK_ATTRIBUTE2:
4287 parse_attribute(ad);
4288 if (ad->attr_mode) {
4289 u = ad->attr_mode -1;
4290 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
4292 continue;
4293 /* GNUC typeof */
4294 case TOK_TYPEOF1:
4295 case TOK_TYPEOF2:
4296 case TOK_TYPEOF3:
4297 next();
4298 parse_expr_type(&type1);
4299 /* remove all storage modifiers except typedef */
4300 type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
4301 if (type1.ref)
4302 sym_to_attr(ad, type1.ref);
4303 goto basic_type2;
4304 default:
4305 if (typespec_found)
4306 goto the_end;
4307 s = sym_find(tok);
4308 if (!s || !(s->type.t & VT_TYPEDEF))
4309 goto the_end;
4310 t &= ~(VT_BTYPE|VT_LONG);
4311 u = t & ~(VT_CONSTANT | VT_VOLATILE), t ^= u;
4312 type->t = (s->type.t & ~VT_TYPEDEF) | u;
4313 type->ref = s->type.ref;
4314 if (t)
4315 parse_btype_qualify(type, t);
4316 t = type->t;
4317 /* get attributes from typedef */
4318 sym_to_attr(ad, s);
4319 next();
4320 typespec_found = 1;
4321 st = bt = -2;
4322 break;
4324 type_found = 1;
4326 the_end:
4327 if (tcc_state->char_is_unsigned) {
4328 if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
4329 t |= VT_UNSIGNED;
4331 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
4332 bt = t & (VT_BTYPE|VT_LONG);
4333 if (bt == VT_LONG)
4334 t |= LONG_SIZE == 8 ? VT_LLONG : VT_INT;
4335 #ifdef TCC_TARGET_PE
4336 if (bt == VT_LDOUBLE)
4337 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_DOUBLE;
4338 #endif
4339 type->t = t;
4340 return type_found;
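/* Illustrative sketch (editor's addition, not part of tcc): parse_btype()
   consumes only the declaration specifiers; for a declaration like the one
   below it returns a 'const unsigned long long' base type with VT_STATIC set,
   and the declarators '*p' and 'a[4]' are parsed afterwards by type_decl()
   and post_type(): */
#if 0
static const unsigned long long *p, a[4];
#endif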
4343 /* convert a function parameter type (array to pointer and function to
4344 function pointer) */
4345 static inline void convert_parameter_type(CType *pt)
4347 /* remove const and volatile qualifiers (XXX: const could be used
4348 to indicate a const function parameter) */
4349 pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
4350 /* array must be transformed to pointer according to ANSI C */
4351 pt->t &= ~VT_ARRAY;
4352 if ((pt->t & VT_BTYPE) == VT_FUNC) {
4353 mk_pointer(pt);
4357 ST_FUNC void parse_asm_str(CString *astr)
4359 skip('(');
4360 parse_mult_str(astr, "string constant");
4363 /* Parse an asm label and return the token */
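/* Example of the accepted GNU C asm-label syntax (illustrative):
       int counter asm("real_counter_name");
   The string becomes the assembler-level name of the symbol. */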
4364 static int asm_label_instr(void)
4366 int v;
4367 CString astr;
4369 next();
4370 parse_asm_str(&astr);
4371 skip(')');
4372 #ifdef ASM_DEBUG
4373 printf("asm_alias: \"%s\"\n", (char *)astr.data);
4374 #endif
4375 v = tok_alloc(astr.data, astr.size - 1)->tok;
4376 cstr_free(&astr);
4377 return v;
4380 static int post_type(CType *type, AttributeDef *ad, int storage, int td)
4382 int n, l, t1, arg_size, align;
4383 Sym **plast, *s, *first;
4384 AttributeDef ad1;
4385 CType pt;
4387 if (tok == '(') {
4388 /* function type, or recursive declarator (return if so) */
4389 next();
4390 if (td && !(td & TYPE_ABSTRACT))
4391 return 0;
4392 if (tok == ')')
4393 l = 0;
4394 else if (parse_btype(&pt, &ad1))
4395 l = FUNC_NEW;
4396 else if (td) {
4397 merge_attr (ad, &ad1);
4398 return 0;
4399 } else
4400 l = FUNC_OLD;
4401 first = NULL;
4402 plast = &first;
4403 arg_size = 0;
4404 if (l) {
4405 for(;;) {
4406 /* read param name and compute offset */
4407 if (l != FUNC_OLD) {
4408 if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
4409 break;
4410 type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT);
4411 if ((pt.t & VT_BTYPE) == VT_VOID)
4412 tcc_error("parameter declared as void");
4413 } else {
4414 n = tok;
4415 if (n < TOK_UIDENT)
4416 expect("identifier");
4417 pt.t = VT_VOID; /* invalid type */
4418 next();
4420 convert_parameter_type(&pt);
4421 arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
4422 s = sym_push(n | SYM_FIELD, &pt, 0, 0);
4423 *plast = s;
4424 plast = &s->next;
4425 if (tok == ')')
4426 break;
4427 skip(',');
4428 if (l == FUNC_NEW && tok == TOK_DOTS) {
4429 l = FUNC_ELLIPSIS;
4430 next();
4431 break;
4433 if (l == FUNC_NEW && !parse_btype(&pt, &ad1))
4434 tcc_error("invalid type");
4436 } else
4437 /* if no parameters, then old type prototype */
4438 l = FUNC_OLD;
4439 skip(')');
4440 /* NOTE: const is ignored in returned type as it has a special
4441 meaning in gcc / C++ */
4442 type->t &= ~VT_CONSTANT;
4443 /* some ancient pre-K&R C allows a function to return an array
4444 and the array brackets to be put after the arguments, such
4445 that "int c()[]" means something like "int[] c()" */
4446 if (tok == '[') {
4447 next();
4448 skip(']'); /* only handle simple "[]" */
4449 mk_pointer(type);
4451 /* we push an anonymous symbol which will contain the function prototype */
4452 ad->f.func_args = arg_size;
4453 ad->f.func_type = l;
4454 s = sym_push(SYM_FIELD, type, 0, 0);
4455 s->a = ad->a;
4456 s->f = ad->f;
4457 s->next = first;
4458 type->t = VT_FUNC;
4459 type->ref = s;
4460 } else if (tok == '[') {
4461 int saved_nocode_wanted = nocode_wanted;
4462 /* array definition */
4463 next();
4464 while (1) {
4465 /* XXX The optional type-quals and static should only be accepted
4466 in parameter decls. The '*' as well, and then even only
4467 in prototypes (not function defs). */
4468 switch (tok) {
4469 case TOK_RESTRICT1: case TOK_RESTRICT2: case TOK_RESTRICT3:
4470 case TOK_CONST1:
4471 case TOK_VOLATILE1:
4472 case TOK_STATIC:
4473 case '*':
4474 next();
4475 continue;
4476 default:
4477 break;
4479 break;
4481 n = -1;
4482 t1 = 0;
4483 if (tok != ']') {
4484 if (!local_stack || (storage & VT_STATIC))
4485 vpushi(expr_const());
4486 else {
4487 /* the length of a VLA (which can only occur with local_stack && !VT_STATIC)
4488 must always be evaluated, even under nocode_wanted,
4489 so that its size slot is initialized (e.g. under sizeof
4490 or typeof). */
4491 nocode_wanted = 0;
4492 gexpr();
4494 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4495 n = vtop->c.i;
4496 if (n < 0)
4497 tcc_error("invalid array size");
4498 } else {
4499 if (!is_integer_btype(vtop->type.t & VT_BTYPE))
4500 tcc_error("size of variable length array should be an integer");
4501 t1 = VT_VLA;
4504 skip(']');
4505 /* parse next post type */
4506 post_type(type, ad, storage, 0);
4507 if (type->t == VT_FUNC)
4508 tcc_error("declaration of an array of functions");
4509 t1 |= type->t & VT_VLA;
4511 if (t1 & VT_VLA) {
4512 loc -= type_size(&int_type, &align);
4513 loc &= -align;
4514 n = loc;
4516 vla_runtime_type_size(type, &align);
4517 gen_op('*');
4518 vset(&int_type, VT_LOCAL|VT_LVAL, n);
4519 vswap();
4520 vstore();
4522 if (n != -1)
4523 vpop();
4524 nocode_wanted = saved_nocode_wanted;
4526 /* we push an anonymous symbol which will contain the array
4527 element type */
4528 s = sym_push(SYM_FIELD, type, 0, n);
4529 type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
4530 type->ref = s;
4532 return 1;
4535 /* Parse a type declarator (except basic type), and return the type
4536 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4537 expected. 'type' should contain the basic type. 'ad' is the
4538 attribute definition of the basic type. It can be modified by
4539 type_decl(). If this (possibly abstract) declarator is a pointer chain
4540 it returns the innermost pointed to type (equals *type, but is a different
4541 pointer), otherwise returns type itself, that's used for recursive calls. */
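/* Illustrative example: for the declaration
       int *(*fp)(void);
   parse_btype() has already consumed 'int'; type_decl() then parses the
   remaining declarator '*(*fp)(void)', applying the pointer, nested
   declarator and function derivations around the basic type. */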
4542 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td)
4544 CType *post, *ret;
4545 int qualifiers, storage;
4547 /* recursive type, remove storage bits first, apply them later again */
4548 storage = type->t & VT_STORAGE;
4549 type->t &= ~VT_STORAGE;
4550 post = ret = type;
4552 while (tok == '*') {
4553 qualifiers = 0;
4554 redo:
4555 next();
4556 switch(tok) {
4557 case TOK_CONST1:
4558 case TOK_CONST2:
4559 case TOK_CONST3:
4560 qualifiers |= VT_CONSTANT;
4561 goto redo;
4562 case TOK_VOLATILE1:
4563 case TOK_VOLATILE2:
4564 case TOK_VOLATILE3:
4565 qualifiers |= VT_VOLATILE;
4566 goto redo;
4567 case TOK_RESTRICT1:
4568 case TOK_RESTRICT2:
4569 case TOK_RESTRICT3:
4570 goto redo;
4571 /* XXX: clarify attribute handling */
4572 case TOK_ATTRIBUTE1:
4573 case TOK_ATTRIBUTE2:
4574 parse_attribute(ad);
4575 break;
4577 mk_pointer(type);
4578 type->t |= qualifiers;
4579 if (ret == type)
4580 /* innermost pointed to type is the one for the first derivation */
4581 ret = pointed_type(type);
4584 if (tok == '(') {
4585 /* This is possibly a parameter type list for abstract declarators
4586 ('int ()'), use post_type for testing this. */
4587 if (!post_type(type, ad, 0, td)) {
4588 /* It's not, so it's a nested declarator, and the post operations
4589 apply to the innermost pointed to type (if any). */
4590 /* XXX: this is not correct to modify 'ad' at this point, but
4591 the syntax is not clear */
4592 parse_attribute(ad);
4593 post = type_decl(type, ad, v, td);
4594 skip(')');
4596 } else if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
4597 /* type identifier */
4598 *v = tok;
4599 next();
4600 } else {
4601 if (!(td & TYPE_ABSTRACT))
4602 expect("identifier");
4603 *v = 0;
4605 post_type(post, ad, storage, 0);
4606 parse_attribute(ad);
4607 type->t |= storage;
4608 return ret;
4611 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
4612 ST_FUNC int lvalue_type(int t)
4614 int bt, r;
4615 r = VT_LVAL;
4616 bt = t & VT_BTYPE;
4617 if (bt == VT_BYTE || bt == VT_BOOL)
4618 r |= VT_LVAL_BYTE;
4619 else if (bt == VT_SHORT)
4620 r |= VT_LVAL_SHORT;
4621 else
4622 return r;
4623 if (t & VT_UNSIGNED)
4624 r |= VT_LVAL_UNSIGNED;
4625 return r;
4628 /* indirection with full error checking and bound check */
4629 ST_FUNC void indir(void)
4631 if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
4632 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
4633 return;
4634 expect("pointer");
4636 if (vtop->r & VT_LVAL)
4637 gv(RC_INT);
4638 vtop->type = *pointed_type(&vtop->type);
4639 /* Arrays and functions are never lvalues */
4640 if (!(vtop->type.t & VT_ARRAY) && !(vtop->type.t & VT_VLA)
4641 && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
4642 vtop->r |= lvalue_type(vtop->type.t);
4643 /* if bound checking, the referenced pointer must be checked */
4644 #ifdef CONFIG_TCC_BCHECK
4645 if (tcc_state->do_bounds_check)
4646 vtop->r |= VT_MUSTBOUND;
4647 #endif
4651 /* pass a parameter to a function and do type checking and casting */
4652 static void gfunc_param_typed(Sym *func, Sym *arg)
4654 int func_type;
4655 CType type;
4657 func_type = func->f.func_type;
4658 if (func_type == FUNC_OLD ||
4659 (func_type == FUNC_ELLIPSIS && arg == NULL)) {
4660 /* default casting : only need to convert float to double */
4661 if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
4662 gen_cast_s(VT_DOUBLE);
4663 } else if (vtop->type.t & VT_BITFIELD) {
4664 type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
4665 type.ref = vtop->type.ref;
4666 gen_cast(&type);
4668 } else if (arg == NULL) {
4669 tcc_error("too many arguments to function");
4670 } else {
4671 type = arg->type;
4672 type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
4673 gen_assign_cast(&type);
4677 /* parse an expression and return its type without any side effect. */
4678 static void expr_type(CType *type, void (*expr_fn)(void))
4680 nocode_wanted++;
4681 expr_fn();
4682 *type = vtop->type;
4683 vpop();
4684 nocode_wanted--;
4687 /* parse an expression of the form '(type)' or '(expr)' and return its
4688 type */
4689 static void parse_expr_type(CType *type)
4691 int n;
4692 AttributeDef ad;
4694 skip('(');
4695 if (parse_btype(type, &ad)) {
4696 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4697 } else {
4698 expr_type(type, gexpr);
4700 skip(')');
4703 static void parse_type(CType *type)
4705 AttributeDef ad;
4706 int n;
4708 if (!parse_btype(type, &ad)) {
4709 expect("type");
4711 type_decl(type, &ad, &n, TYPE_ABSTRACT);
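/* Helper for the __builtin_* parsers below: 'args' is a small spec string
   where 'e' means "parse an assignment expression" and 't' means "parse a
   type name"; e.g. "et" matches a call form like __builtin_va_arg(expr, type).
   If 'nc' is set, no code is generated while parsing the arguments. */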
4714 static void parse_builtin_params(int nc, const char *args)
4716 char c, sep = '(';
4717 CType t;
4718 if (nc)
4719 nocode_wanted++;
4720 next();
4721 while ((c = *args++)) {
4722 skip(sep);
4723 sep = ',';
4724 switch (c) {
4725 case 'e': expr_eq(); continue;
4726 case 't': parse_type(&t); vpush(&t); continue;
4727 default: tcc_error("internal error"); break;
4730 skip(')');
4731 if (nc)
4732 nocode_wanted--;
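/* Scope cleanup support, as used by the GNU __attribute__((cleanup(fn)))
   extension, e.g. (illustrative):
       void unlock(int **p);
       int *l __attribute__((cleanup(unlock))) = get_lock();
   Each such variable registers its cleanup function; on scope exit, return
   or goto, the registered calls are emitted in reverse declaration order. */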
4735 static void try_call_scope_cleanup(Sym *stop)
4737 Sym *cls = current_cleanups;
4739 for (; cls != stop; cls = cls->ncl) {
4740 Sym *fs = cls->next;
4741 Sym *vs = cls->prev_tok;
4743 vpushsym(&fs->type, fs);
4744 vset(&vs->type, vs->r, vs->c);
4745 vtop->sym = vs;
4746 mk_pointer(&vtop->type);
4747 gaddrof();
4748 gfunc_call(1);
4752 static void try_call_cleanup_goto(Sym *cleanupstate)
4754 Sym *oc, *cc;
4755 int ocd, ccd;
4757 if (!current_cleanups)
4758 return;
4760 /* search the NCA (nearest common ancestor) of both cleanup chains, given parents and initial depth */
4761 ocd = cleanupstate ? cleanupstate->v & ~SYM_FIELD : 0;
4762 for (ccd = ncleanups, oc = cleanupstate; ocd > ccd; --ocd, oc = oc->ncl)
4764 for (cc = current_cleanups; ccd > ocd; --ccd, cc = cc->ncl)
4766 for (; cc != oc; cc = cc->ncl, oc = oc->ncl, --ccd)
4769 try_call_scope_cleanup(cc);
4772 ST_FUNC void unary(void)
4774 int n, t, align, size, r, sizeof_caller;
4775 CType type;
4776 Sym *s;
4777 AttributeDef ad;
4779 sizeof_caller = in_sizeof;
4780 in_sizeof = 0;
4781 type.ref = NULL;
4782 /* XXX: GCC 2.95.3 does not generate a jump table for this switch,
4783 although it would be better here */
4784 tok_next:
4785 switch(tok) {
4786 case TOK_EXTENSION:
4787 next();
4788 goto tok_next;
4789 case TOK_LCHAR:
4790 #ifdef TCC_TARGET_PE
4791 t = VT_SHORT|VT_UNSIGNED;
4792 goto push_tokc;
4793 #endif
4794 case TOK_CINT:
4795 case TOK_CCHAR:
4796 t = VT_INT;
4797 push_tokc:
4798 type.t = t;
4799 vsetc(&type, VT_CONST, &tokc);
4800 next();
4801 break;
4802 case TOK_CUINT:
4803 t = VT_INT | VT_UNSIGNED;
4804 goto push_tokc;
4805 case TOK_CLLONG:
4806 t = VT_LLONG;
4807 goto push_tokc;
4808 case TOK_CULLONG:
4809 t = VT_LLONG | VT_UNSIGNED;
4810 goto push_tokc;
4811 case TOK_CFLOAT:
4812 t = VT_FLOAT;
4813 goto push_tokc;
4814 case TOK_CDOUBLE:
4815 t = VT_DOUBLE;
4816 goto push_tokc;
4817 case TOK_CLDOUBLE:
4818 t = VT_LDOUBLE;
4819 goto push_tokc;
4820 case TOK_CLONG:
4821 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG;
4822 goto push_tokc;
4823 case TOK_CULONG:
4824 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG | VT_UNSIGNED;
4825 goto push_tokc;
4826 case TOK___FUNCTION__:
4827 if (!gnu_ext)
4828 goto tok_identifier;
4829 /* fall thru */
4830 case TOK___FUNC__:
4832 void *ptr;
4833 int len;
4834 /* special function name identifier */
4835 len = strlen(funcname) + 1;
4836 /* generate char[len] type */
4837 type.t = VT_BYTE;
4838 mk_pointer(&type);
4839 type.t |= VT_ARRAY;
4840 type.ref->c = len;
4841 vpush_ref(&type, data_section, data_section->data_offset, len);
4842 if (!NODATA_WANTED) {
4843 ptr = section_ptr_add(data_section, len);
4844 memcpy(ptr, funcname, len);
4846 next();
4848 break;
4849 case TOK_LSTR:
4850 #ifdef TCC_TARGET_PE
4851 t = VT_SHORT | VT_UNSIGNED;
4852 #else
4853 t = VT_INT;
4854 #endif
4855 goto str_init;
4856 case TOK_STR:
4857 /* string parsing */
4858 t = VT_BYTE;
4859 if (tcc_state->char_is_unsigned)
4860 t = VT_BYTE | VT_UNSIGNED;
4861 str_init:
4862 if (tcc_state->warn_write_strings)
4863 t |= VT_CONSTANT;
4864 type.t = t;
4865 mk_pointer(&type);
4866 type.t |= VT_ARRAY;
4867 memset(&ad, 0, sizeof(AttributeDef));
4868 decl_initializer_alloc(&type, &ad, VT_CONST, 2, 0, 0);
4869 break;
4870 case '(':
4871 next();
4872 /* cast ? */
4873 if (parse_btype(&type, &ad)) {
4874 type_decl(&type, &ad, &n, TYPE_ABSTRACT);
4875 skip(')');
4876 /* check ISOC99 compound literal */
4877 if (tok == '{') {
4878 /* data is allocated locally by default */
4879 if (global_expr)
4880 r = VT_CONST;
4881 else
4882 r = VT_LOCAL;
4883 /* all except arrays are lvalues */
4884 if (!(type.t & VT_ARRAY))
4885 r |= lvalue_type(type.t);
4886 memset(&ad, 0, sizeof(AttributeDef));
4887 decl_initializer_alloc(&type, &ad, r, 1, 0, 0);
4888 } else {
4889 if (sizeof_caller) {
4890 vpush(&type);
4891 return;
4893 unary();
4894 gen_cast(&type);
4896 } else if (tok == '{') {
4897 int saved_nocode_wanted = nocode_wanted;
4898 if (const_wanted)
4899 tcc_error("expected constant");
4900 /* save all registers */
4901 save_regs(0);
4902 /* statement expression : we do not accept break/continue
4903 inside as GCC does. We do retain the nocode_wanted state,
4904 as statement expressions can't ever be entered from the
4905 outside, so any reactivation of code emission (from labels
4906 or loop heads) can be disabled again after the end of it. */
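/* Illustrative example of a statement expression (GNU extension):
       int x = ({ int t = f(); t * 2; });
   The value of the last expression statement becomes the value of the
   whole parenthesized block. */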
4907 block(NULL, NULL, 1);
4908 nocode_wanted = saved_nocode_wanted;
4909 skip(')');
4910 } else {
4911 gexpr();
4912 skip(')');
4914 break;
4915 case '*':
4916 next();
4917 unary();
4918 indir();
4919 break;
4920 case '&':
4921 next();
4922 unary();
4923 /* function names must be treated as function pointers,
4924 except for unary '&' and sizeof. Since we consider that
4925 functions are not lvalues, we only have to handle it
4926 there and in function calls. */
4927 /* arrays can also be used although they are not lvalues */
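/* Example: given 'int a[4]; void f(void);', both '&a' and '&f' are accepted
   here even though arrays and functions are not lvalues in the usual sense;
   '&a' has type 'int (*)[4]' and '&f' has type 'void (*)(void)'. */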
4928 if ((vtop->type.t & VT_BTYPE) != VT_FUNC &&
4929 !(vtop->type.t & VT_ARRAY))
4930 test_lvalue();
4931 mk_pointer(&vtop->type);
4932 gaddrof();
4933 break;
4934 case '!':
4935 next();
4936 unary();
4937 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4938 gen_cast_s(VT_BOOL);
4939 vtop->c.i = !vtop->c.i;
4940 } else if ((vtop->r & VT_VALMASK) == VT_CMP)
4941 vtop->c.i ^= 1;
4942 else {
4943 save_regs(1);
4944 vseti(VT_JMP, gvtst(1, 0));
4946 break;
4947 case '~':
4948 next();
4949 unary();
4950 vpushi(-1);
4951 gen_op('^');
4952 break;
4953 case '+':
4954 next();
4955 unary();
4956 if ((vtop->type.t & VT_BTYPE) == VT_PTR)
4957 tcc_error("pointer not accepted for unary plus");
4958 /* In order to force cast, we add zero, except for floating point
4959 where we really need a no-op (otherwise -0.0 would be transformed
4960 into +0.0). */
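/* Worked example: for 'char c', '+c' promotes to int via 'c + 0'; for
   floats the addition is skipped because under IEEE rules -0.0 + 0.0
   yields +0.0, which would change the sign of the value. */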
4961 if (!is_float(vtop->type.t)) {
4962 vpushi(0);
4963 gen_op('+');
4965 break;
4966 case TOK_SIZEOF:
4967 case TOK_ALIGNOF1:
4968 case TOK_ALIGNOF2:
4969 case TOK_ALIGNOF3:
4970 t = tok;
4971 next();
4972 in_sizeof++;
4973 expr_type(&type, unary); /* in_sizeof is reset to 0 at the start of unary() */
4974 s = vtop[1].sym; /* hack: accessing previous vtop */
4975 size = type_size(&type, &align);
4976 if (s && s->a.aligned)
4977 align = 1 << (s->a.aligned - 1);
4978 if (t == TOK_SIZEOF) {
4979 if (!(type.t & VT_VLA)) {
4980 if (size < 0)
4981 tcc_error("sizeof applied to an incomplete type");
4982 vpushs(size);
4983 } else {
4984 vla_runtime_type_size(&type, &align);
4986 } else {
4987 vpushs(align);
4989 vtop->type.t |= VT_UNSIGNED;
4990 break;
4992 case TOK_builtin_expect:
4993 /* __builtin_expect is a no-op for now */
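/* Typical use (illustrative): if (__builtin_expect(err, 0)) ...
   Both arguments are parsed; the hint is then discarded and the first
   argument is left as the result. */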
4994 parse_builtin_params(0, "ee");
4995 vpop();
4996 break;
4997 case TOK_builtin_types_compatible_p:
4998 parse_builtin_params(0, "tt");
4999 vtop[-1].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
5000 vtop[0].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
5001 n = is_compatible_types(&vtop[-1].type, &vtop[0].type);
5002 vtop -= 2;
5003 vpushi(n);
5004 break;
5005 case TOK_builtin_choose_expr:
5007 int64_t c;
5008 next();
5009 skip('(');
5010 c = expr_const64();
5011 skip(',');
5012 if (!c) {
5013 nocode_wanted++;
5015 expr_eq();
5016 if (!c) {
5017 vpop();
5018 nocode_wanted--;
5020 skip(',');
5021 if (c) {
5022 nocode_wanted++;
5024 expr_eq();
5025 if (c) {
5026 vpop();
5027 nocode_wanted--;
5029 skip(')');
5031 break;
5032 case TOK_builtin_constant_p:
5033 parse_builtin_params(1, "e");
5034 n = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
5035 vtop--;
5036 vpushi(n);
5037 break;
5038 case TOK_builtin_frame_address:
5039 case TOK_builtin_return_address:
5041 int tok1 = tok;
5042 int level;
5043 next();
5044 skip('(');
5045 if (tok != TOK_CINT) {
5046 tcc_error("%s only takes positive integers",
5047 tok1 == TOK_builtin_return_address ?
5048 "__builtin_return_address" :
5049 "__builtin_frame_address");
5051 level = (uint32_t)tokc.i;
5052 next();
5053 skip(')');
5054 type.t = VT_VOID;
5055 mk_pointer(&type);
5056 vset(&type, VT_LOCAL, 0); /* local frame */
5057 while (level--) {
5058 mk_pointer(&vtop->type);
5059 indir(); /* -> parent frame */
5061 if (tok1 == TOK_builtin_return_address) {
5062 // assume return address is just above frame pointer on stack
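// (illustrative) the code assumes a conventional frame-pointer layout:
// [fp + 0] holds the caller's saved frame pointer and [fp + PTR_SIZE]
// the return address, hence the PTR_SIZE offset added below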
5063 vpushi(PTR_SIZE);
5064 gen_op('+');
5065 mk_pointer(&vtop->type);
5066 indir();
5069 break;
5070 #ifdef TCC_TARGET_X86_64
5071 #ifdef TCC_TARGET_PE
5072 case TOK_builtin_va_start:
5073 parse_builtin_params(0, "ee");
5074 r = vtop->r & VT_VALMASK;
5075 if (r == VT_LLOCAL)
5076 r = VT_LOCAL;
5077 if (r != VT_LOCAL)
5078 tcc_error("__builtin_va_start expects a local variable");
5079 vtop->r = r;
5080 vtop->type = char_pointer_type;
5081 vtop->c.i += 8;
5082 vstore();
5083 break;
5084 #else
5085 case TOK_builtin_va_arg_types:
5086 parse_builtin_params(0, "t");
5087 vpushi(classify_x86_64_va_arg(&vtop->type));
5088 vswap();
5089 vpop();
5090 break;
5091 #endif
5092 #endif
5094 #ifdef TCC_TARGET_ARM64
5095 case TOK___va_start: {
5096 parse_builtin_params(0, "ee");
5097 //xx check types
5098 gen_va_start();
5099 vpushi(0);
5100 vtop->type.t = VT_VOID;
5101 break;
5103 case TOK___va_arg: {
5104 parse_builtin_params(0, "et");
5105 type = vtop->type;
5106 vpop();
5107 //xx check types
5108 gen_va_arg(&type);
5109 vtop->type = type;
5110 break;
5112 case TOK___arm64_clear_cache: {
5113 parse_builtin_params(0, "ee");
5114 gen_clear_cache();
5115 vpushi(0);
5116 vtop->type.t = VT_VOID;
5117 break;
5119 #endif
5120 /* pre operations */
5121 case TOK_INC:
5122 case TOK_DEC:
5123 t = tok;
5124 next();
5125 unary();
5126 inc(0, t);
5127 break;
5128 case '-':
5129 next();
5130 unary();
5131 t = vtop->type.t & VT_BTYPE;
5132 if (is_float(t)) {
5133 /* In IEEE negate(x) isn't subtract(0,x), but rather
5134 subtract(-0, x). */
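/* Worked example: negate(+0.0) must give -0.0; computing it as
   0.0 - 0.0 would give +0.0, while -0.0 - 0.0 gives -0.0 as required,
   hence the -0 constant pushed below. */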
5135 vpush(&vtop->type);
5136 if (t == VT_FLOAT)
5137 vtop->c.f = -1.0 * 0.0;
5138 else if (t == VT_DOUBLE)
5139 vtop->c.d = -1.0 * 0.0;
5140 else
5141 vtop->c.ld = -1.0 * 0.0;
5142 } else
5143 vpushi(0);
5144 vswap();
5145 gen_op('-');
5146 break;
5147 case TOK_LAND:
5148 if (!gnu_ext)
5149 goto tok_identifier;
5150 next();
5151 /* allow taking the address of a label */
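/* Example (GNU C): void *p = &&some_label; ... goto *p; */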
5152 if (tok < TOK_UIDENT)
5153 expect("label identifier");
5154 s = label_find(tok);
5155 if (!s) {
5156 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
5157 } else {
5158 if (s->r == LABEL_DECLARED)
5159 s->r = LABEL_FORWARD;
5161 if (!s->type.t) {
5162 s->type.t = VT_VOID;
5163 mk_pointer(&s->type);
5164 s->type.t |= VT_STATIC;
5166 vpushsym(&s->type, s);
5167 next();
5168 break;
5170 case TOK_GENERIC:
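/* C11 _Generic selection, e.g. (illustrative):
       _Generic(x, int: f_int, float: f_flt, default: f_dflt)(x)
   The association matching the type of 'x' is kept (its expression is
   saved and re-parsed); the others are skipped without generating code. */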
5172 CType controlling_type;
5173 int has_default = 0;
5174 int has_match = 0;
5175 int learn = 0;
5176 TokenString *str = NULL;
5177 int saved_const_wanted = const_wanted;
5179 next();
5180 skip('(');
5181 const_wanted = 0;
5182 expr_type(&controlling_type, expr_eq);
5183 controlling_type.t &= ~(VT_CONSTANT | VT_VOLATILE | VT_ARRAY);
5184 if ((controlling_type.t & VT_BTYPE) == VT_FUNC)
5185 mk_pointer(&controlling_type);
5186 const_wanted = saved_const_wanted;
5187 for (;;) {
5188 learn = 0;
5189 skip(',');
5190 if (tok == TOK_DEFAULT) {
5191 if (has_default)
5192 tcc_error("too many 'default'");
5193 has_default = 1;
5194 if (!has_match)
5195 learn = 1;
5196 next();
5197 } else {
5198 AttributeDef ad_tmp;
5199 int itmp;
5200 CType cur_type;
5201 parse_btype(&cur_type, &ad_tmp);
5202 type_decl(&cur_type, &ad_tmp, &itmp, TYPE_ABSTRACT);
5203 if (compare_types(&controlling_type, &cur_type, 0)) {
5204 if (has_match) {
5205 tcc_error("type match twice");
5207 has_match = 1;
5208 learn = 1;
5211 skip(':');
5212 if (learn) {
5213 if (str)
5214 tok_str_free(str);
5215 skip_or_save_block(&str);
5216 } else {
5217 skip_or_save_block(NULL);
5219 if (tok == ')')
5220 break;
5222 if (!str) {
5223 char buf[60];
5224 type_to_str(buf, sizeof buf, &controlling_type, NULL);
5225 tcc_error("type '%s' does not match any association", buf);
5227 begin_macro(str, 1);
5228 next();
5229 expr_eq();
5230 if (tok != TOK_EOF)
5231 expect(",");
5232 end_macro();
5233 next();
5234 break;
5236 // special qnan , snan and infinity values
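// IEEE-754 single-precision bit patterns: 0x7fc00000 is a quiet NaN,
// 0x7f800001 a signalling NaN and 0x7f800000 is +infinity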
5237 case TOK___NAN__:
5238 n = 0x7fc00000;
5239 special_math_val:
5240 vpushi(n);
5241 vtop->type.t = VT_FLOAT;
5242 next();
5243 break;
5244 case TOK___SNAN__:
5245 n = 0x7f800001;
5246 goto special_math_val;
5247 case TOK___INF__:
5248 n = 0x7f800000;
5249 goto special_math_val;
5251 default:
5252 tok_identifier:
5253 t = tok;
5254 next();
5255 if (t < TOK_UIDENT)
5256 expect("identifier");
5257 s = sym_find(t);
5258 if (!s || IS_ASM_SYM(s)) {
5259 const char *name = get_tok_str(t, NULL);
5260 if (tok != '(')
5261 tcc_error("'%s' undeclared", name);
5262 /* for simple function calls, we tolerate an undeclared
5263 external reference to an int() function */
5264 if (tcc_state->warn_implicit_function_declaration
5265 #ifdef TCC_TARGET_PE
5266 /* people must be warned about using undeclared WINAPI functions
5267 (which usually start with an uppercase letter) */
5268 || (name[0] >= 'A' && name[0] <= 'Z')
5269 #endif
5271 tcc_warning("implicit declaration of function '%s'", name);
5272 s = external_global_sym(t, &func_old_type, 0);
5275 r = s->r;
5276 /* A symbol that has a register is a local register variable,
5277 which starts out as VT_LOCAL value. */
5278 if ((r & VT_VALMASK) < VT_CONST)
5279 r = (r & ~VT_VALMASK) | VT_LOCAL;
5281 vset(&s->type, r, s->c);
5282 /* Point to s as backpointer (even without r&VT_SYM).
5283 Will be used by at least the x86 inline asm parser for
5284 regvars. */
5285 vtop->sym = s;
5287 if (r & VT_SYM) {
5288 vtop->c.i = 0;
5289 } else if (r == VT_CONST && IS_ENUM_VAL(s->type.t)) {
5290 vtop->c.i = s->enum_val;
5292 break;
5295 /* post operations */
5296 while (1) {
5297 if (tok == TOK_INC || tok == TOK_DEC) {
5298 inc(1, tok);
5299 next();
5300 } else if (tok == '.' || tok == TOK_ARROW || tok == TOK_CDOUBLE) {
5301 int qualifiers;
5302 /* field */
5303 if (tok == TOK_ARROW)
5304 indir();
5305 qualifiers = vtop->type.t & (VT_CONSTANT | VT_VOLATILE);
5306 test_lvalue();
5307 gaddrof();
5308 /* expect pointer on structure */
5309 if ((vtop->type.t & VT_BTYPE) != VT_STRUCT)
5310 expect("struct or union");
5311 if (tok == TOK_CDOUBLE)
5312 expect("field name");
5313 next();
5314 if (tok == TOK_CINT || tok == TOK_CUINT)
5315 expect("field name");
5316 s = find_field(&vtop->type, tok);
5317 if (!s)
5318 tcc_error("field not found: %s", get_tok_str(tok & ~SYM_FIELD, &tokc));
5319 /* add field offset to pointer */
5320 vtop->type = char_pointer_type; /* change type to 'char *' */
5321 vpushi(s->c);
5322 gen_op('+');
5323 /* change type to field type, and set to lvalue */
5324 vtop->type = s->type;
5325 vtop->type.t |= qualifiers;
5326 /* an array is never an lvalue */
5327 if (!(vtop->type.t & VT_ARRAY)) {
5328 vtop->r |= lvalue_type(vtop->type.t);
5329 #ifdef CONFIG_TCC_BCHECK
5330 /* if bound checking, the referenced pointer must be checked */
5331 if (tcc_state->do_bounds_check && (vtop->r & VT_VALMASK) != VT_LOCAL)
5332 vtop->r |= VT_MUSTBOUND;
5333 #endif
5335 next();
5336 } else if (tok == '[') {
5337 next();
5338 gexpr();
5339 gen_op('+');
5340 indir();
5341 skip(']');
5342 } else if (tok == '(') {
5343 SValue ret;
5344 Sym *sa;
5345 int nb_args, ret_nregs, ret_align, regsize, variadic;
5347 /* function call */
5348 if ((vtop->type.t & VT_BTYPE) != VT_FUNC) {
5349 /* pointer test (no array accepted) */
5350 if ((vtop->type.t & (VT_BTYPE | VT_ARRAY)) == VT_PTR) {
5351 vtop->type = *pointed_type(&vtop->type);
5352 if ((vtop->type.t & VT_BTYPE) != VT_FUNC)
5353 goto error_func;
5354 } else {
5355 error_func:
5356 expect("function pointer");
5358 } else {
5359 vtop->r &= ~VT_LVAL; /* no lvalue */
5361 /* get return type */
5362 s = vtop->type.ref;
5363 next();
5364 sa = s->next; /* first parameter */
5365 nb_args = regsize = 0;
5366 ret.r2 = VT_CONST;
5367 /* compute first implicit argument if a structure is returned */
5368 if ((s->type.t & VT_BTYPE) == VT_STRUCT) {
5369 variadic = (s->f.func_type == FUNC_ELLIPSIS);
5370 ret_nregs = gfunc_sret(&s->type, variadic, &ret.type,
5371 &ret_align, &regsize);
5372 if (!ret_nregs) {
5373 /* get some space for the returned structure */
5374 size = type_size(&s->type, &align);
5375 #ifdef TCC_TARGET_ARM64
5376 /* On arm64, a small struct is returned in registers.
5377 It is much easier to write it to memory if we know
5378 that we are allowed to write some extra bytes, so
5379 round the allocated space up to a power of 2: */
5380 if (size < 16)
5381 while (size & (size - 1))
5382 size = (size | (size - 1)) + 1;
5383 #endif
5384 loc = (loc - size) & -align;
5385 ret.type = s->type;
5386 ret.r = VT_LOCAL | VT_LVAL;
5387 /* pass it as 'int' to avoid structure arg passing
5388 problems */
5389 vseti(VT_LOCAL, loc);
5390 ret.c = vtop->c;
5391 nb_args++;
5393 } else {
5394 ret_nregs = 1;
5395 ret.type = s->type;
5398 if (ret_nregs) {
5399 /* return in register */
5400 if (is_float(ret.type.t)) {
5401 ret.r = reg_fret(ret.type.t);
5402 #ifdef TCC_TARGET_X86_64
5403 if ((ret.type.t & VT_BTYPE) == VT_QFLOAT)
5404 ret.r2 = REG_QRET;
5405 #endif
5406 } else {
5407 #ifndef TCC_TARGET_ARM64
5408 #ifdef TCC_TARGET_X86_64
5409 if ((ret.type.t & VT_BTYPE) == VT_QLONG)
5410 #else
5411 if ((ret.type.t & VT_BTYPE) == VT_LLONG)
5412 #endif
5413 ret.r2 = REG_LRET;
5414 #endif
5415 ret.r = REG_IRET;
5417 ret.c.i = 0;
5419 if (tok != ')') {
5420 for(;;) {
5421 expr_eq();
5422 gfunc_param_typed(s, sa);
5423 nb_args++;
5424 if (sa)
5425 sa = sa->next;
5426 if (tok == ')')
5427 break;
5428 skip(',');
5431 if (sa)
5432 tcc_error("too few arguments to function");
5433 skip(')');
5434 gfunc_call(nb_args);
5436 /* return value */
5437 for (r = ret.r + ret_nregs + !ret_nregs; r-- > ret.r;) {
5438 vsetc(&ret.type, r, &ret.c);
5439 vtop->r2 = ret.r2; /* Loop only happens when r2 is VT_CONST */
5442 /* handle packed struct return */
5443 if (((s->type.t & VT_BTYPE) == VT_STRUCT) && ret_nregs) {
5444 int addr, offset;
5446 size = type_size(&s->type, &align);
5447 /* We're writing whole regs often, make sure there's enough
5448 space. Assume the register size is a power of 2. */
5449 if (regsize > align)
5450 align = regsize;
5451 loc = (loc - size) & -align;
5452 addr = loc;
5453 offset = 0;
5454 for (;;) {
5455 vset(&ret.type, VT_LOCAL | VT_LVAL, addr + offset);
5456 vswap();
5457 vstore();
5458 vtop--;
5459 if (--ret_nregs == 0)
5460 break;
5461 offset += regsize;
5463 vset(&s->type, VT_LOCAL | VT_LVAL, addr);
5465 } else {
5466 break;
5471 ST_FUNC void expr_prod(void)
5473 int t;
5475 unary();
5476 while (tok == '*' || tok == '/' || tok == '%') {
5477 t = tok;
5478 next();
5479 unary();
5480 gen_op(t);
5484 ST_FUNC void expr_sum(void)
5486 int t;
5488 expr_prod();
5489 while (tok == '+' || tok == '-') {
5490 t = tok;
5491 next();
5492 expr_prod();
5493 gen_op(t);
5497 static void expr_shift(void)
5499 int t;
5501 expr_sum();
5502 while (tok == TOK_SHL || tok == TOK_SAR) {
5503 t = tok;
5504 next();
5505 expr_sum();
5506 gen_op(t);
5510 static void expr_cmp(void)
5512 int t;
5514 expr_shift();
5515 while ((tok >= TOK_ULE && tok <= TOK_GT) ||
5516 tok == TOK_ULT || tok == TOK_UGE) {
5517 t = tok;
5518 next();
5519 expr_shift();
5520 gen_op(t);
5524 static void expr_cmpeq(void)
5526 int t;
5528 expr_cmp();
5529 while (tok == TOK_EQ || tok == TOK_NE) {
5530 t = tok;
5531 next();
5532 expr_cmp();
5533 gen_op(t);
5537 static void expr_and(void)
5539 expr_cmpeq();
5540 while (tok == '&') {
5541 next();
5542 expr_cmpeq();
5543 gen_op('&');
5547 static void expr_xor(void)
5549 expr_and();
5550 while (tok == '^') {
5551 next();
5552 expr_and();
5553 gen_op('^');
5557 static void expr_or(void)
5559 expr_xor();
5560 while (tok == '|') {
5561 next();
5562 expr_xor();
5563 gen_op('|');
5567 static void expr_land(void)
5569 expr_or();
5570 if (tok == TOK_LAND) {
5571 int t = 0;
5572 for(;;) {
5573 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5574 gen_cast_s(VT_BOOL);
5575 if (vtop->c.i) {
5576 vpop();
5577 } else {
5578 nocode_wanted++;
5579 while (tok == TOK_LAND) {
5580 next();
5581 expr_or();
5582 vpop();
5584 nocode_wanted--;
5585 if (t)
5586 gsym(t);
5587 gen_cast_s(VT_INT);
5588 break;
5590 } else {
5591 if (!t)
5592 save_regs(1);
5593 t = gvtst(1, t);
5595 if (tok != TOK_LAND) {
5596 if (t)
5597 vseti(VT_JMPI, t);
5598 else
5599 vpushi(1);
5600 break;
5602 next();
5603 expr_or();
5608 static void expr_lor(void)
5610 expr_land();
5611 if (tok == TOK_LOR) {
5612 int t = 0;
5613 for(;;) {
5614 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5615 gen_cast_s(VT_BOOL);
5616 if (!vtop->c.i) {
5617 vpop();
5618 } else {
5619 nocode_wanted++;
5620 while (tok == TOK_LOR) {
5621 next();
5622 expr_land();
5623 vpop();
5625 nocode_wanted--;
5626 if (t)
5627 gsym(t);
5628 gen_cast_s(VT_INT);
5629 break;
5631 } else {
5632 if (!t)
5633 save_regs(1);
5634 t = gvtst(0, t);
5636 if (tok != TOK_LOR) {
5637 if (t)
5638 vseti(VT_JMP, t);
5639 else
5640 vpushi(0);
5641 break;
5643 next();
5644 expr_land();
5649 /* Assuming vtop is a value used in a conditional context
5650 (i.e. compared with zero) return 0 if it's false, 1 if
5651 true and -1 if it can't be statically determined. */
5652 static int condition_3way(void)
5654 int c = -1;
5655 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
5656 (!(vtop->r & VT_SYM) || !vtop->sym->a.weak)) {
5657 vdup();
5658 gen_cast_s(VT_BOOL);
5659 c = vtop->c.i;
5660 vpop();
5662 return c;
5665 static void expr_cond(void)
5667 int tt, u, r1, r2, rc, t1, t2, bt1, bt2, islv, c, g;
5668 SValue sv;
5669 CType type, type1, type2;
5671 expr_lor();
5672 if (tok == '?') {
5673 next();
5674 c = condition_3way();
5675 g = (tok == ':' && gnu_ext);
5676 if (c < 0) {
5677 /* needed to avoid having different registers saved in
5678 each branch */
5679 if (is_float(vtop->type.t)) {
5680 rc = RC_FLOAT;
5681 #ifdef TCC_TARGET_X86_64
5682 if ((vtop->type.t & VT_BTYPE) == VT_LDOUBLE) {
5683 rc = RC_ST0;
5685 #endif
5686 } else
5687 rc = RC_INT;
5688 gv(rc);
5689 save_regs(1);
5690 if (g)
5691 gv_dup();
5692 tt = gvtst(1, 0);
5694 } else {
5695 if (!g)
5696 vpop();
5697 tt = 0;
5700 if (1) {
5701 if (c == 0)
5702 nocode_wanted++;
5703 if (!g)
5704 gexpr();
5706 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
5707 mk_pointer(&vtop->type);
5708 type1 = vtop->type;
5709 sv = *vtop; /* save value to handle it later */
5710 vtop--; /* no vpop so that FP stack is not flushed */
5711 skip(':');
5713 u = 0;
5714 if (c < 0)
5715 u = gjmp(0);
5716 gsym(tt);
5718 if (c == 0)
5719 nocode_wanted--;
5720 if (c == 1)
5721 nocode_wanted++;
5722 expr_cond();
5723 if (c == 1)
5724 nocode_wanted--;
5726 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
5727 mk_pointer(&vtop->type);
5728 type2=vtop->type;
5729 t1 = type1.t;
5730 bt1 = t1 & VT_BTYPE;
5731 t2 = type2.t;
5732 bt2 = t2 & VT_BTYPE;
5733 type.ref = NULL;
5736 /* cast operands to correct type according to ISOC rules */
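/* Illustrative examples of the resulting type:
       c ? 1 : 2.5        -> double
       c ? 0 : p          -> type of p (null pointer constant case)
       c ? 1u : -1        -> unsigned int
*/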
5737 if (bt1 == VT_VOID || bt2 == VT_VOID) {
5738 type.t = VT_VOID; /* NOTE: as an extension, we accept void on only one side */
5739 } else if (is_float(bt1) || is_float(bt2)) {
5740 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
5741 type.t = VT_LDOUBLE;
5743 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
5744 type.t = VT_DOUBLE;
5745 } else {
5746 type.t = VT_FLOAT;
5748 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
5749 /* cast to biggest op */
5750 type.t = VT_LLONG | VT_LONG;
5751 if (bt1 == VT_LLONG)
5752 type.t &= t1;
5753 if (bt2 == VT_LLONG)
5754 type.t &= t2;
5755 /* convert to unsigned if it does not fit in a long long */
5756 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
5757 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
5758 type.t |= VT_UNSIGNED;
5759 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
5760 /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
5761 /* If one is a null ptr constant the result type
5762 is the other. */
5763 if (is_null_pointer (vtop)) type = type1;
5764 else if (is_null_pointer (&sv)) type = type2;
5765 else if (bt1 != bt2)
5766 tcc_error("incompatible types in conditional expressions");
5767 else {
5768 CType *pt1 = pointed_type(&type1);
5769 CType *pt2 = pointed_type(&type2);
5770 int pbt1 = pt1->t & VT_BTYPE;
5771 int pbt2 = pt2->t & VT_BTYPE;
5772 int newquals, copied = 0;
5773 /* pointers to void get preferred, otherwise the
5774 pointed to types minus qualifs should be compatible */
5775 type = (pbt1 == VT_VOID) ? type1 : type2;
5776 if (pbt1 != VT_VOID && pbt2 != VT_VOID) {
5777 if(!compare_types(pt1, pt2, 1/*unqualif*/))
5778 tcc_warning("pointer type mismatch in conditional expression\n");
5780 /* combine qualifs */
5781 newquals = ((pt1->t | pt2->t) & (VT_CONSTANT | VT_VOLATILE));
5782 if ((~pointed_type(&type)->t & (VT_CONSTANT | VT_VOLATILE))
5783 & newquals)
5785 /* copy the pointer target symbol */
5786 type.ref = sym_push(SYM_FIELD, &type.ref->type,
5787 0, type.ref->c);
5788 copied = 1;
5789 pointed_type(&type)->t |= newquals;
5791 /* pointers to incomplete arrays get converted to
5792 pointers to completed ones if possible */
5793 if (pt1->t & VT_ARRAY
5794 && pt2->t & VT_ARRAY
5795 && pointed_type(&type)->ref->c < 0
5796 && (pt1->ref->c > 0 || pt2->ref->c > 0))
5798 if (!copied)
5799 type.ref = sym_push(SYM_FIELD, &type.ref->type,
5800 0, type.ref->c);
5801 pointed_type(&type)->ref =
5802 sym_push(SYM_FIELD, &pointed_type(&type)->ref->type,
5803 0, pointed_type(&type)->ref->c);
5804 pointed_type(&type)->ref->c =
5805 0 < pt1->ref->c ? pt1->ref->c : pt2->ref->c;
5808 } else if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
5809 /* XXX: test structure compatibility */
5810 type = bt1 == VT_STRUCT ? type1 : type2;
5811 } else {
5812 /* integer operations */
5813 type.t = VT_INT | (VT_LONG & (t1 | t2));
5814 /* convert to unsigned if it does not fit in an integer */
5815 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
5816 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
5817 type.t |= VT_UNSIGNED;
5819 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
5820 that `(expr ? a : b).mem` does not error with "lvalue expected" */
5821 islv = (vtop->r & VT_LVAL) && (sv.r & VT_LVAL) && VT_STRUCT == (type.t & VT_BTYPE);
5823 /* now we convert second operand */
5824 if (c != 1) {
5825 gen_cast(&type);
5826 if (islv) {
5827 mk_pointer(&vtop->type);
5828 gaddrof();
5829 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5830 gaddrof();
5833 rc = RC_INT;
5834 if (is_float(type.t)) {
5835 rc = RC_FLOAT;
5836 #ifdef TCC_TARGET_X86_64
5837 if ((type.t & VT_BTYPE) == VT_LDOUBLE) {
5838 rc = RC_ST0;
5840 #endif
5841 } else if ((type.t & VT_BTYPE) == VT_LLONG) {
5842 /* for long longs, we use fixed registers to avoid having
5843 to handle a complicated move */
5844 rc = RC_IRET;
5847 tt = r2 = 0;
5848 if (c < 0) {
5849 r2 = gv(rc);
5850 tt = gjmp(0);
5852 gsym(u);
5854 /* this is horrible, but we must also convert first
5855 operand */
5856 if (c != 0) {
5857 *vtop = sv;
5858 gen_cast(&type);
5859 if (islv) {
5860 mk_pointer(&vtop->type);
5861 gaddrof();
5862 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5863 gaddrof();
5866 if (c < 0 || islv) {
5867 r1 = gv(rc);
5868 move_reg(r2, r1, type.t);
5869 vtop->r = r2;
5870 gsym(tt);
5871 if (islv)
5872 indir();
5878 static void expr_eq(void)
5880 int t;
5882 expr_cond();
5883 if (tok == '=' ||
5884 (tok >= TOK_A_MOD && tok <= TOK_A_DIV) ||
5885 tok == TOK_A_XOR || tok == TOK_A_OR ||
5886 tok == TOK_A_SHL || tok == TOK_A_SAR) {
5887 test_lvalue();
5888 t = tok;
5889 next();
5890 if (t == '=') {
5891 expr_eq();
5892 } else {
5893 vdup();
5894 expr_eq();
5895 gen_op(t & 0x7f);
5897 vstore();
5901 ST_FUNC void gexpr(void)
5903 while (1) {
5904 expr_eq();
5905 if (tok != ',')
5906 break;
5907 vpop();
5908 next();
5912 /* parse a constant expression and return value in vtop. */
5913 static void expr_const1(void)
5915 const_wanted++;
5916 nocode_wanted++;
5917 expr_cond();
5918 nocode_wanted--;
5919 const_wanted--;
5922 /* parse an integer constant and return its value. */
5923 static inline int64_t expr_const64(void)
5925 int64_t c;
5926 expr_const1();
5927 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
5928 expect("constant expression");
5929 c = vtop->c.i;
5930 vpop();
5931 return c;
5934 /* parse an integer constant and return its value.
5935 Complain if it doesn't fit 32bit (signed or unsigned). */
5936 ST_FUNC int expr_const(void)
5938 int c;
5939 int64_t wc = expr_const64();
5940 c = wc;
5941 if (c != wc && (unsigned)c != wc)
5942 tcc_error("constant exceeds 32 bit");
5943 return c;
5946 /* return the label token if current token is a label, otherwise
5947 return zero */
5948 static int is_label(void)
5950 int last_tok;
5952 /* fast test first */
5953 if (tok < TOK_UIDENT)
5954 return 0;
5955 /* no need to save tokc because tok is an identifier */
5956 last_tok = tok;
5957 next();
5958 if (tok == ':') {
5959 return last_tok;
5960 } else {
5961 unget_tok(last_tok);
5962 return 0;
5966 #ifndef TCC_TARGET_ARM64
5967 static void gfunc_return(CType *func_type)
5969 if ((func_type->t & VT_BTYPE) == VT_STRUCT) {
5970 CType type, ret_type;
5971 int ret_align, ret_nregs, regsize;
5972 ret_nregs = gfunc_sret(func_type, func_var, &ret_type,
5973 &ret_align, &regsize);
5974 if (0 == ret_nregs) {
5975 /* if returning structure, must copy it to implicit
5976 first pointer arg location */
5977 type = *func_type;
5978 mk_pointer(&type);
5979 vset(&type, VT_LOCAL | VT_LVAL, func_vc);
5980 indir();
5981 vswap();
5982 /* copy structure value to pointer */
5983 vstore();
5984 } else {
5985 /* returning structure packed into registers */
5986 int r, size, addr, align;
5987 size = type_size(func_type,&align);
5988 if ((vtop->r != (VT_LOCAL | VT_LVAL) ||
5989 (vtop->c.i & (ret_align-1)))
5990 && (align & (ret_align-1))) {
5991 loc = (loc - size) & -ret_align;
5992 addr = loc;
5993 type = *func_type;
5994 vset(&type, VT_LOCAL | VT_LVAL, addr);
5995 vswap();
5996 vstore();
5997 vpop();
5998 vset(&ret_type, VT_LOCAL | VT_LVAL, addr);
6000 vtop->type = ret_type;
6001 if (is_float(ret_type.t))
6002 r = rc_fret(ret_type.t);
6003 else
6004 r = RC_IRET;
6006 if (ret_nregs == 1)
6007 gv(r);
6008 else {
6009 for (;;) {
6010 vdup();
6011 gv(r);
6012 vpop();
6013 if (--ret_nregs == 0)
6014 break;
6015 /* We assume that when a structure is returned in multiple
6016 registers, their classes are consecutive values of the
6017 sequence s(n) = 2^n */
6018 r <<= 1;
6019 vtop->c.i += regsize;
6023 } else if (is_float(func_type->t)) {
6024 gv(rc_fret(func_type->t));
6025 } else {
6026 gv(RC_IRET);
6028 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
6030 #endif
6032 static int case_cmp(const void *pa, const void *pb)
6034 int64_t a = (*(struct case_t**) pa)->v1;
6035 int64_t b = (*(struct case_t**) pb)->v1;
6036 return a < b ? -1 : a > b;
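/* Emit the comparisons for a sorted array of case ranges: binary search
   while more than 4 entries remain, then a linear scan; each entry tests
   v1 <= x <= v2 and branches to the corresponding case label. */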
6039 static void gcase(struct case_t **base, int len, int *bsym)
6041 struct case_t *p;
6042 int e;
6043 int ll = (vtop->type.t & VT_BTYPE) == VT_LLONG;
6044 gv(RC_INT);
6045 while (len > 4) {
6046 /* binary search */
6047 p = base[len/2];
6048 vdup();
6049 if (ll)
6050 vpushll(p->v2);
6051 else
6052 vpushi(p->v2);
6053 gen_op(TOK_LE);
6054 e = gtst(1, 0);
6055 vdup();
6056 if (ll)
6057 vpushll(p->v1);
6058 else
6059 vpushi(p->v1);
6060 gen_op(TOK_GE);
6061 gtst_addr(0, p->sym); /* v1 <= x <= v2 */
6062 /* x < v1 */
6063 gcase(base, len/2, bsym);
6064 if (cur_switch->def_sym)
6065 gjmp_addr(cur_switch->def_sym);
6066 else
6067 *bsym = gjmp(*bsym);
6068 /* x > v2 */
6069 gsym(e);
6070 e = len/2 + 1;
6071 base += e; len -= e;
6073 /* linear scan */
6074 while (len--) {
6075 p = *base++;
6076 vdup();
6077 if (ll)
6078 vpushll(p->v2);
6079 else
6080 vpushi(p->v2);
6081 if (p->v1 == p->v2) {
6082 gen_op(TOK_EQ);
6083 gtst_addr(0, p->sym);
6084 } else {
6085 gen_op(TOK_LE);
6086 e = gtst(1, 0);
6087 vdup();
6088 if (ll)
6089 vpushll(p->v1);
6090 else
6091 vpushi(p->v1);
6092 gen_op(TOK_GE);
6093 gtst_addr(0, p->sym);
6094 gsym(e);
6099 static void block(int *bsym, int *csym, int is_expr)
6101 int a, b, c, d, cond;
6102 Sym *s;
6104 /* generate line number info */
6105 if (tcc_state->do_debug)
6106 tcc_debug_line(tcc_state);
6108 if (is_expr) {
6109 /* default return value is (void) */
6110 vpushi(0);
6111 vtop->type.t = VT_VOID;
6114 if (tok == TOK_IF) {
6115 /* if test */
6116 int saved_nocode_wanted = nocode_wanted;
6117 next();
6118 skip('(');
6119 gexpr();
6120 skip(')');
6121 cond = condition_3way();
6122 if (cond == 1)
6123 a = 0, vpop();
6124 else
6125 a = gvtst(1, 0);
6126 if (cond == 0)
6127 nocode_wanted |= 0x20000000;
6128 block(bsym, csym, 0);
6129 if (cond != 1)
6130 nocode_wanted = saved_nocode_wanted;
6131 c = tok;
6132 if (c == TOK_ELSE) {
6133 next();
6134 d = gjmp(0);
6135 gsym(a);
6136 if (cond == 1)
6137 nocode_wanted |= 0x20000000;
6138 block(bsym, csym, 0);
6139 gsym(d); /* patch else jmp */
6140 if (cond != 0)
6141 nocode_wanted = saved_nocode_wanted;
6142 } else
6143 gsym(a);
6144 } else if (tok == TOK_WHILE) {
6145 int saved_nocode_wanted;
6146 nocode_wanted &= ~0x20000000;
6147 next();
6148 d = ind;
6149 vla_sp_restore();
6150 skip('(');
6151 gexpr();
6152 skip(')');
6153 a = gvtst(1, 0);
6154 b = 0;
6155 incr_local_scope();
6156 saved_nocode_wanted = nocode_wanted;
6157 block(&a, &b, 0);
6158 nocode_wanted = saved_nocode_wanted;
6159 decr_local_scope();
6160 gjmp_addr(d);
6161 gsym(a);
6162 gsym_addr(b, d);
6163 } else if (tok == '{') {
6164 Sym *llabel, *lcleanup;
6165 int block_vla_sp_loc = vla_sp_loc, saved_vlas_in_scope = vlas_in_scope;
6166 int lncleanups = ncleanups;
6168 next();
6169 /* record local declaration stack position */
6170 s = local_stack;
6171 llabel = local_label_stack;
6172 lcleanup = current_cleanups;
6173 incr_local_scope();
6175 /* handle local labels declarations */
6176 while (tok == TOK_LABEL) {
6177 next();
6178 for(;;) {
6179 if (tok < TOK_UIDENT)
6180 expect("label identifier");
6181 label_push(&local_label_stack, tok, LABEL_DECLARED);
6182 next();
6183 if (tok == ',') {
6184 next();
6185 } else {
6186 skip(';');
6187 break;
6191 while (tok != '}') {
6192 if ((a = is_label()))
6193 unget_tok(a);
6194 else
6195 decl(VT_LOCAL);
6196 if (tok != '}') {
6197 if (is_expr)
6198 vpop();
6199 block(bsym, csym, is_expr);
6203 if (current_cleanups != lcleanup) {
6204 int jmp = 0;
6205 Sym *g, **pg;
6207 for (pg = &pending_gotos; (g = *pg) && g->c > lncleanups;)
6208 if (g->prev_tok->r & LABEL_FORWARD) {
6209 Sym *pcl = g->next;
6210 if (!jmp)
6211 jmp = gjmp(0);
6212 gsym(pcl->jnext);
6213 try_call_scope_cleanup(lcleanup);
6214 pcl->jnext = gjmp(0);
6215 if (!lncleanups)
6216 goto remove_pending;
6217 g->c = lncleanups;
6218 pg = &g->prev;
6219 } else {
6220 remove_pending:
6221 *pg = g->prev;
6222 sym_free(g);
6224 gsym(jmp);
6225 if (!nocode_wanted) {
6226 try_call_scope_cleanup(lcleanup);
6230 current_cleanups = lcleanup;
6231 ncleanups = lncleanups;
6232 /* pop locally defined labels */
6233 label_pop(&local_label_stack, llabel, is_expr);
6234 /* pop locally defined symbols */
6235 decr_local_scope();
6236 /* In the is_expr case (a statement expression is finished here),
6237 vtop might refer to symbols on the local_stack. Either via the
6238 type or via vtop->sym. We can't pop those nor any that in turn
6239 might be referred to. To make it easier we don't roll back
6240 any symbols in that case; some upper level call to block() will
6241 do that. We do have to remove such symbols from the lookup
6242 tables, though. sym_pop will do that. */
6243 sym_pop(&local_stack, s, is_expr);
6245 /* Pop VLA frames and restore stack pointer if required */
6246 if (vlas_in_scope > saved_vlas_in_scope) {
6247 vla_sp_loc = saved_vlas_in_scope ? block_vla_sp_loc : vla_sp_root_loc;
6248 vla_sp_restore();
6250 vlas_in_scope = saved_vlas_in_scope;
6252 next();
6253 } else if (tok == TOK_RETURN) {
6254 next();
6255 if (tok != ';') {
6256 gexpr();
6257 gen_assign_cast(&func_vt);
6258 try_call_scope_cleanup(NULL);
6259 if ((func_vt.t & VT_BTYPE) == VT_VOID)
6260 vtop--;
6261 else
6262 gfunc_return(&func_vt);
6263 } else {
6264 try_call_scope_cleanup(NULL);
6266 skip(';');
6267 /* jump unless last stmt in top-level block */
6268 if (tok != '}' || local_scope != 1)
6269 rsym = gjmp(rsym);
6270 nocode_wanted |= 0x20000000;
6271 } else if (tok == TOK_BREAK) {
6272 /* compute jump */
6273 if (!bsym)
6274 tcc_error("cannot break");
6275 *bsym = gjmp(*bsym);
6276 next();
6277 skip(';');
6278 nocode_wanted |= 0x20000000;
6279 } else if (tok == TOK_CONTINUE) {
6280 /* compute jump */
6281 if (!csym)
6282 tcc_error("cannot continue");
6283 vla_sp_restore_root();
6284 *csym = gjmp(*csym);
6285 next();
6286 skip(';');
6287 } else if (tok == TOK_FOR) {
6288 int e;
6289 int saved_nocode_wanted;
6290 nocode_wanted &= ~0x20000000;
6291 next();
6292 skip('(');
6293 s = local_stack;
6294 incr_local_scope();
6295 if (tok != ';') {
6296 /* c99 for-loop init decl? */
6297 if (!decl0(VT_LOCAL, 1, NULL)) {
6298 /* no, regular for-loop init expr */
6299 gexpr();
6300 vpop();
6303 skip(';');
6304 d = ind;
6305 c = ind;
6306 vla_sp_restore();
6307 a = 0;
6308 b = 0;
6309 if (tok != ';') {
6310 gexpr();
6311 a = gvtst(1, 0);
6313 skip(';');
6314 if (tok != ')') {
6315 e = gjmp(0);
6316 c = ind;
6317 vla_sp_restore();
6318 gexpr();
6319 vpop();
6320 gjmp_addr(d);
6321 gsym(e);
6323 skip(')');
6324 saved_nocode_wanted = nocode_wanted;
6325 block(&a, &b, 0);
6326 nocode_wanted = saved_nocode_wanted;
6327 gjmp_addr(c);
6328 gsym(a);
6329 gsym_addr(b, c);
6330 decr_local_scope();
6331 sym_pop(&local_stack, s, 0);
6333 } else
6334 if (tok == TOK_DO) {
6335 int saved_nocode_wanted;
6336 nocode_wanted &= ~0x20000000;
6337 next();
6338 a = 0;
6339 b = 0;
6340 d = ind;
6341 vla_sp_restore();
6342 saved_nocode_wanted = nocode_wanted;
6343 block(&a, &b, 0);
6344 skip(TOK_WHILE);
6345 skip('(');
6346 gsym(b);
6347 if (b)
6348 nocode_wanted = saved_nocode_wanted;
6349 gexpr();
6350 c = gvtst(0, 0);
6351 gsym_addr(c, d);
6352 nocode_wanted = saved_nocode_wanted;
6353 skip(')');
6354 gsym(a);
6355 skip(';');
6356 } else
6357 if (tok == TOK_SWITCH) {
6358 struct switch_t *saved, sw;
6359 int saved_nocode_wanted = nocode_wanted;
6360 SValue switchval;
6361 next();
6362 skip('(');
6363 gexpr();
6364 skip(')');
6365 switchval = *vtop--;
6366 a = 0;
6367 b = gjmp(0); /* jump to first case */
6368 sw.p = NULL; sw.n = 0; sw.def_sym = 0;
6369 saved = cur_switch;
6370 cur_switch = &sw;
6371 block(&a, csym, 0);
6372 nocode_wanted = saved_nocode_wanted;
6373 a = gjmp(a); /* add implicit break */
6374 /* case lookup */
6375 gsym(b);
6376 qsort(sw.p, sw.n, sizeof(void*), case_cmp);
6377 for (b = 1; b < sw.n; b++)
6378 if (sw.p[b - 1]->v2 >= sw.p[b]->v1)
6379 tcc_error("duplicate case value");
6380 /* Our switch table sorting is signed, so the compared
6381 value needs to be as well when it's 64bit. */
6382 if ((switchval.type.t & VT_BTYPE) == VT_LLONG)
6383 switchval.type.t &= ~VT_UNSIGNED;
6384 vpushv(&switchval);
6385 gcase(sw.p, sw.n, &a);
6386 vpop();
6387 if (sw.def_sym)
6388 gjmp_addr(sw.def_sym);
6389 dynarray_reset(&sw.p, &sw.n);
6390 cur_switch = saved;
6391 /* break label */
6392 gsym(a);
6393 } else
6394 if (tok == TOK_CASE) {
6395 struct case_t *cr = tcc_malloc(sizeof(struct case_t));
6396 if (!cur_switch)
6397 expect("switch");
6398 nocode_wanted &= ~0x20000000;
6399 next();
6400 cr->v1 = cr->v2 = expr_const64();
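/* GNU case ranges, e.g. (illustrative): case 1 ... 5: */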
6401 if (gnu_ext && tok == TOK_DOTS) {
6402 next();
6403 cr->v2 = expr_const64();
6404 if (cr->v2 < cr->v1)
6405 tcc_warning("empty case range");
6407 cr->sym = ind;
6408 dynarray_add(&cur_switch->p, &cur_switch->n, cr);
6409 skip(':');
6410 is_expr = 0;
6411 goto block_after_label;
6412 } else
6413 if (tok == TOK_DEFAULT) {
6414 next();
6415 skip(':');
6416 if (!cur_switch)
6417 expect("switch");
6418 if (cur_switch->def_sym)
6419 tcc_error("too many 'default'");
6420 cur_switch->def_sym = ind;
6421 is_expr = 0;
6422 goto block_after_label;
6423 } else
6424 if (tok == TOK_GOTO) {
6425 next();
6426 if (tok == '*' && gnu_ext) {
6427 /* computed goto */
6428 next();
6429 gexpr();
6430 if ((vtop->type.t & VT_BTYPE) != VT_PTR)
6431 expect("pointer");
6432 ggoto();
6433 } else if (tok >= TOK_UIDENT) {
6434 s = label_find(tok);
6435 /* put forward definition if needed */
6436 if (!s)
6437 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
6438 else if (s->r == LABEL_DECLARED)
6439 s->r = LABEL_FORWARD;
6441 vla_sp_restore_root();
6442 if (s->r & LABEL_FORWARD) {
6443 /* start new goto chain for cleanups, linked via label->next */
6444 if (current_cleanups) {
6445 sym_push2(&pending_gotos, SYM_FIELD, 0, ncleanups);
6446 pending_gotos->prev_tok = s;
6447 s = sym_push2(&s->next, SYM_FIELD, 0, 0);
6448 pending_gotos->next = s;
6450 s->jnext = gjmp(s->jnext);
6451 } else {
6452 try_call_cleanup_goto(s->cleanupstate);
6453 gjmp_addr(s->jnext);
6455 next();
6456 } else {
6457 expect("label identifier");
6459 skip(';');
6460 } else if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
6461 asm_instr();
6462 } else {
6463 b = is_label();
6464 if (b) {
6465 /* label case */
6466 next();
6467 s = label_find(b);
6468 if (s) {
6469 if (s->r == LABEL_DEFINED)
6470 tcc_error("duplicate label '%s'", get_tok_str(s->v, NULL));
6471 s->r = LABEL_DEFINED;
6472 if (s->next) {
6473 Sym *pcl; /* pending cleanup goto */
6474 for (pcl = s->next; pcl; pcl = pcl->prev)
6475 gsym(pcl->jnext);
6476 sym_pop(&s->next, NULL, 0);
6477 } else
6478 gsym(s->jnext);
6479 } else {
6480 s = label_push(&global_label_stack, b, LABEL_DEFINED);
6482 s->jnext = ind;
6483 s->cleanupstate = current_cleanups;
6484 vla_sp_restore();
6485 /* we accept this, but it is a mistake */
6486 block_after_label:
6487 nocode_wanted &= ~0x20000000;
6488 if (tok == '}') {
6489 tcc_warning("deprecated use of label at end of compound statement");
6490 } else {
6491 if (is_expr)
6492 vpop();
6493 block(bsym, csym, is_expr);
6495 } else {
6496 /* expression case */
6497 if (tok != ';') {
6498 if (is_expr) {
6499 vpop();
6500 gexpr();
6501 } else {
6502 gexpr();
6503 vpop();
6506 skip(';');
6511 /* This skips over a stream of tokens containing balanced {} and ()
6512 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
6513 with a '{'). If STR then allocates and stores the skipped tokens
6514 in *STR. This doesn't check if () and {} are nested correctly,
6515 i.e. "({)}" is accepted. */
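/* Used e.g. by the _Generic parsing above to save the selected
   association and to skip over the non-selected ones. */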
6516 static void skip_or_save_block(TokenString **str)
6518 int braces = tok == '{';
6519 int level = 0;
6520 if (str)
6521 *str = tok_str_alloc();
6523 while ((level > 0 || (tok != '}' && tok != ',' && tok != ';' && tok != ')'))) {
6524 int t;
6525 if (tok == TOK_EOF) {
6526 if (str || level > 0)
6527 tcc_error("unexpected end of file");
6528 else
6529 break;
6531 if (str)
6532 tok_str_add_tok(*str);
6533 t = tok;
6534 next();
6535 if (t == '{' || t == '(') {
6536 level++;
6537 } else if (t == '}' || t == ')') {
6538 level--;
6539 if (level == 0 && braces && t == '}')
6540 break;
6543 if (str) {
6544 tok_str_add(*str, -1);
6545 tok_str_add(*str, 0);
6549 #define EXPR_CONST 1
6550 #define EXPR_ANY 2
6552 static void parse_init_elem(int expr_type)
6554 int saved_global_expr;
6555 switch(expr_type) {
6556 case EXPR_CONST:
6557 /* compound literals must be allocated globally in this case */
6558 saved_global_expr = global_expr;
6559 global_expr = 1;
6560 expr_const1();
6561 global_expr = saved_global_expr;
6562 /* NOTE: symbols are accepted, as well as lvalue for anon symbols
6563 (compound literals). */
6564 if (((vtop->r & (VT_VALMASK | VT_LVAL)) != VT_CONST
6565 && ((vtop->r & (VT_SYM|VT_LVAL)) != (VT_SYM|VT_LVAL)
6566 || vtop->sym->v < SYM_FIRST_ANOM))
6567 #ifdef TCC_TARGET_PE
6568 || ((vtop->r & VT_SYM) && vtop->sym->a.dllimport)
6569 #endif
6571 tcc_error("initializer element is not constant");
6572 break;
6573 case EXPR_ANY:
6574 expr_eq();
6575 break;
6579 /* put zeros for variable based init */
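/* Example: for a local 'int a[100] = { 1 };' the uninitialized tail must
   be cleared, which is done with the memset() call built below; static
   storage is already zero-initialized, so nothing is needed there. */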
6580 static void init_putz(Section *sec, unsigned long c, int size)
6582 if (sec) {
6583 /* nothing to do because globals are already set to zero */
6584 } else {
6585 vpush_global_sym(&func_old_type, TOK_memset);
6586 vseti(VT_LOCAL, c);
6587 #ifdef TCC_TARGET_ARM
6588 vpushs(size);
6589 vpushi(0);
6590 #else
6591 vpushi(0);
6592 vpushs(size);
6593 #endif
6594 gfunc_call(3);
6598 /* t is the array or struct type. c is the array or struct
6599 address. cur_field is the pointer to the current
6600 field, for arrays the 'c' member contains the current start
6601 index. 'size_only' is true if only size info is needed (only used
6602 in arrays). al contains the already initialized length of the
6603 current container (starting at c). This returns the new length of that. */
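/* Designator examples handled here (illustrative):
       struct point p = { .y = 2, .x = 1 };
       int a[8] = { [2] = 5, [4 ... 6] = 7 };   (the range form is a GNU extension)
*/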
6604 static int decl_designator(CType *type, Section *sec, unsigned long c,
6605 Sym **cur_field, int size_only, int al)
6607 Sym *s, *f;
6608 int index, index_last, align, l, nb_elems, elem_size;
6609 unsigned long corig = c;
6611 elem_size = 0;
6612 nb_elems = 1;
6613 if (gnu_ext && (l = is_label()) != 0)
6614 goto struct_field;
6615 /* NOTE: we only support ranges for last designator */
6616 while (nb_elems == 1 && (tok == '[' || tok == '.')) {
6617 if (tok == '[') {
6618 if (!(type->t & VT_ARRAY))
6619 expect("array type");
6620 next();
6621 index = index_last = expr_const();
6622 if (tok == TOK_DOTS && gnu_ext) {
6623 next();
6624 index_last = expr_const();
6626 skip(']');
6627 s = type->ref;
6628 if (index < 0 || (s->c >= 0 && index_last >= s->c) ||
6629 index_last < index)
6630 tcc_error("invalid index");
6631 if (cur_field)
6632 (*cur_field)->c = index_last;
6633 type = pointed_type(type);
6634 elem_size = type_size(type, &align);
6635 c += index * elem_size;
6636 nb_elems = index_last - index + 1;
6637 } else {
6638 next();
6639 l = tok;
6640 struct_field:
6641 next();
6642 if ((type->t & VT_BTYPE) != VT_STRUCT)
6643 expect("struct/union type");
6644 f = find_field(type, l);
6645 if (!f)
6646 expect("field");
6647 if (cur_field)
6648 *cur_field = f;
6649 type = &f->type;
6650 c += f->c;
6652 cur_field = NULL;
6654 if (!cur_field) {
6655 if (tok == '=') {
6656 next();
6657 } else if (!gnu_ext) {
6658 expect("=");
6660 } else {
6661 if (type->t & VT_ARRAY) {
6662 index = (*cur_field)->c;
6663 if (type->ref->c >= 0 && index >= type->ref->c)
6664 tcc_error("index too large");
6665 type = pointed_type(type);
6666 c += index * type_size(type, &align);
6667 } else {
6668 f = *cur_field;
6669 while (f && (f->v & SYM_FIRST_ANOM) && (f->type.t & VT_BITFIELD))
6670 *cur_field = f = f->next;
6671 if (!f)
6672 tcc_error("too many field init");
6673 type = &f->type;
6674 c += f->c;
6677 /* must put zero in holes (note that doing it that way
6678 ensures that it even works with designators) */
6679 if (!size_only && c - corig > al)
6680 init_putz(sec, corig + al, c - corig - al);
6681 decl_initializer(type, sec, c, 0, size_only);
6683 /* XXX: make it more general */
6684 if (!size_only && nb_elems > 1) {
6685 unsigned long c_end;
6686 uint8_t *src, *dst;
6687 int i;
6689 if (!sec) {
6690 vset(type, VT_LOCAL|VT_LVAL, c);
6691 for (i = 1; i < nb_elems; i++) {
6692 vset(type, VT_LOCAL|VT_LVAL, c + elem_size * i);
6693 vswap();
6694 vstore();
6696 vpop();
6697 } else if (!NODATA_WANTED) {
6698 c_end = c + nb_elems * elem_size;
6699 if (c_end > sec->data_allocated)
6700 section_realloc(sec, c_end);
6701 src = sec->data + c;
6702 dst = src;
6703 for(i = 1; i < nb_elems; i++) {
6704 dst += elem_size;
6705 memcpy(dst, src, elem_size);
6709 c += nb_elems * type_size(type, &align);
6710 if (c - corig > al)
6711 al = c - corig;
6712 return al;
6715 /* store a value or an expression directly in global data or in local array */
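/* For illustration: for a file-scope 'static int x = 3;' the constant is
   written straight into the section data at offset 'c', whereas for a local
   'int x = 3;' (sec == NULL) an ordinary store is generated via vset()/vstore(). */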
6716 static void init_putv(CType *type, Section *sec, unsigned long c)
6718 int bt;
6719 void *ptr;
6720 CType dtype;
6722 dtype = *type;
6723 dtype.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
6725 if (sec) {
6726 int size, align;
6727 /* XXX: not portable */
6728 /* XXX: generate error if incorrect relocation */
6729 gen_assign_cast(&dtype);
6730 bt = type->t & VT_BTYPE;
6732 if ((vtop->r & VT_SYM)
6733 && bt != VT_PTR
6734 && bt != VT_FUNC
6735 && (bt != (PTR_SIZE == 8 ? VT_LLONG : VT_INT)
6736 || (type->t & VT_BITFIELD))
6737 && !((vtop->r & VT_CONST) && vtop->sym->v >= SYM_FIRST_ANOM)
6739 tcc_error("initializer element is not computable at load time");
6741 if (NODATA_WANTED) {
6742 vtop--;
6743 return;
6746 size = type_size(type, &align);
6747 section_reserve(sec, c + size);
6748 ptr = sec->data + c;
6750 /* XXX: make code faster ? */
6751 if ((vtop->r & (VT_SYM|VT_CONST)) == (VT_SYM|VT_CONST) &&
6752 vtop->sym->v >= SYM_FIRST_ANOM &&
6753 /* XXX This rejects compound literals like
6754 '(void *){ptr}'. The problem is that '&sym' is
6755 represented the same way, which would be ruled out
6756 by the SYM_FIRST_ANOM check above, but also '"string"'
6757 in 'char *p = "string"' is represented the same
6758 with the type being VT_PTR and the symbol being an
6759 anonymous one. That is, there's no difference in vtop
6760 between '(void *){x}' and '&(void *){x}'. Ignore
6761 pointer typed entities here. Hopefully no real code
6762 will ever use compound literals with scalar type. */
6763 (vtop->type.t & VT_BTYPE) != VT_PTR) {
6764 /* These come from compound literals, memcpy stuff over. */
6765 Section *ssec;
6766 ElfSym *esym;
6767 ElfW_Rel *rel;
6768 esym = elfsym(vtop->sym);
6769 ssec = tcc_state->sections[esym->st_shndx];
6770 memmove (ptr, ssec->data + esym->st_value, size);
6771 if (ssec->reloc) {
6772 /* We need to copy over all memory contents, and that
6773 includes relocations. Use the fact that relocs are
6774 created in order, so look from the end of relocs
6775 until we hit one before the copied region. */
6776 int num_relocs = ssec->reloc->data_offset / sizeof(*rel);
6777 rel = (ElfW_Rel*)(ssec->reloc->data + ssec->reloc->data_offset);
6778 while (num_relocs--) {
6779 rel--;
6780 if (rel->r_offset >= esym->st_value + size)
6781 continue;
6782 if (rel->r_offset < esym->st_value)
6783 break;
6784 /* Note: if the same fields are initialized multiple
6785 times (possible with designators) then we possibly
6786 add multiple relocations for the same offset here.
6787 That would lead to wrong code; the last reloc needs
6788 to win. We clean this up later after the whole
6789 initializer is parsed. */
6790 put_elf_reloca(symtab_section, sec,
6791 c + rel->r_offset - esym->st_value,
6792 ELFW(R_TYPE)(rel->r_info),
6793 ELFW(R_SYM)(rel->r_info),
6794 #if PTR_SIZE == 8
6795 rel->r_addend
6796 #else
6798 #endif
6802 } else {
6803 if (type->t & VT_BITFIELD) {
6804 int bit_pos, bit_size, bits, n;
6805 unsigned char *p, v, m;
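/* write 'bit_size' bits of the constant, at most one byte per iteration;
   e.g. (hypothetically) a 10-bit field starting at bit 6 touches two bytes:
   2 bits in the first and 8 in the second */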
6806 bit_pos = BIT_POS(vtop->type.t);
6807 bit_size = BIT_SIZE(vtop->type.t);
6808 p = (unsigned char*)ptr + (bit_pos >> 3);
6809 bit_pos &= 7, bits = 0;
6810 while (bit_size) {
6811 n = 8 - bit_pos;
6812 if (n > bit_size)
6813 n = bit_size;
6814 v = vtop->c.i >> bits << bit_pos;
6815 m = ((1 << n) - 1) << bit_pos;
6816 *p = (*p & ~m) | (v & m);
6817 bits += n, bit_size -= n, bit_pos = 0, ++p;
6819 } else
6820 switch(bt) {
6821 /* XXX: when cross-compiling we assume that each type has the
6822 same representation on host and target, which is likely to
6823 be wrong in the case of long double */
6824 case VT_BOOL:
6825 vtop->c.i = vtop->c.i != 0;
6826 case VT_BYTE:
6827 *(char *)ptr |= vtop->c.i;
6828 break;
6829 case VT_SHORT:
6830 *(short *)ptr |= vtop->c.i;
6831 break;
6832 case VT_FLOAT:
6833 *(float*)ptr = vtop->c.f;
6834 break;
6835 case VT_DOUBLE:
6836 *(double *)ptr = vtop->c.d;
6837 break;
6838 case VT_LDOUBLE:
6839 #if defined TCC_IS_NATIVE_387
6840 if (sizeof (long double) >= 10) /* zero pad ten-byte LD */
6841 memcpy(ptr, &vtop->c.ld, 10);
6842 #ifdef __TINYC__
6843 else if (sizeof (long double) == sizeof (double))
6844 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr) : "m" (vtop->c.ld));
6845 #endif
6846 else if (vtop->c.ld == 0.0)
6848 else
6849 #endif
6850 if (sizeof(long double) == LDOUBLE_SIZE)
6851 *(long double*)ptr = vtop->c.ld;
6852 else if (sizeof(double) == LDOUBLE_SIZE)
6853 *(double *)ptr = (double)vtop->c.ld;
6854 else
6855 tcc_error("can't cross compile long double constants");
6856 break;
6857 #if PTR_SIZE != 8
6858 case VT_LLONG:
6859 *(long long *)ptr |= vtop->c.i;
6860 break;
6861 #else
6862 case VT_LLONG:
6863 #endif
6864 case VT_PTR:
6866 addr_t val = vtop->c.i;
6867 #if PTR_SIZE == 8
6868 if (vtop->r & VT_SYM)
6869 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
6870 else
6871 *(addr_t *)ptr |= val;
6872 #else
6873 if (vtop->r & VT_SYM)
6874 greloc(sec, vtop->sym, c, R_DATA_PTR);
6875 *(addr_t *)ptr |= val;
6876 #endif
6877 break;
6879 default:
6881 int val = vtop->c.i;
6882 #if PTR_SIZE == 8
6883 if (vtop->r & VT_SYM)
6884 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
6885 else
6886 *(int *)ptr |= val;
6887 #else
6888 if (vtop->r & VT_SYM)
6889 greloc(sec, vtop->sym, c, R_DATA_PTR);
6890 *(int *)ptr |= val;
6891 #endif
6892 break;
6896 vtop--;
6897 } else {
6898 vset(&dtype, VT_LOCAL|VT_LVAL, c);
6899 vswap();
6900 vstore();
6901 vpop();
6905 /* 'type' contains the type and storage info. 'c' is the offset of the
6906 object in section 'sec'. If 'sec' is NULL, it means stack based
6907 allocation. 'first' is true if array '{' must be read (multi
6908 dimension implicit array init handling). 'size_only' is true if
6909 size only evaluation is wanted (only for arrays). */
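/* A sketch of the initializer forms handled below (illustrative only):
       int a[3] = { 1, 2, 3 };                  // brace-enclosed list
       char s[] = "hi";                         // string initializer for a char array
       struct { int x, y; } t = { .x = 1, 2 };  // struct init, possibly with designators
*/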
6910 static void decl_initializer(CType *type, Section *sec, unsigned long c,
6911 int first, int size_only)
6913 int len, n, no_oblock, nb, i;
6914 int size1, align1;
6915 int have_elem;
6916 Sym *s, *f;
6917 Sym indexsym;
6918 CType *t1;
6920 /* If we are currently at a '}' or ',' we have read an initializer
6921 element in one of our callers, and not yet consumed it. */
6922 have_elem = tok == '}' || tok == ',';
6923 if (!have_elem && tok != '{' &&
6924 /* In case of strings we have special handling for arrays, so
6925 don't consume them as initializer value (which would commit them
6926 to some anonymous symbol). */
6927 tok != TOK_LSTR && tok != TOK_STR &&
6928 !size_only) {
6929 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
6930 have_elem = 1;
6933 if (have_elem &&
6934 !(type->t & VT_ARRAY) &&
6935 /* Compare unqualified types here, to strip toplevel qualifiers.
6936 The source type might have VT_CONSTANT set, which is
6937 of course assignable to non-const elements. */
6938 is_compatible_unqualified_types(type, &vtop->type)) {
6939 init_putv(type, sec, c);
6940 } else if (type->t & VT_ARRAY) {
6941 s = type->ref;
6942 n = s->c;
6943 t1 = pointed_type(type);
6944 size1 = type_size(t1, &align1);
6946 no_oblock = 1;
6947 if ((first && tok != TOK_LSTR && tok != TOK_STR) ||
6948 tok == '{') {
6949 if (tok != '{')
6950 tcc_error("character array initializer must be a literal,"
6951 " optionally enclosed in braces");
6952 skip('{');
6953 no_oblock = 0;
6956 /* only parse strings here if the type is correct (otherwise: handle
6957 them as ((w)char *) expressions) */
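/* e.g. (illustrative) 'char s[4] = "abc";' is consumed right here by copying
   the characters, whereas in 'char *p = "abc";' the string is not consumed
   here and instead becomes an anonymous symbol whose address initializes p */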
6958 if ((tok == TOK_LSTR &&
6959 #ifdef TCC_TARGET_PE
6960 (t1->t & VT_BTYPE) == VT_SHORT && (t1->t & VT_UNSIGNED)
6961 #else
6962 (t1->t & VT_BTYPE) == VT_INT
6963 #endif
6964 ) || (tok == TOK_STR && (t1->t & VT_BTYPE) == VT_BYTE)) {
6965 len = 0;
6966 while (tok == TOK_STR || tok == TOK_LSTR) {
6967 int cstr_len, ch;
6969 /* compute maximum number of chars wanted */
6970 if (tok == TOK_STR)
6971 cstr_len = tokc.str.size;
6972 else
6973 cstr_len = tokc.str.size / sizeof(nwchar_t);
6974 cstr_len--;
6975 nb = cstr_len;
6976 if (n >= 0 && nb > (n - len))
6977 nb = n - len;
6978 if (!size_only) {
6979 if (cstr_len > nb)
6980 tcc_warning("initializer-string for array is too long");
6981 /* in order to go faster for the common case (char
6982 string in a global variable), we handle it
6983 specifically */
6984 if (sec && tok == TOK_STR && size1 == 1) {
6985 if (!NODATA_WANTED)
6986 memcpy(sec->data + c + len, tokc.str.data, nb);
6987 } else {
6988 for(i=0;i<nb;i++) {
6989 if (tok == TOK_STR)
6990 ch = ((unsigned char *)tokc.str.data)[i];
6991 else
6992 ch = ((nwchar_t *)tokc.str.data)[i];
6993 vpushi(ch);
6994 init_putv(t1, sec, c + (len + i) * size1);
6998 len += nb;
6999 next();
7001 /* only add trailing zero if enough storage (no
7002 warning in this case since it is standard) */
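/* e.g. (illustrative) 'char s[2] = "ab";' fills the array exactly, so no
   terminating '\0' is stored and no warning is issued */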
7003 if (n < 0 || len < n) {
7004 if (!size_only) {
7005 vpushi(0);
7006 init_putv(t1, sec, c + (len * size1));
7008 len++;
7010 len *= size1;
7011 } else {
7012 indexsym.c = 0;
7013 f = &indexsym;
7015 do_init_list:
7016 len = 0;
7017 while (tok != '}' || have_elem) {
7018 len = decl_designator(type, sec, c, &f, size_only, len);
7019 have_elem = 0;
7020 if (type->t & VT_ARRAY) {
7021 ++indexsym.c;
7022 /* special test for multi dimensional arrays (may not
7023 be strictly correct if designators are used at the
7024 same time) */
7025 if (no_oblock && len >= n*size1)
7026 break;
7027 } else {
7028 if (s->type.t == VT_UNION)
7029 f = NULL;
7030 else
7031 f = f->next;
7032 if (no_oblock && f == NULL)
7033 break;
7036 if (tok == '}')
7037 break;
7038 skip(',');
7041 /* put zeros at the end */
7042 if (!size_only && len < n*size1)
7043 init_putz(sec, c + len, n*size1 - len);
7044 if (!no_oblock)
7045 skip('}');
7046 /* patch type size if needed, which happens only for array types */
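/* e.g. (illustrative) 'int a[] = { 1, 2, 3 };' and 'char s[] = "hi";' both
   end up with the ref's 'c' patched to 3 below */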
7047 if (n < 0)
7048 s->c = size1 == 1 ? len : ((len + size1 - 1)/size1);
7049 } else if ((type->t & VT_BTYPE) == VT_STRUCT) {
7050 size1 = 1;
7051 no_oblock = 1;
7052 if (first || tok == '{') {
7053 skip('{');
7054 no_oblock = 0;
7056 s = type->ref;
7057 f = s->next;
7058 n = s->c;
7059 goto do_init_list;
7060 } else if (tok == '{') {
7061 next();
7062 decl_initializer(type, sec, c, first, size_only);
7063 skip('}');
7064 } else if (size_only) {
7065 /* If we supported only ISO C we wouldn't have to accept calling
7066 this on anything other than an array with size_only==1 (and even then
7067 only on the outermost level, so no recursion would be needed),
7068 because initializing a flex array member isn't supported.
7069 But GNU C supports it, so we need to recurse even into
7070 subfields of structs and arrays when size_only is set. */
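/* an illustrative GNU C case that forces this recursion:
       struct flex { int n; int tail[]; };
       static struct flex f = { 2, { 10, 20 } };
*/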
7071 /* just skip expression */
7072 skip_or_save_block(NULL);
7073 } else {
7074 if (!have_elem) {
7075 /* This should happen only when we haven't parsed
7076 the init element above for fear of committing a
7077 string constant to memory too early. */
7078 if (tok != TOK_STR && tok != TOK_LSTR)
7079 expect("string constant");
7080 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
7082 init_putv(type, sec, c);
7086 /* parse an initializer for type 'type' if 'has_init' is non-zero, and
7087 allocate space in local or global data space ('r' is either
7088 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
7089 variable 'v' of scope 'scope' is declared before initializers
7090 are parsed. If 'v' is zero, then a reference to the new object
7091 is put in the value stack. If 'has_init' is 2, a special parsing
7092 is done to handle string constants. */
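/* For illustration: a local 'int x[2] = { 1, 2 };' arrives here with
   r == VT_LOCAL, v != 0 and has_init == 1; an anonymous string literal used in
   an expression is allocated with v == 0 so that a reference to it is left on
   the value stack (presumably via the has_init == 2 string mode). */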
7093 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r,
7094 int has_init, int v, int scope)
7096 int size, align, addr;
7097 TokenString *init_str = NULL;
7099 Section *sec;
7100 Sym *flexible_array;
7101 Sym *sym = NULL;
7102 int saved_nocode_wanted = nocode_wanted;
7103 #ifdef CONFIG_TCC_BCHECK
7104 int bcheck;
7105 #endif
7107 /* Always allocate static or global variables */
7108 if (v && (r & VT_VALMASK) == VT_CONST)
7109 nocode_wanted |= 0x80000000;
7111 #ifdef CONFIG_TCC_BCHECK
7112 bcheck = tcc_state->do_bounds_check && !NODATA_WANTED;
7113 #endif
7115 flexible_array = NULL;
7116 if ((type->t & VT_BTYPE) == VT_STRUCT) {
7117 Sym *field = type->ref->next;
7118 if (field) {
7119 while (field->next)
7120 field = field->next;
7121 if (field->type.t & VT_ARRAY && field->type.ref->c < 0)
7122 flexible_array = field;
7126 size = type_size(type, &align);
7127 /* If unknown size, we must evaluate it before
7128 evaluating initializers because
7129 initializers can generate global data too
7130 (e.g. string pointers or ISOC99 compound
7131 literals). It also simplifies local
7132 initializers handling */
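/* e.g. (illustrative) for 'int a[] = { 1, 2, 3 };' the initializer tokens are
   saved, replayed once in size-only mode to learn that 'a' has 3 elements,
   then replayed again below to actually emit the data */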
7133 if (size < 0 || (flexible_array && has_init)) {
7134 if (!has_init)
7135 tcc_error("unknown type size");
7136 /* get all init string */
7137 if (has_init == 2) {
7138 init_str = tok_str_alloc();
7139 /* only get strings */
7140 while (tok == TOK_STR || tok == TOK_LSTR) {
7141 tok_str_add_tok(init_str);
7142 next();
7144 tok_str_add(init_str, -1);
7145 tok_str_add(init_str, 0);
7146 } else {
7147 skip_or_save_block(&init_str);
7149 unget_tok(0);
7151 /* compute size */
7152 begin_macro(init_str, 1);
7153 next();
7154 decl_initializer(type, NULL, 0, 1, 1);
7155 /* prepare second initializer parsing */
7156 macro_ptr = init_str->str;
7157 next();
7159 /* if still unknown size, error */
7160 size = type_size(type, &align);
7161 if (size < 0)
7162 tcc_error("unknown type size");
7164 /* If there's a flex member and it was used in the initializer
7165 adjust size. */
7166 if (flexible_array &&
7167 flexible_array->type.ref->c > 0)
7168 size += flexible_array->type.ref->c
7169 * pointed_size(&flexible_array->type);
7170 /* take into account specified alignment if bigger */
7171 if (ad->a.aligned) {
7172 int speca = 1 << (ad->a.aligned - 1);
7173 if (speca > align)
7174 align = speca;
7175 } else if (ad->a.packed) {
7176 align = 1;
7179 if (!v && NODATA_WANTED)
7180 size = 0, align = 1;
7182 if ((r & VT_VALMASK) == VT_LOCAL) {
7183 sec = NULL;
7184 #ifdef CONFIG_TCC_BCHECK
7185 if (bcheck && (type->t & VT_ARRAY)) {
7186 loc--;
7188 #endif
7189 loc = (loc - size) & -align;
7190 addr = loc;
7191 #ifdef CONFIG_TCC_BCHECK
7192 /* handles bounds */
7193 /* XXX: currently, since we do only one pass, we cannot track
7194 '&' operators, so we add only arrays */
7195 if (bcheck && (type->t & VT_ARRAY)) {
7196 addr_t *bounds_ptr;
7197 /* add padding between regions */
7198 loc--;
7199 /* then add local bound info */
7200 bounds_ptr = section_ptr_add(lbounds_section, 2 * sizeof(addr_t));
7201 bounds_ptr[0] = addr;
7202 bounds_ptr[1] = size;
7204 #endif
7205 if (v) {
7206 /* local variable */
7207 #ifdef CONFIG_TCC_ASM
7208 if (ad->asm_label) {
7209 int reg = asm_parse_regvar(ad->asm_label);
7210 if (reg >= 0)
7211 r = (r & ~VT_VALMASK) | reg;
7213 #endif
7214 sym = sym_push(v, type, r, addr);
7215 if (ad->cleanup_func) {
7216 Sym *cls = sym_push2(&all_cleanups, SYM_FIELD | ++ncleanups, 0, 0);
7217 cls->prev_tok = sym;
7218 cls->next = ad->cleanup_func;
7219 cls->ncl = current_cleanups;
7220 current_cleanups = cls;
7223 sym->a = ad->a;
7224 } else {
7225 /* push local reference */
7226 vset(type, r, addr);
7228 } else {
7229 if (v && scope == VT_CONST) {
7230 /* see if the symbol was already defined */
7231 sym = sym_find(v);
7232 if (sym) {
7233 patch_storage(sym, ad, type);
7234 /* we accept several definitions of the same global variable. */
7235 if (!has_init && sym->c && elfsym(sym)->st_shndx != SHN_UNDEF)
7236 goto no_alloc;
7240 /* allocate symbol in corresponding section */
7241 sec = ad->section;
7242 if (!sec) {
7243 if (has_init)
7244 sec = data_section;
7245 else if (tcc_state->nocommon)
7246 sec = bss_section;
7249 if (sec) {
7250 addr = section_add(sec, size, align);
7251 #ifdef CONFIG_TCC_BCHECK
7252 /* add padding if bound check */
7253 if (bcheck)
7254 section_add(sec, 1, 1);
7255 #endif
7256 } else {
7257 addr = align; /* SHN_COMMON is special, symbol value is align */
7258 sec = common_section;
7261 if (v) {
7262 if (!sym) {
7263 sym = sym_push(v, type, r | VT_SYM, 0);
7264 patch_storage(sym, ad, NULL);
7266 /* Local statics have a scope until now (for
7267 warnings), remove it here. */
7268 sym->sym_scope = 0;
7269 /* update symbol definition */
7270 put_extern_sym(sym, sec, addr, size);
7271 } else {
7272 /* push global reference */
7273 sym = get_sym_ref(type, sec, addr, size);
7274 vpushsym(type, sym);
7275 vtop->r |= r;
7278 #ifdef CONFIG_TCC_BCHECK
7279 /* handles bounds now because the symbol must be defined
7280 beforehand, for the relocation */
7281 if (bcheck) {
7282 addr_t *bounds_ptr;
7284 greloca(bounds_section, sym, bounds_section->data_offset, R_DATA_PTR, 0);
7285 /* then add global bound info */
7286 bounds_ptr = section_ptr_add(bounds_section, 2 * sizeof(addr_t));
7287 bounds_ptr[0] = 0; /* relocated */
7288 bounds_ptr[1] = size;
7290 #endif
7293 if (type->t & VT_VLA) {
7294 int a;
7296 if (NODATA_WANTED)
7297 goto no_alloc;
7299 /* save current stack pointer */
7300 if (vlas_in_scope == 0) {
7301 if (vla_sp_root_loc == -1)
7302 vla_sp_root_loc = (loc -= PTR_SIZE);
7303 gen_vla_sp_save(vla_sp_root_loc);
7306 vla_runtime_type_size(type, &a);
7307 gen_vla_alloc(type, a);
7308 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
7309 /* on _WIN64, because of the function args scratch area, the
7310 result of alloca differs from RSP and is returned in RAX. */
7311 gen_vla_result(addr), addr = (loc -= PTR_SIZE);
7312 #endif
7313 gen_vla_sp_save(addr);
7314 vla_sp_loc = addr;
7315 vlas_in_scope++;
7317 } else if (has_init) {
7318 size_t oldreloc_offset = 0;
7319 if (sec && sec->reloc)
7320 oldreloc_offset = sec->reloc->data_offset;
7321 decl_initializer(type, sec, addr, 1, 0);
7322 if (sec && sec->reloc)
7323 squeeze_multi_relocs(sec, oldreloc_offset);
7324 /* patch flexible array member size back to -1, */
7325 /* for possible subsequent similar declarations */
7326 if (flexible_array)
7327 flexible_array->type.ref->c = -1;
7330 no_alloc:
7331 /* restore parse state if needed */
7332 if (init_str) {
7333 end_macro();
7334 next();
7337 nocode_wanted = saved_nocode_wanted;
7340 /* parse a function defined by symbol 'sym' and generate its code in
7341 'cur_text_section' */
7342 static void gen_function(Sym *sym)
7344 nocode_wanted = 0;
7345 ind = cur_text_section->data_offset;
7346 if (sym->a.aligned) {
7347 size_t newoff = section_add(cur_text_section, 0,
7348 1 << (sym->a.aligned - 1));
7349 gen_fill_nops(newoff - ind);
7351 /* NOTE: we patch the symbol size later */
7352 put_extern_sym(sym, cur_text_section, ind, 0);
7353 funcname = get_tok_str(sym->v, NULL);
7354 func_ind = ind;
7355 /* Initialize VLA state */
7356 vla_sp_loc = -1;
7357 vla_sp_root_loc = -1;
7358 /* put debug symbol */
7359 tcc_debug_funcstart(tcc_state, sym);
7360 /* push a dummy symbol to enable local sym storage */
7361 sym_push2(&local_stack, SYM_FIELD, 0, 0);
7362 local_scope = 1; /* for function parameters */
7363 gfunc_prolog(&sym->type);
7364 reset_local_scope();
7365 rsym = 0;
7366 clear_temp_local_var_list();
7367 block(NULL, NULL, 0);
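/* C99 5.1.2.2.3: reaching the closing '}' of main() behaves as if 'return 0;'
   were executed, hence the zero return generated below when code is live */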
7368 if (!(nocode_wanted & 0x20000000)
7369 && ((func_vt.t & VT_BTYPE) == VT_INT)
7370 && !strcmp (funcname, "main"))
7372 nocode_wanted = 0;
7373 vpushi(0);
7374 gen_assign_cast(&func_vt);
7375 gfunc_return(&func_vt);
7377 nocode_wanted = 0;
7378 gsym(rsym);
7379 gfunc_epilog();
7380 cur_text_section->data_offset = ind;
7381 label_pop(&global_label_stack, NULL, 0);
7382 /* reset local stack */
7383 reset_local_scope();
7384 sym_pop(&local_stack, NULL, 0);
7385 /* end of function */
7386 /* patch symbol size */
7387 elfsym(sym)->st_size = ind - func_ind;
7388 tcc_debug_funcend(tcc_state, ind - func_ind);
7389 /* It's better to crash than to generate wrong code */
7390 cur_text_section = NULL;
7391 funcname = ""; /* for safety */
7392 func_vt.t = VT_VOID; /* for safety */
7393 func_var = 0; /* for safety */
7394 ind = 0; /* for safety */
7395 nocode_wanted = 0x80000000;
7396 check_vstack();
7399 static void gen_inline_functions(TCCState *s)
7401 Sym *sym;
7402 int inline_generated, i, ln;
7403 struct InlineFunc *fn;
7405 ln = file->line_num;
7406 /* iterate while inline functions are referenced */
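/* generating one inline function may itself reference further inline
   functions, so loop until a full pass generates nothing new */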
7407 do {
7408 inline_generated = 0;
7409 for (i = 0; i < s->nb_inline_fns; ++i) {
7410 fn = s->inline_fns[i];
7411 sym = fn->sym;
7412 if (sym && sym->c) {
7413 /* the function was used: generate its code and
7414 convert it to a normal function */
7415 fn->sym = NULL;
7416 if (file)
7417 pstrcpy(file->filename, sizeof file->filename, fn->filename);
7418 sym->type.t &= ~VT_INLINE;
7420 begin_macro(fn->func_str, 1);
7421 next();
7422 cur_text_section = text_section;
7423 gen_function(sym);
7424 end_macro();
7426 inline_generated = 1;
7429 } while (inline_generated);
7430 file->line_num = ln;
7433 ST_FUNC void free_inline_functions(TCCState *s)
7435 int i;
7436 /* free tokens of unused inline functions */
7437 for (i = 0; i < s->nb_inline_fns; ++i) {
7438 struct InlineFunc *fn = s->inline_fns[i];
7439 if (fn->sym)
7440 tok_str_free(fn->func_str);
7442 dynarray_reset(&s->inline_fns, &s->nb_inline_fns);
7445 /* 'l' is VT_LOCAL or VT_CONST to define default storage type, or VT_CMP
7446 if parsing an old-style parameter decl list ('func_sym' is set then) */
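/* For illustration, the VT_CMP mode corresponds to the declaration list of an
   old-style (K&R) definition such as:
       int f(a, b)
           int a;
           char b;
       { return a + b; }
*/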
7447 static int decl0(int l, int is_for_loop_init, Sym *func_sym)
7449 int v, has_init, r;
7450 CType type, btype;
7451 Sym *sym;
7452 AttributeDef ad, adbase;
7454 while (1) {
7455 if (!parse_btype(&btype, &adbase)) {
7456 if (is_for_loop_init)
7457 return 0;
7458 /* skip redundant ';' if not in old parameter decl scope */
7459 if (tok == ';' && l != VT_CMP) {
7460 next();
7461 continue;
7463 if (l != VT_CONST)
7464 break;
7465 if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
7466 /* global asm block */
7467 asm_global_instr();
7468 continue;
7470 if (tok >= TOK_UIDENT) {
7471 /* special test for old K&R protos without explicit int
7472 type. Only accepted when defining global data */
7473 btype.t = VT_INT;
7474 } else {
7475 if (tok != TOK_EOF)
7476 expect("declaration");
7477 break;
7480 if (tok == ';') {
7481 if ((btype.t & VT_BTYPE) == VT_STRUCT) {
7482 int v = btype.ref->v;
7483 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) >= SYM_FIRST_ANOM)
7484 tcc_warning("unnamed struct/union that defines no instances");
7485 next();
7486 continue;
7488 if (IS_ENUM(btype.t)) {
7489 next();
7490 continue;
7493 while (1) { /* iterate thru each declaration */
7494 type = btype;
7495 /* If the base type itself was an array type of unspecified
7496 size (like in 'typedef int arr[]; arr x = {1};') then
7497 we will overwrite the unknown size by the real one for
7498 this decl. We need to unshare the ref symbol holding
7499 that size. */
7500 if ((type.t & VT_ARRAY) && type.ref->c < 0) {
7501 type.ref = sym_push(SYM_FIELD, &type.ref->type, 0, type.ref->c);
7503 ad = adbase;
7504 type_decl(&type, &ad, &v, TYPE_DIRECT);
7505 #if 0
7507 char buf[500];
7508 type_to_str(buf, sizeof(buf), &type, get_tok_str(v, NULL));
7509 printf("type = '%s'\n", buf);
7511 #endif
7512 if ((type.t & VT_BTYPE) == VT_FUNC) {
7513 if ((type.t & VT_STATIC) && (l == VT_LOCAL)) {
7514 tcc_error("function without file scope cannot be static");
7516 /* if old style function prototype, we accept a
7517 declaration list */
7518 sym = type.ref;
7519 if (sym->f.func_type == FUNC_OLD && l == VT_CONST)
7520 decl0(VT_CMP, 0, sym);
7523 if (gnu_ext && (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
7524 ad.asm_label = asm_label_instr();
7525 /* parse one last attribute list, after asm label */
7526 parse_attribute(&ad);
7527 if (tok == '{')
7528 expect(";");
7531 #ifdef TCC_TARGET_PE
7532 if (ad.a.dllimport || ad.a.dllexport) {
7533 if (type.t & (VT_STATIC|VT_TYPEDEF))
7534 tcc_error("cannot have dll linkage with static or typedef");
7535 if (ad.a.dllimport) {
7536 if ((type.t & VT_BTYPE) == VT_FUNC)
7537 ad.a.dllimport = 0;
7538 else
7539 type.t |= VT_EXTERN;
7542 #endif
7543 if (tok == '{') {
7544 if (l != VT_CONST)
7545 tcc_error("cannot use local functions");
7546 if ((type.t & VT_BTYPE) != VT_FUNC)
7547 expect("function definition");
7549 /* reject abstract declarators in function definitions;
7550 make old-style params without a decl have int type */
7551 sym = type.ref;
7552 while ((sym = sym->next) != NULL) {
7553 if (!(sym->v & ~SYM_FIELD))
7554 expect("identifier");
7555 if (sym->type.t == VT_VOID)
7556 sym->type = int_type;
7559 /* XXX: cannot do better now: convert 'extern inline' to 'static inline' */
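/* e.g. (illustrative) 'extern inline int f(void) { return 1; }' is compiled
   here as if it had been declared 'static inline' */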
7560 if ((type.t & (VT_EXTERN | VT_INLINE)) == (VT_EXTERN | VT_INLINE))
7561 type.t = (type.t & ~VT_EXTERN) | VT_STATIC;
7563 /* put function symbol */
7564 sym = external_global_sym(v, &type, 0);
7565 type.t &= ~VT_EXTERN;
7566 patch_storage(sym, &ad, &type);
7568 /* static inline functions are just recorded as a kind
7569 of macro. Their code will be emitted at the end of
7570 the compilation unit only if they are used */
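/* e.g. (illustrative) 'static inline int sq(int x) { return x * x; }' is only
   turned into code by gen_inline_functions() if sq is actually referenced */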
7571 if ((type.t & (VT_INLINE | VT_STATIC)) ==
7572 (VT_INLINE | VT_STATIC)) {
7573 struct InlineFunc *fn;
7574 const char *filename;
7576 filename = file ? file->filename : "";
7577 fn = tcc_malloc(sizeof *fn + strlen(filename));
7578 strcpy(fn->filename, filename);
7579 fn->sym = sym;
7580 skip_or_save_block(&fn->func_str);
7581 dynarray_add(&tcc_state->inline_fns,
7582 &tcc_state->nb_inline_fns, fn);
7583 } else {
7584 /* compute text section */
7585 cur_text_section = ad.section;
7586 if (!cur_text_section)
7587 cur_text_section = text_section;
7588 gen_function(sym);
7590 break;
7591 } else {
7592 if (l == VT_CMP) {
7593 /* find parameter in function parameter list */
7594 for (sym = func_sym->next; sym; sym = sym->next)
7595 if ((sym->v & ~SYM_FIELD) == v)
7596 goto found;
7597 tcc_error("declaration for parameter '%s' but no such parameter",
7598 get_tok_str(v, NULL));
7599 found:
7600 if (type.t & VT_STORAGE) /* 'register' is okay */
7601 tcc_error("storage class specified for '%s'",
7602 get_tok_str(v, NULL));
7603 if (sym->type.t != VT_VOID)
7604 tcc_error("redefinition of parameter '%s'",
7605 get_tok_str(v, NULL));
7606 convert_parameter_type(&type);
7607 sym->type = type;
7608 } else if (type.t & VT_TYPEDEF) {
7609 /* save typedefed type */
7610 /* XXX: test storage specifiers ? */
7611 sym = sym_find(v);
7612 if (sym && sym->sym_scope == local_scope) {
7613 if (!is_compatible_types(&sym->type, &type)
7614 || !(sym->type.t & VT_TYPEDEF))
7615 tcc_error("incompatible redefinition of '%s'",
7616 get_tok_str(v, NULL));
7617 sym->type = type;
7618 } else {
7619 sym = sym_push(v, &type, 0, 0);
7621 sym->a = ad.a;
7622 sym->f = ad.f;
7623 } else if ((type.t & VT_BTYPE) == VT_VOID
7624 && !(type.t & VT_EXTERN)) {
7625 tcc_error("declaration of void object");
7626 } else {
7627 r = 0;
7628 if ((type.t & VT_BTYPE) == VT_FUNC) {
7629 /* external function definition */
7630 /* specific case for func_call attribute */
7631 type.ref->f = ad.f;
7632 } else if (!(type.t & VT_ARRAY)) {
7633 /* not lvalue if array */
7634 r |= lvalue_type(type.t);
7636 has_init = (tok == '=');
7637 if (has_init && (type.t & VT_VLA))
7638 tcc_error("variable length array cannot be initialized");
7639 if (((type.t & VT_EXTERN) && (!has_init || l != VT_CONST)) ||
7640 ((type.t & VT_BTYPE) == VT_FUNC) ||
7641 ((type.t & VT_ARRAY) && (type.t & VT_STATIC) &&
7642 !has_init && l == VT_CONST && type.ref->c < 0)) {
7643 /* external variable or function */
7644 /* NOTE: like GCC, uninitialized global static
7645 arrays of unspecified size are considered
7646 extern */
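/* e.g. (illustrative) a file-scope 'static int t[];' with no initializer is
   treated like an extern declaration and gets no storage at this point */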
7647 type.t |= VT_EXTERN;
7648 sym = external_sym(v, &type, r, &ad);
7649 if (ad.alias_target) {
7650 ElfSym *esym;
7651 Sym *alias_target;
7652 alias_target = sym_find(ad.alias_target);
7653 esym = elfsym(alias_target);
7654 if (!esym)
7655 tcc_error("unsupported forward __alias__ attribute");
7656 /* Local statics have a scope until now (for
7657 warnings), remove it here. */
7658 sym->sym_scope = 0;
7659 put_extern_sym2(sym, esym->st_shndx, esym->st_value, esym->st_size, 0);
7661 } else {
7662 if (type.t & VT_STATIC)
7663 r |= VT_CONST;
7664 else
7665 r |= l;
7666 if (has_init)
7667 next();
7668 else if (l == VT_CONST)
7669 /* uninitialized global variables may be overridden */
7670 type.t |= VT_EXTERN;
7671 decl_initializer_alloc(&type, &ad, r, has_init, v, l);
7674 if (tok != ',') {
7675 if (is_for_loop_init)
7676 return 1;
7677 skip(';');
7678 break;
7680 next();
7682 ad.a.aligned = 0;
7685 return 0;
7688 static void decl(int l)
7690 decl0(l, 0, NULL);
7693 /* ------------------------------------------------------------------------- */