1 /*
2 * TCC - Tiny C Compiler
3 *
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 */
21 #include "tcc.h"
22 /********************************************************/
23 /* global variables */
25 /* loc : local variable index
26 ind : output code index
27 rsym: return symbol
28 anon_sym: anonymous symbol index
*/
30 ST_DATA int rsym, anon_sym, ind, loc;
32 ST_DATA Sym *sym_free_first;
33 ST_DATA void **sym_pools;
34 ST_DATA int nb_sym_pools;
36 ST_DATA Sym *global_stack;
37 ST_DATA Sym *local_stack;
38 ST_DATA Sym *define_stack;
39 ST_DATA Sym *global_label_stack;
40 ST_DATA Sym *local_label_stack;
42 static Sym *all_cleanups, *current_cleanups, *pending_gotos;
43 static int ncleanups;
45 static int local_scope;
46 static int in_sizeof;
47 static int section_sym;
49 ST_DATA int vlas_in_scope; /* number of VLAs that are currently in scope */
50 ST_DATA int vla_sp_root_loc; /* vla_sp_loc for SP before any VLAs were pushed */
51 ST_DATA int vla_sp_loc; /* Pointer to variable holding location to store stack pointer on the stack when modifying stack pointer */
53 ST_DATA SValue __vstack[1+VSTACK_SIZE], *vtop, *pvtop;
55 ST_DATA int const_wanted; /* true if constant wanted */
56 ST_DATA int nocode_wanted; /* no code generation wanted */
57 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
58 #define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */
59 ST_DATA int global_expr; /* true if compound literals must be allocated globally (used during initializer parsing) */
60 ST_DATA CType func_vt; /* current function return type (used by return instruction) */
61 ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
62 ST_DATA int func_vc;
63 ST_DATA int last_line_num, last_ind, func_ind; /* debug last line number and pc */
64 ST_DATA const char *funcname;
65 ST_DATA int g_debug;
67 ST_DATA CType char_pointer_type, func_old_type, int_type, size_type, ptrdiff_type;
69 ST_DATA struct switch_t {
70 struct case_t {
71 int64_t v1, v2;
72 int sym;
73 } **p; int n; /* list of case ranges */
74 int def_sym; /* default symbol */
75 } *cur_switch; /* current switch */
77 #define MAX_TEMP_LOCAL_VARIABLE_NUMBER 0x4
78 /* list of temporary local variables on the stack in the current function. */
79 ST_DATA struct temp_local_variable {
80 int location; //offset on stack. Svalue.c.i
81 short size;
82 short align;
83 } arr_temp_local_vars[MAX_TEMP_LOCAL_VARIABLE_NUMBER];
84 short nb_temp_local_vars;
86 /* ------------------------------------------------------------------------- */
88 static void gen_cast(CType *type);
89 static void gen_cast_s(int t);
90 static inline CType *pointed_type(CType *type);
91 static int is_compatible_types(CType *type1, CType *type2);
92 static int parse_btype(CType *type, AttributeDef *ad);
93 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td);
94 static void parse_expr_type(CType *type);
95 static void init_putv(CType *type, Section *sec, unsigned long c);
96 static void decl_initializer(CType *type, Section *sec, unsigned long c, int first, int size_only);
97 static void block(int *bsym, int *csym, int is_expr);
98 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
99 static void decl(int l);
100 static int decl0(int l, int is_for_loop_init, Sym *);
101 static void expr_eq(void);
102 static void vla_runtime_type_size(CType *type, int *a);
103 static void vla_sp_restore(void);
104 static void vla_sp_restore_root(void);
105 static int is_compatible_unqualified_types(CType *type1, CType *type2);
106 static inline int64_t expr_const64(void);
107 static void vpush64(int ty, unsigned long long v);
108 static void vpush(CType *type);
109 static int gvtst(int inv, int t);
110 static void gen_inline_functions(TCCState *s);
111 static void skip_or_save_block(TokenString **str);
112 static void gv_dup(void);
113 static int get_temp_local_var(int size,int align);
114 static void clear_temp_local_var_list();
117 static void reset_local_scope(void)
119 if (current_cleanups)
120 tcc_error("ICE current_cleanups");
121 sym_pop(&all_cleanups, NULL, 0);
122 local_scope = 0;
125 ST_INLN int is_float(int t)
127 int bt;
128 bt = t & VT_BTYPE;
129 return bt == VT_LDOUBLE || bt == VT_DOUBLE || bt == VT_FLOAT || bt == VT_QFLOAT;
132 /* we use our own 'finite' function to avoid potential problems with
133 non standard math libs */
134 /* XXX: endianness dependent */
135 ST_FUNC int ieee_finite(double d)
137 int p[4];
138 memcpy(p, &d, sizeof(double));
139 return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
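/* Illustration (a hedged sketch, not part of tcc, following this file's
   #if 0 convention): on a little-endian host p[1] holds the sign and the
   11-bit exponent; OR-ing with 0x800fffff forces every bit except the
   exponent to 1, so the increment carries out of bit 31 exactly when the
   exponent is all ones (Inf/NaN) and the shift yields 1 only for finite
   values. */
#if 0
#include <math.h>
#include <stdio.h>
static void ieee_finite_demo(void)
{
    double vals[4] = { 0.0, -3.25, HUGE_VAL, NAN };
    int i;
    for (i = 0; i < 4; i++)
        printf("%g -> %d\n", vals[i], ieee_finite(vals[i]));
    /* expected: 0 -> 1, -3.25 -> 1, inf -> 0, nan -> 0 */
}
#endif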
142 /* compiling intel long double natively */
143 #if (defined __i386__ || defined __x86_64__) \
144 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
145 # define TCC_IS_NATIVE_387
146 #endif
148 ST_FUNC void test_lvalue(void)
150 if (!(vtop->r & VT_LVAL))
151 expect("lvalue");
154 ST_FUNC void check_vstack(void)
156 if (pvtop != vtop)
157 tcc_error("internal compiler error: vstack leak (%d)", vtop - pvtop);
160 /* ------------------------------------------------------------------------- */
161 /* vstack debugging aid */
163 #if 0
164 void pv (const char *lbl, int a, int b)
166 int i;
167 for (i = a; i < a + b; ++i) {
168 SValue *p = &vtop[-i];
169 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
170 lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
173 #endif
175 /* ------------------------------------------------------------------------- */
176 /* start of translation unit info */
177 ST_FUNC void tcc_debug_start(TCCState *s1)
179 if (s1->do_debug) {
180 char buf[512];
182 /* file info: full path + filename */
183 section_sym = put_elf_sym(symtab_section, 0, 0,
184 ELFW(ST_INFO)(STB_LOCAL, STT_SECTION), 0,
185 text_section->sh_num, NULL);
186 getcwd(buf, sizeof(buf));
187 #ifdef _WIN32
188 normalize_slashes(buf);
189 #endif
190 pstrcat(buf, sizeof(buf), "/");
191 put_stabs_r(buf, N_SO, 0, 0,
192 text_section->data_offset, text_section, section_sym);
193 put_stabs_r(file->filename, N_SO, 0, 0,
194 text_section->data_offset, text_section, section_sym);
195 last_ind = 0;
196 last_line_num = 0;
199 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
200 symbols can be safely used */
201 put_elf_sym(symtab_section, 0, 0,
202 ELFW(ST_INFO)(STB_LOCAL, STT_FILE), 0,
203 SHN_ABS, file->filename);
206 /* put end of translation unit info */
207 ST_FUNC void tcc_debug_end(TCCState *s1)
209 if (!s1->do_debug)
210 return;
211 put_stabs_r(NULL, N_SO, 0, 0,
212 text_section->data_offset, text_section, section_sym);
216 /* generate line number info */
217 ST_FUNC void tcc_debug_line(TCCState *s1)
219 if (!s1->do_debug)
220 return;
221 if ((last_line_num != file->line_num || last_ind != ind)) {
222 put_stabn(N_SLINE, 0, file->line_num, ind - func_ind);
223 last_ind = ind;
224 last_line_num = file->line_num;
228 /* put function symbol */
229 ST_FUNC void tcc_debug_funcstart(TCCState *s1, Sym *sym)
231 char buf[512];
233 if (!s1->do_debug)
234 return;
236 /* stabs info */
237 /* XXX: we put here a dummy type */
238 snprintf(buf, sizeof(buf), "%s:%c1",
239 funcname, sym->type.t & VT_STATIC ? 'f' : 'F');
240 put_stabs_r(buf, N_FUN, 0, file->line_num, 0,
241 cur_text_section, sym->c);
242 /* //gr gdb wants a line at the function */
243 put_stabn(N_SLINE, 0, file->line_num, 0);
245 last_ind = 0;
246 last_line_num = 0;
249 /* put function size */
250 ST_FUNC void tcc_debug_funcend(TCCState *s1, int size)
252 if (!s1->do_debug)
253 return;
254 put_stabn(N_FUN, 0, 0, size);
257 /* ------------------------------------------------------------------------- */
258 ST_FUNC int tccgen_compile(TCCState *s1)
260 cur_text_section = NULL;
261 funcname = "";
262 anon_sym = SYM_FIRST_ANOM;
263 section_sym = 0;
264 const_wanted = 0;
265 nocode_wanted = 0x80000000;
267 /* define some often used types */
268 int_type.t = VT_INT;
269 char_pointer_type.t = VT_BYTE;
270 mk_pointer(&char_pointer_type);
271 #if PTR_SIZE == 4
272 size_type.t = VT_INT | VT_UNSIGNED;
273 ptrdiff_type.t = VT_INT;
274 #elif LONG_SIZE == 4
275 size_type.t = VT_LLONG | VT_UNSIGNED;
276 ptrdiff_type.t = VT_LLONG;
277 #else
278 size_type.t = VT_LONG | VT_LLONG | VT_UNSIGNED;
279 ptrdiff_type.t = VT_LONG | VT_LLONG;
280 #endif
281 func_old_type.t = VT_FUNC;
282 func_old_type.ref = sym_push(SYM_FIELD, &int_type, 0, 0);
283 func_old_type.ref->f.func_call = FUNC_CDECL;
284 func_old_type.ref->f.func_type = FUNC_OLD;
286 tcc_debug_start(s1);
288 #ifdef TCC_TARGET_ARM
289 arm_init(s1);
290 #endif
292 #ifdef INC_DEBUG
293 printf("%s: **** new file\n", file->filename);
294 #endif
296 parse_flags = PARSE_FLAG_PREPROCESS | PARSE_FLAG_TOK_NUM | PARSE_FLAG_TOK_STR;
297 next();
298 decl(VT_CONST);
299 gen_inline_functions(s1);
300 check_vstack();
301 /* end of translation unit info */
302 tcc_debug_end(s1);
303 return 0;
306 /* ------------------------------------------------------------------------- */
307 ST_FUNC ElfSym *elfsym(Sym *s)
309 if (!s || !s->c)
310 return NULL;
311 return &((ElfSym *)symtab_section->data)[s->c];
314 /* apply storage attributes to Elf symbol */
315 ST_FUNC void update_storage(Sym *sym)
317 ElfSym *esym;
318 int sym_bind, old_sym_bind;
320 esym = elfsym(sym);
321 if (!esym)
322 return;
324 if (sym->a.visibility)
325 esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1))
326 | sym->a.visibility;
328 if (sym->type.t & VT_STATIC)
329 sym_bind = STB_LOCAL;
330 else if (sym->a.weak)
331 sym_bind = STB_WEAK;
332 else
333 sym_bind = STB_GLOBAL;
334 old_sym_bind = ELFW(ST_BIND)(esym->st_info);
335 if (sym_bind != old_sym_bind) {
336 esym->st_info = ELFW(ST_INFO)(sym_bind, ELFW(ST_TYPE)(esym->st_info));
339 #ifdef TCC_TARGET_PE
340 if (sym->a.dllimport)
341 esym->st_other |= ST_PE_IMPORT;
342 if (sym->a.dllexport)
343 esym->st_other |= ST_PE_EXPORT;
344 #endif
346 #if 0
347 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
348 get_tok_str(sym->v, NULL),
349 sym_bind == STB_WEAK ? 'w' : sym_bind == STB_LOCAL ? 'l' : 'g',
350 sym->a.visibility,
351 sym->a.dllexport,
352 sym->a.dllimport);
354 #endif
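/* Illustration (a sketch, not part of tcc): the binding selection above
   maps C-level storage onto ELF symbol bindings roughly as follows. */
#if 0
static int a;                  /* VT_STATIC       -> STB_LOCAL  */
int b __attribute__((weak));   /* sym->a.weak     -> STB_WEAK   */
int c;                         /* everything else -> STB_GLOBAL */
#endif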
357 /* ------------------------------------------------------------------------- */
358 /* update sym->c so that it points to an external symbol in section
359 'section' with value 'value' */
361 ST_FUNC void put_extern_sym2(Sym *sym, int sh_num,
362 addr_t value, unsigned long size,
363 int can_add_underscore)
365 int sym_type, sym_bind, info, other, t;
366 ElfSym *esym;
367 const char *name;
368 char buf1[256];
369 #ifdef CONFIG_TCC_BCHECK
370 char buf[32];
371 #endif
373 if (!sym->c) {
374 name = get_tok_str(sym->v, NULL);
375 #ifdef CONFIG_TCC_BCHECK
376 if (tcc_state->do_bounds_check) {
377 /* XXX: avoid doing that for statics ? */
378 /* if bound checking is activated, we change some function
379 names by adding the "__bound" prefix */
380 switch(sym->v) {
381 #ifdef TCC_TARGET_PE
382 /* XXX: we rely only on malloc hooks */
383 case TOK_malloc:
384 case TOK_free:
385 case TOK_realloc:
386 case TOK_memalign:
387 case TOK_calloc:
388 #endif
389 case TOK_memcpy:
390 case TOK_memmove:
391 case TOK_memset:
392 case TOK_strlen:
393 case TOK_strcpy:
394 case TOK_alloca:
395 strcpy(buf, "__bound_");
396 strcat(buf, name);
397 name = buf;
398 break;
401 #endif
402 t = sym->type.t;
403 if ((t & VT_BTYPE) == VT_FUNC) {
404 sym_type = STT_FUNC;
405 } else if ((t & VT_BTYPE) == VT_VOID) {
406 sym_type = STT_NOTYPE;
407 } else {
408 sym_type = STT_OBJECT;
410 if (t & VT_STATIC)
411 sym_bind = STB_LOCAL;
412 else
413 sym_bind = STB_GLOBAL;
414 other = 0;
415 #ifdef TCC_TARGET_PE
416 if (sym_type == STT_FUNC && sym->type.ref) {
417 Sym *ref = sym->type.ref;
418 if (ref->a.nodecorate) {
419 can_add_underscore = 0;
421 if (ref->f.func_call == FUNC_STDCALL && can_add_underscore) {
422 sprintf(buf1, "_%s@%d", name, ref->f.func_args * PTR_SIZE);
423 name = buf1;
424 other |= ST_PE_STDCALL;
425 can_add_underscore = 0;
428 #endif
429 if (tcc_state->leading_underscore && can_add_underscore) {
430 buf1[0] = '_';
431 pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
432 name = buf1;
434 if (sym->asm_label)
435 name = get_tok_str(sym->asm_label, NULL);
436 info = ELFW(ST_INFO)(sym_bind, sym_type);
437 sym->c = put_elf_sym(symtab_section, value, size, info, other, sh_num, name);
438 } else {
439 esym = elfsym(sym);
440 esym->st_value = value;
441 esym->st_size = size;
442 esym->st_shndx = sh_num;
444 update_storage(sym);
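/* Illustration (a sketch, not part of tcc): the name decoration applied
   above.  On a 32-bit PE target (PTR_SIZE == 4) a two-argument stdcall
   function "f" is emitted as "_f@8", and with leading_underscore set a
   plain symbol "g" becomes "_g".  The hypothetical snippet below mirrors
   the FUNC_STDCALL branch. */
#if 0
#include <stdio.h>
static void decorate_demo(void)
{
    char buf1[256];
    sprintf(buf1, "_%s@%d", "f", 2 * 4); /* func_args * PTR_SIZE */
    printf("%s\n", buf1);                /* prints "_f@8" */
}
#endif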
447 ST_FUNC void put_extern_sym(Sym *sym, Section *section,
448 addr_t value, unsigned long size)
450 int sh_num = section ? section->sh_num : SHN_UNDEF;
451 put_extern_sym2(sym, sh_num, value, size, 1);
454 /* add a new relocation entry to symbol 'sym' in section 's' */
455 ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
456 addr_t addend)
458 int c = 0;
460 if (nocode_wanted && s == cur_text_section)
461 return;
463 if (sym) {
464 if (0 == sym->c)
465 put_extern_sym(sym, NULL, 0, 0);
466 c = sym->c;
469 /* now we can add ELF relocation info */
470 put_elf_reloca(symtab_section, s, offset, type, c, addend);
473 #if PTR_SIZE == 4
474 ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
476 greloca(s, sym, offset, type, 0);
478 #endif
480 /* ------------------------------------------------------------------------- */
481 /* symbol allocator */
482 static Sym *__sym_malloc(void)
484 Sym *sym_pool, *sym, *last_sym;
485 int i;
487 sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
488 dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);
490 last_sym = sym_free_first;
491 sym = sym_pool;
492 for(i = 0; i < SYM_POOL_NB; i++) {
493 sym->next = last_sym;
494 last_sym = sym;
495 sym++;
497 sym_free_first = last_sym;
498 return last_sym;
501 static inline Sym *sym_malloc(void)
503 Sym *sym;
504 #ifndef SYM_DEBUG
505 sym = sym_free_first;
506 if (!sym)
507 sym = __sym_malloc();
508 sym_free_first = sym->next;
509 return sym;
510 #else
511 sym = tcc_malloc(sizeof(Sym));
512 return sym;
513 #endif
516 ST_INLN void sym_free(Sym *sym)
518 #ifndef SYM_DEBUG
519 sym->next = sym_free_first;
520 sym_free_first = sym;
521 #else
522 tcc_free(sym);
523 #endif
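/* Illustration (a standalone, hedged sketch with hypothetical names and
   error handling omitted, not tcc code): the three functions above form a
   pool allocator with an intrusive free list.  __sym_malloc() grabs
   SYM_POOL_NB symbols at once and threads them onto sym_free_first;
   sym_malloc() pops from that list and sym_free() pushes back, so most
   allocations never touch malloc. */
#if 0
#include <stdlib.h>
#define POOL_NB 64
struct node { struct node *next; int payload; };
static struct node *free_first;

static struct node *node_alloc(void)
{
    struct node *n = free_first;
    if (!n) {
        struct node *pool = malloc(POOL_NB * sizeof(*pool));
        int i;
        for (i = 0; i < POOL_NB; i++) { /* thread the pool onto the free list */
            pool[i].next = free_first;
            free_first = &pool[i];
        }
        n = free_first;
    }
    free_first = n->next;
    return n;
}

static void node_free(struct node *n)
{
    n->next = free_first;
    free_first = n;
}
#endif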
526 /* push, without hashing */
527 ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, int c)
529 Sym *s;
531 s = sym_malloc();
532 memset(s, 0, sizeof *s);
533 s->v = v;
534 s->type.t = t;
535 s->c = c;
536 /* add in stack */
537 s->prev = *ps;
538 *ps = s;
539 return s;
542 /* find a symbol and return its associated structure. 's' is the top
543 of the symbol stack */
544 ST_FUNC Sym *sym_find2(Sym *s, int v)
546 while (s) {
547 if (s->v == v)
548 return s;
549 else if (s->v == -1)
550 return NULL;
551 s = s->prev;
553 return NULL;
556 /* structure lookup */
557 ST_INLN Sym *struct_find(int v)
559 v -= TOK_IDENT;
560 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
561 return NULL;
562 return table_ident[v]->sym_struct;
565 /* find an identifier */
566 ST_INLN Sym *sym_find(int v)
568 v -= TOK_IDENT;
569 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
570 return NULL;
571 return table_ident[v]->sym_identifier;
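/* Illustration (not part of tcc): struct_find() and sym_find() consult two
   separate namespaces (sym_struct vs sym_identifier) for the same token,
   which is why C code like the following is legal: */
#if 0
struct list { int x; };
int list;              /* tag "list" and object "list" do not clash */
struct list a_list;
#endif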
574 /* push a given symbol on the symbol stack */
575 ST_FUNC Sym *sym_push(int v, CType *type, int r, int c)
577 Sym *s, **ps;
578 TokenSym *ts;
580 if (local_stack)
581 ps = &local_stack;
582 else
583 ps = &global_stack;
584 s = sym_push2(ps, v, type->t, c);
585 s->type.ref = type->ref;
586 s->r = r;
587 /* don't record fields or anonymous symbols */
588 /* XXX: simplify */
589 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
590 /* record symbol in token array */
591 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
592 if (v & SYM_STRUCT)
593 ps = &ts->sym_struct;
594 else
595 ps = &ts->sym_identifier;
596 s->prev_tok = *ps;
597 *ps = s;
598 s->sym_scope = local_scope;
599 if (s->prev_tok && s->prev_tok->sym_scope == s->sym_scope)
600 tcc_error("redeclaration of '%s'",
601 get_tok_str(v & ~SYM_STRUCT, NULL));
603 return s;
606 /* push a global identifier */
607 ST_FUNC Sym *global_identifier_push(int v, int t, int c)
609 Sym *s, **ps;
610 s = sym_push2(&global_stack, v, t, c);
611 /* don't record anonymous symbol */
612 if (v < SYM_FIRST_ANOM) {
613 ps = &table_ident[v - TOK_IDENT]->sym_identifier;
614 /* modify the top most local identifier, so that
615 sym_identifier will point to 's' when popped */
616 while (*ps != NULL && (*ps)->sym_scope)
617 ps = &(*ps)->prev_tok;
618 s->prev_tok = *ps;
619 *ps = s;
621 return s;
624 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
625 pop them yet from the list, but do remove them from the token array. */
626 ST_FUNC void sym_pop(Sym **ptop, Sym *b, int keep)
628 Sym *s, *ss, **ps;
629 TokenSym *ts;
630 int v;
632 s = *ptop;
633 while(s != b) {
634 ss = s->prev;
635 v = s->v;
636 /* remove symbol in token array */
637 /* XXX: simplify */
638 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
639 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
640 if (v & SYM_STRUCT)
641 ps = &ts->sym_struct;
642 else
643 ps = &ts->sym_identifier;
644 *ps = s->prev_tok;
646 if (!keep)
647 sym_free(s);
648 s = ss;
650 if (!keep)
651 *ptop = b;
654 /* ------------------------------------------------------------------------- */
656 static void vsetc(CType *type, int r, CValue *vc)
658 int v;
660 if (vtop >= vstack + (VSTACK_SIZE - 1))
661 tcc_error("memory full (vstack)");
662 /* cannot leave cpu flags live if other instructions are generated. Also
663 avoid leaving VT_JMP anywhere except on the top of the stack
664 because it would complicate the code generator.
666 Don't do this when nocode_wanted. vtop might come from
667 !nocode_wanted regions (see 88_codeopt.c) and transforming
668 it to a register without actually generating code is wrong
669 as their value might still be used for real. All values
670 we push under nocode_wanted will eventually be popped
671 again, so that the VT_CMP/VT_JMP value will be in vtop
672 when code is unsuppressed again.
674 Same logic below in vswap(); */
675 if (vtop >= vstack && !nocode_wanted) {
676 v = vtop->r & VT_VALMASK;
677 if (v == VT_CMP || (v & ~1) == VT_JMP)
678 gv(RC_INT);
681 vtop++;
682 vtop->type = *type;
683 vtop->r = r;
684 vtop->r2 = VT_CONST;
685 vtop->c = *vc;
686 vtop->sym = NULL;
689 ST_FUNC void vswap(void)
691 SValue tmp;
692 /* cannot vswap cpu flags. See comment at vsetc() above */
693 if (vtop >= vstack && !nocode_wanted) {
694 int v = vtop->r & VT_VALMASK;
695 if (v == VT_CMP || (v & ~1) == VT_JMP)
696 gv(RC_INT);
698 tmp = vtop[0];
699 vtop[0] = vtop[-1];
700 vtop[-1] = tmp;
703 /* pop stack value */
704 ST_FUNC void vpop(void)
706 int v;
707 v = vtop->r & VT_VALMASK;
708 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
709 /* for x86, we need to pop the FP stack */
710 if (v == TREG_ST0) {
711 o(0xd8dd); /* fstp %st(0) */
712 } else
713 #endif
714 if (v == VT_JMP || v == VT_JMPI) {
715 /* need to put correct jump if && or || without test */
716 gsym(vtop->c.i);
718 vtop--;
721 /* push a constant of type "type" with an unspecified dummy value */
722 ST_FUNC void vpush(CType *type)
724 vset(type, VT_CONST, 0);
727 /* push integer constant */
728 ST_FUNC void vpushi(int v)
730 CValue cval;
731 cval.i = v;
732 vsetc(&int_type, VT_CONST, &cval);
735 /* push a pointer sized constant */
736 static void vpushs(addr_t v)
738 CValue cval;
739 cval.i = v;
740 vsetc(&size_type, VT_CONST, &cval);
743 /* push arbitrary 64bit constant */
744 ST_FUNC void vpush64(int ty, unsigned long long v)
746 CValue cval;
747 CType ctype;
748 ctype.t = ty;
749 ctype.ref = NULL;
750 cval.i = v;
751 vsetc(&ctype, VT_CONST, &cval);
754 /* push long long constant */
755 static inline void vpushll(long long v)
757 vpush64(VT_LLONG, v);
760 ST_FUNC void vset(CType *type, int r, int v)
762 CValue cval;
764 cval.i = v;
765 vsetc(type, r, &cval);
768 static void vseti(int r, int v)
770 CType type;
771 type.t = VT_INT;
772 type.ref = NULL;
773 vset(&type, r, v);
776 ST_FUNC void vpushv(SValue *v)
778 if (vtop >= vstack + (VSTACK_SIZE - 1))
779 tcc_error("memory full (vstack)");
780 vtop++;
781 *vtop = *v;
784 static void vdup(void)
786 vpushv(vtop);
789 /* rotate n first stack elements to the bottom
790 I1 ... In -> I2 ... In I1 [top is right]
*/
792 ST_FUNC void vrotb(int n)
794 int i;
795 SValue tmp;
797 tmp = vtop[-n + 1];
798 for(i=-n+1;i!=0;i++)
799 vtop[i] = vtop[i+1];
800 vtop[0] = tmp;
803 /* rotate the n elements before entry e towards the top
804 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
*/
806 ST_FUNC void vrote(SValue *e, int n)
808 int i;
809 SValue tmp;
811 tmp = *e;
812 for(i = 0;i < n - 1; i++)
813 e[-i] = e[-i - 1];
814 e[-n + 1] = tmp;
817 /* rotate n first stack elements to the top
818 I1 ... In -> In I1 ... I(n-1) [top is right]
*/
820 ST_FUNC void vrott(int n)
822 vrote(vtop, n);
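/* Worked example (illustration only): with the value stack holding A B C
   (top is right), vrotb(3) gives B C A, vrott(3) gives C A B, and
   vrote(vtop - 1, 2) rotates only the two entries below the top,
   giving B A C. */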
825 /* push a symbol value of TYPE */
826 static inline void vpushsym(CType *type, Sym *sym)
828 CValue cval;
829 cval.i = 0;
830 vsetc(type, VT_CONST | VT_SYM, &cval);
831 vtop->sym = sym;
834 /* Return a static symbol pointing to a section */
835 ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
837 int v;
838 Sym *sym;
840 v = anon_sym++;
841 sym = global_identifier_push(v, type->t | VT_STATIC, 0);
842 sym->type.ref = type->ref;
843 sym->r = VT_CONST | VT_SYM;
844 put_extern_sym(sym, sec, offset, size);
845 return sym;
848 /* push a reference to a section offset by adding a dummy symbol */
849 static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
851 vpushsym(type, get_sym_ref(type, sec, offset, size));
854 /* define a new external reference to a symbol 'v' of type 'u' */
855 ST_FUNC Sym *external_global_sym(int v, CType *type, int r)
857 Sym *s;
859 s = sym_find(v);
860 if (!s) {
861 /* push forward reference */
862 s = global_identifier_push(v, type->t | VT_EXTERN, 0);
863 s->type.ref = type->ref;
864 s->r = r | VT_CONST | VT_SYM;
865 } else if (IS_ASM_SYM(s)) {
866 s->type.t = type->t | (s->type.t & VT_EXTERN);
867 s->type.ref = type->ref;
868 update_storage(s);
870 return s;
873 /* Merge symbol attributes. */
874 static void merge_symattr(struct SymAttr *sa, struct SymAttr *sa1)
876 if (sa1->aligned && !sa->aligned)
877 sa->aligned = sa1->aligned;
878 sa->packed |= sa1->packed;
879 sa->weak |= sa1->weak;
880 if (sa1->visibility != STV_DEFAULT) {
881 int vis = sa->visibility;
882 if (vis == STV_DEFAULT
883 || vis > sa1->visibility)
884 vis = sa1->visibility;
885 sa->visibility = vis;
887 sa->dllexport |= sa1->dllexport;
888 sa->nodecorate |= sa1->nodecorate;
889 sa->dllimport |= sa1->dllimport;
892 /* Merge function attributes. */
893 static void merge_funcattr(struct FuncAttr *fa, struct FuncAttr *fa1)
895 if (fa1->func_call && !fa->func_call)
896 fa->func_call = fa1->func_call;
897 if (fa1->func_type && !fa->func_type)
898 fa->func_type = fa1->func_type;
899 if (fa1->func_args && !fa->func_args)
900 fa->func_args = fa1->func_args;
903 /* Merge attributes. */
904 static void merge_attr(AttributeDef *ad, AttributeDef *ad1)
906 merge_symattr(&ad->a, &ad1->a);
907 merge_funcattr(&ad->f, &ad1->f);
909 if (ad1->section)
910 ad->section = ad1->section;
911 if (ad1->alias_target)
912 ad->alias_target = ad1->alias_target;
913 if (ad1->asm_label)
914 ad->asm_label = ad1->asm_label;
915 if (ad1->attr_mode)
916 ad->attr_mode = ad1->attr_mode;
919 /* Merge some type attributes. */
920 static void patch_type(Sym *sym, CType *type)
922 if (!(type->t & VT_EXTERN)) {
923 if (!(sym->type.t & VT_EXTERN))
924 tcc_error("redefinition of '%s'", get_tok_str(sym->v, NULL));
925 sym->type.t &= ~VT_EXTERN;
928 if (IS_ASM_SYM(sym)) {
929 /* stay static if both are static */
930 sym->type.t = type->t & (sym->type.t | ~VT_STATIC);
931 sym->type.ref = type->ref;
934 if (!is_compatible_types(&sym->type, type)) {
935 tcc_error("incompatible types for redefinition of '%s'",
936 get_tok_str(sym->v, NULL));
938 } else if ((sym->type.t & VT_BTYPE) == VT_FUNC) {
939 int static_proto = sym->type.t & VT_STATIC;
940 /* warn if static follows non-static function declaration */
941 if ((type->t & VT_STATIC) && !static_proto && !(type->t & VT_INLINE))
942 tcc_warning("static storage ignored for redefinition of '%s'",
943 get_tok_str(sym->v, NULL));
945 if (0 == (type->t & VT_EXTERN)) {
946 /* put complete type, use static from prototype */
947 sym->type.t = (type->t & ~VT_STATIC) | static_proto;
948 if (type->t & VT_INLINE)
949 sym->type.t = type->t;
950 sym->type.ref = type->ref;
953 } else {
954 if ((sym->type.t & VT_ARRAY) && type->ref->c >= 0) {
955 /* set array size if it was omitted in extern declaration */
956 if (sym->type.ref->c < 0)
957 sym->type.ref->c = type->ref->c;
958 else if (sym->type.ref->c != type->ref->c)
959 tcc_error("conflicting type for '%s'", get_tok_str(sym->v, NULL));
961 if ((type->t ^ sym->type.t) & VT_STATIC)
962 tcc_warning("storage mismatch for redefinition of '%s'",
963 get_tok_str(sym->v, NULL));
968 /* Merge some storage attributes. */
969 static void patch_storage(Sym *sym, AttributeDef *ad, CType *type)
971 if (type)
972 patch_type(sym, type);
974 #ifdef TCC_TARGET_PE
975 if (sym->a.dllimport != ad->a.dllimport)
976 tcc_error("incompatible dll linkage for redefinition of '%s'",
977 get_tok_str(sym->v, NULL));
978 #endif
979 merge_symattr(&sym->a, &ad->a);
980 if (ad->asm_label)
981 sym->asm_label = ad->asm_label;
982 update_storage(sym);
985 /* define a new external reference to a symbol 'v' */
986 static Sym *external_sym(int v, CType *type, int r, AttributeDef *ad)
988 Sym *s;
989 s = sym_find(v);
990 if (!s) {
991 /* push forward reference */
992 s = sym_push(v, type, r | VT_CONST | VT_SYM, 0);
993 s->type.t |= VT_EXTERN;
994 s->a = ad->a;
995 s->sym_scope = 0;
996 } else {
997 if (s->type.ref == func_old_type.ref) {
998 s->type.ref = type->ref;
999 s->r = r | VT_CONST | VT_SYM;
1000 s->type.t |= VT_EXTERN;
1002 patch_storage(s, ad, type);
1004 return s;
1007 /* push a reference to global symbol v */
1008 ST_FUNC void vpush_global_sym(CType *type, int v)
1010 vpushsym(type, external_global_sym(v, type, 0));
1013 /* save registers up to (vtop - n) stack entry */
1014 ST_FUNC void save_regs(int n)
1016 SValue *p, *p1;
1017 for(p = vstack, p1 = vtop - n; p <= p1; p++)
1018 save_reg(p->r);
1021 /* save r to the memory stack, and mark it as being free */
1022 ST_FUNC void save_reg(int r)
1024 save_reg_upstack(r, 0);
1027 /* save r to the memory stack, and mark it as being free,
1028 if seen up to (vtop - n) stack entry */
1029 ST_FUNC void save_reg_upstack(int r, int n)
1031 int l, saved, size, align;
1032 SValue *p, *p1, sv;
1033 CType *type;
1035 if ((r &= VT_VALMASK) >= VT_CONST)
1036 return;
1037 if (nocode_wanted)
1038 return;
1040 /* modify all stack values */
1041 saved = 0;
1042 l = 0;
1043 for(p = vstack, p1 = vtop - n; p <= p1; p++) {
1044 if ((p->r & VT_VALMASK) == r ||
1045 ((p->type.t & VT_BTYPE) == VT_LLONG && (p->r2 & VT_VALMASK) == r)) {
1046 /* must save value on stack if not already done */
1047 if (!saved) {
1048 /* NOTE: must reload 'r' because r might be equal to r2 */
1049 r = p->r & VT_VALMASK;
1050 /* store register in the stack */
1051 type = &p->type;
1052 if ((p->r & VT_LVAL) ||
1053 (!is_float(type->t) && (type->t & VT_BTYPE) != VT_LLONG))
1054 #if PTR_SIZE == 8
1055 type = &char_pointer_type;
1056 #else
1057 type = &int_type;
1058 #endif
1059 size = type_size(type, &align);
1060 l=get_temp_local_var(size,align);
1061 sv.type.t = type->t;
1062 sv.r = VT_LOCAL | VT_LVAL;
1063 sv.c.i = l;
1064 store(r, &sv);
1065 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1066 /* x86 specific: need to pop fp register ST0 if saved */
1067 if (r == TREG_ST0) {
1068 o(0xd8dd); /* fstp %st(0) */
1070 #endif
1071 #if PTR_SIZE == 4
1072 /* special long long case */
1073 if ((type->t & VT_BTYPE) == VT_LLONG) {
1074 sv.c.i += 4;
1075 store(p->r2, &sv);
1077 #endif
1078 saved = 1;
1080 /* mark that stack entry as being saved on the stack */
1081 if (p->r & VT_LVAL) {
1082 /* also clear the bounded flag because the
1083 relocation address of the function was stored in
1084 p->c.i */
1085 p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
1086 } else {
1087 p->r = lvalue_type(p->type.t) | VT_LOCAL;
1089 p->r2 = VT_CONST;
1090 p->c.i = l;
1095 #ifdef TCC_TARGET_ARM
1096 /* find a register of class 'rc2' with at most one reference on stack.
1097 * If none, call get_reg(rc) */
1098 ST_FUNC int get_reg_ex(int rc, int rc2)
1100 int r;
1101 SValue *p;
1103 for(r=0;r<NB_REGS;r++) {
1104 if (reg_classes[r] & rc2) {
1105 int n;
1106 n=0;
1107 for(p = vstack; p <= vtop; p++) {
1108 if ((p->r & VT_VALMASK) == r ||
1109 (p->r2 & VT_VALMASK) == r)
1110 n++;
1112 if (n <= 1)
1113 return r;
1116 return get_reg(rc);
1118 #endif
1120 /* find a free register of class 'rc'. If none, save one register */
1121 ST_FUNC int get_reg(int rc)
1123 int r;
1124 SValue *p;
1126 /* find a free register */
1127 for(r=0;r<NB_REGS;r++) {
1128 if (reg_classes[r] & rc) {
1129 if (nocode_wanted)
1130 return r;
1131 for(p=vstack;p<=vtop;p++) {
1132 if ((p->r & VT_VALMASK) == r ||
1133 (p->r2 & VT_VALMASK) == r)
1134 goto notfound;
1136 return r;
1138 notfound: ;
1141 /* no register left : free the first one on the stack (VERY
1142 IMPORTANT to start from the bottom to ensure that we don't
1143 spill registers used in gen_opi()) */
1144 for(p=vstack;p<=vtop;p++) {
1145 /* look at second register (if long long) */
1146 r = p->r2 & VT_VALMASK;
1147 if (r < VT_CONST && (reg_classes[r] & rc))
1148 goto save_found;
1149 r = p->r & VT_VALMASK;
1150 if (r < VT_CONST && (reg_classes[r] & rc)) {
1151 save_found:
1152 save_reg(r);
1153 return r;
1156 /* Should never come here */
1157 return -1;
1160 /* find a free temporary local variable (return its offset on the stack) matching the requested size and alignment. If none is found, allocate a new temporary stack slot. */
1161 static int get_temp_local_var(int size,int align){
1162 int i;
1163 struct temp_local_variable *temp_var;
1164 int found_var;
1165 SValue *p;
1166 int r;
1167 char free;
1168 char found;
1169 found=0;
1170 for(i=0;i<nb_temp_local_vars;i++){
1171 temp_var=&arr_temp_local_vars[i];
1172 if(temp_var->size<size||align!=temp_var->align){
1173 continue;
1175 /*check if temp_var is free*/
1176 free=1;
1177 for(p=vstack;p<=vtop;p++) {
1178 r=p->r&VT_VALMASK;
1179 if(r==VT_LOCAL||r==VT_LLOCAL){
1180 if(p->c.i==temp_var->location){
1181 free=0;
1182 break;
1186 if(free){
1187 found_var=temp_var->location;
1188 found=1;
1189 break;
1192 if(!found){
1193 loc = (loc - size) & -align;
1194 if(nb_temp_local_vars<MAX_TEMP_LOCAL_VARIABLE_NUMBER){
1195 temp_var=&arr_temp_local_vars[i];
1196 temp_var->location=loc;
1197 temp_var->size=size;
1198 temp_var->align=align;
1199 nb_temp_local_vars++;
1201 found_var=loc;
1203 return found_var;
1206 static void clear_temp_local_var_list(){
1207 nb_temp_local_vars=0;
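/* Illustration (not part of tcc): get_temp_local_var() does first-fit reuse
   of spill slots.  If an earlier save_reg() created a 4-byte, 4-aligned
   slot at, say, loc == -12 and no SValue on the value stack still refers
   to it, a later request for (size 4, align 4) returns -12 again instead
   of growing the frame; a new slot is carved out of 'loc' only when every
   recorded slot is still live or has the wrong size/alignment. */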
1210 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1211 if needed */
1212 static void move_reg(int r, int s, int t)
1214 SValue sv;
1216 if (r != s) {
1217 save_reg(r);
1218 sv.type.t = t;
1219 sv.type.ref = NULL;
1220 sv.r = s;
1221 sv.c.i = 0;
1222 load(r, &sv);
1226 /* get address of vtop (vtop MUST BE an lvalue) */
1227 ST_FUNC void gaddrof(void)
1229 vtop->r &= ~VT_LVAL;
1230 /* tricky: if saved lvalue, then we can go back to lvalue */
1231 if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
1232 vtop->r = (vtop->r & ~(VT_VALMASK | VT_LVAL_TYPE)) | VT_LOCAL | VT_LVAL;
1237 #ifdef CONFIG_TCC_BCHECK
1238 /* generate lvalue bound code */
1239 static void gbound(void)
1241 int lval_type;
1242 CType type1;
1244 vtop->r &= ~VT_MUSTBOUND;
1245 /* if lvalue, then use checking code before dereferencing */
1246 if (vtop->r & VT_LVAL) {
1247 /* if not VT_BOUNDED value, then make one */
1248 if (!(vtop->r & VT_BOUNDED)) {
1249 lval_type = vtop->r & (VT_LVAL_TYPE | VT_LVAL);
1250 /* must save type because we must set it to int to get pointer */
1251 type1 = vtop->type;
1252 vtop->type.t = VT_PTR;
1253 gaddrof();
1254 vpushi(0);
1255 gen_bounded_ptr_add();
1256 vtop->r |= lval_type;
1257 vtop->type = type1;
1259 /* then check for dereferencing */
1260 gen_bounded_ptr_deref();
1263 #endif
1265 static void incr_bf_adr(int o)
1267 vtop->type = char_pointer_type;
1268 gaddrof();
1269 vpushi(o);
1270 gen_op('+');
1271 vtop->type.t = (vtop->type.t & ~(VT_BTYPE|VT_DEFSIGN))
1272 | (VT_BYTE|VT_UNSIGNED);
1273 vtop->r = (vtop->r & ~VT_LVAL_TYPE)
1274 | (VT_LVAL_BYTE|VT_LVAL_UNSIGNED|VT_LVAL);
1277 /* single-byte load mode for packed or otherwise unaligned bitfields */
1278 static void load_packed_bf(CType *type, int bit_pos, int bit_size)
1280 int n, o, bits;
1281 save_reg_upstack(vtop->r, 1);
1282 vpush64(type->t & VT_BTYPE, 0); // B X
1283 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
1284 do {
1285 vswap(); // X B
1286 incr_bf_adr(o);
1287 vdup(); // X B B
1288 n = 8 - bit_pos;
1289 if (n > bit_size)
1290 n = bit_size;
1291 if (bit_pos)
1292 vpushi(bit_pos), gen_op(TOK_SHR), bit_pos = 0; // X B Y
1293 if (n < 8)
1294 vpushi((1 << n) - 1), gen_op('&');
1295 gen_cast(type);
1296 if (bits)
1297 vpushi(bits), gen_op(TOK_SHL);
1298 vrotb(3); // B Y X
1299 gen_op('|'); // B X
1300 bits += n, bit_size -= n, o = 1;
1301 } while (bit_size);
1302 vswap(), vpop();
1303 if (!(type->t & VT_UNSIGNED)) {
1304 n = ((type->t & VT_BTYPE) == VT_LLONG ? 64 : 32) - bits;
1305 vpushi(n), gen_op(TOK_SHL);
1306 vpushi(n), gen_op(TOK_SAR);
1310 /* single-byte store mode for packed or otherwise unaligned bitfields */
1311 static void store_packed_bf(int bit_pos, int bit_size)
1313 int bits, n, o, m, c;
1315 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1316 vswap(); // X B
1317 save_reg_upstack(vtop->r, 1);
1318 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
1319 do {
1320 incr_bf_adr(o); // X B
1321 vswap(); //B X
1322 c ? vdup() : gv_dup(); // B V X
1323 vrott(3); // X B V
1324 if (bits)
1325 vpushi(bits), gen_op(TOK_SHR);
1326 if (bit_pos)
1327 vpushi(bit_pos), gen_op(TOK_SHL);
1328 n = 8 - bit_pos;
1329 if (n > bit_size)
1330 n = bit_size;
1331 if (n < 8) {
1332 m = ((1 << n) - 1) << bit_pos;
1333 vpushi(m), gen_op('&'); // X B V1
1334 vpushv(vtop-1); // X B V1 B
1335 vpushi(m & 0x80 ? ~m & 0x7f : ~m);
1336 gen_op('&'); // X B V1 B1
1337 gen_op('|'); // X B V2
1339 vdup(), vtop[-1] = vtop[-2]; // X B B V2
1340 vstore(), vpop(); // X B
1341 bits += n, bit_size -= n, bit_pos = 0, o = 1;
1342 } while (bit_size);
1343 vpop(), vpop();
1346 static int adjust_bf(SValue *sv, int bit_pos, int bit_size)
1348 int t;
1349 if (0 == sv->type.ref)
1350 return 0;
1351 t = sv->type.ref->auxtype;
1352 if (t != -1 && t != VT_STRUCT) {
1353 sv->type.t = (sv->type.t & ~VT_BTYPE) | t;
1354 sv->r = (sv->r & ~VT_LVAL_TYPE) | lvalue_type(sv->type.t);
1356 return t;
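/* Illustration (a hedged example, not part of tcc): the byte-wise paths
   above are taken when a bit-field cannot be accessed through its declared
   type, e.g. a packed struct whose field straddles unaligned bytes: */
#if 0
struct __attribute__((packed)) pbf {
    char c;
    int  f : 20;   /* starts at bit offset 8, accessed byte by byte */
};
#endif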
1359 /* store vtop in a register belonging to class 'rc'. lvalues are
1360 converted to values. Cannot be used for values that cannot be
1361 converted to a register value (such as structures). */
1362 ST_FUNC int gv(int rc)
1364 int r, bit_pos, bit_size, size, align, rc2;
1366 /* NOTE: get_reg can modify vstack[] */
1367 if (vtop->type.t & VT_BITFIELD) {
1368 CType type;
1370 bit_pos = BIT_POS(vtop->type.t);
1371 bit_size = BIT_SIZE(vtop->type.t);
1372 /* remove bit field info to avoid loops */
1373 vtop->type.t &= ~VT_STRUCT_MASK;
1375 type.ref = NULL;
1376 type.t = vtop->type.t & VT_UNSIGNED;
1377 if ((vtop->type.t & VT_BTYPE) == VT_BOOL)
1378 type.t |= VT_UNSIGNED;
1380 r = adjust_bf(vtop, bit_pos, bit_size);
1382 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
1383 type.t |= VT_LLONG;
1384 else
1385 type.t |= VT_INT;
1387 if (r == VT_STRUCT) {
1388 load_packed_bf(&type, bit_pos, bit_size);
1389 } else {
1390 int bits = (type.t & VT_BTYPE) == VT_LLONG ? 64 : 32;
1391 /* cast to int to propagate signedness in following ops */
1392 gen_cast(&type);
1393 /* generate shifts */
1394 vpushi(bits - (bit_pos + bit_size));
1395 gen_op(TOK_SHL);
1396 vpushi(bits - bit_size);
1397 /* NOTE: transformed to SHR if unsigned */
1398 gen_op(TOK_SAR);
1400 r = gv(rc);
1401 } else {
1402 if (is_float(vtop->type.t) &&
1403 (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1404 unsigned long offset;
1405 /* CPUs usually cannot use float constants, so we store them
1406 generically in data segment */
1407 size = type_size(&vtop->type, &align);
1408 if (NODATA_WANTED)
1409 size = 0, align = 1;
1410 offset = section_add(data_section, size, align);
1411 vpush_ref(&vtop->type, data_section, offset, size);
1412 vswap();
1413 init_putv(&vtop->type, data_section, offset);
1414 vtop->r |= VT_LVAL;
1416 #ifdef CONFIG_TCC_BCHECK
1417 if (vtop->r & VT_MUSTBOUND)
1418 gbound();
1419 #endif
1421 r = vtop->r & VT_VALMASK;
1422 rc2 = (rc & RC_FLOAT) ? RC_FLOAT : RC_INT;
1423 #ifndef TCC_TARGET_ARM64
1424 if (rc == RC_IRET)
1425 rc2 = RC_LRET;
1426 #ifdef TCC_TARGET_X86_64
1427 else if (rc == RC_FRET)
1428 rc2 = RC_QRET;
1429 #endif
1430 #endif
1431 /* need to reload if:
1432 - constant
1433 - lvalue (need to dereference pointer)
1434 - already a register, but not in the right class */
1435 if (r >= VT_CONST
1436 || (vtop->r & VT_LVAL)
1437 || !(reg_classes[r] & rc)
1438 #if PTR_SIZE == 8
1439 || ((vtop->type.t & VT_BTYPE) == VT_QLONG && !(reg_classes[vtop->r2] & rc2))
1440 || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT && !(reg_classes[vtop->r2] & rc2))
1441 #else
1442 || ((vtop->type.t & VT_BTYPE) == VT_LLONG && !(reg_classes[vtop->r2] & rc2))
1443 #endif
1446 r = get_reg(rc);
1447 #if PTR_SIZE == 8
1448 if (((vtop->type.t & VT_BTYPE) == VT_QLONG) || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT)) {
1449 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
1450 #else
1451 if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
1452 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
1453 unsigned long long ll;
1454 #endif
1455 int r2, original_type;
1456 original_type = vtop->type.t;
1457 /* two register type load : expand to two words
1458 temporarily */
1459 #if PTR_SIZE == 4
1460 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1461 /* load constant */
1462 ll = vtop->c.i;
1463 vtop->c.i = ll; /* first word */
1464 load(r, vtop);
1465 vtop->r = r; /* save register value */
1466 vpushi(ll >> 32); /* second word */
1467 } else
1468 #endif
1469 if (vtop->r & VT_LVAL) {
1470 /* We do not want to modify the long long
1471 pointer here, so the safest (and least
1472 efficient) way is to save all the other registers
1473 on the stack. XXX: totally inefficient. */
1474 #if 0
1475 save_regs(1);
1476 #else
1477 /* lvalue_save: save only if used further down the stack */
1478 save_reg_upstack(vtop->r, 1);
1479 #endif
1480 /* load from memory */
1481 vtop->type.t = load_type;
1482 load(r, vtop);
1483 vdup();
1484 vtop[-1].r = r; /* save register value */
1485 /* increment pointer to get second word */
1486 vtop->type.t = addr_type;
1487 gaddrof();
1488 vpushi(load_size);
1489 gen_op('+');
1490 vtop->r |= VT_LVAL;
1491 vtop->type.t = load_type;
1492 } else {
1493 /* move registers */
1494 load(r, vtop);
1495 vdup();
1496 vtop[-1].r = r; /* save register value */
1497 vtop->r = vtop[-1].r2;
1499 /* Allocate second register. Here we rely on the fact that
1500 get_reg() tries first to free r2 of an SValue. */
1501 r2 = get_reg(rc2);
1502 load(r2, vtop);
1503 vpop();
1504 /* write second register */
1505 vtop->r2 = r2;
1506 vtop->type.t = original_type;
1507 } else if ((vtop->r & VT_LVAL) && !is_float(vtop->type.t)) {
1508 int t1, t;
1509 /* lvalue of scalar type : need to use lvalue type
1510 because of possible cast */
1511 t = vtop->type.t;
1512 t1 = t;
1513 /* compute memory access type */
1514 if (vtop->r & VT_LVAL_BYTE)
1515 t = VT_BYTE;
1516 else if (vtop->r & VT_LVAL_SHORT)
1517 t = VT_SHORT;
1518 if (vtop->r & VT_LVAL_UNSIGNED)
1519 t |= VT_UNSIGNED;
1520 vtop->type.t = t;
1521 load(r, vtop);
1522 /* restore wanted type */
1523 vtop->type.t = t1;
1524 } else {
1525 /* one register type load */
1526 load(r, vtop);
1529 vtop->r = r;
1530 #ifdef TCC_TARGET_C67
1531 /* uses register pairs for doubles */
1532 if ((vtop->type.t & VT_BTYPE) == VT_DOUBLE)
1533 vtop->r2 = r+1;
1534 #endif
1536 return r;
1539 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1540 ST_FUNC void gv2(int rc1, int rc2)
1542 int v;
1544 /* generate more generic register first. But VT_JMP or VT_CMP
1545 values must be generated first in all cases to avoid possible
1546 reload errors */
1547 v = vtop[0].r & VT_VALMASK;
1548 if (v != VT_CMP && (v & ~1) != VT_JMP && rc1 <= rc2) {
1549 vswap();
1550 gv(rc1);
1551 vswap();
1552 gv(rc2);
1553 /* test if reload is needed for first register */
1554 if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
1555 vswap();
1556 gv(rc1);
1557 vswap();
1559 } else {
1560 gv(rc2);
1561 vswap();
1562 gv(rc1);
1563 vswap();
1564 /* test if reload is needed for first register */
1565 if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
1566 gv(rc2);
1571 #ifndef TCC_TARGET_ARM64
1572 /* wrapper around RC_FRET to return a register by type */
1573 static int rc_fret(int t)
1575 #ifdef TCC_TARGET_X86_64
1576 if (t == VT_LDOUBLE) {
1577 return RC_ST0;
1579 #endif
1580 return RC_FRET;
1582 #endif
1584 /* wrapper around REG_FRET to return a register by type */
1585 static int reg_fret(int t)
1587 #ifdef TCC_TARGET_X86_64
1588 if (t == VT_LDOUBLE) {
1589 return TREG_ST0;
1591 #endif
1592 return REG_FRET;
1595 #if PTR_SIZE == 4
1596 /* expand 64bit on stack in two ints */
1597 ST_FUNC void lexpand(void)
1599 int u, v;
1600 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1601 v = vtop->r & (VT_VALMASK | VT_LVAL);
1602 if (v == VT_CONST) {
1603 vdup();
1604 vtop[0].c.i >>= 32;
1605 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1606 vdup();
1607 vtop[0].c.i += 4;
1608 } else {
1609 gv(RC_INT);
1610 vdup();
1611 vtop[0].r = vtop[-1].r2;
1612 vtop[0].r2 = vtop[-1].r2 = VT_CONST;
1614 vtop[0].type.t = vtop[-1].type.t = VT_INT | u;
1616 #endif
1618 #if PTR_SIZE == 4
1619 /* build a long long from two ints */
1620 static void lbuild(int t)
1622 gv2(RC_INT, RC_INT);
1623 vtop[-1].r2 = vtop[0].r;
1624 vtop[-1].type.t = t;
1625 vpop();
1627 #endif
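/* Worked example (illustration only, PTR_SIZE == 4): lexpand() turns one
   64-bit stack entry into two 32-bit entries, low word below high word, so
   the constant 0x1122334455667788 becomes 0x55667788 (below) and
   0x11223344 (top); lbuild() is the inverse operation. */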
1629 /* convert stack entry to register and duplicate its value in another
1630 register */
1631 static void gv_dup(void)
1633 int rc, t, r, r1;
1634 SValue sv;
1636 t = vtop->type.t;
1637 #if PTR_SIZE == 4
1638 if ((t & VT_BTYPE) == VT_LLONG) {
1639 if (t & VT_BITFIELD) {
1640 gv(RC_INT);
1641 t = vtop->type.t;
1643 lexpand();
1644 gv_dup();
1645 vswap();
1646 vrotb(3);
1647 gv_dup();
1648 vrotb(4);
1649 /* stack: H L L1 H1 */
1650 lbuild(t);
1651 vrotb(3);
1652 vrotb(3);
1653 vswap();
1654 lbuild(t);
1655 vswap();
1656 } else
1657 #endif
1659 /* duplicate value */
1660 rc = RC_INT;
1661 sv.type.t = VT_INT;
1662 if (is_float(t)) {
1663 rc = RC_FLOAT;
1664 #ifdef TCC_TARGET_X86_64
1665 if ((t & VT_BTYPE) == VT_LDOUBLE) {
1666 rc = RC_ST0;
1668 #endif
1669 sv.type.t = t;
1671 r = gv(rc);
1672 r1 = get_reg(rc);
1673 sv.r = r;
1674 sv.c.i = 0;
1675 load(r1, &sv); /* move r to r1 */
1676 vdup();
1677 /* duplicates value */
1678 if (r != r1)
1679 vtop->r = r1;
1683 /* Generate value test
1685 * Generate a test for any value (jump, comparison and integers) */
1686 ST_FUNC int gvtst(int inv, int t)
1688 int v = vtop->r & VT_VALMASK;
1689 if (v != VT_CMP && v != VT_JMP && v != VT_JMPI) {
1690 vpushi(0);
1691 gen_op(TOK_NE);
1693 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1694 /* constant jmp optimization */
1695 if ((vtop->c.i != 0) != inv)
1696 t = gjmp(t);
1697 vtop--;
1698 return t;
1700 return gtst(inv, t);
1703 #if PTR_SIZE == 4
1704 /* generate CPU independent (unsigned) long long operations */
1705 static void gen_opl(int op)
1707 int t, a, b, op1, c, i;
1708 int func;
1709 unsigned short reg_iret = REG_IRET;
1710 unsigned short reg_lret = REG_LRET;
1711 SValue tmp;
1713 switch(op) {
1714 case '/':
1715 case TOK_PDIV:
1716 func = TOK___divdi3;
1717 goto gen_func;
1718 case TOK_UDIV:
1719 func = TOK___udivdi3;
1720 goto gen_func;
1721 case '%':
1722 func = TOK___moddi3;
1723 goto gen_mod_func;
1724 case TOK_UMOD:
1725 func = TOK___umoddi3;
1726 gen_mod_func:
1727 #ifdef TCC_ARM_EABI
1728 reg_iret = TREG_R2;
1729 reg_lret = TREG_R3;
1730 #endif
1731 gen_func:
1732 /* call generic long long function */
1733 vpush_global_sym(&func_old_type, func);
1734 vrott(3);
1735 gfunc_call(2);
1736 vpushi(0);
1737 vtop->r = reg_iret;
1738 vtop->r2 = reg_lret;
1739 break;
1740 case '^':
1741 case '&':
1742 case '|':
1743 case '*':
1744 case '+':
1745 case '-':
1746 //pv("gen_opl A",0,2);
1747 t = vtop->type.t;
1748 vswap();
1749 lexpand();
1750 vrotb(3);
1751 lexpand();
1752 /* stack: L1 H1 L2 H2 */
1753 tmp = vtop[0];
1754 vtop[0] = vtop[-3];
1755 vtop[-3] = tmp;
1756 tmp = vtop[-2];
1757 vtop[-2] = vtop[-3];
1758 vtop[-3] = tmp;
1759 vswap();
1760 /* stack: H1 H2 L1 L2 */
1761 //pv("gen_opl B",0,4);
1762 if (op == '*') {
1763 vpushv(vtop - 1);
1764 vpushv(vtop - 1);
1765 gen_op(TOK_UMULL);
1766 lexpand();
1767 /* stack: H1 H2 L1 L2 ML MH */
1768 for(i=0;i<4;i++)
1769 vrotb(6);
1770 /* stack: ML MH H1 H2 L1 L2 */
1771 tmp = vtop[0];
1772 vtop[0] = vtop[-2];
1773 vtop[-2] = tmp;
1774 /* stack: ML MH H1 L2 H2 L1 */
1775 gen_op('*');
1776 vrotb(3);
1777 vrotb(3);
1778 gen_op('*');
1779 /* stack: ML MH M1 M2 */
1780 gen_op('+');
1781 gen_op('+');
1782 } else if (op == '+' || op == '-') {
1783 /* XXX: add non carry method too (for MIPS or alpha) */
1784 if (op == '+')
1785 op1 = TOK_ADDC1;
1786 else
1787 op1 = TOK_SUBC1;
1788 gen_op(op1);
1789 /* stack: H1 H2 (L1 op L2) */
1790 vrotb(3);
1791 vrotb(3);
1792 gen_op(op1 + 1); /* TOK_xxxC2 */
1793 } else {
1794 gen_op(op);
1795 /* stack: H1 H2 (L1 op L2) */
1796 vrotb(3);
1797 vrotb(3);
1798 /* stack: (L1 op L2) H1 H2 */
1799 gen_op(op);
1800 /* stack: (L1 op L2) (H1 op H2) */
1802 /* stack: L H */
1803 lbuild(t);
1804 break;
1805 case TOK_SAR:
1806 case TOK_SHR:
1807 case TOK_SHL:
1808 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1809 t = vtop[-1].type.t;
1810 vswap();
1811 lexpand();
1812 vrotb(3);
1813 /* stack: L H shift */
1814 c = (int)vtop->c.i;
1815 /* constant: simpler */
1816 /* NOTE: all comments are for SHL. the other cases are
1817 done by swapping words */
1818 vpop();
1819 if (op != TOK_SHL)
1820 vswap();
1821 if (c >= 32) {
1822 /* stack: L H */
1823 vpop();
1824 if (c > 32) {
1825 vpushi(c - 32);
1826 gen_op(op);
1828 if (op != TOK_SAR) {
1829 vpushi(0);
1830 } else {
1831 gv_dup();
1832 vpushi(31);
1833 gen_op(TOK_SAR);
1835 vswap();
1836 } else {
1837 vswap();
1838 gv_dup();
1839 /* stack: H L L */
1840 vpushi(c);
1841 gen_op(op);
1842 vswap();
1843 vpushi(32 - c);
1844 if (op == TOK_SHL)
1845 gen_op(TOK_SHR);
1846 else
1847 gen_op(TOK_SHL);
1848 vrotb(3);
1849 /* stack: L L H */
1850 vpushi(c);
1851 if (op == TOK_SHL)
1852 gen_op(TOK_SHL);
1853 else
1854 gen_op(TOK_SHR);
1855 gen_op('|');
1857 if (op != TOK_SHL)
1858 vswap();
1859 lbuild(t);
1860 } else {
1861 /* XXX: should provide a faster fallback on x86 ? */
1862 switch(op) {
1863 case TOK_SAR:
1864 func = TOK___ashrdi3;
1865 goto gen_func;
1866 case TOK_SHR:
1867 func = TOK___lshrdi3;
1868 goto gen_func;
1869 case TOK_SHL:
1870 func = TOK___ashldi3;
1871 goto gen_func;
1874 break;
1875 default:
1876 /* compare operations */
1877 t = vtop->type.t;
1878 vswap();
1879 lexpand();
1880 vrotb(3);
1881 lexpand();
1882 /* stack: L1 H1 L2 H2 */
1883 tmp = vtop[-1];
1884 vtop[-1] = vtop[-2];
1885 vtop[-2] = tmp;
1886 /* stack: L1 L2 H1 H2 */
1887 /* compare high */
1888 op1 = op;
1889 /* when values are equal, we need to compare low words. since
1890 the jump is inverted, we invert the test too. */
1891 if (op1 == TOK_LT)
1892 op1 = TOK_LE;
1893 else if (op1 == TOK_GT)
1894 op1 = TOK_GE;
1895 else if (op1 == TOK_ULT)
1896 op1 = TOK_ULE;
1897 else if (op1 == TOK_UGT)
1898 op1 = TOK_UGE;
1899 a = 0;
1900 b = 0;
1901 gen_op(op1);
1902 if (op == TOK_NE) {
1903 b = gvtst(0, 0);
1904 } else {
1905 a = gvtst(1, 0);
1906 if (op != TOK_EQ) {
1907 /* generate non equal test */
1908 vpushi(TOK_NE);
1909 vtop->r = VT_CMP;
1910 b = gvtst(0, 0);
1913 /* compare low. Always unsigned */
1914 op1 = op;
1915 if (op1 == TOK_LT)
1916 op1 = TOK_ULT;
1917 else if (op1 == TOK_LE)
1918 op1 = TOK_ULE;
1919 else if (op1 == TOK_GT)
1920 op1 = TOK_UGT;
1921 else if (op1 == TOK_GE)
1922 op1 = TOK_UGE;
1923 gen_op(op1);
1924 a = gvtst(1, a);
1925 gsym(b);
1926 vseti(VT_JMPI, a);
1927 break;
1930 #endif
1932 static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
1934 uint64_t x = (a >> 63 ? -a : a) / (b >> 63 ? -b : b);
1935 return (a ^ b) >> 63 ? -x : x;
1938 static int gen_opic_lt(uint64_t a, uint64_t b)
1940 return (a ^ (uint64_t)1 << 63) < (b ^ (uint64_t)1 << 63);
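/* Worked example (illustration only): gen_opic_lt() flips the sign bit so
   that signed order maps onto unsigned order.  For a = -1, b = 1:
   (0xffffffffffffffff ^ (1ULL << 63)) = 0x7fffffffffffffff and
   (1 ^ (1ULL << 63)) = 0x8000000000000001, hence -1 < 1 as expected.
   gen_opic_sdiv() likewise divides magnitudes and restores the sign from
   a ^ b, so the constant folding below stays well defined on uint64_t. */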
1943 /* handle integer constant optimizations and various machine
1944 independent opt */
1945 static void gen_opic(int op)
1947 SValue *v1 = vtop - 1;
1948 SValue *v2 = vtop;
1949 int t1 = v1->type.t & VT_BTYPE;
1950 int t2 = v2->type.t & VT_BTYPE;
1951 int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1952 int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1953 uint64_t l1 = c1 ? v1->c.i : 0;
1954 uint64_t l2 = c2 ? v2->c.i : 0;
1955 int shm = (t1 == VT_LLONG) ? 63 : 31;
1957 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
1958 l1 = ((uint32_t)l1 |
1959 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
1960 if (t2 != VT_LLONG && (PTR_SIZE != 8 || t2 != VT_PTR))
1961 l2 = ((uint32_t)l2 |
1962 (v2->type.t & VT_UNSIGNED ? 0 : -(l2 & 0x80000000)));
1964 if (c1 && c2) {
1965 switch(op) {
1966 case '+': l1 += l2; break;
1967 case '-': l1 -= l2; break;
1968 case '&': l1 &= l2; break;
1969 case '^': l1 ^= l2; break;
1970 case '|': l1 |= l2; break;
1971 case '*': l1 *= l2; break;
1973 case TOK_PDIV:
1974 case '/':
1975 case '%':
1976 case TOK_UDIV:
1977 case TOK_UMOD:
1978 /* if division by zero, generate explicit division */
1979 if (l2 == 0) {
1980 if (const_wanted)
1981 tcc_error("division by zero in constant");
1982 goto general_case;
1984 switch(op) {
1985 default: l1 = gen_opic_sdiv(l1, l2); break;
1986 case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
1987 case TOK_UDIV: l1 = l1 / l2; break;
1988 case TOK_UMOD: l1 = l1 % l2; break;
1990 break;
1991 case TOK_SHL: l1 <<= (l2 & shm); break;
1992 case TOK_SHR: l1 >>= (l2 & shm); break;
1993 case TOK_SAR:
1994 l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
1995 break;
1996 /* tests */
1997 case TOK_ULT: l1 = l1 < l2; break;
1998 case TOK_UGE: l1 = l1 >= l2; break;
1999 case TOK_EQ: l1 = l1 == l2; break;
2000 case TOK_NE: l1 = l1 != l2; break;
2001 case TOK_ULE: l1 = l1 <= l2; break;
2002 case TOK_UGT: l1 = l1 > l2; break;
2003 case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
2004 case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
2005 case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
2006 case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
2007 /* logical */
2008 case TOK_LAND: l1 = l1 && l2; break;
2009 case TOK_LOR: l1 = l1 || l2; break;
2010 default:
2011 goto general_case;
2013 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
2014 l1 = ((uint32_t)l1 |
2015 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
2016 v1->c.i = l1;
2017 vtop--;
2018 } else {
2019 /* if commutative ops, put c2 as constant */
2020 if (c1 && (op == '+' || op == '&' || op == '^' ||
2021 op == '|' || op == '*')) {
2022 vswap();
2023 c2 = c1; //c = c1, c1 = c2, c2 = c;
2024 l2 = l1; //l = l1, l1 = l2, l2 = l;
2026 if (!const_wanted &&
2027 c1 && ((l1 == 0 &&
2028 (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
2029 (l1 == -1 && op == TOK_SAR))) {
2030 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
2031 vtop--;
2032 } else if (!const_wanted &&
2033 c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
2034 (op == '|' &&
2035 (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))) ||
2036 (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
2037 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
2038 if (l2 == 1)
2039 vtop->c.i = 0;
2040 vswap();
2041 vtop--;
2042 } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
2043 op == TOK_PDIV) &&
2044 l2 == 1) ||
2045 ((op == '+' || op == '-' || op == '|' || op == '^' ||
2046 op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
2047 l2 == 0) ||
2048 (op == '&' &&
2049 (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))))) {
2050 /* filter out NOP operations like x*1, x-0, x&-1... */
2051 vtop--;
2052 } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
2053 /* try to use shifts instead of muls or divs */
2054 if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
2055 int n = -1;
2056 while (l2) {
2057 l2 >>= 1;
2058 n++;
2060 vtop->c.i = n;
2061 if (op == '*')
2062 op = TOK_SHL;
2063 else if (op == TOK_PDIV)
2064 op = TOK_SAR;
2065 else
2066 op = TOK_SHR;
2068 goto general_case;
2069 } else if (c2 && (op == '+' || op == '-') &&
2070 (((vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM))
2071 || (vtop[-1].r & (VT_VALMASK | VT_LVAL)) == VT_LOCAL)) {
2072 /* symbol + constant case */
2073 if (op == '-')
2074 l2 = -l2;
2075 l2 += vtop[-1].c.i;
2076 /* The backends can't always deal with addends to symbols
2077 larger than +-1<<31. Don't construct such. */
2078 if ((int)l2 != l2)
2079 goto general_case;
2080 vtop--;
2081 vtop->c.i = l2;
2082 } else {
2083 general_case:
2084 /* call low level op generator */
2085 if (t1 == VT_LLONG || t2 == VT_LLONG ||
2086 (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
2087 gen_opl(op);
2088 else
2089 gen_opi(op);
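/* Illustration (not part of tcc): typical effects of the integer
   optimizations above.  3 + 4 folds to the constant 7, x * 1 and x + 0 are
   dropped as NOPs, x & 0 becomes the constant 0, and x * 8 or an unsigned
   x / 8 are strength-reduced to x << 3 and x >> 3. */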
2094 /* generate a floating point operation with constant propagation */
2095 static void gen_opif(int op)
2097 int c1, c2;
2098 SValue *v1, *v2;
2099 #if defined _MSC_VER && defined _AMD64_
2100 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2101 volatile
2102 #endif
2103 long double f1, f2;
2105 v1 = vtop - 1;
2106 v2 = vtop;
2107 /* currently, we cannot do computations with forward symbols */
2108 c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2109 c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2110 if (c1 && c2) {
2111 if (v1->type.t == VT_FLOAT) {
2112 f1 = v1->c.f;
2113 f2 = v2->c.f;
2114 } else if (v1->type.t == VT_DOUBLE) {
2115 f1 = v1->c.d;
2116 f2 = v2->c.d;
2117 } else {
2118 f1 = v1->c.ld;
2119 f2 = v2->c.ld;
2122 /* NOTE: we only do constant propagation when both operands are finite
2123 numbers (not NaN or infinity) (ANSI spec) */
2124 if (!ieee_finite(f1) || !ieee_finite(f2))
2125 goto general_case;
2127 switch(op) {
2128 case '+': f1 += f2; break;
2129 case '-': f1 -= f2; break;
2130 case '*': f1 *= f2; break;
2131 case '/':
2132 if (f2 == 0.0) {
2133 /* If not in initializer we need to potentially generate
2134 FP exceptions at runtime, otherwise we want to fold. */
2135 if (!const_wanted)
2136 goto general_case;
2138 f1 /= f2;
2139 break;
2140 /* XXX: also handles tests ? */
2141 default:
2142 goto general_case;
2144 /* XXX: overflow test ? */
2145 if (v1->type.t == VT_FLOAT) {
2146 v1->c.f = f1;
2147 } else if (v1->type.t == VT_DOUBLE) {
2148 v1->c.d = f1;
2149 } else {
2150 v1->c.ld = f1;
2152 vtop--;
2153 } else {
2154 general_case:
2155 gen_opf(op);
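/* Illustration (not part of tcc): with the propagation above, 1.5 + 2.25
   folds to 3.75 at compile time, while a division by 0.0 outside a
   constant initializer is left to the code generator so that any floating
   point exception is raised at run time. */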
2159 static int pointed_size(CType *type)
2161 int align;
2162 return type_size(pointed_type(type), &align);
2165 static void vla_runtime_pointed_size(CType *type)
2167 int align;
2168 vla_runtime_type_size(pointed_type(type), &align);
2171 static inline int is_null_pointer(SValue *p)
2173 if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
2174 return 0;
2175 return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
2176 ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
2177 ((p->type.t & VT_BTYPE) == VT_PTR &&
2178 (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0) &&
2179 ((pointed_type(&p->type)->t & VT_BTYPE) == VT_VOID) &&
2180 0 == (pointed_type(&p->type)->t & (VT_CONSTANT | VT_VOLATILE))
2183 static inline int is_integer_btype(int bt)
2185 return (bt == VT_BYTE || bt == VT_SHORT ||
2186 bt == VT_INT || bt == VT_LLONG);
2189 /* check types for comparison or subtraction of pointers */
2190 static void check_comparison_pointer_types(SValue *p1, SValue *p2, int op)
2192 CType *type1, *type2, tmp_type1, tmp_type2;
2193 int bt1, bt2;
2195 /* null pointers are accepted for all comparisons, as in gcc */
2196 if (is_null_pointer(p1) || is_null_pointer(p2))
2197 return;
2198 type1 = &p1->type;
2199 type2 = &p2->type;
2200 bt1 = type1->t & VT_BTYPE;
2201 bt2 = type2->t & VT_BTYPE;
2202 /* accept comparison between pointer and integer with a warning */
2203 if ((is_integer_btype(bt1) || is_integer_btype(bt2)) && op != '-') {
2204 if (op != TOK_LOR && op != TOK_LAND )
2205 tcc_warning("comparison between pointer and integer");
2206 return;
2209 /* both must be pointers or implicit function pointers */
2210 if (bt1 == VT_PTR) {
2211 type1 = pointed_type(type1);
2212 } else if (bt1 != VT_FUNC)
2213 goto invalid_operands;
2215 if (bt2 == VT_PTR) {
2216 type2 = pointed_type(type2);
2217 } else if (bt2 != VT_FUNC) {
2218 invalid_operands:
2219 tcc_error("invalid operands to binary %s", get_tok_str(op, NULL));
2221 if ((type1->t & VT_BTYPE) == VT_VOID ||
2222 (type2->t & VT_BTYPE) == VT_VOID)
2223 return;
2224 tmp_type1 = *type1;
2225 tmp_type2 = *type2;
2226 tmp_type1.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
2227 tmp_type2.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
2228 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
2229 /* gcc-like error if '-' is used */
2230 if (op == '-')
2231 goto invalid_operands;
2232 else
2233 tcc_warning("comparison of distinct pointer types lacks a cast");
2237 /* generic gen_op: handles types problems */
2238 ST_FUNC void gen_op(int op)
2240 int u, t1, t2, bt1, bt2, t;
2241 CType type1;
2243 redo:
2244 t1 = vtop[-1].type.t;
2245 t2 = vtop[0].type.t;
2246 bt1 = t1 & VT_BTYPE;
2247 bt2 = t2 & VT_BTYPE;
2249 if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
2250 tcc_error("operation on a struct");
2251 } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
2252 if (bt2 == VT_FUNC) {
2253 mk_pointer(&vtop->type);
2254 gaddrof();
2256 if (bt1 == VT_FUNC) {
2257 vswap();
2258 mk_pointer(&vtop->type);
2259 gaddrof();
2260 vswap();
2262 goto redo;
2263 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
2264 /* at least one operand is a pointer */
2265 /* relational op: must be both pointers */
2266 if (op >= TOK_ULT && op <= TOK_LOR) {
2267 check_comparison_pointer_types(vtop - 1, vtop, op);
2268 /* pointers are handled as unsigned */
2269 #if PTR_SIZE == 8
2270 t = VT_LLONG | VT_UNSIGNED;
2271 #else
2272 t = VT_INT | VT_UNSIGNED;
2273 #endif
2274 goto std_op;
2276 /* if both pointers, then it must be the '-' op */
2277 if (bt1 == VT_PTR && bt2 == VT_PTR) {
2278 if (op != '-')
2279 tcc_error("cannot use pointers here");
2280 check_comparison_pointer_types(vtop - 1, vtop, op);
2281 /* XXX: check that types are compatible */
2282 if (vtop[-1].type.t & VT_VLA) {
2283 vla_runtime_pointed_size(&vtop[-1].type);
2284 } else {
2285 vpushi(pointed_size(&vtop[-1].type));
2287 vrott(3);
2288 gen_opic(op);
2289 vtop->type.t = ptrdiff_type.t;
2290 vswap();
2291 gen_op(TOK_PDIV);
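/* Worked example: for "int *p, *q; q - p" the byte difference was
   computed first (gen_opic above), given the ptrdiff type, and then
   divided via TOK_PDIV by the sizeof(int) pushed above, yielding the
   element count. */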
2292 } else {
2293 /* exactly one pointer : must be '+' or '-'. */
2294 if (op != '-' && op != '+')
2295 tcc_error("cannot use pointers here");
2296 /* Put pointer as first operand */
2297 if (bt2 == VT_PTR) {
2298 vswap();
2299 t = t1, t1 = t2, t2 = t;
2301 #if PTR_SIZE == 4
2302 if ((vtop[0].type.t & VT_BTYPE) == VT_LLONG)
2303 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2304 gen_cast_s(VT_INT);
2305 #endif
2306 type1 = vtop[-1].type;
2307 type1.t &= ~VT_ARRAY;
2308 if (vtop[-1].type.t & VT_VLA)
2309 vla_runtime_pointed_size(&vtop[-1].type);
2310 else {
2311 u = pointed_size(&vtop[-1].type);
2312 if (u < 0)
2313 tcc_error("unknown array element size");
2314 #if PTR_SIZE == 8
2315 vpushll(u);
2316 #else
2317 /* XXX: cast to int ? (long long case) */
2318 vpushi(u);
2319 #endif
2321 gen_op('*');
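/* Worked example: for "int *p; p + i" the integer operand has just been
   multiplied by the element size (4 for int), so the addition generated
   below advances the pointer by whole elements. */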
2322 #if 0
2323 /* #ifdef CONFIG_TCC_BCHECK
2324 The main reason for removing this code:
2325 #include <stdio.h>
2326 int main ()
2328 int v[10];
2329 int i = 10;
2330 int j = 9;
2331 fprintf(stderr, "v+i-j = %p\n", v+i-j);
2332 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
2334 When this code is enabled, the output looks like:
2335 v+i-j = 0xfffffffe
2336 v+(i-j) = 0xbff84000
2338 /* if evaluating constant expression, no code should be
2339 generated, so no bound check */
2340 if (tcc_state->do_bounds_check && !const_wanted) {
2341 /* if bounded pointers, we generate a special code to
2342 test bounds */
2343 if (op == '-') {
2344 vpushi(0);
2345 vswap();
2346 gen_op('-');
2348 gen_bounded_ptr_add();
2349 } else
2350 #endif
2352 gen_opic(op);
2354 /* restore the type in case gen_opic() swapped operands */
2355 vtop->type = type1;
2357 } else if (is_float(bt1) || is_float(bt2)) {
2358 /* compute bigger type and do implicit casts */
2359 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
2360 t = VT_LDOUBLE;
2361 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
2362 t = VT_DOUBLE;
2363 } else {
2364 t = VT_FLOAT;
2366 /* floats can only be used for a few operations */
2367 if (op != '+' && op != '-' && op != '*' && op != '/' &&
2368 (op < TOK_ULT || op > TOK_GT))
2369 tcc_error("invalid operands for binary operation");
2370 goto std_op;
2371 } else if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL) {
2372 t = bt1 == VT_LLONG ? VT_LLONG : VT_INT;
2373 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (t | VT_UNSIGNED))
2374 t |= VT_UNSIGNED;
2375 t |= (VT_LONG & t1);
2376 goto std_op;
2377 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
2378 /* cast to biggest op */
2379 t = VT_LLONG | VT_LONG;
2380 if (bt1 == VT_LLONG)
2381 t &= t1;
2382 if (bt2 == VT_LLONG)
2383 t &= t2;
2384 /* convert to unsigned if it does not fit in a long long */
2385 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
2386 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
2387 t |= VT_UNSIGNED;
2388 goto std_op;
2389 } else {
2390 /* integer operations */
2391 t = VT_INT | (VT_LONG & (t1 | t2));
2392 /* convert to unsigned if it does not fit in an integer */
2393 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
2394 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
2395 t |= VT_UNSIGNED;
2396 std_op:
2397 /* XXX: currently, some unsigned operations are explicit, so
2398 we modify them here */
2399 if (t & VT_UNSIGNED) {
2400 if (op == TOK_SAR)
2401 op = TOK_SHR;
2402 else if (op == '/')
2403 op = TOK_UDIV;
2404 else if (op == '%')
2405 op = TOK_UMOD;
2406 else if (op == TOK_LT)
2407 op = TOK_ULT;
2408 else if (op == TOK_GT)
2409 op = TOK_UGT;
2410 else if (op == TOK_LE)
2411 op = TOK_ULE;
2412 else if (op == TOK_GE)
2413 op = TOK_UGE;
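/* Worked example: for "unsigned int u < signed int i" the usual
   arithmetic conversions make the common type unsigned int, so the
   signed comparison TOK_LT has been rewritten to TOK_ULT here. */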
2415 vswap();
2416 type1.t = t;
2417 type1.ref = NULL;
2418 gen_cast(&type1);
2419 vswap();
2420 /* special case for shifts and long long: we keep the shift as
2421 an integer */
2422 if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
2423 type1.t = VT_INT;
2424 gen_cast(&type1);
2425 if (is_float(t))
2426 gen_opif(op);
2427 else
2428 gen_opic(op);
2429 if (op >= TOK_ULT && op <= TOK_GT) {
2430 /* relational op: the result is an int */
2431 vtop->type.t = VT_INT;
2432 } else {
2433 vtop->type.t = t;
2436 // Make sure that we have converted to an rvalue:
2437 if (vtop->r & VT_LVAL)
2438 gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
2441 #ifndef TCC_TARGET_ARM
2442 /* generic itof for unsigned long long case */
2443 static void gen_cvt_itof1(int t)
2445 #ifdef TCC_TARGET_ARM64
2446 gen_cvt_itof(t);
2447 #else
2448 if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
2449 (VT_LLONG | VT_UNSIGNED)) {
2451 if (t == VT_FLOAT)
2452 vpush_global_sym(&func_old_type, TOK___floatundisf);
2453 #if LDOUBLE_SIZE != 8
2454 else if (t == VT_LDOUBLE)
2455 vpush_global_sym(&func_old_type, TOK___floatundixf);
2456 #endif
2457 else
2458 vpush_global_sym(&func_old_type, TOK___floatundidf);
2459 vrott(2);
2460 gfunc_call(1);
2461 vpushi(0);
2462 vtop->r = reg_fret(t);
2463 } else {
2464 gen_cvt_itof(t);
2466 #endif
2468 #endif
2470 /* generic ftoi for unsigned long long case */
2471 static void gen_cvt_ftoi1(int t)
2473 #ifdef TCC_TARGET_ARM64
2474 gen_cvt_ftoi(t);
2475 #else
2476 int st;
2478 if (t == (VT_LLONG | VT_UNSIGNED)) {
2479 /* not handled natively */
2480 st = vtop->type.t & VT_BTYPE;
2481 if (st == VT_FLOAT)
2482 vpush_global_sym(&func_old_type, TOK___fixunssfdi);
2483 #if LDOUBLE_SIZE != 8
2484 else if (st == VT_LDOUBLE)
2485 vpush_global_sym(&func_old_type, TOK___fixunsxfdi);
2486 #endif
2487 else
2488 vpush_global_sym(&func_old_type, TOK___fixunsdfdi);
2489 vrott(2);
2490 gfunc_call(1);
2491 vpushi(0);
2492 vtop->r = REG_IRET;
2493 vtop->r2 = REG_LRET;
2494 } else {
2495 gen_cvt_ftoi(t);
2497 #endif
2500 /* force char or short cast */
2501 static void force_charshort_cast(int t)
2503 int bits, dbt;
2505 /* cannot cast static initializers */
2506 if (STATIC_DATA_WANTED)
2507 return;
2509 dbt = t & VT_BTYPE;
2510 /* XXX: add optimization if lvalue : just change type and offset */
2511 if (dbt == VT_BYTE)
2512 bits = 8;
2513 else
2514 bits = 16;
2515 if (t & VT_UNSIGNED) {
2516 vpushi((1 << bits) - 1);
2517 gen_op('&');
2518 } else {
2519 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
2520 bits = 64 - bits;
2521 else
2522 bits = 32 - bits;
2523 vpushi(bits);
2524 gen_op(TOK_SHL);
2525 /* result must be signed or the SAR is converted to an SHL.
2526 This was not the case when "t" was a signed short
2527 and the last value on the stack was an unsigned int */
2528 vtop->type.t &= ~VT_UNSIGNED;
2529 vpushi(bits);
2530 gen_op(TOK_SAR);
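/* Worked example: truncating a 32-bit int to signed char shifts the
   value left by 24 bits and arithmetic-shifts it right by 24, which both
   truncates and sign-extends; the unsigned branch above instead masks
   with 0xff (or 0xffff for short). */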
2534 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2535 static void gen_cast_s(int t)
2537 CType type;
2538 type.t = t;
2539 type.ref = NULL;
2540 gen_cast(&type);
2543 static void gen_cast(CType *type)
2545 int sbt, dbt, sf, df, c, p;
2547 /* special delayed cast for char/short */
2548 /* XXX: in some cases (multiple cascaded casts), it may still
2549 be incorrect */
2550 if (vtop->r & VT_MUSTCAST) {
2551 vtop->r &= ~VT_MUSTCAST;
2552 force_charshort_cast(vtop->type.t);
2555 /* bitfields first get cast to ints */
2556 if (vtop->type.t & VT_BITFIELD) {
2557 gv(RC_INT);
2560 dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
2561 sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
2563 if (sbt != dbt) {
2564 sf = is_float(sbt);
2565 df = is_float(dbt);
2566 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2567 p = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM);
2568 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
2569 c &= dbt != VT_LDOUBLE;
2570 #endif
2571 if (c) {
2572 /* constant case: we can do it now */
2573 /* XXX: in ISO C, cannot do it if the conversion raises an error */
2574 if (sbt == VT_FLOAT)
2575 vtop->c.ld = vtop->c.f;
2576 else if (sbt == VT_DOUBLE)
2577 vtop->c.ld = vtop->c.d;
2579 if (df) {
2580 if ((sbt & VT_BTYPE) == VT_LLONG) {
2581 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
2582 vtop->c.ld = vtop->c.i;
2583 else
2584 vtop->c.ld = -(long double)-vtop->c.i;
2585 } else if(!sf) {
2586 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
2587 vtop->c.ld = (uint32_t)vtop->c.i;
2588 else
2589 vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
2592 if (dbt == VT_FLOAT)
2593 vtop->c.f = (float)vtop->c.ld;
2594 else if (dbt == VT_DOUBLE)
2595 vtop->c.d = (double)vtop->c.ld;
2596 } else if (sf && dbt == (VT_LLONG|VT_UNSIGNED)) {
2597 vtop->c.i = vtop->c.ld;
2598 } else if (sf && dbt == VT_BOOL) {
2599 vtop->c.i = (vtop->c.ld != 0);
2600 } else {
2601 if(sf)
2602 vtop->c.i = vtop->c.ld;
2603 else if (sbt == (VT_LLONG|VT_UNSIGNED))
2605 else if (sbt & VT_UNSIGNED)
2606 vtop->c.i = (uint32_t)vtop->c.i;
2607 #if PTR_SIZE == 8
2608 else if (sbt == VT_PTR)
2610 #endif
2611 else if (sbt != VT_LLONG)
2612 vtop->c.i = ((uint32_t)vtop->c.i |
2613 -(vtop->c.i & 0x80000000));
2615 if (dbt == (VT_LLONG|VT_UNSIGNED))
2617 else if (dbt == VT_BOOL)
2618 vtop->c.i = (vtop->c.i != 0);
2619 #if PTR_SIZE == 8
2620 else if (dbt == VT_PTR)
2622 #endif
2623 else if (dbt != VT_LLONG) {
2624 uint32_t m = ((dbt & VT_BTYPE) == VT_BYTE ? 0xff :
2625 (dbt & VT_BTYPE) == VT_SHORT ? 0xffff :
2626 0xffffffff);
2627 vtop->c.i &= m;
2628 if (!(dbt & VT_UNSIGNED))
2629 vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
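/* Worked example: casting the constant 0x180 to signed char masks it to
   0x80, and the OR with -(v & 0x80) above sign-extends it to -128;
   0x34 would be left unchanged. */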
2632 } else if (p && dbt == VT_BOOL) {
2633 vtop->r = VT_CONST;
2634 vtop->c.i = 1;
2635 } else {
2636 /* non constant case: generate code */
2637 if (sf && df) {
2638 /* convert from fp to fp */
2639 gen_cvt_ftof(dbt);
2640 } else if (df) {
2641 /* convert int to fp */
2642 gen_cvt_itof1(dbt);
2643 } else if (sf) {
2644 /* convert fp to int */
2645 if (dbt == VT_BOOL) {
2646 vpushi(0);
2647 gen_op(TOK_NE);
2648 } else {
2649 /* we handle char/short/etc... with generic code */
2650 if (dbt != (VT_INT | VT_UNSIGNED) &&
2651 dbt != (VT_LLONG | VT_UNSIGNED) &&
2652 dbt != VT_LLONG)
2653 dbt = VT_INT;
2654 gen_cvt_ftoi1(dbt);
2655 if (dbt == VT_INT && (type->t & (VT_BTYPE | VT_UNSIGNED)) != dbt) {
2656 /* additional cast for char/short... */
2657 vtop->type.t = dbt;
2658 gen_cast(type);
2661 #if PTR_SIZE == 4
2662 } else if ((dbt & VT_BTYPE) == VT_LLONG) {
2663 if ((sbt & VT_BTYPE) != VT_LLONG) {
2664 /* scalar to long long */
2665 /* machine independent conversion */
2666 gv(RC_INT);
2667 /* generate high word */
2668 if (sbt == (VT_INT | VT_UNSIGNED)) {
2669 vpushi(0);
2670 gv(RC_INT);
2671 } else {
2672 if (sbt == VT_PTR) {
2673 /* cast from pointer to int before we apply
2674 shift operation, which pointers don't support */
2675 gen_cast_s(VT_INT);
2677 gv_dup();
2678 vpushi(31);
2679 gen_op(TOK_SAR);
2681 /* patch second register */
2682 vtop[-1].r2 = vtop->r;
2683 vpop();
2685 #else
2686 } else if ((dbt & VT_BTYPE) == VT_LLONG ||
2687 (dbt & VT_BTYPE) == VT_PTR ||
2688 (dbt & VT_BTYPE) == VT_FUNC) {
2689 if ((sbt & VT_BTYPE) != VT_LLONG &&
2690 (sbt & VT_BTYPE) != VT_PTR &&
2691 (sbt & VT_BTYPE) != VT_FUNC) {
2692 /* need to convert from 32bit to 64bit */
2693 gv(RC_INT);
2694 if (sbt != (VT_INT | VT_UNSIGNED)) {
2695 #if defined(TCC_TARGET_ARM64)
2696 gen_cvt_sxtw();
2697 #elif defined(TCC_TARGET_X86_64)
2698 int r = gv(RC_INT);
2699 /* x86_64 specific: movslq */
2700 o(0x6348);
2701 o(0xc0 + (REG_VALUE(r) << 3) + REG_VALUE(r));
2702 #else
2703 #error
2704 #endif
2707 #endif
2708 } else if (dbt == VT_BOOL) {
2709 /* scalar to bool */
2710 vpushi(0);
2711 gen_op(TOK_NE);
2712 } else if ((dbt & VT_BTYPE) == VT_BYTE ||
2713 (dbt & VT_BTYPE) == VT_SHORT) {
2714 if (sbt == VT_PTR) {
2715 vtop->type.t = VT_INT;
2716 tcc_warning("nonportable conversion from pointer to char/short");
2718 force_charshort_cast(dbt);
2719 } else if ((dbt & VT_BTYPE) == VT_INT) {
2720 /* scalar to int */
2721 if ((sbt & VT_BTYPE) == VT_LLONG) {
2722 #if PTR_SIZE == 4
2723 /* from long long: just take low order word */
2724 lexpand();
2725 vpop();
2726 #else
2727 vpushi(0xffffffff);
2728 vtop->type.t |= VT_UNSIGNED;
2729 gen_op('&');
2730 #endif
2732 /* if lvalue and single word type, nothing to do because
2733 the lvalue already contains the real type size (see
2734 VT_LVAL_xxx constants) */
2737 } else if ((dbt & VT_BTYPE) == VT_PTR && !(vtop->r & VT_LVAL)) {
2738 /* if we are casting between pointer types,
2739 we must update the VT_LVAL_xxx size */
2740 vtop->r = (vtop->r & ~VT_LVAL_TYPE)
2741 | (lvalue_type(type->ref->type.t) & VT_LVAL_TYPE);
2743 vtop->type = *type;
2744 vtop->type.t &= ~ ( VT_CONSTANT | VT_VOLATILE | VT_ARRAY );
2747 /* return type size as known at compile time. Put alignment at 'a' */
2748 ST_FUNC int type_size(CType *type, int *a)
2750 Sym *s;
2751 int bt;
2753 bt = type->t & VT_BTYPE;
2754 if (bt == VT_STRUCT) {
2755 /* struct/union */
2756 s = type->ref;
2757 *a = s->r;
2758 return s->c;
2759 } else if (bt == VT_PTR) {
2760 if (type->t & VT_ARRAY) {
2761 int ts;
2763 s = type->ref;
2764 ts = type_size(&s->type, a);
2766 if (ts < 0 && s->c < 0)
2767 ts = -ts;
2769 return ts * s->c;
2770 } else {
2771 *a = PTR_SIZE;
2772 return PTR_SIZE;
2774 } else if (IS_ENUM(type->t) && type->ref->c == -1) {
2775 return -1; /* incomplete enum */
2776 } else if (bt == VT_LDOUBLE) {
2777 *a = LDOUBLE_ALIGN;
2778 return LDOUBLE_SIZE;
2779 } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
2780 #ifdef TCC_TARGET_I386
2781 #ifdef TCC_TARGET_PE
2782 *a = 8;
2783 #else
2784 *a = 4;
2785 #endif
2786 #elif defined(TCC_TARGET_ARM)
2787 #ifdef TCC_ARM_EABI
2788 *a = 8;
2789 #else
2790 *a = 4;
2791 #endif
2792 #else
2793 *a = 8;
2794 #endif
2795 return 8;
2796 } else if (bt == VT_INT || bt == VT_FLOAT) {
2797 *a = 4;
2798 return 4;
2799 } else if (bt == VT_SHORT) {
2800 *a = 2;
2801 return 2;
2802 } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
2803 *a = 8;
2804 return 16;
2805 } else {
2806 /* char, void, function, _Bool */
2807 *a = 1;
2808 return 1;
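/* Worked examples: "int a[10]" returns 4 (element size) * 10 == 40 with
   alignment 4; a plain pointer reports PTR_SIZE for both size and
   alignment; a struct returns the size and alignment stored on its Sym
   by struct_layout(). */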
2812 /* push type size as known at run time on top of value stack. Put
2813 alignment at 'a' */
2814 ST_FUNC void vla_runtime_type_size(CType *type, int *a)
2816 if (type->t & VT_VLA) {
2817 type_size(&type->ref->type, a);
2818 vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
2819 } else {
2820 vpushi(type_size(type, a));
2824 static void vla_sp_restore(void) {
2825 if (vlas_in_scope) {
2826 gen_vla_sp_restore(vla_sp_loc);
2830 static void vla_sp_restore_root(void) {
2831 if (vlas_in_scope) {
2832 gen_vla_sp_restore(vla_sp_root_loc);
2836 /* return the pointed type of t */
2837 static inline CType *pointed_type(CType *type)
2839 return &type->ref->type;
2842 /* modify type so that it becomes a pointer to the original type. */
2843 ST_FUNC void mk_pointer(CType *type)
2845 Sym *s;
2846 s = sym_push(SYM_FIELD, type, 0, -1);
2847 type->t = VT_PTR | (type->t & VT_STORAGE);
2848 type->ref = s;
2851 /* compare function types. OLD functions match any new functions */
2852 static int is_compatible_func(CType *type1, CType *type2)
2854 Sym *s1, *s2;
2856 s1 = type1->ref;
2857 s2 = type2->ref;
2858 if (!is_compatible_types(&s1->type, &s2->type))
2859 return 0;
2860 /* check func_call */
2861 if (s1->f.func_call != s2->f.func_call)
2862 return 0;
2863 /* XXX: not complete */
2864 if (s1->f.func_type == FUNC_OLD || s2->f.func_type == FUNC_OLD)
2865 return 1;
2866 if (s1->f.func_type != s2->f.func_type)
2867 return 0;
2868 while (s1 != NULL) {
2869 if (s2 == NULL)
2870 return 0;
2871 if (!is_compatible_unqualified_types(&s1->type, &s2->type))
2872 return 0;
2873 s1 = s1->next;
2874 s2 = s2->next;
2876 if (s2)
2877 return 0;
2878 return 1;
2881 /* return true if type1 and type2 are the same. If unqualified is
2882 true, qualifiers on the types are ignored.
2884 static int compare_types(CType *type1, CType *type2, int unqualified)
2886 int bt1, t1, t2;
2888 t1 = type1->t & VT_TYPE;
2889 t2 = type2->t & VT_TYPE;
2890 if (unqualified) {
2891 /* strip qualifiers before comparing */
2892 t1 &= ~(VT_CONSTANT | VT_VOLATILE);
2893 t2 &= ~(VT_CONSTANT | VT_VOLATILE);
2896 /* Default vs. explicit signedness only matters for char */
2897 if ((t1 & VT_BTYPE) != VT_BYTE) {
2898 t1 &= ~VT_DEFSIGN;
2899 t2 &= ~VT_DEFSIGN;
2901 /* XXX: bitfields ? */
2902 if (t1 != t2)
2903 return 0;
2904 /* test more complicated cases */
2905 bt1 = t1 & (VT_BTYPE | VT_ARRAY);
2906 if (bt1 == VT_PTR) {
2907 type1 = pointed_type(type1);
2908 type2 = pointed_type(type2);
2909 return is_compatible_types(type1, type2);
2910 } else if (bt1 & VT_ARRAY) {
2911 return type1->ref->c < 0 || type2->ref->c < 0
2912 || type1->ref->c == type2->ref->c;
2913 } else if (bt1 == VT_STRUCT) {
2914 return (type1->ref == type2->ref);
2915 } else if (bt1 == VT_FUNC) {
2916 return is_compatible_func(type1, type2);
2917 } else if (IS_ENUM(type1->t) || IS_ENUM(type2->t)) {
2918 return type1->ref == type2->ref;
2919 } else {
2920 return 1;
2924 /* return true if type1 and type2 are exactly the same (including
2925 qualifiers).
2927 static int is_compatible_types(CType *type1, CType *type2)
2929 return compare_types(type1,type2,0);
2932 /* return true if type1 and type2 are the same (ignoring qualifiers).
2934 static int is_compatible_unqualified_types(CType *type1, CType *type2)
2936 return compare_types(type1,type2,1);
2939 /* print a type. If 'varstr' is not NULL, then the variable is also
2940 printed in the type */
2941 /* XXX: union */
2942 /* XXX: add array and function pointers */
2943 static void type_to_str(char *buf, int buf_size,
2944 CType *type, const char *varstr)
2946 int bt, v, t;
2947 Sym *s, *sa;
2948 char buf1[256];
2949 const char *tstr;
2951 t = type->t;
2952 bt = t & VT_BTYPE;
2953 buf[0] = '\0';
2955 if (t & VT_EXTERN)
2956 pstrcat(buf, buf_size, "extern ");
2957 if (t & VT_STATIC)
2958 pstrcat(buf, buf_size, "static ");
2959 if (t & VT_TYPEDEF)
2960 pstrcat(buf, buf_size, "typedef ");
2961 if (t & VT_INLINE)
2962 pstrcat(buf, buf_size, "inline ");
2963 if (t & VT_VOLATILE)
2964 pstrcat(buf, buf_size, "volatile ");
2965 if (t & VT_CONSTANT)
2966 pstrcat(buf, buf_size, "const ");
2968 if (((t & VT_DEFSIGN) && bt == VT_BYTE)
2969 || ((t & VT_UNSIGNED)
2970 && (bt == VT_SHORT || bt == VT_INT || bt == VT_LLONG)
2971 && !IS_ENUM(t)
2973 pstrcat(buf, buf_size, (t & VT_UNSIGNED) ? "unsigned " : "signed ");
2975 buf_size -= strlen(buf);
2976 buf += strlen(buf);
2978 switch(bt) {
2979 case VT_VOID:
2980 tstr = "void";
2981 goto add_tstr;
2982 case VT_BOOL:
2983 tstr = "_Bool";
2984 goto add_tstr;
2985 case VT_BYTE:
2986 tstr = "char";
2987 goto add_tstr;
2988 case VT_SHORT:
2989 tstr = "short";
2990 goto add_tstr;
2991 case VT_INT:
2992 tstr = "int";
2993 goto maybe_long;
2994 case VT_LLONG:
2995 tstr = "long long";
2996 maybe_long:
2997 if (t & VT_LONG)
2998 tstr = "long";
2999 if (!IS_ENUM(t))
3000 goto add_tstr;
3001 tstr = "enum ";
3002 goto tstruct;
3003 case VT_FLOAT:
3004 tstr = "float";
3005 goto add_tstr;
3006 case VT_DOUBLE:
3007 tstr = "double";
3008 goto add_tstr;
3009 case VT_LDOUBLE:
3010 tstr = "long double";
3011 add_tstr:
3012 pstrcat(buf, buf_size, tstr);
3013 break;
3014 case VT_STRUCT:
3015 tstr = "struct ";
3016 if (IS_UNION(t))
3017 tstr = "union ";
3018 tstruct:
3019 pstrcat(buf, buf_size, tstr);
3020 v = type->ref->v & ~SYM_STRUCT;
3021 if (v >= SYM_FIRST_ANOM)
3022 pstrcat(buf, buf_size, "<anonymous>");
3023 else
3024 pstrcat(buf, buf_size, get_tok_str(v, NULL));
3025 break;
3026 case VT_FUNC:
3027 s = type->ref;
3028 buf1[0]=0;
3029 if (varstr && '*' == *varstr) {
3030 pstrcat(buf1, sizeof(buf1), "(");
3031 pstrcat(buf1, sizeof(buf1), varstr);
3032 pstrcat(buf1, sizeof(buf1), ")");
3034 pstrcat(buf1, sizeof(buf1), "(");
3035 sa = s->next;
3036 while (sa != NULL) {
3037 char buf2[256];
3038 type_to_str(buf2, sizeof(buf2), &sa->type, NULL);
3039 pstrcat(buf1, sizeof(buf1), buf2);
3040 sa = sa->next;
3041 if (sa)
3042 pstrcat(buf1, sizeof(buf1), ", ");
3044 if (s->f.func_type == FUNC_ELLIPSIS)
3045 pstrcat(buf1, sizeof(buf1), ", ...");
3046 pstrcat(buf1, sizeof(buf1), ")");
3047 type_to_str(buf, buf_size, &s->type, buf1);
3048 goto no_var;
3049 case VT_PTR:
3050 s = type->ref;
3051 if (t & VT_ARRAY) {
3052 if (varstr && '*' == *varstr)
3053 snprintf(buf1, sizeof(buf1), "(%s)[%d]", varstr, s->c);
3054 else
3055 snprintf(buf1, sizeof(buf1), "%s[%d]", varstr ? varstr : "", s->c);
3056 type_to_str(buf, buf_size, &s->type, buf1);
3057 goto no_var;
3059 pstrcpy(buf1, sizeof(buf1), "*");
3060 if (t & VT_CONSTANT)
3061 pstrcat(buf1, sizeof(buf1), "const ");
3062 if (t & VT_VOLATILE)
3063 pstrcat(buf1, sizeof(buf1), "volatile ");
3064 if (varstr)
3065 pstrcat(buf1, sizeof(buf1), varstr);
3066 type_to_str(buf, buf_size, &s->type, buf1);
3067 goto no_var;
3069 if (varstr) {
3070 pstrcat(buf, buf_size, " ");
3071 pstrcat(buf, buf_size, varstr);
3073 no_var: ;
3076 /* verify type compatibility to store vtop in 'dt' type, and generate
3077 casts if needed. */
3078 static void gen_assign_cast(CType *dt)
3080 CType *st, *type1, *type2;
3081 char buf1[256], buf2[256];
3082 int dbt, sbt, qualwarn, lvl;
3084 st = &vtop->type; /* source type */
3085 dbt = dt->t & VT_BTYPE;
3086 sbt = st->t & VT_BTYPE;
3087 if (sbt == VT_VOID || dbt == VT_VOID) {
3088 if (sbt == VT_VOID && dbt == VT_VOID)
3089 ; /* It is Ok if both are void */
3090 else
3091 tcc_error("cannot cast from/to void");
3093 if (dt->t & VT_CONSTANT)
3094 tcc_warning("assignment of read-only location");
3095 switch(dbt) {
3096 case VT_PTR:
3097 /* special cases for pointers */
3098 /* '0' can also be a pointer */
3099 if (is_null_pointer(vtop))
3100 break;
3101 /* accept implicit pointer to integer cast with warning */
3102 if (is_integer_btype(sbt)) {
3103 tcc_warning("assignment makes pointer from integer without a cast");
3104 break;
3106 type1 = pointed_type(dt);
3107 if (sbt == VT_PTR)
3108 type2 = pointed_type(st);
3109 else if (sbt == VT_FUNC)
3110 type2 = st; /* a function is implicitly a function pointer */
3111 else
3112 goto error;
3113 if (is_compatible_types(type1, type2))
3114 break;
3115 for (qualwarn = lvl = 0;; ++lvl) {
3116 if (((type2->t & VT_CONSTANT) && !(type1->t & VT_CONSTANT)) ||
3117 ((type2->t & VT_VOLATILE) && !(type1->t & VT_VOLATILE)))
3118 qualwarn = 1;
3119 dbt = type1->t & (VT_BTYPE|VT_LONG);
3120 sbt = type2->t & (VT_BTYPE|VT_LONG);
3121 if (dbt != VT_PTR || sbt != VT_PTR)
3122 break;
3123 type1 = pointed_type(type1);
3124 type2 = pointed_type(type2);
3126 if (!is_compatible_unqualified_types(type1, type2)) {
3127 if ((dbt == VT_VOID || sbt == VT_VOID) && lvl == 0) {
3128 /* void * can match anything */
3129 } else if (dbt == sbt
3130 && is_integer_btype(sbt & VT_BTYPE)
3131 && IS_ENUM(type1->t) + IS_ENUM(type2->t)
3132 + !!((type1->t ^ type2->t) & VT_UNSIGNED) < 2) {
3133 /* Like GCC, don't warn by default for mere changes
3134 in pointer target signedness. Do warn for different
3135 base types, though, in particular for unsigned enums
3136 and signed int targets. */
3137 } else {
3138 tcc_warning("assignment from incompatible pointer type");
3139 break;
3142 if (qualwarn)
3143 tcc_warning("assignment discards qualifiers from pointer target type");
3144 break;
3145 case VT_BYTE:
3146 case VT_SHORT:
3147 case VT_INT:
3148 case VT_LLONG:
3149 if (sbt == VT_PTR || sbt == VT_FUNC) {
3150 tcc_warning("assignment makes integer from pointer without a cast");
3151 } else if (sbt == VT_STRUCT) {
3152 goto case_VT_STRUCT;
3154 /* XXX: more tests */
3155 break;
3156 case VT_STRUCT:
3157 case_VT_STRUCT:
3158 if (!is_compatible_unqualified_types(dt, st)) {
3159 error:
3160 type_to_str(buf1, sizeof(buf1), st, NULL);
3161 type_to_str(buf2, sizeof(buf2), dt, NULL);
3162 tcc_error("cannot cast '%s' to '%s'", buf1, buf2);
3164 break;
3166 gen_cast(dt);
3169 /* store vtop in lvalue pushed on stack */
3170 ST_FUNC void vstore(void)
3172 int sbt, dbt, ft, r, t, size, align, bit_size, bit_pos, rc, delayed_cast;
3174 ft = vtop[-1].type.t;
3175 sbt = vtop->type.t & VT_BTYPE;
3176 dbt = ft & VT_BTYPE;
3177 if ((((sbt == VT_INT || sbt == VT_SHORT) && dbt == VT_BYTE) ||
3178 (sbt == VT_INT && dbt == VT_SHORT))
3179 && !(vtop->type.t & VT_BITFIELD)) {
3180 /* optimize char/short casts */
3181 delayed_cast = VT_MUSTCAST;
3182 vtop->type.t = ft & VT_TYPE;
3183 /* XXX: factorize */
3184 if (ft & VT_CONSTANT)
3185 tcc_warning("assignment of read-only location");
3186 } else {
3187 delayed_cast = 0;
3188 if (!(ft & VT_BITFIELD))
3189 gen_assign_cast(&vtop[-1].type);
3192 if (sbt == VT_STRUCT) {
3193 /* if structure, only generate pointer */
3194 /* structure assignment : generate memcpy */
3195 /* XXX: optimize if small size */
3196 size = type_size(&vtop->type, &align);
3198 /* destination */
3199 vswap();
3200 vtop->type.t = VT_PTR;
3201 gaddrof();
3203 /* address of memcpy() */
3204 #ifdef TCC_ARM_EABI
3205 if(!(align & 7))
3206 vpush_global_sym(&func_old_type, TOK_memcpy8);
3207 else if(!(align & 3))
3208 vpush_global_sym(&func_old_type, TOK_memcpy4);
3209 else
3210 #endif
3211 /* Use memmove, rather than memcpy, as dest and src may be the same: */
3212 vpush_global_sym(&func_old_type, TOK_memmove);
3214 vswap();
3215 /* source */
3216 vpushv(vtop - 2);
3217 vtop->type.t = VT_PTR;
3218 gaddrof();
3219 /* type size */
3220 vpushi(size);
3221 gfunc_call(3);
3223 /* leave source on stack */
3224 } else if (ft & VT_BITFIELD) {
3225 /* bitfield store handling */
3227 /* save lvalue as expression result (example: s.b = s.a = n;) */
3228 vdup(), vtop[-1] = vtop[-2];
3230 bit_pos = BIT_POS(ft);
3231 bit_size = BIT_SIZE(ft);
3232 /* remove bit field info to avoid loops */
3233 vtop[-1].type.t = ft & ~VT_STRUCT_MASK;
3235 if ((ft & VT_BTYPE) == VT_BOOL) {
3236 gen_cast(&vtop[-1].type);
3237 vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
3240 r = adjust_bf(vtop - 1, bit_pos, bit_size);
3241 if (r == VT_STRUCT) {
3242 gen_cast_s((ft & VT_BTYPE) == VT_LLONG ? VT_LLONG : VT_INT);
3243 store_packed_bf(bit_pos, bit_size);
3244 } else {
3245 unsigned long long mask = (1ULL << bit_size) - 1;
3246 if ((ft & VT_BTYPE) != VT_BOOL) {
3247 /* mask source */
3248 if ((vtop[-1].type.t & VT_BTYPE) == VT_LLONG)
3249 vpushll(mask);
3250 else
3251 vpushi((unsigned)mask);
3252 gen_op('&');
3254 /* shift source */
3255 vpushi(bit_pos);
3256 gen_op(TOK_SHL);
3257 vswap();
3258 /* duplicate destination */
3259 vdup();
3260 vrott(3);
3261 /* load destination, mask and or with source */
3262 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
3263 vpushll(~(mask << bit_pos));
3264 else
3265 vpushi(~((unsigned)mask << bit_pos));
3266 gen_op('&');
3267 gen_op('|');
3268 /* store result */
3269 vstore();
3270 /* ... and discard */
3271 vpop();
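/* Worked example: storing n into "int f:5" at bit position 3 masks the
   source with 0x1f, shifts it left by 3, clears bits 3..7 of the loaded
   destination with the inverted mask, ORs both and stores the result
   back. */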
3273 } else if (dbt == VT_VOID) {
3274 --vtop;
3275 } else {
3276 #ifdef CONFIG_TCC_BCHECK
3277 /* bound check case */
3278 if (vtop[-1].r & VT_MUSTBOUND) {
3279 vswap();
3280 gbound();
3281 vswap();
3283 #endif
3284 rc = RC_INT;
3285 if (is_float(ft)) {
3286 rc = RC_FLOAT;
3287 #ifdef TCC_TARGET_X86_64
3288 if ((ft & VT_BTYPE) == VT_LDOUBLE) {
3289 rc = RC_ST0;
3290 } else if ((ft & VT_BTYPE) == VT_QFLOAT) {
3291 rc = RC_FRET;
3293 #endif
3295 r = gv(rc); /* generate value */
3296 /* if lvalue was saved on stack, must read it */
3297 if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
3298 SValue sv;
3299 t = get_reg(RC_INT);
3300 #if PTR_SIZE == 8
3301 sv.type.t = VT_PTR;
3302 #else
3303 sv.type.t = VT_INT;
3304 #endif
3305 sv.r = VT_LOCAL | VT_LVAL;
3306 sv.c.i = vtop[-1].c.i;
3307 load(t, &sv);
3308 vtop[-1].r = t | VT_LVAL;
3310 /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
3311 #if PTR_SIZE == 8
3312 if (((ft & VT_BTYPE) == VT_QLONG) || ((ft & VT_BTYPE) == VT_QFLOAT)) {
3313 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
3314 #else
3315 if ((ft & VT_BTYPE) == VT_LLONG) {
3316 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
3317 #endif
3318 vtop[-1].type.t = load_type;
3319 store(r, vtop - 1);
3320 vswap();
3321 /* convert to int to increment easily */
3322 vtop->type.t = addr_type;
3323 gaddrof();
3324 vpushi(load_size);
3325 gen_op('+');
3326 vtop->r |= VT_LVAL;
3327 vswap();
3328 vtop[-1].type.t = load_type;
3329 /* XXX: it works because r2 is spilled last ! */
3330 store(vtop->r2, vtop - 1);
3331 } else {
3332 store(r, vtop - 1);
3335 vswap();
3336 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
3337 vtop->r |= delayed_cast;
3341 /* post selects post (1) or pre (0) increment/decrement. c is the token ++ or -- */
3342 ST_FUNC void inc(int post, int c)
3344 test_lvalue();
3345 vdup(); /* save lvalue */
3346 if (post) {
3347 gv_dup(); /* duplicate value */
3348 vrotb(3);
3349 vrotb(3);
3351 /* add constant */
3352 vpushi(c - TOK_MID);
3353 gen_op('+');
3354 vstore(); /* store value */
3355 if (post)
3356 vpop(); /* if post op, return saved value */
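/* Note: TOK_MID lies between TOK_DEC and TOK_INC, so "c - TOK_MID" above
   pushes +1 for "++" and -1 for "--"; for the post form the duplicated
   original value stays on the stack as the expression result. */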
3359 ST_FUNC void parse_mult_str (CString *astr, const char *msg)
3361 /* read the string */
3362 if (tok != TOK_STR)
3363 expect(msg);
3364 cstr_new(astr);
3365 while (tok == TOK_STR) {
3366 /* XXX: add \0 handling too ? */
3367 cstr_cat(astr, tokc.str.data, -1);
3368 next();
3370 cstr_ccat(astr, '\0');
3373 /* If I is >= 1 and a power of two, returns log2(i)+1.
3374 If I is 0 returns 0. */
3375 static int exact_log2p1(int i)
3377 int ret;
3378 if (!i)
3379 return 0;
3380 for (ret = 1; i >= 1 << 8; ret += 8)
3381 i >>= 8;
3382 if (i >= 1 << 4)
3383 ret += 4, i >>= 4;
3384 if (i >= 1 << 2)
3385 ret += 2, i >>= 2;
3386 if (i >= 1 << 1)
3387 ret++;
3388 return ret;
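/* Example values: exact_log2p1(1) == 1, exact_log2p1(8) == 4,
   exact_log2p1(0) == 0.  parse_attribute() below stores this log2+1
   encoding in a.aligned. */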
3391 /* Parse __attribute__((...)) GNUC extension. */
3392 static void parse_attribute(AttributeDef *ad)
3394 int t, n;
3395 CString astr;
3397 redo:
3398 if (tok != TOK_ATTRIBUTE1 && tok != TOK_ATTRIBUTE2)
3399 return;
3400 next();
3401 skip('(');
3402 skip('(');
3403 while (tok != ')') {
3404 if (tok < TOK_IDENT)
3405 expect("attribute name");
3406 t = tok;
3407 next();
3408 switch(t) {
3409 case TOK_CLEANUP1:
3410 case TOK_CLEANUP2:
3412 Sym *s;
3414 skip('(');
3415 s = sym_find(tok);
3416 if (!s) {
3417 tcc_warning("implicit declaration of function '%s'",
3418 get_tok_str(tok, &tokc));
3419 s = external_global_sym(tok, &func_old_type, 0);
3421 ad->cleanup_func = s;
3422 next();
3423 skip(')');
3424 break;
3426 case TOK_SECTION1:
3427 case TOK_SECTION2:
3428 skip('(');
3429 parse_mult_str(&astr, "section name");
3430 ad->section = find_section(tcc_state, (char *)astr.data);
3431 skip(')');
3432 cstr_free(&astr);
3433 break;
3434 case TOK_ALIAS1:
3435 case TOK_ALIAS2:
3436 skip('(');
3437 parse_mult_str(&astr, "alias(\"target\")");
3438 ad->alias_target = /* save string as token, for later */
3439 tok_alloc((char*)astr.data, astr.size-1)->tok;
3440 skip(')');
3441 cstr_free(&astr);
3442 break;
3443 case TOK_VISIBILITY1:
3444 case TOK_VISIBILITY2:
3445 skip('(');
3446 parse_mult_str(&astr,
3447 "visibility(\"default|hidden|internal|protected\")");
3448 if (!strcmp (astr.data, "default"))
3449 ad->a.visibility = STV_DEFAULT;
3450 else if (!strcmp (astr.data, "hidden"))
3451 ad->a.visibility = STV_HIDDEN;
3452 else if (!strcmp (astr.data, "internal"))
3453 ad->a.visibility = STV_INTERNAL;
3454 else if (!strcmp (astr.data, "protected"))
3455 ad->a.visibility = STV_PROTECTED;
3456 else
3457 expect("visibility(\"default|hidden|internal|protected\")");
3458 skip(')');
3459 cstr_free(&astr);
3460 break;
3461 case TOK_ALIGNED1:
3462 case TOK_ALIGNED2:
3463 if (tok == '(') {
3464 next();
3465 n = expr_const();
3466 if (n <= 0 || (n & (n - 1)) != 0)
3467 tcc_error("alignment must be a positive power of two");
3468 skip(')');
3469 } else {
3470 n = MAX_ALIGN;
3472 ad->a.aligned = exact_log2p1(n);
3473 if (n != 1 << (ad->a.aligned - 1))
3474 tcc_error("alignment of %d is larger than implemented", n);
3475 break;
3476 case TOK_PACKED1:
3477 case TOK_PACKED2:
3478 ad->a.packed = 1;
3479 break;
3480 case TOK_WEAK1:
3481 case TOK_WEAK2:
3482 ad->a.weak = 1;
3483 break;
3484 case TOK_UNUSED1:
3485 case TOK_UNUSED2:
3486 /* currently, no need to handle it because tcc does not
3487 track unused objects */
3488 break;
3489 case TOK_NORETURN1:
3490 case TOK_NORETURN2:
3491 /* currently ignored: tcc makes no use of the noreturn
3492 information */
3493 break;
3494 case TOK_CDECL1:
3495 case TOK_CDECL2:
3496 case TOK_CDECL3:
3497 ad->f.func_call = FUNC_CDECL;
3498 break;
3499 case TOK_STDCALL1:
3500 case TOK_STDCALL2:
3501 case TOK_STDCALL3:
3502 ad->f.func_call = FUNC_STDCALL;
3503 break;
3504 #ifdef TCC_TARGET_I386
3505 case TOK_REGPARM1:
3506 case TOK_REGPARM2:
3507 skip('(');
3508 n = expr_const();
3509 if (n > 3)
3510 n = 3;
3511 else if (n < 0)
3512 n = 0;
3513 if (n > 0)
3514 ad->f.func_call = FUNC_FASTCALL1 + n - 1;
3515 skip(')');
3516 break;
3517 case TOK_FASTCALL1:
3518 case TOK_FASTCALL2:
3519 case TOK_FASTCALL3:
3520 ad->f.func_call = FUNC_FASTCALLW;
3521 break;
3522 #endif
3523 case TOK_MODE:
3524 skip('(');
3525 switch(tok) {
3526 case TOK_MODE_DI:
3527 ad->attr_mode = VT_LLONG + 1;
3528 break;
3529 case TOK_MODE_QI:
3530 ad->attr_mode = VT_BYTE + 1;
3531 break;
3532 case TOK_MODE_HI:
3533 ad->attr_mode = VT_SHORT + 1;
3534 break;
3535 case TOK_MODE_SI:
3536 case TOK_MODE_word:
3537 ad->attr_mode = VT_INT + 1;
3538 break;
3539 default:
3540 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok, NULL));
3541 break;
3543 next();
3544 skip(')');
3545 break;
3546 case TOK_DLLEXPORT:
3547 ad->a.dllexport = 1;
3548 break;
3549 case TOK_NODECORATE:
3550 ad->a.nodecorate = 1;
3551 break;
3552 case TOK_DLLIMPORT:
3553 ad->a.dllimport = 1;
3554 break;
3555 default:
3556 if (tcc_state->warn_unsupported)
3557 tcc_warning("'%s' attribute ignored", get_tok_str(t, NULL));
3558 /* skip parameters */
3559 if (tok == '(') {
3560 int parenthesis = 0;
3561 do {
3562 if (tok == '(')
3563 parenthesis++;
3564 else if (tok == ')')
3565 parenthesis--;
3566 next();
3567 } while (parenthesis && tok != -1);
3569 break;
3571 if (tok != ',')
3572 break;
3573 next();
3575 skip(')');
3576 skip(')');
3577 goto redo;
3580 static Sym * find_field (CType *type, int v)
3582 Sym *s = type->ref;
3583 v |= SYM_FIELD;
3584 while ((s = s->next) != NULL) {
3585 if ((s->v & SYM_FIELD) &&
3586 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3587 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3588 Sym *ret = find_field (&s->type, v);
3589 if (ret)
3590 return ret;
3592 if (s->v == v)
3593 break;
3595 return s;
3598 static void struct_add_offset (Sym *s, int offset)
3600 while ((s = s->next) != NULL) {
3601 if ((s->v & SYM_FIELD) &&
3602 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3603 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3604 struct_add_offset(s->type.ref, offset);
3605 } else
3606 s->c += offset;
3610 static void struct_layout(CType *type, AttributeDef *ad)
3612 int size, align, maxalign, offset, c, bit_pos, bit_size;
3613 int packed, a, bt, prevbt, prev_bit_size;
3614 int pcc = !tcc_state->ms_bitfields;
3615 int pragma_pack = *tcc_state->pack_stack_ptr;
3616 Sym *f;
3618 maxalign = 1;
3619 offset = 0;
3620 c = 0;
3621 bit_pos = 0;
3622 prevbt = VT_STRUCT; /* make it never match */
3623 prev_bit_size = 0;
3625 //#define BF_DEBUG
3627 for (f = type->ref->next; f; f = f->next) {
3628 if (f->type.t & VT_BITFIELD)
3629 bit_size = BIT_SIZE(f->type.t);
3630 else
3631 bit_size = -1;
3632 size = type_size(&f->type, &align);
3633 a = f->a.aligned ? 1 << (f->a.aligned - 1) : 0;
3634 packed = 0;
3636 if (pcc && bit_size == 0) {
3637 /* in pcc mode, packing does not affect zero-width bitfields */
3639 } else {
3640 /* in pcc mode, attribute packed overrides if set. */
3641 if (pcc && (f->a.packed || ad->a.packed))
3642 align = packed = 1;
3644 /* pragma pack overrides align if smaller, and always packs bitfields */
3645 if (pragma_pack) {
3646 packed = 1;
3647 if (pragma_pack < align)
3648 align = pragma_pack;
3649 /* in pcc mode pragma pack also overrides individual align */
3650 if (pcc && pragma_pack < a)
3651 a = 0;
3654 /* some individual align was specified */
3655 if (a)
3656 align = a;
3658 if (type->ref->type.t == VT_UNION) {
3659 if (pcc && bit_size >= 0)
3660 size = (bit_size + 7) >> 3;
3661 offset = 0;
3662 if (size > c)
3663 c = size;
3665 } else if (bit_size < 0) {
3666 if (pcc)
3667 c += (bit_pos + 7) >> 3;
3668 c = (c + align - 1) & -align;
3669 offset = c;
3670 if (size > 0)
3671 c += size;
3672 bit_pos = 0;
3673 prevbt = VT_STRUCT;
3674 prev_bit_size = 0;
3676 } else {
3677 /* A bit-field. Layout is more complicated. There are two
3678 options: PCC (GCC) compatible and MS compatible */
3679 if (pcc) {
3680 /* In PCC layout a bit-field is placed adjacent to the
3681 preceding bit-fields, except if:
3682 - it has zero-width
3683 - an individual alignment was given
3684 - it would overflow its base type container and
3685 there is no packing */
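/* Worked example (PCC rules): in "struct { int a:3; int b:30; }" b does
   not fit into the 29 bits left in a's int-sized unit, so the check
   below starts a new aligned unit for it; with "int b:20" instead, both
   bit-fields share the same 4 bytes. */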
3686 if (bit_size == 0) {
3687 new_field:
3688 c = (c + ((bit_pos + 7) >> 3) + align - 1) & -align;
3689 bit_pos = 0;
3690 } else if (f->a.aligned) {
3691 goto new_field;
3692 } else if (!packed) {
3693 int a8 = align * 8;
3694 int ofs = ((c * 8 + bit_pos) % a8 + bit_size + a8 - 1) / a8;
3695 if (ofs > size / align)
3696 goto new_field;
3699 /* in pcc mode, long long bitfields have type int if they fit */
3700 if (size == 8 && bit_size <= 32)
3701 f->type.t = (f->type.t & ~VT_BTYPE) | VT_INT, size = 4;
3703 while (bit_pos >= align * 8)
3704 c += align, bit_pos -= align * 8;
3705 offset = c;
3707 /* In PCC layout named bit-fields influence the alignment
3708 of the containing struct using the base type's alignment,
3709 except for packed fields (which here have correct align). */
3710 if (f->v & SYM_FIRST_ANOM
3711 // && bit_size // ??? gcc on ARM/rpi does that
3713 align = 1;
3715 } else {
3716 bt = f->type.t & VT_BTYPE;
3717 if ((bit_pos + bit_size > size * 8)
3718 || (bit_size > 0) == (bt != prevbt)
3720 c = (c + align - 1) & -align;
3721 offset = c;
3722 bit_pos = 0;
3723 /* In MS bitfield mode a bit-field run always uses
3724 at least as many bits as the underlying type.
3725 To start a new run it's also required that this
3726 or the last bit-field had non-zero width. */
3727 if (bit_size || prev_bit_size)
3728 c += size;
3730 /* In MS layout the record's alignment is normally
3731 influenced by the field, except for a zero-width
3732 field at the start of a run (but by further zero-width
3733 fields it is again). */
3734 if (bit_size == 0 && prevbt != bt)
3735 align = 1;
3736 prevbt = bt;
3737 prev_bit_size = bit_size;
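/* Worked example (MS rules): "struct { int a:3; char b:2; }" starts a
   new run for b because its base type differs from the previous one,
   whereas "struct { int a:3; int b:2; }" keeps both bit-fields in the
   same 4-byte int run. */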
3740 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
3741 | (bit_pos << VT_STRUCT_SHIFT);
3742 bit_pos += bit_size;
3744 if (align > maxalign)
3745 maxalign = align;
3747 #ifdef BF_DEBUG
3748 printf("set field %s offset %-2d size %-2d align %-2d",
3749 get_tok_str(f->v & ~SYM_FIELD, NULL), offset, size, align);
3750 if (f->type.t & VT_BITFIELD) {
3751 printf(" pos %-2d bits %-2d",
3752 BIT_POS(f->type.t),
3753 BIT_SIZE(f->type.t)
3756 printf("\n");
3757 #endif
3759 if (f->v & SYM_FIRST_ANOM && (f->type.t & VT_BTYPE) == VT_STRUCT) {
3760 Sym *ass;
3761 /* An anonymous struct/union. Adjust member offsets
3762 to reflect the real offset of our containing struct.
3763 Also set the offset of this anon member inside
3764 the outer struct to be zero. This way it
3765 works when accessing the field offset directly
3766 (from base object), as well as when recursing
3767 members in initializer handling. */
3768 int v2 = f->type.ref->v;
3769 if (!(v2 & SYM_FIELD) &&
3770 (v2 & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
3771 Sym **pps;
3772 /* This happens only with MS extensions. The
3773 anon member has a named struct type, so it
3774 potentially is shared with other references.
3775 We need to unshare members so we can modify
3776 them. */
3777 ass = f->type.ref;
3778 f->type.ref = sym_push(anon_sym++ | SYM_FIELD,
3779 &f->type.ref->type, 0,
3780 f->type.ref->c);
3781 pps = &f->type.ref->next;
3782 while ((ass = ass->next) != NULL) {
3783 *pps = sym_push(ass->v, &ass->type, 0, ass->c);
3784 pps = &((*pps)->next);
3786 *pps = NULL;
3788 struct_add_offset(f->type.ref, offset);
3789 f->c = 0;
3790 } else {
3791 f->c = offset;
3794 f->r = 0;
3797 if (pcc)
3798 c += (bit_pos + 7) >> 3;
3800 /* store size and alignment */
3801 a = bt = ad->a.aligned ? 1 << (ad->a.aligned - 1) : 1;
3802 if (a < maxalign)
3803 a = maxalign;
3804 type->ref->r = a;
3805 if (pragma_pack && pragma_pack < maxalign && 0 == pcc) {
3806 /* can happen if individual align for some member was given. In
3807 this case MSVC ignores maxalign when aligning the size */
3808 a = pragma_pack;
3809 if (a < bt)
3810 a = bt;
3812 c = (c + a - 1) & -a;
3813 type->ref->c = c;
3815 #ifdef BF_DEBUG
3816 printf("struct size %-2d align %-2d\n\n", c, a), fflush(stdout);
3817 #endif
3819 /* check whether we can access bitfields by their type */
3820 for (f = type->ref->next; f; f = f->next) {
3821 int s, px, cx, c0;
3822 CType t;
3824 if (0 == (f->type.t & VT_BITFIELD))
3825 continue;
3826 f->type.ref = f;
3827 f->auxtype = -1;
3828 bit_size = BIT_SIZE(f->type.t);
3829 if (bit_size == 0)
3830 continue;
3831 bit_pos = BIT_POS(f->type.t);
3832 size = type_size(&f->type, &align);
3833 if (bit_pos + bit_size <= size * 8 && f->c + size <= c)
3834 continue;
3836 /* try to access the field using a different type */
3837 c0 = -1, s = align = 1;
3838 for (;;) {
3839 px = f->c * 8 + bit_pos;
3840 cx = (px >> 3) & -align;
3841 px = px - (cx << 3);
3842 if (c0 == cx)
3843 break;
3844 s = (px + bit_size + 7) >> 3;
3845 if (s > 4) {
3846 t.t = VT_LLONG;
3847 } else if (s > 2) {
3848 t.t = VT_INT;
3849 } else if (s > 1) {
3850 t.t = VT_SHORT;
3851 } else {
3852 t.t = VT_BYTE;
3854 s = type_size(&t, &align);
3855 c0 = cx;
3858 if (px + bit_size <= s * 8 && cx + s <= c) {
3859 /* update offset and bit position */
3860 f->c = cx;
3861 bit_pos = px;
3862 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
3863 | (bit_pos << VT_STRUCT_SHIFT);
3864 if (s != size)
3865 f->auxtype = t.t;
3866 #ifdef BF_DEBUG
3867 printf("FIX field %s offset %-2d size %-2d align %-2d "
3868 "pos %-2d bits %-2d\n",
3869 get_tok_str(f->v & ~SYM_FIELD, NULL),
3870 cx, s, align, px, bit_size);
3871 #endif
3872 } else {
3873 /* fall back to load/store single-byte wise */
3874 f->auxtype = VT_STRUCT;
3875 #ifdef BF_DEBUG
3876 printf("FIX field %s : load byte-wise\n",
3877 get_tok_str(f->v & ~SYM_FIELD, NULL));
3878 #endif
3883 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
3884 static void struct_decl(CType *type, int u)
3886 int v, c, size, align, flexible;
3887 int bit_size, bsize, bt;
3888 Sym *s, *ss, **ps;
3889 AttributeDef ad, ad1;
3890 CType type1, btype;
3892 memset(&ad, 0, sizeof ad);
3893 next();
3894 parse_attribute(&ad);
3895 if (tok != '{') {
3896 v = tok;
3897 next();
3898 /* struct already defined ? return it */
3899 if (v < TOK_IDENT)
3900 expect("struct/union/enum name");
3901 s = struct_find(v);
3902 if (s && (s->sym_scope == local_scope || tok != '{')) {
3903 if (u == s->type.t)
3904 goto do_decl;
3905 if (u == VT_ENUM && IS_ENUM(s->type.t))
3906 goto do_decl;
3907 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
3909 } else {
3910 v = anon_sym++;
3912 /* Record the original enum/struct/union token. */
3913 type1.t = u == VT_ENUM ? u | VT_INT | VT_UNSIGNED : u;
3914 type1.ref = NULL;
3915 /* we put an undefined size for struct/union */
3916 s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
3917 s->r = 0; /* default alignment is zero, as in gcc */
3918 do_decl:
3919 type->t = s->type.t;
3920 type->ref = s;
3922 if (tok == '{') {
3923 next();
3924 if (s->c != -1)
3925 tcc_error("struct/union/enum already defined");
3926 /* cannot be empty */
3927 /* empty enums are not allowed */
3928 ps = &s->next;
3929 if (u == VT_ENUM) {
3930 long long ll = 0, pl = 0, nl = 0;
3931 CType t;
3932 t.ref = s;
3933 /* enum symbols have static storage */
3934 t.t = VT_INT|VT_STATIC|VT_ENUM_VAL;
3935 for(;;) {
3936 v = tok;
3937 if (v < TOK_UIDENT)
3938 expect("identifier");
3939 ss = sym_find(v);
3940 if (ss && !local_stack)
3941 tcc_error("redefinition of enumerator '%s'",
3942 get_tok_str(v, NULL));
3943 next();
3944 if (tok == '=') {
3945 next();
3946 ll = expr_const64();
3948 ss = sym_push(v, &t, VT_CONST, 0);
3949 ss->enum_val = ll;
3950 *ps = ss, ps = &ss->next;
3951 if (ll < nl)
3952 nl = ll;
3953 if (ll > pl)
3954 pl = ll;
3955 if (tok != ',')
3956 break;
3957 next();
3958 ll++;
3959 /* NOTE: we accept a trailing comma */
3960 if (tok == '}')
3961 break;
3963 skip('}');
3964 /* set integral type of the enum */
3965 t.t = VT_INT;
3966 if (nl >= 0) {
3967 if (pl != (unsigned)pl)
3968 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
3969 t.t |= VT_UNSIGNED;
3970 } else if (pl != (int)pl || nl != (int)nl)
3971 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
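/* Worked example: "enum { A = 1, B = 2 }" gets type unsigned int (all
   values non-negative and fitting); a positive value above UINT_MAX
   widens the enum to unsigned long long, and a negative value that does
   not fit in int widens it to long long. */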
3972 s->type.t = type->t = t.t | VT_ENUM;
3973 s->c = 0;
3974 /* set type for enum members */
3975 for (ss = s->next; ss; ss = ss->next) {
3976 ll = ss->enum_val;
3977 if (ll == (int)ll) /* default is int if it fits */
3978 continue;
3979 if (t.t & VT_UNSIGNED) {
3980 ss->type.t |= VT_UNSIGNED;
3981 if (ll == (unsigned)ll)
3982 continue;
3984 ss->type.t = (ss->type.t & ~VT_BTYPE)
3985 | (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
3987 } else {
3988 c = 0;
3989 flexible = 0;
3990 while (tok != '}') {
3991 if (!parse_btype(&btype, &ad1)) {
3992 skip(';');
3993 continue;
3995 while (1) {
3996 if (flexible)
3997 tcc_error("flexible array member '%s' not at the end of struct",
3998 get_tok_str(v, NULL));
3999 bit_size = -1;
4000 v = 0;
4001 type1 = btype;
4002 if (tok != ':') {
4003 if (tok != ';')
4004 type_decl(&type1, &ad1, &v, TYPE_DIRECT);
4005 if (v == 0) {
4006 if ((type1.t & VT_BTYPE) != VT_STRUCT)
4007 expect("identifier");
4008 else {
4009 int v = btype.ref->v;
4010 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
4011 if (tcc_state->ms_extensions == 0)
4012 expect("identifier");
4016 if (type_size(&type1, &align) < 0) {
4017 if ((u == VT_STRUCT) && (type1.t & VT_ARRAY) && c)
4018 flexible = 1;
4019 else
4020 tcc_error("field '%s' has incomplete type",
4021 get_tok_str(v, NULL));
4023 if ((type1.t & VT_BTYPE) == VT_FUNC ||
4024 (type1.t & VT_BTYPE) == VT_VOID ||
4025 (type1.t & VT_STORAGE))
4026 tcc_error("invalid type for '%s'",
4027 get_tok_str(v, NULL));
4029 if (tok == ':') {
4030 next();
4031 bit_size = expr_const();
4032 /* XXX: handle v = 0 case for messages */
4033 if (bit_size < 0)
4034 tcc_error("negative width in bit-field '%s'",
4035 get_tok_str(v, NULL));
4036 if (v && bit_size == 0)
4037 tcc_error("zero width for bit-field '%s'",
4038 get_tok_str(v, NULL));
4039 parse_attribute(&ad1);
4041 size = type_size(&type1, &align);
4042 if (bit_size >= 0) {
4043 bt = type1.t & VT_BTYPE;
4044 if (bt != VT_INT &&
4045 bt != VT_BYTE &&
4046 bt != VT_SHORT &&
4047 bt != VT_BOOL &&
4048 bt != VT_LLONG)
4049 tcc_error("bitfields must have scalar type");
4050 bsize = size * 8;
4051 if (bit_size > bsize) {
4052 tcc_error("width of '%s' exceeds its type",
4053 get_tok_str(v, NULL));
4054 } else if (bit_size == bsize
4055 && !ad.a.packed && !ad1.a.packed) {
4056 /* no need for bit fields */
4058 } else if (bit_size == 64) {
4059 tcc_error("field width 64 not implemented");
4060 } else {
4061 type1.t = (type1.t & ~VT_STRUCT_MASK)
4062 | VT_BITFIELD
4063 | (bit_size << (VT_STRUCT_SHIFT + 6));
4066 if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
4067 /* Remember we've seen a real field to check
4068 for placement of flexible array member. */
4069 c = 1;
4071 /* If member is a struct or bit-field, enforce
4072 placing into the struct (as anonymous). */
4073 if (v == 0 &&
4074 ((type1.t & VT_BTYPE) == VT_STRUCT ||
4075 bit_size >= 0)) {
4076 v = anon_sym++;
4078 if (v) {
4079 ss = sym_push(v | SYM_FIELD, &type1, 0, 0);
4080 ss->a = ad1.a;
4081 *ps = ss;
4082 ps = &ss->next;
4084 if (tok == ';' || tok == TOK_EOF)
4085 break;
4086 skip(',');
4088 skip(';');
4090 skip('}');
4091 parse_attribute(&ad);
4092 struct_layout(type, &ad);
4097 static void sym_to_attr(AttributeDef *ad, Sym *s)
4099 merge_symattr(&ad->a, &s->a);
4100 merge_funcattr(&ad->f, &s->f);
4103 /* Add type qualifiers to a type. If the type is an array then the qualifiers
4104 are added to the element type, copied because it could be a typedef. */
4105 static void parse_btype_qualify(CType *type, int qualifiers)
4107 while (type->t & VT_ARRAY) {
4108 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
4109 type = &type->ref->type;
4111 type->t |= qualifiers;
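/* Worked example: given "typedef int A[4];", qualifying a use such as
   "const A x;" descends into the array and applies VT_CONSTANT to the
   int element type, using a freshly pushed ref so the typedef itself
   stays unqualified. */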
4114 /* return 0 if no type declaration. otherwise, return the basic type
4115 and skip it.
4117 static int parse_btype(CType *type, AttributeDef *ad)
4119 int t, u, bt, st, type_found, typespec_found, g;
4120 Sym *s;
4121 CType type1;
4123 memset(ad, 0, sizeof(AttributeDef));
4124 type_found = 0;
4125 typespec_found = 0;
4126 t = VT_INT;
4127 bt = st = -1;
4128 type->ref = NULL;
4130 while(1) {
4131 switch(tok) {
4132 case TOK_EXTENSION:
4133 /* currently, we simply ignore __extension__ */
4134 next();
4135 continue;
4137 /* basic types */
4138 case TOK_CHAR:
4139 u = VT_BYTE;
4140 basic_type:
4141 next();
4142 basic_type1:
4143 if (u == VT_SHORT || u == VT_LONG) {
4144 if (st != -1 || (bt != -1 && bt != VT_INT))
4145 tmbt: tcc_error("too many basic types");
4146 st = u;
4147 } else {
4148 if (bt != -1 || (st != -1 && u != VT_INT))
4149 goto tmbt;
4150 bt = u;
4152 if (u != VT_INT)
4153 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
4154 typespec_found = 1;
4155 break;
4156 case TOK_VOID:
4157 u = VT_VOID;
4158 goto basic_type;
4159 case TOK_SHORT:
4160 u = VT_SHORT;
4161 goto basic_type;
4162 case TOK_INT:
4163 u = VT_INT;
4164 goto basic_type;
4165 case TOK_LONG:
4166 if ((t & VT_BTYPE) == VT_DOUBLE) {
4167 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
4168 } else if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
4169 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LLONG;
4170 } else {
4171 u = VT_LONG;
4172 goto basic_type;
4174 next();
4175 break;
4176 #ifdef TCC_TARGET_ARM64
4177 case TOK_UINT128:
4178 /* GCC's __uint128_t appears in some Linux header files. Make it a
4179 synonym for long double to get the size and alignment right. */
4180 u = VT_LDOUBLE;
4181 goto basic_type;
4182 #endif
4183 case TOK_BOOL:
4184 u = VT_BOOL;
4185 goto basic_type;
4186 case TOK_FLOAT:
4187 u = VT_FLOAT;
4188 goto basic_type;
4189 case TOK_DOUBLE:
4190 if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
4191 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
4192 } else {
4193 u = VT_DOUBLE;
4194 goto basic_type;
4196 next();
4197 break;
4198 case TOK_ENUM:
4199 struct_decl(&type1, VT_ENUM);
4200 basic_type2:
4201 u = type1.t;
4202 type->ref = type1.ref;
4203 goto basic_type1;
4204 case TOK_STRUCT:
4205 struct_decl(&type1, VT_STRUCT);
4206 goto basic_type2;
4207 case TOK_UNION:
4208 struct_decl(&type1, VT_UNION);
4209 goto basic_type2;
4211 /* type modifiers */
4212 case TOK_CONST1:
4213 case TOK_CONST2:
4214 case TOK_CONST3:
4215 type->t = t;
4216 parse_btype_qualify(type, VT_CONSTANT);
4217 t = type->t;
4218 next();
4219 break;
4220 case TOK_VOLATILE1:
4221 case TOK_VOLATILE2:
4222 case TOK_VOLATILE3:
4223 type->t = t;
4224 parse_btype_qualify(type, VT_VOLATILE);
4225 t = type->t;
4226 next();
4227 break;
4228 case TOK_SIGNED1:
4229 case TOK_SIGNED2:
4230 case TOK_SIGNED3:
4231 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
4232 tcc_error("signed and unsigned modifier");
4233 t |= VT_DEFSIGN;
4234 next();
4235 typespec_found = 1;
4236 break;
4237 case TOK_REGISTER:
4238 case TOK_AUTO:
4239 case TOK_RESTRICT1:
4240 case TOK_RESTRICT2:
4241 case TOK_RESTRICT3:
4242 next();
4243 break;
4244 case TOK_UNSIGNED:
4245 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
4246 tcc_error("signed and unsigned modifier");
4247 t |= VT_DEFSIGN | VT_UNSIGNED;
4248 next();
4249 typespec_found = 1;
4250 break;
4252 /* storage */
4253 case TOK_EXTERN:
4254 g = VT_EXTERN;
4255 goto storage;
4256 case TOK_STATIC:
4257 g = VT_STATIC;
4258 goto storage;
4259 case TOK_TYPEDEF:
4260 g = VT_TYPEDEF;
4261 goto storage;
4262 storage:
4263 if (t & (VT_EXTERN|VT_STATIC|VT_TYPEDEF) & ~g)
4264 tcc_error("multiple storage classes");
4265 t |= g;
4266 next();
4267 break;
4268 case TOK_INLINE1:
4269 case TOK_INLINE2:
4270 case TOK_INLINE3:
4271 t |= VT_INLINE;
4272 next();
4273 break;
4275 /* GNUC attribute */
4276 case TOK_ATTRIBUTE1:
4277 case TOK_ATTRIBUTE2:
4278 parse_attribute(ad);
4279 if (ad->attr_mode) {
4280 u = ad->attr_mode -1;
4281 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
4283 continue;
4284 /* GNUC typeof */
4285 case TOK_TYPEOF1:
4286 case TOK_TYPEOF2:
4287 case TOK_TYPEOF3:
4288 next();
4289 parse_expr_type(&type1);
4290 /* remove all storage modifiers except typedef */
4291 type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
4292 if (type1.ref)
4293 sym_to_attr(ad, type1.ref);
4294 goto basic_type2;
4295 default:
4296 if (typespec_found)
4297 goto the_end;
4298 s = sym_find(tok);
4299 if (!s || !(s->type.t & VT_TYPEDEF))
4300 goto the_end;
4301 t &= ~(VT_BTYPE|VT_LONG);
4302 u = t & ~(VT_CONSTANT | VT_VOLATILE), t ^= u;
4303 type->t = (s->type.t & ~VT_TYPEDEF) | u;
4304 type->ref = s->type.ref;
4305 if (t)
4306 parse_btype_qualify(type, t);
4307 t = type->t;
4308 /* get attributes from typedef */
4309 sym_to_attr(ad, s);
4310 next();
4311 typespec_found = 1;
4312 st = bt = -2;
4313 break;
4315 type_found = 1;
4317 the_end:
4318 if (tcc_state->char_is_unsigned) {
4319 if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
4320 t |= VT_UNSIGNED;
4322 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
4323 bt = t & (VT_BTYPE|VT_LONG);
4324 if (bt == VT_LONG)
4325 t |= LONG_SIZE == 8 ? VT_LLONG : VT_INT;
4326 #ifdef TCC_TARGET_PE
4327 if (bt == VT_LDOUBLE)
4328 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_DOUBLE;
4329 #endif
4330 type->t = t;
4331 return type_found;
4334 /* convert a function parameter type (array to pointer and function to
4335 function pointer) */
4336 static inline void convert_parameter_type(CType *pt)
4338 /* remove const and volatile qualifiers (XXX: const could be used
4339 to indicate a const function parameter) */
4340 pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
4341 /* array must be transformed to pointer according to ANSI C */
4342 pt->t &= ~VT_ARRAY;
4343 if ((pt->t & VT_BTYPE) == VT_FUNC) {
4344 mk_pointer(pt);
4348 ST_FUNC void parse_asm_str(CString *astr)
4350 skip('(');
4351 parse_mult_str(astr, "string constant");
4354 /* Parse an asm label and return the token */
4355 static int asm_label_instr(void)
4357 int v;
4358 CString astr;
4360 next();
4361 parse_asm_str(&astr);
4362 skip(')');
4363 #ifdef ASM_DEBUG
4364 printf("asm_alias: \"%s\"\n", (char *)astr.data);
4365 #endif
4366 v = tok_alloc(astr.data, astr.size - 1)->tok;
4367 cstr_free(&astr);
4368 return v;
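/* parse the function-parameter list or array dimensions that follow a
   (possibly abstract) declarator and build the resulting VT_FUNC /
   VT_ARRAY / VT_VLA type in 'type'.  Returns 0 when the '(' turns out
   to start a nested declarator (the caller then recurses), 1 otherwise. */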
4371 static int post_type(CType *type, AttributeDef *ad, int storage, int td)
4373 int n, l, t1, arg_size, align;
4374 Sym **plast, *s, *first;
4375 AttributeDef ad1;
4376 CType pt;
4378 if (tok == '(') {
4379 /* function type, or recursive declarator (return if so) */
4380 next();
4381 if (td && !(td & TYPE_ABSTRACT))
4382 return 0;
4383 if (tok == ')')
4384 l = 0;
4385 else if (parse_btype(&pt, &ad1))
4386 l = FUNC_NEW;
4387 else if (td) {
4388 merge_attr (ad, &ad1);
4389 return 0;
4390 } else
4391 l = FUNC_OLD;
4392 first = NULL;
4393 plast = &first;
4394 arg_size = 0;
4395 if (l) {
4396 for(;;) {
4397 /* read param name and compute offset */
4398 if (l != FUNC_OLD) {
4399 if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
4400 break;
4401 type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT);
4402 if ((pt.t & VT_BTYPE) == VT_VOID)
4403 tcc_error("parameter declared as void");
4404 } else {
4405 n = tok;
4406 if (n < TOK_UIDENT)
4407 expect("identifier");
4408 pt.t = VT_VOID; /* invalid type */
4409 next();
4411 convert_parameter_type(&pt);
4412 arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
4413 s = sym_push(n | SYM_FIELD, &pt, 0, 0);
4414 *plast = s;
4415 plast = &s->next;
4416 if (tok == ')')
4417 break;
4418 skip(',');
4419 if (l == FUNC_NEW && tok == TOK_DOTS) {
4420 l = FUNC_ELLIPSIS;
4421 next();
4422 break;
4424 if (l == FUNC_NEW && !parse_btype(&pt, &ad1))
4425 tcc_error("invalid type");
4427 } else
4428 /* if no parameters, then old type prototype */
4429 l = FUNC_OLD;
4430 skip(')');
4431 /* NOTE: const is ignored in returned type as it has a special
4432 meaning in gcc / C++ */
4433 type->t &= ~VT_CONSTANT;
4434 /* some ancient pre-K&R C allows a function to return an array
4435 and the array brackets to be put after the arguments, such
4436 that "int c()[]" means something like "int[] c()" */
4437 if (tok == '[') {
4438 next();
4439 skip(']'); /* only handle simple "[]" */
4440 mk_pointer(type);
4442 /* we push an anonymous symbol which will contain the function prototype */
4443 ad->f.func_args = arg_size;
4444 ad->f.func_type = l;
4445 s = sym_push(SYM_FIELD, type, 0, 0);
4446 s->a = ad->a;
4447 s->f = ad->f;
4448 s->next = first;
4449 type->t = VT_FUNC;
4450 type->ref = s;
4451 } else if (tok == '[') {
4452 int saved_nocode_wanted = nocode_wanted;
4453 /* array definition */
4454 next();
4455 while (1) {
4456 /* XXX The optional type-quals and static should only be accepted
4457 in parameter decls. The '*' as well, and then even only
4458 in prototypes (not function defs). */
4459 switch (tok) {
4460 case TOK_RESTRICT1: case TOK_RESTRICT2: case TOK_RESTRICT3:
4461 case TOK_CONST1:
4462 case TOK_VOLATILE1:
4463 case TOK_STATIC:
4464 case '*':
4465 next();
4466 continue;
4467 default:
4468 break;
4470 break;
4472 n = -1;
4473 t1 = 0;
4474 if (tok != ']') {
4475 if (!local_stack || (storage & VT_STATIC))
4476 vpushi(expr_const());
4477 else {
4478 /* For a VLA (which can only happen with local_stack && !VT_STATIC)
4479 the length must always be evaluated, even under nocode_wanted,
4480 so that its size slot is initialized (e.g. under sizeof
4481 or typeof). */
4482 nocode_wanted = 0;
4483 gexpr();
4485 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4486 n = vtop->c.i;
4487 if (n < 0)
4488 tcc_error("invalid array size");
4489 } else {
4490 if (!is_integer_btype(vtop->type.t & VT_BTYPE))
4491 tcc_error("size of variable length array should be an integer");
4492 t1 = VT_VLA;
4495 skip(']');
4496 /* parse next post type */
4497 post_type(type, ad, storage, 0);
4498 if (type->t == VT_FUNC)
4499 tcc_error("declaration of an array of functions");
4500 t1 |= type->t & VT_VLA;
4502 if (t1 & VT_VLA) {
4503 loc -= type_size(&int_type, &align);
4504 loc &= -align;
4505 n = loc;
4507 vla_runtime_type_size(type, &align);
4508 gen_op('*');
4509 vset(&int_type, VT_LOCAL|VT_LVAL, n);
4510 vswap();
4511 vstore();
4513 if (n != -1)
4514 vpop();
4515 nocode_wanted = saved_nocode_wanted;
4517 /* we push an anonymous symbol which will contain the array
4518 element type */
4519 s = sym_push(SYM_FIELD, type, 0, n);
4520 type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
4521 type->ref = s;
4523 return 1;
4526 /* Parse a type declarator (except basic type), and return the type
4527 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4528 expected. 'type' should contain the basic type. 'ad' is the
4529 attribute definition of the basic type. It can be modified by
4530 type_decl(). If this (possibly abstract) declarator is a pointer chain
4531 it returns the innermost pointed to type (equals *type, but is a different
4532 pointer); otherwise it returns type itself, which is used for recursive calls. */
4533 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td)
4535 CType *post, *ret;
4536 int qualifiers, storage;
4538 /* recursive type, remove storage bits first, apply them later again */
4539 storage = type->t & VT_STORAGE;
4540 type->t &= ~VT_STORAGE;
4541 post = ret = type;
4543 while (tok == '*') {
4544 qualifiers = 0;
4545 redo:
4546 next();
4547 switch(tok) {
4548 case TOK_CONST1:
4549 case TOK_CONST2:
4550 case TOK_CONST3:
4551 qualifiers |= VT_CONSTANT;
4552 goto redo;
4553 case TOK_VOLATILE1:
4554 case TOK_VOLATILE2:
4555 case TOK_VOLATILE3:
4556 qualifiers |= VT_VOLATILE;
4557 goto redo;
4558 case TOK_RESTRICT1:
4559 case TOK_RESTRICT2:
4560 case TOK_RESTRICT3:
4561 goto redo;
4562 /* XXX: clarify attribute handling */
4563 case TOK_ATTRIBUTE1:
4564 case TOK_ATTRIBUTE2:
4565 parse_attribute(ad);
4566 break;
4568 mk_pointer(type);
4569 type->t |= qualifiers;
4570 if (ret == type)
4571 /* innermost pointed to type is the one for the first derivation */
4572 ret = pointed_type(type);
4575 if (tok == '(') {
4576 /* This is possibly a parameter type list for abstract declarators
4577 ('int ()'); use post_type to test for this. */
4578 if (!post_type(type, ad, 0, td)) {
4579 /* It's not, so it's a nested declarator, and the post operations
4580 apply to the innermost pointed to type (if any). */
4581 /* XXX: it is not correct to modify 'ad' at this point, but
4582 the syntax is not clear */
4583 parse_attribute(ad);
4584 post = type_decl(type, ad, v, td);
4585 skip(')');
4587 } else if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
4588 /* type identifier */
4589 *v = tok;
4590 next();
4591 } else {
4592 if (!(td & TYPE_ABSTRACT))
4593 expect("identifier");
4594 *v = 0;
4596 post_type(post, ad, storage, 0);
4597 parse_attribute(ad);
4598 type->t |= storage;
4599 return ret;
4602 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
4603 ST_FUNC int lvalue_type(int t)
4605 int bt, r;
4606 r = VT_LVAL;
4607 bt = t & VT_BTYPE;
4608 if (bt == VT_BYTE || bt == VT_BOOL)
4609 r |= VT_LVAL_BYTE;
4610 else if (bt == VT_SHORT)
4611 r |= VT_LVAL_SHORT;
4612 else
4613 return r;
4614 if (t & VT_UNSIGNED)
4615 r |= VT_LVAL_UNSIGNED;
4616 return r;
4619 /* indirection with full error checking and bound check */
4620 ST_FUNC void indir(void)
4622 if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
4623 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
4624 return;
4625 expect("pointer");
4627 if (vtop->r & VT_LVAL)
4628 gv(RC_INT);
4629 vtop->type = *pointed_type(&vtop->type);
4630 /* Arrays and functions are never lvalues */
4631 if (!(vtop->type.t & VT_ARRAY) && !(vtop->type.t & VT_VLA)
4632 && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
4633 vtop->r |= lvalue_type(vtop->type.t);
4634 /* if bound checking, the referenced pointer must be checked */
4635 #ifdef CONFIG_TCC_BCHECK
4636 if (tcc_state->do_bounds_check)
4637 vtop->r |= VT_MUSTBOUND;
4638 #endif
4642 /* pass a parameter to a function and do type checking and casting */
4643 static void gfunc_param_typed(Sym *func, Sym *arg)
4645 int func_type;
4646 CType type;
4648 func_type = func->f.func_type;
4649 if (func_type == FUNC_OLD ||
4650 (func_type == FUNC_ELLIPSIS && arg == NULL)) {
4651 /* default casting : only need to convert float to double */
4652 if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
4653 gen_cast_s(VT_DOUBLE);
4654 } else if (vtop->type.t & VT_BITFIELD) {
4655 type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
4656 type.ref = vtop->type.ref;
4657 gen_cast(&type);
4659 } else if (arg == NULL) {
4660 tcc_error("too many arguments to function");
4661 } else {
4662 type = arg->type;
4663 type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
4664 gen_assign_cast(&type);
4668 /* parse an expression and return its type without any side effect. */
4669 static void expr_type(CType *type, void (*expr_fn)(void))
4671 nocode_wanted++;
4672 expr_fn();
4673 *type = vtop->type;
4674 vpop();
4675 nocode_wanted--;
4678 /* parse an expression of the form '(type)' or '(expr)' and return its
4679 type */
4680 static void parse_expr_type(CType *type)
4682 int n;
4683 AttributeDef ad;
4685 skip('(');
4686 if (parse_btype(type, &ad)) {
4687 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4688 } else {
4689 expr_type(type, gexpr);
4691 skip(')');
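/* parse a type name (basic type plus abstract declarator); errors out
   if no type is present */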
4694 static void parse_type(CType *type)
4696 AttributeDef ad;
4697 int n;
4699 if (!parse_btype(type, &ad)) {
4700 expect("type");
4702 type_decl(type, &ad, &n, TYPE_ABSTRACT);
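/* parse the parenthesized argument list of a builtin; each character of
   'args' selects what is parsed next: 'e' an assignment expression,
   't' a type name.  If 'nc' is set, no code is generated while parsing. */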
4705 static void parse_builtin_params(int nc, const char *args)
4707 char c, sep = '(';
4708 CType t;
4709 if (nc)
4710 nocode_wanted++;
4711 next();
4712 while ((c = *args++)) {
4713 skip(sep);
4714 sep = ',';
4715 switch (c) {
4716 case 'e': expr_eq(); continue;
4717 case 't': parse_type(&t); vpush(&t); continue;
4718 default: tcc_error("internal error"); break;
4721 skip(')');
4722 if (nc)
4723 nocode_wanted--;
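/* emit calls to the registered cleanup handlers (cleanup attribute) of
   the current chain, innermost first, down to but not including 'stop';
   each handler is called with the address of its guarded variable */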
4726 static void try_call_scope_cleanup(Sym *stop)
4728 Sym *cls = current_cleanups;
4730 for (; cls != stop; cls = cls->ncl) {
4731 Sym *fs = cls->next;
4732 Sym *vs = cls->prev_tok;
4734 vpushsym(&fs->type, fs);
4735 vset(&vs->type, vs->r, vs->c);
4736 vtop->sym = vs;
4737 mk_pointer(&vtop->type);
4738 gaddrof();
4739 gfunc_call(1);
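/* for a goto to an already defined label: call the cleanup handlers of
   every scope the jump leaves, i.e. everything on the current chain
   below the nearest common ancestor with the label's chain */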
4743 static void try_call_cleanup_goto(Sym *cleanupstate)
4745 Sym *oc, *cc;
4746 int ocd, ccd;
4748 if (!current_cleanups)
4749 return;
4751 /* search NCA of both cleanup chains given parents and initial depth */
4752 ocd = cleanupstate ? cleanupstate->v & ~SYM_FIELD : 0;
4753 for (ccd = ncleanups, oc = cleanupstate; ocd > ccd; --ocd, oc = oc->ncl)
4755 for (cc = current_cleanups; ccd > ocd; --ccd, cc = cc->ncl)
4757 for (; cc != oc; cc = cc->ncl, oc = oc->ncl, --ccd)
4760 try_call_scope_cleanup(cc);
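/* parse a unary expression: constants, string literals, identifiers,
   casts and compound literals, sizeof/alignof, GNU builtins and prefix
   operators, followed by the postfix operators ++/--, '.', '->', '[]'
   and function calls.  The result is left on the value stack. */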
4763 ST_FUNC void unary(void)
4765 int n, t, align, size, r, sizeof_caller;
4766 CType type;
4767 Sym *s;
4768 AttributeDef ad;
4770 sizeof_caller = in_sizeof;
4771 in_sizeof = 0;
4772 type.ref = NULL;
4773 /* XXX: GCC 2.95.3 does not generate a table although it would be
4774 better here */
4775 tok_next:
4776 switch(tok) {
4777 case TOK_EXTENSION:
4778 next();
4779 goto tok_next;
4780 case TOK_LCHAR:
4781 #ifdef TCC_TARGET_PE
4782 t = VT_SHORT|VT_UNSIGNED;
4783 goto push_tokc;
4784 #endif
4785 case TOK_CINT:
4786 case TOK_CCHAR:
4787 t = VT_INT;
4788 push_tokc:
4789 type.t = t;
4790 vsetc(&type, VT_CONST, &tokc);
4791 next();
4792 break;
4793 case TOK_CUINT:
4794 t = VT_INT | VT_UNSIGNED;
4795 goto push_tokc;
4796 case TOK_CLLONG:
4797 t = VT_LLONG;
4798 goto push_tokc;
4799 case TOK_CULLONG:
4800 t = VT_LLONG | VT_UNSIGNED;
4801 goto push_tokc;
4802 case TOK_CFLOAT:
4803 t = VT_FLOAT;
4804 goto push_tokc;
4805 case TOK_CDOUBLE:
4806 t = VT_DOUBLE;
4807 goto push_tokc;
4808 case TOK_CLDOUBLE:
4809 t = VT_LDOUBLE;
4810 goto push_tokc;
4811 case TOK_CLONG:
4812 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG;
4813 goto push_tokc;
4814 case TOK_CULONG:
4815 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG | VT_UNSIGNED;
4816 goto push_tokc;
4817 case TOK___FUNCTION__:
4818 if (!gnu_ext)
4819 goto tok_identifier;
4820 /* fall thru */
4821 case TOK___FUNC__:
4823 void *ptr;
4824 int len;
4825 /* special function name identifier */
4826 len = strlen(funcname) + 1;
4827 /* generate char[len] type */
4828 type.t = VT_BYTE;
4829 mk_pointer(&type);
4830 type.t |= VT_ARRAY;
4831 type.ref->c = len;
4832 vpush_ref(&type, data_section, data_section->data_offset, len);
4833 if (!NODATA_WANTED) {
4834 ptr = section_ptr_add(data_section, len);
4835 memcpy(ptr, funcname, len);
4837 next();
4839 break;
4840 case TOK_LSTR:
4841 #ifdef TCC_TARGET_PE
4842 t = VT_SHORT | VT_UNSIGNED;
4843 #else
4844 t = VT_INT;
4845 #endif
4846 goto str_init;
4847 case TOK_STR:
4848 /* string parsing */
4849 t = VT_BYTE;
4850 if (tcc_state->char_is_unsigned)
4851 t = VT_BYTE | VT_UNSIGNED;
4852 str_init:
4853 if (tcc_state->warn_write_strings)
4854 t |= VT_CONSTANT;
4855 type.t = t;
4856 mk_pointer(&type);
4857 type.t |= VT_ARRAY;
4858 memset(&ad, 0, sizeof(AttributeDef));
4859 decl_initializer_alloc(&type, &ad, VT_CONST, 2, 0, 0);
4860 break;
4861 case '(':
4862 next();
4863 /* cast ? */
4864 if (parse_btype(&type, &ad)) {
4865 type_decl(&type, &ad, &n, TYPE_ABSTRACT);
4866 skip(')');
4867 /* check ISOC99 compound literal */
4868 if (tok == '{') {
4869 /* data is allocated locally by default */
4870 if (global_expr)
4871 r = VT_CONST;
4872 else
4873 r = VT_LOCAL;
4874 /* all except arrays are lvalues */
4875 if (!(type.t & VT_ARRAY))
4876 r |= lvalue_type(type.t);
4877 memset(&ad, 0, sizeof(AttributeDef));
4878 decl_initializer_alloc(&type, &ad, r, 1, 0, 0);
4879 } else {
4880 if (sizeof_caller) {
4881 vpush(&type);
4882 return;
4884 unary();
4885 gen_cast(&type);
4887 } else if (tok == '{') {
4888 int saved_nocode_wanted = nocode_wanted;
4889 if (const_wanted)
4890 tcc_error("expected constant");
4891 /* save all registers */
4892 save_regs(0);
4893 /* statement expression: we do not accept break/continue
4894 inside as GCC does. We do retain the nocode_wanted state,
4895 as statement expressions can't ever be entered from the
4896 outside, so any reactivation of code emission (from labels
4897 or loop heads) can be disabled again after the end of it. */
4898 block(NULL, NULL, 1);
4899 nocode_wanted = saved_nocode_wanted;
4900 skip(')');
4901 } else {
4902 gexpr();
4903 skip(')');
4905 break;
4906 case '*':
4907 next();
4908 unary();
4909 indir();
4910 break;
4911 case '&':
4912 next();
4913 unary();
4914 /* function names must be treated as function pointers,
4915 except for unary '&' and sizeof. Since we consider that
4916 functions are not lvalues, we only have to handle it
4917 there and in function calls. */
4918 /* arrays can also be used although they are not lvalues */
4919 if ((vtop->type.t & VT_BTYPE) != VT_FUNC &&
4920 !(vtop->type.t & VT_ARRAY))
4921 test_lvalue();
4922 mk_pointer(&vtop->type);
4923 gaddrof();
4924 break;
4925 case '!':
4926 next();
4927 unary();
4928 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4929 gen_cast_s(VT_BOOL);
4930 vtop->c.i = !vtop->c.i;
4931 } else if ((vtop->r & VT_VALMASK) == VT_CMP)
4932 vtop->c.i ^= 1;
4933 else {
4934 save_regs(1);
4935 vseti(VT_JMP, gvtst(1, 0));
4937 break;
4938 case '~':
4939 next();
4940 unary();
4941 vpushi(-1);
4942 gen_op('^');
4943 break;
4944 case '+':
4945 next();
4946 unary();
4947 if ((vtop->type.t & VT_BTYPE) == VT_PTR)
4948 tcc_error("pointer not accepted for unary plus");
4949 /* In order to force a cast, we add zero, except for floating point,
4950 where we really need a no-op (otherwise -0.0 would be transformed
4951 into +0.0). */
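/* (illustration: under IEEE rules (-0.0) + 0.0 yields +0.0, so adding
   zero would change the sign of a negative zero operand) */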
4952 if (!is_float(vtop->type.t)) {
4953 vpushi(0);
4954 gen_op('+');
4956 break;
4957 case TOK_SIZEOF:
4958 case TOK_ALIGNOF1:
4959 case TOK_ALIGNOF2:
4960 case TOK_ALIGNOF3:
4961 t = tok;
4962 next();
4963 in_sizeof++;
4964 expr_type(&type, unary); /* calling unary() here resets in_sizeof to 0 */
4965 s = vtop[1].sym; /* hack: accessing previous vtop */
4966 size = type_size(&type, &align);
4967 if (s && s->a.aligned)
4968 align = 1 << (s->a.aligned - 1);
4969 if (t == TOK_SIZEOF) {
4970 if (!(type.t & VT_VLA)) {
4971 if (size < 0)
4972 tcc_error("sizeof applied to an incomplete type");
4973 vpushs(size);
4974 } else {
4975 vla_runtime_type_size(&type, &align);
4977 } else {
4978 vpushs(align);
4980 vtop->type.t |= VT_UNSIGNED;
4981 break;
4983 case TOK_builtin_expect:
4984 /* __builtin_expect is a no-op for now */
4985 parse_builtin_params(0, "ee");
4986 vpop();
4987 break;
4988 case TOK_builtin_types_compatible_p:
4989 parse_builtin_params(0, "tt");
4990 vtop[-1].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
4991 vtop[0].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
4992 n = is_compatible_types(&vtop[-1].type, &vtop[0].type);
4993 vtop -= 2;
4994 vpushi(n);
4995 break;
4996 case TOK_builtin_choose_expr:
4998 int64_t c;
4999 next();
5000 skip('(');
5001 c = expr_const64();
5002 skip(',');
5003 if (!c) {
5004 nocode_wanted++;
5006 expr_eq();
5007 if (!c) {
5008 vpop();
5009 nocode_wanted--;
5011 skip(',');
5012 if (c) {
5013 nocode_wanted++;
5015 expr_eq();
5016 if (c) {
5017 vpop();
5018 nocode_wanted--;
5020 skip(')');
5022 break;
5023 case TOK_builtin_constant_p:
5024 parse_builtin_params(1, "e");
5025 n = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
5026 vtop--;
5027 vpushi(n);
5028 break;
5029 case TOK_builtin_frame_address:
5030 case TOK_builtin_return_address:
5032 int tok1 = tok;
5033 int level;
5034 next();
5035 skip('(');
5036 if (tok != TOK_CINT) {
5037 tcc_error("%s only takes positive integers",
5038 tok1 == TOK_builtin_return_address ?
5039 "__builtin_return_address" :
5040 "__builtin_frame_address");
5042 level = (uint32_t)tokc.i;
5043 next();
5044 skip(')');
5045 type.t = VT_VOID;
5046 mk_pointer(&type);
5047 vset(&type, VT_LOCAL, 0); /* local frame */
5048 while (level--) {
5049 mk_pointer(&vtop->type);
5050 indir(); /* -> parent frame */
5052 if (tok1 == TOK_builtin_return_address) {
5053 // assume return address is just above frame pointer on stack
5054 vpushi(PTR_SIZE);
5055 gen_op('+');
5056 mk_pointer(&vtop->type);
5057 indir();
5060 break;
5061 #ifdef TCC_TARGET_X86_64
5062 #ifdef TCC_TARGET_PE
5063 case TOK_builtin_va_start:
5064 parse_builtin_params(0, "ee");
5065 r = vtop->r & VT_VALMASK;
5066 if (r == VT_LLOCAL)
5067 r = VT_LOCAL;
5068 if (r != VT_LOCAL)
5069 tcc_error("__builtin_va_start expects a local variable");
5070 vtop->r = r;
5071 vtop->type = char_pointer_type;
5072 vtop->c.i += 8;
5073 vstore();
5074 break;
5075 #else
5076 case TOK_builtin_va_arg_types:
5077 parse_builtin_params(0, "t");
5078 vpushi(classify_x86_64_va_arg(&vtop->type));
5079 vswap();
5080 vpop();
5081 break;
5082 #endif
5083 #endif
5085 #ifdef TCC_TARGET_ARM64
5086 case TOK___va_start: {
5087 parse_builtin_params(0, "ee");
5088 //xx check types
5089 gen_va_start();
5090 vpushi(0);
5091 vtop->type.t = VT_VOID;
5092 break;
5094 case TOK___va_arg: {
5095 parse_builtin_params(0, "et");
5096 type = vtop->type;
5097 vpop();
5098 //xx check types
5099 gen_va_arg(&type);
5100 vtop->type = type;
5101 break;
5103 case TOK___arm64_clear_cache: {
5104 parse_builtin_params(0, "ee");
5105 gen_clear_cache();
5106 vpushi(0);
5107 vtop->type.t = VT_VOID;
5108 break;
5110 #endif
5111 /* pre operations */
5112 case TOK_INC:
5113 case TOK_DEC:
5114 t = tok;
5115 next();
5116 unary();
5117 inc(0, t);
5118 break;
5119 case '-':
5120 next();
5121 unary();
5122 t = vtop->type.t & VT_BTYPE;
5123 if (is_float(t)) {
5124 /* In IEEE negate(x) isn't subtract(0,x), but rather
5125 subtract(-0, x). */
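/* (illustration: 0.0 - 0.0 gives +0.0 while -0.0 - 0.0 gives -0.0,
   so the minuend must be -0.0 to preserve the sign of a zero operand) */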
5126 vpush(&vtop->type);
5127 if (t == VT_FLOAT)
5128 vtop->c.f = -1.0 * 0.0;
5129 else if (t == VT_DOUBLE)
5130 vtop->c.d = -1.0 * 0.0;
5131 else
5132 vtop->c.ld = -1.0 * 0.0;
5133 } else
5134 vpushi(0);
5135 vswap();
5136 gen_op('-');
5137 break;
5138 case TOK_LAND:
5139 if (!gnu_ext)
5140 goto tok_identifier;
5141 next();
5142 /* allow taking the address of a label */
5143 if (tok < TOK_UIDENT)
5144 expect("label identifier");
5145 s = label_find(tok);
5146 if (!s) {
5147 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
5148 } else {
5149 if (s->r == LABEL_DECLARED)
5150 s->r = LABEL_FORWARD;
5152 if (!s->type.t) {
5153 s->type.t = VT_VOID;
5154 mk_pointer(&s->type);
5155 s->type.t |= VT_STATIC;
5157 vpushsym(&s->type, s);
5158 next();
5159 break;
5161 case TOK_GENERIC:
5163 CType controlling_type;
5164 int has_default = 0;
5165 int has_match = 0;
5166 int learn = 0;
5167 TokenString *str = NULL;
5168 int saved_const_wanted = const_wanted;
5170 next();
5171 skip('(');
5172 const_wanted = 0;
5173 expr_type(&controlling_type, expr_eq);
5174 controlling_type.t &= ~(VT_CONSTANT | VT_VOLATILE | VT_ARRAY);
5175 if ((controlling_type.t & VT_BTYPE) == VT_FUNC)
5176 mk_pointer(&controlling_type);
5177 const_wanted = saved_const_wanted;
5178 for (;;) {
5179 learn = 0;
5180 skip(',');
5181 if (tok == TOK_DEFAULT) {
5182 if (has_default)
5183 tcc_error("too many 'default'");
5184 has_default = 1;
5185 if (!has_match)
5186 learn = 1;
5187 next();
5188 } else {
5189 AttributeDef ad_tmp;
5190 int itmp;
5191 CType cur_type;
5192 parse_btype(&cur_type, &ad_tmp);
5193 type_decl(&cur_type, &ad_tmp, &itmp, TYPE_ABSTRACT);
5194 if (compare_types(&controlling_type, &cur_type, 0)) {
5195 if (has_match) {
5196 tcc_error("type match twice");
5198 has_match = 1;
5199 learn = 1;
5202 skip(':');
5203 if (learn) {
5204 if (str)
5205 tok_str_free(str);
5206 skip_or_save_block(&str);
5207 } else {
5208 skip_or_save_block(NULL);
5210 if (tok == ')')
5211 break;
5213 if (!str) {
5214 char buf[60];
5215 type_to_str(buf, sizeof buf, &controlling_type, NULL);
5216 tcc_error("type '%s' does not match any association", buf);
5218 begin_macro(str, 1);
5219 next();
5220 expr_eq();
5221 if (tok != TOK_EOF)
5222 expect(",");
5223 end_macro();
5224 next();
5225 break;
5227 // special qnan, snan and infinity values
5228 case TOK___NAN__:
5229 n = 0x7fc00000;
5230 special_math_val:
5231 vpushi(n);
5232 vtop->type.t = VT_FLOAT;
5233 next();
5234 break;
5235 case TOK___SNAN__:
5236 n = 0x7f800001;
5237 goto special_math_val;
5238 case TOK___INF__:
5239 n = 0x7f800000;
5240 goto special_math_val;
5242 default:
5243 tok_identifier:
5244 t = tok;
5245 next();
5246 if (t < TOK_UIDENT)
5247 expect("identifier");
5248 s = sym_find(t);
5249 if (!s || IS_ASM_SYM(s)) {
5250 const char *name = get_tok_str(t, NULL);
5251 if (tok != '(')
5252 tcc_error("'%s' undeclared", name);
5253 /* for simple function calls, we tolerate an undeclared
5254 external reference to an int() function */
5255 if (tcc_state->warn_implicit_function_declaration
5256 #ifdef TCC_TARGET_PE
5257 /* people must be warned about using undeclared WINAPI functions
5258 (which usually start with an uppercase letter) */
5259 || (name[0] >= 'A' && name[0] <= 'Z')
5260 #endif
5262 tcc_warning("implicit declaration of function '%s'", name);
5263 s = external_global_sym(t, &func_old_type, 0);
5266 r = s->r;
5267 /* A symbol that has a register is a local register variable,
5268 which starts out as a VT_LOCAL value. */
5269 if ((r & VT_VALMASK) < VT_CONST)
5270 r = (r & ~VT_VALMASK) | VT_LOCAL;
5272 vset(&s->type, r, s->c);
5273 /* Point to s as backpointer (even without r&VT_SYM).
5274 Will be used by at least the x86 inline asm parser for
5275 regvars. */
5276 vtop->sym = s;
5278 if (r & VT_SYM) {
5279 vtop->c.i = 0;
5280 } else if (r == VT_CONST && IS_ENUM_VAL(s->type.t)) {
5281 vtop->c.i = s->enum_val;
5283 break;
5286 /* post operations */
5287 while (1) {
5288 if (tok == TOK_INC || tok == TOK_DEC) {
5289 inc(1, tok);
5290 next();
5291 } else if (tok == '.' || tok == TOK_ARROW || tok == TOK_CDOUBLE) {
5292 int qualifiers;
5293 /* field */
5294 if (tok == TOK_ARROW)
5295 indir();
5296 qualifiers = vtop->type.t & (VT_CONSTANT | VT_VOLATILE);
5297 test_lvalue();
5298 gaddrof();
5299 /* expect pointer on structure */
5300 if ((vtop->type.t & VT_BTYPE) != VT_STRUCT)
5301 expect("struct or union");
5302 if (tok == TOK_CDOUBLE)
5303 expect("field name");
5304 next();
5305 if (tok == TOK_CINT || tok == TOK_CUINT)
5306 expect("field name");
5307 s = find_field(&vtop->type, tok);
5308 if (!s)
5309 tcc_error("field not found: %s", get_tok_str(tok & ~SYM_FIELD, &tokc));
5310 /* add field offset to pointer */
5311 vtop->type = char_pointer_type; /* change type to 'char *' */
5312 vpushi(s->c);
5313 gen_op('+');
5314 /* change type to field type, and set to lvalue */
5315 vtop->type = s->type;
5316 vtop->type.t |= qualifiers;
5317 /* an array is never an lvalue */
5318 if (!(vtop->type.t & VT_ARRAY)) {
5319 vtop->r |= lvalue_type(vtop->type.t);
5320 #ifdef CONFIG_TCC_BCHECK
5321 /* if bound checking, the referenced pointer must be checked */
5322 if (tcc_state->do_bounds_check && (vtop->r & VT_VALMASK) != VT_LOCAL)
5323 vtop->r |= VT_MUSTBOUND;
5324 #endif
5326 next();
5327 } else if (tok == '[') {
5328 next();
5329 gexpr();
5330 gen_op('+');
5331 indir();
5332 skip(']');
5333 } else if (tok == '(') {
5334 SValue ret;
5335 Sym *sa;
5336 int nb_args, ret_nregs, ret_align, regsize, variadic;
5338 /* function call */
5339 if ((vtop->type.t & VT_BTYPE) != VT_FUNC) {
5340 /* pointer test (no array accepted) */
5341 if ((vtop->type.t & (VT_BTYPE | VT_ARRAY)) == VT_PTR) {
5342 vtop->type = *pointed_type(&vtop->type);
5343 if ((vtop->type.t & VT_BTYPE) != VT_FUNC)
5344 goto error_func;
5345 } else {
5346 error_func:
5347 expect("function pointer");
5349 } else {
5350 vtop->r &= ~VT_LVAL; /* no lvalue */
5352 /* get return type */
5353 s = vtop->type.ref;
5354 next();
5355 sa = s->next; /* first parameter */
5356 nb_args = regsize = 0;
5357 ret.r2 = VT_CONST;
5358 /* compute first implicit argument if a structure is returned */
5359 if ((s->type.t & VT_BTYPE) == VT_STRUCT) {
5360 variadic = (s->f.func_type == FUNC_ELLIPSIS);
5361 ret_nregs = gfunc_sret(&s->type, variadic, &ret.type,
5362 &ret_align, &regsize);
5363 if (!ret_nregs) {
5364 /* get some space for the returned structure */
5365 size = type_size(&s->type, &align);
5366 #ifdef TCC_TARGET_ARM64
5367 /* On arm64, a small struct is returned in registers.
5368 It is much easier to write it to memory if we know
5369 that we are allowed to write some extra bytes, so
5370 round the allocated space up to a power of 2: */
5371 if (size < 16)
5372 while (size & (size - 1))
5373 size = (size | (size - 1)) + 1;
5374 #endif
5375 loc = (loc - size) & -align;
5376 ret.type = s->type;
5377 ret.r = VT_LOCAL | VT_LVAL;
5378 /* pass it as 'int' to avoid structure arg passing
5379 problems */
5380 vseti(VT_LOCAL, loc);
5381 ret.c = vtop->c;
5382 nb_args++;
5384 } else {
5385 ret_nregs = 1;
5386 ret.type = s->type;
5389 if (ret_nregs) {
5390 /* return in register */
5391 if (is_float(ret.type.t)) {
5392 ret.r = reg_fret(ret.type.t);
5393 #ifdef TCC_TARGET_X86_64
5394 if ((ret.type.t & VT_BTYPE) == VT_QFLOAT)
5395 ret.r2 = REG_QRET;
5396 #endif
5397 } else {
5398 #ifndef TCC_TARGET_ARM64
5399 #ifdef TCC_TARGET_X86_64
5400 if ((ret.type.t & VT_BTYPE) == VT_QLONG)
5401 #else
5402 if ((ret.type.t & VT_BTYPE) == VT_LLONG)
5403 #endif
5404 ret.r2 = REG_LRET;
5405 #endif
5406 ret.r = REG_IRET;
5408 ret.c.i = 0;
5410 if (tok != ')') {
5411 for(;;) {
5412 expr_eq();
5413 gfunc_param_typed(s, sa);
5414 nb_args++;
5415 if (sa)
5416 sa = sa->next;
5417 if (tok == ')')
5418 break;
5419 skip(',');
5422 if (sa)
5423 tcc_error("too few arguments to function");
5424 skip(')');
5425 gfunc_call(nb_args);
5427 /* return value */
5428 for (r = ret.r + ret_nregs + !ret_nregs; r-- > ret.r;) {
5429 vsetc(&ret.type, r, &ret.c);
5430 vtop->r2 = ret.r2; /* Loop only happens when r2 is VT_CONST */
5433 /* handle packed struct return */
5434 if (((s->type.t & VT_BTYPE) == VT_STRUCT) && ret_nregs) {
5435 int addr, offset;
5437 size = type_size(&s->type, &align);
5438 /* We're writing whole regs often, make sure there's enough
5439 space. Assume register size is power of 2. */
5440 if (regsize > align)
5441 align = regsize;
5442 loc = (loc - size) & -align;
5443 addr = loc;
5444 offset = 0;
5445 for (;;) {
5446 vset(&ret.type, VT_LOCAL | VT_LVAL, addr + offset);
5447 vswap();
5448 vstore();
5449 vtop--;
5450 if (--ret_nregs == 0)
5451 break;
5452 offset += regsize;
5454 vset(&s->type, VT_LOCAL | VT_LVAL, addr);
5456 } else {
5457 break;
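/* binary expression parsing: each of the following expr_xxx functions
   handles one precedence level and uses the next higher-precedence
   parser for its operands */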
5462 ST_FUNC void expr_prod(void)
5464 int t;
5466 unary();
5467 while (tok == '*' || tok == '/' || tok == '%') {
5468 t = tok;
5469 next();
5470 unary();
5471 gen_op(t);
5475 ST_FUNC void expr_sum(void)
5477 int t;
5479 expr_prod();
5480 while (tok == '+' || tok == '-') {
5481 t = tok;
5482 next();
5483 expr_prod();
5484 gen_op(t);
5488 static void expr_shift(void)
5490 int t;
5492 expr_sum();
5493 while (tok == TOK_SHL || tok == TOK_SAR) {
5494 t = tok;
5495 next();
5496 expr_sum();
5497 gen_op(t);
5501 static void expr_cmp(void)
5503 int t;
5505 expr_shift();
5506 while ((tok >= TOK_ULE && tok <= TOK_GT) ||
5507 tok == TOK_ULT || tok == TOK_UGE) {
5508 t = tok;
5509 next();
5510 expr_shift();
5511 gen_op(t);
5515 static void expr_cmpeq(void)
5517 int t;
5519 expr_cmp();
5520 while (tok == TOK_EQ || tok == TOK_NE) {
5521 t = tok;
5522 next();
5523 expr_cmp();
5524 gen_op(t);
5528 static void expr_and(void)
5530 expr_cmpeq();
5531 while (tok == '&') {
5532 next();
5533 expr_cmpeq();
5534 gen_op('&');
5538 static void expr_xor(void)
5540 expr_and();
5541 while (tok == '^') {
5542 next();
5543 expr_and();
5544 gen_op('^');
5548 static void expr_or(void)
5550 expr_xor();
5551 while (tok == '|') {
5552 next();
5553 expr_xor();
5554 gen_op('|');
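/* '&&' with constant folding and short-circuit evaluation: once a
   constant false operand is seen, the remaining operands are parsed
   with code generation disabled */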
5558 static void expr_land(void)
5560 expr_or();
5561 if (tok == TOK_LAND) {
5562 int t = 0;
5563 for(;;) {
5564 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5565 gen_cast_s(VT_BOOL);
5566 if (vtop->c.i) {
5567 vpop();
5568 } else {
5569 nocode_wanted++;
5570 while (tok == TOK_LAND) {
5571 next();
5572 expr_or();
5573 vpop();
5575 nocode_wanted--;
5576 if (t)
5577 gsym(t);
5578 gen_cast_s(VT_INT);
5579 break;
5581 } else {
5582 if (!t)
5583 save_regs(1);
5584 t = gvtst(1, t);
5586 if (tok != TOK_LAND) {
5587 if (t)
5588 vseti(VT_JMPI, t);
5589 else
5590 vpushi(1);
5591 break;
5593 next();
5594 expr_or();
5599 static void expr_lor(void)
5601 expr_land();
5602 if (tok == TOK_LOR) {
5603 int t = 0;
5604 for(;;) {
5605 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5606 gen_cast_s(VT_BOOL);
5607 if (!vtop->c.i) {
5608 vpop();
5609 } else {
5610 nocode_wanted++;
5611 while (tok == TOK_LOR) {
5612 next();
5613 expr_land();
5614 vpop();
5616 nocode_wanted--;
5617 if (t)
5618 gsym(t);
5619 gen_cast_s(VT_INT);
5620 break;
5622 } else {
5623 if (!t)
5624 save_regs(1);
5625 t = gvtst(0, t);
5627 if (tok != TOK_LOR) {
5628 if (t)
5629 vseti(VT_JMP, t);
5630 else
5631 vpushi(0);
5632 break;
5634 next();
5635 expr_land();
5640 /* Assuming vtop is a value used in a conditional context
5641 (i.e. compared with zero) return 0 if it's false, 1 if
5642 true and -1 if it can't be statically determined. */
5643 static int condition_3way(void)
5645 int c = -1;
5646 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
5647 (!(vtop->r & VT_SYM) || !vtop->sym->a.weak)) {
5648 vdup();
5649 gen_cast_s(VT_BOOL);
5650 c = vtop->c.i;
5651 vpop();
5653 return c;
5656 static void expr_cond(void)
5658 int tt, u, r1, r2, rc, t1, t2, bt1, bt2, islv, c, g;
5659 SValue sv;
5660 CType type, type1, type2;
5662 expr_lor();
5663 if (tok == '?') {
5664 next();
5665 c = condition_3way();
5666 g = (tok == ':' && gnu_ext);
5667 if (c < 0) {
5668 /* needed to avoid having different registers saved in
5669 each branch */
5670 if (is_float(vtop->type.t)) {
5671 rc = RC_FLOAT;
5672 #ifdef TCC_TARGET_X86_64
5673 if ((vtop->type.t & VT_BTYPE) == VT_LDOUBLE) {
5674 rc = RC_ST0;
5676 #endif
5677 } else
5678 rc = RC_INT;
5679 gv(rc);
5680 save_regs(1);
5681 if (g)
5682 gv_dup();
5683 tt = gvtst(1, 0);
5685 } else {
5686 if (!g)
5687 vpop();
5688 tt = 0;
5691 if (1) {
5692 if (c == 0)
5693 nocode_wanted++;
5694 if (!g)
5695 gexpr();
5697 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
5698 mk_pointer(&vtop->type);
5699 type1 = vtop->type;
5700 sv = *vtop; /* save value to handle it later */
5701 vtop--; /* no vpop so that FP stack is not flushed */
5702 skip(':');
5704 u = 0;
5705 if (c < 0)
5706 u = gjmp(0);
5707 gsym(tt);
5709 if (c == 0)
5710 nocode_wanted--;
5711 if (c == 1)
5712 nocode_wanted++;
5713 expr_cond();
5714 if (c == 1)
5715 nocode_wanted--;
5717 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
5718 mk_pointer(&vtop->type);
5719 type2=vtop->type;
5720 t1 = type1.t;
5721 bt1 = t1 & VT_BTYPE;
5722 t2 = type2.t;
5723 bt2 = t2 & VT_BTYPE;
5724 type.ref = NULL;
5727 /* cast operands to correct type according to ISOC rules */
5728 if (bt1 == VT_VOID || bt2 == VT_VOID) {
5729 type.t = VT_VOID; /* NOTE: as an extension, we accept void on only one side */
5730 } else if (is_float(bt1) || is_float(bt2)) {
5731 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
5732 type.t = VT_LDOUBLE;
5734 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
5735 type.t = VT_DOUBLE;
5736 } else {
5737 type.t = VT_FLOAT;
5739 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
5740 /* cast to biggest op */
5741 type.t = VT_LLONG | VT_LONG;
5742 if (bt1 == VT_LLONG)
5743 type.t &= t1;
5744 if (bt2 == VT_LLONG)
5745 type.t &= t2;
5746 /* convert to unsigned if it does not fit in a long long */
5747 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
5748 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
5749 type.t |= VT_UNSIGNED;
5750 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
5751 /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
5752 /* If one is a null ptr constant the result type
5753 is the other. */
5754 if (is_null_pointer (vtop)) type = type1;
5755 else if (is_null_pointer (&sv)) type = type2;
5756 else if (bt1 != bt2)
5757 tcc_error("incompatible types in conditional expressions");
5758 else {
5759 CType *pt1 = pointed_type(&type1);
5760 CType *pt2 = pointed_type(&type2);
5761 int pbt1 = pt1->t & VT_BTYPE;
5762 int pbt2 = pt2->t & VT_BTYPE;
5763 int newquals, copied = 0;
5764 /* pointers to void get preferred, otherwise the
5765 pointed to types minus qualifs should be compatible */
5766 type = (pbt1 == VT_VOID) ? type1 : type2;
5767 if (pbt1 != VT_VOID && pbt2 != VT_VOID) {
5768 if(!compare_types(pt1, pt2, 1/*unqualif*/))
5769 tcc_warning("pointer type mismatch in conditional expression\n");
5771 /* combine qualifs */
5772 newquals = ((pt1->t | pt2->t) & (VT_CONSTANT | VT_VOLATILE));
5773 if ((~pointed_type(&type)->t & (VT_CONSTANT | VT_VOLATILE))
5774 & newquals)
5776 /* copy the pointer target symbol */
5777 type.ref = sym_push(SYM_FIELD, &type.ref->type,
5778 0, type.ref->c);
5779 copied = 1;
5780 pointed_type(&type)->t |= newquals;
5782 /* pointers to incomplete arrays get converted to
5783 pointers to completed ones if possible */
5784 if (pt1->t & VT_ARRAY
5785 && pt2->t & VT_ARRAY
5786 && pointed_type(&type)->ref->c < 0
5787 && (pt1->ref->c > 0 || pt2->ref->c > 0))
5789 if (!copied)
5790 type.ref = sym_push(SYM_FIELD, &type.ref->type,
5791 0, type.ref->c);
5792 pointed_type(&type)->ref =
5793 sym_push(SYM_FIELD, &pointed_type(&type)->ref->type,
5794 0, pointed_type(&type)->ref->c);
5795 pointed_type(&type)->ref->c =
5796 0 < pt1->ref->c ? pt1->ref->c : pt2->ref->c;
5799 } else if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
5800 /* XXX: test structure compatibility */
5801 type = bt1 == VT_STRUCT ? type1 : type2;
5802 } else {
5803 /* integer operations */
5804 type.t = VT_INT | (VT_LONG & (t1 | t2));
5805 /* convert to unsigned if it does not fit in an integer */
5806 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
5807 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
5808 type.t |= VT_UNSIGNED;
5810 /* keep structs as lvalues by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
5811 that `(expr ? a : b).mem` does not error with "lvalue expected" */
5812 islv = (vtop->r & VT_LVAL) && (sv.r & VT_LVAL) && VT_STRUCT == (type.t & VT_BTYPE);
5814 /* now we convert second operand */
5815 if (c != 1) {
5816 gen_cast(&type);
5817 if (islv) {
5818 mk_pointer(&vtop->type);
5819 gaddrof();
5820 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5821 gaddrof();
5824 rc = RC_INT;
5825 if (is_float(type.t)) {
5826 rc = RC_FLOAT;
5827 #ifdef TCC_TARGET_X86_64
5828 if ((type.t & VT_BTYPE) == VT_LDOUBLE) {
5829 rc = RC_ST0;
5831 #endif
5832 } else if ((type.t & VT_BTYPE) == VT_LLONG) {
5833 /* for long longs, we use fixed registers to avoid having
5834 to handle a complicated move */
5835 rc = RC_IRET;
5838 tt = r2 = 0;
5839 if (c < 0) {
5840 r2 = gv(rc);
5841 tt = gjmp(0);
5843 gsym(u);
5845 /* this is horrible, but we must also convert the first
5846 operand */
5847 if (c != 0) {
5848 *vtop = sv;
5849 gen_cast(&type);
5850 if (islv) {
5851 mk_pointer(&vtop->type);
5852 gaddrof();
5853 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5854 gaddrof();
5857 if (c < 0 || islv) {
5858 r1 = gv(rc);
5859 move_reg(r2, r1, type.t);
5860 vtop->r = r2;
5861 gsym(tt);
5862 if (islv)
5863 indir();
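/* parse an assignment expression: a conditional expression optionally
   followed by '=' or a compound assignment operator */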
5869 static void expr_eq(void)
5871 int t;
5873 expr_cond();
5874 if (tok == '=' ||
5875 (tok >= TOK_A_MOD && tok <= TOK_A_DIV) ||
5876 tok == TOK_A_XOR || tok == TOK_A_OR ||
5877 tok == TOK_A_SHL || tok == TOK_A_SAR) {
5878 test_lvalue();
5879 t = tok;
5880 next();
5881 if (t == '=') {
5882 expr_eq();
5883 } else {
5884 vdup();
5885 expr_eq();
5886 gen_op(t & 0x7f);
5888 vstore();
5892 ST_FUNC void gexpr(void)
5894 while (1) {
5895 expr_eq();
5896 if (tok != ',')
5897 break;
5898 vpop();
5899 next();
5903 /* parse a constant expression and return value in vtop. */
5904 static void expr_const1(void)
5906 const_wanted++;
5907 nocode_wanted++;
5908 expr_cond();
5909 nocode_wanted--;
5910 const_wanted--;
5913 /* parse an integer constant and return its value. */
5914 static inline int64_t expr_const64(void)
5916 int64_t c;
5917 expr_const1();
5918 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
5919 expect("constant expression");
5920 c = vtop->c.i;
5921 vpop();
5922 return c;
5925 /* parse an integer constant and return its value.
5926 Complain if it doesn't fit in 32 bits (signed or unsigned). */
5927 ST_FUNC int expr_const(void)
5929 int c;
5930 int64_t wc = expr_const64();
5931 c = wc;
5932 if (c != wc && (unsigned)c != wc)
5933 tcc_error("constant exceeds 32 bit");
5934 return c;
5937 /* return the label token if current token is a label, otherwise
5938 return zero */
5939 static int is_label(void)
5941 int last_tok;
5943 /* fast test first */
5944 if (tok < TOK_UIDENT)
5945 return 0;
5946 /* no need to save tokc because tok is an identifier */
5947 last_tok = tok;
5948 next();
5949 if (tok == ':') {
5950 return last_tok;
5951 } else {
5952 unget_tok(last_tok);
5953 return 0;
5957 #ifndef TCC_TARGET_ARM64
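/* move the value on top of the value stack into the function return
   location: a returned struct is either copied through the implicit
   return pointer or packed into registers, scalars go into the return
   register(s) */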
5958 static void gfunc_return(CType *func_type)
5960 if ((func_type->t & VT_BTYPE) == VT_STRUCT) {
5961 CType type, ret_type;
5962 int ret_align, ret_nregs, regsize;
5963 ret_nregs = gfunc_sret(func_type, func_var, &ret_type,
5964 &ret_align, &regsize);
5965 if (0 == ret_nregs) {
5966 /* if returning a structure, we must copy it to the implicit
5967 first pointer arg location */
5968 type = *func_type;
5969 mk_pointer(&type);
5970 vset(&type, VT_LOCAL | VT_LVAL, func_vc);
5971 indir();
5972 vswap();
5973 /* copy structure value to pointer */
5974 vstore();
5975 } else {
5976 /* returning structure packed into registers */
5977 int r, size, addr, align;
5978 size = type_size(func_type,&align);
5979 if ((vtop->r != (VT_LOCAL | VT_LVAL) ||
5980 (vtop->c.i & (ret_align-1)))
5981 && (align & (ret_align-1))) {
5982 loc = (loc - size) & -ret_align;
5983 addr = loc;
5984 type = *func_type;
5985 vset(&type, VT_LOCAL | VT_LVAL, addr);
5986 vswap();
5987 vstore();
5988 vpop();
5989 vset(&ret_type, VT_LOCAL | VT_LVAL, addr);
5991 vtop->type = ret_type;
5992 if (is_float(ret_type.t))
5993 r = rc_fret(ret_type.t);
5994 else
5995 r = RC_IRET;
5997 if (ret_nregs == 1)
5998 gv(r);
5999 else {
6000 for (;;) {
6001 vdup();
6002 gv(r);
6003 vpop();
6004 if (--ret_nregs == 0)
6005 break;
6006 /* We assume that when a structure is returned in multiple
6007 registers, their classes are consecutive values of the
6008 sequence s(n) = 2^n */
6009 r <<= 1;
6010 vtop->c.i += regsize;
6014 } else if (is_float(func_type->t)) {
6015 gv(rc_fret(func_type->t));
6016 } else {
6017 gv(RC_IRET);
6019 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
6021 #endif
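/* qsort comparison function for case ranges, ordering by the lower
   bound v1 */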
6023 static int case_cmp(const void *pa, const void *pb)
6025 int64_t a = (*(struct case_t**) pa)->v1;
6026 int64_t b = (*(struct case_t**) pb)->v1;
6027 return a < b ? -1 : a > b;
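/* generate the dispatch code for a sorted array of case ranges: binary
   search while more than 4 entries remain, then a linear scan */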
6030 static void gcase(struct case_t **base, int len, int *bsym)
6032 struct case_t *p;
6033 int e;
6034 int ll = (vtop->type.t & VT_BTYPE) == VT_LLONG;
6035 gv(RC_INT);
6036 while (len > 4) {
6037 /* binary search */
6038 p = base[len/2];
6039 vdup();
6040 if (ll)
6041 vpushll(p->v2);
6042 else
6043 vpushi(p->v2);
6044 gen_op(TOK_LE);
6045 e = gtst(1, 0);
6046 vdup();
6047 if (ll)
6048 vpushll(p->v1);
6049 else
6050 vpushi(p->v1);
6051 gen_op(TOK_GE);
6052 gtst_addr(0, p->sym); /* v1 <= x <= v2 */
6053 /* x < v1 */
6054 gcase(base, len/2, bsym);
6055 if (cur_switch->def_sym)
6056 gjmp_addr(cur_switch->def_sym);
6057 else
6058 *bsym = gjmp(*bsym);
6059 /* x > v2 */
6060 gsym(e);
6061 e = len/2 + 1;
6062 base += e; len -= e;
6064 /* linear scan */
6065 while (len--) {
6066 p = *base++;
6067 vdup();
6068 if (ll)
6069 vpushll(p->v2);
6070 else
6071 vpushi(p->v2);
6072 if (p->v1 == p->v2) {
6073 gen_op(TOK_EQ);
6074 gtst_addr(0, p->sym);
6075 } else {
6076 gen_op(TOK_LE);
6077 e = gtst(1, 0);
6078 vdup();
6079 if (ll)
6080 vpushll(p->v1);
6081 else
6082 vpushi(p->v1);
6083 gen_op(TOK_GE);
6084 gtst_addr(0, p->sym);
6085 gsym(e);
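/* parse and generate code for one statement (or a statement expression
   when 'is_expr' is set); 'bsym' and 'csym' are the pending break and
   continue jump chains of the enclosing loop or switch */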
6090 static void block(int *bsym, int *csym, int is_expr)
6092 int a, b, c, d, cond;
6093 Sym *s;
6095 /* generate line number info */
6096 if (tcc_state->do_debug)
6097 tcc_debug_line(tcc_state);
6099 if (is_expr) {
6100 /* default return value is (void) */
6101 vpushi(0);
6102 vtop->type.t = VT_VOID;
6105 if (tok == TOK_IF) {
6106 /* if test */
6107 int saved_nocode_wanted = nocode_wanted;
6108 next();
6109 skip('(');
6110 gexpr();
6111 skip(')');
6112 cond = condition_3way();
6113 if (cond == 1)
6114 a = 0, vpop();
6115 else
6116 a = gvtst(1, 0);
6117 if (cond == 0)
6118 nocode_wanted |= 0x20000000;
6119 block(bsym, csym, 0);
6120 if (cond != 1)
6121 nocode_wanted = saved_nocode_wanted;
6122 if (tok == TOK_ELSE) {
6123 next();
6124 d = gjmp(0);
6125 gsym(a);
6126 if (cond == 1)
6127 nocode_wanted |= 0x20000000;
6128 block(bsym, csym, 0);
6129 gsym(d); /* patch else jmp */
6130 if (cond != 0)
6131 nocode_wanted = saved_nocode_wanted;
6132 } else
6133 gsym(a);
6134 } else if (tok == TOK_WHILE) {
6135 int saved_nocode_wanted;
6136 nocode_wanted &= ~0x20000000;
6137 next();
6138 d = ind;
6139 vla_sp_restore();
6140 skip('(');
6141 gexpr();
6142 skip(')');
6143 a = gvtst(1, 0);
6144 b = 0;
6145 ++local_scope;
6146 saved_nocode_wanted = nocode_wanted;
6147 block(&a, &b, 0);
6148 nocode_wanted = saved_nocode_wanted;
6149 --local_scope;
6150 gjmp_addr(d);
6151 gsym(a);
6152 gsym_addr(b, d);
6153 } else if (tok == '{') {
6154 Sym *llabel, *lcleanup;
6155 int block_vla_sp_loc = vla_sp_loc, saved_vlas_in_scope = vlas_in_scope;
6156 int lncleanups = ncleanups;
6158 next();
6159 /* record local declaration stack position */
6160 s = local_stack;
6161 llabel = local_label_stack;
6162 lcleanup = current_cleanups;
6163 ++local_scope;
6165 /* handle local labels declarations */
6166 while (tok == TOK_LABEL) {
6167 next();
6168 for(;;) {
6169 if (tok < TOK_UIDENT)
6170 expect("label identifier");
6171 label_push(&local_label_stack, tok, LABEL_DECLARED);
6172 next();
6173 if (tok == ',') {
6174 next();
6175 } else {
6176 skip(';');
6177 break;
6181 while (tok != '}') {
6182 if ((a = is_label()))
6183 unget_tok(a);
6184 else
6185 decl(VT_LOCAL);
6186 if (tok != '}') {
6187 if (is_expr)
6188 vpop();
6189 block(bsym, csym, is_expr);
6193 if (current_cleanups != lcleanup) {
6194 int jmp = 0;
6195 Sym *g, **pg;
6197 for (pg = &pending_gotos; (g = *pg) && g->c > lncleanups;)
6198 if (g->prev_tok->r & LABEL_FORWARD) {
6199 Sym *pcl = g->next;
6200 if (!jmp)
6201 jmp = gjmp(0);
6202 gsym(pcl->jnext);
6203 try_call_scope_cleanup(lcleanup);
6204 pcl->jnext = gjmp(0);
6205 if (!lncleanups)
6206 goto remove_pending;
6207 g->c = lncleanups;
6208 pg = &g->prev;
6209 } else {
6210 remove_pending:
6211 *pg = g->prev;
6212 sym_free(g);
6214 gsym(jmp);
6215 if (!nocode_wanted) {
6216 try_call_scope_cleanup(lcleanup);
6220 current_cleanups = lcleanup;
6221 ncleanups = lncleanups;
6222 /* pop locally defined labels */
6223 label_pop(&local_label_stack, llabel, is_expr);
6224 /* pop locally defined symbols */
6225 --local_scope;
6226 /* In the is_expr case (a statement expression is finished here),
6227 vtop might refer to symbols on the local_stack. Either via the
6228 type or via vtop->sym. We can't pop those nor any that in turn
6229 might be referred to. To make it easier we don't roll back
6230 any symbols in that case; some upper level call to block() will
6231 do that. We do have to remove such symbols from the lookup
6232 tables, though. sym_pop will do that. */
6233 sym_pop(&local_stack, s, is_expr);
6235 /* Pop VLA frames and restore stack pointer if required */
6236 if (vlas_in_scope > saved_vlas_in_scope) {
6237 vla_sp_loc = saved_vlas_in_scope ? block_vla_sp_loc : vla_sp_root_loc;
6238 vla_sp_restore();
6240 vlas_in_scope = saved_vlas_in_scope;
6242 next();
6243 } else if (tok == TOK_RETURN) {
6244 next();
6245 if (tok != ';') {
6246 gexpr();
6247 gen_assign_cast(&func_vt);
6248 try_call_scope_cleanup(NULL);
6249 if ((func_vt.t & VT_BTYPE) == VT_VOID)
6250 vtop--;
6251 else
6252 gfunc_return(&func_vt);
6253 } else {
6254 try_call_scope_cleanup(NULL);
6256 skip(';');
6257 /* jump unless last stmt in top-level block */
6258 if (tok != '}' || local_scope != 1)
6259 rsym = gjmp(rsym);
6260 nocode_wanted |= 0x20000000;
6261 } else if (tok == TOK_BREAK) {
6262 /* compute jump */
6263 if (!bsym)
6264 tcc_error("cannot break");
6265 *bsym = gjmp(*bsym);
6266 next();
6267 skip(';');
6268 nocode_wanted |= 0x20000000;
6269 } else if (tok == TOK_CONTINUE) {
6270 /* compute jump */
6271 if (!csym)
6272 tcc_error("cannot continue");
6273 vla_sp_restore_root();
6274 *csym = gjmp(*csym);
6275 next();
6276 skip(';');
6277 nocode_wanted |= 0x20000000;
6278 } else if (tok == TOK_FOR) {
6279 int e;
6280 int saved_nocode_wanted;
6281 nocode_wanted &= ~0x20000000;
6282 next();
6283 skip('(');
6284 s = local_stack;
6285 ++local_scope;
6286 if (tok != ';') {
6287 /* c99 for-loop init decl? */
6288 if (!decl0(VT_LOCAL, 1, NULL)) {
6289 /* no, regular for-loop init expr */
6290 gexpr();
6291 vpop();
6294 skip(';');
6295 d = ind;
6296 c = ind;
6297 vla_sp_restore();
6298 a = 0;
6299 b = 0;
6300 if (tok != ';') {
6301 gexpr();
6302 a = gvtst(1, 0);
6304 skip(';');
6305 if (tok != ')') {
6306 e = gjmp(0);
6307 c = ind;
6308 vla_sp_restore();
6309 gexpr();
6310 vpop();
6311 gjmp_addr(d);
6312 gsym(e);
6314 skip(')');
6315 saved_nocode_wanted = nocode_wanted;
6316 block(&a, &b, 0);
6317 nocode_wanted = saved_nocode_wanted;
6318 gjmp_addr(c);
6319 gsym(a);
6320 gsym_addr(b, c);
6321 --local_scope;
6322 sym_pop(&local_stack, s, 0);
6324 } else
6325 if (tok == TOK_DO) {
6326 int saved_nocode_wanted;
6327 nocode_wanted &= ~0x20000000;
6328 next();
6329 a = 0;
6330 b = 0;
6331 d = ind;
6332 vla_sp_restore();
6333 saved_nocode_wanted = nocode_wanted;
6334 block(&a, &b, 0);
6335 skip(TOK_WHILE);
6336 skip('(');
6337 gsym(b);
6338 if (b)
6339 nocode_wanted = saved_nocode_wanted;
6340 gexpr();
6341 c = gvtst(0, 0);
6342 gsym_addr(c, d);
6343 nocode_wanted = saved_nocode_wanted;
6344 skip(')');
6345 gsym(a);
6346 skip(';');
6347 } else
6348 if (tok == TOK_SWITCH) {
6349 struct switch_t *saved, sw;
6350 int saved_nocode_wanted = nocode_wanted;
6351 SValue switchval;
6352 next();
6353 skip('(');
6354 gexpr();
6355 skip(')');
6356 switchval = *vtop--;
6357 a = 0;
6358 b = gjmp(0); /* jump to first case */
6359 sw.p = NULL; sw.n = 0; sw.def_sym = 0;
6360 saved = cur_switch;
6361 cur_switch = &sw;
6362 block(&a, csym, 0);
6363 nocode_wanted = saved_nocode_wanted;
6364 a = gjmp(a); /* add implicit break */
6365 /* case lookup */
6366 gsym(b);
6367 qsort(sw.p, sw.n, sizeof(void*), case_cmp);
6368 for (b = 1; b < sw.n; b++)
6369 if (sw.p[b - 1]->v2 >= sw.p[b]->v1)
6370 tcc_error("duplicate case value");
6371 /* Our switch table sorting is signed, so the compared
6372 value needs to be signed as well when it is 64-bit. */
6373 if ((switchval.type.t & VT_BTYPE) == VT_LLONG)
6374 switchval.type.t &= ~VT_UNSIGNED;
6375 vpushv(&switchval);
6376 gcase(sw.p, sw.n, &a);
6377 vpop();
6378 if (sw.def_sym)
6379 gjmp_addr(sw.def_sym);
6380 dynarray_reset(&sw.p, &sw.n);
6381 cur_switch = saved;
6382 /* break label */
6383 gsym(a);
6384 } else
6385 if (tok == TOK_CASE) {
6386 struct case_t *cr = tcc_malloc(sizeof(struct case_t));
6387 if (!cur_switch)
6388 expect("switch");
6389 nocode_wanted &= ~0x20000000;
6390 next();
6391 cr->v1 = cr->v2 = expr_const64();
6392 if (gnu_ext && tok == TOK_DOTS) {
6393 next();
6394 cr->v2 = expr_const64();
6395 if (cr->v2 < cr->v1)
6396 tcc_warning("empty case range");
6398 cr->sym = ind;
6399 dynarray_add(&cur_switch->p, &cur_switch->n, cr);
6400 skip(':');
6401 is_expr = 0;
6402 goto block_after_label;
6403 } else
6404 if (tok == TOK_DEFAULT) {
6405 next();
6406 skip(':');
6407 if (!cur_switch)
6408 expect("switch");
6409 if (cur_switch->def_sym)
6410 tcc_error("too many 'default'");
6411 cur_switch->def_sym = ind;
6412 is_expr = 0;
6413 goto block_after_label;
6414 } else
6415 if (tok == TOK_GOTO) {
6416 next();
6417 if (tok == '*' && gnu_ext) {
6418 /* computed goto */
6419 next();
6420 gexpr();
6421 if ((vtop->type.t & VT_BTYPE) != VT_PTR)
6422 expect("pointer");
6423 ggoto();
6424 } else if (tok >= TOK_UIDENT) {
6425 s = label_find(tok);
6426 /* put forward definition if needed */
6427 if (!s)
6428 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
6429 else if (s->r == LABEL_DECLARED)
6430 s->r = LABEL_FORWARD;
6432 vla_sp_restore_root();
6433 if (s->r & LABEL_FORWARD) {
6434 /* start new goto chain for cleanups, linked via label->next */
6435 if (current_cleanups) {
6436 sym_push2(&pending_gotos, SYM_FIELD, 0, ncleanups);
6437 pending_gotos->prev_tok = s;
6438 s = sym_push2(&s->next, SYM_FIELD, 0, 0);
6439 pending_gotos->next = s;
6441 s->jnext = gjmp(s->jnext);
6442 } else {
6443 try_call_cleanup_goto(s->cleanupstate);
6444 gjmp_addr(s->jnext);
6446 next();
6447 } else {
6448 expect("label identifier");
6450 skip(';');
6451 } else if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
6452 asm_instr();
6453 } else {
6454 b = is_label();
6455 if (b) {
6456 /* label case */
6457 next();
6458 s = label_find(b);
6459 if (s) {
6460 if (s->r == LABEL_DEFINED)
6461 tcc_error("duplicate label '%s'", get_tok_str(s->v, NULL));
6462 s->r = LABEL_DEFINED;
6463 if (s->next) {
6464 Sym *pcl; /* pending cleanup goto */
6465 for (pcl = s->next; pcl; pcl = pcl->prev)
6466 gsym(pcl->jnext);
6467 sym_pop(&s->next, NULL, 0);
6468 } else
6469 gsym(s->jnext);
6470 } else {
6471 s = label_push(&global_label_stack, b, LABEL_DEFINED);
6473 s->jnext = ind;
6474 s->cleanupstate = current_cleanups;
6475 vla_sp_restore();
6476 /* we accept this, but it is a mistake */
6477 block_after_label:
6478 nocode_wanted &= ~0x20000000;
6479 if (tok == '}') {
6480 tcc_warning("deprecated use of label at end of compound statement");
6481 } else {
6482 if (is_expr)
6483 vpop();
6484 block(bsym, csym, is_expr);
6486 } else {
6487 /* expression case */
6488 if (tok != ';') {
6489 if (is_expr) {
6490 vpop();
6491 gexpr();
6492 } else {
6493 gexpr();
6494 vpop();
6497 skip(';');
6502 /* This skips over a stream of tokens containing balanced {} and ()
6503 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
6504 with a '{'). If STR is non-NULL, the skipped tokens are allocated and
6505 stored in *STR. This doesn't check if () and {} are nested correctly,
6506 i.e. "({)}" is accepted. */
6507 static void skip_or_save_block(TokenString **str)
6509 int braces = tok == '{';
6510 int level = 0;
6511 if (str)
6512 *str = tok_str_alloc();
6514 while ((level > 0 || (tok != '}' && tok != ',' && tok != ';' && tok != ')'))) {
6515 int t;
6516 if (tok == TOK_EOF) {
6517 if (str || level > 0)
6518 tcc_error("unexpected end of file");
6519 else
6520 break;
6522 if (str)
6523 tok_str_add_tok(*str);
6524 t = tok;
6525 next();
6526 if (t == '{' || t == '(') {
6527 level++;
6528 } else if (t == '}' || t == ')') {
6529 level--;
6530 if (level == 0 && braces && t == '}')
6531 break;
6534 if (str) {
6535 tok_str_add(*str, -1);
6536 tok_str_add(*str, 0);
6540 #define EXPR_CONST 1
6541 #define EXPR_ANY 2
6543 static void parse_init_elem(int expr_type)
6545 int saved_global_expr;
6546 switch(expr_type) {
6547 case EXPR_CONST:
6548 /* compound literals must be allocated globally in this case */
6549 saved_global_expr = global_expr;
6550 global_expr = 1;
6551 expr_const1();
6552 global_expr = saved_global_expr;
6553 /* NOTE: symbols are accepted, as well as lvalue for anon symbols
6554 (compound literals). */
6555 if (((vtop->r & (VT_VALMASK | VT_LVAL)) != VT_CONST
6556 && ((vtop->r & (VT_SYM|VT_LVAL)) != (VT_SYM|VT_LVAL)
6557 || vtop->sym->v < SYM_FIRST_ANOM))
6558 #ifdef TCC_TARGET_PE
6559 || ((vtop->r & VT_SYM) && vtop->sym->a.dllimport)
6560 #endif
6562 tcc_error("initializer element is not constant");
6563 break;
6564 case EXPR_ANY:
6565 expr_eq();
6566 break;
6570 /* put zeros for variable based init */
6571 static void init_putz(Section *sec, unsigned long c, int size)
6573 if (sec) {
6574 /* nothing to do because globals are already set to zero */
6575 } else {
6576 vpush_global_sym(&func_old_type, TOK_memset);
6577 vseti(VT_LOCAL, c);
6578 #ifdef TCC_TARGET_ARM
6579 vpushs(size);
6580 vpushi(0);
6581 #else
6582 vpushi(0);
6583 vpushs(size);
6584 #endif
6585 gfunc_call(3);
6589 /* t is the array or struct type. c is the array or struct
6590 address. cur_field is the pointer to the current
6591 field, for arrays the 'c' member contains the current start
6592 index. 'size_only' is true if only size info is needed (only used
6593 in arrays). al contains the already initialized length of the
6594 current container (starting at c). This returns the new length of that. */
6595 static int decl_designator(CType *type, Section *sec, unsigned long c,
6596 Sym **cur_field, int size_only, int al)
6598 Sym *s, *f;
6599 int index, index_last, align, l, nb_elems, elem_size;
6600 unsigned long corig = c;
6602 elem_size = 0;
6603 nb_elems = 1;
6604 if (gnu_ext && (l = is_label()) != 0)
6605 goto struct_field;
6606 /* NOTE: we only support ranges for last designator */
6607 while (nb_elems == 1 && (tok == '[' || tok == '.')) {
6608 if (tok == '[') {
6609 if (!(type->t & VT_ARRAY))
6610 expect("array type");
6611 next();
6612 index = index_last = expr_const();
6613 if (tok == TOK_DOTS && gnu_ext) {
6614 next();
6615 index_last = expr_const();
6617 skip(']');
6618 s = type->ref;
6619 if (index < 0 || (s->c >= 0 && index_last >= s->c) ||
6620 index_last < index)
6621 tcc_error("invalid index");
6622 if (cur_field)
6623 (*cur_field)->c = index_last;
6624 type = pointed_type(type);
6625 elem_size = type_size(type, &align);
6626 c += index * elem_size;
6627 nb_elems = index_last - index + 1;
6628 } else {
6629 next();
6630 l = tok;
6631 struct_field:
6632 next();
6633 if ((type->t & VT_BTYPE) != VT_STRUCT)
6634 expect("struct/union type");
6635 f = find_field(type, l);
6636 if (!f)
6637 expect("field");
6638 if (cur_field)
6639 *cur_field = f;
6640 type = &f->type;
6641 c += f->c;
6643 cur_field = NULL;
6645 if (!cur_field) {
6646 if (tok == '=') {
6647 next();
6648 } else if (!gnu_ext) {
6649 expect("=");
6651 } else {
6652 if (type->t & VT_ARRAY) {
6653 index = (*cur_field)->c;
6654 if (type->ref->c >= 0 && index >= type->ref->c)
6655 tcc_error("index too large");
6656 type = pointed_type(type);
6657 c += index * type_size(type, &align);
6658 } else {
6659 f = *cur_field;
6660 while (f && (f->v & SYM_FIRST_ANOM) && (f->type.t & VT_BITFIELD))
6661 *cur_field = f = f->next;
6662 if (!f)
6663 tcc_error("too many field init");
6664 type = &f->type;
6665 c += f->c;
6668 /* must put zero in holes (note that doing it that way
6669 ensures that it even works with designators) */
6670 if (!size_only && c - corig > al)
6671 init_putz(sec, corig + al, c - corig - al);
6672 decl_initializer(type, sec, c, 0, size_only);
6674 /* XXX: make it more general */
6675 if (!size_only && nb_elems > 1) {
6676 unsigned long c_end;
6677 uint8_t *src, *dst;
6678 int i;
6680 if (!sec) {
6681 vset(type, VT_LOCAL|VT_LVAL, c);
6682 for (i = 1; i < nb_elems; i++) {
6683 vset(type, VT_LOCAL|VT_LVAL, c + elem_size * i);
6684 vswap();
6685 vstore();
6687 vpop();
6688 } else if (!NODATA_WANTED) {
6689 c_end = c + nb_elems * elem_size;
6690 if (c_end > sec->data_allocated)
6691 section_realloc(sec, c_end);
6692 src = sec->data + c;
6693 dst = src;
6694 for(i = 1; i < nb_elems; i++) {
6695 dst += elem_size;
6696 memcpy(dst, src, elem_size);
6700 c += nb_elems * type_size(type, &align);
6701 if (c - corig > al)
6702 al = c - corig;
6703 return al;
6706 /* store a value or an expression directly in global data or in a local array */
6707 static void init_putv(CType *type, Section *sec, unsigned long c)
6709 int bt;
6710 void *ptr;
6711 CType dtype;
6713 dtype = *type;
6714 dtype.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
6716 if (sec) {
6717 int size, align;
6718 /* XXX: not portable */
6719 /* XXX: generate error if incorrect relocation */
6720 gen_assign_cast(&dtype);
6721 bt = type->t & VT_BTYPE;
6723 if ((vtop->r & VT_SYM)
6724 && bt != VT_PTR
6725 && bt != VT_FUNC
6726 && (bt != (PTR_SIZE == 8 ? VT_LLONG : VT_INT)
6727 || (type->t & VT_BITFIELD))
6728 && !((vtop->r & VT_CONST) && vtop->sym->v >= SYM_FIRST_ANOM)
6730 tcc_error("initializer element is not computable at load time");
6732 if (NODATA_WANTED) {
6733 vtop--;
6734 return;
6737 size = type_size(type, &align);
6738 section_reserve(sec, c + size);
6739 ptr = sec->data + c;
6741 /* XXX: make code faster ? */
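/* For illustration: the branch below fires when the initializer's value is
   an anonymous data symbol of non-pointer type, typically a compound
   literal, e.g.
       struct S { int n; int *q; };
       static int x;
       static struct S s = (struct S){ 1, &x };
   whose bytes, together with any relocations inside them (&x here), are
   copied into the object being initialized. */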
6742 if ((vtop->r & (VT_SYM|VT_CONST)) == (VT_SYM|VT_CONST) &&
6743 vtop->sym->v >= SYM_FIRST_ANOM &&
6744 /* XXX This rejects compound literals like
6745 '(void *){ptr}'. The problem is that '&sym' is
6746 represented the same way, which would be ruled out
6747 by the SYM_FIRST_ANOM check above, but also '"string"'
6748 in 'char *p = "string"' is represented the same
6749 with the type being VT_PTR and the symbol being an
6750 anonymous one. That is, there's no difference in vtop
6751 between '(void *){x}' and '&(void *){x}'. Ignore
6752 pointer typed entities here. Hopefully no real code
6753 will ever use compound literals with scalar type. */
6754 (vtop->type.t & VT_BTYPE) != VT_PTR) {
6755 /* These come from compound literals, memcpy stuff over. */
6756 Section *ssec;
6757 ElfSym *esym;
6758 ElfW_Rel *rel;
6759 esym = elfsym(vtop->sym);
6760 ssec = tcc_state->sections[esym->st_shndx];
6761 memmove (ptr, ssec->data + esym->st_value, size);
6762 if (ssec->reloc) {
6763 /* We need to copy over all memory contents, and that
6764 includes relocations. Use the fact that relocs are
6765 created in order, so look from the end of relocs
6766 until we hit one before the copied region. */
6767 int num_relocs = ssec->reloc->data_offset / sizeof(*rel);
6768 rel = (ElfW_Rel*)(ssec->reloc->data + ssec->reloc->data_offset);
6769 while (num_relocs--) {
6770 rel--;
6771 if (rel->r_offset >= esym->st_value + size)
6772 continue;
6773 if (rel->r_offset < esym->st_value)
6774 break;
6775 /* Note: if the same fields are initialized multiple
6776 times (possible with designators) then we possibly
6777 add multiple relocations for the same offset here.
6778 That would lead to wrong code; the last reloc needs
6779 to win. We clean this up later after the whole
6780 initializer is parsed. */
6781 put_elf_reloca(symtab_section, sec,
6782 c + rel->r_offset - esym->st_value,
6783 ELFW(R_TYPE)(rel->r_info),
6784 ELFW(R_SYM)(rel->r_info),
6785 #if PTR_SIZE == 8
6786 rel->r_addend
6787 #else
6789 #endif
6793 } else {
6794 if (type->t & VT_BITFIELD) {
6795 int bit_pos, bit_size, bits, n;
6796 unsigned char *p, v, m;
6797 bit_pos = BIT_POS(vtop->type.t);
6798 bit_size = BIT_SIZE(vtop->type.t);
6799 p = (unsigned char*)ptr + (bit_pos >> 3);
6800 bit_pos &= 7, bits = 0;
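/* The loop below stores the constant bitfield value byte by byte: for each
   byte a mask 'm' covering only this field's bits is built and the shifted
   value 'v' is merged under that mask, leaving neighbouring bitfields that
   share the byte untouched. */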
6801 while (bit_size) {
6802 n = 8 - bit_pos;
6803 if (n > bit_size)
6804 n = bit_size;
6805 v = vtop->c.i >> bits << bit_pos;
6806 m = ((1 << n) - 1) << bit_pos;
6807 *p = (*p & ~m) | (v & m);
6808 bits += n, bit_size -= n, bit_pos = 0, ++p;
6810 } else
6811 switch(bt) {
6812 /* XXX: when cross-compiling we assume that each type has the
6813 same representation on host and target, which is likely to
6814 be wrong in the case of long double */
6815 case VT_BOOL:
6816 vtop->c.i = vtop->c.i != 0;
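/* fall through: the normalized 0/1 is stored as a byte */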
6817 case VT_BYTE:
6818 *(char *)ptr |= vtop->c.i;
6819 break;
6820 case VT_SHORT:
6821 *(short *)ptr |= vtop->c.i;
6822 break;
6823 case VT_FLOAT:
6824 *(float*)ptr = vtop->c.f;
6825 break;
6826 case VT_DOUBLE:
6827 *(double *)ptr = vtop->c.d;
6828 break;
6829 case VT_LDOUBLE:
6830 #if defined TCC_IS_NATIVE_387
6831 if (sizeof (long double) >= 10) /* zero pad ten-byte LD */
6832 memcpy(ptr, &vtop->c.ld, 10);
6833 #ifdef __TINYC__
6834 else if (sizeof (long double) == sizeof (double))
6835 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr) : "m" (vtop->c.ld));
6836 #endif
6837 else if (vtop->c.ld == 0.0)
6839 else
6840 #endif
6841 if (sizeof(long double) == LDOUBLE_SIZE)
6842 *(long double*)ptr = vtop->c.ld;
6843 else if (sizeof(double) == LDOUBLE_SIZE)
6844 *(double *)ptr = (double)vtop->c.ld;
6845 else
6846 tcc_error("can't cross compile long double constants");
6847 break;
6848 #if PTR_SIZE != 8
6849 case VT_LLONG:
6850 *(long long *)ptr |= vtop->c.i;
6851 break;
6852 #else
6853 case VT_LLONG:
6854 #endif
6855 case VT_PTR:
6857 addr_t val = vtop->c.i;
6858 #if PTR_SIZE == 8
6859 if (vtop->r & VT_SYM)
6860 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
6861 else
6862 *(addr_t *)ptr |= val;
6863 #else
6864 if (vtop->r & VT_SYM)
6865 greloc(sec, vtop->sym, c, R_DATA_PTR);
6866 *(addr_t *)ptr |= val;
6867 #endif
6868 break;
6870 default:
6872 int val = vtop->c.i;
6873 #if PTR_SIZE == 8
6874 if (vtop->r & VT_SYM)
6875 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
6876 else
6877 *(int *)ptr |= val;
6878 #else
6879 if (vtop->r & VT_SYM)
6880 greloc(sec, vtop->sym, c, R_DATA_PTR);
6881 *(int *)ptr |= val;
6882 #endif
6883 break;
6887 vtop--;
6888 } else {
6889 vset(&dtype, VT_LOCAL|VT_LVAL, c);
6890 vswap();
6891 vstore();
6892 vpop();
6896 /* 't' contains the type and storage info. 'c' is the offset of the
6897 object in section 'sec'. If 'sec' is NULL, it means stack based
6898 allocation. 'first' is true if array '{' must be read (multi
6899 dimension implicit array init handling). 'size_only' is true if
6900 size only evaluation is wanted (only for arrays). */
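/* For illustration, this single routine handles every initializer shape:
       int a[3] = { 1, 2, 3 };   // braced list
       char s[] = "hi";          // string literal (special-cased below)
       int b = 1 + 2;            // plain scalar expression
       struct S t = u;           // whole-aggregate copy from a compatible value
   recursing for nested aggregates. */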
6901 static void decl_initializer(CType *type, Section *sec, unsigned long c,
6902 int first, int size_only)
6904 int len, n, no_oblock, nb, i;
6905 int size1, align1;
6906 int have_elem;
6907 Sym *s, *f;
6908 Sym indexsym;
6909 CType *t1;
6911 /* If we currently are at an '}' or ',' we have read an initializer
6912 element in one of our callers, and not yet consumed it. */
6913 have_elem = tok == '}' || tok == ',';
6914 if (!have_elem && tok != '{' &&
6915 /* In case of strings we have special handling for arrays, so
6916 don't consume them as initializer value (which would commit them
6917 to some anonymous symbol). */
6918 tok != TOK_LSTR && tok != TOK_STR &&
6919 !size_only) {
6920 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
6921 have_elem = 1;
6924 if (have_elem &&
6925 !(type->t & VT_ARRAY) &&
6926 /* Use i_c_parameter_t, to strip toplevel qualifiers.
6927 The source type might have VT_CONSTANT set, which is
6928 of course assignable to non-const elements. */
6929 is_compatible_unqualified_types(type, &vtop->type)) {
6930 init_putv(type, sec, c);
6931 } else if (type->t & VT_ARRAY) {
6932 s = type->ref;
6933 n = s->c;
6934 t1 = pointed_type(type);
6935 size1 = type_size(t1, &align1);
6937 no_oblock = 1;
6938 if ((first && tok != TOK_LSTR && tok != TOK_STR) ||
6939 tok == '{') {
6940 if (tok != '{')
6941 tcc_error("character array initializer must be a literal,"
6942 " optionally enclosed in braces");
6943 skip('{');
6944 no_oblock = 0;
6947 /* only parse strings here if correct type (otherwise: handle
6948 them as ((w)char *) expressions) */
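/* For illustration: a string literal is consumed here only when the element
   type matches, e.g.
       char a[] = "abc";        // char array, handled below
       wchar_t w[] = L"abc";    // wide char array
   whereas in 'char *p[] = { "abc" };' the literal initializes a pointer
   element and is therefore parsed as an ordinary expression. */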
6949 if ((tok == TOK_LSTR &&
6950 #ifdef TCC_TARGET_PE
6951 (t1->t & VT_BTYPE) == VT_SHORT && (t1->t & VT_UNSIGNED)
6952 #else
6953 (t1->t & VT_BTYPE) == VT_INT
6954 #endif
6955 ) || (tok == TOK_STR && (t1->t & VT_BTYPE) == VT_BYTE)) {
6956 len = 0;
6957 while (tok == TOK_STR || tok == TOK_LSTR) {
6958 int cstr_len, ch;
6960 /* compute maximum number of chars wanted */
6961 if (tok == TOK_STR)
6962 cstr_len = tokc.str.size;
6963 else
6964 cstr_len = tokc.str.size / sizeof(nwchar_t);
6965 cstr_len--;
6966 nb = cstr_len;
6967 if (n >= 0 && nb > (n - len))
6968 nb = n - len;
6969 if (!size_only) {
6970 if (cstr_len > nb)
6971 tcc_warning("initializer-string for array is too long");
6972 /* in order to go faster for the common case (char
6973 string in a global variable), we handle it
6974 specifically */
6975 if (sec && tok == TOK_STR && size1 == 1) {
6976 if (!NODATA_WANTED)
6977 memcpy(sec->data + c + len, tokc.str.data, nb);
6978 } else {
6979 for(i=0;i<nb;i++) {
6980 if (tok == TOK_STR)
6981 ch = ((unsigned char *)tokc.str.data)[i];
6982 else
6983 ch = ((nwchar_t *)tokc.str.data)[i];
6984 vpushi(ch);
6985 init_putv(t1, sec, c + (len + i) * size1);
6989 len += nb;
6990 next();
6992 /* only add trailing zero if enough storage (no
6993 warning in this case since it is standard) */
6994 if (n < 0 || len < n) {
6995 if (!size_only) {
6996 vpushi(0);
6997 init_putv(t1, sec, c + (len * size1));
6999 len++;
7001 len *= size1;
7002 } else {
7003 indexsym.c = 0;
7004 f = &indexsym;
7006 do_init_list:
7007 len = 0;
7008 while (tok != '}' || have_elem) {
7009 len = decl_designator(type, sec, c, &f, size_only, len);
7010 have_elem = 0;
7011 if (type->t & VT_ARRAY) {
7012 ++indexsym.c;
7013 /* special test for multi dimensional arrays (may not
7014 be strictly correct if designators are used at the
7015 same time) */
7016 if (no_oblock && len >= n*size1)
7017 break;
7018 } else {
7019 if (s->type.t == VT_UNION)
7020 f = NULL;
7021 else
7022 f = f->next;
7023 if (no_oblock && f == NULL)
7024 break;
7027 if (tok == '}')
7028 break;
7029 skip(',');
7032 /* put zeros at the end */
7033 if (!size_only && len < n*size1)
7034 init_putz(sec, c + len, n*size1 - len);
7035 if (!no_oblock)
7036 skip('}');
7037 /* patch type size if needed, which happens only for array types */
7038 if (n < 0)
7039 s->c = size1 == 1 ? len : ((len + size1 - 1)/size1);
7040 } else if ((type->t & VT_BTYPE) == VT_STRUCT) {
7041 size1 = 1;
7042 no_oblock = 1;
7043 if (first || tok == '{') {
7044 skip('{');
7045 no_oblock = 0;
7047 s = type->ref;
7048 f = s->next;
7049 n = s->c;
7050 goto do_init_list;
7051 } else if (tok == '{') {
7052 next();
7053 decl_initializer(type, sec, c, first, size_only);
7054 skip('}');
7055 } else if (size_only) {
7056 /* If we supported only ISO C we wouldn't have to accept calling
7057 this on anything other than an array with size_only==1 (and even
7058 then only on the outermost level, so no recursion would be needed),
7059 because initializing a flexible array member isn't supported.
7060 But GNU C supports it, so we need to recurse even into
7061 subfields of structs and arrays when size_only is set. */
7062 /* just skip expression */
7063 skip_or_save_block(NULL);
7064 } else {
7065 if (!have_elem) {
7066 /* This should happen only when we haven't parsed
7067 the init element above for fear of committing a
7068 string constant to memory too early. */
7069 if (tok != TOK_STR && tok != TOK_LSTR)
7070 expect("string constant");
7071 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
7073 init_putv(type, sec, c);
7077 /* parse an initializer for type 't' if 'has_init' is non zero, and
7078 allocate space in local or global data space ('r' is either
7079 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
7080 variable 'v' of scope 'scope' is declared before initializers
7081 are parsed. If 'v' is zero, then a reference to the new object
7082 is put in the value stack. If 'has_init' is 2, a special parsing
7083 is done to handle string constants. */
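/* For illustration: declarations that end up here include
       static int tab[] = { 1, 2, 3 };   // size unknown until the initializer is seen
       char s[] = "abc";                 // string initializer
       int local = f();                  // sec == NULL, stack allocation
   When the size is unknown the initializer tokens are saved and replayed:
   once with size_only set to compute the size, then again to emit the
   actual data or code. */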
7084 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r,
7085 int has_init, int v, int scope)
7087 int size, align, addr;
7088 TokenString *init_str = NULL;
7090 Section *sec;
7091 Sym *flexible_array;
7092 Sym *sym = NULL;
7093 int saved_nocode_wanted = nocode_wanted;
7094 #ifdef CONFIG_TCC_BCHECK
7095 int bcheck;
7096 #endif
7098 /* Always allocate static or global variables */
7099 if (v && (r & VT_VALMASK) == VT_CONST)
7100 nocode_wanted |= 0x80000000;
7102 #ifdef CONFIG_TCC_BCHECK
7103 bcheck = tcc_state->do_bounds_check && !NODATA_WANTED;
7104 #endif
7106 flexible_array = NULL;
7107 if ((type->t & VT_BTYPE) == VT_STRUCT) {
7108 Sym *field = type->ref->next;
7109 if (field) {
7110 while (field->next)
7111 field = field->next;
7112 if (field->type.t & VT_ARRAY && field->type.ref->c < 0)
7113 flexible_array = field;
7117 size = type_size(type, &align);
7118 /* If unknown size, we must evaluate it before
7119 evaluating initializers because
7120 initializers can generate global data too
7121 (e.g. string pointers or ISO C99 compound
7122 literals). It also simplifies local
7123 initializer handling */
7124 if (size < 0 || (flexible_array && has_init)) {
7125 if (!has_init)
7126 tcc_error("unknown type size");
7127 /* get all init string */
7128 if (has_init == 2) {
7129 init_str = tok_str_alloc();
7130 /* only get strings */
7131 while (tok == TOK_STR || tok == TOK_LSTR) {
7132 tok_str_add_tok(init_str);
7133 next();
7135 tok_str_add(init_str, -1);
7136 tok_str_add(init_str, 0);
7137 } else {
7138 skip_or_save_block(&init_str);
7140 unget_tok(0);
7142 /* compute size */
7143 begin_macro(init_str, 1);
7144 next();
7145 decl_initializer(type, NULL, 0, 1, 1);
7146 /* prepare second initializer parsing */
7147 macro_ptr = init_str->str;
7148 next();
7150 /* if still unknown size, error */
7151 size = type_size(type, &align);
7152 if (size < 0)
7153 tcc_error("unknown type size");
7155 /* If there's a flexible array member and it was used in the
7156 initializer, adjust the size. */
7157 if (flexible_array &&
7158 flexible_array->type.ref->c > 0)
7159 size += flexible_array->type.ref->c
7160 * pointed_size(&flexible_array->type);
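/* For illustration (GNU extension; ISO C forbids initializing a flexible
   array member): with
       struct V { int n; int data[]; };
       static struct V v = { 3, { 1, 2, 3 } };
   the allocated size grows to sizeof(struct V) + 3 * sizeof(int). */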
7161 /* take into account specified alignment if bigger */
7162 if (ad->a.aligned) {
7163 int speca = 1 << (ad->a.aligned - 1);
7164 if (speca > align)
7165 align = speca;
7166 } else if (ad->a.packed) {
7167 align = 1;
7170 if (!v && NODATA_WANTED)
7171 size = 0, align = 1;
7173 if ((r & VT_VALMASK) == VT_LOCAL) {
7174 sec = NULL;
7175 #ifdef CONFIG_TCC_BCHECK
7176 if (bcheck && (type->t & VT_ARRAY)) {
7177 loc--;
7179 #endif
7180 loc = (loc - size) & -align;
7181 addr = loc;
7182 #ifdef CONFIG_TCC_BCHECK
7183 /* handles bounds */
7184 /* XXX: currently, since we do only one pass, we cannot track
7185 '&' operators, so we add only arrays */
7186 if (bcheck && (type->t & VT_ARRAY)) {
7187 addr_t *bounds_ptr;
7188 /* add padding between regions */
7189 loc--;
7190 /* then add local bound info */
7191 bounds_ptr = section_ptr_add(lbounds_section, 2 * sizeof(addr_t));
7192 bounds_ptr[0] = addr;
7193 bounds_ptr[1] = size;
7195 #endif
7196 if (v) {
7197 /* local variable */
7198 #ifdef CONFIG_TCC_ASM
7199 if (ad->asm_label) {
7200 int reg = asm_parse_regvar(ad->asm_label);
7201 if (reg >= 0)
7202 r = (r & ~VT_VALMASK) | reg;
7204 #endif
7205 sym = sym_push(v, type, r, addr);
7206 if (ad->cleanup_func) {
7207 Sym *cls = sym_push2(&all_cleanups, SYM_FIELD | ++ncleanups, 0, 0);
7208 cls->prev_tok = sym;
7209 cls->next = ad->cleanup_func;
7210 cls->ncl = current_cleanups;
7211 current_cleanups = cls;
7214 sym->a = ad->a;
7215 } else {
7216 /* push local reference */
7217 vset(type, r, addr);
7219 } else {
7220 if (v && scope == VT_CONST) {
7221 /* see if the symbol was already defined */
7222 sym = sym_find(v);
7223 if (sym) {
7224 patch_storage(sym, ad, type);
7225 /* we accept several definitions of the same global variable. */
7226 if (!has_init && sym->c && elfsym(sym)->st_shndx != SHN_UNDEF)
7227 goto no_alloc;
7231 /* allocate symbol in corresponding section */
7232 sec = ad->section;
7233 if (!sec) {
7234 if (has_init)
7235 sec = data_section;
7236 else if (tcc_state->nocommon)
7237 sec = bss_section;
7240 if (sec) {
7241 addr = section_add(sec, size, align);
7242 #ifdef CONFIG_TCC_BCHECK
7243 /* add padding if bound check */
7244 if (bcheck)
7245 section_add(sec, 1, 1);
7246 #endif
7247 } else {
7248 addr = align; /* SHN_COMMON is special, symbol value is align */
7249 sec = common_section;
7252 if (v) {
7253 if (!sym) {
7254 sym = sym_push(v, type, r | VT_SYM, 0);
7255 patch_storage(sym, ad, NULL);
7257 /* Local statics have a scope until now (for
7258 warnings), remove it here. */
7259 sym->sym_scope = 0;
7260 /* update symbol definition */
7261 put_extern_sym(sym, sec, addr, size);
7262 } else {
7263 /* push global reference */
7264 sym = get_sym_ref(type, sec, addr, size);
7265 vpushsym(type, sym);
7266 vtop->r |= r;
7269 #ifdef CONFIG_TCC_BCHECK
7270 /* handle bounds now because the symbol must be defined
7271 before the relocation can refer to it */
7272 if (bcheck) {
7273 addr_t *bounds_ptr;
7275 greloca(bounds_section, sym, bounds_section->data_offset, R_DATA_PTR, 0);
7276 /* then add global bound info */
7277 bounds_ptr = section_ptr_add(bounds_section, 2 * sizeof(addr_t));
7278 bounds_ptr[0] = 0; /* relocated */
7279 bounds_ptr[1] = size;
7281 #endif
7284 if (type->t & VT_VLA) {
7285 int a;
7287 if (NODATA_WANTED)
7288 goto no_alloc;
7290 /* save current stack pointer */
7291 if (vlas_in_scope == 0) {
7292 if (vla_sp_root_loc == -1)
7293 vla_sp_root_loc = (loc -= PTR_SIZE);
7294 gen_vla_sp_save(vla_sp_root_loc);
7297 vla_runtime_type_size(type, &a);
7298 gen_vla_alloc(type, a);
7299 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
7300 /* on _WIN64, because of the function args scratch area, the
7301 result of alloca differs from RSP and is returned in RAX. */
7302 gen_vla_result(addr), addr = (loc -= PTR_SIZE);
7303 #endif
7304 gen_vla_sp_save(addr);
7305 vla_sp_loc = addr;
7306 vlas_in_scope++;
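/* For illustration: for a variable length array such as
       void f(int n) { int a[n]; ... }
   the code above computes n * sizeof(int) at run time, grows the stack with
   gen_vla_alloc() and records the resulting stack pointer so it can be
   restored when the declaring block is left. */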
7308 } else if (has_init) {
7309 size_t oldreloc_offset = 0;
7310 if (sec && sec->reloc)
7311 oldreloc_offset = sec->reloc->data_offset;
7312 decl_initializer(type, sec, addr, 1, 0);
7313 if (sec && sec->reloc)
7314 squeeze_multi_relocs(sec, oldreloc_offset);
7315 /* patch flexible array member size back to -1, */
7316 /* for possible subsequent similar declarations */
7317 if (flexible_array)
7318 flexible_array->type.ref->c = -1;
7321 no_alloc:
7322 /* restore parse state if needed */
7323 if (init_str) {
7324 end_macro();
7325 next();
7328 nocode_wanted = saved_nocode_wanted;
7331 /* parse a function defined by symbol 'sym' and generate its code in
7332 'cur_text_section' */
7333 static void gen_function(Sym *sym)
7335 nocode_wanted = 0;
7336 ind = cur_text_section->data_offset;
7337 if (sym->a.aligned) {
7338 size_t newoff = section_add(cur_text_section, 0,
7339 1 << (sym->a.aligned - 1));
7340 gen_fill_nops(newoff - ind);
7342 /* NOTE: we patch the symbol size later */
7343 put_extern_sym(sym, cur_text_section, ind, 0);
7344 funcname = get_tok_str(sym->v, NULL);
7345 func_ind = ind;
7346 /* Initialize VLA state */
7347 vla_sp_loc = -1;
7348 vla_sp_root_loc = -1;
7349 /* put debug symbol */
7350 tcc_debug_funcstart(tcc_state, sym);
7351 /* push a dummy symbol to enable local sym storage */
7352 sym_push2(&local_stack, SYM_FIELD, 0, 0);
7353 local_scope = 1; /* for function parameters */
7354 gfunc_prolog(&sym->type);
7355 reset_local_scope();
7356 rsym = 0;
7357 clear_temp_local_var_list();
7358 block(NULL, NULL, 0);
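/* The check below implements C99 5.1.2.2.3: reaching the end of main()
   without a return statement behaves as if 'return 0;' had been executed,
   hence the vpushi(0)/gfunc_return pair. */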
7359 if (!(nocode_wanted & 0x20000000)
7360 && ((func_vt.t & VT_BTYPE) == VT_INT)
7361 && !strcmp (funcname, "main"))
7363 nocode_wanted = 0;
7364 vpushi(0);
7365 gen_assign_cast(&func_vt);
7366 gfunc_return(&func_vt);
7368 nocode_wanted = 0;
7369 gsym(rsym);
7370 gfunc_epilog();
7371 cur_text_section->data_offset = ind;
7372 label_pop(&global_label_stack, NULL, 0);
7373 /* reset local stack */
7374 reset_local_scope();
7375 sym_pop(&local_stack, NULL, 0);
7376 /* end of function */
7377 /* patch symbol size */
7378 elfsym(sym)->st_size = ind - func_ind;
7379 tcc_debug_funcend(tcc_state, ind - func_ind);
7380 /* It's better to crash than to generate wrong code */
7381 cur_text_section = NULL;
7382 funcname = ""; /* for safety */
7383 func_vt.t = VT_VOID; /* for safety */
7384 func_var = 0; /* for safety */
7385 ind = 0; /* for safety */
7386 nocode_wanted = 0x80000000;
7387 check_vstack();
7390 static void gen_inline_functions(TCCState *s)
7392 Sym *sym;
7393 int inline_generated, i, ln;
7394 struct InlineFunc *fn;
7396 ln = file->line_num;
7397 /* iterate while inline functions are referenced */
7398 do {
7399 inline_generated = 0;
7400 for (i = 0; i < s->nb_inline_fns; ++i) {
7401 fn = s->inline_fns[i];
7402 sym = fn->sym;
7403 if (sym && sym->c) {
7404 /* the function was used: generate its code and
7405 convert it to a normal function */
7406 fn->sym = NULL;
7407 if (file)
7408 pstrcpy(file->filename, sizeof file->filename, fn->filename);
7409 sym->type.t &= ~VT_INLINE;
7411 begin_macro(fn->func_str, 1);
7412 next();
7413 cur_text_section = text_section;
7414 gen_function(sym);
7415 end_macro();
7417 inline_generated = 1;
7420 } while (inline_generated);
7421 file->line_num = ln;
7424 ST_FUNC void free_inline_functions(TCCState *s)
7426 int i;
7427 /* free tokens of unused inline functions */
7428 for (i = 0; i < s->nb_inline_fns; ++i) {
7429 struct InlineFunc *fn = s->inline_fns[i];
7430 if (fn->sym)
7431 tok_str_free(fn->func_str);
7433 dynarray_reset(&s->inline_fns, &s->nb_inline_fns);
7436 /* 'l' is VT_LOCAL or VT_CONST to define default storage type, or VT_CMP
7437 if parsing an old style parameter decl list (func_sym is set then) */
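/* For illustration: besides file-scope (l == VT_CONST) and block-scope
   (l == VT_LOCAL) declarations, l == VT_CMP parses an old style parameter
   declaration list, e.g.
       int f(a, b)
           int a;
           char b;
       { return a + b; }
   where the declarations between ')' and '{' are matched against func_sym's
   parameter list. */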
7438 static int decl0(int l, int is_for_loop_init, Sym *func_sym)
7440 int v, has_init, r;
7441 CType type, btype;
7442 Sym *sym;
7443 AttributeDef ad, adbase;
7445 while (1) {
7446 if (!parse_btype(&btype, &adbase)) {
7447 if (is_for_loop_init)
7448 return 0;
7449 /* skip redundant ';' if not in old parameter decl scope */
7450 if (tok == ';' && l != VT_CMP) {
7451 next();
7452 continue;
7454 if (l != VT_CONST)
7455 break;
7456 if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
7457 /* global asm block */
7458 asm_global_instr();
7459 continue;
7461 if (tok >= TOK_UIDENT) {
7462 /* special test for old K&R protos without explicit int
7463 type. Only accepted when defining global data */
7464 btype.t = VT_INT;
7465 } else {
7466 if (tok != TOK_EOF)
7467 expect("declaration");
7468 break;
7471 if (tok == ';') {
7472 if ((btype.t & VT_BTYPE) == VT_STRUCT) {
7473 int v = btype.ref->v;
7474 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) >= SYM_FIRST_ANOM)
7475 tcc_warning("unnamed struct/union that defines no instances");
7476 next();
7477 continue;
7479 if (IS_ENUM(btype.t)) {
7480 next();
7481 continue;
7484 while (1) { /* iterate thru each declaration */
7485 type = btype;
7486 /* If the base type itself was an array type of unspecified
7487 size (like in 'typedef int arr[]; arr x = {1};') then
7488 we will overwrite the unknown size with the real one for
7489 this decl. We need to unshare the ref symbol holding
7490 that size. */
7491 if ((type.t & VT_ARRAY) && type.ref->c < 0) {
7492 type.ref = sym_push(SYM_FIELD, &type.ref->type, 0, type.ref->c);
7494 ad = adbase;
7495 type_decl(&type, &ad, &v, TYPE_DIRECT);
7496 #if 0
7498 char buf[500];
7499 type_to_str(buf, sizeof(buf), &type, get_tok_str(v, NULL));
7500 printf("type = '%s'\n", buf);
7502 #endif
7503 if ((type.t & VT_BTYPE) == VT_FUNC) {
7504 if ((type.t & VT_STATIC) && (l == VT_LOCAL)) {
7505 tcc_error("function without file scope cannot be static");
7507 /* if old style function prototype, we accept a
7508 declaration list */
7509 sym = type.ref;
7510 if (sym->f.func_type == FUNC_OLD && l == VT_CONST)
7511 decl0(VT_CMP, 0, sym);
7514 if (gnu_ext && (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
7515 ad.asm_label = asm_label_instr();
7516 /* parse one last attribute list, after asm label */
7517 parse_attribute(&ad);
7518 if (tok == '{')
7519 expect(";");
7522 #ifdef TCC_TARGET_PE
7523 if (ad.a.dllimport || ad.a.dllexport) {
7524 if (type.t & (VT_STATIC|VT_TYPEDEF))
7525 tcc_error("cannot have dll linkage with static or typedef");
7526 if (ad.a.dllimport) {
7527 if ((type.t & VT_BTYPE) == VT_FUNC)
7528 ad.a.dllimport = 0;
7529 else
7530 type.t |= VT_EXTERN;
7533 #endif
7534 if (tok == '{') {
7535 if (l != VT_CONST)
7536 tcc_error("cannot use local functions");
7537 if ((type.t & VT_BTYPE) != VT_FUNC)
7538 expect("function definition");
7540 /* reject abstract declarators in function definition;
7541 make old style params without decl have int type */
7542 sym = type.ref;
7543 while ((sym = sym->next) != NULL) {
7544 if (!(sym->v & ~SYM_FIELD))
7545 expect("identifier");
7546 if (sym->type.t == VT_VOID)
7547 sym->type = int_type;
7550 /* XXX: cannot do better now: convert extern inline to static inline */
7551 if ((type.t & (VT_EXTERN | VT_INLINE)) == (VT_EXTERN | VT_INLINE))
7552 type.t = (type.t & ~VT_EXTERN) | VT_STATIC;
7554 /* put function symbol */
7555 sym = external_global_sym(v, &type, 0);
7556 type.t &= ~VT_EXTERN;
7557 patch_storage(sym, &ad, &type);
7559 /* static inline functions are just recorded as a kind
7560 of macro. Their code will be emitted at the end of
7561 the compilation unit only if they are used */
7562 if ((type.t & (VT_INLINE | VT_STATIC)) ==
7563 (VT_INLINE | VT_STATIC)) {
7564 struct InlineFunc *fn;
7565 const char *filename;
7567 filename = file ? file->filename : "";
7568 fn = tcc_malloc(sizeof *fn + strlen(filename));
7569 strcpy(fn->filename, filename);
7570 fn->sym = sym;
7571 skip_or_save_block(&fn->func_str);
7572 dynarray_add(&tcc_state->inline_fns,
7573 &tcc_state->nb_inline_fns, fn);
7574 } else {
7575 /* compute text section */
7576 cur_text_section = ad.section;
7577 if (!cur_text_section)
7578 cur_text_section = text_section;
7579 gen_function(sym);
7581 break;
7582 } else {
7583 if (l == VT_CMP) {
7584 /* find parameter in function parameter list */
7585 for (sym = func_sym->next; sym; sym = sym->next)
7586 if ((sym->v & ~SYM_FIELD) == v)
7587 goto found;
7588 tcc_error("declaration for parameter '%s' but no such parameter",
7589 get_tok_str(v, NULL));
7590 found:
7591 if (type.t & VT_STORAGE) /* 'register' is okay */
7592 tcc_error("storage class specified for '%s'",
7593 get_tok_str(v, NULL));
7594 if (sym->type.t != VT_VOID)
7595 tcc_error("redefinition of parameter '%s'",
7596 get_tok_str(v, NULL));
7597 convert_parameter_type(&type);
7598 sym->type = type;
7599 } else if (type.t & VT_TYPEDEF) {
7600 /* save typedefed type */
7601 /* XXX: test storage specifiers ? */
7602 sym = sym_find(v);
7603 if (sym && sym->sym_scope == local_scope) {
7604 if (!is_compatible_types(&sym->type, &type)
7605 || !(sym->type.t & VT_TYPEDEF))
7606 tcc_error("incompatible redefinition of '%s'",
7607 get_tok_str(v, NULL));
7608 sym->type = type;
7609 } else {
7610 sym = sym_push(v, &type, 0, 0);
7612 sym->a = ad.a;
7613 sym->f = ad.f;
7614 } else if ((type.t & VT_BTYPE) == VT_VOID
7615 && !(type.t & VT_EXTERN)) {
7616 tcc_error("declaration of void object");
7617 } else {
7618 r = 0;
7619 if ((type.t & VT_BTYPE) == VT_FUNC) {
7620 /* external function definition */
7621 /* specific case for func_call attribute */
7622 type.ref->f = ad.f;
7623 } else if (!(type.t & VT_ARRAY)) {
7624 /* not lvalue if array */
7625 r |= lvalue_type(type.t);
7627 has_init = (tok == '=');
7628 if (has_init && (type.t & VT_VLA))
7629 tcc_error("variable length array cannot be initialized");
7630 if (((type.t & VT_EXTERN) && (!has_init || l != VT_CONST)) ||
7631 ((type.t & VT_BTYPE) == VT_FUNC) ||
7632 ((type.t & VT_ARRAY) && (type.t & VT_STATIC) &&
7633 !has_init && l == VT_CONST && type.ref->c < 0)) {
7634 /* external variable or function */
7635 /* NOTE: like GCC, uninitialized global static
7636 arrays of unspecified size are considered
7637 extern */
7638 type.t |= VT_EXTERN;
7639 sym = external_sym(v, &type, r, &ad);
7640 if (ad.alias_target) {
7641 ElfSym *esym;
7642 Sym *alias_target;
7643 alias_target = sym_find(ad.alias_target);
7644 esym = elfsym(alias_target);
7645 if (!esym)
7646 tcc_error("unsupported forward __alias__ attribute");
7647 /* Local statics have a scope until now (for
7648 warnings), remove it here. */
7649 sym->sym_scope = 0;
7650 put_extern_sym2(sym, esym->st_shndx, esym->st_value, esym->st_size, 0);
7652 } else {
7653 if (type.t & VT_STATIC)
7654 r |= VT_CONST;
7655 else
7656 r |= l;
7657 if (has_init)
7658 next();
7659 else if (l == VT_CONST)
7660 /* uninitialized global variables may be overridden */
7661 type.t |= VT_EXTERN;
7662 decl_initializer_alloc(&type, &ad, r, has_init, v, l);
7665 if (tok != ',') {
7666 if (is_for_loop_init)
7667 return 1;
7668 skip(';');
7669 break;
7671 next();
7675 return 0;
7678 static void decl(int l)
7680 decl0(l, 0, NULL);
7683 /* ------------------------------------------------------------------------- */