Detect more invalid initializers
[tinycc.git] / tccgen.c
1 /*
2 * TCC - Tiny C Compiler
3 *
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 #include "tcc.h"
22 /********************************************************/
23 /* global variables */
25 /* loc : local variable index
26 ind : output code index
27 rsym: return symbol
28 anon_sym: anonymous symbol index
30 ST_DATA int rsym, anon_sym, ind, loc;
32 ST_DATA Sym *sym_free_first;
33 ST_DATA void **sym_pools;
34 ST_DATA int nb_sym_pools;
36 ST_DATA Sym *global_stack;
37 ST_DATA Sym *local_stack;
38 ST_DATA Sym *define_stack;
39 ST_DATA Sym *global_label_stack;
40 ST_DATA Sym *local_label_stack;
42 static Sym *all_cleanups, *current_cleanups, *pending_gotos;
43 static int ncleanups;
45 static int local_scope;
46 static int in_sizeof;
47 static int section_sym;
49 ST_DATA int vlas_in_scope; /* number of VLAs that are currently in scope */
50 ST_DATA int vla_sp_root_loc; /* vla_sp_loc for SP before any VLAs were pushed */
51 ST_DATA int vla_sp_loc; /* offset of the hidden local variable where the stack pointer is saved before it is modified for a VLA, so it can be restored later */
53 ST_DATA SValue __vstack[1+VSTACK_SIZE], *vtop, *pvtop;
55 ST_DATA int const_wanted; /* true if constant wanted */
56 ST_DATA int nocode_wanted; /* no code generation wanted */
57 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
58 #define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */
59 ST_DATA int global_expr; /* true if compound literals must be allocated globally (used during initializer parsing) */
60 ST_DATA CType func_vt; /* current function return type (used by return instruction) */
61 ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
62 ST_DATA int func_vc;
63 ST_DATA int last_line_num, last_ind, func_ind; /* debug last line number and pc */
64 ST_DATA const char *funcname;
65 ST_DATA int g_debug;
67 ST_DATA CType char_pointer_type, func_old_type, int_type, size_type, ptrdiff_type;
69 ST_DATA struct switch_t {
70 struct case_t {
71 int64_t v1, v2;
72 int sym;
73 } **p; int n; /* list of case ranges */
74 int def_sym; /* default symbol */
75 } *cur_switch; /* current switch */
77 #define MAX_TEMP_LOCAL_VARIABLE_NUMBER 0x4
78 /* list of temporary local variables on the stack in the current function. */
79 ST_DATA struct temp_local_variable {
80 int location; // offset on the stack, stored in SValue.c.i
81 short size;
82 short align;
83 } arr_temp_local_vars[MAX_TEMP_LOCAL_VARIABLE_NUMBER];
84 short nb_temp_local_vars;
86 /* ------------------------------------------------------------------------- */
88 static void gen_cast(CType *type);
89 static void gen_cast_s(int t);
90 static inline CType *pointed_type(CType *type);
91 static int is_compatible_types(CType *type1, CType *type2);
92 static int parse_btype(CType *type, AttributeDef *ad);
93 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td);
94 static void parse_expr_type(CType *type);
95 static void init_putv(CType *type, Section *sec, unsigned long c);
96 static void decl_initializer(CType *type, Section *sec, unsigned long c, int flags);
97 static void block(int *bsym, int *csym, int is_expr);
98 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
99 static void decl(int l);
100 static int decl0(int l, int is_for_loop_init, Sym *);
101 static void expr_eq(void);
102 static void vla_runtime_type_size(CType *type, int *a);
103 static void vla_sp_restore(void);
104 static void vla_sp_restore_root(void);
105 static int is_compatible_unqualified_types(CType *type1, CType *type2);
106 static inline int64_t expr_const64(void);
107 static void vpush64(int ty, unsigned long long v);
108 static void vpush(CType *type);
109 static int gvtst(int inv, int t);
110 static void gen_inline_functions(TCCState *s);
111 static void skip_or_save_block(TokenString **str);
112 static void gv_dup(void);
113 static int get_temp_local_var(int size,int align);
114 static void clear_temp_local_var_list();
117 static void reset_local_scope(void)
119 if (current_cleanups)
120 tcc_error("ICE current_cleanups");
121 sym_pop(&all_cleanups, NULL, 0);
122 local_scope = 0;
125 ST_INLN int is_float(int t)
127 int bt;
128 bt = t & VT_BTYPE;
129 return bt == VT_LDOUBLE || bt == VT_DOUBLE || bt == VT_FLOAT || bt == VT_QFLOAT;
132 /* we use our own 'finite' function to avoid potential problems with
133 non-standard math libs */
134 /* XXX: endianness dependent */
135 ST_FUNC int ieee_finite(double d)
137 int p[4];
138 memcpy(p, &d, sizeof(double));
139 return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
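/* note: on a little-endian host p[1] is the high word of the double
   (sign bit, 11 exponent bits, top of the mantissa). OR-ing with
   0x800fffff forces every bit except the exponent to 1, so the +1
   wraps to 0 exactly when the exponent is all ones (Inf/NaN); the
   shift then yields 0 for non-finite values and 1 for finite ones. */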
142 /* compiling intel long double natively */
143 #if (defined __i386__ || defined __x86_64__) \
144 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
145 # define TCC_IS_NATIVE_387
146 #endif
148 ST_FUNC void test_lvalue(void)
150 if (!(vtop->r & VT_LVAL))
151 expect("lvalue");
154 ST_FUNC void check_vstack(void)
156 if (pvtop != vtop)
157 tcc_error("internal compiler error: vstack leak (%d)", vtop - pvtop);
160 /* ------------------------------------------------------------------------- */
161 /* vstack debugging aid */
163 #if 0
164 void pv (const char *lbl, int a, int b)
166 int i;
167 for (i = a; i < a + b; ++i) {
168 SValue *p = &vtop[-i];
169 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
170 lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
173 #endif
175 /* ------------------------------------------------------------------------- */
176 /* start of translation unit info */
177 ST_FUNC void tcc_debug_start(TCCState *s1)
179 if (s1->do_debug) {
180 char buf[512];
182 /* file info: full path + filename */
183 section_sym = put_elf_sym(symtab_section, 0, 0,
184 ELFW(ST_INFO)(STB_LOCAL, STT_SECTION), 0,
185 text_section->sh_num, NULL);
186 getcwd(buf, sizeof(buf));
187 #ifdef _WIN32
188 normalize_slashes(buf);
189 #endif
190 pstrcat(buf, sizeof(buf), "/");
191 put_stabs_r(buf, N_SO, 0, 0,
192 text_section->data_offset, text_section, section_sym);
193 put_stabs_r(file->filename, N_SO, 0, 0,
194 text_section->data_offset, text_section, section_sym);
195 last_ind = 0;
196 last_line_num = 0;
199 /* an elf symbol of type STT_FILE must be emitted so that STB_LOCAL
200 symbols can be safely used */
201 put_elf_sym(symtab_section, 0, 0,
202 ELFW(ST_INFO)(STB_LOCAL, STT_FILE), 0,
203 SHN_ABS, file->filename);
206 /* put end of translation unit info */
207 ST_FUNC void tcc_debug_end(TCCState *s1)
209 if (!s1->do_debug)
210 return;
211 put_stabs_r(NULL, N_SO, 0, 0,
212 text_section->data_offset, text_section, section_sym);
216 /* generate line number info */
217 ST_FUNC void tcc_debug_line(TCCState *s1)
219 if (!s1->do_debug)
220 return;
221 if ((last_line_num != file->line_num || last_ind != ind)) {
222 put_stabn(N_SLINE, 0, file->line_num, ind - func_ind);
223 last_ind = ind;
224 last_line_num = file->line_num;
228 /* put function symbol */
229 ST_FUNC void tcc_debug_funcstart(TCCState *s1, Sym *sym)
231 char buf[512];
233 if (!s1->do_debug)
234 return;
236 /* stabs info */
237 /* XXX: we put a dummy type here */
238 snprintf(buf, sizeof(buf), "%s:%c1",
239 funcname, sym->type.t & VT_STATIC ? 'f' : 'F');
240 put_stabs_r(buf, N_FUN, 0, file->line_num, 0,
241 cur_text_section, sym->c);
242 /* //gr gdb wants a line at the function */
243 put_stabn(N_SLINE, 0, file->line_num, 0);
245 last_ind = 0;
246 last_line_num = 0;
249 /* put function size */
250 ST_FUNC void tcc_debug_funcend(TCCState *s1, int size)
252 if (!s1->do_debug)
253 return;
254 put_stabn(N_FUN, 0, 0, size);
257 /* ------------------------------------------------------------------------- */
258 ST_FUNC int tccgen_compile(TCCState *s1)
260 cur_text_section = NULL;
261 funcname = "";
262 anon_sym = SYM_FIRST_ANOM;
263 section_sym = 0;
264 const_wanted = 0;
265 nocode_wanted = 0x80000000;
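/* setting the top bit suppresses code generation while parsing at file
   scope; NODATA_WANTED uses a signed '> 0' test, so with only this bit
   set the value is negative and file-scope static data is still emitted */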
267 /* define some often used types */
268 int_type.t = VT_INT;
269 char_pointer_type.t = VT_BYTE;
270 mk_pointer(&char_pointer_type);
271 #if PTR_SIZE == 4
272 size_type.t = VT_INT | VT_UNSIGNED;
273 ptrdiff_type.t = VT_INT;
274 #elif LONG_SIZE == 4
275 size_type.t = VT_LLONG | VT_UNSIGNED;
276 ptrdiff_type.t = VT_LLONG;
277 #else
278 size_type.t = VT_LONG | VT_LLONG | VT_UNSIGNED;
279 ptrdiff_type.t = VT_LONG | VT_LLONG;
280 #endif
281 func_old_type.t = VT_FUNC;
282 func_old_type.ref = sym_push(SYM_FIELD, &int_type, 0, 0);
283 func_old_type.ref->f.func_call = FUNC_CDECL;
284 func_old_type.ref->f.func_type = FUNC_OLD;
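/* func_old_type describes an old-style (unprototyped) cdecl function
   returning int; it is used for implicitly declared functions and for
   calls to runtime helpers such as the long long division routines */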
286 tcc_debug_start(s1);
288 #ifdef TCC_TARGET_ARM
289 arm_init(s1);
290 #endif
292 #ifdef INC_DEBUG
293 printf("%s: **** new file\n", file->filename);
294 #endif
296 parse_flags = PARSE_FLAG_PREPROCESS | PARSE_FLAG_TOK_NUM | PARSE_FLAG_TOK_STR;
297 next();
298 decl(VT_CONST);
299 gen_inline_functions(s1);
300 check_vstack();
301 /* end of translation unit info */
302 tcc_debug_end(s1);
303 return 0;
306 /* ------------------------------------------------------------------------- */
307 ST_FUNC ElfSym *elfsym(Sym *s)
309 if (!s || !s->c)
310 return NULL;
311 return &((ElfSym *)symtab_section->data)[s->c];
314 /* apply storage attributes to Elf symbol */
315 ST_FUNC void update_storage(Sym *sym)
317 ElfSym *esym;
318 int sym_bind, old_sym_bind;
320 esym = elfsym(sym);
321 if (!esym)
322 return;
324 if (sym->a.visibility)
325 esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1))
326 | sym->a.visibility;
328 if (sym->type.t & VT_STATIC)
329 sym_bind = STB_LOCAL;
330 else if (sym->a.weak)
331 sym_bind = STB_WEAK;
332 else
333 sym_bind = STB_GLOBAL;
334 old_sym_bind = ELFW(ST_BIND)(esym->st_info);
335 if (sym_bind != old_sym_bind) {
336 esym->st_info = ELFW(ST_INFO)(sym_bind, ELFW(ST_TYPE)(esym->st_info));
339 #ifdef TCC_TARGET_PE
340 if (sym->a.dllimport)
341 esym->st_other |= ST_PE_IMPORT;
342 if (sym->a.dllexport)
343 esym->st_other |= ST_PE_EXPORT;
344 #endif
346 #if 0
347 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
348 get_tok_str(sym->v, NULL),
349 sym_bind == STB_WEAK ? 'w' : sym_bind == STB_LOCAL ? 'l' : 'g',
350 sym->a.visibility,
351 sym->a.dllexport,
352 sym->a.dllimport
354 #endif
357 /* ------------------------------------------------------------------------- */
358 /* update sym->c so that it points to an external symbol in section
359 'section' with value 'value' */
361 ST_FUNC void put_extern_sym2(Sym *sym, int sh_num,
362 addr_t value, unsigned long size,
363 int can_add_underscore)
365 int sym_type, sym_bind, info, other, t;
366 ElfSym *esym;
367 const char *name;
368 char buf1[256];
369 #ifdef CONFIG_TCC_BCHECK
370 char buf[32];
371 #endif
373 if (!sym->c) {
374 name = get_tok_str(sym->v, NULL);
375 #ifdef CONFIG_TCC_BCHECK
376 if (tcc_state->do_bounds_check) {
377 /* XXX: avoid doing that for statics ? */
378 /* if bound checking is activated, we change some function
379 names by adding the "__bound" prefix */
380 switch(sym->v) {
381 #ifdef TCC_TARGET_PE
382 /* XXX: we rely only on malloc hooks */
383 case TOK_malloc:
384 case TOK_free:
385 case TOK_realloc:
386 case TOK_memalign:
387 case TOK_calloc:
388 #endif
389 case TOK_memcpy:
390 case TOK_memmove:
391 case TOK_memset:
392 case TOK_strlen:
393 case TOK_strcpy:
394 case TOK_alloca:
395 strcpy(buf, "__bound_");
396 strcat(buf, name);
397 name = buf;
398 break;
401 #endif
402 t = sym->type.t;
403 if ((t & VT_BTYPE) == VT_FUNC) {
404 sym_type = STT_FUNC;
405 } else if ((t & VT_BTYPE) == VT_VOID) {
406 sym_type = STT_NOTYPE;
407 } else {
408 sym_type = STT_OBJECT;
410 if (t & VT_STATIC)
411 sym_bind = STB_LOCAL;
412 else
413 sym_bind = STB_GLOBAL;
414 other = 0;
415 #ifdef TCC_TARGET_PE
416 if (sym_type == STT_FUNC && sym->type.ref) {
417 Sym *ref = sym->type.ref;
418 if (ref->a.nodecorate) {
419 can_add_underscore = 0;
421 if (ref->f.func_call == FUNC_STDCALL && can_add_underscore) {
422 sprintf(buf1, "_%s@%d", name, ref->f.func_args * PTR_SIZE);
423 name = buf1;
424 other |= ST_PE_STDCALL;
425 can_add_underscore = 0;
428 #endif
429 if (tcc_state->leading_underscore && can_add_underscore) {
430 buf1[0] = '_';
431 pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
432 name = buf1;
434 if (sym->asm_label)
435 name = get_tok_str(sym->asm_label, NULL);
436 info = ELFW(ST_INFO)(sym_bind, sym_type);
437 sym->c = put_elf_sym(symtab_section, value, size, info, other, sh_num, name);
438 } else {
439 esym = elfsym(sym);
440 esym->st_value = value;
441 esym->st_size = size;
442 esym->st_shndx = sh_num;
444 update_storage(sym);
447 ST_FUNC void put_extern_sym(Sym *sym, Section *section,
448 addr_t value, unsigned long size)
450 int sh_num = section ? section->sh_num : SHN_UNDEF;
451 put_extern_sym2(sym, sh_num, value, size, 1);
454 /* add a new relocation entry to symbol 'sym' in section 's' */
455 ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
456 addr_t addend)
458 int c = 0;
460 if (nocode_wanted && s == cur_text_section)
461 return;
463 if (sym) {
464 if (0 == sym->c)
465 put_extern_sym(sym, NULL, 0, 0);
466 c = sym->c;
469 /* now we can add ELF relocation info */
470 put_elf_reloca(symtab_section, s, offset, type, c, addend);
473 #if PTR_SIZE == 4
474 ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
476 greloca(s, sym, offset, type, 0);
478 #endif
480 /* ------------------------------------------------------------------------- */
481 /* symbol allocator */
482 static Sym *__sym_malloc(void)
484 Sym *sym_pool, *sym, *last_sym;
485 int i;
487 sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
488 dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);
490 last_sym = sym_free_first;
491 sym = sym_pool;
492 for(i = 0; i < SYM_POOL_NB; i++) {
493 sym->next = last_sym;
494 last_sym = sym;
495 sym++;
497 sym_free_first = last_sym;
498 return last_sym;
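/* the new pool has been threaded onto the free list in reverse order;
   the returned element is the current list head, which sym_malloc()
   immediately unlinks */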
501 static inline Sym *sym_malloc(void)
503 Sym *sym;
504 #ifndef SYM_DEBUG
505 sym = sym_free_first;
506 if (!sym)
507 sym = __sym_malloc();
508 sym_free_first = sym->next;
509 return sym;
510 #else
511 sym = tcc_malloc(sizeof(Sym));
512 return sym;
513 #endif
516 ST_INLN void sym_free(Sym *sym)
518 #ifndef SYM_DEBUG
519 sym->next = sym_free_first;
520 sym_free_first = sym;
521 #else
522 tcc_free(sym);
523 #endif
526 /* push, without hashing */
527 ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, int c)
529 Sym *s;
531 s = sym_malloc();
532 memset(s, 0, sizeof *s);
533 s->v = v;
534 s->type.t = t;
535 s->c = c;
536 /* add in stack */
537 s->prev = *ps;
538 *ps = s;
539 return s;
542 /* find a symbol and return its associated structure. 's' is the top
543 of the symbol stack */
544 ST_FUNC Sym *sym_find2(Sym *s, int v)
546 while (s) {
547 if (s->v == v)
548 return s;
549 else if (s->v == -1)
550 return NULL;
551 s = s->prev;
553 return NULL;
556 /* structure lookup */
557 ST_INLN Sym *struct_find(int v)
559 v -= TOK_IDENT;
560 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
561 return NULL;
562 return table_ident[v]->sym_struct;
565 /* find an identifier */
566 ST_INLN Sym *sym_find(int v)
568 v -= TOK_IDENT;
569 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
570 return NULL;
571 return table_ident[v]->sym_identifier;
574 static int sym_scope(Sym *s)
576 if (IS_ENUM_VAL (s->type.t))
577 return s->type.ref->sym_scope;
578 else
579 return s->sym_scope;
582 /* push a given symbol on the symbol stack */
583 ST_FUNC Sym *sym_push(int v, CType *type, int r, int c)
585 Sym *s, **ps;
586 TokenSym *ts;
588 if (local_stack)
589 ps = &local_stack;
590 else
591 ps = &global_stack;
592 s = sym_push2(ps, v, type->t, c);
593 s->type.ref = type->ref;
594 s->r = r;
595 /* don't record fields or anonymous symbols */
596 /* XXX: simplify */
597 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
598 /* record symbol in token array */
599 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
600 if (v & SYM_STRUCT)
601 ps = &ts->sym_struct;
602 else
603 ps = &ts->sym_identifier;
604 s->prev_tok = *ps;
605 *ps = s;
606 s->sym_scope = local_scope;
607 if (s->prev_tok && sym_scope(s->prev_tok) == s->sym_scope)
608 tcc_error("redeclaration of '%s'",
609 get_tok_str(v & ~SYM_STRUCT, NULL));
611 return s;
614 /* push a global identifier */
615 ST_FUNC Sym *global_identifier_push(int v, int t, int c)
617 Sym *s, **ps;
618 s = sym_push2(&global_stack, v, t, c);
619 /* don't record anonymous symbol */
620 if (v < SYM_FIRST_ANOM) {
621 ps = &table_ident[v - TOK_IDENT]->sym_identifier;
622 /* modify the topmost local identifier, so that
623 sym_identifier will point to 's' when popped */
624 while (*ps != NULL && (*ps)->sym_scope)
625 ps = &(*ps)->prev_tok;
626 s->prev_tok = *ps;
627 *ps = s;
629 return s;
632 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
633 pop them yet from the list, but do remove them from the token array. */
634 ST_FUNC void sym_pop(Sym **ptop, Sym *b, int keep)
636 Sym *s, *ss, **ps;
637 TokenSym *ts;
638 int v;
640 s = *ptop;
641 while(s != b) {
642 ss = s->prev;
643 v = s->v;
644 /* remove symbol in token array */
645 /* XXX: simplify */
646 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
647 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
648 if (v & SYM_STRUCT)
649 ps = &ts->sym_struct;
650 else
651 ps = &ts->sym_identifier;
652 *ps = s->prev_tok;
654 if (!keep)
655 sym_free(s);
656 s = ss;
658 if (!keep)
659 *ptop = b;
662 /* ------------------------------------------------------------------------- */
664 static void vsetc(CType *type, int r, CValue *vc)
666 int v;
668 if (vtop >= vstack + (VSTACK_SIZE - 1))
669 tcc_error("memory full (vstack)");
670 /* cannot leave the value in cpu flags if other instructions are generated. Also
671 avoid leaving VT_JMP anywhere except on the top of the stack
672 because it would complicate the code generator.
674 Don't do this when nocode_wanted. vtop might come from
675 !nocode_wanted regions (see 88_codeopt.c) and transforming
676 it to a register without actually generating code is wrong
677 as its value might still be used for real. All values
678 we push under nocode_wanted will eventually be popped
679 again, so that the VT_CMP/VT_JMP value will be in vtop
680 when code is unsuppressed again.
682 Same logic below in vswap(); */
683 if (vtop >= vstack && !nocode_wanted) {
684 v = vtop->r & VT_VALMASK;
685 if (v == VT_CMP || (v & ~1) == VT_JMP)
686 gv(RC_INT);
689 vtop++;
690 vtop->type = *type;
691 vtop->r = r;
692 vtop->r2 = VT_CONST;
693 vtop->c = *vc;
694 vtop->sym = NULL;
697 ST_FUNC void vswap(void)
699 SValue tmp;
700 /* cannot vswap cpu flags. See comment at vsetc() above */
701 if (vtop >= vstack && !nocode_wanted) {
702 int v = vtop->r & VT_VALMASK;
703 if (v == VT_CMP || (v & ~1) == VT_JMP)
704 gv(RC_INT);
706 tmp = vtop[0];
707 vtop[0] = vtop[-1];
708 vtop[-1] = tmp;
711 /* pop stack value */
712 ST_FUNC void vpop(void)
714 int v;
715 v = vtop->r & VT_VALMASK;
716 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
717 /* for x86, we need to pop the FP stack */
718 if (v == TREG_ST0) {
719 o(0xd8dd); /* fstp %st(0) */
720 } else
721 #endif
722 if (v == VT_JMP || v == VT_JMPI) {
723 /* need to put correct jump if && or || without test */
724 gsym(vtop->c.i);
726 vtop--;
729 /* push a constant of type "type" whose value is not used */
730 ST_FUNC void vpush(CType *type)
732 vset(type, VT_CONST, 0);
735 /* push integer constant */
736 ST_FUNC void vpushi(int v)
738 CValue cval;
739 cval.i = v;
740 vsetc(&int_type, VT_CONST, &cval);
743 /* push a pointer sized constant */
744 static void vpushs(addr_t v)
746 CValue cval;
747 cval.i = v;
748 vsetc(&size_type, VT_CONST, &cval);
751 /* push an arbitrary 64-bit constant */
752 ST_FUNC void vpush64(int ty, unsigned long long v)
754 CValue cval;
755 CType ctype;
756 ctype.t = ty;
757 ctype.ref = NULL;
758 cval.i = v;
759 vsetc(&ctype, VT_CONST, &cval);
762 /* push long long constant */
763 static inline void vpushll(long long v)
765 vpush64(VT_LLONG, v);
768 ST_FUNC void vset(CType *type, int r, int v)
770 CValue cval;
772 cval.i = v;
773 vsetc(type, r, &cval);
776 static void vseti(int r, int v)
778 CType type;
779 type.t = VT_INT;
780 type.ref = NULL;
781 vset(&type, r, v);
784 ST_FUNC void vpushv(SValue *v)
786 if (vtop >= vstack + (VSTACK_SIZE - 1))
787 tcc_error("memory full (vstack)");
788 vtop++;
789 *vtop = *v;
792 static void vdup(void)
794 vpushv(vtop);
797 /* rotate n first stack elements to the bottom
798 I1 ... In -> I2 ... In I1 [top is right]
800 ST_FUNC void vrotb(int n)
802 int i;
803 SValue tmp;
805 tmp = vtop[-n + 1];
806 for(i=-n+1;i!=0;i++)
807 vtop[i] = vtop[i+1];
808 vtop[0] = tmp;
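/* example: with A B C on the stack (C on top), vrotb(3) yields B C A,
   i.e. the deepest of the three elements is moved to the top */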
811 /* rotate the n elements before entry e towards the top
812 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
814 ST_FUNC void vrote(SValue *e, int n)
816 int i;
817 SValue tmp;
819 tmp = *e;
820 for(i = 0;i < n - 1; i++)
821 e[-i] = e[-i - 1];
822 e[-n + 1] = tmp;
825 /* rotate n first stack elements to the top
826 I1 ... In -> In I1 ... I(n-1) [top is right]
828 ST_FUNC void vrott(int n)
830 vrote(vtop, n);
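/* example: with A B C on the stack (C on top), vrott(3) yields C A B,
   i.e. the top element is moved below the other two */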
833 /* push a symbol value of TYPE */
834 static inline void vpushsym(CType *type, Sym *sym)
836 CValue cval;
837 cval.i = 0;
838 vsetc(type, VT_CONST | VT_SYM, &cval);
839 vtop->sym = sym;
842 /* Return a static symbol pointing to a section */
843 ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
845 int v;
846 Sym *sym;
848 v = anon_sym++;
849 sym = global_identifier_push(v, type->t | VT_STATIC, 0);
850 sym->type.ref = type->ref;
851 sym->r = VT_CONST | VT_SYM;
852 put_extern_sym(sym, sec, offset, size);
853 return sym;
856 /* push a reference to a section offset by adding a dummy symbol */
857 static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
859 vpushsym(type, get_sym_ref(type, sec, offset, size));
862 /* define a new external reference to a symbol 'v' with type 'type' */
863 ST_FUNC Sym *external_global_sym(int v, CType *type, int r)
865 Sym *s;
867 s = sym_find(v);
868 if (!s) {
869 /* push forward reference */
870 s = global_identifier_push(v, type->t | VT_EXTERN, 0);
871 s->type.ref = type->ref;
872 s->r = r | VT_CONST | VT_SYM;
873 } else if (IS_ASM_SYM(s)) {
874 s->type.t = type->t | (s->type.t & VT_EXTERN);
875 s->type.ref = type->ref;
876 update_storage(s);
878 return s;
881 /* Merge symbol attributes. */
882 static void merge_symattr(struct SymAttr *sa, struct SymAttr *sa1)
884 if (sa1->aligned && !sa->aligned)
885 sa->aligned = sa1->aligned;
886 sa->packed |= sa1->packed;
887 sa->weak |= sa1->weak;
888 if (sa1->visibility != STV_DEFAULT) {
889 int vis = sa->visibility;
890 if (vis == STV_DEFAULT
891 || vis > sa1->visibility)
892 vis = sa1->visibility;
893 sa->visibility = vis;
895 sa->dllexport |= sa1->dllexport;
896 sa->nodecorate |= sa1->nodecorate;
897 sa->dllimport |= sa1->dllimport;
900 /* Merge function attributes. */
901 static void merge_funcattr(struct FuncAttr *fa, struct FuncAttr *fa1)
903 if (fa1->func_call && !fa->func_call)
904 fa->func_call = fa1->func_call;
905 if (fa1->func_type && !fa->func_type)
906 fa->func_type = fa1->func_type;
907 if (fa1->func_args && !fa->func_args)
908 fa->func_args = fa1->func_args;
911 /* Merge attributes. */
912 static void merge_attr(AttributeDef *ad, AttributeDef *ad1)
914 merge_symattr(&ad->a, &ad1->a);
915 merge_funcattr(&ad->f, &ad1->f);
917 if (ad1->section)
918 ad->section = ad1->section;
919 if (ad1->alias_target)
920 ad->alias_target = ad1->alias_target;
921 if (ad1->asm_label)
922 ad->asm_label = ad1->asm_label;
923 if (ad1->attr_mode)
924 ad->attr_mode = ad1->attr_mode;
927 /* Merge some type attributes. */
928 static void patch_type(Sym *sym, CType *type)
930 if (!(type->t & VT_EXTERN) || IS_ENUM_VAL(sym->type.t)) {
931 if (!(sym->type.t & VT_EXTERN))
932 tcc_error("redefinition of '%s'", get_tok_str(sym->v, NULL));
933 sym->type.t &= ~VT_EXTERN;
936 if (IS_ASM_SYM(sym)) {
937 /* stay static if both are static */
938 sym->type.t = type->t & (sym->type.t | ~VT_STATIC);
939 sym->type.ref = type->ref;
942 if (!is_compatible_types(&sym->type, type)) {
943 tcc_error("incompatible types for redefinition of '%s'",
944 get_tok_str(sym->v, NULL));
946 } else if ((sym->type.t & VT_BTYPE) == VT_FUNC) {
947 int static_proto = sym->type.t & VT_STATIC;
948 /* warn if static follows non-static function declaration */
949 if ((type->t & VT_STATIC) && !static_proto && !(type->t & VT_INLINE))
950 tcc_warning("static storage ignored for redefinition of '%s'",
951 get_tok_str(sym->v, NULL));
953 if (0 == (type->t & VT_EXTERN)) {
954 /* put complete type, use static from prototype */
955 sym->type.t = (type->t & ~VT_STATIC) | static_proto;
956 if (type->t & VT_INLINE)
957 sym->type.t = type->t;
958 sym->type.ref = type->ref;
961 } else {
962 if ((sym->type.t & VT_ARRAY) && type->ref->c >= 0) {
963 /* set array size if it was omitted in extern declaration */
964 if (sym->type.ref->c < 0)
965 sym->type.ref->c = type->ref->c;
966 else if (sym->type.ref->c != type->ref->c)
967 tcc_error("conflicting type for '%s'", get_tok_str(sym->v, NULL));
969 if ((type->t ^ sym->type.t) & VT_STATIC)
970 tcc_warning("storage mismatch for redefinition of '%s'",
971 get_tok_str(sym->v, NULL));
976 /* Merge some storage attributes. */
977 static void patch_storage(Sym *sym, AttributeDef *ad, CType *type)
979 if (type)
980 patch_type(sym, type);
982 #ifdef TCC_TARGET_PE
983 if (sym->a.dllimport != ad->a.dllimport)
984 tcc_error("incompatible dll linkage for redefinition of '%s'",
985 get_tok_str(sym->v, NULL));
986 #endif
987 merge_symattr(&sym->a, &ad->a);
988 if (ad->asm_label)
989 sym->asm_label = ad->asm_label;
990 update_storage(sym);
993 /* define a new external reference to a symbol 'v' */
994 static Sym *external_sym(int v, CType *type, int r, AttributeDef *ad)
996 Sym *s;
997 s = sym_find(v);
998 if (!s) {
999 /* push forward reference */
1000 s = sym_push(v, type, r | VT_CONST | VT_SYM, 0);
1001 s->type.t |= VT_EXTERN;
1002 s->a = ad->a;
1003 s->sym_scope = 0;
1004 } else {
1005 if (s->type.ref == func_old_type.ref) {
1006 s->type.ref = type->ref;
1007 s->r = r | VT_CONST | VT_SYM;
1008 s->type.t |= VT_EXTERN;
1010 patch_storage(s, ad, type);
1012 return s;
1015 /* push a reference to global symbol v */
1016 ST_FUNC void vpush_global_sym(CType *type, int v)
1018 vpushsym(type, external_global_sym(v, type, 0));
1021 /* save registers up to (vtop - n) stack entry */
1022 ST_FUNC void save_regs(int n)
1024 SValue *p, *p1;
1025 for(p = vstack, p1 = vtop - n; p <= p1; p++)
1026 save_reg(p->r);
1029 /* save r to the memory stack, and mark it as being free */
1030 ST_FUNC void save_reg(int r)
1032 save_reg_upstack(r, 0);
1035 /* save r to the memory stack, and mark it as being free,
1036 if seen up to (vtop - n) stack entry */
1037 ST_FUNC void save_reg_upstack(int r, int n)
1039 int l, saved, size, align;
1040 SValue *p, *p1, sv;
1041 CType *type;
1043 if ((r &= VT_VALMASK) >= VT_CONST)
1044 return;
1045 if (nocode_wanted)
1046 return;
1048 /* modify all stack values */
1049 saved = 0;
1050 l = 0;
1051 for(p = vstack, p1 = vtop - n; p <= p1; p++) {
1052 if ((p->r & VT_VALMASK) == r ||
1053 ((p->type.t & VT_BTYPE) == VT_LLONG && (p->r2 & VT_VALMASK) == r)) {
1054 /* must save value on stack if not already done */
1055 if (!saved) {
1056 /* NOTE: must reload 'r' because r might be equal to r2 */
1057 r = p->r & VT_VALMASK;
1058 /* store register in the stack */
1059 type = &p->type;
1060 if ((p->r & VT_LVAL) ||
1061 (!is_float(type->t) && (type->t & VT_BTYPE) != VT_LLONG))
1062 #if PTR_SIZE == 8
1063 type = &char_pointer_type;
1064 #else
1065 type = &int_type;
1066 #endif
1067 size = type_size(type, &align);
1068 l=get_temp_local_var(size,align);
1069 sv.type.t = type->t;
1070 sv.r = VT_LOCAL | VT_LVAL;
1071 sv.c.i = l;
1072 store(r, &sv);
1073 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1074 /* x86 specific: need to pop fp register ST0 if saved */
1075 if (r == TREG_ST0) {
1076 o(0xd8dd); /* fstp %st(0) */
1078 #endif
1079 #if PTR_SIZE == 4
1080 /* special long long case */
1081 if ((type->t & VT_BTYPE) == VT_LLONG) {
1082 sv.c.i += 4;
1083 store(p->r2, &sv);
1085 #endif
1086 saved = 1;
1088 /* mark that stack entry as being saved on the stack */
1089 if (p->r & VT_LVAL) {
1090 /* also clear the bounded flag because the
1091 relocation address of the function was stored in
1092 p->c.i */
1093 p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
1094 } else {
1095 p->r = lvalue_type(p->type.t) | VT_LOCAL;
1097 p->r2 = VT_CONST;
1098 p->c.i = l;
1103 #ifdef TCC_TARGET_ARM
1104 /* find a register of class 'rc2' with at most one reference on stack.
1105 * If none, call get_reg(rc) */
1106 ST_FUNC int get_reg_ex(int rc, int rc2)
1108 int r;
1109 SValue *p;
1111 for(r=0;r<NB_REGS;r++) {
1112 if (reg_classes[r] & rc2) {
1113 int n;
1114 n=0;
1115 for(p = vstack; p <= vtop; p++) {
1116 if ((p->r & VT_VALMASK) == r ||
1117 (p->r2 & VT_VALMASK) == r)
1118 n++;
1120 if (n <= 1)
1121 return r;
1124 return get_reg(rc);
1126 #endif
1128 /* find a free register of class 'rc'. If none, save one register */
1129 ST_FUNC int get_reg(int rc)
1131 int r;
1132 SValue *p;
1134 /* find a free register */
1135 for(r=0;r<NB_REGS;r++) {
1136 if (reg_classes[r] & rc) {
1137 if (nocode_wanted)
1138 return r;
1139 for(p=vstack;p<=vtop;p++) {
1140 if ((p->r & VT_VALMASK) == r ||
1141 (p->r2 & VT_VALMASK) == r)
1142 goto notfound;
1144 return r;
1146 notfound: ;
1149 /* no register left : free the first one on the stack (VERY
1150 IMPORTANT to start from the bottom to ensure that we don't
1151 spill registers used in gen_opi()) */
1152 for(p=vstack;p<=vtop;p++) {
1153 /* look at second register (if long long) */
1154 r = p->r2 & VT_VALMASK;
1155 if (r < VT_CONST && (reg_classes[r] & rc))
1156 goto save_found;
1157 r = p->r & VT_VALMASK;
1158 if (r < VT_CONST && (reg_classes[r] & rc)) {
1159 save_found:
1160 save_reg(r);
1161 return r;
1164 /* Should never come here */
1165 return -1;
1168 /* find a free temporary local variable matching the size and alignment (return its offset on the stack). If none is free, add a new temporary stack variable. */
1169 static int get_temp_local_var(int size,int align){
1170 int i;
1171 struct temp_local_variable *temp_var;
1172 int found_var;
1173 SValue *p;
1174 int r;
1175 char free;
1176 char found;
1177 found=0;
1178 for(i=0;i<nb_temp_local_vars;i++){
1179 temp_var=&arr_temp_local_vars[i];
1180 if(temp_var->size<size||align!=temp_var->align){
1181 continue;
1183 /* check if temp_var is free */
1184 free=1;
1185 for(p=vstack;p<=vtop;p++) {
1186 r=p->r&VT_VALMASK;
1187 if(r==VT_LOCAL||r==VT_LLOCAL){
1188 if(p->c.i==temp_var->location){
1189 free=0;
1190 break;
1194 if(free){
1195 found_var=temp_var->location;
1196 found=1;
1197 break;
1200 if(!found){
1201 loc = (loc - size) & -align;
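/* the expression above rounds the new frame offset down to a multiple
   of 'align' (align is a power of two, so -align is the mask ~(align - 1)) */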
1202 if(nb_temp_local_vars<MAX_TEMP_LOCAL_VARIABLE_NUMBER){
1203 temp_var=&arr_temp_local_vars[i];
1204 temp_var->location=loc;
1205 temp_var->size=size;
1206 temp_var->align=align;
1207 nb_temp_local_vars++;
1209 found_var=loc;
1211 return found_var;
1214 static void clear_temp_local_var_list(){
1215 nb_temp_local_vars=0;
1218 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1219 if needed */
1220 static void move_reg(int r, int s, int t)
1222 SValue sv;
1224 if (r != s) {
1225 save_reg(r);
1226 sv.type.t = t;
1227 sv.type.ref = NULL;
1228 sv.r = s;
1229 sv.c.i = 0;
1230 load(r, &sv);
1234 /* get address of vtop (vtop MUST BE an lvalue) */
1235 ST_FUNC void gaddrof(void)
1237 vtop->r &= ~VT_LVAL;
1238 /* tricky: if saved lvalue, then we can go back to lvalue */
1239 if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
1240 vtop->r = (vtop->r & ~(VT_VALMASK | VT_LVAL_TYPE)) | VT_LOCAL | VT_LVAL;
1245 #ifdef CONFIG_TCC_BCHECK
1246 /* generate lvalue bound code */
1247 static void gbound(void)
1249 int lval_type;
1250 CType type1;
1252 vtop->r &= ~VT_MUSTBOUND;
1253 /* if lvalue, then use checking code before dereferencing */
1254 if (vtop->r & VT_LVAL) {
1255 /* if not VT_BOUNDED value, then make one */
1256 if (!(vtop->r & VT_BOUNDED)) {
1257 lval_type = vtop->r & (VT_LVAL_TYPE | VT_LVAL);
1258 /* must save type because we must set it to int to get pointer */
1259 type1 = vtop->type;
1260 vtop->type.t = VT_PTR;
1261 gaddrof();
1262 vpushi(0);
1263 gen_bounded_ptr_add();
1264 vtop->r |= lval_type;
1265 vtop->type = type1;
1267 /* then check for dereferencing */
1268 gen_bounded_ptr_deref();
1271 #endif
1273 static void incr_bf_adr(int o)
1275 vtop->type = char_pointer_type;
1276 gaddrof();
1277 vpushi(o);
1278 gen_op('+');
1279 vtop->type.t = (vtop->type.t & ~(VT_BTYPE|VT_DEFSIGN))
1280 | (VT_BYTE|VT_UNSIGNED);
1281 vtop->r = (vtop->r & ~VT_LVAL_TYPE)
1282 | (VT_LVAL_BYTE|VT_LVAL_UNSIGNED|VT_LVAL);
1285 /* single-byte load mode for packed or otherwise unaligned bitfields */
1286 static void load_packed_bf(CType *type, int bit_pos, int bit_size)
1288 int n, o, bits;
1289 save_reg_upstack(vtop->r, 1);
1290 vpush64(type->t & VT_BTYPE, 0); // B X
1291 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
1292 do {
1293 vswap(); // X B
1294 incr_bf_adr(o);
1295 vdup(); // X B B
1296 n = 8 - bit_pos;
1297 if (n > bit_size)
1298 n = bit_size;
1299 if (bit_pos)
1300 vpushi(bit_pos), gen_op(TOK_SHR), bit_pos = 0; // X B Y
1301 if (n < 8)
1302 vpushi((1 << n) - 1), gen_op('&');
1303 gen_cast(type);
1304 if (bits)
1305 vpushi(bits), gen_op(TOK_SHL);
1306 vrotb(3); // B Y X
1307 gen_op('|'); // B X
1308 bits += n, bit_size -= n, o = 1;
1309 } while (bit_size);
1310 vswap(), vpop();
1311 if (!(type->t & VT_UNSIGNED)) {
1312 n = ((type->t & VT_BTYPE) == VT_LLONG ? 64 : 32) - bits;
1313 vpushi(n), gen_op(TOK_SHL);
1314 vpushi(n), gen_op(TOK_SAR);
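/* the SHL/SAR pair above moves the accumulated bits to the top of the
   word and back, sign-extending the result for signed bitfields */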
1318 /* single-byte store mode for packed or otherwise unaligned bitfields */
1319 static void store_packed_bf(int bit_pos, int bit_size)
1321 int bits, n, o, m, c;
1323 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1324 vswap(); // X B
1325 save_reg_upstack(vtop->r, 1);
1326 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
1327 do {
1328 incr_bf_adr(o); // X B
1329 vswap(); //B X
1330 c ? vdup() : gv_dup(); // B V X
1331 vrott(3); // X B V
1332 if (bits)
1333 vpushi(bits), gen_op(TOK_SHR);
1334 if (bit_pos)
1335 vpushi(bit_pos), gen_op(TOK_SHL);
1336 n = 8 - bit_pos;
1337 if (n > bit_size)
1338 n = bit_size;
1339 if (n < 8) {
1340 m = ((1 << n) - 1) << bit_pos;
1341 vpushi(m), gen_op('&'); // X B V1
1342 vpushv(vtop-1); // X B V1 B
1343 vpushi(m & 0x80 ? ~m & 0x7f : ~m);
1344 gen_op('&'); // X B V1 B1
1345 gen_op('|'); // X B V2
1347 vdup(), vtop[-1] = vtop[-2]; // X B B V2
1348 vstore(), vpop(); // X B
1349 bits += n, bit_size -= n, bit_pos = 0, o = 1;
1350 } while (bit_size);
1351 vpop(), vpop();
1354 static int adjust_bf(SValue *sv, int bit_pos, int bit_size)
1356 int t;
1357 if (0 == sv->type.ref)
1358 return 0;
1359 t = sv->type.ref->auxtype;
1360 if (t != -1 && t != VT_STRUCT) {
1361 sv->type.t = (sv->type.t & ~VT_BTYPE) | t;
1362 sv->r = (sv->r & ~VT_LVAL_TYPE) | lvalue_type(sv->type.t);
1364 return t;
1367 /* store vtop in a register belonging to class 'rc'. lvalues are
1368 converted to values. Cannot be used if the value cannot be
1369 converted to a register value (such as structures). */
1370 ST_FUNC int gv(int rc)
1372 int r, bit_pos, bit_size, size, align, rc2;
1374 /* NOTE: get_reg can modify vstack[] */
1375 if (vtop->type.t & VT_BITFIELD) {
1376 CType type;
1378 bit_pos = BIT_POS(vtop->type.t);
1379 bit_size = BIT_SIZE(vtop->type.t);
1380 /* remove bit field info to avoid loops */
1381 vtop->type.t &= ~VT_STRUCT_MASK;
1383 type.ref = NULL;
1384 type.t = vtop->type.t & VT_UNSIGNED;
1385 if ((vtop->type.t & VT_BTYPE) == VT_BOOL)
1386 type.t |= VT_UNSIGNED;
1388 r = adjust_bf(vtop, bit_pos, bit_size);
1390 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
1391 type.t |= VT_LLONG;
1392 else
1393 type.t |= VT_INT;
1395 if (r == VT_STRUCT) {
1396 load_packed_bf(&type, bit_pos, bit_size);
1397 } else {
1398 int bits = (type.t & VT_BTYPE) == VT_LLONG ? 64 : 32;
1399 /* cast to int to propagate signedness in following ops */
1400 gen_cast(&type);
1401 /* generate shifts */
1402 vpushi(bits - (bit_pos + bit_size));
1403 gen_op(TOK_SHL);
1404 vpushi(bits - bit_size);
1405 /* NOTE: transformed to SHR if unsigned */
1406 gen_op(TOK_SAR);
1408 r = gv(rc);
1409 } else {
1410 if (is_float(vtop->type.t) &&
1411 (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1412 unsigned long offset;
1413 /* CPUs usually cannot use float constants, so we store them
1414 generically in the data segment */
1415 size = type_size(&vtop->type, &align);
1416 if (NODATA_WANTED)
1417 size = 0, align = 1;
1418 offset = section_add(data_section, size, align);
1419 vpush_ref(&vtop->type, data_section, offset, size);
1420 vswap();
1421 init_putv(&vtop->type, data_section, offset);
1422 vtop->r |= VT_LVAL;
1424 #ifdef CONFIG_TCC_BCHECK
1425 if (vtop->r & VT_MUSTBOUND)
1426 gbound();
1427 #endif
1429 r = vtop->r & VT_VALMASK;
1430 rc2 = (rc & RC_FLOAT) ? RC_FLOAT : RC_INT;
1431 #ifndef TCC_TARGET_ARM64
1432 if (rc == RC_IRET)
1433 rc2 = RC_LRET;
1434 #ifdef TCC_TARGET_X86_64
1435 else if (rc == RC_FRET)
1436 rc2 = RC_QRET;
1437 #endif
1438 #endif
1439 /* need to reload if:
1440 - constant
1441 - lvalue (need to dereference pointer)
1442 - already a register, but not in the right class */
1443 if (r >= VT_CONST
1444 || (vtop->r & VT_LVAL)
1445 || !(reg_classes[r] & rc)
1446 #if PTR_SIZE == 8
1447 || ((vtop->type.t & VT_BTYPE) == VT_QLONG && !(reg_classes[vtop->r2] & rc2))
1448 || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT && !(reg_classes[vtop->r2] & rc2))
1449 #else
1450 || ((vtop->type.t & VT_BTYPE) == VT_LLONG && !(reg_classes[vtop->r2] & rc2))
1451 #endif
1454 r = get_reg(rc);
1455 #if PTR_SIZE == 8
1456 if (((vtop->type.t & VT_BTYPE) == VT_QLONG) || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT)) {
1457 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
1458 #else
1459 if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
1460 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
1461 unsigned long long ll;
1462 #endif
1463 int r2, original_type;
1464 original_type = vtop->type.t;
1465 /* two register type load : expand to two words
1466 temporarily */
1467 #if PTR_SIZE == 4
1468 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1469 /* load constant */
1470 ll = vtop->c.i;
1471 vtop->c.i = ll; /* first word */
1472 load(r, vtop);
1473 vtop->r = r; /* save register value */
1474 vpushi(ll >> 32); /* second word */
1475 } else
1476 #endif
1477 if (vtop->r & VT_LVAL) {
1478 /* We do not want to modify the long long
1479 pointer here, so the safest (and least
1480 efficient) way is to save all the other registers
1481 on the stack. XXX: totally inefficient. */
1482 #if 0
1483 save_regs(1);
1484 #else
1485 /* lvalue_save: save only if used further down the stack */
1486 save_reg_upstack(vtop->r, 1);
1487 #endif
1488 /* load from memory */
1489 vtop->type.t = load_type;
1490 load(r, vtop);
1491 vdup();
1492 vtop[-1].r = r; /* save register value */
1493 /* increment pointer to get second word */
1494 vtop->type.t = addr_type;
1495 gaddrof();
1496 vpushi(load_size);
1497 gen_op('+');
1498 vtop->r |= VT_LVAL;
1499 vtop->type.t = load_type;
1500 } else {
1501 /* move registers */
1502 load(r, vtop);
1503 vdup();
1504 vtop[-1].r = r; /* save register value */
1505 vtop->r = vtop[-1].r2;
1507 /* Allocate second register. Here we rely on the fact that
1508 get_reg() tries first to free r2 of an SValue. */
1509 r2 = get_reg(rc2);
1510 load(r2, vtop);
1511 vpop();
1512 /* write second register */
1513 vtop->r2 = r2;
1514 vtop->type.t = original_type;
1515 } else if ((vtop->r & VT_LVAL) && !is_float(vtop->type.t)) {
1516 int t1, t;
1517 /* lvalue of scalar type : need to use lvalue type
1518 because of possible cast */
1519 t = vtop->type.t;
1520 t1 = t;
1521 /* compute memory access type */
1522 if (vtop->r & VT_LVAL_BYTE)
1523 t = VT_BYTE;
1524 else if (vtop->r & VT_LVAL_SHORT)
1525 t = VT_SHORT;
1526 if (vtop->r & VT_LVAL_UNSIGNED)
1527 t |= VT_UNSIGNED;
1528 vtop->type.t = t;
1529 load(r, vtop);
1530 /* restore wanted type */
1531 vtop->type.t = t1;
1532 } else {
1533 /* one register type load */
1534 load(r, vtop);
1537 vtop->r = r;
1538 #ifdef TCC_TARGET_C67
1539 /* uses register pairs for doubles */
1540 if ((vtop->type.t & VT_BTYPE) == VT_DOUBLE)
1541 vtop->r2 = r+1;
1542 #endif
1544 return r;
1547 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1548 ST_FUNC void gv2(int rc1, int rc2)
1550 int v;
1552 /* generate the more generic register first. But VT_JMP or VT_CMP
1553 values must be generated first in all cases to avoid possible
1554 reload errors */
1555 v = vtop[0].r & VT_VALMASK;
1556 if (v != VT_CMP && (v & ~1) != VT_JMP && rc1 <= rc2) {
1557 vswap();
1558 gv(rc1);
1559 vswap();
1560 gv(rc2);
1561 /* test if reload is needed for first register */
1562 if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
1563 vswap();
1564 gv(rc1);
1565 vswap();
1567 } else {
1568 gv(rc2);
1569 vswap();
1570 gv(rc1);
1571 vswap();
1572 /* test if reload is needed for first register */
1573 if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
1574 gv(rc2);
1579 #ifndef TCC_TARGET_ARM64
1580 /* wrapper around RC_FRET to return a register class by type */
1581 static int rc_fret(int t)
1583 #ifdef TCC_TARGET_X86_64
1584 if (t == VT_LDOUBLE) {
1585 return RC_ST0;
1587 #endif
1588 return RC_FRET;
1590 #endif
1592 /* wrapper around REG_FRET to return a register by type */
1593 static int reg_fret(int t)
1595 #ifdef TCC_TARGET_X86_64
1596 if (t == VT_LDOUBLE) {
1597 return TREG_ST0;
1599 #endif
1600 return REG_FRET;
1603 #if PTR_SIZE == 4
1604 /* expand a 64-bit value on the stack into two ints */
1605 ST_FUNC void lexpand(void)
1607 int u, v;
1608 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1609 v = vtop->r & (VT_VALMASK | VT_LVAL);
1610 if (v == VT_CONST) {
1611 vdup();
1612 vtop[0].c.i >>= 32;
1613 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1614 vdup();
1615 vtop[0].c.i += 4;
1616 } else {
1617 gv(RC_INT);
1618 vdup();
1619 vtop[0].r = vtop[-1].r2;
1620 vtop[0].r2 = vtop[-1].r2 = VT_CONST;
1622 vtop[0].type.t = vtop[-1].type.t = VT_INT | u;
1624 #endif
1626 #if PTR_SIZE == 4
1627 /* build a long long from two ints */
1628 static void lbuild(int t)
1630 gv2(RC_INT, RC_INT);
1631 vtop[-1].r2 = vtop[0].r;
1632 vtop[-1].type.t = t;
1633 vpop();
1635 #endif
1637 /* convert stack entry to register and duplicate its value in another
1638 register */
1639 static void gv_dup(void)
1641 int rc, t, r, r1;
1642 SValue sv;
1644 t = vtop->type.t;
1645 #if PTR_SIZE == 4
1646 if ((t & VT_BTYPE) == VT_LLONG) {
1647 if (t & VT_BITFIELD) {
1648 gv(RC_INT);
1649 t = vtop->type.t;
1651 lexpand();
1652 gv_dup();
1653 vswap();
1654 vrotb(3);
1655 gv_dup();
1656 vrotb(4);
1657 /* stack: H L L1 H1 */
1658 lbuild(t);
1659 vrotb(3);
1660 vrotb(3);
1661 vswap();
1662 lbuild(t);
1663 vswap();
1664 } else
1665 #endif
1667 /* duplicate value */
1668 rc = RC_INT;
1669 sv.type.t = VT_INT;
1670 if (is_float(t)) {
1671 rc = RC_FLOAT;
1672 #ifdef TCC_TARGET_X86_64
1673 if ((t & VT_BTYPE) == VT_LDOUBLE) {
1674 rc = RC_ST0;
1676 #endif
1677 sv.type.t = t;
1679 r = gv(rc);
1680 r1 = get_reg(rc);
1681 sv.r = r;
1682 sv.c.i = 0;
1683 load(r1, &sv); /* move r to r1 */
1684 vdup();
1685 /* duplicates value */
1686 if (r != r1)
1687 vtop->r = r1;
1691 /* Generate value test
1693 * Generate a test for any value (jump, comparison and integers) */
1694 ST_FUNC int gvtst(int inv, int t)
1696 int v = vtop->r & VT_VALMASK;
1697 if (v != VT_CMP && v != VT_JMP && v != VT_JMPI) {
1698 vpushi(0);
1699 gen_op(TOK_NE);
1701 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1702 /* constant jmp optimization */
1703 if ((vtop->c.i != 0) != inv)
1704 t = gjmp(t);
1705 vtop--;
1706 return t;
1708 return gtst(inv, t);
1711 #if PTR_SIZE == 4
1712 /* generate CPU independent (unsigned) long long operations */
1713 static void gen_opl(int op)
1715 int t, a, b, op1, c, i;
1716 int func;
1717 unsigned short reg_iret = REG_IRET;
1718 unsigned short reg_lret = REG_LRET;
1719 SValue tmp;
1721 switch(op) {
1722 case '/':
1723 case TOK_PDIV:
1724 func = TOK___divdi3;
1725 goto gen_func;
1726 case TOK_UDIV:
1727 func = TOK___udivdi3;
1728 goto gen_func;
1729 case '%':
1730 func = TOK___moddi3;
1731 goto gen_mod_func;
1732 case TOK_UMOD:
1733 func = TOK___umoddi3;
1734 gen_mod_func:
1735 #ifdef TCC_ARM_EABI
1736 reg_iret = TREG_R2;
1737 reg_lret = TREG_R3;
1738 #endif
1739 gen_func:
1740 /* call generic long long function */
1741 vpush_global_sym(&func_old_type, func);
1742 vrott(3);
1743 gfunc_call(2);
1744 vpushi(0);
1745 vtop->r = reg_iret;
1746 vtop->r2 = reg_lret;
1747 break;
1748 case '^':
1749 case '&':
1750 case '|':
1751 case '*':
1752 case '+':
1753 case '-':
1754 //pv("gen_opl A",0,2);
1755 t = vtop->type.t;
1756 vswap();
1757 lexpand();
1758 vrotb(3);
1759 lexpand();
1760 /* stack: L1 H1 L2 H2 */
1761 tmp = vtop[0];
1762 vtop[0] = vtop[-3];
1763 vtop[-3] = tmp;
1764 tmp = vtop[-2];
1765 vtop[-2] = vtop[-3];
1766 vtop[-3] = tmp;
1767 vswap();
1768 /* stack: H1 H2 L1 L2 */
1769 //pv("gen_opl B",0,4);
1770 if (op == '*') {
1771 vpushv(vtop - 1);
1772 vpushv(vtop - 1);
1773 gen_op(TOK_UMULL);
1774 lexpand();
1775 /* stack: H1 H2 L1 L2 ML MH */
1776 for(i=0;i<4;i++)
1777 vrotb(6);
1778 /* stack: ML MH H1 H2 L1 L2 */
1779 tmp = vtop[0];
1780 vtop[0] = vtop[-2];
1781 vtop[-2] = tmp;
1782 /* stack: ML MH H1 L2 H2 L1 */
1783 gen_op('*');
1784 vrotb(3);
1785 vrotb(3);
1786 gen_op('*');
1787 /* stack: ML MH M1 M2 */
1788 gen_op('+');
1789 gen_op('+');
1790 } else if (op == '+' || op == '-') {
1791 /* XXX: add non carry method too (for MIPS or alpha) */
1792 if (op == '+')
1793 op1 = TOK_ADDC1;
1794 else
1795 op1 = TOK_SUBC1;
1796 gen_op(op1);
1797 /* stack: H1 H2 (L1 op L2) */
1798 vrotb(3);
1799 vrotb(3);
1800 gen_op(op1 + 1); /* TOK_xxxC2 */
1801 } else {
1802 gen_op(op);
1803 /* stack: H1 H2 (L1 op L2) */
1804 vrotb(3);
1805 vrotb(3);
1806 /* stack: (L1 op L2) H1 H2 */
1807 gen_op(op);
1808 /* stack: (L1 op L2) (H1 op H2) */
1810 /* stack: L H */
1811 lbuild(t);
1812 break;
1813 case TOK_SAR:
1814 case TOK_SHR:
1815 case TOK_SHL:
1816 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1817 t = vtop[-1].type.t;
1818 vswap();
1819 lexpand();
1820 vrotb(3);
1821 /* stack: L H shift */
1822 c = (int)vtop->c.i;
1823 /* constant: simpler */
1824 /* NOTE: all comments are for SHL. the other cases are
1825 done by swapping words */
1826 vpop();
1827 if (op != TOK_SHL)
1828 vswap();
1829 if (c >= 32) {
1830 /* stack: L H */
1831 vpop();
1832 if (c > 32) {
1833 vpushi(c - 32);
1834 gen_op(op);
1836 if (op != TOK_SAR) {
1837 vpushi(0);
1838 } else {
1839 gv_dup();
1840 vpushi(31);
1841 gen_op(TOK_SAR);
1843 vswap();
1844 } else {
1845 vswap();
1846 gv_dup();
1847 /* stack: H L L */
1848 vpushi(c);
1849 gen_op(op);
1850 vswap();
1851 vpushi(32 - c);
1852 if (op == TOK_SHL)
1853 gen_op(TOK_SHR);
1854 else
1855 gen_op(TOK_SHL);
1856 vrotb(3);
1857 /* stack: L L H */
1858 vpushi(c);
1859 if (op == TOK_SHL)
1860 gen_op(TOK_SHL);
1861 else
1862 gen_op(TOK_SHR);
1863 gen_op('|');
1865 if (op != TOK_SHL)
1866 vswap();
1867 lbuild(t);
1868 } else {
1869 /* XXX: should provide a faster fallback on x86 ? */
1870 switch(op) {
1871 case TOK_SAR:
1872 func = TOK___ashrdi3;
1873 goto gen_func;
1874 case TOK_SHR:
1875 func = TOK___lshrdi3;
1876 goto gen_func;
1877 case TOK_SHL:
1878 func = TOK___ashldi3;
1879 goto gen_func;
1882 break;
1883 default:
1884 /* compare operations */
1885 t = vtop->type.t;
1886 vswap();
1887 lexpand();
1888 vrotb(3);
1889 lexpand();
1890 /* stack: L1 H1 L2 H2 */
1891 tmp = vtop[-1];
1892 vtop[-1] = vtop[-2];
1893 vtop[-2] = tmp;
1894 /* stack: L1 L2 H1 H2 */
1895 /* compare high */
1896 op1 = op;
1897 /* when values are equal, we need to compare low words. since
1898 the jump is inverted, we invert the test too. */
1899 if (op1 == TOK_LT)
1900 op1 = TOK_LE;
1901 else if (op1 == TOK_GT)
1902 op1 = TOK_GE;
1903 else if (op1 == TOK_ULT)
1904 op1 = TOK_ULE;
1905 else if (op1 == TOK_UGT)
1906 op1 = TOK_UGE;
1907 a = 0;
1908 b = 0;
1909 gen_op(op1);
1910 if (op == TOK_NE) {
1911 b = gvtst(0, 0);
1912 } else {
1913 a = gvtst(1, 0);
1914 if (op != TOK_EQ) {
1915 /* generate non equal test */
1916 vpushi(TOK_NE);
1917 vtop->r = VT_CMP;
1918 b = gvtst(0, 0);
1921 /* compare low. Always unsigned */
1922 op1 = op;
1923 if (op1 == TOK_LT)
1924 op1 = TOK_ULT;
1925 else if (op1 == TOK_LE)
1926 op1 = TOK_ULE;
1927 else if (op1 == TOK_GT)
1928 op1 = TOK_UGT;
1929 else if (op1 == TOK_GE)
1930 op1 = TOK_UGE;
1931 gen_op(op1);
1932 a = gvtst(1, a);
1933 gsym(b);
1934 vseti(VT_JMPI, a);
1935 break;
1938 #endif
1940 static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
1942 uint64_t x = (a >> 63 ? -a : a) / (b >> 63 ? -b : b);
1943 return (a ^ b) >> 63 ? -x : x;
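/* signed 64-bit division done with unsigned arithmetic: negate negative
   operands (a >> 63 is the sign bit), divide, then restore the sign of
   the quotient from the XOR of the operand signs */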
1946 static int gen_opic_lt(uint64_t a, uint64_t b)
1948 return (a ^ (uint64_t)1 << 63) < (b ^ (uint64_t)1 << 63);
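/* flipping the sign bit maps signed 64-bit comparison onto an unsigned
   comparison of the biased values */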
1951 /* handle integer constant optimizations and various
1952 machine-independent optimizations */
1953 static void gen_opic(int op)
1955 SValue *v1 = vtop - 1;
1956 SValue *v2 = vtop;
1957 int t1 = v1->type.t & VT_BTYPE;
1958 int t2 = v2->type.t & VT_BTYPE;
1959 int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1960 int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1961 uint64_t l1 = c1 ? v1->c.i : 0;
1962 uint64_t l2 = c2 ? v2->c.i : 0;
1963 int shm = (t1 == VT_LLONG) ? 63 : 31;
1965 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
1966 l1 = ((uint32_t)l1 |
1967 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
1968 if (t2 != VT_LLONG && (PTR_SIZE != 8 || t2 != VT_PTR))
1969 l2 = ((uint32_t)l2 |
1970 (v2->type.t & VT_UNSIGNED ? 0 : -(l2 & 0x80000000)));
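/* the two adjustments above sign- or zero-extend 32-bit operands into
   the full 64-bit l1/l2 so the constant folding below can work
   uniformly on 64-bit values */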
1972 if (c1 && c2) {
1973 switch(op) {
1974 case '+': l1 += l2; break;
1975 case '-': l1 -= l2; break;
1976 case '&': l1 &= l2; break;
1977 case '^': l1 ^= l2; break;
1978 case '|': l1 |= l2; break;
1979 case '*': l1 *= l2; break;
1981 case TOK_PDIV:
1982 case '/':
1983 case '%':
1984 case TOK_UDIV:
1985 case TOK_UMOD:
1986 /* if division by zero, generate explicit division */
1987 if (l2 == 0) {
1988 if (const_wanted)
1989 tcc_error("division by zero in constant");
1990 goto general_case;
1992 switch(op) {
1993 default: l1 = gen_opic_sdiv(l1, l2); break;
1994 case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
1995 case TOK_UDIV: l1 = l1 / l2; break;
1996 case TOK_UMOD: l1 = l1 % l2; break;
1998 break;
1999 case TOK_SHL: l1 <<= (l2 & shm); break;
2000 case TOK_SHR: l1 >>= (l2 & shm); break;
2001 case TOK_SAR:
2002 l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
2003 break;
2004 /* tests */
2005 case TOK_ULT: l1 = l1 < l2; break;
2006 case TOK_UGE: l1 = l1 >= l2; break;
2007 case TOK_EQ: l1 = l1 == l2; break;
2008 case TOK_NE: l1 = l1 != l2; break;
2009 case TOK_ULE: l1 = l1 <= l2; break;
2010 case TOK_UGT: l1 = l1 > l2; break;
2011 case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
2012 case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
2013 case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
2014 case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
2015 /* logical */
2016 case TOK_LAND: l1 = l1 && l2; break;
2017 case TOK_LOR: l1 = l1 || l2; break;
2018 default:
2019 goto general_case;
2021 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
2022 l1 = ((uint32_t)l1 |
2023 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
2024 v1->c.i = l1;
2025 vtop--;
2026 } else {
2027 /* if commutative ops, put c2 as constant */
2028 if (c1 && (op == '+' || op == '&' || op == '^' ||
2029 op == '|' || op == '*')) {
2030 vswap();
2031 c2 = c1; //c = c1, c1 = c2, c2 = c;
2032 l2 = l1; //l = l1, l1 = l2, l2 = l;
2034 if (!const_wanted &&
2035 c1 && ((l1 == 0 &&
2036 (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
2037 (l1 == -1 && op == TOK_SAR))) {
2038 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
2039 vtop--;
2040 } else if (!const_wanted &&
2041 c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
2042 (op == '|' &&
2043 (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))) ||
2044 (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
2045 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
2046 if (l2 == 1)
2047 vtop->c.i = 0;
2048 vswap();
2049 vtop--;
2050 } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
2051 op == TOK_PDIV) &&
2052 l2 == 1) ||
2053 ((op == '+' || op == '-' || op == '|' || op == '^' ||
2054 op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
2055 l2 == 0) ||
2056 (op == '&' &&
2057 (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))))) {
2058 /* filter out NOP operations like x*1, x-0, x&-1... */
2059 vtop--;
2060 } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
2061 /* try to use shifts instead of muls or divs */
2062 if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
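/* a non-zero l2 with (l2 & (l2 - 1)) == 0 has exactly one bit set,
   i.e. it is a power of two; the loop below computes its log2 as the
   shift count */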
2063 int n = -1;
2064 while (l2) {
2065 l2 >>= 1;
2066 n++;
2068 vtop->c.i = n;
2069 if (op == '*')
2070 op = TOK_SHL;
2071 else if (op == TOK_PDIV)
2072 op = TOK_SAR;
2073 else
2074 op = TOK_SHR;
2076 goto general_case;
2077 } else if (c2 && (op == '+' || op == '-') &&
2078 (((vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM))
2079 || (vtop[-1].r & (VT_VALMASK | VT_LVAL)) == VT_LOCAL)) {
2080 /* symbol + constant case */
2081 if (op == '-')
2082 l2 = -l2;
2083 l2 += vtop[-1].c.i;
2084 /* The backends can't always deal with addends to symbols
2085 larger than +-1<<31. Don't construct such. */
2086 if ((int)l2 != l2)
2087 goto general_case;
2088 vtop--;
2089 vtop->c.i = l2;
2090 } else {
2091 general_case:
2092 /* call low level op generator */
2093 if (t1 == VT_LLONG || t2 == VT_LLONG ||
2094 (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
2095 gen_opl(op);
2096 else
2097 gen_opi(op);
2102 /* generate a floating point operation with constant propagation */
2103 static void gen_opif(int op)
2105 int c1, c2;
2106 SValue *v1, *v2;
2107 #if defined _MSC_VER && defined _AMD64_
2108 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2109 volatile
2110 #endif
2111 long double f1, f2;
2113 v1 = vtop - 1;
2114 v2 = vtop;
2115 /* currently, we cannot do computations with forward symbols */
2116 c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2117 c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2118 if (c1 && c2) {
2119 if (v1->type.t == VT_FLOAT) {
2120 f1 = v1->c.f;
2121 f2 = v2->c.f;
2122 } else if (v1->type.t == VT_DOUBLE) {
2123 f1 = v1->c.d;
2124 f2 = v2->c.d;
2125 } else {
2126 f1 = v1->c.ld;
2127 f2 = v2->c.ld;
2130 /* NOTE: we only do constant propagation on finite numbers (not
2131 NaN or infinity) (ANSI spec) */
2132 if (!ieee_finite(f1) || !ieee_finite(f2))
2133 goto general_case;
2135 switch(op) {
2136 case '+': f1 += f2; break;
2137 case '-': f1 -= f2; break;
2138 case '*': f1 *= f2; break;
2139 case '/':
2140 if (f2 == 0.0) {
2141 /* If not in initializer we need to potentially generate
2142 FP exceptions at runtime, otherwise we want to fold. */
2143 if (!const_wanted)
2144 goto general_case;
2146 f1 /= f2;
2147 break;
2148 /* XXX: also handles tests ? */
2149 default:
2150 goto general_case;
2152 /* XXX: overflow test ? */
2153 if (v1->type.t == VT_FLOAT) {
2154 v1->c.f = f1;
2155 } else if (v1->type.t == VT_DOUBLE) {
2156 v1->c.d = f1;
2157 } else {
2158 v1->c.ld = f1;
2160 vtop--;
2161 } else {
2162 general_case:
2163 gen_opf(op);
2167 static int pointed_size(CType *type)
2169 int align;
2170 return type_size(pointed_type(type), &align);
2173 static void vla_runtime_pointed_size(CType *type)
2175 int align;
2176 vla_runtime_type_size(pointed_type(type), &align);
2179 static inline int is_null_pointer(SValue *p)
2181 if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
2182 return 0;
2183 return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
2184 ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
2185 ((p->type.t & VT_BTYPE) == VT_PTR &&
2186 (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0) &&
2187 ((pointed_type(&p->type)->t & VT_BTYPE) == VT_VOID) &&
2188 0 == (pointed_type(&p->type)->t & (VT_CONSTANT | VT_VOLATILE))
2191 static inline int is_integer_btype(int bt)
2193 return (bt == VT_BYTE || bt == VT_SHORT ||
2194 bt == VT_INT || bt == VT_LLONG);
2197 /* check types for comparison or subtraction of pointers */
2198 static void check_comparison_pointer_types(SValue *p1, SValue *p2, int op)
2200 CType *type1, *type2, tmp_type1, tmp_type2;
2201 int bt1, bt2;
2203 /* null pointers are accepted in all comparisons, as in gcc */
2204 if (is_null_pointer(p1) || is_null_pointer(p2))
2205 return;
2206 type1 = &p1->type;
2207 type2 = &p2->type;
2208 bt1 = type1->t & VT_BTYPE;
2209 bt2 = type2->t & VT_BTYPE;
2210 /* accept comparison between pointer and integer with a warning */
2211 if ((is_integer_btype(bt1) || is_integer_btype(bt2)) && op != '-') {
2212 if (op != TOK_LOR && op != TOK_LAND )
2213 tcc_warning("comparison between pointer and integer");
2214 return;
2217 /* both must be pointers or implicit function pointers */
2218 if (bt1 == VT_PTR) {
2219 type1 = pointed_type(type1);
2220 } else if (bt1 != VT_FUNC)
2221 goto invalid_operands;
2223 if (bt2 == VT_PTR) {
2224 type2 = pointed_type(type2);
2225 } else if (bt2 != VT_FUNC) {
2226 invalid_operands:
2227 tcc_error("invalid operands to binary %s", get_tok_str(op, NULL));
2229 if ((type1->t & VT_BTYPE) == VT_VOID ||
2230 (type2->t & VT_BTYPE) == VT_VOID)
2231 return;
2232 tmp_type1 = *type1;
2233 tmp_type2 = *type2;
2234 tmp_type1.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
2235 tmp_type2.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
2236 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
2237 /* gcc-like error if '-' is used */
2238 if (op == '-')
2239 goto invalid_operands;
2240 else
2241 tcc_warning("comparison of distinct pointer types lacks a cast");
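/* Illustrative example (user-level code, not part of the compiler): given

       int *p; long *q; int n;

   the checks above behave as follows:

       p == 0     accepted silently (null pointer constant)
       p < n      warning: comparison between pointer and integer
       p == q     warning: comparison of distinct pointer types lacks a cast
       p - q      error: invalid operands to binary -                        */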
2245 /* generic gen_op: handles type problems */
2246 ST_FUNC void gen_op(int op)
2248 int u, t1, t2, bt1, bt2, t;
2249 CType type1;
2251 redo:
2252 t1 = vtop[-1].type.t;
2253 t2 = vtop[0].type.t;
2254 bt1 = t1 & VT_BTYPE;
2255 bt2 = t2 & VT_BTYPE;
2257 if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
2258 tcc_error("operation on a struct");
2259 } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
2260 if (bt2 == VT_FUNC) {
2261 mk_pointer(&vtop->type);
2262 gaddrof();
2264 if (bt1 == VT_FUNC) {
2265 vswap();
2266 mk_pointer(&vtop->type);
2267 gaddrof();
2268 vswap();
2270 goto redo;
2271 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
2272 /* at least one operand is a pointer */
2273 /* relational op: both operands must be pointers */
2274 if (op >= TOK_ULT && op <= TOK_LOR) {
2275 check_comparison_pointer_types(vtop - 1, vtop, op);
2276 /* pointers are handled as unsigned */
2277 #if PTR_SIZE == 8
2278 t = VT_LLONG | VT_UNSIGNED;
2279 #else
2280 t = VT_INT | VT_UNSIGNED;
2281 #endif
2282 goto std_op;
2284 /* if both pointers, then it must be the '-' op */
2285 if (bt1 == VT_PTR && bt2 == VT_PTR) {
2286 if (op != '-')
2287 tcc_error("cannot use pointers here");
2288 check_comparison_pointer_types(vtop - 1, vtop, op);
2289 /* XXX: check that types are compatible */
2290 if (vtop[-1].type.t & VT_VLA) {
2291 vla_runtime_pointed_size(&vtop[-1].type);
2292 } else {
2293 vpushi(pointed_size(&vtop[-1].type));
2295 vrott(3);
2296 gen_opic(op);
2297 vtop->type.t = ptrdiff_type.t;
2298 vswap();
2299 gen_op(TOK_PDIV);
2300 } else {
2301 /* exactly one pointer : must be '+' or '-'. */
2302 if (op != '-' && op != '+')
2303 tcc_error("cannot use pointers here");
2304 /* Put pointer as first operand */
2305 if (bt2 == VT_PTR) {
2306 vswap();
2307 t = t1, t1 = t2, t2 = t;
2309 #if PTR_SIZE == 4
2310 if ((vtop[0].type.t & VT_BTYPE) == VT_LLONG)
2311 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2312 gen_cast_s(VT_INT);
2313 #endif
2314 type1 = vtop[-1].type;
2315 type1.t &= ~VT_ARRAY;
2316 if (vtop[-1].type.t & VT_VLA)
2317 vla_runtime_pointed_size(&vtop[-1].type);
2318 else {
2319 u = pointed_size(&vtop[-1].type);
2320 if (u < 0)
2321 tcc_error("unknown array element size");
2322 #if PTR_SIZE == 8
2323 vpushll(u);
2324 #else
2325 /* XXX: cast to int ? (long long case) */
2326 vpushi(u);
2327 #endif
2329 gen_op('*');
2330 #if 0
2331 /* #ifdef CONFIG_TCC_BCHECK
2332 The main reason for removing this code:
2333 #include <stdio.h>
2334 int main ()
2336 int v[10];
2337 int i = 10;
2338 int j = 9;
2339 fprintf(stderr, "v+i-j = %p\n", v+i-j);
2340 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
2342 When this code is enabled, the output looks like
2343 v+i-j = 0xfffffffe
2344 v+(i-j) = 0xbff84000
2346 /* if evaluating constant expression, no code should be
2347 generated, so no bound check */
2348 if (tcc_state->do_bounds_check && !const_wanted) {
2349 /* if bounded pointers, we generate a special code to
2350 test bounds */
2351 if (op == '-') {
2352 vpushi(0);
2353 vswap();
2354 gen_op('-');
2356 gen_bounded_ptr_add();
2357 } else
2358 #endif
2360 gen_opic(op);
2362 /* restore the type in case gen_opic() swapped the operands */
2363 vtop->type = type1;
2365 } else if (is_float(bt1) || is_float(bt2)) {
2366 /* compute bigger type and do implicit casts */
2367 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
2368 t = VT_LDOUBLE;
2369 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
2370 t = VT_DOUBLE;
2371 } else {
2372 t = VT_FLOAT;
2374 /* floats can only be used for a few operations */
2375 if (op != '+' && op != '-' && op != '*' && op != '/' &&
2376 (op < TOK_ULT || op > TOK_GT))
2377 tcc_error("invalid operands for binary operation");
2378 goto std_op;
2379 } else if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL) {
2380 t = bt1 == VT_LLONG ? VT_LLONG : VT_INT;
2381 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (t | VT_UNSIGNED))
2382 t |= VT_UNSIGNED;
2383 t |= (VT_LONG & t1);
2384 goto std_op;
2385 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
2386 /* cast to biggest op */
2387 t = VT_LLONG | VT_LONG;
2388 if (bt1 == VT_LLONG)
2389 t &= t1;
2390 if (bt2 == VT_LLONG)
2391 t &= t2;
2392 /* convert to unsigned if it does not fit in a long long */
2393 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
2394 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
2395 t |= VT_UNSIGNED;
2396 goto std_op;
2397 } else {
2398 /* integer operations */
2399 t = VT_INT | (VT_LONG & (t1 | t2));
2400 /* convert to unsigned if it does not fit in an integer */
2401 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
2402 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
2403 t |= VT_UNSIGNED;
2404 std_op:
2405 /* XXX: currently, some unsigned operations are explicit, so
2406 we modify them here */
2407 if (t & VT_UNSIGNED) {
2408 if (op == TOK_SAR)
2409 op = TOK_SHR;
2410 else if (op == '/')
2411 op = TOK_UDIV;
2412 else if (op == '%')
2413 op = TOK_UMOD;
2414 else if (op == TOK_LT)
2415 op = TOK_ULT;
2416 else if (op == TOK_GT)
2417 op = TOK_UGT;
2418 else if (op == TOK_LE)
2419 op = TOK_ULE;
2420 else if (op == TOK_GE)
2421 op = TOK_UGE;
2423 vswap();
2424 type1.t = t;
2425 type1.ref = NULL;
2426 gen_cast(&type1);
2427 vswap();
2428 /* special case for shifts and long long: we keep the shift as
2429 an integer */
2430 if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
2431 type1.t = VT_INT;
2432 gen_cast(&type1);
2433 if (is_float(t))
2434 gen_opif(op);
2435 else
2436 gen_opic(op);
2437 if (op >= TOK_ULT && op <= TOK_GT) {
2438 /* relational op: the result is an int */
2439 vtop->type.t = VT_INT;
2440 } else {
2441 vtop->type.t = t;
2444 // Make sure that we have converted to an rvalue:
2445 if (vtop->r & VT_LVAL)
2446 gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
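/* Illustrative example (user-level code, not part of the compiler):
   pointer arithmetic is scaled by the element size in the code above, e.g.

       long diff(int *p, int *q) { return p - q; }

   subtracts the raw addresses, then divides by sizeof(int) with
   TOK_PDIV and gives the result ptrdiff type, while

       int *step(int *p, int n)  { return p + n; }

   multiplies n by sizeof(int) before the addition and keeps the
   pointer type of the pointer operand. */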
2449 #ifndef TCC_TARGET_ARM
2450 /* generic itof for unsigned long long case */
2451 static void gen_cvt_itof1(int t)
2453 #ifdef TCC_TARGET_ARM64
2454 gen_cvt_itof(t);
2455 #else
2456 if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
2457 (VT_LLONG | VT_UNSIGNED)) {
2459 if (t == VT_FLOAT)
2460 vpush_global_sym(&func_old_type, TOK___floatundisf);
2461 #if LDOUBLE_SIZE != 8
2462 else if (t == VT_LDOUBLE)
2463 vpush_global_sym(&func_old_type, TOK___floatundixf);
2464 #endif
2465 else
2466 vpush_global_sym(&func_old_type, TOK___floatundidf);
2467 vrott(2);
2468 gfunc_call(1);
2469 vpushi(0);
2470 vtop->r = reg_fret(t);
2471 } else {
2472 gen_cvt_itof(t);
2474 #endif
2476 #endif
2478 /* generic ftoi for unsigned long long case */
2479 static void gen_cvt_ftoi1(int t)
2481 #ifdef TCC_TARGET_ARM64
2482 gen_cvt_ftoi(t);
2483 #else
2484 int st;
2486 if (t == (VT_LLONG | VT_UNSIGNED)) {
2487 /* not handled natively */
2488 st = vtop->type.t & VT_BTYPE;
2489 if (st == VT_FLOAT)
2490 vpush_global_sym(&func_old_type, TOK___fixunssfdi);
2491 #if LDOUBLE_SIZE != 8
2492 else if (st == VT_LDOUBLE)
2493 vpush_global_sym(&func_old_type, TOK___fixunsxfdi);
2494 #endif
2495 else
2496 vpush_global_sym(&func_old_type, TOK___fixunsdfdi);
2497 vrott(2);
2498 gfunc_call(1);
2499 vpushi(0);
2500 vtop->r = REG_IRET;
2501 vtop->r2 = REG_LRET;
2502 } else {
2503 gen_cvt_ftoi(t);
2505 #endif
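/* Illustrative example (user-level code, not part of the compiler):
   the unsigned long long cases above are lowered to calls of the
   runtime helpers pushed with vpush_global_sym(), e.g.

       double tod(unsigned long long x) { return x; }
           calls __floatundidf (or __floatundisf / __floatundixf for
           float / long double destinations)

       unsigned long long tou(double x) { return (unsigned long long)x; }
           calls __fixunsdfdi (or __fixunssfdi / __fixunsxfdi)

   Signed conversions fall through to gen_cvt_itof()/gen_cvt_ftoi(). */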
2508 /* force char or short cast */
2509 static void force_charshort_cast(int t)
2511 int bits, dbt;
2513 /* cannot cast static initializers */
2514 if (STATIC_DATA_WANTED)
2515 return;
2517 dbt = t & VT_BTYPE;
2518 /* XXX: add optimization if lvalue : just change type and offset */
2519 if (dbt == VT_BYTE)
2520 bits = 8;
2521 else
2522 bits = 16;
2523 if (t & VT_UNSIGNED) {
2524 vpushi((1 << bits) - 1);
2525 gen_op('&');
2526 } else {
2527 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
2528 bits = 64 - bits;
2529 else
2530 bits = 32 - bits;
2531 vpushi(bits);
2532 gen_op(TOK_SHL);
2533 /* result must be signed or the SAR is converted to an SHL.
2534 This was not the case when "t" was a signed short
2535 and the last value on the stack was an unsigned int */
2536 vtop->type.t &= ~VT_UNSIGNED;
2537 vpushi(bits);
2538 gen_op(TOK_SAR);
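/* Illustrative example (user-level code, not part of the compiler):
   the delayed char/short cast above is done with a mask or a shift
   pair; for a 32-bit int value x

       (unsigned char)x   becomes   x & 0xff
       (signed char)x     becomes   (x << 24) >> 24   (arithmetic shift)
       (short)x           becomes   (x << 16) >> 16

   which is why the intermediate value is forced to be signed before
   the TOK_SAR is generated. */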
2542 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2543 static void gen_cast_s(int t)
2545 CType type;
2546 type.t = t;
2547 type.ref = NULL;
2548 gen_cast(&type);
2551 static void gen_cast(CType *type)
2553 int sbt, dbt, sf, df, c, p;
2555 /* special delayed cast for char/short */
2556 /* XXX: in some cases (multiple cascaded casts), it may still
2557 be incorrect */
2558 if (vtop->r & VT_MUSTCAST) {
2559 vtop->r &= ~VT_MUSTCAST;
2560 force_charshort_cast(vtop->type.t);
2563 /* bitfields first get cast to ints */
2564 if (vtop->type.t & VT_BITFIELD) {
2565 gv(RC_INT);
2568 dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
2569 sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
2571 if (sbt != dbt) {
2572 sf = is_float(sbt);
2573 df = is_float(dbt);
2574 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2575 p = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM);
2576 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
2577 c &= dbt != VT_LDOUBLE;
2578 #endif
2579 if (c) {
2580 /* constant case: we can do it now */
2581 /* XXX: in ISOC, cannot do it if error in convert */
2582 if (sbt == VT_FLOAT)
2583 vtop->c.ld = vtop->c.f;
2584 else if (sbt == VT_DOUBLE)
2585 vtop->c.ld = vtop->c.d;
2587 if (df) {
2588 if ((sbt & VT_BTYPE) == VT_LLONG) {
2589 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
2590 vtop->c.ld = vtop->c.i;
2591 else
2592 vtop->c.ld = -(long double)-vtop->c.i;
2593 } else if(!sf) {
2594 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
2595 vtop->c.ld = (uint32_t)vtop->c.i;
2596 else
2597 vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
2600 if (dbt == VT_FLOAT)
2601 vtop->c.f = (float)vtop->c.ld;
2602 else if (dbt == VT_DOUBLE)
2603 vtop->c.d = (double)vtop->c.ld;
2604 } else if (sf && dbt == (VT_LLONG|VT_UNSIGNED)) {
2605 vtop->c.i = vtop->c.ld;
2606 } else if (sf && dbt == VT_BOOL) {
2607 vtop->c.i = (vtop->c.ld != 0);
2608 } else {
2609 if(sf)
2610 vtop->c.i = vtop->c.ld;
2611 else if (sbt == (VT_LLONG|VT_UNSIGNED))
2613 else if (sbt & VT_UNSIGNED)
2614 vtop->c.i = (uint32_t)vtop->c.i;
2615 #if PTR_SIZE == 8
2616 else if (sbt == VT_PTR)
2618 #endif
2619 else if (sbt != VT_LLONG)
2620 vtop->c.i = ((uint32_t)vtop->c.i |
2621 -(vtop->c.i & 0x80000000));
2623 if (dbt == (VT_LLONG|VT_UNSIGNED))
2625 else if (dbt == VT_BOOL)
2626 vtop->c.i = (vtop->c.i != 0);
2627 #if PTR_SIZE == 8
2628 else if (dbt == VT_PTR)
2630 #endif
2631 else if (dbt != VT_LLONG) {
2632 uint32_t m = ((dbt & VT_BTYPE) == VT_BYTE ? 0xff :
2633 (dbt & VT_BTYPE) == VT_SHORT ? 0xffff :
2634 0xffffffff);
2635 vtop->c.i &= m;
2636 if (!(dbt & VT_UNSIGNED))
2637 vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
2640 } else if (p && dbt == VT_BOOL) {
2641 vtop->r = VT_CONST;
2642 vtop->c.i = 1;
2643 } else {
2644 /* non constant case: generate code */
2645 if (sf && df) {
2646 /* convert from fp to fp */
2647 gen_cvt_ftof(dbt);
2648 } else if (df) {
2649 /* convert int to fp */
2650 gen_cvt_itof1(dbt);
2651 } else if (sf) {
2652 /* convert fp to int */
2653 if (dbt == VT_BOOL) {
2654 vpushi(0);
2655 gen_op(TOK_NE);
2656 } else {
2657 /* we handle char/short/etc... with generic code */
2658 if (dbt != (VT_INT | VT_UNSIGNED) &&
2659 dbt != (VT_LLONG | VT_UNSIGNED) &&
2660 dbt != VT_LLONG)
2661 dbt = VT_INT;
2662 gen_cvt_ftoi1(dbt);
2663 if (dbt == VT_INT && (type->t & (VT_BTYPE | VT_UNSIGNED)) != dbt) {
2664 /* additional cast for char/short... */
2665 vtop->type.t = dbt;
2666 gen_cast(type);
2669 #if PTR_SIZE == 4
2670 } else if ((dbt & VT_BTYPE) == VT_LLONG) {
2671 if ((sbt & VT_BTYPE) != VT_LLONG) {
2672 /* scalar to long long */
2673 /* machine independent conversion */
2674 gv(RC_INT);
2675 /* generate high word */
2676 if (sbt == (VT_INT | VT_UNSIGNED)) {
2677 vpushi(0);
2678 gv(RC_INT);
2679 } else {
2680 if (sbt == VT_PTR) {
2681 /* cast from pointer to int before we apply
2682 shift operation, which pointers don't support */
2683 gen_cast_s(VT_INT);
2685 gv_dup();
2686 vpushi(31);
2687 gen_op(TOK_SAR);
2689 /* patch second register */
2690 vtop[-1].r2 = vtop->r;
2691 vpop();
2693 #else
2694 } else if ((dbt & VT_BTYPE) == VT_LLONG ||
2695 (dbt & VT_BTYPE) == VT_PTR ||
2696 (dbt & VT_BTYPE) == VT_FUNC) {
2697 if ((sbt & VT_BTYPE) != VT_LLONG &&
2698 (sbt & VT_BTYPE) != VT_PTR &&
2699 (sbt & VT_BTYPE) != VT_FUNC) {
2700 /* need to convert from 32bit to 64bit */
2701 gv(RC_INT);
2702 if (sbt != (VT_INT | VT_UNSIGNED)) {
2703 #if defined(TCC_TARGET_ARM64)
2704 gen_cvt_sxtw();
2705 #elif defined(TCC_TARGET_X86_64)
2706 int r = gv(RC_INT);
2707 /* x86_64 specific: movslq */
2708 o(0x6348);
2709 o(0xc0 + (REG_VALUE(r) << 3) + REG_VALUE(r));
2710 #else
2711 #error
2712 #endif
2715 #endif
2716 } else if (dbt == VT_BOOL) {
2717 /* scalar to bool */
2718 vpushi(0);
2719 gen_op(TOK_NE);
2720 } else if ((dbt & VT_BTYPE) == VT_BYTE ||
2721 (dbt & VT_BTYPE) == VT_SHORT) {
2722 if (sbt == VT_PTR) {
2723 vtop->type.t = VT_INT;
2724 tcc_warning("nonportable conversion from pointer to char/short");
2726 force_charshort_cast(dbt);
2727 } else if ((dbt & VT_BTYPE) == VT_INT) {
2728 /* scalar to int */
2729 if ((sbt & VT_BTYPE) == VT_LLONG) {
2730 #if PTR_SIZE == 4
2731 /* from long long: just take low order word */
2732 lexpand();
2733 vpop();
2734 #else
2735 vpushi(0xffffffff);
2736 vtop->type.t |= VT_UNSIGNED;
2737 gen_op('&');
2738 #endif
2740 /* if lvalue and single word type, nothing to do because
2741 the lvalue already contains the real type size (see
2742 VT_LVAL_xxx constants) */
2745 } else if ((dbt & VT_BTYPE) == VT_PTR && !(vtop->r & VT_LVAL)) {
2746 /* if we are casting between pointer types,
2747 we must update the VT_LVAL_xxx size */
2748 vtop->r = (vtop->r & ~VT_LVAL_TYPE)
2749 | (lvalue_type(type->ref->type.t) & VT_LVAL_TYPE);
2751 vtop->type = *type;
2752 vtop->type.t &= ~ ( VT_CONSTANT | VT_VOLATILE | VT_ARRAY );
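/* Illustrative example (user-level code, not part of the compiler):
   in the constant case above the cast is evaluated immediately, e.g.

       static signed char c = (signed char)0x1ff;      stored as -1
       static unsigned char u = (unsigned char)0x1ff;  stored as 0xff
       static int b = (_Bool)0.5;                      stored as 1

   using the mask 'm' and the sign-extension shown above, so no code is
   generated for such static initializers. */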
2755 /* return type size as known at compile time. Put alignment at 'a' */
2756 ST_FUNC int type_size(CType *type, int *a)
2758 Sym *s;
2759 int bt;
2761 bt = type->t & VT_BTYPE;
2762 if (bt == VT_STRUCT) {
2763 /* struct/union */
2764 s = type->ref;
2765 *a = s->r;
2766 return s->c;
2767 } else if (bt == VT_PTR) {
2768 if (type->t & VT_ARRAY) {
2769 int ts;
2771 s = type->ref;
2772 ts = type_size(&s->type, a);
2774 if (ts < 0 && s->c < 0)
2775 ts = -ts;
2777 return ts * s->c;
2778 } else {
2779 *a = PTR_SIZE;
2780 return PTR_SIZE;
2782 } else if (IS_ENUM(type->t) && type->ref->c == -1) {
2783 return -1; /* incomplete enum */
2784 } else if (bt == VT_LDOUBLE) {
2785 *a = LDOUBLE_ALIGN;
2786 return LDOUBLE_SIZE;
2787 } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
2788 #ifdef TCC_TARGET_I386
2789 #ifdef TCC_TARGET_PE
2790 *a = 8;
2791 #else
2792 *a = 4;
2793 #endif
2794 #elif defined(TCC_TARGET_ARM)
2795 #ifdef TCC_ARM_EABI
2796 *a = 8;
2797 #else
2798 *a = 4;
2799 #endif
2800 #else
2801 *a = 8;
2802 #endif
2803 return 8;
2804 } else if (bt == VT_INT || bt == VT_FLOAT) {
2805 *a = 4;
2806 return 4;
2807 } else if (bt == VT_SHORT) {
2808 *a = 2;
2809 return 2;
2810 } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
2811 *a = 8;
2812 return 16;
2813 } else {
2814 /* char, void, function, _Bool */
2815 *a = 1;
2816 return 1;
2820 /* push type size as known at run time on top of value stack. Put
2821 alignment at 'a' */
2822 ST_FUNC void vla_runtime_type_size(CType *type, int *a)
2824 if (type->t & VT_VLA) {
2825 type_size(&type->ref->type, a);
2826 vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
2827 } else {
2828 vpushi(type_size(type, a));
2832 static void vla_sp_restore(void) {
2833 if (vlas_in_scope) {
2834 gen_vla_sp_restore(vla_sp_loc);
2838 static void vla_sp_restore_root(void) {
2839 if (vlas_in_scope) {
2840 gen_vla_sp_restore(vla_sp_root_loc);
2844 /* return the pointed type of t */
2845 static inline CType *pointed_type(CType *type)
2847 return &type->ref->type;
2850 /* modify type so that it is a pointer to the original type. */
2851 ST_FUNC void mk_pointer(CType *type)
2853 Sym *s;
2854 s = sym_push(SYM_FIELD, type, 0, -1);
2855 type->t = VT_PTR | (type->t & VT_STORAGE);
2856 type->ref = s;
2859 /* compare function types. OLD functions match any new functions */
2860 static int is_compatible_func(CType *type1, CType *type2)
2862 Sym *s1, *s2;
2864 s1 = type1->ref;
2865 s2 = type2->ref;
2866 if (!is_compatible_types(&s1->type, &s2->type))
2867 return 0;
2868 /* check func_call */
2869 if (s1->f.func_call != s2->f.func_call)
2870 return 0;
2871 /* XXX: not complete */
2872 if (s1->f.func_type == FUNC_OLD || s2->f.func_type == FUNC_OLD)
2873 return 1;
2874 if (s1->f.func_type != s2->f.func_type)
2875 return 0;
2876 while (s1 != NULL) {
2877 if (s2 == NULL)
2878 return 0;
2879 if (!is_compatible_unqualified_types(&s1->type, &s2->type))
2880 return 0;
2881 s1 = s1->next;
2882 s2 = s2->next;
2884 if (s2)
2885 return 0;
2886 return 1;
2889 /* return true if type1 and type2 are the same. If unqualified is
2890 true, qualifiers on the types are ignored.
2892 static int compare_types(CType *type1, CType *type2, int unqualified)
2894 int bt1, t1, t2;
2896 t1 = type1->t & VT_TYPE;
2897 t2 = type2->t & VT_TYPE;
2898 if (unqualified) {
2899 /* strip qualifiers before comparing */
2900 t1 &= ~(VT_CONSTANT | VT_VOLATILE);
2901 t2 &= ~(VT_CONSTANT | VT_VOLATILE);
2904 /* Default vs. explicit signedness only matters for char */
2905 if ((t1 & VT_BTYPE) != VT_BYTE) {
2906 t1 &= ~VT_DEFSIGN;
2907 t2 &= ~VT_DEFSIGN;
2909 /* XXX: bitfields ? */
2910 if (t1 != t2)
2911 return 0;
2912 /* test more complicated cases */
2913 bt1 = t1 & (VT_BTYPE | VT_ARRAY);
2914 if (bt1 == VT_PTR) {
2915 type1 = pointed_type(type1);
2916 type2 = pointed_type(type2);
2917 return is_compatible_types(type1, type2);
2918 } else if (bt1 & VT_ARRAY) {
2919 return type1->ref->c < 0 || type2->ref->c < 0
2920 || type1->ref->c == type2->ref->c;
2921 } else if (bt1 == VT_STRUCT) {
2922 return (type1->ref == type2->ref);
2923 } else if (bt1 == VT_FUNC) {
2924 return is_compatible_func(type1, type2);
2925 } else if (IS_ENUM(type1->t) || IS_ENUM(type2->t)) {
2926 return type1->ref == type2->ref;
2927 } else {
2928 return 1;
2932 /* return true if type1 and type2 are exactly the same (including
2933 qualifiers).
2935 static int is_compatible_types(CType *type1, CType *type2)
2937 return compare_types(type1,type2,0);
2940 /* return true if type1 and type2 are the same (ignoring qualifiers).
2942 static int is_compatible_unqualified_types(CType *type1, CType *type2)
2944 return compare_types(type1,type2,1);
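/* Illustrative example (user-level code, not part of the compiler):
   with the rules above

       int[10] and int[]        compatible (an unknown size matches any size)
       int *const and int *     the same only when 'unqualified' is set
       int * and const int *    never compatible (pointed-to qualifiers
                                are always significant)
       two struct types         compatible only if they refer to the same
                                declaration (same Sym)                       */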
2947 /* print a type. If 'varstr' is not NULL, then the variable is also
2948 printed in the type */
2949 /* XXX: union */
2950 /* XXX: add array and function pointers */
2951 static void type_to_str(char *buf, int buf_size,
2952 CType *type, const char *varstr)
2954 int bt, v, t;
2955 Sym *s, *sa;
2956 char buf1[256];
2957 const char *tstr;
2959 t = type->t;
2960 bt = t & VT_BTYPE;
2961 buf[0] = '\0';
2963 if (t & VT_EXTERN)
2964 pstrcat(buf, buf_size, "extern ");
2965 if (t & VT_STATIC)
2966 pstrcat(buf, buf_size, "static ");
2967 if (t & VT_TYPEDEF)
2968 pstrcat(buf, buf_size, "typedef ");
2969 if (t & VT_INLINE)
2970 pstrcat(buf, buf_size, "inline ");
2971 if (t & VT_VOLATILE)
2972 pstrcat(buf, buf_size, "volatile ");
2973 if (t & VT_CONSTANT)
2974 pstrcat(buf, buf_size, "const ");
2976 if (((t & VT_DEFSIGN) && bt == VT_BYTE)
2977 || ((t & VT_UNSIGNED)
2978 && (bt == VT_SHORT || bt == VT_INT || bt == VT_LLONG)
2979 && !IS_ENUM(t)
2981 pstrcat(buf, buf_size, (t & VT_UNSIGNED) ? "unsigned " : "signed ");
2983 buf_size -= strlen(buf);
2984 buf += strlen(buf);
2986 switch(bt) {
2987 case VT_VOID:
2988 tstr = "void";
2989 goto add_tstr;
2990 case VT_BOOL:
2991 tstr = "_Bool";
2992 goto add_tstr;
2993 case VT_BYTE:
2994 tstr = "char";
2995 goto add_tstr;
2996 case VT_SHORT:
2997 tstr = "short";
2998 goto add_tstr;
2999 case VT_INT:
3000 tstr = "int";
3001 goto maybe_long;
3002 case VT_LLONG:
3003 tstr = "long long";
3004 maybe_long:
3005 if (t & VT_LONG)
3006 tstr = "long";
3007 if (!IS_ENUM(t))
3008 goto add_tstr;
3009 tstr = "enum ";
3010 goto tstruct;
3011 case VT_FLOAT:
3012 tstr = "float";
3013 goto add_tstr;
3014 case VT_DOUBLE:
3015 tstr = "double";
3016 goto add_tstr;
3017 case VT_LDOUBLE:
3018 tstr = "long double";
3019 add_tstr:
3020 pstrcat(buf, buf_size, tstr);
3021 break;
3022 case VT_STRUCT:
3023 tstr = "struct ";
3024 if (IS_UNION(t))
3025 tstr = "union ";
3026 tstruct:
3027 pstrcat(buf, buf_size, tstr);
3028 v = type->ref->v & ~SYM_STRUCT;
3029 if (v >= SYM_FIRST_ANOM)
3030 pstrcat(buf, buf_size, "<anonymous>");
3031 else
3032 pstrcat(buf, buf_size, get_tok_str(v, NULL));
3033 break;
3034 case VT_FUNC:
3035 s = type->ref;
3036 buf1[0]=0;
3037 if (varstr && '*' == *varstr) {
3038 pstrcat(buf1, sizeof(buf1), "(");
3039 pstrcat(buf1, sizeof(buf1), varstr);
3040 pstrcat(buf1, sizeof(buf1), ")");
3042 pstrcat(buf1, sizeof(buf1), "(");
3043 sa = s->next;
3044 while (sa != NULL) {
3045 char buf2[256];
3046 type_to_str(buf2, sizeof(buf2), &sa->type, NULL);
3047 pstrcat(buf1, sizeof(buf1), buf2);
3048 sa = sa->next;
3049 if (sa)
3050 pstrcat(buf1, sizeof(buf1), ", ");
3052 if (s->f.func_type == FUNC_ELLIPSIS)
3053 pstrcat(buf1, sizeof(buf1), ", ...");
3054 pstrcat(buf1, sizeof(buf1), ")");
3055 type_to_str(buf, buf_size, &s->type, buf1);
3056 goto no_var;
3057 case VT_PTR:
3058 s = type->ref;
3059 if (t & VT_ARRAY) {
3060 if (varstr && '*' == *varstr)
3061 snprintf(buf1, sizeof(buf1), "(%s)[%d]", varstr, s->c);
3062 else
3063 snprintf(buf1, sizeof(buf1), "%s[%d]", varstr ? varstr : "", s->c);
3064 type_to_str(buf, buf_size, &s->type, buf1);
3065 goto no_var;
3067 pstrcpy(buf1, sizeof(buf1), "*");
3068 if (t & VT_CONSTANT)
3069 pstrcat(buf1, sizeof(buf1), "const ");
3070 if (t & VT_VOLATILE)
3071 pstrcat(buf1, sizeof(buf1), "volatile ");
3072 if (varstr)
3073 pstrcat(buf1, sizeof(buf1), varstr);
3074 type_to_str(buf, buf_size, &s->type, buf1);
3075 goto no_var;
3077 if (varstr) {
3078 pstrcat(buf, buf_size, " ");
3079 pstrcat(buf, buf_size, varstr);
3081 no_var: ;
3084 /* verify type compatibility to store vtop in 'dt' type, and generate
3085 casts if needed. */
3086 static void gen_assign_cast(CType *dt)
3088 CType *st, *type1, *type2;
3089 char buf1[256], buf2[256];
3090 int dbt, sbt, qualwarn, lvl;
3092 st = &vtop->type; /* source type */
3093 dbt = dt->t & VT_BTYPE;
3094 sbt = st->t & VT_BTYPE;
3095 if (sbt == VT_VOID || dbt == VT_VOID) {
3096 if (sbt == VT_VOID && dbt == VT_VOID)
3097 ; /* It is Ok if both are void */
3098 else
3099 tcc_error("cannot cast from/to void");
3101 if (dt->t & VT_CONSTANT)
3102 tcc_warning("assignment of read-only location");
3103 switch(dbt) {
3104 case VT_PTR:
3105 /* special cases for pointers */
3106 /* '0' can also be a pointer */
3107 if (is_null_pointer(vtop))
3108 break;
3109 /* accept implicit pointer to integer cast with warning */
3110 if (is_integer_btype(sbt)) {
3111 tcc_warning("assignment makes pointer from integer without a cast");
3112 break;
3114 type1 = pointed_type(dt);
3115 if (sbt == VT_PTR)
3116 type2 = pointed_type(st);
3117 else if (sbt == VT_FUNC)
3118 type2 = st; /* a function is implicitly a function pointer */
3119 else
3120 goto error;
3121 if (is_compatible_types(type1, type2))
3122 break;
3123 for (qualwarn = lvl = 0;; ++lvl) {
3124 if (((type2->t & VT_CONSTANT) && !(type1->t & VT_CONSTANT)) ||
3125 ((type2->t & VT_VOLATILE) && !(type1->t & VT_VOLATILE)))
3126 qualwarn = 1;
3127 dbt = type1->t & (VT_BTYPE|VT_LONG);
3128 sbt = type2->t & (VT_BTYPE|VT_LONG);
3129 if (dbt != VT_PTR || sbt != VT_PTR)
3130 break;
3131 type1 = pointed_type(type1);
3132 type2 = pointed_type(type2);
3134 if (!is_compatible_unqualified_types(type1, type2)) {
3135 if ((dbt == VT_VOID || sbt == VT_VOID) && lvl == 0) {
3136 /* void * can match anything */
3137 } else if (dbt == sbt
3138 && is_integer_btype(sbt & VT_BTYPE)
3139 && IS_ENUM(type1->t) + IS_ENUM(type2->t)
3140 + !!((type1->t ^ type2->t) & VT_UNSIGNED) < 2) {
3141 /* Like GCC, don't warn by default for mere changes
3142 in pointer target signedness. Do warn for different
3143 base types, though, in particular for unsigned enums
3144 and signed int targets. */
3145 } else {
3146 tcc_warning("assignment from incompatible pointer type");
3147 break;
3150 if (qualwarn)
3151 tcc_warning("assignment discards qualifiers from pointer target type");
3152 break;
3153 case VT_BYTE:
3154 case VT_SHORT:
3155 case VT_INT:
3156 case VT_LLONG:
3157 if (sbt == VT_PTR || sbt == VT_FUNC) {
3158 tcc_warning("assignment makes integer from pointer without a cast");
3159 } else if (sbt == VT_STRUCT) {
3160 goto case_VT_STRUCT;
3162 /* XXX: more tests */
3163 break;
3164 case VT_STRUCT:
3165 case_VT_STRUCT:
3166 if (!is_compatible_unqualified_types(dt, st)) {
3167 error:
3168 type_to_str(buf1, sizeof(buf1), st, NULL);
3169 type_to_str(buf2, sizeof(buf2), dt, NULL);
3170 tcc_error("cannot cast '%s' to '%s'", buf1, buf2);
3172 break;
3174 gen_cast(dt);
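/* Illustrative example (user-level code, not part of the compiler):
   assignments diagnosed by the checks above

       int *p = 5;                      warning: assignment makes pointer
                                        from integer without a cast
       const char *s; char *t = s;      warning: assignment discards
                                        qualifiers from pointer target type
       int i; float *f = &i;            warning: assignment from
                                        incompatible pointer type
       struct A {int x;} a;
       struct B {int x;} b;  a = b;     error: cannot cast 'struct B' to
                                        'struct A'                          */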
3177 /* store vtop in lvalue pushed on stack */
3178 ST_FUNC void vstore(void)
3180 int sbt, dbt, ft, r, t, size, align, bit_size, bit_pos, rc, delayed_cast;
3182 ft = vtop[-1].type.t;
3183 sbt = vtop->type.t & VT_BTYPE;
3184 dbt = ft & VT_BTYPE;
3185 if ((((sbt == VT_INT || sbt == VT_SHORT) && dbt == VT_BYTE) ||
3186 (sbt == VT_INT && dbt == VT_SHORT))
3187 && !(vtop->type.t & VT_BITFIELD)) {
3188 /* optimize char/short casts */
3189 delayed_cast = VT_MUSTCAST;
3190 vtop->type.t = ft & VT_TYPE;
3191 /* XXX: factorize */
3192 if (ft & VT_CONSTANT)
3193 tcc_warning("assignment of read-only location");
3194 } else {
3195 delayed_cast = 0;
3196 if (!(ft & VT_BITFIELD))
3197 gen_assign_cast(&vtop[-1].type);
3200 if (sbt == VT_STRUCT) {
3201 /* if structure, only generate pointer */
3202 /* structure assignment : generate memcpy */
3203 /* XXX: optimize if small size */
3204 size = type_size(&vtop->type, &align);
3206 /* destination */
3207 vswap();
3208 vtop->type.t = VT_PTR;
3209 gaddrof();
3211 /* address of memcpy()/memmove() */
3212 #ifdef TCC_ARM_EABI
3213 if(!(align & 7))
3214 vpush_global_sym(&func_old_type, TOK_memcpy8);
3215 else if(!(align & 3))
3216 vpush_global_sym(&func_old_type, TOK_memcpy4);
3217 else
3218 #endif
3219 /* Use memmove, rather than memcpy, as dest and src may be the same: */
3220 vpush_global_sym(&func_old_type, TOK_memmove);
3222 vswap();
3223 /* source */
3224 vpushv(vtop - 2);
3225 vtop->type.t = VT_PTR;
3226 gaddrof();
3227 /* type size */
3228 vpushi(size);
3229 gfunc_call(3);
3231 /* leave source on stack */
3232 } else if (ft & VT_BITFIELD) {
3233 /* bitfield store handling */
3235 /* save lvalue as expression result (example: s.b = s.a = n;) */
3236 vdup(), vtop[-1] = vtop[-2];
3238 bit_pos = BIT_POS(ft);
3239 bit_size = BIT_SIZE(ft);
3240 /* remove bit field info to avoid loops */
3241 vtop[-1].type.t = ft & ~VT_STRUCT_MASK;
3243 if ((ft & VT_BTYPE) == VT_BOOL) {
3244 gen_cast(&vtop[-1].type);
3245 vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
3248 r = adjust_bf(vtop - 1, bit_pos, bit_size);
3249 if (r == VT_STRUCT) {
3250 gen_cast_s((ft & VT_BTYPE) == VT_LLONG ? VT_LLONG : VT_INT);
3251 store_packed_bf(bit_pos, bit_size);
3252 } else {
3253 unsigned long long mask = (1ULL << bit_size) - 1;
3254 if ((ft & VT_BTYPE) != VT_BOOL) {
3255 /* mask source */
3256 if ((vtop[-1].type.t & VT_BTYPE) == VT_LLONG)
3257 vpushll(mask);
3258 else
3259 vpushi((unsigned)mask);
3260 gen_op('&');
3262 /* shift source */
3263 vpushi(bit_pos);
3264 gen_op(TOK_SHL);
3265 vswap();
3266 /* duplicate destination */
3267 vdup();
3268 vrott(3);
3269 /* load destination, mask and or with source */
3270 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
3271 vpushll(~(mask << bit_pos));
3272 else
3273 vpushi(~((unsigned)mask << bit_pos));
3274 gen_op('&');
3275 gen_op('|');
3276 /* store result */
3277 vstore();
3278 /* ... and discard */
3279 vpop();
3281 } else if (dbt == VT_VOID) {
3282 --vtop;
3283 } else {
3284 #ifdef CONFIG_TCC_BCHECK
3285 /* bound check case */
3286 if (vtop[-1].r & VT_MUSTBOUND) {
3287 vswap();
3288 gbound();
3289 vswap();
3291 #endif
3292 rc = RC_INT;
3293 if (is_float(ft)) {
3294 rc = RC_FLOAT;
3295 #ifdef TCC_TARGET_X86_64
3296 if ((ft & VT_BTYPE) == VT_LDOUBLE) {
3297 rc = RC_ST0;
3298 } else if ((ft & VT_BTYPE) == VT_QFLOAT) {
3299 rc = RC_FRET;
3301 #endif
3303 r = gv(rc); /* generate value */
3304 /* if lvalue was saved on stack, must read it */
3305 if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
3306 SValue sv;
3307 t = get_reg(RC_INT);
3308 #if PTR_SIZE == 8
3309 sv.type.t = VT_PTR;
3310 #else
3311 sv.type.t = VT_INT;
3312 #endif
3313 sv.r = VT_LOCAL | VT_LVAL;
3314 sv.c.i = vtop[-1].c.i;
3315 load(t, &sv);
3316 vtop[-1].r = t | VT_LVAL;
3318 /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
3319 #if PTR_SIZE == 8
3320 if (((ft & VT_BTYPE) == VT_QLONG) || ((ft & VT_BTYPE) == VT_QFLOAT)) {
3321 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
3322 #else
3323 if ((ft & VT_BTYPE) == VT_LLONG) {
3324 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
3325 #endif
3326 vtop[-1].type.t = load_type;
3327 store(r, vtop - 1);
3328 vswap();
3329 /* convert to int to increment easily */
3330 vtop->type.t = addr_type;
3331 gaddrof();
3332 vpushi(load_size);
3333 gen_op('+');
3334 vtop->r |= VT_LVAL;
3335 vswap();
3336 vtop[-1].type.t = load_type;
3337 /* XXX: it works because r2 is spilled last ! */
3338 store(vtop->r2, vtop - 1);
3339 } else {
3340 store(r, vtop - 1);
3343 vswap();
3344 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
3345 vtop->r |= delayed_cast;
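/* Illustrative example (user-level code, not part of the compiler):
   for a bit-field store such as

       struct S { unsigned a : 3, b : 5; } s;
       void set(int n) { s.b = n; }

   the code above masks the source with (1 << 5) - 1, shifts it to bit
   position 3, loads the containing word, clears the old bits with
   ~(mask << 3), ors in the new value and stores the word back.  The
   saved copy of the bit-field lvalue is kept as the expression result
   so that chained assignments like s.b = s.a = n still work. */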
3349 /* post selects post- vs. pre-increment behaviour; c is the token ++ or -- */
3350 ST_FUNC void inc(int post, int c)
3352 test_lvalue();
3353 vdup(); /* save lvalue */
3354 if (post) {
3355 gv_dup(); /* duplicate value */
3356 vrotb(3);
3357 vrotb(3);
3359 /* add constant */
3360 vpushi(c - TOK_MID);
3361 gen_op('+');
3362 vstore(); /* store value */
3363 if (post)
3364 vpop(); /* if post op, return saved value */
3367 ST_FUNC void parse_mult_str (CString *astr, const char *msg)
3369 /* read the string */
3370 if (tok != TOK_STR)
3371 expect(msg);
3372 cstr_new(astr);
3373 while (tok == TOK_STR) {
3374 /* XXX: add \0 handling too ? */
3375 cstr_cat(astr, tokc.str.data, -1);
3376 next();
3378 cstr_ccat(astr, '\0');
3381 /* If I is >= 1 and a power of two, returns log2(i)+1.
3382 If I is 0 returns 0. */
3383 static int exact_log2p1(int i)
3385 int ret;
3386 if (!i)
3387 return 0;
3388 for (ret = 1; i >= 1 << 8; ret += 8)
3389 i >>= 8;
3390 if (i >= 1 << 4)
3391 ret += 4, i >>= 4;
3392 if (i >= 1 << 2)
3393 ret += 2, i >>= 2;
3394 if (i >= 1 << 1)
3395 ret++;
3396 return ret;
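/* Illustrative example (not part of the compiler logic itself):
   exact_log2p1() maps

       0 -> 0,   1 -> 1,   2 -> 2,   8 -> 4,   4096 -> 13

   and is used below to store __attribute__((aligned(n))) compactly as
   a.aligned = log2(n) + 1, so that 1 << (a.aligned - 1) recovers n. */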
3399 /* Parse __attribute__((...)) GNUC extension. */
3400 static void parse_attribute(AttributeDef *ad)
3402 int t, n;
3403 CString astr;
3405 redo:
3406 if (tok != TOK_ATTRIBUTE1 && tok != TOK_ATTRIBUTE2)
3407 return;
3408 next();
3409 skip('(');
3410 skip('(');
3411 while (tok != ')') {
3412 if (tok < TOK_IDENT)
3413 expect("attribute name");
3414 t = tok;
3415 next();
3416 switch(t) {
3417 case TOK_CLEANUP1:
3418 case TOK_CLEANUP2:
3420 Sym *s;
3422 skip('(');
3423 s = sym_find(tok);
3424 if (!s) {
3425 tcc_warning("implicit declaration of function '%s'",
3426 get_tok_str(tok, &tokc));
3427 s = external_global_sym(tok, &func_old_type, 0);
3429 ad->cleanup_func = s;
3430 next();
3431 skip(')');
3432 break;
3434 case TOK_SECTION1:
3435 case TOK_SECTION2:
3436 skip('(');
3437 parse_mult_str(&astr, "section name");
3438 ad->section = find_section(tcc_state, (char *)astr.data);
3439 skip(')');
3440 cstr_free(&astr);
3441 break;
3442 case TOK_ALIAS1:
3443 case TOK_ALIAS2:
3444 skip('(');
3445 parse_mult_str(&astr, "alias(\"target\")");
3446 ad->alias_target = /* save string as token, for later */
3447 tok_alloc((char*)astr.data, astr.size-1)->tok;
3448 skip(')');
3449 cstr_free(&astr);
3450 break;
3451 case TOK_VISIBILITY1:
3452 case TOK_VISIBILITY2:
3453 skip('(');
3454 parse_mult_str(&astr,
3455 "visibility(\"default|hidden|internal|protected\")");
3456 if (!strcmp (astr.data, "default"))
3457 ad->a.visibility = STV_DEFAULT;
3458 else if (!strcmp (astr.data, "hidden"))
3459 ad->a.visibility = STV_HIDDEN;
3460 else if (!strcmp (astr.data, "internal"))
3461 ad->a.visibility = STV_INTERNAL;
3462 else if (!strcmp (astr.data, "protected"))
3463 ad->a.visibility = STV_PROTECTED;
3464 else
3465 expect("visibility(\"default|hidden|internal|protected\")");
3466 skip(')');
3467 cstr_free(&astr);
3468 break;
3469 case TOK_ALIGNED1:
3470 case TOK_ALIGNED2:
3471 if (tok == '(') {
3472 next();
3473 n = expr_const();
3474 if (n <= 0 || (n & (n - 1)) != 0)
3475 tcc_error("alignment must be a positive power of two");
3476 skip(')');
3477 } else {
3478 n = MAX_ALIGN;
3480 ad->a.aligned = exact_log2p1(n);
3481 if (n != 1 << (ad->a.aligned - 1))
3482 tcc_error("alignment of %d is larger than implemented", n);
3483 break;
3484 case TOK_PACKED1:
3485 case TOK_PACKED2:
3486 ad->a.packed = 1;
3487 break;
3488 case TOK_WEAK1:
3489 case TOK_WEAK2:
3490 ad->a.weak = 1;
3491 break;
3492 case TOK_UNUSED1:
3493 case TOK_UNUSED2:
3494 /* currently, no need to handle it because tcc does not
3495 track unused objects */
3496 break;
3497 case TOK_NORETURN1:
3498 case TOK_NORETURN2:
3499 /* currently ignored: tcc does not make use of
3500 noreturn information */
3501 break;
3502 case TOK_CDECL1:
3503 case TOK_CDECL2:
3504 case TOK_CDECL3:
3505 ad->f.func_call = FUNC_CDECL;
3506 break;
3507 case TOK_STDCALL1:
3508 case TOK_STDCALL2:
3509 case TOK_STDCALL3:
3510 ad->f.func_call = FUNC_STDCALL;
3511 break;
3512 #ifdef TCC_TARGET_I386
3513 case TOK_REGPARM1:
3514 case TOK_REGPARM2:
3515 skip('(');
3516 n = expr_const();
3517 if (n > 3)
3518 n = 3;
3519 else if (n < 0)
3520 n = 0;
3521 if (n > 0)
3522 ad->f.func_call = FUNC_FASTCALL1 + n - 1;
3523 skip(')');
3524 break;
3525 case TOK_FASTCALL1:
3526 case TOK_FASTCALL2:
3527 case TOK_FASTCALL3:
3528 ad->f.func_call = FUNC_FASTCALLW;
3529 break;
3530 #endif
3531 case TOK_MODE:
3532 skip('(');
3533 switch(tok) {
3534 case TOK_MODE_DI:
3535 ad->attr_mode = VT_LLONG + 1;
3536 break;
3537 case TOK_MODE_QI:
3538 ad->attr_mode = VT_BYTE + 1;
3539 break;
3540 case TOK_MODE_HI:
3541 ad->attr_mode = VT_SHORT + 1;
3542 break;
3543 case TOK_MODE_SI:
3544 case TOK_MODE_word:
3545 ad->attr_mode = VT_INT + 1;
3546 break;
3547 default:
3548 tcc_warning("__mode__(%s) not supported", get_tok_str(tok, NULL));
3549 break;
3551 next();
3552 skip(')');
3553 break;
3554 case TOK_DLLEXPORT:
3555 ad->a.dllexport = 1;
3556 break;
3557 case TOK_NODECORATE:
3558 ad->a.nodecorate = 1;
3559 break;
3560 case TOK_DLLIMPORT:
3561 ad->a.dllimport = 1;
3562 break;
3563 default:
3564 if (tcc_state->warn_unsupported)
3565 tcc_warning("'%s' attribute ignored", get_tok_str(t, NULL));
3566 /* skip parameters */
3567 if (tok == '(') {
3568 int parenthesis = 0;
3569 do {
3570 if (tok == '(')
3571 parenthesis++;
3572 else if (tok == ')')
3573 parenthesis--;
3574 next();
3575 } while (parenthesis && tok != -1);
3577 break;
3579 if (tok != ',')
3580 break;
3581 next();
3583 skip(')');
3584 skip(')');
3585 goto redo;
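/* Illustrative example (user-level code, not part of the compiler):
   attributes handled above, as they would appear in user code

       int buf[16] __attribute__((aligned(64), section(".mydata")));
       void done(int *p);
       void g(void) { int fd __attribute__((cleanup(done))) = 0; }
       struct P { char c; int i; } __attribute__((packed));

   Unknown attributes are skipped together with their parenthesized
   arguments, with a warning when warn_unsupported is enabled. */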
3588 static Sym * find_field (CType *type, int v)
3590 Sym *s = type->ref;
3591 v |= SYM_FIELD;
3592 while ((s = s->next) != NULL) {
3593 if ((s->v & SYM_FIELD) &&
3594 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3595 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3596 Sym *ret = find_field (&s->type, v);
3597 if (ret)
3598 return ret;
3600 if (s->v == v)
3601 break;
3603 return s;
3606 static void struct_add_offset (Sym *s, int offset)
3608 while ((s = s->next) != NULL) {
3609 if ((s->v & SYM_FIELD) &&
3610 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3611 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3612 struct_add_offset(s->type.ref, offset);
3613 } else
3614 s->c += offset;
3618 static void struct_layout(CType *type, AttributeDef *ad)
3620 int size, align, maxalign, offset, c, bit_pos, bit_size;
3621 int packed, a, bt, prevbt, prev_bit_size;
3622 int pcc = !tcc_state->ms_bitfields;
3623 int pragma_pack = *tcc_state->pack_stack_ptr;
3624 Sym *f;
3626 maxalign = 1;
3627 offset = 0;
3628 c = 0;
3629 bit_pos = 0;
3630 prevbt = VT_STRUCT; /* make it never match */
3631 prev_bit_size = 0;
3633 //#define BF_DEBUG
3635 for (f = type->ref->next; f; f = f->next) {
3636 if (f->type.t & VT_BITFIELD)
3637 bit_size = BIT_SIZE(f->type.t);
3638 else
3639 bit_size = -1;
3640 size = type_size(&f->type, &align);
3641 a = f->a.aligned ? 1 << (f->a.aligned - 1) : 0;
3642 packed = 0;
3644 if (pcc && bit_size == 0) {
3645 /* in pcc mode, packing does not affect zero-width bitfields */
3647 } else {
3648 /* in pcc mode, attribute packed overrides if set. */
3649 if (pcc && (f->a.packed || ad->a.packed))
3650 align = packed = 1;
3652 /* pragma pack overrides align if it is smaller, and always packs bitfields */
3653 if (pragma_pack) {
3654 packed = 1;
3655 if (pragma_pack < align)
3656 align = pragma_pack;
3657 /* in pcc mode pragma pack also overrides individual align */
3658 if (pcc && pragma_pack < a)
3659 a = 0;
3662 /* some individual align was specified */
3663 if (a)
3664 align = a;
3666 if (type->ref->type.t == VT_UNION) {
3667 if (pcc && bit_size >= 0)
3668 size = (bit_size + 7) >> 3;
3669 offset = 0;
3670 if (size > c)
3671 c = size;
3673 } else if (bit_size < 0) {
3674 if (pcc)
3675 c += (bit_pos + 7) >> 3;
3676 c = (c + align - 1) & -align;
3677 offset = c;
3678 if (size > 0)
3679 c += size;
3680 bit_pos = 0;
3681 prevbt = VT_STRUCT;
3682 prev_bit_size = 0;
3684 } else {
3685 /* A bit-field. Layout is more complicated. There are two
3686 options: PCC (GCC) compatible and MS compatible */
3687 if (pcc) {
3688 /* In PCC layout a bit-field is placed adjacent to the
3689 preceding bit-fields, except if:
3690 - it has zero-width
3691 - an individual alignment was given
3692 - it would overflow its base type container and
3693 there is no packing */
3694 if (bit_size == 0) {
3695 new_field:
3696 c = (c + ((bit_pos + 7) >> 3) + align - 1) & -align;
3697 bit_pos = 0;
3698 } else if (f->a.aligned) {
3699 goto new_field;
3700 } else if (!packed) {
3701 int a8 = align * 8;
3702 int ofs = ((c * 8 + bit_pos) % a8 + bit_size + a8 - 1) / a8;
3703 if (ofs > size / align)
3704 goto new_field;
3707 /* in pcc mode, long long bitfields have type int if they fit */
3708 if (size == 8 && bit_size <= 32)
3709 f->type.t = (f->type.t & ~VT_BTYPE) | VT_INT, size = 4;
3711 while (bit_pos >= align * 8)
3712 c += align, bit_pos -= align * 8;
3713 offset = c;
3715 /* In PCC layout named bit-fields influence the alignment
3716 of the containing struct using the base types alignment,
3717 except for packed fields (which here have correct align). */
3718 if (f->v & SYM_FIRST_ANOM
3719 // && bit_size // ??? gcc on ARM/rpi does that
3721 align = 1;
3723 } else {
3724 bt = f->type.t & VT_BTYPE;
3725 if ((bit_pos + bit_size > size * 8)
3726 || (bit_size > 0) == (bt != prevbt)
3728 c = (c + align - 1) & -align;
3729 offset = c;
3730 bit_pos = 0;
3731 /* In MS bitfield mode a bit-field run always uses
3732 at least as many bits as the underlying type.
3733 To start a new run it's also required that this
3734 or the last bit-field had non-zero width. */
3735 if (bit_size || prev_bit_size)
3736 c += size;
3738 /* In MS layout the record's alignment is normally
3739 influenced by the field, except for a zero-width
3740 field at the start of a run (further zero-width
3741 fields do influence it again). */
3742 if (bit_size == 0 && prevbt != bt)
3743 align = 1;
3744 prevbt = bt;
3745 prev_bit_size = bit_size;
3748 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
3749 | (bit_pos << VT_STRUCT_SHIFT);
3750 bit_pos += bit_size;
3752 if (align > maxalign)
3753 maxalign = align;
3755 #ifdef BF_DEBUG
3756 printf("set field %s offset %-2d size %-2d align %-2d",
3757 get_tok_str(f->v & ~SYM_FIELD, NULL), offset, size, align);
3758 if (f->type.t & VT_BITFIELD) {
3759 printf(" pos %-2d bits %-2d",
3760 BIT_POS(f->type.t),
3761 BIT_SIZE(f->type.t)
3764 printf("\n");
3765 #endif
3767 if (f->v & SYM_FIRST_ANOM && (f->type.t & VT_BTYPE) == VT_STRUCT) {
3768 Sym *ass;
3769 /* An anonymous struct/union. Adjust member offsets
3770 to reflect the real offset of our containing struct.
3771 Also set the offset of this anon member inside
3772 the outer struct to be zero. Via this it
3773 works when accessing the field offset directly
3774 (from base object), as well as when recursing
3775 members in initializer handling. */
3776 int v2 = f->type.ref->v;
3777 if (!(v2 & SYM_FIELD) &&
3778 (v2 & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
3779 Sym **pps;
3780 /* This happens only with MS extensions. The
3781 anon member has a named struct type, so it
3782 potentially is shared with other references.
3783 We need to unshare members so we can modify
3784 them. */
3785 ass = f->type.ref;
3786 f->type.ref = sym_push(anon_sym++ | SYM_FIELD,
3787 &f->type.ref->type, 0,
3788 f->type.ref->c);
3789 pps = &f->type.ref->next;
3790 while ((ass = ass->next) != NULL) {
3791 *pps = sym_push(ass->v, &ass->type, 0, ass->c);
3792 pps = &((*pps)->next);
3794 *pps = NULL;
3796 struct_add_offset(f->type.ref, offset);
3797 f->c = 0;
3798 } else {
3799 f->c = offset;
3802 f->r = 0;
3805 if (pcc)
3806 c += (bit_pos + 7) >> 3;
3808 /* store size and alignment */
3809 a = bt = ad->a.aligned ? 1 << (ad->a.aligned - 1) : 1;
3810 if (a < maxalign)
3811 a = maxalign;
3812 type->ref->r = a;
3813 if (pragma_pack && pragma_pack < maxalign && 0 == pcc) {
3814 /* can happen if individual align for some member was given. In
3815 this case MSVC ignores maxalign when aligning the size */
3816 a = pragma_pack;
3817 if (a < bt)
3818 a = bt;
3820 c = (c + a - 1) & -a;
3821 type->ref->c = c;
3823 #ifdef BF_DEBUG
3824 printf("struct size %-2d align %-2d\n\n", c, a), fflush(stdout);
3825 #endif
3827 /* check whether we can access bitfields by their type */
3828 for (f = type->ref->next; f; f = f->next) {
3829 int s, px, cx, c0;
3830 CType t;
3832 if (0 == (f->type.t & VT_BITFIELD))
3833 continue;
3834 f->type.ref = f;
3835 f->auxtype = -1;
3836 bit_size = BIT_SIZE(f->type.t);
3837 if (bit_size == 0)
3838 continue;
3839 bit_pos = BIT_POS(f->type.t);
3840 size = type_size(&f->type, &align);
3841 if (bit_pos + bit_size <= size * 8 && f->c + size <= c)
3842 continue;
3844 /* try to access the field using a different type */
3845 c0 = -1, s = align = 1;
3846 for (;;) {
3847 px = f->c * 8 + bit_pos;
3848 cx = (px >> 3) & -align;
3849 px = px - (cx << 3);
3850 if (c0 == cx)
3851 break;
3852 s = (px + bit_size + 7) >> 3;
3853 if (s > 4) {
3854 t.t = VT_LLONG;
3855 } else if (s > 2) {
3856 t.t = VT_INT;
3857 } else if (s > 1) {
3858 t.t = VT_SHORT;
3859 } else {
3860 t.t = VT_BYTE;
3862 s = type_size(&t, &align);
3863 c0 = cx;
3866 if (px + bit_size <= s * 8 && cx + s <= c) {
3867 /* update offset and bit position */
3868 f->c = cx;
3869 bit_pos = px;
3870 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
3871 | (bit_pos << VT_STRUCT_SHIFT);
3872 if (s != size)
3873 f->auxtype = t.t;
3874 #ifdef BF_DEBUG
3875 printf("FIX field %s offset %-2d size %-2d align %-2d "
3876 "pos %-2d bits %-2d\n",
3877 get_tok_str(f->v & ~SYM_FIELD, NULL),
3878 cx, s, align, px, bit_size);
3879 #endif
3880 } else {
3881 /* fall back to load/store single-byte wise */
3882 f->auxtype = VT_STRUCT;
3883 #ifdef BF_DEBUG
3884 printf("FIX field %s : load byte-wise\n",
3885 get_tok_str(f->v & ~SYM_FIELD, NULL));
3886 #endif
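/* Illustrative example (user-level code, not part of the compiler):
   effect of the layout rules above, assuming int has size and
   alignment 4

       struct S { char c; int i; };

   By default c is at offset 0, i is aligned up to offset 4, and
   sizeof(struct S) == 8 with alignment 4.  Under #pragma pack(1) or
   __attribute__((packed)) the alignment drops to 1, i is placed at
   offset 1 and sizeof(struct S) == 5. */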
3891 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
3892 static void struct_decl(CType *type, int u)
3894 int v, c, size, align, flexible;
3895 int bit_size, bsize, bt;
3896 Sym *s, *ss, **ps;
3897 AttributeDef ad, ad1;
3898 CType type1, btype;
3900 memset(&ad, 0, sizeof ad);
3901 next();
3902 parse_attribute(&ad);
3903 if (tok != '{') {
3904 v = tok;
3905 next();
3906 /* struct already defined ? return it */
3907 if (v < TOK_IDENT)
3908 expect("struct/union/enum name");
3909 s = struct_find(v);
3910 if (s && (s->sym_scope == local_scope || tok != '{')) {
3911 if (u == s->type.t)
3912 goto do_decl;
3913 if (u == VT_ENUM && IS_ENUM(s->type.t))
3914 goto do_decl;
3915 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
3917 } else {
3918 v = anon_sym++;
3920 /* Record the original enum/struct/union token. */
3921 type1.t = u == VT_ENUM ? u | VT_INT | VT_UNSIGNED : u;
3922 type1.ref = NULL;
3923 /* we put an undefined size for struct/union */
3924 s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
3925 s->r = 0; /* default alignment is zero, as in gcc */
3926 do_decl:
3927 type->t = s->type.t;
3928 type->ref = s;
3930 if (tok == '{') {
3931 next();
3932 if (s->c != -1)
3933 tcc_error("struct/union/enum already defined");
3934 /* cannot be empty */
3935 /* empty enums are not allowed */
3936 ps = &s->next;
3937 if (u == VT_ENUM) {
3938 long long ll = 0, pl = 0, nl = 0;
3939 CType t;
3940 t.ref = s;
3941 /* enum symbols have static storage */
3942 t.t = VT_INT|VT_STATIC|VT_ENUM_VAL;
3943 for(;;) {
3944 v = tok;
3945 if (v < TOK_UIDENT)
3946 expect("identifier");
3947 ss = sym_find(v);
3948 if (ss && !local_stack)
3949 tcc_error("redefinition of enumerator '%s'",
3950 get_tok_str(v, NULL));
3951 next();
3952 if (tok == '=') {
3953 next();
3954 ll = expr_const64();
3956 ss = sym_push(v, &t, VT_CONST, 0);
3957 ss->enum_val = ll;
3958 *ps = ss, ps = &ss->next;
3959 if (ll < nl)
3960 nl = ll;
3961 if (ll > pl)
3962 pl = ll;
3963 if (tok != ',')
3964 break;
3965 next();
3966 ll++;
3967 /* NOTE: we accept a trailing comma */
3968 if (tok == '}')
3969 break;
3971 skip('}');
3972 /* set integral type of the enum */
3973 t.t = VT_INT;
3974 if (nl >= 0) {
3975 if (pl != (unsigned)pl)
3976 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
3977 t.t |= VT_UNSIGNED;
3978 } else if (pl != (int)pl || nl != (int)nl)
3979 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
3980 s->type.t = type->t = t.t | VT_ENUM;
3981 s->c = 0;
3982 /* set type for enum members */
3983 for (ss = s->next; ss; ss = ss->next) {
3984 ll = ss->enum_val;
3985 if (ll == (int)ll) /* default is int if it fits */
3986 continue;
3987 if (t.t & VT_UNSIGNED) {
3988 ss->type.t |= VT_UNSIGNED;
3989 if (ll == (unsigned)ll)
3990 continue;
3992 ss->type.t = (ss->type.t & ~VT_BTYPE)
3993 | (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
3995 } else {
3996 c = 0;
3997 flexible = 0;
3998 while (tok != '}') {
3999 if (!parse_btype(&btype, &ad1)) {
4000 skip(';');
4001 continue;
4003 while (1) {
4004 if (flexible)
4005 tcc_error("flexible array member '%s' not at the end of struct",
4006 get_tok_str(v, NULL));
4007 bit_size = -1;
4008 v = 0;
4009 type1 = btype;
4010 if (tok != ':') {
4011 if (tok != ';')
4012 type_decl(&type1, &ad1, &v, TYPE_DIRECT);
4013 if (v == 0) {
4014 if ((type1.t & VT_BTYPE) != VT_STRUCT)
4015 expect("identifier");
4016 else {
4017 int v = btype.ref->v;
4018 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
4019 if (tcc_state->ms_extensions == 0)
4020 expect("identifier");
4024 if (type_size(&type1, &align) < 0) {
4025 if ((u == VT_STRUCT) && (type1.t & VT_ARRAY) && c)
4026 flexible = 1;
4027 else
4028 tcc_error("field '%s' has incomplete type",
4029 get_tok_str(v, NULL));
4031 if ((type1.t & VT_BTYPE) == VT_FUNC ||
4032 (type1.t & VT_BTYPE) == VT_VOID ||
4033 (type1.t & VT_STORAGE))
4034 tcc_error("invalid type for '%s'",
4035 get_tok_str(v, NULL));
4037 if (tok == ':') {
4038 next();
4039 bit_size = expr_const();
4040 /* XXX: handle v = 0 case for messages */
4041 if (bit_size < 0)
4042 tcc_error("negative width in bit-field '%s'",
4043 get_tok_str(v, NULL));
4044 if (v && bit_size == 0)
4045 tcc_error("zero width for bit-field '%s'",
4046 get_tok_str(v, NULL));
4047 parse_attribute(&ad1);
4049 size = type_size(&type1, &align);
4050 if (bit_size >= 0) {
4051 bt = type1.t & VT_BTYPE;
4052 if (bt != VT_INT &&
4053 bt != VT_BYTE &&
4054 bt != VT_SHORT &&
4055 bt != VT_BOOL &&
4056 bt != VT_LLONG)
4057 tcc_error("bitfields must have scalar type");
4058 bsize = size * 8;
4059 if (bit_size > bsize) {
4060 tcc_error("width of '%s' exceeds its type",
4061 get_tok_str(v, NULL));
4062 } else if (bit_size == bsize
4063 && !ad.a.packed && !ad1.a.packed) {
4064 /* no need for bit fields */
4066 } else if (bit_size == 64) {
4067 tcc_error("field width 64 not implemented");
4068 } else {
4069 type1.t = (type1.t & ~VT_STRUCT_MASK)
4070 | VT_BITFIELD
4071 | (bit_size << (VT_STRUCT_SHIFT + 6));
4074 if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
4075 /* Remember we've seen a real field to check
4076 for placement of flexible array member. */
4077 c = 1;
4079 /* If member is a struct or bit-field, enforce
4080 placing into the struct (as anonymous). */
4081 if (v == 0 &&
4082 ((type1.t & VT_BTYPE) == VT_STRUCT ||
4083 bit_size >= 0)) {
4084 v = anon_sym++;
4086 if (v) {
4087 ss = sym_push(v | SYM_FIELD, &type1, 0, 0);
4088 ss->a = ad1.a;
4089 *ps = ss;
4090 ps = &ss->next;
4092 if (tok == ';' || tok == TOK_EOF)
4093 break;
4094 skip(',');
4096 skip(';');
4098 skip('}');
4099 parse_attribute(&ad);
4100 struct_layout(type, &ad);
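/* Illustrative example (user-level code, not part of the compiler):
   integral type given to an enum by the code above

       enum A { A0 = 1, A1 = 2 };      no negative values -> unsigned int
                                       (enumerators that fit stay plain int)
       enum B { B0 = -1, B1 = 10 };    has a negative value, all fit -> int
       enum C { C0 = 0x100000000 };    exceeds 32 bits -> unsigned long long */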
4105 static void sym_to_attr(AttributeDef *ad, Sym *s)
4107 merge_symattr(&ad->a, &s->a);
4108 merge_funcattr(&ad->f, &s->f);
4111 /* Add type qualifiers to a type. If the type is an array then the qualifiers
4112 are added to the element type, copied because it could be a typedef. */
4113 static void parse_btype_qualify(CType *type, int qualifiers)
4115 while (type->t & VT_ARRAY) {
4116 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
4117 type = &type->ref->type;
4119 type->t |= qualifiers;
4122 /* return 0 if no type declaration. otherwise, return the basic type
4123 and skip it.
4125 static int parse_btype(CType *type, AttributeDef *ad)
4127 int t, u, bt, st, type_found, typespec_found, g;
4128 Sym *s;
4129 CType type1;
4131 memset(ad, 0, sizeof(AttributeDef));
4132 type_found = 0;
4133 typespec_found = 0;
4134 t = VT_INT;
4135 bt = st = -1;
4136 type->ref = NULL;
4138 while(1) {
4139 switch(tok) {
4140 case TOK_EXTENSION:
4141 /* currently, we really ignore extension */
4142 next();
4143 continue;
4145 /* basic types */
4146 case TOK_CHAR:
4147 u = VT_BYTE;
4148 basic_type:
4149 next();
4150 basic_type1:
4151 if (u == VT_SHORT || u == VT_LONG) {
4152 if (st != -1 || (bt != -1 && bt != VT_INT))
4153 tmbt: tcc_error("too many basic types");
4154 st = u;
4155 } else {
4156 if (bt != -1 || (st != -1 && u != VT_INT))
4157 goto tmbt;
4158 bt = u;
4160 if (u != VT_INT)
4161 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
4162 typespec_found = 1;
4163 break;
4164 case TOK_VOID:
4165 u = VT_VOID;
4166 goto basic_type;
4167 case TOK_SHORT:
4168 u = VT_SHORT;
4169 goto basic_type;
4170 case TOK_INT:
4171 u = VT_INT;
4172 goto basic_type;
4173 case TOK_LONG:
4174 if ((t & VT_BTYPE) == VT_DOUBLE) {
4175 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
4176 } else if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
4177 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LLONG;
4178 } else {
4179 u = VT_LONG;
4180 goto basic_type;
4182 next();
4183 break;
4184 #ifdef TCC_TARGET_ARM64
4185 case TOK_UINT128:
4186 /* GCC's __uint128_t appears in some Linux header files. Make it a
4187 synonym for long double to get the size and alignment right. */
4188 u = VT_LDOUBLE;
4189 goto basic_type;
4190 #endif
4191 case TOK_BOOL:
4192 u = VT_BOOL;
4193 goto basic_type;
4194 case TOK_FLOAT:
4195 u = VT_FLOAT;
4196 goto basic_type;
4197 case TOK_DOUBLE:
4198 if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
4199 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
4200 } else {
4201 u = VT_DOUBLE;
4202 goto basic_type;
4204 next();
4205 break;
4206 case TOK_ENUM:
4207 struct_decl(&type1, VT_ENUM);
4208 basic_type2:
4209 u = type1.t;
4210 type->ref = type1.ref;
4211 goto basic_type1;
4212 case TOK_STRUCT:
4213 struct_decl(&type1, VT_STRUCT);
4214 goto basic_type2;
4215 case TOK_UNION:
4216 struct_decl(&type1, VT_UNION);
4217 goto basic_type2;
4219 /* type modifiers */
4220 case TOK_CONST1:
4221 case TOK_CONST2:
4222 case TOK_CONST3:
4223 type->t = t;
4224 parse_btype_qualify(type, VT_CONSTANT);
4225 t = type->t;
4226 next();
4227 break;
4228 case TOK_VOLATILE1:
4229 case TOK_VOLATILE2:
4230 case TOK_VOLATILE3:
4231 type->t = t;
4232 parse_btype_qualify(type, VT_VOLATILE);
4233 t = type->t;
4234 next();
4235 break;
4236 case TOK_SIGNED1:
4237 case TOK_SIGNED2:
4238 case TOK_SIGNED3:
4239 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
4240 tcc_error("signed and unsigned modifier");
4241 t |= VT_DEFSIGN;
4242 next();
4243 typespec_found = 1;
4244 break;
4245 case TOK_REGISTER:
4246 case TOK_AUTO:
4247 case TOK_RESTRICT1:
4248 case TOK_RESTRICT2:
4249 case TOK_RESTRICT3:
4250 next();
4251 break;
4252 case TOK_UNSIGNED:
4253 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
4254 tcc_error("signed and unsigned modifier");
4255 t |= VT_DEFSIGN | VT_UNSIGNED;
4256 next();
4257 typespec_found = 1;
4258 break;
4260 /* storage */
4261 case TOK_EXTERN:
4262 g = VT_EXTERN;
4263 goto storage;
4264 case TOK_STATIC:
4265 g = VT_STATIC;
4266 goto storage;
4267 case TOK_TYPEDEF:
4268 g = VT_TYPEDEF;
4269 goto storage;
4270 storage:
4271 if (t & (VT_EXTERN|VT_STATIC|VT_TYPEDEF) & ~g)
4272 tcc_error("multiple storage classes");
4273 t |= g;
4274 next();
4275 break;
4276 case TOK_INLINE1:
4277 case TOK_INLINE2:
4278 case TOK_INLINE3:
4279 t |= VT_INLINE;
4280 next();
4281 break;
4283 /* GNUC attribute */
4284 case TOK_ATTRIBUTE1:
4285 case TOK_ATTRIBUTE2:
4286 parse_attribute(ad);
4287 if (ad->attr_mode) {
4288 u = ad->attr_mode -1;
4289 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
4291 continue;
4292 /* GNUC typeof */
4293 case TOK_TYPEOF1:
4294 case TOK_TYPEOF2:
4295 case TOK_TYPEOF3:
4296 next();
4297 parse_expr_type(&type1);
4298 /* remove all storage modifiers except typedef */
4299 type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
4300 if (type1.ref)
4301 sym_to_attr(ad, type1.ref);
4302 goto basic_type2;
4303 default:
4304 if (typespec_found)
4305 goto the_end;
4306 s = sym_find(tok);
4307 if (!s || !(s->type.t & VT_TYPEDEF))
4308 goto the_end;
4309 t &= ~(VT_BTYPE|VT_LONG);
4310 u = t & ~(VT_CONSTANT | VT_VOLATILE), t ^= u;
4311 type->t = (s->type.t & ~VT_TYPEDEF) | u;
4312 type->ref = s->type.ref;
4313 if (t)
4314 parse_btype_qualify(type, t);
4315 t = type->t;
4316 /* get attributes from typedef */
4317 sym_to_attr(ad, s);
4318 next();
4319 typespec_found = 1;
4320 st = bt = -2;
4321 break;
4323 type_found = 1;
4325 the_end:
4326 if (tcc_state->char_is_unsigned) {
4327 if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
4328 t |= VT_UNSIGNED;
4330 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
4331 bt = t & (VT_BTYPE|VT_LONG);
4332 if (bt == VT_LONG)
4333 t |= LONG_SIZE == 8 ? VT_LLONG : VT_INT;
4334 #ifdef TCC_TARGET_PE
4335 if (bt == VT_LDOUBLE)
4336 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_DOUBLE;
4337 #endif
4338 type->t = t;
4339 return type_found;
4342 /* convert a function parameter type (array to pointer and function to
4343 function pointer) */
4344 static inline void convert_parameter_type(CType *pt)
4346 /* remove const and volatile qualifiers (XXX: const could be used
4347 to indicate a const function parameter) */
4348 pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
4349 /* array must be transformed to pointer according to ANSI C */
4350 pt->t &= ~VT_ARRAY;
4351 if ((pt->t & VT_BTYPE) == VT_FUNC) {
4352 mk_pointer(pt);
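/* Illustrative sketch of the adjustments above (example C input, names are
   placeholders):

       void f(int a[10], int g(void), const int c);
       // is treated as if declared:
       void f(int *a, int (*g)(void), int c);

   i.e. arrays decay to pointers, functions to function pointers, and
   top-level qualifiers on the parameter type are dropped. */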
4356 ST_FUNC void parse_asm_str(CString *astr)
4358 skip('(');
4359 parse_mult_str(astr, "string constant");
4362 /* Parse an asm label and return the token */
4363 static int asm_label_instr(void)
4365 int v;
4366 CString astr;
4368 next();
4369 parse_asm_str(&astr);
4370 skip(')');
4371 #ifdef ASM_DEBUG
4372 printf("asm_alias: \"%s\"\n", (char *)astr.data);
4373 #endif
4374 v = tok_alloc(astr.data, astr.size - 1)->tok;
4375 cstr_free(&astr);
4376 return v;
4379 static int post_type(CType *type, AttributeDef *ad, int storage, int td)
4381 int n, l, t1, arg_size, align;
4382 Sym **plast, *s, *first;
4383 AttributeDef ad1;
4384 CType pt;
4386 if (tok == '(') {
4387 /* function type, or recursive declarator (return if so) */
4388 next();
4389 if (td && !(td & TYPE_ABSTRACT))
4390 return 0;
4391 if (tok == ')')
4392 l = 0;
4393 else if (parse_btype(&pt, &ad1))
4394 l = FUNC_NEW;
4395 else if (td) {
4396 merge_attr (ad, &ad1);
4397 return 0;
4398 } else
4399 l = FUNC_OLD;
4400 first = NULL;
4401 plast = &first;
4402 arg_size = 0;
4403 if (l) {
4404 for(;;) {
4405 /* read param name and compute offset */
4406 if (l != FUNC_OLD) {
4407 if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
4408 break;
4409 type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT);
4410 if ((pt.t & VT_BTYPE) == VT_VOID)
4411 tcc_error("parameter declared as void");
4412 } else {
4413 n = tok;
4414 if (n < TOK_UIDENT)
4415 expect("identifier");
4416 pt.t = VT_VOID; /* invalid type */
4417 next();
4419 convert_parameter_type(&pt);
4420 arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
4421 s = sym_push(n | SYM_FIELD, &pt, 0, 0);
4422 *plast = s;
4423 plast = &s->next;
4424 if (tok == ')')
4425 break;
4426 skip(',');
4427 if (l == FUNC_NEW && tok == TOK_DOTS) {
4428 l = FUNC_ELLIPSIS;
4429 next();
4430 break;
4432 if (l == FUNC_NEW && !parse_btype(&pt, &ad1))
4433 tcc_error("invalid type");
4435 } else
4436 /* if no parameters, then old type prototype */
4437 l = FUNC_OLD;
4438 skip(')');
4439 /* NOTE: const is ignored in returned type as it has a special
4440 meaning in gcc / C++ */
4441 type->t &= ~VT_CONSTANT;
4442 /* some ancient pre-K&R C allows a function to return an array
4443 and the array brackets to be put after the arguments, such
4444 that "int c()[]" means something like "int[] c()" */
4445 if (tok == '[') {
4446 next();
4447 skip(']'); /* only handle simple "[]" */
4448 mk_pointer(type);
4450 /* we push an anonymous symbol which will contain the function prototype */
4451 ad->f.func_args = arg_size;
4452 ad->f.func_type = l;
4453 s = sym_push(SYM_FIELD, type, 0, 0);
4454 s->a = ad->a;
4455 s->f = ad->f;
4456 s->next = first;
4457 type->t = VT_FUNC;
4458 type->ref = s;
4459 } else if (tok == '[') {
4460 int saved_nocode_wanted = nocode_wanted;
4461 /* array definition */
4462 next();
4463 while (1) {
4464 /* XXX The optional type-quals and static should only be accepted
4465 in parameter decls. The '*' as well, and then even only
4466 in prototypes (not function defs). */
4467 switch (tok) {
4468 case TOK_RESTRICT1: case TOK_RESTRICT2: case TOK_RESTRICT3:
4469 case TOK_CONST1:
4470 case TOK_VOLATILE1:
4471 case TOK_STATIC:
4472 case '*':
4473 next();
4474 continue;
4475 default:
4476 break;
4478 break;
4480 n = -1;
4481 t1 = 0;
4482 if (tok != ']') {
4483 if (!local_stack || (storage & VT_STATIC))
4484 vpushi(expr_const());
4485 else {
4486 /* VLAs (which can only happen with local_stack && !VT_STATIC)
4487 length must always be evaluated, even under nocode_wanted,
4488 so that its size slot is initialized (e.g. under sizeof
4489 or typeof). */
4490 nocode_wanted = 0;
4491 gexpr();
4493 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4494 n = vtop->c.i;
4495 if (n < 0)
4496 tcc_error("invalid array size");
4497 } else {
4498 if (!is_integer_btype(vtop->type.t & VT_BTYPE))
4499 tcc_error("size of variable length array should be an integer");
4500 t1 = VT_VLA;
4503 skip(']');
4504 /* parse next post type */
4505 post_type(type, ad, storage, 0);
4506 if (type->t == VT_FUNC)
4507 tcc_error("declaration of an array of functions");
4508 t1 |= type->t & VT_VLA;
4510 if (t1 & VT_VLA) {
4511 loc -= type_size(&int_type, &align);
4512 loc &= -align;
4513 n = loc;
4515 vla_runtime_type_size(type, &align);
4516 gen_op('*');
4517 vset(&int_type, VT_LOCAL|VT_LVAL, n);
4518 vswap();
4519 vstore();
4521 if (n != -1)
4522 vpop();
4523 nocode_wanted = saved_nocode_wanted;
4525 /* we push an anonymous symbol which will contain the array
4526 element type */
4527 s = sym_push(SYM_FIELD, type, 0, n);
4528 type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
4529 type->ref = s;
4531 return 1;
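/* Illustrative sketch of the declarator suffixes post_type() handles
   (example C input):

       int f(int, char *);   // new-style prototype      -> FUNC_NEW
       int g();              // old-style, no parameters -> FUNC_OLD
       int h(int n, ...);    // variadic                 -> FUNC_ELLIPSIS
       int a[10];            // fixed-size array
       int v[n];             // VLA, when 'n' is not constant and the
                             // declaration is local and not static
*/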
4534 /* Parse a type declarator (except basic type), and return the type
4535 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4536 expected. 'type' should contain the basic type. 'ad' is the
4537 attribute definition of the basic type. It can be modified by
4538 type_decl(). If this (possibly abstract) declarator is a pointer chain,
4539 it returns the innermost pointed-to type (equal to *type, but a different
4540 pointer); otherwise it returns type itself. That is used for recursive calls. */
4541 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td)
4543 CType *post, *ret;
4544 int qualifiers, storage;
4546 /* recursive type, remove storage bits first, apply them later again */
4547 storage = type->t & VT_STORAGE;
4548 type->t &= ~VT_STORAGE;
4549 post = ret = type;
4551 while (tok == '*') {
4552 qualifiers = 0;
4553 redo:
4554 next();
4555 switch(tok) {
4556 case TOK_CONST1:
4557 case TOK_CONST2:
4558 case TOK_CONST3:
4559 qualifiers |= VT_CONSTANT;
4560 goto redo;
4561 case TOK_VOLATILE1:
4562 case TOK_VOLATILE2:
4563 case TOK_VOLATILE3:
4564 qualifiers |= VT_VOLATILE;
4565 goto redo;
4566 case TOK_RESTRICT1:
4567 case TOK_RESTRICT2:
4568 case TOK_RESTRICT3:
4569 goto redo;
4570 /* XXX: clarify attribute handling */
4571 case TOK_ATTRIBUTE1:
4572 case TOK_ATTRIBUTE2:
4573 parse_attribute(ad);
4574 break;
4576 mk_pointer(type);
4577 type->t |= qualifiers;
4578 if (ret == type)
4579 /* innermost pointed to type is the one for the first derivation */
4580 ret = pointed_type(type);
4583 if (tok == '(') {
4584 /* This is possibly a parameter type list for abstract declarators
4585 ('int ()'), use post_type for testing this. */
4586 if (!post_type(type, ad, 0, td)) {
4587 /* It's not, so it's a nested declarator, and the post operations
4588 apply to the innermost pointed to type (if any). */
4589 /* XXX: this is not correct to modify 'ad' at this point, but
4590 the syntax is not clear */
4591 parse_attribute(ad);
4592 post = type_decl(type, ad, v, td);
4593 skip(')');
4594 } else
4595 goto abstract;
4596 } else if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
4597 /* type identifier */
4598 *v = tok;
4599 next();
4600 } else {
4601 abstract:
4602 if (!(td & TYPE_ABSTRACT))
4603 expect("identifier");
4604 *v = 0;
4606 post_type(post, ad, storage, 0);
4607 parse_attribute(ad);
4608 type->t |= storage;
4609 return ret;
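/* Illustrative sketch of declarators parsed here, on top of a basic type
   already read by parse_btype() (example C input):

       int *p;            // pointer chain, handled by the '*' loop above
       int (*fp)(int);    // nested declarator, parsed recursively via '('
       int n;             // plain identifier (TYPE_DIRECT)
       sizeof(int *)      // abstract declarator, no name (TYPE_ABSTRACT)
*/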
4612 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
4613 ST_FUNC int lvalue_type(int t)
4615 int bt, r;
4616 r = VT_LVAL;
4617 bt = t & VT_BTYPE;
4618 if (bt == VT_BYTE || bt == VT_BOOL)
4619 r |= VT_LVAL_BYTE;
4620 else if (bt == VT_SHORT)
4621 r |= VT_LVAL_SHORT;
4622 else
4623 return r;
4624 if (t & VT_UNSIGNED)
4625 r |= VT_LVAL_UNSIGNED;
4626 return r;
4629 /* indirection with full error checking and bound check */
4630 ST_FUNC void indir(void)
4632 if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
4633 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
4634 return;
4635 expect("pointer");
4637 if (vtop->r & VT_LVAL)
4638 gv(RC_INT);
4639 vtop->type = *pointed_type(&vtop->type);
4640 /* Arrays and functions are never lvalues */
4641 if (!(vtop->type.t & VT_ARRAY) && !(vtop->type.t & VT_VLA)
4642 && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
4643 vtop->r |= lvalue_type(vtop->type.t);
4644 /* if bound checking, the referenced pointer must be checked */
4645 #ifdef CONFIG_TCC_BCHECK
4646 if (tcc_state->do_bounds_check)
4647 vtop->r |= VT_MUSTBOUND;
4648 #endif
4652 /* pass a parameter to a function and do type checking and casting */
4653 static void gfunc_param_typed(Sym *func, Sym *arg)
4655 int func_type;
4656 CType type;
4658 func_type = func->f.func_type;
4659 if (func_type == FUNC_OLD ||
4660 (func_type == FUNC_ELLIPSIS && arg == NULL)) {
4661 /* default casting : only need to convert float to double */
4662 if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
4663 gen_cast_s(VT_DOUBLE);
4664 } else if (vtop->type.t & VT_BITFIELD) {
4665 type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
4666 type.ref = vtop->type.ref;
4667 gen_cast(&type);
4669 } else if (arg == NULL) {
4670 tcc_error("too many arguments to function");
4671 } else {
4672 type = arg->type;
4673 type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
4674 gen_assign_cast(&type);
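/* Illustrative sketch of the default promotion applied above for
   unprototyped and variadic arguments (example C input):

       int printf(const char *, ...);
       float f = 1.5f;
       printf("%f", f);   // 'f' is passed as double (float -> double)
*/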
4678 /* parse an expression and return its type without any side effect. */
4679 static void expr_type(CType *type, void (*expr_fn)(void))
4681 nocode_wanted++;
4682 expr_fn();
4683 *type = vtop->type;
4684 vpop();
4685 nocode_wanted--;
4688 /* parse an expression of the form '(type)' or '(expr)' and return its
4689 type */
4690 static void parse_expr_type(CType *type)
4692 int n;
4693 AttributeDef ad;
4695 skip('(');
4696 if (parse_btype(type, &ad)) {
4697 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4698 } else {
4699 expr_type(type, gexpr);
4701 skip(')');
4704 static void parse_type(CType *type)
4706 AttributeDef ad;
4707 int n;
4709 if (!parse_btype(type, &ad)) {
4710 expect("type");
4712 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4715 static void parse_builtin_params(int nc, const char *args)
4717 char c, sep = '(';
4718 CType t;
4719 if (nc)
4720 nocode_wanted++;
4721 next();
4722 while ((c = *args++)) {
4723 skip(sep);
4724 sep = ',';
4725 switch (c) {
4726 case 'e': expr_eq(); continue;
4727 case 't': parse_type(&t); vpush(&t); continue;
4728 default: tcc_error("internal error"); break;
4731 skip(')');
4732 if (nc)
4733 nocode_wanted--;
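/* Note (illustrative): the 'args' string is a tiny per-argument format,
   one character per parenthesized argument: 'e' parses an assignment
   expression, 't' parses a type name.  For instance the "tt" call used
   for __builtin_types_compatible_p below expects '(type1, type2)'. */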
4736 static void try_call_scope_cleanup(Sym *stop)
4738 Sym *cls = current_cleanups;
4740 for (; cls != stop; cls = cls->ncl) {
4741 Sym *fs = cls->next;
4742 Sym *vs = cls->prev_tok;
4744 vpushsym(&fs->type, fs);
4745 vset(&vs->type, vs->r, vs->c);
4746 vtop->sym = vs;
4747 mk_pointer(&vtop->type);
4748 gaddrof();
4749 gfunc_call(1);
4753 static void try_call_cleanup_goto(Sym *cleanupstate)
4755 Sym *oc, *cc;
4756 int ocd, ccd;
4758 if (!current_cleanups)
4759 return;
4761 /* search NCA of both cleanup chains given parents and initial depth */
4762 ocd = cleanupstate ? cleanupstate->v & ~SYM_FIELD : 0;
4763 for (ccd = ncleanups, oc = cleanupstate; ocd > ccd; --ocd, oc = oc->ncl)
4765 for (cc = current_cleanups; ccd > ocd; --ccd, cc = cc->ncl)
4767 for (; cc != oc; cc = cc->ncl, oc = oc->ncl, --ccd)
4770 try_call_scope_cleanup(cc);
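/* Illustrative sketch of the GNU cleanup attribute these helpers
   implement; 'acquire', 'unlock' and 'fail' are placeholder names.  The
   cleanup function receives a pointer to the variable and is called when
   the variable's scope is left, including via goto and return:

       void unlock(int **p);
       int *acquire(void);
       extern int fail;

       void f(void)
       {
           if (fail) {
               int *lk __attribute__((cleanup(unlock))) = acquire();
               goto out;   // unlock(&lk) is called before leaving the scope
           }
       out:
           ;
       }
*/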
4773 ST_FUNC void unary(void)
4775 int n, t, align, size, r, sizeof_caller;
4776 CType type;
4777 Sym *s;
4778 AttributeDef ad;
4780 sizeof_caller = in_sizeof;
4781 in_sizeof = 0;
4782 type.ref = NULL;
4783 /* XXX: GCC 2.95.3 does not generate a jump table here, although that
4784 would be better */
4785 tok_next:
4786 switch(tok) {
4787 case TOK_EXTENSION:
4788 next();
4789 goto tok_next;
4790 case TOK_LCHAR:
4791 #ifdef TCC_TARGET_PE
4792 t = VT_SHORT|VT_UNSIGNED;
4793 goto push_tokc;
4794 #endif
4795 case TOK_CINT:
4796 case TOK_CCHAR:
4797 t = VT_INT;
4798 push_tokc:
4799 type.t = t;
4800 vsetc(&type, VT_CONST, &tokc);
4801 next();
4802 break;
4803 case TOK_CUINT:
4804 t = VT_INT | VT_UNSIGNED;
4805 goto push_tokc;
4806 case TOK_CLLONG:
4807 t = VT_LLONG;
4808 goto push_tokc;
4809 case TOK_CULLONG:
4810 t = VT_LLONG | VT_UNSIGNED;
4811 goto push_tokc;
4812 case TOK_CFLOAT:
4813 t = VT_FLOAT;
4814 goto push_tokc;
4815 case TOK_CDOUBLE:
4816 t = VT_DOUBLE;
4817 goto push_tokc;
4818 case TOK_CLDOUBLE:
4819 t = VT_LDOUBLE;
4820 goto push_tokc;
4821 case TOK_CLONG:
4822 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG;
4823 goto push_tokc;
4824 case TOK_CULONG:
4825 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG | VT_UNSIGNED;
4826 goto push_tokc;
4827 case TOK___FUNCTION__:
4828 if (!gnu_ext)
4829 goto tok_identifier;
4830 /* fall thru */
4831 case TOK___FUNC__:
4833 void *ptr;
4834 int len;
4835 /* special function name identifier */
4836 len = strlen(funcname) + 1;
4837 /* generate char[len] type */
4838 type.t = VT_BYTE;
4839 mk_pointer(&type);
4840 type.t |= VT_ARRAY;
4841 type.ref->c = len;
4842 vpush_ref(&type, data_section, data_section->data_offset, len);
4843 if (!NODATA_WANTED) {
4844 ptr = section_ptr_add(data_section, len);
4845 memcpy(ptr, funcname, len);
4847 next();
4849 break;
4850 case TOK_LSTR:
4851 #ifdef TCC_TARGET_PE
4852 t = VT_SHORT | VT_UNSIGNED;
4853 #else
4854 t = VT_INT;
4855 #endif
4856 goto str_init;
4857 case TOK_STR:
4858 /* string parsing */
4859 t = VT_BYTE;
4860 if (tcc_state->char_is_unsigned)
4861 t = VT_BYTE | VT_UNSIGNED;
4862 str_init:
4863 if (tcc_state->warn_write_strings)
4864 t |= VT_CONSTANT;
4865 type.t = t;
4866 mk_pointer(&type);
4867 type.t |= VT_ARRAY;
4868 memset(&ad, 0, sizeof(AttributeDef));
4869 decl_initializer_alloc(&type, &ad, VT_CONST, 2, 0, 0);
4870 break;
4871 case '(':
4872 next();
4873 /* cast ? */
4874 if (parse_btype(&type, &ad)) {
4875 type_decl(&type, &ad, &n, TYPE_ABSTRACT);
4876 skip(')');
4877 /* check ISOC99 compound literal */
4878 if (tok == '{') {
4879 /* data is allocated locally by default */
4880 if (global_expr)
4881 r = VT_CONST;
4882 else
4883 r = VT_LOCAL;
4884 /* all except arrays are lvalues */
4885 if (!(type.t & VT_ARRAY))
4886 r |= lvalue_type(type.t);
4887 memset(&ad, 0, sizeof(AttributeDef));
4888 decl_initializer_alloc(&type, &ad, r, 1, 0, 0);
4889 } else {
4890 if (sizeof_caller) {
4891 vpush(&type);
4892 return;
4894 unary();
4895 gen_cast(&type);
4897 } else if (tok == '{') {
4898 int saved_nocode_wanted = nocode_wanted;
4899 if (const_wanted)
4900 tcc_error("expected constant");
4901 /* save all registers */
4902 save_regs(0);
4903 /* statement expression: we do not accept break/continue
4904 inside as GCC does. We do retain the nocode_wanted state,
4905 as statement expressions can't ever be entered from the
4906 outside, so any reactivation of code emission (from labels
4907 or loop heads) can be disabled again after the end of it. */
4908 block(NULL, NULL, 1);
4909 nocode_wanted = saved_nocode_wanted;
4910 skip(')');
4911 } else {
4912 gexpr();
4913 skip(')');
4915 break;
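/* Illustrative example of the GNU statement expression accepted above;
   the value of the last expression statement becomes the value of the
   whole construct ('f' is a placeholder):

       int x = ({ int t = f(); t * 2; });
*/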
4916 case '*':
4917 next();
4918 unary();
4919 indir();
4920 break;
4921 case '&':
4922 next();
4923 unary();
4924 /* function names must be treated as function pointers,
4925 except for unary '&' and sizeof. Since we consider that
4926 functions are not lvalues, we only have to handle it
4927 there and in function calls. */
4928 /* arrays can also be used although they are not lvalues */
4929 if ((vtop->type.t & VT_BTYPE) != VT_FUNC &&
4930 !(vtop->type.t & VT_ARRAY))
4931 test_lvalue();
4932 mk_pointer(&vtop->type);
4933 gaddrof();
4934 break;
4935 case '!':
4936 next();
4937 unary();
4938 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4939 gen_cast_s(VT_BOOL);
4940 vtop->c.i = !vtop->c.i;
4941 } else if ((vtop->r & VT_VALMASK) == VT_CMP)
4942 vtop->c.i ^= 1;
4943 else {
4944 save_regs(1);
4945 vseti(VT_JMP, gvtst(1, 0));
4947 break;
4948 case '~':
4949 next();
4950 unary();
4951 vpushi(-1);
4952 gen_op('^');
4953 break;
4954 case '+':
4955 next();
4956 unary();
4957 if ((vtop->type.t & VT_BTYPE) == VT_PTR)
4958 tcc_error("pointer not accepted for unary plus");
4959 /* In order to force a cast, we add zero, except for floating point
4960 where we really need a no-op (otherwise -0.0 will be transformed
4961 into +0.0). */
4962 if (!is_float(vtop->type.t)) {
4963 vpushi(0);
4964 gen_op('+');
4966 break;
4967 case TOK_SIZEOF:
4968 case TOK_ALIGNOF1:
4969 case TOK_ALIGNOF2:
4970 case TOK_ALIGNOF3:
4971 t = tok;
4972 next();
4973 in_sizeof++;
4974 expr_type(&type, unary); /* this resets in_sizeof to 0 (see top of unary) */
4975 s = vtop[1].sym; /* hack: accessing previous vtop */
4976 size = type_size(&type, &align);
4977 if (s && s->a.aligned)
4978 align = 1 << (s->a.aligned - 1);
4979 if (t == TOK_SIZEOF) {
4980 if (!(type.t & VT_VLA)) {
4981 if (size < 0)
4982 tcc_error("sizeof applied to an incomplete type");
4983 vpushs(size);
4984 } else {
4985 vla_runtime_type_size(&type, &align);
4987 } else {
4988 vpushs(align);
4990 vtop->type.t |= VT_UNSIGNED;
4991 break;
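/* Illustrative sketch of the two sizeof paths above: for complete non-VLA
   types the result is a compile-time constant, for VLAs it is computed at
   run time via vla_runtime_type_size():

       int n = 10;
       int a[n];
       unsigned long s1 = sizeof(int[4]);   // constant
       unsigned long s2 = sizeof a;         // evaluated at run time
*/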
4993 case TOK_builtin_expect:
4994 /* __builtin_expect is a no-op for now */
4995 parse_builtin_params(0, "ee");
4996 vpop();
4997 break;
4998 case TOK_builtin_types_compatible_p:
4999 parse_builtin_params(0, "tt");
5000 vtop[-1].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
5001 vtop[0].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
5002 n = is_compatible_types(&vtop[-1].type, &vtop[0].type);
5003 vtop -= 2;
5004 vpushi(n);
5005 break;
5006 case TOK_builtin_choose_expr:
5008 int64_t c;
5009 next();
5010 skip('(');
5011 c = expr_const64();
5012 skip(',');
5013 if (!c) {
5014 nocode_wanted++;
5016 expr_eq();
5017 if (!c) {
5018 vpop();
5019 nocode_wanted--;
5021 skip(',');
5022 if (c) {
5023 nocode_wanted++;
5025 expr_eq();
5026 if (c) {
5027 vpop();
5028 nocode_wanted--;
5030 skip(')');
5032 break;
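/* Illustrative example of __builtin_choose_expr() as handled above: the
   constant first argument selects which operand provides the value; the
   other operand is still parsed, but under nocode_wanted:

       int i = __builtin_choose_expr(sizeof(long) == 8, 64, 32);
*/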
5033 case TOK_builtin_constant_p:
5034 parse_builtin_params(1, "e");
5035 n = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
5036 vtop--;
5037 vpushi(n);
5038 break;
5039 case TOK_builtin_frame_address:
5040 case TOK_builtin_return_address:
5042 int tok1 = tok;
5043 int level;
5044 next();
5045 skip('(');
5046 if (tok != TOK_CINT) {
5047 tcc_error("%s only takes positive integers",
5048 tok1 == TOK_builtin_return_address ?
5049 "__builtin_return_address" :
5050 "__builtin_frame_address");
5052 level = (uint32_t)tokc.i;
5053 next();
5054 skip(')');
5055 type.t = VT_VOID;
5056 mk_pointer(&type);
5057 vset(&type, VT_LOCAL, 0); /* local frame */
5058 while (level--) {
5059 mk_pointer(&vtop->type);
5060 indir(); /* -> parent frame */
5062 if (tok1 == TOK_builtin_return_address) {
5063 // assume return address is just above frame pointer on stack
5064 vpushi(PTR_SIZE);
5065 gen_op('+');
5066 mk_pointer(&vtop->type);
5067 indir();
5070 break;
5071 #ifdef TCC_TARGET_X86_64
5072 #ifdef TCC_TARGET_PE
5073 case TOK_builtin_va_start:
5074 parse_builtin_params(0, "ee");
5075 r = vtop->r & VT_VALMASK;
5076 if (r == VT_LLOCAL)
5077 r = VT_LOCAL;
5078 if (r != VT_LOCAL)
5079 tcc_error("__builtin_va_start expects a local variable");
5080 vtop->r = r;
5081 vtop->type = char_pointer_type;
5082 vtop->c.i += 8;
5083 vstore();
5084 break;
5085 #else
5086 case TOK_builtin_va_arg_types:
5087 parse_builtin_params(0, "t");
5088 vpushi(classify_x86_64_va_arg(&vtop->type));
5089 vswap();
5090 vpop();
5091 break;
5092 #endif
5093 #endif
5095 #ifdef TCC_TARGET_ARM64
5096 case TOK___va_start: {
5097 parse_builtin_params(0, "ee");
5098 //xx check types
5099 gen_va_start();
5100 vpushi(0);
5101 vtop->type.t = VT_VOID;
5102 break;
5104 case TOK___va_arg: {
5105 parse_builtin_params(0, "et");
5106 type = vtop->type;
5107 vpop();
5108 //xx check types
5109 gen_va_arg(&type);
5110 vtop->type = type;
5111 break;
5113 case TOK___arm64_clear_cache: {
5114 parse_builtin_params(0, "ee");
5115 gen_clear_cache();
5116 vpushi(0);
5117 vtop->type.t = VT_VOID;
5118 break;
5120 #endif
5121 /* pre operations */
5122 case TOK_INC:
5123 case TOK_DEC:
5124 t = tok;
5125 next();
5126 unary();
5127 inc(0, t);
5128 break;
5129 case '-':
5130 next();
5131 unary();
5132 t = vtop->type.t & VT_BTYPE;
5133 if (is_float(t)) {
5134 /* In IEEE negate(x) isn't subtract(0,x), but rather
5135 subtract(-0, x). */
5136 vpush(&vtop->type);
5137 if (t == VT_FLOAT)
5138 vtop->c.f = -1.0 * 0.0;
5139 else if (t == VT_DOUBLE)
5140 vtop->c.d = -1.0 * 0.0;
5141 else
5142 vtop->c.ld = -1.0 * 0.0;
5143 } else
5144 vpushi(0);
5145 vswap();
5146 gen_op('-');
5147 break;
5148 case TOK_LAND:
5149 if (!gnu_ext)
5150 goto tok_identifier;
5151 next();
5152 /* allow to take the address of a label */
5153 if (tok < TOK_UIDENT)
5154 expect("label identifier");
5155 s = label_find(tok);
5156 if (!s) {
5157 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
5158 } else {
5159 if (s->r == LABEL_DECLARED)
5160 s->r = LABEL_FORWARD;
5162 if (!s->type.t) {
5163 s->type.t = VT_VOID;
5164 mk_pointer(&s->type);
5165 s->type.t |= VT_STATIC;
5167 vpushsym(&s->type, s);
5168 next();
5169 break;
5171 case TOK_GENERIC:
5173 CType controlling_type;
5174 int has_default = 0;
5175 int has_match = 0;
5176 int learn = 0;
5177 TokenString *str = NULL;
5178 int saved_const_wanted = const_wanted;
5180 next();
5181 skip('(');
5182 const_wanted = 0;
5183 expr_type(&controlling_type, expr_eq);
5184 controlling_type.t &= ~(VT_CONSTANT | VT_VOLATILE | VT_ARRAY);
5185 if ((controlling_type.t & VT_BTYPE) == VT_FUNC)
5186 mk_pointer(&controlling_type);
5187 const_wanted = saved_const_wanted;
5188 for (;;) {
5189 learn = 0;
5190 skip(',');
5191 if (tok == TOK_DEFAULT) {
5192 if (has_default)
5193 tcc_error("too many 'default'");
5194 has_default = 1;
5195 if (!has_match)
5196 learn = 1;
5197 next();
5198 } else {
5199 AttributeDef ad_tmp;
5200 int itmp;
5201 CType cur_type;
5202 parse_btype(&cur_type, &ad_tmp);
5203 type_decl(&cur_type, &ad_tmp, &itmp, TYPE_ABSTRACT);
5204 if (compare_types(&controlling_type, &cur_type, 0)) {
5205 if (has_match) {
5206 tcc_error("type match twice");
5208 has_match = 1;
5209 learn = 1;
5212 skip(':');
5213 if (learn) {
5214 if (str)
5215 tok_str_free(str);
5216 skip_or_save_block(&str);
5217 } else {
5218 skip_or_save_block(NULL);
5220 if (tok == ')')
5221 break;
5223 if (!str) {
5224 char buf[60];
5225 type_to_str(buf, sizeof buf, &controlling_type, NULL);
5226 tcc_error("type '%s' does not match any association", buf);
5228 begin_macro(str, 1);
5229 next();
5230 expr_eq();
5231 if (tok != TOK_EOF)
5232 expect(",");
5233 end_macro();
5234 next();
5235 break;
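/* Illustrative example of the C11 _Generic selection parsed above; the
   association whose type matches the controlling expression (after
   dropping top-level qualifiers) is the one expanded and evaluated:

       #define type_name(x) _Generic((x), int: "int",       \
                                          double: "double", \
                                          default: "other")
       const char *s = type_name(1.0);   // "double"
*/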
5237 // special qnan, snan and infinity values
5238 case TOK___NAN__:
5239 n = 0x7fc00000;
5240 special_math_val:
5241 vpushi(n);
5242 vtop->type.t = VT_FLOAT;
5243 next();
5244 break;
5245 case TOK___SNAN__:
5246 n = 0x7f800001;
5247 goto special_math_val;
5248 case TOK___INF__:
5249 n = 0x7f800000;
5250 goto special_math_val;
5252 default:
5253 tok_identifier:
5254 t = tok;
5255 next();
5256 if (t < TOK_UIDENT)
5257 expect("identifier");
5258 s = sym_find(t);
5259 if (!s || IS_ASM_SYM(s)) {
5260 const char *name = get_tok_str(t, NULL);
5261 if (tok != '(')
5262 tcc_error("'%s' undeclared", name);
5263 /* for simple function calls, we tolerate undeclared
5264 external reference to int() function */
5265 if (tcc_state->warn_implicit_function_declaration
5266 #ifdef TCC_TARGET_PE
5267 /* people must be warned about using undeclared WINAPI functions
5268 (which usually start with uppercase letter) */
5269 || (name[0] >= 'A' && name[0] <= 'Z')
5270 #endif
5272 tcc_warning("implicit declaration of function '%s'", name);
5273 s = external_global_sym(t, &func_old_type, 0);
5276 r = s->r;
5277 /* A symbol that has a register is a local register variable,
5278 which starts out as VT_LOCAL value. */
5279 if ((r & VT_VALMASK) < VT_CONST)
5280 r = (r & ~VT_VALMASK) | VT_LOCAL;
5282 vset(&s->type, r, s->c);
5283 /* Point to s as backpointer (even without r&VT_SYM).
5284 Will be used by at least the x86 inline asm parser for
5285 regvars. */
5286 vtop->sym = s;
5288 if (r & VT_SYM) {
5289 vtop->c.i = 0;
5290 } else if (r == VT_CONST && IS_ENUM_VAL(s->type.t)) {
5291 vtop->c.i = s->enum_val;
5293 break;
5296 /* post operations */
5297 while (1) {
5298 if (tok == TOK_INC || tok == TOK_DEC) {
5299 inc(1, tok);
5300 next();
5301 } else if (tok == '.' || tok == TOK_ARROW || tok == TOK_CDOUBLE) {
5302 int qualifiers;
5303 /* field */
5304 if (tok == TOK_ARROW)
5305 indir();
5306 qualifiers = vtop->type.t & (VT_CONSTANT | VT_VOLATILE);
5307 test_lvalue();
5308 gaddrof();
5309 /* expect pointer on structure */
5310 if ((vtop->type.t & VT_BTYPE) != VT_STRUCT)
5311 expect("struct or union");
5312 if (tok == TOK_CDOUBLE)
5313 expect("field name");
5314 next();
5315 if (tok == TOK_CINT || tok == TOK_CUINT)
5316 expect("field name");
5317 s = find_field(&vtop->type, tok);
5318 if (!s)
5319 tcc_error("field not found: %s", get_tok_str(tok & ~SYM_FIELD, &tokc));
5320 /* add field offset to pointer */
5321 vtop->type = char_pointer_type; /* change type to 'char *' */
5322 vpushi(s->c);
5323 gen_op('+');
5324 /* change type to field type, and set to lvalue */
5325 vtop->type = s->type;
5326 vtop->type.t |= qualifiers;
5327 /* an array is never an lvalue */
5328 if (!(vtop->type.t & VT_ARRAY)) {
5329 vtop->r |= lvalue_type(vtop->type.t);
5330 #ifdef CONFIG_TCC_BCHECK
5331 /* if bound checking, the referenced pointer must be checked */
5332 if (tcc_state->do_bounds_check && (vtop->r & VT_VALMASK) != VT_LOCAL)
5333 vtop->r |= VT_MUSTBOUND;
5334 #endif
5336 next();
5337 } else if (tok == '[') {
5338 next();
5339 gexpr();
5340 gen_op('+');
5341 indir();
5342 skip(']');
5343 } else if (tok == '(') {
5344 SValue ret;
5345 Sym *sa;
5346 int nb_args, ret_nregs, ret_align, regsize, variadic;
5348 /* function call */
5349 if ((vtop->type.t & VT_BTYPE) != VT_FUNC) {
5350 /* pointer test (no array accepted) */
5351 if ((vtop->type.t & (VT_BTYPE | VT_ARRAY)) == VT_PTR) {
5352 vtop->type = *pointed_type(&vtop->type);
5353 if ((vtop->type.t & VT_BTYPE) != VT_FUNC)
5354 goto error_func;
5355 } else {
5356 error_func:
5357 expect("function pointer");
5359 } else {
5360 vtop->r &= ~VT_LVAL; /* no lvalue */
5362 /* get return type */
5363 s = vtop->type.ref;
5364 next();
5365 sa = s->next; /* first parameter */
5366 nb_args = regsize = 0;
5367 ret.r2 = VT_CONST;
5368 /* compute first implicit argument if a structure is returned */
5369 if ((s->type.t & VT_BTYPE) == VT_STRUCT) {
5370 variadic = (s->f.func_type == FUNC_ELLIPSIS);
5371 ret_nregs = gfunc_sret(&s->type, variadic, &ret.type,
5372 &ret_align, &regsize);
5373 if (!ret_nregs) {
5374 /* get some space for the returned structure */
5375 size = type_size(&s->type, &align);
5376 #ifdef TCC_TARGET_ARM64
5377 /* On arm64, a small struct is returned in registers.
5378 It is much easier to write it to memory if we know
5379 that we are allowed to write some extra bytes, so
5380 round the allocated space up to a power of 2: */
5381 if (size < 16)
5382 while (size & (size - 1))
5383 size = (size | (size - 1)) + 1;
5384 #endif
5385 loc = (loc - size) & -align;
5386 ret.type = s->type;
5387 ret.r = VT_LOCAL | VT_LVAL;
5388 /* pass it as 'int' to avoid structure arg passing
5389 problems */
5390 vseti(VT_LOCAL, loc);
5391 ret.c = vtop->c;
5392 nb_args++;
5394 } else {
5395 ret_nregs = 1;
5396 ret.type = s->type;
5399 if (ret_nregs) {
5400 /* return in register */
5401 if (is_float(ret.type.t)) {
5402 ret.r = reg_fret(ret.type.t);
5403 #ifdef TCC_TARGET_X86_64
5404 if ((ret.type.t & VT_BTYPE) == VT_QFLOAT)
5405 ret.r2 = REG_QRET;
5406 #endif
5407 } else {
5408 #ifndef TCC_TARGET_ARM64
5409 #ifdef TCC_TARGET_X86_64
5410 if ((ret.type.t & VT_BTYPE) == VT_QLONG)
5411 #else
5412 if ((ret.type.t & VT_BTYPE) == VT_LLONG)
5413 #endif
5414 ret.r2 = REG_LRET;
5415 #endif
5416 ret.r = REG_IRET;
5418 ret.c.i = 0;
5420 if (tok != ')') {
5421 for(;;) {
5422 expr_eq();
5423 gfunc_param_typed(s, sa);
5424 nb_args++;
5425 if (sa)
5426 sa = sa->next;
5427 if (tok == ')')
5428 break;
5429 skip(',');
5432 if (sa)
5433 tcc_error("too few arguments to function");
5434 skip(')');
5435 gfunc_call(nb_args);
5437 /* return value */
5438 for (r = ret.r + ret_nregs + !ret_nregs; r-- > ret.r;) {
5439 vsetc(&ret.type, r, &ret.c);
5440 vtop->r2 = ret.r2; /* Loop only happens when r2 is VT_CONST */
5443 /* handle packed struct return */
5444 if (((s->type.t & VT_BTYPE) == VT_STRUCT) && ret_nregs) {
5445 int addr, offset;
5447 size = type_size(&s->type, &align);
5448 /* We're writing whole regs often, make sure there's enough
5449 space. Assume register size is power of 2. */
5450 if (regsize > align)
5451 align = regsize;
5452 loc = (loc - size) & -align;
5453 addr = loc;
5454 offset = 0;
5455 for (;;) {
5456 vset(&ret.type, VT_LOCAL | VT_LVAL, addr + offset);
5457 vswap();
5458 vstore();
5459 vtop--;
5460 if (--ret_nregs == 0)
5461 break;
5462 offset += regsize;
5464 vset(&s->type, VT_LOCAL | VT_LVAL, addr);
5466 } else {
5467 break;
5472 ST_FUNC void expr_prod(void)
5474 int t;
5476 unary();
5477 while (tok == '*' || tok == '/' || tok == '%') {
5478 t = tok;
5479 next();
5480 unary();
5481 gen_op(t);
5485 ST_FUNC void expr_sum(void)
5487 int t;
5489 expr_prod();
5490 while (tok == '+' || tok == '-') {
5491 t = tok;
5492 next();
5493 expr_prod();
5494 gen_op(t);
5498 static void expr_shift(void)
5500 int t;
5502 expr_sum();
5503 while (tok == TOK_SHL || tok == TOK_SAR) {
5504 t = tok;
5505 next();
5506 expr_sum();
5507 gen_op(t);
5511 static void expr_cmp(void)
5513 int t;
5515 expr_shift();
5516 while ((tok >= TOK_ULE && tok <= TOK_GT) ||
5517 tok == TOK_ULT || tok == TOK_UGE) {
5518 t = tok;
5519 next();
5520 expr_shift();
5521 gen_op(t);
5525 static void expr_cmpeq(void)
5527 int t;
5529 expr_cmp();
5530 while (tok == TOK_EQ || tok == TOK_NE) {
5531 t = tok;
5532 next();
5533 expr_cmp();
5534 gen_op(t);
5538 static void expr_and(void)
5540 expr_cmpeq();
5541 while (tok == '&') {
5542 next();
5543 expr_cmpeq();
5544 gen_op('&');
5548 static void expr_xor(void)
5550 expr_and();
5551 while (tok == '^') {
5552 next();
5553 expr_and();
5554 gen_op('^');
5558 static void expr_or(void)
5560 expr_xor();
5561 while (tok == '|') {
5562 next();
5563 expr_xor();
5564 gen_op('|');
5568 static void expr_land(void)
5570 expr_or();
5571 if (tok == TOK_LAND) {
5572 int t = 0;
5573 for(;;) {
5574 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5575 gen_cast_s(VT_BOOL);
5576 if (vtop->c.i) {
5577 vpop();
5578 } else {
5579 nocode_wanted++;
5580 while (tok == TOK_LAND) {
5581 next();
5582 expr_or();
5583 vpop();
5585 nocode_wanted--;
5586 if (t)
5587 gsym(t);
5588 gen_cast_s(VT_INT);
5589 break;
5591 } else {
5592 if (!t)
5593 save_regs(1);
5594 t = gvtst(1, t);
5596 if (tok != TOK_LAND) {
5597 if (t)
5598 vseti(VT_JMPI, t);
5599 else
5600 vpushi(1);
5601 break;
5603 next();
5604 expr_or();
5609 static void expr_lor(void)
5611 expr_land();
5612 if (tok == TOK_LOR) {
5613 int t = 0;
5614 for(;;) {
5615 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5616 gen_cast_s(VT_BOOL);
5617 if (!vtop->c.i) {
5618 vpop();
5619 } else {
5620 nocode_wanted++;
5621 while (tok == TOK_LOR) {
5622 next();
5623 expr_land();
5624 vpop();
5626 nocode_wanted--;
5627 if (t)
5628 gsym(t);
5629 gen_cast_s(VT_INT);
5630 break;
5632 } else {
5633 if (!t)
5634 save_regs(1);
5635 t = gvtst(0, t);
5637 if (tok != TOK_LOR) {
5638 if (t)
5639 vseti(VT_JMP, t);
5640 else
5641 vpushi(0);
5642 break;
5644 next();
5645 expr_land();
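/* Note (illustrative): when the left operand of && or || is a
   compile-time constant, the loops above fold the result and parse the
   remaining operands under nocode_wanted, so e.g.

       if (0 && expensive())   // 'expensive', 'do_something' are placeholders
           do_something();

   emits no call to expensive(). */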
5650 /* Assuming vtop is a value used in a conditional context
5651 (i.e. compared with zero) return 0 if it's false, 1 if
5652 true and -1 if it can't be statically determined. */
5653 static int condition_3way(void)
5655 int c = -1;
5656 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
5657 (!(vtop->r & VT_SYM) || !vtop->sym->a.weak)) {
5658 vdup();
5659 gen_cast_s(VT_BOOL);
5660 c = vtop->c.i;
5661 vpop();
5663 return c;
5666 static void expr_cond(void)
5668 int tt, u, r1, r2, rc, t1, t2, bt1, bt2, islv, c, g;
5669 SValue sv;
5670 CType type, type1, type2;
5672 expr_lor();
5673 if (tok == '?') {
5674 next();
5675 c = condition_3way();
5676 g = (tok == ':' && gnu_ext);
5677 if (c < 0) {
5678 /* needed to avoid having different registers saved in
5679 each branch */
5680 if (is_float(vtop->type.t)) {
5681 rc = RC_FLOAT;
5682 #ifdef TCC_TARGET_X86_64
5683 if ((vtop->type.t & VT_BTYPE) == VT_LDOUBLE) {
5684 rc = RC_ST0;
5686 #endif
5687 } else
5688 rc = RC_INT;
5689 gv(rc);
5690 save_regs(1);
5691 if (g)
5692 gv_dup();
5693 tt = gvtst(1, 0);
5695 } else {
5696 if (!g)
5697 vpop();
5698 tt = 0;
5701 if (1) {
5702 if (c == 0)
5703 nocode_wanted++;
5704 if (!g)
5705 gexpr();
5707 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
5708 mk_pointer(&vtop->type);
5709 type1 = vtop->type;
5710 sv = *vtop; /* save value to handle it later */
5711 vtop--; /* no vpop so that FP stack is not flushed */
5712 skip(':');
5714 u = 0;
5715 if (c < 0)
5716 u = gjmp(0);
5717 gsym(tt);
5719 if (c == 0)
5720 nocode_wanted--;
5721 if (c == 1)
5722 nocode_wanted++;
5723 expr_cond();
5724 if (c == 1)
5725 nocode_wanted--;
5727 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
5728 mk_pointer(&vtop->type);
5729 type2=vtop->type;
5730 t1 = type1.t;
5731 bt1 = t1 & VT_BTYPE;
5732 t2 = type2.t;
5733 bt2 = t2 & VT_BTYPE;
5734 type.ref = NULL;
5737 /* cast operands to correct type according to ISOC rules */
5738 if (bt1 == VT_VOID || bt2 == VT_VOID) {
5739 type.t = VT_VOID; /* NOTE: as an extension, we accept void on only one side */
5740 } else if (is_float(bt1) || is_float(bt2)) {
5741 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
5742 type.t = VT_LDOUBLE;
5744 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
5745 type.t = VT_DOUBLE;
5746 } else {
5747 type.t = VT_FLOAT;
5749 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
5750 /* cast to biggest op */
5751 type.t = VT_LLONG | VT_LONG;
5752 if (bt1 == VT_LLONG)
5753 type.t &= t1;
5754 if (bt2 == VT_LLONG)
5755 type.t &= t2;
5756 /* convert to unsigned if it does not fit in a long long */
5757 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
5758 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
5759 type.t |= VT_UNSIGNED;
5760 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
5761 /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
5762 /* If one is a null ptr constant the result type
5763 is the other. */
5764 if (is_null_pointer (vtop)) type = type1;
5765 else if (is_null_pointer (&sv)) type = type2;
5766 else if (bt1 != bt2)
5767 tcc_error("incompatible types in conditional expressions");
5768 else {
5769 CType *pt1 = pointed_type(&type1);
5770 CType *pt2 = pointed_type(&type2);
5771 int pbt1 = pt1->t & VT_BTYPE;
5772 int pbt2 = pt2->t & VT_BTYPE;
5773 int newquals, copied = 0;
5774 /* pointers to void get preferred, otherwise the
5775 pointed-to types minus qualifiers should be compatible */
5776 type = (pbt1 == VT_VOID) ? type1 : type2;
5777 if (pbt1 != VT_VOID && pbt2 != VT_VOID) {
5778 if(!compare_types(pt1, pt2, 1/*unqualif*/))
5779 tcc_warning("pointer type mismatch in conditional expression\n");
5781 /* combine qualifs */
5782 newquals = ((pt1->t | pt2->t) & (VT_CONSTANT | VT_VOLATILE));
5783 if ((~pointed_type(&type)->t & (VT_CONSTANT | VT_VOLATILE))
5784 & newquals)
5786 /* copy the pointer target symbol */
5787 type.ref = sym_push(SYM_FIELD, &type.ref->type,
5788 0, type.ref->c);
5789 copied = 1;
5790 pointed_type(&type)->t |= newquals;
5792 /* pointers to incomplete arrays get converted to
5793 pointers to completed ones if possible */
5794 if (pt1->t & VT_ARRAY
5795 && pt2->t & VT_ARRAY
5796 && pointed_type(&type)->ref->c < 0
5797 && (pt1->ref->c > 0 || pt2->ref->c > 0))
5799 if (!copied)
5800 type.ref = sym_push(SYM_FIELD, &type.ref->type,
5801 0, type.ref->c);
5802 pointed_type(&type)->ref =
5803 sym_push(SYM_FIELD, &pointed_type(&type)->ref->type,
5804 0, pointed_type(&type)->ref->c);
5805 pointed_type(&type)->ref->c =
5806 0 < pt1->ref->c ? pt1->ref->c : pt2->ref->c;
5809 } else if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
5810 /* XXX: test structure compatibility */
5811 type = bt1 == VT_STRUCT ? type1 : type2;
5812 } else {
5813 /* integer operations */
5814 type.t = VT_INT | (VT_LONG & (t1 | t2));
5815 /* convert to unsigned if it does not fit in an integer */
5816 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
5817 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
5818 type.t |= VT_UNSIGNED;
5820 /* keep structs as lvalues by transforming `(expr ? a : b)` to
5821 that `(expr ? a : b).mem` does not error with "lvalue expected" */
5822 islv = (vtop->r & VT_LVAL) && (sv.r & VT_LVAL) && VT_STRUCT == (type.t & VT_BTYPE);
5824 /* now we convert second operand */
5825 if (c != 1) {
5826 gen_cast(&type);
5827 if (islv) {
5828 mk_pointer(&vtop->type);
5829 gaddrof();
5830 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5831 gaddrof();
5834 rc = RC_INT;
5835 if (is_float(type.t)) {
5836 rc = RC_FLOAT;
5837 #ifdef TCC_TARGET_X86_64
5838 if ((type.t & VT_BTYPE) == VT_LDOUBLE) {
5839 rc = RC_ST0;
5841 #endif
5842 } else if ((type.t & VT_BTYPE) == VT_LLONG) {
5843 /* for long longs, we use fixed registers to avoid having
5844 to handle a complicated move */
5845 rc = RC_IRET;
5848 tt = r2 = 0;
5849 if (c < 0) {
5850 r2 = gv(rc);
5851 tt = gjmp(0);
5853 gsym(u);
5855 /* this is horrible, but we must also convert the first
5856 operand */
5857 if (c != 0) {
5858 *vtop = sv;
5859 gen_cast(&type);
5860 if (islv) {
5861 mk_pointer(&vtop->type);
5862 gaddrof();
5863 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5864 gaddrof();
5867 if (c < 0 || islv) {
5868 r1 = gv(rc);
5869 move_reg(r2, r1, type.t);
5870 vtop->r = r2;
5871 gsym(tt);
5872 if (islv)
5873 indir();
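/* Illustrative sketch of the result-type rules applied above for the
   conditional operator (example C, 'c' etc. are placeholders):

       int c, i, *ip; unsigned u; double d; void *vp; const int *cip;
       c ? i : u;     // usual arithmetic conversions -> unsigned int
       c ? i : d;     //                              -> double
       c ? vp : ip;   // pointer to void is preferred -> void *
       c ? cip : ip;  // qualifiers are combined      -> const int *
*/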
5879 static void expr_eq(void)
5881 int t;
5883 expr_cond();
5884 if (tok == '=' ||
5885 (tok >= TOK_A_MOD && tok <= TOK_A_DIV) ||
5886 tok == TOK_A_XOR || tok == TOK_A_OR ||
5887 tok == TOK_A_SHL || tok == TOK_A_SAR) {
5888 test_lvalue();
5889 t = tok;
5890 next();
5891 if (t == '=') {
5892 expr_eq();
5893 } else {
5894 vdup();
5895 expr_eq();
5896 gen_op(t & 0x7f);
5898 vstore();
5902 ST_FUNC void gexpr(void)
5904 while (1) {
5905 expr_eq();
5906 if (tok != ',')
5907 break;
5908 vpop();
5909 next();
5913 /* parse a constant expression and return value in vtop. */
5914 static void expr_const1(void)
5916 const_wanted++;
5917 nocode_wanted++;
5918 expr_cond();
5919 nocode_wanted--;
5920 const_wanted--;
5923 /* parse an integer constant and return its value. */
5924 static inline int64_t expr_const64(void)
5926 int64_t c;
5927 expr_const1();
5928 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
5929 expect("constant expression");
5930 c = vtop->c.i;
5931 vpop();
5932 return c;
5935 /* parse an integer constant and return its value.
5936 Complain if it doesn't fit 32bit (signed or unsigned). */
5937 ST_FUNC int expr_const(void)
5939 int c;
5940 int64_t wc = expr_const64();
5941 c = wc;
5942 if (c != wc && (unsigned)c != wc)
5943 tcc_error("constant exceeds 32 bit");
5944 return c;
5947 /* return the label token if current token is a label, otherwise
5948 return zero */
5949 static int is_label(void)
5951 int last_tok;
5953 /* fast test first */
5954 if (tok < TOK_UIDENT)
5955 return 0;
5956 /* no need to save tokc because tok is an identifier */
5957 last_tok = tok;
5958 next();
5959 if (tok == ':') {
5960 return last_tok;
5961 } else {
5962 unget_tok(last_tok);
5963 return 0;
5967 #ifndef TCC_TARGET_ARM64
5968 static void gfunc_return(CType *func_type)
5970 if ((func_type->t & VT_BTYPE) == VT_STRUCT) {
5971 CType type, ret_type;
5972 int ret_align, ret_nregs, regsize;
5973 ret_nregs = gfunc_sret(func_type, func_var, &ret_type,
5974 &ret_align, &regsize);
5975 if (0 == ret_nregs) {
5976 /* if returning structure, must copy it to implicit
5977 first pointer arg location */
5978 type = *func_type;
5979 mk_pointer(&type);
5980 vset(&type, VT_LOCAL | VT_LVAL, func_vc);
5981 indir();
5982 vswap();
5983 /* copy structure value to pointer */
5984 vstore();
5985 } else {
5986 /* returning structure packed into registers */
5987 int r, size, addr, align;
5988 size = type_size(func_type,&align);
5989 if ((vtop->r != (VT_LOCAL | VT_LVAL) ||
5990 (vtop->c.i & (ret_align-1)))
5991 && (align & (ret_align-1))) {
5992 loc = (loc - size) & -ret_align;
5993 addr = loc;
5994 type = *func_type;
5995 vset(&type, VT_LOCAL | VT_LVAL, addr);
5996 vswap();
5997 vstore();
5998 vpop();
5999 vset(&ret_type, VT_LOCAL | VT_LVAL, addr);
6001 vtop->type = ret_type;
6002 if (is_float(ret_type.t))
6003 r = rc_fret(ret_type.t);
6004 else
6005 r = RC_IRET;
6007 if (ret_nregs == 1)
6008 gv(r);
6009 else {
6010 for (;;) {
6011 vdup();
6012 gv(r);
6013 vpop();
6014 if (--ret_nregs == 0)
6015 break;
6016 /* We assume that when a structure is returned in multiple
6017 registers, their classes are consecutive values of the
6018 sequence s(n) = 2^n */
6019 r <<= 1;
6020 vtop->c.i += regsize;
6024 } else if (is_float(func_type->t)) {
6025 gv(rc_fret(func_type->t));
6026 } else {
6027 gv(RC_IRET);
6029 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
6031 #endif
6033 static int case_cmp(const void *pa, const void *pb)
6035 int64_t a = (*(struct case_t**) pa)->v1;
6036 int64_t b = (*(struct case_t**) pb)->v1;
6037 return a < b ? -1 : a > b;
6040 static void gcase(struct case_t **base, int len, int *bsym)
6042 struct case_t *p;
6043 int e;
6044 int ll = (vtop->type.t & VT_BTYPE) == VT_LLONG;
6045 gv(RC_INT);
6046 while (len > 4) {
6047 /* binary search */
6048 p = base[len/2];
6049 vdup();
6050 if (ll)
6051 vpushll(p->v2);
6052 else
6053 vpushi(p->v2);
6054 gen_op(TOK_LE);
6055 e = gtst(1, 0);
6056 vdup();
6057 if (ll)
6058 vpushll(p->v1);
6059 else
6060 vpushi(p->v1);
6061 gen_op(TOK_GE);
6062 gtst_addr(0, p->sym); /* v1 <= x <= v2 */
6063 /* x < v1 */
6064 gcase(base, len/2, bsym);
6065 if (cur_switch->def_sym)
6066 gjmp_addr(cur_switch->def_sym);
6067 else
6068 *bsym = gjmp(*bsym);
6069 /* x > v2 */
6070 gsym(e);
6071 e = len/2 + 1;
6072 base += e; len -= e;
6074 /* linear scan */
6075 while (len--) {
6076 p = *base++;
6077 vdup();
6078 if (ll)
6079 vpushll(p->v2);
6080 else
6081 vpushi(p->v2);
6082 if (p->v1 == p->v2) {
6083 gen_op(TOK_EQ);
6084 gtst_addr(0, p->sym);
6085 } else {
6086 gen_op(TOK_LE);
6087 e = gtst(1, 0);
6088 vdup();
6089 if (ll)
6090 vpushll(p->v1);
6091 else
6092 vpushi(p->v1);
6093 gen_op(TOK_GE);
6094 gtst_addr(0, p->sym);
6095 gsym(e);
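/* Note (illustrative): gcase() dispatches the sorted case list of a
   switch: a binary search over the [v1, v2] ranges while more than four
   entries remain, then a linear scan.  GNU case ranges simply become
   entries with v1 != v2:

       switch (n) {              // 'n', 'f', 'g' are placeholders
       case 1 ... 5: f(); break; // one case_t with v1 = 1, v2 = 5
       case 10:      g(); break; // v1 == v2 == 10
       }
*/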
6100 static void block(int *bsym, int *csym, int is_expr)
6102 int a, b, c, d, cond;
6103 Sym *s;
6105 /* generate line number info */
6106 if (tcc_state->do_debug)
6107 tcc_debug_line(tcc_state);
6109 if (is_expr) {
6110 /* default return value is (void) */
6111 vpushi(0);
6112 vtop->type.t = VT_VOID;
6115 if (tok == TOK_IF) {
6116 /* if test */
6117 int saved_nocode_wanted = nocode_wanted;
6118 next();
6119 skip('(');
6120 gexpr();
6121 skip(')');
6122 cond = condition_3way();
6123 if (cond == 1)
6124 a = 0, vpop();
6125 else
6126 a = gvtst(1, 0);
6127 if (cond == 0)
6128 nocode_wanted |= 0x20000000;
6129 block(bsym, csym, 0);
6130 if (cond != 1)
6131 nocode_wanted = saved_nocode_wanted;
6132 if (tok == TOK_ELSE) {
6133 next();
6134 d = gjmp(0);
6135 gsym(a);
6136 if (cond == 1)
6137 nocode_wanted |= 0x20000000;
6138 block(bsym, csym, 0);
6139 gsym(d); /* patch else jmp */
6140 if (cond != 0)
6141 nocode_wanted = saved_nocode_wanted;
6142 } else
6143 gsym(a);
6144 } else if (tok == TOK_WHILE) {
6145 int saved_nocode_wanted;
6146 nocode_wanted &= ~0x20000000;
6147 next();
6148 d = ind;
6149 vla_sp_restore();
6150 skip('(');
6151 gexpr();
6152 skip(')');
6153 a = gvtst(1, 0);
6154 b = 0;
6155 ++local_scope;
6156 saved_nocode_wanted = nocode_wanted;
6157 block(&a, &b, 0);
6158 nocode_wanted = saved_nocode_wanted;
6159 --local_scope;
6160 gjmp_addr(d);
6161 gsym(a);
6162 gsym_addr(b, d);
6163 } else if (tok == '{') {
6164 Sym *llabel, *lcleanup;
6165 int block_vla_sp_loc = vla_sp_loc, saved_vlas_in_scope = vlas_in_scope;
6166 int lncleanups = ncleanups;
6168 next();
6169 /* record local declaration stack position */
6170 s = local_stack;
6171 llabel = local_label_stack;
6172 lcleanup = current_cleanups;
6173 ++local_scope;
6175 /* handle local labels declarations */
6176 while (tok == TOK_LABEL) {
6177 next();
6178 for(;;) {
6179 if (tok < TOK_UIDENT)
6180 expect("label identifier");
6181 label_push(&local_label_stack, tok, LABEL_DECLARED);
6182 next();
6183 if (tok == ',') {
6184 next();
6185 } else {
6186 skip(';');
6187 break;
6191 while (tok != '}') {
6192 if ((a = is_label()))
6193 unget_tok(a);
6194 else
6195 decl(VT_LOCAL);
6196 if (tok != '}') {
6197 if (is_expr)
6198 vpop();
6199 block(bsym, csym, is_expr);
6203 if (current_cleanups != lcleanup) {
6204 int jmp = 0;
6205 Sym *g, **pg;
6207 for (pg = &pending_gotos; (g = *pg) && g->c > lncleanups;)
6208 if (g->prev_tok->r & LABEL_FORWARD) {
6209 Sym *pcl = g->next;
6210 if (!jmp)
6211 jmp = gjmp(0);
6212 gsym(pcl->jnext);
6213 try_call_scope_cleanup(lcleanup);
6214 pcl->jnext = gjmp(0);
6215 if (!lncleanups)
6216 goto remove_pending;
6217 g->c = lncleanups;
6218 pg = &g->prev;
6219 } else {
6220 remove_pending:
6221 *pg = g->prev;
6222 sym_free(g);
6224 gsym(jmp);
6225 if (!nocode_wanted) {
6226 try_call_scope_cleanup(lcleanup);
6230 current_cleanups = lcleanup;
6231 ncleanups = lncleanups;
6232 /* pop locally defined labels */
6233 label_pop(&local_label_stack, llabel, is_expr);
6234 /* pop locally defined symbols */
6235 --local_scope;
6236 /* In the is_expr case (a statement expression is finished here),
6237 vtop might refer to symbols on the local_stack. Either via the
6238 type or via vtop->sym. We can't pop those nor any that in turn
6239 might be referred to. To make it easier we don't roll back
6240 any symbols in that case; some upper level call to block() will
6241 do that. We do have to remove such symbols from the lookup
6242 tables, though. sym_pop will do that. */
6243 sym_pop(&local_stack, s, is_expr);
6245 /* Pop VLA frames and restore stack pointer if required */
6246 if (vlas_in_scope > saved_vlas_in_scope) {
6247 vla_sp_loc = saved_vlas_in_scope ? block_vla_sp_loc : vla_sp_root_loc;
6248 vla_sp_restore();
6250 vlas_in_scope = saved_vlas_in_scope;
6252 next();
6253 } else if (tok == TOK_RETURN) {
6254 next();
6255 if (tok != ';') {
6256 gexpr();
6257 gen_assign_cast(&func_vt);
6258 try_call_scope_cleanup(NULL);
6259 if ((func_vt.t & VT_BTYPE) == VT_VOID)
6260 vtop--;
6261 else
6262 gfunc_return(&func_vt);
6263 } else {
6264 try_call_scope_cleanup(NULL);
6266 skip(';');
6267 /* jump unless last stmt in top-level block */
6268 if (tok != '}' || local_scope != 1)
6269 rsym = gjmp(rsym);
6270 nocode_wanted |= 0x20000000;
6271 } else if (tok == TOK_BREAK) {
6272 /* compute jump */
6273 if (!bsym)
6274 tcc_error("cannot break");
6275 *bsym = gjmp(*bsym);
6276 next();
6277 skip(';');
6278 nocode_wanted |= 0x20000000;
6279 } else if (tok == TOK_CONTINUE) {
6280 /* compute jump */
6281 if (!csym)
6282 tcc_error("cannot continue");
6283 vla_sp_restore_root();
6284 *csym = gjmp(*csym);
6285 next();
6286 skip(';');
6287 nocode_wanted |= 0x20000000;
6288 } else if (tok == TOK_FOR) {
6289 int e;
6290 int saved_nocode_wanted;
6291 nocode_wanted &= ~0x20000000;
6292 next();
6293 skip('(');
6294 s = local_stack;
6295 ++local_scope;
6296 if (tok != ';') {
6297 /* c99 for-loop init decl? */
6298 if (!decl0(VT_LOCAL, 1, NULL)) {
6299 /* no, regular for-loop init expr */
6300 gexpr();
6301 vpop();
6304 skip(';');
6305 d = ind;
6306 c = ind;
6307 vla_sp_restore();
6308 a = 0;
6309 b = 0;
6310 if (tok != ';') {
6311 gexpr();
6312 a = gvtst(1, 0);
6314 skip(';');
6315 if (tok != ')') {
6316 e = gjmp(0);
6317 c = ind;
6318 vla_sp_restore();
6319 gexpr();
6320 vpop();
6321 gjmp_addr(d);
6322 gsym(e);
6324 skip(')');
6325 saved_nocode_wanted = nocode_wanted;
6326 block(&a, &b, 0);
6327 nocode_wanted = saved_nocode_wanted;
6328 gjmp_addr(c);
6329 gsym(a);
6330 gsym_addr(b, c);
6331 --local_scope;
6332 sym_pop(&local_stack, s, 0);
6334 } else
6335 if (tok == TOK_DO) {
6336 int saved_nocode_wanted;
6337 nocode_wanted &= ~0x20000000;
6338 next();
6339 a = 0;
6340 b = 0;
6341 d = ind;
6342 vla_sp_restore();
6343 saved_nocode_wanted = nocode_wanted;
6344 block(&a, &b, 0);
6345 skip(TOK_WHILE);
6346 skip('(');
6347 gsym(b);
6348 if (b)
6349 nocode_wanted = saved_nocode_wanted;
6350 gexpr();
6351 c = gvtst(0, 0);
6352 gsym_addr(c, d);
6353 nocode_wanted = saved_nocode_wanted;
6354 skip(')');
6355 gsym(a);
6356 skip(';');
6357 } else
6358 if (tok == TOK_SWITCH) {
6359 struct switch_t *saved, sw;
6360 int saved_nocode_wanted = nocode_wanted;
6361 SValue switchval;
6362 next();
6363 skip('(');
6364 gexpr();
6365 skip(')');
6366 switchval = *vtop--;
6367 a = 0;
6368 b = gjmp(0); /* jump to first case */
6369 sw.p = NULL; sw.n = 0; sw.def_sym = 0;
6370 saved = cur_switch;
6371 cur_switch = &sw;
6372 block(&a, csym, 0);
6373 nocode_wanted = saved_nocode_wanted;
6374 a = gjmp(a); /* add implicit break */
6375 /* case lookup */
6376 gsym(b);
6377 qsort(sw.p, sw.n, sizeof(void*), case_cmp);
6378 for (b = 1; b < sw.n; b++)
6379 if (sw.p[b - 1]->v2 >= sw.p[b]->v1)
6380 tcc_error("duplicate case value");
6381 /* Our switch table sorting is signed, so the compared
6382 value needs to be as well when it's 64bit. */
6383 if ((switchval.type.t & VT_BTYPE) == VT_LLONG)
6384 switchval.type.t &= ~VT_UNSIGNED;
6385 vpushv(&switchval);
6386 gcase(sw.p, sw.n, &a);
6387 vpop();
6388 if (sw.def_sym)
6389 gjmp_addr(sw.def_sym);
6390 dynarray_reset(&sw.p, &sw.n);
6391 cur_switch = saved;
6392 /* break label */
6393 gsym(a);
6394 } else
6395 if (tok == TOK_CASE) {
6396 struct case_t *cr = tcc_malloc(sizeof(struct case_t));
6397 if (!cur_switch)
6398 expect("switch");
6399 nocode_wanted &= ~0x20000000;
6400 next();
6401 cr->v1 = cr->v2 = expr_const64();
6402 if (gnu_ext && tok == TOK_DOTS) {
6403 next();
6404 cr->v2 = expr_const64();
6405 if (cr->v2 < cr->v1)
6406 tcc_warning("empty case range");
6408 cr->sym = ind;
6409 dynarray_add(&cur_switch->p, &cur_switch->n, cr);
6410 skip(':');
6411 is_expr = 0;
6412 goto block_after_label;
6413 } else
6414 if (tok == TOK_DEFAULT) {
6415 next();
6416 skip(':');
6417 if (!cur_switch)
6418 expect("switch");
6419 if (cur_switch->def_sym)
6420 tcc_error("too many 'default'");
6421 cur_switch->def_sym = ind;
6422 is_expr = 0;
6423 goto block_after_label;
6424 } else
6425 if (tok == TOK_GOTO) {
6426 next();
6427 if (tok == '*' && gnu_ext) {
6428 /* computed goto */
6429 next();
6430 gexpr();
6431 if ((vtop->type.t & VT_BTYPE) != VT_PTR)
6432 expect("pointer");
6433 ggoto();
6434 } else if (tok >= TOK_UIDENT) {
6435 s = label_find(tok);
6436 /* put forward definition if needed */
6437 if (!s)
6438 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
6439 else if (s->r == LABEL_DECLARED)
6440 s->r = LABEL_FORWARD;
6442 vla_sp_restore_root();
6443 if (s->r & LABEL_FORWARD) {
6444 /* start new goto chain for cleanups, linked via label->next */
6445 if (current_cleanups) {
6446 sym_push2(&pending_gotos, SYM_FIELD, 0, ncleanups);
6447 pending_gotos->prev_tok = s;
6448 s = sym_push2(&s->next, SYM_FIELD, 0, 0);
6449 pending_gotos->next = s;
6451 s->jnext = gjmp(s->jnext);
6452 } else {
6453 try_call_cleanup_goto(s->cleanupstate);
6454 gjmp_addr(s->jnext);
6456 next();
6457 } else {
6458 expect("label identifier");
6460 skip(';');
6461 } else if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
6462 asm_instr();
6463 } else {
6464 b = is_label();
6465 if (b) {
6466 /* label case */
6467 next();
6468 s = label_find(b);
6469 if (s) {
6470 if (s->r == LABEL_DEFINED)
6471 tcc_error("duplicate label '%s'", get_tok_str(s->v, NULL));
6472 s->r = LABEL_DEFINED;
6473 if (s->next) {
6474 Sym *pcl; /* pending cleanup goto */
6475 for (pcl = s->next; pcl; pcl = pcl->prev)
6476 gsym(pcl->jnext);
6477 sym_pop(&s->next, NULL, 0);
6478 } else
6479 gsym(s->jnext);
6480 } else {
6481 s = label_push(&global_label_stack, b, LABEL_DEFINED);
6483 s->jnext = ind;
6484 s->cleanupstate = current_cleanups;
6485 vla_sp_restore();
6486 /* we accept this, but it is a mistake */
6487 block_after_label:
6488 nocode_wanted &= ~0x20000000;
6489 if (tok == '}') {
6490 tcc_warning("deprecated use of label at end of compound statement");
6491 } else {
6492 if (is_expr)
6493 vpop();
6494 block(bsym, csym, is_expr);
6496 } else {
6497 /* expression case */
6498 if (tok != ';') {
6499 if (is_expr) {
6500 vpop();
6501 gexpr();
6502 } else {
6503 gexpr();
6504 vpop();
6507 skip(';');
6512 /* This skips over a stream of tokens containing balanced {} and ()
6513 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
6514 with a '{'). If STR then allocates and stores the skipped tokens
6515 in *STR. This doesn't check if () and {} are nested correctly,
6516 e.g. "({)}" is accepted. */
6517 static void skip_or_save_block(TokenString **str)
6519 int braces = tok == '{';
6520 int level = 0;
6521 if (str)
6522 *str = tok_str_alloc();
6524 while ((level > 0 || (tok != '}' && tok != ',' && tok != ';' && tok != ')'))) {
6525 int t;
6526 if (tok == TOK_EOF) {
6527 if (str || level > 0)
6528 tcc_error("unexpected end of file");
6529 else
6530 break;
6532 if (str)
6533 tok_str_add_tok(*str);
6534 t = tok;
6535 next();
6536 if (t == '{' || t == '(') {
6537 level++;
6538 } else if (t == '}' || t == ')') {
6539 level--;
6540 if (level == 0 && braces && t == '}')
6541 break;
6544 if (str) {
6545 tok_str_add(*str, -1);
6546 tok_str_add(*str, 0);
6550 #define EXPR_CONST 1
6551 #define EXPR_ANY 2
6553 static void parse_init_elem(int expr_type)
6555 int saved_global_expr;
6556 switch(expr_type) {
6557 case EXPR_CONST:
6558 /* compound literals must be allocated globally in this case */
6559 saved_global_expr = global_expr;
6560 global_expr = 1;
6561 expr_const1();
6562 global_expr = saved_global_expr;
6563 /* NOTE: symbols are accepted, as well as lvalues for anonymous symbols
6564 (compound literals). */
6565 if (((vtop->r & (VT_VALMASK | VT_LVAL)) != VT_CONST
6566 && ((vtop->r & (VT_SYM|VT_LVAL)) != (VT_SYM|VT_LVAL)
6567 || vtop->sym->v < SYM_FIRST_ANOM))
6568 #ifdef TCC_TARGET_PE
6569 || ((vtop->r & VT_SYM) && vtop->sym->a.dllimport)
6570 #endif
6572 tcc_error("initializer element is not constant");
6573 break;
6574 case EXPR_ANY:
6575 expr_eq();
6576 break;
6580 /* put zeros for variable based init */
6581 static void init_putz(Section *sec, unsigned long c, int size)
6583 if (sec) {
6584 /* nothing to do because globals are already set to zero */
6585 } else {
6586 vpush_global_sym(&func_old_type, TOK_memset);
6587 vseti(VT_LOCAL, c);
6588 #ifdef TCC_TARGET_ARM
6589 vpushs(size);
6590 vpushi(0);
6591 #else
6592 vpushi(0);
6593 vpushs(size);
6594 #endif
6595 gfunc_call(3);
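/* Note (illustrative): for local (automatic) aggregates the zero filling
   above is done with a generated call to memset(); static data needs
   nothing because its storage is already zero:

       void f(void) { int a[100] = { 1 }; }   // a[1..99] zeroed via memset
*/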
6599 #define DIF_FIRST 1
6600 #define DIF_SIZE_ONLY 2
6601 #define DIF_HAVE_ELEM 4
6603 /* t is the array or struct type. c is the array or struct
6604 address. cur_field is the pointer to the current
6605 field, for arrays the 'c' member contains the current start
6606 index. 'flags' is as in decl_initializer.
6607 'al' contains the already initialized length of the
6608 current container (starting at c). This returns the new length of that. */
6609 static int decl_designator(CType *type, Section *sec, unsigned long c,
6610 Sym **cur_field, int flags, int al)
6612 Sym *s, *f;
6613 int index, index_last, align, l, nb_elems, elem_size;
6614 unsigned long corig = c;
6616 elem_size = 0;
6617 nb_elems = 1;
6618 if (flags & DIF_HAVE_ELEM)
6619 goto no_designator;
6620 if (gnu_ext && (l = is_label()) != 0)
6621 goto struct_field;
6622 /* NOTE: we only support ranges for last designator */
6623 while (nb_elems == 1 && (tok == '[' || tok == '.')) {
6624 if (tok == '[') {
6625 if (!(type->t & VT_ARRAY))
6626 expect("array type");
6627 next();
6628 index = index_last = expr_const();
6629 if (tok == TOK_DOTS && gnu_ext) {
6630 next();
6631 index_last = expr_const();
6633 skip(']');
6634 s = type->ref;
6635 if (index < 0 || (s->c >= 0 && index_last >= s->c) ||
6636 index_last < index)
6637 tcc_error("invalid index");
6638 if (cur_field)
6639 (*cur_field)->c = index_last;
6640 type = pointed_type(type);
6641 elem_size = type_size(type, &align);
6642 c += index * elem_size;
6643 nb_elems = index_last - index + 1;
6644 } else {
6645 next();
6646 l = tok;
6647 struct_field:
6648 next();
6649 if ((type->t & VT_BTYPE) != VT_STRUCT)
6650 expect("struct/union type");
6651 f = find_field(type, l);
6652 if (!f)
6653 expect("field");
6654 if (cur_field)
6655 *cur_field = f;
6656 type = &f->type;
6657 c += f->c;
6659 cur_field = NULL;
6661 if (!cur_field) {
6662 if (tok == '=') {
6663 next();
6664 } else if (!gnu_ext) {
6665 expect("=");
6667 } else {
6668 no_designator:
6669 if (type->t & VT_ARRAY) {
6670 index = (*cur_field)->c;
6671 if (type->ref->c >= 0 && index >= type->ref->c)
6672 tcc_error("index too large");
6673 type = pointed_type(type);
6674 c += index * type_size(type, &align);
6675 } else {
6676 f = *cur_field;
6677 while (f && (f->v & SYM_FIRST_ANOM) && (f->type.t & VT_BITFIELD))
6678 *cur_field = f = f->next;
6679 if (!f)
6680 tcc_error("too many field init");
6681 type = &f->type;
6682 c += f->c;
6685 /* must put zero in holes (note that doing it that way
6686 ensures that it even works with designators) */
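/* e.g. (editor's note) 'int a[5] = { [0] = 1, [3] = 4 };' zero fills
   a[1] and a[2] here before a[3] is initialized */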
6687 if (!(flags & DIF_SIZE_ONLY) && c - corig > al)
6688 init_putz(sec, corig + al, c - corig - al);
6689 decl_initializer(type, sec, c, flags & ~DIF_FIRST);
6691 /* XXX: make it more general */
6692 if (!(flags & DIF_SIZE_ONLY) && nb_elems > 1) {
6693 unsigned long c_end;
6694 uint8_t *src, *dst;
6695 int i;
6697 if (!sec) {
6698 vset(type, VT_LOCAL|VT_LVAL, c);
6699 for (i = 1; i < nb_elems; i++) {
6700 vset(type, VT_LOCAL|VT_LVAL, c + elem_size * i);
6701 vswap();
6702 vstore();
6704 vpop();
6705 } else if (!NODATA_WANTED) {
6706 c_end = c + nb_elems * elem_size;
6707 if (c_end > sec->data_allocated)
6708 section_realloc(sec, c_end);
6709 src = sec->data + c;
6710 dst = src;
6711 for(i = 1; i < nb_elems; i++) {
6712 dst += elem_size;
6713 memcpy(dst, src, elem_size);
6717 c += nb_elems * type_size(type, &align);
6718 if (c - corig > al)
6719 al = c - corig;
6720 return al;
6723 /* store a value or an expression directly in global data or in local array */
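/* Illustrative example (editor's note, not part of the original source):
   for 'static short s = 3;' the value is written directly into the bytes of
   the data section; for a local 'short s = 3;' a store into the stack slot
   is generated instead (the vset/vstore path at the end of this function). */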
6724 static void init_putv(CType *type, Section *sec, unsigned long c)
6726 int bt;
6727 void *ptr;
6728 CType dtype;
6730 dtype = *type;
6731 dtype.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
6733 if (sec) {
6734 int size, align;
6735 /* XXX: not portable */
6736 /* XXX: generate error if incorrect relocation */
6737 gen_assign_cast(&dtype);
6738 bt = type->t & VT_BTYPE;
6740 if ((vtop->r & VT_SYM)
6741 && bt != VT_PTR
6742 && bt != VT_FUNC
6743 && (bt != (PTR_SIZE == 8 ? VT_LLONG : VT_INT)
6744 || (type->t & VT_BITFIELD))
6745 && !((vtop->r & VT_CONST) && vtop->sym->v >= SYM_FIRST_ANOM)
6747 tcc_error("initializer element is not computable at load time");
6749 if (NODATA_WANTED) {
6750 vtop--;
6751 return;
6754 size = type_size(type, &align);
6755 section_reserve(sec, c + size);
6756 ptr = sec->data + c;
6758 /* XXX: make code faster ? */
6759 if ((vtop->r & (VT_SYM|VT_CONST)) == (VT_SYM|VT_CONST) &&
6760 vtop->sym->v >= SYM_FIRST_ANOM &&
6761 /* XXX This rejects compound literals like
6762 '(void *){ptr}'. The problem is that '&sym' is
6763 represented the same way, which would be ruled out
6764 by the SYM_FIRST_ANOM check above, but also '"string"'
6765 in 'char *p = "string"' is represented the same
6766 with the type being VT_PTR and the symbol being an
6767 anonymous one. That is, there's no difference in vtop
6768 between '(void *){x}' and '&(void *){x}'. Ignore
6769 pointer typed entities here. Hopefully no real code
6770 will ever use compound literals with scalar type. */
6771 (vtop->type.t & VT_BTYPE) != VT_PTR) {
6772 /* These come from compound literals, memcpy stuff over. */
6773 Section *ssec;
6774 ElfSym *esym;
6775 ElfW_Rel *rel;
6776 esym = elfsym(vtop->sym);
6777 ssec = tcc_state->sections[esym->st_shndx];
6778 memmove (ptr, ssec->data + esym->st_value, size);
6779 if (ssec->reloc) {
6780 /* We need to copy over all memory contents, and that
6781 includes relocations. Use the fact that relocs are
6782 created in order, so look from the end of relocs
6783 until we hit one before the copied region. */
6784 int num_relocs = ssec->reloc->data_offset / sizeof(*rel);
6785 rel = (ElfW_Rel*)(ssec->reloc->data + ssec->reloc->data_offset);
6786 while (num_relocs--) {
6787 rel--;
6788 if (rel->r_offset >= esym->st_value + size)
6789 continue;
6790 if (rel->r_offset < esym->st_value)
6791 break;
6792 /* Note: if the same fields are initialized multiple
6793 times (possible with designators) then we possibly
6794 add multiple relocations for the same offset here.
6795 That would lead to wrong code; the last reloc needs
6796 to win. We clean this up later after the whole
6797 initializer is parsed. */
6798 put_elf_reloca(symtab_section, sec,
6799 c + rel->r_offset - esym->st_value,
6800 ELFW(R_TYPE)(rel->r_info),
6801 ELFW(R_SYM)(rel->r_info),
6802 #if PTR_SIZE == 8
6803 rel->r_addend
6804 #else
6806 #endif
6810 } else {
6811 if (type->t & VT_BITFIELD) {
6812 int bit_pos, bit_size, bits, n;
6813 unsigned char *p, v, m;
6814 bit_pos = BIT_POS(vtop->type.t);
6815 bit_size = BIT_SIZE(vtop->type.t);
6816 p = (unsigned char*)ptr + (bit_pos >> 3);
6817 bit_pos &= 7, bits = 0;
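/* editor's note: store 'bit_size' bits of the constant into the data one
   byte at a time, preserving the neighbouring bits of each byte */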
6818 while (bit_size) {
6819 n = 8 - bit_pos;
6820 if (n > bit_size)
6821 n = bit_size;
6822 v = vtop->c.i >> bits << bit_pos;
6823 m = ((1 << n) - 1) << bit_pos;
6824 *p = (*p & ~m) | (v & m);
6825 bits += n, bit_size -= n, bit_pos = 0, ++p;
6827 } else
6828 switch(bt) {
6829 /* XXX: when cross-compiling we assume that each type has the
6830 same representation on host and target, which is likely to
6831 be wrong in the case of long double */
6832 case VT_BOOL:
6833 vtop->c.i = vtop->c.i != 0;
6834 case VT_BYTE:
6835 *(char *)ptr |= vtop->c.i;
6836 break;
6837 case VT_SHORT:
6838 *(short *)ptr |= vtop->c.i;
6839 break;
6840 case VT_FLOAT:
6841 *(float*)ptr = vtop->c.f;
6842 break;
6843 case VT_DOUBLE:
6844 *(double *)ptr = vtop->c.d;
6845 break;
6846 case VT_LDOUBLE:
6847 #if defined TCC_IS_NATIVE_387
6848 if (sizeof (long double) >= 10) /* zero pad ten-byte LD */
6849 memcpy(ptr, &vtop->c.ld, 10);
6850 #ifdef __TINYC__
6851 else if (sizeof (long double) == sizeof (double))
6852 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr) : "m" (vtop->c.ld));
6853 #endif
6854 else if (vtop->c.ld == 0.0)
6856 else
6857 #endif
6858 if (sizeof(long double) == LDOUBLE_SIZE)
6859 *(long double*)ptr = vtop->c.ld;
6860 else if (sizeof(double) == LDOUBLE_SIZE)
6861 *(double *)ptr = (double)vtop->c.ld;
6862 else
6863 tcc_error("can't cross compile long double constants");
6864 break;
6865 #if PTR_SIZE != 8
6866 case VT_LLONG:
6867 *(long long *)ptr |= vtop->c.i;
6868 break;
6869 #else
6870 case VT_LLONG:
6871 #endif
6872 case VT_PTR:
6874 addr_t val = vtop->c.i;
6875 #if PTR_SIZE == 8
6876 if (vtop->r & VT_SYM)
6877 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
6878 else
6879 *(addr_t *)ptr |= val;
6880 #else
6881 if (vtop->r & VT_SYM)
6882 greloc(sec, vtop->sym, c, R_DATA_PTR);
6883 *(addr_t *)ptr |= val;
6884 #endif
6885 break;
6887 default:
6889 int val = vtop->c.i;
6890 #if PTR_SIZE == 8
6891 if (vtop->r & VT_SYM)
6892 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
6893 else
6894 *(int *)ptr |= val;
6895 #else
6896 if (vtop->r & VT_SYM)
6897 greloc(sec, vtop->sym, c, R_DATA_PTR);
6898 *(int *)ptr |= val;
6899 #endif
6900 break;
6904 vtop--;
6905 } else {
6906 vset(&dtype, VT_LOCAL|VT_LVAL, c);
6907 vswap();
6908 vstore();
6909 vpop();
6913 /* 'type' contains the type and storage info. 'c' is the offset of the
6914 object in section 'sec'. If 'sec' is NULL, it means stack based
6915 allocation. 'flags & DIF_FIRST' is true if the array's '{' must be read
6916 (multi-dimension implicit array init handling). 'flags & DIF_SIZE_ONLY'
6917 is true if only size evaluation is wanted (only for arrays). */
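/* Illustrative examples (editor's note, not part of the original source):
     int a[] = { 1, 2, 3 };          // sized by a DIF_SIZE_ONLY pre-pass
     char s[] = "abc";               // string handled by the array code below
     struct T { int x, y; } t = { .y = 1 };  // braces go through decl_designator
*/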
6918 static void decl_initializer(CType *type, Section *sec, unsigned long c,
6919 int flags)
6921 int len, n, no_oblock, nb, i;
6922 int size1, align1;
6923 Sym *s, *f;
6924 Sym indexsym;
6925 CType *t1;
6927 if (!(flags & DIF_HAVE_ELEM) && tok != '{' &&
6928 /* In case of strings we have special handling for arrays, so
6929 don't consume them as initializer value (which would commit them
6930 to some anonymous symbol). */
6931 tok != TOK_LSTR && tok != TOK_STR &&
6932 !(flags & DIF_SIZE_ONLY)) {
6933 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
6934 flags |= DIF_HAVE_ELEM;
6937 if ((flags & DIF_HAVE_ELEM) &&
6938 !(type->t & VT_ARRAY) &&
6939 /* Compare unqualified types so that toplevel qualifiers are stripped:
6940 the source type might have VT_CONSTANT set, which is
6941 of course assignable to non-const elements. */
6942 is_compatible_unqualified_types(type, &vtop->type)) {
6943 init_putv(type, sec, c);
6944 } else if (type->t & VT_ARRAY) {
6945 s = type->ref;
6946 n = s->c;
6947 t1 = pointed_type(type);
6948 size1 = type_size(t1, &align1);
6950 no_oblock = 1;
6951 if (((flags & DIF_FIRST) && tok != TOK_LSTR && tok != TOK_STR) ||
6952 tok == '{') {
6953 if (tok != '{')
6954 tcc_error("character array initializer must be a literal,"
6955 " optionally enclosed in braces");
6956 skip('{');
6957 no_oblock = 0;
6960 /* only parse strings here if the type is correct (otherwise: handle
6961 them as ((w)char *) expressions) */
6962 if ((tok == TOK_LSTR &&
6963 #ifdef TCC_TARGET_PE
6964 (t1->t & VT_BTYPE) == VT_SHORT && (t1->t & VT_UNSIGNED)
6965 #else
6966 (t1->t & VT_BTYPE) == VT_INT
6967 #endif
6968 ) || (tok == TOK_STR && (t1->t & VT_BTYPE) == VT_BYTE)) {
6969 len = 0;
6970 while (tok == TOK_STR || tok == TOK_LSTR) {
6971 int cstr_len, ch;
6973 /* compute maximum number of chars wanted */
6974 if (tok == TOK_STR)
6975 cstr_len = tokc.str.size;
6976 else
6977 cstr_len = tokc.str.size / sizeof(nwchar_t);
6978 cstr_len--;
6979 nb = cstr_len;
6980 if (n >= 0 && nb > (n - len))
6981 nb = n - len;
6982 if (!(flags & DIF_SIZE_ONLY)) {
6983 if (cstr_len > nb)
6984 tcc_warning("initializer-string for array is too long");
6985 /* in order to go faster for the common case (char
6986 string in a global variable), we handle it
6987 specifically */
6988 if (sec && tok == TOK_STR && size1 == 1) {
6989 if (!NODATA_WANTED)
6990 memcpy(sec->data + c + len, tokc.str.data, nb);
6991 } else {
6992 for(i=0;i<nb;i++) {
6993 if (tok == TOK_STR)
6994 ch = ((unsigned char *)tokc.str.data)[i];
6995 else
6996 ch = ((nwchar_t *)tokc.str.data)[i];
6997 vpushi(ch);
6998 init_putv(t1, sec, c + (len + i) * size1);
7002 len += nb;
7003 next();
7005 /* only add trailing zero if enough storage (no
7006 warning in this case since it is standard) */
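/* e.g. (editor's note) 'char s[3] = "abc";' stores only 'a','b','c' with no
   trailing NUL and no warning, while 'char s[4] = "abc";' gets the '\0' */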
7007 if (n < 0 || len < n) {
7008 if (!(flags & DIF_SIZE_ONLY)) {
7009 vpushi(0);
7010 init_putv(t1, sec, c + (len * size1));
7012 len++;
7014 len *= size1;
7015 } else {
7016 indexsym.c = 0;
7017 f = &indexsym;
7019 do_init_list:
7020 len = 0;
7021 while (tok != '}' || (flags & DIF_HAVE_ELEM)) {
7022 len = decl_designator(type, sec, c, &f, flags, len);
7023 flags &= ~DIF_HAVE_ELEM;
7024 if (type->t & VT_ARRAY) {
7025 ++indexsym.c;
7026 /* special test for multi dimensional arrays (may not
7027 be strictly correct if designators are used at the
7028 same time) */
7029 if (no_oblock && len >= n*size1)
7030 break;
7031 } else {
7032 if (s->type.t == VT_UNION)
7033 f = NULL;
7034 else
7035 f = f->next;
7036 if (no_oblock && f == NULL)
7037 break;
7040 if (tok == '}')
7041 break;
7042 skip(',');
7045 /* put zeros at the end */
7046 if (!(flags & DIF_SIZE_ONLY) && len < n*size1)
7047 init_putz(sec, c + len, n*size1 - len);
7048 if (!no_oblock)
7049 skip('}');
7050 /* patch type size if needed, which happens only for array types */
7051 if (n < 0)
7052 s->c = size1 == 1 ? len : ((len + size1 - 1)/size1);
7053 } else if ((type->t & VT_BTYPE) == VT_STRUCT) {
7054 size1 = 1;
7055 no_oblock = 1;
7056 if ((flags & DIF_FIRST) || tok == '{') {
7057 skip('{');
7058 no_oblock = 0;
7060 s = type->ref;
7061 f = s->next;
7062 n = s->c;
7063 goto do_init_list;
7064 } else if (tok == '{') {
7065 if (flags & DIF_HAVE_ELEM)
7066 skip(';');
7067 next();
7068 decl_initializer(type, sec, c, flags & ~DIF_HAVE_ELEM);
7069 skip('}');
7070 } else if ((flags & DIF_SIZE_ONLY)) {
7071 /* If we supported only ISO C we wouldn't have to accept calling
7072 this on anything other than an array if DIF_SIZE_ONLY (and even then
7073 only on the outermost level, so no recursion would be needed),
7074 because initializing a flex array member isn't supported.
7075 But GNU C supports it, so we need to recurse even into
7076 subfields of structs and arrays when DIF_SIZE_ONLY is set. */
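/* e.g. (editor's note, GNU C):
     struct S { int n; int tail[]; };
     static struct S s = { 1, { 2, 3 } };
   sizing has to recurse into the braces to account for the flex member */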
7077 /* just skip expression */
7078 skip_or_save_block(NULL);
7079 } else {
7080 if (!(flags & DIF_HAVE_ELEM)) {
7081 /* This should happen only when we haven't parsed
7082 the init element above for fear of committing a
7083 string constant to memory too early. */
7084 if (tok != TOK_STR && tok != TOK_LSTR)
7085 expect("string constant");
7086 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
7088 init_putv(type, sec, c);
7092 /* parse an initializer for type 't' if 'has_init' is non zero, and
7093 allocate space in local or global data space ('r' is either
7094 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
7095 variable 'v' of scope 'scope' is declared before initializers
7096 are parsed. If 'v' is zero, then a reference to the new object
7097 is put in the value stack. If 'has_init' is 2, a special parsing
7098 is done to handle string constants. */
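/* Illustrative examples (editor's note, not part of the original source):
   'static int g = 1;' allocates 'g' in data_section and parses the
   initializer into it; with v == 0 (e.g. a string literal used inside an
   expression) the object is anonymous and a reference to it is pushed on
   the value stack instead. */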
7099 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r,
7100 int has_init, int v, int scope)
7102 int size, align, addr;
7103 TokenString *init_str = NULL;
7105 Section *sec;
7106 Sym *flexible_array;
7107 Sym *sym = NULL;
7108 int saved_nocode_wanted = nocode_wanted;
7109 #ifdef CONFIG_TCC_BCHECK
7110 int bcheck;
7111 #endif
7113 /* Always allocate static or global variables */
7114 if (v && (r & VT_VALMASK) == VT_CONST)
7115 nocode_wanted |= 0x80000000;
7117 #ifdef CONFIG_TCC_BCHECK
7118 bcheck = tcc_state->do_bounds_check && !NODATA_WANTED;
7119 #endif
7121 flexible_array = NULL;
7122 if ((type->t & VT_BTYPE) == VT_STRUCT) {
7123 Sym *field = type->ref->next;
7124 if (field) {
7125 while (field->next)
7126 field = field->next;
7127 if (field->type.t & VT_ARRAY && field->type.ref->c < 0)
7128 flexible_array = field;
7132 size = type_size(type, &align);
7133 /* If the size is unknown, we must evaluate it before
7134 evaluating the initializers because
7135 initializers can generate global data too
7136 (e.g. string pointers or ISO C99 compound
7137 literals). It also simplifies the handling of
7138 local initializers */
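/* e.g. (editor's note) for 'int a[] = { 1, 2, 3 };' the initializer tokens
   are saved, replayed once with DIF_SIZE_ONLY to learn the element count,
   and then replayed again below to actually emit the data or code */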
7139 if (size < 0 || (flexible_array && has_init)) {
7140 if (!has_init)
7141 tcc_error("unknown type size");
7142 /* get all init string */
7143 if (has_init == 2) {
7144 init_str = tok_str_alloc();
7145 /* only get strings */
7146 while (tok == TOK_STR || tok == TOK_LSTR) {
7147 tok_str_add_tok(init_str);
7148 next();
7150 tok_str_add(init_str, -1);
7151 tok_str_add(init_str, 0);
7152 } else {
7153 skip_or_save_block(&init_str);
7155 unget_tok(0);
7157 /* compute size */
7158 begin_macro(init_str, 1);
7159 next();
7160 decl_initializer(type, NULL, 0, DIF_FIRST | DIF_SIZE_ONLY);
7161 /* prepare second initializer parsing */
7162 macro_ptr = init_str->str;
7163 next();
7165 /* if still unknown size, error */
7166 size = type_size(type, &align);
7167 if (size < 0)
7168 tcc_error("unknown type size");
7170 /* If there's a flexible array member and it was used in the
7171 initializer, adjust the size. */
7172 if (flexible_array &&
7173 flexible_array->type.ref->c > 0)
7174 size += flexible_array->type.ref->c
7175 * pointed_size(&flexible_array->type);
7176 /* take into account specified alignment if bigger */
7177 if (ad->a.aligned) {
7178 int speca = 1 << (ad->a.aligned - 1);
7179 if (speca > align)
7180 align = speca;
7181 } else if (ad->a.packed) {
7182 align = 1;
7185 if (!v && NODATA_WANTED)
7186 size = 0, align = 1;
7188 if ((r & VT_VALMASK) == VT_LOCAL) {
7189 sec = NULL;
7190 #ifdef CONFIG_TCC_BCHECK
7191 if (bcheck && (type->t & VT_ARRAY)) {
7192 loc--;
7194 #endif
7195 loc = (loc - size) & -align;
7196 addr = loc;
7197 #ifdef CONFIG_TCC_BCHECK
7198 /* handles bounds */
7199 /* XXX: currently, since we do only one pass, we cannot track
7200 '&' operators, so we add only arrays */
7201 if (bcheck && (type->t & VT_ARRAY)) {
7202 addr_t *bounds_ptr;
7203 /* add padding between regions */
7204 loc--;
7205 /* then add local bound info */
7206 bounds_ptr = section_ptr_add(lbounds_section, 2 * sizeof(addr_t));
7207 bounds_ptr[0] = addr;
7208 bounds_ptr[1] = size;
7210 #endif
7211 if (v) {
7212 /* local variable */
7213 #ifdef CONFIG_TCC_ASM
7214 if (ad->asm_label) {
7215 int reg = asm_parse_regvar(ad->asm_label);
7216 if (reg >= 0)
7217 r = (r & ~VT_VALMASK) | reg;
7219 #endif
7220 sym = sym_push(v, type, r, addr);
7221 if (ad->cleanup_func) {
7222 Sym *cls = sym_push2(&all_cleanups, SYM_FIELD | ++ncleanups, 0, 0);
7223 cls->prev_tok = sym;
7224 cls->next = ad->cleanup_func;
7225 cls->ncl = current_cleanups;
7226 current_cleanups = cls;
7229 sym->a = ad->a;
7230 } else {
7231 /* push local reference */
7232 vset(type, r, addr);
7234 } else {
7235 if (v && scope == VT_CONST) {
7236 /* see if the symbol was already defined */
7237 sym = sym_find(v);
7238 if (sym) {
7239 patch_storage(sym, ad, type);
7240 /* we accept several definitions of the same global variable. */
7241 if (!has_init && sym->c && elfsym(sym)->st_shndx != SHN_UNDEF)
7242 goto no_alloc;
7246 /* allocate symbol in corresponding section */
7247 sec = ad->section;
7248 if (!sec) {
7249 if (has_init)
7250 sec = data_section;
7251 else if (tcc_state->nocommon)
7252 sec = bss_section;
7255 if (sec) {
7256 addr = section_add(sec, size, align);
7257 #ifdef CONFIG_TCC_BCHECK
7258 /* add padding if bound check */
7259 if (bcheck)
7260 section_add(sec, 1, 1);
7261 #endif
7262 } else {
7263 addr = align; /* SHN_COMMON is special, symbol value is align */
7264 sec = common_section;
7267 if (v) {
7268 if (!sym) {
7269 sym = sym_push(v, type, r | VT_SYM, 0);
7270 patch_storage(sym, ad, NULL);
7272 /* Local statics have a scope until now (for
7273 warnings), remove it here. */
7274 sym->sym_scope = 0;
7275 /* update symbol definition */
7276 put_extern_sym(sym, sec, addr, size);
7277 } else {
7278 /* push global reference */
7279 sym = get_sym_ref(type, sec, addr, size);
7280 vpushsym(type, sym);
7281 vtop->r |= r;
7284 #ifdef CONFIG_TCC_BCHECK
7285 /* handles bounds now because the symbol must be defined
7286 before for the relocation */
7287 if (bcheck) {
7288 addr_t *bounds_ptr;
7290 greloca(bounds_section, sym, bounds_section->data_offset, R_DATA_PTR, 0);
7291 /* then add global bound info */
7292 bounds_ptr = section_ptr_add(bounds_section, 2 * sizeof(addr_t));
7293 bounds_ptr[0] = 0; /* relocated */
7294 bounds_ptr[1] = size;
7296 #endif
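/* Illustrative example (editor's note, not part of the original source):
     void f(int n) { int a[n]; }
   for such a VLA the branch below saves the stack pointer on first use in
   the scope, computes the size at run time and adjusts the stack. */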
7299 if (type->t & VT_VLA) {
7300 int a;
7302 if (NODATA_WANTED)
7303 goto no_alloc;
7305 /* save current stack pointer */
7306 if (vlas_in_scope == 0) {
7307 if (vla_sp_root_loc == -1)
7308 vla_sp_root_loc = (loc -= PTR_SIZE);
7309 gen_vla_sp_save(vla_sp_root_loc);
7312 vla_runtime_type_size(type, &a);
7313 gen_vla_alloc(type, a);
7314 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
7315 /* on _WIN64, because of the function args scratch area, the
7316 result of alloca differs from RSP and is returned in RAX. */
7317 gen_vla_result(addr), addr = (loc -= PTR_SIZE);
7318 #endif
7319 gen_vla_sp_save(addr);
7320 vla_sp_loc = addr;
7321 vlas_in_scope++;
7323 } else if (has_init) {
7324 size_t oldreloc_offset = 0;
7325 if (sec && sec->reloc)
7326 oldreloc_offset = sec->reloc->data_offset;
7327 decl_initializer(type, sec, addr, DIF_FIRST);
7328 if (sec && sec->reloc)
7329 squeeze_multi_relocs(sec, oldreloc_offset);
7330 /* patch flexible array member size back to -1, */
7331 /* for possible subsequent similar declarations */
7332 if (flexible_array)
7333 flexible_array->type.ref->c = -1;
7336 no_alloc:
7337 /* restore parse state if needed */
7338 if (init_str) {
7339 end_macro();
7340 next();
7343 nocode_wanted = saved_nocode_wanted;
7346 /* parse a function defined by symbol 'sym' and generate its code in
7347 'cur_text_section' */
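/* Editor's note (illustrative): this is also where 'int main()' receives an
   implicit 'return 0;' when control reaches the end of its body, see the
   funcname check further down. */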
7348 static void gen_function(Sym *sym)
7350 nocode_wanted = 0;
7351 ind = cur_text_section->data_offset;
7352 if (sym->a.aligned) {
7353 size_t newoff = section_add(cur_text_section, 0,
7354 1 << (sym->a.aligned - 1));
7355 gen_fill_nops(newoff - ind);
7357 /* NOTE: we patch the symbol size later */
7358 put_extern_sym(sym, cur_text_section, ind, 0);
7359 funcname = get_tok_str(sym->v, NULL);
7360 func_ind = ind;
7361 /* Initialize VLA state */
7362 vla_sp_loc = -1;
7363 vla_sp_root_loc = -1;
7364 /* put debug symbol */
7365 tcc_debug_funcstart(tcc_state, sym);
7366 /* push a dummy symbol to enable local sym storage */
7367 sym_push2(&local_stack, SYM_FIELD, 0, 0);
7368 local_scope = 1; /* for function parameters */
7369 gfunc_prolog(&sym->type);
7370 reset_local_scope();
7371 rsym = 0;
7372 clear_temp_local_var_list();
7373 block(NULL, NULL, 0);
7374 if (!(nocode_wanted & 0x20000000)
7375 && ((func_vt.t & VT_BTYPE) == VT_INT)
7376 && !strcmp (funcname, "main"))
7378 nocode_wanted = 0;
7379 vpushi(0);
7380 gen_assign_cast(&func_vt);
7381 gfunc_return(&func_vt);
7383 nocode_wanted = 0;
7384 gsym(rsym);
7385 gfunc_epilog();
7386 cur_text_section->data_offset = ind;
7387 label_pop(&global_label_stack, NULL, 0);
7388 /* reset local stack */
7389 reset_local_scope();
7390 sym_pop(&local_stack, NULL, 0);
7391 /* end of function */
7392 /* patch symbol size */
7393 elfsym(sym)->st_size = ind - func_ind;
7394 tcc_debug_funcend(tcc_state, ind - func_ind);
7395 /* It's better to crash than to generate wrong code */
7396 cur_text_section = NULL;
7397 funcname = ""; /* for safety */
7398 func_vt.t = VT_VOID; /* for safety */
7399 func_var = 0; /* for safety */
7400 ind = 0; /* for safety */
7401 nocode_wanted = 0x80000000;
7402 check_vstack();
7405 static void gen_inline_functions(TCCState *s)
7407 Sym *sym;
7408 int inline_generated, i, ln;
7409 struct InlineFunc *fn;
7411 ln = file->line_num;
7412 /* iterate while inline functions are referenced */
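/* e.g. (editor's note) for
     static inline int two(void) { return 2; }
     int use(void) { return two(); }
   'two' is generated here only because it was referenced; unused static
   inline functions are merely freed in free_inline_functions() below. */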
7413 do {
7414 inline_generated = 0;
7415 for (i = 0; i < s->nb_inline_fns; ++i) {
7416 fn = s->inline_fns[i];
7417 sym = fn->sym;
7418 if (sym && sym->c) {
7419 /* the function was used: generate its code and
7420 convert it to a normal function */
7421 fn->sym = NULL;
7422 if (file)
7423 pstrcpy(file->filename, sizeof file->filename, fn->filename);
7424 sym->type.t &= ~VT_INLINE;
7426 begin_macro(fn->func_str, 1);
7427 next();
7428 cur_text_section = text_section;
7429 gen_function(sym);
7430 end_macro();
7432 inline_generated = 1;
7435 } while (inline_generated);
7436 file->line_num = ln;
7439 ST_FUNC void free_inline_functions(TCCState *s)
7441 int i;
7442 /* free tokens of unused inline functions */
7443 for (i = 0; i < s->nb_inline_fns; ++i) {
7444 struct InlineFunc *fn = s->inline_fns[i];
7445 if (fn->sym)
7446 tok_str_free(fn->func_str);
7448 dynarray_reset(&s->inline_fns, &s->nb_inline_fns);
7451 /* 'l' is VT_LOCAL or VT_CONST to define default storage type, or VT_CMP
7452 if parsing old style parameter decl list (and FUNC_SYM is set then) */
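/* Illustrative example (editor's note, not part of the original source):
   the VT_CMP mode parses K&R parameter declaration lists such as
     int f(a, b)
         int a;
         char b;
     { return a + b; }
   where decl0(VT_CMP, 0, sym) is re-entered for the 'int a; char b;' part. */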
7453 static int decl0(int l, int is_for_loop_init, Sym *func_sym)
7455 int v, has_init, r;
7456 CType type, btype;
7457 Sym *sym;
7458 AttributeDef ad, adbase;
7460 while (1) {
7461 if (!parse_btype(&btype, &adbase)) {
7462 if (is_for_loop_init)
7463 return 0;
7464 /* skip redundant ';' if not in old parameter decl scope */
7465 if (tok == ';' && l != VT_CMP) {
7466 next();
7467 continue;
7469 if (l != VT_CONST)
7470 break;
7471 if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
7472 /* global asm block */
7473 asm_global_instr();
7474 continue;
7476 if (tok >= TOK_UIDENT) {
7477 /* special test for old K&R protos without explicit int
7478 type. Only accepted when defining global data */
7479 btype.t = VT_INT;
7480 } else {
7481 if (tok != TOK_EOF)
7482 expect("declaration");
7483 break;
7486 if (tok == ';') {
7487 if ((btype.t & VT_BTYPE) == VT_STRUCT) {
7488 int v = btype.ref->v;
7489 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) >= SYM_FIRST_ANOM)
7490 tcc_warning("unnamed struct/union that defines no instances");
7491 next();
7492 continue;
7494 if (IS_ENUM(btype.t)) {
7495 next();
7496 continue;
7499 while (1) { /* iterate thru each declaration */
7500 type = btype;
7501 /* If the base type itself was an array type of unspecified
7502 size (like in 'typedef int arr[]; arr x = {1};') then
7503 we will overwrite the unknown size by the real one for
7504 this decl. We need to unshare the ref symbol holding
7505 that size. */
7506 if ((type.t & VT_ARRAY) && type.ref->c < 0) {
7507 type.ref = sym_push(SYM_FIELD, &type.ref->type, 0, type.ref->c);
7509 ad = adbase;
7510 type_decl(&type, &ad, &v, TYPE_DIRECT);
7511 #if 0
7513 char buf[500];
7514 type_to_str(buf, sizeof(buf), &type, get_tok_str(v, NULL));
7515 printf("type = '%s'\n", buf);
7517 #endif
7518 if ((type.t & VT_BTYPE) == VT_FUNC) {
7519 if ((type.t & VT_STATIC) && (l == VT_LOCAL)) {
7520 tcc_error("function without file scope cannot be static");
7522 /* if old style function prototype, we accept a
7523 declaration list */
7524 sym = type.ref;
7525 if (sym->f.func_type == FUNC_OLD && l == VT_CONST)
7526 decl0(VT_CMP, 0, sym);
7529 if (gnu_ext && (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
7530 ad.asm_label = asm_label_instr();
7531 /* parse one last attribute list, after asm label */
7532 parse_attribute(&ad);
7533 if (tok == '{')
7534 expect(";");
7537 #ifdef TCC_TARGET_PE
7538 if (ad.a.dllimport || ad.a.dllexport) {
7539 if (type.t & (VT_STATIC|VT_TYPEDEF))
7540 tcc_error("cannot have dll linkage with static or typedef");
7541 if (ad.a.dllimport) {
7542 if ((type.t & VT_BTYPE) == VT_FUNC)
7543 ad.a.dllimport = 0;
7544 else
7545 type.t |= VT_EXTERN;
7548 #endif
7549 if (tok == '{') {
7550 if (l != VT_CONST)
7551 tcc_error("cannot use local functions");
7552 if ((type.t & VT_BTYPE) != VT_FUNC)
7553 expect("function definition");
7555 /* reject abstract declarators in function definition
7556 make old style params without decl have int type */
7557 sym = type.ref;
7558 while ((sym = sym->next) != NULL) {
7559 if (!(sym->v & ~SYM_FIELD))
7560 expect("identifier");
7561 if (sym->type.t == VT_VOID)
7562 sym->type = int_type;
7565 /* XXX: cannot do better now: convert extern inline to static inline */
7566 if ((type.t & (VT_EXTERN | VT_INLINE)) == (VT_EXTERN | VT_INLINE))
7567 type.t = (type.t & ~VT_EXTERN) | VT_STATIC;
7569 /* put function symbol */
7570 sym = external_global_sym(v, &type, 0);
7571 type.t &= ~VT_EXTERN;
7572 patch_storage(sym, &ad, &type);
7574 /* static inline functions are just recorded as a kind
7575 of macro. Their code will be emitted at the end of
7576 the compilation unit only if they are used */
7577 if ((type.t & (VT_INLINE | VT_STATIC)) ==
7578 (VT_INLINE | VT_STATIC)) {
7579 struct InlineFunc *fn;
7580 const char *filename;
7582 filename = file ? file->filename : "";
7583 fn = tcc_malloc(sizeof *fn + strlen(filename));
7584 strcpy(fn->filename, filename);
7585 fn->sym = sym;
7586 skip_or_save_block(&fn->func_str);
7587 dynarray_add(&tcc_state->inline_fns,
7588 &tcc_state->nb_inline_fns, fn);
7589 } else {
7590 /* compute text section */
7591 cur_text_section = ad.section;
7592 if (!cur_text_section)
7593 cur_text_section = text_section;
7594 gen_function(sym);
7596 break;
7597 } else {
7598 if (l == VT_CMP) {
7599 /* find parameter in function parameter list */
7600 for (sym = func_sym->next; sym; sym = sym->next)
7601 if ((sym->v & ~SYM_FIELD) == v)
7602 goto found;
7603 tcc_error("declaration for parameter '%s' but no such parameter",
7604 get_tok_str(v, NULL));
7605 found:
7606 if (type.t & VT_STORAGE) /* 'register' is okay */
7607 tcc_error("storage class specified for '%s'",
7608 get_tok_str(v, NULL));
7609 if (sym->type.t != VT_VOID)
7610 tcc_error("redefinition of parameter '%s'",
7611 get_tok_str(v, NULL));
7612 convert_parameter_type(&type);
7613 sym->type = type;
7614 } else if (type.t & VT_TYPEDEF) {
7615 /* save typedefed type */
7616 /* XXX: test storage specifiers ? */
7617 sym = sym_find(v);
7618 if (sym && sym->sym_scope == local_scope) {
7619 if (!is_compatible_types(&sym->type, &type)
7620 || !(sym->type.t & VT_TYPEDEF))
7621 tcc_error("incompatible redefinition of '%s'",
7622 get_tok_str(v, NULL));
7623 sym->type = type;
7624 } else {
7625 sym = sym_push(v, &type, 0, 0);
7627 sym->a = ad.a;
7628 sym->f = ad.f;
7629 } else if ((type.t & VT_BTYPE) == VT_VOID
7630 && !(type.t & VT_EXTERN)) {
7631 tcc_error("declaration of void object");
7632 } else {
7633 r = 0;
7634 if ((type.t & VT_BTYPE) == VT_FUNC) {
7635 /* external function definition */
7636 /* specific case for func_call attribute */
7637 type.ref->f = ad.f;
7638 } else if (!(type.t & VT_ARRAY)) {
7639 /* not lvalue if array */
7640 r |= lvalue_type(type.t);
7642 has_init = (tok == '=');
7643 if (has_init && (type.t & VT_VLA))
7644 tcc_error("variable length array cannot be initialized");
7645 if (((type.t & VT_EXTERN) && (!has_init || l != VT_CONST)) ||
7646 ((type.t & VT_BTYPE) == VT_FUNC) ||
7647 ((type.t & VT_ARRAY) && (type.t & VT_STATIC) &&
7648 !has_init && l == VT_CONST && type.ref->c < 0)) {
7649 /* external variable or function */
7650 /* NOTE: as with GCC, uninitialized global static
7651 arrays of zero size are considered
7652 extern */
7653 type.t |= VT_EXTERN;
7654 sym = external_sym(v, &type, r, &ad);
7655 if (ad.alias_target) {
7656 ElfSym *esym;
7657 Sym *alias_target;
7658 alias_target = sym_find(ad.alias_target);
7659 esym = elfsym(alias_target);
7660 if (!esym)
7661 tcc_error("unsupported forward __alias__ attribute");
7662 /* Local statics have a scope until now (for
7663 warnings), remove it here. */
7664 sym->sym_scope = 0;
7665 put_extern_sym2(sym, esym->st_shndx, esym->st_value, esym->st_size, 0);
7667 } else {
7668 if (type.t & VT_STATIC)
7669 r |= VT_CONST;
7670 else
7671 r |= l;
7672 if (has_init)
7673 next();
7674 else if (l == VT_CONST)
7675 /* uninitialized global variables may be overridden */
7676 type.t |= VT_EXTERN;
7677 decl_initializer_alloc(&type, &ad, r, has_init, v, l);
7680 if (tok != ',') {
7681 if (is_for_loop_init)
7682 return 1;
7683 skip(';');
7684 break;
7686 next();
7690 return 0;
7693 static void decl(int l)
7695 decl0(l, 0, NULL);
7698 /* ------------------------------------------------------------------------- */