_Static_assert must be followed by semicolon
[tinycc.git] / tccgen.c
blob 23712fee2ce1a269772b5407d20bb77b3b7f4a78
1 /*
2 * TCC - Tiny C Compiler
3 *
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 #include "tcc.h"
22 /********************************************************/
23 /* global variables */
25 /* loc : local variable index
26 ind : output code index
27 rsym: return symbol
28 anon_sym: anonymous symbol index
30 ST_DATA int rsym, anon_sym, ind, loc;
32 ST_DATA Sym *sym_free_first;
33 ST_DATA void **sym_pools;
34 ST_DATA int nb_sym_pools;
36 ST_DATA Sym *global_stack;
37 ST_DATA Sym *local_stack;
38 ST_DATA Sym *define_stack;
39 ST_DATA Sym *global_label_stack;
40 ST_DATA Sym *local_label_stack;
42 static Sym *all_cleanups, *current_cleanups, *pending_gotos;
43 static int ncleanups;
45 static int local_scope;
46 static int in_sizeof;
47 static int section_sym;
49 ST_DATA int vlas_in_scope; /* number of VLAs that are currently in scope */
50 ST_DATA int vla_sp_root_loc; /* vla_sp_loc for SP before any VLAs were pushed */
51 ST_DATA int vla_sp_loc; /* stack offset of the slot where the stack pointer is saved whenever it is modified (VLA support) */
53 ST_DATA SValue __vstack[1+VSTACK_SIZE], *vtop, *pvtop;
55 ST_DATA int const_wanted; /* true if constant wanted */
56 ST_DATA int nocode_wanted; /* no code generation wanted */
57 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
58 #define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */
59 ST_DATA int global_expr; /* true if compound literals must be allocated globally (used during initializer parsing) */
60 ST_DATA CType func_vt; /* current function return type (used by return instruction) */
61 ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
62 ST_DATA int func_vc;
63 ST_DATA int last_line_num, last_ind, func_ind; /* debug last line number and pc */
64 ST_DATA const char *funcname;
65 ST_DATA int g_debug;
67 ST_DATA CType char_pointer_type, func_old_type, int_type, size_type, ptrdiff_type;
69 ST_DATA struct switch_t {
70 struct case_t {
71 int64_t v1, v2;
72 int sym;
73 } **p; int n; /* list of case ranges */
74 int def_sym; /* default symbol */
75 } *cur_switch; /* current switch */
77 #define MAX_TEMP_LOCAL_VARIABLE_NUMBER 0x4
78 /* list of temporary local variables on the stack in the current function. */
79 ST_DATA struct temp_local_variable {
80 int location; // offset on stack (SValue.c.i)
81 short size;
82 short align;
83 } arr_temp_local_vars[MAX_TEMP_LOCAL_VARIABLE_NUMBER];
84 short nb_temp_local_vars;
86 /* ------------------------------------------------------------------------- */
88 static void gen_cast(CType *type);
89 static void gen_cast_s(int t);
90 static inline CType *pointed_type(CType *type);
91 static int is_compatible_types(CType *type1, CType *type2);
92 static int parse_btype(CType *type, AttributeDef *ad);
93 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td);
94 static void parse_expr_type(CType *type);
95 static void init_putv(CType *type, Section *sec, unsigned long c);
96 static void decl_initializer(CType *type, Section *sec, unsigned long c, int flags);
97 static void block(int *bsym, int *csym, int is_expr);
98 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
99 static void decl(int l);
100 static int decl0(int l, int is_for_loop_init, Sym *);
101 static void expr_eq(void);
102 static void vla_runtime_type_size(CType *type, int *a);
103 static void vla_sp_restore(void);
104 static void vla_sp_restore_root(void);
105 static int is_compatible_unqualified_types(CType *type1, CType *type2);
106 static inline int64_t expr_const64(void);
107 static void vpush64(int ty, unsigned long long v);
108 static void vpush(CType *type);
109 static int gvtst(int inv, int t);
110 static void gen_inline_functions(TCCState *s);
111 static void skip_or_save_block(TokenString **str);
112 static void gv_dup(void);
113 static int get_temp_local_var(int size,int align);
114 static void clear_temp_local_var_list();
117 static void reset_local_scope(void)
119 if (current_cleanups)
120 tcc_error("ICE current_cleanups");
121 sym_pop(&all_cleanups, NULL, 0);
122 local_scope = 0;
125 ST_INLN int is_float(int t)
127 int bt;
128 bt = t & VT_BTYPE;
129 return bt == VT_LDOUBLE || bt == VT_DOUBLE || bt == VT_FLOAT || bt == VT_QFLOAT;
132 /* we use our own 'finite' function to avoid potential problems with
133 non-standard math libs */
134 /* XXX: endianness dependent */
135 ST_FUNC int ieee_finite(double d)
137 int p[4];
138 memcpy(p, &d, sizeof(double));
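/* on a little-endian layout p[1] is the high word of the double; OR-ing in the
   sign and mantissa bits leaves only the exponent field to decide whether the
   +1 wraps to zero, so the result is 1 for finite values and 0 for NaN/Inf */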
139 return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
142 /* compiling intel long double natively */
143 #if (defined __i386__ || defined __x86_64__) \
144 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
145 # define TCC_IS_NATIVE_387
146 #endif
148 ST_FUNC void test_lvalue(void)
150 if (!(vtop->r & VT_LVAL))
151 expect("lvalue");
154 ST_FUNC void check_vstack(void)
156 if (pvtop != vtop)
157 tcc_error("internal compiler error: vstack leak (%d)", (int)(vtop - pvtop));
160 /* ------------------------------------------------------------------------- */
161 /* vstack debugging aid */
163 #if 0
164 void pv (const char *lbl, int a, int b)
166 int i;
167 for (i = a; i < a + b; ++i) {
168 SValue *p = &vtop[-i];
169 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
170 lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
173 #endif
175 /* ------------------------------------------------------------------------- */
176 /* start of translation unit info */
177 ST_FUNC void tcc_debug_start(TCCState *s1)
179 if (s1->do_debug) {
180 char buf[512];
182 /* file info: full path + filename */
183 section_sym = put_elf_sym(symtab_section, 0, 0,
184 ELFW(ST_INFO)(STB_LOCAL, STT_SECTION), 0,
185 text_section->sh_num, NULL);
186 getcwd(buf, sizeof(buf));
187 #ifdef _WIN32
188 normalize_slashes(buf);
189 #endif
190 pstrcat(buf, sizeof(buf), "/");
191 put_stabs_r(buf, N_SO, 0, 0,
192 text_section->data_offset, text_section, section_sym);
193 put_stabs_r(file->filename, N_SO, 0, 0,
194 text_section->data_offset, text_section, section_sym);
195 last_ind = 0;
196 last_line_num = 0;
199 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
200 symbols can be safely used */
201 put_elf_sym(symtab_section, 0, 0,
202 ELFW(ST_INFO)(STB_LOCAL, STT_FILE), 0,
203 SHN_ABS, file->filename);
206 /* put end of translation unit info */
207 ST_FUNC void tcc_debug_end(TCCState *s1)
209 if (!s1->do_debug)
210 return;
211 put_stabs_r(NULL, N_SO, 0, 0,
212 text_section->data_offset, text_section, section_sym);
216 /* generate line number info */
217 ST_FUNC void tcc_debug_line(TCCState *s1)
219 if (!s1->do_debug)
220 return;
221 if ((last_line_num != file->line_num || last_ind != ind)) {
222 put_stabn(N_SLINE, 0, file->line_num, ind - func_ind);
223 last_ind = ind;
224 last_line_num = file->line_num;
228 /* put function symbol */
229 ST_FUNC void tcc_debug_funcstart(TCCState *s1, Sym *sym)
231 char buf[512];
233 if (!s1->do_debug)
234 return;
236 /* stabs info */
237 /* XXX: we put here a dummy type */
238 snprintf(buf, sizeof(buf), "%s:%c1",
239 funcname, sym->type.t & VT_STATIC ? 'f' : 'F');
240 put_stabs_r(buf, N_FUN, 0, file->line_num, 0,
241 cur_text_section, sym->c);
242 /* //gr gdb wants a line at the function */
243 put_stabn(N_SLINE, 0, file->line_num, 0);
245 last_ind = 0;
246 last_line_num = 0;
249 /* put function size */
250 ST_FUNC void tcc_debug_funcend(TCCState *s1, int size)
252 if (!s1->do_debug)
253 return;
254 put_stabn(N_FUN, 0, 0, size);
257 /* ------------------------------------------------------------------------- */
258 ST_FUNC int tccgen_compile(TCCState *s1)
260 cur_text_section = NULL;
261 funcname = "";
262 anon_sym = SYM_FIRST_ANOM;
263 section_sym = 0;
264 const_wanted = 0;
265 nocode_wanted = 0x80000000;
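/* note: only the sign bit is set, so code generation is suppressed at file
   scope while NODATA_WANTED stays false and static data can still be
   emitted (see the STATIC_DATA_WANTED macro above) */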
267 /* define some often used types */
268 int_type.t = VT_INT;
269 char_pointer_type.t = VT_BYTE;
270 mk_pointer(&char_pointer_type);
271 #if PTR_SIZE == 4
272 size_type.t = VT_INT | VT_UNSIGNED;
273 ptrdiff_type.t = VT_INT;
274 #elif LONG_SIZE == 4
275 size_type.t = VT_LLONG | VT_UNSIGNED;
276 ptrdiff_type.t = VT_LLONG;
277 #else
278 size_type.t = VT_LONG | VT_LLONG | VT_UNSIGNED;
279 ptrdiff_type.t = VT_LONG | VT_LLONG;
280 #endif
281 func_old_type.t = VT_FUNC;
282 func_old_type.ref = sym_push(SYM_FIELD, &int_type, 0, 0);
283 func_old_type.ref->f.func_call = FUNC_CDECL;
284 func_old_type.ref->f.func_type = FUNC_OLD;
286 tcc_debug_start(s1);
288 #ifdef TCC_TARGET_ARM
289 arm_init(s1);
290 #endif
292 #ifdef INC_DEBUG
293 printf("%s: **** new file\n", file->filename);
294 #endif
296 parse_flags = PARSE_FLAG_PREPROCESS | PARSE_FLAG_TOK_NUM | PARSE_FLAG_TOK_STR;
297 next();
298 decl(VT_CONST);
299 gen_inline_functions(s1);
300 check_vstack();
301 /* end of translation unit info */
302 tcc_debug_end(s1);
303 return 0;
306 /* ------------------------------------------------------------------------- */
307 ST_FUNC ElfSym *elfsym(Sym *s)
309 if (!s || !s->c)
310 return NULL;
311 return &((ElfSym *)symtab_section->data)[s->c];
314 /* apply storage attributes to Elf symbol */
315 ST_FUNC void update_storage(Sym *sym)
317 ElfSym *esym;
318 int sym_bind, old_sym_bind;
320 esym = elfsym(sym);
321 if (!esym)
322 return;
324 if (sym->a.visibility)
325 esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1))
326 | sym->a.visibility;
328 if (sym->type.t & VT_STATIC)
329 sym_bind = STB_LOCAL;
330 else if (sym->a.weak)
331 sym_bind = STB_WEAK;
332 else
333 sym_bind = STB_GLOBAL;
334 old_sym_bind = ELFW(ST_BIND)(esym->st_info);
335 if (sym_bind != old_sym_bind) {
336 esym->st_info = ELFW(ST_INFO)(sym_bind, ELFW(ST_TYPE)(esym->st_info));
339 #ifdef TCC_TARGET_PE
340 if (sym->a.dllimport)
341 esym->st_other |= ST_PE_IMPORT;
342 if (sym->a.dllexport)
343 esym->st_other |= ST_PE_EXPORT;
344 #endif
346 #if 0
347 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
348 get_tok_str(sym->v, NULL),
349 sym_bind == STB_WEAK ? 'w' : sym_bind == STB_LOCAL ? 'l' : 'g',
350 sym->a.visibility,
351 sym->a.dllexport,
352 sym->a.dllimport
354 #endif
357 /* ------------------------------------------------------------------------- */
358 /* update sym->c so that it points to an external symbol in section
359 'section' with value 'value' */
361 ST_FUNC void put_extern_sym2(Sym *sym, int sh_num,
362 addr_t value, unsigned long size,
363 int can_add_underscore)
365 int sym_type, sym_bind, info, other, t;
366 ElfSym *esym;
367 const char *name;
368 char buf1[256];
369 #ifdef CONFIG_TCC_BCHECK
370 char buf[32];
371 #endif
373 if (!sym->c) {
374 name = get_tok_str(sym->v, NULL);
375 #ifdef CONFIG_TCC_BCHECK
376 if (tcc_state->do_bounds_check) {
377 /* XXX: avoid doing that for statics ? */
378 /* if bound checking is activated, we change some function
379 names by adding the "__bound" prefix */
380 switch(sym->v) {
381 #ifdef TCC_TARGET_PE
382 /* XXX: we rely only on malloc hooks */
383 case TOK_malloc:
384 case TOK_free:
385 case TOK_realloc:
386 case TOK_memalign:
387 case TOK_calloc:
388 #endif
389 case TOK_memcpy:
390 case TOK_memmove:
391 case TOK_memset:
392 case TOK_strlen:
393 case TOK_strcpy:
394 case TOK_alloca:
395 strcpy(buf, "__bound_");
396 strcat(buf, name);
397 name = buf;
398 break;
401 #endif
402 t = sym->type.t;
403 if ((t & VT_BTYPE) == VT_FUNC) {
404 sym_type = STT_FUNC;
405 } else if ((t & VT_BTYPE) == VT_VOID) {
406 sym_type = STT_NOTYPE;
407 } else {
408 sym_type = STT_OBJECT;
410 if (t & VT_STATIC)
411 sym_bind = STB_LOCAL;
412 else
413 sym_bind = STB_GLOBAL;
414 other = 0;
415 #ifdef TCC_TARGET_PE
416 if (sym_type == STT_FUNC && sym->type.ref) {
417 Sym *ref = sym->type.ref;
418 if (ref->a.nodecorate) {
419 can_add_underscore = 0;
421 if (ref->f.func_call == FUNC_STDCALL && can_add_underscore) {
422 sprintf(buf1, "_%s@%d", name, ref->f.func_args * PTR_SIZE);
423 name = buf1;
424 other |= ST_PE_STDCALL;
425 can_add_underscore = 0;
428 #endif
429 if (tcc_state->leading_underscore && can_add_underscore) {
430 buf1[0] = '_';
431 pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
432 name = buf1;
434 if (sym->asm_label)
435 name = get_tok_str(sym->asm_label, NULL);
436 info = ELFW(ST_INFO)(sym_bind, sym_type);
437 sym->c = put_elf_sym(symtab_section, value, size, info, other, sh_num, name);
438 } else {
439 esym = elfsym(sym);
440 esym->st_value = value;
441 esym->st_size = size;
442 esym->st_shndx = sh_num;
444 update_storage(sym);
447 ST_FUNC void put_extern_sym(Sym *sym, Section *section,
448 addr_t value, unsigned long size)
450 int sh_num = section ? section->sh_num : SHN_UNDEF;
451 put_extern_sym2(sym, sh_num, value, size, 1);
454 /* add a new relocation entry to symbol 'sym' in section 's' */
455 ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
456 addr_t addend)
458 int c = 0;
460 if (nocode_wanted && s == cur_text_section)
461 return;
463 if (sym) {
464 if (0 == sym->c)
465 put_extern_sym(sym, NULL, 0, 0);
466 c = sym->c;
469 /* now we can add ELF relocation info */
470 put_elf_reloca(symtab_section, s, offset, type, c, addend);
473 #if PTR_SIZE == 4
474 ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
476 greloca(s, sym, offset, type, 0);
478 #endif
480 /* ------------------------------------------------------------------------- */
481 /* symbol allocator */
482 static Sym *__sym_malloc(void)
484 Sym *sym_pool, *sym, *last_sym;
485 int i;
487 sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
488 dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);
490 last_sym = sym_free_first;
491 sym = sym_pool;
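/* thread every entry of the freshly allocated pool onto the free list */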
492 for(i = 0; i < SYM_POOL_NB; i++) {
493 sym->next = last_sym;
494 last_sym = sym;
495 sym++;
497 sym_free_first = last_sym;
498 return last_sym;
501 static inline Sym *sym_malloc(void)
503 Sym *sym;
504 #ifndef SYM_DEBUG
505 sym = sym_free_first;
506 if (!sym)
507 sym = __sym_malloc();
508 sym_free_first = sym->next;
509 return sym;
510 #else
511 sym = tcc_malloc(sizeof(Sym));
512 return sym;
513 #endif
516 ST_INLN void sym_free(Sym *sym)
518 #ifndef SYM_DEBUG
519 sym->next = sym_free_first;
520 sym_free_first = sym;
521 #else
522 tcc_free(sym);
523 #endif
526 /* push, without hashing */
527 ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, int c)
529 Sym *s;
531 s = sym_malloc();
532 memset(s, 0, sizeof *s);
533 s->v = v;
534 s->type.t = t;
535 s->c = c;
536 /* add in stack */
537 s->prev = *ps;
538 *ps = s;
539 return s;
542 /* find a symbol and return its associated structure. 's' is the top
543 of the symbol stack */
544 ST_FUNC Sym *sym_find2(Sym *s, int v)
546 while (s) {
547 if (s->v == v)
548 return s;
549 else if (s->v == -1)
550 return NULL;
551 s = s->prev;
553 return NULL;
556 /* structure lookup */
557 ST_INLN Sym *struct_find(int v)
559 v -= TOK_IDENT;
560 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
561 return NULL;
562 return table_ident[v]->sym_struct;
565 /* find an identifier */
566 ST_INLN Sym *sym_find(int v)
568 v -= TOK_IDENT;
569 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
570 return NULL;
571 return table_ident[v]->sym_identifier;
574 static int sym_scope(Sym *s)
576 if (IS_ENUM_VAL (s->type.t))
577 return s->type.ref->sym_scope;
578 else
579 return s->sym_scope;
582 /* push a given symbol on the symbol stack */
583 ST_FUNC Sym *sym_push(int v, CType *type, int r, int c)
585 Sym *s, **ps;
586 TokenSym *ts;
588 if (local_stack)
589 ps = &local_stack;
590 else
591 ps = &global_stack;
592 s = sym_push2(ps, v, type->t, c);
593 s->type.ref = type->ref;
594 s->r = r;
595 /* don't record fields or anonymous symbols */
596 /* XXX: simplify */
597 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
598 /* record symbol in token array */
599 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
600 if (v & SYM_STRUCT)
601 ps = &ts->sym_struct;
602 else
603 ps = &ts->sym_identifier;
604 s->prev_tok = *ps;
605 *ps = s;
606 s->sym_scope = local_scope;
607 if (s->prev_tok && sym_scope(s->prev_tok) == s->sym_scope)
608 tcc_error("redeclaration of '%s'",
609 get_tok_str(v & ~SYM_STRUCT, NULL));
611 return s;
614 /* push a global identifier */
615 ST_FUNC Sym *global_identifier_push(int v, int t, int c)
617 Sym *s, **ps;
618 s = sym_push2(&global_stack, v, t, c);
619 s->r = VT_CONST | VT_SYM;
620 /* don't record anonymous symbol */
621 if (v < SYM_FIRST_ANOM) {
622 ps = &table_ident[v - TOK_IDENT]->sym_identifier;
623 /* modify the top most local identifier, so that sym_identifier will
624 point to 's' when popped; happens when called from inline asm */
625 while (*ps != NULL && (*ps)->sym_scope)
626 ps = &(*ps)->prev_tok;
627 s->prev_tok = *ps;
628 *ps = s;
630 return s;
633 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
634 pop them yet from the list, but do remove them from the token array. */
635 ST_FUNC void sym_pop(Sym **ptop, Sym *b, int keep)
637 Sym *s, *ss, **ps;
638 TokenSym *ts;
639 int v;
641 s = *ptop;
642 while(s != b) {
643 ss = s->prev;
644 v = s->v;
645 /* remove symbol in token array */
646 /* XXX: simplify */
647 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
648 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
649 if (v & SYM_STRUCT)
650 ps = &ts->sym_struct;
651 else
652 ps = &ts->sym_identifier;
653 *ps = s->prev_tok;
655 if (!keep)
656 sym_free(s);
657 s = ss;
659 if (!keep)
660 *ptop = b;
663 /* ------------------------------------------------------------------------- */
665 static void vsetc(CType *type, int r, CValue *vc)
667 int v;
669 if (vtop >= vstack + (VSTACK_SIZE - 1))
670 tcc_error("memory full (vstack)");
671 /* cannot leave values in cpu flags if other instructions are generated. Also
672 avoid leaving VT_JMP anywhere except on the top of the stack
673 because it would complicate the code generator.
675 Don't do this when nocode_wanted. vtop might come from
676 !nocode_wanted regions (see 88_codeopt.c) and transforming
677 it to a register without actually generating code is wrong
678 as their value might still be used for real. All values
679 we push under nocode_wanted will eventually be popped
680 again, so that the VT_CMP/VT_JMP value will be in vtop
681 when code is unsuppressed again.
683 Same logic below in vswap(); */
684 if (vtop >= vstack && !nocode_wanted) {
685 v = vtop->r & VT_VALMASK;
686 if (v == VT_CMP || (v & ~1) == VT_JMP)
687 gv(RC_INT);
690 vtop++;
691 vtop->type = *type;
692 vtop->r = r;
693 vtop->r2 = VT_CONST;
694 vtop->c = *vc;
695 vtop->sym = NULL;
698 ST_FUNC void vswap(void)
700 SValue tmp;
701 /* cannot vswap cpu flags. See comment at vsetc() above */
702 if (vtop >= vstack && !nocode_wanted) {
703 int v = vtop->r & VT_VALMASK;
704 if (v == VT_CMP || (v & ~1) == VT_JMP)
705 gv(RC_INT);
707 tmp = vtop[0];
708 vtop[0] = vtop[-1];
709 vtop[-1] = tmp;
712 /* pop stack value */
713 ST_FUNC void vpop(void)
715 int v;
716 v = vtop->r & VT_VALMASK;
717 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
718 /* for x86, we need to pop the FP stack */
719 if (v == TREG_ST0) {
720 o(0xd8dd); /* fstp %st(0) */
721 } else
722 #endif
723 if (v == VT_JMP || v == VT_JMPI) {
724 /* need to put correct jump if && or || without test */
725 gsym(vtop->c.i);
727 vtop--;
730 /* push a constant of type "type" with a dummy (unspecified) value */
731 ST_FUNC void vpush(CType *type)
733 vset(type, VT_CONST, 0);
736 /* push integer constant */
737 ST_FUNC void vpushi(int v)
739 CValue cval;
740 cval.i = v;
741 vsetc(&int_type, VT_CONST, &cval);
744 /* push a pointer sized constant */
745 static void vpushs(addr_t v)
747 CValue cval;
748 cval.i = v;
749 vsetc(&size_type, VT_CONST, &cval);
752 /* push arbitrary 64bit constant */
753 ST_FUNC void vpush64(int ty, unsigned long long v)
755 CValue cval;
756 CType ctype;
757 ctype.t = ty;
758 ctype.ref = NULL;
759 cval.i = v;
760 vsetc(&ctype, VT_CONST, &cval);
763 /* push long long constant */
764 static inline void vpushll(long long v)
766 vpush64(VT_LLONG, v);
769 ST_FUNC void vset(CType *type, int r, int v)
771 CValue cval;
773 cval.i = v;
774 vsetc(type, r, &cval);
777 static void vseti(int r, int v)
779 CType type;
780 type.t = VT_INT;
781 type.ref = NULL;
782 vset(&type, r, v);
785 ST_FUNC void vpushv(SValue *v)
787 if (vtop >= vstack + (VSTACK_SIZE - 1))
788 tcc_error("memory full (vstack)");
789 vtop++;
790 *vtop = *v;
793 static void vdup(void)
795 vpushv(vtop);
798 /* rotate n first stack elements to the bottom
799 I1 ... In -> I2 ... In I1 [top is right]
801 ST_FUNC void vrotb(int n)
803 int i;
804 SValue tmp;
806 tmp = vtop[-n + 1];
807 for(i=-n+1;i!=0;i++)
808 vtop[i] = vtop[i+1];
809 vtop[0] = tmp;
812 /* rotate the n elements before entry e towards the top
813 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
815 ST_FUNC void vrote(SValue *e, int n)
817 int i;
818 SValue tmp;
820 tmp = *e;
821 for(i = 0;i < n - 1; i++)
822 e[-i] = e[-i - 1];
823 e[-n + 1] = tmp;
826 /* rotate n first stack elements to the top
827 I1 ... In -> In I1 ... I(n-1) [top is right]
829 ST_FUNC void vrott(int n)
831 vrote(vtop, n);
834 /* push a symbol value of TYPE */
835 static inline void vpushsym(CType *type, Sym *sym)
837 CValue cval;
838 cval.i = 0;
839 vsetc(type, VT_CONST | VT_SYM, &cval);
840 vtop->sym = sym;
843 /* Return a static symbol pointing to a section */
844 ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
846 int v;
847 Sym *sym;
849 v = anon_sym++;
850 sym = sym_push(v, type, VT_CONST | VT_SYM, 0);
851 sym->type.t |= VT_STATIC;
852 put_extern_sym(sym, sec, offset, size);
853 return sym;
856 /* push a reference to a section offset by adding a dummy symbol */
857 static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
859 vpushsym(type, get_sym_ref(type, sec, offset, size));
862 /* define a new external reference to a symbol 'v' of type 'type' */
863 ST_FUNC Sym *external_global_sym(int v, CType *type)
865 Sym *s;
867 s = sym_find(v);
868 if (!s) {
869 /* push forward reference */
870 s = global_identifier_push(v, type->t | VT_EXTERN, 0);
871 s->type.ref = type->ref;
872 } else if (IS_ASM_SYM(s)) {
873 s->type.t = type->t | (s->type.t & VT_EXTERN);
874 s->type.ref = type->ref;
875 update_storage(s);
877 return s;
880 /* Merge symbol attributes. */
881 static void merge_symattr(struct SymAttr *sa, struct SymAttr *sa1)
883 if (sa1->aligned && !sa->aligned)
884 sa->aligned = sa1->aligned;
885 sa->packed |= sa1->packed;
886 sa->weak |= sa1->weak;
887 if (sa1->visibility != STV_DEFAULT) {
888 int vis = sa->visibility;
889 if (vis == STV_DEFAULT
890 || vis > sa1->visibility)
891 vis = sa1->visibility;
892 sa->visibility = vis;
894 sa->dllexport |= sa1->dllexport;
895 sa->nodecorate |= sa1->nodecorate;
896 sa->dllimport |= sa1->dllimport;
899 /* Merge function attributes. */
900 static void merge_funcattr(struct FuncAttr *fa, struct FuncAttr *fa1)
902 if (fa1->func_call && !fa->func_call)
903 fa->func_call = fa1->func_call;
904 if (fa1->func_type && !fa->func_type)
905 fa->func_type = fa1->func_type;
906 if (fa1->func_args && !fa->func_args)
907 fa->func_args = fa1->func_args;
910 /* Merge attributes. */
911 static void merge_attr(AttributeDef *ad, AttributeDef *ad1)
913 merge_symattr(&ad->a, &ad1->a);
914 merge_funcattr(&ad->f, &ad1->f);
916 if (ad1->section)
917 ad->section = ad1->section;
918 if (ad1->alias_target)
919 ad->alias_target = ad1->alias_target;
920 if (ad1->asm_label)
921 ad->asm_label = ad1->asm_label;
922 if (ad1->attr_mode)
923 ad->attr_mode = ad1->attr_mode;
926 /* Merge some type attributes. */
927 static void patch_type(Sym *sym, CType *type)
929 if (!(type->t & VT_EXTERN) || IS_ENUM_VAL(sym->type.t)) {
930 if (!(sym->type.t & VT_EXTERN))
931 tcc_error("redefinition of '%s'", get_tok_str(sym->v, NULL));
932 sym->type.t &= ~VT_EXTERN;
935 if (IS_ASM_SYM(sym)) {
936 /* stay static if both are static */
937 sym->type.t = type->t & (sym->type.t | ~VT_STATIC);
938 sym->type.ref = type->ref;
941 if (!is_compatible_types(&sym->type, type)) {
942 tcc_error("incompatible types for redefinition of '%s'",
943 get_tok_str(sym->v, NULL));
945 } else if ((sym->type.t & VT_BTYPE) == VT_FUNC) {
946 int static_proto = sym->type.t & VT_STATIC;
947 /* warn if static follows non-static function declaration */
948 if ((type->t & VT_STATIC) && !static_proto && !(type->t & VT_INLINE))
949 tcc_warning("static storage ignored for redefinition of '%s'",
950 get_tok_str(sym->v, NULL));
952 if (0 == (type->t & VT_EXTERN)) {
953 /* put complete type, use static from prototype */
954 sym->type.t = (type->t & ~VT_STATIC) | static_proto;
955 if (type->t & VT_INLINE)
956 sym->type.t = type->t;
957 sym->type.ref = type->ref;
960 } else {
961 if ((sym->type.t & VT_ARRAY) && type->ref->c >= 0) {
962 /* set array size if it was omitted in extern declaration */
963 if (sym->type.ref->c < 0)
964 sym->type.ref->c = type->ref->c;
965 else if (sym->type.ref->c != type->ref->c)
966 tcc_error("conflicting type for '%s'", get_tok_str(sym->v, NULL));
968 if ((type->t ^ sym->type.t) & VT_STATIC)
969 tcc_warning("storage mismatch for redefinition of '%s'",
970 get_tok_str(sym->v, NULL));
975 /* Merge some storage attributes. */
976 static void patch_storage(Sym *sym, AttributeDef *ad, CType *type)
978 if (type)
979 patch_type(sym, type);
981 #ifdef TCC_TARGET_PE
982 if (sym->a.dllimport != ad->a.dllimport)
983 tcc_error("incompatible dll linkage for redefinition of '%s'",
984 get_tok_str(sym->v, NULL));
985 #endif
986 merge_symattr(&sym->a, &ad->a);
987 if (ad->asm_label)
988 sym->asm_label = ad->asm_label;
989 update_storage(sym);
992 /* define a new external reference to a symbol 'v' */
993 static Sym *external_sym(int v, CType *type, int r, AttributeDef *ad)
995 Sym *s;
996 s = sym_find(v);
997 if (!s || (!IS_ASM_SYM(s) && !(s->type.t & VT_EXTERN)
998 && (s->type.t & VT_BTYPE) != VT_FUNC)) {
999 if (s && !is_compatible_types(&s->type, type))
1000 tcc_error("conflicting types for '%s'", get_tok_str(s->v, NULL));
1001 /* push forward reference */
1002 s = sym_push(v, type, r | VT_CONST | VT_SYM, 0);
1003 s->a = ad->a;
1004 s->sym_scope = 0;
1005 } else {
1006 if (s->type.ref == func_old_type.ref) {
1007 s->type.ref = type->ref;
1008 s->r = r | VT_CONST | VT_SYM;
1009 s->type.t |= VT_EXTERN;
1011 patch_storage(s, ad, type);
1013 return s;
1016 /* push a reference to global symbol v */
1017 ST_FUNC void vpush_global_sym(CType *type, int v)
1019 vpushsym(type, external_global_sym(v, type));
1022 /* save registers up to (vtop - n) stack entry */
1023 ST_FUNC void save_regs(int n)
1025 SValue *p, *p1;
1026 for(p = vstack, p1 = vtop - n; p <= p1; p++)
1027 save_reg(p->r);
1030 /* save r to the memory stack, and mark it as being free */
1031 ST_FUNC void save_reg(int r)
1033 save_reg_upstack(r, 0);
1036 /* save r to the memory stack, and mark it as being free,
1037 if seen up to (vtop - n) stack entry */
1038 ST_FUNC void save_reg_upstack(int r, int n)
1040 int l, saved, size, align;
1041 SValue *p, *p1, sv;
1042 CType *type;
1044 if ((r &= VT_VALMASK) >= VT_CONST)
1045 return;
1046 if (nocode_wanted)
1047 return;
1049 /* modify all stack values */
1050 saved = 0;
1051 l = 0;
1052 for(p = vstack, p1 = vtop - n; p <= p1; p++) {
1053 if ((p->r & VT_VALMASK) == r ||
1054 ((p->type.t & VT_BTYPE) == VT_LLONG && (p->r2 & VT_VALMASK) == r)) {
1055 /* must save value on stack if not already done */
1056 if (!saved) {
1057 /* NOTE: must reload 'r' because r might be equal to r2 */
1058 r = p->r & VT_VALMASK;
1059 /* store register in the stack */
1060 type = &p->type;
1061 if ((p->r & VT_LVAL) ||
1062 (!is_float(type->t) && (type->t & VT_BTYPE) != VT_LLONG))
1063 #if PTR_SIZE == 8
1064 type = &char_pointer_type;
1065 #else
1066 type = &int_type;
1067 #endif
1068 size = type_size(type, &align);
1069 l=get_temp_local_var(size,align);
1070 sv.type.t = type->t;
1071 sv.r = VT_LOCAL | VT_LVAL;
1072 sv.c.i = l;
1073 store(r, &sv);
1074 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1075 /* x86 specific: need to pop fp register ST0 if saved */
1076 if (r == TREG_ST0) {
1077 o(0xd8dd); /* fstp %st(0) */
1079 #endif
1080 #if PTR_SIZE == 4
1081 /* special long long case */
1082 if ((type->t & VT_BTYPE) == VT_LLONG) {
1083 sv.c.i += 4;
1084 store(p->r2, &sv);
1086 #endif
1087 saved = 1;
1089 /* mark that stack entry as being saved on the stack */
1090 if (p->r & VT_LVAL) {
1091 /* also clear the bounded flag because the
1092 relocation address of the function was stored in
1093 p->c.i */
1094 p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
1095 } else {
1096 p->r = lvalue_type(p->type.t) | VT_LOCAL;
1098 p->r2 = VT_CONST;
1099 p->c.i = l;
1104 #ifdef TCC_TARGET_ARM
1105 /* find a register of class 'rc2' with at most one reference on stack.
1106 * If none, call get_reg(rc) */
1107 ST_FUNC int get_reg_ex(int rc, int rc2)
1109 int r;
1110 SValue *p;
1112 for(r=0;r<NB_REGS;r++) {
1113 if (reg_classes[r] & rc2) {
1114 int n;
1115 n=0;
1116 for(p = vstack; p <= vtop; p++) {
1117 if ((p->r & VT_VALMASK) == r ||
1118 (p->r2 & VT_VALMASK) == r)
1119 n++;
1121 if (n <= 1)
1122 return r;
1125 return get_reg(rc);
1127 #endif
1129 /* find a free register of class 'rc'. If none, save one register */
1130 ST_FUNC int get_reg(int rc)
1132 int r;
1133 SValue *p;
1135 /* find a free register */
1136 for(r=0;r<NB_REGS;r++) {
1137 if (reg_classes[r] & rc) {
1138 if (nocode_wanted)
1139 return r;
1140 for(p=vstack;p<=vtop;p++) {
1141 if ((p->r & VT_VALMASK) == r ||
1142 (p->r2 & VT_VALMASK) == r)
1143 goto notfound;
1145 return r;
1147 notfound: ;
1150 /* no register left : free the first one on the stack (VERY
1151 IMPORTANT to start from the bottom to ensure that we don't
1152 spill registers used in gen_opi()) */
1153 for(p=vstack;p<=vtop;p++) {
1154 /* look at second register (if long long) */
1155 r = p->r2 & VT_VALMASK;
1156 if (r < VT_CONST && (reg_classes[r] & rc))
1157 goto save_found;
1158 r = p->r & VT_VALMASK;
1159 if (r < VT_CONST && (reg_classes[r] & rc)) {
1160 save_found:
1161 save_reg(r);
1162 return r;
1165 /* Should never come here */
1166 return -1;
1169 /* find a free temporary local variable matching the given size and alignment (return its offset on the stack). If none is found, add a new temporary stack variable */
1170 static int get_temp_local_var(int size,int align){
1171 int i;
1172 struct temp_local_variable *temp_var;
1173 int found_var;
1174 SValue *p;
1175 int r;
1176 char free;
1177 char found;
1178 found=0;
1179 for(i=0;i<nb_temp_local_vars;i++){
1180 temp_var=&arr_temp_local_vars[i];
1181 if(temp_var->size<size||align!=temp_var->align){
1182 continue;
1184 /*check if temp_var is free*/
1185 free=1;
1186 for(p=vstack;p<=vtop;p++) {
1187 r=p->r&VT_VALMASK;
1188 if(r==VT_LOCAL||r==VT_LLOCAL){
1189 if(p->c.i==temp_var->location){
1190 free=0;
1191 break;
1195 if(free){
1196 found_var=temp_var->location;
1197 found=1;
1198 break;
1201 if(!found){
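/* no reusable slot of suitable size/alignment: carve a new temporary out of the stack frame */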
1202 loc = (loc - size) & -align;
1203 if(nb_temp_local_vars<MAX_TEMP_LOCAL_VARIABLE_NUMBER){
1204 temp_var=&arr_temp_local_vars[i];
1205 temp_var->location=loc;
1206 temp_var->size=size;
1207 temp_var->align=align;
1208 nb_temp_local_vars++;
1210 found_var=loc;
1212 return found_var;
1215 static void clear_temp_local_var_list(){
1216 nb_temp_local_vars=0;
1219 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1220 if needed */
1221 static void move_reg(int r, int s, int t)
1223 SValue sv;
1225 if (r != s) {
1226 save_reg(r);
1227 sv.type.t = t;
1228 sv.type.ref = NULL;
1229 sv.r = s;
1230 sv.c.i = 0;
1231 load(r, &sv);
1235 /* get address of vtop (vtop MUST BE an lvalue) */
1236 ST_FUNC void gaddrof(void)
1238 vtop->r &= ~VT_LVAL;
1239 /* tricky: if saved lvalue, then we can go back to lvalue */
1240 if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
1241 vtop->r = (vtop->r & ~(VT_VALMASK | VT_LVAL_TYPE)) | VT_LOCAL | VT_LVAL;
1246 #ifdef CONFIG_TCC_BCHECK
1247 /* generate lvalue bound code */
1248 static void gbound(void)
1250 int lval_type;
1251 CType type1;
1253 vtop->r &= ~VT_MUSTBOUND;
1254 /* if lvalue, then use checking code before dereferencing */
1255 if (vtop->r & VT_LVAL) {
1256 /* if not VT_BOUNDED value, then make one */
1257 if (!(vtop->r & VT_BOUNDED)) {
1258 lval_type = vtop->r & (VT_LVAL_TYPE | VT_LVAL);
1259 /* must save type because we must set it to int to get pointer */
1260 type1 = vtop->type;
1261 vtop->type.t = VT_PTR;
1262 gaddrof();
1263 vpushi(0);
1264 gen_bounded_ptr_add();
1265 vtop->r |= lval_type;
1266 vtop->type = type1;
1268 /* then check for dereferencing */
1269 gen_bounded_ptr_deref();
1272 #endif
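/* advance the bitfield address by 'o' bytes: treat vtop as a char pointer,
   add the offset and re-mark the result as an unsigned byte lvalue */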
1274 static void incr_bf_adr(int o)
1276 vtop->type = char_pointer_type;
1277 gaddrof();
1278 vpushi(o);
1279 gen_op('+');
1280 vtop->type.t = (vtop->type.t & ~(VT_BTYPE|VT_DEFSIGN))
1281 | (VT_BYTE|VT_UNSIGNED);
1282 vtop->r = (vtop->r & ~VT_LVAL_TYPE)
1283 | (VT_LVAL_BYTE|VT_LVAL_UNSIGNED|VT_LVAL);
1286 /* single-byte load mode for packed or otherwise unaligned bitfields */
1287 static void load_packed_bf(CType *type, int bit_pos, int bit_size)
1289 int n, o, bits;
1290 save_reg_upstack(vtop->r, 1);
1291 vpush64(type->t & VT_BTYPE, 0); // B X
1292 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
1293 do {
1294 vswap(); // X B
1295 incr_bf_adr(o);
1296 vdup(); // X B B
1297 n = 8 - bit_pos;
1298 if (n > bit_size)
1299 n = bit_size;
1300 if (bit_pos)
1301 vpushi(bit_pos), gen_op(TOK_SHR), bit_pos = 0; // X B Y
1302 if (n < 8)
1303 vpushi((1 << n) - 1), gen_op('&');
1304 gen_cast(type);
1305 if (bits)
1306 vpushi(bits), gen_op(TOK_SHL);
1307 vrotb(3); // B Y X
1308 gen_op('|'); // B X
1309 bits += n, bit_size -= n, o = 1;
1310 } while (bit_size);
1311 vswap(), vpop();
1312 if (!(type->t & VT_UNSIGNED)) {
1313 n = ((type->t & VT_BTYPE) == VT_LLONG ? 64 : 32) - bits;
1314 vpushi(n), gen_op(TOK_SHL);
1315 vpushi(n), gen_op(TOK_SAR);
1319 /* single-byte store mode for packed or otherwise unaligned bitfields */
1320 static void store_packed_bf(int bit_pos, int bit_size)
1322 int bits, n, o, m, c;
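/* 'c' is set when the value being stored is a plain constant, in which case
   vdup() suffices and no register duplication (gv_dup) is needed */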
1324 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1325 vswap(); // X B
1326 save_reg_upstack(vtop->r, 1);
1327 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
1328 do {
1329 incr_bf_adr(o); // X B
1330 vswap(); //B X
1331 c ? vdup() : gv_dup(); // B V X
1332 vrott(3); // X B V
1333 if (bits)
1334 vpushi(bits), gen_op(TOK_SHR);
1335 if (bit_pos)
1336 vpushi(bit_pos), gen_op(TOK_SHL);
1337 n = 8 - bit_pos;
1338 if (n > bit_size)
1339 n = bit_size;
1340 if (n < 8) {
1341 m = ((1 << n) - 1) << bit_pos;
1342 vpushi(m), gen_op('&'); // X B V1
1343 vpushv(vtop-1); // X B V1 B
1344 vpushi(m & 0x80 ? ~m & 0x7f : ~m);
1345 gen_op('&'); // X B V1 B1
1346 gen_op('|'); // X B V2
1348 vdup(), vtop[-1] = vtop[-2]; // X B B V2
1349 vstore(), vpop(); // X B
1350 bits += n, bit_size -= n, bit_pos = 0, o = 1;
1351 } while (bit_size);
1352 vpop(), vpop();
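/* adjust_bf: if the bitfield was laid out with an alternative access type
   (type.ref->auxtype), retype the SValue accordingly; VT_STRUCT means only
   byte-wise (packed) access is possible, -1 means no adjustment */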
1355 static int adjust_bf(SValue *sv, int bit_pos, int bit_size)
1357 int t;
1358 if (0 == sv->type.ref)
1359 return 0;
1360 t = sv->type.ref->auxtype;
1361 if (t != -1 && t != VT_STRUCT) {
1362 sv->type.t = (sv->type.t & ~VT_BTYPE) | t;
1363 sv->r = (sv->r & ~VT_LVAL_TYPE) | lvalue_type(sv->type.t);
1365 return t;
1368 /* store vtop in a register belonging to class 'rc'. lvalues are
1369 converted to values. Cannot be used if the value cannot be converted
1370 to a register value (such as structures). */
1371 ST_FUNC int gv(int rc)
1373 int r, bit_pos, bit_size, size, align, rc2;
1375 /* NOTE: get_reg can modify vstack[] */
1376 if (vtop->type.t & VT_BITFIELD) {
1377 CType type;
1379 bit_pos = BIT_POS(vtop->type.t);
1380 bit_size = BIT_SIZE(vtop->type.t);
1381 /* remove bit field info to avoid loops */
1382 vtop->type.t &= ~VT_STRUCT_MASK;
1384 type.ref = NULL;
1385 type.t = vtop->type.t & VT_UNSIGNED;
1386 if ((vtop->type.t & VT_BTYPE) == VT_BOOL)
1387 type.t |= VT_UNSIGNED;
1389 r = adjust_bf(vtop, bit_pos, bit_size);
1391 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
1392 type.t |= VT_LLONG;
1393 else
1394 type.t |= VT_INT;
1396 if (r == VT_STRUCT) {
1397 load_packed_bf(&type, bit_pos, bit_size);
1398 } else {
1399 int bits = (type.t & VT_BTYPE) == VT_LLONG ? 64 : 32;
1400 /* cast to int to propagate signedness in following ops */
1401 gen_cast(&type);
1402 /* generate shifts */
1403 vpushi(bits - (bit_pos + bit_size));
1404 gen_op(TOK_SHL);
1405 vpushi(bits - bit_size);
1406 /* NOTE: transformed to SHR if unsigned */
1407 gen_op(TOK_SAR);
1409 r = gv(rc);
1410 } else {
1411 if (is_float(vtop->type.t) &&
1412 (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1413 unsigned long offset;
1414 /* CPUs usually cannot use float constants, so we store them
1415 generically in data segment */
1416 size = type_size(&vtop->type, &align);
1417 if (NODATA_WANTED)
1418 size = 0, align = 1;
1419 offset = section_add(data_section, size, align);
1420 vpush_ref(&vtop->type, data_section, offset, size);
1421 vswap();
1422 init_putv(&vtop->type, data_section, offset);
1423 vtop->r |= VT_LVAL;
1425 #ifdef CONFIG_TCC_BCHECK
1426 if (vtop->r & VT_MUSTBOUND)
1427 gbound();
1428 #endif
1430 r = vtop->r & VT_VALMASK;
1431 rc2 = (rc & RC_FLOAT) ? RC_FLOAT : RC_INT;
1432 #ifndef TCC_TARGET_ARM64
1433 if (rc == RC_IRET)
1434 rc2 = RC_LRET;
1435 #ifdef TCC_TARGET_X86_64
1436 else if (rc == RC_FRET)
1437 rc2 = RC_QRET;
1438 #endif
1439 #endif
1440 /* need to reload if:
1441 - constant
1442 - lvalue (need to dereference pointer)
1443 - already a register, but not in the right class */
1444 if (r >= VT_CONST
1445 || (vtop->r & VT_LVAL)
1446 || !(reg_classes[r] & rc)
1447 #if PTR_SIZE == 8
1448 || ((vtop->type.t & VT_BTYPE) == VT_QLONG && !(reg_classes[vtop->r2] & rc2))
1449 || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT && !(reg_classes[vtop->r2] & rc2))
1450 #else
1451 || ((vtop->type.t & VT_BTYPE) == VT_LLONG && !(reg_classes[vtop->r2] & rc2))
1452 #endif
1455 r = get_reg(rc);
1456 #if PTR_SIZE == 8
1457 if (((vtop->type.t & VT_BTYPE) == VT_QLONG) || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT)) {
1458 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
1459 #else
1460 if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
1461 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
1462 unsigned long long ll;
1463 #endif
1464 int r2, original_type;
1465 original_type = vtop->type.t;
1466 /* two register type load : expand to two words
1467 temporarily */
1468 #if PTR_SIZE == 4
1469 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1470 /* load constant */
1471 ll = vtop->c.i;
1472 vtop->c.i = ll; /* first word */
1473 load(r, vtop);
1474 vtop->r = r; /* save register value */
1475 vpushi(ll >> 32); /* second word */
1476 } else
1477 #endif
1478 if (vtop->r & VT_LVAL) {
1479 /* We do not want to modify the long long
1480 pointer here, so the safest (and least
1481 efficient) approach is to save all the other
1482 registers on the stack. XXX: totally inefficient. */
1483 #if 0
1484 save_regs(1);
1485 #else
1486 /* lvalue_save: save only if used further down the stack */
1487 save_reg_upstack(vtop->r, 1);
1488 #endif
1489 /* load from memory */
1490 vtop->type.t = load_type;
1491 load(r, vtop);
1492 vdup();
1493 vtop[-1].r = r; /* save register value */
1494 /* increment pointer to get second word */
1495 vtop->type.t = addr_type;
1496 gaddrof();
1497 vpushi(load_size);
1498 gen_op('+');
1499 vtop->r |= VT_LVAL;
1500 vtop->type.t = load_type;
1501 } else {
1502 /* move registers */
1503 load(r, vtop);
1504 vdup();
1505 vtop[-1].r = r; /* save register value */
1506 vtop->r = vtop[-1].r2;
1508 /* Allocate second register. Here we rely on the fact that
1509 get_reg() tries first to free r2 of an SValue. */
1510 r2 = get_reg(rc2);
1511 load(r2, vtop);
1512 vpop();
1513 /* write second register */
1514 vtop->r2 = r2;
1515 vtop->type.t = original_type;
1516 } else if ((vtop->r & VT_LVAL) && !is_float(vtop->type.t)) {
1517 int t1, t;
1518 /* lvalue of scalar type : need to use lvalue type
1519 because of possible cast */
1520 t = vtop->type.t;
1521 t1 = t;
1522 /* compute memory access type */
1523 if (vtop->r & VT_LVAL_BYTE)
1524 t = VT_BYTE;
1525 else if (vtop->r & VT_LVAL_SHORT)
1526 t = VT_SHORT;
1527 if (vtop->r & VT_LVAL_UNSIGNED)
1528 t |= VT_UNSIGNED;
1529 vtop->type.t = t;
1530 load(r, vtop);
1531 /* restore wanted type */
1532 vtop->type.t = t1;
1533 } else {
1534 /* one register type load */
1535 load(r, vtop);
1538 vtop->r = r;
1539 #ifdef TCC_TARGET_C67
1540 /* uses register pairs for doubles */
1541 if ((vtop->type.t & VT_BTYPE) == VT_DOUBLE)
1542 vtop->r2 = r+1;
1543 #endif
1545 return r;
1548 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1549 ST_FUNC void gv2(int rc1, int rc2)
1551 int v;
1553 /* generate more generic register first. But VT_JMP or VT_CMP
1554 values must be generated first in all cases to avoid possible
1555 reload errors */
1556 v = vtop[0].r & VT_VALMASK;
1557 if (v != VT_CMP && (v & ~1) != VT_JMP && rc1 <= rc2) {
1558 vswap();
1559 gv(rc1);
1560 vswap();
1561 gv(rc2);
1562 /* test if reload is needed for first register */
1563 if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
1564 vswap();
1565 gv(rc1);
1566 vswap();
1568 } else {
1569 gv(rc2);
1570 vswap();
1571 gv(rc1);
1572 vswap();
1573 /* test if reload is needed for first register */
1574 if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
1575 gv(rc2);
1580 #ifndef TCC_TARGET_ARM64
1581 /* wrapper around RC_FRET to return a register by type */
1582 static int rc_fret(int t)
1584 #ifdef TCC_TARGET_X86_64
1585 if (t == VT_LDOUBLE) {
1586 return RC_ST0;
1588 #endif
1589 return RC_FRET;
1591 #endif
1593 /* wrapper around REG_FRET to return a register by type */
1594 static int reg_fret(int t)
1596 #ifdef TCC_TARGET_X86_64
1597 if (t == VT_LDOUBLE) {
1598 return TREG_ST0;
1600 #endif
1601 return REG_FRET;
1604 #if PTR_SIZE == 4
1605 /* expand 64bit on stack in two ints */
1606 ST_FUNC void lexpand(void)
1608 int u, v;
1609 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1610 v = vtop->r & (VT_VALMASK | VT_LVAL);
1611 if (v == VT_CONST) {
1612 vdup();
1613 vtop[0].c.i >>= 32;
1614 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1615 vdup();
1616 vtop[0].c.i += 4;
1617 } else {
1618 gv(RC_INT);
1619 vdup();
1620 vtop[0].r = vtop[-1].r2;
1621 vtop[0].r2 = vtop[-1].r2 = VT_CONST;
1623 vtop[0].type.t = vtop[-1].type.t = VT_INT | u;
1625 #endif
1627 #if PTR_SIZE == 4
1628 /* build a long long from two ints */
1629 static void lbuild(int t)
1631 gv2(RC_INT, RC_INT);
1632 vtop[-1].r2 = vtop[0].r;
1633 vtop[-1].type.t = t;
1634 vpop();
1636 #endif
1638 /* convert stack entry to register and duplicate its value in another
1639 register */
1640 static void gv_dup(void)
1642 int rc, t, r, r1;
1643 SValue sv;
1645 t = vtop->type.t;
1646 #if PTR_SIZE == 4
1647 if ((t & VT_BTYPE) == VT_LLONG) {
1648 if (t & VT_BITFIELD) {
1649 gv(RC_INT);
1650 t = vtop->type.t;
1652 lexpand();
1653 gv_dup();
1654 vswap();
1655 vrotb(3);
1656 gv_dup();
1657 vrotb(4);
1658 /* stack: H L L1 H1 */
1659 lbuild(t);
1660 vrotb(3);
1661 vrotb(3);
1662 vswap();
1663 lbuild(t);
1664 vswap();
1665 } else
1666 #endif
1668 /* duplicate value */
1669 rc = RC_INT;
1670 sv.type.t = VT_INT;
1671 if (is_float(t)) {
1672 rc = RC_FLOAT;
1673 #ifdef TCC_TARGET_X86_64
1674 if ((t & VT_BTYPE) == VT_LDOUBLE) {
1675 rc = RC_ST0;
1677 #endif
1678 sv.type.t = t;
1680 r = gv(rc);
1681 r1 = get_reg(rc);
1682 sv.r = r;
1683 sv.c.i = 0;
1684 load(r1, &sv); /* move r to r1 */
1685 vdup();
1686 /* duplicates value */
1687 if (r != r1)
1688 vtop->r = r1;
1692 /* Generate value test
1694 * Generate a test for any value (jump, comparison and integers) */
1695 ST_FUNC int gvtst(int inv, int t)
1697 int v = vtop->r & VT_VALMASK;
1698 if (v != VT_CMP && v != VT_JMP && v != VT_JMPI) {
1699 vpushi(0);
1700 gen_op(TOK_NE);
1702 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1703 /* constant jmp optimization */
1704 if ((vtop->c.i != 0) != inv)
1705 t = gjmp(t);
1706 vtop--;
1707 return t;
1709 return gtst(inv, t);
1712 #if PTR_SIZE == 4
1713 /* generate CPU independent (unsigned) long long operations */
1714 static void gen_opl(int op)
1716 int t, a, b, op1, c, i;
1717 int func;
1718 unsigned short reg_iret = REG_IRET;
1719 unsigned short reg_lret = REG_LRET;
1720 SValue tmp;
1722 switch(op) {
1723 case '/':
1724 case TOK_PDIV:
1725 func = TOK___divdi3;
1726 goto gen_func;
1727 case TOK_UDIV:
1728 func = TOK___udivdi3;
1729 goto gen_func;
1730 case '%':
1731 func = TOK___moddi3;
1732 goto gen_mod_func;
1733 case TOK_UMOD:
1734 func = TOK___umoddi3;
1735 gen_mod_func:
1736 #ifdef TCC_ARM_EABI
1737 reg_iret = TREG_R2;
1738 reg_lret = TREG_R3;
1739 #endif
1740 gen_func:
1741 /* call generic long long function */
1742 vpush_global_sym(&func_old_type, func);
1743 vrott(3);
1744 gfunc_call(2);
1745 vpushi(0);
1746 vtop->r = reg_iret;
1747 vtop->r2 = reg_lret;
1748 break;
1749 case '^':
1750 case '&':
1751 case '|':
1752 case '*':
1753 case '+':
1754 case '-':
1755 //pv("gen_opl A",0,2);
1756 t = vtop->type.t;
1757 vswap();
1758 lexpand();
1759 vrotb(3);
1760 lexpand();
1761 /* stack: L1 H1 L2 H2 */
1762 tmp = vtop[0];
1763 vtop[0] = vtop[-3];
1764 vtop[-3] = tmp;
1765 tmp = vtop[-2];
1766 vtop[-2] = vtop[-3];
1767 vtop[-3] = tmp;
1768 vswap();
1769 /* stack: H1 H2 L1 L2 */
1770 //pv("gen_opl B",0,4);
1771 if (op == '*') {
1772 vpushv(vtop - 1);
1773 vpushv(vtop - 1);
1774 gen_op(TOK_UMULL);
1775 lexpand();
1776 /* stack: H1 H2 L1 L2 ML MH */
1777 for(i=0;i<4;i++)
1778 vrotb(6);
1779 /* stack: ML MH H1 H2 L1 L2 */
1780 tmp = vtop[0];
1781 vtop[0] = vtop[-2];
1782 vtop[-2] = tmp;
1783 /* stack: ML MH H1 L2 H2 L1 */
1784 gen_op('*');
1785 vrotb(3);
1786 vrotb(3);
1787 gen_op('*');
1788 /* stack: ML MH M1 M2 */
1789 gen_op('+');
1790 gen_op('+');
1791 } else if (op == '+' || op == '-') {
1792 /* XXX: add non carry method too (for MIPS or alpha) */
1793 if (op == '+')
1794 op1 = TOK_ADDC1;
1795 else
1796 op1 = TOK_SUBC1;
1797 gen_op(op1);
1798 /* stack: H1 H2 (L1 op L2) */
1799 vrotb(3);
1800 vrotb(3);
1801 gen_op(op1 + 1); /* TOK_xxxC2 */
1802 } else {
1803 gen_op(op);
1804 /* stack: H1 H2 (L1 op L2) */
1805 vrotb(3);
1806 vrotb(3);
1807 /* stack: (L1 op L2) H1 H2 */
1808 gen_op(op);
1809 /* stack: (L1 op L2) (H1 op H2) */
1811 /* stack: L H */
1812 lbuild(t);
1813 break;
1814 case TOK_SAR:
1815 case TOK_SHR:
1816 case TOK_SHL:
1817 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1818 t = vtop[-1].type.t;
1819 vswap();
1820 lexpand();
1821 vrotb(3);
1822 /* stack: L H shift */
1823 c = (int)vtop->c.i;
1824 /* constant: simpler */
1825 /* NOTE: all comments are for SHL. the other cases are
1826 done by swapping words */
1827 vpop();
1828 if (op != TOK_SHL)
1829 vswap();
1830 if (c >= 32) {
1831 /* stack: L H */
1832 vpop();
1833 if (c > 32) {
1834 vpushi(c - 32);
1835 gen_op(op);
1837 if (op != TOK_SAR) {
1838 vpushi(0);
1839 } else {
1840 gv_dup();
1841 vpushi(31);
1842 gen_op(TOK_SAR);
1844 vswap();
1845 } else {
1846 vswap();
1847 gv_dup();
1848 /* stack: H L L */
1849 vpushi(c);
1850 gen_op(op);
1851 vswap();
1852 vpushi(32 - c);
1853 if (op == TOK_SHL)
1854 gen_op(TOK_SHR);
1855 else
1856 gen_op(TOK_SHL);
1857 vrotb(3);
1858 /* stack: L L H */
1859 vpushi(c);
1860 if (op == TOK_SHL)
1861 gen_op(TOK_SHL);
1862 else
1863 gen_op(TOK_SHR);
1864 gen_op('|');
1866 if (op != TOK_SHL)
1867 vswap();
1868 lbuild(t);
1869 } else {
1870 /* XXX: should provide a faster fallback on x86 ? */
1871 switch(op) {
1872 case TOK_SAR:
1873 func = TOK___ashrdi3;
1874 goto gen_func;
1875 case TOK_SHR:
1876 func = TOK___lshrdi3;
1877 goto gen_func;
1878 case TOK_SHL:
1879 func = TOK___ashldi3;
1880 goto gen_func;
1883 break;
1884 default:
1885 /* compare operations */
1886 t = vtop->type.t;
1887 vswap();
1888 lexpand();
1889 vrotb(3);
1890 lexpand();
1891 /* stack: L1 H1 L2 H2 */
1892 tmp = vtop[-1];
1893 vtop[-1] = vtop[-2];
1894 vtop[-2] = tmp;
1895 /* stack: L1 L2 H1 H2 */
1896 /* compare high */
1897 op1 = op;
1898 /* when values are equal, we need to compare low words. since
1899 the jump is inverted, we invert the test too. */
1900 if (op1 == TOK_LT)
1901 op1 = TOK_LE;
1902 else if (op1 == TOK_GT)
1903 op1 = TOK_GE;
1904 else if (op1 == TOK_ULT)
1905 op1 = TOK_ULE;
1906 else if (op1 == TOK_UGT)
1907 op1 = TOK_UGE;
1908 a = 0;
1909 b = 0;
1910 gen_op(op1);
1911 if (op == TOK_NE) {
1912 b = gvtst(0, 0);
1913 } else {
1914 a = gvtst(1, 0);
1915 if (op != TOK_EQ) {
1916 /* generate non equal test */
1917 vpushi(TOK_NE);
1918 vtop->r = VT_CMP;
1919 b = gvtst(0, 0);
1922 /* compare low. Always unsigned */
1923 op1 = op;
1924 if (op1 == TOK_LT)
1925 op1 = TOK_ULT;
1926 else if (op1 == TOK_LE)
1927 op1 = TOK_ULE;
1928 else if (op1 == TOK_GT)
1929 op1 = TOK_UGT;
1930 else if (op1 == TOK_GE)
1931 op1 = TOK_UGE;
1932 gen_op(op1);
1933 a = gvtst(1, a);
1934 gsym(b);
1935 vseti(VT_JMPI, a);
1936 break;
1939 #endif
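/* signed 64-bit division on operands carried in uint64_t: divide the
   magnitudes, then negate the quotient if the operand signs differ */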
1941 static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
1943 uint64_t x = (a >> 63 ? -a : a) / (b >> 63 ? -b : b);
1944 return (a ^ b) >> 63 ? -x : x;
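/* signed 64-bit '<' on operands carried in uint64_t: flipping the sign bit
   maps signed order onto unsigned order */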
1947 static int gen_opic_lt(uint64_t a, uint64_t b)
1949 return (a ^ (uint64_t)1 << 63) < (b ^ (uint64_t)1 << 63);
1952 /* handle integer constant optimizations and various
1953 machine-independent optimizations */
1954 static void gen_opic(int op)
1956 SValue *v1 = vtop - 1;
1957 SValue *v2 = vtop;
1958 int t1 = v1->type.t & VT_BTYPE;
1959 int t2 = v2->type.t & VT_BTYPE;
1960 int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1961 int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1962 uint64_t l1 = c1 ? v1->c.i : 0;
1963 uint64_t l2 = c2 ? v2->c.i : 0;
1964 int shm = (t1 == VT_LLONG) ? 63 : 31;
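/* for non-64-bit operands, truncate the constants to 32 bits and
   sign-extend them unless the type is unsigned */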
1966 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
1967 l1 = ((uint32_t)l1 |
1968 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
1969 if (t2 != VT_LLONG && (PTR_SIZE != 8 || t2 != VT_PTR))
1970 l2 = ((uint32_t)l2 |
1971 (v2->type.t & VT_UNSIGNED ? 0 : -(l2 & 0x80000000)));
1973 if (c1 && c2) {
1974 switch(op) {
1975 case '+': l1 += l2; break;
1976 case '-': l1 -= l2; break;
1977 case '&': l1 &= l2; break;
1978 case '^': l1 ^= l2; break;
1979 case '|': l1 |= l2; break;
1980 case '*': l1 *= l2; break;
1982 case TOK_PDIV:
1983 case '/':
1984 case '%':
1985 case TOK_UDIV:
1986 case TOK_UMOD:
1987 /* if division by zero, generate explicit division */
1988 if (l2 == 0) {
1989 if (const_wanted)
1990 tcc_error("division by zero in constant");
1991 goto general_case;
1993 switch(op) {
1994 default: l1 = gen_opic_sdiv(l1, l2); break;
1995 case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
1996 case TOK_UDIV: l1 = l1 / l2; break;
1997 case TOK_UMOD: l1 = l1 % l2; break;
1999 break;
2000 case TOK_SHL: l1 <<= (l2 & shm); break;
2001 case TOK_SHR: l1 >>= (l2 & shm); break;
2002 case TOK_SAR:
2003 l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
2004 break;
2005 /* tests */
2006 case TOK_ULT: l1 = l1 < l2; break;
2007 case TOK_UGE: l1 = l1 >= l2; break;
2008 case TOK_EQ: l1 = l1 == l2; break;
2009 case TOK_NE: l1 = l1 != l2; break;
2010 case TOK_ULE: l1 = l1 <= l2; break;
2011 case TOK_UGT: l1 = l1 > l2; break;
2012 case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
2013 case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
2014 case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
2015 case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
2016 /* logical */
2017 case TOK_LAND: l1 = l1 && l2; break;
2018 case TOK_LOR: l1 = l1 || l2; break;
2019 default:
2020 goto general_case;
2022 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
2023 l1 = ((uint32_t)l1 |
2024 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
2025 v1->c.i = l1;
2026 vtop--;
2027 } else {
2028 /* if commutative ops, put c2 as constant */
2029 if (c1 && (op == '+' || op == '&' || op == '^' ||
2030 op == '|' || op == '*')) {
2031 vswap();
2032 c2 = c1; //c = c1, c1 = c2, c2 = c;
2033 l2 = l1; //l = l1, l1 = l2, l2 = l;
2035 if (!const_wanted &&
2036 c1 && ((l1 == 0 &&
2037 (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
2038 (l1 == -1 && op == TOK_SAR))) {
2039 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
2040 vtop--;
2041 } else if (!const_wanted &&
2042 c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
2043 (op == '|' &&
2044 (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))) ||
2045 (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
2046 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
2047 if (l2 == 1)
2048 vtop->c.i = 0;
2049 vswap();
2050 vtop--;
2051 } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
2052 op == TOK_PDIV) &&
2053 l2 == 1) ||
2054 ((op == '+' || op == '-' || op == '|' || op == '^' ||
2055 op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
2056 l2 == 0) ||
2057 (op == '&' &&
2058 (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))))) {
2059 /* filter out NOP operations like x*1, x-0, x&-1... */
2060 vtop--;
2061 } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
2062 /* try to use shifts instead of muls or divs */
2063 if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
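/* l2 is a power of two: the loop below computes its log2, which becomes the shift amount */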
2064 int n = -1;
2065 while (l2) {
2066 l2 >>= 1;
2067 n++;
2069 vtop->c.i = n;
2070 if (op == '*')
2071 op = TOK_SHL;
2072 else if (op == TOK_PDIV)
2073 op = TOK_SAR;
2074 else
2075 op = TOK_SHR;
2077 goto general_case;
2078 } else if (c2 && (op == '+' || op == '-') &&
2079 (((vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM))
2080 || (vtop[-1].r & (VT_VALMASK | VT_LVAL)) == VT_LOCAL)) {
2081 /* symbol + constant case */
2082 if (op == '-')
2083 l2 = -l2;
2084 l2 += vtop[-1].c.i;
2085 /* The backends can't always deal with addends to symbols
2086 larger than +-1<<31. Don't construct such. */
2087 if ((int)l2 != l2)
2088 goto general_case;
2089 vtop--;
2090 vtop->c.i = l2;
2091 } else {
2092 general_case:
2093 /* call low level op generator */
2094 if (t1 == VT_LLONG || t2 == VT_LLONG ||
2095 (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
2096 gen_opl(op);
2097 else
2098 gen_opi(op);
2103 /* generate a floating point operation with constant propagation */
2104 static void gen_opif(int op)
2106 int c1, c2;
2107 SValue *v1, *v2;
2108 #if defined _MSC_VER && defined _AMD64_
2109 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2110 volatile
2111 #endif
2112 long double f1, f2;
2114 v1 = vtop - 1;
2115 v2 = vtop;
2116 /* currently, we cannot do computations with forward symbols */
2117 c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2118 c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2119 if (c1 && c2) {
2120 if (v1->type.t == VT_FLOAT) {
2121 f1 = v1->c.f;
2122 f2 = v2->c.f;
2123 } else if (v1->type.t == VT_DOUBLE) {
2124 f1 = v1->c.d;
2125 f2 = v2->c.d;
2126 } else {
2127 f1 = v1->c.ld;
2128 f2 = v2->c.ld;
2131 /* NOTE: we only do constant propagation if finite number (not
2132 NaN or infinity) (ANSI spec) */
2133 if (!ieee_finite(f1) || !ieee_finite(f2))
2134 goto general_case;
2136 switch(op) {
2137 case '+': f1 += f2; break;
2138 case '-': f1 -= f2; break;
2139 case '*': f1 *= f2; break;
2140 case '/':
2141 if (f2 == 0.0) {
2142 /* If not in initializer we need to potentially generate
2143 FP exceptions at runtime, otherwise we want to fold. */
2144 if (!const_wanted)
2145 goto general_case;
2147 f1 /= f2;
2148 break;
2149 /* XXX: also handles tests ? */
2150 default:
2151 goto general_case;
2153 /* XXX: overflow test ? */
2154 if (v1->type.t == VT_FLOAT) {
2155 v1->c.f = f1;
2156 } else if (v1->type.t == VT_DOUBLE) {
2157 v1->c.d = f1;
2158 } else {
2159 v1->c.ld = f1;
2161 vtop--;
2162 } else {
2163 general_case:
2164 gen_opf(op);
2168 static int pointed_size(CType *type)
2170 int align;
2171 return type_size(pointed_type(type), &align);
2174 static void vla_runtime_pointed_size(CType *type)
2176 int align;
2177 vla_runtime_type_size(pointed_type(type), &align);
2180 static inline int is_null_pointer(SValue *p)
2182 if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
2183 return 0;
2184 return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
2185 ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
2186 ((p->type.t & VT_BTYPE) == VT_PTR &&
2187 (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0) &&
2188 ((pointed_type(&p->type)->t & VT_BTYPE) == VT_VOID) &&
2189 0 == (pointed_type(&p->type)->t & (VT_CONSTANT | VT_VOLATILE))
2192 static inline int is_integer_btype(int bt)
2194 return (bt == VT_BYTE || bt == VT_SHORT ||
2195 bt == VT_INT || bt == VT_LLONG);
2198 /* check types for comparison or subtraction of pointers */
2199 static void check_comparison_pointer_types(SValue *p1, SValue *p2, int op)
2201 CType *type1, *type2, tmp_type1, tmp_type2;
2202 int bt1, bt2;
2204 /* null pointers are accepted for all comparisons, as in gcc */
2205 if (is_null_pointer(p1) || is_null_pointer(p2))
2206 return;
2207 type1 = &p1->type;
2208 type2 = &p2->type;
2209 bt1 = type1->t & VT_BTYPE;
2210 bt2 = type2->t & VT_BTYPE;
2211 /* accept comparison between pointer and integer with a warning */
2212 if ((is_integer_btype(bt1) || is_integer_btype(bt2)) && op != '-') {
2213 if (op != TOK_LOR && op != TOK_LAND )
2214 tcc_warning("comparison between pointer and integer");
2215 return;
2218 /* both must be pointers or implicit function pointers */
2219 if (bt1 == VT_PTR) {
2220 type1 = pointed_type(type1);
2221 } else if (bt1 != VT_FUNC)
2222 goto invalid_operands;
2224 if (bt2 == VT_PTR) {
2225 type2 = pointed_type(type2);
2226 } else if (bt2 != VT_FUNC) {
2227 invalid_operands:
2228 tcc_error("invalid operands to binary %s", get_tok_str(op, NULL));
2230 if ((type1->t & VT_BTYPE) == VT_VOID ||
2231 (type2->t & VT_BTYPE) == VT_VOID)
2232 return;
2233 tmp_type1 = *type1;
2234 tmp_type2 = *type2;
2235 tmp_type1.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
2236 tmp_type2.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
2237 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
2238 /* gcc-like error if '-' is used */
2239 if (op == '-')
2240 goto invalid_operands;
2241 else
2242 tcc_warning("comparison of distinct pointer types lacks a cast");
2246 /* generic gen_op: handles operand type conversions and promotions */
2247 ST_FUNC void gen_op(int op)
2249 int u, t1, t2, bt1, bt2, t;
2250 CType type1;
2252 redo:
2253 t1 = vtop[-1].type.t;
2254 t2 = vtop[0].type.t;
2255 bt1 = t1 & VT_BTYPE;
2256 bt2 = t2 & VT_BTYPE;
2258 if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
2259 tcc_error("operation on a struct");
2260 } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
2261 if (bt2 == VT_FUNC) {
2262 mk_pointer(&vtop->type);
2263 gaddrof();
2265 if (bt1 == VT_FUNC) {
2266 vswap();
2267 mk_pointer(&vtop->type);
2268 gaddrof();
2269 vswap();
2271 goto redo;
2272 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
2273 /* at least one operand is a pointer */
2274 /* relational op: both operands must be pointers */
2275 if (op >= TOK_ULT && op <= TOK_LOR) {
2276 check_comparison_pointer_types(vtop - 1, vtop, op);
2277 /* pointers are handled as unsigned */
2278 #if PTR_SIZE == 8
2279 t = VT_LLONG | VT_UNSIGNED;
2280 #else
2281 t = VT_INT | VT_UNSIGNED;
2282 #endif
2283 goto std_op;
2285 /* if both pointers, then it must be the '-' op */
2286 if (bt1 == VT_PTR && bt2 == VT_PTR) {
2287 if (op != '-')
2288 tcc_error("cannot use pointers here");
2289 check_comparison_pointer_types(vtop - 1, vtop, op);
2290 /* XXX: check that types are compatible */
2291 if (vtop[-1].type.t & VT_VLA) {
2292 vla_runtime_pointed_size(&vtop[-1].type);
2293 } else {
2294 vpushi(pointed_size(&vtop[-1].type));
2296 vrott(3);
2297 gen_opic(op);
2298 vtop->type.t = ptrdiff_type.t;
2299 vswap();
2300 gen_op(TOK_PDIV);
2301 } else {
2302 /* exactly one pointer : must be '+' or '-'. */
2303 if (op != '-' && op != '+')
2304 tcc_error("cannot use pointers here");
2305 /* Put pointer as first operand */
2306 if (bt2 == VT_PTR) {
2307 vswap();
2308 t = t1, t1 = t2, t2 = t;
2310 #if PTR_SIZE == 4
2311 if ((vtop[0].type.t & VT_BTYPE) == VT_LLONG)
2312 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2313 gen_cast_s(VT_INT);
2314 #endif
2315 type1 = vtop[-1].type;
2316 type1.t &= ~VT_ARRAY;
2317 if (vtop[-1].type.t & VT_VLA)
2318 vla_runtime_pointed_size(&vtop[-1].type);
2319 else {
2320 u = pointed_size(&vtop[-1].type);
2321 if (u < 0)
2322 tcc_error("unknown array element size");
2323 #if PTR_SIZE == 8
2324 vpushll(u);
2325 #else
2326 /* XXX: cast to int ? (long long case) */
2327 vpushi(u);
2328 #endif
2330 gen_op('*');
2331 #if 0
2332 /* #ifdef CONFIG_TCC_BCHECK
2333 The main reason for removing this code:
2334 #include <stdio.h>
2335 int main ()
2337 int v[10];
2338 int i = 10;
2339 int j = 9;
2340 fprintf(stderr, "v+i-j = %p\n", v+i-j);
2341 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
2343 When this code is enabled, the output looks like
2344 v+i-j = 0xfffffffe
2345 v+(i-j) = 0xbff84000
2347 /* if evaluating a constant expression, no code should be
2348 generated, so no bound check */
2349 if (tcc_state->do_bounds_check && !const_wanted) {
2350 /* if bounded pointers, we generate special code to
2351 test bounds */
2352 if (op == '-') {
2353 vpushi(0);
2354 vswap();
2355 gen_op('-');
2357 gen_bounded_ptr_add();
2358 } else
2359 #endif
2361 gen_opic(op);
2363 /* restore the type in case gen_opic() swapped the operands */
2364 vtop->type = type1;
2366 } else if (is_float(bt1) || is_float(bt2)) {
2367 /* compute bigger type and do implicit casts */
2368 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
2369 t = VT_LDOUBLE;
2370 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
2371 t = VT_DOUBLE;
2372 } else {
2373 t = VT_FLOAT;
2375 /* floats can only be used for a few operations */
2376 if (op != '+' && op != '-' && op != '*' && op != '/' &&
2377 (op < TOK_ULT || op > TOK_GT))
2378 tcc_error("invalid operands for binary operation");
2379 goto std_op;
2380 } else if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL) {
2381 t = bt1 == VT_LLONG ? VT_LLONG : VT_INT;
2382 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (t | VT_UNSIGNED))
2383 t |= VT_UNSIGNED;
2384 t |= (VT_LONG & t1);
2385 goto std_op;
2386 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
2387 /* cast to the biggest operand type */
2388 t = VT_LLONG | VT_LONG;
2389 if (bt1 == VT_LLONG)
2390 t &= t1;
2391 if (bt2 == VT_LLONG)
2392 t &= t2;
2393 /* convert to unsigned if it does not fit in a long long */
2394 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
2395 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
2396 t |= VT_UNSIGNED;
2397 goto std_op;
2398 } else {
2399 /* integer operations */
2400 t = VT_INT | (VT_LONG & (t1 | t2));
2401 /* convert to unsigned if it does not fit in an integer */
2402 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
2403 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
2404 t |= VT_UNSIGNED;
2405 std_op:
2406 /* XXX: currently, some unsigned operations are explicit, so
2407 we modify them here */
2408 if (t & VT_UNSIGNED) {
2409 if (op == TOK_SAR)
2410 op = TOK_SHR;
2411 else if (op == '/')
2412 op = TOK_UDIV;
2413 else if (op == '%')
2414 op = TOK_UMOD;
2415 else if (op == TOK_LT)
2416 op = TOK_ULT;
2417 else if (op == TOK_GT)
2418 op = TOK_UGT;
2419 else if (op == TOK_LE)
2420 op = TOK_ULE;
2421 else if (op == TOK_GE)
2422 op = TOK_UGE;
2424 vswap();
2425 type1.t = t;
2426 type1.ref = NULL;
2427 gen_cast(&type1);
2428 vswap();
2429 /* special case for shifts and long long: we keep the shift as
2430 an integer */
2431 if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
2432 type1.t = VT_INT;
2433 gen_cast(&type1);
2434 if (is_float(t))
2435 gen_opif(op);
2436 else
2437 gen_opic(op);
2438 if (op >= TOK_ULT && op <= TOK_GT) {
2439 /* relational op: the result is an int */
2440 vtop->type.t = VT_INT;
2441 } else {
2442 vtop->type.t = t;
2445 // Make sure that we have converted to an rvalue:
2446 if (vtop->r & VT_LVAL)
2447 gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
2450 #ifndef TCC_TARGET_ARM
2451 /* generic itof for unsigned long long case */
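/* Note (added): targets without a native unsigned-64-bit-to-float conversion
   call a libgcc-style runtime helper (__floatundisf / __floatundidf /
   __floatundixf) below instead of emitting the conversion inline. */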
2452 static void gen_cvt_itof1(int t)
2454 #ifdef TCC_TARGET_ARM64
2455 gen_cvt_itof(t);
2456 #else
2457 if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
2458 (VT_LLONG | VT_UNSIGNED)) {
2460 if (t == VT_FLOAT)
2461 vpush_global_sym(&func_old_type, TOK___floatundisf);
2462 #if LDOUBLE_SIZE != 8
2463 else if (t == VT_LDOUBLE)
2464 vpush_global_sym(&func_old_type, TOK___floatundixf);
2465 #endif
2466 else
2467 vpush_global_sym(&func_old_type, TOK___floatundidf);
2468 vrott(2);
2469 gfunc_call(1);
2470 vpushi(0);
2471 vtop->r = reg_fret(t);
2472 } else {
2473 gen_cvt_itof(t);
2475 #endif
2477 #endif
2479 /* generic ftoi for unsigned long long case */
2480 static void gen_cvt_ftoi1(int t)
2482 #ifdef TCC_TARGET_ARM64
2483 gen_cvt_ftoi(t);
2484 #else
2485 int st;
2487 if (t == (VT_LLONG | VT_UNSIGNED)) {
2488 /* not handled natively */
2489 st = vtop->type.t & VT_BTYPE;
2490 if (st == VT_FLOAT)
2491 vpush_global_sym(&func_old_type, TOK___fixunssfdi);
2492 #if LDOUBLE_SIZE != 8
2493 else if (st == VT_LDOUBLE)
2494 vpush_global_sym(&func_old_type, TOK___fixunsxfdi);
2495 #endif
2496 else
2497 vpush_global_sym(&func_old_type, TOK___fixunsdfdi);
2498 vrott(2);
2499 gfunc_call(1);
2500 vpushi(0);
2501 vtop->r = REG_IRET;
2502 vtop->r2 = REG_LRET;
2503 } else {
2504 gen_cvt_ftoi(t);
2506 #endif
2509 /* force char or short cast */
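/* Illustration (added): truncation to char/short is emulated with ordinary
   operations, e.g. for an int value v,
       (unsigned char)v  ->  v & 0xff
       (signed char)v    ->  (v << 24) >> 24   (arithmetic shift)
   which is what the masking and shift pairs below implement. */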
2510 static void force_charshort_cast(int t)
2512 int bits, dbt;
2514 /* cannot cast static initializers */
2515 if (STATIC_DATA_WANTED)
2516 return;
2518 dbt = t & VT_BTYPE;
2519 /* XXX: add optimization if lvalue : just change type and offset */
2520 if (dbt == VT_BYTE)
2521 bits = 8;
2522 else
2523 bits = 16;
2524 if (t & VT_UNSIGNED) {
2525 vpushi((1 << bits) - 1);
2526 gen_op('&');
2527 } else {
2528 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
2529 bits = 64 - bits;
2530 else
2531 bits = 32 - bits;
2532 vpushi(bits);
2533 gen_op(TOK_SHL);
2534 /* result must be signed or the SAR would be converted to an SHR.
2535 This was not the case when "t" was a signed short
2536 and the last value on the stack was an unsigned int */
2537 vtop->type.t &= ~VT_UNSIGNED;
2538 vpushi(bits);
2539 gen_op(TOK_SAR);
2543 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2544 static void gen_cast_s(int t)
2546 CType type;
2547 type.t = t;
2548 type.ref = NULL;
2549 gen_cast(&type);
2552 static void gen_cast(CType *type)
2554 int sbt, dbt, sf, df, c, p;
2556 /* special delayed cast for char/short */
2557 /* XXX: in some cases (multiple cascaded casts), it may still
2558 be incorrect */
2559 if (vtop->r & VT_MUSTCAST) {
2560 vtop->r &= ~VT_MUSTCAST;
2561 force_charshort_cast(vtop->type.t);
2564 /* bitfields first get cast to ints */
2565 if (vtop->type.t & VT_BITFIELD) {
2566 gv(RC_INT);
2569 dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
2570 sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
2572 if (sbt != dbt) {
2573 sf = is_float(sbt);
2574 df = is_float(dbt);
2575 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2576 p = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM);
2577 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
2578 c &= dbt != VT_LDOUBLE;
2579 #endif
2580 if (c) {
2581 /* constant case: we can do it now */
2582 /* XXX: in ISO C, cannot do it if the conversion raises an error */
2583 if (sbt == VT_FLOAT)
2584 vtop->c.ld = vtop->c.f;
2585 else if (sbt == VT_DOUBLE)
2586 vtop->c.ld = vtop->c.d;
2588 if (df) {
2589 if ((sbt & VT_BTYPE) == VT_LLONG) {
2590 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
2591 vtop->c.ld = vtop->c.i;
2592 else
2593 vtop->c.ld = -(long double)-vtop->c.i;
2594 } else if(!sf) {
2595 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
2596 vtop->c.ld = (uint32_t)vtop->c.i;
2597 else
2598 vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
2601 if (dbt == VT_FLOAT)
2602 vtop->c.f = (float)vtop->c.ld;
2603 else if (dbt == VT_DOUBLE)
2604 vtop->c.d = (double)vtop->c.ld;
2605 } else if (sf && dbt == (VT_LLONG|VT_UNSIGNED)) {
2606 vtop->c.i = vtop->c.ld;
2607 } else if (sf && dbt == VT_BOOL) {
2608 vtop->c.i = (vtop->c.ld != 0);
2609 } else {
2610 if(sf)
2611 vtop->c.i = vtop->c.ld;
2612 else if (sbt == (VT_LLONG|VT_UNSIGNED))
2614 else if (sbt & VT_UNSIGNED)
2615 vtop->c.i = (uint32_t)vtop->c.i;
2616 #if PTR_SIZE == 8
2617 else if (sbt == VT_PTR)
2619 #endif
2620 else if (sbt != VT_LLONG)
2621 vtop->c.i = ((uint32_t)vtop->c.i |
2622 -(vtop->c.i & 0x80000000));
2624 if (dbt == (VT_LLONG|VT_UNSIGNED))
2626 else if (dbt == VT_BOOL)
2627 vtop->c.i = (vtop->c.i != 0);
2628 #if PTR_SIZE == 8
2629 else if (dbt == VT_PTR)
2631 #endif
2632 else if (dbt != VT_LLONG) {
2633 uint32_t m = ((dbt & VT_BTYPE) == VT_BYTE ? 0xff :
2634 (dbt & VT_BTYPE) == VT_SHORT ? 0xffff :
2635 0xffffffff);
2636 vtop->c.i &= m;
2637 if (!(dbt & VT_UNSIGNED))
2638 vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
2641 } else if (p && dbt == VT_BOOL) {
2642 vtop->r = VT_CONST;
2643 vtop->c.i = 1;
2644 } else {
2645 /* non-constant case: generate code */
2646 if (sf && df) {
2647 /* convert from fp to fp */
2648 gen_cvt_ftof(dbt);
2649 } else if (df) {
2650 /* convert int to fp */
2651 gen_cvt_itof1(dbt);
2652 } else if (sf) {
2653 /* convert fp to int */
2654 if (dbt == VT_BOOL) {
2655 vpushi(0);
2656 gen_op(TOK_NE);
2657 } else {
2658 /* we handle char/short/etc... with generic code */
2659 if (dbt != (VT_INT | VT_UNSIGNED) &&
2660 dbt != (VT_LLONG | VT_UNSIGNED) &&
2661 dbt != VT_LLONG)
2662 dbt = VT_INT;
2663 gen_cvt_ftoi1(dbt);
2664 if (dbt == VT_INT && (type->t & (VT_BTYPE | VT_UNSIGNED)) != dbt) {
2665 /* additional cast for char/short... */
2666 vtop->type.t = dbt;
2667 gen_cast(type);
2670 #if PTR_SIZE == 4
2671 } else if ((dbt & VT_BTYPE) == VT_LLONG) {
2672 if ((sbt & VT_BTYPE) != VT_LLONG) {
2673 /* scalar to long long */
2674 /* machine independent conversion */
2675 gv(RC_INT);
2676 /* generate high word */
2677 if (sbt == (VT_INT | VT_UNSIGNED)) {
2678 vpushi(0);
2679 gv(RC_INT);
2680 } else {
2681 if (sbt == VT_PTR) {
2682 /* cast from pointer to int before we apply
2683 the shift operation, which pointers don't support */
2684 gen_cast_s(VT_INT);
2686 gv_dup();
2687 vpushi(31);
2688 gen_op(TOK_SAR);
2690 /* patch second register */
2691 vtop[-1].r2 = vtop->r;
2692 vpop();
2694 #else
2695 } else if ((dbt & VT_BTYPE) == VT_LLONG ||
2696 (dbt & VT_BTYPE) == VT_PTR ||
2697 (dbt & VT_BTYPE) == VT_FUNC) {
2698 if ((sbt & VT_BTYPE) != VT_LLONG &&
2699 (sbt & VT_BTYPE) != VT_PTR &&
2700 (sbt & VT_BTYPE) != VT_FUNC) {
2701 /* need to convert from 32-bit to 64-bit */
2702 gv(RC_INT);
2703 if (sbt != (VT_INT | VT_UNSIGNED)) {
2704 #if defined(TCC_TARGET_ARM64)
2705 gen_cvt_sxtw();
2706 #elif defined(TCC_TARGET_X86_64)
2707 int r = gv(RC_INT);
2708 /* x86_64 specific: movslq */
2709 o(0x6348);
2710 o(0xc0 + (REG_VALUE(r) << 3) + REG_VALUE(r));
2711 #else
2712 #error
2713 #endif
2716 #endif
2717 } else if (dbt == VT_BOOL) {
2718 /* scalar to bool */
2719 vpushi(0);
2720 gen_op(TOK_NE);
2721 } else if ((dbt & VT_BTYPE) == VT_BYTE ||
2722 (dbt & VT_BTYPE) == VT_SHORT) {
2723 if (sbt == VT_PTR) {
2724 vtop->type.t = VT_INT;
2725 tcc_warning("nonportable conversion from pointer to char/short");
2727 force_charshort_cast(dbt);
2728 } else if ((dbt & VT_BTYPE) == VT_INT) {
2729 /* scalar to int */
2730 if ((sbt & VT_BTYPE) == VT_LLONG) {
2731 #if PTR_SIZE == 4
2732 /* from long long: just take low order word */
2733 lexpand();
2734 vpop();
2735 #else
2736 vpushi(0xffffffff);
2737 vtop->type.t |= VT_UNSIGNED;
2738 gen_op('&');
2739 #endif
2741 /* if lvalue and single word type, nothing to do because
2742 the lvalue already contains the real type size (see
2743 VT_LVAL_xxx constants) */
2746 } else if ((dbt & VT_BTYPE) == VT_PTR && !(vtop->r & VT_LVAL)) {
2747 /* if we are casting between pointer types,
2748 we must update the VT_LVAL_xxx size */
2749 vtop->r = (vtop->r & ~VT_LVAL_TYPE)
2750 | (lvalue_type(type->ref->type.t) & VT_LVAL_TYPE);
2752 vtop->type = *type;
2753 vtop->type.t &= ~ ( VT_CONSTANT | VT_VOLATILE | VT_ARRAY );
2756 /* return type size as known at compile time. Put alignment at 'a' */
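/* Example (added, assuming a typical 64-bit target): for
       struct { char c; int i; }
   this returns 8 with *a == 4; for any pointer it returns PTR_SIZE with
   *a == PTR_SIZE. */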
2757 ST_FUNC int type_size(CType *type, int *a)
2759 Sym *s;
2760 int bt;
2762 bt = type->t & VT_BTYPE;
2763 if (bt == VT_STRUCT) {
2764 /* struct/union */
2765 s = type->ref;
2766 *a = s->r;
2767 return s->c;
2768 } else if (bt == VT_PTR) {
2769 if (type->t & VT_ARRAY) {
2770 int ts;
2772 s = type->ref;
2773 ts = type_size(&s->type, a);
2775 if (ts < 0 && s->c < 0)
2776 ts = -ts;
2778 return ts * s->c;
2779 } else {
2780 *a = PTR_SIZE;
2781 return PTR_SIZE;
2783 } else if (IS_ENUM(type->t) && type->ref->c < 0) {
2784 return -1; /* incomplete enum */
2785 } else if (bt == VT_LDOUBLE) {
2786 *a = LDOUBLE_ALIGN;
2787 return LDOUBLE_SIZE;
2788 } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
2789 #ifdef TCC_TARGET_I386
2790 #ifdef TCC_TARGET_PE
2791 *a = 8;
2792 #else
2793 *a = 4;
2794 #endif
2795 #elif defined(TCC_TARGET_ARM)
2796 #ifdef TCC_ARM_EABI
2797 *a = 8;
2798 #else
2799 *a = 4;
2800 #endif
2801 #else
2802 *a = 8;
2803 #endif
2804 return 8;
2805 } else if (bt == VT_INT || bt == VT_FLOAT) {
2806 *a = 4;
2807 return 4;
2808 } else if (bt == VT_SHORT) {
2809 *a = 2;
2810 return 2;
2811 } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
2812 *a = 8;
2813 return 16;
2814 } else {
2815 /* char, void, function, _Bool */
2816 *a = 1;
2817 return 1;
2821 /* push type size as known at runtime on top of the value stack. Put
2822 alignment at 'a' */
2823 ST_FUNC void vla_runtime_type_size(CType *type, int *a)
2825 if (type->t & VT_VLA) {
2826 type_size(&type->ref->type, a);
2827 vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
2828 } else {
2829 vpushi(type_size(type, a));
2833 static void vla_sp_restore(void) {
2834 if (vlas_in_scope) {
2835 gen_vla_sp_restore(vla_sp_loc);
2839 static void vla_sp_restore_root(void) {
2840 if (vlas_in_scope) {
2841 gen_vla_sp_restore(vla_sp_root_loc);
2845 /* return the pointed-to type of 'type' */
2846 static inline CType *pointed_type(CType *type)
2848 return &type->ref->type;
2851 /* modify 'type' so that it is a pointer to the original type. */
2852 ST_FUNC void mk_pointer(CType *type)
2854 Sym *s;
2855 s = sym_push(SYM_FIELD, type, 0, -1);
2856 type->t = VT_PTR | (type->t & VT_STORAGE);
2857 type->ref = s;
2860 /* compare function types. Old-style (K&R) functions match any new-style functions */
2861 static int is_compatible_func(CType *type1, CType *type2)
2863 Sym *s1, *s2;
2865 s1 = type1->ref;
2866 s2 = type2->ref;
2867 if (!is_compatible_types(&s1->type, &s2->type))
2868 return 0;
2869 /* check func_call */
2870 if (s1->f.func_call != s2->f.func_call)
2871 return 0;
2872 /* XXX: not complete */
2873 if (s1->f.func_type == FUNC_OLD || s2->f.func_type == FUNC_OLD)
2874 return 1;
2875 if (s1->f.func_type != s2->f.func_type)
2876 return 0;
2877 while (s1 != NULL) {
2878 if (s2 == NULL)
2879 return 0;
2880 if (!is_compatible_unqualified_types(&s1->type, &s2->type))
2881 return 0;
2882 s1 = s1->next;
2883 s2 = s2->next;
2885 if (s2)
2886 return 0;
2887 return 1;
2890 /* return true if type1 and type2 are the same. If unqualified is
2891 true, qualifiers on the types are ignored.
2893 static int compare_types(CType *type1, CType *type2, int unqualified)
2895 int bt1, t1, t2;
2897 t1 = type1->t & VT_TYPE;
2898 t2 = type2->t & VT_TYPE;
2899 if (unqualified) {
2900 /* strip qualifiers before comparing */
2901 t1 &= ~(VT_CONSTANT | VT_VOLATILE);
2902 t2 &= ~(VT_CONSTANT | VT_VOLATILE);
2905 /* Default vs. explicit signedness only matters for char */
2906 if ((t1 & VT_BTYPE) != VT_BYTE) {
2907 t1 &= ~VT_DEFSIGN;
2908 t2 &= ~VT_DEFSIGN;
2910 /* XXX: bitfields ? */
2911 if (t1 != t2)
2912 return 0;
2913 /* test more complicated cases */
2914 bt1 = t1 & (VT_BTYPE | VT_ARRAY);
2915 if (bt1 == VT_PTR) {
2916 type1 = pointed_type(type1);
2917 type2 = pointed_type(type2);
2918 return is_compatible_types(type1, type2);
2919 } else if (bt1 & VT_ARRAY) {
2920 return type1->ref->c < 0 || type2->ref->c < 0
2921 || type1->ref->c == type2->ref->c;
2922 } else if (bt1 == VT_STRUCT) {
2923 return (type1->ref == type2->ref);
2924 } else if (bt1 == VT_FUNC) {
2925 return is_compatible_func(type1, type2);
2926 } else if (IS_ENUM(type1->t) || IS_ENUM(type2->t)) {
2927 return type1->ref == type2->ref;
2928 } else {
2929 return 1;
2933 /* return true if type1 and type2 are exactly the same (including
2934 qualifiers).
2936 static int is_compatible_types(CType *type1, CType *type2)
2938 return compare_types(type1,type2,0);
2941 /* return true if type1 and type2 are the same (ignoring qualifiers).
2943 static int is_compatible_unqualified_types(CType *type1, CType *type2)
2945 return compare_types(type1,type2,1);
2948 /* print a type. If 'varstr' is not NULL, then the variable is also
2949 printed in the type */
2950 /* XXX: union */
2951 /* XXX: add array and function pointers */
2952 static void type_to_str(char *buf, int buf_size,
2953 CType *type, const char *varstr)
2955 int bt, v, t;
2956 Sym *s, *sa;
2957 char buf1[256];
2958 const char *tstr;
2960 t = type->t;
2961 bt = t & VT_BTYPE;
2962 buf[0] = '\0';
2964 if (t & VT_EXTERN)
2965 pstrcat(buf, buf_size, "extern ");
2966 if (t & VT_STATIC)
2967 pstrcat(buf, buf_size, "static ");
2968 if (t & VT_TYPEDEF)
2969 pstrcat(buf, buf_size, "typedef ");
2970 if (t & VT_INLINE)
2971 pstrcat(buf, buf_size, "inline ");
2972 if (t & VT_VOLATILE)
2973 pstrcat(buf, buf_size, "volatile ");
2974 if (t & VT_CONSTANT)
2975 pstrcat(buf, buf_size, "const ");
2977 if (((t & VT_DEFSIGN) && bt == VT_BYTE)
2978 || ((t & VT_UNSIGNED)
2979 && (bt == VT_SHORT || bt == VT_INT || bt == VT_LLONG)
2980 && !IS_ENUM(t)
2982 pstrcat(buf, buf_size, (t & VT_UNSIGNED) ? "unsigned " : "signed ");
2984 buf_size -= strlen(buf);
2985 buf += strlen(buf);
2987 switch(bt) {
2988 case VT_VOID:
2989 tstr = "void";
2990 goto add_tstr;
2991 case VT_BOOL:
2992 tstr = "_Bool";
2993 goto add_tstr;
2994 case VT_BYTE:
2995 tstr = "char";
2996 goto add_tstr;
2997 case VT_SHORT:
2998 tstr = "short";
2999 goto add_tstr;
3000 case VT_INT:
3001 tstr = "int";
3002 goto maybe_long;
3003 case VT_LLONG:
3004 tstr = "long long";
3005 maybe_long:
3006 if (t & VT_LONG)
3007 tstr = "long";
3008 if (!IS_ENUM(t))
3009 goto add_tstr;
3010 tstr = "enum ";
3011 goto tstruct;
3012 case VT_FLOAT:
3013 tstr = "float";
3014 goto add_tstr;
3015 case VT_DOUBLE:
3016 tstr = "double";
3017 goto add_tstr;
3018 case VT_LDOUBLE:
3019 tstr = "long double";
3020 add_tstr:
3021 pstrcat(buf, buf_size, tstr);
3022 break;
3023 case VT_STRUCT:
3024 tstr = "struct ";
3025 if (IS_UNION(t))
3026 tstr = "union ";
3027 tstruct:
3028 pstrcat(buf, buf_size, tstr);
3029 v = type->ref->v & ~SYM_STRUCT;
3030 if (v >= SYM_FIRST_ANOM)
3031 pstrcat(buf, buf_size, "<anonymous>");
3032 else
3033 pstrcat(buf, buf_size, get_tok_str(v, NULL));
3034 break;
3035 case VT_FUNC:
3036 s = type->ref;
3037 buf1[0]=0;
3038 if (varstr && '*' == *varstr) {
3039 pstrcat(buf1, sizeof(buf1), "(");
3040 pstrcat(buf1, sizeof(buf1), varstr);
3041 pstrcat(buf1, sizeof(buf1), ")");
3043 pstrcat(buf1, sizeof(buf1), "(");
3044 sa = s->next;
3045 while (sa != NULL) {
3046 char buf2[256];
3047 type_to_str(buf2, sizeof(buf2), &sa->type, NULL);
3048 pstrcat(buf1, sizeof(buf1), buf2);
3049 sa = sa->next;
3050 if (sa)
3051 pstrcat(buf1, sizeof(buf1), ", ");
3053 if (s->f.func_type == FUNC_ELLIPSIS)
3054 pstrcat(buf1, sizeof(buf1), ", ...");
3055 pstrcat(buf1, sizeof(buf1), ")");
3056 type_to_str(buf, buf_size, &s->type, buf1);
3057 goto no_var;
3058 case VT_PTR:
3059 s = type->ref;
3060 if (t & VT_ARRAY) {
3061 if (varstr && '*' == *varstr)
3062 snprintf(buf1, sizeof(buf1), "(%s)[%d]", varstr, s->c);
3063 else
3064 snprintf(buf1, sizeof(buf1), "%s[%d]", varstr ? varstr : "", s->c);
3065 type_to_str(buf, buf_size, &s->type, buf1);
3066 goto no_var;
3068 pstrcpy(buf1, sizeof(buf1), "*");
3069 if (t & VT_CONSTANT)
3070 pstrcat(buf1, sizeof(buf1), "const ");
3071 pstrcat(buf1, sizeof(buf1), "volatile ");
3072 pstrcat(buf1, buf_size, "volatile ");
3073 if (varstr)
3074 pstrcat(buf1, sizeof(buf1), varstr);
3075 type_to_str(buf, buf_size, &s->type, buf1);
3076 goto no_var;
3078 if (varstr) {
3079 pstrcat(buf, buf_size, " ");
3080 pstrcat(buf, buf_size, varstr);
3082 no_var: ;
3085 /* verify type compatibility to store vtop in 'dt' type, and generate
3086 casts if needed. */
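/* Examples (added): "int *p = 3;" gets the "assignment makes pointer from
   integer without a cast" warning, assigning a 'const char *' to a plain
   'char *' gets the "discards qualifiers" warning, and assigning between
   incompatible struct types is a hard error. */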
3087 static void gen_assign_cast(CType *dt)
3089 CType *st, *type1, *type2;
3090 char buf1[256], buf2[256];
3091 int dbt, sbt, qualwarn, lvl;
3093 st = &vtop->type; /* source type */
3094 dbt = dt->t & VT_BTYPE;
3095 sbt = st->t & VT_BTYPE;
3096 if (sbt == VT_VOID || dbt == VT_VOID) {
3097 if (sbt == VT_VOID && dbt == VT_VOID)
3098 ; /* It is Ok if both are void */
3099 else
3100 tcc_error("cannot cast from/to void");
3102 if (dt->t & VT_CONSTANT)
3103 tcc_warning("assignment of read-only location");
3104 switch(dbt) {
3105 case VT_PTR:
3106 /* special cases for pointers */
3107 /* '0' can also be a pointer */
3108 if (is_null_pointer(vtop))
3109 break;
3110 /* accept implicit integer to pointer conversion with a warning */
3111 if (is_integer_btype(sbt)) {
3112 tcc_warning("assignment makes pointer from integer without a cast");
3113 break;
3115 type1 = pointed_type(dt);
3116 if (sbt == VT_PTR)
3117 type2 = pointed_type(st);
3118 else if (sbt == VT_FUNC)
3119 type2 = st; /* a function is implicitly a function pointer */
3120 else
3121 goto error;
3122 if (is_compatible_types(type1, type2))
3123 break;
3124 for (qualwarn = lvl = 0;; ++lvl) {
3125 if (((type2->t & VT_CONSTANT) && !(type1->t & VT_CONSTANT)) ||
3126 ((type2->t & VT_VOLATILE) && !(type1->t & VT_VOLATILE)))
3127 qualwarn = 1;
3128 dbt = type1->t & (VT_BTYPE|VT_LONG);
3129 sbt = type2->t & (VT_BTYPE|VT_LONG);
3130 if (dbt != VT_PTR || sbt != VT_PTR)
3131 break;
3132 type1 = pointed_type(type1);
3133 type2 = pointed_type(type2);
3135 if (!is_compatible_unqualified_types(type1, type2)) {
3136 if ((dbt == VT_VOID || sbt == VT_VOID) && lvl == 0) {
3137 /* void * can match anything */
3138 } else if (dbt == sbt
3139 && is_integer_btype(sbt & VT_BTYPE)
3140 && IS_ENUM(type1->t) + IS_ENUM(type2->t)
3141 + !!((type1->t ^ type2->t) & VT_UNSIGNED) < 2) {
3142 /* Like GCC, don't warn by default for mere changes
3143 in pointer target signedness. Do warn for different
3144 base types, though, in particular for unsigned enums
3145 and signed int targets. */
3146 } else {
3147 tcc_warning("assignment from incompatible pointer type");
3148 break;
3151 if (qualwarn)
3152 tcc_warning("assignment discards qualifiers from pointer target type");
3153 break;
3154 case VT_BYTE:
3155 case VT_SHORT:
3156 case VT_INT:
3157 case VT_LLONG:
3158 if (sbt == VT_PTR || sbt == VT_FUNC) {
3159 tcc_warning("assignment makes integer from pointer without a cast");
3160 } else if (sbt == VT_STRUCT) {
3161 goto case_VT_STRUCT;
3163 /* XXX: more tests */
3164 break;
3165 case VT_STRUCT:
3166 case_VT_STRUCT:
3167 if (!is_compatible_unqualified_types(dt, st)) {
3168 error:
3169 type_to_str(buf1, sizeof(buf1), st, NULL);
3170 type_to_str(buf2, sizeof(buf2), dt, NULL);
3171 tcc_error("cannot cast '%s' to '%s'", buf1, buf2);
3173 break;
3175 gen_cast(dt);
3178 /* store vtop in lvalue pushed on stack */
3179 ST_FUNC void vstore(void)
3181 int sbt, dbt, ft, r, t, size, align, bit_size, bit_pos, rc, delayed_cast;
3183 ft = vtop[-1].type.t;
3184 sbt = vtop->type.t & VT_BTYPE;
3185 dbt = ft & VT_BTYPE;
3186 if ((((sbt == VT_INT || sbt == VT_SHORT) && dbt == VT_BYTE) ||
3187 (sbt == VT_INT && dbt == VT_SHORT))
3188 && !(vtop->type.t & VT_BITFIELD)) {
3189 /* optimize char/short casts */
3190 delayed_cast = VT_MUSTCAST;
3191 vtop->type.t = ft & VT_TYPE;
3192 /* XXX: factorize */
3193 if (ft & VT_CONSTANT)
3194 tcc_warning("assignment of read-only location");
3195 } else {
3196 delayed_cast = 0;
3197 if (!(ft & VT_BITFIELD))
3198 gen_assign_cast(&vtop[-1].type);
3201 if (sbt == VT_STRUCT) {
3202 /* if structure, only generate pointer */
3203 /* structure assignment : generate memcpy */
3204 /* XXX: optimize if small size */
3205 size = type_size(&vtop->type, &align);
3207 /* destination */
3208 vswap();
3209 vtop->type.t = VT_PTR;
3210 gaddrof();
3212 /* address of memcpy() */
3213 #ifdef TCC_ARM_EABI
3214 if(!(align & 7))
3215 vpush_global_sym(&func_old_type, TOK_memcpy8);
3216 else if(!(align & 3))
3217 vpush_global_sym(&func_old_type, TOK_memcpy4);
3218 else
3219 #endif
3220 /* Use memmove, rather than memcpy, as dest and src may be the same: */
3221 vpush_global_sym(&func_old_type, TOK_memmove);
3223 vswap();
3224 /* source */
3225 vpushv(vtop - 2);
3226 vtop->type.t = VT_PTR;
3227 gaddrof();
3228 /* type size */
3229 vpushi(size);
3230 gfunc_call(3);
3232 /* leave source on stack */
3233 } else if (ft & VT_BITFIELD) {
3234 /* bitfield store handling */
3236 /* save lvalue as expression result (example: s.b = s.a = n;) */
3237 vdup(), vtop[-1] = vtop[-2];
3239 bit_pos = BIT_POS(ft);
3240 bit_size = BIT_SIZE(ft);
3241 /* remove bit field info to avoid loops */
3242 vtop[-1].type.t = ft & ~VT_STRUCT_MASK;
3244 if ((ft & VT_BTYPE) == VT_BOOL) {
3245 gen_cast(&vtop[-1].type);
3246 vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
3249 r = adjust_bf(vtop - 1, bit_pos, bit_size);
3250 if (r == VT_STRUCT) {
3251 gen_cast_s((ft & VT_BTYPE) == VT_LLONG ? VT_LLONG : VT_INT);
3252 store_packed_bf(bit_pos, bit_size);
3253 } else {
3254 unsigned long long mask = (1ULL << bit_size) - 1;
3255 if ((ft & VT_BTYPE) != VT_BOOL) {
3256 /* mask source */
3257 if ((vtop[-1].type.t & VT_BTYPE) == VT_LLONG)
3258 vpushll(mask);
3259 else
3260 vpushi((unsigned)mask);
3261 gen_op('&');
3263 /* shift source */
3264 vpushi(bit_pos);
3265 gen_op(TOK_SHL);
3266 vswap();
3267 /* duplicate destination */
3268 vdup();
3269 vrott(3);
3270 /* load destination, mask and or with source */
3271 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
3272 vpushll(~(mask << bit_pos));
3273 else
3274 vpushi(~((unsigned)mask << bit_pos));
3275 gen_op('&');
3276 gen_op('|');
3277 /* store result */
3278 vstore();
3279 /* ... and discard */
3280 vpop();
3282 } else if (dbt == VT_VOID) {
3283 --vtop;
3284 } else {
3285 #ifdef CONFIG_TCC_BCHECK
3286 /* bound check case */
3287 if (vtop[-1].r & VT_MUSTBOUND) {
3288 vswap();
3289 gbound();
3290 vswap();
3292 #endif
3293 rc = RC_INT;
3294 if (is_float(ft)) {
3295 rc = RC_FLOAT;
3296 #ifdef TCC_TARGET_X86_64
3297 if ((ft & VT_BTYPE) == VT_LDOUBLE) {
3298 rc = RC_ST0;
3299 } else if ((ft & VT_BTYPE) == VT_QFLOAT) {
3300 rc = RC_FRET;
3302 #endif
3304 r = gv(rc); /* generate value */
3305 /* if lvalue was saved on stack, must read it */
3306 if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
3307 SValue sv;
3308 t = get_reg(RC_INT);
3309 #if PTR_SIZE == 8
3310 sv.type.t = VT_PTR;
3311 #else
3312 sv.type.t = VT_INT;
3313 #endif
3314 sv.r = VT_LOCAL | VT_LVAL;
3315 sv.c.i = vtop[-1].c.i;
3316 load(t, &sv);
3317 vtop[-1].r = t | VT_LVAL;
3319 /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
3320 #if PTR_SIZE == 8
3321 if (((ft & VT_BTYPE) == VT_QLONG) || ((ft & VT_BTYPE) == VT_QFLOAT)) {
3322 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
3323 #else
3324 if ((ft & VT_BTYPE) == VT_LLONG) {
3325 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
3326 #endif
3327 vtop[-1].type.t = load_type;
3328 store(r, vtop - 1);
3329 vswap();
3330 /* convert to int to increment easily */
3331 vtop->type.t = addr_type;
3332 gaddrof();
3333 vpushi(load_size);
3334 gen_op('+');
3335 vtop->r |= VT_LVAL;
3336 vswap();
3337 vtop[-1].type.t = load_type;
3338 /* XXX: it works because r2 is spilled last ! */
3339 store(vtop->r2, vtop - 1);
3340 } else {
3341 store(r, vtop - 1);
3344 vswap();
3345 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
3346 vtop->r |= delayed_cast;
3350 /* post selects post- vs. pre- increment/decrement. c is the token ++ or -- */
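/* Example (added): for a post-increment "i++" this runs with post != 0 and
   c == TOK_INC: the old value of i is kept as the expression result while
   i + 1 is stored back; "--i" (post == 0, c == TOK_DEC) stores and yields
   i - 1. */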
3351 ST_FUNC void inc(int post, int c)
3353 test_lvalue();
3354 vdup(); /* save lvalue */
3355 if (post) {
3356 gv_dup(); /* duplicate value */
3357 vrotb(3);
3358 vrotb(3);
3360 /* add constant */
3361 vpushi(c - TOK_MID);
3362 gen_op('+');
3363 vstore(); /* store value */
3364 if (post)
3365 vpop(); /* if post op, return saved value */
3368 ST_FUNC void parse_mult_str (CString *astr, const char *msg)
3370 /* read the string */
3371 if (tok != TOK_STR)
3372 expect(msg);
3373 cstr_new(astr);
3374 while (tok == TOK_STR) {
3375 /* XXX: add \0 handling too ? */
3376 cstr_cat(astr, tokc.str.data, -1);
3377 next();
3379 cstr_ccat(astr, '\0');
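/* Usage note (added): adjacent string literals are concatenated, so e.g.
   __attribute__((section("my" "data"))) produces the single string "mydata". */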
3382 /* If I is >= 1 and a power of two, returns log2(i)+1.
3383 If I is 0 returns 0. */
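/* Examples (added): exact_log2p1(1) == 1, exact_log2p1(8) == 4,
   exact_log2p1(0) == 0. */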
3384 static int exact_log2p1(int i)
3386 int ret;
3387 if (!i)
3388 return 0;
3389 for (ret = 1; i >= 1 << 8; ret += 8)
3390 i >>= 8;
3391 if (i >= 1 << 4)
3392 ret += 4, i >>= 4;
3393 if (i >= 1 << 2)
3394 ret += 2, i >>= 2;
3395 if (i >= 1 << 1)
3396 ret++;
3397 return ret;
3400 /* Parse __attribute__((...)) GNUC extension. */
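/* Example (added): a declaration such as
       int buf[32] __attribute__((aligned(16), section(".mydata")));
   is handled by the TOK_ALIGNED and TOK_SECTION cases below; attributes that
   tcc does not know are skipped, optionally with a warning. */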
3401 static void parse_attribute(AttributeDef *ad)
3403 int t, n;
3404 CString astr;
3406 redo:
3407 if (tok != TOK_ATTRIBUTE1 && tok != TOK_ATTRIBUTE2)
3408 return;
3409 next();
3410 skip('(');
3411 skip('(');
3412 while (tok != ')') {
3413 if (tok < TOK_IDENT)
3414 expect("attribute name");
3415 t = tok;
3416 next();
3417 switch(t) {
3418 case TOK_CLEANUP1:
3419 case TOK_CLEANUP2:
3421 Sym *s;
3423 skip('(');
3424 s = sym_find(tok);
3425 if (!s) {
3426 tcc_warning("implicit declaration of function '%s'",
3427 get_tok_str(tok, &tokc));
3428 s = external_global_sym(tok, &func_old_type);
3430 ad->cleanup_func = s;
3431 next();
3432 skip(')');
3433 break;
3435 case TOK_SECTION1:
3436 case TOK_SECTION2:
3437 skip('(');
3438 parse_mult_str(&astr, "section name");
3439 ad->section = find_section(tcc_state, (char *)astr.data);
3440 skip(')');
3441 cstr_free(&astr);
3442 break;
3443 case TOK_ALIAS1:
3444 case TOK_ALIAS2:
3445 skip('(');
3446 parse_mult_str(&astr, "alias(\"target\")");
3447 ad->alias_target = /* save string as token, for later */
3448 tok_alloc((char*)astr.data, astr.size-1)->tok;
3449 skip(')');
3450 cstr_free(&astr);
3451 break;
3452 case TOK_VISIBILITY1:
3453 case TOK_VISIBILITY2:
3454 skip('(');
3455 parse_mult_str(&astr,
3456 "visibility(\"default|hidden|internal|protected\")");
3457 if (!strcmp (astr.data, "default"))
3458 ad->a.visibility = STV_DEFAULT;
3459 else if (!strcmp (astr.data, "hidden"))
3460 ad->a.visibility = STV_HIDDEN;
3461 else if (!strcmp (astr.data, "internal"))
3462 ad->a.visibility = STV_INTERNAL;
3463 else if (!strcmp (astr.data, "protected"))
3464 ad->a.visibility = STV_PROTECTED;
3465 else
3466 expect("visibility(\"default|hidden|internal|protected\")");
3467 skip(')');
3468 cstr_free(&astr);
3469 break;
3470 case TOK_ALIGNED1:
3471 case TOK_ALIGNED2:
3472 if (tok == '(') {
3473 next();
3474 n = expr_const();
3475 if (n <= 0 || (n & (n - 1)) != 0)
3476 tcc_error("alignment must be a positive power of two");
3477 skip(')');
3478 } else {
3479 n = MAX_ALIGN;
3481 ad->a.aligned = exact_log2p1(n);
3482 if (n != 1 << (ad->a.aligned - 1))
3483 tcc_error("alignment of %d is larger than implemented", n);
3484 break;
3485 case TOK_PACKED1:
3486 case TOK_PACKED2:
3487 ad->a.packed = 1;
3488 break;
3489 case TOK_WEAK1:
3490 case TOK_WEAK2:
3491 ad->a.weak = 1;
3492 break;
3493 case TOK_UNUSED1:
3494 case TOK_UNUSED2:
3495 /* currently, no need to handle it because tcc does not
3496 track unused objects */
3497 break;
3498 case TOK_NORETURN1:
3499 case TOK_NORETURN2:
3500 /* currently, no need to handle it because tcc does not
3501 make use of the noreturn information */
3502 break;
3503 case TOK_CDECL1:
3504 case TOK_CDECL2:
3505 case TOK_CDECL3:
3506 ad->f.func_call = FUNC_CDECL;
3507 break;
3508 case TOK_STDCALL1:
3509 case TOK_STDCALL2:
3510 case TOK_STDCALL3:
3511 ad->f.func_call = FUNC_STDCALL;
3512 break;
3513 #ifdef TCC_TARGET_I386
3514 case TOK_REGPARM1:
3515 case TOK_REGPARM2:
3516 skip('(');
3517 n = expr_const();
3518 if (n > 3)
3519 n = 3;
3520 else if (n < 0)
3521 n = 0;
3522 if (n > 0)
3523 ad->f.func_call = FUNC_FASTCALL1 + n - 1;
3524 skip(')');
3525 break;
3526 case TOK_FASTCALL1:
3527 case TOK_FASTCALL2:
3528 case TOK_FASTCALL3:
3529 ad->f.func_call = FUNC_FASTCALLW;
3530 break;
3531 #endif
3532 case TOK_MODE:
3533 skip('(');
3534 switch(tok) {
3535 case TOK_MODE_DI:
3536 ad->attr_mode = VT_LLONG + 1;
3537 break;
3538 case TOK_MODE_QI:
3539 ad->attr_mode = VT_BYTE + 1;
3540 break;
3541 case TOK_MODE_HI:
3542 ad->attr_mode = VT_SHORT + 1;
3543 break;
3544 case TOK_MODE_SI:
3545 case TOK_MODE_word:
3546 ad->attr_mode = VT_INT + 1;
3547 break;
3548 default:
3549 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok, NULL));
3550 break;
3552 next();
3553 skip(')');
3554 break;
3555 case TOK_DLLEXPORT:
3556 ad->a.dllexport = 1;
3557 break;
3558 case TOK_NODECORATE:
3559 ad->a.nodecorate = 1;
3560 break;
3561 case TOK_DLLIMPORT:
3562 ad->a.dllimport = 1;
3563 break;
3564 default:
3565 if (tcc_state->warn_unsupported)
3566 tcc_warning("'%s' attribute ignored", get_tok_str(t, NULL));
3567 /* skip parameters */
3568 if (tok == '(') {
3569 int parenthesis = 0;
3570 do {
3571 if (tok == '(')
3572 parenthesis++;
3573 else if (tok == ')')
3574 parenthesis--;
3575 next();
3576 } while (parenthesis && tok != -1);
3578 break;
3580 if (tok != ',')
3581 break;
3582 next();
3584 skip(')');
3585 skip(')');
3586 goto redo;
3589 static Sym * find_field (CType *type, int v, int *cumofs)
3591 Sym *s = type->ref;
3592 v |= SYM_FIELD;
3593 while ((s = s->next) != NULL) {
3594 if ((s->v & SYM_FIELD) &&
3595 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3596 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3597 Sym *ret = find_field (&s->type, v, cumofs);
3598 if (ret) {
3599 *cumofs += s->c;
3600 return ret;
3603 if (s->v == v)
3604 break;
3606 return s;
3609 static void struct_layout(CType *type, AttributeDef *ad)
3611 int size, align, maxalign, offset, c, bit_pos, bit_size;
3612 int packed, a, bt, prevbt, prev_bit_size;
3613 int pcc = !tcc_state->ms_bitfields;
3614 int pragma_pack = *tcc_state->pack_stack_ptr;
3615 Sym *f;
3617 maxalign = 1;
3618 offset = 0;
3619 c = 0;
3620 bit_pos = 0;
3621 prevbt = VT_STRUCT; /* make it never match */
3622 prev_bit_size = 0;
3624 //#define BF_DEBUG
3626 for (f = type->ref->next; f; f = f->next) {
3627 if (f->type.t & VT_BITFIELD)
3628 bit_size = BIT_SIZE(f->type.t);
3629 else
3630 bit_size = -1;
3631 size = type_size(&f->type, &align);
3632 a = f->a.aligned ? 1 << (f->a.aligned - 1) : 0;
3633 packed = 0;
3635 if (pcc && bit_size == 0) {
3636 /* in pcc mode, packing does not affect zero-width bitfields */
3638 } else {
3639 /* in pcc mode, attribute packed overrides if set. */
3640 if (pcc && (f->a.packed || ad->a.packed))
3641 align = packed = 1;
3643 /* pragma pack overrides align if smaller, and always packs bitfields */
3644 if (pragma_pack) {
3645 packed = 1;
3646 if (pragma_pack < align)
3647 align = pragma_pack;
3648 /* in pcc mode pragma pack also overrides individual align */
3649 if (pcc && pragma_pack < a)
3650 a = 0;
3653 /* some individual align was specified */
3654 if (a)
3655 align = a;
3657 if (type->ref->type.t == VT_UNION) {
3658 if (pcc && bit_size >= 0)
3659 size = (bit_size + 7) >> 3;
3660 offset = 0;
3661 if (size > c)
3662 c = size;
3664 } else if (bit_size < 0) {
3665 if (pcc)
3666 c += (bit_pos + 7) >> 3;
3667 c = (c + align - 1) & -align;
3668 offset = c;
3669 if (size > 0)
3670 c += size;
3671 bit_pos = 0;
3672 prevbt = VT_STRUCT;
3673 prev_bit_size = 0;
3675 } else {
3676 /* A bit-field. Layout is more complicated. There are two
3677 options: PCC (GCC) compatible and MS compatible */
3678 if (pcc) {
3679 /* In PCC layout a bit-field is placed adjacent to the
3680 preceding bit-fields, except if:
3681 - it has zero-width
3682 - an individual alignment was given
3683 - it would overflow its base type container and
3684 there is no packing */
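            /* Worked example (added, assuming 32-bit int): in
                   struct { int a:20; int b:20; }
               'a' uses bits 0..19 of the first int, 'b' would overflow that
               container, so without packing it starts at the next int
               boundary and the struct is 8 bytes. */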
3685 if (bit_size == 0) {
3686 new_field:
3687 c = (c + ((bit_pos + 7) >> 3) + align - 1) & -align;
3688 bit_pos = 0;
3689 } else if (f->a.aligned) {
3690 goto new_field;
3691 } else if (!packed) {
3692 int a8 = align * 8;
3693 int ofs = ((c * 8 + bit_pos) % a8 + bit_size + a8 - 1) / a8;
3694 if (ofs > size / align)
3695 goto new_field;
3698 /* in pcc mode, long long bitfields have type int if they fit */
3699 if (size == 8 && bit_size <= 32)
3700 f->type.t = (f->type.t & ~VT_BTYPE) | VT_INT, size = 4;
3702 while (bit_pos >= align * 8)
3703 c += align, bit_pos -= align * 8;
3704 offset = c;
3706 /* In PCC layout named bit-fields influence the alignment
3707 of the containing struct using the base type's alignment,
3708 except for packed fields (which here have correct align). */
3709 if (f->v & SYM_FIRST_ANOM
3710 // && bit_size // ??? gcc on ARM/rpi does that
3712 align = 1;
3714 } else {
3715 bt = f->type.t & VT_BTYPE;
3716 if ((bit_pos + bit_size > size * 8)
3717 || (bit_size > 0) == (bt != prevbt)
3719 c = (c + align - 1) & -align;
3720 offset = c;
3721 bit_pos = 0;
3722 /* In MS bitfield mode a bit-field run always uses
3723 at least as many bits as the underlying type.
3724 To start a new run it's also required that this
3725 or the last bit-field had non-zero width. */
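                /* Worked example (added): with MS bitfields,
                       struct { char a:4; int b:4; }
                   starts a new run for 'b' because the base type changes,
                   giving size 8, whereas the PCC/GCC layout above typically
                   packs both fields into 4 bytes. */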
3726 if (bit_size || prev_bit_size)
3727 c += size;
3729 /* In MS layout the record's alignment is normally
3730 influenced by the field, except for a zero-width
3731 field at the start of a run (but by further zero-width
3732 fields it is again). */
3733 if (bit_size == 0 && prevbt != bt)
3734 align = 1;
3735 prevbt = bt;
3736 prev_bit_size = bit_size;
3739 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
3740 | (bit_pos << VT_STRUCT_SHIFT);
3741 bit_pos += bit_size;
3743 if (align > maxalign)
3744 maxalign = align;
3746 #ifdef BF_DEBUG
3747 printf("set field %s offset %-2d size %-2d align %-2d",
3748 get_tok_str(f->v & ~SYM_FIELD, NULL), offset, size, align);
3749 if (f->type.t & VT_BITFIELD) {
3750 printf(" pos %-2d bits %-2d",
3751 BIT_POS(f->type.t),
3752 BIT_SIZE(f->type.t)
3755 printf("\n");
3756 #endif
3758 f->c = offset;
3759 f->r = 0;
3762 if (pcc)
3763 c += (bit_pos + 7) >> 3;
3765 /* store size and alignment */
3766 a = bt = ad->a.aligned ? 1 << (ad->a.aligned - 1) : 1;
3767 if (a < maxalign)
3768 a = maxalign;
3769 type->ref->r = a;
3770 if (pragma_pack && pragma_pack < maxalign && 0 == pcc) {
3771 /* can happen if individual align for some member was given. In
3772 this case MSVC ignores maxalign when aligning the size */
3773 a = pragma_pack;
3774 if (a < bt)
3775 a = bt;
3777 c = (c + a - 1) & -a;
3778 type->ref->c = c;
3780 #ifdef BF_DEBUG
3781 printf("struct size %-2d align %-2d\n\n", c, a), fflush(stdout);
3782 #endif
3784 /* check whether we can access bitfields by their type */
3785 for (f = type->ref->next; f; f = f->next) {
3786 int s, px, cx, c0;
3787 CType t;
3789 if (0 == (f->type.t & VT_BITFIELD))
3790 continue;
3791 f->type.ref = f;
3792 f->auxtype = -1;
3793 bit_size = BIT_SIZE(f->type.t);
3794 if (bit_size == 0)
3795 continue;
3796 bit_pos = BIT_POS(f->type.t);
3797 size = type_size(&f->type, &align);
3798 if (bit_pos + bit_size <= size * 8 && f->c + size <= c)
3799 continue;
3801 /* try to access the field using a different type */
3802 c0 = -1, s = align = 1;
3803 for (;;) {
3804 px = f->c * 8 + bit_pos;
3805 cx = (px >> 3) & -align;
3806 px = px - (cx << 3);
3807 if (c0 == cx)
3808 break;
3809 s = (px + bit_size + 7) >> 3;
3810 if (s > 4) {
3811 t.t = VT_LLONG;
3812 } else if (s > 2) {
3813 t.t = VT_INT;
3814 } else if (s > 1) {
3815 t.t = VT_SHORT;
3816 } else {
3817 t.t = VT_BYTE;
3819 s = type_size(&t, &align);
3820 c0 = cx;
3823 if (px + bit_size <= s * 8 && cx + s <= c) {
3824 /* update offset and bit position */
3825 f->c = cx;
3826 bit_pos = px;
3827 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
3828 | (bit_pos << VT_STRUCT_SHIFT);
3829 if (s != size)
3830 f->auxtype = t.t;
3831 #ifdef BF_DEBUG
3832 printf("FIX field %s offset %-2d size %-2d align %-2d "
3833 "pos %-2d bits %-2d\n",
3834 get_tok_str(f->v & ~SYM_FIELD, NULL),
3835 cx, s, align, px, bit_size);
3836 #endif
3837 } else {
3838 /* fall back to byte-wise load/store */
3839 f->auxtype = VT_STRUCT;
3840 #ifdef BF_DEBUG
3841 printf("FIX field %s : load byte-wise\n",
3842 get_tok_str(f->v & ~SYM_FIELD, NULL));
3843 #endif
3848 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
3849 static void struct_decl(CType *type, int u)
3851 int v, c, size, align, flexible;
3852 int bit_size, bsize, bt;
3853 Sym *s, *ss, **ps;
3854 AttributeDef ad, ad1;
3855 CType type1, btype;
3857 memset(&ad, 0, sizeof ad);
3858 next();
3859 parse_attribute(&ad);
3860 if (tok != '{') {
3861 v = tok;
3862 next();
3863 /* struct already defined ? return it */
3864 if (v < TOK_IDENT)
3865 expect("struct/union/enum name");
3866 s = struct_find(v);
3867 if (s && (s->sym_scope == local_scope || tok != '{')) {
3868 if (u == s->type.t)
3869 goto do_decl;
3870 if (u == VT_ENUM && IS_ENUM(s->type.t))
3871 goto do_decl;
3872 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
3874 } else {
3875 v = anon_sym++;
3877 /* Record the original enum/struct/union token. */
3878 type1.t = u == VT_ENUM ? u | VT_INT | VT_UNSIGNED : u;
3879 type1.ref = NULL;
3880 /* we put an undefined size for struct/union */
3881 s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
3882 s->r = 0; /* default alignment is zero as gcc */
3883 do_decl:
3884 type->t = s->type.t;
3885 type->ref = s;
3887 if (tok == '{') {
3888 next();
3889 if (s->c != -1)
3890 tcc_error("struct/union/enum already defined");
3891 s->c = -2;
3892 /* cannot be empty */
3893 /* empty enums are not allowed */
3894 ps = &s->next;
3895 if (u == VT_ENUM) {
3896 long long ll = 0, pl = 0, nl = 0;
3897 CType t;
3898 t.ref = s;
3899 /* enum symbols have static storage */
3900 t.t = VT_INT|VT_STATIC|VT_ENUM_VAL;
3901 for(;;) {
3902 v = tok;
3903 if (v < TOK_UIDENT)
3904 expect("identifier");
3905 ss = sym_find(v);
3906 if (ss && !local_stack)
3907 tcc_error("redefinition of enumerator '%s'",
3908 get_tok_str(v, NULL));
3909 next();
3910 if (tok == '=') {
3911 next();
3912 ll = expr_const64();
3914 ss = sym_push(v, &t, VT_CONST, 0);
3915 ss->enum_val = ll;
3916 *ps = ss, ps = &ss->next;
3917 if (ll < nl)
3918 nl = ll;
3919 if (ll > pl)
3920 pl = ll;
3921 if (tok != ',')
3922 break;
3923 next();
3924 ll++;
3925 /* NOTE: we accept a trailing comma */
3926 if (tok == '}')
3927 break;
3929 skip('}');
3930 /* set integral type of the enum */
3931 t.t = VT_INT;
3932 if (nl >= 0) {
3933 if (pl != (unsigned)pl)
3934 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
3935 t.t |= VT_UNSIGNED;
3936 } else if (pl != (int)pl || nl != (int)nl)
3937 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
3938 s->type.t = type->t = t.t | VT_ENUM;
3939 s->c = 0;
3940 /* set type for enum members */
3941 for (ss = s->next; ss; ss = ss->next) {
3942 ll = ss->enum_val;
3943 if (ll == (int)ll) /* default is int if it fits */
3944 continue;
3945 if (t.t & VT_UNSIGNED) {
3946 ss->type.t |= VT_UNSIGNED;
3947 if (ll == (unsigned)ll)
3948 continue;
3950 ss->type.t = (ss->type.t & ~VT_BTYPE)
3951 | (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
3953 } else {
3954 c = 0;
3955 flexible = 0;
3956 while (tok != '}') {
3957 if (!parse_btype(&btype, &ad1)) {
3958 skip(';');
3959 continue;
3961 while (1) {
3962 if (flexible)
3963 tcc_error("flexible array member '%s' not at the end of struct",
3964 get_tok_str(v, NULL));
3965 bit_size = -1;
3966 v = 0;
3967 type1 = btype;
3968 if (tok != ':') {
3969 if (tok != ';')
3970 type_decl(&type1, &ad1, &v, TYPE_DIRECT);
3971 if (v == 0) {
3972 if ((type1.t & VT_BTYPE) != VT_STRUCT)
3973 expect("identifier");
3974 else {
3975 int v = btype.ref->v;
3976 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
3977 if (tcc_state->ms_extensions == 0)
3978 expect("identifier");
3982 if (type_size(&type1, &align) < 0) {
3983 if ((u == VT_STRUCT) && (type1.t & VT_ARRAY) && c)
3984 flexible = 1;
3985 else
3986 tcc_error("field '%s' has incomplete type",
3987 get_tok_str(v, NULL));
3989 if ((type1.t & VT_BTYPE) == VT_FUNC ||
3990 (type1.t & VT_BTYPE) == VT_VOID ||
3991 (type1.t & VT_STORAGE))
3992 tcc_error("invalid type for '%s'",
3993 get_tok_str(v, NULL));
3995 if (tok == ':') {
3996 next();
3997 bit_size = expr_const();
3998 /* XXX: handle v = 0 case for messages */
3999 if (bit_size < 0)
4000 tcc_error("negative width in bit-field '%s'",
4001 get_tok_str(v, NULL));
4002 if (v && bit_size == 0)
4003 tcc_error("zero width for bit-field '%s'",
4004 get_tok_str(v, NULL));
4005 parse_attribute(&ad1);
4007 size = type_size(&type1, &align);
4008 if (bit_size >= 0) {
4009 bt = type1.t & VT_BTYPE;
4010 if (bt != VT_INT &&
4011 bt != VT_BYTE &&
4012 bt != VT_SHORT &&
4013 bt != VT_BOOL &&
4014 bt != VT_LLONG)
4015 tcc_error("bitfields must have scalar type");
4016 bsize = size * 8;
4017 if (bit_size > bsize) {
4018 tcc_error("width of '%s' exceeds its type",
4019 get_tok_str(v, NULL));
4020 } else if (bit_size == bsize
4021 && !ad.a.packed && !ad1.a.packed) {
4022 /* no need for bit fields */
4024 } else if (bit_size == 64) {
4025 tcc_error("field width 64 not implemented");
4026 } else {
4027 type1.t = (type1.t & ~VT_STRUCT_MASK)
4028 | VT_BITFIELD
4029 | (bit_size << (VT_STRUCT_SHIFT + 6));
4032 if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
4033 /* Remember we've seen a real field to check
4034 for placement of flexible array member. */
4035 c = 1;
4037 /* If member is a struct or bit-field, enforce
4038 placing into the struct (as anonymous). */
4039 if (v == 0 &&
4040 ((type1.t & VT_BTYPE) == VT_STRUCT ||
4041 bit_size >= 0)) {
4042 v = anon_sym++;
4044 if (v) {
4045 ss = sym_push(v | SYM_FIELD, &type1, 0, 0);
4046 ss->a = ad1.a;
4047 *ps = ss;
4048 ps = &ss->next;
4050 if (tok == ';' || tok == TOK_EOF)
4051 break;
4052 skip(',');
4054 skip(';');
4056 skip('}');
4057 parse_attribute(&ad);
4058 struct_layout(type, &ad);
4063 static void sym_to_attr(AttributeDef *ad, Sym *s)
4065 merge_symattr(&ad->a, &s->a);
4066 merge_funcattr(&ad->f, &s->f);
4069 /* Add type qualifiers to a type. If the type is an array then the qualifiers
4070 are added to the element type, copied because it could be a typedef. */
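/* Example (added): given "typedef int T[4];", the declaration "const T x;"
   pushes the qualifier down to the element type (array of const int) on a
   copy of the ref chain, so the typedef itself is left unmodified. */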
4071 static void parse_btype_qualify(CType *type, int qualifiers)
4073 while (type->t & VT_ARRAY) {
4074 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
4075 type = &type->ref->type;
4077 type->t |= qualifiers;
4080 /* return 0 if no type declaration. otherwise, return the basic type
4081 and skip it.
4083 static int parse_btype(CType *type, AttributeDef *ad)
4085 int t, u, bt, st, type_found, typespec_found, g;
4086 Sym *s;
4087 CType type1;
4089 memset(ad, 0, sizeof(AttributeDef));
4090 type_found = 0;
4091 typespec_found = 0;
4092 t = VT_INT;
4093 bt = st = -1;
4094 type->ref = NULL;
4096 while(1) {
4097 switch(tok) {
4098 case TOK_EXTENSION:
4099 /* currently, we simply ignore __extension__ */
4100 next();
4101 continue;
4103 /* basic types */
4104 case TOK_CHAR:
4105 u = VT_BYTE;
4106 basic_type:
4107 next();
4108 basic_type1:
4109 if (u == VT_SHORT || u == VT_LONG) {
4110 if (st != -1 || (bt != -1 && bt != VT_INT))
4111 tmbt: tcc_error("too many basic types");
4112 st = u;
4113 } else {
4114 if (bt != -1 || (st != -1 && u != VT_INT))
4115 goto tmbt;
4116 bt = u;
4118 if (u != VT_INT)
4119 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
4120 typespec_found = 1;
4121 break;
4122 case TOK_VOID:
4123 u = VT_VOID;
4124 goto basic_type;
4125 case TOK_SHORT:
4126 u = VT_SHORT;
4127 goto basic_type;
4128 case TOK_INT:
4129 u = VT_INT;
4130 goto basic_type;
4131 case TOK_ALIGNAS:
4132 { int n;
4133 AttributeDef ad1;
4134 next();
4135 skip('(');
4136 memset(&ad1, 0, sizeof(AttributeDef));
4137 if (parse_btype(&type1, &ad1)) {
4138 type_decl(&type1, &ad1, &n, TYPE_ABSTRACT);
4139 if (ad1.a.aligned)
4140 n = 1 << (ad1.a.aligned - 1);
4141 else
4142 type_size(&type1, &n);
4143 } else {
4144 n = expr_const();
4145 if (n <= 0 || (n & (n - 1)) != 0)
4146 tcc_error("alignment must be a positive power of two");
4148 skip(')');
4149 ad->a.aligned = exact_log2p1(n);
4151 continue;
4152 case TOK_LONG:
4153 if ((t & VT_BTYPE) == VT_DOUBLE) {
4154 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
4155 } else if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
4156 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LLONG;
4157 } else {
4158 u = VT_LONG;
4159 goto basic_type;
4161 next();
4162 break;
4163 #ifdef TCC_TARGET_ARM64
4164 case TOK_UINT128:
4165 /* GCC's __uint128_t appears in some Linux header files. Make it a
4166 synonym for long double to get the size and alignment right. */
4167 u = VT_LDOUBLE;
4168 goto basic_type;
4169 #endif
4170 case TOK_BOOL:
4171 u = VT_BOOL;
4172 goto basic_type;
4173 case TOK_FLOAT:
4174 u = VT_FLOAT;
4175 goto basic_type;
4176 case TOK_DOUBLE:
4177 if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
4178 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
4179 } else {
4180 u = VT_DOUBLE;
4181 goto basic_type;
4183 next();
4184 break;
4185 case TOK_ENUM:
4186 struct_decl(&type1, VT_ENUM);
4187 basic_type2:
4188 u = type1.t;
4189 type->ref = type1.ref;
4190 goto basic_type1;
4191 case TOK_STRUCT:
4192 struct_decl(&type1, VT_STRUCT);
4193 goto basic_type2;
4194 case TOK_UNION:
4195 struct_decl(&type1, VT_UNION);
4196 goto basic_type2;
4198 /* type modifiers */
4199 case TOK_CONST1:
4200 case TOK_CONST2:
4201 case TOK_CONST3:
4202 type->t = t;
4203 parse_btype_qualify(type, VT_CONSTANT);
4204 t = type->t;
4205 next();
4206 break;
4207 case TOK_VOLATILE1:
4208 case TOK_VOLATILE2:
4209 case TOK_VOLATILE3:
4210 type->t = t;
4211 parse_btype_qualify(type, VT_VOLATILE);
4212 t = type->t;
4213 next();
4214 break;
4215 case TOK_SIGNED1:
4216 case TOK_SIGNED2:
4217 case TOK_SIGNED3:
4218 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
4219 tcc_error("signed and unsigned modifier");
4220 t |= VT_DEFSIGN;
4221 next();
4222 typespec_found = 1;
4223 break;
4224 case TOK_REGISTER:
4225 case TOK_AUTO:
4226 case TOK_RESTRICT1:
4227 case TOK_RESTRICT2:
4228 case TOK_RESTRICT3:
4229 next();
4230 break;
4231 case TOK_UNSIGNED:
4232 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
4233 tcc_error("signed and unsigned modifier");
4234 t |= VT_DEFSIGN | VT_UNSIGNED;
4235 next();
4236 typespec_found = 1;
4237 break;
4239 /* storage */
4240 case TOK_EXTERN:
4241 g = VT_EXTERN;
4242 goto storage;
4243 case TOK_STATIC:
4244 g = VT_STATIC;
4245 goto storage;
4246 case TOK_TYPEDEF:
4247 g = VT_TYPEDEF;
4248 goto storage;
4249 storage:
4250 if (t & (VT_EXTERN|VT_STATIC|VT_TYPEDEF) & ~g)
4251 tcc_error("multiple storage classes");
4252 t |= g;
4253 next();
4254 break;
4255 case TOK_INLINE1:
4256 case TOK_INLINE2:
4257 case TOK_INLINE3:
4258 t |= VT_INLINE;
4259 next();
4260 break;
4261 case TOK_NORETURN3:
4262 /* currently, no need to handle it because tcc does not
4263 make use of the noreturn information */
4264 next();
4265 break;
4266 /* GNUC attribute */
4267 case TOK_ATTRIBUTE1:
4268 case TOK_ATTRIBUTE2:
4269 parse_attribute(ad);
4270 if (ad->attr_mode) {
4271 u = ad->attr_mode -1;
4272 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
4274 continue;
4275 /* GNUC typeof */
4276 case TOK_TYPEOF1:
4277 case TOK_TYPEOF2:
4278 case TOK_TYPEOF3:
4279 next();
4280 parse_expr_type(&type1);
4281 /* remove all storage modifiers except typedef */
4282 type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
4283 if (type1.ref)
4284 sym_to_attr(ad, type1.ref);
4285 goto basic_type2;
4286 default:
4287 if (typespec_found)
4288 goto the_end;
4289 s = sym_find(tok);
4290 if (!s || !(s->type.t & VT_TYPEDEF))
4291 goto the_end;
4292 t &= ~(VT_BTYPE|VT_LONG);
4293 u = t & ~(VT_CONSTANT | VT_VOLATILE), t ^= u;
4294 type->t = (s->type.t & ~VT_TYPEDEF) | u;
4295 type->ref = s->type.ref;
4296 if (t)
4297 parse_btype_qualify(type, t);
4298 t = type->t;
4299 /* get attributes from typedef */
4300 sym_to_attr(ad, s);
4301 next();
4302 typespec_found = 1;
4303 st = bt = -2;
4304 break;
4306 type_found = 1;
4308 the_end:
4309 if (tcc_state->char_is_unsigned) {
4310 if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
4311 t |= VT_UNSIGNED;
4313 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
4314 bt = t & (VT_BTYPE|VT_LONG);
4315 if (bt == VT_LONG)
4316 t |= LONG_SIZE == 8 ? VT_LLONG : VT_INT;
4317 #ifdef TCC_TARGET_PE
4318 if (bt == VT_LDOUBLE)
4319 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_DOUBLE;
4320 #endif
4321 type->t = t;
4322 return type_found;
4325 /* convert a function parameter type (array to pointer and function to
4326 function pointer) */
4327 static inline void convert_parameter_type(CType *pt)
4329 /* remove const and volatile qualifiers (XXX: const could be used
4330 to indicate a const function parameter) */
4331 pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
4332 /* array must be transformed to pointer according to ANSI C */
4333 pt->t &= ~VT_ARRAY;
4334 if ((pt->t & VT_BTYPE) == VT_FUNC) {
4335 mk_pointer(pt);
4339 ST_FUNC void parse_asm_str(CString *astr)
4341 skip('(');
4342 parse_mult_str(astr, "string constant");
4345 /* Parse an asm label and return the token */
4346 static int asm_label_instr(void)
4348 int v;
4349 CString astr;
4351 next();
4352 parse_asm_str(&astr);
4353 skip(')');
4354 #ifdef ASM_DEBUG
4355 printf("asm_alias: \"%s\"\n", (char *)astr.data);
4356 #endif
4357 v = tok_alloc(astr.data, astr.size - 1)->tok;
4358 cstr_free(&astr);
4359 return v;
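/* Example (illustrative): for a declaration such as
 *     int foo(void) __asm__("real_foo");
 * the function above parses the string "real_foo" and returns its
 * token, which later code can use as the symbol's assembler name. */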
4362 static int post_type(CType *type, AttributeDef *ad, int storage, int td)
4364 int n, l, t1, arg_size, align;
4365 Sym **plast, *s, *first;
4366 AttributeDef ad1;
4367 CType pt;
4369 if (tok == '(') {
4370 /* function type, or recursive declarator (return if so) */
4371 next();
4372 if (td && !(td & TYPE_ABSTRACT))
4373 return 0;
4374 if (tok == ')')
4375 l = 0;
4376 else if (parse_btype(&pt, &ad1))
4377 l = FUNC_NEW;
4378 else if (td) {
4379 merge_attr (ad, &ad1);
4380 return 0;
4381 } else
4382 l = FUNC_OLD;
4383 first = NULL;
4384 plast = &first;
4385 arg_size = 0;
4386 if (l) {
4387 for(;;) {
4388 /* read param name and compute offset */
4389 if (l != FUNC_OLD) {
4390 if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
4391 break;
4392 type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT);
4393 if ((pt.t & VT_BTYPE) == VT_VOID)
4394 tcc_error("parameter declared as void");
4395 } else {
4396 n = tok;
4397 if (n < TOK_UIDENT)
4398 expect("identifier");
4399 pt.t = VT_VOID; /* invalid type */
4400 next();
4402 convert_parameter_type(&pt);
4403 arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
4404 s = sym_push(n | SYM_FIELD, &pt, 0, 0);
4405 *plast = s;
4406 plast = &s->next;
4407 if (tok == ')')
4408 break;
4409 skip(',');
4410 if (l == FUNC_NEW && tok == TOK_DOTS) {
4411 l = FUNC_ELLIPSIS;
4412 next();
4413 break;
4415 if (l == FUNC_NEW && !parse_btype(&pt, &ad1))
4416 tcc_error("invalid type");
4418 } else
4419 /* if no parameters, then old type prototype */
4420 l = FUNC_OLD;
4421 skip(')');
4422 /* NOTE: const is ignored in returned type as it has a special
4423 meaning in gcc / C++ */
4424 type->t &= ~VT_CONSTANT;
4425 /* some ancient pre-K&R C allows a function to return an array
4426 and the array brackets to be put after the arguments, such
4427 that "int c()[]" means something like "int[] c()" */
4428 if (tok == '[') {
4429 next();
4430 skip(']'); /* only handle simple "[]" */
4431 mk_pointer(type);
4433 /* we push an anonymous symbol which will contain the function prototype */
4434 ad->f.func_args = arg_size;
4435 ad->f.func_type = l;
4436 s = sym_push(SYM_FIELD, type, 0, 0);
4437 s->a = ad->a;
4438 s->f = ad->f;
4439 s->next = first;
4440 type->t = VT_FUNC;
4441 type->ref = s;
4442 } else if (tok == '[') {
4443 int saved_nocode_wanted = nocode_wanted;
4444 /* array definition */
4445 next();
4446 while (1) {
4447 /* XXX The optional type-quals and static should only be accepted
4448 in parameter decls. The '*' as well, and then even only
4449 in prototypes (not function defs). */
4450 switch (tok) {
4451 case TOK_RESTRICT1: case TOK_RESTRICT2: case TOK_RESTRICT3:
4452 case TOK_CONST1:
4453 case TOK_VOLATILE1:
4454 case TOK_STATIC:
4455 case '*':
4456 next();
4457 continue;
4458 default:
4459 break;
4461 break;
4463 n = -1;
4464 t1 = 0;
4465 if (tok != ']') {
4466 if (!local_stack || (storage & VT_STATIC))
4467 vpushi(expr_const());
4468 else {
4469 /* The length of a VLA (which can only happen with local_stack
4470 && !VT_STATIC) must always be evaluated, even under nocode_wanted,
4471 so that its size slot is initialized (e.g. under sizeof
4472 or typeof). */
4473 nocode_wanted = 0;
4474 gexpr();
4476 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4477 n = vtop->c.i;
4478 if (n < 0)
4479 tcc_error("invalid array size");
4480 } else {
4481 if (!is_integer_btype(vtop->type.t & VT_BTYPE))
4482 tcc_error("size of variable length array should be an integer");
4483 n = 0;
4484 t1 = VT_VLA;
4487 skip(']');
4488 /* parse next post type */
4489 post_type(type, ad, storage, 0);
4490 if (type->t == VT_FUNC)
4491 tcc_error("declaration of an array of functions");
4492 t1 |= type->t & VT_VLA;
4494 if (t1 & VT_VLA) {
4495 if (n < 0)
4496 tcc_error("need explicit inner array size in VLAs");
4497 loc -= type_size(&int_type, &align);
4498 loc &= -align;
4499 n = loc;
4501 vla_runtime_type_size(type, &align);
4502 gen_op('*');
4503 vset(&int_type, VT_LOCAL|VT_LVAL, n);
4504 vswap();
4505 vstore();
4507 if (n != -1)
4508 vpop();
4509 nocode_wanted = saved_nocode_wanted;
4511 /* we push an anonymous symbol which will contain the array
4512 element type */
4513 s = sym_push(SYM_FIELD, type, 0, n);
4514 type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
4515 type->ref = s;
4517 return 1;
4520 /* Parse a type declarator (except basic type), and return the type
4521 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4522 expected. 'type' should contain the basic type. 'ad' is the
4523 attribute definition of the basic type. It can be modified by
4524 type_decl(). If this (possibly abstract) declarator is a pointer chain
4525 it returns the innermost pointed to type (equals *type, but is a different
4526 pointer), otherwise returns type itself, that's used for recursive calls. */
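/* For illustration, with base type 'int':
 *     "*p"        in a declaration (TYPE_DIRECT)   -> 'int *',
 *                                                     *v = token of 'p'
 *     "(*)(void)" in a cast/abstract use (TYPE_ABSTRACT)
 *                 -> pointer to function returning int, *v = 0
 * These are only a sketch of the cases handled below. */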
4527 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td)
4529 CType *post, *ret;
4530 int qualifiers, storage;
4532 /* recursive type, remove storage bits first, apply them later again */
4533 storage = type->t & VT_STORAGE;
4534 type->t &= ~VT_STORAGE;
4535 post = ret = type;
4537 while (tok == '*') {
4538 qualifiers = 0;
4539 redo:
4540 next();
4541 switch(tok) {
4542 case TOK_CONST1:
4543 case TOK_CONST2:
4544 case TOK_CONST3:
4545 qualifiers |= VT_CONSTANT;
4546 goto redo;
4547 case TOK_VOLATILE1:
4548 case TOK_VOLATILE2:
4549 case TOK_VOLATILE3:
4550 qualifiers |= VT_VOLATILE;
4551 goto redo;
4552 case TOK_RESTRICT1:
4553 case TOK_RESTRICT2:
4554 case TOK_RESTRICT3:
4555 goto redo;
4556 /* XXX: clarify attribute handling */
4557 case TOK_ATTRIBUTE1:
4558 case TOK_ATTRIBUTE2:
4559 parse_attribute(ad);
4560 break;
4562 mk_pointer(type);
4563 type->t |= qualifiers;
4564 if (ret == type)
4565 /* innermost pointed to type is the one for the first derivation */
4566 ret = pointed_type(type);
4569 if (tok == '(') {
4570 /* This is possibly a parameter type list for abstract declarators
4571 ('int ()'), use post_type for testing this. */
4572 if (!post_type(type, ad, 0, td)) {
4573 /* It's not, so it's a nested declarator, and the post operations
4574 apply to the innermost pointed to type (if any). */
4575 /* XXX: this is not correct to modify 'ad' at this point, but
4576 the syntax is not clear */
4577 parse_attribute(ad);
4578 post = type_decl(type, ad, v, td);
4579 skip(')');
4580 } else
4581 goto abstract;
4582 } else if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
4583 /* type identifier */
4584 *v = tok;
4585 next();
4586 } else {
4587 abstract:
4588 if (!(td & TYPE_ABSTRACT))
4589 expect("identifier");
4590 *v = 0;
4592 post_type(post, ad, storage, 0);
4593 parse_attribute(ad);
4594 type->t |= storage;
4595 return ret;
4598 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
4599 ST_FUNC int lvalue_type(int t)
4601 int bt, r;
4602 r = VT_LVAL;
4603 bt = t & VT_BTYPE;
4604 if (bt == VT_BYTE || bt == VT_BOOL)
4605 r |= VT_LVAL_BYTE;
4606 else if (bt == VT_SHORT)
4607 r |= VT_LVAL_SHORT;
4608 else
4609 return r;
4610 if (t & VT_UNSIGNED)
4611 r |= VT_LVAL_UNSIGNED;
4612 return r;
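/* Illustrative mapping (follows directly from the code above):
 *     char           -> VT_LVAL | VT_LVAL_BYTE
 *     unsigned short -> VT_LVAL | VT_LVAL_SHORT | VT_LVAL_UNSIGNED
 *     int, pointers  -> VT_LVAL */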
4615 /* indirection with full error checking and bound check */
4616 ST_FUNC void indir(void)
4618 if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
4619 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
4620 return;
4621 expect("pointer");
4623 if (vtop->r & VT_LVAL)
4624 gv(RC_INT);
4625 vtop->type = *pointed_type(&vtop->type);
4626 /* Arrays and functions are never lvalues */
4627 if (!(vtop->type.t & VT_ARRAY) && !(vtop->type.t & VT_VLA)
4628 && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
4629 vtop->r |= lvalue_type(vtop->type.t);
4630 /* if bound checking, the referenced pointer must be checked */
4631 #ifdef CONFIG_TCC_BCHECK
4632 if (tcc_state->do_bounds_check)
4633 vtop->r |= VT_MUSTBOUND;
4634 #endif
4638 /* pass a parameter to a function and do type checking and casting */
4639 static void gfunc_param_typed(Sym *func, Sym *arg)
4641 int func_type;
4642 CType type;
4644 func_type = func->f.func_type;
4645 if (func_type == FUNC_OLD ||
4646 (func_type == FUNC_ELLIPSIS && arg == NULL)) {
4647 /* default casting : only need to convert float to double */
4648 if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
4649 gen_cast_s(VT_DOUBLE);
4650 } else if (vtop->type.t & VT_BITFIELD) {
4651 type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
4652 type.ref = vtop->type.ref;
4653 gen_cast(&type);
4655 } else if (arg == NULL) {
4656 tcc_error("too many arguments to function");
4657 } else {
4658 type = arg->type;
4659 type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
4660 gen_assign_cast(&type);
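/* Example (illustrative): in a call like
 *     printf("%f", 1.0f);
 * the float argument matches the '...' part of a variadic prototype
 * (arg == NULL here), so the default-argument rule above promotes it
 * to double; arguments matching declared parameters are instead
 * converted with gen_assign_cast(). */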
4664 /* parse an expression and return its type without any side effect. */
4665 static void expr_type(CType *type, void (*expr_fn)(void))
4667 nocode_wanted++;
4668 expr_fn();
4669 *type = vtop->type;
4670 vpop();
4671 nocode_wanted--;
4674 /* parse an expression of the form '(type)' or '(expr)' and return its
4675 type */
4676 static void parse_expr_type(CType *type)
4678 int n;
4679 AttributeDef ad;
4681 skip('(');
4682 if (parse_btype(type, &ad)) {
4683 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4684 } else {
4685 expr_type(type, gexpr);
4687 skip(')');
4690 static void parse_type(CType *type)
4692 AttributeDef ad;
4693 int n;
4695 if (!parse_btype(type, &ad)) {
4696 expect("type");
4698 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4701 static void parse_builtin_params(int nc, const char *args)
4703 char c, sep = '(';
4704 CType t;
4705 if (nc)
4706 nocode_wanted++;
4707 next();
4708 while ((c = *args++)) {
4709 skip(sep);
4710 sep = ',';
4711 switch (c) {
4712 case 'e': expr_eq(); continue;
4713 case 't': parse_type(&t); vpush(&t); continue;
4714 default: tcc_error("internal error"); break;
4717 skip(')');
4718 if (nc)
4719 nocode_wanted--;
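/* For illustration: parse_builtin_params(0, "et") parses "(expr, type)"
 * and leaves both on the value stack; 'e' stands for an assignment
 * expression and 't' for a type name, as used by the builtins below. */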
4722 static void try_call_scope_cleanup(Sym *stop)
4724 Sym *cls = current_cleanups;
4726 for (; cls != stop; cls = cls->ncl) {
4727 Sym *fs = cls->next;
4728 Sym *vs = cls->prev_tok;
4730 vpushsym(&fs->type, fs);
4731 vset(&vs->type, vs->r, vs->c);
4732 vtop->sym = vs;
4733 mk_pointer(&vtop->type);
4734 gaddrof();
4735 gfunc_call(1);
4739 static void try_call_cleanup_goto(Sym *cleanupstate)
4741 Sym *oc, *cc;
4742 int ocd, ccd;
4744 if (!current_cleanups)
4745 return;
4747 /* search NCA of both cleanup chains given parents and initial depth */
4748 ocd = cleanupstate ? cleanupstate->v & ~SYM_FIELD : 0;
4749 for (ccd = ncleanups, oc = cleanupstate; ocd > ccd; --ocd, oc = oc->ncl)
4751 for (cc = current_cleanups; ccd > ocd; --ccd, cc = cc->ncl)
4753 for (; cc != oc; cc = cc->ncl, oc = oc->ncl, --ccd)
4756 try_call_scope_cleanup(cc);
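/* Rough sketch of the situation handled above:
 *     {   __attribute__((cleanup(fn1))) int a;
 *     again:
 *         {   __attribute__((cleanup(fn2))) int b;
 *             if (cond) goto again;   // must run fn2(&b), keep 'a' alive
 *         }
 *     }
 * The loops find the nearest common ancestor of the goto's and the
 * label's cleanup chains and run the cleanups down to (but excluding) it. */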
4759 ST_FUNC void unary(void)
4761 int n, t, align, size, r, sizeof_caller;
4762 CType type;
4763 Sym *s;
4764 AttributeDef ad;
4766 sizeof_caller = in_sizeof;
4767 in_sizeof = 0;
4768 type.ref = NULL;
4769 /* XXX: GCC 2.95.3 does not generate a jump table for this switch,
4770 although it would be better here */
4771 tok_next:
4772 switch(tok) {
4773 case TOK_EXTENSION:
4774 next();
4775 goto tok_next;
4776 case TOK_LCHAR:
4777 #ifdef TCC_TARGET_PE
4778 t = VT_SHORT|VT_UNSIGNED;
4779 goto push_tokc;
4780 #endif
4781 case TOK_CINT:
4782 case TOK_CCHAR:
4783 t = VT_INT;
4784 push_tokc:
4785 type.t = t;
4786 vsetc(&type, VT_CONST, &tokc);
4787 next();
4788 break;
4789 case TOK_CUINT:
4790 t = VT_INT | VT_UNSIGNED;
4791 goto push_tokc;
4792 case TOK_CLLONG:
4793 t = VT_LLONG;
4794 goto push_tokc;
4795 case TOK_CULLONG:
4796 t = VT_LLONG | VT_UNSIGNED;
4797 goto push_tokc;
4798 case TOK_CFLOAT:
4799 t = VT_FLOAT;
4800 goto push_tokc;
4801 case TOK_CDOUBLE:
4802 t = VT_DOUBLE;
4803 goto push_tokc;
4804 case TOK_CLDOUBLE:
4805 t = VT_LDOUBLE;
4806 goto push_tokc;
4807 case TOK_CLONG:
4808 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG;
4809 goto push_tokc;
4810 case TOK_CULONG:
4811 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG | VT_UNSIGNED;
4812 goto push_tokc;
4813 case TOK___FUNCTION__:
4814 if (!gnu_ext)
4815 goto tok_identifier;
4816 /* fall thru */
4817 case TOK___FUNC__:
4819 void *ptr;
4820 int len;
4821 /* special function name identifier */
4822 len = strlen(funcname) + 1;
4823 /* generate char[len] type */
4824 type.t = VT_BYTE;
4825 mk_pointer(&type);
4826 type.t |= VT_ARRAY;
4827 type.ref->c = len;
4828 vpush_ref(&type, data_section, data_section->data_offset, len);
4829 if (!NODATA_WANTED) {
4830 ptr = section_ptr_add(data_section, len);
4831 memcpy(ptr, funcname, len);
4833 next();
4835 break;
4836 case TOK_LSTR:
4837 #ifdef TCC_TARGET_PE
4838 t = VT_SHORT | VT_UNSIGNED;
4839 #else
4840 t = VT_INT;
4841 #endif
4842 goto str_init;
4843 case TOK_STR:
4844 /* string parsing */
4845 t = VT_BYTE;
4846 if (tcc_state->char_is_unsigned)
4847 t = VT_BYTE | VT_UNSIGNED;
4848 str_init:
4849 if (tcc_state->warn_write_strings)
4850 t |= VT_CONSTANT;
4851 type.t = t;
4852 mk_pointer(&type);
4853 type.t |= VT_ARRAY;
4854 memset(&ad, 0, sizeof(AttributeDef));
4855 decl_initializer_alloc(&type, &ad, VT_CONST, 2, 0, 0);
4856 break;
4857 case '(':
4858 next();
4859 /* cast ? */
4860 if (parse_btype(&type, &ad)) {
4861 type_decl(&type, &ad, &n, TYPE_ABSTRACT);
4862 skip(')');
4863 /* check ISOC99 compound literal */
4864 if (tok == '{') {
4865 /* data is allocated locally by default */
4866 if (global_expr)
4867 r = VT_CONST;
4868 else
4869 r = VT_LOCAL;
4870 /* all except arrays are lvalues */
4871 if (!(type.t & VT_ARRAY))
4872 r |= lvalue_type(type.t);
4873 memset(&ad, 0, sizeof(AttributeDef));
4874 decl_initializer_alloc(&type, &ad, r, 1, 0, 0);
4875 } else {
4876 if (sizeof_caller) {
4877 vpush(&type);
4878 return;
4880 unary();
4881 gen_cast(&type);
4883 } else if (tok == '{') {
4884 int saved_nocode_wanted = nocode_wanted;
4885 if (const_wanted)
4886 tcc_error("expected constant");
4887 /* save all registers */
4888 save_regs(0);
4889 /* statement expression : we do not accept break/continue
4890 inside as GCC does. We do retain the nocode_wanted state,
4891 as statement expressions can't ever be entered from the
4892 outside, so any reactivation of code emission (from labels
4893 or loop heads) can be disabled again after the end of it. */
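/* e.g. (illustrative) a GNU statement expression such as
 *     int x = ({ int t = f(); t + 1; });
 * is parsed by the block() call below; the value of the last
 * expression statement becomes the value of the '({ ... })'. */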
4894 block(NULL, NULL, 1);
4895 nocode_wanted = saved_nocode_wanted;
4896 skip(')');
4897 } else {
4898 gexpr();
4899 skip(')');
4901 break;
4902 case '*':
4903 next();
4904 unary();
4905 indir();
4906 break;
4907 case '&':
4908 next();
4909 unary();
4910 /* function names must be treated as function pointers,
4911 except for unary '&' and sizeof. Since we consider that
4912 functions are not lvalues, we only have to handle it
4913 there and in function calls. */
4914 /* arrays can also be used although they are not lvalues */
4915 if ((vtop->type.t & VT_BTYPE) != VT_FUNC &&
4916 !(vtop->type.t & VT_ARRAY))
4917 test_lvalue();
4918 mk_pointer(&vtop->type);
4919 gaddrof();
4920 break;
4921 case '!':
4922 next();
4923 unary();
4924 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4925 gen_cast_s(VT_BOOL);
4926 vtop->c.i = !vtop->c.i;
4927 } else if ((vtop->r & VT_VALMASK) == VT_CMP)
4928 vtop->c.i ^= 1;
4929 else {
4930 save_regs(1);
4931 vseti(VT_JMP, gvtst(1, 0));
4933 break;
4934 case '~':
4935 next();
4936 unary();
4937 vpushi(-1);
4938 gen_op('^');
4939 break;
4940 case '+':
4941 next();
4942 unary();
4943 if ((vtop->type.t & VT_BTYPE) == VT_PTR)
4944 tcc_error("pointer not accepted for unary plus");
4945 /* In order to force cast, we add zero, except for floating point
4946 where we really need a noop (otherwise -0.0 will be transformed
4947 into +0.0). */
4948 if (!is_float(vtop->type.t)) {
4949 vpushi(0);
4950 gen_op('+');
4952 break;
4953 case TOK_SIZEOF:
4954 case TOK_ALIGNOF1:
4955 case TOK_ALIGNOF2:
4956 case TOK_ALIGNOF3:
4957 t = tok;
4958 next();
4959 in_sizeof++;
4960 expr_type(&type, unary); /* performs in_sizeof = 0 via unary() */
4961 s = vtop[1].sym; /* hack: accessing previous vtop */
4962 size = type_size(&type, &align);
4963 if (s && s->a.aligned)
4964 align = 1 << (s->a.aligned - 1);
4965 if (t == TOK_SIZEOF) {
4966 if (!(type.t & VT_VLA)) {
4967 if (size < 0)
4968 tcc_error("sizeof applied to an incomplete type");
4969 vpushs(size);
4970 } else {
4971 vla_runtime_type_size(&type, &align);
4973 } else {
4974 vpushs(align);
4976 vtop->type.t |= VT_UNSIGNED;
4977 break;
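/* For illustration: 'sizeof vla' or 'sizeof (int[n])' cannot be folded,
 * so vla_runtime_type_size() above emits code computing the size at run
 * time, while sizeof/alignof of complete static types is pushed as an
 * unsigned constant via vpushs(). */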
4979 case TOK_builtin_expect:
4980 /* __builtin_expect is a no-op for now */
4981 parse_builtin_params(0, "ee");
4982 vpop();
4983 break;
4984 case TOK_builtin_types_compatible_p:
4985 parse_builtin_params(0, "tt");
4986 vtop[-1].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
4987 vtop[0].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
4988 n = is_compatible_types(&vtop[-1].type, &vtop[0].type);
4989 vtop -= 2;
4990 vpushi(n);
4991 break;
4992 case TOK_builtin_choose_expr:
4994 int64_t c;
4995 next();
4996 skip('(');
4997 c = expr_const64();
4998 skip(',');
4999 if (!c) {
5000 nocode_wanted++;
5002 expr_eq();
5003 if (!c) {
5004 vpop();
5005 nocode_wanted--;
5007 skip(',');
5008 if (c) {
5009 nocode_wanted++;
5011 expr_eq();
5012 if (c) {
5013 vpop();
5014 nocode_wanted--;
5016 skip(')');
5018 break;
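/* Example (illustrative):
 *     __builtin_choose_expr(sizeof(long) == 8, f64(x), f32(x))
 * the first argument must be a constant expression; only the selected
 * operand generates code, the other one is still parsed, but under
 * nocode_wanted, and its value is dropped. */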
5019 case TOK_builtin_constant_p:
5020 parse_builtin_params(1, "e");
5021 n = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
5022 vtop--;
5023 vpushi(n);
5024 break;
5025 case TOK_builtin_frame_address:
5026 case TOK_builtin_return_address:
5028 int tok1 = tok;
5029 int level;
5030 next();
5031 skip('(');
5032 if (tok != TOK_CINT) {
5033 tcc_error("%s only takes positive integers",
5034 tok1 == TOK_builtin_return_address ?
5035 "__builtin_return_address" :
5036 "__builtin_frame_address");
5038 level = (uint32_t)tokc.i;
5039 next();
5040 skip(')');
5041 type.t = VT_VOID;
5042 mk_pointer(&type);
5043 vset(&type, VT_LOCAL, 0); /* local frame */
5044 while (level--) {
5045 mk_pointer(&vtop->type);
5046 indir(); /* -> parent frame */
5048 if (tok1 == TOK_builtin_return_address) {
5049 // assume return address is just above frame pointer on stack
5050 vpushi(PTR_SIZE);
5051 gen_op('+');
5052 mk_pointer(&vtop->type);
5053 indir();
5056 break;
5057 #ifdef TCC_TARGET_X86_64
5058 #ifdef TCC_TARGET_PE
5059 case TOK_builtin_va_start:
5060 parse_builtin_params(0, "ee");
5061 r = vtop->r & VT_VALMASK;
5062 if (r == VT_LLOCAL)
5063 r = VT_LOCAL;
5064 if (r != VT_LOCAL)
5065 tcc_error("__builtin_va_start expects a local variable");
5066 vtop->r = r;
5067 vtop->type = char_pointer_type;
5068 vtop->c.i += 8;
5069 vstore();
5070 break;
5071 #else
5072 case TOK_builtin_va_arg_types:
5073 parse_builtin_params(0, "t");
5074 vpushi(classify_x86_64_va_arg(&vtop->type));
5075 vswap();
5076 vpop();
5077 break;
5078 #endif
5079 #endif
5081 #ifdef TCC_TARGET_ARM64
5082 case TOK___va_start: {
5083 parse_builtin_params(0, "ee");
5084 //xx check types
5085 gen_va_start();
5086 vpushi(0);
5087 vtop->type.t = VT_VOID;
5088 break;
5090 case TOK___va_arg: {
5091 parse_builtin_params(0, "et");
5092 type = vtop->type;
5093 vpop();
5094 //xx check types
5095 gen_va_arg(&type);
5096 vtop->type = type;
5097 break;
5099 case TOK___arm64_clear_cache: {
5100 parse_builtin_params(0, "ee");
5101 gen_clear_cache();
5102 vpushi(0);
5103 vtop->type.t = VT_VOID;
5104 break;
5106 #endif
5107 /* pre operations */
5108 case TOK_INC:
5109 case TOK_DEC:
5110 t = tok;
5111 next();
5112 unary();
5113 inc(0, t);
5114 break;
5115 case '-':
5116 next();
5117 unary();
5118 t = vtop->type.t & VT_BTYPE;
5119 if (is_float(t)) {
5120 /* In IEEE negate(x) isn't subtract(0,x), but rather
5121 subtract(-0, x). */
5122 vpush(&vtop->type);
5123 if (t == VT_FLOAT)
5124 vtop->c.f = -1.0 * 0.0;
5125 else if (t == VT_DOUBLE)
5126 vtop->c.d = -1.0 * 0.0;
5127 else
5128 vtop->c.ld = -1.0 * 0.0;
5129 } else
5130 vpushi(0);
5131 vswap();
5132 gen_op('-');
5133 break;
5134 case TOK_LAND:
5135 if (!gnu_ext)
5136 goto tok_identifier;
5137 next();
5138 /* allow taking the address of a label */
5139 if (tok < TOK_UIDENT)
5140 expect("label identifier");
5141 s = label_find(tok);
5142 if (!s) {
5143 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
5144 } else {
5145 if (s->r == LABEL_DECLARED)
5146 s->r = LABEL_FORWARD;
5148 if (!s->type.t) {
5149 s->type.t = VT_VOID;
5150 mk_pointer(&s->type);
5151 s->type.t |= VT_STATIC;
5153 vpushsym(&s->type, s);
5154 next();
5155 break;
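/* For illustration (GNU C extension handled above):
 *     void *p = &&done; ... goto *p; ... done: ;
 * '&&label' yields the address of the label as a static 'void *';
 * the computed 'goto *' counterpart is handled in block(). */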
5157 case TOK_GENERIC:
5159 CType controlling_type;
5160 int has_default = 0;
5161 int has_match = 0;
5162 int learn = 0;
5163 TokenString *str = NULL;
5164 int saved_const_wanted = const_wanted;
5166 next();
5167 skip('(');
5168 const_wanted = 0;
5169 expr_type(&controlling_type, expr_eq);
5170 controlling_type.t &= ~(VT_CONSTANT | VT_VOLATILE | VT_ARRAY);
5171 if ((controlling_type.t & VT_BTYPE) == VT_FUNC)
5172 mk_pointer(&controlling_type);
5173 const_wanted = saved_const_wanted;
5174 for (;;) {
5175 learn = 0;
5176 skip(',');
5177 if (tok == TOK_DEFAULT) {
5178 if (has_default)
5179 tcc_error("too many 'default'");
5180 has_default = 1;
5181 if (!has_match)
5182 learn = 1;
5183 next();
5184 } else {
5185 AttributeDef ad_tmp;
5186 int itmp;
5187 CType cur_type;
5188 parse_btype(&cur_type, &ad_tmp);
5189 type_decl(&cur_type, &ad_tmp, &itmp, TYPE_ABSTRACT);
5190 if (compare_types(&controlling_type, &cur_type, 0)) {
5191 if (has_match) {
5192 tcc_error("type match twice");
5194 has_match = 1;
5195 learn = 1;
5198 skip(':');
5199 if (learn) {
5200 if (str)
5201 tok_str_free(str);
5202 skip_or_save_block(&str);
5203 } else {
5204 skip_or_save_block(NULL);
5206 if (tok == ')')
5207 break;
5209 if (!str) {
5210 char buf[60];
5211 type_to_str(buf, sizeof buf, &controlling_type, NULL);
5212 tcc_error("type '%s' does not match any association", buf);
5214 begin_macro(str, 1);
5215 next();
5216 expr_eq();
5217 if (tok != TOK_EOF)
5218 expect(",");
5219 end_macro();
5220 next();
5221 break;
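/* Example (illustrative) of the C11 _Generic handling above:
 *     _Generic(x, int: "int", double: "double", default: "other")
 * each association is skipped or saved with skip_or_save_block(); the
 * token string of the matching (or default) branch is then re-played
 * through begin_macro()/end_macro() and parsed as the result. */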
5223 // special qNaN, sNaN and infinity values
5224 case TOK___NAN__:
5225 n = 0x7fc00000;
5226 special_math_val:
5227 vpushi(n);
5228 vtop->type.t = VT_FLOAT;
5229 next();
5230 break;
5231 case TOK___SNAN__:
5232 n = 0x7f800001;
5233 goto special_math_val;
5234 case TOK___INF__:
5235 n = 0x7f800000;
5236 goto special_math_val;
5238 default:
5239 tok_identifier:
5240 t = tok;
5241 next();
5242 if (t < TOK_UIDENT)
5243 expect("identifier");
5244 s = sym_find(t);
5245 if (!s || IS_ASM_SYM(s)) {
5246 const char *name = get_tok_str(t, NULL);
5247 if (tok != '(')
5248 tcc_error("'%s' undeclared", name);
5249 /* for simple function calls, we tolerate undeclared
5250 external reference to int() function */
5251 if (tcc_state->warn_implicit_function_declaration
5252 #ifdef TCC_TARGET_PE
5253 /* people must be warned about using undeclared WINAPI functions
5254 (which usually start with an uppercase letter) */
5255 || (name[0] >= 'A' && name[0] <= 'Z')
5256 #endif
5258 tcc_warning("implicit declaration of function '%s'", name);
5259 s = external_global_sym(t, &func_old_type);
5262 r = s->r;
5263 /* A symbol that has a register is a local register variable,
5264 which starts out as a VT_LOCAL value. */
5265 if ((r & VT_VALMASK) < VT_CONST)
5266 r = (r & ~VT_VALMASK) | VT_LOCAL;
5268 vset(&s->type, r, s->c);
5269 /* Point to s as backpointer (even without r&VT_SYM).
5270 Will be used by at least the x86 inline asm parser for
5271 regvars. */
5272 vtop->sym = s;
5274 if (r & VT_SYM) {
5275 vtop->c.i = 0;
5276 } else if (r == VT_CONST && IS_ENUM_VAL(s->type.t)) {
5277 vtop->c.i = s->enum_val;
5279 break;
5282 /* post operations */
5283 while (1) {
5284 if (tok == TOK_INC || tok == TOK_DEC) {
5285 inc(1, tok);
5286 next();
5287 } else if (tok == '.' || tok == TOK_ARROW || tok == TOK_CDOUBLE) {
5288 int qualifiers, cumofs = 0;
5289 /* field */
5290 if (tok == TOK_ARROW)
5291 indir();
5292 qualifiers = vtop->type.t & (VT_CONSTANT | VT_VOLATILE);
5293 test_lvalue();
5294 gaddrof();
5295 /* expect pointer on structure */
5296 if ((vtop->type.t & VT_BTYPE) != VT_STRUCT)
5297 expect("struct or union");
5298 if (tok == TOK_CDOUBLE)
5299 expect("field name");
5300 next();
5301 if (tok == TOK_CINT || tok == TOK_CUINT)
5302 expect("field name");
5303 s = find_field(&vtop->type, tok, &cumofs);
5304 if (!s)
5305 tcc_error("field not found: %s", get_tok_str(tok & ~SYM_FIELD, &tokc));
5306 /* add field offset to pointer */
5307 vtop->type = char_pointer_type; /* change type to 'char *' */
5308 vpushi(cumofs + s->c);
5309 gen_op('+');
5310 /* change type to field type, and set to lvalue */
5311 vtop->type = s->type;
5312 vtop->type.t |= qualifiers;
5313 /* an array is never an lvalue */
5314 if (!(vtop->type.t & VT_ARRAY)) {
5315 vtop->r |= lvalue_type(vtop->type.t);
5316 #ifdef CONFIG_TCC_BCHECK
5317 /* if bound checking, the referenced pointer must be checked */
5318 if (tcc_state->do_bounds_check && (vtop->r & VT_VALMASK) != VT_LOCAL)
5319 vtop->r |= VT_MUSTBOUND;
5320 #endif
5322 next();
5323 } else if (tok == '[') {
5324 next();
5325 gexpr();
5326 gen_op('+');
5327 indir();
5328 skip(']');
5329 } else if (tok == '(') {
5330 SValue ret;
5331 Sym *sa;
5332 int nb_args, ret_nregs, ret_align, regsize, variadic;
5334 /* function call */
5335 if ((vtop->type.t & VT_BTYPE) != VT_FUNC) {
5336 /* pointer test (no array accepted) */
5337 if ((vtop->type.t & (VT_BTYPE | VT_ARRAY)) == VT_PTR) {
5338 vtop->type = *pointed_type(&vtop->type);
5339 if ((vtop->type.t & VT_BTYPE) != VT_FUNC)
5340 goto error_func;
5341 } else {
5342 error_func:
5343 expect("function pointer");
5345 } else {
5346 vtop->r &= ~VT_LVAL; /* no lvalue */
5348 /* get return type */
5349 s = vtop->type.ref;
5350 next();
5351 sa = s->next; /* first parameter */
5352 nb_args = regsize = 0;
5353 ret.r2 = VT_CONST;
5354 /* compute first implicit argument if a structure is returned */
5355 if ((s->type.t & VT_BTYPE) == VT_STRUCT) {
5356 variadic = (s->f.func_type == FUNC_ELLIPSIS);
5357 ret_nregs = gfunc_sret(&s->type, variadic, &ret.type,
5358 &ret_align, &regsize);
5359 if (!ret_nregs) {
5360 /* get some space for the returned structure */
5361 size = type_size(&s->type, &align);
5362 #ifdef TCC_TARGET_ARM64
5363 /* On arm64, a small struct is returned in registers.
5364 It is much easier to write it to memory if we know
5365 that we are allowed to write some extra bytes, so
5366 round the allocated space up to a power of 2: */
5367 if (size < 16)
5368 while (size & (size - 1))
5369 size = (size | (size - 1)) + 1;
5370 #endif
5371 loc = (loc - size) & -align;
5372 ret.type = s->type;
5373 ret.r = VT_LOCAL | VT_LVAL;
5374 /* pass it as 'int' to avoid structure arg passing
5375 problems */
5376 vseti(VT_LOCAL, loc);
5377 ret.c = vtop->c;
5378 nb_args++;
5380 } else {
5381 ret_nregs = 1;
5382 ret.type = s->type;
5385 if (ret_nregs) {
5386 /* return in register */
5387 if (is_float(ret.type.t)) {
5388 ret.r = reg_fret(ret.type.t);
5389 #ifdef TCC_TARGET_X86_64
5390 if ((ret.type.t & VT_BTYPE) == VT_QFLOAT)
5391 ret.r2 = REG_QRET;
5392 #endif
5393 } else {
5394 #ifndef TCC_TARGET_ARM64
5395 #ifdef TCC_TARGET_X86_64
5396 if ((ret.type.t & VT_BTYPE) == VT_QLONG)
5397 #else
5398 if ((ret.type.t & VT_BTYPE) == VT_LLONG)
5399 #endif
5400 ret.r2 = REG_LRET;
5401 #endif
5402 ret.r = REG_IRET;
5404 ret.c.i = 0;
5406 if (tok != ')') {
5407 for(;;) {
5408 expr_eq();
5409 gfunc_param_typed(s, sa);
5410 nb_args++;
5411 if (sa)
5412 sa = sa->next;
5413 if (tok == ')')
5414 break;
5415 skip(',');
5418 if (sa)
5419 tcc_error("too few arguments to function");
5420 skip(')');
5421 gfunc_call(nb_args);
5423 /* return value */
5424 for (r = ret.r + ret_nregs + !ret_nregs; r-- > ret.r;) {
5425 vsetc(&ret.type, r, &ret.c);
5426 vtop->r2 = ret.r2; /* Loop only happens when r2 is VT_CONST */
5429 /* handle packed struct return */
5430 if (((s->type.t & VT_BTYPE) == VT_STRUCT) && ret_nregs) {
5431 int addr, offset;
5433 size = type_size(&s->type, &align);
5434 /* We often write whole regs, so make sure there's enough
5435 space. Assume the register size is a power of 2. */
5436 if (regsize > align)
5437 align = regsize;
5438 loc = (loc - size) & -align;
5439 addr = loc;
5440 offset = 0;
5441 for (;;) {
5442 vset(&ret.type, VT_LOCAL | VT_LVAL, addr + offset);
5443 vswap();
5444 vstore();
5445 vtop--;
5446 if (--ret_nregs == 0)
5447 break;
5448 offset += regsize;
5450 vset(&s->type, VT_LOCAL | VT_LVAL, addr);
5452 } else {
5453 break;
5458 ST_FUNC void expr_prod(void)
5460 int t;
5462 unary();
5463 while (tok == '*' || tok == '/' || tok == '%') {
5464 t = tok;
5465 next();
5466 unary();
5467 gen_op(t);
5471 ST_FUNC void expr_sum(void)
5473 int t;
5475 expr_prod();
5476 while (tok == '+' || tok == '-') {
5477 t = tok;
5478 next();
5479 expr_prod();
5480 gen_op(t);
5484 static void expr_shift(void)
5486 int t;
5488 expr_sum();
5489 while (tok == TOK_SHL || tok == TOK_SAR) {
5490 t = tok;
5491 next();
5492 expr_sum();
5493 gen_op(t);
5497 static void expr_cmp(void)
5499 int t;
5501 expr_shift();
5502 while ((tok >= TOK_ULE && tok <= TOK_GT) ||
5503 tok == TOK_ULT || tok == TOK_UGE) {
5504 t = tok;
5505 next();
5506 expr_shift();
5507 gen_op(t);
5511 static void expr_cmpeq(void)
5513 int t;
5515 expr_cmp();
5516 while (tok == TOK_EQ || tok == TOK_NE) {
5517 t = tok;
5518 next();
5519 expr_cmp();
5520 gen_op(t);
5524 static void expr_and(void)
5526 expr_cmpeq();
5527 while (tok == '&') {
5528 next();
5529 expr_cmpeq();
5530 gen_op('&');
5534 static void expr_xor(void)
5536 expr_and();
5537 while (tok == '^') {
5538 next();
5539 expr_and();
5540 gen_op('^');
5544 static void expr_or(void)
5546 expr_xor();
5547 while (tok == '|') {
5548 next();
5549 expr_xor();
5550 gen_op('|');
5554 static void expr_land(void)
5556 expr_or();
5557 if (tok == TOK_LAND) {
5558 int t = 0;
5559 for(;;) {
5560 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5561 gen_cast_s(VT_BOOL);
5562 if (vtop->c.i) {
5563 vpop();
5564 } else {
5565 nocode_wanted++;
5566 while (tok == TOK_LAND) {
5567 next();
5568 expr_or();
5569 vpop();
5571 nocode_wanted--;
5572 if (t)
5573 gsym(t);
5574 gen_cast_s(VT_INT);
5575 break;
5577 } else {
5578 if (!t)
5579 save_regs(1);
5580 t = gvtst(1, t);
5582 if (tok != TOK_LAND) {
5583 if (t)
5584 vseti(VT_JMPI, t);
5585 else
5586 vpushi(1);
5587 break;
5589 next();
5590 expr_or();
5595 static void expr_lor(void)
5597 expr_land();
5598 if (tok == TOK_LOR) {
5599 int t = 0;
5600 for(;;) {
5601 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5602 gen_cast_s(VT_BOOL);
5603 if (!vtop->c.i) {
5604 vpop();
5605 } else {
5606 nocode_wanted++;
5607 while (tok == TOK_LOR) {
5608 next();
5609 expr_land();
5610 vpop();
5612 nocode_wanted--;
5613 if (t)
5614 gsym(t);
5615 gen_cast_s(VT_INT);
5616 break;
5618 } else {
5619 if (!t)
5620 save_regs(1);
5621 t = gvtst(0, t);
5623 if (tok != TOK_LOR) {
5624 if (t)
5625 vseti(VT_JMP, t);
5626 else
5627 vpushi(0);
5628 break;
5630 next();
5631 expr_land();
5636 /* Assuming vtop is a value used in a conditional context
5637 (i.e. compared with zero) return 0 if it's false, 1 if
5638 true and -1 if it can't be statically determined. */
5639 static int condition_3way(void)
5641 int c = -1;
5642 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
5643 (!(vtop->r & VT_SYM) || !vtop->sym->a.weak)) {
5644 vdup();
5645 gen_cast_s(VT_BOOL);
5646 c = vtop->c.i;
5647 vpop();
5649 return c;
5652 static void expr_cond(void)
5654 int tt, u, r1, r2, rc, t1, t2, bt1, bt2, islv, c, g;
5655 SValue sv;
5656 CType type, type1, type2;
5658 expr_lor();
5659 if (tok == '?') {
5660 next();
5661 c = condition_3way();
5662 g = (tok == ':' && gnu_ext);
5663 if (c < 0) {
5664 /* needed to avoid having different registers saved in
5665 each branch */
5666 if (is_float(vtop->type.t)) {
5667 rc = RC_FLOAT;
5668 #ifdef TCC_TARGET_X86_64
5669 if ((vtop->type.t & VT_BTYPE) == VT_LDOUBLE) {
5670 rc = RC_ST0;
5672 #endif
5673 } else
5674 rc = RC_INT;
5675 gv(rc);
5676 save_regs(1);
5677 if (g)
5678 gv_dup();
5679 tt = gvtst(1, 0);
5681 } else {
5682 if (!g)
5683 vpop();
5684 tt = 0;
5687 if (1) {
5688 if (c == 0)
5689 nocode_wanted++;
5690 if (!g)
5691 gexpr();
5693 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
5694 mk_pointer(&vtop->type);
5695 type1 = vtop->type;
5696 sv = *vtop; /* save value to handle it later */
5697 vtop--; /* no vpop so that FP stack is not flushed */
5698 skip(':');
5700 u = 0;
5701 if (c < 0)
5702 u = gjmp(0);
5703 gsym(tt);
5705 if (c == 0)
5706 nocode_wanted--;
5707 if (c == 1)
5708 nocode_wanted++;
5709 expr_cond();
5710 if (c == 1)
5711 nocode_wanted--;
5713 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
5714 mk_pointer(&vtop->type);
5715 type2=vtop->type;
5716 t1 = type1.t;
5717 bt1 = t1 & VT_BTYPE;
5718 t2 = type2.t;
5719 bt2 = t2 & VT_BTYPE;
5720 type.ref = NULL;
5723 /* cast operands to correct type according to ISOC rules */
5724 if (bt1 == VT_VOID || bt2 == VT_VOID) {
5725 type.t = VT_VOID; /* NOTE: as an extension, we accept void on only one side */
5726 } else if (is_float(bt1) || is_float(bt2)) {
5727 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
5728 type.t = VT_LDOUBLE;
5730 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
5731 type.t = VT_DOUBLE;
5732 } else {
5733 type.t = VT_FLOAT;
5735 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
5736 /* cast to biggest op */
5737 type.t = VT_LLONG | VT_LONG;
5738 if (bt1 == VT_LLONG)
5739 type.t &= t1;
5740 if (bt2 == VT_LLONG)
5741 type.t &= t2;
5742 /* convert to unsigned if it does not fit in a long long */
5743 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
5744 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
5745 type.t |= VT_UNSIGNED;
5746 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
5747 /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
5748 /* If one is a null ptr constant the result type
5749 is the other. */
5750 if (is_null_pointer (vtop)) type = type1;
5751 else if (is_null_pointer (&sv)) type = type2;
5752 else if (bt1 != bt2)
5753 tcc_error("incompatible types in conditional expressions");
5754 else {
5755 CType *pt1 = pointed_type(&type1);
5756 CType *pt2 = pointed_type(&type2);
5757 int pbt1 = pt1->t & VT_BTYPE;
5758 int pbt2 = pt2->t & VT_BTYPE;
5759 int newquals, copied = 0;
5760 /* pointers to void get preferred, otherwise the
5761 pointed to types minus qualifs should be compatible */
5762 type = (pbt1 == VT_VOID) ? type1 : type2;
5763 if (pbt1 != VT_VOID && pbt2 != VT_VOID) {
5764 if(!compare_types(pt1, pt2, 1/*unqualif*/))
5765 tcc_warning("pointer type mismatch in conditional expression\n");
5767 /* combine qualifs */
5768 newquals = ((pt1->t | pt2->t) & (VT_CONSTANT | VT_VOLATILE));
5769 if ((~pointed_type(&type)->t & (VT_CONSTANT | VT_VOLATILE))
5770 & newquals)
5772 /* copy the pointer target symbol */
5773 type.ref = sym_push(SYM_FIELD, &type.ref->type,
5774 0, type.ref->c);
5775 copied = 1;
5776 pointed_type(&type)->t |= newquals;
5778 /* pointers to incomplete arrays get converted to
5779 pointers to completed ones if possible */
5780 if (pt1->t & VT_ARRAY
5781 && pt2->t & VT_ARRAY
5782 && pointed_type(&type)->ref->c < 0
5783 && (pt1->ref->c > 0 || pt2->ref->c > 0))
5785 if (!copied)
5786 type.ref = sym_push(SYM_FIELD, &type.ref->type,
5787 0, type.ref->c);
5788 pointed_type(&type)->ref =
5789 sym_push(SYM_FIELD, &pointed_type(&type)->ref->type,
5790 0, pointed_type(&type)->ref->c);
5791 pointed_type(&type)->ref->c =
5792 0 < pt1->ref->c ? pt1->ref->c : pt2->ref->c;
5795 } else if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
5796 /* XXX: test structure compatibility */
5797 type = bt1 == VT_STRUCT ? type1 : type2;
5798 } else {
5799 /* integer operations */
5800 type.t = VT_INT | (VT_LONG & (t1 | t2));
5801 /* convert to unsigned if it does not fit in an integer */
5802 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
5803 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
5804 type.t |= VT_UNSIGNED;
5806 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
5807 that `(expr ? a : b).mem` does not error with "lvalue expected" */
5808 islv = (vtop->r & VT_LVAL) && (sv.r & VT_LVAL) && VT_STRUCT == (type.t & VT_BTYPE);
5810 /* now we convert second operand */
5811 if (c != 1) {
5812 gen_cast(&type);
5813 if (islv) {
5814 mk_pointer(&vtop->type);
5815 gaddrof();
5816 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5817 gaddrof();
5820 rc = RC_INT;
5821 if (is_float(type.t)) {
5822 rc = RC_FLOAT;
5823 #ifdef TCC_TARGET_X86_64
5824 if ((type.t & VT_BTYPE) == VT_LDOUBLE) {
5825 rc = RC_ST0;
5827 #endif
5828 } else if ((type.t & VT_BTYPE) == VT_LLONG) {
5829 /* for long longs, we use fixed registers to avoid having
5830 to handle a complicated move */
5831 rc = RC_IRET;
5834 tt = r2 = 0;
5835 if (c < 0) {
5836 r2 = gv(rc);
5837 tt = gjmp(0);
5839 gsym(u);
5841 /* this is horrible, but we must also convert first
5842 operand */
5843 if (c != 0) {
5844 *vtop = sv;
5845 gen_cast(&type);
5846 if (islv) {
5847 mk_pointer(&vtop->type);
5848 gaddrof();
5849 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5850 gaddrof();
5853 if (c < 0 || islv) {
5854 r1 = gv(rc);
5855 move_reg(r2, r1, type.t);
5856 vtop->r = r2;
5857 gsym(tt);
5858 if (islv)
5859 indir();
5865 static void expr_eq(void)
5867 int t;
5869 expr_cond();
5870 if (tok == '=' ||
5871 (tok >= TOK_A_MOD && tok <= TOK_A_DIV) ||
5872 tok == TOK_A_XOR || tok == TOK_A_OR ||
5873 tok == TOK_A_SHL || tok == TOK_A_SAR) {
5874 test_lvalue();
5875 t = tok;
5876 next();
5877 if (t == '=') {
5878 expr_eq();
5879 } else {
5880 vdup();
5881 expr_eq();
5882 gen_op(t & 0x7f);
5884 vstore();
5888 ST_FUNC void gexpr(void)
5890 while (1) {
5891 expr_eq();
5892 if (tok != ',')
5893 break;
5894 vpop();
5895 next();
5899 /* parse a constant expression and return value in vtop. */
5900 static void expr_const1(void)
5902 const_wanted++;
5903 nocode_wanted++;
5904 expr_cond();
5905 nocode_wanted--;
5906 const_wanted--;
5909 /* parse an integer constant and return its value. */
5910 static inline int64_t expr_const64(void)
5912 int64_t c;
5913 expr_const1();
5914 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
5915 expect("constant expression");
5916 c = vtop->c.i;
5917 vpop();
5918 return c;
5921 /* parse an integer constant and return its value.
5922 Complain if it doesn't fit 32bit (signed or unsigned). */
5923 ST_FUNC int expr_const(void)
5925 int c;
5926 int64_t wc = expr_const64();
5927 c = wc;
5928 if (c != wc && (unsigned)c != wc)
5929 tcc_error("constant exceeds 32 bit");
5930 return c;
5933 /* return the label token if current token is a label, otherwise
5934 return zero */
5935 static int is_label(void)
5937 int last_tok;
5939 /* fast test first */
5940 if (tok < TOK_UIDENT)
5941 return 0;
5942 /* no need to save tokc because tok is an identifier */
5943 last_tok = tok;
5944 next();
5945 if (tok == ':') {
5946 return last_tok;
5947 } else {
5948 unget_tok(last_tok);
5949 return 0;
5953 #ifndef TCC_TARGET_ARM64
5954 static void gfunc_return(CType *func_type)
5956 if ((func_type->t & VT_BTYPE) == VT_STRUCT) {
5957 CType type, ret_type;
5958 int ret_align, ret_nregs, regsize;
5959 ret_nregs = gfunc_sret(func_type, func_var, &ret_type,
5960 &ret_align, &regsize);
5961 if (0 == ret_nregs) {
5962 /* if returning structure, must copy it to implicit
5963 first pointer arg location */
5964 type = *func_type;
5965 mk_pointer(&type);
5966 vset(&type, VT_LOCAL | VT_LVAL, func_vc);
5967 indir();
5968 vswap();
5969 /* copy structure value to pointer */
5970 vstore();
5971 } else {
5972 /* returning structure packed into registers */
5973 int r, size, addr, align;
5974 size = type_size(func_type,&align);
5975 if ((vtop->r != (VT_LOCAL | VT_LVAL) ||
5976 (vtop->c.i & (ret_align-1)))
5977 && (align & (ret_align-1))) {
5978 loc = (loc - size) & -ret_align;
5979 addr = loc;
5980 type = *func_type;
5981 vset(&type, VT_LOCAL | VT_LVAL, addr);
5982 vswap();
5983 vstore();
5984 vpop();
5985 vset(&ret_type, VT_LOCAL | VT_LVAL, addr);
5987 vtop->type = ret_type;
5988 if (is_float(ret_type.t))
5989 r = rc_fret(ret_type.t);
5990 else
5991 r = RC_IRET;
5993 if (ret_nregs == 1)
5994 gv(r);
5995 else {
5996 for (;;) {
5997 vdup();
5998 gv(r);
5999 vpop();
6000 if (--ret_nregs == 0)
6001 break;
6002 /* We assume that when a structure is returned in multiple
6003 registers, their classes are consecutive values of the
6004 sequence s(n) = 2^n */
6005 r <<= 1;
6006 vtop->c.i += regsize;
6010 } else if (is_float(func_type->t)) {
6011 gv(rc_fret(func_type->t));
6012 } else {
6013 gv(RC_IRET);
6015 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
6017 #endif
6019 static int case_cmp(const void *pa, const void *pb)
6021 int64_t a = (*(struct case_t**) pa)->v1;
6022 int64_t b = (*(struct case_t**) pb)->v1;
6023 return a < b ? -1 : a > b;
6026 static void gcase(struct case_t **base, int len, int *bsym)
6028 struct case_t *p;
6029 int e;
6030 int ll = (vtop->type.t & VT_BTYPE) == VT_LLONG;
6031 gv(RC_INT);
6032 while (len > 4) {
6033 /* binary search */
6034 p = base[len/2];
6035 vdup();
6036 if (ll)
6037 vpushll(p->v2);
6038 else
6039 vpushi(p->v2);
6040 gen_op(TOK_LE);
6041 e = gtst(1, 0);
6042 vdup();
6043 if (ll)
6044 vpushll(p->v1);
6045 else
6046 vpushi(p->v1);
6047 gen_op(TOK_GE);
6048 gtst_addr(0, p->sym); /* v1 <= x <= v2 */
6049 /* x < v1 */
6050 gcase(base, len/2, bsym);
6051 if (cur_switch->def_sym)
6052 gjmp_addr(cur_switch->def_sym);
6053 else
6054 *bsym = gjmp(*bsym);
6055 /* x > v2 */
6056 gsym(e);
6057 e = len/2 + 1;
6058 base += e; len -= e;
6060 /* linear scan */
6061 while (len--) {
6062 p = *base++;
6063 vdup();
6064 if (ll)
6065 vpushll(p->v2);
6066 else
6067 vpushi(p->v2);
6068 if (p->v1 == p->v2) {
6069 gen_op(TOK_EQ);
6070 gtst_addr(0, p->sym);
6071 } else {
6072 gen_op(TOK_LE);
6073 e = gtst(1, 0);
6074 vdup();
6075 if (ll)
6076 vpushll(p->v1);
6077 else
6078 vpushi(p->v1);
6079 gen_op(TOK_GE);
6080 gtst_addr(0, p->sym);
6081 gsym(e);
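/* Sketch of what gcase() emits (illustrative): for a sorted case list
 * longer than 4 entries it binary-searches on the v1/v2 bounds; shorter
 * runs are compared linearly, where a GNU range like 'case 1 ... 5:' is
 * a single entry with v1 = 1, v2 = 5 tested as v1 <= x && x <= v2, and
 * a plain 'case 3:' (v1 == v2) is a single equality test. */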
6086 static void block(int *bsym, int *csym, int is_expr)
6088 int a, b, c, d, cond;
6089 Sym *s;
6091 /* generate line number info */
6092 if (tcc_state->do_debug)
6093 tcc_debug_line(tcc_state);
6095 if (is_expr) {
6096 /* default return value is (void) */
6097 vpushi(0);
6098 vtop->type.t = VT_VOID;
6101 if (tok == TOK_IF) {
6102 /* if test */
6103 int saved_nocode_wanted = nocode_wanted;
6104 next();
6105 skip('(');
6106 gexpr();
6107 skip(')');
6108 cond = condition_3way();
6109 if (cond == 1)
6110 a = 0, vpop();
6111 else
6112 a = gvtst(1, 0);
6113 if (cond == 0)
6114 nocode_wanted |= 0x20000000;
6115 block(bsym, csym, 0);
6116 if (cond != 1)
6117 nocode_wanted = saved_nocode_wanted;
6118 if (tok == TOK_ELSE) {
6119 next();
6120 d = gjmp(0);
6121 gsym(a);
6122 if (cond == 1)
6123 nocode_wanted |= 0x20000000;
6124 block(bsym, csym, 0);
6125 gsym(d); /* patch else jmp */
6126 if (cond != 0)
6127 nocode_wanted = saved_nocode_wanted;
6128 } else
6129 gsym(a);
6130 } else if (tok == TOK_WHILE) {
6131 int saved_nocode_wanted;
6132 nocode_wanted &= ~0x20000000;
6133 next();
6134 d = ind;
6135 vla_sp_restore();
6136 skip('(');
6137 gexpr();
6138 skip(')');
6139 a = gvtst(1, 0);
6140 b = 0;
6141 ++local_scope;
6142 saved_nocode_wanted = nocode_wanted;
6143 block(&a, &b, 0);
6144 nocode_wanted = saved_nocode_wanted;
6145 --local_scope;
6146 gjmp_addr(d);
6147 gsym(a);
6148 gsym_addr(b, d);
6149 } else if (tok == '{') {
6150 Sym *llabel, *lcleanup;
6151 int block_vla_sp_loc = vla_sp_loc, saved_vlas_in_scope = vlas_in_scope;
6152 int lncleanups = ncleanups;
6154 next();
6155 /* record local declaration stack position */
6156 s = local_stack;
6157 llabel = local_label_stack;
6158 lcleanup = current_cleanups;
6159 ++local_scope;
6161 /* handle local labels declarations */
6162 while (tok == TOK_LABEL) {
6163 next();
6164 for(;;) {
6165 if (tok < TOK_UIDENT)
6166 expect("label identifier");
6167 label_push(&local_label_stack, tok, LABEL_DECLARED);
6168 next();
6169 if (tok == ',') {
6170 next();
6171 } else {
6172 skip(';');
6173 break;
6177 while (tok != '}') {
6178 if ((a = is_label()))
6179 unget_tok(a);
6180 else
6181 decl(VT_LOCAL);
6182 if (tok != '}') {
6183 if (is_expr)
6184 vpop();
6185 block(bsym, csym, is_expr);
6189 if (current_cleanups != lcleanup) {
6190 int jmp = 0;
6191 Sym *g, **pg;
6193 for (pg = &pending_gotos; (g = *pg) && g->c > lncleanups;)
6194 if (g->prev_tok->r & LABEL_FORWARD) {
6195 Sym *pcl = g->next;
6196 if (!jmp)
6197 jmp = gjmp(0);
6198 gsym(pcl->jnext);
6199 try_call_scope_cleanup(lcleanup);
6200 pcl->jnext = gjmp(0);
6201 if (!lncleanups)
6202 goto remove_pending;
6203 g->c = lncleanups;
6204 pg = &g->prev;
6205 } else {
6206 remove_pending:
6207 *pg = g->prev;
6208 sym_free(g);
6210 gsym(jmp);
6211 if (!nocode_wanted) {
6212 try_call_scope_cleanup(lcleanup);
6216 current_cleanups = lcleanup;
6217 ncleanups = lncleanups;
6218 /* pop locally defined labels */
6219 label_pop(&local_label_stack, llabel, is_expr);
6220 /* pop locally defined symbols */
6221 --local_scope;
6222 /* In the is_expr case (a statement expression is finished here),
6223 vtop might refer to symbols on the local_stack. Either via the
6224 type or via vtop->sym. We can't pop those nor any that in turn
6225 might be referred to. To make it easier we don't roll back
6226 any symbols in that case; some upper level call to block() will
6227 do that. We do have to remove such symbols from the lookup
6228 tables, though. sym_pop will do that. */
6229 sym_pop(&local_stack, s, is_expr);
6231 /* Pop VLA frames and restore stack pointer if required */
6232 if (vlas_in_scope > saved_vlas_in_scope) {
6233 vla_sp_loc = saved_vlas_in_scope ? block_vla_sp_loc : vla_sp_root_loc;
6234 vla_sp_restore();
6236 vlas_in_scope = saved_vlas_in_scope;
6238 next();
6239 } else if (tok == TOK_RETURN) {
6240 next();
6241 if (tok != ';') {
6242 gexpr();
6243 gen_assign_cast(&func_vt);
6244 try_call_scope_cleanup(NULL);
6245 if ((func_vt.t & VT_BTYPE) == VT_VOID)
6246 vtop--;
6247 else
6248 gfunc_return(&func_vt);
6249 } else {
6250 try_call_scope_cleanup(NULL);
6252 skip(';');
6253 /* jump unless last stmt in top-level block */
6254 if (tok != '}' || local_scope != 1)
6255 rsym = gjmp(rsym);
6256 nocode_wanted |= 0x20000000;
6257 } else if (tok == TOK_BREAK) {
6258 /* compute jump */
6259 if (!bsym)
6260 tcc_error("cannot break");
6261 *bsym = gjmp(*bsym);
6262 next();
6263 skip(';');
6264 nocode_wanted |= 0x20000000;
6265 } else if (tok == TOK_CONTINUE) {
6266 /* compute jump */
6267 if (!csym)
6268 tcc_error("cannot continue");
6269 vla_sp_restore_root();
6270 *csym = gjmp(*csym);
6271 next();
6272 skip(';');
6273 nocode_wanted |= 0x20000000;
6274 } else if (tok == TOK_FOR) {
6275 int e;
6276 int saved_nocode_wanted;
6277 nocode_wanted &= ~0x20000000;
6278 next();
6279 skip('(');
6280 s = local_stack;
6281 ++local_scope;
6282 if (tok != ';') {
6283 /* c99 for-loop init decl? */
6284 if (!decl0(VT_LOCAL, 1, NULL)) {
6285 /* no, regular for-loop init expr */
6286 gexpr();
6287 vpop();
6290 skip(';');
6291 d = ind;
6292 c = ind;
6293 vla_sp_restore();
6294 a = 0;
6295 b = 0;
6296 if (tok != ';') {
6297 gexpr();
6298 a = gvtst(1, 0);
6300 skip(';');
6301 if (tok != ')') {
6302 e = gjmp(0);
6303 c = ind;
6304 vla_sp_restore();
6305 gexpr();
6306 vpop();
6307 gjmp_addr(d);
6308 gsym(e);
6310 skip(')');
6311 saved_nocode_wanted = nocode_wanted;
6312 block(&a, &b, 0);
6313 nocode_wanted = saved_nocode_wanted;
6314 gjmp_addr(c);
6315 gsym(a);
6316 gsym_addr(b, c);
6317 --local_scope;
6318 sym_pop(&local_stack, s, 0);
6320 } else
6321 if (tok == TOK_DO) {
6322 int saved_nocode_wanted;
6323 nocode_wanted &= ~0x20000000;
6324 next();
6325 a = 0;
6326 b = 0;
6327 d = ind;
6328 vla_sp_restore();
6329 saved_nocode_wanted = nocode_wanted;
6330 block(&a, &b, 0);
6331 skip(TOK_WHILE);
6332 skip('(');
6333 gsym(b);
6334 if (b)
6335 nocode_wanted = saved_nocode_wanted;
6336 gexpr();
6337 c = gvtst(0, 0);
6338 gsym_addr(c, d);
6339 nocode_wanted = saved_nocode_wanted;
6340 skip(')');
6341 gsym(a);
6342 skip(';');
6343 } else
6344 if (tok == TOK_SWITCH) {
6345 struct switch_t *saved, sw;
6346 int saved_nocode_wanted = nocode_wanted;
6347 SValue switchval;
6348 next();
6349 skip('(');
6350 gexpr();
6351 skip(')');
6352 switchval = *vtop--;
6353 a = 0;
6354 b = gjmp(0); /* jump to first case */
6355 sw.p = NULL; sw.n = 0; sw.def_sym = 0;
6356 saved = cur_switch;
6357 cur_switch = &sw;
6358 block(&a, csym, 0);
6359 nocode_wanted = saved_nocode_wanted;
6360 a = gjmp(a); /* add implicit break */
6361 /* case lookup */
6362 gsym(b);
6363 qsort(sw.p, sw.n, sizeof(void*), case_cmp);
6364 for (b = 1; b < sw.n; b++)
6365 if (sw.p[b - 1]->v2 >= sw.p[b]->v1)
6366 tcc_error("duplicate case value");
6367 /* Our switch table sorting is signed, so the compared
6368 value needs to be as well when it's 64bit. */
6369 if ((switchval.type.t & VT_BTYPE) == VT_LLONG)
6370 switchval.type.t &= ~VT_UNSIGNED;
6371 vpushv(&switchval);
6372 gcase(sw.p, sw.n, &a);
6373 vpop();
6374 if (sw.def_sym)
6375 gjmp_addr(sw.def_sym);
6376 dynarray_reset(&sw.p, &sw.n);
6377 cur_switch = saved;
6378 /* break label */
6379 gsym(a);
6380 } else
6381 if (tok == TOK_CASE) {
6382 struct case_t *cr = tcc_malloc(sizeof(struct case_t));
6383 if (!cur_switch)
6384 expect("switch");
6385 nocode_wanted &= ~0x20000000;
6386 next();
6387 cr->v1 = cr->v2 = expr_const64();
6388 if (gnu_ext && tok == TOK_DOTS) {
6389 next();
6390 cr->v2 = expr_const64();
6391 if (cr->v2 < cr->v1)
6392 tcc_warning("empty case range");
6394 cr->sym = ind;
6395 dynarray_add(&cur_switch->p, &cur_switch->n, cr);
6396 skip(':');
6397 is_expr = 0;
6398 goto block_after_label;
6399 } else
6400 if (tok == TOK_DEFAULT) {
6401 next();
6402 skip(':');
6403 if (!cur_switch)
6404 expect("switch");
6405 if (cur_switch->def_sym)
6406 tcc_error("too many 'default'");
6407 cur_switch->def_sym = ind;
6408 is_expr = 0;
6409 goto block_after_label;
6410 } else
6411 if (tok == TOK_GOTO) {
6412 next();
6413 if (tok == '*' && gnu_ext) {
6414 /* computed goto */
6415 next();
6416 gexpr();
6417 if ((vtop->type.t & VT_BTYPE) != VT_PTR)
6418 expect("pointer");
6419 ggoto();
6420 } else if (tok >= TOK_UIDENT) {
6421 s = label_find(tok);
6422 /* put forward definition if needed */
6423 if (!s)
6424 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
6425 else if (s->r == LABEL_DECLARED)
6426 s->r = LABEL_FORWARD;
6428 vla_sp_restore_root();
6429 if (s->r & LABEL_FORWARD) {
6430 /* start new goto chain for cleanups, linked via label->next */
6431 if (current_cleanups) {
6432 sym_push2(&pending_gotos, SYM_FIELD, 0, ncleanups);
6433 pending_gotos->prev_tok = s;
6434 s = sym_push2(&s->next, SYM_FIELD, 0, 0);
6435 pending_gotos->next = s;
6437 s->jnext = gjmp(s->jnext);
6438 } else {
6439 try_call_cleanup_goto(s->cleanupstate);
6440 gjmp_addr(s->jnext);
6442 next();
6443 } else {
6444 expect("label identifier");
6446 skip(';');
6447 } else if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
6448 asm_instr();
6449 } else {
6450 b = is_label();
6451 if (b) {
6452 /* label case */
6453 next();
6454 s = label_find(b);
6455 if (s) {
6456 if (s->r == LABEL_DEFINED)
6457 tcc_error("duplicate label '%s'", get_tok_str(s->v, NULL));
6458 s->r = LABEL_DEFINED;
6459 if (s->next) {
6460 Sym *pcl; /* pending cleanup goto */
6461 for (pcl = s->next; pcl; pcl = pcl->prev)
6462 gsym(pcl->jnext);
6463 sym_pop(&s->next, NULL, 0);
6464 } else
6465 gsym(s->jnext);
6466 } else {
6467 s = label_push(&global_label_stack, b, LABEL_DEFINED);
6469 s->jnext = ind;
6470 s->cleanupstate = current_cleanups;
6471 vla_sp_restore();
6472 /* we accept this, but it is a mistake */
6473 block_after_label:
6474 nocode_wanted &= ~0x20000000;
6475 if (tok == '}') {
6476 tcc_warning("deprecated use of label at end of compound statement");
6477 } else {
6478 if (is_expr)
6479 vpop();
6480 block(bsym, csym, is_expr);
6482 } else {
6483 /* expression case */
6484 if (tok != ';') {
6485 if (is_expr) {
6486 vpop();
6487 gexpr();
6488 } else {
6489 gexpr();
6490 vpop();
6493 skip(';');
6498 /* This skips over a stream of tokens containing balanced {} and ()
6499 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
6500 with a '{'). If STR is non-NULL, the skipped tokens are allocated and
6501 stored in *STR. This doesn't check if () and {} are nested correctly,
6502 i.e. "({)}" is accepted. */
6503 static void skip_or_save_block(TokenString **str)
6505 int braces = tok == '{';
6506 int level = 0;
6507 if (str)
6508 *str = tok_str_alloc();
6510 while ((level > 0 || (tok != '}' && tok != ',' && tok != ';' && tok != ')'))) {
6511 int t;
6512 if (tok == TOK_EOF) {
6513 if (str || level > 0)
6514 tcc_error("unexpected end of file");
6515 else
6516 break;
6518 if (str)
6519 tok_str_add_tok(*str);
6520 t = tok;
6521 next();
6522 if (t == '{' || t == '(') {
6523 level++;
6524 } else if (t == '}' || t == ')') {
6525 level--;
6526 if (level == 0 && braces && t == '}')
6527 break;
6530 if (str) {
6531 tok_str_add(*str, -1);
6532 tok_str_add(*str, 0);
6536 #define EXPR_CONST 1
6537 #define EXPR_ANY 2
6539 static void parse_init_elem(int expr_type)
6541 int saved_global_expr;
6542 switch(expr_type) {
6543 case EXPR_CONST:
6544 /* compound literals must be allocated globally in this case */
6545 saved_global_expr = global_expr;
6546 global_expr = 1;
6547 expr_const1();
6548 global_expr = saved_global_expr;
6549 /* NOTE: symbols are accepted, as well as lvalue for anon symbols
6550 (compound literals). */
6551 if (((vtop->r & (VT_VALMASK | VT_LVAL)) != VT_CONST
6552 && ((vtop->r & (VT_SYM|VT_LVAL)) != (VT_SYM|VT_LVAL)
6553 || vtop->sym->v < SYM_FIRST_ANOM))
6554 #ifdef TCC_TARGET_PE
6555 || ((vtop->r & VT_SYM) && vtop->sym->a.dllimport)
6556 #endif
6558 tcc_error("initializer element is not constant");
6559 break;
6560 case EXPR_ANY:
6561 expr_eq();
6562 break;
6566 /* put zeros for variable based init */
6567 static void init_putz(Section *sec, unsigned long c, int size)
6569 if (sec) {
6570 /* nothing to do because globals are already set to zero */
6571 } else {
6572 vpush_global_sym(&func_old_type, TOK_memset);
6573 vseti(VT_LOCAL, c);
6574 #ifdef TCC_TARGET_ARM
6575 vpushs(size);
6576 vpushi(0);
6577 #else
6578 vpushi(0);
6579 vpushs(size);
6580 #endif
6581 gfunc_call(3);
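/* For illustration: when designators leave a gap, e.g.
 *     struct { int a, b, c; } s = { .a = 1, .c = 3 };
 * decl_designator() below calls init_putz() for the bytes of 'b';
 * for locals this becomes the memset(..., 0, size) call generated
 * above, while static storage needs nothing since it is already zero. */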
6585 #define DIF_FIRST 1
6586 #define DIF_SIZE_ONLY 2
6587 #define DIF_HAVE_ELEM 4
6589 /* 'type' is the array or struct type. 'c' is the array or struct
6590 address. cur_field is the pointer to the current
6591 field, for arrays the 'c' member contains the current start
6592 index. 'flags' is as in decl_initializer.
6593 'al' contains the already initialized length of the
6594 current container (starting at c). This returns the new length of that. */
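/* Designator forms handled here (illustrative):
 *     [2] = x          array element          (C99)
 *     [1 ... 3] = x    array element range    (GNU extension)
 *     .field = x       struct/union member    (C99)
 *     field: x         old GNU member syntax  (via is_label()) */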
6595 static int decl_designator(CType *type, Section *sec, unsigned long c,
6596 Sym **cur_field, int flags, int al)
6598 Sym *s, *f;
6599 int index, index_last, align, l, nb_elems, elem_size;
6600 unsigned long corig = c;
6602 elem_size = 0;
6603 nb_elems = 1;
6604 if (flags & DIF_HAVE_ELEM)
6605 goto no_designator;
6606 if (gnu_ext && (l = is_label()) != 0)
6607 goto struct_field;
6608 /* NOTE: we only support ranges for last designator */
6609 while (nb_elems == 1 && (tok == '[' || tok == '.')) {
6610 if (tok == '[') {
6611 if (!(type->t & VT_ARRAY))
6612 expect("array type");
6613 next();
6614 index = index_last = expr_const();
6615 if (tok == TOK_DOTS && gnu_ext) {
6616 next();
6617 index_last = expr_const();
6619 skip(']');
6620 s = type->ref;
6621 if (index < 0 || (s->c >= 0 && index_last >= s->c) ||
6622 index_last < index)
6623 tcc_error("invalid index");
6624 if (cur_field)
6625 (*cur_field)->c = index_last;
6626 type = pointed_type(type);
6627 elem_size = type_size(type, &align);
6628 c += index * elem_size;
6629 nb_elems = index_last - index + 1;
6630 } else {
6631 int cumofs = 0;
6632 next();
6633 l = tok;
6634 struct_field:
6635 next();
6636 if ((type->t & VT_BTYPE) != VT_STRUCT)
6637 expect("struct/union type");
6638 f = find_field(type, l, &cumofs);
6639 if (!f)
6640 expect("field");
6641 if (cur_field)
6642 *cur_field = f;
6643 type = &f->type;
6644 c += cumofs + f->c;
6646 cur_field = NULL;
6648 if (!cur_field) {
6649 if (tok == '=') {
6650 next();
6651 } else if (!gnu_ext) {
6652 expect("=");
6654 } else {
6655 no_designator:
6656 if (type->t & VT_ARRAY) {
6657 index = (*cur_field)->c;
6658 if (type->ref->c >= 0 && index >= type->ref->c)
6659 tcc_error("index too large");
6660 type = pointed_type(type);
6661 c += index * type_size(type, &align);
6662 } else {
6663 f = *cur_field;
6664 while (f && (f->v & SYM_FIRST_ANOM) && (f->type.t & VT_BITFIELD))
6665 *cur_field = f = f->next;
6666 if (!f)
6667 tcc_error("too many field init");
6668 type = &f->type;
6669 c += f->c;
6672 /* must put zero in holes (note that doing it that way
6673 ensures that it even works with designators) */
6674 if (!(flags & DIF_SIZE_ONLY) && c - corig > al)
6675 init_putz(sec, corig + al, c - corig - al);
6676 decl_initializer(type, sec, c, flags & ~DIF_FIRST);
6678 /* XXX: make it more general */
6679 if (!(flags & DIF_SIZE_ONLY) && nb_elems > 1) {
6680 unsigned long c_end;
6681 uint8_t *src, *dst;
6682 int i;
6684 if (!sec) {
6685 vset(type, VT_LOCAL|VT_LVAL, c);
6686 for (i = 1; i < nb_elems; i++) {
6687 vset(type, VT_LOCAL|VT_LVAL, c + elem_size * i);
6688 vswap();
6689 vstore();
6691 vpop();
6692 } else if (!NODATA_WANTED) {
6693 c_end = c + nb_elems * elem_size;
6694 if (c_end > sec->data_allocated)
6695 section_realloc(sec, c_end);
6696 src = sec->data + c;
6697 dst = src;
6698 for(i = 1; i < nb_elems; i++) {
6699 dst += elem_size;
6700 memcpy(dst, src, elem_size);
6704 c += nb_elems * type_size(type, &align);
6705 if (c - corig > al)
6706 al = c - corig;
6707 return al;
6710 /* store a value or an expression directly in global data or in local array */
6711 static void init_putv(CType *type, Section *sec, unsigned long c)
6713 int bt;
6714 void *ptr;
6715 CType dtype;
6717 dtype = *type;
6718 dtype.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
6720 if (sec) {
6721 int size, align;
6722 /* XXX: not portable */
6723 /* XXX: generate error if incorrect relocation */
6724 gen_assign_cast(&dtype);
6725 bt = type->t & VT_BTYPE;
6727 if ((vtop->r & VT_SYM)
6728 && bt != VT_PTR
6729 && bt != VT_FUNC
6730 && (bt != (PTR_SIZE == 8 ? VT_LLONG : VT_INT)
6731 || (type->t & VT_BITFIELD))
6732 && !((vtop->r & VT_CONST) && vtop->sym->v >= SYM_FIRST_ANOM)
6734 tcc_error("initializer element is not computable at load time");
6736 if (NODATA_WANTED) {
6737 vtop--;
6738 return;
6741 size = type_size(type, &align);
6742 section_reserve(sec, c + size);
6743 ptr = sec->data + c;
6745 /* XXX: make code faster ? */
6746 if ((vtop->r & (VT_SYM|VT_CONST)) == (VT_SYM|VT_CONST) &&
6747 vtop->sym->v >= SYM_FIRST_ANOM &&
6748 /* XXX This rejects compound literals like
6749 '(void *){ptr}'. The problem is that '&sym' is
6750 represented the same way, which would be ruled out
6751 by the SYM_FIRST_ANOM check above, but also '"string"'
6752 in 'char *p = "string"' is represented the same
6753 with the type being VT_PTR and the symbol being an
6754 anonymous one. That is, there's no difference in vtop
6755 between '(void *){x}' and '&(void *){x}'. Ignore
6756 pointer typed entities here. Hopefully no real code
6757 will ever use compound literals with scalar type. */
6758 (vtop->type.t & VT_BTYPE) != VT_PTR) {
6759 /* These come from compound literals, memcpy stuff over. */
6760 Section *ssec;
6761 ElfSym *esym;
6762 ElfW_Rel *rel;
6763 esym = elfsym(vtop->sym);
6764 ssec = tcc_state->sections[esym->st_shndx];
6765 memmove (ptr, ssec->data + esym->st_value, size);
6766 if (ssec->reloc) {
6767 /* We need to copy over all memory contents, and that
6768 includes relocations. Use the fact that relocs are
6769 created in order, so look from the end of relocs
6770 until we hit one before the copied region. */
6771 int num_relocs = ssec->reloc->data_offset / sizeof(*rel);
6772 rel = (ElfW_Rel*)(ssec->reloc->data + ssec->reloc->data_offset);
6773 while (num_relocs--) {
6774 rel--;
6775 if (rel->r_offset >= esym->st_value + size)
6776 continue;
6777 if (rel->r_offset < esym->st_value)
6778 break;
6779 /* Note: if the same fields are initialized multiple
6780 times (possible with designators) then we possibly
6781 add multiple relocations for the same offset here.
6782 That would lead to wrong code; the last reloc needs
6783 to win. We clean this up later after the whole
6784 initializer is parsed. */
6785 put_elf_reloca(symtab_section, sec,
6786 c + rel->r_offset - esym->st_value,
6787 ELFW(R_TYPE)(rel->r_info),
6788 ELFW(R_SYM)(rel->r_info),
6789 #if PTR_SIZE == 8
6790 rel->r_addend
6791 #else
6793 #endif
6797 } else {
6798 if (type->t & VT_BITFIELD) {
6799 int bit_pos, bit_size, bits, n;
6800 unsigned char *p, v, m;
6801 bit_pos = BIT_POS(vtop->type.t);
6802 bit_size = BIT_SIZE(vtop->type.t);
6803 p = (unsigned char*)ptr + (bit_pos >> 3);
6804 bit_pos &= 7, bits = 0;
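/* copy bit_size bits of the constant into place, at most 8 - bit_pos bits
   per byte, preserving the other bits of each target byte */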
6805 while (bit_size) {
6806 n = 8 - bit_pos;
6807 if (n > bit_size)
6808 n = bit_size;
6809 v = vtop->c.i >> bits << bit_pos;
6810 m = ((1 << n) - 1) << bit_pos;
6811 *p = (*p & ~m) | (v & m);
6812 bits += n, bit_size -= n, bit_pos = 0, ++p;
6814 } else
6815 switch(bt) {
6816 /* XXX: when cross-compiling we assume that each type has the
6817 same representation on host and target, which is likely to
6818 be wrong in the case of long double */
6819 case VT_BOOL:
6820 vtop->c.i = vtop->c.i != 0;
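/* fall through: the normalized bool is stored like a byte */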
6821 case VT_BYTE:
6822 *(char *)ptr |= vtop->c.i;
6823 break;
6824 case VT_SHORT:
6825 *(short *)ptr |= vtop->c.i;
6826 break;
6827 case VT_FLOAT:
6828 *(float*)ptr = vtop->c.f;
6829 break;
6830 case VT_DOUBLE:
6831 *(double *)ptr = vtop->c.d;
6832 break;
6833 case VT_LDOUBLE:
6834 #if defined TCC_IS_NATIVE_387
6835 if (sizeof (long double) >= 10) /* zero pad ten-byte LD */
6836 memcpy(ptr, &vtop->c.ld, 10);
6837 #ifdef __TINYC__
6838 else if (sizeof (long double) == sizeof (double))
6839 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr) : "m" (vtop->c.ld));
6840 #endif
6841 else if (vtop->c.ld == 0.0)
6843 else
6844 #endif
6845 if (sizeof(long double) == LDOUBLE_SIZE)
6846 *(long double*)ptr = vtop->c.ld;
6847 else if (sizeof(double) == LDOUBLE_SIZE)
6848 *(double *)ptr = (double)vtop->c.ld;
6849 else
6850 tcc_error("can't cross compile long double constants");
6851 break;
6852 #if PTR_SIZE != 8
6853 case VT_LLONG:
6854 *(long long *)ptr |= vtop->c.i;
6855 break;
6856 #else
6857 case VT_LLONG:
6858 #endif
6859 case VT_PTR:
6861 addr_t val = vtop->c.i;
6862 #if PTR_SIZE == 8
6863 if (vtop->r & VT_SYM)
6864 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
6865 else
6866 *(addr_t *)ptr |= val;
6867 #else
6868 if (vtop->r & VT_SYM)
6869 greloc(sec, vtop->sym, c, R_DATA_PTR);
6870 *(addr_t *)ptr |= val;
6871 #endif
6872 break;
6874 default:
6876 int val = vtop->c.i;
6877 #if PTR_SIZE == 8
6878 if (vtop->r & VT_SYM)
6879 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
6880 else
6881 *(int *)ptr |= val;
6882 #else
6883 if (vtop->r & VT_SYM)
6884 greloc(sec, vtop->sym, c, R_DATA_PTR);
6885 *(int *)ptr |= val;
6886 #endif
6887 break;
6891 vtop--;
6892 } else {
6893 vset(&dtype, VT_LOCAL|VT_LVAL, c);
6894 vswap();
6895 vstore();
6896 vpop();
6900 /* 't' contains the type and storage info. 'c' is the offset of the
6901 object in section 'sec'. If 'sec' is NULL, it means stack based
6902 allocation. 'flags & DIF_FIRST' is true if array '{' must be read (multi
6903 dimension implicit array init handling). 'flags & DIF_SIZE_ONLY' is true if
6904 size only evaluation is wanted (only for arrays). */
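/* For instance, for 'int m[2][2] = { 1, 2, 3, 4 };' the outer call (DIF_FIRST)
   consumes the single '{' and the rows are then initialized through recursive
   calls without braces (implicit-brace handling). */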
6905 static void decl_initializer(CType *type, Section *sec, unsigned long c,
6906 int flags)
6908 int len, n, no_oblock, nb, i;
6909 int size1, align1;
6910 Sym *s, *f;
6911 Sym indexsym;
6912 CType *t1;
6914 if (!(flags & DIF_HAVE_ELEM) && tok != '{' &&
6915 /* In case of strings we have special handling for arrays, so
6916 don't consume them as initializer value (which would commit them
6917 to some anonymous symbol). */
6918 tok != TOK_LSTR && tok != TOK_STR &&
6919 !(flags & DIF_SIZE_ONLY)) {
6920 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
6921 flags |= DIF_HAVE_ELEM;
6924 if ((flags & DIF_HAVE_ELEM) &&
6925 !(type->t & VT_ARRAY) &&
6926 /* Compare unqualified types to strip toplevel qualifiers.
6927 The source type might have VT_CONSTANT set, which is
6928 of course assignable to non-const elements. */
6929 is_compatible_unqualified_types(type, &vtop->type)) {
6930 init_putv(type, sec, c);
6931 } else if (type->t & VT_ARRAY) {
6932 s = type->ref;
6933 n = s->c;
6934 t1 = pointed_type(type);
6935 size1 = type_size(t1, &align1);
6937 no_oblock = 1;
6938 if (((flags & DIF_FIRST) && tok != TOK_LSTR && tok != TOK_STR) ||
6939 tok == '{') {
6940 if (tok != '{')
6941 tcc_error("character array initializer must be a literal,"
6942 " optionally enclosed in braces");
6943 skip('{');
6944 no_oblock = 0;
6947 /* only parse strings here if correct type (otherwise: handle
6948 them as ((w)char *) expressions) */
6949 if ((tok == TOK_LSTR &&
6950 #ifdef TCC_TARGET_PE
6951 (t1->t & VT_BTYPE) == VT_SHORT && (t1->t & VT_UNSIGNED)
6952 #else
6953 (t1->t & VT_BTYPE) == VT_INT
6954 #endif
6955 ) || (tok == TOK_STR && (t1->t & VT_BTYPE) == VT_BYTE)) {
6956 len = 0;
6957 while (tok == TOK_STR || tok == TOK_LSTR) {
6958 int cstr_len, ch;
6960 /* compute maximum number of chars wanted */
6961 if (tok == TOK_STR)
6962 cstr_len = tokc.str.size;
6963 else
6964 cstr_len = tokc.str.size / sizeof(nwchar_t);
6965 cstr_len--;
6966 nb = cstr_len;
6967 if (n >= 0 && nb > (n - len))
6968 nb = n - len;
6969 if (!(flags & DIF_SIZE_ONLY)) {
6970 if (cstr_len > nb)
6971 tcc_warning("initializer-string for array is too long");
6972 /* in order to go faster for the common case (char
6973 string in a global variable), we handle it
6974 specifically */
6975 if (sec && tok == TOK_STR && size1 == 1) {
6976 if (!NODATA_WANTED)
6977 memcpy(sec->data + c + len, tokc.str.data, nb);
6978 } else {
6979 for(i=0;i<nb;i++) {
6980 if (tok == TOK_STR)
6981 ch = ((unsigned char *)tokc.str.data)[i];
6982 else
6983 ch = ((nwchar_t *)tokc.str.data)[i];
6984 vpushi(ch);
6985 init_putv(t1, sec, c + (len + i) * size1);
6989 len += nb;
6990 next();
6992 /* only add trailing zero if enough storage (no
6993 warning in this case since it is standard) */
6994 if (n < 0 || len < n) {
6995 if (!(flags & DIF_SIZE_ONLY)) {
6996 vpushi(0);
6997 init_putv(t1, sec, c + (len * size1));
6999 len++;
7001 len *= size1;
7002 } else {
7003 indexsym.c = 0;
7004 f = &indexsym;
7006 do_init_list:
7007 len = 0;
7008 while (tok != '}' || (flags & DIF_HAVE_ELEM)) {
7009 len = decl_designator(type, sec, c, &f, flags, len);
7010 flags &= ~DIF_HAVE_ELEM;
7011 if (type->t & VT_ARRAY) {
7012 ++indexsym.c;
7013 /* special test for multi dimensional arrays (may not
7014 be strictly correct if designators are used at the
7015 same time) */
7016 if (no_oblock && len >= n*size1)
7017 break;
7018 } else {
7019 if (s->type.t == VT_UNION)
7020 f = NULL;
7021 else
7022 f = f->next;
7023 if (no_oblock && f == NULL)
7024 break;
7027 if (tok == '}')
7028 break;
7029 skip(',');
7032 /* put zeros at the end */
7033 if (!(flags & DIF_SIZE_ONLY) && len < n*size1)
7034 init_putz(sec, c + len, n*size1 - len);
7035 if (!no_oblock)
7036 skip('}');
7037 /* patch type size if needed, which happens only for array types */
7038 if (n < 0)
7039 s->c = size1 == 1 ? len : ((len + size1 - 1)/size1);
7040 } else if ((type->t & VT_BTYPE) == VT_STRUCT) {
7041 size1 = 1;
7042 no_oblock = 1;
7043 if ((flags & DIF_FIRST) || tok == '{') {
7044 skip('{');
7045 no_oblock = 0;
7047 s = type->ref;
7048 f = s->next;
7049 n = s->c;
7050 goto do_init_list;
7051 } else if (tok == '{') {
7052 if (flags & DIF_HAVE_ELEM)
7053 skip(';');
7054 next();
7055 decl_initializer(type, sec, c, flags & ~DIF_HAVE_ELEM);
7056 skip('}');
7057 } else if ((flags & DIF_SIZE_ONLY)) {
7058 /* If we supported only ISO C we wouldn't have to accept calling
7059 this on anything other than an array if DIF_SIZE_ONLY (and even then
7060 only on the outermost level, so no recursion would be needed),
7061 because initializing a flex array member isn't supported.
7062 But GNU C supports it, so we need to recurse even into
7063 subfields of structs and arrays when DIF_SIZE_ONLY is set. */
7064 /* just skip expression */
7065 skip_or_save_block(NULL);
7066 } else {
7067 if (!(flags & DIF_HAVE_ELEM)) {
7068 /* This should happen only when we haven't parsed
7069 the init element above for fear of committing a
7070 string constant to memory too early. */
7071 if (tok != TOK_STR && tok != TOK_LSTR)
7072 expect("string constant");
7073 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
7075 init_putv(type, sec, c);
7079 /* parse an initializer for type 't' if 'has_init' is non zero, and
7080 allocate space in local or global data space ('r' is either
7081 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
7082 variable 'v' of scope 'scope' is declared before initializers
7083 are parsed. If 'v' is zero, then a reference to the new object
7084 is put in the value stack. If 'has_init' is 2, a special parsing
7085 is done to handle string constants. */
7086 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r,
7087 int has_init, int v, int scope)
7089 int size, align, addr;
7090 TokenString *init_str = NULL;
7092 Section *sec;
7093 Sym *flexible_array;
7094 Sym *sym = NULL;
7095 int saved_nocode_wanted = nocode_wanted;
7096 #ifdef CONFIG_TCC_BCHECK
7097 int bcheck;
7098 #endif
7100 /* Always allocate static or global variables */
7101 if (v && (r & VT_VALMASK) == VT_CONST)
7102 nocode_wanted |= 0x80000000;
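/* setting the top bit keeps NODATA_WANTED false, so static data is still
   emitted even though no code is generated */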
7104 #ifdef CONFIG_TCC_BCHECK
7105 bcheck = tcc_state->do_bounds_check && !NODATA_WANTED;
7106 #endif
7108 flexible_array = NULL;
7109 if ((type->t & VT_BTYPE) == VT_STRUCT) {
7110 Sym *field = type->ref->next;
7111 if (field) {
7112 while (field->next)
7113 field = field->next;
7114 if (field->type.t & VT_ARRAY && field->type.ref->c < 0)
7115 flexible_array = field;
7119 size = type_size(type, &align);
7120 /* If unknown size, we must evaluate it before
7121 evaluating initializers because
7122 initializers can generate global data too
7123 (e.g. string pointers or ISO C99 compound
7124 literals). It also simplifies local
7125 initializer handling */
7126 if (size < 0 || (flexible_array && has_init)) {
7127 if (!has_init)
7128 tcc_error("unknown type size");
7129 /* get all init string */
7130 if (has_init == 2) {
7131 init_str = tok_str_alloc();
7132 /* only get strings */
7133 while (tok == TOK_STR || tok == TOK_LSTR) {
7134 tok_str_add_tok(init_str);
7135 next();
7137 tok_str_add(init_str, -1);
7138 tok_str_add(init_str, 0);
7139 } else {
7140 skip_or_save_block(&init_str);
7142 unget_tok(0);
7144 /* compute size */
7145 begin_macro(init_str, 1);
7146 next();
7147 decl_initializer(type, NULL, 0, DIF_FIRST | DIF_SIZE_ONLY);
7148 /* prepare second initializer parsing */
7149 macro_ptr = init_str->str;
7150 next();
7152 /* if still unknown size, error */
7153 size = type_size(type, &align);
7154 if (size < 0)
7155 tcc_error("unknown type size");
7157 /* If there's a flex member and it was used in the initializer,
7158 adjust size. */
7159 if (flexible_array &&
7160 flexible_array->type.ref->c > 0)
7161 size += flexible_array->type.ref->c
7162 * pointed_size(&flexible_array->type);
7163 /* take into account specified alignment if bigger */
7164 if (ad->a.aligned) {
7165 int speca = 1 << (ad->a.aligned - 1);
7166 if (speca > align)
7167 align = speca;
7168 } else if (ad->a.packed) {
7169 align = 1;
7172 if (!v && NODATA_WANTED)
7173 size = 0, align = 1;
7175 if ((r & VT_VALMASK) == VT_LOCAL) {
7176 sec = NULL;
7177 #ifdef CONFIG_TCC_BCHECK
7178 if (bcheck && (type->t & VT_ARRAY)) {
7179 loc--;
7181 #endif
7182 loc = (loc - size) & -align;
7183 addr = loc;
7184 #ifdef CONFIG_TCC_BCHECK
7185 /* handles bounds */
7186 /* XXX: currently, since we do only one pass, we cannot track
7187 '&' operators, so we add only arrays */
7188 if (bcheck && (type->t & VT_ARRAY)) {
7189 addr_t *bounds_ptr;
7190 /* add padding between regions */
7191 loc--;
7192 /* then add local bound info */
7193 bounds_ptr = section_ptr_add(lbounds_section, 2 * sizeof(addr_t));
7194 bounds_ptr[0] = addr;
7195 bounds_ptr[1] = size;
7197 #endif
7198 if (v) {
7199 /* local variable */
7200 #ifdef CONFIG_TCC_ASM
7201 if (ad->asm_label) {
7202 int reg = asm_parse_regvar(ad->asm_label);
7203 if (reg >= 0)
7204 r = (r & ~VT_VALMASK) | reg;
7206 #endif
7207 sym = sym_push(v, type, r, addr);
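/* variables declared with __attribute__((cleanup(fn))) get a cleanup record
   so the handler can be called when the block is left */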
7208 if (ad->cleanup_func) {
7209 Sym *cls = sym_push2(&all_cleanups, SYM_FIELD | ++ncleanups, 0, 0);
7210 cls->prev_tok = sym;
7211 cls->next = ad->cleanup_func;
7212 cls->ncl = current_cleanups;
7213 current_cleanups = cls;
7216 sym->a = ad->a;
7217 } else {
7218 /* push local reference */
7219 vset(type, r, addr);
7221 } else {
7222 if (v && scope == VT_CONST) {
7223 /* see if the symbol was already defined */
7224 sym = sym_find(v);
7225 if (sym) {
7226 patch_storage(sym, ad, type);
7227 /* we accept several definitions of the same global variable. */
7228 if (!has_init && sym->c && elfsym(sym)->st_shndx != SHN_UNDEF)
7229 goto no_alloc;
7233 /* allocate symbol in corresponding section */
7234 sec = ad->section;
7235 if (!sec) {
7236 if (has_init)
7237 sec = data_section;
7238 else if (tcc_state->nocommon)
7239 sec = bss_section;
7242 if (sec) {
7243 addr = section_add(sec, size, align);
7244 #ifdef CONFIG_TCC_BCHECK
7245 /* add padding if bound check */
7246 if (bcheck)
7247 section_add(sec, 1, 1);
7248 #endif
7249 } else {
7250 addr = align; /* SHN_COMMON is special, symbol value is align */
7251 sec = common_section;
7254 if (v) {
7255 if (!sym) {
7256 sym = sym_push(v, type, r | VT_SYM, 0);
7257 patch_storage(sym, ad, NULL);
7259 /* Local statics have a scope until now (for
7260 warnings), remove it here. */
7261 sym->sym_scope = 0;
7262 /* update symbol definition */
7263 put_extern_sym(sym, sec, addr, size);
7264 } else {
7265 /* push global reference */
7266 vpush_ref(type, sec, addr, size);
7267 sym = vtop->sym;
7268 vtop->r |= r;
7271 #ifdef CONFIG_TCC_BCHECK
7272 /* handle bounds now, because the symbol must be defined
7273 before the relocation can be emitted */
7274 if (bcheck) {
7275 addr_t *bounds_ptr;
7277 greloca(bounds_section, sym, bounds_section->data_offset, R_DATA_PTR, 0);
7278 /* then add global bound info */
7279 bounds_ptr = section_ptr_add(bounds_section, 2 * sizeof(addr_t));
7280 bounds_ptr[0] = 0; /* relocated */
7281 bounds_ptr[1] = size;
7283 #endif
7286 if (type->t & VT_VLA) {
7287 int a;
7289 if (NODATA_WANTED)
7290 goto no_alloc;
7292 /* save current stack pointer */
7293 if (vlas_in_scope == 0) {
7294 if (vla_sp_root_loc == -1)
7295 vla_sp_root_loc = (loc -= PTR_SIZE);
7296 gen_vla_sp_save(vla_sp_root_loc);
7299 vla_runtime_type_size(type, &a);
7300 gen_vla_alloc(type, a);
7301 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
7302 /* on _WIN64, because of the function args scratch area, the
7303 result of alloca differs from RSP and is returned in RAX. */
7304 gen_vla_result(addr), addr = (loc -= PTR_SIZE);
7305 #endif
7306 gen_vla_sp_save(addr);
7307 vla_sp_loc = addr;
7308 vlas_in_scope++;
7310 } else if (has_init) {
7311 size_t oldreloc_offset = 0;
7312 if (sec && sec->reloc)
7313 oldreloc_offset = sec->reloc->data_offset;
7314 decl_initializer(type, sec, addr, DIF_FIRST);
7315 if (sec && sec->reloc)
7316 squeeze_multi_relocs(sec, oldreloc_offset);
7317 /* patch flexible array member size back to -1, */
7318 /* for possible subsequent similar declarations */
7319 if (flexible_array)
7320 flexible_array->type.ref->c = -1;
7323 no_alloc:
7324 /* restore parse state if needed */
7325 if (init_str) {
7326 end_macro();
7327 next();
7330 nocode_wanted = saved_nocode_wanted;
7333 /* parse a function defined by symbol 'sym' and generate its code in
7334 'cur_text_section' */
7335 static void gen_function(Sym *sym)
7337 nocode_wanted = 0;
7338 ind = cur_text_section->data_offset;
7339 if (sym->a.aligned) {
7340 size_t newoff = section_add(cur_text_section, 0,
7341 1 << (sym->a.aligned - 1));
7342 gen_fill_nops(newoff - ind);
7344 /* NOTE: we patch the symbol size later */
7345 put_extern_sym(sym, cur_text_section, ind, 0);
7346 funcname = get_tok_str(sym->v, NULL);
7347 func_ind = ind;
7348 /* Initialize VLA state */
7349 vla_sp_loc = -1;
7350 vla_sp_root_loc = -1;
7351 /* put debug symbol */
7352 tcc_debug_funcstart(tcc_state, sym);
7353 /* push a dummy symbol to enable local sym storage */
7354 sym_push2(&local_stack, SYM_FIELD, 0, 0);
7355 local_scope = 1; /* for function parameters */
7356 gfunc_prolog(&sym->type);
7357 reset_local_scope();
7358 rsym = 0;
7359 clear_temp_local_var_list();
7360 block(NULL, NULL, 0);
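/* if the end of main() is reachable, emit the implicit 'return 0;' required
   by C99 for a main returning int */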
7361 if (!(nocode_wanted & 0x20000000)
7362 && ((func_vt.t & VT_BTYPE) == VT_INT)
7363 && !strcmp (funcname, "main"))
7365 nocode_wanted = 0;
7366 vpushi(0);
7367 gen_assign_cast(&func_vt);
7368 gfunc_return(&func_vt);
7370 nocode_wanted = 0;
7371 gsym(rsym);
7372 gfunc_epilog();
7373 cur_text_section->data_offset = ind;
7374 label_pop(&global_label_stack, NULL, 0);
7375 /* reset local stack */
7376 reset_local_scope();
7377 sym_pop(&local_stack, NULL, 0);
7378 /* end of function */
7379 /* patch symbol size */
7380 elfsym(sym)->st_size = ind - func_ind;
7381 tcc_debug_funcend(tcc_state, ind - func_ind);
7382 /* It's better to crash than to generate wrong code */
7383 cur_text_section = NULL;
7384 funcname = ""; /* for safety */
7385 func_vt.t = VT_VOID; /* for safety */
7386 func_var = 0; /* for safety */
7387 ind = 0; /* for safety */
7388 nocode_wanted = 0x80000000;
7389 check_vstack();
7392 static void gen_inline_functions(TCCState *s)
7394 Sym *sym;
7395 int inline_generated, i, ln;
7396 struct InlineFunc *fn;
7398 ln = file->line_num;
7399 /* iterate while inline functions are referenced */
7400 do {
7401 inline_generated = 0;
7402 for (i = 0; i < s->nb_inline_fns; ++i) {
7403 fn = s->inline_fns[i];
7404 sym = fn->sym;
7405 if (sym && sym->c) {
7406 /* the function was used: generate its code and
7407 convert it to a normal function */
7408 fn->sym = NULL;
7409 if (file)
7410 pstrcpy(file->filename, sizeof file->filename, fn->filename);
7411 sym->type.t &= ~VT_INLINE;
7413 begin_macro(fn->func_str, 1);
7414 next();
7415 cur_text_section = text_section;
7416 gen_function(sym);
7417 end_macro();
7419 inline_generated = 1;
7422 } while (inline_generated);
7423 file->line_num = ln;
7426 ST_FUNC void free_inline_functions(TCCState *s)
7428 int i;
7429 /* free tokens of unused inline functions */
7430 for (i = 0; i < s->nb_inline_fns; ++i) {
7431 struct InlineFunc *fn = s->inline_fns[i];
7432 if (fn->sym)
7433 tok_str_free(fn->func_str);
7435 dynarray_reset(&s->inline_fns, &s->nb_inline_fns);
7438 /* 'l' is VT_LOCAL or VT_CONST to define default storage type, or VT_CMP
7439 if parsing old style parameter decl list (and FUNC_SYM is set then) */
7440 static int decl0(int l, int is_for_loop_init, Sym *func_sym)
7442 int v, has_init, r;
7443 CType type, btype;
7444 Sym *sym;
7445 AttributeDef ad, adbase;
7447 while (1) {
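/* C11 _Static_assert(constant-expression, string-literal): error out with the
   message if the expression is 0; the declaration must be terminated by ';' */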
7448 if (tok == TOK_STATIC_ASSERT) {
7449 int c;
7451 next();
7452 skip('(');
7453 c = expr_const();
7454 skip(',');
7455 if (c == 0)
7456 tcc_error("%s", get_tok_str(tok, &tokc));
7457 next();
7458 skip(')');
7459 skip(';');
7460 continue;
7462 if (!parse_btype(&btype, &adbase)) {
7463 if (is_for_loop_init)
7464 return 0;
7465 /* skip redundant ';' if not in old parameter decl scope */
7466 if (tok == ';' && l != VT_CMP) {
7467 next();
7468 continue;
7470 if (l != VT_CONST)
7471 break;
7472 if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
7473 /* global asm block */
7474 asm_global_instr();
7475 continue;
7477 if (tok >= TOK_UIDENT) {
7478 /* special test for old K&R protos without explicit int
7479 type. Only accepted when defining global data */
7480 btype.t = VT_INT;
7481 } else {
7482 if (tok != TOK_EOF)
7483 expect("declaration");
7484 break;
7487 if (tok == ';') {
7488 if ((btype.t & VT_BTYPE) == VT_STRUCT) {
7489 int v = btype.ref->v;
7490 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) >= SYM_FIRST_ANOM)
7491 tcc_warning("unnamed struct/union that defines no instances");
7492 next();
7493 continue;
7495 if (IS_ENUM(btype.t)) {
7496 next();
7497 continue;
7500 while (1) { /* iterate thru each declaration */
7501 type = btype;
7502 /* If the base type itself was an array type of unspecified
7503 size (like in 'typedef int arr[]; arr x = {1};') then
7504 we will overwrite the unknown size by the real one for
7505 this decl. We need to unshare the ref symbol holding
7506 that size. */
7507 if ((type.t & VT_ARRAY) && type.ref->c < 0) {
7508 type.ref = sym_push(SYM_FIELD, &type.ref->type, 0, type.ref->c);
7510 ad = adbase;
7511 type_decl(&type, &ad, &v, TYPE_DIRECT);
7512 #if 0
7514 char buf[500];
7515 type_to_str(buf, sizeof(buf), &type, get_tok_str(v, NULL));
7516 printf("type = '%s'\n", buf);
7518 #endif
7519 if ((type.t & VT_BTYPE) == VT_FUNC) {
7520 if ((type.t & VT_STATIC) && (l == VT_LOCAL)) {
7521 tcc_error("function without file scope cannot be static");
7523 /* if old style function prototype, we accept a
7524 declaration list */
7525 sym = type.ref;
7526 if (sym->f.func_type == FUNC_OLD && l == VT_CONST)
7527 decl0(VT_CMP, 0, sym);
7530 if (gnu_ext && (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
7531 ad.asm_label = asm_label_instr();
7532 /* parse one last attribute list, after asm label */
7533 parse_attribute(&ad);
7534 if (tok == '{')
7535 expect(";");
7538 #ifdef TCC_TARGET_PE
7539 if (ad.a.dllimport || ad.a.dllexport) {
7540 if (type.t & (VT_STATIC|VT_TYPEDEF))
7541 tcc_error("cannot have dll linkage with static or typedef");
7542 if (ad.a.dllimport) {
7543 if ((type.t & VT_BTYPE) == VT_FUNC)
7544 ad.a.dllimport = 0;
7545 else
7546 type.t |= VT_EXTERN;
7549 #endif
7550 if (tok == '{') {
7551 if (l != VT_CONST)
7552 tcc_error("cannot use local functions");
7553 if ((type.t & VT_BTYPE) != VT_FUNC)
7554 expect("function definition");
7556 /* reject abstract declarators in function definition;
7557 make old-style params without a declaration have int type */
7558 sym = type.ref;
7559 while ((sym = sym->next) != NULL) {
7560 if (!(sym->v & ~SYM_FIELD))
7561 expect("identifier");
7562 if (sym->type.t == VT_VOID)
7563 sym->type = int_type;
7566 /* XXX: cannot do better now: convert extern inline to static inline */
7567 if ((type.t & (VT_EXTERN | VT_INLINE)) == (VT_EXTERN | VT_INLINE))
7568 type.t = (type.t & ~VT_EXTERN) | VT_STATIC;
7570 /* put function symbol */
7571 sym = external_sym(v, &type, 0, &ad);
7573 /* static inline functions are just recorded as a kind
7574 of macro. Their code will be emitted at the end of
7575 the compilation unit only if they are used */
7576 if ((type.t & (VT_INLINE | VT_STATIC)) ==
7577 (VT_INLINE | VT_STATIC)) {
7578 struct InlineFunc *fn;
7579 const char *filename;
7581 filename = file ? file->filename : "";
7582 fn = tcc_malloc(sizeof *fn + strlen(filename));
7583 strcpy(fn->filename, filename);
7584 fn->sym = sym;
7585 skip_or_save_block(&fn->func_str);
7586 dynarray_add(&tcc_state->inline_fns,
7587 &tcc_state->nb_inline_fns, fn);
7588 } else {
7589 /* compute text section */
7590 cur_text_section = ad.section;
7591 if (!cur_text_section)
7592 cur_text_section = text_section;
7593 gen_function(sym);
7595 break;
7596 } else {
7597 if (l == VT_CMP) {
7598 /* find parameter in function parameter list */
7599 for (sym = func_sym->next; sym; sym = sym->next)
7600 if ((sym->v & ~SYM_FIELD) == v)
7601 goto found;
7602 tcc_error("declaration for parameter '%s' but no such parameter",
7603 get_tok_str(v, NULL));
7604 found:
7605 if (type.t & VT_STORAGE) /* 'register' is okay */
7606 tcc_error("storage class specified for '%s'",
7607 get_tok_str(v, NULL));
7608 if (sym->type.t != VT_VOID)
7609 tcc_error("redefinition of parameter '%s'",
7610 get_tok_str(v, NULL));
7611 convert_parameter_type(&type);
7612 sym->type = type;
7613 } else if (type.t & VT_TYPEDEF) {
7614 /* save typedefed type */
7615 /* XXX: test storage specifiers ? */
7616 sym = sym_find(v);
7617 if (sym && sym->sym_scope == local_scope) {
7618 if (!is_compatible_types(&sym->type, &type)
7619 || !(sym->type.t & VT_TYPEDEF))
7620 tcc_error("incompatible redefinition of '%s'",
7621 get_tok_str(v, NULL));
7622 sym->type = type;
7623 } else {
7624 sym = sym_push(v, &type, 0, 0);
7626 sym->a = ad.a;
7627 sym->f = ad.f;
7628 } else if ((type.t & VT_BTYPE) == VT_VOID
7629 && !(type.t & VT_EXTERN)) {
7630 tcc_error("declaration of void object");
7631 } else {
7632 r = 0;
7633 if ((type.t & VT_BTYPE) == VT_FUNC) {
7634 /* external function definition */
7635 /* specific case for func_call attribute */
7636 type.ref->f = ad.f;
7637 } else if (!(type.t & VT_ARRAY)) {
7638 /* not lvalue if array */
7639 r |= lvalue_type(type.t);
7641 has_init = (tok == '=');
7642 if (has_init && (type.t & VT_VLA))
7643 tcc_error("variable length array cannot be initialized");
7644 if (((type.t & VT_EXTERN) && (!has_init || l != VT_CONST)) ||
7645 ((type.t & VT_BTYPE) == VT_FUNC) ||
7646 ((type.t & VT_ARRAY) && (type.t & VT_STATIC) &&
7647 !has_init && l == VT_CONST && type.ref->c < 0)) {
7648 /* external variable or function */
7649 /* NOTE: as in GCC, uninitialized global static
7650 arrays of unknown size are considered
7651 extern */
7652 type.t |= VT_EXTERN;
7653 sym = external_sym(v, &type, r, &ad);
7654 if (ad.alias_target) {
7655 ElfSym *esym;
7656 Sym *alias_target;
7657 alias_target = sym_find(ad.alias_target);
7658 esym = elfsym(alias_target);
7659 if (!esym)
7660 tcc_error("unsupported forward __alias__ attribute");
7661 /* Local statics have a scope until now (for
7662 warnings), remove it here. */
7663 sym->sym_scope = 0;
7664 put_extern_sym2(sym, esym->st_shndx, esym->st_value, esym->st_size, 0);
7666 } else {
7667 if (type.t & VT_STATIC)
7668 r |= VT_CONST;
7669 else
7670 r |= l;
7671 if (has_init)
7672 next();
7673 else if (l == VT_CONST)
7674 /* uninitialized global variables may be overridden */
7675 type.t |= VT_EXTERN;
7676 decl_initializer_alloc(&type, &ad, r, has_init, v, l);
7679 if (tok != ',') {
7680 if (is_for_loop_init)
7681 return 1;
7682 skip(';');
7683 break;
7685 next();
7689 return 0;
7692 static void decl(int l)
7694 decl0(l, 0, NULL);
7697 /* ------------------------------------------------------------------------- */