[tinycc.git] / tccgen.c
1 /*
2 * TCC - Tiny C Compiler
3 *
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 #include "tcc.h"
22 /********************************************************/
23 /* global variables */
25 /* loc : local variable index
26 ind : output code index
27 rsym: return symbol
28 anon_sym: anonymous symbol index
30 ST_DATA int rsym, anon_sym, ind, loc;
32 ST_DATA Sym *sym_free_first;
33 ST_DATA void **sym_pools;
34 ST_DATA int nb_sym_pools;
36 ST_DATA Sym *global_stack;
37 ST_DATA Sym *local_stack;
38 ST_DATA Sym *define_stack;
39 ST_DATA Sym *global_label_stack;
40 ST_DATA Sym *local_label_stack;
42 static Sym *all_cleanups, *current_cleanups, *pending_gotos;
43 static int ncleanups;
45 static int local_scope;
46 static int in_sizeof;
47 static int in_generic;
48 static int section_sym;
50 ST_DATA int vlas_in_scope; /* number of VLAs that are currently in scope */
51 ST_DATA int vla_sp_root_loc; /* vla_sp_loc for SP before any VLAs were pushed */
52 ST_DATA int vla_sp_loc; /* stack location where the stack pointer is saved whenever it is modified (by VLA allocation) */
54 ST_DATA SValue __vstack[1+VSTACK_SIZE], *vtop, *pvtop;
56 ST_DATA int const_wanted; /* true if constant wanted */
57 ST_DATA int nocode_wanted; /* no code generation wanted */
58 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
59 #define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */
61 /* Automagical code suppression ----> */
62 #define CODE_OFF() (nocode_wanted |= 0x20000000)
63 #define CODE_ON() (nocode_wanted &= ~0x20000000)
65 /* Clear 'nocode_wanted' at label if it was used */
66 ST_FUNC void gsym(int t) { if (t) { gsym_addr(t, ind); CODE_ON(); }}
67 static int gind(void) { CODE_ON(); return ind; }
69 /* Set 'nocode_wanted' after unconditional jumps */
70 static void gjmp_addr_acs(int t) { gjmp_addr(t); CODE_OFF(); }
71 static int gjmp_acs(int t) { t = gjmp(t); CODE_OFF(); return t; }
73 /* These are #undef'd at the end of this file */
74 #define gjmp_addr gjmp_addr_acs
75 #define gjmp gjmp_acs
76 /* <---- */
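/* Note: 'nocode_wanted' is a bit set rather than a plain boolean.
   tccgen_compile() initializes it to 0x80000000 outside of any function:
   that value is non-zero (code generation off) but, on the usual
   two's-complement targets, negative, so NODATA_WANTED stays false and
   initializers for global data are still emitted.  Bit 0x20000000 marks
   "currently unreachable" code: CODE_OFF() sets it after unconditional
   jumps (gjmp_acs/gjmp_addr_acs above) and gsym()/gind() clear it again
   when a forward label is resolved, so e.g. statements following
   'return x;' are still parsed and type-checked but produce no
   instructions until the next reachable label. */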
78 ST_DATA int global_expr; /* true if compound literals must be allocated globally (used during initializer parsing) */
79 ST_DATA CType func_vt; /* current function return type (used by return instruction) */
80 ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
81 ST_DATA int func_vc;
82 ST_DATA int last_line_num, last_ind, func_ind; /* debug last line number and pc */
83 ST_DATA const char *funcname;
84 ST_DATA int g_debug;
86 ST_DATA CType char_pointer_type, func_old_type, int_type, size_type, ptrdiff_type;
88 ST_DATA struct switch_t {
89 struct case_t {
90 int64_t v1, v2;
91 int sym;
92 } **p; int n; /* list of case ranges */
93 int def_sym; /* default symbol */
94 } *cur_switch; /* current switch */
96 #define MAX_TEMP_LOCAL_VARIABLE_NUMBER 0x4
97 /* list of temporary local variables on the stack in the current function */
98 ST_DATA struct temp_local_variable {
99 int location; /* offset on stack, stored in SValue.c.i */
100 short size;
101 short align;
102 } arr_temp_local_vars[MAX_TEMP_LOCAL_VARIABLE_NUMBER];
103 short nb_temp_local_vars;
105 /* ------------------------------------------------------------------------- */
107 static void gen_cast(CType *type);
108 static void gen_cast_s(int t);
109 static inline CType *pointed_type(CType *type);
110 static int is_compatible_types(CType *type1, CType *type2);
111 static int parse_btype(CType *type, AttributeDef *ad);
112 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td);
113 static void parse_expr_type(CType *type);
114 static void init_putv(CType *type, Section *sec, unsigned long c);
115 static void decl_initializer(CType *type, Section *sec, unsigned long c, int flags);
116 static void block(int *bsym, Sym *bcl, int *csym, Sym *ccl, int is_expr);
117 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
118 static void decl(int l);
119 static int decl0(int l, int is_for_loop_init, Sym *);
120 static void expr_eq(void);
121 static void vla_runtime_type_size(CType *type, int *a);
122 static void vla_sp_restore(void);
123 static void vla_sp_restore_root(void);
124 static int is_compatible_unqualified_types(CType *type1, CType *type2);
125 static inline int64_t expr_const64(void);
126 static void vpush64(int ty, unsigned long long v);
127 static void vpush(CType *type);
128 static int gvtst(int inv, int t);
129 static void gen_inline_functions(TCCState *s);
130 static void skip_or_save_block(TokenString **str);
131 static void gv_dup(void);
132 static int get_temp_local_var(int size,int align);
133 static void clear_temp_local_var_list();
136 static void reset_local_scope(void)
138 if (current_cleanups)
139 tcc_error("ICE current_cleanups");
140 sym_pop(&all_cleanups, NULL, 0);
141 local_scope = 0;
144 ST_INLN int is_float(int t)
146 int bt;
147 bt = t & VT_BTYPE;
148 return bt == VT_LDOUBLE || bt == VT_DOUBLE || bt == VT_FLOAT || bt == VT_QFLOAT;
151 /* we use our own 'finite' function to avoid potential problems with
152 non-standard math libs */
153 /* XXX: endianness dependent */
154 ST_FUNC int ieee_finite(double d)
156 int p[4];
157 memcpy(p, &d, sizeof(double));
158 return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
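/* Note: the trick above assumes a little-endian IEEE-754 double, so p[1]
   holds the high 32 bits.  OR-ing with 0x800fffff forces every bit to 1
   except the 11 exponent bits (bits 20..30); adding 1 then clears bit 31
   only when the exponent field is all ones (Inf or NaN), so the final
   '>> 31' yields 1 for finite values and 0 otherwise. */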
161 /* compiling intel long double natively */
162 #if (defined __i386__ || defined __x86_64__) \
163 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
164 # define TCC_IS_NATIVE_387
165 #endif
167 ST_FUNC void test_lvalue(void)
169 if (!(vtop->r & VT_LVAL))
170 expect("lvalue");
173 ST_FUNC void check_vstack(void)
175 if (pvtop != vtop)
176 tcc_error("internal compiler error: vstack leak (%d)", vtop - pvtop);
179 /* ------------------------------------------------------------------------- */
180 /* vstack debugging aid */
182 #if 0
183 void pv (const char *lbl, int a, int b)
185 int i;
186 for (i = a; i < a + b; ++i) {
187 SValue *p = &vtop[-i];
188 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
189 lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
192 #endif
194 /* ------------------------------------------------------------------------- */
195 /* start of translation unit info */
196 ST_FUNC void tcc_debug_start(TCCState *s1)
198 if (s1->do_debug) {
199 char buf[512];
201 /* file info: full path + filename */
202 section_sym = put_elf_sym(symtab_section, 0, 0,
203 ELFW(ST_INFO)(STB_LOCAL, STT_SECTION), 0,
204 text_section->sh_num, NULL);
205 getcwd(buf, sizeof(buf));
206 #ifdef _WIN32
207 normalize_slashes(buf);
208 #endif
209 pstrcat(buf, sizeof(buf), "/");
210 put_stabs_r(buf, N_SO, 0, 0,
211 text_section->data_offset, text_section, section_sym);
212 put_stabs_r(file->filename, N_SO, 0, 0,
213 text_section->data_offset, text_section, section_sym);
214 last_ind = 0;
215 last_line_num = 0;
218 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
219 symbols can be safely used */
220 put_elf_sym(symtab_section, 0, 0,
221 ELFW(ST_INFO)(STB_LOCAL, STT_FILE), 0,
222 SHN_ABS, file->filename);
225 /* put end of translation unit info */
226 ST_FUNC void tcc_debug_end(TCCState *s1)
228 if (!s1->do_debug)
229 return;
230 put_stabs_r(NULL, N_SO, 0, 0,
231 text_section->data_offset, text_section, section_sym);
235 /* generate line number info */
236 ST_FUNC void tcc_debug_line(TCCState *s1)
238 if (!s1->do_debug)
239 return;
240 if ((last_line_num != file->line_num || last_ind != ind)) {
241 put_stabn(N_SLINE, 0, file->line_num, ind - func_ind);
242 last_ind = ind;
243 last_line_num = file->line_num;
247 /* put function symbol */
248 ST_FUNC void tcc_debug_funcstart(TCCState *s1, Sym *sym)
250 char buf[512];
252 if (!s1->do_debug)
253 return;
255 /* stabs info */
256 /* XXX: we put here a dummy type */
257 snprintf(buf, sizeof(buf), "%s:%c1",
258 funcname, sym->type.t & VT_STATIC ? 'f' : 'F');
259 put_stabs_r(buf, N_FUN, 0, file->line_num, 0,
260 cur_text_section, sym->c);
261 /* //gr gdb wants a line at the function */
262 put_stabn(N_SLINE, 0, file->line_num, 0);
264 last_ind = 0;
265 last_line_num = 0;
268 /* put function size */
269 ST_FUNC void tcc_debug_funcend(TCCState *s1, int size)
271 if (!s1->do_debug)
272 return;
273 put_stabn(N_FUN, 0, 0, size);
276 /* ------------------------------------------------------------------------- */
277 ST_FUNC int tccgen_compile(TCCState *s1)
279 cur_text_section = NULL;
280 funcname = "";
281 anon_sym = SYM_FIRST_ANOM;
282 section_sym = 0;
283 const_wanted = 0;
284 nocode_wanted = 0x80000000;
285 local_scope = 0;
287 /* define some often used types */
288 int_type.t = VT_INT;
289 char_pointer_type.t = VT_BYTE;
290 mk_pointer(&char_pointer_type);
291 #if PTR_SIZE == 4
292 size_type.t = VT_INT | VT_UNSIGNED;
293 ptrdiff_type.t = VT_INT;
294 #elif LONG_SIZE == 4
295 size_type.t = VT_LLONG | VT_UNSIGNED;
296 ptrdiff_type.t = VT_LLONG;
297 #else
298 size_type.t = VT_LONG | VT_LLONG | VT_UNSIGNED;
299 ptrdiff_type.t = VT_LONG | VT_LLONG;
300 #endif
301 func_old_type.t = VT_FUNC;
302 func_old_type.ref = sym_push(SYM_FIELD, &int_type, 0, 0);
303 func_old_type.ref->f.func_call = FUNC_CDECL;
304 func_old_type.ref->f.func_type = FUNC_OLD;
306 tcc_debug_start(s1);
308 #ifdef TCC_TARGET_ARM
309 arm_init(s1);
310 #endif
312 #ifdef INC_DEBUG
313 printf("%s: **** new file\n", file->filename);
314 #endif
316 parse_flags = PARSE_FLAG_PREPROCESS | PARSE_FLAG_TOK_NUM | PARSE_FLAG_TOK_STR;
317 next();
318 decl(VT_CONST);
319 gen_inline_functions(s1);
320 check_vstack();
321 /* end of translation unit info */
322 tcc_debug_end(s1);
323 return 0;
326 /* ------------------------------------------------------------------------- */
327 ST_FUNC ElfSym *elfsym(Sym *s)
329 if (!s || !s->c)
330 return NULL;
331 return &((ElfSym *)symtab_section->data)[s->c];
334 /* apply storage attributes to Elf symbol */
335 ST_FUNC void update_storage(Sym *sym)
337 ElfSym *esym;
338 int sym_bind, old_sym_bind;
340 esym = elfsym(sym);
341 if (!esym)
342 return;
344 if (sym->a.visibility)
345 esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1))
346 | sym->a.visibility;
348 if (sym->type.t & (VT_STATIC | VT_INLINE))
349 sym_bind = STB_LOCAL;
350 else if (sym->a.weak)
351 sym_bind = STB_WEAK;
352 else
353 sym_bind = STB_GLOBAL;
354 old_sym_bind = ELFW(ST_BIND)(esym->st_info);
355 if (sym_bind != old_sym_bind) {
356 esym->st_info = ELFW(ST_INFO)(sym_bind, ELFW(ST_TYPE)(esym->st_info));
359 #ifdef TCC_TARGET_PE
360 if (sym->a.dllimport)
361 esym->st_other |= ST_PE_IMPORT;
362 if (sym->a.dllexport)
363 esym->st_other |= ST_PE_EXPORT;
364 #endif
366 #if 0
367 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
368 get_tok_str(sym->v, NULL),
369 sym_bind == STB_WEAK ? 'w' : sym_bind == STB_LOCAL ? 'l' : 'g',
370 sym->a.visibility,
371 sym->a.dllexport,
372 sym->a.dllimport
374 #endif
377 /* ------------------------------------------------------------------------- */
378 /* update sym->c so that it points to an external symbol in section
379 'section' with value 'value' */
381 ST_FUNC void put_extern_sym2(Sym *sym, int sh_num,
382 addr_t value, unsigned long size,
383 int can_add_underscore)
385 int sym_type, sym_bind, info, other, t;
386 ElfSym *esym;
387 const char *name;
388 char buf1[256];
389 #ifdef CONFIG_TCC_BCHECK
390 char buf[32];
391 #endif
393 if (!sym->c) {
394 name = get_tok_str(sym->v, NULL);
395 #ifdef CONFIG_TCC_BCHECK
396 if (tcc_state->do_bounds_check) {
397 /* XXX: avoid doing that for statics ? */
398 /* if bound checking is activated, we change some function
399 names by adding the "__bound" prefix */
400 switch(sym->v) {
401 #ifdef TCC_TARGET_PE
402 /* XXX: we rely only on malloc hooks */
403 case TOK_malloc:
404 case TOK_free:
405 case TOK_realloc:
406 case TOK_memalign:
407 case TOK_calloc:
408 #endif
409 case TOK_memcpy:
410 case TOK_memmove:
411 case TOK_memset:
412 case TOK_strlen:
413 case TOK_strcpy:
414 case TOK_alloca:
415 strcpy(buf, "__bound_");
416 strcat(buf, name);
417 name = buf;
418 break;
421 #endif
422 t = sym->type.t;
423 if ((t & VT_BTYPE) == VT_FUNC) {
424 sym_type = STT_FUNC;
425 } else if ((t & VT_BTYPE) == VT_VOID) {
426 sym_type = STT_NOTYPE;
427 } else {
428 sym_type = STT_OBJECT;
430 if (t & (VT_STATIC | VT_INLINE))
431 sym_bind = STB_LOCAL;
432 else
433 sym_bind = STB_GLOBAL;
434 other = 0;
435 #ifdef TCC_TARGET_PE
436 if (sym_type == STT_FUNC && sym->type.ref) {
437 Sym *ref = sym->type.ref;
438 if (ref->a.nodecorate) {
439 can_add_underscore = 0;
441 if (ref->f.func_call == FUNC_STDCALL && can_add_underscore) {
442 sprintf(buf1, "_%s@%d", name, ref->f.func_args * PTR_SIZE);
443 name = buf1;
444 other |= ST_PE_STDCALL;
445 can_add_underscore = 0;
448 #endif
449 if (tcc_state->leading_underscore && can_add_underscore) {
450 buf1[0] = '_';
451 pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
452 name = buf1;
454 if (sym->asm_label)
455 name = get_tok_str(sym->asm_label, NULL);
456 info = ELFW(ST_INFO)(sym_bind, sym_type);
457 sym->c = put_elf_sym(symtab_section, value, size, info, other, sh_num, name);
458 } else {
459 esym = elfsym(sym);
460 esym->st_value = value;
461 esym->st_size = size;
462 esym->st_shndx = sh_num;
464 update_storage(sym);
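/* Note on the name decoration above: with bound checking enabled the
   listed runtime functions get a "__bound_" prefix (e.g. memcpy becomes
   __bound_memcpy); on PE targets an undecorated stdcall function becomes
   "_name@N" with N = number of arguments * PTR_SIZE; a plain '_' is
   prepended when 'leading_underscore' is set; and an explicit asm label,
   if present, overrides the computed name entirely. */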
467 ST_FUNC void put_extern_sym(Sym *sym, Section *section,
468 addr_t value, unsigned long size)
470 int sh_num = section ? section->sh_num : SHN_UNDEF;
471 put_extern_sym2(sym, sh_num, value, size, 1);
474 /* add a new relocation entry to symbol 'sym' in section 's' */
475 ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
476 addr_t addend)
478 int c = 0;
480 if (nocode_wanted && s == cur_text_section)
481 return;
483 if (sym) {
484 if (0 == sym->c)
485 put_extern_sym(sym, NULL, 0, 0);
486 c = sym->c;
489 /* now we can add ELF relocation info */
490 put_elf_reloca(symtab_section, s, offset, type, c, addend);
493 #if PTR_SIZE == 4
494 ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
496 greloca(s, sym, offset, type, 0);
498 #endif
500 /* ------------------------------------------------------------------------- */
501 /* symbol allocator */
502 static Sym *__sym_malloc(void)
504 Sym *sym_pool, *sym, *last_sym;
505 int i;
507 sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
508 dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);
510 last_sym = sym_free_first;
511 sym = sym_pool;
512 for(i = 0; i < SYM_POOL_NB; i++) {
513 sym->next = last_sym;
514 last_sym = sym;
515 sym++;
517 sym_free_first = last_sym;
518 return last_sym;
521 static inline Sym *sym_malloc(void)
523 Sym *sym;
524 #ifndef SYM_DEBUG
525 sym = sym_free_first;
526 if (!sym)
527 sym = __sym_malloc();
528 sym_free_first = sym->next;
529 return sym;
530 #else
531 sym = tcc_malloc(sizeof(Sym));
532 return sym;
533 #endif
536 ST_INLN void sym_free(Sym *sym)
538 #ifndef SYM_DEBUG
539 sym->next = sym_free_first;
540 sym_free_first = sym;
541 #else
542 tcc_free(sym);
543 #endif
546 /* push, without hashing */
547 ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, int c)
549 Sym *s;
551 s = sym_malloc();
552 memset(s, 0, sizeof *s);
553 s->v = v;
554 s->type.t = t;
555 s->c = c;
556 /* add in stack */
557 s->prev = *ps;
558 *ps = s;
559 return s;
562 /* find a symbol and return its associated structure. 's' is the top
563 of the symbol stack */
564 ST_FUNC Sym *sym_find2(Sym *s, int v)
566 while (s) {
567 if (s->v == v)
568 return s;
569 else if (s->v == -1)
570 return NULL;
571 s = s->prev;
573 return NULL;
576 /* structure lookup */
577 ST_INLN Sym *struct_find(int v)
579 v -= TOK_IDENT;
580 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
581 return NULL;
582 return table_ident[v]->sym_struct;
585 /* find an identifier */
586 ST_INLN Sym *sym_find(int v)
588 v -= TOK_IDENT;
589 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
590 return NULL;
591 return table_ident[v]->sym_identifier;
594 static int sym_scope(Sym *s)
596 if (IS_ENUM_VAL (s->type.t))
597 return s->type.ref->sym_scope;
598 else
599 return s->sym_scope;
602 /* push a given symbol on the symbol stack */
603 ST_FUNC Sym *sym_push(int v, CType *type, int r, int c)
605 Sym *s, **ps;
606 TokenSym *ts;
608 if (local_stack)
609 ps = &local_stack;
610 else
611 ps = &global_stack;
612 s = sym_push2(ps, v, type->t, c);
613 s->type.ref = type->ref;
614 s->r = r;
615 /* don't record fields or anonymous symbols */
616 /* XXX: simplify */
617 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
618 /* record symbol in token array */
619 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
620 if (v & SYM_STRUCT)
621 ps = &ts->sym_struct;
622 else
623 ps = &ts->sym_identifier;
624 s->prev_tok = *ps;
625 *ps = s;
626 s->sym_scope = local_scope;
627 if (s->prev_tok && sym_scope(s->prev_tok) == s->sym_scope)
628 tcc_error("redeclaration of '%s'",
629 get_tok_str(v & ~SYM_STRUCT, NULL));
631 return s;
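/* Note: a symbol thus ends up on two linked lists at once: the
   global/local symbol stack (via 'prev', walked by sym_pop() on scope
   exit) and the per-identifier chain rooted at
   table_ident[...]->sym_identifier or ->sym_struct (via 'prev_tok',
   giving sym_find()/struct_find() direct access to the innermost
   declaration).  Fields and anonymous symbols are kept off the token
   chains, hence the SYM_FIELD/SYM_FIRST_ANOM tests here and in
   sym_pop(). */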
634 /* push a global identifier */
635 ST_FUNC Sym *global_identifier_push(int v, int t, int c)
637 Sym *s, **ps;
638 s = sym_push2(&global_stack, v, t, c);
639 s->r = VT_CONST | VT_SYM;
640 /* don't record anonymous symbol */
641 if (v < SYM_FIRST_ANOM) {
642 ps = &table_ident[v - TOK_IDENT]->sym_identifier;
643 /* modify the top most local identifier, so that sym_identifier will
644 point to 's' when popped; happens when called from inline asm */
645 while (*ps != NULL && (*ps)->sym_scope)
646 ps = &(*ps)->prev_tok;
647 s->prev_tok = *ps;
648 *ps = s;
650 return s;
653 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
654 pop them yet from the list, but do remove them from the token array. */
655 ST_FUNC void sym_pop(Sym **ptop, Sym *b, int keep)
657 Sym *s, *ss, **ps;
658 TokenSym *ts;
659 int v;
661 s = *ptop;
662 while(s != b) {
663 ss = s->prev;
664 v = s->v;
665 /* remove symbol in token array */
666 /* XXX: simplify */
667 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
668 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
669 if (v & SYM_STRUCT)
670 ps = &ts->sym_struct;
671 else
672 ps = &ts->sym_identifier;
673 *ps = s->prev_tok;
675 if (!keep)
676 sym_free(s);
677 s = ss;
679 if (!keep)
680 *ptop = b;
683 /* ------------------------------------------------------------------------- */
685 static void vsetc(CType *type, int r, CValue *vc)
687 int v;
689 if (vtop >= vstack + (VSTACK_SIZE - 1))
690 tcc_error("memory full (vstack)");
691 /* cannot leave cpu flags if other instructions are generated. Also
692 avoid leaving VT_JMP anywhere except on the top of the stack
693 because it would complicate the code generator.
695 Don't do this when nocode_wanted. vtop might come from
696 !nocode_wanted regions (see 88_codeopt.c) and transforming
697 it to a register without actually generating code is wrong
698 as their value might still be used for real. All values
699 we push under nocode_wanted will eventually be popped
700 again, so that the VT_CMP/VT_JMP value will be in vtop
701 when code is unsuppressed again.
703 Same logic below in vswap(); */
704 if (vtop >= vstack && !nocode_wanted) {
705 v = vtop->r & VT_VALMASK;
706 if (v == VT_CMP || (v & ~1) == VT_JMP)
707 gv(RC_INT);
710 vtop++;
711 vtop->type = *type;
712 vtop->r = r;
713 vtop->r2 = VT_CONST;
714 vtop->c = *vc;
715 vtop->sym = NULL;
718 ST_FUNC void vswap(void)
720 SValue tmp;
721 /* cannot vswap cpu flags. See comment at vsetc() above */
722 if (vtop >= vstack && !nocode_wanted) {
723 int v = vtop->r & VT_VALMASK;
724 if (v == VT_CMP || (v & ~1) == VT_JMP)
725 gv(RC_INT);
727 tmp = vtop[0];
728 vtop[0] = vtop[-1];
729 vtop[-1] = tmp;
732 /* pop stack value */
733 ST_FUNC void vpop(void)
735 int v;
736 v = vtop->r & VT_VALMASK;
737 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
738 /* for x86, we need to pop the FP stack */
739 if (v == TREG_ST0) {
740 o(0xd8dd); /* fstp %st(0) */
741 } else
742 #endif
743 if (v == VT_JMP || v == VT_JMPI) {
744 /* need to put correct jump if && or || without test */
745 gsym(vtop->c.i);
747 vtop--;
750 /* push constant of type "type" with useless value */
751 ST_FUNC void vpush(CType *type)
753 vset(type, VT_CONST, 0);
756 /* push integer constant */
757 ST_FUNC void vpushi(int v)
759 CValue cval;
760 cval.i = v;
761 vsetc(&int_type, VT_CONST, &cval);
764 /* push a pointer sized constant */
765 static void vpushs(addr_t v)
767 CValue cval;
768 cval.i = v;
769 vsetc(&size_type, VT_CONST, &cval);
772 /* push arbitrary 64bit constant */
773 ST_FUNC void vpush64(int ty, unsigned long long v)
775 CValue cval;
776 CType ctype;
777 ctype.t = ty;
778 ctype.ref = NULL;
779 cval.i = v;
780 vsetc(&ctype, VT_CONST, &cval);
783 /* push long long constant */
784 static inline void vpushll(long long v)
786 vpush64(VT_LLONG, v);
789 ST_FUNC void vset(CType *type, int r, int v)
791 CValue cval;
793 cval.i = v;
794 vsetc(type, r, &cval);
797 static void vseti(int r, int v)
799 CType type;
800 type.t = VT_INT;
801 type.ref = NULL;
802 vset(&type, r, v);
805 ST_FUNC void vpushv(SValue *v)
807 if (vtop >= vstack + (VSTACK_SIZE - 1))
808 tcc_error("memory full (vstack)");
809 vtop++;
810 *vtop = *v;
813 static void vdup(void)
815 vpushv(vtop);
818 /* rotate n first stack elements to the bottom
819 I1 ... In -> I2 ... In I1 [top is right]
821 ST_FUNC void vrotb(int n)
823 int i;
824 SValue tmp;
826 tmp = vtop[-n + 1];
827 for(i=-n+1;i!=0;i++)
828 vtop[i] = vtop[i+1];
829 vtop[0] = tmp;
832 /* rotate the n elements before entry e towards the top
833 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
835 ST_FUNC void vrote(SValue *e, int n)
837 int i;
838 SValue tmp;
840 tmp = *e;
841 for(i = 0;i < n - 1; i++)
842 e[-i] = e[-i - 1];
843 e[-n + 1] = tmp;
846 /* rotate n first stack elements to the top
847 I1 ... In -> In I1 ... I(n-1) [top is right]
849 ST_FUNC void vrott(int n)
851 vrote(vtop, n);
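#if 0
/* Illustrative sketch of the rotation helpers (not part of the build):
   the value stack is written left to right with the top at the right. */
static void rotation_example(void)
{
    vpushi(1); vpushi(2); vpushi(3); /* stack: 1 2 3 */
    vrotb(3);                        /* stack: 2 3 1  (deepest of the 3 moved to the top) */
    vrott(3);                        /* stack: 1 2 3  (top moved back below the other two) */
    vpop(); vpop(); vpop();
}
#endif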
854 /* push a symbol value of TYPE */
855 static inline void vpushsym(CType *type, Sym *sym)
857 CValue cval;
858 cval.i = 0;
859 vsetc(type, VT_CONST | VT_SYM, &cval);
860 vtop->sym = sym;
863 /* Return a static symbol pointing to a section */
864 ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
866 int v;
867 Sym *sym;
869 v = anon_sym++;
870 sym = sym_push(v, type, VT_CONST | VT_SYM, 0);
871 sym->type.t |= VT_STATIC;
872 put_extern_sym(sym, sec, offset, size);
873 return sym;
876 /* push a reference to a section offset by adding a dummy symbol */
877 static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
879 vpushsym(type, get_sym_ref(type, sec, offset, size));
882 /* define a new external reference to a symbol 'v' of type 'u' */
883 ST_FUNC Sym *external_global_sym(int v, CType *type)
885 Sym *s;
887 s = sym_find(v);
888 if (!s) {
889 /* push forward reference */
890 s = global_identifier_push(v, type->t | VT_EXTERN, 0);
891 s->type.ref = type->ref;
892 } else if (IS_ASM_SYM(s)) {
893 s->type.t = type->t | (s->type.t & VT_EXTERN);
894 s->type.ref = type->ref;
895 update_storage(s);
897 return s;
900 /* Merge symbol attributes. */
901 static void merge_symattr(struct SymAttr *sa, struct SymAttr *sa1)
903 if (sa1->aligned && !sa->aligned)
904 sa->aligned = sa1->aligned;
905 sa->packed |= sa1->packed;
906 sa->weak |= sa1->weak;
907 if (sa1->visibility != STV_DEFAULT) {
908 int vis = sa->visibility;
909 if (vis == STV_DEFAULT
910 || vis > sa1->visibility)
911 vis = sa1->visibility;
912 sa->visibility = vis;
914 sa->dllexport |= sa1->dllexport;
915 sa->nodecorate |= sa1->nodecorate;
916 sa->dllimport |= sa1->dllimport;
919 /* Merge function attributes. */
920 static void merge_funcattr(struct FuncAttr *fa, struct FuncAttr *fa1)
922 if (fa1->func_call && !fa->func_call)
923 fa->func_call = fa1->func_call;
924 if (fa1->func_type && !fa->func_type)
925 fa->func_type = fa1->func_type;
926 if (fa1->func_args && !fa->func_args)
927 fa->func_args = fa1->func_args;
930 /* Merge attributes. */
931 static void merge_attr(AttributeDef *ad, AttributeDef *ad1)
933 merge_symattr(&ad->a, &ad1->a);
934 merge_funcattr(&ad->f, &ad1->f);
936 if (ad1->section)
937 ad->section = ad1->section;
938 if (ad1->alias_target)
939 ad->alias_target = ad1->alias_target;
940 if (ad1->asm_label)
941 ad->asm_label = ad1->asm_label;
942 if (ad1->attr_mode)
943 ad->attr_mode = ad1->attr_mode;
946 /* Merge some type attributes. */
947 static void patch_type(Sym *sym, CType *type)
949 if (!(type->t & VT_EXTERN) || IS_ENUM_VAL(sym->type.t)) {
950 if (!(sym->type.t & VT_EXTERN))
951 tcc_error("redefinition of '%s'", get_tok_str(sym->v, NULL));
952 sym->type.t &= ~VT_EXTERN;
955 if (IS_ASM_SYM(sym)) {
956 /* stay static if both are static */
957 sym->type.t = type->t & (sym->type.t | ~VT_STATIC);
958 sym->type.ref = type->ref;
961 if (!is_compatible_types(&sym->type, type)) {
962 tcc_error("incompatible types for redefinition of '%s'",
963 get_tok_str(sym->v, NULL));
965 } else if ((sym->type.t & VT_BTYPE) == VT_FUNC) {
966 int static_proto = sym->type.t & VT_STATIC;
967 /* warn if static follows non-static function declaration */
968 if ((type->t & VT_STATIC) && !static_proto
969 /* XXX this test for inline shouldn't be here. Until we
970 implement gnu-inline mode again it silences a warning for
971 mingw caused by our workarounds. */
972 && !((type->t | sym->type.t) & VT_INLINE))
973 tcc_warning("static storage ignored for redefinition of '%s'",
974 get_tok_str(sym->v, NULL));
976 /* set 'inline' if both agree or if one has static */
977 if ((type->t | sym->type.t) & VT_INLINE) {
978 if (!((type->t ^ sym->type.t) & VT_INLINE)
979 || ((type->t | sym->type.t) & VT_STATIC))
980 static_proto |= VT_INLINE;
983 if (0 == (type->t & VT_EXTERN)) {
984 /* put complete type, use static from prototype */
985 sym->type.t = (type->t & ~(VT_STATIC|VT_INLINE)) | static_proto;
986 sym->type.ref = type->ref;
987 } else {
988 sym->type.t &= ~VT_INLINE | static_proto;
991 if (sym->type.ref->f.func_type == FUNC_OLD
992 && type->ref->f.func_type != FUNC_OLD) {
993 sym->type.ref = type->ref;
996 } else {
997 if ((sym->type.t & VT_ARRAY) && type->ref->c >= 0) {
998 /* set array size if it was omitted in extern declaration */
999 sym->type.ref->c = type->ref->c;
1001 if ((type->t ^ sym->type.t) & VT_STATIC)
1002 tcc_warning("storage mismatch for redefinition of '%s'",
1003 get_tok_str(sym->v, NULL));
1007 /* Merge some storage attributes. */
1008 static void patch_storage(Sym *sym, AttributeDef *ad, CType *type)
1010 if (type)
1011 patch_type(sym, type);
1013 #ifdef TCC_TARGET_PE
1014 if (sym->a.dllimport != ad->a.dllimport)
1015 tcc_error("incompatible dll linkage for redefinition of '%s'",
1016 get_tok_str(sym->v, NULL));
1017 #endif
1018 merge_symattr(&sym->a, &ad->a);
1019 if (ad->asm_label)
1020 sym->asm_label = ad->asm_label;
1021 update_storage(sym);
1024 /* copy sym to other stack */
1025 static Sym *sym_copy(Sym *s0, Sym **ps)
1027 Sym *s;
1028 s = sym_malloc(), *s = *s0;
1029 s->prev = *ps, *ps = s;
1030 if (s->v < SYM_FIRST_ANOM) {
1031 ps = &table_ident[s->v - TOK_IDENT]->sym_identifier;
1032 s->prev_tok = *ps, *ps = s;
1034 return s;
1037 /* copy a list of syms */
1038 static void sym_copy_ref(Sym *s0, Sym **ps)
1040 Sym *s, **sp = &s0->type.ref;
1041 for (s = *sp, *sp = NULL; s; s = s->next)
1042 sp = &(*sp = sym_copy(s, ps))->next;
1045 /* define a new external reference to a symbol 'v' */
1046 static Sym *external_sym(int v, CType *type, int r, AttributeDef *ad)
1048 Sym *s; int bt;
1050 /* look for global symbol */
1051 s = sym_find(v);
1052 while (s && s->sym_scope)
1053 s = s->prev_tok;
1055 if (!s) {
1056 /* push forward reference */
1057 s = global_identifier_push(v, type->t, 0);
1058 s->r |= r;
1059 s->a = ad->a;
1060 s->asm_label = ad->asm_label;
1061 s->type.ref = type->ref;
1062 bt = s->type.t & (VT_BTYPE|VT_ARRAY);
1063 /* copy type to the global stack also */
1064 if (local_scope && (bt == VT_FUNC || (bt & VT_ARRAY)))
1065 sym_copy_ref(s, &global_stack);
1066 } else {
1067 patch_storage(s, ad, type);
1068 bt = s->type.t & VT_BTYPE;
1070 /* push variables to local scope if any */
1071 if (local_stack && bt != VT_FUNC)
1072 s = sym_copy(s, &local_stack);
1073 return s;
1076 /* push a reference to global symbol v */
1077 ST_FUNC void vpush_global_sym(CType *type, int v)
1079 vpushsym(type, external_global_sym(v, type));
1082 /* save registers up to (vtop - n) stack entry */
1083 ST_FUNC void save_regs(int n)
1085 SValue *p, *p1;
1086 for(p = vstack, p1 = vtop - n; p <= p1; p++)
1087 save_reg(p->r);
1090 /* save r to the memory stack, and mark it as being free */
1091 ST_FUNC void save_reg(int r)
1093 save_reg_upstack(r, 0);
1096 /* save r to the memory stack, and mark it as being free,
1097 if seen up to (vtop - n) stack entry */
1098 ST_FUNC void save_reg_upstack(int r, int n)
1100 int l, saved, size, align;
1101 SValue *p, *p1, sv;
1102 CType *type;
1104 if ((r &= VT_VALMASK) >= VT_CONST)
1105 return;
1106 if (nocode_wanted)
1107 return;
1109 /* modify all stack values */
1110 saved = 0;
1111 l = 0;
1112 for(p = vstack, p1 = vtop - n; p <= p1; p++) {
1113 if ((p->r & VT_VALMASK) == r ||
1114 ((p->type.t & VT_BTYPE) == VT_LLONG && (p->r2 & VT_VALMASK) == r)) {
1115 /* must save value on stack if not already done */
1116 if (!saved) {
1117 /* NOTE: must reload 'r' because r might be equal to r2 */
1118 r = p->r & VT_VALMASK;
1119 /* store register in the stack */
1120 type = &p->type;
1121 if ((p->r & VT_LVAL) ||
1122 (!is_float(type->t) && (type->t & VT_BTYPE) != VT_LLONG))
1123 #if PTR_SIZE == 8
1124 type = &char_pointer_type;
1125 #else
1126 type = &int_type;
1127 #endif
1128 size = type_size(type, &align);
1129 l=get_temp_local_var(size,align);
1130 sv.type.t = type->t;
1131 sv.r = VT_LOCAL | VT_LVAL;
1132 sv.c.i = l;
1133 store(r, &sv);
1134 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1135 /* x86 specific: need to pop fp register ST0 if saved */
1136 if (r == TREG_ST0) {
1137 o(0xd8dd); /* fstp %st(0) */
1139 #endif
1140 #if PTR_SIZE == 4
1141 /* special long long case */
1142 if ((type->t & VT_BTYPE) == VT_LLONG) {
1143 sv.c.i += 4;
1144 store(p->r2, &sv);
1146 #endif
1147 saved = 1;
1149 /* mark that stack entry as being saved on the stack */
1150 if (p->r & VT_LVAL) {
1151 /* also clear the bounded flag because the
1152 relocation address of the function was stored in
1153 p->c.i */
1154 p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
1155 } else {
1156 p->r = lvalue_type(p->type.t) | VT_LOCAL;
1158 p->r2 = VT_CONST;
1159 p->c.i = l;
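/* Note: after the store above, every vstack entry that referenced 'r'
   (or used it as the second half of a long long) is redirected to the
   freshly allocated temporary slot at offset 'l': plain register values
   become VT_LOCAL lvalues, while entries that already were lvalues
   become VT_LLOCAL (the address itself now lives on the stack), leaving
   the register free for reuse. */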
1164 #ifdef TCC_TARGET_ARM
1165 /* find a register of class 'rc2' with at most one reference on stack.
1166 * If none, call get_reg(rc) */
1167 ST_FUNC int get_reg_ex(int rc, int rc2)
1169 int r;
1170 SValue *p;
1172 for(r=0;r<NB_REGS;r++) {
1173 if (reg_classes[r] & rc2) {
1174 int n;
1175 n=0;
1176 for(p = vstack; p <= vtop; p++) {
1177 if ((p->r & VT_VALMASK) == r ||
1178 (p->r2 & VT_VALMASK) == r)
1179 n++;
1181 if (n <= 1)
1182 return r;
1185 return get_reg(rc);
1187 #endif
1189 /* find a free register of class 'rc'. If none, save one register */
1190 ST_FUNC int get_reg(int rc)
1192 int r;
1193 SValue *p;
1195 /* find a free register */
1196 for(r=0;r<NB_REGS;r++) {
1197 if (reg_classes[r] & rc) {
1198 if (nocode_wanted)
1199 return r;
1200 for(p=vstack;p<=vtop;p++) {
1201 if ((p->r & VT_VALMASK) == r ||
1202 (p->r2 & VT_VALMASK) == r)
1203 goto notfound;
1205 return r;
1207 notfound: ;
1210 /* no register left : free the first one on the stack (VERY
1211 IMPORTANT to start from the bottom to ensure that we don't
1212 spill registers used in gen_opi()) */
1213 for(p=vstack;p<=vtop;p++) {
1214 /* look at second register (if long long) */
1215 r = p->r2 & VT_VALMASK;
1216 if (r < VT_CONST && (reg_classes[r] & rc))
1217 goto save_found;
1218 r = p->r & VT_VALMASK;
1219 if (r < VT_CONST && (reg_classes[r] & rc)) {
1220 save_found:
1221 save_reg(r);
1222 return r;
1225 /* Should never come here */
1226 return -1;
1229 /* find a free temporary local variable matching the size and alignment (return its offset on the stack). If none fits, add a new temporary stack variable. */
1230 static int get_temp_local_var(int size,int align){
1231 int i;
1232 struct temp_local_variable *temp_var;
1233 int found_var;
1234 SValue *p;
1235 int r;
1236 char free;
1237 char found;
1238 found=0;
1239 for(i=0;i<nb_temp_local_vars;i++){
1240 temp_var=&arr_temp_local_vars[i];
1241 if(temp_var->size<size||align!=temp_var->align){
1242 continue;
1244 /*check if temp_var is free*/
1245 free=1;
1246 for(p=vstack;p<=vtop;p++) {
1247 r=p->r&VT_VALMASK;
1248 if(r==VT_LOCAL||r==VT_LLOCAL){
1249 if(p->c.i==temp_var->location){
1250 free=0;
1251 break;
1255 if(free){
1256 found_var=temp_var->location;
1257 found=1;
1258 break;
1261 if(!found){
1262 loc = (loc - size) & -align;
1263 if(nb_temp_local_vars<MAX_TEMP_LOCAL_VARIABLE_NUMBER){
1264 temp_var=&arr_temp_local_vars[i];
1265 temp_var->location=loc;
1266 temp_var->size=size;
1267 temp_var->align=align;
1268 nb_temp_local_vars++;
1270 found_var=loc;
1272 return found_var;
1275 static void clear_temp_local_var_list(){
1276 nb_temp_local_vars=0;
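/* Note: these slots back the register spills done in save_reg_upstack().
   A recorded slot is reused only if its size is large enough, its
   alignment matches exactly and no value still on the vstack points at
   that offset; at most MAX_TEMP_LOCAL_VARIABLE_NUMBER slots are
   remembered, any further spill just allocates fresh stack space.  The
   list has to be reset per function (clear_temp_local_var_list()) since
   the recorded offsets are relative to the current frame. */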
1279 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1280 if needed */
1281 static void move_reg(int r, int s, int t)
1283 SValue sv;
1285 if (r != s) {
1286 save_reg(r);
1287 sv.type.t = t;
1288 sv.type.ref = NULL;
1289 sv.r = s;
1290 sv.c.i = 0;
1291 load(r, &sv);
1295 /* get address of vtop (vtop MUST BE an lvalue) */
1296 ST_FUNC void gaddrof(void)
1298 vtop->r &= ~VT_LVAL;
1299 /* tricky: if saved lvalue, then we can go back to lvalue */
1300 if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
1301 vtop->r = (vtop->r & ~(VT_VALMASK | VT_LVAL_TYPE)) | VT_LOCAL | VT_LVAL;
1306 #ifdef CONFIG_TCC_BCHECK
1307 /* generate lvalue bound code */
1308 static void gbound(void)
1310 int lval_type;
1311 CType type1;
1313 vtop->r &= ~VT_MUSTBOUND;
1314 /* if lvalue, then use checking code before dereferencing */
1315 if (vtop->r & VT_LVAL) {
1316 /* if not VT_BOUNDED value, then make one */
1317 if (!(vtop->r & VT_BOUNDED)) {
1318 lval_type = vtop->r & (VT_LVAL_TYPE | VT_LVAL);
1319 /* must save type because we must set it to int to get pointer */
1320 type1 = vtop->type;
1321 vtop->type.t = VT_PTR;
1322 gaddrof();
1323 vpushi(0);
1324 gen_bounded_ptr_add();
1325 vtop->r |= lval_type;
1326 vtop->type = type1;
1328 /* then check for dereferencing */
1329 gen_bounded_ptr_deref();
1332 #endif
1334 static void incr_bf_adr(int o)
1336 vtop->type = char_pointer_type;
1337 gaddrof();
1338 vpushi(o);
1339 gen_op('+');
1340 vtop->type.t = (vtop->type.t & ~(VT_BTYPE|VT_DEFSIGN))
1341 | (VT_BYTE|VT_UNSIGNED);
1342 vtop->r = (vtop->r & ~VT_LVAL_TYPE)
1343 | (VT_LVAL_BYTE|VT_LVAL_UNSIGNED|VT_LVAL);
1346 /* single-byte load mode for packed or otherwise unaligned bitfields */
1347 static void load_packed_bf(CType *type, int bit_pos, int bit_size)
1349 int n, o, bits;
1350 save_reg_upstack(vtop->r, 1);
1351 vpush64(type->t & VT_BTYPE, 0); // B X
1352 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
1353 do {
1354 vswap(); // X B
1355 incr_bf_adr(o);
1356 vdup(); // X B B
1357 n = 8 - bit_pos;
1358 if (n > bit_size)
1359 n = bit_size;
1360 if (bit_pos)
1361 vpushi(bit_pos), gen_op(TOK_SHR), bit_pos = 0; // X B Y
1362 if (n < 8)
1363 vpushi((1 << n) - 1), gen_op('&');
1364 gen_cast(type);
1365 if (bits)
1366 vpushi(bits), gen_op(TOK_SHL);
1367 vrotb(3); // B Y X
1368 gen_op('|'); // B X
1369 bits += n, bit_size -= n, o = 1;
1370 } while (bit_size);
1371 vswap(), vpop();
1372 if (!(type->t & VT_UNSIGNED)) {
1373 n = ((type->t & VT_BTYPE) == VT_LLONG ? 64 : 32) - bits;
1374 vpushi(n), gen_op(TOK_SHL);
1375 vpushi(n), gen_op(TOK_SAR);
1379 /* single-byte store mode for packed or otherwise unaligned bitfields */
1380 static void store_packed_bf(int bit_pos, int bit_size)
1382 int bits, n, o, m, c;
1384 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1385 vswap(); // X B
1386 save_reg_upstack(vtop->r, 1);
1387 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
1388 do {
1389 incr_bf_adr(o); // X B
1390 vswap(); //B X
1391 c ? vdup() : gv_dup(); // B V X
1392 vrott(3); // X B V
1393 if (bits)
1394 vpushi(bits), gen_op(TOK_SHR);
1395 if (bit_pos)
1396 vpushi(bit_pos), gen_op(TOK_SHL);
1397 n = 8 - bit_pos;
1398 if (n > bit_size)
1399 n = bit_size;
1400 if (n < 8) {
1401 m = ((1 << n) - 1) << bit_pos;
1402 vpushi(m), gen_op('&'); // X B V1
1403 vpushv(vtop-1); // X B V1 B
1404 vpushi(m & 0x80 ? ~m & 0x7f : ~m);
1405 gen_op('&'); // X B V1 B1
1406 gen_op('|'); // X B V2
1408 vdup(), vtop[-1] = vtop[-2]; // X B B V2
1409 vstore(), vpop(); // X B
1410 bits += n, bit_size -= n, bit_pos = 0, o = 1;
1411 } while (bit_size);
1412 vpop(), vpop();
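/* Note: both helpers above walk the bitfield one byte at a time via
   incr_bf_adr(), which is what makes them safe for packed or otherwise
   unaligned layouts.  For example, a field with bit_pos 5 and bit_size 9
   is loaded as 3 bits from the first byte (shifted down by 5) OR'ed with
   6 bits from the next byte (shifted up by 3), and signed fields are then
   sign-extended with a SHL/SAR pair; the store path mirrors this, masking
   the untouched bits of each byte before merging in the new value. */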
1415 static int adjust_bf(SValue *sv, int bit_pos, int bit_size)
1417 int t;
1418 if (0 == sv->type.ref)
1419 return 0;
1420 t = sv->type.ref->auxtype;
1421 if (t != -1 && t != VT_STRUCT) {
1422 sv->type.t = (sv->type.t & ~VT_BTYPE) | t;
1423 sv->r = (sv->r & ~VT_LVAL_TYPE) | lvalue_type(sv->type.t);
1425 return t;
1428 /* store vtop in a register belonging to class 'rc'. lvalues are
1429 converted to values. Cannot be used if the value cannot be converted
1430 to a register value (such as structures). */
1431 ST_FUNC int gv(int rc)
1433 int r, bit_pos, bit_size, size, align, rc2;
1435 /* NOTE: get_reg can modify vstack[] */
1436 if (vtop->type.t & VT_BITFIELD) {
1437 CType type;
1439 bit_pos = BIT_POS(vtop->type.t);
1440 bit_size = BIT_SIZE(vtop->type.t);
1441 /* remove bit field info to avoid loops */
1442 vtop->type.t &= ~VT_STRUCT_MASK;
1444 type.ref = NULL;
1445 type.t = vtop->type.t & VT_UNSIGNED;
1446 if ((vtop->type.t & VT_BTYPE) == VT_BOOL)
1447 type.t |= VT_UNSIGNED;
1449 r = adjust_bf(vtop, bit_pos, bit_size);
1451 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
1452 type.t |= VT_LLONG;
1453 else
1454 type.t |= VT_INT;
1456 if (r == VT_STRUCT) {
1457 load_packed_bf(&type, bit_pos, bit_size);
1458 } else {
1459 int bits = (type.t & VT_BTYPE) == VT_LLONG ? 64 : 32;
1460 /* cast to int to propagate signedness in following ops */
1461 gen_cast(&type);
1462 /* generate shifts */
1463 vpushi(bits - (bit_pos + bit_size));
1464 gen_op(TOK_SHL);
1465 vpushi(bits - bit_size);
1466 /* NOTE: transformed to SHR if unsigned */
1467 gen_op(TOK_SAR);
1469 r = gv(rc);
1470 } else {
1471 if (is_float(vtop->type.t) &&
1472 (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1473 unsigned long offset;
1474 /* CPUs usually cannot use float constants, so we store them
1475 generically in data segment */
1476 size = type_size(&vtop->type, &align);
1477 if (NODATA_WANTED)
1478 size = 0, align = 1;
1479 offset = section_add(data_section, size, align);
1480 vpush_ref(&vtop->type, data_section, offset, size);
1481 vswap();
1482 init_putv(&vtop->type, data_section, offset);
1483 vtop->r |= VT_LVAL;
1485 #ifdef CONFIG_TCC_BCHECK
1486 if (vtop->r & VT_MUSTBOUND)
1487 gbound();
1488 #endif
1490 r = vtop->r & VT_VALMASK;
1491 rc2 = (rc & RC_FLOAT) ? RC_FLOAT : RC_INT;
1492 #ifndef TCC_TARGET_ARM64
1493 if (rc == RC_IRET)
1494 rc2 = RC_LRET;
1495 #ifdef TCC_TARGET_X86_64
1496 else if (rc == RC_FRET)
1497 rc2 = RC_QRET;
1498 #endif
1499 #endif
1500 /* need to reload if:
1501 - constant
1502 - lvalue (need to dereference pointer)
1503 - already a register, but not in the right class */
1504 if (r >= VT_CONST
1505 || (vtop->r & VT_LVAL)
1506 || !(reg_classes[r] & rc)
1507 #if PTR_SIZE == 8
1508 || ((vtop->type.t & VT_BTYPE) == VT_QLONG && !(reg_classes[vtop->r2] & rc2))
1509 || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT && !(reg_classes[vtop->r2] & rc2))
1510 #else
1511 || ((vtop->type.t & VT_BTYPE) == VT_LLONG && !(reg_classes[vtop->r2] & rc2))
1512 #endif
1515 r = get_reg(rc);
1516 #if PTR_SIZE == 8
1517 if (((vtop->type.t & VT_BTYPE) == VT_QLONG) || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT)) {
1518 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
1519 #else
1520 if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
1521 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
1522 unsigned long long ll;
1523 #endif
1524 int r2, original_type;
1525 original_type = vtop->type.t;
1526 /* two register type load : expand to two words
1527 temporarily */
1528 #if PTR_SIZE == 4
1529 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1530 /* load constant */
1531 ll = vtop->c.i;
1532 vtop->c.i = ll; /* first word */
1533 load(r, vtop);
1534 vtop->r = r; /* save register value */
1535 vpushi(ll >> 32); /* second word */
1536 } else
1537 #endif
1538 if (vtop->r & VT_LVAL) {
1539 /* We do not want to modify the long long
1540 pointer here, so the safest (and least
1541 efficient) approach is to save all the other registers
1542 on the stack. XXX: totally inefficient. */
1543 #if 0
1544 save_regs(1);
1545 #else
1546 /* lvalue_save: save only if used further down the stack */
1547 save_reg_upstack(vtop->r, 1);
1548 #endif
1549 /* load from memory */
1550 vtop->type.t = load_type;
1551 load(r, vtop);
1552 vdup();
1553 vtop[-1].r = r; /* save register value */
1554 /* increment pointer to get second word */
1555 vtop->type.t = addr_type;
1556 gaddrof();
1557 vpushi(load_size);
1558 gen_op('+');
1559 vtop->r |= VT_LVAL;
1560 vtop->type.t = load_type;
1561 } else {
1562 /* move registers */
1563 load(r, vtop);
1564 vdup();
1565 vtop[-1].r = r; /* save register value */
1566 vtop->r = vtop[-1].r2;
1568 /* Allocate second register. Here we rely on the fact that
1569 get_reg() tries first to free r2 of an SValue. */
1570 r2 = get_reg(rc2);
1571 load(r2, vtop);
1572 vpop();
1573 /* write second register */
1574 vtop->r2 = r2;
1575 vtop->type.t = original_type;
1576 } else if ((vtop->r & VT_LVAL) && !is_float(vtop->type.t)) {
1577 int t1, t;
1578 /* lvalue of scalar type : need to use lvalue type
1579 because of possible cast */
1580 t = vtop->type.t;
1581 t1 = t;
1582 /* compute memory access type */
1583 if (vtop->r & VT_LVAL_BYTE)
1584 t = VT_BYTE;
1585 else if (vtop->r & VT_LVAL_SHORT)
1586 t = VT_SHORT;
1587 if (vtop->r & VT_LVAL_UNSIGNED)
1588 t |= VT_UNSIGNED;
1589 vtop->type.t = t;
1590 load(r, vtop);
1591 /* restore wanted type */
1592 vtop->type.t = t1;
1593 } else {
1594 /* one register type load */
1595 load(r, vtop);
1598 vtop->r = r;
1599 #ifdef TCC_TARGET_C67
1600 /* uses register pairs for doubles */
1601 if ((vtop->type.t & VT_BTYPE) == VT_DOUBLE)
1602 vtop->r2 = r+1;
1603 #endif
1605 return r;
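/* Note: gv() reloads into a fresh register whenever the value is a
   constant, an lvalue (the pointer still has to be dereferenced) or sits
   in a register of the wrong class.  Types needing two registers
   (VT_LLONG with PTR_SIZE == 4, VT_QLONG/VT_QFLOAT with PTR_SIZE == 8)
   are loaded word by word, with r holding the low part and r2 the high
   part. */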
1608 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1609 ST_FUNC void gv2(int rc1, int rc2)
1611 int v;
1613 /* generate more generic register first. But VT_JMP or VT_CMP
1614 values must be generated first in all cases to avoid possible
1615 reload errors */
1616 v = vtop[0].r & VT_VALMASK;
1617 if (v != VT_CMP && (v & ~1) != VT_JMP && rc1 <= rc2) {
1618 vswap();
1619 gv(rc1);
1620 vswap();
1621 gv(rc2);
1622 /* test if reload is needed for first register */
1623 if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
1624 vswap();
1625 gv(rc1);
1626 vswap();
1628 } else {
1629 gv(rc2);
1630 vswap();
1631 gv(rc1);
1632 vswap();
1633 /* test if reload is needed for first register */
1634 if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
1635 gv(rc2);
1640 #ifndef TCC_TARGET_ARM64
1641 /* wrapper around RC_FRET to return a register by type */
1642 static int rc_fret(int t)
1644 #ifdef TCC_TARGET_X86_64
1645 if (t == VT_LDOUBLE) {
1646 return RC_ST0;
1648 #endif
1649 return RC_FRET;
1651 #endif
1653 /* wrapper around REG_FRET to return a register by type */
1654 static int reg_fret(int t)
1656 #ifdef TCC_TARGET_X86_64
1657 if (t == VT_LDOUBLE) {
1658 return TREG_ST0;
1660 #endif
1661 return REG_FRET;
1664 #if PTR_SIZE == 4
1665 /* expand 64bit on stack in two ints */
1666 ST_FUNC void lexpand(void)
1668 int u, v;
1669 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1670 v = vtop->r & (VT_VALMASK | VT_LVAL);
1671 if (v == VT_CONST) {
1672 vdup();
1673 vtop[0].c.i >>= 32;
1674 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1675 vdup();
1676 vtop[0].c.i += 4;
1677 } else {
1678 gv(RC_INT);
1679 vdup();
1680 vtop[0].r = vtop[-1].r2;
1681 vtop[0].r2 = vtop[-1].r2 = VT_CONST;
1683 vtop[0].type.t = vtop[-1].type.t = VT_INT | u;
1685 #endif
1687 #if PTR_SIZE == 4
1688 /* build a long long from two ints */
1689 static void lbuild(int t)
1691 gv2(RC_INT, RC_INT);
1692 vtop[-1].r2 = vtop[0].r;
1693 vtop[-1].type.t = t;
1694 vpop();
1696 #endif
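/* Note: on 32-bit targets a long long occupies a register pair (r = low
   word, r2 = high word).  lexpand() splits the top of stack into two
   VT_INT entries (low below, high on top) and lbuild() merges them back;
   the '+ 4' in the lvalue case addresses the high word, which assumes the
   little-endian layout of the 32-bit targets using this path.  gen_opl()
   below is built entirely from these two primitives plus 32-bit ops. */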
1698 /* convert stack entry to register and duplicate its value in another
1699 register */
1700 static void gv_dup(void)
1702 int rc, t, r, r1;
1703 SValue sv;
1705 t = vtop->type.t;
1706 #if PTR_SIZE == 4
1707 if ((t & VT_BTYPE) == VT_LLONG) {
1708 if (t & VT_BITFIELD) {
1709 gv(RC_INT);
1710 t = vtop->type.t;
1712 lexpand();
1713 gv_dup();
1714 vswap();
1715 vrotb(3);
1716 gv_dup();
1717 vrotb(4);
1718 /* stack: H L L1 H1 */
1719 lbuild(t);
1720 vrotb(3);
1721 vrotb(3);
1722 vswap();
1723 lbuild(t);
1724 vswap();
1725 } else
1726 #endif
1728 /* duplicate value */
1729 rc = RC_INT;
1730 sv.type.t = VT_INT;
1731 if (is_float(t)) {
1732 rc = RC_FLOAT;
1733 #ifdef TCC_TARGET_X86_64
1734 if ((t & VT_BTYPE) == VT_LDOUBLE) {
1735 rc = RC_ST0;
1737 #endif
1738 sv.type.t = t;
1740 r = gv(rc);
1741 r1 = get_reg(rc);
1742 sv.r = r;
1743 sv.c.i = 0;
1744 load(r1, &sv); /* move r to r1 */
1745 vdup();
1746 /* duplicates value */
1747 if (r != r1)
1748 vtop->r = r1;
1752 /* Generate a test for any value (jump, comparison and integers) */
1755 ST_FUNC int gvtst(int inv, int t)
1757 int v = vtop->r & VT_VALMASK;
1758 if (v != VT_CMP && v != VT_JMP && v != VT_JMPI) {
1759 vpushi(0);
1760 gen_op(TOK_NE);
1762 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1763 /* constant jmp optimization */
1764 if ((vtop->c.i != 0) != inv)
1765 t = gjmp(t);
1766 vtop--;
1767 return t;
1769 return gtst(inv, t);
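/* Note: gvtst() accepts anything on top of the vstack.  Values that are
   not already a comparison or a pending jump are first compared against
   zero (the TOK_NE above), and compile-time constants collapse into
   either an unconditional jump (which, via the gjmp wrapper, marks the
   following code as unreachable) or a plain fall-through, so constructs
   like 'if (0)' or 'while (1)' never emit a test instruction. */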
1772 #if PTR_SIZE == 4
1773 /* generate CPU independent (unsigned) long long operations */
1774 static void gen_opl(int op)
1776 int t, a, b, op1, c, i;
1777 int func;
1778 unsigned short reg_iret = REG_IRET;
1779 unsigned short reg_lret = REG_LRET;
1780 SValue tmp;
1782 switch(op) {
1783 case '/':
1784 case TOK_PDIV:
1785 func = TOK___divdi3;
1786 goto gen_func;
1787 case TOK_UDIV:
1788 func = TOK___udivdi3;
1789 goto gen_func;
1790 case '%':
1791 func = TOK___moddi3;
1792 goto gen_mod_func;
1793 case TOK_UMOD:
1794 func = TOK___umoddi3;
1795 gen_mod_func:
1796 #ifdef TCC_ARM_EABI
1797 reg_iret = TREG_R2;
1798 reg_lret = TREG_R3;
1799 #endif
1800 gen_func:
1801 /* call generic long long function */
1802 vpush_global_sym(&func_old_type, func);
1803 vrott(3);
1804 gfunc_call(2);
1805 vpushi(0);
1806 vtop->r = reg_iret;
1807 vtop->r2 = reg_lret;
1808 break;
1809 case '^':
1810 case '&':
1811 case '|':
1812 case '*':
1813 case '+':
1814 case '-':
1815 //pv("gen_opl A",0,2);
1816 t = vtop->type.t;
1817 vswap();
1818 lexpand();
1819 vrotb(3);
1820 lexpand();
1821 /* stack: L1 H1 L2 H2 */
1822 tmp = vtop[0];
1823 vtop[0] = vtop[-3];
1824 vtop[-3] = tmp;
1825 tmp = vtop[-2];
1826 vtop[-2] = vtop[-3];
1827 vtop[-3] = tmp;
1828 vswap();
1829 /* stack: H1 H2 L1 L2 */
1830 //pv("gen_opl B",0,4);
1831 if (op == '*') {
1832 vpushv(vtop - 1);
1833 vpushv(vtop - 1);
1834 gen_op(TOK_UMULL);
1835 lexpand();
1836 /* stack: H1 H2 L1 L2 ML MH */
1837 for(i=0;i<4;i++)
1838 vrotb(6);
1839 /* stack: ML MH H1 H2 L1 L2 */
1840 tmp = vtop[0];
1841 vtop[0] = vtop[-2];
1842 vtop[-2] = tmp;
1843 /* stack: ML MH H1 L2 H2 L1 */
1844 gen_op('*');
1845 vrotb(3);
1846 vrotb(3);
1847 gen_op('*');
1848 /* stack: ML MH M1 M2 */
1849 gen_op('+');
1850 gen_op('+');
1851 } else if (op == '+' || op == '-') {
1852 /* XXX: add non carry method too (for MIPS or alpha) */
1853 if (op == '+')
1854 op1 = TOK_ADDC1;
1855 else
1856 op1 = TOK_SUBC1;
1857 gen_op(op1);
1858 /* stack: H1 H2 (L1 op L2) */
1859 vrotb(3);
1860 vrotb(3);
1861 gen_op(op1 + 1); /* TOK_xxxC2 */
1862 } else {
1863 gen_op(op);
1864 /* stack: H1 H2 (L1 op L2) */
1865 vrotb(3);
1866 vrotb(3);
1867 /* stack: (L1 op L2) H1 H2 */
1868 gen_op(op);
1869 /* stack: (L1 op L2) (H1 op H2) */
1871 /* stack: L H */
1872 lbuild(t);
1873 break;
1874 case TOK_SAR:
1875 case TOK_SHR:
1876 case TOK_SHL:
1877 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
1878 t = vtop[-1].type.t;
1879 vswap();
1880 lexpand();
1881 vrotb(3);
1882 /* stack: L H shift */
1883 c = (int)vtop->c.i;
1884 /* constant: simpler */
1885 /* NOTE: all comments are for SHL. the other cases are
1886 done by swapping words */
1887 vpop();
1888 if (op != TOK_SHL)
1889 vswap();
1890 if (c >= 32) {
1891 /* stack: L H */
1892 vpop();
1893 if (c > 32) {
1894 vpushi(c - 32);
1895 gen_op(op);
1897 if (op != TOK_SAR) {
1898 vpushi(0);
1899 } else {
1900 gv_dup();
1901 vpushi(31);
1902 gen_op(TOK_SAR);
1904 vswap();
1905 } else {
1906 vswap();
1907 gv_dup();
1908 /* stack: H L L */
1909 vpushi(c);
1910 gen_op(op);
1911 vswap();
1912 vpushi(32 - c);
1913 if (op == TOK_SHL)
1914 gen_op(TOK_SHR);
1915 else
1916 gen_op(TOK_SHL);
1917 vrotb(3);
1918 /* stack: L L H */
1919 vpushi(c);
1920 if (op == TOK_SHL)
1921 gen_op(TOK_SHL);
1922 else
1923 gen_op(TOK_SHR);
1924 gen_op('|');
1926 if (op != TOK_SHL)
1927 vswap();
1928 lbuild(t);
1929 } else {
1930 /* XXX: should provide a faster fallback on x86 ? */
1931 switch(op) {
1932 case TOK_SAR:
1933 func = TOK___ashrdi3;
1934 goto gen_func;
1935 case TOK_SHR:
1936 func = TOK___lshrdi3;
1937 goto gen_func;
1938 case TOK_SHL:
1939 func = TOK___ashldi3;
1940 goto gen_func;
1943 break;
1944 default:
1945 /* compare operations */
1946 t = vtop->type.t;
1947 vswap();
1948 lexpand();
1949 vrotb(3);
1950 lexpand();
1951 /* stack: L1 H1 L2 H2 */
1952 tmp = vtop[-1];
1953 vtop[-1] = vtop[-2];
1954 vtop[-2] = tmp;
1955 /* stack: L1 L2 H1 H2 */
1956 /* compare high */
1957 op1 = op;
1958 /* when values are equal, we need to compare low words. since
1959 the jump is inverted, we invert the test too. */
1960 if (op1 == TOK_LT)
1961 op1 = TOK_LE;
1962 else if (op1 == TOK_GT)
1963 op1 = TOK_GE;
1964 else if (op1 == TOK_ULT)
1965 op1 = TOK_ULE;
1966 else if (op1 == TOK_UGT)
1967 op1 = TOK_UGE;
1968 a = 0;
1969 b = 0;
1970 gen_op(op1);
1971 if (op == TOK_NE) {
1972 b = gvtst(0, 0);
1973 } else {
1974 a = gvtst(1, 0);
1975 if (op != TOK_EQ) {
1976 /* generate non equal test */
1977 vpushi(TOK_NE);
1978 vtop->r = VT_CMP;
1979 b = gvtst(0, 0);
1982 /* compare low. Always unsigned */
1983 op1 = op;
1984 if (op1 == TOK_LT)
1985 op1 = TOK_ULT;
1986 else if (op1 == TOK_LE)
1987 op1 = TOK_ULE;
1988 else if (op1 == TOK_GT)
1989 op1 = TOK_UGT;
1990 else if (op1 == TOK_GE)
1991 op1 = TOK_UGE;
1992 gen_op(op1);
1993 a = gvtst(1, a);
1994 gsym(b);
1995 vseti(VT_JMPI, a);
1996 break;
1999 #endif
2001 static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
2003 uint64_t x = (a >> 63 ? -a : a) / (b >> 63 ? -b : b);
2004 return (a ^ b) >> 63 ? -x : x;
2007 static int gen_opic_lt(uint64_t a, uint64_t b)
2009 return (a ^ (uint64_t)1 << 63) < (b ^ (uint64_t)1 << 63);
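#if 0
/* Illustrative self-check for the two helpers above (not part of the
   build): gen_opic_sdiv() performs C-style truncating signed division on
   uint64_t carriers, and gen_opic_lt() implements a signed '<' by
   flipping the sign bits so that an unsigned compare gives the signed
   ordering. */
static void opic_helpers_example(void)
{
    /* -7 / 2 truncates toward zero, giving -3 */
    if ((int64_t)gen_opic_sdiv((uint64_t)-7, (uint64_t)2) != -3)
        tcc_error("gen_opic_sdiv example mismatch");
    /* -1 < 1 when interpreted as signed values */
    if (!gen_opic_lt((uint64_t)-1, (uint64_t)1))
        tcc_error("gen_opic_lt example mismatch");
}
#endif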
2012 /* handle integer constant optimizations and various machine
2013 independent opt */
2014 static void gen_opic(int op)
2016 SValue *v1 = vtop - 1;
2017 SValue *v2 = vtop;
2018 int t1 = v1->type.t & VT_BTYPE;
2019 int t2 = v2->type.t & VT_BTYPE;
2020 int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2021 int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2022 uint64_t l1 = c1 ? v1->c.i : 0;
2023 uint64_t l2 = c2 ? v2->c.i : 0;
2024 int shm = (t1 == VT_LLONG) ? 63 : 31;
2026 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
2027 l1 = ((uint32_t)l1 |
2028 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
2029 if (t2 != VT_LLONG && (PTR_SIZE != 8 || t2 != VT_PTR))
2030 l2 = ((uint32_t)l2 |
2031 (v2->type.t & VT_UNSIGNED ? 0 : -(l2 & 0x80000000)));
2033 if (c1 && c2) {
2034 switch(op) {
2035 case '+': l1 += l2; break;
2036 case '-': l1 -= l2; break;
2037 case '&': l1 &= l2; break;
2038 case '^': l1 ^= l2; break;
2039 case '|': l1 |= l2; break;
2040 case '*': l1 *= l2; break;
2042 case TOK_PDIV:
2043 case '/':
2044 case '%':
2045 case TOK_UDIV:
2046 case TOK_UMOD:
2047 /* if division by zero, generate explicit division */
2048 if (l2 == 0) {
2049 if (const_wanted)
2050 tcc_error("division by zero in constant");
2051 goto general_case;
2053 switch(op) {
2054 default: l1 = gen_opic_sdiv(l1, l2); break;
2055 case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
2056 case TOK_UDIV: l1 = l1 / l2; break;
2057 case TOK_UMOD: l1 = l1 % l2; break;
2059 break;
2060 case TOK_SHL: l1 <<= (l2 & shm); break;
2061 case TOK_SHR: l1 >>= (l2 & shm); break;
2062 case TOK_SAR:
2063 l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
2064 break;
2065 /* tests */
2066 case TOK_ULT: l1 = l1 < l2; break;
2067 case TOK_UGE: l1 = l1 >= l2; break;
2068 case TOK_EQ: l1 = l1 == l2; break;
2069 case TOK_NE: l1 = l1 != l2; break;
2070 case TOK_ULE: l1 = l1 <= l2; break;
2071 case TOK_UGT: l1 = l1 > l2; break;
2072 case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
2073 case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
2074 case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
2075 case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
2076 /* logical */
2077 case TOK_LAND: l1 = l1 && l2; break;
2078 case TOK_LOR: l1 = l1 || l2; break;
2079 default:
2080 goto general_case;
2082 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
2083 l1 = ((uint32_t)l1 |
2084 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
2085 v1->c.i = l1;
2086 vtop--;
2087 } else {
2088 /* if commutative ops, put c2 as constant */
2089 if (c1 && (op == '+' || op == '&' || op == '^' ||
2090 op == '|' || op == '*')) {
2091 vswap();
2092 c2 = c1; //c = c1, c1 = c2, c2 = c;
2093 l2 = l1; //l = l1, l1 = l2, l2 = l;
2095 if (!const_wanted &&
2096 c1 && ((l1 == 0 &&
2097 (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
2098 (l1 == -1 && op == TOK_SAR))) {
2099 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
2100 vtop--;
2101 } else if (!const_wanted &&
2102 c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
2103 (op == '|' &&
2104 (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))) ||
2105 (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
2106 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
2107 if (l2 == 1)
2108 vtop->c.i = 0;
2109 vswap();
2110 vtop--;
2111 } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
2112 op == TOK_PDIV) &&
2113 l2 == 1) ||
2114 ((op == '+' || op == '-' || op == '|' || op == '^' ||
2115 op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
2116 l2 == 0) ||
2117 (op == '&' &&
2118 (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))))) {
2119 /* filter out NOP operations like x*1, x-0, x&-1... */
2120 vtop--;
2121 } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
2122 /* try to use shifts instead of muls or divs */
2123 if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
2124 int n = -1;
2125 while (l2) {
2126 l2 >>= 1;
2127 n++;
2129 vtop->c.i = n;
2130 if (op == '*')
2131 op = TOK_SHL;
2132 else if (op == TOK_PDIV)
2133 op = TOK_SAR;
2134 else
2135 op = TOK_SHR;
2137 goto general_case;
2138 } else if (c2 && (op == '+' || op == '-') &&
2139 (((vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM))
2140 || (vtop[-1].r & (VT_VALMASK | VT_LVAL)) == VT_LOCAL)) {
2141 /* symbol + constant case */
2142 if (op == '-')
2143 l2 = -l2;
2144 l2 += vtop[-1].c.i;
2145 /* The backends can't always deal with addends to symbols
2146 larger than +-1<<31. Don't construct such. */
2147 if ((int)l2 != l2)
2148 goto general_case;
2149 vtop--;
2150 vtop->c.i = l2;
2151 } else {
2152 general_case:
2153 /* call low level op generator */
2154 if (t1 == VT_LLONG || t2 == VT_LLONG ||
2155 (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
2156 gen_opl(op);
2157 else
2158 gen_opi(op);
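/* Note: besides folding fully-constant operands, gen_opic() applies a few
   machine-independent simplifications when only one side is constant:
   x*1, x/1, x+0, x-0, x|0, x^0, x<<0 and x&-1 drop the operation, while
   x&0, x*0, x|-1, x%1 (and 0<<x, 0>>x, -1>>x) fold to a constant, and
   multiplication or unsigned/pointer division by a power of two is
   rewritten as a shift (x*8 -> x<<3).  The shortcuts that discard a
   non-constant operand are disabled under 'const_wanted', presumably so
   that an expression required to be constant is not silently accepted
   when one of its operands is not. */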
2163 /* generate a floating point operation with constant propagation */
2164 static void gen_opif(int op)
2166 int c1, c2;
2167 SValue *v1, *v2;
2168 #if defined _MSC_VER && defined _AMD64_
2169 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2170 volatile
2171 #endif
2172 long double f1, f2;
2174 v1 = vtop - 1;
2175 v2 = vtop;
2176 /* currently, we cannot do computations with forward symbols */
2177 c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2178 c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2179 if (c1 && c2) {
2180 if (v1->type.t == VT_FLOAT) {
2181 f1 = v1->c.f;
2182 f2 = v2->c.f;
2183 } else if (v1->type.t == VT_DOUBLE) {
2184 f1 = v1->c.d;
2185 f2 = v2->c.d;
2186 } else {
2187 f1 = v1->c.ld;
2188 f2 = v2->c.ld;
2191 /* NOTE: we only do constant propagation if the operands are finite (not
2192 NaN or infinity) (ANSI spec) */
2193 if (!ieee_finite(f1) || !ieee_finite(f2))
2194 goto general_case;
2196 switch(op) {
2197 case '+': f1 += f2; break;
2198 case '-': f1 -= f2; break;
2199 case '*': f1 *= f2; break;
2200 case '/':
2201 if (f2 == 0.0) {
2202 /* If not in initializer we need to potentially generate
2203 FP exceptions at runtime, otherwise we want to fold. */
2204 if (!const_wanted)
2205 goto general_case;
2207 f1 /= f2;
2208 break;
2209 /* XXX: also handles tests ? */
2210 default:
2211 goto general_case;
2213 /* XXX: overflow test ? */
2214 if (v1->type.t == VT_FLOAT) {
2215 v1->c.f = f1;
2216 } else if (v1->type.t == VT_DOUBLE) {
2217 v1->c.d = f1;
2218 } else {
2219 v1->c.ld = f1;
2221 vtop--;
2222 } else {
2223 general_case:
2224 gen_opf(op);
2228 static int pointed_size(CType *type)
2230 int align;
2231 return type_size(pointed_type(type), &align);
2234 static void vla_runtime_pointed_size(CType *type)
2236 int align;
2237 vla_runtime_type_size(pointed_type(type), &align);
2240 static inline int is_null_pointer(SValue *p)
2242 if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
2243 return 0;
2244 return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
2245 ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
2246 ((p->type.t & VT_BTYPE) == VT_PTR &&
2247 (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0) &&
2248 ((pointed_type(&p->type)->t & VT_BTYPE) == VT_VOID) &&
2249 0 == (pointed_type(&p->type)->t & (VT_CONSTANT | VT_VOLATILE))
2252 static inline int is_integer_btype(int bt)
2254 return (bt == VT_BYTE || bt == VT_SHORT ||
2255 bt == VT_INT || bt == VT_LLONG);
2258 /* check types for comparison or subtraction of pointers */
2259 static void check_comparison_pointer_types(SValue *p1, SValue *p2, int op)
2261 CType *type1, *type2, tmp_type1, tmp_type2;
2262 int bt1, bt2;
2264 /* null pointers are accepted for all comparisons as gcc */
2265 if (is_null_pointer(p1) || is_null_pointer(p2))
2266 return;
2267 type1 = &p1->type;
2268 type2 = &p2->type;
2269 bt1 = type1->t & VT_BTYPE;
2270 bt2 = type2->t & VT_BTYPE;
2271 /* accept comparison between pointer and integer with a warning */
2272 if ((is_integer_btype(bt1) || is_integer_btype(bt2)) && op != '-') {
2273 if (op != TOK_LOR && op != TOK_LAND )
2274 tcc_warning("comparison between pointer and integer");
2275 return;
2278 /* both must be pointers or implicit function pointers */
2279 if (bt1 == VT_PTR) {
2280 type1 = pointed_type(type1);
2281 } else if (bt1 != VT_FUNC)
2282 goto invalid_operands;
2284 if (bt2 == VT_PTR) {
2285 type2 = pointed_type(type2);
2286 } else if (bt2 != VT_FUNC) {
2287 invalid_operands:
2288 tcc_error("invalid operands to binary %s", get_tok_str(op, NULL));
2290 if ((type1->t & VT_BTYPE) == VT_VOID ||
2291 (type2->t & VT_BTYPE) == VT_VOID)
2292 return;
2293 tmp_type1 = *type1;
2294 tmp_type2 = *type2;
2295 tmp_type1.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
2296 tmp_type2.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
2297 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
2298 /* gcc-like error if '-' is used */
2299 if (op == '-')
2300 goto invalid_operands;
2301 else
2302 tcc_warning("comparison of distinct pointer types lacks a cast");
2306 /* generic gen_op: handles types problems */
2307 ST_FUNC void gen_op(int op)
2309 int u, t1, t2, bt1, bt2, t;
2310 CType type1;
2312 redo:
2313 t1 = vtop[-1].type.t;
2314 t2 = vtop[0].type.t;
2315 bt1 = t1 & VT_BTYPE;
2316 bt2 = t2 & VT_BTYPE;
2318 if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
2319 tcc_error("operation on a struct");
2320 } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
2321 if (bt2 == VT_FUNC) {
2322 mk_pointer(&vtop->type);
2323 gaddrof();
2325 if (bt1 == VT_FUNC) {
2326 vswap();
2327 mk_pointer(&vtop->type);
2328 gaddrof();
2329 vswap();
2331 goto redo;
2332 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
2333 /* at least one operand is a pointer */
2334 /* relational op: both operands should be pointers */
2335 if (op >= TOK_ULT && op <= TOK_LOR) {
2336 check_comparison_pointer_types(vtop - 1, vtop, op);
2337 /* pointers are handled as unsigned */
2338 #if PTR_SIZE == 8
2339 t = VT_LLONG | VT_UNSIGNED;
2340 #else
2341 t = VT_INT | VT_UNSIGNED;
2342 #endif
2343 goto std_op;
2345 /* if both pointers, then it must be the '-' op */
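/* e.g. 'p - q' with int pointers: gen_opic('-') below computes the byte
   difference, which is then divided by the element size via TOK_PDIV,
   giving a ptrdiff_t element count. */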
2346 if (bt1 == VT_PTR && bt2 == VT_PTR) {
2347 if (op != '-')
2348 tcc_error("cannot use pointers here");
2349 check_comparison_pointer_types(vtop - 1, vtop, op);
2350 /* XXX: check that types are compatible */
2351 if (vtop[-1].type.t & VT_VLA) {
2352 vla_runtime_pointed_size(&vtop[-1].type);
2353 } else {
2354 vpushi(pointed_size(&vtop[-1].type));
2356 vrott(3);
2357 gen_opic(op);
2358 vtop->type.t = ptrdiff_type.t;
2359 vswap();
2360 gen_op(TOK_PDIV);
2361 } else {
2362 /* exactly one pointer : must be '+' or '-'. */
2363 if (op != '-' && op != '+')
2364 tcc_error("cannot use pointers here");
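/* e.g. 'p + 3' with an int pointer: the integer operand is multiplied by
   the element size pushed below and the product is added to the pointer;
   with constant operands gen_opic() folds the whole expression. */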
2365 /* Put pointer as first operand */
2366 if (bt2 == VT_PTR) {
2367 vswap();
2368 t = t1, t1 = t2, t2 = t;
2370 #if PTR_SIZE == 4
2371 if ((vtop[0].type.t & VT_BTYPE) == VT_LLONG)
2372 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2373 gen_cast_s(VT_INT);
2374 #endif
2375 type1 = vtop[-1].type;
2376 type1.t &= ~VT_ARRAY;
2377 if (vtop[-1].type.t & VT_VLA)
2378 vla_runtime_pointed_size(&vtop[-1].type);
2379 else {
2380 u = pointed_size(&vtop[-1].type);
2381 if (u < 0)
2382 tcc_error("unknown array element size");
2383 #if PTR_SIZE == 8
2384 vpushll(u);
2385 #else
2386 /* XXX: cast to int ? (long long case) */
2387 vpushi(u);
2388 #endif
2390 gen_op('*');
2391 #if 0
2392 /* #ifdef CONFIG_TCC_BCHECK
2393 The main reason for removing this code:
2394 #include <stdio.h>
2395 int main ()
2397 int v[10];
2398 int i = 10;
2399 int j = 9;
2400 fprintf(stderr, "v+i-j = %p\n", v+i-j);
2401 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
2403 When this code is on, the output looks like
2404 v+i-j = 0xfffffffe
2405 v+(i-j) = 0xbff84000
2407 /* if evaluating constant expression, no code should be
2408 generated, so no bound check */
2409 if (tcc_state->do_bounds_check && !const_wanted) {
2410 /* if bounded pointers, we generate special code to
2411 test bounds */
2412 if (op == '-') {
2413 vpushi(0);
2414 vswap();
2415 gen_op('-');
2417 gen_bounded_ptr_add();
2418 } else
2419 #endif
2421 gen_opic(op);
2423 /* set the type again in case gen_opic() swapped the operands */
2424 vtop->type = type1;
2426 } else if (is_float(bt1) || is_float(bt2)) {
2427 /* compute bigger type and do implicit casts */
2428 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
2429 t = VT_LDOUBLE;
2430 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
2431 t = VT_DOUBLE;
2432 } else {
2433 t = VT_FLOAT;
2435 /* floats can only be used for a few operations */
2436 if (op != '+' && op != '-' && op != '*' && op != '/' &&
2437 (op < TOK_ULT || op > TOK_GT))
2438 tcc_error("invalid operands for binary operation");
2439 goto std_op;
2440 } else if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL) {
2441 t = bt1 == VT_LLONG ? VT_LLONG : VT_INT;
2442 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (t | VT_UNSIGNED))
2443 t |= VT_UNSIGNED;
2444 t |= (VT_LONG & t1);
2445 goto std_op;
2446 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
2447 /* cast to biggest op */
2448 t = VT_LLONG | VT_LONG;
2449 if (bt1 == VT_LLONG)
2450 t &= t1;
2451 if (bt2 == VT_LLONG)
2452 t &= t2;
2453 /* convert to unsigned if it does not fit in a long long */
2454 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
2455 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
2456 t |= VT_UNSIGNED;
2457 goto std_op;
2458 } else {
2459 /* integer operations */
2460 t = VT_INT | (VT_LONG & (t1 | t2));
2461 /* convert to unsigned if it does not fit in an integer */
2462 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
2463 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
2464 t |= VT_UNSIGNED;
2465 std_op:
2466 /* XXX: currently, some unsigned operations are explicit, so
2467 we modify them here */
2468 if (t & VT_UNSIGNED) {
2469 if (op == TOK_SAR)
2470 op = TOK_SHR;
2471 else if (op == '/')
2472 op = TOK_UDIV;
2473 else if (op == '%')
2474 op = TOK_UMOD;
2475 else if (op == TOK_LT)
2476 op = TOK_ULT;
2477 else if (op == TOK_GT)
2478 op = TOK_UGT;
2479 else if (op == TOK_LE)
2480 op = TOK_ULE;
2481 else if (op == TOK_GE)
2482 op = TOK_UGE;
2484 vswap();
2485 type1.t = t;
2486 type1.ref = NULL;
2487 gen_cast(&type1);
2488 vswap();
2489 /* special case for shifts and long long: we keep the shift as
2490 an integer */
2491 if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
2492 type1.t = VT_INT;
2493 gen_cast(&type1);
2494 if (is_float(t))
2495 gen_opif(op);
2496 else
2497 gen_opic(op);
2498 if (op >= TOK_ULT && op <= TOK_GT) {
2499 /* relational op: the result is an int */
2500 vtop->type.t = VT_INT;
2501 } else {
2502 vtop->type.t = t;
2505 // Make sure that we have converted to an rvalue:
2506 if (vtop->r & VT_LVAL)
2507 gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
2510 #ifndef TCC_TARGET_ARM
2511 /* generic itof for unsigned long long case */
2512 static void gen_cvt_itof1(int t)
2514 #ifdef TCC_TARGET_ARM64
2515 gen_cvt_itof(t);
2516 #else
2517 if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
2518 (VT_LLONG | VT_UNSIGNED)) {
2520 if (t == VT_FLOAT)
2521 vpush_global_sym(&func_old_type, TOK___floatundisf);
2522 #if LDOUBLE_SIZE != 8
2523 else if (t == VT_LDOUBLE)
2524 vpush_global_sym(&func_old_type, TOK___floatundixf);
2525 #endif
2526 else
2527 vpush_global_sym(&func_old_type, TOK___floatundidf);
2528 vrott(2);
2529 gfunc_call(1);
2530 vpushi(0);
2531 vtop->r = reg_fret(t);
2532 } else {
2533 gen_cvt_itof(t);
2535 #endif
2537 #endif
2539 /* generic ftoi for unsigned long long case */
2540 static void gen_cvt_ftoi1(int t)
2542 #ifdef TCC_TARGET_ARM64
2543 gen_cvt_ftoi(t);
2544 #else
2545 int st;
2547 if (t == (VT_LLONG | VT_UNSIGNED)) {
2548 /* not handled natively */
2549 st = vtop->type.t & VT_BTYPE;
2550 if (st == VT_FLOAT)
2551 vpush_global_sym(&func_old_type, TOK___fixunssfdi);
2552 #if LDOUBLE_SIZE != 8
2553 else if (st == VT_LDOUBLE)
2554 vpush_global_sym(&func_old_type, TOK___fixunsxfdi);
2555 #endif
2556 else
2557 vpush_global_sym(&func_old_type, TOK___fixunsdfdi);
2558 vrott(2);
2559 gfunc_call(1);
2560 vpushi(0);
2561 vtop->r = REG_IRET;
2562 vtop->r2 = REG_LRET;
2563 } else {
2564 gen_cvt_ftoi(t);
2566 #endif
2569 /* force char or short cast */
2570 static void force_charshort_cast(int t)
2572 int bits, dbt;
2574 /* cannot cast static initializers */
2575 if (STATIC_DATA_WANTED)
2576 return;
2578 dbt = t & VT_BTYPE;
2579 /* XXX: add optimization if lvalue : just change type and offset */
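/* For illustration: a delayed cast of a 32-bit value to signed char is
   emitted as (x << 24) >> 24 with an arithmetic shift, while a cast to
   unsigned char is simply x & 0xff. */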
2580 if (dbt == VT_BYTE)
2581 bits = 8;
2582 else
2583 bits = 16;
2584 if (t & VT_UNSIGNED) {
2585 vpushi((1 << bits) - 1);
2586 gen_op('&');
2587 } else {
2588 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
2589 bits = 64 - bits;
2590 else
2591 bits = 32 - bits;
2592 vpushi(bits);
2593 gen_op(TOK_SHL);
2594 /* result must be signed or the SAR is converted to an SHR.
2595 This was not the case when "t" was a signed short
2596 and the last value on the stack was an unsigned int */
2597 vtop->type.t &= ~VT_UNSIGNED;
2598 vpushi(bits);
2599 gen_op(TOK_SAR);
2603 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2604 static void gen_cast_s(int t)
2606 CType type;
2607 type.t = t;
2608 type.ref = NULL;
2609 gen_cast(&type);
2612 static void gen_cast(CType *type)
2614 int sbt, dbt, sf, df, c, p;
2616 /* special delayed cast for char/short */
2617 /* XXX: in some cases (multiple cascaded casts), it may still
2618 be incorrect */
2619 if (vtop->r & VT_MUSTCAST) {
2620 vtop->r &= ~VT_MUSTCAST;
2621 force_charshort_cast(vtop->type.t);
2624 /* bitfields first get cast to ints */
2625 if (vtop->type.t & VT_BITFIELD) {
2626 gv(RC_INT);
2629 dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
2630 sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
2632 if (sbt != dbt) {
2633 sf = is_float(sbt);
2634 df = is_float(dbt);
2635 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2636 p = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM);
2637 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
2638 c &= dbt != VT_LDOUBLE;
2639 #endif
2640 if (c) {
2641 /* constant case: we can do it now */
2642 /* XXX: in ISOC, cannot do it if error in convert */
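/* e.g. with a constant operand, (char)0x1ff is folded right here to -1
   (assuming plain char is signed); float constants are narrowed or
   widened through the long double field c.ld. */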
2643 if (sbt == VT_FLOAT)
2644 vtop->c.ld = vtop->c.f;
2645 else if (sbt == VT_DOUBLE)
2646 vtop->c.ld = vtop->c.d;
2648 if (df) {
2649 if ((sbt & VT_BTYPE) == VT_LLONG) {
2650 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
2651 vtop->c.ld = vtop->c.i;
2652 else
2653 vtop->c.ld = -(long double)-vtop->c.i;
2654 } else if(!sf) {
2655 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
2656 vtop->c.ld = (uint32_t)vtop->c.i;
2657 else
2658 vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
2661 if (dbt == VT_FLOAT)
2662 vtop->c.f = (float)vtop->c.ld;
2663 else if (dbt == VT_DOUBLE)
2664 vtop->c.d = (double)vtop->c.ld;
2665 } else if (sf && dbt == (VT_LLONG|VT_UNSIGNED)) {
2666 vtop->c.i = vtop->c.ld;
2667 } else if (sf && dbt == VT_BOOL) {
2668 vtop->c.i = (vtop->c.ld != 0);
2669 } else {
2670 if(sf)
2671 vtop->c.i = vtop->c.ld;
2672 else if (sbt == (VT_LLONG|VT_UNSIGNED))
2674 else if (sbt & VT_UNSIGNED)
2675 vtop->c.i = (uint32_t)vtop->c.i;
2676 #if PTR_SIZE == 8
2677 else if (sbt == VT_PTR)
2679 #endif
2680 else if (sbt != VT_LLONG)
2681 vtop->c.i = ((uint32_t)vtop->c.i |
2682 -(vtop->c.i & 0x80000000));
2684 if (dbt == (VT_LLONG|VT_UNSIGNED))
2686 else if (dbt == VT_BOOL)
2687 vtop->c.i = (vtop->c.i != 0);
2688 #if PTR_SIZE == 8
2689 else if (dbt == VT_PTR)
2691 #endif
2692 else if (dbt != VT_LLONG) {
2693 uint32_t m = ((dbt & VT_BTYPE) == VT_BYTE ? 0xff :
2694 (dbt & VT_BTYPE) == VT_SHORT ? 0xffff :
2695 0xffffffff);
2696 vtop->c.i &= m;
2697 if (!(dbt & VT_UNSIGNED))
2698 vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
2701 } else if (p && dbt == VT_BOOL) {
2702 vtop->r = VT_CONST;
2703 vtop->c.i = 1;
2704 } else {
2705 /* non constant case: generate code */
2706 if (sf && df) {
2707 /* convert from fp to fp */
2708 gen_cvt_ftof(dbt);
2709 } else if (df) {
2710 /* convert int to fp */
2711 gen_cvt_itof1(dbt);
2712 } else if (sf) {
2713 /* convert fp to int */
2714 if (dbt == VT_BOOL) {
2715 vpushi(0);
2716 gen_op(TOK_NE);
2717 } else {
2718 /* we handle char/short/etc... with generic code */
2719 if (dbt != (VT_INT | VT_UNSIGNED) &&
2720 dbt != (VT_LLONG | VT_UNSIGNED) &&
2721 dbt != VT_LLONG)
2722 dbt = VT_INT;
2723 gen_cvt_ftoi1(dbt);
2724 if (dbt == VT_INT && (type->t & (VT_BTYPE | VT_UNSIGNED)) != dbt) {
2725 /* additional cast for char/short... */
2726 vtop->type.t = dbt;
2727 gen_cast(type);
2730 #if PTR_SIZE == 4
2731 } else if ((dbt & VT_BTYPE) == VT_LLONG) {
2732 if ((sbt & VT_BTYPE) != VT_LLONG) {
2733 /* scalar to long long */
2734 /* machine independent conversion */
2735 gv(RC_INT);
2736 /* generate high word */
2737 if (sbt == (VT_INT | VT_UNSIGNED)) {
2738 vpushi(0);
2739 gv(RC_INT);
2740 } else {
2741 if (sbt == VT_PTR) {
2742 /* cast from pointer to int before we apply
2743 shift operation, which pointers don't support */
2744 gen_cast_s(VT_INT);
2746 gv_dup();
2747 vpushi(31);
2748 gen_op(TOK_SAR);
2750 /* patch second register */
2751 vtop[-1].r2 = vtop->r;
2752 vpop();
2754 #else
2755 } else if ((dbt & VT_BTYPE) == VT_LLONG ||
2756 (dbt & VT_BTYPE) == VT_PTR ||
2757 (dbt & VT_BTYPE) == VT_FUNC) {
2758 if ((sbt & VT_BTYPE) != VT_LLONG &&
2759 (sbt & VT_BTYPE) != VT_PTR &&
2760 (sbt & VT_BTYPE) != VT_FUNC) {
2761 /* need to convert from 32bit to 64bit */
2762 gv(RC_INT);
2763 if (sbt != (VT_INT | VT_UNSIGNED)) {
2764 #if defined(TCC_TARGET_ARM64)
2765 gen_cvt_sxtw();
2766 #elif defined(TCC_TARGET_X86_64)
2767 int r = gv(RC_INT);
2768 /* x86_64 specific: movslq */
2769 o(0x6348);
2770 o(0xc0 + (REG_VALUE(r) << 3) + REG_VALUE(r));
2771 #else
2772 #error
2773 #endif
2776 #endif
2777 } else if (dbt == VT_BOOL) {
2778 /* scalar to bool */
2779 vpushi(0);
2780 gen_op(TOK_NE);
2781 } else if ((dbt & VT_BTYPE) == VT_BYTE ||
2782 (dbt & VT_BTYPE) == VT_SHORT) {
2783 if (sbt == VT_PTR) {
2784 vtop->type.t = VT_INT;
2785 tcc_warning("nonportable conversion from pointer to char/short");
2787 force_charshort_cast(dbt);
2788 } else if ((dbt & VT_BTYPE) == VT_INT) {
2789 /* scalar to int */
2790 if ((sbt & VT_BTYPE) == VT_LLONG) {
2791 #if PTR_SIZE == 4
2792 /* from long long: just take low order word */
2793 lexpand();
2794 vpop();
2795 #else
2796 vpushi(0xffffffff);
2797 vtop->type.t |= VT_UNSIGNED;
2798 gen_op('&');
2799 #endif
2801 /* if lvalue and single word type, nothing to do because
2802 the lvalue already contains the real type size (see
2803 VT_LVAL_xxx constants) */
2806 } else if ((dbt & VT_BTYPE) == VT_PTR && !(vtop->r & VT_LVAL)) {
2807 /* if we are casting between pointer types,
2808 we must update the VT_LVAL_xxx size */
2809 vtop->r = (vtop->r & ~VT_LVAL_TYPE)
2810 | (lvalue_type(type->ref->type.t) & VT_LVAL_TYPE);
2812 vtop->type = *type;
2813 vtop->type.t &= ~ ( VT_CONSTANT | VT_VOLATILE | VT_ARRAY );
2816 /* return type size as known at compile time. Put alignment at 'a' */
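/* e.g. for 'int a[10]' this returns 40 with *a set to 4; incomplete types
   (forward-declared structs, incomplete enums, arrays of unknown size)
   yield a negative result. */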
2817 ST_FUNC int type_size(CType *type, int *a)
2819 Sym *s;
2820 int bt;
2822 bt = type->t & VT_BTYPE;
2823 if (bt == VT_STRUCT) {
2824 /* struct/union */
2825 s = type->ref;
2826 *a = s->r;
2827 return s->c;
2828 } else if (bt == VT_PTR) {
2829 if (type->t & VT_ARRAY) {
2830 int ts;
2832 s = type->ref;
2833 ts = type_size(&s->type, a);
2835 if (ts < 0 && s->c < 0)
2836 ts = -ts;
2838 return ts * s->c;
2839 } else {
2840 *a = PTR_SIZE;
2841 return PTR_SIZE;
2843 } else if (IS_ENUM(type->t) && type->ref->c < 0) {
2844 return -1; /* incomplete enum */
2845 } else if (bt == VT_LDOUBLE) {
2846 *a = LDOUBLE_ALIGN;
2847 return LDOUBLE_SIZE;
2848 } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
2849 #ifdef TCC_TARGET_I386
2850 #ifdef TCC_TARGET_PE
2851 *a = 8;
2852 #else
2853 *a = 4;
2854 #endif
2855 #elif defined(TCC_TARGET_ARM)
2856 #ifdef TCC_ARM_EABI
2857 *a = 8;
2858 #else
2859 *a = 4;
2860 #endif
2861 #else
2862 *a = 8;
2863 #endif
2864 return 8;
2865 } else if (bt == VT_INT || bt == VT_FLOAT) {
2866 *a = 4;
2867 return 4;
2868 } else if (bt == VT_SHORT) {
2869 *a = 2;
2870 return 2;
2871 } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
2872 *a = 8;
2873 return 16;
2874 } else {
2875 /* char, void, function, _Bool */
2876 *a = 1;
2877 return 1;
2881 /* push type size as known at run time on top of the value stack. Put
2882 alignment at 'a' */
2883 ST_FUNC void vla_runtime_type_size(CType *type, int *a)
2885 if (type->t & VT_VLA) {
2886 type_size(&type->ref->type, a);
2887 vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
2888 } else {
2889 vpushi(type_size(type, a));
2893 static void vla_sp_restore(void) {
2894 if (vlas_in_scope) {
2895 gen_vla_sp_restore(vla_sp_loc);
2899 static void vla_sp_restore_root(void) {
2900 if (vlas_in_scope) {
2901 gen_vla_sp_restore(vla_sp_root_loc);
2905 /* return the pointed type of t */
2906 static inline CType *pointed_type(CType *type)
2908 return &type->ref->type;
2911 /* modify type so that it is a pointer to type. */
2912 ST_FUNC void mk_pointer(CType *type)
2914 Sym *s;
2915 s = sym_push(SYM_FIELD, type, 0, -1);
2916 type->t = VT_PTR | (type->t & VT_STORAGE);
2917 type->ref = s;
2920 /* compare function types. OLD functions match any new functions */
2921 static int is_compatible_func(CType *type1, CType *type2)
2923 Sym *s1, *s2;
2925 s1 = type1->ref;
2926 s2 = type2->ref;
2927 if (s1->f.func_call != s2->f.func_call)
2928 return 0;
2929 if (s1->f.func_type != s2->f.func_type
2930 && s1->f.func_type != FUNC_OLD
2931 && s2->f.func_type != FUNC_OLD)
2932 return 0;
2933 /* we should check the function return type for FUNC_OLD too
2934 but that causes problems with the internally used support
2935 functions such as TOK_memmove */
2936 if (s1->f.func_type == FUNC_OLD && !s1->next)
2937 return 1;
2938 if (s2->f.func_type == FUNC_OLD && !s2->next)
2939 return 1;
2940 for (;;) {
2941 if (!is_compatible_unqualified_types(&s1->type, &s2->type))
2942 return 0;
2943 s1 = s1->next;
2944 s2 = s2->next;
2945 if (!s1)
2946 return !s2;
2947 if (!s2)
2948 return 0;
2952 /* return true if type1 and type2 are the same. If unqualified is
2953 true, qualifiers on the types are ignored.
2955 static int compare_types(CType *type1, CType *type2, int unqualified)
2957 int bt1, t1, t2;
2959 t1 = type1->t & VT_TYPE;
2960 t2 = type2->t & VT_TYPE;
2961 if (unqualified) {
2962 /* strip qualifiers before comparing */
2963 t1 &= ~(VT_CONSTANT | VT_VOLATILE);
2964 t2 &= ~(VT_CONSTANT | VT_VOLATILE);
2967 /* Default vs. explicit signedness only matters for char */
2968 if ((t1 & VT_BTYPE) != VT_BYTE) {
2969 t1 &= ~VT_DEFSIGN;
2970 t2 &= ~VT_DEFSIGN;
2972 /* XXX: bitfields ? */
2973 if (t1 != t2)
2974 return 0;
2976 if ((t1 & VT_ARRAY)
2977 && !(type1->ref->c < 0
2978 || type2->ref->c < 0
2979 || type1->ref->c == type2->ref->c))
2980 return 0;
2982 /* test more complicated cases */
2983 bt1 = t1 & VT_BTYPE;
2984 if (bt1 == VT_PTR) {
2985 type1 = pointed_type(type1);
2986 type2 = pointed_type(type2);
2987 return is_compatible_types(type1, type2);
2988 } else if (bt1 == VT_STRUCT) {
2989 return (type1->ref == type2->ref);
2990 } else if (bt1 == VT_FUNC) {
2991 return is_compatible_func(type1, type2);
2992 } else if (IS_ENUM(type1->t) || IS_ENUM(type2->t)) {
2993 return type1->ref == type2->ref;
2994 } else {
2995 return 1;
2999 /* return true if type1 and type2 are exactly the same (including
3000 qualifiers).
3002 static int is_compatible_types(CType *type1, CType *type2)
3004 return compare_types(type1,type2,0);
3007 /* return true if type1 and type2 are the same (ignoring qualifiers).
3009 static int is_compatible_unqualified_types(CType *type1, CType *type2)
3011 return compare_types(type1,type2,1);
3014 /* print a type. If 'varstr' is not NULL, then the variable is also
3015 printed in the type */
3016 /* XXX: union */
3017 /* XXX: add array and function pointers */
3018 static void type_to_str(char *buf, int buf_size,
3019 CType *type, const char *varstr)
3021 int bt, v, t;
3022 Sym *s, *sa;
3023 char buf1[256];
3024 const char *tstr;
3026 t = type->t;
3027 bt = t & VT_BTYPE;
3028 buf[0] = '\0';
3030 if (t & VT_EXTERN)
3031 pstrcat(buf, buf_size, "extern ");
3032 if (t & VT_STATIC)
3033 pstrcat(buf, buf_size, "static ");
3034 if (t & VT_TYPEDEF)
3035 pstrcat(buf, buf_size, "typedef ");
3036 if (t & VT_INLINE)
3037 pstrcat(buf, buf_size, "inline ");
3038 if (t & VT_VOLATILE)
3039 pstrcat(buf, buf_size, "volatile ");
3040 if (t & VT_CONSTANT)
3041 pstrcat(buf, buf_size, "const ");
3043 if (((t & VT_DEFSIGN) && bt == VT_BYTE)
3044 || ((t & VT_UNSIGNED)
3045 && (bt == VT_SHORT || bt == VT_INT || bt == VT_LLONG)
3046 && !IS_ENUM(t)
3048 pstrcat(buf, buf_size, (t & VT_UNSIGNED) ? "unsigned " : "signed ");
3050 buf_size -= strlen(buf);
3051 buf += strlen(buf);
3053 switch(bt) {
3054 case VT_VOID:
3055 tstr = "void";
3056 goto add_tstr;
3057 case VT_BOOL:
3058 tstr = "_Bool";
3059 goto add_tstr;
3060 case VT_BYTE:
3061 tstr = "char";
3062 goto add_tstr;
3063 case VT_SHORT:
3064 tstr = "short";
3065 goto add_tstr;
3066 case VT_INT:
3067 tstr = "int";
3068 goto maybe_long;
3069 case VT_LLONG:
3070 tstr = "long long";
3071 maybe_long:
3072 if (t & VT_LONG)
3073 tstr = "long";
3074 if (!IS_ENUM(t))
3075 goto add_tstr;
3076 tstr = "enum ";
3077 goto tstruct;
3078 case VT_FLOAT:
3079 tstr = "float";
3080 goto add_tstr;
3081 case VT_DOUBLE:
3082 tstr = "double";
3083 goto add_tstr;
3084 case VT_LDOUBLE:
3085 tstr = "long double";
3086 add_tstr:
3087 pstrcat(buf, buf_size, tstr);
3088 break;
3089 case VT_STRUCT:
3090 tstr = "struct ";
3091 if (IS_UNION(t))
3092 tstr = "union ";
3093 tstruct:
3094 pstrcat(buf, buf_size, tstr);
3095 v = type->ref->v & ~SYM_STRUCT;
3096 if (v >= SYM_FIRST_ANOM)
3097 pstrcat(buf, buf_size, "<anonymous>");
3098 else
3099 pstrcat(buf, buf_size, get_tok_str(v, NULL));
3100 break;
3101 case VT_FUNC:
3102 s = type->ref;
3103 buf1[0]=0;
3104 if (varstr && '*' == *varstr) {
3105 pstrcat(buf1, sizeof(buf1), "(");
3106 pstrcat(buf1, sizeof(buf1), varstr);
3107 pstrcat(buf1, sizeof(buf1), ")");
3109 pstrcat(buf1, buf_size, "(");
3110 sa = s->next;
3111 while (sa != NULL) {
3112 char buf2[256];
3113 type_to_str(buf2, sizeof(buf2), &sa->type, NULL);
3114 pstrcat(buf1, sizeof(buf1), buf2);
3115 sa = sa->next;
3116 if (sa)
3117 pstrcat(buf1, sizeof(buf1), ", ");
3119 if (s->f.func_type == FUNC_ELLIPSIS)
3120 pstrcat(buf1, sizeof(buf1), ", ...");
3121 pstrcat(buf1, sizeof(buf1), ")");
3122 type_to_str(buf, buf_size, &s->type, buf1);
3123 goto no_var;
3124 case VT_PTR:
3125 s = type->ref;
3126 if (t & VT_ARRAY) {
3127 if (varstr && '*' == *varstr)
3128 snprintf(buf1, sizeof(buf1), "(%s)[%d]", varstr, s->c);
3129 else
3130 snprintf(buf1, sizeof(buf1), "%s[%d]", varstr ? varstr : "", s->c);
3131 type_to_str(buf, buf_size, &s->type, buf1);
3132 goto no_var;
3134 pstrcpy(buf1, sizeof(buf1), "*");
3135 if (t & VT_CONSTANT)
3136 pstrcat(buf1, buf_size, "const ");
3137 if (t & VT_VOLATILE)
3138 pstrcat(buf1, buf_size, "volatile ");
3139 if (varstr)
3140 pstrcat(buf1, sizeof(buf1), varstr);
3141 type_to_str(buf, buf_size, &s->type, buf1);
3142 goto no_var;
3144 if (varstr) {
3145 pstrcat(buf, buf_size, " ");
3146 pstrcat(buf, buf_size, varstr);
3148 no_var: ;
3151 /* verify type compatibility to store vtop in 'dt' type, and generate
3152 casts if needed. */
3153 static void gen_assign_cast(CType *dt)
3155 CType *st, *type1, *type2;
3156 char buf1[256], buf2[256];
3157 int dbt, sbt, qualwarn, lvl;
3159 st = &vtop->type; /* source type */
3160 dbt = dt->t & VT_BTYPE;
3161 sbt = st->t & VT_BTYPE;
3162 if (sbt == VT_VOID || dbt == VT_VOID) {
3163 if (sbt == VT_VOID && dbt == VT_VOID)
3164 ; /* It is Ok if both are void */
3165 else
3166 tcc_error("cannot cast from/to void");
3168 if (dt->t & VT_CONSTANT)
3169 tcc_warning("assignment of read-only location");
3170 switch(dbt) {
3171 case VT_PTR:
3172 /* special cases for pointers */
3173 /* '0' can also be a pointer */
3174 if (is_null_pointer(vtop))
3175 break;
3176 /* accept implicit pointer to integer cast with warning */
3177 if (is_integer_btype(sbt)) {
3178 tcc_warning("assignment makes pointer from integer without a cast");
3179 break;
3181 type1 = pointed_type(dt);
3182 if (sbt == VT_PTR)
3183 type2 = pointed_type(st);
3184 else if (sbt == VT_FUNC)
3185 type2 = st; /* a function is implicitly a function pointer */
3186 else
3187 goto error;
3188 if (is_compatible_types(type1, type2))
3189 break;
3190 for (qualwarn = lvl = 0;; ++lvl) {
3191 if (((type2->t & VT_CONSTANT) && !(type1->t & VT_CONSTANT)) ||
3192 ((type2->t & VT_VOLATILE) && !(type1->t & VT_VOLATILE)))
3193 qualwarn = 1;
3194 dbt = type1->t & (VT_BTYPE|VT_LONG);
3195 sbt = type2->t & (VT_BTYPE|VT_LONG);
3196 if (dbt != VT_PTR || sbt != VT_PTR)
3197 break;
3198 type1 = pointed_type(type1);
3199 type2 = pointed_type(type2);
3201 if (!is_compatible_unqualified_types(type1, type2)) {
3202 if ((dbt == VT_VOID || sbt == VT_VOID) && lvl == 0) {
3203 /* void * can match anything */
3204 } else if (dbt == sbt
3205 && is_integer_btype(sbt & VT_BTYPE)
3206 && IS_ENUM(type1->t) + IS_ENUM(type2->t)
3207 + !!((type1->t ^ type2->t) & VT_UNSIGNED) < 2) {
3208 /* Like GCC, don't warn by default for mere changes
3209 in pointer target signedness. Do warn for different
3210 base types, though, in particular for unsigned enums
3211 and signed int targets. */
3212 } else {
3213 tcc_warning("assignment from incompatible pointer type");
3214 break;
3217 if (qualwarn)
3218 tcc_warning("assignment discards qualifiers from pointer target type");
3219 break;
3220 case VT_BYTE:
3221 case VT_SHORT:
3222 case VT_INT:
3223 case VT_LLONG:
3224 if (sbt == VT_PTR || sbt == VT_FUNC) {
3225 tcc_warning("assignment makes integer from pointer without a cast");
3226 } else if (sbt == VT_STRUCT) {
3227 goto case_VT_STRUCT;
3229 /* XXX: more tests */
3230 break;
3231 case VT_STRUCT:
3232 case_VT_STRUCT:
3233 if (!is_compatible_unqualified_types(dt, st)) {
3234 error:
3235 type_to_str(buf1, sizeof(buf1), st, NULL);
3236 type_to_str(buf2, sizeof(buf2), dt, NULL);
3237 tcc_error("cannot cast '%s' to '%s'", buf1, buf2);
3239 break;
3241 gen_cast(dt);
3244 /* store vtop in lvalue pushed on stack */
3245 ST_FUNC void vstore(void)
3247 int sbt, dbt, ft, r, t, size, align, bit_size, bit_pos, rc, delayed_cast;
3249 ft = vtop[-1].type.t;
3250 sbt = vtop->type.t & VT_BTYPE;
3251 dbt = ft & VT_BTYPE;
3252 if ((((sbt == VT_INT || sbt == VT_SHORT) && dbt == VT_BYTE) ||
3253 (sbt == VT_INT && dbt == VT_SHORT))
3254 && !(vtop->type.t & VT_BITFIELD)) {
3255 /* optimize char/short casts */
3256 delayed_cast = VT_MUSTCAST;
3257 vtop->type.t = ft & VT_TYPE;
3258 /* XXX: factorize */
3259 if (ft & VT_CONSTANT)
3260 tcc_warning("assignment of read-only location");
3261 } else {
3262 delayed_cast = 0;
3263 if (!(ft & VT_BITFIELD))
3264 gen_assign_cast(&vtop[-1].type);
3267 if (sbt == VT_STRUCT) {
3268 /* if structure, only generate pointer */
3269 /* structure assignment : generate memcpy */
3270 /* XXX: optimize if small size */
3271 size = type_size(&vtop->type, &align);
3273 /* destination */
3274 vswap();
3275 vtop->type.t = VT_PTR;
3276 gaddrof();
3278 /* address of memcpy() */
3279 #ifdef TCC_ARM_EABI
3280 if(!(align & 7))
3281 vpush_global_sym(&func_old_type, TOK_memcpy8);
3282 else if(!(align & 3))
3283 vpush_global_sym(&func_old_type, TOK_memcpy4);
3284 else
3285 #endif
3286 /* Use memmove, rather than memcpy, as dest and src may be the same: */
3287 vpush_global_sym(&func_old_type, TOK_memmove);
3289 vswap();
3290 /* source */
3291 vpushv(vtop - 2);
3292 vtop->type.t = VT_PTR;
3293 gaddrof();
3294 /* type size */
3295 vpushi(size);
3296 gfunc_call(3);
3298 /* leave source on stack */
3299 } else if (ft & VT_BITFIELD) {
3300 /* bitfield store handling */
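/* The generated sequence is essentially
   dest = (dest & ~(mask << bit_pos)) | ((src & mask) << bit_pos),
   unless adjust_bf() reports VT_STRUCT, in which case the
   store_packed_bf() path is used instead. */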
3302 /* save lvalue as expression result (example: s.b = s.a = n;) */
3303 vdup(), vtop[-1] = vtop[-2];
3305 bit_pos = BIT_POS(ft);
3306 bit_size = BIT_SIZE(ft);
3307 /* remove bit field info to avoid loops */
3308 vtop[-1].type.t = ft & ~VT_STRUCT_MASK;
3310 if ((ft & VT_BTYPE) == VT_BOOL) {
3311 gen_cast(&vtop[-1].type);
3312 vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
3315 r = adjust_bf(vtop - 1, bit_pos, bit_size);
3316 if (r == VT_STRUCT) {
3317 gen_cast_s((ft & VT_BTYPE) == VT_LLONG ? VT_LLONG : VT_INT);
3318 store_packed_bf(bit_pos, bit_size);
3319 } else {
3320 unsigned long long mask = (1ULL << bit_size) - 1;
3321 if ((ft & VT_BTYPE) != VT_BOOL) {
3322 /* mask source */
3323 if ((vtop[-1].type.t & VT_BTYPE) == VT_LLONG)
3324 vpushll(mask);
3325 else
3326 vpushi((unsigned)mask);
3327 gen_op('&');
3329 /* shift source */
3330 vpushi(bit_pos);
3331 gen_op(TOK_SHL);
3332 vswap();
3333 /* duplicate destination */
3334 vdup();
3335 vrott(3);
3336 /* load destination, mask and or with source */
3337 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
3338 vpushll(~(mask << bit_pos));
3339 else
3340 vpushi(~((unsigned)mask << bit_pos));
3341 gen_op('&');
3342 gen_op('|');
3343 /* store result */
3344 vstore();
3345 /* ... and discard */
3346 vpop();
3348 } else if (dbt == VT_VOID) {
3349 --vtop;
3350 } else {
3351 #ifdef CONFIG_TCC_BCHECK
3352 /* bound check case */
3353 if (vtop[-1].r & VT_MUSTBOUND) {
3354 vswap();
3355 gbound();
3356 vswap();
3358 #endif
3359 rc = RC_INT;
3360 if (is_float(ft)) {
3361 rc = RC_FLOAT;
3362 #ifdef TCC_TARGET_X86_64
3363 if ((ft & VT_BTYPE) == VT_LDOUBLE) {
3364 rc = RC_ST0;
3365 } else if ((ft & VT_BTYPE) == VT_QFLOAT) {
3366 rc = RC_FRET;
3368 #endif
3370 r = gv(rc); /* generate value */
3371 /* if lvalue was saved on stack, must read it */
3372 if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
3373 SValue sv;
3374 t = get_reg(RC_INT);
3375 #if PTR_SIZE == 8
3376 sv.type.t = VT_PTR;
3377 #else
3378 sv.type.t = VT_INT;
3379 #endif
3380 sv.r = VT_LOCAL | VT_LVAL;
3381 sv.c.i = vtop[-1].c.i;
3382 load(t, &sv);
3383 vtop[-1].r = t | VT_LVAL;
3385 /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
3386 #if PTR_SIZE == 8
3387 if (((ft & VT_BTYPE) == VT_QLONG) || ((ft & VT_BTYPE) == VT_QFLOAT)) {
3388 int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
3389 #else
3390 if ((ft & VT_BTYPE) == VT_LLONG) {
3391 int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
3392 #endif
3393 vtop[-1].type.t = load_type;
3394 store(r, vtop - 1);
3395 vswap();
3396 /* convert to int to increment easily */
3397 vtop->type.t = addr_type;
3398 gaddrof();
3399 vpushi(load_size);
3400 gen_op('+');
3401 vtop->r |= VT_LVAL;
3402 vswap();
3403 vtop[-1].type.t = load_type;
3404 /* XXX: it works because r2 is spilled last ! */
3405 store(vtop->r2, vtop - 1);
3406 } else {
3407 store(r, vtop - 1);
3410 vswap();
3411 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
3412 vtop->r |= delayed_cast;
3416 /* pre/post increment and decrement; 'post' is non-zero for the postfix form, c is the token ++ or -- */
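/* The constant pushed below is c - TOK_MID, which evaluates to +1 for
   '++' and -1 for '--', so both operators share the gen_op('+') path. */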
3417 ST_FUNC void inc(int post, int c)
3419 test_lvalue();
3420 vdup(); /* save lvalue */
3421 if (post) {
3422 gv_dup(); /* duplicate value */
3423 vrotb(3);
3424 vrotb(3);
3426 /* add constant */
3427 vpushi(c - TOK_MID);
3428 gen_op('+');
3429 vstore(); /* store value */
3430 if (post)
3431 vpop(); /* if post op, return saved value */
3434 ST_FUNC void parse_mult_str (CString *astr, const char *msg)
3436 /* read the string */
3437 if (tok != TOK_STR)
3438 expect(msg);
3439 cstr_new(astr);
3440 while (tok == TOK_STR) {
3441 /* XXX: add \0 handling too ? */
3442 cstr_cat(astr, tokc.str.data, -1);
3443 next();
3445 cstr_ccat(astr, '\0');
3448 /* If I is >= 1 and a power of two, returns log2(i)+1.
3449 If I is 0 returns 0. */
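/* e.g. exact_log2p1(1) == 1, exact_log2p1(8) == 4, exact_log2p1(4096) == 13. */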
3450 static int exact_log2p1(int i)
3452 int ret;
3453 if (!i)
3454 return 0;
3455 for (ret = 1; i >= 1 << 8; ret += 8)
3456 i >>= 8;
3457 if (i >= 1 << 4)
3458 ret += 4, i >>= 4;
3459 if (i >= 1 << 2)
3460 ret += 2, i >>= 2;
3461 if (i >= 1 << 1)
3462 ret++;
3463 return ret;
3466 /* Parse __attribute__((...)) GNUC extension. */
3467 static void parse_attribute(AttributeDef *ad)
3469 int t, n;
3470 CString astr;
3472 redo:
3473 if (tok != TOK_ATTRIBUTE1 && tok != TOK_ATTRIBUTE2)
3474 return;
3475 next();
3476 skip('(');
3477 skip('(');
3478 while (tok != ')') {
3479 if (tok < TOK_IDENT)
3480 expect("attribute name");
3481 t = tok;
3482 next();
3483 switch(t) {
3484 case TOK_CLEANUP1:
3485 case TOK_CLEANUP2:
3487 Sym *s;
3489 skip('(');
3490 s = sym_find(tok);
3491 if (!s) {
3492 tcc_warning("implicit declaration of function '%s'",
3493 get_tok_str(tok, &tokc));
3494 s = external_global_sym(tok, &func_old_type);
3496 ad->cleanup_func = s;
3497 next();
3498 skip(')');
3499 break;
3501 case TOK_SECTION1:
3502 case TOK_SECTION2:
3503 skip('(');
3504 parse_mult_str(&astr, "section name");
3505 ad->section = find_section(tcc_state, (char *)astr.data);
3506 skip(')');
3507 cstr_free(&astr);
3508 break;
3509 case TOK_ALIAS1:
3510 case TOK_ALIAS2:
3511 skip('(');
3512 parse_mult_str(&astr, "alias(\"target\")");
3513 ad->alias_target = /* save string as token, for later */
3514 tok_alloc((char*)astr.data, astr.size-1)->tok;
3515 skip(')');
3516 cstr_free(&astr);
3517 break;
3518 case TOK_VISIBILITY1:
3519 case TOK_VISIBILITY2:
3520 skip('(');
3521 parse_mult_str(&astr,
3522 "visibility(\"default|hidden|internal|protected\")");
3523 if (!strcmp (astr.data, "default"))
3524 ad->a.visibility = STV_DEFAULT;
3525 else if (!strcmp (astr.data, "hidden"))
3526 ad->a.visibility = STV_HIDDEN;
3527 else if (!strcmp (astr.data, "internal"))
3528 ad->a.visibility = STV_INTERNAL;
3529 else if (!strcmp (astr.data, "protected"))
3530 ad->a.visibility = STV_PROTECTED;
3531 else
3532 expect("visibility(\"default|hidden|internal|protected\")");
3533 skip(')');
3534 cstr_free(&astr);
3535 break;
3536 case TOK_ALIGNED1:
3537 case TOK_ALIGNED2:
3538 if (tok == '(') {
3539 next();
3540 n = expr_const();
3541 if (n <= 0 || (n & (n - 1)) != 0)
3542 tcc_error("alignment must be a positive power of two");
3543 skip(')');
3544 } else {
3545 n = MAX_ALIGN;
3547 ad->a.aligned = exact_log2p1(n);
3548 if (n != 1 << (ad->a.aligned - 1))
3549 tcc_error("alignment of %d is larger than implemented", n);
3550 break;
3551 case TOK_PACKED1:
3552 case TOK_PACKED2:
3553 ad->a.packed = 1;
3554 break;
3555 case TOK_WEAK1:
3556 case TOK_WEAK2:
3557 ad->a.weak = 1;
3558 break;
3559 case TOK_UNUSED1:
3560 case TOK_UNUSED2:
3561 /* currently, no need to handle it because tcc does not
3562 track unused objects */
3563 break;
3564 case TOK_NORETURN1:
3565 case TOK_NORETURN2:
3566 ad->f.func_noreturn = 1;
3567 break;
3568 case TOK_CDECL1:
3569 case TOK_CDECL2:
3570 case TOK_CDECL3:
3571 ad->f.func_call = FUNC_CDECL;
3572 break;
3573 case TOK_STDCALL1:
3574 case TOK_STDCALL2:
3575 case TOK_STDCALL3:
3576 ad->f.func_call = FUNC_STDCALL;
3577 break;
3578 #ifdef TCC_TARGET_I386
3579 case TOK_REGPARM1:
3580 case TOK_REGPARM2:
3581 skip('(');
3582 n = expr_const();
3583 if (n > 3)
3584 n = 3;
3585 else if (n < 0)
3586 n = 0;
3587 if (n > 0)
3588 ad->f.func_call = FUNC_FASTCALL1 + n - 1;
3589 skip(')');
3590 break;
3591 case TOK_FASTCALL1:
3592 case TOK_FASTCALL2:
3593 case TOK_FASTCALL3:
3594 ad->f.func_call = FUNC_FASTCALLW;
3595 break;
3596 #endif
3597 case TOK_MODE:
3598 skip('(');
3599 switch(tok) {
3600 case TOK_MODE_DI:
3601 ad->attr_mode = VT_LLONG + 1;
3602 break;
3603 case TOK_MODE_QI:
3604 ad->attr_mode = VT_BYTE + 1;
3605 break;
3606 case TOK_MODE_HI:
3607 ad->attr_mode = VT_SHORT + 1;
3608 break;
3609 case TOK_MODE_SI:
3610 case TOK_MODE_word:
3611 ad->attr_mode = VT_INT + 1;
3612 break;
3613 default:
3614 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok, NULL));
3615 break;
3617 next();
3618 skip(')');
3619 break;
3620 case TOK_DLLEXPORT:
3621 ad->a.dllexport = 1;
3622 break;
3623 case TOK_NODECORATE:
3624 ad->a.nodecorate = 1;
3625 break;
3626 case TOK_DLLIMPORT:
3627 ad->a.dllimport = 1;
3628 break;
3629 default:
3630 if (tcc_state->warn_unsupported)
3631 tcc_warning("'%s' attribute ignored", get_tok_str(t, NULL));
3632 /* skip parameters */
3633 if (tok == '(') {
3634 int parenthesis = 0;
3635 do {
3636 if (tok == '(')
3637 parenthesis++;
3638 else if (tok == ')')
3639 parenthesis--;
3640 next();
3641 } while (parenthesis && tok != -1);
3643 break;
3645 if (tok != ',')
3646 break;
3647 next();
3649 skip(')');
3650 skip(')');
3651 goto redo;
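/* find a struct/union member by name, descending into anonymous
   struct/union members; the offsets of the enclosing anonymous members
   are accumulated in *cumofs. Returns NULL if the field is not found. */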
3654 static Sym * find_field (CType *type, int v, int *cumofs)
3656 Sym *s = type->ref;
3657 v |= SYM_FIELD;
3658 while ((s = s->next) != NULL) {
3659 if ((s->v & SYM_FIELD) &&
3660 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3661 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3662 Sym *ret = find_field (&s->type, v, cumofs);
3663 if (ret) {
3664 *cumofs += s->c;
3665 return ret;
3668 if (s->v == v)
3669 break;
3671 return s;
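/* lay out the fields of a struct/union: compute each field's byte offset
   (f->c) and, for bit-fields, the bit position/size encoded in the type,
   then record the overall size and alignment in type->ref->c and
   type->ref->r. */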
3674 static void struct_layout(CType *type, AttributeDef *ad)
3676 int size, align, maxalign, offset, c, bit_pos, bit_size;
3677 int packed, a, bt, prevbt, prev_bit_size;
3678 int pcc = !tcc_state->ms_bitfields;
3679 int pragma_pack = *tcc_state->pack_stack_ptr;
3680 Sym *f;
3682 maxalign = 1;
3683 offset = 0;
3684 c = 0;
3685 bit_pos = 0;
3686 prevbt = VT_STRUCT; /* make it never match */
3687 prev_bit_size = 0;
3689 //#define BF_DEBUG
3691 for (f = type->ref->next; f; f = f->next) {
3692 if (f->type.t & VT_BITFIELD)
3693 bit_size = BIT_SIZE(f->type.t);
3694 else
3695 bit_size = -1;
3696 size = type_size(&f->type, &align);
3697 a = f->a.aligned ? 1 << (f->a.aligned - 1) : 0;
3698 packed = 0;
3700 if (pcc && bit_size == 0) {
3701 /* in pcc mode, packing does not affect zero-width bitfields */
3703 } else {
3704 /* in pcc mode, attribute packed overrides if set. */
3705 if (pcc && (f->a.packed || ad->a.packed))
3706 align = packed = 1;
3708 /* pragma pack overrides align if smaller, and always packs bitfields */
3709 if (pragma_pack) {
3710 packed = 1;
3711 if (pragma_pack < align)
3712 align = pragma_pack;
3713 /* in pcc mode pragma pack also overrides individual align */
3714 if (pcc && pragma_pack < a)
3715 a = 0;
3718 /* some individual align was specified */
3719 if (a)
3720 align = a;
3722 if (type->ref->type.t == VT_UNION) {
3723 if (pcc && bit_size >= 0)
3724 size = (bit_size + 7) >> 3;
3725 offset = 0;
3726 if (size > c)
3727 c = size;
3729 } else if (bit_size < 0) {
3730 if (pcc)
3731 c += (bit_pos + 7) >> 3;
3732 c = (c + align - 1) & -align;
3733 offset = c;
3734 if (size > 0)
3735 c += size;
3736 bit_pos = 0;
3737 prevbt = VT_STRUCT;
3738 prev_bit_size = 0;
3740 } else {
3741 /* A bit-field. Layout is more complicated. There are two
3742 options: PCC (GCC) compatible and MS compatible */
3743 if (pcc) {
3744 /* In PCC layout a bit-field is placed adjacent to the
3745 preceding bit-fields, except if:
3746 - it has zero-width
3747 - an individual alignment was given
3748 - it would overflow its base type container and
3749 there is no packing */
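/* For illustration (default packing): in struct { int a:20; int b:16; }
   b does not fit into the 12 bits left in a's int container, so a new
   4-byte unit is started and the struct ends up 8 bytes large. */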
3750 if (bit_size == 0) {
3751 new_field:
3752 c = (c + ((bit_pos + 7) >> 3) + align - 1) & -align;
3753 bit_pos = 0;
3754 } else if (f->a.aligned) {
3755 goto new_field;
3756 } else if (!packed) {
3757 int a8 = align * 8;
3758 int ofs = ((c * 8 + bit_pos) % a8 + bit_size + a8 - 1) / a8;
3759 if (ofs > size / align)
3760 goto new_field;
3763 /* in pcc mode, long long bitfields have type int if they fit */
3764 if (size == 8 && bit_size <= 32)
3765 f->type.t = (f->type.t & ~VT_BTYPE) | VT_INT, size = 4;
3767 while (bit_pos >= align * 8)
3768 c += align, bit_pos -= align * 8;
3769 offset = c;
3771 /* In PCC layout named bit-fields influence the alignment
3772 of the containing struct using the base type's alignment,
3773 except for packed fields (which here have correct align). */
3774 if (f->v & SYM_FIRST_ANOM
3775 // && bit_size // ??? gcc on ARM/rpi does that
3777 align = 1;
3779 } else {
3780 bt = f->type.t & VT_BTYPE;
3781 if ((bit_pos + bit_size > size * 8)
3782 || (bit_size > 0) == (bt != prevbt)
3784 c = (c + align - 1) & -align;
3785 offset = c;
3786 bit_pos = 0;
3787 /* In MS bitfield mode a bit-field run always uses
3788 at least as many bits as the underlying type.
3789 To start a new run it's also required that this
3790 or the last bit-field had non-zero width. */
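/* For illustration (no #pragma pack): with ms_bitfields,
   struct { char a:4; int b:4; } occupies 8 bytes because the run is
   restarted when the base type changes from char to int. */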
3791 if (bit_size || prev_bit_size)
3792 c += size;
3794 /* In MS layout the record's alignment is normally
3795 influenced by the field, except for a zero-width
3796 field at the start of a run (but by further zero-width
3797 fields it is again). */
3798 if (bit_size == 0 && prevbt != bt)
3799 align = 1;
3800 prevbt = bt;
3801 prev_bit_size = bit_size;
3804 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
3805 | (bit_pos << VT_STRUCT_SHIFT);
3806 bit_pos += bit_size;
3808 if (align > maxalign)
3809 maxalign = align;
3811 #ifdef BF_DEBUG
3812 printf("set field %s offset %-2d size %-2d align %-2d",
3813 get_tok_str(f->v & ~SYM_FIELD, NULL), offset, size, align);
3814 if (f->type.t & VT_BITFIELD) {
3815 printf(" pos %-2d bits %-2d",
3816 BIT_POS(f->type.t),
3817 BIT_SIZE(f->type.t)
3820 printf("\n");
3821 #endif
3823 f->c = offset;
3824 f->r = 0;
3827 if (pcc)
3828 c += (bit_pos + 7) >> 3;
3830 /* store size and alignment */
3831 a = bt = ad->a.aligned ? 1 << (ad->a.aligned - 1) : 1;
3832 if (a < maxalign)
3833 a = maxalign;
3834 type->ref->r = a;
3835 if (pragma_pack && pragma_pack < maxalign && 0 == pcc) {
3836 /* can happen if an individual alignment for some member was given. In
3837 this case MSVC ignores maxalign when aligning the size */
3838 a = pragma_pack;
3839 if (a < bt)
3840 a = bt;
3842 c = (c + a - 1) & -a;
3843 type->ref->c = c;
3845 #ifdef BF_DEBUG
3846 printf("struct size %-2d align %-2d\n\n", c, a), fflush(stdout);
3847 #endif
3849 /* check whether we can access bitfields by their type */
3850 for (f = type->ref->next; f; f = f->next) {
3851 int s, px, cx, c0;
3852 CType t;
3854 if (0 == (f->type.t & VT_BITFIELD))
3855 continue;
3856 f->type.ref = f;
3857 f->auxtype = -1;
3858 bit_size = BIT_SIZE(f->type.t);
3859 if (bit_size == 0)
3860 continue;
3861 bit_pos = BIT_POS(f->type.t);
3862 size = type_size(&f->type, &align);
3863 if (bit_pos + bit_size <= size * 8 && f->c + size <= c)
3864 continue;
3866 /* try to access the field using a different type */
3867 c0 = -1, s = align = 1;
3868 for (;;) {
3869 px = f->c * 8 + bit_pos;
3870 cx = (px >> 3) & -align;
3871 px = px - (cx << 3);
3872 if (c0 == cx)
3873 break;
3874 s = (px + bit_size + 7) >> 3;
3875 if (s > 4) {
3876 t.t = VT_LLONG;
3877 } else if (s > 2) {
3878 t.t = VT_INT;
3879 } else if (s > 1) {
3880 t.t = VT_SHORT;
3881 } else {
3882 t.t = VT_BYTE;
3884 s = type_size(&t, &align);
3885 c0 = cx;
3888 if (px + bit_size <= s * 8 && cx + s <= c) {
3889 /* update offset and bit position */
3890 f->c = cx;
3891 bit_pos = px;
3892 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
3893 | (bit_pos << VT_STRUCT_SHIFT);
3894 if (s != size)
3895 f->auxtype = t.t;
3896 #ifdef BF_DEBUG
3897 printf("FIX field %s offset %-2d size %-2d align %-2d "
3898 "pos %-2d bits %-2d\n",
3899 get_tok_str(f->v & ~SYM_FIELD, NULL),
3900 cx, s, align, px, bit_size);
3901 #endif
3902 } else {
3903 /* fall back to load/store single-byte wise */
3904 f->auxtype = VT_STRUCT;
3905 #ifdef BF_DEBUG
3906 printf("FIX field %s : load byte-wise\n",
3907 get_tok_str(f->v & ~SYM_FIELD, NULL));
3908 #endif
3913 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
3914 static void struct_decl(CType *type, int u)
3916 int v, c, size, align, flexible;
3917 int bit_size, bsize, bt;
3918 Sym *s, *ss, **ps;
3919 AttributeDef ad, ad1;
3920 CType type1, btype;
3922 memset(&ad, 0, sizeof ad);
3923 next();
3924 parse_attribute(&ad);
3925 if (tok != '{') {
3926 v = tok;
3927 next();
3928 /* struct already defined ? return it */
3929 if (v < TOK_IDENT)
3930 expect("struct/union/enum name");
3931 s = struct_find(v);
3932 if (s && (s->sym_scope == local_scope || tok != '{')) {
3933 if (u == s->type.t)
3934 goto do_decl;
3935 if (u == VT_ENUM && IS_ENUM(s->type.t))
3936 goto do_decl;
3937 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
3939 } else {
3940 v = anon_sym++;
3942 /* Record the original enum/struct/union token. */
3943 type1.t = u == VT_ENUM ? u | VT_INT | VT_UNSIGNED : u;
3944 type1.ref = NULL;
3945 /* we put an undefined size for struct/union */
3946 s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
3947 s->r = 0; /* default alignment is zero, as in gcc */
3948 do_decl:
3949 type->t = s->type.t;
3950 type->ref = s;
3952 if (tok == '{') {
3953 next();
3954 if (s->c != -1)
3955 tcc_error("struct/union/enum already defined");
3956 s->c = -2;
3957 /* cannot be empty */
3958 /* empty enums are not allowed */
3959 ps = &s->next;
3960 if (u == VT_ENUM) {
3961 long long ll = 0, pl = 0, nl = 0;
3962 CType t;
3963 t.ref = s;
3964 /* enum symbols have static storage */
3965 t.t = VT_INT|VT_STATIC|VT_ENUM_VAL;
3966 for(;;) {
3967 v = tok;
3968 if (v < TOK_UIDENT)
3969 expect("identifier");
3970 ss = sym_find(v);
3971 if (ss && !local_stack)
3972 tcc_error("redefinition of enumerator '%s'",
3973 get_tok_str(v, NULL));
3974 next();
3975 if (tok == '=') {
3976 next();
3977 ll = expr_const64();
3979 ss = sym_push(v, &t, VT_CONST, 0);
3980 ss->enum_val = ll;
3981 *ps = ss, ps = &ss->next;
3982 if (ll < nl)
3983 nl = ll;
3984 if (ll > pl)
3985 pl = ll;
3986 if (tok != ',')
3987 break;
3988 next();
3989 ll++;
3990 /* NOTE: we accept a trailing comma */
3991 if (tok == '}')
3992 break;
3994 skip('}');
3995 /* set integral type of the enum */
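/* e.g. enum { A = 0x80000000 } gets an unsigned 32-bit type, while
   enum { B = -1, C = 0x80000000 } is promoted to a 64-bit type because
   both a negative value and a value above INT_MAX are present. */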
3996 t.t = VT_INT;
3997 if (nl >= 0) {
3998 if (pl != (unsigned)pl)
3999 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4000 t.t |= VT_UNSIGNED;
4001 } else if (pl != (int)pl || nl != (int)nl)
4002 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4003 s->type.t = type->t = t.t | VT_ENUM;
4004 s->c = 0;
4005 /* set type for enum members */
4006 for (ss = s->next; ss; ss = ss->next) {
4007 ll = ss->enum_val;
4008 if (ll == (int)ll) /* default is int if it fits */
4009 continue;
4010 if (t.t & VT_UNSIGNED) {
4011 ss->type.t |= VT_UNSIGNED;
4012 if (ll == (unsigned)ll)
4013 continue;
4015 ss->type.t = (ss->type.t & ~VT_BTYPE)
4016 | (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4018 } else {
4019 c = 0;
4020 flexible = 0;
4021 while (tok != '}') {
4022 if (!parse_btype(&btype, &ad1)) {
4023 skip(';');
4024 continue;
4026 while (1) {
4027 if (flexible)
4028 tcc_error("flexible array member '%s' not at the end of struct",
4029 get_tok_str(v, NULL));
4030 bit_size = -1;
4031 v = 0;
4032 type1 = btype;
4033 if (tok != ':') {
4034 if (tok != ';')
4035 type_decl(&type1, &ad1, &v, TYPE_DIRECT);
4036 if (v == 0) {
4037 if ((type1.t & VT_BTYPE) != VT_STRUCT)
4038 expect("identifier");
4039 else {
4040 int v = btype.ref->v;
4041 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
4042 if (tcc_state->ms_extensions == 0)
4043 expect("identifier");
4047 if (type_size(&type1, &align) < 0) {
4048 if ((u == VT_STRUCT) && (type1.t & VT_ARRAY) && c)
4049 flexible = 1;
4050 else
4051 tcc_error("field '%s' has incomplete type",
4052 get_tok_str(v, NULL));
4054 if ((type1.t & VT_BTYPE) == VT_FUNC ||
4055 (type1.t & VT_BTYPE) == VT_VOID ||
4056 (type1.t & VT_STORAGE))
4057 tcc_error("invalid type for '%s'",
4058 get_tok_str(v, NULL));
4060 if (tok == ':') {
4061 next();
4062 bit_size = expr_const();
4063 /* XXX: handle v = 0 case for messages */
4064 if (bit_size < 0)
4065 tcc_error("negative width in bit-field '%s'",
4066 get_tok_str(v, NULL));
4067 if (v && bit_size == 0)
4068 tcc_error("zero width for bit-field '%s'",
4069 get_tok_str(v, NULL));
4070 parse_attribute(&ad1);
4072 size = type_size(&type1, &align);
4073 if (bit_size >= 0) {
4074 bt = type1.t & VT_BTYPE;
4075 if (bt != VT_INT &&
4076 bt != VT_BYTE &&
4077 bt != VT_SHORT &&
4078 bt != VT_BOOL &&
4079 bt != VT_LLONG)
4080 tcc_error("bitfields must have scalar type");
4081 bsize = size * 8;
4082 if (bit_size > bsize) {
4083 tcc_error("width of '%s' exceeds its type",
4084 get_tok_str(v, NULL));
4085 } else if (bit_size == bsize
4086 && !ad.a.packed && !ad1.a.packed) {
4087 /* no need for bit fields */
4089 } else if (bit_size == 64) {
4090 tcc_error("field width 64 not implemented");
4091 } else {
4092 type1.t = (type1.t & ~VT_STRUCT_MASK)
4093 | VT_BITFIELD
4094 | (bit_size << (VT_STRUCT_SHIFT + 6));
4097 if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
4098 /* Remember we've seen a real field to check
4099 for placement of flexible array member. */
4100 c = 1;
4102 /* If member is a struct or bit-field, enforce
4103 placing into the struct (as anonymous). */
4104 if (v == 0 &&
4105 ((type1.t & VT_BTYPE) == VT_STRUCT ||
4106 bit_size >= 0)) {
4107 v = anon_sym++;
4109 if (v) {
4110 ss = sym_push(v | SYM_FIELD, &type1, 0, 0);
4111 ss->a = ad1.a;
4112 *ps = ss;
4113 ps = &ss->next;
4115 if (tok == ';' || tok == TOK_EOF)
4116 break;
4117 skip(',');
4119 skip(';');
4121 skip('}');
4122 parse_attribute(&ad);
4123 struct_layout(type, &ad);
4128 static void sym_to_attr(AttributeDef *ad, Sym *s)
4130 merge_symattr(&ad->a, &s->a);
4131 merge_funcattr(&ad->f, &s->f);
4134 /* Add type qualifiers to a type. If the type is an array then the qualifiers
4135 are added to the element type, copied because it could be a typedef. */
4136 static void parse_btype_qualify(CType *type, int qualifiers)
4138 while (type->t & VT_ARRAY) {
4139 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
4140 type = &type->ref->type;
4142 type->t |= qualifiers;
4145 /* return 0 if no type declaration. Otherwise, return the basic type
4146 and skip it.
4148 static int parse_btype(CType *type, AttributeDef *ad)
4150 int t, u, bt, st, type_found, typespec_found, g, n;
4151 Sym *s;
4152 CType type1;
4154 memset(ad, 0, sizeof(AttributeDef));
4155 type_found = 0;
4156 typespec_found = 0;
4157 t = VT_INT;
4158 bt = st = -1;
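/* bt tracks the base type keyword seen so far and st a short/long size
   modifier; -1 means none seen yet, and both are set to -2 after a
   typedef name so that further basic type keywords are rejected with
   "too many basic types". */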
4159 type->ref = NULL;
4161 while(1) {
4162 switch(tok) {
4163 case TOK_EXTENSION:
4164 /* currently, we really ignore extension */
4165 next();
4166 continue;
4168 /* basic types */
4169 case TOK_CHAR:
4170 u = VT_BYTE;
4171 basic_type:
4172 next();
4173 basic_type1:
4174 if (u == VT_SHORT || u == VT_LONG) {
4175 if (st != -1 || (bt != -1 && bt != VT_INT))
4176 tmbt: tcc_error("too many basic types");
4177 st = u;
4178 } else {
4179 if (bt != -1 || (st != -1 && u != VT_INT))
4180 goto tmbt;
4181 bt = u;
4183 if (u != VT_INT)
4184 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
4185 typespec_found = 1;
4186 break;
4187 case TOK_VOID:
4188 u = VT_VOID;
4189 goto basic_type;
4190 case TOK_SHORT:
4191 u = VT_SHORT;
4192 goto basic_type;
4193 case TOK_INT:
4194 u = VT_INT;
4195 goto basic_type;
4196 case TOK_ALIGNAS:
4197 { int n;
4198 AttributeDef ad1;
4199 next();
4200 skip('(');
4201 memset(&ad1, 0, sizeof(AttributeDef));
4202 if (parse_btype(&type1, &ad1)) {
4203 type_decl(&type1, &ad1, &n, TYPE_ABSTRACT);
4204 if (ad1.a.aligned)
4205 n = 1 << (ad1.a.aligned - 1);
4206 else
4207 type_size(&type1, &n);
4208 } else {
4209 n = expr_const();
4210 if (n <= 0 || (n & (n - 1)) != 0)
4211 tcc_error("alignment must be a positive power of two");
4213 skip(')');
4214 ad->a.aligned = exact_log2p1(n);
4216 continue;
4217 case TOK_LONG:
4218 if ((t & VT_BTYPE) == VT_DOUBLE) {
4219 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
4220 } else if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
4221 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LLONG;
4222 } else {
4223 u = VT_LONG;
4224 goto basic_type;
4226 next();
4227 break;
4228 #ifdef TCC_TARGET_ARM64
4229 case TOK_UINT128:
4230 /* GCC's __uint128_t appears in some Linux header files. Make it a
4231 synonym for long double to get the size and alignment right. */
4232 u = VT_LDOUBLE;
4233 goto basic_type;
4234 #endif
4235 case TOK_BOOL:
4236 u = VT_BOOL;
4237 goto basic_type;
4238 case TOK_FLOAT:
4239 u = VT_FLOAT;
4240 goto basic_type;
4241 case TOK_DOUBLE:
4242 if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
4243 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
4244 } else {
4245 u = VT_DOUBLE;
4246 goto basic_type;
4248 next();
4249 break;
4250 case TOK_ENUM:
4251 struct_decl(&type1, VT_ENUM);
4252 basic_type2:
4253 u = type1.t;
4254 type->ref = type1.ref;
4255 goto basic_type1;
4256 case TOK_STRUCT:
4257 struct_decl(&type1, VT_STRUCT);
4258 goto basic_type2;
4259 case TOK_UNION:
4260 struct_decl(&type1, VT_UNION);
4261 goto basic_type2;
4263 /* type modifiers */
4264 case TOK_CONST1:
4265 case TOK_CONST2:
4266 case TOK_CONST3:
4267 type->t = t;
4268 parse_btype_qualify(type, VT_CONSTANT);
4269 t = type->t;
4270 next();
4271 break;
4272 case TOK_VOLATILE1:
4273 case TOK_VOLATILE2:
4274 case TOK_VOLATILE3:
4275 type->t = t;
4276 parse_btype_qualify(type, VT_VOLATILE);
4277 t = type->t;
4278 next();
4279 break;
4280 case TOK_SIGNED1:
4281 case TOK_SIGNED2:
4282 case TOK_SIGNED3:
4283 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
4284 tcc_error("signed and unsigned modifier");
4285 t |= VT_DEFSIGN;
4286 next();
4287 typespec_found = 1;
4288 break;
4289 case TOK_REGISTER:
4290 case TOK_AUTO:
4291 case TOK_RESTRICT1:
4292 case TOK_RESTRICT2:
4293 case TOK_RESTRICT3:
4294 next();
4295 break;
4296 case TOK_UNSIGNED:
4297 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
4298 tcc_error("signed and unsigned modifier");
4299 t |= VT_DEFSIGN | VT_UNSIGNED;
4300 next();
4301 typespec_found = 1;
4302 break;
4304 /* storage */
4305 case TOK_EXTERN:
4306 g = VT_EXTERN;
4307 goto storage;
4308 case TOK_STATIC:
4309 g = VT_STATIC;
4310 goto storage;
4311 case TOK_TYPEDEF:
4312 g = VT_TYPEDEF;
4313 goto storage;
4314 storage:
4315 if (t & (VT_EXTERN|VT_STATIC|VT_TYPEDEF) & ~g)
4316 tcc_error("multiple storage classes");
4317 t |= g;
4318 next();
4319 break;
4320 case TOK_INLINE1:
4321 case TOK_INLINE2:
4322 case TOK_INLINE3:
4323 t |= VT_INLINE;
4324 next();
4325 break;
4326 case TOK_NORETURN3:
4327 /* the C11 _Noreturn specifier is accepted here but currently
4328 not recorded by tcc */
4329 next();
4330 break;
4331 /* GNUC attribute */
4332 case TOK_ATTRIBUTE1:
4333 case TOK_ATTRIBUTE2:
4334 parse_attribute(ad);
4335 if (ad->attr_mode) {
4336 u = ad->attr_mode -1;
4337 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
4339 continue;
4340 /* GNUC typeof */
4341 case TOK_TYPEOF1:
4342 case TOK_TYPEOF2:
4343 case TOK_TYPEOF3:
4344 next();
4345 parse_expr_type(&type1);
4346 /* remove all storage modifiers except typedef */
4347 type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
4348 if (type1.ref)
4349 sym_to_attr(ad, type1.ref);
4350 goto basic_type2;
4351 default:
4352 if (typespec_found)
4353 goto the_end;
4354 s = sym_find(tok);
4355 if (!s || !(s->type.t & VT_TYPEDEF))
4356 goto the_end;
4358 n = tok, next();
4359 if (tok == ':' && !in_generic) {
4360 /* ignore if it's a label */
4361 unget_tok(n);
4362 goto the_end;
4365 t &= ~(VT_BTYPE|VT_LONG);
4366 u = t & ~(VT_CONSTANT | VT_VOLATILE), t ^= u;
4367 type->t = (s->type.t & ~VT_TYPEDEF) | u;
4368 type->ref = s->type.ref;
4369 if (t)
4370 parse_btype_qualify(type, t);
4371 t = type->t;
4372 /* get attributes from typedef */
4373 sym_to_attr(ad, s);
4374 typespec_found = 1;
4375 st = bt = -2;
4376 break;
4378 type_found = 1;
4380 the_end:
4381 if (tcc_state->char_is_unsigned) {
4382 if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
4383 t |= VT_UNSIGNED;
4385 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
4386 bt = t & (VT_BTYPE|VT_LONG);
4387 if (bt == VT_LONG)
4388 t |= LONG_SIZE == 8 ? VT_LLONG : VT_INT;
4389 #ifdef TCC_TARGET_PE
4390 if (bt == VT_LDOUBLE)
4391 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_DOUBLE;
4392 #endif
4393 type->t = t;
4394 return type_found;
4397 /* convert a function parameter type (array to pointer and function to
4398 function pointer) */
4399 static inline void convert_parameter_type(CType *pt)
4401 /* remove const and volatile qualifiers (XXX: const could be used
4402 to indicate a const function parameter) */
4403 pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
4404 /* array must be transformed to pointer according to ANSI C */
4405 pt->t &= ~VT_ARRAY;
4406 if ((pt->t & VT_BTYPE) == VT_FUNC) {
4407 mk_pointer(pt);
4411 ST_FUNC void parse_asm_str(CString *astr)
4413 skip('(');
4414 parse_mult_str(astr, "string constant");
4417 /* Parse an asm label and return the token */
4418 static int asm_label_instr(void)
4420 int v;
4421 CString astr;
4423 next();
4424 parse_asm_str(&astr);
4425 skip(')');
4426 #ifdef ASM_DEBUG
4427 printf("asm_alias: \"%s\"\n", (char *)astr.data);
4428 #endif
4429 v = tok_alloc(astr.data, astr.size - 1)->tok;
4430 cstr_free(&astr);
4431 return v;
4434 static int post_type(CType *type, AttributeDef *ad, int storage, int td)
4436 int n, l, t1, arg_size, align, unused_align;
4437 Sym **plast, *s, *first;
4438 AttributeDef ad1;
4439 CType pt;
4441 if (tok == '(') {
4442 /* function type, or recursive declarator (return if so) */
4443 next();
4444 if (td && !(td & TYPE_ABSTRACT))
4445 return 0;
4446 if (tok == ')')
4447 l = 0;
4448 else if (parse_btype(&pt, &ad1))
4449 l = FUNC_NEW;
4450 else if (td) {
4451 merge_attr (ad, &ad1);
4452 return 0;
4453 } else
4454 l = FUNC_OLD;
4455 first = NULL;
4456 plast = &first;
4457 arg_size = 0;
4458 if (l) {
4459 for(;;) {
4460 /* read param name and compute offset */
4461 if (l != FUNC_OLD) {
4462 if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
4463 break;
4464 type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT);
4465 if ((pt.t & VT_BTYPE) == VT_VOID)
4466 tcc_error("parameter declared as void");
4467 } else {
4468 n = tok;
4469 if (n < TOK_UIDENT)
4470 expect("identifier");
4471 pt.t = VT_VOID; /* invalid type */
4472 next();
4474 convert_parameter_type(&pt);
4475 arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
4476 s = sym_push(n | SYM_FIELD, &pt, 0, 0);
4477 *plast = s;
4478 plast = &s->next;
4479 if (tok == ')')
4480 break;
4481 skip(',');
4482 if (l == FUNC_NEW && tok == TOK_DOTS) {
4483 l = FUNC_ELLIPSIS;
4484 next();
4485 break;
4487 if (l == FUNC_NEW && !parse_btype(&pt, &ad1))
4488 tcc_error("invalid type");
4490 } else
4491 /* if no parameters, then old type prototype */
4492 l = FUNC_OLD;
4493 skip(')');
4494 /* NOTE: const is ignored in returned type as it has a special
4495 meaning in gcc / C++ */
4496 type->t &= ~VT_CONSTANT;
4497 /* some ancient pre-K&R C allows a function to return an array
4498 and the array brackets to be put after the arguments, such
4499 that "int c()[]" means something like "int[] c()" */
4500 if (tok == '[') {
4501 next();
4502 skip(']'); /* only handle simple "[]" */
4503 mk_pointer(type);
4505 /* we push an anonymous symbol which will contain the function prototype */
4506 ad->f.func_args = arg_size;
4507 ad->f.func_type = l;
4508 s = sym_push(SYM_FIELD, type, 0, 0);
4509 s->a = ad->a;
4510 s->f = ad->f;
4511 s->next = first;
4512 type->t = VT_FUNC;
4513 type->ref = s;
4514 } else if (tok == '[') {
4515 int saved_nocode_wanted = nocode_wanted;
4516 /* array definition */
4517 next();
4518 while (1) {
4519 /* XXX The optional type-quals and static should only be accepted
4520 in parameter decls. The '*' as well, and then even only
4521 in prototypes (not function defs). */
4522 switch (tok) {
4523 case TOK_RESTRICT1: case TOK_RESTRICT2: case TOK_RESTRICT3:
4524 case TOK_CONST1:
4525 case TOK_VOLATILE1:
4526 case TOK_STATIC:
4527 case '*':
4528 next();
4529 continue;
4530 default:
4531 break;
4533 break;
4535 n = -1;
4536 t1 = 0;
4537 if (tok != ']') {
4538 if (!local_stack || (storage & VT_STATIC))
4539 vpushi(expr_const());
4540 else {
4541 /* The length of a VLA (which can only occur with local_stack
4542 && !VT_STATIC) must always be evaluated, even under nocode_wanted,
4543 so that its size slot is initialized (e.g. under sizeof
4544 or typeof). */
4545 nocode_wanted = 0;
4546 gexpr();
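/* Illustration (not from this file): in
       void f(int n) { int a[n]; int s = (int)sizeof(int[n + 1]); }
   the length expression n + 1 must still be evaluated here, even when
   the surrounding code is otherwise suppressed, so that the VLA's size
   slot gets initialized. */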
4548 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4549 n = vtop->c.i;
4550 if (n < 0)
4551 tcc_error("invalid array size");
4552 } else {
4553 if (!is_integer_btype(vtop->type.t & VT_BTYPE))
4554 tcc_error("size of variable length array should be an integer");
4555 n = 0;
4556 t1 = VT_VLA;
4559 skip(']');
4560 /* parse next post type */
4561 post_type(type, ad, storage, 0);
4563 if ((type->t & VT_BTYPE) == VT_FUNC)
4564 tcc_error("declaration of an array of functions");
4565 if ((type->t & VT_BTYPE) == VT_VOID
4566 || type_size(type, &unused_align) < 0)
4567 tcc_error("declaration of an array of incomplete type elements");
4569 t1 |= type->t & VT_VLA;
4571 if (t1 & VT_VLA) {
4572 if (n < 0)
4573 tcc_error("need explicit inner array size in VLAs");
4574 loc -= type_size(&int_type, &align);
4575 loc &= -align;
4576 n = loc;
4578 vla_runtime_type_size(type, &align);
4579 gen_op('*');
4580 vset(&int_type, VT_LOCAL|VT_LVAL, n);
4581 vswap();
4582 vstore();
4584 if (n != -1)
4585 vpop();
4586 nocode_wanted = saved_nocode_wanted;
4588 /* we push an anonymous symbol which will contain the array
4589 element type */
4590 s = sym_push(SYM_FIELD, type, 0, n);
4591 type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
4592 type->ref = s;
4594 return 1;
4597 /* Parse a type declarator (except basic type), and return the type
4598 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4599 expected. 'type' should contain the basic type. 'ad' is the
4600 attribute definition of the basic type. It can be modified by
4601 type_decl(). If this (possibly abstract) declarator is a pointer chain
4602 it returns the innermost pointed to type (equals *type, but is a different
4603 pointer); otherwise it returns 'type' itself, which is used for recursive calls. */
4604 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td)
4606 CType *post, *ret;
4607 int qualifiers, storage;
4609 /* recursive type, remove storage bits first, apply them later again */
4610 storage = type->t & VT_STORAGE;
4611 type->t &= ~VT_STORAGE;
4612 post = ret = type;
4614 while (tok == '*') {
4615 qualifiers = 0;
4616 redo:
4617 next();
4618 switch(tok) {
4619 case TOK_CONST1:
4620 case TOK_CONST2:
4621 case TOK_CONST3:
4622 qualifiers |= VT_CONSTANT;
4623 goto redo;
4624 case TOK_VOLATILE1:
4625 case TOK_VOLATILE2:
4626 case TOK_VOLATILE3:
4627 qualifiers |= VT_VOLATILE;
4628 goto redo;
4629 case TOK_RESTRICT1:
4630 case TOK_RESTRICT2:
4631 case TOK_RESTRICT3:
4632 goto redo;
4633 /* XXX: clarify attribute handling */
4634 case TOK_ATTRIBUTE1:
4635 case TOK_ATTRIBUTE2:
4636 parse_attribute(ad);
4637 break;
4639 mk_pointer(type);
4640 type->t |= qualifiers;
4641 if (ret == type)
4642 /* innermost pointed to type is the one for the first derivation */
4643 ret = pointed_type(type);
4646 if (tok == '(') {
4647 /* This is possibly a parameter type list for abstract declarators
4648 ('int ()'), use post_type for testing this. */
4649 if (!post_type(type, ad, 0, td)) {
4650 /* It's not, so it's a nested declarator, and the post operations
4651 apply to the innermost pointed to type (if any). */
4652 /* XXX: this is not correct to modify 'ad' at this point, but
4653 the syntax is not clear */
4654 parse_attribute(ad);
4655 post = type_decl(type, ad, v, td);
4656 skip(')');
4657 } else
4658 goto abstract;
4659 } else if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
4660 /* type identifier */
4661 *v = tok;
4662 next();
4663 } else {
4664 abstract:
4665 if (!(td & TYPE_ABSTRACT))
4666 expect("identifier");
4667 *v = 0;
4669 post_type(post, ad, storage, 0);
4670 parse_attribute(ad);
4671 type->t |= storage;
4672 return ret;
4675 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
4676 ST_FUNC int lvalue_type(int t)
4678 int bt, r;
4679 r = VT_LVAL;
4680 bt = t & VT_BTYPE;
4681 if (bt == VT_BYTE || bt == VT_BOOL)
4682 r |= VT_LVAL_BYTE;
4683 else if (bt == VT_SHORT)
4684 r |= VT_LVAL_SHORT;
4685 else
4686 return r;
4687 if (t & VT_UNSIGNED)
4688 r |= VT_LVAL_UNSIGNED;
4689 return r;
4692 /* indirection with full error checking and bound check */
4693 ST_FUNC void indir(void)
4695 if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
4696 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
4697 return;
4698 expect("pointer");
4700 if (vtop->r & VT_LVAL)
4701 gv(RC_INT);
4702 vtop->type = *pointed_type(&vtop->type);
4703 /* Arrays and functions are never lvalues */
4704 if (!(vtop->type.t & VT_ARRAY) && !(vtop->type.t & VT_VLA)
4705 && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
4706 vtop->r |= lvalue_type(vtop->type.t);
4707 /* if bound checking, the referenced pointer must be checked */
4708 #ifdef CONFIG_TCC_BCHECK
4709 if (tcc_state->do_bounds_check)
4710 vtop->r |= VT_MUSTBOUND;
4711 #endif
4715 /* pass a parameter to a function and do type checking and casting */
4716 static void gfunc_param_typed(Sym *func, Sym *arg)
4718 int func_type;
4719 CType type;
4721 func_type = func->f.func_type;
4722 if (func_type == FUNC_OLD ||
4723 (func_type == FUNC_ELLIPSIS && arg == NULL)) {
4724 /* default casting: only need to convert float to double */
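/* Illustration (not from this file): in a call to a variadic or
   old-style function such as
       printf("%f", 1.0f);
   the float argument is promoted to double by this default promotion. */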
4725 if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
4726 gen_cast_s(VT_DOUBLE);
4727 } else if (vtop->type.t & VT_BITFIELD) {
4728 type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
4729 type.ref = vtop->type.ref;
4730 gen_cast(&type);
4732 } else if (arg == NULL) {
4733 tcc_error("too many arguments to function");
4734 } else {
4735 type = arg->type;
4736 type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
4737 gen_assign_cast(&type);
4741 /* parse an expression and return its type without any side effect. */
4742 static void expr_type(CType *type, void (*expr_fn)(void))
4744 nocode_wanted++;
4745 expr_fn();
4746 *type = vtop->type;
4747 vpop();
4748 nocode_wanted--;
4751 /* parse an expression of the form '(type)' or '(expr)' and return its
4752 type */
4753 static void parse_expr_type(CType *type)
4755 int n;
4756 AttributeDef ad;
4758 skip('(');
4759 if (parse_btype(type, &ad)) {
4760 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4761 } else {
4762 expr_type(type, gexpr);
4764 skip(')');
4767 static void parse_type(CType *type)
4769 AttributeDef ad;
4770 int n;
4772 if (!parse_btype(type, &ad)) {
4773 expect("type");
4775 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4778 static void parse_builtin_params(int nc, const char *args)
4780 char c, sep = '(';
4781 CType t;
4782 if (nc)
4783 nocode_wanted++;
4784 next();
4785 while ((c = *args++)) {
4786 skip(sep);
4787 sep = ',';
4788 switch (c) {
4789 case 'e': expr_eq(); continue;
4790 case 't': parse_type(&t); vpush(&t); continue;
4791 default: tcc_error("internal error"); break;
4794 skip(')');
4795 if (nc)
4796 nocode_wanted--;
4799 static void try_call_scope_cleanup(Sym *stop)
4801 Sym *cls = current_cleanups;
4803 for (; cls != stop; cls = cls->ncl) {
4804 Sym *fs = cls->next;
4805 Sym *vs = cls->prev_tok;
4807 vpushsym(&fs->type, fs);
4808 vset(&vs->type, vs->r, vs->c);
4809 vtop->sym = vs;
4810 mk_pointer(&vtop->type);
4811 gaddrof();
4812 gfunc_call(1);
4816 static void try_call_cleanup_goto(Sym *cleanupstate)
4818 Sym *oc, *cc;
4819 int ocd, ccd;
4821 if (!current_cleanups)
4822 return;
4824 /* search NCA of both cleanup chains given parents and initial depth */
4825 ocd = cleanupstate ? cleanupstate->v & ~SYM_FIELD : 0;
4826 for (ccd = ncleanups, oc = cleanupstate; ocd > ccd; --ocd, oc = oc->ncl)
4828 for (cc = current_cleanups; ccd > ocd; --ccd, cc = cc->ncl)
4830 for (; cc != oc; cc = cc->ncl, oc = oc->ncl, --ccd)
4833 try_call_scope_cleanup(cc);
4836 ST_FUNC void unary(void)
4838 int n, t, align, size, r, sizeof_caller;
4839 CType type;
4840 Sym *s;
4841 AttributeDef ad;
4843 sizeof_caller = in_sizeof;
4844 in_sizeof = 0;
4845 type.ref = NULL;
4846 /* XXX: GCC 2.95.3 does not generate a jump table for this switch,
4847 although it would be better here */
4848 tok_next:
4849 switch(tok) {
4850 case TOK_EXTENSION:
4851 next();
4852 goto tok_next;
4853 case TOK_LCHAR:
4854 #ifdef TCC_TARGET_PE
4855 t = VT_SHORT|VT_UNSIGNED;
4856 goto push_tokc;
4857 #endif
4858 case TOK_CINT:
4859 case TOK_CCHAR:
4860 t = VT_INT;
4861 push_tokc:
4862 type.t = t;
4863 vsetc(&type, VT_CONST, &tokc);
4864 next();
4865 break;
4866 case TOK_CUINT:
4867 t = VT_INT | VT_UNSIGNED;
4868 goto push_tokc;
4869 case TOK_CLLONG:
4870 t = VT_LLONG;
4871 goto push_tokc;
4872 case TOK_CULLONG:
4873 t = VT_LLONG | VT_UNSIGNED;
4874 goto push_tokc;
4875 case TOK_CFLOAT:
4876 t = VT_FLOAT;
4877 goto push_tokc;
4878 case TOK_CDOUBLE:
4879 t = VT_DOUBLE;
4880 goto push_tokc;
4881 case TOK_CLDOUBLE:
4882 t = VT_LDOUBLE;
4883 goto push_tokc;
4884 case TOK_CLONG:
4885 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG;
4886 goto push_tokc;
4887 case TOK_CULONG:
4888 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG | VT_UNSIGNED;
4889 goto push_tokc;
4890 case TOK___FUNCTION__:
4891 if (!gnu_ext)
4892 goto tok_identifier;
4893 /* fall thru */
4894 case TOK___FUNC__:
4896 void *ptr;
4897 int len;
4898 /* special function name identifier */
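/* Illustration (not from this file): inside
       void f(void) { puts(__func__); }
   the identifier yields the array "f" built here from funcname. */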
4899 len = strlen(funcname) + 1;
4900 /* generate char[len] type */
4901 type.t = VT_BYTE;
4902 mk_pointer(&type);
4903 type.t |= VT_ARRAY;
4904 type.ref->c = len;
4905 vpush_ref(&type, data_section, data_section->data_offset, len);
4906 if (!NODATA_WANTED) {
4907 ptr = section_ptr_add(data_section, len);
4908 memcpy(ptr, funcname, len);
4910 next();
4912 break;
4913 case TOK_LSTR:
4914 #ifdef TCC_TARGET_PE
4915 t = VT_SHORT | VT_UNSIGNED;
4916 #else
4917 t = VT_INT;
4918 #endif
4919 goto str_init;
4920 case TOK_STR:
4921 /* string parsing */
4922 t = VT_BYTE;
4923 if (tcc_state->char_is_unsigned)
4924 t = VT_BYTE | VT_UNSIGNED;
4925 str_init:
4926 if (tcc_state->warn_write_strings)
4927 t |= VT_CONSTANT;
4928 type.t = t;
4929 mk_pointer(&type);
4930 type.t |= VT_ARRAY;
4931 memset(&ad, 0, sizeof(AttributeDef));
4932 decl_initializer_alloc(&type, &ad, VT_CONST, 2, 0, 0);
4933 break;
4934 case '(':
4935 next();
4936 /* cast ? */
4937 if (parse_btype(&type, &ad)) {
4938 type_decl(&type, &ad, &n, TYPE_ABSTRACT);
4939 skip(')');
4940 /* check ISOC99 compound literal */
4941 if (tok == '{') {
4942 /* data is allocated locally by default */
4943 if (global_expr)
4944 r = VT_CONST;
4945 else
4946 r = VT_LOCAL;
4947 /* all except arrays are lvalues */
4948 if (!(type.t & VT_ARRAY))
4949 r |= lvalue_type(type.t);
4950 memset(&ad, 0, sizeof(AttributeDef));
4951 decl_initializer_alloc(&type, &ad, r, 1, 0, 0);
4952 } else {
4953 if (sizeof_caller) {
4954 vpush(&type);
4955 return;
4957 unary();
4958 gen_cast(&type);
4960 } else if (tok == '{') {
4961 int saved_nocode_wanted = nocode_wanted;
4962 if (const_wanted)
4963 tcc_error("expected constant");
4964 /* save all registers */
4965 save_regs(0);
4966 /* statement expression: unlike GCC, we do not accept
4967 break/continue inside it. We do retain the nocode_wanted state,
4968 as statement expressions can't ever be entered from the
4969 outside, so any reactivation of code emission (from labels
4970 or loop heads) can be disabled again after the end of it. */
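/* Illustration (not from this file) of such a statement expression
   (GNU extension), assuming some previously declared function f():
       int y = ({ int t = f(); t + 1; });
   */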
4971 block(NULL, NULL, NULL, NULL, 1);
4972 nocode_wanted = saved_nocode_wanted;
4973 skip(')');
4974 } else {
4975 gexpr();
4976 skip(')');
4978 break;
4979 case '*':
4980 next();
4981 unary();
4982 indir();
4983 break;
4984 case '&':
4985 next();
4986 unary();
4987 /* function names must be treated as function pointers,
4988 except for unary '&' and sizeof. Since we consider that
4989 functions are not lvalues, we only have to handle it
4990 there and in function calls. */
4991 /* arrays can also be used although they are not lvalues */
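/* Illustration (not from this file): given
       int a[4]; int g(void);
   both  int (*pa)[4] = &a;  and  int (*pg)(void) = &g;  are accepted
   here without the usual lvalue test. */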
4992 if ((vtop->type.t & VT_BTYPE) != VT_FUNC &&
4993 !(vtop->type.t & VT_ARRAY))
4994 test_lvalue();
4995 mk_pointer(&vtop->type);
4996 gaddrof();
4997 break;
4998 case '!':
4999 next();
5000 unary();
5001 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5002 gen_cast_s(VT_BOOL);
5003 vtop->c.i = !vtop->c.i;
5004 } else if ((vtop->r & VT_VALMASK) == VT_CMP)
5005 vtop->c.i ^= 1;
5006 else {
5007 save_regs(1);
5008 vseti(VT_JMP, gvtst(1, 0));
5010 break;
5011 case '~':
5012 next();
5013 unary();
5014 vpushi(-1);
5015 gen_op('^');
5016 break;
5017 case '+':
5018 next();
5019 unary();
5020 if ((vtop->type.t & VT_BTYPE) == VT_PTR)
5021 tcc_error("pointer not accepted for unary plus");
5022 /* In order to force a cast, we add zero, except for floating point
5023 where we really need a no-op (otherwise -0.0 would be transformed
5024 into +0.0). */
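/* Illustration (not from this file): with
       double d = -0.0;
   the value of  +d  must still be -0.0, so no addition is emitted for
   floating point operands. */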
5025 if (!is_float(vtop->type.t)) {
5026 vpushi(0);
5027 gen_op('+');
5029 break;
5030 case TOK_SIZEOF:
5031 case TOK_ALIGNOF1:
5032 case TOK_ALIGNOF2:
5033 case TOK_ALIGNOF3:
5034 t = tok;
5035 next();
5036 in_sizeof++;
5037 expr_type(&type, unary); /* unary() performs the in_sizeof = 0 */
5038 s = vtop[1].sym; /* hack: accessing previous vtop */
5039 size = type_size(&type, &align);
5040 if (s && s->a.aligned)
5041 align = 1 << (s->a.aligned - 1);
5042 if (t == TOK_SIZEOF) {
5043 if (!(type.t & VT_VLA)) {
5044 if (size < 0)
5045 tcc_error("sizeof applied to an incomplete type");
5046 vpushs(size);
5047 } else {
5048 vla_runtime_type_size(&type, &align);
5050 } else {
5051 vpushs(align);
5053 vtop->type.t |= VT_UNSIGNED;
5054 break;
5056 case TOK_builtin_expect:
5057 /* __builtin_expect is a no-op for now */
5058 parse_builtin_params(0, "ee");
5059 vpop();
5060 break;
5061 case TOK_builtin_types_compatible_p:
5062 parse_builtin_params(0, "tt");
5063 vtop[-1].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
5064 vtop[0].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
5065 n = is_compatible_types(&vtop[-1].type, &vtop[0].type);
5066 vtop -= 2;
5067 vpushi(n);
5068 break;
5069 case TOK_builtin_choose_expr:
5071 int64_t c;
5072 next();
5073 skip('(');
5074 c = expr_const64();
5075 skip(',');
5076 if (!c) {
5077 nocode_wanted++;
5079 expr_eq();
5080 if (!c) {
5081 vpop();
5082 nocode_wanted--;
5084 skip(',');
5085 if (c) {
5086 nocode_wanted++;
5088 expr_eq();
5089 if (c) {
5090 vpop();
5091 nocode_wanted--;
5093 skip(')');
5095 break;
5096 case TOK_builtin_constant_p:
5097 parse_builtin_params(1, "e");
5098 n = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
5099 vtop--;
5100 vpushi(n);
5101 break;
5102 case TOK_builtin_frame_address:
5103 case TOK_builtin_return_address:
5105 int tok1 = tok;
5106 int level;
5107 next();
5108 skip('(');
5109 if (tok != TOK_CINT) {
5110 tcc_error("%s only takes positive integers",
5111 tok1 == TOK_builtin_return_address ?
5112 "__builtin_return_address" :
5113 "__builtin_frame_address");
5115 level = (uint32_t)tokc.i;
5116 next();
5117 skip(')');
5118 type.t = VT_VOID;
5119 mk_pointer(&type);
5120 vset(&type, VT_LOCAL, 0); /* local frame */
5121 while (level--) {
5122 mk_pointer(&vtop->type);
5123 indir(); /* -> parent frame */
5125 if (tok1 == TOK_builtin_return_address) {
5126 // assume return address is just above frame pointer on stack
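// Illustration (not from this file): a call such as
//     void *ra = __builtin_return_address(0);
// reads the word just above the current frame pointer; a nonzero level
// first follows that many saved frame pointers, as implemented above.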
5127 vpushi(PTR_SIZE);
5128 gen_op('+');
5129 mk_pointer(&vtop->type);
5130 indir();
5133 break;
5134 #ifdef TCC_TARGET_X86_64
5135 #ifdef TCC_TARGET_PE
5136 case TOK_builtin_va_start:
5137 parse_builtin_params(0, "ee");
5138 r = vtop->r & VT_VALMASK;
5139 if (r == VT_LLOCAL)
5140 r = VT_LOCAL;
5141 if (r != VT_LOCAL)
5142 tcc_error("__builtin_va_start expects a local variable");
5143 vtop->r = r;
5144 vtop->type = char_pointer_type;
5145 vtop->c.i += 8;
5146 vstore();
5147 break;
5148 #else
5149 case TOK_builtin_va_arg_types:
5150 parse_builtin_params(0, "t");
5151 vpushi(classify_x86_64_va_arg(&vtop->type));
5152 vswap();
5153 vpop();
5154 break;
5155 #endif
5156 #endif
5158 #ifdef TCC_TARGET_ARM64
5159 case TOK___va_start: {
5160 parse_builtin_params(0, "ee");
5161 //xx check types
5162 gen_va_start();
5163 vpushi(0);
5164 vtop->type.t = VT_VOID;
5165 break;
5167 case TOK___va_arg: {
5168 parse_builtin_params(0, "et");
5169 type = vtop->type;
5170 vpop();
5171 //xx check types
5172 gen_va_arg(&type);
5173 vtop->type = type;
5174 break;
5176 case TOK___arm64_clear_cache: {
5177 parse_builtin_params(0, "ee");
5178 gen_clear_cache();
5179 vpushi(0);
5180 vtop->type.t = VT_VOID;
5181 break;
5183 #endif
5184 /* pre operations */
5185 case TOK_INC:
5186 case TOK_DEC:
5187 t = tok;
5188 next();
5189 unary();
5190 inc(0, t);
5191 break;
5192 case '-':
5193 next();
5194 unary();
5195 t = vtop->type.t & VT_BTYPE;
5196 if (is_float(t)) {
5197 /* In IEEE negate(x) isn't subtract(0,x), but rather
5198 subtract(-0, x). */
5199 vpush(&vtop->type);
5200 if (t == VT_FLOAT)
5201 vtop->c.f = -1.0 * 0.0;
5202 else if (t == VT_DOUBLE)
5203 vtop->c.d = -1.0 * 0.0;
5204 else
5205 vtop->c.ld = -1.0 * 0.0;
5206 } else
5207 vpushi(0);
5208 vswap();
5209 gen_op('-');
5210 break;
5211 case TOK_LAND:
5212 if (!gnu_ext)
5213 goto tok_identifier;
5214 next();
5215 /* allow taking the address of a label (GNU extension) */
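/* Illustration (not from this file), using the GNU labels-as-values
   extension:
       void *p = &&lbl;
       goto *p;
   lbl: ;
   */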
5216 if (tok < TOK_UIDENT)
5217 expect("label identifier");
5218 s = label_find(tok);
5219 if (!s) {
5220 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
5221 } else {
5222 if (s->r == LABEL_DECLARED)
5223 s->r = LABEL_FORWARD;
5225 if (!s->type.t) {
5226 s->type.t = VT_VOID;
5227 mk_pointer(&s->type);
5228 s->type.t |= VT_STATIC;
5230 vpushsym(&s->type, s);
5231 next();
5232 break;
5234 case TOK_GENERIC:
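/* Illustration (not from this file) of the C11 construct parsed below,
   assuming some variable x:
       const char *s = _Generic(x, int: "int", float: "float",
                                   default: "other");
   */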
5236 CType controlling_type;
5237 int has_default = 0;
5238 int has_match = 0;
5239 int learn = 0;
5240 TokenString *str = NULL;
5241 int saved_const_wanted = const_wanted;
5243 next();
5244 skip('(');
5245 const_wanted = 0;
5246 expr_type(&controlling_type, expr_eq);
5247 controlling_type.t &= ~(VT_CONSTANT | VT_VOLATILE | VT_ARRAY);
5248 if ((controlling_type.t & VT_BTYPE) == VT_FUNC)
5249 mk_pointer(&controlling_type);
5250 const_wanted = saved_const_wanted;
5251 for (;;) {
5252 learn = 0;
5253 skip(',');
5254 if (tok == TOK_DEFAULT) {
5255 if (has_default)
5256 tcc_error("too many 'default'");
5257 has_default = 1;
5258 if (!has_match)
5259 learn = 1;
5260 next();
5261 } else {
5262 AttributeDef ad_tmp;
5263 int itmp;
5264 CType cur_type;
5266 in_generic++;
5267 parse_btype(&cur_type, &ad_tmp);
5268 in_generic--;
5270 type_decl(&cur_type, &ad_tmp, &itmp, TYPE_ABSTRACT);
5271 if (compare_types(&controlling_type, &cur_type, 0)) {
5272 if (has_match) {
5273 tcc_error("type match twice");
5275 has_match = 1;
5276 learn = 1;
5279 skip(':');
5280 if (learn) {
5281 if (str)
5282 tok_str_free(str);
5283 skip_or_save_block(&str);
5284 } else {
5285 skip_or_save_block(NULL);
5287 if (tok == ')')
5288 break;
5290 if (!str) {
5291 char buf[60];
5292 type_to_str(buf, sizeof buf, &controlling_type, NULL);
5293 tcc_error("type '%s' does not match any association", buf);
5295 begin_macro(str, 1);
5296 next();
5297 expr_eq();
5298 if (tok != TOK_EOF)
5299 expect(",");
5300 end_macro();
5301 next();
5302 break;
5304 // special qNaN, sNaN and infinity values
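/* Illustration (not from this file): these tcc-specific tokens can be
   used directly in source, e.g.
       float nan = __NAN__;
       float inf = __INF__;
   each is pushed as a VT_FLOAT constant with the bit pattern below. */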
5305 case TOK___NAN__:
5306 n = 0x7fc00000;
5307 special_math_val:
5308 vpushi(n);
5309 vtop->type.t = VT_FLOAT;
5310 next();
5311 break;
5312 case TOK___SNAN__:
5313 n = 0x7f800001;
5314 goto special_math_val;
5315 case TOK___INF__:
5316 n = 0x7f800000;
5317 goto special_math_val;
5319 default:
5320 tok_identifier:
5321 t = tok;
5322 next();
5323 if (t < TOK_UIDENT)
5324 expect("identifier");
5325 s = sym_find(t);
5326 if (!s || IS_ASM_SYM(s)) {
5327 const char *name = get_tok_str(t, NULL);
5328 if (tok != '(')
5329 tcc_error("'%s' undeclared", name);
5330 /* for simple function calls, we tolerate an undeclared
5331 external reference to an int() function */
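/* Illustration (not from this file): a call such as
       foo(1);
   with no prior declaration of foo is accepted as if  int foo();  had
   been declared, with the warning below where enabled. */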
5332 if (tcc_state->warn_implicit_function_declaration
5333 #ifdef TCC_TARGET_PE
5334 /* people must be warned about using undeclared WINAPI functions
5335 (which usually start with an uppercase letter) */
5336 || (name[0] >= 'A' && name[0] <= 'Z')
5337 #endif
5339 tcc_warning("implicit declaration of function '%s'", name);
5340 s = external_global_sym(t, &func_old_type);
5343 r = s->r;
5344 /* A symbol that has a register is a local register variable,
5345 which starts out as a VT_LOCAL value. */
5346 if ((r & VT_VALMASK) < VT_CONST)
5347 r = (r & ~VT_VALMASK) | VT_LOCAL;
5349 vset(&s->type, r, s->c);
5350 /* Point to s as backpointer (even without r&VT_SYM).
5351 Will be used by at least the x86 inline asm parser for
5352 regvars. */
5353 vtop->sym = s;
5355 if (r & VT_SYM) {
5356 vtop->c.i = 0;
5357 } else if (r == VT_CONST && IS_ENUM_VAL(s->type.t)) {
5358 vtop->c.i = s->enum_val;
5360 break;
5363 /* post operations */
5364 while (1) {
5365 if (tok == TOK_INC || tok == TOK_DEC) {
5366 inc(1, tok);
5367 next();
5368 } else if (tok == '.' || tok == TOK_ARROW || tok == TOK_CDOUBLE) {
5369 int qualifiers, cumofs = 0;
5370 /* field */
5371 if (tok == TOK_ARROW)
5372 indir();
5373 qualifiers = vtop->type.t & (VT_CONSTANT | VT_VOLATILE);
5374 test_lvalue();
5375 gaddrof();
5376 /* expect pointer on structure */
5377 if ((vtop->type.t & VT_BTYPE) != VT_STRUCT)
5378 expect("struct or union");
5379 if (tok == TOK_CDOUBLE)
5380 expect("field name");
5381 next();
5382 if (tok == TOK_CINT || tok == TOK_CUINT)
5383 expect("field name");
5384 s = find_field(&vtop->type, tok, &cumofs);
5385 if (!s)
5386 tcc_error("field not found: %s", get_tok_str(tok & ~SYM_FIELD, &tokc));
5387 /* add field offset to pointer */
5388 vtop->type = char_pointer_type; /* change type to 'char *' */
5389 vpushi(cumofs + s->c);
5390 gen_op('+');
5391 /* change type to field type, and set to lvalue */
5392 vtop->type = s->type;
5393 vtop->type.t |= qualifiers;
5394 /* an array is never an lvalue */
5395 if (!(vtop->type.t & VT_ARRAY)) {
5396 vtop->r |= lvalue_type(vtop->type.t);
5397 #ifdef CONFIG_TCC_BCHECK
5398 /* if bound checking, the referenced pointer must be checked */
5399 if (tcc_state->do_bounds_check && (vtop->r & VT_VALMASK) != VT_LOCAL)
5400 vtop->r |= VT_MUSTBOUND;
5401 #endif
5403 next();
5404 } else if (tok == '[') {
5405 next();
5406 gexpr();
5407 gen_op('+');
5408 indir();
5409 skip(']');
5410 } else if (tok == '(') {
5411 SValue ret;
5412 Sym *sa;
5413 int nb_args, ret_nregs, ret_align, regsize, variadic;
5415 /* function call */
5416 if ((vtop->type.t & VT_BTYPE) != VT_FUNC) {
5417 /* pointer test (no array accepted) */
5418 if ((vtop->type.t & (VT_BTYPE | VT_ARRAY)) == VT_PTR) {
5419 vtop->type = *pointed_type(&vtop->type);
5420 if ((vtop->type.t & VT_BTYPE) != VT_FUNC)
5421 goto error_func;
5422 } else {
5423 error_func:
5424 expect("function pointer");
5426 } else {
5427 vtop->r &= ~VT_LVAL; /* no lvalue */
5429 /* get return type */
5430 s = vtop->type.ref;
5431 next();
5432 sa = s->next; /* first parameter */
5433 nb_args = regsize = 0;
5434 ret.r2 = VT_CONST;
5435 /* compute first implicit argument if a structure is returned */
5436 if ((s->type.t & VT_BTYPE) == VT_STRUCT) {
5437 variadic = (s->f.func_type == FUNC_ELLIPSIS);
5438 ret_nregs = gfunc_sret(&s->type, variadic, &ret.type,
5439 &ret_align, &regsize);
5440 if (!ret_nregs) {
5441 /* get some space for the returned structure */
5442 size = type_size(&s->type, &align);
5443 #ifdef TCC_TARGET_ARM64
5444 /* On arm64, a small struct is returned in registers.
5445 It is much easier to write it to memory if we know
5446 that we are allowed to write some extra bytes, so
5447 round the allocated space up to a power of 2: */
5448 if (size < 16)
5449 while (size & (size - 1))
5450 size = (size | (size - 1)) + 1;
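/* Worked example of the rounding above (illustration only):
   size 6 -> 8, size 9 -> 16; sizes that are already a power of two
   are left unchanged. */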
5451 #endif
5452 loc = (loc - size) & -align;
5453 ret.type = s->type;
5454 ret.r = VT_LOCAL | VT_LVAL;
5455 /* pass it as 'int' to avoid structure arg passing
5456 problems */
5457 vseti(VT_LOCAL, loc);
5458 ret.c = vtop->c;
5459 nb_args++;
5461 } else {
5462 ret_nregs = 1;
5463 ret.type = s->type;
5466 if (ret_nregs) {
5467 /* return in register */
5468 if (is_float(ret.type.t)) {
5469 ret.r = reg_fret(ret.type.t);
5470 #ifdef TCC_TARGET_X86_64
5471 if ((ret.type.t & VT_BTYPE) == VT_QFLOAT)
5472 ret.r2 = REG_QRET;
5473 #endif
5474 } else {
5475 #ifndef TCC_TARGET_ARM64
5476 #ifdef TCC_TARGET_X86_64
5477 if ((ret.type.t & VT_BTYPE) == VT_QLONG)
5478 #else
5479 if ((ret.type.t & VT_BTYPE) == VT_LLONG)
5480 #endif
5481 ret.r2 = REG_LRET;
5482 #endif
5483 ret.r = REG_IRET;
5485 ret.c.i = 0;
5487 if (tok != ')') {
5488 for(;;) {
5489 expr_eq();
5490 gfunc_param_typed(s, sa);
5491 nb_args++;
5492 if (sa)
5493 sa = sa->next;
5494 if (tok == ')')
5495 break;
5496 skip(',');
5499 if (sa)
5500 tcc_error("too few arguments to function");
5501 skip(')');
5502 gfunc_call(nb_args);
5504 /* return value */
5505 for (r = ret.r + ret_nregs + !ret_nregs; r-- > ret.r;) {
5506 vsetc(&ret.type, r, &ret.c);
5507 vtop->r2 = ret.r2; /* Loop only happens when r2 is VT_CONST */
5510 /* handle packed struct return */
5511 if (((s->type.t & VT_BTYPE) == VT_STRUCT) && ret_nregs) {
5512 int addr, offset;
5514 size = type_size(&s->type, &align);
5515 /* We often write whole registers, so make sure there's enough
5516 space. Assume the register size is a power of 2. */
5517 if (regsize > align)
5518 align = regsize;
5519 loc = (loc - size) & -align;
5520 addr = loc;
5521 offset = 0;
5522 for (;;) {
5523 vset(&ret.type, VT_LOCAL | VT_LVAL, addr + offset);
5524 vswap();
5525 vstore();
5526 vtop--;
5527 if (--ret_nregs == 0)
5528 break;
5529 offset += regsize;
5531 vset(&s->type, VT_LOCAL | VT_LVAL, addr);
5533 if (s->f.func_noreturn)
5534 CODE_OFF();
5535 } else {
5536 break;
5541 ST_FUNC void expr_prod(void)
5543 int t;
5545 unary();
5546 while (tok == '*' || tok == '/' || tok == '%') {
5547 t = tok;
5548 next();
5549 unary();
5550 gen_op(t);
5554 ST_FUNC void expr_sum(void)
5556 int t;
5558 expr_prod();
5559 while (tok == '+' || tok == '-') {
5560 t = tok;
5561 next();
5562 expr_prod();
5563 gen_op(t);
5567 static void expr_shift(void)
5569 int t;
5571 expr_sum();
5572 while (tok == TOK_SHL || tok == TOK_SAR) {
5573 t = tok;
5574 next();
5575 expr_sum();
5576 gen_op(t);
5580 static void expr_cmp(void)
5582 int t;
5584 expr_shift();
5585 while ((tok >= TOK_ULE && tok <= TOK_GT) ||
5586 tok == TOK_ULT || tok == TOK_UGE) {
5587 t = tok;
5588 next();
5589 expr_shift();
5590 gen_op(t);
5594 static void expr_cmpeq(void)
5596 int t;
5598 expr_cmp();
5599 while (tok == TOK_EQ || tok == TOK_NE) {
5600 t = tok;
5601 next();
5602 expr_cmp();
5603 gen_op(t);
5607 static void expr_and(void)
5609 expr_cmpeq();
5610 while (tok == '&') {
5611 next();
5612 expr_cmpeq();
5613 gen_op('&');
5617 static void expr_xor(void)
5619 expr_and();
5620 while (tok == '^') {
5621 next();
5622 expr_and();
5623 gen_op('^');
5627 static void expr_or(void)
5629 expr_xor();
5630 while (tok == '|') {
5631 next();
5632 expr_xor();
5633 gen_op('|');
5637 static int condition_3way(void);
5639 static void expr_landor(void(*e_fn)(void), int e_op, int i)
5641 int t = 0, cc = 1, f = 0, c;
5642 for(;;) {
5643 c = f ? i : condition_3way();
5644 if (c < 0) {
5645 save_regs(1), cc = 0;
5646 } else if (c != i) {
5647 nocode_wanted++, f = 1;
5649 if (tok != e_op) {
5650 if (cc || f) {
5651 vpop();
5652 vpushi(i ^ f);
5653 gsym(t);
5654 nocode_wanted -= f;
5655 } else {
5656 vseti(VT_JMP + i, gvtst(i, t));
5658 break;
5660 if (c < 0)
5661 t = gvtst(i, t);
5662 else
5663 vpop();
5664 next();
5665 e_fn();
5669 static void expr_land(void)
5671 expr_or();
5672 if (tok == TOK_LAND)
5673 expr_landor(expr_or, TOK_LAND, 1);
5676 static void expr_lor(void)
5678 expr_land();
5679 if (tok == TOK_LOR)
5680 expr_landor(expr_land, TOK_LOR, 0);
5683 /* Assuming vtop is a value used in a conditional context
5684 (i.e. compared with zero) return 0 if it's false, 1 if
5685 true and -1 if it can't be statically determined. */
5686 static int condition_3way(void)
5688 int c = -1;
5689 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
5690 (!(vtop->r & VT_SYM) || !vtop->sym->a.weak)) {
5691 vdup();
5692 gen_cast_s(VT_BOOL);
5693 c = vtop->c.i;
5694 vpop();
5696 return c;
5699 static void expr_cond(void)
5701 int tt, u, r1, r2, rc, t1, t2, bt1, bt2, islv, c, g;
5702 SValue sv;
5703 CType type, type1, type2;
5704 int ncw_prev;
5706 expr_lor();
5707 if (tok == '?') {
5708 next();
5709 c = condition_3way();
5710 g = (tok == ':' && gnu_ext);
5711 tt = 0;
5712 if (!g) {
5713 if (c < 0) {
5714 save_regs(1);
5715 tt = gvtst(1, 0);
5716 } else {
5717 vpop();
5719 } else if (c < 0) {
5720 /* needed to avoid having different registers saved in
5721 each branch */
5722 rc = RC_INT;
5723 if (is_float(vtop->type.t)) {
5724 rc = RC_FLOAT;
5725 #ifdef TCC_TARGET_X86_64
5726 if ((vtop->type.t & VT_BTYPE) == VT_LDOUBLE) {
5727 rc = RC_ST0;
5729 #endif
5731 gv(rc);
5732 save_regs(1);
5733 gv_dup();
5734 tt = gvtst(0, 0);
5737 ncw_prev = nocode_wanted;
5738 if (1) {
5739 if (c == 0)
5740 nocode_wanted++;
5741 if (!g)
5742 gexpr();
5744 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
5745 mk_pointer(&vtop->type);
5746 type1 = vtop->type;
5747 sv = *vtop; /* save value to handle it later */
5748 vtop--; /* no vpop so that FP stack is not flushed */
5750 if (g) {
5751 u = tt;
5752 } else if (c < 0) {
5753 u = gjmp(0);
5754 gsym(tt);
5755 } else
5756 u = 0;
5758 nocode_wanted = ncw_prev;
5759 if (c == 1)
5760 nocode_wanted++;
5761 skip(':');
5762 expr_cond();
5765 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
5766 mk_pointer(&vtop->type);
5767 type2=vtop->type;
5768 t1 = type1.t;
5769 bt1 = t1 & VT_BTYPE;
5770 t2 = type2.t;
5771 bt2 = t2 & VT_BTYPE;
5772 type.ref = NULL;
5774 /* cast operands to correct type according to ISOC rules */
5775 if (bt1 == VT_VOID || bt2 == VT_VOID) {
5776 type.t = VT_VOID; /* NOTE: as an extension, we accept void on only one side */
5777 } else if (is_float(bt1) || is_float(bt2)) {
5778 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
5779 type.t = VT_LDOUBLE;
5781 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
5782 type.t = VT_DOUBLE;
5783 } else {
5784 type.t = VT_FLOAT;
5786 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
5787 /* cast to biggest op */
5788 type.t = VT_LLONG | VT_LONG;
5789 if (bt1 == VT_LLONG)
5790 type.t &= t1;
5791 if (bt2 == VT_LLONG)
5792 type.t &= t2;
5793 /* convert to unsigned if it does not fit in a long long */
5794 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
5795 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
5796 type.t |= VT_UNSIGNED;
5797 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
5798 /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
5799 /* If one is a null ptr constant the result type
5800 is the other. */
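/* Illustration (not from this file) of the rules applied below,
   assuming  int *p;  volatile int *q;
       c ? (void *)0 : p        has type  int *
       c ? (const int *)p : q   has type  const volatile int *  */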
5801 if (is_null_pointer (vtop)) type = type1;
5802 else if (is_null_pointer (&sv)) type = type2;
5803 else if (bt1 != bt2)
5804 tcc_error("incompatible types in conditional expressions");
5805 else {
5806 CType *pt1 = pointed_type(&type1);
5807 CType *pt2 = pointed_type(&type2);
5808 int pbt1 = pt1->t & VT_BTYPE;
5809 int pbt2 = pt2->t & VT_BTYPE;
5810 int newquals, copied = 0;
5811 /* pointers to void are preferred, otherwise the
5812 pointed-to types minus qualifiers should be compatible */
5813 type = (pbt1 == VT_VOID) ? type1 : type2;
5814 if (pbt1 != VT_VOID && pbt2 != VT_VOID) {
5815 if(!compare_types(pt1, pt2, 1/*unqualif*/))
5816 tcc_warning("pointer type mismatch in conditional expression\n");
5818 /* combine qualifs */
5819 newquals = ((pt1->t | pt2->t) & (VT_CONSTANT | VT_VOLATILE));
5820 if ((~pointed_type(&type)->t & (VT_CONSTANT | VT_VOLATILE))
5821 & newquals)
5823 /* copy the pointer target symbol */
5824 type.ref = sym_push(SYM_FIELD, &type.ref->type,
5825 0, type.ref->c);
5826 copied = 1;
5827 pointed_type(&type)->t |= newquals;
5829 /* pointers to incomplete arrays get converted to
5830 pointers to completed ones if possible */
5831 if (pt1->t & VT_ARRAY
5832 && pt2->t & VT_ARRAY
5833 && pointed_type(&type)->ref->c < 0
5834 && (pt1->ref->c > 0 || pt2->ref->c > 0))
5836 if (!copied)
5837 type.ref = sym_push(SYM_FIELD, &type.ref->type,
5838 0, type.ref->c);
5839 pointed_type(&type)->ref =
5840 sym_push(SYM_FIELD, &pointed_type(&type)->ref->type,
5841 0, pointed_type(&type)->ref->c);
5842 pointed_type(&type)->ref->c =
5843 0 < pt1->ref->c ? pt1->ref->c : pt2->ref->c;
5846 } else if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
5847 /* XXX: test structure compatibility */
5848 type = bt1 == VT_STRUCT ? type1 : type2;
5849 } else {
5850 /* integer operations */
5851 type.t = VT_INT | (VT_LONG & (t1 | t2));
5852 /* convert to unsigned if it does not fit in an integer */
5853 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
5854 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
5855 type.t |= VT_UNSIGNED;
5857 /* keep structs as lvalues by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
5858 that `(expr ? a : b).mem` does not error with "lvalue expected" */
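/* Illustration (not from this file): with
       struct S s1, s2;
   the access  (c ? s1 : s2).mem  works because the conditional is
   handled as  (*(c ? &s1 : &s2)).mem  */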
5859 islv = (vtop->r & VT_LVAL) && (sv.r & VT_LVAL) && VT_STRUCT == (type.t & VT_BTYPE);
5861 /* now we convert second operand */
5862 if (c != 1) {
5863 gen_cast(&type);
5864 if (islv) {
5865 mk_pointer(&vtop->type);
5866 gaddrof();
5867 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5868 gaddrof();
5871 rc = RC_INT;
5872 if (is_float(type.t)) {
5873 rc = RC_FLOAT;
5874 #ifdef TCC_TARGET_X86_64
5875 if ((type.t & VT_BTYPE) == VT_LDOUBLE) {
5876 rc = RC_ST0;
5878 #endif
5879 } else if ((type.t & VT_BTYPE) == VT_LLONG) {
5880 /* for long longs, we use fixed registers to avoid having
5881 to handle a complicated move */
5882 rc = RC_IRET;
5885 tt = r2 = 0;
5886 if (c < 0) {
5887 r2 = gv(rc);
5888 tt = gjmp(0);
5890 gsym(u);
5892 /* this is horrible, but we must also convert the first
5893 operand */
5894 if (c != 0) {
5895 *vtop = sv;
5896 gen_cast(&type);
5897 if (islv) {
5898 mk_pointer(&vtop->type);
5899 gaddrof();
5900 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5901 gaddrof();
5904 if (c < 0) {
5905 r1 = gv(rc);
5906 move_reg(r2, r1, type.t);
5907 vtop->r = r2;
5908 gsym(tt);
5911 if (islv)
5912 indir();
5914 nocode_wanted = ncw_prev;
5918 static void expr_eq(void)
5920 int t;
5922 expr_cond();
5923 if (tok == '=' ||
5924 (tok >= TOK_A_MOD && tok <= TOK_A_DIV) ||
5925 tok == TOK_A_XOR || tok == TOK_A_OR ||
5926 tok == TOK_A_SHL || tok == TOK_A_SAR) {
5927 test_lvalue();
5928 t = tok;
5929 next();
5930 if (t == '=') {
5931 expr_eq();
5932 } else {
5933 vdup();
5934 expr_eq();
5935 gen_op(t & 0x7f);
5937 vstore();
5941 ST_FUNC void gexpr(void)
5943 while (1) {
5944 expr_eq();
5945 if (tok != ',')
5946 break;
5947 vpop();
5948 next();
5952 /* parse a constant expression and return value in vtop. */
5953 static void expr_const1(void)
5955 const_wanted++;
5956 nocode_wanted++;
5957 expr_cond();
5958 nocode_wanted--;
5959 const_wanted--;
5962 /* parse an integer constant and return its value. */
5963 static inline int64_t expr_const64(void)
5965 int64_t c;
5966 expr_const1();
5967 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
5968 expect("constant expression");
5969 c = vtop->c.i;
5970 vpop();
5971 return c;
5974 /* parse an integer constant and return its value.
5975 Complain if it doesn't fit 32bit (signed or unsigned). */
5976 ST_FUNC int expr_const(void)
5978 int c;
5979 int64_t wc = expr_const64();
5980 c = wc;
5981 if (c != wc && (unsigned)c != wc)
5982 tcc_error("constant exceeds 32 bit");
5983 return c;
5986 /* ------------------------------------------------------------------------- */
5987 /* return from function */
5989 #ifndef TCC_TARGET_ARM64
5990 static void gfunc_return(CType *func_type)
5992 if ((func_type->t & VT_BTYPE) == VT_STRUCT) {
5993 CType type, ret_type;
5994 int ret_align, ret_nregs, regsize;
5995 ret_nregs = gfunc_sret(func_type, func_var, &ret_type,
5996 &ret_align, &regsize);
5997 if (0 == ret_nregs) {
5998 /* if returning a structure, it must be copied to the implicit
5999 first pointer arg location */
6000 type = *func_type;
6001 mk_pointer(&type);
6002 vset(&type, VT_LOCAL | VT_LVAL, func_vc);
6003 indir();
6004 vswap();
6005 /* copy structure value to pointer */
6006 vstore();
6007 } else {
6008 /* returning structure packed into registers */
6009 int r, size, addr, align;
6010 size = type_size(func_type,&align);
6011 if ((vtop->r != (VT_LOCAL | VT_LVAL) ||
6012 (vtop->c.i & (ret_align-1)))
6013 && (align & (ret_align-1))) {
6014 loc = (loc - size) & -ret_align;
6015 addr = loc;
6016 type = *func_type;
6017 vset(&type, VT_LOCAL | VT_LVAL, addr);
6018 vswap();
6019 vstore();
6020 vpop();
6021 vset(&ret_type, VT_LOCAL | VT_LVAL, addr);
6023 vtop->type = ret_type;
6024 if (is_float(ret_type.t))
6025 r = rc_fret(ret_type.t);
6026 else
6027 r = RC_IRET;
6029 if (ret_nregs == 1)
6030 gv(r);
6031 else {
6032 for (;;) {
6033 vdup();
6034 gv(r);
6035 vpop();
6036 if (--ret_nregs == 0)
6037 break;
6038 /* We assume that when a structure is returned in multiple
6039 registers, their classes are consecutive values of the
6040 sequence s(n) = 2^n */
6041 r <<= 1;
6042 vtop->c.i += regsize;
6046 } else if (is_float(func_type->t)) {
6047 gv(rc_fret(func_type->t));
6048 } else {
6049 gv(RC_IRET);
6051 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
6053 #endif
6055 static int case_cmp(const void *pa, const void *pb)
6057 int64_t a = (*(struct case_t**) pa)->v1;
6058 int64_t b = (*(struct case_t**) pb)->v1;
6059 return a < b ? -1 : a > b;
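/* Illustration (not from this file): for a switch such as
       switch (x) { case 1 ... 3: f(); break; case 7: g(); break; }
   the sorted case table built by the parser is searched below with a
   binary search over the [v1, v2] ranges while more than 4 entries
   remain, then by the final linear scan. */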
6062 static void gcase(struct case_t **base, int len, int *bsym)
6064 struct case_t *p;
6065 int e;
6066 int ll = (vtop->type.t & VT_BTYPE) == VT_LLONG;
6067 gv(RC_INT);
6068 while (len > 4) {
6069 /* binary search */
6070 p = base[len/2];
6071 vdup();
6072 if (ll)
6073 vpushll(p->v2);
6074 else
6075 vpushi(p->v2);
6076 gen_op(TOK_LE);
6077 e = gtst(1, 0);
6078 vdup();
6079 if (ll)
6080 vpushll(p->v1);
6081 else
6082 vpushi(p->v1);
6083 gen_op(TOK_GE);
6084 gtst_addr(0, p->sym); /* v1 <= x <= v2 */
6085 /* x < v1 */
6086 gcase(base, len/2, bsym);
6087 if (cur_switch->def_sym)
6088 gjmp_addr(cur_switch->def_sym);
6089 else
6090 *bsym = gjmp(*bsym);
6091 /* x > v2 */
6092 gsym(e);
6093 e = len/2 + 1;
6094 base += e; len -= e;
6096 /* linear scan */
6097 while (len--) {
6098 p = *base++;
6099 vdup();
6100 if (ll)
6101 vpushll(p->v2);
6102 else
6103 vpushi(p->v2);
6104 if (p->v1 == p->v2) {
6105 gen_op(TOK_EQ);
6106 gtst_addr(0, p->sym);
6107 } else {
6108 gen_op(TOK_LE);
6109 e = gtst(1, 0);
6110 vdup();
6111 if (ll)
6112 vpushll(p->v1);
6113 else
6114 vpushi(p->v1);
6115 gen_op(TOK_GE);
6116 gtst_addr(0, p->sym);
6117 gsym(e);
6122 /* call 'func' for each __attribute__((cleanup(func))) */
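/* Illustration (not from this file) of the attribute handled here,
   using a hypothetical function unlock():
       void unlock(int *p);
       { __attribute__((cleanup(unlock))) int lk = 1; ... }
   unlock(&lk) is called when lk goes out of scope. */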
6123 static void block_cleanup(Sym *lcleanup, int lncleanups)
6125 int jmp = 0;
6126 Sym *g, **pg;
6127 for (pg = &pending_gotos; (g = *pg) && g->c > lncleanups;) {
6128 if (g->prev_tok->r & LABEL_FORWARD) {
6129 Sym *pcl = g->next;
6130 if (!jmp)
6131 jmp = gjmp(0);
6132 gsym(pcl->jnext);
6133 try_call_scope_cleanup(lcleanup);
6134 pcl->jnext = gjmp(0);
6135 if (!lncleanups)
6136 goto remove_pending;
6137 g->c = lncleanups;
6138 pg = &g->prev;
6139 } else {
6140 remove_pending:
6141 *pg = g->prev;
6142 sym_free(g);
6145 gsym(jmp);
6146 try_call_scope_cleanup(lcleanup);
6147 current_cleanups = lcleanup;
6148 ncleanups = lncleanups;
6151 static void check_func_return(void)
6153 if ((func_vt.t & VT_BTYPE) == VT_VOID)
6154 return;
6155 if (!strcmp (funcname, "main")
6156 && (func_vt.t & VT_BTYPE) == VT_INT) {
6157 /* main returns 0 by default */
6158 vpushi(0);
6159 gen_assign_cast(&func_vt);
6160 gfunc_return(&func_vt);
6161 } else {
6162 tcc_warning("function might return no value: '%s'", funcname);
6166 static void block(int *bsym, Sym *bcl, int *csym, Sym *ccl, int is_expr)
6168 int a, b, c, d, e, t;
6169 Sym *s;
6171 if (is_expr) {
6172 /* default return value is (void) */
6173 vpushi(0);
6174 vtop->type.t = VT_VOID;
6177 t = tok, next();
6179 if (t == TOK_IF) {
6180 skip('(');
6181 gexpr();
6182 skip(')');
6183 a = gvtst(1, 0);
6184 block(bsym, bcl, csym, ccl, 0);
6185 if (tok == TOK_ELSE) {
6186 d = gjmp(0);
6187 gsym(a);
6188 next();
6189 block(bsym, bcl, csym, ccl, 0);
6190 gsym(d); /* patch else jmp */
6191 } else {
6192 gsym(a);
6195 } else if (t == TOK_WHILE) {
6196 d = gind();
6197 vla_sp_restore();
6198 skip('(');
6199 gexpr();
6200 skip(')');
6201 a = gvtst(1, 0);
6202 b = 0;
6203 block(&a, current_cleanups, &b, current_cleanups, 0);
6204 gjmp_addr(d);
6205 gsym_addr(b, d);
6206 gsym(a);
6208 } else if (t == '{') {
6209 Sym *llabel, *lcleanup;
6210 int block_vla_sp_loc = vla_sp_loc, saved_vlas_in_scope = vlas_in_scope;
6211 int lncleanups = ncleanups;
6213 /* record local declaration stack position */
6214 s = local_stack;
6215 llabel = local_label_stack;
6216 lcleanup = current_cleanups;
6217 ++local_scope;
6219 /* handle local labels declarations */
6220 while (tok == TOK_LABEL) {
6221 do {
6222 next();
6223 if (tok < TOK_UIDENT)
6224 expect("label identifier");
6225 label_push(&local_label_stack, tok, LABEL_DECLARED);
6226 next();
6227 } while (tok == ',');
6228 skip(';');
6231 while (tok != '}') {
6232 decl(VT_LOCAL);
6233 if (tok != '}') {
6234 if (is_expr)
6235 vpop();
6236 block(bsym, bcl, csym, ccl, is_expr);
6240 if (current_cleanups != lcleanup)
6241 block_cleanup(lcleanup, lncleanups);
6243 /* pop locally defined labels */
6244 label_pop(&local_label_stack, llabel, is_expr);
6246 /* In the is_expr case (a statement expression is finished here),
6247 vtop might refer to symbols on the local_stack. Either via the
6248 type or via vtop->sym. We can't pop those nor any that in turn
6249 might be referred to. To make it easier we don't roll back
6250 any symbols in that case; some upper level call to block() will
6251 do that. We do have to remove such symbols from the lookup
6252 tables, though. sym_pop will do that. */
6254 /* pop locally defined symbols */
6255 sym_pop(&local_stack, s, is_expr);
6257 /* Pop VLA frames and restore stack pointer if required */
6258 if (vlas_in_scope > saved_vlas_in_scope) {
6259 vla_sp_loc = saved_vlas_in_scope ? block_vla_sp_loc : vla_sp_root_loc;
6260 vla_sp_restore();
6262 vlas_in_scope = saved_vlas_in_scope;
6264 if (0 == --local_scope && !nocode_wanted)
6265 check_func_return();
6266 next();
6268 } else if (t == TOK_RETURN) {
6269 a = tok != ';';
6270 b = (func_vt.t & VT_BTYPE) != VT_VOID;
6271 if (a)
6272 gexpr(), gen_assign_cast(&func_vt);
6273 try_call_scope_cleanup(NULL);
6274 if (a && b)
6275 gfunc_return(&func_vt);
6276 else if (a)
6277 vtop--;
6278 else if (b)
6279 tcc_warning("'return' with no value.");
6280 skip(';');
6281 /* jump unless last stmt in top-level block */
6282 if (tok != '}' || local_scope != 1)
6283 rsym = gjmp(rsym);
6284 CODE_OFF();
6286 } else if (t == TOK_BREAK) {
6287 /* compute jump */
6288 if (!bsym)
6289 tcc_error("cannot break");
6290 try_call_scope_cleanup(bcl);
6291 *bsym = gjmp(*bsym);
6292 skip(';');
6294 } else if (t == TOK_CONTINUE) {
6295 /* compute jump */
6296 if (!csym)
6297 tcc_error("cannot continue");
6298 try_call_scope_cleanup(ccl);
6299 vla_sp_restore_root();
6300 *csym = gjmp(*csym);
6301 skip(';');
6303 } else if (t == TOK_FOR) {
6304 Sym *lcleanup = current_cleanups;
6305 int lncleanups = ncleanups;
6307 skip('(');
6308 s = local_stack;
6309 ++local_scope;
6310 if (tok != ';') {
6311 /* c99 for-loop init decl? */
6312 if (!decl0(VT_LOCAL, 1, NULL)) {
6313 /* no, regular for-loop init expr */
6314 gexpr();
6315 vpop();
6318 skip(';');
6319 a = b = 0;
6320 c = d = gind();
6321 vla_sp_restore();
6322 if (tok != ';') {
6323 gexpr();
6324 a = gvtst(1, 0);
6326 skip(';');
6327 if (tok != ')') {
6328 e = gjmp(0);
6329 d = gind();
6330 vla_sp_restore();
6331 gexpr();
6332 vpop();
6333 gjmp_addr(c);
6334 gsym(e);
6336 skip(')');
6337 block(&a, current_cleanups, &b, current_cleanups, 0);
6338 gjmp_addr(d);
6339 gsym_addr(b, d);
6340 gsym(a);
6341 --local_scope;
6342 try_call_scope_cleanup(lcleanup);
6343 ncleanups = lncleanups;
6344 current_cleanups = lcleanup;
6345 sym_pop(&local_stack, s, 0);
6347 } else if (t == TOK_DO) {
6348 a = b = 0;
6349 d = gind();
6350 vla_sp_restore();
6351 block(&a, current_cleanups, &b, current_cleanups, 0);
6352 gsym(b);
6353 skip(TOK_WHILE);
6354 skip('(');
6355 gexpr();
6356 skip(')');
6357 skip(';');
6358 c = gvtst(0, 0);
6359 gsym_addr(c, d);
6360 gsym(a);
6362 } else if (t == TOK_SWITCH) {
6363 struct switch_t *saved, sw;
6364 SValue switchval;
6366 skip('(');
6367 gexpr();
6368 skip(')');
6369 switchval = *vtop--;
6370 a = 0;
6371 b = gjmp(0); /* jump to first case */
6372 sw.p = NULL; sw.n = 0; sw.def_sym = 0;
6373 saved = cur_switch;
6374 cur_switch = &sw;
6375 block(&a, current_cleanups, csym, ccl, 0);
6376 a = gjmp(a); /* add implicit break */
6377 /* case lookup */
6378 gsym(b);
6379 qsort(sw.p, sw.n, sizeof(void*), case_cmp);
6380 for (b = 1; b < sw.n; b++)
6381 if (sw.p[b - 1]->v2 >= sw.p[b]->v1)
6382 tcc_error("duplicate case value");
6383 /* Our switch table sorting is signed, so the compared
6384 value needs to be signed as well when it's 64-bit. */
6385 if ((switchval.type.t & VT_BTYPE) == VT_LLONG)
6386 switchval.type.t &= ~VT_UNSIGNED;
6387 vpushv(&switchval);
6388 gcase(sw.p, sw.n, &a);
6389 vpop();
6390 if (sw.def_sym)
6391 gjmp_addr(sw.def_sym);
6392 dynarray_reset(&sw.p, &sw.n);
6393 cur_switch = saved;
6394 /* break label */
6395 gsym(a);
6397 } else if (t == TOK_CASE) {
6398 struct case_t *cr = tcc_malloc(sizeof(struct case_t));
6399 if (!cur_switch)
6400 expect("switch");
6401 cr->v1 = cr->v2 = expr_const64();
6402 if (gnu_ext && tok == TOK_DOTS) {
6403 next();
6404 cr->v2 = expr_const64();
6405 if (cr->v2 < cr->v1)
6406 tcc_warning("empty case range");
6408 cr->sym = gind();
6409 dynarray_add(&cur_switch->p, &cur_switch->n, cr);
6410 skip(':');
6411 is_expr = 0;
6412 goto block_after_label;
6414 } else if (t == TOK_DEFAULT) {
6415 if (!cur_switch)
6416 expect("switch");
6417 if (cur_switch->def_sym)
6418 tcc_error("too many 'default'");
6419 cur_switch->def_sym = gind();
6420 skip(':');
6421 is_expr = 0;
6422 goto block_after_label;
6424 } else if (t == TOK_GOTO) {
6425 if (tok == '*' && gnu_ext) {
6426 /* computed goto */
6427 next();
6428 gexpr();
6429 if ((vtop->type.t & VT_BTYPE) != VT_PTR)
6430 expect("pointer");
6431 ggoto();
6432 } else if (tok >= TOK_UIDENT) {
6433 s = label_find(tok);
6434 /* put forward definition if needed */
6435 if (!s)
6436 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
6437 else if (s->r == LABEL_DECLARED)
6438 s->r = LABEL_FORWARD;
6440 vla_sp_restore_root();
6441 if (s->r & LABEL_FORWARD) {
6442 /* start new goto chain for cleanups, linked via label->next */
6443 if (current_cleanups) {
6444 sym_push2(&pending_gotos, SYM_FIELD, 0, ncleanups);
6445 pending_gotos->prev_tok = s;
6446 s = sym_push2(&s->next, SYM_FIELD, 0, 0);
6447 pending_gotos->next = s;
6449 s->jnext = gjmp(s->jnext);
6450 } else {
6451 try_call_cleanup_goto(s->cleanupstate);
6452 gjmp_addr(s->jnext);
6454 next();
6456 } else {
6457 expect("label identifier");
6459 skip(';');
6461 } else if (t == TOK_ASM1 || t == TOK_ASM2 || t == TOK_ASM3) {
6462 asm_instr();
6464 } else {
6465 if (tok == ':' && t >= TOK_UIDENT) {
6466 /* label case */
6467 next();
6468 s = label_find(t);
6469 if (s) {
6470 if (s->r == LABEL_DEFINED)
6471 tcc_error("duplicate label '%s'", get_tok_str(s->v, NULL));
6472 s->r = LABEL_DEFINED;
6473 if (s->next) {
6474 Sym *pcl; /* pending cleanup goto */
6475 for (pcl = s->next; pcl; pcl = pcl->prev)
6476 gsym(pcl->jnext);
6477 sym_pop(&s->next, NULL, 0);
6478 } else
6479 gsym(s->jnext);
6480 } else {
6481 s = label_push(&global_label_stack, t, LABEL_DEFINED);
6483 s->jnext = gind();
6484 s->cleanupstate = current_cleanups;
6486 block_after_label:
6487 vla_sp_restore();
6488 /* we accept this, but it is a mistake */
6489 if (tok == '}') {
6490 tcc_warning("deprecated use of label at end of compound statement");
6491 } else {
6492 if (is_expr)
6493 vpop();
6494 block(bsym, bcl, csym, ccl, is_expr);
6497 } else {
6498 /* expression case */
6499 if (t != ';') {
6500 unget_tok(t);
6501 if (is_expr) {
6502 vpop();
6503 gexpr();
6504 } else {
6505 gexpr();
6506 vpop();
6508 skip(';');
6514 /* This skips over a stream of tokens containing balanced {} and ()
6515 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
6516 with a '{'). If STR is non-NULL, allocates and stores the skipped tokens
6517 in *STR. This doesn't check if () and {} are nested correctly,
6518 i.e. "({)}" is accepted. */
6519 static void skip_or_save_block(TokenString **str)
6521 int braces = tok == '{';
6522 int level = 0;
6523 if (str)
6524 *str = tok_str_alloc();
6526 while ((level > 0 || (tok != '}' && tok != ',' && tok != ';' && tok != ')'))) {
6527 int t;
6528 if (tok == TOK_EOF) {
6529 if (str || level > 0)
6530 tcc_error("unexpected end of file");
6531 else
6532 break;
6534 if (str)
6535 tok_str_add_tok(*str);
6536 t = tok;
6537 next();
6538 if (t == '{' || t == '(') {
6539 level++;
6540 } else if (t == '}' || t == ')') {
6541 level--;
6542 if (level == 0 && braces && t == '}')
6543 break;
6546 if (str) {
6547 tok_str_add(*str, -1);
6548 tok_str_add(*str, 0);
6552 #define EXPR_CONST 1
6553 #define EXPR_ANY 2
6555 static void parse_init_elem(int expr_type)
6557 int saved_global_expr;
6558 switch(expr_type) {
6559 case EXPR_CONST:
6560 /* compound literals must be allocated globally in this case */
6561 saved_global_expr = global_expr;
6562 global_expr = 1;
6563 expr_const1();
6564 global_expr = saved_global_expr;
6565 /* NOTE: symbols are accepted, as well as lvalues of anonymous symbols
6566 (compound literals). */
6567 if (((vtop->r & (VT_VALMASK | VT_LVAL)) != VT_CONST
6568 && ((vtop->r & (VT_SYM|VT_LVAL)) != (VT_SYM|VT_LVAL)
6569 || vtop->sym->v < SYM_FIRST_ANOM))
6570 #ifdef TCC_TARGET_PE
6571 || ((vtop->r & VT_SYM) && vtop->sym->a.dllimport)
6572 #endif
6574 tcc_error("initializer element is not constant");
6575 break;
6576 case EXPR_ANY:
6577 expr_eq();
6578 break;
6582 /* put zeros for variable based init */
6583 static void init_putz(Section *sec, unsigned long c, int size)
6585 if (sec) {
6586 /* nothing to do because globals are already set to zero */
6587 } else {
6588 vpush_global_sym(&func_old_type, TOK_memset);
6589 vseti(VT_LOCAL, c);
6590 #ifdef TCC_TARGET_ARM
6591 vpushs(size);
6592 vpushi(0);
6593 #else
6594 vpushi(0);
6595 vpushs(size);
6596 #endif
6597 gfunc_call(3);
6601 #define DIF_FIRST 1
6602 #define DIF_SIZE_ONLY 2
6603 #define DIF_HAVE_ELEM 4
6605 /* t is the array or struct type. c is the array or struct
6606 address. cur_field is the pointer to the current
6607 field, for arrays the 'c' member contains the current start
6608 index. 'flags' is as in decl_initializer.
6609 'al' contains the already initialized length of the
6610 current container (starting at c). This returns the new length of that. */
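/* Illustration (not from this file) of designators handled by
   decl_designator(), assuming struct S has members a and b:
       struct S s = { .b = 2, .a = 1 };
       int v[10]  = { [2] = 5, [4 ... 6] = 7 };  // range is a GNU extension
   */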
6611 static int decl_designator(CType *type, Section *sec, unsigned long c,
6612 Sym **cur_field, int flags, int al)
6614 Sym *s, *f;
6615 int index, index_last, align, l, nb_elems, elem_size;
6616 unsigned long corig = c;
6618 elem_size = 0;
6619 nb_elems = 1;
6621 if (flags & DIF_HAVE_ELEM)
6622 goto no_designator;
6624 if (gnu_ext && tok >= TOK_UIDENT) {
6625 l = tok, next();
6626 if (tok == ':')
6627 goto struct_field;
6628 unget_tok(l);
6631 /* NOTE: we only support ranges for last designator */
6632 while (nb_elems == 1 && (tok == '[' || tok == '.')) {
6633 if (tok == '[') {
6634 if (!(type->t & VT_ARRAY))
6635 expect("array type");
6636 next();
6637 index = index_last = expr_const();
6638 if (tok == TOK_DOTS && gnu_ext) {
6639 next();
6640 index_last = expr_const();
6642 skip(']');
6643 s = type->ref;
6644 if (index < 0 || (s->c >= 0 && index_last >= s->c) ||
6645 index_last < index)
6646 tcc_error("invalid index");
6647 if (cur_field)
6648 (*cur_field)->c = index_last;
6649 type = pointed_type(type);
6650 elem_size = type_size(type, &align);
6651 c += index * elem_size;
6652 nb_elems = index_last - index + 1;
6653 } else {
6654 int cumofs = 0;
6655 next();
6656 l = tok;
6657 struct_field:
6658 next();
6659 if ((type->t & VT_BTYPE) != VT_STRUCT)
6660 expect("struct/union type");
6661 f = find_field(type, l, &cumofs);
6662 if (!f)
6663 expect("field");
6664 if (cur_field)
6665 *cur_field = f;
6666 type = &f->type;
6667 c += cumofs + f->c;
6669 cur_field = NULL;
6671 if (!cur_field) {
6672 if (tok == '=') {
6673 next();
6674 } else if (!gnu_ext) {
6675 expect("=");
6677 } else {
6678 no_designator:
6679 if (type->t & VT_ARRAY) {
6680 index = (*cur_field)->c;
6681 if (type->ref->c >= 0 && index >= type->ref->c)
6682 tcc_error("index too large");
6683 type = pointed_type(type);
6684 c += index * type_size(type, &align);
6685 } else {
6686 f = *cur_field;
6687 while (f && (f->v & SYM_FIRST_ANOM) && (f->type.t & VT_BITFIELD))
6688 *cur_field = f = f->next;
6689 if (!f)
6690 tcc_error("too many field init");
6691 type = &f->type;
6692 c += f->c;
6695 /* must put zero in holes (note that doing it that way
6696 ensures that it even works with designators) */
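/* Illustrative, not from the original source: in
       int a[8] = { 1, [4] = 2 };
   the designator jumps ahead of the already initialized length, so
   a[1]..a[3] are zero-filled here before a[4] is stored. */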
6697 if (!(flags & DIF_SIZE_ONLY) && c - corig > al)
6698 init_putz(sec, corig + al, c - corig - al);
6699 decl_initializer(type, sec, c, flags & ~DIF_FIRST);
6701 /* XXX: make it more general */
6702 if (!(flags & DIF_SIZE_ONLY) && nb_elems > 1) {
6703 unsigned long c_end;
6704 uint8_t *src, *dst;
6705 int i;
6707 if (!sec) {
6708 vset(type, VT_LOCAL|VT_LVAL, c);
6709 for (i = 1; i < nb_elems; i++) {
6710 vset(type, VT_LOCAL|VT_LVAL, c + elem_size * i);
6711 vswap();
6712 vstore();
6714 vpop();
6715 } else if (!NODATA_WANTED) {
6716 c_end = c + nb_elems * elem_size;
6717 if (c_end > sec->data_allocated)
6718 section_realloc(sec, c_end);
6719 src = sec->data + c;
6720 dst = src;
6721 for(i = 1; i < nb_elems; i++) {
6722 dst += elem_size;
6723 memcpy(dst, src, elem_size);
6727 c += nb_elems * type_size(type, &align);
6728 if (c - corig > al)
6729 al = c - corig;
6730 return al;
6733 /* store a value or an expression directly in global data or in a local array */
6734 static void init_putv(CType *type, Section *sec, unsigned long c)
6736 int bt;
6737 void *ptr;
6738 CType dtype;
6740 dtype = *type;
6741 dtype.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
6743 if (sec) {
6744 int size, align;
6745 /* XXX: not portable */
6746 /* XXX: generate error if incorrect relocation */
6747 gen_assign_cast(&dtype);
6748 bt = type->t & VT_BTYPE;
6750 if ((vtop->r & VT_SYM)
6751 && bt != VT_PTR
6752 && bt != VT_FUNC
6753 && (bt != (PTR_SIZE == 8 ? VT_LLONG : VT_INT)
6754 || (type->t & VT_BITFIELD))
6755 && !((vtop->r & VT_CONST) && vtop->sym->v >= SYM_FIRST_ANOM)
6757 tcc_error("initializer element is not computable at load time");
6759 if (NODATA_WANTED) {
6760 vtop--;
6761 return;
6764 size = type_size(type, &align);
6765 section_reserve(sec, c + size);
6766 ptr = sec->data + c;
6768 /* XXX: make code faster ? */
6769 if ((vtop->r & (VT_SYM|VT_CONST)) == (VT_SYM|VT_CONST) &&
6770 vtop->sym->v >= SYM_FIRST_ANOM &&
6771 /* XXX This rejects compound literals like
6772 '(void *){ptr}'. The problem is that '&sym' is
6773 represented the same way, which would be ruled out
6774 by the SYM_FIRST_ANOM check above, but also '"string"'
6775 in 'char *p = "string"' is represented the same
6776 with the type being VT_PTR and the symbol being an
6777 anonymous one. That is, there's no difference in vtop
6778 between '(void *){x}' and '&(void *){x}'. Ignore
6779 pointer typed entities here. Hopefully no real code
6780 will ever use compound literals with scalar type. */
6781 (vtop->type.t & VT_BTYPE) != VT_PTR) {
6782 /* These come from compound literals, memcpy stuff over. */
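/* Illustrative, not from the original source: this branch is reached for
   e.g. (accepted by tcc with constant contents)
       struct S { int a, b; };
       static struct S s = (struct S){ 1, 2 };
   the anonymous compound literal has already been written to a section,
   so its bytes (and any relocations inside it) are copied into 's'. */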
6783 Section *ssec;
6784 ElfSym *esym;
6785 ElfW_Rel *rel;
6786 esym = elfsym(vtop->sym);
6787 ssec = tcc_state->sections[esym->st_shndx];
6788 memmove (ptr, ssec->data + esym->st_value, size);
6789 if (ssec->reloc) {
6790 /* We need to copy over all memory contents, and that
6791 includes relocations. Use the fact that relocs are
6792 created in order, so look from the end of relocs
6793 until we hit one before the copied region. */
6794 int num_relocs = ssec->reloc->data_offset / sizeof(*rel);
6795 rel = (ElfW_Rel*)(ssec->reloc->data + ssec->reloc->data_offset);
6796 while (num_relocs--) {
6797 rel--;
6798 if (rel->r_offset >= esym->st_value + size)
6799 continue;
6800 if (rel->r_offset < esym->st_value)
6801 break;
6802 /* Note: if the same fields are initialized multiple
6803 times (possible with designators) then we possibly
6804 add multiple relocations for the same offset here.
6805 That would lead to wrong code, the last reloc needs
6806 to win. We clean this up later after the whole
6807 initializer is parsed. */
6808 put_elf_reloca(symtab_section, sec,
6809 c + rel->r_offset - esym->st_value,
6810 ELFW(R_TYPE)(rel->r_info),
6811 ELFW(R_SYM)(rel->r_info),
6812 #if PTR_SIZE == 8
6813 rel->r_addend
6814 #else
6816 #endif
6820 } else {
6821 if (type->t & VT_BITFIELD) {
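/* Illustrative, not from the original source: this path stores bit-field
   initializers byte by byte, e.g. for
       static struct { unsigned a:3, b:5; } s = { 5, 9 };
   each store writes 'bit_size' bits starting at 'bit_pos', masking off
   the untouched bits of the affected bytes. */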
6822 int bit_pos, bit_size, bits, n;
6823 unsigned char *p, v, m;
6824 bit_pos = BIT_POS(vtop->type.t);
6825 bit_size = BIT_SIZE(vtop->type.t);
6826 p = (unsigned char*)ptr + (bit_pos >> 3);
6827 bit_pos &= 7, bits = 0;
6828 while (bit_size) {
6829 n = 8 - bit_pos;
6830 if (n > bit_size)
6831 n = bit_size;
6832 v = vtop->c.i >> bits << bit_pos;
6833 m = ((1 << n) - 1) << bit_pos;
6834 *p = (*p & ~m) | (v & m);
6835 bits += n, bit_size -= n, bit_pos = 0, ++p;
6837 } else
6838 switch(bt) {
6839 /* XXX: when cross-compiling we assume that each type has the
6840 same representation on host and target, which is likely to
6841 be wrong in the case of long double */
6842 case VT_BOOL:
6843 vtop->c.i = vtop->c.i != 0;
6844 case VT_BYTE:
6845 *(char *)ptr |= vtop->c.i;
6846 break;
6847 case VT_SHORT:
6848 *(short *)ptr |= vtop->c.i;
6849 break;
6850 case VT_FLOAT:
6851 *(float*)ptr = vtop->c.f;
6852 break;
6853 case VT_DOUBLE:
6854 *(double *)ptr = vtop->c.d;
6855 break;
6856 case VT_LDOUBLE:
6857 #if defined TCC_IS_NATIVE_387
6858 if (sizeof (long double) >= 10) /* zero pad ten-byte LD */
6859 memcpy(ptr, &vtop->c.ld, 10);
6860 #ifdef __TINYC__
6861 else if (sizeof (long double) == sizeof (double))
6862 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr) : "m" (vtop->c.ld));
6863 #endif
6864 else if (vtop->c.ld == 0.0)
6866 else
6867 #endif
6868 if (sizeof(long double) == LDOUBLE_SIZE)
6869 *(long double*)ptr = vtop->c.ld;
6870 else if (sizeof(double) == LDOUBLE_SIZE)
6871 *(double *)ptr = (double)vtop->c.ld;
6872 else
6873 tcc_error("can't cross compile long double constants");
6874 break;
6875 #if PTR_SIZE != 8
6876 case VT_LLONG:
6877 *(long long *)ptr |= vtop->c.i;
6878 break;
6879 #else
6880 case VT_LLONG:
6881 #endif
6882 case VT_PTR:
6884 addr_t val = vtop->c.i;
6885 #if PTR_SIZE == 8
6886 if (vtop->r & VT_SYM)
6887 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
6888 else
6889 *(addr_t *)ptr |= val;
6890 #else
6891 if (vtop->r & VT_SYM)
6892 greloc(sec, vtop->sym, c, R_DATA_PTR);
6893 *(addr_t *)ptr |= val;
6894 #endif
6895 break;
6897 default:
6899 int val = vtop->c.i;
6900 #if PTR_SIZE == 8
6901 if (vtop->r & VT_SYM)
6902 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
6903 else
6904 *(int *)ptr |= val;
6905 #else
6906 if (vtop->r & VT_SYM)
6907 greloc(sec, vtop->sym, c, R_DATA_PTR);
6908 *(int *)ptr |= val;
6909 #endif
6910 break;
6914 vtop--;
6915 } else {
6916 vset(&dtype, VT_LOCAL|VT_LVAL, c);
6917 vswap();
6918 vstore();
6919 vpop();
6923 /* 't' contains the type and storage info. 'c' is the offset of the
6924 object in section 'sec'. If 'sec' is NULL, it means stack based
6925 allocation. 'flags & DIF_FIRST' is true if the array '{' must be read (multi-
6926 dimensional implicit array init handling). 'flags & DIF_SIZE_ONLY' is true if
6927 size-only evaluation is wanted (only for arrays). */
6928 static void decl_initializer(CType *type, Section *sec, unsigned long c,
6929 int flags)
6931 int len, n, no_oblock, nb, i;
6932 int size1, align1;
6933 Sym *s, *f;
6934 Sym indexsym;
6935 CType *t1;
6937 if (!(flags & DIF_HAVE_ELEM) && tok != '{' &&
6938 /* In case of strings we have special handling for arrays, so
6939 don't consume them as initializer value (which would commit them
6940 to some anonymous symbol). */
6941 tok != TOK_LSTR && tok != TOK_STR &&
6942 !(flags & DIF_SIZE_ONLY)) {
6943 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
6944 flags |= DIF_HAVE_ELEM;
6947 if ((flags & DIF_HAVE_ELEM) &&
6948 !(type->t & VT_ARRAY) &&
6949 /* Use i_c_parameter_t, to strip toplevel qualifiers.
6950 The source type might have VT_CONSTANT set, which is
6951 of course assignable to non-const elements. */
6952 is_compatible_unqualified_types(type, &vtop->type)) {
6953 init_putv(type, sec, c);
6954 } else if (type->t & VT_ARRAY) {
6955 s = type->ref;
6956 n = s->c;
6957 t1 = pointed_type(type);
6958 size1 = type_size(t1, &align1);
6960 no_oblock = 1;
6961 if (((flags & DIF_FIRST) && tok != TOK_LSTR && tok != TOK_STR) ||
6962 tok == '{') {
6963 if (tok != '{')
6964 tcc_error("character array initializer must be a literal,"
6965 " optionally enclosed in braces");
6966 skip('{');
6967 no_oblock = 0;
6970 /* only parse strings here if the type is correct (otherwise, handle
6971 them as ((w)char *) expressions) */
6972 if ((tok == TOK_LSTR &&
6973 #ifdef TCC_TARGET_PE
6974 (t1->t & VT_BTYPE) == VT_SHORT && (t1->t & VT_UNSIGNED)
6975 #else
6976 (t1->t & VT_BTYPE) == VT_INT
6977 #endif
6978 ) || (tok == TOK_STR && (t1->t & VT_BTYPE) == VT_BYTE)) {
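/* Illustrative examples, not part of the original source, of what this
   string-literal branch handles:
       char a[]  = "hi";     (sized to 3, trailing 0 stored)
       char b[2] = "hi";     (exactly fits, trailing 0 omitted - standard)
       char c[2] = "hello";  (triggers the "too long" warning below)
   and the corresponding wide-string forms with L"..." literals. */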
6979 len = 0;
6980 while (tok == TOK_STR || tok == TOK_LSTR) {
6981 int cstr_len, ch;
6983 /* compute maximum number of chars wanted */
6984 if (tok == TOK_STR)
6985 cstr_len = tokc.str.size;
6986 else
6987 cstr_len = tokc.str.size / sizeof(nwchar_t);
6988 cstr_len--;
6989 nb = cstr_len;
6990 if (n >= 0 && nb > (n - len))
6991 nb = n - len;
6992 if (!(flags & DIF_SIZE_ONLY)) {
6993 if (cstr_len > nb)
6994 tcc_warning("initializer-string for array is too long");
6995 /* in order to go faster for the common case (char
6996 string in a global variable), we handle it
6997 specifically */
6998 if (sec && tok == TOK_STR && size1 == 1) {
6999 if (!NODATA_WANTED)
7000 memcpy(sec->data + c + len, tokc.str.data, nb);
7001 } else {
7002 for(i=0;i<nb;i++) {
7003 if (tok == TOK_STR)
7004 ch = ((unsigned char *)tokc.str.data)[i];
7005 else
7006 ch = ((nwchar_t *)tokc.str.data)[i];
7007 vpushi(ch);
7008 init_putv(t1, sec, c + (len + i) * size1);
7012 len += nb;
7013 next();
7015 /* only add trailing zero if enough storage (no
7016 warning in this case since it is standard) */
7017 if (n < 0 || len < n) {
7018 if (!(flags & DIF_SIZE_ONLY)) {
7019 vpushi(0);
7020 init_putv(t1, sec, c + (len * size1));
7022 len++;
7024 len *= size1;
7025 } else {
7026 indexsym.c = 0;
7027 f = &indexsym;
7029 do_init_list:
7030 len = 0;
7031 while (tok != '}' || (flags & DIF_HAVE_ELEM)) {
7032 len = decl_designator(type, sec, c, &f, flags, len);
7033 flags &= ~DIF_HAVE_ELEM;
7034 if (type->t & VT_ARRAY) {
7035 ++indexsym.c;
7036 /* special test for multi-dimensional arrays (may not
7037 be strictly correct if designators are used at the
7038 same time) */
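/* Illustrative, not from the original source: the check below is what
   ends a row in
       int m[2][3] = { 1, 2, 3, 4, 5, 6 };
   where the inner arrays are initialized without nested braces. */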
7039 if (no_oblock && len >= n*size1)
7040 break;
7041 } else {
7042 if (s->type.t == VT_UNION)
7043 f = NULL;
7044 else
7045 f = f->next;
7046 if (no_oblock && f == NULL)
7047 break;
7050 if (tok == '}')
7051 break;
7052 skip(',');
7055 /* put zeros at the end */
7056 if (!(flags & DIF_SIZE_ONLY) && len < n*size1)
7057 init_putz(sec, c + len, n*size1 - len);
7058 if (!no_oblock)
7059 skip('}');
7060 /* patch type size if needed, which happens only for array types */
7061 if (n < 0)
7062 s->c = size1 == 1 ? len : ((len + size1 - 1)/size1);
7063 } else if ((type->t & VT_BTYPE) == VT_STRUCT) {
7064 size1 = 1;
7065 no_oblock = 1;
7066 if ((flags & DIF_FIRST) || tok == '{') {
7067 skip('{');
7068 no_oblock = 0;
7070 s = type->ref;
7071 f = s->next;
7072 n = s->c;
7073 goto do_init_list;
7074 } else if (tok == '{') {
7075 if (flags & DIF_HAVE_ELEM)
7076 skip(';');
7077 next();
7078 decl_initializer(type, sec, c, flags & ~DIF_HAVE_ELEM);
7079 skip('}');
7080 } else if ((flags & DIF_SIZE_ONLY)) {
7081 /* If we supported only ISO C we wouldn't have to accept calling
7082 this on anything other than an array if DIF_SIZE_ONLY (and even then
7083 only on the outermost level, so no recursion would be needed),
7084 because initializing a flex array member isn't supported.
7085 But GNU C supports it, so we need to recurse even into
7086 subfields of structs and arrays when DIF_SIZE_ONLY is set. */
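/* Illustrative GNU-C case, not from the original source, that reaches
   this branch during the size-only pass:
       struct S { int n; int tail[]; };
       static struct S s = { 1, { 2, 3 } };
   sizing the flexible member means walking through the other members
   too, and their initializer expressions are simply skipped here. */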
7087 /* just skip expression */
7088 skip_or_save_block(NULL);
7089 } else {
7090 if (!(flags & DIF_HAVE_ELEM)) {
7091 /* This should happen only when we haven't parsed
7092 the init element above for fear of committing a
7093 string constant to memory too early. */
7094 if (tok != TOK_STR && tok != TOK_LSTR)
7095 expect("string constant");
7096 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
7098 init_putv(type, sec, c);
7102 /* parse an initializer for type 't' if 'has_init' is non zero, and
7103 allocate space in local or global data space ('r' is either
7104 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
7105 variable 'v' of scope 'scope' is declared before initializers
7106 are parsed. If 'v' is zero, then a reference to the new object
7107 is put in the value stack. If 'has_init' is 2, a special parsing
7108 is done to handle string constants. */
7109 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r,
7110 int has_init, int v, int scope)
7112 int size, align, addr;
7113 TokenString *init_str = NULL;
7115 Section *sec;
7116 Sym *flexible_array;
7117 Sym *sym = NULL;
7118 int saved_nocode_wanted = nocode_wanted;
7119 #ifdef CONFIG_TCC_BCHECK
7120 int bcheck;
7121 #endif
7123 /* Always allocate static or global variables */
7124 if (v && (r & VT_VALMASK) == VT_CONST)
7125 nocode_wanted |= 0x80000000;
7127 #ifdef CONFIG_TCC_BCHECK
7128 bcheck = tcc_state->do_bounds_check && !NODATA_WANTED;
7129 #endif
7131 flexible_array = NULL;
7132 if ((type->t & VT_BTYPE) == VT_STRUCT) {
7133 Sym *field = type->ref->next;
7134 if (field) {
7135 while (field->next)
7136 field = field->next;
7137 if (field->type.t & VT_ARRAY && field->type.ref->c < 0)
7138 flexible_array = field;
7142 size = type_size(type, &align);
7143 /* If the size is unknown, we must evaluate it before
7144 evaluating the initializers because
7145 initializers can generate global data too
7146 (e.g. string pointers or ISO C99 compound
7147 literals). It also simplifies the handling of
7148 local initializers. */
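/* Illustrative, not from the original source: e.g. for
       char *tab[] = { "a", "bc" };
   the element count is unknown up front and the string literals
   themselves create global data, so a first size-only pass runs over
   the saved token string before the real initializer pass below. */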
7149 if (size < 0 || (flexible_array && has_init)) {
7150 if (!has_init)
7151 tcc_error("unknown type size");
7152 /* get all init string */
7153 if (has_init == 2) {
7154 init_str = tok_str_alloc();
7155 /* only get strings */
7156 while (tok == TOK_STR || tok == TOK_LSTR) {
7157 tok_str_add_tok(init_str);
7158 next();
7160 tok_str_add(init_str, -1);
7161 tok_str_add(init_str, 0);
7162 } else {
7163 skip_or_save_block(&init_str);
7165 unget_tok(0);
7167 /* compute size */
7168 begin_macro(init_str, 1);
7169 next();
7170 decl_initializer(type, NULL, 0, DIF_FIRST | DIF_SIZE_ONLY);
7171 /* prepare second initializer parsing */
7172 macro_ptr = init_str->str;
7173 next();
7175 /* if still unknown size, error */
7176 size = type_size(type, &align);
7177 if (size < 0)
7178 tcc_error("unknown type size");
7180 /* If there's a flex member and it was used in the initializer,
7181 adjust the size. */
7182 if (flexible_array &&
7183 flexible_array->type.ref->c > 0)
7184 size += flexible_array->type.ref->c
7185 * pointed_size(&flexible_array->type);
7186 /* take into account specified alignment if bigger */
7187 if (ad->a.aligned) {
7188 int speca = 1 << (ad->a.aligned - 1);
7189 if (speca > align)
7190 align = speca;
7191 } else if (ad->a.packed) {
7192 align = 1;
7195 if (!v && NODATA_WANTED)
7196 size = 0, align = 1;
7198 if ((r & VT_VALMASK) == VT_LOCAL) {
7199 sec = NULL;
7200 #ifdef CONFIG_TCC_BCHECK
7201 if (bcheck && (type->t & VT_ARRAY)) {
7202 loc--;
7204 #endif
7205 loc = (loc - size) & -align;
7206 addr = loc;
7207 #ifdef CONFIG_TCC_BCHECK
7208 /* handles bounds */
7209 /* XXX: currently, since we do only one pass, we cannot track
7210 '&' operators, so we add only arrays */
7211 if (bcheck && (type->t & VT_ARRAY)) {
7212 addr_t *bounds_ptr;
7213 /* add padding between regions */
7214 loc--;
7215 /* then add local bound info */
7216 bounds_ptr = section_ptr_add(lbounds_section, 2 * sizeof(addr_t));
7217 bounds_ptr[0] = addr;
7218 bounds_ptr[1] = size;
7220 #endif
7221 if (v) {
7222 /* local variable */
7223 #ifdef CONFIG_TCC_ASM
7224 if (ad->asm_label) {
7225 int reg = asm_parse_regvar(ad->asm_label);
7226 if (reg >= 0)
7227 r = (r & ~VT_VALMASK) | reg;
7229 #endif
7230 sym = sym_push(v, type, r, addr);
7231 if (ad->cleanup_func) {
7232 Sym *cls = sym_push2(&all_cleanups, SYM_FIELD | ++ncleanups, 0, 0);
7233 cls->prev_tok = sym;
7234 cls->next = ad->cleanup_func;
7235 cls->ncl = current_cleanups;
7236 current_cleanups = cls;
7239 sym->a = ad->a;
7240 } else {
7241 /* push local reference */
7242 vset(type, r, addr);
7244 } else {
7245 if (v && scope == VT_CONST) {
7246 /* see if the symbol was already defined */
7247 sym = sym_find(v);
7248 if (sym) {
7249 patch_storage(sym, ad, type);
7250 /* we accept several definitions of the same global variable. */
7251 if (!has_init && sym->c && elfsym(sym)->st_shndx != SHN_UNDEF)
7252 goto no_alloc;
7256 /* allocate symbol in corresponding section */
7257 sec = ad->section;
7258 if (!sec) {
7259 if (has_init)
7260 sec = data_section;
7261 else if (tcc_state->nocommon)
7262 sec = bss_section;
7265 if (sec) {
7266 addr = section_add(sec, size, align);
7267 #ifdef CONFIG_TCC_BCHECK
7268 /* add padding if bound check */
7269 if (bcheck)
7270 section_add(sec, 1, 1);
7271 #endif
7272 } else {
7273 addr = align; /* SHN_COMMON is special, symbol value is align */
7274 sec = common_section;
7277 if (v) {
7278 if (!sym) {
7279 sym = sym_push(v, type, r | VT_SYM, 0);
7280 patch_storage(sym, ad, NULL);
7282 /* update symbol definition */
7283 put_extern_sym(sym, sec, addr, size);
7284 } else {
7285 /* push global reference */
7286 vpush_ref(type, sec, addr, size);
7287 sym = vtop->sym;
7288 vtop->r |= r;
7291 #ifdef CONFIG_TCC_BCHECK
7292 /* handle bounds now because the symbol must be defined
7293 beforehand for the relocation */
7294 if (bcheck) {
7295 addr_t *bounds_ptr;
7297 greloca(bounds_section, sym, bounds_section->data_offset, R_DATA_PTR, 0);
7298 /* then add global bound info */
7299 bounds_ptr = section_ptr_add(bounds_section, 2 * sizeof(addr_t));
7300 bounds_ptr[0] = 0; /* relocated */
7301 bounds_ptr[1] = size;
7303 #endif
7306 if (type->t & VT_VLA) {
7307 int a;
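/* Illustrative, not from the original source: this path handles VLAs
   such as
       void f(int n) { int a[n]; }
   the stack pointer is saved before the first VLA of the scope so it
   can be restored when the VLAs go out of scope. */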
7309 if (NODATA_WANTED)
7310 goto no_alloc;
7312 /* save current stack pointer */
7313 if (vlas_in_scope == 0) {
7314 if (vla_sp_root_loc == -1)
7315 vla_sp_root_loc = (loc -= PTR_SIZE);
7316 gen_vla_sp_save(vla_sp_root_loc);
7319 vla_runtime_type_size(type, &a);
7320 gen_vla_alloc(type, a);
7321 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
7322 /* on _WIN64, because of the function args scratch area, the
7323 result of alloca differs from RSP and is returned in RAX. */
7324 gen_vla_result(addr), addr = (loc -= PTR_SIZE);
7325 #endif
7326 gen_vla_sp_save(addr);
7327 vla_sp_loc = addr;
7328 vlas_in_scope++;
7330 } else if (has_init) {
7331 size_t oldreloc_offset = 0;
7332 if (sec && sec->reloc)
7333 oldreloc_offset = sec->reloc->data_offset;
7334 decl_initializer(type, sec, addr, DIF_FIRST);
7335 if (sec && sec->reloc)
7336 squeeze_multi_relocs(sec, oldreloc_offset);
7337 /* patch flexible array member size back to -1, */
7338 /* for possible subsequent similar declarations */
7339 if (flexible_array)
7340 flexible_array->type.ref->c = -1;
7343 no_alloc:
7344 /* restore parse state if needed */
7345 if (init_str) {
7346 end_macro();
7347 next();
7350 nocode_wanted = saved_nocode_wanted;
7353 /* parse a function defined by symbol 'sym' and generate its code in
7354 'cur_text_section' */
7355 static void gen_function(Sym *sym)
7357 nocode_wanted = 0;
7358 ind = cur_text_section->data_offset;
7359 if (sym->a.aligned) {
7360 size_t newoff = section_add(cur_text_section, 0,
7361 1 << (sym->a.aligned - 1));
7362 gen_fill_nops(newoff - ind);
7364 /* NOTE: we patch the symbol size later */
7365 put_extern_sym(sym, cur_text_section, ind, 0);
7366 funcname = get_tok_str(sym->v, NULL);
7367 func_ind = ind;
7368 /* Initialize VLA state */
7369 vla_sp_loc = -1;
7370 vla_sp_root_loc = -1;
7371 /* put debug symbol */
7372 tcc_debug_funcstart(tcc_state, sym);
7373 /* push a dummy symbol to enable local sym storage */
7374 sym_push2(&local_stack, SYM_FIELD, 0, 0);
7375 local_scope = 1; /* for function parameters */
7376 gfunc_prolog(&sym->type);
7377 reset_local_scope();
7378 rsym = 0;
7379 clear_temp_local_var_list();
7380 block(NULL, NULL, NULL, NULL, 0);
7381 gsym(rsym);
7382 nocode_wanted = 0;
7383 gfunc_epilog();
7384 cur_text_section->data_offset = ind;
7385 label_pop(&global_label_stack, NULL, 0);
7386 /* reset local stack */
7387 reset_local_scope();
7388 sym_pop(&local_stack, NULL, 0);
7389 /* end of function */
7390 /* patch symbol size */
7391 elfsym(sym)->st_size = ind - func_ind;
7392 tcc_debug_funcend(tcc_state, ind - func_ind);
7393 /* It's better to crash than to generate wrong code */
7394 cur_text_section = NULL;
7395 funcname = ""; /* for safety */
7396 func_vt.t = VT_VOID; /* for safety */
7397 func_var = 0; /* for safety */
7398 ind = 0; /* for safety */
7399 nocode_wanted = 0x80000000;
7400 check_vstack();
7403 static void gen_inline_functions(TCCState *s)
7405 Sym *sym;
7406 int inline_generated, i, ln;
7407 struct InlineFunc *fn;
7409 ln = file->line_num;
7410 /* iterate while inline functions are referenced */
7411 do {
7412 inline_generated = 0;
7413 for (i = 0; i < s->nb_inline_fns; ++i) {
7414 fn = s->inline_fns[i];
7415 sym = fn->sym;
7416 if (sym && (sym->c || !(sym->type.t & VT_INLINE))) {
7417 /* the function was used or forced (and then not internal):
7418 generate its code and convert it to a normal function */
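/* Illustrative, not from the original source: e.g.
       static inline int sq(int x) { return x * x; }
   is only recorded as a token string at declaration time; its code is
   emitted here, once, if something referenced it. */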
7419 fn->sym = NULL;
7420 if (file)
7421 pstrcpy(file->filename, sizeof file->filename, fn->filename);
7422 begin_macro(fn->func_str, 1);
7423 next();
7424 cur_text_section = text_section;
7425 gen_function(sym);
7426 end_macro();
7428 inline_generated = 1;
7431 } while (inline_generated);
7432 file->line_num = ln;
7435 ST_FUNC void free_inline_functions(TCCState *s)
7437 int i;
7438 /* free tokens of unused inline functions */
7439 for (i = 0; i < s->nb_inline_fns; ++i) {
7440 struct InlineFunc *fn = s->inline_fns[i];
7441 if (fn->sym)
7442 tok_str_free(fn->func_str);
7444 dynarray_reset(&s->inline_fns, &s->nb_inline_fns);
7447 /* 'l' is VT_LOCAL or VT_CONST to define default storage type, or VT_CMP
7448 if parsing old style parameter decl list (and FUNC_SYM is set then) */
7449 static int decl0(int l, int is_for_loop_init, Sym *func_sym)
7451 int v, has_init, r;
7452 CType type, btype;
7453 Sym *sym;
7454 AttributeDef ad, adbase;
7456 while (1) {
7457 if (tok == TOK_STATIC_ASSERT) {
7458 int c;
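/* Illustrative, not from the original source:
       _Static_assert(sizeof(int) == 4, "unexpected int size");
   the condition is evaluated as a constant expression and the message
   token is reported if it is zero. */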
7460 next();
7461 skip('(');
7462 c = expr_const();
7463 skip(',');
7464 if (c == 0)
7465 tcc_error("%s", get_tok_str(tok, &tokc));
7466 next();
7467 skip(')');
7468 skip(';');
7469 continue;
7471 if (!parse_btype(&btype, &adbase)) {
7472 if (is_for_loop_init)
7473 return 0;
7474 /* skip redundant ';' if not in old parameter decl scope */
7475 if (tok == ';' && l != VT_CMP) {
7476 next();
7477 continue;
7479 if (l != VT_CONST)
7480 break;
7481 if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
7482 /* global asm block */
7483 asm_global_instr();
7484 continue;
7486 if (tok >= TOK_UIDENT) {
7487 /* special test for old K&R protos without explicit int
7488 type. Only accepted when defining global data */
7489 btype.t = VT_INT;
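/* Illustrative, not from the original source: this accepts old-style
   implicit-int declarations at file scope, e.g.
       main() { return 0; }
*/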
7490 } else {
7491 if (tok != TOK_EOF)
7492 expect("declaration");
7493 break;
7496 if (tok == ';') {
7497 if ((btype.t & VT_BTYPE) == VT_STRUCT) {
7498 int v = btype.ref->v;
7499 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) >= SYM_FIRST_ANOM)
7500 tcc_warning("unnamed struct/union that defines no instances");
7501 next();
7502 continue;
7504 if (IS_ENUM(btype.t)) {
7505 next();
7506 continue;
7509 while (1) { /* iterate thru each declaration */
7510 type = btype;
7511 /* If the base type itself was an array type of unspecified
7512 size (like in 'typedef int arr[]; arr x = {1};') then
7513 we will overwrite the unknown size by the real one for
7514 this decl. We need to unshare the ref symbol holding
7515 that size. */
7516 if ((type.t & VT_ARRAY) && type.ref->c < 0) {
7517 type.ref = sym_push(SYM_FIELD, &type.ref->type, 0, type.ref->c);
7519 ad = adbase;
7520 type_decl(&type, &ad, &v, TYPE_DIRECT);
7521 #if 0
7523 char buf[500];
7524 type_to_str(buf, sizeof(buf), &type, get_tok_str(v, NULL));
7525 printf("type = '%s'\n", buf);
7527 #endif
7528 if ((type.t & VT_BTYPE) == VT_FUNC) {
7529 /* if old style function prototype, we accept a
7530 declaration list */
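/* Illustrative, not from the original source: the K&R form handled
   here is
       int f(a, b)
           int a;
           int b;
       { return a + b; }
   the parameter declarations between ')' and '{' are parsed by the
   recursive decl0(VT_CMP, ...) call below. */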
7531 sym = type.ref;
7532 if (sym->f.func_type == FUNC_OLD && l == VT_CONST)
7533 decl0(VT_CMP, 0, sym);
7534 /* always compile 'extern inline' */
7535 if (type.t & VT_EXTERN)
7536 type.t &= ~VT_INLINE;
7539 if (gnu_ext && (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
7540 ad.asm_label = asm_label_instr();
7541 /* parse one last attribute list, after asm label */
7542 parse_attribute(&ad);
7543 #if 0
7544 /* gcc does not allow __asm__("label") with function definition,
7545 but why not ... */
7546 if (tok == '{')
7547 expect(";");
7548 #endif
7551 #ifdef TCC_TARGET_PE
7552 if (ad.a.dllimport || ad.a.dllexport) {
7553 if (type.t & (VT_STATIC|VT_TYPEDEF))
7554 tcc_error("cannot have dll linkage with static or typedef");
7555 if (ad.a.dllimport) {
7556 if ((type.t & VT_BTYPE) == VT_FUNC)
7557 ad.a.dllimport = 0;
7558 else
7559 type.t |= VT_EXTERN;
7562 #endif
7563 if (tok == '{') {
7564 if (l != VT_CONST)
7565 tcc_error("cannot use local functions");
7566 if ((type.t & VT_BTYPE) != VT_FUNC)
7567 expect("function definition");
7569 /* reject abstract declarators in function definitions;
7570 make old style params without a decl have int type */
7571 sym = type.ref;
7572 while ((sym = sym->next) != NULL) {
7573 if (!(sym->v & ~SYM_FIELD))
7574 expect("identifier");
7575 if (sym->type.t == VT_VOID)
7576 sym->type = int_type;
7579 /* put function symbol */
7580 type.t &= ~VT_EXTERN;
7581 sym = external_sym(v, &type, 0, &ad);
7582 /* static inline functions are just recorded as a kind
7583 of macro. Their code will be emitted at the end of
7584 the compilation unit only if they are used */
7585 if (sym->type.t & VT_INLINE) {
7586 struct InlineFunc *fn;
7587 const char *filename;
7589 filename = file ? file->filename : "";
7590 fn = tcc_malloc(sizeof *fn + strlen(filename));
7591 strcpy(fn->filename, filename);
7592 fn->sym = sym;
7593 skip_or_save_block(&fn->func_str);
7594 dynarray_add(&tcc_state->inline_fns,
7595 &tcc_state->nb_inline_fns, fn);
7596 } else {
7597 /* compute text section */
7598 cur_text_section = ad.section;
7599 if (!cur_text_section)
7600 cur_text_section = text_section;
7601 gen_function(sym);
7603 break;
7604 } else {
7605 if (l == VT_CMP) {
7606 /* find parameter in function parameter list */
7607 for (sym = func_sym->next; sym; sym = sym->next)
7608 if ((sym->v & ~SYM_FIELD) == v)
7609 goto found;
7610 tcc_error("declaration for parameter '%s' but no such parameter",
7611 get_tok_str(v, NULL));
7612 found:
7613 if (type.t & VT_STORAGE) /* 'register' is okay */
7614 tcc_error("storage class specified for '%s'",
7615 get_tok_str(v, NULL));
7616 if (sym->type.t != VT_VOID)
7617 tcc_error("redefinition of parameter '%s'",
7618 get_tok_str(v, NULL));
7619 convert_parameter_type(&type);
7620 sym->type = type;
7621 } else if (type.t & VT_TYPEDEF) {
7622 /* save typedefed type */
7623 /* XXX: test storage specifiers ? */
7624 sym = sym_find(v);
7625 if (sym && sym->sym_scope == local_scope) {
7626 if (!is_compatible_types(&sym->type, &type)
7627 || !(sym->type.t & VT_TYPEDEF))
7628 tcc_error("incompatible redefinition of '%s'",
7629 get_tok_str(v, NULL));
7630 sym->type = type;
7631 } else {
7632 sym = sym_push(v, &type, 0, 0);
7634 sym->a = ad.a;
7635 sym->f = ad.f;
7636 } else if ((type.t & VT_BTYPE) == VT_VOID
7637 && !(type.t & VT_EXTERN)) {
7638 tcc_error("declaration of void object");
7639 } else {
7640 r = 0;
7641 if ((type.t & VT_BTYPE) == VT_FUNC) {
7642 /* external function definition */
7643 /* specific case for func_call attribute */
7644 type.ref->f = ad.f;
7645 } else if (!(type.t & VT_ARRAY)) {
7646 /* not lvalue if array */
7647 r |= lvalue_type(type.t);
7649 has_init = (tok == '=');
7650 if (has_init && (type.t & VT_VLA))
7651 tcc_error("variable length array cannot be initialized");
7652 if (((type.t & VT_EXTERN) && (!has_init || l != VT_CONST))
7653 || (type.t & VT_BTYPE) == VT_FUNC
7654 /* as with GCC, uninitialized global arrays with no size
7655 are considered extern: */
7656 || ((type.t & VT_ARRAY) && !has_init
7657 && l == VT_CONST && type.ref->c < 0)
7659 /* external variable or function */
7660 type.t |= VT_EXTERN;
7661 sym = external_sym(v, &type, r, &ad);
7662 if (ad.alias_target) {
7663 ElfSym *esym;
7664 Sym *alias_target;
7665 alias_target = sym_find(ad.alias_target);
7666 esym = elfsym(alias_target);
7667 if (!esym)
7668 tcc_error("unsupported forward __alias__ attribute");
7669 put_extern_sym2(sym, esym->st_shndx, esym->st_value, esym->st_size, 0);
7671 } else {
7672 if (type.t & VT_STATIC)
7673 r |= VT_CONST;
7674 else
7675 r |= l;
7676 if (has_init)
7677 next();
7678 else if (l == VT_CONST)
7679 /* uninitialized global variables may be overridden */
7680 type.t |= VT_EXTERN;
7681 decl_initializer_alloc(&type, &ad, r, has_init, v, l);
7684 if (tok != ',') {
7685 if (is_for_loop_init)
7686 return 1;
7687 skip(';');
7688 break;
7690 next();
7694 return 0;
7697 static void decl(int l)
7699 decl0(l, 0, NULL);
7702 /* ------------------------------------------------------------------------- */
7703 #undef gjmp_addr
7704 #undef gjmp
7705 /* ------------------------------------------------------------------------- */