1 /*
2 * TCC - Tiny C Compiler
3 *
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 #define USING_GLOBALS
22 #include "tcc.h"
24 /********************************************************/
25 /* global variables */
27 /* loc : local variable index
28 ind : output code index
29 rsym: return symbol
30 anon_sym: anonymous symbol index */
32 ST_DATA int rsym, anon_sym, ind, loc;
34 ST_DATA Sym *global_stack;
35 ST_DATA Sym *local_stack;
36 ST_DATA Sym *define_stack;
37 ST_DATA Sym *global_label_stack;
38 ST_DATA Sym *local_label_stack;
40 static Sym *sym_free_first;
41 static void **sym_pools;
42 static int nb_sym_pools;
44 static Sym *all_cleanups, *pending_gotos;
45 static int local_scope;
46 static int in_sizeof;
47 static int in_generic;
48 static int section_sym;
50 ST_DATA SValue *vtop;
51 static SValue _vstack[1 + VSTACK_SIZE];
52 #define vstack (_vstack + 1)
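/* Note: _vstack has one extra slot so that the "empty" position, vstack - 1
   (the initial value of vtop, see tccgen_init), still lies inside the array
   and can safely be written to:

       _vstack[0]            <- vtop while the stack is empty (== vstack - 1)
       _vstack[1]            <- vstack[0], first real entry
       ...
       _vstack[VSTACK_SIZE]  <- vstack[VSTACK_SIZE - 1], last usable entry  */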
54 ST_DATA int const_wanted; /* true if constant wanted */
55 ST_DATA int nocode_wanted; /* no code generation wanted */
56 #define unevalmask 0xffff /* unevaluated subexpression */
57 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
58 #define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */
60 /* Automagical code suppression ----> */
61 #define CODE_OFF() (nocode_wanted |= 0x20000000)
62 #define CODE_ON() (nocode_wanted &= ~0x20000000)
64 /* Clear 'nocode_wanted' at label if it was used */
65 ST_FUNC void gsym(int t) { if (t) { gsym_addr(t, ind); CODE_ON(); }}
66 static int gind(void) { CODE_ON(); return ind; }
68 /* Set 'nocode_wanted' after unconditional jumps */
69 static void gjmp_addr_acs(int t) { gjmp_addr(t); CODE_OFF(); }
70 static int gjmp_acs(int t) { t = gjmp(t); CODE_OFF(); return t; }
72 /* These are #undef'd at the end of this file */
73 #define gjmp_addr gjmp_addr_acs
74 #define gjmp gjmp_acs
75 /* <---- */
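/* A rough picture of how nocode_wanted packs several suppression reasons
   into one int (summary of the macros above, slightly simplified):

     bits 0..15  (unevalmask)  nesting of unevaluated sub-expressions,
                               e.g. the operand of sizeof
     bit 0x20000000            set by CODE_OFF() after an unconditional jump,
                               cleared again by CODE_ON()/gsym() at used labels
     bit 0x80000000            set in tccgen_compile() at file scope, where no
                               code but still static data may be emitted

   Roughly, NODATA_WANTED (nocode_wanted > 0, i.e. suppressed for some reason
   other than merely being at file scope) also skips static data, while
   STATIC_DATA_WANTED tests the top bits to allow static data only. */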
77 ST_DATA int global_expr; /* true if compound literals must be allocated globally (used during initializer parsing) */
78 ST_DATA CType func_vt; /* current function return type (used by return instruction) */
79 ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
80 ST_DATA int func_vc;
81 static int last_line_num, new_file, func_ind; /* debug info control */
82 ST_DATA const char *funcname;
83 ST_DATA CType int_type, func_old_type, char_pointer_type;
85 #if PTR_SIZE == 4
86 #define VT_SIZE_T (VT_INT | VT_UNSIGNED)
87 #define VT_PTRDIFF_T VT_INT
88 #elif LONG_SIZE == 4
89 #define VT_SIZE_T (VT_LLONG | VT_UNSIGNED)
90 #define VT_PTRDIFF_T VT_LLONG
91 #else
92 #define VT_SIZE_T (VT_LONG | VT_LLONG | VT_UNSIGNED)
93 #define VT_PTRDIFF_T (VT_LONG | VT_LLONG)
94 #endif
96 ST_DATA struct switch_t {
97 struct case_t {
98 int64_t v1, v2;
99 int sym;
100 } **p; int n; /* list of case ranges */
101 int def_sym; /* default symbol */
102 int *bsym;
103 struct scope *scope;
104 struct switch_t *prev;
105 SValue sv;
106 } *cur_switch; /* current switch */
108 #define MAX_TEMP_LOCAL_VARIABLE_NUMBER 8
109 /* list of temporary local variables on the stack in the current function. */
110 ST_DATA struct temp_local_variable {
111 int location; // offset on stack, stored in SValue.c.i
112 short size;
113 short align;
114 } arr_temp_local_vars[MAX_TEMP_LOCAL_VARIABLE_NUMBER];
115 short nb_temp_local_vars;
117 static struct scope {
118 struct scope *prev;
119 struct { int loc, num; } vla;
120 struct { Sym *s; int n; } cl;
121 int *bsym, *csym;
122 Sym *lstk, *llstk;
123 } *cur_scope, *loop_scope, *root_scope;
125 /********************************************************/
126 #ifndef CONFIG_TCC_ASM
127 ST_FUNC void asm_instr(void)
129 tcc_error("inline asm() not supported");
131 ST_FUNC void asm_global_instr(void)
133 tcc_error("inline asm() not supported");
135 #endif
137 /* ------------------------------------------------------------------------- */
139 static void gen_cast(CType *type);
140 static void gen_cast_s(int t);
141 static inline CType *pointed_type(CType *type);
142 static int is_compatible_types(CType *type1, CType *type2);
143 static int parse_btype(CType *type, AttributeDef *ad);
144 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td);
145 static void parse_expr_type(CType *type);
146 static void init_putv(CType *type, Section *sec, unsigned long c);
147 static void decl_initializer(CType *type, Section *sec, unsigned long c, int flags);
148 static void block(int is_expr);
149 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
150 static void decl(int l);
151 static int decl0(int l, int is_for_loop_init, Sym *);
152 static void expr_eq(void);
153 static void vla_runtime_type_size(CType *type, int *a);
154 static int is_compatible_unqualified_types(CType *type1, CType *type2);
155 static inline int64_t expr_const64(void);
156 static void vpush64(int ty, unsigned long long v);
157 static void vpush(CType *type);
158 static int gvtst(int inv, int t);
159 static void gen_inline_functions(TCCState *s);
160 static void free_inline_functions(TCCState *s);
161 static void skip_or_save_block(TokenString **str);
162 static void gv_dup(void);
163 static int get_temp_local_var(int size,int align);
164 static void clear_temp_local_var_list();
165 static void cast_error(CType *st, CType *dt);
167 ST_INLN int is_float(int t)
169 int bt = t & VT_BTYPE;
170 return bt == VT_LDOUBLE
171 || bt == VT_DOUBLE
172 || bt == VT_FLOAT
173 || bt == VT_QFLOAT;
176 static inline int is_integer_btype(int bt)
178 return bt == VT_BYTE
179 || bt == VT_BOOL
180 || bt == VT_SHORT
181 || bt == VT_INT
182 || bt == VT_LLONG;
185 static int btype_size(int bt)
187 return bt == VT_BYTE || bt == VT_BOOL ? 1 :
188 bt == VT_SHORT ? 2 :
189 bt == VT_INT ? 4 :
190 bt == VT_LLONG ? 8 :
191 bt == VT_PTR ? PTR_SIZE : 0;
194 /* returns function return register from type */
195 static int R_RET(int t)
197 if (!is_float(t))
198 return REG_IRET;
199 #ifdef TCC_TARGET_X86_64
200 if ((t & VT_BTYPE) == VT_LDOUBLE)
201 return TREG_ST0;
202 #elif defined TCC_TARGET_RISCV64
203 if ((t & VT_BTYPE) == VT_LDOUBLE)
204 return REG_IRET;
205 #endif
206 return REG_FRET;
209 /* returns 2nd function return register, if any */
210 static int R2_RET(int t)
212 t &= VT_BTYPE;
213 #if PTR_SIZE == 4
214 if (t == VT_LLONG)
215 return REG_IRE2;
216 #elif defined TCC_TARGET_X86_64
217 if (t == VT_QLONG)
218 return REG_IRE2;
219 if (t == VT_QFLOAT)
220 return REG_FRE2;
221 #elif defined TCC_TARGET_RISCV64
222 if (t == VT_LDOUBLE)
223 return REG_IRE2;
224 #endif
225 return VT_CONST;
228 /* returns true for two-word types */
229 #define USING_TWO_WORDS(t) (R2_RET(t) != VT_CONST)
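/* Illustrative sketch (not a real call site): how a two-word return value is
   described with the helpers above on a target where PTR_SIZE == 4. */
#if 0
static void example_llong_return(SValue *sv)
{
    /* a 'long long' needs both words: */
    PUT_R_RET(sv, VT_LLONG);    /* sv->r  = R_RET(VT_LLONG)  == REG_IRET */
                                /* sv->r2 = R2_RET(VT_LLONG) == REG_IRE2 */
    /* a plain 'int' needs only one; R2_RET() then returns VT_CONST,
       which is exactly what USING_TWO_WORDS() checks for: */
    PUT_R_RET(sv, VT_INT);      /* sv->r2 == VT_CONST, single word */
}
#endif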
231 /* put function return registers to stack value */
232 static void PUT_R_RET(SValue *sv, int t)
234 sv->r = R_RET(t), sv->r2 = R2_RET(t);
237 /* returns function return register class for type t */
238 static int RC_RET(int t)
240 return reg_classes[R_RET(t)] & ~(RC_FLOAT | RC_INT);
243 /* returns generic register class for type t */
244 static int RC_TYPE(int t)
246 if (!is_float(t))
247 return RC_INT;
248 #ifdef TCC_TARGET_X86_64
249 if ((t & VT_BTYPE) == VT_LDOUBLE)
250 return RC_ST0;
251 if ((t & VT_BTYPE) == VT_QFLOAT)
252 return RC_FRET;
253 #elif defined TCC_TARGET_RISCV64
254 if ((t & VT_BTYPE) == VT_LDOUBLE)
255 return RC_INT;
256 #endif
257 return RC_FLOAT;
260 /* returns 2nd register class corresponding to t and rc */
261 static int RC2_TYPE(int t, int rc)
263 if (!USING_TWO_WORDS(t))
264 return 0;
265 #ifdef RC_IRE2
266 if (rc == RC_IRET)
267 return RC_IRE2;
268 #endif
269 #ifdef RC_FRE2
270 if (rc == RC_FRET)
271 return RC_FRE2;
272 #endif
273 if (rc & RC_FLOAT)
274 return RC_FLOAT;
275 return RC_INT;
278 /* we use our own 'finite' function to avoid potential problems with
279 non-standard math libs */
280 /* XXX: endianness dependent */
281 ST_FUNC int ieee_finite(double d)
283 int p[4];
284 memcpy(p, &d, sizeof(double));
285 return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
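/* How the bit trick above works (little-endian layout, matching the XXX
   note): p[1] holds the high 32 bits of the double, i.e. sign(31),
   exponent(30..20), mantissa(19..0).  OR-ing with 0x800fffff forces the sign
   and mantissa bits to 1 while keeping the exponent; adding 1 then carries
   out of bit 31 exactly when the exponent field is all ones (Inf/NaN), so
   the final shift yields 1 for finite values and 0 otherwise. */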
288 /* compiling intel long double natively */
289 #if (defined __i386__ || defined __x86_64__) \
290 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
291 # define TCC_IS_NATIVE_387
292 #endif
294 ST_FUNC void test_lvalue(void)
296 if (!(vtop->r & VT_LVAL))
297 expect("lvalue");
300 ST_FUNC void check_vstack(void)
302 if (vtop != vstack - 1)
303 tcc_error("internal compiler error: vstack leak (%d)", vtop - vstack + 1);
306 /* ------------------------------------------------------------------------- */
307 /* vstack debugging aid */
309 #if 0
310 void pv (const char *lbl, int a, int b)
312 int i;
313 for (i = a; i < a + b; ++i) {
314 SValue *p = &vtop[-i];
315 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
316 lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
319 #endif
321 /* ------------------------------------------------------------------------- */
322 /* start of translation unit info */
323 ST_FUNC void tcc_debug_start(TCCState *s1)
325 if (s1->do_debug) {
326 char buf[512];
328 /* file info: full path + filename */
329 section_sym = put_elf_sym(symtab_section, 0, 0,
330 ELFW(ST_INFO)(STB_LOCAL, STT_SECTION), 0,
331 text_section->sh_num, NULL);
332 getcwd(buf, sizeof(buf));
333 #ifdef _WIN32
334 normalize_slashes(buf);
335 #endif
336 pstrcat(buf, sizeof(buf), "/");
337 put_stabs_r(s1, buf, N_SO, 0, 0,
338 text_section->data_offset, text_section, section_sym);
339 put_stabs_r(s1, file->prev->filename, N_SO, 0, 0,
340 text_section->data_offset, text_section, section_sym);
341 new_file = last_line_num = 0;
342 func_ind = -1;
343 /* we're currently 'including' the <command line> */
344 tcc_debug_bincl(s1);
347 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
348 symbols can be safely used */
349 put_elf_sym(symtab_section, 0, 0,
350 ELFW(ST_INFO)(STB_LOCAL, STT_FILE), 0,
351 SHN_ABS, file->filename);
354 /* put end of translation unit info */
355 ST_FUNC void tcc_debug_end(TCCState *s1)
357 if (!s1->do_debug)
358 return;
359 put_stabs_r(s1, NULL, N_SO, 0, 0,
360 text_section->data_offset, text_section, section_sym);
363 static BufferedFile* put_new_file(TCCState *s1)
365 BufferedFile *f = file;
366 /* use upper file if from inline ":asm:" */
367 if (f->filename[0] == ':')
368 f = f->prev;
369 if (f && new_file) {
370 put_stabs_r(s1, f->filename, N_SOL, 0, 0, ind, text_section, section_sym);
371 new_file = last_line_num = 0;
373 return f;
376 /* generate line number info */
377 ST_FUNC void tcc_debug_line(TCCState *s1)
379 BufferedFile *f;
380 if (!s1->do_debug
381 || cur_text_section != text_section
382 || !(f = put_new_file(s1))
383 || last_line_num == f->line_num)
384 return;
385 if (func_ind != -1) {
386 put_stabn(s1, N_SLINE, 0, f->line_num, ind - func_ind);
387 } else {
388 /* from tcc_assemble */
389 put_stabs_r(s1, NULL, N_SLINE, 0, f->line_num, ind, text_section, section_sym);
391 last_line_num = f->line_num;
394 /* put function symbol */
395 ST_FUNC void tcc_debug_funcstart(TCCState *s1, Sym *sym)
397 char buf[512];
398 BufferedFile *f;
399 if (!s1->do_debug || !(f = put_new_file(s1)))
400 return;
401 /* XXX: we put here a dummy type */
402 snprintf(buf, sizeof(buf), "%s:%c1",
403 funcname, sym->type.t & VT_STATIC ? 'f' : 'F');
404 put_stabs_r(s1, buf, N_FUN, 0, f->line_num, 0, cur_text_section, sym->c);
405 tcc_debug_line(s1);
408 /* put function size */
409 ST_FUNC void tcc_debug_funcend(TCCState *s1, int size)
411 if (!s1->do_debug)
412 return;
413 put_stabn(s1, N_FUN, 0, 0, size);
416 /* put alternative filename */
417 ST_FUNC void tcc_debug_putfile(TCCState *s1, const char *filename)
419 if (0 == strcmp(file->filename, filename))
420 return;
421 pstrcpy(file->filename, sizeof(file->filename), filename);
422 new_file = 1;
425 /* begin of #include */
426 ST_FUNC void tcc_debug_bincl(TCCState *s1)
428 if (!s1->do_debug)
429 return;
430 put_stabs(s1, file->filename, N_BINCL, 0, 0, 0);
431 new_file = 1;
434 /* end of #include */
435 ST_FUNC void tcc_debug_eincl(TCCState *s1)
437 if (!s1->do_debug)
438 return;
439 put_stabn(s1, N_EINCL, 0, 0, 0);
440 new_file = 1;
443 /* ------------------------------------------------------------------------- */
444 /* initialize vstack and types. This must be done also for tcc -E */
445 ST_FUNC void tccgen_init(TCCState *s1)
447 vtop = vstack - 1;
448 memset(vtop, 0, sizeof *vtop);
450 /* define some often used types */
451 int_type.t = VT_INT;
452 char_pointer_type.t = VT_BYTE;
453 mk_pointer(&char_pointer_type);
454 func_old_type.t = VT_FUNC;
455 func_old_type.ref = sym_push(SYM_FIELD, &int_type, 0, 0);
456 func_old_type.ref->f.func_call = FUNC_CDECL;
457 func_old_type.ref->f.func_type = FUNC_OLD;
460 ST_FUNC int tccgen_compile(TCCState *s1)
462 cur_text_section = NULL;
463 funcname = "";
464 anon_sym = SYM_FIRST_ANOM;
465 section_sym = 0;
466 const_wanted = 0;
467 nocode_wanted = 0x80000000;
468 local_scope = 0;
470 tcc_debug_start(s1);
471 #ifdef TCC_TARGET_ARM
472 arm_init(s1);
473 #endif
474 #ifdef INC_DEBUG
475 printf("%s: **** new file\n", file->filename);
476 #endif
477 parse_flags = PARSE_FLAG_PREPROCESS | PARSE_FLAG_TOK_NUM | PARSE_FLAG_TOK_STR;
478 next();
479 decl(VT_CONST);
480 gen_inline_functions(s1);
481 check_vstack();
482 /* end of translation unit info */
483 tcc_debug_end(s1);
484 return 0;
487 ST_FUNC void tccgen_finish(TCCState *s1)
489 free_inline_functions(s1);
490 sym_pop(&global_stack, NULL, 0);
491 sym_pop(&local_stack, NULL, 0);
492 /* free preprocessor macros */
493 free_defines(NULL);
494 /* free sym_pools */
495 dynarray_reset(&sym_pools, &nb_sym_pools);
496 sym_free_first = NULL;
499 /* ------------------------------------------------------------------------- */
500 ST_FUNC ElfSym *elfsym(Sym *s)
502 if (!s || !s->c)
503 return NULL;
504 return &((ElfSym *)symtab_section->data)[s->c];
507 /* apply storage attributes to Elf symbol */
508 ST_FUNC void update_storage(Sym *sym)
510 ElfSym *esym;
511 int sym_bind, old_sym_bind;
513 esym = elfsym(sym);
514 if (!esym)
515 return;
517 if (sym->a.visibility)
518 esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1))
519 | sym->a.visibility;
521 if (sym->type.t & (VT_STATIC | VT_INLINE))
522 sym_bind = STB_LOCAL;
523 else if (sym->a.weak)
524 sym_bind = STB_WEAK;
525 else
526 sym_bind = STB_GLOBAL;
527 old_sym_bind = ELFW(ST_BIND)(esym->st_info);
528 if (sym_bind != old_sym_bind) {
529 esym->st_info = ELFW(ST_INFO)(sym_bind, ELFW(ST_TYPE)(esym->st_info));
532 #ifdef TCC_TARGET_PE
533 if (sym->a.dllimport)
534 esym->st_other |= ST_PE_IMPORT;
535 if (sym->a.dllexport)
536 esym->st_other |= ST_PE_EXPORT;
537 #endif
539 #if 0
540 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
541 get_tok_str(sym->v, NULL),
542 sym_bind == STB_WEAK ? 'w' : sym_bind == STB_LOCAL ? 'l' : 'g',
543 sym->a.visibility,
544 sym->a.dllexport,
545 sym->a.dllimport
547 #endif
550 /* ------------------------------------------------------------------------- */
551 /* update sym->c so that it points to an external symbol in section
552 'section' with value 'value' */
554 ST_FUNC void put_extern_sym2(Sym *sym, int sh_num,
555 addr_t value, unsigned long size,
556 int can_add_underscore)
558 int sym_type, sym_bind, info, other, t;
559 ElfSym *esym;
560 const char *name;
561 char buf1[256];
562 #ifdef CONFIG_TCC_BCHECK
563 char buf[32];
564 #endif
566 if (!sym->c) {
567 name = get_tok_str(sym->v, NULL);
568 #ifdef CONFIG_TCC_BCHECK
569 if (tcc_state->do_bounds_check) {
570 /* XXX: avoid doing that for statics ? */
571 /* if bound checking is activated, we change some function
572 names by adding the "__bound" prefix */
573 switch(sym->v) {
574 #ifdef TCC_TARGET_PE
575 /* XXX: we rely only on malloc hooks */
576 case TOK_malloc:
577 case TOK_free:
578 case TOK_realloc:
579 case TOK_memalign:
580 case TOK_calloc:
581 #endif
582 case TOK_memcpy:
583 case TOK_memmove:
584 case TOK_memset:
585 case TOK_memcmp:
586 case TOK_strlen:
587 case TOK_strcpy:
588 case TOK_strncpy:
589 case TOK_strcmp:
590 case TOK_strncmp:
591 case TOK_strcat:
592 case TOK_strchr:
593 case TOK_strdup:
594 case TOK_alloca:
595 case TOK_mmap:
596 case TOK_munmap:
597 strcpy(buf, "__bound_");
598 strcat(buf, name);
599 name = buf;
600 break;
603 #endif
604 t = sym->type.t;
605 if ((t & VT_BTYPE) == VT_FUNC) {
606 sym_type = STT_FUNC;
607 } else if ((t & VT_BTYPE) == VT_VOID) {
608 sym_type = STT_NOTYPE;
609 } else {
610 sym_type = STT_OBJECT;
612 if (t & (VT_STATIC | VT_INLINE))
613 sym_bind = STB_LOCAL;
614 else
615 sym_bind = STB_GLOBAL;
616 other = 0;
617 #ifdef TCC_TARGET_PE
618 if (sym_type == STT_FUNC && sym->type.ref) {
619 Sym *ref = sym->type.ref;
620 if (ref->a.nodecorate) {
621 can_add_underscore = 0;
623 if (ref->f.func_call == FUNC_STDCALL && can_add_underscore) {
624 sprintf(buf1, "_%s@%d", name, ref->f.func_args * PTR_SIZE);
625 name = buf1;
626 other |= ST_PE_STDCALL;
627 can_add_underscore = 0;
630 #endif
631 if (tcc_state->leading_underscore && can_add_underscore) {
632 buf1[0] = '_';
633 pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
634 name = buf1;
636 if (sym->asm_label)
637 name = get_tok_str(sym->asm_label, NULL);
638 info = ELFW(ST_INFO)(sym_bind, sym_type);
639 sym->c = put_elf_sym(symtab_section, value, size, info, other, sh_num, name);
640 } else {
641 esym = elfsym(sym);
642 esym->st_value = value;
643 esym->st_size = size;
644 esym->st_shndx = sh_num;
646 update_storage(sym);
649 ST_FUNC void put_extern_sym(Sym *sym, Section *section,
650 addr_t value, unsigned long size)
652 int sh_num = section ? section->sh_num : SHN_UNDEF;
653 put_extern_sym2(sym, sh_num, value, size, 1);
656 /* add a new relocation entry to symbol 'sym' in section 's' */
657 ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
658 addr_t addend)
660 int c = 0;
662 if (nocode_wanted && s == cur_text_section)
663 return;
665 if (sym) {
666 if (0 == sym->c)
667 put_extern_sym(sym, NULL, 0, 0);
668 c = sym->c;
671 /* now we can add ELF relocation info */
672 put_elf_reloca(symtab_section, s, offset, type, c, addend);
675 #if PTR_SIZE == 4
676 ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
678 greloca(s, sym, offset, type, 0);
680 #endif
682 /* ------------------------------------------------------------------------- */
683 /* symbol allocator */
684 static Sym *__sym_malloc(void)
686 Sym *sym_pool, *sym, *last_sym;
687 int i;
689 sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
690 dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);
692 last_sym = sym_free_first;
693 sym = sym_pool;
694 for(i = 0; i < SYM_POOL_NB; i++) {
695 sym->next = last_sym;
696 last_sym = sym;
697 sym++;
699 sym_free_first = last_sym;
700 return last_sym;
703 static inline Sym *sym_malloc(void)
705 Sym *sym;
706 #ifndef SYM_DEBUG
707 sym = sym_free_first;
708 if (!sym)
709 sym = __sym_malloc();
710 sym_free_first = sym->next;
711 return sym;
712 #else
713 sym = tcc_malloc(sizeof(Sym));
714 return sym;
715 #endif
718 ST_INLN void sym_free(Sym *sym)
720 #ifndef SYM_DEBUG
721 sym->next = sym_free_first;
722 sym_free_first = sym;
723 #else
724 tcc_free(sym);
725 #endif
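/* Note on the allocator above: __sym_malloc() grabs SYM_POOL_NB Syms at a
   time, threads them onto the sym_free_first free list and remembers the
   whole pool in sym_pools.  sym_malloc()/sym_free() then only pop/push that
   list, so individual Syms are never returned to the heap; the pools are
   released wholesale in tccgen_finish() via dynarray_reset(). */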
728 /* push, without hashing */
729 ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, int c)
731 Sym *s;
733 s = sym_malloc();
734 memset(s, 0, sizeof *s);
735 s->v = v;
736 s->type.t = t;
737 s->c = c;
738 /* add in stack */
739 s->prev = *ps;
740 *ps = s;
741 return s;
744 /* find a symbol and return its associated structure. 's' is the top
745 of the symbol stack */
746 ST_FUNC Sym *sym_find2(Sym *s, int v)
748 while (s) {
749 if (s->v == v)
750 return s;
751 else if (s->v == -1)
752 return NULL;
753 s = s->prev;
755 return NULL;
758 /* structure lookup */
759 ST_INLN Sym *struct_find(int v)
761 v -= TOK_IDENT;
762 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
763 return NULL;
764 return table_ident[v]->sym_struct;
767 /* find an identifier */
768 ST_INLN Sym *sym_find(int v)
770 v -= TOK_IDENT;
771 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
772 return NULL;
773 return table_ident[v]->sym_identifier;
776 static int sym_scope(Sym *s)
778 if (IS_ENUM_VAL (s->type.t))
779 return s->type.ref->sym_scope;
780 else
781 return s->sym_scope;
784 /* push a given symbol on the symbol stack */
785 ST_FUNC Sym *sym_push(int v, CType *type, int r, int c)
787 Sym *s, **ps;
788 TokenSym *ts;
790 if (local_stack)
791 ps = &local_stack;
792 else
793 ps = &global_stack;
794 s = sym_push2(ps, v, type->t, c);
795 s->type.ref = type->ref;
796 s->r = r;
797 /* don't record fields or anonymous symbols */
798 /* XXX: simplify */
799 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
800 /* record symbol in token array */
801 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
802 if (v & SYM_STRUCT)
803 ps = &ts->sym_struct;
804 else
805 ps = &ts->sym_identifier;
806 s->prev_tok = *ps;
807 *ps = s;
808 s->sym_scope = local_scope;
809 if (s->prev_tok && sym_scope(s->prev_tok) == s->sym_scope)
810 tcc_error("redeclaration of '%s'",
811 get_tok_str(v & ~SYM_STRUCT, NULL));
813 return s;
816 /* push a global identifier */
817 ST_FUNC Sym *global_identifier_push(int v, int t, int c)
819 Sym *s, **ps;
820 s = sym_push2(&global_stack, v, t, c);
821 s->r = VT_CONST | VT_SYM;
822 /* don't record anonymous symbol */
823 if (v < SYM_FIRST_ANOM) {
824 ps = &table_ident[v - TOK_IDENT]->sym_identifier;
825 /* modify the top most local identifier, so that sym_identifier will
826 point to 's' when popped; happens when called from inline asm */
827 while (*ps != NULL && (*ps)->sym_scope)
828 ps = &(*ps)->prev_tok;
829 s->prev_tok = *ps;
830 *ps = s;
832 return s;
835 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
836 pop them yet from the list, but do remove them from the token array. */
837 ST_FUNC void sym_pop(Sym **ptop, Sym *b, int keep)
839 Sym *s, *ss, **ps;
840 TokenSym *ts;
841 int v;
843 s = *ptop;
844 while(s != b) {
845 ss = s->prev;
846 v = s->v;
847 /* remove symbol in token array */
848 /* XXX: simplify */
849 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
850 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
851 if (v & SYM_STRUCT)
852 ps = &ts->sym_struct;
853 else
854 ps = &ts->sym_identifier;
855 *ps = s->prev_tok;
857 if (!keep)
858 sym_free(s);
859 s = ss;
861 if (!keep)
862 *ptop = b;
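/* Illustrative sketch of the usual scope discipline built on these helpers
   (hypothetical usage, not an actual call site): */
#if 0
void example_block(void)
{
    Sym *saved_top = local_stack;        /* remember the current stack top  */
    /* ... parse a block; declarations land on local_stack via sym_push() ... */
    sym_pop(&local_stack, saved_top, 0); /* leaving the block unhooks them
                                            from the token table and, with
                                            keep == 0, frees them */
}
#endif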
865 /* ------------------------------------------------------------------------- */
866 static void vcheck_cmp(void)
868 /* cannot leave CPU flags set if other instructions are generated. Also
869 avoid leaving VT_JMP anywhere except on the top of the stack
870 because it would complicate the code generator.
872 Don't do this when nocode_wanted. vtop might come from
873 !nocode_wanted regions (see 88_codeopt.c) and transforming
874 it to a register without actually generating code is wrong
875 as their value might still be used for real. All values
876 we push under nocode_wanted will eventually be popped
877 again, so that the VT_CMP/VT_JMP value will be in vtop
878 when code is unsuppressed again. */
880 if (vtop->r == VT_CMP && !nocode_wanted)
881 gv(RC_INT);
884 static void vsetc(CType *type, int r, CValue *vc)
886 if (vtop >= vstack + (VSTACK_SIZE - 1))
887 tcc_error("memory full (vstack)");
888 vcheck_cmp();
889 vtop++;
890 vtop->type = *type;
891 vtop->r = r;
892 vtop->r2 = VT_CONST;
893 vtop->c = *vc;
894 vtop->sym = NULL;
897 ST_FUNC void vswap(void)
899 SValue tmp;
901 vcheck_cmp();
902 tmp = vtop[0];
903 vtop[0] = vtop[-1];
904 vtop[-1] = tmp;
907 /* pop stack value */
908 ST_FUNC void vpop(void)
910 int v;
911 v = vtop->r & VT_VALMASK;
912 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
913 /* for x86, we need to pop the FP stack */
914 if (v == TREG_ST0) {
915 o(0xd8dd); /* fstp %st(0) */
916 } else
917 #endif
918 if (v == VT_CMP) {
919 /* need to put correct jump if && or || without test */
920 gsym(vtop->jtrue);
921 gsym(vtop->jfalse);
923 vtop--;
926 /* push constant of type "type" with useless value */
927 static void vpush(CType *type)
929 vset(type, VT_CONST, 0);
932 /* push arbitrary 64bit constant */
933 static void vpush64(int ty, unsigned long long v)
935 CValue cval;
936 CType ctype;
937 ctype.t = ty;
938 ctype.ref = NULL;
939 cval.i = v;
940 vsetc(&ctype, VT_CONST, &cval);
943 /* push integer constant */
944 ST_FUNC void vpushi(int v)
946 vpush64(VT_INT, v);
949 /* push a pointer sized constant */
950 static void vpushs(addr_t v)
952 vpush64(VT_SIZE_T, v);
955 /* push long long constant */
956 static inline void vpushll(long long v)
958 vpush64(VT_LLONG, v);
961 ST_FUNC void vset(CType *type, int r, int v)
963 CValue cval;
964 cval.i = v;
965 vsetc(type, r, &cval);
968 static void vseti(int r, int v)
970 CType type;
971 type.t = VT_INT;
972 type.ref = NULL;
973 vset(&type, r, v);
976 ST_FUNC void vpushv(SValue *v)
978 if (vtop >= vstack + (VSTACK_SIZE - 1))
979 tcc_error("memory full (vstack)");
980 vtop++;
981 *vtop = *v;
984 static void vdup(void)
986 vpushv(vtop);
989 /* rotate n first stack elements to the bottom
990 I1 ... In -> I2 ... In I1 [top is right] */
992 ST_FUNC void vrotb(int n)
994 int i;
995 SValue tmp;
997 vcheck_cmp();
998 tmp = vtop[-n + 1];
999 for(i=-n+1;i!=0;i++)
1000 vtop[i] = vtop[i+1];
1001 vtop[0] = tmp;
1004 /* rotate the n elements before entry e towards the top
1005 I1 ... In ... -> In I1 ... I(n-1) ... [top is right] */
1007 ST_FUNC void vrote(SValue *e, int n)
1009 int i;
1010 SValue tmp;
1012 vcheck_cmp();
1013 tmp = *e;
1014 for(i = 0;i < n - 1; i++)
1015 e[-i] = e[-i - 1];
1016 e[-n + 1] = tmp;
1019 /* rotate n first stack elements to the top
1020 I1 ... In -> In I1 ... I(n-1) [top is right] */
1022 ST_FUNC void vrott(int n)
1024 vrote(vtop, n);
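/* Example: with three entries A B C on the stack (top is right),
     vrotb(3):  A B C -> B C A
     vrott(3):  A B C -> C A B
     vswap():   A B C -> A C B
   vrote(e, n) is the same rotation as vrott(), applied to the n entries
   ending at 'e'. */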
1027 /* ------------------------------------------------------------------------- */
1028 /* vtop->r = VT_CMP means CPU-flags have been set from comparison or test. */
1030 /* called from generators to set the result from relational ops */
1031 ST_FUNC void vset_VT_CMP(int op)
1033 vtop->r = VT_CMP;
1034 vtop->cmp_op = op;
1035 vtop->jfalse = 0;
1036 vtop->jtrue = 0;
1039 /* called once before asking generators to load VT_CMP to a register */
1040 static void vset_VT_JMP(void)
1042 int op = vtop->cmp_op;
1043 if (vtop->jtrue || vtop->jfalse) {
1044 /* we need to jump to 'mov $0,%R' or 'mov $1,%R' */
1045 int inv = op & (op < 2); /* small optimization */
1046 vseti(VT_JMP+inv, gvtst(inv, 0));
1047 } else {
1048 /* otherwise convert flags (resp. 0/1) to register */
1049 vtop->c.i = op;
1050 if (op < 2) /* doesn't seem to happen */
1051 vtop->r = VT_CONST;
1055 /* Set CPU Flags, doesn't yet jump */
1056 static void gvtst_set(int inv, int t)
1058 int *p;
1059 if (vtop->r != VT_CMP) {
1060 vpushi(0);
1061 gen_op(TOK_NE);
1062 if (vtop->r == VT_CMP) /* must be VT_CONST otherwise */
1064 else if (vtop->r == VT_CONST)
1065 vset_VT_CMP(vtop->c.i != 0);
1066 else
1067 tcc_error("ICE");
1069 p = inv ? &vtop->jfalse : &vtop->jtrue;
1070 *p = gjmp_append(*p, t);
1073 /* Generate value test
1075 * Generate a test for any value (jump, comparison and integers) */
1076 static int gvtst(int inv, int t)
1078 int op, u, x;
1080 gvtst_set(inv, t);
1082 t = vtop->jtrue, u = vtop->jfalse;
1083 if (inv)
1084 x = u, u = t, t = x;
1085 op = vtop->cmp_op;
1087 /* jump to the wanted target */
1088 if (op > 1)
1089 t = gjmp_cond(op ^ inv, t);
1090 else if (op != inv)
1091 t = gjmp(t);
1092 /* resolve complementary jumps to here */
1093 gsym(u);
1095 vtop--;
1096 return t;
1099 /* generate a zero or nonzero test */
1100 static void gen_test_zero(int op)
1102 if (vtop->r == VT_CMP) {
1103 int j;
1104 if (op == TOK_EQ) {
1105 j = vtop->jfalse;
1106 vtop->jfalse = vtop->jtrue;
1107 vtop->jtrue = j;
1108 vtop->cmp_op ^= 1;
1110 } else {
1111 vpushi(0);
1112 gen_op(op);
1116 /* ------------------------------------------------------------------------- */
1117 /* push a symbol value of TYPE */
1118 static inline void vpushsym(CType *type, Sym *sym)
1120 CValue cval;
1121 cval.i = 0;
1122 vsetc(type, VT_CONST | VT_SYM, &cval);
1123 vtop->sym = sym;
1126 /* Return a static symbol pointing to a section */
1127 ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
1129 int v;
1130 Sym *sym;
1132 v = anon_sym++;
1133 sym = sym_push(v, type, VT_CONST | VT_SYM, 0);
1134 sym->type.t |= VT_STATIC;
1135 put_extern_sym(sym, sec, offset, size);
1136 return sym;
1139 /* push a reference to a section offset by adding a dummy symbol */
1140 static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
1142 vpushsym(type, get_sym_ref(type, sec, offset, size));
1145 /* define a new external reference to a symbol 'v' of type 'u' */
1146 ST_FUNC Sym *external_global_sym(int v, CType *type)
1148 Sym *s;
1150 s = sym_find(v);
1151 if (!s) {
1152 /* push forward reference */
1153 s = global_identifier_push(v, type->t | VT_EXTERN, 0);
1154 s->type.ref = type->ref;
1155 } else if (IS_ASM_SYM(s)) {
1156 s->type.t = type->t | (s->type.t & VT_EXTERN);
1157 s->type.ref = type->ref;
1158 update_storage(s);
1160 return s;
1163 /* Merge symbol attributes. */
1164 static void merge_symattr(struct SymAttr *sa, struct SymAttr *sa1)
1166 if (sa1->aligned && !sa->aligned)
1167 sa->aligned = sa1->aligned;
1168 sa->packed |= sa1->packed;
1169 sa->weak |= sa1->weak;
1170 if (sa1->visibility != STV_DEFAULT) {
1171 int vis = sa->visibility;
1172 if (vis == STV_DEFAULT
1173 || vis > sa1->visibility)
1174 vis = sa1->visibility;
1175 sa->visibility = vis;
1177 sa->dllexport |= sa1->dllexport;
1178 sa->nodecorate |= sa1->nodecorate;
1179 sa->dllimport |= sa1->dllimport;
1182 /* Merge function attributes. */
1183 static void merge_funcattr(struct FuncAttr *fa, struct FuncAttr *fa1)
1185 if (fa1->func_call && !fa->func_call)
1186 fa->func_call = fa1->func_call;
1187 if (fa1->func_type && !fa->func_type)
1188 fa->func_type = fa1->func_type;
1189 if (fa1->func_args && !fa->func_args)
1190 fa->func_args = fa1->func_args;
1193 /* Merge attributes. */
1194 static void merge_attr(AttributeDef *ad, AttributeDef *ad1)
1196 merge_symattr(&ad->a, &ad1->a);
1197 merge_funcattr(&ad->f, &ad1->f);
1199 if (ad1->section)
1200 ad->section = ad1->section;
1201 if (ad1->alias_target)
1202 ad->alias_target = ad1->alias_target;
1203 if (ad1->asm_label)
1204 ad->asm_label = ad1->asm_label;
1205 if (ad1->attr_mode)
1206 ad->attr_mode = ad1->attr_mode;
1209 /* Merge some type attributes. */
1210 static void patch_type(Sym *sym, CType *type)
1212 if (!(type->t & VT_EXTERN) || IS_ENUM_VAL(sym->type.t)) {
1213 if (!(sym->type.t & VT_EXTERN))
1214 tcc_error("redefinition of '%s'", get_tok_str(sym->v, NULL));
1215 sym->type.t &= ~VT_EXTERN;
1218 if (IS_ASM_SYM(sym)) {
1219 /* stay static if both are static */
1220 sym->type.t = type->t & (sym->type.t | ~VT_STATIC);
1221 sym->type.ref = type->ref;
1224 if (!is_compatible_types(&sym->type, type)) {
1225 tcc_error("incompatible types for redefinition of '%s'",
1226 get_tok_str(sym->v, NULL));
1228 } else if ((sym->type.t & VT_BTYPE) == VT_FUNC) {
1229 int static_proto = sym->type.t & VT_STATIC;
1230 /* warn if static follows non-static function declaration */
1231 if ((type->t & VT_STATIC) && !static_proto
1232 /* XXX this test for inline shouldn't be here. Until we
1233 implement gnu-inline mode again it silences a warning for
1234 mingw caused by our workarounds. */
1235 && !((type->t | sym->type.t) & VT_INLINE))
1236 tcc_warning("static storage ignored for redefinition of '%s'",
1237 get_tok_str(sym->v, NULL));
1239 /* set 'inline' if both agree or if one has static */
1240 if ((type->t | sym->type.t) & VT_INLINE) {
1241 if (!((type->t ^ sym->type.t) & VT_INLINE)
1242 || ((type->t | sym->type.t) & VT_STATIC))
1243 static_proto |= VT_INLINE;
1246 if (0 == (type->t & VT_EXTERN)) {
1247 /* put complete type, use static from prototype */
1248 sym->type.t = (type->t & ~(VT_STATIC|VT_INLINE)) | static_proto;
1249 sym->type.ref = type->ref;
1250 } else {
1251 sym->type.t &= ~VT_INLINE | static_proto;
1254 if (sym->type.ref->f.func_type == FUNC_OLD
1255 && type->ref->f.func_type != FUNC_OLD) {
1256 sym->type.ref = type->ref;
1259 } else {
1260 if ((sym->type.t & VT_ARRAY) && type->ref->c >= 0) {
1261 /* set array size if it was omitted in extern declaration */
1262 sym->type.ref->c = type->ref->c;
1264 if ((type->t ^ sym->type.t) & VT_STATIC)
1265 tcc_warning("storage mismatch for redefinition of '%s'",
1266 get_tok_str(sym->v, NULL));
1270 /* Merge some storage attributes. */
1271 static void patch_storage(Sym *sym, AttributeDef *ad, CType *type)
1273 if (type)
1274 patch_type(sym, type);
1276 #ifdef TCC_TARGET_PE
1277 if (sym->a.dllimport != ad->a.dllimport)
1278 tcc_error("incompatible dll linkage for redefinition of '%s'",
1279 get_tok_str(sym->v, NULL));
1280 #endif
1281 merge_symattr(&sym->a, &ad->a);
1282 if (ad->asm_label)
1283 sym->asm_label = ad->asm_label;
1284 update_storage(sym);
1287 /* copy sym to other stack */
1288 static Sym *sym_copy(Sym *s0, Sym **ps)
1290 Sym *s;
1291 s = sym_malloc(), *s = *s0;
1292 s->prev = *ps, *ps = s;
1293 if (s->v < SYM_FIRST_ANOM) {
1294 ps = &table_ident[s->v - TOK_IDENT]->sym_identifier;
1295 s->prev_tok = *ps, *ps = s;
1297 return s;
1300 /* copy s->type.ref to stack 'ps' for VT_FUNC and VT_PTR */
1301 static void sym_copy_ref(Sym *s, Sym **ps)
1303 int bt = s->type.t & VT_BTYPE;
1304 if (bt == VT_FUNC || bt == VT_PTR) {
1305 Sym **sp = &s->type.ref;
1306 for (s = *sp, *sp = NULL; s; s = s->next) {
1307 Sym *s2 = sym_copy(s, ps);
1308 sp = &(*sp = s2)->next;
1309 sym_copy_ref(s2, ps);
1314 /* define a new external reference to a symbol 'v' */
1315 static Sym *external_sym(int v, CType *type, int r, AttributeDef *ad)
1317 Sym *s;
1319 /* look for global symbol */
1320 s = sym_find(v);
1321 while (s && s->sym_scope)
1322 s = s->prev_tok;
1324 if (!s) {
1325 /* push forward reference */
1326 s = global_identifier_push(v, type->t, 0);
1327 s->r |= r;
1328 s->a = ad->a;
1329 s->asm_label = ad->asm_label;
1330 s->type.ref = type->ref;
1331 /* copy type to the global stack */
1332 if (local_stack)
1333 sym_copy_ref(s, &global_stack);
1334 } else {
1335 patch_storage(s, ad, type);
1337 /* push variables on local_stack if any */
1338 if (local_stack && (s->type.t & VT_BTYPE) != VT_FUNC)
1339 s = sym_copy(s, &local_stack);
1340 return s;
1343 /* push a reference to global symbol v */
1344 ST_FUNC void vpush_global_sym(CType *type, int v)
1346 vpushsym(type, external_global_sym(v, type));
1349 /* save registers up to (vtop - n) stack entry */
1350 ST_FUNC void save_regs(int n)
1352 SValue *p, *p1;
1353 for(p = vstack, p1 = vtop - n; p <= p1; p++)
1354 save_reg(p->r);
1357 /* save r to the memory stack, and mark it as being free */
1358 ST_FUNC void save_reg(int r)
1360 save_reg_upstack(r, 0);
1363 /* save r to the memory stack, and mark it as being free,
1364 if seen up to (vtop - n) stack entry */
1365 ST_FUNC void save_reg_upstack(int r, int n)
1367 int l, size, align, bt;
1368 SValue *p, *p1, sv;
1370 if ((r &= VT_VALMASK) >= VT_CONST)
1371 return;
1372 if (nocode_wanted)
1373 return;
1374 l = 0;
1375 for(p = vstack, p1 = vtop - n; p <= p1; p++) {
1376 if ((p->r & VT_VALMASK) == r || p->r2 == r) {
1377 /* must save value on stack if not already done */
1378 if (!l) {
1379 bt = p->type.t & VT_BTYPE;
1380 if (bt == VT_VOID)
1381 continue;
1382 if ((p->r & VT_LVAL) || bt == VT_FUNC)
1383 bt = VT_PTR;
1384 sv.type.t = bt;
1385 size = type_size(&sv.type, &align);
1386 l = get_temp_local_var(size,align);
1387 sv.r = VT_LOCAL | VT_LVAL;
1388 sv.c.i = l;
1389 store(p->r & VT_VALMASK, &sv);
1390 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1391 /* x86 specific: need to pop fp register ST0 if saved */
1392 if (r == TREG_ST0) {
1393 o(0xd8dd); /* fstp %st(0) */
1395 #endif
1396 /* special long long case */
1397 if (p->r2 < VT_CONST && USING_TWO_WORDS(bt)) {
1398 sv.c.i += PTR_SIZE;
1399 store(p->r2, &sv);
1402 /* mark that stack entry as being saved on the stack */
1403 if (p->r & VT_LVAL) {
1404 /* also clear the bounded flag because the
1405 relocation address of the function was stored in
1406 p->c.i */
1407 p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
1408 } else {
1409 p->r = VT_LVAL | VT_LOCAL;
1411 p->r2 = VT_CONST;
1412 p->c.i = l;
1417 #ifdef TCC_TARGET_ARM
1418 /* find a register of class 'rc2' with at most one reference on stack.
1419 * If none, call get_reg(rc) */
1420 ST_FUNC int get_reg_ex(int rc, int rc2)
1422 int r;
1423 SValue *p;
1425 for(r=0;r<NB_REGS;r++) {
1426 if (reg_classes[r] & rc2) {
1427 int n;
1428 n=0;
1429 for(p = vstack; p <= vtop; p++) {
1430 if ((p->r & VT_VALMASK) == r ||
1431 p->r2 == r)
1432 n++;
1434 if (n <= 1)
1435 return r;
1438 return get_reg(rc);
1440 #endif
1442 /* find a free register of class 'rc'. If none, save one register */
1443 ST_FUNC int get_reg(int rc)
1445 int r;
1446 SValue *p;
1448 /* find a free register */
1449 for(r=0;r<NB_REGS;r++) {
1450 if (reg_classes[r] & rc) {
1451 if (nocode_wanted)
1452 return r;
1453 for(p=vstack;p<=vtop;p++) {
1454 if ((p->r & VT_VALMASK) == r ||
1455 p->r2 == r)
1456 goto notfound;
1458 return r;
1460 notfound: ;
1463 /* no register left : free the first one on the stack (VERY
1464 IMPORTANT to start from the bottom to ensure that we don't
1465 spill registers used in gen_opi()) */
1466 for(p=vstack;p<=vtop;p++) {
1467 /* look at second register (if long long) */
1468 r = p->r2;
1469 if (r < VT_CONST && (reg_classes[r] & rc))
1470 goto save_found;
1471 r = p->r & VT_VALMASK;
1472 if (r < VT_CONST && (reg_classes[r] & rc)) {
1473 save_found:
1474 save_reg(r);
1475 return r;
1478 /* Should never come here */
1479 return -1;
1482 /* find a free temporary local variable matching the given size and alignment and return its offset on the stack. If none is free, add a new temporary stack variable. */
1483 static int get_temp_local_var(int size,int align){
1484 int i;
1485 struct temp_local_variable *temp_var;
1486 int found_var;
1487 SValue *p;
1488 int r;
1489 char free;
1490 char found;
1491 found=0;
1492 for(i=0;i<nb_temp_local_vars;i++){
1493 temp_var=&arr_temp_local_vars[i];
1494 if(temp_var->size<size||align!=temp_var->align){
1495 continue;
1497 /*check if temp_var is free*/
1498 free=1;
1499 for(p=vstack;p<=vtop;p++) {
1500 r=p->r&VT_VALMASK;
1501 if(r==VT_LOCAL||r==VT_LLOCAL){
1502 if(p->c.i==temp_var->location){
1503 free=0;
1504 break;
1508 if(free){
1509 found_var=temp_var->location;
1510 found=1;
1511 break;
1514 if(!found){
1515 loc = (loc - size) & -align;
1516 if(nb_temp_local_vars<MAX_TEMP_LOCAL_VARIABLE_NUMBER){
1517 temp_var=&arr_temp_local_vars[i];
1518 temp_var->location=loc;
1519 temp_var->size=size;
1520 temp_var->align=align;
1521 nb_temp_local_vars++;
1523 found_var=loc;
1525 return found_var;
1528 static void clear_temp_local_var_list(){
1529 nb_temp_local_vars=0;
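/* Illustrative sketch of the reuse pattern (hypothetical values): */
#if 0
int off1 = get_temp_local_var(4, 4); /* no free 4/4 slot yet: loc moves down,
                                        a new slot is recorded and returned */
/* ... the SValue spilled to off1 is popped from the value stack ... */
int off2 = get_temp_local_var(4, 4); /* that slot is no longer referenced from
                                        vstack, so off2 == off1 is returned */
#endif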
1532 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1533 if needed */
1534 static void move_reg(int r, int s, int t)
1536 SValue sv;
1538 if (r != s) {
1539 save_reg(r);
1540 sv.type.t = t;
1541 sv.type.ref = NULL;
1542 sv.r = s;
1543 sv.c.i = 0;
1544 load(r, &sv);
1548 /* get address of vtop (vtop MUST BE an lvalue) */
1549 ST_FUNC void gaddrof(void)
1551 vtop->r &= ~VT_LVAL;
1552 /* tricky: if saved lvalue, then we can go back to lvalue */
1553 if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
1554 vtop->r = (vtop->r & ~VT_VALMASK) | VT_LOCAL | VT_LVAL;
1557 #ifdef CONFIG_TCC_BCHECK
1558 /* generate lvalue bound code */
1559 static void gbound(void)
1561 CType type1;
1563 vtop->r &= ~VT_MUSTBOUND;
1564 /* if lvalue, then use checking code before dereferencing */
1565 if (vtop->r & VT_LVAL) {
1566 /* if not VT_BOUNDED value, then make one */
1567 if (!(vtop->r & VT_BOUNDED)) {
1568 /* must save type because we must set it to int to get pointer */
1569 type1 = vtop->type;
1570 vtop->type.t = VT_PTR;
1571 gaddrof();
1572 vpushi(0);
1573 gen_bounded_ptr_add();
1574 vtop->r |= VT_LVAL;
1575 vtop->type = type1;
1577 /* then check for dereferencing */
1578 gen_bounded_ptr_deref();
1582 /* we need to call __bound_ptr_add before we start to load function
1583 args into registers */
1584 ST_FUNC void gbound_args(int nb_args)
1586 int i;
1587 for (i = 1; i <= nb_args; ++i)
1588 if (vtop[1 - i].r & VT_MUSTBOUND) {
1589 vrotb(i);
1590 gbound();
1591 vrott(i);
1595 /* Add bounds for local symbols from S to E (via ->prev) */
1596 static void add_local_bounds(Sym *s, Sym *e)
1598 for (; s != e; s = s->prev) {
1599 if (!s->v || (s->r & VT_VALMASK) != VT_LOCAL)
1600 continue;
1601 /* Add arrays/structs/unions because we always take address */
1602 if ((s->type.t & VT_ARRAY)
1603 || (s->type.t & VT_BTYPE) == VT_STRUCT
1604 || s->a.addrtaken) {
1605 /* add local bound info */
1606 int align, size = type_size(&s->type, &align);
1607 addr_t *bounds_ptr = section_ptr_add(lbounds_section,
1608 2 * sizeof(addr_t));
1609 bounds_ptr[0] = s->c;
1610 bounds_ptr[1] = size;
1614 #endif
1616 /* Wrapper around sym_pop, that potentially also registers local bounds. */
1617 static void pop_local_syms(Sym **ptop, Sym *b, int keep, int ellipsis)
1619 #ifdef CONFIG_TCC_BCHECK
1620 if (!ellipsis && !keep && tcc_state->do_bounds_check)
1621 add_local_bounds(*ptop, b);
1622 #endif
1623 sym_pop(ptop, b, keep);
1626 static void incr_bf_adr(int o)
1628 vtop->type = char_pointer_type;
1629 gaddrof();
1630 vpushs(o);
1631 gen_op('+');
1632 vtop->type.t = VT_BYTE | VT_UNSIGNED;
1633 vtop->r |= VT_LVAL;
1636 /* single-byte load mode for packed or otherwise unaligned bitfields */
1637 static void load_packed_bf(CType *type, int bit_pos, int bit_size)
1639 int n, o, bits;
1640 save_reg_upstack(vtop->r, 1);
1641 vpush64(type->t & VT_BTYPE, 0); // B X
1642 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
1643 do {
1644 vswap(); // X B
1645 incr_bf_adr(o);
1646 vdup(); // X B B
1647 n = 8 - bit_pos;
1648 if (n > bit_size)
1649 n = bit_size;
1650 if (bit_pos)
1651 vpushi(bit_pos), gen_op(TOK_SHR), bit_pos = 0; // X B Y
1652 if (n < 8)
1653 vpushi((1 << n) - 1), gen_op('&');
1654 gen_cast(type);
1655 if (bits)
1656 vpushi(bits), gen_op(TOK_SHL);
1657 vrotb(3); // B Y X
1658 gen_op('|'); // B X
1659 bits += n, bit_size -= n, o = 1;
1660 } while (bit_size);
1661 vswap(), vpop();
1662 if (!(type->t & VT_UNSIGNED)) {
1663 n = ((type->t & VT_BTYPE) == VT_LLONG ? 64 : 32) - bits;
1664 vpushi(n), gen_op(TOK_SHL);
1665 vpushi(n), gen_op(TOK_SAR);
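/* Worked example: loading a signed bit-field with bit_pos = 3, bit_size = 10.
   First pass:  o = 0, n = 8 - 3 = 5:  load byte 0, shift right by 3, mask
                with (1<<5)-1 and OR into the (so far zero) result.
   Second pass: o = 1, n = 5:          load byte 1, mask with (1<<5)-1, shift
                left by the 5 bits already gathered and OR it in.
   Finally the value is sign-extended: shifted left by 32-10 = 22 and then
   arithmetically shifted right by 22. */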
1669 /* single-byte store mode for packed or otherwise unaligned bitfields */
1670 static void store_packed_bf(int bit_pos, int bit_size)
1672 int bits, n, o, m, c;
1674 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1675 vswap(); // X B
1676 save_reg_upstack(vtop->r, 1);
1677 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
1678 do {
1679 incr_bf_adr(o); // X B
1680 vswap(); //B X
1681 c ? vdup() : gv_dup(); // B V X
1682 vrott(3); // X B V
1683 if (bits)
1684 vpushi(bits), gen_op(TOK_SHR);
1685 if (bit_pos)
1686 vpushi(bit_pos), gen_op(TOK_SHL);
1687 n = 8 - bit_pos;
1688 if (n > bit_size)
1689 n = bit_size;
1690 if (n < 8) {
1691 m = ((1 << n) - 1) << bit_pos;
1692 vpushi(m), gen_op('&'); // X B V1
1693 vpushv(vtop-1); // X B V1 B
1694 vpushi(m & 0x80 ? ~m & 0x7f : ~m);
1695 gen_op('&'); // X B V1 B1
1696 gen_op('|'); // X B V2
1698 vdup(), vtop[-1] = vtop[-2]; // X B B V2
1699 vstore(), vpop(); // X B
1700 bits += n, bit_size -= n, bit_pos = 0, o = 1;
1701 } while (bit_size);
1702 vpop(), vpop();
1705 static int adjust_bf(SValue *sv, int bit_pos, int bit_size)
1707 int t;
1708 if (0 == sv->type.ref)
1709 return 0;
1710 t = sv->type.ref->auxtype;
1711 if (t != -1 && t != VT_STRUCT) {
1712 sv->type.t = (sv->type.t & ~VT_BTYPE) | t;
1713 sv->r |= VT_LVAL;
1715 return t;
1718 /* store vtop in a register belonging to class 'rc'. lvalues are
1719 converted to values. Cannot be used for values that cannot be
1720 converted to a register value (such as structures). */
1721 ST_FUNC int gv(int rc)
1723 int r, r2, r_ok, r2_ok, rc2, bt;
1724 int bit_pos, bit_size, size, align;
1726 /* NOTE: get_reg can modify vstack[] */
1727 if (vtop->type.t & VT_BITFIELD) {
1728 CType type;
1730 bit_pos = BIT_POS(vtop->type.t);
1731 bit_size = BIT_SIZE(vtop->type.t);
1732 /* remove bit field info to avoid loops */
1733 vtop->type.t &= ~VT_STRUCT_MASK;
1735 type.ref = NULL;
1736 type.t = vtop->type.t & VT_UNSIGNED;
1737 if ((vtop->type.t & VT_BTYPE) == VT_BOOL)
1738 type.t |= VT_UNSIGNED;
1740 r = adjust_bf(vtop, bit_pos, bit_size);
1742 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
1743 type.t |= VT_LLONG;
1744 else
1745 type.t |= VT_INT;
1747 if (r == VT_STRUCT) {
1748 load_packed_bf(&type, bit_pos, bit_size);
1749 } else {
1750 int bits = (type.t & VT_BTYPE) == VT_LLONG ? 64 : 32;
1751 /* cast to int to propagate signedness in following ops */
1752 gen_cast(&type);
1753 /* generate shifts */
1754 vpushi(bits - (bit_pos + bit_size));
1755 gen_op(TOK_SHL);
1756 vpushi(bits - bit_size);
1757 /* NOTE: transformed to SHR if unsigned */
1758 gen_op(TOK_SAR);
1760 r = gv(rc);
1761 } else {
1762 if (is_float(vtop->type.t) &&
1763 (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1764 unsigned long offset;
1765 /* CPUs usually cannot use float constants, so we store them
1766 generically in data segment */
1767 size = type_size(&vtop->type, &align);
1768 if (NODATA_WANTED)
1769 size = 0, align = 1;
1770 offset = section_add(data_section, size, align);
1771 vpush_ref(&vtop->type, data_section, offset, size);
1772 vswap();
1773 init_putv(&vtop->type, data_section, offset);
1774 vtop->r |= VT_LVAL;
1776 #ifdef CONFIG_TCC_BCHECK
1777 if (vtop->r & VT_MUSTBOUND)
1778 gbound();
1779 #endif
1781 bt = vtop->type.t & VT_BTYPE;
1783 #ifdef TCC_TARGET_RISCV64
1784 /* XXX mega hack */
1785 if (bt == VT_LDOUBLE && rc == RC_FLOAT)
1786 rc = RC_INT;
1787 #endif
1788 rc2 = RC2_TYPE(bt, rc);
1790 /* need to reload if:
1791 - constant
1792 - lvalue (need to dereference pointer)
1793 - already a register, but not in the right class */
1794 r = vtop->r & VT_VALMASK;
1795 r_ok = !(vtop->r & VT_LVAL) && (r < VT_CONST) && (reg_classes[r] & rc);
1796 r2_ok = !rc2 || ((vtop->r2 < VT_CONST) && (reg_classes[vtop->r2] & rc2));
1798 if (!r_ok || !r2_ok) {
1799 if (!r_ok)
1800 r = get_reg(rc);
1801 if (rc2) {
1802 int load_type = (bt == VT_QFLOAT) ? VT_DOUBLE : VT_PTRDIFF_T;
1803 int original_type = vtop->type.t;
1805 /* two register type load :
1806 expand to two words temporarily */
1807 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1808 /* load constant */
1809 unsigned long long ll = vtop->c.i;
1810 vtop->c.i = ll; /* first word */
1811 load(r, vtop);
1812 vtop->r = r; /* save register value */
1813 vpushi(ll >> 32); /* second word */
1814 } else if (vtop->r & VT_LVAL) {
1815 /* We do not want to modify the long long pointer here,
1816 so we save any other instances down the stack */
1817 save_reg_upstack(vtop->r, 1);
1818 /* load from memory */
1819 vtop->type.t = load_type;
1820 load(r, vtop);
1821 vdup();
1822 vtop[-1].r = r; /* save register value */
1823 /* increment pointer to get second word */
1824 vtop->type.t = VT_PTRDIFF_T;
1825 gaddrof();
1826 vpushs(PTR_SIZE);
1827 gen_op('+');
1828 vtop->r |= VT_LVAL;
1829 vtop->type.t = load_type;
1830 } else {
1831 /* move registers */
1832 if (!r_ok)
1833 load(r, vtop);
1834 if (r2_ok && vtop->r2 < VT_CONST)
1835 goto done;
1836 vdup();
1837 vtop[-1].r = r; /* save register value */
1838 vtop->r = vtop[-1].r2;
1840 /* Allocate second register. Here we rely on the fact that
1841 get_reg() tries first to free r2 of an SValue. */
1842 r2 = get_reg(rc2);
1843 load(r2, vtop);
1844 vpop();
1845 /* write second register */
1846 vtop->r2 = r2;
1847 done:
1848 vtop->type.t = original_type;
1849 } else {
1850 if (vtop->r == VT_CMP)
1851 vset_VT_JMP();
1852 /* one register type load */
1853 load(r, vtop);
1856 vtop->r = r;
1857 #ifdef TCC_TARGET_C67
1858 /* uses register pairs for doubles */
1859 if (bt == VT_DOUBLE)
1860 vtop->r2 = r+1;
1861 #endif
1863 return r;
1866 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1867 ST_FUNC void gv2(int rc1, int rc2)
1869 /* generate more generic register first. But VT_JMP or VT_CMP
1870 values must be generated first in all cases to avoid possible
1871 reload errors */
1872 if (vtop->r != VT_CMP && rc1 <= rc2) {
1873 vswap();
1874 gv(rc1);
1875 vswap();
1876 gv(rc2);
1877 /* test if reload is needed for first register */
1878 if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
1879 vswap();
1880 gv(rc1);
1881 vswap();
1883 } else {
1884 gv(rc2);
1885 vswap();
1886 gv(rc1);
1887 vswap();
1888 /* test if reload is needed for first register */
1889 if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
1890 gv(rc2);
1895 #if PTR_SIZE == 4
1896 /* expand 64bit on stack in two ints */
1897 ST_FUNC void lexpand(void)
1899 int u, v;
1900 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1901 v = vtop->r & (VT_VALMASK | VT_LVAL);
1902 if (v == VT_CONST) {
1903 vdup();
1904 vtop[0].c.i >>= 32;
1905 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1906 vdup();
1907 vtop[0].c.i += 4;
1908 } else {
1909 gv(RC_INT);
1910 vdup();
1911 vtop[0].r = vtop[-1].r2;
1912 vtop[0].r2 = vtop[-1].r2 = VT_CONST;
1914 vtop[0].type.t = vtop[-1].type.t = VT_INT | u;
1916 #endif
1918 #if PTR_SIZE == 4
1919 /* build a long long from two ints */
1920 static void lbuild(int t)
1922 gv2(RC_INT, RC_INT);
1923 vtop[-1].r2 = vtop[0].r;
1924 vtop[-1].type.t = t;
1925 vpop();
1927 #endif
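/* Example (PTR_SIZE == 4): after vpushll(0x1122334455667788LL), lexpand()
   leaves two VT_INT entries, the low word 0x55667788 below and the high word
   0x11223344 on top; lbuild(VT_LLONG) joins them back into one entry with the
   low word in r and the high word in r2. */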
1929 /* convert stack entry to register and duplicate its value in another
1930 register */
1931 static void gv_dup(void)
1933 int t, rc, r;
1935 t = vtop->type.t;
1936 #if PTR_SIZE == 4
1937 if ((t & VT_BTYPE) == VT_LLONG) {
1938 if (t & VT_BITFIELD) {
1939 gv(RC_INT);
1940 t = vtop->type.t;
1942 lexpand();
1943 gv_dup();
1944 vswap();
1945 vrotb(3);
1946 gv_dup();
1947 vrotb(4);
1948 /* stack: H L L1 H1 */
1949 lbuild(t);
1950 vrotb(3);
1951 vrotb(3);
1952 vswap();
1953 lbuild(t);
1954 vswap();
1955 return;
1957 #endif
1958 /* duplicate value */
1959 rc = RC_TYPE(t);
1960 gv(rc);
1961 r = get_reg(rc);
1962 vdup();
1963 load(r, vtop);
1964 vtop->r = r;
1967 #if PTR_SIZE == 4
1968 /* generate CPU independent (unsigned) long long operations */
1969 static void gen_opl(int op)
1971 int t, a, b, op1, c, i;
1972 int func;
1973 unsigned short reg_iret = REG_IRET;
1974 unsigned short reg_lret = REG_IRE2;
1975 SValue tmp;
1977 switch(op) {
1978 case '/':
1979 case TOK_PDIV:
1980 func = TOK___divdi3;
1981 goto gen_func;
1982 case TOK_UDIV:
1983 func = TOK___udivdi3;
1984 goto gen_func;
1985 case '%':
1986 func = TOK___moddi3;
1987 goto gen_mod_func;
1988 case TOK_UMOD:
1989 func = TOK___umoddi3;
1990 gen_mod_func:
1991 #ifdef TCC_ARM_EABI
1992 reg_iret = TREG_R2;
1993 reg_lret = TREG_R3;
1994 #endif
1995 gen_func:
1996 /* call generic long long function */
1997 vpush_global_sym(&func_old_type, func);
1998 vrott(3);
1999 gfunc_call(2);
2000 vpushi(0);
2001 vtop->r = reg_iret;
2002 vtop->r2 = reg_lret;
2003 break;
2004 case '^':
2005 case '&':
2006 case '|':
2007 case '*':
2008 case '+':
2009 case '-':
2010 //pv("gen_opl A",0,2);
2011 t = vtop->type.t;
2012 vswap();
2013 lexpand();
2014 vrotb(3);
2015 lexpand();
2016 /* stack: L1 H1 L2 H2 */
2017 tmp = vtop[0];
2018 vtop[0] = vtop[-3];
2019 vtop[-3] = tmp;
2020 tmp = vtop[-2];
2021 vtop[-2] = vtop[-3];
2022 vtop[-3] = tmp;
2023 vswap();
2024 /* stack: H1 H2 L1 L2 */
2025 //pv("gen_opl B",0,4);
2026 if (op == '*') {
2027 vpushv(vtop - 1);
2028 vpushv(vtop - 1);
2029 gen_op(TOK_UMULL);
2030 lexpand();
2031 /* stack: H1 H2 L1 L2 ML MH */
2032 for(i=0;i<4;i++)
2033 vrotb(6);
2034 /* stack: ML MH H1 H2 L1 L2 */
2035 tmp = vtop[0];
2036 vtop[0] = vtop[-2];
2037 vtop[-2] = tmp;
2038 /* stack: ML MH H1 L2 H2 L1 */
2039 gen_op('*');
2040 vrotb(3);
2041 vrotb(3);
2042 gen_op('*');
2043 /* stack: ML MH M1 M2 */
2044 gen_op('+');
2045 gen_op('+');
2046 } else if (op == '+' || op == '-') {
2047 /* XXX: add non carry method too (for MIPS or alpha) */
2048 if (op == '+')
2049 op1 = TOK_ADDC1;
2050 else
2051 op1 = TOK_SUBC1;
2052 gen_op(op1);
2053 /* stack: H1 H2 (L1 op L2) */
2054 vrotb(3);
2055 vrotb(3);
2056 gen_op(op1 + 1); /* TOK_xxxC2 */
2057 } else {
2058 gen_op(op);
2059 /* stack: H1 H2 (L1 op L2) */
2060 vrotb(3);
2061 vrotb(3);
2062 /* stack: (L1 op L2) H1 H2 */
2063 gen_op(op);
2064 /* stack: (L1 op L2) (H1 op H2) */
2066 /* stack: L H */
2067 lbuild(t);
2068 break;
2069 case TOK_SAR:
2070 case TOK_SHR:
2071 case TOK_SHL:
2072 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
2073 t = vtop[-1].type.t;
2074 vswap();
2075 lexpand();
2076 vrotb(3);
2077 /* stack: L H shift */
2078 c = (int)vtop->c.i;
2079 /* constant: simpler */
2080 /* NOTE: all comments are for SHL. the other cases are
2081 done by swapping words */
2082 vpop();
2083 if (op != TOK_SHL)
2084 vswap();
2085 if (c >= 32) {
2086 /* stack: L H */
2087 vpop();
2088 if (c > 32) {
2089 vpushi(c - 32);
2090 gen_op(op);
2092 if (op != TOK_SAR) {
2093 vpushi(0);
2094 } else {
2095 gv_dup();
2096 vpushi(31);
2097 gen_op(TOK_SAR);
2099 vswap();
2100 } else {
2101 vswap();
2102 gv_dup();
2103 /* stack: H L L */
2104 vpushi(c);
2105 gen_op(op);
2106 vswap();
2107 vpushi(32 - c);
2108 if (op == TOK_SHL)
2109 gen_op(TOK_SHR);
2110 else
2111 gen_op(TOK_SHL);
2112 vrotb(3);
2113 /* stack: L L H */
2114 vpushi(c);
2115 if (op == TOK_SHL)
2116 gen_op(TOK_SHL);
2117 else
2118 gen_op(TOK_SHR);
2119 gen_op('|');
2121 if (op != TOK_SHL)
2122 vswap();
2123 lbuild(t);
2124 } else {
2125 /* XXX: should provide a faster fallback on x86 ? */
2126 switch(op) {
2127 case TOK_SAR:
2128 func = TOK___ashrdi3;
2129 goto gen_func;
2130 case TOK_SHR:
2131 func = TOK___lshrdi3;
2132 goto gen_func;
2133 case TOK_SHL:
2134 func = TOK___ashldi3;
2135 goto gen_func;
2138 break;
2139 default:
2140 /* compare operations */
2141 t = vtop->type.t;
2142 vswap();
2143 lexpand();
2144 vrotb(3);
2145 lexpand();
2146 /* stack: L1 H1 L2 H2 */
2147 tmp = vtop[-1];
2148 vtop[-1] = vtop[-2];
2149 vtop[-2] = tmp;
2150 /* stack: L1 L2 H1 H2 */
2151 save_regs(4);
2152 /* compare high */
2153 op1 = op;
2154 /* when values are equal, we need to compare low words. since
2155 the jump is inverted, we invert the test too. */
2156 if (op1 == TOK_LT)
2157 op1 = TOK_LE;
2158 else if (op1 == TOK_GT)
2159 op1 = TOK_GE;
2160 else if (op1 == TOK_ULT)
2161 op1 = TOK_ULE;
2162 else if (op1 == TOK_UGT)
2163 op1 = TOK_UGE;
2164 a = 0;
2165 b = 0;
2166 gen_op(op1);
2167 if (op == TOK_NE) {
2168 b = gvtst(0, 0);
2169 } else {
2170 a = gvtst(1, 0);
2171 if (op != TOK_EQ) {
2172 /* generate non equal test */
2173 vpushi(0);
2174 vset_VT_CMP(TOK_NE);
2175 b = gvtst(0, 0);
2178 /* compare low. Always unsigned */
2179 op1 = op;
2180 if (op1 == TOK_LT)
2181 op1 = TOK_ULT;
2182 else if (op1 == TOK_LE)
2183 op1 = TOK_ULE;
2184 else if (op1 == TOK_GT)
2185 op1 = TOK_UGT;
2186 else if (op1 == TOK_GE)
2187 op1 = TOK_UGE;
2188 gen_op(op1);
2189 #if 0//def TCC_TARGET_I386
2190 if (op == TOK_NE) { gsym(b); break; }
2191 if (op == TOK_EQ) { gsym(a); break; }
2192 #endif
2193 gvtst_set(1, a);
2194 gvtst_set(0, b);
2195 break;
2198 #endif
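/* The shift-by-constant case above decomposes a 64-bit shift into 32-bit
   ones; for TOK_SHL with constant c this amounts to
       1 <= c < 32:  high' = (high << c) | (low >> (32 - c)),  low' = low << c
       c >= 32:      high' = low << (c - 32),                  low' = 0
   TOK_SHR/TOK_SAR are the mirror image, obtained by swapping the two words
   first.  Non-constant shift counts fall back to the __ashldi3 family. */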
2200 static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
2202 uint64_t x = (a >> 63 ? -a : a) / (b >> 63 ? -b : b);
2203 return (a ^ b) >> 63 ? -x : x;
2206 static int gen_opic_lt(uint64_t a, uint64_t b)
2208 return (a ^ (uint64_t)1 << 63) < (b ^ (uint64_t)1 << 63);
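/* Note: gen_opic_sdiv() divides the magnitudes and restores the sign from
   a ^ b, and gen_opic_lt() turns a signed comparison into an unsigned one by
   flipping the sign bits.  E.g. -1 becomes 0x7fff...ffff and 0 becomes
   0x8000...0000, so "-1 < 0" still holds after the flip. */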
2211 /* handle integer constant optimizations and various machine
2212 independent optimizations */
2213 static void gen_opic(int op)
2215 SValue *v1 = vtop - 1;
2216 SValue *v2 = vtop;
2217 int t1 = v1->type.t & VT_BTYPE;
2218 int t2 = v2->type.t & VT_BTYPE;
2219 int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2220 int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2221 uint64_t l1 = c1 ? v1->c.i : 0;
2222 uint64_t l2 = c2 ? v2->c.i : 0;
2223 int shm = (t1 == VT_LLONG) ? 63 : 31;
2225 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
2226 l1 = ((uint32_t)l1 |
2227 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
2228 if (t2 != VT_LLONG && (PTR_SIZE != 8 || t2 != VT_PTR))
2229 l2 = ((uint32_t)l2 |
2230 (v2->type.t & VT_UNSIGNED ? 0 : -(l2 & 0x80000000)));
2232 if (c1 && c2) {
2233 switch(op) {
2234 case '+': l1 += l2; break;
2235 case '-': l1 -= l2; break;
2236 case '&': l1 &= l2; break;
2237 case '^': l1 ^= l2; break;
2238 case '|': l1 |= l2; break;
2239 case '*': l1 *= l2; break;
2241 case TOK_PDIV:
2242 case '/':
2243 case '%':
2244 case TOK_UDIV:
2245 case TOK_UMOD:
2246 /* if division by zero, generate explicit division */
2247 if (l2 == 0) {
2248 if (const_wanted && !(nocode_wanted & unevalmask))
2249 tcc_error("division by zero in constant");
2250 goto general_case;
2252 switch(op) {
2253 default: l1 = gen_opic_sdiv(l1, l2); break;
2254 case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
2255 case TOK_UDIV: l1 = l1 / l2; break;
2256 case TOK_UMOD: l1 = l1 % l2; break;
2258 break;
2259 case TOK_SHL: l1 <<= (l2 & shm); break;
2260 case TOK_SHR: l1 >>= (l2 & shm); break;
2261 case TOK_SAR:
2262 l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
2263 break;
2264 /* tests */
2265 case TOK_ULT: l1 = l1 < l2; break;
2266 case TOK_UGE: l1 = l1 >= l2; break;
2267 case TOK_EQ: l1 = l1 == l2; break;
2268 case TOK_NE: l1 = l1 != l2; break;
2269 case TOK_ULE: l1 = l1 <= l2; break;
2270 case TOK_UGT: l1 = l1 > l2; break;
2271 case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
2272 case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
2273 case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
2274 case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
2275 /* logical */
2276 case TOK_LAND: l1 = l1 && l2; break;
2277 case TOK_LOR: l1 = l1 || l2; break;
2278 default:
2279 goto general_case;
2281 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
2282 l1 = ((uint32_t)l1 |
2283 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
2284 v1->c.i = l1;
2285 vtop--;
2286 } else {
2287 /* for commutative ops, make the constant the second operand (c2) */
2288 if (c1 && (op == '+' || op == '&' || op == '^' ||
2289 op == '|' || op == '*' || op == TOK_EQ || op == TOK_NE)) {
2290 vswap();
2291 c2 = c1; //c = c1, c1 = c2, c2 = c;
2292 l2 = l1; //l = l1, l1 = l2, l2 = l;
2294 if (!const_wanted &&
2295 c1 && ((l1 == 0 &&
2296 (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
2297 (l1 == -1 && op == TOK_SAR))) {
2298 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
2299 vtop--;
2300 } else if (!const_wanted &&
2301 c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
2302 (op == '|' &&
2303 (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))) ||
2304 (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
2305 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
2306 if (l2 == 1)
2307 vtop->c.i = 0;
2308 vswap();
2309 vtop--;
2310 } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
2311 op == TOK_PDIV) &&
2312 l2 == 1) ||
2313 ((op == '+' || op == '-' || op == '|' || op == '^' ||
2314 op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
2315 l2 == 0) ||
2316 (op == '&' &&
2317 (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))))) {
2318 /* filter out NOP operations like x*1, x-0, x&-1... */
2319 vtop--;
2320 } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
2321 /* try to use shifts instead of muls or divs */
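/* Illustrative: 'x * 8' becomes 'x << 3'; for TOK_PDIV (used for exact
   divisions such as pointer differences) a divide by 8 becomes an
   arithmetic shift right by 3, and TOK_UDIV uses a logical shift. */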
2322 if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
2323 int n = -1;
2324 while (l2) {
2325 l2 >>= 1;
2326 n++;
2328 vtop->c.i = n;
2329 if (op == '*')
2330 op = TOK_SHL;
2331 else if (op == TOK_PDIV)
2332 op = TOK_SAR;
2333 else
2334 op = TOK_SHR;
2336 goto general_case;
2337 } else if (c2 && (op == '+' || op == '-') &&
2338 (((vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM))
2339 || (vtop[-1].r & (VT_VALMASK | VT_LVAL)) == VT_LOCAL)) {
2340 /* symbol + constant case */
2341 if (op == '-')
2342 l2 = -l2;
2343 l2 += vtop[-1].c.i;
2344 /* The backends can't always deal with addends to symbols
2345 larger than +-1<<31. Don't construct such. */
2346 if ((int)l2 != l2)
2347 goto general_case;
2348 vtop--;
2349 vtop->c.i = l2;
2350 } else {
2351 general_case:
2352 /* call low level op generator */
2353 if (t1 == VT_LLONG || t2 == VT_LLONG ||
2354 (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
2355 gen_opl(op);
2356 else
2357 gen_opi(op);
2362 /* generate a floating point operation with constant propagation */
2363 static void gen_opif(int op)
2365 int c1, c2;
2366 SValue *v1, *v2;
2367 #if defined _MSC_VER && defined __x86_64__
2368 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2369 volatile
2370 #endif
2371 long double f1, f2;
2373 v1 = vtop - 1;
2374 v2 = vtop;
2375 /* currently, we cannot do computations with forward symbols */
2376 c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2377 c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2378 if (c1 && c2) {
2379 if (v1->type.t == VT_FLOAT) {
2380 f1 = v1->c.f;
2381 f2 = v2->c.f;
2382 } else if (v1->type.t == VT_DOUBLE) {
2383 f1 = v1->c.d;
2384 f2 = v2->c.d;
2385 } else {
2386 f1 = v1->c.ld;
2387 f2 = v2->c.ld;
2390 /* NOTE: we only do constant propagation for finite numbers (not
2391 NaN or infinity), per the ANSI spec */
2392 if (!ieee_finite(f1) || !ieee_finite(f2))
2393 goto general_case;
2395 switch(op) {
2396 case '+': f1 += f2; break;
2397 case '-': f1 -= f2; break;
2398 case '*': f1 *= f2; break;
2399 case '/':
2400 if (f2 == 0.0) {
2401 /* If not in initializer we need to potentially generate
2402 FP exceptions at runtime, otherwise we want to fold. */
2403 if (!const_wanted)
2404 goto general_case;
2406 f1 /= f2;
2407 break;
2408 /* XXX: also handle tests? */
2409 default:
2410 goto general_case;
2412 /* XXX: overflow test ? */
2413 if (v1->type.t == VT_FLOAT) {
2414 v1->c.f = f1;
2415 } else if (v1->type.t == VT_DOUBLE) {
2416 v1->c.d = f1;
2417 } else {
2418 v1->c.ld = f1;
2420 vtop--;
2421 } else {
2422 general_case:
2423 gen_opf(op);
2427 static int pointed_size(CType *type)
2429 int align;
2430 return type_size(pointed_type(type), &align);
2433 static void vla_runtime_pointed_size(CType *type)
2435 int align;
2436 vla_runtime_type_size(pointed_type(type), &align);
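/* A null pointer constant here is an integer constant 0 (int or long long)
   or a constant '(void *)0'; a pointer to qualified void such as
   '(const void *)0' is rejected by the check below (informal summary). */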
2439 static inline int is_null_pointer(SValue *p)
2441 if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
2442 return 0;
2443 return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
2444 ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
2445 ((p->type.t & VT_BTYPE) == VT_PTR &&
2446 (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0) &&
2447 ((pointed_type(&p->type)->t & VT_BTYPE) == VT_VOID) &&
2448 0 == (pointed_type(&p->type)->t & (VT_CONSTANT | VT_VOLATILE))
2452 /* check types for comparison or subtraction of pointers */
2453 static void check_comparison_pointer_types(SValue *p1, SValue *p2, int op)
2455 CType *type1, *type2, tmp_type1, tmp_type2;
2456 int bt1, bt2;
2458 /* null pointers are accepted for all comparisons, as in gcc */
2459 if (is_null_pointer(p1) || is_null_pointer(p2))
2460 return;
2461 type1 = &p1->type;
2462 type2 = &p2->type;
2463 bt1 = type1->t & VT_BTYPE;
2464 bt2 = type2->t & VT_BTYPE;
2465 /* accept comparison between pointer and integer with a warning */
2466 if ((is_integer_btype(bt1) || is_integer_btype(bt2)) && op != '-') {
2467 if (op != TOK_LOR && op != TOK_LAND )
2468 tcc_warning("comparison between pointer and integer");
2469 return;
2472 /* both must be pointers or implicit function pointers */
2473 if (bt1 == VT_PTR) {
2474 type1 = pointed_type(type1);
2475 } else if (bt1 != VT_FUNC)
2476 goto invalid_operands;
2478 if (bt2 == VT_PTR) {
2479 type2 = pointed_type(type2);
2480 } else if (bt2 != VT_FUNC) {
2481 invalid_operands:
2482 tcc_error("invalid operands to binary %s", get_tok_str(op, NULL));
2484 if ((type1->t & VT_BTYPE) == VT_VOID ||
2485 (type2->t & VT_BTYPE) == VT_VOID)
2486 return;
2487 tmp_type1 = *type1;
2488 tmp_type2 = *type2;
2489 tmp_type1.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
2490 tmp_type2.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
2491 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
2492 /* gcc-like error if '-' is used */
2493 if (op == '-')
2494 goto invalid_operands;
2495 else
2496 tcc_warning("comparison of distinct pointer types lacks a cast");
2500 /* generic gen_op: handles types problems */
2501 ST_FUNC void gen_op(int op)
2503 int u, t1, t2, bt1, bt2, t;
2504 CType type1;
2506 redo:
2507 t1 = vtop[-1].type.t;
2508 t2 = vtop[0].type.t;
2509 bt1 = t1 & VT_BTYPE;
2510 bt2 = t2 & VT_BTYPE;
2512 if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
2513 tcc_error("operation on a struct");
2514 } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
2515 if (bt2 == VT_FUNC) {
2516 mk_pointer(&vtop->type);
2517 gaddrof();
2519 if (bt1 == VT_FUNC) {
2520 vswap();
2521 mk_pointer(&vtop->type);
2522 gaddrof();
2523 vswap();
2525 goto redo;
2526 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
2527 /* at least one operand is a pointer */
2528 /* relational op: must be both pointers */
2529 if (op >= TOK_ULT && op <= TOK_LOR) {
2530 check_comparison_pointer_types(vtop - 1, vtop, op);
2531 /* pointers are handled as unsigned */
2532 #if PTR_SIZE == 8
2533 t = VT_LLONG | VT_UNSIGNED;
2534 #else
2535 t = VT_INT | VT_UNSIGNED;
2536 #endif
2537 goto std_op;
2539 /* if both pointers, then it must be the '-' op */
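/* Illustrative: for 'int *p, *q; q - p' the byte difference is computed
   first and then divided by sizeof(int) with TOK_PDIV below, giving an
   element count of type ptrdiff_t. */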
2540 if (bt1 == VT_PTR && bt2 == VT_PTR) {
2541 if (op != '-')
2542 tcc_error("cannot use pointers here");
2543 check_comparison_pointer_types(vtop - 1, vtop, op);
2544 /* XXX: check that types are compatible */
2545 if (vtop[-1].type.t & VT_VLA) {
2546 vla_runtime_pointed_size(&vtop[-1].type);
2547 } else {
2548 vpushi(pointed_size(&vtop[-1].type));
2550 vrott(3);
2551 gen_opic(op);
2552 vtop->type.t = VT_PTRDIFF_T;
2553 vswap();
2554 gen_op(TOK_PDIV);
2555 } else {
2556 /* exactly one pointer : must be '+' or '-'. */
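/* Illustrative: for 'int *p; p + 3' the integer operand is first
   multiplied by the element size (gen_op('*') below), so the generated
   addition works on a byte offset of 3 * sizeof(int). */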
2557 if (op != '-' && op != '+')
2558 tcc_error("cannot use pointers here");
2559 /* Put pointer as first operand */
2560 if (bt2 == VT_PTR) {
2561 vswap();
2562 t = t1, t1 = t2, t2 = t;
2564 #if PTR_SIZE == 4
2565 if ((vtop[0].type.t & VT_BTYPE) == VT_LLONG)
2566 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2567 gen_cast_s(VT_INT);
2568 #endif
2569 type1 = vtop[-1].type;
2570 type1.t &= ~VT_ARRAY;
2571 if (vtop[-1].type.t & VT_VLA)
2572 vla_runtime_pointed_size(&vtop[-1].type);
2573 else {
2574 u = pointed_size(&vtop[-1].type);
2575 if (u < 0)
2576 tcc_error("unknown array element size");
2577 #if PTR_SIZE == 8
2578 vpushll(u);
2579 #else
2580 /* XXX: cast to int ? (long long case) */
2581 vpushi(u);
2582 #endif
2584 gen_op('*');
2585 #ifdef CONFIG_TCC_BCHECK
2586 if (tcc_state->do_bounds_check && !const_wanted) {
2587 /* if bounded pointers, we generate a special code to
2588 test bounds */
2589 if (op == '-') {
2590 vpushi(0);
2591 vswap();
2592 gen_op('-');
2594 gen_bounded_ptr_add();
2595 } else
2596 #endif
2598 gen_opic(op);
2600 /* restore the type in case gen_opic() swapped the operands */
2601 vtop->type = type1;
2603 } else if (is_float(bt1) || is_float(bt2)) {
2604 /* compute bigger type and do implicit casts */
2605 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
2606 t = VT_LDOUBLE;
2607 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
2608 t = VT_DOUBLE;
2609 } else {
2610 t = VT_FLOAT;
2612 /* floats can only be used for a few operations */
2613 if (op != '+' && op != '-' && op != '*' && op != '/' &&
2614 (op < TOK_ULT || op > TOK_GT))
2615 tcc_error("invalid operands for binary operation");
2616 goto std_op;
2617 } else if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL) {
2618 t = bt1 == VT_LLONG ? VT_LLONG : VT_INT;
2619 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (t | VT_UNSIGNED))
2620 t |= VT_UNSIGNED;
2621 t |= (VT_LONG & t1);
2622 goto std_op;
2623 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
2624 /* cast to biggest op */
2625 t = VT_LLONG | VT_LONG;
2626 if (bt1 == VT_LLONG)
2627 t &= t1;
2628 if (bt2 == VT_LLONG)
2629 t &= t2;
2630 /* convert to unsigned if it does not fit in a long long */
2631 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
2632 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
2633 t |= VT_UNSIGNED;
2634 goto std_op;
2635 } else {
2636 /* integer operations */
2637 t = VT_INT | (VT_LONG & (t1 | t2));
2638 /* convert to unsigned if it does not fit in an integer */
2639 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
2640 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
2641 t |= VT_UNSIGNED;
2642 std_op:
2643 /* XXX: currently, some unsigned operations are explicit, so
2644 we modify them here */
2645 if (t & VT_UNSIGNED) {
2646 if (op == TOK_SAR)
2647 op = TOK_SHR;
2648 else if (op == '/')
2649 op = TOK_UDIV;
2650 else if (op == '%')
2651 op = TOK_UMOD;
2652 else if (op == TOK_LT)
2653 op = TOK_ULT;
2654 else if (op == TOK_GT)
2655 op = TOK_UGT;
2656 else if (op == TOK_LE)
2657 op = TOK_ULE;
2658 else if (op == TOK_GE)
2659 op = TOK_UGE;
2661 vswap();
2662 type1.t = t;
2663 type1.ref = NULL;
2664 gen_cast(&type1);
2665 vswap();
2666 /* special case for shifts and long long: we keep the shift as
2667 an integer */
2668 if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
2669 type1.t = VT_INT;
2670 gen_cast(&type1);
2671 if (is_float(t))
2672 gen_opif(op);
2673 else
2674 gen_opic(op);
2675 if (op >= TOK_ULT && op <= TOK_GT) {
2676 /* relational op: the result is an int */
2677 vtop->type.t = VT_INT;
2678 } else {
2679 vtop->type.t = t;
2682 // Make sure that we have converted to an rvalue:
2683 if (vtop->r & VT_LVAL)
2684 gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
2687 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64 || defined TCC_TARGET_ARM
2688 #define gen_cvt_itof1 gen_cvt_itof
2689 #else
2690 /* generic itof for unsigned long long case */
2691 static void gen_cvt_itof1(int t)
2693 if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
2694 (VT_LLONG | VT_UNSIGNED)) {
2696 if (t == VT_FLOAT)
2697 vpush_global_sym(&func_old_type, TOK___floatundisf);
2698 #if LDOUBLE_SIZE != 8
2699 else if (t == VT_LDOUBLE)
2700 vpush_global_sym(&func_old_type, TOK___floatundixf);
2701 #endif
2702 else
2703 vpush_global_sym(&func_old_type, TOK___floatundidf);
2704 vrott(2);
2705 gfunc_call(1);
2706 vpushi(0);
2707 PUT_R_RET(vtop, t);
2708 } else {
2709 gen_cvt_itof(t);
2712 #endif
2714 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64
2715 #define gen_cvt_ftoi1 gen_cvt_ftoi
2716 #else
2717 /* generic ftoi for unsigned long long case */
2718 static void gen_cvt_ftoi1(int t)
2720 int st;
2721 if (t == (VT_LLONG | VT_UNSIGNED)) {
2722 /* not handled natively */
2723 st = vtop->type.t & VT_BTYPE;
2724 if (st == VT_FLOAT)
2725 vpush_global_sym(&func_old_type, TOK___fixunssfdi);
2726 #if LDOUBLE_SIZE != 8
2727 else if (st == VT_LDOUBLE)
2728 vpush_global_sym(&func_old_type, TOK___fixunsxfdi);
2729 #endif
2730 else
2731 vpush_global_sym(&func_old_type, TOK___fixunsdfdi);
2732 vrott(2);
2733 gfunc_call(1);
2734 vpushi(0);
2735 PUT_R_RET(vtop, t);
2736 } else {
2737 gen_cvt_ftoi(t);
2740 #endif
2742 /* special delayed cast for char/short */
2743 static void force_charshort_cast(void)
2745 int sbt = BFGET(vtop->r, VT_MUSTCAST) == 2 ? VT_LLONG : VT_INT;
2746 int dbt = vtop->type.t;
2747 vtop->r &= ~VT_MUSTCAST;
2748 vtop->type.t = sbt;
2749 gen_cast_s(dbt == VT_BOOL ? VT_BYTE|VT_UNSIGNED : dbt);
2750 vtop->type.t = dbt;
2753 static void gen_cast_s(int t)
2755 CType type;
2756 type.t = t;
2757 type.ref = NULL;
2758 gen_cast(&type);
2761 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2762 static void gen_cast(CType *type)
2764 int sbt, dbt, sf, df, c;
2765 int dbt_bt, sbt_bt, ds, ss, bits, trunc;
2767 /* special delayed cast for char/short */
2768 if (vtop->r & VT_MUSTCAST)
2769 force_charshort_cast();
2771 /* bitfields first get cast to ints */
2772 if (vtop->type.t & VT_BITFIELD)
2773 gv(RC_INT);
2775 dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
2776 sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
2777 if (sbt == VT_FUNC)
2778 sbt = VT_PTR;
2780 again:
2781 if (sbt != dbt) {
2782 sf = is_float(sbt);
2783 df = is_float(dbt);
2784 dbt_bt = dbt & VT_BTYPE;
2785 sbt_bt = sbt & VT_BTYPE;
2787 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2788 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
2789 c &= (dbt != VT_LDOUBLE) | !!nocode_wanted;
2790 #endif
2791 if (c) {
2792 /* constant case: we can do it now */
2793 /* XXX: in ISOC, cannot do it if error in convert */
2794 if (sbt == VT_FLOAT)
2795 vtop->c.ld = vtop->c.f;
2796 else if (sbt == VT_DOUBLE)
2797 vtop->c.ld = vtop->c.d;
2799 if (df) {
2800 if (sbt_bt == VT_LLONG) {
2801 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
2802 vtop->c.ld = vtop->c.i;
2803 else
2804 vtop->c.ld = -(long double)-vtop->c.i;
2805 } else if(!sf) {
2806 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
2807 vtop->c.ld = (uint32_t)vtop->c.i;
2808 else
2809 vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
2812 if (dbt == VT_FLOAT)
2813 vtop->c.f = (float)vtop->c.ld;
2814 else if (dbt == VT_DOUBLE)
2815 vtop->c.d = (double)vtop->c.ld;
2816 } else if (sf && dbt == VT_BOOL) {
2817 vtop->c.i = (vtop->c.ld != 0);
2818 } else {
2819 if(sf)
2820 vtop->c.i = vtop->c.ld;
2821 else if (sbt_bt == VT_LLONG || (PTR_SIZE == 8 && sbt == VT_PTR))
2823 else if (sbt & VT_UNSIGNED)
2824 vtop->c.i = (uint32_t)vtop->c.i;
2825 else
2826 vtop->c.i = ((uint32_t)vtop->c.i | -(vtop->c.i & 0x80000000));
2828 if (dbt_bt == VT_LLONG || (PTR_SIZE == 8 && dbt == VT_PTR))
2830 else if (dbt == VT_BOOL)
2831 vtop->c.i = (vtop->c.i != 0);
2832 else {
2833 uint32_t m = dbt_bt == VT_BYTE ? 0xff :
2834 dbt_bt == VT_SHORT ? 0xffff :
2835 0xffffffff;
2836 vtop->c.i &= m;
2837 if (!(dbt & VT_UNSIGNED))
2838 vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
2841 goto done;
2843 } else if (dbt == VT_BOOL
2844 && (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM))
2845 == (VT_CONST | VT_SYM)) {
2846 /* addresses are considered non-zero (see tcctest.c:sinit23) */
2847 vtop->r = VT_CONST;
2848 vtop->c.i = 1;
2849 goto done;
2852 /* cannot generate code for global or static initializers */
2853 if (STATIC_DATA_WANTED)
2854 goto done;
2856 /* non constant case: generate code */
2857 if (dbt == VT_BOOL) {
2858 gen_test_zero(TOK_NE);
2859 goto done;
2862 if (sf || df) {
2863 if (sf && df) {
2864 /* convert from fp to fp */
2865 gen_cvt_ftof(dbt);
2866 } else if (df) {
2867 /* convert int to fp */
2868 gen_cvt_itof1(dbt);
2869 } else {
2870 /* convert fp to int */
2871 sbt = dbt;
2872 if (dbt_bt != VT_LLONG && dbt_bt != VT_INT)
2873 sbt = VT_INT;
2874 gen_cvt_ftoi1(sbt);
2875 goto again; /* may need char/short cast */
2877 goto done;
2880 ds = btype_size(dbt_bt);
2881 ss = btype_size(sbt_bt);
2882 if (ds == 0 || ss == 0) {
2883 if (dbt_bt == VT_VOID)
2884 goto done;
2885 cast_error(&vtop->type, type);
2887 if (IS_ENUM(type->t) && type->ref->c < 0)
2888 tcc_error("cast to incomplete type");
2890 /* same size and no sign conversion needed */
2891 if (ds == ss && ds >= 4)
2892 goto done;
2893 if (dbt_bt == VT_PTR || sbt_bt == VT_PTR) {
2894 tcc_warning("cast between pointer and integer of different size");
2895 if (sbt_bt == VT_PTR) {
2896 /* put integer type to allow logical operations below */
2897 vtop->type.t = (PTR_SIZE == 8 ? VT_LLONG : VT_INT);
2901 /* the processor allows { int a = 0, b = *(char*)&a; }
2902 That means that if we cast to a smaller width, we can just
2903 change the type and still read the value later. */
2904 #define ALLOW_SUBTYPE_ACCESS 1
2906 if (ALLOW_SUBTYPE_ACCESS && (vtop->r & VT_LVAL)) {
2907 /* value still in memory */
2908 if (ds <= ss)
2909 goto done;
2910 /* ss <= 4 here */
2911 if (ds <= 4) {
2912 gv(RC_INT);
2913 goto done; /* no 64bit involved */
2916 gv(RC_INT);
2918 trunc = 0;
2919 #if PTR_SIZE == 4
2920 if (ds == 8) {
2921 /* generate high word */
2922 if (sbt & VT_UNSIGNED) {
2923 vpushi(0);
2924 gv(RC_INT);
2925 } else {
2926 gv_dup();
2927 vpushi(31);
2928 gen_op(TOK_SAR);
2930 lbuild(dbt);
2931 } else if (ss == 8) {
2932 /* from long long: just take low order word */
2933 lexpand();
2934 vpop();
2936 ss = 4;
2938 #elif PTR_SIZE == 8
2939 if (ds == 8) {
2940 /* need to convert from 32bit to 64bit */
2941 if (sbt & VT_UNSIGNED) {
2942 #if defined(TCC_TARGET_RISCV64)
2943 /* RISC-V keeps 32bit vals in registers sign-extended.
2944 So here we need a zero-extension. */
2945 trunc = 32;
2946 #else
2947 goto done;
2948 #endif
2949 } else {
2950 gen_cvt_sxtw();
2951 goto done;
2953 ss = ds, ds = 4, dbt = sbt;
2954 } else if (ss == 8) {
2955 /* XXX some architectures (e.g. risc-v) would prefer
2956 this to be merely a 32-to-64 sign or zero
2957 extension. */
2958 trunc = 32; /* zero upper 32 bits */
2959 } else {
2960 ss = 4;
2962 #endif
2964 if (ds >= ss)
2965 goto done;
2966 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64 || defined TCC_TARGET_ARM64
2967 if (ss == 4) {
2968 gen_cvt_csti(dbt);
2969 goto done;
2971 #endif
2972 bits = (ss - ds) * 8;
2973 /* for unsigned, gen_op will convert SAR to SHR */
2974 vtop->type.t = (ss == 8 ? VT_LLONG : VT_INT) | (dbt & VT_UNSIGNED);
2975 vpushi(bits);
2976 gen_op(TOK_SHL);
2977 vpushi(bits - trunc);
2978 gen_op(TOK_SAR);
2979 vpushi(trunc);
2980 gen_op(TOK_SHR);
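/* Illustrative: truncating an int to signed char gives bits == 24 and
   trunc == 0, i.e. (x << 24) >> 24, which sign-extends the low byte;
   when trunc is 32 the final logical shift clears the upper 32 bits. */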
2982 done:
2983 vtop->type = *type;
2984 vtop->type.t &= ~ ( VT_CONSTANT | VT_VOLATILE | VT_ARRAY );
2987 /* return type size as known at compile time. Put alignment at 'a' */
2988 ST_FUNC int type_size(CType *type, int *a)
2990 Sym *s;
2991 int bt;
2993 bt = type->t & VT_BTYPE;
2994 if (bt == VT_STRUCT) {
2995 /* struct/union */
2996 s = type->ref;
2997 *a = s->r;
2998 return s->c;
2999 } else if (bt == VT_PTR) {
3000 if (type->t & VT_ARRAY) {
3001 int ts;
3003 s = type->ref;
3004 ts = type_size(&s->type, a);
3006 if (ts < 0 && s->c < 0)
3007 ts = -ts;
3009 return ts * s->c;
3010 } else {
3011 *a = PTR_SIZE;
3012 return PTR_SIZE;
3014 } else if (IS_ENUM(type->t) && type->ref->c < 0) {
3015 return -1; /* incomplete enum */
3016 } else if (bt == VT_LDOUBLE) {
3017 *a = LDOUBLE_ALIGN;
3018 return LDOUBLE_SIZE;
3019 } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
3020 #ifdef TCC_TARGET_I386
3021 #ifdef TCC_TARGET_PE
3022 *a = 8;
3023 #else
3024 *a = 4;
3025 #endif
3026 #elif defined(TCC_TARGET_ARM)
3027 #ifdef TCC_ARM_EABI
3028 *a = 8;
3029 #else
3030 *a = 4;
3031 #endif
3032 #else
3033 *a = 8;
3034 #endif
3035 return 8;
3036 } else if (bt == VT_INT || bt == VT_FLOAT) {
3037 *a = 4;
3038 return 4;
3039 } else if (bt == VT_SHORT) {
3040 *a = 2;
3041 return 2;
3042 } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
3043 *a = 8;
3044 return 16;
3045 } else {
3046 /* char, void, function, _Bool */
3047 *a = 1;
3048 return 1;
3052 /* push the type size as known at run time on top of the value stack. Put
3053 alignment at 'a' */
3054 ST_FUNC void vla_runtime_type_size(CType *type, int *a)
3056 if (type->t & VT_VLA) {
3057 type_size(&type->ref->type, a);
3058 vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
3059 } else {
3060 vpushi(type_size(type, a));
3064 /* return the pointed type of t */
3065 static inline CType *pointed_type(CType *type)
3067 return &type->ref->type;
3070 /* modify 'type' so that it becomes a pointer to the original type. */
3071 ST_FUNC void mk_pointer(CType *type)
3073 Sym *s;
3074 s = sym_push(SYM_FIELD, type, 0, -1);
3075 type->t = VT_PTR | (type->t & VT_STORAGE);
3076 type->ref = s;
3079 /* compare function types. OLD functions match any new functions */
3080 static int is_compatible_func(CType *type1, CType *type2)
3082 Sym *s1, *s2;
3084 s1 = type1->ref;
3085 s2 = type2->ref;
3086 if (s1->f.func_call != s2->f.func_call)
3087 return 0;
3088 if (s1->f.func_type != s2->f.func_type
3089 && s1->f.func_type != FUNC_OLD
3090 && s2->f.func_type != FUNC_OLD)
3091 return 0;
3092 /* we should check the function return type for FUNC_OLD too
3093 but that causes problems with the internally used support
3094 functions such as TOK_memmove */
3095 if (s1->f.func_type == FUNC_OLD && !s1->next)
3096 return 1;
3097 if (s2->f.func_type == FUNC_OLD && !s2->next)
3098 return 1;
3099 for (;;) {
3100 if (!is_compatible_unqualified_types(&s1->type, &s2->type))
3101 return 0;
3102 s1 = s1->next;
3103 s2 = s2->next;
3104 if (!s1)
3105 return !s2;
3106 if (!s2)
3107 return 0;
3111 /* return true if type1 and type2 are the same. If unqualified is
3112 true, qualifiers on the types are ignored.
3114 static int compare_types(CType *type1, CType *type2, int unqualified)
3116 int bt1, t1, t2;
3118 t1 = type1->t & VT_TYPE;
3119 t2 = type2->t & VT_TYPE;
3120 if (unqualified) {
3121 /* strip qualifiers before comparing */
3122 t1 &= ~(VT_CONSTANT | VT_VOLATILE);
3123 t2 &= ~(VT_CONSTANT | VT_VOLATILE);
3126 /* Default vs. explicit signedness only matters for char */
3127 if ((t1 & VT_BTYPE) != VT_BYTE) {
3128 t1 &= ~VT_DEFSIGN;
3129 t2 &= ~VT_DEFSIGN;
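/* Illustrative: plain 'char' and 'signed char' stay distinct types here,
   whereas an explicit 'signed' makes no difference for 'int'. */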
3131 /* XXX: bitfields ? */
3132 if (t1 != t2)
3133 return 0;
3135 if ((t1 & VT_ARRAY)
3136 && !(type1->ref->c < 0
3137 || type2->ref->c < 0
3138 || type1->ref->c == type2->ref->c))
3139 return 0;
3141 /* test more complicated cases */
3142 bt1 = t1 & VT_BTYPE;
3143 if (bt1 == VT_PTR) {
3144 type1 = pointed_type(type1);
3145 type2 = pointed_type(type2);
3146 return is_compatible_types(type1, type2);
3147 } else if (bt1 == VT_STRUCT) {
3148 return (type1->ref == type2->ref);
3149 } else if (bt1 == VT_FUNC) {
3150 return is_compatible_func(type1, type2);
3151 } else if (IS_ENUM(type1->t) || IS_ENUM(type2->t)) {
3152 return type1->ref == type2->ref;
3153 } else {
3154 return 1;
3158 /* return true if type1 and type2 are exactly the same (including
3159 qualifiers).
3161 static int is_compatible_types(CType *type1, CType *type2)
3163 return compare_types(type1,type2,0);
3166 /* return true if type1 and type2 are the same (ignoring qualifiers).
3168 static int is_compatible_unqualified_types(CType *type1, CType *type2)
3170 return compare_types(type1,type2,1);
3173 /* print a type. If 'varstr' is not NULL, then the variable is also
3174 printed in the type */
3175 /* XXX: union */
3176 /* XXX: add array and function pointers */
3177 static void type_to_str(char *buf, int buf_size,
3178 CType *type, const char *varstr)
3180 int bt, v, t;
3181 Sym *s, *sa;
3182 char buf1[256];
3183 const char *tstr;
3185 t = type->t;
3186 bt = t & VT_BTYPE;
3187 buf[0] = '\0';
3189 if (t & VT_EXTERN)
3190 pstrcat(buf, buf_size, "extern ");
3191 if (t & VT_STATIC)
3192 pstrcat(buf, buf_size, "static ");
3193 if (t & VT_TYPEDEF)
3194 pstrcat(buf, buf_size, "typedef ");
3195 if (t & VT_INLINE)
3196 pstrcat(buf, buf_size, "inline ");
3197 if (t & VT_VOLATILE)
3198 pstrcat(buf, buf_size, "volatile ");
3199 if (t & VT_CONSTANT)
3200 pstrcat(buf, buf_size, "const ");
3202 if (((t & VT_DEFSIGN) && bt == VT_BYTE)
3203 || ((t & VT_UNSIGNED)
3204 && (bt == VT_SHORT || bt == VT_INT || bt == VT_LLONG)
3205 && !IS_ENUM(t)
3207 pstrcat(buf, buf_size, (t & VT_UNSIGNED) ? "unsigned " : "signed ");
3209 buf_size -= strlen(buf);
3210 buf += strlen(buf);
3212 switch(bt) {
3213 case VT_VOID:
3214 tstr = "void";
3215 goto add_tstr;
3216 case VT_BOOL:
3217 tstr = "_Bool";
3218 goto add_tstr;
3219 case VT_BYTE:
3220 tstr = "char";
3221 goto add_tstr;
3222 case VT_SHORT:
3223 tstr = "short";
3224 goto add_tstr;
3225 case VT_INT:
3226 tstr = "int";
3227 goto maybe_long;
3228 case VT_LLONG:
3229 tstr = "long long";
3230 maybe_long:
3231 if (t & VT_LONG)
3232 tstr = "long";
3233 if (!IS_ENUM(t))
3234 goto add_tstr;
3235 tstr = "enum ";
3236 goto tstruct;
3237 case VT_FLOAT:
3238 tstr = "float";
3239 goto add_tstr;
3240 case VT_DOUBLE:
3241 tstr = "double";
3242 goto add_tstr;
3243 case VT_LDOUBLE:
3244 tstr = "long double";
3245 add_tstr:
3246 pstrcat(buf, buf_size, tstr);
3247 break;
3248 case VT_STRUCT:
3249 tstr = "struct ";
3250 if (IS_UNION(t))
3251 tstr = "union ";
3252 tstruct:
3253 pstrcat(buf, buf_size, tstr);
3254 v = type->ref->v & ~SYM_STRUCT;
3255 if (v >= SYM_FIRST_ANOM)
3256 pstrcat(buf, buf_size, "<anonymous>");
3257 else
3258 pstrcat(buf, buf_size, get_tok_str(v, NULL));
3259 break;
3260 case VT_FUNC:
3261 s = type->ref;
3262 buf1[0]=0;
3263 if (varstr && '*' == *varstr) {
3264 pstrcat(buf1, sizeof(buf1), "(");
3265 pstrcat(buf1, sizeof(buf1), varstr);
3266 pstrcat(buf1, sizeof(buf1), ")");
3268 pstrcat(buf1, sizeof(buf1), "(");
3269 sa = s->next;
3270 while (sa != NULL) {
3271 char buf2[256];
3272 type_to_str(buf2, sizeof(buf2), &sa->type, NULL);
3273 pstrcat(buf1, sizeof(buf1), buf2);
3274 sa = sa->next;
3275 if (sa)
3276 pstrcat(buf1, sizeof(buf1), ", ");
3278 if (s->f.func_type == FUNC_ELLIPSIS)
3279 pstrcat(buf1, sizeof(buf1), ", ...");
3280 pstrcat(buf1, sizeof(buf1), ")");
3281 type_to_str(buf, buf_size, &s->type, buf1);
3282 goto no_var;
3283 case VT_PTR:
3284 s = type->ref;
3285 if (t & VT_ARRAY) {
3286 if (varstr && '*' == *varstr)
3287 snprintf(buf1, sizeof(buf1), "(%s)[%d]", varstr, s->c);
3288 else
3289 snprintf(buf1, sizeof(buf1), "%s[%d]", varstr ? varstr : "", s->c);
3290 type_to_str(buf, buf_size, &s->type, buf1);
3291 goto no_var;
3293 pstrcpy(buf1, sizeof(buf1), "*");
3294 if (t & VT_CONSTANT)
3295 pstrcat(buf1, sizeof(buf1), "const ");
3296 if (t & VT_VOLATILE)
3297 pstrcat(buf1, sizeof(buf1), "volatile ");
3298 if (varstr)
3299 pstrcat(buf1, sizeof(buf1), varstr);
3300 type_to_str(buf, buf_size, &s->type, buf1);
3301 goto no_var;
3303 if (varstr) {
3304 pstrcat(buf, buf_size, " ");
3305 pstrcat(buf, buf_size, varstr);
3307 no_var: ;
3310 static void cast_error(CType *st, CType *dt)
3312 char buf1[256], buf2[256];
3313 type_to_str(buf1, sizeof(buf1), st, NULL);
3314 type_to_str(buf2, sizeof(buf2), dt, NULL);
3315 tcc_error("cannot convert '%s' to '%s'", buf1, buf2);
3318 /* verify type compatibility to store vtop in 'dt' type */
3319 static void verify_assign_cast(CType *dt)
3321 CType *st, *type1, *type2;
3322 int dbt, sbt, qualwarn, lvl;
3324 st = &vtop->type; /* source type */
3325 dbt = dt->t & VT_BTYPE;
3326 sbt = st->t & VT_BTYPE;
3327 if (dt->t & VT_CONSTANT)
3328 tcc_warning("assignment of read-only location");
3329 switch(dbt) {
3330 case VT_VOID:
3331 if (sbt != dbt)
3332 tcc_error("assignment to void expression");
3333 break;
3334 case VT_PTR:
3335 /* special cases for pointers */
3336 /* '0' can also be a pointer */
3337 if (is_null_pointer(vtop))
3338 break;
3339 /* accept implicit pointer to integer cast with warning */
3340 if (is_integer_btype(sbt)) {
3341 tcc_warning("assignment makes pointer from integer without a cast");
3342 break;
3344 type1 = pointed_type(dt);
3345 if (sbt == VT_PTR)
3346 type2 = pointed_type(st);
3347 else if (sbt == VT_FUNC)
3348 type2 = st; /* a function is implicitly a function pointer */
3349 else
3350 goto error;
3351 if (is_compatible_types(type1, type2))
3352 break;
3353 for (qualwarn = lvl = 0;; ++lvl) {
3354 if (((type2->t & VT_CONSTANT) && !(type1->t & VT_CONSTANT)) ||
3355 ((type2->t & VT_VOLATILE) && !(type1->t & VT_VOLATILE)))
3356 qualwarn = 1;
3357 dbt = type1->t & (VT_BTYPE|VT_LONG);
3358 sbt = type2->t & (VT_BTYPE|VT_LONG);
3359 if (dbt != VT_PTR || sbt != VT_PTR)
3360 break;
3361 type1 = pointed_type(type1);
3362 type2 = pointed_type(type2);
3364 if (!is_compatible_unqualified_types(type1, type2)) {
3365 if ((dbt == VT_VOID || sbt == VT_VOID) && lvl == 0) {
3366 /* void * can match anything */
3367 } else if (dbt == sbt
3368 && is_integer_btype(sbt & VT_BTYPE)
3369 && IS_ENUM(type1->t) + IS_ENUM(type2->t)
3370 + !!((type1->t ^ type2->t) & VT_UNSIGNED) < 2) {
3371 /* Like GCC, don't warn by default for mere changes
3372 in pointer target signedness. Do warn for different
3373 base types, though, in particular for unsigned enums
3374 and signed int targets. */
3375 } else {
3376 tcc_warning("assignment from incompatible pointer type");
3377 break;
3380 if (qualwarn)
3381 tcc_warning("assignment discards qualifiers from pointer target type");
3382 break;
3383 case VT_BYTE:
3384 case VT_SHORT:
3385 case VT_INT:
3386 case VT_LLONG:
3387 if (sbt == VT_PTR || sbt == VT_FUNC) {
3388 tcc_warning("assignment makes integer from pointer without a cast");
3389 } else if (sbt == VT_STRUCT) {
3390 goto case_VT_STRUCT;
3392 /* XXX: more tests */
3393 break;
3394 case VT_STRUCT:
3395 case_VT_STRUCT:
3396 if (!is_compatible_unqualified_types(dt, st)) {
3397 error:
3398 cast_error(st, dt);
3400 break;
3404 static void gen_assign_cast(CType *dt)
3406 verify_assign_cast(dt);
3407 gen_cast(dt);
3410 /* store vtop in lvalue pushed on stack */
3411 ST_FUNC void vstore(void)
3413 int sbt, dbt, ft, r, size, align, bit_size, bit_pos, delayed_cast;
3415 ft = vtop[-1].type.t;
3416 sbt = vtop->type.t & VT_BTYPE;
3417 dbt = ft & VT_BTYPE;
3419 verify_assign_cast(&vtop[-1].type);
3421 if (sbt == VT_STRUCT) {
3422 /* if structure, only generate pointer */
3423 /* structure assignment : generate memcpy */
3424 /* XXX: optimize if small size */
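/* Illustrative: for 'struct S a, b; a = b;' this emits a call equivalent
   to memmove(&a, &b, sizeof(struct S)) (or memcpy4/memcpy8 on ARM EABI
   when the alignment allows it). */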
3425 size = type_size(&vtop->type, &align);
3427 /* destination */
3428 vswap();
3429 #ifdef CONFIG_TCC_BCHECK
3430 if (vtop->r & VT_MUSTBOUND)
3431 gbound(); /* check would be wrong after gaddrof() */
3432 #endif
3433 vtop->type.t = VT_PTR;
3434 gaddrof();
3436 /* address of memcpy() */
3437 #ifdef TCC_ARM_EABI
3438 if(!(align & 7))
3439 vpush_global_sym(&func_old_type, TOK_memcpy8);
3440 else if(!(align & 3))
3441 vpush_global_sym(&func_old_type, TOK_memcpy4);
3442 else
3443 #endif
3444 /* Use memmove, rather than memcpy, as dest and src may be same: */
3445 vpush_global_sym(&func_old_type, TOK_memmove);
3447 vswap();
3448 /* source */
3449 vpushv(vtop - 2);
3450 #ifdef CONFIG_TCC_BCHECK
3451 if (vtop->r & VT_MUSTBOUND)
3452 gbound();
3453 #endif
3454 vtop->type.t = VT_PTR;
3455 gaddrof();
3456 /* type size */
3457 vpushi(size);
3458 gfunc_call(3);
3459 /* leave source on stack */
3461 } else if (ft & VT_BITFIELD) {
3462 /* bitfield store handling */
3464 /* save lvalue as expression result (example: s.b = s.a = n;) */
3465 vdup(), vtop[-1] = vtop[-2];
3467 bit_pos = BIT_POS(ft);
3468 bit_size = BIT_SIZE(ft);
3469 /* remove bit field info to avoid loops */
3470 vtop[-1].type.t = ft & ~VT_STRUCT_MASK;
3472 if (dbt == VT_BOOL) {
3473 gen_cast(&vtop[-1].type);
3474 vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
3476 r = adjust_bf(vtop - 1, bit_pos, bit_size);
3477 if (dbt != VT_BOOL) {
3478 gen_cast(&vtop[-1].type);
3479 dbt = vtop[-1].type.t & VT_BTYPE;
3481 if (r == VT_STRUCT) {
3482 store_packed_bf(bit_pos, bit_size);
3483 } else {
3484 unsigned long long mask = (1ULL << bit_size) - 1;
3485 if (dbt != VT_BOOL) {
3486 /* mask source */
3487 if (dbt == VT_LLONG)
3488 vpushll(mask);
3489 else
3490 vpushi((unsigned)mask);
3491 gen_op('&');
3493 /* shift source */
3494 vpushi(bit_pos);
3495 gen_op(TOK_SHL);
3496 vswap();
3497 /* duplicate destination */
3498 vdup();
3499 vrott(3);
3500 /* load destination, mask and or with source */
3501 if (dbt == VT_LLONG)
3502 vpushll(~(mask << bit_pos));
3503 else
3504 vpushi(~((unsigned)mask << bit_pos));
3505 gen_op('&');
3506 gen_op('|');
3507 /* store result */
3508 vstore();
3509 /* ... and discard */
3510 vpop();
3512 } else if (dbt == VT_VOID) {
3513 --vtop;
3514 } else {
3515 /* optimize char/short casts */
3516 delayed_cast = 0;
3517 if ((dbt == VT_BYTE || dbt == VT_SHORT)
3518 && is_integer_btype(sbt)
3520 if ((vtop->r & VT_MUSTCAST)
3521 && btype_size(dbt) > btype_size(sbt)
3523 force_charshort_cast();
3524 delayed_cast = 1;
3525 } else {
3526 gen_cast(&vtop[-1].type);
3529 #ifdef CONFIG_TCC_BCHECK
3530 /* bound check case */
3531 if (vtop[-1].r & VT_MUSTBOUND) {
3532 vswap();
3533 gbound();
3534 vswap();
3536 #endif
3537 gv(RC_TYPE(dbt)); /* generate value */
3539 if (delayed_cast) {
3540 vtop->r |= BFVAL(VT_MUSTCAST, (sbt == VT_LLONG) + 1);
3541 //tcc_warning("deley cast %x -> %x", sbt, dbt);
3542 vtop->type.t = ft & VT_TYPE;
3545 /* if lvalue was saved on stack, must read it */
3546 if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
3547 SValue sv;
3548 r = get_reg(RC_INT);
3549 sv.type.t = VT_PTRDIFF_T;
3550 sv.r = VT_LOCAL | VT_LVAL;
3551 sv.c.i = vtop[-1].c.i;
3552 load(r, &sv);
3553 vtop[-1].r = r | VT_LVAL;
3556 r = vtop->r & VT_VALMASK;
3557 /* two word case handling :
3558 store second register at word + 4 (or +8 for x86-64) */
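/* Illustrative: storing a long long on a 32-bit target writes the first
   register to the lvalue address and r2 to address + PTR_SIZE. */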
3559 if (USING_TWO_WORDS(dbt)) {
3560 int load_type = (dbt == VT_QFLOAT) ? VT_DOUBLE : VT_PTRDIFF_T;
3561 vtop[-1].type.t = load_type;
3562 store(r, vtop - 1);
3563 vswap();
3564 /* convert to int to increment easily */
3565 vtop->type.t = VT_PTRDIFF_T;
3566 gaddrof();
3567 vpushs(PTR_SIZE);
3568 gen_op('+');
3569 vtop->r |= VT_LVAL;
3570 vswap();
3571 vtop[-1].type.t = load_type;
3572 /* XXX: it works because r2 is spilled last ! */
3573 store(vtop->r2, vtop - 1);
3574 } else {
3575 /* single word */
3576 store(r, vtop - 1);
3578 vswap();
3579 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
3583 /* 'post' selects post- vs. pre-increment/decrement. c is the token ++ or -- */
3584 ST_FUNC void inc(int post, int c)
3586 test_lvalue();
3587 vdup(); /* save lvalue */
3588 if (post) {
3589 gv_dup(); /* duplicate value */
3590 vrotb(3);
3591 vrotb(3);
3593 /* add constant */
3594 vpushi(c - TOK_MID);
3595 gen_op('+');
3596 vstore(); /* store value */
3597 if (post)
3598 vpop(); /* if post op, return saved value */
3601 ST_FUNC void parse_mult_str (CString *astr, const char *msg)
3603 /* read the string */
3604 if (tok != TOK_STR)
3605 expect(msg);
3606 cstr_new(astr);
3607 while (tok == TOK_STR) {
3608 /* XXX: add \0 handling too ? */
3609 cstr_cat(astr, tokc.str.data, -1);
3610 next();
3612 cstr_ccat(astr, '\0');
3615 /* If I is >= 1 and a power of two, returns log2(i)+1.
3616 If I is 0 returns 0. */
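/* Illustrative: exact_log2p1(1) == 1 and exact_log2p1(8) == 4; the result
   is what the 'aligned' attribute field stores, decoded later as
   1 << (aligned - 1). */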
3617 static int exact_log2p1(int i)
3619 int ret;
3620 if (!i)
3621 return 0;
3622 for (ret = 1; i >= 1 << 8; ret += 8)
3623 i >>= 8;
3624 if (i >= 1 << 4)
3625 ret += 4, i >>= 4;
3626 if (i >= 1 << 2)
3627 ret += 2, i >>= 2;
3628 if (i >= 1 << 1)
3629 ret++;
3630 return ret;
3633 /* Parse __attribute__((...)) GNUC extension. */
3634 static void parse_attribute(AttributeDef *ad)
3636 int t, n;
3637 CString astr;
3639 redo:
3640 if (tok != TOK_ATTRIBUTE1 && tok != TOK_ATTRIBUTE2)
3641 return;
3642 next();
3643 skip('(');
3644 skip('(');
3645 while (tok != ')') {
3646 if (tok < TOK_IDENT)
3647 expect("attribute name");
3648 t = tok;
3649 next();
3650 switch(t) {
3651 case TOK_CLEANUP1:
3652 case TOK_CLEANUP2:
3654 Sym *s;
3656 skip('(');
3657 s = sym_find(tok);
3658 if (!s) {
3659 tcc_warning("implicit declaration of function '%s'",
3660 get_tok_str(tok, &tokc));
3661 s = external_global_sym(tok, &func_old_type);
3663 ad->cleanup_func = s;
3664 next();
3665 skip(')');
3666 break;
3668 case TOK_CONSTRUCTOR1:
3669 case TOK_CONSTRUCTOR2:
3670 ad->f.func_ctor = 1;
3671 break;
3672 case TOK_DESTRUCTOR1:
3673 case TOK_DESTRUCTOR2:
3674 ad->f.func_dtor = 1;
3675 break;
3676 case TOK_SECTION1:
3677 case TOK_SECTION2:
3678 skip('(');
3679 parse_mult_str(&astr, "section name");
3680 ad->section = find_section(tcc_state, (char *)astr.data);
3681 skip(')');
3682 cstr_free(&astr);
3683 break;
3684 case TOK_ALIAS1:
3685 case TOK_ALIAS2:
3686 skip('(');
3687 parse_mult_str(&astr, "alias(\"target\")");
3688 ad->alias_target = /* save string as token, for later */
3689 tok_alloc((char*)astr.data, astr.size-1)->tok;
3690 skip(')');
3691 cstr_free(&astr);
3692 break;
3693 case TOK_VISIBILITY1:
3694 case TOK_VISIBILITY2:
3695 skip('(');
3696 parse_mult_str(&astr,
3697 "visibility(\"default|hidden|internal|protected\")");
3698 if (!strcmp (astr.data, "default"))
3699 ad->a.visibility = STV_DEFAULT;
3700 else if (!strcmp (astr.data, "hidden"))
3701 ad->a.visibility = STV_HIDDEN;
3702 else if (!strcmp (astr.data, "internal"))
3703 ad->a.visibility = STV_INTERNAL;
3704 else if (!strcmp (astr.data, "protected"))
3705 ad->a.visibility = STV_PROTECTED;
3706 else
3707 expect("visibility(\"default|hidden|internal|protected\")");
3708 skip(')');
3709 cstr_free(&astr);
3710 break;
3711 case TOK_ALIGNED1:
3712 case TOK_ALIGNED2:
3713 if (tok == '(') {
3714 next();
3715 n = expr_const();
3716 if (n <= 0 || (n & (n - 1)) != 0)
3717 tcc_error("alignment must be a positive power of two");
3718 skip(')');
3719 } else {
3720 n = MAX_ALIGN;
3722 ad->a.aligned = exact_log2p1(n);
3723 if (n != 1 << (ad->a.aligned - 1))
3724 tcc_error("alignment of %d is larger than implemented", n);
3725 break;
3726 case TOK_PACKED1:
3727 case TOK_PACKED2:
3728 ad->a.packed = 1;
3729 break;
3730 case TOK_WEAK1:
3731 case TOK_WEAK2:
3732 ad->a.weak = 1;
3733 break;
3734 case TOK_UNUSED1:
3735 case TOK_UNUSED2:
3736 /* currently, no need to handle it because tcc does not
3737 track unused objects */
3738 break;
3739 case TOK_NORETURN1:
3740 case TOK_NORETURN2:
3741 ad->f.func_noreturn = 1;
3742 break;
3743 case TOK_CDECL1:
3744 case TOK_CDECL2:
3745 case TOK_CDECL3:
3746 ad->f.func_call = FUNC_CDECL;
3747 break;
3748 case TOK_STDCALL1:
3749 case TOK_STDCALL2:
3750 case TOK_STDCALL3:
3751 ad->f.func_call = FUNC_STDCALL;
3752 break;
3753 #ifdef TCC_TARGET_I386
3754 case TOK_REGPARM1:
3755 case TOK_REGPARM2:
3756 skip('(');
3757 n = expr_const();
3758 if (n > 3)
3759 n = 3;
3760 else if (n < 0)
3761 n = 0;
3762 if (n > 0)
3763 ad->f.func_call = FUNC_FASTCALL1 + n - 1;
3764 skip(')');
3765 break;
3766 case TOK_FASTCALL1:
3767 case TOK_FASTCALL2:
3768 case TOK_FASTCALL3:
3769 ad->f.func_call = FUNC_FASTCALLW;
3770 break;
3771 #endif
3772 case TOK_MODE:
3773 skip('(');
3774 switch(tok) {
3775 case TOK_MODE_DI:
3776 ad->attr_mode = VT_LLONG + 1;
3777 break;
3778 case TOK_MODE_QI:
3779 ad->attr_mode = VT_BYTE + 1;
3780 break;
3781 case TOK_MODE_HI:
3782 ad->attr_mode = VT_SHORT + 1;
3783 break;
3784 case TOK_MODE_SI:
3785 case TOK_MODE_word:
3786 ad->attr_mode = VT_INT + 1;
3787 break;
3788 default:
3789 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok, NULL));
3790 break;
3792 next();
3793 skip(')');
3794 break;
3795 case TOK_DLLEXPORT:
3796 ad->a.dllexport = 1;
3797 break;
3798 case TOK_NODECORATE:
3799 ad->a.nodecorate = 1;
3800 break;
3801 case TOK_DLLIMPORT:
3802 ad->a.dllimport = 1;
3803 break;
3804 default:
3805 if (tcc_state->warn_unsupported)
3806 tcc_warning("'%s' attribute ignored", get_tok_str(t, NULL));
3807 /* skip parameters */
3808 if (tok == '(') {
3809 int parenthesis = 0;
3810 do {
3811 if (tok == '(')
3812 parenthesis++;
3813 else if (tok == ')')
3814 parenthesis--;
3815 next();
3816 } while (parenthesis && tok != -1);
3818 break;
3820 if (tok != ',')
3821 break;
3822 next();
3824 skip(')');
3825 skip(')');
3826 goto redo;
3829 static Sym * find_field (CType *type, int v, int *cumofs)
3831 Sym *s = type->ref;
3832 v |= SYM_FIELD;
3833 while ((s = s->next) != NULL) {
3834 if ((s->v & SYM_FIELD) &&
3835 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3836 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3837 Sym *ret = find_field (&s->type, v, cumofs);
3838 if (ret) {
3839 *cumofs += s->c;
3840 return ret;
3843 if (s->v == v)
3844 break;
3846 return s;
3849 static void struct_layout(CType *type, AttributeDef *ad)
3851 int size, align, maxalign, offset, c, bit_pos, bit_size;
3852 int packed, a, bt, prevbt, prev_bit_size;
3853 int pcc = !tcc_state->ms_bitfields;
3854 int pragma_pack = *tcc_state->pack_stack_ptr;
3855 Sym *f;
3857 maxalign = 1;
3858 offset = 0;
3859 c = 0;
3860 bit_pos = 0;
3861 prevbt = VT_STRUCT; /* make it never match */
3862 prev_bit_size = 0;
3864 //#define BF_DEBUG
3866 for (f = type->ref->next; f; f = f->next) {
3867 if (f->type.t & VT_BITFIELD)
3868 bit_size = BIT_SIZE(f->type.t);
3869 else
3870 bit_size = -1;
3871 size = type_size(&f->type, &align);
3872 a = f->a.aligned ? 1 << (f->a.aligned - 1) : 0;
3873 packed = 0;
3875 if (pcc && bit_size == 0) {
3876 /* in pcc mode, packing does not affect zero-width bitfields */
3878 } else {
3879 /* in pcc mode, attribute packed overrides if set. */
3880 if (pcc && (f->a.packed || ad->a.packed))
3881 align = packed = 1;
3883 /* pragma pack overrides align if it is smaller, and always packs bitfields */
3884 if (pragma_pack) {
3885 packed = 1;
3886 if (pragma_pack < align)
3887 align = pragma_pack;
3888 /* in pcc mode pragma pack also overrides individual align */
3889 if (pcc && pragma_pack < a)
3890 a = 0;
3893 /* some individual align was specified */
3894 if (a)
3895 align = a;
3897 if (type->ref->type.t == VT_UNION) {
3898 if (pcc && bit_size >= 0)
3899 size = (bit_size + 7) >> 3;
3900 offset = 0;
3901 if (size > c)
3902 c = size;
3904 } else if (bit_size < 0) {
3905 if (pcc)
3906 c += (bit_pos + 7) >> 3;
3907 c = (c + align - 1) & -align;
3908 offset = c;
3909 if (size > 0)
3910 c += size;
3911 bit_pos = 0;
3912 prevbt = VT_STRUCT;
3913 prev_bit_size = 0;
3915 } else {
3916 /* A bit-field. Layout is more complicated. There are two
3917 options: PCC (GCC) compatible and MS compatible */
3918 if (pcc) {
3919 /* In PCC layout a bit-field is placed adjacent to the
3920 preceding bit-fields, except if:
3921 - it has zero-width
3922 - an individual alignment was given
3923 - it would overflow its base type container and
3924 there is no packing */
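/* Illustrative: in 'struct { int a:20; int b:20; }' the second field would
   overflow the remaining bits of a's int container, so it starts at a new
   aligned offset and the struct ends up 8 bytes in PCC mode. */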
3925 if (bit_size == 0) {
3926 new_field:
3927 c = (c + ((bit_pos + 7) >> 3) + align - 1) & -align;
3928 bit_pos = 0;
3929 } else if (f->a.aligned) {
3930 goto new_field;
3931 } else if (!packed) {
3932 int a8 = align * 8;
3933 int ofs = ((c * 8 + bit_pos) % a8 + bit_size + a8 - 1) / a8;
3934 if (ofs > size / align)
3935 goto new_field;
3938 /* in pcc mode, long long bitfields have type int if they fit */
3939 if (size == 8 && bit_size <= 32)
3940 f->type.t = (f->type.t & ~VT_BTYPE) | VT_INT, size = 4;
3942 while (bit_pos >= align * 8)
3943 c += align, bit_pos -= align * 8;
3944 offset = c;
3946 /* In PCC layout named bit-fields influence the alignment
3947 of the containing struct using the base type's alignment,
3948 except for packed fields (which here have correct align). */
3949 if (f->v & SYM_FIRST_ANOM
3950 // && bit_size // ??? gcc on ARM/rpi does that
3952 align = 1;
3954 } else {
3955 bt = f->type.t & VT_BTYPE;
3956 if ((bit_pos + bit_size > size * 8)
3957 || (bit_size > 0) == (bt != prevbt)
3959 c = (c + align - 1) & -align;
3960 offset = c;
3961 bit_pos = 0;
3962 /* In MS bitfield mode a bit-field run always uses
3963 at least as many bits as the underlying type.
3964 To start a new run it's also required that this
3965 or the last bit-field had non-zero width. */
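/* Illustrative: with ms_bitfields, 'struct { char a:4; int b:4; }' starts
   a new run for b because the base type changes, so b is placed in a fresh
   int unit at offset 4 and the struct size becomes 8. */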
3966 if (bit_size || prev_bit_size)
3967 c += size;
3969 /* In MS layout the record's alignment is normally
3970 influenced by the field, except for a zero-width
3971 field at the start of a run (but it is again by
3972 further zero-width fields). */
3973 if (bit_size == 0 && prevbt != bt)
3974 align = 1;
3975 prevbt = bt;
3976 prev_bit_size = bit_size;
3979 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
3980 | (bit_pos << VT_STRUCT_SHIFT);
3981 bit_pos += bit_size;
3983 if (align > maxalign)
3984 maxalign = align;
3986 #ifdef BF_DEBUG
3987 printf("set field %s offset %-2d size %-2d align %-2d",
3988 get_tok_str(f->v & ~SYM_FIELD, NULL), offset, size, align);
3989 if (f->type.t & VT_BITFIELD) {
3990 printf(" pos %-2d bits %-2d",
3991 BIT_POS(f->type.t),
3992 BIT_SIZE(f->type.t)
3995 printf("\n");
3996 #endif
3998 f->c = offset;
3999 f->r = 0;
4002 if (pcc)
4003 c += (bit_pos + 7) >> 3;
4005 /* store size and alignment */
4006 a = bt = ad->a.aligned ? 1 << (ad->a.aligned - 1) : 1;
4007 if (a < maxalign)
4008 a = maxalign;
4009 type->ref->r = a;
4010 if (pragma_pack && pragma_pack < maxalign && 0 == pcc) {
4011 /* can happen if individual align for some member was given. In
4012 this case MSVC ignores maxalign when aligning the size */
4013 a = pragma_pack;
4014 if (a < bt)
4015 a = bt;
4017 c = (c + a - 1) & -a;
4018 type->ref->c = c;
4020 #ifdef BF_DEBUG
4021 printf("struct size %-2d align %-2d\n\n", c, a), fflush(stdout);
4022 #endif
4024 /* check whether we can access bitfields by their type */
4025 for (f = type->ref->next; f; f = f->next) {
4026 int s, px, cx, c0;
4027 CType t;
4029 if (0 == (f->type.t & VT_BITFIELD))
4030 continue;
4031 f->type.ref = f;
4032 f->auxtype = -1;
4033 bit_size = BIT_SIZE(f->type.t);
4034 if (bit_size == 0)
4035 continue;
4036 bit_pos = BIT_POS(f->type.t);
4037 size = type_size(&f->type, &align);
4038 if (bit_pos + bit_size <= size * 8 && f->c + size <= c)
4039 continue;
4041 /* try to access the field using a different type */
4042 c0 = -1, s = align = 1;
4043 for (;;) {
4044 px = f->c * 8 + bit_pos;
4045 cx = (px >> 3) & -align;
4046 px = px - (cx << 3);
4047 if (c0 == cx)
4048 break;
4049 s = (px + bit_size + 7) >> 3;
4050 if (s > 4) {
4051 t.t = VT_LLONG;
4052 } else if (s > 2) {
4053 t.t = VT_INT;
4054 } else if (s > 1) {
4055 t.t = VT_SHORT;
4056 } else {
4057 t.t = VT_BYTE;
4059 s = type_size(&t, &align);
4060 c0 = cx;
4063 if (px + bit_size <= s * 8 && cx + s <= c) {
4064 /* update offset and bit position */
4065 f->c = cx;
4066 bit_pos = px;
4067 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
4068 | (bit_pos << VT_STRUCT_SHIFT);
4069 if (s != size)
4070 f->auxtype = t.t;
4071 #ifdef BF_DEBUG
4072 printf("FIX field %s offset %-2d size %-2d align %-2d "
4073 "pos %-2d bits %-2d\n",
4074 get_tok_str(f->v & ~SYM_FIELD, NULL),
4075 cx, s, align, px, bit_size);
4076 #endif
4077 } else {
4078 /* fall back to byte-wise load/store */
4079 f->auxtype = VT_STRUCT;
4080 #ifdef BF_DEBUG
4081 printf("FIX field %s : load byte-wise\n",
4082 get_tok_str(f->v & ~SYM_FIELD, NULL));
4083 #endif
4088 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
4089 static void struct_decl(CType *type, int u)
4091 int v, c, size, align, flexible;
4092 int bit_size, bsize, bt;
4093 Sym *s, *ss, **ps;
4094 AttributeDef ad, ad1;
4095 CType type1, btype;
4097 memset(&ad, 0, sizeof ad);
4098 next();
4099 parse_attribute(&ad);
4100 if (tok != '{') {
4101 v = tok;
4102 next();
4103 /* struct already defined ? return it */
4104 if (v < TOK_IDENT)
4105 expect("struct/union/enum name");
4106 s = struct_find(v);
4107 if (s && (s->sym_scope == local_scope || tok != '{')) {
4108 if (u == s->type.t)
4109 goto do_decl;
4110 if (u == VT_ENUM && IS_ENUM(s->type.t))
4111 goto do_decl;
4112 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
4114 } else {
4115 v = anon_sym++;
4117 /* Record the original enum/struct/union token. */
4118 type1.t = u == VT_ENUM ? u | VT_INT | VT_UNSIGNED : u;
4119 type1.ref = NULL;
4120 /* we put an undefined size for struct/union */
4121 s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
4122 s->r = 0; /* default alignment is zero as gcc */
4123 do_decl:
4124 type->t = s->type.t;
4125 type->ref = s;
4127 if (tok == '{') {
4128 next();
4129 if (s->c != -1)
4130 tcc_error("struct/union/enum already defined");
4131 s->c = -2;
4132 /* the body cannot be empty */
4133 /* in particular, empty enums are not allowed */
4134 ps = &s->next;
4135 if (u == VT_ENUM) {
4136 long long ll = 0, pl = 0, nl = 0;
4137 CType t;
4138 t.ref = s;
4139 /* enum symbols have static storage */
4140 t.t = VT_INT|VT_STATIC|VT_ENUM_VAL;
4141 for(;;) {
4142 v = tok;
4143 if (v < TOK_UIDENT)
4144 expect("identifier");
4145 ss = sym_find(v);
4146 if (ss && !local_stack)
4147 tcc_error("redefinition of enumerator '%s'",
4148 get_tok_str(v, NULL));
4149 next();
4150 if (tok == '=') {
4151 next();
4152 ll = expr_const64();
4154 ss = sym_push(v, &t, VT_CONST, 0);
4155 ss->enum_val = ll;
4156 *ps = ss, ps = &ss->next;
4157 if (ll < nl)
4158 nl = ll;
4159 if (ll > pl)
4160 pl = ll;
4161 if (tok != ',')
4162 break;
4163 next();
4164 ll++;
4165 /* NOTE: we accept a trailing comma */
4166 if (tok == '}')
4167 break;
4169 skip('}');
4170 /* set integral type of the enum */
4171 t.t = VT_INT;
4172 if (nl >= 0) {
4173 if (pl != (unsigned)pl)
4174 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4175 t.t |= VT_UNSIGNED;
4176 } else if (pl != (int)pl || nl != (int)nl)
4177 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4178 s->type.t = type->t = t.t | VT_ENUM;
4179 s->c = 0;
4180 /* set type for enum members */
4181 for (ss = s->next; ss; ss = ss->next) {
4182 ll = ss->enum_val;
4183 if (ll == (int)ll) /* default is int if it fits */
4184 continue;
4185 if (t.t & VT_UNSIGNED) {
4186 ss->type.t |= VT_UNSIGNED;
4187 if (ll == (unsigned)ll)
4188 continue;
4190 ss->type.t = (ss->type.t & ~VT_BTYPE)
4191 | (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4193 } else {
4194 c = 0;
4195 flexible = 0;
4196 while (tok != '}') {
4197 if (!parse_btype(&btype, &ad1)) {
4198 skip(';');
4199 continue;
4201 while (1) {
4202 if (flexible)
4203 tcc_error("flexible array member '%s' not at the end of struct",
4204 get_tok_str(v, NULL));
4205 bit_size = -1;
4206 v = 0;
4207 type1 = btype;
4208 if (tok != ':') {
4209 if (tok != ';')
4210 type_decl(&type1, &ad1, &v, TYPE_DIRECT);
4211 if (v == 0) {
4212 if ((type1.t & VT_BTYPE) != VT_STRUCT)
4213 expect("identifier");
4214 else {
4215 int v = btype.ref->v;
4216 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
4217 if (tcc_state->ms_extensions == 0)
4218 expect("identifier");
4222 if (type_size(&type1, &align) < 0) {
4223 if ((u == VT_STRUCT) && (type1.t & VT_ARRAY) && c)
4224 flexible = 1;
4225 else
4226 tcc_error("field '%s' has incomplete type",
4227 get_tok_str(v, NULL));
4229 if ((type1.t & VT_BTYPE) == VT_FUNC ||
4230 (type1.t & VT_BTYPE) == VT_VOID ||
4231 (type1.t & VT_STORAGE))
4232 tcc_error("invalid type for '%s'",
4233 get_tok_str(v, NULL));
4235 if (tok == ':') {
4236 next();
4237 bit_size = expr_const();
4238 /* XXX: handle v = 0 case for messages */
4239 if (bit_size < 0)
4240 tcc_error("negative width in bit-field '%s'",
4241 get_tok_str(v, NULL));
4242 if (v && bit_size == 0)
4243 tcc_error("zero width for bit-field '%s'",
4244 get_tok_str(v, NULL));
4245 parse_attribute(&ad1);
4247 size = type_size(&type1, &align);
4248 if (bit_size >= 0) {
4249 bt = type1.t & VT_BTYPE;
4250 if (bt != VT_INT &&
4251 bt != VT_BYTE &&
4252 bt != VT_SHORT &&
4253 bt != VT_BOOL &&
4254 bt != VT_LLONG)
4255 tcc_error("bitfields must have scalar type");
4256 bsize = size * 8;
4257 if (bit_size > bsize) {
4258 tcc_error("width of '%s' exceeds its type",
4259 get_tok_str(v, NULL));
4260 } else if (bit_size == bsize
4261 && !ad.a.packed && !ad1.a.packed) {
4262 /* no need for bit fields */
4264 } else if (bit_size == 64) {
4265 tcc_error("field width 64 not implemented");
4266 } else {
4267 type1.t = (type1.t & ~VT_STRUCT_MASK)
4268 | VT_BITFIELD
4269 | (bit_size << (VT_STRUCT_SHIFT + 6));
4272 if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
4273 /* Remember we've seen a real field to check
4274 for placement of flexible array member. */
4275 c = 1;
4277 /* If member is a struct or bit-field, enforce
4278 placing into the struct (as anonymous). */
4279 if (v == 0 &&
4280 ((type1.t & VT_BTYPE) == VT_STRUCT ||
4281 bit_size >= 0)) {
4282 v = anon_sym++;
4284 if (v) {
4285 ss = sym_push(v | SYM_FIELD, &type1, 0, 0);
4286 ss->a = ad1.a;
4287 *ps = ss;
4288 ps = &ss->next;
4290 if (tok == ';' || tok == TOK_EOF)
4291 break;
4292 skip(',');
4294 skip(';');
4296 skip('}');
4297 parse_attribute(&ad);
4298 struct_layout(type, &ad);
4303 static void sym_to_attr(AttributeDef *ad, Sym *s)
4305 merge_symattr(&ad->a, &s->a);
4306 merge_funcattr(&ad->f, &s->f);
4309 /* Add type qualifiers to a type. If the type is an array then the qualifiers
4310 are added to the element type, copied because it could be a typedef. */
4311 static void parse_btype_qualify(CType *type, int qualifiers)
4313 while (type->t & VT_ARRAY) {
4314 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
4315 type = &type->ref->type;
4317 type->t |= qualifiers;
4320 /* return 0 if no type declaration. otherwise, return the basic type
4321 and skip it.
4323 static int parse_btype(CType *type, AttributeDef *ad)
4325 int t, u, bt, st, type_found, typespec_found, g, n;
4326 Sym *s;
4327 CType type1;
4329 memset(ad, 0, sizeof(AttributeDef));
4330 type_found = 0;
4331 typespec_found = 0;
4332 t = VT_INT;
4333 bt = st = -1;
4334 type->ref = NULL;
4336 while(1) {
4337 switch(tok) {
4338 case TOK_EXTENSION:
4339 /* currently, we really ignore extension */
4340 next();
4341 continue;
4343 /* basic types */
4344 case TOK_CHAR:
4345 u = VT_BYTE;
4346 basic_type:
4347 next();
4348 basic_type1:
4349 if (u == VT_SHORT || u == VT_LONG) {
4350 if (st != -1 || (bt != -1 && bt != VT_INT))
4351 tmbt: tcc_error("too many basic types");
4352 st = u;
4353 } else {
4354 if (bt != -1 || (st != -1 && u != VT_INT))
4355 goto tmbt;
4356 bt = u;
4358 if (u != VT_INT)
4359 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
4360 typespec_found = 1;
4361 break;
4362 case TOK_VOID:
4363 u = VT_VOID;
4364 goto basic_type;
4365 case TOK_SHORT:
4366 u = VT_SHORT;
4367 goto basic_type;
4368 case TOK_INT:
4369 u = VT_INT;
4370 goto basic_type;
4371 case TOK_ALIGNAS:
4372 { int n;
4373 AttributeDef ad1;
4374 next();
4375 skip('(');
4376 memset(&ad1, 0, sizeof(AttributeDef));
4377 if (parse_btype(&type1, &ad1)) {
4378 type_decl(&type1, &ad1, &n, TYPE_ABSTRACT);
4379 if (ad1.a.aligned)
4380 n = 1 << (ad1.a.aligned - 1);
4381 else
4382 type_size(&type1, &n);
4383 } else {
4384 n = expr_const();
4385 if (n <= 0 || (n & (n - 1)) != 0)
4386 tcc_error("alignment must be a positive power of two");
4388 skip(')');
4389 ad->a.aligned = exact_log2p1(n);
4391 continue;
4392 case TOK_LONG:
4393 if ((t & VT_BTYPE) == VT_DOUBLE) {
4394 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
4395 } else if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
4396 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LLONG;
4397 } else {
4398 u = VT_LONG;
4399 goto basic_type;
4401 next();
4402 break;
4403 #ifdef TCC_TARGET_ARM64
4404 case TOK_UINT128:
4405 /* GCC's __uint128_t appears in some Linux header files. Make it a
4406 synonym for long double to get the size and alignment right. */
4407 u = VT_LDOUBLE;
4408 goto basic_type;
4409 #endif
4410 case TOK_BOOL:
4411 u = VT_BOOL;
4412 goto basic_type;
4413 case TOK_FLOAT:
4414 u = VT_FLOAT;
4415 goto basic_type;
4416 case TOK_DOUBLE:
4417 if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
4418 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
4419 } else {
4420 u = VT_DOUBLE;
4421 goto basic_type;
4423 next();
4424 break;
4425 case TOK_ENUM:
4426 struct_decl(&type1, VT_ENUM);
4427 basic_type2:
4428 u = type1.t;
4429 type->ref = type1.ref;
4430 goto basic_type1;
4431 case TOK_STRUCT:
4432 struct_decl(&type1, VT_STRUCT);
4433 goto basic_type2;
4434 case TOK_UNION:
4435 struct_decl(&type1, VT_UNION);
4436 goto basic_type2;
4438 /* type modifiers */
4439 case TOK_CONST1:
4440 case TOK_CONST2:
4441 case TOK_CONST3:
4442 type->t = t;
4443 parse_btype_qualify(type, VT_CONSTANT);
4444 t = type->t;
4445 next();
4446 break;
4447 case TOK_VOLATILE1:
4448 case TOK_VOLATILE2:
4449 case TOK_VOLATILE3:
4450 type->t = t;
4451 parse_btype_qualify(type, VT_VOLATILE);
4452 t = type->t;
4453 next();
4454 break;
4455 case TOK_SIGNED1:
4456 case TOK_SIGNED2:
4457 case TOK_SIGNED3:
4458 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
4459 tcc_error("signed and unsigned modifier");
4460 t |= VT_DEFSIGN;
4461 next();
4462 typespec_found = 1;
4463 break;
4464 case TOK_REGISTER:
4465 case TOK_AUTO:
4466 case TOK_RESTRICT1:
4467 case TOK_RESTRICT2:
4468 case TOK_RESTRICT3:
4469 next();
4470 break;
4471 case TOK_UNSIGNED:
4472 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
4473 tcc_error("signed and unsigned modifier");
4474 t |= VT_DEFSIGN | VT_UNSIGNED;
4475 next();
4476 typespec_found = 1;
4477 break;
4479 /* storage */
4480 case TOK_EXTERN:
4481 g = VT_EXTERN;
4482 goto storage;
4483 case TOK_STATIC:
4484 g = VT_STATIC;
4485 goto storage;
4486 case TOK_TYPEDEF:
4487 g = VT_TYPEDEF;
4488 goto storage;
4489 storage:
4490 if (t & (VT_EXTERN|VT_STATIC|VT_TYPEDEF) & ~g)
4491 tcc_error("multiple storage classes");
4492 t |= g;
4493 next();
4494 break;
4495 case TOK_INLINE1:
4496 case TOK_INLINE2:
4497 case TOK_INLINE3:
4498 t |= VT_INLINE;
4499 next();
4500 break;
4501 case TOK_NORETURN3:
4502 next();
4503 ad->f.func_noreturn = 1;
4504 break;
4505 /* GNUC attribute */
4506 case TOK_ATTRIBUTE1:
4507 case TOK_ATTRIBUTE2:
4508 parse_attribute(ad);
4509 if (ad->attr_mode) {
4510 u = ad->attr_mode -1;
4511 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
4513 continue;
4514 /* GNUC typeof */
4515 case TOK_TYPEOF1:
4516 case TOK_TYPEOF2:
4517 case TOK_TYPEOF3:
4518 next();
4519 parse_expr_type(&type1);
4520 /* remove all storage modifiers except typedef */
4521 type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
4522 if (type1.ref)
4523 sym_to_attr(ad, type1.ref);
4524 goto basic_type2;
4525 default:
4526 if (typespec_found)
4527 goto the_end;
4528 s = sym_find(tok);
4529 if (!s || !(s->type.t & VT_TYPEDEF))
4530 goto the_end;
4532 n = tok, next();
4533 if (tok == ':' && !in_generic) {
4534 /* ignore if it's a label */
4535 unget_tok(n);
4536 goto the_end;
4539 t &= ~(VT_BTYPE|VT_LONG);
4540 u = t & ~(VT_CONSTANT | VT_VOLATILE), t ^= u;
4541 type->t = (s->type.t & ~VT_TYPEDEF) | u;
4542 type->ref = s->type.ref;
4543 if (t)
4544 parse_btype_qualify(type, t);
4545 t = type->t;
4546 /* get attributes from typedef */
4547 sym_to_attr(ad, s);
4548 typespec_found = 1;
4549 st = bt = -2;
4550 break;
4552 type_found = 1;
4554 the_end:
4555 if (tcc_state->char_is_unsigned) {
4556 if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
4557 t |= VT_UNSIGNED;
4559 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
4560 bt = t & (VT_BTYPE|VT_LONG);
4561 if (bt == VT_LONG)
4562 t |= LONG_SIZE == 8 ? VT_LLONG : VT_INT;
4563 #ifdef TCC_TARGET_PE
4564 if (bt == VT_LDOUBLE)
4565 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_DOUBLE;
4566 #endif
4567 type->t = t;
4568 return type_found;
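/* Rough sketch of the specifier accumulation above: 'bt' holds the base
   specifier (char/int/float/...), 'st' holds a size modifier (short/long),
   and each slot may be set at most once; anything else trips the
   "too many basic types" error.  So "unsigned long long int" is accepted
   (the second 'long' is folded into VT_LLONG in the TOK_LONG case), while
   "short char" is rejected. */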
4571 /* convert a function parameter type (array to pointer and function to
4572 function pointer) */
4573 static inline void convert_parameter_type(CType *pt)
4575 /* remove const and volatile qualifiers (XXX: const could be used
4576 to indicate a const function parameter) */
4577 pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
4578 /* array must be transformed to pointer according to ANSI C */
4579 pt->t &= ~VT_ARRAY;
4580 if ((pt->t & VT_BTYPE) == VT_FUNC) {
4581 mk_pointer(pt);
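/* For example, a parameter declared as "int a[10]" is adjusted to
   "int *a", and one declared with function type such as "int f(int)"
   becomes a pointer to that function type, matching the usual C
   parameter adjustments. */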
4585 ST_FUNC void parse_asm_str(CString *astr)
4587 skip('(');
4588 parse_mult_str(astr, "string constant");
4591 /* Parse an asm label and return the token */
4592 static int asm_label_instr(void)
4594 int v;
4595 CString astr;
4597 next();
4598 parse_asm_str(&astr);
4599 skip(')');
4600 #ifdef ASM_DEBUG
4601 printf("asm_alias: \"%s\"\n", (char *)astr.data);
4602 #endif
4603 v = tok_alloc(astr.data, astr.size - 1)->tok;
4604 cstr_free(&astr);
4605 return v;
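/* post_type() below distinguishes three prototype kinds when it parses a
   parameter list: "int f()" gives FUNC_OLD (unknown parameters),
   "int f(void)" and "int f(int)" give FUNC_NEW, and a trailing "..."
   turns FUNC_NEW into FUNC_ELLIPSIS.  The kind is stored in the function
   symbol's f.func_type and later drives argument conversion in
   gfunc_param_typed(). */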
4608 static int post_type(CType *type, AttributeDef *ad, int storage, int td)
4610 int n, l, t1, arg_size, align, unused_align;
4611 Sym **plast, *s, *first;
4612 AttributeDef ad1;
4613 CType pt;
4615 if (tok == '(') {
4616 /* function type, or recursive declarator (return if so) */
4617 next();
4618 if (td && !(td & TYPE_ABSTRACT))
4619 return 0;
4620 if (tok == ')')
4621 l = 0;
4622 else if (parse_btype(&pt, &ad1))
4623 l = FUNC_NEW;
4624 else if (td) {
4625 merge_attr (ad, &ad1);
4626 return 0;
4627 } else
4628 l = FUNC_OLD;
4629 first = NULL;
4630 plast = &first;
4631 arg_size = 0;
4632 if (l) {
4633 for(;;) {
4634 /* read param name and compute offset */
4635 if (l != FUNC_OLD) {
4636 if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
4637 break;
4638 type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT);
4639 if ((pt.t & VT_BTYPE) == VT_VOID)
4640 tcc_error("parameter declared as void");
4641 } else {
4642 n = tok;
4643 if (n < TOK_UIDENT)
4644 expect("identifier");
4645 pt.t = VT_VOID; /* invalid type */
4646 pt.ref = NULL;
4647 next();
4649 convert_parameter_type(&pt);
4650 arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
4651 s = sym_push(n | SYM_FIELD, &pt, 0, 0);
4652 *plast = s;
4653 plast = &s->next;
4654 if (tok == ')')
4655 break;
4656 skip(',');
4657 if (l == FUNC_NEW && tok == TOK_DOTS) {
4658 l = FUNC_ELLIPSIS;
4659 next();
4660 break;
4662 if (l == FUNC_NEW && !parse_btype(&pt, &ad1))
4663 tcc_error("invalid type");
4665 } else
4666 /* if no parameters, then old type prototype */
4667 l = FUNC_OLD;
4668 skip(')');
4669 /* NOTE: const is ignored in returned type as it has a special
4670 meaning in gcc / C++ */
4671 type->t &= ~VT_CONSTANT;
4672 /* some ancient pre-K&R C allows a function to return an array
4673 and the array brackets to be put after the arguments, such
4674 that "int c()[]" means something like "int[] c()" */
4675 if (tok == '[') {
4676 next();
4677 skip(']'); /* only handle simple "[]" */
4678 mk_pointer(type);
4680 /* we push an anonymous symbol which will contain the function prototype */
4681 ad->f.func_args = arg_size;
4682 ad->f.func_type = l;
4683 s = sym_push(SYM_FIELD, type, 0, 0);
4684 s->a = ad->a;
4685 s->f = ad->f;
4686 s->next = first;
4687 type->t = VT_FUNC;
4688 type->ref = s;
4689 } else if (tok == '[') {
4690 int saved_nocode_wanted = nocode_wanted;
4691 /* array definition */
4692 next();
4693 while (1) {
4694 /* XXX The optional type-quals and static should only be accepted
4695 in parameter decls. The '*' as well, and then even only
4696 in prototypes (not function defs). */
4697 switch (tok) {
4698 case TOK_RESTRICT1: case TOK_RESTRICT2: case TOK_RESTRICT3:
4699 case TOK_CONST1:
4700 case TOK_VOLATILE1:
4701 case TOK_STATIC:
4702 case '*':
4703 next();
4704 continue;
4705 default:
4706 break;
4708 break;
4710 n = -1;
4711 t1 = 0;
4712 if (tok != ']') {
4713 if (!local_stack || (storage & VT_STATIC))
4714 vpushi(expr_const());
4715 else {
4716 /* For VLAs (which can only happen with local_stack && !VT_STATIC)
4717 the length must always be evaluated, even under nocode_wanted,
4718 so that the size slot is initialized (e.g. under sizeof
4719 or typeof). */
4720 nocode_wanted = 0;
4721 gexpr();
4723 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4724 n = vtop->c.i;
4725 if (n < 0)
4726 tcc_error("invalid array size");
4727 } else {
4728 if (!is_integer_btype(vtop->type.t & VT_BTYPE))
4729 tcc_error("size of variable length array should be an integer");
4730 n = 0;
4731 t1 = VT_VLA;
4734 skip(']');
4735 /* parse next post type */
4736 post_type(type, ad, storage, 0);
4738 if ((type->t & VT_BTYPE) == VT_FUNC)
4739 tcc_error("declaration of an array of functions");
4740 if ((type->t & VT_BTYPE) == VT_VOID
4741 || type_size(type, &unused_align) < 0)
4742 tcc_error("declaration of an array of incomplete type elements");
4744 t1 |= type->t & VT_VLA;
4746 if (t1 & VT_VLA) {
4747 if (n < 0)
4748 tcc_error("need explicit inner array size in VLAs");
4749 loc -= type_size(&int_type, &align);
4750 loc &= -align;
4751 n = loc;
4753 vla_runtime_type_size(type, &align);
4754 gen_op('*');
4755 vset(&int_type, VT_LOCAL|VT_LVAL, n);
4756 vswap();
4757 vstore();
4759 if (n != -1)
4760 vpop();
4761 nocode_wanted = saved_nocode_wanted;
4763 /* we push an anonymous symbol which will contain the array
4764 element type */
4765 s = sym_push(SYM_FIELD, type, 0, n);
4766 type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
4767 type->ref = s;
4769 return 1;
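/* Note on the VLA code above: for a variable-length array the element
   count is not a compile-time constant, so the computed byte size
   (element size * length) is stored into a fresh local slot at 'loc',
   and that slot's offset is recorded in the array symbol's 'c' field
   instead of a constant element count. */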
4772 /* Parse a type declarator (except basic type), and return the type
4773 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4774 expected. 'type' should contain the basic type. 'ad' is the
4775 attribute definition of the basic type. It can be modified by
4776 type_decl(). If this (possibly abstract) declarator is a pointer chain
4777 it returns the innermost pointed to type (equals *type, but is a different
4778 pointer); otherwise it returns type itself. That's used for recursive calls. */
4779 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td)
4781 CType *post, *ret;
4782 int qualifiers, storage;
4784 /* recursive type, remove storage bits first, apply them later again */
4785 storage = type->t & VT_STORAGE;
4786 type->t &= ~VT_STORAGE;
4787 post = ret = type;
4789 while (tok == '*') {
4790 qualifiers = 0;
4791 redo:
4792 next();
4793 switch(tok) {
4794 case TOK_CONST1:
4795 case TOK_CONST2:
4796 case TOK_CONST3:
4797 qualifiers |= VT_CONSTANT;
4798 goto redo;
4799 case TOK_VOLATILE1:
4800 case TOK_VOLATILE2:
4801 case TOK_VOLATILE3:
4802 qualifiers |= VT_VOLATILE;
4803 goto redo;
4804 case TOK_RESTRICT1:
4805 case TOK_RESTRICT2:
4806 case TOK_RESTRICT3:
4807 goto redo;
4808 /* XXX: clarify attribute handling */
4809 case TOK_ATTRIBUTE1:
4810 case TOK_ATTRIBUTE2:
4811 parse_attribute(ad);
4812 break;
4814 mk_pointer(type);
4815 type->t |= qualifiers;
4816 if (ret == type)
4817 /* innermost pointed to type is the one for the first derivation */
4818 ret = pointed_type(type);
4821 if (tok == '(') {
4822 /* This is possibly a parameter type list for abstract declarators
4823 ('int ()'), use post_type for testing this. */
4824 if (!post_type(type, ad, 0, td)) {
4825 /* It's not, so it's a nested declarator, and the post operations
4826 apply to the innermost pointed to type (if any). */
4827 /* XXX: this is not correct to modify 'ad' at this point, but
4828 the syntax is not clear */
4829 parse_attribute(ad);
4830 post = type_decl(type, ad, v, td);
4831 skip(')');
4832 } else
4833 goto abstract;
4834 } else if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
4835 /* type identifier */
4836 *v = tok;
4837 next();
4838 } else {
4839 abstract:
4840 if (!(td & TYPE_ABSTRACT))
4841 expect("identifier");
4842 *v = 0;
4844 post_type(post, ad, storage, 0);
4845 parse_attribute(ad);
4846 type->t |= storage;
4847 return ret;
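/* Illustrative walk-through (not from the original source): for
       int (*fp)(void);
   type_decl() sees '(' with no preceding '*'; post_type() returns 0
   because this is a nested (non-abstract) declarator, so type_decl()
   recurses to read "*fp" (building 'int *' and setting *v), and after
   the closing ')' the trailing "(void)" is applied by post_type() to
   the innermost pointed-to type, yielding "pointer to function
   returning int". */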
4850 /* indirection with full error checking and bound check */
4851 ST_FUNC void indir(void)
4853 if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
4854 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
4855 return;
4856 expect("pointer");
4858 if (vtop->r & VT_LVAL)
4859 gv(RC_INT);
4860 vtop->type = *pointed_type(&vtop->type);
4861 /* Arrays and functions are never lvalues */
4862 if (!(vtop->type.t & (VT_ARRAY | VT_VLA))
4863 && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
4864 vtop->r |= VT_LVAL;
4865 /* if bound checking, the referenced pointer must be checked */
4866 #ifdef CONFIG_TCC_BCHECK
4867 if (tcc_state->do_bounds_check)
4868 vtop->r |= VT_MUSTBOUND;
4869 #endif
4873 /* pass a parameter to a function and do type checking and casting */
4874 static void gfunc_param_typed(Sym *func, Sym *arg)
4876 int func_type;
4877 CType type;
4879 func_type = func->f.func_type;
4880 if (func_type == FUNC_OLD ||
4881 (func_type == FUNC_ELLIPSIS && arg == NULL)) {
4882 /* default casting : only need to convert float to double */
4883 if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
4884 gen_cast_s(VT_DOUBLE);
4885 } else if (vtop->type.t & VT_BITFIELD) {
4886 type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
4887 type.ref = vtop->type.ref;
4888 gen_cast(&type);
4889 } else if (vtop->r & VT_MUSTCAST) {
4890 force_charshort_cast();
4892 } else if (arg == NULL) {
4893 tcc_error("too many arguments to function");
4894 } else {
4895 type = arg->type;
4896 type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
4897 gen_assign_cast(&type);
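/* Summary of the argument conversions above: for old-style or variadic
   calls past the named parameters, only the default promotions apply
   (float becomes double; char/short values with a pending cast are
   forced through force_charshort_cast); for prototyped parameters the
   argument is cast to the declared type via gen_assign_cast(), with
   const dropped to avoid a spurious warning. */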
4901 /* parse an expression and return its type without any side effect. */
4902 static void expr_type(CType *type, void (*expr_fn)(void))
4904 nocode_wanted++;
4905 expr_fn();
4906 *type = vtop->type;
4907 vpop();
4908 nocode_wanted--;
4911 /* parse an expression of the form '(type)' or '(expr)' and return its
4912 type */
4913 static void parse_expr_type(CType *type)
4915 int n;
4916 AttributeDef ad;
4918 skip('(');
4919 if (parse_btype(type, &ad)) {
4920 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4921 } else {
4922 expr_type(type, gexpr);
4924 skip(')');
4927 static void parse_type(CType *type)
4929 AttributeDef ad;
4930 int n;
4932 if (!parse_btype(type, &ad)) {
4933 expect("type");
4935 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4938 static void parse_builtin_params(int nc, const char *args)
4940 char c, sep = '(';
4941 CType t;
4942 if (nc)
4943 nocode_wanted++;
4944 next();
4945 while ((c = *args++)) {
4946 skip(sep);
4947 sep = ',';
4948 switch (c) {
4949 case 'e': expr_eq(); continue;
4950 case 't': parse_type(&t); vpush(&t); continue;
4951 default: tcc_error("internal error"); break;
4954 skip(')');
4955 if (nc)
4956 nocode_wanted--;
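/* The 'args' string is a tiny per-builtin signature: each 'e' parses one
   assignment expression and each 't' parses a type name, with the
   surrounding '(' ',' ')' punctuation checked along the way.  So
   parse_builtin_params(0, "et") matches a call like
   __builtin_foo(expr, type) -- __builtin_foo is only an illustration,
   not a real builtin here.  A nonzero 'nc' evaluates the arguments with
   code generation suppressed. */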
4959 ST_FUNC void unary(void)
4961 int n, t, align, size, r, sizeof_caller;
4962 CType type;
4963 Sym *s;
4964 AttributeDef ad;
4966 /* generate line number info */
4967 if (tcc_state->do_debug)
4968 tcc_debug_line(tcc_state);
4970 sizeof_caller = in_sizeof;
4971 in_sizeof = 0;
4972 type.ref = NULL;
4973 /* XXX: GCC 2.95.3 does not generate a table although it should be
4974 better here */
4975 tok_next:
4976 switch(tok) {
4977 case TOK_EXTENSION:
4978 next();
4979 goto tok_next;
4980 case TOK_LCHAR:
4981 #ifdef TCC_TARGET_PE
4982 t = VT_SHORT|VT_UNSIGNED;
4983 goto push_tokc;
4984 #endif
4985 case TOK_CINT:
4986 case TOK_CCHAR:
4987 t = VT_INT;
4988 push_tokc:
4989 type.t = t;
4990 vsetc(&type, VT_CONST, &tokc);
4991 next();
4992 break;
4993 case TOK_CUINT:
4994 t = VT_INT | VT_UNSIGNED;
4995 goto push_tokc;
4996 case TOK_CLLONG:
4997 t = VT_LLONG;
4998 goto push_tokc;
4999 case TOK_CULLONG:
5000 t = VT_LLONG | VT_UNSIGNED;
5001 goto push_tokc;
5002 case TOK_CFLOAT:
5003 t = VT_FLOAT;
5004 goto push_tokc;
5005 case TOK_CDOUBLE:
5006 t = VT_DOUBLE;
5007 goto push_tokc;
5008 case TOK_CLDOUBLE:
5009 t = VT_LDOUBLE;
5010 goto push_tokc;
5011 case TOK_CLONG:
5012 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG;
5013 goto push_tokc;
5014 case TOK_CULONG:
5015 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG | VT_UNSIGNED;
5016 goto push_tokc;
5017 case TOK___FUNCTION__:
5018 if (!gnu_ext)
5019 goto tok_identifier;
5020 /* fall thru */
5021 case TOK___FUNC__:
5023 void *ptr;
5024 int len;
5025 /* special function name identifier */
5026 len = strlen(funcname) + 1;
5027 /* generate char[len] type */
5028 type.t = VT_BYTE;
5029 mk_pointer(&type);
5030 type.t |= VT_ARRAY;
5031 type.ref->c = len;
5032 vpush_ref(&type, data_section, data_section->data_offset, len);
5033 if (!NODATA_WANTED) {
5034 ptr = section_ptr_add(data_section, len);
5035 memcpy(ptr, funcname, len);
5037 next();
5039 break;
5040 case TOK_LSTR:
5041 #ifdef TCC_TARGET_PE
5042 t = VT_SHORT | VT_UNSIGNED;
5043 #else
5044 t = VT_INT;
5045 #endif
5046 goto str_init;
5047 case TOK_STR:
5048 /* string parsing */
5049 t = VT_BYTE;
5050 if (tcc_state->char_is_unsigned)
5051 t = VT_BYTE | VT_UNSIGNED;
5052 str_init:
5053 if (tcc_state->warn_write_strings)
5054 t |= VT_CONSTANT;
5055 type.t = t;
5056 mk_pointer(&type);
5057 type.t |= VT_ARRAY;
5058 memset(&ad, 0, sizeof(AttributeDef));
5059 decl_initializer_alloc(&type, &ad, VT_CONST, 2, 0, 0);
5060 break;
5061 case '(':
5062 next();
5063 /* cast ? */
5064 if (parse_btype(&type, &ad)) {
5065 type_decl(&type, &ad, &n, TYPE_ABSTRACT);
5066 skip(')');
5067 /* check ISOC99 compound literal */
5068 if (tok == '{') {
5069 /* data is allocated locally by default */
5070 if (global_expr)
5071 r = VT_CONST;
5072 else
5073 r = VT_LOCAL;
5074 /* all except arrays are lvalues */
5075 if (!(type.t & VT_ARRAY))
5076 r |= VT_LVAL;
5077 memset(&ad, 0, sizeof(AttributeDef));
5078 decl_initializer_alloc(&type, &ad, r, 1, 0, 0);
5079 } else {
5080 if (sizeof_caller) {
5081 vpush(&type);
5082 return;
5084 unary();
5085 gen_cast(&type);
5087 } else if (tok == '{') {
5088 int saved_nocode_wanted = nocode_wanted;
5089 if (const_wanted && !(nocode_wanted & unevalmask))
5090 tcc_error("expected constant");
5091 /* save all registers */
5092 save_regs(0);
5093 /* statement expression : we do not accept break/continue
5094 inside as GCC does. We do retain the nocode_wanted state,
5095 as statement expressions can't ever be entered from the
5096 outside, so any reactivation of code emission (from labels
5097 or loop heads) can be disabled again after the end of it. */
5098 block(1);
5099 nocode_wanted = saved_nocode_wanted;
5100 skip(')');
5101 } else {
5102 gexpr();
5103 skip(')');
5105 break;
5106 case '*':
5107 next();
5108 unary();
5109 indir();
5110 break;
5111 case '&':
5112 next();
5113 unary();
5114 /* function names must be treated as function pointers,
5115 except for unary '&' and sizeof. Since we consider that
5116 functions are not lvalues, we only have to handle it
5117 there and in function calls. */
5118 /* arrays can also be used although they are not lvalues */
5119 if ((vtop->type.t & VT_BTYPE) != VT_FUNC &&
5120 !(vtop->type.t & VT_ARRAY))
5121 test_lvalue();
5122 if (vtop->sym)
5123 vtop->sym->a.addrtaken = 1;
5124 mk_pointer(&vtop->type);
5125 gaddrof();
5126 break;
5127 case '!':
5128 next();
5129 unary();
5130 gen_test_zero(TOK_EQ);
5131 break;
5132 case '~':
5133 next();
5134 unary();
5135 vpushi(-1);
5136 gen_op('^');
5137 break;
5138 case '+':
5139 next();
5140 unary();
5141 if ((vtop->type.t & VT_BTYPE) == VT_PTR)
5142 tcc_error("pointer not accepted for unary plus");
5143 /* In order to force a cast, we add zero, except for floating point,
5144 where we really need a no-op (otherwise -0.0 would be transformed
5145 into +0.0). */
5146 if (!is_float(vtop->type.t)) {
5147 vpushi(0);
5148 gen_op('+');
5150 break;
5151 case TOK_SIZEOF:
5152 case TOK_ALIGNOF1:
5153 case TOK_ALIGNOF2:
5154 case TOK_ALIGNOF3:
5155 t = tok;
5156 next();
5157 in_sizeof++;
5158 expr_type(&type, unary); /* the nested unary() performs in_sizeof = 0 */
5159 s = NULL;
5160 if (vtop[1].r & VT_SYM)
5161 s = vtop[1].sym; /* hack: accessing previous vtop */
5162 size = type_size(&type, &align);
5163 if (s && s->a.aligned)
5164 align = 1 << (s->a.aligned - 1);
5165 if (t == TOK_SIZEOF) {
5166 if (!(type.t & VT_VLA)) {
5167 if (size < 0)
5168 tcc_error("sizeof applied to an incomplete type");
5169 vpushs(size);
5170 } else {
5171 vla_runtime_type_size(&type, &align);
5173 } else {
5174 vpushs(align);
5176 vtop->type.t |= VT_UNSIGNED;
5177 break;
5179 case TOK_builtin_expect:
5180 /* __builtin_expect is a no-op for now */
5181 parse_builtin_params(0, "ee");
5182 vpop();
5183 break;
5184 case TOK_builtin_types_compatible_p:
5185 parse_builtin_params(0, "tt");
5186 vtop[-1].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
5187 vtop[0].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
5188 n = is_compatible_types(&vtop[-1].type, &vtop[0].type);
5189 vtop -= 2;
5190 vpushi(n);
5191 break;
5192 case TOK_builtin_choose_expr:
5194 int64_t c;
5195 next();
5196 skip('(');
5197 c = expr_const64();
5198 skip(',');
5199 if (!c) {
5200 nocode_wanted++;
5202 expr_eq();
5203 if (!c) {
5204 vpop();
5205 nocode_wanted--;
5207 skip(',');
5208 if (c) {
5209 nocode_wanted++;
5211 expr_eq();
5212 if (c) {
5213 vpop();
5214 nocode_wanted--;
5216 skip(')');
5218 break;
5219 case TOK_builtin_constant_p:
5220 parse_builtin_params(1, "e");
5221 n = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
5222 vtop--;
5223 vpushi(n);
5224 break;
5225 case TOK_builtin_frame_address:
5226 case TOK_builtin_return_address:
5228 int tok1 = tok;
5229 int level;
5230 next();
5231 skip('(');
5232 if (tok != TOK_CINT) {
5233 tcc_error("%s only takes positive integers",
5234 tok1 == TOK_builtin_return_address ?
5235 "__builtin_return_address" :
5236 "__builtin_frame_address");
5238 level = (uint32_t)tokc.i;
5239 next();
5240 skip(')');
5241 type.t = VT_VOID;
5242 mk_pointer(&type);
5243 vset(&type, VT_LOCAL, 0); /* local frame */
5244 while (level--) {
5245 mk_pointer(&vtop->type);
5246 indir(); /* -> parent frame */
5248 if (tok1 == TOK_builtin_return_address) {
5249 // assume return address is just above frame pointer on stack
5250 vpushi(PTR_SIZE);
5251 gen_op('+');
5252 mk_pointer(&vtop->type);
5253 indir();
5256 break;
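/* The loop above walks up 'level' stack frames by repeatedly treating the
   current frame pointer as a pointer to the caller's saved frame pointer;
   __builtin_return_address additionally loads the word just above it
   (at +PTR_SIZE).  As the in-code comment notes, this assumes a
   conventional frame-pointer chain with the return address stored next to
   the saved frame pointer. */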
5257 #ifdef TCC_TARGET_RISCV64
5258 case TOK_builtin_va_start:
5259 parse_builtin_params(0, "ee");
5260 r = vtop->r & VT_VALMASK;
5261 if (r == VT_LLOCAL)
5262 r = VT_LOCAL;
5263 if (r != VT_LOCAL)
5264 tcc_error("__builtin_va_start expects a local variable");
5265 gen_va_start();
5266 vstore();
5267 break;
5268 #endif
5269 #ifdef TCC_TARGET_X86_64
5270 #ifdef TCC_TARGET_PE
5271 case TOK_builtin_va_start:
5272 parse_builtin_params(0, "ee");
5273 r = vtop->r & VT_VALMASK;
5274 if (r == VT_LLOCAL)
5275 r = VT_LOCAL;
5276 if (r != VT_LOCAL)
5277 tcc_error("__builtin_va_start expects a local variable");
5278 vtop->r = r;
5279 vtop->type = char_pointer_type;
5280 vtop->c.i += 8;
5281 vstore();
5282 break;
5283 #else
5284 case TOK_builtin_va_arg_types:
5285 parse_builtin_params(0, "t");
5286 vpushi(classify_x86_64_va_arg(&vtop->type));
5287 vswap();
5288 vpop();
5289 break;
5290 #endif
5291 #endif
5293 #ifdef TCC_TARGET_ARM64
5294 case TOK___va_start: {
5295 parse_builtin_params(0, "ee");
5296 //xx check types
5297 gen_va_start();
5298 vpushi(0);
5299 vtop->type.t = VT_VOID;
5300 break;
5302 case TOK___va_arg: {
5303 parse_builtin_params(0, "et");
5304 type = vtop->type;
5305 vpop();
5306 //xx check types
5307 gen_va_arg(&type);
5308 vtop->type = type;
5309 break;
5311 case TOK___arm64_clear_cache: {
5312 parse_builtin_params(0, "ee");
5313 gen_clear_cache();
5314 vpushi(0);
5315 vtop->type.t = VT_VOID;
5316 break;
5318 #endif
5319 /* pre operations */
5320 case TOK_INC:
5321 case TOK_DEC:
5322 t = tok;
5323 next();
5324 unary();
5325 inc(0, t);
5326 break;
5327 case '-':
5328 next();
5329 unary();
5330 t = vtop->type.t & VT_BTYPE;
5331 if (is_float(t)) {
5332 /* In IEEE negate(x) isn't subtract(0,x), but rather
5333 subtract(-0, x). */
5334 vpush(&vtop->type);
5335 if (t == VT_FLOAT)
5336 vtop->c.f = -1.0 * 0.0;
5337 else if (t == VT_DOUBLE)
5338 vtop->c.d = -1.0 * 0.0;
5339 else
5340 vtop->c.ld = -1.0 * 0.0;
5341 } else
5342 vpushi(0);
5343 vswap();
5344 gen_op('-');
5345 break;
5346 case TOK_LAND:
5347 if (!gnu_ext)
5348 goto tok_identifier;
5349 next();
5350 /* allow taking the address of a label */
5351 if (tok < TOK_UIDENT)
5352 expect("label identifier");
5353 s = label_find(tok);
5354 if (!s) {
5355 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
5356 } else {
5357 if (s->r == LABEL_DECLARED)
5358 s->r = LABEL_FORWARD;
5360 if (!s->type.t) {
5361 s->type.t = VT_VOID;
5362 mk_pointer(&s->type);
5363 s->type.t |= VT_STATIC;
5365 vpushsym(&s->type, s);
5366 next();
5367 break;
5369 case TOK_GENERIC:
5371 CType controlling_type;
5372 int has_default = 0;
5373 int has_match = 0;
5374 int learn = 0;
5375 TokenString *str = NULL;
5376 int saved_const_wanted = const_wanted;
5378 next();
5379 skip('(');
5380 const_wanted = 0;
5381 expr_type(&controlling_type, expr_eq);
5382 controlling_type.t &= ~(VT_CONSTANT | VT_VOLATILE | VT_ARRAY);
5383 if ((controlling_type.t & VT_BTYPE) == VT_FUNC)
5384 mk_pointer(&controlling_type);
5385 const_wanted = saved_const_wanted;
5386 for (;;) {
5387 learn = 0;
5388 skip(',');
5389 if (tok == TOK_DEFAULT) {
5390 if (has_default)
5391 tcc_error("too many 'default'");
5392 has_default = 1;
5393 if (!has_match)
5394 learn = 1;
5395 next();
5396 } else {
5397 AttributeDef ad_tmp;
5398 int itmp;
5399 CType cur_type;
5401 in_generic++;
5402 parse_btype(&cur_type, &ad_tmp);
5403 in_generic--;
5405 type_decl(&cur_type, &ad_tmp, &itmp, TYPE_ABSTRACT);
5406 if (compare_types(&controlling_type, &cur_type, 0)) {
5407 if (has_match) {
5408 tcc_error("type match twice");
5410 has_match = 1;
5411 learn = 1;
5414 skip(':');
5415 if (learn) {
5416 if (str)
5417 tok_str_free(str);
5418 skip_or_save_block(&str);
5419 } else {
5420 skip_or_save_block(NULL);
5422 if (tok == ')')
5423 break;
5425 if (!str) {
5426 char buf[60];
5427 type_to_str(buf, sizeof buf, &controlling_type, NULL);
5428 tcc_error("type '%s' does not match any association", buf);
5430 begin_macro(str, 1);
5431 next();
5432 expr_eq();
5433 if (tok != TOK_EOF)
5434 expect(",");
5435 end_macro();
5436 next();
5437 break;
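/* _Generic handling in short: the controlling expression's type is
   computed without evaluating it (expr_type), stripped of qualifiers and
   array-ness and with functions decayed to pointers; each association's
   type is compared against it, the selected branch is saved as a token
   string by skip_or_save_block(), and once the whole selection has been
   scanned that saved branch is replayed through begin_macro()/end_macro()
   and parsed as the result expression. */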
5439 // special qnan, snan and infinity values
5440 case TOK___NAN__:
5441 n = 0x7fc00000;
5442 special_math_val:
5443 vpushi(n);
5444 vtop->type.t = VT_FLOAT;
5445 next();
5446 break;
5447 case TOK___SNAN__:
5448 n = 0x7f800001;
5449 goto special_math_val;
5450 case TOK___INF__:
5451 n = 0x7f800000;
5452 goto special_math_val;
5454 default:
5455 tok_identifier:
5456 t = tok;
5457 next();
5458 if (t < TOK_UIDENT)
5459 expect("identifier");
5460 s = sym_find(t);
5461 if (!s || IS_ASM_SYM(s)) {
5462 const char *name = get_tok_str(t, NULL);
5463 if (tok != '(')
5464 tcc_error("'%s' undeclared", name);
5465 /* for simple function calls, we tolerate undeclared
5466 external reference to int() function */
5467 if (tcc_state->warn_implicit_function_declaration
5468 #ifdef TCC_TARGET_PE
5469 /* people must be warned about using undeclared WINAPI functions
5470 (which usually start with uppercase letter) */
5471 || (name[0] >= 'A' && name[0] <= 'Z')
5472 #endif
5474 tcc_warning("implicit declaration of function '%s'", name);
5475 s = external_global_sym(t, &func_old_type);
5478 r = s->r;
5479 /* A symbol that has a register is a local register variable,
5480 which starts out as VT_LOCAL value. */
5481 if ((r & VT_VALMASK) < VT_CONST)
5482 r = (r & ~VT_VALMASK) | VT_LOCAL;
5484 vset(&s->type, r, s->c);
5485 /* Point to s as backpointer (even without r&VT_SYM).
5486 Will be used by at least the x86 inline asm parser for
5487 regvars. */
5488 vtop->sym = s;
5490 if (r & VT_SYM) {
5491 vtop->c.i = 0;
5492 } else if (r == VT_CONST && IS_ENUM_VAL(s->type.t)) {
5493 vtop->c.i = s->enum_val;
5495 break;
5498 /* post operations */
5499 while (1) {
5500 if (tok == TOK_INC || tok == TOK_DEC) {
5501 inc(1, tok);
5502 next();
5503 } else if (tok == '.' || tok == TOK_ARROW || tok == TOK_CDOUBLE) {
5504 int qualifiers, cumofs = 0;
5505 /* field */
5506 if (tok == TOK_ARROW)
5507 indir();
5508 qualifiers = vtop->type.t & (VT_CONSTANT | VT_VOLATILE);
5509 test_lvalue();
5510 gaddrof();
5511 /* expect pointer on structure */
5512 if ((vtop->type.t & VT_BTYPE) != VT_STRUCT)
5513 expect("struct or union");
5514 if (tok == TOK_CDOUBLE)
5515 expect("field name");
5516 next();
5517 if (tok == TOK_CINT || tok == TOK_CUINT)
5518 expect("field name");
5519 s = find_field(&vtop->type, tok, &cumofs);
5520 if (!s)
5521 tcc_error("field not found: %s", get_tok_str(tok & ~SYM_FIELD, &tokc));
5522 /* add field offset to pointer */
5523 vtop->type = char_pointer_type; /* change type to 'char *' */
5524 vpushi(cumofs + s->c);
5525 gen_op('+');
5526 /* change type to field type, and set to lvalue */
5527 vtop->type = s->type;
5528 vtop->type.t |= qualifiers;
5529 /* an array is never an lvalue */
5530 if (!(vtop->type.t & VT_ARRAY)) {
5531 vtop->r |= VT_LVAL;
5532 #ifdef CONFIG_TCC_BCHECK
5533 /* if bound checking, the referenced pointer must be checked */
5534 if (tcc_state->do_bounds_check)
5535 vtop->r |= VT_MUSTBOUND;
5536 #endif
5538 next();
5539 } else if (tok == '[') {
5540 next();
5541 gexpr();
5542 gen_op('+');
5543 indir();
5544 skip(']');
5545 } else if (tok == '(') {
5546 SValue ret;
5547 Sym *sa;
5548 int nb_args, ret_nregs, ret_align, regsize, variadic;
5550 /* function call */
5551 if ((vtop->type.t & VT_BTYPE) != VT_FUNC) {
5552 /* pointer test (no array accepted) */
5553 if ((vtop->type.t & (VT_BTYPE | VT_ARRAY)) == VT_PTR) {
5554 vtop->type = *pointed_type(&vtop->type);
5555 if ((vtop->type.t & VT_BTYPE) != VT_FUNC)
5556 goto error_func;
5557 } else {
5558 error_func:
5559 expect("function pointer");
5561 } else {
5562 vtop->r &= ~VT_LVAL; /* no lvalue */
5564 /* get return type */
5565 s = vtop->type.ref;
5566 next();
5567 sa = s->next; /* first parameter */
5568 nb_args = regsize = 0;
5569 ret.r2 = VT_CONST;
5570 /* compute first implicit argument if a structure is returned */
5571 if ((s->type.t & VT_BTYPE) == VT_STRUCT) {
5572 variadic = (s->f.func_type == FUNC_ELLIPSIS);
5573 ret_nregs = gfunc_sret(&s->type, variadic, &ret.type,
5574 &ret_align, &regsize);
5575 if (ret_nregs <= 0) {
5576 /* get some space for the returned structure */
5577 size = type_size(&s->type, &align);
5578 #ifdef TCC_TARGET_ARM64
5579 /* On arm64, a small struct is returned in registers.
5580 It is much easier to write it to memory if we know
5581 that we are allowed to write some extra bytes, so
5582 round the allocated space up to a power of 2: */
5583 if (size < 16)
5584 while (size & (size - 1))
5585 size = (size | (size - 1)) + 1;
5586 #endif
5587 loc = (loc - size) & -align;
5588 ret.type = s->type;
5589 ret.r = VT_LOCAL | VT_LVAL;
5590 /* pass it as 'int' to avoid structure arg passing
5591 problems */
5592 vseti(VT_LOCAL, loc);
5593 ret.c = vtop->c;
5594 if (ret_nregs < 0)
5595 vtop--;
5596 else
5597 nb_args++;
5599 } else {
5600 ret_nregs = 1;
5601 ret.type = s->type;
5604 if (ret_nregs > 0) {
5605 /* return in register */
5606 ret.c.i = 0;
5607 PUT_R_RET(&ret, ret.type.t);
5609 if (tok != ')') {
5610 for(;;) {
5611 expr_eq();
5612 gfunc_param_typed(s, sa);
5613 nb_args++;
5614 if (sa)
5615 sa = sa->next;
5616 if (tok == ')')
5617 break;
5618 skip(',');
5621 if (sa)
5622 tcc_error("too few arguments to function");
5623 skip(')');
5624 gfunc_call(nb_args);
5626 if (ret_nregs < 0) {
5627 vsetc(&ret.type, ret.r, &ret.c);
5628 #ifdef TCC_TARGET_RISCV64
5629 arch_transfer_ret_regs(1);
5630 #endif
5631 } else {
5632 /* return value */
5633 for (r = ret.r + ret_nregs + !ret_nregs; r-- > ret.r;) {
5634 vsetc(&ret.type, r, &ret.c);
5635 vtop->r2 = ret.r2; /* Loop only happens when r2 is VT_CONST */
5638 /* handle packed struct return */
5639 if (((s->type.t & VT_BTYPE) == VT_STRUCT) && ret_nregs) {
5640 int addr, offset;
5642 size = type_size(&s->type, &align);
5643 /* We're writing whole regs often, make sure there's enough
5644 space. Assume register size is power of 2. */
5645 if (regsize > align)
5646 align = regsize;
5647 loc = (loc - size) & -align;
5648 addr = loc;
5649 offset = 0;
5650 for (;;) {
5651 vset(&ret.type, VT_LOCAL | VT_LVAL, addr + offset);
5652 vswap();
5653 vstore();
5654 vtop--;
5655 if (--ret_nregs == 0)
5656 break;
5657 offset += regsize;
5659 vset(&s->type, VT_LOCAL | VT_LVAL, addr);
5662 /* Promote char/short return values. This matters only
5663 when calling functions that were not compiled by TCC and
5664 only on some architectures. For those where it doesn't
5665 matter we expect things to be already promoted to int,
5666 but not larger. */
5667 t = s->type.t & VT_BTYPE;
5668 if (t == VT_BYTE || t == VT_SHORT || t == VT_BOOL) {
5669 #ifdef PROMOTE_RET
5670 vtop->r |= BFVAL(VT_MUSTCAST, 1);
5671 #else
5672 vtop->type.t = VT_INT;
5673 #endif
5676 if (s->f.func_noreturn)
5677 CODE_OFF();
5678 } else {
5679 break;
5684 ST_FUNC void expr_prod(void)
5686 int t;
5688 unary();
5689 while (tok == '*' || tok == '/' || tok == '%') {
5690 t = tok;
5691 next();
5692 unary();
5693 gen_op(t);
5697 ST_FUNC void expr_sum(void)
5699 int t;
5701 expr_prod();
5702 while (tok == '+' || tok == '-') {
5703 t = tok;
5704 next();
5705 expr_prod();
5706 gen_op(t);
5710 static void expr_shift(void)
5712 int t;
5714 expr_sum();
5715 while (tok == TOK_SHL || tok == TOK_SAR) {
5716 t = tok;
5717 next();
5718 expr_sum();
5719 gen_op(t);
5723 static void expr_cmp(void)
5725 int t;
5727 expr_shift();
5728 while ((tok >= TOK_ULE && tok <= TOK_GT) ||
5729 tok == TOK_ULT || tok == TOK_UGE) {
5730 t = tok;
5731 next();
5732 expr_shift();
5733 gen_op(t);
5737 static void expr_cmpeq(void)
5739 int t;
5741 expr_cmp();
5742 while (tok == TOK_EQ || tok == TOK_NE) {
5743 t = tok;
5744 next();
5745 expr_cmp();
5746 gen_op(t);
5750 static void expr_and(void)
5752 expr_cmpeq();
5753 while (tok == '&') {
5754 next();
5755 expr_cmpeq();
5756 gen_op('&');
5760 static void expr_xor(void)
5762 expr_and();
5763 while (tok == '^') {
5764 next();
5765 expr_and();
5766 gen_op('^');
5770 static void expr_or(void)
5772 expr_xor();
5773 while (tok == '|') {
5774 next();
5775 expr_xor();
5776 gen_op('|');
5780 static int condition_3way(void);
5782 static void expr_landor(void(*e_fn)(void), int e_op, int i)
5784 int t = 0, cc = 1, f = 0, c;
5785 for(;;) {
5786 c = f ? i : condition_3way();
5787 if (c < 0) {
5788 save_regs(1), cc = 0;
5789 } else if (c != i) {
5790 nocode_wanted++, f = 1;
5792 if (tok != e_op) {
5793 if (cc || f) {
5794 vpop();
5795 vpushi(i ^ f);
5796 gsym(t);
5797 nocode_wanted -= f;
5798 } else {
5799 gvtst_set(i, t);
5801 break;
5803 if (c < 0)
5804 t = gvtst(i, t);
5805 else
5806 vpop();
5807 next();
5808 e_fn();
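/* expr_landor() implements both '&&' (i == 1) and '||' (i == 0) with
   short-circuit jumps: condition_3way() folds operands that are
   compile-time constants, so e.g. "0 && f()" still parses f() but with
   nocode_wanted raised and simply yields 0, while non-constant operands
   are accumulated into the jump chain 't' that is resolved with
   gvtst_set()/gsym() once the run of operators ends. */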
5812 static void expr_land(void)
5814 expr_or();
5815 if (tok == TOK_LAND)
5816 expr_landor(expr_or, TOK_LAND, 1);
5819 static void expr_lor(void)
5821 expr_land();
5822 if (tok == TOK_LOR)
5823 expr_landor(expr_land, TOK_LOR, 0);
5826 /* Assuming vtop is a value used in a conditional context
5827 (i.e. compared with zero) return 0 if it's false, 1 if
5828 true and -1 if it can't be statically determined. */
5829 static int condition_3way(void)
5831 int c = -1;
5832 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
5833 (!(vtop->r & VT_SYM) || !vtop->sym->a.weak)) {
5834 vdup();
5835 gen_cast_s(VT_BOOL);
5836 c = vtop->c.i;
5837 vpop();
5839 return c;
5842 static int is_cond_bool(SValue *sv)
5844 if ((sv->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST
5845 && (sv->type.t & VT_BTYPE) == VT_INT)
5846 return (unsigned)sv->c.i < 2;
5847 if (sv->r == VT_CMP)
5848 return 1;
5849 return 0;
5852 static void expr_cond(void)
5854 int tt, u, r1, r2, rc, t1, t2, bt1, bt2, islv, c, g;
5855 SValue sv;
5856 CType type, type1, type2;
5857 int ncw_prev;
5859 expr_lor();
5860 if (tok == '?') {
5861 next();
5862 c = condition_3way();
5863 g = (tok == ':' && gnu_ext);
5864 tt = 0;
5865 if (!g) {
5866 if (c < 0) {
5867 save_regs(1);
5868 tt = gvtst(1, 0);
5869 } else {
5870 vpop();
5872 } else if (c < 0) {
5873 /* needed to avoid having different registers saved in
5874 each branch */
5875 save_regs(1);
5876 gv_dup();
5877 tt = gvtst(0, 0);
5880 ncw_prev = nocode_wanted;
5881 if (1) {
5882 if (c == 0)
5883 nocode_wanted++;
5884 if (!g)
5885 gexpr();
5887 if (c < 0 && vtop->r == VT_CMP) {
5888 t1 = gvtst(0, 0);
5889 vpushi(0);
5890 gvtst_set(0, t1);
5893 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
5894 mk_pointer(&vtop->type);
5895 type1 = vtop->type;
5896 sv = *vtop; /* save value to handle it later */
5897 vtop--; /* no vpop so that FP stack is not flushed */
5899 if (g) {
5900 u = tt;
5901 } else if (c < 0) {
5902 u = gjmp(0);
5903 gsym(tt);
5904 } else
5905 u = 0;
5907 nocode_wanted = ncw_prev;
5908 if (c == 1)
5909 nocode_wanted++;
5910 skip(':');
5911 expr_cond();
5913 if (c < 0 && is_cond_bool(vtop) && is_cond_bool(&sv)) {
5914 if (sv.r == VT_CMP) {
5915 t1 = sv.jtrue;
5916 t2 = u;
5917 } else {
5918 t1 = gvtst(0, 0);
5919 t2 = gjmp(0);
5920 gsym(u);
5921 vpushv(&sv);
5923 gvtst_set(0, t1);
5924 gvtst_set(1, t2);
5925 nocode_wanted = ncw_prev;
5926 // tcc_warning("two conditions expr_cond");
5927 return;
5930 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
5931 mk_pointer(&vtop->type);
5932 type2=vtop->type;
5933 t1 = type1.t;
5934 bt1 = t1 & VT_BTYPE;
5935 t2 = type2.t;
5936 bt2 = t2 & VT_BTYPE;
5937 type.ref = NULL;
5939 /* cast operands to correct type according to ISOC rules */
5940 if (bt1 == VT_VOID || bt2 == VT_VOID) {
5941 type.t = VT_VOID; /* NOTE: as an extension, we accept void on only one side */
5942 } else if (is_float(bt1) || is_float(bt2)) {
5943 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
5944 type.t = VT_LDOUBLE;
5946 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
5947 type.t = VT_DOUBLE;
5948 } else {
5949 type.t = VT_FLOAT;
5951 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
5952 /* cast to biggest op */
5953 type.t = VT_LLONG | VT_LONG;
5954 if (bt1 == VT_LLONG)
5955 type.t &= t1;
5956 if (bt2 == VT_LLONG)
5957 type.t &= t2;
5958 /* convert to unsigned if it does not fit in a long long */
5959 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
5960 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
5961 type.t |= VT_UNSIGNED;
5962 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
5963 /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
5964 /* If one is a null ptr constant the result type
5965 is the other. */
5966 if (is_null_pointer (vtop)) type = type1;
5967 else if (is_null_pointer (&sv)) type = type2;
5968 else if (bt1 != bt2)
5969 tcc_error("incompatible types in conditional expressions");
5970 else {
5971 CType *pt1 = pointed_type(&type1);
5972 CType *pt2 = pointed_type(&type2);
5973 int pbt1 = pt1->t & VT_BTYPE;
5974 int pbt2 = pt2->t & VT_BTYPE;
5975 int newquals, copied = 0;
5976 /* pointers to void get preferred, otherwise the
5977 pointed to types minus qualifs should be compatible */
5978 type = (pbt1 == VT_VOID) ? type1 : type2;
5979 if (pbt1 != VT_VOID && pbt2 != VT_VOID) {
5980 if(!compare_types(pt1, pt2, 1/*unqualif*/))
5981 tcc_warning("pointer type mismatch in conditional expression\n");
5983 /* combine qualifs */
5984 newquals = ((pt1->t | pt2->t) & (VT_CONSTANT | VT_VOLATILE));
5985 if ((~pointed_type(&type)->t & (VT_CONSTANT | VT_VOLATILE))
5986 & newquals)
5988 /* copy the pointer target symbol */
5989 type.ref = sym_push(SYM_FIELD, &type.ref->type,
5990 0, type.ref->c);
5991 copied = 1;
5992 pointed_type(&type)->t |= newquals;
5994 /* pointers to incomplete arrays get converted to
5995 pointers to completed ones if possible */
5996 if (pt1->t & VT_ARRAY
5997 && pt2->t & VT_ARRAY
5998 && pointed_type(&type)->ref->c < 0
5999 && (pt1->ref->c > 0 || pt2->ref->c > 0))
6001 if (!copied)
6002 type.ref = sym_push(SYM_FIELD, &type.ref->type,
6003 0, type.ref->c);
6004 pointed_type(&type)->ref =
6005 sym_push(SYM_FIELD, &pointed_type(&type)->ref->type,
6006 0, pointed_type(&type)->ref->c);
6007 pointed_type(&type)->ref->c =
6008 0 < pt1->ref->c ? pt1->ref->c : pt2->ref->c;
6011 } else if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
6012 /* XXX: test structure compatibility */
6013 type = bt1 == VT_STRUCT ? type1 : type2;
6014 } else {
6015 /* integer operations */
6016 type.t = VT_INT | (VT_LONG & (t1 | t2));
6017 /* convert to unsigned if it does not fit in an integer */
6018 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
6019 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
6020 type.t |= VT_UNSIGNED;
6022 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
6023 that `(expr ? a : b).mem` does not error with "lvalue expected" */
6024 islv = (vtop->r & VT_LVAL) && (sv.r & VT_LVAL) && VT_STRUCT == (type.t & VT_BTYPE);
6026 /* now we convert second operand */
6027 if (c != 1) {
6028 gen_cast(&type);
6029 if (islv) {
6030 mk_pointer(&vtop->type);
6031 gaddrof();
6032 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
6033 gaddrof();
6036 rc = RC_TYPE(type.t);
6037 /* for long longs, we use fixed registers to avoid having
6038 to handle a complicated move */
6039 if (USING_TWO_WORDS(type.t))
6040 rc = RC_RET(type.t);
6042 tt = r2 = 0;
6043 if (c < 0) {
6044 r2 = gv(rc);
6045 tt = gjmp(0);
6047 gsym(u);
6048 nocode_wanted = ncw_prev;
6050 /* this is horrible, but we must also convert first
6051 operand */
6052 if (c != 0) {
6053 *vtop = sv;
6054 gen_cast(&type);
6055 if (islv) {
6056 mk_pointer(&vtop->type);
6057 gaddrof();
6058 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
6059 gaddrof();
6062 if (c < 0) {
6063 r1 = gv(rc);
6064 move_reg(r2, r1, islv ? VT_PTR : type.t);
6065 vtop->r = r2;
6066 gsym(tt);
6069 if (islv)
6070 indir();
6075 static void expr_eq(void)
6077 int t;
6079 expr_cond();
6080 if (tok == '=' ||
6081 (tok >= TOK_A_MOD && tok <= TOK_A_DIV) ||
6082 tok == TOK_A_XOR || tok == TOK_A_OR ||
6083 tok == TOK_A_SHL || tok == TOK_A_SAR) {
6084 test_lvalue();
6085 t = tok;
6086 next();
6087 if (t == '=') {
6088 expr_eq();
6089 } else {
6090 vdup();
6091 expr_eq();
6092 gen_op(t & 0x7f);
6094 vstore();
6098 ST_FUNC void gexpr(void)
6100 while (1) {
6101 expr_eq();
6102 if (tok != ',')
6103 break;
6104 vpop();
6105 next();
6109 /* parse a constant expression and return value in vtop. */
6110 static void expr_const1(void)
6112 const_wanted++;
6113 nocode_wanted += unevalmask + 1;
6114 expr_cond();
6115 nocode_wanted -= unevalmask + 1;
6116 const_wanted--;
6119 /* parse an integer constant and return its value. */
6120 static inline int64_t expr_const64(void)
6122 int64_t c;
6123 expr_const1();
6124 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
6125 expect("constant expression");
6126 c = vtop->c.i;
6127 vpop();
6128 return c;
6131 /* parse an integer constant and return its value.
6132 Complain if it doesn't fit 32bit (signed or unsigned). */
6133 ST_FUNC int expr_const(void)
6135 int c;
6136 int64_t wc = expr_const64();
6137 c = wc;
6138 if (c != wc && (unsigned)c != wc)
6139 tcc_error("constant exceeds 32 bit");
6140 return c;
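/* Note: the check above accepts any value that fits in 32 bits either as
   signed or as unsigned, so both -1 and 0xffffffff pass, while e.g.
   0x100000000 triggers "constant exceeds 32 bit". */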
6143 /* ------------------------------------------------------------------------- */
6144 /* return from function */
6146 #ifndef TCC_TARGET_ARM64
6147 static void gfunc_return(CType *func_type)
6149 if ((func_type->t & VT_BTYPE) == VT_STRUCT) {
6150 CType type, ret_type;
6151 int ret_align, ret_nregs, regsize;
6152 ret_nregs = gfunc_sret(func_type, func_var, &ret_type,
6153 &ret_align, &regsize);
6154 if (ret_nregs < 0) {
6155 #ifdef TCC_TARGET_RISCV64
6156 arch_transfer_ret_regs(0);
6157 #endif
6158 } else if (0 == ret_nregs) {
6159 /* if returning structure, must copy it to implicit
6160 first pointer arg location */
6161 type = *func_type;
6162 mk_pointer(&type);
6163 vset(&type, VT_LOCAL | VT_LVAL, func_vc);
6164 indir();
6165 vswap();
6166 /* copy structure value to pointer */
6167 vstore();
6168 } else {
6169 /* returning structure packed into registers */
6170 int size, addr, align, rc;
6171 size = type_size(func_type,&align);
6172 if ((vtop->r != (VT_LOCAL | VT_LVAL) ||
6173 (vtop->c.i & (ret_align-1)))
6174 && (align & (ret_align-1))) {
6175 loc = (loc - size) & -ret_align;
6176 addr = loc;
6177 type = *func_type;
6178 vset(&type, VT_LOCAL | VT_LVAL, addr);
6179 vswap();
6180 vstore();
6181 vpop();
6182 vset(&ret_type, VT_LOCAL | VT_LVAL, addr);
6184 vtop->type = ret_type;
6185 rc = RC_RET(ret_type.t);
6186 if (ret_nregs == 1)
6187 gv(rc);
6188 else {
6189 for (;;) {
6190 vdup();
6191 gv(rc);
6192 vpop();
6193 if (--ret_nregs == 0)
6194 break;
6195 /* We assume that when a structure is returned in multiple
6196 registers, their classes are consecutive values of the
6197 sequence s(n) = 2^n */
6198 rc <<= 1;
6199 vtop->c.i += regsize;
6203 } else {
6204 gv(RC_RET(func_type->t));
6206 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
6208 #endif
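/* gfunc_return() above handles struct returns in three ways, selected by
   gfunc_sret(): ret_nregs < 0 hands the value to arch-specific code
   (RISC-V), ret_nregs == 0 copies the value through the hidden pointer
   argument stored at func_vc, and ret_nregs > 0 loads the struct into one
   or more return registers, spilling to a suitably aligned temporary
   first when the value isn't already placed correctly. */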
6210 static void check_func_return(void)
6212 if ((func_vt.t & VT_BTYPE) == VT_VOID)
6213 return;
6214 if (!strcmp (funcname, "main")
6215 && (func_vt.t & VT_BTYPE) == VT_INT) {
6216 /* main returns 0 by default */
6217 vpushi(0);
6218 gen_assign_cast(&func_vt);
6219 gfunc_return(&func_vt);
6220 } else {
6221 tcc_warning("function might return no value: '%s'", funcname);
6225 /* ------------------------------------------------------------------------- */
6226 /* switch/case */
6228 static int case_cmp(const void *pa, const void *pb)
6230 int64_t a = (*(struct case_t**) pa)->v1;
6231 int64_t b = (*(struct case_t**) pb)->v1;
6232 return a < b ? -1 : a > b;
6235 static void gtst_addr(int t, int a)
6237 gsym_addr(gvtst(0, t), a);
6240 static void gcase(struct case_t **base, int len, int *bsym)
6242 struct case_t *p;
6243 int e;
6244 int ll = (vtop->type.t & VT_BTYPE) == VT_LLONG;
6245 while (len > 8) {
6246 /* binary search */
6247 p = base[len/2];
6248 vdup();
6249 if (ll)
6250 vpushll(p->v2);
6251 else
6252 vpushi(p->v2);
6253 gen_op(TOK_LE);
6254 e = gvtst(1, 0);
6255 vdup();
6256 if (ll)
6257 vpushll(p->v1);
6258 else
6259 vpushi(p->v1);
6260 gen_op(TOK_GE);
6261 gtst_addr(0, p->sym); /* v1 <= x <= v2 */
6262 /* x < v1 */
6263 gcase(base, len/2, bsym);
6264 /* x > v2 */
6265 gsym(e);
6266 e = len/2 + 1;
6267 base += e; len -= e;
6269 /* linear scan */
6270 while (len--) {
6271 p = *base++;
6272 vdup();
6273 if (ll)
6274 vpushll(p->v2);
6275 else
6276 vpushi(p->v2);
6277 if (p->v1 == p->v2) {
6278 gen_op(TOK_EQ);
6279 gtst_addr(0, p->sym);
6280 } else {
6281 gen_op(TOK_LE);
6282 e = gvtst(1, 0);
6283 vdup();
6284 if (ll)
6285 vpushll(p->v1);
6286 else
6287 vpushi(p->v1);
6288 gen_op(TOK_GE);
6289 gtst_addr(0, p->sym);
6290 gsym(e);
6293 *bsym = gjmp(*bsym);
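/* gcase() dispatches the switch value against the cases sorted by
   case_cmp(): runs of more than 8 entries are split by comparing against
   the middle entry's bounds (binary search), shorter runs are scanned
   linearly, single-value cases use one TOK_EQ test, and a value matching
   nothing falls through to the jump appended to *bsym, which is later
   resolved to the default label or to the end of the switch if there is
   none. */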
6296 /* ------------------------------------------------------------------------- */
6297 /* __attribute__((cleanup(fn))) */
6299 static void try_call_scope_cleanup(Sym *stop)
6301 Sym *cls = cur_scope->cl.s;
6303 for (; cls != stop; cls = cls->ncl) {
6304 Sym *fs = cls->next;
6305 Sym *vs = cls->prev_tok;
6307 vpushsym(&fs->type, fs);
6308 vset(&vs->type, vs->r, vs->c);
6309 vtop->sym = vs;
6310 mk_pointer(&vtop->type);
6311 gaddrof();
6312 gfunc_call(1);
6316 static void try_call_cleanup_goto(Sym *cleanupstate)
6318 Sym *oc, *cc;
6319 int ocd, ccd;
6321 if (!cur_scope->cl.s)
6322 return;
6324 /* search NCA of both cleanup chains given parents and initial depth */
6325 ocd = cleanupstate ? cleanupstate->v & ~SYM_FIELD : 0;
6326 for (ccd = cur_scope->cl.n, oc = cleanupstate; ocd > ccd; --ocd, oc = oc->ncl)
6328 for (cc = cur_scope->cl.s; ccd > ocd; --ccd, cc = cc->ncl)
6330 for (; cc != oc; cc = cc->ncl, oc = oc->ncl, --ccd)
6333 try_call_scope_cleanup(cc);
6336 /* call 'func' for each __attribute__((cleanup(func))) */
6337 static void block_cleanup(struct scope *o)
6339 int jmp = 0;
6340 Sym *g, **pg;
6341 for (pg = &pending_gotos; (g = *pg) && g->c > o->cl.n;) {
6342 if (g->prev_tok->r & LABEL_FORWARD) {
6343 Sym *pcl = g->next;
6344 if (!jmp)
6345 jmp = gjmp(0);
6346 gsym(pcl->jnext);
6347 try_call_scope_cleanup(o->cl.s);
6348 pcl->jnext = gjmp(0);
6349 if (!o->cl.n)
6350 goto remove_pending;
6351 g->c = o->cl.n;
6352 pg = &g->prev;
6353 } else {
6354 remove_pending:
6355 *pg = g->prev;
6356 sym_free(g);
6359 gsym(jmp);
6360 try_call_scope_cleanup(o->cl.s);
6363 /* ------------------------------------------------------------------------- */
6364 /* VLA */
6366 static void vla_restore(int loc)
6368 if (loc)
6369 gen_vla_sp_restore(loc);
6372 static void vla_leave(struct scope *o)
6374 if (o->vla.num < cur_scope->vla.num)
6375 vla_restore(o->vla.loc);
6378 /* ------------------------------------------------------------------------- */
6379 /* local scopes */
6381 void new_scope(struct scope *o)
6383 /* copy and link previous scope */
6384 *o = *cur_scope;
6385 o->prev = cur_scope;
6386 cur_scope = o;
6388 /* record local declaration stack position */
6389 o->lstk = local_stack;
6390 o->llstk = local_label_stack;
6392 ++local_scope;
6395 void prev_scope(struct scope *o, int is_expr)
6397 vla_leave(o->prev);
6399 if (o->cl.s != o->prev->cl.s)
6400 block_cleanup(o->prev);
6402 /* pop locally defined labels */
6403 label_pop(&local_label_stack, o->llstk, is_expr);
6405 /* In the is_expr case (a statement expression is finished here),
6406 vtop might refer to symbols on the local_stack. Either via the
6407 type or via vtop->sym. We can't pop those nor any that in turn
6408 might be referred to. To make it easier we don't roll back
6409 any symbols in that case; some upper level call to block() will
6410 do that. We do have to remove such symbols from the lookup
6411 tables, though. sym_pop will do that. */
6413 /* pop locally defined symbols */
6414 pop_local_syms(&local_stack, o->lstk, is_expr, 0);
6416 cur_scope = o->prev;
6417 --local_scope;
6420 /* leave a scope via break/continue(/goto) */
6421 void leave_scope(struct scope *o)
6423 if (!o)
6424 return;
6425 try_call_scope_cleanup(o->cl.s);
6426 vla_leave(o);
6429 /* ------------------------------------------------------------------------- */
6430 /* call block from 'for do while' loops */
6432 static void lblock(int *bsym, int *csym)
6434 struct scope *lo = loop_scope, *co = cur_scope;
6435 int *b = co->bsym, *c = co->csym;
6436 if (csym) {
6437 co->csym = csym;
6438 loop_scope = co;
6440 co->bsym = bsym;
6441 block(0);
6442 co->bsym = b;
6443 if (csym) {
6444 co->csym = c;
6445 loop_scope = lo;
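/* lblock() runs a loop body with 'bsym'/'csym' installed in the current
   scope as the pending-jump chains for break and continue; block()
   appends a gjmp() to the appropriate chain when it sees TOK_BREAK or
   TOK_CONTINUE, and the loop constructs below resolve those chains with
   gsym()/gsym_addr() once the loop code has been emitted. */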
6449 static void block(int is_expr)
6451 int a, b, c, d, e, t;
6452 struct scope o;
6453 Sym *s;
6455 if (is_expr) {
6456 /* default return value is (void) */
6457 vpushi(0);
6458 vtop->type.t = VT_VOID;
6461 again:
6462 t = tok, next();
6464 if (t == TOK_IF) {
6465 skip('(');
6466 gexpr();
6467 skip(')');
6468 a = gvtst(1, 0);
6469 block(0);
6470 if (tok == TOK_ELSE) {
6471 d = gjmp(0);
6472 gsym(a);
6473 next();
6474 block(0);
6475 gsym(d); /* patch else jmp */
6476 } else {
6477 gsym(a);
6480 } else if (t == TOK_WHILE) {
6481 d = gind();
6482 skip('(');
6483 gexpr();
6484 skip(')');
6485 a = gvtst(1, 0);
6486 b = 0;
6487 lblock(&a, &b);
6488 gjmp_addr(d);
6489 gsym_addr(b, d);
6490 gsym(a);
6492 } else if (t == '{') {
6493 new_scope(&o);
6495 /* handle local labels declarations */
6496 while (tok == TOK_LABEL) {
6497 do {
6498 next();
6499 if (tok < TOK_UIDENT)
6500 expect("label identifier");
6501 label_push(&local_label_stack, tok, LABEL_DECLARED);
6502 next();
6503 } while (tok == ',');
6504 skip(';');
6507 while (tok != '}') {
6508 decl(VT_LOCAL);
6509 if (tok != '}') {
6510 if (is_expr)
6511 vpop();
6512 block(is_expr);
6516 prev_scope(&o, is_expr);
6517 if (local_scope)
6518 next();
6519 else if (!nocode_wanted)
6520 check_func_return();
6522 } else if (t == TOK_RETURN) {
6523 b = (func_vt.t & VT_BTYPE) != VT_VOID;
6524 if (tok != ';') {
6525 gexpr();
6526 if (b) {
6527 gen_assign_cast(&func_vt);
6528 } else {
6529 if (vtop->type.t != VT_VOID)
6530 tcc_warning("void function returns a value");
6531 vtop--;
6533 } else if (b) {
6534 tcc_warning("'return' with no value");
6535 b = 0;
6537 leave_scope(root_scope);
6538 if (b)
6539 gfunc_return(&func_vt);
6540 skip(';');
6541 /* jump unless last stmt in top-level block */
6542 if (tok != '}' || local_scope != 1)
6543 rsym = gjmp(rsym);
6544 CODE_OFF();
6546 } else if (t == TOK_BREAK) {
6547 /* compute jump */
6548 if (!cur_scope->bsym)
6549 tcc_error("cannot break");
6550 if (cur_switch && cur_scope->bsym == cur_switch->bsym)
6551 leave_scope(cur_switch->scope);
6552 else
6553 leave_scope(loop_scope);
6554 *cur_scope->bsym = gjmp(*cur_scope->bsym);
6555 skip(';');
6557 } else if (t == TOK_CONTINUE) {
6558 /* compute jump */
6559 if (!cur_scope->csym)
6560 tcc_error("cannot continue");
6561 leave_scope(loop_scope);
6562 *cur_scope->csym = gjmp(*cur_scope->csym);
6563 skip(';');
6565 } else if (t == TOK_FOR) {
6566 new_scope(&o);
6568 skip('(');
6569 if (tok != ';') {
6570 /* c99 for-loop init decl? */
6571 if (!decl0(VT_LOCAL, 1, NULL)) {
6572 /* no, regular for-loop init expr */
6573 gexpr();
6574 vpop();
6577 skip(';');
6578 a = b = 0;
6579 c = d = gind();
6580 if (tok != ';') {
6581 gexpr();
6582 a = gvtst(1, 0);
6584 skip(';');
6585 if (tok != ')') {
6586 e = gjmp(0);
6587 d = gind();
6588 gexpr();
6589 vpop();
6590 gjmp_addr(c);
6591 gsym(e);
6593 skip(')');
6594 lblock(&a, &b);
6595 gjmp_addr(d);
6596 gsym_addr(b, d);
6597 gsym(a);
6598 prev_scope(&o, 0);
6600 } else if (t == TOK_DO) {
6601 a = b = 0;
6602 d = gind();
6603 lblock(&a, &b);
6604 gsym(b);
6605 skip(TOK_WHILE);
6606 skip('(');
6607 gexpr();
6608 skip(')');
6609 skip(';');
6610 c = gvtst(0, 0);
6611 gsym_addr(c, d);
6612 gsym(a);
6614 } else if (t == TOK_SWITCH) {
6615 struct switch_t *sw;
6617 sw = tcc_mallocz(sizeof *sw);
6618 sw->bsym = &a;
6619 sw->scope = cur_scope;
6620 sw->prev = cur_switch;
6621 cur_switch = sw;
6623 skip('(');
6624 gexpr();
6625 skip(')');
6626 sw->sv = *vtop--; /* save switch value */
6628 a = 0;
6629 b = gjmp(0); /* jump to first case */
6630 lblock(&a, NULL);
6631 a = gjmp(a); /* add implicit break */
6632 /* case lookup */
6633 gsym(b);
6635 qsort(sw->p, sw->n, sizeof(void*), case_cmp);
6636 for (b = 1; b < sw->n; b++)
6637 if (sw->p[b - 1]->v2 >= sw->p[b]->v1)
6638 tcc_error("duplicate case value");
6640 /* Our switch table sorting is signed, so the compared
6641 value needs to be as well when it's 64bit. */
6642 vpushv(&sw->sv);
6643 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
6644 vtop->type.t &= ~VT_UNSIGNED;
6645 gv(RC_INT);
6646 d = 0, gcase(sw->p, sw->n, &d);
6647 vpop();
6648 if (sw->def_sym)
6649 gsym_addr(d, sw->def_sym);
6650 else
6651 gsym(d);
6652 /* break label */
6653 gsym(a);
6655 dynarray_reset(&sw->p, &sw->n);
6656 cur_switch = sw->prev;
6657 tcc_free(sw);
6659 } else if (t == TOK_CASE) {
6660 struct case_t *cr = tcc_malloc(sizeof(struct case_t));
6661 if (!cur_switch)
6662 expect("switch");
6663 cr->v1 = cr->v2 = expr_const64();
6664 if (gnu_ext && tok == TOK_DOTS) {
6665 next();
6666 cr->v2 = expr_const64();
6667 if (cr->v2 < cr->v1)
6668 tcc_warning("empty case range");
6670 cr->sym = gind();
6671 dynarray_add(&cur_switch->p, &cur_switch->n, cr);
6672 skip(':');
6673 is_expr = 0;
6674 goto block_after_label;
6676 } else if (t == TOK_DEFAULT) {
6677 if (!cur_switch)
6678 expect("switch");
6679 if (cur_switch->def_sym)
6680 tcc_error("too many 'default'");
6681 cur_switch->def_sym = gind();
6682 skip(':');
6683 is_expr = 0;
6684 goto block_after_label;
6686 } else if (t == TOK_GOTO) {
6687 vla_restore(root_scope->vla.loc);
6688 if (tok == '*' && gnu_ext) {
6689 /* computed goto */
6690 next();
6691 gexpr();
6692 if ((vtop->type.t & VT_BTYPE) != VT_PTR)
6693 expect("pointer");
6694 ggoto();
6696 } else if (tok >= TOK_UIDENT) {
6697 s = label_find(tok);
6698 /* put forward definition if needed */
6699 if (!s)
6700 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
6701 else if (s->r == LABEL_DECLARED)
6702 s->r = LABEL_FORWARD;
6704 if (s->r & LABEL_FORWARD) {
6705 /* start new goto chain for cleanups, linked via label->next */
6706 if (cur_scope->cl.s && !nocode_wanted) {
6707 sym_push2(&pending_gotos, SYM_FIELD, 0, cur_scope->cl.n);
6708 pending_gotos->prev_tok = s;
6709 s = sym_push2(&s->next, SYM_FIELD, 0, 0);
6710 pending_gotos->next = s;
6712 s->jnext = gjmp(s->jnext);
6713 } else {
6714 try_call_cleanup_goto(s->cleanupstate);
6715 gjmp_addr(s->jnext);
6717 next();
6719 } else {
6720 expect("label identifier");
6722 skip(';');
6724 } else if (t == TOK_ASM1 || t == TOK_ASM2 || t == TOK_ASM3) {
6725 asm_instr();
6727 } else {
6728 if (tok == ':' && t >= TOK_UIDENT) {
6729 /* label case */
6730 next();
6731 s = label_find(t);
6732 if (s) {
6733 if (s->r == LABEL_DEFINED)
6734 tcc_error("duplicate label '%s'", get_tok_str(s->v, NULL));
6735 s->r = LABEL_DEFINED;
6736 if (s->next) {
6737 Sym *pcl; /* pending cleanup goto */
6738 for (pcl = s->next; pcl; pcl = pcl->prev)
6739 gsym(pcl->jnext);
6740 sym_pop(&s->next, NULL, 0);
6741 } else
6742 gsym(s->jnext);
6743 } else {
6744 s = label_push(&global_label_stack, t, LABEL_DEFINED);
6746 s->jnext = gind();
6747 s->cleanupstate = cur_scope->cl.s;
6749 block_after_label:
6750 vla_restore(cur_scope->vla.loc);
6751 /* we accept this, but it is a mistake */
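/* e.g. (accepted with a warning; ISO C requires a statement after
   the label):
       void f(void) { do_work(); done: }
*/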
6752 if (tok == '}') {
6753 tcc_warning("deprecated use of label at end of compound statement");
6754 } else {
6755 goto again;
6758 } else {
6759 /* expression case */
6760 if (t != ';') {
6761 unget_tok(t);
6762 if (is_expr) {
6763 vpop();
6764 gexpr();
6765 } else {
6766 gexpr();
6767 vpop();
6769 skip(';');
6775 /* This skips over a stream of tokens containing balanced {} and ()
6776 pairs, stopping at an outer ',' ';' or '}' (or at the matching '}' if we
6777 started with a '{'). If STR is non-NULL, the skipped tokens are allocated
6778 and stored in *STR. This doesn't check that () and {} nest correctly,
6779 i.e. "({)}" is accepted. */
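/* For instance, with tok at the '{' of
       { 1, f(2, 3), { 4 } }, other_decl ...
   everything up to and including the matching '}' is consumed; without
   a leading '{' it stops before the first outer ',' ';' ')' or '}'.
   (f and other_decl are only placeholder names.) */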
6780 static void skip_or_save_block(TokenString **str)
6782 int braces = tok == '{';
6783 int level = 0;
6784 if (str)
6785 *str = tok_str_alloc();
6787 while ((level > 0 || (tok != '}' && tok != ',' && tok != ';' && tok != ')'))) {
6788 int t;
6789 if (tok == TOK_EOF) {
6790 if (str || level > 0)
6791 tcc_error("unexpected end of file");
6792 else
6793 break;
6795 if (str)
6796 tok_str_add_tok(*str);
6797 t = tok;
6798 next();
6799 if (t == '{' || t == '(') {
6800 level++;
6801 } else if (t == '}' || t == ')') {
6802 level--;
6803 if (level == 0 && braces && t == '}')
6804 break;
6807 if (str) {
6808 tok_str_add(*str, -1);
6809 tok_str_add(*str, 0);
6813 #define EXPR_CONST 1
6814 #define EXPR_ANY 2
6816 static void parse_init_elem(int expr_type)
6818 int saved_global_expr;
6819 switch(expr_type) {
6820 case EXPR_CONST:
6821 /* compound literals must be allocated globally in this case */
6822 saved_global_expr = global_expr;
6823 global_expr = 1;
6824 expr_const1();
6825 global_expr = saved_global_expr;
6826 /* NOTE: symbols are accepted, as well as lvalue for anon symbols
6827 (compound literals). */
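/* Roughly, at file scope:
       int x; int *p = &x;        accepted (constant address, VT_SYM)
       int y = x;                 rejected: not constant at compile time
       int *q = (int[]){1, 2};    accepted via an anonymous symbol */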
6828 if (((vtop->r & (VT_VALMASK | VT_LVAL)) != VT_CONST
6829 && ((vtop->r & (VT_SYM|VT_LVAL)) != (VT_SYM|VT_LVAL)
6830 || vtop->sym->v < SYM_FIRST_ANOM))
6831 #ifdef TCC_TARGET_PE
6832 || ((vtop->r & VT_SYM) && vtop->sym->a.dllimport)
6833 #endif
6835 tcc_error("initializer element is not constant");
6836 break;
6837 case EXPR_ANY:
6838 expr_eq();
6839 break;
6843 /* put zeros for variable based init */
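/* For a local such as int a[8] = { [2] = 5 }; the bytes not covered by
   an explicit initializer are cleared by an emitted memset() call on the
   hole; for static/global data nothing is needed because the section is
   already zero-initialized. */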
6844 static void init_putz(Section *sec, unsigned long c, int size)
6846 if (sec) {
6847 /* nothing to do because globals are already set to zero */
6848 } else {
6849 vpush_global_sym(&func_old_type, TOK_memset);
6850 vseti(VT_LOCAL, c);
6851 #ifdef TCC_TARGET_ARM
6852 vpushs(size);
6853 vpushi(0);
6854 #else
6855 vpushi(0);
6856 vpushs(size);
6857 #endif
6858 gfunc_call(3);
6862 #define DIF_FIRST 1
6863 #define DIF_SIZE_ONLY 2
6864 #define DIF_HAVE_ELEM 4
6866 /* t is the array or struct type. c is the array or struct
6867 address. cur_field is the pointer to the current
6868 field; for arrays, its 'c' member contains the current start
6869 index. 'flags' is as in decl_initializer.
6870 'al' contains the already initialized length of the
6871 current container (starting at c). Returns the new length of that container. */
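/* Designator syntax handled here, e.g. (illustration only):
       struct P { int x, y; };
       struct P pts[4] = { [1].y = 2, [2 ... 3] = { 7, 7 } };
   '[2 ... 3]' is the GNU range form; only the last designator may be a
   range, and nb_elems > 1 later replicates the initialized element. */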
6872 static int decl_designator(CType *type, Section *sec, unsigned long c,
6873 Sym **cur_field, int flags, int al)
6875 Sym *s, *f;
6876 int index, index_last, align, l, nb_elems, elem_size;
6877 unsigned long corig = c;
6879 elem_size = 0;
6880 nb_elems = 1;
6882 if (flags & DIF_HAVE_ELEM)
6883 goto no_designator;
6885 if (gnu_ext && tok >= TOK_UIDENT) {
6886 l = tok, next();
6887 if (tok == ':')
6888 goto struct_field;
6889 unget_tok(l);
6892 /* NOTE: we only support ranges for last designator */
6893 while (nb_elems == 1 && (tok == '[' || tok == '.')) {
6894 if (tok == '[') {
6895 if (!(type->t & VT_ARRAY))
6896 expect("array type");
6897 next();
6898 index = index_last = expr_const();
6899 if (tok == TOK_DOTS && gnu_ext) {
6900 next();
6901 index_last = expr_const();
6903 skip(']');
6904 s = type->ref;
6905 if (index < 0 || (s->c >= 0 && index_last >= s->c) ||
6906 index_last < index)
6907 tcc_error("invalid index");
6908 if (cur_field)
6909 (*cur_field)->c = index_last;
6910 type = pointed_type(type);
6911 elem_size = type_size(type, &align);
6912 c += index * elem_size;
6913 nb_elems = index_last - index + 1;
6914 } else {
6915 int cumofs;
6916 next();
6917 l = tok;
6918 struct_field:
6919 next();
6920 if ((type->t & VT_BTYPE) != VT_STRUCT)
6921 expect("struct/union type");
6922 cumofs = 0;
6923 f = find_field(type, l, &cumofs);
6924 if (!f)
6925 expect("field");
6926 if (cur_field)
6927 *cur_field = f;
6928 type = &f->type;
6929 c += cumofs + f->c;
6931 cur_field = NULL;
6933 if (!cur_field) {
6934 if (tok == '=') {
6935 next();
6936 } else if (!gnu_ext) {
6937 expect("=");
6939 } else {
6940 no_designator:
6941 if (type->t & VT_ARRAY) {
6942 index = (*cur_field)->c;
6943 if (type->ref->c >= 0 && index >= type->ref->c)
6944 tcc_error("index too large");
6945 type = pointed_type(type);
6946 c += index * type_size(type, &align);
6947 } else {
6948 f = *cur_field;
6949 while (f && (f->v & SYM_FIRST_ANOM) && (f->type.t & VT_BITFIELD))
6950 *cur_field = f = f->next;
6951 if (!f)
6952 tcc_error("too many field init");
6953 type = &f->type;
6954 c += f->c;
6957 /* must put zero in holes (note that doing it that way
6958 ensures that it even works with designators) */
6959 if (!(flags & DIF_SIZE_ONLY) && c - corig > al)
6960 init_putz(sec, corig + al, c - corig - al);
6961 decl_initializer(type, sec, c, flags & ~DIF_FIRST);
6963 /* XXX: make it more general */
6964 if (!(flags & DIF_SIZE_ONLY) && nb_elems > 1) {
6965 unsigned long c_end;
6966 uint8_t *src, *dst;
6967 int i;
6969 if (!sec) {
6970 vset(type, VT_LOCAL|VT_LVAL, c);
6971 for (i = 1; i < nb_elems; i++) {
6972 vset(type, VT_LOCAL|VT_LVAL, c + elem_size * i);
6973 vswap();
6974 vstore();
6976 vpop();
6977 } else if (!NODATA_WANTED) {
6978 c_end = c + nb_elems * elem_size;
6979 if (c_end > sec->data_allocated)
6980 section_realloc(sec, c_end);
6981 src = sec->data + c;
6982 dst = src;
6983 for(i = 1; i < nb_elems; i++) {
6984 dst += elem_size;
6985 memcpy(dst, src, elem_size);
6989 c += nb_elems * type_size(type, &align);
6990 if (c - corig > al)
6991 al = c - corig;
6992 return al;
6995 /* store a value or an expression directly in global data or in local array */
6996 static void init_putv(CType *type, Section *sec, unsigned long c)
6998 int bt;
6999 void *ptr;
7000 CType dtype;
7002 dtype = *type;
7003 dtype.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
7005 if (sec) {
7006 int size, align;
7007 /* XXX: not portable */
7008 /* XXX: generate error if incorrect relocation */
7009 gen_assign_cast(&dtype);
7010 bt = type->t & VT_BTYPE;
7012 if ((vtop->r & VT_SYM)
7013 && bt != VT_PTR
7014 && bt != VT_FUNC
7015 && (bt != (PTR_SIZE == 8 ? VT_LLONG : VT_INT)
7016 || (type->t & VT_BITFIELD))
7017 && !((vtop->r & VT_CONST) && vtop->sym->v >= SYM_FIRST_ANOM)
7019 tcc_error("initializer element is not computable at load time");
7021 if (NODATA_WANTED) {
7022 vtop--;
7023 return;
7026 size = type_size(type, &align);
7027 section_reserve(sec, c + size);
7028 ptr = sec->data + c;
7030 /* XXX: make code faster ? */
7031 if ((vtop->r & (VT_SYM|VT_CONST)) == (VT_SYM|VT_CONST) &&
7032 vtop->sym->v >= SYM_FIRST_ANOM &&
7033 /* XXX This rejects compound literals like
7034 '(void *){ptr}'. The problem is that '&sym' is
7035 represented the same way, which would be ruled out
7036 by the SYM_FIRST_ANOM check above, but also '"string"'
7037 in 'char *p = "string"' is represented the same
7038 with the type being VT_PTR and the symbol being an
7039 anonymous one. That is, there's no difference in vtop
7040 between '(void *){x}' and '&(void *){x}'. Ignore
7041 pointer typed entities here. Hopefully no real code
7042 will ever use compound literals with scalar type. */
7043 (vtop->type.t & VT_BTYPE) != VT_PTR) {
7044 /* These come from compound literals, memcpy stuff over. */
7045 Section *ssec;
7046 ElfSym *esym;
7047 ElfW_Rel *rel;
7048 esym = elfsym(vtop->sym);
7049 ssec = tcc_state->sections[esym->st_shndx];
7050 memmove (ptr, ssec->data + esym->st_value, size);
7051 if (ssec->reloc) {
7052 /* We need to copy over all memory contents, and that
7053 includes relocations. Use the fact that relocs are
7054 created in order, so look from the end of relocs
7055 until we hit one before the copied region. */
7056 int num_relocs = ssec->reloc->data_offset / sizeof(*rel);
7057 rel = (ElfW_Rel*)(ssec->reloc->data + ssec->reloc->data_offset);
7058 while (num_relocs--) {
7059 rel--;
7060 if (rel->r_offset >= esym->st_value + size)
7061 continue;
7062 if (rel->r_offset < esym->st_value)
7063 break;
7064 /* Note: if the same fields are initialized multiple
7065 times (possible with designators) then we possibly
7066 add multiple relocations for the same offset here.
7067 That would lead to wrong code; the last reloc needs
7068 to win. We clean this up later after the whole
7069 initializer is parsed. */
7070 put_elf_reloca(symtab_section, sec,
7071 c + rel->r_offset - esym->st_value,
7072 ELFW(R_TYPE)(rel->r_info),
7073 ELFW(R_SYM)(rel->r_info),
7074 #if PTR_SIZE == 8
7075 rel->r_addend
7076 #else
7078 #endif
7082 } else {
7083 if (type->t & VT_BITFIELD) {
7084 int bit_pos, bit_size, bits, n;
7085 unsigned char *p, v, m;
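/* Pack the constant into the target bytes chunk by chunk: v is the
   shifted value, m the mask for the current byte.  E.g. (assuming the
   usual layout) struct { unsigned a:3, b:5; } s = { 5, 9 }; puts 'a'
   in bits 0..2 and 'b' in bits 3..7 of the first byte. */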
7086 bit_pos = BIT_POS(vtop->type.t);
7087 bit_size = BIT_SIZE(vtop->type.t);
7088 p = (unsigned char*)ptr + (bit_pos >> 3);
7089 bit_pos &= 7, bits = 0;
7090 while (bit_size) {
7091 n = 8 - bit_pos;
7092 if (n > bit_size)
7093 n = bit_size;
7094 v = vtop->c.i >> bits << bit_pos;
7095 m = ((1 << n) - 1) << bit_pos;
7096 *p = (*p & ~m) | (v & m);
7097 bits += n, bit_size -= n, bit_pos = 0, ++p;
7099 } else
7100 switch(bt) {
7101 /* XXX: when cross-compiling we assume that each type has the
7102 same representation on host and target, which is likely to
7103 be wrong in the case of long double */
7104 case VT_BOOL:
7105 vtop->c.i = vtop->c.i != 0;
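/* fall through: _Bool is stored in a single byte */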
7106 case VT_BYTE:
7107 *(char *)ptr |= vtop->c.i;
7108 break;
7109 case VT_SHORT:
7110 *(short *)ptr |= vtop->c.i;
7111 break;
7112 case VT_FLOAT:
7113 *(float*)ptr = vtop->c.f;
7114 break;
7115 case VT_DOUBLE:
7116 *(double *)ptr = vtop->c.d;
7117 break;
7118 case VT_LDOUBLE:
7119 #if defined TCC_IS_NATIVE_387
7120 if (sizeof (long double) >= 10) /* zero pad ten-byte LD */
7121 memcpy(ptr, &vtop->c.ld, 10);
7122 #ifdef __TINYC__
7123 else if (sizeof (long double) == sizeof (double))
7124 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr) : "m" (vtop->c.ld));
7125 #endif
7126 else if (vtop->c.ld == 0.0)
7128 else
7129 #endif
7130 if (sizeof(long double) == LDOUBLE_SIZE)
7131 *(long double*)ptr = vtop->c.ld;
7132 else if (sizeof(double) == LDOUBLE_SIZE)
7133 *(double *)ptr = (double)vtop->c.ld;
7134 else
7135 tcc_error("can't cross compile long double constants");
7136 break;
7137 #if PTR_SIZE != 8
7138 case VT_LLONG:
7139 *(long long *)ptr |= vtop->c.i;
7140 break;
7141 #else
7142 case VT_LLONG:
7143 #endif
7144 case VT_PTR:
7146 addr_t val = vtop->c.i;
7147 #if PTR_SIZE == 8
7148 if (vtop->r & VT_SYM)
7149 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
7150 else
7151 *(addr_t *)ptr |= val;
7152 #else
7153 if (vtop->r & VT_SYM)
7154 greloc(sec, vtop->sym, c, R_DATA_PTR);
7155 *(addr_t *)ptr |= val;
7156 #endif
7157 break;
7159 default:
7161 int val = vtop->c.i;
7162 #if PTR_SIZE == 8
7163 if (vtop->r & VT_SYM)
7164 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
7165 else
7166 *(int *)ptr |= val;
7167 #else
7168 if (vtop->r & VT_SYM)
7169 greloc(sec, vtop->sym, c, R_DATA_PTR);
7170 *(int *)ptr |= val;
7171 #endif
7172 break;
7176 vtop--;
7177 } else {
7178 vset(&dtype, VT_LOCAL|VT_LVAL, c);
7179 vswap();
7180 vstore();
7181 vpop();
7185 /* 't' contains the type and storage info. 'c' is the offset of the
7186 object in section 'sec'. If 'sec' is NULL, it means stack-based
7187 allocation. 'flags & DIF_FIRST' is true if the array's '{' must be read
7188 (multi-dimensional implicit array init handling). 'flags & DIF_SIZE_ONLY'
7189 is true if size-only evaluation is wanted (only for arrays). */
7190 static void decl_initializer(CType *type, Section *sec, unsigned long c,
7191 int flags)
7193 int len, n, no_oblock, nb, i;
7194 int size1, align1;
7195 Sym *s, *f;
7196 Sym indexsym;
7197 CType *t1;
7199 if (!(flags & DIF_HAVE_ELEM) && tok != '{' &&
7200 /* In case of strings we have special handling for arrays, so
7201 don't consume them as initializer value (which would commit them
7202 to some anonymous symbol). */
7203 tok != TOK_LSTR && tok != TOK_STR &&
7204 !(flags & DIF_SIZE_ONLY)) {
7205 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
7206 flags |= DIF_HAVE_ELEM;
7209 if ((flags & DIF_HAVE_ELEM) &&
7210 !(type->t & VT_ARRAY) &&
7211 /* Use i_c_parameter_t to strip top-level qualifiers.
7212 The source type might have VT_CONSTANT set, which is
7213 of course assignable to non-const elements. */
7214 is_compatible_unqualified_types(type, &vtop->type)) {
7215 init_putv(type, sec, c);
7216 } else if (type->t & VT_ARRAY) {
7217 s = type->ref;
7218 n = s->c;
7219 t1 = pointed_type(type);
7220 size1 = type_size(t1, &align1);
7222 no_oblock = 1;
7223 if (((flags & DIF_FIRST) && tok != TOK_LSTR && tok != TOK_STR) ||
7224 tok == '{') {
7225 if (tok != '{')
7226 tcc_error("character array initializer must be a literal,"
7227 " optionally enclosed in braces");
7228 skip('{');
7229 no_oblock = 0;
7232 /* only parse strings here if correct type (otherwise: handle
7233 them as ((w)char *) expressions) */
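/* e.g. (illustration only):
       char    c[] = "ab";     handled here (TOK_STR, element type VT_BYTE)
       wchar_t w[] = L"ab";    handled here (TOK_LSTR, element = wchar_t)
   a string literal with a non-matching element type falls through to
   the generic initializer-list code below. */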
7234 if ((tok == TOK_LSTR &&
7235 #ifdef TCC_TARGET_PE
7236 (t1->t & VT_BTYPE) == VT_SHORT && (t1->t & VT_UNSIGNED)
7237 #else
7238 (t1->t & VT_BTYPE) == VT_INT
7239 #endif
7240 ) || (tok == TOK_STR && (t1->t & VT_BTYPE) == VT_BYTE)) {
7241 len = 0;
7242 while (tok == TOK_STR || tok == TOK_LSTR) {
7243 int cstr_len, ch;
7245 /* compute maximum number of chars wanted */
7246 if (tok == TOK_STR)
7247 cstr_len = tokc.str.size;
7248 else
7249 cstr_len = tokc.str.size / sizeof(nwchar_t);
7250 cstr_len--;
7251 nb = cstr_len;
7252 if (n >= 0 && nb > (n - len))
7253 nb = n - len;
7254 if (!(flags & DIF_SIZE_ONLY)) {
7255 if (cstr_len > nb)
7256 tcc_warning("initializer-string for array is too long");
7257 /* in order to go faster for the common case (char
7258 string in a global variable), we handle it
7259 specifically */
7260 if (sec && tok == TOK_STR && size1 == 1) {
7261 if (!NODATA_WANTED)
7262 memcpy(sec->data + c + len, tokc.str.data, nb);
7263 } else {
7264 for(i=0;i<nb;i++) {
7265 if (tok == TOK_STR)
7266 ch = ((unsigned char *)tokc.str.data)[i];
7267 else
7268 ch = ((nwchar_t *)tokc.str.data)[i];
7269 vpushi(ch);
7270 init_putv(t1, sec, c + (len + i) * size1);
7274 len += nb;
7275 next();
7277 /* only add trailing zero if enough storage (no
7278 warning in this case since it is standard) */
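/* e.g. char s[2] = "ab"; keeps both characters and, as the standard
   allows, silently drops the terminating '\0' since there is no room
   for it. */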
7279 if (n < 0 || len < n) {
7280 if (!(flags & DIF_SIZE_ONLY)) {
7281 vpushi(0);
7282 init_putv(t1, sec, c + (len * size1));
7284 len++;
7286 len *= size1;
7287 } else {
7288 indexsym.c = 0;
7289 f = &indexsym;
7291 do_init_list:
7292 len = 0;
7293 while (tok != '}' || (flags & DIF_HAVE_ELEM)) {
7294 len = decl_designator(type, sec, c, &f, flags, len);
7295 flags &= ~DIF_HAVE_ELEM;
7296 if (type->t & VT_ARRAY) {
7297 ++indexsym.c;
7298 /* special test for multi-dimensional arrays (may not
7299 be strictly correct if designators are used at the
7300 same time) */
7301 if (no_oblock && len >= n*size1)
7302 break;
7303 } else {
7304 if (s->type.t == VT_UNION)
7305 f = NULL;
7306 else
7307 f = f->next;
7308 if (no_oblock && f == NULL)
7309 break;
7312 if (tok == '}')
7313 break;
7314 skip(',');
7317 /* put zeros at the end */
7318 if (!(flags & DIF_SIZE_ONLY) && len < n*size1)
7319 init_putz(sec, c + len, n*size1 - len);
7320 if (!no_oblock)
7321 skip('}');
7322 /* patch type size if needed, which happens only for array types */
7323 if (n < 0)
7324 s->c = size1 == 1 ? len : ((len + size1 - 1)/size1);
7325 } else if ((type->t & VT_BTYPE) == VT_STRUCT) {
7326 size1 = 1;
7327 no_oblock = 1;
7328 if ((flags & DIF_FIRST) || tok == '{') {
7329 skip('{');
7330 no_oblock = 0;
7332 s = type->ref;
7333 f = s->next;
7334 n = s->c;
7335 goto do_init_list;
7336 } else if (tok == '{') {
7337 if (flags & DIF_HAVE_ELEM)
7338 skip(';');
7339 next();
7340 decl_initializer(type, sec, c, flags & ~DIF_HAVE_ELEM);
7341 skip('}');
7342 } else if ((flags & DIF_SIZE_ONLY)) {
7343 /* If we supported only ISO C we wouldn't have to accept calling
7344 this on anything other than an array if DIF_SIZE_ONLY (and even then
7345 only on the outermost level, so no recursion would be needed),
7346 because initializing a flex array member isn't supported.
7347 But GNU C supports it, so we need to recurse even into
7348 subfields of structs and arrays when DIF_SIZE_ONLY is set. */
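/* e.g. (GNU extension, illustration only):
       struct S { int n; int tab[]; } s = { 2, { 3, 4 } };
   sizing this requires recursing into the struct even though only the
   flexible member's length is unknown. */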
7349 /* just skip expression */
7350 skip_or_save_block(NULL);
7351 } else {
7352 if (!(flags & DIF_HAVE_ELEM)) {
7353 /* This should happen only when we haven't parsed
7354 the init element above for fear of committing a
7355 string constant to memory too early. */
7356 if (tok != TOK_STR && tok != TOK_LSTR)
7357 expect("string constant");
7358 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
7360 init_putv(type, sec, c);
7364 /* parse an initializer for type 't' if 'has_init' is non-zero, and
7365 allocate space in local or global data space ('r' is either
7366 VT_LOCAL or VT_CONST). If 'v' is non-zero, then an associated
7367 variable 'v' of scope 'scope' is declared before the initializers
7368 are parsed. If 'v' is zero, then a reference to the new object
7369 is pushed on the value stack. If 'has_init' is 2, special parsing
7370 is done to handle string constants. */
7371 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r,
7372 int has_init, int v, int scope)
7374 int size, align, addr;
7375 TokenString *init_str = NULL;
7377 Section *sec;
7378 Sym *flexible_array;
7379 Sym *sym = NULL;
7380 int saved_nocode_wanted = nocode_wanted;
7381 #ifdef CONFIG_TCC_BCHECK
7382 int bcheck = tcc_state->do_bounds_check && !NODATA_WANTED;
7383 #endif
7385 /* Always allocate static or global variables */
7386 if (v && (r & VT_VALMASK) == VT_CONST)
7387 nocode_wanted |= 0x80000000;
7389 flexible_array = NULL;
7390 if ((type->t & VT_BTYPE) == VT_STRUCT) {
7391 Sym *field = type->ref->next;
7392 if (field) {
7393 while (field->next)
7394 field = field->next;
7395 if (field->type.t & VT_ARRAY && field->type.ref->c < 0)
7396 flexible_array = field;
7400 size = type_size(type, &align);
7401 /* If the size is unknown, we must evaluate it before
7402 evaluating the initializers because
7403 initializers can generate global data too
7404 (e.g. string pointers or ISO C99 compound
7405 literals). It also simplifies the handling
7406 of local initializers */
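/* e.g. for int a[] = { 1, 2, 3 }; the initializer tokens are saved,
   replayed once with DIF_SIZE_ONLY to learn that the array has three
   elements, then replayed a second time to actually emit the data. */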
7407 if (size < 0 || (flexible_array && has_init)) {
7408 if (!has_init)
7409 tcc_error("unknown type size");
7410 /* get all init string */
7411 if (has_init == 2) {
7412 init_str = tok_str_alloc();
7413 /* only get strings */
7414 while (tok == TOK_STR || tok == TOK_LSTR) {
7415 tok_str_add_tok(init_str);
7416 next();
7418 tok_str_add(init_str, -1);
7419 tok_str_add(init_str, 0);
7420 } else {
7421 skip_or_save_block(&init_str);
7423 unget_tok(0);
7425 /* compute size */
7426 begin_macro(init_str, 1);
7427 next();
7428 decl_initializer(type, NULL, 0, DIF_FIRST | DIF_SIZE_ONLY);
7429 /* prepare second initializer parsing */
7430 macro_ptr = init_str->str;
7431 next();
7433 /* if still unknown size, error */
7434 size = type_size(type, &align);
7435 if (size < 0)
7436 tcc_error("unknown type size");
7438 /* If there's a flexible array member and it was used in the
7439 initializer, adjust the size. */
7440 if (flexible_array &&
7441 flexible_array->type.ref->c > 0)
7442 size += flexible_array->type.ref->c
7443 * pointed_size(&flexible_array->type);
7444 /* take into account specified alignment if bigger */
7445 if (ad->a.aligned) {
7446 int speca = 1 << (ad->a.aligned - 1);
7447 if (speca > align)
7448 align = speca;
7449 } else if (ad->a.packed) {
7450 align = 1;
7453 if (!v && NODATA_WANTED)
7454 size = 0, align = 1;
7456 if ((r & VT_VALMASK) == VT_LOCAL) {
7457 sec = NULL;
7458 #ifdef CONFIG_TCC_BCHECK
7459 if (bcheck && v) {
7460 /* add padding between stack variables for bound checking */
7461 loc--;
7463 #endif
7464 loc = (loc - size) & -align;
7465 addr = loc;
7466 #ifdef CONFIG_TCC_BCHECK
7467 if (bcheck && v) {
7468 /* add padding between stack variables for bound checking */
7469 loc--;
7471 #endif
7472 if (v) {
7473 /* local variable */
7474 #ifdef CONFIG_TCC_ASM
7475 if (ad->asm_label) {
7476 int reg = asm_parse_regvar(ad->asm_label);
7477 if (reg >= 0)
7478 r = (r & ~VT_VALMASK) | reg;
7480 #endif
7481 sym = sym_push(v, type, r, addr);
7482 if (ad->cleanup_func) {
7483 Sym *cls = sym_push2(&all_cleanups,
7484 SYM_FIELD | ++cur_scope->cl.n, 0, 0);
7485 cls->prev_tok = sym;
7486 cls->next = ad->cleanup_func;
7487 cls->ncl = cur_scope->cl.s;
7488 cur_scope->cl.s = cls;
7491 sym->a = ad->a;
7492 } else {
7493 /* push local reference */
7494 vset(type, r, addr);
7496 } else {
7497 if (v && scope == VT_CONST) {
7498 /* see if the symbol was already defined */
7499 sym = sym_find(v);
7500 if (sym) {
7501 patch_storage(sym, ad, type);
7502 /* we accept several definitions of the same global variable. */
7503 if (!has_init && sym->c && elfsym(sym)->st_shndx != SHN_UNDEF)
7504 goto no_alloc;
7508 /* allocate symbol in corresponding section */
7509 sec = ad->section;
7510 if (!sec) {
7511 if (has_init)
7512 sec = data_section;
7513 else if (tcc_state->nocommon)
7514 sec = bss_section;
7517 if (sec) {
7518 addr = section_add(sec, size, align);
7519 #ifdef CONFIG_TCC_BCHECK
7520 /* add padding if bound check */
7521 if (bcheck)
7522 section_add(sec, 1, 1);
7523 #endif
7524 } else {
7525 addr = align; /* SHN_COMMON is special, symbol value is align */
7526 sec = common_section;
7529 if (v) {
7530 if (!sym) {
7531 sym = sym_push(v, type, r | VT_SYM, 0);
7532 patch_storage(sym, ad, NULL);
7534 /* update symbol definition */
7535 put_extern_sym(sym, sec, addr, size);
7536 } else {
7537 /* push global reference */
7538 vpush_ref(type, sec, addr, size);
7539 sym = vtop->sym;
7540 vtop->r |= r;
7543 #ifdef CONFIG_TCC_BCHECK
7544 /* handle bounds now because the symbol must be defined
7545 before the relocation */
7546 if (bcheck) {
7547 addr_t *bounds_ptr;
7549 greloca(bounds_section, sym, bounds_section->data_offset, R_DATA_PTR, 0);
7550 /* then add global bound info */
7551 bounds_ptr = section_ptr_add(bounds_section, 2 * sizeof(addr_t));
7552 bounds_ptr[0] = 0; /* relocated */
7553 bounds_ptr[1] = size;
7555 #endif
7558 if (type->t & VT_VLA) {
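/* variable length array, e.g. (illustration only):
       void f(int n) { int buf[n]; ... }
   the element count is only known at run time, so stack space is
   allocated dynamically and the resulting pointer is remembered in the
   scope so the stack can be restored on scope exit or goto. */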
7559 int a;
7561 if (NODATA_WANTED)
7562 goto no_alloc;
7564 /* save current stack pointer */
7565 if (root_scope->vla.loc == 0) {
7566 struct scope *v = cur_scope;
7567 gen_vla_sp_save(loc -= PTR_SIZE);
7568 do v->vla.loc = loc; while ((v = v->prev));
7571 vla_runtime_type_size(type, &a);
7572 gen_vla_alloc(type, a);
7573 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
7574 /* on _WIN64, because of the function args scratch area, the
7575 result of alloca differs from RSP and is returned in RAX. */
7576 gen_vla_result(addr), addr = (loc -= PTR_SIZE);
7577 #endif
7578 gen_vla_sp_save(addr);
7579 cur_scope->vla.loc = addr;
7580 cur_scope->vla.num++;
7581 } else if (has_init) {
7582 size_t oldreloc_offset = 0;
7583 if (sec && sec->reloc)
7584 oldreloc_offset = sec->reloc->data_offset;
7585 decl_initializer(type, sec, addr, DIF_FIRST);
7586 if (sec && sec->reloc)
7587 squeeze_multi_relocs(sec, oldreloc_offset);
7588 /* patch flexible array member size back to -1, */
7589 /* for possible subsequent similar declarations */
7590 if (flexible_array)
7591 flexible_array->type.ref->c = -1;
7594 no_alloc:
7595 /* restore parse state if needed */
7596 if (init_str) {
7597 end_macro();
7598 next();
7601 nocode_wanted = saved_nocode_wanted;
7604 /* parse a function defined by symbol 'sym' and generate its code in
7605 'cur_text_section' */
7606 static void gen_function(Sym *sym)
7608 /* Initialize VLA state */
7609 struct scope f = { 0 };
7610 cur_scope = root_scope = &f;
7612 nocode_wanted = 0;
7613 ind = cur_text_section->data_offset;
7614 if (sym->a.aligned) {
7615 size_t newoff = section_add(cur_text_section, 0,
7616 1 << (sym->a.aligned - 1));
7617 gen_fill_nops(newoff - ind);
7619 /* NOTE: we patch the symbol size later */
7620 put_extern_sym(sym, cur_text_section, ind, 0);
7621 if (sym->type.ref->f.func_ctor)
7622 add_array (tcc_state, ".init_array", sym->c);
7623 if (sym->type.ref->f.func_dtor)
7624 add_array (tcc_state, ".fini_array", sym->c);
7625 funcname = get_tok_str(sym->v, NULL);
7626 func_ind = ind;
7627 /* put debug symbol */
7628 tcc_debug_funcstart(tcc_state, sym);
7629 /* push a dummy symbol to enable local sym storage */
7630 sym_push2(&local_stack, SYM_FIELD, 0, 0);
7631 local_scope = 1; /* for function parameters */
7632 gfunc_prolog(sym);
7633 local_scope = 0;
7634 rsym = 0;
7635 clear_temp_local_var_list();
7636 block(0);
7637 gsym(rsym);
7638 nocode_wanted = 0;
7639 /* reset local stack */
7640 pop_local_syms(&local_stack, NULL, 0,
7641 sym->type.ref->f.func_type == FUNC_ELLIPSIS);
7642 gfunc_epilog();
7643 cur_text_section->data_offset = ind;
7644 local_scope = 0;
7645 label_pop(&global_label_stack, NULL, 0);
7646 sym_pop(&all_cleanups, NULL, 0);
7647 /* patch symbol size */
7648 elfsym(sym)->st_size = ind - func_ind;
7649 /* end of function */
7650 tcc_debug_funcend(tcc_state, ind - func_ind);
7651 /* It's better to crash than to generate wrong code */
7652 cur_text_section = NULL;
7653 funcname = ""; /* for safety */
7654 func_vt.t = VT_VOID; /* for safety */
7655 func_var = 0; /* for safety */
7656 ind = 0; /* for safety */
7657 nocode_wanted = 0x80000000;
7658 check_vstack();
7659 /* do this after funcend debug info */
7660 next();
7663 static void gen_inline_functions(TCCState *s)
7665 Sym *sym;
7666 int inline_generated, i;
7667 struct InlineFunc *fn;
7669 tcc_open_bf(s, ":inline:", 0);
7670 /* iterate while inline functions are referenced */
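/* e.g. a static inline int f(void) { ... } is only turned into real code
   here if some emitted code referenced it (sym->c != 0) or it was forced
   non-inline; generating it may in turn reference further inline
   functions, hence the outer loop. */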
7671 do {
7672 inline_generated = 0;
7673 for (i = 0; i < s->nb_inline_fns; ++i) {
7674 fn = s->inline_fns[i];
7675 sym = fn->sym;
7676 if (sym && (sym->c || !(sym->type.t & VT_INLINE))) {
7677 /* the function was used or forced (and then not internal):
7678 generate its code and convert it to a normal function */
7679 fn->sym = NULL;
7680 tcc_debug_putfile(s, fn->filename);
7681 begin_macro(fn->func_str, 1);
7682 next();
7683 cur_text_section = text_section;
7684 gen_function(sym);
7685 end_macro();
7687 inline_generated = 1;
7690 } while (inline_generated);
7691 tcc_close();
7694 static void free_inline_functions(TCCState *s)
7696 int i;
7697 /* free tokens of unused inline functions */
7698 for (i = 0; i < s->nb_inline_fns; ++i) {
7699 struct InlineFunc *fn = s->inline_fns[i];
7700 if (fn->sym)
7701 tok_str_free(fn->func_str);
7703 dynarray_reset(&s->inline_fns, &s->nb_inline_fns);
7706 /* 'l' is VT_LOCAL or VT_CONST to define the default storage type, or VT_CMP
7707 if parsing an old-style parameter decl list (FUNC_SYM is set in that case) */
7708 static int decl0(int l, int is_for_loop_init, Sym *func_sym)
7710 int v, has_init, r;
7711 CType type, btype;
7712 Sym *sym;
7713 AttributeDef ad, adbase;
7715 while (1) {
7716 if (tok == TOK_STATIC_ASSERT) {
7717 int c;
7719 next();
7720 skip('(');
7721 c = expr_const();
7722 skip(',');
7723 if (c == 0)
7724 tcc_error("%s", get_tok_str(tok, &tokc));
7725 next();
7726 skip(')');
7727 skip(';');
7728 continue;
7730 if (!parse_btype(&btype, &adbase)) {
7731 if (is_for_loop_init)
7732 return 0;
7733 /* skip redundant ';' if not in old parameter decl scope */
7734 if (tok == ';' && l != VT_CMP) {
7735 next();
7736 continue;
7738 if (l != VT_CONST)
7739 break;
7740 if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
7741 /* global asm block */
7742 asm_global_instr();
7743 continue;
7745 if (tok >= TOK_UIDENT) {
7746 /* special test for old K&R protos without explicit int
7747 type. Only accepted when defining global data */
7748 btype.t = VT_INT;
7749 } else {
7750 if (tok != TOK_EOF)
7751 expect("declaration");
7752 break;
7755 if (tok == ';') {
7756 if ((btype.t & VT_BTYPE) == VT_STRUCT) {
7757 int v = btype.ref->v;
7758 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) >= SYM_FIRST_ANOM)
7759 tcc_warning("unnamed struct/union that defines no instances");
7760 next();
7761 continue;
7763 if (IS_ENUM(btype.t)) {
7764 next();
7765 continue;
7768 while (1) { /* iterate thru each declaration */
7769 type = btype;
7770 /* If the base type itself was an array type of unspecified
7771 size (like in 'typedef int arr[]; arr x = {1};') then
7772 we will overwrite the unknown size by the real one for
7773 this decl. We need to unshare the ref symbol holding
7774 that size. */
7775 if ((type.t & VT_ARRAY) && type.ref->c < 0) {
7776 type.ref = sym_push(SYM_FIELD, &type.ref->type, 0, type.ref->c);
7778 ad = adbase;
7779 type_decl(&type, &ad, &v, TYPE_DIRECT);
7780 #if 0
7782 char buf[500];
7783 type_to_str(buf, sizeof(buf), &type, get_tok_str(v, NULL));
7784 printf("type = '%s'\n", buf);
7786 #endif
7787 if ((type.t & VT_BTYPE) == VT_FUNC) {
7788 if ((type.t & VT_STATIC) && (l == VT_LOCAL))
7789 tcc_error("function without file scope cannot be static");
7790 /* if old-style function prototype, we accept a
7791 declaration list */
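/* e.g. (illustration only):
       int f(a, b)
           int a;
           double b;
       { return a; }
   the declarations between ')' and '{' are parsed by the recursive
   decl0(VT_CMP, ...) call below. */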
7792 sym = type.ref;
7793 if (sym->f.func_type == FUNC_OLD && l == VT_CONST)
7794 decl0(VT_CMP, 0, sym);
7795 /* always compile 'extern inline' */
7796 if (type.t & VT_EXTERN)
7797 type.t &= ~VT_INLINE;
7800 if (gnu_ext && (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
7801 ad.asm_label = asm_label_instr();
7802 /* parse one last attribute list, after asm label */
7803 parse_attribute(&ad);
7804 #if 0
7805 /* gcc does not allow __asm__("label") with function definition,
7806 but why not ... */
7807 if (tok == '{')
7808 expect(";");
7809 #endif
7812 #ifdef TCC_TARGET_PE
7813 if (ad.a.dllimport || ad.a.dllexport) {
7814 if (type.t & VT_STATIC)
7815 tcc_error("cannot have dll linkage with static");
7816 if (type.t & VT_TYPEDEF) {
7817 tcc_warning("'%s' attribute ignored for typedef",
7818 ad.a.dllimport ? (ad.a.dllimport = 0, "dllimport") :
7819 (ad.a.dllexport = 0, "dllexport"));
7820 } else if (ad.a.dllimport) {
7821 if ((type.t & VT_BTYPE) == VT_FUNC)
7822 ad.a.dllimport = 0;
7823 else
7824 type.t |= VT_EXTERN;
7827 #endif
7828 if (tok == '{') {
7829 if (l != VT_CONST)
7830 tcc_error("cannot use local functions");
7831 if ((type.t & VT_BTYPE) != VT_FUNC)
7832 expect("function definition");
7834 /* reject abstract declarators in function definition;
7835 make old-style params without a decl have int type */
7836 sym = type.ref;
7837 while ((sym = sym->next) != NULL) {
7838 if (!(sym->v & ~SYM_FIELD))
7839 expect("identifier");
7840 if (sym->type.t == VT_VOID)
7841 sym->type = int_type;
7844 /* put function symbol */
7845 type.t &= ~VT_EXTERN;
7846 sym = external_sym(v, &type, 0, &ad);
7847 /* static inline functions are just recorded as a kind
7848 of macro. Their code will be emitted at the end of
7849 the compilation unit only if they are used */
7850 if (sym->type.t & VT_INLINE) {
7851 struct InlineFunc *fn;
7852 fn = tcc_malloc(sizeof *fn + strlen(file->filename));
7853 strcpy(fn->filename, file->filename);
7854 fn->sym = sym;
7855 skip_or_save_block(&fn->func_str);
7856 dynarray_add(&tcc_state->inline_fns,
7857 &tcc_state->nb_inline_fns, fn);
7858 } else {
7859 /* compute text section */
7860 cur_text_section = ad.section;
7861 if (!cur_text_section)
7862 cur_text_section = text_section;
7863 gen_function(sym);
7865 break;
7866 } else {
7867 if (l == VT_CMP) {
7868 /* find parameter in function parameter list */
7869 for (sym = func_sym->next; sym; sym = sym->next)
7870 if ((sym->v & ~SYM_FIELD) == v)
7871 goto found;
7872 tcc_error("declaration for parameter '%s' but no such parameter",
7873 get_tok_str(v, NULL));
7874 found:
7875 if (type.t & VT_STORAGE) /* 'register' is okay */
7876 tcc_error("storage class specified for '%s'",
7877 get_tok_str(v, NULL));
7878 if (sym->type.t != VT_VOID)
7879 tcc_error("redefinition of parameter '%s'",
7880 get_tok_str(v, NULL));
7881 convert_parameter_type(&type);
7882 sym->type = type;
7883 } else if (type.t & VT_TYPEDEF) {
7884 /* save typedefed type */
7885 /* XXX: test storage specifiers ? */
7886 sym = sym_find(v);
7887 if (sym && sym->sym_scope == local_scope) {
7888 if (!is_compatible_types(&sym->type, &type)
7889 || !(sym->type.t & VT_TYPEDEF))
7890 tcc_error("incompatible redefinition of '%s'",
7891 get_tok_str(v, NULL));
7892 sym->type = type;
7893 } else {
7894 sym = sym_push(v, &type, 0, 0);
7896 sym->a = ad.a;
7897 sym->f = ad.f;
7898 } else if ((type.t & VT_BTYPE) == VT_VOID
7899 && !(type.t & VT_EXTERN)) {
7900 tcc_error("declaration of void object");
7901 } else {
7902 r = 0;
7903 if ((type.t & VT_BTYPE) == VT_FUNC) {
7904 /* external function definition */
7905 /* specific case for func_call attribute */
7906 type.ref->f = ad.f;
7907 } else if (!(type.t & VT_ARRAY)) {
7908 /* not lvalue if array */
7909 r |= VT_LVAL;
7911 has_init = (tok == '=');
7912 if (has_init && (type.t & VT_VLA))
7913 tcc_error("variable length array cannot be initialized");
7914 if (((type.t & VT_EXTERN) && (!has_init || l != VT_CONST))
7915 || (type.t & VT_BTYPE) == VT_FUNC
7916 /* as with GCC, uninitialized global arrays with no size
7917 are considered extern: */
7918 || ((type.t & VT_ARRAY) && !has_init
7919 && l == VT_CONST && type.ref->c < 0)
7921 /* external variable or function */
7922 type.t |= VT_EXTERN;
7923 sym = external_sym(v, &type, r, &ad);
7924 if (ad.alias_target) {
7925 ElfSym *esym;
7926 Sym *alias_target;
7927 alias_target = sym_find(ad.alias_target);
7928 esym = elfsym(alias_target);
7929 if (!esym)
7930 tcc_error("unsupported forward __alias__ attribute");
7931 put_extern_sym2(sym, esym->st_shndx, esym->st_value, esym->st_size, 0);
7933 } else {
7934 if (type.t & VT_STATIC)
7935 r |= VT_CONST;
7936 else
7937 r |= l;
7938 if (has_init)
7939 next();
7940 else if (l == VT_CONST)
7941 /* uninitialized global variables may be overridden */
7942 type.t |= VT_EXTERN;
7943 decl_initializer_alloc(&type, &ad, r, has_init, v, l);
7946 if (tok != ',') {
7947 if (is_for_loop_init)
7948 return 1;
7949 skip(';');
7950 break;
7952 next();
7956 return 0;
7959 static void decl(int l)
7961 decl0(l, 0, NULL);
7964 /* ------------------------------------------------------------------------- */
7965 #undef gjmp_addr
7966 #undef gjmp
7967 /* ------------------------------------------------------------------------- */