1 /*
2 * TCC - Tiny C Compiler
3 *
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 */
21 #define USING_GLOBALS
22 #include "tcc.h"
24 /********************************************************/
25 /* global variables */
/* loc : local variable index
   ind : output code index
   rsym: return symbol
   anon_sym: anonymous symbol index
*/
32 ST_DATA int rsym, anon_sym, ind, loc;
34 ST_DATA Sym *global_stack;
35 ST_DATA Sym *local_stack;
36 ST_DATA Sym *define_stack;
37 ST_DATA Sym *global_label_stack;
38 ST_DATA Sym *local_label_stack;
40 static Sym *sym_free_first;
41 static void **sym_pools;
42 static int nb_sym_pools;
44 static Sym *all_cleanups, *pending_gotos;
45 static int local_scope;
46 static int in_sizeof;
47 static int in_generic;
48 static int section_sym;
50 ST_DATA SValue *vtop;
51 static SValue _vstack[1 + VSTACK_SIZE];
52 #define vstack (_vstack + 1)
54 ST_DATA int const_wanted; /* true if constant wanted */
55 ST_DATA int nocode_wanted; /* no code generation wanted */
56 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
57 #define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */
59 /* Automagical code suppression ----> */
60 #define CODE_OFF() (nocode_wanted |= 0x20000000)
61 #define CODE_ON() (nocode_wanted &= ~0x20000000)
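/* Reading aid (interpretation, not from the original sources):
   'nocode_wanted' packs several suppression states into one int.
   tccgen_compile() starts it at 0x80000000 for file scope, where
   STATIC_DATA_WANTED (mask 0xC0000000) holds and static data is still
   emitted, while CODE_OFF()/CODE_ON() toggle bit 0x20000000 around
   unreachable statements; any positive value makes NODATA_WANTED true,
   suppressing static data output as well. */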
63 /* Clear 'nocode_wanted' at label if it was used */
64 ST_FUNC void gsym(int t) { if (t) { gsym_addr(t, ind); CODE_ON(); }}
65 static int gind(void) { CODE_ON(); return ind; }
67 /* Set 'nocode_wanted' after unconditional jumps */
68 static void gjmp_addr_acs(int t) { gjmp_addr(t); CODE_OFF(); }
69 static int gjmp_acs(int t) { t = gjmp(t); CODE_OFF(); return t; }
71 /* These are #undef'd at the end of this file */
72 #define gjmp_addr gjmp_addr_acs
73 #define gjmp gjmp_acs
74 /* <---- */
ST_DATA int global_expr;  /* true if compound literals must be allocated globally (used during initializer parsing) */
77 ST_DATA CType func_vt; /* current function return type (used by return instruction) */
78 ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
79 ST_DATA int func_vc;
80 static int last_line_num, new_file, func_ind; /* debug info control */
81 ST_DATA const char *funcname;
82 ST_DATA CType int_type, func_old_type, char_pointer_type;
84 #if PTR_SIZE == 4
85 #define VT_SIZE_T (VT_INT | VT_UNSIGNED)
86 #define VT_PTRDIFF_T VT_INT
87 #elif LONG_SIZE == 4
88 #define VT_SIZE_T (VT_LLONG | VT_UNSIGNED)
89 #define VT_PTRDIFF_T VT_LLONG
90 #else
91 #define VT_SIZE_T (VT_LONG | VT_LLONG | VT_UNSIGNED)
92 #define VT_PTRDIFF_T (VT_LONG | VT_LLONG)
93 #endif
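/* Illustration (derived from the conditions above, not authoritative):
   PTR_SIZE == 4              -> size_t is 'unsigned int',       ptrdiff_t is 'int'
   PTR_SIZE == 8, 32-bit long -> size_t is 'unsigned long long', ptrdiff_t is 'long long' (LLP64, e.g. win64)
   PTR_SIZE == 8, 64-bit long -> size_t is 'unsigned long',      ptrdiff_t is 'long'
   (VT_LONG is only a marker on top of VT_LLONG in the last case). */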
95 ST_DATA struct switch_t {
96 struct case_t {
97 int64_t v1, v2;
98 int sym;
99 } **p; int n; /* list of case ranges */
100 int def_sym; /* default symbol */
101 int *bsym;
102 struct scope *scope;
103 } *cur_switch; /* current switch */
105 #define MAX_TEMP_LOCAL_VARIABLE_NUMBER 0x4
/* list of temporary local variables on the stack in the current function */
ST_DATA struct temp_local_variable {
	int location; // offset on stack (SValue.c.i)
109 short size;
110 short align;
111 } arr_temp_local_vars[MAX_TEMP_LOCAL_VARIABLE_NUMBER];
112 short nb_temp_local_vars;
114 static struct scope {
115 struct scope *prev;
116 struct { int loc, num; } vla;
117 struct { Sym *s; int n; } cl;
118 int *bsym, *csym;
119 Sym *lstk, *llstk;
120 } *cur_scope, *loop_scope, *root_scope;
122 /********************************************************/
123 #ifndef CONFIG_TCC_ASM
124 ST_FUNC void asm_instr(void)
126 tcc_error("inline asm() not supported");
128 ST_FUNC void asm_global_instr(void)
130 tcc_error("inline asm() not supported");
132 #endif
134 /* ------------------------------------------------------------------------- */
136 static void gen_cast(CType *type);
137 static void gen_cast_s(int t);
138 static inline CType *pointed_type(CType *type);
139 static int is_compatible_types(CType *type1, CType *type2);
140 static int parse_btype(CType *type, AttributeDef *ad);
141 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td);
142 static void parse_expr_type(CType *type);
143 static void init_putv(CType *type, Section *sec, unsigned long c);
144 static void decl_initializer(CType *type, Section *sec, unsigned long c, int flags);
145 static void block(int is_expr);
146 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
147 static void decl(int l);
148 static int decl0(int l, int is_for_loop_init, Sym *);
149 static void expr_eq(void);
150 static void vla_runtime_type_size(CType *type, int *a);
151 static int is_compatible_unqualified_types(CType *type1, CType *type2);
152 static inline int64_t expr_const64(void);
153 static void vpush64(int ty, unsigned long long v);
154 static void vpush(CType *type);
155 static int gvtst(int inv, int t);
156 static void gen_inline_functions(TCCState *s);
157 static void free_inline_functions(TCCState *s);
158 static void skip_or_save_block(TokenString **str);
159 static void gv_dup(void);
160 static int get_temp_local_var(int size,int align);
161 static void clear_temp_local_var_list();
162 static void cast_error(CType *st, CType *dt);
164 ST_INLN int is_float(int t)
166 int bt = t & VT_BTYPE;
167 return bt == VT_LDOUBLE
168 || bt == VT_DOUBLE
169 || bt == VT_FLOAT
170 || bt == VT_QFLOAT;
173 static inline int is_integer_btype(int bt)
175 return bt == VT_BYTE
176 || bt == VT_BOOL
177 || bt == VT_SHORT
178 || bt == VT_INT
179 || bt == VT_LLONG;
182 static int btype_size(int bt)
184 return bt == VT_BYTE || bt == VT_BOOL ? 1 :
185 bt == VT_SHORT ? 2 :
186 bt == VT_INT ? 4 :
187 bt == VT_LLONG ? 8 :
188 bt == VT_PTR ? PTR_SIZE : 0;
191 /* returns function return register from type */
192 static int R_RET(int t)
194 if (!is_float(t))
195 return REG_IRET;
196 #ifdef TCC_TARGET_X86_64
197 if ((t & VT_BTYPE) == VT_LDOUBLE)
198 return TREG_ST0;
199 #elif defined TCC_TARGET_RISCV64
200 if ((t & VT_BTYPE) == VT_LDOUBLE)
201 return REG_IRET;
202 #endif
203 return REG_FRET;
206 /* returns 2nd function return register, if any */
207 static int R2_RET(int t)
209 t &= VT_BTYPE;
210 #if PTR_SIZE == 4
211 if (t == VT_LLONG)
212 return REG_IRE2;
213 #elif defined TCC_TARGET_X86_64
214 if (t == VT_QLONG)
215 return REG_IRE2;
216 if (t == VT_QFLOAT)
217 return REG_FRE2;
218 #elif defined TCC_TARGET_RISCV64
219 if (t == VT_LDOUBLE)
220 return REG_IRE2;
221 #endif
222 return VT_CONST;
225 /* returns true for two-word types */
226 #define USING_TWO_WORDS(t) (R2_RET(t) != VT_CONST)
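/* Note (derived from R2_RET() above): two words are needed for
   'long long' when PTR_SIZE == 4, for VT_QLONG/VT_QFLOAT on x86-64 and
   for 'long double' on riscv64; everything else fits into a single
   return register. */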
228 /* put function return registers to stack value */
229 static void PUT_R_RET(SValue *sv, int t)
231 sv->r = R_RET(t), sv->r2 = R2_RET(t);
234 /* returns function return register class for type t */
235 static int RC_RET(int t)
237 return reg_classes[R_RET(t)] & ~(RC_FLOAT | RC_INT);
240 /* returns generic register class for type t */
241 static int RC_TYPE(int t)
243 if (!is_float(t))
244 return RC_INT;
245 #ifdef TCC_TARGET_X86_64
246 if ((t & VT_BTYPE) == VT_LDOUBLE)
247 return RC_ST0;
248 if ((t & VT_BTYPE) == VT_QFLOAT)
249 return RC_FRET;
250 #elif defined TCC_TARGET_RISCV64
251 if ((t & VT_BTYPE) == VT_LDOUBLE)
252 return RC_INT;
253 #endif
254 return RC_FLOAT;
257 /* returns 2nd register class corresponding to t and rc */
258 static int RC2_TYPE(int t, int rc)
260 if (!USING_TWO_WORDS(t))
261 return 0;
262 #ifdef RC_IRE2
263 if (rc == RC_IRET)
264 return RC_IRE2;
265 #endif
266 #ifdef RC_FRE2
267 if (rc == RC_FRET)
268 return RC_FRE2;
269 #endif
270 if (rc & RC_FLOAT)
271 return RC_FLOAT;
272 return RC_INT;
275 /* we use our own 'finite' function to avoid potential problems with
276 non standard math libs */
277 /* XXX: endianness dependent */
278 ST_FUNC int ieee_finite(double d)
280 int p[4];
281 memcpy(p, &d, sizeof(double));
282 return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
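/* Sketch of why the expression above works (assuming IEEE-754 doubles
   with little-endian word order, so p[1] holds sign + exponent):
   OR-ing with 0x800fffff sets every bit except the 11 exponent bits;
   the +1 then carries out of bit 31 only when the exponent field is
   all ones (Inf/NaN), so the shifted result is 1 exactly for finite
   values.  An equivalent, more explicit form, for comparison only: */
#if 0
static int ieee_finite_explicit(double d)
{
    unsigned p[2];
    memcpy(p, &d, sizeof p);                 /* p[1]: sign | exponent | mantissa high */
    return ((p[1] >> 20) & 0x7ff) != 0x7ff;  /* finite <=> exponent != all-ones */
}
#endif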
285 /* compiling intel long double natively */
286 #if (defined __i386__ || defined __x86_64__) \
287 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
288 # define TCC_IS_NATIVE_387
289 #endif
291 ST_FUNC void test_lvalue(void)
293 if (!(vtop->r & VT_LVAL))
294 expect("lvalue");
297 ST_FUNC void check_vstack(void)
299 if (vtop != vstack - 1)
300 tcc_error("internal compiler error: vstack leak (%d)", vtop - vstack + 1);
303 /* ------------------------------------------------------------------------- */
304 /* vstack debugging aid */
306 #if 0
307 void pv (const char *lbl, int a, int b)
309 int i;
310 for (i = a; i < a + b; ++i) {
311 SValue *p = &vtop[-i];
312 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
313 lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
316 #endif
318 /* ------------------------------------------------------------------------- */
319 /* start of translation unit info */
320 ST_FUNC void tcc_debug_start(TCCState *s1)
322 if (s1->do_debug) {
323 char buf[512];
325 /* file info: full path + filename */
326 section_sym = put_elf_sym(symtab_section, 0, 0,
327 ELFW(ST_INFO)(STB_LOCAL, STT_SECTION), 0,
328 text_section->sh_num, NULL);
329 getcwd(buf, sizeof(buf));
330 #ifdef _WIN32
331 normalize_slashes(buf);
332 #endif
333 pstrcat(buf, sizeof(buf), "/");
334 put_stabs_r(s1, buf, N_SO, 0, 0,
335 text_section->data_offset, text_section, section_sym);
336 put_stabs_r(s1, file->prev->filename, N_SO, 0, 0,
337 text_section->data_offset, text_section, section_sym);
338 new_file = last_line_num = 0;
339 func_ind = -1;
340 /* we're currently 'including' the <command line> */
341 tcc_debug_bincl(s1);
344 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
345 symbols can be safely used */
346 put_elf_sym(symtab_section, 0, 0,
347 ELFW(ST_INFO)(STB_LOCAL, STT_FILE), 0,
348 SHN_ABS, file->filename);
351 /* put end of translation unit info */
352 ST_FUNC void tcc_debug_end(TCCState *s1)
354 if (!s1->do_debug)
355 return;
356 put_stabs_r(s1, NULL, N_SO, 0, 0,
357 text_section->data_offset, text_section, section_sym);
360 static BufferedFile* put_new_file(TCCState *s1)
362 BufferedFile *f = file;
363 /* use upper file if from inline ":asm:" */
364 if (f->filename[0] == ':')
365 f = f->prev;
366 if (f && new_file) {
367 put_stabs_r(s1, f->filename, N_SOL, 0, 0, ind, text_section, section_sym);
368 new_file = last_line_num = 0;
370 return f;
373 /* generate line number info */
374 ST_FUNC void tcc_debug_line(TCCState *s1)
376 BufferedFile *f;
377 if (!s1->do_debug || !(f = put_new_file(s1)))
378 return;
379 if (last_line_num == f->line_num)
380 return;
381 if (text_section != cur_text_section)
382 return;
383 if (func_ind != -1) {
384 put_stabn(s1, N_SLINE, 0, f->line_num, ind - func_ind);
385 } else {
386 /* from tcc_assemble */
387 put_stabs_r(s1, NULL, N_SLINE, 0, f->line_num, ind, text_section, section_sym);
389 last_line_num = f->line_num;
392 /* put function symbol */
393 ST_FUNC void tcc_debug_funcstart(TCCState *s1, Sym *sym)
395 char buf[512];
396 BufferedFile *f;
397 if (!s1->do_debug || !(f = put_new_file(s1)))
398 return;
399 /* XXX: we put here a dummy type */
400 snprintf(buf, sizeof(buf), "%s:%c1",
401 funcname, sym->type.t & VT_STATIC ? 'f' : 'F');
402 put_stabs_r(s1, buf, N_FUN, 0, f->line_num, 0, cur_text_section, sym->c);
403 tcc_debug_line(s1);
406 /* put function size */
407 ST_FUNC void tcc_debug_funcend(TCCState *s1, int size)
409 if (!s1->do_debug)
410 return;
411 #if 0 // this seems to confuse gnu tools
412 put_stabn(s1, N_FUN, 0, 0, size);
413 #endif
416 /* put alternative filename */
417 ST_FUNC void tcc_debug_putfile(TCCState *s1, const char *filename)
419 if (0 == strcmp(file->filename, filename))
420 return;
421 pstrcpy(file->filename, sizeof(file->filename), filename);
422 new_file = 1;
425 /* begin of #include */
426 ST_FUNC void tcc_debug_bincl(TCCState *s1)
428 if (!s1->do_debug)
429 return;
430 put_stabs(s1, file->filename, N_BINCL, 0, 0, 0);
431 new_file = 1;
434 /* end of #include */
435 ST_FUNC void tcc_debug_eincl(TCCState *s1)
437 if (!s1->do_debug)
438 return;
439 put_stabn(s1, N_EINCL, 0, 0, 0);
440 new_file = 1;
443 /* ------------------------------------------------------------------------- */
444 /* initialize vstack and types. This must be done also for tcc -E */
445 ST_FUNC void tccgen_init(TCCState *s1)
447 vtop = vstack - 1;
448 memset(vtop, 0, sizeof *vtop);
450 /* define some often used types */
451 int_type.t = VT_INT;
452 char_pointer_type.t = VT_BYTE;
453 mk_pointer(&char_pointer_type);
454 func_old_type.t = VT_FUNC;
455 func_old_type.ref = sym_push(SYM_FIELD, &int_type, 0, 0);
456 func_old_type.ref->f.func_call = FUNC_CDECL;
457 func_old_type.ref->f.func_type = FUNC_OLD;
460 ST_FUNC int tccgen_compile(TCCState *s1)
462 cur_text_section = NULL;
463 funcname = "";
464 anon_sym = SYM_FIRST_ANOM;
465 section_sym = 0;
466 const_wanted = 0;
467 nocode_wanted = 0x80000000;
468 local_scope = 0;
470 tcc_debug_start(s1);
471 #ifdef TCC_TARGET_ARM
472 arm_init(s1);
473 #endif
474 #ifdef INC_DEBUG
475 printf("%s: **** new file\n", file->filename);
476 #endif
477 parse_flags = PARSE_FLAG_PREPROCESS | PARSE_FLAG_TOK_NUM | PARSE_FLAG_TOK_STR;
478 next();
479 decl(VT_CONST);
480 gen_inline_functions(s1);
481 check_vstack();
482 /* end of translation unit info */
483 tcc_debug_end(s1);
484 return 0;
487 ST_FUNC void tccgen_finish(TCCState *s1)
489 free_inline_functions(s1);
490 sym_pop(&global_stack, NULL, 0);
491 sym_pop(&local_stack, NULL, 0);
492 /* free preprocessor macros */
493 free_defines(NULL);
494 /* free sym_pools */
495 dynarray_reset(&sym_pools, &nb_sym_pools);
496 sym_free_first = NULL;
499 /* ------------------------------------------------------------------------- */
500 ST_FUNC ElfSym *elfsym(Sym *s)
502 if (!s || !s->c)
503 return NULL;
504 return &((ElfSym *)symtab_section->data)[s->c];
507 /* apply storage attributes to Elf symbol */
508 ST_FUNC void update_storage(Sym *sym)
510 ElfSym *esym;
511 int sym_bind, old_sym_bind;
513 esym = elfsym(sym);
514 if (!esym)
515 return;
517 if (sym->a.visibility)
518 esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1))
519 | sym->a.visibility;
521 if (sym->type.t & (VT_STATIC | VT_INLINE))
522 sym_bind = STB_LOCAL;
523 else if (sym->a.weak)
524 sym_bind = STB_WEAK;
525 else
526 sym_bind = STB_GLOBAL;
527 old_sym_bind = ELFW(ST_BIND)(esym->st_info);
528 if (sym_bind != old_sym_bind) {
529 esym->st_info = ELFW(ST_INFO)(sym_bind, ELFW(ST_TYPE)(esym->st_info));
532 #ifdef TCC_TARGET_PE
533 if (sym->a.dllimport)
534 esym->st_other |= ST_PE_IMPORT;
535 if (sym->a.dllexport)
536 esym->st_other |= ST_PE_EXPORT;
537 #endif
539 #if 0
540 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
541 get_tok_str(sym->v, NULL),
542 sym_bind == STB_WEAK ? 'w' : sym_bind == STB_LOCAL ? 'l' : 'g',
543 sym->a.visibility,
544 sym->a.dllexport,
545 sym->a.dllimport
547 #endif
550 /* ------------------------------------------------------------------------- */
551 /* update sym->c so that it points to an external symbol in section
552 'section' with value 'value' */
554 ST_FUNC void put_extern_sym2(Sym *sym, int sh_num,
555 addr_t value, unsigned long size,
556 int can_add_underscore)
558 int sym_type, sym_bind, info, other, t;
559 ElfSym *esym;
560 const char *name;
561 char buf1[256];
562 #ifdef CONFIG_TCC_BCHECK
563 char buf[32];
564 #endif
566 if (!sym->c) {
567 name = get_tok_str(sym->v, NULL);
568 #ifdef CONFIG_TCC_BCHECK
569 if (tcc_state->do_bounds_check) {
570 /* XXX: avoid doing that for statics ? */
571 /* if bound checking is activated, we change some function
572 names by adding the "__bound" prefix */
573 switch(sym->v) {
574 #ifdef TCC_TARGET_PE
575 /* XXX: we rely only on malloc hooks */
576 case TOK_malloc:
577 case TOK_free:
578 case TOK_realloc:
579 case TOK_memalign:
580 case TOK_calloc:
581 #endif
582 case TOK_memcpy:
583 case TOK_memmove:
584 case TOK_memset:
585 case TOK_memcmp:
586 case TOK_strlen:
587 case TOK_strcpy:
588 case TOK_strncpy:
589 case TOK_strcmp:
590 case TOK_strncmp:
591 case TOK_strcat:
592 case TOK_strchr:
593 case TOK_strdup:
594 case TOK_alloca:
595 case TOK_mmap:
596 case TOK_munmap:
597 strcpy(buf, "__bound_");
598 strcat(buf, name);
599 name = buf;
600 break;
603 #endif
604 t = sym->type.t;
605 if ((t & VT_BTYPE) == VT_FUNC) {
606 sym_type = STT_FUNC;
607 } else if ((t & VT_BTYPE) == VT_VOID) {
608 sym_type = STT_NOTYPE;
609 } else {
610 sym_type = STT_OBJECT;
612 if (t & (VT_STATIC | VT_INLINE))
613 sym_bind = STB_LOCAL;
614 else
615 sym_bind = STB_GLOBAL;
616 other = 0;
617 #ifdef TCC_TARGET_PE
618 if (sym_type == STT_FUNC && sym->type.ref) {
619 Sym *ref = sym->type.ref;
620 if (ref->a.nodecorate) {
621 can_add_underscore = 0;
623 if (ref->f.func_call == FUNC_STDCALL && can_add_underscore) {
624 sprintf(buf1, "_%s@%d", name, ref->f.func_args * PTR_SIZE);
625 name = buf1;
626 other |= ST_PE_STDCALL;
627 can_add_underscore = 0;
630 #endif
631 if (tcc_state->leading_underscore && can_add_underscore) {
632 buf1[0] = '_';
633 pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
634 name = buf1;
636 if (sym->asm_label)
637 name = get_tok_str(sym->asm_label, NULL);
638 info = ELFW(ST_INFO)(sym_bind, sym_type);
639 sym->c = put_elf_sym(symtab_section, value, size, info, other, sh_num, name);
640 } else {
641 esym = elfsym(sym);
642 esym->st_value = value;
643 esym->st_size = size;
644 esym->st_shndx = sh_num;
646 update_storage(sym);
649 ST_FUNC void put_extern_sym(Sym *sym, Section *section,
650 addr_t value, unsigned long size)
652 int sh_num = section ? section->sh_num : SHN_UNDEF;
653 put_extern_sym2(sym, sh_num, value, size, 1);
656 /* add a new relocation entry to symbol 'sym' in section 's' */
657 ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
658 addr_t addend)
660 int c = 0;
662 if (nocode_wanted && s == cur_text_section)
663 return;
665 if (sym) {
666 if (0 == sym->c)
667 put_extern_sym(sym, NULL, 0, 0);
668 c = sym->c;
671 /* now we can add ELF relocation info */
672 put_elf_reloca(symtab_section, s, offset, type, c, addend);
675 #if PTR_SIZE == 4
676 ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
678 greloca(s, sym, offset, type, 0);
680 #endif
682 /* ------------------------------------------------------------------------- */
683 /* symbol allocator */
684 static Sym *__sym_malloc(void)
686 Sym *sym_pool, *sym, *last_sym;
687 int i;
689 sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
690 dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);
692 last_sym = sym_free_first;
693 sym = sym_pool;
694 for(i = 0; i < SYM_POOL_NB; i++) {
695 sym->next = last_sym;
696 last_sym = sym;
697 sym++;
699 sym_free_first = last_sym;
700 return last_sym;
703 static inline Sym *sym_malloc(void)
705 Sym *sym;
706 #ifndef SYM_DEBUG
707 sym = sym_free_first;
708 if (!sym)
709 sym = __sym_malloc();
710 sym_free_first = sym->next;
711 return sym;
712 #else
713 sym = tcc_malloc(sizeof(Sym));
714 return sym;
715 #endif
718 ST_INLN void sym_free(Sym *sym)
720 #ifndef SYM_DEBUG
721 sym->next = sym_free_first;
722 sym_free_first = sym;
723 #else
724 tcc_free(sym);
725 #endif
728 /* push, without hashing */
729 ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, int c)
731 Sym *s;
733 s = sym_malloc();
734 memset(s, 0, sizeof *s);
735 s->v = v;
736 s->type.t = t;
737 s->c = c;
738 /* add in stack */
739 s->prev = *ps;
740 *ps = s;
741 return s;
744 /* find a symbol and return its associated structure. 's' is the top
745 of the symbol stack */
746 ST_FUNC Sym *sym_find2(Sym *s, int v)
748 while (s) {
749 if (s->v == v)
750 return s;
751 else if (s->v == -1)
752 return NULL;
753 s = s->prev;
755 return NULL;
758 /* structure lookup */
759 ST_INLN Sym *struct_find(int v)
761 v -= TOK_IDENT;
762 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
763 return NULL;
764 return table_ident[v]->sym_struct;
767 /* find an identifier */
768 ST_INLN Sym *sym_find(int v)
770 v -= TOK_IDENT;
771 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
772 return NULL;
773 return table_ident[v]->sym_identifier;
776 static int sym_scope(Sym *s)
778 if (IS_ENUM_VAL (s->type.t))
779 return s->type.ref->sym_scope;
780 else
781 return s->sym_scope;
784 /* push a given symbol on the symbol stack */
785 ST_FUNC Sym *sym_push(int v, CType *type, int r, int c)
787 Sym *s, **ps;
788 TokenSym *ts;
790 if (local_stack)
791 ps = &local_stack;
792 else
793 ps = &global_stack;
794 s = sym_push2(ps, v, type->t, c);
795 s->type.ref = type->ref;
796 s->r = r;
797 /* don't record fields or anonymous symbols */
798 /* XXX: simplify */
799 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
800 /* record symbol in token array */
801 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
802 if (v & SYM_STRUCT)
803 ps = &ts->sym_struct;
804 else
805 ps = &ts->sym_identifier;
806 s->prev_tok = *ps;
807 *ps = s;
808 s->sym_scope = local_scope;
809 if (s->prev_tok && sym_scope(s->prev_tok) == s->sym_scope)
810 tcc_error("redeclaration of '%s'",
811 get_tok_str(v & ~SYM_STRUCT, NULL));
813 return s;
816 /* push a global identifier */
817 ST_FUNC Sym *global_identifier_push(int v, int t, int c)
819 Sym *s, **ps;
820 s = sym_push2(&global_stack, v, t, c);
821 s->r = VT_CONST | VT_SYM;
822 /* don't record anonymous symbol */
823 if (v < SYM_FIRST_ANOM) {
824 ps = &table_ident[v - TOK_IDENT]->sym_identifier;
825 /* modify the top most local identifier, so that sym_identifier will
826 point to 's' when popped; happens when called from inline asm */
827 while (*ps != NULL && (*ps)->sym_scope)
828 ps = &(*ps)->prev_tok;
829 s->prev_tok = *ps;
830 *ps = s;
832 return s;
835 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
836 pop them yet from the list, but do remove them from the token array. */
837 ST_FUNC void sym_pop(Sym **ptop, Sym *b, int keep)
839 Sym *s, *ss, **ps;
840 TokenSym *ts;
841 int v;
843 s = *ptop;
844 while(s != b) {
845 ss = s->prev;
846 v = s->v;
847 /* remove symbol in token array */
848 /* XXX: simplify */
849 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
850 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
851 if (v & SYM_STRUCT)
852 ps = &ts->sym_struct;
853 else
854 ps = &ts->sym_identifier;
855 *ps = s->prev_tok;
857 if (!keep)
858 sym_free(s);
859 s = ss;
861 if (!keep)
862 *ptop = b;
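/* Note (interpretation): sym_push()/sym_pop() keep two views of the same
   symbols: the scope stacks (global_stack/local_stack, chained via
   'prev') and per-identifier chains in table_ident[] (sym_identifier /
   sym_struct, chained via 'prev_tok').  sym_find() is thus a direct
   table lookup, while leaving a scope only unwinds the stack and
   restores the previous per-identifier entries. */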
865 /* ------------------------------------------------------------------------- */
866 static void vcheck_cmp(void)
    /* cannot leave cpu flags set if other instructions are generated. Also
869 avoid leaving VT_JMP anywhere except on the top of the stack
870 because it would complicate the code generator.
872 Don't do this when nocode_wanted. vtop might come from
873 !nocode_wanted regions (see 88_codeopt.c) and transforming
874 it to a register without actually generating code is wrong
875 as their value might still be used for real. All values
876 we push under nocode_wanted will eventually be popped
877 again, so that the VT_CMP/VT_JMP value will be in vtop
878 when code is unsuppressed again. */
880 if (vtop->r == VT_CMP && !nocode_wanted)
881 gv(RC_INT);
884 static void vsetc(CType *type, int r, CValue *vc)
886 if (vtop >= vstack + (VSTACK_SIZE - 1))
887 tcc_error("memory full (vstack)");
888 vcheck_cmp();
889 vtop++;
890 vtop->type = *type;
891 vtop->r = r;
892 vtop->r2 = VT_CONST;
893 vtop->c = *vc;
894 vtop->sym = NULL;
897 ST_FUNC void vswap(void)
899 SValue tmp;
901 vcheck_cmp();
902 tmp = vtop[0];
903 vtop[0] = vtop[-1];
904 vtop[-1] = tmp;
907 /* pop stack value */
908 ST_FUNC void vpop(void)
910 int v;
911 v = vtop->r & VT_VALMASK;
912 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
913 /* for x86, we need to pop the FP stack */
914 if (v == TREG_ST0) {
915 o(0xd8dd); /* fstp %st(0) */
916 } else
917 #endif
918 if (v == VT_CMP) {
919 /* need to put correct jump if && or || without test */
920 gsym(vtop->jtrue);
921 gsym(vtop->jfalse);
923 vtop--;
926 /* push constant of type "type" with useless value */
927 static void vpush(CType *type)
929 vset(type, VT_CONST, 0);
932 /* push arbitrary 64bit constant */
933 static void vpush64(int ty, unsigned long long v)
935 CValue cval;
936 CType ctype;
937 ctype.t = ty;
938 ctype.ref = NULL;
939 cval.i = v;
940 vsetc(&ctype, VT_CONST, &cval);
943 /* push integer constant */
944 ST_FUNC void vpushi(int v)
946 vpush64(VT_INT, v);
949 /* push a pointer sized constant */
950 static void vpushs(addr_t v)
952 vpush64(VT_SIZE_T, v);
955 /* push long long constant */
956 static inline void vpushll(long long v)
958 vpush64(VT_LLONG, v);
961 ST_FUNC void vset(CType *type, int r, int v)
963 CValue cval;
964 cval.i = v;
965 vsetc(type, r, &cval);
968 static void vseti(int r, int v)
970 CType type;
971 type.t = VT_INT;
972 type.ref = NULL;
973 vset(&type, r, v);
976 ST_FUNC void vpushv(SValue *v)
978 if (vtop >= vstack + (VSTACK_SIZE - 1))
979 tcc_error("memory full (vstack)");
980 vtop++;
981 *vtop = *v;
984 static void vdup(void)
986 vpushv(vtop);
/* rotate n first stack elements to the bottom
   I1 ... In -> I2 ... In I1 [top is right]
*/
992 ST_FUNC void vrotb(int n)
994 int i;
995 SValue tmp;
997 vcheck_cmp();
998 tmp = vtop[-n + 1];
999 for(i=-n+1;i!=0;i++)
1000 vtop[i] = vtop[i+1];
1001 vtop[0] = tmp;
/* rotate the n elements before entry e towards the top
   I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
*/
1007 ST_FUNC void vrote(SValue *e, int n)
1009 int i;
1010 SValue tmp;
1012 vcheck_cmp();
1013 tmp = *e;
1014 for(i = 0;i < n - 1; i++)
1015 e[-i] = e[-i - 1];
1016 e[-n + 1] = tmp;
/* rotate n first stack elements to the top
   I1 ... In -> In I1 ... I(n-1) [top is right]
*/
1022 ST_FUNC void vrott(int n)
1024 vrote(vtop, n);
1027 /* ------------------------------------------------------------------------- */
1028 /* vtop->r = VT_CMP means CPU-flags have been set from comparison or test. */
1030 /* called from generators to set the result from relational ops */
1031 ST_FUNC void vset_VT_CMP(int op)
1033 vtop->r = VT_CMP;
1034 vtop->cmp_op = op;
1035 vtop->jfalse = 0;
1036 vtop->jtrue = 0;
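/* Reading aid (interpretation): while a value is in the VT_CMP state,
   'jtrue' and 'jfalse' accumulate chains of pending forward jumps (see
   gjmp_append in gvtst_set below).  gvtst() extends one chain with the
   new jump and resolves the complementary one with gsym(), which is how
   "a && b" / "a || b" are compiled without materializing 0/1 values. */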
1039 /* called once before asking generators to load VT_CMP to a register */
1040 static void vset_VT_JMP(void)
1042 int op = vtop->cmp_op;
1043 if (vtop->jtrue || vtop->jfalse) {
1044 /* we need to jump to 'mov $0,%R' or 'mov $1,%R' */
1045 int inv = op & (op < 2); /* small optimization */
1046 vseti(VT_JMP+inv, gvtst(inv, 0));
1047 } else {
1048 /* otherwise convert flags (rsp. 0/1) to register */
1049 vtop->c.i = op;
1050 if (op < 2) /* doesn't seem to happen */
1051 vtop->r = VT_CONST;
1055 /* Set CPU Flags, doesn't yet jump */
1056 static void gvtst_set(int inv, int t)
1058 int *p;
1059 if (vtop->r != VT_CMP) {
1060 vpushi(0);
1061 gen_op(TOK_NE);
        if (vtop->r == VT_CMP) /* must be VT_CONST otherwise */
            ;
1064 else if (vtop->r == VT_CONST)
1065 vset_VT_CMP(vtop->c.i != 0);
1066 else
1067 tcc_error("ICE");
1069 p = inv ? &vtop->jfalse : &vtop->jtrue;
1070 *p = gjmp_append(*p, t);
1073 /* Generate value test
1075 * Generate a test for any value (jump, comparison and integers) */
1076 static int gvtst(int inv, int t)
1078 int op, u, x;
1080 gvtst_set(inv, t);
1082 t = vtop->jtrue, u = vtop->jfalse;
1083 if (inv)
1084 x = u, u = t, t = x;
1085 op = vtop->cmp_op;
1087 /* jump to the wanted target */
1088 if (op > 1)
1089 t = gjmp_cond(op ^ inv, t);
1090 else if (op != inv)
1091 t = gjmp(t);
1092 /* resolve complementary jumps to here */
1093 gsym(u);
1095 vtop--;
1096 return t;
1099 /* generate a zero or nozero test */
1100 static void gen_test_zero(int op)
1102 if (vtop->r == VT_CMP) {
1103 int j;
1104 if (op == TOK_EQ) {
1105 j = vtop->jfalse;
1106 vtop->jfalse = vtop->jtrue;
1107 vtop->jtrue = j;
1108 vtop->cmp_op ^= 1;
1110 } else {
1111 vpushi(0);
1112 gen_op(op);
1116 /* ------------------------------------------------------------------------- */
1117 /* push a symbol value of TYPE */
1118 static inline void vpushsym(CType *type, Sym *sym)
1120 CValue cval;
1121 cval.i = 0;
1122 vsetc(type, VT_CONST | VT_SYM, &cval);
1123 vtop->sym = sym;
1126 /* Return a static symbol pointing to a section */
1127 ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
1129 int v;
1130 Sym *sym;
1132 v = anon_sym++;
1133 sym = sym_push(v, type, VT_CONST | VT_SYM, 0);
1134 sym->type.t |= VT_STATIC;
1135 put_extern_sym(sym, sec, offset, size);
1136 return sym;
1139 /* push a reference to a section offset by adding a dummy symbol */
1140 static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
1142 vpushsym(type, get_sym_ref(type, sec, offset, size));
1145 /* define a new external reference to a symbol 'v' of type 'u' */
1146 ST_FUNC Sym *external_global_sym(int v, CType *type)
1148 Sym *s;
1150 s = sym_find(v);
1151 if (!s) {
1152 /* push forward reference */
1153 s = global_identifier_push(v, type->t | VT_EXTERN, 0);
1154 s->type.ref = type->ref;
1155 } else if (IS_ASM_SYM(s)) {
1156 s->type.t = type->t | (s->type.t & VT_EXTERN);
1157 s->type.ref = type->ref;
1158 update_storage(s);
1160 return s;
1163 /* Merge symbol attributes. */
1164 static void merge_symattr(struct SymAttr *sa, struct SymAttr *sa1)
1166 if (sa1->aligned && !sa->aligned)
1167 sa->aligned = sa1->aligned;
1168 sa->packed |= sa1->packed;
1169 sa->weak |= sa1->weak;
1170 if (sa1->visibility != STV_DEFAULT) {
1171 int vis = sa->visibility;
1172 if (vis == STV_DEFAULT
1173 || vis > sa1->visibility)
1174 vis = sa1->visibility;
1175 sa->visibility = vis;
1177 sa->dllexport |= sa1->dllexport;
1178 sa->nodecorate |= sa1->nodecorate;
1179 sa->dllimport |= sa1->dllimport;
1182 /* Merge function attributes. */
1183 static void merge_funcattr(struct FuncAttr *fa, struct FuncAttr *fa1)
1185 if (fa1->func_call && !fa->func_call)
1186 fa->func_call = fa1->func_call;
1187 if (fa1->func_type && !fa->func_type)
1188 fa->func_type = fa1->func_type;
1189 if (fa1->func_args && !fa->func_args)
1190 fa->func_args = fa1->func_args;
1193 /* Merge attributes. */
1194 static void merge_attr(AttributeDef *ad, AttributeDef *ad1)
1196 merge_symattr(&ad->a, &ad1->a);
1197 merge_funcattr(&ad->f, &ad1->f);
1199 if (ad1->section)
1200 ad->section = ad1->section;
1201 if (ad1->alias_target)
1202 ad->alias_target = ad1->alias_target;
1203 if (ad1->asm_label)
1204 ad->asm_label = ad1->asm_label;
1205 if (ad1->attr_mode)
1206 ad->attr_mode = ad1->attr_mode;
1209 /* Merge some type attributes. */
1210 static void patch_type(Sym *sym, CType *type)
1212 if (!(type->t & VT_EXTERN) || IS_ENUM_VAL(sym->type.t)) {
1213 if (!(sym->type.t & VT_EXTERN))
1214 tcc_error("redefinition of '%s'", get_tok_str(sym->v, NULL));
1215 sym->type.t &= ~VT_EXTERN;
1218 if (IS_ASM_SYM(sym)) {
1219 /* stay static if both are static */
1220 sym->type.t = type->t & (sym->type.t | ~VT_STATIC);
1221 sym->type.ref = type->ref;
1224 if (!is_compatible_types(&sym->type, type)) {
1225 tcc_error("incompatible types for redefinition of '%s'",
1226 get_tok_str(sym->v, NULL));
1228 } else if ((sym->type.t & VT_BTYPE) == VT_FUNC) {
1229 int static_proto = sym->type.t & VT_STATIC;
1230 /* warn if static follows non-static function declaration */
1231 if ((type->t & VT_STATIC) && !static_proto
1232 /* XXX this test for inline shouldn't be here. Until we
1233 implement gnu-inline mode again it silences a warning for
1234 mingw caused by our workarounds. */
1235 && !((type->t | sym->type.t) & VT_INLINE))
1236 tcc_warning("static storage ignored for redefinition of '%s'",
1237 get_tok_str(sym->v, NULL));
1239 /* set 'inline' if both agree or if one has static */
1240 if ((type->t | sym->type.t) & VT_INLINE) {
1241 if (!((type->t ^ sym->type.t) & VT_INLINE)
1242 || ((type->t | sym->type.t) & VT_STATIC))
1243 static_proto |= VT_INLINE;
1246 if (0 == (type->t & VT_EXTERN)) {
1247 /* put complete type, use static from prototype */
1248 sym->type.t = (type->t & ~(VT_STATIC|VT_INLINE)) | static_proto;
1249 sym->type.ref = type->ref;
1250 } else {
1251 sym->type.t &= ~VT_INLINE | static_proto;
1254 if (sym->type.ref->f.func_type == FUNC_OLD
1255 && type->ref->f.func_type != FUNC_OLD) {
1256 sym->type.ref = type->ref;
1259 } else {
1260 if ((sym->type.t & VT_ARRAY) && type->ref->c >= 0) {
1261 /* set array size if it was omitted in extern declaration */
1262 sym->type.ref->c = type->ref->c;
1264 if ((type->t ^ sym->type.t) & VT_STATIC)
1265 tcc_warning("storage mismatch for redefinition of '%s'",
1266 get_tok_str(sym->v, NULL));
1270 /* Merge some storage attributes. */
1271 static void patch_storage(Sym *sym, AttributeDef *ad, CType *type)
1273 if (type)
1274 patch_type(sym, type);
1276 #ifdef TCC_TARGET_PE
1277 if (sym->a.dllimport != ad->a.dllimport)
1278 tcc_error("incompatible dll linkage for redefinition of '%s'",
1279 get_tok_str(sym->v, NULL));
1280 #endif
1281 merge_symattr(&sym->a, &ad->a);
1282 if (ad->asm_label)
1283 sym->asm_label = ad->asm_label;
1284 update_storage(sym);
1287 /* copy sym to other stack */
1288 static Sym *sym_copy(Sym *s0, Sym **ps)
1290 Sym *s;
1291 s = sym_malloc(), *s = *s0;
1292 s->prev = *ps, *ps = s;
1293 if (s->v < SYM_FIRST_ANOM) {
1294 ps = &table_ident[s->v - TOK_IDENT]->sym_identifier;
1295 s->prev_tok = *ps, *ps = s;
1297 return s;
1300 /* copy s->type.ref to stack 'ps' for VT_FUNC and VT_PTR */
1301 static void sym_copy_ref(Sym *s, Sym **ps)
1303 int bt = s->type.t & VT_BTYPE;
1304 if (bt == VT_FUNC || bt == VT_PTR) {
1305 Sym **sp = &s->type.ref;
1306 for (s = *sp, *sp = NULL; s; s = s->next) {
1307 Sym *s2 = sym_copy(s, ps);
1308 sp = &(*sp = s2)->next;
1309 sym_copy_ref(s2, ps);
1314 /* define a new external reference to a symbol 'v' */
1315 static Sym *external_sym(int v, CType *type, int r, AttributeDef *ad)
1317 Sym *s;
1319 /* look for global symbol */
1320 s = sym_find(v);
1321 while (s && s->sym_scope)
1322 s = s->prev_tok;
1324 if (!s) {
1325 /* push forward reference */
1326 s = global_identifier_push(v, type->t, 0);
1327 s->r |= r;
1328 s->a = ad->a;
1329 s->asm_label = ad->asm_label;
1330 s->type.ref = type->ref;
1331 /* copy type to the global stack */
1332 if (local_stack)
1333 sym_copy_ref(s, &global_stack);
1334 } else {
1335 patch_storage(s, ad, type);
1337 /* push variables on local_stack if any */
1338 if (local_stack && (s->type.t & VT_BTYPE) != VT_FUNC)
1339 s = sym_copy(s, &local_stack);
1340 return s;
1343 /* push a reference to global symbol v */
1344 ST_FUNC void vpush_global_sym(CType *type, int v)
1346 vpushsym(type, external_global_sym(v, type));
1349 /* save registers up to (vtop - n) stack entry */
1350 ST_FUNC void save_regs(int n)
1352 SValue *p, *p1;
1353 for(p = vstack, p1 = vtop - n; p <= p1; p++)
1354 save_reg(p->r);
1357 /* save r to the memory stack, and mark it as being free */
1358 ST_FUNC void save_reg(int r)
1360 save_reg_upstack(r, 0);
1363 /* save r to the memory stack, and mark it as being free,
1364 if seen up to (vtop - n) stack entry */
1365 ST_FUNC void save_reg_upstack(int r, int n)
1367 int l, size, align, bt;
1368 SValue *p, *p1, sv;
1370 if ((r &= VT_VALMASK) >= VT_CONST)
1371 return;
1372 if (nocode_wanted)
1373 return;
1374 l = 0;
1375 for(p = vstack, p1 = vtop - n; p <= p1; p++) {
1376 if ((p->r & VT_VALMASK) == r || p->r2 == r) {
1377 /* must save value on stack if not already done */
1378 if (!l) {
1379 bt = p->type.t & VT_BTYPE;
1380 if (bt == VT_VOID)
1381 continue;
1382 if ((p->r & VT_LVAL) || bt == VT_FUNC)
1383 bt = VT_PTR;
1384 sv.type.t = bt;
1385 size = type_size(&sv.type, &align);
1386 #ifdef CONFIG_TCC_BCHECK
1387 if (tcc_state->do_bounds_check)
1388 l = loc = (loc - size) & -align;
1389 else
1390 #endif
1391 l = get_temp_local_var(size,align);
1392 sv.r = VT_LOCAL | VT_LVAL;
1393 sv.c.i = l;
1394 store(p->r & VT_VALMASK, &sv);
1395 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1396 /* x86 specific: need to pop fp register ST0 if saved */
1397 if (r == TREG_ST0) {
1398 o(0xd8dd); /* fstp %st(0) */
1400 #endif
1401 /* special long long case */
1402 if (p->r2 < VT_CONST && USING_TWO_WORDS(bt)) {
1403 sv.c.i += PTR_SIZE;
1404 store(p->r2, &sv);
1407 /* mark that stack entry as being saved on the stack */
1408 if (p->r & VT_LVAL) {
1409 /* also clear the bounded flag because the
1410 relocation address of the function was stored in
1411 p->c.i */
1412 p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
1413 } else {
1414 p->r = VT_LVAL | VT_LOCAL;
1416 p->r2 = VT_CONST;
1417 p->c.i = l;
1422 #ifdef TCC_TARGET_ARM
1423 /* find a register of class 'rc2' with at most one reference on stack.
1424 * If none, call get_reg(rc) */
1425 ST_FUNC int get_reg_ex(int rc, int rc2)
1427 int r;
1428 SValue *p;
1430 for(r=0;r<NB_REGS;r++) {
1431 if (reg_classes[r] & rc2) {
1432 int n;
1433 n=0;
1434 for(p = vstack; p <= vtop; p++) {
1435 if ((p->r & VT_VALMASK) == r ||
1436 p->r2 == r)
1437 n++;
1439 if (n <= 1)
1440 return r;
1443 return get_reg(rc);
1445 #endif
1447 /* find a free register of class 'rc'. If none, save one register */
1448 ST_FUNC int get_reg(int rc)
1450 int r;
1451 SValue *p;
1453 /* find a free register */
1454 for(r=0;r<NB_REGS;r++) {
1455 if (reg_classes[r] & rc) {
1456 if (nocode_wanted)
1457 return r;
1458 for(p=vstack;p<=vtop;p++) {
1459 if ((p->r & VT_VALMASK) == r ||
1460 p->r2 == r)
1461 goto notfound;
1463 return r;
1465 notfound: ;
1468 /* no register left : free the first one on the stack (VERY
1469 IMPORTANT to start from the bottom to ensure that we don't
1470 spill registers used in gen_opi()) */
1471 for(p=vstack;p<=vtop;p++) {
1472 /* look at second register (if long long) */
1473 r = p->r2;
1474 if (r < VT_CONST && (reg_classes[r] & rc))
1475 goto save_found;
1476 r = p->r & VT_VALMASK;
1477 if (r < VT_CONST && (reg_classes[r] & rc)) {
1478 save_found:
1479 save_reg(r);
1480 return r;
    /* Should never come here */
1484 return -1;
/* find a free temporary local variable (return its offset on the stack)
   matching the requested size and alignment. If none is free, add a new
   temporary stack variable */
1488 static int get_temp_local_var(int size,int align){
1489 int i;
1490 struct temp_local_variable *temp_var;
1491 int found_var;
1492 SValue *p;
1493 int r;
1494 char free;
1495 char found;
1496 found=0;
1497 for(i=0;i<nb_temp_local_vars;i++){
1498 temp_var=&arr_temp_local_vars[i];
1499 if(temp_var->size<size||align!=temp_var->align){
1500 continue;
1502 /*check if temp_var is free*/
1503 free=1;
1504 for(p=vstack;p<=vtop;p++) {
1505 r=p->r&VT_VALMASK;
1506 if(r==VT_LOCAL||r==VT_LLOCAL){
1507 if(p->c.i==temp_var->location){
1508 free=0;
1509 break;
1513 if(free){
1514 found_var=temp_var->location;
1515 found=1;
1516 break;
1519 if(!found){
1520 loc = (loc - size) & -align;
1521 if(nb_temp_local_vars<MAX_TEMP_LOCAL_VARIABLE_NUMBER){
1522 temp_var=&arr_temp_local_vars[i];
1523 temp_var->location=loc;
1524 temp_var->size=size;
1525 temp_var->align=align;
1526 nb_temp_local_vars++;
1528 found_var=loc;
1530 return found_var;
1533 static void clear_temp_local_var_list(){
1534 nb_temp_local_vars=0;
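/* Usage note (interpretation): save_reg_upstack() spills registers via
   get_temp_local_var().  A slot from arr_temp_local_vars is reused only
   when no SValue on the vstack still refers to its stack offset; at
   most MAX_TEMP_LOCAL_VARIABLE_NUMBER (4) slots are remembered, and any
   further spill simply gets a fresh stack location.
   clear_temp_local_var_list() is expected to reset the list per
   function. */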
1537 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1538 if needed */
1539 static void move_reg(int r, int s, int t)
1541 SValue sv;
1543 if (r != s) {
1544 save_reg(r);
1545 sv.type.t = t;
1546 sv.type.ref = NULL;
1547 sv.r = s;
1548 sv.c.i = 0;
1549 load(r, &sv);
1553 /* get address of vtop (vtop MUST BE an lvalue) */
1554 ST_FUNC void gaddrof(void)
1556 vtop->r &= ~VT_LVAL;
1557 /* tricky: if saved lvalue, then we can go back to lvalue */
1558 if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
1559 vtop->r = (vtop->r & ~VT_VALMASK) | VT_LOCAL | VT_LVAL;
1562 #ifdef CONFIG_TCC_BCHECK
1563 /* generate lvalue bound code */
1564 static void gbound(void)
1566 CType type1;
1568 vtop->r &= ~VT_MUSTBOUND;
1569 /* if lvalue, then use checking code before dereferencing */
1570 if (vtop->r & VT_LVAL) {
1571 /* if not VT_BOUNDED value, then make one */
1572 if (!(vtop->r & VT_BOUNDED)) {
1573 /* must save type because we must set it to int to get pointer */
1574 type1 = vtop->type;
1575 vtop->type.t = VT_PTR;
1576 gaddrof();
1577 vpushi(0);
1578 gen_bounded_ptr_add();
1579 vtop->r |= VT_LVAL;
1580 vtop->type = type1;
1582 /* then check for dereferencing */
1583 gen_bounded_ptr_deref();
1587 /* we need to call __bound_ptr_add before we start to load function
1588 args into registers */
1589 ST_FUNC void gbound_args(int nb_args)
1591 int i;
1592 for (i = 1; i <= nb_args; ++i)
1593 if (vtop[1 - i].r & VT_MUSTBOUND) {
1594 vrotb(i);
1595 gbound();
1596 vrott(i);
1599 #endif
1601 static void incr_bf_adr(int o)
1603 vtop->type = char_pointer_type;
1604 gaddrof();
1605 vpushs(o);
1606 gen_op('+');
1607 vtop->type.t = VT_BYTE | VT_UNSIGNED;
1608 vtop->r |= VT_LVAL;
1611 /* single-byte load mode for packed or otherwise unaligned bitfields */
1612 static void load_packed_bf(CType *type, int bit_pos, int bit_size)
1614 int n, o, bits;
1615 save_reg_upstack(vtop->r, 1);
1616 vpush64(type->t & VT_BTYPE, 0); // B X
1617 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
1618 do {
1619 vswap(); // X B
1620 incr_bf_adr(o);
1621 vdup(); // X B B
1622 n = 8 - bit_pos;
1623 if (n > bit_size)
1624 n = bit_size;
1625 if (bit_pos)
1626 vpushi(bit_pos), gen_op(TOK_SHR), bit_pos = 0; // X B Y
1627 if (n < 8)
1628 vpushi((1 << n) - 1), gen_op('&');
1629 gen_cast(type);
1630 if (bits)
1631 vpushi(bits), gen_op(TOK_SHL);
1632 vrotb(3); // B Y X
1633 gen_op('|'); // B X
1634 bits += n, bit_size -= n, o = 1;
1635 } while (bit_size);
1636 vswap(), vpop();
1637 if (!(type->t & VT_UNSIGNED)) {
1638 n = ((type->t & VT_BTYPE) == VT_LLONG ? 64 : 32) - bits;
1639 vpushi(n), gen_op(TOK_SHL);
1640 vpushi(n), gen_op(TOK_SAR);
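/* Worked example (illustrative): loading a signed bitfield with
   bit_pos = 6 and bit_size = 5 from address B takes two byte-sized
   steps: byte 0 contributes (B[0] >> 6) & 0x3 as bits 0..1, byte 1
   contributes (B[1] & 0x7) << 2 as bits 2..4, the two parts are OR-ed
   together, and the final shift left then arithmetic shift right by
   (32 - 5) sign-extends the 5-bit result. */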
1644 /* single-byte store mode for packed or otherwise unaligned bitfields */
1645 static void store_packed_bf(int bit_pos, int bit_size)
1647 int bits, n, o, m, c;
1649 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1650 vswap(); // X B
1651 save_reg_upstack(vtop->r, 1);
1652 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
1653 do {
1654 incr_bf_adr(o); // X B
1655 vswap(); //B X
1656 c ? vdup() : gv_dup(); // B V X
1657 vrott(3); // X B V
1658 if (bits)
1659 vpushi(bits), gen_op(TOK_SHR);
1660 if (bit_pos)
1661 vpushi(bit_pos), gen_op(TOK_SHL);
1662 n = 8 - bit_pos;
1663 if (n > bit_size)
1664 n = bit_size;
1665 if (n < 8) {
1666 m = ((1 << n) - 1) << bit_pos;
1667 vpushi(m), gen_op('&'); // X B V1
1668 vpushv(vtop-1); // X B V1 B
1669 vpushi(m & 0x80 ? ~m & 0x7f : ~m);
1670 gen_op('&'); // X B V1 B1
1671 gen_op('|'); // X B V2
1673 vdup(), vtop[-1] = vtop[-2]; // X B B V2
1674 vstore(), vpop(); // X B
1675 bits += n, bit_size -= n, bit_pos = 0, o = 1;
1676 } while (bit_size);
1677 vpop(), vpop();
1680 static int adjust_bf(SValue *sv, int bit_pos, int bit_size)
1682 int t;
1683 if (0 == sv->type.ref)
1684 return 0;
1685 t = sv->type.ref->auxtype;
1686 if (t != -1 && t != VT_STRUCT) {
1687 sv->type.t = (sv->type.t & ~VT_BTYPE) | t;
1688 sv->r |= VT_LVAL;
1690 return t;
/* store vtop in a register belonging to class 'rc'. lvalues are
   converted to values. Cannot be used if the value cannot be converted
   to a register value (such as structures). */
1696 ST_FUNC int gv(int rc)
1698 int r, r2, r_ok, r2_ok, rc2, bt;
1699 int bit_pos, bit_size, size, align;
1701 /* NOTE: get_reg can modify vstack[] */
1702 if (vtop->type.t & VT_BITFIELD) {
1703 CType type;
1705 bit_pos = BIT_POS(vtop->type.t);
1706 bit_size = BIT_SIZE(vtop->type.t);
1707 /* remove bit field info to avoid loops */
1708 vtop->type.t &= ~VT_STRUCT_MASK;
1710 type.ref = NULL;
1711 type.t = vtop->type.t & VT_UNSIGNED;
1712 if ((vtop->type.t & VT_BTYPE) == VT_BOOL)
1713 type.t |= VT_UNSIGNED;
1715 r = adjust_bf(vtop, bit_pos, bit_size);
1717 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
1718 type.t |= VT_LLONG;
1719 else
1720 type.t |= VT_INT;
1722 if (r == VT_STRUCT) {
1723 load_packed_bf(&type, bit_pos, bit_size);
1724 } else {
1725 int bits = (type.t & VT_BTYPE) == VT_LLONG ? 64 : 32;
1726 /* cast to int to propagate signedness in following ops */
1727 gen_cast(&type);
1728 /* generate shifts */
1729 vpushi(bits - (bit_pos + bit_size));
1730 gen_op(TOK_SHL);
1731 vpushi(bits - bit_size);
1732 /* NOTE: transformed to SHR if unsigned */
1733 gen_op(TOK_SAR);
1735 r = gv(rc);
1736 } else {
1737 if (is_float(vtop->type.t) &&
1738 (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1739 unsigned long offset;
1740 /* CPUs usually cannot use float constants, so we store them
1741 generically in data segment */
1742 size = type_size(&vtop->type, &align);
1743 if (NODATA_WANTED)
1744 size = 0, align = 1;
1745 offset = section_add(data_section, size, align);
1746 vpush_ref(&vtop->type, data_section, offset, size);
1747 vswap();
1748 init_putv(&vtop->type, data_section, offset);
1749 vtop->r |= VT_LVAL;
1751 #ifdef CONFIG_TCC_BCHECK
1752 if (vtop->r & VT_MUSTBOUND)
1753 gbound();
1754 #endif
1756 bt = vtop->type.t & VT_BTYPE;
1758 #ifdef TCC_TARGET_RISCV64
1759 /* XXX mega hack */
1760 if (bt == VT_LDOUBLE && rc == RC_FLOAT)
1761 rc = RC_INT;
1762 #endif
1763 rc2 = RC2_TYPE(bt, rc);
1765 /* need to reload if:
1766 - constant
1767 - lvalue (need to dereference pointer)
1768 - already a register, but not in the right class */
1769 r = vtop->r & VT_VALMASK;
1770 r_ok = !(vtop->r & VT_LVAL) && (r < VT_CONST) && (reg_classes[r] & rc);
1771 r2_ok = !rc2 || ((vtop->r2 < VT_CONST) && (reg_classes[vtop->r2] & rc2));
1773 if (!r_ok || !r2_ok) {
1774 if (!r_ok)
1775 r = get_reg(rc);
1776 if (rc2) {
1777 int load_type = (bt == VT_QFLOAT) ? VT_DOUBLE : VT_PTRDIFF_T;
1778 int original_type = vtop->type.t;
1780 /* two register type load :
1781 expand to two words temporarily */
1782 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1783 /* load constant */
1784 unsigned long long ll = vtop->c.i;
1785 vtop->c.i = ll; /* first word */
1786 load(r, vtop);
1787 vtop->r = r; /* save register value */
1788 vpushi(ll >> 32); /* second word */
1789 } else if (vtop->r & VT_LVAL) {
                /* We do not want to modify the long long pointer here.
                   So we save any other instances down the stack */
1792 save_reg_upstack(vtop->r, 1);
1793 /* load from memory */
1794 vtop->type.t = load_type;
1795 load(r, vtop);
1796 vdup();
1797 vtop[-1].r = r; /* save register value */
1798 /* increment pointer to get second word */
1799 vtop->type.t = VT_PTRDIFF_T;
1800 gaddrof();
1801 vpushs(PTR_SIZE);
1802 gen_op('+');
1803 vtop->r |= VT_LVAL;
1804 vtop->type.t = load_type;
1805 } else {
1806 /* move registers */
1807 if (!r_ok)
1808 load(r, vtop);
1809 if (r2_ok && vtop->r2 < VT_CONST)
1810 goto done;
1811 vdup();
1812 vtop[-1].r = r; /* save register value */
1813 vtop->r = vtop[-1].r2;
1815 /* Allocate second register. Here we rely on the fact that
1816 get_reg() tries first to free r2 of an SValue. */
1817 r2 = get_reg(rc2);
1818 load(r2, vtop);
1819 vpop();
1820 /* write second register */
1821 vtop->r2 = r2;
1822 done:
1823 vtop->type.t = original_type;
1824 } else {
1825 if (vtop->r == VT_CMP)
1826 vset_VT_JMP();
1827 /* one register type load */
1828 load(r, vtop);
1831 vtop->r = r;
1832 #ifdef TCC_TARGET_C67
1833 /* uses register pairs for doubles */
1834 if (bt == VT_DOUBLE)
1835 vtop->r2 = r+1;
1836 #endif
1838 return r;
1841 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1842 ST_FUNC void gv2(int rc1, int rc2)
1844 /* generate more generic register first. But VT_JMP or VT_CMP
1845 values must be generated first in all cases to avoid possible
1846 reload errors */
1847 if (vtop->r != VT_CMP && rc1 <= rc2) {
1848 vswap();
1849 gv(rc1);
1850 vswap();
1851 gv(rc2);
1852 /* test if reload is needed for first register */
1853 if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
1854 vswap();
1855 gv(rc1);
1856 vswap();
1858 } else {
1859 gv(rc2);
1860 vswap();
1861 gv(rc1);
1862 vswap();
1863 /* test if reload is needed for first register */
1864 if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
1865 gv(rc2);
1870 #if PTR_SIZE == 4
/* expand a 64-bit value on the stack into two ints */
1872 ST_FUNC void lexpand(void)
1874 int u, v;
1875 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1876 v = vtop->r & (VT_VALMASK | VT_LVAL);
1877 if (v == VT_CONST) {
1878 vdup();
1879 vtop[0].c.i >>= 32;
1880 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1881 vdup();
1882 vtop[0].c.i += 4;
1883 } else {
1884 gv(RC_INT);
1885 vdup();
1886 vtop[0].r = vtop[-1].r2;
1887 vtop[0].r2 = vtop[-1].r2 = VT_CONST;
1889 vtop[0].type.t = vtop[-1].type.t = VT_INT | u;
1891 #endif
1893 #if PTR_SIZE == 4
1894 /* build a long long from two ints */
1895 static void lbuild(int t)
1897 gv2(RC_INT, RC_INT);
1898 vtop[-1].r2 = vtop[0].r;
1899 vtop[-1].type.t = t;
1900 vpop();
1902 #endif
1904 /* convert stack entry to register and duplicate its value in another
1905 register */
1906 static void gv_dup(void)
1908 int t, rc, r;
1910 t = vtop->type.t;
1911 #if PTR_SIZE == 4
1912 if ((t & VT_BTYPE) == VT_LLONG) {
1913 if (t & VT_BITFIELD) {
1914 gv(RC_INT);
1915 t = vtop->type.t;
1917 lexpand();
1918 gv_dup();
1919 vswap();
1920 vrotb(3);
1921 gv_dup();
1922 vrotb(4);
1923 /* stack: H L L1 H1 */
1924 lbuild(t);
1925 vrotb(3);
1926 vrotb(3);
1927 vswap();
1928 lbuild(t);
1929 vswap();
1930 return;
1932 #endif
1933 /* duplicate value */
1934 rc = RC_TYPE(t);
1935 gv(rc);
1936 r = get_reg(rc);
1937 vdup();
1938 load(r, vtop);
1939 vtop->r = r;
1942 #if PTR_SIZE == 4
1943 /* generate CPU independent (unsigned) long long operations */
1944 static void gen_opl(int op)
1946 int t, a, b, op1, c, i;
1947 int func;
1948 unsigned short reg_iret = REG_IRET;
1949 unsigned short reg_lret = REG_IRE2;
1950 SValue tmp;
1952 switch(op) {
1953 case '/':
1954 case TOK_PDIV:
1955 func = TOK___divdi3;
1956 goto gen_func;
1957 case TOK_UDIV:
1958 func = TOK___udivdi3;
1959 goto gen_func;
1960 case '%':
1961 func = TOK___moddi3;
1962 goto gen_mod_func;
1963 case TOK_UMOD:
1964 func = TOK___umoddi3;
1965 gen_mod_func:
1966 #ifdef TCC_ARM_EABI
1967 reg_iret = TREG_R2;
1968 reg_lret = TREG_R3;
1969 #endif
1970 gen_func:
1971 /* call generic long long function */
1972 vpush_global_sym(&func_old_type, func);
1973 vrott(3);
1974 gfunc_call(2);
1975 vpushi(0);
1976 vtop->r = reg_iret;
1977 vtop->r2 = reg_lret;
1978 break;
1979 case '^':
1980 case '&':
1981 case '|':
1982 case '*':
1983 case '+':
1984 case '-':
1985 //pv("gen_opl A",0,2);
1986 t = vtop->type.t;
1987 vswap();
1988 lexpand();
1989 vrotb(3);
1990 lexpand();
1991 /* stack: L1 H1 L2 H2 */
1992 tmp = vtop[0];
1993 vtop[0] = vtop[-3];
1994 vtop[-3] = tmp;
1995 tmp = vtop[-2];
1996 vtop[-2] = vtop[-3];
1997 vtop[-3] = tmp;
1998 vswap();
1999 /* stack: H1 H2 L1 L2 */
2000 //pv("gen_opl B",0,4);
2001 if (op == '*') {
2002 vpushv(vtop - 1);
2003 vpushv(vtop - 1);
2004 gen_op(TOK_UMULL);
2005 lexpand();
2006 /* stack: H1 H2 L1 L2 ML MH */
2007 for(i=0;i<4;i++)
2008 vrotb(6);
2009 /* stack: ML MH H1 H2 L1 L2 */
2010 tmp = vtop[0];
2011 vtop[0] = vtop[-2];
2012 vtop[-2] = tmp;
2013 /* stack: ML MH H1 L2 H2 L1 */
2014 gen_op('*');
2015 vrotb(3);
2016 vrotb(3);
2017 gen_op('*');
2018 /* stack: ML MH M1 M2 */
2019 gen_op('+');
2020 gen_op('+');
2021 } else if (op == '+' || op == '-') {
2022 /* XXX: add non carry method too (for MIPS or alpha) */
2023 if (op == '+')
2024 op1 = TOK_ADDC1;
2025 else
2026 op1 = TOK_SUBC1;
2027 gen_op(op1);
2028 /* stack: H1 H2 (L1 op L2) */
2029 vrotb(3);
2030 vrotb(3);
2031 gen_op(op1 + 1); /* TOK_xxxC2 */
2032 } else {
2033 gen_op(op);
2034 /* stack: H1 H2 (L1 op L2) */
2035 vrotb(3);
2036 vrotb(3);
2037 /* stack: (L1 op L2) H1 H2 */
2038 gen_op(op);
2039 /* stack: (L1 op L2) (H1 op H2) */
2041 /* stack: L H */
2042 lbuild(t);
2043 break;
2044 case TOK_SAR:
2045 case TOK_SHR:
2046 case TOK_SHL:
2047 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
2048 t = vtop[-1].type.t;
2049 vswap();
2050 lexpand();
2051 vrotb(3);
2052 /* stack: L H shift */
2053 c = (int)vtop->c.i;
2054 /* constant: simpler */
2055 /* NOTE: all comments are for SHL. the other cases are
2056 done by swapping words */
2057 vpop();
2058 if (op != TOK_SHL)
2059 vswap();
2060 if (c >= 32) {
2061 /* stack: L H */
2062 vpop();
2063 if (c > 32) {
2064 vpushi(c - 32);
2065 gen_op(op);
2067 if (op != TOK_SAR) {
2068 vpushi(0);
2069 } else {
2070 gv_dup();
2071 vpushi(31);
2072 gen_op(TOK_SAR);
2074 vswap();
2075 } else {
2076 vswap();
2077 gv_dup();
2078 /* stack: H L L */
2079 vpushi(c);
2080 gen_op(op);
2081 vswap();
2082 vpushi(32 - c);
2083 if (op == TOK_SHL)
2084 gen_op(TOK_SHR);
2085 else
2086 gen_op(TOK_SHL);
2087 vrotb(3);
2088 /* stack: L L H */
2089 vpushi(c);
2090 if (op == TOK_SHL)
2091 gen_op(TOK_SHL);
2092 else
2093 gen_op(TOK_SHR);
2094 gen_op('|');
2096 if (op != TOK_SHL)
2097 vswap();
2098 lbuild(t);
2099 } else {
2100 /* XXX: should provide a faster fallback on x86 ? */
2101 switch(op) {
2102 case TOK_SAR:
2103 func = TOK___ashrdi3;
2104 goto gen_func;
2105 case TOK_SHR:
2106 func = TOK___lshrdi3;
2107 goto gen_func;
2108 case TOK_SHL:
2109 func = TOK___ashldi3;
2110 goto gen_func;
2113 break;
2114 default:
2115 /* compare operations */
2116 t = vtop->type.t;
2117 vswap();
2118 lexpand();
2119 vrotb(3);
2120 lexpand();
2121 /* stack: L1 H1 L2 H2 */
2122 tmp = vtop[-1];
2123 vtop[-1] = vtop[-2];
2124 vtop[-2] = tmp;
2125 /* stack: L1 L2 H1 H2 */
2126 save_regs(4);
2127 /* compare high */
2128 op1 = op;
2129 /* when values are equal, we need to compare low words. since
2130 the jump is inverted, we invert the test too. */
2131 if (op1 == TOK_LT)
2132 op1 = TOK_LE;
2133 else if (op1 == TOK_GT)
2134 op1 = TOK_GE;
2135 else if (op1 == TOK_ULT)
2136 op1 = TOK_ULE;
2137 else if (op1 == TOK_UGT)
2138 op1 = TOK_UGE;
2139 a = 0;
2140 b = 0;
2141 gen_op(op1);
2142 if (op == TOK_NE) {
2143 b = gvtst(0, 0);
2144 } else {
2145 a = gvtst(1, 0);
2146 if (op != TOK_EQ) {
2147 /* generate non equal test */
2148 vpushi(0);
2149 vset_VT_CMP(TOK_NE);
2150 b = gvtst(0, 0);
2153 /* compare low. Always unsigned */
2154 op1 = op;
2155 if (op1 == TOK_LT)
2156 op1 = TOK_ULT;
2157 else if (op1 == TOK_LE)
2158 op1 = TOK_ULE;
2159 else if (op1 == TOK_GT)
2160 op1 = TOK_UGT;
2161 else if (op1 == TOK_GE)
2162 op1 = TOK_UGE;
2163 gen_op(op1);
2164 #if 0//def TCC_TARGET_I386
2165 if (op == TOK_NE) { gsym(b); break; }
2166 if (op == TOK_EQ) { gsym(a); break; }
2167 #endif
2168 gvtst_set(1, a);
2169 gvtst_set(0, b);
2170 break;
2173 #endif
2175 static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
2177 uint64_t x = (a >> 63 ? -a : a) / (b >> 63 ? -b : b);
2178 return (a ^ b) >> 63 ? -x : x;
2181 static int gen_opic_lt(uint64_t a, uint64_t b)
2183 return (a ^ (uint64_t)1 << 63) < (b ^ (uint64_t)1 << 63);
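/* Sketch of the two helpers above (interpretation): gen_opic_sdiv()
   does signed 64-bit division on values carried in uint64_t by dividing
   the magnitudes and re-applying the sign of (a ^ b); gen_opic_lt()
   implements signed '<' by flipping the sign bit of both operands,
   which maps signed ordering onto unsigned ordering (e.g. -1 becomes
   0x7fff...ffff and 0 becomes 0x8000...0000, so -1 < 0 still holds as
   an unsigned comparison). */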
/* handle integer constant optimizations and various
   machine-independent optimizations */
2188 static void gen_opic(int op)
2190 SValue *v1 = vtop - 1;
2191 SValue *v2 = vtop;
2192 int t1 = v1->type.t & VT_BTYPE;
2193 int t2 = v2->type.t & VT_BTYPE;
2194 int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2195 int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2196 uint64_t l1 = c1 ? v1->c.i : 0;
2197 uint64_t l2 = c2 ? v2->c.i : 0;
2198 int shm = (t1 == VT_LLONG) ? 63 : 31;
2200 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
2201 l1 = ((uint32_t)l1 |
2202 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
2203 if (t2 != VT_LLONG && (PTR_SIZE != 8 || t2 != VT_PTR))
2204 l2 = ((uint32_t)l2 |
2205 (v2->type.t & VT_UNSIGNED ? 0 : -(l2 & 0x80000000)));
2207 if (c1 && c2) {
2208 switch(op) {
2209 case '+': l1 += l2; break;
2210 case '-': l1 -= l2; break;
2211 case '&': l1 &= l2; break;
2212 case '^': l1 ^= l2; break;
2213 case '|': l1 |= l2; break;
2214 case '*': l1 *= l2; break;
2216 case TOK_PDIV:
2217 case '/':
2218 case '%':
2219 case TOK_UDIV:
2220 case TOK_UMOD:
2221 /* if division by zero, generate explicit division */
2222 if (l2 == 0) {
2223 if (const_wanted)
2224 tcc_error("division by zero in constant");
2225 goto general_case;
2227 switch(op) {
2228 default: l1 = gen_opic_sdiv(l1, l2); break;
2229 case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
2230 case TOK_UDIV: l1 = l1 / l2; break;
2231 case TOK_UMOD: l1 = l1 % l2; break;
2233 break;
2234 case TOK_SHL: l1 <<= (l2 & shm); break;
2235 case TOK_SHR: l1 >>= (l2 & shm); break;
2236 case TOK_SAR:
2237 l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
2238 break;
2239 /* tests */
2240 case TOK_ULT: l1 = l1 < l2; break;
2241 case TOK_UGE: l1 = l1 >= l2; break;
2242 case TOK_EQ: l1 = l1 == l2; break;
2243 case TOK_NE: l1 = l1 != l2; break;
2244 case TOK_ULE: l1 = l1 <= l2; break;
2245 case TOK_UGT: l1 = l1 > l2; break;
2246 case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
2247 case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
2248 case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
2249 case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
2250 /* logical */
2251 case TOK_LAND: l1 = l1 && l2; break;
2252 case TOK_LOR: l1 = l1 || l2; break;
2253 default:
2254 goto general_case;
2256 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
2257 l1 = ((uint32_t)l1 |
2258 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
2259 v1->c.i = l1;
2260 vtop--;
2261 } else {
2262 /* for commutative ops, move the constant into the second operand (c2) */
2263 if (c1 && (op == '+' || op == '&' || op == '^' ||
2264 op == '|' || op == '*' || op == TOK_EQ || op == TOK_NE)) {
2265 vswap();
2266 c2 = c1; //c = c1, c1 = c2, c2 = c;
2267 l2 = l1; //l = l1, l1 = l2, l2 = l;
2269 if (!const_wanted &&
2270 c1 && ((l1 == 0 &&
2271 (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
2272 (l1 == -1 && op == TOK_SAR))) {
2273 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
2274 vtop--;
2275 } else if (!const_wanted &&
2276 c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
2277 (op == '|' &&
2278 (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))) ||
2279 (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
2280 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
2281 if (l2 == 1)
2282 vtop->c.i = 0;
2283 vswap();
2284 vtop--;
2285 } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
2286 op == TOK_PDIV) &&
2287 l2 == 1) ||
2288 ((op == '+' || op == '-' || op == '|' || op == '^' ||
2289 op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
2290 l2 == 0) ||
2291 (op == '&' &&
2292 (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))))) {
2293 /* filter out NOP operations like x*1, x-0, x&-1... */
2294 vtop--;
2295 } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
2296 /* try to use shifts instead of muls or divs */
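/* e.g. x * 8 becomes x << 3 and an unsigned x / 8 becomes x >> 3.  Plain
   signed '/' is not listed above because a shift would round toward minus
   infinity; TOK_PDIV (pointer-difference scaling) may use SAR since that
   division is always exact. */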
2297 if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
2298 int n = -1;
2299 while (l2) {
2300 l2 >>= 1;
2301 n++;
2303 vtop->c.i = n;
2304 if (op == '*')
2305 op = TOK_SHL;
2306 else if (op == TOK_PDIV)
2307 op = TOK_SAR;
2308 else
2309 op = TOK_SHR;
2311 goto general_case;
2312 } else if (c2 && (op == '+' || op == '-') &&
2313 (((vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM))
2314 || (vtop[-1].r & (VT_VALMASK | VT_LVAL)) == VT_LOCAL)) {
2315 /* symbol + constant case */
2316 if (op == '-')
2317 l2 = -l2;
2318 l2 += vtop[-1].c.i;
2319 /* The backends can't always deal with addends to symbols
2320 larger than +-1<<31. Don't construct such. */
2321 if ((int)l2 != l2)
2322 goto general_case;
2323 vtop--;
2324 vtop->c.i = l2;
2325 } else {
2326 general_case:
2327 /* call low level op generator */
2328 if (t1 == VT_LLONG || t2 == VT_LLONG ||
2329 (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
2330 gen_opl(op);
2331 else
2332 gen_opi(op);
2337 /* generate a floating point operation with constant propagation */
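/* e.g. 1.5 + 2.25 is folded here (computed in long double); NaN or infinite
   operands, and division by zero outside of constant initializers, are left
   to the target code generator instead. */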
2338 static void gen_opif(int op)
2340 int c1, c2;
2341 SValue *v1, *v2;
2342 #if defined _MSC_VER && defined __x86_64__
2343 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2344 volatile
2345 #endif
2346 long double f1, f2;
2348 v1 = vtop - 1;
2349 v2 = vtop;
2350 /* currently, we cannot do computations with forward symbols */
2351 c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2352 c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2353 if (c1 && c2) {
2354 if (v1->type.t == VT_FLOAT) {
2355 f1 = v1->c.f;
2356 f2 = v2->c.f;
2357 } else if (v1->type.t == VT_DOUBLE) {
2358 f1 = v1->c.d;
2359 f2 = v2->c.d;
2360 } else {
2361 f1 = v1->c.ld;
2362 f2 = v2->c.ld;
2365 /* NOTE: we only do constant propagation if the operands are finite
2366 numbers (not NaN or infinity) (ANSI spec) */
2367 if (!ieee_finite(f1) || !ieee_finite(f2))
2368 goto general_case;
2370 switch(op) {
2371 case '+': f1 += f2; break;
2372 case '-': f1 -= f2; break;
2373 case '*': f1 *= f2; break;
2374 case '/':
2375 if (f2 == 0.0) {
2376 /* If not in initializer we need to potentially generate
2377 FP exceptions at runtime, otherwise we want to fold. */
2378 if (!const_wanted)
2379 goto general_case;
2381 f1 /= f2;
2382 break;
2383 /* XXX: also handles tests ? */
2384 default:
2385 goto general_case;
2387 /* XXX: overflow test ? */
2388 if (v1->type.t == VT_FLOAT) {
2389 v1->c.f = f1;
2390 } else if (v1->type.t == VT_DOUBLE) {
2391 v1->c.d = f1;
2392 } else {
2393 v1->c.ld = f1;
2395 vtop--;
2396 } else {
2397 general_case:
2398 gen_opf(op);
2402 static int pointed_size(CType *type)
2404 int align;
2405 return type_size(pointed_type(type), &align);
2408 static void vla_runtime_pointed_size(CType *type)
2410 int align;
2411 vla_runtime_type_size(pointed_type(type), &align);
2414 static inline int is_null_pointer(SValue *p)
2416 if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
2417 return 0;
2418 return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
2419 ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
2420 ((p->type.t & VT_BTYPE) == VT_PTR &&
2421 (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0) &&
2422 ((pointed_type(&p->type)->t & VT_BTYPE) == VT_VOID) &&
2423 0 == (pointed_type(&p->type)->t & (VT_CONSTANT | VT_VOLATILE))
2427 /* check types for comparison or subtraction of pointers */
2428 static void check_comparison_pointer_types(SValue *p1, SValue *p2, int op)
2430 CType *type1, *type2, tmp_type1, tmp_type2;
2431 int bt1, bt2;
2433 /* null pointers are accepted for all comparisons, as in gcc */
2434 if (is_null_pointer(p1) || is_null_pointer(p2))
2435 return;
2436 type1 = &p1->type;
2437 type2 = &p2->type;
2438 bt1 = type1->t & VT_BTYPE;
2439 bt2 = type2->t & VT_BTYPE;
2440 /* accept comparison between pointer and integer with a warning */
2441 if ((is_integer_btype(bt1) || is_integer_btype(bt2)) && op != '-') {
2442 if (op != TOK_LOR && op != TOK_LAND )
2443 tcc_warning("comparison between pointer and integer");
2444 return;
2447 /* both must be pointers or implicit function pointers */
2448 if (bt1 == VT_PTR) {
2449 type1 = pointed_type(type1);
2450 } else if (bt1 != VT_FUNC)
2451 goto invalid_operands;
2453 if (bt2 == VT_PTR) {
2454 type2 = pointed_type(type2);
2455 } else if (bt2 != VT_FUNC) {
2456 invalid_operands:
2457 tcc_error("invalid operands to binary %s", get_tok_str(op, NULL));
2459 if ((type1->t & VT_BTYPE) == VT_VOID ||
2460 (type2->t & VT_BTYPE) == VT_VOID)
2461 return;
2462 tmp_type1 = *type1;
2463 tmp_type2 = *type2;
2464 tmp_type1.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
2465 tmp_type2.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
2466 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
2467 /* gcc-like error if '-' is used */
2468 if (op == '-')
2469 goto invalid_operands;
2470 else
2471 tcc_warning("comparison of distinct pointer types lacks a cast");
2475 /* generic gen_op: handles operand type conversions and checks */
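/* e.g. int + unsigned yields unsigned, int + long long yields long long,
   int + double yields double, and pointer +/- integer is scaled by the
   element size before the addition is generated. */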
2476 ST_FUNC void gen_op(int op)
2478 int u, t1, t2, bt1, bt2, t;
2479 CType type1;
2481 redo:
2482 t1 = vtop[-1].type.t;
2483 t2 = vtop[0].type.t;
2484 bt1 = t1 & VT_BTYPE;
2485 bt2 = t2 & VT_BTYPE;
2487 if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
2488 tcc_error("operation on a struct");
2489 } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
2490 if (bt2 == VT_FUNC) {
2491 mk_pointer(&vtop->type);
2492 gaddrof();
2494 if (bt1 == VT_FUNC) {
2495 vswap();
2496 mk_pointer(&vtop->type);
2497 gaddrof();
2498 vswap();
2500 goto redo;
2501 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
2502 /* at least one operand is a pointer */
2503 /* relational op: both operands must be pointers */
2504 if (op >= TOK_ULT && op <= TOK_LOR) {
2505 check_comparison_pointer_types(vtop - 1, vtop, op);
2506 /* pointers are handled as unsigned */
2507 #if PTR_SIZE == 8
2508 t = VT_LLONG | VT_UNSIGNED;
2509 #else
2510 t = VT_INT | VT_UNSIGNED;
2511 #endif
2512 goto std_op;
2514 /* if both pointers, then it must be the '-' op */
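/* p1 - p2 is computed as the raw byte difference divided by the element
   size (TOK_PDIV), giving an element count of type ptrdiff_t. */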
2515 if (bt1 == VT_PTR && bt2 == VT_PTR) {
2516 if (op != '-')
2517 tcc_error("cannot use pointers here");
2518 check_comparison_pointer_types(vtop - 1, vtop, op);
2519 /* XXX: check that types are compatible */
2520 if (vtop[-1].type.t & VT_VLA) {
2521 vla_runtime_pointed_size(&vtop[-1].type);
2522 } else {
2523 vpushi(pointed_size(&vtop[-1].type));
2525 vrott(3);
2526 gen_opic(op);
2527 vtop->type.t = VT_PTRDIFF_T;
2528 vswap();
2529 gen_op(TOK_PDIV);
2530 } else {
2531 /* exactly one pointer : must be '+' or '-'. */
2532 if (op != '-' && op != '+')
2533 tcc_error("cannot use pointers here");
2534 /* Put pointer as first operand */
2535 if (bt2 == VT_PTR) {
2536 vswap();
2537 t = t1, t1 = t2, t2 = t;
2539 #if PTR_SIZE == 4
2540 if ((vtop[0].type.t & VT_BTYPE) == VT_LLONG)
2541 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2542 gen_cast_s(VT_INT);
2543 #endif
2544 type1 = vtop[-1].type;
2545 type1.t &= ~VT_ARRAY;
2546 if (vtop[-1].type.t & VT_VLA)
2547 vla_runtime_pointed_size(&vtop[-1].type);
2548 else {
2549 u = pointed_size(&vtop[-1].type);
2550 if (u < 0)
2551 tcc_error("unknown array element size");
2552 #if PTR_SIZE == 8
2553 vpushll(u);
2554 #else
2555 /* XXX: cast to int ? (long long case) */
2556 vpushi(u);
2557 #endif
2559 gen_op('*');
2560 #ifdef CONFIG_TCC_BCHECK
2561 if (tcc_state->do_bounds_check && !const_wanted) {
2562 /* if bounded pointers, we generate a special code to
2563 test bounds */
2564 if (op == '-') {
2565 vpushi(0);
2566 vswap();
2567 gen_op('-');
2569 vtop[-1].r &= ~VT_MUSTBOUND;
2570 gen_bounded_ptr_add();
2571 } else
2572 #endif
2574 gen_opic(op);
2576 /* restore the type in case gen_opic() swapped the operands */
2577 vtop->type = type1;
2579 } else if (is_float(bt1) || is_float(bt2)) {
2580 /* compute bigger type and do implicit casts */
2581 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
2582 t = VT_LDOUBLE;
2583 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
2584 t = VT_DOUBLE;
2585 } else {
2586 t = VT_FLOAT;
2588 /* floats can only be used for a few operations */
2589 if (op != '+' && op != '-' && op != '*' && op != '/' &&
2590 (op < TOK_ULT || op > TOK_GT))
2591 tcc_error("invalid operands for binary operation");
2592 goto std_op;
2593 } else if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL) {
2594 t = bt1 == VT_LLONG ? VT_LLONG : VT_INT;
2595 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (t | VT_UNSIGNED))
2596 t |= VT_UNSIGNED;
2597 t |= (VT_LONG & t1);
2598 goto std_op;
2599 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
2600 /* cast to biggest op */
2601 t = VT_LLONG | VT_LONG;
2602 if (bt1 == VT_LLONG)
2603 t &= t1;
2604 if (bt2 == VT_LLONG)
2605 t &= t2;
2606 /* convert to unsigned if it does not fit in a long long */
2607 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
2608 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
2609 t |= VT_UNSIGNED;
2610 goto std_op;
2611 } else {
2612 /* integer operations */
2613 t = VT_INT | (VT_LONG & (t1 | t2));
2614 /* convert to unsigned if it does not fit in an integer */
2615 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
2616 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
2617 t |= VT_UNSIGNED;
2618 std_op:
2619 /* XXX: currently, some unsigned operations are explicit, so
2620 we modify them here */
2621 if (t & VT_UNSIGNED) {
2622 if (op == TOK_SAR)
2623 op = TOK_SHR;
2624 else if (op == '/')
2625 op = TOK_UDIV;
2626 else if (op == '%')
2627 op = TOK_UMOD;
2628 else if (op == TOK_LT)
2629 op = TOK_ULT;
2630 else if (op == TOK_GT)
2631 op = TOK_UGT;
2632 else if (op == TOK_LE)
2633 op = TOK_ULE;
2634 else if (op == TOK_GE)
2635 op = TOK_UGE;
2637 vswap();
2638 type1.t = t;
2639 type1.ref = NULL;
2640 gen_cast(&type1);
2641 vswap();
2642 /* special case for shifts and long long: we keep the shift as
2643 an integer */
2644 if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
2645 type1.t = VT_INT;
2646 gen_cast(&type1);
2647 if (is_float(t))
2648 gen_opif(op);
2649 else
2650 gen_opic(op);
2651 if (op >= TOK_ULT && op <= TOK_GT) {
2652 /* relational op: the result is an int */
2653 vtop->type.t = VT_INT;
2654 } else {
2655 vtop->type.t = t;
2658 // Make sure that we have converted to an rvalue:
2659 if (vtop->r & VT_LVAL)
2660 gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
2663 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64 || defined TCC_TARGET_ARM
2664 #define gen_cvt_itof1 gen_cvt_itof
2665 #else
2666 /* generic itof for unsigned long long case */
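/* there is no direct unsigned-64-bit-to-float conversion here, so it is
   routed through the runtime helpers __floatundisf, __floatundidf and
   (for long double) __floatundixf. */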
2667 static void gen_cvt_itof1(int t)
2669 if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
2670 (VT_LLONG | VT_UNSIGNED)) {
2672 if (t == VT_FLOAT)
2673 vpush_global_sym(&func_old_type, TOK___floatundisf);
2674 #if LDOUBLE_SIZE != 8
2675 else if (t == VT_LDOUBLE)
2676 vpush_global_sym(&func_old_type, TOK___floatundixf);
2677 #endif
2678 else
2679 vpush_global_sym(&func_old_type, TOK___floatundidf);
2680 vrott(2);
2681 gfunc_call(1);
2682 vpushi(0);
2683 PUT_R_RET(vtop, t);
2684 } else {
2685 gen_cvt_itof(t);
2688 #endif
2690 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64
2691 #define gen_cvt_ftoi1 gen_cvt_ftoi
2692 #else
2693 /* generic ftoi for unsigned long long case */
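/* the reverse direction: float to unsigned 64-bit goes through
   __fixunssfdi, __fixunsdfdi or __fixunsxfdi. */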
2694 static void gen_cvt_ftoi1(int t)
2696 int st;
2697 if (t == (VT_LLONG | VT_UNSIGNED)) {
2698 /* not handled natively */
2699 st = vtop->type.t & VT_BTYPE;
2700 if (st == VT_FLOAT)
2701 vpush_global_sym(&func_old_type, TOK___fixunssfdi);
2702 #if LDOUBLE_SIZE != 8
2703 else if (st == VT_LDOUBLE)
2704 vpush_global_sym(&func_old_type, TOK___fixunsxfdi);
2705 #endif
2706 else
2707 vpush_global_sym(&func_old_type, TOK___fixunsdfdi);
2708 vrott(2);
2709 gfunc_call(1);
2710 vpushi(0);
2711 PUT_R_RET(vtop, t);
2712 } else {
2713 gen_cvt_ftoi(t);
2716 #endif
2718 /* special delayed cast for char/short */
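/* the value carries a pending narrowing (VT_MUSTCAST, value 2 meaning the
   wide type is long long): treat it as the wide type first, cast down to
   the declared char/short/_Bool type, then restore the declared type. */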
2719 static void force_charshort_cast(void)
2721 int sbt = BFGET(vtop->r, VT_MUSTCAST) == 2 ? VT_LLONG : VT_INT;
2722 int dbt = vtop->type.t;
2723 vtop->r &= ~VT_MUSTCAST;
2724 vtop->type.t = sbt;
2725 gen_cast_s(dbt == VT_BOOL ? VT_BYTE|VT_UNSIGNED : dbt);
2726 vtop->type.t = dbt;
2729 static void gen_cast_s(int t)
2731 CType type;
2732 type.t = t;
2733 type.ref = NULL;
2734 gen_cast(&type);
2737 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2738 static void gen_cast(CType *type)
2740 int sbt, dbt, sf, df, c;
2741 int dbt_bt, sbt_bt, ds, ss, bits, trunc;
2743 /* special delayed cast for char/short */
2744 if (vtop->r & VT_MUSTCAST)
2745 force_charshort_cast();
2747 /* bitfields first get cast to ints */
2748 if (vtop->type.t & VT_BITFIELD)
2749 gv(RC_INT);
2751 dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
2752 sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
2753 if (sbt == VT_FUNC)
2754 sbt = VT_PTR;
2756 again:
2757 if (sbt != dbt) {
2758 sf = is_float(sbt);
2759 df = is_float(dbt);
2760 dbt_bt = dbt & VT_BTYPE;
2761 sbt_bt = sbt & VT_BTYPE;
2763 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2764 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
2765 c &= (dbt != VT_LDOUBLE) | !!nocode_wanted;
2766 #endif
2767 if (c) {
2768 /* constant case: we can do it now */
2769 /* XXX: in ISOC, cannot do it if error in convert */
2770 if (sbt == VT_FLOAT)
2771 vtop->c.ld = vtop->c.f;
2772 else if (sbt == VT_DOUBLE)
2773 vtop->c.ld = vtop->c.d;
2775 if (df) {
2776 if (sbt_bt == VT_LLONG) {
2777 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
2778 vtop->c.ld = vtop->c.i;
2779 else
2780 vtop->c.ld = -(long double)-vtop->c.i;
2781 } else if(!sf) {
2782 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
2783 vtop->c.ld = (uint32_t)vtop->c.i;
2784 else
2785 vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
2788 if (dbt == VT_FLOAT)
2789 vtop->c.f = (float)vtop->c.ld;
2790 else if (dbt == VT_DOUBLE)
2791 vtop->c.d = (double)vtop->c.ld;
2792 } else if (sf && dbt == VT_BOOL) {
2793 vtop->c.i = (vtop->c.ld != 0);
2794 } else {
2795 if(sf)
2796 vtop->c.i = vtop->c.ld;
2797 else if (sbt_bt == VT_LLONG || (PTR_SIZE == 8 && sbt == VT_PTR))
2798 ;
2799 else if (sbt & VT_UNSIGNED)
2800 vtop->c.i = (uint32_t)vtop->c.i;
2801 else
2802 vtop->c.i = ((uint32_t)vtop->c.i | -(vtop->c.i & 0x80000000));
2804 if (dbt_bt == VT_LLONG || (PTR_SIZE == 8 && dbt == VT_PTR))
2805 ;
2806 else if (dbt == VT_BOOL)
2807 vtop->c.i = (vtop->c.i != 0);
2808 else {
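/* narrow a constant to the destination width: mask to 8/16/32 bits and,
   for signed targets, sign-extend from the new top bit.  e.g. (char)0x1ff
   becomes -1 while (unsigned char)0x1ff becomes 0xff. */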
2809 uint32_t m = dbt_bt == VT_BYTE ? 0xff :
2810 dbt_bt == VT_SHORT ? 0xffff :
2811 0xffffffff;
2812 vtop->c.i &= m;
2813 if (!(dbt & VT_UNSIGNED))
2814 vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
2817 goto done;
2819 } else if (dbt == VT_BOOL
2820 && (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM))
2821 == (VT_CONST | VT_SYM)) {
2822 /* addresses are considered non-zero (see tcctest.c:sinit23) */
2823 vtop->r = VT_CONST;
2824 vtop->c.i = 1;
2825 goto done;
2828 /* cannot generate code for global or static initializers */
2829 if (STATIC_DATA_WANTED)
2830 goto done;
2832 /* non constant case: generate code */
2833 if (dbt == VT_BOOL) {
2834 gen_test_zero(TOK_NE);
2835 goto done;
2838 if (sf || df) {
2839 if (sf && df) {
2840 /* convert from fp to fp */
2841 gen_cvt_ftof(dbt);
2842 } else if (df) {
2843 /* convert int to fp */
2844 gen_cvt_itof1(dbt);
2845 } else {
2846 /* convert fp to int */
2847 sbt = dbt;
2848 if (dbt_bt != VT_LLONG && dbt_bt != VT_INT)
2849 sbt = VT_INT;
2850 gen_cvt_ftoi1(sbt);
2851 goto again; /* may need char/short cast */
2853 goto done;
2856 ds = btype_size(dbt_bt);
2857 ss = btype_size(sbt_bt);
2858 if (ds == 0 || ss == 0) {
2859 if (dbt_bt == VT_VOID)
2860 goto done;
2861 cast_error(&vtop->type, type);
2863 if (IS_ENUM(type->t) && type->ref->c < 0)
2864 tcc_error("cast to incomplete type");
2866 /* same size and no sign conversion needed */
2867 if (ds == ss && ds >= 4)
2868 goto done;
2869 if (dbt_bt == VT_PTR || sbt_bt == VT_PTR) {
2870 tcc_warning("cast between pointer and integer of different size");
2871 if (sbt_bt == VT_PTR) {
2872 /* put integer type to allow logical operations below */
2873 vtop->type.t = (PTR_SIZE == 8 ? VT_LLONG : VT_INT);
2877 /* processor allows { int a = 0, b = *(char*)&a; }
2878 That means that if we cast to a smaller width, we can simply
2879 change the type and still read the value later. */
2880 #define ALLOW_SUBTYPE_ACCESS 1
2882 if (ALLOW_SUBTYPE_ACCESS && (vtop->r & VT_LVAL)) {
2883 /* value still in memory */
2884 if (ds <= ss)
2885 goto done;
2886 /* ss <= 4 here */
2887 if (ds <= 4) {
2888 gv(RC_INT);
2889 goto done; /* no 64bit involved */
2892 gv(RC_INT);
2894 trunc = 0;
2895 #if PTR_SIZE == 4
2896 if (ds == 8) {
2897 /* generate high word */
2898 if (sbt & VT_UNSIGNED) {
2899 vpushi(0);
2900 gv(RC_INT);
2901 } else {
2902 gv_dup();
2903 vpushi(31);
2904 gen_op(TOK_SAR);
2906 lbuild(dbt);
2907 } else if (ss == 8) {
2908 /* from long long: just take low order word */
2909 lexpand();
2910 vpop();
2912 ss = 4;
2914 #elif PTR_SIZE == 8
2915 if (ds == 8) {
2916 /* need to convert from 32bit to 64bit */
2917 if (sbt & VT_UNSIGNED) {
2918 #if defined(TCC_TARGET_RISCV64)
2919 /* RISC-V keeps 32bit vals in registers sign-extended.
2920 So here we need a zero-extension. */
2921 trunc = 32;
2922 #else
2923 goto done;
2924 #endif
2925 } else {
2926 gen_cvt_sxtw();
2927 goto done;
2929 ss = ds, ds = 4, dbt = sbt;
2930 } else if (ss == 8) {
2931 /* XXX some architectures (e.g. risc-v) would prefer this to be
2932 merely a 32-to-64 sign or zero-extension. */
2934 trunc = 32; /* zero upper 32 bits */
2935 } else {
2936 ss = 4;
2938 #endif
2940 if (ds >= ss)
2941 goto done;
2942 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64 || defined TCC_TARGET_ARM64
2943 if (ss == 4) {
2944 gen_cvt_csti(dbt);
2945 goto done;
2947 #endif
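/* generic narrowing with shifts: e.g. int -> signed char shifts left by 24
   then arithmetic-shifts right by 24; when trunc is 32 (zero-extending
   32 -> 64 bits on some 64-bit targets) the final logical shift clears the
   upper half instead. */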
2948 bits = (ss - ds) * 8;
2949 /* for unsigned, gen_op will convert SAR to SHR */
2950 vtop->type.t = (ss == 8 ? VT_LLONG : VT_INT) | (dbt & VT_UNSIGNED);
2951 vpushi(bits);
2952 gen_op(TOK_SHL);
2953 vpushi(bits - trunc);
2954 gen_op(TOK_SAR);
2955 vpushi(trunc);
2956 gen_op(TOK_SHR);
2958 done:
2959 vtop->type = *type;
2960 vtop->type.t &= ~ ( VT_CONSTANT | VT_VOLATILE | VT_ARRAY );
2963 /* return type size as known at compile time. Put alignment at 'a' */
2964 ST_FUNC int type_size(CType *type, int *a)
2966 Sym *s;
2967 int bt;
2969 bt = type->t & VT_BTYPE;
2970 if (bt == VT_STRUCT) {
2971 /* struct/union */
2972 s = type->ref;
2973 *a = s->r;
2974 return s->c;
2975 } else if (bt == VT_PTR) {
2976 if (type->t & VT_ARRAY) {
2977 int ts;
2979 s = type->ref;
2980 ts = type_size(&s->type, a);
2982 if (ts < 0 && s->c < 0)
2983 ts = -ts;
2985 return ts * s->c;
2986 } else {
2987 *a = PTR_SIZE;
2988 return PTR_SIZE;
2990 } else if (IS_ENUM(type->t) && type->ref->c < 0) {
2991 return -1; /* incomplete enum */
2992 } else if (bt == VT_LDOUBLE) {
2993 *a = LDOUBLE_ALIGN;
2994 return LDOUBLE_SIZE;
2995 } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
2996 #ifdef TCC_TARGET_I386
2997 #ifdef TCC_TARGET_PE
2998 *a = 8;
2999 #else
3000 *a = 4;
3001 #endif
3002 #elif defined(TCC_TARGET_ARM)
3003 #ifdef TCC_ARM_EABI
3004 *a = 8;
3005 #else
3006 *a = 4;
3007 #endif
3008 #else
3009 *a = 8;
3010 #endif
3011 return 8;
3012 } else if (bt == VT_INT || bt == VT_FLOAT) {
3013 *a = 4;
3014 return 4;
3015 } else if (bt == VT_SHORT) {
3016 *a = 2;
3017 return 2;
3018 } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
3019 *a = 8;
3020 return 16;
3021 } else {
3022 /* char, void, function, _Bool */
3023 *a = 1;
3024 return 1;
3028 /* push type size as known at runtime on top of value stack. Put
3029 alignment at 'a' */
3030 ST_FUNC void vla_runtime_type_size(CType *type, int *a)
3032 if (type->t & VT_VLA) {
3033 type_size(&type->ref->type, a);
3034 vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
3035 } else {
3036 vpushi(type_size(type, a));
3040 /* return the pointed type of t */
3041 static inline CType *pointed_type(CType *type)
3043 return &type->ref->type;
3046 /* modify type so that it is a pointer to the original type. */
3047 ST_FUNC void mk_pointer(CType *type)
3049 Sym *s;
3050 s = sym_push(SYM_FIELD, type, 0, -1);
3051 type->t = VT_PTR | (type->t & VT_STORAGE);
3052 type->ref = s;
3055 /* compare function types. OLD functions match any new functions */
3056 static int is_compatible_func(CType *type1, CType *type2)
3058 Sym *s1, *s2;
3060 s1 = type1->ref;
3061 s2 = type2->ref;
3062 if (s1->f.func_call != s2->f.func_call)
3063 return 0;
3064 if (s1->f.func_type != s2->f.func_type
3065 && s1->f.func_type != FUNC_OLD
3066 && s2->f.func_type != FUNC_OLD)
3067 return 0;
3068 /* we should check the function return type for FUNC_OLD too
3069 but that causes problems with the internally used support
3070 functions such as TOK_memmove */
3071 if (s1->f.func_type == FUNC_OLD && !s1->next)
3072 return 1;
3073 if (s2->f.func_type == FUNC_OLD && !s2->next)
3074 return 1;
3075 for (;;) {
3076 if (!is_compatible_unqualified_types(&s1->type, &s2->type))
3077 return 0;
3078 s1 = s1->next;
3079 s2 = s2->next;
3080 if (!s1)
3081 return !s2;
3082 if (!s2)
3083 return 0;
3087 /* return true if type1 and type2 are the same. If unqualified is
3088 true, qualifiers on the types are ignored.
3090 static int compare_types(CType *type1, CType *type2, int unqualified)
3092 int bt1, t1, t2;
3094 t1 = type1->t & VT_TYPE;
3095 t2 = type2->t & VT_TYPE;
3096 if (unqualified) {
3097 /* strip qualifiers before comparing */
3098 t1 &= ~(VT_CONSTANT | VT_VOLATILE);
3099 t2 &= ~(VT_CONSTANT | VT_VOLATILE);
3102 /* Default vs. explicit signedness only matters for char */
3103 if ((t1 & VT_BTYPE) != VT_BYTE) {
3104 t1 &= ~VT_DEFSIGN;
3105 t2 &= ~VT_DEFSIGN;
3107 /* XXX: bitfields ? */
3108 if (t1 != t2)
3109 return 0;
3111 if ((t1 & VT_ARRAY)
3112 && !(type1->ref->c < 0
3113 || type2->ref->c < 0
3114 || type1->ref->c == type2->ref->c))
3115 return 0;
3117 /* test more complicated cases */
3118 bt1 = t1 & VT_BTYPE;
3119 if (bt1 == VT_PTR) {
3120 type1 = pointed_type(type1);
3121 type2 = pointed_type(type2);
3122 return is_compatible_types(type1, type2);
3123 } else if (bt1 == VT_STRUCT) {
3124 return (type1->ref == type2->ref);
3125 } else if (bt1 == VT_FUNC) {
3126 return is_compatible_func(type1, type2);
3127 } else if (IS_ENUM(type1->t) || IS_ENUM(type2->t)) {
3128 return type1->ref == type2->ref;
3129 } else {
3130 return 1;
3134 /* return true if type1 and type2 are exactly the same (including
3135 qualifiers).
3137 static int is_compatible_types(CType *type1, CType *type2)
3139 return compare_types(type1,type2,0);
3142 /* return true if type1 and type2 are the same (ignoring qualifiers).
3144 static int is_compatible_unqualified_types(CType *type1, CType *type2)
3146 return compare_types(type1,type2,1);
3149 /* print a type. If 'varstr' is not NULL, then the variable is also
3150 printed in the type */
3151 /* XXX: union */
3152 /* XXX: add array and function pointers */
3153 static void type_to_str(char *buf, int buf_size,
3154 CType *type, const char *varstr)
3156 int bt, v, t;
3157 Sym *s, *sa;
3158 char buf1[256];
3159 const char *tstr;
3161 t = type->t;
3162 bt = t & VT_BTYPE;
3163 buf[0] = '\0';
3165 if (t & VT_EXTERN)
3166 pstrcat(buf, buf_size, "extern ");
3167 if (t & VT_STATIC)
3168 pstrcat(buf, buf_size, "static ");
3169 if (t & VT_TYPEDEF)
3170 pstrcat(buf, buf_size, "typedef ");
3171 if (t & VT_INLINE)
3172 pstrcat(buf, buf_size, "inline ");
3173 if (t & VT_VOLATILE)
3174 pstrcat(buf, buf_size, "volatile ");
3175 if (t & VT_CONSTANT)
3176 pstrcat(buf, buf_size, "const ");
3178 if (((t & VT_DEFSIGN) && bt == VT_BYTE)
3179 || ((t & VT_UNSIGNED)
3180 && (bt == VT_SHORT || bt == VT_INT || bt == VT_LLONG)
3181 && !IS_ENUM(t)
3183 pstrcat(buf, buf_size, (t & VT_UNSIGNED) ? "unsigned " : "signed ");
3185 buf_size -= strlen(buf);
3186 buf += strlen(buf);
3188 switch(bt) {
3189 case VT_VOID:
3190 tstr = "void";
3191 goto add_tstr;
3192 case VT_BOOL:
3193 tstr = "_Bool";
3194 goto add_tstr;
3195 case VT_BYTE:
3196 tstr = "char";
3197 goto add_tstr;
3198 case VT_SHORT:
3199 tstr = "short";
3200 goto add_tstr;
3201 case VT_INT:
3202 tstr = "int";
3203 goto maybe_long;
3204 case VT_LLONG:
3205 tstr = "long long";
3206 maybe_long:
3207 if (t & VT_LONG)
3208 tstr = "long";
3209 if (!IS_ENUM(t))
3210 goto add_tstr;
3211 tstr = "enum ";
3212 goto tstruct;
3213 case VT_FLOAT:
3214 tstr = "float";
3215 goto add_tstr;
3216 case VT_DOUBLE:
3217 tstr = "double";
3218 goto add_tstr;
3219 case VT_LDOUBLE:
3220 tstr = "long double";
3221 add_tstr:
3222 pstrcat(buf, buf_size, tstr);
3223 break;
3224 case VT_STRUCT:
3225 tstr = "struct ";
3226 if (IS_UNION(t))
3227 tstr = "union ";
3228 tstruct:
3229 pstrcat(buf, buf_size, tstr);
3230 v = type->ref->v & ~SYM_STRUCT;
3231 if (v >= SYM_FIRST_ANOM)
3232 pstrcat(buf, buf_size, "<anonymous>");
3233 else
3234 pstrcat(buf, buf_size, get_tok_str(v, NULL));
3235 break;
3236 case VT_FUNC:
3237 s = type->ref;
3238 buf1[0]=0;
3239 if (varstr && '*' == *varstr) {
3240 pstrcat(buf1, sizeof(buf1), "(");
3241 pstrcat(buf1, sizeof(buf1), varstr);
3242 pstrcat(buf1, sizeof(buf1), ")");
3244 pstrcat(buf1, buf_size, "(");
3245 sa = s->next;
3246 while (sa != NULL) {
3247 char buf2[256];
3248 type_to_str(buf2, sizeof(buf2), &sa->type, NULL);
3249 pstrcat(buf1, sizeof(buf1), buf2);
3250 sa = sa->next;
3251 if (sa)
3252 pstrcat(buf1, sizeof(buf1), ", ");
3254 if (s->f.func_type == FUNC_ELLIPSIS)
3255 pstrcat(buf1, sizeof(buf1), ", ...");
3256 pstrcat(buf1, sizeof(buf1), ")");
3257 type_to_str(buf, buf_size, &s->type, buf1);
3258 goto no_var;
3259 case VT_PTR:
3260 s = type->ref;
3261 if (t & VT_ARRAY) {
3262 if (varstr && '*' == *varstr)
3263 snprintf(buf1, sizeof(buf1), "(%s)[%d]", varstr, s->c);
3264 else
3265 snprintf(buf1, sizeof(buf1), "%s[%d]", varstr ? varstr : "", s->c);
3266 type_to_str(buf, buf_size, &s->type, buf1);
3267 goto no_var;
3269 pstrcpy(buf1, sizeof(buf1), "*");
3270 if (t & VT_CONSTANT)
3271 pstrcat(buf1, buf_size, "const ");
3272 if (t & VT_VOLATILE)
3273 pstrcat(buf1, buf_size, "volatile ");
3274 if (varstr)
3275 pstrcat(buf1, sizeof(buf1), varstr);
3276 type_to_str(buf, buf_size, &s->type, buf1);
3277 goto no_var;
3279 if (varstr) {
3280 pstrcat(buf, buf_size, " ");
3281 pstrcat(buf, buf_size, varstr);
3283 no_var: ;
3286 static void cast_error(CType *st, CType *dt)
3288 char buf1[256], buf2[256];
3289 type_to_str(buf1, sizeof(buf1), st, NULL);
3290 type_to_str(buf2, sizeof(buf2), dt, NULL);
3291 tcc_error("cannot convert '%s' to '%s'", buf1, buf2);
3294 /* verify type compatibility to store vtop in 'dt' type */
3295 static void verify_assign_cast(CType *dt)
3297 CType *st, *type1, *type2;
3298 int dbt, sbt, qualwarn, lvl;
3300 st = &vtop->type; /* source type */
3301 dbt = dt->t & VT_BTYPE;
3302 sbt = st->t & VT_BTYPE;
3303 if (dt->t & VT_CONSTANT)
3304 tcc_warning("assignment of read-only location");
3305 switch(dbt) {
3306 case VT_VOID:
3307 if (sbt != dbt)
3308 tcc_error("assignment to void expression");
3309 break;
3310 case VT_PTR:
3311 /* special cases for pointers */
3312 /* '0' can also be a pointer */
3313 if (is_null_pointer(vtop))
3314 break;
3315 /* accept implicit pointer to integer cast with warning */
3316 if (is_integer_btype(sbt)) {
3317 tcc_warning("assignment makes pointer from integer without a cast");
3318 break;
3320 type1 = pointed_type(dt);
3321 if (sbt == VT_PTR)
3322 type2 = pointed_type(st);
3323 else if (sbt == VT_FUNC)
3324 type2 = st; /* a function is implicitly a function pointer */
3325 else
3326 goto error;
3327 if (is_compatible_types(type1, type2))
3328 break;
3329 for (qualwarn = lvl = 0;; ++lvl) {
3330 if (((type2->t & VT_CONSTANT) && !(type1->t & VT_CONSTANT)) ||
3331 ((type2->t & VT_VOLATILE) && !(type1->t & VT_VOLATILE)))
3332 qualwarn = 1;
3333 dbt = type1->t & (VT_BTYPE|VT_LONG);
3334 sbt = type2->t & (VT_BTYPE|VT_LONG);
3335 if (dbt != VT_PTR || sbt != VT_PTR)
3336 break;
3337 type1 = pointed_type(type1);
3338 type2 = pointed_type(type2);
3340 if (!is_compatible_unqualified_types(type1, type2)) {
3341 if ((dbt == VT_VOID || sbt == VT_VOID) && lvl == 0) {
3342 /* void * can match anything */
3343 } else if (dbt == sbt
3344 && is_integer_btype(sbt & VT_BTYPE)
3345 && IS_ENUM(type1->t) + IS_ENUM(type2->t)
3346 + !!((type1->t ^ type2->t) & VT_UNSIGNED) < 2) {
3347 /* Like GCC don't warn by default for merely changes
3348 in pointer target signedness. Do warn for different
3349 base types, though, in particular for unsigned enums
3350 and signed int targets. */
3351 } else {
3352 tcc_warning("assignment from incompatible pointer type");
3353 break;
3356 if (qualwarn)
3357 tcc_warning("assignment discards qualifiers from pointer target type");
3358 break;
3359 case VT_BYTE:
3360 case VT_SHORT:
3361 case VT_INT:
3362 case VT_LLONG:
3363 if (sbt == VT_PTR || sbt == VT_FUNC) {
3364 tcc_warning("assignment makes integer from pointer without a cast");
3365 } else if (sbt == VT_STRUCT) {
3366 goto case_VT_STRUCT;
3368 /* XXX: more tests */
3369 break;
3370 case VT_STRUCT:
3371 case_VT_STRUCT:
3372 if (!is_compatible_unqualified_types(dt, st)) {
3373 error:
3374 cast_error(st, dt);
3376 break;
3380 static void gen_assign_cast(CType *dt)
3382 verify_assign_cast(dt);
3383 gen_cast(dt);
3386 /* store vtop in lvalue pushed on stack */
3387 ST_FUNC void vstore(void)
3389 int sbt, dbt, ft, r, size, align, bit_size, bit_pos, delayed_cast;
3391 ft = vtop[-1].type.t;
3392 sbt = vtop->type.t & VT_BTYPE;
3393 dbt = ft & VT_BTYPE;
3395 verify_assign_cast(&vtop[-1].type);
3397 if (sbt == VT_STRUCT) {
3398 /* if structure, only generate pointer */
3399 /* structure assignment : generate memcpy */
3400 /* XXX: optimize if small size */
3401 size = type_size(&vtop->type, &align);
3403 /* destination */
3404 vswap();
3405 vtop->type.t = VT_PTR;
3406 gaddrof();
3408 /* address of memcpy() */
3409 #ifdef TCC_ARM_EABI
3410 if(!(align & 7))
3411 vpush_global_sym(&func_old_type, TOK_memcpy8);
3412 else if(!(align & 3))
3413 vpush_global_sym(&func_old_type, TOK_memcpy4);
3414 else
3415 #endif
3416 /* Use memmove, rather than memcpy, as dest and src may be the same: */
3417 vpush_global_sym(&func_old_type, TOK_memmove);
3419 vswap();
3420 /* source */
3421 vpushv(vtop - 2);
3422 vtop->type.t = VT_PTR;
3423 gaddrof();
3424 /* type size */
3425 vpushi(size);
3426 gfunc_call(3);
3427 /* leave source on stack */
3429 } else if (ft & VT_BITFIELD) {
3430 /* bitfield store handling */
3432 /* save lvalue as expression result (example: s.b = s.a = n;) */
3433 vdup(), vtop[-1] = vtop[-2];
3435 bit_pos = BIT_POS(ft);
3436 bit_size = BIT_SIZE(ft);
3437 /* remove bit field info to avoid loops */
3438 vtop[-1].type.t = ft & ~VT_STRUCT_MASK;
3440 if (dbt == VT_BOOL) {
3441 gen_cast(&vtop[-1].type);
3442 vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
3444 r = adjust_bf(vtop - 1, bit_pos, bit_size);
3445 if (dbt != VT_BOOL) {
3446 gen_cast(&vtop[-1].type);
3447 dbt = vtop[-1].type.t & VT_BTYPE;
3449 if (r == VT_STRUCT) {
3450 store_packed_bf(bit_pos, bit_size);
3451 } else {
3452 unsigned long long mask = (1ULL << bit_size) - 1;
3453 if (dbt != VT_BOOL) {
3454 /* mask source */
3455 if (dbt == VT_LLONG)
3456 vpushll(mask);
3457 else
3458 vpushi((unsigned)mask);
3459 gen_op('&');
3461 /* shift source */
3462 vpushi(bit_pos);
3463 gen_op(TOK_SHL);
3464 vswap();
3465 /* duplicate destination */
3466 vdup();
3467 vrott(3);
3468 /* load destination, mask and or with source */
3469 if (dbt == VT_LLONG)
3470 vpushll(~(mask << bit_pos));
3471 else
3472 vpushi(~((unsigned)mask << bit_pos));
3473 gen_op('&');
3474 gen_op('|');
3475 /* store result */
3476 vstore();
3477 /* ... and discard */
3478 vpop();
3480 } else if (dbt == VT_VOID) {
3481 --vtop;
3482 } else {
3483 /* optimize char/short casts */
3484 delayed_cast = 0;
3485 if ((dbt == VT_BYTE || dbt == VT_SHORT)
3486 && is_integer_btype(sbt)
3488 if ((vtop->r & VT_MUSTCAST)
3489 && btype_size(dbt) > btype_size(sbt)
3491 force_charshort_cast();
3492 delayed_cast = 1;
3493 } else {
3494 gen_cast(&vtop[-1].type);
3497 #ifdef CONFIG_TCC_BCHECK
3498 /* bound check case */
3499 if (vtop[-1].r & VT_MUSTBOUND) {
3500 vswap();
3501 gbound();
3502 vswap();
3504 #endif
3505 gv(RC_TYPE(dbt)); /* generate value */
3507 if (delayed_cast) {
3508 vtop->r |= BFVAL(VT_MUSTCAST, (sbt == VT_LLONG) + 1);
3509 //tcc_warning("delay cast %x -> %x", sbt, dbt);
3510 vtop->type.t = ft & VT_TYPE;
3513 /* if lvalue was saved on stack, must read it */
3514 if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
3515 SValue sv;
3516 r = get_reg(RC_INT);
3517 sv.type.t = VT_PTRDIFF_T;
3518 sv.r = VT_LOCAL | VT_LVAL;
3519 sv.c.i = vtop[-1].c.i;
3520 load(r, &sv);
3521 vtop[-1].r = r | VT_LVAL;
3524 r = vtop->r & VT_VALMASK;
3525 /* two word case handling :
3526 store second register at word + 4 (or +8 for x86-64) */
3527 if (USING_TWO_WORDS(dbt)) {
3528 int load_type = (dbt == VT_QFLOAT) ? VT_DOUBLE : VT_PTRDIFF_T;
3529 vtop[-1].type.t = load_type;
3530 store(r, vtop - 1);
3531 vswap();
3532 /* convert to int to increment easily */
3533 vtop->type.t = VT_PTRDIFF_T;
3534 gaddrof();
3535 vpushs(PTR_SIZE);
3536 gen_op('+');
3537 vtop->r |= VT_LVAL;
3538 vswap();
3539 vtop[-1].type.t = load_type;
3540 /* XXX: it works because r2 is spilled last ! */
3541 store(vtop->r2, vtop - 1);
3542 } else {
3543 /* single word */
3544 store(r, vtop - 1);
3546 vswap();
3547 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
3551 /* handle pre/post increment and decrement; 'post' is true for postfix. c is the token ++ or -- */
3552 ST_FUNC void inc(int post, int c)
3554 test_lvalue();
3555 vdup(); /* save lvalue */
3556 if (post) {
3557 gv_dup(); /* duplicate value */
3558 vrotb(3);
3559 vrotb(3);
3561 /* add constant */
3562 vpushi(c - TOK_MID);
3563 gen_op('+');
3564 vstore(); /* store value */
3565 if (post)
3566 vpop(); /* if post op, return saved value */
3569 ST_FUNC void parse_mult_str (CString *astr, const char *msg)
3571 /* read the string */
3572 if (tok != TOK_STR)
3573 expect(msg);
3574 cstr_new(astr);
3575 while (tok == TOK_STR) {
3576 /* XXX: add \0 handling too ? */
3577 cstr_cat(astr, tokc.str.data, -1);
3578 next();
3580 cstr_ccat(astr, '\0');
3583 /* If I is >= 1 and a power of two, returns log2(i)+1.
3584 If I is 0 returns 0. */
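/* e.g. exact_log2p1(1) == 1, exact_log2p1(8) == 4, exact_log2p1(4096) == 13;
   this lets attribute alignments be stored compactly as a small exponent. */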
3585 static int exact_log2p1(int i)
3587 int ret;
3588 if (!i)
3589 return 0;
3590 for (ret = 1; i >= 1 << 8; ret += 8)
3591 i >>= 8;
3592 if (i >= 1 << 4)
3593 ret += 4, i >>= 4;
3594 if (i >= 1 << 2)
3595 ret += 2, i >>= 2;
3596 if (i >= 1 << 1)
3597 ret++;
3598 return ret;
3601 /* Parse __attribute__((...)) GNUC extension. */
3602 static void parse_attribute(AttributeDef *ad)
3604 int t, n;
3605 CString astr;
3607 redo:
3608 if (tok != TOK_ATTRIBUTE1 && tok != TOK_ATTRIBUTE2)
3609 return;
3610 next();
3611 skip('(');
3612 skip('(');
3613 while (tok != ')') {
3614 if (tok < TOK_IDENT)
3615 expect("attribute name");
3616 t = tok;
3617 next();
3618 switch(t) {
3619 case TOK_CLEANUP1:
3620 case TOK_CLEANUP2:
3622 Sym *s;
3624 skip('(');
3625 s = sym_find(tok);
3626 if (!s) {
3627 tcc_warning("implicit declaration of function '%s'",
3628 get_tok_str(tok, &tokc));
3629 s = external_global_sym(tok, &func_old_type);
3631 ad->cleanup_func = s;
3632 next();
3633 skip(')');
3634 break;
3636 case TOK_CONSTRUCTOR1:
3637 case TOK_CONSTRUCTOR2:
3638 ad->a.constructor = 1;
3639 break;
3640 case TOK_DESTRUCTOR1:
3641 case TOK_DESTRUCTOR2:
3642 ad->a.destructor = 1;
3643 break;
3644 case TOK_SECTION1:
3645 case TOK_SECTION2:
3646 skip('(');
3647 parse_mult_str(&astr, "section name");
3648 ad->section = find_section(tcc_state, (char *)astr.data);
3649 skip(')');
3650 cstr_free(&astr);
3651 break;
3652 case TOK_ALIAS1:
3653 case TOK_ALIAS2:
3654 skip('(');
3655 parse_mult_str(&astr, "alias(\"target\")");
3656 ad->alias_target = /* save string as token, for later */
3657 tok_alloc((char*)astr.data, astr.size-1)->tok;
3658 skip(')');
3659 cstr_free(&astr);
3660 break;
3661 case TOK_VISIBILITY1:
3662 case TOK_VISIBILITY2:
3663 skip('(');
3664 parse_mult_str(&astr,
3665 "visibility(\"default|hidden|internal|protected\")");
3666 if (!strcmp (astr.data, "default"))
3667 ad->a.visibility = STV_DEFAULT;
3668 else if (!strcmp (astr.data, "hidden"))
3669 ad->a.visibility = STV_HIDDEN;
3670 else if (!strcmp (astr.data, "internal"))
3671 ad->a.visibility = STV_INTERNAL;
3672 else if (!strcmp (astr.data, "protected"))
3673 ad->a.visibility = STV_PROTECTED;
3674 else
3675 expect("visibility(\"default|hidden|internal|protected\")");
3676 skip(')');
3677 cstr_free(&astr);
3678 break;
3679 case TOK_ALIGNED1:
3680 case TOK_ALIGNED2:
3681 if (tok == '(') {
3682 next();
3683 n = expr_const();
3684 if (n <= 0 || (n & (n - 1)) != 0)
3685 tcc_error("alignment must be a positive power of two");
3686 skip(')');
3687 } else {
3688 n = MAX_ALIGN;
3690 ad->a.aligned = exact_log2p1(n);
3691 if (n != 1 << (ad->a.aligned - 1))
3692 tcc_error("alignment of %d is larger than implemented", n);
3693 break;
3694 case TOK_PACKED1:
3695 case TOK_PACKED2:
3696 ad->a.packed = 1;
3697 break;
3698 case TOK_WEAK1:
3699 case TOK_WEAK2:
3700 ad->a.weak = 1;
3701 break;
3702 case TOK_UNUSED1:
3703 case TOK_UNUSED2:
3704 /* currently, no need to handle it because tcc does not
3705 track unused objects */
3706 break;
3707 case TOK_NORETURN1:
3708 case TOK_NORETURN2:
3709 ad->f.func_noreturn = 1;
3710 break;
3711 case TOK_CDECL1:
3712 case TOK_CDECL2:
3713 case TOK_CDECL3:
3714 ad->f.func_call = FUNC_CDECL;
3715 break;
3716 case TOK_STDCALL1:
3717 case TOK_STDCALL2:
3718 case TOK_STDCALL3:
3719 ad->f.func_call = FUNC_STDCALL;
3720 break;
3721 #ifdef TCC_TARGET_I386
3722 case TOK_REGPARM1:
3723 case TOK_REGPARM2:
3724 skip('(');
3725 n = expr_const();
3726 if (n > 3)
3727 n = 3;
3728 else if (n < 0)
3729 n = 0;
3730 if (n > 0)
3731 ad->f.func_call = FUNC_FASTCALL1 + n - 1;
3732 skip(')');
3733 break;
3734 case TOK_FASTCALL1:
3735 case TOK_FASTCALL2:
3736 case TOK_FASTCALL3:
3737 ad->f.func_call = FUNC_FASTCALLW;
3738 break;
3739 #endif
3740 case TOK_MODE:
3741 skip('(');
3742 switch(tok) {
3743 case TOK_MODE_DI:
3744 ad->attr_mode = VT_LLONG + 1;
3745 break;
3746 case TOK_MODE_QI:
3747 ad->attr_mode = VT_BYTE + 1;
3748 break;
3749 case TOK_MODE_HI:
3750 ad->attr_mode = VT_SHORT + 1;
3751 break;
3752 case TOK_MODE_SI:
3753 case TOK_MODE_word:
3754 ad->attr_mode = VT_INT + 1;
3755 break;
3756 default:
3757 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok, NULL));
3758 break;
3760 next();
3761 skip(')');
3762 break;
3763 case TOK_DLLEXPORT:
3764 ad->a.dllexport = 1;
3765 break;
3766 case TOK_NODECORATE:
3767 ad->a.nodecorate = 1;
3768 break;
3769 case TOK_DLLIMPORT:
3770 ad->a.dllimport = 1;
3771 break;
3772 default:
3773 if (tcc_state->warn_unsupported)
3774 tcc_warning("'%s' attribute ignored", get_tok_str(t, NULL));
3775 /* skip parameters */
3776 if (tok == '(') {
3777 int parenthesis = 0;
3778 do {
3779 if (tok == '(')
3780 parenthesis++;
3781 else if (tok == ')')
3782 parenthesis--;
3783 next();
3784 } while (parenthesis && tok != -1);
3786 break;
3788 if (tok != ',')
3789 break;
3790 next();
3792 skip(')');
3793 skip(')');
3794 goto redo;
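/* find member 'v' in a struct/union type, descending into anonymous
   struct/union members and accumulating their offsets into *cumofs. */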
3797 static Sym * find_field (CType *type, int v, int *cumofs)
3799 Sym *s = type->ref;
3800 v |= SYM_FIELD;
3801 while ((s = s->next) != NULL) {
3802 if ((s->v & SYM_FIELD) &&
3803 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3804 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3805 Sym *ret = find_field (&s->type, v, cumofs);
3806 if (ret) {
3807 *cumofs += s->c;
3808 return ret;
3811 if (s->v == v)
3812 break;
3814 return s;
3817 static void struct_layout(CType *type, AttributeDef *ad)
3819 int size, align, maxalign, offset, c, bit_pos, bit_size;
3820 int packed, a, bt, prevbt, prev_bit_size;
3821 int pcc = !tcc_state->ms_bitfields;
3822 int pragma_pack = *tcc_state->pack_stack_ptr;
3823 Sym *f;
3825 maxalign = 1;
3826 offset = 0;
3827 c = 0;
3828 bit_pos = 0;
3829 prevbt = VT_STRUCT; /* make it never match */
3830 prev_bit_size = 0;
3832 //#define BF_DEBUG
3834 for (f = type->ref->next; f; f = f->next) {
3835 if (f->type.t & VT_BITFIELD)
3836 bit_size = BIT_SIZE(f->type.t);
3837 else
3838 bit_size = -1;
3839 size = type_size(&f->type, &align);
3840 a = f->a.aligned ? 1 << (f->a.aligned - 1) : 0;
3841 packed = 0;
3843 if (pcc && bit_size == 0) {
3844 /* in pcc mode, packing does not affect zero-width bitfields */
3846 } else {
3847 /* in pcc mode, attribute packed overrides if set. */
3848 if (pcc && (f->a.packed || ad->a.packed))
3849 align = packed = 1;
3851 /* pragma pack overrides align if it is smaller, and always packs bitfields */
3852 if (pragma_pack) {
3853 packed = 1;
3854 if (pragma_pack < align)
3855 align = pragma_pack;
3856 /* in pcc mode pragma pack also overrides individual align */
3857 if (pcc && pragma_pack < a)
3858 a = 0;
3861 /* some individual align was specified */
3862 if (a)
3863 align = a;
3865 if (type->ref->type.t == VT_UNION) {
3866 if (pcc && bit_size >= 0)
3867 size = (bit_size + 7) >> 3;
3868 offset = 0;
3869 if (size > c)
3870 c = size;
3872 } else if (bit_size < 0) {
3873 if (pcc)
3874 c += (bit_pos + 7) >> 3;
3875 c = (c + align - 1) & -align;
3876 offset = c;
3877 if (size > 0)
3878 c += size;
3879 bit_pos = 0;
3880 prevbt = VT_STRUCT;
3881 prev_bit_size = 0;
3883 } else {
3884 /* A bit-field. Layout is more complicated. There are two
3885 options: PCC (GCC) compatible and MS compatible */
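/* e.g. struct { int a:3; int b:30; }: a occupies bits 0-2 of the first int,
   b would overflow that 32-bit container and so starts a new int, giving a
   total size of 8 with 4-byte alignment. */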
3886 if (pcc) {
3887 /* In PCC layout a bit-field is placed adjacent to the
3888 preceding bit-fields, except if:
3889 - it has zero-width
3890 - an individual alignment was given
3891 - it would overflow its base type container and
3892 there is no packing */
3893 if (bit_size == 0) {
3894 new_field:
3895 c = (c + ((bit_pos + 7) >> 3) + align - 1) & -align;
3896 bit_pos = 0;
3897 } else if (f->a.aligned) {
3898 goto new_field;
3899 } else if (!packed) {
3900 int a8 = align * 8;
3901 int ofs = ((c * 8 + bit_pos) % a8 + bit_size + a8 - 1) / a8;
3902 if (ofs > size / align)
3903 goto new_field;
3906 /* in pcc mode, long long bitfields have type int if they fit */
3907 if (size == 8 && bit_size <= 32)
3908 f->type.t = (f->type.t & ~VT_BTYPE) | VT_INT, size = 4;
3910 while (bit_pos >= align * 8)
3911 c += align, bit_pos -= align * 8;
3912 offset = c;
3914 /* In PCC layout named bit-fields influence the alignment
3915 of the containing struct using the base type's alignment,
3916 except for packed fields (which here have correct align). */
3917 if (f->v & SYM_FIRST_ANOM
3918 // && bit_size // ??? gcc on ARM/rpi does that
3920 align = 1;
3922 } else {
3923 bt = f->type.t & VT_BTYPE;
3924 if ((bit_pos + bit_size > size * 8)
3925 || (bit_size > 0) == (bt != prevbt)
3927 c = (c + align - 1) & -align;
3928 offset = c;
3929 bit_pos = 0;
3930 /* In MS bitfield mode a bit-field run always uses
3931 at least as many bits as the underlying type.
3932 To start a new run it's also required that this
3933 or the last bit-field had non-zero width. */
3934 if (bit_size || prev_bit_size)
3935 c += size;
3937 /* In MS layout the record's alignment is normally
3938 influenced by the field, except for a zero-width
3939 field at the start of a run (but by further zero-width
3940 fields it is again). */
3941 if (bit_size == 0 && prevbt != bt)
3942 align = 1;
3943 prevbt = bt;
3944 prev_bit_size = bit_size;
3947 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
3948 | (bit_pos << VT_STRUCT_SHIFT);
3949 bit_pos += bit_size;
3951 if (align > maxalign)
3952 maxalign = align;
3954 #ifdef BF_DEBUG
3955 printf("set field %s offset %-2d size %-2d align %-2d",
3956 get_tok_str(f->v & ~SYM_FIELD, NULL), offset, size, align);
3957 if (f->type.t & VT_BITFIELD) {
3958 printf(" pos %-2d bits %-2d",
3959 BIT_POS(f->type.t),
3960 BIT_SIZE(f->type.t)
3963 printf("\n");
3964 #endif
3966 f->c = offset;
3967 f->r = 0;
3970 if (pcc)
3971 c += (bit_pos + 7) >> 3;
3973 /* store size and alignment */
3974 a = bt = ad->a.aligned ? 1 << (ad->a.aligned - 1) : 1;
3975 if (a < maxalign)
3976 a = maxalign;
3977 type->ref->r = a;
3978 if (pragma_pack && pragma_pack < maxalign && 0 == pcc) {
3979 /* can happen if individual align for some member was given. In
3980 this case MSVC ignores maxalign when aligning the size */
3981 a = pragma_pack;
3982 if (a < bt)
3983 a = bt;
3985 c = (c + a - 1) & -a;
3986 type->ref->c = c;
3988 #ifdef BF_DEBUG
3989 printf("struct size %-2d align %-2d\n\n", c, a), fflush(stdout);
3990 #endif
3992 /* check whether we can access bitfields by their type */
3993 for (f = type->ref->next; f; f = f->next) {
3994 int s, px, cx, c0;
3995 CType t;
3997 if (0 == (f->type.t & VT_BITFIELD))
3998 continue;
3999 f->type.ref = f;
4000 f->auxtype = -1;
4001 bit_size = BIT_SIZE(f->type.t);
4002 if (bit_size == 0)
4003 continue;
4004 bit_pos = BIT_POS(f->type.t);
4005 size = type_size(&f->type, &align);
4006 if (bit_pos + bit_size <= size * 8 && f->c + size <= c)
4007 continue;
4009 /* try to access the field using a different type */
4010 c0 = -1, s = align = 1;
4011 for (;;) {
4012 px = f->c * 8 + bit_pos;
4013 cx = (px >> 3) & -align;
4014 px = px - (cx << 3);
4015 if (c0 == cx)
4016 break;
4017 s = (px + bit_size + 7) >> 3;
4018 if (s > 4) {
4019 t.t = VT_LLONG;
4020 } else if (s > 2) {
4021 t.t = VT_INT;
4022 } else if (s > 1) {
4023 t.t = VT_SHORT;
4024 } else {
4025 t.t = VT_BYTE;
4027 s = type_size(&t, &align);
4028 c0 = cx;
4031 if (px + bit_size <= s * 8 && cx + s <= c) {
4032 /* update offset and bit position */
4033 f->c = cx;
4034 bit_pos = px;
4035 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
4036 | (bit_pos << VT_STRUCT_SHIFT);
4037 if (s != size)
4038 f->auxtype = t.t;
4039 #ifdef BF_DEBUG
4040 printf("FIX field %s offset %-2d size %-2d align %-2d "
4041 "pos %-2d bits %-2d\n",
4042 get_tok_str(f->v & ~SYM_FIELD, NULL),
4043 cx, s, align, px, bit_size);
4044 #endif
4045 } else {
4046 /* fall back to load/store single-byte wise */
4047 f->auxtype = VT_STRUCT;
4048 #ifdef BF_DEBUG
4049 printf("FIX field %s : load byte-wise\n",
4050 get_tok_str(f->v & ~SYM_FIELD, NULL));
4051 #endif
4056 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
4057 static void struct_decl(CType *type, int u)
4059 int v, c, size, align, flexible;
4060 int bit_size, bsize, bt;
4061 Sym *s, *ss, **ps;
4062 AttributeDef ad, ad1;
4063 CType type1, btype;
4065 memset(&ad, 0, sizeof ad);
4066 next();
4067 parse_attribute(&ad);
4068 if (tok != '{') {
4069 v = tok;
4070 next();
4071 /* struct already defined ? return it */
4072 if (v < TOK_IDENT)
4073 expect("struct/union/enum name");
4074 s = struct_find(v);
4075 if (s && (s->sym_scope == local_scope || tok != '{')) {
4076 if (u == s->type.t)
4077 goto do_decl;
4078 if (u == VT_ENUM && IS_ENUM(s->type.t))
4079 goto do_decl;
4080 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
4082 } else {
4083 v = anon_sym++;
4085 /* Record the original enum/struct/union token. */
4086 type1.t = u == VT_ENUM ? u | VT_INT | VT_UNSIGNED : u;
4087 type1.ref = NULL;
4088 /* we put an undefined size for struct/union */
4089 s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
4090 s->r = 0; /* default alignment is zero as gcc */
4091 do_decl:
4092 type->t = s->type.t;
4093 type->ref = s;
4095 if (tok == '{') {
4096 next();
4097 if (s->c != -1)
4098 tcc_error("struct/union/enum already defined");
4099 s->c = -2;
4100 /* cannot be empty */
4101 /* empty enums are not allowed */
4102 ps = &s->next;
4103 if (u == VT_ENUM) {
4104 long long ll = 0, pl = 0, nl = 0;
4105 CType t;
4106 t.ref = s;
4107 /* enum symbols have static storage */
4108 t.t = VT_INT|VT_STATIC|VT_ENUM_VAL;
4109 for(;;) {
4110 v = tok;
4111 if (v < TOK_UIDENT)
4112 expect("identifier");
4113 ss = sym_find(v);
4114 if (ss && !local_stack)
4115 tcc_error("redefinition of enumerator '%s'",
4116 get_tok_str(v, NULL));
4117 next();
4118 if (tok == '=') {
4119 next();
4120 ll = expr_const64();
4122 ss = sym_push(v, &t, VT_CONST, 0);
4123 ss->enum_val = ll;
4124 *ps = ss, ps = &ss->next;
4125 if (ll < nl)
4126 nl = ll;
4127 if (ll > pl)
4128 pl = ll;
4129 if (tok != ',')
4130 break;
4131 next();
4132 ll++;
4133 /* NOTE: we accept a trailing comma */
4134 if (tok == '}')
4135 break;
4137 skip('}');
4138 /* set integral type of the enum */
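/* e.g. enumerators 0..255 give an unsigned int enum; any value that does
   not fit in 32 bits promotes the enum (and the oversized members below)
   to (unsigned) long long. */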
4139 t.t = VT_INT;
4140 if (nl >= 0) {
4141 if (pl != (unsigned)pl)
4142 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4143 t.t |= VT_UNSIGNED;
4144 } else if (pl != (int)pl || nl != (int)nl)
4145 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4146 s->type.t = type->t = t.t | VT_ENUM;
4147 s->c = 0;
4148 /* set type for enum members */
4149 for (ss = s->next; ss; ss = ss->next) {
4150 ll = ss->enum_val;
4151 if (ll == (int)ll) /* default is int if it fits */
4152 continue;
4153 if (t.t & VT_UNSIGNED) {
4154 ss->type.t |= VT_UNSIGNED;
4155 if (ll == (unsigned)ll)
4156 continue;
4158 ss->type.t = (ss->type.t & ~VT_BTYPE)
4159 | (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4161 } else {
4162 c = 0;
4163 flexible = 0;
4164 while (tok != '}') {
4165 if (!parse_btype(&btype, &ad1)) {
4166 skip(';');
4167 continue;
4169 while (1) {
4170 if (flexible)
4171 tcc_error("flexible array member '%s' not at the end of struct",
4172 get_tok_str(v, NULL));
4173 bit_size = -1;
4174 v = 0;
4175 type1 = btype;
4176 if (tok != ':') {
4177 if (tok != ';')
4178 type_decl(&type1, &ad1, &v, TYPE_DIRECT);
4179 if (v == 0) {
4180 if ((type1.t & VT_BTYPE) != VT_STRUCT)
4181 expect("identifier");
4182 else {
4183 int v = btype.ref->v;
4184 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
4185 if (tcc_state->ms_extensions == 0)
4186 expect("identifier");
4190 if (type_size(&type1, &align) < 0) {
4191 if ((u == VT_STRUCT) && (type1.t & VT_ARRAY) && c)
4192 flexible = 1;
4193 else
4194 tcc_error("field '%s' has incomplete type",
4195 get_tok_str(v, NULL));
4197 if ((type1.t & VT_BTYPE) == VT_FUNC ||
4198 (type1.t & VT_BTYPE) == VT_VOID ||
4199 (type1.t & VT_STORAGE))
4200 tcc_error("invalid type for '%s'",
4201 get_tok_str(v, NULL));
4203 if (tok == ':') {
4204 next();
4205 bit_size = expr_const();
4206 /* XXX: handle v = 0 case for messages */
4207 if (bit_size < 0)
4208 tcc_error("negative width in bit-field '%s'",
4209 get_tok_str(v, NULL));
4210 if (v && bit_size == 0)
4211 tcc_error("zero width for bit-field '%s'",
4212 get_tok_str(v, NULL));
4213 parse_attribute(&ad1);
4215 size = type_size(&type1, &align);
4216 if (bit_size >= 0) {
4217 bt = type1.t & VT_BTYPE;
4218 if (bt != VT_INT &&
4219 bt != VT_BYTE &&
4220 bt != VT_SHORT &&
4221 bt != VT_BOOL &&
4222 bt != VT_LLONG)
4223 tcc_error("bitfields must have scalar type");
4224 bsize = size * 8;
4225 if (bit_size > bsize) {
4226 tcc_error("width of '%s' exceeds its type",
4227 get_tok_str(v, NULL));
4228 } else if (bit_size == bsize
4229 && !ad.a.packed && !ad1.a.packed) {
4230 /* no need for bit fields */
4232 } else if (bit_size == 64) {
4233 tcc_error("field width 64 not implemented");
4234 } else {
4235 type1.t = (type1.t & ~VT_STRUCT_MASK)
4236 | VT_BITFIELD
4237 | (bit_size << (VT_STRUCT_SHIFT + 6));
4240 if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
4241 /* Remember we've seen a real field to check
4242 for placement of flexible array member. */
4243 c = 1;
4245 /* If member is a struct or bit-field, enforce
4246 placing into the struct (as anonymous). */
4247 if (v == 0 &&
4248 ((type1.t & VT_BTYPE) == VT_STRUCT ||
4249 bit_size >= 0)) {
4250 v = anon_sym++;
4252 if (v) {
4253 ss = sym_push(v | SYM_FIELD, &type1, 0, 0);
4254 ss->a = ad1.a;
4255 *ps = ss;
4256 ps = &ss->next;
4258 if (tok == ';' || tok == TOK_EOF)
4259 break;
4260 skip(',');
4262 skip(';');
4264 skip('}');
4265 parse_attribute(&ad);
4266 struct_layout(type, &ad);
4271 static void sym_to_attr(AttributeDef *ad, Sym *s)
4273 merge_symattr(&ad->a, &s->a);
4274 merge_funcattr(&ad->f, &s->f);
4277 /* Add type qualifiers to a type. If the type is an array then the qualifiers
4278 are added to the element type, copied because it could be a typedef. */
4279 static void parse_btype_qualify(CType *type, int qualifiers)
4281 while (type->t & VT_ARRAY) {
4282 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
4283 type = &type->ref->type;
4285 type->t |= qualifiers;
4288 /* return 0 if no type declaration. otherwise, return the basic type
4289 and skip it.
4291 static int parse_btype(CType *type, AttributeDef *ad)
4293 int t, u, bt, st, type_found, typespec_found, g, n;
4294 Sym *s;
4295 CType type1;
4297 memset(ad, 0, sizeof(AttributeDef));
4298 type_found = 0;
4299 typespec_found = 0;
4300 t = VT_INT;
4301 bt = st = -1;
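/* bt tracks the basic type seen so far and st a short/long size modifier;
   -1 means none yet, -2 (set when a typedef name is consumed) forbids
   combining with further basic type keywords. */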
4302 type->ref = NULL;
4304 while(1) {
4305 switch(tok) {
4306 case TOK_EXTENSION:
4307 /* currently, we really ignore extension */
4308 next();
4309 continue;
4311 /* basic types */
4312 case TOK_CHAR:
4313 u = VT_BYTE;
4314 basic_type:
4315 next();
4316 basic_type1:
4317 if (u == VT_SHORT || u == VT_LONG) {
4318 if (st != -1 || (bt != -1 && bt != VT_INT))
4319 tmbt: tcc_error("too many basic types");
4320 st = u;
4321 } else {
4322 if (bt != -1 || (st != -1 && u != VT_INT))
4323 goto tmbt;
4324 bt = u;
4326 if (u != VT_INT)
4327 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
4328 typespec_found = 1;
4329 break;
4330 case TOK_VOID:
4331 u = VT_VOID;
4332 goto basic_type;
4333 case TOK_SHORT:
4334 u = VT_SHORT;
4335 goto basic_type;
4336 case TOK_INT:
4337 u = VT_INT;
4338 goto basic_type;
4339 case TOK_ALIGNAS:
4340 { int n;
4341 AttributeDef ad1;
4342 next();
4343 skip('(');
4344 memset(&ad1, 0, sizeof(AttributeDef));
4345 if (parse_btype(&type1, &ad1)) {
4346 type_decl(&type1, &ad1, &n, TYPE_ABSTRACT);
4347 if (ad1.a.aligned)
4348 n = 1 << (ad1.a.aligned - 1);
4349 else
4350 type_size(&type1, &n);
4351 } else {
4352 n = expr_const();
4353 if (n <= 0 || (n & (n - 1)) != 0)
4354 tcc_error("alignment must be a positive power of two");
4356 skip(')');
4357 ad->a.aligned = exact_log2p1(n);
4359 continue;
4360 case TOK_LONG:
4361 if ((t & VT_BTYPE) == VT_DOUBLE) {
4362 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
4363 } else if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
4364 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LLONG;
4365 } else {
4366 u = VT_LONG;
4367 goto basic_type;
4369 next();
4370 break;
4371 #ifdef TCC_TARGET_ARM64
4372 case TOK_UINT128:
4373 /* GCC's __uint128_t appears in some Linux header files. Make it a
4374 synonym for long double to get the size and alignment right. */
4375 u = VT_LDOUBLE;
4376 goto basic_type;
4377 #endif
4378 case TOK_BOOL:
4379 u = VT_BOOL;
4380 goto basic_type;
4381 case TOK_FLOAT:
4382 u = VT_FLOAT;
4383 goto basic_type;
4384 case TOK_DOUBLE:
4385 if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
4386 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
4387 } else {
4388 u = VT_DOUBLE;
4389 goto basic_type;
4391 next();
4392 break;
4393 case TOK_ENUM:
4394 struct_decl(&type1, VT_ENUM);
4395 basic_type2:
4396 u = type1.t;
4397 type->ref = type1.ref;
4398 goto basic_type1;
4399 case TOK_STRUCT:
4400 struct_decl(&type1, VT_STRUCT);
4401 goto basic_type2;
4402 case TOK_UNION:
4403 struct_decl(&type1, VT_UNION);
4404 goto basic_type2;
4406 /* type modifiers */
4407 case TOK_CONST1:
4408 case TOK_CONST2:
4409 case TOK_CONST3:
4410 type->t = t;
4411 parse_btype_qualify(type, VT_CONSTANT);
4412 t = type->t;
4413 next();
4414 break;
4415 case TOK_VOLATILE1:
4416 case TOK_VOLATILE2:
4417 case TOK_VOLATILE3:
4418 type->t = t;
4419 parse_btype_qualify(type, VT_VOLATILE);
4420 t = type->t;
4421 next();
4422 break;
4423 case TOK_SIGNED1:
4424 case TOK_SIGNED2:
4425 case TOK_SIGNED3:
4426 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
4427 tcc_error("signed and unsigned modifier");
4428 t |= VT_DEFSIGN;
4429 next();
4430 typespec_found = 1;
4431 break;
4432 case TOK_REGISTER:
4433 case TOK_AUTO:
4434 case TOK_RESTRICT1:
4435 case TOK_RESTRICT2:
4436 case TOK_RESTRICT3:
4437 next();
4438 break;
4439 case TOK_UNSIGNED:
4440 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
4441 tcc_error("signed and unsigned modifier");
4442 t |= VT_DEFSIGN | VT_UNSIGNED;
4443 next();
4444 typespec_found = 1;
4445 break;
4447 /* storage */
4448 case TOK_EXTERN:
4449 g = VT_EXTERN;
4450 goto storage;
4451 case TOK_STATIC:
4452 g = VT_STATIC;
4453 goto storage;
4454 case TOK_TYPEDEF:
4455 g = VT_TYPEDEF;
4456 goto storage;
4457 storage:
4458 if (t & (VT_EXTERN|VT_STATIC|VT_TYPEDEF) & ~g)
4459 tcc_error("multiple storage classes");
4460 t |= g;
4461 next();
4462 break;
4463 case TOK_INLINE1:
4464 case TOK_INLINE2:
4465 case TOK_INLINE3:
4466 t |= VT_INLINE;
4467 next();
4468 break;
4469 case TOK_NORETURN3:
4470 next();
4471 ad->f.func_noreturn = 1;
4472 break;
4473 /* GNUC attribute */
4474 case TOK_ATTRIBUTE1:
4475 case TOK_ATTRIBUTE2:
4476 parse_attribute(ad);
4477 if (ad->attr_mode) {
4478 u = ad->attr_mode -1;
4479 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
4481 continue;
4482 /* GNUC typeof */
4483 case TOK_TYPEOF1:
4484 case TOK_TYPEOF2:
4485 case TOK_TYPEOF3:
4486 next();
4487 parse_expr_type(&type1);
4488 /* remove all storage modifiers except typedef */
4489 type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
4490 if (type1.ref)
4491 sym_to_attr(ad, type1.ref);
4492 goto basic_type2;
4493 default:
4494 if (typespec_found)
4495 goto the_end;
4496 s = sym_find(tok);
4497 if (!s || !(s->type.t & VT_TYPEDEF))
4498 goto the_end;
4500 n = tok, next();
4501 if (tok == ':' && !in_generic) {
4502 /* ignore if it's a label */
4503 unget_tok(n);
4504 goto the_end;
4507 t &= ~(VT_BTYPE|VT_LONG);
4508 u = t & ~(VT_CONSTANT | VT_VOLATILE), t ^= u;
4509 type->t = (s->type.t & ~VT_TYPEDEF) | u;
4510 type->ref = s->type.ref;
4511 if (t)
4512 parse_btype_qualify(type, t);
4513 t = type->t;
4514 /* get attributes from typedef */
4515 sym_to_attr(ad, s);
4516 typespec_found = 1;
4517 st = bt = -2;
4518 break;
4520 type_found = 1;
4522 the_end:
4523 if (tcc_state->char_is_unsigned) {
4524 if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
4525 t |= VT_UNSIGNED;
4527 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
4528 bt = t & (VT_BTYPE|VT_LONG);
4529 if (bt == VT_LONG)
4530 t |= LONG_SIZE == 8 ? VT_LLONG : VT_INT;
4531 #ifdef TCC_TARGET_PE
4532 if (bt == VT_LDOUBLE)
4533 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_DOUBLE;
4534 #endif
4535 type->t = t;
4536 return type_found;
4539 /* convert a function parameter type (array to pointer and function to
4540 function pointer) */
4541 static inline void convert_parameter_type(CType *pt)
4543 /* remove const and volatile qualifiers (XXX: const could be used
4544 to indicate a const function parameter) */
4545 pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
4546 /* array must be transformed to pointer according to ANSI C */
4547 pt->t &= ~VT_ARRAY;
4548 if ((pt->t & VT_BTYPE) == VT_FUNC) {
4549 mk_pointer(pt);
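/* Illustrative effect of convert_parameter_type() (example not from the
   original source): a declaration like
       void f(int a[10], void g(void));
   is adjusted to
       void f(int *a, void (*g)(void));
   i.e. array and function parameters decay to pointers. */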
4553 ST_FUNC void parse_asm_str(CString *astr)
4555 skip('(');
4556 parse_mult_str(astr, "string constant");
4559 /* Parse an asm label and return the token */
4560 static int asm_label_instr(void)
4562 int v;
4563 CString astr;
4565 next();
4566 parse_asm_str(&astr);
4567 skip(')');
4568 #ifdef ASM_DEBUG
4569 printf("asm_alias: \"%s\"\n", (char *)astr.data);
4570 #endif
4571 v = tok_alloc(astr.data, astr.size - 1)->tok;
4572 cstr_free(&astr);
4573 return v;
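/* Illustrative asm label parsed by asm_label_instr() (example only):
       int my_func(void) __asm__("real_symbol_name");
   The token for "real_symbol_name" is returned to the caller. */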
4576 static int post_type(CType *type, AttributeDef *ad, int storage, int td)
4578 int n, l, t1, arg_size, align, unused_align;
4579 Sym **plast, *s, *first;
4580 AttributeDef ad1;
4581 CType pt;
4583 if (tok == '(') {
4584 /* function type, or recursive declarator (return if so) */
4585 next();
4586 if (td && !(td & TYPE_ABSTRACT))
4587 return 0;
4588 if (tok == ')')
4589 l = 0;
4590 else if (parse_btype(&pt, &ad1))
4591 l = FUNC_NEW;
4592 else if (td) {
4593 merge_attr (ad, &ad1);
4594 return 0;
4595 } else
4596 l = FUNC_OLD;
4597 first = NULL;
4598 plast = &first;
4599 arg_size = 0;
4600 if (l) {
4601 for(;;) {
4602 /* read param name and compute offset */
4603 if (l != FUNC_OLD) {
4604 if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
4605 break;
4606 type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT);
4607 if ((pt.t & VT_BTYPE) == VT_VOID)
4608 tcc_error("parameter declared as void");
4609 } else {
4610 n = tok;
4611 if (n < TOK_UIDENT)
4612 expect("identifier");
4613 pt.t = VT_VOID; /* invalid type */
4614 pt.ref = NULL;
4615 next();
4617 convert_parameter_type(&pt);
4618 arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
4619 s = sym_push(n | SYM_FIELD, &pt, 0, 0);
4620 *plast = s;
4621 plast = &s->next;
4622 if (tok == ')')
4623 break;
4624 skip(',');
4625 if (l == FUNC_NEW && tok == TOK_DOTS) {
4626 l = FUNC_ELLIPSIS;
4627 next();
4628 break;
4630 if (l == FUNC_NEW && !parse_btype(&pt, &ad1))
4631 tcc_error("invalid type");
4633 } else
4634 /* if no parameters, then old type prototype */
4635 l = FUNC_OLD;
4636 skip(')');
4637 /* NOTE: const is ignored in returned type as it has a special
4638 meaning in gcc / C++ */
4639 type->t &= ~VT_CONSTANT;
4640 /* some ancient pre-K&R C allows a function to return an array
4641 and the array brackets to be put after the arguments, such
4642 that "int c()[]" means something like "int[] c()" */
4643 if (tok == '[') {
4644 next();
4645 skip(']'); /* only handle simple "[]" */
4646 mk_pointer(type);
4648 /* we push an anonymous symbol which will contain the function prototype */
4649 ad->f.func_args = arg_size;
4650 ad->f.func_type = l;
4651 s = sym_push(SYM_FIELD, type, 0, 0);
4652 s->a = ad->a;
4653 s->f = ad->f;
4654 s->next = first;
4655 type->t = VT_FUNC;
4656 type->ref = s;
4657 } else if (tok == '[') {
4658 int saved_nocode_wanted = nocode_wanted;
4659 /* array definition */
4660 next();
4661 while (1) {
4662 /* XXX The optional type-quals and static should only be accepted
4663 in parameter decls. The '*' as well, and then even only
4664 in prototypes (not function defs). */
4665 switch (tok) {
4666 case TOK_RESTRICT1: case TOK_RESTRICT2: case TOK_RESTRICT3:
4667 case TOK_CONST1:
4668 case TOK_VOLATILE1:
4669 case TOK_STATIC:
4670 case '*':
4671 next();
4672 continue;
4673 default:
4674 break;
4676 break;
4678 n = -1;
4679 t1 = 0;
4680 if (tok != ']') {
4681 if (!local_stack || (storage & VT_STATIC))
4682 vpushi(expr_const());
4683 else {
4684 /* For VLAs (which can only happen with local_stack && !VT_STATIC)
4685 the length must always be evaluated, even under nocode_wanted,
4686 so that its size slot is initialized (e.g. under sizeof
4687 or typeof). */
4688 nocode_wanted = 0;
4689 gexpr();
4691 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4692 n = vtop->c.i;
4693 if (n < 0)
4694 tcc_error("invalid array size");
4695 } else {
4696 if (!is_integer_btype(vtop->type.t & VT_BTYPE))
4697 tcc_error("size of variable length array should be an integer");
4698 n = 0;
4699 t1 = VT_VLA;
4702 skip(']');
4703 /* parse next post type */
4704 post_type(type, ad, storage, 0);
4706 if ((type->t & VT_BTYPE) == VT_FUNC)
4707 tcc_error("declaration of an array of functions");
4708 if ((type->t & VT_BTYPE) == VT_VOID
4709 || type_size(type, &unused_align) < 0)
4710 tcc_error("declaration of an array of incomplete type elements");
4712 t1 |= type->t & VT_VLA;
4714 if (t1 & VT_VLA) {
4715 if (n < 0)
4716 tcc_error("need explicit inner array size in VLAs");
4717 loc -= type_size(&int_type, &align);
4718 loc &= -align;
4719 n = loc;
4721 vla_runtime_type_size(type, &align);
4722 gen_op('*');
4723 vset(&int_type, VT_LOCAL|VT_LVAL, n);
4724 vswap();
4725 vstore();
4727 if (n != -1)
4728 vpop();
4729 nocode_wanted = saved_nocode_wanted;
4731 /* we push an anonymous symbol which will contain the array
4732 element type */
4733 s = sym_push(SYM_FIELD, type, 0, n);
4734 type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
4735 type->ref = s;
4737 return 1;
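/* Illustrative declarator suffixes handled by post_type() above
   (examples only, not from the original source):
       int f(int, char *);           // new-style parameter list (FUNC_NEW)
       int g();                      // old-style prototype (FUNC_OLD)
       int a[10];                    // fixed-size array
       void h(int n) { int v[n]; }   // v is a VLA: size evaluated at run time
*/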
4740 /* Parse a type declarator (except basic type), and return the type
4741 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4742 expected. 'type' should contain the basic type. 'ad' is the
4743 attribute definition of the basic type. It can be modified by
4744 type_decl(). If this (possibly abstract) declarator is a pointer chain
4745 it returns the innermost pointed to type (equals *type, but is a different
4746 pointer), otherwise returns type itself, that's used for recursive calls. */
4747 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td)
4749 CType *post, *ret;
4750 int qualifiers, storage;
4752 /* recursive type, remove storage bits first, apply them later again */
4753 storage = type->t & VT_STORAGE;
4754 type->t &= ~VT_STORAGE;
4755 post = ret = type;
4757 while (tok == '*') {
4758 qualifiers = 0;
4759 redo:
4760 next();
4761 switch(tok) {
4762 case TOK_CONST1:
4763 case TOK_CONST2:
4764 case TOK_CONST3:
4765 qualifiers |= VT_CONSTANT;
4766 goto redo;
4767 case TOK_VOLATILE1:
4768 case TOK_VOLATILE2:
4769 case TOK_VOLATILE3:
4770 qualifiers |= VT_VOLATILE;
4771 goto redo;
4772 case TOK_RESTRICT1:
4773 case TOK_RESTRICT2:
4774 case TOK_RESTRICT3:
4775 goto redo;
4776 /* XXX: clarify attribute handling */
4777 case TOK_ATTRIBUTE1:
4778 case TOK_ATTRIBUTE2:
4779 parse_attribute(ad);
4780 break;
4782 mk_pointer(type);
4783 type->t |= qualifiers;
4784 if (ret == type)
4785 /* innermost pointed to type is the one for the first derivation */
4786 ret = pointed_type(type);
4789 if (tok == '(') {
4790 /* This is possibly a parameter type list for abstract declarators
4791 ('int ()'), use post_type for testing this. */
4792 if (!post_type(type, ad, 0, td)) {
4793 /* It's not, so it's a nested declarator, and the post operations
4794 apply to the innermost pointed to type (if any). */
4795 /* XXX: it is not correct to modify 'ad' at this point, but
4796 the syntax is not clear */
4797 parse_attribute(ad);
4798 post = type_decl(type, ad, v, td);
4799 skip(')');
4800 } else
4801 goto abstract;
4802 } else if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
4803 /* type identifier */
4804 *v = tok;
4805 next();
4806 } else {
4807 abstract:
4808 if (!(td & TYPE_ABSTRACT))
4809 expect("identifier");
4810 *v = 0;
4812 post_type(post, ad, storage, 0);
4813 parse_attribute(ad);
4814 type->t |= storage;
4815 return ret;
4818 /* indirection with full error checking and bound check */
4819 ST_FUNC void indir(void)
4821 if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
4822 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
4823 return;
4824 expect("pointer");
4826 if (vtop->r & VT_LVAL)
4827 gv(RC_INT);
4828 vtop->type = *pointed_type(&vtop->type);
4829 /* Arrays and functions are never lvalues */
4830 if (!(vtop->type.t & (VT_ARRAY | VT_VLA))
4831 && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
4832 vtop->r |= VT_LVAL;
4833 /* if bound checking, the referenced pointer must be checked */
4834 #ifdef CONFIG_TCC_BCHECK
4835 if (tcc_state->do_bounds_check)
4836 vtop->r |= VT_MUSTBOUND;
4837 #endif
4841 /* pass a parameter to a function and do type checking and casting */
4842 static void gfunc_param_typed(Sym *func, Sym *arg)
4844 int func_type;
4845 CType type;
4847 func_type = func->f.func_type;
4848 if (func_type == FUNC_OLD ||
4849 (func_type == FUNC_ELLIPSIS && arg == NULL)) {
4850 /* default casting : only need to convert float to double */
4851 if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
4852 gen_cast_s(VT_DOUBLE);
4853 } else if (vtop->type.t & VT_BITFIELD) {
4854 type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
4855 type.ref = vtop->type.ref;
4856 gen_cast(&type);
4857 } else if (vtop->r & VT_MUSTCAST) {
4858 force_charshort_cast();
4860 } else if (arg == NULL) {
4861 tcc_error("too many arguments to function");
4862 } else {
4863 type = arg->type;
4864 type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
4865 gen_assign_cast(&type);
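/* Illustrative consequence of the default argument handling above: when the
   callee is old-style, or the argument lands in the '...' part of a variadic
   prototype, a float argument is promoted to double, e.g.
       printf("%f\n", 1.5f);   // 1.5f is passed as a double
   (example only, not from the original source). */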
4869 /* parse an expression and return its type without any side effect. */
4870 static void expr_type(CType *type, void (*expr_fn)(void))
4872 nocode_wanted++;
4873 expr_fn();
4874 *type = vtop->type;
4875 vpop();
4876 nocode_wanted--;
4879 /* parse an expression of the form '(type)' or '(expr)' and return its
4880 type */
4881 static void parse_expr_type(CType *type)
4883 int n;
4884 AttributeDef ad;
4886 skip('(');
4887 if (parse_btype(type, &ad)) {
4888 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4889 } else {
4890 expr_type(type, gexpr);
4892 skip(')');
4895 static void parse_type(CType *type)
4897 AttributeDef ad;
4898 int n;
4900 if (!parse_btype(type, &ad)) {
4901 expect("type");
4903 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4906 static void parse_builtin_params(int nc, const char *args)
4908 char c, sep = '(';
4909 CType t;
4910 if (nc)
4911 nocode_wanted++;
4912 next();
4913 while ((c = *args++)) {
4914 skip(sep);
4915 sep = ',';
4916 switch (c) {
4917 case 'e': expr_eq(); continue;
4918 case 't': parse_type(&t); vpush(&t); continue;
4919 default: tcc_error("internal error"); break;
4922 skip(')');
4923 if (nc)
4924 nocode_wanted--;
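/* The 'args' string above is a tiny format spec: 'e' parses one assignment
   expression, 't' parses a type name and pushes it.  For instance
   parse_builtin_params(0, "et") matches a call shaped like
       __builtin_xxx(expr, type)
   (illustrative description, not from the original source). */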
4927 ST_FUNC void unary(void)
4929 int n, t, align, size, r, sizeof_caller;
4930 CType type;
4931 Sym *s;
4932 AttributeDef ad;
4934 sizeof_caller = in_sizeof;
4935 in_sizeof = 0;
4936 type.ref = NULL;
4937 /* XXX: GCC 2.95.3 does not generate a jump table here although it
4938 would be better */
4939 tok_next:
4940 switch(tok) {
4941 case TOK_EXTENSION:
4942 next();
4943 goto tok_next;
4944 case TOK_LCHAR:
4945 #ifdef TCC_TARGET_PE
4946 t = VT_SHORT|VT_UNSIGNED;
4947 goto push_tokc;
4948 #endif
4949 case TOK_CINT:
4950 case TOK_CCHAR:
4951 t = VT_INT;
4952 push_tokc:
4953 type.t = t;
4954 vsetc(&type, VT_CONST, &tokc);
4955 next();
4956 break;
4957 case TOK_CUINT:
4958 t = VT_INT | VT_UNSIGNED;
4959 goto push_tokc;
4960 case TOK_CLLONG:
4961 t = VT_LLONG;
4962 goto push_tokc;
4963 case TOK_CULLONG:
4964 t = VT_LLONG | VT_UNSIGNED;
4965 goto push_tokc;
4966 case TOK_CFLOAT:
4967 t = VT_FLOAT;
4968 goto push_tokc;
4969 case TOK_CDOUBLE:
4970 t = VT_DOUBLE;
4971 goto push_tokc;
4972 case TOK_CLDOUBLE:
4973 t = VT_LDOUBLE;
4974 goto push_tokc;
4975 case TOK_CLONG:
4976 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG;
4977 goto push_tokc;
4978 case TOK_CULONG:
4979 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG | VT_UNSIGNED;
4980 goto push_tokc;
4981 case TOK___FUNCTION__:
4982 if (!gnu_ext)
4983 goto tok_identifier;
4984 /* fall thru */
4985 case TOK___FUNC__:
4987 void *ptr;
4988 int len;
4989 /* special function name identifier */
4990 len = strlen(funcname) + 1;
4991 /* generate char[len] type */
4992 type.t = VT_BYTE;
4993 mk_pointer(&type);
4994 type.t |= VT_ARRAY;
4995 type.ref->c = len;
4996 vpush_ref(&type, data_section, data_section->data_offset, len);
4997 if (!NODATA_WANTED) {
4998 ptr = section_ptr_add(data_section, len);
4999 memcpy(ptr, funcname, len);
5001 next();
5003 break;
5004 case TOK_LSTR:
5005 #ifdef TCC_TARGET_PE
5006 t = VT_SHORT | VT_UNSIGNED;
5007 #else
5008 t = VT_INT;
5009 #endif
5010 goto str_init;
5011 case TOK_STR:
5012 /* string parsing */
5013 t = VT_BYTE;
5014 if (tcc_state->char_is_unsigned)
5015 t = VT_BYTE | VT_UNSIGNED;
5016 str_init:
5017 if (tcc_state->warn_write_strings)
5018 t |= VT_CONSTANT;
5019 type.t = t;
5020 mk_pointer(&type);
5021 type.t |= VT_ARRAY;
5022 memset(&ad, 0, sizeof(AttributeDef));
5023 decl_initializer_alloc(&type, &ad, VT_CONST, 2, 0, 0);
5024 break;
5025 case '(':
5026 next();
5027 /* cast ? */
5028 if (parse_btype(&type, &ad)) {
5029 type_decl(&type, &ad, &n, TYPE_ABSTRACT);
5030 skip(')');
5031 /* check ISOC99 compound literal */
5032 if (tok == '{') {
5033 /* data is allocated locally by default */
5034 if (global_expr)
5035 r = VT_CONST;
5036 else
5037 r = VT_LOCAL;
5038 /* all except arrays are lvalues */
5039 if (!(type.t & VT_ARRAY))
5040 r |= VT_LVAL;
5041 memset(&ad, 0, sizeof(AttributeDef));
5042 decl_initializer_alloc(&type, &ad, r, 1, 0, 0);
5043 } else {
5044 if (sizeof_caller) {
5045 vpush(&type);
5046 return;
5048 unary();
5049 gen_cast(&type);
5051 } else if (tok == '{') {
5052 int saved_nocode_wanted = nocode_wanted;
5053 if (const_wanted)
5054 tcc_error("expected constant");
5055 /* save all registers */
5056 save_regs(0);
5057 /* statement expression: unlike GCC, we do not accept break/continue
5058 inside it. We do retain the nocode_wanted state,
5059 as statement expressions can't ever be entered from the
5060 outside, so any reactivation of code emission (from labels
5061 or loop heads) can be disabled again after the end of it. */
5062 block(1);
5063 nocode_wanted = saved_nocode_wanted;
5064 skip(')');
5065 } else {
5066 gexpr();
5067 skip(')');
5069 break;
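/* Illustrative GNU statement expression accepted by the '(' case above
   (example only):
       int y = ({ int t = f(); t * 2; });
   The value of the last expression in the block becomes the value of the
   whole parenthesized expression. */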
5070 case '*':
5071 next();
5072 unary();
5073 indir();
5074 break;
5075 case '&':
5076 next();
5077 unary();
5078 /* functions names must be treated as function pointers,
5079 except for unary '&' and sizeof. Since we consider that
5080 functions are not lvalues, we only have to handle it
5081 there and in function calls. */
5082 /* arrays can also be used although they are not lvalues */
5083 if ((vtop->type.t & VT_BTYPE) != VT_FUNC &&
5084 !(vtop->type.t & VT_ARRAY))
5085 test_lvalue();
5086 mk_pointer(&vtop->type);
5087 gaddrof();
5088 break;
5089 case '!':
5090 next();
5091 unary();
5092 gen_test_zero(TOK_EQ);
5093 break;
5094 case '~':
5095 next();
5096 unary();
5097 vpushi(-1);
5098 gen_op('^');
5099 break;
5100 case '+':
5101 next();
5102 unary();
5103 if ((vtop->type.t & VT_BTYPE) == VT_PTR)
5104 tcc_error("pointer not accepted for unary plus");
5105 /* In order to force a cast, we add zero, except for floating point
5106 where we really need a no-op (otherwise -0.0 would be transformed
5107 into +0.0). */
5108 if (!is_float(vtop->type.t)) {
5109 vpushi(0);
5110 gen_op('+');
5112 break;
5113 case TOK_SIZEOF:
5114 case TOK_ALIGNOF1:
5115 case TOK_ALIGNOF2:
5116 case TOK_ALIGNOF3:
5117 t = tok;
5118 next();
5119 in_sizeof++;
5120 expr_type(&type, unary); /* this performs in_sizeof = 0; */
5121 s = NULL;
5122 if (vtop[1].r & VT_SYM)
5123 s = vtop[1].sym; /* hack: accessing previous vtop */
5124 size = type_size(&type, &align);
5125 if (s && s->a.aligned)
5126 align = 1 << (s->a.aligned - 1);
5127 if (t == TOK_SIZEOF) {
5128 if (!(type.t & VT_VLA)) {
5129 if (size < 0)
5130 tcc_error("sizeof applied to an incomplete type");
5131 vpushs(size);
5132 } else {
5133 vla_runtime_type_size(&type, &align);
5135 } else {
5136 vpushs(align);
5138 vtop->type.t |= VT_UNSIGNED;
5139 break;
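/* Illustrative queries handled above (examples only):
       sizeof (struct S)    // object size, pushed as an unsigned value
       _Alignof(double)     // alignment, possibly overridden by an
                            // __attribute__((aligned(N))) on the symbol
       sizeof a             // for a VLA 'a': computed at run time
*/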
5141 case TOK_builtin_expect:
5142 /* __builtin_expect is a no-op for now */
5143 parse_builtin_params(0, "ee");
5144 vpop();
5145 break;
5146 case TOK_builtin_types_compatible_p:
5147 parse_builtin_params(0, "tt");
5148 vtop[-1].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
5149 vtop[0].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
5150 n = is_compatible_types(&vtop[-1].type, &vtop[0].type);
5151 vtop -= 2;
5152 vpushi(n);
5153 break;
5154 case TOK_builtin_choose_expr:
5156 int64_t c;
5157 next();
5158 skip('(');
5159 c = expr_const64();
5160 skip(',');
5161 if (!c) {
5162 nocode_wanted++;
5164 expr_eq();
5165 if (!c) {
5166 vpop();
5167 nocode_wanted--;
5169 skip(',');
5170 if (c) {
5171 nocode_wanted++;
5173 expr_eq();
5174 if (c) {
5175 vpop();
5176 nocode_wanted--;
5178 skip(')');
5180 break;
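/* Illustrative use of __builtin_choose_expr as parsed above (example only):
       int bits = __builtin_choose_expr(sizeof(long) == 8, 64, 32);
   The first argument must be a constant expression; only the selected branch
   is compiled, the other is parsed under nocode_wanted and discarded. */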
5181 case TOK_builtin_constant_p:
5182 parse_builtin_params(1, "e");
5183 n = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
5184 vtop--;
5185 vpushi(n);
5186 break;
5187 case TOK_builtin_frame_address:
5188 case TOK_builtin_return_address:
5190 int tok1 = tok;
5191 int level;
5192 next();
5193 skip('(');
5194 if (tok != TOK_CINT) {
5195 tcc_error("%s only takes positive integers",
5196 tok1 == TOK_builtin_return_address ?
5197 "__builtin_return_address" :
5198 "__builtin_frame_address");
5200 level = (uint32_t)tokc.i;
5201 next();
5202 skip(')');
5203 type.t = VT_VOID;
5204 mk_pointer(&type);
5205 vset(&type, VT_LOCAL, 0); /* local frame */
5206 while (level--) {
5207 mk_pointer(&vtop->type);
5208 indir(); /* -> parent frame */
5210 if (tok1 == TOK_builtin_return_address) {
5211 // assume return address is just above frame pointer on stack
5212 vpushi(PTR_SIZE);
5213 gen_op('+');
5214 mk_pointer(&vtop->type);
5215 indir();
5218 break;
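/* Illustrative use of the builtins above (example only):
       void *fp  = __builtin_frame_address(0);
       void *ret = __builtin_return_address(0);
   The argument must be an integer literal; each extra level follows one more
   saved frame pointer, and the return address is assumed to sit just above
   the frame pointer on the stack. */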
5219 #ifdef TCC_TARGET_RISCV64
5220 case TOK_builtin_va_start:
5221 parse_builtin_params(0, "ee");
5222 r = vtop->r & VT_VALMASK;
5223 if (r == VT_LLOCAL)
5224 r = VT_LOCAL;
5225 if (r != VT_LOCAL)
5226 tcc_error("__builtin_va_start expects a local variable");
5227 gen_va_start();
5228 vstore();
5229 break;
5230 #endif
5231 #ifdef TCC_TARGET_X86_64
5232 #ifdef TCC_TARGET_PE
5233 case TOK_builtin_va_start:
5234 parse_builtin_params(0, "ee");
5235 r = vtop->r & VT_VALMASK;
5236 if (r == VT_LLOCAL)
5237 r = VT_LOCAL;
5238 if (r != VT_LOCAL)
5239 tcc_error("__builtin_va_start expects a local variable");
5240 vtop->r = r;
5241 vtop->type = char_pointer_type;
5242 vtop->c.i += 8;
5243 vstore();
5244 break;
5245 #else
5246 case TOK_builtin_va_arg_types:
5247 parse_builtin_params(0, "t");
5248 vpushi(classify_x86_64_va_arg(&vtop->type));
5249 vswap();
5250 vpop();
5251 break;
5252 #endif
5253 #endif
5255 #ifdef TCC_TARGET_ARM64
5256 case TOK___va_start: {
5257 parse_builtin_params(0, "ee");
5258 //xx check types
5259 gen_va_start();
5260 vpushi(0);
5261 vtop->type.t = VT_VOID;
5262 break;
5264 case TOK___va_arg: {
5265 parse_builtin_params(0, "et");
5266 type = vtop->type;
5267 vpop();
5268 //xx check types
5269 gen_va_arg(&type);
5270 vtop->type = type;
5271 break;
5273 case TOK___arm64_clear_cache: {
5274 parse_builtin_params(0, "ee");
5275 gen_clear_cache();
5276 vpushi(0);
5277 vtop->type.t = VT_VOID;
5278 break;
5280 #endif
5281 /* pre operations */
5282 case TOK_INC:
5283 case TOK_DEC:
5284 t = tok;
5285 next();
5286 unary();
5287 inc(0, t);
5288 break;
5289 case '-':
5290 next();
5291 unary();
5292 t = vtop->type.t & VT_BTYPE;
5293 if (is_float(t)) {
5294 /* In IEEE negate(x) isn't subtract(0,x), but rather
5295 subtract(-0, x). */
5296 vpush(&vtop->type);
5297 if (t == VT_FLOAT)
5298 vtop->c.f = -1.0 * 0.0;
5299 else if (t == VT_DOUBLE)
5300 vtop->c.d = -1.0 * 0.0;
5301 else
5302 vtop->c.ld = -1.0 * 0.0;
5303 } else
5304 vpushi(0);
5305 vswap();
5306 gen_op('-');
5307 break;
5308 case TOK_LAND:
5309 if (!gnu_ext)
5310 goto tok_identifier;
5311 next();
5312 /* allow taking the address of a label */
5313 if (tok < TOK_UIDENT)
5314 expect("label identifier");
5315 s = label_find(tok);
5316 if (!s) {
5317 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
5318 } else {
5319 if (s->r == LABEL_DECLARED)
5320 s->r = LABEL_FORWARD;
5322 if (!s->type.t) {
5323 s->type.t = VT_VOID;
5324 mk_pointer(&s->type);
5325 s->type.t |= VT_STATIC;
5327 vpushsym(&s->type, s);
5328 next();
5329 break;
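/* Illustrative GNU label-address extension handled above (example only):
       void *p = &&done;
       goto *p;                 // computed goto, handled in block()
   done: ;
*/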
5331 case TOK_GENERIC:
5333 CType controlling_type;
5334 int has_default = 0;
5335 int has_match = 0;
5336 int learn = 0;
5337 TokenString *str = NULL;
5338 int saved_const_wanted = const_wanted;
5340 next();
5341 skip('(');
5342 const_wanted = 0;
5343 expr_type(&controlling_type, expr_eq);
5344 controlling_type.t &= ~(VT_CONSTANT | VT_VOLATILE | VT_ARRAY);
5345 if ((controlling_type.t & VT_BTYPE) == VT_FUNC)
5346 mk_pointer(&controlling_type);
5347 const_wanted = saved_const_wanted;
5348 for (;;) {
5349 learn = 0;
5350 skip(',');
5351 if (tok == TOK_DEFAULT) {
5352 if (has_default)
5353 tcc_error("too many 'default'");
5354 has_default = 1;
5355 if (!has_match)
5356 learn = 1;
5357 next();
5358 } else {
5359 AttributeDef ad_tmp;
5360 int itmp;
5361 CType cur_type;
5363 in_generic++;
5364 parse_btype(&cur_type, &ad_tmp);
5365 in_generic--;
5367 type_decl(&cur_type, &ad_tmp, &itmp, TYPE_ABSTRACT);
5368 if (compare_types(&controlling_type, &cur_type, 0)) {
5369 if (has_match) {
5370 tcc_error("type match twice");
5372 has_match = 1;
5373 learn = 1;
5376 skip(':');
5377 if (learn) {
5378 if (str)
5379 tok_str_free(str);
5380 skip_or_save_block(&str);
5381 } else {
5382 skip_or_save_block(NULL);
5384 if (tok == ')')
5385 break;
5387 if (!str) {
5388 char buf[60];
5389 type_to_str(buf, sizeof buf, &controlling_type, NULL);
5390 tcc_error("type '%s' does not match any association", buf);
5392 begin_macro(str, 1);
5393 next();
5394 expr_eq();
5395 if (tok != TOK_EOF)
5396 expect(",");
5397 end_macro();
5398 next();
5399 break;
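/* Illustrative C11 _Generic usage matching the parsing above (example from
   common usage, not from the original source):
       #define cbrt(X) _Generic((X), long double: cbrtl, \
                                     default: cbrt,      \
                                     float: cbrtf)(X)
   The controlling expression is typed but not evaluated; each association is
   skipped or saved as a token string, and only the selected branch is
   re-parsed through the macro mechanism. */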
5401 // special qnan, snan and infinity values
5402 case TOK___NAN__:
5403 n = 0x7fc00000;
5404 special_math_val:
5405 vpushi(n);
5406 vtop->type.t = VT_FLOAT;
5407 next();
5408 break;
5409 case TOK___SNAN__:
5410 n = 0x7f800001;
5411 goto special_math_val;
5412 case TOK___INF__:
5413 n = 0x7f800000;
5414 goto special_math_val;
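/* IEEE-754 binary32 bit patterns used above: 0x7f800000 is +infinity
   (all-ones exponent, zero mantissa); a non-zero mantissa makes a NaN, and
   the top mantissa bit distinguishes the quiet NaN 0x7fc00000 from the
   signaling NaN 0x7f800001. */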
5416 default:
5417 tok_identifier:
5418 t = tok;
5419 next();
5420 if (t < TOK_UIDENT)
5421 expect("identifier");
5422 s = sym_find(t);
5423 if (!s || IS_ASM_SYM(s)) {
5424 const char *name = get_tok_str(t, NULL);
5425 if (tok != '(')
5426 tcc_error("'%s' undeclared", name);
5427 /* for simple function calls, we tolerate an undeclared
5428 external reference to an int() function */
5429 if (tcc_state->warn_implicit_function_declaration
5430 #ifdef TCC_TARGET_PE
5431 /* people must be warned about using undeclared WINAPI functions
5432 (which usually start with an uppercase letter) */
5433 || (name[0] >= 'A' && name[0] <= 'Z')
5434 #endif
5436 tcc_warning("implicit declaration of function '%s'", name);
5437 s = external_global_sym(t, &func_old_type);
5440 r = s->r;
5441 /* A symbol that has a register is a local register variable,
5442 which starts out as a VT_LOCAL value. */
5443 if ((r & VT_VALMASK) < VT_CONST)
5444 r = (r & ~VT_VALMASK) | VT_LOCAL;
5446 vset(&s->type, r, s->c);
5447 /* Point to s as backpointer (even without r&VT_SYM).
5448 Will be used by at least the x86 inline asm parser for
5449 regvars. */
5450 vtop->sym = s;
5452 if (r & VT_SYM) {
5453 vtop->c.i = 0;
5454 } else if (r == VT_CONST && IS_ENUM_VAL(s->type.t)) {
5455 vtop->c.i = s->enum_val;
5457 break;
5460 /* post operations */
5461 while (1) {
5462 if (tok == TOK_INC || tok == TOK_DEC) {
5463 inc(1, tok);
5464 next();
5465 } else if (tok == '.' || tok == TOK_ARROW || tok == TOK_CDOUBLE) {
5466 int qualifiers, cumofs = 0;
5467 /* field */
5468 if (tok == TOK_ARROW)
5469 indir();
5470 qualifiers = vtop->type.t & (VT_CONSTANT | VT_VOLATILE);
5471 test_lvalue();
5472 gaddrof();
5473 /* expect pointer on structure */
5474 if ((vtop->type.t & VT_BTYPE) != VT_STRUCT)
5475 expect("struct or union");
5476 if (tok == TOK_CDOUBLE)
5477 expect("field name");
5478 next();
5479 if (tok == TOK_CINT || tok == TOK_CUINT)
5480 expect("field name");
5481 s = find_field(&vtop->type, tok, &cumofs);
5482 if (!s)
5483 tcc_error("field not found: %s", get_tok_str(tok & ~SYM_FIELD, &tokc));
5484 /* add field offset to pointer */
5485 vtop->type = char_pointer_type; /* change type to 'char *' */
5486 vpushi(cumofs + s->c);
5487 gen_op('+');
5488 /* change type to field type, and set to lvalue */
5489 vtop->type = s->type;
5490 vtop->type.t |= qualifiers;
5491 /* an array is never an lvalue */
5492 if (!(vtop->type.t & VT_ARRAY)) {
5493 vtop->r |= VT_LVAL;
5494 #ifdef CONFIG_TCC_BCHECK
5495 /* if bound checking, the referenced pointer must be checked */
5496 if (tcc_state->do_bounds_check && (vtop->r & VT_VALMASK) != VT_LOCAL)
5497 vtop->r |= VT_MUSTBOUND;
5498 #endif
5500 next();
5501 } else if (tok == '[') {
5502 next();
5503 gexpr();
5504 gen_op('+');
5505 indir();
5506 skip(']');
5507 } else if (tok == '(') {
5508 SValue ret;
5509 Sym *sa;
5510 int nb_args, ret_nregs, ret_align, regsize, variadic;
5512 #ifdef CONFIG_TCC_BCHECK
5513 if (tcc_state->do_bounds_check && (vtop->r & VT_SYM) && vtop->sym->v == TOK_alloca) {
5514 addr_t *bounds_ptr;
5516 bounds_ptr = section_ptr_add(lbounds_section, 2 * sizeof(addr_t));
5517 bounds_ptr[0] = 1; /* marks alloca/vla used */
5518 bounds_ptr[1] = 0;
5520 #endif
5521 /* function call */
5522 if ((vtop->type.t & VT_BTYPE) != VT_FUNC) {
5523 /* pointer test (no array accepted) */
5524 if ((vtop->type.t & (VT_BTYPE | VT_ARRAY)) == VT_PTR) {
5525 vtop->type = *pointed_type(&vtop->type);
5526 if ((vtop->type.t & VT_BTYPE) != VT_FUNC)
5527 goto error_func;
5528 } else {
5529 error_func:
5530 expect("function pointer");
5532 } else {
5533 vtop->r &= ~VT_LVAL; /* no lvalue */
5535 /* get return type */
5536 s = vtop->type.ref;
5537 next();
5538 sa = s->next; /* first parameter */
5539 nb_args = regsize = 0;
5540 ret.r2 = VT_CONST;
5541 /* compute first implicit argument if a structure is returned */
5542 if ((s->type.t & VT_BTYPE) == VT_STRUCT) {
5543 variadic = (s->f.func_type == FUNC_ELLIPSIS);
5544 ret_nregs = gfunc_sret(&s->type, variadic, &ret.type,
5545 &ret_align, &regsize);
5546 if (ret_nregs <= 0) {
5547 /* get some space for the returned structure */
5548 size = type_size(&s->type, &align);
5549 #ifdef TCC_TARGET_ARM64
5550 /* On arm64, a small struct is returned in registers.
5551 It is much easier to write it to memory if we know
5552 that we are allowed to write some extra bytes, so
5553 round the allocated space up to a power of 2: */
5554 if (size < 16)
5555 while (size & (size - 1))
5556 size = (size | (size - 1)) + 1;
5557 #endif
5558 loc = (loc - size) & -align;
5559 ret.type = s->type;
5560 ret.r = VT_LOCAL | VT_LVAL;
5561 /* pass it as 'int' to avoid structure arg passing
5562 problems */
5563 vseti(VT_LOCAL, loc);
5564 ret.c = vtop->c;
5565 if (ret_nregs < 0)
5566 vtop--;
5567 else
5568 nb_args++;
5570 } else {
5571 ret_nregs = 1;
5572 ret.type = s->type;
5575 if (ret_nregs > 0) {
5576 /* return in register */
5577 ret.c.i = 0;
5578 PUT_R_RET(&ret, ret.type.t);
5580 if (tok != ')') {
5581 for(;;) {
5582 expr_eq();
5583 gfunc_param_typed(s, sa);
5584 nb_args++;
5585 if (sa)
5586 sa = sa->next;
5587 if (tok == ')')
5588 break;
5589 skip(',');
5592 if (sa)
5593 tcc_error("too few arguments to function");
5594 skip(')');
5595 gfunc_call(nb_args);
5597 if (ret_nregs < 0) {
5598 vsetc(&ret.type, ret.r, &ret.c);
5599 #ifdef TCC_TARGET_RISCV64
5600 arch_transfer_ret_regs(1);
5601 #endif
5602 } else {
5603 /* return value */
5604 for (r = ret.r + ret_nregs + !ret_nregs; r-- > ret.r;) {
5605 vsetc(&ret.type, r, &ret.c);
5606 vtop->r2 = ret.r2; /* Loop only happens when r2 is VT_CONST */
5609 /* handle packed struct return */
5610 if (((s->type.t & VT_BTYPE) == VT_STRUCT) && ret_nregs) {
5611 int addr, offset;
5613 size = type_size(&s->type, &align);
5614 /* We're writing whole regs often, make sure there's enough
5615 space. Assume the register size is a power of 2. */
5616 if (regsize > align)
5617 align = regsize;
5618 loc = (loc - size) & -align;
5619 addr = loc;
5620 offset = 0;
5621 for (;;) {
5622 vset(&ret.type, VT_LOCAL | VT_LVAL, addr + offset);
5623 vswap();
5624 vstore();
5625 vtop--;
5626 if (--ret_nregs == 0)
5627 break;
5628 offset += regsize;
5630 vset(&s->type, VT_LOCAL | VT_LVAL, addr);
5633 /* Promote char/short return values. This matters only
5634 when calling functions that were not compiled by TCC */
5635 t = s->type.t & VT_BTYPE;
5636 if (t == VT_BYTE || t == VT_SHORT || t == VT_BOOL)
5637 vtop->r |= BFVAL(VT_MUSTCAST, 1);
5639 if (s->f.func_noreturn)
5640 CODE_OFF();
5641 } else {
5642 break;
5647 ST_FUNC void expr_prod(void)
5649 int t;
5651 unary();
5652 while (tok == '*' || tok == '/' || tok == '%') {
5653 t = tok;
5654 next();
5655 unary();
5656 gen_op(t);
5660 ST_FUNC void expr_sum(void)
5662 int t;
5664 expr_prod();
5665 while (tok == '+' || tok == '-') {
5666 t = tok;
5667 next();
5668 expr_prod();
5669 gen_op(t);
5673 static void expr_shift(void)
5675 int t;
5677 expr_sum();
5678 while (tok == TOK_SHL || tok == TOK_SAR) {
5679 t = tok;
5680 next();
5681 expr_sum();
5682 gen_op(t);
5686 static void expr_cmp(void)
5688 int t;
5690 expr_shift();
5691 while ((tok >= TOK_ULE && tok <= TOK_GT) ||
5692 tok == TOK_ULT || tok == TOK_UGE) {
5693 t = tok;
5694 next();
5695 expr_shift();
5696 gen_op(t);
5700 static void expr_cmpeq(void)
5702 int t;
5704 expr_cmp();
5705 while (tok == TOK_EQ || tok == TOK_NE) {
5706 t = tok;
5707 next();
5708 expr_cmp();
5709 gen_op(t);
5713 static void expr_and(void)
5715 expr_cmpeq();
5716 while (tok == '&') {
5717 next();
5718 expr_cmpeq();
5719 gen_op('&');
5723 static void expr_xor(void)
5725 expr_and();
5726 while (tok == '^') {
5727 next();
5728 expr_and();
5729 gen_op('^');
5733 static void expr_or(void)
5735 expr_xor();
5736 while (tok == '|') {
5737 next();
5738 expr_xor();
5739 gen_op('|');
5743 static int condition_3way(void);
5745 static void expr_landor(void(*e_fn)(void), int e_op, int i)
5747 int t = 0, cc = 1, f = 0, c;
5748 for(;;) {
5749 c = f ? i : condition_3way();
5750 if (c < 0) {
5751 save_regs(1), cc = 0;
5752 } else if (c != i) {
5753 nocode_wanted++, f = 1;
5755 if (tok != e_op) {
5756 if (cc || f) {
5757 vpop();
5758 vpushi(i ^ f);
5759 gsym(t);
5760 nocode_wanted -= f;
5761 } else {
5762 gvtst_set(i, t);
5764 break;
5766 if (c < 0)
5767 t = gvtst(i, t);
5768 else
5769 vpop();
5770 next();
5771 e_fn();
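/* Illustrative folding performed by expr_landor() (examples only):
       if (1 && f()) ...   // the constant 1 is dropped, only f() is tested
       if (0 && f()) ...   // f() is parsed under nocode_wanted and the whole
                           // expression becomes the constant 0
*/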
5775 static void expr_land(void)
5777 expr_or();
5778 if (tok == TOK_LAND)
5779 expr_landor(expr_or, TOK_LAND, 1);
5782 static void expr_lor(void)
5784 expr_land();
5785 if (tok == TOK_LOR)
5786 expr_landor(expr_land, TOK_LOR, 0);
5789 /* Assuming vtop is a value used in a conditional context
5790 (i.e. compared with zero) return 0 if it's false, 1 if
5791 true and -1 if it can't be statically determined. */
5792 static int condition_3way(void)
5794 int c = -1;
5795 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
5796 (!(vtop->r & VT_SYM) || !vtop->sym->a.weak)) {
5797 vdup();
5798 gen_cast_s(VT_BOOL);
5799 c = vtop->c.i;
5800 vpop();
5802 return c;
5805 static int is_cond_bool(SValue *sv)
5807 if ((sv->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST
5808 && (sv->type.t & VT_BTYPE) == VT_INT)
5809 return (unsigned)sv->c.i < 2;
5810 if (sv->r == VT_CMP)
5811 return 1;
5812 return 0;
5815 static void expr_cond(void)
5817 int tt, u, r1, r2, rc, t1, t2, bt1, bt2, islv, c, g;
5818 SValue sv;
5819 CType type, type1, type2;
5820 int ncw_prev;
5822 expr_lor();
5823 if (tok == '?') {
5824 next();
5825 c = condition_3way();
5826 g = (tok == ':' && gnu_ext);
5827 tt = 0;
5828 if (!g) {
5829 if (c < 0) {
5830 save_regs(1);
5831 tt = gvtst(1, 0);
5832 } else {
5833 vpop();
5835 } else if (c < 0) {
5836 /* needed to avoid having different registers saved in
5837 each branch */
5838 save_regs(1);
5839 gv_dup();
5840 tt = gvtst(0, 0);
5843 ncw_prev = nocode_wanted;
5844 if (1) {
5845 if (c == 0)
5846 nocode_wanted++;
5847 if (!g)
5848 gexpr();
5850 if (c < 0 && vtop->r == VT_CMP) {
5851 t1 = gvtst(0, 0);
5852 vpushi(0);
5853 gvtst_set(0, t1);
5856 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
5857 mk_pointer(&vtop->type);
5858 type1 = vtop->type;
5859 sv = *vtop; /* save value to handle it later */
5860 vtop--; /* no vpop so that FP stack is not flushed */
5862 if (g) {
5863 u = tt;
5864 } else if (c < 0) {
5865 u = gjmp(0);
5866 gsym(tt);
5867 } else
5868 u = 0;
5870 nocode_wanted = ncw_prev;
5871 if (c == 1)
5872 nocode_wanted++;
5873 skip(':');
5874 expr_cond();
5876 if (c < 0 && is_cond_bool(vtop) && is_cond_bool(&sv)) {
5877 if (sv.r == VT_CMP) {
5878 t1 = sv.jtrue;
5879 t2 = u;
5880 } else {
5881 t1 = gvtst(0, 0);
5882 t2 = gjmp(0);
5883 gsym(u);
5884 vpushv(&sv);
5886 gvtst_set(0, t1);
5887 gvtst_set(1, t2);
5888 nocode_wanted = ncw_prev;
5889 // tcc_warning("two conditions expr_cond");
5890 return;
5893 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
5894 mk_pointer(&vtop->type);
5895 type2=vtop->type;
5896 t1 = type1.t;
5897 bt1 = t1 & VT_BTYPE;
5898 t2 = type2.t;
5899 bt2 = t2 & VT_BTYPE;
5900 type.ref = NULL;
5902 /* cast operands to correct type according to ISOC rules */
5903 if (bt1 == VT_VOID || bt2 == VT_VOID) {
5904 type.t = VT_VOID; /* NOTE: as an extension, we accept void on only one side */
5905 } else if (is_float(bt1) || is_float(bt2)) {
5906 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
5907 type.t = VT_LDOUBLE;
5909 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
5910 type.t = VT_DOUBLE;
5911 } else {
5912 type.t = VT_FLOAT;
5914 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
5915 /* cast to biggest op */
5916 type.t = VT_LLONG | VT_LONG;
5917 if (bt1 == VT_LLONG)
5918 type.t &= t1;
5919 if (bt2 == VT_LLONG)
5920 type.t &= t2;
5921 /* convert to unsigned if it does not fit in a long long */
5922 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
5923 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
5924 type.t |= VT_UNSIGNED;
5925 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
5926 /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
5927 /* If one is a null ptr constant the result type
5928 is the other. */
5929 if (is_null_pointer (vtop)) type = type1;
5930 else if (is_null_pointer (&sv)) type = type2;
5931 else if (bt1 != bt2)
5932 tcc_error("incompatible types in conditional expressions");
5933 else {
5934 CType *pt1 = pointed_type(&type1);
5935 CType *pt2 = pointed_type(&type2);
5936 int pbt1 = pt1->t & VT_BTYPE;
5937 int pbt2 = pt2->t & VT_BTYPE;
5938 int newquals, copied = 0;
5939 /* pointers to void get preferred, otherwise the
5940 pointed-to types minus qualifiers should be compatible */
5941 type = (pbt1 == VT_VOID) ? type1 : type2;
5942 if (pbt1 != VT_VOID && pbt2 != VT_VOID) {
5943 if(!compare_types(pt1, pt2, 1/*unqualif*/))
5944 tcc_warning("pointer type mismatch in conditional expression\n");
5946 /* combine qualifs */
5947 newquals = ((pt1->t | pt2->t) & (VT_CONSTANT | VT_VOLATILE));
5948 if ((~pointed_type(&type)->t & (VT_CONSTANT | VT_VOLATILE))
5949 & newquals)
5951 /* copy the pointer target symbol */
5952 type.ref = sym_push(SYM_FIELD, &type.ref->type,
5953 0, type.ref->c);
5954 copied = 1;
5955 pointed_type(&type)->t |= newquals;
5957 /* pointers to incomplete arrays get converted to
5958 pointers to completed ones if possible */
5959 if (pt1->t & VT_ARRAY
5960 && pt2->t & VT_ARRAY
5961 && pointed_type(&type)->ref->c < 0
5962 && (pt1->ref->c > 0 || pt2->ref->c > 0))
5964 if (!copied)
5965 type.ref = sym_push(SYM_FIELD, &type.ref->type,
5966 0, type.ref->c);
5967 pointed_type(&type)->ref =
5968 sym_push(SYM_FIELD, &pointed_type(&type)->ref->type,
5969 0, pointed_type(&type)->ref->c);
5970 pointed_type(&type)->ref->c =
5971 0 < pt1->ref->c ? pt1->ref->c : pt2->ref->c;
5974 } else if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
5975 /* XXX: test structure compatibility */
5976 type = bt1 == VT_STRUCT ? type1 : type2;
5977 } else {
5978 /* integer operations */
5979 type.t = VT_INT | (VT_LONG & (t1 | t2));
5980 /* convert to unsigned if it does not fit in an integer */
5981 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
5982 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
5983 type.t |= VT_UNSIGNED;
5985 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
5986 that `(expr ? a : b).mem` does not error with "lvalue expected" */
5987 islv = (vtop->r & VT_LVAL) && (sv.r & VT_LVAL) && VT_STRUCT == (type.t & VT_BTYPE);
5989 /* now we convert second operand */
5990 if (c != 1) {
5991 gen_cast(&type);
5992 if (islv) {
5993 mk_pointer(&vtop->type);
5994 gaddrof();
5995 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
5996 gaddrof();
5999 rc = RC_TYPE(type.t);
6000 /* for long longs, we use fixed registers to avoid having
6001 to handle a complicated move */
6002 if (USING_TWO_WORDS(type.t))
6003 rc = RC_RET(type.t);
6005 tt = r2 = 0;
6006 if (c < 0) {
6007 r2 = gv(rc);
6008 tt = gjmp(0);
6010 gsym(u);
6011 nocode_wanted = ncw_prev;
6013 /* this is horrible, but we must also convert the first
6014 operand */
6015 if (c != 0) {
6016 *vtop = sv;
6017 gen_cast(&type);
6018 if (islv) {
6019 mk_pointer(&vtop->type);
6020 gaddrof();
6021 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
6022 gaddrof();
6025 if (c < 0) {
6026 r1 = gv(rc);
6027 move_reg(r2, r1, islv ? VT_PTR : type.t);
6028 vtop->r = r2;
6029 gsym(tt);
6032 if (islv)
6033 indir();
6038 static void expr_eq(void)
6040 int t;
6042 expr_cond();
6043 if (tok == '=' ||
6044 (tok >= TOK_A_MOD && tok <= TOK_A_DIV) ||
6045 tok == TOK_A_XOR || tok == TOK_A_OR ||
6046 tok == TOK_A_SHL || tok == TOK_A_SAR) {
6047 test_lvalue();
6048 t = tok;
6049 next();
6050 if (t == '=') {
6051 expr_eq();
6052 } else {
6053 vdup();
6054 expr_eq();
6055 gen_op(t & 0x7f);
6057 vstore();
6061 ST_FUNC void gexpr(void)
6063 while (1) {
6064 expr_eq();
6065 if (tok != ',')
6066 break;
6067 vpop();
6068 next();
6072 /* parse a constant expression and return value in vtop. */
6073 static void expr_const1(void)
6075 const_wanted++;
6076 nocode_wanted++;
6077 expr_cond();
6078 nocode_wanted--;
6079 const_wanted--;
6082 /* parse an integer constant and return its value. */
6083 static inline int64_t expr_const64(void)
6085 int64_t c;
6086 expr_const1();
6087 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
6088 expect("constant expression");
6089 c = vtop->c.i;
6090 vpop();
6091 return c;
6094 /* parse an integer constant and return its value.
6095 Complain if it doesn't fit 32bit (signed or unsigned). */
6096 ST_FUNC int expr_const(void)
6098 int c;
6099 int64_t wc = expr_const64();
6100 c = wc;
6101 if (c != wc && (unsigned)c != wc)
6102 tcc_error("constant exceeds 32 bit");
6103 return c;
6106 /* ------------------------------------------------------------------------- */
6107 /* return from function */
6109 #ifndef TCC_TARGET_ARM64
6110 static void gfunc_return(CType *func_type)
6112 if ((func_type->t & VT_BTYPE) == VT_STRUCT) {
6113 CType type, ret_type;
6114 int ret_align, ret_nregs, regsize;
6115 ret_nregs = gfunc_sret(func_type, func_var, &ret_type,
6116 &ret_align, &regsize);
6117 if (ret_nregs < 0) {
6118 #ifdef TCC_TARGET_RISCV64
6119 arch_transfer_ret_regs(0);
6120 #endif
6121 } else if (0 == ret_nregs) {
6122 /* if returning a structure, we must copy it to the implicit
6123 first pointer arg location */
6124 type = *func_type;
6125 mk_pointer(&type);
6126 vset(&type, VT_LOCAL | VT_LVAL, func_vc);
6127 indir();
6128 vswap();
6129 /* copy structure value to pointer */
6130 vstore();
6131 } else {
6132 /* returning structure packed into registers */
6133 int size, addr, align, rc;
6134 size = type_size(func_type,&align);
6135 if ((vtop->r != (VT_LOCAL | VT_LVAL) ||
6136 (vtop->c.i & (ret_align-1)))
6137 && (align & (ret_align-1))) {
6138 loc = (loc - size) & -ret_align;
6139 addr = loc;
6140 type = *func_type;
6141 vset(&type, VT_LOCAL | VT_LVAL, addr);
6142 vswap();
6143 vstore();
6144 vpop();
6145 vset(&ret_type, VT_LOCAL | VT_LVAL, addr);
6147 vtop->type = ret_type;
6148 rc = RC_RET(ret_type.t);
6149 if (ret_nregs == 1)
6150 gv(rc);
6151 else {
6152 for (;;) {
6153 vdup();
6154 gv(rc);
6155 vpop();
6156 if (--ret_nregs == 0)
6157 break;
6158 /* We assume that when a structure is returned in multiple
6159 registers, their classes are consecutive values of the
6160 sequence s(n) = 2^n */
6161 rc <<= 1;
6162 vtop->c.i += regsize;
6166 } else {
6167 gv(RC_RET(func_type->t));
6169 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
6171 #endif
6173 static void check_func_return(void)
6175 if ((func_vt.t & VT_BTYPE) == VT_VOID)
6176 return;
6177 if (!strcmp (funcname, "main")
6178 && (func_vt.t & VT_BTYPE) == VT_INT) {
6179 /* main returns 0 by default */
6180 vpushi(0);
6181 gen_assign_cast(&func_vt);
6182 gfunc_return(&func_vt);
6183 } else {
6184 tcc_warning("function might return no value: '%s'", funcname);
6188 /* ------------------------------------------------------------------------- */
6189 /* switch/case */
6191 static int case_cmp(const void *pa, const void *pb)
6193 int64_t a = (*(struct case_t**) pa)->v1;
6194 int64_t b = (*(struct case_t**) pb)->v1;
6195 return a < b ? -1 : a > b;
6198 static void gtst_addr(int t, int a)
6200 gsym_addr(gvtst(0, t), a);
6203 static void gcase(struct case_t **base, int len, int *bsym)
6205 struct case_t *p;
6206 int e;
6207 int ll = (vtop->type.t & VT_BTYPE) == VT_LLONG;
6208 while (len > 8) {
6209 /* binary search */
6210 p = base[len/2];
6211 vdup();
6212 if (ll)
6213 vpushll(p->v2);
6214 else
6215 vpushi(p->v2);
6216 gen_op(TOK_LE);
6217 e = gvtst(1, 0);
6218 vdup();
6219 if (ll)
6220 vpushll(p->v1);
6221 else
6222 vpushi(p->v1);
6223 gen_op(TOK_GE);
6224 gtst_addr(0, p->sym); /* v1 <= x <= v2 */
6225 /* x < v1 */
6226 gcase(base, len/2, bsym);
6227 /* x > v2 */
6228 gsym(e);
6229 e = len/2 + 1;
6230 base += e; len -= e;
6232 /* linear scan */
6233 while (len--) {
6234 p = *base++;
6235 vdup();
6236 if (ll)
6237 vpushll(p->v2);
6238 else
6239 vpushi(p->v2);
6240 if (p->v1 == p->v2) {
6241 gen_op(TOK_EQ);
6242 gtst_addr(0, p->sym);
6243 } else {
6244 gen_op(TOK_LE);
6245 e = gvtst(1, 0);
6246 vdup();
6247 if (ll)
6248 vpushll(p->v1);
6249 else
6250 vpushi(p->v1);
6251 gen_op(TOK_GE);
6252 gtst_addr(0, p->sym);
6253 gsym(e);
6256 *bsym = gjmp(*bsym);
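/* Dispatch strategy above (illustrative): with the case ranges sorted, more
   than 8 entries are handled by comparing against the middle range's bounds
   (binary search), the remainder by a linear scan.  Each entry is a
   (v1, v2, sym) triple, so a GNU range like
       case 5 ... 9:
   becomes a single entry with v1 = 5 and v2 = 9. */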
6259 /* ------------------------------------------------------------------------- */
6260 /* __attribute__((cleanup(fn))) */
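/* Illustrative cleanup usage handled by the helpers below (example only;
   'unlock' and 'get_lock' are hypothetical):
       void unlock(int **p);
       {
           __attribute__((cleanup(unlock))) int *l = get_lock();
           ...                  // unlock(&l) is called on every scope exit,
       }                        // including break/continue/goto/return
*/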
6262 static void try_call_scope_cleanup(Sym *stop)
6264 Sym *cls = cur_scope->cl.s;
6266 for (; cls != stop; cls = cls->ncl) {
6267 Sym *fs = cls->next;
6268 Sym *vs = cls->prev_tok;
6270 vpushsym(&fs->type, fs);
6271 vset(&vs->type, vs->r, vs->c);
6272 vtop->sym = vs;
6273 mk_pointer(&vtop->type);
6274 gaddrof();
6275 gfunc_call(1);
6279 static void try_call_cleanup_goto(Sym *cleanupstate)
6281 Sym *oc, *cc;
6282 int ocd, ccd;
6284 if (!cur_scope->cl.s)
6285 return;
6287 /* search the nearest common ancestor (NCA) of both cleanup chains, given parents and initial depth */
6288 ocd = cleanupstate ? cleanupstate->v & ~SYM_FIELD : 0;
6289 for (ccd = cur_scope->cl.n, oc = cleanupstate; ocd > ccd; --ocd, oc = oc->ncl)
6291 for (cc = cur_scope->cl.s; ccd > ocd; --ccd, cc = cc->ncl)
6293 for (; cc != oc; cc = cc->ncl, oc = oc->ncl, --ccd)
6296 try_call_scope_cleanup(cc);
6299 /* call 'func' for each __attribute__((cleanup(func))) */
6300 static void block_cleanup(struct scope *o)
6302 int jmp = 0;
6303 Sym *g, **pg;
6304 for (pg = &pending_gotos; (g = *pg) && g->c > o->cl.n;) {
6305 if (g->prev_tok->r & LABEL_FORWARD) {
6306 Sym *pcl = g->next;
6307 if (!jmp)
6308 jmp = gjmp(0);
6309 gsym(pcl->jnext);
6310 try_call_scope_cleanup(o->cl.s);
6311 pcl->jnext = gjmp(0);
6312 if (!o->cl.n)
6313 goto remove_pending;
6314 g->c = o->cl.n;
6315 pg = &g->prev;
6316 } else {
6317 remove_pending:
6318 *pg = g->prev;
6319 sym_free(g);
6322 gsym(jmp);
6323 try_call_scope_cleanup(o->cl.s);
6326 /* ------------------------------------------------------------------------- */
6327 /* VLA */
6329 static void vla_restore(int loc)
6331 if (loc)
6332 gen_vla_sp_restore(loc);
6335 static void vla_leave(struct scope *o)
6337 if (o->vla.num < cur_scope->vla.num)
6338 vla_restore(o->vla.loc);
6341 /* ------------------------------------------------------------------------- */
6342 /* local scopes */
6344 void new_scope(struct scope *o)
6346 /* copy and link previous scope */
6347 *o = *cur_scope;
6348 o->prev = cur_scope;
6349 cur_scope = o;
6351 /* record local declaration stack position */
6352 o->lstk = local_stack;
6353 o->llstk = local_label_stack;
6355 ++local_scope;
6358 void prev_scope(struct scope *o, int is_expr)
6360 vla_leave(o->prev);
6362 if (o->cl.s != o->prev->cl.s)
6363 block_cleanup(o->prev);
6365 /* pop locally defined labels */
6366 label_pop(&local_label_stack, o->llstk, is_expr);
6368 /* In the is_expr case (a statement expression is finished here),
6369 vtop might refer to symbols on the local_stack. Either via the
6370 type or via vtop->sym. We can't pop those nor any that in turn
6371 might be referred to. To make it easier we don't roll back
6372 any symbols in that case; some upper level call to block() will
6373 do that. We do have to remove such symbols from the lookup
6374 tables, though. sym_pop will do that. */
6376 /* pop locally defined symbols */
6377 sym_pop(&local_stack, o->lstk, is_expr);
6379 cur_scope = o->prev;
6380 --local_scope;
6383 /* leave a scope via break/continue(/goto) */
6384 void leave_scope(struct scope *o)
6386 if (!o)
6387 return;
6388 try_call_scope_cleanup(o->cl.s);
6389 vla_leave(o);
6392 /* ------------------------------------------------------------------------- */
6393 /* call block from 'for do while' loops */
6395 static void lblock(int *bsym, int *csym)
6397 struct scope *lo = loop_scope, *co = cur_scope;
6398 int *b = co->bsym, *c = co->csym;
6399 if (csym) {
6400 co->csym = csym;
6401 loop_scope = co;
6403 co->bsym = bsym;
6404 block(0);
6405 co->bsym = b;
6406 if (csym) {
6407 co->csym = c;
6408 loop_scope = lo;
6412 static void block(int is_expr)
6414 int a, b, c, d, e, t;
6415 Sym *s;
6417 if (is_expr) {
6418 /* default return value is (void) */
6419 vpushi(0);
6420 vtop->type.t = VT_VOID;
6423 again:
6424 t = tok, next();
6426 if (t == TOK_IF) {
6427 skip('(');
6428 gexpr();
6429 skip(')');
6430 a = gvtst(1, 0);
6431 block(0);
6432 if (tok == TOK_ELSE) {
6433 d = gjmp(0);
6434 gsym(a);
6435 next();
6436 block(0);
6437 gsym(d); /* patch else jmp */
6438 } else {
6439 gsym(a);
6442 } else if (t == TOK_WHILE) {
6443 d = gind();
6444 skip('(');
6445 gexpr();
6446 skip(')');
6447 a = gvtst(1, 0);
6448 b = 0;
6449 lblock(&a, &b);
6450 gjmp_addr(d);
6451 gsym_addr(b, d);
6452 gsym(a);
6454 } else if (t == '{') {
6455 struct scope o;
6456 new_scope(&o);
6458 /* handle local labels declarations */
6459 while (tok == TOK_LABEL) {
6460 do {
6461 next();
6462 if (tok < TOK_UIDENT)
6463 expect("label identifier");
6464 label_push(&local_label_stack, tok, LABEL_DECLARED);
6465 next();
6466 } while (tok == ',');
6467 skip(';');
6470 while (tok != '}') {
6471 decl(VT_LOCAL);
6472 if (tok != '}') {
6473 if (is_expr)
6474 vpop();
6475 block(is_expr);
6479 prev_scope(&o, is_expr);
6481 if (0 == local_scope && !nocode_wanted)
6482 check_func_return();
6483 next();
6485 } else if (t == TOK_RETURN) {
6486 b = (func_vt.t & VT_BTYPE) != VT_VOID;
6487 if (tok != ';') {
6488 gexpr();
6489 if (b) {
6490 gen_assign_cast(&func_vt);
6491 } else {
6492 if (vtop->type.t != VT_VOID)
6493 tcc_warning("void function returns a value");
6494 vtop--;
6496 } else if (b) {
6497 tcc_warning("'return' with no value");
6498 b = 0;
6500 leave_scope(root_scope);
6501 if (b)
6502 gfunc_return(&func_vt);
6503 skip(';');
6504 /* jump unless last stmt in top-level block */
6505 if (tok != '}' || local_scope != 1)
6506 rsym = gjmp(rsym);
6507 CODE_OFF();
6509 } else if (t == TOK_BREAK) {
6510 /* compute jump */
6511 if (!cur_scope->bsym)
6512 tcc_error("cannot break");
6513 if (!cur_switch || cur_scope->bsym != cur_switch->bsym)
6514 leave_scope(loop_scope);
6515 else
6516 leave_scope(cur_switch->scope);
6517 *cur_scope->bsym = gjmp(*cur_scope->bsym);
6518 skip(';');
6520 } else if (t == TOK_CONTINUE) {
6521 /* compute jump */
6522 if (!cur_scope->csym)
6523 tcc_error("cannot continue");
6524 leave_scope(loop_scope);
6525 *cur_scope->csym = gjmp(*cur_scope->csym);
6526 skip(';');
6528 } else if (t == TOK_FOR) {
6529 struct scope o;
6530 new_scope(&o);
6532 skip('(');
6533 if (tok != ';') {
6534 /* c99 for-loop init decl? */
6535 if (!decl0(VT_LOCAL, 1, NULL)) {
6536 /* no, regular for-loop init expr */
6537 gexpr();
6538 vpop();
6541 skip(';');
6542 a = b = 0;
6543 c = d = gind();
6544 if (tok != ';') {
6545 gexpr();
6546 a = gvtst(1, 0);
6548 skip(';');
6549 if (tok != ')') {
6550 e = gjmp(0);
6551 d = gind();
6552 gexpr();
6553 vpop();
6554 gjmp_addr(c);
6555 gsym(e);
6557 skip(')');
6558 lblock(&a, &b);
6559 gjmp_addr(d);
6560 gsym_addr(b, d);
6561 gsym(a);
6562 prev_scope(&o, 0);
6564 } else if (t == TOK_DO) {
6565 a = b = 0;
6566 d = gind();
6567 lblock(&a, &b);
6568 gsym(b);
6569 skip(TOK_WHILE);
6570 skip('(');
6571 gexpr();
6572 skip(')');
6573 skip(';');
6574 c = gvtst(0, 0);
6575 gsym_addr(c, d);
6576 gsym(a);
6578 } else if (t == TOK_SWITCH) {
6579 struct switch_t *saved, sw;
6580 SValue switchval;
6582 sw.p = NULL;
6583 sw.n = 0;
6584 sw.def_sym = 0;
6585 sw.bsym = &a;
6586 sw.scope = cur_scope;
6588 saved = cur_switch;
6589 cur_switch = &sw;
6591 skip('(');
6592 gexpr();
6593 skip(')');
6594 switchval = *vtop--;
6596 a = 0;
6597 b = gjmp(0); /* jump to first case */
6598 lblock(&a, NULL);
6599 a = gjmp(a); /* add implicit break */
6600 /* case lookup */
6601 gsym(b);
6603 qsort(sw.p, sw.n, sizeof(void*), case_cmp);
6604 for (b = 1; b < sw.n; b++)
6605 if (sw.p[b - 1]->v2 >= sw.p[b]->v1)
6606 tcc_error("duplicate case value");
6608 /* Our switch table sorting is signed, so the compared
6609 value needs to be as well when it's 64bit. */
6610 if ((switchval.type.t & VT_BTYPE) == VT_LLONG)
6611 switchval.type.t &= ~VT_UNSIGNED;
6612 vpushv(&switchval);
6613 gv(RC_INT);
6614 d = 0, gcase(sw.p, sw.n, &d);
6615 vpop();
6616 if (sw.def_sym)
6617 gsym_addr(d, sw.def_sym);
6618 else
6619 gsym(d);
6620 /* break label */
6621 gsym(a);
6623 dynarray_reset(&sw.p, &sw.n);
6624 cur_switch = saved;
6626 } else if (t == TOK_CASE) {
6627 struct case_t *cr = tcc_malloc(sizeof(struct case_t));
6628 if (!cur_switch)
6629 expect("switch");
6630 cr->v1 = cr->v2 = expr_const64();
6631 if (gnu_ext && tok == TOK_DOTS) {
6632 next();
6633 cr->v2 = expr_const64();
6634 if (cr->v2 < cr->v1)
6635 tcc_warning("empty case range");
6637 cr->sym = gind();
6638 dynarray_add(&cur_switch->p, &cur_switch->n, cr);
6639 skip(':');
6640 is_expr = 0;
6641 goto block_after_label;
6643 } else if (t == TOK_DEFAULT) {
6644 if (!cur_switch)
6645 expect("switch");
6646 if (cur_switch->def_sym)
6647 tcc_error("too many 'default'");
6648 cur_switch->def_sym = gind();
6649 skip(':');
6650 is_expr = 0;
6651 goto block_after_label;
6653 } else if (t == TOK_GOTO) {
6654 vla_restore(root_scope->vla.loc);
6655 if (tok == '*' && gnu_ext) {
6656 /* computed goto */
6657 next();
6658 gexpr();
6659 if ((vtop->type.t & VT_BTYPE) != VT_PTR)
6660 expect("pointer");
6661 ggoto();
6663 } else if (tok >= TOK_UIDENT) {
6664 s = label_find(tok);
6665 /* put forward definition if needed */
6666 if (!s)
6667 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
6668 else if (s->r == LABEL_DECLARED)
6669 s->r = LABEL_FORWARD;
6671 if (s->r & LABEL_FORWARD) {
6672 /* start new goto chain for cleanups, linked via label->next */
6673 if (cur_scope->cl.s && !nocode_wanted) {
6674 sym_push2(&pending_gotos, SYM_FIELD, 0, cur_scope->cl.n);
6675 pending_gotos->prev_tok = s;
6676 s = sym_push2(&s->next, SYM_FIELD, 0, 0);
6677 pending_gotos->next = s;
6679 s->jnext = gjmp(s->jnext);
6680 } else {
6681 try_call_cleanup_goto(s->cleanupstate);
6682 gjmp_addr(s->jnext);
6684 next();
6686 } else {
6687 expect("label identifier");
6689 skip(';');
6691 } else if (t == TOK_ASM1 || t == TOK_ASM2 || t == TOK_ASM3) {
6692 asm_instr();
6694 } else {
6695 if (tok == ':' && t >= TOK_UIDENT) {
6696 /* label case */
6697 next();
6698 s = label_find(t);
6699 if (s) {
6700 if (s->r == LABEL_DEFINED)
6701 tcc_error("duplicate label '%s'", get_tok_str(s->v, NULL));
6702 s->r = LABEL_DEFINED;
6703 if (s->next) {
6704 Sym *pcl; /* pending cleanup goto */
6705 for (pcl = s->next; pcl; pcl = pcl->prev)
6706 gsym(pcl->jnext);
6707 sym_pop(&s->next, NULL, 0);
6708 } else
6709 gsym(s->jnext);
6710 } else {
6711 s = label_push(&global_label_stack, t, LABEL_DEFINED);
6713 s->jnext = gind();
6714 s->cleanupstate = cur_scope->cl.s;
6716 block_after_label:
6717 vla_restore(cur_scope->vla.loc);
6718 /* we accept this, but it is a mistake */
6719 if (tok == '}') {
6720 tcc_warning("deprecated use of label at end of compound statement");
6721 } else {
6722 goto again;
6725 } else {
6726 /* expression case */
6727 if (t != ';') {
6728 unget_tok(t);
6729 if (is_expr) {
6730 vpop();
6731 gexpr();
6732 } else {
6733 gexpr();
6734 vpop();
6736 skip(';');
6742 /* This skips over a stream of tokens containing balanced {} and ()
6743 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
6744 with a '{'). If STR is non-NULL, the skipped tokens are allocated and
6745 stored in *STR. This doesn't check whether () and {} are nested correctly,
6746 i.e. "({)}" is accepted. */
6747 static void skip_or_save_block(TokenString **str)
6749 int braces = tok == '{';
6750 int level = 0;
6751 if (str)
6752 *str = tok_str_alloc();
6754 while ((level > 0 || (tok != '}' && tok != ',' && tok != ';' && tok != ')'))) {
6755 int t;
6756 if (tok == TOK_EOF) {
6757 if (str || level > 0)
6758 tcc_error("unexpected end of file");
6759 else
6760 break;
6762 if (str)
6763 tok_str_add_tok(*str);
6764 t = tok;
6765 next();
6766 if (t == '{' || t == '(') {
6767 level++;
6768 } else if (t == '}' || t == ')') {
6769 level--;
6770 if (level == 0 && braces && t == '}')
6771 break;
6774 if (str) {
6775 tok_str_add(*str, -1);
6776 tok_str_add(*str, 0);
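#if 0
/* Hedged usage sketch (not called from here): this is roughly how the saved
   token strings are consumed elsewhere in this file, e.g. for 'static inline'
   bodies recorded by decl0() and replayed by gen_inline_functions().  Shown
   only to illustrate the pairing of skip_or_save_block() with
   begin_macro()/end_macro(). */
static void save_and_replay_sketch(void)
{
    TokenString *body;
    skip_or_save_block(&body); /* copy tokens up to the matching '}' */
    begin_macro(body, 1);      /* make the saved tokens the input stream */
    next();
    /* ... re-parse the block here ... */
    end_macro();               /* restore the previous input stream */
}
#endif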
6780 #define EXPR_CONST 1
6781 #define EXPR_ANY 2
6783 static void parse_init_elem(int expr_type)
6785 int saved_global_expr;
6786 switch(expr_type) {
6787 case EXPR_CONST:
6788 /* compound literals must be allocated globally in this case */
6789 saved_global_expr = global_expr;
6790 global_expr = 1;
6791 expr_const1();
6792 global_expr = saved_global_expr;
6793 /* NOTE: symbols are accepted, as well as lvalues of anonymous symbols
6794 (compound literals). */
6795 if (((vtop->r & (VT_VALMASK | VT_LVAL)) != VT_CONST
6796 && ((vtop->r & (VT_SYM|VT_LVAL)) != (VT_SYM|VT_LVAL)
6797 || vtop->sym->v < SYM_FIRST_ANOM))
6798 #ifdef TCC_TARGET_PE
6799 || ((vtop->r & VT_SYM) && vtop->sym->a.dllimport)
6800 #endif
6802 tcc_error("initializer element is not constant");
6803 break;
6804 case EXPR_ANY:
6805 expr_eq();
6806 break;
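/* Hedged examples of the two modes above:
       static int g, *gp = &g;          // EXPR_CONST: address constants and
                                        //   anonymous symbols are accepted;
                                        //   compound literals are forced
                                        //   global via global_expr = 1
       void f(void) { int x = rand(); } // EXPR_ANY: any assignment expression
   With EXPR_CONST, anything else triggers the
   "initializer element is not constant" error. */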
6810 /* put zeros for variable based init */
6811 static void init_putz(Section *sec, unsigned long c, int size)
6813 if (sec) {
6814 /* nothing to do because globals are already set to zero */
6815 } else {
6816 vpush_global_sym(&func_old_type, TOK_memset);
6817 vseti(VT_LOCAL, c);
6818 #ifdef TCC_TARGET_ARM
6819 vpushs(size);
6820 vpushi(0);
6821 #else
6822 vpushi(0);
6823 vpushs(size);
6824 #endif
6825 gfunc_call(3);
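/* Hedged illustration: for a stack object such as
       struct { int a, b, c; } s = { .c = 1 };
   the holes before the initialized member are cleared by the call built
   above, roughly equivalent to memset(fp + c, 0, size); for static storage
   nothing is emitted because .data/.bss is already zero. */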
6829 #define DIF_FIRST 1
6830 #define DIF_SIZE_ONLY 2
6831 #define DIF_HAVE_ELEM 4
6833 /* t is the array or struct type. c is the array or struct
6834 address. cur_field is the pointer to the current
6835 field, for arrays the 'c' member contains the current start
6836 index. 'flags' is as in decl_initializer.
6837 'al' contains the already initialized length of the
6838 current container (starting at c). This returns the new length of that. */
6839 static int decl_designator(CType *type, Section *sec, unsigned long c,
6840 Sym **cur_field, int flags, int al)
6842 Sym *s, *f;
6843 int index, index_last, align, l, nb_elems, elem_size;
6844 unsigned long corig = c;
6846 elem_size = 0;
6847 nb_elems = 1;
6849 if (flags & DIF_HAVE_ELEM)
6850 goto no_designator;
6852 if (gnu_ext && tok >= TOK_UIDENT) {
6853 l = tok, next();
6854 if (tok == ':')
6855 goto struct_field;
6856 unget_tok(l);
6859 /* NOTE: we only support ranges for last designator */
6860 while (nb_elems == 1 && (tok == '[' || tok == '.')) {
6861 if (tok == '[') {
6862 if (!(type->t & VT_ARRAY))
6863 expect("array type");
6864 next();
6865 index = index_last = expr_const();
6866 if (tok == TOK_DOTS && gnu_ext) {
6867 next();
6868 index_last = expr_const();
6870 skip(']');
6871 s = type->ref;
6872 if (index < 0 || (s->c >= 0 && index_last >= s->c) ||
6873 index_last < index)
6874 tcc_error("invalid index");
6875 if (cur_field)
6876 (*cur_field)->c = index_last;
6877 type = pointed_type(type);
6878 elem_size = type_size(type, &align);
6879 c += index * elem_size;
6880 nb_elems = index_last - index + 1;
6881 } else {
6882 int cumofs;
6883 next();
6884 l = tok;
6885 struct_field:
6886 next();
6887 if ((type->t & VT_BTYPE) != VT_STRUCT)
6888 expect("struct/union type");
6889 cumofs = 0;
6890 f = find_field(type, l, &cumofs);
6891 if (!f)
6892 expect("field");
6893 if (cur_field)
6894 *cur_field = f;
6895 type = &f->type;
6896 c += cumofs + f->c;
6898 cur_field = NULL;
6900 if (!cur_field) {
6901 if (tok == '=') {
6902 next();
6903 } else if (!gnu_ext) {
6904 expect("=");
6906 } else {
6907 no_designator:
6908 if (type->t & VT_ARRAY) {
6909 index = (*cur_field)->c;
6910 if (type->ref->c >= 0 && index >= type->ref->c)
6911 tcc_error("index too large");
6912 type = pointed_type(type);
6913 c += index * type_size(type, &align);
6914 } else {
6915 f = *cur_field;
6916 while (f && (f->v & SYM_FIRST_ANOM) && (f->type.t & VT_BITFIELD))
6917 *cur_field = f = f->next;
6918 if (!f)
6919 tcc_error("too many field init");
6920 type = &f->type;
6921 c += f->c;
6924 /* must put zero in holes (note that doing it that way
6925 ensures that it even works with designators) */
6926 if (!(flags & DIF_SIZE_ONLY) && c - corig > al)
6927 init_putz(sec, corig + al, c - corig - al);
6928 decl_initializer(type, sec, c, flags & ~DIF_FIRST);
6930 /* XXX: make it more general */
6931 if (!(flags & DIF_SIZE_ONLY) && nb_elems > 1) {
6932 unsigned long c_end;
6933 uint8_t *src, *dst;
6934 int i;
6936 if (!sec) {
6937 vset(type, VT_LOCAL|VT_LVAL, c);
6938 for (i = 1; i < nb_elems; i++) {
6939 vset(type, VT_LOCAL|VT_LVAL, c + elem_size * i);
6940 vswap();
6941 vstore();
6943 vpop();
6944 } else if (!NODATA_WANTED) {
6945 c_end = c + nb_elems * elem_size;
6946 if (c_end > sec->data_allocated)
6947 section_realloc(sec, c_end);
6948 src = sec->data + c;
6949 dst = src;
6950 for(i = 1; i < nb_elems; i++) {
6951 dst += elem_size;
6952 memcpy(dst, src, elem_size);
6956 c += nb_elems * type_size(type, &align);
6957 if (c - corig > al)
6958 al = c - corig;
6959 return al;
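#if 0
/* Hedged examples of designators handled by decl_designator() (these are
   inputs to the compiler, not code that runs here): */
int a[8] = { [2] = 1, [4 ... 6] = 2 };  /* GNU range designator */
struct point { int x, y; };
struct point p = { .y = 3 };            /* field designator; the skipped .x
                                           member is a hole that gets
                                           zero-filled */
#endif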
6962 /* store a value or an expression directly in global data or in local array */
6963 static void init_putv(CType *type, Section *sec, unsigned long c)
6965 int bt;
6966 void *ptr;
6967 CType dtype;
6969 dtype = *type;
6970 dtype.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
6972 if (sec) {
6973 int size, align;
6974 /* XXX: not portable */
6975 /* XXX: generate error if incorrect relocation */
6976 gen_assign_cast(&dtype);
6977 bt = type->t & VT_BTYPE;
6979 if ((vtop->r & VT_SYM)
6980 && bt != VT_PTR
6981 && bt != VT_FUNC
6982 && (bt != (PTR_SIZE == 8 ? VT_LLONG : VT_INT)
6983 || (type->t & VT_BITFIELD))
6984 && !((vtop->r & VT_CONST) && vtop->sym->v >= SYM_FIRST_ANOM)
6986 tcc_error("initializer element is not computable at load time");
6988 if (NODATA_WANTED) {
6989 vtop--;
6990 return;
6993 size = type_size(type, &align);
6994 section_reserve(sec, c + size);
6995 ptr = sec->data + c;
6997 /* XXX: make code faster ? */
6998 if ((vtop->r & (VT_SYM|VT_CONST)) == (VT_SYM|VT_CONST) &&
6999 vtop->sym->v >= SYM_FIRST_ANOM &&
7000 /* XXX This rejects compound literals like
7001 '(void *){ptr}'. The problem is that '&sym' is
7002 represented the same way, which would be ruled out
7003 by the SYM_FIRST_ANOM check above, but also '"string"'
7004 in 'char *p = "string"' is represented the same
7005 with the type being VT_PTR and the symbol being an
7006 anonymous one. That is, there's no difference in vtop
7007 between '(void *){x}' and '&(void *){x}'. Ignore
7008 pointer typed entities here. Hopefully no real code
7009 will ever use compound literals with scalar type. */
7010 (vtop->type.t & VT_BTYPE) != VT_PTR) {
7011 /* These come from compound literals, memcpy stuff over. */
7012 Section *ssec;
7013 ElfSym *esym;
7014 ElfW_Rel *rel;
7015 esym = elfsym(vtop->sym);
7016 ssec = tcc_state->sections[esym->st_shndx];
7017 memmove (ptr, ssec->data + esym->st_value, size);
7018 if (ssec->reloc) {
7019 /* We need to copy over all memory contents, and that
7020 includes relocations. Use the fact that relocs are
7021 created in order, so look from the end of relocs
7022 until we hit one before the copied region. */
7023 int num_relocs = ssec->reloc->data_offset / sizeof(*rel);
7024 rel = (ElfW_Rel*)(ssec->reloc->data + ssec->reloc->data_offset);
7025 while (num_relocs--) {
7026 rel--;
7027 if (rel->r_offset >= esym->st_value + size)
7028 continue;
7029 if (rel->r_offset < esym->st_value)
7030 break;
7031 /* Note: if the same fields are initialized multiple
7032 times (possible with designators) then we possibly
7033 add multiple relocations for the same offset here.
7034 That would lead to wrong code, the last reloc needs
7035 to win. We clean this up later after the whole
7036 initializer is parsed. */
7037 put_elf_reloca(symtab_section, sec,
7038 c + rel->r_offset - esym->st_value,
7039 ELFW(R_TYPE)(rel->r_info),
7040 ELFW(R_SYM)(rel->r_info),
7041 #if PTR_SIZE == 8
7042 rel->r_addend
7043 #else
7045 #endif
7049 } else {
7050 if (type->t & VT_BITFIELD) {
7051 int bit_pos, bit_size, bits, n;
7052 unsigned char *p, v, m;
7053 bit_pos = BIT_POS(vtop->type.t);
7054 bit_size = BIT_SIZE(vtop->type.t);
7055 p = (unsigned char*)ptr + (bit_pos >> 3);
7056 bit_pos &= 7, bits = 0;
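/* Worked example (hedged): a 6-bit field placed at bit offset 5 is written
   in two pieces by the loop below: 3 bits into the first byte under mask
   0xE0 (n = 8 - 5), then the remaining 3 bits into the next byte under
   mask 0x07. */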
7057 while (bit_size) {
7058 n = 8 - bit_pos;
7059 if (n > bit_size)
7060 n = bit_size;
7061 v = vtop->c.i >> bits << bit_pos;
7062 m = ((1 << n) - 1) << bit_pos;
7063 *p = (*p & ~m) | (v & m);
7064 bits += n, bit_size -= n, bit_pos = 0, ++p;
7066 } else
7067 switch(bt) {
7068 /* XXX: when cross-compiling we assume that each type has the
7069 same representation on host and target, which is likely to
7070 be wrong in the case of long double */
7071 case VT_BOOL:
7072 vtop->c.i = vtop->c.i != 0;
7073 case VT_BYTE:
7074 *(char *)ptr |= vtop->c.i;
7075 break;
7076 case VT_SHORT:
7077 *(short *)ptr |= vtop->c.i;
7078 break;
7079 case VT_FLOAT:
7080 *(float*)ptr = vtop->c.f;
7081 break;
7082 case VT_DOUBLE:
7083 *(double *)ptr = vtop->c.d;
7084 break;
7085 case VT_LDOUBLE:
7086 #if defined TCC_IS_NATIVE_387
7087 if (sizeof (long double) >= 10) /* zero pad ten-byte LD */
7088 memcpy(ptr, &vtop->c.ld, 10);
7089 #ifdef __TINYC__
7090 else if (sizeof (long double) == sizeof (double))
7091 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr) : "m" (vtop->c.ld));
7092 #endif
7093 else if (vtop->c.ld == 0.0)
7095 else
7096 #endif
7097 if (sizeof(long double) == LDOUBLE_SIZE)
7098 *(long double*)ptr = vtop->c.ld;
7099 else if (sizeof(double) == LDOUBLE_SIZE)
7100 *(double *)ptr = (double)vtop->c.ld;
7101 else
7102 tcc_error("can't cross compile long double constants");
7103 break;
7104 #if PTR_SIZE != 8
7105 case VT_LLONG:
7106 *(long long *)ptr |= vtop->c.i;
7107 break;
7108 #else
7109 case VT_LLONG:
7110 #endif
7111 case VT_PTR:
7113 addr_t val = vtop->c.i;
7114 #if PTR_SIZE == 8
7115 if (vtop->r & VT_SYM)
7116 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
7117 else
7118 *(addr_t *)ptr |= val;
7119 #else
7120 if (vtop->r & VT_SYM)
7121 greloc(sec, vtop->sym, c, R_DATA_PTR);
7122 *(addr_t *)ptr |= val;
7123 #endif
7124 break;
7126 default:
7128 int val = vtop->c.i;
7129 #if PTR_SIZE == 8
7130 if (vtop->r & VT_SYM)
7131 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
7132 else
7133 *(int *)ptr |= val;
7134 #else
7135 if (vtop->r & VT_SYM)
7136 greloc(sec, vtop->sym, c, R_DATA_PTR);
7137 *(int *)ptr |= val;
7138 #endif
7139 break;
7143 vtop--;
7144 } else {
7145 vset(&dtype, VT_LOCAL|VT_LVAL, c);
7146 vswap();
7147 vstore();
7148 vpop();
7152 /* 't' contains the type and storage info. 'c' is the offset of the
7153 object in section 'sec'. If 'sec' is NULL, it means stack based
7154 allocation. 'flags & DIF_FIRST' is true if array '{' must be read (multi
7155 dimension implicit array init handling). 'flags & DIF_SIZE_ONLY' is true if
7156 size only evaluation is wanted (only for arrays). */
7157 static void decl_initializer(CType *type, Section *sec, unsigned long c,
7158 int flags)
7160 int len, n, no_oblock, nb, i;
7161 int size1, align1;
7162 Sym *s, *f;
7163 Sym indexsym;
7164 CType *t1;
7166 if (!(flags & DIF_HAVE_ELEM) && tok != '{' &&
7167 /* In case of strings we have special handling for arrays, so
7168 don't consume them as initializer value (which would commit them
7169 to some anonymous symbol). */
7170 tok != TOK_LSTR && tok != TOK_STR &&
7171 !(flags & DIF_SIZE_ONLY)) {
7172 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
7173 flags |= DIF_HAVE_ELEM;
7176 if ((flags & DIF_HAVE_ELEM) &&
7177 !(type->t & VT_ARRAY) &&
7178 /* Use i_c_parameter_t, to strip toplevel qualifiers.
7179 The source type might have VT_CONSTANT set, which is
7180 of course assignable to non-const elements. */
7181 is_compatible_unqualified_types(type, &vtop->type)) {
7182 init_putv(type, sec, c);
7183 } else if (type->t & VT_ARRAY) {
7184 s = type->ref;
7185 n = s->c;
7186 t1 = pointed_type(type);
7187 size1 = type_size(t1, &align1);
7189 no_oblock = 1;
7190 if (((flags & DIF_FIRST) && tok != TOK_LSTR && tok != TOK_STR) ||
7191 tok == '{') {
7192 if (tok != '{')
7193 tcc_error("character array initializer must be a literal,"
7194 " optionally enclosed in braces");
7195 skip('{');
7196 no_oblock = 0;
7199 /* only parse strings here if correct type (otherwise: handle
7200 them as ((w)char *) expressions) */
7201 if ((tok == TOK_LSTR &&
7202 #ifdef TCC_TARGET_PE
7203 (t1->t & VT_BTYPE) == VT_SHORT && (t1->t & VT_UNSIGNED)
7204 #else
7205 (t1->t & VT_BTYPE) == VT_INT
7206 #endif
7207 ) || (tok == TOK_STR && (t1->t & VT_BTYPE) == VT_BYTE)) {
7208 len = 0;
7209 while (tok == TOK_STR || tok == TOK_LSTR) {
7210 int cstr_len, ch;
7212 /* compute maximum number of chars wanted */
7213 if (tok == TOK_STR)
7214 cstr_len = tokc.str.size;
7215 else
7216 cstr_len = tokc.str.size / sizeof(nwchar_t);
7217 cstr_len--;
7218 nb = cstr_len;
7219 if (n >= 0 && nb > (n - len))
7220 nb = n - len;
7221 if (!(flags & DIF_SIZE_ONLY)) {
7222 if (cstr_len > nb)
7223 tcc_warning("initializer-string for array is too long");
7224 /* in order to go faster for the common case (char
7225 string in a global variable), we handle it
7226 specifically */
7227 if (sec && tok == TOK_STR && size1 == 1) {
7228 if (!NODATA_WANTED)
7229 memcpy(sec->data + c + len, tokc.str.data, nb);
7230 } else {
7231 for(i=0;i<nb;i++) {
7232 if (tok == TOK_STR)
7233 ch = ((unsigned char *)tokc.str.data)[i];
7234 else
7235 ch = ((nwchar_t *)tokc.str.data)[i];
7236 vpushi(ch);
7237 init_putv(t1, sec, c + (len + i) * size1);
7241 len += nb;
7242 next();
7244 /* only add trailing zero if enough storage (no
7245 warning in this case since it is standard) */
7246 if (n < 0 || len < n) {
7247 if (!(flags & DIF_SIZE_ONLY)) {
7248 vpushi(0);
7249 init_putv(t1, sec, c + (len * size1));
7251 len++;
7253 len *= size1;
7254 } else {
7255 indexsym.c = 0;
7256 f = &indexsym;
7258 do_init_list:
7259 len = 0;
7260 while (tok != '}' || (flags & DIF_HAVE_ELEM)) {
7261 len = decl_designator(type, sec, c, &f, flags, len);
7262 flags &= ~DIF_HAVE_ELEM;
7263 if (type->t & VT_ARRAY) {
7264 ++indexsym.c;
7265 /* special test for multi dimensional arrays (may not
7266 be strictly correct if designators are used at the
7267 same time) */
7268 if (no_oblock && len >= n*size1)
7269 break;
7270 } else {
7271 if (s->type.t == VT_UNION)
7272 f = NULL;
7273 else
7274 f = f->next;
7275 if (no_oblock && f == NULL)
7276 break;
7279 if (tok == '}')
7280 break;
7281 skip(',');
7284 /* put zeros at the end */
7285 if (!(flags & DIF_SIZE_ONLY) && len < n*size1)
7286 init_putz(sec, c + len, n*size1 - len);
7287 if (!no_oblock)
7288 skip('}');
7289 /* patch type size if needed, which happens only for array types */
7290 if (n < 0)
7291 s->c = size1 == 1 ? len : ((len + size1 - 1)/size1);
7292 } else if ((type->t & VT_BTYPE) == VT_STRUCT) {
7293 size1 = 1;
7294 no_oblock = 1;
7295 if ((flags & DIF_FIRST) || tok == '{') {
7296 skip('{');
7297 no_oblock = 0;
7299 s = type->ref;
7300 f = s->next;
7301 n = s->c;
7302 goto do_init_list;
7303 } else if (tok == '{') {
7304 if (flags & DIF_HAVE_ELEM)
7305 skip(';');
7306 next();
7307 decl_initializer(type, sec, c, flags & ~DIF_HAVE_ELEM);
7308 skip('}');
7309 } else if ((flags & DIF_SIZE_ONLY)) {
7310 /* If we supported only ISO C we wouldn't have to accept calling
7311 this on anything other than an array if DIF_SIZE_ONLY (and even then
7312 only on the outermost level, so no recursion would be needed),
7313 because initializing a flex array member isn't supported.
7314 But GNU C supports it, so we need to recurse even into
7315 subfields of structs and arrays when DIF_SIZE_ONLY is set. */
7316 /* just skip expression */
7317 skip_or_save_block(NULL);
7318 } else {
7319 if (!(flags & DIF_HAVE_ELEM)) {
7320 /* This should happen only when we haven't parsed
7321 the init element above for fear of committing a
7322 string constant to memory too early. */
7323 if (tok != TOK_STR && tok != TOK_LSTR)
7324 expect("string constant");
7325 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
7327 init_putv(type, sec, c);
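#if 0
/* Hedged examples of initializer forms routed through decl_initializer()
   (compiler input, shown for illustration only): */
char s1[] = "hi";                     /* string initializer; size patched to 3 */
int  m[2][2] = { 1, 2, 3, 4 };        /* brace elision across dimensions */
struct { int a, b; } v = { .b = 1 };  /* designators go via decl_designator() */
#endif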
7331 /* parse an initializer for type 't' if 'has_init' is non zero, and
7332 allocate space in local or global data space ('r' is either
7333 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
7334 variable 'v' of scope 'scope' is declared before initializers
7335 are parsed. If 'v' is zero, then a reference to the new object
7336 is put in the value stack. If 'has_init' is 2, a special parsing
7337 is done to handle string constants. */
7338 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r,
7339 int has_init, int v, int scope)
7341 int size, align, addr;
7342 TokenString *init_str = NULL;
7344 Section *sec;
7345 Sym *flexible_array;
7346 Sym *sym = NULL;
7347 int saved_nocode_wanted = nocode_wanted;
7348 #ifdef CONFIG_TCC_BCHECK
7349 int bcheck;
7350 #endif
7352 /* Always allocate static or global variables */
7353 if (v && (r & VT_VALMASK) == VT_CONST)
7354 nocode_wanted |= 0x80000000;
7356 #ifdef CONFIG_TCC_BCHECK
7357 bcheck = tcc_state->do_bounds_check && !NODATA_WANTED;
7358 #endif
7360 flexible_array = NULL;
7361 if ((type->t & VT_BTYPE) == VT_STRUCT) {
7362 Sym *field = type->ref->next;
7363 if (field) {
7364 while (field->next)
7365 field = field->next;
7366 if (field->type.t & VT_ARRAY && field->type.ref->c < 0)
7367 flexible_array = field;
7371 size = type_size(type, &align);
7372 /* If unknown size, we must evaluate it before
7373 evaluating initializers because
7374 initializers can generate global data too
7375 (e.g. string pointers or ISOC99 compound
7376 literals). It also simplifies local
7377 initializers handling */
7378 if (size < 0 || (flexible_array && has_init)) {
7379 if (!has_init)
7380 tcc_error("unknown type size");
7381 /* get all init string */
7382 if (has_init == 2) {
7383 init_str = tok_str_alloc();
7384 /* only get strings */
7385 while (tok == TOK_STR || tok == TOK_LSTR) {
7386 tok_str_add_tok(init_str);
7387 next();
7389 tok_str_add(init_str, -1);
7390 tok_str_add(init_str, 0);
7391 } else {
7392 skip_or_save_block(&init_str);
7394 unget_tok(0);
7396 /* compute size */
7397 begin_macro(init_str, 1);
7398 next();
7399 decl_initializer(type, NULL, 0, DIF_FIRST | DIF_SIZE_ONLY);
7400 /* prepare second initializer parsing */
7401 macro_ptr = init_str->str;
7402 next();
7404 /* if still unknown size, error */
7405 size = type_size(type, &align);
7406 if (size < 0)
7407 tcc_error("unknown type size");
7409 /* If there's a flex member and it was used in the initializer
7410 adjust size. */
7411 if (flexible_array &&
7412 flexible_array->type.ref->c > 0)
7413 size += flexible_array->type.ref->c
7414 * pointed_size(&flexible_array->type);
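/* Hedged example: for
       struct S { int n; int a[]; };
       static struct S s = { 2, { 10, 20 } };
   the size-only pass above sets the flex member's element count to 2, so
   the allocation grows by 2 * sizeof(int) beyond sizeof(struct S). */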
7415 /* take into account specified alignment if bigger */
7416 if (ad->a.aligned) {
7417 int speca = 1 << (ad->a.aligned - 1);
7418 if (speca > align)
7419 align = speca;
7420 } else if (ad->a.packed) {
7421 align = 1;
7424 if (!v && NODATA_WANTED)
7425 size = 0, align = 1;
7427 if ((r & VT_VALMASK) == VT_LOCAL) {
7428 sec = NULL;
7429 #ifdef CONFIG_TCC_BCHECK
7430 if (bcheck && ((type->t & VT_ARRAY) ||
7431 (type->t & VT_BTYPE) == VT_STRUCT)) {
7432 loc--;
7434 #endif
7435 loc = (loc - size) & -align;
7436 addr = loc;
7437 #ifdef CONFIG_TCC_BCHECK
7438 /* handles bounds */
7439 /* XXX: currently, since we do only one pass, we cannot track
7440 '&' operators, so we add only arrays/structs/unions */
7441 if (bcheck && ((type->t & VT_ARRAY) ||
7442 (type->t & VT_BTYPE) == VT_STRUCT)) {
7443 addr_t *bounds_ptr;
7444 /* add padding between regions */
7445 loc--;
7446 /* then add local bound info */
7447 bounds_ptr = section_ptr_add(lbounds_section, 2 * sizeof(addr_t));
7448 bounds_ptr[0] = addr;
7449 bounds_ptr[1] = size;
7451 #endif
7452 if (v) {
7453 /* local variable */
7454 #ifdef CONFIG_TCC_ASM
7455 if (ad->asm_label) {
7456 int reg = asm_parse_regvar(ad->asm_label);
7457 if (reg >= 0)
7458 r = (r & ~VT_VALMASK) | reg;
7460 #endif
7461 sym = sym_push(v, type, r, addr);
7462 if (ad->cleanup_func) {
7463 Sym *cls = sym_push2(&all_cleanups,
7464 SYM_FIELD | ++cur_scope->cl.n, 0, 0);
7465 cls->prev_tok = sym;
7466 cls->next = ad->cleanup_func;
7467 cls->ncl = cur_scope->cl.s;
7468 cur_scope->cl.s = cls;
7471 sym->a = ad->a;
7472 } else {
7473 /* push local reference */
7474 vset(type, r, addr);
7476 } else {
7477 if (v && scope == VT_CONST) {
7478 /* see if the symbol was already defined */
7479 sym = sym_find(v);
7480 if (sym) {
7481 patch_storage(sym, ad, type);
7482 /* we accept several definitions of the same global variable. */
7483 if (!has_init && sym->c && elfsym(sym)->st_shndx != SHN_UNDEF)
7484 goto no_alloc;
7488 /* allocate symbol in corresponding section */
7489 sec = ad->section;
7490 if (!sec) {
7491 if (has_init)
7492 sec = data_section;
7493 else if (tcc_state->nocommon)
7494 sec = bss_section;
7497 if (sec) {
7498 addr = section_add(sec, size, align);
7499 #ifdef CONFIG_TCC_BCHECK
7500 /* add padding if bound check */
7501 if (bcheck)
7502 section_add(sec, 1, 1);
7503 #endif
7504 } else {
7505 addr = align; /* SHN_COMMON is special, symbol value is align */
7506 sec = common_section;
7509 if (v) {
7510 if (!sym) {
7511 sym = sym_push(v, type, r | VT_SYM, 0);
7512 patch_storage(sym, ad, NULL);
7514 /* update symbol definition */
7515 put_extern_sym(sym, sec, addr, size);
7516 } else {
7517 /* push global reference */
7518 vpush_ref(type, sec, addr, size);
7519 sym = vtop->sym;
7520 vtop->r |= r;
7523 #ifdef CONFIG_TCC_BCHECK
7524 /* handle bounds now because the symbol must be defined
7525 before the relocation can reference it */
7526 if (bcheck) {
7527 addr_t *bounds_ptr;
7529 greloca(bounds_section, sym, bounds_section->data_offset, R_DATA_PTR, 0);
7530 /* then add global bound info */
7531 bounds_ptr = section_ptr_add(bounds_section, 2 * sizeof(addr_t));
7532 bounds_ptr[0] = 0; /* relocated */
7533 bounds_ptr[1] = size;
7535 #endif
7538 if (type->t & VT_VLA) {
7539 int a;
7541 if (NODATA_WANTED)
7542 goto no_alloc;
7544 /* save current stack pointer */
7545 if (root_scope->vla.loc == 0) {
7546 struct scope *v = cur_scope;
7547 gen_vla_sp_save(loc -= PTR_SIZE);
7548 do v->vla.loc = loc; while ((v = v->prev));
7551 vla_runtime_type_size(type, &a);
7552 gen_vla_alloc(type, a);
7553 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
7554 /* on _WIN64, because of the function args scratch area, the
7555 result of alloca differs from RSP and is returned in RAX. */
7556 gen_vla_result(addr), addr = (loc -= PTR_SIZE);
7557 #endif
7558 gen_vla_sp_save(addr);
7559 cur_scope->vla.loc = addr;
7560 cur_scope->vla.num++;
7561 #ifdef CONFIG_TCC_BCHECK
7562 if (bcheck) {
7563 addr_t *bounds_ptr;
7565 bounds_ptr = section_ptr_add(lbounds_section, 2 * sizeof(addr_t));
7566 bounds_ptr[0] = 1; /* marks alloca/vla used */
7567 bounds_ptr[1] = 0;
7569 #endif
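/* Hedged VLA example: in
       void f(int n) { int a[n]; ... }
   the first VLA in the function saves the incoming stack pointer
   (root_scope->vla.loc), vla_runtime_type_size() computes n * sizeof(int) at
   run time, gen_vla_alloc() grows the stack by that amount, and
   gen_vla_sp_save() records the adjusted pointer so vla_restore() can
   reinstate a saved stack pointer on goto or scope exit. */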
7571 } else if (has_init) {
7572 size_t oldreloc_offset = 0;
7573 if (sec && sec->reloc)
7574 oldreloc_offset = sec->reloc->data_offset;
7575 decl_initializer(type, sec, addr, DIF_FIRST);
7576 if (sec && sec->reloc)
7577 squeeze_multi_relocs(sec, oldreloc_offset);
7578 /* patch flexible array member size back to -1, */
7579 /* for possible subsequent similar declarations */
7580 if (flexible_array)
7581 flexible_array->type.ref->c = -1;
7584 no_alloc:
7585 /* restore parse state if needed */
7586 if (init_str) {
7587 end_macro();
7588 next();
7591 nocode_wanted = saved_nocode_wanted;
7594 /* parse a function defined by symbol 'sym' and generate its code in
7595 'cur_text_section' */
7596 static void gen_function(Sym *sym, AttributeDef *ad)
7598 /* Initialize VLA state */
7599 struct scope f = { 0 };
7600 cur_scope = root_scope = &f;
7602 nocode_wanted = 0;
7603 ind = cur_text_section->data_offset;
7604 if (sym->a.aligned) {
7605 size_t newoff = section_add(cur_text_section, 0,
7606 1 << (sym->a.aligned - 1));
7607 gen_fill_nops(newoff - ind);
7609 /* NOTE: we patch the symbol size later */
7610 put_extern_sym(sym, cur_text_section, ind, 0);
7612 if (ad && ad->a.constructor) {
7613 add_init_array (tcc_state, sym);
7615 if (ad && ad->a.destructor) {
7616 add_fini_array (tcc_state, sym);
7619 funcname = get_tok_str(sym->v, NULL);
7620 func_ind = ind;
7622 /* put debug symbol */
7623 tcc_debug_funcstart(tcc_state, sym);
7624 /* push a dummy symbol to enable local sym storage */
7625 sym_push2(&local_stack, SYM_FIELD, 0, 0);
7626 local_scope = 1; /* for function parameters */
7627 gfunc_prolog(sym);
7628 local_scope = 0;
7629 rsym = 0;
7630 clear_temp_local_var_list();
7631 block(0);
7632 gsym(rsym);
7633 nocode_wanted = 0;
7634 gfunc_epilog();
7635 cur_text_section->data_offset = ind;
7636 /* reset local stack */
7637 sym_pop(&local_stack, NULL, 0);
7638 local_scope = 0;
7639 label_pop(&global_label_stack, NULL, 0);
7640 sym_pop(&all_cleanups, NULL, 0);
7641 /* patch symbol size */
7642 elfsym(sym)->st_size = ind - func_ind;
7643 /* end of function */
7644 tcc_debug_funcend(tcc_state, ind - func_ind);
7645 /* It's better to crash than to generate wrong code */
7646 cur_text_section = NULL;
7647 funcname = ""; /* for safety */
7648 func_vt.t = VT_VOID; /* for safety */
7649 func_var = 0; /* for safety */
7650 ind = 0; /* for safety */
7651 nocode_wanted = 0x80000000;
7652 check_vstack();
7655 static void gen_inline_functions(TCCState *s)
7657 Sym *sym;
7658 int inline_generated, i;
7659 struct InlineFunc *fn;
7661 tcc_open_bf(s, ":inline:", 0);
7662 /* iterate while inline functions are referenced */
7663 do {
7664 inline_generated = 0;
7665 for (i = 0; i < s->nb_inline_fns; ++i) {
7666 fn = s->inline_fns[i];
7667 sym = fn->sym;
7668 if (sym && (sym->c || !(sym->type.t & VT_INLINE))) {
7669 /* the function was used or forced (and is therefore not internal):
7670 generate its code and convert it to a normal function */
7671 fn->sym = NULL;
7672 tcc_debug_putfile(s, fn->filename);
7673 begin_macro(fn->func_str, 1);
7674 next();
7675 cur_text_section = text_section;
7676 gen_function(sym, NULL);
7677 end_macro();
7679 inline_generated = 1;
7682 } while (inline_generated);
7683 tcc_close();
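/* Hedged example of what the loop above implements: given
       static inline int twice(int x) { return x + x; }
       int use(void) { return twice(21); }
   'twice' gets code generated here because it was referenced (sym->c set) or
   because its inline flag was dropped (e.g. 'extern inline'); an unreferenced
   static inline is skipped and its saved tokens are freed by
   free_inline_functions() below. */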
7686 static void free_inline_functions(TCCState *s)
7688 int i;
7689 /* free tokens of unused inline functions */
7690 for (i = 0; i < s->nb_inline_fns; ++i) {
7691 struct InlineFunc *fn = s->inline_fns[i];
7692 if (fn->sym)
7693 tok_str_free(fn->func_str);
7695 dynarray_reset(&s->inline_fns, &s->nb_inline_fns);
7698 /* 'l' is VT_LOCAL or VT_CONST to define default storage type, or VT_CMP
7699 if parsing old style parameter decl list (and FUNC_SYM is set then) */
7700 static int decl0(int l, int is_for_loop_init, Sym *func_sym)
7702 int v, has_init, r;
7703 CType type, btype;
7704 Sym *sym;
7705 AttributeDef ad, adbase;
7707 while (1) {
7708 if (tok == TOK_STATIC_ASSERT) {
7709 int c;
7711 next();
7712 skip('(');
7713 c = expr_const();
7714 skip(',');
7715 if (c == 0)
7716 tcc_error("%s", get_tok_str(tok, &tokc));
7717 next();
7718 skip(')');
7719 skip(';');
7720 continue;
7722 if (!parse_btype(&btype, &adbase)) {
7723 if (is_for_loop_init)
7724 return 0;
7725 /* skip redundant ';' if not in old parameter decl scope */
7726 if (tok == ';' && l != VT_CMP) {
7727 next();
7728 continue;
7730 if (l != VT_CONST)
7731 break;
7732 if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
7733 /* global asm block */
7734 asm_global_instr();
7735 continue;
7737 if (tok >= TOK_UIDENT) {
7738 /* special test for old K&R protos without explicit int
7739 type. Only accepted when defining global data */
7740 btype.t = VT_INT;
7741 } else {
7742 if (tok != TOK_EOF)
7743 expect("declaration");
7744 break;
7747 if (tok == ';') {
7748 if ((btype.t & VT_BTYPE) == VT_STRUCT) {
7749 int v = btype.ref->v;
7750 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) >= SYM_FIRST_ANOM)
7751 tcc_warning("unnamed struct/union that defines no instances");
7752 next();
7753 continue;
7755 if (IS_ENUM(btype.t)) {
7756 next();
7757 continue;
7760 while (1) { /* iterate thru each declaration */
7761 type = btype;
7762 /* If the base type itself was an array type of unspecified
7763 size (like in 'typedef int arr[]; arr x = {1};') then
7764 we will overwrite the unknown size with the real one for
7765 this decl. We need to unshare the ref symbol holding
7766 that size. */
7767 if ((type.t & VT_ARRAY) && type.ref->c < 0) {
7768 type.ref = sym_push(SYM_FIELD, &type.ref->type, 0, type.ref->c);
7770 ad = adbase;
7771 type_decl(&type, &ad, &v, TYPE_DIRECT);
7772 #if 0
7774 char buf[500];
7775 type_to_str(buf, sizeof(buf), &type, get_tok_str(v, NULL));
7776 printf("type = '%s'\n", buf);
7778 #endif
7779 if ((type.t & VT_BTYPE) == VT_FUNC) {
7780 if ((type.t & VT_STATIC) && (l == VT_LOCAL))
7781 tcc_error("function without file scope cannot be static");
7782 /* if old style function prototype, we accept a
7783 declaration list */
7784 sym = type.ref;
7785 if (sym->f.func_type == FUNC_OLD && l == VT_CONST)
7786 decl0(VT_CMP, 0, sym);
7787 /* always compile 'extern inline' */
7788 if (type.t & VT_EXTERN)
7789 type.t &= ~VT_INLINE;
7792 if (gnu_ext && (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
7793 ad.asm_label = asm_label_instr();
7794 /* parse one last attribute list, after asm label */
7795 parse_attribute(&ad);
7796 #if 0
7797 /* gcc does not allow __asm__("label") with function definition,
7798 but why not ... */
7799 if (tok == '{')
7800 expect(";");
7801 #endif
7804 #ifdef TCC_TARGET_PE
7805 if (ad.a.dllimport || ad.a.dllexport) {
7806 if (type.t & VT_STATIC)
7807 tcc_error("cannot have dll linkage with static");
7808 if (type.t & VT_TYPEDEF) {
7809 tcc_warning("'%s' attribute ignored for typedef",
7810 ad.a.dllimport ? (ad.a.dllimport = 0, "dllimport") :
7811 (ad.a.dllexport = 0, "dllexport"));
7812 } else if (ad.a.dllimport) {
7813 if ((type.t & VT_BTYPE) == VT_FUNC)
7814 ad.a.dllimport = 0;
7815 else
7816 type.t |= VT_EXTERN;
7819 #endif
7820 if (tok == '{') {
7821 if (l != VT_CONST)
7822 tcc_error("cannot use local functions");
7823 if ((type.t & VT_BTYPE) != VT_FUNC)
7824 expect("function definition");
7826 /* reject abstract declarators in function definition
7827 make old style params without decl have int type */
7828 sym = type.ref;
7829 while ((sym = sym->next) != NULL) {
7830 if (!(sym->v & ~SYM_FIELD))
7831 expect("identifier");
7832 if (sym->type.t == VT_VOID)
7833 sym->type = int_type;
7836 /* put function symbol */
7837 type.t &= ~VT_EXTERN;
7838 sym = external_sym(v, &type, 0, &ad);
7839 /* static inline functions are just recorded as a kind
7840 of macro. Their code will be emitted at the end of
7841 the compilation unit only if they are used */
7842 if (sym->type.t & VT_INLINE) {
7843 struct InlineFunc *fn;
7844 const char *filename;
7846 filename = file ? file->filename : "";
7847 fn = tcc_malloc(sizeof *fn + strlen(filename));
7848 strcpy(fn->filename, filename);
7849 fn->sym = sym;
7850 skip_or_save_block(&fn->func_str);
7851 dynarray_add(&tcc_state->inline_fns,
7852 &tcc_state->nb_inline_fns, fn);
7853 } else {
7854 /* compute text section */
7855 cur_text_section = ad.section;
7856 if (!cur_text_section)
7857 cur_text_section = text_section;
7858 gen_function(sym, &ad);
7860 break;
7861 } else {
7862 if (l == VT_CMP) {
7863 /* find parameter in function parameter list */
7864 for (sym = func_sym->next; sym; sym = sym->next)
7865 if ((sym->v & ~SYM_FIELD) == v)
7866 goto found;
7867 tcc_error("declaration for parameter '%s' but no such parameter",
7868 get_tok_str(v, NULL));
7869 found:
7870 if (type.t & VT_STORAGE) /* 'register' is okay */
7871 tcc_error("storage class specified for '%s'",
7872 get_tok_str(v, NULL));
7873 if (sym->type.t != VT_VOID)
7874 tcc_error("redefinition of parameter '%s'",
7875 get_tok_str(v, NULL));
7876 convert_parameter_type(&type);
7877 sym->type = type;
7878 } else if (type.t & VT_TYPEDEF) {
7879 /* save typedefed type */
7880 /* XXX: test storage specifiers ? */
7881 sym = sym_find(v);
7882 if (sym && sym->sym_scope == local_scope) {
7883 if (!is_compatible_types(&sym->type, &type)
7884 || !(sym->type.t & VT_TYPEDEF))
7885 tcc_error("incompatible redefinition of '%s'",
7886 get_tok_str(v, NULL));
7887 sym->type = type;
7888 } else {
7889 sym = sym_push(v, &type, 0, 0);
7891 sym->a = ad.a;
7892 sym->f = ad.f;
7893 } else if ((type.t & VT_BTYPE) == VT_VOID
7894 && !(type.t & VT_EXTERN)) {
7895 tcc_error("declaration of void object");
7896 } else {
7897 r = 0;
7898 if ((type.t & VT_BTYPE) == VT_FUNC) {
7899 /* external function definition */
7900 /* specific case for func_call attribute */
7901 type.ref->f = ad.f;
7902 } else if (!(type.t & VT_ARRAY)) {
7903 /* not lvalue if array */
7904 r |= VT_LVAL;
7906 has_init = (tok == '=');
7907 if (has_init && (type.t & VT_VLA))
7908 tcc_error("variable length array cannot be initialized");
7909 if (((type.t & VT_EXTERN) && (!has_init || l != VT_CONST))
7910 || (type.t & VT_BTYPE) == VT_FUNC
7911 /* as with GCC, uninitialized global arrays with no size
7912 are considered extern: */
7913 || ((type.t & VT_ARRAY) && !has_init
7914 && l == VT_CONST && type.ref->c < 0)
7916 /* external variable or function */
7917 type.t |= VT_EXTERN;
7918 sym = external_sym(v, &type, r, &ad);
7919 if (ad.alias_target) {
7920 ElfSym *esym;
7921 Sym *alias_target;
7922 alias_target = sym_find(ad.alias_target);
7923 esym = elfsym(alias_target);
7924 if (!esym)
7925 tcc_error("unsupported forward __alias__ attribute");
7926 put_extern_sym2(sym, esym->st_shndx, esym->st_value, esym->st_size, 0);
7928 } else {
7929 if (type.t & VT_STATIC)
7930 r |= VT_CONST;
7931 else
7932 r |= l;
7933 if (has_init)
7934 next();
7935 else if (l == VT_CONST)
7936 /* uninitialized global variables may be overridden */
7937 type.t |= VT_EXTERN;
7938 decl_initializer_alloc(&type, &ad, r, has_init, v, l);
7941 if (tok != ',') {
7942 if (is_for_loop_init)
7943 return 1;
7944 skip(';');
7945 break;
7947 next();
7951 return 0;
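/* Hedged example of the old-style (K&R) path above: for
       int f(a, b) int a; char *b; { return a; }
   type_decl() parses 'f(a, b)' as FUNC_OLD, decl0() then recurses with
   VT_CMP to read the 'int a; char *b;' declaration list and patch the
   matching parameters, and any parameter left undeclared is given the
   implicit int type when the '{' of the definition is seen. */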
7954 static void decl(int l)
7956 decl0(l, 0, NULL);
7959 /* ------------------------------------------------------------------------- */
7960 #undef gjmp_addr
7961 #undef gjmp
7962 /* ------------------------------------------------------------------------- */