1 /*
2 * TCC - Tiny C Compiler
3 *
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
19 */
21 #define USING_GLOBALS
22 #include "tcc.h"
24 /********************************************************/
25 /* global variables */
27 /* loc : local variable index
28 ind : output code index
29 rsym: return symbol
30 anon_sym: anonymous symbol index
31 */
32 ST_DATA int rsym, anon_sym, ind, loc;
34 ST_DATA Sym *global_stack;
35 ST_DATA Sym *local_stack;
36 ST_DATA Sym *define_stack;
37 ST_DATA Sym *global_label_stack;
38 ST_DATA Sym *local_label_stack;
40 static Sym *sym_free_first;
41 static void **sym_pools;
42 static int nb_sym_pools;
44 static Sym *all_cleanups, *pending_gotos;
45 static int local_scope;
46 static int in_sizeof;
47 static int in_generic;
48 static int section_sym;
50 ST_DATA SValue *vtop;
51 static SValue _vstack[1 + VSTACK_SIZE];
52 #define vstack (_vstack + 1)
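/* Note: _vstack[0] acts as a sentinel slot; vtop is initialized to
   vstack - 1 in tccgen_init() below, so an empty value stack still
   points at valid storage inside _vstack. */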
54 ST_DATA int const_wanted; /* true if constant wanted */
55 ST_DATA int nocode_wanted; /* no code generation wanted */
56 #define unevalmask 0xffff /* unevaluated subexpression */
57 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
58 #define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */
60 /* Automagical code suppression ----> */
61 #define CODE_OFF() (nocode_wanted |= 0x20000000)
62 #define CODE_ON() (nocode_wanted &= ~0x20000000)
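/* Rough layout of nocode_wanted, as implied by the macros above and
   the initialization in tccgen_compile():
     bits 0..15 (unevalmask) - nesting counter for unevaluated
                               subexpressions (e.g. inside sizeof)
     0x20000000              - set by CODE_OFF() after unconditional
                               jumps: code is statically unreachable
     0x80000000              - set while parsing at file scope, where
                               only static data may be emitted (see
                               NODATA_WANTED / STATIC_DATA_WANTED) */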
64 /* Clear 'nocode_wanted' at label if it was used */
65 ST_FUNC void gsym(int t) { if (t) { gsym_addr(t, ind); CODE_ON(); }}
66 static int gind(void) { CODE_ON(); return ind; }
68 /* Set 'nocode_wanted' after unconditional jumps */
69 static void gjmp_addr_acs(int t) { gjmp_addr(t); CODE_OFF(); }
70 static int gjmp_acs(int t) { t = gjmp(t); CODE_OFF(); return t; }
72 /* These are #undef'd at the end of this file */
73 #define gjmp_addr gjmp_addr_acs
74 #define gjmp gjmp_acs
75 /* <---- */
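/* Sketch of how these wrappers cooperate (roughly what block() does
   for "if (c) A; else B;" later in this file):
       a = gvtst(1, 0);  // conditional forward jump over A if c is false
       ... A ...
       b = gjmp(0);      // unconditional jump over B; CODE_OFF()
       gsym(a);          // land here for the false case; CODE_ON()
       ... B ...
       gsym(b);
*/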
77 ST_DATA int global_expr; /* true if compound literals must be allocated globally (used during initializer parsing) */
78 ST_DATA CType func_vt; /* current function return type (used by return instruction) */
79 ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
80 ST_DATA int func_vc;
81 static int last_line_num, new_file, func_ind; /* debug info control */
82 ST_DATA const char *funcname;
83 ST_DATA CType int_type, func_old_type, char_pointer_type;
85 #if PTR_SIZE == 4
86 #define VT_SIZE_T (VT_INT | VT_UNSIGNED)
87 #define VT_PTRDIFF_T VT_INT
88 #elif LONG_SIZE == 4
89 #define VT_SIZE_T (VT_LLONG | VT_UNSIGNED)
90 #define VT_PTRDIFF_T VT_LLONG
91 #else
92 #define VT_SIZE_T (VT_LONG | VT_LLONG | VT_UNSIGNED)
93 #define VT_PTRDIFF_T (VT_LONG | VT_LLONG)
94 #endif
96 ST_DATA struct switch_t {
97 struct case_t {
98 int64_t v1, v2;
99 int sym;
100 } **p; int n; /* list of case ranges */
101 int def_sym; /* default symbol */
102 int *bsym;
103 struct scope *scope;
104 } *cur_switch; /* current switch */
106 #define MAX_TEMP_LOCAL_VARIABLE_NUMBER 0x4
107 /* list of temporary local variables on the stack in the current function */
108 ST_DATA struct temp_local_variable {
109 int location; // offset on the stack (stored in SValue.c.i)
110 short size;
111 short align;
112 } arr_temp_local_vars[MAX_TEMP_LOCAL_VARIABLE_NUMBER];
113 short nb_temp_local_vars;
115 static struct scope {
116 struct scope *prev;
117 struct { int loc, num; } vla;
118 struct { Sym *s; int n; } cl;
119 int *bsym, *csym;
120 Sym *lstk, *llstk;
121 } *cur_scope, *loop_scope, *root_scope;
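/* Roughly: 'vla' tracks per-scope VLA stack bookkeeping, 'cl' the
   pending cleanups, 'bsym'/'csym' the break/continue jump lists, and
   'lstk'/'llstk' the local symbol and local label stacks to restore
   when the scope is left. */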
123 /********************************************************/
124 #ifndef CONFIG_TCC_ASM
125 ST_FUNC void asm_instr(void)
127 tcc_error("inline asm() not supported");
129 ST_FUNC void asm_global_instr(void)
131 tcc_error("inline asm() not supported");
133 #endif
135 /* ------------------------------------------------------------------------- */
137 static void gen_cast(CType *type);
138 static void gen_cast_s(int t);
139 static inline CType *pointed_type(CType *type);
140 static int is_compatible_types(CType *type1, CType *type2);
141 static int parse_btype(CType *type, AttributeDef *ad);
142 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td);
143 static void parse_expr_type(CType *type);
144 static void init_putv(CType *type, Section *sec, unsigned long c);
145 static void decl_initializer(CType *type, Section *sec, unsigned long c, int flags);
146 static void block(int is_expr);
147 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
148 static void decl(int l);
149 static int decl0(int l, int is_for_loop_init, Sym *);
150 static void expr_eq(void);
151 static void vla_runtime_type_size(CType *type, int *a);
152 static int is_compatible_unqualified_types(CType *type1, CType *type2);
153 static inline int64_t expr_const64(void);
154 static void vpush64(int ty, unsigned long long v);
155 static void vpush(CType *type);
156 static int gvtst(int inv, int t);
157 static void gen_inline_functions(TCCState *s);
158 static void free_inline_functions(TCCState *s);
159 static void skip_or_save_block(TokenString **str);
160 static void gv_dup(void);
161 static int get_temp_local_var(int size,int align);
162 static void clear_temp_local_var_list();
163 static void cast_error(CType *st, CType *dt);
165 ST_INLN int is_float(int t)
167 int bt = t & VT_BTYPE;
168 return bt == VT_LDOUBLE
169 || bt == VT_DOUBLE
170 || bt == VT_FLOAT
171 || bt == VT_QFLOAT;
174 static inline int is_integer_btype(int bt)
176 return bt == VT_BYTE
177 || bt == VT_BOOL
178 || bt == VT_SHORT
179 || bt == VT_INT
180 || bt == VT_LLONG;
183 static int btype_size(int bt)
185 return bt == VT_BYTE || bt == VT_BOOL ? 1 :
186 bt == VT_SHORT ? 2 :
187 bt == VT_INT ? 4 :
188 bt == VT_LLONG ? 8 :
189 bt == VT_PTR ? PTR_SIZE : 0;
192 /* returns function return register from type */
193 static int R_RET(int t)
195 if (!is_float(t))
196 return REG_IRET;
197 #ifdef TCC_TARGET_X86_64
198 if ((t & VT_BTYPE) == VT_LDOUBLE)
199 return TREG_ST0;
200 #elif defined TCC_TARGET_RISCV64
201 if ((t & VT_BTYPE) == VT_LDOUBLE)
202 return REG_IRET;
203 #endif
204 return REG_FRET;
207 /* returns 2nd function return register, if any */
208 static int R2_RET(int t)
210 t &= VT_BTYPE;
211 #if PTR_SIZE == 4
212 if (t == VT_LLONG)
213 return REG_IRE2;
214 #elif defined TCC_TARGET_X86_64
215 if (t == VT_QLONG)
216 return REG_IRE2;
217 if (t == VT_QFLOAT)
218 return REG_FRE2;
219 #elif defined TCC_TARGET_RISCV64
220 if (t == VT_LDOUBLE)
221 return REG_IRE2;
222 #endif
223 return VT_CONST;
226 /* returns true for two-word types */
227 #define USING_TWO_WORDS(t) (R2_RET(t) != VT_CONST)
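/* Per the cases in R2_RET() this means e.g. VT_LLONG on 32-bit targets,
   VT_QLONG/VT_QFLOAT on x86-64 and VT_LDOUBLE on riscv64. */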
229 /* put function return registers to stack value */
230 static void PUT_R_RET(SValue *sv, int t)
232 sv->r = R_RET(t), sv->r2 = R2_RET(t);
235 /* returns function return register class for type t */
236 static int RC_RET(int t)
238 return reg_classes[R_RET(t)] & ~(RC_FLOAT | RC_INT);
241 /* returns generic register class for type t */
242 static int RC_TYPE(int t)
244 if (!is_float(t))
245 return RC_INT;
246 #ifdef TCC_TARGET_X86_64
247 if ((t & VT_BTYPE) == VT_LDOUBLE)
248 return RC_ST0;
249 if ((t & VT_BTYPE) == VT_QFLOAT)
250 return RC_FRET;
251 #elif defined TCC_TARGET_RISCV64
252 if ((t & VT_BTYPE) == VT_LDOUBLE)
253 return RC_INT;
254 #endif
255 return RC_FLOAT;
258 /* returns 2nd register class corresponding to t and rc */
259 static int RC2_TYPE(int t, int rc)
261 if (!USING_TWO_WORDS(t))
262 return 0;
263 #ifdef RC_IRE2
264 if (rc == RC_IRET)
265 return RC_IRE2;
266 #endif
267 #ifdef RC_FRE2
268 if (rc == RC_FRET)
269 return RC_FRE2;
270 #endif
271 if (rc & RC_FLOAT)
272 return RC_FLOAT;
273 return RC_INT;
276 /* we use our own 'finite' function to avoid potential problems with
277 non standard math libs */
278 /* XXX: endianness dependent */
279 ST_FUNC int ieee_finite(double d)
281 int p[4];
282 memcpy(p, &d, sizeof(double));
283 return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
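/* How the expression above works (assuming little-endian doubles, hence
   the XXX note): p[1] holds the sign, exponent and top mantissa bits.
   OR-ing with 0x800fffff forces every bit except the 11 exponent bits
   to 1, so adding 1 carries out of bit 31 exactly when the exponent is
   all ones (Inf/NaN); the shift therefore yields 1 for finite values
   and 0 otherwise. */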
286 /* compiling intel long double natively */
287 #if (defined __i386__ || defined __x86_64__) \
288 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
289 # define TCC_IS_NATIVE_387
290 #endif
292 ST_FUNC void test_lvalue(void)
294 if (!(vtop->r & VT_LVAL))
295 expect("lvalue");
298 ST_FUNC void check_vstack(void)
300 if (vtop != vstack - 1)
301 tcc_error("internal compiler error: vstack leak (%d)", vtop - vstack + 1);
304 /* ------------------------------------------------------------------------- */
305 /* vstack debugging aid */
307 #if 0
308 void pv (const char *lbl, int a, int b)
310 int i;
311 for (i = a; i < a + b; ++i) {
312 SValue *p = &vtop[-i];
313 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
314 lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
317 #endif
319 /* ------------------------------------------------------------------------- */
320 /* start of translation unit info */
321 ST_FUNC void tcc_debug_start(TCCState *s1)
323 if (s1->do_debug) {
324 char buf[512];
326 /* file info: full path + filename */
327 section_sym = put_elf_sym(symtab_section, 0, 0,
328 ELFW(ST_INFO)(STB_LOCAL, STT_SECTION), 0,
329 text_section->sh_num, NULL);
330 getcwd(buf, sizeof(buf));
331 #ifdef _WIN32
332 normalize_slashes(buf);
333 #endif
334 pstrcat(buf, sizeof(buf), "/");
335 put_stabs_r(s1, buf, N_SO, 0, 0,
336 text_section->data_offset, text_section, section_sym);
337 put_stabs_r(s1, file->prev->filename, N_SO, 0, 0,
338 text_section->data_offset, text_section, section_sym);
339 new_file = last_line_num = 0;
340 func_ind = -1;
341 /* we're currently 'including' the <command line> */
342 tcc_debug_bincl(s1);
345 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
346 symbols can be safely used */
347 put_elf_sym(symtab_section, 0, 0,
348 ELFW(ST_INFO)(STB_LOCAL, STT_FILE), 0,
349 SHN_ABS, file->filename);
352 /* put end of translation unit info */
353 ST_FUNC void tcc_debug_end(TCCState *s1)
355 if (!s1->do_debug)
356 return;
357 put_stabs_r(s1, NULL, N_SO, 0, 0,
358 text_section->data_offset, text_section, section_sym);
361 static BufferedFile* put_new_file(TCCState *s1)
363 BufferedFile *f = file;
364 /* use upper file if from inline ":asm:" */
365 if (f->filename[0] == ':')
366 f = f->prev;
367 if (f && new_file) {
368 put_stabs_r(s1, f->filename, N_SOL, 0, 0, ind, text_section, section_sym);
369 new_file = last_line_num = 0;
371 return f;
374 /* generate line number info */
375 ST_FUNC void tcc_debug_line(TCCState *s1)
377 BufferedFile *f;
378 if (!s1->do_debug || !(f = put_new_file(s1)))
379 return;
380 if (last_line_num == f->line_num)
381 return;
382 if (text_section != cur_text_section)
383 return;
384 if (func_ind != -1) {
385 put_stabn(s1, N_SLINE, 0, f->line_num, ind - func_ind);
386 } else {
387 /* from tcc_assemble */
388 put_stabs_r(s1, NULL, N_SLINE, 0, f->line_num, ind, text_section, section_sym);
390 last_line_num = f->line_num;
393 /* put function symbol */
394 ST_FUNC void tcc_debug_funcstart(TCCState *s1, Sym *sym)
396 char buf[512];
397 BufferedFile *f;
398 if (!s1->do_debug || !(f = put_new_file(s1)))
399 return;
400 /* XXX: we put here a dummy type */
401 snprintf(buf, sizeof(buf), "%s:%c1",
402 funcname, sym->type.t & VT_STATIC ? 'f' : 'F');
403 put_stabs_r(s1, buf, N_FUN, 0, f->line_num, 0, cur_text_section, sym->c);
404 tcc_debug_line(s1);
407 /* put function size */
408 ST_FUNC void tcc_debug_funcend(TCCState *s1, int size)
410 if (!s1->do_debug)
411 return;
412 #if 0 // this seems to confuse gnu tools
413 put_stabn(s1, N_FUN, 0, 0, size);
414 #endif
417 /* put alternative filename */
418 ST_FUNC void tcc_debug_putfile(TCCState *s1, const char *filename)
420 if (0 == strcmp(file->filename, filename))
421 return;
422 pstrcpy(file->filename, sizeof(file->filename), filename);
423 new_file = 1;
426 /* begin of #include */
427 ST_FUNC void tcc_debug_bincl(TCCState *s1)
429 if (!s1->do_debug)
430 return;
431 put_stabs(s1, file->filename, N_BINCL, 0, 0, 0);
432 new_file = 1;
435 /* end of #include */
436 ST_FUNC void tcc_debug_eincl(TCCState *s1)
438 if (!s1->do_debug)
439 return;
440 put_stabn(s1, N_EINCL, 0, 0, 0);
441 new_file = 1;
444 /* ------------------------------------------------------------------------- */
445 /* initialize vstack and types. This must also be done for tcc -E */
446 ST_FUNC void tccgen_init(TCCState *s1)
448 vtop = vstack - 1;
449 memset(vtop, 0, sizeof *vtop);
451 /* define some often used types */
452 int_type.t = VT_INT;
453 char_pointer_type.t = VT_BYTE;
454 mk_pointer(&char_pointer_type);
455 func_old_type.t = VT_FUNC;
456 func_old_type.ref = sym_push(SYM_FIELD, &int_type, 0, 0);
457 func_old_type.ref->f.func_call = FUNC_CDECL;
458 func_old_type.ref->f.func_type = FUNC_OLD;
461 ST_FUNC int tccgen_compile(TCCState *s1)
463 cur_text_section = NULL;
464 funcname = "";
465 anon_sym = SYM_FIRST_ANOM;
466 section_sym = 0;
467 const_wanted = 0;
468 nocode_wanted = 0x80000000;
469 local_scope = 0;
471 tcc_debug_start(s1);
472 #ifdef TCC_TARGET_ARM
473 arm_init(s1);
474 #endif
475 #ifdef INC_DEBUG
476 printf("%s: **** new file\n", file->filename);
477 #endif
478 parse_flags = PARSE_FLAG_PREPROCESS | PARSE_FLAG_TOK_NUM | PARSE_FLAG_TOK_STR;
479 next();
480 decl(VT_CONST);
481 gen_inline_functions(s1);
482 check_vstack();
483 /* end of translation unit info */
484 tcc_debug_end(s1);
485 return 0;
488 ST_FUNC void tccgen_finish(TCCState *s1)
490 free_inline_functions(s1);
491 sym_pop(&global_stack, NULL, 0);
492 sym_pop(&local_stack, NULL, 0);
493 /* free preprocessor macros */
494 free_defines(NULL);
495 /* free sym_pools */
496 dynarray_reset(&sym_pools, &nb_sym_pools);
497 sym_free_first = NULL;
500 /* ------------------------------------------------------------------------- */
501 ST_FUNC ElfSym *elfsym(Sym *s)
503 if (!s || !s->c)
504 return NULL;
505 return &((ElfSym *)symtab_section->data)[s->c];
508 /* apply storage attributes to Elf symbol */
509 ST_FUNC void update_storage(Sym *sym)
511 ElfSym *esym;
512 int sym_bind, old_sym_bind;
514 esym = elfsym(sym);
515 if (!esym)
516 return;
518 if (sym->a.visibility)
519 esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1))
520 | sym->a.visibility;
522 if (sym->type.t & (VT_STATIC | VT_INLINE))
523 sym_bind = STB_LOCAL;
524 else if (sym->a.weak)
525 sym_bind = STB_WEAK;
526 else
527 sym_bind = STB_GLOBAL;
528 old_sym_bind = ELFW(ST_BIND)(esym->st_info);
529 if (sym_bind != old_sym_bind) {
530 esym->st_info = ELFW(ST_INFO)(sym_bind, ELFW(ST_TYPE)(esym->st_info));
533 #ifdef TCC_TARGET_PE
534 if (sym->a.dllimport)
535 esym->st_other |= ST_PE_IMPORT;
536 if (sym->a.dllexport)
537 esym->st_other |= ST_PE_EXPORT;
538 #endif
540 #if 0
541 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
542 get_tok_str(sym->v, NULL),
543 sym_bind == STB_WEAK ? 'w' : sym_bind == STB_LOCAL ? 'l' : 'g',
544 sym->a.visibility,
545 sym->a.dllexport,
546 sym->a.dllimport
548 #endif
551 /* ------------------------------------------------------------------------- */
552 /* update sym->c so that it points to an external symbol in section
553 'section' with value 'value' */
555 ST_FUNC void put_extern_sym2(Sym *sym, int sh_num,
556 addr_t value, unsigned long size,
557 int can_add_underscore)
559 int sym_type, sym_bind, info, other, t;
560 ElfSym *esym;
561 const char *name;
562 char buf1[256];
563 #ifdef CONFIG_TCC_BCHECK
564 char buf[32];
565 #endif
567 if (!sym->c) {
568 name = get_tok_str(sym->v, NULL);
569 #ifdef CONFIG_TCC_BCHECK
570 if (tcc_state->do_bounds_check) {
571 /* XXX: avoid doing that for statics ? */
572 /* if bounds checking is activated, we change some function
573 names by adding the "__bound" prefix */
574 switch(sym->v) {
575 #ifdef TCC_TARGET_PE
576 /* XXX: we rely only on malloc hooks */
577 case TOK_malloc:
578 case TOK_free:
579 case TOK_realloc:
580 case TOK_memalign:
581 case TOK_calloc:
582 #endif
583 case TOK_memcpy:
584 case TOK_memmove:
585 case TOK_memset:
586 case TOK_memcmp:
587 case TOK_strlen:
588 case TOK_strcpy:
589 case TOK_strncpy:
590 case TOK_strcmp:
591 case TOK_strncmp:
592 case TOK_strcat:
593 case TOK_strchr:
594 case TOK_strdup:
595 case TOK_alloca:
596 case TOK_mmap:
597 case TOK_munmap:
598 strcpy(buf, "__bound_");
599 strcat(buf, name);
600 name = buf;
601 break;
604 #endif
605 t = sym->type.t;
606 if ((t & VT_BTYPE) == VT_FUNC) {
607 sym_type = STT_FUNC;
608 } else if ((t & VT_BTYPE) == VT_VOID) {
609 sym_type = STT_NOTYPE;
610 } else {
611 sym_type = STT_OBJECT;
613 if (t & (VT_STATIC | VT_INLINE))
614 sym_bind = STB_LOCAL;
615 else
616 sym_bind = STB_GLOBAL;
617 other = 0;
618 #ifdef TCC_TARGET_PE
619 if (sym_type == STT_FUNC && sym->type.ref) {
620 Sym *ref = sym->type.ref;
621 if (ref->a.nodecorate) {
622 can_add_underscore = 0;
624 if (ref->f.func_call == FUNC_STDCALL && can_add_underscore) {
625 sprintf(buf1, "_%s@%d", name, ref->f.func_args * PTR_SIZE);
626 name = buf1;
627 other |= ST_PE_STDCALL;
628 can_add_underscore = 0;
631 #endif
632 if (tcc_state->leading_underscore && can_add_underscore) {
633 buf1[0] = '_';
634 pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
635 name = buf1;
637 if (sym->asm_label)
638 name = get_tok_str(sym->asm_label, NULL);
639 info = ELFW(ST_INFO)(sym_bind, sym_type);
640 sym->c = put_elf_sym(symtab_section, value, size, info, other, sh_num, name);
641 } else {
642 esym = elfsym(sym);
643 esym->st_value = value;
644 esym->st_size = size;
645 esym->st_shndx = sh_num;
647 update_storage(sym);
650 ST_FUNC void put_extern_sym(Sym *sym, Section *section,
651 addr_t value, unsigned long size)
653 int sh_num = section ? section->sh_num : SHN_UNDEF;
654 put_extern_sym2(sym, sh_num, value, size, 1);
657 /* add a new relocation entry to symbol 'sym' in section 's' */
658 ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
659 addr_t addend)
661 int c = 0;
663 if (nocode_wanted && s == cur_text_section)
664 return;
666 if (sym) {
667 if (0 == sym->c)
668 put_extern_sym(sym, NULL, 0, 0);
669 c = sym->c;
672 /* now we can add ELF relocation info */
673 put_elf_reloca(symtab_section, s, offset, type, c, addend);
676 #if PTR_SIZE == 4
677 ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
679 greloca(s, sym, offset, type, 0);
681 #endif
683 /* ------------------------------------------------------------------------- */
684 /* symbol allocator */
685 static Sym *__sym_malloc(void)
687 Sym *sym_pool, *sym, *last_sym;
688 int i;
690 sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
691 dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);
693 last_sym = sym_free_first;
694 sym = sym_pool;
695 for(i = 0; i < SYM_POOL_NB; i++) {
696 sym->next = last_sym;
697 last_sym = sym;
698 sym++;
700 sym_free_first = last_sym;
701 return last_sym;
704 static inline Sym *sym_malloc(void)
706 Sym *sym;
707 #ifndef SYM_DEBUG
708 sym = sym_free_first;
709 if (!sym)
710 sym = __sym_malloc();
711 sym_free_first = sym->next;
712 return sym;
713 #else
714 sym = tcc_malloc(sizeof(Sym));
715 return sym;
716 #endif
719 ST_INLN void sym_free(Sym *sym)
721 #ifndef SYM_DEBUG
722 sym->next = sym_free_first;
723 sym_free_first = sym;
724 #else
725 tcc_free(sym);
726 #endif
729 /* push, without hashing */
730 ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, int c)
732 Sym *s;
734 s = sym_malloc();
735 memset(s, 0, sizeof *s);
736 s->v = v;
737 s->type.t = t;
738 s->c = c;
739 /* add in stack */
740 s->prev = *ps;
741 *ps = s;
742 return s;
745 /* find a symbol and return its associated structure. 's' is the top
746 of the symbol stack */
747 ST_FUNC Sym *sym_find2(Sym *s, int v)
749 while (s) {
750 if (s->v == v)
751 return s;
752 else if (s->v == -1)
753 return NULL;
754 s = s->prev;
756 return NULL;
759 /* structure lookup */
760 ST_INLN Sym *struct_find(int v)
762 v -= TOK_IDENT;
763 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
764 return NULL;
765 return table_ident[v]->sym_struct;
768 /* find an identifier */
769 ST_INLN Sym *sym_find(int v)
771 v -= TOK_IDENT;
772 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
773 return NULL;
774 return table_ident[v]->sym_identifier;
777 static int sym_scope(Sym *s)
779 if (IS_ENUM_VAL (s->type.t))
780 return s->type.ref->sym_scope;
781 else
782 return s->sym_scope;
785 /* push a given symbol on the symbol stack */
786 ST_FUNC Sym *sym_push(int v, CType *type, int r, int c)
788 Sym *s, **ps;
789 TokenSym *ts;
791 if (local_stack)
792 ps = &local_stack;
793 else
794 ps = &global_stack;
795 s = sym_push2(ps, v, type->t, c);
796 s->type.ref = type->ref;
797 s->r = r;
798 /* don't record fields or anonymous symbols */
799 /* XXX: simplify */
800 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
801 /* record symbol in token array */
802 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
803 if (v & SYM_STRUCT)
804 ps = &ts->sym_struct;
805 else
806 ps = &ts->sym_identifier;
807 s->prev_tok = *ps;
808 *ps = s;
809 s->sym_scope = local_scope;
810 if (s->prev_tok && sym_scope(s->prev_tok) == s->sym_scope)
811 tcc_error("redeclaration of '%s'",
812 get_tok_str(v & ~SYM_STRUCT, NULL));
814 return s;
817 /* push a global identifier */
818 ST_FUNC Sym *global_identifier_push(int v, int t, int c)
820 Sym *s, **ps;
821 s = sym_push2(&global_stack, v, t, c);
822 s->r = VT_CONST | VT_SYM;
823 /* don't record anonymous symbol */
824 if (v < SYM_FIRST_ANOM) {
825 ps = &table_ident[v - TOK_IDENT]->sym_identifier;
826 /* modify the topmost local identifier, so that sym_identifier will
827 point to 's' when popped; happens when called from inline asm */
828 while (*ps != NULL && (*ps)->sym_scope)
829 ps = &(*ps)->prev_tok;
830 s->prev_tok = *ps;
831 *ps = s;
833 return s;
836 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
837 pop them yet from the list, but do remove them from the token array. */
838 ST_FUNC void sym_pop(Sym **ptop, Sym *b, int keep)
840 Sym *s, *ss, **ps;
841 TokenSym *ts;
842 int v;
844 s = *ptop;
845 while(s != b) {
846 ss = s->prev;
847 v = s->v;
848 /* remove symbol in token array */
849 /* XXX: simplify */
850 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
851 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
852 if (v & SYM_STRUCT)
853 ps = &ts->sym_struct;
854 else
855 ps = &ts->sym_identifier;
856 *ps = s->prev_tok;
858 if (!keep)
859 sym_free(s);
860 s = ss;
862 if (!keep)
863 *ptop = b;
866 /* ------------------------------------------------------------------------- */
867 static void vcheck_cmp(void)
869 /* cannot leave cpu flags if other instructions are generated. Also
870 avoid leaving VT_JMP anywhere except on the top of the stack
871 because it would complicate the code generator.
873 Don't do this when nocode_wanted. vtop might come from
874 !nocode_wanted regions (see 88_codeopt.c) and transforming
875 it to a register without actually generating code is wrong
876 as their value might still be used for real. All values
877 we push under nocode_wanted will eventually be popped
878 again, so that the VT_CMP/VT_JMP value will be in vtop
879 when code is unsuppressed again. */
881 if (vtop->r == VT_CMP && !nocode_wanted)
882 gv(RC_INT);
885 static void vsetc(CType *type, int r, CValue *vc)
887 if (vtop >= vstack + (VSTACK_SIZE - 1))
888 tcc_error("memory full (vstack)");
889 vcheck_cmp();
890 vtop++;
891 vtop->type = *type;
892 vtop->r = r;
893 vtop->r2 = VT_CONST;
894 vtop->c = *vc;
895 vtop->sym = NULL;
898 ST_FUNC void vswap(void)
900 SValue tmp;
902 vcheck_cmp();
903 tmp = vtop[0];
904 vtop[0] = vtop[-1];
905 vtop[-1] = tmp;
908 /* pop stack value */
909 ST_FUNC void vpop(void)
911 int v;
912 v = vtop->r & VT_VALMASK;
913 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
914 /* for x86, we need to pop the FP stack */
915 if (v == TREG_ST0) {
916 o(0xd8dd); /* fstp %st(0) */
917 } else
918 #endif
919 if (v == VT_CMP) {
920 /* need to put correct jump if && or || without test */
921 gsym(vtop->jtrue);
922 gsym(vtop->jfalse);
924 vtop--;
927 /* push constant of type "type" with useless value */
928 static void vpush(CType *type)
930 vset(type, VT_CONST, 0);
933 /* push arbitrary 64bit constant */
934 static void vpush64(int ty, unsigned long long v)
936 CValue cval;
937 CType ctype;
938 ctype.t = ty;
939 ctype.ref = NULL;
940 cval.i = v;
941 vsetc(&ctype, VT_CONST, &cval);
944 /* push integer constant */
945 ST_FUNC void vpushi(int v)
947 vpush64(VT_INT, v);
950 /* push a pointer sized constant */
951 static void vpushs(addr_t v)
953 vpush64(VT_SIZE_T, v);
956 /* push long long constant */
957 static inline void vpushll(long long v)
959 vpush64(VT_LLONG, v);
962 ST_FUNC void vset(CType *type, int r, int v)
964 CValue cval;
965 cval.i = v;
966 vsetc(type, r, &cval);
969 static void vseti(int r, int v)
971 CType type;
972 type.t = VT_INT;
973 type.ref = NULL;
974 vset(&type, r, v);
977 ST_FUNC void vpushv(SValue *v)
979 if (vtop >= vstack + (VSTACK_SIZE - 1))
980 tcc_error("memory full (vstack)");
981 vtop++;
982 *vtop = *v;
985 static void vdup(void)
987 vpushv(vtop);
990 /* rotate the n first stack elements to the bottom
991 I1 ... In -> I2 ... In I1 [top is right]
992 */
993 ST_FUNC void vrotb(int n)
995 int i;
996 SValue tmp;
998 vcheck_cmp();
999 tmp = vtop[-n + 1];
1000 for(i=-n+1;i!=0;i++)
1001 vtop[i] = vtop[i+1];
1002 vtop[0] = tmp;
1005 /* rotate the n elements before entry e towards the top
1006 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
1007 */
1008 ST_FUNC void vrote(SValue *e, int n)
1010 int i;
1011 SValue tmp;
1013 vcheck_cmp();
1014 tmp = *e;
1015 for(i = 0;i < n - 1; i++)
1016 e[-i] = e[-i - 1];
1017 e[-n + 1] = tmp;
1020 /* rotate the n first stack elements to the top
1021 I1 ... In -> In I1 ... I(n-1) [top is right]
1022 */
1023 ST_FUNC void vrott(int n)
1025 vrote(vtop, n);
1028 /* ------------------------------------------------------------------------- */
1029 /* vtop->r = VT_CMP means CPU-flags have been set from comparison or test. */
1031 /* called from generators to set the result from relational ops */
1032 ST_FUNC void vset_VT_CMP(int op)
1034 vtop->r = VT_CMP;
1035 vtop->cmp_op = op;
1036 vtop->jfalse = 0;
1037 vtop->jtrue = 0;
1040 /* called once before asking generators to load VT_CMP to a register */
1041 static void vset_VT_JMP(void)
1043 int op = vtop->cmp_op;
1044 if (vtop->jtrue || vtop->jfalse) {
1045 /* we need to jump to 'mov $0,%R' or 'mov $1,%R' */
1046 int inv = op & (op < 2); /* small optimization */
1047 vseti(VT_JMP+inv, gvtst(inv, 0));
1048 } else {
1049 /* otherwise convert flags (rsp. 0/1) to register */
1050 vtop->c.i = op;
1051 if (op < 2) /* doesn't seem to happen */
1052 vtop->r = VT_CONST;
1056 /* Set CPU Flags, doesn't yet jump */
1057 static void gvtst_set(int inv, int t)
1059 int *p;
1060 if (vtop->r != VT_CMP) {
1061 vpushi(0);
1062 gen_op(TOK_NE);
1063 if (vtop->r == VT_CMP) /* must be VT_CONST otherwise */
1064 ;
1065 else if (vtop->r == VT_CONST)
1066 vset_VT_CMP(vtop->c.i != 0);
1067 else
1068 tcc_error("ICE");
1070 p = inv ? &vtop->jfalse : &vtop->jtrue;
1071 *p = gjmp_append(*p, t);
1074 /* Generate value test
1076 * Generate a test for any value (jump, comparison and integers) */
1077 static int gvtst(int inv, int t)
1079 int op, u, x;
1081 gvtst_set(inv, t);
1083 t = vtop->jtrue, u = vtop->jfalse;
1084 if (inv)
1085 x = u, u = t, t = x;
1086 op = vtop->cmp_op;
1088 /* jump to the wanted target */
1089 if (op > 1)
1090 t = gjmp_cond(op ^ inv, t);
1091 else if (op != inv)
1092 t = gjmp(t);
1093 /* resolve complementary jumps to here */
1094 gsym(u);
1096 vtop--;
1097 return t;
1101 /* generate a zero or nonzero test */
1101 static void gen_test_zero(int op)
1103 if (vtop->r == VT_CMP) {
1104 int j;
1105 if (op == TOK_EQ) {
1106 j = vtop->jfalse;
1107 vtop->jfalse = vtop->jtrue;
1108 vtop->jtrue = j;
1109 vtop->cmp_op ^= 1;
1111 } else {
1112 vpushi(0);
1113 gen_op(op);
1117 /* ------------------------------------------------------------------------- */
1118 /* push a symbol value of TYPE */
1119 static inline void vpushsym(CType *type, Sym *sym)
1121 CValue cval;
1122 cval.i = 0;
1123 vsetc(type, VT_CONST | VT_SYM, &cval);
1124 vtop->sym = sym;
1127 /* Return a static symbol pointing to a section */
1128 ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
1130 int v;
1131 Sym *sym;
1133 v = anon_sym++;
1134 sym = sym_push(v, type, VT_CONST | VT_SYM, 0);
1135 sym->type.t |= VT_STATIC;
1136 put_extern_sym(sym, sec, offset, size);
1137 return sym;
1140 /* push a reference to a section offset by adding a dummy symbol */
1141 static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
1143 vpushsym(type, get_sym_ref(type, sec, offset, size));
1146 /* define a new external reference to a symbol 'v' of type 'u' */
1147 ST_FUNC Sym *external_global_sym(int v, CType *type)
1149 Sym *s;
1151 s = sym_find(v);
1152 if (!s) {
1153 /* push forward reference */
1154 s = global_identifier_push(v, type->t | VT_EXTERN, 0);
1155 s->type.ref = type->ref;
1156 } else if (IS_ASM_SYM(s)) {
1157 s->type.t = type->t | (s->type.t & VT_EXTERN);
1158 s->type.ref = type->ref;
1159 update_storage(s);
1161 return s;
1164 /* Merge symbol attributes. */
1165 static void merge_symattr(struct SymAttr *sa, struct SymAttr *sa1)
1167 if (sa1->aligned && !sa->aligned)
1168 sa->aligned = sa1->aligned;
1169 sa->packed |= sa1->packed;
1170 sa->weak |= sa1->weak;
1171 if (sa1->visibility != STV_DEFAULT) {
1172 int vis = sa->visibility;
1173 if (vis == STV_DEFAULT
1174 || vis > sa1->visibility)
1175 vis = sa1->visibility;
1176 sa->visibility = vis;
1178 sa->dllexport |= sa1->dllexport;
1179 sa->nodecorate |= sa1->nodecorate;
1180 sa->dllimport |= sa1->dllimport;
1183 /* Merge function attributes. */
1184 static void merge_funcattr(struct FuncAttr *fa, struct FuncAttr *fa1)
1186 if (fa1->func_call && !fa->func_call)
1187 fa->func_call = fa1->func_call;
1188 if (fa1->func_type && !fa->func_type)
1189 fa->func_type = fa1->func_type;
1190 if (fa1->func_args && !fa->func_args)
1191 fa->func_args = fa1->func_args;
1194 /* Merge attributes. */
1195 static void merge_attr(AttributeDef *ad, AttributeDef *ad1)
1197 merge_symattr(&ad->a, &ad1->a);
1198 merge_funcattr(&ad->f, &ad1->f);
1200 if (ad1->section)
1201 ad->section = ad1->section;
1202 if (ad1->alias_target)
1203 ad->alias_target = ad1->alias_target;
1204 if (ad1->asm_label)
1205 ad->asm_label = ad1->asm_label;
1206 if (ad1->attr_mode)
1207 ad->attr_mode = ad1->attr_mode;
1210 /* Merge some type attributes. */
1211 static void patch_type(Sym *sym, CType *type)
1213 if (!(type->t & VT_EXTERN) || IS_ENUM_VAL(sym->type.t)) {
1214 if (!(sym->type.t & VT_EXTERN))
1215 tcc_error("redefinition of '%s'", get_tok_str(sym->v, NULL));
1216 sym->type.t &= ~VT_EXTERN;
1219 if (IS_ASM_SYM(sym)) {
1220 /* stay static if both are static */
1221 sym->type.t = type->t & (sym->type.t | ~VT_STATIC);
1222 sym->type.ref = type->ref;
1225 if (!is_compatible_types(&sym->type, type)) {
1226 tcc_error("incompatible types for redefinition of '%s'",
1227 get_tok_str(sym->v, NULL));
1229 } else if ((sym->type.t & VT_BTYPE) == VT_FUNC) {
1230 int static_proto = sym->type.t & VT_STATIC;
1231 /* warn if static follows non-static function declaration */
1232 if ((type->t & VT_STATIC) && !static_proto
1233 /* XXX this test for inline shouldn't be here. Until we
1234 implement gnu-inline mode again it silences a warning for
1235 mingw caused by our workarounds. */
1236 && !((type->t | sym->type.t) & VT_INLINE))
1237 tcc_warning("static storage ignored for redefinition of '%s'",
1238 get_tok_str(sym->v, NULL));
1240 /* set 'inline' if both agree or if one has static */
1241 if ((type->t | sym->type.t) & VT_INLINE) {
1242 if (!((type->t ^ sym->type.t) & VT_INLINE)
1243 || ((type->t | sym->type.t) & VT_STATIC))
1244 static_proto |= VT_INLINE;
1247 if (0 == (type->t & VT_EXTERN)) {
1248 /* put complete type, use static from prototype */
1249 sym->type.t = (type->t & ~(VT_STATIC|VT_INLINE)) | static_proto;
1250 sym->type.ref = type->ref;
1251 } else {
1252 sym->type.t &= ~VT_INLINE | static_proto;
1255 if (sym->type.ref->f.func_type == FUNC_OLD
1256 && type->ref->f.func_type != FUNC_OLD) {
1257 sym->type.ref = type->ref;
1260 } else {
1261 if ((sym->type.t & VT_ARRAY) && type->ref->c >= 0) {
1262 /* set array size if it was omitted in extern declaration */
1263 sym->type.ref->c = type->ref->c;
1265 if ((type->t ^ sym->type.t) & VT_STATIC)
1266 tcc_warning("storage mismatch for redefinition of '%s'",
1267 get_tok_str(sym->v, NULL));
1271 /* Merge some storage attributes. */
1272 static void patch_storage(Sym *sym, AttributeDef *ad, CType *type)
1274 if (type)
1275 patch_type(sym, type);
1277 #ifdef TCC_TARGET_PE
1278 if (sym->a.dllimport != ad->a.dllimport)
1279 tcc_error("incompatible dll linkage for redefinition of '%s'",
1280 get_tok_str(sym->v, NULL));
1281 #endif
1282 merge_symattr(&sym->a, &ad->a);
1283 if (ad->asm_label)
1284 sym->asm_label = ad->asm_label;
1285 update_storage(sym);
1288 /* copy sym to other stack */
1289 static Sym *sym_copy(Sym *s0, Sym **ps)
1291 Sym *s;
1292 s = sym_malloc(), *s = *s0;
1293 s->prev = *ps, *ps = s;
1294 if (s->v < SYM_FIRST_ANOM) {
1295 ps = &table_ident[s->v - TOK_IDENT]->sym_identifier;
1296 s->prev_tok = *ps, *ps = s;
1298 return s;
1301 /* copy s->type.ref to stack 'ps' for VT_FUNC and VT_PTR */
1302 static void sym_copy_ref(Sym *s, Sym **ps)
1304 int bt = s->type.t & VT_BTYPE;
1305 if (bt == VT_FUNC || bt == VT_PTR) {
1306 Sym **sp = &s->type.ref;
1307 for (s = *sp, *sp = NULL; s; s = s->next) {
1308 Sym *s2 = sym_copy(s, ps);
1309 sp = &(*sp = s2)->next;
1310 sym_copy_ref(s2, ps);
1315 /* define a new external reference to a symbol 'v' */
1316 static Sym *external_sym(int v, CType *type, int r, AttributeDef *ad)
1318 Sym *s;
1320 /* look for global symbol */
1321 s = sym_find(v);
1322 while (s && s->sym_scope)
1323 s = s->prev_tok;
1325 if (!s) {
1326 /* push forward reference */
1327 s = global_identifier_push(v, type->t, 0);
1328 s->r |= r;
1329 s->a = ad->a;
1330 s->asm_label = ad->asm_label;
1331 s->type.ref = type->ref;
1332 /* copy type to the global stack */
1333 if (local_stack)
1334 sym_copy_ref(s, &global_stack);
1335 } else {
1336 patch_storage(s, ad, type);
1338 /* push variables on local_stack if any */
1339 if (local_stack && (s->type.t & VT_BTYPE) != VT_FUNC)
1340 s = sym_copy(s, &local_stack);
1341 return s;
1344 /* push a reference to global symbol v */
1345 ST_FUNC void vpush_global_sym(CType *type, int v)
1347 vpushsym(type, external_global_sym(v, type));
1350 /* save registers up to (vtop - n) stack entry */
1351 ST_FUNC void save_regs(int n)
1353 SValue *p, *p1;
1354 for(p = vstack, p1 = vtop - n; p <= p1; p++)
1355 save_reg(p->r);
1358 /* save r to the memory stack, and mark it as being free */
1359 ST_FUNC void save_reg(int r)
1361 save_reg_upstack(r, 0);
1364 /* save r to the memory stack, and mark it as being free,
1365 if seen up to (vtop - n) stack entry */
1366 ST_FUNC void save_reg_upstack(int r, int n)
1368 int l, size, align, bt;
1369 SValue *p, *p1, sv;
1371 if ((r &= VT_VALMASK) >= VT_CONST)
1372 return;
1373 if (nocode_wanted)
1374 return;
1375 l = 0;
1376 for(p = vstack, p1 = vtop - n; p <= p1; p++) {
1377 if ((p->r & VT_VALMASK) == r || p->r2 == r) {
1378 /* must save value on stack if not already done */
1379 if (!l) {
1380 bt = p->type.t & VT_BTYPE;
1381 if (bt == VT_VOID)
1382 continue;
1383 if ((p->r & VT_LVAL) || bt == VT_FUNC)
1384 bt = VT_PTR;
1385 sv.type.t = bt;
1386 size = type_size(&sv.type, &align);
1387 l = get_temp_local_var(size,align);
1388 sv.r = VT_LOCAL | VT_LVAL;
1389 sv.c.i = l;
1390 store(p->r & VT_VALMASK, &sv);
1391 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1392 /* x86 specific: need to pop fp register ST0 if saved */
1393 if (r == TREG_ST0) {
1394 o(0xd8dd); /* fstp %st(0) */
1396 #endif
1397 /* special long long case */
1398 if (p->r2 < VT_CONST && USING_TWO_WORDS(bt)) {
1399 sv.c.i += PTR_SIZE;
1400 store(p->r2, &sv);
1403 /* mark that stack entry as being saved on the stack */
1404 if (p->r & VT_LVAL) {
1405 /* also clear the bounded flag because the
1406 relocation address of the function was stored in
1407 p->c.i */
1408 p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
1409 } else {
1410 p->r = VT_LVAL | VT_LOCAL;
1412 p->r2 = VT_CONST;
1413 p->c.i = l;
1418 #ifdef TCC_TARGET_ARM
1419 /* find a register of class 'rc2' with at most one reference on stack.
1420 * If none, call get_reg(rc) */
1421 ST_FUNC int get_reg_ex(int rc, int rc2)
1423 int r;
1424 SValue *p;
1426 for(r=0;r<NB_REGS;r++) {
1427 if (reg_classes[r] & rc2) {
1428 int n;
1429 n=0;
1430 for(p = vstack; p <= vtop; p++) {
1431 if ((p->r & VT_VALMASK) == r ||
1432 p->r2 == r)
1433 n++;
1435 if (n <= 1)
1436 return r;
1439 return get_reg(rc);
1441 #endif
1443 /* find a free register of class 'rc'. If none, save one register */
1444 ST_FUNC int get_reg(int rc)
1446 int r;
1447 SValue *p;
1449 /* find a free register */
1450 for(r=0;r<NB_REGS;r++) {
1451 if (reg_classes[r] & rc) {
1452 if (nocode_wanted)
1453 return r;
1454 for(p=vstack;p<=vtop;p++) {
1455 if ((p->r & VT_VALMASK) == r ||
1456 p->r2 == r)
1457 goto notfound;
1459 return r;
1461 notfound: ;
1464 /* no register left : free the first one on the stack (VERY
1465 IMPORTANT to start from the bottom to ensure that we don't
1466 spill registers used in gen_opi()) */
1467 for(p=vstack;p<=vtop;p++) {
1468 /* look at second register (if long long) */
1469 r = p->r2;
1470 if (r < VT_CONST && (reg_classes[r] & rc))
1471 goto save_found;
1472 r = p->r & VT_VALMASK;
1473 if (r < VT_CONST && (reg_classes[r] & rc)) {
1474 save_found:
1475 save_reg(r);
1476 return r;
1479 /* Should never come here */
1480 return -1;
1483 /* find a free temporary local variable (return its offset on the stack) matching the given size and alignment. If none, add a new temporary stack variable */
1484 static int get_temp_local_var(int size,int align){
1485 int i;
1486 struct temp_local_variable *temp_var;
1487 int found_var;
1488 SValue *p;
1489 int r;
1490 char free;
1491 char found;
1492 found=0;
1493 for(i=0;i<nb_temp_local_vars;i++){
1494 temp_var=&arr_temp_local_vars[i];
1495 if(temp_var->size<size||align!=temp_var->align){
1496 continue;
1498 /* check if temp_var is free */
1499 free=1;
1500 for(p=vstack;p<=vtop;p++) {
1501 r=p->r&VT_VALMASK;
1502 if(r==VT_LOCAL||r==VT_LLOCAL){
1503 if(p->c.i==temp_var->location){
1504 free=0;
1505 break;
1509 if(free){
1510 found_var=temp_var->location;
1511 found=1;
1512 break;
1515 if(!found){
1516 loc = (loc - size) & -align;
1517 if(nb_temp_local_vars<MAX_TEMP_LOCAL_VARIABLE_NUMBER){
1518 temp_var=&arr_temp_local_vars[i];
1519 temp_var->location=loc;
1520 temp_var->size=size;
1521 temp_var->align=align;
1522 nb_temp_local_vars++;
1524 found_var=loc;
1526 return found_var;
1529 static void clear_temp_local_var_list(){
1530 nb_temp_local_vars=0;
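/* These temporaries are the slots save_reg_upstack() spills registers
   into; clear_temp_local_var_list() simply resets the pool. */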
1533 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1534 if needed */
1535 static void move_reg(int r, int s, int t)
1537 SValue sv;
1539 if (r != s) {
1540 save_reg(r);
1541 sv.type.t = t;
1542 sv.type.ref = NULL;
1543 sv.r = s;
1544 sv.c.i = 0;
1545 load(r, &sv);
1549 /* get address of vtop (vtop MUST BE an lvalue) */
1550 ST_FUNC void gaddrof(void)
1552 vtop->r &= ~VT_LVAL;
1553 /* tricky: if saved lvalue, then we can go back to lvalue */
1554 if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
1555 vtop->r = (vtop->r & ~VT_VALMASK) | VT_LOCAL | VT_LVAL;
1558 #ifdef CONFIG_TCC_BCHECK
1559 /* generate lvalue bound code */
1560 static void gbound(void)
1562 CType type1;
1564 vtop->r &= ~VT_MUSTBOUND;
1565 /* if lvalue, then use checking code before dereferencing */
1566 if (vtop->r & VT_LVAL) {
1567 /* if not VT_BOUNDED value, then make one */
1568 if (!(vtop->r & VT_BOUNDED)) {
1569 /* must save type because we must set it to int to get pointer */
1570 type1 = vtop->type;
1571 vtop->type.t = VT_PTR;
1572 gaddrof();
1573 vpushi(0);
1574 gen_bounded_ptr_add();
1575 vtop->r |= VT_LVAL;
1576 vtop->type = type1;
1578 /* then check for dereferencing */
1579 gen_bounded_ptr_deref();
1583 /* we need to call __bound_ptr_add before we start to load function
1584 args into registers */
1585 ST_FUNC void gbound_args(int nb_args)
1587 int i;
1588 for (i = 1; i <= nb_args; ++i)
1589 if (vtop[1 - i].r & VT_MUSTBOUND) {
1590 vrotb(i);
1591 gbound();
1592 vrott(i);
1596 /* Add bounds for local symbols from S to E (via ->prev) */
1597 static void add_local_bounds(Sym *s, Sym *e)
1599 for (; s != e; s = s->prev) {
1600 if (!s->v || (s->r & VT_VALMASK) != VT_LOCAL)
1601 continue;
1602 /* Add arrays/structs/unions because we always take address */
1603 if ((s->type.t & VT_ARRAY)
1604 || (s->type.t & VT_BTYPE) == VT_STRUCT
1605 || s->a.addrtaken) {
1606 /* add local bound info */
1607 int align, size = type_size(&s->type, &align);
1608 addr_t *bounds_ptr = section_ptr_add(lbounds_section,
1609 2 * sizeof(addr_t));
1610 bounds_ptr[0] = s->c;
1611 bounds_ptr[1] = size;
1615 #endif
1617 /* Wrapper around sym_pop, that potentially also registers local bounds. */
1618 static void pop_local_syms(Sym **ptop, Sym *b, int keep)
1620 #ifdef CONFIG_TCC_BCHECK
1621 if (!keep && tcc_state->do_bounds_check)
1622 add_local_bounds(*ptop, b);
1623 #endif
1624 sym_pop(ptop, b, keep);
1627 static void incr_bf_adr(int o)
1629 vtop->type = char_pointer_type;
1630 gaddrof();
1631 vpushs(o);
1632 gen_op('+');
1633 vtop->type.t = VT_BYTE | VT_UNSIGNED;
1634 vtop->r |= VT_LVAL;
1637 /* single-byte load mode for packed or otherwise unaligned bitfields */
1638 static void load_packed_bf(CType *type, int bit_pos, int bit_size)
1640 int n, o, bits;
1641 save_reg_upstack(vtop->r, 1);
1642 vpush64(type->t & VT_BTYPE, 0); // B X
1643 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
1644 do {
1645 vswap(); // X B
1646 incr_bf_adr(o);
1647 vdup(); // X B B
1648 n = 8 - bit_pos;
1649 if (n > bit_size)
1650 n = bit_size;
1651 if (bit_pos)
1652 vpushi(bit_pos), gen_op(TOK_SHR), bit_pos = 0; // X B Y
1653 if (n < 8)
1654 vpushi((1 << n) - 1), gen_op('&');
1655 gen_cast(type);
1656 if (bits)
1657 vpushi(bits), gen_op(TOK_SHL);
1658 vrotb(3); // B Y X
1659 gen_op('|'); // B X
1660 bits += n, bit_size -= n, o = 1;
1661 } while (bit_size);
1662 vswap(), vpop();
1663 if (!(type->t & VT_UNSIGNED)) {
1664 n = ((type->t & VT_BTYPE) == VT_LLONG ? 64 : 32) - bits;
1665 vpushi(n), gen_op(TOK_SHL);
1666 vpushi(n), gen_op(TOK_SAR);
1670 /* single-byte store mode for packed or otherwise unaligned bitfields */
1671 static void store_packed_bf(int bit_pos, int bit_size)
1673 int bits, n, o, m, c;
1675 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1676 vswap(); // X B
1677 save_reg_upstack(vtop->r, 1);
1678 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
1679 do {
1680 incr_bf_adr(o); // X B
1681 vswap(); //B X
1682 c ? vdup() : gv_dup(); // B V X
1683 vrott(3); // X B V
1684 if (bits)
1685 vpushi(bits), gen_op(TOK_SHR);
1686 if (bit_pos)
1687 vpushi(bit_pos), gen_op(TOK_SHL);
1688 n = 8 - bit_pos;
1689 if (n > bit_size)
1690 n = bit_size;
1691 if (n < 8) {
1692 m = ((1 << n) - 1) << bit_pos;
1693 vpushi(m), gen_op('&'); // X B V1
1694 vpushv(vtop-1); // X B V1 B
1695 vpushi(m & 0x80 ? ~m & 0x7f : ~m);
1696 gen_op('&'); // X B V1 B1
1697 gen_op('|'); // X B V2
1699 vdup(), vtop[-1] = vtop[-2]; // X B B V2
1700 vstore(), vpop(); // X B
1701 bits += n, bit_size -= n, bit_pos = 0, o = 1;
1702 } while (bit_size);
1703 vpop(), vpop();
1706 static int adjust_bf(SValue *sv, int bit_pos, int bit_size)
1708 int t;
1709 if (0 == sv->type.ref)
1710 return 0;
1711 t = sv->type.ref->auxtype;
1712 if (t != -1 && t != VT_STRUCT) {
1713 sv->type.t = (sv->type.t & ~VT_BTYPE) | t;
1714 sv->r |= VT_LVAL;
1716 return t;
1719 /* store vtop in a register belonging to class 'rc'. lvalues are
1720 converted to values. Cannot be used if the value cannot be converted
1721 to a register value (such as structures). */
1722 ST_FUNC int gv(int rc)
1724 int r, r2, r_ok, r2_ok, rc2, bt;
1725 int bit_pos, bit_size, size, align;
1727 /* NOTE: get_reg can modify vstack[] */
1728 if (vtop->type.t & VT_BITFIELD) {
1729 CType type;
1731 bit_pos = BIT_POS(vtop->type.t);
1732 bit_size = BIT_SIZE(vtop->type.t);
1733 /* remove bit field info to avoid loops */
1734 vtop->type.t &= ~VT_STRUCT_MASK;
1736 type.ref = NULL;
1737 type.t = vtop->type.t & VT_UNSIGNED;
1738 if ((vtop->type.t & VT_BTYPE) == VT_BOOL)
1739 type.t |= VT_UNSIGNED;
1741 r = adjust_bf(vtop, bit_pos, bit_size);
1743 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
1744 type.t |= VT_LLONG;
1745 else
1746 type.t |= VT_INT;
1748 if (r == VT_STRUCT) {
1749 load_packed_bf(&type, bit_pos, bit_size);
1750 } else {
1751 int bits = (type.t & VT_BTYPE) == VT_LLONG ? 64 : 32;
1752 /* cast to int to propagate signedness in following ops */
1753 gen_cast(&type);
1754 /* generate shifts */
1755 vpushi(bits - (bit_pos + bit_size));
1756 gen_op(TOK_SHL);
1757 vpushi(bits - bit_size);
1758 /* NOTE: transformed to SHR if unsigned */
1759 gen_op(TOK_SAR);
1761 r = gv(rc);
1762 } else {
1763 if (is_float(vtop->type.t) &&
1764 (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1765 unsigned long offset;
1766 /* CPUs usually cannot use float constants, so we store them
1767 generically in data segment */
1768 size = type_size(&vtop->type, &align);
1769 if (NODATA_WANTED)
1770 size = 0, align = 1;
1771 offset = section_add(data_section, size, align);
1772 vpush_ref(&vtop->type, data_section, offset, size);
1773 vswap();
1774 init_putv(&vtop->type, data_section, offset);
1775 vtop->r |= VT_LVAL;
1777 #ifdef CONFIG_TCC_BCHECK
1778 if (vtop->r & VT_MUSTBOUND)
1779 gbound();
1780 #endif
1782 bt = vtop->type.t & VT_BTYPE;
1784 #ifdef TCC_TARGET_RISCV64
1785 /* XXX mega hack */
1786 if (bt == VT_LDOUBLE && rc == RC_FLOAT)
1787 rc = RC_INT;
1788 #endif
1789 rc2 = RC2_TYPE(bt, rc);
1791 /* need to reload if:
1792 - constant
1793 - lvalue (need to dereference pointer)
1794 - already a register, but not in the right class */
1795 r = vtop->r & VT_VALMASK;
1796 r_ok = !(vtop->r & VT_LVAL) && (r < VT_CONST) && (reg_classes[r] & rc);
1797 r2_ok = !rc2 || ((vtop->r2 < VT_CONST) && (reg_classes[vtop->r2] & rc2));
1799 if (!r_ok || !r2_ok) {
1800 if (!r_ok)
1801 r = get_reg(rc);
1802 if (rc2) {
1803 int load_type = (bt == VT_QFLOAT) ? VT_DOUBLE : VT_PTRDIFF_T;
1804 int original_type = vtop->type.t;
1806 /* two register type load :
1807 expand to two words temporarily */
1808 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
1809 /* load constant */
1810 unsigned long long ll = vtop->c.i;
1811 vtop->c.i = ll; /* first word */
1812 load(r, vtop);
1813 vtop->r = r; /* save register value */
1814 vpushi(ll >> 32); /* second word */
1815 } else if (vtop->r & VT_LVAL) {
1816 /* We do not want to modify the long long pointer here.
1817 So we save any other instances down the stack */
1818 save_reg_upstack(vtop->r, 1);
1819 /* load from memory */
1820 vtop->type.t = load_type;
1821 load(r, vtop);
1822 vdup();
1823 vtop[-1].r = r; /* save register value */
1824 /* increment pointer to get second word */
1825 vtop->type.t = VT_PTRDIFF_T;
1826 gaddrof();
1827 vpushs(PTR_SIZE);
1828 gen_op('+');
1829 vtop->r |= VT_LVAL;
1830 vtop->type.t = load_type;
1831 } else {
1832 /* move registers */
1833 if (!r_ok)
1834 load(r, vtop);
1835 if (r2_ok && vtop->r2 < VT_CONST)
1836 goto done;
1837 vdup();
1838 vtop[-1].r = r; /* save register value */
1839 vtop->r = vtop[-1].r2;
1841 /* Allocate second register. Here we rely on the fact that
1842 get_reg() tries first to free r2 of an SValue. */
1843 r2 = get_reg(rc2);
1844 load(r2, vtop);
1845 vpop();
1846 /* write second register */
1847 vtop->r2 = r2;
1848 done:
1849 vtop->type.t = original_type;
1850 } else {
1851 if (vtop->r == VT_CMP)
1852 vset_VT_JMP();
1853 /* one register type load */
1854 load(r, vtop);
1857 vtop->r = r;
1858 #ifdef TCC_TARGET_C67
1859 /* uses register pairs for doubles */
1860 if (bt == VT_DOUBLE)
1861 vtop->r2 = r+1;
1862 #endif
1864 return r;
1867 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1868 ST_FUNC void gv2(int rc1, int rc2)
1870 /* generate the more generic register class first. But VT_JMP or VT_CMP
1871 values must be generated first in all cases to avoid possible
1872 reload errors */
1873 if (vtop->r != VT_CMP && rc1 <= rc2) {
1874 vswap();
1875 gv(rc1);
1876 vswap();
1877 gv(rc2);
1878 /* test if reload is needed for first register */
1879 if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
1880 vswap();
1881 gv(rc1);
1882 vswap();
1884 } else {
1885 gv(rc2);
1886 vswap();
1887 gv(rc1);
1888 vswap();
1889 /* test if reload is needed for first register */
1890 if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
1891 gv(rc2);
1896 #if PTR_SIZE == 4
1897 /* expand a 64bit value on the stack into two ints */
1898 ST_FUNC void lexpand(void)
1900 int u, v;
1901 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
1902 v = vtop->r & (VT_VALMASK | VT_LVAL);
1903 if (v == VT_CONST) {
1904 vdup();
1905 vtop[0].c.i >>= 32;
1906 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
1907 vdup();
1908 vtop[0].c.i += 4;
1909 } else {
1910 gv(RC_INT);
1911 vdup();
1912 vtop[0].r = vtop[-1].r2;
1913 vtop[0].r2 = vtop[-1].r2 = VT_CONST;
1915 vtop[0].type.t = vtop[-1].type.t = VT_INT | u;
1917 #endif
1919 #if PTR_SIZE == 4
1920 /* build a long long from two ints */
1921 static void lbuild(int t)
1923 gv2(RC_INT, RC_INT);
1924 vtop[-1].r2 = vtop[0].r;
1925 vtop[-1].type.t = t;
1926 vpop();
1928 #endif
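/* Word-order convention used by lexpand()/lbuild() and gen_opl() below:
   after lexpand() the low word is in vtop[-1] and the high word in
   vtop[0] ("L H" in the stack comments); lbuild() packs them back into
   one SValue, keeping the high-word register in r2. */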
1930 /* convert stack entry to register and duplicate its value in another
1931 register */
1932 static void gv_dup(void)
1934 int t, rc, r;
1936 t = vtop->type.t;
1937 #if PTR_SIZE == 4
1938 if ((t & VT_BTYPE) == VT_LLONG) {
1939 if (t & VT_BITFIELD) {
1940 gv(RC_INT);
1941 t = vtop->type.t;
1943 lexpand();
1944 gv_dup();
1945 vswap();
1946 vrotb(3);
1947 gv_dup();
1948 vrotb(4);
1949 /* stack: H L L1 H1 */
1950 lbuild(t);
1951 vrotb(3);
1952 vrotb(3);
1953 vswap();
1954 lbuild(t);
1955 vswap();
1956 return;
1958 #endif
1959 /* duplicate value */
1960 rc = RC_TYPE(t);
1961 gv(rc);
1962 r = get_reg(rc);
1963 vdup();
1964 load(r, vtop);
1965 vtop->r = r;
1968 #if PTR_SIZE == 4
1969 /* generate CPU independent (unsigned) long long operations */
1970 static void gen_opl(int op)
1972 int t, a, b, op1, c, i;
1973 int func;
1974 unsigned short reg_iret = REG_IRET;
1975 unsigned short reg_lret = REG_IRE2;
1976 SValue tmp;
1978 switch(op) {
1979 case '/':
1980 case TOK_PDIV:
1981 func = TOK___divdi3;
1982 goto gen_func;
1983 case TOK_UDIV:
1984 func = TOK___udivdi3;
1985 goto gen_func;
1986 case '%':
1987 func = TOK___moddi3;
1988 goto gen_mod_func;
1989 case TOK_UMOD:
1990 func = TOK___umoddi3;
1991 gen_mod_func:
1992 #ifdef TCC_ARM_EABI
1993 reg_iret = TREG_R2;
1994 reg_lret = TREG_R3;
1995 #endif
1996 gen_func:
1997 /* call generic long long function */
1998 vpush_global_sym(&func_old_type, func);
1999 vrott(3);
2000 gfunc_call(2);
2001 vpushi(0);
2002 vtop->r = reg_iret;
2003 vtop->r2 = reg_lret;
2004 break;
2005 case '^':
2006 case '&':
2007 case '|':
2008 case '*':
2009 case '+':
2010 case '-':
2011 //pv("gen_opl A",0,2);
2012 t = vtop->type.t;
2013 vswap();
2014 lexpand();
2015 vrotb(3);
2016 lexpand();
2017 /* stack: L1 H1 L2 H2 */
2018 tmp = vtop[0];
2019 vtop[0] = vtop[-3];
2020 vtop[-3] = tmp;
2021 tmp = vtop[-2];
2022 vtop[-2] = vtop[-3];
2023 vtop[-3] = tmp;
2024 vswap();
2025 /* stack: H1 H2 L1 L2 */
2026 //pv("gen_opl B",0,4);
2027 if (op == '*') {
2028 vpushv(vtop - 1);
2029 vpushv(vtop - 1);
2030 gen_op(TOK_UMULL);
2031 lexpand();
2032 /* stack: H1 H2 L1 L2 ML MH */
2033 for(i=0;i<4;i++)
2034 vrotb(6);
2035 /* stack: ML MH H1 H2 L1 L2 */
2036 tmp = vtop[0];
2037 vtop[0] = vtop[-2];
2038 vtop[-2] = tmp;
2039 /* stack: ML MH H1 L2 H2 L1 */
2040 gen_op('*');
2041 vrotb(3);
2042 vrotb(3);
2043 gen_op('*');
2044 /* stack: ML MH M1 M2 */
2045 gen_op('+');
2046 gen_op('+');
2047 } else if (op == '+' || op == '-') {
2048 /* XXX: add non carry method too (for MIPS or alpha) */
2049 if (op == '+')
2050 op1 = TOK_ADDC1;
2051 else
2052 op1 = TOK_SUBC1;
2053 gen_op(op1);
2054 /* stack: H1 H2 (L1 op L2) */
2055 vrotb(3);
2056 vrotb(3);
2057 gen_op(op1 + 1); /* TOK_xxxC2 */
2058 } else {
2059 gen_op(op);
2060 /* stack: H1 H2 (L1 op L2) */
2061 vrotb(3);
2062 vrotb(3);
2063 /* stack: (L1 op L2) H1 H2 */
2064 gen_op(op);
2065 /* stack: (L1 op L2) (H1 op H2) */
2067 /* stack: L H */
2068 lbuild(t);
2069 break;
2070 case TOK_SAR:
2071 case TOK_SHR:
2072 case TOK_SHL:
2073 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
2074 t = vtop[-1].type.t;
2075 vswap();
2076 lexpand();
2077 vrotb(3);
2078 /* stack: L H shift */
2079 c = (int)vtop->c.i;
2080 /* constant: simpler */
2081 /* NOTE: all comments are for SHL. the other cases are
2082 done by swapping words */
2083 vpop();
2084 if (op != TOK_SHL)
2085 vswap();
2086 if (c >= 32) {
2087 /* stack: L H */
2088 vpop();
2089 if (c > 32) {
2090 vpushi(c - 32);
2091 gen_op(op);
2093 if (op != TOK_SAR) {
2094 vpushi(0);
2095 } else {
2096 gv_dup();
2097 vpushi(31);
2098 gen_op(TOK_SAR);
2100 vswap();
2101 } else {
2102 vswap();
2103 gv_dup();
2104 /* stack: H L L */
2105 vpushi(c);
2106 gen_op(op);
2107 vswap();
2108 vpushi(32 - c);
2109 if (op == TOK_SHL)
2110 gen_op(TOK_SHR);
2111 else
2112 gen_op(TOK_SHL);
2113 vrotb(3);
2114 /* stack: L L H */
2115 vpushi(c);
2116 if (op == TOK_SHL)
2117 gen_op(TOK_SHL);
2118 else
2119 gen_op(TOK_SHR);
2120 gen_op('|');
2122 if (op != TOK_SHL)
2123 vswap();
2124 lbuild(t);
2125 } else {
2126 /* XXX: should provide a faster fallback on x86 ? */
2127 switch(op) {
2128 case TOK_SAR:
2129 func = TOK___ashrdi3;
2130 goto gen_func;
2131 case TOK_SHR:
2132 func = TOK___lshrdi3;
2133 goto gen_func;
2134 case TOK_SHL:
2135 func = TOK___ashldi3;
2136 goto gen_func;
2139 break;
2140 default:
2141 /* compare operations */
2142 t = vtop->type.t;
2143 vswap();
2144 lexpand();
2145 vrotb(3);
2146 lexpand();
2147 /* stack: L1 H1 L2 H2 */
2148 tmp = vtop[-1];
2149 vtop[-1] = vtop[-2];
2150 vtop[-2] = tmp;
2151 /* stack: L1 L2 H1 H2 */
2152 save_regs(4);
2153 /* compare high */
2154 op1 = op;
2155 /* when values are equal, we need to compare low words. since
2156 the jump is inverted, we invert the test too. */
2157 if (op1 == TOK_LT)
2158 op1 = TOK_LE;
2159 else if (op1 == TOK_GT)
2160 op1 = TOK_GE;
2161 else if (op1 == TOK_ULT)
2162 op1 = TOK_ULE;
2163 else if (op1 == TOK_UGT)
2164 op1 = TOK_UGE;
2165 a = 0;
2166 b = 0;
2167 gen_op(op1);
2168 if (op == TOK_NE) {
2169 b = gvtst(0, 0);
2170 } else {
2171 a = gvtst(1, 0);
2172 if (op != TOK_EQ) {
2173 /* generate non equal test */
2174 vpushi(0);
2175 vset_VT_CMP(TOK_NE);
2176 b = gvtst(0, 0);
2179 /* compare low. Always unsigned */
2180 op1 = op;
2181 if (op1 == TOK_LT)
2182 op1 = TOK_ULT;
2183 else if (op1 == TOK_LE)
2184 op1 = TOK_ULE;
2185 else if (op1 == TOK_GT)
2186 op1 = TOK_UGT;
2187 else if (op1 == TOK_GE)
2188 op1 = TOK_UGE;
2189 gen_op(op1);
2190 #if 0//def TCC_TARGET_I386
2191 if (op == TOK_NE) { gsym(b); break; }
2192 if (op == TOK_EQ) { gsym(a); break; }
2193 #endif
2194 gvtst_set(1, a);
2195 gvtst_set(0, b);
2196 break;
2199 #endif
2201 static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
2203 uint64_t x = (a >> 63 ? -a : a) / (b >> 63 ? -b : b);
2204 return (a ^ b) >> 63 ? -x : x;
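/* The helper above does signed 64-bit division on unsigned operands:
   it divides the magnitudes and restores the sign from a^b, matching
   C's truncating division without relying on host signed-overflow
   behaviour. */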
2207 static int gen_opic_lt(uint64_t a, uint64_t b)
2209 return (a ^ (uint64_t)1 << 63) < (b ^ (uint64_t)1 << 63);
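/* Signed "less than" on unsigned operands: XOR-ing the sign bit maps
   the signed range onto the unsigned range in an order-preserving way,
   so a plain unsigned compare yields the signed result. */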
2212 /* handle integer constant optimizations and various
2213 machine-independent optimizations */
2214 static void gen_opic(int op)
2216 SValue *v1 = vtop - 1;
2217 SValue *v2 = vtop;
2218 int t1 = v1->type.t & VT_BTYPE;
2219 int t2 = v2->type.t & VT_BTYPE;
2220 int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2221 int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2222 uint64_t l1 = c1 ? v1->c.i : 0;
2223 uint64_t l2 = c2 ? v2->c.i : 0;
2224 int shm = (t1 == VT_LLONG) ? 63 : 31;
2226 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
2227 l1 = ((uint32_t)l1 |
2228 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
2229 if (t2 != VT_LLONG && (PTR_SIZE != 8 || t2 != VT_PTR))
2230 l2 = ((uint32_t)l2 |
2231 (v2->type.t & VT_UNSIGNED ? 0 : -(l2 & 0x80000000)));
2233 if (c1 && c2) {
2234 switch(op) {
2235 case '+': l1 += l2; break;
2236 case '-': l1 -= l2; break;
2237 case '&': l1 &= l2; break;
2238 case '^': l1 ^= l2; break;
2239 case '|': l1 |= l2; break;
2240 case '*': l1 *= l2; break;
2242 case TOK_PDIV:
2243 case '/':
2244 case '%':
2245 case TOK_UDIV:
2246 case TOK_UMOD:
2247 /* if division by zero, generate explicit division */
2248 if (l2 == 0) {
2249 if (const_wanted && !(nocode_wanted & unevalmask))
2250 tcc_error("division by zero in constant");
2251 goto general_case;
2253 switch(op) {
2254 default: l1 = gen_opic_sdiv(l1, l2); break;
2255 case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
2256 case TOK_UDIV: l1 = l1 / l2; break;
2257 case TOK_UMOD: l1 = l1 % l2; break;
2259 break;
2260 case TOK_SHL: l1 <<= (l2 & shm); break;
2261 case TOK_SHR: l1 >>= (l2 & shm); break;
2262 case TOK_SAR:
2263 l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
2264 break;
2265 /* tests */
2266 case TOK_ULT: l1 = l1 < l2; break;
2267 case TOK_UGE: l1 = l1 >= l2; break;
2268 case TOK_EQ: l1 = l1 == l2; break;
2269 case TOK_NE: l1 = l1 != l2; break;
2270 case TOK_ULE: l1 = l1 <= l2; break;
2271 case TOK_UGT: l1 = l1 > l2; break;
2272 case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
2273 case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
2274 case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
2275 case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
2276 /* logical */
2277 case TOK_LAND: l1 = l1 && l2; break;
2278 case TOK_LOR: l1 = l1 || l2; break;
2279 default:
2280 goto general_case;
2282 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
2283 l1 = ((uint32_t)l1 |
2284 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
2285 v1->c.i = l1;
2286 vtop--;
2287 } else {
2288 /* if commutative ops, put c2 as constant */
2289 if (c1 && (op == '+' || op == '&' || op == '^' ||
2290 op == '|' || op == '*' || op == TOK_EQ || op == TOK_NE)) {
2291 vswap();
2292 c2 = c1; //c = c1, c1 = c2, c2 = c;
2293 l2 = l1; //l = l1, l1 = l2, l2 = l;
2295 if (!const_wanted &&
2296 c1 && ((l1 == 0 &&
2297 (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
2298 (l1 == -1 && op == TOK_SAR))) {
2299 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
2300 vtop--;
2301 } else if (!const_wanted &&
2302 c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
2303 (op == '|' &&
2304 (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))) ||
2305 (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
2306 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
2307 if (l2 == 1)
2308 vtop->c.i = 0;
2309 vswap();
2310 vtop--;
2311 } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
2312 op == TOK_PDIV) &&
2313 l2 == 1) ||
2314 ((op == '+' || op == '-' || op == '|' || op == '^' ||
2315 op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
2316 l2 == 0) ||
2317 (op == '&' &&
2318 (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))))) {
2319 /* filter out NOP operations like x*1, x-0, x&-1... */
2320 vtop--;
2321 } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
2322 /* try to use shifts instead of muls or divs */
2323 if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
2324 int n = -1;
2325 while (l2) {
2326 l2 >>= 1;
2327 n++;
2329 vtop->c.i = n;
2330 if (op == '*')
2331 op = TOK_SHL;
2332 else if (op == TOK_PDIV)
2333 op = TOK_SAR;
2334 else
2335 op = TOK_SHR;
2337 goto general_case;
2338 } else if (c2 && (op == '+' || op == '-') &&
2339 (((vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM))
2340 || (vtop[-1].r & (VT_VALMASK | VT_LVAL)) == VT_LOCAL)) {
2341 /* symbol + constant case */
2342 if (op == '-')
2343 l2 = -l2;
2344 l2 += vtop[-1].c.i;
2345 /* The backends can't always deal with addends to symbols
2346 larger than +-1<<31. Don't construct such. */
2347 if ((int)l2 != l2)
2348 goto general_case;
2349 vtop--;
2350 vtop->c.i = l2;
2351 } else {
2352 general_case:
2353 /* call low level op generator */
2354 if (t1 == VT_LLONG || t2 == VT_LLONG ||
2355 (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
2356 gen_opl(op);
2357 else
2358 gen_opi(op);
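/* Rough examples of what gen_opic() folds: with two constants the
   operation is evaluated at compile time (3 * 4 becomes 12); with one
   constant it strips no-ops such as x + 0 and x & -1, rewrites x * 8
   as x << 3 via the power-of-two loop above, and folds "symbol +
   constant" additions into the symbol reference as long as the addend
   fits in 32 bits.  Everything else falls through to gen_opi() or
   gen_opl(). */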
2363 /* generate a floating point operation with constant propagation */
2364 static void gen_opif(int op)
2366 int c1, c2;
2367 SValue *v1, *v2;
2368 #if defined _MSC_VER && defined __x86_64__
2369 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2370 volatile
2371 #endif
2372 long double f1, f2;
2374 v1 = vtop - 1;
2375 v2 = vtop;
2376 /* currently, we cannot do computations with forward symbols */
2377 c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2378 c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2379 if (c1 && c2) {
2380 if (v1->type.t == VT_FLOAT) {
2381 f1 = v1->c.f;
2382 f2 = v2->c.f;
2383 } else if (v1->type.t == VT_DOUBLE) {
2384 f1 = v1->c.d;
2385 f2 = v2->c.d;
2386 } else {
2387 f1 = v1->c.ld;
2388 f2 = v2->c.ld;
2391 /* NOTE: we only do constant propagation for finite numbers (not
2392 NaN or infinity), as per the ANSI spec */
2393 if (!ieee_finite(f1) || !ieee_finite(f2))
2394 goto general_case;
2396 switch(op) {
2397 case '+': f1 += f2; break;
2398 case '-': f1 -= f2; break;
2399 case '*': f1 *= f2; break;
2400 case '/':
2401 if (f2 == 0.0) {
2402 /* If not in initializer we need to potentially generate
2403 FP exceptions at runtime, otherwise we want to fold. */
2404 if (!const_wanted)
2405 goto general_case;
2407 f1 /= f2;
2408 break;
2409 /* XXX: also handle tests ? */
2410 default:
2411 goto general_case;
2413 /* XXX: overflow test ? */
2414 if (v1->type.t == VT_FLOAT) {
2415 v1->c.f = f1;
2416 } else if (v1->type.t == VT_DOUBLE) {
2417 v1->c.d = f1;
2418 } else {
2419 v1->c.ld = f1;
2421 vtop--;
2422 } else {
2423 general_case:
2424 gen_opf(op);
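/* Example of the constant propagation above (a sketch): 2.5 * 4.0 is
   folded to 10.0 at compile time, but only for finite operands, and
   x / 0.0 is folded only when const_wanted (i.e. inside an
   initializer); otherwise the division is emitted so any FP exception
   happens at run time. */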
2428 static int pointed_size(CType *type)
2430 int align;
2431 return type_size(pointed_type(type), &align);
2434 static void vla_runtime_pointed_size(CType *type)
2436 int align;
2437 vla_runtime_type_size(pointed_type(type), &align);
2440 static inline int is_null_pointer(SValue *p)
2442 if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
2443 return 0;
2444 return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
2445 ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
2446 ((p->type.t & VT_BTYPE) == VT_PTR &&
2447 (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0) &&
2448 ((pointed_type(&p->type)->t & VT_BTYPE) == VT_VOID) &&
2449 0 == (pointed_type(&p->type)->t & (VT_CONSTANT | VT_VOLATILE))
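/* Sketch of what counts as a null pointer constant here: an integer
   constant 0 (int or long long), or a constant of pointer type equal
   to 0 whose pointed type is unqualified void, e.g. (void*)0.  Such
   operands bypass the pointer compatibility checks below. */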
2453 /* check types for comparison or subtraction of pointers */
2454 static void check_comparison_pointer_types(SValue *p1, SValue *p2, int op)
2456 CType *type1, *type2, tmp_type1, tmp_type2;
2457 int bt1, bt2;
2459 /* null pointers are accepted for all comparisons, as in gcc */
2460 if (is_null_pointer(p1) || is_null_pointer(p2))
2461 return;
2462 type1 = &p1->type;
2463 type2 = &p2->type;
2464 bt1 = type1->t & VT_BTYPE;
2465 bt2 = type2->t & VT_BTYPE;
2466 /* accept comparison between pointer and integer with a warning */
2467 if ((is_integer_btype(bt1) || is_integer_btype(bt2)) && op != '-') {
2468 if (op != TOK_LOR && op != TOK_LAND )
2469 tcc_warning("comparison between pointer and integer");
2470 return;
2473 /* both must be pointers or implicit function pointers */
2474 if (bt1 == VT_PTR) {
2475 type1 = pointed_type(type1);
2476 } else if (bt1 != VT_FUNC)
2477 goto invalid_operands;
2479 if (bt2 == VT_PTR) {
2480 type2 = pointed_type(type2);
2481 } else if (bt2 != VT_FUNC) {
2482 invalid_operands:
2483 tcc_error("invalid operands to binary %s", get_tok_str(op, NULL));
2485 if ((type1->t & VT_BTYPE) == VT_VOID ||
2486 (type2->t & VT_BTYPE) == VT_VOID)
2487 return;
2488 tmp_type1 = *type1;
2489 tmp_type2 = *type2;
2490 tmp_type1.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
2491 tmp_type2.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
2492 if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
2493 /* gcc-like error if '-' is used */
2494 if (op == '-')
2495 goto invalid_operands;
2496 else
2497 tcc_warning("comparison of distinct pointer types lacks a cast");
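/* Approximate examples of the checks above: comparing a pointer with
   an integer only warns; comparing an int * with a short * warns that
   the comparison of distinct pointer types lacks a cast; subtracting
   incompatible pointer types with '-' is a hard error; void * compares
   against any object pointer silently. */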
2501 /* generic gen_op: handles types problems */
2502 ST_FUNC void gen_op(int op)
2504 int u, t1, t2, bt1, bt2, t;
2505 CType type1;
2507 redo:
2508 t1 = vtop[-1].type.t;
2509 t2 = vtop[0].type.t;
2510 bt1 = t1 & VT_BTYPE;
2511 bt2 = t2 & VT_BTYPE;
2513 if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
2514 tcc_error("operation on a struct");
2515 } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
2516 if (bt2 == VT_FUNC) {
2517 mk_pointer(&vtop->type);
2518 gaddrof();
2520 if (bt1 == VT_FUNC) {
2521 vswap();
2522 mk_pointer(&vtop->type);
2523 gaddrof();
2524 vswap();
2526 goto redo;
2527 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
2528 /* at least one operand is a pointer */
2529 /* relational op: both operands must be pointers */
2530 if (op >= TOK_ULT && op <= TOK_LOR) {
2531 check_comparison_pointer_types(vtop - 1, vtop, op);
2532 /* pointers are handled as unsigned */
2533 #if PTR_SIZE == 8
2534 t = VT_LLONG | VT_UNSIGNED;
2535 #else
2536 t = VT_INT | VT_UNSIGNED;
2537 #endif
2538 goto std_op;
2540 /* if both pointers, then it must be the '-' op */
2541 if (bt1 == VT_PTR && bt2 == VT_PTR) {
2542 if (op != '-')
2543 tcc_error("cannot use pointers here");
2544 check_comparison_pointer_types(vtop - 1, vtop, op);
2545 /* XXX: check that types are compatible */
2546 if (vtop[-1].type.t & VT_VLA) {
2547 vla_runtime_pointed_size(&vtop[-1].type);
2548 } else {
2549 vpushi(pointed_size(&vtop[-1].type));
2551 vrott(3);
2552 gen_opic(op);
2553 vtop->type.t = VT_PTRDIFF_T;
2554 vswap();
2555 gen_op(TOK_PDIV);
2556 } else {
2557 /* exactly one pointer : must be '+' or '-'. */
2558 if (op != '-' && op != '+')
2559 tcc_error("cannot use pointers here");
2560 /* Put pointer as first operand */
2561 if (bt2 == VT_PTR) {
2562 vswap();
2563 t = t1, t1 = t2, t2 = t;
2565 #if PTR_SIZE == 4
2566 if ((vtop[0].type.t & VT_BTYPE) == VT_LLONG)
2567 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2568 gen_cast_s(VT_INT);
2569 #endif
2570 type1 = vtop[-1].type;
2571 type1.t &= ~VT_ARRAY;
2572 if (vtop[-1].type.t & VT_VLA)
2573 vla_runtime_pointed_size(&vtop[-1].type);
2574 else {
2575 u = pointed_size(&vtop[-1].type);
2576 if (u < 0)
2577 tcc_error("unknown array element size");
2578 #if PTR_SIZE == 8
2579 vpushll(u);
2580 #else
2581 /* XXX: cast to int ? (long long case) */
2582 vpushi(u);
2583 #endif
2585 gen_op('*');
2586 #ifdef CONFIG_TCC_BCHECK
2587 if (tcc_state->do_bounds_check && !const_wanted) {
2588 /* if bounded pointers, we generate special code to
2589 test the bounds */
2590 if (op == '-') {
2591 vpushi(0);
2592 vswap();
2593 gen_op('-');
2595 vtop[-1].r &= ~VT_MUSTBOUND;
2596 gen_bounded_ptr_add();
2597 } else
2598 #endif
2600 gen_opic(op);
2602 /* restore the type in case gen_opic() swapped the operands */
2603 vtop->type = type1;
2605 } else if (is_float(bt1) || is_float(bt2)) {
2606 /* compute bigger type and do implicit casts */
2607 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
2608 t = VT_LDOUBLE;
2609 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
2610 t = VT_DOUBLE;
2611 } else {
2612 t = VT_FLOAT;
2614 /* floats can only be used for a few operations */
2615 if (op != '+' && op != '-' && op != '*' && op != '/' &&
2616 (op < TOK_ULT || op > TOK_GT))
2617 tcc_error("invalid operands for binary operation");
2618 goto std_op;
2619 } else if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL) {
2620 t = bt1 == VT_LLONG ? VT_LLONG : VT_INT;
2621 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (t | VT_UNSIGNED))
2622 t |= VT_UNSIGNED;
2623 t |= (VT_LONG & t1);
2624 goto std_op;
2625 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
2626 /* cast to the biggest operand type */
2627 t = VT_LLONG | VT_LONG;
2628 if (bt1 == VT_LLONG)
2629 t &= t1;
2630 if (bt2 == VT_LLONG)
2631 t &= t2;
2632 /* convert to unsigned if it does not fit in a long long */
2633 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
2634 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
2635 t |= VT_UNSIGNED;
2636 goto std_op;
2637 } else {
2638 /* integer operations */
2639 t = VT_INT | (VT_LONG & (t1 | t2));
2640 /* convert to unsigned if it does not fit in an integer */
2641 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
2642 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
2643 t |= VT_UNSIGNED;
2644 std_op:
2645 /* XXX: currently, some unsigned operations are explicit, so
2646 we modify them here */
2647 if (t & VT_UNSIGNED) {
2648 if (op == TOK_SAR)
2649 op = TOK_SHR;
2650 else if (op == '/')
2651 op = TOK_UDIV;
2652 else if (op == '%')
2653 op = TOK_UMOD;
2654 else if (op == TOK_LT)
2655 op = TOK_ULT;
2656 else if (op == TOK_GT)
2657 op = TOK_UGT;
2658 else if (op == TOK_LE)
2659 op = TOK_ULE;
2660 else if (op == TOK_GE)
2661 op = TOK_UGE;
2663 vswap();
2664 type1.t = t;
2665 type1.ref = NULL;
2666 gen_cast(&type1);
2667 vswap();
2668 /* special case for shifts and long long: we keep the shift as
2669 an integer */
2670 if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
2671 type1.t = VT_INT;
2672 gen_cast(&type1);
2673 if (is_float(t))
2674 gen_opif(op);
2675 else
2676 gen_opic(op);
2677 if (op >= TOK_ULT && op <= TOK_GT) {
2678 /* relational op: the result is an int */
2679 vtop->type.t = VT_INT;
2680 } else {
2681 vtop->type.t = t;
2684 // Make sure that we have converted to an rvalue:
2685 if (vtop->r & VT_LVAL)
2686 gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
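/* Rough illustration of the conversions performed above: for
       unsigned int u; int i;  u + i;
   both operands are cast to unsigned int, and '/', '%' and the
   comparisons switch to their unsigned variants.  For p + n with a
   pointer p the integer is scaled by the element size (the
   vpushi(pointed_size())/'*' path), and for p - q the byte difference
   is divided back by the element size with TOK_PDIV, producing a
   VT_PTRDIFF_T result. */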
2689 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64 || defined TCC_TARGET_ARM
2690 #define gen_cvt_itof1 gen_cvt_itof
2691 #else
2692 /* generic itof for unsigned long long case */
2693 static void gen_cvt_itof1(int t)
2695 if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
2696 (VT_LLONG | VT_UNSIGNED)) {
2698 if (t == VT_FLOAT)
2699 vpush_global_sym(&func_old_type, TOK___floatundisf);
2700 #if LDOUBLE_SIZE != 8
2701 else if (t == VT_LDOUBLE)
2702 vpush_global_sym(&func_old_type, TOK___floatundixf);
2703 #endif
2704 else
2705 vpush_global_sym(&func_old_type, TOK___floatundidf);
2706 vrott(2);
2707 gfunc_call(1);
2708 vpushi(0);
2709 PUT_R_RET(vtop, t);
2710 } else {
2711 gen_cvt_itof(t);
2714 #endif
2716 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64
2717 #define gen_cvt_ftoi1 gen_cvt_ftoi
2718 #else
2719 /* generic ftoi for unsigned long long case */
2720 static void gen_cvt_ftoi1(int t)
2722 int st;
2723 if (t == (VT_LLONG | VT_UNSIGNED)) {
2724 /* not handled natively */
2725 st = vtop->type.t & VT_BTYPE;
2726 if (st == VT_FLOAT)
2727 vpush_global_sym(&func_old_type, TOK___fixunssfdi);
2728 #if LDOUBLE_SIZE != 8
2729 else if (st == VT_LDOUBLE)
2730 vpush_global_sym(&func_old_type, TOK___fixunsxfdi);
2731 #endif
2732 else
2733 vpush_global_sym(&func_old_type, TOK___fixunsdfdi);
2734 vrott(2);
2735 gfunc_call(1);
2736 vpushi(0);
2737 PUT_R_RET(vtop, t);
2738 } else {
2739 gen_cvt_ftoi(t);
2742 #endif
2744 /* special delayed cast for char/short */
2745 static void force_charshort_cast(void)
2747 int sbt = BFGET(vtop->r, VT_MUSTCAST) == 2 ? VT_LLONG : VT_INT;
2748 int dbt = vtop->type.t;
2749 vtop->r &= ~VT_MUSTCAST;
2750 vtop->type.t = sbt;
2751 gen_cast_s(dbt == VT_BOOL ? VT_BYTE|VT_UNSIGNED : dbt);
2752 vtop->type.t = dbt;
2755 static void gen_cast_s(int t)
2757 CType type;
2758 type.t = t;
2759 type.ref = NULL;
2760 gen_cast(&type);
2763 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2764 static void gen_cast(CType *type)
2766 int sbt, dbt, sf, df, c;
2767 int dbt_bt, sbt_bt, ds, ss, bits, trunc;
2769 /* special delayed cast for char/short */
2770 if (vtop->r & VT_MUSTCAST)
2771 force_charshort_cast();
2773 /* bitfields first get cast to ints */
2774 if (vtop->type.t & VT_BITFIELD)
2775 gv(RC_INT);
2777 dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
2778 sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
2779 if (sbt == VT_FUNC)
2780 sbt = VT_PTR;
2782 again:
2783 if (sbt != dbt) {
2784 sf = is_float(sbt);
2785 df = is_float(dbt);
2786 dbt_bt = dbt & VT_BTYPE;
2787 sbt_bt = sbt & VT_BTYPE;
2789 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2790 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
2791 c &= (dbt != VT_LDOUBLE) | !!nocode_wanted;
2792 #endif
2793 if (c) {
2794 /* constant case: we can do it now */
2795 /* XXX: in ISO C, cannot do it if the conversion raises an error */
2796 if (sbt == VT_FLOAT)
2797 vtop->c.ld = vtop->c.f;
2798 else if (sbt == VT_DOUBLE)
2799 vtop->c.ld = vtop->c.d;
2801 if (df) {
2802 if (sbt_bt == VT_LLONG) {
2803 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
2804 vtop->c.ld = vtop->c.i;
2805 else
2806 vtop->c.ld = -(long double)-vtop->c.i;
2807 } else if(!sf) {
2808 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
2809 vtop->c.ld = (uint32_t)vtop->c.i;
2810 else
2811 vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
2814 if (dbt == VT_FLOAT)
2815 vtop->c.f = (float)vtop->c.ld;
2816 else if (dbt == VT_DOUBLE)
2817 vtop->c.d = (double)vtop->c.ld;
2818 } else if (sf && dbt == VT_BOOL) {
2819 vtop->c.i = (vtop->c.ld != 0);
2820 } else {
2821 if(sf)
2822 vtop->c.i = vtop->c.ld;
2823 else if (sbt_bt == VT_LLONG || (PTR_SIZE == 8 && sbt == VT_PTR))
2825 else if (sbt & VT_UNSIGNED)
2826 vtop->c.i = (uint32_t)vtop->c.i;
2827 else
2828 vtop->c.i = ((uint32_t)vtop->c.i | -(vtop->c.i & 0x80000000));
2830 if (dbt_bt == VT_LLONG || (PTR_SIZE == 8 && dbt == VT_PTR))
2832 else if (dbt == VT_BOOL)
2833 vtop->c.i = (vtop->c.i != 0);
2834 else {
2835 uint32_t m = dbt_bt == VT_BYTE ? 0xff :
2836 dbt_bt == VT_SHORT ? 0xffff :
2837 0xffffffff;
2838 vtop->c.i &= m;
2839 if (!(dbt & VT_UNSIGNED))
2840 vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
2843 goto done;
2845 } else if (dbt == VT_BOOL
2846 && (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM))
2847 == (VT_CONST | VT_SYM)) {
2848 /* addresses are considered non-zero (see tcctest.c:sinit23) */
2849 vtop->r = VT_CONST;
2850 vtop->c.i = 1;
2851 goto done;
2854 /* cannot generate code for global or static initializers */
2855 if (STATIC_DATA_WANTED)
2856 goto done;
2858 /* non constant case: generate code */
2859 if (dbt == VT_BOOL) {
2860 gen_test_zero(TOK_NE);
2861 goto done;
2864 if (sf || df) {
2865 if (sf && df) {
2866 /* convert from fp to fp */
2867 gen_cvt_ftof(dbt);
2868 } else if (df) {
2869 /* convert int to fp */
2870 gen_cvt_itof1(dbt);
2871 } else {
2872 /* convert fp to int */
2873 sbt = dbt;
2874 if (dbt_bt != VT_LLONG && dbt_bt != VT_INT)
2875 sbt = VT_INT;
2876 gen_cvt_ftoi1(sbt);
2877 goto again; /* may need char/short cast */
2879 goto done;
2882 ds = btype_size(dbt_bt);
2883 ss = btype_size(sbt_bt);
2884 if (ds == 0 || ss == 0) {
2885 if (dbt_bt == VT_VOID)
2886 goto done;
2887 cast_error(&vtop->type, type);
2889 if (IS_ENUM(type->t) && type->ref->c < 0)
2890 tcc_error("cast to incomplete type");
2892 /* same size and no sign conversion needed */
2893 if (ds == ss && ds >= 4)
2894 goto done;
2895 if (dbt_bt == VT_PTR || sbt_bt == VT_PTR) {
2896 tcc_warning("cast between pointer and integer of different size");
2897 if (sbt_bt == VT_PTR) {
2898 /* put integer type to allow logical operations below */
2899 vtop->type.t = (PTR_SIZE == 8 ? VT_LLONG : VT_INT);
2903 /* the processor allows { int a = 0, b = *(char*)&a; }
2904 That means that if we cast to a narrower width, we can just
2905 change the type and still read it later. */
2906 #define ALLOW_SUBTYPE_ACCESS 1
2908 if (ALLOW_SUBTYPE_ACCESS && (vtop->r & VT_LVAL)) {
2909 /* value still in memory */
2910 if (ds <= ss)
2911 goto done;
2912 /* ss <= 4 here */
2913 if (ds <= 4) {
2914 gv(RC_INT);
2915 goto done; /* no 64bit involved */
2918 gv(RC_INT);
2920 trunc = 0;
2921 #if PTR_SIZE == 4
2922 if (ds == 8) {
2923 /* generate high word */
2924 if (sbt & VT_UNSIGNED) {
2925 vpushi(0);
2926 gv(RC_INT);
2927 } else {
2928 gv_dup();
2929 vpushi(31);
2930 gen_op(TOK_SAR);
2932 lbuild(dbt);
2933 } else if (ss == 8) {
2934 /* from long long: just take low order word */
2935 lexpand();
2936 vpop();
2938 ss = 4;
2940 #elif PTR_SIZE == 8
2941 if (ds == 8) {
2942 /* need to convert from 32bit to 64bit */
2943 if (sbt & VT_UNSIGNED) {
2944 #if defined(TCC_TARGET_RISCV64)
2945 /* RISC-V keeps 32bit vals in registers sign-extended.
2946 So here we need a zero-extension. */
2947 trunc = 32;
2948 #else
2949 goto done;
2950 #endif
2951 } else {
2952 gen_cvt_sxtw();
2953 goto done;
2955 ss = ds, ds = 4, dbt = sbt;
2956 } else if (ss == 8) {
2957 /* XXX some architectures (e.g. risc-v) would prefer this to
2958 merely be a 32-to-64 sign or zero
2959 extension. */
2960 trunc = 32; /* zero upper 32 bits */
2961 } else {
2962 ss = 4;
2964 #endif
2966 if (ds >= ss)
2967 goto done;
2968 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64 || defined TCC_TARGET_ARM64
2969 if (ss == 4) {
2970 gen_cvt_csti(dbt);
2971 goto done;
2973 #endif
2974 bits = (ss - ds) * 8;
2975 /* for unsigned, gen_op will convert SAR to SHR */
2976 vtop->type.t = (ss == 8 ? VT_LLONG : VT_INT) | (dbt & VT_UNSIGNED);
2977 vpushi(bits);
2978 gen_op(TOK_SHL);
2979 vpushi(bits - trunc);
2980 gen_op(TOK_SAR);
2981 vpushi(trunc);
2982 gen_op(TOK_SHR);
2984 done:
2985 vtop->type = *type;
2986 vtop->type.t &= ~ ( VT_CONSTANT | VT_VOLATILE | VT_ARRAY );
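/* Examples of the constant narrowing above (a sketch): casting the
   constant 0x134 to char masks with 0xff and sign-extends, giving
   0x34, while (signed char)0x1ff gives -1 and (unsigned short)-1
   gives 0xffff; casting a symbol address to _Bool is folded to 1.
   The non-constant paths either simply re-type an lvalue that is
   still in memory or sign-/zero-extend in a register with the
   shift-left/shift-right sequence at the end. */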
2989 /* return type size as known at compile time. Put alignment at 'a' */
2990 ST_FUNC int type_size(CType *type, int *a)
2992 Sym *s;
2993 int bt;
2995 bt = type->t & VT_BTYPE;
2996 if (bt == VT_STRUCT) {
2997 /* struct/union */
2998 s = type->ref;
2999 *a = s->r;
3000 return s->c;
3001 } else if (bt == VT_PTR) {
3002 if (type->t & VT_ARRAY) {
3003 int ts;
3005 s = type->ref;
3006 ts = type_size(&s->type, a);
3008 if (ts < 0 && s->c < 0)
3009 ts = -ts;
3011 return ts * s->c;
3012 } else {
3013 *a = PTR_SIZE;
3014 return PTR_SIZE;
3016 } else if (IS_ENUM(type->t) && type->ref->c < 0) {
3017 return -1; /* incomplete enum */
3018 } else if (bt == VT_LDOUBLE) {
3019 *a = LDOUBLE_ALIGN;
3020 return LDOUBLE_SIZE;
3021 } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
3022 #ifdef TCC_TARGET_I386
3023 #ifdef TCC_TARGET_PE
3024 *a = 8;
3025 #else
3026 *a = 4;
3027 #endif
3028 #elif defined(TCC_TARGET_ARM)
3029 #ifdef TCC_ARM_EABI
3030 *a = 8;
3031 #else
3032 *a = 4;
3033 #endif
3034 #else
3035 *a = 8;
3036 #endif
3037 return 8;
3038 } else if (bt == VT_INT || bt == VT_FLOAT) {
3039 *a = 4;
3040 return 4;
3041 } else if (bt == VT_SHORT) {
3042 *a = 2;
3043 return 2;
3044 } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
3045 *a = 8;
3046 return 16;
3047 } else {
3048 /* char, void, function, _Bool */
3049 *a = 1;
3050 return 1;
3054 /* push type size as known at run time on top of the value stack. Put
3055 alignment at 'a' */
3056 ST_FUNC void vla_runtime_type_size(CType *type, int *a)
3058 if (type->t & VT_VLA) {
3059 type_size(&type->ref->type, a);
3060 vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
3061 } else {
3062 vpushi(type_size(type, a));
3066 /* return the pointed type of t */
3067 static inline CType *pointed_type(CType *type)
3069 return &type->ref->type;
3072 /* modify 'type' so that it becomes a pointer to the original type. */
3073 ST_FUNC void mk_pointer(CType *type)
3075 Sym *s;
3076 s = sym_push(SYM_FIELD, type, 0, -1);
3077 type->t = VT_PTR | (type->t & VT_STORAGE);
3078 type->ref = s;
3081 /* compare function types. OLD functions match any new functions */
3082 static int is_compatible_func(CType *type1, CType *type2)
3084 Sym *s1, *s2;
3086 s1 = type1->ref;
3087 s2 = type2->ref;
3088 if (s1->f.func_call != s2->f.func_call)
3089 return 0;
3090 if (s1->f.func_type != s2->f.func_type
3091 && s1->f.func_type != FUNC_OLD
3092 && s2->f.func_type != FUNC_OLD)
3093 return 0;
3094 /* we should check the function return type for FUNC_OLD too
3095 but that causes problems with the internally used support
3096 functions such as TOK_memmove */
3097 if (s1->f.func_type == FUNC_OLD && !s1->next)
3098 return 1;
3099 if (s2->f.func_type == FUNC_OLD && !s2->next)
3100 return 1;
3101 for (;;) {
3102 if (!is_compatible_unqualified_types(&s1->type, &s2->type))
3103 return 0;
3104 s1 = s1->next;
3105 s2 = s2->next;
3106 if (!s1)
3107 return !s2;
3108 if (!s2)
3109 return 0;
3113 /* return true if type1 and type2 are the same. If unqualified is
3114 true, qualifiers on the types are ignored.
3116 static int compare_types(CType *type1, CType *type2, int unqualified)
3118 int bt1, t1, t2;
3120 t1 = type1->t & VT_TYPE;
3121 t2 = type2->t & VT_TYPE;
3122 if (unqualified) {
3123 /* strip qualifiers before comparing */
3124 t1 &= ~(VT_CONSTANT | VT_VOLATILE);
3125 t2 &= ~(VT_CONSTANT | VT_VOLATILE);
3128 /* Default vs. explicit signedness only matters for char */
3129 if ((t1 & VT_BTYPE) != VT_BYTE) {
3130 t1 &= ~VT_DEFSIGN;
3131 t2 &= ~VT_DEFSIGN;
3133 /* XXX: bitfields ? */
3134 if (t1 != t2)
3135 return 0;
3137 if ((t1 & VT_ARRAY)
3138 && !(type1->ref->c < 0
3139 || type2->ref->c < 0
3140 || type1->ref->c == type2->ref->c))
3141 return 0;
3143 /* test more complicated cases */
3144 bt1 = t1 & VT_BTYPE;
3145 if (bt1 == VT_PTR) {
3146 type1 = pointed_type(type1);
3147 type2 = pointed_type(type2);
3148 return is_compatible_types(type1, type2);
3149 } else if (bt1 == VT_STRUCT) {
3150 return (type1->ref == type2->ref);
3151 } else if (bt1 == VT_FUNC) {
3152 return is_compatible_func(type1, type2);
3153 } else if (IS_ENUM(type1->t) || IS_ENUM(type2->t)) {
3154 return type1->ref == type2->ref;
3155 } else {
3156 return 1;
3160 /* return true if type1 and type2 are exactly the same (including
3161 qualifiers).
3163 static int is_compatible_types(CType *type1, CType *type2)
3165 return compare_types(type1,type2,0);
3168 /* return true if type1 and type2 are the same (ignoring qualifiers).
3170 static int is_compatible_unqualified_types(CType *type1, CType *type2)
3172 return compare_types(type1,type2,1);
3175 /* print a type. If 'varstr' is not NULL, then the variable is also
3176 printed in the type */
3177 /* XXX: union */
3178 /* XXX: add array and function pointers */
3179 static void type_to_str(char *buf, int buf_size,
3180 CType *type, const char *varstr)
3182 int bt, v, t;
3183 Sym *s, *sa;
3184 char buf1[256];
3185 const char *tstr;
3187 t = type->t;
3188 bt = t & VT_BTYPE;
3189 buf[0] = '\0';
3191 if (t & VT_EXTERN)
3192 pstrcat(buf, buf_size, "extern ");
3193 if (t & VT_STATIC)
3194 pstrcat(buf, buf_size, "static ");
3195 if (t & VT_TYPEDEF)
3196 pstrcat(buf, buf_size, "typedef ");
3197 if (t & VT_INLINE)
3198 pstrcat(buf, buf_size, "inline ");
3199 if (t & VT_VOLATILE)
3200 pstrcat(buf, buf_size, "volatile ");
3201 if (t & VT_CONSTANT)
3202 pstrcat(buf, buf_size, "const ");
3204 if (((t & VT_DEFSIGN) && bt == VT_BYTE)
3205 || ((t & VT_UNSIGNED)
3206 && (bt == VT_SHORT || bt == VT_INT || bt == VT_LLONG)
3207 && !IS_ENUM(t)
3209 pstrcat(buf, buf_size, (t & VT_UNSIGNED) ? "unsigned " : "signed ");
3211 buf_size -= strlen(buf);
3212 buf += strlen(buf);
3214 switch(bt) {
3215 case VT_VOID:
3216 tstr = "void";
3217 goto add_tstr;
3218 case VT_BOOL:
3219 tstr = "_Bool";
3220 goto add_tstr;
3221 case VT_BYTE:
3222 tstr = "char";
3223 goto add_tstr;
3224 case VT_SHORT:
3225 tstr = "short";
3226 goto add_tstr;
3227 case VT_INT:
3228 tstr = "int";
3229 goto maybe_long;
3230 case VT_LLONG:
3231 tstr = "long long";
3232 maybe_long:
3233 if (t & VT_LONG)
3234 tstr = "long";
3235 if (!IS_ENUM(t))
3236 goto add_tstr;
3237 tstr = "enum ";
3238 goto tstruct;
3239 case VT_FLOAT:
3240 tstr = "float";
3241 goto add_tstr;
3242 case VT_DOUBLE:
3243 tstr = "double";
3244 goto add_tstr;
3245 case VT_LDOUBLE:
3246 tstr = "long double";
3247 add_tstr:
3248 pstrcat(buf, buf_size, tstr);
3249 break;
3250 case VT_STRUCT:
3251 tstr = "struct ";
3252 if (IS_UNION(t))
3253 tstr = "union ";
3254 tstruct:
3255 pstrcat(buf, buf_size, tstr);
3256 v = type->ref->v & ~SYM_STRUCT;
3257 if (v >= SYM_FIRST_ANOM)
3258 pstrcat(buf, buf_size, "<anonymous>");
3259 else
3260 pstrcat(buf, buf_size, get_tok_str(v, NULL));
3261 break;
3262 case VT_FUNC:
3263 s = type->ref;
3264 buf1[0]=0;
3265 if (varstr && '*' == *varstr) {
3266 pstrcat(buf1, sizeof(buf1), "(");
3267 pstrcat(buf1, sizeof(buf1), varstr);
3268 pstrcat(buf1, sizeof(buf1), ")");
3270 pstrcat(buf1, sizeof(buf1), "(");
3271 sa = s->next;
3272 while (sa != NULL) {
3273 char buf2[256];
3274 type_to_str(buf2, sizeof(buf2), &sa->type, NULL);
3275 pstrcat(buf1, sizeof(buf1), buf2);
3276 sa = sa->next;
3277 if (sa)
3278 pstrcat(buf1, sizeof(buf1), ", ");
3280 if (s->f.func_type == FUNC_ELLIPSIS)
3281 pstrcat(buf1, sizeof(buf1), ", ...");
3282 pstrcat(buf1, sizeof(buf1), ")");
3283 type_to_str(buf, buf_size, &s->type, buf1);
3284 goto no_var;
3285 case VT_PTR:
3286 s = type->ref;
3287 if (t & VT_ARRAY) {
3288 if (varstr && '*' == *varstr)
3289 snprintf(buf1, sizeof(buf1), "(%s)[%d]", varstr, s->c);
3290 else
3291 snprintf(buf1, sizeof(buf1), "%s[%d]", varstr ? varstr : "", s->c);
3292 type_to_str(buf, buf_size, &s->type, buf1);
3293 goto no_var;
3295 pstrcpy(buf1, sizeof(buf1), "*");
3296 if (t & VT_CONSTANT)
3297 pstrcat(buf1, sizeof(buf1), "const ");
3298 if (t & VT_VOLATILE)
3299 pstrcat(buf1, sizeof(buf1), "volatile ");
3300 if (varstr)
3301 pstrcat(buf1, sizeof(buf1), varstr);
3302 type_to_str(buf, buf_size, &s->type, buf1);
3303 goto no_var;
3305 if (varstr) {
3306 pstrcat(buf, buf_size, " ");
3307 pstrcat(buf, buf_size, varstr);
3309 no_var: ;
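/* Approximate example of the output: for a pointer-to-function type
   declared as "int (*fp)(char, double)", type_to_str() rebuilds that
   string recursively through the VT_PTR and VT_FUNC cases above;
   struct/union and enum tags print as e.g. "struct foo", or as
   "<anonymous>" for unnamed ones. */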
3312 static void cast_error(CType *st, CType *dt)
3314 char buf1[256], buf2[256];
3315 type_to_str(buf1, sizeof(buf1), st, NULL);
3316 type_to_str(buf2, sizeof(buf2), dt, NULL);
3317 tcc_error("cannot convert '%s' to '%s'", buf1, buf2);
3320 /* verify type compatibility to store vtop in 'dt' type */
3321 static void verify_assign_cast(CType *dt)
3323 CType *st, *type1, *type2;
3324 int dbt, sbt, qualwarn, lvl;
3326 st = &vtop->type; /* source type */
3327 dbt = dt->t & VT_BTYPE;
3328 sbt = st->t & VT_BTYPE;
3329 if (dt->t & VT_CONSTANT)
3330 tcc_warning("assignment of read-only location");
3331 switch(dbt) {
3332 case VT_VOID:
3333 if (sbt != dbt)
3334 tcc_error("assignment to void expression");
3335 break;
3336 case VT_PTR:
3337 /* special cases for pointers */
3338 /* '0' can also be a pointer */
3339 if (is_null_pointer(vtop))
3340 break;
3341 /* accept implicit pointer to integer cast with warning */
3342 if (is_integer_btype(sbt)) {
3343 tcc_warning("assignment makes pointer from integer without a cast");
3344 break;
3346 type1 = pointed_type(dt);
3347 if (sbt == VT_PTR)
3348 type2 = pointed_type(st);
3349 else if (sbt == VT_FUNC)
3350 type2 = st; /* a function is implicitly a function pointer */
3351 else
3352 goto error;
3353 if (is_compatible_types(type1, type2))
3354 break;
3355 for (qualwarn = lvl = 0;; ++lvl) {
3356 if (((type2->t & VT_CONSTANT) && !(type1->t & VT_CONSTANT)) ||
3357 ((type2->t & VT_VOLATILE) && !(type1->t & VT_VOLATILE)))
3358 qualwarn = 1;
3359 dbt = type1->t & (VT_BTYPE|VT_LONG);
3360 sbt = type2->t & (VT_BTYPE|VT_LONG);
3361 if (dbt != VT_PTR || sbt != VT_PTR)
3362 break;
3363 type1 = pointed_type(type1);
3364 type2 = pointed_type(type2);
3366 if (!is_compatible_unqualified_types(type1, type2)) {
3367 if ((dbt == VT_VOID || sbt == VT_VOID) && lvl == 0) {
3368 /* void * can match anything */
3369 } else if (dbt == sbt
3370 && is_integer_btype(sbt & VT_BTYPE)
3371 && IS_ENUM(type1->t) + IS_ENUM(type2->t)
3372 + !!((type1->t ^ type2->t) & VT_UNSIGNED) < 2) {
3373 /* Like GCC, don't warn by default for mere changes
3374 in pointer target signedness. Do warn for different
3375 base types, though, in particular for unsigned enums
3376 and signed int targets. */
3377 } else {
3378 tcc_warning("assignment from incompatible pointer type");
3379 break;
3382 if (qualwarn)
3383 tcc_warning("assignment discards qualifiers from pointer target type");
3384 break;
3385 case VT_BYTE:
3386 case VT_SHORT:
3387 case VT_INT:
3388 case VT_LLONG:
3389 if (sbt == VT_PTR || sbt == VT_FUNC) {
3390 tcc_warning("assignment makes integer from pointer without a cast");
3391 } else if (sbt == VT_STRUCT) {
3392 goto case_VT_STRUCT;
3394 /* XXX: more tests */
3395 break;
3396 case VT_STRUCT:
3397 case_VT_STRUCT:
3398 if (!is_compatible_unqualified_types(dt, st)) {
3399 error:
3400 cast_error(st, dt);
3402 break;
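/* Examples of the diagnostics above: "int *p = 3;" warns that the
   assignment makes a pointer from an integer without a cast;
   assigning a "const char *" to a plain "char *" warns about
   discarding qualifiers from the pointer target type; assigning
   between unrelated pointer types warns about an incompatible
   pointer type, while void * matches anything. */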
3406 static void gen_assign_cast(CType *dt)
3408 verify_assign_cast(dt);
3409 gen_cast(dt);
3412 /* store vtop in lvalue pushed on stack */
3413 ST_FUNC void vstore(void)
3415 int sbt, dbt, ft, r, size, align, bit_size, bit_pos, delayed_cast;
3417 ft = vtop[-1].type.t;
3418 sbt = vtop->type.t & VT_BTYPE;
3419 dbt = ft & VT_BTYPE;
3421 verify_assign_cast(&vtop[-1].type);
3423 if (sbt == VT_STRUCT) {
3424 /* if structure, only generate pointer */
3425 /* structure assignment : generate memcpy */
3426 /* XXX: optimize if small size */
3427 size = type_size(&vtop->type, &align);
3429 /* destination */
3430 vswap();
3431 vtop->type.t = VT_PTR;
3432 gaddrof();
3434 /* address of memcpy() */
3435 #ifdef TCC_ARM_EABI
3436 if(!(align & 7))
3437 vpush_global_sym(&func_old_type, TOK_memcpy8);
3438 else if(!(align & 3))
3439 vpush_global_sym(&func_old_type, TOK_memcpy4);
3440 else
3441 #endif
3442 /* Use memmove, rather than memcpy, as dest and src may be the same: */
3443 vpush_global_sym(&func_old_type, TOK_memmove);
3445 vswap();
3446 /* source */
3447 vtop->r &= ~VT_MUSTBOUND;
3448 vpushv(vtop - 2);
3449 vtop->type.t = VT_PTR;
3450 gaddrof();
3451 /* type size */
3452 vpushi(size);
3453 gfunc_call(3);
3454 /* leave source on stack */
3456 } else if (ft & VT_BITFIELD) {
3457 /* bitfield store handling */
3459 /* save lvalue as expression result (example: s.b = s.a = n;) */
3460 vdup(), vtop[-1] = vtop[-2];
3462 bit_pos = BIT_POS(ft);
3463 bit_size = BIT_SIZE(ft);
3464 /* remove bit field info to avoid loops */
3465 vtop[-1].type.t = ft & ~VT_STRUCT_MASK;
3467 if (dbt == VT_BOOL) {
3468 gen_cast(&vtop[-1].type);
3469 vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
3471 r = adjust_bf(vtop - 1, bit_pos, bit_size);
3472 if (dbt != VT_BOOL) {
3473 gen_cast(&vtop[-1].type);
3474 dbt = vtop[-1].type.t & VT_BTYPE;
3476 if (r == VT_STRUCT) {
3477 store_packed_bf(bit_pos, bit_size);
3478 } else {
3479 unsigned long long mask = (1ULL << bit_size) - 1;
3480 if (dbt != VT_BOOL) {
3481 /* mask source */
3482 if (dbt == VT_LLONG)
3483 vpushll(mask);
3484 else
3485 vpushi((unsigned)mask);
3486 gen_op('&');
3488 /* shift source */
3489 vpushi(bit_pos);
3490 gen_op(TOK_SHL);
3491 vswap();
3492 /* duplicate destination */
3493 vdup();
3494 vrott(3);
3495 /* load destination, mask and or with source */
3496 if (dbt == VT_LLONG)
3497 vpushll(~(mask << bit_pos));
3498 else
3499 vpushi(~((unsigned)mask << bit_pos));
3500 gen_op('&');
3501 gen_op('|');
3502 /* store result */
3503 vstore();
3504 /* ... and discard */
3505 vpop();
3507 } else if (dbt == VT_VOID) {
3508 --vtop;
3509 } else {
3510 /* optimize char/short casts */
3511 delayed_cast = 0;
3512 if ((dbt == VT_BYTE || dbt == VT_SHORT)
3513 && is_integer_btype(sbt)
3515 if ((vtop->r & VT_MUSTCAST)
3516 && btype_size(dbt) > btype_size(sbt)
3518 force_charshort_cast();
3519 delayed_cast = 1;
3520 } else {
3521 gen_cast(&vtop[-1].type);
3524 #ifdef CONFIG_TCC_BCHECK
3525 /* bound check case */
3526 if (vtop[-1].r & VT_MUSTBOUND) {
3527 vswap();
3528 gbound();
3529 vswap();
3531 #endif
3532 gv(RC_TYPE(dbt)); /* generate value */
3534 if (delayed_cast) {
3535 vtop->r |= BFVAL(VT_MUSTCAST, (sbt == VT_LLONG) + 1);
3536 //tcc_warning("delayed cast %x -> %x", sbt, dbt);
3537 vtop->type.t = ft & VT_TYPE;
3540 /* if lvalue was saved on stack, must read it */
3541 if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
3542 SValue sv;
3543 r = get_reg(RC_INT);
3544 sv.type.t = VT_PTRDIFF_T;
3545 sv.r = VT_LOCAL | VT_LVAL;
3546 sv.c.i = vtop[-1].c.i;
3547 load(r, &sv);
3548 vtop[-1].r = r | VT_LVAL;
3551 r = vtop->r & VT_VALMASK;
3552 /* two-word case handling:
3553 store the second register at word + 4 (or + 8 for x86-64) */
3554 if (USING_TWO_WORDS(dbt)) {
3555 int load_type = (dbt == VT_QFLOAT) ? VT_DOUBLE : VT_PTRDIFF_T;
3556 vtop[-1].type.t = load_type;
3557 store(r, vtop - 1);
3558 vswap();
3559 /* convert to int to increment easily */
3560 vtop->type.t = VT_PTRDIFF_T;
3561 gaddrof();
3562 vpushs(PTR_SIZE);
3563 gen_op('+');
3564 vtop->r |= VT_LVAL;
3565 vswap();
3566 vtop[-1].type.t = load_type;
3567 /* XXX: it works because r2 is spilled last ! */
3568 store(vtop->r2, vtop - 1);
3569 } else {
3570 /* single word */
3571 store(r, vtop - 1);
3573 vswap();
3574 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
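/* Rough sketch of the bit-field store path above for something like
       struct { int x:5; } s;  s.x = v;
   v is masked with (1 << 5) - 1 and shifted left by the bit position,
   the destination word is loaded and masked with the inverted field
   mask, then the two are or'ed together and stored back; packed
   bit-fields that cross their container go through store_packed_bf()
   instead. */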
3578 /* 'post' selects post (1) or pre (0) increment/decrement. c is the token ++ or -- */
3579 ST_FUNC void inc(int post, int c)
3581 test_lvalue();
3582 vdup(); /* save lvalue */
3583 if (post) {
3584 gv_dup(); /* duplicate value */
3585 vrotb(3);
3586 vrotb(3);
3588 /* add constant */
3589 vpushi(c - TOK_MID);
3590 gen_op('+');
3591 vstore(); /* store value */
3592 if (post)
3593 vpop(); /* if post op, return saved value */
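/* Example (a sketch): for a post-increment "x++" inc() duplicates the
   lvalue, duplicates the current value so it can be returned, adds
   the constant c - TOK_MID (apparently +1 for TOK_INC and -1 for
   TOK_DEC), stores, and finally pops the new value so the saved old
   value remains as the expression result; for pointer operands
   gen_op('+') performs the element-size scaling. */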
3596 ST_FUNC void parse_mult_str (CString *astr, const char *msg)
3598 /* read the string */
3599 if (tok != TOK_STR)
3600 expect(msg);
3601 cstr_new(astr);
3602 while (tok == TOK_STR) {
3603 /* XXX: add \0 handling too ? */
3604 cstr_cat(astr, tokc.str.data, -1);
3605 next();
3607 cstr_ccat(astr, '\0');
3610 /* If I is >= 1 and a power of two, returns log2(i)+1.
3611 If I is 0 returns 0. */
3612 static int exact_log2p1(int i)
3614 int ret;
3615 if (!i)
3616 return 0;
3617 for (ret = 1; i >= 1 << 8; ret += 8)
3618 i >>= 8;
3619 if (i >= 1 << 4)
3620 ret += 4, i >>= 4;
3621 if (i >= 1 << 2)
3622 ret += 2, i >>= 2;
3623 if (i >= 1 << 1)
3624 ret++;
3625 return ret;
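/* Examples: exact_log2p1(1) == 1, exact_log2p1(8) == 4,
   exact_log2p1(0) == 0.  The attribute code below stores alignments
   this way (a.aligned = exact_log2p1(n)), so the original value can
   be recovered as 1 << (a.aligned - 1). */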
3628 /* Parse __attribute__((...)) GNUC extension. */
3629 static void parse_attribute(AttributeDef *ad)
3631 int t, n;
3632 CString astr;
3634 redo:
3635 if (tok != TOK_ATTRIBUTE1 && tok != TOK_ATTRIBUTE2)
3636 return;
3637 next();
3638 skip('(');
3639 skip('(');
3640 while (tok != ')') {
3641 if (tok < TOK_IDENT)
3642 expect("attribute name");
3643 t = tok;
3644 next();
3645 switch(t) {
3646 case TOK_CLEANUP1:
3647 case TOK_CLEANUP2:
3649 Sym *s;
3651 skip('(');
3652 s = sym_find(tok);
3653 if (!s) {
3654 tcc_warning("implicit declaration of function '%s'",
3655 get_tok_str(tok, &tokc));
3656 s = external_global_sym(tok, &func_old_type);
3658 ad->cleanup_func = s;
3659 next();
3660 skip(')');
3661 break;
3663 case TOK_CONSTRUCTOR1:
3664 case TOK_CONSTRUCTOR2:
3665 ad->constructor = 1;
3666 break;
3667 case TOK_DESTRUCTOR1:
3668 case TOK_DESTRUCTOR2:
3669 ad->destructor = 1;
3670 break;
3671 case TOK_SECTION1:
3672 case TOK_SECTION2:
3673 skip('(');
3674 parse_mult_str(&astr, "section name");
3675 ad->section = find_section(tcc_state, (char *)astr.data);
3676 skip(')');
3677 cstr_free(&astr);
3678 break;
3679 case TOK_ALIAS1:
3680 case TOK_ALIAS2:
3681 skip('(');
3682 parse_mult_str(&astr, "alias(\"target\")");
3683 ad->alias_target = /* save string as token, for later */
3684 tok_alloc((char*)astr.data, astr.size-1)->tok;
3685 skip(')');
3686 cstr_free(&astr);
3687 break;
3688 case TOK_VISIBILITY1:
3689 case TOK_VISIBILITY2:
3690 skip('(');
3691 parse_mult_str(&astr,
3692 "visibility(\"default|hidden|internal|protected\")");
3693 if (!strcmp (astr.data, "default"))
3694 ad->a.visibility = STV_DEFAULT;
3695 else if (!strcmp (astr.data, "hidden"))
3696 ad->a.visibility = STV_HIDDEN;
3697 else if (!strcmp (astr.data, "internal"))
3698 ad->a.visibility = STV_INTERNAL;
3699 else if (!strcmp (astr.data, "protected"))
3700 ad->a.visibility = STV_PROTECTED;
3701 else
3702 expect("visibility(\"default|hidden|internal|protected\")");
3703 skip(')');
3704 cstr_free(&astr);
3705 break;
3706 case TOK_ALIGNED1:
3707 case TOK_ALIGNED2:
3708 if (tok == '(') {
3709 next();
3710 n = expr_const();
3711 if (n <= 0 || (n & (n - 1)) != 0)
3712 tcc_error("alignment must be a positive power of two");
3713 skip(')');
3714 } else {
3715 n = MAX_ALIGN;
3717 ad->a.aligned = exact_log2p1(n);
3718 if (n != 1 << (ad->a.aligned - 1))
3719 tcc_error("alignment of %d is larger than implemented", n);
3720 break;
3721 case TOK_PACKED1:
3722 case TOK_PACKED2:
3723 ad->a.packed = 1;
3724 break;
3725 case TOK_WEAK1:
3726 case TOK_WEAK2:
3727 ad->a.weak = 1;
3728 break;
3729 case TOK_UNUSED1:
3730 case TOK_UNUSED2:
3731 /* currently, no need to handle it because tcc does not
3732 track unused objects */
3733 break;
3734 case TOK_NORETURN1:
3735 case TOK_NORETURN2:
3736 ad->f.func_noreturn = 1;
3737 break;
3738 case TOK_CDECL1:
3739 case TOK_CDECL2:
3740 case TOK_CDECL3:
3741 ad->f.func_call = FUNC_CDECL;
3742 break;
3743 case TOK_STDCALL1:
3744 case TOK_STDCALL2:
3745 case TOK_STDCALL3:
3746 ad->f.func_call = FUNC_STDCALL;
3747 break;
3748 #ifdef TCC_TARGET_I386
3749 case TOK_REGPARM1:
3750 case TOK_REGPARM2:
3751 skip('(');
3752 n = expr_const();
3753 if (n > 3)
3754 n = 3;
3755 else if (n < 0)
3756 n = 0;
3757 if (n > 0)
3758 ad->f.func_call = FUNC_FASTCALL1 + n - 1;
3759 skip(')');
3760 break;
3761 case TOK_FASTCALL1:
3762 case TOK_FASTCALL2:
3763 case TOK_FASTCALL3:
3764 ad->f.func_call = FUNC_FASTCALLW;
3765 break;
3766 #endif
3767 case TOK_MODE:
3768 skip('(');
3769 switch(tok) {
3770 case TOK_MODE_DI:
3771 ad->attr_mode = VT_LLONG + 1;
3772 break;
3773 case TOK_MODE_QI:
3774 ad->attr_mode = VT_BYTE + 1;
3775 break;
3776 case TOK_MODE_HI:
3777 ad->attr_mode = VT_SHORT + 1;
3778 break;
3779 case TOK_MODE_SI:
3780 case TOK_MODE_word:
3781 ad->attr_mode = VT_INT + 1;
3782 break;
3783 default:
3784 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok, NULL));
3785 break;
3787 next();
3788 skip(')');
3789 break;
3790 case TOK_DLLEXPORT:
3791 ad->a.dllexport = 1;
3792 break;
3793 case TOK_NODECORATE:
3794 ad->a.nodecorate = 1;
3795 break;
3796 case TOK_DLLIMPORT:
3797 ad->a.dllimport = 1;
3798 break;
3799 default:
3800 if (tcc_state->warn_unsupported)
3801 tcc_warning("'%s' attribute ignored", get_tok_str(t, NULL));
3802 /* skip parameters */
3803 if (tok == '(') {
3804 int parenthesis = 0;
3805 do {
3806 if (tok == '(')
3807 parenthesis++;
3808 else if (tok == ')')
3809 parenthesis--;
3810 next();
3811 } while (parenthesis && tok != -1);
3813 break;
3815 if (tok != ',')
3816 break;
3817 next();
3819 skip(')');
3820 skip(')');
3821 goto redo;
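/* Example of input handled above (a sketch):
       __attribute__((aligned(16), packed, section(".mydata")))
   fills ad->a.aligned (as exact_log2p1(16) == 5), ad->a.packed and
   ad->section; unknown attributes are skipped, including any
   parenthesized arguments, with at most a warning. */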
3824 static Sym * find_field (CType *type, int v, int *cumofs)
3826 Sym *s = type->ref;
3827 v |= SYM_FIELD;
3828 while ((s = s->next) != NULL) {
3829 if ((s->v & SYM_FIELD) &&
3830 (s->type.t & VT_BTYPE) == VT_STRUCT &&
3831 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
3832 Sym *ret = find_field (&s->type, v, cumofs);
3833 if (ret) {
3834 *cumofs += s->c;
3835 return ret;
3838 if (s->v == v)
3839 break;
3841 return s;
3844 static void struct_layout(CType *type, AttributeDef *ad)
3846 int size, align, maxalign, offset, c, bit_pos, bit_size;
3847 int packed, a, bt, prevbt, prev_bit_size;
3848 int pcc = !tcc_state->ms_bitfields;
3849 int pragma_pack = *tcc_state->pack_stack_ptr;
3850 Sym *f;
3852 maxalign = 1;
3853 offset = 0;
3854 c = 0;
3855 bit_pos = 0;
3856 prevbt = VT_STRUCT; /* make it never match */
3857 prev_bit_size = 0;
3859 //#define BF_DEBUG
3861 for (f = type->ref->next; f; f = f->next) {
3862 if (f->type.t & VT_BITFIELD)
3863 bit_size = BIT_SIZE(f->type.t);
3864 else
3865 bit_size = -1;
3866 size = type_size(&f->type, &align);
3867 a = f->a.aligned ? 1 << (f->a.aligned - 1) : 0;
3868 packed = 0;
3870 if (pcc && bit_size == 0) {
3871 /* in pcc mode, packing does not affect zero-width bitfields */
3873 } else {
3874 /* in pcc mode, attribute packed overrides if set. */
3875 if (pcc && (f->a.packed || ad->a.packed))
3876 align = packed = 1;
3878 /* pragma pack overrides align if smaller, and always packs bitfields */
3879 if (pragma_pack) {
3880 packed = 1;
3881 if (pragma_pack < align)
3882 align = pragma_pack;
3883 /* in pcc mode pragma pack also overrides individual align */
3884 if (pcc && pragma_pack < a)
3885 a = 0;
3888 /* some individual align was specified */
3889 if (a)
3890 align = a;
3892 if (type->ref->type.t == VT_UNION) {
3893 if (pcc && bit_size >= 0)
3894 size = (bit_size + 7) >> 3;
3895 offset = 0;
3896 if (size > c)
3897 c = size;
3899 } else if (bit_size < 0) {
3900 if (pcc)
3901 c += (bit_pos + 7) >> 3;
3902 c = (c + align - 1) & -align;
3903 offset = c;
3904 if (size > 0)
3905 c += size;
3906 bit_pos = 0;
3907 prevbt = VT_STRUCT;
3908 prev_bit_size = 0;
3910 } else {
3911 /* A bit-field. Layout is more complicated. There are two
3912 options: PCC (GCC) compatible and MS compatible */
3913 if (pcc) {
3914 /* In PCC layout a bit-field is placed adjacent to the
3915 preceding bit-fields, except if:
3916 - it has zero-width
3917 - an individual alignment was given
3918 - it would overflow its base type container and
3919 there is no packing */
3920 if (bit_size == 0) {
3921 new_field:
3922 c = (c + ((bit_pos + 7) >> 3) + align - 1) & -align;
3923 bit_pos = 0;
3924 } else if (f->a.aligned) {
3925 goto new_field;
3926 } else if (!packed) {
3927 int a8 = align * 8;
3928 int ofs = ((c * 8 + bit_pos) % a8 + bit_size + a8 - 1) / a8;
3929 if (ofs > size / align)
3930 goto new_field;
3933 /* in pcc mode, long long bitfields have type int if they fit */
3934 if (size == 8 && bit_size <= 32)
3935 f->type.t = (f->type.t & ~VT_BTYPE) | VT_INT, size = 4;
3937 while (bit_pos >= align * 8)
3938 c += align, bit_pos -= align * 8;
3939 offset = c;
3941 /* In PCC layout named bit-fields influence the alignment
3942 of the containing struct using the base type's alignment,
3943 except for packed fields (which here have correct align). */
3944 if (f->v & SYM_FIRST_ANOM
3945 // && bit_size // ??? gcc on ARM/rpi does that
3947 align = 1;
3949 } else {
3950 bt = f->type.t & VT_BTYPE;
3951 if ((bit_pos + bit_size > size * 8)
3952 || (bit_size > 0) == (bt != prevbt)
3954 c = (c + align - 1) & -align;
3955 offset = c;
3956 bit_pos = 0;
3957 /* In MS bitfield mode a bit-field run always uses
3958 at least as many bits as the underlying type.
3959 To start a new run it's also required that this
3960 or the last bit-field had non-zero width. */
3961 if (bit_size || prev_bit_size)
3962 c += size;
3964 /* In MS layout the record's alignment is normally
3965 influenced by the field, except for a zero-width
3966 field at the start of a run (further zero-width
3967 fields do influence it again). */
3968 if (bit_size == 0 && prevbt != bt)
3969 align = 1;
3970 prevbt = bt;
3971 prev_bit_size = bit_size;
3974 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
3975 | (bit_pos << VT_STRUCT_SHIFT);
3976 bit_pos += bit_size;
3978 if (align > maxalign)
3979 maxalign = align;
3981 #ifdef BF_DEBUG
3982 printf("set field %s offset %-2d size %-2d align %-2d",
3983 get_tok_str(f->v & ~SYM_FIELD, NULL), offset, size, align);
3984 if (f->type.t & VT_BITFIELD) {
3985 printf(" pos %-2d bits %-2d",
3986 BIT_POS(f->type.t),
3987 BIT_SIZE(f->type.t)
3990 printf("\n");
3991 #endif
3993 f->c = offset;
3994 f->r = 0;
3997 if (pcc)
3998 c += (bit_pos + 7) >> 3;
4000 /* store size and alignment */
4001 a = bt = ad->a.aligned ? 1 << (ad->a.aligned - 1) : 1;
4002 if (a < maxalign)
4003 a = maxalign;
4004 type->ref->r = a;
4005 if (pragma_pack && pragma_pack < maxalign && 0 == pcc) {
4006 /* can happen if an individual alignment for some member was given. In
4007 this case MSVC ignores maxalign when aligning the size */
4008 a = pragma_pack;
4009 if (a < bt)
4010 a = bt;
4012 c = (c + a - 1) & -a;
4013 type->ref->c = c;
4015 #ifdef BF_DEBUG
4016 printf("struct size %-2d align %-2d\n\n", c, a), fflush(stdout);
4017 #endif
4019 /* check whether we can access bitfields by their type */
4020 for (f = type->ref->next; f; f = f->next) {
4021 int s, px, cx, c0;
4022 CType t;
4024 if (0 == (f->type.t & VT_BITFIELD))
4025 continue;
4026 f->type.ref = f;
4027 f->auxtype = -1;
4028 bit_size = BIT_SIZE(f->type.t);
4029 if (bit_size == 0)
4030 continue;
4031 bit_pos = BIT_POS(f->type.t);
4032 size = type_size(&f->type, &align);
4033 if (bit_pos + bit_size <= size * 8 && f->c + size <= c)
4034 continue;
4036 /* try to access the field using a different type */
4037 c0 = -1, s = align = 1;
4038 for (;;) {
4039 px = f->c * 8 + bit_pos;
4040 cx = (px >> 3) & -align;
4041 px = px - (cx << 3);
4042 if (c0 == cx)
4043 break;
4044 s = (px + bit_size + 7) >> 3;
4045 if (s > 4) {
4046 t.t = VT_LLONG;
4047 } else if (s > 2) {
4048 t.t = VT_INT;
4049 } else if (s > 1) {
4050 t.t = VT_SHORT;
4051 } else {
4052 t.t = VT_BYTE;
4054 s = type_size(&t, &align);
4055 c0 = cx;
4058 if (px + bit_size <= s * 8 && cx + s <= c) {
4059 /* update offset and bit position */
4060 f->c = cx;
4061 bit_pos = px;
4062 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
4063 | (bit_pos << VT_STRUCT_SHIFT);
4064 if (s != size)
4065 f->auxtype = t.t;
4066 #ifdef BF_DEBUG
4067 printf("FIX field %s offset %-2d size %-2d align %-2d "
4068 "pos %-2d bits %-2d\n",
4069 get_tok_str(f->v & ~SYM_FIELD, NULL),
4070 cx, s, align, px, bit_size);
4071 #endif
4072 } else {
4073 /* fall back to load/store single-byte wise */
4074 f->auxtype = VT_STRUCT;
4075 #ifdef BF_DEBUG
4076 printf("FIX field %s : load byte-wise\n",
4077 get_tok_str(f->v & ~SYM_FIELD, NULL));
4078 #endif
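/* Rough layout example: for
       struct __attribute__((packed)) { char c; int i; };
   the packed flag above forces align = 1 for every field, so i is
   placed at offset 1 and the struct size is 5 instead of the usual 8;
   the trailing loop then checks whether bit-fields can still be
   accessed through their declared type or must fall back to
   byte-wise access (auxtype = VT_STRUCT). */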
4083 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
4084 static void struct_decl(CType *type, int u)
4086 int v, c, size, align, flexible;
4087 int bit_size, bsize, bt;
4088 Sym *s, *ss, **ps;
4089 AttributeDef ad, ad1;
4090 CType type1, btype;
4092 memset(&ad, 0, sizeof ad);
4093 next();
4094 parse_attribute(&ad);
4095 if (tok != '{') {
4096 v = tok;
4097 next();
4098 /* struct already defined ? return it */
4099 if (v < TOK_IDENT)
4100 expect("struct/union/enum name");
4101 s = struct_find(v);
4102 if (s && (s->sym_scope == local_scope || tok != '{')) {
4103 if (u == s->type.t)
4104 goto do_decl;
4105 if (u == VT_ENUM && IS_ENUM(s->type.t))
4106 goto do_decl;
4107 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
4109 } else {
4110 v = anon_sym++;
4112 /* Record the original enum/struct/union token. */
4113 type1.t = u == VT_ENUM ? u | VT_INT | VT_UNSIGNED : u;
4114 type1.ref = NULL;
4115 /* we put an undefined size for struct/union */
4116 s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
4117 s->r = 0; /* default alignment is zero, as in gcc */
4118 do_decl:
4119 type->t = s->type.t;
4120 type->ref = s;
4122 if (tok == '{') {
4123 next();
4124 if (s->c != -1)
4125 tcc_error("struct/union/enum already defined");
4126 s->c = -2;
4127 /* the body cannot be empty */
4128 /* NOTE: empty enums are not allowed */
4129 ps = &s->next;
4130 if (u == VT_ENUM) {
4131 long long ll = 0, pl = 0, nl = 0;
4132 CType t;
4133 t.ref = s;
4134 /* enum symbols have static storage */
4135 t.t = VT_INT|VT_STATIC|VT_ENUM_VAL;
4136 for(;;) {
4137 v = tok;
4138 if (v < TOK_UIDENT)
4139 expect("identifier");
4140 ss = sym_find(v);
4141 if (ss && !local_stack)
4142 tcc_error("redefinition of enumerator '%s'",
4143 get_tok_str(v, NULL));
4144 next();
4145 if (tok == '=') {
4146 next();
4147 ll = expr_const64();
4149 ss = sym_push(v, &t, VT_CONST, 0);
4150 ss->enum_val = ll;
4151 *ps = ss, ps = &ss->next;
4152 if (ll < nl)
4153 nl = ll;
4154 if (ll > pl)
4155 pl = ll;
4156 if (tok != ',')
4157 break;
4158 next();
4159 ll++;
4160 /* NOTE: we accept a trailing comma */
4161 if (tok == '}')
4162 break;
4164 skip('}');
4165 /* set integral type of the enum */
4166 t.t = VT_INT;
4167 if (nl >= 0) {
4168 if (pl != (unsigned)pl)
4169 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4170 t.t |= VT_UNSIGNED;
4171 } else if (pl != (int)pl || nl != (int)nl)
4172 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4173 s->type.t = type->t = t.t | VT_ENUM;
4174 s->c = 0;
4175 /* set type for enum members */
4176 for (ss = s->next; ss; ss = ss->next) {
4177 ll = ss->enum_val;
4178 if (ll == (int)ll) /* default is int if it fits */
4179 continue;
4180 if (t.t & VT_UNSIGNED) {
4181 ss->type.t |= VT_UNSIGNED;
4182 if (ll == (unsigned)ll)
4183 continue;
4185 ss->type.t = (ss->type.t & ~VT_BTYPE)
4186 | (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4188 } else {
4189 c = 0;
4190 flexible = 0;
4191 while (tok != '}') {
4192 if (!parse_btype(&btype, &ad1)) {
4193 skip(';');
4194 continue;
4196 while (1) {
4197 if (flexible)
4198 tcc_error("flexible array member '%s' not at the end of struct",
4199 get_tok_str(v, NULL));
4200 bit_size = -1;
4201 v = 0;
4202 type1 = btype;
4203 if (tok != ':') {
4204 if (tok != ';')
4205 type_decl(&type1, &ad1, &v, TYPE_DIRECT);
4206 if (v == 0) {
4207 if ((type1.t & VT_BTYPE) != VT_STRUCT)
4208 expect("identifier");
4209 else {
4210 int v = btype.ref->v;
4211 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
4212 if (tcc_state->ms_extensions == 0)
4213 expect("identifier");
4217 if (type_size(&type1, &align) < 0) {
4218 if ((u == VT_STRUCT) && (type1.t & VT_ARRAY) && c)
4219 flexible = 1;
4220 else
4221 tcc_error("field '%s' has incomplete type",
4222 get_tok_str(v, NULL));
4224 if ((type1.t & VT_BTYPE) == VT_FUNC ||
4225 (type1.t & VT_BTYPE) == VT_VOID ||
4226 (type1.t & VT_STORAGE))
4227 tcc_error("invalid type for '%s'",
4228 get_tok_str(v, NULL));
4230 if (tok == ':') {
4231 next();
4232 bit_size = expr_const();
4233 /* XXX: handle v = 0 case for messages */
4234 if (bit_size < 0)
4235 tcc_error("negative width in bit-field '%s'",
4236 get_tok_str(v, NULL));
4237 if (v && bit_size == 0)
4238 tcc_error("zero width for bit-field '%s'",
4239 get_tok_str(v, NULL));
4240 parse_attribute(&ad1);
4242 size = type_size(&type1, &align);
4243 if (bit_size >= 0) {
4244 bt = type1.t & VT_BTYPE;
4245 if (bt != VT_INT &&
4246 bt != VT_BYTE &&
4247 bt != VT_SHORT &&
4248 bt != VT_BOOL &&
4249 bt != VT_LLONG)
4250 tcc_error("bitfields must have scalar type");
4251 bsize = size * 8;
4252 if (bit_size > bsize) {
4253 tcc_error("width of '%s' exceeds its type",
4254 get_tok_str(v, NULL));
4255 } else if (bit_size == bsize
4256 && !ad.a.packed && !ad1.a.packed) {
4257 /* no need for bit fields */
4259 } else if (bit_size == 64) {
4260 tcc_error("field width 64 not implemented");
4261 } else {
4262 type1.t = (type1.t & ~VT_STRUCT_MASK)
4263 | VT_BITFIELD
4264 | (bit_size << (VT_STRUCT_SHIFT + 6));
4267 if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
4268 /* Remember we've seen a real field to check
4269 for placement of flexible array member. */
4270 c = 1;
4272 /* If the member is a struct or a bit-field, force it
4273 into the struct (as an anonymous member). */
4274 if (v == 0 &&
4275 ((type1.t & VT_BTYPE) == VT_STRUCT ||
4276 bit_size >= 0)) {
4277 v = anon_sym++;
4279 if (v) {
4280 ss = sym_push(v | SYM_FIELD, &type1, 0, 0);
4281 ss->a = ad1.a;
4282 *ps = ss;
4283 ps = &ss->next;
4285 if (tok == ';' || tok == TOK_EOF)
4286 break;
4287 skip(',');
4289 skip(';');
4291 skip('}');
4292 parse_attribute(&ad);
4293 struct_layout(type, &ad);
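/* Example of the enum typing above: for "enum { A = -1, B }" all
   values fit in int, so the enum keeps type int and both enumerators
   stay int; if some value does not fit (e.g. A = 0x100000000), the
   enum and the offending enumerators are widened to (unsigned)
   long long / long, as computed from pl and nl. */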
4298 static void sym_to_attr(AttributeDef *ad, Sym *s)
4300 merge_symattr(&ad->a, &s->a);
4301 merge_funcattr(&ad->f, &s->f);
4304 /* Add type qualifiers to a type. If the type is an array then the qualifiers
4305 are added to the element type, copied because it could be a typedef. */
4306 static void parse_btype_qualify(CType *type, int qualifiers)
4308 while (type->t & VT_ARRAY) {
4309 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
4310 type = &type->ref->type;
4312 type->t |= qualifiers;
4315 /* return 0 if no type declaration. otherwise, return the basic type
4316 and skip it.
4318 static int parse_btype(CType *type, AttributeDef *ad)
4320 int t, u, bt, st, type_found, typespec_found, g, n;
4321 Sym *s;
4322 CType type1;
4324 memset(ad, 0, sizeof(AttributeDef));
4325 type_found = 0;
4326 typespec_found = 0;
4327 t = VT_INT;
4328 bt = st = -1;
4329 type->ref = NULL;
4331 while(1) {
4332 switch(tok) {
4333 case TOK_EXTENSION:
4334 /* currently, we simply ignore __extension__ */
4335 next();
4336 continue;
4338 /* basic types */
4339 case TOK_CHAR:
4340 u = VT_BYTE;
4341 basic_type:
4342 next();
4343 basic_type1:
4344 if (u == VT_SHORT || u == VT_LONG) {
4345 if (st != -1 || (bt != -1 && bt != VT_INT))
4346 tmbt: tcc_error("too many basic types");
4347 st = u;
4348 } else {
4349 if (bt != -1 || (st != -1 && u != VT_INT))
4350 goto tmbt;
4351 bt = u;
4353 if (u != VT_INT)
4354 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
4355 typespec_found = 1;
4356 break;
4357 case TOK_VOID:
4358 u = VT_VOID;
4359 goto basic_type;
4360 case TOK_SHORT:
4361 u = VT_SHORT;
4362 goto basic_type;
4363 case TOK_INT:
4364 u = VT_INT;
4365 goto basic_type;
4366 case TOK_ALIGNAS:
4367 { int n;
4368 AttributeDef ad1;
4369 next();
4370 skip('(');
4371 memset(&ad1, 0, sizeof(AttributeDef));
4372 if (parse_btype(&type1, &ad1)) {
4373 type_decl(&type1, &ad1, &n, TYPE_ABSTRACT);
4374 if (ad1.a.aligned)
4375 n = 1 << (ad1.a.aligned - 1);
4376 else
4377 type_size(&type1, &n);
4378 } else {
4379 n = expr_const();
4380 if (n <= 0 || (n & (n - 1)) != 0)
4381 tcc_error("alignment must be a positive power of two");
4383 skip(')');
4384 ad->a.aligned = exact_log2p1(n);
4386 continue;
4387 case TOK_LONG:
4388 if ((t & VT_BTYPE) == VT_DOUBLE) {
4389 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
4390 } else if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
4391 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LLONG;
4392 } else {
4393 u = VT_LONG;
4394 goto basic_type;
4396 next();
4397 break;
4398 #ifdef TCC_TARGET_ARM64
4399 case TOK_UINT128:
4400 /* GCC's __uint128_t appears in some Linux header files. Make it a
4401 synonym for long double to get the size and alignment right. */
4402 u = VT_LDOUBLE;
4403 goto basic_type;
4404 #endif
4405 case TOK_BOOL:
4406 u = VT_BOOL;
4407 goto basic_type;
4408 case TOK_FLOAT:
4409 u = VT_FLOAT;
4410 goto basic_type;
4411 case TOK_DOUBLE:
4412 if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
4413 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
4414 } else {
4415 u = VT_DOUBLE;
4416 goto basic_type;
4418 next();
4419 break;
4420 case TOK_ENUM:
4421 struct_decl(&type1, VT_ENUM);
4422 basic_type2:
4423 u = type1.t;
4424 type->ref = type1.ref;
4425 goto basic_type1;
4426 case TOK_STRUCT:
4427 struct_decl(&type1, VT_STRUCT);
4428 goto basic_type2;
4429 case TOK_UNION:
4430 struct_decl(&type1, VT_UNION);
4431 goto basic_type2;
4433 /* type modifiers */
4434 case TOK_CONST1:
4435 case TOK_CONST2:
4436 case TOK_CONST3:
4437 type->t = t;
4438 parse_btype_qualify(type, VT_CONSTANT);
4439 t = type->t;
4440 next();
4441 break;
4442 case TOK_VOLATILE1:
4443 case TOK_VOLATILE2:
4444 case TOK_VOLATILE3:
4445 type->t = t;
4446 parse_btype_qualify(type, VT_VOLATILE);
4447 t = type->t;
4448 next();
4449 break;
4450 case TOK_SIGNED1:
4451 case TOK_SIGNED2:
4452 case TOK_SIGNED3:
4453 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
4454 tcc_error("signed and unsigned modifier");
4455 t |= VT_DEFSIGN;
4456 next();
4457 typespec_found = 1;
4458 break;
4459 case TOK_REGISTER:
4460 case TOK_AUTO:
4461 case TOK_RESTRICT1:
4462 case TOK_RESTRICT2:
4463 case TOK_RESTRICT3:
4464 next();
4465 break;
4466 case TOK_UNSIGNED:
4467 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
4468 tcc_error("signed and unsigned modifier");
4469 t |= VT_DEFSIGN | VT_UNSIGNED;
4470 next();
4471 typespec_found = 1;
4472 break;
4474 /* storage */
4475 case TOK_EXTERN:
4476 g = VT_EXTERN;
4477 goto storage;
4478 case TOK_STATIC:
4479 g = VT_STATIC;
4480 goto storage;
4481 case TOK_TYPEDEF:
4482 g = VT_TYPEDEF;
4483 goto storage;
4484 storage:
4485 if (t & (VT_EXTERN|VT_STATIC|VT_TYPEDEF) & ~g)
4486 tcc_error("multiple storage classes");
4487 t |= g;
4488 next();
4489 break;
4490 case TOK_INLINE1:
4491 case TOK_INLINE2:
4492 case TOK_INLINE3:
4493 t |= VT_INLINE;
4494 next();
4495 break;
4496 case TOK_NORETURN3:
4497 next();
4498 ad->f.func_noreturn = 1;
4499 break;
4500 /* GNUC attribute */
4501 case TOK_ATTRIBUTE1:
4502 case TOK_ATTRIBUTE2:
4503 parse_attribute(ad);
4504 if (ad->attr_mode) {
4505 u = ad->attr_mode -1;
4506 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
4508 continue;
4509 /* GNUC typeof */
4510 case TOK_TYPEOF1:
4511 case TOK_TYPEOF2:
4512 case TOK_TYPEOF3:
4513 next();
4514 parse_expr_type(&type1);
4515 /* remove all storage modifiers except typedef */
4516 type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
4517 if (type1.ref)
4518 sym_to_attr(ad, type1.ref);
4519 goto basic_type2;
4520 default:
4521 if (typespec_found)
4522 goto the_end;
4523 s = sym_find(tok);
4524 if (!s || !(s->type.t & VT_TYPEDEF))
4525 goto the_end;
4527 n = tok, next();
4528 if (tok == ':' && !in_generic) {
4529 /* ignore if it's a label */
4530 unget_tok(n);
4531 goto the_end;
4534 t &= ~(VT_BTYPE|VT_LONG);
4535 u = t & ~(VT_CONSTANT | VT_VOLATILE), t ^= u;
4536 type->t = (s->type.t & ~VT_TYPEDEF) | u;
4537 type->ref = s->type.ref;
4538 if (t)
4539 parse_btype_qualify(type, t);
4540 t = type->t;
4541 /* get attributes from typedef */
4542 sym_to_attr(ad, s);
4543 typespec_found = 1;
4544 st = bt = -2;
4545 break;
4547 type_found = 1;
4549 the_end:
4550 if (tcc_state->char_is_unsigned) {
4551 if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
4552 t |= VT_UNSIGNED;
4554 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
4555 bt = t & (VT_BTYPE|VT_LONG);
4556 if (bt == VT_LONG)
4557 t |= LONG_SIZE == 8 ? VT_LLONG : VT_INT;
4558 #ifdef TCC_TARGET_PE
4559 if (bt == VT_LDOUBLE)
4560 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_DOUBLE;
4561 #endif
4562 type->t = t;
4563 return type_found;
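/* Illustrative specifier sequences (hypothetical declarations) that the loop
   above folds into a single basic type:

       unsigned long long x;   bt/st tracking yields VT_LLONG | VT_UNSIGNED
       long double d;          "long" upgrades VT_DOUBLE to VT_LDOUBLE
       const size_t n;         a typedef name, with the qualifiers re-applied
                               through parse_btype_qualify()

   "size_t" stands for any visible typedef; a plain "long" is widened at the
   end to VT_INT or VT_LLONG depending on LONG_SIZE. */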
4566 /* convert a function parameter type (array to pointer and function to
4567 function pointer) */
4568 static inline void convert_parameter_type(CType *pt)
4570 /* remove const and volatile qualifiers (XXX: const could be used
4571 to indicate a const function parameter) */
4572 pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
4573 /* array must be transformed to pointer according to ANSI C */
4574 pt->t &= ~VT_ARRAY;
4575 if ((pt->t & VT_BTYPE) == VT_FUNC) {
4576 mk_pointer(pt);
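/* Illustrative prototype (hypothetical): the parameter adjustment performed by
   convert_parameter_type() above means that

       int f(char buf[16], int cmp(void));

   is treated as taking "char *buf" and "int (*cmp)(void)"; top-level const
   and volatile on a parameter are dropped as well. */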
4580 ST_FUNC void parse_asm_str(CString *astr)
4582 skip('(');
4583 parse_mult_str(astr, "string constant");
4586 /* Parse an asm label and return the token */
4587 static int asm_label_instr(void)
4589 int v;
4590 CString astr;
4592 next();
4593 parse_asm_str(&astr);
4594 skip(')');
4595 #ifdef ASM_DEBUG
4596 printf("asm_alias: \"%s\"\n", (char *)astr.data);
4597 #endif
4598 v = tok_alloc(astr.data, astr.size - 1)->tok;
4599 cstr_free(&astr);
4600 return v;
4603 static int post_type(CType *type, AttributeDef *ad, int storage, int td)
4605 int n, l, t1, arg_size, align, unused_align;
4606 Sym **plast, *s, *first;
4607 AttributeDef ad1;
4608 CType pt;
4610 if (tok == '(') {
4611 /* function type, or recursive declarator (return if so) */
4612 next();
4613 if (td && !(td & TYPE_ABSTRACT))
4614 return 0;
4615 if (tok == ')')
4616 l = 0;
4617 else if (parse_btype(&pt, &ad1))
4618 l = FUNC_NEW;
4619 else if (td) {
4620 merge_attr (ad, &ad1);
4621 return 0;
4622 } else
4623 l = FUNC_OLD;
4624 first = NULL;
4625 plast = &first;
4626 arg_size = 0;
4627 if (l) {
4628 for(;;) {
4629 /* read param name and compute offset */
4630 if (l != FUNC_OLD) {
4631 if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
4632 break;
4633 type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT);
4634 if ((pt.t & VT_BTYPE) == VT_VOID)
4635 tcc_error("parameter declared as void");
4636 } else {
4637 n = tok;
4638 if (n < TOK_UIDENT)
4639 expect("identifier");
4640 pt.t = VT_VOID; /* invalid type */
4641 pt.ref = NULL;
4642 next();
4644 convert_parameter_type(&pt);
4645 arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
4646 s = sym_push(n | SYM_FIELD, &pt, 0, 0);
4647 *plast = s;
4648 plast = &s->next;
4649 if (tok == ')')
4650 break;
4651 skip(',');
4652 if (l == FUNC_NEW && tok == TOK_DOTS) {
4653 l = FUNC_ELLIPSIS;
4654 next();
4655 break;
4657 if (l == FUNC_NEW && !parse_btype(&pt, &ad1))
4658 tcc_error("invalid type");
4660 } else
4661 /* if no parameters, then old type prototype */
4662 l = FUNC_OLD;
4663 skip(')');
4664 /* NOTE: const is ignored in returned type as it has a special
4665 meaning in gcc / C++ */
4666 type->t &= ~VT_CONSTANT;
4667 /* some ancient pre-K&R C allows a function to return an array
4668 and the array brackets to be put after the arguments, such
4669 that "int c()[]" means something like "int[] c()" */
4670 if (tok == '[') {
4671 next();
4672 skip(']'); /* only handle simple "[]" */
4673 mk_pointer(type);
4675 /* we push an anonymous symbol which will contain the function prototype */
4676 ad->f.func_args = arg_size;
4677 ad->f.func_type = l;
4678 s = sym_push(SYM_FIELD, type, 0, 0);
4679 s->a = ad->a;
4680 s->f = ad->f;
4681 s->next = first;
4682 type->t = VT_FUNC;
4683 type->ref = s;
4684 } else if (tok == '[') {
4685 int saved_nocode_wanted = nocode_wanted;
4686 /* array definition */
4687 next();
4688 while (1) {
4689 /* XXX The optional type-quals and static should only be accepted
4690 in parameter decls. The '*' as well, and then even only
4691 in prototypes (not function defs). */
4692 switch (tok) {
4693 case TOK_RESTRICT1: case TOK_RESTRICT2: case TOK_RESTRICT3:
4694 case TOK_CONST1:
4695 case TOK_VOLATILE1:
4696 case TOK_STATIC:
4697 case '*':
4698 next();
4699 continue;
4700 default:
4701 break;
4703 break;
4705 n = -1;
4706 t1 = 0;
4707 if (tok != ']') {
4708 if (!local_stack || (storage & VT_STATIC))
4709 vpushi(expr_const());
4710 else {
4711 /* The length of a VLA (which can only happen with local_stack && !VT_STATIC)
4712 must always be evaluated, even under nocode_wanted,
4713 so that its size slot is initialized (e.g. under sizeof
4714 or typeof). */
4715 nocode_wanted = 0;
4716 gexpr();
4718 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
4719 n = vtop->c.i;
4720 if (n < 0)
4721 tcc_error("invalid array size");
4722 } else {
4723 if (!is_integer_btype(vtop->type.t & VT_BTYPE))
4724 tcc_error("size of variable length array should be an integer");
4725 n = 0;
4726 t1 = VT_VLA;
4729 skip(']');
4730 /* parse next post type */
4731 post_type(type, ad, storage, 0);
4733 if ((type->t & VT_BTYPE) == VT_FUNC)
4734 tcc_error("declaration of an array of functions");
4735 if ((type->t & VT_BTYPE) == VT_VOID
4736 || type_size(type, &unused_align) < 0)
4737 tcc_error("declaration of an array of incomplete type elements");
4739 t1 |= type->t & VT_VLA;
4741 if (t1 & VT_VLA) {
4742 if (n < 0)
4743 tcc_error("need explicit inner array size in VLAs");
4744 loc -= type_size(&int_type, &align);
4745 loc &= -align;
4746 n = loc;
4748 vla_runtime_type_size(type, &align);
4749 gen_op('*');
4750 vset(&int_type, VT_LOCAL|VT_LVAL, n);
4751 vswap();
4752 vstore();
4754 if (n != -1)
4755 vpop();
4756 nocode_wanted = saved_nocode_wanted;
4758 /* we push an anonymous symbol which will contain the array
4759 element type */
4760 s = sym_push(SYM_FIELD, type, 0, n);
4761 type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
4762 type->ref = s;
4764 return 1;
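/* Illustrative declarator suffixes (hypothetical declarations) handled by
   post_type() above:

       int f(int a, ...);     parameter list, FUNC_ELLIPSIS
       int g();               empty list, old-style FUNC_OLD prototype
       int a[10];             constant array size
       int v[n];              VLA: size evaluated at run time, VT_VLA set

   Arrays of functions and arrays of incomplete element types are rejected
   with the errors above. */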
4767 /* Parse a type declarator (except basic type), and return the type
4768 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4769 expected. 'type' should contain the basic type. 'ad' is the
4770 attribute definition of the basic type. It can be modified by
4771 type_decl(). If this (possibly abstract) declarator is a pointer chain
4772 it returns the innermost pointed to type (equals *type, but is a different
4773 pointer), otherwise returns type itself, that's used for recursive calls. */
4774 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td)
4776 CType *post, *ret;
4777 int qualifiers, storage;
4779 /* recursive type, remove storage bits first, apply them later again */
4780 storage = type->t & VT_STORAGE;
4781 type->t &= ~VT_STORAGE;
4782 post = ret = type;
4784 while (tok == '*') {
4785 qualifiers = 0;
4786 redo:
4787 next();
4788 switch(tok) {
4789 case TOK_CONST1:
4790 case TOK_CONST2:
4791 case TOK_CONST3:
4792 qualifiers |= VT_CONSTANT;
4793 goto redo;
4794 case TOK_VOLATILE1:
4795 case TOK_VOLATILE2:
4796 case TOK_VOLATILE3:
4797 qualifiers |= VT_VOLATILE;
4798 goto redo;
4799 case TOK_RESTRICT1:
4800 case TOK_RESTRICT2:
4801 case TOK_RESTRICT3:
4802 goto redo;
4803 /* XXX: clarify attribute handling */
4804 case TOK_ATTRIBUTE1:
4805 case TOK_ATTRIBUTE2:
4806 parse_attribute(ad);
4807 break;
4809 mk_pointer(type);
4810 type->t |= qualifiers;
4811 if (ret == type)
4812 /* innermost pointed to type is the one for the first derivation */
4813 ret = pointed_type(type);
4816 if (tok == '(') {
4817 /* This is possibly a parameter type list for abstract declarators
4818 ('int ()'), use post_type for testing this. */
4819 if (!post_type(type, ad, 0, td)) {
4820 /* It's not, so it's a nested declarator, and the post operations
4821 apply to the innermost pointed to type (if any). */
4822 /* XXX: this is not correct to modify 'ad' at this point, but
4823 the syntax is not clear */
4824 parse_attribute(ad);
4825 post = type_decl(type, ad, v, td);
4826 skip(')');
4827 } else
4828 goto abstract;
4829 } else if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
4830 /* type identifier */
4831 *v = tok;
4832 next();
4833 } else {
4834 abstract:
4835 if (!(td & TYPE_ABSTRACT))
4836 expect("identifier");
4837 *v = 0;
4839 post_type(post, ad, storage, 0);
4840 parse_attribute(ad);
4841 type->t |= storage;
4842 return ret;
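/* Illustrative declaration (hypothetical): type_decl() peels pointers with
   their qualifiers, then recurses into a parenthesized declarator before
   applying the outer suffixes:

       const char *(*handler)(int);

   The inner "(*handler)" is parsed first, then "(int)" is attached by the
   post_type() call at the end, giving "pointer to function(int) returning
   pointer to const char". */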
4845 /* indirection with full error checking and bound check */
4846 ST_FUNC void indir(void)
4848 if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
4849 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
4850 return;
4851 expect("pointer");
4853 if (vtop->r & VT_LVAL)
4854 gv(RC_INT);
4855 vtop->type = *pointed_type(&vtop->type);
4856 /* Arrays and functions are never lvalues */
4857 if (!(vtop->type.t & (VT_ARRAY | VT_VLA))
4858 && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
4859 vtop->r |= VT_LVAL;
4860 /* if bound checking, the referenced pointer must be checked */
4861 #ifdef CONFIG_TCC_BCHECK
4862 if (tcc_state->do_bounds_check)
4863 vtop->r |= VT_MUSTBOUND;
4864 #endif
4868 /* pass a parameter to a function and do type checking and casting */
4869 static void gfunc_param_typed(Sym *func, Sym *arg)
4871 int func_type;
4872 CType type;
4874 func_type = func->f.func_type;
4875 if (func_type == FUNC_OLD ||
4876 (func_type == FUNC_ELLIPSIS && arg == NULL)) {
4877 /* default casting : only need to convert float to double */
4878 if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
4879 gen_cast_s(VT_DOUBLE);
4880 } else if (vtop->type.t & VT_BITFIELD) {
4881 type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
4882 type.ref = vtop->type.ref;
4883 gen_cast(&type);
4884 } else if (vtop->r & VT_MUSTCAST) {
4885 force_charshort_cast();
4887 } else if (arg == NULL) {
4888 tcc_error("too many arguments to function");
4889 } else {
4890 type = arg->type;
4891 type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
4892 gen_assign_cast(&type);
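/* Illustrative call (hypothetical use of a variadic function): for old-style
   and variadic parameters only the default argument promotions apply, so

       printf("%f", 1.0f);

   passes the float as a double; bit-fields are converted to their underlying
   type and pending char/short casts are forced.  For prototyped parameters
   the value is assign-cast to the declared parameter type instead. */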
4896 /* parse an expression and return its type without any side effect. */
4897 static void expr_type(CType *type, void (*expr_fn)(void))
4899 nocode_wanted++;
4900 expr_fn();
4901 *type = vtop->type;
4902 vpop();
4903 nocode_wanted--;
4906 /* parse an expression of the form '(type)' or '(expr)' and return its
4907 type */
4908 static void parse_expr_type(CType *type)
4910 int n;
4911 AttributeDef ad;
4913 skip('(');
4914 if (parse_btype(type, &ad)) {
4915 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4916 } else {
4917 expr_type(type, gexpr);
4919 skip(')');
4922 static void parse_type(CType *type)
4924 AttributeDef ad;
4925 int n;
4927 if (!parse_btype(type, &ad)) {
4928 expect("type");
4930 type_decl(type, &ad, &n, TYPE_ABSTRACT);
4933 static void parse_builtin_params(int nc, const char *args)
4935 char c, sep = '(';
4936 CType t;
4937 if (nc)
4938 nocode_wanted++;
4939 next();
4940 while ((c = *args++)) {
4941 skip(sep);
4942 sep = ',';
4943 switch (c) {
4944 case 'e': expr_eq(); continue;
4945 case 't': parse_type(&t); vpush(&t); continue;
4946 default: tcc_error("internal error"); break;
4949 skip(')');
4950 if (nc)
4951 nocode_wanted--;
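/* The format string consumed above is a small per-builtin argument spec:
   'e' parses one assignment expression, 't' parses a type name.  For example,
   as used by the callers below, "ee" matches __builtin_expect(expr, expr)
   and "tt" matches __builtin_types_compatible_p(type, type). */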
4954 ST_FUNC void unary(void)
4956 int n, t, align, size, r, sizeof_caller;
4957 CType type;
4958 Sym *s;
4959 AttributeDef ad;
4961 sizeof_caller = in_sizeof;
4962 in_sizeof = 0;
4963 type.ref = NULL;
4964 /* XXX: GCC 2.95.3 does not generate a jump table for this switch although it
4965 would be better here */
4966 tok_next:
4967 switch(tok) {
4968 case TOK_EXTENSION:
4969 next();
4970 goto tok_next;
4971 case TOK_LCHAR:
4972 #ifdef TCC_TARGET_PE
4973 t = VT_SHORT|VT_UNSIGNED;
4974 goto push_tokc;
4975 #endif
4976 case TOK_CINT:
4977 case TOK_CCHAR:
4978 t = VT_INT;
4979 push_tokc:
4980 type.t = t;
4981 vsetc(&type, VT_CONST, &tokc);
4982 next();
4983 break;
4984 case TOK_CUINT:
4985 t = VT_INT | VT_UNSIGNED;
4986 goto push_tokc;
4987 case TOK_CLLONG:
4988 t = VT_LLONG;
4989 goto push_tokc;
4990 case TOK_CULLONG:
4991 t = VT_LLONG | VT_UNSIGNED;
4992 goto push_tokc;
4993 case TOK_CFLOAT:
4994 t = VT_FLOAT;
4995 goto push_tokc;
4996 case TOK_CDOUBLE:
4997 t = VT_DOUBLE;
4998 goto push_tokc;
4999 case TOK_CLDOUBLE:
5000 t = VT_LDOUBLE;
5001 goto push_tokc;
5002 case TOK_CLONG:
5003 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG;
5004 goto push_tokc;
5005 case TOK_CULONG:
5006 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG | VT_UNSIGNED;
5007 goto push_tokc;
5008 case TOK___FUNCTION__:
5009 if (!gnu_ext)
5010 goto tok_identifier;
5011 /* fall thru */
5012 case TOK___FUNC__:
5014 void *ptr;
5015 int len;
5016 /* special function name identifier */
5017 len = strlen(funcname) + 1;
5018 /* generate char[len] type */
5019 type.t = VT_BYTE;
5020 mk_pointer(&type);
5021 type.t |= VT_ARRAY;
5022 type.ref->c = len;
5023 vpush_ref(&type, data_section, data_section->data_offset, len);
5024 if (!NODATA_WANTED) {
5025 ptr = section_ptr_add(data_section, len);
5026 memcpy(ptr, funcname, len);
5028 next();
5030 break;
5031 case TOK_LSTR:
5032 #ifdef TCC_TARGET_PE
5033 t = VT_SHORT | VT_UNSIGNED;
5034 #else
5035 t = VT_INT;
5036 #endif
5037 goto str_init;
5038 case TOK_STR:
5039 /* string parsing */
5040 t = VT_BYTE;
5041 if (tcc_state->char_is_unsigned)
5042 t = VT_BYTE | VT_UNSIGNED;
5043 str_init:
5044 if (tcc_state->warn_write_strings)
5045 t |= VT_CONSTANT;
5046 type.t = t;
5047 mk_pointer(&type);
5048 type.t |= VT_ARRAY;
5049 memset(&ad, 0, sizeof(AttributeDef));
5050 decl_initializer_alloc(&type, &ad, VT_CONST, 2, 0, 0);
5051 break;
5052 case '(':
5053 next();
5054 /* cast ? */
5055 if (parse_btype(&type, &ad)) {
5056 type_decl(&type, &ad, &n, TYPE_ABSTRACT);
5057 skip(')');
5058 /* check ISOC99 compound literal */
5059 if (tok == '{') {
5060 /* data is allocated locally by default */
5061 if (global_expr)
5062 r = VT_CONST;
5063 else
5064 r = VT_LOCAL;
5065 /* all except arrays are lvalues */
5066 if (!(type.t & VT_ARRAY))
5067 r |= VT_LVAL;
5068 memset(&ad, 0, sizeof(AttributeDef));
5069 decl_initializer_alloc(&type, &ad, r, 1, 0, 0);
5070 } else {
5071 if (sizeof_caller) {
5072 vpush(&type);
5073 return;
5075 unary();
5076 gen_cast(&type);
5078 } else if (tok == '{') {
5079 int saved_nocode_wanted = nocode_wanted;
5080 if (const_wanted && !(nocode_wanted & unevalmask))
5081 tcc_error("expected constant");
5082 /* save all registers */
5083 save_regs(0);
5084 /* statement expression : we do not accept break/continue
5085 inside as GCC does. We do retain the nocode_wanted state,
5086 as statement expressions can't ever be entered from the
5087 outside, so any reactivation of code emission (from labels
5088 or loop heads) can be disabled again after the end of it. */
5089 block(1);
5090 nocode_wanted = saved_nocode_wanted;
5091 skip(')');
5092 } else {
5093 gexpr();
5094 skip(')');
5096 break;
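/* Illustrative inputs (hypothetical snippets) for the three '(' forms above:

       (unsigned char)x            cast: type parsed, then unary() + gen_cast()
       (struct point){1, 2}        ISO C99 compound literal, allocated locally
                                   unless global_expr forces static storage
       ({ int t = f(); t * t; })   GNU statement expression, parsed by block(1)

   struct point and f() are example names only. */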
5097 case '*':
5098 next();
5099 unary();
5100 indir();
5101 break;
5102 case '&':
5103 next();
5104 unary();
5105 /* function names must be treated as function pointers,
5106 except for unary '&' and sizeof. Since we consider that
5107 functions are not lvalues, we only have to handle it
5108 there and in function calls. */
5109 /* arrays can also be used although they are not lvalues */
5110 if ((vtop->type.t & VT_BTYPE) != VT_FUNC &&
5111 !(vtop->type.t & VT_ARRAY))
5112 test_lvalue();
5113 if (vtop->sym)
5114 vtop->sym->a.addrtaken = 1;
5115 mk_pointer(&vtop->type);
5116 gaddrof();
5117 break;
5118 case '!':
5119 next();
5120 unary();
5121 gen_test_zero(TOK_EQ);
5122 break;
5123 case '~':
5124 next();
5125 unary();
5126 vpushi(-1);
5127 gen_op('^');
5128 break;
5129 case '+':
5130 next();
5131 unary();
5132 if ((vtop->type.t & VT_BTYPE) == VT_PTR)
5133 tcc_error("pointer not accepted for unary plus");
5134 /* In order to force a cast, we add zero, except for floating point
5135 where we really need a noop (otherwise -0.0 will be transformed
5136 into +0.0). */
5137 if (!is_float(vtop->type.t)) {
5138 vpushi(0);
5139 gen_op('+');
5141 break;
5142 case TOK_SIZEOF:
5143 case TOK_ALIGNOF1:
5144 case TOK_ALIGNOF2:
5145 case TOK_ALIGNOF3:
5146 t = tok;
5147 next();
5148 in_sizeof++;
5149 expr_type(&type, unary); /* unary() resets in_sizeof to 0 */
5150 s = NULL;
5151 if (vtop[1].r & VT_SYM)
5152 s = vtop[1].sym; /* hack: accessing previous vtop */
5153 size = type_size(&type, &align);
5154 if (s && s->a.aligned)
5155 align = 1 << (s->a.aligned - 1);
5156 if (t == TOK_SIZEOF) {
5157 if (!(type.t & VT_VLA)) {
5158 if (size < 0)
5159 tcc_error("sizeof applied to an incomplete type");
5160 vpushs(size);
5161 } else {
5162 vla_runtime_type_size(&type, &align);
5164 } else {
5165 vpushs(align);
5167 vtop->type.t |= VT_UNSIGNED;
5168 break;
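/* Illustrative uses (hypothetical operands): for a complete, non-VLA type the
   size or alignment is pushed as an unsigned constant, e.g. sizeof(long) or
   _Alignof(double).  For a VLA such as

       int n = 8; int a[n];  ...  sizeof a

   the size is computed at run time via vla_runtime_type_size(), and an
   __attribute__((aligned(N))) on the symbol overrides the natural alignment
   reported for _Alignof. */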
5170 case TOK_builtin_expect:
5171 /* __builtin_expect is a no-op for now */
5172 parse_builtin_params(0, "ee");
5173 vpop();
5174 break;
5175 case TOK_builtin_types_compatible_p:
5176 parse_builtin_params(0, "tt");
5177 vtop[-1].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
5178 vtop[0].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
5179 n = is_compatible_types(&vtop[-1].type, &vtop[0].type);
5180 vtop -= 2;
5181 vpushi(n);
5182 break;
5183 case TOK_builtin_choose_expr:
5185 int64_t c;
5186 next();
5187 skip('(');
5188 c = expr_const64();
5189 skip(',');
5190 if (!c) {
5191 nocode_wanted++;
5193 expr_eq();
5194 if (!c) {
5195 vpop();
5196 nocode_wanted--;
5198 skip(',');
5199 if (c) {
5200 nocode_wanted++;
5202 expr_eq();
5203 if (c) {
5204 vpop();
5205 nocode_wanted--;
5207 skip(')');
5209 break;
5210 case TOK_builtin_constant_p:
5211 parse_builtin_params(1, "e");
5212 n = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
5213 vtop--;
5214 vpushi(n);
5215 break;
5216 case TOK_builtin_frame_address:
5217 case TOK_builtin_return_address:
5219 int tok1 = tok;
5220 int level;
5221 next();
5222 skip('(');
5223 if (tok != TOK_CINT) {
5224 tcc_error("%s only takes positive integers",
5225 tok1 == TOK_builtin_return_address ?
5226 "__builtin_return_address" :
5227 "__builtin_frame_address");
5229 level = (uint32_t)tokc.i;
5230 next();
5231 skip(')');
5232 type.t = VT_VOID;
5233 mk_pointer(&type);
5234 vset(&type, VT_LOCAL, 0); /* local frame */
5235 while (level--) {
5236 mk_pointer(&vtop->type);
5237 indir(); /* -> parent frame */
5239 if (tok1 == TOK_builtin_return_address) {
5240 // assume return address is just above frame pointer on stack
5241 vpushi(PTR_SIZE);
5242 gen_op('+');
5243 mk_pointer(&vtop->type);
5244 indir();
5247 break;
5248 #ifdef TCC_TARGET_RISCV64
5249 case TOK_builtin_va_start:
5250 parse_builtin_params(0, "ee");
5251 r = vtop->r & VT_VALMASK;
5252 if (r == VT_LLOCAL)
5253 r = VT_LOCAL;
5254 if (r != VT_LOCAL)
5255 tcc_error("__builtin_va_start expects a local variable");
5256 gen_va_start();
5257 vstore();
5258 break;
5259 #endif
5260 #ifdef TCC_TARGET_X86_64
5261 #ifdef TCC_TARGET_PE
5262 case TOK_builtin_va_start:
5263 parse_builtin_params(0, "ee");
5264 r = vtop->r & VT_VALMASK;
5265 if (r == VT_LLOCAL)
5266 r = VT_LOCAL;
5267 if (r != VT_LOCAL)
5268 tcc_error("__builtin_va_start expects a local variable");
5269 vtop->r = r;
5270 vtop->type = char_pointer_type;
5271 vtop->c.i += 8;
5272 vstore();
5273 break;
5274 #else
5275 case TOK_builtin_va_arg_types:
5276 parse_builtin_params(0, "t");
5277 vpushi(classify_x86_64_va_arg(&vtop->type));
5278 vswap();
5279 vpop();
5280 break;
5281 #endif
5282 #endif
5284 #ifdef TCC_TARGET_ARM64
5285 case TOK___va_start: {
5286 parse_builtin_params(0, "ee");
5287 //xx check types
5288 gen_va_start();
5289 vpushi(0);
5290 vtop->type.t = VT_VOID;
5291 break;
5293 case TOK___va_arg: {
5294 parse_builtin_params(0, "et");
5295 type = vtop->type;
5296 vpop();
5297 //xx check types
5298 gen_va_arg(&type);
5299 vtop->type = type;
5300 break;
5302 case TOK___arm64_clear_cache: {
5303 parse_builtin_params(0, "ee");
5304 gen_clear_cache();
5305 vpushi(0);
5306 vtop->type.t = VT_VOID;
5307 break;
5309 #endif
5310 /* pre operations */
5311 case TOK_INC:
5312 case TOK_DEC:
5313 t = tok;
5314 next();
5315 unary();
5316 inc(0, t);
5317 break;
5318 case '-':
5319 next();
5320 unary();
5321 t = vtop->type.t & VT_BTYPE;
5322 if (is_float(t)) {
5323 /* In IEEE negate(x) isn't subtract(0,x), but rather
5324 subtract(-0, x). */
5325 vpush(&vtop->type);
5326 if (t == VT_FLOAT)
5327 vtop->c.f = -1.0 * 0.0;
5328 else if (t == VT_DOUBLE)
5329 vtop->c.d = -1.0 * 0.0;
5330 else
5331 vtop->c.ld = -1.0 * 0.0;
5332 } else
5333 vpushi(0);
5334 vswap();
5335 gen_op('-');
5336 break;
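/* Note on the float path above: IEEE negation is implemented as
   (-0.0) - x rather than 0 - x, because 0.0 - 0.0 yields +0.0 while
   -(+0.0) must be -0.0.  A hypothetical check:

       double z = 0.0;
       ... -z ...        compares equal to 0.0 but has its sign bit set. */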
5337 case TOK_LAND:
5338 if (!gnu_ext)
5339 goto tok_identifier;
5340 next();
5341 /* allow taking the address of a label */
5342 if (tok < TOK_UIDENT)
5343 expect("label identifier");
5344 s = label_find(tok);
5345 if (!s) {
5346 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
5347 } else {
5348 if (s->r == LABEL_DECLARED)
5349 s->r = LABEL_FORWARD;
5351 if (!s->type.t) {
5352 s->type.t = VT_VOID;
5353 mk_pointer(&s->type);
5354 s->type.t |= VT_STATIC;
5356 vpushsym(&s->type, s);
5357 next();
5358 break;
5360 case TOK_GENERIC:
5362 CType controlling_type;
5363 int has_default = 0;
5364 int has_match = 0;
5365 int learn = 0;
5366 TokenString *str = NULL;
5367 int saved_const_wanted = const_wanted;
5369 next();
5370 skip('(');
5371 const_wanted = 0;
5372 expr_type(&controlling_type, expr_eq);
5373 controlling_type.t &= ~(VT_CONSTANT | VT_VOLATILE | VT_ARRAY);
5374 if ((controlling_type.t & VT_BTYPE) == VT_FUNC)
5375 mk_pointer(&controlling_type);
5376 const_wanted = saved_const_wanted;
5377 for (;;) {
5378 learn = 0;
5379 skip(',');
5380 if (tok == TOK_DEFAULT) {
5381 if (has_default)
5382 tcc_error("too many 'default'");
5383 has_default = 1;
5384 if (!has_match)
5385 learn = 1;
5386 next();
5387 } else {
5388 AttributeDef ad_tmp;
5389 int itmp;
5390 CType cur_type;
5392 in_generic++;
5393 parse_btype(&cur_type, &ad_tmp);
5394 in_generic--;
5396 type_decl(&cur_type, &ad_tmp, &itmp, TYPE_ABSTRACT);
5397 if (compare_types(&controlling_type, &cur_type, 0)) {
5398 if (has_match) {
5399 tcc_error("type match twice");
5401 has_match = 1;
5402 learn = 1;
5405 skip(':');
5406 if (learn) {
5407 if (str)
5408 tok_str_free(str);
5409 skip_or_save_block(&str);
5410 } else {
5411 skip_or_save_block(NULL);
5413 if (tok == ')')
5414 break;
5416 if (!str) {
5417 char buf[60];
5418 type_to_str(buf, sizeof buf, &controlling_type, NULL);
5419 tcc_error("type '%s' does not match any association", buf);
5421 begin_macro(str, 1);
5422 next();
5423 expr_eq();
5424 if (tok != TOK_EOF)
5425 expect(",");
5426 end_macro();
5427 next();
5428 break;
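/* Illustrative use (hypothetical macro) of the C11 _Generic handling above:

       #define type_name(x) _Generic((x), int: "int", \
                                          float: "float", \
                                          default: "other")

   The controlling type is stripped of const/volatile and array-ness, and a
   function decays to a function pointer, before being compared against each
   association with compare_types(). */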
5430 // special qnan , snan and infinity values
5431 case TOK___NAN__:
5432 n = 0x7fc00000;
5433 special_math_val:
5434 vpushi(n);
5435 vtop->type.t = VT_FLOAT;
5436 next();
5437 break;
5438 case TOK___SNAN__:
5439 n = 0x7f800001;
5440 goto special_math_val;
5441 case TOK___INF__:
5442 n = 0x7f800000;
5443 goto special_math_val;
5445 default:
5446 tok_identifier:
5447 t = tok;
5448 next();
5449 if (t < TOK_UIDENT)
5450 expect("identifier");
5451 s = sym_find(t);
5452 if (!s || IS_ASM_SYM(s)) {
5453 const char *name = get_tok_str(t, NULL);
5454 if (tok != '(')
5455 tcc_error("'%s' undeclared", name);
5456 /* for simple function calls, we tolerate an undeclared
5457 external reference to an int() function */
5458 if (tcc_state->warn_implicit_function_declaration
5459 #ifdef TCC_TARGET_PE
5460 /* people must be warned about using undeclared WINAPI functions
5461 (which usually start with uppercase letter) */
5462 || (name[0] >= 'A' && name[0] <= 'Z')
5463 #endif
5465 tcc_warning("implicit declaration of function '%s'", name);
5466 s = external_global_sym(t, &func_old_type);
5469 r = s->r;
5470 /* A symbol that has a register is a local register variable,
5471 which starts out as VT_LOCAL value. */
5472 if ((r & VT_VALMASK) < VT_CONST)
5473 r = (r & ~VT_VALMASK) | VT_LOCAL;
5475 vset(&s->type, r, s->c);
5476 /* Point to s as backpointer (even without r&VT_SYM).
5477 Will be used by at least the x86 inline asm parser for
5478 regvars. */
5479 vtop->sym = s;
5481 if (r & VT_SYM) {
5482 vtop->c.i = 0;
5483 } else if (r == VT_CONST && IS_ENUM_VAL(s->type.t)) {
5484 vtop->c.i = s->enum_val;
5486 break;
5489 /* post operations */
5490 while (1) {
5491 if (tok == TOK_INC || tok == TOK_DEC) {
5492 inc(1, tok);
5493 next();
5494 } else if (tok == '.' || tok == TOK_ARROW || tok == TOK_CDOUBLE) {
5495 int qualifiers, cumofs = 0;
5496 /* field */
5497 if (tok == TOK_ARROW)
5498 indir();
5499 qualifiers = vtop->type.t & (VT_CONSTANT | VT_VOLATILE);
5500 test_lvalue();
5501 gaddrof();
5502 /* expect pointer on structure */
5503 if ((vtop->type.t & VT_BTYPE) != VT_STRUCT)
5504 expect("struct or union");
5505 if (tok == TOK_CDOUBLE)
5506 expect("field name");
5507 next();
5508 if (tok == TOK_CINT || tok == TOK_CUINT)
5509 expect("field name");
5510 s = find_field(&vtop->type, tok, &cumofs);
5511 if (!s)
5512 tcc_error("field not found: %s", get_tok_str(tok & ~SYM_FIELD, &tokc));
5513 /* add field offset to pointer */
5514 vtop->type = char_pointer_type; /* change type to 'char *' */
5515 vpushi(cumofs + s->c);
5516 gen_op('+');
5517 /* change type to field type, and set to lvalue */
5518 vtop->type = s->type;
5519 vtop->type.t |= qualifiers;
5520 /* an array is never an lvalue */
5521 if (!(vtop->type.t & VT_ARRAY)) {
5522 vtop->r |= VT_LVAL;
5523 #ifdef CONFIG_TCC_BCHECK
5524 /* if bound checking, the referenced pointer must be checked */
5525 if (tcc_state->do_bounds_check && (vtop->r & VT_VALMASK) != VT_LOCAL)
5526 vtop->r |= VT_MUSTBOUND;
5527 #endif
5529 next();
5530 } else if (tok == '[') {
5531 next();
5532 gexpr();
5533 gen_op('+');
5534 indir();
5535 skip(']');
5536 } else if (tok == '(') {
5537 SValue ret;
5538 Sym *sa;
5539 int nb_args, ret_nregs, ret_align, regsize, variadic;
5541 #ifdef CONFIG_TCC_BCHECK
5542 if (tcc_state->do_bounds_check && (vtop->r & VT_SYM) && vtop->sym->v == TOK_alloca) {
5543 addr_t *bounds_ptr;
5545 bounds_ptr = section_ptr_add(lbounds_section, 2 * sizeof(addr_t));
5546 bounds_ptr[0] = 1; /* marks alloca/vla used */
5547 bounds_ptr[1] = 0;
5549 #endif
5550 /* function call */
5551 if ((vtop->type.t & VT_BTYPE) != VT_FUNC) {
5552 /* pointer test (no array accepted) */
5553 if ((vtop->type.t & (VT_BTYPE | VT_ARRAY)) == VT_PTR) {
5554 vtop->type = *pointed_type(&vtop->type);
5555 if ((vtop->type.t & VT_BTYPE) != VT_FUNC)
5556 goto error_func;
5557 } else {
5558 error_func:
5559 expect("function pointer");
5561 } else {
5562 vtop->r &= ~VT_LVAL; /* no lvalue */
5564 /* get return type */
5565 s = vtop->type.ref;
5566 next();
5567 sa = s->next; /* first parameter */
5568 nb_args = regsize = 0;
5569 ret.r2 = VT_CONST;
5570 /* compute first implicit argument if a structure is returned */
5571 if ((s->type.t & VT_BTYPE) == VT_STRUCT) {
5572 variadic = (s->f.func_type == FUNC_ELLIPSIS);
5573 ret_nregs = gfunc_sret(&s->type, variadic, &ret.type,
5574 &ret_align, &regsize);
5575 if (ret_nregs <= 0) {
5576 /* get some space for the returned structure */
5577 size = type_size(&s->type, &align);
5578 #ifdef TCC_TARGET_ARM64
5579 /* On arm64, a small struct is returned in registers.
5580 It is much easier to write it to memory if we know
5581 that we are allowed to write some extra bytes, so
5582 round the allocated space up to a power of 2: */
5583 if (size < 16)
5584 while (size & (size - 1))
5585 size = (size | (size - 1)) + 1;
5586 #endif
5587 loc = (loc - size) & -align;
5588 ret.type = s->type;
5589 ret.r = VT_LOCAL | VT_LVAL;
5590 /* pass it as 'int' to avoid structure arg passing
5591 problems */
5592 vseti(VT_LOCAL, loc);
5593 ret.c = vtop->c;
5594 if (ret_nregs < 0)
5595 vtop--;
5596 else
5597 nb_args++;
5599 } else {
5600 ret_nregs = 1;
5601 ret.type = s->type;
5604 if (ret_nregs > 0) {
5605 /* return in register */
5606 ret.c.i = 0;
5607 PUT_R_RET(&ret, ret.type.t);
5609 if (tok != ')') {
5610 for(;;) {
5611 expr_eq();
5612 gfunc_param_typed(s, sa);
5613 nb_args++;
5614 if (sa)
5615 sa = sa->next;
5616 if (tok == ')')
5617 break;
5618 skip(',');
5621 if (sa)
5622 tcc_error("too few arguments to function");
5623 skip(')');
5624 gfunc_call(nb_args);
5626 if (ret_nregs < 0) {
5627 vsetc(&ret.type, ret.r, &ret.c);
5628 #ifdef TCC_TARGET_RISCV64
5629 arch_transfer_ret_regs(1);
5630 #endif
5631 } else {
5632 /* return value */
5633 for (r = ret.r + ret_nregs + !ret_nregs; r-- > ret.r;) {
5634 vsetc(&ret.type, r, &ret.c);
5635 vtop->r2 = ret.r2; /* Loop only happens when r2 is VT_CONST */
5638 /* handle packed struct return */
5639 if (((s->type.t & VT_BTYPE) == VT_STRUCT) && ret_nregs) {
5640 int addr, offset;
5642 size = type_size(&s->type, &align);
5643 /* We're writing whole regs often, make sure there's enough
5644 space. Assume register size is power of 2. */
5645 if (regsize > align)
5646 align = regsize;
5647 loc = (loc - size) & -align;
5648 addr = loc;
5649 offset = 0;
5650 for (;;) {
5651 vset(&ret.type, VT_LOCAL | VT_LVAL, addr + offset);
5652 vswap();
5653 vstore();
5654 vtop--;
5655 if (--ret_nregs == 0)
5656 break;
5657 offset += regsize;
5659 vset(&s->type, VT_LOCAL | VT_LVAL, addr);
5662 /* Promote char/short return values. This matters only
5663 for calling functions that were not compiled by TCC and
5664 only on some architectures. For those where it doesn't
5665 matter we expect things to be already promoted to int,
5666 but not larger. */
5667 t = s->type.t & VT_BTYPE;
5668 if (t == VT_BYTE || t == VT_SHORT || t == VT_BOOL) {
5669 #ifdef PROMOTE_RET
5670 vtop->r |= BFVAL(VT_MUSTCAST, 1);
5671 #else
5672 vtop->type.t = VT_INT;
5673 #endif
5676 if (s->f.func_noreturn)
5677 CODE_OFF();
5678 } else {
5679 break;
5684 ST_FUNC void expr_prod(void)
5686 int t;
5688 unary();
5689 while (tok == '*' || tok == '/' || tok == '%') {
5690 t = tok;
5691 next();
5692 unary();
5693 gen_op(t);
5697 ST_FUNC void expr_sum(void)
5699 int t;
5701 expr_prod();
5702 while (tok == '+' || tok == '-') {
5703 t = tok;
5704 next();
5705 expr_prod();
5706 gen_op(t);
5710 static void expr_shift(void)
5712 int t;
5714 expr_sum();
5715 while (tok == TOK_SHL || tok == TOK_SAR) {
5716 t = tok;
5717 next();
5718 expr_sum();
5719 gen_op(t);
5723 static void expr_cmp(void)
5725 int t;
5727 expr_shift();
5728 while ((tok >= TOK_ULE && tok <= TOK_GT) ||
5729 tok == TOK_ULT || tok == TOK_UGE) {
5730 t = tok;
5731 next();
5732 expr_shift();
5733 gen_op(t);
5737 static void expr_cmpeq(void)
5739 int t;
5741 expr_cmp();
5742 while (tok == TOK_EQ || tok == TOK_NE) {
5743 t = tok;
5744 next();
5745 expr_cmp();
5746 gen_op(t);
5750 static void expr_and(void)
5752 expr_cmpeq();
5753 while (tok == '&') {
5754 next();
5755 expr_cmpeq();
5756 gen_op('&');
5760 static void expr_xor(void)
5762 expr_and();
5763 while (tok == '^') {
5764 next();
5765 expr_and();
5766 gen_op('^');
5770 static void expr_or(void)
5772 expr_xor();
5773 while (tok == '|') {
5774 next();
5775 expr_xor();
5776 gen_op('|');
5780 static int condition_3way(void);
5782 static void expr_landor(void(*e_fn)(void), int e_op, int i)
5784 int t = 0, cc = 1, f = 0, c;
5785 for(;;) {
5786 c = f ? i : condition_3way();
5787 if (c < 0) {
5788 save_regs(1), cc = 0;
5789 } else if (c != i) {
5790 nocode_wanted++, f = 1;
5792 if (tok != e_op) {
5793 if (cc || f) {
5794 vpop();
5795 vpushi(i ^ f);
5796 gsym(t);
5797 nocode_wanted -= f;
5798 } else {
5799 gvtst_set(i, t);
5801 break;
5803 if (c < 0)
5804 t = gvtst(i, t);
5805 else
5806 vpop();
5807 next();
5808 e_fn();
5812 static void expr_land(void)
5814 expr_or();
5815 if (tok == TOK_LAND)
5816 expr_landor(expr_or, TOK_LAND, 1);
5819 static void expr_lor(void)
5821 expr_land();
5822 if (tok == TOK_LOR)
5823 expr_landor(expr_land, TOK_LOR, 0);
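/* Short-circuit evaluation: expr_landor() folds a chain of && or || by
   testing each operand with condition_3way().  For a hypothetical

       p && p->next

   the right-hand side is only evaluated when the left side is not statically
   false; when an operand is a compile-time constant the dead side is parsed
   under nocode_wanted so no code is emitted for it. */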
5826 /* Assuming vtop is a value used in a conditional context
5827 (i.e. compared with zero) return 0 if it's false, 1 if
5828 true and -1 if it can't be statically determined. */
5829 static int condition_3way(void)
5831 int c = -1;
5832 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
5833 (!(vtop->r & VT_SYM) || !vtop->sym->a.weak)) {
5834 vdup();
5835 gen_cast_s(VT_BOOL);
5836 c = vtop->c.i;
5837 vpop();
5839 return c;
5842 static int is_cond_bool(SValue *sv)
5844 if ((sv->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST
5845 && (sv->type.t & VT_BTYPE) == VT_INT)
5846 return (unsigned)sv->c.i < 2;
5847 if (sv->r == VT_CMP)
5848 return 1;
5849 return 0;
5852 static void expr_cond(void)
5854 int tt, u, r1, r2, rc, t1, t2, bt1, bt2, islv, c, g;
5855 SValue sv;
5856 CType type, type1, type2;
5857 int ncw_prev;
5859 expr_lor();
5860 if (tok == '?') {
5861 next();
5862 c = condition_3way();
5863 g = (tok == ':' && gnu_ext);
5864 tt = 0;
5865 if (!g) {
5866 if (c < 0) {
5867 save_regs(1);
5868 tt = gvtst(1, 0);
5869 } else {
5870 vpop();
5872 } else if (c < 0) {
5873 /* needed to avoid having different registers saved in
5874 each branch */
5875 save_regs(1);
5876 gv_dup();
5877 tt = gvtst(0, 0);
5880 ncw_prev = nocode_wanted;
5881 if (1) {
5882 if (c == 0)
5883 nocode_wanted++;
5884 if (!g)
5885 gexpr();
5887 if (c < 0 && vtop->r == VT_CMP) {
5888 t1 = gvtst(0, 0);
5889 vpushi(0);
5890 gvtst_set(0, t1);
5893 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
5894 mk_pointer(&vtop->type);
5895 type1 = vtop->type;
5896 sv = *vtop; /* save value to handle it later */
5897 vtop--; /* no vpop so that FP stack is not flushed */
5899 if (g) {
5900 u = tt;
5901 } else if (c < 0) {
5902 u = gjmp(0);
5903 gsym(tt);
5904 } else
5905 u = 0;
5907 nocode_wanted = ncw_prev;
5908 if (c == 1)
5909 nocode_wanted++;
5910 skip(':');
5911 expr_cond();
5913 if (c < 0 && is_cond_bool(vtop) && is_cond_bool(&sv)) {
5914 if (sv.r == VT_CMP) {
5915 t1 = sv.jtrue;
5916 t2 = u;
5917 } else {
5918 t1 = gvtst(0, 0);
5919 t2 = gjmp(0);
5920 gsym(u);
5921 vpushv(&sv);
5923 gvtst_set(0, t1);
5924 gvtst_set(1, t2);
5925 nocode_wanted = ncw_prev;
5926 // tcc_warning("two conditions expr_cond");
5927 return;
5930 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
5931 mk_pointer(&vtop->type);
5932 type2=vtop->type;
5933 t1 = type1.t;
5934 bt1 = t1 & VT_BTYPE;
5935 t2 = type2.t;
5936 bt2 = t2 & VT_BTYPE;
5937 type.ref = NULL;
5939 /* cast operands to correct type according to ISOC rules */
5940 if (bt1 == VT_VOID || bt2 == VT_VOID) {
5941 type.t = VT_VOID; /* NOTE: as an extension, we accept void on only one side */
5942 } else if (is_float(bt1) || is_float(bt2)) {
5943 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
5944 type.t = VT_LDOUBLE;
5946 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
5947 type.t = VT_DOUBLE;
5948 } else {
5949 type.t = VT_FLOAT;
5951 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
5952 /* cast to biggest op */
5953 type.t = VT_LLONG | VT_LONG;
5954 if (bt1 == VT_LLONG)
5955 type.t &= t1;
5956 if (bt2 == VT_LLONG)
5957 type.t &= t2;
5958 /* convert to unsigned if it does not fit in a long long */
5959 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
5960 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
5961 type.t |= VT_UNSIGNED;
5962 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
5963 /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
5964 /* If one is a null ptr constant the result type
5965 is the other. */
5966 if (is_null_pointer (vtop)) type = type1;
5967 else if (is_null_pointer (&sv)) type = type2;
5968 else if (bt1 != bt2)
5969 tcc_error("incompatible types in conditional expressions");
5970 else {
5971 CType *pt1 = pointed_type(&type1);
5972 CType *pt2 = pointed_type(&type2);
5973 int pbt1 = pt1->t & VT_BTYPE;
5974 int pbt2 = pt2->t & VT_BTYPE;
5975 int newquals, copied = 0;
5976 /* pointers to void get preferred, otherwise the
5977 pointed to types minus qualifs should be compatible */
5978 type = (pbt1 == VT_VOID) ? type1 : type2;
5979 if (pbt1 != VT_VOID && pbt2 != VT_VOID) {
5980 if(!compare_types(pt1, pt2, 1/*unqualif*/))
5981 tcc_warning("pointer type mismatch in conditional expression\n");
5983 /* combine qualifs */
5984 newquals = ((pt1->t | pt2->t) & (VT_CONSTANT | VT_VOLATILE));
5985 if ((~pointed_type(&type)->t & (VT_CONSTANT | VT_VOLATILE))
5986 & newquals)
5988 /* copy the pointer target symbol */
5989 type.ref = sym_push(SYM_FIELD, &type.ref->type,
5990 0, type.ref->c);
5991 copied = 1;
5992 pointed_type(&type)->t |= newquals;
5994 /* pointers to incomplete arrays get converted to
5995 pointers to completed ones if possible */
5996 if (pt1->t & VT_ARRAY
5997 && pt2->t & VT_ARRAY
5998 && pointed_type(&type)->ref->c < 0
5999 && (pt1->ref->c > 0 || pt2->ref->c > 0))
6001 if (!copied)
6002 type.ref = sym_push(SYM_FIELD, &type.ref->type,
6003 0, type.ref->c);
6004 pointed_type(&type)->ref =
6005 sym_push(SYM_FIELD, &pointed_type(&type)->ref->type,
6006 0, pointed_type(&type)->ref->c);
6007 pointed_type(&type)->ref->c =
6008 0 < pt1->ref->c ? pt1->ref->c : pt2->ref->c;
6011 } else if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
6012 /* XXX: test structure compatibility */
6013 type = bt1 == VT_STRUCT ? type1 : type2;
6014 } else {
6015 /* integer operations */
6016 type.t = VT_INT | (VT_LONG & (t1 | t2));
6017 /* convert to unsigned if it does not fit in an integer */
6018 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
6019 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
6020 type.t |= VT_UNSIGNED;
6022 /* keep struct lvalues by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
6023 that `(expr ? a : b).mem` does not error with "lvalue expected" */
6024 islv = (vtop->r & VT_LVAL) && (sv.r & VT_LVAL) && VT_STRUCT == (type.t & VT_BTYPE);
6026 /* now we convert second operand */
6027 if (c != 1) {
6028 gen_cast(&type);
6029 if (islv) {
6030 mk_pointer(&vtop->type);
6031 gaddrof();
6032 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
6033 gaddrof();
6036 rc = RC_TYPE(type.t);
6037 /* for long longs, we use fixed registers to avoid having
6038 to handle a complicated move */
6039 if (USING_TWO_WORDS(type.t))
6040 rc = RC_RET(type.t);
6042 tt = r2 = 0;
6043 if (c < 0) {
6044 r2 = gv(rc);
6045 tt = gjmp(0);
6047 gsym(u);
6048 nocode_wanted = ncw_prev;
6050 /* this is horrible, but we must also convert first
6051 operand */
6052 if (c != 0) {
6053 *vtop = sv;
6054 gen_cast(&type);
6055 if (islv) {
6056 mk_pointer(&vtop->type);
6057 gaddrof();
6058 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
6059 gaddrof();
6062 if (c < 0) {
6063 r1 = gv(rc);
6064 move_reg(r2, r1, islv ? VT_PTR : type.t);
6065 vtop->r = r2;
6066 gsym(tt);
6069 if (islv)
6070 indir();
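/* Illustrative results (hypothetical operands) of the conversion rules applied
   above for the conditional operator:

       c ? 1 : 2.0       -> double
       c ? 1u : -1LL     -> long long (made unsigned only if one side is an
                            unsigned 64-bit value)
       c ? 0 : p         -> type of p (a null pointer constant adopts the
                            other pointer type)
       c ? s1 : s2       -> a struct lvalue is preserved by taking addresses
                            and dereferencing the selected pointer */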
6075 static void expr_eq(void)
6077 int t;
6079 expr_cond();
6080 if (tok == '=' ||
6081 (tok >= TOK_A_MOD && tok <= TOK_A_DIV) ||
6082 tok == TOK_A_XOR || tok == TOK_A_OR ||
6083 tok == TOK_A_SHL || tok == TOK_A_SAR) {
6084 test_lvalue();
6085 t = tok;
6086 next();
6087 if (t == '=') {
6088 expr_eq();
6089 } else {
6090 vdup();
6091 expr_eq();
6092 gen_op(t & 0x7f);
6094 vstore();
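/* Compound assignment is handled above by duplicating the lvalue: for a
   hypothetical "x += y" the sequence is vdup(), evaluate y, gen_op('+')
   (the operator is recovered from the TOK_A_... token with t & 0x7f),
   then vstore() writes the result back into x. */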
6098 ST_FUNC void gexpr(void)
6100 while (1) {
6101 expr_eq();
6102 if (tok != ',')
6103 break;
6104 vpop();
6105 next();
6109 /* parse a constant expression and return value in vtop. */
6110 static void expr_const1(void)
6112 const_wanted++;
6113 nocode_wanted += unevalmask + 1;
6114 expr_cond();
6115 nocode_wanted -= unevalmask + 1;
6116 const_wanted--;
6119 /* parse an integer constant and return its value. */
6120 static inline int64_t expr_const64(void)
6122 int64_t c;
6123 expr_const1();
6124 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
6125 expect("constant expression");
6126 c = vtop->c.i;
6127 vpop();
6128 return c;
6131 /* parse an integer constant and return its value.
6132 Complain if it doesn't fit in 32 bits (signed or unsigned). */
6133 ST_FUNC int expr_const(void)
6135 int c;
6136 int64_t wc = expr_const64();
6137 c = wc;
6138 if (c != wc && (unsigned)c != wc)
6139 tcc_error("constant exceeds 32 bit");
6140 return c;
6143 /* ------------------------------------------------------------------------- */
6144 /* return from function */
6146 #ifndef TCC_TARGET_ARM64
6147 static void gfunc_return(CType *func_type)
6149 if ((func_type->t & VT_BTYPE) == VT_STRUCT) {
6150 CType type, ret_type;
6151 int ret_align, ret_nregs, regsize;
6152 ret_nregs = gfunc_sret(func_type, func_var, &ret_type,
6153 &ret_align, &regsize);
6154 if (ret_nregs < 0) {
6155 #ifdef TCC_TARGET_RISCV64
6156 arch_transfer_ret_regs(0);
6157 #endif
6158 } else if (0 == ret_nregs) {
6159 /* if returning structure, must copy it to implicit
6160 first pointer arg location */
6161 type = *func_type;
6162 mk_pointer(&type);
6163 vset(&type, VT_LOCAL | VT_LVAL, func_vc);
6164 indir();
6165 vswap();
6166 /* copy structure value to pointer */
6167 vstore();
6168 } else {
6169 /* returning structure packed into registers */
6170 int size, addr, align, rc;
6171 size = type_size(func_type,&align);
6172 if ((vtop->r != (VT_LOCAL | VT_LVAL) ||
6173 (vtop->c.i & (ret_align-1)))
6174 && (align & (ret_align-1))) {
6175 loc = (loc - size) & -ret_align;
6176 addr = loc;
6177 type = *func_type;
6178 vset(&type, VT_LOCAL | VT_LVAL, addr);
6179 vswap();
6180 vstore();
6181 vpop();
6182 vset(&ret_type, VT_LOCAL | VT_LVAL, addr);
6184 vtop->type = ret_type;
6185 rc = RC_RET(ret_type.t);
6186 if (ret_nregs == 1)
6187 gv(rc);
6188 else {
6189 for (;;) {
6190 vdup();
6191 gv(rc);
6192 vpop();
6193 if (--ret_nregs == 0)
6194 break;
6195 /* We assume that when a structure is returned in multiple
6196 registers, their classes are consecutive values of the
6197 sequence s(n) = 2^n */
6198 rc <<= 1;
6199 vtop->c.i += regsize;
6203 } else {
6204 gv(RC_RET(func_type->t));
6206 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
6208 #endif
6210 static void check_func_return(void)
6212 if ((func_vt.t & VT_BTYPE) == VT_VOID)
6213 return;
6214 if (!strcmp (funcname, "main")
6215 && (func_vt.t & VT_BTYPE) == VT_INT) {
6216 /* main returns 0 by default */
6217 vpushi(0);
6218 gen_assign_cast(&func_vt);
6219 gfunc_return(&func_vt);
6220 } else {
6221 tcc_warning("function might return no value: '%s'", funcname);
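/* Behavioural note: a hypothetical

       int main() { }

   compiles without a diagnostic because the missing return value is
   synthesized as "return 0;" above, matching C99 5.1.2.2.3; any other
   non-void function falling off its end only gets the warning. */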
6225 /* ------------------------------------------------------------------------- */
6226 /* switch/case */
6228 static int case_cmp(const void *pa, const void *pb)
6230 int64_t a = (*(struct case_t**) pa)->v1;
6231 int64_t b = (*(struct case_t**) pb)->v1;
6232 return a < b ? -1 : a > b;
6235 static void gtst_addr(int t, int a)
6237 gsym_addr(gvtst(0, t), a);
6240 static void gcase(struct case_t **base, int len, int *bsym)
6242 struct case_t *p;
6243 int e;
6244 int ll = (vtop->type.t & VT_BTYPE) == VT_LLONG;
6245 while (len > 8) {
6246 /* binary search */
6247 p = base[len/2];
6248 vdup();
6249 if (ll)
6250 vpushll(p->v2);
6251 else
6252 vpushi(p->v2);
6253 gen_op(TOK_LE);
6254 e = gvtst(1, 0);
6255 vdup();
6256 if (ll)
6257 vpushll(p->v1);
6258 else
6259 vpushi(p->v1);
6260 gen_op(TOK_GE);
6261 gtst_addr(0, p->sym); /* v1 <= x <= v2 */
6262 /* x < v1 */
6263 gcase(base, len/2, bsym);
6264 /* x > v2 */
6265 gsym(e);
6266 e = len/2 + 1;
6267 base += e; len -= e;
6269 /* linear scan */
6270 while (len--) {
6271 p = *base++;
6272 vdup();
6273 if (ll)
6274 vpushll(p->v2);
6275 else
6276 vpushi(p->v2);
6277 if (p->v1 == p->v2) {
6278 gen_op(TOK_EQ);
6279 gtst_addr(0, p->sym);
6280 } else {
6281 gen_op(TOK_LE);
6282 e = gvtst(1, 0);
6283 vdup();
6284 if (ll)
6285 vpushll(p->v1);
6286 else
6287 vpushi(p->v1);
6288 gen_op(TOK_GE);
6289 gtst_addr(0, p->sym);
6290 gsym(e);
6293 *bsym = gjmp(*bsym);
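/* Dispatch strategy illustrated: for a hypothetical switch with many cases,
   gcase() performs a binary search over the sorted case list while more than
   8 entries remain, then falls back to linear compares; a GNU range such as
   "case 1 ... 5:" is stored as v1/v2 and tested with two compares
   (v1 <= x and x <= v2). */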
6296 /* ------------------------------------------------------------------------- */
6297 /* __attribute__((cleanup(fn))) */
6299 static void try_call_scope_cleanup(Sym *stop)
6301 Sym *cls = cur_scope->cl.s;
6303 for (; cls != stop; cls = cls->ncl) {
6304 Sym *fs = cls->next;
6305 Sym *vs = cls->prev_tok;
6307 vpushsym(&fs->type, fs);
6308 vset(&vs->type, vs->r, vs->c);
6309 vtop->sym = vs;
6310 mk_pointer(&vtop->type);
6311 gaddrof();
6312 gfunc_call(1);
6316 static void try_call_cleanup_goto(Sym *cleanupstate)
6318 Sym *oc, *cc;
6319 int ocd, ccd;
6321 if (!cur_scope->cl.s)
6322 return;
6324 /* search NCA of both cleanup chains given parents and initial depth */
6325 ocd = cleanupstate ? cleanupstate->v & ~SYM_FIELD : 0;
6326 for (ccd = cur_scope->cl.n, oc = cleanupstate; ocd > ccd; --ocd, oc = oc->ncl)
6328 for (cc = cur_scope->cl.s; ccd > ocd; --ccd, cc = cc->ncl)
6330 for (; cc != oc; cc = cc->ncl, oc = oc->ncl, --ccd)
6333 try_call_scope_cleanup(cc);
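/* Illustrative use (hypothetical function) of the cleanup machinery above:

       void f(void) {
           char *p __attribute__((cleanup(free_ptr))) = make();
           if (bad) goto out;
           ...
       out:;
       }

   Leaving the scope, or jumping out of it with goto, calls free_ptr(&p);
   try_call_cleanup_goto() only runs the cleanups between the goto's scope
   and the nearest common ancestor of the label's scope.  free_ptr, make and
   bad are example names. */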
6336 /* call 'func' for each __attribute__((cleanup(func))) */
6337 static void block_cleanup(struct scope *o)
6339 int jmp = 0;
6340 Sym *g, **pg;
6341 for (pg = &pending_gotos; (g = *pg) && g->c > o->cl.n;) {
6342 if (g->prev_tok->r & LABEL_FORWARD) {
6343 Sym *pcl = g->next;
6344 if (!jmp)
6345 jmp = gjmp(0);
6346 gsym(pcl->jnext);
6347 try_call_scope_cleanup(o->cl.s);
6348 pcl->jnext = gjmp(0);
6349 if (!o->cl.n)
6350 goto remove_pending;
6351 g->c = o->cl.n;
6352 pg = &g->prev;
6353 } else {
6354 remove_pending:
6355 *pg = g->prev;
6356 sym_free(g);
6359 gsym(jmp);
6360 try_call_scope_cleanup(o->cl.s);
6363 /* ------------------------------------------------------------------------- */
6364 /* VLA */
6366 static void vla_restore(int loc)
6368 if (loc)
6369 gen_vla_sp_restore(loc);
6372 static void vla_leave(struct scope *o)
6374 if (o->vla.num < cur_scope->vla.num)
6375 vla_restore(o->vla.loc);
6378 /* ------------------------------------------------------------------------- */
6379 /* local scopes */
6381 void new_scope(struct scope *o)
6383 /* copy and link previous scope */
6384 *o = *cur_scope;
6385 o->prev = cur_scope;
6386 cur_scope = o;
6388 /* record local declaration stack position */
6389 o->lstk = local_stack;
6390 o->llstk = local_label_stack;
6392 ++local_scope;
6395 void prev_scope(struct scope *o, int is_expr)
6397 vla_leave(o->prev);
6399 if (o->cl.s != o->prev->cl.s)
6400 block_cleanup(o->prev);
6402 /* pop locally defined labels */
6403 label_pop(&local_label_stack, o->llstk, is_expr);
6405 /* In the is_expr case (a statement expression is finished here),
6406 vtop might refer to symbols on the local_stack. Either via the
6407 type or via vtop->sym. We can't pop those nor any that in turn
6408 might be referred to. To make it easier we don't roll back
6409 any symbols in that case; some upper level call to block() will
6410 do that. We do have to remove such symbols from the lookup
6411 tables, though. sym_pop will do that. */
6413 /* pop locally defined symbols */
6414 pop_local_syms(&local_stack, o->lstk, is_expr);
6416 cur_scope = o->prev;
6417 --local_scope;
6420 /* leave a scope via break/continue(/goto) */
6421 void leave_scope(struct scope *o)
6423 if (!o)
6424 return;
6425 try_call_scope_cleanup(o->cl.s);
6426 vla_leave(o);
6429 /* ------------------------------------------------------------------------- */
6430 /* call block from 'for do while' loops */
6432 static void lblock(int *bsym, int *csym)
6434 struct scope *lo = loop_scope, *co = cur_scope;
6435 int *b = co->bsym, *c = co->csym;
6436 if (csym) {
6437 co->csym = csym;
6438 loop_scope = co;
6440 co->bsym = bsym;
6441 block(0);
6442 co->bsym = b;
6443 if (csym) {
6444 co->csym = c;
6445 loop_scope = lo;
6449 static void block(int is_expr)
6451 int a, b, c, d, e, t;
6452 Sym *s;
6454 if (is_expr) {
6455 /* default return value is (void) */
6456 vpushi(0);
6457 vtop->type.t = VT_VOID;
6460 again:
6461 t = tok, next();
6463 if (t == TOK_IF) {
6464 skip('(');
6465 gexpr();
6466 skip(')');
6467 a = gvtst(1, 0);
6468 block(0);
6469 if (tok == TOK_ELSE) {
6470 d = gjmp(0);
6471 gsym(a);
6472 next();
6473 block(0);
6474 gsym(d); /* patch else jmp */
6475 } else {
6476 gsym(a);
6479 } else if (t == TOK_WHILE) {
6480 d = gind();
6481 skip('(');
6482 gexpr();
6483 skip(')');
6484 a = gvtst(1, 0);
6485 b = 0;
6486 lblock(&a, &b);
6487 gjmp_addr(d);
6488 gsym_addr(b, d);
6489 gsym(a);
6491 } else if (t == '{') {
6492 struct scope o;
6493 new_scope(&o);
6495 /* handle local labels declarations */
6496 while (tok == TOK_LABEL) {
6497 do {
6498 next();
6499 if (tok < TOK_UIDENT)
6500 expect("label identifier");
6501 label_push(&local_label_stack, tok, LABEL_DECLARED);
6502 next();
6503 } while (tok == ',');
6504 skip(';');
6507 while (tok != '}') {
6508 decl(VT_LOCAL);
6509 if (tok != '}') {
6510 if (is_expr)
6511 vpop();
6512 block(is_expr);
6516 prev_scope(&o, is_expr);
6518 if (0 == local_scope && !nocode_wanted)
6519 check_func_return();
6520 next();
6522 } else if (t == TOK_RETURN) {
6523 b = (func_vt.t & VT_BTYPE) != VT_VOID;
6524 if (tok != ';') {
6525 gexpr();
6526 if (b) {
6527 gen_assign_cast(&func_vt);
6528 } else {
6529 if (vtop->type.t != VT_VOID)
6530 tcc_warning("void function returns a value");
6531 vtop--;
6533 } else if (b) {
6534 tcc_warning("'return' with no value");
6535 b = 0;
6537 leave_scope(root_scope);
6538 if (b)
6539 gfunc_return(&func_vt);
6540 skip(';');
6541 /* jump unless last stmt in top-level block */
6542 if (tok != '}' || local_scope != 1)
6543 rsym = gjmp(rsym);
6544 CODE_OFF();
6546 } else if (t == TOK_BREAK) {
6547 /* compute jump */
6548 if (!cur_scope->bsym)
6549 tcc_error("cannot break");
6550 if (!cur_switch || cur_scope->bsym != cur_switch->bsym)
6551 leave_scope(loop_scope);
6552 else
6553 leave_scope(cur_switch->scope);
6554 *cur_scope->bsym = gjmp(*cur_scope->bsym);
6555 skip(';');
6557 } else if (t == TOK_CONTINUE) {
6558 /* compute jump */
6559 if (!cur_scope->csym)
6560 tcc_error("cannot continue");
6561 leave_scope(loop_scope);
6562 *cur_scope->csym = gjmp(*cur_scope->csym);
6563 skip(';');
6565 } else if (t == TOK_FOR) {
6566 struct scope o;
6567 new_scope(&o);
6569 skip('(');
6570 if (tok != ';') {
6571 /* c99 for-loop init decl? */
6572 if (!decl0(VT_LOCAL, 1, NULL)) {
6573 /* no, regular for-loop init expr */
6574 gexpr();
6575 vpop();
6578 skip(';');
6579 a = b = 0;
6580 c = d = gind();
6581 if (tok != ';') {
6582 gexpr();
6583 a = gvtst(1, 0);
6585 skip(';');
6586 if (tok != ')') {
6587 e = gjmp(0);
6588 d = gind();
6589 gexpr();
6590 vpop();
6591 gjmp_addr(c);
6592 gsym(e);
6594 skip(')');
6595 lblock(&a, &b);
6596 gjmp_addr(d);
6597 gsym_addr(b, d);
6598 gsym(a);
6599 prev_scope(&o, 0);
6601 } else if (t == TOK_DO) {
6602 a = b = 0;
6603 d = gind();
6604 lblock(&a, &b);
6605 gsym(b);
6606 skip(TOK_WHILE);
6607 skip('(');
6608 gexpr();
6609 skip(')');
6610 skip(';');
6611 c = gvtst(0, 0);
6612 gsym_addr(c, d);
6613 gsym(a);
6615 } else if (t == TOK_SWITCH) {
6616 struct switch_t *saved, sw;
6617 SValue switchval;
6619 sw.p = NULL;
6620 sw.n = 0;
6621 sw.def_sym = 0;
6622 sw.bsym = &a;
6623 sw.scope = cur_scope;
6625 saved = cur_switch;
6626 cur_switch = &sw;
6628 skip('(');
6629 gexpr();
6630 skip(')');
6631 switchval = *vtop--;
6633 a = 0;
6634 b = gjmp(0); /* jump to first case */
6635 lblock(&a, NULL);
6636 a = gjmp(a); /* add implicit break */
6637 /* case lookup */
6638 gsym(b);
6640 qsort(sw.p, sw.n, sizeof(void*), case_cmp);
6641 for (b = 1; b < sw.n; b++)
6642 if (sw.p[b - 1]->v2 >= sw.p[b]->v1)
6643 tcc_error("duplicate case value");
6645 /* Our switch table sorting is signed, so the compared
6646 value needs to be as well when it's 64bit. */
6647 if ((switchval.type.t & VT_BTYPE) == VT_LLONG)
6648 switchval.type.t &= ~VT_UNSIGNED;
6649 vpushv(&switchval);
6650 gv(RC_INT);
6651 d = 0, gcase(sw.p, sw.n, &d);
6652 vpop();
6653 if (sw.def_sym)
6654 gsym_addr(d, sw.def_sym);
6655 else
6656 gsym(d);
6657 /* break label */
6658 gsym(a);
6660 dynarray_reset(&sw.p, &sw.n);
6661 cur_switch = saved;
6663 } else if (t == TOK_CASE) {
6664 struct case_t *cr = tcc_malloc(sizeof(struct case_t));
6665 if (!cur_switch)
6666 expect("switch");
6667 cr->v1 = cr->v2 = expr_const64();
6668 if (gnu_ext && tok == TOK_DOTS) {
6669 next();
6670 cr->v2 = expr_const64();
6671 if (cr->v2 < cr->v1)
6672 tcc_warning("empty case range");
6674 cr->sym = gind();
6675 dynarray_add(&cur_switch->p, &cur_switch->n, cr);
6676 skip(':');
6677 is_expr = 0;
6678 goto block_after_label;
6680 } else if (t == TOK_DEFAULT) {
6681 if (!cur_switch)
6682 expect("switch");
6683 if (cur_switch->def_sym)
6684 tcc_error("too many 'default'");
6685 cur_switch->def_sym = gind();
6686 skip(':');
6687 is_expr = 0;
6688 goto block_after_label;
6690 } else if (t == TOK_GOTO) {
6691 vla_restore(root_scope->vla.loc);
6692 if (tok == '*' && gnu_ext) {
6693 /* computed goto */
6694 next();
6695 gexpr();
6696 if ((vtop->type.t & VT_BTYPE) != VT_PTR)
6697 expect("pointer");
6698 ggoto();
6700 } else if (tok >= TOK_UIDENT) {
6701 s = label_find(tok);
6702 /* put forward definition if needed */
6703 if (!s)
6704 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
6705 else if (s->r == LABEL_DECLARED)
6706 s->r = LABEL_FORWARD;
6708 if (s->r & LABEL_FORWARD) {
6709 /* start new goto chain for cleanups, linked via label->next */
6710 if (cur_scope->cl.s && !nocode_wanted) {
6711 sym_push2(&pending_gotos, SYM_FIELD, 0, cur_scope->cl.n);
6712 pending_gotos->prev_tok = s;
6713 s = sym_push2(&s->next, SYM_FIELD, 0, 0);
6714 pending_gotos->next = s;
6716 s->jnext = gjmp(s->jnext);
6717 } else {
6718 try_call_cleanup_goto(s->cleanupstate);
6719 gjmp_addr(s->jnext);
6721 next();
6723 } else {
6724 expect("label identifier");
6726 skip(';');
6728 } else if (t == TOK_ASM1 || t == TOK_ASM2 || t == TOK_ASM3) {
6729 asm_instr();
6731 } else {
6732 if (tok == ':' && t >= TOK_UIDENT) {
6733 /* label case */
6734 next();
6735 s = label_find(t);
6736 if (s) {
6737 if (s->r == LABEL_DEFINED)
6738 tcc_error("duplicate label '%s'", get_tok_str(s->v, NULL));
6739 s->r = LABEL_DEFINED;
6740 if (s->next) {
6741 Sym *pcl; /* pending cleanup goto */
6742 for (pcl = s->next; pcl; pcl = pcl->prev)
6743 gsym(pcl->jnext);
6744 sym_pop(&s->next, NULL, 0);
6745 } else
6746 gsym(s->jnext);
6747 } else {
6748 s = label_push(&global_label_stack, t, LABEL_DEFINED);
6750 s->jnext = gind();
6751 s->cleanupstate = cur_scope->cl.s;
6753 block_after_label:
6754 vla_restore(cur_scope->vla.loc);
6755 /* we accept this, but it is a mistake */
6756 if (tok == '}') {
6757 tcc_warning("deprecated use of label at end of compound statement");
6758 } else {
6759 goto again;
6762 } else {
6763 /* expression case */
6764 if (t != ';') {
6765 unget_tok(t);
6766 if (is_expr) {
6767 vpop();
6768 gexpr();
6769 } else {
6770 gexpr();
6771 vpop();
6773 skip(';');
6779 /* This skips over a stream of tokens containing balanced {} and ()
6780 pairs, stopping at an outer ',' ';' ')' or '}' (or at the matching '}'
6781 if we started with a '{'). If STR is non-NULL, the skipped tokens are
6782 allocated and stored in *STR. This doesn't check whether () and {} are
6783 nested correctly, i.e. "({)}" is accepted. */
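/* Illustrative example (not from the original sources): for
       static inline int sq(int x) { return x * x; }
   decl0() calls skip_or_save_block(&fn->func_str) at the '{' so the whole
   body is recorded and can be replayed later by gen_inline_functions();
   with str == NULL the block is simply skipped (e.g. for DIF_SIZE_ONLY). */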
6784 static void skip_or_save_block(TokenString **str)
6786 int braces = tok == '{';
6787 int level = 0;
6788 if (str)
6789 *str = tok_str_alloc();
6791 while ((level > 0 || (tok != '}' && tok != ',' && tok != ';' && tok != ')'))) {
6792 int t;
6793 if (tok == TOK_EOF) {
6794 if (str || level > 0)
6795 tcc_error("unexpected end of file");
6796 else
6797 break;
6799 if (str)
6800 tok_str_add_tok(*str);
6801 t = tok;
6802 next();
6803 if (t == '{' || t == '(') {
6804 level++;
6805 } else if (t == '}' || t == ')') {
6806 level--;
6807 if (level == 0 && braces && t == '}')
6808 break;
6811 if (str) {
6812 tok_str_add(*str, -1);
6813 tok_str_add(*str, 0);
6817 #define EXPR_CONST 1
6818 #define EXPR_ANY 2
6820 static void parse_init_elem(int expr_type)
6822 int saved_global_expr;
6823 switch(expr_type) {
6824 case EXPR_CONST:
6825 /* compound literals must be allocated globally in this case */
6826 saved_global_expr = global_expr;
6827 global_expr = 1;
6828 expr_const1();
6829 global_expr = saved_global_expr;
6830 /* NOTE: symbols are accepted, as well as lvalue for anon symbols
6831 (compound literals). */
6832 if (((vtop->r & (VT_VALMASK | VT_LVAL)) != VT_CONST
6833 && ((vtop->r & (VT_SYM|VT_LVAL)) != (VT_SYM|VT_LVAL)
6834 || vtop->sym->v < SYM_FIRST_ANOM))
6835 #ifdef TCC_TARGET_PE
6836 || ((vtop->r & VT_SYM) && vtop->sym->a.dllimport)
6837 #endif
6839 tcc_error("initializer element is not constant");
6840 break;
6841 case EXPR_ANY:
6842 expr_eq();
6843 break;
6847 /* put zeros for variable based init */
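/* Illustrative note (not from the original sources): for a local object this
   emits a call equivalent to 'memset(<local at offset c>, 0, size)'; the
   TCC_TARGET_ARM branch below merely pushes the two integer arguments in a
   different order. */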
6848 static void init_putz(Section *sec, unsigned long c, int size)
6850 if (sec) {
6851 /* nothing to do because globals are already set to zero */
6852 } else {
6853 vpush_global_sym(&func_old_type, TOK_memset);
6854 vseti(VT_LOCAL, c);
6855 #ifdef TCC_TARGET_ARM
6856 vpushs(size);
6857 vpushi(0);
6858 #else
6859 vpushi(0);
6860 vpushs(size);
6861 #endif
6862 gfunc_call(3);
6866 #define DIF_FIRST 1
6867 #define DIF_SIZE_ONLY 2
6868 #define DIF_HAVE_ELEM 4
6870 /* t is the array or struct type. c is the array or struct
6871 address. cur_field is the pointer to the current
6872 field; for arrays the 'c' member contains the current start
6873 index. 'flags' is as in decl_initializer.
6874 'al' contains the already initialized length of the
6875 current container (starting at c). This returns the new length of that. */
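/* Illustrative examples (not from the original sources):
       int a[10] = { [2] = 5, [4 ... 6] = 7 };
       struct P { int x, y; } p = { .y = 2, .x = 1 };
   A GNU range designator sets nb_elems > 1; the single element parsed by
   decl_initializer() is then replicated over the whole range by the copy
   loop at the end of this function. */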
6876 static int decl_designator(CType *type, Section *sec, unsigned long c,
6877 Sym **cur_field, int flags, int al)
6879 Sym *s, *f;
6880 int index, index_last, align, l, nb_elems, elem_size;
6881 unsigned long corig = c;
6883 elem_size = 0;
6884 nb_elems = 1;
6886 if (flags & DIF_HAVE_ELEM)
6887 goto no_designator;
6889 if (gnu_ext && tok >= TOK_UIDENT) {
6890 l = tok, next();
6891 if (tok == ':')
6892 goto struct_field;
6893 unget_tok(l);
6896 /* NOTE: we only support ranges for last designator */
6897 while (nb_elems == 1 && (tok == '[' || tok == '.')) {
6898 if (tok == '[') {
6899 if (!(type->t & VT_ARRAY))
6900 expect("array type");
6901 next();
6902 index = index_last = expr_const();
6903 if (tok == TOK_DOTS && gnu_ext) {
6904 next();
6905 index_last = expr_const();
6907 skip(']');
6908 s = type->ref;
6909 if (index < 0 || (s->c >= 0 && index_last >= s->c) ||
6910 index_last < index)
6911 tcc_error("invalid index");
6912 if (cur_field)
6913 (*cur_field)->c = index_last;
6914 type = pointed_type(type);
6915 elem_size = type_size(type, &align);
6916 c += index * elem_size;
6917 nb_elems = index_last - index + 1;
6918 } else {
6919 int cumofs;
6920 next();
6921 l = tok;
6922 struct_field:
6923 next();
6924 if ((type->t & VT_BTYPE) != VT_STRUCT)
6925 expect("struct/union type");
6926 cumofs = 0;
6927 f = find_field(type, l, &cumofs);
6928 if (!f)
6929 expect("field");
6930 if (cur_field)
6931 *cur_field = f;
6932 type = &f->type;
6933 c += cumofs + f->c;
6935 cur_field = NULL;
6937 if (!cur_field) {
6938 if (tok == '=') {
6939 next();
6940 } else if (!gnu_ext) {
6941 expect("=");
6943 } else {
6944 no_designator:
6945 if (type->t & VT_ARRAY) {
6946 index = (*cur_field)->c;
6947 if (type->ref->c >= 0 && index >= type->ref->c)
6948 tcc_error("index too large");
6949 type = pointed_type(type);
6950 c += index * type_size(type, &align);
6951 } else {
6952 f = *cur_field;
6953 while (f && (f->v & SYM_FIRST_ANOM) && (f->type.t & VT_BITFIELD))
6954 *cur_field = f = f->next;
6955 if (!f)
6956 tcc_error("too many field init");
6957 type = &f->type;
6958 c += f->c;
6961 /* must put zero in holes (note that doing it that way
6962 ensures that it even works with designators) */
6963 if (!(flags & DIF_SIZE_ONLY) && c - corig > al)
6964 init_putz(sec, corig + al, c - corig - al);
6965 decl_initializer(type, sec, c, flags & ~DIF_FIRST);
6967 /* XXX: make it more general */
6968 if (!(flags & DIF_SIZE_ONLY) && nb_elems > 1) {
6969 unsigned long c_end;
6970 uint8_t *src, *dst;
6971 int i;
6973 if (!sec) {
6974 vset(type, VT_LOCAL|VT_LVAL, c);
6975 for (i = 1; i < nb_elems; i++) {
6976 vset(type, VT_LOCAL|VT_LVAL, c + elem_size * i);
6977 vswap();
6978 vstore();
6980 vpop();
6981 } else if (!NODATA_WANTED) {
6982 c_end = c + nb_elems * elem_size;
6983 if (c_end > sec->data_allocated)
6984 section_realloc(sec, c_end);
6985 src = sec->data + c;
6986 dst = src;
6987 for(i = 1; i < nb_elems; i++) {
6988 dst += elem_size;
6989 memcpy(dst, src, elem_size);
6993 c += nb_elems * type_size(type, &align);
6994 if (c - corig > al)
6995 al = c - corig;
6996 return al;
6999 /* store a value or an expression directly in global data or in local array */
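/* Illustrative example (not from the original sources): for
       static int g;
       static int *p = &g;
   the value for 'p' carries VT_SYM, so instead of a plain store an
   R_DATA_PTR relocation against 'g' is emitted with greloc()/greloca(). */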
7000 static void init_putv(CType *type, Section *sec, unsigned long c)
7002 int bt;
7003 void *ptr;
7004 CType dtype;
7006 dtype = *type;
7007 dtype.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
7009 if (sec) {
7010 int size, align;
7011 /* XXX: not portable */
7012 /* XXX: generate error if incorrect relocation */
7013 gen_assign_cast(&dtype);
7014 bt = type->t & VT_BTYPE;
7016 if ((vtop->r & VT_SYM)
7017 && bt != VT_PTR
7018 && bt != VT_FUNC
7019 && (bt != (PTR_SIZE == 8 ? VT_LLONG : VT_INT)
7020 || (type->t & VT_BITFIELD))
7021 && !((vtop->r & VT_CONST) && vtop->sym->v >= SYM_FIRST_ANOM)
7023 tcc_error("initializer element is not computable at load time");
7025 if (NODATA_WANTED) {
7026 vtop--;
7027 return;
7030 size = type_size(type, &align);
7031 section_reserve(sec, c + size);
7032 ptr = sec->data + c;
7034 /* XXX: make code faster ? */
7035 if ((vtop->r & (VT_SYM|VT_CONST)) == (VT_SYM|VT_CONST) &&
7036 vtop->sym->v >= SYM_FIRST_ANOM &&
7037 /* XXX This rejects compound literals like
7038 '(void *){ptr}'. The problem is that '&sym' is
7039 represented the same way, which would be ruled out
7040 by the SYM_FIRST_ANOM check above, but also '"string"'
7041 in 'char *p = "string"' is represented the same
7042 with the type being VT_PTR and the symbol being an
7043 anonymous one. That is, there's no difference in vtop
7044 between '(void *){x}' and '&(void *){x}'. Ignore
7045 pointer typed entities here. Hopefully no real code
7046 will ever use compound literals with scalar type. */
7047 (vtop->type.t & VT_BTYPE) != VT_PTR) {
7048 /* These come from compound literals, memcpy stuff over. */
7049 Section *ssec;
7050 ElfSym *esym;
7051 ElfW_Rel *rel;
7052 esym = elfsym(vtop->sym);
7053 ssec = tcc_state->sections[esym->st_shndx];
7054 memmove (ptr, ssec->data + esym->st_value, size);
7055 if (ssec->reloc) {
7056 /* We need to copy over all memory contents, and that
7057 includes relocations. Use the fact that relocs are
7058 created in order, so look from the end of relocs
7059 until we hit one before the copied region. */
7060 int num_relocs = ssec->reloc->data_offset / sizeof(*rel);
7061 rel = (ElfW_Rel*)(ssec->reloc->data + ssec->reloc->data_offset);
7062 while (num_relocs--) {
7063 rel--;
7064 if (rel->r_offset >= esym->st_value + size)
7065 continue;
7066 if (rel->r_offset < esym->st_value)
7067 break;
7068 /* Note: if the same fields are initialized multiple
7069 times (possible with designators) then we possibly
7070 add multiple relocations for the same offset here.
7071 That would lead to wrong code; the last reloc needs
7072 to win. We clean this up later after the whole
7073 initializer is parsed. */
7074 put_elf_reloca(symtab_section, sec,
7075 c + rel->r_offset - esym->st_value,
7076 ELFW(R_TYPE)(rel->r_info),
7077 ELFW(R_SYM)(rel->r_info),
7078 #if PTR_SIZE == 8
7079 rel->r_addend
7080 #else
7081 0
7082 #endif
7086 } else {
7087 if (type->t & VT_BITFIELD) {
7088 int bit_pos, bit_size, bits, n;
7089 unsigned char *p, v, m;
7090 bit_pos = BIT_POS(vtop->type.t);
7091 bit_size = BIT_SIZE(vtop->type.t);
7092 p = (unsigned char*)ptr + (bit_pos >> 3);
7093 bit_pos &= 7, bits = 0;
7094 while (bit_size) {
7095 n = 8 - bit_pos;
7096 if (n > bit_size)
7097 n = bit_size;
7098 v = vtop->c.i >> bits << bit_pos;
7099 m = ((1 << n) - 1) << bit_pos;
7100 *p = (*p & ~m) | (v & m);
7101 bits += n, bit_size -= n, bit_pos = 0, ++p;
7103 } else
7104 switch(bt) {
7105 /* XXX: when cross-compiling we assume that each type has the
7106 same representation on host and target, which is likely to
7107 be wrong in the case of long double */
7108 case VT_BOOL:
7109 vtop->c.i = vtop->c.i != 0;
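/* fall through: a normalized _Bool is stored like a plain byte */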
7110 case VT_BYTE:
7111 *(char *)ptr |= vtop->c.i;
7112 break;
7113 case VT_SHORT:
7114 *(short *)ptr |= vtop->c.i;
7115 break;
7116 case VT_FLOAT:
7117 *(float*)ptr = vtop->c.f;
7118 break;
7119 case VT_DOUBLE:
7120 *(double *)ptr = vtop->c.d;
7121 break;
7122 case VT_LDOUBLE:
7123 #if defined TCC_IS_NATIVE_387
7124 if (sizeof (long double) >= 10) /* zero pad ten-byte LD */
7125 memcpy(ptr, &vtop->c.ld, 10);
7126 #ifdef __TINYC__
7127 else if (sizeof (long double) == sizeof (double))
7128 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr) : "m" (vtop->c.ld));
7129 #endif
7130 else if (vtop->c.ld == 0.0)
7131 ;
7132 else
7133 #endif
7134 if (sizeof(long double) == LDOUBLE_SIZE)
7135 *(long double*)ptr = vtop->c.ld;
7136 else if (sizeof(double) == LDOUBLE_SIZE)
7137 *(double *)ptr = (double)vtop->c.ld;
7138 else
7139 tcc_error("can't cross compile long double constants");
7140 break;
7141 #if PTR_SIZE != 8
7142 case VT_LLONG:
7143 *(long long *)ptr |= vtop->c.i;
7144 break;
7145 #else
7146 case VT_LLONG:
7147 #endif
7148 case VT_PTR:
7150 addr_t val = vtop->c.i;
7151 #if PTR_SIZE == 8
7152 if (vtop->r & VT_SYM)
7153 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
7154 else
7155 *(addr_t *)ptr |= val;
7156 #else
7157 if (vtop->r & VT_SYM)
7158 greloc(sec, vtop->sym, c, R_DATA_PTR);
7159 *(addr_t *)ptr |= val;
7160 #endif
7161 break;
7163 default:
7165 int val = vtop->c.i;
7166 #if PTR_SIZE == 8
7167 if (vtop->r & VT_SYM)
7168 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
7169 else
7170 *(int *)ptr |= val;
7171 #else
7172 if (vtop->r & VT_SYM)
7173 greloc(sec, vtop->sym, c, R_DATA_PTR);
7174 *(int *)ptr |= val;
7175 #endif
7176 break;
7180 vtop--;
7181 } else {
7182 vset(&dtype, VT_LOCAL|VT_LVAL, c);
7183 vswap();
7184 vstore();
7185 vpop();
7189 /* 't' contains the type and storage info. 'c' is the offset of the
7190 object in section 'sec'. If 'sec' is NULL, it means stack based
7191 allocation. 'flags & DIF_FIRST' is true if array '{' must be read (multi
7192 dimension implicit array init handling). 'flags & DIF_SIZE_ONLY' is true if
7193 size only evaluation is wanted (only for arrays). */
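/* Illustrative examples (not from the original sources):
       char s[]    = "ab";               string accepted without braces
       char t[3]   = { "ab" };           or enclosed in braces
       int  m[][2] = { {1, 2}, {3, 4} };
   With DIF_SIZE_ONLY the initializer is only scanned to find the missing
   array dimension (3 for s, 2 rows for m); the values are actually stored
   on the second, normal pass. */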
7194 static void decl_initializer(CType *type, Section *sec, unsigned long c,
7195 int flags)
7197 int len, n, no_oblock, nb, i;
7198 int size1, align1;
7199 Sym *s, *f;
7200 Sym indexsym;
7201 CType *t1;
7203 if (!(flags & DIF_HAVE_ELEM) && tok != '{' &&
7204 /* In case of strings we have special handling for arrays, so
7205 don't consume them as an initializer value (which would commit them
7206 to some anonymous symbol). */
7207 tok != TOK_LSTR && tok != TOK_STR &&
7208 !(flags & DIF_SIZE_ONLY)) {
7209 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
7210 flags |= DIF_HAVE_ELEM;
7213 if ((flags & DIF_HAVE_ELEM) &&
7214 !(type->t & VT_ARRAY) &&
7215 /* Use i_c_parameter_t, to strip toplevel qualifiers.
7216 The source type might have VT_CONSTANT set, which is
7217 of course assignable to non-const elements. */
7218 is_compatible_unqualified_types(type, &vtop->type)) {
7219 init_putv(type, sec, c);
7220 } else if (type->t & VT_ARRAY) {
7221 s = type->ref;
7222 n = s->c;
7223 t1 = pointed_type(type);
7224 size1 = type_size(t1, &align1);
7226 no_oblock = 1;
7227 if (((flags & DIF_FIRST) && tok != TOK_LSTR && tok != TOK_STR) ||
7228 tok == '{') {
7229 if (tok != '{')
7230 tcc_error("character array initializer must be a literal,"
7231 " optionally enclosed in braces");
7232 skip('{');
7233 no_oblock = 0;
7236 /* only parse strings here if correct type (otherwise: handle
7237 them as ((w)char *) expressions) */
7238 if ((tok == TOK_LSTR &&
7239 #ifdef TCC_TARGET_PE
7240 (t1->t & VT_BTYPE) == VT_SHORT && (t1->t & VT_UNSIGNED)
7241 #else
7242 (t1->t & VT_BTYPE) == VT_INT
7243 #endif
7244 ) || (tok == TOK_STR && (t1->t & VT_BTYPE) == VT_BYTE)) {
7245 len = 0;
7246 while (tok == TOK_STR || tok == TOK_LSTR) {
7247 int cstr_len, ch;
7249 /* compute maximum number of chars wanted */
7250 if (tok == TOK_STR)
7251 cstr_len = tokc.str.size;
7252 else
7253 cstr_len = tokc.str.size / sizeof(nwchar_t);
7254 cstr_len--;
7255 nb = cstr_len;
7256 if (n >= 0 && nb > (n - len))
7257 nb = n - len;
7258 if (!(flags & DIF_SIZE_ONLY)) {
7259 if (cstr_len > nb)
7260 tcc_warning("initializer-string for array is too long");
7261 /* in order to go faster for the common case (char
7262 string in a global variable), we handle it
7263 specifically */
7264 if (sec && tok == TOK_STR && size1 == 1) {
7265 if (!NODATA_WANTED)
7266 memcpy(sec->data + c + len, tokc.str.data, nb);
7267 } else {
7268 for(i=0;i<nb;i++) {
7269 if (tok == TOK_STR)
7270 ch = ((unsigned char *)tokc.str.data)[i];
7271 else
7272 ch = ((nwchar_t *)tokc.str.data)[i];
7273 vpushi(ch);
7274 init_putv(t1, sec, c + (len + i) * size1);
7278 len += nb;
7279 next();
7281 /* only add trailing zero if enough storage (no
7282 warning in this case since it is standard) */
7283 if (n < 0 || len < n) {
7284 if (!(flags & DIF_SIZE_ONLY)) {
7285 vpushi(0);
7286 init_putv(t1, sec, c + (len * size1));
7288 len++;
7290 len *= size1;
7291 } else {
7292 indexsym.c = 0;
7293 f = &indexsym;
7295 do_init_list:
7296 len = 0;
7297 while (tok != '}' || (flags & DIF_HAVE_ELEM)) {
7298 len = decl_designator(type, sec, c, &f, flags, len);
7299 flags &= ~DIF_HAVE_ELEM;
7300 if (type->t & VT_ARRAY) {
7301 ++indexsym.c;
7302 /* special test for multi dimensional arrays (may not
7303 be strictly correct if designators are used at the
7304 same time) */
7305 if (no_oblock && len >= n*size1)
7306 break;
7307 } else {
7308 if (s->type.t == VT_UNION)
7309 f = NULL;
7310 else
7311 f = f->next;
7312 if (no_oblock && f == NULL)
7313 break;
7316 if (tok == '}')
7317 break;
7318 skip(',');
7321 /* put zeros at the end */
7322 if (!(flags & DIF_SIZE_ONLY) && len < n*size1)
7323 init_putz(sec, c + len, n*size1 - len);
7324 if (!no_oblock)
7325 skip('}');
7326 /* patch type size if needed, which happens only for array types */
7327 if (n < 0)
7328 s->c = size1 == 1 ? len : ((len + size1 - 1)/size1);
7329 } else if ((type->t & VT_BTYPE) == VT_STRUCT) {
7330 size1 = 1;
7331 no_oblock = 1;
7332 if ((flags & DIF_FIRST) || tok == '{') {
7333 skip('{');
7334 no_oblock = 0;
7336 s = type->ref;
7337 f = s->next;
7338 n = s->c;
7339 goto do_init_list;
7340 } else if (tok == '{') {
7341 if (flags & DIF_HAVE_ELEM)
7342 skip(';');
7343 next();
7344 decl_initializer(type, sec, c, flags & ~DIF_HAVE_ELEM);
7345 skip('}');
7346 } else if ((flags & DIF_SIZE_ONLY)) {
7347 /* If we supported only ISO C we wouldn't have to accept calling
7348 this on anything other than an array if DIF_SIZE_ONLY (and even then
7349 only on the outermost level, so no recursion would be needed),
7350 because initializing a flex array member isn't supported.
7351 But GNU C supports it, so we need to recurse even into
7352 subfields of structs and arrays when DIF_SIZE_ONLY is set. */
7353 /* just skip expression */
7354 skip_or_save_block(NULL);
7355 } else {
7356 if (!(flags & DIF_HAVE_ELEM)) {
7357 /* This should happen only when we haven't parsed
7358 the init element above for fear of committing a
7359 string constant to memory too early. */
7360 if (tok != TOK_STR && tok != TOK_LSTR)
7361 expect("string constant");
7362 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
7364 init_putv(type, sec, c);
7368 /* parse an initializer for type 't' if 'has_init' is non-zero, and
7369 allocate space in local or global data space ('r' is either
7370 VT_LOCAL or VT_CONST). If 'v' is non-zero, then an associated
7371 variable 'v' of scope 'scope' is declared before initializers
7372 are parsed. If 'v' is zero, then a reference to the new object
7373 is put in the value stack. If 'has_init' is 2, a special parsing
7374 is done to handle string constants. */
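/* Illustrative examples (not from the original sources):
       int a[] = { 1, 2, 3 };
       struct S { int n; int d[]; } s = { 2, { 10, 20 } };   (GNU flex array init)
   When the size is unknown up front the initializer tokens are saved via
   skip_or_save_block(), scanned once with DIF_SIZE_ONLY to compute the
   object size, and then replayed as a macro for the real initialization. */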
7375 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r,
7376 int has_init, int v, int scope)
7378 int size, align, addr;
7379 TokenString *init_str = NULL;
7381 Section *sec;
7382 Sym *flexible_array;
7383 Sym *sym = NULL;
7384 int saved_nocode_wanted = nocode_wanted;
7385 #ifdef CONFIG_TCC_BCHECK
7386 int bcheck = tcc_state->do_bounds_check && !NODATA_WANTED;
7387 #endif
7389 /* Always allocate static or global variables */
7390 if (v && (r & VT_VALMASK) == VT_CONST)
7391 nocode_wanted |= 0x80000000;
7393 flexible_array = NULL;
7394 if ((type->t & VT_BTYPE) == VT_STRUCT) {
7395 Sym *field = type->ref->next;
7396 if (field) {
7397 while (field->next)
7398 field = field->next;
7399 if (field->type.t & VT_ARRAY && field->type.ref->c < 0)
7400 flexible_array = field;
7404 size = type_size(type, &align);
7405 /* If unknown size, we must evaluate it before
7406 evaluating initializers because
7407 initializers can generate global data too
7408 (e.g. string pointers or ISO C99 compound
7409 literals). It also simplifies local
7410 initializer handling */
7411 if (size < 0 || (flexible_array && has_init)) {
7412 if (!has_init)
7413 tcc_error("unknown type size");
7414 /* get all init string */
7415 if (has_init == 2) {
7416 init_str = tok_str_alloc();
7417 /* only get strings */
7418 while (tok == TOK_STR || tok == TOK_LSTR) {
7419 tok_str_add_tok(init_str);
7420 next();
7422 tok_str_add(init_str, -1);
7423 tok_str_add(init_str, 0);
7424 } else {
7425 skip_or_save_block(&init_str);
7427 unget_tok(0);
7429 /* compute size */
7430 begin_macro(init_str, 1);
7431 next();
7432 decl_initializer(type, NULL, 0, DIF_FIRST | DIF_SIZE_ONLY);
7433 /* prepare second initializer parsing */
7434 macro_ptr = init_str->str;
7435 next();
7437 /* if still unknown size, error */
7438 size = type_size(type, &align);
7439 if (size < 0)
7440 tcc_error("unknown type size");
7442 /* If there's a flex member and it was used in the initializer,
7443 adjust the size. */
7444 if (flexible_array &&
7445 flexible_array->type.ref->c > 0)
7446 size += flexible_array->type.ref->c
7447 * pointed_size(&flexible_array->type);
7448 /* take into account specified alignment if bigger */
7449 if (ad->a.aligned) {
7450 int speca = 1 << (ad->a.aligned - 1);
7451 if (speca > align)
7452 align = speca;
7453 } else if (ad->a.packed) {
7454 align = 1;
7457 if (!v && NODATA_WANTED)
7458 size = 0, align = 1;
7460 if ((r & VT_VALMASK) == VT_LOCAL) {
7461 sec = NULL;
7462 #ifdef CONFIG_TCC_BCHECK
7463 if (bcheck && v) {
7464 /* add padding between stack variables for bound checking */
7465 loc--;
7467 #endif
7468 loc = (loc - size) & -align;
7469 addr = loc;
7470 #ifdef CONFIG_TCC_BCHECK
7471 if (bcheck && v) {
7472 /* add padding between stack variables for bound checking */
7473 loc--;
7475 #endif
7476 if (v) {
7477 /* local variable */
7478 #ifdef CONFIG_TCC_ASM
7479 if (ad->asm_label) {
7480 int reg = asm_parse_regvar(ad->asm_label);
7481 if (reg >= 0)
7482 r = (r & ~VT_VALMASK) | reg;
7484 #endif
7485 sym = sym_push(v, type, r, addr);
7486 if (ad->cleanup_func) {
7487 Sym *cls = sym_push2(&all_cleanups,
7488 SYM_FIELD | ++cur_scope->cl.n, 0, 0);
7489 cls->prev_tok = sym;
7490 cls->next = ad->cleanup_func;
7491 cls->ncl = cur_scope->cl.s;
7492 cur_scope->cl.s = cls;
7495 sym->a = ad->a;
7496 } else {
7497 /* push local reference */
7498 vset(type, r, addr);
7500 } else {
7501 if (v && scope == VT_CONST) {
7502 /* see if the symbol was already defined */
7503 sym = sym_find(v);
7504 if (sym) {
7505 patch_storage(sym, ad, type);
7506 /* we accept several definitions of the same global variable. */
7507 if (!has_init && sym->c && elfsym(sym)->st_shndx != SHN_UNDEF)
7508 goto no_alloc;
7512 /* allocate symbol in corresponding section */
7513 sec = ad->section;
7514 if (!sec) {
7515 if (has_init)
7516 sec = data_section;
7517 else if (tcc_state->nocommon)
7518 sec = bss_section;
7521 if (sec) {
7522 addr = section_add(sec, size, align);
7523 #ifdef CONFIG_TCC_BCHECK
7524 /* add padding if bound check */
7525 if (bcheck)
7526 section_add(sec, 1, 1);
7527 #endif
7528 } else {
7529 addr = align; /* SHN_COMMON is special, symbol value is align */
7530 sec = common_section;
7533 if (v) {
7534 if (!sym) {
7535 sym = sym_push(v, type, r | VT_SYM, 0);
7536 patch_storage(sym, ad, NULL);
7538 /* update symbol definition */
7539 put_extern_sym(sym, sec, addr, size);
7540 } else {
7541 /* push global reference */
7542 vpush_ref(type, sec, addr, size);
7543 sym = vtop->sym;
7544 vtop->r |= r;
7547 #ifdef CONFIG_TCC_BCHECK
7548 /* handle bounds now because the symbol must be defined
7549 before the relocation */
7550 if (bcheck) {
7551 addr_t *bounds_ptr;
7553 greloca(bounds_section, sym, bounds_section->data_offset, R_DATA_PTR, 0);
7554 /* then add global bound info */
7555 bounds_ptr = section_ptr_add(bounds_section, 2 * sizeof(addr_t));
7556 bounds_ptr[0] = 0; /* relocated */
7557 bounds_ptr[1] = size;
7559 #endif
7562 if (type->t & VT_VLA) {
7563 int a;
7565 if (NODATA_WANTED)
7566 goto no_alloc;
7568 /* save current stack pointer */
7569 if (root_scope->vla.loc == 0) {
7570 struct scope *v = cur_scope;
7571 gen_vla_sp_save(loc -= PTR_SIZE);
7572 do v->vla.loc = loc; while ((v = v->prev));
7575 vla_runtime_type_size(type, &a);
7576 gen_vla_alloc(type, a);
7577 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
7578 /* on _WIN64, because of the function args scratch area, the
7579 result of alloca differs from RSP and is returned in RAX. */
7580 gen_vla_result(addr), addr = (loc -= PTR_SIZE);
7581 #endif
7582 gen_vla_sp_save(addr);
7583 cur_scope->vla.loc = addr;
7584 cur_scope->vla.num++;
7585 #ifdef CONFIG_TCC_BCHECK
7586 if (bcheck) {
7587 addr_t *bounds_ptr;
7589 bounds_ptr = section_ptr_add(lbounds_section, 2 * sizeof(addr_t));
7590 bounds_ptr[0] = 1; /* marks alloca/vla used */
7591 bounds_ptr[1] = 0;
7593 #endif
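/* Illustrative example (not from the original sources): for
       void f(int n) { int buf[n]; ... }
   the stack pointer is saved the first time a VLA occurs in the function
   (root_scope->vla.loc), the runtime size of 'buf' is computed with
   vla_runtime_type_size() and reserved alloca-style by gen_vla_alloc();
   goto statements later restore the saved pointer via vla_restore(). */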
7595 } else if (has_init) {
7596 size_t oldreloc_offset = 0;
7597 if (sec && sec->reloc)
7598 oldreloc_offset = sec->reloc->data_offset;
7599 decl_initializer(type, sec, addr, DIF_FIRST);
7600 if (sec && sec->reloc)
7601 squeeze_multi_relocs(sec, oldreloc_offset);
7602 /* patch flexible array member size back to -1, */
7603 /* for possible subsequent similar declarations */
7604 if (flexible_array)
7605 flexible_array->type.ref->c = -1;
7608 no_alloc:
7609 /* restore parse state if needed */
7610 if (init_str) {
7611 end_macro();
7612 next();
7615 nocode_wanted = saved_nocode_wanted;
7618 /* parse a function defined by symbol 'sym' and generate its code in
7619 'cur_text_section' */
7620 static void gen_function(Sym *sym, AttributeDef *ad)
7622 /* Initialize VLA state */
7623 struct scope f = { 0 };
7624 cur_scope = root_scope = &f;
7626 nocode_wanted = 0;
7627 ind = cur_text_section->data_offset;
7628 if (sym->a.aligned) {
7629 size_t newoff = section_add(cur_text_section, 0,
7630 1 << (sym->a.aligned - 1));
7631 gen_fill_nops(newoff - ind);
7633 /* NOTE: we patch the symbol size later */
7634 put_extern_sym(sym, cur_text_section, ind, 0);
7636 if (ad && ad->constructor) {
7637 add_init_array (tcc_state, sym);
7639 if (ad && ad->destructor) {
7640 add_fini_array (tcc_state, sym);
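/* Illustrative note (not from the original sources): functions declared with
   __attribute__((constructor)) / __attribute__((destructor)) are registered
   here via add_init_array()/add_fini_array() so the runtime runs them before
   and after main() respectively. */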
7643 funcname = get_tok_str(sym->v, NULL);
7644 func_ind = ind;
7646 /* put debug symbol */
7647 tcc_debug_funcstart(tcc_state, sym);
7648 /* push a dummy symbol to enable local sym storage */
7649 sym_push2(&local_stack, SYM_FIELD, 0, 0);
7650 local_scope = 1; /* for function parameters */
7651 gfunc_prolog(sym);
7652 local_scope = 0;
7653 rsym = 0;
7654 clear_temp_local_var_list();
7655 block(0);
7656 gsym(rsym);
7657 nocode_wanted = 0;
7658 /* reset local stack */
7659 pop_local_syms(&local_stack, NULL, 0);
7660 gfunc_epilog();
7661 cur_text_section->data_offset = ind;
7662 local_scope = 0;
7663 label_pop(&global_label_stack, NULL, 0);
7664 sym_pop(&all_cleanups, NULL, 0);
7665 /* patch symbol size */
7666 elfsym(sym)->st_size = ind - func_ind;
7667 /* end of function */
7668 tcc_debug_funcend(tcc_state, ind - func_ind);
7669 /* It's better to crash than to generate wrong code */
7670 cur_text_section = NULL;
7671 funcname = ""; /* for safety */
7672 func_vt.t = VT_VOID; /* for safety */
7673 func_var = 0; /* for safety */
7674 ind = 0; /* for safety */
7675 nocode_wanted = 0x80000000;
7676 check_vstack();
7679 static void gen_inline_functions(TCCState *s)
7681 Sym *sym;
7682 int inline_generated, i;
7683 struct InlineFunc *fn;
7685 tcc_open_bf(s, ":inline:", 0);
7686 /* iterate while inline functions are referenced */
7687 do {
7688 inline_generated = 0;
7689 for (i = 0; i < s->nb_inline_fns; ++i) {
7690 fn = s->inline_fns[i];
7691 sym = fn->sym;
7692 if (sym && (sym->c || !(sym->type.t & VT_INLINE))) {
7693 /* the function was used or forced (and then not internal):
7694 generate its code and convert it to a normal function */
7695 fn->sym = NULL;
7696 tcc_debug_putfile(s, fn->filename);
7697 begin_macro(fn->func_str, 1);
7698 next();
7699 cur_text_section = text_section;
7700 gen_function(sym, NULL);
7701 end_macro();
7703 inline_generated = 1;
7706 } while (inline_generated);
7707 tcc_close();
7710 static void free_inline_functions(TCCState *s)
7712 int i;
7713 /* free tokens of unused inline functions */
7714 for (i = 0; i < s->nb_inline_fns; ++i) {
7715 struct InlineFunc *fn = s->inline_fns[i];
7716 if (fn->sym)
7717 tok_str_free(fn->func_str);
7719 dynarray_reset(&s->inline_fns, &s->nb_inline_fns);
7722 /* 'l' is VT_LOCAL or VT_CONST to define default storage type, or VT_CMP
7723 if parsing old style parameter decl list (and FUNC_SYM is set then) */
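/* Illustrative example (not from the original sources): for the old-style
   definition
       int add(a, b) int a; int b; { return a + b; }
   decl0(VT_CMP, 0, sym) is called recursively on the 'int a; int b;'
   declaration list so the K&R parameters get their declared types. */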
7724 static int decl0(int l, int is_for_loop_init, Sym *func_sym)
7726 int v, has_init, r;
7727 CType type, btype;
7728 Sym *sym;
7729 AttributeDef ad, adbase;
7731 while (1) {
7732 if (tok == TOK_STATIC_ASSERT) {
7733 int c;
7735 next();
7736 skip('(');
7737 c = expr_const();
7738 skip(',');
7739 if (c == 0)
7740 tcc_error("%s", get_tok_str(tok, &tokc));
7741 next();
7742 skip(')');
7743 skip(';');
7744 continue;
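/* Illustrative example (not from the original sources):
       _Static_assert(sizeof(int) == 4, "unexpected int size");
   the constant expression is evaluated at parse time and the message
   token is reported through tcc_error() when the expression is zero. */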
7746 if (!parse_btype(&btype, &adbase)) {
7747 if (is_for_loop_init)
7748 return 0;
7749 /* skip redundant ';' if not in old parameter decl scope */
7750 if (tok == ';' && l != VT_CMP) {
7751 next();
7752 continue;
7754 if (l != VT_CONST)
7755 break;
7756 if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
7757 /* global asm block */
7758 asm_global_instr();
7759 continue;
7761 if (tok >= TOK_UIDENT) {
7762 /* special test for old K&R protos without explicit int
7763 type. Only accepted when defining global data */
7764 btype.t = VT_INT;
7765 } else {
7766 if (tok != TOK_EOF)
7767 expect("declaration");
7768 break;
7771 if (tok == ';') {
7772 if ((btype.t & VT_BTYPE) == VT_STRUCT) {
7773 int v = btype.ref->v;
7774 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) >= SYM_FIRST_ANOM)
7775 tcc_warning("unnamed struct/union that defines no instances");
7776 next();
7777 continue;
7779 if (IS_ENUM(btype.t)) {
7780 next();
7781 continue;
7784 while (1) { /* iterate thru each declaration */
7785 type = btype;
7786 /* If the base type itself was an array type of unspecified
7787 size (like in 'typedef int arr[]; arr x = {1};') then
7788 we will overwrite the unknown size by the real one for
7789 this decl. We need to unshare the ref symbol holding
7790 that size. */
7791 if ((type.t & VT_ARRAY) && type.ref->c < 0) {
7792 type.ref = sym_push(SYM_FIELD, &type.ref->type, 0, type.ref->c);
7794 ad = adbase;
7795 type_decl(&type, &ad, &v, TYPE_DIRECT);
7796 #if 0
7798 char buf[500];
7799 type_to_str(buf, sizeof(buf), &type, get_tok_str(v, NULL));
7800 printf("type = '%s'\n", buf);
7802 #endif
7803 if ((type.t & VT_BTYPE) == VT_FUNC) {
7804 if ((type.t & VT_STATIC) && (l == VT_LOCAL))
7805 tcc_error("function without file scope cannot be static");
7806 /* if old style function prototype, we accept a
7807 declaration list */
7808 sym = type.ref;
7809 if (sym->f.func_type == FUNC_OLD && l == VT_CONST)
7810 decl0(VT_CMP, 0, sym);
7811 /* always compile 'extern inline' */
7812 if (type.t & VT_EXTERN)
7813 type.t &= ~VT_INLINE;
7816 if (gnu_ext && (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
7817 ad.asm_label = asm_label_instr();
7818 /* parse one last attribute list, after asm label */
7819 parse_attribute(&ad);
7820 #if 0
7821 /* gcc does not allow __asm__("label") with function definition,
7822 but why not ... */
7823 if (tok == '{')
7824 expect(";");
7825 #endif
7828 #ifdef TCC_TARGET_PE
7829 if (ad.a.dllimport || ad.a.dllexport) {
7830 if (type.t & VT_STATIC)
7831 tcc_error("cannot have dll linkage with static");
7832 if (type.t & VT_TYPEDEF) {
7833 tcc_warning("'%s' attribute ignored for typedef",
7834 ad.a.dllimport ? (ad.a.dllimport = 0, "dllimport") :
7835 (ad.a.dllexport = 0, "dllexport"));
7836 } else if (ad.a.dllimport) {
7837 if ((type.t & VT_BTYPE) == VT_FUNC)
7838 ad.a.dllimport = 0;
7839 else
7840 type.t |= VT_EXTERN;
7843 #endif
7844 if (tok == '{') {
7845 if (l != VT_CONST)
7846 tcc_error("cannot use local functions");
7847 if ((type.t & VT_BTYPE) != VT_FUNC)
7848 expect("function definition");
7850 /* reject abstract declarators in function definitions;
7851 make old-style params without a decl have int type */
7852 sym = type.ref;
7853 while ((sym = sym->next) != NULL) {
7854 if (!(sym->v & ~SYM_FIELD))
7855 expect("identifier");
7856 if (sym->type.t == VT_VOID)
7857 sym->type = int_type;
7860 /* put function symbol */
7861 type.t &= ~VT_EXTERN;
7862 sym = external_sym(v, &type, 0, &ad);
7863 /* static inline functions are just recorded as a kind
7864 of macro. Their code will be emitted at the end of
7865 the compilation unit only if they are used */
7866 if (sym->type.t & VT_INLINE) {
7867 struct InlineFunc *fn;
7868 const char *filename;
7870 filename = file ? file->filename : "";
7871 fn = tcc_malloc(sizeof *fn + strlen(filename));
7872 strcpy(fn->filename, filename);
7873 fn->sym = sym;
7874 skip_or_save_block(&fn->func_str);
7875 dynarray_add(&tcc_state->inline_fns,
7876 &tcc_state->nb_inline_fns, fn);
7877 } else {
7878 /* compute text section */
7879 cur_text_section = ad.section;
7880 if (!cur_text_section)
7881 cur_text_section = text_section;
7882 gen_function(sym, &ad);
7884 break;
7885 } else {
7886 if (l == VT_CMP) {
7887 /* find parameter in function parameter list */
7888 for (sym = func_sym->next; sym; sym = sym->next)
7889 if ((sym->v & ~SYM_FIELD) == v)
7890 goto found;
7891 tcc_error("declaration for parameter '%s' but no such parameter",
7892 get_tok_str(v, NULL));
7893 found:
7894 if (type.t & VT_STORAGE) /* 'register' is okay */
7895 tcc_error("storage class specified for '%s'",
7896 get_tok_str(v, NULL));
7897 if (sym->type.t != VT_VOID)
7898 tcc_error("redefinition of parameter '%s'",
7899 get_tok_str(v, NULL));
7900 convert_parameter_type(&type);
7901 sym->type = type;
7902 } else if (type.t & VT_TYPEDEF) {
7903 /* save typedefed type */
7904 /* XXX: test storage specifiers ? */
7905 sym = sym_find(v);
7906 if (sym && sym->sym_scope == local_scope) {
7907 if (!is_compatible_types(&sym->type, &type)
7908 || !(sym->type.t & VT_TYPEDEF))
7909 tcc_error("incompatible redefinition of '%s'",
7910 get_tok_str(v, NULL));
7911 sym->type = type;
7912 } else {
7913 sym = sym_push(v, &type, 0, 0);
7915 sym->a = ad.a;
7916 sym->f = ad.f;
7917 } else if ((type.t & VT_BTYPE) == VT_VOID
7918 && !(type.t & VT_EXTERN)) {
7919 tcc_error("declaration of void object");
7920 } else {
7921 r = 0;
7922 if ((type.t & VT_BTYPE) == VT_FUNC) {
7923 /* external function definition */
7924 /* specific case for func_call attribute */
7925 type.ref->f = ad.f;
7926 } else if (!(type.t & VT_ARRAY)) {
7927 /* not lvalue if array */
7928 r |= VT_LVAL;
7930 has_init = (tok == '=');
7931 if (has_init && (type.t & VT_VLA))
7932 tcc_error("variable length array cannot be initialized");
7933 if (((type.t & VT_EXTERN) && (!has_init || l != VT_CONST))
7934 || (type.t & VT_BTYPE) == VT_FUNC
7935 /* as with GCC, uninitialized global arrays with no size
7936 are considered extern: */
7937 || ((type.t & VT_ARRAY) && !has_init
7938 && l == VT_CONST && type.ref->c < 0)
7940 /* external variable or function */
7941 type.t |= VT_EXTERN;
7942 sym = external_sym(v, &type, r, &ad);
7943 if (ad.alias_target) {
7944 ElfSym *esym;
7945 Sym *alias_target;
7946 alias_target = sym_find(ad.alias_target);
7947 esym = elfsym(alias_target);
7948 if (!esym)
7949 tcc_error("unsupported forward __alias__ attribute");
7950 put_extern_sym2(sym, esym->st_shndx, esym->st_value, esym->st_size, 0);
7952 } else {
7953 if (type.t & VT_STATIC)
7954 r |= VT_CONST;
7955 else
7956 r |= l;
7957 if (has_init)
7958 next();
7959 else if (l == VT_CONST)
7960 /* uninitialized global variables may be overridden */
7961 type.t |= VT_EXTERN;
7962 decl_initializer_alloc(&type, &ad, r, has_init, v, l);
7965 if (tok != ',') {
7966 if (is_for_loop_init)
7967 return 1;
7968 skip(';');
7969 break;
7971 next();
7975 return 0;
7978 static void decl(int l)
7980 decl0(l, 0, NULL);
7983 /* ------------------------------------------------------------------------- */
7984 #undef gjmp_addr
7985 #undef gjmp
7986 /* ------------------------------------------------------------------------- */