/*
 * Linearize - walk the statement tree (but _not_ the expressions)
 * to generate a linear version of it and the basic blocks.
 *
 * NOTE! We're not interested in the actual sub-expressions yet,
 * even though they can generate conditional branches and
 * subroutine calls. That's all "local" behaviour.
 *
 * Copyright (C) 2004 Linus Torvalds
 * Copyright (C) 2004 Christopher Li
 */

#include "expression.h"
#include "linearize.h"

static pseudo_t linearize_statement(struct entrypoint *ep, struct statement *stmt);
static pseudo_t linearize_expression(struct entrypoint *ep, struct expression *expr);

static pseudo_t add_cast(struct entrypoint *ep, struct symbol *to, struct symbol *from, int op, pseudo_t src);
static pseudo_t add_binary_op(struct entrypoint *ep, struct symbol *ctype, int op, pseudo_t left, pseudo_t right);
static pseudo_t linearize_one_symbol(struct entrypoint *ep, struct symbol *sym);
static pseudo_t cast_pseudo(struct entrypoint *ep, pseudo_t src, struct symbol *from, struct symbol *to);

struct pseudo void_pseudo = {};

static struct position current_pos;

ALLOCATOR(pseudo_user, "pseudo_user");
static struct instruction *alloc_instruction(int opcode, int size)
	struct instruction *insn = __alloc_instruction(0);
	insn->opcode = opcode;
	insn->pos = current_pos;

static inline int type_size(struct symbol *type)
	return type ? type->bit_size > 0 ? type->bit_size : 0 : 0;

static struct instruction *alloc_typed_instruction(int opcode, struct symbol *type)
	struct instruction *insn = alloc_instruction(opcode, type_size(type));

static struct entrypoint *alloc_entrypoint(void)
	return __alloc_entrypoint(0);

static struct basic_block *alloc_basic_block(struct entrypoint *ep, struct position pos)
	struct basic_block *bb = __alloc_basic_block(0);

static struct multijmp *alloc_multijmp(struct basic_block *target, long long begin, long long end)
	struct multijmp *multijmp = __alloc_multijmp(0);
	multijmp->target = target;
	multijmp->begin = begin;

const char *show_label(struct basic_block *bb)
	static char buffer[4][16];
	char *buf = buffer[3 & ++n];
	snprintf(buf, 64, ".L%u", bb->nr);
const char *show_pseudo(pseudo_t pseudo)
	static char buffer[4][64];
	buf = buffer[3 & ++n];
	switch (pseudo->type) {
		struct symbol *sym = pseudo->sym;
		struct expression *expr;
			snprintf(buf, 64, "<bad symbol>");
		if (sym->bb_target) {
			snprintf(buf, 64, "%s", show_label(sym->bb_target));
			snprintf(buf, 64, "%s", show_ident(sym->ident));
		expr = sym->initializer;
		snprintf(buf, 64, "<anon symbol:%p>", verbose ? sym : NULL);
		switch (expr->type) {
			snprintf(buf, 64, "<symbol value: %lld>", expr->value);
			return show_string(expr->string);
		i = snprintf(buf, 64, "%%r%d", pseudo->nr);
			sprintf(buf+i, "(%s)", show_ident(pseudo->ident));
		long long value = pseudo->value;
		if (value > 1000 || value < -1000)
			snprintf(buf, 64, "$%#llx", value);
			snprintf(buf, 64, "$%lld", value);
		snprintf(buf, 64, "%%arg%d", pseudo->nr);
		i = snprintf(buf, 64, "%%phi%d", pseudo->nr);
			sprintf(buf+i, "(%s)", show_ident(pseudo->ident));
		snprintf(buf, 64, "<bad pseudo type %d>", pseudo->type);
static const char *opcodes[] = {
	[OP_BADOP] = "bad_op",
	[OP_ENTRY] = "<entry-point>",
	[OP_SWITCH] = "switch",
	[OP_UNREACH] = "unreachable",
	[OP_COMPUTEDGOTO] = "jmp *",

	/* Floating-point Binary */

	/* Binary comparison */
	[OP_SET_EQ] = "seteq",
	[OP_SET_NE] = "setne",
	[OP_SET_LE] = "setle",
	[OP_SET_GE] = "setge",
	[OP_SET_LT] = "setlt",
	[OP_SET_GT] = "setgt",
	[OP_SET_BE] = "setbe",
	[OP_SET_AE] = "setae",

	/* floating-point comparison */
	[OP_FCMP_ORD] = "fcmpord",
	[OP_FCMP_OEQ] = "fcmpoeq",
	[OP_FCMP_ONE] = "fcmpone",
	[OP_FCMP_OLE] = "fcmpole",
	[OP_FCMP_OGE] = "fcmpoge",
	[OP_FCMP_OLT] = "fcmpolt",
	[OP_FCMP_OGT] = "fcmpogt",
	[OP_FCMP_UEQ] = "fcmpueq",
	[OP_FCMP_UNE] = "fcmpune",
	[OP_FCMP_ULE] = "fcmpule",
	[OP_FCMP_UGE] = "fcmpuge",
	[OP_FCMP_ULT] = "fcmpult",
	[OP_FCMP_UGT] = "fcmpugt",
	[OP_FCMP_UNO] = "fcmpuno",

	/* Special three-input */
	[OP_FMADD] = "fmadd",

	[OP_STORE] = "store",
	[OP_LABEL] = "label",
	[OP_SETFVAL] = "setfval",
	[OP_SYMADDR] = "symaddr",
	[OP_PHISOURCE] = "phisrc",
	[OP_TRUNC] = "trunc",
	[OP_FCVTU] = "fcvtu",
	[OP_FCVTS] = "fcvts",
	[OP_UCVTF] = "ucvtf",
	[OP_SCVTF] = "scvtf",
	[OP_FCVTF] = "fcvtf",
	[OP_UTPTR] = "utptr",
	[OP_PTRTU] = "ptrtu",
	[OP_PTRCAST] = "ptrcast",
	[OP_INLINED_CALL] = "# call",
	[OP_SLICE] = "slice",
	[OP_DEATHNOTE] = "dead",

	/* Sparse tagging (line numbers, context, whatever) */
	[OP_CONTEXT] = "context",
	[OP_RANGE] = "range-check",
static char *show_asm_constraints(char *buf, const char *sep, struct asm_constraint_list *list)
	struct asm_constraint *entry;

	FOR_EACH_PTR(list, entry) {
		buf += sprintf(buf, "%s\"%s\"", sep, entry->constraint);
			buf += sprintf(buf, " (%s)", show_pseudo(entry->pseudo));
			buf += sprintf(buf, " [%s]", show_ident(entry->ident));
	} END_FOR_EACH_PTR(entry);

static char *show_asm(char *buf, struct instruction *insn)
	struct asm_rules *rules = insn->asm_rules;

	buf += sprintf(buf, "\"%s\"", insn->string);
	buf = show_asm_constraints(buf, "\n\t\tout: ", rules->outputs);
	buf = show_asm_constraints(buf, "\n\t\tin: ", rules->inputs);
	buf = show_asm_constraints(buf, "\n\t\tclobber: ", rules->clobbers);
const char *show_instruction(struct instruction *insn)
	int opcode = insn->opcode;
	static char buffer[4096];

		buf += sprintf(buf, "# ");

	if (opcode < ARRAY_SIZE(opcodes)) {
		const char *op = opcodes[opcode];
			buf += sprintf(buf, "opcode:%d", opcode);
			buf += sprintf(buf, "%s", op);
			buf += sprintf(buf, ".%d", insn->size);
		memset(buf, ' ', 20);

	if (buf < buffer + 12)

		if (insn->src && insn->src != VOID)
			buf += sprintf(buf, "%s", show_pseudo(insn->src));

		buf += sprintf(buf, "%s, %s, %s", show_pseudo(insn->cond), show_label(insn->bb_true), show_label(insn->bb_false));

		buf += sprintf(buf, "%s", show_label(insn->bb_true));

		buf += sprintf(buf, "%s <- ", show_pseudo(insn->target));
		buf += sprintf(buf, "%s", show_label(insn->bb_true));

		struct expression *expr = insn->val;
		buf += sprintf(buf, "%s <- ", show_pseudo(insn->target));
			buf += sprintf(buf, "%s", "<none>");
		switch (expr->type) {
			buf += sprintf(buf, "%lld", expr->value);
			buf += sprintf(buf, "%Le", expr->fvalue);
			buf += sprintf(buf, "%.40s", show_string(expr->string));
			buf += sprintf(buf, "%s", show_ident(expr->symbol->ident));
			buf += sprintf(buf, "%s", show_label(expr->symbol->bb_target));
			buf += sprintf(buf, "SETVAL EXPR TYPE %d", expr->type);

		buf += sprintf(buf, "%s <- ", show_pseudo(insn->target));
		buf += sprintf(buf, "%Le", insn->fvalue);

		struct multijmp *jmp;
		buf += sprintf(buf, "%s", show_pseudo(insn->cond));
		FOR_EACH_PTR(insn->multijmp_list, jmp) {
			if (jmp->begin == jmp->end)
				buf += sprintf(buf, ", %lld -> %s", jmp->begin, show_label(jmp->target));
			else if (jmp->begin < jmp->end)
				buf += sprintf(buf, ", %lld ... %lld -> %s", jmp->begin, jmp->end, show_label(jmp->target));
				buf += sprintf(buf, ", default -> %s", show_label(jmp->target));
		} END_FOR_EACH_PTR(jmp);

	case OP_COMPUTEDGOTO: {
		struct multijmp *jmp;
		buf += sprintf(buf, "%s", show_pseudo(insn->src));
		FOR_EACH_PTR(insn->multijmp_list, jmp) {
			buf += sprintf(buf, ", %s", show_label(jmp->target));
		} END_FOR_EACH_PTR(jmp);

		buf += sprintf(buf, "%s <- %s ", show_pseudo(insn->target), show_pseudo(insn->phi_src));

		const char *s = " <-";
		buf += sprintf(buf, "%s", show_pseudo(insn->target));
		FOR_EACH_PTR(insn->phi_list, phi) {
			if (phi == VOID && !verbose)
			buf += sprintf(buf, "%s %s", s, show_pseudo(phi));
		} END_FOR_EACH_PTR(phi);

		buf += sprintf(buf, "%s <- %lld[%s]", show_pseudo(insn->target), insn->offset, show_pseudo(insn->src));

		buf += sprintf(buf, "%s -> %lld[%s]", show_pseudo(insn->target), insn->offset, show_pseudo(insn->src));

	case OP_INLINED_CALL:
		if (insn->target && insn->target != VOID)
			buf += sprintf(buf, "%s <- ", show_pseudo(insn->target));
		buf += sprintf(buf, "%s", show_pseudo(insn->func));
		FOR_EACH_PTR(insn->arguments, arg) {
			buf += sprintf(buf, ", %s", show_pseudo(arg));
		} END_FOR_EACH_PTR(arg);

	case OP_SEXT: case OP_ZEXT:
	case OP_FCVTU: case OP_FCVTS:
	case OP_UCVTF: case OP_SCVTF:
		buf += sprintf(buf, "%s <- (%d) %s",
			show_pseudo(insn->target),
			type_size(insn->orig_type),
			show_pseudo(insn->src));

	case OP_BINARY ... OP_BINARY_END:
	case OP_FPCMP ... OP_FPCMP_END:
	case OP_BINCMP ... OP_BINCMP_END:
		buf += sprintf(buf, "%s <- %s, %s", show_pseudo(insn->target), show_pseudo(insn->src1), show_pseudo(insn->src2));

		buf += sprintf(buf, "%s <- %s, %s, %s", show_pseudo(insn->target),
			show_pseudo(insn->src1), show_pseudo(insn->src2), show_pseudo(insn->src3));

		buf += sprintf(buf, "%s <- (%d) %s, %d", show_pseudo(insn->target), type_size(insn->orig_type), show_pseudo(insn->src), insn->from);

	case OP_NOT: case OP_NEG:
		buf += sprintf(buf, "%s <- %s", show_pseudo(insn->target), show_pseudo(insn->src1));

		buf += sprintf(buf, "%s%d", insn->check ? "check: " : "", insn->increment);

		buf += sprintf(buf, "%s between %s..%s", show_pseudo(insn->src1), show_pseudo(insn->src2), show_pseudo(insn->src3));

		buf += sprintf(buf, "%s <- %s", show_pseudo(insn->target), show_pseudo(insn->src1));

		buf += sprintf(buf, "%s", show_pseudo(insn->target));

		buf = show_asm(buf, insn);

		buf += sprintf(buf, "%s <- %s", show_pseudo(insn->target), show_pseudo(insn->src));

	if (buf >= buffer + sizeof(buffer))
		die("instruction buffer overflowed %td\n", buf - buffer);
	do { --buf; } while (*buf == ' ');
void show_bb(struct basic_block *bb)
	struct instruction *insn;

	printf("%s:\n", show_label(bb));
		pseudo_t needs, defines;
		printf("%s:%d\n", stream_name(bb->pos.stream), bb->pos.line);

		FOR_EACH_PTR(bb->needs, needs) {
			struct instruction *def = needs->def;
			if (def->opcode != OP_PHI) {
				printf(" **uses %s (from %s)**\n", show_pseudo(needs), show_label(def->bb));
				const char *sep = " ";
				printf(" **uses %s (from", show_pseudo(needs));
				FOR_EACH_PTR(def->phi_list, phi) {
					printf("%s(%s:%s)", sep, show_pseudo(phi), show_label(phi->def->bb));
				} END_FOR_EACH_PTR(phi);
		} END_FOR_EACH_PTR(needs);

		FOR_EACH_PTR(bb->defines, defines) {
			printf(" **defines %s **\n", show_pseudo(defines));
		} END_FOR_EACH_PTR(defines);

		struct basic_block *from;
		FOR_EACH_PTR(bb->parents, from) {
			printf(" **from %s (%s:%d:%d)**\n", show_label(from),
				stream_name(from->pos.stream), from->pos.line, from->pos.pos);
		} END_FOR_EACH_PTR(from);

		struct basic_block *to;
		FOR_EACH_PTR(bb->children, to) {
			printf(" **to %s (%s:%d:%d)**\n", show_label(to),
				stream_name(to->pos.stream), to->pos.line, to->pos.pos);
		} END_FOR_EACH_PTR(to);

	FOR_EACH_PTR(bb->insns, insn) {
		if (!insn->bb && verbose < 2)
		printf("\t%s\n", show_instruction(insn));
	} END_FOR_EACH_PTR(insn);
	if (!bb_terminated(bb))
// show BB of non-removed instruction
void show_insn_bb(struct instruction *insn)
	if (!insn || !insn->bb)

static void show_symbol_usage(pseudo_t pseudo)
	struct pseudo_user *pu;

	FOR_EACH_PTR(pseudo->users, pu) {
		printf("\t%s\n", show_instruction(pu->insn));
	} END_FOR_EACH_PTR(pu);

void show_entry(struct entrypoint *ep)
	struct basic_block *bb;

	printf("%s:\n", show_ident(ep->name->ident));
	printf("ep %p: %s\n", ep, show_ident(ep->name->ident));

	FOR_EACH_PTR(ep->syms, sym) {
		if (!sym->pseudo->users)
		printf(" sym: %p %s\n", sym, show_ident(sym->ident));
		if (sym->ctype.modifiers & (MOD_EXTERN | MOD_STATIC | MOD_ADDRESSABLE))
			printf("\texternal visibility\n");
		show_symbol_usage(sym->pseudo);
	} END_FOR_EACH_PTR(sym);

	FOR_EACH_PTR(ep->bbs, bb) {
		if (!bb->parents && !bb->children && !bb->insns && verbose < 2)
	} END_FOR_EACH_PTR(bb);

// show the function containing the instruction but only if not already removed.
void show_insn_entry(struct instruction *insn)
	if (!insn || !insn->bb || !insn->bb->ep)
	show_entry(insn->bb->ep);
static void bind_label(struct symbol *label, struct basic_block *bb, struct position pos)
	if (label->bb_target)
		warning(pos, "label '%s' already bound", show_ident(label->ident));
	label->bb_target = bb;

static struct basic_block * get_bound_block(struct entrypoint *ep, struct symbol *label)
	struct basic_block *bb = label->bb_target;

		bb = alloc_basic_block(ep, label->pos);
		label->bb_target = bb;

static void finish_block(struct entrypoint *ep)
	struct basic_block *src = ep->active;
	if (bb_reachable(src))

static void add_goto(struct entrypoint *ep, struct basic_block *dst)
	struct basic_block *src = ep->active;
	if (bb_reachable(src)) {
		struct instruction *br = alloc_instruction(OP_BR, 0);
		add_bb(&dst->parents, src);
		add_bb(&src->children, dst);
		add_instruction(&src->insns, br);

static void add_one_insn(struct entrypoint *ep, struct instruction *insn)
	struct basic_block *bb = ep->active;

	if (bb_reachable(bb)) {
		add_instruction(&bb->insns, insn);

static void add_unreachable(struct entrypoint *ep)
	struct instruction *insn = alloc_instruction(OP_UNREACH, 0);
	add_one_insn(ep, insn);

static void set_activeblock(struct entrypoint *ep, struct basic_block *bb)
	if (!bb_terminated(ep->active))

	if (bb_reachable(bb))
		add_bb(&ep->bbs, bb);
void insert_select(struct basic_block *bb, struct instruction *br, struct instruction *phi_node, pseudo_t if_true, pseudo_t if_false)
	struct instruction *select;

	select = alloc_typed_instruction(OP_SEL, phi_node->type);
	use_pseudo(select, br->cond, &select->src1);

	target = phi_node->target;
	assert(target->def == phi_node);
	select->target = target;
	target->def = select;

	use_pseudo(select, if_true, &select->src2);
	use_pseudo(select, if_false, &select->src3);

	insert_last_instruction(bb, select);
static inline int bb_empty(struct basic_block *bb)

/* Add a label to the currently active block, return new active block */
static struct basic_block * add_label(struct entrypoint *ep, struct symbol *label)
	struct basic_block *bb = label->bb_target;

		set_activeblock(ep, bb);

	if (!bb_reachable(bb) || !bb_empty(bb)) {
		bb = alloc_basic_block(ep, label->pos);
		set_activeblock(ep, bb);
	label->bb_target = bb;

static void add_branch(struct entrypoint *ep, pseudo_t cond, struct basic_block *bb_true, struct basic_block *bb_false)
	struct basic_block *bb = ep->active;
	struct instruction *br;

	if (bb_reachable(bb)) {
		br = alloc_instruction(OP_CBR, 0);
		use_pseudo(br, cond, &br->cond);
		br->bb_true = bb_true;
		br->bb_false = bb_false;
		add_bb(&bb_true->parents, bb);
		add_bb(&bb_false->parents, bb);
		add_bb(&bb->children, bb_true);
		add_bb(&bb->children, bb_false);
		add_one_insn(ep, br);
pseudo_t alloc_pseudo(struct instruction *def)
	struct pseudo * pseudo = __alloc_pseudo(0);
	pseudo->type = PSEUDO_REG;

static pseudo_t symbol_pseudo(struct entrypoint *ep, struct symbol *sym)
	pseudo = sym->pseudo;
		pseudo = __alloc_pseudo(0);
		pseudo->type = PSEUDO_SYM;
		pseudo->ident = sym->ident;
		sym->pseudo = pseudo;
		add_pseudo(&ep->accesses, pseudo);
	/* Symbol pseudos have neither nr nor def */
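
/*
 * Value pseudos are interned: a small hash table keyed on the constant
 * keeps one PSEUDO_VAL per distinct value, so repeated uses of the same
 * constant share a single pseudo.
 */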
pseudo_t value_pseudo(long long val)
#define MAX_VAL_HASH 64
	static struct pseudo_list *prev[MAX_VAL_HASH];
	int hash = val & (MAX_VAL_HASH-1);
	struct pseudo_list **list = prev + hash;

	FOR_EACH_PTR(*list, pseudo) {
		if (pseudo->value == val)
	} END_FOR_EACH_PTR(pseudo);

	pseudo = __alloc_pseudo(0);
	pseudo->type = PSEUDO_VAL;
	add_pseudo(list, pseudo);

	/* Value pseudos have neither nr, usage nor def */

pseudo_t undef_pseudo(void)
	pseudo_t pseudo = __alloc_pseudo(0);
	pseudo->type = PSEUDO_UNDEF;

static pseudo_t argument_pseudo(struct entrypoint *ep, int nr)
	pseudo_t pseudo = __alloc_pseudo(0);
	struct instruction *entry = ep->entry;

	pseudo->type = PSEUDO_ARG;
	add_pseudo(&entry->arg_list, pseudo);

	/* Argument pseudos have neither usage nor def */

struct instruction *alloc_phisrc(pseudo_t pseudo, struct symbol *type)
	struct instruction *insn = alloc_typed_instruction(OP_PHISOURCE, type);
	pseudo_t phi = __alloc_pseudo(0);

	phi->type = PSEUDO_PHI;
	use_pseudo(insn, pseudo, &insn->phi_src);

pseudo_t alloc_phi(struct basic_block *source, pseudo_t pseudo, struct symbol *type)
	struct instruction *insn;

	insn = alloc_phisrc(pseudo, type);
	add_instruction(&source->insns, insn);

struct instruction *alloc_phi_node(struct basic_block *bb, struct symbol *type, struct ident *ident)
	struct instruction *phi_node = alloc_typed_instruction(OP_PHI, type);

	phi = alloc_pseudo(phi_node);
	phi_node->target = phi;

void add_phi_node(struct basic_block *bb, struct instruction *phi_node)
	struct instruction *insn;

	FOR_EACH_PTR(bb->insns, insn) {
		enum opcode op = insn->opcode;
		INSERT_CURRENT(phi_node, insn);
	} END_FOR_EACH_PTR(insn);
	add_instruction(&bb->insns, phi_node);

struct instruction *insert_phi_node(struct basic_block *bb, struct symbol *var)
	struct instruction *phi_node = alloc_phi_node(bb, var, var->ident);
	add_phi_node(bb, phi_node);
/*
 * We carry the "access_data" structure around for any accesses,
 * which simplifies things a lot. It contains all the access
 * information in one place.
 */
struct access_data {
	struct symbol *type;		// ctype
	struct symbol *btype;		// base type of bitfields
	pseudo_t address;		// pseudo containing address ..
	long long offset;		// byte offset
};
static int linearize_simple_address(struct entrypoint *ep,
	struct expression *addr,
	struct access_data *ad)
	if (addr->type == EXPR_SYMBOL) {
		linearize_one_symbol(ep, addr->symbol);
		ad->address = symbol_pseudo(ep, addr->symbol);
	if (addr->type == EXPR_BINOP) {
		if (addr->right->type == EXPR_VALUE) {
			if (addr->op == '+') {
				ad->offset += get_expression_value(addr->right);
				return linearize_simple_address(ep, addr->left, ad);
	ad->address = linearize_expression(ep, addr);

static struct symbol *bitfield_base_type(struct symbol *sym)
	struct symbol *base = sym;

	if (sym->type == SYM_NODE)
		base = base->ctype.base_type;
	if (base->type == SYM_BITFIELD) {
		base = base->ctype.base_type;
		int size = bits_to_bytes(sym->bit_offset + sym->bit_size);
		sym = __alloc_symbol(0);
		sym->bit_size = bytes_to_bits(size);
static int linearize_address_gen(struct entrypoint *ep,
	struct expression *expr,
	struct access_data *ad)
	struct symbol *ctype = expr->ctype;

	if (expr->type == EXPR_PREOP && expr->op == '*')
		return linearize_simple_address(ep, expr->unop, ad);

	warning(expr->pos, "generating address of non-lvalue (%d)", expr->type);

static pseudo_t add_load(struct entrypoint *ep, struct access_data *ad)
	struct instruction *insn;

	insn = alloc_typed_instruction(OP_LOAD, ad->btype);
	new = alloc_pseudo(insn);
	insn->offset = ad->offset;
	insn->is_volatile = ad->type && (ad->type->ctype.modifiers & MOD_VOLATILE);
	use_pseudo(insn, ad->address, &insn->src);
	add_one_insn(ep, insn);
static void add_store(struct entrypoint *ep, struct access_data *ad, pseudo_t value)
	struct basic_block *bb = ep->active;
	struct instruction *store;

	store = alloc_typed_instruction(OP_STORE, ad->btype);
	store->offset = ad->offset;
	store->is_volatile = ad->type && (ad->type->ctype.modifiers & MOD_VOLATILE);
	use_pseudo(store, value, &store->target);
	use_pseudo(store, ad->address, &store->src);
	add_one_insn(ep, store);
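
/*
 * Bitfield stores are lowered as a read-modify-write sequence:
 * zero-extend the new value to the base type, shift it into place,
 * mask the old word, and OR the two halves together.
 */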
static pseudo_t linearize_bitfield_insert(struct entrypoint *ep,
	pseudo_t ori, pseudo_t val, struct symbol *ctype, struct symbol *btype)
	unsigned int shift = ctype->bit_offset;
	unsigned int size = ctype->bit_size;
	unsigned long long mask = ((1ULL << size) - 1);
	unsigned long long smask = bits_mask(btype->bit_size);

	val = add_cast(ep, btype, ctype, OP_ZEXT, val);
	val = add_binary_op(ep, btype, OP_SHL, val, value_pseudo(shift));
	ori = add_binary_op(ep, btype, OP_AND, ori, value_pseudo(~mask & smask));
	val = add_binary_op(ep, btype, OP_OR, ori, val);

static pseudo_t linearize_store_gen(struct entrypoint *ep,
	struct access_data *ad)
	struct symbol *ctype = ad->type;
	struct symbol *btype;
	pseudo_t store = value;

	btype = ad->btype = bitfield_base_type(ctype);
	if (type_size(btype) != type_size(ctype)) {
		pseudo_t orig = add_load(ep, ad);
		store = linearize_bitfield_insert(ep, orig, value, ctype, btype);
	add_store(ep, ad, store);
static void taint_undefined_behaviour(struct instruction *insn)
	switch (insn->opcode) {
		if (src2->type != PSEUDO_VAL)
		if ((unsigned long long)src2->value >= insn->size)
static pseudo_t add_binary_op(struct entrypoint *ep, struct symbol *ctype, int op, pseudo_t left, pseudo_t right)
	struct instruction *insn = alloc_typed_instruction(op, ctype);
	pseudo_t target = alloc_pseudo(insn);
	insn->target = target;
	use_pseudo(insn, left, &insn->src1);
	use_pseudo(insn, right, &insn->src2);
	add_one_insn(ep, insn);

static pseudo_t add_cmp_op(struct entrypoint *ep, struct symbol *ctype, int op, struct symbol *itype, pseudo_t left, pseudo_t right)
	pseudo_t target = add_binary_op(ep, ctype, op, left, right);
	target->def->itype = itype;

static pseudo_t add_setval(struct entrypoint *ep, struct symbol *ctype, struct expression *val)
	struct instruction *insn = alloc_typed_instruction(OP_SETVAL, ctype);
	pseudo_t target = alloc_pseudo(insn);
	insn->target = target;
	add_one_insn(ep, insn);

static pseudo_t add_setfval(struct entrypoint *ep, struct symbol *ctype, long double fval)
	struct instruction *insn = alloc_typed_instruction(OP_SETFVAL, ctype);
	pseudo_t target = alloc_pseudo(insn);
	insn->target = target;
	insn->fvalue = fval;
	add_one_insn(ep, insn);
static pseudo_t add_symbol_address(struct entrypoint *ep, struct expression *expr)
	struct instruction *insn = alloc_typed_instruction(OP_SYMADDR, expr->ctype);
	pseudo_t target = alloc_pseudo(insn);

	insn->target = target;
	use_pseudo(insn, symbol_pseudo(ep, expr->symbol), &insn->src);
	add_one_insn(ep, insn);

static pseudo_t linearize_bitfield_extract(struct entrypoint *ep,
	pseudo_t val, struct symbol *ctype, struct symbol *btype)
	unsigned int off = ctype->bit_offset;

	pseudo_t shift = value_pseudo(off);
	val = add_binary_op(ep, btype, OP_LSR, val, shift);
	val = cast_pseudo(ep, val, btype, ctype);

static pseudo_t linearize_load_gen(struct entrypoint *ep, struct access_data *ad)
	struct symbol *ctype = ad->type;
	struct symbol *btype;

	btype = ad->btype = bitfield_base_type(ctype);
	new = add_load(ep, ad);
	if (ctype->bit_size != type_size(btype))
		new = linearize_bitfield_extract(ep, new, ctype, btype);
static pseudo_t linearize_access(struct entrypoint *ep, struct expression *expr)
	struct access_data ad = { NULL, };

	if (!linearize_address_gen(ep, expr, &ad))
	value = linearize_load_gen(ep, &ad);

static pseudo_t linearize_inc_dec(struct entrypoint *ep, struct expression *expr, int postop)
	struct access_data ad = { NULL, };
	pseudo_t old, new, one;
	int op = expr->op == SPECIAL_INCREMENT ? OP_ADD : OP_SUB;

	if (!linearize_address_gen(ep, expr->unop, &ad))

	old = linearize_load_gen(ep, &ad);
	op = opcode_float(op, expr->ctype);
	if (is_float_type(expr->ctype))
		one = add_setfval(ep, expr->ctype, expr->op_value);
		one = value_pseudo(expr->op_value);
	if (ad.btype != ad.type)
		old = cast_pseudo(ep, old, ad.type, ad.btype);
	new = add_binary_op(ep, ad.btype, op, old, one);
	if (ad.btype != ad.type)
		new = cast_pseudo(ep, new, ad.btype, ad.type);
	linearize_store_gen(ep, new, &ad);
	return postop ? old : new;
static pseudo_t add_unop(struct entrypoint *ep, struct symbol *ctype, int op, pseudo_t src)
	struct instruction *insn = alloc_typed_instruction(op, ctype);
	pseudo_t new = alloc_pseudo(insn);

	use_pseudo(insn, src, &insn->src1);
	add_one_insn(ep, insn);

static pseudo_t add_cast(struct entrypoint *ep, struct symbol *to,
	struct symbol *from, int op, pseudo_t src)
	pseudo_t new = add_unop(ep, to, op, src);
	new->def->orig_type = from;

static pseudo_t linearize_slice(struct entrypoint *ep, struct expression *expr)
	pseudo_t pre = linearize_expression(ep, expr->base);
	struct instruction *insn = alloc_typed_instruction(OP_SLICE, expr->ctype);
	pseudo_t new = alloc_pseudo(insn);

	insn->from = expr->r_bitpos;
	insn->orig_type = expr->base->ctype;
	use_pseudo(insn, pre, &insn->src);
	add_one_insn(ep, insn);
static pseudo_t linearize_regular_preop(struct entrypoint *ep, struct expression *expr)
	pseudo_t pre = linearize_expression(ep, expr->unop);
	struct symbol *ctype = expr->ctype;

		pseudo_t zero = value_pseudo(0);
		return add_cmp_op(ep, ctype, OP_SET_EQ, expr->unop->ctype, pre, zero);
		return add_unop(ep, ctype, OP_NOT, pre);
		return add_unop(ep, ctype, opcode_float(OP_NEG, ctype), pre);

static pseudo_t linearize_preop(struct entrypoint *ep, struct expression *expr)
	/*
	 * '*' is an lvalue access, and is fundamentally different
	 * from an arithmetic operation. Maybe it should have an
	 * expression type of its own..
	 */
	if (expr->op == '*')
		return linearize_access(ep, expr);
	if (expr->op == SPECIAL_INCREMENT || expr->op == SPECIAL_DECREMENT)
		return linearize_inc_dec(ep, expr, 0);
	return linearize_regular_preop(ep, expr);

static pseudo_t linearize_postop(struct entrypoint *ep, struct expression *expr)
	return linearize_inc_dec(ep, expr, 1);
/*
 * Casts to pointers are "less safe" than other casts, since
 * they imply type-unsafe accesses. "void *" is a special
 * case, since you can't access through it anyway without another
	MTYPE_VPTR,	// TODO: must be removed ?

static enum mtype get_mtype(struct symbol *s)
	int sign = (s->ctype.modifiers & MOD_SIGNED) ? 1 : 0;

retry:	switch (s->type) {
		s = s->ctype.base_type;
		if (s->ctype.base_type == &void_ctype)
		s = s->ctype.base_type;
		return sign ? MTYPE_SINT : MTYPE_UINT;
		if (s->ctype.base_type == &fp_type)
		if (s->ctype.base_type == &int_type)
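
/*
 * Choose the cast opcode from the source and destination type classes:
 * same-width integer casts are cheap, narrowing truncates, widening
 * sign- or zero-extends depending on the source signedness, and
 * int/float conversions use the fcvt/scvtf/ucvtf opcodes.
 */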
static int get_cast_opcode(struct symbol *dst, struct symbol *src)
	enum mtype stype = get_mtype(src);
	enum mtype dtype = get_mtype(dst);

		if (dst->bit_size == src->bit_size)
		return dtype == MTYPE_UINT ? OP_FCVTU : OP_FCVTS;
		if (dst->bit_size == src->bit_size)
		if (dst->bit_size < src->bit_size)
		return stype == MTYPE_SINT ? OP_SEXT : OP_ZEXT;
		if (src->type == SYM_NODE)
			src = src->ctype.base_type;
		if (dst->type == SYM_NODE)
			dst = dst->ctype.base_type;
static pseudo_t cast_pseudo(struct entrypoint *ep, pseudo_t src, struct symbol *from, struct symbol *to)
	const struct position pos = current_pos;
	struct instruction *insn;

	if (from->bit_size < 0 || to->bit_size < 0)
	opcode = get_cast_opcode(to, from);
		if (from->bit_size == to->bit_size)
		if (src == value_pseudo(0))
		if (Wint_to_pointer_cast)
			warning(pos, "non size-preserving integer to pointer cast");
		src = cast_pseudo(ep, src, from, size_t_ctype);
		from = size_t_ctype;
		if (from->bit_size == to->bit_size)
		if (Wpointer_to_int_cast)
			warning(pos, "non size-preserving pointer to integer cast");
		src = cast_pseudo(ep, src, from, size_t_ctype);
		return cast_pseudo(ep, src, size_t_ctype, to);
	insn = alloc_typed_instruction(opcode, to);
	result = alloc_pseudo(insn);
	insn->target = result;
	insn->orig_type = from;
	use_pseudo(insn, src, &insn->src);
	add_one_insn(ep, insn);
static int map_opcode(int opcode, struct symbol *ctype)
	if (ctype && is_float_type(ctype))
		return opcode_table[opcode].to_float;
	if (ctype && (ctype->ctype.modifiers & MOD_SIGNED)) {
		case OP_DIVU: case OP_MODU: case OP_LSR:

static inline pseudo_t add_convert_to_bool(struct entrypoint *ep, pseudo_t src, struct symbol *type)
	if (!type || src == VOID)
	if (is_bool_type(type))
	if (src->type == PSEUDO_VAL && (src->value == 0 || src->value == 1))
	if (is_float_type(type)) {
		zero = add_setfval(ep, type, 0.0);
		op = map_opcode(OP_SET_NE, type);
		zero = value_pseudo(0);
	return add_cmp_op(ep, &bool_ctype, op, type, src, zero);

static pseudo_t linearize_expression_to_bool(struct entrypoint *ep, struct expression *expr)
	dst = linearize_expression(ep, expr);
	dst = add_convert_to_bool(ep, dst, expr->ctype);
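
/*
 * Compound assignments (+=, -=, ...) load the old value, apply the
 * mapped binary opcode, and store the result back; a plain '=' just
 * stores the linearized right-hand side.
 */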
static pseudo_t linearize_assignment(struct entrypoint *ep, struct expression *expr)
	struct access_data ad = { NULL, };
	struct expression *target = expr->left;
	struct expression *src = expr->right;
	struct symbol *ctype;

	value = linearize_expression(ep, src);
	if (!target || !linearize_address_gen(ep, target, &ad))
	if (expr->op != '=') {
		pseudo_t oldvalue = linearize_load_gen(ep, &ad);
		static const int op_trans[] = {
			[SPECIAL_ADD_ASSIGN - SPECIAL_BASE] = OP_ADD,
			[SPECIAL_SUB_ASSIGN - SPECIAL_BASE] = OP_SUB,
			[SPECIAL_MUL_ASSIGN - SPECIAL_BASE] = OP_MUL,
			[SPECIAL_DIV_ASSIGN - SPECIAL_BASE] = OP_DIVU,
			[SPECIAL_MOD_ASSIGN - SPECIAL_BASE] = OP_MODU,
			[SPECIAL_SHL_ASSIGN - SPECIAL_BASE] = OP_SHL,
			[SPECIAL_SHR_ASSIGN - SPECIAL_BASE] = OP_LSR,
			[SPECIAL_AND_ASSIGN - SPECIAL_BASE] = OP_AND,
			[SPECIAL_OR_ASSIGN - SPECIAL_BASE] = OP_OR,
			[SPECIAL_XOR_ASSIGN - SPECIAL_BASE] = OP_XOR
		oldvalue = cast_pseudo(ep, oldvalue, target->ctype, ctype);
		opcode = map_opcode(op_trans[expr->op - SPECIAL_BASE], ctype);
		dst = add_binary_op(ep, ctype, opcode, oldvalue, value);
		taint_undefined_behaviour(dst->def);
		value = cast_pseudo(ep, dst, ctype, expr->ctype);
	value = linearize_store_gen(ep, value, &ad);
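
/*
 * Function calls: each argument is linearized left to right into the
 * call's argument list, the callee becomes either a symbol pseudo or a
 * computed value, and any context annotations on the function type
 * emit OP_CONTEXT instructions after the call.
 */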
static pseudo_t linearize_call_expression(struct entrypoint *ep, struct expression *expr)
	struct expression *arg, *fn;
	struct instruction *insn;
	pseudo_t retval, call;
	struct ctype *ctype = NULL;
	struct symbol *fntype;
	struct context *context;

	if (fntype->op && fntype->op->linearize) {
		retval = fntype->op->linearize(ep, expr);

	ctype = &fntype->ctype;

	insn = alloc_typed_instruction(OP_CALL, expr->ctype);
	add_symbol(&insn->fntypes, fntype);
	FOR_EACH_PTR(expr->args, arg) {
		pseudo_t new = linearize_expression(ep, arg);
		use_pseudo(insn, new, add_pseudo(&insn->arguments, new));
		add_symbol(&insn->fntypes, arg->ctype);
	} END_FOR_EACH_PTR(arg);

	if (fn->type == EXPR_PREOP && fn->op == '*' && is_func_type(fn->ctype))

	if (fn->type == EXPR_SYMBOL) {
		call = symbol_pseudo(ep, fn->symbol);
		call = linearize_expression(ep, fn);
	use_pseudo(insn, call, &insn->func);

	if (expr->ctype != &void_ctype)
		retval = alloc_pseudo(insn);
	insn->target = retval;
	add_one_insn(ep, insn);

	FOR_EACH_PTR(ctype->contexts, context) {
		int in = context->in;
		int out = context->out;

		context_diff = out - in;
		if (check || context_diff) {
			insn = alloc_instruction(OP_CONTEXT, 0);
			insn->increment = context_diff;
			insn->check = check;
			insn->context_expr = context->context;
			add_one_insn(ep, insn);
	} END_FOR_EACH_PTR(context);

	if (ctype->modifiers & MOD_NORETURN)
		add_unreachable(ep);
static pseudo_t linearize_binop_bool(struct entrypoint *ep, struct expression *expr)
	pseudo_t src1, src2, dst;
	int op = (expr->op == SPECIAL_LOGICAL_OR) ? OP_OR : OP_AND;

	src1 = linearize_expression_to_bool(ep, expr->left);
	src2 = linearize_expression_to_bool(ep, expr->right);
	dst = add_binary_op(ep, &bool_ctype, op, src1, src2);
	if (expr->ctype != &bool_ctype)
		dst = cast_pseudo(ep, dst, &bool_ctype, expr->ctype);

static pseudo_t linearize_binop(struct entrypoint *ep, struct expression *expr)
	pseudo_t src1, src2, dst;
	static const int opcode[] = {
		['+'] = OP_ADD, ['-'] = OP_SUB,
		['*'] = OP_MUL, ['/'] = OP_DIVU,
		['%'] = OP_MODU, ['&'] = OP_AND,
		['|'] = OP_OR, ['^'] = OP_XOR,
		[SPECIAL_LEFTSHIFT] = OP_SHL,
		[SPECIAL_RIGHTSHIFT] = OP_LSR,

	src1 = linearize_expression(ep, expr->left);
	src2 = linearize_expression(ep, expr->right);
	op = map_opcode(opcode[expr->op], expr->ctype);
	dst = add_binary_op(ep, expr->ctype, op, src1, src2);
	taint_undefined_behaviour(dst->def);
static pseudo_t linearize_logical_branch(struct entrypoint *ep, struct expression *expr, struct basic_block *bb_true, struct basic_block *bb_false);

static pseudo_t linearize_cond_branch(struct entrypoint *ep, struct expression *expr, struct basic_block *bb_true, struct basic_block *bb_false);
static pseudo_t linearize_select(struct entrypoint *ep, struct expression *expr)
	pseudo_t cond, valt, valf, res;
	struct instruction *insn;

	valt = linearize_expression(ep, expr->cond_true);
	valf = linearize_expression(ep, expr->cond_false);
	cond = linearize_expression(ep, expr->conditional);

	insn = alloc_typed_instruction(OP_SEL, expr->ctype);
	if (!expr->cond_true)
	use_pseudo(insn, cond, &insn->src1);
	use_pseudo(insn, valt, &insn->src2);
	use_pseudo(insn, valf, &insn->src3);

	res = alloc_pseudo(insn);
	add_one_insn(ep, insn);

static pseudo_t add_join_conditional(struct entrypoint *ep, struct expression *expr,
	pseudo_t phi1, pseudo_t phi2)
	struct instruction *phi_node;

		return (phi2 == VOID) ? phi2 : phi2->def->src;
		return (phi1 == VOID) ? phi1 : phi1->def->src;

	phi_node = alloc_typed_instruction(OP_PHI, expr->ctype);
	link_phi(phi_node, phi1);
	link_phi(phi_node, phi2);
	phi_node->target = target = alloc_pseudo(phi_node);
	add_one_insn(ep, phi_node);
static pseudo_t linearize_short_conditional(struct entrypoint *ep, struct expression *expr,
	struct expression *cond,
	struct expression *expr_false)
	pseudo_t src1, src2;
	struct basic_block *bb_false;
	struct basic_block *merge;
	pseudo_t phi1, phi2;

	if (!expr_false || !ep->active)

	bb_false = alloc_basic_block(ep, expr_false->pos);
	merge = alloc_basic_block(ep, expr->pos);

	src1 = linearize_expression(ep, cond);
	phi1 = alloc_phi(ep->active, src1, expr->ctype);
	add_branch(ep, src1, merge, bb_false);

	set_activeblock(ep, bb_false);
	src2 = linearize_expression(ep, expr_false);
	phi2 = alloc_phi(ep->active, src2, expr->ctype);
	set_activeblock(ep, merge);

	return add_join_conditional(ep, expr, phi1, phi2);

static pseudo_t linearize_conditional(struct entrypoint *ep, struct expression *expr,
	struct expression *cond,
	struct expression *expr_true,
	struct expression *expr_false)
	pseudo_t src1, src2;
	pseudo_t phi1, phi2;
	struct basic_block *bb_true, *bb_false, *merge;

	if (!cond || !expr_true || !expr_false || !ep->active)

	bb_true = alloc_basic_block(ep, expr_true->pos);
	bb_false = alloc_basic_block(ep, expr_false->pos);
	merge = alloc_basic_block(ep, expr->pos);

	linearize_cond_branch(ep, cond, bb_true, bb_false);

	set_activeblock(ep, bb_true);
	src1 = linearize_expression(ep, expr_true);
	phi1 = alloc_phi(ep->active, src1, expr->ctype);
	add_goto(ep, merge);

	set_activeblock(ep, bb_false);
	src2 = linearize_expression(ep, expr_false);
	phi2 = alloc_phi(ep->active, src2, expr->ctype);
	set_activeblock(ep, merge);

	return add_join_conditional(ep, expr, phi1, phi2);
static void insert_phis(struct basic_block *bb, pseudo_t src, struct symbol *ctype,
	struct instruction *node)
	struct basic_block *parent;

	FOR_EACH_PTR(bb->parents, parent) {
		struct instruction *phisrc = alloc_phisrc(src, ctype);
		insert_last_instruction(parent, phisrc);
		link_phi(node, phisrc->target);
	} END_FOR_EACH_PTR(parent);
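
/*
 * Non-branch use of && and ||: branch on the left operand, feed the
 * constant 0/1 from the short-circuit edge and the right operand's
 * value into a phi node in the merge block, and return its target.
 */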
static pseudo_t linearize_logical(struct entrypoint *ep, struct expression *expr)
	struct symbol *ctype = expr->ctype;
	struct basic_block *other, *merge;
	struct instruction *node;
	pseudo_t src1, src2, phi2;

	if (!ep->active || !expr->left || !expr->right)

	other = alloc_basic_block(ep, expr->right->pos);
	merge = alloc_basic_block(ep, expr->pos);
	node = alloc_phi_node(merge, ctype, NULL);

	// LHS and its shortcut
	if (expr->op == SPECIAL_LOGICAL_OR) {
		linearize_cond_branch(ep, expr->left, merge, other);
		src1 = value_pseudo(1);
		linearize_cond_branch(ep, expr->left, other, merge);
		src1 = value_pseudo(0);
	insert_phis(merge, src1, ctype, node);

	set_activeblock(ep, other);
	src2 = linearize_expression_to_bool(ep, expr->right);
	src2 = cast_pseudo(ep, src2, &bool_ctype, ctype);
	phi2 = alloc_phi(ep->active, src2, ctype);
	link_phi(node, phi2);

	set_activeblock(ep, merge);
	add_instruction(&merge->insns, node);
	return node->target;
static pseudo_t linearize_compare(struct entrypoint *ep, struct expression *expr)
	static const int cmpop[] = {
		['>'] = OP_SET_GT, ['<'] = OP_SET_LT,
		[SPECIAL_EQUAL] = OP_SET_EQ,
		[SPECIAL_NOTEQUAL] = OP_SET_NE,
		[SPECIAL_GTE] = OP_SET_GE,
		[SPECIAL_LTE] = OP_SET_LE,
		[SPECIAL_UNSIGNED_LT] = OP_SET_B,
		[SPECIAL_UNSIGNED_GT] = OP_SET_A,
		[SPECIAL_UNSIGNED_LTE] = OP_SET_BE,
		[SPECIAL_UNSIGNED_GTE] = OP_SET_AE,

	struct symbol *itype = expr->right->ctype;
	int op = opcode_float(cmpop[expr->op], itype);
	pseudo_t src1 = linearize_expression(ep, expr->left);
	pseudo_t src2 = linearize_expression(ep, expr->right);
	pseudo_t dst = add_cmp_op(ep, expr->ctype, op, itype, src1, src2);
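
/*
 * Emit a conditional branch for 'expr': constant conditions fold into
 * plain gotos, comparisons branch directly on their result, and '!'
 * simply swaps the true and false targets.
 */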
static pseudo_t linearize_cond_branch(struct entrypoint *ep, struct expression *expr, struct basic_block *bb_true, struct basic_block *bb_false)
	if (!expr || !valid_type(expr->ctype) || !bb_reachable(ep->active))

	switch (expr->type) {
		add_goto(ep, expr->value ? bb_true : bb_false);
		add_goto(ep, expr->fvalue ? bb_true : bb_false);
		linearize_logical_branch(ep, expr, bb_true, bb_false);
		cond = linearize_compare(ep, expr);
		add_branch(ep, cond, bb_true, bb_false);
		if (expr->op == '!')
			return linearize_cond_branch(ep, expr->unop, bb_false, bb_true);
		cond = linearize_expression_to_bool(ep, expr);
		add_branch(ep, cond, bb_true, bb_false);

static pseudo_t linearize_logical_branch(struct entrypoint *ep, struct expression *expr, struct basic_block *bb_true, struct basic_block *bb_false)
	struct basic_block *next = alloc_basic_block(ep, expr->pos);

	if (expr->op == SPECIAL_LOGICAL_OR)
		linearize_cond_branch(ep, expr->left, bb_true, next);
		linearize_cond_branch(ep, expr->left, next, bb_false);
	set_activeblock(ep, next);
	linearize_cond_branch(ep, expr->right, bb_true, bb_false);
static pseudo_t linearize_cast(struct entrypoint *ep, struct expression *expr)
	struct expression *orig = expr->cast_expression;

	src = linearize_expression(ep, orig);
	return cast_pseudo(ep, src, orig->ctype, expr->ctype);

static pseudo_t linearize_initializer(struct entrypoint *ep, struct expression *initializer, struct access_data *ad)
	switch (initializer->type) {
	case EXPR_INITIALIZER: {
		struct expression *expr;
		FOR_EACH_PTR(initializer->expr_list, expr) {
			linearize_initializer(ep, expr, ad);
		} END_FOR_EACH_PTR(expr);

		ad->offset = initializer->init_offset;
		linearize_initializer(ep, initializer->init_expr, ad);

		pseudo_t value = linearize_expression(ep, initializer);
		ad->type = initializer->ctype;
		linearize_store_gen(ep, value, ad);

static void linearize_argument(struct entrypoint *ep, struct symbol *arg, int nr)
	struct access_data ad = { NULL, };

	ad.address = symbol_pseudo(ep, arg);
	linearize_store_gen(ep, argument_pseudo(ep, nr), &ad);
static pseudo_t linearize_expression(struct entrypoint *ep, struct expression *expr)
	if (!expr || !valid_type(expr->ctype))

	current_pos = expr->pos;
	switch (expr->type) {
		linearize_one_symbol(ep, expr->symbol);
		return add_symbol_address(ep, expr);

		return value_pseudo(expr->value);

		return add_setval(ep, expr->ctype, expr);

		return add_setfval(ep, expr->ctype, expr->fvalue);

	case EXPR_STATEMENT:
		return linearize_statement(ep, expr->statement);

		return linearize_call_expression(ep, expr);

		if (expr->op == SPECIAL_LOGICAL_AND || expr->op == SPECIAL_LOGICAL_OR)
			return linearize_binop_bool(ep, expr);
		return linearize_binop(ep, expr);

		return linearize_logical(ep, expr);

		return linearize_compare(ep, expr);

		return linearize_select(ep, expr);

	case EXPR_CONDITIONAL:
		if (!expr->cond_true)
			return linearize_short_conditional(ep, expr, expr->conditional, expr->cond_false);

		return linearize_conditional(ep, expr, expr->conditional,
			expr->cond_true, expr->cond_false);

		linearize_expression(ep, expr->left);
		return linearize_expression(ep, expr->right);

	case EXPR_ASSIGNMENT:
		return linearize_assignment(ep, expr);

		return linearize_preop(ep, expr);

		return linearize_postop(ep, expr);

	case EXPR_FORCE_CAST:
	case EXPR_IMPLIED_CAST:
		return linearize_cast(ep, expr);

		return linearize_slice(ep, expr);

	case EXPR_INITIALIZER:
		warning(expr->pos, "unexpected initializer expression (%d %d)", expr->type, expr->op);
		warning(expr->pos, "unknown expression (%d %d)", expr->type, expr->op);
static pseudo_t linearize_one_symbol(struct entrypoint *ep, struct symbol *sym)
	struct access_data ad = { NULL, };

	if (!sym || !sym->initializer || sym->initialized)

	/* We need to output these puppies some day too.. */
	if (sym->ctype.modifiers & (MOD_STATIC | MOD_TOPLEVEL))

	sym->initialized = 1;
	ad.address = symbol_pseudo(ep, sym);

	if (sym->initializer && !is_scalar_type(sym)) {
		// default zero initialization [6.7.9.21]
		// FIXME: this initializes the whole aggregate while
		// only the existing fields need to be initialized.
		// FIXME: this initializes the whole aggregate even if
		// all fields are later explicitly initialized.
		ad.address = symbol_pseudo(ep, sym);
		linearize_store_gen(ep, value_pseudo(0), &ad);

	value = linearize_initializer(ep, sym->initializer, &ad);
static pseudo_t linearize_compound_statement(struct entrypoint *ep, struct statement *stmt)
	struct statement *s;

	FOR_EACH_PTR(stmt->stmts, s) {
		pseudo = linearize_statement(ep, s);
	} END_FOR_EACH_PTR(s);

static void add_return(struct entrypoint *ep, struct basic_block *bb, struct symbol *ctype, pseudo_t src)
	struct instruction *phi_node = first_instruction(bb->insns);

		phi_node = alloc_typed_instruction(OP_PHI, ctype);
		phi_node->target = alloc_pseudo(phi_node);
		add_instruction(&bb->insns, phi_node);
	phi = alloc_phi(ep->active, src, ctype);
	phi->ident = &return_ident;
	link_phi(phi_node, phi);
static pseudo_t linearize_fn_statement(struct entrypoint *ep, struct statement *stmt)
	struct instruction *phi_node;
	struct basic_block *bb;

	pseudo = linearize_compound_statement(ep, stmt);
	if (!is_void_type(stmt->ret)) {		// non-void function
		struct basic_block *active = ep->active;
		if (active && !bb_terminated(active)) {	// missing return
			struct basic_block *bb_ret;
			bb_ret = get_bound_block(ep, stmt->ret);
			add_return(ep, bb_ret, stmt->ret, undef_pseudo());
		bb = add_label(ep, stmt->ret);
		phi_node = first_instruction(bb->insns);
			pseudo = phi_node->target;
static pseudo_t linearize_inlined_call(struct entrypoint *ep, struct statement *stmt)
	struct instruction *insn = alloc_instruction(OP_INLINED_CALL, 0);
	struct statement *args = stmt->args;
	struct basic_block *bb;

		concat_symbol_list(args->declaration, &ep->syms);
		FOR_EACH_PTR(args->declaration, sym) {
			pseudo_t value = linearize_one_symbol(ep, sym);
			add_pseudo(&insn->arguments, value);
		} END_FOR_EACH_PTR(sym);

	pseudo = linearize_fn_statement(ep, stmt);
	insn->target = pseudo;

	insn->func = symbol_pseudo(ep, stmt->inline_fn);
		bb->pos = stmt->pos;
	add_one_insn(ep, insn);
static pseudo_t linearize_context(struct entrypoint *ep, struct statement *stmt)
	struct instruction *insn = alloc_instruction(OP_CONTEXT, 0);
	struct expression *expr = stmt->expression;

	insn->increment = get_expression_value(expr);
	insn->context_expr = stmt->context;
	add_one_insn(ep, insn);

static pseudo_t linearize_range(struct entrypoint *ep, struct statement *stmt)
	struct instruction *insn = alloc_instruction(OP_RANGE, 0);

	use_pseudo(insn, linearize_expression(ep, stmt->range_expression), &insn->src1);
	use_pseudo(insn, linearize_expression(ep, stmt->range_low), &insn->src2);
	use_pseudo(insn, linearize_expression(ep, stmt->range_high), &insn->src3);
	add_one_insn(ep, insn);
ALLOCATOR(asm_rules, "asm rules");
ALLOCATOR(asm_constraint, "asm constraints");

static void add_asm_rule(struct instruction *insn, struct asm_constraint_list **list, struct asm_operand *op, pseudo_t pseudo)
	struct asm_constraint *rule = __alloc_asm_constraint(0);
	rule->is_memory = op->is_memory;
	rule->ident = op->name;
	rule->constraint = op->constraint ? op->constraint->string->data : "";
	use_pseudo(insn, pseudo, &rule->pseudo);
	add_ptr_list(list, rule);

static void add_asm_input(struct entrypoint *ep, struct instruction *insn, struct asm_operand *op)
	pseudo_t pseudo = linearize_expression(ep, op->expr);

	add_asm_rule(insn, &insn->asm_rules->inputs, op, pseudo);

static void add_asm_output_address(struct entrypoint *ep, struct instruction *insn, struct asm_operand *op)
	pseudo = linearize_expression(ep, op->expr);
	add_asm_rule(insn, &insn->asm_rules->outputs, op, pseudo);
	insn->output_memory = 1;

static void add_asm_output(struct entrypoint *ep, struct instruction *insn, struct asm_operand *op)
	struct access_data ad = { NULL, };

	if (!linearize_address_gen(ep, op->expr, &ad))
	pseudo = alloc_pseudo(insn);
	linearize_store_gen(ep, pseudo, &ad);
	add_asm_rule(insn, &insn->asm_rules->outputs, op, pseudo);
static pseudo_t linearize_asm_statement(struct entrypoint *ep, struct statement *stmt)
	struct instruction *insn;
	struct expression *expr, *clob;
	struct asm_rules *rules;
	struct asm_operand *op;

	insn = alloc_instruction(OP_ASM, 0);
	expr = stmt->asm_string;
	if (!expr || expr->type != EXPR_STRING) {
		warning(stmt->pos, "expected string in inline asm");
	insn->string = expr->string->data;

	rules = __alloc_asm_rules(0);
	insn->asm_rules = rules;

	/* Gather the inputs.. */
	FOR_EACH_PTR(stmt->asm_inputs, op) {
		add_asm_input(ep, insn, op);
	} END_FOR_EACH_PTR(op);

	/* ... and the addresses for memory outputs */
	FOR_EACH_PTR(stmt->asm_outputs, op) {
		add_asm_output_address(ep, insn, op);
	} END_FOR_EACH_PTR(op);

	add_one_insn(ep, insn);

	/* Assign the outputs */
	FOR_EACH_PTR(stmt->asm_outputs, op) {
		add_asm_output(ep, insn, op);
	} END_FOR_EACH_PTR(op);

	/* and finally, look if it clobbers memory */
	FOR_EACH_PTR(stmt->asm_clobbers, clob) {
		if (!strcmp(clob->string->data, "memory"))
			insn->clobber_memory = 1;
	} END_FOR_EACH_PTR(clob);
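
/*
 * Comparator used to sort a switch's case ranges: entries with
 * begin > end (the default jump) are kept apart from real ranges,
 * which are ordered by begin and then by end.
 */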
static int multijmp_cmp(const void *_a, const void *_b)
	const struct multijmp *a = _a;
	const struct multijmp *b = _b;

	if (a->begin > a->end) {
		if (b->begin > b->end)
	if (b->begin > b->end)
	if (a->begin == b->begin) {
		if (a->end == b->end)
		return (a->end < b->end) ? -1 : 1;
	return a->begin < b->begin ? -1 : 1;

static void sort_switch_cases(struct instruction *insn)
	sort_list((struct ptr_list **)&insn->multijmp_list, multijmp_cmp);

static pseudo_t linearize_declaration(struct entrypoint *ep, struct statement *stmt)
	concat_symbol_list(stmt->declaration, &ep->syms);

	FOR_EACH_PTR(stmt->declaration, sym) {
		linearize_one_symbol(ep, sym);
	} END_FOR_EACH_PTR(sym);
static pseudo_t linearize_return(struct entrypoint *ep, struct statement *stmt)
	struct expression *expr = stmt->expression;
	struct symbol *ret = stmt->ret_target;
	struct basic_block *bb_return = get_bound_block(ep, ret);
	struct basic_block *active;
	pseudo_t src = linearize_expression(ep, expr);
	active = ep->active;
	if (active && !is_void_type(ret)) {
		add_return(ep, bb_return, ret, src);
	add_goto(ep, bb_return);
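
/*
 * A switch lowers to a single OP_SWITCH with one multijmp per case or
 * case range; the default target - or the block following the switch
 * when there is no default - is added as a degenerate 1..0 range.
 */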
static pseudo_t linearize_switch(struct entrypoint *ep, struct statement *stmt)
	struct instruction *switch_ins;
	struct basic_block *switch_end = alloc_basic_block(ep, stmt->pos);
	struct basic_block *active, *default_case;
	struct expression *expr = stmt->switch_expression;
	struct multijmp *jmp;

	if (!expr || !expr->ctype)
	pseudo = linearize_expression(ep, expr);
	active = ep->active;
		active = alloc_basic_block(ep, stmt->pos);
		set_activeblock(ep, active);

	switch_ins = alloc_typed_instruction(OP_SWITCH, expr->ctype);
	use_pseudo(switch_ins, pseudo, &switch_ins->cond);
	add_one_insn(ep, switch_ins);

	default_case = NULL;
	FOR_EACH_PTR(stmt->switch_case->symbol_list, sym) {
		struct statement *case_stmt = sym->stmt;
		struct basic_block *bb_case = get_bound_block(ep, sym);

		if (!case_stmt->case_expression) {
			default_case = bb_case;
		} else if (case_stmt->case_expression->type != EXPR_VALUE) {
			struct expression *case_to = case_stmt->case_to;
			long long begin, end;

			begin = end = case_stmt->case_expression->value;
			if (case_to && case_to->type == EXPR_VALUE)
				end = case_to->value;
				jmp = alloc_multijmp(bb_case, end, begin);
				jmp = alloc_multijmp(bb_case, begin, end);
			add_multijmp(&switch_ins->multijmp_list, jmp);
			add_bb(&bb_case->parents, active);
			add_bb(&active->children, bb_case);
	} END_FOR_EACH_PTR(sym);

	bind_label(stmt->switch_break, switch_end, stmt->pos);

	/* And linearize the actual statement */
	linearize_statement(ep, stmt->switch_statement);
	set_activeblock(ep, switch_end);

		default_case = switch_end;
	jmp = alloc_multijmp(default_case, 1, 0);
	add_multijmp(&switch_ins->multijmp_list, jmp);
	add_bb(&default_case->parents, active);
	add_bb(&active->children, default_case);
	sort_switch_cases(switch_ins);
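
/*
 * Loops are built from top, body, continue and end blocks. Without a
 * post-condition the pre-condition is re-tested at the top of every
 * iteration; otherwise the post-condition branches back to the top.
 */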
static pseudo_t linearize_iterator(struct entrypoint *ep, struct statement *stmt)
	struct statement *pre_statement = stmt->iterator_pre_statement;
	struct expression *pre_condition = stmt->iterator_pre_condition;
	struct statement *statement = stmt->iterator_statement;
	struct statement *post_statement = stmt->iterator_post_statement;
	struct expression *post_condition = stmt->iterator_post_condition;
	struct basic_block *loop_top, *loop_body, *loop_continue, *loop_end;

	FOR_EACH_PTR(stmt->iterator_syms, sym) {
		linearize_one_symbol(ep, sym);
	} END_FOR_EACH_PTR(sym);
	concat_symbol_list(stmt->iterator_syms, &ep->syms);
	linearize_statement(ep, pre_statement);

	loop_body = loop_top = alloc_basic_block(ep, stmt->pos);
	loop_continue = alloc_basic_block(ep, stmt->pos);
	loop_end = alloc_basic_block(ep, stmt->pos);

	/* An empty post-condition means that it's the same as the pre-condition */
	if (!post_condition) {
		loop_top = alloc_basic_block(ep, stmt->pos);
		set_activeblock(ep, loop_top);

		linearize_cond_branch(ep, pre_condition, loop_body, loop_end);

	bind_label(stmt->iterator_continue, loop_continue, stmt->pos);
	bind_label(stmt->iterator_break, loop_end, stmt->pos);

	set_activeblock(ep, loop_body);
	linearize_statement(ep, statement);
	add_goto(ep, loop_continue);

	set_activeblock(ep, loop_continue);
	linearize_statement(ep, post_statement);
	if (!post_condition)
		add_goto(ep, loop_top);
		linearize_cond_branch(ep, post_condition, loop_top, loop_end);
	set_activeblock(ep, loop_end);
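
/*
 * Statement dispatcher: records the statement position for diagnostics
 * and hands each statement type to its dedicated linearizer.
 */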
static pseudo_t linearize_statement(struct entrypoint *ep, struct statement *stmt)
	struct basic_block *bb;

	if (bb && !bb->insns)
		bb->pos = stmt->pos;
	current_pos = stmt->pos;

	switch (stmt->type) {
	case STMT_DECLARATION:
		return linearize_declaration(ep, stmt);

		return linearize_context(ep, stmt);

		return linearize_range(ep, stmt);

	case STMT_EXPRESSION:
		return linearize_expression(ep, stmt->expression);

		return linearize_asm_statement(ep, stmt);

		return linearize_return(ep, stmt);

		add_label(ep, stmt->case_label);
		linearize_statement(ep, stmt->case_statement);

		struct symbol *label = stmt->label_identifier;
			add_label(ep, label);
		return linearize_statement(ep, stmt->label_statement);

		struct expression *expr;
		struct instruction *goto_ins;
		struct basic_block *active;

		active = ep->active;
		if (!bb_reachable(active))

		if (stmt->goto_label) {
			add_goto(ep, get_bound_block(ep, stmt->goto_label));

		expr = stmt->goto_expression;

		/* This can happen as part of simplification */
		if (expr->type == EXPR_LABEL) {
			add_goto(ep, get_bound_block(ep, expr->label_symbol));

		pseudo = linearize_expression(ep, expr);
		goto_ins = alloc_instruction(OP_COMPUTEDGOTO, 0);
		use_pseudo(goto_ins, pseudo, &goto_ins->src);
		add_one_insn(ep, goto_ins);

		FOR_EACH_PTR(stmt->target_list, sym) {
			struct basic_block *bb_computed = get_bound_block(ep, sym);
			struct multijmp *jmp = alloc_multijmp(bb_computed, 1, 0);
			add_multijmp(&goto_ins->multijmp_list, jmp);
			add_bb(&bb_computed->parents, ep->active);
			add_bb(&active->children, bb_computed);
		} END_FOR_EACH_PTR(sym);

		if (stmt->inline_fn)
			return linearize_inlined_call(ep, stmt);
		return linearize_compound_statement(ep, stmt);

	/*
	 * This could take 'likely/unlikely' into account, and
	 * switch the arms around appropriately..
	 */
		struct basic_block *bb_true, *bb_false, *endif;
		struct expression *cond = stmt->if_conditional;

		bb_true = alloc_basic_block(ep, stmt->pos);
		bb_false = endif = alloc_basic_block(ep, stmt->pos);

		// If the condition is invalid, the following
		// statement(s) are not evaluated.
		if (!cond || !valid_type(cond->ctype))
		linearize_cond_branch(ep, cond, bb_true, bb_false);

		set_activeblock(ep, bb_true);
		linearize_statement(ep, stmt->if_true);

		if (stmt->if_false) {
			endif = alloc_basic_block(ep, stmt->pos);
			add_goto(ep, endif);
			set_activeblock(ep, bb_false);
			linearize_statement(ep, stmt->if_false);
		set_activeblock(ep, endif);

		return linearize_switch(ep, stmt);

		return linearize_iterator(ep, stmt);
static void check_tainted_insn(struct instruction *insn)
	unsigned long long uval;

	switch (insn->opcode) {
	case OP_DIVU: case OP_DIVS:
	case OP_MODU: case OP_MODS:
		if (insn->src2 == value_pseudo(0))
			warning(insn->pos, "divide by zero");
	case OP_SHL: case OP_LSR: case OP_ASR:
		if (src2->type != PSEUDO_VAL)
		if (uval < insn->size)
		sval = sign_extend(uval, insn->size);
		if (Wshift_count_negative && sval < 0)
			warning(insn->pos, "shift count is negative (%lld)", sval);
		else if (Wshift_count_overflow)
			warning(insn->pos, "shift too big (%llu) for type %s", uval, show_typename(insn->type));

// issue warnings after all possible DCE
static void late_warnings(struct entrypoint *ep)
	struct basic_block *bb;
	FOR_EACH_PTR(ep->bbs, bb) {
		struct instruction *insn;
		FOR_EACH_PTR(bb->insns, insn) {
				check_tainted_insn(insn);
			switch (insn->opcode) {
				// Check for illegal offsets.
		} END_FOR_EACH_PTR(insn);
	} END_FOR_EACH_PTR(bb);
static struct entrypoint *linearize_fn(struct symbol *sym, struct symbol *base_type)
	struct statement *stmt = base_type->stmt;
	struct entrypoint *ep;
	struct basic_block *bb;
	struct symbol *ret_type;
	struct instruction *entry;
	struct instruction *ret;

	if (!stmt || sym->bogus_linear)

	ep = alloc_entrypoint();

	bb = alloc_basic_block(ep, sym->pos);
	set_activeblock(ep, bb);

	if (stmt->type == STMT_ASM) {	// top-level asm
		linearize_asm_statement(ep, stmt);

	entry = alloc_instruction(OP_ENTRY, 0);
	add_one_insn(ep, entry);

	concat_symbol_list(base_type->arguments, &ep->syms);

	/* FIXME!! We should do something else about varargs.. */
	FOR_EACH_PTR(base_type->arguments, arg) {
		linearize_argument(ep, arg, ++i);
	} END_FOR_EACH_PTR(arg);

	result = linearize_fn_statement(ep, stmt);
	ret_type = base_type->ctype.base_type;
	ret = alloc_typed_instruction(OP_RET, ret_type);
	if (type_size(ret_type) > 0)
		use_pseudo(ret, result, &ret->src);
	add_one_insn(ep, ret);
struct entrypoint *linearize_symbol(struct symbol *sym)
	struct symbol *base_type;

	current_pos = sym->pos;
	base_type = sym->ctype.base_type;

	if (base_type->type == SYM_FN)
		return linearize_fn(sym, base_type);
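
/*
 * Builtins that expand directly to IR instead of a call: fma/fmaf/fmal
 * become OP_FMADD, isdigit becomes a subtract plus an unsigned compare
 * against 9, and __builtin_unreachable emits OP_UNREACH.
 */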
static pseudo_t linearize_fma(struct entrypoint *ep, struct expression *expr)
	struct instruction *insn = alloc_typed_instruction(OP_FMADD, expr->ctype);
	struct expression *arg;

	PREPARE_PTR_LIST(expr->args, arg);
	use_pseudo(insn, linearize_expression(ep, arg), &insn->src1);
	use_pseudo(insn, linearize_expression(ep, arg), &insn->src2);
	use_pseudo(insn, linearize_expression(ep, arg), &insn->src3);
	FINISH_PTR_LIST(arg);

	add_one_insn(ep, insn);
	return insn->target = alloc_pseudo(insn);

static pseudo_t linearize_isdigit(struct entrypoint *ep, struct expression *expr)
	struct instruction *insn;

	insn = alloc_typed_instruction(OP_SUB, &int_ctype);
	src = linearize_expression(ep, first_expression(expr->args));
	use_pseudo(insn, src, &insn->src1);
	insn->src2 = value_pseudo('0');
	src = insn->target = alloc_pseudo(insn);
	add_one_insn(ep, insn);

	insn = alloc_typed_instruction(OP_SET_BE, &int_ctype);
	use_pseudo(insn, src, &insn->src1);
	insn->src2 = value_pseudo(9);
	insn->target = alloc_pseudo(insn);
	insn->itype = &int_ctype;
	add_one_insn(ep, insn);

	return insn->target;
static pseudo_t linearize_unreachable(struct entrypoint *ep, struct expression *exp)
	add_unreachable(ep);
	return VOID;
{
2648 pseudo_t (*linearize
)(struct entrypoint
*, struct expression
*);
2649 struct symbol_op op
;
2650 } builtins_table
[] = {
2651 // must be declared in builtin.c:declare_builtins[]
2652 { "__builtin_fma", linearize_fma
},
2653 { "__builtin_fmaf", linearize_fma
},
2654 { "__builtin_fmal", linearize_fma
},
2655 { "__builtin_isdigit", linearize_isdigit
},
2656 { "__builtin_unreachable", linearize_unreachable
},
2660 void init_linearized_builtins(int stream
)
2662 struct sym_init
*ptr
;
2664 for (ptr
= builtins_table
; ptr
->name
; ptr
++) {
2666 sym
= create_symbol(stream
, ptr
->name
, SYM_NODE
, NS_SYMBOL
);
2669 sym
->op
->type
|= KW_BUILTIN
;
2670 sym
->op
->linearize
= ptr
->linearize
;