/*
 * memops - try to combine memory ops.
 *
 * Copyright (C) 2004 Linus Torvalds
 */
#include <assert.h>

#include "expression.h"
#include "linearize.h"
19 static int find_dominating_parents(pseudo_t pseudo
, struct instruction
*insn
,
20 struct basic_block
*bb
, unsigned long generation
, struct pseudo_list
**dominators
,
23 struct basic_block
*parent
;
25 if (bb_list_size(bb
->parents
) > 1)
27 FOR_EACH_PTR(bb
->parents
, parent
) {
28 struct instruction
*one
;
29 struct instruction
*br
;
32 FOR_EACH_PTR_REVERSE(parent
->insns
, one
) {
36 dominance
= dominates(pseudo
, insn
, one
, local
);
38 if (one
->opcode
== OP_LOAD
)
44 if (one
->opcode
== OP_LOAD
&& !loads
)
47 } END_FOR_EACH_PTR_REVERSE(one
);
49 if (parent
->generation
== generation
)
51 parent
->generation
= generation
;
53 if (!find_dominating_parents(pseudo
, insn
, parent
, generation
, dominators
, local
, loads
))
58 br
= delete_last_instruction(&parent
->insns
);
59 phi
= alloc_phi(parent
, one
->target
, one
->size
);
60 phi
->ident
= phi
->ident
? : one
->target
->ident
;
61 add_instruction(&parent
->insns
, br
);
62 use_pseudo(phi
, add_pseudo(dominators
, phi
));
63 } END_FOR_EACH_PTR(parent
);
/*
 * FIXME! This is wrong. Since we now distribute out the OP_SYMADDR,
 * we can no longer really use "container()" to get from a user to
 * the instruction that uses it.
 *
 * This happens to work, simply because the likelihood of the
 * (possibly non-instruction) containing the right bitpattern
 * in the right place is pretty low. But this is still wrong.
 *
 * We should make symbol-pseudos count non-load/store usage,
 * and use that count here instead of walking the users.
 */
79 static int address_taken(pseudo_t pseudo
)
82 FOR_EACH_PTR(pseudo
->users
, usep
) {
83 struct instruction
*insn
= container(usep
, struct instruction
, src
);
84 if (insn
->bb
&& (insn
->opcode
!= OP_LOAD
|| insn
->opcode
!= OP_STORE
))
86 } END_FOR_EACH_PTR(usep
);
90 static int local_pseudo(pseudo_t pseudo
)
92 return pseudo
->type
== PSEUDO_SYM
93 && !(pseudo
->sym
->ctype
.modifiers
& (MOD_STATIC
| MOD_NONLOCAL
))
94 && !address_taken(pseudo
);
97 static void simplify_loads(struct basic_block
*bb
)
99 struct instruction
*insn
;
101 FOR_EACH_PTR_REVERSE(bb
->insns
, insn
) {
104 if (insn
->opcode
== OP_LOAD
) {
105 struct instruction
*dom
;
106 pseudo_t pseudo
= insn
->src
;
107 int local
= local_pseudo(pseudo
);
108 struct pseudo_list
*dominators
;
109 unsigned long generation
;
111 RECURSE_PTR_REVERSE(insn
, dom
) {
115 dominance
= dominates(pseudo
, insn
, dom
, local
);
117 /* possible partial dominance? */
119 if (dom
->opcode
== OP_LOAD
)
123 /* Yeehaa! Found one! */
124 convert_load_instruction(insn
, dom
->target
);
127 } END_FOR_EACH_PTR_REVERSE(dom
);
129 /* Ok, go find the parents */
130 generation
= ++bb_generation
;
131 bb
->generation
= generation
;
133 if (find_dominating_parents(pseudo
, insn
, bb
, generation
, &dominators
, local
, 1)) {
134 /* This happens with initial assignments to structures etc.. */
137 assert(pseudo
->type
!= PSEUDO_ARG
);
138 convert_load_instruction(insn
, value_pseudo(0));
142 rewrite_load_instruction(insn
, dominators
);
146 /* Do the next one */;
147 } END_FOR_EACH_PTR_REVERSE(insn
);
150 static void kill_store(struct instruction
*insn
)
154 insn
->opcode
= OP_SNOP
;
155 kill_use(&insn
->target
);
159 static void kill_dominated_stores(struct basic_block
*bb
)
161 struct instruction
*insn
;
163 FOR_EACH_PTR_REVERSE(bb
->insns
, insn
) {
166 if (insn
->opcode
== OP_STORE
) {
167 struct instruction
*dom
;
168 pseudo_t pseudo
= insn
->src
;
169 int local
= local_pseudo(pseudo
);
171 RECURSE_PTR_REVERSE(insn
, dom
) {
175 dominance
= dominates(pseudo
, insn
, dom
, local
);
177 /* possible partial dominance? */
180 if (dom
->opcode
== OP_LOAD
)
182 /* Yeehaa! Found one! */
185 } END_FOR_EACH_PTR_REVERSE(dom
);
187 /* Ok, we should check the parents now */
190 /* Do the next one */;
191 } END_FOR_EACH_PTR_REVERSE(insn
);
194 void simplify_memops(struct entrypoint
*ep
)
196 struct basic_block
*bb
;
198 FOR_EACH_PTR_REVERSE(ep
->bbs
, bb
) {
200 } END_FOR_EACH_PTR_REVERSE(bb
);
202 FOR_EACH_PTR_REVERSE(ep
->bbs
, bb
) {
203 kill_dominated_stores(bb
);
204 } END_FOR_EACH_PTR_REVERSE(bb
);