/* DDG - Data Dependence Graph implementation.
   Copyright (C) 2004-2014 Free Software Foundation, Inc.
   Contributed by Ayal Zaks and Mustafa Hagog <zaks,mustafa@il.ibm.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "regs.h"
#include "function.h"
#include "flags.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "except.h"
#include "recog.h"
#include "sched-int.h"
#include "target.h"
#include "cfgloop.h"
#include "sbitmap.h"
#include "expr.h"
#include "bitmap.h"
#include "df.h"
#include "ddg.h"
#include "rtl-iter.h"
51 #ifdef INSN_SCHEDULING
53 /* A flag indicating that a ddg edge belongs to an SCC or not. */
54 enum edge_flag
{NOT_IN_SCC
= 0, IN_SCC
};
56 /* Forward declarations. */
57 static void add_backarc_to_ddg (ddg_ptr
, ddg_edge_ptr
);
58 static void add_backarc_to_scc (ddg_scc_ptr
, ddg_edge_ptr
);
59 static void add_scc_to_ddg (ddg_all_sccs_ptr
, ddg_scc_ptr
);
60 static void create_ddg_dep_from_intra_loop_link (ddg_ptr
, ddg_node_ptr
,
62 static void create_ddg_dep_no_link (ddg_ptr
, ddg_node_ptr
, ddg_node_ptr
,
63 dep_type
, dep_data_type
, int);
64 static ddg_edge_ptr
create_ddg_edge (ddg_node_ptr
, ddg_node_ptr
, dep_type
,
65 dep_data_type
, int, int);
66 static void add_edge_to_ddg (ddg_ptr g
, ddg_edge_ptr
);
68 /* Auxiliary variable for mem_read_insn_p/mem_write_insn_p. */
69 static bool mem_ref_p
;
71 /* Auxiliary function for mem_read_insn_p. */
73 mark_mem_use (rtx
*x
, void *)
75 subrtx_iterator::array_type array
;
76 FOR_EACH_SUBRTX (iter
, array
, *x
, NONCONST
)
84 /* Returns nonzero if INSN reads from memory. */
86 mem_read_insn_p (rtx_insn
*insn
)
89 note_uses (&PATTERN (insn
), mark_mem_use
, NULL
);
94 mark_mem_store (rtx loc
, const_rtx setter ATTRIBUTE_UNUSED
, void *data ATTRIBUTE_UNUSED
)
100 /* Returns nonzero if INSN writes to memory. */
102 mem_write_insn_p (rtx_insn
*insn
)
105 note_stores (PATTERN (insn
), mark_mem_store
, NULL
);
109 /* Returns nonzero if X has access to memory. */
111 rtx_mem_access_p (rtx x
)
124 fmt
= GET_RTX_FORMAT (code
);
125 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
129 if (rtx_mem_access_p (XEXP (x
, i
)))
132 else if (fmt
[i
] == 'E')
133 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
135 if (rtx_mem_access_p (XVECEXP (x
, i
, j
)))
142 /* Returns nonzero if INSN reads to or writes from memory. */
144 mem_access_insn_p (rtx_insn
*insn
)
146 return rtx_mem_access_p (PATTERN (insn
));
149 /* Return true if DEF_INSN contains address being auto-inc or auto-dec
150 which is used in USE_INSN. Otherwise return false. The result is
151 being used to decide whether to remove the edge between def_insn and
152 use_insn when -fmodulo-sched-allow-regmoves is set. This function
153 doesn't need to consider the specific address register; no reg_moves
154 will be allowed for any life range defined by def_insn and used
155 by use_insn, if use_insn uses an address register auto-inc'ed by
158 autoinc_var_is_used_p (rtx_insn
*def_insn
, rtx_insn
*use_insn
)
162 for (note
= REG_NOTES (def_insn
); note
; note
= XEXP (note
, 1))
163 if (REG_NOTE_KIND (note
) == REG_INC
164 && reg_referenced_p (XEXP (note
, 0), PATTERN (use_insn
)))
170 /* Return true if one of the definitions in INSN has MODE_CC. Otherwise
173 def_has_ccmode_p (rtx_insn
*insn
)
177 FOR_EACH_INSN_DEF (def
, insn
)
179 enum machine_mode mode
= GET_MODE (DF_REF_REG (def
));
181 if (GET_MODE_CLASS (mode
) == MODE_CC
)
188 /* Computes the dependence parameters (latency, distance etc.), creates
189 a ddg_edge and adds it to the given DDG. */
191 create_ddg_dep_from_intra_loop_link (ddg_ptr g
, ddg_node_ptr src_node
,
192 ddg_node_ptr dest_node
, dep_t link
)
195 int latency
, distance
= 0;
196 dep_type t
= TRUE_DEP
;
197 dep_data_type dt
= (mem_access_insn_p (src_node
->insn
)
198 && mem_access_insn_p (dest_node
->insn
) ? MEM_DEP
200 gcc_assert (src_node
->cuid
< dest_node
->cuid
);
203 /* Note: REG_DEP_ANTI applies to MEM ANTI_DEP as well!! */
204 if (DEP_TYPE (link
) == REG_DEP_ANTI
)
206 else if (DEP_TYPE (link
) == REG_DEP_OUTPUT
)
209 gcc_assert (!DEBUG_INSN_P (dest_node
->insn
) || t
== ANTI_DEP
);
210 gcc_assert (!DEBUG_INSN_P (src_node
->insn
) || t
== ANTI_DEP
);
212 /* We currently choose not to create certain anti-deps edges and
213 compensate for that by generating reg-moves based on the life-range
214 analysis. The anti-deps that will be deleted are the ones which
215 have true-deps edges in the opposite direction (in other words
216 the kernel has only one def of the relevant register).
217 If the address that is being auto-inc or auto-dec in DEST_NODE
218 is used in SRC_NODE then do not remove the edge to make sure
219 reg-moves will not be created for this address.
220 TODO: support the removal of all anti-deps edges, i.e. including those
221 whose register has multiple defs in the loop. */
222 if (flag_modulo_sched_allow_regmoves
223 && (t
== ANTI_DEP
&& dt
== REG_DEP
)
224 && !def_has_ccmode_p (dest_node
->insn
)
225 && !autoinc_var_is_used_p (dest_node
->insn
, src_node
->insn
))
229 set
= single_set (dest_node
->insn
);
230 /* TODO: Handle registers that REG_P is not true for them, i.e.
231 subregs and special registers. */
232 if (set
&& REG_P (SET_DEST (set
)))
234 int regno
= REGNO (SET_DEST (set
));
236 struct df_rd_bb_info
*bb_info
= DF_RD_BB_INFO (g
->bb
);
238 first_def
= df_bb_regno_first_def_find (g
->bb
, regno
);
239 gcc_assert (first_def
);
241 if (bitmap_bit_p (&bb_info
->gen
, DF_REF_ID (first_def
)))
246 latency
= dep_cost (link
);
247 e
= create_ddg_edge (src_node
, dest_node
, t
, dt
, latency
, distance
);
248 add_edge_to_ddg (g
, e
);
251 /* The same as the above function, but it doesn't require a link parameter. */
253 create_ddg_dep_no_link (ddg_ptr g
, ddg_node_ptr from
, ddg_node_ptr to
,
254 dep_type d_t
, dep_data_type d_dt
, int distance
)
258 enum reg_note dep_kind
;
259 struct _dep _dep
, *dep
= &_dep
;
261 gcc_assert (!DEBUG_INSN_P (to
->insn
) || d_t
== ANTI_DEP
);
262 gcc_assert (!DEBUG_INSN_P (from
->insn
) || d_t
== ANTI_DEP
);
265 dep_kind
= REG_DEP_ANTI
;
266 else if (d_t
== OUTPUT_DEP
)
267 dep_kind
= REG_DEP_OUTPUT
;
270 gcc_assert (d_t
== TRUE_DEP
);
272 dep_kind
= REG_DEP_TRUE
;
275 init_dep (dep
, from
->insn
, to
->insn
, dep_kind
);
279 e
= create_ddg_edge (from
, to
, d_t
, d_dt
, l
, distance
);
281 add_backarc_to_ddg (g
, e
);
283 add_edge_to_ddg (g
, e
);
287 /* Given a downwards exposed register def LAST_DEF (which is the last
288 definition of that register in the bb), add inter-loop true dependences
289 to all its uses in the next iteration, an output dependence to the
290 first def of the same register (possibly itself) in the next iteration
291 and anti-dependences from its uses in the current iteration to the
292 first definition in the next iteration. */
294 add_cross_iteration_register_deps (ddg_ptr g
, df_ref last_def
)
296 int regno
= DF_REF_REGNO (last_def
);
297 struct df_link
*r_use
;
298 int has_use_in_bb_p
= false;
299 rtx_insn
*def_insn
= DF_REF_INSN (last_def
);
300 ddg_node_ptr last_def_node
= get_node_of_insn (g
, def_insn
);
301 ddg_node_ptr use_node
;
302 #ifdef ENABLE_CHECKING
303 struct df_rd_bb_info
*bb_info
= DF_RD_BB_INFO (g
->bb
);
305 df_ref first_def
= df_bb_regno_first_def_find (g
->bb
, regno
);
307 gcc_assert (last_def_node
);
308 gcc_assert (first_def
);
310 #ifdef ENABLE_CHECKING
311 if (DF_REF_ID (last_def
) != DF_REF_ID (first_def
))
312 gcc_assert (!bitmap_bit_p (&bb_info
->gen
,
313 DF_REF_ID (first_def
)));
316 /* Create inter-loop true dependences and anti dependences. */
317 for (r_use
= DF_REF_CHAIN (last_def
); r_use
!= NULL
; r_use
= r_use
->next
)
319 rtx_insn
*use_insn
= DF_REF_INSN (r_use
->ref
);
321 if (BLOCK_FOR_INSN (use_insn
) != g
->bb
)
324 /* ??? Do not handle uses with DF_REF_IN_NOTE notes. */
325 use_node
= get_node_of_insn (g
, use_insn
);
326 gcc_assert (use_node
);
327 has_use_in_bb_p
= true;
328 if (use_node
->cuid
<= last_def_node
->cuid
)
330 /* Add true deps from last_def to it's uses in the next
331 iteration. Any such upwards exposed use appears before
333 create_ddg_dep_no_link (g
, last_def_node
, use_node
,
334 DEBUG_INSN_P (use_insn
) ? ANTI_DEP
: TRUE_DEP
,
337 else if (!DEBUG_INSN_P (use_insn
))
339 /* Add anti deps from last_def's uses in the current iteration
340 to the first def in the next iteration. We do not add ANTI
341 dep when there is an intra-loop TRUE dep in the opposite
342 direction, but use regmoves to fix such disregarded ANTI
343 deps when broken. If the first_def reaches the USE then
344 there is such a dep. */
345 ddg_node_ptr first_def_node
= get_node_of_insn (g
,
346 DF_REF_INSN (first_def
));
348 gcc_assert (first_def_node
);
350 /* Always create the edge if the use node is a branch in
351 order to prevent the creation of reg-moves.
352 If the address that is being auto-inc or auto-dec in LAST_DEF
353 is used in USE_INSN then do not remove the edge to make sure
354 reg-moves will not be created for that address. */
355 if (DF_REF_ID (last_def
) != DF_REF_ID (first_def
)
356 || !flag_modulo_sched_allow_regmoves
357 || JUMP_P (use_node
->insn
)
358 || autoinc_var_is_used_p (DF_REF_INSN (last_def
), use_insn
)
359 || def_has_ccmode_p (DF_REF_INSN (last_def
)))
360 create_ddg_dep_no_link (g
, use_node
, first_def_node
, ANTI_DEP
,
365 /* Create an inter-loop output dependence between LAST_DEF (which is the
366 last def in its block, being downwards exposed) and the first def in
367 its block. Avoid creating a self output dependence. Avoid creating
368 an output dependence if there is a dependence path between the two
369 defs starting with a true dependence to a use which can be in the
370 next iteration; followed by an anti dependence of that use to the
371 first def (i.e. if there is a use between the two defs.) */
372 if (!has_use_in_bb_p
)
374 ddg_node_ptr dest_node
;
376 if (DF_REF_ID (last_def
) == DF_REF_ID (first_def
))
379 dest_node
= get_node_of_insn (g
, DF_REF_INSN (first_def
));
380 gcc_assert (dest_node
);
381 create_ddg_dep_no_link (g
, last_def_node
, dest_node
,
382 OUTPUT_DEP
, REG_DEP
, 1);
385 /* Build inter-loop dependencies, by looking at DF analysis backwards. */
387 build_inter_loop_deps (ddg_ptr g
)
390 struct df_rd_bb_info
*rd_bb_info
;
393 rd_bb_info
= DF_RD_BB_INFO (g
->bb
);
395 /* Find inter-loop register output, true and anti deps. */
396 EXECUTE_IF_SET_IN_BITMAP (&rd_bb_info
->gen
, 0, rd_num
, bi
)
398 df_ref rd
= DF_DEFS_GET (rd_num
);
400 add_cross_iteration_register_deps (g
, rd
);
405 /* Return true if two specified instructions have mem expr with conflict
408 insns_may_alias_p (rtx_insn
*insn1
, rtx_insn
*insn2
)
410 subrtx_iterator::array_type array1
;
411 subrtx_iterator::array_type array2
;
412 FOR_EACH_SUBRTX (iter1
, array1
, PATTERN (insn1
), NONCONST
)
414 const_rtx x1
= *iter1
;
416 FOR_EACH_SUBRTX (iter2
, array2
, PATTERN (insn2
), NONCONST
)
418 const_rtx x2
= *iter2
;
419 if (MEM_P (x2
) && may_alias_p (x2
, x1
))
426 /* Given two nodes, analyze their RTL insns and add intra-loop mem deps
429 add_intra_loop_mem_dep (ddg_ptr g
, ddg_node_ptr from
, ddg_node_ptr to
)
432 if ((from
->cuid
== to
->cuid
)
433 || !insns_may_alias_p (from
->insn
, to
->insn
))
434 /* Do not create edge if memory references have disjoint alias sets
435 or 'to' and 'from' are the same instruction. */
438 if (mem_write_insn_p (from
->insn
))
440 if (mem_read_insn_p (to
->insn
))
441 create_ddg_dep_no_link (g
, from
, to
,
442 DEBUG_INSN_P (to
->insn
)
443 ? ANTI_DEP
: TRUE_DEP
, MEM_DEP
, 0);
445 create_ddg_dep_no_link (g
, from
, to
,
446 DEBUG_INSN_P (to
->insn
)
447 ? ANTI_DEP
: OUTPUT_DEP
, MEM_DEP
, 0);
449 else if (!mem_read_insn_p (to
->insn
))
450 create_ddg_dep_no_link (g
, from
, to
, ANTI_DEP
, MEM_DEP
, 0);
453 /* Given two nodes, analyze their RTL insns and add inter-loop mem deps
456 add_inter_loop_mem_dep (ddg_ptr g
, ddg_node_ptr from
, ddg_node_ptr to
)
458 if (!insns_may_alias_p (from
->insn
, to
->insn
))
459 /* Do not create edge if memory references have disjoint alias sets. */
462 if (mem_write_insn_p (from
->insn
))
464 if (mem_read_insn_p (to
->insn
))
465 create_ddg_dep_no_link (g
, from
, to
,
466 DEBUG_INSN_P (to
->insn
)
467 ? ANTI_DEP
: TRUE_DEP
, MEM_DEP
, 1);
468 else if (from
->cuid
!= to
->cuid
)
469 create_ddg_dep_no_link (g
, from
, to
,
470 DEBUG_INSN_P (to
->insn
)
471 ? ANTI_DEP
: OUTPUT_DEP
, MEM_DEP
, 1);
475 if (mem_read_insn_p (to
->insn
))
477 else if (from
->cuid
!= to
->cuid
)
479 create_ddg_dep_no_link (g
, from
, to
, ANTI_DEP
, MEM_DEP
, 1);
480 if (DEBUG_INSN_P (from
->insn
) || DEBUG_INSN_P (to
->insn
))
481 create_ddg_dep_no_link (g
, to
, from
, ANTI_DEP
, MEM_DEP
, 1);
483 create_ddg_dep_no_link (g
, to
, from
, TRUE_DEP
, MEM_DEP
, 1);
489 /* Perform intra-block Data Dependency analysis and connect the nodes in
490 the DDG. We assume the loop has a single basic block. */
492 build_intra_loop_deps (ddg_ptr g
)
495 /* Hold the dependency analysis state during dependency calculations. */
496 struct deps_desc tmp_deps
;
497 rtx_insn
*head
, *tail
;
499 /* Build the dependence information, using the sched_analyze function. */
501 init_deps (&tmp_deps
, false);
503 /* Do the intra-block data dependence analysis for the given block. */
504 get_ebb_head_tail (g
->bb
, g
->bb
, &head
, &tail
);
505 sched_analyze (&tmp_deps
, head
, tail
);
507 /* Build intra-loop data dependencies using the scheduler dependency
509 for (i
= 0; i
< g
->num_nodes
; i
++)
511 ddg_node_ptr dest_node
= &g
->nodes
[i
];
512 sd_iterator_def sd_it
;
515 if (! INSN_P (dest_node
->insn
))
518 FOR_EACH_DEP (dest_node
->insn
, SD_LIST_BACK
, sd_it
, dep
)
520 rtx_insn
*src_insn
= DEP_PRO (dep
);
521 ddg_node_ptr src_node
;
523 /* Don't add dependencies on debug insns to non-debug insns
524 to avoid codegen differences between -g and -g0. */
525 if (DEBUG_INSN_P (src_insn
) && !DEBUG_INSN_P (dest_node
->insn
))
528 src_node
= get_node_of_insn (g
, src_insn
);
533 create_ddg_dep_from_intra_loop_link (g
, src_node
, dest_node
, dep
);
536 /* If this insn modifies memory, add an edge to all insns that access
538 if (mem_access_insn_p (dest_node
->insn
))
542 for (j
= 0; j
<= i
; j
++)
544 ddg_node_ptr j_node
= &g
->nodes
[j
];
545 if (DEBUG_INSN_P (j_node
->insn
))
547 if (mem_access_insn_p (j_node
->insn
))
549 /* Don't bother calculating inter-loop dep if an intra-loop dep
551 if (! bitmap_bit_p (dest_node
->successors
, j
))
552 add_inter_loop_mem_dep (g
, dest_node
, j_node
);
553 /* If -fmodulo-sched-allow-regmoves
554 is set certain anti-dep edges are not created.
555 It might be that these anti-dep edges are on the
556 path from one memory instruction to another such that
557 removing these edges could cause a violation of the
558 memory dependencies. Thus we add intra edges between
559 every two memory instructions in this case. */
560 if (flag_modulo_sched_allow_regmoves
561 && !bitmap_bit_p (dest_node
->predecessors
, j
))
562 add_intra_loop_mem_dep (g
, j_node
, dest_node
);
568 /* Free the INSN_LISTs. */
569 finish_deps_global ();
570 free_deps (&tmp_deps
);
572 /* Free dependencies. */
573 sched_free_deps (head
, tail
, false);
577 /* Given a basic block, create its DDG and return a pointer to a variable
578 of ddg type that represents it.
579 Initialize the ddg structure fields to the appropriate values. */
581 create_ddg (basic_block bb
, int closing_branch_deps
)
584 rtx_insn
*insn
, *first_note
;
588 g
= (ddg_ptr
) xcalloc (1, sizeof (struct ddg
));
591 g
->closing_branch_deps
= closing_branch_deps
;
593 /* Count the number of insns in the BB. */
594 for (insn
= BB_HEAD (bb
); insn
!= NEXT_INSN (BB_END (bb
));
595 insn
= NEXT_INSN (insn
))
597 if (! INSN_P (insn
) || GET_CODE (PATTERN (insn
)) == USE
)
600 if (DEBUG_INSN_P (insn
))
604 if (mem_read_insn_p (insn
))
606 if (mem_write_insn_p (insn
))
612 /* There is nothing to do for this BB. */
613 if ((num_nodes
- g
->num_debug
) <= 1)
619 /* Allocate the nodes array, and initialize the nodes. */
620 g
->num_nodes
= num_nodes
;
621 g
->nodes
= (ddg_node_ptr
) xcalloc (num_nodes
, sizeof (struct ddg_node
));
622 g
->closing_branch
= NULL
;
625 for (insn
= BB_HEAD (bb
); insn
!= NEXT_INSN (BB_END (bb
));
626 insn
= NEXT_INSN (insn
))
630 if (! first_note
&& NOTE_P (insn
)
631 && NOTE_KIND (insn
) != NOTE_INSN_BASIC_BLOCK
)
637 gcc_assert (!g
->closing_branch
);
638 g
->closing_branch
= &g
->nodes
[i
];
640 else if (GET_CODE (PATTERN (insn
)) == USE
)
647 g
->nodes
[i
].cuid
= i
;
648 g
->nodes
[i
].successors
= sbitmap_alloc (num_nodes
);
649 bitmap_clear (g
->nodes
[i
].successors
);
650 g
->nodes
[i
].predecessors
= sbitmap_alloc (num_nodes
);
651 bitmap_clear (g
->nodes
[i
].predecessors
);
652 g
->nodes
[i
].first_note
= (first_note
? first_note
: insn
);
653 g
->nodes
[i
++].insn
= insn
;
657 /* We must have found a branch in DDG. */
658 gcc_assert (g
->closing_branch
);
661 /* Build the data dependency graph. */
662 build_intra_loop_deps (g
);
663 build_inter_loop_deps (g
);
667 /* Free all the memory allocated for the DDG. */
676 for (i
= 0; i
< g
->num_nodes
; i
++)
678 ddg_edge_ptr e
= g
->nodes
[i
].out
;
682 ddg_edge_ptr next
= e
->next_out
;
687 sbitmap_free (g
->nodes
[i
].successors
);
688 sbitmap_free (g
->nodes
[i
].predecessors
);
690 if (g
->num_backarcs
> 0)
697 print_ddg_edge (FILE *file
, ddg_edge_ptr e
)
713 fprintf (file
, " [%d -(%c,%d,%d)-> %d] ", INSN_UID (e
->src
->insn
),
714 dep_c
, e
->latency
, e
->distance
, INSN_UID (e
->dest
->insn
));
717 /* Print the DDG nodes with there in/out edges to the dump file. */
719 print_ddg (FILE *file
, ddg_ptr g
)
723 for (i
= 0; i
< g
->num_nodes
; i
++)
727 fprintf (file
, "Node num: %d\n", g
->nodes
[i
].cuid
);
728 print_rtl_single (file
, g
->nodes
[i
].insn
);
729 fprintf (file
, "OUT ARCS: ");
730 for (e
= g
->nodes
[i
].out
; e
; e
= e
->next_out
)
731 print_ddg_edge (file
, e
);
733 fprintf (file
, "\nIN ARCS: ");
734 for (e
= g
->nodes
[i
].in
; e
; e
= e
->next_in
)
735 print_ddg_edge (file
, e
);
737 fprintf (file
, "\n");
741 /* Print the given DDG in VCG format. */
743 vcg_print_ddg (FILE *file
, ddg_ptr g
)
747 fprintf (file
, "graph: {\n");
748 for (src_cuid
= 0; src_cuid
< g
->num_nodes
; src_cuid
++)
751 int src_uid
= INSN_UID (g
->nodes
[src_cuid
].insn
);
753 fprintf (file
, "node: {title: \"%d_%d\" info1: \"", src_cuid
, src_uid
);
754 print_rtl_single (file
, g
->nodes
[src_cuid
].insn
);
755 fprintf (file
, "\"}\n");
756 for (e
= g
->nodes
[src_cuid
].out
; e
; e
= e
->next_out
)
758 int dst_uid
= INSN_UID (e
->dest
->insn
);
759 int dst_cuid
= e
->dest
->cuid
;
761 /* Give the backarcs a different color. */
763 fprintf (file
, "backedge: {color: red ");
765 fprintf (file
, "edge: { ");
767 fprintf (file
, "sourcename: \"%d_%d\" ", src_cuid
, src_uid
);
768 fprintf (file
, "targetname: \"%d_%d\" ", dst_cuid
, dst_uid
);
769 fprintf (file
, "label: \"%d_%d\"}\n", e
->latency
, e
->distance
);
772 fprintf (file
, "}\n");
775 /* Dump the sccs in SCCS. */
777 print_sccs (FILE *file
, ddg_all_sccs_ptr sccs
, ddg_ptr g
)
780 sbitmap_iterator sbi
;
786 fprintf (file
, "\n;; Number of SCC nodes - %d\n", sccs
->num_sccs
);
787 for (i
= 0; i
< sccs
->num_sccs
; i
++)
789 fprintf (file
, "SCC number: %d\n", i
);
790 EXECUTE_IF_SET_IN_BITMAP (sccs
->sccs
[i
]->nodes
, 0, u
, sbi
)
792 fprintf (file
, "insn num %d\n", u
);
793 print_rtl_single (file
, g
->nodes
[u
].insn
);
796 fprintf (file
, "\n");
799 /* Create an edge and initialize it with given values. */
801 create_ddg_edge (ddg_node_ptr src
, ddg_node_ptr dest
,
802 dep_type t
, dep_data_type dt
, int l
, int d
)
804 ddg_edge_ptr e
= (ddg_edge_ptr
) xmalloc (sizeof (struct ddg_edge
));
812 e
->next_in
= e
->next_out
= NULL
;
817 /* Add the given edge to the in/out linked lists of the DDG nodes. */
819 add_edge_to_ddg (ddg_ptr g ATTRIBUTE_UNUSED
, ddg_edge_ptr e
)
821 ddg_node_ptr src
= e
->src
;
822 ddg_node_ptr dest
= e
->dest
;
824 /* Should have allocated the sbitmaps. */
825 gcc_assert (src
->successors
&& dest
->predecessors
);
827 bitmap_set_bit (src
->successors
, dest
->cuid
);
828 bitmap_set_bit (dest
->predecessors
, src
->cuid
);
829 e
->next_in
= dest
->in
;
831 e
->next_out
= src
->out
;
837 /* Algorithm for computing the recurrence_length of an scc. We assume at
838 for now that cycles in the data dependence graph contain a single backarc.
839 This simplifies the algorithm, and can be generalized later. */
841 set_recurrence_length (ddg_scc_ptr scc
, ddg_ptr g
)
846 for (j
= 0; j
< scc
->num_backarcs
; j
++)
848 ddg_edge_ptr backarc
= scc
->backarcs
[j
];
850 int distance
= backarc
->distance
;
851 ddg_node_ptr src
= backarc
->dest
;
852 ddg_node_ptr dest
= backarc
->src
;
854 length
= longest_simple_path (g
, src
->cuid
, dest
->cuid
, scc
->nodes
);
857 /* fprintf (stderr, "Backarc not on simple cycle in SCC.\n"); */
860 length
+= backarc
->latency
;
861 result
= MAX (result
, (length
/ distance
));
863 scc
->recurrence_length
= result
;
866 /* Create a new SCC given the set of its nodes. Compute its recurrence_length
867 and mark edges that belong to this scc as IN_SCC. */
869 create_scc (ddg_ptr g
, sbitmap nodes
)
873 sbitmap_iterator sbi
;
875 scc
= (ddg_scc_ptr
) xmalloc (sizeof (struct ddg_scc
));
876 scc
->backarcs
= NULL
;
877 scc
->num_backarcs
= 0;
878 scc
->nodes
= sbitmap_alloc (g
->num_nodes
);
879 bitmap_copy (scc
->nodes
, nodes
);
881 /* Mark the backarcs that belong to this SCC. */
882 EXECUTE_IF_SET_IN_BITMAP (nodes
, 0, u
, sbi
)
885 ddg_node_ptr n
= &g
->nodes
[u
];
887 for (e
= n
->out
; e
; e
= e
->next_out
)
888 if (bitmap_bit_p (nodes
, e
->dest
->cuid
))
890 e
->aux
.count
= IN_SCC
;
892 add_backarc_to_scc (scc
, e
);
896 set_recurrence_length (scc
, g
);
900 /* Cleans the memory allocation of a given SCC. */
902 free_scc (ddg_scc_ptr scc
)
907 sbitmap_free (scc
->nodes
);
908 if (scc
->num_backarcs
> 0)
909 free (scc
->backarcs
);
914 /* Add a given edge known to be a backarc to the given DDG. */
916 add_backarc_to_ddg (ddg_ptr g
, ddg_edge_ptr e
)
918 int size
= (g
->num_backarcs
+ 1) * sizeof (ddg_edge_ptr
);
920 add_edge_to_ddg (g
, e
);
921 g
->backarcs
= (ddg_edge_ptr
*) xrealloc (g
->backarcs
, size
);
922 g
->backarcs
[g
->num_backarcs
++] = e
;
925 /* Add backarc to an SCC. */
927 add_backarc_to_scc (ddg_scc_ptr scc
, ddg_edge_ptr e
)
929 int size
= (scc
->num_backarcs
+ 1) * sizeof (ddg_edge_ptr
);
931 scc
->backarcs
= (ddg_edge_ptr
*) xrealloc (scc
->backarcs
, size
);
932 scc
->backarcs
[scc
->num_backarcs
++] = e
;
935 /* Add the given SCC to the DDG. */
937 add_scc_to_ddg (ddg_all_sccs_ptr g
, ddg_scc_ptr scc
)
939 int size
= (g
->num_sccs
+ 1) * sizeof (ddg_scc_ptr
);
941 g
->sccs
= (ddg_scc_ptr
*) xrealloc (g
->sccs
, size
);
942 g
->sccs
[g
->num_sccs
++] = scc
;
945 /* Given the instruction INSN return the node that represents it. */
947 get_node_of_insn (ddg_ptr g
, rtx_insn
*insn
)
951 for (i
= 0; i
< g
->num_nodes
; i
++)
952 if (insn
== g
->nodes
[i
].insn
)
957 /* Given a set OPS of nodes in the DDG, find the set of their successors
958 which are not in OPS, and set their bits in SUCC. Bits corresponding to
959 OPS are cleared from SUCC. Leaves the other bits in SUCC unchanged. */
961 find_successors (sbitmap succ
, ddg_ptr g
, sbitmap ops
)
964 sbitmap_iterator sbi
;
966 EXECUTE_IF_SET_IN_BITMAP (ops
, 0, i
, sbi
)
968 const sbitmap node_succ
= NODE_SUCCESSORS (&g
->nodes
[i
]);
969 bitmap_ior (succ
, succ
, node_succ
);
972 /* We want those that are not in ops. */
973 bitmap_and_compl (succ
, succ
, ops
);
976 /* Given a set OPS of nodes in the DDG, find the set of their predecessors
977 which are not in OPS, and set their bits in PREDS. Bits corresponding to
978 OPS are cleared from PREDS. Leaves the other bits in PREDS unchanged. */
980 find_predecessors (sbitmap preds
, ddg_ptr g
, sbitmap ops
)
983 sbitmap_iterator sbi
;
985 EXECUTE_IF_SET_IN_BITMAP (ops
, 0, i
, sbi
)
987 const sbitmap node_preds
= NODE_PREDECESSORS (&g
->nodes
[i
]);
988 bitmap_ior (preds
, preds
, node_preds
);
991 /* We want those that are not in ops. */
992 bitmap_and_compl (preds
, preds
, ops
);
996 /* Compare function to be passed to qsort to order the backarcs in descending
999 compare_sccs (const void *s1
, const void *s2
)
1001 const int rec_l1
= (*(const ddg_scc_ptr
*)s1
)->recurrence_length
;
1002 const int rec_l2
= (*(const ddg_scc_ptr
*)s2
)->recurrence_length
;
1003 return ((rec_l2
> rec_l1
) - (rec_l2
< rec_l1
));
1007 /* Order the backarcs in descending recMII order using compare_sccs. */
1009 order_sccs (ddg_all_sccs_ptr g
)
1011 qsort (g
->sccs
, g
->num_sccs
, sizeof (ddg_scc_ptr
),
1012 (int (*) (const void *, const void *)) compare_sccs
);
1015 #ifdef ENABLE_CHECKING
1016 /* Check that every node in SCCS belongs to exactly one strongly connected
1017 component and that no element of SCCS is empty. */
1019 check_sccs (ddg_all_sccs_ptr sccs
, int num_nodes
)
1022 sbitmap tmp
= sbitmap_alloc (num_nodes
);
1025 for (i
= 0; i
< sccs
->num_sccs
; i
++)
1027 gcc_assert (!bitmap_empty_p (sccs
->sccs
[i
]->nodes
));
1028 /* Verify that every node in sccs is in exactly one strongly
1029 connected component. */
1030 gcc_assert (!bitmap_intersect_p (tmp
, sccs
->sccs
[i
]->nodes
));
1031 bitmap_ior (tmp
, tmp
, sccs
->sccs
[i
]->nodes
);
1037 /* Perform the Strongly Connected Components decomposing algorithm on the
1038 DDG and return DDG_ALL_SCCS structure that contains them. */
1040 create_ddg_all_sccs (ddg_ptr g
)
1043 int num_nodes
= g
->num_nodes
;
1044 sbitmap from
= sbitmap_alloc (num_nodes
);
1045 sbitmap to
= sbitmap_alloc (num_nodes
);
1046 sbitmap scc_nodes
= sbitmap_alloc (num_nodes
);
1047 ddg_all_sccs_ptr sccs
= (ddg_all_sccs_ptr
)
1048 xmalloc (sizeof (struct ddg_all_sccs
));
1054 for (i
= 0; i
< g
->num_backarcs
; i
++)
1057 ddg_edge_ptr backarc
= g
->backarcs
[i
];
1058 ddg_node_ptr src
= backarc
->src
;
1059 ddg_node_ptr dest
= backarc
->dest
;
1061 /* If the backarc already belongs to an SCC, continue. */
1062 if (backarc
->aux
.count
== IN_SCC
)
1065 bitmap_clear (scc_nodes
);
1066 bitmap_clear (from
);
1068 bitmap_set_bit (from
, dest
->cuid
);
1069 bitmap_set_bit (to
, src
->cuid
);
1071 if (find_nodes_on_paths (scc_nodes
, g
, from
, to
))
1073 scc
= create_scc (g
, scc_nodes
);
1074 add_scc_to_ddg (sccs
, scc
);
1078 sbitmap_free (from
);
1080 sbitmap_free (scc_nodes
);
1081 #ifdef ENABLE_CHECKING
1082 check_sccs (sccs
, num_nodes
);
1087 /* Frees the memory allocated for all SCCs of the DDG, but keeps the DDG. */
1089 free_ddg_all_sccs (ddg_all_sccs_ptr all_sccs
)
1096 for (i
= 0; i
< all_sccs
->num_sccs
; i
++)
1097 free_scc (all_sccs
->sccs
[i
]);
1099 free (all_sccs
->sccs
);
1104 /* Given FROM - a bitmap of source nodes - and TO - a bitmap of destination
1105 nodes - find all nodes that lie on paths from FROM to TO (not excluding
1106 nodes from FROM and TO). Return nonzero if nodes exist. */
1108 find_nodes_on_paths (sbitmap result
, ddg_ptr g
, sbitmap from
, sbitmap to
)
1113 int num_nodes
= g
->num_nodes
;
1114 sbitmap_iterator sbi
;
1116 sbitmap workset
= sbitmap_alloc (num_nodes
);
1117 sbitmap reachable_from
= sbitmap_alloc (num_nodes
);
1118 sbitmap reach_to
= sbitmap_alloc (num_nodes
);
1119 sbitmap tmp
= sbitmap_alloc (num_nodes
);
1121 bitmap_copy (reachable_from
, from
);
1122 bitmap_copy (tmp
, from
);
1128 bitmap_copy (workset
, tmp
);
1130 EXECUTE_IF_SET_IN_BITMAP (workset
, 0, u
, sbi
)
1133 ddg_node_ptr u_node
= &g
->nodes
[u
];
1135 for (e
= u_node
->out
; e
!= (ddg_edge_ptr
) 0; e
= e
->next_out
)
1137 ddg_node_ptr v_node
= e
->dest
;
1138 int v
= v_node
->cuid
;
1140 if (!bitmap_bit_p (reachable_from
, v
))
1142 bitmap_set_bit (reachable_from
, v
);
1143 bitmap_set_bit (tmp
, v
);
1150 bitmap_copy (reach_to
, to
);
1151 bitmap_copy (tmp
, to
);
1157 bitmap_copy (workset
, tmp
);
1159 EXECUTE_IF_SET_IN_BITMAP (workset
, 0, u
, sbi
)
1162 ddg_node_ptr u_node
= &g
->nodes
[u
];
1164 for (e
= u_node
->in
; e
!= (ddg_edge_ptr
) 0; e
= e
->next_in
)
1166 ddg_node_ptr v_node
= e
->src
;
1167 int v
= v_node
->cuid
;
1169 if (!bitmap_bit_p (reach_to
, v
))
1171 bitmap_set_bit (reach_to
, v
);
1172 bitmap_set_bit (tmp
, v
);
1179 answer
= bitmap_and (result
, reachable_from
, reach_to
);
1180 sbitmap_free (workset
);
1181 sbitmap_free (reachable_from
);
1182 sbitmap_free (reach_to
);
1188 /* Updates the counts of U_NODE's successors (that belong to NODES) to be
1189 at-least as large as the count of U_NODE plus the latency between them.
1190 Sets a bit in TMP for each successor whose count was changed (increased).
1191 Returns nonzero if any count was changed. */
1193 update_dist_to_successors (ddg_node_ptr u_node
, sbitmap nodes
, sbitmap tmp
)
1198 for (e
= u_node
->out
; e
; e
= e
->next_out
)
1200 ddg_node_ptr v_node
= e
->dest
;
1201 int v
= v_node
->cuid
;
1203 if (bitmap_bit_p (nodes
, v
)
1204 && (e
->distance
== 0)
1205 && (v_node
->aux
.count
< u_node
->aux
.count
+ e
->latency
))
1207 v_node
->aux
.count
= u_node
->aux
.count
+ e
->latency
;
1208 bitmap_set_bit (tmp
, v
);
1216 /* Find the length of a longest path from SRC to DEST in G,
1217 going only through NODES, and disregarding backarcs. */
1219 longest_simple_path (struct ddg
* g
, int src
, int dest
, sbitmap nodes
)
1225 int num_nodes
= g
->num_nodes
;
1226 sbitmap workset
= sbitmap_alloc (num_nodes
);
1227 sbitmap tmp
= sbitmap_alloc (num_nodes
);
1230 /* Data will hold the distance of the longest path found so far from
1231 src to each node. Initialize to -1 = less than minimum. */
1232 for (i
= 0; i
< g
->num_nodes
; i
++)
1233 g
->nodes
[i
].aux
.count
= -1;
1234 g
->nodes
[src
].aux
.count
= 0;
1237 bitmap_set_bit (tmp
, src
);
1241 sbitmap_iterator sbi
;
1244 bitmap_copy (workset
, tmp
);
1246 EXECUTE_IF_SET_IN_BITMAP (workset
, 0, u
, sbi
)
1248 ddg_node_ptr u_node
= &g
->nodes
[u
];
1250 change
|= update_dist_to_successors (u_node
, nodes
, tmp
);
1253 result
= g
->nodes
[dest
].aux
.count
;
1254 sbitmap_free (workset
);
1259 #endif /* INSN_SCHEDULING */