/* DDG - Data Dependence Graph implementation.
   Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Ayal Zaks and Mustafa Hagog <zaks,mustafa@il.ibm.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "sched-int.h"
#include "cfglayout.h"
#include "sbitmap.h"
#include "ddg.h"

#ifdef INSN_SCHEDULING

/* A flag indicating whether a ddg edge belongs to an SCC or not.  */
enum edge_flag {NOT_IN_SCC = 0, IN_SCC};

/* Forward declarations.  */
static void add_backarc_to_ddg (ddg_ptr, ddg_edge_ptr);
static void add_backarc_to_scc (ddg_scc_ptr, ddg_edge_ptr);
static void add_scc_to_ddg (ddg_all_sccs_ptr, ddg_scc_ptr);
static void create_ddg_dep_from_intra_loop_link (ddg_ptr, ddg_node_ptr,
                                                 ddg_node_ptr, dep_t);
static void create_ddg_dep_no_link (ddg_ptr, ddg_node_ptr, ddg_node_ptr,
                                    dep_type, dep_data_type, int);
static ddg_edge_ptr create_ddg_edge (ddg_node_ptr, ddg_node_ptr, dep_type,
                                     dep_data_type, int, int);
static void add_edge_to_ddg (ddg_ptr g, ddg_edge_ptr);

/* Auxiliary variable for mem_read_insn_p/mem_write_insn_p.  */
static bool mem_ref_p;

/* Auxiliary function for mem_read_insn_p.  */
static int
mark_mem_use (rtx *x, void *data ATTRIBUTE_UNUSED)
{
  if (MEM_P (*x))
    mem_ref_p = true;
  return 0;
}

/* Auxiliary function for mem_read_insn_p.  */
static void
mark_mem_use_1 (rtx *x, void *data)
{
  for_each_rtx (x, mark_mem_use, data);
}

/* Returns nonzero if INSN reads from memory.  */
static bool
mem_read_insn_p (rtx insn)
{
  mem_ref_p = false;
  note_uses (&PATTERN (insn), mark_mem_use_1, NULL);
  return mem_ref_p;
}

static void
mark_mem_store (rtx loc, const_rtx setter ATTRIBUTE_UNUSED,
		void *data ATTRIBUTE_UNUSED)
{
  if (MEM_P (loc))
    mem_ref_p = true;
}

/* Returns nonzero if INSN writes to memory.  */
static bool
mem_write_insn_p (rtx insn)
{
  mem_ref_p = false;
  note_stores (PATTERN (insn), mark_mem_store, NULL);
  return mem_ref_p;
}

/* Returns nonzero if X has access to memory.  */
static bool
rtx_mem_access_p (rtx x)
{
  int i, j;
  const char *fmt;
  enum rtx_code code;

  if (x == 0)
    return false;

  if (MEM_P (x))
    return true;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  if (rtx_mem_access_p (XEXP (x, i)))
	    return true;
	}
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  {
	    if (rtx_mem_access_p (XVECEXP (x, i, j)))
	      return true;
	  }
    }
  return false;
}

/* Returns nonzero if INSN reads from or writes to memory.  */
static bool
mem_access_insn_p (rtx insn)
{
  return rtx_mem_access_p (PATTERN (insn));
}

/* Computes the dependence parameters (latency, distance etc.), creates
   a ddg_edge and adds it to the given DDG.  */
static void
create_ddg_dep_from_intra_loop_link (ddg_ptr g, ddg_node_ptr src_node,
                                     ddg_node_ptr dest_node, dep_t link)
{
  ddg_edge_ptr e;
  int latency, distance = 0;
  dep_type t = TRUE_DEP;
  dep_data_type dt = (mem_access_insn_p (src_node->insn)
		      && mem_access_insn_p (dest_node->insn) ? MEM_DEP
		      : REG_DEP);
  gcc_assert (src_node->cuid < dest_node->cuid);

  /* Note: REG_DEP_ANTI applies to MEM ANTI_DEP as well!!  */
  if (DEP_TYPE (link) == REG_DEP_ANTI)
    t = ANTI_DEP;
  else if (DEP_TYPE (link) == REG_DEP_OUTPUT)
    t = OUTPUT_DEP;

  gcc_assert (!DEBUG_INSN_P (dest_node->insn) || t == ANTI_DEP);
  gcc_assert (!DEBUG_INSN_P (src_node->insn) || t == ANTI_DEP);

  /* We currently choose not to create certain anti-dep edges and
     compensate for that by generating reg-moves based on the life-range
     analysis.  The anti-deps that will be deleted are the ones which
     have true-dep edges in the opposite direction (in other words
     the kernel has only one def of the relevant register).  TODO:
     support the removal of all anti-dep edges, i.e. including those
     whose register has multiple defs in the loop.  */
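  /* Illustration (not part of the original sources): if the loop's only
     def of register r is insn D, and insn U earlier in the block uses the
     value of r produced by the previous iteration's D, then D -> U is a
     true dep with distance 1 and U -> D is an anti dep with distance 0.
     The anti dep is the one skipped below; if scheduling then places D's
     new value before U reads the old one, the modulo scheduler
     compensates by inserting a reg-move that keeps the previous copy of r
     alive for U.  */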
  if (flag_modulo_sched_allow_regmoves && (t == ANTI_DEP && dt == REG_DEP))
    {
      rtx set;

      set = single_set (dest_node->insn);
      /* TODO: Handle registers for which REG_P is not true, i.e.
         subregs and special registers.  */
      if (set && REG_P (SET_DEST (set)))
	{
	  int regno = REGNO (SET_DEST (set));
	  df_ref first_def;
	  struct df_rd_bb_info *bb_info = DF_RD_BB_INFO (g->bb);

	  first_def = df_bb_regno_first_def_find (g->bb, regno);
	  gcc_assert (first_def);

	  if (bitmap_bit_p (&bb_info->gen, DF_REF_ID (first_def)))
	    return;
	}
    }

  /* If a true dep edge enters the branch create an anti edge in the
     opposite direction to prevent the creation of reg-moves.  */
  if ((DEP_TYPE (link) == REG_DEP_TRUE) && JUMP_P (dest_node->insn))
    create_ddg_dep_no_link (g, dest_node, src_node, ANTI_DEP, REG_DEP, 1);

  latency = dep_cost (link);
  e = create_ddg_edge (src_node, dest_node, t, dt, latency, distance);
  add_edge_to_ddg (g, e);
}

/* The same as the above function, but it doesn't require a link parameter.  */
static void
create_ddg_dep_no_link (ddg_ptr g, ddg_node_ptr from, ddg_node_ptr to,
			dep_type d_t, dep_data_type d_dt, int distance)
{
  ddg_edge_ptr e;
  int l;
  enum reg_note dep_kind;
  struct _dep _dep, *dep = &_dep;

  gcc_assert (!DEBUG_INSN_P (to->insn) || d_t == ANTI_DEP);
  gcc_assert (!DEBUG_INSN_P (from->insn) || d_t == ANTI_DEP);

  if (d_t == ANTI_DEP)
    dep_kind = REG_DEP_ANTI;
  else if (d_t == OUTPUT_DEP)
    dep_kind = REG_DEP_OUTPUT;
  else
    {
      gcc_assert (d_t == TRUE_DEP);
      dep_kind = REG_DEP_TRUE;
    }

  init_dep (dep, from->insn, to->insn, dep_kind);

  l = dep_cost (dep);

  e = create_ddg_edge (from, to, d_t, d_dt, l, distance);
  if (distance > 0)
    add_backarc_to_ddg (g, e);
  else
    add_edge_to_ddg (g, e);
}

/* Given a downwards exposed register def LAST_DEF (which is the last
   definition of that register in the bb), add inter-loop true dependences
   to all its uses in the next iteration, an output dependence to the
   first def of the same register (possibly itself) in the next iteration
   and anti-dependences from its uses in the current iteration to the
   first definition in the next iteration.  */
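/* Illustration (not part of the original sources): with a single def D of
   register r per iteration, a use U that appears before D in the block
   reads the value produced by the previous iteration, so it gets a true
   dep D -> U with distance 1; a use that appears after D instead gets an
   anti dep from the use to the first def of r with distance 1, so the
   next iteration's def cannot be hoisted above it; and when r has no use
   in the block at all, the last def gets an output dep (distance 1) to
   the first def.  */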
static void
add_cross_iteration_register_deps (ddg_ptr g, df_ref last_def)
{
  int regno = DF_REF_REGNO (last_def);
  struct df_link *r_use;
  int has_use_in_bb_p = false;
  rtx def_insn = DF_REF_INSN (last_def);
  ddg_node_ptr last_def_node = get_node_of_insn (g, def_insn);
  ddg_node_ptr use_node;
#ifdef ENABLE_CHECKING
  struct df_rd_bb_info *bb_info = DF_RD_BB_INFO (g->bb);
#endif
  df_ref first_def = df_bb_regno_first_def_find (g->bb, regno);

  gcc_assert (last_def_node);
  gcc_assert (first_def);

#ifdef ENABLE_CHECKING
  if (DF_REF_ID (last_def) != DF_REF_ID (first_def))
    gcc_assert (!bitmap_bit_p (&bb_info->gen,
			       DF_REF_ID (first_def)));
#endif

  /* Create inter-loop true dependences and anti dependences.  */
  for (r_use = DF_REF_CHAIN (last_def); r_use != NULL; r_use = r_use->next)
    {
      rtx use_insn = DF_REF_INSN (r_use->ref);

      if (BLOCK_FOR_INSN (use_insn) != g->bb)
	continue;

      /* ??? Do not handle uses with DF_REF_IN_NOTE notes.  */
      use_node = get_node_of_insn (g, use_insn);
      gcc_assert (use_node);
      has_use_in_bb_p = true;
      if (use_node->cuid <= last_def_node->cuid)
	{
	  /* Add true deps from last_def to its uses in the next
	     iteration.  Any such upwards exposed use appears before
	     the last_def insn.  */
	  create_ddg_dep_no_link (g, last_def_node, use_node,
				  DEBUG_INSN_P (use_insn) ? ANTI_DEP : TRUE_DEP,
				  REG_DEP, 1);
	}
      else if (!DEBUG_INSN_P (use_insn))
	{
	  /* Add anti deps from last_def's uses in the current iteration
	     to the first def in the next iteration.  We do not add ANTI
	     dep when there is an intra-loop TRUE dep in the opposite
	     direction, but use regmoves to fix such disregarded ANTI
	     deps when broken.  If the first_def reaches the USE then
	     there is such a dep.  */
	  ddg_node_ptr first_def_node = get_node_of_insn (g,
							  DF_REF_INSN (first_def));

	  gcc_assert (first_def_node);

	  if (DF_REF_ID (last_def) != DF_REF_ID (first_def)
	      || !flag_modulo_sched_allow_regmoves)
	    create_ddg_dep_no_link (g, use_node, first_def_node, ANTI_DEP,
				    REG_DEP, 1);
	}
    }

  /* Create an inter-loop output dependence between LAST_DEF (which is the
     last def in its block, being downwards exposed) and the first def in
     its block.  Avoid creating a self output dependence.  Avoid creating
     an output dependence if there is a dependence path between the two
     defs starting with a true dependence to a use which can be in the
     next iteration; followed by an anti dependence of that use to the
     first def (i.e. if there is a use between the two defs.)  */
  if (!has_use_in_bb_p)
    {
      ddg_node_ptr dest_node;

      if (DF_REF_ID (last_def) == DF_REF_ID (first_def))
	return;

      dest_node = get_node_of_insn (g, DF_REF_INSN (first_def));
      gcc_assert (dest_node);
      create_ddg_dep_no_link (g, last_def_node, dest_node,
			      OUTPUT_DEP, REG_DEP, 1);
    }
}

/* Build inter-loop dependencies, by looking at DF analysis backwards.  */
static void
build_inter_loop_deps (ddg_ptr g)
{
  unsigned rd_num;
  struct df_rd_bb_info *rd_bb_info;
  bitmap_iterator bi;

  rd_bb_info = DF_RD_BB_INFO (g->bb);

  /* Find inter-loop register output, true and anti deps.  */
  EXECUTE_IF_SET_IN_BITMAP (&rd_bb_info->gen, 0, rd_num, bi)
    {
      df_ref rd = DF_DEFS_GET (rd_num);

      add_cross_iteration_register_deps (g, rd);
    }
}

static int
walk_mems_2 (rtx *x, rtx mem)
{
  if (MEM_P (*x))
    {
      if (may_alias_p (*x, mem))
	return 1;

      return -1;
    }
  return 0;
}

static int
walk_mems_1 (rtx *x, rtx *pat)
{
  if (MEM_P (*x))
    {
      /* Visit all MEMs in *PAT and check independence.  */
      if (for_each_rtx (pat, (rtx_function) walk_mems_2, *x))
	/* Indicate that dependence was determined and stop traversal.  */
	return 1;

      return -1;
    }
  return 0;
}

/* Return 1 if two specified instructions have mem exprs with conflicting
   alias sets.  */
static int
insns_may_alias_p (rtx insn1, rtx insn2)
{
  /* For each pair of MEMs in INSN1 and INSN2 check their independence.  */
  return for_each_rtx (&PATTERN (insn1), (rtx_function) walk_mems_1,
		       &PATTERN (insn2));
}

/* Given two nodes, analyze their RTL insns and add inter-loop mem deps
   to ddg G.  */
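/* Summary of the edges created below (all with distance 1 and MEM_DEP):
   write -> read gives a true dep, write -> write an output dep,
   read -> write an anti dep together with a reverse true dep, and
   read -> read creates nothing; debug insns only ever receive anti deps.  */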
static void
add_inter_loop_mem_dep (ddg_ptr g, ddg_node_ptr from, ddg_node_ptr to)
{
  if (!insns_may_alias_p (from->insn, to->insn))
    /* Do not create edge if memory references have disjoint alias sets.  */
    return;

  if (mem_write_insn_p (from->insn))
    {
      if (mem_read_insn_p (to->insn))
	create_ddg_dep_no_link (g, from, to,
				DEBUG_INSN_P (to->insn)
				? ANTI_DEP : TRUE_DEP, MEM_DEP, 1);
      else if (from->cuid != to->cuid)
	create_ddg_dep_no_link (g, from, to,
				DEBUG_INSN_P (to->insn)
				? ANTI_DEP : OUTPUT_DEP, MEM_DEP, 1);
    }
  else
    {
      if (mem_read_insn_p (to->insn))
	return;
      else if (from->cuid != to->cuid)
	{
	  create_ddg_dep_no_link (g, from, to, ANTI_DEP, MEM_DEP, 1);
	  if (DEBUG_INSN_P (from->insn) || DEBUG_INSN_P (to->insn))
	    create_ddg_dep_no_link (g, to, from, ANTI_DEP, MEM_DEP, 1);
	  else
	    create_ddg_dep_no_link (g, to, from, TRUE_DEP, MEM_DEP, 1);
	}
    }
}

/* Perform intra-block Data Dependency analysis and connect the nodes in
   the DDG.  We assume the loop has a single basic block.  */
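/* The steps below (descriptive note, not from the original sources): set
   up the scheduler's dependence analysis for the single block
   (init_deps/sched_analyze), translate each back-dependence it recorded
   into a DDG edge via create_ddg_dep_from_intra_loop_link, then
   conservatively add inter-loop (distance 1) memory dependences between
   pairs of memory-accessing insns not already related by an intra-loop
   dep, and finally release the scheduler's dependence state.  */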
static void
build_intra_loop_deps (ddg_ptr g)
{
  int i;
  /* Hold the dependency analysis state during dependency calculations.  */
  struct deps_desc tmp_deps;
  rtx head, tail;

  /* Build the dependence information, using the sched_analyze function.  */
  init_deps_global ();
  init_deps (&tmp_deps, false);

  /* Do the intra-block data dependence analysis for the given block.  */
  get_ebb_head_tail (g->bb, g->bb, &head, &tail);
  sched_analyze (&tmp_deps, head, tail);

  /* Build intra-loop data dependencies using the scheduler dependency
     analysis.  */
  for (i = 0; i < g->num_nodes; i++)
    {
      ddg_node_ptr dest_node = &g->nodes[i];
      sd_iterator_def sd_it;
      dep_t dep;

      if (! INSN_P (dest_node->insn))
	continue;

      FOR_EACH_DEP (dest_node->insn, SD_LIST_BACK, sd_it, dep)
	{
	  ddg_node_ptr src_node = get_node_of_insn (g, DEP_PRO (dep));

	  if (!src_node)
	    continue;

	  create_ddg_dep_from_intra_loop_link (g, src_node, dest_node, dep);
	}

      /* If this insn modifies memory, add an edge to all insns that access
	 memory.  */
      if (mem_access_insn_p (dest_node->insn))
	{
	  int j;

	  for (j = 0; j <= i; j++)
	    {
	      ddg_node_ptr j_node = &g->nodes[j];
	      if (DEBUG_INSN_P (j_node->insn))
		continue;
	      if (mem_access_insn_p (j_node->insn))
		/* Don't bother calculating inter-loop dep if an intra-loop dep
		   already exists.  */
		if (! TEST_BIT (dest_node->successors, j))
		  add_inter_loop_mem_dep (g, dest_node, j_node);
	    }
	}
    }

  /* Free the INSN_LISTs.  */
  finish_deps_global ();
  free_deps (&tmp_deps);

  /* Free dependencies.  */
  sched_free_deps (head, tail, false);
}

/* Given a basic block, create its DDG and return a pointer to a variable
   of ddg type that represents it.
   Initialize the ddg structure fields to the appropriate values.  */
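/* Typical use (a sketch, not copied from modulo-sched.c): the modulo
   scheduler builds the graph for a candidate loop body and frees it once
   scheduling of that loop is done, roughly

       ddg_ptr g = create_ddg (bb, 0);
       if (g)
         {
           ... schedule using g ...
           free_ddg (g);
         }

   where a NULL result means the block had nothing worth scheduling.  */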
ddg_ptr
create_ddg (basic_block bb, int closing_branch_deps)
{
  ddg_ptr g;
  rtx insn, first_note;
  int i;
  int num_nodes = 0;

  g = (ddg_ptr) xcalloc (1, sizeof (struct ddg));

  g->bb = bb;
  g->closing_branch_deps = closing_branch_deps;

  /* Count the number of insns in the BB.  */
  for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
       insn = NEXT_INSN (insn))
    {
      if (! INSN_P (insn) || GET_CODE (PATTERN (insn)) == USE)
	continue;

      if (DEBUG_INSN_P (insn))
	g->num_debug++;
      else
	{
	  if (mem_read_insn_p (insn))
	    g->num_loads++;
	  if (mem_write_insn_p (insn))
	    g->num_stores++;
	}
      num_nodes++;
    }

  /* There is nothing to do for this BB.  */
  if ((num_nodes - g->num_debug) <= 1)
    {
      free (g);
      return NULL;
    }

  /* Allocate the nodes array, and initialize the nodes.  */
  g->num_nodes = num_nodes;
  g->nodes = (ddg_node_ptr) xcalloc (num_nodes, sizeof (struct ddg_node));
  g->closing_branch = NULL;
  i = 0;
  first_note = NULL_RTX;
  for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
       insn = NEXT_INSN (insn))
    {
      if (! INSN_P (insn))
	{
	  if (! first_note && NOTE_P (insn)
	      && NOTE_KIND (insn) != NOTE_INSN_BASIC_BLOCK)
	    first_note = insn;
	  continue;
	}
      if (JUMP_P (insn))
	{
	  gcc_assert (!g->closing_branch);
	  g->closing_branch = &g->nodes[i];
	}
      else if (GET_CODE (PATTERN (insn)) == USE)
	{
	  if (! first_note)
	    first_note = insn;
	  continue;
	}

      g->nodes[i].cuid = i;
      g->nodes[i].successors = sbitmap_alloc (num_nodes);
      sbitmap_zero (g->nodes[i].successors);
      g->nodes[i].predecessors = sbitmap_alloc (num_nodes);
      sbitmap_zero (g->nodes[i].predecessors);
      g->nodes[i].first_note = (first_note ? first_note : insn);
      g->nodes[i++].insn = insn;
      first_note = NULL_RTX;
    }

  /* We must have found a branch in DDG.  */
  gcc_assert (g->closing_branch);

  /* Build the data dependency graph.  */
  build_intra_loop_deps (g);
  build_inter_loop_deps (g);

  return g;
}

/* Free all the memory allocated for the DDG.  */
void
free_ddg (ddg_ptr g)
{
  int i;

  if (!g)
    return;

  for (i = 0; i < g->num_nodes; i++)
    {
      ddg_edge_ptr e = g->nodes[i].out;

      while (e)
	{
	  ddg_edge_ptr next = e->next_out;

	  free (e);
	  e = next;
	}
      sbitmap_free (g->nodes[i].successors);
      sbitmap_free (g->nodes[i].predecessors);
    }
  if (g->num_backarcs > 0)
    free (g->backarcs);
  free (g->nodes);
  free (g);
}

static void
print_ddg_edge (FILE *file, ddg_edge_ptr e)
{
  char dep_c;

  switch (e->type)
    {
    case OUTPUT_DEP:
      dep_c = 'O';
      break;
    case ANTI_DEP:
      dep_c = 'A';
      break;
    default:
      dep_c = 'T';
    }

  fprintf (file, " [%d -(%c,%d,%d)-> %d] ", INSN_UID (e->src->insn),
	   dep_c, e->latency, e->distance, INSN_UID (e->dest->insn));
}

/* Print the DDG nodes with their in/out edges to the dump file.  */
void
print_ddg (FILE *file, ddg_ptr g)
{
  int i;

  for (i = 0; i < g->num_nodes; i++)
    {
      ddg_edge_ptr e;

      fprintf (file, "Node num: %d\n", g->nodes[i].cuid);
      print_rtl_single (file, g->nodes[i].insn);
      fprintf (file, "OUT ARCS: ");
      for (e = g->nodes[i].out; e; e = e->next_out)
	print_ddg_edge (file, e);

      fprintf (file, "\nIN ARCS: ");
      for (e = g->nodes[i].in; e; e = e->next_in)
	print_ddg_edge (file, e);

      fprintf (file, "\n");
    }
}

/* Print the given DDG in VCG format.  */
void
vcg_print_ddg (FILE *file, ddg_ptr g)
{
  int src_cuid;

  fprintf (file, "graph: {\n");
  for (src_cuid = 0; src_cuid < g->num_nodes; src_cuid++)
    {
      ddg_edge_ptr e;
      int src_uid = INSN_UID (g->nodes[src_cuid].insn);

      fprintf (file, "node: {title: \"%d_%d\" info1: \"", src_cuid, src_uid);
      print_rtl_single (file, g->nodes[src_cuid].insn);
      fprintf (file, "\"}\n");
      for (e = g->nodes[src_cuid].out; e; e = e->next_out)
	{
	  int dst_uid = INSN_UID (e->dest->insn);
	  int dst_cuid = e->dest->cuid;

	  /* Give the backarcs a different color.  */
	  if (e->distance > 0)
	    fprintf (file, "backedge: {color: red ");
	  else
	    fprintf (file, "edge: { ");

	  fprintf (file, "sourcename: \"%d_%d\" ", src_cuid, src_uid);
	  fprintf (file, "targetname: \"%d_%d\" ", dst_cuid, dst_uid);
	  fprintf (file, "label: \"%d_%d\"}\n", e->latency, e->distance);
	}
    }
  fprintf (file, "}\n");
}

/* Dump the sccs in SCCS.  */
void
print_sccs (FILE *file, ddg_all_sccs_ptr sccs, ddg_ptr g)
{
  unsigned int u = 0;
  sbitmap_iterator sbi;
  int i;

  if (!file)
    return;

  fprintf (file, "\n;; Number of SCC nodes - %d\n", sccs->num_sccs);
  for (i = 0; i < sccs->num_sccs; i++)
    {
      fprintf (file, "SCC number: %d\n", i);
      EXECUTE_IF_SET_IN_SBITMAP (sccs->sccs[i]->nodes, 0, u, sbi)
	{
	  fprintf (file, "insn num %d\n", u);
	  print_rtl_single (file, g->nodes[u].insn);
	}
    }
  fprintf (file, "\n");
}

/* Create an edge and initialize it with given values.  */
static ddg_edge_ptr
create_ddg_edge (ddg_node_ptr src, ddg_node_ptr dest,
		 dep_type t, dep_data_type dt, int l, int d)
{
  ddg_edge_ptr e = (ddg_edge_ptr) xmalloc (sizeof (struct ddg_edge));

  e->src = src;
  e->dest = dest;
  e->type = t;
  e->data_type = dt;
  e->latency = l;
  e->distance = d;
  e->next_in = e->next_out = NULL;
  e->aux.info = 0;
  return e;
}

/* Add the given edge to the in/out linked lists of the DDG nodes.  */
static void
add_edge_to_ddg (ddg_ptr g ATTRIBUTE_UNUSED, ddg_edge_ptr e)
{
  ddg_node_ptr src = e->src;
  ddg_node_ptr dest = e->dest;

  /* Should have allocated the sbitmaps.  */
  gcc_assert (src->successors && dest->predecessors);

  SET_BIT (src->successors, dest->cuid);
  SET_BIT (dest->predecessors, src->cuid);
  e->next_in = dest->in;
  dest->in = e;
  e->next_out = src->out;
  src->out = e;
}

/* Algorithm for computing the recurrence_length of an scc.  We assume for
   now that cycles in the data dependence graph contain a single backarc.
   This simplifies the algorithm, and can be generalized later.  */
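/* Worked example (illustration only): if the longest path inside the SCC
   from the backarc's head to its tail has latency 5, the backarc itself
   has latency 1 and its distance (in iterations) is 2, the cycle needs
   (5 + 1) / 2 = 3 cycles per iteration, so the recurrence contributes at
   least 3.  The maximum of this ratio over all backarcs is the value
   stored below, which the modulo scheduler uses as its recurrence-based
   lower bound (RecMII) on the initiation interval.  */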
static void
set_recurrence_length (ddg_scc_ptr scc, ddg_ptr g)
{
  int j;
  int result = -1;

  for (j = 0; j < scc->num_backarcs; j++)
    {
      ddg_edge_ptr backarc = scc->backarcs[j];
      int length;
      int distance = backarc->distance;
      ddg_node_ptr src = backarc->dest;
      ddg_node_ptr dest = backarc->src;

      length = longest_simple_path (g, src->cuid, dest->cuid, scc->nodes);
      if (length < 0)
	{
	  /* fprintf (stderr, "Backarc not on simple cycle in SCC.\n"); */
	  continue;
	}
      length += backarc->latency;
      result = MAX (result, (length / distance));
    }
  scc->recurrence_length = result;
}

/* Create a new SCC given the set of its nodes.  Compute its recurrence_length
   and mark edges that belong to this scc as IN_SCC.  */
static ddg_scc_ptr
create_scc (ddg_ptr g, sbitmap nodes)
{
  ddg_scc_ptr scc;
  unsigned int u = 0;
  sbitmap_iterator sbi;

  scc = (ddg_scc_ptr) xmalloc (sizeof (struct ddg_scc));
  scc->backarcs = NULL;
  scc->num_backarcs = 0;
  scc->nodes = sbitmap_alloc (g->num_nodes);
  sbitmap_copy (scc->nodes, nodes);

  /* Mark the backarcs that belong to this SCC.  */
  EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, u, sbi)
    {
      ddg_edge_ptr e;
      ddg_node_ptr n = &g->nodes[u];

      for (e = n->out; e; e = e->next_out)
	if (TEST_BIT (nodes, e->dest->cuid))
	  {
	    e->aux.count = IN_SCC;
	    if (e->distance > 0)
	      add_backarc_to_scc (scc, e);
	  }
    }

  set_recurrence_length (scc, g);
  return scc;
}

/* Frees the memory allocated for a given SCC.  */
static void
free_scc (ddg_scc_ptr scc)
{
  if (!scc)
    return;

  sbitmap_free (scc->nodes);
  if (scc->num_backarcs > 0)
    free (scc->backarcs);
  free (scc);
}

/* Add a given edge known to be a backarc to the given DDG.  */
static void
add_backarc_to_ddg (ddg_ptr g, ddg_edge_ptr e)
{
  int size = (g->num_backarcs + 1) * sizeof (ddg_edge_ptr);

  add_edge_to_ddg (g, e);
  g->backarcs = (ddg_edge_ptr *) xrealloc (g->backarcs, size);
  g->backarcs[g->num_backarcs++] = e;
}

/* Add backarc to an SCC.  */
static void
add_backarc_to_scc (ddg_scc_ptr scc, ddg_edge_ptr e)
{
  int size = (scc->num_backarcs + 1) * sizeof (ddg_edge_ptr);

  scc->backarcs = (ddg_edge_ptr *) xrealloc (scc->backarcs, size);
  scc->backarcs[scc->num_backarcs++] = e;
}

/* Add the given SCC to the DDG.  */
static void
add_scc_to_ddg (ddg_all_sccs_ptr g, ddg_scc_ptr scc)
{
  int size = (g->num_sccs + 1) * sizeof (ddg_scc_ptr);

  g->sccs = (ddg_scc_ptr *) xrealloc (g->sccs, size);
  g->sccs[g->num_sccs++] = scc;
}

/* Given the instruction INSN return the node that represents it.  */
ddg_node_ptr
get_node_of_insn (ddg_ptr g, rtx insn)
{
  int i;

  for (i = 0; i < g->num_nodes; i++)
    if (insn == g->nodes[i].insn)
      return &g->nodes[i];

  return NULL;
}

/* Given a set OPS of nodes in the DDG, find the set of their successors
   which are not in OPS, and set their bits in SUCC.  Bits corresponding to
   OPS are cleared from SUCC.  Leaves the other bits in SUCC unchanged.  */
void
find_successors (sbitmap succ, ddg_ptr g, sbitmap ops)
{
  unsigned int i = 0;
  sbitmap_iterator sbi;

  EXECUTE_IF_SET_IN_SBITMAP (ops, 0, i, sbi)
    {
      const sbitmap node_succ = NODE_SUCCESSORS (&g->nodes[i]);
      sbitmap_a_or_b (succ, succ, node_succ);
    };

  /* We want those that are not in ops.  */
  sbitmap_difference (succ, succ, ops);
}

/* Given a set OPS of nodes in the DDG, find the set of their predecessors
   which are not in OPS, and set their bits in PREDS.  Bits corresponding to
   OPS are cleared from PREDS.  Leaves the other bits in PREDS unchanged.  */
void
find_predecessors (sbitmap preds, ddg_ptr g, sbitmap ops)
{
  unsigned int i = 0;
  sbitmap_iterator sbi;

  EXECUTE_IF_SET_IN_SBITMAP (ops, 0, i, sbi)
    {
      const sbitmap node_preds = NODE_PREDECESSORS (&g->nodes[i]);
      sbitmap_a_or_b (preds, preds, node_preds);
    };

  /* We want those that are not in ops.  */
  sbitmap_difference (preds, preds, ops);
}

/* Compare function to be passed to qsort to order the SCCs in descending
   recMII order.  */
static int
compare_sccs (const void *s1, const void *s2)
{
  const int rec_l1 = (*(const ddg_scc_ptr *) s1)->recurrence_length;
  const int rec_l2 = (*(const ddg_scc_ptr *) s2)->recurrence_length;
  return ((rec_l2 > rec_l1) - (rec_l2 < rec_l1));
}

/* Order the SCCs in descending recMII order using compare_sccs.  */
static void
order_sccs (ddg_all_sccs_ptr g)
{
  qsort (g->sccs, g->num_sccs, sizeof (ddg_scc_ptr),
	 (int (*) (const void *, const void *)) compare_sccs);
}

#ifdef ENABLE_CHECKING
/* Check that every node in SCCS belongs to exactly one strongly connected
   component and that no element of SCCS is empty.  */
static void
check_sccs (ddg_all_sccs_ptr sccs, int num_nodes)
{
  int i;
  sbitmap tmp = sbitmap_alloc (num_nodes);

  sbitmap_zero (tmp);
  for (i = 0; i < sccs->num_sccs; i++)
    {
      gcc_assert (!sbitmap_empty_p (sccs->sccs[i]->nodes));
      /* Verify that every node in sccs is in exactly one strongly
         connected component.  */
      gcc_assert (!sbitmap_any_common_bits (tmp, sccs->sccs[i]->nodes));
      sbitmap_a_or_b (tmp, tmp, sccs->sccs[i]->nodes);
    }
  sbitmap_free (tmp);
}
#endif

/* Perform the Strongly Connected Components decomposition algorithm on the
   DDG and return a DDG_ALL_SCCS structure that contains them.  */
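/* Sketch of the approach used below (derived from the code, not an
   official description): every cycle in the DDG must contain at least one
   backarc (an edge with distance > 0), because intra-loop edges only go
   from lower to higher cuid.  So for each backarc src -> dest that is not
   yet inside an SCC we collect all nodes lying on paths from dest back to
   src with find_nodes_on_paths; that node set, closed by the backarc,
   forms one strongly connected component.  */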
ddg_all_sccs_ptr
create_ddg_all_sccs (ddg_ptr g)
{
  int i;
  int num_nodes = g->num_nodes;
  sbitmap from = sbitmap_alloc (num_nodes);
  sbitmap to = sbitmap_alloc (num_nodes);
  sbitmap scc_nodes = sbitmap_alloc (num_nodes);
  ddg_all_sccs_ptr sccs = (ddg_all_sccs_ptr)
			  xmalloc (sizeof (struct ddg_all_sccs));

  sccs->ddg = g;
  sccs->sccs = NULL;
  sccs->num_sccs = 0;

  for (i = 0; i < g->num_backarcs; i++)
    {
      ddg_scc_ptr scc;
      ddg_edge_ptr backarc = g->backarcs[i];
      ddg_node_ptr src = backarc->src;
      ddg_node_ptr dest = backarc->dest;

      /* If the backarc already belongs to an SCC, continue.  */
      if (backarc->aux.count == IN_SCC)
	continue;

      sbitmap_zero (scc_nodes);
      sbitmap_zero (from);
      sbitmap_zero (to);
      SET_BIT (from, dest->cuid);
      SET_BIT (to, src->cuid);

      if (find_nodes_on_paths (scc_nodes, g, from, to))
	{
	  scc = create_scc (g, scc_nodes);
	  add_scc_to_ddg (sccs, scc);
	}
    }
  order_sccs (sccs);
  sbitmap_free (from);
  sbitmap_free (to);
  sbitmap_free (scc_nodes);
#ifdef ENABLE_CHECKING
  check_sccs (sccs, num_nodes);
#endif
  return sccs;
}

/* Frees the memory allocated for all SCCs of the DDG, but keeps the DDG.  */
void
free_ddg_all_sccs (ddg_all_sccs_ptr all_sccs)
{
  int i;

  if (!all_sccs)
    return;

  for (i = 0; i < all_sccs->num_sccs; i++)
    free_scc (all_sccs->sccs[i]);

  free (all_sccs->sccs);
  free (all_sccs);
}

/* Given FROM - a bitmap of source nodes - and TO - a bitmap of destination
   nodes - find all nodes that lie on paths from FROM to TO (not excluding
   nodes from FROM and TO).  Return nonzero if such nodes exist.  */
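/* The implementation below computes this as the intersection of two
   fixed-point closures: the set of nodes reachable from FROM following
   out-edges, and the set of nodes that reach TO following in-edges.
   A node lies on some FROM-to-TO path exactly when it is in both sets.  */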
int
find_nodes_on_paths (sbitmap result, ddg_ptr g, sbitmap from, sbitmap to)
{
  int answer;
  int change;
  unsigned int u = 0;
  int num_nodes = g->num_nodes;
  sbitmap_iterator sbi;

  sbitmap workset = sbitmap_alloc (num_nodes);
  sbitmap reachable_from = sbitmap_alloc (num_nodes);
  sbitmap reach_to = sbitmap_alloc (num_nodes);
  sbitmap tmp = sbitmap_alloc (num_nodes);

  sbitmap_copy (reachable_from, from);
  sbitmap_copy (tmp, from);

  change = 1;
  while (change)
    {
      change = 0;
      sbitmap_copy (workset, tmp);
      sbitmap_zero (tmp);
      EXECUTE_IF_SET_IN_SBITMAP (workset, 0, u, sbi)
	{
	  ddg_edge_ptr e;
	  ddg_node_ptr u_node = &g->nodes[u];

	  for (e = u_node->out; e != (ddg_edge_ptr) 0; e = e->next_out)
	    {
	      ddg_node_ptr v_node = e->dest;
	      int v = v_node->cuid;

	      if (!TEST_BIT (reachable_from, v))
		{
		  SET_BIT (reachable_from, v);
		  SET_BIT (tmp, v);
		  change = 1;
		}
	    }
	}
    }

  sbitmap_copy (reach_to, to);
  sbitmap_copy (tmp, to);

  change = 1;
  while (change)
    {
      change = 0;
      sbitmap_copy (workset, tmp);
      sbitmap_zero (tmp);
      EXECUTE_IF_SET_IN_SBITMAP (workset, 0, u, sbi)
	{
	  ddg_edge_ptr e;
	  ddg_node_ptr u_node = &g->nodes[u];

	  for (e = u_node->in; e != (ddg_edge_ptr) 0; e = e->next_in)
	    {
	      ddg_node_ptr v_node = e->src;
	      int v = v_node->cuid;

	      if (!TEST_BIT (reach_to, v))
		{
		  SET_BIT (reach_to, v);
		  SET_BIT (tmp, v);
		  change = 1;
		}
	    }
	}
    }

  answer = sbitmap_a_and_b_cg (result, reachable_from, reach_to);
  sbitmap_free (workset);
  sbitmap_free (reachable_from);
  sbitmap_free (reach_to);
  sbitmap_free (tmp);
  return answer;
}

/* Updates the counts of U_NODE's successors (that belong to NODES) to be
   at least as large as the count of U_NODE plus the latency between them.
   Sets a bit in TMP for each successor whose count was changed (increased).
   Returns nonzero if any count was changed.  */
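/* Descriptive note (not from the original sources): this is the
   relaxation step of the longest-path computation in longest_simple_path
   below.  Only intra-iteration edges (distance == 0) whose target is
   inside NODES are followed, which keeps the traversal acyclic and makes
   the surrounding iteration terminate.  */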
static int
update_dist_to_successors (ddg_node_ptr u_node, sbitmap nodes, sbitmap tmp)
{
  ddg_edge_ptr e;
  int result = 0;

  for (e = u_node->out; e; e = e->next_out)
    {
      ddg_node_ptr v_node = e->dest;
      int v = v_node->cuid;

      if (TEST_BIT (nodes, v)
	  && (e->distance == 0)
	  && (v_node->aux.count < u_node->aux.count + e->latency))
	{
	  v_node->aux.count = u_node->aux.count + e->latency;
	  SET_BIT (tmp, v);
	  result = 1;
	}
    }
  return result;
}

/* Find the length of a longest path from SRC to DEST in G,
   going only through NODES, and disregarding backarcs.  */
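/* Illustration (not part of the original sources): counts start at -1
   ("unreached") and the source starts at 0; nodes are then relaxed wave by
   wave until no count grows.  Because only distance-0 (forward) edges are
   followed the process converges, and a result of -1 means DEST is not
   reachable from SRC within NODES, which set_recurrence_length treats as
   "backarc not on a simple cycle".  */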
int
longest_simple_path (struct ddg * g, int src, int dest, sbitmap nodes)
{
  int i;
  unsigned int u = 0;
  int change = 1;
  int result;
  int num_nodes = g->num_nodes;
  sbitmap workset = sbitmap_alloc (num_nodes);
  sbitmap tmp = sbitmap_alloc (num_nodes);

  /* Data will hold the distance of the longest path found so far from
     src to each node.  Initialize to -1 = less than minimum.  */
  for (i = 0; i < g->num_nodes; i++)
    g->nodes[i].aux.count = -1;
  g->nodes[src].aux.count = 0;

  sbitmap_zero (tmp);
  SET_BIT (tmp, src);

  while (change)
    {
      sbitmap_iterator sbi;

      change = 0;
      sbitmap_copy (workset, tmp);
      sbitmap_zero (tmp);
      EXECUTE_IF_SET_IN_SBITMAP (workset, 0, u, sbi)
	{
	  ddg_node_ptr u_node = &g->nodes[u];

	  change |= update_dist_to_successors (u_node, nodes, tmp);
	}
    }
  result = g->nodes[dest].aux.count;
  sbitmap_free (workset);
  sbitmap_free (tmp);
  return result;
}

#endif /* INSN_SCHEDULING */