/* DDG - Data Dependence Graph implementation.
   Copyright (C) 2004-2014 Free Software Foundation, Inc.
   Contributed by Ayal Zaks and Mustafa Hagog <zaks,mustafa@il.ibm.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "coretypes.h"
#include "diagnostic-core.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "sched-int.h"

#ifdef INSN_SCHEDULING

/* A flag indicating whether a ddg edge belongs to an SCC or not.  */
enum edge_flag {NOT_IN_SCC = 0, IN_SCC};

/* Forward declarations.  */
static void add_backarc_to_ddg (ddg_ptr, ddg_edge_ptr);
static void add_backarc_to_scc (ddg_scc_ptr, ddg_edge_ptr);
static void add_scc_to_ddg (ddg_all_sccs_ptr, ddg_scc_ptr);
static void create_ddg_dep_from_intra_loop_link (ddg_ptr, ddg_node_ptr,
                                                 ddg_node_ptr, dep_t);
static void create_ddg_dep_no_link (ddg_ptr, ddg_node_ptr, ddg_node_ptr,
                                    dep_type, dep_data_type, int);
static ddg_edge_ptr create_ddg_edge (ddg_node_ptr, ddg_node_ptr, dep_type,
                                     dep_data_type, int, int);
static void add_edge_to_ddg (ddg_ptr g, ddg_edge_ptr);

/* Auxiliary variable for mem_read_insn_p/mem_write_insn_p.  */
static bool mem_ref_p;

/* Auxiliary function for mem_read_insn_p.  */
static void
mark_mem_use (rtx *x, void *)
{
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, *x, NONCONST)
    if (MEM_P (*iter))
      {
        mem_ref_p = true;
        break;
      }
}

/* Returns nonzero if INSN reads from memory.  */
static bool
mem_read_insn_p (rtx_insn *insn)
{
  mem_ref_p = false;
  note_uses (&PATTERN (insn), mark_mem_use, NULL);
  return mem_ref_p;
}

/* Auxiliary function for mem_write_insn_p.  */
static void
mark_mem_store (rtx loc, const_rtx setter ATTRIBUTE_UNUSED,
                void *data ATTRIBUTE_UNUSED)
{
  if (MEM_P (loc))
    mem_ref_p = true;
}

/* Returns nonzero if INSN writes to memory.  */
static bool
mem_write_insn_p (rtx_insn *insn)
{
  mem_ref_p = false;
  note_stores (PATTERN (insn), mark_mem_store, NULL);
  return mem_ref_p;
}

/* Returns nonzero if X has access to memory.  */
static bool
rtx_mem_access_p (rtx x)
{
  int i, j;
  const char *fmt;
  enum rtx_code code;

  if (x == 0)
    return false;

  if (MEM_P (x))
    return true;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          if (rtx_mem_access_p (XEXP (x, i)))
            return true;
        }
      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          {
            if (rtx_mem_access_p (XVECEXP (x, i, j)))
              return true;
          }
    }
  return false;
}

/* Returns nonzero if INSN reads from or writes to memory.  */
static bool
mem_access_insn_p (rtx_insn *insn)
{
  return rtx_mem_access_p (PATTERN (insn));
}

/* Return true if DEF_INSN contains address being auto-inc or auto-dec
   which is used in USE_INSN.  Otherwise return false.  The result is
   being used to decide whether to remove the edge between def_insn and
   use_insn when -fmodulo-sched-allow-regmoves is set.  This function
   doesn't need to consider the specific address register; no reg_moves
   will be allowed for any life range defined by def_insn and used
   by use_insn, if use_insn uses an address register auto-inc'ed by
   def_insn.  */
static bool
autoinc_var_is_used_p (rtx_insn *def_insn, rtx_insn *use_insn)
{
  rtx note;

  for (note = REG_NOTES (def_insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_INC
        && reg_referenced_p (XEXP (note, 0), PATTERN (use_insn)))
      return true;

  return false;
}

/* Return true if one of the definitions in INSN has MODE_CC.  Otherwise
   return false.  */
static bool
def_has_ccmode_p (rtx_insn *insn)
{
  df_ref def;

  FOR_EACH_INSN_DEF (def, insn)
    {
      enum machine_mode mode = GET_MODE (DF_REF_REG (def));

      if (GET_MODE_CLASS (mode) == MODE_CC)
        return true;
    }

  return false;
}

/* Computes the dependence parameters (latency, distance etc.), creates
   a ddg_edge and adds it to the given DDG.  */
static void
create_ddg_dep_from_intra_loop_link (ddg_ptr g, ddg_node_ptr src_node,
                                     ddg_node_ptr dest_node, dep_t link)
{
  ddg_edge_ptr e;
  int latency, distance = 0;
  dep_type t = TRUE_DEP;
  dep_data_type dt = (mem_access_insn_p (src_node->insn)
                      && mem_access_insn_p (dest_node->insn) ? MEM_DEP
                                                             : REG_DEP);
  gcc_assert (src_node->cuid < dest_node->cuid);

  /* Note: REG_DEP_ANTI applies to MEM ANTI_DEP as well!!  */
  if (DEP_TYPE (link) == REG_DEP_ANTI)
    t = ANTI_DEP;
  else if (DEP_TYPE (link) == REG_DEP_OUTPUT)
    t = OUTPUT_DEP;

  gcc_assert (!DEBUG_INSN_P (dest_node->insn) || t == ANTI_DEP);
  gcc_assert (!DEBUG_INSN_P (src_node->insn) || t == ANTI_DEP);

  /* We currently choose not to create certain anti-deps edges and
     compensate for that by generating reg-moves based on the life-range
     analysis.  The anti-deps that will be deleted are the ones which
     have true-deps edges in the opposite direction (in other words
     the kernel has only one def of the relevant register).
     If the address that is being auto-inc or auto-dec in DEST_NODE
     is used in SRC_NODE then do not remove the edge to make sure
     reg-moves will not be created for this address.
     TODO: support the removal of all anti-deps edges, i.e. including those
     whose register has multiple defs in the loop.  */
  if (flag_modulo_sched_allow_regmoves
      && (t == ANTI_DEP && dt == REG_DEP)
      && !def_has_ccmode_p (dest_node->insn)
      && !autoinc_var_is_used_p (dest_node->insn, src_node->insn))
    {
      rtx set;

      set = single_set (dest_node->insn);
      /* TODO: Handle registers that REG_P is not true for them, i.e.
         subregs and special registers.  */
      if (set && REG_P (SET_DEST (set)))
        {
          int regno = REGNO (SET_DEST (set));
          df_ref first_def;
          struct df_rd_bb_info *bb_info = DF_RD_BB_INFO (g->bb);

          first_def = df_bb_regno_first_def_find (g->bb, regno);
          gcc_assert (first_def);

          if (bitmap_bit_p (&bb_info->gen, DF_REF_ID (first_def)))
            return;
        }
    }

  latency = dep_cost (link);
  e = create_ddg_edge (src_node, dest_node, t, dt, latency, distance);
  add_edge_to_ddg (g, e);
}

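/* Illustrative sketch (not from the original sources; the insns and register
   numbers below are hypothetical): given a loop kernel

       U:  r3 = r5 + 1       <- use of r5 (SRC_NODE)
       D:  r5 = mem[r2]      <- the only def of r5 in the kernel (DEST_NODE)

   the intra-loop anti-dep U -> D matches the condition above: it is a
   register anti-dep whose opposite direction carries the true dep, so the
   edge is dropped and the overlapping life ranges of r5 are later repaired
   by generating reg-moves.  */
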
/* The same as the above function, but it doesn't require a link parameter.  */
static void
create_ddg_dep_no_link (ddg_ptr g, ddg_node_ptr from, ddg_node_ptr to,
                        dep_type d_t, dep_data_type d_dt, int distance)
{
  ddg_edge_ptr e;
  int l;
  enum reg_note dep_kind;
  struct _dep _dep, *dep = &_dep;

  gcc_assert (!DEBUG_INSN_P (to->insn) || d_t == ANTI_DEP);
  gcc_assert (!DEBUG_INSN_P (from->insn) || d_t == ANTI_DEP);

  if (d_t == ANTI_DEP)
    dep_kind = REG_DEP_ANTI;
  else if (d_t == OUTPUT_DEP)
    dep_kind = REG_DEP_OUTPUT;
  else
    {
      gcc_assert (d_t == TRUE_DEP);

      dep_kind = REG_DEP_TRUE;
    }

  init_dep (dep, from->insn, to->insn, dep_kind);

  l = dep_cost (dep);

  e = create_ddg_edge (from, to, d_t, d_dt, l, distance);
  if (distance > 0)
    add_backarc_to_ddg (g, e);
  else
    add_edge_to_ddg (g, e);
}

/* Given a downwards exposed register def LAST_DEF (which is the last
   definition of that register in the bb), add inter-loop true dependences
   to all its uses in the next iteration, an output dependence to the
   first def of the same register (possibly itself) in the next iteration
   and anti-dependences from its uses in the current iteration to the
   first definition in the next iteration.  */
static void
add_cross_iteration_register_deps (ddg_ptr g, df_ref last_def)
{
  int regno = DF_REF_REGNO (last_def);
  struct df_link *r_use;
  int has_use_in_bb_p = false;
  rtx_insn *def_insn = DF_REF_INSN (last_def);
  ddg_node_ptr last_def_node = get_node_of_insn (g, def_insn);
  ddg_node_ptr use_node;
#ifdef ENABLE_CHECKING
  struct df_rd_bb_info *bb_info = DF_RD_BB_INFO (g->bb);
#endif
  df_ref first_def = df_bb_regno_first_def_find (g->bb, regno);

  gcc_assert (last_def_node);
  gcc_assert (first_def);

#ifdef ENABLE_CHECKING
  if (DF_REF_ID (last_def) != DF_REF_ID (first_def))
    gcc_assert (!bitmap_bit_p (&bb_info->gen,
                               DF_REF_ID (first_def)));
#endif

  /* Create inter-loop true dependences and anti dependences.  */
  for (r_use = DF_REF_CHAIN (last_def); r_use != NULL; r_use = r_use->next)
    {
      rtx_insn *use_insn = DF_REF_INSN (r_use->ref);

      if (BLOCK_FOR_INSN (use_insn) != g->bb)
        continue;

      /* ??? Do not handle uses with DF_REF_IN_NOTE notes.  */
      use_node = get_node_of_insn (g, use_insn);
      gcc_assert (use_node);
      has_use_in_bb_p = true;
      if (use_node->cuid <= last_def_node->cuid)
        {
          /* Add true deps from last_def to its uses in the next
             iteration.  Any such upwards exposed use appears before
             the last_def def.  */
          create_ddg_dep_no_link (g, last_def_node, use_node,
                                  DEBUG_INSN_P (use_insn) ? ANTI_DEP : TRUE_DEP,
                                  REG_DEP, 1);
        }
      else if (!DEBUG_INSN_P (use_insn))
        {
          /* Add anti deps from last_def's uses in the current iteration
             to the first def in the next iteration.  We do not add ANTI
             dep when there is an intra-loop TRUE dep in the opposite
             direction, but use regmoves to fix such disregarded ANTI
             deps when broken.  If the first_def reaches the USE then
             there is such a dep.  */
          ddg_node_ptr first_def_node = get_node_of_insn (g,
                                                          DF_REF_INSN (first_def));

          gcc_assert (first_def_node);

          /* Always create the edge if the use node is a branch in
             order to prevent the creation of reg-moves.
             If the address that is being auto-inc or auto-dec in LAST_DEF
             is used in USE_INSN then do not remove the edge to make sure
             reg-moves will not be created for that address.  */
          if (DF_REF_ID (last_def) != DF_REF_ID (first_def)
              || !flag_modulo_sched_allow_regmoves
              || JUMP_P (use_node->insn)
              || autoinc_var_is_used_p (DF_REF_INSN (last_def), use_insn)
              || def_has_ccmode_p (DF_REF_INSN (last_def)))
            create_ddg_dep_no_link (g, use_node, first_def_node, ANTI_DEP,
                                    REG_DEP, 1);
        }
    }
  /* Create an inter-loop output dependence between LAST_DEF (which is the
     last def in its block, being downwards exposed) and the first def in
     its block.  Avoid creating a self output dependence.  Avoid creating
     an output dependence if there is a dependence path between the two
     defs starting with a true dependence to a use which can be in the
     next iteration; followed by an anti dependence of that use to the
     first def (i.e. if there is a use between the two defs.)  */
  if (!has_use_in_bb_p)
    {
      ddg_node_ptr dest_node;

      if (DF_REF_ID (last_def) == DF_REF_ID (first_def))
        return;

      dest_node = get_node_of_insn (g, DF_REF_INSN (first_def));
      gcc_assert (dest_node);
      create_ddg_dep_no_link (g, last_def_node, dest_node,
                              OUTPUT_DEP, REG_DEP, 1);
    }
}

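/* Illustrative sketch (hypothetical insns, not from the original sources):
   for a single-block loop

       I1:  r7 = mem[r9]      <- only def of r7 in the block
       I2:  mem[r8] = r7      <- use of r7

   the routine above would add, with distance 1, an anti dependence from the
   use in I2 to the def of r7 in the next iteration (unless the
   -fmodulo-sched-allow-regmoves conditions let it be dropped); no true
   dependence is added because the use is not upwards exposed, and no output
   dependence is added because the block contains a use of r7.  */
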
/* Build inter-loop dependencies, by looking at DF analysis backwards.  */
static void
build_inter_loop_deps (ddg_ptr g)
{
  unsigned rd_num;
  struct df_rd_bb_info *rd_bb_info;
  bitmap_iterator bi;

  rd_bb_info = DF_RD_BB_INFO (g->bb);

  /* Find inter-loop register output, true and anti deps.  */
  EXECUTE_IF_SET_IN_BITMAP (&rd_bb_info->gen, 0, rd_num, bi)
    {
      df_ref rd = DF_DEFS_GET (rd_num);

      add_cross_iteration_register_deps (g, rd);
    }
}

/* Return true if two specified instructions have mem expr with conflict
   alias sets.  */
static bool
insns_may_alias_p (rtx_insn *insn1, rtx_insn *insn2)
{
  subrtx_iterator::array_type array1;
  subrtx_iterator::array_type array2;
  FOR_EACH_SUBRTX (iter1, array1, PATTERN (insn1), NONCONST)
    {
      const_rtx x1 = *iter1;
      if (MEM_P (x1))
        FOR_EACH_SUBRTX (iter2, array2, PATTERN (insn2), NONCONST)
          {
            const_rtx x2 = *iter2;
            if (MEM_P (x2) && may_alias_p (x2, x1))
              return true;
          }
    }
  return false;
}

/* Given two nodes, analyze their RTL insns and add intra-loop mem deps
   to ddg G.  */
static void
add_intra_loop_mem_dep (ddg_ptr g, ddg_node_ptr from, ddg_node_ptr to)
{
  if ((from->cuid == to->cuid)
      || !insns_may_alias_p (from->insn, to->insn))
    /* Do not create edge if memory references have disjoint alias sets
       or 'to' and 'from' are the same instruction.  */
    return;

  if (mem_write_insn_p (from->insn))
    {
      if (mem_read_insn_p (to->insn))
        create_ddg_dep_no_link (g, from, to,
                                DEBUG_INSN_P (to->insn)
                                ? ANTI_DEP : TRUE_DEP, MEM_DEP, 0);
      else
        create_ddg_dep_no_link (g, from, to,
                                DEBUG_INSN_P (to->insn)
                                ? ANTI_DEP : OUTPUT_DEP, MEM_DEP, 0);
    }
  else if (!mem_read_insn_p (to->insn))
    create_ddg_dep_no_link (g, from, to, ANTI_DEP, MEM_DEP, 0);
}

/* Given two nodes, analyze their RTL insns and add inter-loop mem deps
   to ddg G.  */
static void
add_inter_loop_mem_dep (ddg_ptr g, ddg_node_ptr from, ddg_node_ptr to)
{
  if (!insns_may_alias_p (from->insn, to->insn))
    /* Do not create edge if memory references have disjoint alias sets.  */
    return;

  if (mem_write_insn_p (from->insn))
    {
      if (mem_read_insn_p (to->insn))
        create_ddg_dep_no_link (g, from, to,
                                DEBUG_INSN_P (to->insn)
                                ? ANTI_DEP : TRUE_DEP, MEM_DEP, 1);
      else if (from->cuid != to->cuid)
        create_ddg_dep_no_link (g, from, to,
                                DEBUG_INSN_P (to->insn)
                                ? ANTI_DEP : OUTPUT_DEP, MEM_DEP, 1);
    }
  else
    {
      if (mem_read_insn_p (to->insn))
        return;
      else if (from->cuid != to->cuid)
        {
          create_ddg_dep_no_link (g, from, to, ANTI_DEP, MEM_DEP, 1);
          if (DEBUG_INSN_P (from->insn) || DEBUG_INSN_P (to->insn))
            create_ddg_dep_no_link (g, to, from, ANTI_DEP, MEM_DEP, 1);
          else
            create_ddg_dep_no_link (g, to, from, TRUE_DEP, MEM_DEP, 1);
        }
    }
}

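/* Illustrative sketch (hypothetical insns): if FROM is a store
   "mem[r4] = r1" and TO is a load "r2 = mem[r6]" that may alias it, the
   routine above creates a TRUE_DEP MEM_DEP edge FROM -> TO with distance 1,
   modelling the store of one iteration feeding the load of the next.  */
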
/* Perform intra-block Data Dependency analysis and connect the nodes in
   the DDG.  We assume the loop has a single basic block.  */
static void
build_intra_loop_deps (ddg_ptr g)
{
  int i;
  /* Hold the dependency analysis state during dependency calculations.  */
  struct deps_desc tmp_deps;
  rtx_insn *head, *tail;

  /* Build the dependence information, using the sched_analyze function.  */
  init_deps_global ();
  init_deps (&tmp_deps, false);

  /* Do the intra-block data dependence analysis for the given block.  */
  get_ebb_head_tail (g->bb, g->bb, &head, &tail);
  sched_analyze (&tmp_deps, head, tail);

  /* Build intra-loop data dependencies using the scheduler dependency
     analysis.  */
  for (i = 0; i < g->num_nodes; i++)
    {
      ddg_node_ptr dest_node = &g->nodes[i];
      sd_iterator_def sd_it;
      dep_t dep;

      if (! INSN_P (dest_node->insn))
        continue;

      FOR_EACH_DEP (dest_node->insn, SD_LIST_BACK, sd_it, dep)
        {
          rtx_insn *src_insn = DEP_PRO (dep);
          ddg_node_ptr src_node;

          /* Don't add dependencies on debug insns to non-debug insns
             to avoid codegen differences between -g and -g0.  */
          if (DEBUG_INSN_P (src_insn) && !DEBUG_INSN_P (dest_node->insn))
            continue;

          src_node = get_node_of_insn (g, src_insn);

          create_ddg_dep_from_intra_loop_link (g, src_node, dest_node, dep);
        }

      /* If this insn modifies memory, add an edge to all insns that access
         memory.  */
      if (mem_access_insn_p (dest_node->insn))
        {
          int j;

          for (j = 0; j <= i; j++)
            {
              ddg_node_ptr j_node = &g->nodes[j];
              if (DEBUG_INSN_P (j_node->insn))
                continue;
              if (mem_access_insn_p (j_node->insn))
                {
                  /* Don't bother calculating inter-loop dep if an intra-loop dep
                     already exists.  */
                  if (! bitmap_bit_p (dest_node->successors, j))
                    add_inter_loop_mem_dep (g, dest_node, j_node);
                  /* If -fmodulo-sched-allow-regmoves
                     is set certain anti-dep edges are not created.
                     It might be that these anti-dep edges are on the
                     path from one memory instruction to another such that
                     removing these edges could cause a violation of the
                     memory dependencies.  Thus we add intra edges between
                     every two memory instructions in this case.  */
                  if (flag_modulo_sched_allow_regmoves
                      && !bitmap_bit_p (dest_node->predecessors, j))
                    add_intra_loop_mem_dep (g, j_node, dest_node);
                }
            }
        }
    }

  /* Free the INSN_LISTs.  */
  finish_deps_global ();
  free_deps (&tmp_deps);

  /* Free dependencies.  */
  sched_free_deps (head, tail, false);
}

/* Given a basic block, create its DDG and return a pointer to a variable
   of ddg type that represents it.
   Initialize the ddg structure fields to the appropriate values.  */
ddg_ptr
create_ddg (basic_block bb, int closing_branch_deps)
{
  ddg_ptr g;
  rtx_insn *insn, *first_note;
  int i;
  int num_nodes = 0;

  g = (ddg_ptr) xcalloc (1, sizeof (struct ddg));

  g->bb = bb;
  g->closing_branch_deps = closing_branch_deps;

  /* Count the number of insns in the BB.  */
  for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
       insn = NEXT_INSN (insn))
    {
      if (! INSN_P (insn) || GET_CODE (PATTERN (insn)) == USE)
        continue;

      if (DEBUG_INSN_P (insn))
        g->num_debug++;
      else
        {
          if (mem_read_insn_p (insn))
            g->num_loads++;
          if (mem_write_insn_p (insn))
            g->num_stores++;
        }
      num_nodes++;
    }

  /* There is nothing to do for this BB.  */
  if ((num_nodes - g->num_debug) <= 1)
    {
      free (g);
      return NULL;
    }

  /* Allocate the nodes array, and initialize the nodes.  */
  g->num_nodes = num_nodes;
  g->nodes = (ddg_node_ptr) xcalloc (num_nodes, sizeof (struct ddg_node));
  g->closing_branch = NULL;
  i = 0;
  first_note = NULL;
  for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
       insn = NEXT_INSN (insn))
    {
      if (! INSN_P (insn))
        {
          if (! first_note && NOTE_P (insn)
              && NOTE_KIND (insn) != NOTE_INSN_BASIC_BLOCK)
            first_note = insn;
          continue;
        }
      if (JUMP_P (insn))
        {
          gcc_assert (!g->closing_branch);
          g->closing_branch = &g->nodes[i];
        }
      else if (GET_CODE (PATTERN (insn)) == USE)
        {
          if (! first_note)
            first_note = insn;
          continue;
        }

      g->nodes[i].cuid = i;
      g->nodes[i].successors = sbitmap_alloc (num_nodes);
      bitmap_clear (g->nodes[i].successors);
      g->nodes[i].predecessors = sbitmap_alloc (num_nodes);
      bitmap_clear (g->nodes[i].predecessors);
      g->nodes[i].first_note = (first_note ? first_note : insn);
      g->nodes[i++].insn = insn;
      first_note = NULL;
    }

  /* We must have found a branch in DDG.  */
  gcc_assert (g->closing_branch);

  /* Build the data dependency graph.  */
  build_intra_loop_deps (g);
  build_inter_loop_deps (g);
  return g;
}

/* Free all the memory allocated for the DDG.  */
void
free_ddg (ddg_ptr g)
{
  int i;

  if (!g)
    return;

  for (i = 0; i < g->num_nodes; i++)
    {
      ddg_edge_ptr e = g->nodes[i].out;

      while (e)
        {
          ddg_edge_ptr next = e->next_out;

          free (e);
          e = next;
        }
      sbitmap_free (g->nodes[i].successors);
      sbitmap_free (g->nodes[i].predecessors);
    }
  if (g->num_backarcs > 0)
    free (g->backarcs);
  free (g->nodes);
  free (g);
}

static void
print_ddg_edge (FILE *file, ddg_edge_ptr e)
{
  char dep_c;

  switch (e->type)
    {
    case OUTPUT_DEP:
      dep_c = 'O';
      break;
    case ANTI_DEP:
      dep_c = 'A';
      break;
    default:
      dep_c = 'T';
    }

  fprintf (file, " [%d -(%c,%d,%d)-> %d] ", INSN_UID (e->src->insn),
           dep_c, e->latency, e->distance, INSN_UID (e->dest->insn));
}

/* Print the DDG nodes with their in/out edges to the dump file.  */
void
print_ddg (FILE *file, ddg_ptr g)
{
  int i;

  for (i = 0; i < g->num_nodes; i++)
    {
      ddg_edge_ptr e;

      fprintf (file, "Node num: %d\n", g->nodes[i].cuid);
      print_rtl_single (file, g->nodes[i].insn);
      fprintf (file, "OUT ARCS: ");
      for (e = g->nodes[i].out; e; e = e->next_out)
        print_ddg_edge (file, e);

      fprintf (file, "\nIN ARCS: ");
      for (e = g->nodes[i].in; e; e = e->next_in)
        print_ddg_edge (file, e);

      fprintf (file, "\n");
    }
}

/* Print the given DDG in VCG format.  */
void
vcg_print_ddg (FILE *file, ddg_ptr g)
{
  int src_cuid;

  fprintf (file, "graph: {\n");
  for (src_cuid = 0; src_cuid < g->num_nodes; src_cuid++)
    {
      ddg_edge_ptr e;
      int src_uid = INSN_UID (g->nodes[src_cuid].insn);

      fprintf (file, "node: {title: \"%d_%d\" info1: \"", src_cuid, src_uid);
      print_rtl_single (file, g->nodes[src_cuid].insn);
      fprintf (file, "\"}\n");
      for (e = g->nodes[src_cuid].out; e; e = e->next_out)
        {
          int dst_uid = INSN_UID (e->dest->insn);
          int dst_cuid = e->dest->cuid;

          /* Give the backarcs a different color.  */
          if (e->distance > 0)
            fprintf (file, "backedge: {color: red ");
          else
            fprintf (file, "edge: { ");

          fprintf (file, "sourcename: \"%d_%d\" ", src_cuid, src_uid);
          fprintf (file, "targetname: \"%d_%d\" ", dst_cuid, dst_uid);
          fprintf (file, "label: \"%d_%d\"}\n", e->latency, e->distance);
        }
    }
  fprintf (file, "}\n");
}

/* Dump the sccs in SCCS.  */
void
print_sccs (FILE *file, ddg_all_sccs_ptr sccs, ddg_ptr g)
{
  unsigned int u = 0;
  int i;
  sbitmap_iterator sbi;

  if (!file)
    return;

  fprintf (file, "\n;; Number of SCC nodes - %d\n", sccs->num_sccs);
  for (i = 0; i < sccs->num_sccs; i++)
    {
      fprintf (file, "SCC number: %d\n", i);
      EXECUTE_IF_SET_IN_BITMAP (sccs->sccs[i]->nodes, 0, u, sbi)
        {
          fprintf (file, "insn num %d\n", u);
          print_rtl_single (file, g->nodes[u].insn);
        }
    }
  fprintf (file, "\n");
}

/* Create an edge and initialize it with given values.  */
static ddg_edge_ptr
create_ddg_edge (ddg_node_ptr src, ddg_node_ptr dest,
                 dep_type t, dep_data_type dt, int l, int d)
{
  ddg_edge_ptr e = (ddg_edge_ptr) xmalloc (sizeof (struct ddg_edge));

  e->src = src;
  e->dest = dest;
  e->type = t;
  e->data_type = dt;
  e->latency = l;
  e->distance = d;
  e->next_in = e->next_out = NULL;
  e->aux.info = 0;

  return e;
}

/* Add the given edge to the in/out linked lists of the DDG nodes.  */
static void
add_edge_to_ddg (ddg_ptr g ATTRIBUTE_UNUSED, ddg_edge_ptr e)
{
  ddg_node_ptr src = e->src;
  ddg_node_ptr dest = e->dest;

  /* Should have allocated the sbitmaps.  */
  gcc_assert (src->successors && dest->predecessors);

  bitmap_set_bit (src->successors, dest->cuid);
  bitmap_set_bit (dest->predecessors, src->cuid);
  e->next_in = dest->in;
  dest->in = e;
  e->next_out = src->out;
  src->out = e;
}

/* Algorithm for computing the recurrence_length of an scc.  We assume for
   now that cycles in the data dependence graph contain a single backarc.
   This simplifies the algorithm, and can be generalized later.  */
static void
set_recurrence_length (ddg_scc_ptr scc, ddg_ptr g)
{
  int j;
  int result = -1;

  for (j = 0; j < scc->num_backarcs; j++)
    {
      ddg_edge_ptr backarc = scc->backarcs[j];
      int length;
      int distance = backarc->distance;
      ddg_node_ptr src = backarc->dest;
      ddg_node_ptr dest = backarc->src;

      length = longest_simple_path (g, src->cuid, dest->cuid, scc->nodes);
      if (length < 0)
        {
          /* fprintf (stderr, "Backarc not on simple cycle in SCC.\n"); */
          continue;
        }
      length += backarc->latency;
      result = MAX (result, (length / distance));
    }
  scc->recurrence_length = result;
}

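/* Worked example (illustrative numbers only): suppose an SCC has a single
   backarc of latency 1 and distance 2, and the longest simple path inside
   the SCC from the backarc's head back to its tail has latency 5.  Then
   length = 5 + 1 = 6 and recurrence_length = 6 / 2 = 3, i.e. this
   recurrence contributes a lower bound of 3 cycles to the initiation
   interval considered by the modulo scheduler.  */
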
/* Create a new SCC given the set of its nodes.  Compute its recurrence_length
   and mark edges that belong to this scc as IN_SCC.  */
static ddg_scc_ptr
create_scc (ddg_ptr g, sbitmap nodes)
{
  ddg_scc_ptr scc;
  unsigned int u = 0;
  sbitmap_iterator sbi;

  scc = (ddg_scc_ptr) xmalloc (sizeof (struct ddg_scc));
  scc->backarcs = NULL;
  scc->num_backarcs = 0;
  scc->nodes = sbitmap_alloc (g->num_nodes);
  bitmap_copy (scc->nodes, nodes);

  /* Mark the backarcs that belong to this SCC.  */
  EXECUTE_IF_SET_IN_BITMAP (nodes, 0, u, sbi)
    {
      ddg_edge_ptr e;
      ddg_node_ptr n = &g->nodes[u];

      for (e = n->out; e; e = e->next_out)
        if (bitmap_bit_p (nodes, e->dest->cuid))
          {
            e->aux.count = IN_SCC;
            if (e->distance > 0)
              add_backarc_to_scc (scc, e);
          }
    }

  set_recurrence_length (scc, g);
  return scc;
}

/* Cleans the memory allocation of a given SCC.  */
static void
free_scc (ddg_scc_ptr scc)
{
  if (!scc)
    return;

  sbitmap_free (scc->nodes);
  if (scc->num_backarcs > 0)
    free (scc->backarcs);
  free (scc);
}

/* Add a given edge known to be a backarc to the given DDG.  */
static void
add_backarc_to_ddg (ddg_ptr g, ddg_edge_ptr e)
{
  int size = (g->num_backarcs + 1) * sizeof (ddg_edge_ptr);

  add_edge_to_ddg (g, e);
  g->backarcs = (ddg_edge_ptr *) xrealloc (g->backarcs, size);
  g->backarcs[g->num_backarcs++] = e;
}

/* Add backarc to an SCC.  */
static void
add_backarc_to_scc (ddg_scc_ptr scc, ddg_edge_ptr e)
{
  int size = (scc->num_backarcs + 1) * sizeof (ddg_edge_ptr);

  scc->backarcs = (ddg_edge_ptr *) xrealloc (scc->backarcs, size);
  scc->backarcs[scc->num_backarcs++] = e;
}

/* Add the given SCC to the DDG.  */
static void
add_scc_to_ddg (ddg_all_sccs_ptr g, ddg_scc_ptr scc)
{
  int size = (g->num_sccs + 1) * sizeof (ddg_scc_ptr);

  g->sccs = (ddg_scc_ptr *) xrealloc (g->sccs, size);
  g->sccs[g->num_sccs++] = scc;
}

/* Given the instruction INSN return the node that represents it.  */
ddg_node_ptr
get_node_of_insn (ddg_ptr g, rtx_insn *insn)
{
  int i;

  for (i = 0; i < g->num_nodes; i++)
    if (insn == g->nodes[i].insn)
      return &g->nodes[i];
  return NULL;
}

/* Given a set OPS of nodes in the DDG, find the set of their successors
   which are not in OPS, and set their bits in SUCC.  Bits corresponding to
   OPS are cleared from SUCC.  Leaves the other bits in SUCC unchanged.  */
void
find_successors (sbitmap succ, ddg_ptr g, sbitmap ops)
{
  unsigned int i = 0;
  sbitmap_iterator sbi;

  EXECUTE_IF_SET_IN_BITMAP (ops, 0, i, sbi)
    {
      const sbitmap node_succ = NODE_SUCCESSORS (&g->nodes[i]);
      bitmap_ior (succ, succ, node_succ);
    }

  /* We want those that are not in ops.  */
  bitmap_and_compl (succ, succ, ops);
}

/* Given a set OPS of nodes in the DDG, find the set of their predecessors
   which are not in OPS, and set their bits in PREDS.  Bits corresponding to
   OPS are cleared from PREDS.  Leaves the other bits in PREDS unchanged.  */
void
find_predecessors (sbitmap preds, ddg_ptr g, sbitmap ops)
{
  unsigned int i = 0;
  sbitmap_iterator sbi;

  EXECUTE_IF_SET_IN_BITMAP (ops, 0, i, sbi)
    {
      const sbitmap node_preds = NODE_PREDECESSORS (&g->nodes[i]);
      bitmap_ior (preds, preds, node_preds);
    }

  /* We want those that are not in ops.  */
  bitmap_and_compl (preds, preds, ops);
}

/* Compare function to be passed to qsort to order the backarcs in descending
   recMII order.  */
static int
compare_sccs (const void *s1, const void *s2)
{
  const int rec_l1 = (*(const ddg_scc_ptr *) s1)->recurrence_length;
  const int rec_l2 = (*(const ddg_scc_ptr *) s2)->recurrence_length;
  return ((rec_l2 > rec_l1) - (rec_l2 < rec_l1));
}

/* Order the backarcs in descending recMII order using compare_sccs.  */
static void
order_sccs (ddg_all_sccs_ptr g)
{
  qsort (g->sccs, g->num_sccs, sizeof (ddg_scc_ptr),
         (int (*) (const void *, const void *)) compare_sccs);
}

#ifdef ENABLE_CHECKING
/* Check that every node in SCCS belongs to exactly one strongly connected
   component and that no element of SCCS is empty.  */
static void
check_sccs (ddg_all_sccs_ptr sccs, int num_nodes)
{
  int i = 0;
  sbitmap tmp = sbitmap_alloc (num_nodes);

  bitmap_clear (tmp);
  for (i = 0; i < sccs->num_sccs; i++)
    {
      gcc_assert (!bitmap_empty_p (sccs->sccs[i]->nodes));
      /* Verify that every node in sccs is in exactly one strongly
         connected component.  */
      gcc_assert (!bitmap_intersect_p (tmp, sccs->sccs[i]->nodes));
      bitmap_ior (tmp, tmp, sccs->sccs[i]->nodes);
    }
  sbitmap_free (tmp);
}
#endif

/* Perform the Strongly Connected Components decomposing algorithm on the
   DDG and return DDG_ALL_SCCS structure that contains them.  */
ddg_all_sccs_ptr
create_ddg_all_sccs (ddg_ptr g)
{
  int i;
  int num_nodes = g->num_nodes;
  sbitmap from = sbitmap_alloc (num_nodes);
  sbitmap to = sbitmap_alloc (num_nodes);
  sbitmap scc_nodes = sbitmap_alloc (num_nodes);
  ddg_all_sccs_ptr sccs = (ddg_all_sccs_ptr)
                          xmalloc (sizeof (struct ddg_all_sccs));

  sccs->ddg = g;
  sccs->sccs = NULL;
  sccs->num_sccs = 0;

  for (i = 0; i < g->num_backarcs; i++)
    {
      ddg_scc_ptr scc;
      ddg_edge_ptr backarc = g->backarcs[i];
      ddg_node_ptr src = backarc->src;
      ddg_node_ptr dest = backarc->dest;

      /* If the backarc already belongs to an SCC, continue.  */
      if (backarc->aux.count == IN_SCC)
        continue;

      bitmap_clear (scc_nodes);
      bitmap_clear (from);
      bitmap_clear (to);
      bitmap_set_bit (from, dest->cuid);
      bitmap_set_bit (to, src->cuid);

      if (find_nodes_on_paths (scc_nodes, g, from, to))
        {
          scc = create_scc (g, scc_nodes);
          add_scc_to_ddg (sccs, scc);
        }
    }
  order_sccs (sccs);
  sbitmap_free (from);
  sbitmap_free (to);
  sbitmap_free (scc_nodes);
#ifdef ENABLE_CHECKING
  check_sccs (sccs, num_nodes);
#endif
  return sccs;
}

/* Frees the memory allocated for all SCCs of the DDG, but keeps the DDG.  */
void
free_ddg_all_sccs (ddg_all_sccs_ptr all_sccs)
{
  int i;

  if (!all_sccs)
    return;

  for (i = 0; i < all_sccs->num_sccs; i++)
    free_scc (all_sccs->sccs[i]);

  free (all_sccs->sccs);
  free (all_sccs);
}

/* Given FROM - a bitmap of source nodes - and TO - a bitmap of destination
   nodes - find all nodes that lie on paths from FROM to TO (not excluding
   nodes from FROM and TO).  Return nonzero if nodes exist.  */
int
find_nodes_on_paths (sbitmap result, ddg_ptr g, sbitmap from, sbitmap to)
{
  int answer;
  int change;
  unsigned int u = 0;
  int num_nodes = g->num_nodes;
  sbitmap_iterator sbi;

  sbitmap workset = sbitmap_alloc (num_nodes);
  sbitmap reachable_from = sbitmap_alloc (num_nodes);
  sbitmap reach_to = sbitmap_alloc (num_nodes);
  sbitmap tmp = sbitmap_alloc (num_nodes);

  bitmap_copy (reachable_from, from);
  bitmap_copy (tmp, from);

  change = 1;
  while (change)
    {
      change = 0;
      bitmap_copy (workset, tmp);
      bitmap_clear (tmp);
      EXECUTE_IF_SET_IN_BITMAP (workset, 0, u, sbi)
        {
          ddg_edge_ptr e;
          ddg_node_ptr u_node = &g->nodes[u];

          for (e = u_node->out; e != (ddg_edge_ptr) 0; e = e->next_out)
            {
              ddg_node_ptr v_node = e->dest;
              int v = v_node->cuid;

              if (!bitmap_bit_p (reachable_from, v))
                {
                  bitmap_set_bit (reachable_from, v);
                  bitmap_set_bit (tmp, v);
                  change = 1;
                }
            }
        }
    }

  bitmap_copy (reach_to, to);
  bitmap_copy (tmp, to);

  change = 1;
  while (change)
    {
      change = 0;
      bitmap_copy (workset, tmp);
      bitmap_clear (tmp);
      EXECUTE_IF_SET_IN_BITMAP (workset, 0, u, sbi)
        {
          ddg_edge_ptr e;
          ddg_node_ptr u_node = &g->nodes[u];

          for (e = u_node->in; e != (ddg_edge_ptr) 0; e = e->next_in)
            {
              ddg_node_ptr v_node = e->src;
              int v = v_node->cuid;

              if (!bitmap_bit_p (reach_to, v))
                {
                  bitmap_set_bit (reach_to, v);
                  bitmap_set_bit (tmp, v);
                  change = 1;
                }
            }
        }
    }

  answer = bitmap_and (result, reachable_from, reach_to);
  sbitmap_free (workset);
  sbitmap_free (reachable_from);
  sbitmap_free (reach_to);
  sbitmap_free (tmp);
  return answer;
}

/* Updates the counts of U_NODE's successors (that belong to NODES) to be
   at-least as large as the count of U_NODE plus the latency between them.
   Sets a bit in TMP for each successor whose count was changed (increased).
   Returns nonzero if any count was changed.  */
static int
update_dist_to_successors (ddg_node_ptr u_node, sbitmap nodes, sbitmap tmp)
{
  ddg_edge_ptr e;
  int result = 0;

  for (e = u_node->out; e; e = e->next_out)
    {
      ddg_node_ptr v_node = e->dest;
      int v = v_node->cuid;

      if (bitmap_bit_p (nodes, v)
          && (e->distance == 0)
          && (v_node->aux.count < u_node->aux.count + e->latency))
        {
          v_node->aux.count = u_node->aux.count + e->latency;
          bitmap_set_bit (tmp, v);
          result = 1;
        }
    }
  return result;
}

/* Find the length of a longest path from SRC to DEST in G,
   going only through NODES, and disregarding backarcs.  */
int
longest_simple_path (struct ddg * g, int src, int dest, sbitmap nodes)
{
  int i;
  unsigned int u = 0;
  int change = 1;
  int result;
  int num_nodes = g->num_nodes;
  sbitmap workset = sbitmap_alloc (num_nodes);
  sbitmap tmp = sbitmap_alloc (num_nodes);

  /* Data will hold the distance of the longest path found so far from
     src to each node.  Initialize to -1 = less than minimum.  */
  for (i = 0; i < g->num_nodes; i++)
    g->nodes[i].aux.count = -1;
  g->nodes[src].aux.count = 0;

  bitmap_clear (tmp);
  bitmap_set_bit (tmp, src);

  while (change)
    {
      sbitmap_iterator sbi;

      change = 0;
      bitmap_copy (workset, tmp);
      bitmap_clear (tmp);
      EXECUTE_IF_SET_IN_BITMAP (workset, 0, u, sbi)
        {
          ddg_node_ptr u_node = &g->nodes[u];

          change |= update_dist_to_successors (u_node, nodes, tmp);
        }
    }
  result = g->nodes[dest].aux.count;
  sbitmap_free (workset);
  sbitmap_free (tmp);
  return result;
}

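/* Illustrative sketch (hypothetical nodes and latencies): for a chain
   a -> b -> c inside NODES connected by distance-0 edges of latencies 2 and
   3, the relaxation above converges to counts 0, 2 and 5, so
   longest_simple_path (g, a, c, nodes) returns 5; edges with distance > 0
   (backarcs) are skipped by update_dist_to_successors.  */
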
#endif /* INSN_SCHEDULING */