/* DDG - Data Dependence Graph implementation.
   Copyright (C) 2004-2014 Free Software Foundation, Inc.
   Contributed by Ayal Zaks and Mustafa Hagog <zaks,mustafa@il.ibm.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "diagnostic-core.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "basic-block.h"
#include "sched-int.h"
#include "df.h"
#include "rtl-iter.h"
#include "ddg.h"

#ifdef INSN_SCHEDULING

/* A flag indicating that a ddg edge belongs to an SCC or not.  */
enum edge_flag {NOT_IN_SCC = 0, IN_SCC};

/* Forward declarations.  */
static void add_backarc_to_ddg (ddg_ptr, ddg_edge_ptr);
static void add_backarc_to_scc (ddg_scc_ptr, ddg_edge_ptr);
static void add_scc_to_ddg (ddg_all_sccs_ptr, ddg_scc_ptr);
static void create_ddg_dep_from_intra_loop_link (ddg_ptr, ddg_node_ptr,
                                                 ddg_node_ptr, dep_t);
static void create_ddg_dep_no_link (ddg_ptr, ddg_node_ptr, ddg_node_ptr,
                                    dep_type, dep_data_type, int);
static ddg_edge_ptr create_ddg_edge (ddg_node_ptr, ddg_node_ptr, dep_type,
                                     dep_data_type, int, int);
static void add_edge_to_ddg (ddg_ptr g, ddg_edge_ptr);

/* Auxiliary variable for mem_read_insn_p/mem_write_insn_p.  */
static bool mem_ref_p;

/* Auxiliary function for mem_read_insn_p.  */
static void
mark_mem_use (rtx *x, void *)
{
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, *x, NONCONST)
    if (MEM_P (*iter))
      {
        mem_ref_p = true;
        break;
      }
}

/* Returns nonzero if INSN reads from memory.  */
static bool
mem_read_insn_p (rtx_insn *insn)
{
  mem_ref_p = false;
  note_uses (&PATTERN (insn), mark_mem_use, NULL);
  return mem_ref_p;
}

/* Auxiliary function for mem_write_insn_p.  */
static void
mark_mem_store (rtx loc, const_rtx setter ATTRIBUTE_UNUSED,
                void *data ATTRIBUTE_UNUSED)
{
  if (MEM_P (loc))
    mem_ref_p = true;
}

/* Returns nonzero if INSN writes to memory.  */
static bool
mem_write_insn_p (rtx_insn *insn)
{
  mem_ref_p = false;
  note_stores (PATTERN (insn), mark_mem_store, NULL);
  return mem_ref_p;
}

/* Returns nonzero if X contains a memory access.  */
static bool
rtx_mem_access_p (rtx x)
{
  int i, j;
  const char *fmt;
  enum rtx_code code;

  if (x == 0)
    return false;

  if (MEM_P (x))
    return true;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          if (rtx_mem_access_p (XEXP (x, i)))
            return true;
        }
      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          {
            if (rtx_mem_access_p (XVECEXP (x, i, j)))
              return true;
          }
    }
  return false;
}

/* Returns nonzero if INSN reads from or writes to memory.  */
static bool
mem_access_insn_p (rtx_insn *insn)
{
  return rtx_mem_access_p (PATTERN (insn));
}

/* Return true if DEF_INSN contains an address being auto-inc'ed or
   auto-dec'ed which is used in USE_INSN.  Otherwise return false.  The
   result is used to decide whether to remove the edge between def_insn
   and use_insn when -fmodulo-sched-allow-regmoves is set.  This function
   doesn't need to consider the specific address register; no reg-moves
   will be allowed for any life range defined by def_insn and used
   by use_insn, if use_insn uses an address register auto-inc'ed by
   def_insn.  */
bool
autoinc_var_is_used_p (rtx_insn *def_insn, rtx_insn *use_insn)
{
  rtx note;

  for (note = REG_NOTES (def_insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_INC
        && reg_referenced_p (XEXP (note, 0), PATTERN (use_insn)))
      return true;

  return false;
}

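/* Illustrative note (hypothetical insns, not from the original sources):
   if DEF_INSN stores through a post-incremented pointer, e.g.
   "[r5++] = r6", it carries a REG_INC note for r5; any USE_INSN whose
   pattern references r5 then makes autoinc_var_is_used_p return true, so
   the corresponding anti-dep edge is kept and no reg-move is generated
   for r5.  Register numbers here are made up for the example.  */
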
/* Return true if one of the definitions in INSN has MODE_CC.  Otherwise
   return false.  */
static bool
def_has_ccmode_p (rtx_insn *insn)
{
  df_ref def;

  FOR_EACH_INSN_DEF (def, insn)
    {
      machine_mode mode = GET_MODE (DF_REF_REG (def));

      if (GET_MODE_CLASS (mode) == MODE_CC)
        return true;
    }

  return false;
}

/* Computes the dependence parameters (latency, distance etc.), creates
   a ddg_edge and adds it to the given DDG.  */
static void
create_ddg_dep_from_intra_loop_link (ddg_ptr g, ddg_node_ptr src_node,
                                     ddg_node_ptr dest_node, dep_t link)
{
  ddg_edge_ptr e;
  int latency, distance = 0;
  dep_type t = TRUE_DEP;
  dep_data_type dt = (mem_access_insn_p (src_node->insn)
                      && mem_access_insn_p (dest_node->insn) ? MEM_DEP
                                                             : REG_DEP);
  gcc_assert (src_node->cuid < dest_node->cuid);

  /* Note: REG_DEP_ANTI applies to MEM ANTI_DEP as well!!  */
  if (DEP_TYPE (link) == REG_DEP_ANTI)
    t = ANTI_DEP;
  else if (DEP_TYPE (link) == REG_DEP_OUTPUT)
    t = OUTPUT_DEP;

  gcc_assert (!DEBUG_INSN_P (dest_node->insn) || t == ANTI_DEP);
  gcc_assert (!DEBUG_INSN_P (src_node->insn) || t == ANTI_DEP);

  /* We currently choose not to create certain anti-dep edges and
     compensate for that by generating reg-moves based on the life-range
     analysis.  The anti-deps that will be deleted are the ones which
     have true-dep edges in the opposite direction (in other words
     the kernel has only one def of the relevant register).
     If the address that is being auto-inc'ed or auto-dec'ed in DEST_NODE
     is used in SRC_NODE then do not remove the edge, to make sure
     reg-moves will not be created for this address.
     TODO: support the removal of all anti-dep edges, i.e. including those
     whose register has multiple defs in the loop.  */
  if (flag_modulo_sched_allow_regmoves
      && (t == ANTI_DEP && dt == REG_DEP)
      && !def_has_ccmode_p (dest_node->insn)
      && !autoinc_var_is_used_p (dest_node->insn, src_node->insn))
    {
      rtx set;

      set = single_set (dest_node->insn);
      /* TODO: Handle registers for which REG_P is not true, i.e.
         subregs and special registers.  */
      if (set && REG_P (SET_DEST (set)))
        {
          int regno = REGNO (SET_DEST (set));
          df_ref first_def;
          struct df_rd_bb_info *bb_info = DF_RD_BB_INFO (g->bb);

          first_def = df_bb_regno_first_def_find (g->bb, regno);
          gcc_assert (first_def);

          if (bitmap_bit_p (&bb_info->gen, DF_REF_ID (first_def)))
            return;
        }
    }

  latency = dep_cost (link);
  e = create_ddg_edge (src_node, dest_node, t, dt, latency, distance);
  add_edge_to_ddg (g, e);
}

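/* Illustrative sketch (hypothetical insns, not from the original sources):
   with -fmodulo-sched-allow-regmoves, an intra-loop anti-dep such as

       src_node:   r7 = r1 + r2     ; reads r1
       dest_node:  r1 = r3 * r4     ; single def of r1, not MODE_CC,
                                    ; no auto-inc address used by src_node

   can be dropped by the early return above when the bitmap_bit_p test on
   the reaching-definitions GEN set succeeds; the scheduler is then
   expected to compensate for the missing edge with reg-moves derived from
   life-range analysis, as the comment before the condition explains.  */
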
/* The same as the above function, but it doesn't require a link parameter.  */
static void
create_ddg_dep_no_link (ddg_ptr g, ddg_node_ptr from, ddg_node_ptr to,
                        dep_type d_t, dep_data_type d_dt, int distance)
{
  ddg_edge_ptr e;
  int l;
  enum reg_note dep_kind;
  struct _dep _dep, *dep = &_dep;

  gcc_assert (!DEBUG_INSN_P (to->insn) || d_t == ANTI_DEP);
  gcc_assert (!DEBUG_INSN_P (from->insn) || d_t == ANTI_DEP);

  if (d_t == ANTI_DEP)
    dep_kind = REG_DEP_ANTI;
  else if (d_t == OUTPUT_DEP)
    dep_kind = REG_DEP_OUTPUT;
  else
    {
      gcc_assert (d_t == TRUE_DEP);

      dep_kind = REG_DEP_TRUE;
    }

  init_dep (dep, from->insn, to->insn, dep_kind);

  l = dep_cost (dep);

  e = create_ddg_edge (from, to, d_t, d_dt, l, distance);
  if (distance > 0)
    add_backarc_to_ddg (g, e);
  else
    add_edge_to_ddg (g, e);
}

/* Given a downwards exposed register def LAST_DEF (which is the last
   definition of that register in the bb), add inter-loop true dependences
   to all its uses in the next iteration, an output dependence to the
   first def of the same register (possibly itself) in the next iteration
   and anti-dependences from its uses in the current iteration to the
   first definition in the next iteration.  */
static void
add_cross_iteration_register_deps (ddg_ptr g, df_ref last_def)
{
  int regno = DF_REF_REGNO (last_def);
  struct df_link *r_use;
  int has_use_in_bb_p = false;
  rtx_insn *def_insn = DF_REF_INSN (last_def);
  ddg_node_ptr last_def_node = get_node_of_insn (g, def_insn);
  ddg_node_ptr use_node;
#ifdef ENABLE_CHECKING
  struct df_rd_bb_info *bb_info = DF_RD_BB_INFO (g->bb);
#endif
  df_ref first_def = df_bb_regno_first_def_find (g->bb, regno);

  gcc_assert (last_def_node);
  gcc_assert (first_def);

#ifdef ENABLE_CHECKING
  if (DF_REF_ID (last_def) != DF_REF_ID (first_def))
    gcc_assert (!bitmap_bit_p (&bb_info->gen,
                               DF_REF_ID (first_def)));
#endif

  /* Create inter-loop true dependences and anti dependences.  */
  for (r_use = DF_REF_CHAIN (last_def); r_use != NULL; r_use = r_use->next)
    {
      rtx_insn *use_insn = DF_REF_INSN (r_use->ref);

      if (BLOCK_FOR_INSN (use_insn) != g->bb)
        continue;

      /* ??? Do not handle uses with DF_REF_IN_NOTE notes.  */
      use_node = get_node_of_insn (g, use_insn);
      gcc_assert (use_node);
      has_use_in_bb_p = true;
      if (use_node->cuid <= last_def_node->cuid)
        {
          /* Add true deps from last_def to its uses in the next
             iteration.  Any such upwards exposed use appears before
             the last_def def.  */
          create_ddg_dep_no_link (g, last_def_node, use_node,
                                  DEBUG_INSN_P (use_insn) ? ANTI_DEP : TRUE_DEP,
                                  REG_DEP, 1);
        }
      else if (!DEBUG_INSN_P (use_insn))
        {
          /* Add anti deps from last_def's uses in the current iteration
             to the first def in the next iteration.  We do not add ANTI
             dep when there is an intra-loop TRUE dep in the opposite
             direction, but use regmoves to fix such disregarded ANTI
             deps when broken.  If the first_def reaches the USE then
             there is such a dep.  */
          ddg_node_ptr first_def_node = get_node_of_insn (g,
                                                          DF_REF_INSN (first_def));

          gcc_assert (first_def_node);

          /* Always create the edge if the use node is a branch in
             order to prevent the creation of reg-moves.
             If the address that is being auto-inc'ed or auto-dec'ed in
             LAST_DEF is used in USE_INSN then do not remove the edge to
             make sure reg-moves will not be created for that address.  */
          if (DF_REF_ID (last_def) != DF_REF_ID (first_def)
              || !flag_modulo_sched_allow_regmoves
              || JUMP_P (use_node->insn)
              || autoinc_var_is_used_p (DF_REF_INSN (last_def), use_insn)
              || def_has_ccmode_p (DF_REF_INSN (last_def)))
            create_ddg_dep_no_link (g, use_node, first_def_node, ANTI_DEP,
                                    REG_DEP, 1);
        }
    }

  /* Create an inter-loop output dependence between LAST_DEF (which is the
     last def in its block, being downwards exposed) and the first def in
     its block.  Avoid creating a self output dependence.  Avoid creating
     an output dependence if there is a dependence path between the two
     defs starting with a true dependence to a use which can be in the
     next iteration, followed by an anti dependence of that use to the
     first def (i.e. if there is a use between the two defs).  */
  if (!has_use_in_bb_p)
    {
      ddg_node_ptr dest_node;

      if (DF_REF_ID (last_def) == DF_REF_ID (first_def))
        return;

      dest_node = get_node_of_insn (g, DF_REF_INSN (first_def));
      gcc_assert (dest_node);
      create_ddg_dep_no_link (g, last_def_node, dest_node,
                              OUTPUT_DEP, REG_DEP, 1);
    }
}

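/* Illustrative sketch (hypothetical insn, not from the original sources):
   for an accumulator like "r4 = r4 + r5" that is the only def of r4 in
   the block, last_def == first_def and the use of r4 sits in the defining
   insn itself, so use_node->cuid <= last_def_node->cuid holds and the
   code above adds a TRUE_DEP/REG_DEP edge of distance 1 from the def to
   that use, i.e. the classic cross-iteration recurrence of an
   accumulator.  */
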
/* Build inter-loop dependencies, by looking at DF analysis backwards.  */
static void
build_inter_loop_deps (ddg_ptr g)
{
  unsigned rd_num;
  struct df_rd_bb_info *rd_bb_info;
  bitmap_iterator bi;

  rd_bb_info = DF_RD_BB_INFO (g->bb);

  /* Find inter-loop register output, true and anti deps.  */
  EXECUTE_IF_SET_IN_BITMAP (&rd_bb_info->gen, 0, rd_num, bi)
    {
      df_ref rd = DF_DEFS_GET (rd_num);

      add_cross_iteration_register_deps (g, rd);
    }
}

/* Return true if the two specified instructions have memory expressions
   whose alias sets conflict.  */
static bool
insns_may_alias_p (rtx_insn *insn1, rtx_insn *insn2)
{
  subrtx_iterator::array_type array1;
  subrtx_iterator::array_type array2;
  FOR_EACH_SUBRTX (iter1, array1, PATTERN (insn1), NONCONST)
    {
      const_rtx x1 = *iter1;
      if (MEM_P (x1))
        FOR_EACH_SUBRTX (iter2, array2, PATTERN (insn2), NONCONST)
          {
            const_rtx x2 = *iter2;
            if (MEM_P (x2) && may_alias_p (x2, x1))
              return true;
          }
    }
  return false;
}

/* Given two nodes, analyze their RTL insns and add intra-loop mem deps
   to ddg G.  */
static void
add_intra_loop_mem_dep (ddg_ptr g, ddg_node_ptr from, ddg_node_ptr to)
{
  if ((from->cuid == to->cuid)
      || !insns_may_alias_p (from->insn, to->insn))
    /* Do not create an edge if the memory references have disjoint alias
       sets or 'to' and 'from' are the same instruction.  */
    return;

  if (mem_write_insn_p (from->insn))
    {
      if (mem_read_insn_p (to->insn))
        create_ddg_dep_no_link (g, from, to,
                                DEBUG_INSN_P (to->insn)
                                ? ANTI_DEP : TRUE_DEP, MEM_DEP, 0);
      else
        create_ddg_dep_no_link (g, from, to,
                                DEBUG_INSN_P (to->insn)
                                ? ANTI_DEP : OUTPUT_DEP, MEM_DEP, 0);
    }
  else if (!mem_read_insn_p (to->insn))
    create_ddg_dep_no_link (g, from, to, ANTI_DEP, MEM_DEP, 0);
}

/* Given two nodes, analyze their RTL insns and add inter-loop mem deps
   to ddg G.  */
static void
add_inter_loop_mem_dep (ddg_ptr g, ddg_node_ptr from, ddg_node_ptr to)
{
  if (!insns_may_alias_p (from->insn, to->insn))
    /* Do not create an edge if the memory references have disjoint
       alias sets.  */
    return;

  if (mem_write_insn_p (from->insn))
    {
      if (mem_read_insn_p (to->insn))
        create_ddg_dep_no_link (g, from, to,
                                DEBUG_INSN_P (to->insn)
                                ? ANTI_DEP : TRUE_DEP, MEM_DEP, 1);
      else if (from->cuid != to->cuid)
        create_ddg_dep_no_link (g, from, to,
                                DEBUG_INSN_P (to->insn)
                                ? ANTI_DEP : OUTPUT_DEP, MEM_DEP, 1);
    }
  else
    {
      if (mem_read_insn_p (to->insn))
        return;
      else if (from->cuid != to->cuid)
        {
          create_ddg_dep_no_link (g, from, to, ANTI_DEP, MEM_DEP, 1);
          if (DEBUG_INSN_P (from->insn) || DEBUG_INSN_P (to->insn))
            create_ddg_dep_no_link (g, to, from, ANTI_DEP, MEM_DEP, 1);
          else
            create_ddg_dep_no_link (g, to, from, TRUE_DEP, MEM_DEP, 1);
        }
    }
}

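/* Illustrative note (hypothetical insns, not from the original sources):
   for a store "mem[r1] = r2" (FROM) and a possibly aliasing load
   "r3 = mem[r4]" (TO), the code above adds a TRUE_DEP/MEM_DEP edge of
   distance 1, modeling the store of iteration i feeding the load of
   iteration i+1; when FROM is the load and TO is the store, the else
   branch instead adds the ANTI_DEP edge and the reverse TRUE_DEP edge.  */
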
/* Perform intra-block Data Dependency analysis and connect the nodes in
   the DDG.  We assume the loop has a single basic block.  */
static void
build_intra_loop_deps (ddg_ptr g)
{
  int i;
  /* Hold the dependency analysis state during dependency calculations.  */
  struct deps_desc tmp_deps;
  rtx_insn *head, *tail;

  /* Build the dependence information, using the sched_analyze function.  */
  init_deps_global ();
  init_deps (&tmp_deps, false);

  /* Do the intra-block data dependence analysis for the given block.  */
  get_ebb_head_tail (g->bb, g->bb, &head, &tail);
  sched_analyze (&tmp_deps, head, tail);

  /* Build intra-loop data dependencies using the scheduler dependency
     analysis.  */
  for (i = 0; i < g->num_nodes; i++)
    {
      ddg_node_ptr dest_node = &g->nodes[i];
      sd_iterator_def sd_it;
      dep_t dep;

      if (! INSN_P (dest_node->insn))
        continue;

      FOR_EACH_DEP (dest_node->insn, SD_LIST_BACK, sd_it, dep)
        {
          rtx_insn *src_insn = DEP_PRO (dep);
          ddg_node_ptr src_node;

          /* Don't add dependencies on debug insns to non-debug insns
             to avoid codegen differences between -g and -g0.  */
          if (DEBUG_INSN_P (src_insn) && !DEBUG_INSN_P (dest_node->insn))
            continue;

          src_node = get_node_of_insn (g, src_insn);

          if (!src_node)
            continue;

          create_ddg_dep_from_intra_loop_link (g, src_node, dest_node, dep);
        }

      /* If this insn modifies memory, add an edge to all insns that access
         memory.  */
      if (mem_access_insn_p (dest_node->insn))
        {
          int j;

          for (j = 0; j <= i; j++)
            {
              ddg_node_ptr j_node = &g->nodes[j];
              if (DEBUG_INSN_P (j_node->insn))
                continue;
              if (mem_access_insn_p (j_node->insn))
                {
                  /* Don't bother calculating an inter-loop dep if an
                     intra-loop dep already exists.  */
                  if (! bitmap_bit_p (dest_node->successors, j))
                    add_inter_loop_mem_dep (g, dest_node, j_node);
                  /* If -fmodulo-sched-allow-regmoves is set certain
                     anti-dep edges are not created.  It might be that
                     these anti-dep edges are on the path from one memory
                     instruction to another, such that removing these
                     edges could cause a violation of the memory
                     dependencies.  Thus we add intra edges between every
                     two memory instructions in this case.  */
                  if (flag_modulo_sched_allow_regmoves
                      && !bitmap_bit_p (dest_node->predecessors, j))
                    add_intra_loop_mem_dep (g, j_node, dest_node);
                }
            }
        }
    }

  /* Free the INSN_LISTs.  */
  finish_deps_global ();
  free_deps (&tmp_deps);

  /* Free dependencies.  */
  sched_free_deps (head, tail, false);
}

/* Given a basic block, create its DDG and return a pointer to a variable
   of ddg type that represents it.
   Initialize the ddg structure fields to the appropriate values.  */
ddg_ptr
create_ddg (basic_block bb, int closing_branch_deps)
{
  ddg_ptr g;
  rtx_insn *insn, *first_note;
  int i;
  int num_nodes = 0;

  g = (ddg_ptr) xcalloc (1, sizeof (struct ddg));

  g->bb = bb;
  g->closing_branch_deps = closing_branch_deps;

  /* Count the number of insns in the BB.  */
  for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
       insn = NEXT_INSN (insn))
    {
      if (! INSN_P (insn) || GET_CODE (PATTERN (insn)) == USE)
        continue;

      if (DEBUG_INSN_P (insn))
        g->num_debug++;
      else
        {
          if (mem_read_insn_p (insn))
            g->num_loads++;
          if (mem_write_insn_p (insn))
            g->num_stores++;
        }
      num_nodes++;
    }

  /* There is nothing to do for this BB.  */
  if ((num_nodes - g->num_debug) <= 1)
    {
      free (g);
      return NULL;
    }

  /* Allocate the nodes array, and initialize the nodes.  */
  g->num_nodes = num_nodes;
  g->nodes = (ddg_node_ptr) xcalloc (num_nodes, sizeof (struct ddg_node));
  g->closing_branch = NULL;

  i = 0;
  first_note = NULL;
  for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
       insn = NEXT_INSN (insn))
    {
      if (! INSN_P (insn))
        {
          if (! first_note && NOTE_P (insn)
              && NOTE_KIND (insn) != NOTE_INSN_BASIC_BLOCK)
            first_note = insn;
          continue;
        }
      if (JUMP_P (insn))
        {
          gcc_assert (!g->closing_branch);
          g->closing_branch = &g->nodes[i];
        }
      else if (GET_CODE (PATTERN (insn)) == USE)
        {
          if (! first_note)
            first_note = insn;
          continue;
        }

      g->nodes[i].cuid = i;
      g->nodes[i].successors = sbitmap_alloc (num_nodes);
      bitmap_clear (g->nodes[i].successors);
      g->nodes[i].predecessors = sbitmap_alloc (num_nodes);
      bitmap_clear (g->nodes[i].predecessors);
      g->nodes[i].first_note = (first_note ? first_note : insn);
      g->nodes[i++].insn = insn;
      first_note = NULL;
    }

  /* We must have found a branch in DDG.  */
  gcc_assert (g->closing_branch);

  /* Build the data dependency graph.  */
  build_intra_loop_deps (g);
  build_inter_loop_deps (g);

  return g;
}

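/* Illustrative usage sketch (hypothetical helper, wrapped in "#if 0" and
   not part of the original file): how a caller such as the modulo
   scheduler might drive the routines defined here for a single-block
   loop body.  */
#if 0
static void
ddg_usage_sketch (basic_block loop_body, FILE *dump)
{
  ddg_ptr g = create_ddg (loop_body, 0);

  if (!g)
    return;

  ddg_all_sccs_ptr sccs = create_ddg_all_sccs (g);

  if (dump)
    {
      print_ddg (dump, g);
      print_sccs (dump, sccs, g);
    }

  free_ddg_all_sccs (sccs);
  free_ddg (g);
}
#endif
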
/* Free all the memory allocated for the DDG.  */
void
free_ddg (ddg_ptr g)
{
  int i;

  if (!g)
    return;

  for (i = 0; i < g->num_nodes; i++)
    {
      ddg_edge_ptr e = g->nodes[i].out;

      while (e)
        {
          ddg_edge_ptr next = e->next_out;

          free (e);
          e = next;
        }
      sbitmap_free (g->nodes[i].successors);
      sbitmap_free (g->nodes[i].predecessors);
    }
  if (g->num_backarcs > 0)
    free (g->backarcs);
  free (g->nodes);
  free (g);
}

/* Print a single DDG edge to FILE.  */
static void
print_ddg_edge (FILE *file, ddg_edge_ptr e)
{
  char dep_c;

  switch (e->type)
    {
    case OUTPUT_DEP:
      dep_c = 'O';
      break;
    case ANTI_DEP:
      dep_c = 'A';
      break;
    default:
      dep_c = 'T';
    }

  fprintf (file, " [%d -(%c,%d,%d)-> %d] ", INSN_UID (e->src->insn),
           dep_c, e->latency, e->distance, INSN_UID (e->dest->insn));
}

/* Print the DDG nodes with their in/out edges to the dump file.  */
void
print_ddg (FILE *file, ddg_ptr g)
{
  int i;

  for (i = 0; i < g->num_nodes; i++)
    {
      ddg_edge_ptr e;

      fprintf (file, "Node num: %d\n", g->nodes[i].cuid);
      print_rtl_single (file, g->nodes[i].insn);
      fprintf (file, "OUT ARCS: ");
      for (e = g->nodes[i].out; e; e = e->next_out)
        print_ddg_edge (file, e);

      fprintf (file, "\nIN ARCS: ");
      for (e = g->nodes[i].in; e; e = e->next_in)
        print_ddg_edge (file, e);

      fprintf (file, "\n");
    }
}

/* Print the given DDG in VCG format.  */
void
vcg_print_ddg (FILE *file, ddg_ptr g)
{
  int src_cuid;

  fprintf (file, "graph: {\n");
  for (src_cuid = 0; src_cuid < g->num_nodes; src_cuid++)
    {
      ddg_edge_ptr e;
      int src_uid = INSN_UID (g->nodes[src_cuid].insn);

      fprintf (file, "node: {title: \"%d_%d\" info1: \"", src_cuid, src_uid);
      print_rtl_single (file, g->nodes[src_cuid].insn);
      fprintf (file, "\"}\n");

      for (e = g->nodes[src_cuid].out; e; e = e->next_out)
        {
          int dst_uid = INSN_UID (e->dest->insn);
          int dst_cuid = e->dest->cuid;

          /* Give the backarcs a different color.  */
          if (e->distance > 0)
            fprintf (file, "backedge: {color: red ");
          else
            fprintf (file, "edge: { ");

          fprintf (file, "sourcename: \"%d_%d\" ", src_cuid, src_uid);
          fprintf (file, "targetname: \"%d_%d\" ", dst_cuid, dst_uid);
          fprintf (file, "label: \"%d_%d\"}\n", e->latency, e->distance);
        }
    }
  fprintf (file, "}\n");
}

/* Dump the sccs in SCCS.  */
void
print_sccs (FILE *file, ddg_all_sccs_ptr sccs, ddg_ptr g)
{
  unsigned int u = 0;
  sbitmap_iterator sbi;
  int i;

  if (!file)
    return;

  fprintf (file, "\n;; Number of SCC nodes - %d\n", sccs->num_sccs);
  for (i = 0; i < sccs->num_sccs; i++)
    {
      fprintf (file, "SCC number: %d\n", i);
      EXECUTE_IF_SET_IN_BITMAP (sccs->sccs[i]->nodes, 0, u, sbi)
        {
          fprintf (file, "insn num %d\n", u);
          print_rtl_single (file, g->nodes[u].insn);
        }
    }
  fprintf (file, "\n");
}

/* Create an edge and initialize it with the given values.  */
static ddg_edge_ptr
create_ddg_edge (ddg_node_ptr src, ddg_node_ptr dest,
                 dep_type t, dep_data_type dt, int l, int d)
{
  ddg_edge_ptr e = (ddg_edge_ptr) xmalloc (sizeof (struct ddg_edge));

  e->src = src;
  e->dest = dest;
  e->type = t;
  e->data_type = dt;
  e->latency = l;
  e->distance = d;
  e->next_in = e->next_out = NULL;
  e->aux.count = NOT_IN_SCC;

  return e;
}

/* Add the given edge to the in/out linked lists of the DDG nodes.  */
static void
add_edge_to_ddg (ddg_ptr g ATTRIBUTE_UNUSED, ddg_edge_ptr e)
{
  ddg_node_ptr src = e->src;
  ddg_node_ptr dest = e->dest;

  /* Should have allocated the sbitmaps.  */
  gcc_assert (src->successors && dest->predecessors);

  bitmap_set_bit (src->successors, dest->cuid);
  bitmap_set_bit (dest->predecessors, src->cuid);
  e->next_in = dest->in;
  dest->in = e;
  e->next_out = src->out;
  src->out = e;
}

/* Algorithm for computing the recurrence_length of an scc.  We assume
   for now that cycles in the data dependence graph contain a single
   backarc.  This simplifies the algorithm, and can be generalized later.  */
static void
set_recurrence_length (ddg_scc_ptr scc, ddg_ptr g)
{
  int j;
  int result = -1;

  for (j = 0; j < scc->num_backarcs; j++)
    {
      ddg_edge_ptr backarc = scc->backarcs[j];
      int length;
      int distance = backarc->distance;
      ddg_node_ptr src = backarc->dest;
      ddg_node_ptr dest = backarc->src;

      length = longest_simple_path (g, src->cuid, dest->cuid, scc->nodes);
      if (length < 0)
        {
          /* fprintf (stderr, "Backarc not on simple cycle in SCC.\n"); */
          continue;
        }
      length += backarc->latency;
      result = MAX (result, (length / distance));
    }
  scc->recurrence_length = result;
}

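/* Worked example (hypothetical numbers, not from the original sources):
   for an SCC whose single backarc closes a cycle in which the longest
   forward path from the backarc's destination to its source has latency
   3, and the backarc itself has latency 1 and distance 1, the code above
   computes recurrence_length = (3 + 1) / 1 = 4, i.e. the recMII
   contribution of that cycle.  */
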
/* Create a new SCC given the set of its nodes.  Compute its recurrence_length
   and mark edges that belong to this scc as IN_SCC.  */
static ddg_scc_ptr
create_scc (ddg_ptr g, sbitmap nodes)
{
  ddg_scc_ptr scc;
  unsigned int u = 0;
  sbitmap_iterator sbi;

  scc = (ddg_scc_ptr) xmalloc (sizeof (struct ddg_scc));
  scc->backarcs = NULL;
  scc->num_backarcs = 0;
  scc->nodes = sbitmap_alloc (g->num_nodes);
  bitmap_copy (scc->nodes, nodes);

  /* Mark the backarcs that belong to this SCC.  */
  EXECUTE_IF_SET_IN_BITMAP (nodes, 0, u, sbi)
    {
      ddg_edge_ptr e;
      ddg_node_ptr n = &g->nodes[u];

      for (e = n->out; e; e = e->next_out)
        if (bitmap_bit_p (nodes, e->dest->cuid))
          {
            e->aux.count = IN_SCC;
            if (e->distance > 0)
              add_backarc_to_scc (scc, e);
          }
    }

  set_recurrence_length (scc, g);
  return scc;
}

/* Cleans the memory allocation of a given SCC.  */
static void
free_scc (ddg_scc_ptr scc)
{
  if (!scc)
    return;

  sbitmap_free (scc->nodes);
  if (scc->num_backarcs > 0)
    free (scc->backarcs);
  free (scc);
}

/* Add a given edge known to be a backarc to the given DDG.  */
static void
add_backarc_to_ddg (ddg_ptr g, ddg_edge_ptr e)
{
  int size = (g->num_backarcs + 1) * sizeof (ddg_edge_ptr);

  add_edge_to_ddg (g, e);
  g->backarcs = (ddg_edge_ptr *) xrealloc (g->backarcs, size);
  g->backarcs[g->num_backarcs++] = e;
}

/* Add a backarc to an SCC.  */
static void
add_backarc_to_scc (ddg_scc_ptr scc, ddg_edge_ptr e)
{
  int size = (scc->num_backarcs + 1) * sizeof (ddg_edge_ptr);

  scc->backarcs = (ddg_edge_ptr *) xrealloc (scc->backarcs, size);
  scc->backarcs[scc->num_backarcs++] = e;
}

/* Add the given SCC to the DDG.  */
static void
add_scc_to_ddg (ddg_all_sccs_ptr g, ddg_scc_ptr scc)
{
  int size = (g->num_sccs + 1) * sizeof (ddg_scc_ptr);

  g->sccs = (ddg_scc_ptr *) xrealloc (g->sccs, size);
  g->sccs[g->num_sccs++] = scc;
}

/* Given the instruction INSN return the node that represents it.  */
ddg_node_ptr
get_node_of_insn (ddg_ptr g, rtx_insn *insn)
{
  int i;

  for (i = 0; i < g->num_nodes; i++)
    if (insn == g->nodes[i].insn)
      return &g->nodes[i];
  return NULL;
}

/* Given a set OPS of nodes in the DDG, find the set of their successors
   which are not in OPS, and set their bits in SUCC.  Bits corresponding to
   OPS are cleared from SUCC.  Leaves the other bits in SUCC unchanged.  */
static void
find_successors (sbitmap succ, ddg_ptr g, sbitmap ops)
{
  unsigned int i = 0;
  sbitmap_iterator sbi;

  EXECUTE_IF_SET_IN_BITMAP (ops, 0, i, sbi)
    {
      const sbitmap node_succ = NODE_SUCCESSORS (&g->nodes[i]);
      bitmap_ior (succ, succ, node_succ);
    }

  /* We want those that are not in ops.  */
  bitmap_and_compl (succ, succ, ops);
}

/* Given a set OPS of nodes in the DDG, find the set of their predecessors
   which are not in OPS, and set their bits in PREDS.  Bits corresponding to
   OPS are cleared from PREDS.  Leaves the other bits in PREDS unchanged.  */
static void
find_predecessors (sbitmap preds, ddg_ptr g, sbitmap ops)
{
  unsigned int i = 0;
  sbitmap_iterator sbi;

  EXECUTE_IF_SET_IN_BITMAP (ops, 0, i, sbi)
    {
      const sbitmap node_preds = NODE_PREDECESSORS (&g->nodes[i]);
      bitmap_ior (preds, preds, node_preds);
    }

  /* We want those that are not in ops.  */
  bitmap_and_compl (preds, preds, ops);
}

/* Compare function to be passed to qsort to order the SCCs in descending
   recMII order.  */
static int
compare_sccs (const void *s1, const void *s2)
{
  const int rec_l1 = (*(const ddg_scc_ptr *) s1)->recurrence_length;
  const int rec_l2 = (*(const ddg_scc_ptr *) s2)->recurrence_length;
  return ((rec_l2 > rec_l1) - (rec_l2 < rec_l1));
}

/* Order the SCCs in descending recMII order using compare_sccs.  */
static void
order_sccs (ddg_all_sccs_ptr g)
{
  qsort (g->sccs, g->num_sccs, sizeof (ddg_scc_ptr),
         (int (*) (const void *, const void *)) compare_sccs);
}

#ifdef ENABLE_CHECKING
/* Check that every node in SCCS belongs to exactly one strongly connected
   component and that no element of SCCS is empty.  */
static void
check_sccs (ddg_all_sccs_ptr sccs, int num_nodes)
{
  int i;
  sbitmap tmp = sbitmap_alloc (num_nodes);

  bitmap_clear (tmp);
  for (i = 0; i < sccs->num_sccs; i++)
    {
      gcc_assert (!bitmap_empty_p (sccs->sccs[i]->nodes));
      /* Verify that every node in sccs is in exactly one strongly
         connected component.  */
      gcc_assert (!bitmap_intersect_p (tmp, sccs->sccs[i]->nodes));
      bitmap_ior (tmp, tmp, sccs->sccs[i]->nodes);
    }
  sbitmap_free (tmp);
}
#endif

/* Perform the Strongly Connected Components decomposing algorithm on the
   DDG and return a DDG_ALL_SCCS structure that contains them.  */
ddg_all_sccs_ptr
create_ddg_all_sccs (ddg_ptr g)
{
  int i;
  int num_nodes = g->num_nodes;
  sbitmap from = sbitmap_alloc (num_nodes);
  sbitmap to = sbitmap_alloc (num_nodes);
  sbitmap scc_nodes = sbitmap_alloc (num_nodes);
  ddg_all_sccs_ptr sccs = (ddg_all_sccs_ptr)
                          xmalloc (sizeof (struct ddg_all_sccs));

  sccs->ddg = g;
  sccs->sccs = NULL;
  sccs->num_sccs = 0;

  for (i = 0; i < g->num_backarcs; i++)
    {
      ddg_scc_ptr scc;
      ddg_edge_ptr backarc = g->backarcs[i];
      ddg_node_ptr src = backarc->src;
      ddg_node_ptr dest = backarc->dest;

      /* If the backarc already belongs to an SCC, continue.  */
      if (backarc->aux.count == IN_SCC)
        continue;

      bitmap_clear (scc_nodes);
      bitmap_clear (from);
      bitmap_clear (to);
      bitmap_set_bit (from, dest->cuid);
      bitmap_set_bit (to, src->cuid);

      if (find_nodes_on_paths (scc_nodes, g, from, to))
        {
          scc = create_scc (g, scc_nodes);
          add_scc_to_ddg (sccs, scc);
        }
    }
  order_sccs (sccs);
  sbitmap_free (from);
  sbitmap_free (to);
  sbitmap_free (scc_nodes);
#ifdef ENABLE_CHECKING
  check_sccs (sccs, num_nodes);
#endif
  return sccs;
}

/* Frees the memory allocated for all SCCs of the DDG, but keeps the DDG.  */
void
free_ddg_all_sccs (ddg_all_sccs_ptr all_sccs)
{
  int i;

  if (!all_sccs)
    return;

  for (i = 0; i < all_sccs->num_sccs; i++)
    free_scc (all_sccs->sccs[i]);

  free (all_sccs->sccs);
  free (all_sccs);
}

/* Given FROM - a bitmap of source nodes - and TO - a bitmap of destination
   nodes - find all nodes that lie on paths from FROM to TO (not excluding
   nodes from FROM and TO).  Return nonzero if such nodes exist.  */
int
find_nodes_on_paths (sbitmap result, ddg_ptr g, sbitmap from, sbitmap to)
{
  int answer;
  int change;
  unsigned int u = 0;
  int num_nodes = g->num_nodes;
  sbitmap_iterator sbi;

  sbitmap workset = sbitmap_alloc (num_nodes);
  sbitmap reachable_from = sbitmap_alloc (num_nodes);
  sbitmap reach_to = sbitmap_alloc (num_nodes);
  sbitmap tmp = sbitmap_alloc (num_nodes);

  bitmap_copy (reachable_from, from);
  bitmap_copy (tmp, from);

  change = 1;
  while (change)
    {
      change = 0;
      bitmap_copy (workset, tmp);
      bitmap_clear (tmp);
      EXECUTE_IF_SET_IN_BITMAP (workset, 0, u, sbi)
        {
          ddg_edge_ptr e;
          ddg_node_ptr u_node = &g->nodes[u];

          for (e = u_node->out; e != (ddg_edge_ptr) 0; e = e->next_out)
            {
              ddg_node_ptr v_node = e->dest;
              int v = v_node->cuid;

              if (!bitmap_bit_p (reachable_from, v))
                {
                  bitmap_set_bit (reachable_from, v);
                  bitmap_set_bit (tmp, v);
                  change = 1;
                }
            }
        }
    }

  bitmap_copy (reach_to, to);
  bitmap_copy (tmp, to);

  change = 1;
  while (change)
    {
      change = 0;
      bitmap_copy (workset, tmp);
      bitmap_clear (tmp);
      EXECUTE_IF_SET_IN_BITMAP (workset, 0, u, sbi)
        {
          ddg_edge_ptr e;
          ddg_node_ptr u_node = &g->nodes[u];

          for (e = u_node->in; e != (ddg_edge_ptr) 0; e = e->next_in)
            {
              ddg_node_ptr v_node = e->src;
              int v = v_node->cuid;

              if (!bitmap_bit_p (reach_to, v))
                {
                  bitmap_set_bit (reach_to, v);
                  bitmap_set_bit (tmp, v);
                  change = 1;
                }
            }
        }
    }

  answer = bitmap_and (result, reachable_from, reach_to);
  sbitmap_free (workset);
  sbitmap_free (reachable_from);
  sbitmap_free (reach_to);
  sbitmap_free (tmp);
  return answer;
}

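/* Illustrative note (hypothetical graph, not from the original sources):
   RESULT is the intersection of the forward-reachable set of FROM and the
   backward-reachable set of TO.  For edges a->b, b->c and a->d with
   FROM = {a} and TO = {c}, reachable_from = {a,b,c,d} and
   reach_to = {a,b,c}, so RESULT = {a,b,c}; node d is dropped because it
   cannot reach TO.  */
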
/* Updates the counts of U_NODE's successors (that belong to NODES) to be
   at least as large as the count of U_NODE plus the latency between them.
   Sets a bit in TMP for each successor whose count was changed (increased).
   Returns nonzero if any count was changed.  */
static int
update_dist_to_successors (ddg_node_ptr u_node, sbitmap nodes, sbitmap tmp)
{
  ddg_edge_ptr e;
  int result = 0;

  for (e = u_node->out; e; e = e->next_out)
    {
      ddg_node_ptr v_node = e->dest;
      int v = v_node->cuid;

      if (bitmap_bit_p (nodes, v)
          && (e->distance == 0)
          && (v_node->aux.count < u_node->aux.count + e->latency))
        {
          v_node->aux.count = u_node->aux.count + e->latency;
          bitmap_set_bit (tmp, v);
          result = 1;
        }
    }
  return result;
}

/* Find the length of a longest path from SRC to DEST in G,
   going only through NODES, and disregarding backarcs.  */
int
longest_simple_path (struct ddg *g, int src, int dest, sbitmap nodes)
{
  int i;
  unsigned int u = 0;
  int change = 1;
  int result;
  int num_nodes = g->num_nodes;
  sbitmap workset = sbitmap_alloc (num_nodes);
  sbitmap tmp = sbitmap_alloc (num_nodes);

  /* Data will hold the distance of the longest path found so far from
     src to each node.  Initialize to -1 = less than minimum.  */
  for (i = 0; i < g->num_nodes; i++)
    g->nodes[i].aux.count = -1;
  g->nodes[src].aux.count = 0;

  bitmap_clear (tmp);
  bitmap_set_bit (tmp, src);

  while (change)
    {
      sbitmap_iterator sbi;

      change = 0;
      bitmap_copy (workset, tmp);
      bitmap_clear (tmp);
      EXECUTE_IF_SET_IN_BITMAP (workset, 0, u, sbi)
        {
          ddg_node_ptr u_node = &g->nodes[u];

          change |= update_dist_to_successors (u_node, nodes, tmp);
        }
    }
  result = g->nodes[dest].aux.count;
  sbitmap_free (workset);
  sbitmap_free (tmp);
  return result;
}

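/* Worked example (hypothetical nodes, not from the original sources):
   for a chain src -(latency 1)-> n1 -(latency 2)-> dest consisting only
   of distance-0 edges inside NODES, the relaxation above settles at
   aux.count values 0, 1 and 3 respectively, so the function returns 3;
   if DEST is unreachable from SRC through NODES it returns -1.  */
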
#endif /* INSN_SCHEDULING */