/* DDG - Data Dependence Graph implementation.
   Copyright (C) 2004-2021 Free Software Foundation, Inc.
   Contributed by Ayal Zaks and Mustafa Hagog <zaks,mustafa@il.ibm.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "df.h"
#include "insn-attr.h"
#include "sched-int.h"
#include "ddg.h"
#include "rtl-iter.h"

#ifdef INSN_SCHEDULING

/* Forward declarations.  */
static void add_backarc_to_ddg (ddg_ptr, ddg_edge_ptr);
static void add_backarc_to_scc (ddg_scc_ptr, ddg_edge_ptr);
static void add_scc_to_ddg (ddg_all_sccs_ptr, ddg_scc_ptr);
static void create_ddg_dep_from_intra_loop_link (ddg_ptr, ddg_node_ptr,
                                                 ddg_node_ptr, dep_t);
static void create_ddg_dep_no_link (ddg_ptr, ddg_node_ptr, ddg_node_ptr,
                                    dep_type, dep_data_type, int);
static ddg_edge_ptr create_ddg_edge (ddg_node_ptr, ddg_node_ptr, dep_type,
                                     dep_data_type, int, int);
static void add_edge_to_ddg (ddg_ptr g, ddg_edge_ptr);

/* Auxiliary variable for mem_read_insn_p/mem_write_insn_p.  */
static bool mem_ref_p;

/* Auxiliary function for mem_read_insn_p.  */
static void
mark_mem_use (rtx *x, void *)
{
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, *x, NONCONST)
    if (MEM_P (*iter))
      {
        mem_ref_p = true;
        break;
      }
}

/* Returns nonzero if INSN reads from memory.  */
static bool
mem_read_insn_p (rtx_insn *insn)
{
  mem_ref_p = false;
  note_uses (&PATTERN (insn), mark_mem_use, NULL);
  return mem_ref_p;
}

static void
mark_mem_store (rtx loc, const_rtx setter ATTRIBUTE_UNUSED, void *data ATTRIBUTE_UNUSED)
{
  if (MEM_P (loc))
    mem_ref_p = true;
}

/* Returns nonzero if INSN writes to memory.  */
static bool
mem_write_insn_p (rtx_insn *insn)
{
  mem_ref_p = false;
  note_stores (insn, mark_mem_store, NULL);
  return mem_ref_p;
}

/* Returns nonzero if X has access to memory.  */
static bool
rtx_mem_access_p (rtx x)
{
  int i, j;
  const char *fmt;
  enum rtx_code code;

  if (x == 0)
    return false;

  if (MEM_P (x))
    return true;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          if (rtx_mem_access_p (XEXP (x, i)))
            return true;
        }
      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          {
            if (rtx_mem_access_p (XVECEXP (x, i, j)))
              return true;
          }
    }
  return false;
}

/* Returns nonzero if INSN reads from or writes to memory.  */
static bool
mem_access_insn_p (rtx_insn *insn)
{
  return rtx_mem_access_p (PATTERN (insn));
}

/* Return true if DEF_INSN contains address being auto-inc or auto-dec
   which is used in USE_INSN.  Otherwise return false.  The result is
   being used to decide whether to remove the edge between def_insn and
   use_insn when -fmodulo-sched-allow-regmoves is set.  This function
   doesn't need to consider the specific address register; no reg_moves
   will be allowed for any life range defined by def_insn and used
   by use_insn, if use_insn uses an address register auto-inc'ed by
   def_insn.  */
bool
autoinc_var_is_used_p (rtx_insn *def_insn, rtx_insn *use_insn)
{
  rtx note;

  for (note = REG_NOTES (def_insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_INC
        && reg_referenced_p (XEXP (note, 0), PATTERN (use_insn)))
      return true;

  return false;
}

/* Return true if one of the definitions in INSN has MODE_CC.  Otherwise
   return false.  */
static bool
def_has_ccmode_p (rtx_insn *insn)
{
  df_ref def;

  FOR_EACH_INSN_DEF (def, insn)
    {
      machine_mode mode = GET_MODE (DF_REF_REG (def));

      if (GET_MODE_CLASS (mode) == MODE_CC)
        return true;
    }

  return false;
}

/* Computes the dependence parameters (latency, distance etc.), creates
   a ddg_edge and adds it to the given DDG.  */
static void
create_ddg_dep_from_intra_loop_link (ddg_ptr g, ddg_node_ptr src_node,
                                     ddg_node_ptr dest_node, dep_t link)
{
  ddg_edge_ptr e;
  int latency, distance = 0;
  dep_type t = TRUE_DEP;
  dep_data_type dt = (mem_access_insn_p (src_node->insn)
                      && mem_access_insn_p (dest_node->insn) ? MEM_DEP
                      : REG_DEP);
  gcc_assert (src_node->cuid < dest_node->cuid);
  gcc_assert (link);

  /* Note: REG_DEP_ANTI applies to MEM ANTI_DEP as well!!  */
  if (DEP_TYPE (link) == REG_DEP_ANTI)
    t = ANTI_DEP;
  else if (DEP_TYPE (link) == REG_DEP_OUTPUT)
    t = OUTPUT_DEP;

  /* We currently choose not to create certain anti-deps edges and
     compensate for that by generating reg-moves based on the life-range
     analysis.  The anti-deps that will be deleted are the ones which
     have true-deps edges in the opposite direction (in other words
     the kernel has only one def of the relevant register).
     If the address that is being auto-inc or auto-dec in DEST_NODE
     is used in SRC_NODE then do not remove the edge to make sure
     reg-moves will not be created for this address.
     TODO: support the removal of all anti-deps edges, i.e. including those
     whose register has multiple defs in the loop.  */
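  /* The check below relies on the reaching-definitions GEN set: if the
     first def of the register in this block is in GEN, it also reaches
     the end of the block, i.e. no later def kills it, so the register
     has a single def in the loop kernel and the anti-dep can be dropped
     and compensated for by reg-moves.  */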
  if (flag_modulo_sched_allow_regmoves
      && (t == ANTI_DEP && dt == REG_DEP)
      && !def_has_ccmode_p (dest_node->insn)
      && !autoinc_var_is_used_p (dest_node->insn, src_node->insn))
    {
      rtx set;

      set = single_set (dest_node->insn);
      /* TODO: Handle registers that REG_P is not true for them, i.e.
         subregs and special registers.  */
      if (set && REG_P (SET_DEST (set)))
        {
          int regno = REGNO (SET_DEST (set));
          df_ref first_def;
          class df_rd_bb_info *bb_info = DF_RD_BB_INFO (g->bb);

          first_def = df_bb_regno_first_def_find (g->bb, regno);
          gcc_assert (first_def);

          if (bitmap_bit_p (&bb_info->gen, DF_REF_ID (first_def)))
            return;
        }
    }

  latency = dep_cost (link);
  e = create_ddg_edge (src_node, dest_node, t, dt, latency, distance);
  add_edge_to_ddg (g, e);
}

/* The same as the above function, but it doesn't require a link parameter.  */
static void
create_ddg_dep_no_link (ddg_ptr g, ddg_node_ptr from, ddg_node_ptr to,
                        dep_type d_t, dep_data_type d_dt, int distance)
{
  ddg_edge_ptr e;
  int l;
  enum reg_note dep_kind;
  struct _dep _dep, *dep = &_dep;

  if (d_t == ANTI_DEP)
    dep_kind = REG_DEP_ANTI;
  else if (d_t == OUTPUT_DEP)
    dep_kind = REG_DEP_OUTPUT;
  else
    {
      gcc_assert (d_t == TRUE_DEP);

      dep_kind = REG_DEP_TRUE;
    }

  init_dep (dep, from->insn, to->insn, dep_kind);

  l = dep_cost (dep);

  e = create_ddg_edge (from, to, d_t, d_dt, l, distance);
  if (distance > 0)
    add_backarc_to_ddg (g, e);
  else
    add_edge_to_ddg (g, e);
}

/* Given a downwards exposed register def LAST_DEF (which is the last
   definition of that register in the bb), add inter-loop true dependences
   to all its uses in the next iteration, an output dependence to the
   first def of the same register (possibly itself) in the next iteration
   and anti-dependences from its uses in the current iteration to the
   first definition in the next iteration.  */
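/* As an illustration, if the loop kernel contains a single insn
   "r = r + x", the use of r is upwards exposed, so a TRUE dependence of
   distance 1 is created from the def of r to its use in the next
   iteration; it is this loop-carried self-recurrence that later bounds
   the recurrence MII of the SCC containing the insn.  */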
static void
add_cross_iteration_register_deps (ddg_ptr g, df_ref last_def)
{
  struct df_link *r_use;
  int has_use_in_bb_p = false;
  int regno = DF_REF_REGNO (last_def);
  ddg_node_ptr last_def_node = get_node_of_insn (g, DF_REF_INSN (last_def));
  df_ref first_def = df_bb_regno_first_def_find (g->bb, regno);
  ddg_node_ptr first_def_node = get_node_of_insn (g, DF_REF_INSN (first_def));
  ddg_node_ptr use_node;

  gcc_assert (last_def_node && first_def && first_def_node);

  if (flag_checking && DF_REF_ID (last_def) != DF_REF_ID (first_def))
    {
      class df_rd_bb_info *bb_info = DF_RD_BB_INFO (g->bb);

      gcc_assert (!bitmap_bit_p (&bb_info->gen, DF_REF_ID (first_def)));
    }

  /* Create inter-loop true dependences and anti dependences.  */
  for (r_use = DF_REF_CHAIN (last_def); r_use != NULL; r_use = r_use->next)
    {
      if (DF_REF_BB (r_use->ref) != g->bb)
        continue;

      gcc_assert (!DF_REF_IS_ARTIFICIAL (r_use->ref)
                  && DF_REF_INSN_INFO (r_use->ref) != NULL);

      rtx_insn *use_insn = DF_REF_INSN (r_use->ref);

      if (DEBUG_INSN_P (use_insn))
        continue;

      /* ??? Do not handle uses with DF_REF_IN_NOTE notes.  */
      use_node = get_node_of_insn (g, use_insn);

      gcc_assert (use_node);
      has_use_in_bb_p = true;
      if (use_node->cuid <= last_def_node->cuid)
        {
          /* Add true deps from last_def to its uses in the next
             iteration.  Any such upwards exposed use appears before
             the last_def def.  */
          create_ddg_dep_no_link (g, last_def_node, use_node,
                                  TRUE_DEP, REG_DEP, 1);
        }
      else
        {
          /* Add anti deps from last_def's uses in the current iteration
             to the first def in the next iteration.  We do not add ANTI
             dep when there is an intra-loop TRUE dep in the opposite
             direction, but use regmoves to fix such disregarded ANTI
             deps when broken.  If the first_def reaches the USE then
             there is such a dep.
             Always create the edge if the use node is a branch in
             order to prevent the creation of reg-moves.
             If the address that is being auto-inc or auto-dec in LAST_DEF
             is used in USE_INSN then do not remove the edge to make sure
             reg-moves will not be created for that address.  */
          if (DF_REF_ID (last_def) != DF_REF_ID (first_def)
              || !flag_modulo_sched_allow_regmoves
              || JUMP_P (use_node->insn)
              || autoinc_var_is_used_p (DF_REF_INSN (last_def), use_insn)
              || def_has_ccmode_p (DF_REF_INSN (last_def)))
            create_ddg_dep_no_link (g, use_node, first_def_node, ANTI_DEP,
                                    REG_DEP, 1);
        }
    }

  /* Create an inter-loop output dependence between LAST_DEF (which is the
     last def in its block, being downwards exposed) and the first def in
     its block.  Avoid creating a self output dependence.  Avoid creating
     an output dependence if there is a dependence path between the two
     defs starting with a true dependence to a use which can be in the
     next iteration; followed by an anti dependence of that use to the
     first def (i.e. if there is a use between the two defs.)  */
  if (!has_use_in_bb_p && DF_REF_ID (last_def) != DF_REF_ID (first_def))
    create_ddg_dep_no_link (g, last_def_node, first_def_node,
                            OUTPUT_DEP, REG_DEP, 1);
}

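/* In the reaching-definitions problem the GEN set of the loop body holds
   exactly its downwards exposed defs - the last def of each register in
   the block - which are the defs whose values may be live across the loop
   back edge; those are the defs that build_inter_loop_deps below feeds to
   add_cross_iteration_register_deps.  */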
/* Build inter-loop dependencies, by looking at DF analysis backwards.  */
static void
build_inter_loop_deps (ddg_ptr g)
{
  unsigned rd_num;
  class df_rd_bb_info *rd_bb_info;
  bitmap_iterator bi;

  rd_bb_info = DF_RD_BB_INFO (g->bb);

  /* Find inter-loop register output, true and anti deps.  */
  EXECUTE_IF_SET_IN_BITMAP (&rd_bb_info->gen, 0, rd_num, bi)
  {
    df_ref rd = DF_DEFS_GET (rd_num);

    add_cross_iteration_register_deps (g, rd);
  }
}

/* Return true if the two specified instructions have memory expressions
   with conflicting alias sets.  */
static bool
insns_may_alias_p (rtx_insn *insn1, rtx_insn *insn2)
{
  subrtx_iterator::array_type array1;
  subrtx_iterator::array_type array2;
  FOR_EACH_SUBRTX (iter1, array1, PATTERN (insn1), NONCONST)
    {
      const_rtx x1 = *iter1;
      if (MEM_P (x1))
        FOR_EACH_SUBRTX (iter2, array2, PATTERN (insn2), NONCONST)
          {
            const_rtx x2 = *iter2;
            if (MEM_P (x2) && may_alias_p (x2, x1))
              return true;
          }
    }
  return false;
}

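/* The dependence kind between two memory accesses follows the usual
   classification: write before read is a TRUE dep, write before write an
   OUTPUT dep, read before write an ANTI dep, and read before read needs
   no edge.  The two helpers below apply this to aliasing accesses within
   a single iteration (distance 0) and across consecutive iterations
   (distance 1) respectively.  */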
/* Given two nodes, analyze their RTL insns and add intra-loop mem deps
   to ddg G.  */
static void
add_intra_loop_mem_dep (ddg_ptr g, ddg_node_ptr from, ddg_node_ptr to)
{
  if ((from->cuid == to->cuid)
      || !insns_may_alias_p (from->insn, to->insn))
    /* Do not create edge if memory references have disjoint alias sets
       or 'to' and 'from' are the same instruction.  */
    return;

  if (mem_write_insn_p (from->insn))
    {
      if (mem_read_insn_p (to->insn))
        create_ddg_dep_no_link (g, from, to, TRUE_DEP, MEM_DEP, 0);
      else
        create_ddg_dep_no_link (g, from, to, OUTPUT_DEP, MEM_DEP, 0);
    }
  else if (!mem_read_insn_p (to->insn))
    create_ddg_dep_no_link (g, from, to, ANTI_DEP, MEM_DEP, 0);
}

/* Given two nodes, analyze their RTL insns and add inter-loop mem deps
   to ddg G.  */
static void
add_inter_loop_mem_dep (ddg_ptr g, ddg_node_ptr from, ddg_node_ptr to)
{
  if (!insns_may_alias_p (from->insn, to->insn))
    /* Do not create edge if memory references have disjoint alias sets.  */
    return;

  if (mem_write_insn_p (from->insn))
    {
      if (mem_read_insn_p (to->insn))
        create_ddg_dep_no_link (g, from, to, TRUE_DEP, MEM_DEP, 1);
      else if (from->cuid != to->cuid)
        create_ddg_dep_no_link (g, from, to, OUTPUT_DEP, MEM_DEP, 1);
    }
  else
    {
      if (mem_read_insn_p (to->insn))
        return;
      else if (from->cuid != to->cuid)
        {
          create_ddg_dep_no_link (g, from, to, ANTI_DEP, MEM_DEP, 1);
          create_ddg_dep_no_link (g, to, from, TRUE_DEP, MEM_DEP, 1);
        }
    }
}

/* Perform intra-block Data Dependency analysis and connect the nodes in
   the DDG.  We assume the loop has a single basic block.  */
static void
build_intra_loop_deps (ddg_ptr g)
{
  int i;
  /* Hold the dependency analysis state during dependency calculations.  */
  class deps_desc tmp_deps;
  rtx_insn *head, *tail;

  /* Build the dependence information, using the sched_analyze function.  */
  init_deps_global ();
  init_deps (&tmp_deps, false);

  /* Do the intra-block data dependence analysis for the given block.  */
  get_ebb_head_tail (g->bb, g->bb, &head, &tail);
  sched_analyze (&tmp_deps, head, tail);

  /* Build intra-loop data dependencies using the scheduler dependency
     analysis.  */
  for (i = 0; i < g->num_nodes; i++)
    {
      ddg_node_ptr dest_node = &g->nodes[i];
      sd_iterator_def sd_it;
      dep_t dep;

      FOR_EACH_DEP (dest_node->insn, SD_LIST_BACK, sd_it, dep)
        {
          rtx_insn *src_insn = DEP_PRO (dep);
          ddg_node_ptr src_node = get_node_of_insn (g, src_insn);

          if (!src_node)
            continue;

          create_ddg_dep_from_intra_loop_link (g, src_node, dest_node, dep);
        }

      /* If this insn modifies memory, add an edge to all insns that access
         memory.  */
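      /* Only nodes 0..i (insns at or before DEST_NODE in the block) are
         paired with it here: ordinary intra-loop memory deps were already
         produced by sched_analyze above, so this loop adds the loop-carried
         (distance 1) deps from DEST_NODE to earlier memory insns, plus
         extra intra-loop edges when -fmodulo-sched-allow-regmoves may have
         elided anti-deps.  */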
      if (mem_access_insn_p (dest_node->insn))
        {
          int j;

          for (j = 0; j <= i; j++)
            {
              ddg_node_ptr j_node = &g->nodes[j];

              if (mem_access_insn_p (j_node->insn))
                {
                  /* Don't bother calculating inter-loop dep if an intra-loop dep
                     already exists.  */
                  if (! bitmap_bit_p (dest_node->successors, j))
                    add_inter_loop_mem_dep (g, dest_node, j_node);
                  /* If -fmodulo-sched-allow-regmoves
                     is set certain anti-dep edges are not created.
                     It might be that these anti-dep edges are on the
                     path from one memory instruction to another such that
                     removing these edges could cause a violation of the
                     memory dependencies.  Thus we add intra edges between
                     every two memory instructions in this case.  */
                  if (flag_modulo_sched_allow_regmoves
                      && !bitmap_bit_p (dest_node->predecessors, j))
                    add_intra_loop_mem_dep (g, j_node, dest_node);
                }
            }
        }
    }

  /* Free the INSN_LISTs.  */
  finish_deps_global ();
  free_deps (&tmp_deps);

  /* Free dependencies.  */
  sched_free_deps (head, tail, false);
}

/* Given a basic block, create its DDG and return a pointer to a variable
   of ddg type that represents it.
   Initialize the ddg structure fields to the appropriate values.  */
ddg_ptr
create_ddg (basic_block bb, int closing_branch_deps)
{
  ddg_ptr g;
  rtx_insn *insn, *first_note;
  int i, j;
  int num_nodes = 0;

  g = (ddg_ptr) xcalloc (1, sizeof (struct ddg));

  g->bb = bb;
  g->closing_branch_deps = closing_branch_deps;

  /* Count the number of insns in the BB.  */
  for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
       insn = NEXT_INSN (insn))
    {
      if (!INSN_P (insn) || GET_CODE (PATTERN (insn)) == USE)
        continue;

      if (NONDEBUG_INSN_P (insn))
        {
          if (mem_read_insn_p (insn))
            g->num_loads++;
          if (mem_write_insn_p (insn))
            g->num_stores++;
          num_nodes++;
        }
    }

  /* There is nothing to do for this BB.  */
  if (num_nodes <= 1)
    {
      free (g);
      return NULL;
    }

  /* Allocate the nodes array, and initialize the nodes.  */
  g->num_nodes = num_nodes;
  g->nodes = (ddg_node_ptr) xcalloc (num_nodes, sizeof (struct ddg_node));
  g->closing_branch = NULL;
  i = 0;
  first_note = NULL;
  for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
       insn = NEXT_INSN (insn))
    {
      if (LABEL_P (insn) || NOTE_INSN_BASIC_BLOCK_P (insn))
        continue;

      if (!first_note && (INSN_P (insn) || NOTE_P (insn)))
        first_note = insn;

      if (!INSN_P (insn) || GET_CODE (PATTERN (insn)) == USE)
        continue;

      if (JUMP_P (insn))
        {
          gcc_assert (!g->closing_branch);
          g->closing_branch = &g->nodes[i];
        }

      if (NONDEBUG_INSN_P (insn))
        {
          g->nodes[i].cuid = i;
          g->nodes[i].successors = sbitmap_alloc (num_nodes);
          bitmap_clear (g->nodes[i].successors);
          g->nodes[i].predecessors = sbitmap_alloc (num_nodes);
          bitmap_clear (g->nodes[i].predecessors);

          gcc_checking_assert (first_note);
          g->nodes[i].first_note = first_note;

          g->nodes[i].aux.count = -1;
          g->nodes[i].max_dist = XCNEWVEC (int, num_nodes);
          for (j = 0; j < num_nodes; j++)
            g->nodes[i].max_dist[j] = -1;

          g->nodes[i++].insn = insn;
          first_note = NULL;
        }
    }

  /* We must have found a branch in DDG.  */
  gcc_assert (g->closing_branch);

  /* Build the data dependency graph.  */
  build_intra_loop_deps (g);
  build_inter_loop_deps (g);
  return g;
}

/* Free all the memory allocated for the DDG.  */
void
free_ddg (ddg_ptr g)
{
  int i;

  if (!g)
    return;

  for (i = 0; i < g->num_nodes; i++)
    {
      ddg_edge_ptr e = g->nodes[i].out;

      while (e)
        {
          ddg_edge_ptr next = e->next_out;

          free (e);
          e = next;
        }
      sbitmap_free (g->nodes[i].successors);
      sbitmap_free (g->nodes[i].predecessors);
      free (g->nodes[i].max_dist);
    }
  if (g->num_backarcs > 0)
    free (g->backarcs);
  free (g->nodes);
  free (g);
}

void
print_ddg_edge (FILE *file, ddg_edge_ptr e)
{
  char dep_c;

  switch (e->type)
    {
    case OUTPUT_DEP :
      dep_c = 'O';
      break;
    case ANTI_DEP :
      dep_c = 'A';
      break;
    default:
      dep_c = 'T';
    }

  fprintf (file, " [%d -(%c,%d,%d)-> %d] ", INSN_UID (e->src->insn),
           dep_c, e->latency, e->distance, INSN_UID (e->dest->insn));
}

/* Print the DDG nodes with their in/out edges to the dump file.  */
void
print_ddg (FILE *file, ddg_ptr g)
{
  int i;

  for (i = 0; i < g->num_nodes; i++)
    {
      ddg_edge_ptr e;

      fprintf (file, "Node num: %d\n", g->nodes[i].cuid);
      print_rtl_single (file, g->nodes[i].insn);
      fprintf (file, "OUT ARCS: ");
      for (e = g->nodes[i].out; e; e = e->next_out)
        print_ddg_edge (file, e);

      fprintf (file, "\nIN ARCS: ");
      for (e = g->nodes[i].in; e; e = e->next_in)
        print_ddg_edge (file, e);

      fprintf (file, "\n");
    }
}

/* Print the given DDG in VCG format.  */
DEBUG_FUNCTION void
vcg_print_ddg (FILE *file, ddg_ptr g)
{
  int src_cuid;

  fprintf (file, "graph: {\n");
  for (src_cuid = 0; src_cuid < g->num_nodes; src_cuid++)
    {
      ddg_edge_ptr e;
      int src_uid = INSN_UID (g->nodes[src_cuid].insn);

      fprintf (file, "node: {title: \"%d_%d\" info1: \"", src_cuid, src_uid);
      print_rtl_single (file, g->nodes[src_cuid].insn);
      fprintf (file, "\"}\n");
      for (e = g->nodes[src_cuid].out; e; e = e->next_out)
        {
          int dst_uid = INSN_UID (e->dest->insn);
          int dst_cuid = e->dest->cuid;

          /* Give the backarcs a different color.  */
          if (e->distance > 0)
            fprintf (file, "backedge: {color: red ");
          else
            fprintf (file, "edge: { ");

          fprintf (file, "sourcename: \"%d_%d\" ", src_cuid, src_uid);
          fprintf (file, "targetname: \"%d_%d\" ", dst_cuid, dst_uid);
          fprintf (file, "label: \"%d_%d\"}\n", e->latency, e->distance);
        }
    }
  fprintf (file, "}\n");
}

/* Dump the sccs in SCCS.  */
void
print_sccs (FILE *file, ddg_all_sccs_ptr sccs, ddg_ptr g)
{
  unsigned int u = 0;
  sbitmap_iterator sbi;
  int i;

  if (!file)
    return;

  fprintf (file, "\n;; Number of SCC nodes - %d\n", sccs->num_sccs);
  for (i = 0; i < sccs->num_sccs; i++)
    {
      fprintf (file, "SCC number: %d\n", i);
      EXECUTE_IF_SET_IN_BITMAP (sccs->sccs[i]->nodes, 0, u, sbi)
        {
          fprintf (file, "insn num %d\n", u);
          print_rtl_single (file, g->nodes[u].insn);
        }
    }
  fprintf (file, "\n");
}

/* Create an edge and initialize it with given values.  */
static ddg_edge_ptr
create_ddg_edge (ddg_node_ptr src, ddg_node_ptr dest,
                 dep_type t, dep_data_type dt, int l, int d)
{
  ddg_edge_ptr e = (ddg_edge_ptr) xmalloc (sizeof (struct ddg_edge));

  e->src = src;
  e->dest = dest;
  e->type = t;
  e->data_type = dt;
  e->latency = l;
  e->distance = d;
  e->next_in = e->next_out = NULL;
  e->in_scc = false;
  return e;
}

/* Add the given edge to the in/out linked lists of the DDG nodes.  */
static void
add_edge_to_ddg (ddg_ptr g ATTRIBUTE_UNUSED, ddg_edge_ptr e)
{
  ddg_node_ptr src = e->src;
  ddg_node_ptr dest = e->dest;

  /* Should have allocated the sbitmaps.  */
  gcc_assert (src->successors && dest->predecessors);

  bitmap_set_bit (src->successors, dest->cuid);
  bitmap_set_bit (dest->predecessors, src->cuid);
  e->next_in = dest->in;
  dest->in = e;
  e->next_out = src->out;
  src->out = e;
}

/* Algorithm for computing the recurrence_length of an scc.  We assume for
   now that cycles in the data dependence graph contain a single backarc.
   This simplifies the algorithm, and can be generalized later.  */
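/* For example, a backarc of latency 1 and distance 1 closing a cycle
   whose longest forward path has total latency 5 gives a recurrence
   length of (5 + 1) / 1 = 6 cycles; if the same cycle spanned two
   iterations (distance 2) it would only contribute (5 + 1) / 2 = 3.  */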
static void
set_recurrence_length (ddg_scc_ptr scc)
{
  int j;
  int result = -1;

  for (j = 0; j < scc->num_backarcs; j++)
    {
      ddg_edge_ptr backarc = scc->backarcs[j];
      int distance = backarc->distance;
      ddg_node_ptr src = backarc->dest;
      ddg_node_ptr dest = backarc->src;
      int length = src->max_dist[dest->cuid];

      if (length < 0)
        continue;

      length += backarc->latency;
      result = MAX (result, (length / distance));
    }
  scc->recurrence_length = result;
}

/* Create a new SCC given the set of its nodes.  Mark the edges and
   backarcs that belong to this SCC; its recurrence_length is computed
   later, once the max_dist information is available.  */
static ddg_scc_ptr
create_scc (ddg_ptr g, sbitmap nodes, int id)
{
  ddg_scc_ptr scc;
  unsigned int u = 0;
  sbitmap_iterator sbi;

  scc = (ddg_scc_ptr) xmalloc (sizeof (struct ddg_scc));
  scc->backarcs = NULL;
  scc->num_backarcs = 0;
  scc->nodes = sbitmap_alloc (g->num_nodes);
  bitmap_copy (scc->nodes, nodes);

  /* Mark the backarcs that belong to this SCC.  */
  EXECUTE_IF_SET_IN_BITMAP (nodes, 0, u, sbi)
    {
      ddg_edge_ptr e;
      ddg_node_ptr n = &g->nodes[u];

      gcc_assert (n->aux.count == -1);
      n->aux.count = id;

      for (e = n->out; e; e = e->next_out)
        if (bitmap_bit_p (nodes, e->dest->cuid))
          {
            e->in_scc = true;
            if (e->distance > 0)
              add_backarc_to_scc (scc, e);
          }
    }

  return scc;
}

/* Cleans the memory allocation of a given SCC.  */
static void
free_scc (ddg_scc_ptr scc)
{
  if (!scc)
    return;

  sbitmap_free (scc->nodes);
  if (scc->num_backarcs > 0)
    free (scc->backarcs);
  free (scc);
}

/* Add a given edge known to be a backarc to the given DDG.  */
static void
add_backarc_to_ddg (ddg_ptr g, ddg_edge_ptr e)
{
  int size = (g->num_backarcs + 1) * sizeof (ddg_edge_ptr);

  add_edge_to_ddg (g, e);
  g->backarcs = (ddg_edge_ptr *) xrealloc (g->backarcs, size);
  g->backarcs[g->num_backarcs++] = e;
}

/* Add backarc to an SCC.  */
static void
add_backarc_to_scc (ddg_scc_ptr scc, ddg_edge_ptr e)
{
  int size = (scc->num_backarcs + 1) * sizeof (ddg_edge_ptr);

  scc->backarcs = (ddg_edge_ptr *) xrealloc (scc->backarcs, size);
  scc->backarcs[scc->num_backarcs++] = e;
}

/* Add the given SCC to the DDG.  */
static void
add_scc_to_ddg (ddg_all_sccs_ptr g, ddg_scc_ptr scc)
{
  int size = (g->num_sccs + 1) * sizeof (ddg_scc_ptr);

  g->sccs = (ddg_scc_ptr *) xrealloc (g->sccs, size);
  g->sccs[g->num_sccs++] = scc;
}

/* Given the instruction INSN return the node that represents it.  */
ddg_node_ptr
get_node_of_insn (ddg_ptr g, rtx_insn *insn)
{
  int i;

  for (i = 0; i < g->num_nodes; i++)
    if (insn == g->nodes[i].insn)
      return &g->nodes[i];
  return NULL;
}

/* Given a set OPS of nodes in the DDG, find the set of their successors
   which are not in OPS, and set their bits in SUCC.  Bits corresponding to
   OPS are cleared from SUCC.  Leaves the other bits in SUCC unchanged.  */
void
find_successors (sbitmap succ, ddg_ptr g, sbitmap ops)
{
  unsigned int i = 0;
  sbitmap_iterator sbi;

  EXECUTE_IF_SET_IN_BITMAP (ops, 0, i, sbi)
    {
      const sbitmap node_succ = NODE_SUCCESSORS (&g->nodes[i]);
      bitmap_ior (succ, succ, node_succ);
    }

  /* We want those that are not in ops.  */
  bitmap_and_compl (succ, succ, ops);
}

/* Given a set OPS of nodes in the DDG, find the set of their predecessors
   which are not in OPS, and set their bits in PREDS.  Bits corresponding to
   OPS are cleared from PREDS.  Leaves the other bits in PREDS unchanged.  */
void
find_predecessors (sbitmap preds, ddg_ptr g, sbitmap ops)
{
  unsigned int i = 0;
  sbitmap_iterator sbi;

  EXECUTE_IF_SET_IN_BITMAP (ops, 0, i, sbi)
    {
      const sbitmap node_preds = NODE_PREDECESSORS (&g->nodes[i]);
      bitmap_ior (preds, preds, node_preds);
    }

  /* We want those that are not in ops.  */
  bitmap_and_compl (preds, preds, ops);
}

/* Compare function to be passed to qsort to order the SCCs in descending
   recMII order.  */
static int
compare_sccs (const void *s1, const void *s2)
{
  const int rec_l1 = (*(const ddg_scc_ptr *)s1)->recurrence_length;
  const int rec_l2 = (*(const ddg_scc_ptr *)s2)->recurrence_length;
  return ((rec_l2 > rec_l1) - (rec_l2 < rec_l1));
}

/* Order the SCCs in descending recMII order using compare_sccs.  */
static void
order_sccs (ddg_all_sccs_ptr g)
{
  qsort (g->sccs, g->num_sccs, sizeof (ddg_scc_ptr),
         (int (*) (const void *, const void *)) compare_sccs);
}

/* Check that every node in SCCS belongs to exactly one strongly connected
   component and that no element of SCCS is empty.  */
static void
check_sccs (ddg_all_sccs_ptr sccs, int num_nodes)
{
  int i = 0;
  auto_sbitmap tmp (num_nodes);

  bitmap_clear (tmp);
  for (i = 0; i < sccs->num_sccs; i++)
    {
      gcc_assert (!bitmap_empty_p (sccs->sccs[i]->nodes));
      /* Verify that every node in sccs is in exactly one strongly
         connected component.  */
      gcc_assert (!bitmap_intersect_p (tmp, sccs->sccs[i]->nodes));
      bitmap_ior (tmp, tmp, sccs->sccs[i]->nodes);
    }
}

/* Perform the Strongly Connected Components decomposition algorithm on the
   DDG and return a DDG_ALL_SCCS structure that contains them.  */
ddg_all_sccs_ptr
create_ddg_all_sccs (ddg_ptr g)
{
  int i, j, k, scc, way;
  int num_nodes = g->num_nodes;
  auto_sbitmap from (num_nodes);
  auto_sbitmap to (num_nodes);
  auto_sbitmap scc_nodes (num_nodes);
  ddg_all_sccs_ptr sccs = (ddg_all_sccs_ptr)
    xmalloc (sizeof (struct ddg_all_sccs));

  sccs->ddg = g;
  sccs->sccs = NULL;
  sccs->num_sccs = 0;

  for (i = 0; i < g->num_backarcs; i++)
    {
      ddg_scc_ptr scc;
      ddg_edge_ptr backarc = g->backarcs[i];
      ddg_node_ptr src = backarc->src;
      ddg_node_ptr dest = backarc->dest;

      /* If the backarc already belongs to an SCC, continue.  */
      if (backarc->in_scc)
        continue;

      bitmap_clear (scc_nodes);
      bitmap_clear (from);
      bitmap_clear (to);
      bitmap_set_bit (from, dest->cuid);
      bitmap_set_bit (to, src->cuid);

      if (find_nodes_on_paths (scc_nodes, g, from, to))
        {
          scc = create_scc (g, scc_nodes, sccs->num_sccs);
          add_scc_to_ddg (sccs, scc);
        }
    }

  /* Init max_dist arrays for Floyd-Warshall-like
     longest path calculation algorithm.  */
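  /* max_dist[i][j] will hold the maximum total latency of a path from
     node i to node j that uses only distance-0 edges between nodes of
     the same SCC; -1 means no such path is known.  */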
  for (k = 0; k < num_nodes; k++)
    {
      ddg_edge_ptr e;
      ddg_node_ptr n = &g->nodes[k];

      if (n->aux.count == -1)
        continue;

      n->max_dist[k] = 0;
      for (e = n->out; e; e = e->next_out)
        if (e->distance == 0 && g->nodes[e->dest->cuid].aux.count == n->aux.count)
          n->max_dist[e->dest->cuid] = e->latency;
    }

  /* Run main Floyd-Warshall loop.  We use only non-backarc edges
     inside each scc.  */
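  /* This is the classic relaxation
       max_dist[i][j] = MAX (max_dist[i][j],
                             max_dist[i][k] + max_dist[k][j])
     restricted to nodes of the same SCC.  Taking the maximum is safe
     because distance-0 edges always go from a lower to a higher cuid,
     so the subgraph being walked is acyclic.  */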
  for (k = 0; k < num_nodes; k++)
    {
      scc = g->nodes[k].aux.count;
      if (scc != -1)
        {
          for (i = 0; i < num_nodes; i++)
            if (g->nodes[i].aux.count == scc)
              for (j = 0; j < num_nodes; j++)
                if (g->nodes[j].aux.count == scc
                    && g->nodes[i].max_dist[k] >= 0
                    && g->nodes[k].max_dist[j] >= 0)
                  {
                    way = g->nodes[i].max_dist[k] + g->nodes[k].max_dist[j];
                    if (g->nodes[i].max_dist[j] < way)
                      g->nodes[i].max_dist[j] = way;
                  }
        }
    }

  /* Calculate recurrence_length using max_dist info.  */
  for (i = 0; i < sccs->num_sccs; i++)
    set_recurrence_length (sccs->sccs[i]);

  order_sccs (sccs);

  if (flag_checking)
    check_sccs (sccs, num_nodes);

  return sccs;
}

/* Frees the memory allocated for all SCCs of the DDG, but keeps the DDG.  */
void
free_ddg_all_sccs (ddg_all_sccs_ptr all_sccs)
{
  int i;

  if (!all_sccs)
    return;

  for (i = 0; i < all_sccs->num_sccs; i++)
    free_scc (all_sccs->sccs[i]);

  free (all_sccs->sccs);
  free (all_sccs);
}

/* Given FROM - a bitmap of source nodes - and TO - a bitmap of destination
   nodes - find all nodes that lie on paths from FROM to TO (not excluding
   nodes from FROM and TO).  Return nonzero if nodes exist.  */
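/* The set is computed as two fixed-point reachability sweeps: one grows
   REACHABLE_FROM forward along out-edges starting at FROM, the other
   grows REACH_TO backward along in-edges starting at TO, and the result
   is the intersection of the two sets.  */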
int
find_nodes_on_paths (sbitmap result, ddg_ptr g, sbitmap from, sbitmap to)
{
  int change;
  unsigned int u = 0;
  int num_nodes = g->num_nodes;
  sbitmap_iterator sbi;

  auto_sbitmap workset (num_nodes);
  auto_sbitmap reachable_from (num_nodes);
  auto_sbitmap reach_to (num_nodes);
  auto_sbitmap tmp (num_nodes);

  bitmap_copy (reachable_from, from);
  bitmap_copy (tmp, from);

  change = 1;
  while (change)
    {
      change = 0;
      bitmap_copy (workset, tmp);
      bitmap_clear (tmp);
      EXECUTE_IF_SET_IN_BITMAP (workset, 0, u, sbi)
        {
          ddg_edge_ptr e;
          ddg_node_ptr u_node = &g->nodes[u];

          for (e = u_node->out; e != (ddg_edge_ptr) 0; e = e->next_out)
            {
              ddg_node_ptr v_node = e->dest;
              int v = v_node->cuid;

              if (!bitmap_bit_p (reachable_from, v))
                {
                  bitmap_set_bit (reachable_from, v);
                  bitmap_set_bit (tmp, v);
                  change = 1;
                }
            }
        }
    }

  bitmap_copy (reach_to, to);
  bitmap_copy (tmp, to);

  change = 1;
  while (change)
    {
      change = 0;
      bitmap_copy (workset, tmp);
      bitmap_clear (tmp);
      EXECUTE_IF_SET_IN_BITMAP (workset, 0, u, sbi)
        {
          ddg_edge_ptr e;
          ddg_node_ptr u_node = &g->nodes[u];

          for (e = u_node->in; e != (ddg_edge_ptr) 0; e = e->next_in)
            {
              ddg_node_ptr v_node = e->src;
              int v = v_node->cuid;

              if (!bitmap_bit_p (reach_to, v))
                {
                  bitmap_set_bit (reach_to, v);
                  bitmap_set_bit (tmp, v);
                  change = 1;
                }
            }
        }
    }

  return bitmap_and (result, reachable_from, reach_to);
}

#endif /* INSN_SCHEDULING */