From 43a0881d501ee95a1496d45dce4cf726dbeb7028 Mon Sep 17 00:00:00 2001
From: mmitchel
Date: Fri, 11 May 2001 15:49:30 +0000
Subject: [PATCH] * loop.h (struct movables): Remove `num'.

        * loop.c (scan_loop): Don't set it.  Use count_insns_in_loop.
        Adjust call to strength_reduce.
        (num_unmoved_movables): New function.
        (move_movables): Don't set movables->num.
        (strength_reduce): Compute the number of instructions in the loop
        here, rather than in the caller.
        (check_dbra_loop): Always clear reversible_mem_store if there
        are any memory sets.
        (loop_regs_scan): Don't count instructions here.
        (count_insn_in_loop): New function.

git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/branches/gcc-3_0-branch@41965 138bc75d-0d04-0410-961f-82ee72b054a4
---
 gcc/ChangeLog | 12 +++++++++
 gcc/loop.c    | 82 +++++++++++++++++++++++++++++++++++++----------------------
 gcc/loop.h    |  2 --
 3 files changed, 64 insertions(+), 32 deletions(-)

diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index 00ab333979c..bd2580ac1c7 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,5 +1,17 @@
 2001-05-11  Mark Mitchell
 
+        * loop.h (struct movables): Remove `num'.
+        * loop.c (scan_loop): Don't set it.  Use count_insns_in_loop.
+        Adjust call to strength_reduce.
+        (num_unmoved_movables): New function.
+        (move_movables): Don't set movables->num.
+        (strength_reduce): Compute the number of instructions in the loop
+        here, rather than in the caller.
+        (check_dbra_loop): Always clear reversible_mem_store if there
+        are any memory sets.
+        (loop_regs_scan): Don't count instructions here.
+        (count_insn_in_loop): New function.
+
         * optabs.c (emit_libcall_block): Don't mark calls as
         CONST_CALL_P.
 
 2001-05-10  David Edelsohn
diff --git a/gcc/loop.c b/gcc/loop.c
index 1aca51adccd..d9f404f8c5c 100644
--- a/gcc/loop.c
+++ b/gcc/loop.c
@@ -166,6 +166,7 @@ static void ignore_some_movables PARAMS ((struct loop_movables *));
 static void force_movables PARAMS ((struct loop_movables *));
 static void combine_movables PARAMS ((struct loop_movables *,
                                       struct loop_regs *));
+static int num_unmoved_movables PARAMS ((const struct loop *));
 static int regs_match_p PARAMS ((rtx, rtx, struct loop_movables *));
 static int rtx_equal_for_loop_p PARAMS ((rtx, rtx, struct loop_movables *,
                                          struct loop_regs *));
@@ -190,7 +191,7 @@ static void loop_givs_reduce PARAMS((struct loop *, struct iv_class *));
 static void loop_givs_rescan PARAMS((struct loop *, struct iv_class *,
                                      rtx *));
 static void loop_ivs_free PARAMS((struct loop *));
-static void strength_reduce PARAMS ((struct loop *, int, int));
+static void strength_reduce PARAMS ((struct loop *, int));
 static void find_single_use_in_loop PARAMS ((struct loop_regs *, rtx, rtx));
 static int valid_initial_value_p PARAMS ((rtx, rtx, int, rtx));
 static void find_mem_givs PARAMS ((const struct loop *, rtx, rtx, int, int));
@@ -233,7 +234,8 @@ static int last_use_this_basic_block PARAMS ((rtx, rtx));
 static void record_initial PARAMS ((rtx, rtx, void *));
 static void update_reg_last_use PARAMS ((rtx, rtx));
 static rtx next_insn_in_loop PARAMS ((const struct loop *, rtx));
-static void loop_regs_scan PARAMS ((const struct loop*, int, int *));
+static void loop_regs_scan PARAMS ((const struct loop *, int));
+static int count_insns_in_loop PARAMS ((const struct loop *));
 static void load_mems PARAMS ((const struct loop *));
 static int insert_loop_mem PARAMS ((rtx *, void *));
 static int replace_loop_mem PARAMS ((rtx *, void *));
@@ -551,7 +553,6 @@ scan_loop (loop, flags)
 
   movables->head = 0;
   movables->last = 0;
-  movables->num = 0;
 
   /* Determine whether this loop starts with a jump down to a test at
      the end.  This will occur for a small number of loops with a test
@@ -638,7 +639,8 @@ scan_loop (loop, flags)
   /* Allocate extra space for REGs that might be created by
      load_mems.  We allocate a little extra slop as well, in the hopes
      that we won't have to reallocate the regs array.  */
-  loop_regs_scan (loop, loop_info->mems_idx + 16, &insn_count);
+  loop_regs_scan (loop, loop_info->mems_idx + 16);
+  insn_count = count_insns_in_loop (loop);
 
   if (loop_dump_stream)
     {
@@ -1007,7 +1009,7 @@ scan_loop (loop, flags)
   /* Recalculate regs->array if load_mems has created new
      registers.  */
   if (max_reg_num () > regs->num)
-    loop_regs_scan (loop, 0, &insn_count);
+    loop_regs_scan (loop, 0);
 
   for (update_start = loop_start;
        PREV_INSN (update_start)
@@ -1025,7 +1027,7 @@ scan_loop (loop, flags)
       /* Ensure our label doesn't go away.  */
       LABEL_NUSES (update_end)++;
 
-      strength_reduce (loop, insn_count, flags);
+      strength_reduce (loop, flags);
 
       reg_scan_update (update_start, update_end, loop_max_reg);
       loop_max_reg = max_reg_num ();
@@ -1422,6 +1424,24 @@ combine_movables (movables, regs)
   /* Clean up.  */
   free (matched_regs);
 }
+
+/* Returns the number of movable instructions in LOOP that were not
+   moved outside the loop.  */
+
+static int
+num_unmoved_movables (loop)
+     const struct loop *loop;
+{
+  int num = 0;
+  struct movable *m;
+
+  for (m = LOOP_MOVABLES (loop)->head; m; m = m->next)
+    if (!m->done)
+      ++num;
+
+  return num;
+}
+
 
 /* Return 1 if regs X and Y will become the same if moved.  */
 
@@ -1635,8 +1655,6 @@ move_movables (loop, movables, threshold, insn_count)
   rtx *reg_map = (rtx *) xcalloc (nregs, sizeof (rtx));
   char *already_moved = (char *) xcalloc (nregs, sizeof (char));
 
-  movables->num = 0;
-
   for (m = movables->head; m; m = m->next)
     {
       /* Describe this movable insn.  */
@@ -1665,9 +1683,6 @@ move_movables (loop, movables, threshold, insn_count)
                      INSN_UID (m->forces->insn));
         }
 
-      /* Count movables.  Value used in heuristics in strength_reduce.  */
-      movables->num++;
-
       /* Ignore the insn if it's already done (it matched something else).
          Otherwise, see if it is now safe to move.  */
 
@@ -4201,9 +4216,8 @@ loop_ivs_free (loop)
    must check regnos to make sure they are in bounds.  */
 
 static void
-strength_reduce (loop, insn_count, flags)
+strength_reduce (loop, flags)
      struct loop *loop;
-     int insn_count;
      int flags;
 {
   struct loop_info *loop_info = LOOP_INFO (loop);
@@ -4223,6 +4237,7 @@ strength_reduce (loop, insn_count, flags)
   int reg_map_size;
   int unrolled_insn_copies = 0;
   rtx test_reg = gen_rtx_REG (word_mode, LAST_VIRTUAL_REGISTER + 1);
+  int insn_count = count_insns_in_loop (loop);
 
   addr_placeholder = gen_reg_rtx (Pmode);
 
@@ -7326,12 +7341,11 @@ check_dbra_loop (loop, insn_count)
             {
               struct induction *v;
 
-              reversible_mem_store
-                = (! loop_info->unknown_address_altered
-                   && ! loop_info->unknown_constant_address_altered
-                   && ! loop_invariant_p (loop,
-                                          XEXP (XEXP (loop_info->store_mems, 0),
-                                                0)));
+              /* If we could prove that each of the memory locations
+                 written to was different, then we could reverse the
+                 store -- but we don't presently have any way of
+                 knowing that.  */
+              reversible_mem_store = 0;
 
               /* If the store depends on a register that is set after the
                  store, it depends on the initial value, and is thus not
@@ -7363,7 +7377,7 @@ check_dbra_loop (loop, insn_count)
            && ! loop_info->has_volatile
            && reversible_mem_store
            && (bl->giv_count + bl->biv_count + loop_info->num_mem_sets
-               + LOOP_MOVABLES (loop)->num + compare_and_branch == insn_count)
+               + num_unmoved_movables (loop) + compare_and_branch == insn_count)
            && (bl == ivs->list && bl->next == 0))
           || no_use_except_counting)
         {
@@ -8713,16 +8727,12 @@ insert_loop_mem (mem, data)
    parameter may be zero, in which case this processing is not done.
 
    Set REGS->ARRAY[I].MAY_NOT_OPTIMIZE nonzero if we should not
-   optimize register I.
-
-   Store in *COUNT_PTR the number of actual instructions
-   in the loop.  We use this to decide what is worth moving out.  */
+   optimize register I.  */
 
 static void
-loop_regs_scan (loop, extra_size, count_ptr)
+loop_regs_scan (loop, extra_size)
      const struct loop *loop;
      int extra_size;
-     int *count_ptr;
 {
   struct loop_regs *regs = LOOP_REGS (loop);
   int old_nregs;
@@ -8730,7 +8740,6 @@ loop_regs_scan (loop, extra_size, count_ptr)
      basic block.  In that case, it is the insn that last set reg n.  */
   rtx *last_set;
   rtx insn;
-  int count = 0;
   int i;
 
   old_nregs = regs->num;
@@ -8765,8 +8774,6 @@ loop_regs_scan (loop, extra_size, count_ptr)
     {
       if (INSN_P (insn))
         {
-          ++count;
-
           /* Record registers that have exactly one use.  */
           find_single_use_in_loop (regs, insn, PATTERN (insn));
 
@@ -8809,9 +8816,24 @@ loop_regs_scan (loop, extra_size, count_ptr)
       regs->array[i].n_times_set = regs->array[i].set_in_loop;
 
   free (last_set);
-  *count_ptr = count;
 }
 
+/* Returns the number of real INSNs in the LOOP.  */
+
+static int
+count_insns_in_loop (loop)
+     const struct loop *loop;
+{
+  int count = 0;
+  rtx insn;
+
+  for (insn = loop->top ? loop->top : loop->start; insn != loop->end;
+       insn = NEXT_INSN (insn))
+    if (INSN_P (insn))
+      ++count;
+
+  return count;
+}
 
 /* Move MEMs into registers for the duration of the loop.  */
 
diff --git a/gcc/loop.h b/gcc/loop.h
index cfec9934b20..1794730307d 100644
--- a/gcc/loop.h
+++ b/gcc/loop.h
@@ -289,8 +289,6 @@ struct loop_movables
   struct movable *head;
   /* Last movable in chain.  */
   struct movable *last;
-  /* Number of movables in the loop.  */
-  int num;
 };
 
 
-- 
2.11.4.GIT