2004-09-24 Ben Elliston <bje@au.ibm.com> Steven Bosscher <stevenb@suse.de> Andrew Pinski <pinskia@physics.uc.edu> Merge from edge-vector-branch: * basic-block.h: Include vec.h, errors.h. Instantiate a VEC(edge). (struct edge_def): Remove pred_next, succ_next members. (struct basic_block_def): Remove pred, succ members. Add preds and succs members of type VEC(edge). (FALLTHRU_EDGE): Redefine using EDGE_SUCC. (BRANCH_EDGE): Likewise. (EDGE_CRITICAL_P): Redefine using EDGE_COUNT. (EDGE_COUNT, EDGE_I, EDGE_PRED, EDGE_SUCC): New. (edge_iterator): New. (ei_start, ei_last, ei_end_p, ei_one_before_end_p): New. (ei_next, ei_prev, ei_edge, ei_safe_edge): Likewise. (FOR_EACH_EDGE): New. * bb-reorder.c (find_traces): Use FOR_EACH_EDGE and EDGE_* macros where applicable. (rotate_loop): Likewise. (find_traces_1_route): Likewise. (bb_to_key): Likewise. (connect_traces): Likewise. (copy_bb_p): Likewise. (find_rarely_executed_basic_blocks_and_crossing_edges): Likewise. (add_labels_and_missing_jumps): Likewise. (fix_up_fall_thru_edges): Likewise. (find_jump_block): Likewise. (fix_crossing_conditional_branches): Likewise. (fix_crossing_unconditional_branches): Likewise. (add_reg_crossing_jump_notes): Likewise. * bt-load.c (augment_live_range): Likewise. * cfg.c (clear_edges): Likewise. (unchecked_make_edge): Likewise. (cached_make_edge): Likewise. (make_single_succ_edge): Likewise. (remove_edge): Likewise. (redirect_edge_succ_nodup): Likewise. (check_bb_profile): Likewise. (dump_flow_info): Likewise. (alloc_aux_for_edges): Likewise. (clear_aux_for_edges): Likewise. (dump_cfg_bb_info): Likewise. * cfganal.c (forwarder_block_p): Likewise. (can_fallthru): Likewise. (could_fall_through): Likewise. (mark_dfs_back_edges): Likewise. (set_edge_can_fallthru_flag): Likewise. (find_unreachable_blocks): Likewise. (create_edge_list): Likewise. (verify_edge_list): Likewise. (add_noreturn_fake_exit_edges): Likewise. (connect_infinite_loops_to_exit): Likewise. (flow_reverse_top_sort_order_compute): Likewise. (flow_depth_first_order_compute): Likewise. (flow_preorder_transversal_compute): Likewise. (flow_dfs_compute_reverse_execute): Likewise. (dfs_enumerate_from): Likewise. (compute_dominance_frontiers_1): Likewise. * cfgbuild.c (make_edges): Likewise. (compute_outgoing_frequencies): Likewise. (find_many_sub_basic_blocks): Likewise. (find_sub_basic_blocks): Likewise. * cfgcleanup.c (try_simplify_condjump): Likewise. (thread_jump): Likewise. (try_forward_edges): Likewise. (merge_blocks_move): Likewise. (outgoing_edges_match): Likewise. (try_crossjump_to_edge): Likewise. (try_crossjump_bb): Likewise. (try_optimize_cfg): Likewise. (merge_seq_blocks): Likewise. * cfgexpand.c (expand_gimple_tailcall): Likewise. (expand_gimple_basic_block): Likewise. (construct_init_block): Likewise. (construct_exit_block): Likewise. * cfghooks.c (verify_flow_info): Likewise. (dump_bb): Likewise. (delete_basic_block): Likewise. (split_edge): Likewise. (merge_blocks): Likewise. (make_forwarder_block): Likewise. (tidy_fallthru_edges): Likewise. (can_duplicate_block_p): Likewise. (duplicate_block): Likewise. * cfglayout.c (fixup_reorder_chain): Likewise. (fixup_fallthru_exit_predecessor): Likewise. (can_copy_bbs_p): Likewise. (copy_bbs): Likewise. * cfgloop.c (flow_loops_cfg_dump): Likewise. (flow_loop_entry_edges_find): Likewise. (flow_loop_exit_edges_find): Likewise. (flow_loop_nodes_find): Likewise. (mark_single_exit_loops): Likewise. (flow_loop_pre_header_scan): Likewise. (flow_loop_pre_header_find): Likewise. (update_latch_info): Likewise. 
(canonicalize_loop_headers): Likewise. (flow_loops_find): Likewise. (get_loop_body_in_bfs_order): Likewise. (get_loop_exit_edges): Likewise. (num_loop_branches): Likewise. (verify_loop_structure): Likewise. (loop_latch_edge): Likewise. (loop_preheader_edge): Likewise. * cfgloopanal.c (mark_irreducible_loops): Likewise. (expected_loop_iterations): Likewise. * cfgloopmanip.c (remove_bbs): Likewise. (fix_bb_placement): Likewise. (fix_irreducible_loops): Likewise. (remove_path): Likewise. (scale_bbs_frequencies): Likewise. (loopify): Likewise. (unloop): Likewise. (fix_loop_placement): Likewise. (loop_delete_branch_edge): Likewise. (duplicate_loop_to_header_edge): Likewise. (mfb_keep_just): Likewise. (create_preheader): Likewise. (force_single_succ_latches): Likewise. (loop_split_edge_with): Likewise. (create_loop_notes): Likewise. * cfgrtl.c (rtl_split_block): Likewise. (rtl_merge_blocks): Likewise. (rtl_can_merge_blocks): Likewise. (try_redirect_by_replacing_jump): Likewise. (force_nonfallthru_and_redirect): Likewise. (rtl_tidy_fallthru_edge): Likewise. (commit_one_edge_insertion): Likewise. (commit_edge_insertions): Likewise. (commit_edge_insertions_watch_calls): Likewise. (rtl_verify_flow_info_1): Likewise. (rtl_verify_flow_info): Likewise. (purge_dead_edges): Likewise. (cfg_layout_redirect_edge_and_branch): Likewise. (cfg_layout_can_merge_blocks_p): Likewise. (rtl_flow_call_edges_add): Likewise. * cse.c (cse_cc_succs): Likewise. * df.c (hybrid_search): Likewise. * dominance.c (calc_dfs_tree_nonrec): Likewise. (calc_dfs_tree): Likewise. (calc_idoms): Likewise. (recount_dominator): Likewise. * domwalk.c (walk_dominator_tree): Likewise. * except.c (emit_to_new_bb_before): Likewise. (connect_post_landing_pads): Likewise. (sjlj_emit_function_enter): Likewise. (sjlj_emit_function_exit): Likewise. (finish_eh_generation): Likewise. * final.c (compute_alignments): Likewise. * flow.c (calculate_global_regs_live): Likewise. (initialize_uninitialized_subregs): Likewise. (init_propagate_block_info): Likewise. * function.c (thread_prologue_and_epilogue_insns): Likewise. * gcse.c (find_implicit_sets): Likewise. (bypass_block): Likewise. (bypass_conditional_jumps): Likewise. (compute_pre_data): Likewise. (insert_insn_end_bb): Likewise. (insert_store): Likewise. (remove_reachable_equiv_notes): Likewise. * global.c (global_conflicts): Likewise. (calculate_reg_pav): Likewise. * graph.c (print_rtl_graph_with_bb): Likewise. * ifcvt.c (mark_loop_exit_edges): Likewise. (merge_if_block): Likewise. (find_if_header): Likewise. (block_jumps_and_fallthru_p): Likewise. (find_if_block): Likewise. (find_cond_trap): Likewise. (block_has_only_trap): Likewise. (find_if_case1): Likewise. (find_if_case_2): Likewise. * lambda-code.c (lambda_loopnest_to_gcc_loopnest): Likewise. (perfect_nestify): Likewise. * lcm.c (compute_antinout_edge): Likewise. (compute_laterin): Likewise. (compute_available): Likewise. (compute_nearerout): Likewise. * loop-doloop.c (doloop_modify): Likewise. * loop-init.c (loop_optimizer_init): Likewise. * loop-invariant.c (find_exits): Likewise. * loop-iv.c (simplify_using_initial_values): Likewise. (check_simple_exit): Likewise. (find_simple_exit): Likewise. * loop-unroll.c (peel_loop_completely): Likewise. (unroll_loop_constant_iterations): Likewise. (unroll_loop_runtime_iterations): Likewise. * loop-unswitch.c (may_unswitch_on): Likewise. (unswitch_loop): Likewise. * modulo-sched.c (generate_prolog_epilog): Likewise. (sms_schedule): Likewise. 
* postreload-gcse.c (eliminate_partially_redundant_load): Likewise. * predict.c (can_predict_insn_p): Likewise. (set_even_probabilities): Likewise. (combine_predictions_for_bb): Likewise. (predict_loops): Likewise. (estimate_probability): Likewise. (tree_predict_by_opcode): Likewise. (tree_estimate_probability): Likewise. (last_basic_block_p): Likewise. (propagate_freq): Likewise. (estimate_loops_at_level): Likewise. (estimate_bb_frequencies): Likewise. * profile.c (instrument_edges): Likewise. (get_exec_counts): Likewise. (compute_branch_probabilities): Likewise. (branch_prob): Likewise. * ra-build.c (live_in): Likewise. * ra-rewrite.c (rewrite_program2): Likewise. * ra.c (reg_alloc): Likewise. * reg-stack.c (reg_to_stack): Likewise. (convert_regs_entry): Likewise. (compensate_edge): Likewise. (convert_regs_1): Likewise, (convert_regs_2): Likewise. (convert_regs): Likewise. * regrename.c (copyprop_hardreg_forward): Likewise. * reload1.c (fixup_abnormal_edges): Likewise. * sbitmap.c (sbitmap_intersection_of_succs): Likewise. (sbitmap_insersection_of_preds): Likewise. (sbitmap_union_of_succs): Likewise. (sbitmap_union_of_preds): Likewise. * sched-ebb.c (compute_jump_reg_dependencies): Likewise. (fix_basic_block_boundaries): Likewise. (sched_ebbs): Likewise. * sched-rgn.c (build_control_flow): Likewise. (find_rgns): Likewise. * tracer.c (find_best_successor): Likewise. (find_best_predecessor): Likewise. (tail_duplicate): Likewise. * tree-cfg.c (make_edges): Likewise. (make_ctrl_stmt_edges): Likewise. (make_goto_expr_edges): Likewise. (tree_can_merge_blocks_p): Likewise. (tree_merge_blocks): Likewise. (cfg_remove_useless_stmts_bb): Likewise. (remove_phi_nodes_and_edges_for_unreachable_block): Likewise. (tree_block_forwards_to): Likewise. (cleanup_control_expr_graph): Likewise. (find_taken_edge): Likewise. (dump_cfg_stats): Likewise. (tree_cfg2vcg): Likewise. (disband_implicit_edges): Likewise. (tree_find_edge_insert_loc): Likewise. (bsi_commit_edge_inserts): Likewise. (tree_split_edge): Likewise. (tree_verify_flow_info): Likewise. (tree_make_forwarder_block): Likewise. (tree_forwarder_block_p): Likewise. (thread_jumps): Likewise. (tree_try_redirect_by_replacing_jump): Likewise. (tree_split_block): Likewise. (add_phi_args_after_copy_bb): Likewise. (rewrite_to_new_ssa_names_bb): Likewise. (dump_function_to_file): Likewise. (print_pred_bbs): Likewise. (print_loop): Likewise. (tree_flow_call_edges_add): Likewise. (split_critical_edges): Likewise. (execute_warn_function_return): Likewise. (extract_true_false_edges_from_block): Likewise. * tree-if-conv.c (tree_if_conversion): Likewise. (if_convertable_bb_p): Likewise. (find_phi_replacement_condition): Likewise. (combine_blocks): Likewise. * tree-into-ssa.c (compute_global_livein): Likewise. (ssa_mark_phi_uses): Likewise. (ssa_rewrite_initialize_block): Likewise. (rewrite_add_phi_arguments): Likewise. (ssa_rewrite_phi_arguments): Likewise. (insert_phi_nodes_for): Likewise. (rewrite_into_ssa): Likewise. (rewrite_ssa_into_ssa): Likewise. * tree-mudflap.c (mf_build_check_statement_for): Likewise. * tree-outof-ssa.c (coalesce_abnormal_edges): Likewise. (rewrite_trees): Likewise. * tree-pretty-print.c (dump_bb_header): Likewise. (dump_implicit_edges): Likewise. * tree-sra.c (insert_edge_copies): Likewise. (find_obviously_necessary_stmts): Likewise. (remove_data_stmt): Likewise. * tree-ssa-dom.c (thread_across_edge): Likewise. (dom_opt_finalize_block): Likewise. (single_incoming_edge_ignoring_loop_edges): Likewise. 
(record_equivalences_from_incoming_edges): Likewise. (cprop_into_successor_phis): Likewise. * tree-ssa-live.c (live_worklist): Likewise. (calculate_live_on_entry): Likewise. (calculate_live_on_exit): Likewise. * tree-ssa-loop-ch.c (should_duplicate_loop_header_p): Likewise. (copy_loop_headers): Likewise. * tree-ssa-loop-im.c (loop_commit_inserts): Likewise. (fill_always_executed_in): Likewise. * tree-ssa-loop-ivcanon.c (create_canonical_iv): Likewise. * tree-ssa-loop-ivopts.c (find_interesting_uses): Likewise. (compute_phi_arg_on_exit): Likewise. * tree-ssa-loop-manip.c (add_exit_phis_edge): Likewise. (get_loops_exit): Likewise. (split_loop_exit_edge): Likewise. (ip_normal_pos): Likewise. * tree-ssa-loop-niter.c (simplify_using_initial_conditions): Likewise. * tree-ssa-phiopt.c (candidate_bb_for_phi_optimization): Likewise. (replace_phi_with_stmt): Likewise. (value_replacement): Likewise. * tree-ssa-pre.c (compute_antic_aux): Likewise. (insert_aux): Likewise. (init_pre): Likewise. * tree-ssa-propagate.c (simulate_stmt): Likewise. (simulate_block): Likewise. (ssa_prop_init): Likewise. * tree-ssa-threadupdate.c (thread_block): Likewise. (create_block_for_threading): Likewise. (remove_last_stmt_and_useless_edges): Likewise. * tree-ssa.c (verify_phi_args): Likewise. (verify_ssa): Likewise. * tree_tailcall.c (independent_of_stmt_p): Likewise. (find_tail_calls): Likewise. (eliminate_tail_call): Likewise. (tree_optimize_tail_calls_1): Likewise. * tree-vectorizer.c (vect_transform_loop): Likewise. * var-tracking.c (prologue_stack_adjust): Likewise. (vt_stack_adjustments): Likewise. (vt_find_locations): Likewise. * config/frv/frv.c (frv_ifcvt_modify_tests): Likewise. * config/i386/i386.c (ix86_pad_returns): Likewise. * config/ia64/ia64.c (ia64_expand_prologue): Likewise. * config/rs6000/rs6000.c (rs6000_emit_prologue): Likewise. Co-Authored-By: Andrew Pinski <pinskia@physics.uc.edu> Co-Authored-By: Steven Bosscher <stevenb@suse.de> From-SVN: r88222
/* Basic block reordering routines for the GNU compiler.
   Copyright (C) 2000, 2001, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "output.h"
#include "function.h"
#include "obstack.h"
#include "cfglayout.h"
#include "cfgloop.h"
#include "target.h"
#include "ggc.h"
#include "alloc-pool.h"
#include "flags.h"

/* The contents of the current function definition are allocated
   in this obstack, and all are freed at the end of the function.  */
extern struct obstack flow_obstack;

/* Holds the interesting trailing notes for the function.  */
rtx cfg_layout_function_footer, cfg_layout_function_header;

static rtx skip_insns_after_block (basic_block);
static void record_effective_endpoints (void);
static rtx label_for_bb (basic_block);
static void fixup_reorder_chain (void);

static void set_block_levels (tree, int);
static void change_scope (rtx, tree, tree);

void verify_insn_chain (void);
static void fixup_fallthru_exit_predecessor (void);
static tree insn_scope (rtx);
static void update_unlikely_executed_notes (basic_block);

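/* Unlink the insns from FIRST to LAST inclusive from the insn chain,
   reconnecting the neighbouring insns around the removed range (and
   updating the first/last insn pointers when needed).  Return FIRST.  */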
rtx
unlink_insn_chain (rtx first, rtx last)
{
  rtx prevfirst = PREV_INSN (first);
  rtx nextlast = NEXT_INSN (last);

  PREV_INSN (first) = NULL;
  NEXT_INSN (last) = NULL;
  if (prevfirst)
    NEXT_INSN (prevfirst) = nextlast;
  if (nextlast)
    PREV_INSN (nextlast) = prevfirst;
  else
    set_last_insn (prevfirst);
  if (!prevfirst)
    set_first_insn (nextlast);
  return first;
}

/* Skip over inter-block insns occurring after BB which are typically
   associated with BB (e.g., barriers).  If there are any such insns,
   we return the last one.  Otherwise, we return the end of BB.  */

static rtx
skip_insns_after_block (basic_block bb)
{
  rtx insn, last_insn, next_head, prev;

  next_head = NULL_RTX;
  if (bb->next_bb != EXIT_BLOCK_PTR)
    next_head = BB_HEAD (bb->next_bb);

  for (last_insn = insn = BB_END (bb); (insn = NEXT_INSN (insn)) != 0; )
    {
      if (insn == next_head)
        break;

      switch (GET_CODE (insn))
        {
        case BARRIER:
          last_insn = insn;
          continue;

        case NOTE:
          switch (NOTE_LINE_NUMBER (insn))
            {
            case NOTE_INSN_LOOP_END:
            case NOTE_INSN_BLOCK_END:
              last_insn = insn;
              continue;
            case NOTE_INSN_DELETED:
            case NOTE_INSN_DELETED_LABEL:
              continue;

            default:
              continue;
              break;
            }
          break;

        case CODE_LABEL:
          if (NEXT_INSN (insn)
              && JUMP_P (NEXT_INSN (insn))
              && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
                  || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
            {
              insn = NEXT_INSN (insn);
              last_insn = insn;
              continue;
            }
          break;

        default:
          break;
        }

      break;
    }

  /* It is possible to hit contradictory sequence.  For instance:

     jump_insn
     NOTE_INSN_LOOP_BEG
     barrier

     Where barrier belongs to jump_insn, but the note does not.  This can be
     created by removing the basic block originally following
     NOTE_INSN_LOOP_BEG.  In such case reorder the notes.  */

  for (insn = last_insn; insn != BB_END (bb); insn = prev)
    {
      prev = PREV_INSN (insn);
      if (NOTE_P (insn))
        switch (NOTE_LINE_NUMBER (insn))
          {
          case NOTE_INSN_LOOP_END:
          case NOTE_INSN_BLOCK_END:
          case NOTE_INSN_DELETED:
          case NOTE_INSN_DELETED_LABEL:
            continue;
          default:
            reorder_insns (insn, insn, last_insn);
          }
    }

  return last_insn;
}

/* Locate or create a label for a given basic block.  */

static rtx
label_for_bb (basic_block bb)
{
  rtx label = BB_HEAD (bb);

  if (!LABEL_P (label))
    {
      if (dump_file)
        fprintf (dump_file, "Emitting label for block %d\n", bb->index);

      label = block_label (bb);
    }

  return label;
}

/* Locate the effective beginning and end of the insn chain for each
   block, as defined by skip_insns_after_block above.  */

static void
record_effective_endpoints (void)
{
  rtx next_insn;
  basic_block bb;
  rtx insn;

  for (insn = get_insns ();
       insn
       && NOTE_P (insn)
       && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK;
       insn = NEXT_INSN (insn))
    continue;
  /* No basic blocks at all?  */
  gcc_assert (insn);

  if (PREV_INSN (insn))
    cfg_layout_function_header =
        unlink_insn_chain (get_insns (), PREV_INSN (insn));
  else
    cfg_layout_function_header = NULL_RTX;

  next_insn = get_insns ();
  FOR_EACH_BB (bb)
    {
      rtx end;

      if (PREV_INSN (BB_HEAD (bb)) && next_insn != BB_HEAD (bb))
        bb->rbi->header = unlink_insn_chain (next_insn,
                                             PREV_INSN (BB_HEAD (bb)));
      end = skip_insns_after_block (bb);
      if (NEXT_INSN (BB_END (bb)) && BB_END (bb) != end)
        bb->rbi->footer = unlink_insn_chain (NEXT_INSN (BB_END (bb)), end);
      next_insn = NEXT_INSN (BB_END (bb));
    }

  cfg_layout_function_footer = next_insn;
  if (cfg_layout_function_footer)
    cfg_layout_function_footer = unlink_insn_chain (cfg_layout_function_footer, get_last_insn ());
}

/* Data structures representing mapping of INSN_LOCATOR into scope blocks, line
   numbers and files.  In order to be GGC friendly we need to use separate
   varrays.  This also slightly improves the memory locality in binary search.
   The _locs array contains locators where the given property changes.  The
   block_locators_blocks contains the scope block that is used for all insn
   locators greater than the corresponding block_locators_locs value and smaller
   than the following one.  Similarly for the other properties.  */
static GTY(()) varray_type block_locators_locs;
static GTY(()) varray_type block_locators_blocks;
static GTY(()) varray_type line_locators_locs;
static GTY(()) varray_type line_locators_lines;
static GTY(()) varray_type file_locators_locs;
static GTY(()) varray_type file_locators_files;
int prologue_locator;
int epilogue_locator;

/* During the RTL expansion the lexical blocks and line numbers are
   represented via INSN_NOTEs.  Replace them by representation using
   INSN_LOCATORs.  */

void
insn_locators_initialize (void)
{
  tree block = NULL;
  tree last_block = NULL;
  rtx insn, next;
  int loc = 0;
  int line_number = 0, last_line_number = 0;
  const char *file_name = NULL, *last_file_name = NULL;

  prologue_locator = epilogue_locator = 0;

  VARRAY_INT_INIT (block_locators_locs, 32, "block_locators_locs");
  VARRAY_TREE_INIT (block_locators_blocks, 32, "block_locators_blocks");
  VARRAY_INT_INIT (line_locators_locs, 32, "line_locators_locs");
  VARRAY_INT_INIT (line_locators_lines, 32, "line_locators_lines");
  VARRAY_INT_INIT (file_locators_locs, 32, "file_locators_locs");
  VARRAY_CHAR_PTR_INIT (file_locators_files, 32, "file_locators_files");

  for (insn = get_insns (); insn; insn = next)
    {
      int active = 0;

      next = NEXT_INSN (insn);

      if (NOTE_P (insn))
        {
          gcc_assert (NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_BEG
                      && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_END);
          if (NOTE_LINE_NUMBER (insn) > 0)
            {
              expanded_location xloc;
              NOTE_EXPANDED_LOCATION (xloc, insn);
              line_number = xloc.line;
              file_name = xloc.file;
            }
        }
      else
        active = (active_insn_p (insn)
                  && GET_CODE (PATTERN (insn)) != ADDR_VEC
                  && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC);

      check_block_change (insn, &block);

      if (active
          || !next
          || (!prologue_locator && file_name))
        {
          if (last_block != block)
            {
              loc++;
              VARRAY_PUSH_INT (block_locators_locs, loc);
              VARRAY_PUSH_TREE (block_locators_blocks, block);
              last_block = block;
            }
          if (last_line_number != line_number)
            {
              loc++;
              VARRAY_PUSH_INT (line_locators_locs, loc);
              VARRAY_PUSH_INT (line_locators_lines, line_number);
              last_line_number = line_number;
            }
          if (last_file_name != file_name)
            {
              loc++;
              VARRAY_PUSH_INT (file_locators_locs, loc);
              VARRAY_PUSH_CHAR_PTR (file_locators_files, (char *) file_name);
              last_file_name = file_name;
            }
          if (!prologue_locator && file_name)
            prologue_locator = loc;
          if (!next)
            epilogue_locator = loc;
          if (active)
            INSN_LOCATOR (insn) = loc;
        }
    }

  /* Tag the blocks with a depth number so that change_scope can find
     the common parent easily.  */
  set_block_levels (DECL_INITIAL (cfun->decl), 0);

  free_block_changes ();
}

/* For each lexical block, set BLOCK_NUMBER to the depth at which it is
   found in the block tree.  */

static void
set_block_levels (tree block, int level)
{
  while (block)
    {
      BLOCK_NUMBER (block) = level;
      set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
      block = BLOCK_CHAIN (block);
    }
}

/* Return scope resulting from combination of S1 and S2.  */
tree
choose_inner_scope (tree s1, tree s2)
{
  if (!s1)
    return s2;
  if (!s2)
    return s1;
  if (BLOCK_NUMBER (s1) > BLOCK_NUMBER (s2))
    return s1;
  return s2;
}

/* Emit lexical block notes needed to change scope from S1 to S2.  */

static void
change_scope (rtx orig_insn, tree s1, tree s2)
{
  rtx insn = orig_insn;
  tree com = NULL_TREE;
  tree ts1 = s1, ts2 = s2;
  tree s;

  while (ts1 != ts2)
    {
      gcc_assert (ts1 && ts2);
      if (BLOCK_NUMBER (ts1) > BLOCK_NUMBER (ts2))
        ts1 = BLOCK_SUPERCONTEXT (ts1);
      else if (BLOCK_NUMBER (ts1) < BLOCK_NUMBER (ts2))
        ts2 = BLOCK_SUPERCONTEXT (ts2);
      else
        {
          ts1 = BLOCK_SUPERCONTEXT (ts1);
          ts2 = BLOCK_SUPERCONTEXT (ts2);
        }
    }
  com = ts1;

  /* Close scopes.  */
  s = s1;
  while (s != com)
    {
      rtx note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
      NOTE_BLOCK (note) = s;
      s = BLOCK_SUPERCONTEXT (s);
    }

  /* Open scopes.  */
  s = s2;
  while (s != com)
    {
      insn = emit_note_before (NOTE_INSN_BLOCK_BEG, insn);
      NOTE_BLOCK (insn) = s;
      s = BLOCK_SUPERCONTEXT (s);
    }
}

/* Return the lexical scope block INSN belongs to.  */
static tree
insn_scope (rtx insn)
{
  int max = VARRAY_ACTIVE_SIZE (block_locators_locs);
  int min = 0;
  int loc = INSN_LOCATOR (insn);

  /* When block_locators_locs was initialized, the pro- and epilogue
     insns didn't exist yet and can therefore not be found this way.
     But we know that they belong to the outer most block of the
     current function.
     Without this test, the prologue would be put inside the block of
     the first valid instruction in the function and when that first
     insn is part of an inlined function then the low_pc of that
     inlined function is messed up.  Likewise for the epilogue and
     the last valid instruction.  */
  if (loc == prologue_locator || loc == epilogue_locator)
    return DECL_INITIAL (cfun->decl);

  if (!max || !loc)
    return NULL;
  while (1)
    {
      int pos = (min + max) / 2;
      int tmp = VARRAY_INT (block_locators_locs, pos);

      if (tmp <= loc && min != pos)
        min = pos;
      else if (tmp > loc && max != pos)
        max = pos;
      else
        {
          min = pos;
          break;
        }
    }
  return VARRAY_TREE (block_locators_blocks, min);
}

/* Return line number of the statement specified by the locator.  */
int
locator_line (int loc)
{
  int max = VARRAY_ACTIVE_SIZE (line_locators_locs);
  int min = 0;

  if (!max || !loc)
    return 0;
  while (1)
    {
      int pos = (min + max) / 2;
      int tmp = VARRAY_INT (line_locators_locs, pos);

      if (tmp <= loc && min != pos)
        min = pos;
      else if (tmp > loc && max != pos)
        max = pos;
      else
        {
          min = pos;
          break;
        }
    }
  return VARRAY_INT (line_locators_lines, min);
}

/* Return line number of the statement that produced this insn.  */
int
insn_line (rtx insn)
{
  return locator_line (INSN_LOCATOR (insn));
}

/* Return source file of the statement specified by LOC.  */
const char *
locator_file (int loc)
{
  int max = VARRAY_ACTIVE_SIZE (file_locators_locs);
  int min = 0;

  if (!max || !loc)
    return NULL;
  while (1)
    {
      int pos = (min + max) / 2;
      int tmp = VARRAY_INT (file_locators_locs, pos);

      if (tmp <= loc && min != pos)
        min = pos;
      else if (tmp > loc && max != pos)
        max = pos;
      else
        {
          min = pos;
          break;
        }
    }
  return VARRAY_CHAR_PTR (file_locators_files, min);
}

/* Return source file of the statement that produced this insn.  */
const char *
insn_file (rtx insn)
{
  return locator_file (INSN_LOCATOR (insn));
}

/* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
   on the scope tree and the newly reordered instructions.  */

void
reemit_insn_block_notes (void)
{
  tree cur_block = DECL_INITIAL (cfun->decl);
  rtx insn, note;

  insn = get_insns ();
  if (!active_insn_p (insn))
    insn = next_active_insn (insn);
  for (; insn; insn = next_active_insn (insn))
    {
      tree this_block;

      this_block = insn_scope (insn);
      /* For sequences compute scope resulting from merging all scopes
         of instructions nested inside.  */
      if (GET_CODE (PATTERN (insn)) == SEQUENCE)
        {
          int i;
          rtx body = PATTERN (insn);

          this_block = NULL;
          for (i = 0; i < XVECLEN (body, 0); i++)
            this_block = choose_inner_scope (this_block,
                                             insn_scope (XVECEXP (body, 0, i)));
        }
      if (! this_block)
        continue;

      if (this_block != cur_block)
        {
          change_scope (insn, cur_block, this_block);
          cur_block = this_block;
        }
    }

  /* change_scope emits before the insn, not after.  */
  note = emit_note (NOTE_INSN_DELETED);
  change_scope (note, cur_block, DECL_INITIAL (cfun->decl));
  delete_insn (note);

  reorder_blocks ();
}

/* Given a reorder chain, rearrange the code to match.  */

static void
fixup_reorder_chain (void)
{
  basic_block bb, prev_bb;
  int index;
  rtx insn = NULL;

  if (cfg_layout_function_header)
    {
      set_first_insn (cfg_layout_function_header);
      insn = cfg_layout_function_header;
      while (NEXT_INSN (insn))
        insn = NEXT_INSN (insn);
    }

  /* First do the bulk reordering -- rechain the blocks without regard to
     the needed changes to jumps and labels.  */

  for (bb = ENTRY_BLOCK_PTR->next_bb, index = 0;
       bb != 0;
       bb = bb->rbi->next, index++)
    {
      if (bb->rbi->header)
        {
          if (insn)
            NEXT_INSN (insn) = bb->rbi->header;
          else
            set_first_insn (bb->rbi->header);
          PREV_INSN (bb->rbi->header) = insn;
          insn = bb->rbi->header;
          while (NEXT_INSN (insn))
            insn = NEXT_INSN (insn);
        }
      if (insn)
        NEXT_INSN (insn) = BB_HEAD (bb);
      else
        set_first_insn (BB_HEAD (bb));
      PREV_INSN (BB_HEAD (bb)) = insn;
      insn = BB_END (bb);
      if (bb->rbi->footer)
        {
          NEXT_INSN (insn) = bb->rbi->footer;
          PREV_INSN (bb->rbi->footer) = insn;
          while (NEXT_INSN (insn))
            insn = NEXT_INSN (insn);
        }
    }

  gcc_assert (index == n_basic_blocks);

  NEXT_INSN (insn) = cfg_layout_function_footer;
  if (cfg_layout_function_footer)
    PREV_INSN (cfg_layout_function_footer) = insn;

  while (NEXT_INSN (insn))
    insn = NEXT_INSN (insn);

  set_last_insn (insn);
#ifdef ENABLE_CHECKING
  verify_insn_chain ();
#endif
  delete_dead_jumptables ();

  /* Now add jumps and labels as needed to match the blocks' new
     outgoing edges.  */

  for (bb = ENTRY_BLOCK_PTR->next_bb; bb ; bb = bb->rbi->next)
    {
      edge e_fall, e_taken, e;
      rtx bb_end_insn;
      basic_block nb;
      basic_block old_bb;
      edge_iterator ei;

      if (EDGE_COUNT (bb->succs) == 0)
        continue;

      /* Find the old fallthru edge, and another non-EH edge for
         a taken jump.  */
      e_taken = e_fall = NULL;

      FOR_EACH_EDGE (e, ei, bb->succs)
        if (e->flags & EDGE_FALLTHRU)
          e_fall = e;
        else if (! (e->flags & EDGE_EH))
          e_taken = e;

      bb_end_insn = BB_END (bb);
      if (JUMP_P (bb_end_insn))
        {
          if (any_condjump_p (bb_end_insn))
            {
              /* If the old fallthru is still next, nothing to do.  */
              if (bb->rbi->next == e_fall->dest
                  || e_fall->dest == EXIT_BLOCK_PTR)
                continue;

              /* The degenerated case of conditional jump jumping to the next
                 instruction can happen on targets having jumps with side
                 effects.

                 Create temporarily the duplicated edge representing branch.
                 It will get unidentified by force_nonfallthru_and_redirect
                 that would otherwise get confused by fallthru edge not pointing
                 to the next basic block.  */
              if (!e_taken)
                {
                  rtx note;
                  edge e_fake;
                  bool redirected;

                  e_fake = unchecked_make_edge (bb, e_fall->dest, 0);

                  redirected = redirect_jump (BB_END (bb),
                                              block_label (bb), 0);
                  gcc_assert (redirected);

                  note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX);
                  if (note)
                    {
                      int prob = INTVAL (XEXP (note, 0));

                      e_fake->probability = prob;
                      e_fake->count = e_fall->count * prob / REG_BR_PROB_BASE;
                      e_fall->probability -= e_fall->probability;
                      e_fall->count -= e_fake->count;
                      if (e_fall->probability < 0)
                        e_fall->probability = 0;
                      if (e_fall->count < 0)
                        e_fall->count = 0;
                    }
                }
              /* There is one special case: if *neither* block is next,
                 such as happens at the very end of a function, then we'll
                 need to add a new unconditional jump.  Choose the taken
                 edge based on known or assumed probability.  */
              else if (bb->rbi->next != e_taken->dest)
                {
                  rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);

                  if (note
                      && INTVAL (XEXP (note, 0)) < REG_BR_PROB_BASE / 2
                      && invert_jump (bb_end_insn,
                                      (e_fall->dest == EXIT_BLOCK_PTR
                                       ? NULL_RTX
                                       : label_for_bb (e_fall->dest)), 0))
                    {
                      e_fall->flags &= ~EDGE_FALLTHRU;
#ifdef ENABLE_CHECKING
                      gcc_assert (could_fall_through
                                  (e_taken->src, e_taken->dest));
#endif
                      e_taken->flags |= EDGE_FALLTHRU;
                      update_br_prob_note (bb);
                      e = e_fall, e_fall = e_taken, e_taken = e;
                    }
                }

              /* If the "jumping" edge is a crossing edge, and the fall
                 through edge is non-crossing, leave things as they are.  */
              else if ((e_taken->flags & EDGE_CROSSING)
                       && !(e_fall->flags & EDGE_CROSSING))
                continue;

              /* Otherwise we can try to invert the jump.  This will
                 basically never fail, however, keep up the pretense.  */
              else if (invert_jump (bb_end_insn,
                                    (e_fall->dest == EXIT_BLOCK_PTR
                                     ? NULL_RTX
                                     : label_for_bb (e_fall->dest)), 0))
                {
                  e_fall->flags &= ~EDGE_FALLTHRU;
#ifdef ENABLE_CHECKING
                  gcc_assert (could_fall_through
                              (e_taken->src, e_taken->dest));
#endif
                  e_taken->flags |= EDGE_FALLTHRU;
                  update_br_prob_note (bb);
                  continue;
                }
            }
          else
            {
#ifndef CASE_DROPS_THROUGH
              /* Otherwise we have some return, switch or computed
                 jump.  In the 99% case, there should not have been a
                 fallthru edge.  */
              gcc_assert (returnjump_p (bb_end_insn) || !e_fall);
              continue;
#else
              if (returnjump_p (bb_end_insn) || !e_fall)
                continue;
              /* Except for VAX.  Since we didn't have predication for the
                 tablejump, the fallthru block should not have moved.  */
              if (bb->rbi->next == e_fall->dest)
                continue;
              bb_end_insn = skip_insns_after_block (bb);
#endif
            }
        }
      else
        {
          /* No fallthru implies a noreturn function with EH edges, or
             something similarly bizarre.  In any case, we don't need to
             do anything.  */
          if (! e_fall)
            continue;

          /* If the fallthru block is still next, nothing to do.  */
          if (bb->rbi->next == e_fall->dest)
            continue;

          /* A fallthru to exit block.  */
          if (e_fall->dest == EXIT_BLOCK_PTR)
            continue;
        }

      /* We got here if we need to add a new jump insn.  */
      nb = force_nonfallthru (e_fall);
      if (nb)
        {
          initialize_bb_rbi (nb);
          nb->rbi->visited = 1;
          nb->rbi->next = bb->rbi->next;
          bb->rbi->next = nb;
          /* Don't process this new block.  */
          old_bb = bb;
          bb = nb;

          /* Make sure new bb is tagged for correct section (same as
             fall-thru source, since you cannot fall through across
             section boundaries).  */
          BB_COPY_PARTITION (e_fall->src, EDGE_PRED (bb, 0)->src);
          if (flag_reorder_blocks_and_partition
              && targetm.have_named_sections)
            {
              if (BB_PARTITION (EDGE_PRED (bb, 0)->src) == BB_COLD_PARTITION)
                {
                  rtx new_note;
                  rtx note = BB_HEAD (e_fall->src);

                  while (!INSN_P (note)
                         && note != BB_END (e_fall->src))
                    note = NEXT_INSN (note);

                  new_note = emit_note_before
                             (NOTE_INSN_UNLIKELY_EXECUTED_CODE,
                              note);
                  NOTE_BASIC_BLOCK (new_note) = bb;
                }
              if (JUMP_P (BB_END (bb))
                  && !any_condjump_p (BB_END (bb))
                  && (EDGE_SUCC (bb, 0)->flags & EDGE_CROSSING))
                REG_NOTES (BB_END (bb)) = gen_rtx_EXPR_LIST
                  (REG_CROSSING_JUMP, NULL_RTX, REG_NOTES (BB_END (bb)));
            }
        }
    }

  /* Put basic_block_info in the new order.  */

  if (dump_file)
    {
      fprintf (dump_file, "Reordered sequence:\n");
      for (bb = ENTRY_BLOCK_PTR->next_bb, index = 0;
           bb;
           bb = bb->rbi->next, index++)
        {
          fprintf (dump_file, " %i ", index);
          if (bb->rbi->original)
            fprintf (dump_file, "duplicate of %i ",
                     bb->rbi->original->index);
          else if (forwarder_block_p (bb)
                   && !LABEL_P (BB_HEAD (bb)))
            fprintf (dump_file, "compensation ");
          else
            fprintf (dump_file, "bb %i ", bb->index);
          fprintf (dump_file, " [%i]\n", bb->frequency);
        }
    }

  prev_bb = ENTRY_BLOCK_PTR;
  bb = ENTRY_BLOCK_PTR->next_bb;
  index = 0;

  for (; bb; prev_bb = bb, bb = bb->rbi->next, index ++)
    {
      bb->index = index;
      BASIC_BLOCK (index) = bb;

      update_unlikely_executed_notes (bb);

      bb->prev_bb = prev_bb;
      prev_bb->next_bb = bb;
    }
  prev_bb->next_bb = EXIT_BLOCK_PTR;
  EXIT_BLOCK_PTR->prev_bb = prev_bb;

  /* Annoying special case - jump around dead jumptables left in the code.  */
  FOR_EACH_BB (bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
        if (e->flags & EDGE_FALLTHRU)
          break;

      if (e && !can_fallthru (e->src, e->dest))
        force_nonfallthru (e);
    }
}

/* Update the basic block number information in any
   NOTE_INSN_UNLIKELY_EXECUTED_CODE notes within the basic block.  */

static void
update_unlikely_executed_notes (basic_block bb)
{
  rtx cur_insn;

  for (cur_insn = BB_HEAD (bb); cur_insn != BB_END (bb);
       cur_insn = NEXT_INSN (cur_insn))
    if (NOTE_P (cur_insn)
        && NOTE_LINE_NUMBER (cur_insn) == NOTE_INSN_UNLIKELY_EXECUTED_CODE)
      NOTE_BASIC_BLOCK (cur_insn) = bb;
}

/* Perform sanity checks on the insn chain.
   1. Check that next/prev pointers are consistent in both the forward and
      reverse direction.
   2. Count insns in chain, going both directions, and check if equal.
   3. Check that get_last_insn () returns the actual end of chain.  */

void
verify_insn_chain (void)
{
  rtx x, prevx, nextx;
  int insn_cnt1, insn_cnt2;

  for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
       x != 0;
       prevx = x, insn_cnt1++, x = NEXT_INSN (x))
    gcc_assert (PREV_INSN (x) == prevx);

  gcc_assert (prevx == get_last_insn ());

  for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
       x != 0;
       nextx = x, insn_cnt2++, x = PREV_INSN (x))
    gcc_assert (NEXT_INSN (x) == nextx);

  gcc_assert (insn_cnt1 == insn_cnt2);
}

/* If we have assembler epilogues, the block falling through to exit must
   be the last one in the reordered chain when we reach final.  Ensure
   that this condition is met.  */
static void
fixup_fallthru_exit_predecessor (void)
{
  edge e;
  edge_iterator ei;
  basic_block bb = NULL;

  /* This transformation is not valid before reload, because we might
     separate a call from the instruction that copies the return
     value.  */
  gcc_assert (reload_completed);

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    if (e->flags & EDGE_FALLTHRU)
      bb = e->src;

  if (bb && bb->rbi->next)
    {
      basic_block c = ENTRY_BLOCK_PTR->next_bb;

      /* If the very first block is the one with the fall-through exit
         edge, we have to split that block.  */
      if (c == bb)
        {
          bb = split_block (bb, NULL)->dest;
          initialize_bb_rbi (bb);
          bb->rbi->next = c->rbi->next;
          c->rbi->next = bb;
          bb->rbi->footer = c->rbi->footer;
          c->rbi->footer = NULL;
        }

      while (c->rbi->next != bb)
        c = c->rbi->next;

      c->rbi->next = bb->rbi->next;
      while (c->rbi->next)
        c = c->rbi->next;

      c->rbi->next = bb;
      bb->rbi->next = NULL;
    }
}

/* Return true in case it is possible to duplicate the basic block BB.  */

/* We do not want to declare the function in a header file, since it should
   only be used through the cfghooks interface, and we do not want to move
   it to cfgrtl.c since it would require also moving quite a lot of related
   code.  */
extern bool cfg_layout_can_duplicate_bb_p (basic_block);

bool
cfg_layout_can_duplicate_bb_p (basic_block bb)
{
  /* Do not attempt to duplicate tablejumps, as we need to unshare
     the dispatch table.  This is difficult to do, as the instructions
     computing jump destination may be hoisted outside the basic block.  */
  if (tablejump_p (BB_END (bb), NULL, NULL))
    return false;

  /* Do not duplicate blocks containing insns that can't be copied.  */
  if (targetm.cannot_copy_insn_p)
    {
      rtx insn = BB_HEAD (bb);
      while (1)
        {
          if (INSN_P (insn) && targetm.cannot_copy_insn_p (insn))
            return false;
          if (insn == BB_END (bb))
            break;
          insn = NEXT_INSN (insn);
        }
    }

  return true;
}

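/* Emit a copy of the insns from FROM to TO (inclusive) at the end of the
   current insn chain and return the first insn of the copy.  Dispatch
   tables are not copied, and most notes are either skipped or kept only
   in the master copy, as detailed below.  */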
rtx
duplicate_insn_chain (rtx from, rtx to)
{
  rtx insn, last;

  /* Avoid updating of boundaries of previous basic block.  The
     note will get removed from insn stream in fixup.  */
  last = emit_note (NOTE_INSN_DELETED);

  /* Create copy at the end of INSN chain.  The chain will
     be reordered later.  */
  for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
        {
        case INSN:
        case CALL_INSN:
        case JUMP_INSN:
          /* Avoid copying of dispatch tables.  We never duplicate
             tablejumps, so this can hit only in case the table got
             moved far from original jump.  */
          if (GET_CODE (PATTERN (insn)) == ADDR_VEC
              || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
            break;
          emit_copy_of_insn_after (insn, get_last_insn ());
          break;

        case CODE_LABEL:
          break;

        case BARRIER:
          emit_barrier ();
          break;

        case NOTE:
          switch (NOTE_LINE_NUMBER (insn))
            {
            /* In case prologue is empty and function contains label
               in first BB, we may want to copy the block.  */
            case NOTE_INSN_PROLOGUE_END:

            case NOTE_INSN_LOOP_BEG:
            case NOTE_INSN_LOOP_END:
              /* Strip down the loop notes - we don't really want to keep
                 them consistent in loop copies.  */
            case NOTE_INSN_DELETED:
            case NOTE_INSN_DELETED_LABEL:
              /* No problem to strip these.  */
            case NOTE_INSN_EPILOGUE_BEG:
            case NOTE_INSN_FUNCTION_END:
              /* Debug code expects these notes to exist just once.
                 Keep them in the master copy.
                 ??? It probably makes more sense to duplicate them for each
                 epilogue copy.  */
            case NOTE_INSN_FUNCTION_BEG:
              /* There is always just single entry to function.  */
            case NOTE_INSN_BASIC_BLOCK:
              break;

            case NOTE_INSN_REPEATED_LINE_NUMBER:
            case NOTE_INSN_UNLIKELY_EXECUTED_CODE:
              emit_note_copy (insn);
              break;

            default:
              /* All other notes should have already been eliminated.  */
              gcc_assert (NOTE_LINE_NUMBER (insn) >= 0);

              /* It is possible that no_line_number is set and the note
                 won't be emitted.  */
              emit_note_copy (insn);
            }
          break;
        default:
          gcc_unreachable ();
        }
    }
  insn = NEXT_INSN (last);
  delete_insn (last);
  return insn;
}

/* Create a duplicate of the basic block BB.  */

/* We do not want to declare the function in a header file, since it should
   only be used through the cfghooks interface, and we do not want to move
   it to cfgrtl.c since it would require also moving quite a lot of related
   code.  */
extern basic_block cfg_layout_duplicate_bb (basic_block);

basic_block
cfg_layout_duplicate_bb (basic_block bb)
{
  rtx insn;
  basic_block new_bb;

  insn = duplicate_insn_chain (BB_HEAD (bb), BB_END (bb));
  new_bb = create_basic_block (insn,
                               insn ? get_last_insn () : NULL,
                               EXIT_BLOCK_PTR->prev_bb);

  BB_COPY_PARTITION (new_bb, bb);
  if (bb->rbi->header)
    {
      insn = bb->rbi->header;
      while (NEXT_INSN (insn))
        insn = NEXT_INSN (insn);
      insn = duplicate_insn_chain (bb->rbi->header, insn);
      if (insn)
        new_bb->rbi->header = unlink_insn_chain (insn, get_last_insn ());
    }

  if (bb->rbi->footer)
    {
      insn = bb->rbi->footer;
      while (NEXT_INSN (insn))
        insn = NEXT_INSN (insn);
      insn = duplicate_insn_chain (bb->rbi->footer, insn);
      if (insn)
        new_bb->rbi->footer = unlink_insn_chain (insn, get_last_insn ());
    }

  if (bb->global_live_at_start)
    {
      new_bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      new_bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      COPY_REG_SET (new_bb->global_live_at_start, bb->global_live_at_start);
      COPY_REG_SET (new_bb->global_live_at_end, bb->global_live_at_end);
    }

  return new_bb;
}

/* Main entry point to this module - initialize the datastructures for
   CFG layout changes.  It keeps LOOPS up-to-date if not null.

   FLAGS is a set of additional flags to pass to cleanup_cfg().  It should
   include CLEANUP_UPDATE_LIFE if liveness information must be kept up
   to date.  */

void
cfg_layout_initialize (unsigned int flags)
{
  basic_block bb;

  /* Our algorithm depends on fact that there are no dead jumptables
     around the code.  */
  alloc_rbi_pool ();

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    initialize_bb_rbi (bb);

  cfg_layout_rtl_register_cfg_hooks ();

  record_effective_endpoints ();

  cleanup_cfg (CLEANUP_CFGLAYOUT | flags);
}

/* Splits superblocks.  */
void
break_superblocks (void)
{
  sbitmap superblocks;
  bool need = false;
  basic_block bb;

  superblocks = sbitmap_alloc (last_basic_block);
  sbitmap_zero (superblocks);

  FOR_EACH_BB (bb)
    if (bb->flags & BB_SUPERBLOCK)
      {
        bb->flags &= ~BB_SUPERBLOCK;
        SET_BIT (superblocks, bb->index);
        need = true;
      }

  if (need)
    {
      rebuild_jump_labels (get_insns ());
      find_many_sub_basic_blocks (superblocks);
    }

  free (superblocks);
}

/* Finalize the changes: reorder insn list according to the sequence, enter
   compensation code, rebuild scope forest.  */

void
cfg_layout_finalize (void)
{
  basic_block bb;

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
  rtl_register_cfg_hooks ();
  if (reload_completed
#ifdef HAVE_epilogue
      && !HAVE_epilogue
#endif
      )
    fixup_fallthru_exit_predecessor ();
  fixup_reorder_chain ();

#ifdef ENABLE_CHECKING
  verify_insn_chain ();
#endif

  free_rbi_pool ();
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    bb->rbi = NULL;

  break_superblocks ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
}

/* Checks whether all N blocks in BBS array can be copied.  */
bool
can_copy_bbs_p (basic_block *bbs, unsigned n)
{
  unsigned i;
  edge e;
  int ret = true;

  for (i = 0; i < n; i++)
    bbs[i]->rbi->duplicated = 1;

  for (i = 0; i < n; i++)
    {
      /* In case we should redirect abnormal edge during duplication, fail.  */
      edge_iterator ei;
      FOR_EACH_EDGE (e, ei, bbs[i]->succs)
        if ((e->flags & EDGE_ABNORMAL)
            && e->dest->rbi->duplicated)
          {
            ret = false;
            goto end;
          }

      if (!can_duplicate_block_p (bbs[i]))
        {
          ret = false;
          break;
        }
    }

end:
  for (i = 0; i < n; i++)
    bbs[i]->rbi->duplicated = 0;

  return ret;
}

/* Duplicates N basic blocks stored in array BBS.  Newly created basic blocks
   are placed into array NEW_BBS in the same order.  Edges from basic blocks
   in BBS are also duplicated and copies of those of them
   that lead into BBS are redirected to appropriate newly created block.  The
   function assigns bbs into loops (copy of basic block bb is assigned to
   bb->loop_father->copy loop, so this must be set up correctly in advance)
   and updates dominators locally (LOOPS structure that contains the information
   about dominators is passed to enable this).

   BASE is the superloop to that basic block belongs; if its header or latch
   is copied, we do not set the new blocks as header or latch.

   Created copies of N_EDGES edges in array EDGES are stored in array NEW_EDGES,
   also in the same order.  */

void
copy_bbs (basic_block *bbs, unsigned n, basic_block *new_bbs,
          edge *edges, unsigned n_edges, edge *new_edges,
          struct loop *base)
{
  unsigned i, j;
  basic_block bb, new_bb, dom_bb;
  edge e;

  /* Duplicate bbs, update dominators, assign bbs to loops.  */
  for (i = 0; i < n; i++)
    {
      /* Duplicate.  */
      bb = bbs[i];
      new_bb = new_bbs[i] = duplicate_block (bb, NULL);
      bb->rbi->duplicated = 1;
      /* Add to loop.  */
      add_bb_to_loop (new_bb, bb->loop_father->copy);
      /* Possibly set header.  */
      if (bb->loop_father->header == bb && bb->loop_father != base)
        new_bb->loop_father->header = new_bb;
      /* Or latch.  */
      if (bb->loop_father->latch == bb && bb->loop_father != base)
        new_bb->loop_father->latch = new_bb;
    }

  /* Set dominators.  */
  for (i = 0; i < n; i++)
    {
      bb = bbs[i];
      new_bb = new_bbs[i];

      dom_bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (dom_bb->rbi->duplicated)
        {
          dom_bb = dom_bb->rbi->copy;
          set_immediate_dominator (CDI_DOMINATORS, new_bb, dom_bb);
        }
    }

  /* Redirect edges.  */
  for (j = 0; j < n_edges; j++)
    new_edges[j] = NULL;
  for (i = 0; i < n; i++)
    {
      edge_iterator ei;
      new_bb = new_bbs[i];
      bb = bbs[i];

      FOR_EACH_EDGE (e, ei, new_bb->succs)
        {
          for (j = 0; j < n_edges; j++)
            if (edges[j] && edges[j]->src == bb && edges[j]->dest == e->dest)
              new_edges[j] = e;

          if (!e->dest->rbi->duplicated)
            continue;
          redirect_edge_and_branch_force (e, e->dest->rbi->copy);
        }
    }

  /* Clear information about duplicates.  */
  for (i = 0; i < n; i++)
    bbs[i]->rbi->duplicated = 0;
}

#include "gt-cfglayout.h"