/* Standard problems for dataflow support routines.
Copyright (C) 1999-2019 Free Software Foundation, Inc.
Originally contributed by Michael P. Hayes
(m.hayes@elec.canterbury.ac.nz, mhayes@redhat.com)
Major rewrite contributed by Danny Berlin (dberlin@dberlin.org)
and Kenneth Zadeck (zadeck@naturalbridge.com).
This file is part of GCC.
GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.
GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
for more details.
You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3. If not see
<http://www.gnu.org/licenses/>. */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "df.h"
#include "memmodel.h"
#include "tm_p.h"
#include "insn-config.h"
#include "cfganal.h"
#include "dce.h"
#include "valtrack.h"
#include "dumpfile.h"
#include "rtl-iter.h"
/* Note that turning REG_DEAD_DEBUGGING on will cause
gcc.c-torture/unsorted/dump-noaddr.c to fail because it prints
addresses in the dumps. */
#define REG_DEAD_DEBUGGING 0
#define DF_SPARSE_THRESHOLD 32
static bitmap_head seen_in_block;
static bitmap_head seen_in_insn;
/*----------------------------------------------------------------------------
Utility functions.
----------------------------------------------------------------------------*/
/* Generic versions to get the void* version of the block info. Only
used inside the problem instance vectors. */
/* Dump the def-use or use-def chain LINK to FILE. */
void
df_chain_dump (struct df_link *link, FILE *file)
{
fprintf (file, "{ ");
for (; link; link = link->next)
{
fprintf (file, "%c%d(bb %d insn %d) ",
DF_REF_REG_DEF_P (link->ref)
? 'd'
: (DF_REF_FLAGS (link->ref) & DF_REF_IN_NOTE) ? 'e' : 'u',
DF_REF_ID (link->ref),
DF_REF_BBNO (link->ref),
DF_REF_IS_ARTIFICIAL (link->ref)
? -1 : DF_REF_INSN_UID (link->ref));
}
fprintf (file, "}");
}
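/* For example, a use-def chain with two reaching defs might be dumped
as "{ d2(bb 3 insn 15) d7(bb 5 insn -1) }", where 'd', 'u' and 'e'
mark defs, uses and note uses respectively, and an insn of -1 denotes
an artificial ref. The ids, block and insn numbers here are invented
for illustration. */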
/* Print some basic block info as part of df_dump. */
void
df_print_bb_index (basic_block bb, FILE *file)
{
edge e;
edge_iterator ei;
fprintf (file, "\n( ");
FOR_EACH_EDGE (e, ei, bb->preds)
{
basic_block pred = e->src;
fprintf (file, "%d%s ", pred->index, e->flags & EDGE_EH ? "(EH)" : "");
}
fprintf (file, ")->[%d]->( ", bb->index);
FOR_EACH_EDGE (e, ei, bb->succs)
{
basic_block succ = e->dest;
fprintf (file, "%d%s ", succ->index, e->flags & EDGE_EH ? "(EH)" : "");
}
fprintf (file, ")\n");
}
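/* For example, a block 4 with predecessors 2 and 3 (3 reached via an
EH edge) and successor 5 is printed as "( 2 3(EH) )->[4]->( 5 )".
Block numbers invented for illustration. */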
/*----------------------------------------------------------------------------
REACHING DEFINITIONS
Find the locations in the function where each definition site for a
pseudo reaches. In and out bitvectors are built for each basic
block. The id field in the ref is used to index into these sets.
See df.h for details.
If the DF_RD_PRUNE_DEAD_DEFS changeable flag is set, only DEFs reaching
existing uses are included in the global reaching DEFs set, or in other
words only DEFs that are still live. This is a kind of pruned version
of the traditional reaching definitions problem that is much less
complex to compute and produces enough information to compute UD-chains.
In this context, live must be interpreted in the DF_LR sense: uses that
are upward exposed but may not be initialized on all paths through the
CFG. For a USE that is not reached by a DEF on all paths, we still want
to make the DEFs that do reach the USE visible, and pruning based on
DF_LIVE would make that impossible.
----------------------------------------------------------------------------*/
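/* A minimal usage sketch (not taken from a real pass): a client that
only needs UD-chains would typically do

  df_set_flags (DF_RD_PRUNE_DEAD_DEFS);
  df_chain_add_problem (DF_UD_CHAIN);
  df_analyze ();

and then walk DF_REF_CHAIN of each use; the chain problem pulls in
DF_RD as its dependent problem. */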
/* This problem plays a large number of games for the sake of
efficiency.
1) The order of the bits in the bitvectors. After the scanning
phase, all of the defs are sorted. All of the defs for reg 0 come
first, followed by all of the defs for reg 1, and so on.
2) There are two kill sets, one used when the number of defs for a reg
is less than or equal to DF_SPARSE_THRESHOLD and another used when the
number of defs is greater.
<= : Data is built directly in the kill set.
> : One level of indirection is used to keep from generating long
strings of 1 bits in the kill sets. Bitvectors that are indexed
by the regnum are used to represent that there is a killing def
for the register. The confluence and transfer functions use
these along with the bitmap_clear_range call to remove ranges of
bits without actually generating a knockout vector.
The kill/sparse_kill pair and the
dense_invalidated_by_call/sparse_invalidated_by_call pair both play
this game. */
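/* For example (numbers invented): if reg 42 has 100 defs with ids
500..599, a block that kills it sets bit 42 in sparse_kill rather than
setting bits 500..599 in kill; a reg with only 3 defs gets the
corresponding 3-bit range set directly in kill. */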
/* Private data used to compute the solution for this problem. These
data structures are not accessible outside of this module. */
struct df_rd_problem_data
{
/* The regnos whose defs are invalidated by a call, for regs with more
than DF_SPARSE_THRESHOLD defs. */
bitmap_head sparse_invalidated_by_call;
/* The ids of the defs invalidated by a call, for the remaining regs. */
bitmap_head dense_invalidated_by_call;
/* An obstack for the bitmaps we need for this problem. */
bitmap_obstack rd_bitmaps;
};
/* Free basic block info. */
static void
df_rd_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
void *vbb_info)
{
struct df_rd_bb_info *bb_info = (struct df_rd_bb_info *) vbb_info;
if (bb_info)
{
bitmap_clear (&bb_info->kill);
bitmap_clear (&bb_info->sparse_kill);
bitmap_clear (&bb_info->gen);
bitmap_clear (&bb_info->in);
bitmap_clear (&bb_info->out);
}
}
/* Allocate or reset bitmaps for DF_RD blocks. The solution bits are
not touched unless the block is new. */
static void
df_rd_alloc (bitmap all_blocks)
{
unsigned int bb_index;
bitmap_iterator bi;
struct df_rd_problem_data *problem_data;
if (df_rd->problem_data)
{
problem_data = (struct df_rd_problem_data *) df_rd->problem_data;
bitmap_clear (&problem_data->sparse_invalidated_by_call);
bitmap_clear (&problem_data->dense_invalidated_by_call);
}
else
{
problem_data = XNEW (struct df_rd_problem_data);
df_rd->problem_data = problem_data;
bitmap_obstack_initialize (&problem_data->rd_bitmaps);
bitmap_initialize (&problem_data->sparse_invalidated_by_call,
&problem_data->rd_bitmaps);
bitmap_initialize (&problem_data->dense_invalidated_by_call,
&problem_data->rd_bitmaps);
}
df_grow_bb_info (df_rd);
/* Because of the clustering of all use sites for the same pseudo,
we have to process all of the blocks before doing the analysis. */
EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
{
struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
/* When bitmaps are already initialized, just clear them. */
if (bb_info->kill.obstack)
{
bitmap_clear (&bb_info->kill);
bitmap_clear (&bb_info->sparse_kill);
bitmap_clear (&bb_info->gen);
}
else
{
bitmap_initialize (&bb_info->kill, &problem_data->rd_bitmaps);
bitmap_initialize (&bb_info->sparse_kill, &problem_data->rd_bitmaps);
bitmap_initialize (&bb_info->gen, &problem_data->rd_bitmaps);
bitmap_initialize (&bb_info->in, &problem_data->rd_bitmaps);
bitmap_initialize (&bb_info->out, &problem_data->rd_bitmaps);
}
}
df_rd->optional_p = true;
}
/* Add the effect of the top artificial defs of BB to the reaching definitions
bitmap LOCAL_RD. */
void
df_rd_simulate_artificial_defs_at_top (basic_block bb, bitmap local_rd)
{
int bb_index = bb->index;
df_ref def;
FOR_EACH_ARTIFICIAL_DEF (def, bb_index)
if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
{
unsigned int dregno = DF_REF_REGNO (def);
if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
bitmap_clear_range (local_rd,
DF_DEFS_BEGIN (dregno),
DF_DEFS_COUNT (dregno));
bitmap_set_bit (local_rd, DF_REF_ID (def));
}
}
/* Add the effect of the defs of INSN to the reaching definitions bitmap
LOCAL_RD. */
void
df_rd_simulate_one_insn (basic_block bb ATTRIBUTE_UNUSED, rtx_insn *insn,
bitmap local_rd)
{
df_ref def;
FOR_EACH_INSN_DEF (def, insn)
{
unsigned int dregno = DF_REF_REGNO (def);
if ((!(df->changeable_flags & DF_NO_HARD_REGS))
|| (dregno >= FIRST_PSEUDO_REGISTER))
{
if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
bitmap_clear_range (local_rd,
DF_DEFS_BEGIN (dregno),
DF_DEFS_COUNT (dregno));
if (!(DF_REF_FLAGS (def)
& (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER)))
bitmap_set_bit (local_rd, DF_REF_ID (def));
}
}
}
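/* A minimal sketch of how these simulation routines are meant to be
used (assuming the DF_RD problem has already been computed for the
current function):

  bitmap_copy (local_rd, &df_rd_get_bb_info (bb->index)->in);
  df_rd_simulate_artificial_defs_at_top (bb, local_rd);
  FOR_BB_INSNS (bb, insn)
    if (INSN_P (insn))
      df_rd_simulate_one_insn (bb, insn, local_rd);

This keeps LOCAL_RD equal to the set of defs reaching the current
point as the walk moves forward through the block. */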
/* Process a list of DEFs for df_rd_bb_local_compute. This is a bit
more complicated than just simulating, because we must produce the
gen and kill sets and hence deal with the two possible representations
of kill sets. */
static void
df_rd_bb_local_compute_process_def (struct df_rd_bb_info *bb_info,
df_ref def,
int top_flag)
{
for (; def; def = DF_REF_NEXT_LOC (def))
{
if (top_flag == (DF_REF_FLAGS (def) & DF_REF_AT_TOP))
{
unsigned int regno = DF_REF_REGNO (def);
unsigned int begin = DF_DEFS_BEGIN (regno);
unsigned int n_defs = DF_DEFS_COUNT (regno);
if ((!(df->changeable_flags & DF_NO_HARD_REGS))
|| (regno >= FIRST_PSEUDO_REGISTER))
{
/* Only the last def(s) for a regno in the block have any
effect. */
if (!bitmap_bit_p (&seen_in_block, regno))
{
/* The first def for regno in insn gets to knock out the
defs from other instructions. */
if ((!bitmap_bit_p (&seen_in_insn, regno))
/* If the def is to only part of the reg, it does
not kill the other defs that reach here. */
&& (!(DF_REF_FLAGS (def) &
(DF_REF_PARTIAL | DF_REF_CONDITIONAL | DF_REF_MAY_CLOBBER))))
{
if (n_defs > DF_SPARSE_THRESHOLD)
{
bitmap_set_bit (&bb_info->sparse_kill, regno);
bitmap_clear_range (&bb_info->gen, begin, n_defs);
}
else
{
bitmap_set_range (&bb_info->kill, begin, n_defs);
bitmap_clear_range (&bb_info->gen, begin, n_defs);
}
}
bitmap_set_bit (&seen_in_insn, regno);
/* All defs for regno in the instruction may be put into
the gen set. */
if (!(DF_REF_FLAGS (def)
& (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER)))
bitmap_set_bit (&bb_info->gen, DF_REF_ID (def));
}
}
}
}
}
/* Compute local reaching def info for basic block BB. */
static void
df_rd_bb_local_compute (unsigned int bb_index)
{
basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
rtx_insn *insn;
bitmap_clear (&seen_in_block);
bitmap_clear (&seen_in_insn);
/* Artificial defs are only hard regs. */
if (!(df->changeable_flags & DF_NO_HARD_REGS))
df_rd_bb_local_compute_process_def (bb_info,
df_get_artificial_defs (bb_index),
0);
FOR_BB_INSNS_REVERSE (bb, insn)
{
unsigned int uid = INSN_UID (insn);
if (!INSN_P (insn))
continue;
df_rd_bb_local_compute_process_def (bb_info,
DF_INSN_UID_DEFS (uid), 0);
/* This complex dance with the two bitmaps is required because
instructions can assign twice to the same pseudo. This
generally happens with calls that will have one def for the
result and another def for the clobber. If only one vector
is used and the clobber goes first, the result will be
lost. */
bitmap_ior_into (&seen_in_block, &seen_in_insn);
bitmap_clear (&seen_in_insn);
}
/* Process the artificial defs at the top of the block last since we
are going backwards through the block and these are logically at
the start. */
if (!(df->changeable_flags & DF_NO_HARD_REGS))
df_rd_bb_local_compute_process_def (bb_info,
df_get_artificial_defs (bb_index),
DF_REF_AT_TOP);
}
/* Compute local reaching def info for each basic block within BLOCKS. */
static void
df_rd_local_compute (bitmap all_blocks)
{
unsigned int bb_index;
bitmap_iterator bi;
unsigned int regno;
struct df_rd_problem_data *problem_data
= (struct df_rd_problem_data *) df_rd->problem_data;
bitmap sparse_invalidated = &problem_data->sparse_invalidated_by_call;
bitmap dense_invalidated = &problem_data->dense_invalidated_by_call;
bitmap_initialize (&seen_in_block, &df_bitmap_obstack);
bitmap_initialize (&seen_in_insn, &df_bitmap_obstack);
df_maybe_reorganize_def_refs (DF_REF_ORDER_BY_REG);
EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
{
df_rd_bb_local_compute (bb_index);
}
/* Set up the knockout bit vectors to be applied across EH edges. */
EXECUTE_IF_SET_IN_BITMAP (regs_invalidated_by_call_regset, 0, regno, bi)
{
if (! HARD_REGISTER_NUM_P (regno)
|| !(df->changeable_flags & DF_NO_HARD_REGS))
{
if (DF_DEFS_COUNT (regno) > DF_SPARSE_THRESHOLD)
bitmap_set_bit (sparse_invalidated, regno);
else
bitmap_set_range (dense_invalidated,
DF_DEFS_BEGIN (regno),
DF_DEFS_COUNT (regno));
}
}
bitmap_release (&seen_in_block);
bitmap_release (&seen_in_insn);
}
/* Initialize the solution bit vectors for problem. */
static void
df_rd_init_solution (bitmap all_blocks)
{
unsigned int bb_index;
bitmap_iterator bi;
EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
{
struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
bitmap_copy (&bb_info->out, &bb_info->gen);
bitmap_clear (&bb_info->in);
}
}
/* The IN set of the target is ORed with the OUT set of the source. */
static bool
df_rd_confluence_n (edge e)
{
bitmap op1 = &df_rd_get_bb_info (e->dest->index)->in;
bitmap op2 = &df_rd_get_bb_info (e->src->index)->out;
bool changed = false;
if (e->flags & EDGE_FAKE)
return false;
if (e->flags & EDGE_EH)
{
struct df_rd_problem_data *problem_data
= (struct df_rd_problem_data *) df_rd->problem_data;
bitmap sparse_invalidated = &problem_data->sparse_invalidated_by_call;
bitmap dense_invalidated = &problem_data->dense_invalidated_by_call;
bitmap_iterator bi;
unsigned int regno;
auto_bitmap tmp (&df_bitmap_obstack);
bitmap_and_compl (tmp, op2, dense_invalidated);
EXECUTE_IF_SET_IN_BITMAP (sparse_invalidated, 0, regno, bi)
{
bitmap_clear_range (tmp,
DF_DEFS_BEGIN (regno),
DF_DEFS_COUNT (regno));
}
changed |= bitmap_ior_into (op1, tmp);
return changed;
}
else
return bitmap_ior_into (op1, op2);
}
/* Transfer function. */
static bool
df_rd_transfer_function (int bb_index)
{
struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
unsigned int regno;
bitmap_iterator bi;
bitmap in = &bb_info->in;
bitmap out = &bb_info->out;
bitmap gen = &bb_info->gen;
bitmap kill = &bb_info->kill;
bitmap sparse_kill = &bb_info->sparse_kill;
bool changed = false;
if (bitmap_empty_p (sparse_kill))
changed = bitmap_ior_and_compl (out, gen, in, kill);
else
{
struct df_rd_problem_data *problem_data;
bitmap_head tmp;
/* Note that TMP is _not_ a temporary bitmap if we end up replacing
OUT with TMP. Therefore, allocate TMP in the RD bitmaps obstack. */
problem_data = (struct df_rd_problem_data *) df_rd->problem_data;
bitmap_initialize (&tmp, &problem_data->rd_bitmaps);
bitmap_and_compl (&tmp, in, kill);
EXECUTE_IF_SET_IN_BITMAP (sparse_kill, 0, regno, bi)
{
bitmap_clear_range (&tmp,
DF_DEFS_BEGIN (regno),
DF_DEFS_COUNT (regno));
}
bitmap_ior_into (&tmp, gen);
changed = !bitmap_equal_p (&tmp, out);
if (changed)
bitmap_move (out, &tmp);
else
bitmap_clear (&tmp);
}
if (df->changeable_flags & DF_RD_PRUNE_DEAD_DEFS)
{
/* Create a mask of DEFs for all registers live at the end of this
basic block, and mask out DEFs of registers that are not live.
Computing the mask looks costly, but the benefit of the pruning
outweighs the cost. */
struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
bitmap regs_live_out = &df_lr_get_bb_info (bb_index)->out;
bitmap live_defs = BITMAP_ALLOC (&df_bitmap_obstack);
unsigned int regno;
bitmap_iterator bi;
EXECUTE_IF_SET_IN_BITMAP (regs_live_out, 0, regno, bi)
bitmap_set_range (live_defs,
DF_DEFS_BEGIN (regno),
DF_DEFS_COUNT (regno));
changed |= bitmap_and_into (&bb_info->out, live_defs);
BITMAP_FREE (live_defs);
}
return changed;
}
/* Free all storage associated with the problem. */
static void
df_rd_free (void)
{
struct df_rd_problem_data *problem_data
= (struct df_rd_problem_data *) df_rd->problem_data;
if (problem_data)
{
bitmap_obstack_release (&problem_data->rd_bitmaps);
df_rd->block_info_size = 0;
free (df_rd->block_info);
df_rd->block_info = NULL;
free (df_rd->problem_data);
}
free (df_rd);
}
/* Debugging info. */
static void
df_rd_start_dump (FILE *file)
{
struct df_rd_problem_data *problem_data
= (struct df_rd_problem_data *) df_rd->problem_data;
unsigned int m = DF_REG_SIZE (df);
unsigned int regno;
if (!df_rd->block_info)
return;
fprintf (file, ";; Reaching defs:\n");
fprintf (file, ";; sparse invalidated \t");
dump_bitmap (file, &problem_data->sparse_invalidated_by_call);
fprintf (file, ";; dense invalidated \t");
dump_bitmap (file, &problem_data->dense_invalidated_by_call);
fprintf (file, ";; reg->defs[] map:\t");
for (regno = 0; regno < m; regno++)
if (DF_DEFS_COUNT (regno))
fprintf (file, "%d[%d,%d] ", regno,
DF_DEFS_BEGIN (regno),
DF_DEFS_BEGIN (regno) + DF_DEFS_COUNT (regno) - 1);
fprintf (file, "\n");
}
static void
df_rd_dump_defs_set (bitmap defs_set, const char *prefix, FILE *file)
{
bitmap_head tmp;
unsigned int regno;
unsigned int m = DF_REG_SIZE (df);
bool first_reg = true;
fprintf (file, "%s\t(%d) ", prefix, (int) bitmap_count_bits (defs_set));
bitmap_initialize (&tmp, &df_bitmap_obstack);
for (regno = 0; regno < m; regno++)
{
if (HARD_REGISTER_NUM_P (regno)
&& (df->changeable_flags & DF_NO_HARD_REGS))
continue;
bitmap_set_range (&tmp, DF_DEFS_BEGIN (regno), DF_DEFS_COUNT (regno));
bitmap_and_into (&tmp, defs_set);
if (! bitmap_empty_p (&tmp))
{
bitmap_iterator bi;
unsigned int ix;
bool first_def = true;
if (! first_reg)
fprintf (file, ",");
first_reg = false;
fprintf (file, "%u[", regno);
EXECUTE_IF_SET_IN_BITMAP (&tmp, 0, ix, bi)
{
fprintf (file, "%s%u", first_def ? "" : ",", ix);
first_def = false;
}
fprintf (file, "]");
}
bitmap_clear (&tmp);
}
fprintf (file, "\n");
bitmap_clear (&tmp);
}
/* Debugging info at top of bb. */
static void
df_rd_top_dump (basic_block bb, FILE *file)
{
struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb->index);
if (!bb_info)
return;
df_rd_dump_defs_set (&bb_info->in, ";; rd in ", file);
df_rd_dump_defs_set (&bb_info->gen, ";; rd gen ", file);
df_rd_dump_defs_set (&bb_info->kill, ";; rd kill", file);
}
/* Debugging info at bottom of bb. */
static void
df_rd_bottom_dump (basic_block bb, FILE *file)
{
struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb->index);
if (!bb_info)
return;
df_rd_dump_defs_set (&bb_info->out, ";; rd out ", file);
}
/* All of the information associated with every instance of the problem. */
static const struct df_problem problem_RD =
{
DF_RD, /* Problem id. */
DF_FORWARD, /* Direction. */
df_rd_alloc, /* Allocate the problem specific data. */
NULL, /* Reset global information. */
df_rd_free_bb_info, /* Free basic block info. */
df_rd_local_compute, /* Local compute function. */
df_rd_init_solution, /* Init the solution specific data. */
df_worklist_dataflow, /* Worklist solver. */
NULL, /* Confluence operator 0. */
df_rd_confluence_n, /* Confluence operator n. */
df_rd_transfer_function, /* Transfer function. */
NULL, /* Finalize function. */
df_rd_free, /* Free all of the problem information. */
df_rd_free, /* Remove this problem from the stack of dataflow problems. */
df_rd_start_dump, /* Debugging. */
df_rd_top_dump, /* Debugging start block. */
df_rd_bottom_dump, /* Debugging end block. */
NULL, /* Debugging start insn. */
NULL, /* Debugging end insn. */
NULL, /* Incremental solution verify start. */
NULL, /* Incremental solution verify end. */
NULL, /* Dependent problem. */
sizeof (struct df_rd_bb_info),/* Size of entry of block_info array. */
TV_DF_RD, /* Timing variable. */
true /* Reset blocks on dropping out of blocks_to_analyze. */
};
/* Create a new RD instance and add it to the existing instance
of DF. */
void
df_rd_add_problem (void)
{
df_add_problem (&problem_RD);
}
/*----------------------------------------------------------------------------
LIVE REGISTERS
Find the locations in the function that any use of a pseudo can reach
in the backward direction (classic backward liveness). In and out
bitvectors are built for each basic block. The regno is used to index
into these sets.
See df.h for details.
----------------------------------------------------------------------------*/
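/* The dataflow equations solved here are the classic liveness ones
(a sketch, in set notation rather than code):

  IN(bb)  = USE(bb) U (OUT(bb) - DEF(bb))
  OUT(bb) = union over successors S of IN(S)

plus the always-live hard registers and the EH-edge adjustments
applied by the confluence functions below. */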
/* Private data used to verify the solution for this problem. */
struct df_lr_problem_data
{
bitmap_head *in;
bitmap_head *out;
/* An obstack for the bitmaps we need for this problem. */
bitmap_obstack lr_bitmaps;
};
/* Free basic block info. */
static void
df_lr_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
void *vbb_info)
{
struct df_lr_bb_info *bb_info = (struct df_lr_bb_info *) vbb_info;
if (bb_info)
{
bitmap_clear (&bb_info->use);
bitmap_clear (&bb_info->def);
bitmap_clear (&bb_info->in);
bitmap_clear (&bb_info->out);
}
}
/* Allocate or reset bitmaps for DF_LR blocks. The solution bits are
not touched unless the block is new. */
static void
df_lr_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
{
unsigned int bb_index;
bitmap_iterator bi;
struct df_lr_problem_data *problem_data;
df_grow_bb_info (df_lr);
if (df_lr->problem_data)
problem_data = (struct df_lr_problem_data *) df_lr->problem_data;
else
{
problem_data = XNEW (struct df_lr_problem_data);
df_lr->problem_data = problem_data;
problem_data->out = NULL;
problem_data->in = NULL;
bitmap_obstack_initialize (&problem_data->lr_bitmaps);
}
EXECUTE_IF_SET_IN_BITMAP (df_lr->out_of_date_transfer_functions, 0, bb_index, bi)
{
struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
/* When bitmaps are already initialized, just clear them. */
if (bb_info->use.obstack)
{
bitmap_clear (&bb_info->def);
bitmap_clear (&bb_info->use);
}
else
{
bitmap_initialize (&bb_info->use, &problem_data->lr_bitmaps);
bitmap_initialize (&bb_info->def, &problem_data->lr_bitmaps);
bitmap_initialize (&bb_info->in, &problem_data->lr_bitmaps);
bitmap_initialize (&bb_info->out, &problem_data->lr_bitmaps);
}
}
df_lr->optional_p = false;
}
/* Reset the global solution for recalculation. */
static void
df_lr_reset (bitmap all_blocks)
{
unsigned int bb_index;
bitmap_iterator bi;
EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
{
struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
gcc_assert (bb_info);
bitmap_clear (&bb_info->in);
bitmap_clear (&bb_info->out);
}
}
/* Compute local live register info for basic block BB. */
static void
df_lr_bb_local_compute (unsigned int bb_index)
{
basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
rtx_insn *insn;
df_ref def, use;
/* Process the registers set in an exception handler. */
FOR_EACH_ARTIFICIAL_DEF (def, bb_index)
if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
{
unsigned int dregno = DF_REF_REGNO (def);
bitmap_set_bit (&bb_info->def, dregno);
bitmap_clear_bit (&bb_info->use, dregno);
}
/* Process the hardware registers that are always live. */
FOR_EACH_ARTIFICIAL_USE (use, bb_index)
/* Add use to set of uses in this BB. */
if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
bitmap_set_bit (&bb_info->use, DF_REF_REGNO (use));
FOR_BB_INSNS_REVERSE (bb, insn)
{
if (!NONDEBUG_INSN_P (insn))
continue;
df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
FOR_EACH_INSN_INFO_DEF (def, insn_info)
/* If the def is to only part of the reg, it does
not kill the other defs that reach here. */
if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
{
unsigned int dregno = DF_REF_REGNO (def);
bitmap_set_bit (&bb_info->def, dregno);
bitmap_clear_bit (&bb_info->use, dregno);
}
FOR_EACH_INSN_INFO_USE (use, insn_info)
/* Add use to set of uses in this BB. */
bitmap_set_bit (&bb_info->use, DF_REF_REGNO (use));
}
/* Process the registers set in an exception handler or the hard
frame pointer if this block is the target of a non local
goto. */
FOR_EACH_ARTIFICIAL_DEF (def, bb_index)
if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
{
unsigned int dregno = DF_REF_REGNO (def);
bitmap_set_bit (&bb_info->def, dregno);
bitmap_clear_bit (&bb_info->use, dregno);
}
#ifdef EH_USES
/* Process the uses that are live into an exception handler. */
FOR_EACH_ARTIFICIAL_USE (use, bb_index)
/* Add use to set of uses in this BB. */
if (DF_REF_FLAGS (use) & DF_REF_AT_TOP)
bitmap_set_bit (&bb_info->use, DF_REF_REGNO (use));
#endif
/* If the df_live problem is not defined, such as at -O0 and -O1, we
still need to keep the luids up to date. This is normally done
in the df_live problem since this problem has a forwards
scan. */
if (!df_live)
df_recompute_luids (bb);
}
/* Compute local live register info for each basic block within BLOCKS. */
static void
df_lr_local_compute (bitmap all_blocks ATTRIBUTE_UNUSED)
{
unsigned int bb_index, i;
bitmap_iterator bi;
bitmap_clear (&df->hardware_regs_used);
/* The all-important stack pointer must always be live. */
bitmap_set_bit (&df->hardware_regs_used, STACK_POINTER_REGNUM);
/* Global regs are always live, too. */
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
if (global_regs[i])
bitmap_set_bit (&df->hardware_regs_used, i);
/* Before reload, there are a few registers that must be forced
live everywhere -- which might not already be the case for
blocks within infinite loops. */
if (!reload_completed)
{
unsigned int pic_offset_table_regnum = PIC_OFFSET_TABLE_REGNUM;
/* Any reference to any pseudo before reload is a potential
reference to the frame pointer. */
bitmap_set_bit (&df->hardware_regs_used, FRAME_POINTER_REGNUM);
/* Pseudos with argument area equivalences may require
reloading via the argument pointer. */
if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
&& fixed_regs[ARG_POINTER_REGNUM])
bitmap_set_bit (&df->hardware_regs_used, ARG_POINTER_REGNUM);
/* Any constant, or pseudo with constant equivalences, may
require reloading from memory using the pic register. */
if (pic_offset_table_regnum != INVALID_REGNUM
&& fixed_regs[pic_offset_table_regnum])
bitmap_set_bit (&df->hardware_regs_used, pic_offset_table_regnum);
}
EXECUTE_IF_SET_IN_BITMAP (df_lr->out_of_date_transfer_functions, 0, bb_index, bi)
{
if (bb_index == EXIT_BLOCK)
{
/* The exit block is special for this problem and its bits are
computed from thin air. */
struct df_lr_bb_info *bb_info = df_lr_get_bb_info (EXIT_BLOCK);
bitmap_copy (&bb_info->use, df->exit_block_uses);
}
else
df_lr_bb_local_compute (bb_index);
}
bitmap_clear (df_lr->out_of_date_transfer_functions);
}
/* Initialize the solution vectors. */
static void
df_lr_init (bitmap all_blocks)
{
unsigned int bb_index;
bitmap_iterator bi;
EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
{
struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
bitmap_copy (&bb_info->in, &bb_info->use);
bitmap_clear (&bb_info->out);
}
}
/* Confluence function that handles blocks with no successors, such as
blocks inside infinite loops. Such a loop might be in a noreturn
function that throws; and even if it is not, getting the unwind info
right helps debugging. */
static void
df_lr_confluence_0 (basic_block bb)
{
bitmap op1 = &df_lr_get_bb_info (bb->index)->out;
if (bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
bitmap_copy (op1, &df->hardware_regs_used);
}
/* Confluence function that ignores fake edges. */
static bool
df_lr_confluence_n (edge e)
{
bitmap op1 = &df_lr_get_bb_info (e->src->index)->out;
bitmap op2 = &df_lr_get_bb_info (e->dest->index)->in;
bool changed = false;
/* Call-clobbered registers die across exception and call edges. */
/* ??? Abnormal call edges ignored for the moment, as this gets
confused by sibling call edges, which crashes reg-stack. */
if (e->flags & EDGE_EH)
changed = bitmap_ior_and_compl_into (op1, op2, regs_invalidated_by_call_regset);
else
changed = bitmap_ior_into (op1, op2);
changed |= bitmap_ior_into (op1, &df->hardware_regs_used);
return changed;
}
/* Transfer function. */
static bool
df_lr_transfer_function (int bb_index)
{
struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
bitmap in = &bb_info->in;
bitmap out = &bb_info->out;
bitmap use = &bb_info->use;
bitmap def = &bb_info->def;
return bitmap_ior_and_compl (in, use, out, def);
}
/* Run the fast dce as a side effect of building LR. */
static void
df_lr_finalize (bitmap all_blocks)
{
df_lr->solutions_dirty = false;
if (df->changeable_flags & DF_LR_RUN_DCE)
{
run_fast_df_dce ();
/* If dce deletes some instructions, we need to recompute the lr
solution before proceeding further. The problem is that fast
dce is a pessimistic dataflow algorithm. In the case where
it deletes a statement S inside of a loop, the uses inside of
S may not be deleted from the dataflow solution because they
were carried around the loop. While it is conservatively
correct to leave these extra bits, the standards of df
require that we maintain the best possible (least fixed
point) solution. The only way to do that is to redo the
iteration from the beginning. See PR35805 for an
example. */
if (df_lr->solutions_dirty)
{
df_clear_flags (DF_LR_RUN_DCE);
df_lr_alloc (all_blocks);
df_lr_local_compute (all_blocks);
df_worklist_dataflow (df_lr, all_blocks, df->postorder, df->n_blocks);
df_lr_finalize (all_blocks);
df_set_flags (DF_LR_RUN_DCE);
}
}
}
/* Free all storage associated with the problem. */
static void
df_lr_free (void)
{
struct df_lr_problem_data *problem_data
= (struct df_lr_problem_data *) df_lr->problem_data;
if (df_lr->block_info)
{
df_lr->block_info_size = 0;
free (df_lr->block_info);
df_lr->block_info = NULL;
bitmap_obstack_release (&problem_data->lr_bitmaps);
free (df_lr->problem_data);
df_lr->problem_data = NULL;
}
BITMAP_FREE (df_lr->out_of_date_transfer_functions);
free (df_lr);
}
/* Debugging info at top of bb. */
static void
df_lr_top_dump (basic_block bb, FILE *file)
{
struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb->index);
struct df_lr_problem_data *problem_data;
if (!bb_info)
return;
fprintf (file, ";; lr in \t");
df_print_regset (file, &bb_info->in);
if (df_lr->problem_data)
{
problem_data = (struct df_lr_problem_data *)df_lr->problem_data;
if (problem_data->in)
{
fprintf (file, ";; old in \t");
df_print_regset (file, &problem_data->in[bb->index]);
}
}
fprintf (file, ";; lr use \t");
df_print_regset (file, &bb_info->use);
fprintf (file, ";; lr def \t");
df_print_regset (file, &bb_info->def);
}
/* Debugging info at bottom of bb. */
static void
df_lr_bottom_dump (basic_block bb, FILE *file)
{
struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb->index);
struct df_lr_problem_data *problem_data;
if (!bb_info)
return;
fprintf (file, ";; lr out \t");
df_print_regset (file, &bb_info->out);
if (df_lr->problem_data)
{
problem_data = (struct df_lr_problem_data *)df_lr->problem_data;
if (problem_data->out)
{
fprintf (file, ";; old out \t");
df_print_regset (file, &problem_data->out[bb->index]);
}
}
}
/* Build the datastructure to verify that the solution to the dataflow
equations is not dirty. */
static void
df_lr_verify_solution_start (void)
{
basic_block bb;
struct df_lr_problem_data *problem_data;
if (df_lr->solutions_dirty)
return;
/* Set it true so that the solution is recomputed. */
df_lr->solutions_dirty = true;
problem_data = (struct df_lr_problem_data *)df_lr->problem_data;
problem_data->in = XNEWVEC (bitmap_head, last_basic_block_for_fn (cfun));
problem_data->out = XNEWVEC (bitmap_head, last_basic_block_for_fn (cfun));
FOR_ALL_BB_FN (bb, cfun)
{
bitmap_initialize (&problem_data->in[bb->index], &problem_data->lr_bitmaps);
bitmap_initialize (&problem_data->out[bb->index], &problem_data->lr_bitmaps);
bitmap_copy (&problem_data->in[bb->index], DF_LR_IN (bb));
bitmap_copy (&problem_data->out[bb->index], DF_LR_OUT (bb));
}
}
/* Compare the saved datastructure and the new solution to the dataflow
equations. */
static void
df_lr_verify_solution_end (void)
{
struct df_lr_problem_data *problem_data;
basic_block bb;
problem_data = (struct df_lr_problem_data *)df_lr->problem_data;
if (!problem_data->out)
return;
if (df_lr->solutions_dirty)
/* Do not check if the solution is still dirty. See the comment
in df_lr_finalize for details. */
df_lr->solutions_dirty = false;
else
FOR_ALL_BB_FN (bb, cfun)
{
if ((!bitmap_equal_p (&problem_data->in[bb->index], DF_LR_IN (bb)))
|| (!bitmap_equal_p (&problem_data->out[bb->index], DF_LR_OUT (bb))))
{
/*df_dump (stderr);*/
gcc_unreachable ();
}
}
/* Cannot delete them immediately because you may want to dump them
if the comparison fails. */
FOR_ALL_BB_FN (bb, cfun)
{
bitmap_clear (&problem_data->in[bb->index]);
bitmap_clear (&problem_data->out[bb->index]);
}
free (problem_data->in);
free (problem_data->out);
problem_data->in = NULL;
problem_data->out = NULL;
}
/* All of the information associated with every instance of the problem. */
static const struct df_problem problem_LR =
{
DF_LR, /* Problem id. */
DF_BACKWARD, /* Direction. */
df_lr_alloc, /* Allocate the problem specific data. */
df_lr_reset, /* Reset global information. */
df_lr_free_bb_info, /* Free basic block info. */
df_lr_local_compute, /* Local compute function. */
df_lr_init, /* Init the solution specific data. */
df_worklist_dataflow, /* Worklist solver. */
df_lr_confluence_0, /* Confluence operator 0. */
df_lr_confluence_n, /* Confluence operator n. */
df_lr_transfer_function, /* Transfer function. */
df_lr_finalize, /* Finalize function. */
df_lr_free, /* Free all of the problem information. */
NULL, /* Remove this problem from the stack of dataflow problems. */
NULL, /* Debugging. */
df_lr_top_dump, /* Debugging start block. */
df_lr_bottom_dump, /* Debugging end block. */
NULL, /* Debugging start insn. */
NULL, /* Debugging end insn. */
df_lr_verify_solution_start,/* Incremental solution verify start. */
df_lr_verify_solution_end, /* Incremental solution verify end. */
NULL, /* Dependent problem. */
sizeof (struct df_lr_bb_info),/* Size of entry of block_info array. */
TV_DF_LR, /* Timing variable. */
false /* Reset blocks on dropping out of blocks_to_analyze. */
};
/* Create a new DATAFLOW instance and add it to an existing instance
of DF. The returned structure is what is used to get at the
solution. */
void
df_lr_add_problem (void)
{
df_add_problem (&problem_LR);
/* These will be initialized when df_scan_blocks processes each
block. */
df_lr->out_of_date_transfer_functions = BITMAP_ALLOC (&df_bitmap_obstack);
}
/* Verify that all of the lr related info is consistent and
correct. */
void
df_lr_verify_transfer_functions (void)
{
basic_block bb;
bitmap_head saved_def;
bitmap_head saved_use;
bitmap_head all_blocks;
if (!df)
return;
bitmap_initialize (&saved_def, &bitmap_default_obstack);
bitmap_initialize (&saved_use, &bitmap_default_obstack);
bitmap_initialize (&all_blocks, &bitmap_default_obstack);
FOR_ALL_BB_FN (bb, cfun)
{
struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb->index);
bitmap_set_bit (&all_blocks, bb->index);
if (bb_info)
{
/* Make a copy of the transfer functions and then compute
new ones to see if the transfer functions have
changed. */
if (!bitmap_bit_p (df_lr->out_of_date_transfer_functions,
bb->index))
{
bitmap_copy (&saved_def, &bb_info->def);
bitmap_copy (&saved_use, &bb_info->use);
bitmap_clear (&bb_info->def);
bitmap_clear (&bb_info->use);
df_lr_bb_local_compute (bb->index);
gcc_assert (bitmap_equal_p (&saved_def, &bb_info->def));
gcc_assert (bitmap_equal_p (&saved_use, &bb_info->use));
}
}
else
{
/* If we do not have basic block info, the block must be in
the list of dirty blocks or else someone has added a
block behind our backs. */
gcc_assert (bitmap_bit_p (df_lr->out_of_date_transfer_functions,
bb->index));
}
/* Make sure no one created a block without following
procedures. */
gcc_assert (df_scan_get_bb_info (bb->index));
}
/* Make sure there are no dirty bits in blocks that have been deleted. */
gcc_assert (!bitmap_intersect_compl_p (df_lr->out_of_date_transfer_functions,
&all_blocks));
bitmap_clear (&saved_def);
bitmap_clear (&saved_use);
bitmap_clear (&all_blocks);
}
/*----------------------------------------------------------------------------
LIVE AND MAY-INITIALIZED REGISTERS.
This problem first computes the IN and OUT bitvectors for the
may-initialized registers problem, which is a forward problem.
It gives the set of registers for which we MAY have an available
definition, i.e. for which there is an available definition on
at least one path from the entry block to the entry/exit of a
basic block. Sets generate a definition, while clobbers kill
a definition.
In and out bitvectors are built for each basic block and are indexed by
regnum (see df.h for details). The in and out bitvectors in struct
df_live_bb_info actually refer to the may-initialized problem.
Then, the in and out sets for the LIVE problem itself are computed.
These are the logical AND of the IN and OUT sets from the LR problem
and the may-initialized problem.
----------------------------------------------------------------------------*/
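/* In equation form (a sketch): the may-initialized problem computes

  OUT(bb) = GEN(bb) U (IN(bb) - KILL(bb))
  IN(bb)  = union over predecessors P of OUT(P)

and the final LIVE sets are DF_LIVE_IN = DF_LR_IN & may-init IN and
DF_LIVE_OUT = DF_LR_OUT & may-init OUT (see df_live_finalize). */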
/* Private data used to verify the solution for this problem. */
struct df_live_problem_data
{
bitmap_head *in;
bitmap_head *out;
/* An obstack for the bitmaps we need for this problem. */
bitmap_obstack live_bitmaps;
};
/* Scratch var used by transfer functions. This is used to implement
an optimization to reduce the amount of space used to compute the
combined lr and live analysis. */
static bitmap_head df_live_scratch;
/* Free basic block info. */
static void
df_live_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
void *vbb_info)
{
struct df_live_bb_info *bb_info = (struct df_live_bb_info *) vbb_info;
if (bb_info)
{
bitmap_clear (&bb_info->gen);
bitmap_clear (&bb_info->kill);
bitmap_clear (&bb_info->in);
bitmap_clear (&bb_info->out);
}
}
/* Allocate or reset bitmaps for DF_LIVE blocks. The solution bits are
not touched unless the block is new. */
static void
df_live_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
{
unsigned int bb_index;
bitmap_iterator bi;
struct df_live_problem_data *problem_data;
if (df_live->problem_data)
problem_data = (struct df_live_problem_data *) df_live->problem_data;
else
{
problem_data = XNEW (struct df_live_problem_data);
df_live->problem_data = problem_data;
problem_data->out = NULL;
problem_data->in = NULL;
bitmap_obstack_initialize (&problem_data->live_bitmaps);
bitmap_initialize (&df_live_scratch, &problem_data->live_bitmaps);
}
df_grow_bb_info (df_live);
EXECUTE_IF_SET_IN_BITMAP (df_live->out_of_date_transfer_functions, 0, bb_index, bi)
{
struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
/* When bitmaps are already initialized, just clear them. */
if (bb_info->kill.obstack)
{
bitmap_clear (&bb_info->kill);
bitmap_clear (&bb_info->gen);
}
else
{
bitmap_initialize (&bb_info->kill, &problem_data->live_bitmaps);
bitmap_initialize (&bb_info->gen, &problem_data->live_bitmaps);
bitmap_initialize (&bb_info->in, &problem_data->live_bitmaps);
bitmap_initialize (&bb_info->out, &problem_data->live_bitmaps);
}
}
df_live->optional_p = (optimize <= 1);
}
/* Reset the global solution for recalculation. */
static void
df_live_reset (bitmap all_blocks)
{
unsigned int bb_index;
bitmap_iterator bi;
EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
{
struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
gcc_assert (bb_info);
bitmap_clear (&bb_info->in);
bitmap_clear (&bb_info->out);
}
}
/* Compute local may-initialized register info for basic block BB. */
static void
df_live_bb_local_compute (unsigned int bb_index)
{
basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
rtx_insn *insn;
df_ref def;
int luid = 0;
FOR_BB_INSNS (bb, insn)
{
unsigned int uid = INSN_UID (insn);
struct df_insn_info *insn_info = DF_INSN_UID_GET (uid);
/* Inserting labels does not always trigger the incremental
rescanning. */
if (!insn_info)
{
gcc_assert (!INSN_P (insn));
insn_info = df_insn_create_insn_record (insn);
}
DF_INSN_INFO_LUID (insn_info) = luid;
if (!INSN_P (insn))
continue;
luid++;
FOR_EACH_INSN_INFO_DEF (def, insn_info)
{
unsigned int regno = DF_REF_REGNO (def);
if (DF_REF_FLAGS_IS_SET (def,
DF_REF_PARTIAL | DF_REF_CONDITIONAL))
/* All partial or conditional defs
seen are included in the gen set. */
bitmap_set_bit (&bb_info->gen, regno);
else if (DF_REF_FLAGS_IS_SET (def, DF_REF_MUST_CLOBBER))
/* Only must clobbers for the entire reg destroy the
value. */
bitmap_set_bit (&bb_info->kill, regno);
else if (! DF_REF_FLAGS_IS_SET (def, DF_REF_MAY_CLOBBER))
bitmap_set_bit (&bb_info->gen, regno);
}
}
FOR_EACH_ARTIFICIAL_DEF (def, bb_index)
bitmap_set_bit (&bb_info->gen, DF_REF_REGNO (def));
}
/* Compute local may-initialized register info. */
static void
df_live_local_compute (bitmap all_blocks ATTRIBUTE_UNUSED)
{
unsigned int bb_index;
bitmap_iterator bi;
df_grow_insn_info ();
EXECUTE_IF_SET_IN_BITMAP (df_live->out_of_date_transfer_functions,
0, bb_index, bi)
{
df_live_bb_local_compute (bb_index);
}
bitmap_clear (df_live->out_of_date_transfer_functions);
}
/* Initialize the solution vectors. */
static void
df_live_init (bitmap all_blocks)
{
unsigned int bb_index;
bitmap_iterator bi;
EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
{
struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
struct df_lr_bb_info *bb_lr_info = df_lr_get_bb_info (bb_index);
/* No register may reach a location where it is not used. Thus
we trim the result to the places where it is used. */
bitmap_and (&bb_info->out, &bb_info->gen, &bb_lr_info->out);
bitmap_clear (&bb_info->in);
}
}
/* Forward confluence function that ignores fake edges. */
static bool
df_live_confluence_n (edge e)
{
bitmap op1 = &df_live_get_bb_info (e->dest->index)->in;
bitmap op2 = &df_live_get_bb_info (e->src->index)->out;
if (e->flags & EDGE_FAKE)
return false;
return bitmap_ior_into (op1, op2);
}
/* Transfer function for the forwards may-initialized problem. */
static bool
df_live_transfer_function (int bb_index)
{
struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
struct df_lr_bb_info *bb_lr_info = df_lr_get_bb_info (bb_index);
bitmap in = &bb_info->in;
bitmap out = &bb_info->out;
bitmap gen = &bb_info->gen;
bitmap kill = &bb_info->kill;
/* We need to use a scratch set here so that the value returned from this
function invocation properly reflects whether the sets changed in a
significant way; i.e. not just because the lr set was anded in. */
bitmap_and (&df_live_scratch, gen, &bb_lr_info->out);
/* No register may reach a location where it is not used. Thus
we trim the result to the places where it is used. */
bitmap_and_into (in, &bb_lr_info->in);
return bitmap_ior_and_compl (out, &df_live_scratch, in, kill);
}
/* And the LR info with the may-initialized registers to produce the LIVE info. */
static void
df_live_finalize (bitmap all_blocks)
{
if (df_live->solutions_dirty)
{
bitmap_iterator bi;
unsigned int bb_index;
EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
{
struct df_lr_bb_info *bb_lr_info = df_lr_get_bb_info (bb_index);
struct df_live_bb_info *bb_live_info = df_live_get_bb_info (bb_index);
/* No register may reach a location where it is not used. Thus
we trim the result to the places where it is used. */
bitmap_and_into (&bb_live_info->in, &bb_lr_info->in);
bitmap_and_into (&bb_live_info->out, &bb_lr_info->out);
}
df_live->solutions_dirty = false;
}
}
/* Free all storage associated with the problem. */
static void
df_live_free (void)
{
struct df_live_problem_data *problem_data
= (struct df_live_problem_data *) df_live->problem_data;
if (df_live->block_info)
{
df_live->block_info_size = 0;
free (df_live->block_info);
df_live->block_info = NULL;
bitmap_release (&df_live_scratch);
bitmap_obstack_release (&problem_data->live_bitmaps);
free (problem_data);
df_live->problem_data = NULL;
}
BITMAP_FREE (df_live->out_of_date_transfer_functions);
free (df_live);
}
/* Debugging info at top of bb. */
static void
df_live_top_dump (basic_block bb, FILE *file)
{
struct df_live_bb_info *bb_info = df_live_get_bb_info (bb->index);
struct df_live_problem_data *problem_data;
if (!bb_info)
return;
fprintf (file, ";; live in \t");
df_print_regset (file, &bb_info->in);
if (df_live->problem_data)
{
problem_data = (struct df_live_problem_data *)df_live->problem_data;
if (problem_data->in)
{
fprintf (file, ";; old in \t");
df_print_regset (file, &problem_data->in[bb->index]);
}
}
fprintf (file, ";; live gen \t");
df_print_regset (file, &bb_info->gen);
fprintf (file, ";; live kill\t");
df_print_regset (file, &bb_info->kill);
}
/* Debugging info at bottom of bb. */
static void
df_live_bottom_dump (basic_block bb, FILE *file)
{
struct df_live_bb_info *bb_info = df_live_get_bb_info (bb->index);
struct df_live_problem_data *problem_data;
if (!bb_info)
return;
fprintf (file, ";; live out \t");
df_print_regset (file, &bb_info->out);
if (df_live->problem_data)
{
problem_data = (struct df_live_problem_data *)df_live->problem_data;
if (problem_data->out)
{
fprintf (file, ";; old out \t");
df_print_regset (file, &problem_data->out[bb->index]);
}
}
}
/* Build the datastructure to verify that the solution to the dataflow
equations is not dirty. */
static void
df_live_verify_solution_start (void)
{
basic_block bb;
struct df_live_problem_data *problem_data;
if (df_live->solutions_dirty)
return;
/* Set it true so that the solution is recomputed. */
df_live->solutions_dirty = true;
problem_data = (struct df_live_problem_data *)df_live->problem_data;
problem_data->in = XNEWVEC (bitmap_head, last_basic_block_for_fn (cfun));
problem_data->out = XNEWVEC (bitmap_head, last_basic_block_for_fn (cfun));
FOR_ALL_BB_FN (bb, cfun)
{
bitmap_initialize (&problem_data->in[bb->index], &problem_data->live_bitmaps);
bitmap_initialize (&problem_data->out[bb->index], &problem_data->live_bitmaps);
bitmap_copy (&problem_data->in[bb->index], DF_LIVE_IN (bb));
bitmap_copy (&problem_data->out[bb->index], DF_LIVE_OUT (bb));
}
}
/* Compare the saved datastructure and the new solution to the dataflow
equations. */
static void
df_live_verify_solution_end (void)
{
struct df_live_problem_data *problem_data;
basic_block bb;
problem_data = (struct df_live_problem_data *)df_live->problem_data;
if (!problem_data->out)
return;
FOR_ALL_BB_FN (bb, cfun)
{
if ((!bitmap_equal_p (&problem_data->in[bb->index], DF_LIVE_IN (bb)))
|| (!bitmap_equal_p (&problem_data->out[bb->index], DF_LIVE_OUT (bb))))
{
/*df_dump (stderr);*/
gcc_unreachable ();
}
}
/* Cannot delete them immediately because you may want to dump them
if the comparison fails. */
FOR_ALL_BB_FN (bb, cfun)
{
bitmap_clear (&problem_data->in[bb->index]);
bitmap_clear (&problem_data->out[bb->index]);
}
free (problem_data->in);
free (problem_data->out);
free (problem_data);
df_live->problem_data = NULL;
}
/* All of the information associated with every instance of the problem. */
static const struct df_problem problem_LIVE =
{
DF_LIVE, /* Problem id. */
DF_FORWARD, /* Direction. */
df_live_alloc, /* Allocate the problem specific data. */
df_live_reset, /* Reset global information. */
df_live_free_bb_info, /* Free basic block info. */
df_live_local_compute, /* Local compute function. */
df_live_init, /* Init the solution specific data. */
df_worklist_dataflow, /* Worklist solver. */
NULL, /* Confluence operator 0. */
df_live_confluence_n, /* Confluence operator n. */
df_live_transfer_function, /* Transfer function. */
df_live_finalize, /* Finalize function. */
df_live_free, /* Free all of the problem information. */
df_live_free, /* Remove this problem from the stack of dataflow problems. */
NULL, /* Debugging. */
df_live_top_dump, /* Debugging start block. */
df_live_bottom_dump, /* Debugging end block. */
NULL, /* Debugging start insn. */
NULL, /* Debugging end insn. */
df_live_verify_solution_start,/* Incremental solution verify start. */
df_live_verify_solution_end, /* Incremental solution verify end. */
&problem_LR, /* Dependent problem. */
sizeof (struct df_live_bb_info),/* Size of entry of block_info array. */
TV_DF_LIVE, /* Timing variable. */
false /* Reset blocks on dropping out of blocks_to_analyze. */
};
/* Create a new DATAFLOW instance and add it to an existing instance
of DF. The returned structure is what is used to get at the
solution. */
void
df_live_add_problem (void)
{
df_add_problem (&problem_LIVE);
/* These will be initialized when df_scan_blocks processes each
block. */
df_live->out_of_date_transfer_functions = BITMAP_ALLOC (&df_bitmap_obstack);
}
/* Set all of the blocks as dirty. This needs to be done if this
problem is added after all of the insns have been scanned. */
void
df_live_set_all_dirty (void)
{
basic_block bb;
FOR_ALL_BB_FN (bb, cfun)
bitmap_set_bit (df_live->out_of_date_transfer_functions,
bb->index);
}
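/* A typical usage sketch for a pass that needs DF_LIVE but may run at
an optimization level where the problem was not added by default
(conditions simplified for illustration):

  if (!df_live)
    {
      df_live_add_problem ();
      df_live_set_all_dirty ();
    }
  df_analyze ();
*/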
/* Verify that all of the live related info is consistent and
correct. */
void
df_live_verify_transfer_functions (void)
{
basic_block bb;
bitmap_head saved_gen;
bitmap_head saved_kill;
bitmap_head all_blocks;
if (!df)
return;
bitmap_initialize (&saved_gen, &bitmap_default_obstack);
bitmap_initialize (&saved_kill, &bitmap_default_obstack);
bitmap_initialize (&all_blocks, &bitmap_default_obstack);
df_grow_insn_info ();
FOR_ALL_BB_FN (bb, cfun)
{
struct df_live_bb_info *bb_info = df_live_get_bb_info (bb->index);
bitmap_set_bit (&all_blocks, bb->index);
if (bb_info)
{
/* Make a copy of the transfer functions and then compute
new ones to see if the transfer functions have
changed. */
if (!bitmap_bit_p (df_live->out_of_date_transfer_functions,
bb->index))
{
bitmap_copy (&saved_gen, &bb_info->gen);
bitmap_copy (&saved_kill, &bb_info->kill);
bitmap_clear (&bb_info->gen);
bitmap_clear (&bb_info->kill);
df_live_bb_local_compute (bb->index);
gcc_assert (bitmap_equal_p (&saved_gen, &bb_info->gen));
gcc_assert (bitmap_equal_p (&saved_kill, &bb_info->kill));
}
}
else
{
/* If we do not have basic block info, the block must be in
the list of dirty blocks or else someone has added a
block behind our backs. */
gcc_assert (bitmap_bit_p (df_live->out_of_date_transfer_functions,
bb->index));
}
/* Make sure no one created a block without following
procedures. */
gcc_assert (df_scan_get_bb_info (bb->index));
}
/* Make sure there are no dirty bits in blocks that have been deleted. */
gcc_assert (!bitmap_intersect_compl_p (df_live->out_of_date_transfer_functions,
&all_blocks));
bitmap_clear (&saved_gen);
bitmap_clear (&saved_kill);
bitmap_clear (&all_blocks);
}
/*----------------------------------------------------------------------------
MUST-INITIALIZED REGISTERS.
----------------------------------------------------------------------------*/
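/* This is a forward problem that computes, for every program point, the
set of registers that are definitely initialized on every path from the
entry block. In sketch form:

  OUT(bb) = GEN(bb) U (IN(bb) - KILL(bb))
  IN(bb)  = intersection over predecessors P of OUT(P)

where GEN contains regs fully set in the block and KILL contains regs
clobbered in it (see df_mir_simulate_one_insn). */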
/* Private data used to verify the solution for this problem. */
struct df_mir_problem_data
{
bitmap_head *in;
bitmap_head *out;
/* An obstack for the bitmaps we need for this problem. */
bitmap_obstack mir_bitmaps;
};
/* Free basic block info. */
static void
df_mir_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
void *vbb_info)
{
struct df_mir_bb_info *bb_info = (struct df_mir_bb_info *) vbb_info;
if (bb_info)
{
bitmap_clear (&bb_info->gen);
bitmap_clear (&bb_info->kill);
bitmap_clear (&bb_info->in);
bitmap_clear (&bb_info->out);
}
}
/* Allocate or reset bitmaps for DF_MIR blocks. The solution bits are
not touched unless the block is new. */
static void
df_mir_alloc (bitmap all_blocks)
{
unsigned int bb_index;
bitmap_iterator bi;
struct df_mir_problem_data *problem_data;
if (df_mir->problem_data)
problem_data = (struct df_mir_problem_data *) df_mir->problem_data;
else
{
problem_data = XNEW (struct df_mir_problem_data);
df_mir->problem_data = problem_data;
problem_data->out = NULL;
problem_data->in = NULL;
bitmap_obstack_initialize (&problem_data->mir_bitmaps);
}
df_grow_bb_info (df_mir);
EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
{
struct df_mir_bb_info *bb_info = df_mir_get_bb_info (bb_index);
/* When bitmaps are already initialized, just clear them. */
if (bb_info->kill.obstack)
{
bitmap_clear (&bb_info->kill);
bitmap_clear (&bb_info->gen);
}
else
{
bitmap_initialize (&bb_info->kill, &problem_data->mir_bitmaps);
bitmap_initialize (&bb_info->gen, &problem_data->mir_bitmaps);
bitmap_initialize (&bb_info->in, &problem_data->mir_bitmaps);
bitmap_initialize (&bb_info->out, &problem_data->mir_bitmaps);
bb_info->con_visited = false;
}
}
df_mir->optional_p = true;
}
/* Reset the global solution for recalculation. */
static void
df_mir_reset (bitmap all_blocks)
{
unsigned int bb_index;
bitmap_iterator bi;
EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
{
struct df_mir_bb_info *bb_info = df_mir_get_bb_info (bb_index);
gcc_assert (bb_info);
bitmap_clear (&bb_info->in);
bitmap_clear (&bb_info->out);
bb_info->con_visited = false;
}
}
/* Compute local must-initialized register info for basic block BB. */
static void
df_mir_bb_local_compute (unsigned int bb_index)
{
basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
struct df_mir_bb_info *bb_info = df_mir_get_bb_info (bb_index);
rtx_insn *insn;
int luid = 0;
/* Ignoring artificial defs is intentional: these often pretend that some
registers carry incoming arguments (when they are FUNCTION_ARG_REGNO) even
though they are not used for that. As a result, conservatively assume
they may be uninitialized. */
FOR_BB_INSNS (bb, insn)
{
unsigned int uid = INSN_UID (insn);
struct df_insn_info *insn_info = DF_INSN_UID_GET (uid);
/* Inserting labels does not always trigger the incremental
rescanning. */
if (!insn_info)
{
gcc_assert (!INSN_P (insn));
insn_info = df_insn_create_insn_record (insn);
}
DF_INSN_INFO_LUID (insn_info) = luid;
if (!INSN_P (insn))
continue;
luid++;
df_mir_simulate_one_insn (bb, insn, &bb_info->kill, &bb_info->gen);
}
}
/* Compute local must-initialized register info. */
static void
df_mir_local_compute (bitmap all_blocks)
{
unsigned int bb_index;
bitmap_iterator bi;
df_grow_insn_info ();
EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
{
df_mir_bb_local_compute (bb_index);
}
}
/* Initialize the solution vectors. */
static void
df_mir_init (bitmap all_blocks)
{
df_mir_reset (all_blocks);
}
/* Initialize IN sets for blocks with no predecessors: when landing on such
blocks, assume all registers are uninitialized. */
static void
df_mir_confluence_0 (basic_block bb)
{
struct df_mir_bb_info *bb_info = df_mir_get_bb_info (bb->index);
bitmap_clear (&bb_info->in);
bb_info->con_visited = true;
}
/* Forward confluence function that ignores fake edges. */
static bool
df_mir_confluence_n (edge e)
{
if (e->flags & EDGE_FAKE)
return false;
df_mir_bb_info *src_info = df_mir_get_bb_info (e->src->index);
/* If SRC has not been visited yet, its OUT set is conceptually
all-ones, so ANDing it in would change nothing. Do not mark DST
con_visited on the basis of this operation alone either. */
if (!src_info->con_visited)
return false;
df_mir_bb_info *dst_info = df_mir_get_bb_info (e->dest->index);
bitmap op1 = &dst_info->in;
bitmap op2 = &src_info->out;
/* If DEST was not visited yet just copy the SRC bitmap. */
if (!dst_info->con_visited)
{
dst_info->con_visited = true;
bitmap_copy (op1, op2);
return true;
}
/* A register is must-initialized at the entry of a basic block iff it is
must-initialized at the exit of all the predecessors. */
return bitmap_and_into (op1, op2);
}
/* Transfer function for the forwards must-initialized problem. */
static bool
df_mir_transfer_function (int bb_index)
{
struct df_mir_bb_info *bb_info = df_mir_get_bb_info (bb_index);
bitmap in = &bb_info->in;
bitmap out = &bb_info->out;
bitmap gen = &bb_info->gen;
bitmap kill = &bb_info->kill;
return bitmap_ior_and_compl (out, gen, in, kill);
}
/* Free all storage associated with the problem. */
static void
df_mir_free (void)
{
struct df_mir_problem_data *problem_data
= (struct df_mir_problem_data *) df_mir->problem_data;
if (df_mir->block_info)
{
df_mir->block_info_size = 0;
free (df_mir->block_info);
df_mir->block_info = NULL;
bitmap_obstack_release (&problem_data->mir_bitmaps);
free (problem_data);
df_mir->problem_data = NULL;
}
free (df_mir);
}
/* Debugging info at top of bb. */
static void
df_mir_top_dump (basic_block bb, FILE *file)
{
struct df_mir_bb_info *bb_info = df_mir_get_bb_info (bb->index);
if (!bb_info)
return;
fprintf (file, ";; mir in \t");
df_print_regset (file, &bb_info->in);
fprintf (file, ";; mir kill\t");
df_print_regset (file, &bb_info->kill);
fprintf (file, ";; mir gen \t");
df_print_regset (file, &bb_info->gen);
}
/* Debugging info at bottom of bb. */
static void
df_mir_bottom_dump (basic_block bb, FILE *file)
{
struct df_mir_bb_info *bb_info = df_mir_get_bb_info (bb->index);
if (!bb_info)
return;
fprintf (file, ";; mir out \t");
df_print_regset (file, &bb_info->out);
}
/* Build the datastructure to verify that the solution to the dataflow
equations is not dirty. */
static void
df_mir_verify_solution_start (void)
{
basic_block bb;
struct df_mir_problem_data *problem_data;
if (df_mir->solutions_dirty)
return;
/* Set it true so that the solution is recomputed. */
df_mir->solutions_dirty = true;
problem_data = (struct df_mir_problem_data *) df_mir->problem_data;
problem_data->in = XNEWVEC (bitmap_head, last_basic_block_for_fn (cfun));
problem_data->out = XNEWVEC (bitmap_head, last_basic_block_for_fn (cfun));
bitmap_obstack_initialize (&problem_data->mir_bitmaps);
FOR_ALL_BB_FN (bb, cfun)
{
bitmap_initialize (&problem_data->in[bb->index], &problem_data->mir_bitmaps);
bitmap_initialize (&problem_data->out[bb->index], &problem_data->mir_bitmaps);
bitmap_copy (&problem_data->in[bb->index], DF_MIR_IN (bb));
bitmap_copy (&problem_data->out[bb->index], DF_MIR_OUT (bb));
}
}
/* Compare the saved datastructure and the new solution to the dataflow
equations. */
static void
df_mir_verify_solution_end (void)
{
struct df_mir_problem_data *problem_data;
basic_block bb;
problem_data = (struct df_mir_problem_data *) df_mir->problem_data;
if (!problem_data->out)
return;
FOR_ALL_BB_FN (bb, cfun)
{
if ((!bitmap_equal_p (&problem_data->in[bb->index], DF_MIR_IN (bb)))
|| (!bitmap_equal_p (&problem_data->out[bb->index], DF_MIR_OUT (bb))))
gcc_unreachable ();
}
/* Cannot delete them immediately because you may want to dump them
if the comparison fails. */
FOR_ALL_BB_FN (bb, cfun)
{
bitmap_clear (&problem_data->in[bb->index]);
bitmap_clear (&problem_data->out[bb->index]);
}
free (problem_data->in);
free (problem_data->out);
bitmap_obstack_release (&problem_data->mir_bitmaps);
free (problem_data);
df_mir->problem_data = NULL;
}
/* All of the information associated with every instance of the problem. */
static const struct df_problem problem_MIR =
{
DF_MIR, /* Problem id. */
DF_FORWARD, /* Direction. */
df_mir_alloc, /* Allocate the problem specific data. */
df_mir_reset, /* Reset global information. */
df_mir_free_bb_info, /* Free basic block info. */
df_mir_local_compute, /* Local compute function. */
df_mir_init, /* Init the solution specific data. */
df_worklist_dataflow, /* Worklist solver. */
df_mir_confluence_0, /* Confluence operator 0. */
df_mir_confluence_n, /* Confluence operator n. */
df_mir_transfer_function, /* Transfer function. */
NULL, /* Finalize function. */
df_mir_free, /* Free all of the problem information. */
df_mir_free, /* Remove this problem from the stack of dataflow problems. */
NULL, /* Debugging. */
df_mir_top_dump, /* Debugging start block. */
df_mir_bottom_dump, /* Debugging end block. */
NULL, /* Debugging start insn. */
NULL, /* Debugging end insn. */
df_mir_verify_solution_start, /* Incremental solution verify start. */
df_mir_verify_solution_end, /* Incremental solution verify end. */
NULL, /* Dependent problem. */
sizeof (struct df_mir_bb_info),/* Size of entry of block_info array. */
TV_DF_MIR, /* Timing variable. */
false /* Reset blocks on dropping out of blocks_to_analyze. */
};
/* Create a new DATAFLOW instance and add it to an existing instance
of DF. */
void
df_mir_add_problem (void)
{
df_add_problem (&problem_MIR);
/* These will be initialized when df_scan_blocks processes each
block. */
df_mir->out_of_date_transfer_functions = BITMAP_ALLOC (&df_bitmap_obstack);
}
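/* A disabled sketch of how a client might consume the MIR solution:
   after df_mir_add_problem () and df_analyze (), the IN set of a
   block answers "is REGNO initialized on every path reaching BB?".
   The helper name is invented for exposition.  */
#if 0
static bool
example_mir_reg_initialized_p (basic_block bb, unsigned int regno)
{
  /* Assumes df_mir_add_problem () and df_analyze () have been run.  */
  return bitmap_bit_p (DF_MIR_IN (bb), regno);
}
#endif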
/* Apply the effects of the gen/kills in INSN to the corresponding bitmaps. */
void
df_mir_simulate_one_insn (basic_block bb ATTRIBUTE_UNUSED, rtx_insn *insn,
bitmap kill, bitmap gen)
{
df_ref def;
FOR_EACH_INSN_DEF (def, insn)
{
unsigned int regno = DF_REF_REGNO (def);
/* The order of GENs/KILLs matters, so if this def clobbers a reg, any
previous GEN of it is irrelevant (and vice versa). Also, only record a
register as GENed if it is fully set in all cases. */
if (DF_REF_FLAGS_IS_SET (def, DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER))
{
bitmap_set_bit (kill, regno);
bitmap_clear_bit (gen, regno);
}
/* In the worst case, partial and conditional defs can leave bits
uninitialized, so assume they do not change anything. */
else if (!DF_REF_FLAGS_IS_SET (def, DF_REF_PARTIAL | DF_REF_CONDITIONAL))
{
bitmap_set_bit (gen, regno);
bitmap_clear_bit (kill, regno);
}
}
}
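/* A disabled sketch of how the simulation routine above can be used:
   accumulating the per-insn GENs and KILLs forward through BB gives
   the must-initialized set just before POINT as (IN & ~KILL) | GEN.
   The helper name is invented; real consumers may differ.  */
#if 0
static void
example_mir_state_before (basic_block bb, rtx_insn *point, bitmap result)
{
  bitmap_head gen, kill;
  rtx_insn *insn;
  bitmap_initialize (&gen, &bitmap_default_obstack);
  bitmap_initialize (&kill, &bitmap_default_obstack);
  FOR_BB_INSNS (bb, insn)
    {
      if (insn == point)
	break;
      if (INSN_P (insn))
	df_mir_simulate_one_insn (bb, insn, &kill, &gen);
    }
  /* RESULT = (DF_MIR_IN (bb) & ~KILL) | GEN.  */
  bitmap_ior_and_compl (result, &gen, DF_MIR_IN (bb), &kill);
  bitmap_clear (&gen);
  bitmap_clear (&kill);
}
#endif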
/*----------------------------------------------------------------------------
CREATE DEF_USE (DU) and / or USE_DEF (UD) CHAINS
Link either the defs to the uses and / or the uses to the defs.
These problems are set up like the other dataflow problems so that
they nicely fit into the framework. They are much simpler and only
involve a single traversal of instructions and an examination of
the reaching defs information (the dependent problem).
----------------------------------------------------------------------------*/
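/* A disabled sketch of a typical consumer of this problem: request
   UD chains, run the solver, then walk DF_REF_CHAIN of a use to visit
   every def that can reach it.  The function name is invented for
   exposition.  */
#if 0
static void
example_walk_ud_chain (df_ref use, FILE *file)
{
  /* Assumes df_chain_add_problem (DF_UD_CHAIN) and df_analyze ()
     have been run.  */
  for (struct df_link *link = DF_REF_CHAIN (use); link; link = link->next)
    {
      df_ref def = link->ref;
      fprintf (file, "use of reg %d reached by def %d in bb %d\n",
	       DF_REF_REGNO (use), DF_REF_ID (def), DF_REF_BBNO (def));
    }
}
#endif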
#define df_chain_problem_p(FLAG) (((enum df_chain_flags)df_chain->local_flags)&(FLAG))
/* Create a du or ud chain from SRC to DST and link it into SRC. */
struct df_link *
df_chain_create (df_ref src, df_ref dst)
{
struct df_link *head = DF_REF_CHAIN (src);
struct df_link *link = df_chain->block_pool->allocate ();
DF_REF_CHAIN (src) = link;
link->next = head;
link->ref = dst;
return link;
}
/* Delete any du or ud chains that start at REF and point to
TARGET. */
static void
df_chain_unlink_1 (df_ref ref, df_ref target)
{
struct df_link *chain = DF_REF_CHAIN (ref);
struct df_link *prev = NULL;
while (chain)
{
if (chain->ref == target)
{
if (prev)
prev->next = chain->next;
else
DF_REF_CHAIN (ref) = chain->next;
df_chain->block_pool->remove (chain);
return;
}
prev = chain;
chain = chain->next;
}
}
/* Delete the du or ud chains that leave REF or point to it. */
void
df_chain_unlink (df_ref ref)
{
struct df_link *chain = DF_REF_CHAIN (ref);
while (chain)
{
struct df_link *next = chain->next;
/* Delete the other side if it exists. */
df_chain_unlink_1 (chain->ref, ref);
df_chain->block_pool->remove (chain);
chain = next;
}
DF_REF_CHAIN (ref) = NULL;
}
/* Copy the du or ud chain starting at FROM_REF and attach it to
TO_REF. */
void
df_chain_copy (df_ref to_ref,
struct df_link *from_ref)
{
while (from_ref)
{
df_chain_create (to_ref, from_ref->ref);
from_ref = from_ref->next;
}
}
/* Remove this problem from the stack of dataflow problems. */
static void
df_chain_remove_problem (void)
{
bitmap_iterator bi;
unsigned int bb_index;
/* Wholesale destruction of the old chains. */
if (df_chain->block_pool)
delete df_chain->block_pool;
EXECUTE_IF_SET_IN_BITMAP (df_chain->out_of_date_transfer_functions, 0, bb_index, bi)
{
rtx_insn *insn;
df_ref def, use;
basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
if (df_chain_problem_p (DF_DU_CHAIN))
FOR_EACH_ARTIFICIAL_DEF (def, bb_index)
DF_REF_CHAIN (def) = NULL;
if (df_chain_problem_p (DF_UD_CHAIN))
FOR_EACH_ARTIFICIAL_USE (use, bb_index)
DF_REF_CHAIN (use) = NULL;
FOR_BB_INSNS (bb, insn)
if (INSN_P (insn))
{
df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
if (df_chain_problem_p (DF_DU_CHAIN))
FOR_EACH_INSN_INFO_DEF (def, insn_info)
DF_REF_CHAIN (def) = NULL;
if (df_chain_problem_p (DF_UD_CHAIN))
{
FOR_EACH_INSN_INFO_USE (use, insn_info)
DF_REF_CHAIN (use) = NULL;
FOR_EACH_INSN_INFO_EQ_USE (use, insn_info)
DF_REF_CHAIN (use) = NULL;
}
}
}
bitmap_clear (df_chain->out_of_date_transfer_functions);
df_chain->block_pool = NULL;
}
/* Remove the chain problem completely. */
static void
df_chain_fully_remove_problem (void)
{
df_chain_remove_problem ();
BITMAP_FREE (df_chain->out_of_date_transfer_functions);
free (df_chain);
}
/* Create def-use or use-def chains. */
static void
df_chain_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
{
df_chain_remove_problem ();
df_chain->block_pool = new object_allocator<df_link> ("df_chain_block pool");
df_chain->optional_p = true;
}
/* Reset all of the chains when the set of basic blocks changes. */
static void
df_chain_reset (bitmap blocks_to_clear ATTRIBUTE_UNUSED)
{
df_chain_remove_problem ();
}
/* Create the chains for a list of USEs. */
static void
df_chain_create_bb_process_use (bitmap local_rd,
df_ref use,
int top_flag)
{
bitmap_iterator bi;
unsigned int def_index;
for (; use; use = DF_REF_NEXT_LOC (use))
{
unsigned int uregno = DF_REF_REGNO (use);
if ((!(df->changeable_flags & DF_NO_HARD_REGS))
|| (uregno >= FIRST_PSEUDO_REGISTER))
{
/* Do not want to go through this for an uninitialized var. */
int count = DF_DEFS_COUNT (uregno);
if (count)
{
if (top_flag == (DF_REF_FLAGS (use) & DF_REF_AT_TOP))
{
unsigned int first_index = DF_DEFS_BEGIN (uregno);
unsigned int last_index = first_index + count - 1;
EXECUTE_IF_SET_IN_BITMAP (local_rd, first_index, def_index, bi)
{
df_ref def;
if (def_index > last_index)
break;
def = DF_DEFS_GET (def_index);
if (df_chain_problem_p (DF_DU_CHAIN))
df_chain_create (def, use);
if (df_chain_problem_p (DF_UD_CHAIN))
df_chain_create (use, def);
}
}
}
}
}
}
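/* A disabled sketch of the indexing scheme used above: the RD bitmaps
   are keyed by def ids, and all defs of a given regno occupy the
   contiguous id range starting at DF_DEFS_BEGIN (regno) and spanning
   DF_DEFS_COUNT (regno) entries.  Walking that range and testing
   LOCAL_RD yields the defs of REGNO that reach the current point.
   The helper name is invented for exposition.  */
#if 0
static void
example_reaching_defs_of_regno (bitmap local_rd, unsigned int regno,
				FILE *file)
{
  unsigned int first = DF_DEFS_BEGIN (regno);
  unsigned int count = DF_DEFS_COUNT (regno);
  for (unsigned int id = first; id < first + count; id++)
    if (bitmap_bit_p (local_rd, id))
      {
	df_ref def = DF_DEFS_GET (id);
	fprintf (file, "def %d (insn %d) of reg %d reaches here\n",
		 id, DF_REF_IS_ARTIFICIAL (def) ? -1 : DF_REF_INSN_UID (def),
		 regno);
      }
}
#endif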
/* Create chains from reaching defs bitmaps for basic block BB. */
static void
df_chain_create_bb (unsigned int bb_index)
{
basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
rtx_insn *insn;
bitmap_head cpy;
bitmap_initialize (&cpy, &bitmap_default_obstack);
bitmap_copy (&cpy, &bb_info->in);
bitmap_set_bit (df_chain->out_of_date_transfer_functions, bb_index);
/* Since we are going forwards, process the artificial uses first,
then the artificial defs. */
#ifdef EH_USES
/* Create the chains for the artificial uses from the EH_USES at the
beginning of the block. */
/* Artificials are only hard regs. */
if (!(df->changeable_flags & DF_NO_HARD_REGS))
df_chain_create_bb_process_use (&cpy,
df_get_artificial_uses (bb->index),
DF_REF_AT_TOP);
#endif
df_rd_simulate_artificial_defs_at_top (bb, &cpy);
/* Process the regular instructions next. */
FOR_BB_INSNS (bb, insn)
if (INSN_P (insn))
{
unsigned int uid = INSN_UID (insn);
/* First scan the uses and link them up with the defs that remain
in the CPY bitmap. */
df_chain_create_bb_process_use (&cpy, DF_INSN_UID_USES (uid), 0);
if (df->changeable_flags & DF_EQ_NOTES)
df_chain_create_bb_process_use (&cpy, DF_INSN_UID_EQ_USES (uid), 0);
/* Since we are going forwards, process the defs second. */
df_rd_simulate_one_insn (bb, insn, &cpy);
}
/* Create the chains for the artificial uses of the hard registers
at the end of the block. */
if (!(df->changeable_flags & DF_NO_HARD_REGS))
df_chain_create_bb_process_use (&cpy,
df_get_artificial_uses (bb->index),
0);
bitmap_clear (&cpy);
}
/* Create the def-use and/or use-def chains from the reaching-defs
bitmaps for each basic block in ALL_BLOCKS. */
static void
df_chain_finalize (bitmap all_blocks)
{
unsigned int bb_index;
bitmap_iterator bi;
EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
{
df_chain_create_bb (bb_index);
}
}
/* Free all storage associated with the problem. */
static void
df_chain_free (void)
{
delete df_chain->block_pool;
BITMAP_FREE (df_chain->out_of_date_transfer_functions);
free (df_chain);
}
/* Debugging info. */
static void
df_chain_bb_dump (basic_block bb, FILE *file, bool top)
{
/* Artificials are only hard regs. */
if (df->changeable_flags & DF_NO_HARD_REGS)
return;
if (df_chain_problem_p (DF_UD_CHAIN))
{
df_ref use;
fprintf (file,
";; UD chains for artificial uses at %s\n",
top ? "top" : "bottom");
FOR_EACH_ARTIFICIAL_USE (use, bb->index)
if ((top && (DF_REF_FLAGS (use) & DF_REF_AT_TOP))
|| (!top && !(DF_REF_FLAGS (use) & DF_REF_AT_TOP)))
{
fprintf (file, ";; reg %d ", DF_REF_REGNO (use));
df_chain_dump (DF_REF_CHAIN (use), file);
fprintf (file, "\n");
}
}
if (df_chain_problem_p (DF_DU_CHAIN))
{
df_ref def;
fprintf (file,
";; DU chains for artificial defs at %s\n",
top ? "top" : "bottom");
FOR_EACH_ARTIFICIAL_DEF (def, bb->index)
if ((top && (DF_REF_FLAGS (def) & DF_REF_AT_TOP))
|| (!top && !(DF_REF_FLAGS (def) & DF_REF_AT_TOP)))
{
fprintf (file, ";; reg %d ", DF_REF_REGNO (def));
df_chain_dump (DF_REF_CHAIN (def), file);
fprintf (file, "\n");
}
}
}
static void
df_chain_top_dump (basic_block bb, FILE *file)
{
df_chain_bb_dump (bb, file, /*top=*/true);
}
static void
df_chain_bottom_dump (basic_block bb, FILE *file)
{
df_chain_bb_dump (bb, file, /*top=*/false);
}
static void
df_chain_insn_top_dump (const rtx_insn *insn, FILE *file)
{
if (df_chain_problem_p (DF_UD_CHAIN) && INSN_P (insn))
{
struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
df_ref use;
fprintf (file, ";; UD chains for insn luid %d uid %d\n",
DF_INSN_INFO_LUID (insn_info), INSN_UID (insn));
FOR_EACH_INSN_INFO_USE (use, insn_info)
if (!HARD_REGISTER_NUM_P (DF_REF_REGNO (use))
|| !(df->changeable_flags & DF_NO_HARD_REGS))
{
fprintf (file, ";; reg %d ", DF_REF_REGNO (use));
if (DF_REF_FLAGS (use) & DF_REF_READ_WRITE)
fprintf (file, "read/write ");
df_chain_dump (DF_REF_CHAIN (use), file);
fprintf (file, "\n");
}
FOR_EACH_INSN_INFO_EQ_USE (use, insn_info)
if (!HARD_REGISTER_NUM_P (DF_REF_REGNO (use))
|| !(df->changeable_flags & DF_NO_HARD_REGS))
{
fprintf (file, ";; eq_note reg %d ", DF_REF_REGNO (use));
df_chain_dump (DF_REF_CHAIN (use), file);
fprintf (file, "\n");
}
}
}
static void
df_chain_insn_bottom_dump (const rtx_insn *insn, FILE *file)
{
if (df_chain_problem_p (DF_DU_CHAIN) && INSN_P (insn))
{
struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
df_ref def;
fprintf (file, ";; DU chains for insn luid %d uid %d\n",
DF_INSN_INFO_LUID (insn_info), INSN_UID (insn));
FOR_EACH_INSN_INFO_DEF (def, insn_info)
if (!HARD_REGISTER_NUM_P (DF_REF_REGNO (def))
|| !(df->changeable_flags & DF_NO_HARD_REGS))
{
fprintf (file, ";; reg %d ", DF_REF_REGNO (def));
if (DF_REF_FLAGS (def) & DF_REF_READ_WRITE)
fprintf (file, "read/write ");
df_chain_dump (DF_REF_CHAIN (def), file);
fprintf (file, "\n");
}
fprintf (file, "\n");
}
}
static const struct df_problem problem_CHAIN =
{
DF_CHAIN, /* Problem id. */
DF_NONE, /* Direction. */
df_chain_alloc, /* Allocate the problem specific data. */
df_chain_reset, /* Reset global information. */
NULL, /* Free basic block info. */
NULL, /* Local compute function. */
NULL, /* Init the solution specific data. */
NULL, /* Iterative solver. */
NULL, /* Confluence operator 0. */
NULL, /* Confluence operator n. */
NULL, /* Transfer function. */
df_chain_finalize, /* Finalize function. */
df_chain_free, /* Free all of the problem information. */
df_chain_fully_remove_problem,/* Remove this problem from the stack of dataflow problems. */
NULL, /* Debugging. */
df_chain_top_dump, /* Debugging start block. */
df_chain_bottom_dump, /* Debugging end block. */
df_chain_insn_top_dump, /* Debugging start insn. */
df_chain_insn_bottom_dump, /* Debugging end insn. */
NULL, /* Incremental solution verify start. */
NULL, /* Incremental solution verify end. */
&problem_RD, /* Dependent problem. */
sizeof (struct df_scan_bb_info),/* Size of entry of block_info array. */
TV_DF_CHAIN, /* Timing variable. */
false /* Reset blocks on dropping out of blocks_to_analyze. */
};
/* Create a new DATAFLOW instance and add it to an existing instance
of DF. The returned structure is what is used to get at the
solution. */
void
df_chain_add_problem (unsigned int chain_flags)
{
df_add_problem (&problem_CHAIN);
df_chain->local_flags = chain_flags;
df_chain->out_of_date_transfer_functions = BITMAP_ALLOC (&df_bitmap_obstack);
}
#undef df_chain_problem_p
/*----------------------------------------------------------------------------
WORD LEVEL LIVE REGISTERS
Find the locations in the function where any use of a pseudo can
reach in the backwards direction. In and out bitvectors are built
for each basic block. We only track pseudo registers that have a
size of 2 * UNITS_PER_WORD; bitmaps are indexed by 2 * regno and
contain two bits corresponding to each of the subwords.
----------------------------------------------------------------------------*/
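/* A disabled sketch of the bit encoding used throughout this problem:
   a tracked double-word pseudo REGNO owns bit 2 * REGNO for its low
   word and bit 2 * REGNO + 1 for its high word in every bitmap.  The
   helper names are invented for exposition.  */
#if 0
static bool
example_word_lr_low_word_live_p (bitmap live, unsigned int regno)
{
  return bitmap_bit_p (live, 2 * regno);
}
static bool
example_word_lr_high_word_live_p (bitmap live, unsigned int regno)
{
  return bitmap_bit_p (live, 2 * regno + 1);
}
#endif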
/* Private data used to verify the solution for this problem. */
struct df_word_lr_problem_data
{
/* An obstack for the bitmaps we need for this problem. */
bitmap_obstack word_lr_bitmaps;
};
/* Free basic block info. */
static void
df_word_lr_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
void *vbb_info)
{
struct df_word_lr_bb_info *bb_info = (struct df_word_lr_bb_info *) vbb_info;
if (bb_info)
{
bitmap_clear (&bb_info->use);
bitmap_clear (&bb_info->def);
bitmap_clear (&bb_info->in);
bitmap_clear (&bb_info->out);
}
}
/* Allocate or reset bitmaps for DF_WORD_LR blocks. The solution bits are
not touched unless the block is new. */
static void
df_word_lr_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
{
unsigned int bb_index;
bitmap_iterator bi;
basic_block bb;
struct df_word_lr_problem_data *problem_data
= XNEW (struct df_word_lr_problem_data);
df_word_lr->problem_data = problem_data;
df_grow_bb_info (df_word_lr);
/* Create the mapping from regnos to slots. This does not change
unless the problem is destroyed and recreated. In particular, if
we end up deleting the only insn that used a subreg, we do not
want to redo the mapping because this would invalidate everything
else. */
bitmap_obstack_initialize (&problem_data->word_lr_bitmaps);
FOR_EACH_BB_FN (bb, cfun)
bitmap_set_bit (df_word_lr->out_of_date_transfer_functions, bb->index);
bitmap_set_bit (df_word_lr->out_of_date_transfer_functions, ENTRY_BLOCK);
bitmap_set_bit (df_word_lr->out_of_date_transfer_functions, EXIT_BLOCK);
EXECUTE_IF_SET_IN_BITMAP (df_word_lr->out_of_date_transfer_functions, 0, bb_index, bi)
{
struct df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb_index);
/* When bitmaps are already initialized, just clear them. */
if (bb_info->use.obstack)
{
bitmap_clear (&bb_info->def);
bitmap_clear (&bb_info->use);
}
else
{
bitmap_initialize (&bb_info->use, &problem_data->word_lr_bitmaps);
bitmap_initialize (&bb_info->def, &problem_data->word_lr_bitmaps);
bitmap_initialize (&bb_info->in, &problem_data->word_lr_bitmaps);
bitmap_initialize (&bb_info->out, &problem_data->word_lr_bitmaps);
}
}
df_word_lr->optional_p = true;
}
/* Reset the global solution for recalculation. */
static void
df_word_lr_reset (bitmap all_blocks)
{
unsigned int bb_index;
bitmap_iterator bi;
EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
{
struct df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb_index);
gcc_assert (bb_info);
bitmap_clear (&bb_info->in);
bitmap_clear (&bb_info->out);
}
}
/* Examine REF, and if it is for a reg we're interested in, set or
clear the bits corresponding to its subwords from the bitmap
according to IS_SET. LIVE is the bitmap we should update. We do
not track hard regs or pseudos of any size other than 2 *
UNITS_PER_WORD.
We return true if we changed the bitmap, or if we encountered a register
we're not tracking. */
bool
df_word_lr_mark_ref (df_ref ref, bool is_set, regset live)
{
rtx orig_reg = DF_REF_REG (ref);
rtx reg = orig_reg;
machine_mode reg_mode;
unsigned regno;
/* Left at -1 for whole accesses. */
int which_subword = -1;
bool changed = false;
if (GET_CODE (reg) == SUBREG)
reg = SUBREG_REG (orig_reg);
regno = REGNO (reg);
reg_mode = GET_MODE (reg);
if (regno < FIRST_PSEUDO_REGISTER
|| maybe_ne (GET_MODE_SIZE (reg_mode), 2 * UNITS_PER_WORD))
return true;
if (GET_CODE (orig_reg) == SUBREG
&& read_modify_subreg_p (orig_reg))
{
gcc_assert (DF_REF_FLAGS_IS_SET (ref, DF_REF_PARTIAL));
if (subreg_lowpart_p (orig_reg))
which_subword = 0;
else
which_subword = 1;
}
if (is_set)
{
if (which_subword != 1)
changed |= bitmap_set_bit (live, regno * 2);
if (which_subword != 0)
changed |= bitmap_set_bit (live, regno * 2 + 1);
}
else
{
if (which_subword != 1)
changed |= bitmap_clear_bit (live, regno * 2);
if (which_subword != 0)
changed |= bitmap_clear_bit (live, regno * 2 + 1);
}
return changed;
}
/* Compute local live register info for basic block BB. */
static void
df_word_lr_bb_local_compute (unsigned int bb_index)
{
basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
struct df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb_index);
rtx_insn *insn;
df_ref def, use;
/* Ensure that artificial refs don't contain references to pseudos. */
FOR_EACH_ARTIFICIAL_DEF (def, bb_index)
gcc_assert (DF_REF_REGNO (def) < FIRST_PSEUDO_REGISTER);
FOR_EACH_ARTIFICIAL_USE (use, bb_index)
gcc_assert (DF_REF_REGNO (use) < FIRST_PSEUDO_REGISTER);
FOR_BB_INSNS_REVERSE (bb, insn)
{
if (!NONDEBUG_INSN_P (insn))
continue;
df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
FOR_EACH_INSN_INFO_DEF (def, insn_info)
/* A conditional def may leave the register unchanged, so it does
not kill the uses or defs that reach here; partial defs are
handled inside df_word_lr_mark_ref via the subword bits. */
if (!(DF_REF_FLAGS (def) & (DF_REF_CONDITIONAL)))
{
df_word_lr_mark_ref (def, true, &bb_info->def);
df_word_lr_mark_ref (def, false, &bb_info->use);
}
FOR_EACH_INSN_INFO_USE (use, insn_info)
df_word_lr_mark_ref (use, true, &bb_info->use);
}
}
/* Compute local live register info for each basic block within BLOCKS. */
static void
df_word_lr_local_compute (bitmap all_blocks ATTRIBUTE_UNUSED)
{
unsigned int bb_index;
bitmap_iterator bi;
EXECUTE_IF_SET_IN_BITMAP (df_word_lr->out_of_date_transfer_functions, 0, bb_index, bi)
{
if (bb_index == EXIT_BLOCK)
{
unsigned regno;
bitmap_iterator bi;
EXECUTE_IF_SET_IN_BITMAP (df->exit_block_uses, FIRST_PSEUDO_REGISTER,
regno, bi)
gcc_unreachable ();
}
else
df_word_lr_bb_local_compute (bb_index);
}
bitmap_clear (df_word_lr->out_of_date_transfer_functions);
}
/* Initialize the solution vectors. */
static void
df_word_lr_init (bitmap all_blocks)
{
unsigned int bb_index;
bitmap_iterator bi;
EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
{
struct df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb_index);
bitmap_copy (&bb_info->in, &bb_info->use);
bitmap_clear (&bb_info->out);
}
}
/* Confluence operator: union the IN set of the destination block
into the OUT set of the source block. */
static bool
df_word_lr_confluence_n (edge e)
{
bitmap op1 = &df_word_lr_get_bb_info (e->src->index)->out;
bitmap op2 = &df_word_lr_get_bb_info (e->dest->index)->in;
return bitmap_ior_into (op1, op2);
}
/* Transfer function. */
static bool
df_word_lr_transfer_function (int bb_index)
{
struct df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb_index);
bitmap in = &bb_info->in;
bitmap out = &bb_info->out;
bitmap use = &bb_info->use;
bitmap def = &bb_info->def;
return bitmap_ior_and_compl (in, use, out, def);
}
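/* A disabled sketch tying the confluence and transfer functions above
   together for a single tracked pseudo, here number 100: if the only
   successor reads just its high word and this block fully defines the
   register, nothing of pseudo 100 is live on entry.  The numbers are
   arbitrary.  */
#if 0
static void
example_word_lr_equations (void)
{
  bitmap_head in_succ, out, use, def, in;
  bitmap_initialize (&in_succ, &bitmap_default_obstack);
  bitmap_initialize (&out, &bitmap_default_obstack);
  bitmap_initialize (&use, &bitmap_default_obstack);
  bitmap_initialize (&def, &bitmap_default_obstack);
  bitmap_initialize (&in, &bitmap_default_obstack);
  /* The successor reads only the high word of pseudo 100.  */
  bitmap_set_bit (&in_succ, 2 * 100 + 1);
  /* Confluence: OUT |= IN of each successor.  */
  bitmap_ior_into (&out, &in_succ);
  /* This block writes the whole register.  */
  bitmap_set_bit (&def, 2 * 100);
  bitmap_set_bit (&def, 2 * 100 + 1);
  /* Transfer: IN = USE | (OUT & ~DEF), which is empty here.  */
  bitmap_ior_and_compl (&in, &use, &out, &def);
  gcc_checking_assert (bitmap_empty_p (&in));
  bitmap_clear (&in_succ);
  bitmap_clear (&out);
  bitmap_clear (&use);
  bitmap_clear (&def);
  bitmap_clear (&in);
}
#endif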
/* Free all storage associated with the problem. */
static void
df_word_lr_free (void)
{
struct df_word_lr_problem_data *problem_data
= (struct df_word_lr_problem_data *)df_word_lr->problem_data;
if (df_word_lr->block_info)
{
df_word_lr->block_info_size = 0;
free (df_word_lr->block_info);
df_word_lr->block_info = NULL;
}
BITMAP_FREE (df_word_lr->out_of_date_transfer_functions);
bitmap_obstack_release (&problem_data->word_lr_bitmaps);
free (problem_data);
free (df_word_lr);
}
/* Debugging info at top of bb. */
static void
df_word_lr_top_dump (basic_block bb, FILE *file)
{
struct df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb->index);
if (!bb_info)
return;
fprintf (file, ";; blr in \t");
df_print_word_regset (file, &bb_info->in);
fprintf (file, ";; blr use \t");
df_print_word_regset (file, &bb_info->use);
fprintf (file, ";; blr def \t");
df_print_word_regset (file, &bb_info->def);
}
/* Debugging info at bottom of bb. */
static void
df_word_lr_bottom_dump (basic_block bb, FILE *file)
{
struct df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb->index);
if (!bb_info)
return;
fprintf (file, ";; blr out \t");
df_print_word_regset (file, &bb_info->out);
}
/* All of the information associated with every instance of the problem. */
static const struct df_problem problem_WORD_LR =
{
DF_WORD_LR, /* Problem id. */
DF_BACKWARD, /* Direction. */
df_word_lr_alloc, /* Allocate the problem specific data. */
df_word_lr_reset, /* Reset global information. */
df_word_lr_free_bb_info, /* Free basic block info. */
df_word_lr_local_compute, /* Local compute function. */
df_word_lr_init, /* Init the solution specific data. */
df_worklist_dataflow, /* Worklist solver. */
NULL, /* Confluence operator 0. */
df_word_lr_confluence_n, /* Confluence operator n. */
df_word_lr_transfer_function, /* Transfer function. */
NULL, /* Finalize function. */
df_word_lr_free, /* Free all of the problem information. */
df_word_lr_free, /* Remove this problem from the stack of dataflow problems. */
NULL, /* Debugging. */
df_word_lr_top_dump, /* Debugging start block. */
df_word_lr_bottom_dump, /* Debugging end block. */
NULL, /* Debugging start insn. */
NULL, /* Debugging end insn. */
NULL, /* Incremental solution verify start. */
NULL, /* Incremental solution verify end. */
NULL, /* Dependent problem. */
sizeof (struct df_word_lr_bb_info),/* Size of entry of block_info array. */
TV_DF_WORD_LR, /* Timing variable. */
false /* Reset blocks on dropping out of blocks_to_analyze. */
};
/* Create a new DATAFLOW instance and add it to an existing instance
of DF. The returned structure is what is used to get at the
solution. */
void
df_word_lr_add_problem (void)
{
df_add_problem (&problem_WORD_LR);
/* These will be initialized when df_scan_blocks processes each
block. */
df_word_lr->out_of_date_transfer_functions = BITMAP_ALLOC (&df_bitmap_obstack);
}
/* Simulate the effects of the defs of INSN on LIVE. Return true if we changed
any bits, which is used by the caller to determine whether a set is
necessary. We also return true if there are other reasons not to delete
an insn. */
bool
df_word_lr_simulate_defs (rtx_insn *insn, bitmap live)
{
bool changed = false;
df_ref def;
FOR_EACH_INSN_DEF (def, insn)
if (DF_REF_FLAGS (def) & DF_REF_CONDITIONAL)
changed = true;
else
changed |= df_word_lr_mark_ref (def, false, live);
return changed;
}
/* Simulate the effects of the uses of INSN on LIVE. */
void
df_word_lr_simulate_uses (rtx_insn *insn, bitmap live)
{
df_ref use;
FOR_EACH_INSN_USE (use, insn)
df_word_lr_mark_ref (use, true, live);
}
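/* A disabled sketch of the intended backwards use of the two
   simulation routines, in the style of word-level dead code
   elimination: starting from the block's OUT set, an insn whose defs
   change no live bits writes only dead words (a real pass would also
   check for side effects and other deletability conditions).  The
   helper name and the reporting are invented for exposition.  */
#if 0
static void
example_word_lr_scan_block (basic_block bb, FILE *file)
{
  bitmap_head live;
  rtx_insn *insn;
  bitmap_initialize (&live, &bitmap_default_obstack);
  bitmap_copy (&live, DF_WORD_LR_OUT (bb));
  FOR_BB_INSNS_REVERSE (bb, insn)
    if (NONDEBUG_INSN_P (insn))
      {
	if (!df_word_lr_simulate_defs (insn, &live))
	  fprintf (file, "insn %d writes only dead words\n",
		   INSN_UID (insn));
	df_word_lr_simulate_uses (insn, &live);
      }
  bitmap_clear (&live);
}
#endif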
/*----------------------------------------------------------------------------
This problem computes REG_DEAD and REG_UNUSED notes.
----------------------------------------------------------------------------*/
static void
df_note_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
{
df_note->optional_p = true;
}
/* This is only used if REG_DEAD_DEBUGGING is in effect. */
static void
df_print_note (const char *prefix, rtx_insn *insn, rtx note)
{
if (dump_file)
{
fprintf (dump_file, "%s %d ", prefix, INSN_UID (insn));
print_rtl (dump_file, note);
fprintf (dump_file, "\n");
}
}
/* After reg-stack, the x86 floating point stack regs are difficult to
analyze because of all of the pushes, pops and rotations. Thus, we
just leave the notes alone. */
#ifdef STACK_REGS
static inline bool
df_ignore_stack_reg (int regno)
{
return regstack_completed
&& IN_RANGE (regno, FIRST_STACK_REG, LAST_STACK_REG);
}
#else
static inline bool
df_ignore_stack_reg (int regno ATTRIBUTE_UNUSED)
{
return false;
}
#endif
/* Remove all of the REG_DEAD or REG_UNUSED notes from INSN. */
static void
df_remove_dead_and_unused_notes (rtx_insn *insn)
{
rtx *pprev = &REG_NOTES (insn);
rtx link = *pprev;
while (link)
{
switch (REG_NOTE_KIND (link))
{
case REG_DEAD:
case REG_UNUSED:
/* After reg-stack, we need to ignore any death or unused notes
for the stack registers. */
if (df_ignore_stack_reg (REGNO (XEXP (link, 0))))
{
pprev = &XEXP (link, 1);
link = *pprev;
}
else
{
rtx next = XEXP (link, 1);
if (REG_DEAD_DEBUGGING)
df_print_note ("deleting: ", insn, link);
free_EXPR_LIST_node (link);
*pprev = link = next;
}
break;
default:
pprev = &XEXP (link, 1);
link = *pprev;
break;
}
}
}
/* Remove REG_EQUAL/REG_EQUIV notes referring to dead pseudos using LIVE
as the bitmap of currently live registers. */
static void
df_remove_dead_eq_notes (rtx_insn *insn, bitmap live)
{
rtx *pprev = &REG_NOTES (insn);
rtx link = *pprev;
while (link)
{
switch (REG_NOTE_KIND (link))
{
case REG_EQUAL:
case REG_EQUIV:
{
/* Remove the notes that refer to dead registers. As we have at most
one REG_EQUAL/EQUIV note, all of EQ_USES will refer to this note
so we need to purge the complete EQ_USES vector when removing
the note using df_notes_rescan. */
df_ref use;
bool deleted = false;
FOR_EACH_INSN_EQ_USE (use, insn)
if (DF_REF_REGNO (use) >= FIRST_PSEUDO_REGISTER
&& DF_REF_LOC (use)
&& (DF_REF_FLAGS (use) & DF_REF_IN_NOTE)
&& !bitmap_bit_p (live, DF_REF_REGNO (use))
&& loc_mentioned_in_p (DF_REF_LOC (use), XEXP (link, 0)))
{
deleted = true;
break;
}
if (deleted)
{
rtx next;
if (REG_DEAD_DEBUGGING)
df_print_note ("deleting: ", insn, link);
next = XEXP (link, 1);
free_EXPR_LIST_node (link);
*pprev = link = next;
df_notes_rescan (insn);
}
else
{
pprev = &XEXP (link, 1);
link = *pprev;
}
break;
}
default:
pprev = &XEXP (link, 1);
link = *pprev;
break;
}
}
}
/* Set a NOTE_TYPE note for REG in INSN. */
static inline void
df_set_note (enum reg_note note_type, rtx_insn *insn, rtx reg)
{
gcc_checking_assert (!DEBUG_INSN_P (insn));
add_reg_note (insn, note_type, reg);
}
/* A subroutine of df_set_unused_notes_for_mw, with a selection of its
arguments. Return true if the register value described by MWS's
mw_reg is known to be completely unused, and if mw_reg can therefore
be used in a REG_UNUSED note. */
static bool
df_whole_mw_reg_unused_p (struct df_mw_hardreg *mws,
bitmap live, bitmap artificial_uses)
{
unsigned int r;
/* If MWS describes a partial reference, create REG_UNUSED notes for
individual hard registers. */
if (mws->flags & DF_REF_PARTIAL)
return false;
/* Likewise if some part of the register is used. */
for (r = mws->start_regno; r <= mws->end_regno; r++)
if (bitmap_bit_p (live, r)
|| bitmap_bit_p (artificial_uses, r))
return false;
gcc_assert (REG_P (mws->mw_reg));
return true;
}
/* Set the REG_UNUSED notes for the multiword hardreg defs in INSN
based on the bits in LIVE. Do not generate notes for registers in
artificial uses. DO_NOT_GEN is updated so that REG_DEAD notes are
not generated if the reg is both read and written by the
instruction.
*/
static void
df_set_unused_notes_for_mw (rtx_insn *insn, struct df_mw_hardreg *mws,
bitmap live, bitmap do_not_gen,
bitmap artificial_uses,
struct dead_debug_local *debug)
{
unsigned int r;
if (REG_DEAD_DEBUGGING && dump_file)
fprintf (dump_file, "mw_set_unused looking at mws[%d..%d]\n",
mws->start_regno, mws->end_regno);
if (df_whole_mw_reg_unused_p (mws, live, artificial_uses))
{
unsigned int regno = mws->start_regno;
df_set_note (REG_UNUSED, insn, mws->mw_reg);
dead_debug_insert_temp (debug, regno, insn, DEBUG_TEMP_AFTER_WITH_REG);
if (REG_DEAD_DEBUGGING)
df_print_note ("adding 1: ", insn, REG_NOTES (insn));
bitmap_set_bit (do_not_gen, regno);
/* Only do this if the value is totally dead. */
}
else
for (r = mws->start_regno; r <= mws->end_regno; r++)
{
if (!bitmap_bit_p (live, r)
&& !bitmap_bit_p (artificial_uses, r))
{
df_set_note (REG_UNUSED, insn,