mirror of https://gcc.gnu.org/git/gcc.git
df-scan.c (df_collection_rec): Adjust.
	* df-scan.c (df_collection_rec): Adjust.
	(copy_defs): New constant.
	(copy_uses): Likewise.
	(copy_eq_uses): Likewise.
	(copy_mw): Likewise.
	(copy_all): Likewise.
	(df_insn_rescan): Adjust.
	(df_notes_rescan): Likewise.
	(df_swap_refs): Likewise.
	(df_sort_and_compress_refs): Likewise.
	(df_sort_and_compress_mws): Likewise.
	(df_install_refs): Likewise.
	(df_install_mws): Likewise.
	(df_refs_add_to_chains): Add flags parameter controlling which
	vectors are copied.
	(df_bb_refs_record): Adjust.
	(df_record_entry_block_defs): Likewise.
	(df_record_exit_block_defs): Likewise.
	(df_refs_verify): Likewise.
	(df_mws_verify): Likewise.
	(df_insn_refs_verify): Likewise.
	(df_bb_verify): Likewise.
	* ipa-pure-const.c (finish_state): Remove.
	(propagate): Adjust.
	* tree-data-ref.c tree-ssa-alias.c tree-ssa-loop-ivcanon.c
	tree-ssa-threadedge.c tree-vect-loop-manip.c tree-vect-slp.c
	var-tracking.c: Adjust.
	* vec.c (stack_vecs): Remove.
	(register_stack_vec): Likewise.
	(stack_vec_register_index): Likewise.
	(unregister_stack_vec): Likewise.
	* vec.h (struct va_stack): Remove.
	(struct vec<T, A, vl_ptr>): Specialize as
	struct vec<T, va_heap, vl_ptr> instead since va_heap is the only
	allocation strategy compatible with the vl_ptr layout.
	(struct vec<T, va_gc, vl_ptr>): Remove because it now gets an empty
	specialization anyway.
	(class stack_vec): New class.
	(vec_stack_alloc): Remove.
	(vec<T, va_heap, vl_ptr>::using_auto_storage): New method.
	* gcc-interface/decl.c (components_to_record): Adjust.

From-SVN: r204137
commit ff4c81cccb (parent b868b7cae4)
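Before reading the diff, the shape of the change is easiest to see in miniature. The sketch below is a minimal, self-contained illustration, not GCC's actual vec.h, and every name in it is illustrative: a vector that starts life in a fixed-size buffer embedded in the object itself and only touches the heap once it outgrows N elements, which is the pattern stack_vec adopts in place of the alloca-based va_stack machinery.

/* Hedged sketch of the auto-storage idea behind stack_vec.  */
#include <cstdlib>
#include <cstring>
#include <cstdio>

template<typename T, unsigned N>
class auto_buf_vec
{
public:
  auto_buf_vec () : m_data (m_auto), m_alloc (N), m_num (0) {}
  ~auto_buf_vec () { if (m_data != m_auto) free (m_data); }

  void push (const T &obj)
  {
    if (m_num == m_alloc)
      {
        /* Outgrew the auto storage: spill to the heap, much as the
           patched vec<T, va_heap, vl_ptr>::reserve does below.  */
        m_alloc *= 2;
        T *heap = static_cast<T *> (malloc (m_alloc * sizeof (T)));
        memcpy (heap, m_data, m_num * sizeof (T));
        if (m_data != m_auto)
          free (m_data);
        m_data = heap;
      }
    m_data[m_num++] = obj;
  }

  unsigned length () const { return m_num; }

private:
  T m_auto[N];   /* inline storage, lives and dies with the object */
  T *m_data;
  unsigned m_alloc, m_num;
};

int main ()
{
  auto_buf_vec<int, 4> v;   /* no heap allocation for <= 4 elements */
  for (int i = 0; i < 10; i++)
    v.push (i);             /* spills to the heap at the fifth push */
  printf ("%u\n", v.length ());
}

The payoff over the old scheme: no global registry of stack vectors to search, and cleanup happens in the destructor instead of by explicit release () calls.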
gcc/ChangeLog
@@ -1,3 +1,46 @@
+2013-10-28  Trevor Saunders  <tsaunders@mozilla.com>
+
+	* df-scan.c (df_collection_rec): Adjust.
+	(copy_defs): New constant.
+	(copy_uses): Likewise.
+	(copy_eq_uses): Likewise.
+	(copy_mw): Likewise.
+	(copy_all): Likewise.
+	(df_insn_rescan): Adjust.
+	(df_notes_rescan): Likewise.
+	(df_swap_refs): Likewise.
+	(df_sort_and_compress_refs): Likewise.
+	(df_sort_and_compress_mws): Likewise.
+	(df_install_refs): Likewise.
+	(df_install_mws): Likewise.
+	(df_refs_add_to_chains): Add flags parameter controlling which vectors
+	are copied.
+	(df_bb_refs_record): Adjust.
+	(df_record_entry_block_defs): Likewise.
+	(df_record_exit_block_defs): Likewise.
+	(df_refs_verify): Likewise.
+	(df_mws_verify): Likewise.
+	(df_insn_refs_verify): Likewise.
+	(df_bb_verify): Likewise.
+	* ipa-pure-const.c (finish_state): Remove.
+	(propagate): Adjust.
+	* tree-data-ref.c tree-ssa-alias.c tree-ssa-loop-ivcanon.c
+	tree-ssa-threadedge.c tree-vect-loop-manip.c tree-vect-slp.c
+	var-tracking.c: Adjust.
+	* vec.c (stack_vecs): Remove.
+	(register_stack_vec): Likewise.
+	(stack_vec_register_index): Likewise.
+	(unregister_stack_vec): Likewise.
+	* vec.h (struct va_stack): Remove.
+	(struct vec<T, A, vl_ptr>): Specialize as
+	struct vec<T, va_heap, vl_ptr> instead since va_heap is the only
+	allocation strategy compatible with the vl_ptr layout.
+	(struct vec<T, va_gc, vl_ptr>): Remove because it now gets an empty
+	specialization anyway.
+	(class stack_vec): New class.
+	(vec_stack_alloc): Remove.
+	(vec<T, va_heap, vl_ptr>::using_auto_storage): New method.
+
 2013-10-28  Alexander Ivchenko  <alexander.ivchenko@intel.com>
 	    Maxim Kuznetsov  <maxim.kuznetsov@intel.com>
 	    Sergey Lega  <sergey.s.lega@intel.com>
gcc/ada/ChangeLog
@@ -1,3 +1,7 @@
+2013-10-28  Trevor Saunders  <tsaunders@mozilla.com>
+
+	* gcc-interface/decl.c (components_to_record): Adjust.
+
 2013-10-24  Rainer Orth  <ro@CeBiTec.Uni-Bielefeld.DE>
 
 	* gcc-interface/Make-lang.in (ADA_DEPS): Fix quoting.
gcc/ada/gcc-interface/decl.c
@@ -7003,13 +7003,11 @@ components_to_record (tree gnu_record_type, Node_Id gnat_component_list,
   tree gnu_union_type, gnu_union_name;
   tree this_first_free_pos, gnu_variant_list = NULL_TREE;
   bool union_field_needs_strict_alignment = false;
-  vec <vinfo_t, va_stack> variant_types;
+  stack_vec <vinfo_t, 16> variant_types;
   vinfo_t *gnu_variant;
   unsigned int variants_align = 0;
   unsigned int i;
 
-  vec_stack_alloc (vinfo_t, variant_types, 16);
-
   if (TREE_CODE (gnu_name) == TYPE_DECL)
     gnu_name = DECL_NAME (gnu_name);
 
@@ -7205,9 +7203,6 @@ components_to_record (tree gnu_record_type, Node_Id gnat_component_list,
       gnu_variant_list = gnu_field;
     }
 
-  /* We are done with the variants.  */
-  variant_types.release ();
-
   /* Only make the QUAL_UNION_TYPE if there are non-empty variants.  */
   if (gnu_variant_list)
     {
gcc/df-scan.c
@@ -86,10 +86,10 @@ static HARD_REG_SET elim_reg_set;
 
 struct df_collection_rec
 {
-  vec<df_ref, va_stack> def_vec;
-  vec<df_ref, va_stack> use_vec;
-  vec<df_ref, va_stack> eq_use_vec;
-  vec<df_mw_hardreg_ptr, va_stack> mw_vec;
+  stack_vec<df_ref, 128> def_vec;
+  stack_vec<df_ref, 32> use_vec;
+  stack_vec<df_ref, 32> eq_use_vec;
+  stack_vec<df_mw_hardreg_ptr, 32> mw_vec;
 };
 
 static df_ref df_null_ref_rec[1];
@@ -131,7 +131,7 @@ static void df_ref_chain_delete_du_chain (df_ref *);
 static void df_ref_chain_delete (df_ref *);
 
 static void df_refs_add_to_chains (struct df_collection_rec *,
-				   basic_block, rtx);
+				   basic_block, rtx, unsigned int);
 
 static bool df_insn_refs_verify (struct df_collection_rec *, basic_block, rtx, bool);
 static void df_entry_block_defs_collect (struct df_collection_rec *, bitmap);
@@ -153,6 +153,14 @@ static void df_insn_info_delete (unsigned int);
    and epilogue to save and restore registers as needed.  */
 
 static bool regs_ever_live[FIRST_PSEUDO_REGISTER];
 
+/* Flags used to tell df_refs_add_to_chains() which vectors it should copy.  */
+static const unsigned int copy_defs = 0x1;
+static const unsigned int copy_uses = 0x2;
+static const unsigned int copy_eq_uses = 0x4;
+static const unsigned int copy_mw = 0x8;
+static const unsigned int copy_all = copy_defs | copy_uses | copy_eq_uses
+  | copy_mw;
+
 /*----------------------------------------------------------------------------
    SCANNING DATAFLOW PROBLEM
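The copy_* constants just added are plain bit flags, so callers compose them with bitwise OR, and copy_all is simply the union of the four. A runnable sketch of how the dispatch is meant to work; the constants mirror the patch, while the function body and printf calls are hypothetical stand-ins for the real installation code:

/* Hedged sketch of the flag-driven dispatch in df_refs_add_to_chains.  */
#include <cstdio>

static const unsigned int copy_defs = 0x1;
static const unsigned int copy_uses = 0x2;
static const unsigned int copy_eq_uses = 0x4;
static const unsigned int copy_mw = 0x8;
static const unsigned int copy_all
  = copy_defs | copy_uses | copy_eq_uses | copy_mw;

static void
add_to_chains (unsigned int flags)
{
  /* Each vector is copied only when its bit is set, replacing the old
     "does this vector exist?" test on the collection rec.  */
  if (flags & copy_defs)
    printf ("install defs\n");
  if (flags & copy_uses)
    printf ("install uses\n");
  if (flags & copy_eq_uses)
    printf ("install eq_uses\n");
  if (flags & copy_mw)
    printf ("install mw hardregs\n");
}

int main ()
{
  add_to_chains (copy_all);       /* all four vectors, as df_insn_rescan asks */
  add_to_chains (copy_eq_uses);   /* what df_notes_rescan now asks for */
}

This replaces the old convention where a caller released a vector to signal "skip this one", which no longer works once the vectors are permanent members with auto storage.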
@@ -1268,11 +1276,6 @@ df_insn_rescan (rtx insn)
       return false;
     }
 
-  vec_stack_alloc (df_ref, collection_rec.def_vec, 128);
-  vec_stack_alloc (df_ref, collection_rec.use_vec, 32);
-  vec_stack_alloc (df_ref, collection_rec.eq_use_vec, 32);
-  vec_stack_alloc (df_mw_hardreg_ptr, collection_rec.mw_vec, 32);
-
   bitmap_clear_bit (&df->insns_to_delete, uid);
   bitmap_clear_bit (&df->insns_to_rescan, uid);
   bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
@@ -1306,15 +1309,10 @@ df_insn_rescan (rtx insn)
 	fprintf (dump_file, "scanning new insn with uid = %d.\n", uid);
     }
 
-  df_refs_add_to_chains (&collection_rec, bb, insn);
+  df_refs_add_to_chains (&collection_rec, bb, insn, copy_all);
   if (!DEBUG_INSN_P (insn))
     df_set_bb_dirty (bb);
 
-  collection_rec.def_vec.release ();
-  collection_rec.use_vec.release ();
-  collection_rec.eq_use_vec.release ();
-  collection_rec.mw_vec.release ();
-
   return true;
 }
 
@@ -2221,10 +2219,6 @@ df_notes_rescan (rtx insn)
       unsigned int num_deleted;
       unsigned int mw_len;
 
-      memset (&collection_rec, 0, sizeof (struct df_collection_rec));
-      vec_stack_alloc (df_ref, collection_rec.eq_use_vec, 32);
-      vec_stack_alloc (df_mw_hardreg_ptr, collection_rec.mw_vec, 32);
-
       num_deleted = df_mw_hardreg_chain_delete_eq_uses (insn_info);
       df_ref_chain_delete (insn_info->eq_uses);
       insn_info->eq_uses = NULL;
@@ -2287,11 +2281,7 @@
 	      insn_info->mw_hardregs[mw_len] = NULL;
 	    }
 	}
-      /* Get rid of the mw_rec so that df_refs_add_to_chains will
-	 ignore it.  */
-      collection_rec.mw_vec.release ();
-      df_refs_add_to_chains (&collection_rec, bb, insn);
-      collection_rec.eq_use_vec.release ();
+      df_refs_add_to_chains (&collection_rec, bb, insn, copy_eq_uses);
     }
   else
     df_insn_rescan (insn);
@@ -2391,7 +2381,7 @@ df_ref_compare (const void *r1, const void *r2)
 }
 
 static void
-df_swap_refs (vec<df_ref, va_stack> *ref_vec, int i, int j)
+df_swap_refs (vec<df_ref, va_heap> *ref_vec, int i, int j)
 {
   df_ref tmp = (*ref_vec)[i];
   (*ref_vec)[i] = (*ref_vec)[j];
@@ -2401,7 +2391,7 @@ df_swap_refs (vec<df_ref, va_heap> *ref_vec, int i, int j)
 /* Sort and compress a set of refs.  */
 
 static void
-df_sort_and_compress_refs (vec<df_ref, va_stack> *ref_vec)
+df_sort_and_compress_refs (vec<df_ref, va_heap> *ref_vec)
 {
   unsigned int count;
   unsigned int i;
@@ -2510,7 +2500,7 @@ df_mw_compare (const void *m1, const void *m2)
 /* Sort and compress a set of refs.  */
 
 static void
-df_sort_and_compress_mws (vec<df_mw_hardreg_ptr, va_stack> *mw_vec)
+df_sort_and_compress_mws (vec<df_mw_hardreg_ptr, va_heap> *mw_vec)
 {
   unsigned int count;
   struct df_scan_problem_data *problem_data
@@ -2621,14 +2611,12 @@ df_install_ref (df_ref this_ref,
 
 static df_ref *
 df_install_refs (basic_block bb,
-		 vec<df_ref, va_stack> old_vec,
+		 const vec<df_ref, va_heap> *old_vec,
 		 struct df_reg_info **reg_info,
 		 struct df_ref_info *ref_info,
 		 bool is_notes)
 {
-  unsigned int count;
-
-  count = old_vec.length ();
+  unsigned int count = old_vec->length ();
   if (count)
     {
       df_ref *new_vec = XNEWVEC (df_ref, count + 1);
@@ -2659,7 +2647,7 @@ df_install_refs (basic_block bb,
       if (add_to_table && df->analyze_subset)
 	add_to_table = bitmap_bit_p (df->blocks_to_analyze, bb->index);
 
-      FOR_EACH_VEC_ELT (old_vec, ix, this_ref)
+      FOR_EACH_VEC_ELT (*old_vec, ix, this_ref)
 	{
 	  new_vec[ix] = this_ref;
 	  df_install_ref (this_ref, reg_info[DF_REF_REGNO (this_ref)],
@@ -2678,16 +2666,14 @@
    insn.  */
 
 static struct df_mw_hardreg **
-df_install_mws (vec<df_mw_hardreg_ptr, va_stack> old_vec)
+df_install_mws (const vec<df_mw_hardreg_ptr, va_heap> *old_vec)
 {
-  unsigned int count;
-
-  count = old_vec.length ();
+  unsigned int count = old_vec->length ();
  if (count)
    {
      struct df_mw_hardreg **new_vec
	= XNEWVEC (struct df_mw_hardreg*, count + 1);
-      memcpy (new_vec, old_vec.address (),
+      memcpy (new_vec, old_vec->address (),
 	      sizeof (struct df_mw_hardreg*) * count);
       new_vec[count] = NULL;
       return new_vec;
@@ -2702,7 +2688,7 @@ df_install_mws (const vec<df_mw_hardreg_ptr, va_heap> *old_vec)
 
 static void
 df_refs_add_to_chains (struct df_collection_rec *collection_rec,
-		       basic_block bb, rtx insn)
+		       basic_block bb, rtx insn, unsigned int flags)
 {
   if (insn)
     {
@@ -2710,35 +2696,35 @@ df_refs_add_to_chains (struct df_collection_rec *collection_rec,
       /* If there is a vector in the collection rec, add it to the
 	 insn.  A null rec is a signal that the caller will handle the
 	 chain specially.  */
-      if (collection_rec->def_vec.exists ())
+      if (flags & copy_defs)
 	{
 	  df_scan_free_ref_vec (insn_rec->defs);
 	  insn_rec->defs
-	    = df_install_refs (bb, collection_rec->def_vec,
+	    = df_install_refs (bb, &collection_rec->def_vec,
 			       df->def_regs,
 			       &df->def_info, false);
 	}
-      if (collection_rec->use_vec.exists ())
+      if (flags & copy_uses)
 	{
 	  df_scan_free_ref_vec (insn_rec->uses);
 	  insn_rec->uses
-	    = df_install_refs (bb, collection_rec->use_vec,
+	    = df_install_refs (bb, &collection_rec->use_vec,
 			       df->use_regs,
 			       &df->use_info, false);
 	}
-      if (collection_rec->eq_use_vec.exists ())
+      if (flags & copy_eq_uses)
 	{
 	  df_scan_free_ref_vec (insn_rec->eq_uses);
 	  insn_rec->eq_uses
-	    = df_install_refs (bb, collection_rec->eq_use_vec,
+	    = df_install_refs (bb, &collection_rec->eq_use_vec,
 			       df->eq_use_regs,
 			       &df->use_info, true);
 	}
-      if (collection_rec->mw_vec.exists ())
+      if (flags & copy_mw)
 	{
 	  df_scan_free_mws_vec (insn_rec->mw_hardregs);
 	  insn_rec->mw_hardregs
-	    = df_install_mws (collection_rec->mw_vec);
+	    = df_install_mws (&collection_rec->mw_vec);
 	}
     }
   else
@@ -2747,12 +2733,12 @@ df_refs_add_to_chains (struct df_collection_rec *collection_rec,
 
       df_scan_free_ref_vec (bb_info->artificial_defs);
       bb_info->artificial_defs
-	= df_install_refs (bb, collection_rec->def_vec,
+	= df_install_refs (bb, &collection_rec->def_vec,
 			   df->def_regs,
 			   &df->def_info, false);
       df_scan_free_ref_vec (bb_info->artificial_uses);
       bb_info->artificial_uses
-	= df_install_refs (bb, collection_rec->use_vec,
+	= df_install_refs (bb, &collection_rec->use_vec,
 			   df->use_regs,
 			   &df->use_info, false);
     }
@@ -3633,17 +3619,12 @@ df_bb_refs_record (int bb_index, bool scan_insns)
   basic_block bb = BASIC_BLOCK (bb_index);
   rtx insn;
   int luid = 0;
-  struct df_collection_rec collection_rec;
 
   if (!df)
     return;
 
+  df_collection_rec collection_rec;
   df_grow_bb_info (df_scan);
-  vec_stack_alloc (df_ref, collection_rec.def_vec, 128);
-  vec_stack_alloc (df_ref, collection_rec.use_vec, 32);
-  vec_stack_alloc (df_ref, collection_rec.eq_use_vec, 32);
-  vec_stack_alloc (df_mw_hardreg_ptr, collection_rec.mw_vec, 32);
-
   if (scan_insns)
     /* Scan the block an insn at a time from beginning to end.  */
     FOR_BB_INSNS (bb, insn)
@@ -3657,19 +3638,14 @@ df_bb_refs_record (int bb_index, bool scan_insns)
 	    /* Record refs within INSN.  */
 	    DF_INSN_INFO_LUID (insn_info) = luid++;
 	    df_insn_refs_collect (&collection_rec, bb, DF_INSN_INFO_GET (insn));
-	    df_refs_add_to_chains (&collection_rec, bb, insn);
+	    df_refs_add_to_chains (&collection_rec, bb, insn, copy_all);
 	  }
 	DF_INSN_INFO_LUID (insn_info) = luid;
       }
 
   /* Other block level artificial refs */
   df_bb_refs_collect (&collection_rec, bb);
-  df_refs_add_to_chains (&collection_rec, bb, NULL);
-
-  collection_rec.def_vec.release ();
-  collection_rec.use_vec.release ();
-  collection_rec.eq_use_vec.release ();
-  collection_rec.mw_vec.release ();
+  df_refs_add_to_chains (&collection_rec, bb, NULL, copy_all);
 
   /* Now that the block has been processed, set the block as dirty so
      LR and LIVE will get it processed.  */
@@ -3911,13 +3887,11 @@ static void
 df_record_entry_block_defs (bitmap entry_block_defs)
 {
   struct df_collection_rec collection_rec;
-  memset (&collection_rec, 0, sizeof (struct df_collection_rec));
-  vec_stack_alloc (df_ref, collection_rec.def_vec, FIRST_PSEUDO_REGISTER);
   df_entry_block_defs_collect (&collection_rec, entry_block_defs);
 
   /* Process bb_refs chain */
-  df_refs_add_to_chains (&collection_rec, BASIC_BLOCK (ENTRY_BLOCK), NULL);
-  collection_rec.def_vec.release ();
+  df_refs_add_to_chains (&collection_rec, BASIC_BLOCK (ENTRY_BLOCK), NULL,
+			 copy_defs);
 }
 
 
@@ -4084,13 +4058,11 @@ static void
 df_record_exit_block_uses (bitmap exit_block_uses)
 {
   struct df_collection_rec collection_rec;
-  memset (&collection_rec, 0, sizeof (struct df_collection_rec));
-  vec_stack_alloc (df_ref, collection_rec.use_vec, FIRST_PSEUDO_REGISTER);
   df_exit_block_uses_collect (&collection_rec, exit_block_uses);
 
   /* Process bb_refs chain */
-  df_refs_add_to_chains (&collection_rec, BASIC_BLOCK (EXIT_BLOCK), NULL);
-  collection_rec.use_vec.release ();
+  df_refs_add_to_chains (&collection_rec, BASIC_BLOCK (EXIT_BLOCK), NULL,
+			 copy_uses);
 }
 
 
@@ -4331,13 +4303,13 @@ df_reg_chain_verify_unmarked (df_ref refs)
 /* Verify that NEW_REC and OLD_REC have exactly the same members.  */
 
 static bool
-df_refs_verify (vec<df_ref, va_stack> new_rec, df_ref *old_rec,
+df_refs_verify (const vec<df_ref, va_heap> *new_rec, df_ref *old_rec,
 		bool abort_if_fail)
 {
   unsigned int ix;
   df_ref new_ref;
 
-  FOR_EACH_VEC_ELT (new_rec, ix, new_ref)
+  FOR_EACH_VEC_ELT (*new_rec, ix, new_ref)
     {
       if (*old_rec == NULL || !df_ref_equal_p (new_ref, *old_rec))
 	{
@@ -4369,14 +4341,14 @@ df_refs_verify (const vec<df_ref, va_heap> *new_rec, df_ref *old_rec,
 /* Verify that NEW_REC and OLD_REC have exactly the same members.  */
 
 static bool
-df_mws_verify (vec<df_mw_hardreg_ptr, va_stack> new_rec,
+df_mws_verify (const vec<df_mw_hardreg_ptr, va_heap> *new_rec,
 	       struct df_mw_hardreg **old_rec,
 	       bool abort_if_fail)
 {
   unsigned int ix;
   struct df_mw_hardreg *new_reg;
 
-  FOR_EACH_VEC_ELT (new_rec, ix, new_reg)
+  FOR_EACH_VEC_ELT (*new_rec, ix, new_reg)
     {
       if (*old_rec == NULL || !df_mw_equal_p (new_reg, *old_rec))
 	{
@@ -4430,13 +4402,13 @@ df_insn_refs_verify (struct df_collection_rec *collection_rec,
 
   /* Unfortunately we cannot opt out early if one of these is not
     right because the marks will not get cleared.  */
-  ret1 = df_refs_verify (collection_rec->def_vec, DF_INSN_UID_DEFS (uid),
+  ret1 = df_refs_verify (&collection_rec->def_vec, DF_INSN_UID_DEFS (uid),
 			 abort_if_fail);
-  ret2 = df_refs_verify (collection_rec->use_vec, DF_INSN_UID_USES (uid),
+  ret2 = df_refs_verify (&collection_rec->use_vec, DF_INSN_UID_USES (uid),
 			 abort_if_fail);
-  ret3 = df_refs_verify (collection_rec->eq_use_vec, DF_INSN_UID_EQ_USES (uid),
+  ret3 = df_refs_verify (&collection_rec->eq_use_vec, DF_INSN_UID_EQ_USES (uid),
 			 abort_if_fail);
-  ret4 = df_mws_verify (collection_rec->mw_vec, DF_INSN_UID_MWS (uid),
+  ret4 = df_mws_verify (&collection_rec->mw_vec, DF_INSN_UID_MWS (uid),
 			 abort_if_fail);
   return (ret1 && ret2 && ret3 && ret4);
 }
@@ -4453,12 +4425,6 @@ df_bb_verify (basic_block bb)
   struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb->index);
   struct df_collection_rec collection_rec;
 
-  memset (&collection_rec, 0, sizeof (struct df_collection_rec));
-  vec_stack_alloc (df_ref, collection_rec.def_vec, 128);
-  vec_stack_alloc (df_ref, collection_rec.use_vec, 32);
-  vec_stack_alloc (df_ref, collection_rec.eq_use_vec, 32);
-  vec_stack_alloc (df_mw_hardreg_ptr, collection_rec.mw_vec, 32);
-
   gcc_assert (bb_info);
 
   /* Scan the block, one insn at a time, from beginning to end.  */
@@ -4472,8 +4438,8 @@ df_bb_verify (basic_block bb)
 
   /* Do the artificial defs and uses.  */
   df_bb_refs_collect (&collection_rec, bb);
-  df_refs_verify (collection_rec.def_vec, df_get_artificial_defs (bb->index), true);
-  df_refs_verify (collection_rec.use_vec, df_get_artificial_uses (bb->index), true);
+  df_refs_verify (&collection_rec.def_vec, df_get_artificial_defs (bb->index), true);
+  df_refs_verify (&collection_rec.use_vec, df_get_artificial_uses (bb->index), true);
   df_free_collection_rec (&collection_rec);
 
   return true;
gcc/ipa-pure-const.c
@@ -190,15 +190,6 @@ warn_function_noreturn (tree decl)
 			  true, warned_about, "noreturn");
 }
 
-/* Init the function state.  */
-
-static void
-finish_state (void)
-{
-  funct_state_vec.release ();
-}
-
-
 /* Return true if we have a function state for NODE.  */
 
 static inline bool
@@ -1488,7 +1479,6 @@ propagate (void)
     if (has_function_state (node))
       free (get_function_state (node));
   funct_state_vec.release ();
-  finish_state ();
   return 0;
 }
 
gcc/tree-data-ref.c
@@ -4325,7 +4325,7 @@ typedef struct data_ref_loc_d
    true if STMT clobbers memory, false otherwise.  */
 
 static bool
-get_references_in_stmt (gimple stmt, vec<data_ref_loc, va_stack> *references)
+get_references_in_stmt (gimple stmt, vec<data_ref_loc, va_heap> *references)
 {
   bool clobbers_memory = false;
   data_ref_loc ref;
@@ -4417,17 +4417,13 @@ find_data_references_in_stmt (struct loop *nest, gimple stmt,
 			      vec<data_reference_p> *datarefs)
 {
   unsigned i;
-  vec<data_ref_loc, va_stack> references;
+  stack_vec<data_ref_loc, 2> references;
   data_ref_loc *ref;
   bool ret = true;
   data_reference_p dr;
 
-  vec_stack_alloc (data_ref_loc, references, 2);
   if (get_references_in_stmt (stmt, &references))
-    {
-      references.release ();
-      return false;
-    }
+    return false;
 
   FOR_EACH_VEC_ELT (references, i, ref)
     {
@@ -4451,17 +4447,13 @@ graphite_find_data_references_in_stmt (loop_p nest, loop_p loop, gimple stmt,
 			      vec<data_reference_p> *datarefs)
 {
   unsigned i;
-  vec<data_ref_loc, va_stack> references;
+  stack_vec<data_ref_loc, 2> references;
   data_ref_loc *ref;
   bool ret = true;
   data_reference_p dr;
 
-  vec_stack_alloc (data_ref_loc, references, 2);
   if (get_references_in_stmt (stmt, &references))
-    {
-      references.release ();
-      return false;
-    }
+    return false;
 
   FOR_EACH_VEC_ELT (references, i, ref)
    {
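Note what the shorter early exit above buys: the manual references.release () before `return false;` disappears because ~stack_vec () releases the storage itself. A small stand-alone sketch of that RAII effect, with std::vector standing in for GCC's stack_vec and every name here hypothetical:

/* Hedged sketch: destructor-driven cleanup removes the release-before-return
   pattern the old code needed.  */
#include <cstdio>
#include <vector>

struct auto_vec_like
{
  std::vector<int> v;                /* storage freed by the destructor */
  ~auto_vec_like () { printf ("released\n"); }
};

static bool
collect (auto_vec_like &refs, bool fail)
{
  refs.v.push_back (1);
  return fail;
}

static bool
find_refs (bool fail)
{
  auto_vec_like references;
  if (collect (references, fail))
    return false;                    /* no manual release () needed here */
  /* ... use references ... */
  return true;
}

int main ()
{
  find_refs (true);                  /* "released" prints on the early exit */
}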
gcc/tree-ssa-alias.c
@@ -736,11 +736,8 @@ aliasing_component_refs_p (tree ref1,
 static bool
 nonoverlapping_component_refs_of_decl_p (tree ref1, tree ref2)
 {
-  vec<tree, va_stack> component_refs1;
-  vec<tree, va_stack> component_refs2;
-
-  vec_stack_alloc (tree, component_refs1, 16);
-  vec_stack_alloc (tree, component_refs2, 16);
+  stack_vec<tree, 16> component_refs1;
+  stack_vec<tree, 16> component_refs2;
 
   /* Create the stack of handled components for REF1.  */
   while (handled_component_p (ref1))
gcc/tree-ssa-loop-ivcanon.c
@@ -1100,7 +1100,7 @@ propagate_constants_for_unrolling (basic_block bb)
 
 static bool
 tree_unroll_loops_completely_1 (bool may_increase_size, bool unroll_outer,
-				vec<loop_p, va_stack>& father_stack,
+				vec<loop_p, va_heap>& father_stack,
 				struct loop *loop)
 {
   struct loop *loop_father;
@@ -1164,12 +1164,11 @@ tree_unroll_loops_completely_1 (bool may_increase_size, bool unroll_outer,
 unsigned int
 tree_unroll_loops_completely (bool may_increase_size, bool unroll_outer)
 {
-  vec<loop_p, va_stack> father_stack;
+  stack_vec<loop_p, 16> father_stack;
   bool changed;
   int iteration = 0;
   bool irred_invalidated = false;
 
-  vec_stack_alloc (loop_p, father_stack, 16);
   do
     {
       changed = false;
gcc/tree-ssa-threadedge.c
@@ -644,7 +644,7 @@ propagate_threaded_block_debug_into (basic_block dest, basic_block src)
 	  i++;
 	}
 
-      vec<tree, va_stack> fewvars = vNULL;
+      stack_vec<tree, alloc_count> fewvars;
       pointer_set_t *vars = NULL;
 
       /* If we're already starting with 3/4 of alloc_count, go for a
@@ -652,8 +652,6 @@ propagate_threaded_block_debug_into (basic_block dest, basic_block src)
 	 VEC.  */
       if (i * 4 > alloc_count * 3)
 	vars = pointer_set_create ();
-      else if (alloc_count)
-	vec_stack_alloc (tree, fewvars, alloc_count);
 
       /* Now go through the initial debug stmts in DEST again, this time
 	 actually inserting in VARS or FEWVARS.  Don't bother checking for
gcc/tree-vect-loop-manip.c
@@ -115,7 +115,7 @@ typedef struct
    with a PHI DEF that would soon become non-dominant, and when we got
    to the suitable one, it wouldn't have anything to substitute any
    more.  */
-static vec<adjust_info, va_stack> adjust_vec;
+static vec<adjust_info, va_heap> adjust_vec;
 
 /* Adjust any debug stmts that referenced AI->from values to use the
    loop-closed AI->to, if the references are dominated by AI->bb and
@@ -1133,7 +1133,7 @@ slpeel_tree_peel_loop_to_edge (struct loop *loop,
   if (MAY_HAVE_DEBUG_STMTS)
     {
       gcc_assert (!adjust_vec.exists ());
-      vec_stack_alloc (adjust_info, adjust_vec, 32);
+      adjust_vec.create (32);
     }
 
   if (e == exit_e)
gcc/tree-vect-slp.c
@@ -1934,7 +1934,7 @@ vect_slp_analyze_operations (bb_vec_info bb_vinfo)
 
 static unsigned
 vect_bb_slp_scalar_cost (basic_block bb,
-			 slp_tree node, vec<bool, va_stack> life)
+			 slp_tree node, vec<bool, va_heap> *life)
 {
   unsigned scalar_cost = 0;
   unsigned i;
@@ -1948,7 +1948,7 @@ vect_bb_slp_scalar_cost (basic_block bb,
       def_operand_p def_p;
       stmt_vec_info stmt_info;
 
-      if (life[i])
+      if ((*life)[i])
 	continue;
 
       /* If there is a non-vectorized use of the defs then the scalar
@@ -1965,11 +1965,11 @@ vect_bb_slp_scalar_cost (basic_block bb,
 	    || gimple_bb (use_stmt) != bb
 	    || !STMT_VINFO_VECTORIZABLE (vinfo_for_stmt (use_stmt)))
 	  {
-	    life[i] = true;
+	    (*life)[i] = true;
 	    BREAK_FROM_IMM_USE_STMT (use_iter);
 	  }
       }
-      if (life[i])
+      if ((*life)[i])
 	continue;
 
       stmt_info = vinfo_for_stmt (stmt);
@@ -2023,13 +2023,11 @@ vect_bb_vectorization_profitable_p (bb_vec_info bb_vinfo)
   /* Calculate scalar cost.  */
   FOR_EACH_VEC_ELT (slp_instances, i, instance)
     {
-      vec<bool, va_stack> life;
-      vec_stack_alloc (bool, life, SLP_INSTANCE_GROUP_SIZE (instance));
-      life.quick_grow_cleared (SLP_INSTANCE_GROUP_SIZE (instance));
+      stack_vec<bool, 20> life;
+      life.safe_grow_cleared (SLP_INSTANCE_GROUP_SIZE (instance));
       scalar_cost += vect_bb_slp_scalar_cost (BB_VINFO_BB (bb_vinfo),
 					      SLP_INSTANCE_TREE (instance),
-					      life);
-      life.release ();
+					      &life);
     }
 
   /* Complete the target-specific cost calculation.  */
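The life vector now travels by pointer rather than by value. A stack_vec must not be copied: the copy's internal pointer would still aim at the original object's embedded storage, and its destructor would release storage it does not own. Passing `&life` sidesteps that, and writes through `(*life)[i]` stay visible to the caller. A tiny illustrative sketch, with std::vector as a stand-in and all names hypothetical:

/* Hedged sketch of the by-pointer convention adopted above.  */
#include <cstdio>
#include <vector>

static void
mark_used (std::vector<bool> *life, unsigned i)
{
  (*life)[i] = true;   /* caller sees this, as with (*life)[i] in the patch */
}

int main ()
{
  std::vector<bool> life (4, false);
  mark_used (&life, 2);
  printf ("%d\n", static_cast<int> (life[2]));   /* prints 1 */
}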
gcc/var-tracking.c
@@ -7907,7 +7907,7 @@ struct expand_loc_callback_data
 
   /* Stack of values and debug_exprs under expansion, and their
      children.  */
-  vec<rtx, va_stack> expanding;
+  stack_vec<rtx, 4> expanding;
 
   /* Stack of values and debug_exprs whose expansion hit recursion
      cycles.  They will have VALUE_RECURSED_INTO marked when added to
@@ -7915,7 +7915,7 @@ struct expand_loc_callback_data
      resolves to a valid location.  So, if the flag remains set at the
      end of the search, we know no valid location for this one can
     possibly exist.  */
-  vec<rtx, va_stack> pending;
+  stack_vec<rtx, 4> pending;
 
   /* The maximum depth among the sub-expressions under expansion.
      Zero indicates no expansion so far.  */
@@ -8417,11 +8417,11 @@ vt_expand_loc_callback (rtx x, bitmap regs,
    This function performs this finalization of NULL locations.  */
 
 static void
-resolve_expansions_pending_recursion (vec<rtx, va_stack> pending)
+resolve_expansions_pending_recursion (vec<rtx, va_heap> *pending)
 {
-  while (!pending.is_empty ())
+  while (!pending->is_empty ())
     {
-      rtx x = pending.pop ();
+      rtx x = pending->pop ();
       decl_or_value dv;
 
       if (!VALUE_RECURSED_INTO (x))
@@ -8441,8 +8441,6 @@ resolve_expansions_pending_recursion (vec<rtx, va_heap> *pending)
   do								\
     {								\
      (d).vars = (v);						\
-      vec_stack_alloc (rtx, (d).expanding, 4);			\
-      vec_stack_alloc (rtx, (d).pending, 4);			\
      (d).depth.complexity = (d).depth.entryvals = 0;		\
    }								\
   while (0)
@@ -8450,7 +8448,7 @@ resolve_expansions_pending_recursion (vec<rtx, va_heap> *pending)
 #define FINI_ELCD(d, l)					\
   do								\
     {								\
-      resolve_expansions_pending_recursion ((d).pending);	\
+      resolve_expansions_pending_recursion (&(d).pending);	\
      (d).pending.release ();					\
      (d).expanding.release ();				\
 							\
@@ -8744,7 +8742,7 @@ emit_note_insn_var_location (variable_def **varp, emit_note_data *data)
 
 int
 var_track_values_to_stack (variable_def **slot,
-			   vec<rtx, va_stack> *changed_values_stack)
+			   vec<rtx, va_heap> *changed_values_stack)
 {
   variable var = *slot;
 
@@ -8779,7 +8777,7 @@ remove_value_from_changed_variables (rtx val)
 
 static void
 notify_dependents_of_changed_value (rtx val, variable_table_type htab,
-				    vec<rtx, va_stack> *changed_values_stack)
+				    vec<rtx, va_heap> *changed_values_stack)
 {
   variable_def **slot;
   variable var;
@@ -8864,13 +8862,11 @@ process_changed_values (variable_table_type htab)
 {
   int i, n;
   rtx val;
-  vec<rtx, va_stack> changed_values_stack;
-
-  vec_stack_alloc (rtx, changed_values_stack, 20);
+  stack_vec<rtx, 20> changed_values_stack;
 
   /* Move values from changed_variables to changed_values_stack.  */
   changed_variables
-    .traverse <vec<rtx, va_stack>*, var_track_values_to_stack>
+    .traverse <vec<rtx, va_heap>*, var_track_values_to_stack>
     (&changed_values_stack);
 
   /* Back-propagate change notifications in values while popping
@@ -8891,8 +8887,6 @@ process_changed_values (variable_table_type htab)
 	  n--;
 	}
     }
-
-  changed_values_stack.release ();
 }
 
 /* Emit NOTE_INSN_VAR_LOCATION note for each variable from a chain
gcc/vec.c
@@ -217,49 +217,6 @@ vec_prefix::calculate_allocation (vec_prefix *pfx, unsigned reserve,
 }
 
 
-/* Stack vectors are a little different.  VEC_alloc turns into a call
-   to vec<T, A>::stack_reserve and passes in space allocated via a
-   call to alloca.  We record that pointer so that we know that we
-   shouldn't free it.  If the vector is resized, we resize it on the
-   heap.  We record the pointers in a vector and search it in LIFO
-   order--i.e., we look for the newest stack vectors first.  We don't
-   expect too many stack vectors at any one level, and searching from
-   the end should normally be efficient even if they are used in a
-   recursive function.  */
-
-static vec<void *> stack_vecs;
-
-/* Add a stack vector to STACK_VECS.  */
-
-void
-register_stack_vec (void *vec)
-{
-  stack_vecs.safe_push (vec);
-}
-
-
-/* If VEC is registered in STACK_VECS, return its index.
-   Otherwise, return -1.  */
-
-int
-stack_vec_register_index (void *vec)
-{
-  for (unsigned ix = stack_vecs.length (); ix > 0; --ix)
-    if (stack_vecs[ix - 1] == vec)
-      return static_cast<int> (ix - 1);
-  return -1;
-}
-
-
-/* Remove vector at slot IX from the list of registered stack vectors.  */
-
-void
-unregister_stack_vec (unsigned ix)
-{
-  stack_vecs.unordered_remove (ix);
-}
-
-
 /* Helper for qsort; sort descriptors by amount of memory consumed.  */
 
 static int
gcc/vec.h
@@ -101,8 +101,6 @@ along with GCC; see the file COPYING3.  If not see
     - Heap: allocation is done using malloc/free.  This is the
       default allocation strategy.
 
-    - Stack: allocation is done using alloca.
-
     - GC: allocation is done using ggc_alloc/ggc_free.
 
     - GC atomic: same as GC with the exception that the elements
@@ -233,9 +231,9 @@ struct vec_prefix
   friend struct va_gc;
   friend struct va_gc_atomic;
   friend struct va_heap;
-  friend struct va_stack;
 
-  unsigned m_alloc;
+  unsigned m_alloc : 31;
+  unsigned m_has_auto_buf : 1;
   unsigned m_num;
 };
 
@@ -255,8 +253,7 @@ struct vl_ptr { };
    va_heap	- Allocation uses malloc/free.
    va_gc	- Allocation uses ggc_alloc.
    va_gc_atomic	- Same as GC, but individual elements of the array
-		  do not need to be marked during collection.
-   va_stack	- Allocation uses alloca.  */
+		  do not need to be marked during collection.  */
 
 /* Allocator type for heap vectors.  */
 struct va_heap
@@ -401,107 +398,6 @@ struct va_gc_atomic : va_gc
 };
 
 
-/* Allocator type for stack vectors.  */
-struct va_stack
-{
-  /* Use vl_ptr as the default layout for stack vectors.  */
-  typedef vl_ptr default_layout;
-
-  template<typename T>
-  static void alloc (vec<T, va_stack, vl_ptr>&, unsigned,
-		     vec<T, va_stack, vl_embed> *);
-
-  template <typename T>
-  static void reserve (vec<T, va_stack, vl_embed> *&, unsigned, bool
-		       CXX_MEM_STAT_INFO);
-
-  template <typename T>
-  static void release (vec<T, va_stack, vl_embed> *&);
-};
-
-/* Helper functions to keep track of vectors allocated on the stack.  */
-void register_stack_vec (void *);
-int stack_vec_register_index (void *);
-void unregister_stack_vec (unsigned);
-
-/* Allocate a vector V which uses alloca for the initial allocation.
-   SPACE is space allocated using alloca.  NELEMS is the number of
-   entries allocated.  */
-
-template<typename T>
-void
-va_stack::alloc (vec<T, va_stack, vl_ptr> &v, unsigned nelems,
-		 vec<T, va_stack, vl_embed> *space)
-{
-  v.m_vec = space;
-  register_stack_vec (static_cast<void *> (v.m_vec));
-  v.m_vec->embedded_init (nelems, 0);
-}
-
-
-/* Reserve NELEMS slots for a vector initially allocated on the stack.
-   When this happens, we switch back to heap allocation.  We remove
-   the vector from stack_vecs, if it is there, since we no longer need
-   to avoid freeing it.  If EXACT is true, grow exactly, otherwise
-   grow exponentially.  */
-
-template<typename T>
-void
-va_stack::reserve (vec<T, va_stack, vl_embed> *&v, unsigned nelems, bool exact
-		   MEM_STAT_DECL)
-{
-  int ix = stack_vec_register_index (static_cast<void *> (v));
-  if (ix >= 0)
-    unregister_stack_vec (ix);
-  else
-    {
-      /* V is already on the heap.  */
-      va_heap::reserve (reinterpret_cast<vec<T, va_heap, vl_embed> *&> (v),
-			nelems, exact PASS_MEM_STAT);
-      return;
-    }
-
-  /* Move VEC_ to the heap.  */
-  nelems += v->m_vecpfx.m_num;
-  vec<T, va_stack, vl_embed> *oldvec = v;
-  v = NULL;
-  va_heap::reserve (reinterpret_cast<vec<T, va_heap, vl_embed> *&>(v), nelems,
-		    exact PASS_MEM_STAT);
-  if (v && oldvec)
-    {
-      v->m_vecpfx.m_num = oldvec->length ();
-      memcpy (v->m_vecdata,
-	      oldvec->m_vecdata,
-	      oldvec->length () * sizeof (T));
-    }
-}
-
-
-/* Free a vector allocated on the stack.  Don't actually free it if we
-   find it in the hash table.  */
-
-template<typename T>
-void
-va_stack::release (vec<T, va_stack, vl_embed> *&v)
-{
-  if (v == NULL)
-    return;
-
-  int ix = stack_vec_register_index (static_cast<void *> (v));
-  if (ix >= 0)
-    {
-      unregister_stack_vec (ix);
-      v = NULL;
-    }
-  else
-    {
-      /* The vector was not on the list of vectors allocated on the stack, so it
-	 must be allocated on the heap.  */
-      va_heap::release (reinterpret_cast<vec<T, va_heap, vl_embed> *&> (v));
-    }
-}
-
-
 /* Generic vector template.  Default values for A and L indicate the
    most commonly used strategies.
 
@@ -597,7 +493,6 @@ public:
   friend struct va_gc;
   friend struct va_gc_atomic;
   friend struct va_heap;
-  friend struct va_stack;
 
   /* FIXME - These fields should be private, but we need to cater to
      compilers that have stricter notions of PODness for types.  */
@@ -1112,6 +1007,7 @@ inline void
 vec<T, A, vl_embed>::embedded_init (unsigned alloc, unsigned num)
 {
   m_vecpfx.m_alloc = alloc;
+  m_vecpfx.m_has_auto_buf = 0;
   m_vecpfx.m_num = num;
 }
 
@@ -1218,8 +1114,8 @@ gt_pch_nx (vec<T, A, vl_embed> *v, gt_pointer_operator op, void *cookie)
    As long as we use C++03, we cannot have constructors nor
    destructors in classes that are stored in unions.  */
 
-template<typename T, typename A>
-struct vec<T, A, vl_ptr>
+template<typename T>
+struct vec<T, va_heap, vl_ptr>
 {
 public:
   /* Memory allocation and deallocation for the embedded vector.
@@ -1284,23 +1180,39 @@ public:
   void qsort (int (*) (const void *, const void *));
   unsigned lower_bound (T, bool (*)(const T &, const T &)) const;
 
-  template<typename T1>
-  friend void va_stack::alloc (vec<T1, va_stack, vl_ptr>&, unsigned,
-			       vec<T1, va_stack, vl_embed> *);
+  bool using_auto_storage () const;
 
   /* FIXME - This field should be private, but we need to cater to
      compilers that have stricter notions of PODness for types.  */
-  vec<T, A, vl_embed> *m_vec;
+  vec<T, va_heap, vl_embed> *m_vec;
 };
 
 
-/* Empty specialization for GC allocation.  This will prevent GC
-   vectors from using the vl_ptr layout.  FIXME: This is needed to
-   circumvent limitations in the GTY machinery.  */
-
-template<typename T>
-struct vec<T, va_gc, vl_ptr>
+/* stack_vec is a subclass of vec containing N elements of internal storage.
+   You probably only want to allocate this on the stack because if the array
+   ends up being larger or much smaller than N it will be wasting space.  */
+template<typename T, size_t N>
+class stack_vec : public vec<T, va_heap>
 {
+public:
+  stack_vec ()
+  {
+    m_header.m_alloc = N;
+    m_header.m_has_auto_buf = 1;
+    m_header.m_num = 0;
+    this->m_vec = reinterpret_cast<vec<T, va_heap, vl_embed> *> (&m_header);
+  }
+
+  ~stack_vec ()
+  {
+    this->release ();
+  }
+
+private:
+  friend class vec<T, va_heap, vl_ptr>;
+
+  vec_prefix m_header;
+  T m_data[N];
+};
 
 
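The trick in the constructor above is that m_header and m_data sit back to back inside the object, so the inherited m_vec pointer can be aimed at the object's own storage as if it were a heap-allocated embedded vector. A self-contained sketch of just that layout; the field names mirror the patch, everything else is simplified:

/* Hedged sketch of the stack_vec layout: control block plus element
   array embedded contiguously in the object.  */
#include <cstdio>

struct prefix
{
  unsigned m_alloc : 31;
  unsigned m_has_auto_buf : 1;
  unsigned m_num;
};

template<typename T, unsigned N>
struct stack_vec_layout
{
  prefix m_header;
  T m_data[N];

  stack_vec_layout ()
  {
    m_header.m_alloc = N;
    m_header.m_has_auto_buf = 1;   /* tells release () not to free this */
    m_header.m_num = 0;
  }
};

int main ()
{
  stack_vec_layout<int, 8> v;
  v.m_data[v.m_header.m_num++] = 42;
  printf ("%u %u\n", v.m_header.m_num, (unsigned) v.m_header.m_alloc);
}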
@@ -1343,45 +1255,6 @@ vec_free (vec<T> *&v)
 }
 
 
-/* Allocate a new stack vector with space for exactly NELEMS objects.
-   If NELEMS is zero, NO vector is created.
-
-   For the stack allocator, no memory is really allocated.  The vector
-   is initialized to be at address SPACE and contain NELEMS slots.
-   Memory allocation actually occurs in the expansion of VEC_alloc.
-
-   Usage notes:
-
-   * This does not allocate an instance of vec<T, A>.  It allocates the
-     actual vector of elements (i.e., vec<T, A, vl_embed>) inside a
-     vec<T, A> instance.
-
-   * This allocator must always be a macro:
-
-     We support a vector which starts out with space on the stack and
-     switches to heap space when forced to reallocate.  This works a
-     little differently.  In the case of stack vectors, vec_alloc will
-     expand to a call to vec_alloc_1 that calls XALLOCAVAR to request
-     the initial allocation.  This uses alloca to get the initial
-     space.  Since alloca can not be usefully called in an inline
-     function, vec_alloc must always be a macro.
-
-   Important limitations of stack vectors:
-
-   - Only the initial allocation will be made using alloca, so pass
-     a reasonable estimate that doesn't use too much stack space;
-     don't pass zero.
-
-   - Don't return a stack-allocated vector from the function which
-     allocated it.  */
-
-#define vec_stack_alloc(T,V,N)						\
-  do {									\
-    typedef vec<T, va_stack, vl_embed> stackv;				\
-    va_stack::alloc (V, N, XALLOCAVAR (stackv, stackv::embedded_size (N)));\
-  } while (0)
-
-
 /* Return iteration condition and update PTR to point to the IX'th
    element of this vector.  Use this to iterate over the elements of a
    vector as follows,
@@ -1389,9 +1262,9 @@ vec_free (vec<T> *&v)
     for (ix = 0; v.iterate (ix, &ptr); ix++)
       continue;  */
 
-template<typename T, typename A>
+template<typename T>
 inline bool
-vec<T, A, vl_ptr>::iterate (unsigned ix, T *ptr) const
+vec<T, va_heap, vl_ptr>::iterate (unsigned ix, T *ptr) const
 {
   if (m_vec)
     return m_vec->iterate (ix, ptr);
@@ -1412,9 +1285,9 @@ vec<T, A, vl_ptr>::iterate (unsigned ix, T *ptr) const
 
    This variant is for vectors of objects.  */
 
-template<typename T, typename A>
+template<typename T>
 inline bool
-vec<T, A, vl_ptr>::iterate (unsigned ix, T **ptr) const
+vec<T, va_heap, vl_ptr>::iterate (unsigned ix, T **ptr) const
 {
   if (m_vec)
     return m_vec->iterate (ix, ptr);
@@ -1451,11 +1324,11 @@ vec<T, A, vl_ptr>::iterate (unsigned ix, T **ptr) const
 
 /* Return a copy of this vector.  */
 
-template<typename T, typename A>
-inline vec<T, A, vl_ptr>
-vec<T, A, vl_ptr>::copy (ALONE_MEM_STAT_DECL) const
+template<typename T>
+inline vec<T, va_heap, vl_ptr>
+vec<T, va_heap, vl_ptr>::copy (ALONE_MEM_STAT_DECL) const
 {
-  vec<T, A, vl_ptr> new_vec = vNULL;
+  vec<T, va_heap, vl_ptr> new_vec = vNULL;
   if (length ())
     new_vec.m_vec = m_vec->copy ();
   return new_vec;
@@ -1471,14 +1344,34 @@ vec<T, va_heap, vl_ptr>::copy (ALONE_MEM_STAT_DECL) const
    Note that this can cause the embedded vector to be reallocated.
    Returns true iff reallocation actually occurred.  */
 
-template<typename T, typename A>
+template<typename T>
 inline bool
-vec<T, A, vl_ptr>::reserve (unsigned nelems, bool exact MEM_STAT_DECL)
+vec<T, va_heap, vl_ptr>::reserve (unsigned nelems, bool exact MEM_STAT_DECL)
 {
-  bool extend = nelems ? !space (nelems) : false;
-  if (extend)
-    A::reserve (m_vec, nelems, exact PASS_MEM_STAT);
-  return extend;
+  if (!nelems || space (nelems))
+    return false;
+
+  /* For now play a game with va_heap::reserve to hide our auto storage if any,
+     this is necessary because it doesn't have enough information to know the
+     embedded vector is in auto storage, and so should not be freed.  */
+  vec<T, va_heap, vl_embed> *oldvec = m_vec;
+  unsigned int oldsize = 0;
+  bool handle_auto_vec = m_vec && using_auto_storage ();
+  if (handle_auto_vec)
+    {
+      m_vec = NULL;
+      oldsize = oldvec->length ();
+      nelems += oldsize;
+    }
+
+  va_heap::reserve (m_vec, nelems, exact PASS_MEM_STAT);
+  if (handle_auto_vec)
+    {
+      memcpy (m_vec->address (), oldvec->address (), sizeof (T) * oldsize);
+      m_vec->m_vecpfx.m_num = oldsize;
+    }
+
+  return true;
 }
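The comment in reserve above calls this "playing a game" with va_heap::reserve: the auto buffer must never be realloc'd or freed, so the vector pretends to be empty (m_vec = NULL), lets the heap allocator start from scratch, then copies the old elements across and simply abandons the embedded buffer. A stand-alone sketch of that step, with plain malloc standing in for va_heap and all names illustrative:

/* Hedged sketch of spilling auto storage to the heap without ever
   passing the embedded buffer to the allocator.  */
#include <cstdlib>
#include <cstring>
#include <cstdio>

int main ()
{
  int auto_buf[4] = { 1, 2, 3, 4 };   /* stands in for the embedded storage */
  int *vec = auto_buf;
  unsigned num = 4;

  /* Grow past the auto storage: allocate fresh, never realloc (vec),
     which would be undefined for stack memory.  */
  int *fresh = static_cast<int *> (malloc (8 * sizeof (int)));
  memcpy (fresh, vec, num * sizeof (int));
  vec = fresh;                        /* auto_buf is simply abandoned */

  vec[num++] = 5;
  printf ("%d %u\n", vec[4], num);
  free (vec);
}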
@@ -1487,9 +1380,9 @@ vec<T, va_heap, vl_ptr>::reserve (unsigned nelems, bool exact MEM_STAT_DECL)
    embedded vector to be reallocated.  Returns true iff reallocation
    actually occurred.  */
 
-template<typename T, typename A>
+template<typename T>
 inline bool
-vec<T, A, vl_ptr>::reserve_exact (unsigned nelems MEM_STAT_DECL)
+vec<T, va_heap, vl_ptr>::reserve_exact (unsigned nelems MEM_STAT_DECL)
 {
   return reserve (nelems, true PASS_MEM_STAT);
 }
@@ -1500,9 +1393,9 @@ vec<T, va_heap, vl_ptr>::reserve_exact (unsigned nelems MEM_STAT_DECL)
    unconditionally allocated from scratch.  The old one, if it
    existed, is lost.  */
 
-template<typename T, typename A>
+template<typename T>
 inline void
-vec<T, A, vl_ptr>::create (unsigned nelems MEM_STAT_DECL)
+vec<T, va_heap, vl_ptr>::create (unsigned nelems MEM_STAT_DECL)
 {
   m_vec = NULL;
   if (nelems > 0)
@@ -1512,23 +1405,30 @@ vec<T, va_heap, vl_ptr>::create (unsigned nelems MEM_STAT_DECL)
 
 /* Free the memory occupied by the embedded vector.  */
 
-template<typename T, typename A>
+template<typename T>
 inline void
-vec<T, A, vl_ptr>::release (void)
+vec<T, va_heap, vl_ptr>::release (void)
 {
-  if (m_vec)
-    A::release (m_vec);
-}
+  if (!m_vec)
+    return;
 
+  if (using_auto_storage ())
+    {
+      static_cast<stack_vec<T, 1> *> (this)->m_header.m_num = 0;
+      return;
+    }
+
+  va_heap::release (m_vec);
+}
 
 /* Copy the elements from SRC to the end of this vector as if by memcpy.
    SRC and this vector must be allocated with the same memory
    allocation mechanism.  This vector is assumed to have sufficient
    headroom available.  */
 
-template<typename T, typename A>
+template<typename T>
 inline void
-vec<T, A, vl_ptr>::splice (vec<T, A, vl_ptr> &src)
+vec<T, va_heap, vl_ptr>::splice (vec<T, va_heap, vl_ptr> &src)
 {
   if (src.m_vec)
     m_vec->splice (*(src.m_vec));
@@ -1540,9 +1440,10 @@ vec<T, va_heap, vl_ptr>::splice (vec<T, va_heap, vl_ptr> &src)
    If there is not enough headroom in this vector, it will be reallocated
    as needed.  */
 
-template<typename T, typename A>
+template<typename T>
 inline void
-vec<T, A, vl_ptr>::safe_splice (vec<T, A, vl_ptr> &src MEM_STAT_DECL)
+vec<T, va_heap, vl_ptr>::safe_splice (vec<T, va_heap, vl_ptr> &src
+				      MEM_STAT_DECL)
 {
   if (src.length ())
     {
@@ -1556,9 +1457,9 @@ vec<T, va_heap, vl_ptr>::safe_splice (vec<T, va_heap, vl_ptr> &src MEM_STAT_DECL)
    sufficient space in the vector.  Return a pointer to the slot
    where OBJ was inserted.  */
 
-template<typename T, typename A>
+template<typename T>
 inline T *
-vec<T, A, vl_ptr>::quick_push (const T &obj)
+vec<T, va_heap, vl_ptr>::quick_push (const T &obj)
 {
   return m_vec->quick_push (obj);
 }
@@ -1568,9 +1469,9 @@ vec<T, va_heap, vl_ptr>::quick_push (const T &obj)
    the embedded vector, if needed.  Return a pointer to the slot where
    OBJ was inserted.  */
 
-template<typename T, typename A>
+template<typename T>
 inline T *
-vec<T, A, vl_ptr>::safe_push (const T &obj MEM_STAT_DECL)
+vec<T, va_heap, vl_ptr>::safe_push (const T &obj MEM_STAT_DECL)
 {
   reserve (1, false PASS_MEM_STAT);
   return quick_push (obj);
@@ -1579,9 +1480,9 @@ vec<T, va_heap, vl_ptr>::safe_push (const T &obj MEM_STAT_DECL)
 
 /* Pop and return the last element off the end of the vector.  */
 
-template<typename T, typename A>
+template<typename T>
 inline T &
-vec<T, A, vl_ptr>::pop (void)
+vec<T, va_heap, vl_ptr>::pop (void)
 {
   return m_vec->pop ();
 }
@@ -1590,9 +1491,9 @@ vec<T, va_heap, vl_ptr>::pop (void)
 /* Set the length of the vector to LEN.  The new length must be less
    than or equal to the current length.  This is an O(1) operation.  */
 
-template<typename T, typename A>
+template<typename T>
 inline void
-vec<T, A, vl_ptr>::truncate (unsigned size)
+vec<T, va_heap, vl_ptr>::truncate (unsigned size)
 {
   if (m_vec)
     m_vec->truncate (size);
@@ -1605,9 +1506,9 @@ vec<T, va_heap, vl_ptr>::truncate (unsigned size)
    longer than the current length.  The new elements are
    uninitialized.  Reallocate the internal vector, if needed.  */
 
-template<typename T, typename A>
+template<typename T>
 inline void
-vec<T, A, vl_ptr>::safe_grow (unsigned len MEM_STAT_DECL)
+vec<T, va_heap, vl_ptr>::safe_grow (unsigned len MEM_STAT_DECL)
 {
   unsigned oldlen = length ();
   gcc_checking_assert (oldlen <= len);
@@ -1620,9 +1521,9 @@ vec<T, va_heap, vl_ptr>::safe_grow (unsigned len MEM_STAT_DECL)
    long or longer than the current length.  The new elements are
    initialized to zero.  Reallocate the internal vector, if needed.  */
 
-template<typename T, typename A>
+template<typename T>
 inline void
-vec<T, A, vl_ptr>::safe_grow_cleared (unsigned len MEM_STAT_DECL)
+vec<T, va_heap, vl_ptr>::safe_grow_cleared (unsigned len MEM_STAT_DECL)
 {
   unsigned oldlen = length ();
   safe_grow (len PASS_MEM_STAT);
@@ -1633,9 +1534,9 @@ vec<T, va_heap, vl_ptr>::safe_grow_cleared (unsigned len MEM_STAT_DECL)
 /* Same as vec::safe_grow but without reallocation of the internal vector.
    If the vector cannot be extended, a runtime assertion will be triggered.  */
 
-template<typename T, typename A>
+template<typename T>
 inline void
-vec<T, A, vl_ptr>::quick_grow (unsigned len)
+vec<T, va_heap, vl_ptr>::quick_grow (unsigned len)
 {
   gcc_checking_assert (m_vec);
   m_vec->quick_grow (len);
@@ -1646,9 +1547,9 @@ vec<T, va_heap, vl_ptr>::quick_grow (unsigned len)
    internal vector.  If the vector cannot be extended, a runtime
    assertion will be triggered.  */
 
-template<typename T, typename A>
+template<typename T>
 inline void
-vec<T, A, vl_ptr>::quick_grow_cleared (unsigned len)
+vec<T, va_heap, vl_ptr>::quick_grow_cleared (unsigned len)
 {
   gcc_checking_assert (m_vec);
   m_vec->quick_grow_cleared (len);
@@ -1658,9 +1559,9 @@ vec<T, va_heap, vl_ptr>::quick_grow_cleared (unsigned len)
 /* Insert an element, OBJ, at the IXth position of this vector.  There
    must be sufficient space.  */
 
-template<typename T, typename A>
+template<typename T>
 inline void
-vec<T, A, vl_ptr>::quick_insert (unsigned ix, const T &obj)
+vec<T, va_heap, vl_ptr>::quick_insert (unsigned ix, const T &obj)
 {
   m_vec->quick_insert (ix, obj);
 }
@@ -1669,9 +1570,9 @@ vec<T, va_heap, vl_ptr>::quick_insert (unsigned ix, const T &obj)
 /* Insert an element, OBJ, at the IXth position of the vector.
    Reallocate the embedded vector, if necessary.  */
 
-template<typename T, typename A>
+template<typename T>
 inline void
-vec<T, A, vl_ptr>::safe_insert (unsigned ix, const T &obj MEM_STAT_DECL)
+vec<T, va_heap, vl_ptr>::safe_insert (unsigned ix, const T &obj MEM_STAT_DECL)
 {
   reserve (1, false PASS_MEM_STAT);
   quick_insert (ix, obj);
@@ -1682,9 +1583,9 @@ vec<T, va_heap, vl_ptr>::safe_insert (unsigned ix, const T &obj MEM_STAT_DECL)
    remaining elements is preserved.  This is an O(N) operation due to
    a memmove.  */
 
-template<typename T, typename A>
+template<typename T>
 inline void
-vec<T, A, vl_ptr>::ordered_remove (unsigned ix)
+vec<T, va_heap, vl_ptr>::ordered_remove (unsigned ix)
 {
   m_vec->ordered_remove (ix);
 }
@@ -1693,9 +1594,9 @@ vec<T, va_heap, vl_ptr>::ordered_remove (unsigned ix)
 /* Remove an element from the IXth position of this vector.  Ordering
    of remaining elements is destroyed.  This is an O(1) operation.  */
 
-template<typename T, typename A>
+template<typename T>
 inline void
-vec<T, A, vl_ptr>::unordered_remove (unsigned ix)
+vec<T, va_heap, vl_ptr>::unordered_remove (unsigned ix)
 {
   m_vec->unordered_remove (ix);
 }
@@ -1704,9 +1605,9 @@ vec<T, va_heap, vl_ptr>::unordered_remove (unsigned ix)
 /* Remove LEN elements starting at the IXth.  Ordering is retained.
    This is an O(N) operation due to memmove.  */
 
-template<typename T, typename A>
+template<typename T>
 inline void
-vec<T, A, vl_ptr>::block_remove (unsigned ix, unsigned len)
+vec<T, va_heap, vl_ptr>::block_remove (unsigned ix, unsigned len)
 {
   m_vec->block_remove (ix, len);
 }
@@ -1715,9 +1616,9 @@ vec<T, va_heap, vl_ptr>::block_remove (unsigned ix, unsigned len)
 /* Sort the contents of this vector with qsort.  CMP is the comparison
    function to pass to qsort.  */
 
-template<typename T, typename A>
+template<typename T>
 inline void
-vec<T, A, vl_ptr>::qsort (int (*cmp) (const void *, const void *))
+vec<T, va_heap, vl_ptr>::qsort (int (*cmp) (const void *, const void *))
 {
   if (m_vec)
     m_vec->qsort (cmp);
@@ -1729,14 +1630,27 @@ vec<T, va_heap, vl_ptr>::qsort (int (*cmp) (const void *, const void *))
    function that returns true if the first argument is strictly less
    than the second.  */
 
-template<typename T, typename A>
+template<typename T>
 inline unsigned
-vec<T, A, vl_ptr>::lower_bound (T obj, bool (*lessthan)(const T &, const T &))
+vec<T, va_heap, vl_ptr>::lower_bound (T obj,
+				      bool (*lessthan)(const T &, const T &))
   const
 {
   return m_vec ? m_vec->lower_bound (obj, lessthan) : 0;
 }
 
+template<typename T>
+inline bool
+vec<T, va_heap, vl_ptr>::using_auto_storage () const
+{
+  if (!m_vec->m_vecpfx.m_has_auto_buf)
+    return false;
+
+  const vec_prefix *auto_header
+    = &static_cast<const stack_vec<T, 1> *> (this)->m_header;
+  return reinterpret_cast<vec_prefix *> (m_vec) == auto_header;
+}
+
 #if (GCC_VERSION >= 3000)
 # pragma GCC poison m_vec m_vecpfx m_vecdata
 #endif
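A note on using_auto_storage () above: the m_has_auto_buf bit alone is not the whole test. Once the contents spill to the heap, embedded_init clears the bit in the freshly allocated block, and the decisive check is whether m_vec still points at the object's own m_header. A simplified, runnable sketch of that identity test, with all names illustrative:

/* Hedged sketch of the pointer-identity test behind using_auto_storage.  */
#include <cstdio>

struct prefix { unsigned has_auto_buf; };

struct outer
{
  prefix header;   /* embedded control block, like stack_vec::m_header */
  prefix *vec;     /* where the live vector actually is, like m_vec */

  bool using_auto_storage () const { return vec == &header; }
};

int main ()
{
  outer o;
  o.header.has_auto_buf = 1;
  o.vec = &o.header;
  printf ("%d\n", o.using_auto_storage ());   /* 1: still in auto storage */

  prefix heap_block = { 0 };                  /* pretend we spilled */
  o.vec = &heap_block;
  printf ("%d\n", o.using_auto_storage ());   /* 0: now on the heap */
}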