Skip to content

Commit

Permalink
tree-vect-data-refs.c (vect_analyze_group_access): Properly handle negative step.
Browse files Browse the repository at this point in the history

2013-04-18  Richard Biener  <[email protected]>

	* tree-vect-data-refs.c (vect_analyze_group_access): Properly
	handle negative step.  Remove redundant checks.
	(vect_create_data_ref_ptr): Avoid ICEs with non-constant steps.
	* tree-vect-stmts.c (vectorizable_load): Instead of asserting
	for negative step and grouped loads fail to vectorize.

From-SVN: r198054
  • Loading branch information
rguenth authored and Richard Biener committed Apr 18, 2013
1 parent 0e0f87d commit 08940f3
Show file tree
Hide file tree
Showing 3 changed files with 33 additions and 25 deletions.
8 changes: 8 additions & 0 deletions gcc/ChangeLog
Original file line number Diff line number Diff line change
@@ -1,3 +1,11 @@
2013-04-18 Richard Biener <[email protected]>

* tree-vect-data-refs.c (vect_analyze_group_access): Properly
handle negative step. Remove redundant checks.
(vect_create_data_ref_ptr): Avoid ICEs with non-constant steps.
* tree-vect-stmts.c (vectorizable_load): Instead of asserting
for negative step and grouped loads fail to vectorize.

2013-04-18 Steven Bosscher <[email protected]>

* emit-rtl.c (reset_insn_used_flags): New function.
Expand Down
42 changes: 18 additions & 24 deletions gcc/tree-vect-data-refs.c
Original file line number Diff line number Diff line change
Expand Up @@ -2024,7 +2024,7 @@ vect_analyze_group_access (struct data_reference *dr)

/* For interleaving, GROUPSIZE is STEP counted in elements, i.e., the
size of the interleaving group (including gaps). */
groupsize = dr_step / type_size;
groupsize = absu_hwi (dr_step) / type_size;

/* Not consecutive access is possible only if it is a part of interleaving. */
if (!GROUP_FIRST_ELEMENT (vinfo_for_stmt (stmt)))
Expand Down Expand Up @@ -2094,10 +2094,10 @@ vect_analyze_group_access (struct data_reference *dr)
gimple next = GROUP_NEXT_ELEMENT (vinfo_for_stmt (stmt));
struct data_reference *data_ref = dr;
unsigned int count = 1;
tree next_step;
tree prev_init = DR_INIT (data_ref);
gimple prev = stmt;
HOST_WIDE_INT diff, count_in_bytes, gaps = 0;
HOST_WIDE_INT diff, gaps = 0;
unsigned HOST_WIDE_INT count_in_bytes;

while (next)
{
Expand Down Expand Up @@ -2126,18 +2126,11 @@ vect_analyze_group_access (struct data_reference *dr)
}

prev = next;
data_ref = STMT_VINFO_DATA_REF (vinfo_for_stmt (next));

/* Check that all the accesses have the same STEP. */
next_step = DR_STEP (STMT_VINFO_DATA_REF (vinfo_for_stmt (next)));
if (tree_int_cst_compare (step, next_step))
{
if (dump_enabled_p ())
dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
"not consecutive access in interleaving");
return false;
}
/* All group members have the same STEP by construction. */
gcc_checking_assert (operand_equal_p (DR_STEP (data_ref), step, 0));

data_ref = STMT_VINFO_DATA_REF (vinfo_for_stmt (next));
/* Check that the distance between two accesses is equal to the type
size. Otherwise, we have gaps. */
diff = (TREE_INT_CST_LOW (DR_INIT (data_ref))
Expand Down Expand Up @@ -2175,7 +2168,8 @@ vect_analyze_group_access (struct data_reference *dr)

/* Check that the size of the interleaving (including gaps) is not
greater than STEP. */
if (dr_step && dr_step < count_in_bytes + gaps * type_size)
if (dr_step != 0
&& absu_hwi (dr_step) < count_in_bytes + gaps * type_size)
{
if (dump_enabled_p ())
{
Expand All @@ -2188,7 +2182,8 @@ vect_analyze_group_access (struct data_reference *dr)

/* Check that the size of the interleaving is equal to STEP for stores,
i.e., that there are no gaps. */
if (dr_step && dr_step != count_in_bytes)
if (dr_step != 0
&& absu_hwi (dr_step) != count_in_bytes)
{
if (DR_IS_READ (dr))
{
Expand All @@ -2208,7 +2203,8 @@ vect_analyze_group_access (struct data_reference *dr)
}

/* Check that STEP is a multiple of type size. */
if (dr_step && (dr_step % type_size) != 0)
if (dr_step != 0
&& (dr_step % type_size) != 0)
{
if (dump_enabled_p ())
{
Expand Down Expand Up @@ -3520,7 +3516,6 @@ vect_create_data_ref_ptr (gimple stmt, tree aggr_type, struct loop *at_loop,
tree aptr;
gimple_stmt_iterator incr_gsi;
bool insert_after;
bool negative;
tree indx_before_incr, indx_after_incr;
gimple incr;
tree step;
Expand Down Expand Up @@ -3550,11 +3545,10 @@ vect_create_data_ref_ptr (gimple stmt, tree aggr_type, struct loop *at_loop,
else
step = DR_STEP (STMT_VINFO_DATA_REF (stmt_info));

if (tree_int_cst_compare (step, size_zero_node) == 0)
if (integer_zerop (step))
*inv_p = true;
else
*inv_p = false;
negative = tree_int_cst_compare (step, size_zero_node) < 0;

/* Create an expression for the first address accessed by this load
in LOOP. */
Expand Down Expand Up @@ -3693,18 +3687,18 @@ vect_create_data_ref_ptr (gimple stmt, tree aggr_type, struct loop *at_loop,
else
{
/* The step of the aggregate pointer is the type size. */
tree step = TYPE_SIZE_UNIT (aggr_type);
tree iv_step = TYPE_SIZE_UNIT (aggr_type);
/* One exception to the above is when the scalar step of the load in
LOOP is zero. In this case the step here is also zero. */
if (*inv_p)
step = size_zero_node;
else if (negative)
step = fold_build1 (NEGATE_EXPR, TREE_TYPE (step), step);
iv_step = size_zero_node;
else if (tree_int_cst_sgn (step) == -1)
iv_step = fold_build1 (NEGATE_EXPR, TREE_TYPE (iv_step), iv_step);

standard_iv_increment_position (loop, &incr_gsi, &insert_after);

create_iv (aggr_ptr_init,
fold_convert (aggr_ptr_type, step),
fold_convert (aggr_ptr_type, iv_step),
aggr_ptr, loop, &incr_gsi, insert_after,
&indx_before_incr, &indx_after_incr);
incr = gsi_stmt (incr_gsi);
Expand Down
8 changes: 7 additions & 1 deletion gcc/tree-vect-stmts.c
Original file line number Diff line number Diff line change
Expand Up @@ -4465,7 +4465,13 @@ vectorizable_load (gimple stmt, gimple_stmt_iterator *gsi, gimple *vec_stmt,

if (negative)
{
gcc_assert (!grouped_load);
if (grouped_load)
{
if (dump_enabled_p ())
dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
"negative step for group load not supported");
return false;
}
alignment_support_scheme = vect_supportable_dr_alignment (dr, false);
if (alignment_support_scheme != dr_aligned
&& alignment_support_scheme != dr_unaligned_supported)
Expand Down

0 comments on commit 08940f3

Please sign in to comment.