return get_or_create_unaryop (type, op, arg);
}
+/* Subroutine of region_model_manager::maybe_fold_binop for handling
+ (TYPE)(COMPOUND_SVAL BIT_AND_EXPR CST), as may have been generated by
+ optimize_bit_field_compare, where CST is the constant from ARG1.
+
+ Support masking out bits from a compound_svalue for comparing a bitfield
+ against a value, as generated by optimize_bit_field_compare for
+ BITFIELD == VALUE.
+
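+ For example (illustrative; exact bit positions depend on the target's
+ endianness), given:
+ struct S { unsigned char a : 3; unsigned char b : 4; };
+ a comparison such as "s.b == 5" may have been rewritten into
+ ((unsigned char)s & 0x78) == 0x28
+ i.e. masking out bits 3 to 6 of the containing byte and comparing
+ against the value shifted into position (5 << 3 == 0x28).
+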
+ If COMPOUND_SVAL has a value for the appropriate bits, return it,
+ shifted accordingly.
+ Otherwise return NULL. */
+
+const svalue *
+region_model_manager::
+maybe_undo_optimize_bit_field_compare (tree type,
+ const compound_svalue *compound_sval,
+ tree cst,
+ const svalue *arg1)
+{
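+ /* Only the byte-sized (unsigned char) form is handled here.  */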
+ if (type != unsigned_char_type_node)
+ return NULL;
+
+ const binding_map &map = compound_sval->get_map ();
+ unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (cst);
+ /* If "mask" is a contiguous range of set bits, see if the
+ compound_sval has a value for those bits. */
+ bit_range bits (0, 0);
+ if (!bit_range::from_mask (mask, &bits))
+ return NULL;
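+
+ /* Here "bits" describes the contiguous run of set bits within MASK:
+ e.g. an illustrative mask of 0x78 yields bits 3 to 6, whereas a
+ non-contiguous mask such as 0x5 was rejected above.  */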
+
+ bit_range bound_bits (bits);
+ if (BYTES_BIG_ENDIAN)
+ bound_bits = bit_range (BITS_PER_UNIT - bits.get_next_bit_offset (),
+ bits.m_size_in_bits);
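+ /* e.g. with 8-bit units, a range covering bits 0 to 2 of the byte
+ corresponds to bits 5 to 7 under the big-endian numbering.  */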
+ const concrete_binding *conc
+ = get_store_manager ()->get_concrete_binding (bound_bits, BK_direct);
+ const svalue *sval = map.get (conc);
+ if (!sval)
+ return NULL;
+
+ /* We have a value; shift it by the correct number of bits.  */
+ const svalue *lhs = get_or_create_cast (type, sval);
+ HOST_WIDE_INT bit_offset = bits.get_start_bit_offset ().to_shwi ();
+ tree shift_amt = build_int_cst (type, bit_offset);
+ const svalue *shift_sval = get_or_create_constant_svalue (shift_amt);
+ const svalue *shifted_sval = get_or_create_binop (type, LSHIFT_EXPR,
+ lhs, shift_sval);
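+ /* e.g. an illustrative 4-bit field at bits 3 to 6 holding the value 5
+ becomes 5 << 3 == 0x28, matching the shifted constant that
+ optimize_bit_field_compare compares against.  */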
+ /* Reapply the mask (needed for negative signed bitfields).  */
+ return get_or_create_binop (type, BIT_AND_EXPR,
+ shifted_sval, arg1);
+}
+
/* Subroutine of region_model_manager::get_or_create_binop.
Attempt to fold the inputs and return a simpler svalue *.
Otherwise, return NULL. */
/* "(ARG0 & 0)" -> "0". */
return get_or_create_constant_svalue (build_int_cst (type, 0));
- /* Support masking out bits from a compound_svalue, as this
- is generated when accessing bitfields. */
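+ /* Try to undo the effect of optimize_bit_field_compare.  */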
if (const compound_svalue *compound_sval
= arg0->dyn_cast_compound_svalue ())
- {
- const binding_map &map = compound_sval->get_map ();
- unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (cst1);
- /* If "mask" is a contiguous range of set bits, see if the
- compound_sval has a value for those bits. */
- bit_range bits (0, 0);
- if (bit_range::from_mask (mask, &bits))
- {
- const concrete_binding *conc
- = get_store_manager ()->get_concrete_binding (bits,
- BK_direct);
- if (const svalue *sval = map.get (conc))
- {
- /* We have a value;
- shift it by the correct number of bits. */
- const svalue *lhs = get_or_create_cast (type, sval);
- HOST_WIDE_INT bit_offset
- = bits.get_start_bit_offset ().to_shwi ();
- tree shift_amt = build_int_cst (type, bit_offset);
- const svalue *shift_sval
- = get_or_create_constant_svalue (shift_amt);
- const svalue *shifted_sval
- = get_or_create_binop (type,
- LSHIFT_EXPR,
- lhs, shift_sval);
- /* Reapply the mask (needed for negative
- signed bitfields). */
- return get_or_create_binop (type,
- BIT_AND_EXPR,
- shifted_sval, arg1);
- }
- }
- }
+ if (const svalue *sval
+ = maybe_undo_optimize_bit_field_compare (type,
+ compound_sval,
+ cst1, arg1))
+ return sval;
}
break;
case TRUTH_ANDIF_EXPR: