--- /dev/null
+typedef struct {
+	unsigned long pmd0;
+	unsigned long pmd1;
+} pmd_t;
+typedef unsigned int pgd_t;
+
+struct mm_struct {
+	pgd_t *pgd;
+};
+
+/* Reduced from the kernel's pmd_bad(); the original predicate was lost
+   in reduction, and returning nonzero keeps the pmd_clear path live.  */
+extern inline int pmd_bad(pmd_t pmd)
+{
+	return 1;
+}
+
+extern inline void pmd_clear(pmd_t *pmdp)
+{
+	/* 0x20 | 0x00 is residue of the reduced page-flag macros.  */
+	(*pmdp).pmd0 = 0x20 | 0x00;
+	(*pmdp).pmd1 = 0x20 | 0x00;
+}
+
+static inline void free_one_pmd(pmd_t *dir)
+{
+	if (pmd_bad(*dir))
+		pmd_clear(dir);
+}
+
+static inline void free_one_pgd(pgd_t *dir)
+{
+	int j;
+	pmd_t *pmd;
+
+	/* pmd_offset(dir, 0), reduced; the undefined __pgd_val() macro is
+	   replaced here by reading the pgd entry directly.  */
+	pmd = (pmd_t *)((unsigned long)*dir & ~((1UL << 12) - 1))
+	      + (((0) >> 21) & (512 - 1));
+	for (j = 0; j < 512; j++)
+		free_one_pmd(pmd + j);
+}
+
+void clear_page_tables(struct mm_struct *mm, unsigned long first, int nr)
+{
+	pgd_t *page_dir = mm->pgd;
+
+	page_dir += first;
+	do {
+		free_one_pgd(page_dir);
+		page_dir++;
+	} while (--nr);
+}
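The testcase above reduces the kernel's page-table teardown to the loop
that tripped the oracle: after ivopts the address of pmd + j becomes a
TARGET_MEM_REF, and once such a ref may carry a second index operand
(TMR_INDEX2), code that only checks TMR_INDEX can wrongly treat an
indexed access as a fixed one.  The shape in question, distilled to a
minimal sketch (illustrative only, not the testcase itself):

void
clear (unsigned long *p)
{
  int j;

  /* Two stores per entry; ivopts rewrites these addresses into
     TARGET_MEM_REFs built from a base, a step and index operands.  */
  for (j = 0; j < 512; j++)
    {
      p[2 * j] = 0x20;
      p[2 * j + 1] = 0x20;
    }
}

The hunks below, apparently against the alias oracle in GCC's
tree-ssa-alias.c, teach each existing TMR_INDEX check about TMR_INDEX2.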
   /* If only one reference is based on a variable, they cannot alias if
      the pointer access is beyond the extent of the variable access.
      (the pointer base cannot validly point to an offset less than zero
      of the variable).
      They also cannot alias if the pointer may not point to the decl.  */
-  if ((TREE_CODE (base1) != TARGET_MEM_REF || !TMR_INDEX (base1))
+  if ((TREE_CODE (base1) != TARGET_MEM_REF
+       || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
       && !ranges_overlap_p (MAX (0, offset1p), -1, offset2p, max_size2))
     return false;
   if (!ptr_deref_may_alias_decl_p (ptr1, base2))
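For reference, the first check above asks whether a pointer-based access
that starts at byte MAX (0, offset1p) and may extend arbitrarily far (a
size of -1 meaning "unknown extent") can reach the decl's accessed range
at all; the second simply requires that points-to information allows
ptr1 to reach the decl.  With an index operand present the base need not
point into the variable at a non-negative offset, which is why the
shortcut is now disabled when either TMR_INDEX or TMR_INDEX2 is set.  A
sketch of the overlap test under that reading (names are mine, not
GCC's):

#include <stdbool.h>

typedef long long hwi;                    /* stand-in for HOST_WIDE_INT */
#define UNKNOWN_EXTENT ((unsigned long long) -1)

/* Do the half-open ranges [pos1, pos1 + size1) and
   [pos2, pos2 + size2) overlap?  */
static bool
ranges_overlap (hwi pos1, unsigned long long size1,
                hwi pos2, unsigned long long size2)
{
  if (size1 == 0 || size2 == 0)
    return false;
  if (pos1 >= pos2 && (size2 == UNKNOWN_EXTENT || pos1 < pos2 + (hwi) size2))
    return true;
  if (pos2 >= pos1 && (size1 == UNKNOWN_EXTENT || pos2 < pos1 + (hwi) size1))
    return true;
  return false;
}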
      is relative to the start of the type which we ensure by
      comparing rvalue and access type and disregarding the constant
      pointer offset.  */
-  if ((TREE_CODE (base1) != TARGET_MEM_REF || !TMR_INDEX (base1))
+  if ((TREE_CODE (base1) != TARGET_MEM_REF
+       || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
       && (TREE_CODE (base1) != MEM_REF
           || same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) == 1)
       && same_type_for_tbaa (TREE_TYPE (ptrtype1), TREE_TYPE (base2)) == 1)
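The TBAA-based disambiguation that survives here relies on both accesses
being positioned relative to the start of the same type: when the
pointed-to type of the access and the decl's type agree, disjoint offset
ranges prove the accesses apart even if the pointer points at the decl.
Roughly (illustrative only):

struct S { int a; int b; } s;

int
f (struct S *p)
{
  p->b = 1;      /* store at offset 4 within some S, possibly &s */
  s.a = 2;       /* store at offset 0 within s */
  return p->b;   /* even if p == &s, [4,8) and [0,4) cannot overlap */
}

A TARGET_MEM_REF with an index operand breaks that premise, since the
constant offset alone no longer places the access within the type; hence
the added TMR_INDEX2 test here as well.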
   if ((!cfun || gimple_in_ssa_p (cfun))
       && operand_equal_p (ptr1, ptr2, 0)
       && (((TREE_CODE (base1) != TARGET_MEM_REF
-            || !TMR_INDEX (base1))
+            || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
            && (TREE_CODE (base2) != TARGET_MEM_REF
-                || !TMR_INDEX (base2)))
+                || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2))))
           || (TREE_CODE (base1) == TARGET_MEM_REF
               && TREE_CODE (base2) == TARGET_MEM_REF
               && (TMR_STEP (base1) == TMR_STEP (base2)
                   || (TMR_STEP (base1) && TMR_STEP (base2)
                       && operand_equal_p (TMR_STEP (base1),
                                           TMR_STEP (base2), 0)))
-              && operand_equal_p (TMR_INDEX (base1), TMR_INDEX (base2), 0))))
+              && (TMR_INDEX (base1) == TMR_INDEX (base2)
+                  || (TMR_INDEX (base1) && TMR_INDEX (base2)
+                      && operand_equal_p (TMR_INDEX (base1),
+                                          TMR_INDEX (base2), 0)))
+              && (TMR_INDEX2 (base1) == TMR_INDEX2 (base2)
+                  || (TMR_INDEX2 (base1) && TMR_INDEX2 (base2)
+                      && operand_equal_p (TMR_INDEX2 (base1),
+                                          TMR_INDEX2 (base2), 0))))))
     {
       /* The offset embedded in MEM_REFs can be negative.  Bias them
          so that the resulting offset adjustment is positive.  */
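Finally, the biasing the comment describes: MEM_REF offsets are signed,
but the overlap test wants non-negative positions, so the idea is to
shift both ranges by the same amount before comparing.  A distilled
sketch of that idea (not GCC's double_int arithmetic):

/* Fold the most negative offset into both positions, then do an
   ordinary half-open interval overlap test.  */
static int
biased_overlap (long long off1, unsigned long long size1,
                long long off2, unsigned long long size2)
{
  long long bias = off1 < off2 ? off1 : off2;
  unsigned long long pos1 = (unsigned long long) (off1 - bias);
  unsigned long long pos2 = (unsigned long long) (off2 - bias);

  return pos1 < pos2 + size2 && pos2 < pos1 + size1;
}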