__asm__ __volatile__ ("wr %%g0, %0, %%asi" : : "r" ((val).seg)); \
} while(0)
+/*
+ * Test whether a block of memory is a valid user space address.
+ * Returns 0 if the range is valid, nonzero otherwise.
+ */
+static inline bool __chk_range_not_ok(unsigned long addr, unsigned long size, unsigned long limit)
+{
+	/*
+	 * If "sizeof()" was used for the size, it cannot overflow the
+	 * limit (but it might overflow 'addr', so subtract the size
+	 * from the limit rather than adding it to the address).
+	 */
+	if (__builtin_constant_p(size))
+		return addr > limit - size;
+
+	/* Arbitrary sizes? Be careful about overflow */
+	addr += size;
+	if (addr < size)
+		return true;
+
+	return addr > limit;
+}
+
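The unsigned wraparound test is the subtle part, so here is a small stand-alone demonstration of the variable-size path. The helper name and the limit value are invented for the demo; the kernel passes TASK_SIZE as the limit.

/* demo only: user-space re-implementation of the variable-size path */
#include <stdbool.h>
#include <stdio.h>

#define DEMO_LIMIT 0x0000080000000000UL	/* made-up stand-in for TASK_SIZE */

static bool range_not_ok(unsigned long addr, unsigned long size)
{
	addr += size;		/* may wrap past zero */
	if (addr < size)	/* wrapped: the sum ends up smaller than size */
		return true;
	return addr > DEMO_LIMIT;
}

int main(void)
{
	printf("%d\n", range_not_ok(0x1000, 0x100));		/* 0: in range */
	printf("%d\n", range_not_ok(DEMO_LIMIT - 8, 16));	/* 1: ends past limit */
	printf("%d\n", range_not_ok(~0UL - 0xf, 0x100));	/* 1: wraps past zero */
	return 0;
}

Without the (addr < size) test, the third call would compute a wrapped end address of 0xf0 and wrongly accept a range that starts near the top of the address space.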
+#define __range_not_ok(addr, size, limit) \
+({ \
+ __chk_user_ptr(addr); \
+ __chk_range_not_ok((unsigned long __force)(addr), size, limit); \
+})
+
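The wrapper exists so callers can hand in a __user pointer directly: __chk_user_ptr() is a sparse-only annotation check that compiles to nothing, and the __force cast silences sparse's address-space warning when the pointer is converted to unsigned long. Below is a minimal user-space harness for the macro with the kernel and sparse machinery stubbed out; every stub is an assumption made for the demo.

#include <stdbool.h>
#include <stdio.h>

/* stubs standing in for kernel and sparse machinery -- demo only */
#define __user
#define __force
#define __chk_user_ptr(p) ((void)(p))	/* no-op outside sparse */
#define TASK_SIZE 0x0000080000000000UL	/* made-up limit for the demo */

static bool __chk_range_not_ok(unsigned long addr, unsigned long size,
			       unsigned long limit)
{
	if (__builtin_constant_p(size))
		return addr > limit - size;
	addr += size;
	if (addr < size)
		return true;
	return addr > limit;
}

#define __range_not_ok(addr, size, limit)				\
({									\
	__chk_user_ptr(addr);						\
	__chk_range_not_ok((unsigned long __force)(addr), size, limit); \
})

int main(void)
{
	const void __user *p = (const void __user *)0x1000;

	printf("%d\n", (int)__range_not_ok(p, 64, TASK_SIZE));	/* 0: valid */
	p = (const void __user *)(TASK_SIZE - 8);
	printf("%d\n", (int)__range_not_ok(p, 64, TASK_SIZE));	/* 1: too high */
	return 0;
}

Built with gcc (the ({ ... }) statement expression is a GNU extension, just as in the kernel), this prints 0 for the valid range and 1 for the range ending past the limit.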
static inline int __access_ok(const void __user * addr, unsigned long size)
{
return 1;
}

	} while (entry->nr < PERF_MAX_STACK_DEPTH);
}
+static inline int
+valid_user_frame(const void __user *fp, unsigned long size)
+{
+ /* addresses should be at least 4-byte aligned */
+ if (((unsigned long) fp) & 3)
+ return 0;
+
+ return (__range_not_ok(fp, size, TASK_SIZE) == 0);
+}
+
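A frame pointer is thus rejected for either of two reasons: it is not at least 4-byte aligned, or the frame-sized block starting at it does not fit below TASK_SIZE. A quick stand-alone demonstration follows; the limit, the helper name, and the 192-byte size are stand-ins chosen for the demo.

#include <stdio.h>

#define DEMO_TASK_SIZE 0x0000080000000000UL	/* stand-in for TASK_SIZE */

/* demo re-implementation of valid_user_frame(), kernel types stripped */
static int user_frame_ok(unsigned long fp, unsigned long size)
{
	if (fp & 3)			/* not 4-byte aligned */
		return 0;
	fp += size;
	if (fp < size || fp > DEMO_TASK_SIZE)
		return 0;		/* wraps past zero or ends beyond the limit */
	return 1;
}

int main(void)
{
	printf("%d\n", user_frame_ok(0x7fe000, 192));	/* 1: aligned, in range */
	printf("%d\n", user_frame_ok(0x7fe002, 192));	/* 0: misaligned */
	printf("%d\n", user_frame_ok(~0UL - 63, 192));	/* 0: wraps past zero */
	return 0;
}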
static void perf_callchain_user_64(struct perf_callchain_entry *entry,
struct pt_regs *regs)
{
unsigned long pc;
usf = (struct sparc_stackf __user *)ufp;
+ if (!valid_user_frame(usf, sizeof(sf)))
+ break;
+
if (__copy_from_user_inatomic(&sf, usf, sizeof(sf)))
break;
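
With the check in place, the walker validates each candidate frame before copying it. The stand-alone sketch below simulates the shape of that loop in user space; the frame layout, helper names, and depth limit are all invented for the demo, whereas the real walker copies a struct sparc_stackf with __copy_from_user_inatomic() and records each PC via perf_callchain_store().

#include <stdio.h>
#include <string.h>

#define DEMO_MAX_DEPTH 4	/* stand-in for PERF_MAX_STACK_DEPTH */

struct demo_frame {
	struct demo_frame *fp;	/* saved caller frame pointer */
	unsigned long pc;	/* saved return address */
};

/* stand-in for valid_user_frame(): NULL and alignment check only */
static int frame_ok(const struct demo_frame *fp)
{
	return fp != NULL && ((unsigned long)fp & 3) == 0;
}

int main(void)
{
	struct demo_frame f3 = { NULL, 0x3000 };
	struct demo_frame f2 = { &f3, 0x2000 };
	struct demo_frame f1 = { &f2, 0x1000 };
	const struct demo_frame *cur = &f1;
	int depth = 0;

	do {
		struct demo_frame sf;

		if (!frame_ok(cur))
			break;			/* reject a bad frame pointer */
		memcpy(&sf, cur, sizeof(sf));	/* stand-in for the user copy */
		printf("pc = 0x%lx\n", sf.pc);
		cur = sf.fp;			/* follow the saved chain */
	} while (++depth < DEMO_MAX_DEPTH);
	return 0;
}

The loop ends at the first invalid or unreadable frame, so a bogus frame pointer terminates the callchain instead of being dereferenced.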