arm64: futex: Mask __user pointers prior to dereference
author Will Deacon <will.deacon@arm.com>
Mon, 5 Feb 2018 15:34:24 +0000 (15:34 +0000)
committer Haibo Chen <haibo.chen@nxp.com>
Thu, 12 Apr 2018 10:46:23 +0000 (18:46 +0800)
commit 91b2d3442f6a upstream.

The arm64 futex code has some explicit dereferencing of user pointers
when performing atomic operations in response to a futex command. This
patch uses masking to limit any speculative futex operations to within
the user address space.
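
For context, __uaccess_mask_ptr() (introduced earlier in the same series of
Spectre variant 1 mitigations) returns the pointer unchanged when it lies
below the task's addr_limit and a safe NULL value otherwise, using a
branch-free compare-and-select so there is no mispredictable bounds-check
branch for the CPU to speculate past. The standalone C sketch below only
illustrates the idea; the real helper is AArch64 inline assembly (bics/csel
followed by a csdb speculation barrier), and the names mask_user_ptr and
USER_ADDR_LIMIT here are illustrative, not kernel API.

#include <stdint.h>
#include <stdio.h>

/* Illustrative user-space limit; the kernel uses the task's addr_limit. */
#define USER_ADDR_LIMIT ((uintptr_t)1 << 48)

/*
 * Pointer masking sketch: if ptr is within [0, USER_ADDR_LIMIT), return it
 * unchanged; otherwise squash it to NULL before it can be dereferenced.
 * The mask is derived arithmetically rather than via an if/else, mirroring
 * the kernel's approach.  Plain C gives no guarantee the compiler emits
 * branch-free code, which is why the real helper is written in inline asm.
 */
static inline void *mask_user_ptr(void *ptr)
{
	uintptr_t p = (uintptr_t)ptr;
	/* all-ones if in range, all-zeroes if out of range */
	uintptr_t mask = (uintptr_t)0 - (uintptr_t)(p < USER_ADDR_LIMIT);

	return (void *)(p & mask);
}

int main(void)
{
	uint32_t uval = 42;

	printf("in range    : %p\n", mask_user_ptr(&uval));
	printf("out of range: %p\n",
	       mask_user_ptr((void *)(USER_ADDR_LIMIT + 0x1000)));
	return 0;
}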

Signed-off-by: Will Deacon <will.deacon@arm.com>
Signed-off-by: Catalin Marinas <catalin.marinas@arm.com>
Signed-off-by: Alex Shi <alex.shi@linaro.org>
Conflicts:
the changes apply to the old futex_atomic_op_inuser() function rather than
arch_futex_atomic_op_inuser() in arch/arm64/include/asm/futex.h

arch/arm64/include/asm/futex.h

index 85c4a89..1943cf6 100644
@@ -48,13 +48,14 @@ do {                                                                        \
 } while (0)
 
 static inline int
-futex_atomic_op_inuser (int encoded_op, u32 __user *uaddr)
+futex_atomic_op_inuser (int encoded_op, u32 __user *_uaddr)
 {
        int op = (encoded_op >> 28) & 7;
        int cmp = (encoded_op >> 24) & 15;
        int oparg = (encoded_op << 8) >> 20;
        int cmparg = (encoded_op << 20) >> 20;
        int oldval = 0, ret, tmp;
+       u32 __user *uaddr = __uaccess_mask_ptr(_uaddr);
 
        if (encoded_op & (FUTEX_OP_OPARG_SHIFT << 28))
                oparg = 1 << oparg;
@@ -106,15 +107,17 @@ futex_atomic_op_inuser (int encoded_op, u32 __user *uaddr)
 }
 
 static inline int
-futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
+futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *_uaddr,
                              u32 oldval, u32 newval)
 {
        int ret = 0;
        u32 val, tmp;
+       u32 __user *uaddr;
 
-       if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
+       if (!access_ok(VERIFY_WRITE, _uaddr, sizeof(u32)))
                return -EFAULT;
 
+       uaddr = __uaccess_mask_ptr(_uaddr);
        uaccess_enable();
        asm volatile("// futex_atomic_cmpxchg_inatomic\n"
 "      prfm    pstl1strm, %2\n"