/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright (C) 2012 ARM Ltd.
 */
#ifndef __ASM_FUTEX_H
#define __ASM_FUTEX_H

#include <linux/futex.h>
#include <linux/uaccess.h>

#include <asm/errno.h>

#define FUTEX_MAX_LOOPS 128 /* What's the largest number you can think of? */
/*
 * Apply one futex read-modify-write operation to the user word at 'uaddr'
 * using an exclusive-load / store-release (LDXR/STLXR) retry loop.
 *
 * insn:   asm instruction computing the new value in %w3 (tmp) from the
 *         loaded value %w1 (oldval) and the operand %w5 (oparg).
 * ret:    set to 0 on success, -EFAULT if the user access faults (via the
 *         exception-table fixups below, which branch to label 3), or
 *         -EAGAIN if the store-exclusive failed FUTEX_MAX_LOOPS times.
 * oldval: receives the value of *uaddr observed before the update.
 * uaddr:  user pointer; the caller is expected to have checked/masked it.
 * tmp:    scratch register holding the computed new value.
 * oparg:  operand consumed by 'insn'.
 */
#define __futex_atomic_op(insn, ret, oldval, uaddr, tmp, oparg)		\
do {									\
	unsigned int loops = FUTEX_MAX_LOOPS;				\
									\
	uaccess_enable_privileged();					\
	asm volatile(							\
	/* prefetch the line for store before the exclusive access */	\
"	prfm pstl1strm, %2\n"						\
"1:	ldxr %w1, %2\n"							\
	insn "\n"							\
"2:	stlxr %w0, %w3, %2\n"						\
	/* a zero STLXR status means the store succeeded */		\
"	cbz %w0, 3f\n"							\
	/* failed: status is 1 per the architecture, so this counts	\
	   down 'loops' by one and retries from the load */		\
"	sub %w4, %w4, %w0\n"						\
"	cbnz %w4, 1b\n"							\
	/* out of retries: report -EAGAIN (%w6) */			\
"	mov %w0, %w6\n"							\
"3:\n"									\
"	dmb ish\n"							\
	_ASM_EXTABLE_UACCESS_ERR(1b, 3b, %w0)				\
	_ASM_EXTABLE_UACCESS_ERR(2b, 3b, %w0)				\
	: "=&r" (ret), "=&r" (oldval), "+Q" (*uaddr), "=&r" (tmp),	\
	  "+r" (loops)							\
	: "r" (oparg), "Ir" (-EAGAIN)					\
	: "memory");							\
	uaccess_disable_privileged();					\
} while (0)
/*
 * Run the futex operation 'op' atomically on the user word *_uaddr,
 * storing the word's previous value through 'oval'.
 *
 * Returns 0 on success, -EFAULT for an inaccessible or faulting user
 * address, -EAGAIN when the exclusive store kept failing (see
 * __futex_atomic_op), or -ENOSYS for an unrecognised op code.
 */
static inline int
arch_futex_atomic_op_inuser(int op, int oparg, int *oval, u32 __user *_uaddr)
{
	int oldval = 0, ret, tmp;
	/* mask the user pointer before the uaccess — presumably
	 * speculation hardening; see __uaccess_mask_ptr() */
	u32 __user *uaddr = __uaccess_mask_ptr(_uaddr);

	if (!access_ok(_uaddr, sizeof(u32)))
		return -EFAULT;

	switch (op) {
	case FUTEX_OP_SET:	/* *uaddr = oparg */
		__futex_atomic_op("mov %w3, %w5",
				  ret, oldval, uaddr, tmp, oparg);
		break;
	case FUTEX_OP_ADD:	/* *uaddr += oparg */
		__futex_atomic_op("add %w3, %w1, %w5",
				  ret, oldval, uaddr, tmp, oparg);
		break;
	case FUTEX_OP_OR:	/* *uaddr |= oparg */
		__futex_atomic_op("orr %w3, %w1, %w5",
				  ret, oldval, uaddr, tmp, oparg);
		break;
	case FUTEX_OP_ANDN:	/* *uaddr &= ~oparg — note the inverted arg */
		__futex_atomic_op("and %w3, %w1, %w5",
				  ret, oldval, uaddr, tmp, ~oparg);
		break;
	case FUTEX_OP_XOR:	/* *uaddr ^= oparg */
		__futex_atomic_op("eor %w3, %w1, %w5",
				  ret, oldval, uaddr, tmp, oparg);
		break;
	default:
		ret = -ENOSYS;
	}

	/* only publish the old value on a fully successful update */
	if (!ret)
		*oval = oldval;

	return ret;
}
/*
 * Atomically compare the user word *_uaddr against 'oldval' and, if they
 * match, store 'newval'. The value actually read is returned through
 * 'uval' either way, so the caller can detect a compare mismatch.
 *
 * Returns 0 on success (including the mismatch case), -EFAULT on a
 * faulting user access, or -EAGAIN if the store-exclusive failed
 * FUTEX_MAX_LOOPS times in a row.
 */
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *_uaddr,
			      u32 oldval, u32 newval)
{
	int ret = 0;
	unsigned int loops = FUTEX_MAX_LOOPS;
	u32 val, tmp;
	u32 __user *uaddr;

	if (!access_ok(_uaddr, sizeof(u32)))
		return -EFAULT;

	/* mask the user pointer before the uaccess — presumably
	 * speculation hardening; see __uaccess_mask_ptr() */
	uaddr = __uaccess_mask_ptr(_uaddr);
	uaccess_enable_privileged();
	asm volatile("// futex_atomic_cmpxchg_inatomic\n"
"	prfm pstl1strm, %2\n"
"1:	ldxr %w1, %2\n"
	/* compare: non-zero difference means mismatch — skip the store
	   and the barrier, leaving ret at 0 */
"	sub %w3, %w1, %w5\n"
"	cbnz %w3, 4f\n"
"2:	stlxr %w3, %w6, %2\n"
	/* zero STLXR status: store succeeded */
"	cbz %w3, 3f\n"
	/* failed (status 1): count down 'loops' and retry the load */
"	sub %w4, %w4, %w3\n"
"	cbnz %w4, 1b\n"
	/* out of retries: report -EAGAIN (%w7) */
"	mov %w0, %w7\n"
"3:\n"
"	dmb ish\n"
"4:\n"
	/* faults in the load or store land at 4 with ret = -EFAULT */
	_ASM_EXTABLE_UACCESS_ERR(1b, 4b, %w0)
	_ASM_EXTABLE_UACCESS_ERR(2b, 4b, %w0)
	: "+r" (ret), "=&r" (val), "+Q" (*uaddr), "=&r" (tmp), "+r" (loops)
	: "r" (oldval), "r" (newval), "Ir" (-EAGAIN)
	: "memory");
	uaccess_disable_privileged();

	/* report the value read only when no error was signalled */
	if (!ret)
		*uval = val;

	return ret;
}
#endif /* __ASM_FUTEX_H */