Skip to content

Commit eaa3bab

Browse files
LeviYeoReum authored and ctmarinas committed
arm64: futex: Refactor futex atomic operation
Refactor the futex atomic operations using ll/sc instructions in preparation for FEAT_LSUI support. In addition, use named operands for the inline asm.

No functional change.

Signed-off-by: Yeoreum Yun <[email protected]>
[[email protected]: remove unnecessary stringify.h include]
Signed-off-by: Catalin Marinas <[email protected]>
1 parent 42550d7 commit eaa3bab

1 file changed

Lines changed: 97 additions & 58 deletions

File tree

arch/arm64/include/asm/futex.h

Lines changed: 97 additions & 58 deletions
Original file line numberDiff line numberDiff line change
@@ -12,109 +12,148 @@
1212

1313
#define FUTEX_MAX_LOOPS 128 /* What's the largest number you can think of? */
1414

15-
#define __futex_atomic_op(insn, ret, oldval, uaddr, tmp, oparg) \
16-
do { \
15+
#define LLSC_FUTEX_ATOMIC_OP(op, insn) \
16+
static __always_inline int \
17+
__llsc_futex_atomic_##op(int oparg, u32 __user *uaddr, int *oval) \
18+
{ \
1719
unsigned int loops = FUTEX_MAX_LOOPS; \
20+
int ret, oldval, newval; \
1821
\
1922
uaccess_enable_privileged(); \
20-
asm volatile( \
21-
" prfm pstl1strm, %2\n" \
22-
"1: ldxr %w1, %2\n" \
23+
asm volatile("// __llsc_futex_atomic_" #op "\n" \
24+
" prfm pstl1strm, %[uaddr]\n" \
25+
"1: ldxr %w[oldval], %[uaddr]\n" \
2326
insn "\n" \
24-
"2: stlxr %w0, %w3, %2\n" \
25-
" cbz %w0, 3f\n" \
26-
" sub %w4, %w4, %w0\n" \
27-
" cbnz %w4, 1b\n" \
28-
" mov %w0, %w6\n" \
27+
"2: stlxr %w[ret], %w[newval], %[uaddr]\n" \
28+
" cbz %w[ret], 3f\n" \
29+
" sub %w[loops], %w[loops], %w[ret]\n" \
30+
" cbnz %w[loops], 1b\n" \
31+
" mov %w[ret], %w[err]\n" \
2932
"3:\n" \
3033
" dmb ish\n" \
31-
_ASM_EXTABLE_UACCESS_ERR(1b, 3b, %w0) \
32-
_ASM_EXTABLE_UACCESS_ERR(2b, 3b, %w0) \
33-
: "=&r" (ret), "=&r" (oldval), "+Q" (*uaddr), "=&r" (tmp), \
34-
"+r" (loops) \
35-
: "r" (oparg), "Ir" (-EAGAIN) \
34+
_ASM_EXTABLE_UACCESS_ERR(1b, 3b, %w[ret]) \
35+
_ASM_EXTABLE_UACCESS_ERR(2b, 3b, %w[ret]) \
36+
: [ret] "=&r" (ret), [oldval] "=&r" (oldval), \
37+
[uaddr] "+Q" (*uaddr), [newval] "=&r" (newval), \
38+
[loops] "+r" (loops) \
39+
: [oparg] "r" (oparg), [err] "Ir" (-EAGAIN) \
3640
: "memory"); \
3741
uaccess_disable_privileged(); \
38-
} while (0)
42+
\
43+
if (!ret) \
44+
*oval = oldval; \
45+
\
46+
return ret; \
47+
}
48+
49+
LLSC_FUTEX_ATOMIC_OP(add, "add %w[newval], %w[oldval], %w[oparg]")
50+
LLSC_FUTEX_ATOMIC_OP(or, "orr %w[newval], %w[oldval], %w[oparg]")
51+
LLSC_FUTEX_ATOMIC_OP(and, "and %w[newval], %w[oldval], %w[oparg]")
52+
LLSC_FUTEX_ATOMIC_OP(eor, "eor %w[newval], %w[oldval], %w[oparg]")
53+
LLSC_FUTEX_ATOMIC_OP(set, "mov %w[newval], %w[oparg]")
54+
55+
static __always_inline int
56+
__llsc_futex_cmpxchg(u32 __user *uaddr, u32 oldval, u32 newval, u32 *oval)
57+
{
58+
int ret = 0;
59+
unsigned int loops = FUTEX_MAX_LOOPS;
60+
u32 val, tmp;
61+
62+
uaccess_enable_privileged();
63+
asm volatile("//__llsc_futex_cmpxchg\n"
64+
" prfm pstl1strm, %[uaddr]\n"
65+
"1: ldxr %w[curval], %[uaddr]\n"
66+
" eor %w[tmp], %w[curval], %w[oldval]\n"
67+
" cbnz %w[tmp], 4f\n"
68+
"2: stlxr %w[tmp], %w[newval], %[uaddr]\n"
69+
" cbz %w[tmp], 3f\n"
70+
" sub %w[loops], %w[loops], %w[tmp]\n"
71+
" cbnz %w[loops], 1b\n"
72+
" mov %w[ret], %w[err]\n"
73+
"3:\n"
74+
" dmb ish\n"
75+
"4:\n"
76+
_ASM_EXTABLE_UACCESS_ERR(1b, 4b, %w[ret])
77+
_ASM_EXTABLE_UACCESS_ERR(2b, 4b, %w[ret])
78+
: [ret] "+r" (ret), [curval] "=&r" (val),
79+
[uaddr] "+Q" (*uaddr), [tmp] "=&r" (tmp),
80+
[loops] "+r" (loops)
81+
: [oldval] "r" (oldval), [newval] "r" (newval),
82+
[err] "Ir" (-EAGAIN)
83+
: "memory");
84+
uaccess_disable_privileged();
85+
86+
if (!ret)
87+
*oval = val;
88+
89+
return ret;
90+
}
91+
92+
#define FUTEX_ATOMIC_OP(op) \
93+
static __always_inline int \
94+
__futex_atomic_##op(int oparg, u32 __user *uaddr, int *oval) \
95+
{ \
96+
return __llsc_futex_atomic_##op(oparg, uaddr, oval); \
97+
}
98+
99+
FUTEX_ATOMIC_OP(add)
100+
FUTEX_ATOMIC_OP(or)
101+
FUTEX_ATOMIC_OP(and)
102+
FUTEX_ATOMIC_OP(eor)
103+
FUTEX_ATOMIC_OP(set)
104+
105+
static __always_inline int
106+
__futex_cmpxchg(u32 __user *uaddr, u32 oldval, u32 newval, u32 *oval)
107+
{
108+
return __llsc_futex_cmpxchg(uaddr, oldval, newval, oval);
109+
}
39110

40111
static inline int
41112
arch_futex_atomic_op_inuser(int op, int oparg, int *oval, u32 __user *_uaddr)
42113
{
43-
int oldval = 0, ret, tmp;
44-
u32 __user *uaddr = __uaccess_mask_ptr(_uaddr);
114+
int ret;
115+
u32 __user *uaddr;
45116

46117
if (!access_ok(_uaddr, sizeof(u32)))
47118
return -EFAULT;
48119

120+
uaddr = __uaccess_mask_ptr(_uaddr);
121+
49122
switch (op) {
50123
case FUTEX_OP_SET:
51-
__futex_atomic_op("mov %w3, %w5",
52-
ret, oldval, uaddr, tmp, oparg);
124+
ret = __futex_atomic_set(oparg, uaddr, oval);
53125
break;
54126
case FUTEX_OP_ADD:
55-
__futex_atomic_op("add %w3, %w1, %w5",
56-
ret, oldval, uaddr, tmp, oparg);
127+
ret = __futex_atomic_add(oparg, uaddr, oval);
57128
break;
58129
case FUTEX_OP_OR:
59-
__futex_atomic_op("orr %w3, %w1, %w5",
60-
ret, oldval, uaddr, tmp, oparg);
130+
ret = __futex_atomic_or(oparg, uaddr, oval);
61131
break;
62132
case FUTEX_OP_ANDN:
63-
__futex_atomic_op("and %w3, %w1, %w5",
64-
ret, oldval, uaddr, tmp, ~oparg);
133+
ret = __futex_atomic_and(~oparg, uaddr, oval);
65134
break;
66135
case FUTEX_OP_XOR:
67-
__futex_atomic_op("eor %w3, %w1, %w5",
68-
ret, oldval, uaddr, tmp, oparg);
136+
ret = __futex_atomic_eor(oparg, uaddr, oval);
69137
break;
70138
default:
71139
ret = -ENOSYS;
72140
}
73141

74-
if (!ret)
75-
*oval = oldval;
76-
77142
return ret;
78143
}
79144

80145
static inline int
81146
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *_uaddr,
82147
u32 oldval, u32 newval)
83148
{
84-
int ret = 0;
85-
unsigned int loops = FUTEX_MAX_LOOPS;
86-
u32 val, tmp;
87149
u32 __user *uaddr;
88150

89151
if (!access_ok(_uaddr, sizeof(u32)))
90152
return -EFAULT;
91153

92154
uaddr = __uaccess_mask_ptr(_uaddr);
93-
uaccess_enable_privileged();
94-
asm volatile("// futex_atomic_cmpxchg_inatomic\n"
95-
" prfm pstl1strm, %2\n"
96-
"1: ldxr %w1, %2\n"
97-
" sub %w3, %w1, %w5\n"
98-
" cbnz %w3, 4f\n"
99-
"2: stlxr %w3, %w6, %2\n"
100-
" cbz %w3, 3f\n"
101-
" sub %w4, %w4, %w3\n"
102-
" cbnz %w4, 1b\n"
103-
" mov %w0, %w7\n"
104-
"3:\n"
105-
" dmb ish\n"
106-
"4:\n"
107-
_ASM_EXTABLE_UACCESS_ERR(1b, 4b, %w0)
108-
_ASM_EXTABLE_UACCESS_ERR(2b, 4b, %w0)
109-
: "+r" (ret), "=&r" (val), "+Q" (*uaddr), "=&r" (tmp), "+r" (loops)
110-
: "r" (oldval), "r" (newval), "Ir" (-EAGAIN)
111-
: "memory");
112-
uaccess_disable_privileged();
113155

114-
if (!ret)
115-
*uval = val;
116-
117-
return ret;
156+
return __futex_cmpxchg(uaddr, oldval, newval, uval);
118157
}
119158

120159
#endif /* __ASM_FUTEX_H */

0 commit comments

Comments (0)