#include <linux/uaccess.h>

#include <asm/errno.h>
+#include <asm/lsui.h>

#define FUTEX_MAX_LOOPS	128 /* What's the largest number you can think of? */

@@ -89,11 +90,166 @@ __llsc_futex_cmpxchg(u32 __user *uaddr, u32 oldval, u32 newval, u32 *oval)
	return ret;
}

+#ifdef CONFIG_ARM64_LSUI
+
+/*
+ * Wrap LSUI instructions with uaccess_ttbr0_enable()/disable(), as
+ * PAN toggling is not required.
+ */
+
+#define LSUI_FUTEX_ATOMIC_OP(op, asm_op)				\
+static __always_inline int						\
+__lsui_futex_atomic_##op(int oparg, u32 __user *uaddr, int *oval)	\
+{									\
+	int ret = 0;							\
+	int oldval;							\
+									\
+	uaccess_ttbr0_enable();						\
+									\
+	asm volatile("// __lsui_futex_atomic_" #op "\n"		\
+	__LSUI_PREAMBLE							\
+"1:	" #asm_op "al	%w[oparg], %w[oldval], %[uaddr]\n"		\
+"2:\n"									\
+	_ASM_EXTABLE_UACCESS_ERR(1b, 2b, %w[ret])			\
+	: [ret] "+r" (ret), [uaddr] "+Q" (*uaddr),			\
+	  [oldval] "=r" (oldval)					\
+	: [oparg] "r" (oparg)						\
+	: "memory");							\
+									\
+	uaccess_ttbr0_disable();					\
+									\
+	if (!ret)							\
+		*oval = oldval;						\
+	return ret;							\
+}
+
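+/*
+ * Each futex op maps onto an unprivileged LSUI atomic; the macro above
+ * appends the "al" (acquire-release) suffix to the mnemonic.
+ */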
+LSUI_FUTEX_ATOMIC_OP(add, ldtadd)
+LSUI_FUTEX_ATOMIC_OP(or, ldtset)
+LSUI_FUTEX_ATOMIC_OP(andnot, ldtclr)
+LSUI_FUTEX_ATOMIC_OP(set, swpt)
+
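+/*
+ * 64-bit compare-and-swap on user memory using the unprivileged CASALT
+ * instruction; a faulting access sets ret to -EFAULT via the exception
+ * table entry.
+ */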
+static __always_inline int
+__lsui_cmpxchg64(u64 __user *uaddr, u64 *oldval, u64 newval)
+{
+	int ret = 0;
+
+	uaccess_ttbr0_enable();
+
+	asm volatile("// __lsui_cmpxchg64\n"
+	__LSUI_PREAMBLE
+"1:	casalt	%[oldval], %[newval], %[uaddr]\n"
+"2:\n"
+	_ASM_EXTABLE_UACCESS_ERR(1b, 2b, %w[ret])
+	: [ret] "+r" (ret), [uaddr] "+Q" (*uaddr),
+	  [oldval] "+r" (*oldval)
+	: [newval] "r" (newval)
+	: "memory");
+
+	uaccess_ttbr0_disable();
+
+	return ret;
+}
+
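+/*
+ * Emulate a 32-bit user cmpxchg with the 64-bit CASALT above: operate
+ * on the naturally aligned 64-bit word containing the futex and return
+ * -EAGAIN if the neighbouring 32 bits changed under us.
+ */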
+static __always_inline int
+__lsui_cmpxchg32(u32 __user *uaddr, u32 oldval, u32 newval, u32 *oval)
+{
+	u64 __user *uaddr64;
+	bool futex_pos, other_pos;
+	u32 other, orig_other;
+	union {
+		u32 futex[2];
+		u64 raw;
+	} oval64, orig64, nval64;
+
+	uaddr64 = (u64 __user *)PTR_ALIGN_DOWN(uaddr, sizeof(u64));
+	futex_pos = !IS_ALIGNED((unsigned long)uaddr, sizeof(u64));
+	other_pos = !futex_pos;
+
+	oval64.futex[futex_pos] = oldval;
+	if (get_user(oval64.futex[other_pos], (u32 __user *)uaddr64 + other_pos))
+		return -EFAULT;
+
+	orig64.raw = oval64.raw;
+
+	nval64.futex[futex_pos] = newval;
+	nval64.futex[other_pos] = oval64.futex[other_pos];
+
+	if (__lsui_cmpxchg64(uaddr64, &oval64.raw, nval64.raw))
+		return -EFAULT;
+
+	oldval = oval64.futex[futex_pos];
+	other = oval64.futex[other_pos];
+	orig_other = orig64.futex[other_pos];
+
+	if (other != orig_other)
+		return -EAGAIN;
+
+	*oval = oldval;
+
+	return 0;
+}
+
+static __always_inline int
+__lsui_futex_atomic_and(int oparg, u32 __user *uaddr, int *oval)
+{
+	/*
+	 * Undo the bitwise negation applied to the oparg passed from
+	 * arch_futex_atomic_op_inuser() with FUTEX_OP_ANDN.
+	 */
+	return __lsui_futex_atomic_andnot(~oparg, uaddr, oval);
+}
+
+static __always_inline int
+__lsui_futex_atomic_eor(int oparg, u32 __user *uaddr, int *oval)
+{
+	u32 oldval, newval, val;
+	int ret, i;
+
+	if (get_user(oldval, uaddr))
+		return -EFAULT;
+
+	/*
+	 * there are no ldteor/stteor instructions...
+	 */
+	for (i = 0; i < FUTEX_MAX_LOOPS; i++) {
+		newval = oldval ^ oparg;
+
+		ret = __lsui_cmpxchg32(uaddr, oldval, newval, &val);
+		switch (ret) {
+		case -EFAULT:
+			return ret;
+		case -EAGAIN:
+			continue;
+		}
+
+		if (val == oldval) {
+			*oval = val;
+			return 0;
+		}
+
+		oldval = val;
+	}
+
+	return -EAGAIN;
+}
+
+static __always_inline int
+__lsui_futex_cmpxchg(u32 __user *uaddr, u32 oldval, u32 newval, u32 *oval)
+{
+	/*
+	 * Callers of futex_atomic_cmpxchg_inatomic() already retry on
+	 * -EAGAIN, no need for another loop of max retries.
+	 */
+	return __lsui_cmpxchg32(uaddr, oldval, newval, oval);
+}
+#endif	/* CONFIG_ARM64_LSUI */
+
+
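+/*
+ * __lsui_llsc_body() dispatches each operation to its __lsui_*() or
+ * __llsc_*() implementation.
+ */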
#define FUTEX_ATOMIC_OP(op)						\
static __always_inline int						\
__futex_atomic_##op(int oparg, u32 __user *uaddr, int *oval)		\
{									\
-	return __llsc_futex_atomic_##op(oparg, uaddr, oval);		\
+	return __lsui_llsc_body(futex_atomic_##op, oparg, uaddr, oval);	\
}

FUTEX_ATOMIC_OP(add)
@@ -105,7 +261,7 @@ FUTEX_ATOMIC_OP(set)
static __always_inline int
__futex_cmpxchg(u32 __user *uaddr, u32 oldval, u32 newval, u32 *oval)
{
-	return __llsc_futex_cmpxchg(uaddr, oldval, newval, oval);
+	return __lsui_llsc_body(futex_cmpxchg, uaddr, oldval, newval, oval);
}

static inline int