1515#include <asm/barrier.h>
1616#include <asm/cmpxchg.h>
1717
/* Atomically load v->counter via the arch helper __atomic_read(). */
static __always_inline int arch_atomic_read(const atomic_t *v)
{
	return __atomic_read(v);
}
#define arch_atomic_read arch_atomic_read
2323
/* Atomically store i into v->counter via the arch helper __atomic_set(). */
static __always_inline void arch_atomic_set(atomic_t *v, int i)
{
	__atomic_set(v, i);
}
#define arch_atomic_set arch_atomic_set
2929
/*
 * Add i to v->counter and return the NEW value.
 * The "+ i" implies __atomic_add_barrier() hands back the pre-add
 * (old) value; the _barrier variant provides full ordering.
 */
static __always_inline int arch_atomic_add_return(int i, atomic_t *v)
{
	return __atomic_add_barrier(i, &v->counter) + i;
}
#define arch_atomic_add_return arch_atomic_add_return
3535
/* Add i to v->counter and return the OLD value (fully ordered). */
static __always_inline int arch_atomic_fetch_add(int i, atomic_t *v)
{
	return __atomic_add_barrier(i, &v->counter);
}
#define arch_atomic_fetch_add arch_atomic_fetch_add
4141
/* Add i to v->counter; no return value, so the unordered helper suffices. */
static __always_inline void arch_atomic_add(int i, atomic_t *v)
{
	__atomic_add(i, &v->counter);
}
@@ -50,11 +50,11 @@ static inline void arch_atomic_add(int i, atomic_t *v)
/* Subtraction is addition of the negated operand; the (int) cast keeps
 * the negation in the 32-bit domain before it reaches fetch_add. */
#define arch_atomic_fetch_sub(_i, _v)	arch_atomic_fetch_add(-(int)(_i), _v)
5151
/*
 * Generate a pair of 32-bit atomic ops for <op>:
 *   arch_atomic_<op>()       - apply op, no return value, unordered helper
 *   arch_atomic_fetch_<op>() - apply op, return the old value, full barrier
 */
#define ATOMIC_OPS(op)							\
static __always_inline void arch_atomic_##op(int i, atomic_t *v)	\
{									\
	__atomic_##op(i, &v->counter);					\
}									\
static __always_inline int arch_atomic_fetch_##op(int i, atomic_t *v)	\
{									\
	return __atomic_##op##_barrier(i, &v->counter);			\
}
@@ -74,60 +74,60 @@ ATOMIC_OPS(xor)
7474
/* Exchange: delegate to the generic arch_xchg() on the counter field. */
#define arch_atomic_xchg(v, new) (arch_xchg(&((v)->counter), new))
7676
/*
 * Compare-and-exchange on v->counter: if it equals old, store new.
 * Returns the value observed in the counter (== old on success).
 */
static __always_inline int arch_atomic_cmpxchg(atomic_t *v, int old, int new)
{
	return __atomic_cmpxchg(&v->counter, old, new);
}
#define arch_atomic_cmpxchg arch_atomic_cmpxchg
8282
/* Static initializer for atomic64_t. */
#define ATOMIC64_INIT(i)  { (i) }
8484
/* Atomically load the 64-bit counter via __atomic64_read(). */
static __always_inline s64 arch_atomic64_read(const atomic64_t *v)
{
	return __atomic64_read(v);
}
#define arch_atomic64_read arch_atomic64_read
9090
/* Atomically store i into the 64-bit counter via __atomic64_set(). */
static __always_inline void arch_atomic64_set(atomic64_t *v, s64 i)
{
	__atomic64_set(v, i);
}
#define arch_atomic64_set arch_atomic64_set
9696
/*
 * 64-bit add-and-return-NEW-value; __atomic64_add_barrier() returns
 * the old value (hence "+ i") with full ordering.  The (long *) cast
 * matches the helper's parameter type — s64 and long are assumed
 * layout-compatible here (64-bit arch).
 */
static __always_inline s64 arch_atomic64_add_return(s64 i, atomic64_t *v)
{
	return __atomic64_add_barrier(i, (long *)&v->counter) + i;
}
#define arch_atomic64_add_return arch_atomic64_add_return
102102
/* 64-bit add, returning the OLD value (fully ordered). */
static __always_inline s64 arch_atomic64_fetch_add(s64 i, atomic64_t *v)
{
	return __atomic64_add_barrier(i, (long *)&v->counter);
}
#define arch_atomic64_fetch_add arch_atomic64_fetch_add
108108
/* 64-bit add with no return value; unordered helper suffices. */
static __always_inline void arch_atomic64_add(s64 i, atomic64_t *v)
{
	__atomic64_add(i, (long *)&v->counter);
}
#define arch_atomic64_add arch_atomic64_add
114114
/* 64-bit exchange: delegate to the generic arch_xchg() on the counter. */
#define arch_atomic64_xchg(v, new) (arch_xchg(&((v)->counter), new))
116116
/*
 * 64-bit compare-and-exchange: if the counter equals old, store new.
 * Returns the observed value (== old on success).
 */
static __always_inline s64 arch_atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
{
	return __atomic64_cmpxchg((long *)&v->counter, old, new);
}
#define arch_atomic64_cmpxchg arch_atomic64_cmpxchg
122122
/*
 * Generate a pair of 64-bit atomic ops for <op>:
 *   arch_atomic64_<op>()       - apply op, no return value, unordered helper
 *   arch_atomic64_fetch_<op>() - apply op, return the old value, full barrier
 * NOTE(review): the fetch variant returns long rather than s64 — assumed
 * equivalent on this 64-bit arch; confirm against the generic atomic64 API.
 */
#define ATOMIC64_OPS(op)						\
static __always_inline void arch_atomic64_##op(s64 i, atomic64_t *v)	\
{									\
	__atomic64_##op(i, (long *)&v->counter);			\
}									\
static __always_inline long arch_atomic64_fetch_##op(s64 i, atomic64_t *v) \
{									\
	return __atomic64_##op##_barrier(i, (long *)&v->counter);	\
}
132132
133133ATOMIC64_OPS (and )
0 commit comments