@@ -52,17 +52,6 @@ static inline unsigned long __kern_my_cpu_offset(void)
#define __my_cpu_offset __kern_my_cpu_offset()
#endif
 
-#define PERCPU_RW_OPS(sz) \
-static inline unsigned long __percpu_read_##sz(void *ptr) \
-{ \
- return READ_ONCE(*(u##sz *)ptr); \
-} \
- \
-static inline void __percpu_write_##sz(void *ptr, unsigned long val) \
-{ \
- WRITE_ONCE(*(u##sz *)ptr, (u##sz)val); \
-}
-
#define __PERCPU_OP_CASE(w, sfx, name, sz, op_llsc, op_lse) \
static inline void \
__percpu_##name##_case_##sz(void *ptr, unsigned long val) \
@@ -120,10 +109,6 @@ __percpu_##name##_return_case_##sz(void *ptr, unsigned long val) \
__PERCPU_RET_OP_CASE(w, , name, 32, op_llsc, op_lse) \
__PERCPU_RET_OP_CASE( , , name, 64, op_llsc, op_lse)
 
-PERCPU_RW_OPS(8)
-PERCPU_RW_OPS(16)
-PERCPU_RW_OPS(32)
-PERCPU_RW_OPS(64)
PERCPU_OP(add, add, stadd)
PERCPU_OP(andnot, bic, stclr)
PERCPU_OP(or, orr, stset)
@@ -168,24 +153,6 @@ PERCPU_RET_OP(add, add, ldadd)
__retval; \
})
 
-#define this_cpu_read_1(pcp) \
- _pcp_protect_return(__percpu_read_8, pcp)
-#define this_cpu_read_2(pcp) \
- _pcp_protect_return(__percpu_read_16, pcp)
-#define this_cpu_read_4(pcp) \
- _pcp_protect_return(__percpu_read_32, pcp)
-#define this_cpu_read_8(pcp) \
- _pcp_protect_return(__percpu_read_64, pcp)
-
-#define this_cpu_write_1(pcp, val) \
- _pcp_protect(__percpu_write_8, pcp, (unsigned long)val)
-#define this_cpu_write_2(pcp, val) \
- _pcp_protect(__percpu_write_16, pcp, (unsigned long)val)
-#define this_cpu_write_4(pcp, val) \
- _pcp_protect(__percpu_write_32, pcp, (unsigned long)val)
-#define this_cpu_write_8(pcp, val) \
- _pcp_protect(__percpu_write_64, pcp, (unsigned long)val)
-
#define this_cpu_add_1(pcp, val) \
_pcp_protect(__percpu_add_case_8, pcp, val)
#define this_cpu_add_2(pcp, val) \