@@ -60,4 +60,54 @@ static inline int atomic_fetch_add_unless(atomic_t *v, int a, int u)
#endif
#endif
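+/*
+ * Full-fence helpers used to build fully ordered atomics from _relaxed
+ * ones; these mirror the upstream defaults.
+ */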
+#ifndef __atomic_pre_full_fence
+#define __atomic_pre_full_fence smp_mb__before_atomic
+#endif
+
+#ifndef __atomic_post_full_fence
+#define __atomic_post_full_fence smp_mb__after_atomic
+#endif
+
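+/*
+ * atomic_fetch_add() returns the old value; atomic_add_return() returns
+ * the new one, so subtract the addend to recover it.
+ */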
+#ifndef atomic_fetch_add
+static inline int
+atomic_fetch_add(int i, atomic_t *v)
+{
+	return atomic_add_return(i, v) - i;
+}
+#define atomic_fetch_add atomic_fetch_add
+#endif
+
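+/* A fully ordered op is a valid implementation of the _relaxed variant. */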
+#ifndef atomic_fetch_add_relaxed
+#define atomic_fetch_add_relaxed atomic_fetch_add
+#endif
+
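+/* Likewise, atomic_sub_return() yields the new value, so add i back. */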
+#ifndef atomic_fetch_sub
+static inline int
+atomic_fetch_sub(int i, atomic_t *v)
+{
+	return atomic_sub_return(i, v) + i;
+}
+#define atomic_fetch_sub atomic_fetch_sub
+#endif
+
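+/*
+ * Map all orderings to the fully ordered op; stronger ordering than
+ * requested is always acceptable.
+ */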
+#ifndef atomic_fetch_sub_relaxed
+#define atomic_fetch_sub_acquire atomic_fetch_sub
+#define atomic_fetch_sub_release atomic_fetch_sub
+#define atomic_fetch_sub_relaxed atomic_fetch_sub
+#endif
+
#endif /* __BP_ATOMIC_H */