@@ -15,8 +15,12 @@ extern "C" {
#include "generic/rte_atomic.h"
+#ifndef dsb
#define dsb(opt) asm volatile("dsb " #opt : : : "memory")
+#endif
+#ifndef dmb
#define dmb(opt) asm volatile("dmb " #opt : : : "memory")
+#endif
#define rte_mb() dsb(sy)
@@ -17,6 +17,112 @@ static inline void rte_pause(void)
asm volatile("yield" ::: "memory");
}
+#ifdef RTE_USE_WFE
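+/*
+ * WFE-based wait: the exclusive load (ldxrh/ldxr, or the acquiring
+ * ldaxr forms) arms the CPU's exclusive/event monitor for the loaded
+ * address, so the following wfe sleeps until that location is written
+ * or another event arrives, instead of busy-polling. The leading sevl
+ * sets the local event register so the first wfe falls through while
+ * the monitor is being armed.
+ */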
+/* Wait until *addr holds the expected value. memorder selects the
+ * load: __ATOMIC_RELAXED uses the plain ldxr forms, anything else the
+ * acquiring ldaxr forms. */
+static __rte_always_inline void
+rte_wait_until_equal16(volatile uint16_t *addr, uint16_t expected, int memorder)
+{
+ uint16_t tmp;
+ if (memorder == __ATOMIC_RELAXED)
+ asm volatile(
+ "ldxrh %w[tmp], %[addr]\n"
+ "cmp %w[tmp], %w[expected]\n"
+ "b.eq 2f\n"
+ "sevl\n"
+ "1: wfe\n"
+ "ldxrh %w[tmp], %[addr]\n"
+ "cmp %w[tmp], %w[expected]\n"
+ "b.ne 1b\n"
+ "2:\n"
+ : [tmp] "=&r" (tmp)
+ : [addr] "Q"(*addr), [expected] "r"(expected)
+ : "cc", "memory");
+ else
+ asm volatile(
+ "ldaxrh %w[tmp], %[addr]\n"
+ "cmp %w[tmp], %w[expected]\n"
+ "b.eq 2f\n"
+ "sevl\n"
+ "1: wfe\n"
+ "ldaxrh %w[tmp], %[addr]\n"
+ "cmp %w[tmp], %w[expected]\n"
+ "b.ne 1b\n"
+ "2:\n"
+ : [tmp] "=&r" (tmp)
+ : [addr] "Q"(*addr), [expected] "r"(expected)
+ : "cc", "memory");
+}
+
+static __rte_always_inline void
+rte_wait_until_equal32(volatile uint32_t *addr, uint32_t expected, int memorder)
+{
+ uint32_t tmp;
+ if (memorder == __ATOMIC_RELAXED)
+ asm volatile(
+ "ldxr %w[tmp], %[addr]\n"
+ "cmp %w[tmp], %w[expected]\n"
+ "b.eq 2f\n"
+ "sevl\n"
+ "1: wfe\n"
+ "ldxr %w[tmp], %[addr]\n"
+ "cmp %w[tmp], %w[expected]\n"
+ "b.ne 1b\n"
+ "2:\n"
+ : [tmp] "=&r" (tmp)
+ : [addr] "Q"(*addr), [expected] "r"(expected)
+ : "cc", "memory");
+ else
+ asm volatile(
+ "ldaxr %w[tmp], %[addr]\n"
+ "cmp %w[tmp], %w[expected]\n"
+ "b.eq 2f\n"
+ "sevl\n"
+ "1: wfe\n"
+ "ldaxr %w[tmp], %[addr]\n"
+ "cmp %w[tmp], %w[expected]\n"
+ "b.ne 1b\n"
+ "2:\n"
+ : [tmp] "=&r" (tmp)
+ : [addr] "Q"(*addr), [expected] "r"(expected)
+ : "cc", "memory");
+}
+
+static __rte_always_inline void
+rte_wait_until_equal64(volatile uint64_t *addr, uint64_t expected, int memorder)
+{
+ uint64_t tmp;
+ if (memorder == __ATOMIC_RELAXED)
+ asm volatile(
+ "ldxr %x[tmp], %[addr]\n"
+ "cmp %x[tmp], %x[expected]\n"
+ "b.eq 2f\n"
+ "sevl\n"
+ "1: wfe\n"
+ "ldxr %x[tmp], %[addr]\n"
+ "cmp %x[tmp], %x[expected]\n"
+ "b.ne 1b\n"
+ "2:\n"
+ : [tmp] "=&r" (tmp)
+ : [addr] "Q"(*addr), [expected] "r"(expected)
+ : "cc", "memory");
+ else
+ asm volatile(
+ "ldaxr %x[tmp], %[addr]\n"
+ "cmp %x[tmp], %x[expected]\n"
+ "b.eq 2f\n"
+ "sevl\n"
+ "1: wfe\n"
+ "ldaxr %x[tmp], %[addr]\n"
+ "cmp %x[tmp], %x[expected]\n"
+ "b.ne 1b\n"
+ "2:\n"
+ : [tmp] "=&r" (tmp)
+ : [addr] "Q"(*addr), [expected] "r"(expected)
+ : "cc", "memory");
+}
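+
+/* When RTE_USE_WFE is not enabled at build time, these names are
+ * provided by the pause-loop fallbacks in generic/rte_pause.h. */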
+
+#endif /* RTE_USE_WFE */
+
#ifdef __cplusplus
}
#endif
@@ -4,7 +4,6 @@
#ifndef _RTE_PAUSE_H_
#define _RTE_PAUSE_H_
-
/**
* @file
*
@@ -12,6 +11,10 @@
*
*/
+#include <stdint.h>
+#include <rte_common.h>
+#include <rte_atomic.h>
+
/**
* Pause CPU execution for a short while
*
@@ -20,4 +23,38 @@
*/
static inline void rte_pause(void);
+#if !defined(RTE_USE_WFE)
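+/* Portable fallback: poll with rte_pause(). Under the C11 model the
+ * requested ordering is applied to every load; otherwise a read
+ * barrier after the loop upgrades the plain loads for any ordering
+ * stronger than relaxed. */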
+#ifdef RTE_USE_C11_MEM_MODEL
+#define __rte_wait_until_equal(addr, expected, memorder) do { \
+	while (__atomic_load_n((addr), (memorder)) != (expected)) \
+		rte_pause(); \
+} while (0)
+#else
+#define __rte_wait_until_equal(addr, expected, memorder) do { \
+	while (*(addr) != (expected)) \
+		rte_pause(); \
+	if ((memorder) != __ATOMIC_RELAXED) \
+		rte_smp_rmb(); \
+} while (0)
+#endif
+
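+/*
+ * Wait until the 16-bit word at *addr equals the expected value; the
+ * 32- and 64-bit variants below behave identically for their widths.
+ * memorder is intended to be __ATOMIC_ACQUIRE or __ATOMIC_RELAXED.
+ */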
+static __rte_always_inline void
+rte_wait_until_equal16(volatile uint16_t *addr, uint16_t expected, int memorder)
+{
+ __rte_wait_until_equal(addr, expected, memorder);
+}
+
+static __rte_always_inline void
+rte_wait_until_equal32(volatile uint32_t *addr, uint32_t expected, int memorder)
+{
+ __rte_wait_until_equal(addr, expected, memorder);
+}
+
+static __rte_always_inline void
+rte_wait_until_equal64(volatile uint64_t *addr, uint64_t expected, int memorder)
+{
+ __rte_wait_until_equal(addr, expected, memorder);
+}
+#endif /* !RTE_USE_WFE */
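+
+/*
+ * Usage sketch (hypothetical names): a consumer blocking until a
+ * producer publishes a flag, with acquire semantics so reads issued
+ * after the wait observe the producer's earlier stores:
+ *
+ *   rte_wait_until_equal32(&ctx->ready, 1, __ATOMIC_ACQUIRE);
+ *   consume(ctx->payload);
+ */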
+
#endif /* _RTE_PAUSE_H_ */