[v5,1/6] stack: replace rte atomics with GCC builtin atomics

Message ID 1686087947-15471-2-git-send-email-roretzla@linux.microsoft.com (mailing list archive)
State Accepted, archived
Delegated to: David Marchand
Series: replace rte atomics with GCC builtin atomics

Checks

Context         Check      Description
ci/checkpatch   success    coding style OK

Commit Message

Tyler Retzlaff June 6, 2023, 9:45 p.m. UTC
  Replace the use of rte_atomic.h types and functions with GCC-supplied
C++11 memory model builtins.

Signed-off-by: Tyler Retzlaff <roretzla@linux.microsoft.com>
Acked-by: Morten Brørup <mb@smartsharesystems.com>
---
 lib/stack/rte_stack_lf_generic.h | 16 +++++++++-------
 1 file changed, 9 insertions(+), 7 deletions(-)
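
For context, the conversion pattern applied across the series maps the
rte_atomic64 API onto the GCC builtins roughly as in the sketch below;
the standalone variable and helper names (len, counter_read, counter_add)
are illustrative, not taken from the patch:

#include <stdint.h>

static uint64_t len; /* stands in for a counter such as list->len */

/* Old style, removed by this series:
 *
 *   uint64_t v = rte_atomic64_read((rte_atomic64_t *)&len);
 *   rte_atomic64_add((rte_atomic64_t *)&len, n);
 *
 * New style, using the GCC C++11 memory model builtins:
 */
static inline uint64_t
counter_read(void)
{
	return __atomic_load_n(&len, __ATOMIC_SEQ_CST);
}

static inline void
counter_add(uint64_t n)
{
	__atomic_fetch_add(&len, n, __ATOMIC_SEQ_CST);
}

__ATOMIC_SEQ_CST is used here as a conservative default; the NOTE
comments in the diff below mark each converted site for a later pass
that may relax the ordering.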
  

Patch

diff --git a/lib/stack/rte_stack_lf_generic.h b/lib/stack/rte_stack_lf_generic.h
index 7fa29ce..aad3747 100644
--- a/lib/stack/rte_stack_lf_generic.h
+++ b/lib/stack/rte_stack_lf_generic.h
@@ -26,8 +26,8 @@ 
 	 * elements. If the mempool is near-empty to the point that this is a
 	 * concern, the user should consider increasing the mempool size.
 	 */
-	return (unsigned int)rte_atomic64_read((rte_atomic64_t *)
-			&s->stack_lf.used.len);
+	/* NOTE: review for potential ordering optimization */
+	return __atomic_load_n(&s->stack_lf.used.len, __ATOMIC_SEQ_CST);
 }
 
 static __rte_always_inline void
@@ -67,8 +67,8 @@ 
 				1, __ATOMIC_RELEASE,
 				__ATOMIC_RELAXED);
 	} while (success == 0);
-
-	rte_atomic64_add((rte_atomic64_t *)&list->len, num);
+	/* NOTE: review for potential ordering optimization */
+	__atomic_fetch_add(&list->len, num, __ATOMIC_SEQ_CST);
 }
 
 static __rte_always_inline struct rte_stack_lf_elem *
@@ -82,14 +82,16 @@ 
 
 	/* Reserve num elements, if available */
 	while (1) {
-		uint64_t len = rte_atomic64_read((rte_atomic64_t *)&list->len);
+		/* NOTE: review for potential ordering optimization */
+		uint64_t len = __atomic_load_n(&list->len, __ATOMIC_SEQ_CST);
 
 		/* Does the list contain enough elements? */
 		if (unlikely(len < num))
 			return NULL;
 
-		if (rte_atomic64_cmpset((volatile uint64_t *)&list->len,
-					len, len - num))
+		/* NOTE: review for potential ordering optimization */
+		if (__atomic_compare_exchange_n(&list->len, &len, len - num,
+			0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST))
 			break;
 	}
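
For reference, the reservation loop converted in the last hunk can be
exercised standalone roughly as below. This is a sketch under the same
SEQ_CST ordering as the patch; list_len and try_reserve are illustrative
names, not the real rte_stack_lf internals:

#include <stdint.h>
#include <stdio.h>

static uint64_t list_len = 8; /* stands in for list->len */

/* Try to reserve num elements: return 0 on success, -1 if the list
 * does not contain enough elements. Mirrors the converted loop: load
 * the length, bail out if short, then CAS it downward.
 */
static int
try_reserve(uint64_t num)
{
	while (1) {
		uint64_t len = __atomic_load_n(&list_len, __ATOMIC_SEQ_CST);

		if (len < num)
			return -1;

		/* On CAS failure another thread changed list_len; retry.
		 * (The builtin also writes the observed value back into
		 * len, which this loop discards by reloading.)
		 */
		if (__atomic_compare_exchange_n(&list_len, &len, len - num,
				0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST))
			return 0;
	}
}

int
main(void)
{
	printf("reserve 5 -> %d\n", try_reserve(5)); /* 0, list_len now 3 */
	printf("reserve 5 -> %d\n", try_reserve(5)); /* -1, only 3 left */
	return 0;
}

Note that the fourth argument to __atomic_compare_exchange_n (0 here)
requests a strong rather than weak compare-exchange, matching the
semantics of the rte_atomic64_cmpset call being replaced.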