[1/3] mem: provide thread-unsafe contig walk variant
Checks
Commit Message
Sometimes, user code needs to walk the memseg list while being inside
a memory-related callback. Rather than making everyone copy around
the same iteration code and depend on DPDK internals, provide an
official way to do memseg_contig_walk() inside callbacks.
Signed-off-by: Anatoly Burakov <anatoly.burakov@intel.com>
---
lib/librte_eal/common/eal_common_memory.c | 28 ++++++++++++----------
lib/librte_eal/common/include/rte_memory.h | 18 ++++++++++++++
lib/librte_eal/rte_eal_version.map | 1 +
3 files changed, 35 insertions(+), 12 deletions(-)
Comments
12/06/2018 11:46, Anatoly Burakov:
> Sometimes, user code needs to walk memseg list while being inside
> a memory-related callback. Rather than making everyone copy around
> the same iteration code and depending on DPDK internals, provide an
> official way to do memseg_contig_walk() inside callbacks.
>
> Signed-off-by: Anatoly Burakov <anatoly.burakov@intel.com>
Series applied, thanks
@@ -788,14 +788,11 @@ rte_mem_lock_page(const void *virt)
}
int __rte_experimental
-rte_memseg_contig_walk(rte_memseg_contig_walk_t func, void *arg)
+rte_memseg_contig_walk_thread_unsafe(rte_memseg_contig_walk_t func, void *arg)
{
struct rte_mem_config *mcfg = rte_eal_get_configuration()->mem_config;
int i, ms_idx, ret = 0;
- /* do not allow allocations/frees/init while we iterate */
- rte_rwlock_read_lock(&mcfg->memory_hotplug_lock);
-
for (i = 0; i < RTE_MAX_MEMSEG_LISTS; i++) {
struct rte_memseg_list *msl = &mcfg->memsegs[i];
const struct rte_memseg *ms;
@@ -820,19 +817,26 @@ rte_memseg_contig_walk(rte_memseg_contig_walk_t func, void *arg)
len = n_segs * msl->page_sz;
ret = func(msl, ms, len, arg);
- if (ret < 0) {
- ret = -1;
- goto out;
- } else if (ret > 0) {
- ret = 1;
- goto out;
- }
+ if (ret)
+ return ret;
ms_idx = rte_fbarray_find_next_used(arr,
ms_idx + n_segs);
}
}
-out:
+ return 0;
+}
+
+int __rte_experimental
+rte_memseg_contig_walk(rte_memseg_contig_walk_t func, void *arg)
+{
+ struct rte_mem_config *mcfg = rte_eal_get_configuration()->mem_config;
+ int ret = 0;
+
+ /* do not allow allocations/frees/init while we iterate */
+ rte_rwlock_read_lock(&mcfg->memory_hotplug_lock);
+ ret = rte_memseg_contig_walk_thread_unsafe(func, arg);
rte_rwlock_read_unlock(&mcfg->memory_hotplug_lock);
+
return ret;
}
@@ -263,6 +263,24 @@ rte_memseg_contig_walk(rte_memseg_contig_walk_t func, void *arg);
int __rte_experimental
rte_memseg_list_walk(rte_memseg_list_walk_t func, void *arg);
+/**
+ * Walk each VA-contiguous area without performing any locking.
+ *
+ * @note This function does not perform any locking, and is only safe to call
+ * from within memory-related callback functions.
+ *
+ * @param func
+ * Iterator function
+ * @param arg
+ * Argument passed to iterator
+ * @return
+ * 0 if walked over the entire list
+ * 1 if stopped by the user
+ * -1 if user function reported error
+ */
+int __rte_experimental
+rte_memseg_contig_walk_thread_unsafe(rte_memseg_contig_walk_t func, void *arg);
+
/**
* Dump the physical memory layout to a file.
*
@@ -286,6 +286,7 @@ EXPERIMENTAL {
rte_mem_virt2memseg;
rte_mem_virt2memseg_list;
rte_memseg_contig_walk;
+ rte_memseg_contig_walk_thread_unsafe;
rte_memseg_list_walk;
rte_memseg_walk;
rte_mp_action_register;