[v1,6/7] mem: promote DMA mask APIs to stable
Checks
Commit Message
As per ABI policy, move the formerly experimental APIs to the stable
section.
Signed-off-by: Anatoly Burakov <anatoly.burakov@intel.com>
---
lib/eal/include/rte_memory.h | 12 ------------
lib/eal/version.map | 6 +++---
2 files changed, 3 insertions(+), 15 deletions(-)
Comments
On 10/09/2021 13:30, Anatoly Burakov wrote:
> As per ABI policy, move the formerly experimental API's to the stable
> section.
>
> Signed-off-by: Anatoly Burakov <anatoly.burakov@intel.com>
> ---
> lib/eal/include/rte_memory.h | 12 ------------
> lib/eal/version.map | 6 +++---
> 2 files changed, 3 insertions(+), 15 deletions(-)
>
Acked-by: Ray Kinsella <mdr@ashroe.eu>
@@ -553,22 +553,15 @@ unsigned rte_memory_get_nchannel(void);
unsigned rte_memory_get_nrank(void);
/**
- * @warning
- * @b EXPERIMENTAL: this API may change without prior notice
- *
* Check if all currently allocated memory segments are compliant with
* supplied DMA address width.
*
* @param maskbits
* Address width to check against.
*/
-__rte_experimental
int rte_mem_check_dma_mask(uint8_t maskbits);
/**
- * @warning
- * @b EXPERIMENTAL: this API may change without prior notice
- *
* Check if all currently allocated memory segments are compliant with
* supplied DMA address width. This function will use
* rte_memseg_walk_thread_unsafe instead of rte_memseg_walk implying
@@ -581,18 +574,13 @@ int rte_mem_check_dma_mask(uint8_t maskbits);
* @param maskbits
* Address width to check against.
*/
-__rte_experimental
int rte_mem_check_dma_mask_thread_unsafe(uint8_t maskbits);
/**
- * @warning
- * @b EXPERIMENTAL: this API may change without prior notice
- *
* Set dma mask to use once memory initialization is done. Previous functions
* rte_mem_check_dma_mask and rte_mem_check_dma_mask_thread_unsafe can not be
* used safely until memory has been initialized.
*/
-__rte_experimental
void rte_mem_set_dma_mask(uint8_t maskbits);
/**
@@ -174,10 +174,13 @@ DPDK_22 {
rte_mcfg_tailq_write_unlock;
rte_mem_alloc_validator_register;
rte_mem_alloc_validator_unregister;
+ rte_mem_check_dma_mask;
+ rte_mem_check_dma_mask_thread_unsafe;
rte_mem_event_callback_register;
rte_mem_event_callback_unregister;
rte_mem_iova2virt;
rte_mem_lock_page;
+ rte_mem_set_dma_mask;
rte_mem_virt2iova;
rte_mem_virt2memseg;
rte_mem_virt2memseg_list;
@@ -293,7 +296,6 @@ EXPERIMENTAL {
rte_dev_event_monitor_start; # WINDOWS_NO_EXPORT
rte_dev_event_monitor_stop; # WINDOWS_NO_EXPORT
rte_log_register_type_and_pick_level;
- rte_mem_check_dma_mask;
# added in 18.08
rte_class_find;
@@ -308,8 +310,6 @@ EXPERIMENTAL {
rte_dev_event_callback_process;
rte_dev_hotplug_handle_disable; # WINDOWS_NO_EXPORT
rte_dev_hotplug_handle_enable; # WINDOWS_NO_EXPORT
- rte_mem_check_dma_mask_thread_unsafe;
- rte_mem_set_dma_mask;
# added in 19.05
rte_dev_dma_map;