[v2,2/2] eal/arm: change inline functions to always inline

Message ID 20200626203502.20658-2-honnappa.nagarahalli@arm.com (mailing list archive)
State Accepted, archived
Delegated to: David Marchand
Series [v2,1/2] eal/arm: generic counter based loop for CPU freq calculation

Checks

Context Check Description
ci/checkpatch success coding style OK
ci/travis-robot success Travis build: passed
ci/Intel-compilation success Compilation OK

Commit Message

Honnappa Nagarahalli June 26, 2020, 8:35 p.m. UTC
  Change the inline functions to use __rte_always_inline to be
consistent with the rest of the inline functions.

Signed-off-by: Honnappa Nagarahalli <honnappa.nagarahalli@arm.com>
---
 lib/librte_eal/arm/include/rte_cycles_64.h | 13 ++++++++-----
 1 file changed, 8 insertions(+), 5 deletions(-)
  
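For readers outside DPDK: __rte_always_inline wraps the compiler's
always_inline attribute (see rte_common.h), turning the "inline" hint
into a requirement. The sketch below is illustrative only and is not
part of the patch; add_hint() and add_forced() are made-up helper names
used purely to contrast the two qualifiers.

#include <stdint.h>
#include <stdio.h>
#include <inttypes.h>

/* Reproduced for illustration; DPDK defines this in rte_common.h. */
#define __rte_always_inline inline __attribute__((always_inline))

/* "static inline" is only a hint; at -O0, or when the compiler's
 * heuristics decline, the call may remain a real function call. */
static inline uint64_t
add_hint(uint64_t a, uint64_t b)
{
	return a + b;
}

/* always_inline removes the choice: the body is expanded at the call
 * site even at -O0, keeping hot helpers free of call overhead. */
static __rte_always_inline uint64_t
add_forced(uint64_t a, uint64_t b)
{
	return a + b;
}

int
main(void)
{
	/* Build with "gcc -O0 -S" and compare the output: add_hint() is
	 * typically emitted as a call, add_forced() is inlined. */
	printf("%" PRIu64 " %" PRIu64 "\n",
	       add_hint(1, 2), add_forced(3, 4));
	return 0;
}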

Comments

Jerin Jacob July 7, 2020, 2:05 a.m. UTC | #1
On Sat, Jun 27, 2020 at 2:05 AM Honnappa Nagarahalli
<honnappa.nagarahalli@arm.com> wrote:
>
> Change the inline functions to use __rte_always_inline to be
> consistent with the rest of the inline functions.
>
> Signed-off-by: Honnappa Nagarahalli <honnappa.nagarahalli@arm.com>

Acked-by: Jerin Jacob <jerinj@marvell.com>



> ---
>  lib/librte_eal/arm/include/rte_cycles_64.h | 13 ++++++++-----
>  1 file changed, 8 insertions(+), 5 deletions(-)
>
> diff --git a/lib/librte_eal/arm/include/rte_cycles_64.h b/lib/librte_eal/arm/include/rte_cycles_64.h
> index e41f9dbd6..029fdc435 100644
> --- a/lib/librte_eal/arm/include/rte_cycles_64.h
> +++ b/lib/librte_eal/arm/include/rte_cycles_64.h
> @@ -50,7 +50,7 @@ __rte_arm64_cntvct_precise(void)
>   * This call is portable to any ARMv8 architecture, however, typically
>   * cntvct_el0 runs at <= 100MHz and it may be imprecise for some tasks.
>   */
> -static inline uint64_t
> +static __rte_always_inline uint64_t
>  rte_rdtsc(void)
>  {
>         return __rte_arm64_cntvct();
> @@ -85,22 +85,25 @@ __rte_arm64_pmccntr(void)
>         return tsc;
>  }
>
> -static inline uint64_t
> +static __rte_always_inline uint64_t
>  rte_rdtsc(void)
>  {
>         return __rte_arm64_pmccntr();
>  }
>  #endif
>
> -static inline uint64_t
> +static __rte_always_inline uint64_t
>  rte_rdtsc_precise(void)
>  {
>         asm volatile("isb" : : : "memory");
>         return rte_rdtsc();
>  }
>
> -static inline uint64_t
> -rte_get_tsc_cycles(void) { return rte_rdtsc(); }
> +static __rte_always_inline uint64_t
> +rte_get_tsc_cycles(void)
> +{
> +       return rte_rdtsc();
> +}
>
>  #ifdef __cplusplus
>  }
> --
> 2.17.1
>
  

Patch

diff --git a/lib/librte_eal/arm/include/rte_cycles_64.h b/lib/librte_eal/arm/include/rte_cycles_64.h
index e41f9dbd6..029fdc435 100644
--- a/lib/librte_eal/arm/include/rte_cycles_64.h
+++ b/lib/librte_eal/arm/include/rte_cycles_64.h
@@ -50,7 +50,7 @@  __rte_arm64_cntvct_precise(void)
  * This call is portable to any ARMv8 architecture, however, typically
  * cntvct_el0 runs at <= 100MHz and it may be imprecise for some tasks.
  */
-static inline uint64_t
+static __rte_always_inline uint64_t
 rte_rdtsc(void)
 {
 	return __rte_arm64_cntvct();
@@ -85,22 +85,25 @@  __rte_arm64_pmccntr(void)
 	return tsc;
 }
 
-static inline uint64_t
+static __rte_always_inline uint64_t
 rte_rdtsc(void)
 {
 	return __rte_arm64_pmccntr();
 }
 #endif
 
-static inline uint64_t
+static __rte_always_inline uint64_t
 rte_rdtsc_precise(void)
 {
 	asm volatile("isb" : : : "memory");
 	return rte_rdtsc();
 }
 
-static inline uint64_t
-rte_get_tsc_cycles(void) { return rte_rdtsc(); }
+static __rte_always_inline uint64_t
+rte_get_tsc_cycles(void)
+{
+	return rte_rdtsc();
+}
 
 #ifdef __cplusplus
 }
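
A minimal usage sketch (not part of the patch series) showing why
forced inlining matters here: these helpers sit directly inside timing
measurements. It assumes a DPDK build environment providing
rte_cycles.h; work() is a hypothetical placeholder for the code being
measured. On ARMv8 the default backend reads cntvct_el0, so the deltas
are generic-timer ticks rather than CPU cycles.

#include <stdio.h>
#include <stdint.h>
#include <inttypes.h>
#include <rte_cycles.h>

/* Placeholder for the code whose cost is being measured. */
static void
work(void)
{
	volatile uint64_t acc = 0;
	for (int i = 0; i < 1000; i++)
		acc += i;
}

int
main(void)
{
	/* rte_rdtsc_precise() issues an isb barrier so earlier
	 * instructions retire before the counter is sampled;
	 * rte_rdtsc() is cheaper but may be reordered around the
	 * measured region. */
	uint64_t start = rte_rdtsc_precise();
	work();
	uint64_t end = rte_rdtsc_precise();

	printf("elapsed: %" PRIu64 " counter ticks\n", end - start);
	return 0;
}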