Message ID | 20200626203502.20658-2-honnappa.nagarahalli@arm.com
---|---
State | New
Series | [v2,1/2] eal/arm: generic counter based loop for CPU freq calculation
On Sat, Jun 27, 2020 at 2:05 AM Honnappa Nagarahalli
<honnappa.nagarahalli@arm.com> wrote:
>
> Change the inline functions to use __rte_always_inline to be
> consistent with rest of the inline functions.
>
> Signed-off-by: Honnappa Nagarahalli <honnappa.nagarahalli@arm.com>

Acked-by: Jerin Jacob <jerinj@marvell.com>

> ---
>  lib/librte_eal/arm/include/rte_cycles_64.h | 13 ++++++++-----
>  1 file changed, 8 insertions(+), 5 deletions(-)
>
> diff --git a/lib/librte_eal/arm/include/rte_cycles_64.h b/lib/librte_eal/arm/include/rte_cycles_64.h
> index e41f9dbd6..029fdc435 100644
> --- a/lib/librte_eal/arm/include/rte_cycles_64.h
> +++ b/lib/librte_eal/arm/include/rte_cycles_64.h
> @@ -50,7 +50,7 @@ __rte_arm64_cntvct_precise(void)
>   * This call is portable to any ARMv8 architecture, however, typically
>   * cntvct_el0 runs at <= 100MHz and it may be imprecise for some tasks.
>   */
> -static inline uint64_t
> +static __rte_always_inline uint64_t
>  rte_rdtsc(void)
>  {
>  	return __rte_arm64_cntvct();
> @@ -85,22 +85,25 @@ __rte_arm64_pmccntr(void)
>  	return tsc;
>  }
>
> -static inline uint64_t
> +static __rte_always_inline uint64_t
>  rte_rdtsc(void)
>  {
>  	return __rte_arm64_pmccntr();
>  }
>  #endif
>
> -static inline uint64_t
> +static __rte_always_inline uint64_t
>  rte_rdtsc_precise(void)
>  {
>  	asm volatile("isb" : : : "memory");
>  	return rte_rdtsc();
>  }
>
> -static inline uint64_t
> -rte_get_tsc_cycles(void) { return rte_rdtsc(); }
> +static __rte_always_inline uint64_t
> +rte_get_tsc_cycles(void)
> +{
> +	return rte_rdtsc();
> +}
>
>  #ifdef __cplusplus
>  }
> --
> 2.17.1
>
diff --git a/lib/librte_eal/arm/include/rte_cycles_64.h b/lib/librte_eal/arm/include/rte_cycles_64.h
index e41f9dbd6..029fdc435 100644
--- a/lib/librte_eal/arm/include/rte_cycles_64.h
+++ b/lib/librte_eal/arm/include/rte_cycles_64.h
@@ -50,7 +50,7 @@ __rte_arm64_cntvct_precise(void)
  * This call is portable to any ARMv8 architecture, however, typically
  * cntvct_el0 runs at <= 100MHz and it may be imprecise for some tasks.
  */
-static inline uint64_t
+static __rte_always_inline uint64_t
 rte_rdtsc(void)
 {
 	return __rte_arm64_cntvct();
@@ -85,22 +85,25 @@ __rte_arm64_pmccntr(void)
 	return tsc;
 }
 
-static inline uint64_t
+static __rte_always_inline uint64_t
 rte_rdtsc(void)
 {
 	return __rte_arm64_pmccntr();
 }
 #endif
 
-static inline uint64_t
+static __rte_always_inline uint64_t
 rte_rdtsc_precise(void)
 {
 	asm volatile("isb" : : : "memory");
 	return rte_rdtsc();
 }
 
-static inline uint64_t
-rte_get_tsc_cycles(void) { return rte_rdtsc(); }
+static __rte_always_inline uint64_t
+rte_get_tsc_cycles(void)
+{
+	return rte_rdtsc();
+}
 
 #ifdef __cplusplus
 }
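The helpers changed in this diff are the ones DPDK applications call for cycle-level timing. As a quick illustration (not part of the patch, and assuming a standard DPDK build where rte_eal_init() has estimated the counter frequency), a minimal caller might look like the following sketch:

```c
#include <stdio.h>
#include <inttypes.h>

#include <rte_eal.h>      /* rte_eal_init() */
#include <rte_cycles.h>   /* rte_rdtsc_precise(), rte_get_tsc_hz() */

int
main(int argc, char **argv)
{
	uint64_t start, end, hz;

	/* EAL init also estimates the counter frequency used below. */
	if (rte_eal_init(argc, argv) < 0)
		return 1;

	start = rte_rdtsc_precise();   /* isb + counter read on arm64 */
	/* ... code being measured would run here ... */
	end = rte_rdtsc_precise();

	hz = rte_get_tsc_hz();         /* counter ticks per second */
	printf("measured %" PRIu64 " cycles at %" PRIu64 " Hz\n",
	       end - start, hz);

	return 0;
}
```

With the helpers force-inlined, each timestamp should compile down to little more than the barrier and the counter read at the call site, keeping the measurement overhead itself low.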
Change the inline functions to use __rte_always_inline to be
consistent with rest of the inline functions.

Signed-off-by: Honnappa Nagarahalli <honnappa.nagarahalli@arm.com>
---
 lib/librte_eal/arm/include/rte_cycles_64.h | 13 ++++++++-----
 1 file changed, 8 insertions(+), 5 deletions(-)

--
2.17.1
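For context on the attribute the commit message refers to: __rte_always_inline is DPDK's wrapper around the compiler's always_inline attribute (declared in rte_common.h). The snippet below is only a rough, hypothetical sketch of the idea behind such a macro, not the upstream definition:

```c
#include <inttypes.h>
#include <stdio.h>

/*
 * Hypothetical stand-in for __rte_always_inline: on GCC/Clang the
 * always_inline attribute makes the compiler inline the call even when
 * its heuristics (or -O0) would otherwise leave a function call around
 * what is essentially a single register read.
 */
#if defined(__GNUC__) || defined(__clang__)
#define my_always_inline inline __attribute__((always_inline))
#else
#define my_always_inline inline
#endif

static my_always_inline uint64_t
read_counter(void)
{
	/* Placeholder for a counter read such as cntvct_el0 on arm64. */
	return 42;
}

int
main(void)
{
	printf("counter = %" PRIu64 "\n", read_counter());
	return 0;
}
```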