--- a/drivers/char/random.c
+++ b/drivers/char/random.c
@@ -235,7 +235,7 @@ struct crng {

static DEFINE_PER_CPU(struct crng, crngs) = {
.generation = ULONG_MAX,
- .lock.lock = __SPIN_LOCK_UNLOCKED(crngs.lock.lock),
+ .lock = INIT_LOCAL_LOCK(crngs.lock),
};

/* Used by crng_reseed() and crng_make_state() to extract a new seed from the input pool. */
@@ -514,7 +514,7 @@ struct batch_ ##type { \
}; \
\
static DEFINE_PER_CPU(struct batch_ ##type, batched_entropy_ ##type) = { \
- .lock.lock = __SPIN_LOCK_UNLOCKED(batched_entropy_ ##type.lock.lock), \
+ .lock = INIT_LOCAL_LOCK(batched_entropy_ ##type.lock), \
.position = UINT_MAX \
}; \
\
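Both random.c sites above now initialize the lock through INIT_LOCAL_LOCK() instead of naming the RT lock's inner spinlock field, so the initializer no longer depends on what local_lock_t expands to. Below is a minimal user-space sketch of the batched-entropy template pattern; local_lock_t, INIT_LOCAL_LOCK, the array size and the plain static variable are simplified stand-ins for the kernel's types and DEFINE_PER_CPU, not the real definitions.

#include <limits.h>
#include <stdint.h>
#include <stdio.h>

/* Kernel-style fixed-width names, so the pasted identifiers match random.c. */
typedef uint32_t u32;
typedef uint64_t u64;

/* Stand-ins for the kernel's local_lock_t / INIT_LOCAL_LOCK; the real ones
 * differ between PREEMPT_RT and !PREEMPT_RT builds, which is exactly why
 * random.c now goes through the macro instead of naming inner fields. */
typedef struct { int dummy; } local_lock_t;
#define INIT_LOCAL_LOCK(lvar) { .dummy = 0 }

/* Same shape as the batched-entropy template in random.c: "##" pastes the
 * element type into the struct tag and the variable name, and the lock is
 * initialized through INIT_LOCAL_LOCK() so the template never has to know
 * what local_lock_t looks like. */
#define DEFINE_BATCHED_ENTROPY(type)                                    \
struct batch_ ##type {                                                  \
        type entropy[16];                                               \
        local_lock_t lock;                                              \
        unsigned long generation;                                       \
        unsigned int position;                                          \
};                                                                      \
                                                                        \
static struct batch_ ##type batched_entropy_ ##type = {                 \
        .lock = INIT_LOCAL_LOCK(batched_entropy_ ##type.lock),          \
        .position = UINT_MAX                                            \
};

DEFINE_BATCHED_ENTROPY(u64)
DEFINE_BATCHED_ENTROPY(u32)

int main(void)
{
        printf("batch_u64 position: %u\n", batched_entropy_u64.position);
        printf("batch_u32 position: %u\n", batched_entropy_u32.position);
        return 0;
}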
--- a/include/linux/locallock.h
+++ b/include/linux/locallock.h
@@ -23,6 +23,8 @@ struct local_irq_lock {
unsigned long flags;
};

+#define INIT_LOCAL_LOCK(lvar) { .lock = __SPIN_LOCK_UNLOCKED((lvar).lock.lock) }
+
#define DEFINE_LOCAL_IRQ_LOCK(lvar) \
DEFINE_PER_CPU(struct local_irq_lock, lvar) = { \
.lock = __SPIN_LOCK_UNLOCKED((lvar).lock) }
@@ -241,6 +243,9 @@ static inline int __local_unlock_irqrestore(struct local_irq_lock *lv,
#else /* PREEMPT_RT_BASE */
+struct local_irq_lock { };
+#define INIT_LOCAL_LOCK(lvar) { }
+
#define DEFINE_LOCAL_IRQ_LOCK(lvar) __typeof__(const int) lvar
#define DECLARE_LOCAL_IRQ_LOCK(lvar) extern __typeof__(const int) lvar
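Taken together, the two INIT_LOCAL_LOCK() definitions let the same random.c initializer build with and without PREEMPT_RT_BASE. The sketch below is a user-space approximation only: spinlock_t, __SPIN_LOCK_UNLOCKED and the crng layout are mocked up, and the per-CPU machinery is dropped. In the kernel, __SPIN_LOCK_UNLOCKED()'s argument only feeds the lockdep name, which is why it is never evaluated here either.

#include <limits.h>
#include <stdio.h>

/* Flip to 0 to exercise the !PREEMPT_RT_BASE branch. */
#define PREEMPT_RT_BASE 1

/* Mock spinlock; the initializer's argument is discarded, mirroring how the
 * real __SPIN_LOCK_UNLOCKED() only uses it for the lockdep name. */
typedef struct { int locked; } spinlock_t;
#define __SPIN_LOCK_UNLOCKED(lockname) { .locked = 0 }

#if PREEMPT_RT_BASE
/* RT: the local lock carries a real lock that needs a real initializer. */
struct local_irq_lock { spinlock_t lock; };
#define INIT_LOCAL_LOCK(lvar) { .lock = __SPIN_LOCK_UNLOCKED((lvar).lock.lock) }
#else
/* !RT: the lock is an empty placeholder (GNU C empty struct), so the
 * initializer is empty braces as well. */
struct local_irq_lock { };
#define INIT_LOCAL_LOCK(lvar) { }
#endif

typedef struct local_irq_lock local_lock_t;

/* Shape of the per-CPU crng context from random.c (simplified, not per-CPU). */
struct crng {
        unsigned long generation;
        local_lock_t lock;
};

/* Same initializer as the patched random.c: no more reaching into .lock.lock. */
static struct crng crngs = {
        .generation = ULONG_MAX,
        .lock = INIT_LOCAL_LOCK(crngs.lock),
};

int main(void)
{
        printf("generation starts at %lx\n", crngs.generation);
        return 0;
}

Keeping the empty-struct/empty-initializer pair on the !RT side means the call sites need no #ifdefs: the unused lock member occupies no space and the designated initializer is still well-formed under GNU C, as in the kernel.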