
[v3,11/12] x86/alternatives: consolidate LOCK_PREFIX macro

Message ID 1545062607-8599-12-git-send-email-yamada.masahiro@socionext.com
State New
Series x86, kbuild: revert macrofying inline assembly code

Commit Message

Masahiro Yamada Dec. 17, 2018, 4:03 p.m. UTC
The LOCK_PREFIX macro is mostly used in inline asm, but it is also
used by atomic64_cx8_32.S.

Let's unify the definition by using the ASM() macro.

This was previously cleaned up by commit 77f48ec28e4c ("x86/alternatives:
Macrofy lock prefixes to work around GCC inlining bugs").

Now, I am refactoring the code without using that macro approach.

The new header <asm/alternative-common.h> contains macros that can be
used from both C and assembly.
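
For illustration, ASM() is meant to expand to a raw assembler statement
when the header is built into *.S files, and to a quoted inline-asm
string when built into C.  The helper itself is introduced earlier in
this series; the sketch below only shows the idea and assumes a
plausible definition rather than quoting the real one:

  /* Illustrative sketch only -- not the exact helper used by this series. */
  #include <linux/stringify.h>

  #ifdef __ASSEMBLY__
  /* In *.S files, emit the statement as-is; ';' terminates it for gas. */
  #define ASM(x...)       x ;
  #else
  /* In C, turn the statement into one line of a quoted inline-asm string. */
  #define ASM(x...)       " " __stringify(x) "\n"
  #endif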

Signed-off-by: Masahiro Yamada <yamada.masahiro@socionext.com>

---
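
Callers are not expected to change.  For illustration (paraphrased, not
a verbatim excerpt from the tree), a C-side user keeps concatenating
LOCK_PREFIX into its inline-asm string, while *.S code such as
atomic64_cx8_32.S keeps writing LOCK_PREFIX in front of the locked
instruction as before:

  /* Illustrative C usage -- example_atomic_inc() is a made-up name. */
  static inline void example_atomic_inc(atomic_t *v)
  {
          asm volatile(LOCK_PREFIX "incl %0"
                       : "+m" (v->counter));
  }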

 arch/x86/include/asm/alternative-asm.h    | 14 +--------
 arch/x86/include/asm/alternative-common.h | 47 +++++++++++++++++++++++++++++++
 arch/x86/include/asm/alternative.h        | 37 +-----------------------
 3 files changed, 49 insertions(+), 49 deletions(-)
 create mode 100644 arch/x86/include/asm/alternative-common.h

-- 
2.7.4

Patch

diff --git a/arch/x86/include/asm/alternative-asm.h b/arch/x86/include/asm/alternative-asm.h
index 31b627b..7425514 100644
--- a/arch/x86/include/asm/alternative-asm.h
+++ b/arch/x86/include/asm/alternative-asm.h
@@ -4,21 +4,9 @@ 
 
 #ifdef __ASSEMBLY__
 
+#include <asm/alternative-common.h>
 #include <asm/asm.h>
 
-#ifdef CONFIG_SMP
-	.macro LOCK_PREFIX
-672:	lock
-	.pushsection .smp_locks,"a"
-	.balign 4
-	.long 672b - .
-	.popsection
-	.endm
-#else
-	.macro LOCK_PREFIX
-	.endm
-#endif
-
 /*
  * Issue one struct alt_instr descriptor entry (need to put it into
  * the section .altinstructions, see below). This entry contains
diff --git a/arch/x86/include/asm/alternative-common.h b/arch/x86/include/asm/alternative-common.h
new file mode 100644
index 0000000..ae0b58f
--- /dev/null
+++ b/arch/x86/include/asm/alternative-common.h
@@ -0,0 +1,47 @@ 
+/* SPDX-License-Identifier: GPL-2.0 */
+#ifndef _ASM_X86_ALTERNATIVE_COMMON_H
+#define _ASM_X86_ALTERNATIVE_COMMON_H
+
+#include <linux/linkage.h>
+
+/*
+ * Alternative inline assembly for SMP.
+ *
+ * The LOCK_PREFIX macro defined here replaces the LOCK and
+ * LOCK_PREFIX macros used everywhere in the source tree.
+ *
+ * SMP alternatives use the same data structures as the other
+ * alternatives and the X86_FEATURE_UP flag to indicate the case of a
+ * UP system running a SMP kernel.  The existing apply_alternatives()
+ * works fine for patching a SMP kernel for UP.
+ *
+ * The SMP alternative tables can be kept after boot and contain both
+ * UP and SMP versions of the instructions to allow switching back to
+ * SMP at runtime, when hotplugging in a new CPU, which is especially
+ * useful in virtualized environments.
+ *
+ * The very common lock prefix is handled as special case in a
+ * separate table which is a pure address list without replacement ptr
+ * and size information.  That keeps the table sizes small.
+ */
+
+#include <linux/linkage.h>
+#include <linux/stringify.h>
+
+#ifdef CONFIG_SMP
+
+#define LOCK_PREFIX_HERE			 \
+ASM(	.pushsection .smp_locks,"a"		)\
+ASM(	.balign 4				)\
+ASM(	.long 671f - .				)\
+ASM(	.popsection				)\
+ASM( 671:					)
+
+#define LOCK_PREFIX	LOCK_PREFIX_HERE	ASM(lock)
+
+#else /* ! CONFIG_SMP */
+#define LOCK_PREFIX_HERE	ASM()
+#define LOCK_PREFIX		ASM()
+#endif
+
+#endif  /* _ASM_X86_ALTERNATIVE_COMMON_H */
diff --git a/arch/x86/include/asm/alternative.h b/arch/x86/include/asm/alternative.h
index 4cd6a3b..157967c 100644
--- a/arch/x86/include/asm/alternative.h
+++ b/arch/x86/include/asm/alternative.h
@@ -7,44 +7,9 @@ 
 #include <linux/types.h>
 #include <linux/stddef.h>
 #include <linux/stringify.h>
+#include <asm/alternative-common.h>
 #include <asm/asm.h>
 
-/*
- * Alternative inline assembly for SMP.
- *
- * The LOCK_PREFIX macro defined here replaces the LOCK and
- * LOCK_PREFIX macros used everywhere in the source tree.
- *
- * SMP alternatives use the same data structures as the other
- * alternatives and the X86_FEATURE_UP flag to indicate the case of a
- * UP system running a SMP kernel.  The existing apply_alternatives()
- * works fine for patching a SMP kernel for UP.
- *
- * The SMP alternative tables can be kept after boot and contain both
- * UP and SMP versions of the instructions to allow switching back to
- * SMP at runtime, when hotplugging in a new CPU, which is especially
- * useful in virtualized environments.
- *
- * The very common lock prefix is handled as special case in a
- * separate table which is a pure address list without replacement ptr
- * and size information.  That keeps the table sizes small.
- */
-
-#ifdef CONFIG_SMP
-#define LOCK_PREFIX_HERE \
-		".pushsection .smp_locks,\"a\"\n"	\
-		".balign 4\n"				\
-		".long 671f - .\n" /* offset */		\
-		".popsection\n"				\
-		"671:"
-
-#define LOCK_PREFIX LOCK_PREFIX_HERE "\n\tlock; "
-
-#else /* ! CONFIG_SMP */
-#define LOCK_PREFIX_HERE ""
-#define LOCK_PREFIX ""
-#endif
-
 struct alt_instr {
 	s32 instr_offset;	/* original instruction */
 	s32 repl_offset;	/* offset to replacement instruction */