[edk2,2/7] ArmPkg/ArmExceptionLib: fold exception handler prologue into vector table

Message ID 1458220815-6944-3-git-send-email-ard.biesheuvel@linaro.org
State Accepted
Commit 0dbbaa557331fe2ba26c3e2d66be5d21d1f5f7bb
Headers show

Commit Message

Ard Biesheuvel March 17, 2016, 1:20 p.m.
Unlike the AArch32 vector table, which has room for a single instruction
for each exception type, the AArch64 exception table has 128-byte slots,
which can easily hold the shared prologues that are emitted out of line.

So refactor this code into a single macro, and expand it into each vector
table slot. Since the address of the common handler entry point is no
longer patched in by the C code, we can just emit the literal into each
vector entry directly.

Contributed-under: TianoCore Contribution Agreement 1.0
Signed-off-by: Ard Biesheuvel <ard.biesheuvel@linaro.org>

---
 ArmPkg/Library/ArmExceptionLib/AArch64/ExceptionSupport.S | 112 +++++++-------------
 1 file changed, 39 insertions(+), 73 deletions(-)

-- 
2.5.0

_______________________________________________
edk2-devel mailing list
edk2-devel@lists.01.org
https://lists.01.org/mailman/listinfo/edk2-devel

Comments

Ard Biesheuvel March 21, 2016, 5:50 p.m. | #1
On 21 March 2016 at 16:43, Cohen, Eugene <eugene@hp.com> wrote:
>> Although I see why you defined a macro for saving exception context

>> (avoiding duplication) I'm not sure if this is a good idea.  I'm envisioning

>> stepping through the exception handling with a debugger and the resulting

>> confusion because of the code hiding behind the macro.  My preference

>> would be to tolerate the duplication (it's just 5 lines of assembly) in favor of

>> readability / debuggability.

>

> Ha, I spoke too soon!  Patch 7 increases the duplication of non-macroizing the exception handler to something like 25 lines of assembly so I agree that the macro is a better approach.  If only you created the patches in the reverse order... :)

>

> [Note to self: don't hit send on patch feedback until all patches in the series are reviewed.]

>

> Series Reviewed-by: Eugene Cohen <eugene@hp.com>

>


Thanks!
_______________________________________________
edk2-devel mailing list
edk2-devel@lists.01.org
https://lists.01.org/mailman/listinfo/edk2-devel

Patch

diff --git a/ArmPkg/Library/ArmExceptionLib/AArch64/ExceptionSupport.S b/ArmPkg/Library/ArmExceptionLib/AArch64/ExceptionSupport.S
index 790ce009b8de..c47974b81e8b 100644
--- a/ArmPkg/Library/ArmExceptionLib/AArch64/ExceptionSupport.S
+++ b/ArmPkg/Library/ArmExceptionLib/AArch64/ExceptionSupport.S
@@ -99,8 +99,6 @@ 
 */
 
 GCC_ASM_EXPORT(ExceptionHandlersEnd)
-GCC_ASM_EXPORT(CommonExceptionEntry)
-GCC_ASM_EXPORT(AsmCommonExceptionEntry)
 GCC_ASM_EXPORT(CommonCExceptionHandler)
 
 .text
@@ -172,142 +170,110 @@  ASM_PFX(ExceptionHandlersStart):
 VECTOR_BASE(ExceptionHandlersStart)
 #endif
 
+#undef  REG_PAIR
+#undef  REG_ONE
+#define REG_PAIR(REG1, REG2, OFFSET, CONTEXT_SIZE)  stp  REG1, REG2, [sp, #(OFFSET-CONTEXT_SIZE)]
+#define REG_ONE(REG1, OFFSET, CONTEXT_SIZE)         stur REG1, [sp, #(OFFSET-CONTEXT_SIZE)]
+
+  .macro  ExceptionEntry, val
+  // Move the stackpointer so we can reach our structure with the str instruction.
+  sub sp, sp, #(FP_CONTEXT_SIZE + SYS_CONTEXT_SIZE)
+
+  // Save all the General regs before touching x0 and x1.
+  // This does not save r31(SP) as it is special. We do that later.
+  ALL_GP_REGS
+
+  // Record the type of exception that occurred.
+  mov       x0, #\val
+
+  // Jump to our general handler to deal with all the common parts and process the exception.
+  ldr       x1, =ASM_PFX(CommonExceptionEntry)
+  br        x1
+  .ltorg
+  .endm
+
 //
 // Current EL with SP0 : 0x0 - 0x180
 //
 VECTOR_ENTRY(ExceptionHandlersStart, ARM_VECTOR_CUR_SP0_SYNC)
 ASM_PFX(SynchronousExceptionSP0):
-  b   ASM_PFX(SynchronousExceptionEntry)
+  ExceptionEntry  EXCEPT_AARCH64_SYNCHRONOUS_EXCEPTIONS
 
 VECTOR_ENTRY(ExceptionHandlersStart, ARM_VECTOR_CUR_SP0_IRQ)
 ASM_PFX(IrqSP0):
-  b   ASM_PFX(IrqEntry)
+  ExceptionEntry  EXCEPT_AARCH64_IRQ
 
 VECTOR_ENTRY(ExceptionHandlersStart, ARM_VECTOR_CUR_SP0_FIQ)
 ASM_PFX(FiqSP0):
-  b   ASM_PFX(FiqEntry)
+  ExceptionEntry  EXCEPT_AARCH64_FIQ
 
 VECTOR_ENTRY(ExceptionHandlersStart, ARM_VECTOR_CUR_SP0_SERR)
 ASM_PFX(SErrorSP0):
-  b   ASM_PFX(SErrorEntry)
+  ExceptionEntry  EXCEPT_AARCH64_SERROR
 
 //
 // Current EL with SPx: 0x200 - 0x380
 //
 VECTOR_ENTRY(ExceptionHandlersStart, ARM_VECTOR_CUR_SPx_SYNC)
 ASM_PFX(SynchronousExceptionSPx):
-  b   ASM_PFX(SynchronousExceptionEntry)
+  ExceptionEntry  EXCEPT_AARCH64_SYNCHRONOUS_EXCEPTIONS
 
 VECTOR_ENTRY(ExceptionHandlersStart, ARM_VECTOR_CUR_SPx_IRQ)
 ASM_PFX(IrqSPx):
-  b   ASM_PFX(IrqEntry)
+  ExceptionEntry  EXCEPT_AARCH64_IRQ
 
 VECTOR_ENTRY(ExceptionHandlersStart, ARM_VECTOR_CUR_SPx_FIQ)
 ASM_PFX(FiqSPx):
-  b   ASM_PFX(FiqEntry)
+  ExceptionEntry  EXCEPT_AARCH64_FIQ
 
 VECTOR_ENTRY(ExceptionHandlersStart, ARM_VECTOR_CUR_SPx_SERR)
 ASM_PFX(SErrorSPx):
-  b   ASM_PFX(SErrorEntry)
+  ExceptionEntry  EXCEPT_AARCH64_SERROR
 
 //
 // Lower EL using AArch64 : 0x400 - 0x580
 //
 VECTOR_ENTRY(ExceptionHandlersStart, ARM_VECTOR_LOW_A64_SYNC)
 ASM_PFX(SynchronousExceptionA64):
-  b   ASM_PFX(SynchronousExceptionEntry)
+  ExceptionEntry  EXCEPT_AARCH64_SYNCHRONOUS_EXCEPTIONS
 
 VECTOR_ENTRY(ExceptionHandlersStart, ARM_VECTOR_LOW_A64_IRQ)
 ASM_PFX(IrqA64):
-  b   ASM_PFX(IrqEntry)
+  ExceptionEntry  EXCEPT_AARCH64_IRQ
 
 VECTOR_ENTRY(ExceptionHandlersStart, ARM_VECTOR_LOW_A64_FIQ)
 ASM_PFX(FiqA64):
-  b   ASM_PFX(FiqEntry)
+  ExceptionEntry  EXCEPT_AARCH64_FIQ
 
 VECTOR_ENTRY(ExceptionHandlersStart, ARM_VECTOR_LOW_A64_SERR)
 ASM_PFX(SErrorA64):
-  b   ASM_PFX(SErrorEntry)
+  ExceptionEntry  EXCEPT_AARCH64_SERROR
 
 //
 // Lower EL using AArch32 : 0x600 - 0x780
 //
 VECTOR_ENTRY(ExceptionHandlersStart, ARM_VECTOR_LOW_A32_SYNC)
 ASM_PFX(SynchronousExceptionA32):
-  b   ASM_PFX(SynchronousExceptionEntry)
+  ExceptionEntry  EXCEPT_AARCH64_SYNCHRONOUS_EXCEPTIONS
 
 VECTOR_ENTRY(ExceptionHandlersStart, ARM_VECTOR_LOW_A32_IRQ)
 ASM_PFX(IrqA32):
-  b   ASM_PFX(IrqEntry)
+  ExceptionEntry  EXCEPT_AARCH64_IRQ
 
 VECTOR_ENTRY(ExceptionHandlersStart, ARM_VECTOR_LOW_A32_FIQ)
 ASM_PFX(FiqA32):
-  b   ASM_PFX(FiqEntry)
+  ExceptionEntry  EXCEPT_AARCH64_FIQ
 
 VECTOR_ENTRY(ExceptionHandlersStart, ARM_VECTOR_LOW_A32_SERR)
 ASM_PFX(SErrorA32):
-  b   ASM_PFX(SErrorEntry)
+  ExceptionEntry  EXCEPT_AARCH64_SERROR
 
 VECTOR_END(ExceptionHandlersStart)
 
-#undef  REG_PAIR
-#undef  REG_ONE
-#define REG_PAIR(REG1, REG2, OFFSET, CONTEXT_SIZE)  stp  REG1, REG2, [sp, #(OFFSET-CONTEXT_SIZE)]
-#define REG_ONE(REG1, OFFSET, CONTEXT_SIZE)         stur REG1, [sp, #(OFFSET-CONTEXT_SIZE)]
-
-ASM_PFX(SynchronousExceptionEntry):
-  // Move the stackpointer so we can reach our structure with the str instruction.
-  sub sp, sp, #(FP_CONTEXT_SIZE + SYS_CONTEXT_SIZE)
-
-  // Save all the General regs before touching x0 and x1.
-  // This does not save r31(SP) as it is special. We do that later.
-  ALL_GP_REGS
-
-  // Record the type of exception that occurred.
-  mov       x0, #EXCEPT_AARCH64_SYNCHRONOUS_EXCEPTIONS
-
-  // Jump to our general handler to deal with all the common parts and process the exception.
-  ldr       x1, ASM_PFX(CommonExceptionEntry)
-  br        x1
-
-ASM_PFX(IrqEntry):
-  sub sp, sp, #(FP_CONTEXT_SIZE + SYS_CONTEXT_SIZE)
-  ALL_GP_REGS
-  mov       x0, #EXCEPT_AARCH64_IRQ
-  ldr       x1, ASM_PFX(CommonExceptionEntry)
-  br        x1
-
-ASM_PFX(FiqEntry):
-  sub sp, sp, #(FP_CONTEXT_SIZE + SYS_CONTEXT_SIZE)
-  ALL_GP_REGS
-  mov       x0, #EXCEPT_AARCH64_FIQ
-  ldr       x1, ASM_PFX(CommonExceptionEntry)
-  br        x1
-
-ASM_PFX(SErrorEntry):
-  sub sp, sp, #(FP_CONTEXT_SIZE + SYS_CONTEXT_SIZE)
-  ALL_GP_REGS
-  mov       x0, #EXCEPT_AARCH64_SERROR
-  ldr       x1, ASM_PFX(CommonExceptionEntry)
-  br        x1
-
-
-//
-// This gets patched by the C code that patches in the vector table
-//
-.align 3
-ASM_PFX(CommonExceptionEntry):
-  .8byte ASM_PFX(AsmCommonExceptionEntry)
-
 ASM_PFX(ExceptionHandlersEnd):
 
 
-
-//
-// This code runs from CpuDxe driver loaded address. It is patched into
-// CommonExceptionEntry.
-//
-ASM_PFX(AsmCommonExceptionEntry):
+ASM_PFX(CommonExceptionEntry):
   /* NOTE:
      We have to break up the save code because the immediate value to be used
      with the SP is too big to do it all in one step so we need to shuffle the SP