ArmPkg/ArmLib ArmMmuLib: Drop support for EL3/MON execution

Drop logic from the ARM architectural support libraries that can only
execute at EL3 on AArch64 or in Monitor mode on 32-bit ARM. While early
32-bit ports of EDK2 (and even some early 64-bit code) carried some
monitor logic, the UEFI spec only covers execution in the non-secure
context, and secure monitor and other secure world duties are typically
delegated to TF-A (Trusted Firmware-A).

Since there are no longer any users of this code in the EDK2 or
edk2-platforms trees, let's remove it from the core support libraries.
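
For context, callers now rely solely on the pre-existing EL1_OR_EL2()
dispatch macro in AsmMacroIoLibV8.h. Only its final two lines are
visible as context in the hunk below, so this sketch reconstructs the
rest by analogy with the EL1_OR_EL2_OR_EL3() macro being removed; the
exact guard taken on an unexpected EL is an assumption:

  // CurrentEL : 0xC = EL3; 8 = EL2; 4 = EL1
  // This only selects between EL1 and EL2, else we die.
  // Provide the Macro with a safe temp xreg to use.
  #define EL1_OR_EL2(SAFE_XREG) \
          mrs    SAFE_XREG, CurrentEL ;\
          cmp    SAFE_XREG, #0x8      ;\
          b.gt   .                    ;\
          b.eq   2f                   ;\
          cbnz   SAFE_XREG, 1f        ;\
          b      .                    ;// We should never get here

Each call site provides local labels 1: (EL1 path) and 2: (EL2 path);
the 3: (EL3) paths that this macro can no longer reach are exactly what
the hunks below delete. An unexpected EL now hangs instead of
dispatching.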

Signed-off-by: Ard Biesheuvel <ardb@kernel.org>
diff --git a/ArmPkg/Drivers/ArmGic/GicV3/AArch64/ArmGicV3.S b/ArmPkg/Drivers/ArmGic/GicV3/AArch64/ArmGicV3.S
index 20f83aa..7316502 100644
--- a/ArmPkg/Drivers/ArmGic/GicV3/AArch64/ArmGicV3.S
+++ b/ArmPkg/Drivers/ArmGic/GicV3/AArch64/ArmGicV3.S
@@ -17,7 +17,6 @@
 //
 #define ICC_SRE_EL1             S3_0_C12_C12_5
 #define ICC_SRE_EL2             S3_4_C12_C9_5
-#define ICC_SRE_EL3             S3_6_C12_C12_5
 #define ICC_IGRPEN1_EL1         S3_0_C12_C12_7
 #define ICC_EOIR1_EL1           S3_0_C12_C12_1
 #define ICC_IAR1_EL1            S3_0_C12_C12_0
@@ -32,12 +31,10 @@
 //  VOID
 //  );
 ASM_FUNC(ArmGicV3GetControlSystemRegisterEnable)
-        EL1_OR_EL2_OR_EL3(x1)
+        EL1_OR_EL2(x1)
 1:  mrs x0, ICC_SRE_EL1
         b       4f
 2:  mrs x0, ICC_SRE_EL2
-        b       4f
-3:  mrs x0, ICC_SRE_EL3
 4:  ret
 
 //VOID
@@ -46,12 +43,10 @@
 //  IN UINT32         ControlSystemRegisterEnable
 //  );
 ASM_FUNC(ArmGicV3SetControlSystemRegisterEnable)
-        EL1_OR_EL2_OR_EL3(x1)
+        EL1_OR_EL2(x1)
 1:  msr ICC_SRE_EL1, x0
         b       4f
 2:  msr ICC_SRE_EL2, x0
-        b       4f
-3:  msr ICC_SRE_EL3, x0
 4:  isb
         ret
 
diff --git a/ArmPkg/Include/AsmMacroIoLibV8.h b/ArmPkg/Include/AsmMacroIoLibV8.h
index 81164ea..a5c8635 100644
--- a/ArmPkg/Include/AsmMacroIoLibV8.h
+++ b/ArmPkg/Include/AsmMacroIoLibV8.h
@@ -23,17 +23,6 @@
         cbnz   SAFE_XREG, 1f        ;\
         b      .                    ;// We should never get here
 
-// CurrentEL : 0xC = EL3; 8 = EL2; 4 = EL1
-// This only selects between EL1 and EL2 and EL3, else we die.
-// Provide the Macro with a safe temp xreg to use.
-#define EL1_OR_EL2_OR_EL3(SAFE_XREG) \
-        mrs    SAFE_XREG, CurrentEL ;\
-        cmp    SAFE_XREG, #0x8      ;\
-        b.gt   3f                   ;\
-        b.eq   2f                   ;\
-        cbnz   SAFE_XREG, 1f        ;\
-        b      .                    ;// We should never get here
-
 #define _ASM_FUNC(Name, Section)    \
   .global   Name                  ; \
   .section  #Section, "ax"        ; \
diff --git a/ArmPkg/Library/ArmExceptionLib/AArch64/ExceptionSupport.S b/ArmPkg/Library/ArmExceptionLib/AArch64/ExceptionSupport.S
index f5cbc2e..de9d331 100644
--- a/ArmPkg/Library/ArmExceptionLib/AArch64/ExceptionSupport.S
+++ b/ArmPkg/Library/ArmExceptionLib/AArch64/ExceptionSupport.S
@@ -260,7 +260,7 @@
 
 ASM_PFX(CommonExceptionEntry):
 
-  EL1_OR_EL2_OR_EL3(x1)
+  EL1_OR_EL2(x1)
 1:mrs      x2, elr_el1   // Exception Link Register
   mrs      x3, spsr_el1  // Saved Processor Status Register 32bit
   mrs      x5, esr_el1   // EL1 Exception syndrome register 32bit
@@ -271,12 +271,6 @@
   mrs      x3, spsr_el2  // Saved Processor Status Register 32bit
   mrs      x5, esr_el2   // EL2 Exception syndrome register 32bit
   mrs      x6, far_el2   // EL2 Fault Address Register
-  b        4f
-
-3:mrs      x2, elr_el3   // Exception Link Register
-  mrs      x3, spsr_el3  // Saved Processor Status Register 32bit
-  mrs      x5, esr_el3   // EL3 Exception syndrome register 32bit
-  mrs      x6, far_el3   // EL3 Fault Address Register
 
 4:mrs      x4, fpsr      // Floating point Status Register  32bit
 
@@ -365,15 +359,12 @@
   msr   daifset, #3
   isb
 
-  EL1_OR_EL2_OR_EL3(x28)
+  EL1_OR_EL2(x28)
 1:msr      elr_el1, x29  // Exception Link Register
   msr      spsr_el1, x30 // Saved Processor Status Register 32bit
   b        4f
 2:msr      elr_el2, x29  // Exception Link Register
   msr      spsr_el2, x30 // Saved Processor Status Register 32bit
-  b        4f
-3:msr      elr_el3, x29  // Exception Link Register
-  msr      spsr_el3, x30 // Saved Processor Status Register 32bit
 4:
 
   // pop remaining GP regs and return from exception.
diff --git a/ArmPkg/Library/ArmLib/AArch64/AArch64Support.S b/ArmPkg/Library/ArmLib/AArch64/AArch64Support.S
index 177d10e..1ec868e 100644
--- a/ArmPkg/Library/ArmLib/AArch64/AArch64Support.S
+++ b/ArmPkg/Library/ArmLib/AArch64/AArch64Support.S
@@ -67,14 +67,12 @@
 
 
 ASM_FUNC(ArmEnableMmu)
-   EL1_OR_EL2_OR_EL3(x1)
+   EL1_OR_EL2(x1)
 1: mrs     x0, sctlr_el1       // Read System control register EL1
    b       4f
 2: mrs     x0, sctlr_el2       // Read System control register EL2
-   b       4f
-3: mrs     x0, sctlr_el3       // Read System control register EL3
 4: orr     x0, x0, #CTRL_M_BIT // Set MMU enable bit
-   EL1_OR_EL2_OR_EL3(x1)
+   EL1_OR_EL2(x1)
 1: tlbi    vmalle1
    dsb     nsh
    isb
@@ -84,139 +82,107 @@
    dsb     nsh
    isb
    msr     sctlr_el2, x0       // Write back
-   b       4f
-3: tlbi    alle3
-   dsb     nsh
-   isb
-   msr     sctlr_el3, x0       // Write back
 4: isb
    ret
 
 
 ASM_FUNC(ArmDisableMmu)
-   EL1_OR_EL2_OR_EL3(x1)
+   EL1_OR_EL2(x1)
 1: mrs     x0, sctlr_el1        // Read System Control Register EL1
    b       4f
 2: mrs     x0, sctlr_el2        // Read System Control Register EL2
-   b       4f
-3: mrs     x0, sctlr_el3        // Read System Control Register EL3
 4: and     x0, x0, #~CTRL_M_BIT  // Clear MMU enable bit
-   EL1_OR_EL2_OR_EL3(x1)
+   EL1_OR_EL2(x1)
 1: msr     sctlr_el1, x0        // Write back
    tlbi    vmalle1
    b       4f
 2: msr     sctlr_el2, x0        // Write back
    tlbi    alle2
-   b       4f
-3: msr     sctlr_el3, x0        // Write back
-   tlbi    alle3
 4: dsb     sy
    isb
    ret
 
 
 ASM_FUNC(ArmDisableCachesAndMmu)
-   EL1_OR_EL2_OR_EL3(x1)
+   EL1_OR_EL2(x1)
 1: mrs     x0, sctlr_el1        // Get control register EL1
    b       4f
 2: mrs     x0, sctlr_el2        // Get control register EL2
-   b       4f
-3: mrs     x0, sctlr_el3        // Get control register EL3
 4: mov     x1, #~(CTRL_M_BIT | CTRL_C_BIT | CTRL_I_BIT)  // Disable MMU, D & I caches
    and     x0, x0, x1
-   EL1_OR_EL2_OR_EL3(x1)
+   EL1_OR_EL2(x1)
 1: msr     sctlr_el1, x0        // Write back control register
    b       4f
 2: msr     sctlr_el2, x0        // Write back control register
-   b       4f
-3: msr     sctlr_el3, x0        // Write back control register
 4: dsb     sy
    isb
    ret
 
 
 ASM_FUNC(ArmMmuEnabled)
-   EL1_OR_EL2_OR_EL3(x1)
+   EL1_OR_EL2(x1)
 1: mrs     x0, sctlr_el1        // Get control register EL1
    b       4f
 2: mrs     x0, sctlr_el2        // Get control register EL2
-   b       4f
-3: mrs     x0, sctlr_el3        // Get control register EL3
 4: and     x0, x0, #CTRL_M_BIT
    ret
 
 
 ASM_FUNC(ArmEnableDataCache)
-   EL1_OR_EL2_OR_EL3(x1)
+   EL1_OR_EL2(x1)
 1: mrs     x0, sctlr_el1        // Get control register EL1
    b       4f
 2: mrs     x0, sctlr_el2        // Get control register EL2
-   b       4f
-3: mrs     x0, sctlr_el3        // Get control register EL3
 4: orr     x0, x0, #CTRL_C_BIT  // Set C bit
-   EL1_OR_EL2_OR_EL3(x1)
+   EL1_OR_EL2(x1)
 1: msr     sctlr_el1, x0        // Write back control register
    b       4f
 2: msr     sctlr_el2, x0        // Write back control register
-   b       4f
-3: msr     sctlr_el3, x0        // Write back control register
 4: dsb     sy
    isb
    ret
 
 
 ASM_FUNC(ArmDisableDataCache)
-   EL1_OR_EL2_OR_EL3(x1)
+   EL1_OR_EL2(x1)
 1: mrs     x0, sctlr_el1        // Get control register EL1
    b       4f
 2: mrs     x0, sctlr_el2        // Get control register EL2
-   b       4f
-3: mrs     x0, sctlr_el3        // Get control register EL3
 4: and     x0, x0, #~CTRL_C_BIT  // Clear C bit
-   EL1_OR_EL2_OR_EL3(x1)
+   EL1_OR_EL2(x1)
 1: msr     sctlr_el1, x0        // Write back control register
    b       4f
 2: msr     sctlr_el2, x0        // Write back control register
-   b       4f
-3: msr     sctlr_el3, x0        // Write back control register
 4: dsb     sy
    isb
    ret
 
 
 ASM_FUNC(ArmEnableInstructionCache)
-   EL1_OR_EL2_OR_EL3(x1)
+   EL1_OR_EL2(x1)
 1: mrs     x0, sctlr_el1        // Get control register EL1
    b       4f
 2: mrs     x0, sctlr_el2        // Get control register EL2
-   b       4f
-3: mrs     x0, sctlr_el3        // Get control register EL3
 4: orr     x0, x0, #CTRL_I_BIT  // Set I bit
-   EL1_OR_EL2_OR_EL3(x1)
+   EL1_OR_EL2(x1)
 1: msr     sctlr_el1, x0        // Write back control register
    b       4f
 2: msr     sctlr_el2, x0        // Write back control register
-   b       4f
-3: msr     sctlr_el3, x0        // Write back control register
 4: dsb     sy
    isb
    ret
 
 
 ASM_FUNC(ArmDisableInstructionCache)
-   EL1_OR_EL2_OR_EL3(x1)
+   EL1_OR_EL2(x1)
 1: mrs     x0, sctlr_el1        // Get control register EL1
    b       4f
 2: mrs     x0, sctlr_el2        // Get control register EL2
-   b       4f
-3: mrs     x0, sctlr_el3        // Get control register EL3
 4: and     x0, x0, #~CTRL_I_BIT  // Clear I bit
-   EL1_OR_EL2_OR_EL3(x1)
+   EL1_OR_EL2(x1)
 1: msr     sctlr_el1, x0        // Write back control register
    b       4f
 2: msr     sctlr_el2, x0        // Write back control register
-   b       4f
-3: msr     sctlr_el3, x0        // Write back control register
 4: dsb     sy
    isb
    ret
@@ -238,19 +204,15 @@
 
 
 ASM_FUNC(ArmDisableAlignmentCheck)
-   EL1_OR_EL2_OR_EL3(x1)
+   EL1_OR_EL2(x1)
 1: mrs     x0, sctlr_el1        // Get control register EL1
    b       4f
 2: mrs     x0, sctlr_el2        // Get control register EL2
-   b       4f
-3: mrs     x0, sctlr_el3        // Get control register EL3
 4: and     x0, x0, #~CTRL_A_BIT  // Clear A (alignment check) bit
-   EL1_OR_EL2_OR_EL3(x1)
+   EL1_OR_EL2(x1)
 1: msr     sctlr_el1, x0        // Write back control register
    b       4f
 2: msr     sctlr_el2, x0        // Write back control register
-   b       4f
-3: msr     sctlr_el3, x0        // Write back control register
 4: dsb     sy
    isb
    ret
@@ -271,19 +233,15 @@
 
 
 ASM_FUNC(ArmDisableStackAlignmentCheck)
-   EL1_OR_EL2_OR_EL3(x1)
+   EL1_OR_EL2(x1)
 1: mrs     x0, sctlr_el1        // Get control register EL1
    b       4f
 2: mrs     x0, sctlr_el2        // Get control register EL2
-   b       4f
-3: mrs     x0, sctlr_el3        // Get control register EL3
 4: bic     x0, x0, #CTRL_SA_BIT // Clear SA (stack alignment check) bit
-   EL1_OR_EL2_OR_EL3(x1)
+   EL1_OR_EL2(x1)
 1: msr     sctlr_el1, x0        // Write back control register
    b       4f
 2: msr     sctlr_el2, x0        // Write back control register
-   b       4f
-3: msr     sctlr_el3, x0        // Write back control register
 4: dsb     sy
    isb
    ret
@@ -374,24 +332,19 @@
 
 
 ASM_FUNC(ArmWriteVBar)
-   EL1_OR_EL2_OR_EL3(x1)
+   EL1_OR_EL2(x1)
 1: msr   vbar_el1, x0            // Set the Address of the EL1 Vector Table in the VBAR register
    b     4f
 2: msr   vbar_el2, x0            // Set the Address of the EL2 Vector Table in the VBAR register
-   b     4f
-3: msr   vbar_el3, x0            // Set the Address of the EL3 Vector Table in the VBAR register
 4: isb
    ret
 
 ASM_FUNC(ArmReadVBar)
-   EL1_OR_EL2_OR_EL3(x1)
+   EL1_OR_EL2(x1)
 1: mrs   x0, vbar_el1            // Set the Address of the EL1 Vector Table in the VBAR register
    ret
 2: mrs   x0, vbar_el2            // Set the Address of the EL2 Vector Table in the VBAR register
    ret
-3: mrs   x0, vbar_el3            // Set the Address of the EL3 Vector Table in the VBAR register
-   ret
-
 
 ASM_FUNC(ArmEnableVFP)
   // Check whether floating-point is implemented in the processor.
@@ -409,15 +362,11 @@
  orr   x0, x0, #CPACR_VFP_BITS // Disable FVP traps to EL1
  msr   cpacr_el1, x0           // Write back EL1 Coprocessor Access Control Register (CPACR)
  mov   x1, #AARCH64_CPTR_TFP   // TFP Bit for trapping VFP Exceptions
-  EL1_OR_EL2_OR_EL3(x2)
+  EL1_OR_EL2(x2)
 1:ret                           // Not configurable in EL1
 2:mrs   x0, cptr_el2            // Disable VFP traps to EL2
   bic   x0, x0, x1
  msr   cptr_el2, x0
-  ret
-3:mrs   x0, cptr_el3            // Disable VFP traps to EL3
-  bic   x0, x0, x1
-  msr   cptr_el3, x0
 4:ret
 
 
diff --git a/ArmPkg/Library/ArmLib/AArch64/ArmLibSupport.S b/ArmPkg/Library/ArmLib/AArch64/ArmLibSupport.S
index ba0ec56..ec34200 100644
--- a/ArmPkg/Library/ArmLib/AArch64/ArmLibSupport.S
+++ b/ArmPkg/Library/ArmLib/AArch64/ArmLibSupport.S
@@ -54,12 +54,10 @@
   ret
 
 ASM_FUNC(ArmSetTTBR0)
-  EL1_OR_EL2_OR_EL3(x1)
+  EL1_OR_EL2(x1)
 1:msr     ttbr0_el1, x0      // Translation Table Base Reg 0 (TTBR0)
   b       4f
 2:msr     ttbr0_el2, x0      // Translation Table Base Reg 0 (TTBR0)
-  b       4f
-3:msr     ttbr0_el3, x0      // Translation Table Base Reg 0 (TTBR0)
 4:isb
   ret
 
@@ -73,42 +71,34 @@
   ret
 
 ASM_FUNC(ArmGetTCR)
-  EL1_OR_EL2_OR_EL3(x1)
+  EL1_OR_EL2(x1)
 1:mrs     x0, tcr_el1
   b       4f
 2:mrs     x0, tcr_el2
-  b       4f
-3:mrs     x0, tcr_el3
 4:isb
   ret
 
 ASM_FUNC(ArmSetTCR)
-  EL1_OR_EL2_OR_EL3(x1)
+  EL1_OR_EL2(x1)
 1:msr     tcr_el1, x0
   b       4f
 2:msr     tcr_el2, x0
-  b       4f
-3:msr     tcr_el3, x0
 4:isb
   ret
 
 ASM_FUNC(ArmGetMAIR)
-  EL1_OR_EL2_OR_EL3(x1)
+  EL1_OR_EL2(x1)
 1:mrs     x0, mair_el1
   b       4f
 2:mrs     x0, mair_el2
-  b       4f
-3:mrs     x0, mair_el3
 4:isb
   ret
 
 ASM_FUNC(ArmSetMAIR)
-  EL1_OR_EL2_OR_EL3(x1)
+  EL1_OR_EL2(x1)
 1:msr     mair_el1, x0
   b       4f
 2:msr     mair_el2, x0
-  b       4f
-3:msr     mair_el3, x0
 4:isb
   ret
 
@@ -122,15 +112,12 @@
 ASM_FUNC(ArmUpdateTranslationTableEntry)
    dsb     nshst
    lsr     x1, x1, #12
-   EL1_OR_EL2_OR_EL3(x2)
+   EL1_OR_EL2(x2)
 1: tlbi    vaae1, x1             // TLB Invalidate VA , EL1
    mrs     x2, sctlr_el1
    b       4f
 2: tlbi    vae2, x1              // TLB Invalidate VA , EL2
    mrs     x2, sctlr_el2
-   b       4f
-3: tlbi    vae3, x1              // TLB Invalidate VA , EL3
-   mrs     x2, sctlr_el3
 4: tbnz    x2, SCTLR_ELx_M_BIT_POS, 5f
    dc      ivac, x0              // invalidate in Dcache if MMU is still off
 5: dsb     nsh
@@ -138,29 +125,14 @@
   ret
 
 ASM_FUNC(ArmInvalidateTlb)
-   EL1_OR_EL2_OR_EL3(x0)
+   EL1_OR_EL2(x0)
 1: tlbi  vmalle1
    b     4f
 2: tlbi  alle2
-   b     4f
-3: tlbi  alle3
 4: dsb   sy
    isb
    ret
 
-ASM_FUNC(ArmWriteCptr)
-  msr     cptr_el3, x0           // EL3 Coprocessor Trap Reg (CPTR)
-  ret
-
-ASM_FUNC(ArmWriteScr)
-  msr     scr_el3, x0            // Secure configuration register EL3
-  isb
-  ret
-
-ASM_FUNC(ArmWriteMVBar)
-  msr    vbar_el3, x0            // Exception Vector Base address for Monitor on EL3
-  ret
-
 ASM_FUNC(ArmCallWFE)
   wfe
   ret
@@ -180,22 +152,18 @@
   ret
 
 ASM_FUNC(ArmReadSctlr)
-  EL1_OR_EL2_OR_EL3(x1)
+  EL1_OR_EL2(x1)
 1:mrs   x0, sctlr_el1
   ret
 2:mrs   x0, sctlr_el2
   ret
-3:mrs   x0, sctlr_el3
-4:ret
 
 ASM_FUNC(ArmWriteSctlr)
-  EL1_OR_EL2_OR_EL3(x1)
+  EL1_OR_EL2(x1)
 1:msr   sctlr_el1, x0
   ret
 2:msr   sctlr_el2, x0
   ret
-3:msr   sctlr_el3, x0
-4:ret
 
 ASM_FUNC(ArmGetPhysicalAddressBits)
   mrs   x0, id_aa64mmfr0_el1
diff --git a/ArmPkg/Library/ArmLib/Arm/ArmLibSupport.S b/ArmPkg/Library/ArmLib/Arm/ArmLibSupport.S
index 0856740..7e032dd 100644
--- a/ArmPkg/Library/ArmLib/Arm/ArmLibSupport.S
+++ b/ArmPkg/Library/ArmLib/Arm/ArmLibSupport.S
@@ -108,15 +108,6 @@
   isb
   bx      lr
 
-ASM_FUNC(ArmReadScr)
-  mrc     p15, 0, r0, c1, c1, 0
-  bx      lr
-
-ASM_FUNC(ArmWriteScr)
-  mcr     p15, 0, r0, c1, c1, 0
-  isb
-  bx      lr
-
 ASM_FUNC(ArmReadHVBar)
   mrc     p15, 4, r0, c12, c0, 0
   bx      lr
@@ -125,14 +116,6 @@
   mcr     p15, 4, r0, c12, c0, 0
   bx      lr
 
-ASM_FUNC(ArmReadMVBar)
-  mrc     p15, 0, r0, c12, c0, 1
-  bx      lr
-
-ASM_FUNC(ArmWriteMVBar)
-  mcr     p15, 0, r0, c12, c0, 1
-  bx      lr
-
 ASM_FUNC(ArmCallWFE)
   wfe
   bx      lr
diff --git a/ArmPkg/Library/ArmMmuLib/AArch64/ArmMmuLibReplaceEntry.S b/ArmPkg/Library/ArmMmuLib/AArch64/ArmMmuLibReplaceEntry.S
index 1f0d805..0332cf7 100644
--- a/ArmPkg/Library/ArmMmuLib/AArch64/ArmMmuLibReplaceEntry.S
+++ b/ArmPkg/Library/ArmMmuLib/AArch64/ArmMmuLibReplaceEntry.S
@@ -86,12 +86,10 @@
   msr   daifset, #0xf
   isb
 
-  EL1_OR_EL2_OR_EL3(x5)
+  EL1_OR_EL2(x5)
 1:__replace_entry 1
   b     4f
 2:__replace_entry 2
-  b     4f
-3:__replace_entry 3
 
 4:msr   daif, x4
   ret