author    Yatharth Kochar <yatharth.kochar@arm.com>    2016-06-28 16:58:26 +0100
committer Yatharth Kochar <yatharth.kochar@arm.com>    2016-09-21 16:27:15 +0100
commit    1a0a3f0622e4b569513304109d9a0d093b71228a (patch)
tree      9850566923168a92a6ec7ebbabe03119f14b2786 /lib/aarch32
parent    a8aa7fec1d4a6df8617c0d0463f1e10f1827a609 (diff)
AArch32: Common changes needed for BL1/BL2
This patch adds common changes to support AArch32 state in BL1 and BL2. The changes are:

* Added functions for disabling the MMU from Secure state.
* Added an AArch32-specific SMC function.
* Added semihosting support.
* Added reporting of unhandled exceptions.
* Added uniprocessor stack support.
* Added an `el3_entrypoint_common` macro that can be shared by the BL1 and BL32 (SP_MIN) BL stages. It is similar to the AArch64 counterpart; the main differences are in the assembly instructions and the registers relevant to the AArch32 execution state.
* Enabled the `LOAD_IMAGE_V2` flag in the Makefile for `ARCH=aarch32` and added a check to ensure the platform has not overridden it to disable the flag.

Change-Id: I33c6d8dfefb2e5d142fdfd06a0f4a7332962e1a3
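For orientation, the routines this patch adds would typically be reached from C through declarations along the following lines. This is a sketch: the header placement is an assumption, and the seven-argument smc() signature simply mirrors the AArch32 calling convention handled in the diff below.

    /* Assumed C-side declarations for the new AArch32 routines;
     * this diff does not show where they are actually declared. */
    #include <stdint.h>

    void smc(uint32_t r0, uint32_t r1, uint32_t r2, uint32_t r3,
             uint32_t r4, uint32_t r5, uint32_t r6);
    void disable_mmu_secure(void);
    void disable_mmu_icache_secure(void);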
Diffstat (limited to 'lib/aarch32')
-rw-r--r--  lib/aarch32/misc_helpers.S | 36
1 file changed, 36 insertions, 0 deletions
diff --git a/lib/aarch32/misc_helpers.S b/lib/aarch32/misc_helpers.S
index 63ac1a7e..fd7c6dd1 100644
--- a/lib/aarch32/misc_helpers.S
+++ b/lib/aarch32/misc_helpers.S
@@ -32,7 +32,21 @@
#include <asm_macros.S>
#include <assert_macros.S>
+ .globl smc
.globl zeromem
+ .globl disable_mmu_icache_secure
+ .globl disable_mmu_secure
+
+func smc
+	/*
+	 * Under the AArch32 AAPCS only the first four arguments are
+	 * passed in registers r0-r3; the remaining three arrive on the
+	 * stack, so load them into r4-r6 explicitly before issuing the
+	 * SMC.
+	 * Clobbers: r4-r6
+	 */
+ ldm sp, {r4, r5, r6}
+ smc #0
+endfunc smc
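A caller-side sketch of why the ldm is needed: with seven arguments, a C call passes only the first four in r0-r3 and pushes the rest on the stack, which the stub above reloads into r4-r6. The function id below is illustrative only, not taken from this patch.

    #include <stdint.h>

    void smc(uint32_t r0, uint32_t r1, uint32_t r2, uint32_t r3,
             uint32_t r4, uint32_t r5, uint32_t r6);

    static void smc_call_example(void)
    {
        /* The compiler places the last three arguments on the stack;
         * smc() loads them into r4-r6 before executing SMC #0. */
        smc(0x84000000u /* illustrative function id */, 0, 0, 0, 0, 0, 0);
    }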
/* -----------------------------------------------------------------------
* void zeromem(void *mem, unsigned int length);
@@ -58,3 +72,25 @@ z_loop:
z_end:
bx lr
endfunc zeromem
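A minimal usage sketch for the zeromem() prototype above; the buffer name and size are invented for the example.

    #include <stdint.h>

    void zeromem(void *mem, unsigned int length);

    static uint8_t scratch[128];    /* illustrative buffer */

    void clear_scratch(void)
    {
        zeromem(scratch, sizeof(scratch));
    }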
+
+/* ---------------------------------------------------------------------------
+ * Disable the MMU in Secure State
+ * ---------------------------------------------------------------------------
+ */
+
+func disable_mmu_secure
+ mov r1, #(SCTLR_M_BIT | SCTLR_C_BIT)
+do_disable_mmu:
+ ldcopr r0, SCTLR
+ bic r0, r0, r1
+ stcopr r0, SCTLR
+	isb				// ensure MMU is off
+	dsb	sy			// complete outstanding memory accesses
+ bx lr
+endfunc disable_mmu_secure
+
+
+func disable_mmu_icache_secure
+	ldr	r1, =(SCTLR_M_BIT | SCTLR_C_BIT | SCTLR_I_BIT)	// mask is not a valid mov immediate
+ b do_disable_mmu
+endfunc disable_mmu_icache_secure
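Both entry points share the read-modify-write tail: each selects a different SCTLR clear mask before reaching do_disable_mmu. A rough C rendering of that structure follows; read_sctlr()/write_sctlr() are hypothetical stand-ins for the ldcopr/stcopr coprocessor accessors, and the _c suffixes avoid implying these are the real functions.

    #include <stdint.h>

    /* Hypothetical wrappers for the CP15 SCTLR accessors. */
    uint32_t read_sctlr(void);
    void write_sctlr(uint32_t val);

    #define SCTLR_M_BIT (1u << 0)   /* MMU enable */
    #define SCTLR_C_BIT (1u << 2)   /* data cache enable */
    #define SCTLR_I_BIT (1u << 12)  /* instruction cache enable */

    static void do_disable_mmu_c(uint32_t mask)
    {
        /* Clear the selected enable bits; the assembly follows the
         * write with isb and dsb sy so the change takes effect. */
        write_sctlr(read_sctlr() & ~mask);
    }

    void disable_mmu_secure_c(void)
    {
        do_disable_mmu_c(SCTLR_M_BIT | SCTLR_C_BIT);
    }

    void disable_mmu_icache_secure_c(void)
    {
        do_disable_mmu_c(SCTLR_M_BIT | SCTLR_C_BIT | SCTLR_I_BIT);
    }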