-rw-r--r--  bl31/bl31.mk | 4
-rw-r--r--  bl32/sp_min/sp_min.mk | 4
-rw-r--r--  bl32/sp_min/wa_cve_2017_5715_bpiall.S (renamed from bl32/sp_min/workaround_cve_2017_5715_bpiall.S) | 4
-rw-r--r--  bl32/sp_min/wa_cve_2017_5715_icache_inv.S (renamed from bl32/sp_min/workaround_cve_2017_5715_icache_inv.S) | 4
-rw-r--r--  include/lib/cpus/wa_cve_2017_5715.h | 12
-rw-r--r--  include/lib/cpus/workaround_cve_2017_5715.h | 12
-rw-r--r--  lib/cpus/aarch64/cortex_a57.S | 2
-rw-r--r--  lib/cpus/aarch64/cortex_a72.S | 2
-rw-r--r--  lib/cpus/aarch64/cortex_a73.S | 2
-rw-r--r--  lib/cpus/aarch64/cortex_a75.S | 2
-rw-r--r--  lib/cpus/aarch64/cpu_helpers.S | 8
-rw-r--r--  lib/cpus/aarch64/wa_cve_2017_5715_bpiall.S (renamed from lib/cpus/aarch64/workaround_cve_2017_5715_bpiall.S) | 164
-rw-r--r--  lib/cpus/aarch64/wa_cve_2017_5715_mmu.S (renamed from lib/cpus/aarch64/workaround_cve_2017_5715_mmu.S) | 86
-rw-r--r--  services/arm_arch_svc/arm_arch_svc_setup.c | 4
14 files changed, 155 insertions, 155 deletions
diff --git a/bl31/bl31.mk b/bl31/bl31.mk
index 0e47ddf6..a6c0a9a0 100644
--- a/bl31/bl31.mk
+++ b/bl31/bl31.mk
@@ -61,8 +61,8 @@ BL31_SOURCES += lib/extensions/sve/sve.c
endif
ifeq (${WORKAROUND_CVE_2017_5715},1)
-BL31_SOURCES += lib/cpus/aarch64/workaround_cve_2017_5715_bpiall.S \
- lib/cpus/aarch64/workaround_cve_2017_5715_mmu.S
+BL31_SOURCES += lib/cpus/aarch64/wa_cve_2017_5715_bpiall.S \
+ lib/cpus/aarch64/wa_cve_2017_5715_mmu.S
endif
BL31_LINKERFILE := bl31/bl31.ld.S
diff --git a/bl32/sp_min/sp_min.mk b/bl32/sp_min/sp_min.mk
index 193b1d5e..6233299d 100644
--- a/bl32/sp_min/sp_min.mk
+++ b/bl32/sp_min/sp_min.mk
@@ -29,8 +29,8 @@ BL32_SOURCES += lib/extensions/amu/aarch32/amu.c\
endif
ifeq (${WORKAROUND_CVE_2017_5715},1)
-BL32_SOURCES += bl32/sp_min/workaround_cve_2017_5715_bpiall.S \
- bl32/sp_min/workaround_cve_2017_5715_icache_inv.S
+BL32_SOURCES += bl32/sp_min/wa_cve_2017_5715_bpiall.S \
+ bl32/sp_min/wa_cve_2017_5715_icache_inv.S
endif
BL32_LINKERFILE := bl32/sp_min/sp_min.ld.S
diff --git a/bl32/sp_min/workaround_cve_2017_5715_bpiall.S b/bl32/sp_min/wa_cve_2017_5715_bpiall.S
index 5387cefc..385f3d4b 100644
--- a/bl32/sp_min/workaround_cve_2017_5715_bpiall.S
+++ b/bl32/sp_min/wa_cve_2017_5715_bpiall.S
@@ -6,9 +6,9 @@
#include <asm_macros.S>
- .globl workaround_bpiall_runtime_exceptions
+ .globl wa_cve_2017_5715_bpiall_vbar
-vector_base workaround_bpiall_runtime_exceptions
+vector_base wa_cve_2017_5715_bpiall_vbar
/* We encode the exception entry in the bottom 3 bits of SP */
add sp, sp, #1 /* Reset: 0b111 */
add sp, sp, #1 /* Undef: 0b110 */
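The sp_min vectors above encode the exception entry in the bottom three bits of SP (Reset is 0b111, Undef is 0b110, per the comments carried over by the rename). A minimal C sketch of that idea, purely illustrative, with hypothetical helper names rather than anything from this patch:

#include <stdint.h>

/* Illustrative only: decode which vector fired from SP's low three bits,
 * then strip them to recover the real stack pointer. */
static inline unsigned int mon_entry_from_sp(uintptr_t sp)
{
	return (unsigned int)(sp & 0x7u);	/* e.g. 0x7 = Reset, 0x6 = Undef */
}

static inline uintptr_t mon_realign_sp(uintptr_t sp)
{
	return sp & ~(uintptr_t)0x7u;
}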
diff --git a/bl32/sp_min/workaround_cve_2017_5715_icache_inv.S b/bl32/sp_min/wa_cve_2017_5715_icache_inv.S
index 9102b02f..d0a46250 100644
--- a/bl32/sp_min/workaround_cve_2017_5715_icache_inv.S
+++ b/bl32/sp_min/wa_cve_2017_5715_icache_inv.S
@@ -6,9 +6,9 @@
#include <asm_macros.S>
- .globl workaround_icache_inv_runtime_exceptions
+ .globl wa_cve_2017_5715_icache_inv_vbar
-vector_base workaround_icache_inv_runtime_exceptions
+vector_base wa_cve_2017_5715_icache_inv_vbar
/* We encode the exception entry in the bottom 3 bits of SP */
add sp, sp, #1 /* Reset: 0b111 */
add sp, sp, #1 /* Undef: 0b110 */
diff --git a/include/lib/cpus/wa_cve_2017_5715.h b/include/lib/cpus/wa_cve_2017_5715.h
new file mode 100644
index 00000000..0a65a569
--- /dev/null
+++ b/include/lib/cpus/wa_cve_2017_5715.h
@@ -0,0 +1,12 @@
+/*
+ * Copyright (c) 2018, ARM Limited and Contributors. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ */
+
+#ifndef __WA_CVE_2017_5715_H__
+#define __WA_CVE_2017_5715_H__
+
+int check_wa_cve_2017_5715(void);
+
+#endif /* __WA_CVE_2017_5715_H__ */
diff --git a/include/lib/cpus/workaround_cve_2017_5715.h b/include/lib/cpus/workaround_cve_2017_5715.h
deleted file mode 100644
index e837a673..00000000
--- a/include/lib/cpus/workaround_cve_2017_5715.h
+++ /dev/null
@@ -1,12 +0,0 @@
-/*
- * Copyright (c) 2018, ARM Limited and Contributors. All rights reserved.
- *
- * SPDX-License-Identifier: BSD-3-Clause
- */
-
-#ifndef __WORKAROUND_CVE_2017_5715_H__
-#define __WORKAROUND_CVE_2017_5715_H__
-
-int check_workaround_cve_2017_5715(void);
-
-#endif /* __WORKAROUND_CVE_2017_5715_H__ */
diff --git a/lib/cpus/aarch64/cortex_a57.S b/lib/cpus/aarch64/cortex_a57.S
index 4d072e11..8470c6c5 100644
--- a/lib/cpus/aarch64/cortex_a57.S
+++ b/lib/cpus/aarch64/cortex_a57.S
@@ -393,7 +393,7 @@ func cortex_a57_reset_func
#endif
#if IMAGE_BL31 && WORKAROUND_CVE_2017_5715
- adr x0, workaround_mmu_runtime_exceptions
+ adr x0, wa_cve_2017_5715_mmu_vbar
msr vbar_el3, x0
#endif
diff --git a/lib/cpus/aarch64/cortex_a72.S b/lib/cpus/aarch64/cortex_a72.S
index 29fa77b9..b67c9877 100644
--- a/lib/cpus/aarch64/cortex_a72.S
+++ b/lib/cpus/aarch64/cortex_a72.S
@@ -126,7 +126,7 @@ func cortex_a72_reset_func
#if IMAGE_BL31 && WORKAROUND_CVE_2017_5715
cpu_check_csv2 x0, 1f
- adr x0, workaround_mmu_runtime_exceptions
+ adr x0, wa_cve_2017_5715_mmu_vbar
msr vbar_el3, x0
1:
#endif
diff --git a/lib/cpus/aarch64/cortex_a73.S b/lib/cpus/aarch64/cortex_a73.S
index 0a961ea3..c66067d7 100644
--- a/lib/cpus/aarch64/cortex_a73.S
+++ b/lib/cpus/aarch64/cortex_a73.S
@@ -38,7 +38,7 @@ endfunc cortex_a73_disable_smp
func cortex_a73_reset_func
#if IMAGE_BL31 && WORKAROUND_CVE_2017_5715
cpu_check_csv2 x0, 1f
- adr x0, workaround_bpiall_vbar0_runtime_exceptions
+ adr x0, wa_cve_2017_5715_bpiall_vbar
msr vbar_el3, x0
1:
#endif
diff --git a/lib/cpus/aarch64/cortex_a75.S b/lib/cpus/aarch64/cortex_a75.S
index 288f5afe..f92e4ed0 100644
--- a/lib/cpus/aarch64/cortex_a75.S
+++ b/lib/cpus/aarch64/cortex_a75.S
@@ -13,7 +13,7 @@
func cortex_a75_reset_func
#if IMAGE_BL31 && WORKAROUND_CVE_2017_5715
cpu_check_csv2 x0, 1f
- adr x0, workaround_bpiall_vbar0_runtime_exceptions
+ adr x0, wa_cve_2017_5715_bpiall_vbar
msr vbar_el3, x0
1:
#endif
diff --git a/lib/cpus/aarch64/cpu_helpers.S b/lib/cpus/aarch64/cpu_helpers.S
index 9f13ed2c..78c66e65 100644
--- a/lib/cpus/aarch64/cpu_helpers.S
+++ b/lib/cpus/aarch64/cpu_helpers.S
@@ -285,7 +285,7 @@ endfunc print_errata_status
#endif
/*
- * int check_workaround_cve_2017_5715(void);
+ * int check_wa_cve_2017_5715(void);
*
* This function returns:
* - ERRATA_APPLIES when firmware mitigation is required.
@@ -296,8 +296,8 @@ endfunc print_errata_status
* NOTE: Must be called only after cpu_ops have been initialized
* in per-CPU data.
*/
- .globl check_workaround_cve_2017_5715
-func check_workaround_cve_2017_5715
+ .globl check_wa_cve_2017_5715
+func check_wa_cve_2017_5715
mrs x0, tpidr_el3
#if ENABLE_ASSERTIONS
cmp x0, #0
@@ -315,4 +315,4 @@ func check_workaround_cve_2017_5715
1:
mov x0, #ERRATA_NOT_APPLIES
ret
-endfunc check_workaround_cve_2017_5715
+endfunc check_wa_cve_2017_5715
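check_wa_cve_2017_5715() (documented in the hunk above) reports whether the firmware mitigation applies on the current CPU, and must only be called once cpu_ops have been initialized in per-CPU data. A hedged C sketch of a caller, modelled on the arm_arch_svc hunk at the end of this patch; the wrapper name and the errata_report.h include location are assumptions, not taken from the diff:

#include <errata_report.h>	/* assumed home of ERRATA_APPLIES / ERRATA_NOT_APPLIES / ERRATA_MISSING */
#include <wa_cve_2017_5715.h>

/* Hypothetical wrapper: true when the mitigation should be advertised
 * (ERRATA_APPLIES or ERRATA_MISSING), false when it is not needed. */
static int wa_cve_2017_5715_needed(void)
{
	return check_wa_cve_2017_5715() != ERRATA_NOT_APPLIES;
}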
diff --git a/lib/cpus/aarch64/workaround_cve_2017_5715_bpiall.S b/lib/cpus/aarch64/wa_cve_2017_5715_bpiall.S
index cd824973..84371551 100644
--- a/lib/cpus/aarch64/workaround_cve_2017_5715_bpiall.S
+++ b/lib/cpus/aarch64/wa_cve_2017_5715_bpiall.S
@@ -9,13 +9,13 @@
#include <asm_macros.S>
#include <context.h>
- .globl workaround_bpiall_vbar0_runtime_exceptions
+ .globl wa_cve_2017_5715_bpiall_vbar
#define EMIT_BPIALL 0xee070fd5
#define EMIT_SMC 0xe1600070
#define ESR_EL3_A64_SMC0 0x5e000000
- .macro enter_workaround _from_vector
+ .macro apply_cve_2017_5715_wa _from_vector
/*
* Save register state to enable a call to AArch32 S-EL1 and return
* Identify the original calling vector in w2 (==_from_vector)
@@ -66,7 +66,7 @@
movz w8, SPSR_MODE32(MODE32_svc, SPSR_T_ARM, SPSR_E_LITTLE, SPSR_AIF_MASK)
/* Switch EL3 exception vectors while the workaround is executing. */
- adr x9, workaround_bpiall_vbar1_runtime_exceptions
+ adr x9, wa_cve_2017_5715_bpiall_ret_vbar
/* Setup SCTLR_EL1 with MMU off and I$ on */
ldr x10, stub_sel1_sctlr
@@ -93,13 +93,13 @@
* is not enabled, the existing runtime exception vector table is used.
* ---------------------------------------------------------------------
*/
-vector_base workaround_bpiall_vbar0_runtime_exceptions
+vector_base wa_cve_2017_5715_bpiall_vbar
/* ---------------------------------------------------------------------
* Current EL with SP_EL0 : 0x0 - 0x200
* ---------------------------------------------------------------------
*/
-vector_entry workaround_bpiall_vbar0_sync_exception_sp_el0
+vector_entry bpiall_sync_exception_sp_el0
b sync_exception_sp_el0
nop /* to force 8 byte alignment for the following stub */
@@ -114,79 +114,79 @@ aarch32_stub:
.word EMIT_BPIALL
.word EMIT_SMC
- check_vector_size workaround_bpiall_vbar0_sync_exception_sp_el0
+ check_vector_size bpiall_sync_exception_sp_el0
-vector_entry workaround_bpiall_vbar0_irq_sp_el0
+vector_entry bpiall_irq_sp_el0
b irq_sp_el0
- check_vector_size workaround_bpiall_vbar0_irq_sp_el0
+ check_vector_size bpiall_irq_sp_el0
-vector_entry workaround_bpiall_vbar0_fiq_sp_el0
+vector_entry bpiall_fiq_sp_el0
b fiq_sp_el0
- check_vector_size workaround_bpiall_vbar0_fiq_sp_el0
+ check_vector_size bpiall_fiq_sp_el0
-vector_entry workaround_bpiall_vbar0_serror_sp_el0
+vector_entry bpiall_serror_sp_el0
b serror_sp_el0
- check_vector_size workaround_bpiall_vbar0_serror_sp_el0
+ check_vector_size bpiall_serror_sp_el0
/* ---------------------------------------------------------------------
* Current EL with SP_ELx: 0x200 - 0x400
* ---------------------------------------------------------------------
*/
-vector_entry workaround_bpiall_vbar0_sync_exception_sp_elx
+vector_entry bpiall_sync_exception_sp_elx
b sync_exception_sp_elx
- check_vector_size workaround_bpiall_vbar0_sync_exception_sp_elx
+ check_vector_size bpiall_sync_exception_sp_elx
-vector_entry workaround_bpiall_vbar0_irq_sp_elx
+vector_entry bpiall_irq_sp_elx
b irq_sp_elx
- check_vector_size workaround_bpiall_vbar0_irq_sp_elx
+ check_vector_size bpiall_irq_sp_elx
-vector_entry workaround_bpiall_vbar0_fiq_sp_elx
+vector_entry bpiall_fiq_sp_elx
b fiq_sp_elx
- check_vector_size workaround_bpiall_vbar0_fiq_sp_elx
+ check_vector_size bpiall_fiq_sp_elx
-vector_entry workaround_bpiall_vbar0_serror_sp_elx
+vector_entry bpiall_serror_sp_elx
b serror_sp_elx
- check_vector_size workaround_bpiall_vbar0_serror_sp_elx
+ check_vector_size bpiall_serror_sp_elx
/* ---------------------------------------------------------------------
* Lower EL using AArch64 : 0x400 - 0x600
* ---------------------------------------------------------------------
*/
-vector_entry workaround_bpiall_vbar0_sync_exception_aarch64
- enter_workaround 1
- check_vector_size workaround_bpiall_vbar0_sync_exception_aarch64
+vector_entry bpiall_sync_exception_aarch64
+ apply_cve_2017_5715_wa 1
+ check_vector_size bpiall_sync_exception_aarch64
-vector_entry workaround_bpiall_vbar0_irq_aarch64
- enter_workaround 2
- check_vector_size workaround_bpiall_vbar0_irq_aarch64
+vector_entry bpiall_irq_aarch64
+ apply_cve_2017_5715_wa 2
+ check_vector_size bpiall_irq_aarch64
-vector_entry workaround_bpiall_vbar0_fiq_aarch64
- enter_workaround 4
- check_vector_size workaround_bpiall_vbar0_fiq_aarch64
+vector_entry bpiall_fiq_aarch64
+ apply_cve_2017_5715_wa 4
+ check_vector_size bpiall_fiq_aarch64
-vector_entry workaround_bpiall_vbar0_serror_aarch64
- enter_workaround 8
- check_vector_size workaround_bpiall_vbar0_serror_aarch64
+vector_entry bpiall_serror_aarch64
+ apply_cve_2017_5715_wa 8
+ check_vector_size bpiall_serror_aarch64
/* ---------------------------------------------------------------------
* Lower EL using AArch32 : 0x600 - 0x800
* ---------------------------------------------------------------------
*/
-vector_entry workaround_bpiall_vbar0_sync_exception_aarch32
- enter_workaround 1
- check_vector_size workaround_bpiall_vbar0_sync_exception_aarch32
+vector_entry bpiall_sync_exception_aarch32
+ apply_cve_2017_5715_wa 1
+ check_vector_size bpiall_sync_exception_aarch32
-vector_entry workaround_bpiall_vbar0_irq_aarch32
- enter_workaround 2
- check_vector_size workaround_bpiall_vbar0_irq_aarch32
+vector_entry bpiall_irq_aarch32
+ apply_cve_2017_5715_wa 2
+ check_vector_size bpiall_irq_aarch32
-vector_entry workaround_bpiall_vbar0_fiq_aarch32
- enter_workaround 4
- check_vector_size workaround_bpiall_vbar0_fiq_aarch32
+vector_entry bpiall_fiq_aarch32
+ apply_cve_2017_5715_wa 4
+ check_vector_size bpiall_fiq_aarch32
-vector_entry workaround_bpiall_vbar0_serror_aarch32
- enter_workaround 8
- check_vector_size workaround_bpiall_vbar0_serror_aarch32
+vector_entry bpiall_serror_aarch32
+ apply_cve_2017_5715_wa 8
+ check_vector_size bpiall_serror_aarch32
/* ---------------------------------------------------------------------
* This vector table is used while the workaround is executing. It
@@ -195,73 +195,73 @@ vector_entry workaround_bpiall_vbar0_serror_aarch32
* EL3 state before proceeding with the normal runtime exception vector.
* ---------------------------------------------------------------------
*/
-vector_base workaround_bpiall_vbar1_runtime_exceptions
+vector_base wa_cve_2017_5715_bpiall_ret_vbar
/* ---------------------------------------------------------------------
* Current EL with SP_EL0 : 0x0 - 0x200 (UNUSED)
* ---------------------------------------------------------------------
*/
-vector_entry workaround_bpiall_vbar1_sync_exception_sp_el0
+vector_entry bpiall_ret_sync_exception_sp_el0
b report_unhandled_exception
- check_vector_size workaround_bpiall_vbar1_sync_exception_sp_el0
+ check_vector_size bpiall_ret_sync_exception_sp_el0
-vector_entry workaround_bpiall_vbar1_irq_sp_el0
+vector_entry bpiall_ret_irq_sp_el0
b report_unhandled_interrupt
- check_vector_size workaround_bpiall_vbar1_irq_sp_el0
+ check_vector_size bpiall_ret_irq_sp_el0
-vector_entry workaround_bpiall_vbar1_fiq_sp_el0
+vector_entry bpiall_ret_fiq_sp_el0
b report_unhandled_interrupt
- check_vector_size workaround_bpiall_vbar1_fiq_sp_el0
+ check_vector_size bpiall_ret_fiq_sp_el0
-vector_entry workaround_bpiall_vbar1_serror_sp_el0
+vector_entry bpiall_ret_serror_sp_el0
b report_unhandled_exception
- check_vector_size workaround_bpiall_vbar1_serror_sp_el0
+ check_vector_size bpiall_ret_serror_sp_el0
/* ---------------------------------------------------------------------
* Current EL with SP_ELx: 0x200 - 0x400 (UNUSED)
* ---------------------------------------------------------------------
*/
-vector_entry workaround_bpiall_vbar1_sync_exception_sp_elx
+vector_entry bpiall_ret_sync_exception_sp_elx
b report_unhandled_exception
- check_vector_size workaround_bpiall_vbar1_sync_exception_sp_elx
+ check_vector_size bpiall_ret_sync_exception_sp_elx
-vector_entry workaround_bpiall_vbar1_irq_sp_elx
+vector_entry bpiall_ret_irq_sp_elx
b report_unhandled_interrupt
- check_vector_size workaround_bpiall_vbar1_irq_sp_elx
+ check_vector_size bpiall_ret_irq_sp_elx
-vector_entry workaround_bpiall_vbar1_fiq_sp_elx
+vector_entry bpiall_ret_fiq_sp_elx
b report_unhandled_interrupt
- check_vector_size workaround_bpiall_vbar1_fiq_sp_elx
+ check_vector_size bpiall_ret_fiq_sp_elx
-vector_entry workaround_bpiall_vbar1_serror_sp_elx
+vector_entry bpiall_ret_serror_sp_elx
b report_unhandled_exception
- check_vector_size workaround_bpiall_vbar1_serror_sp_elx
+ check_vector_size bpiall_ret_serror_sp_elx
/* ---------------------------------------------------------------------
* Lower EL using AArch64 : 0x400 - 0x600 (UNUSED)
* ---------------------------------------------------------------------
*/
-vector_entry workaround_bpiall_vbar1_sync_exception_aarch64
+vector_entry bpiall_ret_sync_exception_aarch64
b report_unhandled_exception
- check_vector_size workaround_bpiall_vbar1_sync_exception_aarch64
+ check_vector_size bpiall_ret_sync_exception_aarch64
-vector_entry workaround_bpiall_vbar1_irq_aarch64
+vector_entry bpiall_ret_irq_aarch64
b report_unhandled_interrupt
- check_vector_size workaround_bpiall_vbar1_irq_aarch64
+ check_vector_size bpiall_ret_irq_aarch64
-vector_entry workaround_bpiall_vbar1_fiq_aarch64
+vector_entry bpiall_ret_fiq_aarch64
b report_unhandled_interrupt
- check_vector_size workaround_bpiall_vbar1_fiq_aarch64
+ check_vector_size bpiall_ret_fiq_aarch64
-vector_entry workaround_bpiall_vbar1_serror_aarch64
+vector_entry bpiall_ret_serror_aarch64
b report_unhandled_exception
- check_vector_size workaround_bpiall_vbar1_serror_aarch64
+ check_vector_size bpiall_ret_serror_aarch64
/* ---------------------------------------------------------------------
* Lower EL using AArch32 : 0x600 - 0x800
* ---------------------------------------------------------------------
*/
-vector_entry workaround_bpiall_vbar1_sync_exception_aarch32
+vector_entry bpiall_ret_sync_exception_aarch32
/*
* w2 indicates which SEL1 stub was run and thus which original vector was used
* w3-w6 contain saved system register state (esr_el3 in w3)
@@ -281,7 +281,7 @@ vector_entry workaround_bpiall_vbar1_sync_exception_aarch32
* to workaround entry table in preparation for subsequent
* Sync/IRQ/FIQ/SError exceptions.
*/
- adr x0, workaround_bpiall_vbar0_runtime_exceptions
+ adr x0, wa_cve_2017_5715_bpiall_vbar
msr vbar_el3, x0
/*
@@ -324,34 +324,34 @@ vector_entry workaround_bpiall_vbar1_sync_exception_aarch32
1:
ldp x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
b sync_exception_aarch64
- check_vector_size workaround_bpiall_vbar1_sync_exception_aarch32
+ check_vector_size bpiall_ret_sync_exception_aarch32
-vector_entry workaround_bpiall_vbar1_irq_aarch32
+vector_entry bpiall_ret_irq_aarch32
b report_unhandled_interrupt
/*
* Post-workaround fan-out for non-sync exceptions
*/
workaround_not_sync:
- tbnz w2, #3, workaround_bpiall_vbar1_serror
- tbnz w2, #2, workaround_bpiall_vbar1_fiq
+ tbnz w2, #3, bpiall_ret_serror
+ tbnz w2, #2, bpiall_ret_fiq
/* IRQ */
ldp x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
b irq_aarch64
-workaround_bpiall_vbar1_fiq:
+bpiall_ret_fiq:
ldp x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
b fiq_aarch64
-workaround_bpiall_vbar1_serror:
+bpiall_ret_serror:
ldp x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
b serror_aarch64
- check_vector_size workaround_bpiall_vbar1_irq_aarch32
+ check_vector_size bpiall_ret_irq_aarch32
-vector_entry workaround_bpiall_vbar1_fiq_aarch32
+vector_entry bpiall_ret_fiq_aarch32
b report_unhandled_interrupt
- check_vector_size workaround_bpiall_vbar1_fiq_aarch32
+ check_vector_size bpiall_ret_fiq_aarch32
-vector_entry workaround_bpiall_vbar1_serror_aarch32
+vector_entry bpiall_ret_serror_aarch32
b report_unhandled_exception
- check_vector_size workaround_bpiall_vbar1_serror_aarch32
+ check_vector_size bpiall_ret_serror_aarch32
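The bpiall variant above works by flipping VBAR_EL3 between two tables: wa_cve_2017_5715_bpiall_vbar for normal operation and wa_cve_2017_5715_bpiall_ret_vbar while the AArch32 BPIALL stub runs. A hedged sketch of just the table switch, in C with inline assembly; the real mechanism is the vector code above, and the setter name here is illustrative:

#include <stdint.h>

extern char wa_cve_2017_5715_bpiall_vbar[];	/* table used in normal operation */
extern char wa_cve_2017_5715_bpiall_ret_vbar[];	/* table used while the stub executes */

static inline void set_vbar_el3(const void *vectors)
{
	__asm__ volatile("msr vbar_el3, %0\n\tisb" : : "r"(vectors) : "memory");
}

/* Exceptions taken while the stub runs land in the *_ret_vbar table, which
 * restores the normal table before handing off to the runtime vectors. */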
diff --git a/lib/cpus/aarch64/workaround_cve_2017_5715_mmu.S b/lib/cpus/aarch64/wa_cve_2017_5715_mmu.S
index b24b620c..039e373c 100644
--- a/lib/cpus/aarch64/workaround_cve_2017_5715_mmu.S
+++ b/lib/cpus/aarch64/wa_cve_2017_5715_mmu.S
@@ -9,13 +9,13 @@
#include <asm_macros.S>
#include <context.h>
- .globl workaround_mmu_runtime_exceptions
+ .globl wa_cve_2017_5715_mmu_vbar
#define ESR_EL3_A64_SMC0 0x5e000000
-vector_base workaround_mmu_runtime_exceptions
+vector_base wa_cve_2017_5715_mmu_vbar
- .macro apply_workaround _is_sync_exception
+ .macro apply_cve_2017_5715_wa _is_sync_exception
stp x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
mrs x1, sctlr_el3
/* Disable MMU */
@@ -63,86 +63,86 @@ vector_base workaround_mmu_runtime_exceptions
* Current EL with SP_EL0 : 0x0 - 0x200
* ---------------------------------------------------------------------
*/
-vector_entry workaround_mmu_sync_exception_sp_el0
+vector_entry mmu_sync_exception_sp_el0
b sync_exception_sp_el0
- check_vector_size workaround_mmu_sync_exception_sp_el0
+ check_vector_size mmu_sync_exception_sp_el0
-vector_entry workaround_mmu_irq_sp_el0
+vector_entry mmu_irq_sp_el0
b irq_sp_el0
- check_vector_size workaround_mmu_irq_sp_el0
+ check_vector_size mmu_irq_sp_el0
-vector_entry workaround_mmu_fiq_sp_el0
+vector_entry mmu_fiq_sp_el0
b fiq_sp_el0
- check_vector_size workaround_mmu_fiq_sp_el0
+ check_vector_size mmu_fiq_sp_el0
-vector_entry workaround_mmu_serror_sp_el0
+vector_entry mmu_serror_sp_el0
b serror_sp_el0
- check_vector_size workaround_mmu_serror_sp_el0
+ check_vector_size mmu_serror_sp_el0
/* ---------------------------------------------------------------------
* Current EL with SP_ELx: 0x200 - 0x400
* ---------------------------------------------------------------------
*/
-vector_entry workaround_mmu_sync_exception_sp_elx
+vector_entry mmu_sync_exception_sp_elx
b sync_exception_sp_elx
- check_vector_size workaround_mmu_sync_exception_sp_elx
+ check_vector_size mmu_sync_exception_sp_elx
-vector_entry workaround_mmu_irq_sp_elx
+vector_entry mmu_irq_sp_elx
b irq_sp_elx
- check_vector_size workaround_mmu_irq_sp_elx
+ check_vector_size mmu_irq_sp_elx
-vector_entry workaround_mmu_fiq_sp_elx
+vector_entry mmu_fiq_sp_elx
b fiq_sp_elx
- check_vector_size workaround_mmu_fiq_sp_elx
+ check_vector_size mmu_fiq_sp_elx
-vector_entry workaround_mmu_serror_sp_elx
+vector_entry mmu_serror_sp_elx
b serror_sp_elx
- check_vector_size workaround_mmu_serror_sp_elx
+ check_vector_size mmu_serror_sp_elx
/* ---------------------------------------------------------------------
* Lower EL using AArch64 : 0x400 - 0x600
* ---------------------------------------------------------------------
*/
-vector_entry workaround_mmu_sync_exception_aarch64
- apply_workaround _is_sync_exception=1
+vector_entry mmu_sync_exception_aarch64
+ apply_cve_2017_5715_wa _is_sync_exception=1
b sync_exception_aarch64
- check_vector_size workaround_mmu_sync_exception_aarch64
+ check_vector_size mmu_sync_exception_aarch64
-vector_entry workaround_mmu_irq_aarch64
- apply_workaround _is_sync_exception=0
+vector_entry mmu_irq_aarch64
+ apply_cve_2017_5715_wa _is_sync_exception=0
b irq_aarch64
- check_vector_size workaround_mmu_irq_aarch64
+ check_vector_size mmu_irq_aarch64
-vector_entry workaround_mmu_fiq_aarch64
- apply_workaround _is_sync_exception=0
+vector_entry mmu_fiq_aarch64
+ apply_cve_2017_5715_wa _is_sync_exception=0
b fiq_aarch64
- check_vector_size workaround_mmu_fiq_aarch64
+ check_vector_size mmu_fiq_aarch64
-vector_entry workaround_mmu_serror_aarch64
- apply_workaround _is_sync_exception=0
+vector_entry mmu_serror_aarch64
+ apply_cve_2017_5715_wa _is_sync_exception=0
b serror_aarch64
- check_vector_size workaround_mmu_serror_aarch64
+ check_vector_size mmu_serror_aarch64
/* ---------------------------------------------------------------------
* Lower EL using AArch32 : 0x600 - 0x800
* ---------------------------------------------------------------------
*/
-vector_entry workaround_mmu_sync_exception_aarch32
- apply_workaround _is_sync_exception=1
+vector_entry mmu_sync_exception_aarch32
+ apply_cve_2017_5715_wa _is_sync_exception=1
b sync_exception_aarch32
- check_vector_size workaround_mmu_sync_exception_aarch32
+ check_vector_size mmu_sync_exception_aarch32
-vector_entry workaround_mmu_irq_aarch32
- apply_workaround _is_sync_exception=0
+vector_entry mmu_irq_aarch32
+ apply_cve_2017_5715_wa _is_sync_exception=0
b irq_aarch32
- check_vector_size workaround_mmu_irq_aarch32
+ check_vector_size mmu_irq_aarch32
-vector_entry workaround_mmu_fiq_aarch32
- apply_workaround _is_sync_exception=0
+vector_entry mmu_fiq_aarch32
+ apply_cve_2017_5715_wa _is_sync_exception=0
b fiq_aarch32
- check_vector_size workaround_mmu_fiq_aarch32
+ check_vector_size mmu_fiq_aarch32
-vector_entry workaround_mmu_serror_aarch32
- apply_workaround _is_sync_exception=0
+vector_entry mmu_serror_aarch32
+ apply_cve_2017_5715_wa _is_sync_exception=0
b serror_aarch32
- check_vector_size workaround_mmu_serror_aarch32
+ check_vector_size mmu_serror_aarch32
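The mmu variant's apply_cve_2017_5715_wa macro above saves x0/x1, reads SCTLR_EL3 and clears the MMU enable on entry to EL3 as its mitigation step. A hedged C sketch of that SCTLR_EL3.M toggle only; the bit position is architectural, the helper names are illustrative, and the real sequence is the assembly macro above:

#include <stdint.h>

#define SCTLR_M_BIT	(1ULL << 0)	/* SCTLR_EL3.M: MMU enable */

static inline uint64_t read_sctlr_el3(void)
{
	uint64_t v;
	__asm__ volatile("mrs %0, sctlr_el3" : "=r"(v));
	return v;
}

static inline void write_sctlr_el3(uint64_t v)
{
	__asm__ volatile("msr sctlr_el3, %0\n\tisb" : : "r"(v) : "memory");
}

static inline void el3_mmu_disable(void)
{
	write_sctlr_el3(read_sctlr_el3() & ~SCTLR_M_BIT);	/* Disable MMU */
}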
diff --git a/services/arm_arch_svc/arm_arch_svc_setup.c b/services/arm_arch_svc/arm_arch_svc_setup.c
index eb736c06..c357ebdb 100644
--- a/services/arm_arch_svc/arm_arch_svc_setup.c
+++ b/services/arm_arch_svc/arm_arch_svc_setup.c
@@ -10,7 +10,7 @@
#include <runtime_svc.h>
#include <smccc.h>
#include <smccc_helpers.h>
-#include <workaround_cve_2017_5715.h>
+#include <wa_cve_2017_5715.h>
static int32_t smccc_version(void)
{
@@ -25,7 +25,7 @@ static int32_t smccc_arch_features(u_register_t arg)
return SMC_OK;
#if WORKAROUND_CVE_2017_5715
case SMCCC_ARCH_WORKAROUND_1:
- if (check_workaround_cve_2017_5715() == ERRATA_NOT_APPLIES)
+ if (check_wa_cve_2017_5715() == ERRATA_NOT_APPLIES)
return 1;
return 0; /* ERRATA_APPLIES || ERRATA_MISSING */
#endif