csel \tmp, \tmp2, \tmp, eq
msr hcr_el2, \tmp
+ /*
+ * Detect whether the system has a configurable memory system
+ * architecture at EL1&0.  MSA lives in ID_AA64MMFR0_EL1 bits
+ * [51:48] and MSA_frac in bits [55:52]; shift both fields down
+ * to bit 0 before masking and comparing.
+ */
+ mrs \tmp, id_aa64mmfr0_el1
+ lsr \tmp, \tmp, #48
+ and \tmp, \tmp, #((ID_AA64MMFR0_EL1_MSA_MASK | \
+ ID_AA64MMFR0_EL1_MSA_FRAC_MASK) >> 48)
+ /*
+ * The architecture is only selectable when MSA reads as
+ * "use MSA_frac" AND MSA_frac reports that EL1&0 supports
+ * VMSA; in every other case leave VTCR_EL2 untouched.
+ */
+ cmp \tmp, #((ID_AA64MMFR0_EL1_MSA_USE_FRAC | \
+ ID_AA64MMFR0_EL1_MSA_FRAC_VMSA) >> 48)
+ bne 2f
+
+ /* Ensure the EL1&0 VMSA is enabled (set VTCR_EL2.MSA) */
+ mov \tmp, #(VTCR_EL2_MSA)
+ msr vtcr_el2, \tmp
+2:
/* Return to the EL1_SP1 mode from EL2 */
ldr \tmp, =(SPSR_EL_DEBUG_MASK | SPSR_EL_SERR_MASK |\
SPSR_EL_IRQ_MASK | SPSR_EL_FIQ_MASK |\
#define HCR_EL2_RW_AARCH32 (0 << 31) /* Lower levels are AArch32 */
#define HCR_EL2_HCD_DIS (1 << 29) /* Hypervisor Call disabled */
+/*
+ * VTCR_EL2 bits definitions
+ */
+#define VTCR_EL2_MSA (1UL << 31) /* EL1&0 memory architecture: VMSA */
+
+/*
+ * ID_AA64MMFR0_EL1 bits definitions
+ *
+ * MSA (bits [51:48]) reports the memory system architecture at
+ * EL1&0; when it reads USE_FRAC, MSA_frac (bits [55:52]) holds
+ * the supported architecture.
+ */
+/* Memory system architecture frac */
+#define ID_AA64MMFR0_EL1_MSA_FRAC_MASK (0xFUL << 52)
+/* EL1&0 supports VMSA */
+#define ID_AA64MMFR0_EL1_MSA_FRAC_VMSA (0x2UL << 52)
+/* EL1&0 only supports PMSA */
+#define ID_AA64MMFR0_EL1_MSA_FRAC_PMSA (0x1UL << 52)
+/* No PMSA support */
+#define ID_AA64MMFR0_EL1_MSA_FRAC_NO_PMSA (0x0UL << 52)
+/* Memory system architecture */
+#define ID_AA64MMFR0_EL1_MSA_MASK (0xFUL << 48)
+/* Use MSA_FRAC */
+#define ID_AA64MMFR0_EL1_MSA_USE_FRAC (0xFUL << 48)
+/* Memory system architecture is VMSA */
+#define ID_AA64MMFR0_EL1_MSA_VMSA (0x0UL << 48)
+
/*
* ID_AA64ISAR1_EL1 bits definitions
*/