target/arm: Add ALIGN_MEM to TBFLAG_ANY
Use this to signal when memory access alignment is required.
This bit comes from the CCR.UNALIGN_TRP bit for M-profile, and
from the SCTLR_ELx.A bit for A-profile.
Reviewed-by: Peter Maydell <peter.maydell@linaro.org>
Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
Message-id: 20210419202257.161730-11-richard.henderson@linaro.org
Signed-off-by: Peter Maydell <peter.maydell@linaro.org>
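As context for the two sources of this bit: on A-profile, strict
alignment checking is enabled by SCTLR_ELx.A; on M-profile, by
CCR.UNALIGN_TRP, a memory-mapped register bit. A minimal guest-side
sketch for M-profile (the SCB address and bit position come from the
ARMv7-M ARM, not from this patch):

    #include <stdint.h>

    /* ARMv7-M: SCB->CCR is at 0xE000ED14; UNALIGN_TRP is bit 3. */
    #define SCB_CCR          (*(volatile uint32_t *)0xE000ED14UL)
    #define CCR_UNALIGN_TRP  (1u << 3)

    static void enable_alignment_traps(void)
    {
        /* After this, unaligned loads/stores raise a UsageFault. */
        SCB_CCR |= CCR_UNALIGN_TRP;
    }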
diff --git a/target/arm/cpu.h b/target/arm/cpu.h
index 5e0131b..616b393 100644
--- a/target/arm/cpu.h
+++ b/target/arm/cpu.h
@@ -3414,6 +3414,8 @@
FIELD(TBFLAG_ANY, FPEXC_EL, 8, 2)
/* For A-profile only, target EL for debug exceptions. */
FIELD(TBFLAG_ANY, DEBUG_TARGET_EL, 10, 2)
+/* Memory operations require alignment: SCTLR_ELx.A or CCR.UNALIGN_TRP */
+FIELD(TBFLAG_ANY, ALIGN_MEM, 12, 1)

/*
* Bit usage when in AArch32 state, both A- and M-profile.
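FIELD() here is the hw/registerfields.h macro, which generates shift,
length and mask constants for the named bit field; the DP_TBFLAG_ANY
and EX_TBFLAG_ANY wrappers used in the rest of this patch deposit and
extract that field from the cached flags word. Roughly (a paraphrase
of the existing wrappers, which this patch does not change):

    /* Producer side, in the rebuild_hflags_* functions below: */
    DP_TBFLAG_ANY(flags, ALIGN_MEM, 1);
    /* ~ flags.flags = FIELD_DP32(flags.flags, TBFLAG_ANY, ALIGN_MEM, 1); */

    /* Consumer side, once per TB in the translators: */
    dc->align_mem = EX_TBFLAG_ANY(tb_flags, ALIGN_MEM);
    /* ~ FIELD_EX32(tb_flags.flags, TBFLAG_ANY, ALIGN_MEM) */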
diff --git a/target/arm/helper.c b/target/arm/helper.c
index 4aa7650..9b1b987 100644
--- a/target/arm/helper.c
+++ b/target/arm/helper.c
@@ -13018,6 +13018,12 @@
ARMMMUIdx mmu_idx)
{
CPUARMTBFlags flags = {};
+ uint32_t ccr = env->v7m.ccr[env->v7m.secure];
+
+ /* Without HaveMainExt, CCR.UNALIGN_TRP is RES1. */
+ if (ccr & R_V7M_CCR_UNALIGN_TRP_MASK) {
+ DP_TBFLAG_ANY(flags, ALIGN_MEM, 1);
+    }

if (arm_v7m_is_handler_mode(env)) {
DP_TBFLAG_M32(flags, HANDLER, 1);
@@ -13030,7 +13036,7 @@
*/
if (arm_feature(env, ARM_FEATURE_V8) &&
!((mmu_idx & ARM_MMU_IDX_M_NEGPRI) &&
- (env->v7m.ccr[env->v7m.secure] & R_V7M_CCR_STKOFHFNMIGN_MASK))) {
+ (ccr & R_V7M_CCR_STKOFHFNMIGN_MASK))) {
DP_TBFLAG_M32(flags, STACKCHECK, 1);
}
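The RES1 comment above is what lets the translate.c change below drop
its ARM_FEATURE_M_MAIN test: on cores without the Main Extension,
reset forces the bit on, so the single ccr check covers both cases. A
paraphrase of the relevant reset logic in target/arm/cpu.c (not part
of this diff):

    if (!arm_feature(env, ARM_FEATURE_M_MAIN)) {
        /* UNALIGN_TRP is RES1 without the Main Extension. */
        env->v7m.ccr[M_REG_NS] |= R_V7M_CCR_UNALIGN_TRP_MASK;
        env->v7m.ccr[M_REG_S] |= R_V7M_CCR_UNALIGN_TRP_MASK;
    }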
@@ -13049,12 +13055,17 @@
ARMMMUIdx mmu_idx)
{
CPUARMTBFlags flags = rebuild_hflags_aprofile(env);
+ int el = arm_current_el(env);
+
+ if (arm_sctlr(env, el) & SCTLR_A) {
+ DP_TBFLAG_ANY(flags, ALIGN_MEM, 1);
+    }

if (arm_el_is_aa64(env, 1)) {
DP_TBFLAG_A32(flags, VFPEN, 1);
}
- if (arm_current_el(env) < 2 && env->cp15.hstr_el2 &&
+ if (el < 2 && env->cp15.hstr_el2 &&
(arm_hcr_el2_eff(env) & (HCR_E2H | HCR_TGE)) != (HCR_E2H | HCR_TGE)) {
DP_TBFLAG_A32(flags, HSTR_ACTIVE, 1);
}
@@ -13099,6 +13110,10 @@
sctlr = regime_sctlr(env, stage1);

+    if (sctlr & SCTLR_A) {
+ DP_TBFLAG_ANY(flags, ALIGN_MEM, 1);
+ }
+
if (arm_cpu_data_is_big_endian_a64(el, sctlr)) {
DP_TBFLAG_ANY(flags, BE_DATA, 1);
}
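Caching the A bit in the TB flags is only sound if hflags are rebuilt
whenever the underlying register can change; QEMU's SCTLR write path
already does this. A minimal sketch of the invariant (a hypothetical
helper standing in for the real sctlr_write hook in helper.c):

    static void write_sctlr_el1(CPUARMState *env, uint64_t value)
    {
        env->cp15.sctlr_el[1] = value;
        /* Re-run the rebuild_hflags_* functions above, so the next TB
           picks up any change to SCTLR.A (and thus ALIGN_MEM). */
        arm_rebuild_hflags(env);
    }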
diff --git a/target/arm/translate-a64.c b/target/arm/translate-a64.c
index b32ff56..92a62b1 100644
--- a/target/arm/translate-a64.c
+++ b/target/arm/translate-a64.c
@@ -14697,6 +14697,7 @@
dc->user = (dc->current_el == 0);
#endif
dc->fp_excp_el = EX_TBFLAG_ANY(tb_flags, FPEXC_EL);
+ dc->align_mem = EX_TBFLAG_ANY(tb_flags, ALIGN_MEM);
dc->sve_excp_el = EX_TBFLAG_A64(tb_flags, SVEEXC_EL);
dc->sve_len = (EX_TBFLAG_A64(tb_flags, ZCR_LEN) + 1) * 16;
dc->pauth_active = EX_TBFLAG_A64(tb_flags, PAUTH_ACTIVE);
diff --git a/target/arm/translate.c b/target/arm/translate.c
index 6a15e5d..970e537 100644
--- a/target/arm/translate.c
+++ b/target/arm/translate.c
@@ -933,8 +933,7 @@
{
TCGv addr;

-    if (arm_dc_feature(s, ARM_FEATURE_M) &&
- !arm_dc_feature(s, ARM_FEATURE_M_MAIN)) {
+ if (s->align_mem) {
opc |= MO_ALIGN;
}
@@ -948,8 +947,7 @@
{
TCGv addr;

-    if (arm_dc_feature(s, ARM_FEATURE_M) &&
- !arm_dc_feature(s, ARM_FEATURE_M_MAIN)) {
+ if (s->align_mem) {
opc |= MO_ALIGN;
}
@@ -8877,6 +8875,7 @@
dc->user = (dc->current_el == 0);
#endif
dc->fp_excp_el = EX_TBFLAG_ANY(tb_flags, FPEXC_EL);
+    dc->align_mem = EX_TBFLAG_ANY(tb_flags, ALIGN_MEM);

if (arm_feature(env, ARM_FEATURE_M)) {
dc->vfp_enabled = 1;
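With s->align_mem set, every access emitted through these helpers
carries MO_ALIGN, and the TCG memory subsystem invokes the target's
unaligned-access hook when the address check fails; for Arm that ends
in an alignment fault. Conceptually (a sketch of the check, not the
real softmmu fast path):

    static void check_alignment(CPUState *cs, vaddr addr, int size,
                                MMUAccessType access_type,
                                int mmu_idx, uintptr_t retaddr)
    {
        if (addr & (size - 1)) {
            /* Raises the guest alignment fault for Arm targets. */
            arm_cpu_do_unaligned_access(cs, addr, access_type,
                                        mmu_idx, retaddr);
        }
    }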
diff --git a/target/arm/translate.h b/target/arm/translate.h
index 50c2aba..b185c14 100644
--- a/target/arm/translate.h
+++ b/target/arm/translate.h
@@ -87,6 +87,8 @@
bool bt;
/* True if any CP15 access is trapped by HSTR_EL2 */
bool hstr_active;
+ /* True if memory operations require alignment */
+ bool align_mem;
/*
* >= 0, a copy of PSTATE.BTYPE, which will be 0 without v8.5-BTI.
* < 0, set by the current instruction.