/*
 * PowerPC MMU, TLB, SLB and BAT emulation helpers for QEMU.
 *
 * Copyright (c) 2003-2007 Jocelyn Mayer
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */

#include "qemu/osdep.h"
#include "qemu/units.h"
#include "cpu.h"
#include "sysemu/kvm.h"
#include "kvm_ppc.h"
#include "mmu-hash64.h"
#include "mmu-hash32.h"
#include "exec/exec-all.h"
#include "exec/page-protection.h"
#include "exec/log.h"
#include "helper_regs.h"
#include "qemu/error-report.h"
#include "qemu/qemu-print.h"
#include "internal.h"
#include "mmu-book3s-v3.h"
#include "mmu-radix64.h"
#include "mmu-booke.h"
#include "exec/helper-proto.h"
#include "exec/cpu_ldst.h"

/* #define FLUSH_ALL_TLBS */

/*****************************************************************************/
/* PowerPC MMU emulation */

/* Software driven TLB helpers */
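/*
 * The 6xx software-managed TLB is modelled as the flat array env->tlb.tlb6
 * holding both the data and the instruction entries, hence the
 * 2 * env->nb_tlb bound used when every entry has to be invalidated.
 */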
static inline void ppc6xx_tlb_invalidate_all(CPUPPCState *env)
{
    ppc6xx_tlb_t *tlb;
    int nr, max = 2 * env->nb_tlb;

    for (nr = 0; nr < max; nr++) {
        tlb = &env->tlb.tlb6[nr];
        pte_invalidate(&tlb->pte0);
    }
    tlb_flush(env_cpu(env));
}

static inline void ppc6xx_tlb_invalidate_virt2(CPUPPCState *env,
                                               target_ulong eaddr,
                                               int is_code, int match_epn)
{
#if !defined(FLUSH_ALL_TLBS)
    CPUState *cs = env_cpu(env);
    ppc6xx_tlb_t *tlb;
    int way, nr;

    /* Invalidate ITLB + DTLB, all ways */
    for (way = 0; way < env->nb_ways; way++) {
        nr = ppc6xx_tlb_getnum(env, eaddr, way, is_code);
        tlb = &env->tlb.tlb6[nr];
        if (pte_is_valid(tlb->pte0) && (match_epn == 0 || eaddr == tlb->EPN)) {
            qemu_log_mask(CPU_LOG_MMU, "TLB invalidate %d/%d "
                          TARGET_FMT_lx "\n", nr, env->nb_tlb, eaddr);
            pte_invalidate(&tlb->pte0);
            tlb_flush_page(cs, tlb->EPN);
        }
    }
#else
    /* XXX: the PowerPC specification says this is valid as well */
    ppc6xx_tlb_invalidate_all(env);
#endif
}

static inline void ppc6xx_tlb_invalidate_virt(CPUPPCState *env,
                                              target_ulong eaddr, int is_code)
{
    ppc6xx_tlb_invalidate_virt2(env, eaddr, is_code, 0);
}

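/*
 * Install a (pte0, pte1) pair in the 6xx software TLB entry selected by
 * EPN and way, dropping whatever translation QEMU may still have cached
 * for that virtual address first.
 */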
static void ppc6xx_tlb_store(CPUPPCState *env, target_ulong EPN, int way,
                             int is_code, target_ulong pte0, target_ulong pte1)
{
    ppc6xx_tlb_t *tlb;
    int nr;

    nr = ppc6xx_tlb_getnum(env, EPN, way, is_code);
    tlb = &env->tlb.tlb6[nr];
    qemu_log_mask(CPU_LOG_MMU, "Set TLB %d/%d EPN " TARGET_FMT_lx " PTE0 "
                  TARGET_FMT_lx " PTE1 " TARGET_FMT_lx "\n", nr, env->nb_tlb,
                  EPN, pte0, pte1);
    /* Invalidate any pending reference in QEMU for this virtual address */
    ppc6xx_tlb_invalidate_virt2(env, EPN, is_code, 1);
    tlb->pte0 = pte0;
    tlb->pte1 = pte1;
    tlb->EPN = EPN;
    /* Store last way for LRU mechanism */
    env->last_way = way;
}

/* Helpers specific to PowerPC 40x implementations */
static inline void ppc4xx_tlb_invalidate_all(CPUPPCState *env)
{
    ppcemb_tlb_t *tlb;
    int i;

    for (i = 0; i < env->nb_tlb; i++) {
        tlb = &env->tlb.tlbe[i];
        tlb->prot &= ~PAGE_VALID;
    }
    tlb_flush(env_cpu(env));
}

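/*
 * Flush the BookE 2.06 TLB arrays selected by the 'flags' bitmask (bit n
 * selects TLBn).  When check_iprot is set, entries with MAS1[IPROT] are
 * preserved, as invalidation-protected entries must survive.
 */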
static void booke206_flush_tlb(CPUPPCState *env, int flags,
                               const int check_iprot)
{
    int tlb_size;
    int i, j;
    ppcmas_tlb_t *tlb = env->tlb.tlbm;

    for (i = 0; i < BOOKE206_MAX_TLBN; i++) {
        if (flags & (1 << i)) {
            tlb_size = booke206_tlb_size(env, i);
            for (j = 0; j < tlb_size; j++) {
                if (!check_iprot || !(tlb[j].mas1 & MAS1_IPROT)) {
                    tlb[j].mas1 &= ~MAS1_VALID;
                }
            }
        }
        tlb += booke206_tlb_size(env, i);
    }

    tlb_flush(env_cpu(env));
}

/*****************************************************************************/
/* BATs management */
#if !defined(FLUSH_ALL_TLBS)
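/*
 * Drop any QEMU TLB entries covered by a BAT mapping.  The range is derived
 * from BEPI in the upper BAT word and the block length mask; flushing more
 * than 1024 pages one at a time is slower than a complete flush, so large
 * blocks fall back to tlb_flush().
 */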
static inline void do_invalidate_BAT(CPUPPCState *env, target_ulong BATu,
                                     target_ulong mask)
{
    CPUState *cs = env_cpu(env);
    target_ulong base, end, page;

    base = BATu & ~0x0001FFFF;
    end = base + mask + 0x00020000;
    if (((end - base) >> TARGET_PAGE_BITS) > 1024) {
        /* Flushing 1024 4K pages is slower than a complete flush */
        qemu_log_mask(CPU_LOG_MMU, "Flush all BATs\n");
        tlb_flush(cs);
        qemu_log_mask(CPU_LOG_MMU, "Flush done\n");
        return;
    }
    qemu_log_mask(CPU_LOG_MMU, "Flush BAT from " TARGET_FMT_lx
                  " to " TARGET_FMT_lx " (" TARGET_FMT_lx ")\n",
                  base, end, mask);
    for (page = base; page != end; page += TARGET_PAGE_SIZE) {
        tlb_flush_page(cs, page);
    }
    qemu_log_mask(CPU_LOG_MMU, "Flush done\n");
}
#endif

static inline void dump_store_bat(CPUPPCState *env, char ID, int ul, int nr,
                                  target_ulong value)
{
    qemu_log_mask(CPU_LOG_MMU, "Set %cBAT%d%c to " TARGET_FMT_lx " ("
                  TARGET_FMT_lx ")\n", ID, nr, ul == 0 ? 'u' : 'l',
                  value, env->nip);
}

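/*
 * BAT register store helpers.  For the upper BAT words, the block length
 * field is turned into an address mask ((value << 15) & 0x0FFE0000) that is
 * used both to invalidate the range previously mapped by the BAT and to
 * clear the BEPI/BRPN bits that lie below the block size.
 */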
void helper_store_ibatu(CPUPPCState *env, uint32_t nr, target_ulong value)
{
    target_ulong mask;

    dump_store_bat(env, 'I', 0, nr, value);
    if (env->IBAT[0][nr] != value) {
        mask = (value << 15) & 0x0FFE0000UL;
#if !defined(FLUSH_ALL_TLBS)
        do_invalidate_BAT(env, env->IBAT[0][nr], mask);
#endif
        /*
         * When storing valid upper BAT, mask BEPI and BRPN and
         * invalidate all TLBs covered by this BAT
         */
        mask = (value << 15) & 0x0FFE0000UL;
        env->IBAT[0][nr] = (value & 0x00001FFFUL) |
            (value & ~0x0001FFFFUL & ~mask);
        env->IBAT[1][nr] = (env->IBAT[1][nr] & 0x0000007B) |
            (env->IBAT[1][nr] & ~0x0001FFFF & ~mask);
#if !defined(FLUSH_ALL_TLBS)
        do_invalidate_BAT(env, env->IBAT[0][nr], mask);
#else
        tlb_flush(env_cpu(env));
#endif
    }
}

void helper_store_ibatl(CPUPPCState *env, uint32_t nr, target_ulong value)
{
    dump_store_bat(env, 'I', 1, nr, value);
    env->IBAT[1][nr] = value;
}

void helper_store_dbatu(CPUPPCState *env, uint32_t nr, target_ulong value)
{
    target_ulong mask;

    dump_store_bat(env, 'D', 0, nr, value);
    if (env->DBAT[0][nr] != value) {
        /*
         * When storing valid upper BAT, mask BEPI and BRPN and
         * invalidate all TLBs covered by this BAT
         */
        mask = (value << 15) & 0x0FFE0000UL;
#if !defined(FLUSH_ALL_TLBS)
        do_invalidate_BAT(env, env->DBAT[0][nr], mask);
#endif
        mask = (value << 15) & 0x0FFE0000UL;
        env->DBAT[0][nr] = (value & 0x00001FFFUL) |
            (value & ~0x0001FFFFUL & ~mask);
        env->DBAT[1][nr] = (env->DBAT[1][nr] & 0x0000007B) |
            (env->DBAT[1][nr] & ~0x0001FFFF & ~mask);
#if !defined(FLUSH_ALL_TLBS)
        do_invalidate_BAT(env, env->DBAT[0][nr], mask);
#else
        tlb_flush(env_cpu(env));
#endif
    }
}

void helper_store_dbatl(CPUPPCState *env, uint32_t nr, target_ulong value)
{
    dump_store_bat(env, 'D', 1, nr, value);
    env->DBAT[1][nr] = value;
}

/*****************************************************************************/
/* TLB management */
void ppc_tlb_invalidate_all(CPUPPCState *env)
{
#if defined(TARGET_PPC64)
    if (mmu_is_64bit(env->mmu_model)) {
        env->tlb_need_flush = 0;
        tlb_flush(env_cpu(env));
    } else
#endif /* defined(TARGET_PPC64) */
    switch (env->mmu_model) {
    case POWERPC_MMU_SOFT_6xx:
        ppc6xx_tlb_invalidate_all(env);
        break;
    case POWERPC_MMU_SOFT_4xx:
        ppc4xx_tlb_invalidate_all(env);
        break;
    case POWERPC_MMU_REAL:
        cpu_abort(env_cpu(env), "No TLB for PowerPC 4xx in real mode\n");
        break;
    case POWERPC_MMU_MPC8xx:
        /* XXX: TODO */
        cpu_abort(env_cpu(env), "MPC8xx MMU model is not implemented\n");
        break;
    case POWERPC_MMU_BOOKE:
        tlb_flush(env_cpu(env));
        break;
    case POWERPC_MMU_BOOKE206:
        booke206_flush_tlb(env, -1, 0);
        break;
    case POWERPC_MMU_32B:
        env->tlb_need_flush = 0;
        tlb_flush(env_cpu(env));
        break;
    default:
        /* XXX: TODO */
        cpu_abort(env_cpu(env), "Unknown MMU model %x\n", env->mmu_model);
        break;
    }
}

void ppc_tlb_invalidate_one(CPUPPCState *env, target_ulong addr)
{
#if !defined(FLUSH_ALL_TLBS)
    addr &= TARGET_PAGE_MASK;
#if defined(TARGET_PPC64)
    if (mmu_is_64bit(env->mmu_model)) {
        /* tlbie invalidates TLBs for all segments */
        /*
         * XXX: given the fact that there are too many segments to invalidate,
         * and we still don't have a tlb_flush_mask(env, n, mask) in QEMU,
         * we just invalidate all TLBs
         */
        env->tlb_need_flush |= TLB_NEED_LOCAL_FLUSH;
    } else
#endif /* defined(TARGET_PPC64) */
    switch (env->mmu_model) {
    case POWERPC_MMU_SOFT_6xx:
        ppc6xx_tlb_invalidate_virt(env, addr, 0);
        ppc6xx_tlb_invalidate_virt(env, addr, 1);
        break;
    case POWERPC_MMU_32B:
        /*
         * Actual CPUs invalidate entire congruence classes based on
         * the geometry of their TLBs and some OSes take that into
         * account, we just mark the TLB to be flushed later (context
         * synchronizing event or sync instruction on 32-bit).
         */
        env->tlb_need_flush |= TLB_NEED_LOCAL_FLUSH;
        break;
    default:
        /* Should never reach here with other MMU models */
        assert(0);
    }
#else
    ppc_tlb_invalidate_all(env);
#endif
}

/*****************************************************************************/
/* Special registers manipulation */

/* Segment registers load and store */
target_ulong helper_load_sr(CPUPPCState *env, target_ulong sr_num)
{
#if defined(TARGET_PPC64)
    if (mmu_is_64bit(env->mmu_model)) {
        /* XXX */
        return 0;
    }
#endif
    return env->sr[sr_num];
}

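/*
 * On 64-bit hash MMUs the 32-bit segment registers are emulated on top of
 * the SLB: the SR number provides the ESID and the SR value provides the
 * VSID and protection flags of the corresponding SLB entry.  On 32-bit
 * MMUs the value is stored directly and the TLB is marked for flushing.
 */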
void helper_store_sr(CPUPPCState *env, target_ulong srnum, target_ulong value)
{
    qemu_log_mask(CPU_LOG_MMU,
                  "%s: reg=%d " TARGET_FMT_lx " " TARGET_FMT_lx "\n", __func__,
                  (int)srnum, value, env->sr[srnum]);
#if defined(TARGET_PPC64)
    if (mmu_is_64bit(env->mmu_model)) {
        PowerPCCPU *cpu = env_archcpu(env);
        uint64_t esid, vsid;

        /* ESID = srnum */
        esid = ((uint64_t)(srnum & 0xf) << 28) | SLB_ESID_V;

        /* VSID = VSID */
        vsid = (value & 0xfffffff) << 12;
        /* flags = flags */
        vsid |= ((value >> 27) & 0xf) << 8;

        ppc_store_slb(cpu, srnum, esid, vsid);
    } else
#endif
    if (env->sr[srnum] != value) {
        env->sr[srnum] = value;
        /*
         * Invalidating 256 MB of virtual memory in 4 kB pages takes far
         * longer than simply flushing the whole TLB.
         */
#if !defined(FLUSH_ALL_TLBS) && 0
        {
            target_ulong page, end;
            /* Invalidate 256 MB of virtual memory */
            page = (16 << 20) * srnum;
            end = page + (16 << 20);
            for (; page != end; page += TARGET_PAGE_SIZE) {
                tlb_flush_page(env_cpu(env), page);
            }
        }
#else
        env->tlb_need_flush |= TLB_NEED_LOCAL_FLUSH;
#endif
    }
}

/* TLB management */
void helper_tlbia(CPUPPCState *env)
{
    ppc_tlb_invalidate_all(env);
}

void helper_tlbie(CPUPPCState *env, target_ulong addr)
{
    ppc_tlb_invalidate_one(env, addr);
}

#if defined(TARGET_PPC64)

/* Invalidation Selector */
#define TLBIE_IS_VA         0
#define TLBIE_IS_PID        1
#define TLBIE_IS_LPID       2
#define TLBIE_IS_ALL        3

/* Radix Invalidation Control */
#define TLBIE_RIC_TLB       0
#define TLBIE_RIC_PWC       1
#define TLBIE_RIC_ALL       2
#define TLBIE_RIC_GRP       3

/* Radix Actual Page sizes */
#define TLBIE_R_AP_4K       0
#define TLBIE_R_AP_64K      5
#define TLBIE_R_AP_2M       1
#define TLBIE_R_AP_1G       2

/* RB field masks */
#define TLBIE_RB_EPN_MASK   PPC_BITMASK(0, 51)
#define TLBIE_RB_IS_MASK    PPC_BITMASK(52, 53)
#define TLBIE_RB_AP_MASK    PPC_BITMASK(56, 58)

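/*
 * ISA v3.00 tlbie[l] emulation.  RB supplies the effective address, the
 * invalidation selector (IS) and the actual page size (AP); RIC, PRS, R and
 * the local flag arrive via 'flags'.  Only targeted invalidation of a single
 * Radix page is implemented: page-walk-cache forms are no-ops because QEMU
 * does not cache page walks, other valid forms fall back to a full local or
 * global flush, and invalid forms raise a program interrupt.
 */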
void helper_tlbie_isa300(CPUPPCState *env, target_ulong rb, target_ulong rs,
                         uint32_t flags)
{
    unsigned ric = (flags & TLBIE_F_RIC_MASK) >> TLBIE_F_RIC_SHIFT;
    /*
     * With the exception of the checks for invalid instruction forms,
     * PRS is currently ignored, because we don't know if a given TLB entry
     * is process or partition scoped.
     */
    bool prs = flags & TLBIE_F_PRS;
    bool r = flags & TLBIE_F_R;
    bool local = flags & TLBIE_F_LOCAL;
    bool effR;
    unsigned is = extract64(rb, PPC_BIT_NR(53), 2);
    unsigned ap; /* actual page size */
    target_ulong addr, pgoffs_mask;

    qemu_log_mask(CPU_LOG_MMU,
        "%s: local=%d addr=" TARGET_FMT_lx " ric=%u prs=%d r=%d is=%u\n",
        __func__, local, rb & TARGET_PAGE_MASK, ric, prs, r, is);

    effR = FIELD_EX64(env->msr, MSR, HV) ? r : env->spr[SPR_LPCR] & LPCR_HR;

    /* Partial TLB invalidation is supported for Radix only for now. */
    if (!effR) {
        goto inval_all;
    }

    /* Check for invalid instruction forms (effR=1). */
    if (unlikely(ric == TLBIE_RIC_GRP ||
                 ((ric == TLBIE_RIC_PWC || ric == TLBIE_RIC_ALL) &&
                  is == TLBIE_IS_VA) ||
                 (!prs && is == TLBIE_IS_PID))) {
        qemu_log_mask(LOG_GUEST_ERROR,
            "%s: invalid instruction form: ric=%u prs=%d r=%d is=%u\n",
            __func__, ric, prs, r, is);
        goto invalid;
    }

    /* We don't cache Page Walks. */
    if (ric == TLBIE_RIC_PWC) {
        if (local) {
            unsigned set = extract64(rb, PPC_BIT_NR(51), 12);
            if (set != 0) {
                qemu_log_mask(LOG_GUEST_ERROR, "%s: invalid set: %d\n",
                              __func__, set);
                goto invalid;
            }
        }
        return;
    }

    /*
     * Invalidation by LPID or PID is not supported, so fallback
     * to full TLB flush in these cases.
     */
    if (is != TLBIE_IS_VA) {
        goto inval_all;
    }

    /*
     * The results of an attempt to invalidate a translation outside of
     * quadrant 0 for Radix Tree translation (effR=1, RIC=0, PRS=1, IS=0,
     * and EA 0:1 != 0b00) are boundedly undefined.
     */
    if (unlikely(ric == TLBIE_RIC_TLB && prs && is == TLBIE_IS_VA &&
                 (rb & R_EADDR_QUADRANT) != R_EADDR_QUADRANT0)) {
        qemu_log_mask(LOG_GUEST_ERROR,
            "%s: attempt to invalidate a translation outside of quadrant 0\n",
            __func__);
        goto inval_all;
    }

    assert(is == TLBIE_IS_VA);
    assert(ric == TLBIE_RIC_TLB || ric == TLBIE_RIC_ALL);

    ap = extract64(rb, PPC_BIT_NR(58), 3);
    switch (ap) {
    case TLBIE_R_AP_4K:
        pgoffs_mask = 0xfffull;
        break;

    case TLBIE_R_AP_64K:
        pgoffs_mask = 0xffffull;
        break;

    case TLBIE_R_AP_2M:
        pgoffs_mask = 0x1fffffull;
        break;

    case TLBIE_R_AP_1G:
        pgoffs_mask = 0x3fffffffull;
        break;

    default:
        /*
         * If the value specified in RS 0:31, RS 32:63, RB 54:55, RB 56:58,
         * RB 44:51, or RB 56:63, when it is needed to perform the specified
         * operation, is not supported by the implementation, the instruction
         * is treated as if the instruction form were invalid.
         */
        qemu_log_mask(LOG_GUEST_ERROR, "%s: invalid AP: %d\n", __func__, ap);
        goto invalid;
    }

    addr = rb & TLBIE_RB_EPN_MASK & ~pgoffs_mask;

    if (local) {
        tlb_flush_page(env_cpu(env), addr);
    } else {
        tlb_flush_page_all_cpus_synced(env_cpu(env), addr);
    }
    return;

inval_all:
    env->tlb_need_flush |= TLB_NEED_LOCAL_FLUSH;
    if (!local) {
        env->tlb_need_flush |= TLB_NEED_GLOBAL_FLUSH;
    }
    return;

invalid:
    raise_exception_err_ra(env, POWERPC_EXCP_PROGRAM,
                           POWERPC_EXCP_INVAL |
                           POWERPC_EXCP_INVAL_INVAL, GETPC());
}

#endif

void helper_tlbiva(CPUPPCState *env, target_ulong addr)
{
    /* tlbiva instruction only exists on BookE */
    assert(env->mmu_model == POWERPC_MMU_BOOKE);
    /* XXX: TODO */
    cpu_abort(env_cpu(env), "BookE MMU model is not implemented\n");
}

/* Software driven TLBs management */
/* PowerPC 602/603 software TLB load instructions helpers */
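/*
 * On a 602/603 software TLB miss the faulting address, the compare value
 * and the PTE to load are passed in the IMISS/ICMP (or DMISS/DCMP) and RPA
 * SPRs, and SRR1 bit 17 selects the way to replace; the TLB load
 * instructions simply copy that state into the software TLB.
 */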
static void do_6xx_tlb(CPUPPCState *env, target_ulong new_EPN, int is_code)
{
    target_ulong RPN, CMP, EPN;
    int way;

    RPN = env->spr[SPR_RPA];
    if (is_code) {
        CMP = env->spr[SPR_ICMP];
        EPN = env->spr[SPR_IMISS];
    } else {
        CMP = env->spr[SPR_DCMP];
        EPN = env->spr[SPR_DMISS];
    }
    way = (env->spr[SPR_SRR1] >> 17) & 1;
    (void)EPN; /* avoid a compiler warning */
    qemu_log_mask(CPU_LOG_MMU, "%s: EPN " TARGET_FMT_lx " " TARGET_FMT_lx
                  " PTE0 " TARGET_FMT_lx " PTE1 " TARGET_FMT_lx " way %d\n",
                  __func__, new_EPN, EPN, CMP, RPN, way);
    /* Store this TLB */
    ppc6xx_tlb_store(env, (uint32_t)(new_EPN & TARGET_PAGE_MASK),
                     way, is_code, CMP, RPN);
}

void helper_6xx_tlbd(CPUPPCState *env, target_ulong EPN)
{
    do_6xx_tlb(env, EPN, 0);
}

void helper_6xx_tlbi(CPUPPCState *env, target_ulong EPN)
{
    do_6xx_tlb(env, EPN, 1);
}

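/*
 * BookE/4xx TSIZE encoding used below: page size = 1 KiB << (2 * size),
 * i.e. 0 -> 1 KiB, 1 -> 4 KiB, 2 -> 16 KiB, ... 0x9 -> 256 MiB,
 * 0xA -> 1 GiB.  booke_page_size_to_tlb() is the inverse mapping and
 * returns -1 for page sizes that have no encoding.
 */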
static inline target_ulong booke_tlb_to_page_size(int size)
{
    return 1024 << (2 * size);
}

static inline int booke_page_size_to_tlb(target_ulong page_size)
{
    int size;

    switch (page_size) {
    case 0x00000400UL:
        size = 0x0;
        break;
    case 0x00001000UL:
        size = 0x1;
        break;
    case 0x00004000UL:
        size = 0x2;
        break;
    case 0x00010000UL:
        size = 0x3;
        break;
    case 0x00040000UL:
        size = 0x4;
        break;
    case 0x00100000UL:
        size = 0x5;
        break;
    case 0x00400000UL:
        size = 0x6;
        break;
    case 0x01000000UL:
        size = 0x7;
        break;
    case 0x04000000UL:
        size = 0x8;
        break;
    case 0x10000000UL:
        size = 0x9;
        break;
    case 0x40000000UL:
        size = 0xA;
        break;
#if defined(TARGET_PPC64)
    case 0x000100000000ULL:
        size = 0xB;
        break;
    case 0x000400000000ULL:
        size = 0xC;
        break;
    case 0x001000000000ULL:
        size = 0xD;
        break;
    case 0x004000000000ULL:
        size = 0xE;
        break;
    case 0x010000000000ULL:
        size = 0xF;
        break;
#endif
    default:
        size = -1;
        break;
    }

    return size;
}

/* Helpers for 4xx TLB management */
#define PPC4XX_TLB_ENTRY_MASK       0x0000003f  /* Mask for 64 TLB entries */

#define PPC4XX_TLBHI_V              0x00000040
#define PPC4XX_TLBHI_E              0x00000020
#define PPC4XX_TLBHI_SIZE_MIN       0
#define PPC4XX_TLBHI_SIZE_MAX       7
#define PPC4XX_TLBHI_SIZE_DEFAULT   1
#define PPC4XX_TLBHI_SIZE_SHIFT     7
#define PPC4XX_TLBHI_SIZE_MASK      0x00000007

#define PPC4XX_TLBLO_EX             0x00000200
#define PPC4XX_TLBLO_WR             0x00000100
#define PPC4XX_TLBLO_ATTR_MASK      0x000000FF
#define PPC4XX_TLBLO_RPN_MASK       0xFFFFFC00

void helper_store_40x_pid(CPUPPCState *env, target_ulong val)
{
    if (env->spr[SPR_40x_PID] != val) {
        env->spr[SPR_40x_PID] = val;
        env->tlb_need_flush |= TLB_NEED_LOCAL_FLUSH;
    }
}

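/*
 * tlbre on the 40x rebuilds the TLBHI word from the QEMU view of the entry
 * (EPN, valid bit and encoded size) and, as a side effect, reloads
 * SPR_40x_PID with the PID of the entry that was read; the TLBLO word is
 * rebuilt from the RPN and the execute/write permissions.
 */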
target_ulong helper_4xx_tlbre_hi(CPUPPCState *env, target_ulong entry)
{
    ppcemb_tlb_t *tlb;
    target_ulong ret;
    int size;

    entry &= PPC4XX_TLB_ENTRY_MASK;
    tlb = &env->tlb.tlbe[entry];
    ret = tlb->EPN;
    if (tlb->prot & PAGE_VALID) {
        ret |= PPC4XX_TLBHI_V;
    }
    size = booke_page_size_to_tlb(tlb->size);
    if (size < PPC4XX_TLBHI_SIZE_MIN || size > PPC4XX_TLBHI_SIZE_MAX) {
        size = PPC4XX_TLBHI_SIZE_DEFAULT;
    }
    ret |= size << PPC4XX_TLBHI_SIZE_SHIFT;
    helper_store_40x_pid(env, tlb->PID);
    return ret;
}

target_ulong helper_4xx_tlbre_lo(CPUPPCState *env, target_ulong entry)
{
    ppcemb_tlb_t *tlb;
    target_ulong ret;

    entry &= PPC4XX_TLB_ENTRY_MASK;
    tlb = &env->tlb.tlbe[entry];
    ret = tlb->RPN;
    if (tlb->prot & PAGE_EXEC) {
        ret |= PPC4XX_TLBLO_EX;
    }
    if (tlb->prot & PAGE_WRITE) {
        ret |= PPC4XX_TLBLO_WR;
    }
    return ret;
}

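/*
 * Flush one embedded-MMU TLB entry from the QEMU TLB.  A mask of MMU
 * indexes is derived from which privilege nibbles of tlb->prot are
 * populated (shifted when bit 0 of tlb->attr is set), so only the
 * translation regimes that could have cached this entry are flushed.
 */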
static void ppcemb_tlb_flush(CPUState *cs, ppcemb_tlb_t *tlb)
{
    unsigned mmu_idx = 0;

    if (tlb->prot & 0xf) {
        mmu_idx |= 0x1;
    }
    if ((tlb->prot >> 4) & 0xf) {
        mmu_idx |= 0x2;
    }
    if (tlb->attr & 1) {
        mmu_idx <<= 2;
    }

    tlb_flush_range_by_mmuidx(cs, tlb->EPN, tlb->size, mmu_idx,
                              TARGET_LONG_BITS);
}

void helper_4xx_tlbwe_hi(CPUPPCState *env, target_ulong entry,
                         target_ulong val)
{
    CPUState *cs = env_cpu(env);
    ppcemb_tlb_t *tlb;

    qemu_log_mask(CPU_LOG_MMU, "%s entry %d val " TARGET_FMT_lx "\n",
                  __func__, (int)entry,
                  val);
    entry &= PPC4XX_TLB_ENTRY_MASK;
    tlb = &env->tlb.tlbe[entry];
    /* Invalidate previous TLB (if it's valid) */
    if ((tlb->prot & PAGE_VALID) && tlb->PID == env->spr[SPR_40x_PID]) {
        qemu_log_mask(CPU_LOG_MMU, "%s: invalidate old TLB %d start "
                      TARGET_FMT_lx " end " TARGET_FMT_lx "\n", __func__,
                      (int)entry, tlb->EPN, tlb->EPN + tlb->size);
        ppcemb_tlb_flush(cs, tlb);
    }
    tlb->size = booke_tlb_to_page_size((val >> PPC4XX_TLBHI_SIZE_SHIFT)
                                       & PPC4XX_TLBHI_SIZE_MASK);
    /*
     * We cannot handle TLB size < TARGET_PAGE_SIZE.
     * If this ever occurs, we should implement TARGET_PAGE_BITS_VARY
     */
    if ((val & PPC4XX_TLBHI_V) && tlb->size < TARGET_PAGE_SIZE) {
        cpu_abort(cs, "TLB size " TARGET_FMT_lu " < %u "
                  "are not supported (%d)\n"
                  "Please implement TARGET_PAGE_BITS_VARY\n",
                  tlb->size, TARGET_PAGE_SIZE, (int)((val >> 7) & 0x7));
    }
    tlb->EPN = val & ~(tlb->size - 1);
    if (val & PPC4XX_TLBHI_V) {
        tlb->prot |= PAGE_VALID;
        if (val & PPC4XX_TLBHI_E) {
            /* XXX: TO BE FIXED */
            cpu_abort(cs,
                      "Little-endian TLB entries are not supported for now\n");
        }
    } else {
        tlb->prot &= ~PAGE_VALID;
    }
    tlb->PID = env->spr[SPR_40x_PID]; /* PID */
    qemu_log_mask(CPU_LOG_MMU, "%s: set up TLB %d RPN " HWADDR_FMT_plx
                  " EPN " TARGET_FMT_lx " size " TARGET_FMT_lx
                  " prot %c%c%c%c PID %d\n", __func__,
                  (int)entry, tlb->RPN, tlb->EPN, tlb->size,
                  tlb->prot & PAGE_READ ? 'r' : '-',
                  tlb->prot & PAGE_WRITE ? 'w' : '-',
                  tlb->prot & PAGE_EXEC ? 'x' : '-',
                  tlb->prot & PAGE_VALID ? 'v' : '-', (int)tlb->PID);
}

void helper_4xx_tlbwe_lo(CPUPPCState *env, target_ulong entry,
                         target_ulong val)
{
    CPUState *cs = env_cpu(env);
    ppcemb_tlb_t *tlb;

    qemu_log_mask(CPU_LOG_MMU, "%s entry %i val " TARGET_FMT_lx "\n",
                  __func__, (int)entry, val);
    entry &= PPC4XX_TLB_ENTRY_MASK;
    tlb = &env->tlb.tlbe[entry];
    /* Invalidate previous TLB (if it's valid) */
    if ((tlb->prot & PAGE_VALID) && tlb->PID == env->spr[SPR_40x_PID]) {
        qemu_log_mask(CPU_LOG_MMU, "%s: invalidate old TLB %d start "
                      TARGET_FMT_lx " end " TARGET_FMT_lx "\n", __func__,
                      (int)entry, tlb->EPN, tlb->EPN + tlb->size);
        ppcemb_tlb_flush(cs, tlb);
    }
    tlb->attr = val & PPC4XX_TLBLO_ATTR_MASK;
    tlb->RPN = val & PPC4XX_TLBLO_RPN_MASK;
    tlb->prot = PAGE_READ;
    if (val & PPC4XX_TLBLO_EX) {
        tlb->prot |= PAGE_EXEC;
    }
    if (val & PPC4XX_TLBLO_WR) {
        tlb->prot |= PAGE_WRITE;
    }
    qemu_log_mask(CPU_LOG_MMU, "%s: set up TLB %d RPN " HWADDR_FMT_plx
                  " EPN " TARGET_FMT_lx
                  " size " TARGET_FMT_lx " prot %c%c%c%c PID %d\n", __func__,
                  (int)entry, tlb->RPN, tlb->EPN, tlb->size,
                  tlb->prot & PAGE_READ ? 'r' : '-',
                  tlb->prot & PAGE_WRITE ? 'w' : '-',
                  tlb->prot & PAGE_EXEC ? 'x' : '-',
                  tlb->prot & PAGE_VALID ? 'v' : '-', (int)tlb->PID);
}

target_ulong helper_4xx_tlbsx(CPUPPCState *env, target_ulong address)
{
    return ppcemb_tlb_search(env, address, env->spr[SPR_40x_PID]);
}

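/*
 * A BookE TLB entry matches the current context if its PID equals
 * SPR_BOOKE_PID or, on cores with extra PID registers (env->nb_pids),
 * a non-zero PID1/PID2 value.
 */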
static bool mmubooke_pid_match(CPUPPCState *env, ppcemb_tlb_t *tlb)
{
    if (tlb->PID == env->spr[SPR_BOOKE_PID]) {
        return true;
    }
    if (!env->nb_pids) {
        return false;
    }

    if (env->spr[SPR_BOOKE_PID1] && tlb->PID == env->spr[SPR_BOOKE_PID1]) {
        return true;
    }
    if (env->spr[SPR_BOOKE_PID2] && tlb->PID == env->spr[SPR_BOOKE_PID2]) {
        return true;
    }

    return false;
}

/* PowerPC 440 TLB management */
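/*
 * tlbwe on the 440 writes one of the three TLB words: word 0 carries the
 * EPN, page size, valid and TS bits plus the PID taken from MMUCR, word 1
 * carries the RPN, and word 2 carries the storage attributes and the
 * per-privilege access permissions.
 */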
void helper_440_tlbwe(CPUPPCState *env, uint32_t word, target_ulong entry,
                      target_ulong value)
{
    ppcemb_tlb_t *tlb;

    qemu_log_mask(CPU_LOG_MMU, "%s word %d entry %d value " TARGET_FMT_lx "\n",
                  __func__, word, (int)entry, value);
    entry &= 0x3F;
    tlb = &env->tlb.tlbe[entry];

    /* Invalidate previous TLB (if it's valid) */
    if ((tlb->prot & PAGE_VALID) && mmubooke_pid_match(env, tlb)) {
        qemu_log_mask(CPU_LOG_MMU, "%s: invalidate old TLB %d start "
                      TARGET_FMT_lx " end " TARGET_FMT_lx "\n", __func__,
                      (int)entry, tlb->EPN, tlb->EPN + tlb->size);
        ppcemb_tlb_flush(env_cpu(env), tlb);
    }

    switch (word) {
    default:
        /* Just here to please gcc */
    case 0:
        tlb->EPN = value & 0xFFFFFC00;
        tlb->size = booke_tlb_to_page_size((value >> 4) & 0xF);
        tlb->attr &= ~0x1;
        tlb->attr |= (value >> 8) & 1;
        if (value & 0x200) {
            tlb->prot |= PAGE_VALID;
        } else {
            tlb->prot &= ~PAGE_VALID;
        }
        tlb->PID = env->spr[SPR_440_MMUCR] & 0x000000FF;
        break;
    case 1:
        tlb->RPN = value & 0xFFFFFC0F;
        break;
    case 2:
        tlb->attr = (tlb->attr & 0x1) | (value & 0x0000FF00);
        tlb->prot = tlb->prot & PAGE_VALID;
        if (value & 0x1) {
            tlb->prot |= PAGE_READ << 4;
        }
        if (value & 0x2) {
            tlb->prot |= PAGE_WRITE << 4;
        }
        if (value & 0x4) {
            tlb->prot |= PAGE_EXEC << 4;
        }
        if (value & 0x8) {
            tlb->prot |= PAGE_READ;
        }
        if (value & 0x10) {
            tlb->prot |= PAGE_WRITE;
        }
        if (value & 0x20) {
            tlb->prot |= PAGE_EXEC;
        }
        break;
    }
}

target_ulong helper_440_tlbre(CPUPPCState *env, uint32_t word,
                              target_ulong entry)
{
    ppcemb_tlb_t *tlb;
    target_ulong ret;
    int size;

    entry &= 0x3F;
    tlb = &env->tlb.tlbe[entry];
    switch (word) {
    default:
        /* Just here to please gcc */
    case 0:
        ret = tlb->EPN;
        size = booke_page_size_to_tlb(tlb->size);
        if (size < 0 || size > 0xF) {
            size = 1;
        }
        ret |= size << 4;
        if (tlb->attr & 0x1) {
            ret |= 0x100;
        }
        if (tlb->prot & PAGE_VALID) {
            ret |= 0x200;
        }
        env->spr[SPR_440_MMUCR] &= ~0x000000FF;
        env->spr[SPR_440_MMUCR] |= tlb->PID;
        break;
    case 1:
        ret = tlb->RPN;
        break;
    case 2:
        ret = tlb->attr & ~0x1;
        if (tlb->prot & (PAGE_READ << 4)) {
            ret |= 0x1;
        }
        if (tlb->prot & (PAGE_WRITE << 4)) {
            ret |= 0x2;
        }
        if (tlb->prot & (PAGE_EXEC << 4)) {
            ret |= 0x4;
        }
        if (tlb->prot & PAGE_READ) {
            ret |= 0x8;
        }
        if (tlb->prot & PAGE_WRITE) {
            ret |= 0x10;
        }
        if (tlb->prot & PAGE_EXEC) {
            ret |= 0x20;
        }
        break;
    }
    return ret;
}

target_ulong helper_440_tlbsx(CPUPPCState *env, target_ulong address)
{
    return ppcemb_tlb_search(env, address, env->spr[SPR_440_MMUCR] & 0xFF);
}

/* PowerPC BookE 2.06 TLB management */

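/*
 * Return the TLB entry currently addressed by the MAS registers:
 * MAS0[TLBSEL] selects the TLB array, and MAS0[ESEL] together with
 * MAS2[EPN] select the entry within it.  Hardware entry selection
 * (MAS0[HES]) is not implemented.
 */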
static ppcmas_tlb_t *booke206_cur_tlb(CPUPPCState *env)
{
    uint32_t tlbncfg = 0;
    int esel = (env->spr[SPR_BOOKE_MAS0] & MAS0_ESEL_MASK) >> MAS0_ESEL_SHIFT;
    int ea = (env->spr[SPR_BOOKE_MAS2] & MAS2_EPN_MASK);
    int tlb;

    tlb = (env->spr[SPR_BOOKE_MAS0] & MAS0_TLBSEL_MASK) >> MAS0_TLBSEL_SHIFT;
    tlbncfg = env->spr[SPR_BOOKE_TLB0CFG + tlb];

    if ((tlbncfg & TLBnCFG_HES) && (env->spr[SPR_BOOKE_MAS0] & MAS0_HES)) {
        cpu_abort(env_cpu(env), "we don't support HES yet\n");
    }

    return booke206_get_tlbm(env, tlb, ea, esel);
}

void helper_booke_setpid(CPUPPCState *env, uint32_t pidn, target_ulong pid)
{
    env->spr[pidn] = pid;
    /* changing PIDs means we're in a different address space now */
    tlb_flush(env_cpu(env));
}

void helper_booke_set_eplc(CPUPPCState *env, target_ulong val)
{
    env->spr[SPR_BOOKE_EPLC] = val & EPID_MASK;
    tlb_flush_by_mmuidx(env_cpu(env), 1 << PPC_TLB_EPID_LOAD);
}

void helper_booke_set_epsc(CPUPPCState *env, target_ulong val)
{
    env->spr[SPR_BOOKE_EPSC] = val & EPID_MASK;
    tlb_flush_by_mmuidx(env_cpu(env), 1 << PPC_TLB_EPID_STORE);
}

static inline void flush_page(CPUPPCState *env, ppcmas_tlb_t *tlb)
{
    if (booke206_tlb_to_page_size(env, tlb) == TARGET_PAGE_SIZE) {
        tlb_flush_page(env_cpu(env), tlb->mas2 & MAS2_EPN_MASK);
    } else {
        tlb_flush(env_cpu(env));
    }
}

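/*
 * BookE 2.06 tlbwe: after checking the write qualifier and that the
 * requested page size is supported by the targeted TLB array, any valid
 * victim entry is flushed from the QEMU TLB, the MAS1/MAS2/MAS7_3 values
 * are copied into the entry (with the EPN masked to the page size and, in
 * 32-bit mode, truncated to 32 bits), and finally the new mapping's page
 * is flushed as well so no stale translation survives.
 */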
Blue Swirl | c6c7cf0 | 2012-05-30 04:23:31 +0000 | [diff] [blame] | 1021 | void helper_booke206_tlbwe(CPUPPCState *env) |
Blue Swirl | ec19c4d | 2012-05-30 04:23:30 +0000 | [diff] [blame] | 1022 | { |
| 1023 | uint32_t tlbncfg, tlbn; |
| 1024 | ppcmas_tlb_t *tlb; |
| 1025 | uint32_t size_tlb, size_ps; |
Fabien Chouteau | 77c2cf3 | 2012-05-21 06:11:06 +0000 | [diff] [blame] | 1026 | target_ulong mask; |
| 1027 | |
Blue Swirl | ec19c4d | 2012-05-30 04:23:30 +0000 | [diff] [blame] | 1028 | |
| 1029 | switch (env->spr[SPR_BOOKE_MAS0] & MAS0_WQ_MASK) { |
| 1030 | case MAS0_WQ_ALWAYS: |
| 1031 | /* good to go, write that entry */ |
| 1032 | break; |
| 1033 | case MAS0_WQ_COND: |
| 1034 | /* XXX check if reserved */ |
| 1035 | if (0) { |
| 1036 | return; |
| 1037 | } |
| 1038 | break; |
| 1039 | case MAS0_WQ_CLR_RSRV: |
| 1040 | /* XXX clear entry */ |
| 1041 | return; |
| 1042 | default: |
| 1043 | /* no idea what to do */ |
| 1044 | return; |
| 1045 | } |
| 1046 | |
| 1047 | if (((env->spr[SPR_BOOKE_MAS0] & MAS0_ATSEL) == MAS0_ATSEL_LRAT) && |
Víctor Colombo | 10b2b37 | 2022-05-04 18:05:30 -0300 | [diff] [blame] | 1048 | !FIELD_EX64(env->msr, MSR, GS)) { |
Blue Swirl | ec19c4d | 2012-05-30 04:23:30 +0000 | [diff] [blame] | 1049 | /* XXX we don't support direct LRAT setting yet */ |
| 1050 | fprintf(stderr, "cpu: don't support LRAT setting yet\n"); |
| 1051 | return; |
| 1052 | } |
| 1053 | |
| 1054 | tlbn = (env->spr[SPR_BOOKE_MAS0] & MAS0_TLBSEL_MASK) >> MAS0_TLBSEL_SHIFT; |
| 1055 | tlbncfg = env->spr[SPR_BOOKE_TLB0CFG + tlbn]; |
| 1056 | |
| 1057 | tlb = booke206_cur_tlb(env); |
| 1058 | |
| 1059 | if (!tlb) { |
Benjamin Herrenschmidt | 8c8966e | 2016-07-27 16:56:37 +1000 | [diff] [blame] | 1060 | raise_exception_err_ra(env, POWERPC_EXCP_PROGRAM, |
| 1061 | POWERPC_EXCP_INVAL | |
| 1062 | POWERPC_EXCP_INVAL_INVAL, GETPC()); |
Blue Swirl | ec19c4d | 2012-05-30 04:23:30 +0000 | [diff] [blame] | 1063 | } |
| 1064 | |
| 1065 | /* check that we support the targeted size */ |
| 1066 | size_tlb = (env->spr[SPR_BOOKE_MAS1] & MAS1_TSIZE_MASK) >> MAS1_TSIZE_SHIFT; |
| 1067 | size_ps = booke206_tlbnps(env, tlbn); |
| 1068 | if ((env->spr[SPR_BOOKE_MAS1] & MAS1_VALID) && (tlbncfg & TLBnCFG_AVAIL) && |
| 1069 | !(size_ps & (1 << size_tlb))) { |
Benjamin Herrenschmidt | 8c8966e | 2016-07-27 16:56:37 +1000 | [diff] [blame] | 1070 | raise_exception_err_ra(env, POWERPC_EXCP_PROGRAM, |
| 1071 | POWERPC_EXCP_INVAL | |
| 1072 | POWERPC_EXCP_INVAL_INVAL, GETPC()); |
Blue Swirl | ec19c4d | 2012-05-30 04:23:30 +0000 | [diff] [blame] | 1073 | } |
| 1074 | |
Víctor Colombo | 10b2b37 | 2022-05-04 18:05:30 -0300 | [diff] [blame] | 1075 | if (FIELD_EX64(env->msr, MSR, GS)) { |
Richard Henderson | db70b31 | 2019-03-22 19:07:57 -0700 | [diff] [blame] | 1076 | cpu_abort(env_cpu(env), "missing HV implementation\n"); |
Blue Swirl | ec19c4d | 2012-05-30 04:23:30 +0000 | [diff] [blame] | 1077 | } |
Luc MICHEL | 2e56984 | 2018-01-15 10:32:20 +0100 | [diff] [blame] | 1078 | |
| 1079 | if (tlb->mas1 & MAS1_VALID) { |
David Gibson | fe4ade3 | 2019-03-21 22:36:09 +1100 | [diff] [blame] | 1080 | /* |
| 1081 | * Invalidate the page in QEMU TLB if it was a valid entry. |
Luc MICHEL | 2e56984 | 2018-01-15 10:32:20 +0100 | [diff] [blame] | 1082 | * |
| 1083 | * In "PowerPC e500 Core Family Reference Manual, Rev. 1", |
| 1084 | * Section "12.4.2 TLB Write Entry (tlbwe) Instruction": |
| 1085 | * (https://www.nxp.com/docs/en/reference-manual/E500CORERM.pdf) |
| 1086 | * |
| 1087 | * "Note that when an L2 TLB entry is written, it may be displacing an |
| 1088 | * already valid entry in the same L2 TLB location (a victim). If a |
| 1089 | * valid L1 TLB entry corresponds to the L2 MMU victim entry, that L1 |
David Gibson | fe4ade3 | 2019-03-21 22:36:09 +1100 | [diff] [blame] | 1090 | * TLB entry is automatically invalidated." |
| 1091 | */ |
Luc MICHEL | 2e56984 | 2018-01-15 10:32:20 +0100 | [diff] [blame] | 1092 | flush_page(env, tlb); |
| 1093 | } |
| 1094 | |
Blue Swirl | ec19c4d | 2012-05-30 04:23:30 +0000 | [diff] [blame] | 1095 | tlb->mas7_3 = ((uint64_t)env->spr[SPR_BOOKE_MAS7] << 32) | |
| 1096 | env->spr[SPR_BOOKE_MAS3]; |
| 1097 | tlb->mas1 = env->spr[SPR_BOOKE_MAS1]; |
| 1098 | |
KONRAD Frederic | c449d8b | 2017-08-07 17:50:46 +0200 | [diff] [blame] | 1099 | if ((env->spr[SPR_MMUCFG] & MMUCFG_MAVN) == MMUCFG_MAVN_V2) { |
| 1100 |         /* With MAV2, TSIZE is ignored for TLB arrays that have a fixed page size */ |
| 1101 | booke206_fixed_size_tlbn(env, tlbn, tlb); |
| 1102 | } else { |
| 1103 | if (!(tlbncfg & TLBnCFG_AVAIL)) { |
| 1104 |             /* force !AVAIL TLB entries to the correct page size */ |
| 1105 | tlb->mas1 &= ~MAS1_TSIZE_MASK; |
| 1106 | /* XXX can be configured in MMUCSR0 */ |
| 1107 | tlb->mas1 |= (tlbncfg & TLBnCFG_MINSIZE) >> 12; |
| 1108 | } |
Blue Swirl | ec19c4d | 2012-05-30 04:23:30 +0000 | [diff] [blame] | 1109 | } |
| 1110 | |
Fabien Chouteau | 77c2cf3 | 2012-05-21 06:11:06 +0000 | [diff] [blame] | 1111 | /* Make a mask from TLB size to discard invalid bits in EPN field */ |
| 1112 | mask = ~(booke206_tlb_to_page_size(env, tlb) - 1); |
| 1113 | /* Add a mask for page attributes */ |
| 1114 | mask |= MAS2_ACM | MAS2_VLE | MAS2_W | MAS2_I | MAS2_M | MAS2_G | MAS2_E; |
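    /*
     * For example, a 4 KiB entry gives mask = ~0xfff, so only the
     * page-aligned part of MAS2[EPN] plus the attribute bits above
     * survive when MAS2 is copied into the entry below.
     */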
| 1115 | |
Víctor Colombo | cda2336 | 2022-05-04 18:05:32 -0300 | [diff] [blame] | 1116 | if (!FIELD_EX64(env->msr, MSR, CM)) { |
David Gibson | fe4ade3 | 2019-03-21 22:36:09 +1100 | [diff] [blame] | 1117 | /* |
| 1118 | * Executing a tlbwe instruction in 32-bit mode will set bits |
| 1119 | * 0:31 of the TLB EPN field to zero. |
Fabien Chouteau | 77c2cf3 | 2012-05-21 06:11:06 +0000 | [diff] [blame] | 1120 | */ |
| 1121 | mask &= 0xffffffff; |
| 1122 | } |
| 1123 | |
| 1124 | tlb->mas2 = env->spr[SPR_BOOKE_MAS2] & mask; |
Blue Swirl | ec19c4d | 2012-05-30 04:23:30 +0000 | [diff] [blame] | 1125 | |
| 1126 | if (!(tlbncfg & TLBnCFG_IPROT)) { |
| 1127 | /* no IPROT supported by TLB */ |
| 1128 | tlb->mas1 &= ~MAS1_IPROT; |
| 1129 | } |
| 1130 | |
Luc MICHEL | 2e56984 | 2018-01-15 10:32:20 +0100 | [diff] [blame] | 1131 | flush_page(env, tlb); |
Blue Swirl | ec19c4d | 2012-05-30 04:23:30 +0000 | [diff] [blame] | 1132 | } |
| 1133 | |
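/*
 * Mirror a TLB entry back into the MAS registers so the guest can read
 * it after tlbre/tlbsx: MAS0 identifies the array, way and next victim,
 * MAS1/MAS2 carry the entry flags and EPN, and the 64-bit mas7_3 field
 * is split across MAS3 (low half) and MAS7 (high half).
 */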
| 1134 | static inline void booke206_tlb_to_mas(CPUPPCState *env, ppcmas_tlb_t *tlb) |
| 1135 | { |
| 1136 | int tlbn = booke206_tlbm_to_tlbn(env, tlb); |
| 1137 | int way = booke206_tlbm_to_way(env, tlb); |
| 1138 | |
| 1139 | env->spr[SPR_BOOKE_MAS0] = tlbn << MAS0_TLBSEL_SHIFT; |
| 1140 | env->spr[SPR_BOOKE_MAS0] |= way << MAS0_ESEL_SHIFT; |
| 1141 | env->spr[SPR_BOOKE_MAS0] |= env->last_way << MAS0_NV_SHIFT; |
| 1142 | |
| 1143 | env->spr[SPR_BOOKE_MAS1] = tlb->mas1; |
| 1144 | env->spr[SPR_BOOKE_MAS2] = tlb->mas2; |
| 1145 | env->spr[SPR_BOOKE_MAS3] = tlb->mas7_3; |
| 1146 | env->spr[SPR_BOOKE_MAS7] = tlb->mas7_3 >> 32; |
| 1147 | } |
| 1148 | |
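/*
 * tlbre: read the entry currently selected by MAS0 back into the MAS
 * registers; if the selection does not name a valid array/way, MAS1 is
 * cleared so the guest sees an invalid entry.
 */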
Blue Swirl | c6c7cf0 | 2012-05-30 04:23:31 +0000 | [diff] [blame] | 1149 | void helper_booke206_tlbre(CPUPPCState *env) |
Blue Swirl | ec19c4d | 2012-05-30 04:23:30 +0000 | [diff] [blame] | 1150 | { |
| 1151 | ppcmas_tlb_t *tlb = NULL; |
| 1152 | |
| 1153 | tlb = booke206_cur_tlb(env); |
| 1154 | if (!tlb) { |
| 1155 | env->spr[SPR_BOOKE_MAS1] = 0; |
| 1156 | } else { |
| 1157 | booke206_tlb_to_mas(env, tlb); |
| 1158 | } |
| 1159 | } |
| 1160 | |
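/*
 * tlbsx: search every TLB array for an entry translating the given
 * effective address in the PID and address space taken from
 * MAS6[SPID]/MAS6[SAS].  On a hit the MAS registers are loaded from the
 * matching entry; on a miss they are filled with the defaults from MAS4
 * (plus the search PID/AS from MAS6) and the round-robin next-victim
 * hint in MAS0 is advanced.
 */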
Blue Swirl | c6c7cf0 | 2012-05-30 04:23:31 +0000 | [diff] [blame] | 1161 | void helper_booke206_tlbsx(CPUPPCState *env, target_ulong address) |
Blue Swirl | ec19c4d | 2012-05-30 04:23:30 +0000 | [diff] [blame] | 1162 | { |
| 1163 | ppcmas_tlb_t *tlb = NULL; |
| 1164 | int i, j; |
Avi Kivity | a8170e5 | 2012-10-23 12:30:10 +0200 | [diff] [blame] | 1165 | hwaddr raddr; |
Blue Swirl | ec19c4d | 2012-05-30 04:23:30 +0000 | [diff] [blame] | 1166 | uint32_t spid, sas; |
| 1167 | |
| 1168 | spid = (env->spr[SPR_BOOKE_MAS6] & MAS6_SPID_MASK) >> MAS6_SPID_SHIFT; |
| 1169 | sas = env->spr[SPR_BOOKE_MAS6] & MAS6_SAS; |
| 1170 | |
| 1171 | for (i = 0; i < BOOKE206_MAX_TLBN; i++) { |
| 1172 | int ways = booke206_tlb_ways(env, i); |
| 1173 | |
| 1174 | for (j = 0; j < ways; j++) { |
| 1175 | tlb = booke206_get_tlbm(env, i, address, j); |
| 1176 | |
| 1177 | if (!tlb) { |
| 1178 | continue; |
| 1179 | } |
| 1180 | |
| 1181 | if (ppcmas_tlb_check(env, tlb, &raddr, address, spid)) { |
| 1182 | continue; |
| 1183 | } |
| 1184 | |
| 1185 | if (sas != ((tlb->mas1 & MAS1_TS) >> MAS1_TS_SHIFT)) { |
| 1186 | continue; |
| 1187 | } |
| 1188 | |
| 1189 | booke206_tlb_to_mas(env, tlb); |
| 1190 | return; |
| 1191 | } |
| 1192 | } |
| 1193 | |
| 1194 | /* no entry found, fill with defaults */ |
| 1195 | env->spr[SPR_BOOKE_MAS0] = env->spr[SPR_BOOKE_MAS4] & MAS4_TLBSELD_MASK; |
| 1196 | env->spr[SPR_BOOKE_MAS1] = env->spr[SPR_BOOKE_MAS4] & MAS4_TSIZED_MASK; |
| 1197 | env->spr[SPR_BOOKE_MAS2] = env->spr[SPR_BOOKE_MAS4] & MAS4_WIMGED_MASK; |
| 1198 | env->spr[SPR_BOOKE_MAS3] = 0; |
| 1199 | env->spr[SPR_BOOKE_MAS7] = 0; |
| 1200 | |
| 1201 | if (env->spr[SPR_BOOKE_MAS6] & MAS6_SAS) { |
| 1202 | env->spr[SPR_BOOKE_MAS1] |= MAS1_TS; |
| 1203 | } |
| 1204 | |
| 1205 | env->spr[SPR_BOOKE_MAS1] |= (env->spr[SPR_BOOKE_MAS6] >> 16) |
| 1206 | << MAS1_TID_SHIFT; |
| 1207 | |
| 1208 | /* next victim logic */ |
| 1209 | env->spr[SPR_BOOKE_MAS0] |= env->last_way << MAS0_ESEL_SHIFT; |
| 1210 | env->last_way++; |
| 1211 | env->last_way &= booke206_tlb_ways(env, 0) - 1; |
| 1212 | env->spr[SPR_BOOKE_MAS0] |= env->last_way << MAS0_NV_SHIFT; |
| 1213 | } |
| 1214 | |
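/*
 * Mark as invalid every entry of TLB array 'tlbn' whose EPN matches the
 * given effective address, unless the entry is protected by IPROT.
 */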
| 1215 | static inline void booke206_invalidate_ea_tlb(CPUPPCState *env, int tlbn, |
Daniel Henrique Barboza | d139786 | 2021-11-10 17:25:16 -0300 | [diff] [blame] | 1216 | vaddr ea) |
Blue Swirl | ec19c4d | 2012-05-30 04:23:30 +0000 | [diff] [blame] | 1217 | { |
| 1218 | int i; |
| 1219 | int ways = booke206_tlb_ways(env, tlbn); |
| 1220 | target_ulong mask; |
| 1221 | |
| 1222 | for (i = 0; i < ways; i++) { |
| 1223 | ppcmas_tlb_t *tlb = booke206_get_tlbm(env, tlbn, ea, i); |
| 1224 | if (!tlb) { |
| 1225 | continue; |
| 1226 | } |
| 1227 | mask = ~(booke206_tlb_to_page_size(env, tlb) - 1); |
| 1228 | if (((tlb->mas2 & MAS2_EPN_MASK) == (ea & mask)) && |
| 1229 | !(tlb->mas1 & MAS1_IPROT)) { |
| 1230 | tlb->mas1 &= ~MAS1_VALID; |
| 1231 | } |
| 1232 | } |
| 1233 | } |
| 1234 | |
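/*
 * tlbivax: bit 0x8 of the effective address selects TLB1 over TLB0 and
 * bit 0x4 requests an invalidate-all of that array.  EA-targeted
 * invalidations also flush the QEMU TLBs of every vCPU, modelling the
 * broadcast nature of the operation.
 */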
Blue Swirl | c6c7cf0 | 2012-05-30 04:23:31 +0000 | [diff] [blame] | 1235 | void helper_booke206_tlbivax(CPUPPCState *env, target_ulong address) |
Blue Swirl | ec19c4d | 2012-05-30 04:23:30 +0000 | [diff] [blame] | 1236 | { |
Nikunj A Dadhania | d76ab5e | 2016-09-20 22:05:01 +0530 | [diff] [blame] | 1237 | CPUState *cs; |
Andreas Färber | 31b030d | 2013-09-04 01:29:02 +0200 | [diff] [blame] | 1238 | |
Blue Swirl | ec19c4d | 2012-05-30 04:23:30 +0000 | [diff] [blame] | 1239 | if (address & 0x4) { |
| 1240 | /* flush all entries */ |
| 1241 | if (address & 0x8) { |
| 1242 | /* flush all of TLB1 */ |
| 1243 | booke206_flush_tlb(env, BOOKE206_FLUSH_TLB1, 1); |
| 1244 | } else { |
| 1245 | /* flush all of TLB0 */ |
| 1246 | booke206_flush_tlb(env, BOOKE206_FLUSH_TLB0, 0); |
| 1247 | } |
| 1248 | return; |
| 1249 | } |
| 1250 | |
| 1251 | if (address & 0x8) { |
| 1252 | /* flush TLB1 entries */ |
| 1253 | booke206_invalidate_ea_tlb(env, 1, address); |
Nikunj A Dadhania | d76ab5e | 2016-09-20 22:05:01 +0530 | [diff] [blame] | 1254 | CPU_FOREACH(cs) { |
Alex Bennée | d10eb08 | 2016-11-14 14:17:28 +0000 | [diff] [blame] | 1255 | tlb_flush(cs); |
Nikunj A Dadhania | d76ab5e | 2016-09-20 22:05:01 +0530 | [diff] [blame] | 1256 | } |
Blue Swirl | ec19c4d | 2012-05-30 04:23:30 +0000 | [diff] [blame] | 1257 | } else { |
| 1258 | /* flush TLB0 entries */ |
| 1259 | booke206_invalidate_ea_tlb(env, 0, address); |
Nikunj A Dadhania | d76ab5e | 2016-09-20 22:05:01 +0530 | [diff] [blame] | 1260 | CPU_FOREACH(cs) { |
| 1261 | tlb_flush_page(cs, address & MAS2_EPN_MASK); |
| 1262 | } |
Blue Swirl | ec19c4d | 2012-05-30 04:23:30 +0000 | [diff] [blame] | 1263 | } |
| 1264 | } |
| 1265 | |
Blue Swirl | c6c7cf0 | 2012-05-30 04:23:31 +0000 | [diff] [blame] | 1266 | void helper_booke206_tlbilx0(CPUPPCState *env, target_ulong address) |
Blue Swirl | ec19c4d | 2012-05-30 04:23:30 +0000 | [diff] [blame] | 1267 | { |
| 1268 | /* XXX missing LPID handling */ |
| 1269 | booke206_flush_tlb(env, -1, 1); |
| 1270 | } |
| 1271 | |
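/*
 * tlbilx invalidate-by-PID form: clear the valid bit of every non-IPROT
 * entry whose TID matches MAS6[SPID], then flush the QEMU TLB.
 */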
Blue Swirl | c6c7cf0 | 2012-05-30 04:23:31 +0000 | [diff] [blame] | 1272 | void helper_booke206_tlbilx1(CPUPPCState *env, target_ulong address) |
Blue Swirl | ec19c4d | 2012-05-30 04:23:30 +0000 | [diff] [blame] | 1273 | { |
| 1274 | int i, j; |
| 1275 | int tid = (env->spr[SPR_BOOKE_MAS6] & MAS6_SPID); |
| 1276 | ppcmas_tlb_t *tlb = env->tlb.tlbm; |
| 1277 | int tlb_size; |
| 1278 | |
| 1279 | /* XXX missing LPID handling */ |
| 1280 | for (i = 0; i < BOOKE206_MAX_TLBN; i++) { |
| 1281 | tlb_size = booke206_tlb_size(env, i); |
| 1282 | for (j = 0; j < tlb_size; j++) { |
| 1283 | if (!(tlb[j].mas1 & MAS1_IPROT) && |
| 1284 | ((tlb[j].mas1 & MAS1_TID_MASK) == tid)) { |
| 1285 | tlb[j].mas1 &= ~MAS1_VALID; |
| 1286 | } |
| 1287 | } |
| 1288 | tlb += booke206_tlb_size(env, i); |
| 1289 | } |
Richard Henderson | db70b31 | 2019-03-22 19:07:57 -0700 | [diff] [blame] | 1290 | tlb_flush(env_cpu(env)); |
Blue Swirl | ec19c4d | 2012-05-30 04:23:30 +0000 | [diff] [blame] | 1291 | } |
| 1292 | |
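/*
 * tlbilx invalidate-by-EA form: invalidate entries translating the
 * given address that match the PID, guest-state (MAS5[SGS]) and
 * indirect (MAS6[SIND]) attributes, skipping IPROT-protected entries.
 */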
Blue Swirl | c6c7cf0 | 2012-05-30 04:23:31 +0000 | [diff] [blame] | 1293 | void helper_booke206_tlbilx3(CPUPPCState *env, target_ulong address) |
Blue Swirl | ec19c4d | 2012-05-30 04:23:30 +0000 | [diff] [blame] | 1294 | { |
| 1295 | int i, j; |
| 1296 | ppcmas_tlb_t *tlb; |
| 1297 | int tid = (env->spr[SPR_BOOKE_MAS6] & MAS6_SPID); |
| 1298 | int pid = tid >> MAS6_SPID_SHIFT; |
| 1299 | int sgs = env->spr[SPR_BOOKE_MAS5] & MAS5_SGS; |
| 1300 | int ind = (env->spr[SPR_BOOKE_MAS6] & MAS6_SIND) ? MAS1_IND : 0; |
| 1301 |     /* XXX check for unsupported isize and raise an invalid opcode exception */ |
| 1302 | int size = env->spr[SPR_BOOKE_MAS6] & MAS6_ISIZE_MASK; |
| 1303 | /* XXX implement MAV2 handling */ |
| 1304 | bool mav2 = false; |
| 1305 | |
| 1306 | /* XXX missing LPID handling */ |
| 1307 | /* flush by pid and ea */ |
| 1308 | for (i = 0; i < BOOKE206_MAX_TLBN; i++) { |
| 1309 | int ways = booke206_tlb_ways(env, i); |
| 1310 | |
| 1311 | for (j = 0; j < ways; j++) { |
| 1312 | tlb = booke206_get_tlbm(env, i, address, j); |
| 1313 | if (!tlb) { |
| 1314 | continue; |
| 1315 | } |
| 1316 | if ((ppcmas_tlb_check(env, tlb, NULL, address, pid) != 0) || |
| 1317 | (tlb->mas1 & MAS1_IPROT) || |
| 1318 | ((tlb->mas1 & MAS1_IND) != ind) || |
| 1319 | ((tlb->mas8 & MAS8_TGS) != sgs)) { |
| 1320 | continue; |
| 1321 | } |
| 1322 | if (mav2 && ((tlb->mas1 & MAS1_TSIZE_MASK) != size)) { |
| 1323 | /* XXX only check when MMUCFG[TWC] || TLBnCFG[HES] */ |
| 1324 | continue; |
| 1325 | } |
| 1326 | /* XXX e500mc doesn't match SAS, but other cores might */ |
| 1327 | tlb->mas1 &= ~MAS1_VALID; |
| 1328 | } |
| 1329 | } |
Richard Henderson | db70b31 | 2019-03-22 19:07:57 -0700 | [diff] [blame] | 1330 | tlb_flush(env_cpu(env)); |
Blue Swirl | ec19c4d | 2012-05-30 04:23:30 +0000 | [diff] [blame] | 1331 | } |
| 1332 | |
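/*
 * Flush the emulated TLB arrays selected by 'type': bit 1 (0x2)
 * requests TLB1 and bit 2 (0x4) requests TLB0.
 */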
Alex Zuepke | a721d39 | 2014-05-28 19:25:36 +0200 | [diff] [blame] | 1333 | void helper_booke206_tlbflush(CPUPPCState *env, target_ulong type) |
Blue Swirl | ec19c4d | 2012-05-30 04:23:30 +0000 | [diff] [blame] | 1334 | { |
| 1335 | int flags = 0; |
| 1336 | |
| 1337 | if (type & 2) { |
| 1338 | flags |= BOOKE206_FLUSH_TLB1; |
| 1339 | } |
| 1340 | |
| 1341 | if (type & 4) { |
| 1342 | flags |= BOOKE206_FLUSH_TLB0; |
| 1343 | } |
| 1344 | |
| 1345 | booke206_flush_tlb(env, flags, 1); |
| 1346 | } |
David Gibson | eb20c1c | 2013-03-12 00:31:49 +0000 | [diff] [blame] | 1347 | |
| 1348 | |
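/*
 * Helpers exposing check_tlb_flush() to generated code, for local and
 * global (broadcast) flush scope respectively.
 */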
Nikunj A Dadhania | e3cffe6 | 2016-09-20 22:05:00 +0530 | [diff] [blame] | 1349 | void helper_check_tlb_flush_local(CPUPPCState *env) |
Benjamin Herrenschmidt | cd0c6f4 | 2016-05-03 18:03:25 +0200 | [diff] [blame] | 1350 | { |
Nikunj A Dadhania | e3cffe6 | 2016-09-20 22:05:00 +0530 | [diff] [blame] | 1351 | check_tlb_flush(env, false); |
| 1352 | } |
| 1353 | |
| 1354 | void helper_check_tlb_flush_global(CPUPPCState *env) |
| 1355 | { |
| 1356 | check_tlb_flush(env, true); |
Benjamin Herrenschmidt | cd0c6f4 | 2016-05-03 18:03:25 +0200 | [diff] [blame] | 1357 | } |
| 1358 | |
David Gibson | eb20c1c | 2013-03-12 00:31:49 +0000 | [diff] [blame] | 1359 | |
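/*
 * TCG tlb_fill hook: translate the access with ppc_xlate() and, on
 * success, install the page into the QEMU softmmu TLB.  On failure the
 * translation code has already set cs->exception_index and
 * env->error_code, so either report the miss (when probing) or deliver
 * the exception at 'retaddr'.
 */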
Richard Henderson | 51806b5 | 2021-06-21 09:51:13 -0300 | [diff] [blame] | 1360 | bool ppc_cpu_tlb_fill(CPUState *cs, vaddr eaddr, int size, |
Richard Henderson | 351bc97 | 2019-04-02 17:03:41 +0700 | [diff] [blame] | 1361 | MMUAccessType access_type, int mmu_idx, |
| 1362 | bool probe, uintptr_t retaddr) |
David Gibson | eb20c1c | 2013-03-12 00:31:49 +0000 | [diff] [blame] | 1363 | { |
Andreas Färber | d5a11fe | 2013-08-27 00:28:06 +0200 | [diff] [blame] | 1364 | PowerPCCPU *cpu = POWERPC_CPU(cs); |
Richard Henderson | 51806b5 | 2021-06-21 09:51:13 -0300 | [diff] [blame] | 1365 | hwaddr raddr; |
| 1366 | int page_size, prot; |
David Gibson | eb20c1c | 2013-03-12 00:31:49 +0000 | [diff] [blame] | 1367 | |
Richard Henderson | 51806b5 | 2021-06-21 09:51:13 -0300 | [diff] [blame] | 1368 | if (ppc_xlate(cpu, eaddr, access_type, &raddr, |
| 1369 | &page_size, &prot, mmu_idx, !probe)) { |
| 1370 | tlb_set_page(cs, eaddr & TARGET_PAGE_MASK, raddr & TARGET_PAGE_MASK, |
| 1371 | prot, mmu_idx, 1UL << page_size); |
| 1372 | return true; |
David Gibson | b632a14 | 2013-03-13 11:40:33 +1100 | [diff] [blame] | 1373 | } |
Richard Henderson | 51806b5 | 2021-06-21 09:51:13 -0300 | [diff] [blame] | 1374 | if (probe) { |
| 1375 | return false; |
David Gibson | eb20c1c | 2013-03-12 00:31:49 +0000 | [diff] [blame] | 1376 | } |
Richard Henderson | 51806b5 | 2021-06-21 09:51:13 -0300 | [diff] [blame] | 1377 | raise_exception_err_ra(&cpu->env, cs->exception_index, |
| 1378 | cpu->env.error_code, retaddr); |
Richard Henderson | 351bc97 | 2019-04-02 17:03:41 +0700 | [diff] [blame] | 1379 | } |