bellard | e8af50a | 2004-09-30 21:55:55 +0000 | [diff] [blame] | 1 | /* |
Blue Swirl | 163fa5c | 2011-09-11 11:30:01 +0000 | [diff] [blame] | 2 | * Misc Sparc helpers |
ths | 5fafdf2 | 2007-09-16 21:08:06 +0000 | [diff] [blame] | 3 | * |
bellard | 8346901 | 2005-07-23 14:27:54 +0000 | [diff] [blame] | 4 | * Copyright (c) 2003-2005 Fabrice Bellard |
bellard | e8af50a | 2004-09-30 21:55:55 +0000 | [diff] [blame] | 5 | * |
| 6 | * This library is free software; you can redistribute it and/or |
| 7 | * modify it under the terms of the GNU Lesser General Public |
| 8 | * License as published by the Free Software Foundation; either |
| 9 | * version 2 of the License, or (at your option) any later version. |
| 10 | * |
| 11 | * This library is distributed in the hope that it will be useful, |
| 12 | * but WITHOUT ANY WARRANTY; without even the implied warranty of |
| 13 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU |
| 14 | * Lesser General Public License for more details. |
| 15 | * |
| 16 | * You should have received a copy of the GNU Lesser General Public |
Blue Swirl | 8167ee8 | 2009-07-16 20:47:01 +0000 | [diff] [blame] | 17 | * License along with this library; if not, see <http://www.gnu.org/licenses/>. |
bellard | e8af50a | 2004-09-30 21:55:55 +0000 | [diff] [blame] | 18 | */ |
bellard | ee5bbe3 | 2005-07-04 22:18:23 +0000 | [diff] [blame] | 19 | |
Peter Maydell | db5ebe5 | 2016-01-26 18:16:59 +0000 | [diff] [blame] | 20 | #include "qemu/osdep.h" |
bellard | ee5bbe3 | 2005-07-04 22:18:23 +0000 | [diff] [blame] | 21 | #include "cpu.h" |
Paolo Bonzini | 63c9155 | 2016-03-15 13:18:37 +0100 | [diff] [blame] | 22 | #include "exec/exec-all.h" |
Paolo Bonzini | 1de7afc | 2012-12-17 18:20:00 +0100 | [diff] [blame] | 23 | #include "qemu/host-utils.h" |
Richard Henderson | 2ef6175 | 2014-04-07 22:31:41 -0700 | [diff] [blame] | 24 | #include "exec/helper-proto.h" |
Paolo Bonzini | 9c17d61 | 2012-12-17 18:20:04 +0100 | [diff] [blame] | 25 | #include "sysemu/sysemu.h" |
bellard | e8af50a | 2004-09-30 21:55:55 +0000 | [diff] [blame] | 26 | |
Richard Henderson | 2f9d35f | 2016-07-12 13:12:50 -0700 | [diff] [blame] | 27 | void cpu_raise_exception_ra(CPUSPARCState *env, int tt, uintptr_t ra) |
| 28 | { |
| 29 | CPUState *cs = CPU(sparc_env_get_cpu(env)); |
| 30 | |
| 31 | cs->exception_index = tt; |
| 32 | cpu_loop_exit_restore(cs, ra); |
| 33 | } |
| 34 | |
Andreas Färber | c5f9864 | 2012-03-14 01:38:22 +0100 | [diff] [blame] | 35 | void helper_raise_exception(CPUSPARCState *env, int tt) |
Blue Swirl | bc26531 | 2011-07-03 08:19:42 +0000 | [diff] [blame] | 36 | { |
Andreas Färber | 2710342 | 2013-08-26 08:31:06 +0200 | [diff] [blame] | 37 | CPUState *cs = CPU(sparc_env_get_cpu(env)); |
| 38 | |
| 39 | cs->exception_index = tt; |
Andreas Färber | 5638d18 | 2013-08-27 17:52:12 +0200 | [diff] [blame] | 40 | cpu_loop_exit(cs); |
Blue Swirl | bc26531 | 2011-07-03 08:19:42 +0000 | [diff] [blame] | 41 | } |
| 42 | |
Andreas Färber | c5f9864 | 2012-03-14 01:38:22 +0100 | [diff] [blame] | 43 | void helper_debug(CPUSPARCState *env) |
Blue Swirl | bc26531 | 2011-07-03 08:19:42 +0000 | [diff] [blame] | 44 | { |
Andreas Färber | 2710342 | 2013-08-26 08:31:06 +0200 | [diff] [blame] | 45 | CPUState *cs = CPU(sparc_env_get_cpu(env)); |
| 46 | |
| 47 | cs->exception_index = EXCP_DEBUG; |
Andreas Färber | 5638d18 | 2013-08-27 17:52:12 +0200 | [diff] [blame] | 48 | cpu_loop_exit(cs); |
Blue Swirl | bc26531 | 2011-07-03 08:19:42 +0000 | [diff] [blame] | 49 | } |
| 50 | |
Blue Swirl | 2336c1f | 2011-07-03 07:05:50 +0000 | [diff] [blame] | 51 | #ifdef TARGET_SPARC64 |
/* Write the tick counter; a no-op under user-only emulation, where
   there is no timer device.  */
void helper_tick_set_count(void *opaque, uint64_t count)
{
#ifndef CONFIG_USER_ONLY
    cpu_tick_set_count(opaque, count);
#endif
}
| 58 | |
Mark Cave-Ayland | c9a4644 | 2015-11-08 17:11:59 +0000 | [diff] [blame] | 59 | uint64_t helper_tick_get_count(CPUSPARCState *env, void *opaque, int mem_idx) |
Blue Swirl | 2336c1f | 2011-07-03 07:05:50 +0000 | [diff] [blame] | 60 | { |
| 61 | #if !defined(CONFIG_USER_ONLY) |
Mark Cave-Ayland | c9a4644 | 2015-11-08 17:11:59 +0000 | [diff] [blame] | 62 | CPUTimer *timer = opaque; |
| 63 | |
| 64 | if (timer->npt && mem_idx < MMU_KERNEL_IDX) { |
Richard Henderson | 2f9d35f | 2016-07-12 13:12:50 -0700 | [diff] [blame] | 65 | cpu_raise_exception_ra(env, TT_PRIV_INSN, GETPC()); |
Mark Cave-Ayland | c9a4644 | 2015-11-08 17:11:59 +0000 | [diff] [blame] | 66 | } |
| 67 | |
| 68 | return cpu_tick_get_count(timer); |
Blue Swirl | 2336c1f | 2011-07-03 07:05:50 +0000 | [diff] [blame] | 69 | #else |
Laurent Vivier | b8e13ba | 2018-05-28 21:48:12 +0200 | [diff] [blame] | 70 | /* In user-mode, QEMU_CLOCK_VIRTUAL doesn't exist. |
| 71 | Just pass through the host cpu clock ticks. */ |
| 72 | return cpu_get_host_ticks(); |
Blue Swirl | 2336c1f | 2011-07-03 07:05:50 +0000 | [diff] [blame] | 73 | #endif |
| 74 | } |
| 75 | |
/* Write the tick compare (limit) register; a no-op under user-only
   emulation, where there is no timer device.  */
void helper_tick_set_limit(void *opaque, uint64_t limit)
{
#ifndef CONFIG_USER_ONLY
    cpu_tick_set_limit(opaque, limit);
#endif
}
| 82 | #endif |
Blue Swirl | 7a5e448 | 2011-07-04 18:15:42 +0000 | [diff] [blame] | 83 | |
Richard Henderson | 2f9d35f | 2016-07-12 13:12:50 -0700 | [diff] [blame] | 84 | static target_ulong do_udiv(CPUSPARCState *env, target_ulong a, |
| 85 | target_ulong b, int cc, uintptr_t ra) |
Blue Swirl | 7a5e448 | 2011-07-04 18:15:42 +0000 | [diff] [blame] | 86 | { |
Blue Swirl | 7a5e448 | 2011-07-04 18:15:42 +0000 | [diff] [blame] | 87 | int overflow = 0; |
| 88 | uint64_t x0; |
| 89 | uint32_t x1; |
| 90 | |
| 91 | x0 = (a & 0xffffffff) | ((int64_t) (env->y) << 32); |
| 92 | x1 = (b & 0xffffffff); |
| 93 | |
| 94 | if (x1 == 0) { |
Richard Henderson | 2f9d35f | 2016-07-12 13:12:50 -0700 | [diff] [blame] | 95 | cpu_raise_exception_ra(env, TT_DIV_ZERO, ra); |
Blue Swirl | 7a5e448 | 2011-07-04 18:15:42 +0000 | [diff] [blame] | 96 | } |
| 97 | |
| 98 | x0 = x0 / x1; |
Olivier Danet | 6a5b69a | 2014-03-21 02:25:19 +0100 | [diff] [blame] | 99 | if (x0 > UINT32_MAX) { |
| 100 | x0 = UINT32_MAX; |
Blue Swirl | 7a5e448 | 2011-07-04 18:15:42 +0000 | [diff] [blame] | 101 | overflow = 1; |
| 102 | } |
| 103 | |
| 104 | if (cc) { |
| 105 | env->cc_dst = x0; |
| 106 | env->cc_src2 = overflow; |
| 107 | env->cc_op = CC_OP_DIV; |
| 108 | } |
| 109 | return x0; |
| 110 | } |
| 111 | |
Andreas Färber | c5f9864 | 2012-03-14 01:38:22 +0100 | [diff] [blame] | 112 | target_ulong helper_udiv(CPUSPARCState *env, target_ulong a, target_ulong b) |
Blue Swirl | 7a5e448 | 2011-07-04 18:15:42 +0000 | [diff] [blame] | 113 | { |
Richard Henderson | 2f9d35f | 2016-07-12 13:12:50 -0700 | [diff] [blame] | 114 | return do_udiv(env, a, b, 0, GETPC()); |
Blue Swirl | 7a5e448 | 2011-07-04 18:15:42 +0000 | [diff] [blame] | 115 | } |
| 116 | |
Andreas Färber | c5f9864 | 2012-03-14 01:38:22 +0100 | [diff] [blame] | 117 | target_ulong helper_udiv_cc(CPUSPARCState *env, target_ulong a, target_ulong b) |
Blue Swirl | 7a5e448 | 2011-07-04 18:15:42 +0000 | [diff] [blame] | 118 | { |
Richard Henderson | 2f9d35f | 2016-07-12 13:12:50 -0700 | [diff] [blame] | 119 | return do_udiv(env, a, b, 1, GETPC()); |
Blue Swirl | 7a5e448 | 2011-07-04 18:15:42 +0000 | [diff] [blame] | 120 | } |
| 121 | |
Richard Henderson | 2f9d35f | 2016-07-12 13:12:50 -0700 | [diff] [blame] | 122 | static target_ulong do_sdiv(CPUSPARCState *env, target_ulong a, |
| 123 | target_ulong b, int cc, uintptr_t ra) |
Blue Swirl | 7a5e448 | 2011-07-04 18:15:42 +0000 | [diff] [blame] | 124 | { |
Blue Swirl | 7a5e448 | 2011-07-04 18:15:42 +0000 | [diff] [blame] | 125 | int overflow = 0; |
| 126 | int64_t x0; |
| 127 | int32_t x1; |
| 128 | |
| 129 | x0 = (a & 0xffffffff) | ((int64_t) (env->y) << 32); |
| 130 | x1 = (b & 0xffffffff); |
| 131 | |
| 132 | if (x1 == 0) { |
Richard Henderson | 2f9d35f | 2016-07-12 13:12:50 -0700 | [diff] [blame] | 133 | cpu_raise_exception_ra(env, TT_DIV_ZERO, ra); |
Olivier Danet | 6a5b69a | 2014-03-21 02:25:19 +0100 | [diff] [blame] | 134 | } else if (x1 == -1 && x0 == INT64_MIN) { |
| 135 | x0 = INT32_MAX; |
Blue Swirl | 7a5e448 | 2011-07-04 18:15:42 +0000 | [diff] [blame] | 136 | overflow = 1; |
Olivier Danet | 6a5b69a | 2014-03-21 02:25:19 +0100 | [diff] [blame] | 137 | } else { |
| 138 | x0 = x0 / x1; |
| 139 | if ((int32_t) x0 != x0) { |
| 140 | x0 = x0 < 0 ? INT32_MIN : INT32_MAX; |
| 141 | overflow = 1; |
| 142 | } |
Blue Swirl | 7a5e448 | 2011-07-04 18:15:42 +0000 | [diff] [blame] | 143 | } |
| 144 | |
| 145 | if (cc) { |
| 146 | env->cc_dst = x0; |
| 147 | env->cc_src2 = overflow; |
| 148 | env->cc_op = CC_OP_DIV; |
| 149 | } |
| 150 | return x0; |
| 151 | } |
| 152 | |
Andreas Färber | c5f9864 | 2012-03-14 01:38:22 +0100 | [diff] [blame] | 153 | target_ulong helper_sdiv(CPUSPARCState *env, target_ulong a, target_ulong b) |
Blue Swirl | 7a5e448 | 2011-07-04 18:15:42 +0000 | [diff] [blame] | 154 | { |
Richard Henderson | 2f9d35f | 2016-07-12 13:12:50 -0700 | [diff] [blame] | 155 | return do_sdiv(env, a, b, 0, GETPC()); |
Blue Swirl | 7a5e448 | 2011-07-04 18:15:42 +0000 | [diff] [blame] | 156 | } |
| 157 | |
Andreas Färber | c5f9864 | 2012-03-14 01:38:22 +0100 | [diff] [blame] | 158 | target_ulong helper_sdiv_cc(CPUSPARCState *env, target_ulong a, target_ulong b) |
Blue Swirl | 7a5e448 | 2011-07-04 18:15:42 +0000 | [diff] [blame] | 159 | { |
Richard Henderson | 2f9d35f | 2016-07-12 13:12:50 -0700 | [diff] [blame] | 160 | return do_sdiv(env, a, b, 1, GETPC()); |
Blue Swirl | 7a5e448 | 2011-07-04 18:15:42 +0000 | [diff] [blame] | 161 | } |
Richard Henderson | c28ae41 | 2012-10-05 16:55:03 -0700 | [diff] [blame] | 162 | |
| 163 | #ifdef TARGET_SPARC64 |
| 164 | int64_t helper_sdivx(CPUSPARCState *env, int64_t a, int64_t b) |
| 165 | { |
| 166 | if (b == 0) { |
| 167 | /* Raise divide by zero trap. */ |
Richard Henderson | 2f9d35f | 2016-07-12 13:12:50 -0700 | [diff] [blame] | 168 | cpu_raise_exception_ra(env, TT_DIV_ZERO, GETPC()); |
Richard Henderson | c28ae41 | 2012-10-05 16:55:03 -0700 | [diff] [blame] | 169 | } else if (b == -1) { |
| 170 | /* Avoid overflow trap with i386 divide insn. */ |
| 171 | return -a; |
| 172 | } else { |
| 173 | return a / b; |
| 174 | } |
| 175 | } |
| 176 | |
| 177 | uint64_t helper_udivx(CPUSPARCState *env, uint64_t a, uint64_t b) |
| 178 | { |
| 179 | if (b == 0) { |
| 180 | /* Raise divide by zero trap. */ |
Richard Henderson | 2f9d35f | 2016-07-12 13:12:50 -0700 | [diff] [blame] | 181 | cpu_raise_exception_ra(env, TT_DIV_ZERO, GETPC()); |
Richard Henderson | c28ae41 | 2012-10-05 16:55:03 -0700 | [diff] [blame] | 182 | } |
| 183 | return a / b; |
| 184 | } |
| 185 | #endif |
Richard Henderson | a2ea4aa | 2012-10-05 16:55:05 -0700 | [diff] [blame] | 186 | |
| 187 | target_ulong helper_taddcctv(CPUSPARCState *env, target_ulong src1, |
| 188 | target_ulong src2) |
| 189 | { |
Richard Henderson | a2ea4aa | 2012-10-05 16:55:05 -0700 | [diff] [blame] | 190 | target_ulong dst; |
| 191 | |
| 192 | /* Tag overflow occurs if either input has bits 0 or 1 set. */ |
| 193 | if ((src1 | src2) & 3) { |
| 194 | goto tag_overflow; |
| 195 | } |
| 196 | |
| 197 | dst = src1 + src2; |
| 198 | |
| 199 | /* Tag overflow occurs if the addition overflows. */ |
| 200 | if (~(src1 ^ src2) & (src1 ^ dst) & (1u << 31)) { |
| 201 | goto tag_overflow; |
| 202 | } |
| 203 | |
| 204 | /* Only modify the CC after any exceptions have been generated. */ |
| 205 | env->cc_op = CC_OP_TADDTV; |
| 206 | env->cc_src = src1; |
| 207 | env->cc_src2 = src2; |
| 208 | env->cc_dst = dst; |
| 209 | return dst; |
| 210 | |
| 211 | tag_overflow: |
Richard Henderson | 2f9d35f | 2016-07-12 13:12:50 -0700 | [diff] [blame] | 212 | cpu_raise_exception_ra(env, TT_TOVF, GETPC()); |
Richard Henderson | a2ea4aa | 2012-10-05 16:55:05 -0700 | [diff] [blame] | 213 | } |
| 214 | |
| 215 | target_ulong helper_tsubcctv(CPUSPARCState *env, target_ulong src1, |
| 216 | target_ulong src2) |
| 217 | { |
Richard Henderson | a2ea4aa | 2012-10-05 16:55:05 -0700 | [diff] [blame] | 218 | target_ulong dst; |
| 219 | |
| 220 | /* Tag overflow occurs if either input has bits 0 or 1 set. */ |
| 221 | if ((src1 | src2) & 3) { |
| 222 | goto tag_overflow; |
| 223 | } |
| 224 | |
| 225 | dst = src1 - src2; |
| 226 | |
| 227 | /* Tag overflow occurs if the subtraction overflows. */ |
| 228 | if ((src1 ^ src2) & (src1 ^ dst) & (1u << 31)) { |
| 229 | goto tag_overflow; |
| 230 | } |
| 231 | |
| 232 | /* Only modify the CC after any exceptions have been generated. */ |
| 233 | env->cc_op = CC_OP_TSUBTV; |
| 234 | env->cc_src = src1; |
| 235 | env->cc_src2 = src2; |
| 236 | env->cc_dst = dst; |
| 237 | return dst; |
| 238 | |
| 239 | tag_overflow: |
Richard Henderson | 2f9d35f | 2016-07-12 13:12:50 -0700 | [diff] [blame] | 240 | cpu_raise_exception_ra(env, TT_TOVF, GETPC()); |
Richard Henderson | a2ea4aa | 2012-10-05 16:55:05 -0700 | [diff] [blame] | 241 | } |
Ronald Hecht | d1c36ba | 2013-02-19 12:45:07 +0100 | [diff] [blame] | 242 | |
| 243 | #ifndef TARGET_SPARC64 |
| 244 | void helper_power_down(CPUSPARCState *env) |
| 245 | { |
Andreas Färber | 259186a | 2013-01-17 18:51:17 +0100 | [diff] [blame] | 246 | CPUState *cs = CPU(sparc_env_get_cpu(env)); |
| 247 | |
| 248 | cs->halted = 1; |
Andreas Färber | 2710342 | 2013-08-26 08:31:06 +0200 | [diff] [blame] | 249 | cs->exception_index = EXCP_HLT; |
Ronald Hecht | d1c36ba | 2013-02-19 12:45:07 +0100 | [diff] [blame] | 250 | env->pc = env->npc; |
| 251 | env->npc = env->pc + 4; |
Andreas Färber | 5638d18 | 2013-08-27 17:52:12 +0200 | [diff] [blame] | 252 | cpu_loop_exit(cs); |
Ronald Hecht | d1c36ba | 2013-02-19 12:45:07 +0100 | [diff] [blame] | 253 | } |
| 254 | #endif |