/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2008 Fabrice Bellard
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

#include "elf.h"
#include "tcg-pool.inc.c"

#if defined _CALL_DARWIN || defined __APPLE__
#define TCG_TARGET_CALL_DARWIN
#endif
#ifdef _CALL_SYSV
# define TCG_TARGET_CALL_ALIGN_ARGS   1
#endif

/* For some memory operations, we need a scratch that isn't R0.  For the AIX
   calling convention, we can re-use the TOC register since we'll be reloading
   it at every call.  Otherwise R12 will do nicely as neither a call-saved
   register nor a parameter register.  */
#ifdef _CALL_AIX
# define TCG_REG_TMP1   TCG_REG_R2
#else
# define TCG_REG_TMP1   TCG_REG_R12
#endif

#define TCG_REG_TB     TCG_REG_R31
#define USE_REG_TB     (TCG_TARGET_REG_BITS == 64)

/* Shorthand for size of a pointer.  Avoid promotion to unsigned.  */
#define SZP  ((int)sizeof(void *))

/* Shorthand for size of a register.  */
#define SZR  (TCG_TARGET_REG_BITS / 8)

#define TCG_CT_CONST_S16  0x100
#define TCG_CT_CONST_U16  0x200
#define TCG_CT_CONST_S32  0x400
#define TCG_CT_CONST_U32  0x800
#define TCG_CT_CONST_ZERO 0x1000
#define TCG_CT_CONST_MONE 0x2000
#define TCG_CT_CONST_WSZ  0x4000

static tcg_insn_unit *tb_ret_addr;

bool have_isa_2_06;
bool have_isa_3_00;

#define HAVE_ISA_2_06  have_isa_2_06
#define HAVE_ISEL      have_isa_2_06

#ifndef CONFIG_SOFTMMU
#define TCG_GUEST_BASE_REG 30
#endif

#ifdef CONFIG_DEBUG_TCG
static const char * const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "r0",
    "r1",
    "r2",
    "r3",
    "r4",
    "r5",
    "r6",
    "r7",
    "r8",
    "r9",
    "r10",
    "r11",
    "r12",
    "r13",
    "r14",
    "r15",
    "r16",
    "r17",
    "r18",
    "r19",
    "r20",
    "r21",
    "r22",
    "r23",
    "r24",
    "r25",
    "r26",
    "r27",
    "r28",
    "r29",
    "r30",
    "r31"
};
#endif

static const int tcg_target_reg_alloc_order[] = {
    TCG_REG_R14,  /* call saved registers */
    TCG_REG_R15,
    TCG_REG_R16,
    TCG_REG_R17,
    TCG_REG_R18,
    TCG_REG_R19,
    TCG_REG_R20,
    TCG_REG_R21,
    TCG_REG_R22,
    TCG_REG_R23,
    TCG_REG_R24,
    TCG_REG_R25,
    TCG_REG_R26,
    TCG_REG_R27,
    TCG_REG_R28,
    TCG_REG_R29,
    TCG_REG_R30,
    TCG_REG_R31,
    TCG_REG_R12,  /* call clobbered, non-arguments */
    TCG_REG_R11,
    TCG_REG_R2,
    TCG_REG_R13,
    TCG_REG_R10,  /* call clobbered, arguments */
    TCG_REG_R9,
    TCG_REG_R8,
    TCG_REG_R7,
    TCG_REG_R6,
    TCG_REG_R5,
    TCG_REG_R4,
    TCG_REG_R3,
};

static const int tcg_target_call_iarg_regs[] = {
    TCG_REG_R3,
    TCG_REG_R4,
    TCG_REG_R5,
    TCG_REG_R6,
    TCG_REG_R7,
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10
};

static const int tcg_target_call_oarg_regs[] = {
    TCG_REG_R3,
    TCG_REG_R4
};

static const int tcg_target_callee_save_regs[] = {
#ifdef TCG_TARGET_CALL_DARWIN
    TCG_REG_R11,
#endif
    TCG_REG_R14,
    TCG_REG_R15,
    TCG_REG_R16,
    TCG_REG_R17,
    TCG_REG_R18,
    TCG_REG_R19,
    TCG_REG_R20,
    TCG_REG_R21,
    TCG_REG_R22,
    TCG_REG_R23,
    TCG_REG_R24,
    TCG_REG_R25,
    TCG_REG_R26,
    TCG_REG_R27, /* currently used for the global env */
    TCG_REG_R28,
    TCG_REG_R29,
    TCG_REG_R30,
    TCG_REG_R31
};
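
/* Direct branches use a signed 26-bit byte displacement (+/- 32MB);
   in_range_b tests whether a target is reachable by B or BL.  */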
static inline bool in_range_b(tcg_target_long target)
{
    return target == sextract64(target, 0, 26);
}

static uint32_t reloc_pc24_val(tcg_insn_unit *pc, tcg_insn_unit *target)
{
    ptrdiff_t disp = tcg_ptr_byte_diff(target, pc);
    tcg_debug_assert(in_range_b(disp));
    return disp & 0x3fffffc;
}

static void reloc_pc24(tcg_insn_unit *pc, tcg_insn_unit *target)
{
    *pc = (*pc & ~0x3fffffc) | reloc_pc24_val(pc, target);
}

static uint16_t reloc_pc14_val(tcg_insn_unit *pc, tcg_insn_unit *target)
{
    ptrdiff_t disp = tcg_ptr_byte_diff(target, pc);
    tcg_debug_assert(disp == (int16_t) disp);
    return disp & 0xfffc;
}

static void reloc_pc14(tcg_insn_unit *pc, tcg_insn_unit *target)
{
    *pc = (*pc & ~0xfffc) | reloc_pc14_val(pc, target);
}
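
/* Emit a branch whose target is not yet known.  The displacement bits
   already present at s->code_ptr are preserved, so that a retranslation
   pass re-emits whatever target was previously patched in.  */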
static inline void tcg_out_b_noaddr(TCGContext *s, int insn)
{
    unsigned retrans = *s->code_ptr & 0x3fffffc;
    tcg_out32(s, insn | retrans);
}

static inline void tcg_out_bc_noaddr(TCGContext *s, int insn)
{
    unsigned retrans = *s->code_ptr & 0xfffc;
    tcg_out32(s, insn | retrans);
}

/* parse target specific constraints */
static const char *target_parse_constraint(TCGArgConstraint *ct,
                                           const char *ct_str, TCGType type)
{
    switch (*ct_str++) {
    case 'A': case 'B': case 'C': case 'D':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set_reg(ct->u.regs, 3 + ct_str[0] - 'A');
        break;
    case 'r':
        ct->ct |= TCG_CT_REG;
        ct->u.regs = 0xffffffff;
        break;
    case 'L':                   /* qemu_ld constraint */
        ct->ct |= TCG_CT_REG;
        ct->u.regs = 0xffffffff;
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_R3);
#ifdef CONFIG_SOFTMMU
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_R4);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_R5);
#endif
        break;
    case 'S':                   /* qemu_st constraint */
        ct->ct |= TCG_CT_REG;
        ct->u.regs = 0xffffffff;
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_R3);
#ifdef CONFIG_SOFTMMU
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_R4);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_R5);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_R6);
#endif
        break;
    case 'I':
        ct->ct |= TCG_CT_CONST_S16;
        break;
    case 'J':
        ct->ct |= TCG_CT_CONST_U16;
        break;
    case 'M':
        ct->ct |= TCG_CT_CONST_MONE;
        break;
    case 'T':
        ct->ct |= TCG_CT_CONST_S32;
        break;
    case 'U':
        ct->ct |= TCG_CT_CONST_U32;
        break;
    case 'W':
        ct->ct |= TCG_CT_CONST_WSZ;
        break;
    case 'Z':
        ct->ct |= TCG_CT_CONST_ZERO;
        break;
    default:
        return NULL;
    }
    return ct_str;
}

/* test if a constant matches the constraint */
static int tcg_target_const_match(tcg_target_long val, TCGType type,
                                  const TCGArgConstraint *arg_ct)
{
    int ct = arg_ct->ct;
    if (ct & TCG_CT_CONST) {
        return 1;
    }

    /* The only 32-bit constraint we use aside from
       TCG_CT_CONST is TCG_CT_CONST_S16.  */
    if (type == TCG_TYPE_I32) {
        val = (int32_t)val;
    }

    if ((ct & TCG_CT_CONST_S16) && val == (int16_t)val) {
        return 1;
    } else if ((ct & TCG_CT_CONST_U16) && val == (uint16_t)val) {
        return 1;
    } else if ((ct & TCG_CT_CONST_S32) && val == (int32_t)val) {
        return 1;
    } else if ((ct & TCG_CT_CONST_U32) && val == (uint32_t)val) {
        return 1;
    } else if ((ct & TCG_CT_CONST_ZERO) && val == 0) {
        return 1;
    } else if ((ct & TCG_CT_CONST_MONE) && val == -1) {
        return 1;
    } else if ((ct & TCG_CT_CONST_WSZ)
               && val == (type == TCG_TYPE_I32 ? 32 : 64)) {
        return 1;
    }
    return 0;
}

#define OPCD(opc) ((opc)<<26)
#define XO19(opc) (OPCD(19)|((opc)<<1))
#define MD30(opc) (OPCD(30)|((opc)<<2))
#define MDS30(opc) (OPCD(30)|((opc)<<1))
#define XO31(opc) (OPCD(31)|((opc)<<1))
#define XO58(opc) (OPCD(58)|(opc))
#define XO62(opc) (OPCD(62)|(opc))

#define B      OPCD( 18)
#define BC     OPCD( 16)
#define LBZ    OPCD( 34)
#define LHZ    OPCD( 40)
#define LHA    OPCD( 42)
#define LWZ    OPCD( 32)
#define STB    OPCD( 38)
#define STH    OPCD( 44)
#define STW    OPCD( 36)

#define STD    XO62(  0)
#define STDU   XO62(  1)
#define STDX   XO31(149)

#define LD     XO58(  0)
#define LDX    XO31( 21)
#define LDU    XO58(  1)
#define LWA    XO58(  2)
#define LWAX   XO31(341)

#define ADDIC  OPCD( 12)
#define ADDI   OPCD( 14)
#define ADDIS  OPCD( 15)
#define ORI    OPCD( 24)
#define ORIS   OPCD( 25)
#define XORI   OPCD( 26)
#define XORIS  OPCD( 27)
#define ANDI   OPCD( 28)
#define ANDIS  OPCD( 29)
#define MULLI  OPCD(  7)
#define CMPLI  OPCD( 10)
#define CMPI   OPCD( 11)
#define SUBFIC OPCD(  8)

#define LWZU   OPCD( 33)
#define STWU   OPCD( 37)

#define RLWIMI OPCD( 20)
#define RLWINM OPCD( 21)
#define RLWNM  OPCD( 23)

#define RLDICL MD30(  0)
#define RLDICR MD30(  1)
#define RLDIMI MD30(  3)
#define RLDCL  MDS30( 8)

#define BCLR   XO19( 16)
#define BCCTR  XO19(528)
#define CRAND  XO19(257)
#define CRANDC XO19(129)
#define CRNAND XO19(225)
#define CROR   XO19(449)
#define CRNOR  XO19( 33)

#define EXTSB  XO31(954)
#define EXTSH  XO31(922)
#define EXTSW  XO31(986)
#define ADD    XO31(266)
#define ADDE   XO31(138)
#define ADDME  XO31(234)
#define ADDZE  XO31(202)
#define ADDC   XO31( 10)
#define AND    XO31( 28)
#define SUBF   XO31( 40)
#define SUBFC  XO31(  8)
#define SUBFE  XO31(136)
#define SUBFME XO31(232)
#define SUBFZE XO31(200)
#define OR     XO31(444)
#define XOR    XO31(316)
#define MULLW  XO31(235)
#define MULHW  XO31( 75)
#define MULHWU XO31( 11)
#define DIVW   XO31(491)
#define DIVWU  XO31(459)
#define CMP    XO31(  0)
#define CMPL   XO31( 32)
#define LHBRX  XO31(790)
#define LWBRX  XO31(534)
#define LDBRX  XO31(532)
#define STHBRX XO31(918)
#define STWBRX XO31(662)
#define STDBRX XO31(660)
#define MFSPR  XO31(339)
#define MTSPR  XO31(467)
#define SRAWI  XO31(824)
#define NEG    XO31(104)
#define MFCR   XO31( 19)
#define MFOCRF (MFCR | (1u << 20))
#define NOR    XO31(124)
#define CNTLZW XO31( 26)
#define CNTLZD XO31( 58)
#define CNTTZW XO31(538)
#define CNTTZD XO31(570)
#define CNTPOPW XO31(378)
#define CNTPOPD XO31(506)
#define ANDC   XO31( 60)
#define ORC    XO31(412)
#define EQV    XO31(284)
#define NAND   XO31(476)
#define ISEL   XO31( 15)

#define MULLD  XO31(233)
#define MULHD  XO31( 73)
#define MULHDU XO31(  9)
#define DIVD   XO31(489)
#define DIVDU  XO31(457)

#define LBZX   XO31( 87)
#define LHZX   XO31(279)
#define LHAX   XO31(343)
#define LWZX   XO31( 23)
#define STBX   XO31(215)
#define STHX   XO31(407)
#define STWX   XO31(151)

#define EIEIO  XO31(854)
#define HWSYNC XO31(598)
#define LWSYNC (HWSYNC | (1u << 21))

#define SPR(a, b) ((((a)<<5)|(b))<<11)
#define LR     SPR(8, 0)
#define CTR    SPR(9, 0)

#define SLW    XO31( 24)
#define SRW    XO31(536)
#define SRAW   XO31(792)

#define SLD    XO31( 27)
#define SRD    XO31(539)
#define SRAD   XO31(794)
#define SRADI  XO31(413<<1)

#define TW     XO31(  4)
#define TRAP   (TW | TO(31))

#define NOP    ORI  /* ori 0,0,0 */

#define RT(r) ((r)<<21)
#define RS(r) ((r)<<21)
#define RA(r) ((r)<<16)
#define RB(r) ((r)<<11)
#define TO(t) ((t)<<21)
#define SH(s) ((s)<<11)
#define MB(b) ((b)<<6)
#define ME(e) ((e)<<1)
#define BO(o) ((o)<<21)
#define MB64(b) ((b)<<5)
#define FXM(b) (1 << (19 - (b)))

#define LK    1

#define TAB(t, a, b) (RT(t) | RA(a) | RB(b))
#define SAB(s, a, b) (RS(s) | RA(a) | RB(b))
#define TAI(s, a, i) (RT(s) | RA(a) | ((i) & 0xffff))
#define SAI(s, a, i) (RS(s) | RA(a) | ((i) & 0xffff))

#define BF(n)    ((n)<<23)
#define BI(n, c) (((c)+((n)*4))<<16)
#define BT(n, c) (((c)+((n)*4))<<21)
#define BA(n, c) (((c)+((n)*4))<<16)
#define BB(n, c) (((c)+((n)*4))<<11)
#define BC_(n, c) (((c)+((n)*4))<<6)

#define BO_COND_TRUE  BO(12)
#define BO_COND_FALSE BO( 4)
#define BO_ALWAYS     BO(20)

enum {
    CR_LT,
    CR_GT,
    CR_EQ,
    CR_SO
};
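
/* Branch-conditional encodings indexed by TCG condition; all of the
   comparisons below test condition register field 7.  */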
static const uint32_t tcg_to_bc[] = {
    [TCG_COND_EQ]  = BC | BI(7, CR_EQ) | BO_COND_TRUE,
    [TCG_COND_NE]  = BC | BI(7, CR_EQ) | BO_COND_FALSE,
    [TCG_COND_LT]  = BC | BI(7, CR_LT) | BO_COND_TRUE,
    [TCG_COND_GE]  = BC | BI(7, CR_LT) | BO_COND_FALSE,
    [TCG_COND_LE]  = BC | BI(7, CR_GT) | BO_COND_FALSE,
    [TCG_COND_GT]  = BC | BI(7, CR_GT) | BO_COND_TRUE,
    [TCG_COND_LTU] = BC | BI(7, CR_LT) | BO_COND_TRUE,
    [TCG_COND_GEU] = BC | BI(7, CR_LT) | BO_COND_FALSE,
    [TCG_COND_LEU] = BC | BI(7, CR_GT) | BO_COND_FALSE,
    [TCG_COND_GTU] = BC | BI(7, CR_GT) | BO_COND_TRUE,
};

/* The low bit here is set if the RA and RB fields must be inverted.  */
static const uint32_t tcg_to_isel[] = {
    [TCG_COND_EQ]  = ISEL | BC_(7, CR_EQ),
    [TCG_COND_NE]  = ISEL | BC_(7, CR_EQ) | 1,
    [TCG_COND_LT]  = ISEL | BC_(7, CR_LT),
    [TCG_COND_GE]  = ISEL | BC_(7, CR_LT) | 1,
    [TCG_COND_LE]  = ISEL | BC_(7, CR_GT) | 1,
    [TCG_COND_GT]  = ISEL | BC_(7, CR_GT),
    [TCG_COND_LTU] = ISEL | BC_(7, CR_LT),
    [TCG_COND_GEU] = ISEL | BC_(7, CR_LT) | 1,
    [TCG_COND_LEU] = ISEL | BC_(7, CR_GT) | 1,
    [TCG_COND_GTU] = ISEL | BC_(7, CR_GT),
};

static void patch_reloc(tcg_insn_unit *code_ptr, int type,
                        intptr_t value, intptr_t addend)
{
    tcg_insn_unit *target;
    tcg_insn_unit old;

    value += addend;
    target = (tcg_insn_unit *)value;

    switch (type) {
    case R_PPC_REL14:
        reloc_pc14(code_ptr, target);
        break;
    case R_PPC_REL24:
        reloc_pc24(code_ptr, target);
        break;
    case R_PPC_ADDR16:
        /* We are abusing this relocation type.  This points to a pair
           of insns, addis + load.  If the displacement is small, we
           can nop out the addis.  */
        if (value == (int16_t)value) {
            code_ptr[0] = NOP;
            old = deposit32(code_ptr[1], 0, 16, value);
            code_ptr[1] = deposit32(old, 16, 5, TCG_REG_TB);
        } else {
            int16_t lo = value;
            int hi = value - lo;
            assert(hi + lo == value);
            code_ptr[0] = deposit32(code_ptr[0], 0, 16, hi >> 16);
            code_ptr[1] = deposit32(code_ptr[1], 0, 16, lo);
        }
        break;
    default:
        g_assert_not_reached();
    }
}

static void tcg_out_mem_long(TCGContext *s, int opi, int opx, TCGReg rt,
                             TCGReg base, tcg_target_long offset);

static void tcg_out_mov(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg)
{
    tcg_debug_assert(TCG_TARGET_REG_BITS == 64 || type == TCG_TYPE_I32);
    if (ret != arg) {
        tcg_out32(s, OR | SAB(arg, ret, arg));
    }
}
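
/* Emit a 64-bit rotate-immediate (RLDICL/RLDICR/RLDIMI).  The 6-bit shift
   and mask-begin values are split across non-contiguous instruction
   fields, so reassemble them here.  */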
static inline void tcg_out_rld(TCGContext *s, int op, TCGReg ra, TCGReg rs,
                               int sh, int mb)
{
    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
    sh = SH(sh & 0x1f) | (((sh >> 5) & 1) << 1);
    mb = MB64((mb >> 5) | ((mb << 1) & 0x3f));
    tcg_out32(s, op | RA(ra) | RS(rs) | sh | mb);
}

static inline void tcg_out_rlw(TCGContext *s, int op, TCGReg ra, TCGReg rs,
                               int sh, int mb, int me)
{
    tcg_out32(s, op | RA(ra) | RS(rs) | SH(sh) | MB(mb) | ME(me));
}

static inline void tcg_out_ext32u(TCGContext *s, TCGReg dst, TCGReg src)
{
    tcg_out_rld(s, RLDICL, dst, src, 0, 32);
}

static inline void tcg_out_shli32(TCGContext *s, TCGReg dst, TCGReg src, int c)
{
    tcg_out_rlw(s, RLWINM, dst, src, c, 0, 31 - c);
}

static inline void tcg_out_shli64(TCGContext *s, TCGReg dst, TCGReg src, int c)
{
    tcg_out_rld(s, RLDICR, dst, src, c, 63 - c);
}

static inline void tcg_out_shri32(TCGContext *s, TCGReg dst, TCGReg src, int c)
{
    tcg_out_rlw(s, RLWINM, dst, src, 32 - c, c, 31);
}

static inline void tcg_out_shri64(TCGContext *s, TCGReg dst, TCGReg src, int c)
{
    tcg_out_rld(s, RLDICL, dst, src, 64 - c, c);
}

/* Emit a move into ret of arg, if it can be done in one insn.  */
static bool tcg_out_movi_one(TCGContext *s, TCGReg ret, tcg_target_long arg)
{
    if (arg == (int16_t)arg) {
        tcg_out32(s, ADDI | TAI(ret, 0, arg));
        return true;
    }
    if (arg == (int32_t)arg && (arg & 0xffff) == 0) {
        tcg_out32(s, ADDIS | TAI(ret, 0, arg >> 16));
        return true;
    }
    return false;
}

static void tcg_out_movi_int(TCGContext *s, TCGType type, TCGReg ret,
                             tcg_target_long arg, bool in_prologue)
{
    intptr_t tb_diff;
    tcg_target_long tmp;
    int shift;

    tcg_debug_assert(TCG_TARGET_REG_BITS == 64 || type == TCG_TYPE_I32);

    if (TCG_TARGET_REG_BITS == 64 && type == TCG_TYPE_I32) {
        arg = (int32_t)arg;
    }

    /* Load 16-bit immediates with one insn.  */
    if (tcg_out_movi_one(s, ret, arg)) {
        return;
    }

    /* Load addresses within the TB with one insn.  */
    tb_diff = arg - (intptr_t)s->code_gen_ptr;
    if (!in_prologue && USE_REG_TB && tb_diff == (int16_t)tb_diff) {
        tcg_out32(s, ADDI | TAI(ret, TCG_REG_TB, tb_diff));
        return;
    }

    /* Load 32-bit immediates with two insns.  Note that we've already
       eliminated bare ADDIS, so we know both insns are required.  */
    if (TCG_TARGET_REG_BITS == 32 || arg == (int32_t)arg) {
        tcg_out32(s, ADDIS | TAI(ret, 0, arg >> 16));
        tcg_out32(s, ORI | SAI(ret, ret, arg));
        return;
    }
    if (arg == (uint32_t)arg && !(arg & 0x8000)) {
        tcg_out32(s, ADDI | TAI(ret, 0, arg));
        tcg_out32(s, ORIS | SAI(ret, ret, arg >> 16));
        return;
    }

    /* Load masked 16-bit value.  */
    if (arg > 0 && (arg & 0x8000)) {
        tmp = arg | 0x7fff;
        if ((tmp & (tmp + 1)) == 0) {
            int mb = clz64(tmp + 1) + 1;
            tcg_out32(s, ADDI | TAI(ret, 0, arg));
            tcg_out_rld(s, RLDICL, ret, ret, 0, mb);
            return;
        }
    }

    /* Load common masks with 2 insns.  */
    shift = ctz64(arg);
    tmp = arg >> shift;
    if (tmp == (int16_t)tmp) {
        tcg_out32(s, ADDI | TAI(ret, 0, tmp));
        tcg_out_shli64(s, ret, ret, shift);
        return;
    }
    shift = clz64(arg);
    if (tcg_out_movi_one(s, ret, arg << shift)) {
        tcg_out_shri64(s, ret, ret, shift);
        return;
    }

    /* Load addresses within 2GB of TB with 2 (or rarely 3) insns.  */
    if (!in_prologue && USE_REG_TB && tb_diff == (int32_t)tb_diff) {
        tcg_out_mem_long(s, ADDI, ADD, ret, TCG_REG_TB, tb_diff);
        return;
    }

    /* Use the constant pool, if possible.  */
    if (!in_prologue && USE_REG_TB) {
        new_pool_label(s, arg, R_PPC_ADDR16, s->code_ptr,
                       -(intptr_t)s->code_gen_ptr);
        tcg_out32(s, ADDIS | TAI(ret, TCG_REG_TB, 0));
        tcg_out32(s, LD | TAI(ret, ret, 0));
        return;
    }
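
    /* Otherwise build the constant piecewise: load the high 32 bits,
       shift them into place, then OR in each nonzero low halfword.  */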
    tmp = arg >> 31 >> 1;
    tcg_out_movi(s, TCG_TYPE_I32, ret, tmp);
    if (tmp) {
        tcg_out_shli64(s, ret, ret, 32);
    }
    if (arg & 0xffff0000) {
        tcg_out32(s, ORIS | SAI(ret, ret, arg >> 16));
    }
    if (arg & 0xffff) {
        tcg_out32(s, ORI | SAI(ret, ret, arg));
    }
}

static inline void tcg_out_movi(TCGContext *s, TCGType type, TCGReg ret,
                                tcg_target_long arg)
{
    tcg_out_movi_int(s, type, ret, arg, false);
}

static bool mask_operand(uint32_t c, int *mb, int *me)
{
    uint32_t lsb, test;

    /* Accept a bit pattern like:
           0....01....1
           1....10....0
           0..01..10..0
       Keep track of the transitions.  */
    if (c == 0 || c == -1) {
        return false;
    }
    test = c;
    lsb = test & -test;
    test += lsb;
    if (test & (test - 1)) {
        return false;
    }

    *me = clz32(lsb);
    *mb = test ? clz32(test & -test) + 1 : 0;
    return true;
}

static bool mask64_operand(uint64_t c, int *mb, int *me)
{
    uint64_t lsb;

    if (c == 0) {
        return false;
    }

    lsb = c & -c;
    /* Accept 1..10..0.  */
    if (c == -lsb) {
        *mb = 0;
        *me = clz64(lsb);
        return true;
    }
    /* Accept 0..01..1.  */
    if (lsb == 1 && (c & (c + 1)) == 0) {
        *mb = clz64(c + 1) + 1;
        *me = 63;
        return true;
    }
    return false;
}

static void tcg_out_andi32(TCGContext *s, TCGReg dst, TCGReg src, uint32_t c)
{
    int mb, me;

    if (mask_operand(c, &mb, &me)) {
        tcg_out_rlw(s, RLWINM, dst, src, 0, mb, me);
    } else if ((c & 0xffff) == c) {
        tcg_out32(s, ANDI | SAI(src, dst, c));
        return;
    } else if ((c & 0xffff0000) == c) {
        tcg_out32(s, ANDIS | SAI(src, dst, c >> 16));
        return;
    } else {
        tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_R0, c);
        tcg_out32(s, AND | SAB(src, dst, TCG_REG_R0));
    }
}

static void tcg_out_andi64(TCGContext *s, TCGReg dst, TCGReg src, uint64_t c)
{
    int mb, me;

    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
    if (mask64_operand(c, &mb, &me)) {
        if (mb == 0) {
            tcg_out_rld(s, RLDICR, dst, src, 0, me);
        } else {
            tcg_out_rld(s, RLDICL, dst, src, 0, mb);
        }
    } else if ((c & 0xffff) == c) {
        tcg_out32(s, ANDI | SAI(src, dst, c));
        return;
    } else if ((c & 0xffff0000) == c) {
        tcg_out32(s, ANDIS | SAI(src, dst, c >> 16));
        return;
    } else {
        tcg_out_movi(s, TCG_TYPE_I64, TCG_REG_R0, c);
        tcg_out32(s, AND | SAB(src, dst, TCG_REG_R0));
    }
}

static void tcg_out_zori32(TCGContext *s, TCGReg dst, TCGReg src, uint32_t c,
                           int op_lo, int op_hi)
{
    if (c >> 16) {
        tcg_out32(s, op_hi | SAI(src, dst, c >> 16));
        src = dst;
    }
    if (c & 0xffff) {
        tcg_out32(s, op_lo | SAI(src, dst, c));
        src = dst;
    }
}

static void tcg_out_ori32(TCGContext *s, TCGReg dst, TCGReg src, uint32_t c)
{
    tcg_out_zori32(s, dst, src, c, ORI, ORIS);
}

static void tcg_out_xori32(TCGContext *s, TCGReg dst, TCGReg src, uint32_t c)
{
    tcg_out_zori32(s, dst, src, c, XORI, XORIS);
}

static void tcg_out_b(TCGContext *s, int mask, tcg_insn_unit *target)
{
    ptrdiff_t disp = tcg_pcrel_diff(s, target);
    if (in_range_b(disp)) {
        tcg_out32(s, B | (disp & 0x3fffffc) | mask);
    } else {
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_R0, (uintptr_t)target);
        tcg_out32(s, MTSPR | RS(TCG_REG_R0) | CTR);
        tcg_out32(s, BCCTR | BO_ALWAYS | mask);
    }
}

static void tcg_out_mem_long(TCGContext *s, int opi, int opx, TCGReg rt,
                             TCGReg base, tcg_target_long offset)
{
    tcg_target_long orig = offset, l0, l1, extra = 0, align = 0;
    bool is_store = false;
    TCGReg rs = TCG_REG_TMP1;

    switch (opi) {
    case LD: case LWA:
        align = 3;
        /* FALLTHRU */
    default:
        if (rt != TCG_REG_R0) {
            rs = rt;
            break;
        }
        break;
    case STD:
        align = 3;
        /* FALLTHRU */
    case STB: case STH: case STW:
        is_store = true;
        break;
    }

    /* For unaligned, or very large offsets, use the indexed form.  */
    if (offset & align || offset != (int32_t)offset) {
        if (rs == base) {
            rs = TCG_REG_R0;
        }
        tcg_debug_assert(!is_store || rs != rt);
        tcg_out_movi(s, TCG_TYPE_PTR, rs, orig);
        tcg_out32(s, opx | TAB(rt, base, rs));
        return;
    }

    l0 = (int16_t)offset;
    offset = (offset - l0) >> 16;
    l1 = (int16_t)offset;

    if (l1 < 0 && orig >= 0) {
        extra = 0x4000;
        l1 = (int16_t)(offset - 0x4000);
    }
    if (l1) {
        tcg_out32(s, ADDIS | TAI(rs, base, l1));
        base = rs;
    }
    if (extra) {
        tcg_out32(s, ADDIS | TAI(rs, base, extra));
        base = rs;
    }
    if (opi != ADDI || base != rt || l0 != 0) {
        tcg_out32(s, opi | TAI(rt, base, l0));
    }
}

static inline void tcg_out_ld(TCGContext *s, TCGType type, TCGReg ret,
                              TCGReg arg1, intptr_t arg2)
{
    int opi, opx;

    tcg_debug_assert(TCG_TARGET_REG_BITS == 64 || type == TCG_TYPE_I32);
    if (type == TCG_TYPE_I32) {
        opi = LWZ, opx = LWZX;
    } else {
        opi = LD, opx = LDX;
    }
    tcg_out_mem_long(s, opi, opx, ret, arg1, arg2);
}

static inline void tcg_out_st(TCGContext *s, TCGType type, TCGReg arg,
                              TCGReg arg1, intptr_t arg2)
{
    int opi, opx;

    tcg_debug_assert(TCG_TARGET_REG_BITS == 64 || type == TCG_TYPE_I32);
    if (type == TCG_TYPE_I32) {
        opi = STW, opx = STWX;
    } else {
        opi = STD, opx = STDX;
    }
    tcg_out_mem_long(s, opi, opx, arg, arg1, arg2);
}
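
/* We never store a constant directly to memory; returning false makes
   the generic code materialize the value in a register first.  */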
static inline bool tcg_out_sti(TCGContext *s, TCGType type, TCGArg val,
                               TCGReg base, intptr_t ofs)
{
    return false;
}

static void tcg_out_cmp(TCGContext *s, int cond, TCGArg arg1, TCGArg arg2,
                        int const_arg2, int cr, TCGType type)
{
    int imm;
    uint32_t op;

    tcg_debug_assert(TCG_TARGET_REG_BITS == 64 || type == TCG_TYPE_I32);

    /* Simplify the comparisons below wrt CMPI.  */
    if (type == TCG_TYPE_I32) {
        arg2 = (int32_t)arg2;
    }

    switch (cond) {
    case TCG_COND_EQ:
    case TCG_COND_NE:
        if (const_arg2) {
            if ((int16_t) arg2 == arg2) {
                op = CMPI;
                imm = 1;
                break;
            } else if ((uint16_t) arg2 == arg2) {
                op = CMPLI;
                imm = 1;
                break;
            }
        }
        op = CMPL;
        imm = 0;
        break;

    case TCG_COND_LT:
    case TCG_COND_GE:
    case TCG_COND_LE:
    case TCG_COND_GT:
        if (const_arg2) {
            if ((int16_t) arg2 == arg2) {
                op = CMPI;
                imm = 1;
                break;
            }
        }
        op = CMP;
        imm = 0;
        break;

    case TCG_COND_LTU:
    case TCG_COND_GEU:
    case TCG_COND_LEU:
    case TCG_COND_GTU:
        if (const_arg2) {
            if ((uint16_t) arg2 == arg2) {
                op = CMPLI;
                imm = 1;
                break;
            }
        }
        op = CMPL;
        imm = 0;
        break;

    default:
        tcg_abort();
    }
    op |= BF(cr) | ((type == TCG_TYPE_I64) << 21);

    if (imm) {
        tcg_out32(s, op | RA(arg1) | (arg2 & 0xffff));
    } else {
        if (const_arg2) {
            tcg_out_movi(s, type, TCG_REG_R0, arg2);
            arg2 = TCG_REG_R0;
        }
        tcg_out32(s, op | RA(arg1) | RB(arg2));
    }
}

static void tcg_out_setcond_eq0(TCGContext *s, TCGType type,
                                TCGReg dst, TCGReg src)
{
    if (type == TCG_TYPE_I32) {
        tcg_out32(s, CNTLZW | RS(src) | RA(dst));
        tcg_out_shri32(s, dst, dst, 5);
    } else {
        tcg_out32(s, CNTLZD | RS(src) | RA(dst));
        tcg_out_shri64(s, dst, dst, 6);
    }
}

static void tcg_out_setcond_ne0(TCGContext *s, TCGReg dst, TCGReg src)
{
    /* X != 0 implies X + -1 generates a carry.  Extra addition
       trickery means: R = X-1 + ~X + C = X-1 + (-X+1) + C = C.  */
    if (dst != src) {
        tcg_out32(s, ADDIC | TAI(dst, src, -1));
        tcg_out32(s, SUBFE | TAB(dst, dst, src));
    } else {
        tcg_out32(s, ADDIC | TAI(TCG_REG_R0, src, -1));
        tcg_out32(s, SUBFE | TAB(dst, TCG_REG_R0, src));
    }
}
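
/* Reduce a comparison against an arbitrary value to a comparison against
   zero by XOR-ing arg1 with arg2 (register or constant) into R0.  */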
| 1044 | static TCGReg tcg_gen_setcond_xor(TCGContext *s, TCGReg arg1, TCGArg arg2, |
| 1045 | bool const_arg2) |
| 1046 | { |
| 1047 | if (const_arg2) { |
| 1048 | if ((uint32_t)arg2 == arg2) { |
| 1049 | tcg_out_xori32(s, TCG_REG_R0, arg1, arg2); |
| 1050 | } else { |
| 1051 | tcg_out_movi(s, TCG_TYPE_I64, TCG_REG_R0, arg2); |
| 1052 | tcg_out32(s, XOR | SAB(arg1, TCG_REG_R0, TCG_REG_R0)); |
| 1053 | } |
| 1054 | } else { |
| 1055 | tcg_out32(s, XOR | SAB(arg1, TCG_REG_R0, arg2)); |
| 1056 | } |
| 1057 | return TCG_REG_R0; |
| 1058 | } |
| 1059 | |
| 1060 | static void tcg_out_setcond(TCGContext *s, TCGType type, TCGCond cond, |
| 1061 | TCGArg arg0, TCGArg arg1, TCGArg arg2, |
| 1062 | int const_arg2) |
| 1063 | { |
| 1064 | int crop, sh; |
| 1065 | |
Aurelien Jarno | eabb7b9 | 2016-04-21 10:48:49 +0200 | [diff] [blame] | 1066 | tcg_debug_assert(TCG_TARGET_REG_BITS == 64 || type == TCG_TYPE_I32); |
Richard Henderson | a757e1e | 2014-03-26 18:10:43 -0700 | [diff] [blame] | 1067 | |
Richard Henderson | d604f1a | 2014-03-24 15:44:09 -0700 | [diff] [blame] | 1068 | /* Ignore high bits of a potential constant arg2. */ |
| 1069 | if (type == TCG_TYPE_I32) { |
| 1070 | arg2 = (uint32_t)arg2; |
| 1071 | } |
| 1072 | |
| 1073 | /* Handle common and trivial cases before handling anything else. */ |
| 1074 | if (arg2 == 0) { |
| 1075 | switch (cond) { |
| 1076 | case TCG_COND_EQ: |
| 1077 | tcg_out_setcond_eq0(s, type, arg0, arg1); |
| 1078 | return; |
| 1079 | case TCG_COND_NE: |
Richard Henderson | a757e1e | 2014-03-26 18:10:43 -0700 | [diff] [blame] | 1080 | if (TCG_TARGET_REG_BITS == 64 && type == TCG_TYPE_I32) { |
Richard Henderson | d604f1a | 2014-03-24 15:44:09 -0700 | [diff] [blame] | 1081 | tcg_out_ext32u(s, TCG_REG_R0, arg1); |
| 1082 | arg1 = TCG_REG_R0; |
| 1083 | } |
| 1084 | tcg_out_setcond_ne0(s, arg0, arg1); |
| 1085 | return; |
| 1086 | case TCG_COND_GE: |
| 1087 | tcg_out32(s, NOR | SAB(arg1, arg0, arg1)); |
| 1088 | arg1 = arg0; |
| 1089 | /* FALLTHRU */ |
| 1090 | case TCG_COND_LT: |
| 1091 | /* Extract the sign bit. */ |
Richard Henderson | a757e1e | 2014-03-26 18:10:43 -0700 | [diff] [blame] | 1092 | if (type == TCG_TYPE_I32) { |
| 1093 | tcg_out_shri32(s, arg0, arg1, 31); |
| 1094 | } else { |
| 1095 | tcg_out_shri64(s, arg0, arg1, 63); |
| 1096 | } |
Richard Henderson | d604f1a | 2014-03-24 15:44:09 -0700 | [diff] [blame] | 1097 | return; |
| 1098 | default: |
| 1099 | break; |
| 1100 | } |
| 1101 | } |
| 1102 | |
| 1103 | /* If we have ISEL, we can implement everything with 3 or 4 insns. |
| 1104 | All other cases below are also at least 3 insns, so speed up the |
| 1105 | code generator by not considering them and always using ISEL. */ |
| 1106 | if (HAVE_ISEL) { |
| 1107 | int isel, tab; |
| 1108 | |
| 1109 | tcg_out_cmp(s, cond, arg1, arg2, const_arg2, 7, type); |
| 1110 | |
| 1111 | isel = tcg_to_isel[cond]; |
| 1112 | |
| 1113 | tcg_out_movi(s, type, arg0, 1); |
| 1114 | if (isel & 1) { |
| 1115 | /* arg0 = (bc ? 0 : 1) */ |
| 1116 | tab = TAB(arg0, 0, arg0); |
| 1117 | isel &= ~1; |
| 1118 | } else { |
| 1119 | /* arg0 = (bc ? 1 : 0) */ |
| 1120 | tcg_out_movi(s, type, TCG_REG_R0, 0); |
| 1121 | tab = TAB(arg0, arg0, TCG_REG_R0); |
| 1122 | } |
| 1123 | tcg_out32(s, isel | tab); |
| 1124 | return; |
| 1125 | } |
| 1126 | |
| 1127 | switch (cond) { |
| 1128 | case TCG_COND_EQ: |
| 1129 | arg1 = tcg_gen_setcond_xor(s, arg1, arg2, const_arg2); |
| 1130 | tcg_out_setcond_eq0(s, type, arg0, arg1); |
| 1131 | return; |
| 1132 | |
| 1133 | case TCG_COND_NE: |
| 1134 | arg1 = tcg_gen_setcond_xor(s, arg1, arg2, const_arg2); |
| 1135 | /* Discard the high bits only once, rather than both inputs. */ |
Richard Henderson | a757e1e | 2014-03-26 18:10:43 -0700 | [diff] [blame] | 1136 | if (TCG_TARGET_REG_BITS == 64 && type == TCG_TYPE_I32) { |
Richard Henderson | d604f1a | 2014-03-24 15:44:09 -0700 | [diff] [blame] | 1137 | tcg_out_ext32u(s, TCG_REG_R0, arg1); |
| 1138 | arg1 = TCG_REG_R0; |
| 1139 | } |
| 1140 | tcg_out_setcond_ne0(s, arg0, arg1); |
| 1141 | return; |
| 1142 | |
| 1143 | case TCG_COND_GT: |
| 1144 | case TCG_COND_GTU: |
| 1145 | sh = 30; |
| 1146 | crop = 0; |
| 1147 | goto crtest; |
| 1148 | |
| 1149 | case TCG_COND_LT: |
| 1150 | case TCG_COND_LTU: |
| 1151 | sh = 29; |
| 1152 | crop = 0; |
| 1153 | goto crtest; |
| 1154 | |
| 1155 | case TCG_COND_GE: |
| 1156 | case TCG_COND_GEU: |
| 1157 | sh = 31; |
| 1158 | crop = CRNOR | BT(7, CR_EQ) | BA(7, CR_LT) | BB(7, CR_LT); |
| 1159 | goto crtest; |
| 1160 | |
| 1161 | case TCG_COND_LE: |
| 1162 | case TCG_COND_LEU: |
| 1163 | sh = 31; |
| 1164 | crop = CRNOR | BT(7, CR_EQ) | BA(7, CR_GT) | BB(7, CR_GT); |
| 1165 | crtest: |
| 1166 | tcg_out_cmp(s, cond, arg1, arg2, const_arg2, 7, type); |
| 1167 | if (crop) { |
| 1168 | tcg_out32(s, crop); |
| 1169 | } |
| 1170 | tcg_out32(s, MFOCRF | RT(TCG_REG_R0) | FXM(7)); |
| 1171 | tcg_out_rlw(s, RLWINM, arg0, TCG_REG_R0, sh, 31, 31); |
| 1172 | break; |
| 1173 | |
| 1174 | default: |
| 1175 | tcg_abort(); |
| 1176 | } |
| 1177 | } |
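/* Without ISEL, the crtest path above reads back the whole CR field and
   rotates the relevant bit into place; e.g. for GT (again only a sketch):
       cmpw    cr7, r_a1, r_a2
       mfocrf  r0, 0x01               # CR field 7 into the low nibble of r0
       rlwinm  r_dst, r0, 30, 31, 31  # rotate the GT bit down to bit 0
 */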
| 1178 | |
Richard Henderson | bec1631 | 2015-02-13 13:39:54 -0800 | [diff] [blame] | 1179 | static void tcg_out_bc(TCGContext *s, int bc, TCGLabel *l) |
Richard Henderson | d604f1a | 2014-03-24 15:44:09 -0700 | [diff] [blame] | 1180 | { |
Richard Henderson | d604f1a | 2014-03-24 15:44:09 -0700 | [diff] [blame] | 1181 | if (l->has_value) { |
| 1182 | tcg_out32(s, bc | reloc_pc14_val(s->code_ptr, l->u.value_ptr)); |
| 1183 | } else { |
Richard Henderson | bec1631 | 2015-02-13 13:39:54 -0800 | [diff] [blame] | 1184 | tcg_out_reloc(s, s->code_ptr, R_PPC_REL14, l, 0); |
Richard Henderson | d604f1a | 2014-03-24 15:44:09 -0700 | [diff] [blame] | 1185 | tcg_out_bc_noaddr(s, bc); |
| 1186 | } |
| 1187 | } |
| 1188 | |
| 1189 | static void tcg_out_brcond(TCGContext *s, TCGCond cond, |
| 1190 | TCGArg arg1, TCGArg arg2, int const_arg2, |
Richard Henderson | bec1631 | 2015-02-13 13:39:54 -0800 | [diff] [blame] | 1191 | TCGLabel *l, TCGType type) |
Richard Henderson | d604f1a | 2014-03-24 15:44:09 -0700 | [diff] [blame] | 1192 | { |
| 1193 | tcg_out_cmp(s, cond, arg1, arg2, const_arg2, 7, type); |
Richard Henderson | bec1631 | 2015-02-13 13:39:54 -0800 | [diff] [blame] | 1194 | tcg_out_bc(s, tcg_to_bc[cond], l); |
Richard Henderson | d604f1a | 2014-03-24 15:44:09 -0700 | [diff] [blame] | 1195 | } |
| 1196 | |
| 1197 | static void tcg_out_movcond(TCGContext *s, TCGType type, TCGCond cond, |
| 1198 | TCGArg dest, TCGArg c1, TCGArg c2, TCGArg v1, |
| 1199 | TCGArg v2, bool const_c2) |
| 1200 | { |
| 1201 | /* If for some reason both inputs are zero, don't produce bad code. */ |
| 1202 | if (v1 == 0 && v2 == 0) { |
| 1203 | tcg_out_movi(s, type, dest, 0); |
| 1204 | return; |
| 1205 | } |
| 1206 | |
| 1207 | tcg_out_cmp(s, cond, c1, c2, const_c2, 7, type); |
| 1208 | |
| 1209 | if (HAVE_ISEL) { |
| 1210 | int isel = tcg_to_isel[cond]; |
| 1211 | |
| 1212 | /* Swap the V operands if the operation indicates inversion. */ |
| 1213 | if (isel & 1) { |
| 1214 | int t = v1; |
| 1215 | v1 = v2; |
| 1216 | v2 = t; |
| 1217 | isel &= ~1; |
| 1218 | } |
| 1219 | /* V1 == 0 is handled by isel; V2 == 0 must be handled by hand. */ |
| 1220 | if (v2 == 0) { |
| 1221 | tcg_out_movi(s, type, TCG_REG_R0, 0); |
| 1222 | } |
| 1223 | tcg_out32(s, isel | TAB(dest, v1, v2)); |
| 1224 | } else { |
| 1225 | if (dest == v2) { |
| 1226 | cond = tcg_invert_cond(cond); |
| 1227 | v2 = v1; |
| 1228 | } else if (dest != v1) { |
| 1229 | if (v1 == 0) { |
| 1230 | tcg_out_movi(s, type, dest, 0); |
| 1231 | } else { |
| 1232 | tcg_out_mov(s, type, dest, v1); |
| 1233 | } |
| 1234 | } |
| 1235 | /* Branch forward over one insn */ |
| 1236 | tcg_out32(s, tcg_to_bc[cond] | 8); |
| 1237 | if (v2 == 0) { |
| 1238 | tcg_out_movi(s, type, dest, 0); |
| 1239 | } else { |
| 1240 | tcg_out_mov(s, type, dest, v2); |
| 1241 | } |
| 1242 | } |
| 1243 | } |
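/* Without ISEL, the movcond above is a compare plus a conditional branch
   over a single move.  Assuming dest, v1 and v2 are distinct registers and
   a signed LT condition on I32, the emitted sequence is roughly:
       cmpw    cr7, c1, c2
       mr      dest, v1
       blt     cr7, .+8               # keep v1 when the condition holds
       mr      dest, v2
 */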
| 1244 | |
Richard Henderson | d0b0748 | 2016-11-16 12:48:55 +0100 | [diff] [blame] | 1245 | static void tcg_out_cntxz(TCGContext *s, TCGType type, uint32_t opc, |
| 1246 | TCGArg a0, TCGArg a1, TCGArg a2, bool const_a2) |
| 1247 | { |
| 1248 | if (const_a2 && a2 == (type == TCG_TYPE_I32 ? 32 : 64)) { |
| 1249 | tcg_out32(s, opc | RA(a0) | RS(a1)); |
| 1250 | } else { |
| 1251 | tcg_out_cmp(s, TCG_COND_EQ, a1, 0, 1, 7, type); |
| 1252 | /* Note that the only other valid constant for a2 is 0. */ |
| 1253 | if (HAVE_ISEL) { |
| 1254 | tcg_out32(s, opc | RA(TCG_REG_R0) | RS(a1)); |
| 1255 | tcg_out32(s, tcg_to_isel[TCG_COND_EQ] | TAB(a0, a2, TCG_REG_R0)); |
| 1256 | } else if (!const_a2 && a0 == a2) { |
| 1257 | tcg_out32(s, tcg_to_bc[TCG_COND_EQ] | 8); |
| 1258 | tcg_out32(s, opc | RA(a0) | RS(a1)); |
| 1259 | } else { |
| 1260 | tcg_out32(s, opc | RA(a0) | RS(a1)); |
| 1261 | tcg_out32(s, tcg_to_bc[TCG_COND_NE] | 8); |
| 1262 | if (const_a2) { |
| 1263 | tcg_out_movi(s, type, a0, 0); |
| 1264 | } else { |
| 1265 | tcg_out_mov(s, type, a0, a2); |
| 1266 | } |
| 1267 | } |
| 1268 | } |
| 1269 | } |
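/* A sketch of what the above produces for a count-leading-zeros whose
   fallback value is a register (names are placeholders), given ISEL:
       cmpwi   cr7, src, 0
       cntlzw  r0, src
       isel    dst, fallback, r0, 4*cr7+eq
   whereas clz_i32 dst, src, 32 collapses to a single "cntlzw dst, src". */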
| 1270 | |
Richard Henderson | abcf61c | 2014-04-30 11:55:34 -0700 | [diff] [blame] | 1271 | static void tcg_out_cmp2(TCGContext *s, const TCGArg *args, |
| 1272 | const int *const_args) |
| 1273 | { |
| 1274 | static const struct { uint8_t bit1, bit2; } bits[] = { |
| 1275 | [TCG_COND_LT ] = { CR_LT, CR_LT }, |
| 1276 | [TCG_COND_LE ] = { CR_LT, CR_GT }, |
| 1277 | [TCG_COND_GT ] = { CR_GT, CR_GT }, |
| 1278 | [TCG_COND_GE ] = { CR_GT, CR_LT }, |
| 1279 | [TCG_COND_LTU] = { CR_LT, CR_LT }, |
| 1280 | [TCG_COND_LEU] = { CR_LT, CR_GT }, |
| 1281 | [TCG_COND_GTU] = { CR_GT, CR_GT }, |
| 1282 | [TCG_COND_GEU] = { CR_GT, CR_LT }, |
| 1283 | }; |
| 1284 | |
| 1285 | TCGCond cond = args[4], cond2; |
| 1286 | TCGArg al, ah, bl, bh; |
| 1287 | int blconst, bhconst; |
| 1288 | int op, bit1, bit2; |
| 1289 | |
| 1290 | al = args[0]; |
| 1291 | ah = args[1]; |
| 1292 | bl = args[2]; |
| 1293 | bh = args[3]; |
| 1294 | blconst = const_args[2]; |
| 1295 | bhconst = const_args[3]; |
| 1296 | |
| 1297 | switch (cond) { |
| 1298 | case TCG_COND_EQ: |
| 1299 | op = CRAND; |
| 1300 | goto do_equality; |
| 1301 | case TCG_COND_NE: |
| 1302 | op = CRNAND; |
| 1303 | do_equality: |
| 1304 | tcg_out_cmp(s, cond, al, bl, blconst, 6, TCG_TYPE_I32); |
| 1305 | tcg_out_cmp(s, cond, ah, bh, bhconst, 7, TCG_TYPE_I32); |
| 1306 | tcg_out32(s, op | BT(7, CR_EQ) | BA(6, CR_EQ) | BB(7, CR_EQ)); |
| 1307 | break; |
| 1308 | |
| 1309 | case TCG_COND_LT: |
| 1310 | case TCG_COND_LE: |
| 1311 | case TCG_COND_GT: |
| 1312 | case TCG_COND_GE: |
| 1313 | case TCG_COND_LTU: |
| 1314 | case TCG_COND_LEU: |
| 1315 | case TCG_COND_GTU: |
| 1316 | case TCG_COND_GEU: |
| 1317 | bit1 = bits[cond].bit1; |
| 1318 | bit2 = bits[cond].bit2; |
| 1319 | op = (bit1 != bit2 ? CRANDC : CRAND); |
| 1320 | cond2 = tcg_unsigned_cond(cond); |
| 1321 | |
| 1322 | tcg_out_cmp(s, cond, ah, bh, bhconst, 6, TCG_TYPE_I32); |
| 1323 | tcg_out_cmp(s, cond2, al, bl, blconst, 7, TCG_TYPE_I32); |
| 1324 | tcg_out32(s, op | BT(7, CR_EQ) | BA(6, CR_EQ) | BB(7, bit2)); |
| 1325 | tcg_out32(s, CROR | BT(7, CR_EQ) | BA(6, bit1) | BB(7, CR_EQ)); |
| 1326 | break; |
| 1327 | |
| 1328 | default: |
| 1329 | tcg_abort(); |
| 1330 | } |
| 1331 | } |
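/* As a worked example of the sequence above, a double-word signed LT
   comparison with register operands becomes:
       cmpw    cr6, ah, bh                    # signed compare of the high parts
       cmplw   cr7, al, bl                    # unsigned compare of the low parts
       crand   4*cr7+eq, 4*cr6+eq, 4*cr7+lt   # high equal and low below
       cror    4*cr7+eq, 4*cr6+lt, 4*cr7+eq   # or high strictly below
   leaving the result in the EQ bit of CR7, which the callers below read. */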
| 1332 | |
| 1333 | static void tcg_out_setcond2(TCGContext *s, const TCGArg *args, |
| 1334 | const int *const_args) |
| 1335 | { |
| 1336 | tcg_out_cmp2(s, args + 1, const_args + 1); |
| 1337 | tcg_out32(s, MFOCRF | RT(TCG_REG_R0) | FXM(7)); |
| 1338 | tcg_out_rlw(s, RLWINM, args[0], TCG_REG_R0, 31, 31, 31); |
| 1339 | } |
| 1340 | |
| 1341 | static void tcg_out_brcond2 (TCGContext *s, const TCGArg *args, |
| 1342 | const int *const_args) |
| 1343 | { |
| 1344 | tcg_out_cmp2(s, args, const_args); |
Richard Henderson | bec1631 | 2015-02-13 13:39:54 -0800 | [diff] [blame] | 1345 | tcg_out_bc(s, BC | BI(7, CR_EQ) | BO_COND_TRUE, arg_label(args[5])); |
Richard Henderson | abcf61c | 2014-04-30 11:55:34 -0700 | [diff] [blame] | 1346 | } |
| 1347 | |
Pranith Kumar | 7b4af5e | 2016-07-14 16:20:19 -0400 | [diff] [blame] | 1348 | static void tcg_out_mb(TCGContext *s, TCGArg a0) |
| 1349 | { |
| 1350 | uint32_t insn = HWSYNC; |
| 1351 | a0 &= TCG_MO_ALL; |
| 1352 | if (a0 == TCG_MO_LD_LD) { |
| 1353 | insn = LWSYNC; |
| 1354 | } else if (a0 == TCG_MO_ST_ST) { |
| 1355 | insn = EIEIO; |
| 1356 | } |
| 1357 | tcg_out32(s, insn); |
| 1358 | } |
| 1359 | |
Richard Henderson | a858339 | 2017-07-31 22:02:31 -0700 | [diff] [blame] | 1360 | void tb_target_set_jmp_target(uintptr_t tc_ptr, uintptr_t jmp_addr, |
| 1361 | uintptr_t addr) |
Richard Henderson | d604f1a | 2014-03-24 15:44:09 -0700 | [diff] [blame] | 1362 | { |
Richard Henderson | 5964fca | 2017-07-31 04:16:10 +0000 | [diff] [blame] | 1363 | if (TCG_TARGET_REG_BITS == 64) { |
| 1364 | tcg_insn_unit i1, i2; |
| 1365 | intptr_t tb_diff = addr - tc_ptr; |
| 1366 | intptr_t br_diff = addr - (jmp_addr + 4); |
| 1367 | uint64_t pair; |
Richard Henderson | d604f1a | 2014-03-24 15:44:09 -0700 | [diff] [blame] | 1368 | |
Richard Henderson | 5964fca | 2017-07-31 04:16:10 +0000 | [diff] [blame] | 1369 | /* The 16-bit tb_diff test does not exercise the full range of the |
| 1370 | branch, but with only two patchable insns we must also be able to |
| 1371 | load the new value of TCG_REG_TB. This case still happens quite often. */ |
| 1372 | if (tb_diff == (int16_t)tb_diff) { |
| 1373 | i1 = ADDI | TAI(TCG_REG_TB, TCG_REG_TB, tb_diff); |
| 1374 | i2 = B | (br_diff & 0x3fffffc); |
| 1375 | } else { |
| 1376 | intptr_t lo = (int16_t)tb_diff; |
| 1377 | intptr_t hi = (int32_t)(tb_diff - lo); |
| 1378 | assert(tb_diff == hi + lo); |
| 1379 | i1 = ADDIS | TAI(TCG_REG_TB, TCG_REG_TB, hi >> 16); |
| 1380 | i2 = ADDI | TAI(TCG_REG_TB, TCG_REG_TB, lo); |
| 1381 | } |
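/* A purely illustrative example: tb_diff = 0x12345678 splits into
   lo = 0x5678 and hi = 0x12340000, so the patched pair becomes
       addis   rTB, rTB, 0x1234
       addi    rTB, rTB, 0x5678
   after which execution falls through to the mtctr/bcctr pair emitted
   by INDEX_op_goto_tb below. */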
Richard Henderson | 5bfd75a | 2015-10-02 22:25:28 +0000 | [diff] [blame] | 1382 | #ifdef HOST_WORDS_BIGENDIAN |
Richard Henderson | 5964fca | 2017-07-31 04:16:10 +0000 | [diff] [blame] | 1383 | pair = (uint64_t)i1 << 32 | i2; |
Richard Henderson | 5bfd75a | 2015-10-02 22:25:28 +0000 | [diff] [blame] | 1384 | #else |
Richard Henderson | 5964fca | 2017-07-31 04:16:10 +0000 | [diff] [blame] | 1385 | pair = (uint64_t)i2 << 32 | i1; |
Richard Henderson | 5bfd75a | 2015-10-02 22:25:28 +0000 | [diff] [blame] | 1386 | #endif |
| 1387 | |
Philippe Mathieu-Daudé | ba02660 | 2017-09-11 17:49:36 -0300 | [diff] [blame] | 1388 | /* As per the enclosing if, this is ppc64. Avoid the _Static_assert |
| 1389 | within atomic_set that would fail to build on a ppc32 host. */ |
| 1390 | atomic_set__nocheck((uint64_t *)jmp_addr, pair); |
Richard Henderson | 5964fca | 2017-07-31 04:16:10 +0000 | [diff] [blame] | 1391 | flush_icache_range(jmp_addr, jmp_addr + 8); |
| 1392 | } else { |
| 1393 | intptr_t diff = addr - jmp_addr; |
| 1394 | tcg_debug_assert(in_range_b(diff)); |
| 1395 | atomic_set((uint32_t *)jmp_addr, B | (diff & 0x3fffffc)); |
| 1396 | flush_icache_range(jmp_addr, jmp_addr + 4); |
| 1397 | } |
Richard Henderson | d604f1a | 2014-03-24 15:44:09 -0700 | [diff] [blame] | 1398 | } |
| 1399 | |
| 1400 | static void tcg_out_call(TCGContext *s, tcg_insn_unit *target) |
| 1401 | { |
Richard Henderson | eaf7d1c | 2014-04-30 11:57:11 -0700 | [diff] [blame] | 1402 | #ifdef _CALL_AIX |
Richard Henderson | d604f1a | 2014-03-24 15:44:09 -0700 | [diff] [blame] | 1403 | /* Look through the function descriptor. If the branch is in range and |
| 1404 | the TOC value fits in 32 bits, load the TOC and branch directly. */ |
| 1405 | void *tgt = ((void **)target)[0]; |
| 1406 | uintptr_t toc = ((uintptr_t *)target)[1]; |
| 1407 | intptr_t diff = tcg_pcrel_diff(s, tgt); |
| 1408 | |
| 1409 | if (in_range_b(diff) && toc == (uint32_t)toc) { |
Richard Henderson | dfca177 | 2014-04-30 12:12:16 -0700 | [diff] [blame] | 1410 | tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_TMP1, toc); |
Richard Henderson | d604f1a | 2014-03-24 15:44:09 -0700 | [diff] [blame] | 1411 | tcg_out_b(s, LK, tgt); |
| 1412 | } else { |
| 1413 | /* Fold the low bits of the constant into the addresses below. */ |
| 1414 | intptr_t arg = (intptr_t)target; |
| 1415 | int ofs = (int16_t)arg; |
| 1416 | |
| 1417 | if (ofs + 8 < 0x8000) { |
| 1418 | arg -= ofs; |
| 1419 | } else { |
| 1420 | ofs = 0; |
| 1421 | } |
Richard Henderson | dfca177 | 2014-04-30 12:12:16 -0700 | [diff] [blame] | 1422 | tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_TMP1, arg); |
| 1423 | tcg_out_ld(s, TCG_TYPE_PTR, TCG_REG_R0, TCG_REG_TMP1, ofs); |
Richard Henderson | d604f1a | 2014-03-24 15:44:09 -0700 | [diff] [blame] | 1424 | tcg_out32(s, MTSPR | RA(TCG_REG_R0) | CTR); |
Richard Henderson | dfca177 | 2014-04-30 12:12:16 -0700 | [diff] [blame] | 1425 | tcg_out_ld(s, TCG_TYPE_PTR, TCG_REG_R2, TCG_REG_TMP1, ofs + SZP); |
Richard Henderson | d604f1a | 2014-03-24 15:44:09 -0700 | [diff] [blame] | 1426 | tcg_out32(s, BCCTR | BO_ALWAYS | LK); |
| 1427 | } |
Ulrich Weigand | 77e58d0 | 2014-04-30 14:33:05 -0700 | [diff] [blame] | 1428 | #elif defined(_CALL_ELF) && _CALL_ELF == 2 |
| 1429 | intptr_t diff; |
| 1430 | |
| 1431 | /* In the ELFv2 ABI, we have to set up r12 to contain the destination |
| 1432 | address, which the callee uses to compute its TOC address. */ |
| 1433 | /* FIXME: when the branch is in range, we could avoid the r12 load if we |
| 1434 | knew that the destination uses the same TOC, and what its local |
| 1435 | entry point offset is. */ |
| 1436 | tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_R12, (intptr_t)target); |
| 1437 | |
| 1438 | diff = tcg_pcrel_diff(s, target); |
| 1439 | if (in_range_b(diff)) { |
| 1440 | tcg_out_b(s, LK, target); |
| 1441 | } else { |
| 1442 | tcg_out32(s, MTSPR | RS(TCG_REG_R12) | CTR); |
| 1443 | tcg_out32(s, BCCTR | BO_ALWAYS | LK); |
| 1444 | } |
Richard Henderson | eaf7d1c | 2014-04-30 11:57:11 -0700 | [diff] [blame] | 1445 | #else |
| 1446 | tcg_out_b(s, LK, target); |
Richard Henderson | d604f1a | 2014-03-24 15:44:09 -0700 | [diff] [blame] | 1447 | #endif |
| 1448 | } |
| 1449 | |
Richard Henderson | a058557 | 2013-09-03 17:05:37 -0700 | [diff] [blame] | 1450 | static const uint32_t qemu_ldx_opc[16] = { |
| 1451 | [MO_UB] = LBZX, |
| 1452 | [MO_UW] = LHZX, |
| 1453 | [MO_UL] = LWZX, |
| 1454 | [MO_Q] = LDX, |
| 1455 | [MO_SW] = LHAX, |
| 1456 | [MO_SL] = LWAX, |
| 1457 | [MO_BSWAP | MO_UB] = LBZX, |
| 1458 | [MO_BSWAP | MO_UW] = LHBRX, |
| 1459 | [MO_BSWAP | MO_UL] = LWBRX, |
| 1460 | [MO_BSWAP | MO_Q] = LDBRX, |
Richard Henderson | 7f12d64 | 2013-07-31 16:15:18 -0700 | [diff] [blame] | 1461 | }; |
| 1462 | |
Richard Henderson | a058557 | 2013-09-03 17:05:37 -0700 | [diff] [blame] | 1463 | static const uint32_t qemu_stx_opc[16] = { |
| 1464 | [MO_UB] = STBX, |
| 1465 | [MO_UW] = STHX, |
| 1466 | [MO_UL] = STWX, |
| 1467 | [MO_Q] = STDX, |
| 1468 | [MO_BSWAP | MO_UB] = STBX, |
| 1469 | [MO_BSWAP | MO_UW] = STHBRX, |
| 1470 | [MO_BSWAP | MO_UL] = STWBRX, |
| 1471 | [MO_BSWAP | MO_Q] = STDBRX, |
Richard Henderson | 7f12d64 | 2013-07-31 16:15:18 -0700 | [diff] [blame] | 1472 | }; |
| 1473 | |
| 1474 | static const uint32_t qemu_exts_opc[4] = { |
| 1475 | EXTSB, EXTSH, EXTSW, 0 |
| 1476 | }; |
| 1477 | |
| 1478 | #if defined (CONFIG_SOFTMMU) |
Richard Henderson | 659ef5c | 2017-07-30 12:30:41 -0700 | [diff] [blame] | 1479 | #include "tcg-ldst.inc.c" |
| 1480 | |
Blue Swirl | e141ab5 | 2011-09-18 14:55:46 +0000 | [diff] [blame] | 1481 | /* helper signature: helper_ld_mmu(CPUState *env, target_ulong addr, |
Richard Henderson | 7f12d64 | 2013-07-31 16:15:18 -0700 | [diff] [blame] | 1482 | * int mmu_idx, uintptr_t ra) |
| 1483 | */ |
Richard Henderson | e083c4a | 2014-03-28 14:58:38 -0700 | [diff] [blame] | 1484 | static void * const qemu_ld_helpers[16] = { |
Richard Henderson | e349a8d | 2013-09-10 09:05:15 -0700 | [diff] [blame] | 1485 | [MO_UB] = helper_ret_ldub_mmu, |
| 1486 | [MO_LEUW] = helper_le_lduw_mmu, |
| 1487 | [MO_LEUL] = helper_le_ldul_mmu, |
| 1488 | [MO_LEQ] = helper_le_ldq_mmu, |
| 1489 | [MO_BEUW] = helper_be_lduw_mmu, |
| 1490 | [MO_BEUL] = helper_be_ldul_mmu, |
| 1491 | [MO_BEQ] = helper_be_ldq_mmu, |
Blue Swirl | e141ab5 | 2011-09-18 14:55:46 +0000 | [diff] [blame] | 1492 | }; |
| 1493 | |
| 1494 | /* helper signature: helper_st_mmu(CPUState *env, target_ulong addr, |
Richard Henderson | 7f12d64 | 2013-07-31 16:15:18 -0700 | [diff] [blame] | 1495 | * uintxx_t val, int mmu_idx, uintptr_t ra) |
| 1496 | */ |
Richard Henderson | e083c4a | 2014-03-28 14:58:38 -0700 | [diff] [blame] | 1497 | static void * const qemu_st_helpers[16] = { |
Richard Henderson | e349a8d | 2013-09-10 09:05:15 -0700 | [diff] [blame] | 1498 | [MO_UB] = helper_ret_stb_mmu, |
| 1499 | [MO_LEUW] = helper_le_stw_mmu, |
| 1500 | [MO_LEUL] = helper_le_stl_mmu, |
| 1501 | [MO_LEQ] = helper_le_stq_mmu, |
| 1502 | [MO_BEUW] = helper_be_stw_mmu, |
| 1503 | [MO_BEUL] = helper_be_stl_mmu, |
| 1504 | [MO_BEQ] = helper_be_stq_mmu, |
Blue Swirl | e141ab5 | 2011-09-18 14:55:46 +0000 | [diff] [blame] | 1505 | }; |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 1506 | |
Richard Henderson | fedee3e | 2013-07-31 15:11:44 -0700 | [diff] [blame] | 1507 | /* Perform the TLB load and compare. Places the result of the comparison |
| 1508 | in CR7, loads the addend of the TLB into R3, and returns the register |
| 1509 | containing the guest address (zero-extended into R4). Clobbers R0 and TCG_REG_TMP1. */ |
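/* A rough sketch of the fast path emitted below for an aligned access by a
   64-bit guest on a 64-bit host (register names are illustrative):
       rldicl  r3, addr, 64-TARGET_PAGE_BITS, 64-CPU_TLB_BITS   # TLB index
       sldi    r3, r3, CPU_TLB_ENTRY_BITS
       add     r3, r3, env              # env + index * sizeof(CPUTLBEntry)
       ld      tmp1, cmp_off(r3)        # TLB comparator
       ld      r3, add_off(r3)          # TLB addend
       rldicr  r0, addr, 0, 63-TARGET_PAGE_BITS                 # page bits
       cmpd    cr7, r0, tmp1
 */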
| 1510 | |
Benjamin Herrenschmidt | 68d45bb | 2015-07-21 15:19:38 +1000 | [diff] [blame] | 1511 | static TCGReg tcg_out_tlb_read(TCGContext *s, TCGMemOp opc, |
Richard Henderson | 7f25c46 | 2014-03-25 12:11:48 -0700 | [diff] [blame] | 1512 | TCGReg addrlo, TCGReg addrhi, |
Richard Henderson | fedee3e | 2013-07-31 15:11:44 -0700 | [diff] [blame] | 1513 | int mem_index, bool is_read) |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 1514 | { |
Richard Henderson | fedee3e | 2013-07-31 15:11:44 -0700 | [diff] [blame] | 1515 | int cmp_off |
| 1516 | = (is_read |
| 1517 | ? offsetof(CPUArchState, tlb_table[mem_index][0].addr_read) |
| 1518 | : offsetof(CPUArchState, tlb_table[mem_index][0].addr_write)); |
| 1519 | int add_off = offsetof(CPUArchState, tlb_table[mem_index][0].addend); |
| 1520 | TCGReg base = TCG_AREG0; |
Richard Henderson | 85aa808 | 2016-07-14 12:43:06 -0700 | [diff] [blame] | 1521 | unsigned s_bits = opc & MO_SIZE; |
| 1522 | unsigned a_bits = get_alignment_bits(opc); |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 1523 | |
Richard Henderson | fedee3e | 2013-07-31 15:11:44 -0700 | [diff] [blame] | 1524 | /* Extract the page index, shifted into place for tlb index. */ |
Richard Henderson | 7f25c46 | 2014-03-25 12:11:48 -0700 | [diff] [blame] | 1525 | if (TCG_TARGET_REG_BITS == 64) { |
| 1526 | if (TARGET_LONG_BITS == 32) { |
| 1527 | /* Zero-extend the address into a place helpful for further use. */ |
| 1528 | tcg_out_ext32u(s, TCG_REG_R4, addrlo); |
| 1529 | addrlo = TCG_REG_R4; |
| 1530 | } else { |
| 1531 | tcg_out_rld(s, RLDICL, TCG_REG_R3, addrlo, |
| 1532 | 64 - TARGET_PAGE_BITS, 64 - CPU_TLB_BITS); |
| 1533 | } |
malc | 4a40e23 | 2008-07-27 11:09:21 +0000 | [diff] [blame] | 1534 | } |
Richard Henderson | fedee3e | 2013-07-31 15:11:44 -0700 | [diff] [blame] | 1535 | |
| 1536 | /* Compensate for very large offsets. */ |
| 1537 | if (add_off >= 0x8000) { |
Richard Henderson | 4a64e0f | 2018-01-10 07:47:25 +0000 | [diff] [blame] | 1538 | int low = (int16_t)cmp_off; |
| 1539 | int high = cmp_off - low; |
| 1540 | assert((high & 0xffff) == 0); |
| 1541 | assert(cmp_off - high == (int16_t)(cmp_off - high)); |
| 1542 | assert(add_off - high == (int16_t)(add_off - high)); |
| 1543 | tcg_out32(s, ADDIS | TAI(TCG_REG_TMP1, base, high >> 16)); |
Richard Henderson | dfca177 | 2014-04-30 12:12:16 -0700 | [diff] [blame] | 1544 | base = TCG_REG_TMP1; |
Richard Henderson | 4a64e0f | 2018-01-10 07:47:25 +0000 | [diff] [blame] | 1545 | cmp_off -= high; |
| 1546 | add_off -= high; |
Richard Henderson | fedee3e | 2013-07-31 15:11:44 -0700 | [diff] [blame] | 1547 | } |
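/* A worked example with made-up offsets: if cmp_off were 0x8200 and add_off
   0x8208, then low = -0x7e00 and high = 0x10000, so we emit
   "addis rTMP1, rENV, 1" here and the later comparator/addend loads use
   displacements -0x7e00 and -0x7df8, both of which fit in signed 16 bits. */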
| 1548 | |
| 1549 | /* Extraction and shifting, part 2. */ |
Richard Henderson | 7f25c46 | 2014-03-25 12:11:48 -0700 | [diff] [blame] | 1550 | if (TCG_TARGET_REG_BITS == 32 || TARGET_LONG_BITS == 32) { |
| 1551 | tcg_out_rlw(s, RLWINM, TCG_REG_R3, addrlo, |
Richard Henderson | fedee3e | 2013-07-31 15:11:44 -0700 | [diff] [blame] | 1552 | 32 - (TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS), |
| 1553 | 32 - (CPU_TLB_BITS + CPU_TLB_ENTRY_BITS), |
| 1554 | 31 - CPU_TLB_ENTRY_BITS); |
| 1555 | } else { |
| 1556 | tcg_out_shli64(s, TCG_REG_R3, TCG_REG_R3, CPU_TLB_ENTRY_BITS); |
| 1557 | } |
| 1558 | |
| 1559 | tcg_out32(s, ADD | TAB(TCG_REG_R3, TCG_REG_R3, base)); |
| 1560 | |
| 1561 | /* Load the tlb comparator. */ |
Richard Henderson | 7f25c46 | 2014-03-25 12:11:48 -0700 | [diff] [blame] | 1562 | if (TCG_TARGET_REG_BITS < TARGET_LONG_BITS) { |
| 1563 | tcg_out_ld(s, TCG_TYPE_I32, TCG_REG_R4, TCG_REG_R3, cmp_off); |
Richard Henderson | dfca177 | 2014-04-30 12:12:16 -0700 | [diff] [blame] | 1564 | tcg_out_ld(s, TCG_TYPE_I32, TCG_REG_TMP1, TCG_REG_R3, cmp_off + 4); |
Richard Henderson | 7f25c46 | 2014-03-25 12:11:48 -0700 | [diff] [blame] | 1565 | } else { |
Richard Henderson | dfca177 | 2014-04-30 12:12:16 -0700 | [diff] [blame] | 1566 | tcg_out_ld(s, TCG_TYPE_TL, TCG_REG_TMP1, TCG_REG_R3, cmp_off); |
Richard Henderson | 7f25c46 | 2014-03-25 12:11:48 -0700 | [diff] [blame] | 1567 | } |
Richard Henderson | fedee3e | 2013-07-31 15:11:44 -0700 | [diff] [blame] | 1568 | |
| 1569 | /* Load the TLB addend for use on the fast path. Do this asap |
| 1570 | to minimize any load use delay. */ |
Richard Henderson | 4c3831a | 2014-03-24 16:03:59 -0700 | [diff] [blame] | 1571 | tcg_out_ld(s, TCG_TYPE_PTR, TCG_REG_R3, TCG_REG_R3, add_off); |
Richard Henderson | fedee3e | 2013-07-31 15:11:44 -0700 | [diff] [blame] | 1572 | |
Benjamin Herrenschmidt | 68d45bb | 2015-07-21 15:19:38 +1000 | [diff] [blame] | 1573 | /* Clear the non-page, non-alignment bits from the address */ |
Richard Henderson | 85aa808 | 2016-07-14 12:43:06 -0700 | [diff] [blame] | 1574 | if (TCG_TARGET_REG_BITS == 32) { |
| 1575 | /* We don't support unaligned accesses on 32-bit hosts. |
| 1576 | * Preserve the bottom bits and thus trigger a comparison |
| 1577 | * failure on unaligned accesses. |
Benjamin Herrenschmidt | 68d45bb | 2015-07-21 15:19:38 +1000 | [diff] [blame] | 1578 | */ |
Richard Henderson | 85aa808 | 2016-07-14 12:43:06 -0700 | [diff] [blame] | 1579 | if (a_bits < s_bits) { |
Sergey Sorokin | 1f00b27 | 2016-06-23 21:16:46 +0300 | [diff] [blame] | 1580 | a_bits = s_bits; |
| 1581 | } |
Richard Henderson | 7f25c46 | 2014-03-25 12:11:48 -0700 | [diff] [blame] | 1582 | tcg_out_rlw(s, RLWINM, TCG_REG_R0, addrlo, 0, |
Sergey Sorokin | 1f00b27 | 2016-06-23 21:16:46 +0300 | [diff] [blame] | 1583 | (32 - a_bits) & 31, 31 - TARGET_PAGE_BITS); |
Richard Henderson | 85aa808 | 2016-07-14 12:43:06 -0700 | [diff] [blame] | 1584 | } else { |
| 1585 | TCGReg t = addrlo; |
| 1586 | |
| 1587 | /* If the access is unaligned, we need to make sure we fail if we |
| 1588 | * cross a page boundary. The trick is to add the access size-1 |
| 1589 | * to the address before masking the low bits. That will make the |
| 1590 | * address overflow to the next page if we cross a page boundary, |
| 1591 | * which will then force a mismatch of the TLB compare. |
| 1592 | */ |
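/* For example (illustrative numbers, 4K pages): a 4-byte load (s_bits = 2)
   with no alignment requirement (a_bits = 0) at offset 0xffe in a page has
   s_mask - a_mask = 3 added to it, which lands on the next page, so the
   masked page number no longer matches the TLB comparator and the
   page-crossing access is pushed to the slow path.  The same load at
   offset 0xff0 stays within its page and still compares equal. */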
| 1593 | if (a_bits < s_bits) { |
| 1594 | unsigned a_mask = (1 << a_bits) - 1; |
| 1595 | unsigned s_mask = (1 << s_bits) - 1; |
| 1596 | tcg_out32(s, ADDI | TAI(TCG_REG_R0, t, s_mask - a_mask)); |
| 1597 | t = TCG_REG_R0; |
| 1598 | } |
| 1599 | |
| 1600 | /* Mask the address for the requested alignment. */ |
| 1601 | if (TARGET_LONG_BITS == 32) { |
| 1602 | tcg_out_rlw(s, RLWINM, TCG_REG_R0, t, 0, |
| 1603 | (32 - a_bits) & 31, 31 - TARGET_PAGE_BITS); |
| 1604 | } else if (a_bits == 0) { |
| 1605 | tcg_out_rld(s, RLDICR, TCG_REG_R0, t, 0, 63 - TARGET_PAGE_BITS); |
| 1606 | } else { |
| 1607 | tcg_out_rld(s, RLDICL, TCG_REG_R0, t, |
Sergey Sorokin | 1f00b27 | 2016-06-23 21:16:46 +0300 | [diff] [blame] | 1608 | 64 - TARGET_PAGE_BITS, TARGET_PAGE_BITS - a_bits); |
Benjamin Herrenschmidt | 68d45bb | 2015-07-21 15:19:38 +1000 | [diff] [blame] | 1609 | tcg_out_rld(s, RLDICL, TCG_REG_R0, TCG_REG_R0, TARGET_PAGE_BITS, 0); |
Benjamin Herrenschmidt | 68d45bb | 2015-07-21 15:19:38 +1000 | [diff] [blame] | 1610 | } |
Richard Henderson | fedee3e | 2013-07-31 15:11:44 -0700 | [diff] [blame] | 1611 | } |
| 1612 | |
Richard Henderson | 7f25c46 | 2014-03-25 12:11:48 -0700 | [diff] [blame] | 1613 | if (TCG_TARGET_REG_BITS < TARGET_LONG_BITS) { |
Richard Henderson | dfca177 | 2014-04-30 12:12:16 -0700 | [diff] [blame] | 1614 | tcg_out_cmp(s, TCG_COND_EQ, TCG_REG_R0, TCG_REG_TMP1, |
| 1615 | 0, 7, TCG_TYPE_I32); |
Richard Henderson | 7f25c46 | 2014-03-25 12:11:48 -0700 | [diff] [blame] | 1616 | tcg_out_cmp(s, TCG_COND_EQ, addrhi, TCG_REG_R4, 0, 6, TCG_TYPE_I32); |
| 1617 | tcg_out32(s, CRAND | BT(7, CR_EQ) | BA(6, CR_EQ) | BB(7, CR_EQ)); |
| 1618 | } else { |
Richard Henderson | dfca177 | 2014-04-30 12:12:16 -0700 | [diff] [blame] | 1619 | tcg_out_cmp(s, TCG_COND_EQ, TCG_REG_R0, TCG_REG_TMP1, |
| 1620 | 0, 7, TCG_TYPE_TL); |
Richard Henderson | 7f25c46 | 2014-03-25 12:11:48 -0700 | [diff] [blame] | 1621 | } |
Richard Henderson | fedee3e | 2013-07-31 15:11:44 -0700 | [diff] [blame] | 1622 | |
Richard Henderson | 7f25c46 | 2014-03-25 12:11:48 -0700 | [diff] [blame] | 1623 | return addrlo; |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 1624 | } |
| 1625 | |
Richard Henderson | 7f12d64 | 2013-07-31 16:15:18 -0700 | [diff] [blame] | 1626 | /* Record the context of a call to the out of line helper code for the slow |
| 1627 | path for a load or store, so that we can later generate the correct |
| 1628 | helper code. */ |
Richard Henderson | 3972ef6 | 2015-05-13 09:10:33 -0700 | [diff] [blame] | 1629 | static void add_qemu_ldst_label(TCGContext *s, bool is_ld, TCGMemOpIdx oi, |
Richard Henderson | 7f25c46 | 2014-03-25 12:11:48 -0700 | [diff] [blame] | 1630 | TCGReg datalo_reg, TCGReg datahi_reg, |
| 1631 | TCGReg addrlo_reg, TCGReg addrhi_reg, |
Richard Henderson | 3972ef6 | 2015-05-13 09:10:33 -0700 | [diff] [blame] | 1632 | tcg_insn_unit *raddr, tcg_insn_unit *lptr) |
Richard Henderson | 7f12d64 | 2013-07-31 16:15:18 -0700 | [diff] [blame] | 1633 | { |
Richard Henderson | 9ecefc8 | 2013-10-03 14:51:24 -0500 | [diff] [blame] | 1634 | TCGLabelQemuLdst *label = new_ldst_label(s); |
Richard Henderson | 49d9870 | 2013-02-02 00:58:14 -0800 | [diff] [blame] | 1635 | |
Richard Henderson | 7f12d64 | 2013-07-31 16:15:18 -0700 | [diff] [blame] | 1636 | label->is_ld = is_ld; |
Richard Henderson | 3972ef6 | 2015-05-13 09:10:33 -0700 | [diff] [blame] | 1637 | label->oi = oi; |
Richard Henderson | 7f25c46 | 2014-03-25 12:11:48 -0700 | [diff] [blame] | 1638 | label->datalo_reg = datalo_reg; |
| 1639 | label->datahi_reg = datahi_reg; |
| 1640 | label->addrlo_reg = addrlo_reg; |
| 1641 | label->addrhi_reg = addrhi_reg; |
Richard Henderson | 7f12d64 | 2013-07-31 16:15:18 -0700 | [diff] [blame] | 1642 | label->raddr = raddr; |
Richard Henderson | 7f25c46 | 2014-03-25 12:11:48 -0700 | [diff] [blame] | 1643 | label->label_ptr[0] = lptr; |
Richard Henderson | 7f12d64 | 2013-07-31 16:15:18 -0700 | [diff] [blame] | 1644 | } |
| 1645 | |
| 1646 | static void tcg_out_qemu_ld_slow_path(TCGContext *s, TCGLabelQemuLdst *lb) |
| 1647 | { |
Richard Henderson | 3972ef6 | 2015-05-13 09:10:33 -0700 | [diff] [blame] | 1648 | TCGMemOpIdx oi = lb->oi; |
| 1649 | TCGMemOp opc = get_memop(oi); |
Richard Henderson | 7f25c46 | 2014-03-25 12:11:48 -0700 | [diff] [blame] | 1650 | TCGReg hi, lo, arg = TCG_REG_R3; |
Richard Henderson | 7f12d64 | 2013-07-31 16:15:18 -0700 | [diff] [blame] | 1651 | |
Richard Henderson | e083c4a | 2014-03-28 14:58:38 -0700 | [diff] [blame] | 1652 | reloc_pc14(lb->label_ptr[0], s->code_ptr); |
Richard Henderson | 7f12d64 | 2013-07-31 16:15:18 -0700 | [diff] [blame] | 1653 | |
Richard Henderson | 7f25c46 | 2014-03-25 12:11:48 -0700 | [diff] [blame] | 1654 | tcg_out_mov(s, TCG_TYPE_PTR, arg++, TCG_AREG0); |
Richard Henderson | 7f12d64 | 2013-07-31 16:15:18 -0700 | [diff] [blame] | 1655 | |
Richard Henderson | 7f25c46 | 2014-03-25 12:11:48 -0700 | [diff] [blame] | 1656 | lo = lb->addrlo_reg; |
| 1657 | hi = lb->addrhi_reg; |
| 1658 | if (TCG_TARGET_REG_BITS < TARGET_LONG_BITS) { |
| 1659 | #ifdef TCG_TARGET_CALL_ALIGN_ARGS |
| 1660 | arg |= 1; |
| 1661 | #endif |
| 1662 | tcg_out_mov(s, TCG_TYPE_I32, arg++, hi); |
| 1663 | tcg_out_mov(s, TCG_TYPE_I32, arg++, lo); |
| 1664 | } else { |
| 1665 | /* If the address needed to be zero-extended, we'll have already |
| 1666 | placed it in R4. The only remaining case is 64-bit guest. */ |
| 1667 | tcg_out_mov(s, TCG_TYPE_TL, arg++, lo); |
| 1668 | } |
Richard Henderson | 7f12d64 | 2013-07-31 16:15:18 -0700 | [diff] [blame] | 1669 | |
Richard Henderson | 3972ef6 | 2015-05-13 09:10:33 -0700 | [diff] [blame] | 1670 | tcg_out_movi(s, TCG_TYPE_I32, arg++, oi); |
Richard Henderson | 7f25c46 | 2014-03-25 12:11:48 -0700 | [diff] [blame] | 1671 | tcg_out32(s, MFSPR | RT(arg) | LR); |
Richard Henderson | 7f12d64 | 2013-07-31 16:15:18 -0700 | [diff] [blame] | 1672 | |
Richard Henderson | 2b7ec66 | 2015-05-29 09:16:51 -0700 | [diff] [blame] | 1673 | tcg_out_call(s, qemu_ld_helpers[opc & (MO_BSWAP | MO_SIZE)]); |
Richard Henderson | 7f12d64 | 2013-07-31 16:15:18 -0700 | [diff] [blame] | 1674 | |
Richard Henderson | 7f25c46 | 2014-03-25 12:11:48 -0700 | [diff] [blame] | 1675 | lo = lb->datalo_reg; |
| 1676 | hi = lb->datahi_reg; |
| 1677 | if (TCG_TARGET_REG_BITS == 32 && (opc & MO_SIZE) == MO_64) { |
| 1678 | tcg_out_mov(s, TCG_TYPE_I32, lo, TCG_REG_R4); |
| 1679 | tcg_out_mov(s, TCG_TYPE_I32, hi, TCG_REG_R3); |
| 1680 | } else if (opc & MO_SIGN) { |
Richard Henderson | e349a8d | 2013-09-10 09:05:15 -0700 | [diff] [blame] | 1681 | uint32_t insn = qemu_exts_opc[opc & MO_SIZE]; |
Richard Henderson | 7f25c46 | 2014-03-25 12:11:48 -0700 | [diff] [blame] | 1682 | tcg_out32(s, insn | RA(lo) | RS(TCG_REG_R3)); |
Richard Henderson | 7f12d64 | 2013-07-31 16:15:18 -0700 | [diff] [blame] | 1683 | } else { |
Richard Henderson | 7f25c46 | 2014-03-25 12:11:48 -0700 | [diff] [blame] | 1684 | tcg_out_mov(s, TCG_TYPE_REG, lo, TCG_REG_R3); |
Richard Henderson | 7f12d64 | 2013-07-31 16:15:18 -0700 | [diff] [blame] | 1685 | } |
| 1686 | |
Richard Henderson | e083c4a | 2014-03-28 14:58:38 -0700 | [diff] [blame] | 1687 | tcg_out_b(s, 0, lb->raddr); |
Richard Henderson | 7f12d64 | 2013-07-31 16:15:18 -0700 | [diff] [blame] | 1688 | } |
| 1689 | |
| 1690 | static void tcg_out_qemu_st_slow_path(TCGContext *s, TCGLabelQemuLdst *lb) |
| 1691 | { |
Richard Henderson | 3972ef6 | 2015-05-13 09:10:33 -0700 | [diff] [blame] | 1692 | TCGMemOpIdx oi = lb->oi; |
| 1693 | TCGMemOp opc = get_memop(oi); |
Richard Henderson | e349a8d | 2013-09-10 09:05:15 -0700 | [diff] [blame] | 1694 | TCGMemOp s_bits = opc & MO_SIZE; |
Richard Henderson | 7f25c46 | 2014-03-25 12:11:48 -0700 | [diff] [blame] | 1695 | TCGReg hi, lo, arg = TCG_REG_R3; |
Richard Henderson | 7f12d64 | 2013-07-31 16:15:18 -0700 | [diff] [blame] | 1696 | |
Richard Henderson | e083c4a | 2014-03-28 14:58:38 -0700 | [diff] [blame] | 1697 | reloc_pc14(lb->label_ptr[0], s->code_ptr); |
Richard Henderson | 7f12d64 | 2013-07-31 16:15:18 -0700 | [diff] [blame] | 1698 | |
Richard Henderson | 7f25c46 | 2014-03-25 12:11:48 -0700 | [diff] [blame] | 1699 | tcg_out_mov(s, TCG_TYPE_PTR, arg++, TCG_AREG0); |
Richard Henderson | 7f12d64 | 2013-07-31 16:15:18 -0700 | [diff] [blame] | 1700 | |
Richard Henderson | 7f25c46 | 2014-03-25 12:11:48 -0700 | [diff] [blame] | 1701 | lo = lb->addrlo_reg; |
| 1702 | hi = lb->addrhi_reg; |
| 1703 | if (TCG_TARGET_REG_BITS < TARGET_LONG_BITS) { |
| 1704 | #ifdef TCG_TARGET_CALL_ALIGN_ARGS |
| 1705 | arg |= 1; |
| 1706 | #endif |
| 1707 | tcg_out_mov(s, TCG_TYPE_I32, arg++, hi); |
| 1708 | tcg_out_mov(s, TCG_TYPE_I32, arg++, lo); |
| 1709 | } else { |
| 1710 | /* If the address needed to be zero-extended, we'll have already |
| 1711 | placed it in R4. The only remaining case is 64-bit guest. */ |
| 1712 | tcg_out_mov(s, TCG_TYPE_TL, arg++, lo); |
| 1713 | } |
Richard Henderson | 7f12d64 | 2013-07-31 16:15:18 -0700 | [diff] [blame] | 1714 | |
Richard Henderson | 7f25c46 | 2014-03-25 12:11:48 -0700 | [diff] [blame] | 1715 | lo = lb->datalo_reg; |
| 1716 | hi = lb->datahi_reg; |
| 1717 | if (TCG_TARGET_REG_BITS == 32) { |
| 1718 | switch (s_bits) { |
| 1719 | case MO_64: |
| 1720 | #ifdef TCG_TARGET_CALL_ALIGN_ARGS |
| 1721 | arg |= 1; |
| 1722 | #endif |
| 1723 | tcg_out_mov(s, TCG_TYPE_I32, arg++, hi); |
| 1724 | /* FALLTHRU */ |
| 1725 | case MO_32: |
| 1726 | tcg_out_mov(s, TCG_TYPE_I32, arg++, lo); |
| 1727 | break; |
| 1728 | default: |
| 1729 | tcg_out_rlw(s, RLWINM, arg++, lo, 0, 32 - (8 << s_bits), 31); |
| 1730 | break; |
| 1731 | } |
| 1732 | } else { |
| 1733 | if (s_bits == MO_64) { |
| 1734 | tcg_out_mov(s, TCG_TYPE_I64, arg++, lo); |
| 1735 | } else { |
| 1736 | tcg_out_rld(s, RLDICL, arg++, lo, 0, 64 - (8 << s_bits)); |
| 1737 | } |
| 1738 | } |
| 1739 | |
Richard Henderson | 3972ef6 | 2015-05-13 09:10:33 -0700 | [diff] [blame] | 1740 | tcg_out_movi(s, TCG_TYPE_I32, arg++, oi); |
Richard Henderson | 7f25c46 | 2014-03-25 12:11:48 -0700 | [diff] [blame] | 1741 | tcg_out32(s, MFSPR | RT(arg) | LR); |
Richard Henderson | 7f12d64 | 2013-07-31 16:15:18 -0700 | [diff] [blame] | 1742 | |
Richard Henderson | 2b7ec66 | 2015-05-29 09:16:51 -0700 | [diff] [blame] | 1743 | tcg_out_call(s, qemu_st_helpers[opc & (MO_BSWAP | MO_SIZE)]); |
Richard Henderson | 7f12d64 | 2013-07-31 16:15:18 -0700 | [diff] [blame] | 1744 | |
Richard Henderson | e083c4a | 2014-03-28 14:58:38 -0700 | [diff] [blame] | 1745 | tcg_out_b(s, 0, lb->raddr); |
Richard Henderson | 7f12d64 | 2013-07-31 16:15:18 -0700 | [diff] [blame] | 1746 | } |
Richard Henderson | 7f12d64 | 2013-07-31 16:15:18 -0700 | [diff] [blame] | 1747 | #endif /* SOFTMMU */ |
Richard Henderson | 49d9870 | 2013-02-02 00:58:14 -0800 | [diff] [blame] | 1748 | |
Richard Henderson | 7f25c46 | 2014-03-25 12:11:48 -0700 | [diff] [blame] | 1749 | static void tcg_out_qemu_ld(TCGContext *s, const TCGArg *args, bool is_64) |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 1750 | { |
Richard Henderson | 7f25c46 | 2014-03-25 12:11:48 -0700 | [diff] [blame] | 1751 | TCGReg datalo, datahi, addrlo, rbase; |
| 1752 | TCGReg addrhi __attribute__((unused)); |
Richard Henderson | 59227d5 | 2015-05-12 11:51:44 -0700 | [diff] [blame] | 1753 | TCGMemOpIdx oi; |
Richard Henderson | 7f25c46 | 2014-03-25 12:11:48 -0700 | [diff] [blame] | 1754 | TCGMemOp opc, s_bits; |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 1755 | #ifdef CONFIG_SOFTMMU |
Richard Henderson | 7f25c46 | 2014-03-25 12:11:48 -0700 | [diff] [blame] | 1756 | int mem_index; |
Richard Henderson | e083c4a | 2014-03-28 14:58:38 -0700 | [diff] [blame] | 1757 | tcg_insn_unit *label_ptr; |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 1758 | #endif |
| 1759 | |
Richard Henderson | 7f25c46 | 2014-03-25 12:11:48 -0700 | [diff] [blame] | 1760 | datalo = *args++; |
| 1761 | datahi = (TCG_TARGET_REG_BITS == 32 && is_64 ? *args++ : 0); |
| 1762 | addrlo = *args++; |
| 1763 | addrhi = (TCG_TARGET_REG_BITS < TARGET_LONG_BITS ? *args++ : 0); |
Richard Henderson | 59227d5 | 2015-05-12 11:51:44 -0700 | [diff] [blame] | 1764 | oi = *args++; |
| 1765 | opc = get_memop(oi); |
Richard Henderson | 7f25c46 | 2014-03-25 12:11:48 -0700 | [diff] [blame] | 1766 | s_bits = opc & MO_SIZE; |
| 1767 | |
David Gibson | 9df3b45 | 2011-10-30 19:57:33 +0000 | [diff] [blame] | 1768 | #ifdef CONFIG_SOFTMMU |
Richard Henderson | 59227d5 | 2015-05-12 11:51:44 -0700 | [diff] [blame] | 1769 | mem_index = get_mmuidx(oi); |
Benjamin Herrenschmidt | 68d45bb | 2015-07-21 15:19:38 +1000 | [diff] [blame] | 1770 | addrlo = tcg_out_tlb_read(s, opc, addrlo, addrhi, mem_index, true); |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 1771 | |
Richard Henderson | 7f12d64 | 2013-07-31 16:15:18 -0700 | [diff] [blame] | 1772 | /* Record the code pointer and emit a conditional branch-and-link to the slow path, patched later. */ |
| 1773 | label_ptr = s->code_ptr; |
| 1774 | tcg_out_bc_noaddr(s, BC | BI(7, CR_EQ) | BO_COND_FALSE | LK); |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 1775 | |
Richard Henderson | fedee3e | 2013-07-31 15:11:44 -0700 | [diff] [blame] | 1776 | rbase = TCG_REG_R3; |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 1777 | #else /* !CONFIG_SOFTMMU */ |
Laurent Vivier | b76f21a | 2015-08-24 14:53:54 +0200 | [diff] [blame] | 1778 | rbase = guest_base ? TCG_GUEST_BASE_REG : 0; |
Richard Henderson | 7f25c46 | 2014-03-25 12:11:48 -0700 | [diff] [blame] | 1779 | if (TCG_TARGET_REG_BITS > TARGET_LONG_BITS) { |
Richard Henderson | dfca177 | 2014-04-30 12:12:16 -0700 | [diff] [blame] | 1780 | tcg_out_ext32u(s, TCG_REG_TMP1, addrlo); |
| 1781 | addrlo = TCG_REG_TMP1; |
Richard Henderson | fedee3e | 2013-07-31 15:11:44 -0700 | [diff] [blame] | 1782 | } |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 1783 | #endif |
| 1784 | |
Richard Henderson | 7f25c46 | 2014-03-25 12:11:48 -0700 | [diff] [blame] | 1785 | if (TCG_TARGET_REG_BITS == 32 && s_bits == MO_64) { |
| 1786 | if (opc & MO_BSWAP) { |
| 1787 | tcg_out32(s, ADDI | TAI(TCG_REG_R0, addrlo, 4)); |
| 1788 | tcg_out32(s, LWBRX | TAB(datalo, rbase, addrlo)); |
| 1789 | tcg_out32(s, LWBRX | TAB(datahi, rbase, TCG_REG_R0)); |
| 1790 | } else if (rbase != 0) { |
| 1791 | tcg_out32(s, ADDI | TAI(TCG_REG_R0, addrlo, 4)); |
| 1792 | tcg_out32(s, LWZX | TAB(datahi, rbase, addrlo)); |
| 1793 | tcg_out32(s, LWZX | TAB(datalo, rbase, TCG_REG_R0)); |
| 1794 | } else if (addrlo == datahi) { |
| 1795 | tcg_out32(s, LWZ | TAI(datalo, addrlo, 4)); |
| 1796 | tcg_out32(s, LWZ | TAI(datahi, addrlo, 0)); |
| 1797 | } else { |
| 1798 | tcg_out32(s, LWZ | TAI(datahi, addrlo, 0)); |
| 1799 | tcg_out32(s, LWZ | TAI(datalo, addrlo, 4)); |
| 1800 | } |
Richard Henderson | 49d9870 | 2013-02-02 00:58:14 -0800 | [diff] [blame] | 1801 | } else { |
Richard Henderson | 2b7ec66 | 2015-05-29 09:16:51 -0700 | [diff] [blame] | 1802 | uint32_t insn = qemu_ldx_opc[opc & (MO_BSWAP | MO_SSIZE)]; |
Richard Henderson | 7f25c46 | 2014-03-25 12:11:48 -0700 | [diff] [blame] | 1803 | if (!HAVE_ISA_2_06 && insn == LDBRX) { |
| 1804 | tcg_out32(s, ADDI | TAI(TCG_REG_R0, addrlo, 4)); |
| 1805 | tcg_out32(s, LWBRX | TAB(datalo, rbase, addrlo)); |
| 1806 | tcg_out32(s, LWBRX | TAB(TCG_REG_R0, rbase, TCG_REG_R0)); |
| 1807 | tcg_out_rld(s, RLDIMI, datalo, TCG_REG_R0, 32, 0); |
| 1808 | } else if (insn) { |
| 1809 | tcg_out32(s, insn | TAB(datalo, rbase, addrlo)); |
| 1810 | } else { |
| 1811 | insn = qemu_ldx_opc[opc & (MO_SIZE | MO_BSWAP)]; |
| 1812 | tcg_out32(s, insn | TAB(datalo, rbase, addrlo)); |
| 1813 | insn = qemu_exts_opc[s_bits]; |
| 1814 | tcg_out32(s, insn | RA(datalo) | RS(datalo)); |
| 1815 | } |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 1816 | } |
| 1817 | |
| 1818 | #ifdef CONFIG_SOFTMMU |
Richard Henderson | 3972ef6 | 2015-05-13 09:10:33 -0700 | [diff] [blame] | 1819 | add_qemu_ldst_label(s, true, oi, datalo, datahi, addrlo, addrhi, |
| 1820 | s->code_ptr, label_ptr); |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 1821 | #endif |
| 1822 | } |
| 1823 | |
Richard Henderson | 7f25c46 | 2014-03-25 12:11:48 -0700 | [diff] [blame] | 1824 | static void tcg_out_qemu_st(TCGContext *s, const TCGArg *args, bool is_64) |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 1825 | { |
Richard Henderson | 7f25c46 | 2014-03-25 12:11:48 -0700 | [diff] [blame] | 1826 | TCGReg datalo, datahi, addrlo, rbase; |
| 1827 | TCGReg addrhi __attribute__((unused)); |
Richard Henderson | 59227d5 | 2015-05-12 11:51:44 -0700 | [diff] [blame] | 1828 | TCGMemOpIdx oi; |
Richard Henderson | 7f25c46 | 2014-03-25 12:11:48 -0700 | [diff] [blame] | 1829 | TCGMemOp opc, s_bits; |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 1830 | #ifdef CONFIG_SOFTMMU |
Richard Henderson | 7f25c46 | 2014-03-25 12:11:48 -0700 | [diff] [blame] | 1831 | int mem_index; |
Richard Henderson | e083c4a | 2014-03-28 14:58:38 -0700 | [diff] [blame] | 1832 | tcg_insn_unit *label_ptr; |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 1833 | #endif |
| 1834 | |
Richard Henderson | 7f25c46 | 2014-03-25 12:11:48 -0700 | [diff] [blame] | 1835 | datalo = *args++; |
| 1836 | datahi = (TCG_TARGET_REG_BITS == 32 && is_64 ? *args++ : 0); |
| 1837 | addrlo = *args++; |
| 1838 | addrhi = (TCG_TARGET_REG_BITS < TARGET_LONG_BITS ? *args++ : 0); |
Richard Henderson | 59227d5 | 2015-05-12 11:51:44 -0700 | [diff] [blame] | 1839 | oi = *args++; |
| 1840 | opc = get_memop(oi); |
Richard Henderson | 7f25c46 | 2014-03-25 12:11:48 -0700 | [diff] [blame] | 1841 | s_bits = opc & MO_SIZE; |
| 1842 | |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 1843 | #ifdef CONFIG_SOFTMMU |
Richard Henderson | 59227d5 | 2015-05-12 11:51:44 -0700 | [diff] [blame] | 1844 | mem_index = get_mmuidx(oi); |
Benjamin Herrenschmidt | 68d45bb | 2015-07-21 15:19:38 +1000 | [diff] [blame] | 1845 | addrlo = tcg_out_tlb_read(s, opc, addrlo, addrhi, mem_index, false); |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 1846 | |
Richard Henderson | 7f12d64 | 2013-07-31 16:15:18 -0700 | [diff] [blame] | 1847 | /* Record the code pointer and emit a conditional branch-and-link to the slow path, patched later. */ |
| 1848 | label_ptr = s->code_ptr; |
| 1849 | tcg_out_bc_noaddr(s, BC | BI(7, CR_EQ) | BO_COND_FALSE | LK); |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 1850 | |
Richard Henderson | fedee3e | 2013-07-31 15:11:44 -0700 | [diff] [blame] | 1851 | rbase = TCG_REG_R3; |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 1852 | #else /* !CONFIG_SOFTMMU */ |
Laurent Vivier | b76f21a | 2015-08-24 14:53:54 +0200 | [diff] [blame] | 1853 | rbase = guest_base ? TCG_GUEST_BASE_REG : 0; |
Richard Henderson | 7f25c46 | 2014-03-25 12:11:48 -0700 | [diff] [blame] | 1854 | if (TCG_TARGET_REG_BITS > TARGET_LONG_BITS) { |
Richard Henderson | dfca177 | 2014-04-30 12:12:16 -0700 | [diff] [blame] | 1855 | tcg_out_ext32u(s, TCG_REG_TMP1, addrlo); |
| 1856 | addrlo = TCG_REG_TMP1; |
Richard Henderson | fedee3e | 2013-07-31 15:11:44 -0700 | [diff] [blame] | 1857 | } |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 1858 | #endif |
| 1859 | |
Richard Henderson | 7f25c46 | 2014-03-25 12:11:48 -0700 | [diff] [blame] | 1860 | if (TCG_TARGET_REG_BITS == 32 && s_bits == MO_64) { |
| 1861 | if (opc & MO_BSWAP) { |
| 1862 | tcg_out32(s, ADDI | TAI(TCG_REG_R0, addrlo, 4)); |
| 1863 | tcg_out32(s, STWBRX | SAB(datalo, rbase, addrlo)); |
| 1864 | tcg_out32(s, STWBRX | SAB(datahi, rbase, TCG_REG_R0)); |
| 1865 | } else if (rbase != 0) { |
| 1866 | tcg_out32(s, ADDI | TAI(TCG_REG_R0, addrlo, 4)); |
| 1867 | tcg_out32(s, STWX | SAB(datahi, rbase, addrlo)); |
| 1868 | tcg_out32(s, STWX | SAB(datalo, rbase, TCG_REG_R0)); |
| 1869 | } else { |
| 1870 | tcg_out32(s, STW | TAI(datahi, addrlo, 0)); |
| 1871 | tcg_out32(s, STW | TAI(datalo, addrlo, 4)); |
| 1872 | } |
Richard Henderson | 49d9870 | 2013-02-02 00:58:14 -0800 | [diff] [blame] | 1873 | } else { |
Richard Henderson | 2b7ec66 | 2015-05-29 09:16:51 -0700 | [diff] [blame] | 1874 | uint32_t insn = qemu_stx_opc[opc & (MO_BSWAP | MO_SIZE)]; |
Richard Henderson | 7f25c46 | 2014-03-25 12:11:48 -0700 | [diff] [blame] | 1875 | if (!HAVE_ISA_2_06 && insn == STDBRX) { |
| 1876 | tcg_out32(s, STWBRX | SAB(datalo, rbase, addrlo)); |
Richard Henderson | dfca177 | 2014-04-30 12:12:16 -0700 | [diff] [blame] | 1877 | tcg_out32(s, ADDI | TAI(TCG_REG_TMP1, addrlo, 4)); |
Richard Henderson | 7f25c46 | 2014-03-25 12:11:48 -0700 | [diff] [blame] | 1878 | tcg_out_shri64(s, TCG_REG_R0, datalo, 32); |
Richard Henderson | dfca177 | 2014-04-30 12:12:16 -0700 | [diff] [blame] | 1879 | tcg_out32(s, STWBRX | SAB(TCG_REG_R0, rbase, TCG_REG_TMP1)); |
Richard Henderson | 7f25c46 | 2014-03-25 12:11:48 -0700 | [diff] [blame] | 1880 | } else { |
| 1881 | tcg_out32(s, insn | SAB(datalo, rbase, addrlo)); |
| 1882 | } |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 1883 | } |
| 1884 | |
| 1885 | #ifdef CONFIG_SOFTMMU |
Richard Henderson | 3972ef6 | 2015-05-13 09:10:33 -0700 | [diff] [blame] | 1886 | add_qemu_ldst_label(s, false, oi, datalo, datahi, addrlo, addrhi, |
| 1887 | s->code_ptr, label_ptr); |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 1888 | #endif |
| 1889 | } |
| 1890 | |
Richard Henderson | 53c89ef | 2017-07-31 06:03:03 +0000 | [diff] [blame] | 1891 | static void tcg_out_nop_fill(tcg_insn_unit *p, int count) |
| 1892 | { |
| 1893 | int i; |
| 1894 | for (i = 0; i < count; ++i) { |
| 1895 | p[i] = NOP; |
| 1896 | } |
| 1897 | } |
| 1898 | |
Richard Henderson | a921fdd | 2014-03-25 08:11:53 -0700 | [diff] [blame] | 1899 | /* Parameters for function call generation, used in tcg.c. */ |
| 1900 | #define TCG_TARGET_STACK_ALIGN 16 |
Richard Henderson | a921fdd | 2014-03-25 08:11:53 -0700 | [diff] [blame] | 1901 | #define TCG_TARGET_EXTEND_ARGS 1 |
| 1902 | |
Richard Henderson | 802ca56 | 2014-03-25 08:55:12 -0700 | [diff] [blame] | 1903 | #ifdef _CALL_AIX |
| 1904 | # define LINK_AREA_SIZE (6 * SZR) |
| 1905 | # define LR_OFFSET (1 * SZR) |
| 1906 | # define TCG_TARGET_CALL_STACK_OFFSET (LINK_AREA_SIZE + 8 * SZR) |
Peter Maydell | 1045fc0 | 2014-06-26 16:37:17 +0100 | [diff] [blame] | 1907 | #elif defined(TCG_TARGET_CALL_DARWIN) |
| 1908 | # define LINK_AREA_SIZE (6 * SZR) |
| 1909 | # define LR_OFFSET (2 * SZR) |
Richard Henderson | ffcfbec | 2014-03-25 09:13:38 -0700 | [diff] [blame] | 1910 | #elif TCG_TARGET_REG_BITS == 64 |
| 1911 | # if defined(_CALL_ELF) && _CALL_ELF == 2 |
| 1912 | # define LINK_AREA_SIZE (4 * SZR) |
| 1913 | # define LR_OFFSET (1 * SZR) |
| 1914 | # endif |
| 1915 | #else /* TCG_TARGET_REG_BITS == 32 */ |
| 1916 | # if defined(_CALL_SYSV) |
Richard Henderson | ffcfbec | 2014-03-25 09:13:38 -0700 | [diff] [blame] | 1917 | # define LINK_AREA_SIZE (2 * SZR) |
| 1918 | # define LR_OFFSET (1 * SZR) |
Richard Henderson | ffcfbec | 2014-03-25 09:13:38 -0700 | [diff] [blame] | 1919 | # endif |
| 1920 | #endif |
| 1921 | #ifndef LR_OFFSET |
| 1922 | # error "Unhandled abi" |
| 1923 | #endif |
| 1924 | #ifndef TCG_TARGET_CALL_STACK_OFFSET |
Richard Henderson | a2a98f8 | 2014-03-25 08:57:23 -0700 | [diff] [blame] | 1925 | # define TCG_TARGET_CALL_STACK_OFFSET LINK_AREA_SIZE |
Richard Henderson | 802ca56 | 2014-03-25 08:55:12 -0700 | [diff] [blame] | 1926 | #endif |
Richard Henderson | fa94c3b | 2013-08-31 04:44:21 -0700 | [diff] [blame] | 1927 | |
Richard Henderson | 802ca56 | 2014-03-25 08:55:12 -0700 | [diff] [blame] | 1928 | #define CPU_TEMP_BUF_SIZE (CPU_TEMP_BUF_NLONGS * (int)sizeof(long)) |
| 1929 | #define REG_SAVE_SIZE ((int)ARRAY_SIZE(tcg_target_callee_save_regs) * SZR) |
| 1930 | |
| 1931 | #define FRAME_SIZE ((TCG_TARGET_CALL_STACK_OFFSET \ |
| 1932 | + TCG_STATIC_CALL_ARGS_SIZE \ |
| 1933 | + CPU_TEMP_BUF_SIZE \ |
| 1934 | + REG_SAVE_SIZE \ |
| 1935 | + TCG_TARGET_STACK_ALIGN - 1) \ |
| 1936 | & -TCG_TARGET_STACK_ALIGN) |
| 1937 | |
| 1938 | #define REG_SAVE_BOT (FRAME_SIZE - REG_SAVE_SIZE) |
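/* A concrete example under the 64-bit ELFv2 ABI: SZR == 8, so
   LINK_AREA_SIZE == 32 and TCG_TARGET_CALL_STACK_OFFSET == 32, and
   FRAME_SIZE is 32 + TCG_STATIC_CALL_ARGS_SIZE + CPU_TEMP_BUF_SIZE
   + REG_SAVE_SIZE rounded up to a 16-byte multiple, with the callee-saved
   registers stored in the top REG_SAVE_SIZE bytes (from REG_SAVE_BOT up). */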
Richard Henderson | fa94c3b | 2013-08-31 04:44:21 -0700 | [diff] [blame] | 1939 | |
Richard Henderson | 541dd4c | 2013-08-31 05:14:53 -0700 | [diff] [blame] | 1940 | static void tcg_target_qemu_prologue(TCGContext *s) |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 1941 | { |
Richard Henderson | fa94c3b | 2013-08-31 04:44:21 -0700 | [diff] [blame] | 1942 | int i; |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 1943 | |
Richard Henderson | a84ac4c | 2014-03-28 06:53:53 -0700 | [diff] [blame] | 1944 | #ifdef _CALL_AIX |
| 1945 | void **desc = (void **)s->code_ptr; |
| 1946 | desc[0] = desc + 2; /* entry point */ |
| 1947 | desc[1] = 0; /* environment pointer */ |
| 1948 | s->code_ptr = (void *)(desc + 2); /* skip over descriptor */ |
| 1949 | #endif |
| 1950 | |
Richard Henderson | 802ca56 | 2014-03-25 08:55:12 -0700 | [diff] [blame] | 1951 | tcg_set_frame(s, TCG_REG_CALL_STACK, REG_SAVE_BOT - CPU_TEMP_BUF_SIZE, |
| 1952 | CPU_TEMP_BUF_SIZE); |
Blue Swirl | 136a0b5 | 2011-06-26 22:23:54 +0300 | [diff] [blame] | 1953 | |
malc | a69abbe | 2008-07-24 17:37:09 +0000 | [diff] [blame] | 1954 | /* Prologue */ |
Richard Henderson | 8327a47 | 2013-08-31 05:41:45 -0700 | [diff] [blame] | 1955 | tcg_out32(s, MFSPR | RT(TCG_REG_R0) | LR); |
Richard Henderson | ffcfbec | 2014-03-25 09:13:38 -0700 | [diff] [blame] | 1956 | tcg_out32(s, (SZR == 8 ? STDU : STWU) |
| 1957 | | SAI(TCG_REG_R1, TCG_REG_R1, -FRAME_SIZE)); |
Richard Henderson | 802ca56 | 2014-03-25 08:55:12 -0700 | [diff] [blame] | 1958 | |
Richard Henderson | 29b6919 | 2013-08-31 05:23:23 -0700 | [diff] [blame] | 1959 | for (i = 0; i < ARRAY_SIZE(tcg_target_callee_save_regs); ++i) { |
Richard Henderson | 4c3831a | 2014-03-24 16:03:59 -0700 | [diff] [blame] | 1960 | tcg_out_st(s, TCG_TYPE_REG, tcg_target_callee_save_regs[i], |
| 1961 | TCG_REG_R1, REG_SAVE_BOT + i * SZR); |
Richard Henderson | 29b6919 | 2013-08-31 05:23:23 -0700 | [diff] [blame] | 1962 | } |
Richard Henderson | 802ca56 | 2014-03-25 08:55:12 -0700 | [diff] [blame] | 1963 | tcg_out_st(s, TCG_TYPE_PTR, TCG_REG_R0, TCG_REG_R1, FRAME_SIZE+LR_OFFSET); |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 1964 | |
Laurent Vivier | 4cbea59 | 2015-08-24 01:42:07 +0200 | [diff] [blame] | 1965 | #ifndef CONFIG_SOFTMMU |
Laurent Vivier | b76f21a | 2015-08-24 14:53:54 +0200 | [diff] [blame] | 1966 | if (guest_base) { |
Richard Henderson | 5964fca | 2017-07-31 04:16:10 +0000 | [diff] [blame] | 1967 | tcg_out_movi_int(s, TCG_TYPE_PTR, TCG_GUEST_BASE_REG, guest_base, true); |
Richard Henderson | 541dd4c | 2013-08-31 05:14:53 -0700 | [diff] [blame] | 1968 | tcg_regset_set_reg(s->reserved_regs, TCG_GUEST_BASE_REG); |
Richard Henderson | b9e946c | 2010-05-06 05:50:45 +0000 | [diff] [blame] | 1969 | } |
malc | f6548c0 | 2009-07-18 10:08:40 +0400 | [diff] [blame] | 1970 | #endif |
| 1971 | |
Richard Henderson | 541dd4c | 2013-08-31 05:14:53 -0700 | [diff] [blame] | 1972 | tcg_out_mov(s, TCG_TYPE_PTR, TCG_AREG0, tcg_target_call_iarg_regs[0]); |
| 1973 | tcg_out32(s, MTSPR | RS(tcg_target_call_iarg_regs[1]) | CTR); |
Richard Henderson | 5964fca | 2017-07-31 04:16:10 +0000 | [diff] [blame] | 1974 | if (USE_REG_TB) { |
| 1975 | tcg_out_mov(s, TCG_TYPE_PTR, TCG_REG_TB, tcg_target_call_iarg_regs[1]); |
Richard Henderson | a84ac4c | 2014-03-28 06:53:53 -0700 | [diff] [blame] | 1976 | } |
Richard Henderson | 5964fca | 2017-07-31 04:16:10 +0000 | [diff] [blame] | 1977 | tcg_out32(s, BCCTR | BO_ALWAYS); |
malc | a69abbe | 2008-07-24 17:37:09 +0000 | [diff] [blame] | 1978 | |
| 1979 | /* Epilogue */ |
Richard Henderson | 5964fca | 2017-07-31 04:16:10 +0000 | [diff] [blame] | 1980 | s->code_gen_epilogue = tb_ret_addr = s->code_ptr; |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 1981 | |
Richard Henderson | 802ca56 | 2014-03-25 08:55:12 -0700 | [diff] [blame] | 1982 | tcg_out_ld(s, TCG_TYPE_PTR, TCG_REG_R0, TCG_REG_R1, FRAME_SIZE+LR_OFFSET); |
Richard Henderson | 29b6919 | 2013-08-31 05:23:23 -0700 | [diff] [blame] | 1983 | for (i = 0; i < ARRAY_SIZE(tcg_target_callee_save_regs); ++i) { |
Richard Henderson | 4c3831a | 2014-03-24 16:03:59 -0700 | [diff] [blame] | 1984 | tcg_out_ld(s, TCG_TYPE_REG, tcg_target_callee_save_regs[i], |
| 1985 | TCG_REG_R1, REG_SAVE_BOT + i * SZR); |
Richard Henderson | 29b6919 | 2013-08-31 05:23:23 -0700 | [diff] [blame] | 1986 | } |
Richard Henderson | 8327a47 | 2013-08-31 05:41:45 -0700 | [diff] [blame] | 1987 | tcg_out32(s, MTSPR | RS(TCG_REG_R0) | LR); |
Richard Henderson | fa94c3b | 2013-08-31 04:44:21 -0700 | [diff] [blame] | 1988 | tcg_out32(s, ADDI | TAI(TCG_REG_R1, TCG_REG_R1, FRAME_SIZE)); |
Richard Henderson | 2fd8edd | 2013-02-01 16:08:50 -0800 | [diff] [blame] | 1989 | tcg_out32(s, BCLR | BO_ALWAYS); |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 1990 | } |
| 1991 | |
Richard Henderson | 541dd4c | 2013-08-31 05:14:53 -0700 | [diff] [blame] | 1992 | static void tcg_out_op(TCGContext *s, TCGOpcode opc, const TCGArg *args, |
| 1993 | const int *const_args) |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 1994 | { |
Richard Henderson | ee924fa | 2013-02-01 16:17:17 -0800 | [diff] [blame] | 1995 | TCGArg a0, a1, a2; |
malc | e46b968 | 2008-07-23 20:01:23 +0000 | [diff] [blame] | 1996 | int c; |
| 1997 | |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 1998 | switch (opc) { |
| 1999 | case INDEX_op_exit_tb: |
Richard Henderson | de3d636 | 2014-03-24 15:22:35 -0700 | [diff] [blame] | 2000 | tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_R3, args[0]); |
Richard Henderson | e083c4a | 2014-03-28 14:58:38 -0700 | [diff] [blame] | 2001 | tcg_out_b(s, 0, tb_ret_addr); |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 2002 | break; |
| 2003 | case INDEX_op_goto_tb: |
Richard Henderson | 5964fca | 2017-07-31 04:16:10 +0000 | [diff] [blame] | 2004 | if (s->tb_jmp_insn_offset) { |
| 2005 | /* Direct jump. */ |
| 2006 | if (TCG_TARGET_REG_BITS == 64) { |
| 2007 | /* Ensure the next insns are 8-byte aligned. */ |
| 2008 | if ((uintptr_t)s->code_ptr & 7) { |
| 2009 | tcg_out32(s, NOP); |
| 2010 | } |
| 2011 | s->tb_jmp_insn_offset[args[0]] = tcg_current_code_size(s); |
| 2012 | tcg_out32(s, ADDIS | TAI(TCG_REG_TB, TCG_REG_TB, 0)); |
| 2013 | tcg_out32(s, ADDI | TAI(TCG_REG_TB, TCG_REG_TB, 0)); |
| 2014 | } else { |
| 2015 | s->tb_jmp_insn_offset[args[0]] = tcg_current_code_size(s); |
| 2016 | tcg_out32(s, B); |
| 2017 | s->tb_jmp_reset_offset[args[0]] = tcg_current_code_size(s); |
| 2018 | break; |
| 2019 | } |
| 2020 | } else { |
| 2021 | /* Indirect jump. */ |
| 2022 | tcg_debug_assert(s->tb_jmp_insn_offset == NULL); |
| 2023 | tcg_out_ld(s, TCG_TYPE_PTR, TCG_REG_TB, 0, |
| 2024 | (intptr_t)(s->tb_jmp_target_addr + args[0])); |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 2025 | } |
Richard Henderson | 5964fca | 2017-07-31 04:16:10 +0000 | [diff] [blame] | 2026 | tcg_out32(s, MTSPR | RS(TCG_REG_TB) | CTR); |
Richard Henderson | 5bfd75a | 2015-10-02 22:25:28 +0000 | [diff] [blame] | 2027 | tcg_out32(s, BCCTR | BO_ALWAYS); |
Richard Henderson | 5964fca | 2017-07-31 04:16:10 +0000 | [diff] [blame] | 2028 | s->tb_jmp_reset_offset[args[0]] = c = tcg_current_code_size(s); |
| 2029 | if (USE_REG_TB) { |
| 2030 | /* For the unlinked case, need to reset TCG_REG_TB. */ |
| 2031 | c = -c; |
| 2032 | assert(c == (int16_t)c); |
| 2033 | tcg_out32(s, ADDI | TAI(TCG_REG_TB, TCG_REG_TB, c)); |
| 2034 | } |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 2035 | break; |
Richard Henderson | 0c24078 | 2017-04-26 11:50:31 +0000 | [diff] [blame] | 2036 | case INDEX_op_goto_ptr: |
| 2037 | tcg_out32(s, MTSPR | RS(args[0]) | CTR); |
Richard Henderson | 5964fca | 2017-07-31 04:16:10 +0000 | [diff] [blame] | 2038 | if (USE_REG_TB) { |
| 2039 | tcg_out_mov(s, TCG_TYPE_PTR, TCG_REG_TB, args[0]); |
| 2040 | } |
| 2041 | tcg_out32(s, ADDI | TAI(TCG_REG_R3, 0, 0)); |
Richard Henderson | 0c24078 | 2017-04-26 11:50:31 +0000 | [diff] [blame] | 2042 | tcg_out32(s, BCCTR | BO_ALWAYS); |
| 2043 | break; |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 2044 | case INDEX_op_br: |
| 2045 | { |
Richard Henderson | bec1631 | 2015-02-13 13:39:54 -0800 | [diff] [blame] | 2046 | TCGLabel *l = arg_label(args[0]); |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 2047 | |
| 2048 | if (l->has_value) { |
Richard Henderson | e083c4a | 2014-03-28 14:58:38 -0700 | [diff] [blame] | 2049 | tcg_out_b(s, 0, l->u.value_ptr); |
Richard Henderson | 541dd4c | 2013-08-31 05:14:53 -0700 | [diff] [blame] | 2050 | } else { |
Richard Henderson | bec1631 | 2015-02-13 13:39:54 -0800 | [diff] [blame] | 2051 | tcg_out_reloc(s, s->code_ptr, R_PPC_REL24, l, 0); |
Richard Henderson | c7ca6a2 | 2013-08-30 17:58:10 -0700 | [diff] [blame] | 2052 | tcg_out_b_noaddr(s, B); |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 2053 | } |
| 2054 | } |
| 2055 | break; |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 2056 | case INDEX_op_ld8u_i32: |
| 2057 | case INDEX_op_ld8u_i64: |
Richard Henderson | b18d5d2 | 2013-07-31 11:36:42 -0700 | [diff] [blame] | 2058 | tcg_out_mem_long(s, LBZ, LBZX, args[0], args[1], args[2]); |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 2059 | break; |
| 2060 | case INDEX_op_ld8s_i32: |
| 2061 | case INDEX_op_ld8s_i64: |
Richard Henderson | b18d5d2 | 2013-07-31 11:36:42 -0700 | [diff] [blame] | 2062 | tcg_out_mem_long(s, LBZ, LBZX, args[0], args[1], args[2]); |
Richard Henderson | 541dd4c | 2013-08-31 05:14:53 -0700 | [diff] [blame] | 2063 | tcg_out32(s, EXTSB | RS(args[0]) | RA(args[0])); |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 2064 | break; |
| 2065 | case INDEX_op_ld16u_i32: |
| 2066 | case INDEX_op_ld16u_i64: |
Richard Henderson | b18d5d2 | 2013-07-31 11:36:42 -0700 | [diff] [blame] | 2067 | tcg_out_mem_long(s, LHZ, LHZX, args[0], args[1], args[2]); |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 2068 | break; |
| 2069 | case INDEX_op_ld16s_i32: |
| 2070 | case INDEX_op_ld16s_i64: |
Richard Henderson | b18d5d2 | 2013-07-31 11:36:42 -0700 | [diff] [blame] | 2071 | tcg_out_mem_long(s, LHA, LHAX, args[0], args[1], args[2]); |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 2072 | break; |
| 2073 | case INDEX_op_ld_i32: |
| 2074 | case INDEX_op_ld32u_i64: |
Richard Henderson | b18d5d2 | 2013-07-31 11:36:42 -0700 | [diff] [blame] | 2075 | tcg_out_mem_long(s, LWZ, LWZX, args[0], args[1], args[2]); |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 2076 | break; |
| 2077 | case INDEX_op_ld32s_i64: |
Richard Henderson | b18d5d2 | 2013-07-31 11:36:42 -0700 | [diff] [blame] | 2078 | tcg_out_mem_long(s, LWA, LWAX, args[0], args[1], args[2]); |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 2079 | break; |
| 2080 | case INDEX_op_ld_i64: |
Richard Henderson | b18d5d2 | 2013-07-31 11:36:42 -0700 | [diff] [blame] | 2081 | tcg_out_mem_long(s, LD, LDX, args[0], args[1], args[2]); |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 2082 | break; |
| 2083 | case INDEX_op_st8_i32: |
| 2084 | case INDEX_op_st8_i64: |
Richard Henderson | b18d5d2 | 2013-07-31 11:36:42 -0700 | [diff] [blame] | 2085 | tcg_out_mem_long(s, STB, STBX, args[0], args[1], args[2]); |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 2086 | break; |
| 2087 | case INDEX_op_st16_i32: |
| 2088 | case INDEX_op_st16_i64: |
Richard Henderson | b18d5d2 | 2013-07-31 11:36:42 -0700 | [diff] [blame] | 2089 | tcg_out_mem_long(s, STH, STHX, args[0], args[1], args[2]); |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 2090 | break; |
| 2091 | case INDEX_op_st_i32: |
| 2092 | case INDEX_op_st32_i64: |
Richard Henderson | b18d5d2 | 2013-07-31 11:36:42 -0700 | [diff] [blame] | 2093 | tcg_out_mem_long(s, STW, STWX, args[0], args[1], args[2]); |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 2094 | break; |
| 2095 | case INDEX_op_st_i64: |
Richard Henderson | b18d5d2 | 2013-07-31 11:36:42 -0700 | [diff] [blame] | 2096 | tcg_out_mem_long(s, STD, STDX, args[0], args[1], args[2]); |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 2097 | break; |
| 2098 | |
| 2099 | case INDEX_op_add_i32: |
Richard Henderson | ee924fa | 2013-02-01 16:17:17 -0800 | [diff] [blame] | 2100 | a0 = args[0], a1 = args[1], a2 = args[2]; |
| 2101 | if (const_args[2]) { |
Richard Henderson | ee924fa | 2013-02-01 16:17:17 -0800 | [diff] [blame] | 2102 | do_addi_32: |
Richard Henderson | b18d5d2 | 2013-07-31 11:36:42 -0700 | [diff] [blame] | 2103 | tcg_out_mem_long(s, ADDI, ADD, a0, a1, (int32_t)a2); |
Richard Henderson | ee924fa | 2013-02-01 16:17:17 -0800 | [diff] [blame] | 2104 | } else { |
| 2105 | tcg_out32(s, ADD | TAB(a0, a1, a2)); |
| 2106 | } |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 2107 | break; |
| 2108 | case INDEX_op_sub_i32: |
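        /* Three forms: a constant minuend uses SUBFIC (immediate minus
           register), a constant subtrahend is negated and folded into the
           addi path above, otherwise plain SUBF. */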
Richard Henderson | ee924fa | 2013-02-01 16:17:17 -0800 | [diff] [blame] | 2109 | a0 = args[0], a1 = args[1], a2 = args[2]; |
Richard Henderson | 148bdd2 | 2013-04-04 07:30:20 -0700 | [diff] [blame] | 2110 | if (const_args[1]) { |
| 2111 | if (const_args[2]) { |
| 2112 | tcg_out_movi(s, TCG_TYPE_I32, a0, a1 - a2); |
| 2113 | } else { |
| 2114 | tcg_out32(s, SUBFIC | TAI(a0, a2, a1)); |
| 2115 | } |
| 2116 | } else if (const_args[2]) { |
Richard Henderson | ee924fa | 2013-02-01 16:17:17 -0800 | [diff] [blame] | 2117 | a2 = -a2; |
| 2118 | goto do_addi_32; |
| 2119 | } else { |
| 2120 | tcg_out32(s, SUBF | TAB(a0, a2, a1)); |
| 2121 | } |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 2122 | break; |
| 2123 | |
| 2124 | case INDEX_op_and_i32: |
Richard Henderson | 37251b9 | 2013-03-04 13:48:38 -0800 | [diff] [blame] | 2125 | a0 = args[0], a1 = args[1], a2 = args[2]; |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 2126 | if (const_args[2]) { |
Richard Henderson | 37251b9 | 2013-03-04 13:48:38 -0800 | [diff] [blame] | 2127 | tcg_out_andi32(s, a0, a1, a2); |
Richard Henderson | a9249df | 2013-02-01 23:43:42 -0800 | [diff] [blame] | 2128 | } else { |
Richard Henderson | 37251b9 | 2013-03-04 13:48:38 -0800 | [diff] [blame] | 2129 | tcg_out32(s, AND | SAB(a1, a0, a2)); |
Richard Henderson | a9249df | 2013-02-01 23:43:42 -0800 | [diff] [blame] | 2130 | } |
| 2131 | break; |
| 2132 | case INDEX_op_and_i64: |
Richard Henderson | 37251b9 | 2013-03-04 13:48:38 -0800 | [diff] [blame] | 2133 | a0 = args[0], a1 = args[1], a2 = args[2]; |
Richard Henderson | a9249df | 2013-02-01 23:43:42 -0800 | [diff] [blame] | 2134 | if (const_args[2]) { |
Richard Henderson | 37251b9 | 2013-03-04 13:48:38 -0800 | [diff] [blame] | 2135 | tcg_out_andi64(s, a0, a1, a2); |
Richard Henderson | 637af30 | 2013-02-01 23:58:17 -0800 | [diff] [blame] | 2136 | } else { |
Richard Henderson | 37251b9 | 2013-03-04 13:48:38 -0800 | [diff] [blame] | 2137 | tcg_out32(s, AND | SAB(a1, a0, a2)); |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 2138 | } |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 2139 | break; |
malc | fe6f943 | 2008-07-28 23:46:06 +0000 | [diff] [blame] | 2140 | case INDEX_op_or_i64: |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 2141 | case INDEX_op_or_i32: |
Richard Henderson | dce74c5 | 2013-02-01 20:22:05 -0800 | [diff] [blame] | 2142 | a0 = args[0], a1 = args[1], a2 = args[2]; |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 2143 | if (const_args[2]) { |
Richard Henderson | dce74c5 | 2013-02-01 20:22:05 -0800 | [diff] [blame] | 2144 | tcg_out_ori32(s, a0, a1, a2); |
| 2145 | } else { |
| 2146 | tcg_out32(s, OR | SAB(a1, a0, a2)); |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 2147 | } |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 2148 | break; |
malc | fe6f943 | 2008-07-28 23:46:06 +0000 | [diff] [blame] | 2149 | case INDEX_op_xor_i64: |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 2150 | case INDEX_op_xor_i32: |
Richard Henderson | dce74c5 | 2013-02-01 20:22:05 -0800 | [diff] [blame] | 2151 | a0 = args[0], a1 = args[1], a2 = args[2]; |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 2152 | if (const_args[2]) { |
Richard Henderson | dce74c5 | 2013-02-01 20:22:05 -0800 | [diff] [blame] | 2153 | tcg_out_xori32(s, a0, a1, a2); |
| 2154 | } else { |
| 2155 | tcg_out32(s, XOR | SAB(a1, a0, a2)); |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 2156 | } |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 2157 | break; |
Richard Henderson | ce1010d | 2013-01-31 07:49:13 -0800 | [diff] [blame] | 2158 | case INDEX_op_andc_i32: |
Richard Henderson | 37251b9 | 2013-03-04 13:48:38 -0800 | [diff] [blame] | 2159 | a0 = args[0], a1 = args[1], a2 = args[2]; |
| 2160 | if (const_args[2]) { |
| 2161 | tcg_out_andi32(s, a0, a1, ~a2); |
| 2162 | } else { |
| 2163 | tcg_out32(s, ANDC | SAB(a1, a0, a2)); |
| 2164 | } |
| 2165 | break; |
Richard Henderson | ce1010d | 2013-01-31 07:49:13 -0800 | [diff] [blame] | 2166 | case INDEX_op_andc_i64: |
Richard Henderson | 37251b9 | 2013-03-04 13:48:38 -0800 | [diff] [blame] | 2167 | a0 = args[0], a1 = args[1], a2 = args[2]; |
| 2168 | if (const_args[2]) { |
| 2169 | tcg_out_andi64(s, a0, a1, ~a2); |
| 2170 | } else { |
| 2171 | tcg_out32(s, ANDC | SAB(a1, a0, a2)); |
| 2172 | } |
Richard Henderson | ce1010d | 2013-01-31 07:49:13 -0800 | [diff] [blame] | 2173 | break; |
| 2174 | case INDEX_op_orc_i32: |
Richard Henderson | 37251b9 | 2013-03-04 13:48:38 -0800 | [diff] [blame] | 2175 | if (const_args[2]) { |
| 2176 | tcg_out_ori32(s, args[0], args[1], ~args[2]); |
| 2177 | break; |
| 2178 | } |
| 2179 | /* FALLTHRU */ |
Richard Henderson | ce1010d | 2013-01-31 07:49:13 -0800 | [diff] [blame] | 2180 | case INDEX_op_orc_i64: |
| 2181 | tcg_out32(s, ORC | SAB(args[1], args[0], args[2])); |
| 2182 | break; |
| 2183 | case INDEX_op_eqv_i32: |
Richard Henderson | 37251b9 | 2013-03-04 13:48:38 -0800 | [diff] [blame] | 2184 | if (const_args[2]) { |
| 2185 | tcg_out_xori32(s, args[0], args[1], ~args[2]); |
| 2186 | break; |
| 2187 | } |
| 2188 | /* FALLTHRU */ |
Richard Henderson | ce1010d | 2013-01-31 07:49:13 -0800 | [diff] [blame] | 2189 | case INDEX_op_eqv_i64: |
| 2190 | tcg_out32(s, EQV | SAB(args[1], args[0], args[2])); |
| 2191 | break; |
| 2192 | case INDEX_op_nand_i32: |
| 2193 | case INDEX_op_nand_i64: |
| 2194 | tcg_out32(s, NAND | SAB(args[1], args[0], args[2])); |
| 2195 | break; |
| 2196 | case INDEX_op_nor_i32: |
| 2197 | case INDEX_op_nor_i64: |
| 2198 | tcg_out32(s, NOR | SAB(args[1], args[0], args[2])); |
| 2199 | break; |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 2200 | |
Richard Henderson | d0b0748 | 2016-11-16 12:48:55 +0100 | [diff] [blame] | 2201 | case INDEX_op_clz_i32: |
| 2202 | tcg_out_cntxz(s, TCG_TYPE_I32, CNTLZW, args[0], args[1], |
| 2203 | args[2], const_args[2]); |
| 2204 | break; |
| 2205 | case INDEX_op_ctz_i32: |
| 2206 | tcg_out_cntxz(s, TCG_TYPE_I32, CNTTZW, args[0], args[1], |
| 2207 | args[2], const_args[2]); |
| 2208 | break; |
Richard Henderson | 33e75fb | 2016-11-22 11:43:12 +0000 | [diff] [blame] | 2209 | case INDEX_op_ctpop_i32: |
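        /* CNTPOPW/CNTPOPD (popcntw/popcntd) are ISA 2.06 instructions; the
           ctpop ops are only advertised when have_isa_2_06 is set. */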
| 2210 | tcg_out32(s, CNTPOPW | SAB(args[1], args[0], 0)); |
| 2211 | break; |
Richard Henderson | d0b0748 | 2016-11-16 12:48:55 +0100 | [diff] [blame] | 2212 | |
| 2213 | case INDEX_op_clz_i64: |
| 2214 | tcg_out_cntxz(s, TCG_TYPE_I64, CNTLZD, args[0], args[1], |
| 2215 | args[2], const_args[2]); |
| 2216 | break; |
| 2217 | case INDEX_op_ctz_i64: |
| 2218 | tcg_out_cntxz(s, TCG_TYPE_I64, CNTTZD, args[0], args[1], |
| 2219 | args[2], const_args[2]); |
| 2220 | break; |
Richard Henderson | 33e75fb | 2016-11-22 11:43:12 +0000 | [diff] [blame] | 2221 | case INDEX_op_ctpop_i64: |
| 2222 | tcg_out32(s, CNTPOPD | SAB(args[1], args[0], 0)); |
| 2223 | break; |
Richard Henderson | d0b0748 | 2016-11-16 12:48:55 +0100 | [diff] [blame] | 2224 | |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 2225 | case INDEX_op_mul_i32: |
Richard Henderson | ef80930 | 2013-01-31 09:45:11 -0800 | [diff] [blame] | 2226 | a0 = args[0], a1 = args[1], a2 = args[2]; |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 2227 | if (const_args[2]) { |
Richard Henderson | ef80930 | 2013-01-31 09:45:11 -0800 | [diff] [blame] | 2228 | tcg_out32(s, MULLI | TAI(a0, a1, a2)); |
| 2229 | } else { |
| 2230 | tcg_out32(s, MULLW | TAB(a0, a1, a2)); |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 2231 | } |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 2232 | break; |
| 2233 | |
| 2234 | case INDEX_op_div_i32: |
Richard Henderson | 541dd4c | 2013-08-31 05:14:53 -0700 | [diff] [blame] | 2235 | tcg_out32(s, DIVW | TAB(args[0], args[1], args[2])); |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 2236 | break; |
| 2237 | |
| 2238 | case INDEX_op_divu_i32: |
Richard Henderson | 541dd4c | 2013-08-31 05:14:53 -0700 | [diff] [blame] | 2239 | tcg_out32(s, DIVWU | TAB(args[0], args[1], args[2])); |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 2240 | break; |
| 2241 | |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 2242 | case INDEX_op_shl_i32: |
| 2243 | if (const_args[2]) { |
Richard Henderson | a757e1e | 2014-03-26 18:10:43 -0700 | [diff] [blame] | 2244 | tcg_out_shli32(s, args[0], args[1], args[2]); |
Richard Henderson | 9e555b7 | 2013-02-01 15:00:45 -0800 | [diff] [blame] | 2245 | } else { |
Richard Henderson | 541dd4c | 2013-08-31 05:14:53 -0700 | [diff] [blame] | 2246 | tcg_out32(s, SLW | SAB(args[1], args[0], args[2])); |
Richard Henderson | 9e555b7 | 2013-02-01 15:00:45 -0800 | [diff] [blame] | 2247 | } |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 2248 | break; |
| 2249 | case INDEX_op_shr_i32: |
| 2250 | if (const_args[2]) { |
Richard Henderson | a757e1e | 2014-03-26 18:10:43 -0700 | [diff] [blame] | 2251 | tcg_out_shri32(s, args[0], args[1], args[2]); |
Richard Henderson | 9e555b7 | 2013-02-01 15:00:45 -0800 | [diff] [blame] | 2252 | } else { |
Richard Henderson | 541dd4c | 2013-08-31 05:14:53 -0700 | [diff] [blame] | 2253 | tcg_out32(s, SRW | SAB(args[1], args[0], args[2])); |
Richard Henderson | 9e555b7 | 2013-02-01 15:00:45 -0800 | [diff] [blame] | 2254 | } |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 2255 | break; |
| 2256 | case INDEX_op_sar_i32: |
Richard Henderson | 541dd4c | 2013-08-31 05:14:53 -0700 | [diff] [blame] | 2257 | if (const_args[2]) { |
| 2258 | tcg_out32(s, SRAWI | RS(args[1]) | RA(args[0]) | SH(args[2])); |
| 2259 | } else { |
| 2260 | tcg_out32(s, SRAW | SAB(args[1], args[0], args[2])); |
| 2261 | } |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 2262 | break; |
Richard Henderson | 313d91c | 2013-01-30 19:24:06 -0800 | [diff] [blame] | 2263 | case INDEX_op_rotl_i32: |
| 2264 | if (const_args[2]) { |
| 2265 | tcg_out_rlw(s, RLWINM, args[0], args[1], args[2], 0, 31); |
| 2266 | } else { |
| 2267 | tcg_out32(s, RLWNM | SAB(args[1], args[0], args[2]) |
| 2268 | | MB(0) | ME(31)); |
| 2269 | } |
| 2270 | break; |
| 2271 | case INDEX_op_rotr_i32: |
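        /* Rotate right by n is rotate left by 32 - n; for the register case
           the complement count is computed into R0 first. */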
| 2272 | if (const_args[2]) { |
| 2273 | tcg_out_rlw(s, RLWINM, args[0], args[1], 32 - args[2], 0, 31); |
| 2274 | } else { |
Richard Henderson | 8327a47 | 2013-08-31 05:41:45 -0700 | [diff] [blame] | 2275 | tcg_out32(s, SUBFIC | TAI(TCG_REG_R0, args[2], 32)); |
| 2276 | tcg_out32(s, RLWNM | SAB(args[1], args[0], TCG_REG_R0) |
Richard Henderson | 313d91c | 2013-01-30 19:24:06 -0800 | [diff] [blame] | 2277 | | MB(0) | ME(31)); |
| 2278 | } |
| 2279 | break; |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 2280 | |
| 2281 | case INDEX_op_brcond_i32: |
Richard Henderson | 4c314da | 2013-04-02 14:58:27 -0700 | [diff] [blame] | 2282 | tcg_out_brcond(s, args[2], args[0], args[1], const_args[1], |
Richard Henderson | bec1631 | 2015-02-13 13:39:54 -0800 | [diff] [blame] | 2283 | arg_label(args[3]), TCG_TYPE_I32); |
malc | e924bbe | 2008-07-28 19:42:23 +0000 | [diff] [blame] | 2284 | break; |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 2285 | case INDEX_op_brcond_i64: |
Richard Henderson | 4c314da | 2013-04-02 14:58:27 -0700 | [diff] [blame] | 2286 | tcg_out_brcond(s, args[2], args[0], args[1], const_args[1], |
Richard Henderson | bec1631 | 2015-02-13 13:39:54 -0800 | [diff] [blame] | 2287 | arg_label(args[3]), TCG_TYPE_I64); |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 2288 | break; |
Richard Henderson | abcf61c | 2014-04-30 11:55:34 -0700 | [diff] [blame] | 2289 | case INDEX_op_brcond2_i32: |
| 2290 | tcg_out_brcond2(s, args, const_args); |
| 2291 | break; |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 2292 | |
| 2293 | case INDEX_op_neg_i32: |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 2294 | case INDEX_op_neg_i64: |
Richard Henderson | 541dd4c | 2013-08-31 05:14:53 -0700 | [diff] [blame] | 2295 | tcg_out32(s, NEG | RT(args[0]) | RA(args[1])); |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 2296 | break; |
| 2297 | |
malc | 157f266 | 2011-08-22 14:40:00 +0400 | [diff] [blame] | 2298 | case INDEX_op_not_i32: |
| 2299 | case INDEX_op_not_i64: |
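        /* NOT is expressed as NOR of the operand with itself. */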
Richard Henderson | 541dd4c | 2013-08-31 05:14:53 -0700 | [diff] [blame] | 2300 | tcg_out32(s, NOR | SAB(args[1], args[0], args[1])); |
malc | 157f266 | 2011-08-22 14:40:00 +0400 | [diff] [blame] | 2301 | break; |
| 2302 | |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 2303 | case INDEX_op_add_i64: |
Richard Henderson | ee924fa | 2013-02-01 16:17:17 -0800 | [diff] [blame] | 2304 | a0 = args[0], a1 = args[1], a2 = args[2]; |
| 2305 | if (const_args[2]) { |
Richard Henderson | ee924fa | 2013-02-01 16:17:17 -0800 | [diff] [blame] | 2306 | do_addi_64: |
Richard Henderson | b18d5d2 | 2013-07-31 11:36:42 -0700 | [diff] [blame] | 2307 | tcg_out_mem_long(s, ADDI, ADD, a0, a1, a2); |
Richard Henderson | ee924fa | 2013-02-01 16:17:17 -0800 | [diff] [blame] | 2308 | } else { |
| 2309 | tcg_out32(s, ADD | TAB(a0, a1, a2)); |
| 2310 | } |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 2311 | break; |
| 2312 | case INDEX_op_sub_i64: |
Richard Henderson | ee924fa | 2013-02-01 16:17:17 -0800 | [diff] [blame] | 2313 | a0 = args[0], a1 = args[1], a2 = args[2]; |
Richard Henderson | 148bdd2 | 2013-04-04 07:30:20 -0700 | [diff] [blame] | 2314 | if (const_args[1]) { |
| 2315 | if (const_args[2]) { |
| 2316 | tcg_out_movi(s, TCG_TYPE_I64, a0, a1 - a2); |
| 2317 | } else { |
| 2318 | tcg_out32(s, SUBFIC | TAI(a0, a2, a1)); |
| 2319 | } |
| 2320 | } else if (const_args[2]) { |
Richard Henderson | ee924fa | 2013-02-01 16:17:17 -0800 | [diff] [blame] | 2321 | a2 = -a2; |
| 2322 | goto do_addi_64; |
| 2323 | } else { |
| 2324 | tcg_out32(s, SUBF | TAB(a0, a2, a1)); |
| 2325 | } |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 2326 | break; |
| 2327 | |
| 2328 | case INDEX_op_shl_i64: |
Richard Henderson | 541dd4c | 2013-08-31 05:14:53 -0700 | [diff] [blame] | 2329 | if (const_args[2]) { |
Richard Henderson | 0a9564b | 2013-02-01 15:12:14 -0800 | [diff] [blame] | 2330 | tcg_out_shli64(s, args[0], args[1], args[2]); |
Richard Henderson | 541dd4c | 2013-08-31 05:14:53 -0700 | [diff] [blame] | 2331 | } else { |
| 2332 | tcg_out32(s, SLD | SAB(args[1], args[0], args[2])); |
| 2333 | } |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 2334 | break; |
| 2335 | case INDEX_op_shr_i64: |
Richard Henderson | 541dd4c | 2013-08-31 05:14:53 -0700 | [diff] [blame] | 2336 | if (const_args[2]) { |
Richard Henderson | 5e916c2 | 2013-02-01 15:19:05 -0800 | [diff] [blame] | 2337 | tcg_out_shri64(s, args[0], args[1], args[2]); |
Richard Henderson | 541dd4c | 2013-08-31 05:14:53 -0700 | [diff] [blame] | 2338 | } else { |
| 2339 | tcg_out32(s, SRD | SAB(args[1], args[0], args[2])); |
| 2340 | } |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 2341 | break; |
| 2342 | case INDEX_op_sar_i64: |
malc | fe6f943 | 2008-07-28 23:46:06 +0000 | [diff] [blame] | 2343 | if (const_args[2]) { |
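            /* SRADI takes a 6-bit shift count: the low five bits live in the
               SH field and the sixth bit is encoded separately, hence the
               manual split below. */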
Richard Henderson | 541dd4c | 2013-08-31 05:14:53 -0700 | [diff] [blame] | 2344 | int sh = SH(args[2] & 0x1f) | (((args[2] >> 5) & 1) << 1); |
| 2345 | tcg_out32(s, SRADI | RA(args[0]) | RS(args[1]) | sh); |
| 2346 | } else { |
| 2347 | tcg_out32(s, SRAD | SAB(args[1], args[0], args[2])); |
malc | fe6f943 | 2008-07-28 23:46:06 +0000 | [diff] [blame] | 2348 | } |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 2349 | break; |
Richard Henderson | 313d91c | 2013-01-30 19:24:06 -0800 | [diff] [blame] | 2350 | case INDEX_op_rotl_i64: |
| 2351 | if (const_args[2]) { |
| 2352 | tcg_out_rld(s, RLDICL, args[0], args[1], args[2], 0); |
| 2353 | } else { |
| 2354 | tcg_out32(s, RLDCL | SAB(args[1], args[0], args[2]) | MB64(0)); |
| 2355 | } |
| 2356 | break; |
| 2357 | case INDEX_op_rotr_i64: |
| 2358 | if (const_args[2]) { |
| 2359 | tcg_out_rld(s, RLDICL, args[0], args[1], 64 - args[2], 0); |
| 2360 | } else { |
Richard Henderson | 8327a47 | 2013-08-31 05:41:45 -0700 | [diff] [blame] | 2361 | tcg_out32(s, SUBFIC | TAI(TCG_REG_R0, args[2], 64)); |
| 2362 | tcg_out32(s, RLDCL | SAB(args[1], args[0], TCG_REG_R0) | MB64(0)); |
Richard Henderson | 313d91c | 2013-01-30 19:24:06 -0800 | [diff] [blame] | 2363 | } |
| 2364 | break; |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 2365 | |
| 2366 | case INDEX_op_mul_i64: |
Richard Henderson | ef80930 | 2013-01-31 09:45:11 -0800 | [diff] [blame] | 2367 | a0 = args[0], a1 = args[1], a2 = args[2]; |
| 2368 | if (const_args[2]) { |
| 2369 | tcg_out32(s, MULLI | TAI(a0, a1, a2)); |
| 2370 | } else { |
| 2371 | tcg_out32(s, MULLD | TAB(a0, a1, a2)); |
| 2372 | } |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 2373 | break; |
| 2374 | case INDEX_op_div_i64: |
Richard Henderson | 541dd4c | 2013-08-31 05:14:53 -0700 | [diff] [blame] | 2375 | tcg_out32(s, DIVD | TAB(args[0], args[1], args[2])); |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 2376 | break; |
| 2377 | case INDEX_op_divu_i64: |
Richard Henderson | 541dd4c | 2013-08-31 05:14:53 -0700 | [diff] [blame] | 2378 | tcg_out32(s, DIVDU | TAB(args[0], args[1], args[2])); |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 2379 | break; |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 2380 | |
Richard Henderson | 1768ec0 | 2013-09-10 10:15:25 -0700 | [diff] [blame] | 2381 | case INDEX_op_qemu_ld_i32: |
Richard Henderson | 7f25c46 | 2014-03-25 12:11:48 -0700 | [diff] [blame] | 2382 | tcg_out_qemu_ld(s, args, false); |
| 2383 | break; |
Richard Henderson | 1768ec0 | 2013-09-10 10:15:25 -0700 | [diff] [blame] | 2384 | case INDEX_op_qemu_ld_i64: |
Richard Henderson | 7f25c46 | 2014-03-25 12:11:48 -0700 | [diff] [blame] | 2385 | tcg_out_qemu_ld(s, args, true); |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 2386 | break; |
Richard Henderson | 1768ec0 | 2013-09-10 10:15:25 -0700 | [diff] [blame] | 2387 | case INDEX_op_qemu_st_i32: |
Richard Henderson | 7f25c46 | 2014-03-25 12:11:48 -0700 | [diff] [blame] | 2388 | tcg_out_qemu_st(s, args, false); |
| 2389 | break; |
Richard Henderson | 1768ec0 | 2013-09-10 10:15:25 -0700 | [diff] [blame] | 2390 | case INDEX_op_qemu_st_i64: |
Richard Henderson | 7f25c46 | 2014-03-25 12:11:48 -0700 | [diff] [blame] | 2391 | tcg_out_qemu_st(s, args, true); |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 2392 | break; |
| 2393 | |
malc | e46b968 | 2008-07-23 20:01:23 +0000 | [diff] [blame] | 2394 | case INDEX_op_ext8s_i32: |
| 2395 | case INDEX_op_ext8s_i64: |
| 2396 | c = EXTSB; |
| 2397 | goto gen_ext; |
| 2398 | case INDEX_op_ext16s_i32: |
| 2399 | case INDEX_op_ext16s_i64: |
| 2400 | c = EXTSH; |
| 2401 | goto gen_ext; |
Aurelien Jarno | 4f2331e | 2015-07-27 12:41:45 +0200 | [diff] [blame] | 2402 | case INDEX_op_ext_i32_i64: |
malc | e46b968 | 2008-07-23 20:01:23 +0000 | [diff] [blame] | 2403 | case INDEX_op_ext32s_i64: |
| 2404 | c = EXTSW; |
| 2405 | goto gen_ext; |
| 2406 | gen_ext: |
Richard Henderson | 541dd4c | 2013-08-31 05:14:53 -0700 | [diff] [blame] | 2407 | tcg_out32(s, c | RS(args[1]) | RA(args[0])); |
malc | e46b968 | 2008-07-23 20:01:23 +0000 | [diff] [blame] | 2408 | break; |
Aurelien Jarno | 4f2331e | 2015-07-27 12:41:45 +0200 | [diff] [blame] | 2409 | case INDEX_op_extu_i32_i64: |
| 2410 | tcg_out_ext32u(s, args[0], args[1]); |
| 2411 | break; |
malc | e46b968 | 2008-07-23 20:01:23 +0000 | [diff] [blame] | 2412 | |
malc | 1cd62ae | 2010-02-07 02:48:53 +0300 | [diff] [blame] | 2413 | case INDEX_op_setcond_i32: |
Richard Henderson | 541dd4c | 2013-08-31 05:14:53 -0700 | [diff] [blame] | 2414 | tcg_out_setcond(s, TCG_TYPE_I32, args[3], args[0], args[1], args[2], |
| 2415 | const_args[2]); |
malc | 1cd62ae | 2010-02-07 02:48:53 +0300 | [diff] [blame] | 2416 | break; |
| 2417 | case INDEX_op_setcond_i64: |
Richard Henderson | 541dd4c | 2013-08-31 05:14:53 -0700 | [diff] [blame] | 2418 | tcg_out_setcond(s, TCG_TYPE_I64, args[3], args[0], args[1], args[2], |
| 2419 | const_args[2]); |
malc | 1cd62ae | 2010-02-07 02:48:53 +0300 | [diff] [blame] | 2420 | break; |
Richard Henderson | abcf61c | 2014-04-30 11:55:34 -0700 | [diff] [blame] | 2421 | case INDEX_op_setcond2_i32: |
| 2422 | tcg_out_setcond2(s, args, const_args); |
| 2423 | break; |
malc | 1cd62ae | 2010-02-07 02:48:53 +0300 | [diff] [blame] | 2424 | |
Richard Henderson | 5d22158 | 2013-01-30 21:16:38 -0800 | [diff] [blame] | 2425 | case INDEX_op_bswap16_i32: |
| 2426 | case INDEX_op_bswap16_i64: |
| 2427 | a0 = args[0], a1 = args[1]; |
| 2428 | /* a1 = abcd */ |
| 2429 | if (a0 != a1) { |
| 2430 | /* a0 = (a1 r<< 24) & 0xff # 000c */ |
| 2431 | tcg_out_rlw(s, RLWINM, a0, a1, 24, 24, 31); |
| 2432 | /* a0 = (a0 & ~0xff00) | (a1 r<< 8) & 0xff00 # 00dc */ |
| 2433 | tcg_out_rlw(s, RLWIMI, a0, a1, 8, 16, 23); |
| 2434 | } else { |
| 2435 | /* r0 = (a1 r<< 8) & 0xff00 # 00d0 */ |
| 2436 | tcg_out_rlw(s, RLWINM, TCG_REG_R0, a1, 8, 16, 23); |
| 2437 | /* a0 = (a1 r<< 24) & 0xff # 000c */ |
| 2438 | tcg_out_rlw(s, RLWINM, a0, a1, 24, 24, 31); |
| 2439 | /* a0 = a0 | r0 # 00dc */ |
| 2440 | tcg_out32(s, OR | SAB(TCG_REG_R0, a0, a0)); |
| 2441 | } |
| 2442 | break; |
| 2443 | |
| 2444 | case INDEX_op_bswap32_i32: |
| 2445 | case INDEX_op_bswap32_i64: |
| 2446 | /* Stolen from gcc's builtin_bswap32 */ |
| 2447 | a1 = args[1]; |
| 2448 | a0 = args[0] == a1 ? TCG_REG_R0 : args[0]; |
| 2449 | |
| 2450 | /* a1 = args[1] # abcd */ |
| 2451 | /* a0 = rotate_left (a1, 8) # bcda */ |
| 2452 | tcg_out_rlw(s, RLWINM, a0, a1, 8, 0, 31); |
| 2453 | /* a0 = (a0 & ~0xff000000) | ((a1 r<< 24) & 0xff000000) # dcda */ |
| 2454 | tcg_out_rlw(s, RLWIMI, a0, a1, 24, 0, 7); |
| 2455 | /* a0 = (a0 & ~0x0000ff00) | ((a1 r<< 24) & 0x0000ff00) # dcba */ |
| 2456 | tcg_out_rlw(s, RLWIMI, a0, a1, 24, 16, 23); |
| 2457 | |
| 2458 | if (a0 == TCG_REG_R0) { |
Richard Henderson | de3d636 | 2014-03-24 15:22:35 -0700 | [diff] [blame] | 2459 | tcg_out_mov(s, TCG_TYPE_REG, args[0], a0); |
Richard Henderson | 5d22158 | 2013-01-30 21:16:38 -0800 | [diff] [blame] | 2460 | } |
| 2461 | break; |
| 2462 | |
Richard Henderson | 68aebd4 | 2013-01-30 21:41:54 -0800 | [diff] [blame] | 2463 | case INDEX_op_bswap64_i64: |
Richard Henderson | 8327a47 | 2013-08-31 05:41:45 -0700 | [diff] [blame] | 2464 | a0 = args[0], a1 = args[1], a2 = TCG_REG_R0; |
Richard Henderson | 68aebd4 | 2013-01-30 21:41:54 -0800 | [diff] [blame] | 2465 | if (a0 == a1) { |
Richard Henderson | 8327a47 | 2013-08-31 05:41:45 -0700 | [diff] [blame] | 2466 | a0 = TCG_REG_R0; |
Richard Henderson | 68aebd4 | 2013-01-30 21:41:54 -0800 | [diff] [blame] | 2467 | a2 = a1; |
| 2468 | } |
| 2469 | |
| 2470 | /* a1 = # abcd efgh */ |
| 2471 | /* a0 = rl32(a1, 8) # 0000 fghe */ |
| 2472 | tcg_out_rlw(s, RLWINM, a0, a1, 8, 0, 31); |
| 2473 | /* a0 = dep(a0, rl32(a1, 24), 0xff000000) # 0000 hghe */ |
| 2474 | tcg_out_rlw(s, RLWIMI, a0, a1, 24, 0, 7); |
| 2475 | /* a0 = dep(a0, rl32(a1, 24), 0x0000ff00) # 0000 hgfe */ |
| 2476 | tcg_out_rlw(s, RLWIMI, a0, a1, 24, 16, 23); |
| 2477 | |
| 2478 | /* a0 = rl64(a0, 32) # hgfe 0000 */ |
| 2479 | /* a2 = rl64(a1, 32) # efgh abcd */ |
| 2480 | tcg_out_rld(s, RLDICL, a0, a0, 32, 0); |
| 2481 | tcg_out_rld(s, RLDICL, a2, a1, 32, 0); |
| 2482 | |
| 2483 | /* a0 = dep(a0, rl32(a2, 8), 0xffffffff) # hgfe bcda */ |
| 2484 | tcg_out_rlw(s, RLWIMI, a0, a2, 8, 0, 31); |
| 2485 | /* a0 = dep(a0, rl32(a2, 24), 0xff000000) # hgfe dcda */ |
| 2486 | tcg_out_rlw(s, RLWIMI, a0, a2, 24, 0, 7); |
| 2487 | /* a0 = dep(a0, rl32(a2, 24), 0x0000ff00) # hgfe dcba */ |
| 2488 | tcg_out_rlw(s, RLWIMI, a0, a2, 24, 16, 23); |
| 2489 | |
| 2490 | if (a0 == TCG_REG_R0) { |
Richard Henderson | de3d636 | 2014-03-24 15:22:35 -0700 | [diff] [blame] | 2491 | tcg_out_mov(s, TCG_TYPE_REG, args[0], a0); |
Richard Henderson | 68aebd4 | 2013-01-30 21:41:54 -0800 | [diff] [blame] | 2492 | } |
| 2493 | break; |
| 2494 | |
Richard Henderson | 33de9ed | 2013-01-31 08:39:30 -0800 | [diff] [blame] | 2495 | case INDEX_op_deposit_i32: |
Richard Henderson | 39dc85b | 2013-04-04 12:47:22 -0700 | [diff] [blame] | 2496 | if (const_args[2]) { |
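            /* The value operand is constrained to "rZ", so a constant here is
               always zero: depositing zero reduces to clearing the field with
               the inverted mask.  The same applies to the i64 case below. */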
| 2497 | uint32_t mask = ((2u << (args[4] - 1)) - 1) << args[3]; |
| 2498 | tcg_out_andi32(s, args[0], args[0], ~mask); |
| 2499 | } else { |
| 2500 | tcg_out_rlw(s, RLWIMI, args[0], args[2], args[3], |
| 2501 | 32 - args[3] - args[4], 31 - args[3]); |
| 2502 | } |
Richard Henderson | 33de9ed | 2013-01-31 08:39:30 -0800 | [diff] [blame] | 2503 | break; |
| 2504 | case INDEX_op_deposit_i64: |
Richard Henderson | 39dc85b | 2013-04-04 12:47:22 -0700 | [diff] [blame] | 2505 | if (const_args[2]) { |
| 2506 | uint64_t mask = ((2ull << (args[4] - 1)) - 1) << args[3]; |
| 2507 | tcg_out_andi64(s, args[0], args[0], ~mask); |
| 2508 | } else { |
| 2509 | tcg_out_rld(s, RLDIMI, args[0], args[2], args[3], |
| 2510 | 64 - args[3] - args[4]); |
| 2511 | } |
Richard Henderson | 33de9ed | 2013-01-31 08:39:30 -0800 | [diff] [blame] | 2512 | break; |
| 2513 | |
Richard Henderson | c05021c | 2016-10-14 14:18:03 -0500 | [diff] [blame] | 2514 | case INDEX_op_extract_i32: |
| 2515 | tcg_out_rlw(s, RLWINM, args[0], args[1], |
| 2516 | 32 - args[2], 32 - args[3], 31); |
| 2517 | break; |
| 2518 | case INDEX_op_extract_i64: |
| 2519 | tcg_out_rld(s, RLDICL, args[0], args[1], 64 - args[2], 64 - args[3]); |
| 2520 | break; |
| 2521 | |
Richard Henderson | 027ffea | 2013-02-01 13:00:05 -0800 | [diff] [blame] | 2522 | case INDEX_op_movcond_i32: |
| 2523 | tcg_out_movcond(s, TCG_TYPE_I32, args[5], args[0], args[1], args[2], |
| 2524 | args[3], args[4], const_args[2]); |
| 2525 | break; |
| 2526 | case INDEX_op_movcond_i64: |
| 2527 | tcg_out_movcond(s, TCG_TYPE_I64, args[5], args[0], args[1], args[2], |
| 2528 | args[3], args[4], const_args[2]); |
| 2529 | break; |
| 2530 | |
Richard Henderson | 796f1a6 | 2014-04-30 11:39:20 -0700 | [diff] [blame] | 2531 | #if TCG_TARGET_REG_BITS == 64 |
Richard Henderson | 6c85876 | 2013-03-04 14:26:52 -0800 | [diff] [blame] | 2532 | case INDEX_op_add2_i64: |
Richard Henderson | 796f1a6 | 2014-04-30 11:39:20 -0700 | [diff] [blame] | 2533 | #else |
| 2534 | case INDEX_op_add2_i32: |
| 2535 | #endif |
Richard Henderson | 6c85876 | 2013-03-04 14:26:52 -0800 | [diff] [blame] | 2536 | /* Note that the CA bit is defined based on the word size of the |
| 2537 | environment. So in 64-bit mode it's always carry-out of bit 63. |
| 2538 | The fallback code using deposit works just as well for 32-bit. */ |
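        /* If the low-part destination overlaps an input still needed for the
           high part, build the low part in R0 and copy it into place last. */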
| 2539 | a0 = args[0], a1 = args[1]; |
Anton Blanchard | 8424735 | 2013-06-02 22:29:39 +1000 | [diff] [blame] | 2540 | if (a0 == args[3] || (!const_args[5] && a0 == args[5])) { |
Richard Henderson | 6c85876 | 2013-03-04 14:26:52 -0800 | [diff] [blame] | 2541 | a0 = TCG_REG_R0; |
| 2542 | } |
Anton Blanchard | 8424735 | 2013-06-02 22:29:39 +1000 | [diff] [blame] | 2543 | if (const_args[4]) { |
| 2544 | tcg_out32(s, ADDIC | TAI(a0, args[2], args[4])); |
Richard Henderson | 6c85876 | 2013-03-04 14:26:52 -0800 | [diff] [blame] | 2545 | } else { |
Anton Blanchard | 8424735 | 2013-06-02 22:29:39 +1000 | [diff] [blame] | 2546 | tcg_out32(s, ADDC | TAB(a0, args[2], args[4])); |
Richard Henderson | 6c85876 | 2013-03-04 14:26:52 -0800 | [diff] [blame] | 2547 | } |
| 2548 | if (const_args[5]) { |
Anton Blanchard | 8424735 | 2013-06-02 22:29:39 +1000 | [diff] [blame] | 2549 | tcg_out32(s, (args[5] ? ADDME : ADDZE) | RT(a1) | RA(args[3])); |
Richard Henderson | 6c85876 | 2013-03-04 14:26:52 -0800 | [diff] [blame] | 2550 | } else { |
Anton Blanchard | 8424735 | 2013-06-02 22:29:39 +1000 | [diff] [blame] | 2551 | tcg_out32(s, ADDE | TAB(a1, args[3], args[5])); |
Richard Henderson | 6c85876 | 2013-03-04 14:26:52 -0800 | [diff] [blame] | 2552 | } |
| 2553 | if (a0 != args[0]) { |
Richard Henderson | de3d636 | 2014-03-24 15:22:35 -0700 | [diff] [blame] | 2554 | tcg_out_mov(s, TCG_TYPE_REG, args[0], a0); |
Richard Henderson | 6c85876 | 2013-03-04 14:26:52 -0800 | [diff] [blame] | 2555 | } |
| 2556 | break; |
| 2557 | |
Richard Henderson | 796f1a6 | 2014-04-30 11:39:20 -0700 | [diff] [blame] | 2558 | #if TCG_TARGET_REG_BITS == 64 |
Richard Henderson | 6c85876 | 2013-03-04 14:26:52 -0800 | [diff] [blame] | 2559 | case INDEX_op_sub2_i64: |
Richard Henderson | 796f1a6 | 2014-04-30 11:39:20 -0700 | [diff] [blame] | 2560 | #else |
| 2561 | case INDEX_op_sub2_i32: |
| 2562 | #endif |
Richard Henderson | 6c85876 | 2013-03-04 14:26:52 -0800 | [diff] [blame] | 2563 | a0 = args[0], a1 = args[1]; |
Richard Henderson | b31284c | 2014-03-26 18:56:31 -0700 | [diff] [blame] | 2564 | if (a0 == args[5] || (!const_args[3] && a0 == args[3])) { |
Richard Henderson | 6c85876 | 2013-03-04 14:26:52 -0800 | [diff] [blame] | 2565 | a0 = TCG_REG_R0; |
| 2566 | } |
| 2567 | if (const_args[2]) { |
Richard Henderson | b31284c | 2014-03-26 18:56:31 -0700 | [diff] [blame] | 2568 | tcg_out32(s, SUBFIC | TAI(a0, args[4], args[2])); |
Richard Henderson | 6c85876 | 2013-03-04 14:26:52 -0800 | [diff] [blame] | 2569 | } else { |
Richard Henderson | b31284c | 2014-03-26 18:56:31 -0700 | [diff] [blame] | 2570 | tcg_out32(s, SUBFC | TAB(a0, args[4], args[2])); |
Richard Henderson | 6c85876 | 2013-03-04 14:26:52 -0800 | [diff] [blame] | 2571 | } |
Richard Henderson | b31284c | 2014-03-26 18:56:31 -0700 | [diff] [blame] | 2572 | if (const_args[3]) { |
| 2573 | tcg_out32(s, (args[3] ? SUBFME : SUBFZE) | RT(a1) | RA(args[5])); |
Richard Henderson | 6c85876 | 2013-03-04 14:26:52 -0800 | [diff] [blame] | 2574 | } else { |
Richard Henderson | b31284c | 2014-03-26 18:56:31 -0700 | [diff] [blame] | 2575 | tcg_out32(s, SUBFE | TAB(a1, args[5], args[3])); |
Richard Henderson | 6c85876 | 2013-03-04 14:26:52 -0800 | [diff] [blame] | 2576 | } |
| 2577 | if (a0 != args[0]) { |
Richard Henderson | de3d636 | 2014-03-24 15:22:35 -0700 | [diff] [blame] | 2578 | tcg_out_mov(s, TCG_TYPE_REG, args[0], a0); |
Richard Henderson | 6c85876 | 2013-03-04 14:26:52 -0800 | [diff] [blame] | 2579 | } |
| 2580 | break; |
| 2581 | |
Richard Henderson | abcf61c | 2014-04-30 11:55:34 -0700 | [diff] [blame] | 2582 | case INDEX_op_muluh_i32: |
| 2583 | tcg_out32(s, MULHWU | TAB(args[0], args[1], args[2])); |
| 2584 | break; |
Richard Henderson | 8fa391a | 2014-03-26 11:37:06 -0700 | [diff] [blame] | 2585 | case INDEX_op_mulsh_i32: |
| 2586 | tcg_out32(s, MULHW | TAB(args[0], args[1], args[2])); |
| 2587 | break; |
Richard Henderson | 32f5717 | 2013-08-14 14:46:08 -0700 | [diff] [blame] | 2588 | case INDEX_op_muluh_i64: |
| 2589 | tcg_out32(s, MULHDU | TAB(args[0], args[1], args[2])); |
| 2590 | break; |
| 2591 | case INDEX_op_mulsh_i64: |
| 2592 | tcg_out32(s, MULHD | TAB(args[0], args[1], args[2])); |
Richard Henderson | 6645c14 | 2013-03-04 16:20:51 -0800 | [diff] [blame] | 2593 | break; |
| 2594 | |
Pranith Kumar | 7b4af5e | 2016-07-14 16:20:19 -0400 | [diff] [blame] | 2595 | case INDEX_op_mb: |
| 2596 | tcg_out_mb(s, args[0]); |
| 2597 | break; |
| 2598 | |
Richard Henderson | 96d0ee7 | 2014-04-25 15:19:33 -0400 | [diff] [blame] | 2599 | case INDEX_op_mov_i32: /* Always emitted via tcg_out_mov. */ |
| 2600 | case INDEX_op_mov_i64: |
| 2601 | case INDEX_op_movi_i32: /* Always emitted via tcg_out_movi. */ |
| 2602 | case INDEX_op_movi_i64: |
| 2603 | case INDEX_op_call: /* Always emitted via tcg_out_call. */ |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 2604 | default: |
Richard Henderson | 541dd4c | 2013-08-31 05:14:53 -0700 | [diff] [blame] | 2605 | tcg_abort(); |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 2606 | } |
| 2607 | } |
| 2608 | |
Richard Henderson | f69d277 | 2016-11-18 09:31:40 +0100 | [diff] [blame] | 2609 | static const TCGTargetOpDef *tcg_target_op_def(TCGOpcode op) |
| 2610 | { |
Richard Henderson | 6cb3658 | 2017-09-14 02:29:32 +0000 | [diff] [blame] | 2611 | static const TCGTargetOpDef r = { .args_ct_str = { "r" } }; |
| 2612 | static const TCGTargetOpDef r_r = { .args_ct_str = { "r", "r" } }; |
| 2613 | static const TCGTargetOpDef r_L = { .args_ct_str = { "r", "L" } }; |
| 2614 | static const TCGTargetOpDef S_S = { .args_ct_str = { "S", "S" } }; |
| 2615 | static const TCGTargetOpDef r_ri = { .args_ct_str = { "r", "ri" } }; |
| 2616 | static const TCGTargetOpDef r_r_r = { .args_ct_str = { "r", "r", "r" } }; |
| 2617 | static const TCGTargetOpDef r_L_L = { .args_ct_str = { "r", "L", "L" } }; |
| 2618 | static const TCGTargetOpDef L_L_L = { .args_ct_str = { "L", "L", "L" } }; |
| 2619 | static const TCGTargetOpDef S_S_S = { .args_ct_str = { "S", "S", "S" } }; |
| 2620 | static const TCGTargetOpDef r_r_ri = { .args_ct_str = { "r", "r", "ri" } }; |
| 2621 | static const TCGTargetOpDef r_r_rI = { .args_ct_str = { "r", "r", "rI" } }; |
| 2622 | static const TCGTargetOpDef r_r_rT = { .args_ct_str = { "r", "r", "rT" } }; |
| 2623 | static const TCGTargetOpDef r_r_rU = { .args_ct_str = { "r", "r", "rU" } }; |
| 2624 | static const TCGTargetOpDef r_rI_ri |
| 2625 | = { .args_ct_str = { "r", "rI", "ri" } }; |
| 2626 | static const TCGTargetOpDef r_rI_rT |
| 2627 | = { .args_ct_str = { "r", "rI", "rT" } }; |
| 2628 | static const TCGTargetOpDef r_r_rZW |
| 2629 | = { .args_ct_str = { "r", "r", "rZW" } }; |
| 2630 | static const TCGTargetOpDef L_L_L_L |
| 2631 | = { .args_ct_str = { "L", "L", "L", "L" } }; |
| 2632 | static const TCGTargetOpDef S_S_S_S |
| 2633 | = { .args_ct_str = { "S", "S", "S", "S" } }; |
| 2634 | static const TCGTargetOpDef movc |
| 2635 | = { .args_ct_str = { "r", "r", "ri", "rZ", "rZ" } }; |
| 2636 | static const TCGTargetOpDef dep |
| 2637 | = { .args_ct_str = { "r", "0", "rZ" } }; |
| 2638 | static const TCGTargetOpDef br2 |
| 2639 | = { .args_ct_str = { "r", "r", "ri", "ri" } }; |
| 2640 | static const TCGTargetOpDef setc2 |
| 2641 | = { .args_ct_str = { "r", "r", "r", "ri", "ri" } }; |
| 2642 | static const TCGTargetOpDef add2 |
| 2643 | = { .args_ct_str = { "r", "r", "r", "r", "rI", "rZM" } }; |
| 2644 | static const TCGTargetOpDef sub2 |
| 2645 | = { .args_ct_str = { "r", "r", "rI", "rZM", "r", "r" } }; |
Richard Henderson | f69d277 | 2016-11-18 09:31:40 +0100 | [diff] [blame] | 2646 | |
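    /* The single-letter constant classes used below correspond to the
       TCG_CT_CONST_* flags defined near the top of this file (e.g. "I" for
       signed 16-bit, "Z" for zero, "M" for minus one, "W" for word size),
       while "L" and "S" are register classes that exclude the registers
       clobbered by the qemu_ld/qemu_st slow-path call. */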
Richard Henderson | 6cb3658 | 2017-09-14 02:29:32 +0000 | [diff] [blame] | 2647 | switch (op) { |
| 2648 | case INDEX_op_goto_ptr: |
| 2649 | return &r; |
| 2650 | |
| 2651 | case INDEX_op_ld8u_i32: |
| 2652 | case INDEX_op_ld8s_i32: |
| 2653 | case INDEX_op_ld16u_i32: |
| 2654 | case INDEX_op_ld16s_i32: |
| 2655 | case INDEX_op_ld_i32: |
| 2656 | case INDEX_op_st8_i32: |
| 2657 | case INDEX_op_st16_i32: |
| 2658 | case INDEX_op_st_i32: |
| 2659 | case INDEX_op_ctpop_i32: |
| 2660 | case INDEX_op_neg_i32: |
| 2661 | case INDEX_op_not_i32: |
| 2662 | case INDEX_op_ext8s_i32: |
| 2663 | case INDEX_op_ext16s_i32: |
| 2664 | case INDEX_op_bswap16_i32: |
| 2665 | case INDEX_op_bswap32_i32: |
| 2666 | case INDEX_op_extract_i32: |
| 2667 | case INDEX_op_ld8u_i64: |
| 2668 | case INDEX_op_ld8s_i64: |
| 2669 | case INDEX_op_ld16u_i64: |
| 2670 | case INDEX_op_ld16s_i64: |
| 2671 | case INDEX_op_ld32u_i64: |
| 2672 | case INDEX_op_ld32s_i64: |
| 2673 | case INDEX_op_ld_i64: |
| 2674 | case INDEX_op_st8_i64: |
| 2675 | case INDEX_op_st16_i64: |
| 2676 | case INDEX_op_st32_i64: |
| 2677 | case INDEX_op_st_i64: |
| 2678 | case INDEX_op_ctpop_i64: |
| 2679 | case INDEX_op_neg_i64: |
| 2680 | case INDEX_op_not_i64: |
| 2681 | case INDEX_op_ext8s_i64: |
| 2682 | case INDEX_op_ext16s_i64: |
| 2683 | case INDEX_op_ext32s_i64: |
| 2684 | case INDEX_op_ext_i32_i64: |
| 2685 | case INDEX_op_extu_i32_i64: |
| 2686 | case INDEX_op_bswap16_i64: |
| 2687 | case INDEX_op_bswap32_i64: |
| 2688 | case INDEX_op_bswap64_i64: |
| 2689 | case INDEX_op_extract_i64: |
| 2690 | return &r_r; |
| 2691 | |
| 2692 | case INDEX_op_add_i32: |
| 2693 | case INDEX_op_and_i32: |
| 2694 | case INDEX_op_or_i32: |
| 2695 | case INDEX_op_xor_i32: |
| 2696 | case INDEX_op_andc_i32: |
| 2697 | case INDEX_op_orc_i32: |
| 2698 | case INDEX_op_eqv_i32: |
| 2699 | case INDEX_op_shl_i32: |
| 2700 | case INDEX_op_shr_i32: |
| 2701 | case INDEX_op_sar_i32: |
| 2702 | case INDEX_op_rotl_i32: |
| 2703 | case INDEX_op_rotr_i32: |
| 2704 | case INDEX_op_setcond_i32: |
| 2705 | case INDEX_op_and_i64: |
| 2706 | case INDEX_op_andc_i64: |
| 2707 | case INDEX_op_shl_i64: |
| 2708 | case INDEX_op_shr_i64: |
| 2709 | case INDEX_op_sar_i64: |
| 2710 | case INDEX_op_rotl_i64: |
| 2711 | case INDEX_op_rotr_i64: |
| 2712 | case INDEX_op_setcond_i64: |
| 2713 | return &r_r_ri; |
| 2714 | case INDEX_op_mul_i32: |
| 2715 | case INDEX_op_mul_i64: |
| 2716 | return &r_r_rI; |
| 2717 | case INDEX_op_div_i32: |
| 2718 | case INDEX_op_divu_i32: |
| 2719 | case INDEX_op_nand_i32: |
| 2720 | case INDEX_op_nor_i32: |
| 2721 | case INDEX_op_muluh_i32: |
| 2722 | case INDEX_op_mulsh_i32: |
| 2723 | case INDEX_op_orc_i64: |
| 2724 | case INDEX_op_eqv_i64: |
| 2725 | case INDEX_op_nand_i64: |
| 2726 | case INDEX_op_nor_i64: |
| 2727 | case INDEX_op_div_i64: |
| 2728 | case INDEX_op_divu_i64: |
| 2729 | case INDEX_op_mulsh_i64: |
| 2730 | case INDEX_op_muluh_i64: |
| 2731 | return &r_r_r; |
| 2732 | case INDEX_op_sub_i32: |
| 2733 | return &r_rI_ri; |
| 2734 | case INDEX_op_add_i64: |
| 2735 | return &r_r_rT; |
| 2736 | case INDEX_op_or_i64: |
| 2737 | case INDEX_op_xor_i64: |
| 2738 | return &r_r_rU; |
| 2739 | case INDEX_op_sub_i64: |
| 2740 | return &r_rI_rT; |
| 2741 | case INDEX_op_clz_i32: |
| 2742 | case INDEX_op_ctz_i32: |
| 2743 | case INDEX_op_clz_i64: |
| 2744 | case INDEX_op_ctz_i64: |
| 2745 | return &r_r_rZW; |
| 2746 | |
| 2747 | case INDEX_op_brcond_i32: |
| 2748 | case INDEX_op_brcond_i64: |
| 2749 | return &r_ri; |
| 2750 | |
| 2751 | case INDEX_op_movcond_i32: |
| 2752 | case INDEX_op_movcond_i64: |
| 2753 | return &movc; |
| 2754 | case INDEX_op_deposit_i32: |
| 2755 | case INDEX_op_deposit_i64: |
| 2756 | return &dep; |
| 2757 | case INDEX_op_brcond2_i32: |
| 2758 | return &br2; |
| 2759 | case INDEX_op_setcond2_i32: |
| 2760 | return &setc2; |
| 2761 | case INDEX_op_add2_i64: |
| 2762 | case INDEX_op_add2_i32: |
| 2763 | return &add2; |
| 2764 | case INDEX_op_sub2_i64: |
| 2765 | case INDEX_op_sub2_i32: |
| 2766 | return &sub2; |
| 2767 | |
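    /* A 64-bit guest address on a 32-bit host occupies a register pair, which
       is why the 32-bit-host variants below need an extra address operand. */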
| 2768 | case INDEX_op_qemu_ld_i32: |
| 2769 | return (TCG_TARGET_REG_BITS == 64 || TARGET_LONG_BITS == 32 |
| 2770 | ? &r_L : &r_L_L); |
| 2771 | case INDEX_op_qemu_st_i32: |
| 2772 | return (TCG_TARGET_REG_BITS == 64 || TARGET_LONG_BITS == 32 |
| 2773 | ? &S_S : &S_S_S); |
| 2774 | case INDEX_op_qemu_ld_i64: |
| 2775 | return (TCG_TARGET_REG_BITS == 64 ? &r_L |
| 2776 | : TARGET_LONG_BITS == 32 ? &L_L_L : &L_L_L_L); |
| 2777 | case INDEX_op_qemu_st_i64: |
| 2778 | return (TCG_TARGET_REG_BITS == 64 ? &S_S |
| 2779 | : TARGET_LONG_BITS == 32 ? &S_S_S : &S_S_S_S); |
| 2780 | |
| 2781 | default: |
| 2782 | return NULL; |
Richard Henderson | f69d277 | 2016-11-18 09:31:40 +0100 | [diff] [blame] | 2783 | } |
Richard Henderson | f69d277 | 2016-11-18 09:31:40 +0100 | [diff] [blame] | 2784 | } |
| 2785 | |
Richard Henderson | 541dd4c | 2013-08-31 05:14:53 -0700 | [diff] [blame] | 2786 | static void tcg_target_init(TCGContext *s) |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 2787 | { |
Richard Henderson | cd629de | 2013-06-04 11:37:17 -0700 | [diff] [blame] | 2788 | unsigned long hwcap = qemu_getauxval(AT_HWCAP); |
Richard Henderson | d0b0748 | 2016-11-16 12:48:55 +0100 | [diff] [blame] | 2789 | unsigned long hwcap2 = qemu_getauxval(AT_HWCAP2); |
| 2790 | |
Richard Henderson | 1e6e9ac | 2013-02-18 09:11:15 -0800 | [diff] [blame] | 2791 | if (hwcap & PPC_FEATURE_ARCH_2_06) { |
| 2792 | have_isa_2_06 = true; |
| 2793 | } |
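    /* PPC_FEATURE2_ARCH_3_00 only exists in newer kernel headers, hence the
       guard around the ISA 3.00 check. */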
Richard Henderson | d0b0748 | 2016-11-16 12:48:55 +0100 | [diff] [blame] | 2794 | #ifdef PPC_FEATURE2_ARCH_3_00 |
| 2795 | if (hwcap2 & PPC_FEATURE2_ARCH_3_00) { |
| 2796 | have_isa_3_00 = true; |
| 2797 | } |
| 2798 | #endif |
Richard Henderson | 1e6e9ac | 2013-02-18 09:11:15 -0800 | [diff] [blame] | 2799 | |
Richard Henderson | f46934d | 2017-09-11 12:44:30 -0700 | [diff] [blame] | 2800 | tcg_target_available_regs[TCG_TYPE_I32] = 0xffffffff; |
| 2801 | tcg_target_available_regs[TCG_TYPE_I64] = 0xffffffff; |
| 2802 | |
| 2803 | tcg_target_call_clobber_regs = 0; |
| 2804 | tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R0); |
| 2805 | tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R2); |
| 2806 | tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R3); |
| 2807 | tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R4); |
| 2808 | tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R5); |
| 2809 | tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R6); |
| 2810 | tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R7); |
| 2811 | tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R8); |
| 2812 | tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R9); |
| 2813 | tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R10); |
| 2814 | tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R11); |
| 2815 | tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R12); |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 2816 | |
Richard Henderson | ccb1bb6 | 2017-09-11 11:25:55 -0700 | [diff] [blame] | 2817 | s->reserved_regs = 0; |
Richard Henderson | 5e1702b | 2013-07-31 10:18:49 -0700 | [diff] [blame] | 2818 | tcg_regset_set_reg(s->reserved_regs, TCG_REG_R0); /* tcg temp */ |
| 2819 | tcg_regset_set_reg(s->reserved_regs, TCG_REG_R1); /* stack pointer */ |
Richard Henderson | dfca177 | 2014-04-30 12:12:16 -0700 | [diff] [blame] | 2820 | #if defined(_CALL_SYSV) |
| 2821 | tcg_regset_set_reg(s->reserved_regs, TCG_REG_R2); /* toc pointer */ |
Andreas Faerber | 5d7ff5b | 2009-12-06 14:00:24 +0100 | [diff] [blame] | 2822 | #endif |
Richard Henderson | dfca177 | 2014-04-30 12:12:16 -0700 | [diff] [blame] | 2823 | #if defined(_CALL_SYSV) || TCG_TARGET_REG_BITS == 64 |
Richard Henderson | 5e1702b | 2013-07-31 10:18:49 -0700 | [diff] [blame] | 2824 | tcg_regset_set_reg(s->reserved_regs, TCG_REG_R13); /* thread pointer */ |
Richard Henderson | dfca177 | 2014-04-30 12:12:16 -0700 | [diff] [blame] | 2825 | #endif |
| 2826 | tcg_regset_set_reg(s->reserved_regs, TCG_REG_TMP1); /* mem temp */ |
Richard Henderson | 5964fca | 2017-07-31 04:16:10 +0000 | [diff] [blame] | 2827 | if (USE_REG_TB) { |
| 2828 | tcg_regset_set_reg(s->reserved_regs, TCG_REG_TB); /* tb->tc_ptr */ |
Richard Henderson | a84ac4c | 2014-03-28 06:53:53 -0700 | [diff] [blame] | 2829 | } |
malc | 810260a | 2008-07-23 19:17:46 +0000 | [diff] [blame] | 2830 | } |
Richard Henderson | fa94c3b | 2013-08-31 04:44:21 -0700 | [diff] [blame] | 2831 | |
Richard Henderson | ffcfbec | 2014-03-25 09:13:38 -0700 | [diff] [blame] | 2832 | #ifdef __ELF__ |
Richard Henderson | fa94c3b | 2013-08-31 04:44:21 -0700 | [diff] [blame] | 2833 | typedef struct { |
| 2834 | DebugFrameCIE cie; |
| 2835 | DebugFrameFDEHeader fde; |
| 2836 | uint8_t fde_def_cfa[4]; |
| 2837 | uint8_t fde_reg_ofs[ARRAY_SIZE(tcg_target_callee_save_regs) * 2 + 3]; |
| 2838 | } DebugFrame; |
| 2839 | |
| 2840 | /* We're expecting a 2 byte uleb128 encoded value. */ |
| 2841 | QEMU_BUILD_BUG_ON(FRAME_SIZE >= (1 << 14)); |
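/* uleb128 stores 7 bits per byte, so a two-byte encoding covers values below
   1 << 14; fde_def_cfa below emits FRAME_SIZE in exactly two bytes. */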
| 2842 | |
Richard Henderson | ffcfbec | 2014-03-25 09:13:38 -0700 | [diff] [blame] | 2843 | #if TCG_TARGET_REG_BITS == 64 |
| 2844 | # define ELF_HOST_MACHINE EM_PPC64 |
| 2845 | #else |
| 2846 | # define ELF_HOST_MACHINE EM_PPC |
| 2847 | #endif |
Richard Henderson | fa94c3b | 2013-08-31 04:44:21 -0700 | [diff] [blame] | 2848 | |
| 2849 | static DebugFrame debug_frame = { |
| 2850 | .cie.len = sizeof(DebugFrameCIE)-4, /* length after .len member */ |
| 2851 | .cie.id = -1, |
| 2852 | .cie.version = 1, |
| 2853 | .cie.code_align = 1, |
Richard Henderson | 802ca56 | 2014-03-25 08:55:12 -0700 | [diff] [blame] | 2854 | .cie.data_align = (-SZR & 0x7f), /* sleb128 -SZR */ |
Richard Henderson | fa94c3b | 2013-08-31 04:44:21 -0700 | [diff] [blame] | 2855 | .cie.return_column = 65, |
| 2856 | |
| 2857 | /* Total FDE size does not include the "len" member. */ |
| 2858 | .fde.len = sizeof(DebugFrame) - offsetof(DebugFrame, fde.cie_offset), |
| 2859 | |
| 2860 | .fde_def_cfa = { |
Richard Henderson | 802ca56 | 2014-03-25 08:55:12 -0700 | [diff] [blame] | 2861 | 12, TCG_REG_R1, /* DW_CFA_def_cfa r1, ... */ |
Richard Henderson | fa94c3b | 2013-08-31 04:44:21 -0700 | [diff] [blame] | 2862 | (FRAME_SIZE & 0x7f) | 0x80, /* ... uleb128 FRAME_SIZE */ |
| 2863 | (FRAME_SIZE >> 7) |
| 2864 | }, |
| 2865 | .fde_reg_ofs = { |
Richard Henderson | 802ca56 | 2014-03-25 08:55:12 -0700 | [diff] [blame] | 2866 | /* DW_CFA_offset_extended_sf, lr, LR_OFFSET */ |
| 2867 | 0x11, 65, (LR_OFFSET / -SZR) & 0x7f, |
Richard Henderson | fa94c3b | 2013-08-31 04:44:21 -0700 | [diff] [blame] | 2868 | } |
| 2869 | }; |
| 2870 | |
| 2871 | void tcg_register_jit(void *buf, size_t buf_size) |
| 2872 | { |
| 2873 | uint8_t *p = &debug_frame.fde_reg_ofs[3]; |
| 2874 | int i; |
| 2875 | |
| 2876 | for (i = 0; i < ARRAY_SIZE(tcg_target_callee_save_regs); ++i, p += 2) { |
| 2877 | p[0] = 0x80 + tcg_target_callee_save_regs[i]; |
Richard Henderson | 802ca56 | 2014-03-25 08:55:12 -0700 | [diff] [blame] | 2878 | p[1] = (FRAME_SIZE - (REG_SAVE_BOT + i * SZR)) / SZR; |
Richard Henderson | fa94c3b | 2013-08-31 04:44:21 -0700 | [diff] [blame] | 2879 | } |
| 2880 | |
Richard Henderson | 802ca56 | 2014-03-25 08:55:12 -0700 | [diff] [blame] | 2881 | debug_frame.fde.func_start = (uintptr_t)buf; |
Richard Henderson | fa94c3b | 2013-08-31 04:44:21 -0700 | [diff] [blame] | 2882 | debug_frame.fde.func_len = buf_size; |
| 2883 | |
| 2884 | tcg_register_jit_int(buf, buf_size, &debug_frame, sizeof(debug_frame)); |
| 2885 | } |
Richard Henderson | ffcfbec | 2014-03-25 09:13:38 -0700 | [diff] [blame] | 2886 | #endif /* __ELF__ */ |
Richard Henderson | 224f9fd | 2014-04-30 13:56:50 -0700 | [diff] [blame] | 2887 | |
Richard Henderson | 224f9fd | 2014-04-30 13:56:50 -0700 | [diff] [blame] | 2888 | void flush_icache_range(uintptr_t start, uintptr_t stop) |
| 2889 | { |
| 2890 | uintptr_t p, start1, stop1; |
Emilio G. Cota | b255b2c | 2017-06-06 20:17:04 -0400 | [diff] [blame] | 2891 | size_t dsize = qemu_dcache_linesize; |
| 2892 | size_t isize = qemu_icache_linesize; |
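    /* Standard PowerPC sequence for making new code visible: dcbst pushes
       dirty data-cache lines to memory, sync waits for completion, icbi
       invalidates the matching instruction-cache lines, and the final
       sync/isync discard anything already prefetched. */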
Richard Henderson | 224f9fd | 2014-04-30 13:56:50 -0700 | [diff] [blame] | 2893 | |
| 2894 | start1 = start & ~(dsize - 1); |
| 2895 | stop1 = (stop + dsize - 1) & ~(dsize - 1); |
| 2896 | for (p = start1; p < stop1; p += dsize) { |
| 2897 | asm volatile ("dcbst 0,%0" : : "r"(p) : "memory"); |
| 2898 | } |
| 2899 | asm volatile ("sync" : : : "memory"); |
| 2900 | |
| 2901 | start1 = start & ~(isize - 1); |
| 2902 | stop1 = (stop + isize - 1) & ~(isize - 1); |
| 2903 | for (p = start1; p < stop1; p += isize) { |
| 2904 | asm volatile ("icbi 0,%0" : : "r"(p) : "memory"); |
| 2905 | } |
| 2906 | asm volatile ("sync" : : : "memory"); |
| 2907 | asm volatile ("isync" : : : "memory"); |
| 2908 | } |