/*
 * Tiny Code Interpreter for QEMU
 *
 * Copyright (c) 2009, 2011, 2016 Stefan Weil
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */

#include "qemu/osdep.h"
#include "tcg/tcg.h"
#include "tcg/helper-info.h"
#include "tcg/tcg-ldst.h"
#include "disas/dis-asm.h"
#include <ffi.h>


/*
 * Enable TCI assertions only when debugging TCG (and without NDEBUG defined).
 * Without assertions, the interpreter runs much faster.
 */
#if defined(CONFIG_DEBUG_TCG)
# define tci_assert(cond) assert(cond)
#else
# define tci_assert(cond) ((void)(cond))
#endif

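/*
 * Thread-local "return address" made visible to helpers: the
 * INDEX_op_call handler below stores the current bytecode pointer here
 * just before invoking a helper through libffi.
 */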
__thread uintptr_t tci_tb_ptr;

static void tci_write_reg64(tcg_target_ulong *regs, uint32_t high_index,
                            uint32_t low_index, uint64_t value)
{
    regs[low_index] = (uint32_t)value;
    regs[high_index] = value >> 32;
}

/* Create a 64 bit value from two 32 bit values. */
static uint64_t tci_uint64(uint32_t high, uint32_t low)
{
    return ((uint64_t)high << 32) + low;
}
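
/*
 * Several operations below produce or consume a 64-bit value split
 * across two registers (add2/sub2, mulu2/muls2, and 64-bit guest data
 * or addresses on 32-bit hosts).  tci_write_reg64() scatters such a
 * value into a (high, low) register pair and tci_uint64() gathers one
 * back from it.
 */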

/*
 * Load sets of arguments all at once.  The naming convention is:
 *   tci_args_<arguments>
 * where arguments is a sequence of
 *
 *   b = immediate (bit position)
 *   c = condition (TCGCond)
 *   i = immediate (uint32_t)
 *   I = immediate (tcg_target_ulong)
 *   l = label or pointer
 *   m = immediate (MemOpIdx)
 *   n = immediate (call return length)
 *   r = register
 *   s = signed ldst offset
 */
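
/*
 * Every operation is encoded in one 32-bit word: the interpreter reads
 * the TCGOpcode from bits [7:0] and then calls one of these helpers to
 * pull the operands out of fixed fields (4-bit register and condition
 * numbers, 6-bit bit positions and lengths, and sign-extended
 * immediates or branch displacements in the upper bits).  As an
 * illustration, a three-register operation decoded with tci_args_rrr()
 * uses
 *   r0 = extract32(insn,  8, 4);
 *   r1 = extract32(insn, 12, 4);
 *   r2 = extract32(insn, 16, 4);
 */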

static void tci_args_l(uint32_t insn, const void *tb_ptr, void **l0)
{
    int diff = sextract32(insn, 12, 20);
    *l0 = diff ? (void *)tb_ptr + diff : NULL;
}

static void tci_args_r(uint32_t insn, TCGReg *r0)
{
    *r0 = extract32(insn, 8, 4);
}

static void tci_args_nl(uint32_t insn, const void *tb_ptr,
                        uint8_t *n0, void **l1)
{
    *n0 = extract32(insn, 8, 4);
    *l1 = sextract32(insn, 12, 20) + (void *)tb_ptr;
}

static void tci_args_rl(uint32_t insn, const void *tb_ptr,
                        TCGReg *r0, void **l1)
{
    *r0 = extract32(insn, 8, 4);
    *l1 = sextract32(insn, 12, 20) + (void *)tb_ptr;
}

static void tci_args_rr(uint32_t insn, TCGReg *r0, TCGReg *r1)
{
    *r0 = extract32(insn, 8, 4);
    *r1 = extract32(insn, 12, 4);
}

static void tci_args_ri(uint32_t insn, TCGReg *r0, tcg_target_ulong *i1)
{
    *r0 = extract32(insn, 8, 4);
    *i1 = sextract32(insn, 12, 20);
}

static void tci_args_rrm(uint32_t insn, TCGReg *r0,
                         TCGReg *r1, MemOpIdx *m2)
{
    *r0 = extract32(insn, 8, 4);
    *r1 = extract32(insn, 12, 4);
    *m2 = extract32(insn, 16, 16);
}

static void tci_args_rrr(uint32_t insn, TCGReg *r0, TCGReg *r1, TCGReg *r2)
{
    *r0 = extract32(insn, 8, 4);
    *r1 = extract32(insn, 12, 4);
    *r2 = extract32(insn, 16, 4);
}

static void tci_args_rrs(uint32_t insn, TCGReg *r0, TCGReg *r1, int32_t *i2)
{
    *r0 = extract32(insn, 8, 4);
    *r1 = extract32(insn, 12, 4);
    *i2 = sextract32(insn, 16, 16);
}

static void tci_args_rrbb(uint32_t insn, TCGReg *r0, TCGReg *r1,
                          uint8_t *i2, uint8_t *i3)
{
    *r0 = extract32(insn, 8, 4);
    *r1 = extract32(insn, 12, 4);
    *i2 = extract32(insn, 16, 6);
    *i3 = extract32(insn, 22, 6);
}

static void tci_args_rrrc(uint32_t insn,
                          TCGReg *r0, TCGReg *r1, TCGReg *r2, TCGCond *c3)
{
    *r0 = extract32(insn, 8, 4);
    *r1 = extract32(insn, 12, 4);
    *r2 = extract32(insn, 16, 4);
    *c3 = extract32(insn, 20, 4);
}

static void tci_args_rrrbb(uint32_t insn, TCGReg *r0, TCGReg *r1,
                           TCGReg *r2, uint8_t *i3, uint8_t *i4)
{
    *r0 = extract32(insn, 8, 4);
    *r1 = extract32(insn, 12, 4);
    *r2 = extract32(insn, 16, 4);
    *i3 = extract32(insn, 20, 6);
    *i4 = extract32(insn, 26, 6);
}

static void tci_args_rrrrr(uint32_t insn, TCGReg *r0, TCGReg *r1,
                           TCGReg *r2, TCGReg *r3, TCGReg *r4)
{
    *r0 = extract32(insn, 8, 4);
    *r1 = extract32(insn, 12, 4);
    *r2 = extract32(insn, 16, 4);
    *r3 = extract32(insn, 20, 4);
    *r4 = extract32(insn, 24, 4);
}

static void tci_args_rrrr(uint32_t insn,
                          TCGReg *r0, TCGReg *r1, TCGReg *r2, TCGReg *r3)
{
    *r0 = extract32(insn, 8, 4);
    *r1 = extract32(insn, 12, 4);
    *r2 = extract32(insn, 16, 4);
    *r3 = extract32(insn, 20, 4);
}

static void tci_args_rrrrrc(uint32_t insn, TCGReg *r0, TCGReg *r1,
                            TCGReg *r2, TCGReg *r3, TCGReg *r4, TCGCond *c5)
{
    *r0 = extract32(insn, 8, 4);
    *r1 = extract32(insn, 12, 4);
    *r2 = extract32(insn, 16, 4);
    *r3 = extract32(insn, 20, 4);
    *r4 = extract32(insn, 24, 4);
    *c5 = extract32(insn, 28, 4);
}

static void tci_args_rrrrrr(uint32_t insn, TCGReg *r0, TCGReg *r1,
                            TCGReg *r2, TCGReg *r3, TCGReg *r4, TCGReg *r5)
{
    *r0 = extract32(insn, 8, 4);
    *r1 = extract32(insn, 12, 4);
    *r2 = extract32(insn, 16, 4);
    *r3 = extract32(insn, 20, 4);
    *r4 = extract32(insn, 24, 4);
    *r5 = extract32(insn, 28, 4);
}

static bool tci_compare32(uint32_t u0, uint32_t u1, TCGCond condition)
{
    bool result = false;
    int32_t i0 = u0;
    int32_t i1 = u1;
    switch (condition) {
    case TCG_COND_EQ:
        result = (u0 == u1);
        break;
    case TCG_COND_NE:
        result = (u0 != u1);
        break;
    case TCG_COND_LT:
        result = (i0 < i1);
        break;
    case TCG_COND_GE:
        result = (i0 >= i1);
        break;
    case TCG_COND_LE:
        result = (i0 <= i1);
        break;
    case TCG_COND_GT:
        result = (i0 > i1);
        break;
    case TCG_COND_LTU:
        result = (u0 < u1);
        break;
    case TCG_COND_GEU:
        result = (u0 >= u1);
        break;
    case TCG_COND_LEU:
        result = (u0 <= u1);
        break;
    case TCG_COND_GTU:
        result = (u0 > u1);
        break;
    case TCG_COND_TSTEQ:
        result = (u0 & u1) == 0;
        break;
    case TCG_COND_TSTNE:
        result = (u0 & u1) != 0;
        break;
    default:
        g_assert_not_reached();
    }
    return result;
}

static bool tci_compare64(uint64_t u0, uint64_t u1, TCGCond condition)
{
    bool result = false;
    int64_t i0 = u0;
    int64_t i1 = u1;
    switch (condition) {
    case TCG_COND_EQ:
        result = (u0 == u1);
        break;
    case TCG_COND_NE:
        result = (u0 != u1);
        break;
    case TCG_COND_LT:
        result = (i0 < i1);
        break;
    case TCG_COND_GE:
        result = (i0 >= i1);
        break;
    case TCG_COND_LE:
        result = (i0 <= i1);
        break;
    case TCG_COND_GT:
        result = (i0 > i1);
        break;
    case TCG_COND_LTU:
        result = (u0 < u1);
        break;
    case TCG_COND_GEU:
        result = (u0 >= u1);
        break;
    case TCG_COND_LEU:
        result = (u0 <= u1);
        break;
    case TCG_COND_GTU:
        result = (u0 > u1);
        break;
    case TCG_COND_TSTEQ:
        result = (u0 & u1) == 0;
        break;
    case TCG_COND_TSTNE:
        result = (u0 & u1) != 0;
        break;
    default:
        g_assert_not_reached();
    }
    return result;
}

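/*
 * Guest loads and stores are forwarded to the common softmmu helpers;
 * the current bytecode pointer is passed along as the return-address
 * argument those helpers expect.
 */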
static uint64_t tci_qemu_ld(CPUArchState *env, uint64_t taddr,
                            MemOpIdx oi, const void *tb_ptr)
{
    MemOp mop = get_memop(oi);
    uintptr_t ra = (uintptr_t)tb_ptr;

    switch (mop & MO_SSIZE) {
    case MO_UB:
        return helper_ldub_mmu(env, taddr, oi, ra);
    case MO_SB:
        return helper_ldsb_mmu(env, taddr, oi, ra);
    case MO_UW:
        return helper_lduw_mmu(env, taddr, oi, ra);
    case MO_SW:
        return helper_ldsw_mmu(env, taddr, oi, ra);
    case MO_UL:
        return helper_ldul_mmu(env, taddr, oi, ra);
    case MO_SL:
        return helper_ldsl_mmu(env, taddr, oi, ra);
    case MO_UQ:
        return helper_ldq_mmu(env, taddr, oi, ra);
    default:
        g_assert_not_reached();
    }
}

static void tci_qemu_st(CPUArchState *env, uint64_t taddr, uint64_t val,
                        MemOpIdx oi, const void *tb_ptr)
{
    MemOp mop = get_memop(oi);
    uintptr_t ra = (uintptr_t)tb_ptr;

    switch (mop & MO_SIZE) {
    case MO_UB:
        helper_stb_mmu(env, taddr, val, oi, ra);
        break;
    case MO_UW:
        helper_stw_mmu(env, taddr, val, oi, ra);
        break;
    case MO_UL:
        helper_stl_mmu(env, taddr, val, oi, ra);
        break;
    case MO_UQ:
        helper_stq_mmu(env, taddr, val, oi, ra);
        break;
    default:
        g_assert_not_reached();
    }
}

#if TCG_TARGET_REG_BITS == 64
# define CASE_32_64(x) \
        case glue(glue(INDEX_op_, x), _i64): \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x) \
        case glue(glue(INDEX_op_, x), _i64):
#else
# define CASE_32_64(x) \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x)
#endif
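
/*
 * For example, on a 64-bit host CASE_32_64(add) expands to
 *   case INDEX_op_add_i64: case INDEX_op_add_i32:
 * while on a 32-bit host only the _i32 label is generated, so the
 * handlers below can share one body between the 32-bit and 64-bit
 * variants of an opcode.
 */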
354
Stefan Weil7657f4b2011-09-27 06:30:58 +0200355/* Interpret pseudo code in tb. */
Daniele Buonoc905a362020-12-04 18:06:12 -0500356/*
357 * Disable CFI checks.
358 * One possible operation in the pseudo code is a call to binary code.
359 * Therefore, disable CFI checks in the interpreter function
360 */
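/*
 * The interpreter keeps all state in two local arrays: regs[] is the
 * virtual register file (with env and the stack pointer installed in
 * their dedicated registers below), and stack[] provides both the
 * static TCG frame and the buffer used to marshal helper-call
 * arguments and results.
 */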
uintptr_t QEMU_DISABLE_CFI tcg_qemu_tb_exec(CPUArchState *env,
                                            const void *v_tb_ptr)
{
    const uint32_t *tb_ptr = v_tb_ptr;
    tcg_target_ulong regs[TCG_TARGET_NB_REGS];
    uint64_t stack[(TCG_STATIC_CALL_ARGS_SIZE + TCG_STATIC_FRAME_SIZE)
                   / sizeof(uint64_t)];

    regs[TCG_AREG0] = (tcg_target_ulong)env;
    regs[TCG_REG_CALL_STACK] = (uintptr_t)stack;
    tci_assert(tb_ptr);

    for (;;) {
        uint32_t insn;
        TCGOpcode opc;
        TCGReg r0, r1, r2, r3, r4, r5;
        tcg_target_ulong t1;
        TCGCond condition;
        uint8_t pos, len;
        uint32_t tmp32;
        uint64_t tmp64, taddr;
        uint64_t T1, T2;
        MemOpIdx oi;
        int32_t ofs;
        void *ptr;

        insn = *tb_ptr++;
        opc = extract32(insn, 0, 8);

        switch (opc) {
        case INDEX_op_call:
            {
                void *call_slots[MAX_CALL_IARGS];
                ffi_cif *cif;
                void *func;
                unsigned i, s, n;

                tci_args_nl(insn, tb_ptr, &len, &ptr);
                func = ((void **)ptr)[0];
                cif = ((void **)ptr)[1];

                n = cif->nargs;
                for (i = s = 0; i < n; ++i) {
                    ffi_type *t = cif->arg_types[i];
                    call_slots[i] = &stack[s];
                    s += DIV_ROUND_UP(t->size, 8);
                }

                /* Helper functions may need to access the "return address" */
                tci_tb_ptr = (uintptr_t)tb_ptr;
                ffi_call(cif, func, stack, call_slots);
            }

            switch (len) {
            case 0: /* void */
                break;
            case 1: /* uint32_t */
                /*
                 * The result winds up "left-aligned" in the stack[0] slot.
                 * Note that libffi has an odd special case in that it will
                 * always widen an integral result to ffi_arg.
                 */
                if (sizeof(ffi_arg) == 8) {
                    regs[TCG_REG_R0] = (uint32_t)stack[0];
                } else {
                    regs[TCG_REG_R0] = *(uint32_t *)stack;
                }
                break;
            case 2: /* uint64_t */
                /*
                 * For TCG_TARGET_REG_BITS == 32, the register pair
                 * must stay in host memory order.
                 */
                memcpy(&regs[TCG_REG_R0], stack, 8);
                break;
            case 3: /* Int128 */
                memcpy(&regs[TCG_REG_R0], stack, 16);
                break;
            default:
                g_assert_not_reached();
            }
            break;

        case INDEX_op_br:
            tci_args_l(insn, tb_ptr, &ptr);
            tb_ptr = ptr;
            continue;
        case INDEX_op_setcond_i32:
            tci_args_rrrc(insn, &r0, &r1, &r2, &condition);
            regs[r0] = tci_compare32(regs[r1], regs[r2], condition);
            break;
        case INDEX_op_movcond_i32:
            tci_args_rrrrrc(insn, &r0, &r1, &r2, &r3, &r4, &condition);
            tmp32 = tci_compare32(regs[r1], regs[r2], condition);
            regs[r0] = regs[tmp32 ? r3 : r4];
            break;
#if TCG_TARGET_REG_BITS == 32
        case INDEX_op_setcond2_i32:
            tci_args_rrrrrc(insn, &r0, &r1, &r2, &r3, &r4, &condition);
            T1 = tci_uint64(regs[r2], regs[r1]);
            T2 = tci_uint64(regs[r4], regs[r3]);
            regs[r0] = tci_compare64(T1, T2, condition);
            break;
#elif TCG_TARGET_REG_BITS == 64
        case INDEX_op_setcond_i64:
            tci_args_rrrc(insn, &r0, &r1, &r2, &condition);
            regs[r0] = tci_compare64(regs[r1], regs[r2], condition);
            break;
        case INDEX_op_movcond_i64:
            tci_args_rrrrrc(insn, &r0, &r1, &r2, &r3, &r4, &condition);
            tmp32 = tci_compare64(regs[r1], regs[r2], condition);
            regs[r0] = regs[tmp32 ? r3 : r4];
            break;
#endif
        CASE_32_64(mov)
            tci_args_rr(insn, &r0, &r1);
            regs[r0] = regs[r1];
            break;
        case INDEX_op_tci_movi:
            tci_args_ri(insn, &r0, &t1);
            regs[r0] = t1;
            break;
        case INDEX_op_tci_movl:
            tci_args_rl(insn, tb_ptr, &r0, &ptr);
            regs[r0] = *(tcg_target_ulong *)ptr;
            break;

        /* Load/store operations (32 bit). */

        CASE_32_64(ld8u)
            tci_args_rrs(insn, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            regs[r0] = *(uint8_t *)ptr;
            break;
        CASE_32_64(ld8s)
            tci_args_rrs(insn, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            regs[r0] = *(int8_t *)ptr;
            break;
        CASE_32_64(ld16u)
            tci_args_rrs(insn, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            regs[r0] = *(uint16_t *)ptr;
            break;
        CASE_32_64(ld16s)
            tci_args_rrs(insn, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            regs[r0] = *(int16_t *)ptr;
            break;
        case INDEX_op_ld_i32:
        CASE_64(ld32u)
            tci_args_rrs(insn, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            regs[r0] = *(uint32_t *)ptr;
            break;
        CASE_32_64(st8)
            tci_args_rrs(insn, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            *(uint8_t *)ptr = regs[r0];
            break;
        CASE_32_64(st16)
            tci_args_rrs(insn, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            *(uint16_t *)ptr = regs[r0];
            break;
        case INDEX_op_st_i32:
        CASE_64(st32)
            tci_args_rrs(insn, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            *(uint32_t *)ptr = regs[r0];
            break;

        /* Arithmetic operations (mixed 32/64 bit). */

        CASE_32_64(add)
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = regs[r1] + regs[r2];
            break;
        CASE_32_64(sub)
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = regs[r1] - regs[r2];
            break;
        CASE_32_64(mul)
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = regs[r1] * regs[r2];
            break;
        CASE_32_64(and)
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = regs[r1] & regs[r2];
            break;
        CASE_32_64(or)
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = regs[r1] | regs[r2];
            break;
        CASE_32_64(xor)
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = regs[r1] ^ regs[r2];
            break;
#if TCG_TARGET_HAS_andc_i32 || TCG_TARGET_HAS_andc_i64
        CASE_32_64(andc)
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = regs[r1] & ~regs[r2];
            break;
#endif
#if TCG_TARGET_HAS_orc_i32 || TCG_TARGET_HAS_orc_i64
        CASE_32_64(orc)
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = regs[r1] | ~regs[r2];
            break;
#endif
#if TCG_TARGET_HAS_eqv_i32 || TCG_TARGET_HAS_eqv_i64
        CASE_32_64(eqv)
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = ~(regs[r1] ^ regs[r2]);
            break;
#endif
#if TCG_TARGET_HAS_nand_i32 || TCG_TARGET_HAS_nand_i64
        CASE_32_64(nand)
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = ~(regs[r1] & regs[r2]);
            break;
#endif
#if TCG_TARGET_HAS_nor_i32 || TCG_TARGET_HAS_nor_i64
        CASE_32_64(nor)
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = ~(regs[r1] | regs[r2]);
            break;
#endif

        /* Arithmetic operations (32 bit). */

        case INDEX_op_div_i32:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = (int32_t)regs[r1] / (int32_t)regs[r2];
            break;
        case INDEX_op_divu_i32:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = (uint32_t)regs[r1] / (uint32_t)regs[r2];
            break;
        case INDEX_op_rem_i32:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = (int32_t)regs[r1] % (int32_t)regs[r2];
            break;
        case INDEX_op_remu_i32:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = (uint32_t)regs[r1] % (uint32_t)regs[r2];
            break;
#if TCG_TARGET_HAS_clz_i32
        case INDEX_op_clz_i32:
            tci_args_rrr(insn, &r0, &r1, &r2);
            tmp32 = regs[r1];
            regs[r0] = tmp32 ? clz32(tmp32) : regs[r2];
            break;
#endif
#if TCG_TARGET_HAS_ctz_i32
        case INDEX_op_ctz_i32:
            tci_args_rrr(insn, &r0, &r1, &r2);
            tmp32 = regs[r1];
            regs[r0] = tmp32 ? ctz32(tmp32) : regs[r2];
            break;
#endif
#if TCG_TARGET_HAS_ctpop_i32
        case INDEX_op_ctpop_i32:
            tci_args_rr(insn, &r0, &r1);
            regs[r0] = ctpop32(regs[r1]);
            break;
#endif

        /* Shift/rotate operations (32 bit). */

        case INDEX_op_shl_i32:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = (uint32_t)regs[r1] << (regs[r2] & 31);
            break;
        case INDEX_op_shr_i32:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = (uint32_t)regs[r1] >> (regs[r2] & 31);
            break;
        case INDEX_op_sar_i32:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = (int32_t)regs[r1] >> (regs[r2] & 31);
            break;
#if TCG_TARGET_HAS_rot_i32
        case INDEX_op_rotl_i32:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = rol32(regs[r1], regs[r2] & 31);
            break;
        case INDEX_op_rotr_i32:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = ror32(regs[r1], regs[r2] & 31);
            break;
#endif
#if TCG_TARGET_HAS_deposit_i32
        case INDEX_op_deposit_i32:
            tci_args_rrrbb(insn, &r0, &r1, &r2, &pos, &len);
            regs[r0] = deposit32(regs[r1], pos, len, regs[r2]);
            break;
#endif
#if TCG_TARGET_HAS_extract_i32
        case INDEX_op_extract_i32:
            tci_args_rrbb(insn, &r0, &r1, &pos, &len);
            regs[r0] = extract32(regs[r1], pos, len);
            break;
#endif
#if TCG_TARGET_HAS_sextract_i32
        case INDEX_op_sextract_i32:
            tci_args_rrbb(insn, &r0, &r1, &pos, &len);
            regs[r0] = sextract32(regs[r1], pos, len);
            break;
#endif
        case INDEX_op_brcond_i32:
            tci_args_rl(insn, tb_ptr, &r0, &ptr);
            if ((uint32_t)regs[r0]) {
                tb_ptr = ptr;
            }
            break;
#if TCG_TARGET_REG_BITS == 32 || TCG_TARGET_HAS_add2_i32
        case INDEX_op_add2_i32:
            tci_args_rrrrrr(insn, &r0, &r1, &r2, &r3, &r4, &r5);
            T1 = tci_uint64(regs[r3], regs[r2]);
            T2 = tci_uint64(regs[r5], regs[r4]);
            tci_write_reg64(regs, r1, r0, T1 + T2);
            break;
#endif
#if TCG_TARGET_REG_BITS == 32 || TCG_TARGET_HAS_sub2_i32
        case INDEX_op_sub2_i32:
            tci_args_rrrrrr(insn, &r0, &r1, &r2, &r3, &r4, &r5);
            T1 = tci_uint64(regs[r3], regs[r2]);
            T2 = tci_uint64(regs[r5], regs[r4]);
            tci_write_reg64(regs, r1, r0, T1 - T2);
            break;
#endif
#if TCG_TARGET_HAS_mulu2_i32
        case INDEX_op_mulu2_i32:
            tci_args_rrrr(insn, &r0, &r1, &r2, &r3);
            tmp64 = (uint64_t)(uint32_t)regs[r2] * (uint32_t)regs[r3];
            tci_write_reg64(regs, r1, r0, tmp64);
            break;
#endif
#if TCG_TARGET_HAS_muls2_i32
        case INDEX_op_muls2_i32:
            tci_args_rrrr(insn, &r0, &r1, &r2, &r3);
            tmp64 = (int64_t)(int32_t)regs[r2] * (int32_t)regs[r3];
            tci_write_reg64(regs, r1, r0, tmp64);
            break;
#endif
#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
        CASE_32_64(ext8s)
            tci_args_rr(insn, &r0, &r1);
            regs[r0] = (int8_t)regs[r1];
            break;
#endif
#if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64 || \
    TCG_TARGET_HAS_bswap16_i32 || TCG_TARGET_HAS_bswap16_i64
        CASE_32_64(ext16s)
            tci_args_rr(insn, &r0, &r1);
            regs[r0] = (int16_t)regs[r1];
            break;
#endif
#if TCG_TARGET_HAS_ext8u_i32 || TCG_TARGET_HAS_ext8u_i64
        CASE_32_64(ext8u)
            tci_args_rr(insn, &r0, &r1);
            regs[r0] = (uint8_t)regs[r1];
            break;
#endif
#if TCG_TARGET_HAS_ext16u_i32 || TCG_TARGET_HAS_ext16u_i64
        CASE_32_64(ext16u)
            tci_args_rr(insn, &r0, &r1);
            regs[r0] = (uint16_t)regs[r1];
            break;
#endif
#if TCG_TARGET_HAS_bswap16_i32 || TCG_TARGET_HAS_bswap16_i64
        CASE_32_64(bswap16)
            tci_args_rr(insn, &r0, &r1);
            regs[r0] = bswap16(regs[r1]);
            break;
#endif
#if TCG_TARGET_HAS_bswap32_i32 || TCG_TARGET_HAS_bswap32_i64
        CASE_32_64(bswap32)
            tci_args_rr(insn, &r0, &r1);
            regs[r0] = bswap32(regs[r1]);
            break;
#endif
#if TCG_TARGET_HAS_not_i32 || TCG_TARGET_HAS_not_i64
        CASE_32_64(not)
            tci_args_rr(insn, &r0, &r1);
            regs[r0] = ~regs[r1];
            break;
#endif
        CASE_32_64(neg)
            tci_args_rr(insn, &r0, &r1);
            regs[r0] = -regs[r1];
            break;
#if TCG_TARGET_REG_BITS == 64
        /* Load/store operations (64 bit). */

        case INDEX_op_ld32s_i64:
            tci_args_rrs(insn, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            regs[r0] = *(int32_t *)ptr;
            break;
        case INDEX_op_ld_i64:
            tci_args_rrs(insn, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            regs[r0] = *(uint64_t *)ptr;
            break;
        case INDEX_op_st_i64:
            tci_args_rrs(insn, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            *(uint64_t *)ptr = regs[r0];
            break;

        /* Arithmetic operations (64 bit). */

        case INDEX_op_div_i64:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = (int64_t)regs[r1] / (int64_t)regs[r2];
            break;
        case INDEX_op_divu_i64:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = (uint64_t)regs[r1] / (uint64_t)regs[r2];
            break;
        case INDEX_op_rem_i64:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = (int64_t)regs[r1] % (int64_t)regs[r2];
            break;
        case INDEX_op_remu_i64:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = (uint64_t)regs[r1] % (uint64_t)regs[r2];
            break;
#if TCG_TARGET_HAS_clz_i64
        case INDEX_op_clz_i64:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = regs[r1] ? clz64(regs[r1]) : regs[r2];
            break;
#endif
#if TCG_TARGET_HAS_ctz_i64
        case INDEX_op_ctz_i64:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = regs[r1] ? ctz64(regs[r1]) : regs[r2];
            break;
#endif
#if TCG_TARGET_HAS_ctpop_i64
        case INDEX_op_ctpop_i64:
            tci_args_rr(insn, &r0, &r1);
            regs[r0] = ctpop64(regs[r1]);
            break;
#endif
#if TCG_TARGET_HAS_mulu2_i64
        case INDEX_op_mulu2_i64:
            tci_args_rrrr(insn, &r0, &r1, &r2, &r3);
            mulu64(&regs[r0], &regs[r1], regs[r2], regs[r3]);
            break;
#endif
#if TCG_TARGET_HAS_muls2_i64
        case INDEX_op_muls2_i64:
            tci_args_rrrr(insn, &r0, &r1, &r2, &r3);
            muls64(&regs[r0], &regs[r1], regs[r2], regs[r3]);
            break;
#endif
#if TCG_TARGET_HAS_add2_i64
        case INDEX_op_add2_i64:
            tci_args_rrrrrr(insn, &r0, &r1, &r2, &r3, &r4, &r5);
            T1 = regs[r2] + regs[r4];
            T2 = regs[r3] + regs[r5] + (T1 < regs[r2]);
            regs[r0] = T1;
            regs[r1] = T2;
            break;
#endif
#if TCG_TARGET_HAS_add2_i64
        case INDEX_op_sub2_i64:
            tci_args_rrrrrr(insn, &r0, &r1, &r2, &r3, &r4, &r5);
            T1 = regs[r2] - regs[r4];
            T2 = regs[r3] - regs[r5] - (regs[r2] < regs[r4]);
            regs[r0] = T1;
            regs[r1] = T2;
            break;
#endif

        /* Shift/rotate operations (64 bit). */

        case INDEX_op_shl_i64:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = regs[r1] << (regs[r2] & 63);
            break;
        case INDEX_op_shr_i64:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = regs[r1] >> (regs[r2] & 63);
            break;
        case INDEX_op_sar_i64:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = (int64_t)regs[r1] >> (regs[r2] & 63);
            break;
#if TCG_TARGET_HAS_rot_i64
        case INDEX_op_rotl_i64:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = rol64(regs[r1], regs[r2] & 63);
            break;
        case INDEX_op_rotr_i64:
            tci_args_rrr(insn, &r0, &r1, &r2);
            regs[r0] = ror64(regs[r1], regs[r2] & 63);
            break;
#endif
#if TCG_TARGET_HAS_deposit_i64
        case INDEX_op_deposit_i64:
            tci_args_rrrbb(insn, &r0, &r1, &r2, &pos, &len);
            regs[r0] = deposit64(regs[r1], pos, len, regs[r2]);
            break;
#endif
#if TCG_TARGET_HAS_extract_i64
        case INDEX_op_extract_i64:
            tci_args_rrbb(insn, &r0, &r1, &pos, &len);
            regs[r0] = extract64(regs[r1], pos, len);
            break;
#endif
#if TCG_TARGET_HAS_sextract_i64
        case INDEX_op_sextract_i64:
            tci_args_rrbb(insn, &r0, &r1, &pos, &len);
            regs[r0] = sextract64(regs[r1], pos, len);
            break;
#endif
        case INDEX_op_brcond_i64:
            tci_args_rl(insn, tb_ptr, &r0, &ptr);
            if (regs[r0]) {
                tb_ptr = ptr;
            }
            break;
        case INDEX_op_ext32s_i64:
        case INDEX_op_ext_i32_i64:
            tci_args_rr(insn, &r0, &r1);
            regs[r0] = (int32_t)regs[r1];
            break;
        case INDEX_op_ext32u_i64:
        case INDEX_op_extu_i32_i64:
            tci_args_rr(insn, &r0, &r1);
            regs[r0] = (uint32_t)regs[r1];
            break;
#if TCG_TARGET_HAS_bswap64_i64
        case INDEX_op_bswap64_i64:
            tci_args_rr(insn, &r0, &r1);
            regs[r0] = bswap64(regs[r1]);
            break;
#endif
#endif /* TCG_TARGET_REG_BITS == 64 */

        /* QEMU specific operations. */

        case INDEX_op_exit_tb:
            tci_args_l(insn, tb_ptr, &ptr);
            return (uintptr_t)ptr;

        case INDEX_op_goto_tb:
            tci_args_l(insn, tb_ptr, &ptr);
            tb_ptr = *(void **)ptr;
            break;

        case INDEX_op_goto_ptr:
            tci_args_r(insn, &r0);
            ptr = (void *)regs[r0];
            if (!ptr) {
                return 0;
            }
            tb_ptr = ptr;
            break;

        case INDEX_op_qemu_ld_a32_i32:
            tci_args_rrm(insn, &r0, &r1, &oi);
            taddr = (uint32_t)regs[r1];
            goto do_ld_i32;
        case INDEX_op_qemu_ld_a64_i32:
            if (TCG_TARGET_REG_BITS == 64) {
                tci_args_rrm(insn, &r0, &r1, &oi);
                taddr = regs[r1];
            } else {
                tci_args_rrrr(insn, &r0, &r1, &r2, &r3);
                taddr = tci_uint64(regs[r2], regs[r1]);
                oi = regs[r3];
            }
        do_ld_i32:
            regs[r0] = tci_qemu_ld(env, taddr, oi, tb_ptr);
            break;

        case INDEX_op_qemu_ld_a32_i64:
            if (TCG_TARGET_REG_BITS == 64) {
                tci_args_rrm(insn, &r0, &r1, &oi);
                taddr = (uint32_t)regs[r1];
            } else {
                tci_args_rrrr(insn, &r0, &r1, &r2, &r3);
                taddr = (uint32_t)regs[r2];
                oi = regs[r3];
            }
            goto do_ld_i64;
        case INDEX_op_qemu_ld_a64_i64:
            if (TCG_TARGET_REG_BITS == 64) {
                tci_args_rrm(insn, &r0, &r1, &oi);
                taddr = regs[r1];
            } else {
                tci_args_rrrrr(insn, &r0, &r1, &r2, &r3, &r4);
                taddr = tci_uint64(regs[r3], regs[r2]);
                oi = regs[r4];
            }
        do_ld_i64:
            tmp64 = tci_qemu_ld(env, taddr, oi, tb_ptr);
            if (TCG_TARGET_REG_BITS == 32) {
                tci_write_reg64(regs, r1, r0, tmp64);
            } else {
                regs[r0] = tmp64;
            }
            break;

        case INDEX_op_qemu_st_a32_i32:
            tci_args_rrm(insn, &r0, &r1, &oi);
            taddr = (uint32_t)regs[r1];
            goto do_st_i32;
        case INDEX_op_qemu_st_a64_i32:
            if (TCG_TARGET_REG_BITS == 64) {
                tci_args_rrm(insn, &r0, &r1, &oi);
                taddr = regs[r1];
            } else {
                tci_args_rrrr(insn, &r0, &r1, &r2, &r3);
                taddr = tci_uint64(regs[r2], regs[r1]);
                oi = regs[r3];
            }
        do_st_i32:
            tci_qemu_st(env, taddr, regs[r0], oi, tb_ptr);
            break;

        case INDEX_op_qemu_st_a32_i64:
            if (TCG_TARGET_REG_BITS == 64) {
                tci_args_rrm(insn, &r0, &r1, &oi);
                tmp64 = regs[r0];
                taddr = (uint32_t)regs[r1];
            } else {
                tci_args_rrrr(insn, &r0, &r1, &r2, &r3);
                tmp64 = tci_uint64(regs[r1], regs[r0]);
                taddr = (uint32_t)regs[r2];
                oi = regs[r3];
            }
            goto do_st_i64;
        case INDEX_op_qemu_st_a64_i64:
            if (TCG_TARGET_REG_BITS == 64) {
                tci_args_rrm(insn, &r0, &r1, &oi);
                tmp64 = regs[r0];
                taddr = regs[r1];
            } else {
                tci_args_rrrrr(insn, &r0, &r1, &r2, &r3, &r4);
                tmp64 = tci_uint64(regs[r1], regs[r0]);
                taddr = tci_uint64(regs[r3], regs[r2]);
                oi = regs[r4];
            }
        do_st_i64:
            tci_qemu_st(env, taddr, tmp64, oi, tb_ptr);
            break;

        case INDEX_op_mb:
            /* Ensure ordering for all kinds */
            smp_mb();
            break;
        default:
            g_assert_not_reached();
        }
    }
}

/*
 * Disassembler that matches the interpreter
 */
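
/*
 * print_insn_tci() decodes one 32-bit instruction word per call with
 * the same tci_args_* helpers used by the interpreter, printing the
 * raw word followed by a symbolic form of the operation and its
 * operands.
 */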
1028
1029static const char *str_r(TCGReg r)
1030{
1031 static const char regs[TCG_TARGET_NB_REGS][4] = {
1032 "r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
1033 "r8", "r9", "r10", "r11", "r12", "r13", "env", "sp"
1034 };
1035
1036 QEMU_BUILD_BUG_ON(TCG_AREG0 != TCG_REG_R14);
1037 QEMU_BUILD_BUG_ON(TCG_REG_CALL_STACK != TCG_REG_R15);
1038
1039 assert((unsigned)r < TCG_TARGET_NB_REGS);
1040 return regs[r];
1041}
1042
1043static const char *str_c(TCGCond c)
1044{
1045 static const char cond[16][8] = {
1046 [TCG_COND_NEVER] = "never",
1047 [TCG_COND_ALWAYS] = "always",
1048 [TCG_COND_EQ] = "eq",
1049 [TCG_COND_NE] = "ne",
1050 [TCG_COND_LT] = "lt",
1051 [TCG_COND_GE] = "ge",
1052 [TCG_COND_LE] = "le",
1053 [TCG_COND_GT] = "gt",
1054 [TCG_COND_LTU] = "ltu",
1055 [TCG_COND_GEU] = "geu",
1056 [TCG_COND_LEU] = "leu",
1057 [TCG_COND_GTU] = "gtu",
Richard Henderson23c56922023-10-24 22:52:26 -07001058 [TCG_COND_TSTEQ] = "tsteq",
1059 [TCG_COND_TSTNE] = "tstne",
Richard Henderson59964b42021-01-30 17:48:19 -08001060 };
1061
1062 assert((unsigned)c < ARRAY_SIZE(cond));
1063 assert(cond[c][0] != 0);
1064 return cond[c];
1065}
1066
1067/* Disassemble TCI bytecode. */
1068int print_insn_tci(bfd_vma addr, disassemble_info *info)
1069{
Richard Henderson65089882021-02-01 21:27:41 -10001070 const uint32_t *tb_ptr = (const void *)(uintptr_t)addr;
Richard Henderson59964b42021-01-30 17:48:19 -08001071 const TCGOpDef *def;
1072 const char *op_name;
Richard Henderson65089882021-02-01 21:27:41 -10001073 uint32_t insn;
Richard Henderson59964b42021-01-30 17:48:19 -08001074 TCGOpcode op;
Richard Henderson08096b12021-02-02 17:40:12 -08001075 TCGReg r0, r1, r2, r3, r4, r5;
Richard Henderson59964b42021-01-30 17:48:19 -08001076 tcg_target_ulong i1;
1077 int32_t s2;
1078 TCGCond c;
Richard Henderson9002ffc2021-07-25 12:06:49 -10001079 MemOpIdx oi;
Richard Henderson59964b42021-01-30 17:48:19 -08001080 uint8_t pos, len;
Richard Henderson65089882021-02-01 21:27:41 -10001081 void *ptr;
Richard Henderson59964b42021-01-30 17:48:19 -08001082
Richard Henderson65089882021-02-01 21:27:41 -10001083 /* TCI is always the host, so we don't need to load indirect. */
1084 insn = *tb_ptr++;
Richard Henderson59964b42021-01-30 17:48:19 -08001085
Richard Henderson65089882021-02-01 21:27:41 -10001086 info->fprintf_func(info->stream, "%08x ", insn);
Richard Henderson59964b42021-01-30 17:48:19 -08001087
Richard Henderson65089882021-02-01 21:27:41 -10001088 op = extract32(insn, 0, 8);
Richard Henderson59964b42021-01-30 17:48:19 -08001089 def = &tcg_op_defs[op];
1090 op_name = def->name;
Richard Henderson59964b42021-01-30 17:48:19 -08001091
1092 switch (op) {
1093 case INDEX_op_br:
Richard Henderson59964b42021-01-30 17:48:19 -08001094 case INDEX_op_exit_tb:
1095 case INDEX_op_goto_tb:
Richard Henderson65089882021-02-01 21:27:41 -10001096 tci_args_l(insn, tb_ptr, &ptr);
Richard Henderson59964b42021-01-30 17:48:19 -08001097 info->fprintf_func(info->stream, "%-12s %p", op_name, ptr);
1098 break;
1099
Richard Henderson6eea0432021-02-02 09:40:22 -10001100 case INDEX_op_goto_ptr:
1101 tci_args_r(insn, &r0);
1102 info->fprintf_func(info->stream, "%-12s %s", op_name, str_r(r0));
1103 break;
1104
Richard Henderson7b7d8b22021-01-30 14:24:25 -08001105 case INDEX_op_call:
Richard Henderson65089882021-02-01 21:27:41 -10001106 tci_args_nl(insn, tb_ptr, &len, &ptr);
1107 info->fprintf_func(info->stream, "%-12s %d, %p", op_name, len, ptr);
Richard Henderson7b7d8b22021-01-30 14:24:25 -08001108 break;
1109
Richard Henderson59964b42021-01-30 17:48:19 -08001110 case INDEX_op_brcond_i32:
1111 case INDEX_op_brcond_i64:
Richard Henderson65089882021-02-01 21:27:41 -10001112 tci_args_rl(insn, tb_ptr, &r0, &ptr);
Richard Hendersonfc8ec9e2021-02-01 09:41:20 -10001113 info->fprintf_func(info->stream, "%-12s %s, 0, ne, %p",
1114 op_name, str_r(r0), ptr);
Richard Henderson59964b42021-01-30 17:48:19 -08001115 break;
1116
1117 case INDEX_op_setcond_i32:
1118 case INDEX_op_setcond_i64:
Richard Henderson65089882021-02-01 21:27:41 -10001119 tci_args_rrrc(insn, &r0, &r1, &r2, &c);
Richard Henderson59964b42021-01-30 17:48:19 -08001120 info->fprintf_func(info->stream, "%-12s %s, %s, %s, %s",
1121 op_name, str_r(r0), str_r(r1), str_r(r2), str_c(c));
1122 break;
1123
Richard Henderson65089882021-02-01 21:27:41 -10001124 case INDEX_op_tci_movi:
1125 tci_args_ri(insn, &r0, &i1);
Richard Henderson59964b42021-01-30 17:48:19 -08001126 info->fprintf_func(info->stream, "%-12s %s, 0x%" TCG_PRIlx,
1127 op_name, str_r(r0), i1);
1128 break;
1129
Richard Henderson65089882021-02-01 21:27:41 -10001130 case INDEX_op_tci_movl:
1131 tci_args_rl(insn, tb_ptr, &r0, &ptr);
1132 info->fprintf_func(info->stream, "%-12s %s, %p",
1133 op_name, str_r(r0), ptr);
Richard Henderson59964b42021-01-30 17:48:19 -08001134 break;
Richard Henderson59964b42021-01-30 17:48:19 -08001135
    case INDEX_op_ld8u_i32:
    case INDEX_op_ld8u_i64:
    case INDEX_op_ld8s_i32:
    case INDEX_op_ld8s_i64:
    case INDEX_op_ld16u_i32:
    case INDEX_op_ld16u_i64:
    case INDEX_op_ld16s_i32:
    case INDEX_op_ld16s_i64:
    case INDEX_op_ld32u_i64:
    case INDEX_op_ld32s_i64:
    case INDEX_op_ld_i32:
    case INDEX_op_ld_i64:
    case INDEX_op_st8_i32:
    case INDEX_op_st8_i64:
    case INDEX_op_st16_i32:
    case INDEX_op_st16_i64:
    case INDEX_op_st32_i64:
    case INDEX_op_st_i32:
    case INDEX_op_st_i64:
        tci_args_rrs(insn, &r0, &r1, &s2);
        info->fprintf_func(info->stream, "%-12s %s, %s, %d",
                           op_name, str_r(r0), str_r(r1), s2);
        break;

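    /* Two-register ops: moves, extensions, byte swaps, not/neg, ctpop. */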
    case INDEX_op_mov_i32:
    case INDEX_op_mov_i64:
    case INDEX_op_ext8s_i32:
    case INDEX_op_ext8s_i64:
    case INDEX_op_ext8u_i32:
    case INDEX_op_ext8u_i64:
    case INDEX_op_ext16s_i32:
    case INDEX_op_ext16s_i64:
    case INDEX_op_ext16u_i32:
    case INDEX_op_ext32s_i64:
    case INDEX_op_ext32u_i64:
    case INDEX_op_ext_i32_i64:
    case INDEX_op_extu_i32_i64:
    case INDEX_op_bswap16_i32:
    case INDEX_op_bswap16_i64:
    case INDEX_op_bswap32_i32:
    case INDEX_op_bswap32_i64:
    case INDEX_op_bswap64_i64:
    case INDEX_op_not_i32:
    case INDEX_op_not_i64:
    case INDEX_op_neg_i32:
    case INDEX_op_neg_i64:
    case INDEX_op_ctpop_i32:
    case INDEX_op_ctpop_i64:
        tci_args_rr(insn, &r0, &r1);
        info->fprintf_func(info->stream, "%-12s %s, %s",
                           op_name, str_r(r0), str_r(r1));
        break;

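    /* Three-register ops: arithmetic, logic, shifts/rotates, clz/ctz. */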
    case INDEX_op_add_i32:
    case INDEX_op_add_i64:
    case INDEX_op_sub_i32:
    case INDEX_op_sub_i64:
    case INDEX_op_mul_i32:
    case INDEX_op_mul_i64:
    case INDEX_op_and_i32:
    case INDEX_op_and_i64:
    case INDEX_op_or_i32:
    case INDEX_op_or_i64:
    case INDEX_op_xor_i32:
    case INDEX_op_xor_i64:
    case INDEX_op_andc_i32:
    case INDEX_op_andc_i64:
    case INDEX_op_orc_i32:
    case INDEX_op_orc_i64:
    case INDEX_op_eqv_i32:
    case INDEX_op_eqv_i64:
    case INDEX_op_nand_i32:
    case INDEX_op_nand_i64:
    case INDEX_op_nor_i32:
    case INDEX_op_nor_i64:
    case INDEX_op_div_i32:
    case INDEX_op_div_i64:
    case INDEX_op_rem_i32:
    case INDEX_op_rem_i64:
    case INDEX_op_divu_i32:
    case INDEX_op_divu_i64:
    case INDEX_op_remu_i32:
    case INDEX_op_remu_i64:
    case INDEX_op_shl_i32:
    case INDEX_op_shl_i64:
    case INDEX_op_shr_i32:
    case INDEX_op_shr_i64:
    case INDEX_op_sar_i32:
    case INDEX_op_sar_i64:
    case INDEX_op_rotl_i32:
    case INDEX_op_rotl_i64:
    case INDEX_op_rotr_i32:
    case INDEX_op_rotr_i64:
    case INDEX_op_clz_i32:
    case INDEX_op_clz_i64:
    case INDEX_op_ctz_i32:
    case INDEX_op_ctz_i64:
        tci_args_rrr(insn, &r0, &r1, &r2);
        info->fprintf_func(info->stream, "%-12s %s, %s, %s",
                           op_name, str_r(r0), str_r(r1), str_r(r2));
        break;

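    /* Deposit and extract encode a bit position and length as immediates. */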
    case INDEX_op_deposit_i32:
    case INDEX_op_deposit_i64:
        tci_args_rrrbb(insn, &r0, &r1, &r2, &pos, &len);
        info->fprintf_func(info->stream, "%-12s %s, %s, %s, %d, %d",
                           op_name, str_r(r0), str_r(r1), str_r(r2), pos, len);
        break;

    case INDEX_op_extract_i32:
    case INDEX_op_extract_i64:
    case INDEX_op_sextract_i32:
    case INDEX_op_sextract_i64:
        tci_args_rrbb(insn, &r0, &r1, &pos, &len);
        info->fprintf_func(info->stream, "%-12s %s, %s, %d, %d",
                           op_name, str_r(r0), str_r(r1), pos, len);
        break;

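    /* movcond and the 32-bit setcond2 take five registers plus a condition. */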
    case INDEX_op_movcond_i32:
    case INDEX_op_movcond_i64:
    case INDEX_op_setcond2_i32:
        tci_args_rrrrrc(insn, &r0, &r1, &r2, &r3, &r4, &c);
        info->fprintf_func(info->stream, "%-12s %s, %s, %s, %s, %s, %s",
                           op_name, str_r(r0), str_r(r1), str_r(r2),
                           str_r(r3), str_r(r4), str_c(c));
        break;

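    /* Double-word multiplies: two destination registers and two sources. */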
    case INDEX_op_mulu2_i32:
    case INDEX_op_mulu2_i64:
    case INDEX_op_muls2_i32:
    case INDEX_op_muls2_i64:
        tci_args_rrrr(insn, &r0, &r1, &r2, &r3);
        info->fprintf_func(info->stream, "%-12s %s, %s, %s, %s",
                           op_name, str_r(r0), str_r(r1),
                           str_r(r2), str_r(r3));
        break;

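    /* Double-word add/sub: destinations and operands are register pairs. */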
    case INDEX_op_add2_i32:
    case INDEX_op_add2_i64:
    case INDEX_op_sub2_i32:
    case INDEX_op_sub2_i64:
        tci_args_rrrrrr(insn, &r0, &r1, &r2, &r3, &r4, &r5);
        info->fprintf_func(info->stream, "%-12s %s, %s, %s, %s, %s, %s",
                           op_name, str_r(r0), str_r(r1), str_r(r2),
                           str_r(r3), str_r(r4), str_r(r5));
        break;

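    /*
     * Guest memory accesses: the operand format depends on how many
     * host registers the data value and guest address need.  Only the
     * two-register form has room for the MemOpIdx in the instruction
     * word itself.
     */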
    case INDEX_op_qemu_ld_a32_i32:
    case INDEX_op_qemu_st_a32_i32:
        len = 1 + 1;
        goto do_qemu_ldst;
    case INDEX_op_qemu_ld_a32_i64:
    case INDEX_op_qemu_st_a32_i64:
    case INDEX_op_qemu_ld_a64_i32:
    case INDEX_op_qemu_st_a64_i32:
        len = 1 + DIV_ROUND_UP(64, TCG_TARGET_REG_BITS);
        goto do_qemu_ldst;
    case INDEX_op_qemu_ld_a64_i64:
    case INDEX_op_qemu_st_a64_i64:
        len = 2 * DIV_ROUND_UP(64, TCG_TARGET_REG_BITS);
        goto do_qemu_ldst;
    do_qemu_ldst:
        switch (len) {
        case 2:
            tci_args_rrm(insn, &r0, &r1, &oi);
            info->fprintf_func(info->stream, "%-12s %s, %s, %x",
                               op_name, str_r(r0), str_r(r1), oi);
            break;
        case 3:
            tci_args_rrrr(insn, &r0, &r1, &r2, &r3);
            info->fprintf_func(info->stream, "%-12s %s, %s, %s, %s",
                               op_name, str_r(r0), str_r(r1),
                               str_r(r2), str_r(r3));
            break;
        case 4:
            tci_args_rrrrr(insn, &r0, &r1, &r2, &r3, &r4);
            info->fprintf_func(info->stream, "%-12s %s, %s, %s, %s, %s",
                               op_name, str_r(r0), str_r(r1),
                               str_r(r2), str_r(r3), str_r(r4));
            break;
        default:
            g_assert_not_reached();
        }
        break;

    case 0:
        /* tcg_out_nop_fill uses zeros */
        if (insn == 0) {
            info->fprintf_func(info->stream, "align");
            break;
        }
        /* fall through */

    default:
        info->fprintf_func(info->stream, "illegal opcode %d", op);
        break;
    }

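    /* Every TCI instruction occupies exactly one 32-bit word. */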
    return sizeof(insn);
}