/*
 * Tiny Code Interpreter for QEMU
 *
 * Copyright (c) 2009, 2011, 2016 Stefan Weil
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */

#include "qemu/osdep.h"

/* Enable TCI assertions only when debugging TCG (and without NDEBUG defined).
 * Without assertions, the interpreter runs much faster. */
#if defined(CONFIG_DEBUG_TCG)
# define tci_assert(cond) assert(cond)
#else
# define tci_assert(cond) ((void)0)
#endif

#include "qemu-common.h"
#include "tcg/tcg.h"            /* MAX_OPC_PARAM_IARGS */
#include "exec/cpu_ldst.h"
#include "tcg-op.h"

/* Marker for missing code. */
#define TODO() \
    do { \
        fprintf(stderr, "TODO %s:%u: %s()\n", \
                __FILE__, __LINE__, __func__); \
        tcg_abort(); \
    } while (0)

#if MAX_OPC_PARAM_IARGS != 5
# error Fix needed, number of supported input arguments changed!
#endif
#if TCG_TARGET_REG_BITS == 32
typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong);
#else
typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong);
#endif
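
/* On 32-bit hosts each of the five input arguments occupies a register
 * pair, hence the ten tcg_target_ulong parameters above; 64-bit hosts
 * pass each argument in a single register. */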

static tcg_target_ulong tci_reg[TCG_TARGET_NB_REGS];

static tcg_target_ulong tci_read_reg(TCGReg index)
{
    tci_assert(index < ARRAY_SIZE(tci_reg));
    return tci_reg[index];
}

#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
static int8_t tci_read_reg8s(TCGReg index)
{
    return (int8_t)tci_read_reg(index);
}
#endif

#if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
static int16_t tci_read_reg16s(TCGReg index)
{
    return (int16_t)tci_read_reg(index);
}
#endif

#if TCG_TARGET_REG_BITS == 64
static int32_t tci_read_reg32s(TCGReg index)
{
    return (int32_t)tci_read_reg(index);
}
#endif

static uint8_t tci_read_reg8(TCGReg index)
{
    return (uint8_t)tci_read_reg(index);
}

static uint16_t tci_read_reg16(TCGReg index)
{
    return (uint16_t)tci_read_reg(index);
}

static uint32_t tci_read_reg32(TCGReg index)
{
    return (uint32_t)tci_read_reg(index);
}

#if TCG_TARGET_REG_BITS == 64
static uint64_t tci_read_reg64(TCGReg index)
{
    return tci_read_reg(index);
}
#endif

static void tci_write_reg(TCGReg index, tcg_target_ulong value)
{
    tci_assert(index < ARRAY_SIZE(tci_reg));
    tci_assert(index != TCG_AREG0);
    tci_assert(index != TCG_REG_CALL_STACK);
    tci_reg[index] = value;
}
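
/* TCG_AREG0 (the env pointer) and TCG_REG_CALL_STACK are set up once per
 * call to tcg_qemu_tb_exec() and must never be overwritten by generated
 * code, hence the assertions above. */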

#if TCG_TARGET_REG_BITS == 64
static void tci_write_reg32s(TCGReg index, int32_t value)
{
    tci_write_reg(index, value);
}
#endif

static void tci_write_reg8(TCGReg index, uint8_t value)
{
    tci_write_reg(index, value);
}

static void tci_write_reg32(TCGReg index, uint32_t value)
{
    tci_write_reg(index, value);
}

#if TCG_TARGET_REG_BITS == 32
static void tci_write_reg64(uint32_t high_index, uint32_t low_index,
                            uint64_t value)
{
    tci_write_reg(low_index, value);
    tci_write_reg(high_index, value >> 32);
}
#elif TCG_TARGET_REG_BITS == 64
static void tci_write_reg64(TCGReg index, uint64_t value)
{
    tci_write_reg(index, value);
}
#endif

#if TCG_TARGET_REG_BITS == 32
/* Create a 64 bit value from two 32 bit values. */
static uint64_t tci_uint64(uint32_t high, uint32_t low)
{
    return ((uint64_t)high << 32) + low;
}
#endif
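
/* Example: tci_uint64(0x00000001, 0x00000002) == 0x0000000100000002.
 * The same pairing convention (low word first in the bytecode) is used
 * wherever a 64-bit value is read from two 32-bit registers below. */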

/* Read constant (native size) from bytecode. */
static tcg_target_ulong tci_read_i(uint8_t **tb_ptr)
{
    tcg_target_ulong value = *(tcg_target_ulong *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}
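
/* Immediate operands are embedded directly in the bytecode stream in host
 * byte order; each reader advances *tb_ptr past the operand it consumed. */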

/* Read unsigned constant (32 bit) from bytecode. */
static uint32_t tci_read_i32(uint8_t **tb_ptr)
{
    uint32_t value = *(uint32_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

/* Read signed constant (32 bit) from bytecode. */
static int32_t tci_read_s32(uint8_t **tb_ptr)
{
    int32_t value = *(int32_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

#if TCG_TARGET_REG_BITS == 64
/* Read constant (64 bit) from bytecode. */
static uint64_t tci_read_i64(uint8_t **tb_ptr)
{
    uint64_t value = *(uint64_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}
#endif

/* Read indexed register (native size) from bytecode. */
static tcg_target_ulong tci_read_r(uint8_t **tb_ptr)
{
    tcg_target_ulong value = tci_read_reg(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

/* Read indexed register (8 bit) from bytecode. */
static uint8_t tci_read_r8(uint8_t **tb_ptr)
{
    uint8_t value = tci_read_reg8(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
/* Read indexed register (8 bit signed) from bytecode. */
static int8_t tci_read_r8s(uint8_t **tb_ptr)
{
    int8_t value = tci_read_reg8s(**tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

/* Read indexed register (16 bit) from bytecode. */
static uint16_t tci_read_r16(uint8_t **tb_ptr)
{
    uint16_t value = tci_read_reg16(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
/* Read indexed register (16 bit signed) from bytecode. */
static int16_t tci_read_r16s(uint8_t **tb_ptr)
{
    int16_t value = tci_read_reg16s(**tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

/* Read indexed register (32 bit) from bytecode. */
static uint32_t tci_read_r32(uint8_t **tb_ptr)
{
    uint32_t value = tci_read_reg32(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_REG_BITS == 32
/* Read two indexed registers (2 * 32 bit) from bytecode. */
static uint64_t tci_read_r64(uint8_t **tb_ptr)
{
    uint32_t low = tci_read_r32(tb_ptr);
    return tci_uint64(tci_read_r32(tb_ptr), low);
}
#elif TCG_TARGET_REG_BITS == 64
/* Read indexed register (32 bit signed) from bytecode. */
static int32_t tci_read_r32s(uint8_t **tb_ptr)
{
    int32_t value = tci_read_reg32s(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

/* Read indexed register (64 bit) from bytecode. */
static uint64_t tci_read_r64(uint8_t **tb_ptr)
{
    uint64_t value = tci_read_reg64(**tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

/* Read indexed register(s) with target address from bytecode. */
static target_ulong tci_read_ulong(uint8_t **tb_ptr)
{
    target_ulong taddr = tci_read_r(tb_ptr);
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
    taddr += (uint64_t)tci_read_r(tb_ptr) << 32;
#endif
    return taddr;
}
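
/* When the guest address space is wider than a host register (e.g. a
 * 64-bit guest on a 32-bit host), the target address is split across two
 * registers: low half first, high half second. */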

/* Read indexed register or constant (native size) from bytecode. */
static tcg_target_ulong tci_read_ri(uint8_t **tb_ptr)
{
    tcg_target_ulong value;
    TCGReg r = **tb_ptr;
    *tb_ptr += 1;
    if (r == TCG_CONST) {
        value = tci_read_i(tb_ptr);
    } else {
        value = tci_read_reg(r);
    }
    return value;
}
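
/* The reserved register index TCG_CONST marks an inline constant: instead
 * of naming a register, the operand byte is followed by an immediate value
 * of the appropriate width. The tci_read_ri* readers handle both forms. */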

/* Read indexed register or constant (32 bit) from bytecode. */
static uint32_t tci_read_ri32(uint8_t **tb_ptr)
{
    uint32_t value;
    TCGReg r = **tb_ptr;
    *tb_ptr += 1;
    if (r == TCG_CONST) {
        value = tci_read_i32(tb_ptr);
    } else {
        value = tci_read_reg32(r);
    }
    return value;
}

#if TCG_TARGET_REG_BITS == 32
/* Read two indexed registers or constants (2 * 32 bit) from bytecode. */
static uint64_t tci_read_ri64(uint8_t **tb_ptr)
{
    uint32_t low = tci_read_ri32(tb_ptr);
    return tci_uint64(tci_read_ri32(tb_ptr), low);
}
#elif TCG_TARGET_REG_BITS == 64
/* Read indexed register or constant (64 bit) from bytecode. */
static uint64_t tci_read_ri64(uint8_t **tb_ptr)
{
    uint64_t value;
    TCGReg r = **tb_ptr;
    *tb_ptr += 1;
    if (r == TCG_CONST) {
        value = tci_read_i64(tb_ptr);
    } else {
        value = tci_read_reg64(r);
    }
    return value;
}
#endif

static tcg_target_ulong tci_read_label(uint8_t **tb_ptr)
{
    tcg_target_ulong label = tci_read_i(tb_ptr);
    tci_assert(label != 0);
    return label;
}
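
/* Branch targets are encoded as absolute host addresses within the
 * bytecode buffer; a zero label would mean the branch was never patched,
 * so it is rejected by the assertion. */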

static bool tci_compare32(uint32_t u0, uint32_t u1, TCGCond condition)
{
    bool result = false;
    int32_t i0 = u0;
    int32_t i1 = u1;
    switch (condition) {
    case TCG_COND_EQ:
        result = (u0 == u1);
        break;
    case TCG_COND_NE:
        result = (u0 != u1);
        break;
    case TCG_COND_LT:
        result = (i0 < i1);
        break;
    case TCG_COND_GE:
        result = (i0 >= i1);
        break;
    case TCG_COND_LE:
        result = (i0 <= i1);
        break;
    case TCG_COND_GT:
        result = (i0 > i1);
        break;
    case TCG_COND_LTU:
        result = (u0 < u1);
        break;
    case TCG_COND_GEU:
        result = (u0 >= u1);
        break;
    case TCG_COND_LEU:
        result = (u0 <= u1);
        break;
    case TCG_COND_GTU:
        result = (u0 > u1);
        break;
    default:
        TODO();
    }
    return result;
}

static bool tci_compare64(uint64_t u0, uint64_t u1, TCGCond condition)
{
    bool result = false;
    int64_t i0 = u0;
    int64_t i1 = u1;
    switch (condition) {
    case TCG_COND_EQ:
        result = (u0 == u1);
        break;
    case TCG_COND_NE:
        result = (u0 != u1);
        break;
    case TCG_COND_LT:
        result = (i0 < i1);
        break;
    case TCG_COND_GE:
        result = (i0 >= i1);
        break;
    case TCG_COND_LE:
        result = (i0 <= i1);
        break;
    case TCG_COND_GT:
        result = (i0 > i1);
        break;
    case TCG_COND_LTU:
        result = (u0 < u1);
        break;
    case TCG_COND_GEU:
        result = (u0 >= u1);
        break;
    case TCG_COND_LEU:
        result = (u0 <= u1);
        break;
    case TCG_COND_GTU:
        result = (u0 > u1);
        break;
    default:
        TODO();
    }
    return result;
}
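
/* LT/GE/LE/GT compare the operands as signed integers (via i0/i1), while
 * the *U variants compare them as unsigned. Example: with u0 = 0xFFFFFFFF
 * and u1 = 0, TCG_COND_LT is true (-1 < 0) but TCG_COND_LTU is false. */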

#ifdef CONFIG_SOFTMMU
# define qemu_ld_ub \
    helper_ret_ldub_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_leuw \
    helper_le_lduw_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_leul \
    helper_le_ldul_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_leq \
    helper_le_ldq_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_beuw \
    helper_be_lduw_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_beul \
    helper_be_ldul_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_beq \
    helper_be_ldq_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_st_b(X) \
    helper_ret_stb_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_lew(X) \
    helper_le_stw_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_lel(X) \
    helper_le_stl_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_leq(X) \
    helper_le_stq_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_bew(X) \
    helper_be_stw_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_bel(X) \
    helper_be_stl_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_beq(X) \
    helper_be_stq_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
#else
# define qemu_ld_ub      ldub_p(g2h(taddr))
# define qemu_ld_leuw    lduw_le_p(g2h(taddr))
# define qemu_ld_leul    (uint32_t)ldl_le_p(g2h(taddr))
# define qemu_ld_leq     ldq_le_p(g2h(taddr))
# define qemu_ld_beuw    lduw_be_p(g2h(taddr))
# define qemu_ld_beul    (uint32_t)ldl_be_p(g2h(taddr))
# define qemu_ld_beq     ldq_be_p(g2h(taddr))
# define qemu_st_b(X)    stb_p(g2h(taddr), X)
# define qemu_st_lew(X)  stw_le_p(g2h(taddr), X)
# define qemu_st_lel(X)  stl_le_p(g2h(taddr), X)
# define qemu_st_leq(X)  stq_le_p(g2h(taddr), X)
# define qemu_st_bew(X)  stw_be_p(g2h(taddr), X)
# define qemu_st_bel(X)  stl_be_p(g2h(taddr), X)
# define qemu_st_beq(X)  stq_be_p(g2h(taddr), X)
#endif
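
/* With CONFIG_SOFTMMU, guest memory accesses go through the softmmu
 * helpers, which perform a TLB lookup; (uintptr_t)tb_ptr is passed as the
 * return address so a miss can be attributed to the current instruction.
 * In user-mode emulation, g2h() maps the guest address directly to a host
 * pointer. */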

/* Interpret pseudo code in tb. */
uintptr_t tcg_qemu_tb_exec(CPUArchState *env, uint8_t *tb_ptr)
{
    long tcg_temps[CPU_TEMP_BUF_NLONGS];
    uintptr_t sp_value = (uintptr_t)(tcg_temps + CPU_TEMP_BUF_NLONGS);
    uintptr_t ret = 0;

    tci_reg[TCG_AREG0] = (tcg_target_ulong)env;
    tci_reg[TCG_REG_CALL_STACK] = sp_value;
    tci_assert(tb_ptr);

    for (;;) {
        TCGOpcode opc = tb_ptr[0];
#if defined(CONFIG_DEBUG_TCG) && !defined(NDEBUG)
        uint8_t op_size = tb_ptr[1];
        uint8_t *old_code_ptr = tb_ptr;
#endif
        tcg_target_ulong t0;
        tcg_target_ulong t1;
        tcg_target_ulong t2;
        tcg_target_ulong label;
        TCGCond condition;
        target_ulong taddr;
        uint8_t tmp8;
        uint16_t tmp16;
        uint32_t tmp32;
        uint64_t tmp64;
#if TCG_TARGET_REG_BITS == 32
        uint64_t v64;
#endif
        TCGMemOpIdx oi;

#if defined(GETPC)
        tci_tb_ptr = (uintptr_t)tb_ptr;
#endif

        /* Skip opcode and size entry. */
        tb_ptr += 2;

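        /* Each encoded instruction starts with a one-byte opcode and a
         * one-byte total length, followed by its operands (register
         * indices, immediates, labels); the length byte is only consulted
         * by the tci_assert() consistency checks. */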
        switch (opc) {
        case INDEX_op_call:
            t0 = tci_read_ri(&tb_ptr);
#if TCG_TARGET_REG_BITS == 32
            tmp64 = ((helper_function)t0)(tci_read_reg(TCG_REG_R0),
                                          tci_read_reg(TCG_REG_R1),
                                          tci_read_reg(TCG_REG_R2),
                                          tci_read_reg(TCG_REG_R3),
                                          tci_read_reg(TCG_REG_R5),
                                          tci_read_reg(TCG_REG_R6),
                                          tci_read_reg(TCG_REG_R7),
                                          tci_read_reg(TCG_REG_R8),
                                          tci_read_reg(TCG_REG_R9),
                                          tci_read_reg(TCG_REG_R10));
            tci_write_reg(TCG_REG_R0, tmp64);
            tci_write_reg(TCG_REG_R1, tmp64 >> 32);
#else
            tmp64 = ((helper_function)t0)(tci_read_reg(TCG_REG_R0),
                                          tci_read_reg(TCG_REG_R1),
                                          tci_read_reg(TCG_REG_R2),
                                          tci_read_reg(TCG_REG_R3),
                                          tci_read_reg(TCG_REG_R5));
            tci_write_reg(TCG_REG_R0, tmp64);
#endif
            break;
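
            /* Helper calls pass arguments in the fixed registers read
             * above and return the (up to 64-bit) result in R0, plus R1
             * for the high half on 32-bit hosts. R4 is deliberately absent
             * from the argument list; it appears to be reserved by this
             * backend rather than used for argument passing. */
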
        case INDEX_op_br:
            label = tci_read_label(&tb_ptr);
            tci_assert(tb_ptr == old_code_ptr + op_size);
            tb_ptr = (uint8_t *)label;
            continue;
        case INDEX_op_setcond_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg32(t0, tci_compare32(t1, t2, condition));
            break;
#if TCG_TARGET_REG_BITS == 32
        case INDEX_op_setcond2_i32:
            t0 = *tb_ptr++;
            tmp64 = tci_read_r64(&tb_ptr);
            v64 = tci_read_ri64(&tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg32(t0, tci_compare64(tmp64, v64, condition));
            break;
#elif TCG_TARGET_REG_BITS == 64
        case INDEX_op_setcond_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg64(t0, tci_compare64(t1, t2, condition));
            break;
#endif
        case INDEX_op_mov_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
        case INDEX_op_movi_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_i32(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;

            /* Load/store operations (32 bit). */

        case INDEX_op_ld8u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg8(t0, *(uint8_t *)(t1 + t2));
            break;
        case INDEX_op_ld8s_i32:
        case INDEX_op_ld16u_i32:
            TODO();
            break;
        case INDEX_op_ld16s_i32:
            TODO();
            break;
        case INDEX_op_ld_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg32(t0, *(uint32_t *)(t1 + t2));
            break;
        case INDEX_op_st8_i32:
            t0 = tci_read_r8(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint8_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st16_i32:
            t0 = tci_read_r16(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint16_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st_i32:
            t0 = tci_read_r32(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_assert(t1 != sp_value || (int32_t)t2 < 0);
            *(uint32_t *)(t1 + t2) = t0;
            break;

            /* Arithmetic operations (32 bit). */

        case INDEX_op_add_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 + t2);
            break;
        case INDEX_op_sub_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 - t2);
            break;
        case INDEX_op_mul_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 * t2);
            break;
#if TCG_TARGET_HAS_div_i32
        case INDEX_op_div_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, (int32_t)t1 / (int32_t)t2);
            break;
        case INDEX_op_divu_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 / t2);
            break;
        case INDEX_op_rem_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, (int32_t)t1 % (int32_t)t2);
            break;
        case INDEX_op_remu_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 % t2);
            break;
#elif TCG_TARGET_HAS_div2_i32
        case INDEX_op_div2_i32:
        case INDEX_op_divu2_i32:
            TODO();
            break;
#endif
        case INDEX_op_and_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 & t2);
            break;
        case INDEX_op_or_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 | t2);
            break;
        case INDEX_op_xor_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 ^ t2);
            break;

            /* Shift/rotate operations (32 bit). */

        case INDEX_op_shl_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 << (t2 & 31));
            break;
        case INDEX_op_shr_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 >> (t2 & 31));
            break;
        case INDEX_op_sar_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, ((int32_t)t1 >> (t2 & 31)));
            break;
#if TCG_TARGET_HAS_rot_i32
        case INDEX_op_rotl_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, rol32(t1, t2 & 31));
            break;
        case INDEX_op_rotr_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, ror32(t1, t2 & 31));
            break;
#endif
#if TCG_TARGET_HAS_deposit_i32
        case INDEX_op_deposit_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            t2 = tci_read_r32(&tb_ptr);
            tmp16 = *tb_ptr++;
            tmp8 = *tb_ptr++;
            tmp32 = (((1 << tmp8) - 1) << tmp16);
            tci_write_reg32(t0, (t1 & ~tmp32) | ((t2 << tmp16) & tmp32));
            break;
#endif
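
            /* The deposit operation above inserts the low tmp8 bits of t2
             * into t1 at bit position tmp16. Example: tmp16 = 16, tmp8 = 8
             * gives the mask 0x00FF0000, so byte 2 of t1 is replaced by
             * the low byte of t2. */
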
        case INDEX_op_brcond_i32:
            t0 = tci_read_r32(&tb_ptr);
            t1 = tci_read_ri32(&tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare32(t0, t1, condition)) {
                tci_assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
#if TCG_TARGET_REG_BITS == 32
        case INDEX_op_add2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            tmp64 = tci_read_r64(&tb_ptr);
            tmp64 += tci_read_r64(&tb_ptr);
            tci_write_reg64(t1, t0, tmp64);
            break;
        case INDEX_op_sub2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            tmp64 = tci_read_r64(&tb_ptr);
            tmp64 -= tci_read_r64(&tb_ptr);
            tci_write_reg64(t1, t0, tmp64);
            break;
        case INDEX_op_brcond2_i32:
            tmp64 = tci_read_r64(&tb_ptr);
            v64 = tci_read_ri64(&tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare64(tmp64, v64, condition)) {
                tci_assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
        case INDEX_op_mulu2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            t2 = tci_read_r32(&tb_ptr);
            tmp64 = tci_read_r32(&tb_ptr);
            tci_write_reg64(t1, t0, t2 * tmp64);
            break;
#endif /* TCG_TARGET_REG_BITS == 32 */
#if TCG_TARGET_HAS_ext8s_i32
        case INDEX_op_ext8s_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r8s(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16s_i32
        case INDEX_op_ext16s_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16s(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext8u_i32
        case INDEX_op_ext8u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r8(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16u_i32
        case INDEX_op_ext16u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_bswap16_i32
        case INDEX_op_bswap16_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(&tb_ptr);
            tci_write_reg32(t0, bswap16(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap32_i32
        case INDEX_op_bswap32_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg32(t0, bswap32(t1));
            break;
#endif
#if TCG_TARGET_HAS_not_i32
        case INDEX_op_not_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg32(t0, ~t1);
            break;
#endif
#if TCG_TARGET_HAS_neg_i32
        case INDEX_op_neg_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg32(t0, -t1);
            break;
#endif
#if TCG_TARGET_REG_BITS == 64
        case INDEX_op_mov_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
        case INDEX_op_movi_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_i64(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;

            /* Load/store operations (64 bit). */

        case INDEX_op_ld8u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg8(t0, *(uint8_t *)(t1 + t2));
            break;
        case INDEX_op_ld8s_i64:
        case INDEX_op_ld16u_i64:
        case INDEX_op_ld16s_i64:
            TODO();
            break;
        case INDEX_op_ld32u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg32(t0, *(uint32_t *)(t1 + t2));
            break;
        case INDEX_op_ld32s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg32s(t0, *(int32_t *)(t1 + t2));
            break;
        case INDEX_op_ld_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg64(t0, *(uint64_t *)(t1 + t2));
            break;
        case INDEX_op_st8_i64:
            t0 = tci_read_r8(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint8_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st16_i64:
            t0 = tci_read_r16(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint16_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st32_i64:
            t0 = tci_read_r32(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint32_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st_i64:
            t0 = tci_read_r64(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_assert(t1 != sp_value || (int32_t)t2 < 0);
            *(uint64_t *)(t1 + t2) = t0;
            break;

            /* Arithmetic operations (64 bit). */

        case INDEX_op_add_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 + t2);
            break;
        case INDEX_op_sub_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 - t2);
            break;
        case INDEX_op_mul_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 * t2);
            break;
#if TCG_TARGET_HAS_div_i64
        case INDEX_op_div_i64:
        case INDEX_op_divu_i64:
        case INDEX_op_rem_i64:
        case INDEX_op_remu_i64:
            TODO();
            break;
#elif TCG_TARGET_HAS_div2_i64
        case INDEX_op_div2_i64:
        case INDEX_op_divu2_i64:
            TODO();
            break;
#endif
        case INDEX_op_and_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 & t2);
            break;
        case INDEX_op_or_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 | t2);
            break;
        case INDEX_op_xor_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 ^ t2);
            break;

            /* Shift/rotate operations (64 bit). */

        case INDEX_op_shl_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 << (t2 & 63));
            break;
        case INDEX_op_shr_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 >> (t2 & 63));
            break;
        case INDEX_op_sar_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, ((int64_t)t1 >> (t2 & 63)));
            break;
#if TCG_TARGET_HAS_rot_i64
        case INDEX_op_rotl_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, rol64(t1, t2 & 63));
            break;
        case INDEX_op_rotr_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, ror64(t1, t2 & 63));
            break;
#endif
#if TCG_TARGET_HAS_deposit_i64
        case INDEX_op_deposit_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            t2 = tci_read_r64(&tb_ptr);
            tmp16 = *tb_ptr++;
            tmp8 = *tb_ptr++;
            tmp64 = (((1ULL << tmp8) - 1) << tmp16);
            tci_write_reg64(t0, (t1 & ~tmp64) | ((t2 << tmp16) & tmp64));
            break;
#endif
        case INDEX_op_brcond_i64:
            t0 = tci_read_r64(&tb_ptr);
            t1 = tci_read_ri64(&tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare64(t0, t1, condition)) {
                tci_assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
#if TCG_TARGET_HAS_ext8u_i64
        case INDEX_op_ext8u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r8(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext8s_i64
        case INDEX_op_ext8s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r8s(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16s_i64
        case INDEX_op_ext16s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r16s(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16u_i64
        case INDEX_op_ext16u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext32s_i64
        case INDEX_op_ext32s_i64:
#endif
        case INDEX_op_ext_i32_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32s(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#if TCG_TARGET_HAS_ext32u_i64
        case INDEX_op_ext32u_i64:
#endif
        case INDEX_op_extu_i32_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#if TCG_TARGET_HAS_bswap16_i64
        case INDEX_op_bswap16_i64:
            TODO();
            t0 = *tb_ptr++;
            t1 = tci_read_r16(&tb_ptr);
            tci_write_reg64(t0, bswap16(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap32_i64
        case INDEX_op_bswap32_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg64(t0, bswap32(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap64_i64
        case INDEX_op_bswap64_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            tci_write_reg64(t0, bswap64(t1));
            break;
#endif
#if TCG_TARGET_HAS_not_i64
        case INDEX_op_not_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            tci_write_reg64(t0, ~t1);
            break;
#endif
#if TCG_TARGET_HAS_neg_i64
        case INDEX_op_neg_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            tci_write_reg64(t0, -t1);
            break;
#endif
#endif /* TCG_TARGET_REG_BITS == 64 */

            /* QEMU specific operations. */

        case INDEX_op_exit_tb:
            ret = *(uint64_t *)tb_ptr;
            goto exit;
            break;
        case INDEX_op_goto_tb:
            /* Jump address is aligned */
            tb_ptr = QEMU_ALIGN_PTR_UP(tb_ptr, 4);
            t0 = atomic_read((int32_t *)tb_ptr);
            tb_ptr += sizeof(int32_t);
            tci_assert(tb_ptr == old_code_ptr + op_size);
            tb_ptr += (int32_t)t0;
            continue;
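
            /* The 32-bit jump offset above is read with atomic_read()
             * because TB chaining may patch it while another thread is
             * executing this block; a zero offset simply continues at the
             * next bytecode instruction. */
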
        case INDEX_op_qemu_ld_i32:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SSIZE)) {
            case MO_UB:
                tmp32 = qemu_ld_ub;
                break;
            case MO_SB:
                tmp32 = (int8_t)qemu_ld_ub;
                break;
            case MO_LEUW:
                tmp32 = qemu_ld_leuw;
                break;
            case MO_LESW:
                tmp32 = (int16_t)qemu_ld_leuw;
                break;
            case MO_LEUL:
                tmp32 = qemu_ld_leul;
                break;
            case MO_BEUW:
                tmp32 = qemu_ld_beuw;
                break;
            case MO_BESW:
                tmp32 = (int16_t)qemu_ld_beuw;
                break;
            case MO_BEUL:
                tmp32 = qemu_ld_beul;
                break;
            default:
                tcg_abort();
            }
            tci_write_reg(t0, tmp32);
            break;
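
            /* For all qemu_ld/st cases, oi is a TCGMemOpIdx that packs the
             * memory operation (size, sign, endianness, extracted with
             * get_memop()) together with the mmu index consumed by the
             * softmmu helpers. */
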
        case INDEX_op_qemu_ld_i64:
            t0 = *tb_ptr++;
            if (TCG_TARGET_REG_BITS == 32) {
                t1 = *tb_ptr++;
            }
            taddr = tci_read_ulong(&tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SSIZE)) {
            case MO_UB:
                tmp64 = qemu_ld_ub;
                break;
            case MO_SB:
                tmp64 = (int8_t)qemu_ld_ub;
                break;
            case MO_LEUW:
                tmp64 = qemu_ld_leuw;
                break;
            case MO_LESW:
                tmp64 = (int16_t)qemu_ld_leuw;
                break;
            case MO_LEUL:
                tmp64 = qemu_ld_leul;
                break;
            case MO_LESL:
                tmp64 = (int32_t)qemu_ld_leul;
                break;
            case MO_LEQ:
                tmp64 = qemu_ld_leq;
                break;
            case MO_BEUW:
                tmp64 = qemu_ld_beuw;
                break;
            case MO_BESW:
                tmp64 = (int16_t)qemu_ld_beuw;
                break;
            case MO_BEUL:
                tmp64 = qemu_ld_beul;
                break;
            case MO_BESL:
                tmp64 = (int32_t)qemu_ld_beul;
                break;
            case MO_BEQ:
                tmp64 = qemu_ld_beq;
                break;
            default:
                tcg_abort();
            }
            tci_write_reg(t0, tmp64);
            if (TCG_TARGET_REG_BITS == 32) {
                tci_write_reg(t1, tmp64 >> 32);
            }
            break;
        case INDEX_op_qemu_st_i32:
            t0 = tci_read_r(&tb_ptr);
            taddr = tci_read_ulong(&tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SIZE)) {
            case MO_UB:
                qemu_st_b(t0);
                break;
            case MO_LEUW:
                qemu_st_lew(t0);
                break;
            case MO_LEUL:
                qemu_st_lel(t0);
                break;
            case MO_BEUW:
                qemu_st_bew(t0);
                break;
            case MO_BEUL:
                qemu_st_bel(t0);
                break;
            default:
                tcg_abort();
            }
            break;
        case INDEX_op_qemu_st_i64:
            tmp64 = tci_read_r64(&tb_ptr);
            taddr = tci_read_ulong(&tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SIZE)) {
            case MO_UB:
                qemu_st_b(tmp64);
                break;
            case MO_LEUW:
                qemu_st_lew(tmp64);
                break;
            case MO_LEUL:
                qemu_st_lel(tmp64);
                break;
            case MO_LEQ:
                qemu_st_leq(tmp64);
                break;
            case MO_BEUW:
                qemu_st_bew(tmp64);
                break;
            case MO_BEUL:
                qemu_st_bel(tmp64);
                break;
            case MO_BEQ:
                qemu_st_beq(tmp64);
                break;
            default:
                tcg_abort();
            }
            break;
        case INDEX_op_mb:
            /* Ensure ordering for all kinds */
            smp_mb();
            break;
        default:
            TODO();
            break;
        }
        tci_assert(tb_ptr == old_code_ptr + op_size);
    }
exit:
    return ret;
}