blob: 1f9ff97b63be1a32dc380b8ae6452c4f06b23257 [file] [log] [blame]
bellard2c0262a2003-09-30 20:34:21 +00001/*
2 * ARM translation
ths5fafdf22007-09-16 21:08:06 +00003 *
bellard2c0262a2003-09-30 20:34:21 +00004 * Copyright (c) 2003 Fabrice Bellard
pbrook9ee6e8b2007-11-11 00:04:49 +00005 * Copyright (c) 2005-2007 CodeSourcery
balrog18c9b562007-04-30 02:02:17 +00006 * Copyright (c) 2007 OpenedHand, Ltd.
bellard2c0262a2003-09-30 20:34:21 +00007 *
8 * This library is free software; you can redistribute it and/or
9 * modify it under the terms of the GNU Lesser General Public
10 * License as published by the Free Software Foundation; either
11 * version 2 of the License, or (at your option) any later version.
12 *
13 * This library is distributed in the hope that it will be useful,
14 * but WITHOUT ANY WARRANTY; without even the implied warranty of
15 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
16 * Lesser General Public License for more details.
17 *
18 * You should have received a copy of the GNU Lesser General Public
Blue Swirl8167ee82009-07-16 20:47:01 +000019 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
bellard2c0262a2003-09-30 20:34:21 +000020 */
Peter Maydell74c21bd2015-12-07 16:23:44 +000021#include "qemu/osdep.h"
bellard2c0262a2003-09-30 20:34:21 +000022
23#include "cpu.h"
Peter Maydellccd38082014-04-15 19:18:37 +010024#include "internals.h"
Paolo Bonzini76cad712012-10-24 11:12:21 +020025#include "disas/disas.h"
Paolo Bonzini63c91552016-03-15 13:18:37 +010026#include "exec/exec-all.h"
bellard57fec1f2008-02-01 10:50:11 +000027#include "tcg-op.h"
Richard Henderson36a71932018-03-02 10:45:42 +000028#include "tcg-op-gvec.h"
Paolo Bonzini1de7afc2012-12-17 18:20:00 +010029#include "qemu/log.h"
Peter Maydell534df152013-09-10 19:09:32 +010030#include "qemu/bitops.h"
Markus Armbruster90c84c52019-04-17 21:18:02 +020031#include "qemu/qemu-print.h"
Paolo Bonzini1d854762014-03-28 19:09:49 +010032#include "arm_ldst.h"
Alex Bennéef1672e62019-05-13 14:43:57 +010033#include "hw/semihosting/semihost.h"
pbrook1497c962008-03-31 03:45:50 +000034
Richard Henderson2ef61752014-04-07 22:31:41 -070035#include "exec/helper-proto.h"
36#include "exec/helper-gen.h"
bellard2c0262a2003-09-30 20:34:21 +000037
Lluís Vilanovaa7e30d82014-05-30 14:12:25 +020038#include "trace-tcg.h"
Paolo Bonzini508127e2016-01-07 16:55:28 +030039#include "exec/log.h"
Lluís Vilanovaa7e30d82014-05-30 14:12:25 +020040
41
Peter Maydell2b516682014-10-28 19:24:00 +000042#define ENABLE_ARCH_4T arm_dc_feature(s, ARM_FEATURE_V4T)
43#define ENABLE_ARCH_5 arm_dc_feature(s, ARM_FEATURE_V5)
Dmitry Eremin-Solenikovbe5e7a72011-04-04 17:38:44 +040044/* currently all emulated v5 cores are also v5TE, so don't bother */
Peter Maydell2b516682014-10-28 19:24:00 +000045#define ENABLE_ARCH_5TE arm_dc_feature(s, ARM_FEATURE_V5)
Richard Henderson09cbd502018-10-24 07:50:17 +010046#define ENABLE_ARCH_5J dc_isar_feature(jazelle, s)
Peter Maydell2b516682014-10-28 19:24:00 +000047#define ENABLE_ARCH_6 arm_dc_feature(s, ARM_FEATURE_V6)
48#define ENABLE_ARCH_6K arm_dc_feature(s, ARM_FEATURE_V6K)
49#define ENABLE_ARCH_6T2 arm_dc_feature(s, ARM_FEATURE_THUMB2)
50#define ENABLE_ARCH_7 arm_dc_feature(s, ARM_FEATURE_V7)
51#define ENABLE_ARCH_8 arm_dc_feature(s, ARM_FEATURE_V8)
bellardb5ff1b32005-11-26 10:38:39 +000052
pbrook86753402008-10-22 20:35:54 +000053#define ARCH(x) do { if (!ENABLE_ARCH_##x) goto illegal_op; } while(0)
bellardb5ff1b32005-11-26 10:38:39 +000054
Alexander Graff570c612013-09-03 20:12:03 +010055#include "translate.h"
Peter Maydelle12ce782011-01-14 20:39:19 +010056
bellardb5ff1b32005-11-26 10:38:39 +000057#if defined(CONFIG_USER_ONLY)
58#define IS_USER(s) 1
59#else
60#define IS_USER(s) (s->user)
61#endif
62
pbrookad694712008-03-31 03:48:30 +000063/* We reuse the same 64-bit temporaries for efficiency. */
pbrooka7812ae2008-11-17 14:43:54 +000064static TCGv_i64 cpu_V0, cpu_V1, cpu_M0;
Filip Navara155c3ea2009-10-15 12:00:41 +020065static TCGv_i32 cpu_R[16];
Richard Henderson78bcaa32015-09-14 14:39:47 +010066TCGv_i32 cpu_CF, cpu_NF, cpu_VF, cpu_ZF;
67TCGv_i64 cpu_exclusive_addr;
68TCGv_i64 cpu_exclusive_val;
pbrookad694712008-03-31 03:48:30 +000069
pbrookb26eefb2008-03-31 03:44:26 +000070/* FIXME: These should be removed. */
Peter Maydell39d54922013-05-23 12:59:55 +010071static TCGv_i32 cpu_F0s, cpu_F1s;
pbrooka7812ae2008-11-17 14:43:54 +000072static TCGv_i64 cpu_F0d, cpu_F1d;
pbrookb26eefb2008-03-31 03:44:26 +000073
Paolo Bonzini022c62c2012-12-17 18:19:49 +010074#include "exec/gen-icount.h"
pbrook2e70f6e2008-06-29 01:03:05 +000075
Richard Henderson308e5632018-10-24 07:50:18 +010076static const char * const regnames[] =
Filip Navara155c3ea2009-10-15 12:00:41 +020077 { "r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
78 "r8", "r9", "r10", "r11", "r12", "r13", "r14", "pc" };
79
Richard Henderson61adacc2018-03-02 10:45:42 +000080/* Function prototypes for gen_ functions calling Neon helpers. */
81typedef void NeonGenThreeOpEnvFn(TCGv_i32, TCGv_env, TCGv_i32,
82 TCGv_i32, TCGv_i32);
83
pbrookb26eefb2008-03-31 03:44:26 +000084/* initialize TCG globals. */
/* Register the TCG globals backed by fields of CPUARMState.
 * Called once at translator startup; the AArch64 globals are set up
 * by a64_translate_init() at the end.
 */
void arm_translate_init(void)
{
    int i;

    /* The 16 AArch32 general-purpose registers (r15 == pc). */
    for (i = 0; i < 16; i++) {
        cpu_R[i] = tcg_global_mem_new_i32(cpu_env,
                                          offsetof(CPUARMState, regs[i]),
                                          regnames[i]);
    }
    /* The four condition flags, kept in expanded (one word each) form. */
    cpu_CF = tcg_global_mem_new_i32(cpu_env, offsetof(CPUARMState, CF), "CF");
    cpu_NF = tcg_global_mem_new_i32(cpu_env, offsetof(CPUARMState, NF), "NF");
    cpu_VF = tcg_global_mem_new_i32(cpu_env, offsetof(CPUARMState, VF), "VF");
    cpu_ZF = tcg_global_mem_new_i32(cpu_env, offsetof(CPUARMState, ZF), "ZF");

    /* State for exclusive (LDREX/STREX) monitors. */
    cpu_exclusive_addr = tcg_global_mem_new_i64(cpu_env,
        offsetof(CPUARMState, exclusive_addr), "exclusive_addr");
    cpu_exclusive_val = tcg_global_mem_new_i64(cpu_env,
        offsetof(CPUARMState, exclusive_val), "exclusive_val");

    a64_translate_init();
}
106
Peter Maydell9bb65582017-02-07 18:30:00 +0000107/* Flags for the disas_set_da_iss info argument:
108 * lower bits hold the Rt register number, higher bits are flags.
109 */
typedef enum ISSInfo {
    ISSNone = 0,
    ISSRegMask = 0x1f,      /* low 5 bits: the Rt register number */
    ISSInvalid = (1 << 5),  /* no valid ISS for this access; emit nothing */
    ISSIsAcqRel = (1 << 6), /* access has acquire/release semantics */
    ISSIsWrite = (1 << 7),  /* access is a store */
    ISSIs16Bit = (1 << 8),  /* insn is a 16-bit (Thumb) encoding */
} ISSInfo;
118
119/* Save the syndrome information for a Data Abort */
static void disas_set_da_iss(DisasContext *s, TCGMemOp memop, ISSInfo issinfo)
{
    uint32_t syn;
    int sas = memop & MO_SIZE;        /* access size, log2(bytes) */
    bool sse = memop & MO_SIGN;       /* sign-extended load */
    bool is_acqrel = issinfo & ISSIsAcqRel;
    bool is_write = issinfo & ISSIsWrite;
    bool is_16bit = issinfo & ISSIs16Bit;
    int srt = issinfo & ISSRegMask;   /* transfer register number */

    if (issinfo & ISSInvalid) {
        /* Some callsites want to conditionally provide ISS info,
         * eg "only if this was not a writeback"
         */
        return;
    }

    if (srt == 15) {
        /* For AArch32, insns where the src/dest is R15 never generate
         * ISS information. Catching that here saves checking at all
         * the call sites.
         */
        return;
    }

    /* Build the syndrome and stash it so a fault on this insn reports it. */
    syn = syn_data_abort_with_iss(0, sas, sse, srt, 0, is_acqrel,
                                  0, 0, 0, is_write, 0, is_16bit);
    disas_set_insn_syndrome(s, syn);
}
149
static inline int get_a32_user_mem_index(DisasContext *s)
{
    /* Return the core mmu_idx to use for A32/T32 "unprivileged load/store"
     * insns:
     *  if PL2, UNPREDICTABLE (we choose to implement as if PL0)
     *  otherwise, access as if at PL0.
     * Each group below maps the current translation regime to the
     * corresponding unprivileged regime.
     */
    switch (s->mmu_idx) {
    case ARMMMUIdx_S1E2:        /* this one is UNPREDICTABLE */
    case ARMMMUIdx_S12NSE0:
    case ARMMMUIdx_S12NSE1:
        return arm_to_core_mmu_idx(ARMMMUIdx_S12NSE0);
    case ARMMMUIdx_S1E3:
    case ARMMMUIdx_S1SE0:
    case ARMMMUIdx_S1SE1:
        return arm_to_core_mmu_idx(ARMMMUIdx_S1SE0);
    case ARMMMUIdx_MUser:
    case ARMMMUIdx_MPriv:
        return arm_to_core_mmu_idx(ARMMMUIdx_MUser);
    case ARMMMUIdx_MUserNegPri:
    case ARMMMUIdx_MPrivNegPri:
        return arm_to_core_mmu_idx(ARMMMUIdx_MUserNegPri);
    case ARMMMUIdx_MSUser:
    case ARMMMUIdx_MSPriv:
        return arm_to_core_mmu_idx(ARMMMUIdx_MSUser);
    case ARMMMUIdx_MSUserNegPri:
    case ARMMMUIdx_MSPrivNegPri:
        return arm_to_core_mmu_idx(ARMMMUIdx_MSUserNegPri);
    case ARMMMUIdx_S2NS:        /* stage-2 regimes never reach here */
    default:
        g_assert_not_reached();
    }
}
183
/* Return a new temporary loaded from the CPUARMState field at OFFSET.
 * Caller owns (and must free) the returned temporary.
 */
static inline TCGv_i32 load_cpu_offset(int offset)
{
    TCGv_i32 tmp = tcg_temp_new_i32();
    tcg_gen_ld_i32(tmp, cpu_env, offset);
    return tmp;
}
190
Andreas Färber0ecb72a2012-03-14 01:38:21 +0100191#define load_cpu_field(name) load_cpu_offset(offsetof(CPUARMState, name))
pbrookd9ba4832008-03-31 03:46:50 +0000192
/* Store VAR into the CPUARMState field at OFFSET; VAR is freed here. */
static inline void store_cpu_offset(TCGv_i32 var, int offset)
{
    tcg_gen_st_i32(var, cpu_env, offset);
    tcg_temp_free_i32(var);
}
198
199#define store_cpu_field(var, name) \
Andreas Färber0ecb72a2012-03-14 01:38:21 +0100200 store_cpu_offset(var, offsetof(CPUARMState, name))
pbrookd9ba4832008-03-31 03:46:50 +0000201
pbrookb26eefb2008-03-31 03:44:26 +0000202/* Set a variable to the value of a CPU register. */
Peter Maydell39d54922013-05-23 12:59:55 +0100203static void load_reg_var(DisasContext *s, TCGv_i32 var, int reg)
pbrookb26eefb2008-03-31 03:44:26 +0000204{
205 if (reg == 15) {
206 uint32_t addr;
Peter Maydellb90372a2012-08-06 17:42:18 +0100207 /* normally, since we updated PC, we need only to add one insn */
pbrookb26eefb2008-03-31 03:44:26 +0000208 if (s->thumb)
209 addr = (long)s->pc + 2;
210 else
211 addr = (long)s->pc + 4;
212 tcg_gen_movi_i32(var, addr);
213 } else {
Filip Navara155c3ea2009-10-15 12:00:41 +0200214 tcg_gen_mov_i32(var, cpu_R[reg]);
pbrookb26eefb2008-03-31 03:44:26 +0000215 }
216}
217
218/* Create a new temporary and set it to the value of a CPU register. */
/* Create a new temporary and set it to the value of a CPU register.
 * Caller owns (and must free) the returned temporary.
 */
static inline TCGv_i32 load_reg(DisasContext *s, int reg)
{
    TCGv_i32 tmp = tcg_temp_new_i32();
    load_reg_var(s, tmp, reg);
    return tmp;
}
225
226/* Set a CPU register. The source must be a temporary and will be
227 marked as dead. */
/* Set a CPU register from VAR; VAR must be a temporary and is freed here.
 * A write to r15 is a branch, so it also ends the current TB.
 */
static void store_reg(DisasContext *s, int reg, TCGv_i32 var)
{
    if (reg == 15) {
        /* In Thumb mode, we must ignore bit 0.
         * In ARM mode, for ARMv4 and ARMv5, it is UNPREDICTABLE if bits [1:0]
         * are not 0b00, but for ARMv6 and above, we must ignore bits [1:0].
         * We choose to ignore [1:0] in ARM mode for all architecture versions.
         */
        tcg_gen_andi_i32(var, var, s->thumb ? ~1 : ~3);
        s->base.is_jmp = DISAS_JUMP;  /* PC changed: stop translating */
    }
    tcg_gen_mov_i32(cpu_R[reg], var);
    tcg_temp_free_i32(var);
}
242
Peter Maydell55203182018-10-08 14:55:04 +0100243/*
244 * Variant of store_reg which applies v8M stack-limit checks before updating
245 * SP. If the check fails this will result in an exception being taken.
246 * We disable the stack checks for CONFIG_USER_ONLY because we have
247 * no idea what the stack limits should be in that case.
248 * If stack checking is not being done this just acts like store_reg().
249 */
static void store_sp_checked(DisasContext *s, TCGv_i32 var)
{
#ifndef CONFIG_USER_ONLY
    /* Helper raises an exception if VAR is below the stack limit. */
    if (s->v8m_stackcheck) {
        gen_helper_v8m_stackcheck(cpu_env, var);
    }
#endif
    store_reg(s, 13, var);  /* r13 == SP */
}
259
pbrookb26eefb2008-03-31 03:44:26 +0000260/* Value extensions. */
pbrook86831432008-05-11 12:22:01 +0000261#define gen_uxtb(var) tcg_gen_ext8u_i32(var, var)
262#define gen_uxth(var) tcg_gen_ext16u_i32(var, var)
pbrookb26eefb2008-03-31 03:44:26 +0000263#define gen_sxtb(var) tcg_gen_ext8s_i32(var, var)
264#define gen_sxth(var) tcg_gen_ext16s_i32(var, var)
265
pbrook1497c962008-03-31 03:45:50 +0000266#define gen_sxtb16(var) gen_helper_sxtb16(var, var)
267#define gen_uxtb16(var) gen_helper_uxtb16(var, var)
pbrook8f012452008-03-31 03:46:03 +0000268
pbrookb26eefb2008-03-31 03:44:26 +0000269
/* Write VAR to CPSR under control of MASK (only masked bits are updated). */
static inline void gen_set_cpsr(TCGv_i32 var, uint32_t mask)
{
    TCGv_i32 tmp_mask = tcg_const_i32(mask);
    gen_helper_cpsr_write(cpu_env, var, tmp_mask);
    tcg_temp_free_i32(tmp_mask);
}
pbrookd9ba4832008-03-31 03:46:50 +0000276/* Set NZCV flags from the high 4 bits of var. */
277#define gen_set_nzcv(var) gen_set_cpsr(var, CPSR_NZCV)
278
/* Generate a QEMU-internal exception (e.g. EXCP_DEBUG); EXCP must be
 * one of the internal exception numbers, not an architectural one.
 */
static void gen_exception_internal(int excp)
{
    TCGv_i32 tcg_excp = tcg_const_i32(excp);

    assert(excp_is_internal(excp));
    gen_helper_exception_internal(cpu_env, tcg_excp);
    tcg_temp_free_i32(tcg_excp);
}
287
/* Generate an architectural exception EXCP with the given syndrome,
 * taken to exception level TARGET_EL.
 */
static void gen_exception(int excp, uint32_t syndrome, uint32_t target_el)
{
    TCGv_i32 tcg_excp = tcg_const_i32(excp);
    TCGv_i32 tcg_syn = tcg_const_i32(syndrome);
    TCGv_i32 tcg_el = tcg_const_i32(target_el);

    gen_helper_exception_with_syndrome(cpu_env, tcg_excp,
                                       tcg_syn, tcg_el);

    tcg_temp_free_i32(tcg_el);
    tcg_temp_free_i32(tcg_syn);
    tcg_temp_free_i32(tcg_excp);
}
301
static void gen_step_complete_exception(DisasContext *s)
{
    /* We just completed step of an insn. Move from Active-not-pending
     * to Active-pending, and then also take the swstep exception.
     * This corresponds to making the (IMPDEF) choice to prioritize
     * swstep exceptions over asynchronous exceptions taken to an exception
     * level where debug is disabled. This choice has the advantage that
     * we do not need to maintain internal state corresponding to the
     * ISV/EX syndrome bits between completion of the step and generation
     * of the exception, and our syndrome information is always correct.
     */
    gen_ss_advance(s);
    gen_exception(EXCP_UDEF, syn_swstep(s->ss_same_el, 1, s->is_ldex),
                  default_exception_el(s));
    /* The exception never returns to this TB. */
    s->base.is_jmp = DISAS_NORETURN;
}
318
Peter Maydell54254152017-04-20 17:32:30 +0100319static void gen_singlestep_exception(DisasContext *s)
320{
321 /* Generate the right kind of exception for singlestep, which is
322 * either the architectural singlestep or EXCP_DEBUG for QEMU's
323 * gdb singlestepping.
324 */
325 if (s->ss_active) {
326 gen_step_complete_exception(s);
327 } else {
328 gen_exception_internal(EXCP_DEBUG);
329 }
330}
331
static inline bool is_singlestepping(DisasContext *s)
{
    /* Return true if we are singlestepping either because of
     * architectural singlestep or QEMU gdbstub singlestep. This does
     * not include the command line '-singlestep' mode which is rather
     * misnamed as it only means "one instruction per TB" and doesn't
     * affect the code we generate.
     */
    return s->base.singlestep_enabled || s->ss_active;
}
342
/* Dual 16x16->32 signed multiply: a = lo16(a)*lo16(b), b = hi16(a)*hi16(b).
 * Both operands are updated in place; statement order matters because the
 * high-half product must be formed before a is overwritten.
 */
static void gen_smul_dual(TCGv_i32 a, TCGv_i32 b)
{
    TCGv_i32 tmp1 = tcg_temp_new_i32();
    TCGv_i32 tmp2 = tcg_temp_new_i32();
    tcg_gen_ext16s_i32(tmp1, a);
    tcg_gen_ext16s_i32(tmp2, b);
    tcg_gen_mul_i32(tmp1, tmp1, tmp2);  /* low-half product */
    tcg_temp_free_i32(tmp2);
    tcg_gen_sari_i32(a, a, 16);
    tcg_gen_sari_i32(b, b, 16);
    tcg_gen_mul_i32(b, b, a);           /* high-half product */
    tcg_gen_mov_i32(a, tmp1);
    tcg_temp_free_i32(tmp1);
}
357
358/* Byteswap each halfword. */
/* Byteswap each halfword: [b3 b2 b1 b0] -> [b2 b3 b0 b1] (REV16). */
static void gen_rev16(TCGv_i32 var)
{
    TCGv_i32 tmp = tcg_temp_new_i32();
    TCGv_i32 mask = tcg_const_i32(0x00ff00ff);
    tcg_gen_shri_i32(tmp, var, 8);
    tcg_gen_and_i32(tmp, tmp, mask);   /* odd bytes moved down */
    tcg_gen_and_i32(var, var, mask);
    tcg_gen_shli_i32(var, var, 8);     /* even bytes moved up */
    tcg_gen_or_i32(var, var, tmp);
    tcg_temp_free_i32(mask);
    tcg_temp_free_i32(tmp);
}
371
372/* Byteswap low halfword and sign extend. */
/* Byteswap low halfword and sign extend (REVSH). */
static void gen_revsh(TCGv_i32 var)
{
    tcg_gen_ext16u_i32(var, var);    /* keep only the low halfword */
    tcg_gen_bswap16_i32(var, var);
    tcg_gen_ext16s_i32(var, var);    /* sign-extend the swapped result */
}
379
Aurelien Jarno838fa722011-01-06 19:53:56 +0100380/* Return (b << 32) + a. Mark inputs as dead */
Peter Maydell39d54922013-05-23 12:59:55 +0100381static TCGv_i64 gen_addq_msw(TCGv_i64 a, TCGv_i32 b)
pbrook36706692008-03-31 03:46:19 +0000382{
Aurelien Jarno838fa722011-01-06 19:53:56 +0100383 TCGv_i64 tmp64 = tcg_temp_new_i64();
384
385 tcg_gen_extu_i32_i64(tmp64, b);
Peter Maydell7d1b0092011-03-06 21:39:54 +0000386 tcg_temp_free_i32(b);
Aurelien Jarno838fa722011-01-06 19:53:56 +0100387 tcg_gen_shli_i64(tmp64, tmp64, 32);
388 tcg_gen_add_i64(a, tmp64, a);
389
390 tcg_temp_free_i64(tmp64);
391 return a;
392}
393
394/* Return (b << 32) - a. Mark inputs as dead. */
Peter Maydell39d54922013-05-23 12:59:55 +0100395static TCGv_i64 gen_subq_msw(TCGv_i64 a, TCGv_i32 b)
Aurelien Jarno838fa722011-01-06 19:53:56 +0100396{
397 TCGv_i64 tmp64 = tcg_temp_new_i64();
398
399 tcg_gen_extu_i32_i64(tmp64, b);
Peter Maydell7d1b0092011-03-06 21:39:54 +0000400 tcg_temp_free_i32(b);
Aurelien Jarno838fa722011-01-06 19:53:56 +0100401 tcg_gen_shli_i64(tmp64, tmp64, 32);
402 tcg_gen_sub_i64(a, tmp64, a);
403
404 tcg_temp_free_i64(tmp64);
405 return a;
pbrook36706692008-03-31 03:46:19 +0000406}
407
pbrook5e3f8782008-03-31 03:47:34 +0000408/* 32x32->64 multiply. Marks inputs as dead. */
Peter Maydell39d54922013-05-23 12:59:55 +0100409static TCGv_i64 gen_mulu_i64_i32(TCGv_i32 a, TCGv_i32 b)
pbrook5e3f8782008-03-31 03:47:34 +0000410{
Peter Maydell39d54922013-05-23 12:59:55 +0100411 TCGv_i32 lo = tcg_temp_new_i32();
412 TCGv_i32 hi = tcg_temp_new_i32();
Richard Henderson831d7fe2013-02-19 23:52:05 -0800413 TCGv_i64 ret;
pbrook5e3f8782008-03-31 03:47:34 +0000414
Richard Henderson831d7fe2013-02-19 23:52:05 -0800415 tcg_gen_mulu2_i32(lo, hi, a, b);
Peter Maydell7d1b0092011-03-06 21:39:54 +0000416 tcg_temp_free_i32(a);
Peter Maydell7d1b0092011-03-06 21:39:54 +0000417 tcg_temp_free_i32(b);
Richard Henderson831d7fe2013-02-19 23:52:05 -0800418
419 ret = tcg_temp_new_i64();
420 tcg_gen_concat_i32_i64(ret, lo, hi);
Peter Maydell39d54922013-05-23 12:59:55 +0100421 tcg_temp_free_i32(lo);
422 tcg_temp_free_i32(hi);
Richard Henderson831d7fe2013-02-19 23:52:05 -0800423
424 return ret;
pbrook5e3f8782008-03-31 03:47:34 +0000425}
426
Peter Maydell39d54922013-05-23 12:59:55 +0100427static TCGv_i64 gen_muls_i64_i32(TCGv_i32 a, TCGv_i32 b)
pbrook5e3f8782008-03-31 03:47:34 +0000428{
Peter Maydell39d54922013-05-23 12:59:55 +0100429 TCGv_i32 lo = tcg_temp_new_i32();
430 TCGv_i32 hi = tcg_temp_new_i32();
Richard Henderson831d7fe2013-02-19 23:52:05 -0800431 TCGv_i64 ret;
pbrook5e3f8782008-03-31 03:47:34 +0000432
Richard Henderson831d7fe2013-02-19 23:52:05 -0800433 tcg_gen_muls2_i32(lo, hi, a, b);
Peter Maydell7d1b0092011-03-06 21:39:54 +0000434 tcg_temp_free_i32(a);
Peter Maydell7d1b0092011-03-06 21:39:54 +0000435 tcg_temp_free_i32(b);
Richard Henderson831d7fe2013-02-19 23:52:05 -0800436
437 ret = tcg_temp_new_i64();
438 tcg_gen_concat_i32_i64(ret, lo, hi);
Peter Maydell39d54922013-05-23 12:59:55 +0100439 tcg_temp_free_i32(lo);
440 tcg_temp_free_i32(hi);
Richard Henderson831d7fe2013-02-19 23:52:05 -0800441
442 return ret;
pbrook5e3f8782008-03-31 03:47:34 +0000443}
444
pbrook8f012452008-03-31 03:46:03 +0000445/* Swap low and high halfwords. */
Peter Maydell39d54922013-05-23 12:59:55 +0100446static void gen_swap_half(TCGv_i32 var)
pbrook8f012452008-03-31 03:46:03 +0000447{
Peter Maydell39d54922013-05-23 12:59:55 +0100448 TCGv_i32 tmp = tcg_temp_new_i32();
pbrook8f012452008-03-31 03:46:03 +0000449 tcg_gen_shri_i32(tmp, var, 16);
450 tcg_gen_shli_i32(var, var, 16);
451 tcg_gen_or_i32(var, var, tmp);
Peter Maydell7d1b0092011-03-06 21:39:54 +0000452 tcg_temp_free_i32(tmp);
pbrook8f012452008-03-31 03:46:03 +0000453}
454
pbrookb26eefb2008-03-31 03:44:26 +0000455/* Dual 16-bit add. Result placed in t0 and t1 is marked as dead.
456 tmp = (t0 ^ t1) & 0x8000;
457 t0 &= ~0x8000;
458 t1 &= ~0x8000;
459 t0 = (t0 + t1) ^ tmp;
460 */
461
/* Dual 16-bit add without carry between halves; result in t0, t1 freed.
 * The carry out of bit 15 is suppressed by masking bit 15 of both
 * operands before the add and restoring the correct sum bit afterwards.
 */
static void gen_add16(TCGv_i32 t0, TCGv_i32 t1)
{
    TCGv_i32 tmp = tcg_temp_new_i32();
    tcg_gen_xor_i32(tmp, t0, t1);
    tcg_gen_andi_i32(tmp, tmp, 0x8000);
    tcg_gen_andi_i32(t0, t0, ~0x8000);
    tcg_gen_andi_i32(t1, t1, ~0x8000);
    tcg_gen_add_i32(t0, t0, t1);
    tcg_gen_xor_i32(t0, t0, tmp);
    tcg_temp_free_i32(tmp);
    tcg_temp_free_i32(t1);
}
474
475/* Set CF to the top bit of var. */
/* Set CF to the top bit of var. */
static void gen_set_CF_bit31(TCGv_i32 var)
{
    tcg_gen_shri_i32(cpu_CF, var, 31);
}
480
481/* Set N and Z flags from var. */
/* Set N and Z flags from var (NF/ZF hold the value itself in
 * expanded-flag form; the helpers derive the actual flag bits).
 */
static inline void gen_logic_CC(TCGv_i32 var)
{
    tcg_gen_mov_i32(cpu_NF, var);
    tcg_gen_mov_i32(cpu_ZF, var);
}
487
488/* T0 += T1 + CF. */
/* T0 += T1 + CF (no flag update). */
static void gen_adc(TCGv_i32 t0, TCGv_i32 t1)
{
    tcg_gen_add_i32(t0, t0, t1);
    tcg_gen_add_i32(t0, t0, cpu_CF);
}
494
Juha Riihimäkie9bb4aa2009-05-06 09:15:38 +0300495/* dest = T0 + T1 + CF. */
/* dest = T0 + T1 + CF (no flag update). */
static void gen_add_carry(TCGv_i32 dest, TCGv_i32 t0, TCGv_i32 t1)
{
    tcg_gen_add_i32(dest, t0, t1);
    tcg_gen_add_i32(dest, dest, cpu_CF);
}
501
pbrook36706692008-03-31 03:46:19 +0000502/* dest = T0 - T1 + CF - 1. */
/* dest = T0 - T1 + CF - 1 (subtract with borrow, no flag update). */
static void gen_sub_carry(TCGv_i32 dest, TCGv_i32 t0, TCGv_i32 t1)
{
    tcg_gen_sub_i32(dest, t0, t1);
    tcg_gen_add_i32(dest, dest, cpu_CF);
    tcg_gen_subi_i32(dest, dest, 1);
}
509
Aurelien Jarno72485ec2012-10-05 15:04:44 +0100510/* dest = T0 + T1. Compute C, N, V and Z flags */
/* dest = T0 + T1. Compute C, N, V and Z flags */
static void gen_add_CC(TCGv_i32 dest, TCGv_i32 t0, TCGv_i32 t1)
{
    TCGv_i32 tmp = tcg_temp_new_i32();
    tcg_gen_movi_i32(tmp, 0);
    /* add2 yields sum in NF and carry-out in CF directly. */
    tcg_gen_add2_i32(cpu_NF, cpu_CF, t0, tmp, t1, tmp);
    tcg_gen_mov_i32(cpu_ZF, cpu_NF);
    /* Overflow: result differs in sign from t0 but t0/t1 agree. */
    tcg_gen_xor_i32(cpu_VF, cpu_NF, t0);
    tcg_gen_xor_i32(tmp, t0, t1);
    tcg_gen_andc_i32(cpu_VF, cpu_VF, tmp);
    tcg_temp_free_i32(tmp);
    tcg_gen_mov_i32(dest, cpu_NF);
}
523
Richard Henderson49b4c312013-02-19 23:52:08 -0800524/* dest = T0 + T1 + CF. Compute C, N, V and Z flags */
/* dest = T0 + T1 + CF. Compute C, N, V and Z flags */
static void gen_adc_CC(TCGv_i32 dest, TCGv_i32 t0, TCGv_i32 t1)
{
    TCGv_i32 tmp = tcg_temp_new_i32();
    if (TCG_TARGET_HAS_add2_i32) {
        /* Two chained add2 ops accumulate the carry-out in CF. */
        tcg_gen_movi_i32(tmp, 0);
        tcg_gen_add2_i32(cpu_NF, cpu_CF, t0, tmp, cpu_CF, tmp);
        tcg_gen_add2_i32(cpu_NF, cpu_CF, cpu_NF, cpu_CF, t1, tmp);
    } else {
        /* Fall back to a 64-bit add; high word is the carry. */
        TCGv_i64 q0 = tcg_temp_new_i64();
        TCGv_i64 q1 = tcg_temp_new_i64();
        tcg_gen_extu_i32_i64(q0, t0);
        tcg_gen_extu_i32_i64(q1, t1);
        tcg_gen_add_i64(q0, q0, q1);
        tcg_gen_extu_i32_i64(q1, cpu_CF);
        tcg_gen_add_i64(q0, q0, q1);
        tcg_gen_extr_i64_i32(cpu_NF, cpu_CF, q0);
        tcg_temp_free_i64(q0);
        tcg_temp_free_i64(q1);
    }
    tcg_gen_mov_i32(cpu_ZF, cpu_NF);
    /* Overflow: result differs in sign from t0 but t0/t1 agree. */
    tcg_gen_xor_i32(cpu_VF, cpu_NF, t0);
    tcg_gen_xor_i32(tmp, t0, t1);
    tcg_gen_andc_i32(cpu_VF, cpu_VF, tmp);
    tcg_temp_free_i32(tmp);
    tcg_gen_mov_i32(dest, cpu_NF);
}
551
Aurelien Jarno72485ec2012-10-05 15:04:44 +0100552/* dest = T0 - T1. Compute C, N, V and Z flags */
/* dest = T0 - T1. Compute C, N, V and Z flags */
static void gen_sub_CC(TCGv_i32 dest, TCGv_i32 t0, TCGv_i32 t1)
{
    TCGv_i32 tmp;
    tcg_gen_sub_i32(cpu_NF, t0, t1);
    tcg_gen_mov_i32(cpu_ZF, cpu_NF);
    /* ARM subtraction carry == NOT borrow, i.e. t0 >= t1 unsigned. */
    tcg_gen_setcond_i32(TCG_COND_GEU, cpu_CF, t0, t1);
    /* Overflow: result differs in sign from t0 and t0/t1 differ. */
    tcg_gen_xor_i32(cpu_VF, cpu_NF, t0);
    tmp = tcg_temp_new_i32();
    tcg_gen_xor_i32(tmp, t0, t1);
    tcg_gen_and_i32(cpu_VF, cpu_VF, tmp);
    tcg_temp_free_i32(tmp);
    tcg_gen_mov_i32(dest, cpu_NF);
}
566
Richard Hendersone77f0832013-02-25 11:41:39 -0800567/* dest = T0 + ~T1 + CF. Compute C, N, V and Z flags */
/* dest = T0 + ~T1 + CF (== T0 - T1 - !CF). Compute C, N, V and Z flags
 * by reusing the add-with-carry flag logic on the complemented operand.
 */
static void gen_sbc_CC(TCGv_i32 dest, TCGv_i32 t0, TCGv_i32 t1)
{
    TCGv_i32 tmp = tcg_temp_new_i32();
    tcg_gen_not_i32(tmp, t1);
    gen_adc_CC(dest, t0, tmp);
    tcg_temp_free_i32(tmp);
}
575
/* Variable shift by register, ARM semantics: only the bottom 8 bits of
 * the shift count are used, and counts of 32..255 produce 0.  The
 * movcond selects 0 when (count & 0xff) > 31, otherwise the shifted
 * value; the count is then masked to 5 bits for the host shift op.
 */
#define GEN_SHIFT(name)                                               \
static void gen_##name(TCGv_i32 dest, TCGv_i32 t0, TCGv_i32 t1)       \
{                                                                     \
    TCGv_i32 tmp1, tmp2, tmp3;                                        \
    tmp1 = tcg_temp_new_i32();                                        \
    tcg_gen_andi_i32(tmp1, t1, 0xff);                                 \
    tmp2 = tcg_const_i32(0);                                          \
    tmp3 = tcg_const_i32(0x1f);                                       \
    tcg_gen_movcond_i32(TCG_COND_GTU, tmp2, tmp1, tmp3, tmp2, t0);    \
    tcg_temp_free_i32(tmp3);                                          \
    tcg_gen_andi_i32(tmp1, tmp1, 0x1f);                               \
    tcg_gen_##name##_i32(dest, tmp2, tmp1);                           \
    tcg_temp_free_i32(tmp2);                                          \
    tcg_temp_free_i32(tmp1);                                          \
}
GEN_SHIFT(shl)
GEN_SHIFT(shr)
#undef GEN_SHIFT
594
/* Arithmetic shift right by register, ARM semantics: only the bottom
 * 8 bits of the count are used, and counts >= 32 behave like 31
 * (the result fills with the sign bit), so the count is clamped.
 */
static void gen_sar(TCGv_i32 dest, TCGv_i32 t0, TCGv_i32 t1)
{
    TCGv_i32 tmp1, tmp2;
    tmp1 = tcg_temp_new_i32();
    tcg_gen_andi_i32(tmp1, t1, 0xff);
    tmp2 = tcg_const_i32(0x1f);
    /* tmp1 = min(tmp1, 31) */
    tcg_gen_movcond_i32(TCG_COND_GTU, tmp1, tmp1, tmp2, tmp2, tmp1);
    tcg_temp_free_i32(tmp2);
    tcg_gen_sar_i32(dest, t0, tmp1);
    tcg_temp_free_i32(tmp1);
}
606
/* Set CF to bit SHIFT of VAR (the shifter carry-out for an immediate
 * shift).  SHIFT == 0 means bit 0; SHIFT == 31 needs no masking since
 * the shift already leaves a single bit.
 */
static void shifter_out_im(TCGv_i32 var, int shift)
{
    if (shift == 0) {
        tcg_gen_andi_i32(cpu_CF, var, 1);
    } else {
        tcg_gen_shri_i32(cpu_CF, var, shift);
        if (shift != 31) {
            tcg_gen_andi_i32(cpu_CF, cpu_CF, 1);
        }
    }
}
pbrookb26eefb2008-03-31 03:44:26 +0000618
pbrook9a119ff2008-03-31 03:45:35 +0000619/* Shift by immediate. Includes special handling for shift == 0. */
Peter Maydell39d54922013-05-23 12:59:55 +0100620static inline void gen_arm_shift_im(TCGv_i32 var, int shiftop,
621 int shift, int flags)
pbrook9a119ff2008-03-31 03:45:35 +0000622{
623 switch (shiftop) {
624 case 0: /* LSL */
625 if (shift != 0) {
626 if (flags)
627 shifter_out_im(var, 32 - shift);
628 tcg_gen_shli_i32(var, var, shift);
629 }
630 break;
631 case 1: /* LSR */
632 if (shift == 0) {
633 if (flags) {
Aurelien Jarno66c374d2012-10-05 15:04:44 +0100634 tcg_gen_shri_i32(cpu_CF, var, 31);
pbrook9a119ff2008-03-31 03:45:35 +0000635 }
636 tcg_gen_movi_i32(var, 0);
637 } else {
638 if (flags)
639 shifter_out_im(var, shift - 1);
640 tcg_gen_shri_i32(var, var, shift);
641 }
642 break;
643 case 2: /* ASR */
644 if (shift == 0)
645 shift = 32;
646 if (flags)
647 shifter_out_im(var, shift - 1);
648 if (shift == 32)
649 shift = 31;
650 tcg_gen_sari_i32(var, var, shift);
651 break;
652 case 3: /* ROR/RRX */
653 if (shift != 0) {
654 if (flags)
655 shifter_out_im(var, shift - 1);
Aurelien Jarnof669df22009-10-15 16:45:14 +0200656 tcg_gen_rotri_i32(var, var, shift); break;
pbrook9a119ff2008-03-31 03:45:35 +0000657 } else {
Peter Maydell39d54922013-05-23 12:59:55 +0100658 TCGv_i32 tmp = tcg_temp_new_i32();
Peter Crosthwaiteb6348f22012-10-16 19:15:50 +1000659 tcg_gen_shli_i32(tmp, cpu_CF, 31);
pbrook9a119ff2008-03-31 03:45:35 +0000660 if (flags)
661 shifter_out_im(var, 0);
662 tcg_gen_shri_i32(var, var, 1);
pbrookb26eefb2008-03-31 03:44:26 +0000663 tcg_gen_or_i32(var, var, tmp);
Peter Maydell7d1b0092011-03-06 21:39:54 +0000664 tcg_temp_free_i32(tmp);
pbrookb26eefb2008-03-31 03:44:26 +0000665 }
666 }
667};
668
/* Shift by register.  'shift' is a dead temporary and is freed here.
 * If 'flags' is set, use the flag-setting helper calls, which also
 * compute the shifter carry-out; otherwise use the plain generators.
 */
static inline void gen_arm_shift_reg(TCGv_i32 var, int shiftop,
                                     TCGv_i32 shift, int flags)
{
    if (flags) {
        switch (shiftop) {
        case 0: gen_helper_shl_cc(var, cpu_env, var, shift); break;
        case 1: gen_helper_shr_cc(var, cpu_env, var, shift); break;
        case 2: gen_helper_sar_cc(var, cpu_env, var, shift); break;
        case 3: gen_helper_ror_cc(var, cpu_env, var, shift); break;
        }
    } else {
        switch (shiftop) {
        case 0:
            gen_shl(var, var, shift);
            break;
        case 1:
            gen_shr(var, var, shift);
            break;
        case 2:
            gen_sar(var, var, shift);
            break;
        case 3: /* ROR: only the low 5 bits of the count matter.  */
            tcg_gen_andi_i32(shift, shift, 0x1f);
            tcg_gen_rotr_i32(var, var, shift); break;
        }
    }
    tcg_temp_free_i32(shift);
}
696
/* ARM-encoding parallel add/subtract (SADD16 etc).  PAS_OP expands to
 * a switch on op2 choosing the byte/halfword add/sub/exchange form;
 * it is instantiated once per prefix (signed/unsigned/saturating/
 * halving) via the gen_pas_helper macro, which is redefined below
 * depending on whether the helper takes a GE-flags pointer.
 */
#define PAS_OP(pfx) \
    switch (op2) {  \
    case 0: gen_pas_helper(glue(pfx,add16)); break; \
    case 1: gen_pas_helper(glue(pfx,addsubx)); break; \
    case 2: gen_pas_helper(glue(pfx,subaddx)); break; \
    case 3: gen_pas_helper(glue(pfx,sub16)); break; \
    case 4: gen_pas_helper(glue(pfx,add8)); break; \
    case 7: gen_pas_helper(glue(pfx,sub8)); break; \
    }
static void gen_arm_parallel_addsub(int op1, int op2, TCGv_i32 a, TCGv_i32 b)
{
    TCGv_ptr tmp;

    switch (op1) {
#define gen_pas_helper(name) glue(gen_helper_,name)(a, a, b, tmp)
    case 1:
        /* Signed non-saturating: helpers also write the GE flags,
         * passed as a pointer into CPUARMState.
         */
        tmp = tcg_temp_new_ptr();
        tcg_gen_addi_ptr(tmp, cpu_env, offsetof(CPUARMState, GE));
        PAS_OP(s)
        tcg_temp_free_ptr(tmp);
        break;
    case 5:
        /* Unsigned non-saturating: also sets the GE flags.  */
        tmp = tcg_temp_new_ptr();
        tcg_gen_addi_ptr(tmp, cpu_env, offsetof(CPUARMState, GE));
        PAS_OP(u)
        tcg_temp_free_ptr(tmp);
        break;
#undef gen_pas_helper
#define gen_pas_helper(name) glue(gen_helper_,name)(a, a, b)
    case 2:
        PAS_OP(q);
        break;
    case 3:
        PAS_OP(sh);
        break;
    case 6:
        PAS_OP(uq);
        break;
    case 7:
        PAS_OP(uh);
        break;
#undef gen_pas_helper
    }
}
#undef PAS_OP
742
/* For unknown reasons Arm and Thumb-2 use arbitrarily different encodings. */
/* Same structure as gen_arm_parallel_addsub() above, but with the
 * roles of op1/op2 and the case numbering per the Thumb-2 encoding.
 */
#define PAS_OP(pfx) \
    switch (op1) { \
    case 0: gen_pas_helper(glue(pfx,add8)); break; \
    case 1: gen_pas_helper(glue(pfx,add16)); break; \
    case 2: gen_pas_helper(glue(pfx,addsubx)); break; \
    case 4: gen_pas_helper(glue(pfx,sub8)); break; \
    case 5: gen_pas_helper(glue(pfx,sub16)); break; \
    case 6: gen_pas_helper(glue(pfx,subaddx)); break; \
    }
static void gen_thumb2_parallel_addsub(int op1, int op2, TCGv_i32 a, TCGv_i32 b)
{
    TCGv_ptr tmp;

    switch (op2) {
#define gen_pas_helper(name) glue(gen_helper_,name)(a, a, b, tmp)
    case 0:
        /* Signed non-saturating: helpers also write the GE flags.  */
        tmp = tcg_temp_new_ptr();
        tcg_gen_addi_ptr(tmp, cpu_env, offsetof(CPUARMState, GE));
        PAS_OP(s)
        tcg_temp_free_ptr(tmp);
        break;
    case 4:
        /* Unsigned non-saturating: also sets the GE flags.  */
        tmp = tcg_temp_new_ptr();
        tcg_gen_addi_ptr(tmp, cpu_env, offsetof(CPUARMState, GE));
        PAS_OP(u)
        tcg_temp_free_ptr(tmp);
        break;
#undef gen_pas_helper
#define gen_pas_helper(name) glue(gen_helper_,name)(a, a, b)
    case 1:
        PAS_OP(q);
        break;
    case 2:
        PAS_OP(sh);
        break;
    case 5:
        PAS_OP(uq);
        break;
    case 6:
        PAS_OP(uh);
        break;
#undef gen_pas_helper
    }
}
#undef PAS_OP
789
/*
 * Generate a conditional based on ARM condition code cc.
 * This is common between ARM and Aarch64 targets.
 *
 * On return, cmp->cond and cmp->value describe a comparison of
 * value against zero which is true when the condition holds;
 * cmp->value_global records whether value is a TCG global (flag
 * variable) or a fresh temporary that arm_free_cc() must release.
 */
void arm_test_cc(DisasCompare *cmp, int cc)
{
    TCGv_i32 value;
    TCGCond cond;
    bool global = true;

    switch (cc) {
    case 0: /* eq: Z */
    case 1: /* ne: !Z */
        cond = TCG_COND_EQ;
        value = cpu_ZF;
        break;

    case 2: /* cs: C */
    case 3: /* cc: !C */
        cond = TCG_COND_NE;
        value = cpu_CF;
        break;

    case 4: /* mi: N */
    case 5: /* pl: !N */
        cond = TCG_COND_LT;
        value = cpu_NF;
        break;

    case 6: /* vs: V */
    case 7: /* vc: !V */
        cond = TCG_COND_LT;
        value = cpu_VF;
        break;

    case 8: /* hi: C && !Z */
    case 9: /* ls: !C || Z -> !(C && !Z) */
        cond = TCG_COND_NE;
        value = tcg_temp_new_i32();
        global = false;
        /* CF is 1 for C, so -CF is an all-bits-set mask for C;
           ZF is non-zero for !Z; so AND the two subexpressions.  */
        tcg_gen_neg_i32(value, cpu_CF);
        tcg_gen_and_i32(value, value, cpu_ZF);
        break;

    case 10: /* ge: N == V -> N ^ V == 0 */
    case 11: /* lt: N != V -> N ^ V != 0 */
        /* Since we're only interested in the sign bit, == 0 is >= 0.  */
        cond = TCG_COND_GE;
        value = tcg_temp_new_i32();
        global = false;
        tcg_gen_xor_i32(value, cpu_VF, cpu_NF);
        break;

    case 12: /* gt: !Z && N == V */
    case 13: /* le: Z || N != V */
        cond = TCG_COND_NE;
        value = tcg_temp_new_i32();
        global = false;
        /* (N == V) is equal to the sign bit of ~(NF ^ VF).  Propagate
         * the sign bit then AND with ZF to yield the result.  */
        tcg_gen_xor_i32(value, cpu_VF, cpu_NF);
        tcg_gen_sari_i32(value, value, 31);
        tcg_gen_andc_i32(value, cpu_ZF, value);
        break;

    case 14: /* always */
    case 15: /* always */
        /* Use the ALWAYS condition, which will fold early.
         * It doesn't matter what we use for the value.  */
        cond = TCG_COND_ALWAYS;
        value = cpu_ZF;
        goto no_invert;

    default:
        fprintf(stderr, "Bad condition code 0x%x\n", cc);
        abort();
    }

    /* Each odd cc is the negation of the preceding even cc, so the
     * cases above set up the even form and we invert here.
     */
    if (cc & 1) {
        cond = tcg_invert_cond(cond);
    }

 no_invert:
    cmp->cond = cond;
    cmp->value = value;
    cmp->value_global = global;
}
879
880void arm_free_cc(DisasCompare *cmp)
881{
882 if (!cmp->value_global) {
883 tcg_temp_free_i32(cmp->value);
884 }
885}
886
/* Branch to 'label' if the condition described by 'cmp' holds.  */
void arm_jump_cc(DisasCompare *cmp, TCGLabel *label)
{
    tcg_gen_brcondi_i32(cmp->cond, cmp->value, 0, label);
}
891
/* Branch to 'label' if ARM condition cc passes: convenience wrapper
 * combining arm_test_cc(), arm_jump_cc() and arm_free_cc().
 */
void arm_gen_test_cc(int cc, TCGLabel *label)
{
    DisasCompare cmp;
    arm_test_cc(&cmp, cc);
    arm_jump_cc(&cmp, label);
    arm_free_cc(&cmp);
}
bellard2c0262a2003-09-30 20:34:21 +0000899
/* Indexed by the data-processing opcode: 1 for the logical operations
 * (AND/EOR/TST/TEQ/ORR/MOV/BIC/MVN), 0 for the arithmetic ones.
 */
static const uint8_t table_logic_cc[16] = {
    1, /* and */
    1, /* xor */
    0, /* sub */
    0, /* rsb */
    0, /* add */
    0, /* adc */
    0, /* sbc */
    0, /* rsc */
    1, /* andl */
    1, /* xorl */
    0, /* cmp */
    0, /* cmn */
    1, /* orr */
    1, /* mov */
    1, /* bic */
    1, /* mvn */
};
ths3b46e622007-09-17 08:09:54 +0000918
/* Write the current IT-block state back to the condexec_bits field
 * of CPUARMState.  A no-op outside an IT block (condexec_mask == 0),
 * since the field is already zero in that case.
 */
static inline void gen_set_condexec(DisasContext *s)
{
    if (s->condexec_mask) {
        uint32_t val = (s->condexec_cond << 4) | (s->condexec_mask >> 1);
        TCGv_i32 tmp = tcg_temp_new_i32();
        tcg_gen_movi_i32(tmp, val);
        store_cpu_field(tmp, condexec_bits);
    }
}
928
/* Set r15 (the PC) to an immediate value.  */
static inline void gen_set_pc_im(DisasContext *s, target_ulong val)
{
    tcg_gen_movi_i32(cpu_R[15], val);
}
933
/* Set PC and Thumb state from an immediate address. */
static inline void gen_bx_im(DisasContext *s, uint32_t addr)
{
    TCGv_i32 tmp;

    s->base.is_jmp = DISAS_JUMP;
    if (s->thumb != (addr & 1)) {
        /* Interworking branch: bit 0 of the target selects the new
         * Thumb state, so update CPUARMState.thumb.
         */
        tmp = tcg_temp_new_i32();
        tcg_gen_movi_i32(tmp, addr & 1);
        tcg_gen_st_i32(tmp, cpu_env, offsetof(CPUARMState, thumb));
        tcg_temp_free_i32(tmp);
    }
    tcg_gen_movi_i32(cpu_R[15], addr & ~1);
}
948
/* Set PC and Thumb state from var. var is marked as dead.
 * Bit 0 of var selects the new Thumb state (BX interworking);
 * the remaining bits form the branch target.
 */
static inline void gen_bx(DisasContext *s, TCGv_i32 var)
{
    s->base.is_jmp = DISAS_JUMP;
    tcg_gen_andi_i32(cpu_R[15], var, ~1);
    tcg_gen_andi_i32(var, var, 1);
    store_cpu_field(var, thumb);
}
957
/* Set PC and Thumb state from var. var is marked as dead.
 * For M-profile CPUs, include logic to detect exception-return
 * branches and handle them. This is needed for Thumb POP/LDM to PC, LDR to PC,
 * and BX reg, and no others, and happens only for code in Handler mode.
 */
static inline void gen_bx_excret(DisasContext *s, TCGv_i32 var)
{
    /* Generate the same code here as for a simple bx, but flag via
     * s->base.is_jmp that we need to do the rest of the work later.
     */
    gen_bx(s, var);
    /* The extra check runs for any Secure-capable M core, and for
     * code executing in Handler mode on other M cores.
     */
    if (arm_dc_feature(s, ARM_FEATURE_M_SECURITY) ||
        (s->v7m_handler_mode && arm_dc_feature(s, ARM_FEATURE_M))) {
        s->base.is_jmp = DISAS_BX_EXCRET;
    }
}
974
/* Emit the end-of-TB code for a DISAS_BX_EXCRET: decide at runtime
 * whether the value just written to the PC is an exception-return
 * magic value, and if so raise EXCP_EXCEPTION_EXIT.
 */
static inline void gen_bx_excret_final_code(DisasContext *s)
{
    /* Generate the code to finish possible exception return and end the TB */
    TCGLabel *excret_label = gen_new_label();
    uint32_t min_magic;

    if (arm_dc_feature(s, ARM_FEATURE_M_SECURITY)) {
        /* Covers FNC_RETURN and EXC_RETURN magic */
        min_magic = FNC_RETURN_MIN_MAGIC;
    } else {
        /* EXC_RETURN magic only */
        min_magic = EXC_RETURN_MIN_MAGIC;
    }

    /* Is the new PC value in the magic range indicating exception return? */
    tcg_gen_brcondi_i32(TCG_COND_GEU, cpu_R[15], min_magic, excret_label);
    /* No: end the TB as we would for a DISAS_JMP */
    if (is_singlestepping(s)) {
        gen_singlestep_exception(s);
    } else {
        tcg_gen_exit_tb(NULL, 0);
    }
    gen_set_label(excret_label);
    /* Yes: this is an exception return.
     * At this point in runtime env->regs[15] and env->thumb will hold
     * the exception-return magic number, which do_v7m_exception_exit()
     * will read. Nothing else will be able to see those values because
     * the cpu-exec main loop guarantees that we will always go straight
     * from raising the exception to the exception-handling code.
     *
     * gen_ss_advance(s) does nothing on M profile currently but
     * calling it is conceptually the right thing as we have executed
     * this instruction (compare SWI, HVC, SMC handling).
     */
    gen_ss_advance(s);
    gen_exception_internal(EXCP_EXCEPTION_EXIT);
}
1012
/* Generate code for the M-profile BXNS (branch to Non-secure) insn. */
static inline void gen_bxns(DisasContext *s, int rm)
{
    TCGv_i32 var = load_reg(s, rm);

    /* The bxns helper may raise an EXCEPTION_EXIT exception, so in theory
     * we need to sync state before calling it, but:
     *  - we don't need to do gen_set_pc_im() because the bxns helper will
     *    always set the PC itself
     *  - we don't need to do gen_set_condexec() because BXNS is UNPREDICTABLE
     *    unless it's outside an IT block or the last insn in an IT block,
     *    so we know that condexec == 0 (already set at the top of the TB)
     *    is correct in the non-UNPREDICTABLE cases, and we can choose
     *    "zeroes the IT bits" as our UNPREDICTABLE behaviour otherwise.
     */
    gen_helper_v7m_bxns(cpu_env, var);
    tcg_temp_free_i32(var);
    s->base.is_jmp = DISAS_EXIT;
}
1031
/* Generate code for the M-profile BLXNS (branch+link to Non-secure) insn. */
static inline void gen_blxns(DisasContext *s, int rm)
{
    TCGv_i32 var = load_reg(s, rm);

    /* We don't need to sync condexec state, for the same reason as bxns.
     * We do however need to set the PC, because the blxns helper reads it.
     * The blxns helper may throw an exception.
     */
    gen_set_pc_im(s, s->pc);
    gen_helper_v7m_blxns(cpu_env, var);
    tcg_temp_free_i32(var);
    s->base.is_jmp = DISAS_EXIT;
}
1045
Juha Riihimäki21aeb342009-05-06 09:16:12 +03001046/* Variant of store_reg which uses branch&exchange logic when storing
1047 to r15 in ARM architecture v7 and above. The source must be a temporary
1048 and will be marked as dead. */
Peter Maydell7dcc1f82014-10-28 19:24:03 +00001049static inline void store_reg_bx(DisasContext *s, int reg, TCGv_i32 var)
Juha Riihimäki21aeb342009-05-06 09:16:12 +03001050{
1051 if (reg == 15 && ENABLE_ARCH_7) {
1052 gen_bx(s, var);
1053 } else {
1054 store_reg(s, reg, var);
1055 }
1056}
1057
Dmitry Eremin-Solenikovbe5e7a72011-04-04 17:38:44 +04001058/* Variant of store_reg which uses branch&exchange logic when storing
1059 * to r15 in ARM architecture v5T and above. This is used for storing
1060 * the results of a LDR/LDM/POP into r15, and corresponds to the cases
1061 * in the ARM ARM which use the LoadWritePC() pseudocode function. */
Peter Maydell7dcc1f82014-10-28 19:24:03 +00001062static inline void store_reg_from_load(DisasContext *s, int reg, TCGv_i32 var)
Dmitry Eremin-Solenikovbe5e7a72011-04-04 17:38:44 +04001063{
1064 if (reg == 15 && ENABLE_ARCH_5) {
Peter Maydell3bb8a962017-04-20 17:32:31 +01001065 gen_bx_excret(s, var);
Dmitry Eremin-Solenikovbe5e7a72011-04-04 17:38:44 +04001066 } else {
1067 store_reg(s, reg, var);
1068 }
1069}
1070
/* Compile-time constant: 1 in user-mode emulation builds, 0 otherwise.
 * Lets the endianness fixups below use a plain 'if' instead of #ifdef.
 */
#ifdef CONFIG_USER_ONLY
#define IS_USER_ONLY 1
#else
#define IS_USER_ONLY 0
#endif
1076
/* Abstractions of "generate code to do a guest load/store for
 * AArch32", where a vaddr is always 32 bits (and is zero
 * extended if we're a 64 bit core) and data is also
 * 32 bits unless specifically doing a 64 bit access.
 * These functions work like tcg_gen_qemu_{ld,st}* except
 * that the address argument is TCGv_i32 rather than TCGv.
 */

/* Zero-extend the 32-bit guest address into a new target-width
 * temporary; caller frees it.  In BE32 system mode (SCTLR.B set)
 * sub-word accesses get their address XORed to model the
 * byte-swapped memory view.
 */
static inline TCGv gen_aa32_addr(DisasContext *s, TCGv_i32 a32, TCGMemOp op)
{
    TCGv addr = tcg_temp_new();
    tcg_gen_extu_i32_tl(addr, a32);

    /* Not needed for user-mode BE32, where we use MO_BE instead.  */
    if (!IS_USER_ONLY && s->sctlr_b && (op & MO_SIZE) < MO_32) {
        tcg_gen_xori_tl(addr, addr, 4 - (1 << (op & MO_SIZE)));
    }
    return addr;
}
1096
/* Emit a 32-bit-or-narrower guest load.  On M-profile cores without
 * the Main Extension, force an alignment check on every load.
 */
static void gen_aa32_ld_i32(DisasContext *s, TCGv_i32 val, TCGv_i32 a32,
                            int index, TCGMemOp opc)
{
    TCGv addr;

    if (arm_dc_feature(s, ARM_FEATURE_M) &&
        !arm_dc_feature(s, ARM_FEATURE_M_MAIN)) {
        opc |= MO_ALIGN;
    }

    addr = gen_aa32_addr(s, a32, opc);
    tcg_gen_qemu_ld_i32(val, addr, index, opc);
    tcg_temp_free(addr);
}
1111
/* Emit a 32-bit-or-narrower guest store.  On M-profile cores without
 * the Main Extension, force an alignment check on every store.
 */
static void gen_aa32_st_i32(DisasContext *s, TCGv_i32 val, TCGv_i32 a32,
                            int index, TCGMemOp opc)
{
    TCGv addr;

    if (arm_dc_feature(s, ARM_FEATURE_M) &&
        !arm_dc_feature(s, ARM_FEATURE_M_MAIN)) {
        opc |= MO_ALIGN;
    }

    addr = gen_aa32_addr(s, a32, opc);
    tcg_gen_qemu_st_i32(val, addr, index, opc);
    tcg_temp_free(addr);
}
1126
/* Expand to a fixed-width load (or store) accessor plus an _iss
 * variant which additionally records ISS syndrome information via
 * disas_set_da_iss() for instructions that can report it on a
 * data abort.
 */
#define DO_GEN_LD(SUFF, OPC) \
static inline void gen_aa32_ld##SUFF(DisasContext *s, TCGv_i32 val, \
                                     TCGv_i32 a32, int index) \
{ \
    gen_aa32_ld_i32(s, val, a32, index, OPC | s->be_data); \
} \
static inline void gen_aa32_ld##SUFF##_iss(DisasContext *s, \
                                           TCGv_i32 val, \
                                           TCGv_i32 a32, int index, \
                                           ISSInfo issinfo) \
{ \
    gen_aa32_ld##SUFF(s, val, a32, index); \
    disas_set_da_iss(s, OPC, issinfo); \
}

#define DO_GEN_ST(SUFF, OPC) \
static inline void gen_aa32_st##SUFF(DisasContext *s, TCGv_i32 val, \
                                     TCGv_i32 a32, int index) \
{ \
    gen_aa32_st_i32(s, val, a32, index, OPC | s->be_data); \
} \
static inline void gen_aa32_st##SUFF##_iss(DisasContext *s, \
                                           TCGv_i32 val, \
                                           TCGv_i32 a32, int index, \
                                           ISSInfo issinfo) \
{ \
    gen_aa32_st##SUFF(s, val, a32, index); \
    disas_set_da_iss(s, OPC, issinfo | ISSIsWrite); \
}
1156
/* In BE32 system mode (SCTLR.B set), swap the two 32-bit halves of a
 * just-loaded 64-bit value by rotating it by 32 bits.
 */
static inline void gen_aa32_frob64(DisasContext *s, TCGv_i64 val)
{
    /* Not needed for user-mode BE32, where we use MO_BE instead.  */
    if (!IS_USER_ONLY && s->sctlr_b) {
        tcg_gen_rotri_i64(val, val, 32);
    }
}
1164
/* Emit a 64-bit guest load, applying the BE32 word-swap fixup.  */
static void gen_aa32_ld_i64(DisasContext *s, TCGv_i64 val, TCGv_i32 a32,
                            int index, TCGMemOp opc)
{
    TCGv addr = gen_aa32_addr(s, a32, opc);
    tcg_gen_qemu_ld_i64(val, addr, index, opc);
    gen_aa32_frob64(s, val);
    tcg_temp_free(addr);
}
1173
/* Convenience wrapper: 64-bit load using the translation-time
 * endianness (s->be_data).
 */
static inline void gen_aa32_ld64(DisasContext *s, TCGv_i64 val,
                                 TCGv_i32 a32, int index)
{
    gen_aa32_ld_i64(s, val, a32, index, MO_Q | s->be_data);
}
1179
/* Emit a 64-bit guest store.  In BE32 system mode the two 32-bit
 * halves are swapped (via a scratch temp) before the store, mirroring
 * the load-side fixup in gen_aa32_frob64().
 */
static void gen_aa32_st_i64(DisasContext *s, TCGv_i64 val, TCGv_i32 a32,
                            int index, TCGMemOp opc)
{
    TCGv addr = gen_aa32_addr(s, a32, opc);

    /* Not needed for user-mode BE32, where we use MO_BE instead.  */
    if (!IS_USER_ONLY && s->sctlr_b) {
        TCGv_i64 tmp = tcg_temp_new_i64();
        tcg_gen_rotri_i64(tmp, val, 32);
        tcg_gen_qemu_st_i64(tmp, addr, index, opc);
        tcg_temp_free_i64(tmp);
    } else {
        tcg_gen_qemu_st_i64(val, addr, index, opc);
    }
    tcg_temp_free(addr);
}
1196
/* Convenience wrapper: 64-bit store using the translation-time
 * endianness (s->be_data).
 */
static inline void gen_aa32_st64(DisasContext *s, TCGv_i64 val,
                                 TCGv_i32 a32, int index)
{
    gen_aa32_st_i64(s, val, a32, index, MO_Q | s->be_data);
}
1202
/* Instantiate the sign/zero-extending loads and the stores.  */
DO_GEN_LD(8s, MO_SB)
DO_GEN_LD(8u, MO_UB)
DO_GEN_LD(16s, MO_SW)
DO_GEN_LD(16u, MO_UW)
DO_GEN_LD(32u, MO_UL)
DO_GEN_ST(8, MO_UB)
DO_GEN_ST(16, MO_UW)
DO_GEN_ST(32, MO_UL)
/* Generate code for HVC #imm16 (hypervisor call).  */
static inline void gen_hvc(DisasContext *s, int imm16)
{
    /* The pre HVC helper handles cases when HVC gets trapped
     * as an undefined insn by runtime configuration (ie before
     * the insn really executes).
     */
    gen_set_pc_im(s, s->pc - 4);
    gen_helper_pre_hvc(cpu_env);
    /* Otherwise we will treat this as a real exception which
     * happens after execution of the insn. (The distinction matters
     * for the PC value reported to the exception handler and also
     * for single stepping.)
     */
    s->svc_imm = imm16;
    gen_set_pc_im(s, s->pc);
    s->base.is_jmp = DISAS_HVC;
}
1229
/* Generate code for SMC (secure monitor call).  */
static inline void gen_smc(DisasContext *s)
{
    /* As with HVC, we may take an exception either before or after
     * the insn executes.
     */
    TCGv_i32 tmp;

    gen_set_pc_im(s, s->pc - 4);
    tmp = tcg_const_i32(syn_aa32_smc());
    gen_helper_pre_smc(cpu_env, tmp);
    tcg_temp_free_i32(tmp);
    gen_set_pc_im(s, s->pc);
    s->base.is_jmp = DISAS_SMC;
}
1244
/* Raise a QEMU-internal exception 'excp' (not guest-visible), with
 * the PC wound back by 'offset' to point at the current instruction.
 */
static void gen_exception_internal_insn(DisasContext *s, int offset, int excp)
{
    gen_set_condexec(s);
    gen_set_pc_im(s, s->pc - offset);
    gen_exception_internal(excp);
    s->base.is_jmp = DISAS_NORETURN;
}
1252
/* Raise guest exception 'excp' with syndrome 'syn', targeting
 * exception level 'target_el', with the PC wound back by 'offset'
 * to point at the instruction that caused it.
 */
static void gen_exception_insn(DisasContext *s, int offset, int excp,
                               int syn, uint32_t target_el)
{
    gen_set_condexec(s);
    gen_set_pc_im(s, s->pc - offset);
    gen_exception(excp, syn, target_el);
    s->base.is_jmp = DISAS_NORETURN;
}
1261
/* Raise a breakpoint-style exception with syndrome 'syn' via the
 * helper, which determines the correct target EL at runtime.
 */
static void gen_exception_bkpt_insn(DisasContext *s, int offset, uint32_t syn)
{
    TCGv_i32 tcg_syn;

    gen_set_condexec(s);
    gen_set_pc_im(s, s->pc - offset);
    tcg_syn = tcg_const_i32(syn);
    gen_helper_exception_bkpt_insn(cpu_env, tcg_syn);
    tcg_temp_free_i32(tcg_syn);
    s->base.is_jmp = DISAS_NORETURN;
}
1273
/* Force a TB lookup after an instruction that changes the CPU state. */
static inline void gen_lookup_tb(DisasContext *s)
{
    tcg_gen_movi_i32(cpu_R[15], s->pc & ~1);
    /* DISAS_EXIT: return to the main loop so the new state is
     * observed before any further execution.
     */
    s->base.is_jmp = DISAS_EXIT;
}
1280
static inline void gen_hlt(DisasContext *s, int imm)
{
    /* HLT. This has two purposes.
     * Architecturally, it is an external halting debug instruction.
     * Since QEMU doesn't implement external debug, we treat this as
     * it is required for halting debug disabled: it will UNDEF.
     * Secondly, "HLT 0x3C" is a T32 semihosting trap instruction,
     * and "HLT 0xF000" is an A32 semihosting syscall. These traps
     * must trigger semihosting even for ARMv7 and earlier, where
     * HLT was an undefined encoding.
     * In system mode, we don't allow userspace access to
     * semihosting, to provide some semblance of security
     * (and for consistency with our 32-bit semihosting).
     */
    if (semihosting_enabled() &&
#ifndef CONFIG_USER_ONLY
        s->current_el != 0 &&
#endif
        /* The semihosting immediate depends on the encoding in use.  */
        (imm == (s->thumb ? 0x3c : 0xf000))) {
        gen_exception_internal_insn(s, 0, EXCP_SEMIHOST);
        return;
    }

    /* Not a semihosting trap: UNDEF, as for disabled halting debug.  */
    gen_exception_insn(s, s->thumb ? 2 : 4, EXCP_UDEF, syn_uncategorized(),
                       default_exception_el(s));
}
1307
pbrookb0109802008-03-31 03:47:03 +00001308static inline void gen_add_data_offset(DisasContext *s, unsigned int insn,
Peter Maydell39d54922013-05-23 12:59:55 +01001309 TCGv_i32 var)
bellard2c0262a2003-09-30 20:34:21 +00001310{
bellard1e8d4ee2004-12-08 23:40:14 +00001311 int val, rm, shift, shiftop;
Peter Maydell39d54922013-05-23 12:59:55 +01001312 TCGv_i32 offset;
bellard2c0262a2003-09-30 20:34:21 +00001313
1314 if (!(insn & (1 << 25))) {
1315 /* immediate */
1316 val = insn & 0xfff;
1317 if (!(insn & (1 << 23)))
1318 val = -val;
bellard537730b2004-02-22 13:40:57 +00001319 if (val != 0)
pbrookb0109802008-03-31 03:47:03 +00001320 tcg_gen_addi_i32(var, var, val);
bellard2c0262a2003-09-30 20:34:21 +00001321 } else {
1322 /* shift/register */
1323 rm = (insn) & 0xf;
1324 shift = (insn >> 7) & 0x1f;
bellard1e8d4ee2004-12-08 23:40:14 +00001325 shiftop = (insn >> 5) & 3;
pbrookb26eefb2008-03-31 03:44:26 +00001326 offset = load_reg(s, rm);
pbrook9a119ff2008-03-31 03:45:35 +00001327 gen_arm_shift_im(offset, shiftop, shift, 0);
bellard2c0262a2003-09-30 20:34:21 +00001328 if (!(insn & (1 << 23)))
pbrookb0109802008-03-31 03:47:03 +00001329 tcg_gen_sub_i32(var, var, offset);
bellard2c0262a2003-09-30 20:34:21 +00001330 else
pbrookb0109802008-03-31 03:47:03 +00001331 tcg_gen_add_i32(var, var, offset);
Peter Maydell7d1b0092011-03-06 21:39:54 +00001332 tcg_temp_free_i32(offset);
bellard2c0262a2003-09-30 20:34:21 +00001333 }
1334}
1335
pbrook191f9a92006-06-14 14:36:07 +00001336static inline void gen_add_datah_offset(DisasContext *s, unsigned int insn,
Peter Maydell39d54922013-05-23 12:59:55 +01001337 int extra, TCGv_i32 var)
bellard2c0262a2003-09-30 20:34:21 +00001338{
1339 int val, rm;
Peter Maydell39d54922013-05-23 12:59:55 +01001340 TCGv_i32 offset;
ths3b46e622007-09-17 08:09:54 +00001341
bellard2c0262a2003-09-30 20:34:21 +00001342 if (insn & (1 << 22)) {
1343 /* immediate */
1344 val = (insn & 0xf) | ((insn >> 4) & 0xf0);
1345 if (!(insn & (1 << 23)))
1346 val = -val;
pbrook18acad92007-02-14 20:17:03 +00001347 val += extra;
bellard537730b2004-02-22 13:40:57 +00001348 if (val != 0)
pbrookb0109802008-03-31 03:47:03 +00001349 tcg_gen_addi_i32(var, var, val);
bellard2c0262a2003-09-30 20:34:21 +00001350 } else {
1351 /* register */
pbrook191f9a92006-06-14 14:36:07 +00001352 if (extra)
pbrookb0109802008-03-31 03:47:03 +00001353 tcg_gen_addi_i32(var, var, extra);
bellard2c0262a2003-09-30 20:34:21 +00001354 rm = (insn) & 0xf;
pbrookb26eefb2008-03-31 03:44:26 +00001355 offset = load_reg(s, rm);
bellard2c0262a2003-09-30 20:34:21 +00001356 if (!(insn & (1 << 23)))
pbrookb0109802008-03-31 03:47:03 +00001357 tcg_gen_sub_i32(var, var, offset);
bellard2c0262a2003-09-30 20:34:21 +00001358 else
pbrookb0109802008-03-31 03:47:03 +00001359 tcg_gen_add_i32(var, var, offset);
Peter Maydell7d1b0092011-03-06 21:39:54 +00001360 tcg_temp_free_i32(offset);
bellard2c0262a2003-09-30 20:34:21 +00001361 }
1362}
1363
Peter Maydell5aaebd12011-05-25 15:16:10 +00001364static TCGv_ptr get_fpstatus_ptr(int neon)
1365{
1366 TCGv_ptr statusptr = tcg_temp_new_ptr();
1367 int offset;
1368 if (neon) {
Andreas Färber0ecb72a2012-03-14 01:38:21 +01001369 offset = offsetof(CPUARMState, vfp.standard_fp_status);
Peter Maydell5aaebd12011-05-25 15:16:10 +00001370 } else {
Andreas Färber0ecb72a2012-03-14 01:38:21 +01001371 offset = offsetof(CPUARMState, vfp.fp_status);
Peter Maydell5aaebd12011-05-25 15:16:10 +00001372 }
1373 tcg_gen_addi_ptr(statusptr, cpu_env, offset);
1374 return statusptr;
1375}
1376
/*
 * Emit a two-operand VFP arithmetic op: F0 := F0 <op> F1, in double
 * (dp != 0) or single precision, using the normal (non-Neon) FP status
 * for rounding and exception accumulation.
 */
#define VFP_OP2(name) \
static inline void gen_vfp_##name(int dp) \
{ \
    TCGv_ptr fpst = get_fpstatus_ptr(0); \
    if (dp) { \
        gen_helper_vfp_##name##d(cpu_F0d, cpu_F0d, cpu_F1d, fpst); \
    } else { \
        gen_helper_vfp_##name##s(cpu_F0s, cpu_F0s, cpu_F1s, fpst); \
    } \
    tcg_temp_free_ptr(fpst); \
}

VFP_OP2(add)
VFP_OP2(sub)
VFP_OP2(mul)
VFP_OP2(div)

#undef VFP_OP2
bellardb7bcbe92005-02-22 19:27:29 +00001395
/* F1 := F0 * F1, using the normal FP status (cf. gen_vfp_mul(), which
 * writes F0 instead). */
static inline void gen_vfp_F1_mul(int dp)
{
    /* Like gen_vfp_mul() but put result in F1 */
    TCGv_ptr fpst = get_fpstatus_ptr(0);
    if (dp) {
        gen_helper_vfp_muld(cpu_F1d, cpu_F0d, cpu_F1d, fpst);
    } else {
        gen_helper_vfp_muls(cpu_F1s, cpu_F0s, cpu_F1s, fpst);
    }
    tcg_temp_free_ptr(fpst);
}
1407
/* F1 := -F0 (cf. gen_vfp_neg(), which writes F0). */
static inline void gen_vfp_F1_neg(int dp)
{
    /* Like gen_vfp_neg() but put result in F1 */
    if (dp) {
        gen_helper_vfp_negd(cpu_F1d, cpu_F0d);
    } else {
        gen_helper_vfp_negs(cpu_F1s, cpu_F0s);
    }
}

/* F0 := abs(F0); no FP status needed (sign-bit operation). */
static inline void gen_vfp_abs(int dp)
{
    if (dp)
        gen_helper_vfp_absd(cpu_F0d, cpu_F0d);
    else
        gen_helper_vfp_abss(cpu_F0s, cpu_F0s);
}

/* F0 := -F0; no FP status needed (sign-bit operation). */
static inline void gen_vfp_neg(int dp)
{
    if (dp)
        gen_helper_vfp_negd(cpu_F0d, cpu_F0d);
    else
        gen_helper_vfp_negs(cpu_F0s, cpu_F0s);
}

/* F0 := sqrt(F0); the helper takes cpu_env for FP status access. */
static inline void gen_vfp_sqrt(int dp)
{
    if (dp)
        gen_helper_vfp_sqrtd(cpu_F0d, cpu_F0d, cpu_env);
    else
        gen_helper_vfp_sqrts(cpu_F0s, cpu_F0s, cpu_env);
}

/* Compare F0 with F1 via the cmp helper (results land in CPU state). */
static inline void gen_vfp_cmp(int dp)
{
    if (dp)
        gen_helper_vfp_cmpd(cpu_F0d, cpu_F1d, cpu_env);
    else
        gen_helper_vfp_cmps(cpu_F0s, cpu_F1s, cpu_env);
}

/* As gen_vfp_cmp() but the 'e' (exception-raising) compare variant. */
static inline void gen_vfp_cmpe(int dp)
{
    if (dp)
        gen_helper_vfp_cmped(cpu_F0d, cpu_F1d, cpu_env);
    else
        gen_helper_vfp_cmpes(cpu_F0s, cpu_F1s, cpu_env);
}

/* Load +0.0 into F1 (used for compare-with-zero forms). */
static inline void gen_vfp_F1_ld0(int dp)
{
    if (dp)
        tcg_gen_movi_i64(cpu_F1d, 0);
    else
        tcg_gen_movi_i32(cpu_F1s, 0);
}
1465
/*
 * Integer-to-float conversion: the 32-bit source is always F0s; the
 * result goes to F0d (dp) or F0s.  neon selects the standard vs normal
 * FP status for rounding/exceptions.
 */
#define VFP_GEN_ITOF(name) \
static inline void gen_vfp_##name(int dp, int neon) \
{ \
    TCGv_ptr statusptr = get_fpstatus_ptr(neon); \
    if (dp) { \
        gen_helper_vfp_##name##d(cpu_F0d, cpu_F0s, statusptr); \
    } else { \
        gen_helper_vfp_##name##s(cpu_F0s, cpu_F0s, statusptr); \
    } \
    tcg_temp_free_ptr(statusptr); \
}

VFP_GEN_ITOF(uito)
VFP_GEN_ITOF(sito)
#undef VFP_GEN_ITOF
1481
/*
 * Float-to-integer conversion: the 32-bit result always lands in F0s;
 * the source is F0d (dp) or F0s.  The 'z' helper variants are the
 * round-toward-zero forms; the others use the status rounding mode.
 */
#define VFP_GEN_FTOI(name) \
static inline void gen_vfp_##name(int dp, int neon) \
{ \
    TCGv_ptr statusptr = get_fpstatus_ptr(neon); \
    if (dp) { \
        gen_helper_vfp_##name##d(cpu_F0s, cpu_F0d, statusptr); \
    } else { \
        gen_helper_vfp_##name##s(cpu_F0s, cpu_F0s, statusptr); \
    } \
    tcg_temp_free_ptr(statusptr); \
}

VFP_GEN_FTOI(toui)
VFP_GEN_FTOI(touiz)
VFP_GEN_FTOI(tosi)
VFP_GEN_FTOI(tosiz)
#undef VFP_GEN_FTOI
pbrook4373f3c2008-03-31 03:47:19 +00001499
/*
 * Fixed-point conversions: F0 := helper(F0, shift), where shift gives
 * the fixed-point position and `round` is the helper-name suffix that
 * selects rounding behaviour (_round_to_zero for the to-fixed forms,
 * empty for the from-fixed forms).
 */
#define VFP_GEN_FIX(name, round) \
static inline void gen_vfp_##name(int dp, int shift, int neon) \
{ \
    TCGv_i32 tmp_shift = tcg_const_i32(shift); \
    TCGv_ptr statusptr = get_fpstatus_ptr(neon); \
    if (dp) { \
        gen_helper_vfp_##name##d##round(cpu_F0d, cpu_F0d, tmp_shift, \
                                        statusptr); \
    } else { \
        gen_helper_vfp_##name##s##round(cpu_F0s, cpu_F0s, tmp_shift, \
                                        statusptr); \
    } \
    tcg_temp_free_i32(tmp_shift); \
    tcg_temp_free_ptr(statusptr); \
}
VFP_GEN_FIX(tosh, _round_to_zero)
VFP_GEN_FIX(tosl, _round_to_zero)
VFP_GEN_FIX(touh, _round_to_zero)
VFP_GEN_FIX(toul, _round_to_zero)
VFP_GEN_FIX(shto, )
VFP_GEN_FIX(slto, )
VFP_GEN_FIX(uhto, )
VFP_GEN_FIX(ulto, )
#undef VFP_GEN_FIX
1524
/* Load F0 from memory at addr: 64-bit if dp, else 32-bit. */
static inline void gen_vfp_ld(DisasContext *s, int dp, TCGv_i32 addr)
{
    if (dp) {
        gen_aa32_ld64(s, cpu_F0d, addr, get_mem_index(s));
    } else {
        gen_aa32_ld32u(s, cpu_F0s, addr, get_mem_index(s));
    }
}

/* Store F0 to memory at addr: 64-bit if dp, else 32-bit. */
static inline void gen_vfp_st(DisasContext *s, int dp, TCGv_i32 addr)
{
    if (dp) {
        gen_aa32_st64(s, cpu_F0d, addr, get_mem_index(s));
    } else {
        gen_aa32_st32(s, cpu_F0s, addr, get_mem_index(s));
    }
}
1542
Richard Hendersonc39c2b92018-02-09 10:40:31 +00001543static inline long vfp_reg_offset(bool dp, unsigned reg)
bellard8e960052005-04-07 19:42:46 +00001544{
Richard Henderson9a2b5252018-01-25 11:45:29 +00001545 if (dp) {
Richard Hendersonc39c2b92018-02-09 10:40:31 +00001546 return offsetof(CPUARMState, vfp.zregs[reg >> 1].d[reg & 1]);
bellard8e960052005-04-07 19:42:46 +00001547 } else {
Richard Hendersonc39c2b92018-02-09 10:40:31 +00001548 long ofs = offsetof(CPUARMState, vfp.zregs[reg >> 2].d[(reg >> 1) & 1]);
Richard Henderson9a2b5252018-01-25 11:45:29 +00001549 if (reg & 1) {
1550 ofs += offsetof(CPU_DoubleU, l.upper);
1551 } else {
1552 ofs += offsetof(CPU_DoubleU, l.lower);
1553 }
1554 return ofs;
bellard8e960052005-04-07 19:42:46 +00001555 }
1556}
pbrook9ee6e8b2007-11-11 00:04:49 +00001557
/* Return the offset of a 32-bit piece of a NEON register.
   zero is the least significant end of the register.  Each D register
   provides two such pieces, so piece n of Dreg maps to S register
   reg * 2 + n. */
static inline long
neon_reg_offset (int reg, int n)
{
    return vfp_reg_offset(0, reg * 2 + n);
}
1567
/* Return the offset of a 2**SIZE piece of a NEON register, at index ELE,
 * where 0 is the least significant end of the register.
 * SIZE is log2 of the element width in bytes (TCGMemOp MO_8..MO_64).
 */
static inline long
neon_element_offset(int reg, int element, TCGMemOp size)
{
    int element_size = 1 << size;
    int ofs = element * element_size;
#ifdef HOST_WORDS_BIGENDIAN
    /* Calculate the offset assuming fully little-endian,
     * then XOR to account for the order of the 8-byte units.
     * (Sub-doubleword elements are stored byte-swapped within each
     * 8-byte unit on big-endian hosts.)
     */
    if (element_size < 8) {
        ofs ^= 8 - element_size;
    }
#endif
    return neon_reg_offset(reg, 0) + ofs;
}
1586
/* Load 32-bit piece (reg, pass) of a NEON register into a fresh temp;
 * the caller owns (and must free) the returned temp. */
static TCGv_i32 neon_load_reg(int reg, int pass)
{
    TCGv_i32 tmp = tcg_temp_new_i32();
    tcg_gen_ld_i32(tmp, cpu_env, neon_reg_offset(reg, pass));
    return tmp;
}
1593
/* Load element ELE of a NEON register into 32-bit VAR, zero-extending
 * sub-word elements.  mop gives the element size (and must be an
 * unsigned/plain load op). */
static void neon_load_element(TCGv_i32 var, int reg, int ele, TCGMemOp mop)
{
    long offset = neon_element_offset(reg, ele, mop & MO_SIZE);

    switch (mop) {
    case MO_UB:
        tcg_gen_ld8u_i32(var, cpu_env, offset);
        break;
    case MO_UW:
        tcg_gen_ld16u_i32(var, cpu_env, offset);
        break;
    case MO_UL:
        tcg_gen_ld_i32(var, cpu_env, offset);
        break;
    default:
        g_assert_not_reached();
    }
}

/* As neon_load_element(), but into a 64-bit VAR (also handles MO_Q). */
static void neon_load_element64(TCGv_i64 var, int reg, int ele, TCGMemOp mop)
{
    long offset = neon_element_offset(reg, ele, mop & MO_SIZE);

    switch (mop) {
    case MO_UB:
        tcg_gen_ld8u_i64(var, cpu_env, offset);
        break;
    case MO_UW:
        tcg_gen_ld16u_i64(var, cpu_env, offset);
        break;
    case MO_UL:
        tcg_gen_ld32u_i64(var, cpu_env, offset);
        break;
    case MO_Q:
        tcg_gen_ld_i64(var, cpu_env, offset);
        break;
    default:
        g_assert_not_reached();
    }
}
1634
/* Store VAR into 32-bit piece (reg, pass) of a NEON register;
 * consumes (frees) VAR. */
static void neon_store_reg(int reg, int pass, TCGv_i32 var)
{
    tcg_gen_st_i32(var, cpu_env, neon_reg_offset(reg, pass));
    tcg_temp_free_i32(var);
}
1640
/* Store the low 2**SIZE bytes of 32-bit VAR into element ELE of a NEON
 * register.  Does NOT free VAR. */
static void neon_store_element(int reg, int ele, TCGMemOp size, TCGv_i32 var)
{
    long offset = neon_element_offset(reg, ele, size);

    switch (size) {
    case MO_8:
        tcg_gen_st8_i32(var, cpu_env, offset);
        break;
    case MO_16:
        tcg_gen_st16_i32(var, cpu_env, offset);
        break;
    case MO_32:
        tcg_gen_st_i32(var, cpu_env, offset);
        break;
    default:
        g_assert_not_reached();
    }
}

/* As neon_store_element(), but from a 64-bit VAR (also handles MO_64). */
static void neon_store_element64(int reg, int ele, TCGMemOp size, TCGv_i64 var)
{
    long offset = neon_element_offset(reg, ele, size);

    switch (size) {
    case MO_8:
        tcg_gen_st8_i64(var, cpu_env, offset);
        break;
    case MO_16:
        tcg_gen_st16_i64(var, cpu_env, offset);
        break;
    case MO_32:
        tcg_gen_st32_i64(var, cpu_env, offset);
        break;
    case MO_64:
        tcg_gen_st_i64(var, cpu_env, offset);
        break;
    default:
        g_assert_not_reached();
    }
}
1681
/* Load the whole 64-bit D register `reg` into VAR. */
static inline void neon_load_reg64(TCGv_i64 var, int reg)
{
    tcg_gen_ld_i64(var, cpu_env, vfp_reg_offset(1, reg));
}

/* Store VAR into the whole 64-bit D register `reg` (VAR not freed). */
static inline void neon_store_reg64(TCGv_i64 var, int reg)
{
    tcg_gen_st_i64(var, cpu_env, vfp_reg_offset(1, reg));
}

/* Return a fresh ptr temp addressing VFP register `reg` in CPU state;
 * caller frees the returned temp. */
static TCGv_ptr vfp_reg_ptr(bool dp, int reg)
{
    TCGv_ptr ret = tcg_temp_new_ptr();
    tcg_gen_addi_ptr(ret, cpu_env, vfp_reg_offset(dp, reg));
    return ret;
}
1698
/* Float load/store aliases: same TCG ops, clearer intent at call sites. */
#define tcg_gen_ld_f32 tcg_gen_ld_i32
#define tcg_gen_ld_f64 tcg_gen_ld_i64
#define tcg_gen_st_f32 tcg_gen_st_i32
#define tcg_gen_st_f64 tcg_gen_st_i64

/* F0 := VFP register `reg` (double precision if dp, else single). */
static inline void gen_mov_F0_vreg(int dp, int reg)
{
    if (dp)
        tcg_gen_ld_f64(cpu_F0d, cpu_env, vfp_reg_offset(dp, reg));
    else
        tcg_gen_ld_f32(cpu_F0s, cpu_env, vfp_reg_offset(dp, reg));
}

/* F1 := VFP register `reg`. */
static inline void gen_mov_F1_vreg(int dp, int reg)
{
    if (dp)
        tcg_gen_ld_f64(cpu_F1d, cpu_env, vfp_reg_offset(dp, reg));
    else
        tcg_gen_ld_f32(cpu_F1s, cpu_env, vfp_reg_offset(dp, reg));
}

/* VFP register `reg` := F0. */
static inline void gen_mov_vreg_F0(int dp, int reg)
{
    if (dp)
        tcg_gen_st_f64(cpu_F0d, cpu_env, vfp_reg_offset(dp, reg));
    else
        tcg_gen_st_f32(cpu_F0s, cpu_env, vfp_reg_offset(dp, reg));
}
1727
/* Bit 20 of a coprocessor instruction encoding: set for the
 * register-read/load direction (tested throughout the iwMMXt decoder). */
#define ARM_CP_RW_BIT (1 << 20)

/* Include the VFP decoder */
#include "translate-vfp.inc.c"
1732
/* VAR := iwMMXt 64-bit data register wRn. */
static inline void iwmmxt_load_reg(TCGv_i64 var, int reg)
{
    tcg_gen_ld_i64(var, cpu_env, offsetof(CPUARMState, iwmmxt.regs[reg]));
}

/* iwMMXt 64-bit data register wRn := VAR. */
static inline void iwmmxt_store_reg(TCGv_i64 var, int reg)
{
    tcg_gen_st_i64(var, cpu_env, offsetof(CPUARMState, iwmmxt.regs[reg]));
}

/* Load iwMMXt 32-bit control register wCx into a fresh temp;
 * caller owns (and must free) the returned temp. */
static inline TCGv_i32 iwmmxt_load_creg(int reg)
{
    TCGv_i32 var = tcg_temp_new_i32();
    tcg_gen_ld_i32(var, cpu_env, offsetof(CPUARMState, iwmmxt.cregs[reg]));
    return var;
}

/* iwMMXt control register wCx := VAR; consumes (frees) VAR. */
static inline void iwmmxt_store_creg(int reg, TCGv_i32 var)
{
    tcg_gen_st_i32(var, cpu_env, offsetof(CPUARMState, iwmmxt.cregs[reg]));
    tcg_temp_free_i32(var);
}
1755
/* wRn := M0 (M0 is the iwMMXt working accumulator temp). */
static inline void gen_op_iwmmxt_movq_wRn_M0(int rn)
{
    iwmmxt_store_reg(cpu_M0, rn);
}

/* M0 := wRn */
static inline void gen_op_iwmmxt_movq_M0_wRn(int rn)
{
    iwmmxt_load_reg(cpu_M0, rn);
}

/* M0 := M0 | wRn (uses cpu_V1 as scratch) */
static inline void gen_op_iwmmxt_orq_M0_wRn(int rn)
{
    iwmmxt_load_reg(cpu_V1, rn);
    tcg_gen_or_i64(cpu_M0, cpu_M0, cpu_V1);
}

/* M0 := M0 & wRn */
static inline void gen_op_iwmmxt_andq_M0_wRn(int rn)
{
    iwmmxt_load_reg(cpu_V1, rn);
    tcg_gen_and_i64(cpu_M0, cpu_M0, cpu_V1);
}

/* M0 := M0 ^ wRn */
static inline void gen_op_iwmmxt_xorq_M0_wRn(int rn)
{
    iwmmxt_load_reg(cpu_V1, rn);
    tcg_gen_xor_i64(cpu_M0, cpu_M0, cpu_V1);
}
1783
/*
 * Generate gen_op_iwmmxt_<name>_M0_wRn(): M0 := helper(M0, wRn).
 * The _ENV variants additionally pass cpu_env so the helper can access
 * CPU state; _ENV_SIZE expands byte/word/long (b/w/l) element-size
 * forms; _ENV1 is the one-operand form M0 := helper(M0).
 */
#define IWMMXT_OP(name) \
static inline void gen_op_iwmmxt_##name##_M0_wRn(int rn) \
{ \
    iwmmxt_load_reg(cpu_V1, rn); \
    gen_helper_iwmmxt_##name(cpu_M0, cpu_M0, cpu_V1); \
}

#define IWMMXT_OP_ENV(name) \
static inline void gen_op_iwmmxt_##name##_M0_wRn(int rn) \
{ \
    iwmmxt_load_reg(cpu_V1, rn); \
    gen_helper_iwmmxt_##name(cpu_M0, cpu_env, cpu_M0, cpu_V1); \
}

#define IWMMXT_OP_ENV_SIZE(name) \
IWMMXT_OP_ENV(name##b) \
IWMMXT_OP_ENV(name##w) \
IWMMXT_OP_ENV(name##l)

#define IWMMXT_OP_ENV1(name) \
static inline void gen_op_iwmmxt_##name##_M0(void) \
{ \
    gen_helper_iwmmxt_##name(cpu_M0, cpu_env, cpu_M0); \
}
1808
/* Multiply / multiply-accumulate / sum-of-absolute-differences ops:
 * these helpers take no cpu_env (operate purely on the operands). */
IWMMXT_OP(maddsq)
IWMMXT_OP(madduq)
IWMMXT_OP(sadb)
IWMMXT_OP(sadw)
IWMMXT_OP(mulslw)
IWMMXT_OP(mulshw)
IWMMXT_OP(mululw)
IWMMXT_OP(muluhw)
IWMMXT_OP(macsw)
IWMMXT_OP(macuw)

/* Two-operand unpacks, per element size (b/w/l). */
IWMMXT_OP_ENV_SIZE(unpackl)
IWMMXT_OP_ENV_SIZE(unpackh)

/* One-operand unpacks: l/h half, u/s extension, b/w/l element size. */
IWMMXT_OP_ENV1(unpacklub)
IWMMXT_OP_ENV1(unpackluw)
IWMMXT_OP_ENV1(unpacklul)
IWMMXT_OP_ENV1(unpackhub)
IWMMXT_OP_ENV1(unpackhuw)
IWMMXT_OP_ENV1(unpackhul)
IWMMXT_OP_ENV1(unpacklsb)
IWMMXT_OP_ENV1(unpacklsw)
IWMMXT_OP_ENV1(unpacklsl)
IWMMXT_OP_ENV1(unpackhsb)
IWMMXT_OP_ENV1(unpackhsw)
IWMMXT_OP_ENV1(unpackhsl)

/* Element-wise compares: equal, greater-than unsigned/signed. */
IWMMXT_OP_ENV_SIZE(cmpeq)
IWMMXT_OP_ENV_SIZE(cmpgtu)
IWMMXT_OP_ENV_SIZE(cmpgts)

/* Element-wise min/max, signed and unsigned. */
IWMMXT_OP_ENV_SIZE(mins)
IWMMXT_OP_ENV_SIZE(minu)
IWMMXT_OP_ENV_SIZE(maxs)
IWMMXT_OP_ENV_SIZE(maxu)

/* Element-wise add/subtract variants (n/u/s suffix per the helpers). */
IWMMXT_OP_ENV_SIZE(subn)
IWMMXT_OP_ENV_SIZE(addn)
IWMMXT_OP_ENV_SIZE(subu)
IWMMXT_OP_ENV_SIZE(addu)
IWMMXT_OP_ENV_SIZE(subs)
IWMMXT_OP_ENV_SIZE(adds)

/* Byte/word averaging, two variants each. */
IWMMXT_OP_ENV(avgb0)
IWMMXT_OP_ENV(avgb1)
IWMMXT_OP_ENV(avgw0)
IWMMXT_OP_ENV(avgw1)

/* Pack ops, unsigned/signed, by destination element size. */
IWMMXT_OP_ENV(packuw)
IWMMXT_OP_ENV(packul)
IWMMXT_OP_ENV(packuq)
IWMMXT_OP_ENV(packsw)
IWMMXT_OP_ENV(packsl)
IWMMXT_OP_ENV(packsq)
pbrooke6771372008-03-31 03:49:05 +00001863
/* Set bit 1 (the MUP flag) of the wCon control register. */
static void gen_op_iwmmxt_set_mup(void)
{
    TCGv_i32 tmp;
    tmp = load_cpu_field(iwmmxt.cregs[ARM_IWMMXT_wCon]);
    tcg_gen_ori_i32(tmp, tmp, 2);
    store_cpu_field(tmp, iwmmxt.cregs[ARM_IWMMXT_wCon]);
}

/* Set bit 0 (the CUP flag) of the wCon control register. */
static void gen_op_iwmmxt_set_cup(void)
{
    TCGv_i32 tmp;
    tmp = load_cpu_field(iwmmxt.cregs[ARM_IWMMXT_wCon]);
    tcg_gen_ori_i32(tmp, tmp, 1);
    store_cpu_field(tmp, iwmmxt.cregs[ARM_IWMMXT_wCon]);
}

/* Compute N/Z status from M0 via helper and store into wCASF. */
static void gen_op_iwmmxt_setpsr_nz(void)
{
    TCGv_i32 tmp = tcg_temp_new_i32();
    gen_helper_iwmmxt_setpsr_nz(tmp, cpu_M0);
    store_cpu_field(tmp, iwmmxt.cregs[ARM_IWMMXT_wCASF]);
}
1886
/* M0 := M0 + zero_extend32(wRn): add only the low 32 bits of wRn. */
static inline void gen_op_iwmmxt_addl_M0_wRn(int rn)
{
    iwmmxt_load_reg(cpu_V1, rn);
    tcg_gen_ext32u_i64(cpu_V1, cpu_V1);
    tcg_gen_add_i64(cpu_M0, cpu_M0, cpu_V1);
}
1893
/*
 * Compute the effective address for an iwMMXt load/store into DEST,
 * performing base-register writeback where the encoding requests it.
 * insn[19:16] = base register Rn; the 8-bit immediate offset is scaled
 * by 4 when insn[8] is set; insn[24] = P (pre-index), insn[23] = U
 * (add/subtract), insn[21] = W (writeback).
 * Returns 1 for an unhandled addressing form (caller UNDEFs), else 0.
 */
static inline int gen_iwmmxt_address(DisasContext *s, uint32_t insn,
                                     TCGv_i32 dest)
{
    int rd;
    uint32_t offset;
    TCGv_i32 tmp;

    rd = (insn >> 16) & 0xf;
    tmp = load_reg(s, rd);

    /* imm8, shifted left by 2 when insn[8] is set ((insn >> 7) & 2). */
    offset = (insn & 0xff) << ((insn >> 7) & 2);
    if (insn & (1 << 24)) {
        /* Pre indexed */
        if (insn & (1 << 23))
            tcg_gen_addi_i32(tmp, tmp, offset);
        else
            tcg_gen_addi_i32(tmp, tmp, -offset);
        tcg_gen_mov_i32(dest, tmp);
        if (insn & (1 << 21))
            store_reg(s, rd, tmp);   /* writeback the updated base */
        else
            tcg_temp_free_i32(tmp);
    } else if (insn & (1 << 21)) {
        /* Post indexed */
        tcg_gen_mov_i32(dest, tmp);
        if (insn & (1 << 23))
            tcg_gen_addi_i32(tmp, tmp, offset);
        else
            tcg_gen_addi_i32(tmp, tmp, -offset);
        store_reg(s, rd, tmp);
    } else if (!(insn & (1 << 23)))
        return 1;
    /* NOTE(review): the P=0,W=0,U=1 case falls through to here and
     * returns 0 without writing DEST or freeing tmp -- presumably that
     * encoding never reaches this function; confirm against the decoder. */
    return 0;
}
1928
Peter Maydell39d54922013-05-23 12:59:55 +01001929static inline int gen_iwmmxt_shift(uint32_t insn, uint32_t mask, TCGv_i32 dest)
balrog18c9b562007-04-30 02:02:17 +00001930{
1931 int rd = (insn >> 0) & 0xf;
Peter Maydell39d54922013-05-23 12:59:55 +01001932 TCGv_i32 tmp;
balrog18c9b562007-04-30 02:02:17 +00001933
Filip Navarada6b5332009-10-15 14:39:02 +02001934 if (insn & (1 << 8)) {
1935 if (rd < ARM_IWMMXT_wCGR0 || rd > ARM_IWMMXT_wCGR3) {
balrog18c9b562007-04-30 02:02:17 +00001936 return 1;
Filip Navarada6b5332009-10-15 14:39:02 +02001937 } else {
1938 tmp = iwmmxt_load_creg(rd);
1939 }
1940 } else {
Peter Maydell7d1b0092011-03-06 21:39:54 +00001941 tmp = tcg_temp_new_i32();
Filip Navarada6b5332009-10-15 14:39:02 +02001942 iwmmxt_load_reg(cpu_V0, rd);
Richard Hendersonecc7b3a2015-07-24 11:49:53 -07001943 tcg_gen_extrl_i64_i32(tmp, cpu_V0);
Filip Navarada6b5332009-10-15 14:39:02 +02001944 }
1945 tcg_gen_andi_i32(tmp, tmp, mask);
1946 tcg_gen_mov_i32(dest, tmp);
Peter Maydell7d1b0092011-03-06 21:39:54 +00001947 tcg_temp_free_i32(tmp);
balrog18c9b562007-04-30 02:02:17 +00001948 return 0;
1949}
1950
Stefan Weila1c72732011-04-28 17:20:38 +02001951/* Disassemble an iwMMXt instruction. Returns nonzero if an error occurred
balrog18c9b562007-04-30 02:02:17 +00001952 (ie. an undefined instruction). */
Peter Maydell7dcc1f82014-10-28 19:24:03 +00001953static int disas_iwmmxt_insn(DisasContext *s, uint32_t insn)
balrog18c9b562007-04-30 02:02:17 +00001954{
1955 int rd, wrd;
1956 int rdhi, rdlo, rd0, rd1, i;
Peter Maydell39d54922013-05-23 12:59:55 +01001957 TCGv_i32 addr;
1958 TCGv_i32 tmp, tmp2, tmp3;
balrog18c9b562007-04-30 02:02:17 +00001959
1960 if ((insn & 0x0e000e00) == 0x0c000000) {
1961 if ((insn & 0x0fe00ff0) == 0x0c400000) {
1962 wrd = insn & 0xf;
1963 rdlo = (insn >> 12) & 0xf;
1964 rdhi = (insn >> 16) & 0xf;
Peter Maydelld00584b2018-08-24 13:17:47 +01001965 if (insn & ARM_CP_RW_BIT) { /* TMRRC */
Filip Navarada6b5332009-10-15 14:39:02 +02001966 iwmmxt_load_reg(cpu_V0, wrd);
Richard Hendersonecc7b3a2015-07-24 11:49:53 -07001967 tcg_gen_extrl_i64_i32(cpu_R[rdlo], cpu_V0);
Filip Navarada6b5332009-10-15 14:39:02 +02001968 tcg_gen_shri_i64(cpu_V0, cpu_V0, 32);
Richard Hendersonecc7b3a2015-07-24 11:49:53 -07001969 tcg_gen_extrl_i64_i32(cpu_R[rdhi], cpu_V0);
Peter Maydelld00584b2018-08-24 13:17:47 +01001970 } else { /* TMCRR */
Filip Navarada6b5332009-10-15 14:39:02 +02001971 tcg_gen_concat_i32_i64(cpu_V0, cpu_R[rdlo], cpu_R[rdhi]);
1972 iwmmxt_store_reg(cpu_V0, wrd);
balrog18c9b562007-04-30 02:02:17 +00001973 gen_op_iwmmxt_set_mup();
1974 }
1975 return 0;
1976 }
1977
1978 wrd = (insn >> 12) & 0xf;
Peter Maydell7d1b0092011-03-06 21:39:54 +00001979 addr = tcg_temp_new_i32();
Filip Navarada6b5332009-10-15 14:39:02 +02001980 if (gen_iwmmxt_address(s, insn, addr)) {
Peter Maydell7d1b0092011-03-06 21:39:54 +00001981 tcg_temp_free_i32(addr);
balrog18c9b562007-04-30 02:02:17 +00001982 return 1;
Filip Navarada6b5332009-10-15 14:39:02 +02001983 }
balrog18c9b562007-04-30 02:02:17 +00001984 if (insn & ARM_CP_RW_BIT) {
Peter Maydelld00584b2018-08-24 13:17:47 +01001985 if ((insn >> 28) == 0xf) { /* WLDRW wCx */
Peter Maydell7d1b0092011-03-06 21:39:54 +00001986 tmp = tcg_temp_new_i32();
Paolo Bonzini12dcc322016-03-04 11:30:20 +00001987 gen_aa32_ld32u(s, tmp, addr, get_mem_index(s));
Filip Navarada6b5332009-10-15 14:39:02 +02001988 iwmmxt_store_creg(wrd, tmp);
balrog18c9b562007-04-30 02:02:17 +00001989 } else {
pbrooke6771372008-03-31 03:49:05 +00001990 i = 1;
1991 if (insn & (1 << 8)) {
Peter Maydelld00584b2018-08-24 13:17:47 +01001992 if (insn & (1 << 22)) { /* WLDRD */
Paolo Bonzini12dcc322016-03-04 11:30:20 +00001993 gen_aa32_ld64(s, cpu_M0, addr, get_mem_index(s));
pbrooke6771372008-03-31 03:49:05 +00001994 i = 0;
Peter Maydelld00584b2018-08-24 13:17:47 +01001995 } else { /* WLDRW wRd */
Peter Maydell29531142013-05-23 12:59:57 +01001996 tmp = tcg_temp_new_i32();
Paolo Bonzini12dcc322016-03-04 11:30:20 +00001997 gen_aa32_ld32u(s, tmp, addr, get_mem_index(s));
pbrooke6771372008-03-31 03:49:05 +00001998 }
1999 } else {
Peter Maydell29531142013-05-23 12:59:57 +01002000 tmp = tcg_temp_new_i32();
Peter Maydelld00584b2018-08-24 13:17:47 +01002001 if (insn & (1 << 22)) { /* WLDRH */
Paolo Bonzini12dcc322016-03-04 11:30:20 +00002002 gen_aa32_ld16u(s, tmp, addr, get_mem_index(s));
Peter Maydelld00584b2018-08-24 13:17:47 +01002003 } else { /* WLDRB */
Paolo Bonzini12dcc322016-03-04 11:30:20 +00002004 gen_aa32_ld8u(s, tmp, addr, get_mem_index(s));
pbrooke6771372008-03-31 03:49:05 +00002005 }
2006 }
2007 if (i) {
2008 tcg_gen_extu_i32_i64(cpu_M0, tmp);
Peter Maydell7d1b0092011-03-06 21:39:54 +00002009 tcg_temp_free_i32(tmp);
pbrooke6771372008-03-31 03:49:05 +00002010 }
balrog18c9b562007-04-30 02:02:17 +00002011 gen_op_iwmmxt_movq_wRn_M0(wrd);
2012 }
2013 } else {
Peter Maydelld00584b2018-08-24 13:17:47 +01002014 if ((insn >> 28) == 0xf) { /* WSTRW wCx */
Filip Navarada6b5332009-10-15 14:39:02 +02002015 tmp = iwmmxt_load_creg(wrd);
Paolo Bonzini12dcc322016-03-04 11:30:20 +00002016 gen_aa32_st32(s, tmp, addr, get_mem_index(s));
balrog18c9b562007-04-30 02:02:17 +00002017 } else {
2018 gen_op_iwmmxt_movq_M0_wRn(wrd);
Peter Maydell7d1b0092011-03-06 21:39:54 +00002019 tmp = tcg_temp_new_i32();
pbrooke6771372008-03-31 03:49:05 +00002020 if (insn & (1 << 8)) {
Peter Maydelld00584b2018-08-24 13:17:47 +01002021 if (insn & (1 << 22)) { /* WSTRD */
Paolo Bonzini12dcc322016-03-04 11:30:20 +00002022 gen_aa32_st64(s, cpu_M0, addr, get_mem_index(s));
Peter Maydelld00584b2018-08-24 13:17:47 +01002023 } else { /* WSTRW wRd */
Richard Hendersonecc7b3a2015-07-24 11:49:53 -07002024 tcg_gen_extrl_i64_i32(tmp, cpu_M0);
Paolo Bonzini12dcc322016-03-04 11:30:20 +00002025 gen_aa32_st32(s, tmp, addr, get_mem_index(s));
pbrooke6771372008-03-31 03:49:05 +00002026 }
2027 } else {
Peter Maydelld00584b2018-08-24 13:17:47 +01002028 if (insn & (1 << 22)) { /* WSTRH */
Richard Hendersonecc7b3a2015-07-24 11:49:53 -07002029 tcg_gen_extrl_i64_i32(tmp, cpu_M0);
Paolo Bonzini12dcc322016-03-04 11:30:20 +00002030 gen_aa32_st16(s, tmp, addr, get_mem_index(s));
Peter Maydelld00584b2018-08-24 13:17:47 +01002031 } else { /* WSTRB */
Richard Hendersonecc7b3a2015-07-24 11:49:53 -07002032 tcg_gen_extrl_i64_i32(tmp, cpu_M0);
Paolo Bonzini12dcc322016-03-04 11:30:20 +00002033 gen_aa32_st8(s, tmp, addr, get_mem_index(s));
pbrooke6771372008-03-31 03:49:05 +00002034 }
2035 }
balrog18c9b562007-04-30 02:02:17 +00002036 }
Peter Maydell29531142013-05-23 12:59:57 +01002037 tcg_temp_free_i32(tmp);
balrog18c9b562007-04-30 02:02:17 +00002038 }
Peter Maydell7d1b0092011-03-06 21:39:54 +00002039 tcg_temp_free_i32(addr);
balrog18c9b562007-04-30 02:02:17 +00002040 return 0;
2041 }
2042
2043 if ((insn & 0x0f000000) != 0x0e000000)
2044 return 1;
2045
2046 switch (((insn >> 12) & 0xf00) | ((insn >> 4) & 0xff)) {
Peter Maydelld00584b2018-08-24 13:17:47 +01002047 case 0x000: /* WOR */
balrog18c9b562007-04-30 02:02:17 +00002048 wrd = (insn >> 12) & 0xf;
2049 rd0 = (insn >> 0) & 0xf;
2050 rd1 = (insn >> 16) & 0xf;
2051 gen_op_iwmmxt_movq_M0_wRn(rd0);
2052 gen_op_iwmmxt_orq_M0_wRn(rd1);
2053 gen_op_iwmmxt_setpsr_nz();
2054 gen_op_iwmmxt_movq_wRn_M0(wrd);
2055 gen_op_iwmmxt_set_mup();
2056 gen_op_iwmmxt_set_cup();
2057 break;
Peter Maydelld00584b2018-08-24 13:17:47 +01002058 case 0x011: /* TMCR */
balrog18c9b562007-04-30 02:02:17 +00002059 if (insn & 0xf)
2060 return 1;
2061 rd = (insn >> 12) & 0xf;
2062 wrd = (insn >> 16) & 0xf;
2063 switch (wrd) {
2064 case ARM_IWMMXT_wCID:
2065 case ARM_IWMMXT_wCASF:
2066 break;
2067 case ARM_IWMMXT_wCon:
2068 gen_op_iwmmxt_set_cup();
2069 /* Fall through. */
2070 case ARM_IWMMXT_wCSSF:
Filip Navarada6b5332009-10-15 14:39:02 +02002071 tmp = iwmmxt_load_creg(wrd);
2072 tmp2 = load_reg(s, rd);
Aurelien Jarnof669df22009-10-15 16:45:14 +02002073 tcg_gen_andc_i32(tmp, tmp, tmp2);
Peter Maydell7d1b0092011-03-06 21:39:54 +00002074 tcg_temp_free_i32(tmp2);
Filip Navarada6b5332009-10-15 14:39:02 +02002075 iwmmxt_store_creg(wrd, tmp);
balrog18c9b562007-04-30 02:02:17 +00002076 break;
2077 case ARM_IWMMXT_wCGR0:
2078 case ARM_IWMMXT_wCGR1:
2079 case ARM_IWMMXT_wCGR2:
2080 case ARM_IWMMXT_wCGR3:
2081 gen_op_iwmmxt_set_cup();
Filip Navarada6b5332009-10-15 14:39:02 +02002082 tmp = load_reg(s, rd);
2083 iwmmxt_store_creg(wrd, tmp);
balrog18c9b562007-04-30 02:02:17 +00002084 break;
2085 default:
2086 return 1;
2087 }
2088 break;
Peter Maydelld00584b2018-08-24 13:17:47 +01002089 case 0x100: /* WXOR */
balrog18c9b562007-04-30 02:02:17 +00002090 wrd = (insn >> 12) & 0xf;
2091 rd0 = (insn >> 0) & 0xf;
2092 rd1 = (insn >> 16) & 0xf;
2093 gen_op_iwmmxt_movq_M0_wRn(rd0);
2094 gen_op_iwmmxt_xorq_M0_wRn(rd1);
2095 gen_op_iwmmxt_setpsr_nz();
2096 gen_op_iwmmxt_movq_wRn_M0(wrd);
2097 gen_op_iwmmxt_set_mup();
2098 gen_op_iwmmxt_set_cup();
2099 break;
Peter Maydelld00584b2018-08-24 13:17:47 +01002100 case 0x111: /* TMRC */
balrog18c9b562007-04-30 02:02:17 +00002101 if (insn & 0xf)
2102 return 1;
2103 rd = (insn >> 12) & 0xf;
2104 wrd = (insn >> 16) & 0xf;
Filip Navarada6b5332009-10-15 14:39:02 +02002105 tmp = iwmmxt_load_creg(wrd);
2106 store_reg(s, rd, tmp);
balrog18c9b562007-04-30 02:02:17 +00002107 break;
Peter Maydelld00584b2018-08-24 13:17:47 +01002108 case 0x300: /* WANDN */
balrog18c9b562007-04-30 02:02:17 +00002109 wrd = (insn >> 12) & 0xf;
2110 rd0 = (insn >> 0) & 0xf;
2111 rd1 = (insn >> 16) & 0xf;
2112 gen_op_iwmmxt_movq_M0_wRn(rd0);
pbrooke6771372008-03-31 03:49:05 +00002113 tcg_gen_neg_i64(cpu_M0, cpu_M0);
balrog18c9b562007-04-30 02:02:17 +00002114 gen_op_iwmmxt_andq_M0_wRn(rd1);
2115 gen_op_iwmmxt_setpsr_nz();
2116 gen_op_iwmmxt_movq_wRn_M0(wrd);
2117 gen_op_iwmmxt_set_mup();
2118 gen_op_iwmmxt_set_cup();
2119 break;
Peter Maydelld00584b2018-08-24 13:17:47 +01002120 case 0x200: /* WAND */
balrog18c9b562007-04-30 02:02:17 +00002121 wrd = (insn >> 12) & 0xf;
2122 rd0 = (insn >> 0) & 0xf;
2123 rd1 = (insn >> 16) & 0xf;
2124 gen_op_iwmmxt_movq_M0_wRn(rd0);
2125 gen_op_iwmmxt_andq_M0_wRn(rd1);
2126 gen_op_iwmmxt_setpsr_nz();
2127 gen_op_iwmmxt_movq_wRn_M0(wrd);
2128 gen_op_iwmmxt_set_mup();
2129 gen_op_iwmmxt_set_cup();
2130 break;
Peter Maydelld00584b2018-08-24 13:17:47 +01002131 case 0x810: case 0xa10: /* WMADD */
balrog18c9b562007-04-30 02:02:17 +00002132 wrd = (insn >> 12) & 0xf;
2133 rd0 = (insn >> 0) & 0xf;
2134 rd1 = (insn >> 16) & 0xf;
2135 gen_op_iwmmxt_movq_M0_wRn(rd0);
2136 if (insn & (1 << 21))
2137 gen_op_iwmmxt_maddsq_M0_wRn(rd1);
2138 else
2139 gen_op_iwmmxt_madduq_M0_wRn(rd1);
2140 gen_op_iwmmxt_movq_wRn_M0(wrd);
2141 gen_op_iwmmxt_set_mup();
2142 break;
Peter Maydelld00584b2018-08-24 13:17:47 +01002143 case 0x10e: case 0x50e: case 0x90e: case 0xd0e: /* WUNPCKIL */
balrog18c9b562007-04-30 02:02:17 +00002144 wrd = (insn >> 12) & 0xf;
2145 rd0 = (insn >> 16) & 0xf;
2146 rd1 = (insn >> 0) & 0xf;
2147 gen_op_iwmmxt_movq_M0_wRn(rd0);
2148 switch ((insn >> 22) & 3) {
2149 case 0:
2150 gen_op_iwmmxt_unpacklb_M0_wRn(rd1);
2151 break;
2152 case 1:
2153 gen_op_iwmmxt_unpacklw_M0_wRn(rd1);
2154 break;
2155 case 2:
2156 gen_op_iwmmxt_unpackll_M0_wRn(rd1);
2157 break;
2158 case 3:
2159 return 1;
2160 }
2161 gen_op_iwmmxt_movq_wRn_M0(wrd);
2162 gen_op_iwmmxt_set_mup();
2163 gen_op_iwmmxt_set_cup();
2164 break;
Peter Maydelld00584b2018-08-24 13:17:47 +01002165 case 0x10c: case 0x50c: case 0x90c: case 0xd0c: /* WUNPCKIH */
balrog18c9b562007-04-30 02:02:17 +00002166 wrd = (insn >> 12) & 0xf;
2167 rd0 = (insn >> 16) & 0xf;
2168 rd1 = (insn >> 0) & 0xf;
2169 gen_op_iwmmxt_movq_M0_wRn(rd0);
2170 switch ((insn >> 22) & 3) {
2171 case 0:
2172 gen_op_iwmmxt_unpackhb_M0_wRn(rd1);
2173 break;
2174 case 1:
2175 gen_op_iwmmxt_unpackhw_M0_wRn(rd1);
2176 break;
2177 case 2:
2178 gen_op_iwmmxt_unpackhl_M0_wRn(rd1);
2179 break;
2180 case 3:
2181 return 1;
2182 }
2183 gen_op_iwmmxt_movq_wRn_M0(wrd);
2184 gen_op_iwmmxt_set_mup();
2185 gen_op_iwmmxt_set_cup();
2186 break;
Peter Maydelld00584b2018-08-24 13:17:47 +01002187 case 0x012: case 0x112: case 0x412: case 0x512: /* WSAD */
balrog18c9b562007-04-30 02:02:17 +00002188 wrd = (insn >> 12) & 0xf;
2189 rd0 = (insn >> 16) & 0xf;
2190 rd1 = (insn >> 0) & 0xf;
2191 gen_op_iwmmxt_movq_M0_wRn(rd0);
2192 if (insn & (1 << 22))
2193 gen_op_iwmmxt_sadw_M0_wRn(rd1);
2194 else
2195 gen_op_iwmmxt_sadb_M0_wRn(rd1);
2196 if (!(insn & (1 << 20)))
2197 gen_op_iwmmxt_addl_M0_wRn(wrd);
2198 gen_op_iwmmxt_movq_wRn_M0(wrd);
2199 gen_op_iwmmxt_set_mup();
2200 break;
Peter Maydelld00584b2018-08-24 13:17:47 +01002201 case 0x010: case 0x110: case 0x210: case 0x310: /* WMUL */
balrog18c9b562007-04-30 02:02:17 +00002202 wrd = (insn >> 12) & 0xf;
2203 rd0 = (insn >> 16) & 0xf;
2204 rd1 = (insn >> 0) & 0xf;
2205 gen_op_iwmmxt_movq_M0_wRn(rd0);
pbrooke6771372008-03-31 03:49:05 +00002206 if (insn & (1 << 21)) {
2207 if (insn & (1 << 20))
2208 gen_op_iwmmxt_mulshw_M0_wRn(rd1);
2209 else
2210 gen_op_iwmmxt_mulslw_M0_wRn(rd1);
2211 } else {
2212 if (insn & (1 << 20))
2213 gen_op_iwmmxt_muluhw_M0_wRn(rd1);
2214 else
2215 gen_op_iwmmxt_mululw_M0_wRn(rd1);
2216 }
balrog18c9b562007-04-30 02:02:17 +00002217 gen_op_iwmmxt_movq_wRn_M0(wrd);
2218 gen_op_iwmmxt_set_mup();
2219 break;
Peter Maydelld00584b2018-08-24 13:17:47 +01002220 case 0x410: case 0x510: case 0x610: case 0x710: /* WMAC */
balrog18c9b562007-04-30 02:02:17 +00002221 wrd = (insn >> 12) & 0xf;
2222 rd0 = (insn >> 16) & 0xf;
2223 rd1 = (insn >> 0) & 0xf;
2224 gen_op_iwmmxt_movq_M0_wRn(rd0);
2225 if (insn & (1 << 21))
2226 gen_op_iwmmxt_macsw_M0_wRn(rd1);
2227 else
2228 gen_op_iwmmxt_macuw_M0_wRn(rd1);
2229 if (!(insn & (1 << 20))) {
pbrooke6771372008-03-31 03:49:05 +00002230 iwmmxt_load_reg(cpu_V1, wrd);
2231 tcg_gen_add_i64(cpu_M0, cpu_M0, cpu_V1);
balrog18c9b562007-04-30 02:02:17 +00002232 }
2233 gen_op_iwmmxt_movq_wRn_M0(wrd);
2234 gen_op_iwmmxt_set_mup();
2235 break;
Peter Maydelld00584b2018-08-24 13:17:47 +01002236 case 0x006: case 0x406: case 0x806: case 0xc06: /* WCMPEQ */
balrog18c9b562007-04-30 02:02:17 +00002237 wrd = (insn >> 12) & 0xf;
2238 rd0 = (insn >> 16) & 0xf;
2239 rd1 = (insn >> 0) & 0xf;
2240 gen_op_iwmmxt_movq_M0_wRn(rd0);
2241 switch ((insn >> 22) & 3) {
2242 case 0:
2243 gen_op_iwmmxt_cmpeqb_M0_wRn(rd1);
2244 break;
2245 case 1:
2246 gen_op_iwmmxt_cmpeqw_M0_wRn(rd1);
2247 break;
2248 case 2:
2249 gen_op_iwmmxt_cmpeql_M0_wRn(rd1);
2250 break;
2251 case 3:
2252 return 1;
2253 }
2254 gen_op_iwmmxt_movq_wRn_M0(wrd);
2255 gen_op_iwmmxt_set_mup();
2256 gen_op_iwmmxt_set_cup();
2257 break;
Peter Maydelld00584b2018-08-24 13:17:47 +01002258 case 0x800: case 0x900: case 0xc00: case 0xd00: /* WAVG2 */
balrog18c9b562007-04-30 02:02:17 +00002259 wrd = (insn >> 12) & 0xf;
2260 rd0 = (insn >> 16) & 0xf;
2261 rd1 = (insn >> 0) & 0xf;
2262 gen_op_iwmmxt_movq_M0_wRn(rd0);
pbrooke6771372008-03-31 03:49:05 +00002263 if (insn & (1 << 22)) {
2264 if (insn & (1 << 20))
2265 gen_op_iwmmxt_avgw1_M0_wRn(rd1);
2266 else
2267 gen_op_iwmmxt_avgw0_M0_wRn(rd1);
2268 } else {
2269 if (insn & (1 << 20))
2270 gen_op_iwmmxt_avgb1_M0_wRn(rd1);
2271 else
2272 gen_op_iwmmxt_avgb0_M0_wRn(rd1);
2273 }
balrog18c9b562007-04-30 02:02:17 +00002274 gen_op_iwmmxt_movq_wRn_M0(wrd);
2275 gen_op_iwmmxt_set_mup();
2276 gen_op_iwmmxt_set_cup();
2277 break;
Peter Maydelld00584b2018-08-24 13:17:47 +01002278 case 0x802: case 0x902: case 0xa02: case 0xb02: /* WALIGNR */
balrog18c9b562007-04-30 02:02:17 +00002279 wrd = (insn >> 12) & 0xf;
2280 rd0 = (insn >> 16) & 0xf;
2281 rd1 = (insn >> 0) & 0xf;
2282 gen_op_iwmmxt_movq_M0_wRn(rd0);
Filip Navarada6b5332009-10-15 14:39:02 +02002283 tmp = iwmmxt_load_creg(ARM_IWMMXT_wCGR0 + ((insn >> 20) & 3));
2284 tcg_gen_andi_i32(tmp, tmp, 7);
2285 iwmmxt_load_reg(cpu_V1, rd1);
2286 gen_helper_iwmmxt_align(cpu_M0, cpu_M0, cpu_V1, tmp);
Peter Maydell7d1b0092011-03-06 21:39:54 +00002287 tcg_temp_free_i32(tmp);
balrog18c9b562007-04-30 02:02:17 +00002288 gen_op_iwmmxt_movq_wRn_M0(wrd);
2289 gen_op_iwmmxt_set_mup();
2290 break;
Peter Maydelld00584b2018-08-24 13:17:47 +01002291 case 0x601: case 0x605: case 0x609: case 0x60d: /* TINSR */
Filip Navarada6b5332009-10-15 14:39:02 +02002292 if (((insn >> 6) & 3) == 3)
2293 return 1;
balrog18c9b562007-04-30 02:02:17 +00002294 rd = (insn >> 12) & 0xf;
2295 wrd = (insn >> 16) & 0xf;
Filip Navarada6b5332009-10-15 14:39:02 +02002296 tmp = load_reg(s, rd);
balrog18c9b562007-04-30 02:02:17 +00002297 gen_op_iwmmxt_movq_M0_wRn(wrd);
2298 switch ((insn >> 6) & 3) {
2299 case 0:
Filip Navarada6b5332009-10-15 14:39:02 +02002300 tmp2 = tcg_const_i32(0xff);
2301 tmp3 = tcg_const_i32((insn & 7) << 3);
balrog18c9b562007-04-30 02:02:17 +00002302 break;
2303 case 1:
Filip Navarada6b5332009-10-15 14:39:02 +02002304 tmp2 = tcg_const_i32(0xffff);
2305 tmp3 = tcg_const_i32((insn & 3) << 4);
balrog18c9b562007-04-30 02:02:17 +00002306 break;
2307 case 2:
Filip Navarada6b5332009-10-15 14:39:02 +02002308 tmp2 = tcg_const_i32(0xffffffff);
2309 tmp3 = tcg_const_i32((insn & 1) << 5);
balrog18c9b562007-04-30 02:02:17 +00002310 break;
Filip Navarada6b5332009-10-15 14:39:02 +02002311 default:
Richard Hendersonf7647182017-11-02 12:47:37 +01002312 tmp2 = NULL;
2313 tmp3 = NULL;
balrog18c9b562007-04-30 02:02:17 +00002314 }
Filip Navarada6b5332009-10-15 14:39:02 +02002315 gen_helper_iwmmxt_insr(cpu_M0, cpu_M0, tmp, tmp2, tmp3);
Peter Maydell39d54922013-05-23 12:59:55 +01002316 tcg_temp_free_i32(tmp3);
2317 tcg_temp_free_i32(tmp2);
Peter Maydell7d1b0092011-03-06 21:39:54 +00002318 tcg_temp_free_i32(tmp);
balrog18c9b562007-04-30 02:02:17 +00002319 gen_op_iwmmxt_movq_wRn_M0(wrd);
2320 gen_op_iwmmxt_set_mup();
2321 break;
Peter Maydelld00584b2018-08-24 13:17:47 +01002322 case 0x107: case 0x507: case 0x907: case 0xd07: /* TEXTRM */
balrog18c9b562007-04-30 02:02:17 +00002323 rd = (insn >> 12) & 0xf;
2324 wrd = (insn >> 16) & 0xf;
Filip Navarada6b5332009-10-15 14:39:02 +02002325 if (rd == 15 || ((insn >> 22) & 3) == 3)
balrog18c9b562007-04-30 02:02:17 +00002326 return 1;
2327 gen_op_iwmmxt_movq_M0_wRn(wrd);
Peter Maydell7d1b0092011-03-06 21:39:54 +00002328 tmp = tcg_temp_new_i32();
balrog18c9b562007-04-30 02:02:17 +00002329 switch ((insn >> 22) & 3) {
2330 case 0:
Filip Navarada6b5332009-10-15 14:39:02 +02002331 tcg_gen_shri_i64(cpu_M0, cpu_M0, (insn & 7) << 3);
Richard Hendersonecc7b3a2015-07-24 11:49:53 -07002332 tcg_gen_extrl_i64_i32(tmp, cpu_M0);
Filip Navarada6b5332009-10-15 14:39:02 +02002333 if (insn & 8) {
2334 tcg_gen_ext8s_i32(tmp, tmp);
2335 } else {
2336 tcg_gen_andi_i32(tmp, tmp, 0xff);
balrog18c9b562007-04-30 02:02:17 +00002337 }
2338 break;
2339 case 1:
Filip Navarada6b5332009-10-15 14:39:02 +02002340 tcg_gen_shri_i64(cpu_M0, cpu_M0, (insn & 3) << 4);
Richard Hendersonecc7b3a2015-07-24 11:49:53 -07002341 tcg_gen_extrl_i64_i32(tmp, cpu_M0);
Filip Navarada6b5332009-10-15 14:39:02 +02002342 if (insn & 8) {
2343 tcg_gen_ext16s_i32(tmp, tmp);
2344 } else {
2345 tcg_gen_andi_i32(tmp, tmp, 0xffff);
balrog18c9b562007-04-30 02:02:17 +00002346 }
2347 break;
2348 case 2:
Filip Navarada6b5332009-10-15 14:39:02 +02002349 tcg_gen_shri_i64(cpu_M0, cpu_M0, (insn & 1) << 5);
Richard Hendersonecc7b3a2015-07-24 11:49:53 -07002350 tcg_gen_extrl_i64_i32(tmp, cpu_M0);
balrog18c9b562007-04-30 02:02:17 +00002351 break;
balrog18c9b562007-04-30 02:02:17 +00002352 }
Filip Navarada6b5332009-10-15 14:39:02 +02002353 store_reg(s, rd, tmp);
balrog18c9b562007-04-30 02:02:17 +00002354 break;
Peter Maydelld00584b2018-08-24 13:17:47 +01002355 case 0x117: case 0x517: case 0x917: case 0xd17: /* TEXTRC */
Filip Navarada6b5332009-10-15 14:39:02 +02002356 if ((insn & 0x000ff008) != 0x0003f000 || ((insn >> 22) & 3) == 3)
balrog18c9b562007-04-30 02:02:17 +00002357 return 1;
Filip Navarada6b5332009-10-15 14:39:02 +02002358 tmp = iwmmxt_load_creg(ARM_IWMMXT_wCASF);
balrog18c9b562007-04-30 02:02:17 +00002359 switch ((insn >> 22) & 3) {
2360 case 0:
Filip Navarada6b5332009-10-15 14:39:02 +02002361 tcg_gen_shri_i32(tmp, tmp, ((insn & 7) << 2) + 0);
balrog18c9b562007-04-30 02:02:17 +00002362 break;
2363 case 1:
Filip Navarada6b5332009-10-15 14:39:02 +02002364 tcg_gen_shri_i32(tmp, tmp, ((insn & 3) << 3) + 4);
balrog18c9b562007-04-30 02:02:17 +00002365 break;
2366 case 2:
Filip Navarada6b5332009-10-15 14:39:02 +02002367 tcg_gen_shri_i32(tmp, tmp, ((insn & 1) << 4) + 12);
balrog18c9b562007-04-30 02:02:17 +00002368 break;
balrog18c9b562007-04-30 02:02:17 +00002369 }
Filip Navarada6b5332009-10-15 14:39:02 +02002370 tcg_gen_shli_i32(tmp, tmp, 28);
2371 gen_set_nzcv(tmp);
Peter Maydell7d1b0092011-03-06 21:39:54 +00002372 tcg_temp_free_i32(tmp);
balrog18c9b562007-04-30 02:02:17 +00002373 break;
Peter Maydelld00584b2018-08-24 13:17:47 +01002374 case 0x401: case 0x405: case 0x409: case 0x40d: /* TBCST */
Filip Navarada6b5332009-10-15 14:39:02 +02002375 if (((insn >> 6) & 3) == 3)
2376 return 1;
balrog18c9b562007-04-30 02:02:17 +00002377 rd = (insn >> 12) & 0xf;
2378 wrd = (insn >> 16) & 0xf;
Filip Navarada6b5332009-10-15 14:39:02 +02002379 tmp = load_reg(s, rd);
balrog18c9b562007-04-30 02:02:17 +00002380 switch ((insn >> 6) & 3) {
2381 case 0:
Filip Navarada6b5332009-10-15 14:39:02 +02002382 gen_helper_iwmmxt_bcstb(cpu_M0, tmp);
balrog18c9b562007-04-30 02:02:17 +00002383 break;
2384 case 1:
Filip Navarada6b5332009-10-15 14:39:02 +02002385 gen_helper_iwmmxt_bcstw(cpu_M0, tmp);
balrog18c9b562007-04-30 02:02:17 +00002386 break;
2387 case 2:
Filip Navarada6b5332009-10-15 14:39:02 +02002388 gen_helper_iwmmxt_bcstl(cpu_M0, tmp);
balrog18c9b562007-04-30 02:02:17 +00002389 break;
balrog18c9b562007-04-30 02:02:17 +00002390 }
Peter Maydell7d1b0092011-03-06 21:39:54 +00002391 tcg_temp_free_i32(tmp);
balrog18c9b562007-04-30 02:02:17 +00002392 gen_op_iwmmxt_movq_wRn_M0(wrd);
2393 gen_op_iwmmxt_set_mup();
2394 break;
Peter Maydelld00584b2018-08-24 13:17:47 +01002395 case 0x113: case 0x513: case 0x913: case 0xd13: /* TANDC */
Filip Navarada6b5332009-10-15 14:39:02 +02002396 if ((insn & 0x000ff00f) != 0x0003f000 || ((insn >> 22) & 3) == 3)
balrog18c9b562007-04-30 02:02:17 +00002397 return 1;
Filip Navarada6b5332009-10-15 14:39:02 +02002398 tmp = iwmmxt_load_creg(ARM_IWMMXT_wCASF);
Peter Maydell7d1b0092011-03-06 21:39:54 +00002399 tmp2 = tcg_temp_new_i32();
Filip Navarada6b5332009-10-15 14:39:02 +02002400 tcg_gen_mov_i32(tmp2, tmp);
balrog18c9b562007-04-30 02:02:17 +00002401 switch ((insn >> 22) & 3) {
2402 case 0:
2403 for (i = 0; i < 7; i ++) {
Filip Navarada6b5332009-10-15 14:39:02 +02002404 tcg_gen_shli_i32(tmp2, tmp2, 4);
2405 tcg_gen_and_i32(tmp, tmp, tmp2);
balrog18c9b562007-04-30 02:02:17 +00002406 }
2407 break;
2408 case 1:
2409 for (i = 0; i < 3; i ++) {
Filip Navarada6b5332009-10-15 14:39:02 +02002410 tcg_gen_shli_i32(tmp2, tmp2, 8);
2411 tcg_gen_and_i32(tmp, tmp, tmp2);
balrog18c9b562007-04-30 02:02:17 +00002412 }
2413 break;
2414 case 2:
Filip Navarada6b5332009-10-15 14:39:02 +02002415 tcg_gen_shli_i32(tmp2, tmp2, 16);
2416 tcg_gen_and_i32(tmp, tmp, tmp2);
balrog18c9b562007-04-30 02:02:17 +00002417 break;
balrog18c9b562007-04-30 02:02:17 +00002418 }
Filip Navarada6b5332009-10-15 14:39:02 +02002419 gen_set_nzcv(tmp);
Peter Maydell7d1b0092011-03-06 21:39:54 +00002420 tcg_temp_free_i32(tmp2);
2421 tcg_temp_free_i32(tmp);
balrog18c9b562007-04-30 02:02:17 +00002422 break;
Peter Maydelld00584b2018-08-24 13:17:47 +01002423 case 0x01c: case 0x41c: case 0x81c: case 0xc1c: /* WACC */
balrog18c9b562007-04-30 02:02:17 +00002424 wrd = (insn >> 12) & 0xf;
2425 rd0 = (insn >> 16) & 0xf;
2426 gen_op_iwmmxt_movq_M0_wRn(rd0);
2427 switch ((insn >> 22) & 3) {
2428 case 0:
pbrooke6771372008-03-31 03:49:05 +00002429 gen_helper_iwmmxt_addcb(cpu_M0, cpu_M0);
balrog18c9b562007-04-30 02:02:17 +00002430 break;
2431 case 1:
pbrooke6771372008-03-31 03:49:05 +00002432 gen_helper_iwmmxt_addcw(cpu_M0, cpu_M0);
balrog18c9b562007-04-30 02:02:17 +00002433 break;
2434 case 2:
pbrooke6771372008-03-31 03:49:05 +00002435 gen_helper_iwmmxt_addcl(cpu_M0, cpu_M0);
balrog18c9b562007-04-30 02:02:17 +00002436 break;
2437 case 3:
2438 return 1;
2439 }
2440 gen_op_iwmmxt_movq_wRn_M0(wrd);
2441 gen_op_iwmmxt_set_mup();
2442 break;
Peter Maydelld00584b2018-08-24 13:17:47 +01002443 case 0x115: case 0x515: case 0x915: case 0xd15: /* TORC */
Filip Navarada6b5332009-10-15 14:39:02 +02002444 if ((insn & 0x000ff00f) != 0x0003f000 || ((insn >> 22) & 3) == 3)
balrog18c9b562007-04-30 02:02:17 +00002445 return 1;
Filip Navarada6b5332009-10-15 14:39:02 +02002446 tmp = iwmmxt_load_creg(ARM_IWMMXT_wCASF);
Peter Maydell7d1b0092011-03-06 21:39:54 +00002447 tmp2 = tcg_temp_new_i32();
Filip Navarada6b5332009-10-15 14:39:02 +02002448 tcg_gen_mov_i32(tmp2, tmp);
balrog18c9b562007-04-30 02:02:17 +00002449 switch ((insn >> 22) & 3) {
2450 case 0:
2451 for (i = 0; i < 7; i ++) {
Filip Navarada6b5332009-10-15 14:39:02 +02002452 tcg_gen_shli_i32(tmp2, tmp2, 4);
2453 tcg_gen_or_i32(tmp, tmp, tmp2);
balrog18c9b562007-04-30 02:02:17 +00002454 }
2455 break;
2456 case 1:
2457 for (i = 0; i < 3; i ++) {
Filip Navarada6b5332009-10-15 14:39:02 +02002458 tcg_gen_shli_i32(tmp2, tmp2, 8);
2459 tcg_gen_or_i32(tmp, tmp, tmp2);
balrog18c9b562007-04-30 02:02:17 +00002460 }
2461 break;
2462 case 2:
Filip Navarada6b5332009-10-15 14:39:02 +02002463 tcg_gen_shli_i32(tmp2, tmp2, 16);
2464 tcg_gen_or_i32(tmp, tmp, tmp2);
balrog18c9b562007-04-30 02:02:17 +00002465 break;
balrog18c9b562007-04-30 02:02:17 +00002466 }
Filip Navarada6b5332009-10-15 14:39:02 +02002467 gen_set_nzcv(tmp);
Peter Maydell7d1b0092011-03-06 21:39:54 +00002468 tcg_temp_free_i32(tmp2);
2469 tcg_temp_free_i32(tmp);
balrog18c9b562007-04-30 02:02:17 +00002470 break;
Peter Maydelld00584b2018-08-24 13:17:47 +01002471 case 0x103: case 0x503: case 0x903: case 0xd03: /* TMOVMSK */
balrog18c9b562007-04-30 02:02:17 +00002472 rd = (insn >> 12) & 0xf;
2473 rd0 = (insn >> 16) & 0xf;
Filip Navarada6b5332009-10-15 14:39:02 +02002474 if ((insn & 0xf) != 0 || ((insn >> 22) & 3) == 3)
balrog18c9b562007-04-30 02:02:17 +00002475 return 1;
2476 gen_op_iwmmxt_movq_M0_wRn(rd0);
Peter Maydell7d1b0092011-03-06 21:39:54 +00002477 tmp = tcg_temp_new_i32();
balrog18c9b562007-04-30 02:02:17 +00002478 switch ((insn >> 22) & 3) {
2479 case 0:
Filip Navarada6b5332009-10-15 14:39:02 +02002480 gen_helper_iwmmxt_msbb(tmp, cpu_M0);
balrog18c9b562007-04-30 02:02:17 +00002481 break;
2482 case 1:
Filip Navarada6b5332009-10-15 14:39:02 +02002483 gen_helper_iwmmxt_msbw(tmp, cpu_M0);
balrog18c9b562007-04-30 02:02:17 +00002484 break;
2485 case 2:
Filip Navarada6b5332009-10-15 14:39:02 +02002486 gen_helper_iwmmxt_msbl(tmp, cpu_M0);
balrog18c9b562007-04-30 02:02:17 +00002487 break;
balrog18c9b562007-04-30 02:02:17 +00002488 }
Filip Navarada6b5332009-10-15 14:39:02 +02002489 store_reg(s, rd, tmp);
balrog18c9b562007-04-30 02:02:17 +00002490 break;
Peter Maydelld00584b2018-08-24 13:17:47 +01002491 case 0x106: case 0x306: case 0x506: case 0x706: /* WCMPGT */
balrog18c9b562007-04-30 02:02:17 +00002492 case 0x906: case 0xb06: case 0xd06: case 0xf06:
2493 wrd = (insn >> 12) & 0xf;
2494 rd0 = (insn >> 16) & 0xf;
2495 rd1 = (insn >> 0) & 0xf;
2496 gen_op_iwmmxt_movq_M0_wRn(rd0);
2497 switch ((insn >> 22) & 3) {
2498 case 0:
2499 if (insn & (1 << 21))
2500 gen_op_iwmmxt_cmpgtsb_M0_wRn(rd1);
2501 else
2502 gen_op_iwmmxt_cmpgtub_M0_wRn(rd1);
2503 break;
2504 case 1:
2505 if (insn & (1 << 21))
2506 gen_op_iwmmxt_cmpgtsw_M0_wRn(rd1);
2507 else
2508 gen_op_iwmmxt_cmpgtuw_M0_wRn(rd1);
2509 break;
2510 case 2:
2511 if (insn & (1 << 21))
2512 gen_op_iwmmxt_cmpgtsl_M0_wRn(rd1);
2513 else
2514 gen_op_iwmmxt_cmpgtul_M0_wRn(rd1);
2515 break;
2516 case 3:
2517 return 1;
2518 }
2519 gen_op_iwmmxt_movq_wRn_M0(wrd);
2520 gen_op_iwmmxt_set_mup();
2521 gen_op_iwmmxt_set_cup();
2522 break;
Peter Maydelld00584b2018-08-24 13:17:47 +01002523 case 0x00e: case 0x20e: case 0x40e: case 0x60e: /* WUNPCKEL */
balrog18c9b562007-04-30 02:02:17 +00002524 case 0x80e: case 0xa0e: case 0xc0e: case 0xe0e:
2525 wrd = (insn >> 12) & 0xf;
2526 rd0 = (insn >> 16) & 0xf;
2527 gen_op_iwmmxt_movq_M0_wRn(rd0);
2528 switch ((insn >> 22) & 3) {
2529 case 0:
2530 if (insn & (1 << 21))
2531 gen_op_iwmmxt_unpacklsb_M0();
2532 else
2533 gen_op_iwmmxt_unpacklub_M0();
2534 break;
2535 case 1:
2536 if (insn & (1 << 21))
2537 gen_op_iwmmxt_unpacklsw_M0();
2538 else
2539 gen_op_iwmmxt_unpackluw_M0();
2540 break;
2541 case 2:
2542 if (insn & (1 << 21))
2543 gen_op_iwmmxt_unpacklsl_M0();
2544 else
2545 gen_op_iwmmxt_unpacklul_M0();
2546 break;
2547 case 3:
2548 return 1;
2549 }
2550 gen_op_iwmmxt_movq_wRn_M0(wrd);
2551 gen_op_iwmmxt_set_mup();
2552 gen_op_iwmmxt_set_cup();
2553 break;
Peter Maydelld00584b2018-08-24 13:17:47 +01002554 case 0x00c: case 0x20c: case 0x40c: case 0x60c: /* WUNPCKEH */
balrog18c9b562007-04-30 02:02:17 +00002555 case 0x80c: case 0xa0c: case 0xc0c: case 0xe0c:
2556 wrd = (insn >> 12) & 0xf;
2557 rd0 = (insn >> 16) & 0xf;
2558 gen_op_iwmmxt_movq_M0_wRn(rd0);
2559 switch ((insn >> 22) & 3) {
2560 case 0:
2561 if (insn & (1 << 21))
2562 gen_op_iwmmxt_unpackhsb_M0();
2563 else
2564 gen_op_iwmmxt_unpackhub_M0();
2565 break;
2566 case 1:
2567 if (insn & (1 << 21))
2568 gen_op_iwmmxt_unpackhsw_M0();
2569 else
2570 gen_op_iwmmxt_unpackhuw_M0();
2571 break;
2572 case 2:
2573 if (insn & (1 << 21))
2574 gen_op_iwmmxt_unpackhsl_M0();
2575 else
2576 gen_op_iwmmxt_unpackhul_M0();
2577 break;
2578 case 3:
2579 return 1;
2580 }
2581 gen_op_iwmmxt_movq_wRn_M0(wrd);
2582 gen_op_iwmmxt_set_mup();
2583 gen_op_iwmmxt_set_cup();
2584 break;
Peter Maydelld00584b2018-08-24 13:17:47 +01002585 case 0x204: case 0x604: case 0xa04: case 0xe04: /* WSRL */
balrog18c9b562007-04-30 02:02:17 +00002586 case 0x214: case 0x614: case 0xa14: case 0xe14:
Filip Navarada6b5332009-10-15 14:39:02 +02002587 if (((insn >> 22) & 3) == 0)
2588 return 1;
balrog18c9b562007-04-30 02:02:17 +00002589 wrd = (insn >> 12) & 0xf;
2590 rd0 = (insn >> 16) & 0xf;
2591 gen_op_iwmmxt_movq_M0_wRn(rd0);
Peter Maydell7d1b0092011-03-06 21:39:54 +00002592 tmp = tcg_temp_new_i32();
Filip Navarada6b5332009-10-15 14:39:02 +02002593 if (gen_iwmmxt_shift(insn, 0xff, tmp)) {
Peter Maydell7d1b0092011-03-06 21:39:54 +00002594 tcg_temp_free_i32(tmp);
balrog18c9b562007-04-30 02:02:17 +00002595 return 1;
Filip Navarada6b5332009-10-15 14:39:02 +02002596 }
balrog18c9b562007-04-30 02:02:17 +00002597 switch ((insn >> 22) & 3) {
balrog18c9b562007-04-30 02:02:17 +00002598 case 1:
Peter Maydell477955b2011-05-25 13:22:31 +00002599 gen_helper_iwmmxt_srlw(cpu_M0, cpu_env, cpu_M0, tmp);
balrog18c9b562007-04-30 02:02:17 +00002600 break;
2601 case 2:
Peter Maydell477955b2011-05-25 13:22:31 +00002602 gen_helper_iwmmxt_srll(cpu_M0, cpu_env, cpu_M0, tmp);
balrog18c9b562007-04-30 02:02:17 +00002603 break;
2604 case 3:
Peter Maydell477955b2011-05-25 13:22:31 +00002605 gen_helper_iwmmxt_srlq(cpu_M0, cpu_env, cpu_M0, tmp);
balrog18c9b562007-04-30 02:02:17 +00002606 break;
2607 }
Peter Maydell7d1b0092011-03-06 21:39:54 +00002608 tcg_temp_free_i32(tmp);
balrog18c9b562007-04-30 02:02:17 +00002609 gen_op_iwmmxt_movq_wRn_M0(wrd);
2610 gen_op_iwmmxt_set_mup();
2611 gen_op_iwmmxt_set_cup();
2612 break;
Peter Maydelld00584b2018-08-24 13:17:47 +01002613 case 0x004: case 0x404: case 0x804: case 0xc04: /* WSRA */
balrog18c9b562007-04-30 02:02:17 +00002614 case 0x014: case 0x414: case 0x814: case 0xc14:
Filip Navarada6b5332009-10-15 14:39:02 +02002615 if (((insn >> 22) & 3) == 0)
2616 return 1;
balrog18c9b562007-04-30 02:02:17 +00002617 wrd = (insn >> 12) & 0xf;
2618 rd0 = (insn >> 16) & 0xf;
2619 gen_op_iwmmxt_movq_M0_wRn(rd0);
Peter Maydell7d1b0092011-03-06 21:39:54 +00002620 tmp = tcg_temp_new_i32();
Filip Navarada6b5332009-10-15 14:39:02 +02002621 if (gen_iwmmxt_shift(insn, 0xff, tmp)) {
Peter Maydell7d1b0092011-03-06 21:39:54 +00002622 tcg_temp_free_i32(tmp);
balrog18c9b562007-04-30 02:02:17 +00002623 return 1;
Filip Navarada6b5332009-10-15 14:39:02 +02002624 }
balrog18c9b562007-04-30 02:02:17 +00002625 switch ((insn >> 22) & 3) {
balrog18c9b562007-04-30 02:02:17 +00002626 case 1:
Peter Maydell477955b2011-05-25 13:22:31 +00002627 gen_helper_iwmmxt_sraw(cpu_M0, cpu_env, cpu_M0, tmp);
balrog18c9b562007-04-30 02:02:17 +00002628 break;
2629 case 2:
Peter Maydell477955b2011-05-25 13:22:31 +00002630 gen_helper_iwmmxt_sral(cpu_M0, cpu_env, cpu_M0, tmp);
balrog18c9b562007-04-30 02:02:17 +00002631 break;
2632 case 3:
Peter Maydell477955b2011-05-25 13:22:31 +00002633 gen_helper_iwmmxt_sraq(cpu_M0, cpu_env, cpu_M0, tmp);
balrog18c9b562007-04-30 02:02:17 +00002634 break;
2635 }
Peter Maydell7d1b0092011-03-06 21:39:54 +00002636 tcg_temp_free_i32(tmp);
balrog18c9b562007-04-30 02:02:17 +00002637 gen_op_iwmmxt_movq_wRn_M0(wrd);
2638 gen_op_iwmmxt_set_mup();
2639 gen_op_iwmmxt_set_cup();
2640 break;
Peter Maydelld00584b2018-08-24 13:17:47 +01002641 case 0x104: case 0x504: case 0x904: case 0xd04: /* WSLL */
balrog18c9b562007-04-30 02:02:17 +00002642 case 0x114: case 0x514: case 0x914: case 0xd14:
Filip Navarada6b5332009-10-15 14:39:02 +02002643 if (((insn >> 22) & 3) == 0)
2644 return 1;
balrog18c9b562007-04-30 02:02:17 +00002645 wrd = (insn >> 12) & 0xf;
2646 rd0 = (insn >> 16) & 0xf;
2647 gen_op_iwmmxt_movq_M0_wRn(rd0);
Peter Maydell7d1b0092011-03-06 21:39:54 +00002648 tmp = tcg_temp_new_i32();
Filip Navarada6b5332009-10-15 14:39:02 +02002649 if (gen_iwmmxt_shift(insn, 0xff, tmp)) {
Peter Maydell7d1b0092011-03-06 21:39:54 +00002650 tcg_temp_free_i32(tmp);
balrog18c9b562007-04-30 02:02:17 +00002651 return 1;
Filip Navarada6b5332009-10-15 14:39:02 +02002652 }
balrog18c9b562007-04-30 02:02:17 +00002653 switch ((insn >> 22) & 3) {
balrog18c9b562007-04-30 02:02:17 +00002654 case 1:
Peter Maydell477955b2011-05-25 13:22:31 +00002655 gen_helper_iwmmxt_sllw(cpu_M0, cpu_env, cpu_M0, tmp);
balrog18c9b562007-04-30 02:02:17 +00002656 break;
2657 case 2:
Peter Maydell477955b2011-05-25 13:22:31 +00002658 gen_helper_iwmmxt_slll(cpu_M0, cpu_env, cpu_M0, tmp);
balrog18c9b562007-04-30 02:02:17 +00002659 break;
2660 case 3:
Peter Maydell477955b2011-05-25 13:22:31 +00002661 gen_helper_iwmmxt_sllq(cpu_M0, cpu_env, cpu_M0, tmp);
balrog18c9b562007-04-30 02:02:17 +00002662 break;
2663 }
Peter Maydell7d1b0092011-03-06 21:39:54 +00002664 tcg_temp_free_i32(tmp);
balrog18c9b562007-04-30 02:02:17 +00002665 gen_op_iwmmxt_movq_wRn_M0(wrd);
2666 gen_op_iwmmxt_set_mup();
2667 gen_op_iwmmxt_set_cup();
2668 break;
Peter Maydelld00584b2018-08-24 13:17:47 +01002669 case 0x304: case 0x704: case 0xb04: case 0xf04: /* WROR */
balrog18c9b562007-04-30 02:02:17 +00002670 case 0x314: case 0x714: case 0xb14: case 0xf14:
Filip Navarada6b5332009-10-15 14:39:02 +02002671 if (((insn >> 22) & 3) == 0)
2672 return 1;
balrog18c9b562007-04-30 02:02:17 +00002673 wrd = (insn >> 12) & 0xf;
2674 rd0 = (insn >> 16) & 0xf;
2675 gen_op_iwmmxt_movq_M0_wRn(rd0);
Peter Maydell7d1b0092011-03-06 21:39:54 +00002676 tmp = tcg_temp_new_i32();
balrog18c9b562007-04-30 02:02:17 +00002677 switch ((insn >> 22) & 3) {
balrog18c9b562007-04-30 02:02:17 +00002678 case 1:
Filip Navarada6b5332009-10-15 14:39:02 +02002679 if (gen_iwmmxt_shift(insn, 0xf, tmp)) {
Peter Maydell7d1b0092011-03-06 21:39:54 +00002680 tcg_temp_free_i32(tmp);
balrog18c9b562007-04-30 02:02:17 +00002681 return 1;
Filip Navarada6b5332009-10-15 14:39:02 +02002682 }
Peter Maydell477955b2011-05-25 13:22:31 +00002683 gen_helper_iwmmxt_rorw(cpu_M0, cpu_env, cpu_M0, tmp);
balrog18c9b562007-04-30 02:02:17 +00002684 break;
2685 case 2:
Filip Navarada6b5332009-10-15 14:39:02 +02002686 if (gen_iwmmxt_shift(insn, 0x1f, tmp)) {
Peter Maydell7d1b0092011-03-06 21:39:54 +00002687 tcg_temp_free_i32(tmp);
balrog18c9b562007-04-30 02:02:17 +00002688 return 1;
Filip Navarada6b5332009-10-15 14:39:02 +02002689 }
Peter Maydell477955b2011-05-25 13:22:31 +00002690 gen_helper_iwmmxt_rorl(cpu_M0, cpu_env, cpu_M0, tmp);
balrog18c9b562007-04-30 02:02:17 +00002691 break;
2692 case 3:
Filip Navarada6b5332009-10-15 14:39:02 +02002693 if (gen_iwmmxt_shift(insn, 0x3f, tmp)) {
Peter Maydell7d1b0092011-03-06 21:39:54 +00002694 tcg_temp_free_i32(tmp);
balrog18c9b562007-04-30 02:02:17 +00002695 return 1;
Filip Navarada6b5332009-10-15 14:39:02 +02002696 }
Peter Maydell477955b2011-05-25 13:22:31 +00002697 gen_helper_iwmmxt_rorq(cpu_M0, cpu_env, cpu_M0, tmp);
balrog18c9b562007-04-30 02:02:17 +00002698 break;
2699 }
Peter Maydell7d1b0092011-03-06 21:39:54 +00002700 tcg_temp_free_i32(tmp);
balrog18c9b562007-04-30 02:02:17 +00002701 gen_op_iwmmxt_movq_wRn_M0(wrd);
2702 gen_op_iwmmxt_set_mup();
2703 gen_op_iwmmxt_set_cup();
2704 break;
Peter Maydelld00584b2018-08-24 13:17:47 +01002705 case 0x116: case 0x316: case 0x516: case 0x716: /* WMIN */
balrog18c9b562007-04-30 02:02:17 +00002706 case 0x916: case 0xb16: case 0xd16: case 0xf16:
2707 wrd = (insn >> 12) & 0xf;
2708 rd0 = (insn >> 16) & 0xf;
2709 rd1 = (insn >> 0) & 0xf;
2710 gen_op_iwmmxt_movq_M0_wRn(rd0);
2711 switch ((insn >> 22) & 3) {
2712 case 0:
2713 if (insn & (1 << 21))
2714 gen_op_iwmmxt_minsb_M0_wRn(rd1);
2715 else
2716 gen_op_iwmmxt_minub_M0_wRn(rd1);
2717 break;
2718 case 1:
2719 if (insn & (1 << 21))
2720 gen_op_iwmmxt_minsw_M0_wRn(rd1);
2721 else
2722 gen_op_iwmmxt_minuw_M0_wRn(rd1);
2723 break;
2724 case 2:
2725 if (insn & (1 << 21))
2726 gen_op_iwmmxt_minsl_M0_wRn(rd1);
2727 else
2728 gen_op_iwmmxt_minul_M0_wRn(rd1);
2729 break;
2730 case 3:
2731 return 1;
2732 }
2733 gen_op_iwmmxt_movq_wRn_M0(wrd);
2734 gen_op_iwmmxt_set_mup();
2735 break;
Peter Maydelld00584b2018-08-24 13:17:47 +01002736 case 0x016: case 0x216: case 0x416: case 0x616: /* WMAX */
balrog18c9b562007-04-30 02:02:17 +00002737 case 0x816: case 0xa16: case 0xc16: case 0xe16:
2738 wrd = (insn >> 12) & 0xf;
2739 rd0 = (insn >> 16) & 0xf;
2740 rd1 = (insn >> 0) & 0xf;
2741 gen_op_iwmmxt_movq_M0_wRn(rd0);
2742 switch ((insn >> 22) & 3) {
2743 case 0:
2744 if (insn & (1 << 21))
2745 gen_op_iwmmxt_maxsb_M0_wRn(rd1);
2746 else
2747 gen_op_iwmmxt_maxub_M0_wRn(rd1);
2748 break;
2749 case 1:
2750 if (insn & (1 << 21))
2751 gen_op_iwmmxt_maxsw_M0_wRn(rd1);
2752 else
2753 gen_op_iwmmxt_maxuw_M0_wRn(rd1);
2754 break;
2755 case 2:
2756 if (insn & (1 << 21))
2757 gen_op_iwmmxt_maxsl_M0_wRn(rd1);
2758 else
2759 gen_op_iwmmxt_maxul_M0_wRn(rd1);
2760 break;
2761 case 3:
2762 return 1;
2763 }
2764 gen_op_iwmmxt_movq_wRn_M0(wrd);
2765 gen_op_iwmmxt_set_mup();
2766 break;
Peter Maydelld00584b2018-08-24 13:17:47 +01002767 case 0x002: case 0x102: case 0x202: case 0x302: /* WALIGNI */
balrog18c9b562007-04-30 02:02:17 +00002768 case 0x402: case 0x502: case 0x602: case 0x702:
2769 wrd = (insn >> 12) & 0xf;
2770 rd0 = (insn >> 16) & 0xf;
2771 rd1 = (insn >> 0) & 0xf;
2772 gen_op_iwmmxt_movq_M0_wRn(rd0);
Filip Navarada6b5332009-10-15 14:39:02 +02002773 tmp = tcg_const_i32((insn >> 20) & 3);
2774 iwmmxt_load_reg(cpu_V1, rd1);
2775 gen_helper_iwmmxt_align(cpu_M0, cpu_M0, cpu_V1, tmp);
Peter Maydell39d54922013-05-23 12:59:55 +01002776 tcg_temp_free_i32(tmp);
balrog18c9b562007-04-30 02:02:17 +00002777 gen_op_iwmmxt_movq_wRn_M0(wrd);
2778 gen_op_iwmmxt_set_mup();
2779 break;
Peter Maydelld00584b2018-08-24 13:17:47 +01002780 case 0x01a: case 0x11a: case 0x21a: case 0x31a: /* WSUB */
balrog18c9b562007-04-30 02:02:17 +00002781 case 0x41a: case 0x51a: case 0x61a: case 0x71a:
2782 case 0x81a: case 0x91a: case 0xa1a: case 0xb1a:
2783 case 0xc1a: case 0xd1a: case 0xe1a: case 0xf1a:
2784 wrd = (insn >> 12) & 0xf;
2785 rd0 = (insn >> 16) & 0xf;
2786 rd1 = (insn >> 0) & 0xf;
2787 gen_op_iwmmxt_movq_M0_wRn(rd0);
2788 switch ((insn >> 20) & 0xf) {
2789 case 0x0:
2790 gen_op_iwmmxt_subnb_M0_wRn(rd1);
2791 break;
2792 case 0x1:
2793 gen_op_iwmmxt_subub_M0_wRn(rd1);
2794 break;
2795 case 0x3:
2796 gen_op_iwmmxt_subsb_M0_wRn(rd1);
2797 break;
2798 case 0x4:
2799 gen_op_iwmmxt_subnw_M0_wRn(rd1);
2800 break;
2801 case 0x5:
2802 gen_op_iwmmxt_subuw_M0_wRn(rd1);
2803 break;
2804 case 0x7:
2805 gen_op_iwmmxt_subsw_M0_wRn(rd1);
2806 break;
2807 case 0x8:
2808 gen_op_iwmmxt_subnl_M0_wRn(rd1);
2809 break;
2810 case 0x9:
2811 gen_op_iwmmxt_subul_M0_wRn(rd1);
2812 break;
2813 case 0xb:
2814 gen_op_iwmmxt_subsl_M0_wRn(rd1);
2815 break;
2816 default:
2817 return 1;
2818 }
2819 gen_op_iwmmxt_movq_wRn_M0(wrd);
2820 gen_op_iwmmxt_set_mup();
2821 gen_op_iwmmxt_set_cup();
2822 break;
Peter Maydelld00584b2018-08-24 13:17:47 +01002823 case 0x01e: case 0x11e: case 0x21e: case 0x31e: /* WSHUFH */
balrog18c9b562007-04-30 02:02:17 +00002824 case 0x41e: case 0x51e: case 0x61e: case 0x71e:
2825 case 0x81e: case 0x91e: case 0xa1e: case 0xb1e:
2826 case 0xc1e: case 0xd1e: case 0xe1e: case 0xf1e:
2827 wrd = (insn >> 12) & 0xf;
2828 rd0 = (insn >> 16) & 0xf;
2829 gen_op_iwmmxt_movq_M0_wRn(rd0);
Filip Navarada6b5332009-10-15 14:39:02 +02002830 tmp = tcg_const_i32(((insn >> 16) & 0xf0) | (insn & 0x0f));
Peter Maydell477955b2011-05-25 13:22:31 +00002831 gen_helper_iwmmxt_shufh(cpu_M0, cpu_env, cpu_M0, tmp);
Peter Maydell39d54922013-05-23 12:59:55 +01002832 tcg_temp_free_i32(tmp);
balrog18c9b562007-04-30 02:02:17 +00002833 gen_op_iwmmxt_movq_wRn_M0(wrd);
2834 gen_op_iwmmxt_set_mup();
2835 gen_op_iwmmxt_set_cup();
2836 break;
Peter Maydelld00584b2018-08-24 13:17:47 +01002837 case 0x018: case 0x118: case 0x218: case 0x318: /* WADD */
balrog18c9b562007-04-30 02:02:17 +00002838 case 0x418: case 0x518: case 0x618: case 0x718:
2839 case 0x818: case 0x918: case 0xa18: case 0xb18:
2840 case 0xc18: case 0xd18: case 0xe18: case 0xf18:
2841 wrd = (insn >> 12) & 0xf;
2842 rd0 = (insn >> 16) & 0xf;
2843 rd1 = (insn >> 0) & 0xf;
2844 gen_op_iwmmxt_movq_M0_wRn(rd0);
2845 switch ((insn >> 20) & 0xf) {
2846 case 0x0:
2847 gen_op_iwmmxt_addnb_M0_wRn(rd1);
2848 break;
2849 case 0x1:
2850 gen_op_iwmmxt_addub_M0_wRn(rd1);
2851 break;
2852 case 0x3:
2853 gen_op_iwmmxt_addsb_M0_wRn(rd1);
2854 break;
2855 case 0x4:
2856 gen_op_iwmmxt_addnw_M0_wRn(rd1);
2857 break;
2858 case 0x5:
2859 gen_op_iwmmxt_adduw_M0_wRn(rd1);
2860 break;
2861 case 0x7:
2862 gen_op_iwmmxt_addsw_M0_wRn(rd1);
2863 break;
2864 case 0x8:
2865 gen_op_iwmmxt_addnl_M0_wRn(rd1);
2866 break;
2867 case 0x9:
2868 gen_op_iwmmxt_addul_M0_wRn(rd1);
2869 break;
2870 case 0xb:
2871 gen_op_iwmmxt_addsl_M0_wRn(rd1);
2872 break;
2873 default:
2874 return 1;
2875 }
2876 gen_op_iwmmxt_movq_wRn_M0(wrd);
2877 gen_op_iwmmxt_set_mup();
2878 gen_op_iwmmxt_set_cup();
2879 break;
Peter Maydelld00584b2018-08-24 13:17:47 +01002880 case 0x008: case 0x108: case 0x208: case 0x308: /* WPACK */
balrog18c9b562007-04-30 02:02:17 +00002881 case 0x408: case 0x508: case 0x608: case 0x708:
2882 case 0x808: case 0x908: case 0xa08: case 0xb08:
2883 case 0xc08: case 0xd08: case 0xe08: case 0xf08:
Filip Navarada6b5332009-10-15 14:39:02 +02002884 if (!(insn & (1 << 20)) || ((insn >> 22) & 3) == 0)
2885 return 1;
balrog18c9b562007-04-30 02:02:17 +00002886 wrd = (insn >> 12) & 0xf;
2887 rd0 = (insn >> 16) & 0xf;
2888 rd1 = (insn >> 0) & 0xf;
2889 gen_op_iwmmxt_movq_M0_wRn(rd0);
balrog18c9b562007-04-30 02:02:17 +00002890 switch ((insn >> 22) & 3) {
balrog18c9b562007-04-30 02:02:17 +00002891 case 1:
2892 if (insn & (1 << 21))
2893 gen_op_iwmmxt_packsw_M0_wRn(rd1);
2894 else
2895 gen_op_iwmmxt_packuw_M0_wRn(rd1);
2896 break;
2897 case 2:
2898 if (insn & (1 << 21))
2899 gen_op_iwmmxt_packsl_M0_wRn(rd1);
2900 else
2901 gen_op_iwmmxt_packul_M0_wRn(rd1);
2902 break;
2903 case 3:
2904 if (insn & (1 << 21))
2905 gen_op_iwmmxt_packsq_M0_wRn(rd1);
2906 else
2907 gen_op_iwmmxt_packuq_M0_wRn(rd1);
2908 break;
2909 }
2910 gen_op_iwmmxt_movq_wRn_M0(wrd);
2911 gen_op_iwmmxt_set_mup();
2912 gen_op_iwmmxt_set_cup();
2913 break;
2914 case 0x201: case 0x203: case 0x205: case 0x207:
2915 case 0x209: case 0x20b: case 0x20d: case 0x20f:
2916 case 0x211: case 0x213: case 0x215: case 0x217:
2917 case 0x219: case 0x21b: case 0x21d: case 0x21f:
2918 wrd = (insn >> 5) & 0xf;
2919 rd0 = (insn >> 12) & 0xf;
2920 rd1 = (insn >> 0) & 0xf;
2921 if (rd0 == 0xf || rd1 == 0xf)
2922 return 1;
2923 gen_op_iwmmxt_movq_M0_wRn(wrd);
Filip Navarada6b5332009-10-15 14:39:02 +02002924 tmp = load_reg(s, rd0);
2925 tmp2 = load_reg(s, rd1);
balrog18c9b562007-04-30 02:02:17 +00002926 switch ((insn >> 16) & 0xf) {
Peter Maydelld00584b2018-08-24 13:17:47 +01002927 case 0x0: /* TMIA */
Filip Navarada6b5332009-10-15 14:39:02 +02002928 gen_helper_iwmmxt_muladdsl(cpu_M0, cpu_M0, tmp, tmp2);
balrog18c9b562007-04-30 02:02:17 +00002929 break;
Peter Maydelld00584b2018-08-24 13:17:47 +01002930 case 0x8: /* TMIAPH */
Filip Navarada6b5332009-10-15 14:39:02 +02002931 gen_helper_iwmmxt_muladdsw(cpu_M0, cpu_M0, tmp, tmp2);
balrog18c9b562007-04-30 02:02:17 +00002932 break;
Peter Maydelld00584b2018-08-24 13:17:47 +01002933 case 0xc: case 0xd: case 0xe: case 0xf: /* TMIAxy */
balrog18c9b562007-04-30 02:02:17 +00002934 if (insn & (1 << 16))
Filip Navarada6b5332009-10-15 14:39:02 +02002935 tcg_gen_shri_i32(tmp, tmp, 16);
balrog18c9b562007-04-30 02:02:17 +00002936 if (insn & (1 << 17))
Filip Navarada6b5332009-10-15 14:39:02 +02002937 tcg_gen_shri_i32(tmp2, tmp2, 16);
2938 gen_helper_iwmmxt_muladdswl(cpu_M0, cpu_M0, tmp, tmp2);
balrog18c9b562007-04-30 02:02:17 +00002939 break;
2940 default:
Peter Maydell7d1b0092011-03-06 21:39:54 +00002941 tcg_temp_free_i32(tmp2);
2942 tcg_temp_free_i32(tmp);
balrog18c9b562007-04-30 02:02:17 +00002943 return 1;
2944 }
Peter Maydell7d1b0092011-03-06 21:39:54 +00002945 tcg_temp_free_i32(tmp2);
2946 tcg_temp_free_i32(tmp);
balrog18c9b562007-04-30 02:02:17 +00002947 gen_op_iwmmxt_movq_wRn_M0(wrd);
2948 gen_op_iwmmxt_set_mup();
2949 break;
2950 default:
2951 return 1;
2952 }
2953
2954 return 0;
2955}
2956
Stefan Weila1c72732011-04-28 17:20:38 +02002957/* Disassemble an XScale DSP instruction. Returns nonzero if an error occurred
balrog18c9b562007-04-30 02:02:17 +00002958 (ie. an undefined instruction). */
Peter Maydell7dcc1f82014-10-28 19:24:03 +00002959static int disas_dsp_insn(DisasContext *s, uint32_t insn)
balrog18c9b562007-04-30 02:02:17 +00002960{
2961 int acc, rd0, rd1, rdhi, rdlo;
Peter Maydell39d54922013-05-23 12:59:55 +01002962 TCGv_i32 tmp, tmp2;
balrog18c9b562007-04-30 02:02:17 +00002963
2964 if ((insn & 0x0ff00f10) == 0x0e200010) {
2965 /* Multiply with Internal Accumulate Format */
2966 rd0 = (insn >> 12) & 0xf;
2967 rd1 = insn & 0xf;
2968 acc = (insn >> 5) & 7;
2969
2970 if (acc != 0)
2971 return 1;
2972
Filip Navara3a554c02009-10-15 14:38:54 +02002973 tmp = load_reg(s, rd0);
2974 tmp2 = load_reg(s, rd1);
balrog18c9b562007-04-30 02:02:17 +00002975 switch ((insn >> 16) & 0xf) {
Peter Maydelld00584b2018-08-24 13:17:47 +01002976 case 0x0: /* MIA */
Filip Navara3a554c02009-10-15 14:38:54 +02002977 gen_helper_iwmmxt_muladdsl(cpu_M0, cpu_M0, tmp, tmp2);
balrog18c9b562007-04-30 02:02:17 +00002978 break;
Peter Maydelld00584b2018-08-24 13:17:47 +01002979 case 0x8: /* MIAPH */
Filip Navara3a554c02009-10-15 14:38:54 +02002980 gen_helper_iwmmxt_muladdsw(cpu_M0, cpu_M0, tmp, tmp2);
balrog18c9b562007-04-30 02:02:17 +00002981 break;
Peter Maydelld00584b2018-08-24 13:17:47 +01002982 case 0xc: /* MIABB */
2983 case 0xd: /* MIABT */
2984 case 0xe: /* MIATB */
2985 case 0xf: /* MIATT */
balrog18c9b562007-04-30 02:02:17 +00002986 if (insn & (1 << 16))
Filip Navara3a554c02009-10-15 14:38:54 +02002987 tcg_gen_shri_i32(tmp, tmp, 16);
balrog18c9b562007-04-30 02:02:17 +00002988 if (insn & (1 << 17))
Filip Navara3a554c02009-10-15 14:38:54 +02002989 tcg_gen_shri_i32(tmp2, tmp2, 16);
2990 gen_helper_iwmmxt_muladdswl(cpu_M0, cpu_M0, tmp, tmp2);
balrog18c9b562007-04-30 02:02:17 +00002991 break;
2992 default:
2993 return 1;
2994 }
Peter Maydell7d1b0092011-03-06 21:39:54 +00002995 tcg_temp_free_i32(tmp2);
2996 tcg_temp_free_i32(tmp);
balrog18c9b562007-04-30 02:02:17 +00002997
2998 gen_op_iwmmxt_movq_wRn_M0(acc);
2999 return 0;
3000 }
3001
3002 if ((insn & 0x0fe00ff8) == 0x0c400000) {
3003 /* Internal Accumulator Access Format */
3004 rdhi = (insn >> 16) & 0xf;
3005 rdlo = (insn >> 12) & 0xf;
3006 acc = insn & 7;
3007
3008 if (acc != 0)
3009 return 1;
3010
Peter Maydelld00584b2018-08-24 13:17:47 +01003011 if (insn & ARM_CP_RW_BIT) { /* MRA */
Filip Navara3a554c02009-10-15 14:38:54 +02003012 iwmmxt_load_reg(cpu_V0, acc);
Richard Hendersonecc7b3a2015-07-24 11:49:53 -07003013 tcg_gen_extrl_i64_i32(cpu_R[rdlo], cpu_V0);
Filip Navara3a554c02009-10-15 14:38:54 +02003014 tcg_gen_shri_i64(cpu_V0, cpu_V0, 32);
Richard Hendersonecc7b3a2015-07-24 11:49:53 -07003015 tcg_gen_extrl_i64_i32(cpu_R[rdhi], cpu_V0);
Filip Navara3a554c02009-10-15 14:38:54 +02003016 tcg_gen_andi_i32(cpu_R[rdhi], cpu_R[rdhi], (1 << (40 - 32)) - 1);
Peter Maydelld00584b2018-08-24 13:17:47 +01003017 } else { /* MAR */
Filip Navara3a554c02009-10-15 14:38:54 +02003018 tcg_gen_concat_i32_i64(cpu_V0, cpu_R[rdlo], cpu_R[rdhi]);
3019 iwmmxt_store_reg(cpu_V0, acc);
balrog18c9b562007-04-30 02:02:17 +00003020 }
3021 return 0;
3022 }
3023
3024 return 1;
3025}
3026
/* Shift X right by N bits when N is positive, left by -N when negative;
 * used below to line up the "big" part of a VFP register field. */
#define VFP_REG_SHR(x, n) (((n) > 0) ? (x) >> (n) : (x) << -(n))
/* Extract a single-precision register number: four bits starting at
 * BIGBIT form bits [4:1], and bit SMALLBIT of the insn supplies bit 0. */
#define VFP_SREG(insn, bigbit, smallbit) \
  ((VFP_REG_SHR(insn, bigbit - 1) & 0x1e) | (((insn) >> (smallbit)) & 1))
/* Extract a double-precision register number into REG.  With VFP3 the
 * SMALLBIT bit becomes bit 4 of the register number (D0-D31); earlier
 * cores only have D0-D15, so a set SMALLBIT makes the insn UNDEF
 * (the macro returns 1 from the enclosing decoder function). */
#define VFP_DREG(reg, insn, bigbit, smallbit) do { \
    if (arm_dc_feature(s, ARM_FEATURE_VFP3)) { \
        reg = (((insn) >> (bigbit)) & 0x0f) \
          | (((insn) >> ((smallbit) - 4)) & 0x10); \
    } else { \
        if (insn & (1 << (smallbit))) \
            return 1; \
        reg = ((insn) >> (bigbit)) & 0x0f; \
    }} while (0)

/* Field positions of Vd, Vn and Vm within a VFP instruction.  */
#define VFP_SREG_D(insn) VFP_SREG(insn, 12, 22)
#define VFP_DREG_D(reg, insn) VFP_DREG(reg, insn, 12, 22)
#define VFP_SREG_N(insn) VFP_SREG(insn, 16, 7)
#define VFP_DREG_N(reg, insn) VFP_DREG(reg, insn, 16, 7)
#define VFP_SREG_M(insn) VFP_SREG(insn, 0, 5)
#define VFP_DREG_M(reg, insn) VFP_DREG(reg, insn, 0, 5)
3046
pbrook4373f3c2008-03-31 03:47:19 +00003047/* Move between integer and VFP cores. */
Peter Maydell39d54922013-05-23 12:59:55 +01003048static TCGv_i32 gen_vfp_mrs(void)
pbrook4373f3c2008-03-31 03:47:19 +00003049{
Peter Maydell39d54922013-05-23 12:59:55 +01003050 TCGv_i32 tmp = tcg_temp_new_i32();
pbrook4373f3c2008-03-31 03:47:19 +00003051 tcg_gen_mov_i32(tmp, cpu_F0s);
3052 return tmp;
3053}
3054
Peter Maydell39d54922013-05-23 12:59:55 +01003055static void gen_vfp_msr(TCGv_i32 tmp)
pbrook4373f3c2008-03-31 03:47:19 +00003056{
3057 tcg_gen_mov_i32(cpu_F0s, tmp);
Peter Maydell7d1b0092011-03-06 21:39:54 +00003058 tcg_temp_free_i32(tmp);
pbrook4373f3c2008-03-31 03:47:19 +00003059}
3060
Peter Maydell39d54922013-05-23 12:59:55 +01003061static void gen_neon_dup_low16(TCGv_i32 var)
pbrookad694712008-03-31 03:48:30 +00003062{
Peter Maydell39d54922013-05-23 12:59:55 +01003063 TCGv_i32 tmp = tcg_temp_new_i32();
pbrook86831432008-05-11 12:22:01 +00003064 tcg_gen_ext16u_i32(var, var);
pbrookad694712008-03-31 03:48:30 +00003065 tcg_gen_shli_i32(tmp, var, 16);
3066 tcg_gen_or_i32(var, var, tmp);
Peter Maydell7d1b0092011-03-06 21:39:54 +00003067 tcg_temp_free_i32(tmp);
pbrookad694712008-03-31 03:48:30 +00003068}
3069
Peter Maydell39d54922013-05-23 12:59:55 +01003070static void gen_neon_dup_high16(TCGv_i32 var)
pbrookad694712008-03-31 03:48:30 +00003071{
Peter Maydell39d54922013-05-23 12:59:55 +01003072 TCGv_i32 tmp = tcg_temp_new_i32();
pbrookad694712008-03-31 03:48:30 +00003073 tcg_gen_andi_i32(var, var, 0xffff0000);
3074 tcg_gen_shri_i32(tmp, var, 16);
3075 tcg_gen_or_i32(var, var, tmp);
Peter Maydell7d1b0092011-03-06 21:39:54 +00003076 tcg_temp_free_i32(tmp);
pbrookad694712008-03-31 03:48:30 +00003077}
3078
/*
 * VSEL: write Vn or Vm to Vd depending on a condition (eq/vs/ge/gt)
 * evaluated against the current CPU flag values.
 */
static bool trans_VSEL(DisasContext *s, arg_VSEL *a)
{
    uint32_t rd, rn, rm;
    bool dp = a->dp;

    if (!dc_isar_feature(aa32_vsel, s)) {
        return false;
    }

    /* UNDEF accesses to D16-D31 if they don't exist */
    if (dp && !dc_isar_feature(aa32_fp_d32, s) &&
        ((a->vm | a->vn | a->vd) & 0x10)) {
        return false;
    }
    rd = a->vd;
    rn = a->vn;
    rm = a->vm;

    if (!vfp_access_check(s)) {
        return true;
    }

    if (dp) {
        TCGv_i64 frn, frm, dest;
        TCGv_i64 tmp, zero, zf, nf, vf;

        zero = tcg_const_i64(0);

        frn = tcg_temp_new_i64();
        frm = tcg_temp_new_i64();
        dest = tcg_temp_new_i64();

        zf = tcg_temp_new_i64();
        nf = tcg_temp_new_i64();
        vf = tcg_temp_new_i64();

        /* Widen the 32-bit flag values for use with 64-bit movcond:
         * ZF is zero-extended, NF and VF are sign-extended because the
         * conditions below test their sign (LT/GE against zero).
         */
        tcg_gen_extu_i32_i64(zf, cpu_ZF);
        tcg_gen_ext_i32_i64(nf, cpu_NF);
        tcg_gen_ext_i32_i64(vf, cpu_VF);

        tcg_gen_ld_f64(frn, cpu_env, vfp_reg_offset(dp, rn));
        tcg_gen_ld_f64(frm, cpu_env, vfp_reg_offset(dp, rm));
        switch (a->cc) {
        case 0: /* eq: Z */
            tcg_gen_movcond_i64(TCG_COND_EQ, dest, zf, zero,
                                frn, frm);
            break;
        case 1: /* vs: V */
            tcg_gen_movcond_i64(TCG_COND_LT, dest, vf, zero,
                                frn, frm);
            break;
        case 2: /* ge: N == V -> N ^ V == 0 */
            tmp = tcg_temp_new_i64();
            tcg_gen_xor_i64(tmp, vf, nf);
            tcg_gen_movcond_i64(TCG_COND_GE, dest, tmp, zero,
                                frn, frm);
            tcg_temp_free_i64(tmp);
            break;
        case 3: /* gt: !Z && N == V */
            /* Two movconds: first select on !Z, then refine on N == V. */
            tcg_gen_movcond_i64(TCG_COND_NE, dest, zf, zero,
                                frn, frm);
            tmp = tcg_temp_new_i64();
            tcg_gen_xor_i64(tmp, vf, nf);
            tcg_gen_movcond_i64(TCG_COND_GE, dest, tmp, zero,
                                dest, frm);
            tcg_temp_free_i64(tmp);
            break;
        }
        tcg_gen_st_f64(dest, cpu_env, vfp_reg_offset(dp, rd));
        tcg_temp_free_i64(frn);
        tcg_temp_free_i64(frm);
        tcg_temp_free_i64(dest);

        tcg_temp_free_i64(zf);
        tcg_temp_free_i64(nf);
        tcg_temp_free_i64(vf);

        tcg_temp_free_i64(zero);
    } else {
        /* Single-precision: the 32-bit flag values can be used directly. */
        TCGv_i32 frn, frm, dest;
        TCGv_i32 tmp, zero;

        zero = tcg_const_i32(0);

        frn = tcg_temp_new_i32();
        frm = tcg_temp_new_i32();
        dest = tcg_temp_new_i32();
        tcg_gen_ld_f32(frn, cpu_env, vfp_reg_offset(dp, rn));
        tcg_gen_ld_f32(frm, cpu_env, vfp_reg_offset(dp, rm));
        switch (a->cc) {
        case 0: /* eq: Z */
            tcg_gen_movcond_i32(TCG_COND_EQ, dest, cpu_ZF, zero,
                                frn, frm);
            break;
        case 1: /* vs: V */
            tcg_gen_movcond_i32(TCG_COND_LT, dest, cpu_VF, zero,
                                frn, frm);
            break;
        case 2: /* ge: N == V -> N ^ V == 0 */
            tmp = tcg_temp_new_i32();
            tcg_gen_xor_i32(tmp, cpu_VF, cpu_NF);
            tcg_gen_movcond_i32(TCG_COND_GE, dest, tmp, zero,
                                frn, frm);
            tcg_temp_free_i32(tmp);
            break;
        case 3: /* gt: !Z && N == V */
            tcg_gen_movcond_i32(TCG_COND_NE, dest, cpu_ZF, zero,
                                frn, frm);
            tmp = tcg_temp_new_i32();
            tcg_gen_xor_i32(tmp, cpu_VF, cpu_NF);
            tcg_gen_movcond_i32(TCG_COND_GE, dest, tmp, zero,
                                dest, frm);
            tcg_temp_free_i32(tmp);
            break;
        }
        tcg_gen_st_f32(dest, cpu_env, vfp_reg_offset(dp, rd));
        tcg_temp_free_i32(frn);
        tcg_temp_free_i32(frm);
        tcg_temp_free_i32(dest);

        tcg_temp_free_i32(zero);
    }

    return true;
}
3204
/*
 * VMINNM/VMAXNM: Vd = minNum/maxNum(Vn, Vm), selected by the op bit
 * (a->op set means VMINNM).
 */
static bool trans_VMINMAXNM(DisasContext *s, arg_VMINMAXNM *a)
{
    uint32_t rd, rn, rm;
    bool dp = a->dp;
    bool vmin = a->op;
    TCGv_ptr fpst;

    if (!dc_isar_feature(aa32_vminmaxnm, s)) {
        return false;
    }

    /* UNDEF accesses to D16-D31 if they don't exist */
    if (dp && !dc_isar_feature(aa32_fp_d32, s) &&
        ((a->vm | a->vn | a->vd) & 0x10)) {
        return false;
    }
    rd = a->vd;
    rn = a->vn;
    rm = a->vm;

    if (!vfp_access_check(s)) {
        return true;
    }

    fpst = get_fpstatus_ptr(0);

    if (dp) {
        TCGv_i64 frn, frm, dest;

        frn = tcg_temp_new_i64();
        frm = tcg_temp_new_i64();
        dest = tcg_temp_new_i64();

        tcg_gen_ld_f64(frn, cpu_env, vfp_reg_offset(dp, rn));
        tcg_gen_ld_f64(frm, cpu_env, vfp_reg_offset(dp, rm));
        if (vmin) {
            gen_helper_vfp_minnumd(dest, frn, frm, fpst);
        } else {
            gen_helper_vfp_maxnumd(dest, frn, frm, fpst);
        }
        tcg_gen_st_f64(dest, cpu_env, vfp_reg_offset(dp, rd));
        tcg_temp_free_i64(frn);
        tcg_temp_free_i64(frm);
        tcg_temp_free_i64(dest);
    } else {
        TCGv_i32 frn, frm, dest;

        frn = tcg_temp_new_i32();
        frm = tcg_temp_new_i32();
        dest = tcg_temp_new_i32();

        tcg_gen_ld_f32(frn, cpu_env, vfp_reg_offset(dp, rn));
        tcg_gen_ld_f32(frm, cpu_env, vfp_reg_offset(dp, rm));
        if (vmin) {
            gen_helper_vfp_minnums(dest, frn, frm, fpst);
        } else {
            gen_helper_vfp_maxnums(dest, frn, frm, fpst);
        }
        tcg_gen_st_f32(dest, cpu_env, vfp_reg_offset(dp, rd));
        tcg_temp_free_i32(frn);
        tcg_temp_free_i32(frm);
        tcg_temp_free_i32(dest);
    }

    tcg_temp_free_ptr(fpst);
    return true;
}
3272
/*
 * Table for converting the most common AArch32 encoding of
 * rounding mode to arm_fprounding order (which matches the
 * common AArch64 order); see ARM ARM pseudocode FPDecodeRM().
 * Indexed by the 2-bit RM field of the instruction.
 */
static const uint8_t fp_decode_rm[] = {
    FPROUNDING_TIEAWAY,    /* RM == 0b00 */
    FPROUNDING_TIEEVEN,    /* RM == 0b01 */
    FPROUNDING_POSINF,     /* RM == 0b10 */
    FPROUNDING_NEGINF,     /* RM == 0b11 */
};
3284
/*
 * VRINTA/VRINTN/VRINTP/VRINTM: round Vm to an integral value in
 * floating-point format, using the rounding mode encoded in the insn
 * (not the one in FPSCR), and write the result to Vd.
 */
static bool trans_VRINT(DisasContext *s, arg_VRINT *a)
{
    uint32_t rd, rm;
    bool dp = a->dp;
    TCGv_ptr fpst;
    TCGv_i32 tcg_rmode;
    int rounding = fp_decode_rm[a->rm];

    if (!dc_isar_feature(aa32_vrint, s)) {
        return false;
    }

    /* UNDEF accesses to D16-D31 if they don't exist */
    if (dp && !dc_isar_feature(aa32_fp_d32, s) &&
        ((a->vm | a->vd) & 0x10)) {
        return false;
    }
    rd = a->vd;
    rm = a->vm;

    if (!vfp_access_check(s)) {
        return true;
    }

    fpst = get_fpstatus_ptr(0);

    /* Temporarily switch to the insn's rounding mode.  */
    tcg_rmode = tcg_const_i32(arm_rmode_to_sf(rounding));
    gen_helper_set_rmode(tcg_rmode, tcg_rmode, fpst);

    if (dp) {
        TCGv_i64 tcg_op;
        TCGv_i64 tcg_res;
        tcg_op = tcg_temp_new_i64();
        tcg_res = tcg_temp_new_i64();
        tcg_gen_ld_f64(tcg_op, cpu_env, vfp_reg_offset(dp, rm));
        gen_helper_rintd(tcg_res, tcg_op, fpst);
        tcg_gen_st_f64(tcg_res, cpu_env, vfp_reg_offset(dp, rd));
        tcg_temp_free_i64(tcg_op);
        tcg_temp_free_i64(tcg_res);
    } else {
        TCGv_i32 tcg_op;
        TCGv_i32 tcg_res;
        tcg_op = tcg_temp_new_i32();
        tcg_res = tcg_temp_new_i32();
        tcg_gen_ld_f32(tcg_op, cpu_env, vfp_reg_offset(dp, rm));
        gen_helper_rints(tcg_res, tcg_op, fpst);
        tcg_gen_st_f32(tcg_res, cpu_env, vfp_reg_offset(dp, rd));
        tcg_temp_free_i32(tcg_op);
        tcg_temp_free_i32(tcg_res);
    }

    /* Restore the prior rounding mode (presumably set_rmode returned it
     * into tcg_rmode on the first call — confirm against the helper). */
    gen_helper_set_rmode(tcg_rmode, tcg_rmode, fpst);
    tcg_temp_free_i32(tcg_rmode);

    tcg_temp_free_ptr(fpst);
    return true;
}
3342
/*
 * VCVTA/VCVTN/VCVTP/VCVTM: convert Vm (single or double) to a signed or
 * unsigned 32-bit integer, using the rounding mode encoded in the insn.
 * The integer result always goes to a single-precision register, even
 * for a double-precision source.
 */
static bool trans_VCVT(DisasContext *s, arg_VCVT *a)
{
    uint32_t rd, rm;
    bool dp = a->dp;
    TCGv_ptr fpst;
    TCGv_i32 tcg_rmode, tcg_shift;
    int rounding = fp_decode_rm[a->rm];
    bool is_signed = a->op;

    if (!dc_isar_feature(aa32_vcvt_dr, s)) {
        return false;
    }

    /* UNDEF accesses to D16-D31 if they don't exist */
    if (dp && !dc_isar_feature(aa32_fp_d32, s) && (a->vm & 0x10)) {
        return false;
    }
    rd = a->vd;
    rm = a->vm;

    if (!vfp_access_check(s)) {
        return true;
    }

    fpst = get_fpstatus_ptr(0);

    /* Zero shift: a plain float-to-int conversion, not fixed-point.  */
    tcg_shift = tcg_const_i32(0);

    /* Temporarily switch to the insn's rounding mode.  */
    tcg_rmode = tcg_const_i32(arm_rmode_to_sf(rounding));
    gen_helper_set_rmode(tcg_rmode, tcg_rmode, fpst);

    if (dp) {
        TCGv_i64 tcg_double, tcg_res;
        TCGv_i32 tcg_tmp;
        tcg_double = tcg_temp_new_i64();
        tcg_res = tcg_temp_new_i64();
        tcg_tmp = tcg_temp_new_i32();
        tcg_gen_ld_f64(tcg_double, cpu_env, vfp_reg_offset(1, rm));
        if (is_signed) {
            gen_helper_vfp_tosld(tcg_res, tcg_double, tcg_shift, fpst);
        } else {
            gen_helper_vfp_tould(tcg_res, tcg_double, tcg_shift, fpst);
        }
        /* Narrow the 64-bit helper result; the destination is an sreg.  */
        tcg_gen_extrl_i64_i32(tcg_tmp, tcg_res);
        tcg_gen_st_f32(tcg_tmp, cpu_env, vfp_reg_offset(0, rd));
        tcg_temp_free_i32(tcg_tmp);
        tcg_temp_free_i64(tcg_res);
        tcg_temp_free_i64(tcg_double);
    } else {
        TCGv_i32 tcg_single, tcg_res;
        tcg_single = tcg_temp_new_i32();
        tcg_res = tcg_temp_new_i32();
        tcg_gen_ld_f32(tcg_single, cpu_env, vfp_reg_offset(0, rm));
        if (is_signed) {
            gen_helper_vfp_tosls(tcg_res, tcg_single, tcg_shift, fpst);
        } else {
            gen_helper_vfp_touls(tcg_res, tcg_single, tcg_shift, fpst);
        }
        tcg_gen_st_f32(tcg_res, cpu_env, vfp_reg_offset(0, rd));
        tcg_temp_free_i32(tcg_res);
        tcg_temp_free_i32(tcg_single);
    }

    /* Restore the prior rounding mode (presumably set_rmode returned it
     * into tcg_rmode on the first call — confirm against the helper). */
    gen_helper_set_rmode(tcg_rmode, tcg_rmode, fpst);
    tcg_temp_free_i32(tcg_rmode);

    tcg_temp_free_i32(tcg_shift);

    tcg_temp_free_ptr(fpst);

    return true;
}
3415
Peter Maydell06db8192019-06-11 16:39:41 +01003416/*
3417 * Disassemble a VFP instruction. Returns nonzero if an error occurred
3418 * (ie. an undefined instruction).
3419 */
Peter Maydell7dcc1f82014-10-28 19:24:03 +00003420static int disas_vfp_insn(DisasContext *s, uint32_t insn)
bellardb7bcbe92005-02-22 19:27:29 +00003421{
3422 uint32_t rd, rn, rm, op, i, n, offset, delta_d, delta_m, bank_mask;
3423 int dp, veclen;
Peter Maydell39d54922013-05-23 12:59:55 +01003424 TCGv_i32 addr;
3425 TCGv_i32 tmp;
3426 TCGv_i32 tmp2;
Peter Maydell06db8192019-06-11 16:39:41 +01003427 bool ignore_vfp_enabled = false;
bellardb7bcbe92005-02-22 19:27:29 +00003428
Peter Maydelld614a512014-10-28 19:24:01 +00003429 if (!arm_dc_feature(s, ARM_FEATURE_VFP)) {
pbrook40f137e2006-02-20 00:33:36 +00003430 return 1;
Peter Maydelld614a512014-10-28 19:24:01 +00003431 }
pbrook40f137e2006-02-20 00:33:36 +00003432
Peter Maydell78e138b2019-06-11 16:39:41 +01003433 /*
3434 * If the decodetree decoder handles this insn it will always
3435 * emit code to either execute the insn or generate an appropriate
3436 * exception; so we don't need to ever return non-zero to tell
3437 * the calling code to emit an UNDEF exception.
3438 */
3439 if (extract32(insn, 28, 4) == 0xf) {
3440 if (disas_vfp_uncond(s, insn)) {
3441 return 0;
3442 }
3443 } else {
3444 if (disas_vfp(s, insn)) {
3445 return 0;
3446 }
3447 }
3448
Peter Maydellc2a46a92019-06-11 16:39:43 +01003449 if (extract32(insn, 28, 4) == 0xf) {
3450 /*
3451 * Encodings with T=1 (Thumb) or unconditional (ARM): these
3452 * were all handled by the decodetree decoder, so any insn
3453 * patterns which get here must be UNDEF.
3454 */
3455 return 1;
3456 }
3457
Peter Maydell06db8192019-06-11 16:39:41 +01003458 /*
3459 * FIXME: this access check should not take precedence over UNDEF
Peter Maydell2c7ffc42014-04-15 19:18:40 +01003460 * for invalid encodings; we will generate incorrect syndrome information
3461 * for attempts to execute invalid vfp/neon encodings with FP disabled.
3462 */
Peter Maydell06db8192019-06-11 16:39:41 +01003463 if ((insn & 0x0fe00fff) == 0x0ee00a10) {
pbrook40f137e2006-02-20 00:33:36 +00003464 rn = (insn >> 16) & 0xf;
Peter Maydell06db8192019-06-11 16:39:41 +01003465 if (rn == ARM_VFP_FPSID || rn == ARM_VFP_FPEXC || rn == ARM_VFP_MVFR2
3466 || rn == ARM_VFP_MVFR1 || rn == ARM_VFP_MVFR0) {
3467 ignore_vfp_enabled = true;
Peter Maydella50c0f52014-04-15 19:18:44 +01003468 }
pbrook40f137e2006-02-20 00:33:36 +00003469 }
Peter Maydell06db8192019-06-11 16:39:41 +01003470 if (!full_vfp_access_check(s, ignore_vfp_enabled)) {
3471 return 0;
Peter Maydell6d60c672019-04-29 17:36:01 +01003472 }
3473
bellardb7bcbe92005-02-22 19:27:29 +00003474 dp = ((insn & 0xf00) == 0xb00);
3475 switch ((insn >> 24) & 0xf) {
3476 case 0xe:
3477 if (insn & (1 << 4)) {
3478 /* single register transfer */
bellardb7bcbe92005-02-22 19:27:29 +00003479 rd = (insn >> 12) & 0xf;
3480 if (dp) {
pbrook9ee6e8b2007-11-11 00:04:49 +00003481 int size;
3482 int pass;
3483
3484 VFP_DREG_N(rn, insn);
3485 if (insn & 0xf)
bellardb7bcbe92005-02-22 19:27:29 +00003486 return 1;
pbrook9ee6e8b2007-11-11 00:04:49 +00003487 if (insn & 0x00c00060
Peter Maydelld614a512014-10-28 19:24:01 +00003488 && !arm_dc_feature(s, ARM_FEATURE_NEON)) {
pbrook9ee6e8b2007-11-11 00:04:49 +00003489 return 1;
Peter Maydelld614a512014-10-28 19:24:01 +00003490 }
pbrook9ee6e8b2007-11-11 00:04:49 +00003491
3492 pass = (insn >> 21) & 1;
3493 if (insn & (1 << 22)) {
3494 size = 0;
3495 offset = ((insn >> 5) & 3) * 8;
3496 } else if (insn & (1 << 5)) {
3497 size = 1;
3498 offset = (insn & (1 << 6)) ? 16 : 0;
3499 } else {
3500 size = 2;
3501 offset = 0;
3502 }
balrog18c9b562007-04-30 02:02:17 +00003503 if (insn & ARM_CP_RW_BIT) {
bellardb7bcbe92005-02-22 19:27:29 +00003504 /* vfp->arm */
pbrookad694712008-03-31 03:48:30 +00003505 tmp = neon_load_reg(rn, pass);
pbrook9ee6e8b2007-11-11 00:04:49 +00003506 switch (size) {
3507 case 0:
pbrook9ee6e8b2007-11-11 00:04:49 +00003508 if (offset)
pbrookad694712008-03-31 03:48:30 +00003509 tcg_gen_shri_i32(tmp, tmp, offset);
pbrook9ee6e8b2007-11-11 00:04:49 +00003510 if (insn & (1 << 23))
pbrookad694712008-03-31 03:48:30 +00003511 gen_uxtb(tmp);
pbrook9ee6e8b2007-11-11 00:04:49 +00003512 else
pbrookad694712008-03-31 03:48:30 +00003513 gen_sxtb(tmp);
pbrook9ee6e8b2007-11-11 00:04:49 +00003514 break;
3515 case 1:
pbrook9ee6e8b2007-11-11 00:04:49 +00003516 if (insn & (1 << 23)) {
3517 if (offset) {
pbrookad694712008-03-31 03:48:30 +00003518 tcg_gen_shri_i32(tmp, tmp, 16);
pbrook9ee6e8b2007-11-11 00:04:49 +00003519 } else {
pbrookad694712008-03-31 03:48:30 +00003520 gen_uxth(tmp);
pbrook9ee6e8b2007-11-11 00:04:49 +00003521 }
3522 } else {
3523 if (offset) {
pbrookad694712008-03-31 03:48:30 +00003524 tcg_gen_sari_i32(tmp, tmp, 16);
pbrook9ee6e8b2007-11-11 00:04:49 +00003525 } else {
pbrookad694712008-03-31 03:48:30 +00003526 gen_sxth(tmp);
pbrook9ee6e8b2007-11-11 00:04:49 +00003527 }
3528 }
3529 break;
3530 case 2:
pbrook9ee6e8b2007-11-11 00:04:49 +00003531 break;
3532 }
pbrookad694712008-03-31 03:48:30 +00003533 store_reg(s, rd, tmp);
bellardb7bcbe92005-02-22 19:27:29 +00003534 } else {
3535 /* arm->vfp */
pbrookad694712008-03-31 03:48:30 +00003536 tmp = load_reg(s, rd);
pbrook9ee6e8b2007-11-11 00:04:49 +00003537 if (insn & (1 << 23)) {
3538 /* VDUP */
Richard Henderson32f91fb2018-10-24 07:50:19 +01003539 int vec_size = pass ? 16 : 8;
3540 tcg_gen_gvec_dup_i32(size, neon_reg_offset(rn, 0),
3541 vec_size, vec_size, tmp);
3542 tcg_temp_free_i32(tmp);
pbrook9ee6e8b2007-11-11 00:04:49 +00003543 } else {
3544 /* VMOV */
3545 switch (size) {
3546 case 0:
pbrookad694712008-03-31 03:48:30 +00003547 tmp2 = neon_load_reg(rn, pass);
Aurelien Jarnod593c482012-10-05 15:04:45 +01003548 tcg_gen_deposit_i32(tmp, tmp2, tmp, offset, 8);
Peter Maydell7d1b0092011-03-06 21:39:54 +00003549 tcg_temp_free_i32(tmp2);
pbrook9ee6e8b2007-11-11 00:04:49 +00003550 break;
3551 case 1:
pbrookad694712008-03-31 03:48:30 +00003552 tmp2 = neon_load_reg(rn, pass);
Aurelien Jarnod593c482012-10-05 15:04:45 +01003553 tcg_gen_deposit_i32(tmp, tmp2, tmp, offset, 16);
Peter Maydell7d1b0092011-03-06 21:39:54 +00003554 tcg_temp_free_i32(tmp2);
pbrook9ee6e8b2007-11-11 00:04:49 +00003555 break;
3556 case 2:
pbrook9ee6e8b2007-11-11 00:04:49 +00003557 break;
3558 }
pbrookad694712008-03-31 03:48:30 +00003559 neon_store_reg(rn, pass, tmp);
pbrook9ee6e8b2007-11-11 00:04:49 +00003560 }
bellardb7bcbe92005-02-22 19:27:29 +00003561 }
pbrook9ee6e8b2007-11-11 00:04:49 +00003562 } else { /* !dp */
Peter Maydellef9aae22019-04-29 17:35:58 +01003563 bool is_sysreg;
3564
pbrook9ee6e8b2007-11-11 00:04:49 +00003565 if ((insn & 0x6f) != 0x00)
3566 return 1;
3567 rn = VFP_SREG_N(insn);
Peter Maydellef9aae22019-04-29 17:35:58 +01003568
3569 is_sysreg = extract32(insn, 21, 1);
3570
3571 if (arm_dc_feature(s, ARM_FEATURE_M)) {
3572 /*
3573 * The only M-profile VFP vmrs/vmsr sysreg is FPSCR.
3574 * Writes to R15 are UNPREDICTABLE; we choose to undef.
3575 */
3576 if (is_sysreg && (rd == 15 || (rn >> 1) != ARM_VFP_FPSCR)) {
3577 return 1;
3578 }
3579 }
3580
balrog18c9b562007-04-30 02:02:17 +00003581 if (insn & ARM_CP_RW_BIT) {
bellardb7bcbe92005-02-22 19:27:29 +00003582 /* vfp->arm */
Peter Maydellef9aae22019-04-29 17:35:58 +01003583 if (is_sysreg) {
bellardb7bcbe92005-02-22 19:27:29 +00003584 /* system register */
pbrook40f137e2006-02-20 00:33:36 +00003585 rn >>= 1;
pbrook9ee6e8b2007-11-11 00:04:49 +00003586
bellardb7bcbe92005-02-22 19:27:29 +00003587 switch (rn) {
pbrook40f137e2006-02-20 00:33:36 +00003588 case ARM_VFP_FPSID:
pbrook4373f3c2008-03-31 03:47:19 +00003589 /* VFP2 allows access to FSID from userspace.
pbrook9ee6e8b2007-11-11 00:04:49 +00003590 VFP3 restricts all id registers to privileged
3591 accesses. */
3592 if (IS_USER(s)
Peter Maydelld614a512014-10-28 19:24:01 +00003593 && arm_dc_feature(s, ARM_FEATURE_VFP3)) {
pbrook9ee6e8b2007-11-11 00:04:49 +00003594 return 1;
Peter Maydelld614a512014-10-28 19:24:01 +00003595 }
pbrook4373f3c2008-03-31 03:47:19 +00003596 tmp = load_cpu_field(vfp.xregs[rn]);
pbrook9ee6e8b2007-11-11 00:04:49 +00003597 break;
pbrook40f137e2006-02-20 00:33:36 +00003598 case ARM_VFP_FPEXC:
pbrook9ee6e8b2007-11-11 00:04:49 +00003599 if (IS_USER(s))
3600 return 1;
pbrook4373f3c2008-03-31 03:47:19 +00003601 tmp = load_cpu_field(vfp.xregs[rn]);
pbrook9ee6e8b2007-11-11 00:04:49 +00003602 break;
pbrook40f137e2006-02-20 00:33:36 +00003603 case ARM_VFP_FPINST:
3604 case ARM_VFP_FPINST2:
pbrook9ee6e8b2007-11-11 00:04:49 +00003605 /* Not present in VFP3. */
3606 if (IS_USER(s)
Peter Maydelld614a512014-10-28 19:24:01 +00003607 || arm_dc_feature(s, ARM_FEATURE_VFP3)) {
pbrook9ee6e8b2007-11-11 00:04:49 +00003608 return 1;
Peter Maydelld614a512014-10-28 19:24:01 +00003609 }
pbrook4373f3c2008-03-31 03:47:19 +00003610 tmp = load_cpu_field(vfp.xregs[rn]);
bellardb7bcbe92005-02-22 19:27:29 +00003611 break;
pbrook40f137e2006-02-20 00:33:36 +00003612 case ARM_VFP_FPSCR:
balrog601d70b2008-04-20 01:03:45 +00003613 if (rd == 15) {
pbrook4373f3c2008-03-31 03:47:19 +00003614 tmp = load_cpu_field(vfp.xregs[ARM_VFP_FPSCR]);
3615 tcg_gen_andi_i32(tmp, tmp, 0xf0000000);
3616 } else {
Peter Maydell7d1b0092011-03-06 21:39:54 +00003617 tmp = tcg_temp_new_i32();
pbrook4373f3c2008-03-31 03:47:19 +00003618 gen_helper_vfp_get_fpscr(tmp, cpu_env);
3619 }
bellardb7bcbe92005-02-22 19:27:29 +00003620 break;
Peter Maydella50c0f52014-04-15 19:18:44 +01003621 case ARM_VFP_MVFR2:
Peter Maydelld614a512014-10-28 19:24:01 +00003622 if (!arm_dc_feature(s, ARM_FEATURE_V8)) {
Peter Maydella50c0f52014-04-15 19:18:44 +01003623 return 1;
3624 }
3625 /* fall through */
pbrook9ee6e8b2007-11-11 00:04:49 +00003626 case ARM_VFP_MVFR0:
3627 case ARM_VFP_MVFR1:
3628 if (IS_USER(s)
Peter Maydelld614a512014-10-28 19:24:01 +00003629 || !arm_dc_feature(s, ARM_FEATURE_MVFR)) {
pbrook9ee6e8b2007-11-11 00:04:49 +00003630 return 1;
Peter Maydelld614a512014-10-28 19:24:01 +00003631 }
pbrook4373f3c2008-03-31 03:47:19 +00003632 tmp = load_cpu_field(vfp.xregs[rn]);
pbrook9ee6e8b2007-11-11 00:04:49 +00003633 break;
bellardb7bcbe92005-02-22 19:27:29 +00003634 default:
3635 return 1;
3636 }
3637 } else {
3638 gen_mov_F0_vreg(0, rn);
pbrook4373f3c2008-03-31 03:47:19 +00003639 tmp = gen_vfp_mrs();
bellardb7bcbe92005-02-22 19:27:29 +00003640 }
3641 if (rd == 15) {
bellardb5ff1b32005-11-26 10:38:39 +00003642 /* Set the 4 flag bits in the CPSR. */
pbrook4373f3c2008-03-31 03:47:19 +00003643 gen_set_nzcv(tmp);
Peter Maydell7d1b0092011-03-06 21:39:54 +00003644 tcg_temp_free_i32(tmp);
pbrook4373f3c2008-03-31 03:47:19 +00003645 } else {
3646 store_reg(s, rd, tmp);
3647 }
bellardb7bcbe92005-02-22 19:27:29 +00003648 } else {
3649 /* arm->vfp */
Peter Maydellef9aae22019-04-29 17:35:58 +01003650 if (is_sysreg) {
pbrook40f137e2006-02-20 00:33:36 +00003651 rn >>= 1;
bellardb7bcbe92005-02-22 19:27:29 +00003652 /* system register */
3653 switch (rn) {
pbrook40f137e2006-02-20 00:33:36 +00003654 case ARM_VFP_FPSID:
pbrook9ee6e8b2007-11-11 00:04:49 +00003655 case ARM_VFP_MVFR0:
3656 case ARM_VFP_MVFR1:
bellardb7bcbe92005-02-22 19:27:29 +00003657 /* Writes are ignored. */
3658 break;
pbrook40f137e2006-02-20 00:33:36 +00003659 case ARM_VFP_FPSCR:
Peter Maydelle4c1cfa2013-01-30 16:01:56 +00003660 tmp = load_reg(s, rd);
pbrook4373f3c2008-03-31 03:47:19 +00003661 gen_helper_vfp_set_fpscr(cpu_env, tmp);
Peter Maydell7d1b0092011-03-06 21:39:54 +00003662 tcg_temp_free_i32(tmp);
bellardb5ff1b32005-11-26 10:38:39 +00003663 gen_lookup_tb(s);
bellardb7bcbe92005-02-22 19:27:29 +00003664 break;
pbrook40f137e2006-02-20 00:33:36 +00003665 case ARM_VFP_FPEXC:
pbrook9ee6e8b2007-11-11 00:04:49 +00003666 if (IS_USER(s))
3667 return 1;
Juha Riihimäki71b3c3d2009-10-26 11:46:42 +02003668 /* TODO: VFP subarchitecture support.
3669 * For now, keep the EN bit only */
Peter Maydelle4c1cfa2013-01-30 16:01:56 +00003670 tmp = load_reg(s, rd);
Juha Riihimäki71b3c3d2009-10-26 11:46:42 +02003671 tcg_gen_andi_i32(tmp, tmp, 1 << 30);
pbrook4373f3c2008-03-31 03:47:19 +00003672 store_cpu_field(tmp, vfp.xregs[rn]);
pbrook40f137e2006-02-20 00:33:36 +00003673 gen_lookup_tb(s);
3674 break;
3675 case ARM_VFP_FPINST:
3676 case ARM_VFP_FPINST2:
Peter Maydell23adb862014-10-24 12:19:14 +01003677 if (IS_USER(s)) {
3678 return 1;
3679 }
Peter Maydelle4c1cfa2013-01-30 16:01:56 +00003680 tmp = load_reg(s, rd);
pbrook4373f3c2008-03-31 03:47:19 +00003681 store_cpu_field(tmp, vfp.xregs[rn]);
pbrook40f137e2006-02-20 00:33:36 +00003682 break;
bellardb7bcbe92005-02-22 19:27:29 +00003683 default:
3684 return 1;
3685 }
3686 } else {
Peter Maydelle4c1cfa2013-01-30 16:01:56 +00003687 tmp = load_reg(s, rd);
pbrook4373f3c2008-03-31 03:47:19 +00003688 gen_vfp_msr(tmp);
bellardb7bcbe92005-02-22 19:27:29 +00003689 gen_mov_vreg_F0(0, rn);
3690 }
3691 }
3692 }
3693 } else {
3694 /* data processing */
Richard Hendersone80941b2019-02-21 18:17:45 +00003695 bool rd_is_dp = dp;
3696 bool rm_is_dp = dp;
3697 bool no_output = false;
3698
bellardb7bcbe92005-02-22 19:27:29 +00003699 /* The opcode is in bits 23, 21, 20 and 6. */
3700 op = ((insn >> 20) & 8) | ((insn >> 19) & 6) | ((insn >> 6) & 1);
Richard Hendersone80941b2019-02-21 18:17:45 +00003701 rn = VFP_SREG_N(insn);
bellardb7bcbe92005-02-22 19:27:29 +00003702
Richard Hendersone80941b2019-02-21 18:17:45 +00003703 if (op == 15) {
3704 /* rn is opcode, encoded as per VFP_SREG_N. */
3705 switch (rn) {
3706 case 0x00: /* vmov */
3707 case 0x01: /* vabs */
3708 case 0x02: /* vneg */
3709 case 0x03: /* vsqrt */
3710 break;
3711
3712 case 0x04: /* vcvtb.f64.f16, vcvtb.f32.f16 */
3713 case 0x05: /* vcvtt.f64.f16, vcvtt.f32.f16 */
3714 /*
3715 * VCVTB, VCVTT: only present with the halfprec extension
3716 * UNPREDICTABLE if bit 8 is set prior to ARMv8
3717 * (we choose to UNDEF)
Peter Maydell04595bf2010-12-07 15:37:34 +00003718 */
Peter Maydell602f6e42019-02-28 10:55:16 +00003719 if (dp) {
3720 if (!dc_isar_feature(aa32_fp16_dpconv, s)) {
3721 return 1;
3722 }
3723 } else {
3724 if (!dc_isar_feature(aa32_fp16_spconv, s)) {
3725 return 1;
3726 }
Richard Hendersone80941b2019-02-21 18:17:45 +00003727 }
3728 rm_is_dp = false;
3729 break;
3730 case 0x06: /* vcvtb.f16.f32, vcvtb.f16.f64 */
3731 case 0x07: /* vcvtt.f16.f32, vcvtt.f16.f64 */
Peter Maydell602f6e42019-02-28 10:55:16 +00003732 if (dp) {
3733 if (!dc_isar_feature(aa32_fp16_dpconv, s)) {
3734 return 1;
3735 }
3736 } else {
3737 if (!dc_isar_feature(aa32_fp16_spconv, s)) {
3738 return 1;
3739 }
Richard Hendersone80941b2019-02-21 18:17:45 +00003740 }
3741 rd_is_dp = false;
3742 break;
3743
3744 case 0x08: case 0x0a: /* vcmp, vcmpz */
3745 case 0x09: case 0x0b: /* vcmpe, vcmpez */
3746 no_output = true;
3747 break;
3748
3749 case 0x0c: /* vrintr */
3750 case 0x0d: /* vrintz */
3751 case 0x0e: /* vrintx */
3752 break;
3753
3754 case 0x0f: /* vcvt double<->single */
3755 rd_is_dp = !dp;
3756 break;
3757
3758 case 0x10: /* vcvt.fxx.u32 */
3759 case 0x11: /* vcvt.fxx.s32 */
3760 rm_is_dp = false;
3761 break;
3762 case 0x18: /* vcvtr.u32.fxx */
3763 case 0x19: /* vcvtz.u32.fxx */
3764 case 0x1a: /* vcvtr.s32.fxx */
3765 case 0x1b: /* vcvtz.s32.fxx */
3766 rd_is_dp = false;
3767 break;
3768
3769 case 0x14: /* vcvt fp <-> fixed */
3770 case 0x15:
3771 case 0x16:
3772 case 0x17:
3773 case 0x1c:
3774 case 0x1d:
3775 case 0x1e:
3776 case 0x1f:
3777 if (!arm_dc_feature(s, ARM_FEATURE_VFP3)) {
3778 return 1;
3779 }
3780 /* Immediate frac_bits has same format as SREG_M. */
3781 rm_is_dp = false;
3782 break;
3783
Richard Henderson6c1f6f22019-02-21 18:17:46 +00003784 case 0x13: /* vjcvt */
3785 if (!dp || !dc_isar_feature(aa32_jscvt, s)) {
3786 return 1;
3787 }
3788 rd_is_dp = false;
3789 break;
3790
Richard Hendersone80941b2019-02-21 18:17:45 +00003791 default:
3792 return 1;
bellardb7bcbe92005-02-22 19:27:29 +00003793 }
Richard Hendersone80941b2019-02-21 18:17:45 +00003794 } else if (dp) {
3795 /* rn is register number */
3796 VFP_DREG_N(rn, insn);
3797 }
3798
3799 if (rd_is_dp) {
3800 VFP_DREG_D(rd, insn);
bellardb7bcbe92005-02-22 19:27:29 +00003801 } else {
Richard Hendersone80941b2019-02-21 18:17:45 +00003802 rd = VFP_SREG_D(insn);
3803 }
3804 if (rm_is_dp) {
3805 VFP_DREG_M(rm, insn);
3806 } else {
pbrook9ee6e8b2007-11-11 00:04:49 +00003807 rm = VFP_SREG_M(insn);
bellardb7bcbe92005-02-22 19:27:29 +00003808 }
3809
Peter Maydell69d1fc22011-01-14 20:39:19 +01003810 veclen = s->vec_len;
Richard Hendersone80941b2019-02-21 18:17:45 +00003811 if (op == 15 && rn > 3) {
bellardb7bcbe92005-02-22 19:27:29 +00003812 veclen = 0;
Richard Hendersone80941b2019-02-21 18:17:45 +00003813 }
bellardb7bcbe92005-02-22 19:27:29 +00003814
3815 /* Shut up compiler warnings. */
3816 delta_m = 0;
3817 delta_d = 0;
3818 bank_mask = 0;
ths3b46e622007-09-17 08:09:54 +00003819
bellardb7bcbe92005-02-22 19:27:29 +00003820 if (veclen > 0) {
3821 if (dp)
3822 bank_mask = 0xc;
3823 else
3824 bank_mask = 0x18;
3825
3826 /* Figure out what type of vector operation this is. */
3827 if ((rd & bank_mask) == 0) {
3828 /* scalar */
3829 veclen = 0;
3830 } else {
3831 if (dp)
Peter Maydell69d1fc22011-01-14 20:39:19 +01003832 delta_d = (s->vec_stride >> 1) + 1;
bellardb7bcbe92005-02-22 19:27:29 +00003833 else
Peter Maydell69d1fc22011-01-14 20:39:19 +01003834 delta_d = s->vec_stride + 1;
bellardb7bcbe92005-02-22 19:27:29 +00003835
3836 if ((rm & bank_mask) == 0) {
3837 /* mixed scalar/vector */
3838 delta_m = 0;
3839 } else {
3840 /* vector */
3841 delta_m = delta_d;
3842 }
3843 }
3844 }
3845
3846 /* Load the initial operands. */
3847 if (op == 15) {
3848 switch (rn) {
Richard Hendersone80941b2019-02-21 18:17:45 +00003849 case 0x08: case 0x09: /* Compare */
bellardb7bcbe92005-02-22 19:27:29 +00003850 gen_mov_F0_vreg(dp, rd);
3851 gen_mov_F1_vreg(dp, rm);
3852 break;
Richard Hendersone80941b2019-02-21 18:17:45 +00003853 case 0x0a: case 0x0b: /* Compare with zero */
bellardb7bcbe92005-02-22 19:27:29 +00003854 gen_mov_F0_vreg(dp, rd);
3855 gen_vfp_F1_ld0(dp);
3856 break;
Richard Hendersone80941b2019-02-21 18:17:45 +00003857 case 0x14: /* vcvt fp <-> fixed */
3858 case 0x15:
3859 case 0x16:
3860 case 0x17:
3861 case 0x1c:
3862 case 0x1d:
3863 case 0x1e:
3864 case 0x1f:
pbrook9ee6e8b2007-11-11 00:04:49 +00003865 /* Source and destination the same. */
3866 gen_mov_F0_vreg(dp, rd);
3867 break;
bellardb7bcbe92005-02-22 19:27:29 +00003868 default:
3869 /* One source operand. */
Richard Hendersone80941b2019-02-21 18:17:45 +00003870 gen_mov_F0_vreg(rm_is_dp, rm);
pbrook9ee6e8b2007-11-11 00:04:49 +00003871 break;
bellardb7bcbe92005-02-22 19:27:29 +00003872 }
3873 } else {
3874 /* Two source operands. */
3875 gen_mov_F0_vreg(dp, rn);
3876 gen_mov_F1_vreg(dp, rm);
3877 }
3878
3879 for (;;) {
3880 /* Perform the calculation. */
3881 switch (op) {
Peter Maydell605a6ae2011-05-05 19:35:35 +01003882 case 0: /* VMLA: fd + (fn * fm) */
3883 /* Note that order of inputs to the add matters for NaNs */
3884 gen_vfp_F1_mul(dp);
3885 gen_mov_F0_vreg(dp, rd);
bellardb7bcbe92005-02-22 19:27:29 +00003886 gen_vfp_add(dp);
3887 break;
Peter Maydell605a6ae2011-05-05 19:35:35 +01003888 case 1: /* VMLS: fd + -(fn * fm) */
bellardb7bcbe92005-02-22 19:27:29 +00003889 gen_vfp_mul(dp);
Peter Maydell605a6ae2011-05-05 19:35:35 +01003890 gen_vfp_F1_neg(dp);
3891 gen_mov_F0_vreg(dp, rd);
bellardb7bcbe92005-02-22 19:27:29 +00003892 gen_vfp_add(dp);
3893 break;
Peter Maydell605a6ae2011-05-05 19:35:35 +01003894 case 2: /* VNMLS: -fd + (fn * fm) */
3895 /* Note that it isn't valid to replace (-A + B) with (B - A)
3896 * or similar plausible looking simplifications
3897 * because this will give wrong results for NaNs.
3898 */
3899 gen_vfp_F1_mul(dp);
3900 gen_mov_F0_vreg(dp, rd);
bellardb7bcbe92005-02-22 19:27:29 +00003901 gen_vfp_neg(dp);
Peter Maydell605a6ae2011-05-05 19:35:35 +01003902 gen_vfp_add(dp);
3903 break;
3904 case 3: /* VNMLA: -fd + -(fn * fm) */
3905 gen_vfp_mul(dp);
3906 gen_vfp_F1_neg(dp);
3907 gen_mov_F0_vreg(dp, rd);
3908 gen_vfp_neg(dp);
3909 gen_vfp_add(dp);
bellardb7bcbe92005-02-22 19:27:29 +00003910 break;
3911 case 4: /* mul: fn * fm */
3912 gen_vfp_mul(dp);
3913 break;
3914 case 5: /* nmul: -(fn * fm) */
3915 gen_vfp_mul(dp);
3916 gen_vfp_neg(dp);
3917 break;
3918 case 6: /* add: fn + fm */
3919 gen_vfp_add(dp);
3920 break;
3921 case 7: /* sub: fn - fm */
3922 gen_vfp_sub(dp);
3923 break;
3924 case 8: /* div: fn / fm */
3925 gen_vfp_div(dp);
3926 break;
Peter Maydellda97f522011-10-19 16:14:07 +00003927 case 10: /* VFNMA : fd = muladd(-fd, fn, fm) */
3928 case 11: /* VFNMS : fd = muladd(-fd, -fn, fm) */
3929 case 12: /* VFMA : fd = muladd( fd, fn, fm) */
3930 case 13: /* VFMS : fd = muladd( fd, -fn, fm) */
3931 /* These are fused multiply-add, and must be done as one
3932 * floating point operation with no rounding between the
3933 * multiplication and addition steps.
3934 * NB that doing the negations here as separate steps is
3935 * correct : an input NaN should come out with its sign bit
3936 * flipped if it is a negated-input.
3937 */
Peter Maydelld614a512014-10-28 19:24:01 +00003938 if (!arm_dc_feature(s, ARM_FEATURE_VFP4)) {
Peter Maydellda97f522011-10-19 16:14:07 +00003939 return 1;
3940 }
3941 if (dp) {
3942 TCGv_ptr fpst;
3943 TCGv_i64 frd;
3944 if (op & 1) {
3945 /* VFNMS, VFMS */
3946 gen_helper_vfp_negd(cpu_F0d, cpu_F0d);
3947 }
3948 frd = tcg_temp_new_i64();
3949 tcg_gen_ld_f64(frd, cpu_env, vfp_reg_offset(dp, rd));
3950 if (op & 2) {
3951 /* VFNMA, VFNMS */
3952 gen_helper_vfp_negd(frd, frd);
3953 }
3954 fpst = get_fpstatus_ptr(0);
3955 gen_helper_vfp_muladdd(cpu_F0d, cpu_F0d,
3956 cpu_F1d, frd, fpst);
3957 tcg_temp_free_ptr(fpst);
3958 tcg_temp_free_i64(frd);
3959 } else {
3960 TCGv_ptr fpst;
3961 TCGv_i32 frd;
3962 if (op & 1) {
3963 /* VFNMS, VFMS */
3964 gen_helper_vfp_negs(cpu_F0s, cpu_F0s);
3965 }
3966 frd = tcg_temp_new_i32();
3967 tcg_gen_ld_f32(frd, cpu_env, vfp_reg_offset(dp, rd));
3968 if (op & 2) {
3969 gen_helper_vfp_negs(frd, frd);
3970 }
3971 fpst = get_fpstatus_ptr(0);
3972 gen_helper_vfp_muladds(cpu_F0s, cpu_F0s,
3973 cpu_F1s, frd, fpst);
3974 tcg_temp_free_ptr(fpst);
3975 tcg_temp_free_i32(frd);
3976 }
3977 break;
pbrook9ee6e8b2007-11-11 00:04:49 +00003978 case 14: /* fconst */
Peter Maydelld614a512014-10-28 19:24:01 +00003979 if (!arm_dc_feature(s, ARM_FEATURE_VFP3)) {
3980 return 1;
3981 }
pbrook9ee6e8b2007-11-11 00:04:49 +00003982
3983 n = (insn << 12) & 0x80000000;
3984 i = ((insn >> 12) & 0x70) | (insn & 0xf);
3985 if (dp) {
3986 if (i & 0x40)
3987 i |= 0x3f80;
3988 else
3989 i |= 0x4000;
3990 n |= i << 16;
pbrook4373f3c2008-03-31 03:47:19 +00003991 tcg_gen_movi_i64(cpu_F0d, ((uint64_t)n) << 32);
pbrook9ee6e8b2007-11-11 00:04:49 +00003992 } else {
3993 if (i & 0x40)
3994 i |= 0x780;
3995 else
3996 i |= 0x800;
3997 n |= i << 19;
balrog5b340b52008-04-14 02:19:57 +00003998 tcg_gen_movi_i32(cpu_F0s, n);
pbrook9ee6e8b2007-11-11 00:04:49 +00003999 }
pbrook9ee6e8b2007-11-11 00:04:49 +00004000 break;
bellardb7bcbe92005-02-22 19:27:29 +00004001 case 15: /* extension space */
4002 switch (rn) {
4003 case 0: /* cpy */
4004 /* no-op */
4005 break;
4006 case 1: /* abs */
4007 gen_vfp_abs(dp);
4008 break;
4009 case 2: /* neg */
4010 gen_vfp_neg(dp);
4011 break;
4012 case 3: /* sqrt */
4013 gen_vfp_sqrt(dp);
4014 break;
Will Newton239c20c2014-01-29 10:31:51 +00004015 case 4: /* vcvtb.f32.f16, vcvtb.f64.f16 */
Alex Bennée486624f2018-05-07 13:17:16 +01004016 {
4017 TCGv_ptr fpst = get_fpstatus_ptr(false);
4018 TCGv_i32 ahp_mode = get_ahp_flag();
Paul Brook60011492009-11-19 16:45:20 +00004019 tmp = gen_vfp_mrs();
4020 tcg_gen_ext16u_i32(tmp, tmp);
Will Newton239c20c2014-01-29 10:31:51 +00004021 if (dp) {
4022 gen_helper_vfp_fcvt_f16_to_f64(cpu_F0d, tmp,
Alex Bennée486624f2018-05-07 13:17:16 +01004023 fpst, ahp_mode);
Will Newton239c20c2014-01-29 10:31:51 +00004024 } else {
4025 gen_helper_vfp_fcvt_f16_to_f32(cpu_F0s, tmp,
Alex Bennée486624f2018-05-07 13:17:16 +01004026 fpst, ahp_mode);
Will Newton239c20c2014-01-29 10:31:51 +00004027 }
Alex Bennée486624f2018-05-07 13:17:16 +01004028 tcg_temp_free_i32(ahp_mode);
4029 tcg_temp_free_ptr(fpst);
Peter Maydell7d1b0092011-03-06 21:39:54 +00004030 tcg_temp_free_i32(tmp);
Paul Brook60011492009-11-19 16:45:20 +00004031 break;
Alex Bennée486624f2018-05-07 13:17:16 +01004032 }
Will Newton239c20c2014-01-29 10:31:51 +00004033 case 5: /* vcvtt.f32.f16, vcvtt.f64.f16 */
Alex Bennée486624f2018-05-07 13:17:16 +01004034 {
4035 TCGv_ptr fpst = get_fpstatus_ptr(false);
4036 TCGv_i32 ahp = get_ahp_flag();
Paul Brook60011492009-11-19 16:45:20 +00004037 tmp = gen_vfp_mrs();
4038 tcg_gen_shri_i32(tmp, tmp, 16);
Will Newton239c20c2014-01-29 10:31:51 +00004039 if (dp) {
4040 gen_helper_vfp_fcvt_f16_to_f64(cpu_F0d, tmp,
Alex Bennée486624f2018-05-07 13:17:16 +01004041 fpst, ahp);
Will Newton239c20c2014-01-29 10:31:51 +00004042 } else {
4043 gen_helper_vfp_fcvt_f16_to_f32(cpu_F0s, tmp,
Alex Bennée486624f2018-05-07 13:17:16 +01004044 fpst, ahp);
Will Newton239c20c2014-01-29 10:31:51 +00004045 }
Peter Maydell7d1b0092011-03-06 21:39:54 +00004046 tcg_temp_free_i32(tmp);
Alex Bennée486624f2018-05-07 13:17:16 +01004047 tcg_temp_free_i32(ahp);
4048 tcg_temp_free_ptr(fpst);
Paul Brook60011492009-11-19 16:45:20 +00004049 break;
Alex Bennée486624f2018-05-07 13:17:16 +01004050 }
Will Newton239c20c2014-01-29 10:31:51 +00004051 case 6: /* vcvtb.f16.f32, vcvtb.f16.f64 */
Alex Bennée486624f2018-05-07 13:17:16 +01004052 {
4053 TCGv_ptr fpst = get_fpstatus_ptr(false);
4054 TCGv_i32 ahp = get_ahp_flag();
Peter Maydell7d1b0092011-03-06 21:39:54 +00004055 tmp = tcg_temp_new_i32();
Alex Bennée486624f2018-05-07 13:17:16 +01004056
Will Newton239c20c2014-01-29 10:31:51 +00004057 if (dp) {
4058 gen_helper_vfp_fcvt_f64_to_f16(tmp, cpu_F0d,
Alex Bennée486624f2018-05-07 13:17:16 +01004059 fpst, ahp);
Will Newton239c20c2014-01-29 10:31:51 +00004060 } else {
4061 gen_helper_vfp_fcvt_f32_to_f16(tmp, cpu_F0s,
Alex Bennée486624f2018-05-07 13:17:16 +01004062 fpst, ahp);
Will Newton239c20c2014-01-29 10:31:51 +00004063 }
Alex Bennée486624f2018-05-07 13:17:16 +01004064 tcg_temp_free_i32(ahp);
4065 tcg_temp_free_ptr(fpst);
Paul Brook60011492009-11-19 16:45:20 +00004066 gen_mov_F0_vreg(0, rd);
4067 tmp2 = gen_vfp_mrs();
4068 tcg_gen_andi_i32(tmp2, tmp2, 0xffff0000);
4069 tcg_gen_or_i32(tmp, tmp, tmp2);
Peter Maydell7d1b0092011-03-06 21:39:54 +00004070 tcg_temp_free_i32(tmp2);
Paul Brook60011492009-11-19 16:45:20 +00004071 gen_vfp_msr(tmp);
4072 break;
Alex Bennée486624f2018-05-07 13:17:16 +01004073 }
Will Newton239c20c2014-01-29 10:31:51 +00004074 case 7: /* vcvtt.f16.f32, vcvtt.f16.f64 */
Alex Bennée486624f2018-05-07 13:17:16 +01004075 {
4076 TCGv_ptr fpst = get_fpstatus_ptr(false);
4077 TCGv_i32 ahp = get_ahp_flag();
Peter Maydell7d1b0092011-03-06 21:39:54 +00004078 tmp = tcg_temp_new_i32();
Will Newton239c20c2014-01-29 10:31:51 +00004079 if (dp) {
4080 gen_helper_vfp_fcvt_f64_to_f16(tmp, cpu_F0d,
Alex Bennée486624f2018-05-07 13:17:16 +01004081 fpst, ahp);
Will Newton239c20c2014-01-29 10:31:51 +00004082 } else {
4083 gen_helper_vfp_fcvt_f32_to_f16(tmp, cpu_F0s,
Alex Bennée486624f2018-05-07 13:17:16 +01004084 fpst, ahp);
Will Newton239c20c2014-01-29 10:31:51 +00004085 }
Alex Bennée486624f2018-05-07 13:17:16 +01004086 tcg_temp_free_i32(ahp);
4087 tcg_temp_free_ptr(fpst);
Paul Brook60011492009-11-19 16:45:20 +00004088 tcg_gen_shli_i32(tmp, tmp, 16);
4089 gen_mov_F0_vreg(0, rd);
4090 tmp2 = gen_vfp_mrs();
4091 tcg_gen_ext16u_i32(tmp2, tmp2);
4092 tcg_gen_or_i32(tmp, tmp, tmp2);
Peter Maydell7d1b0092011-03-06 21:39:54 +00004093 tcg_temp_free_i32(tmp2);
Paul Brook60011492009-11-19 16:45:20 +00004094 gen_vfp_msr(tmp);
4095 break;
Alex Bennée486624f2018-05-07 13:17:16 +01004096 }
bellardb7bcbe92005-02-22 19:27:29 +00004097 case 8: /* cmp */
4098 gen_vfp_cmp(dp);
4099 break;
4100 case 9: /* cmpe */
4101 gen_vfp_cmpe(dp);
4102 break;
4103 case 10: /* cmpz */
4104 gen_vfp_cmp(dp);
4105 break;
4106 case 11: /* cmpez */
4107 gen_vfp_F1_ld0(dp);
4108 gen_vfp_cmpe(dp);
4109 break;
Will Newton664c6732014-01-31 14:47:34 +00004110 case 12: /* vrintr */
4111 {
4112 TCGv_ptr fpst = get_fpstatus_ptr(0);
4113 if (dp) {
4114 gen_helper_rintd(cpu_F0d, cpu_F0d, fpst);
4115 } else {
4116 gen_helper_rints(cpu_F0s, cpu_F0s, fpst);
4117 }
4118 tcg_temp_free_ptr(fpst);
4119 break;
4120 }
Will Newtona290c622014-01-31 14:47:34 +00004121 case 13: /* vrintz */
4122 {
4123 TCGv_ptr fpst = get_fpstatus_ptr(0);
4124 TCGv_i32 tcg_rmode;
4125 tcg_rmode = tcg_const_i32(float_round_to_zero);
Alex Bennée9b049912018-03-01 11:05:47 +00004126 gen_helper_set_rmode(tcg_rmode, tcg_rmode, fpst);
Will Newtona290c622014-01-31 14:47:34 +00004127 if (dp) {
4128 gen_helper_rintd(cpu_F0d, cpu_F0d, fpst);
4129 } else {
4130 gen_helper_rints(cpu_F0s, cpu_F0s, fpst);
4131 }
Alex Bennée9b049912018-03-01 11:05:47 +00004132 gen_helper_set_rmode(tcg_rmode, tcg_rmode, fpst);
Will Newtona290c622014-01-31 14:47:34 +00004133 tcg_temp_free_i32(tcg_rmode);
4134 tcg_temp_free_ptr(fpst);
4135 break;
4136 }
Will Newton4e82bc02014-01-31 14:47:34 +00004137 case 14: /* vrintx */
4138 {
4139 TCGv_ptr fpst = get_fpstatus_ptr(0);
4140 if (dp) {
4141 gen_helper_rintd_exact(cpu_F0d, cpu_F0d, fpst);
4142 } else {
4143 gen_helper_rints_exact(cpu_F0s, cpu_F0s, fpst);
4144 }
4145 tcg_temp_free_ptr(fpst);
4146 break;
4147 }
bellardb7bcbe92005-02-22 19:27:29 +00004148 case 15: /* single<->double conversion */
Richard Hendersone80941b2019-02-21 18:17:45 +00004149 if (dp) {
pbrook4373f3c2008-03-31 03:47:19 +00004150 gen_helper_vfp_fcvtsd(cpu_F0s, cpu_F0d, cpu_env);
Richard Hendersone80941b2019-02-21 18:17:45 +00004151 } else {
pbrook4373f3c2008-03-31 03:47:19 +00004152 gen_helper_vfp_fcvtds(cpu_F0d, cpu_F0s, cpu_env);
Richard Hendersone80941b2019-02-21 18:17:45 +00004153 }
bellardb7bcbe92005-02-22 19:27:29 +00004154 break;
4155 case 16: /* fuito */
Peter Maydell5500b062011-05-19 14:46:19 +01004156 gen_vfp_uito(dp, 0);
bellardb7bcbe92005-02-22 19:27:29 +00004157 break;
4158 case 17: /* fsito */
Peter Maydell5500b062011-05-19 14:46:19 +01004159 gen_vfp_sito(dp, 0);
bellardb7bcbe92005-02-22 19:27:29 +00004160 break;
Richard Henderson6c1f6f22019-02-21 18:17:46 +00004161 case 19: /* vjcvt */
4162 gen_helper_vjcvt(cpu_F0s, cpu_F0d, cpu_env);
4163 break;
pbrook9ee6e8b2007-11-11 00:04:49 +00004164 case 20: /* fshto */
Peter Maydell5500b062011-05-19 14:46:19 +01004165 gen_vfp_shto(dp, 16 - rm, 0);
pbrook9ee6e8b2007-11-11 00:04:49 +00004166 break;
4167 case 21: /* fslto */
Peter Maydell5500b062011-05-19 14:46:19 +01004168 gen_vfp_slto(dp, 32 - rm, 0);
pbrook9ee6e8b2007-11-11 00:04:49 +00004169 break;
4170 case 22: /* fuhto */
Peter Maydell5500b062011-05-19 14:46:19 +01004171 gen_vfp_uhto(dp, 16 - rm, 0);
pbrook9ee6e8b2007-11-11 00:04:49 +00004172 break;
4173 case 23: /* fulto */
Peter Maydell5500b062011-05-19 14:46:19 +01004174 gen_vfp_ulto(dp, 32 - rm, 0);
pbrook9ee6e8b2007-11-11 00:04:49 +00004175 break;
bellardb7bcbe92005-02-22 19:27:29 +00004176 case 24: /* ftoui */
Peter Maydell5500b062011-05-19 14:46:19 +01004177 gen_vfp_toui(dp, 0);
bellardb7bcbe92005-02-22 19:27:29 +00004178 break;
4179 case 25: /* ftouiz */
Peter Maydell5500b062011-05-19 14:46:19 +01004180 gen_vfp_touiz(dp, 0);
bellardb7bcbe92005-02-22 19:27:29 +00004181 break;
4182 case 26: /* ftosi */
Peter Maydell5500b062011-05-19 14:46:19 +01004183 gen_vfp_tosi(dp, 0);
bellardb7bcbe92005-02-22 19:27:29 +00004184 break;
4185 case 27: /* ftosiz */
Peter Maydell5500b062011-05-19 14:46:19 +01004186 gen_vfp_tosiz(dp, 0);
bellardb7bcbe92005-02-22 19:27:29 +00004187 break;
pbrook9ee6e8b2007-11-11 00:04:49 +00004188 case 28: /* ftosh */
Peter Maydell5500b062011-05-19 14:46:19 +01004189 gen_vfp_tosh(dp, 16 - rm, 0);
pbrook9ee6e8b2007-11-11 00:04:49 +00004190 break;
4191 case 29: /* ftosl */
Peter Maydell5500b062011-05-19 14:46:19 +01004192 gen_vfp_tosl(dp, 32 - rm, 0);
pbrook9ee6e8b2007-11-11 00:04:49 +00004193 break;
4194 case 30: /* ftouh */
Peter Maydell5500b062011-05-19 14:46:19 +01004195 gen_vfp_touh(dp, 16 - rm, 0);
pbrook9ee6e8b2007-11-11 00:04:49 +00004196 break;
4197 case 31: /* ftoul */
Peter Maydell5500b062011-05-19 14:46:19 +01004198 gen_vfp_toul(dp, 32 - rm, 0);
pbrook9ee6e8b2007-11-11 00:04:49 +00004199 break;
bellardb7bcbe92005-02-22 19:27:29 +00004200 default: /* undefined */
Richard Hendersone80941b2019-02-21 18:17:45 +00004201 g_assert_not_reached();
bellardb7bcbe92005-02-22 19:27:29 +00004202 }
4203 break;
4204 default: /* undefined */
bellardb7bcbe92005-02-22 19:27:29 +00004205 return 1;
4206 }
4207
Richard Hendersone80941b2019-02-21 18:17:45 +00004208 /* Write back the result, if any. */
4209 if (!no_output) {
4210 gen_mov_vreg_F0(rd_is_dp, rd);
Will Newton239c20c2014-01-29 10:31:51 +00004211 }
bellardb7bcbe92005-02-22 19:27:29 +00004212
4213 /* break out of the loop if we have finished */
Richard Hendersone80941b2019-02-21 18:17:45 +00004214 if (veclen == 0) {
bellardb7bcbe92005-02-22 19:27:29 +00004215 break;
Richard Hendersone80941b2019-02-21 18:17:45 +00004216 }
bellardb7bcbe92005-02-22 19:27:29 +00004217
4218 if (op == 15 && delta_m == 0) {
4219 /* single source one-many */
4220 while (veclen--) {
4221 rd = ((rd + delta_d) & (bank_mask - 1))
4222 | (rd & bank_mask);
4223 gen_mov_vreg_F0(dp, rd);
4224 }
4225 break;
4226 }
4227 /* Setup the next operands. */
4228 veclen--;
4229 rd = ((rd + delta_d) & (bank_mask - 1))
4230 | (rd & bank_mask);
4231
4232 if (op == 15) {
4233 /* One source operand. */
4234 rm = ((rm + delta_m) & (bank_mask - 1))
4235 | (rm & bank_mask);
4236 gen_mov_F0_vreg(dp, rm);
4237 } else {
4238 /* Two source operands. */
4239 rn = ((rn + delta_d) & (bank_mask - 1))
4240 | (rn & bank_mask);
4241 gen_mov_F0_vreg(dp, rn);
4242 if (delta_m) {
4243 rm = ((rm + delta_m) & (bank_mask - 1))
4244 | (rm & bank_mask);
4245 gen_mov_F1_vreg(dp, rm);
4246 }
4247 }
4248 }
4249 }
4250 break;
4251 case 0xc:
4252 case 0xd:
Peter Maydell8387da82011-03-01 17:35:19 +00004253 if ((insn & 0x03e00000) == 0x00400000) {
bellardb7bcbe92005-02-22 19:27:29 +00004254 /* two-register transfer */
4255 rn = (insn >> 16) & 0xf;
4256 rd = (insn >> 12) & 0xf;
4257 if (dp) {
pbrook9ee6e8b2007-11-11 00:04:49 +00004258 VFP_DREG_M(rm, insn);
4259 } else {
4260 rm = VFP_SREG_M(insn);
4261 }
bellardb7bcbe92005-02-22 19:27:29 +00004262
balrog18c9b562007-04-30 02:02:17 +00004263 if (insn & ARM_CP_RW_BIT) {
bellardb7bcbe92005-02-22 19:27:29 +00004264 /* vfp->arm */
4265 if (dp) {
pbrook4373f3c2008-03-31 03:47:19 +00004266 gen_mov_F0_vreg(0, rm * 2);
4267 tmp = gen_vfp_mrs();
4268 store_reg(s, rd, tmp);
4269 gen_mov_F0_vreg(0, rm * 2 + 1);
4270 tmp = gen_vfp_mrs();
4271 store_reg(s, rn, tmp);
bellardb7bcbe92005-02-22 19:27:29 +00004272 } else {
4273 gen_mov_F0_vreg(0, rm);
pbrook4373f3c2008-03-31 03:47:19 +00004274 tmp = gen_vfp_mrs();
Peter Maydell8387da82011-03-01 17:35:19 +00004275 store_reg(s, rd, tmp);
bellardb7bcbe92005-02-22 19:27:29 +00004276 gen_mov_F0_vreg(0, rm + 1);
pbrook4373f3c2008-03-31 03:47:19 +00004277 tmp = gen_vfp_mrs();
Peter Maydell8387da82011-03-01 17:35:19 +00004278 store_reg(s, rn, tmp);
bellardb7bcbe92005-02-22 19:27:29 +00004279 }
4280 } else {
4281 /* arm->vfp */
4282 if (dp) {
pbrook4373f3c2008-03-31 03:47:19 +00004283 tmp = load_reg(s, rd);
4284 gen_vfp_msr(tmp);
4285 gen_mov_vreg_F0(0, rm * 2);
4286 tmp = load_reg(s, rn);
4287 gen_vfp_msr(tmp);
4288 gen_mov_vreg_F0(0, rm * 2 + 1);
bellardb7bcbe92005-02-22 19:27:29 +00004289 } else {
Peter Maydell8387da82011-03-01 17:35:19 +00004290 tmp = load_reg(s, rd);
pbrook4373f3c2008-03-31 03:47:19 +00004291 gen_vfp_msr(tmp);
bellardb7bcbe92005-02-22 19:27:29 +00004292 gen_mov_vreg_F0(0, rm);
Peter Maydell8387da82011-03-01 17:35:19 +00004293 tmp = load_reg(s, rn);
pbrook4373f3c2008-03-31 03:47:19 +00004294 gen_vfp_msr(tmp);
bellardb7bcbe92005-02-22 19:27:29 +00004295 gen_mov_vreg_F0(0, rm + 1);
4296 }
4297 }
4298 } else {
4299 /* Load/store */
4300 rn = (insn >> 16) & 0xf;
4301 if (dp)
pbrook9ee6e8b2007-11-11 00:04:49 +00004302 VFP_DREG_D(rd, insn);
bellardb7bcbe92005-02-22 19:27:29 +00004303 else
pbrook9ee6e8b2007-11-11 00:04:49 +00004304 rd = VFP_SREG_D(insn);
bellardb7bcbe92005-02-22 19:27:29 +00004305 if ((insn & 0x01200000) == 0x01000000) {
4306 /* Single load/store */
4307 offset = (insn & 0xff) << 2;
4308 if ((insn & (1 << 23)) == 0)
4309 offset = -offset;
Peter Maydell934814f2011-07-22 00:51:19 +00004310 if (s->thumb && rn == 15) {
4311 /* This is actually UNPREDICTABLE */
4312 addr = tcg_temp_new_i32();
4313 tcg_gen_movi_i32(addr, s->pc & ~2);
4314 } else {
4315 addr = load_reg(s, rn);
4316 }
Filip Navara312eea92009-10-15 14:48:19 +02004317 tcg_gen_addi_i32(addr, addr, offset);
bellardb7bcbe92005-02-22 19:27:29 +00004318 if (insn & (1 << 20)) {
Filip Navara312eea92009-10-15 14:48:19 +02004319 gen_vfp_ld(s, dp, addr);
bellardb7bcbe92005-02-22 19:27:29 +00004320 gen_mov_vreg_F0(dp, rd);
4321 } else {
4322 gen_mov_F0_vreg(dp, rd);
Filip Navara312eea92009-10-15 14:48:19 +02004323 gen_vfp_st(s, dp, addr);
bellardb7bcbe92005-02-22 19:27:29 +00004324 }
Peter Maydell7d1b0092011-03-06 21:39:54 +00004325 tcg_temp_free_i32(addr);
bellardb7bcbe92005-02-22 19:27:29 +00004326 } else {
4327 /* load/store multiple */
Peter Maydell934814f2011-07-22 00:51:19 +00004328 int w = insn & (1 << 21);
bellardb7bcbe92005-02-22 19:27:29 +00004329 if (dp)
4330 n = (insn >> 1) & 0x7f;
4331 else
4332 n = insn & 0xff;
4333
Peter Maydell934814f2011-07-22 00:51:19 +00004334 if (w && !(((insn >> 23) ^ (insn >> 24)) & 1)) {
4335 /* P == U , W == 1 => UNDEF */
4336 return 1;
4337 }
4338 if (n == 0 || (rd + n) > 32 || (dp && n > 16)) {
4339 /* UNPREDICTABLE cases for bad immediates: we choose to
4340 * UNDEF to avoid generating huge numbers of TCG ops
4341 */
4342 return 1;
4343 }
4344 if (rn == 15 && w) {
4345 /* writeback to PC is UNPREDICTABLE, we choose to UNDEF */
4346 return 1;
4347 }
4348
4349 if (s->thumb && rn == 15) {
4350 /* This is actually UNPREDICTABLE */
4351 addr = tcg_temp_new_i32();
4352 tcg_gen_movi_i32(addr, s->pc & ~2);
4353 } else {
4354 addr = load_reg(s, rn);
4355 }
bellardb7bcbe92005-02-22 19:27:29 +00004356 if (insn & (1 << 24)) /* pre-decrement */
Filip Navara312eea92009-10-15 14:48:19 +02004357 tcg_gen_addi_i32(addr, addr, -((insn & 0xff) << 2));
bellardb7bcbe92005-02-22 19:27:29 +00004358
Peter Maydell8a954fa2018-10-08 14:55:05 +01004359 if (s->v8m_stackcheck && rn == 13 && w) {
4360 /*
4361 * Here 'addr' is the lowest address we will store to,
4362 * and is either the old SP (if post-increment) or
4363 * the new SP (if pre-decrement). For post-increment
4364 * where the old value is below the limit and the new
4365 * value is above, it is UNKNOWN whether the limit check
4366 * triggers; we choose to trigger.
4367 */
4368 gen_helper_v8m_stackcheck(cpu_env, addr);
4369 }
4370
bellardb7bcbe92005-02-22 19:27:29 +00004371 if (dp)
4372 offset = 8;
4373 else
4374 offset = 4;
4375 for (i = 0; i < n; i++) {
balrog18c9b562007-04-30 02:02:17 +00004376 if (insn & ARM_CP_RW_BIT) {
bellardb7bcbe92005-02-22 19:27:29 +00004377 /* load */
Filip Navara312eea92009-10-15 14:48:19 +02004378 gen_vfp_ld(s, dp, addr);
bellardb7bcbe92005-02-22 19:27:29 +00004379 gen_mov_vreg_F0(dp, rd + i);
4380 } else {
4381 /* store */
4382 gen_mov_F0_vreg(dp, rd + i);
Filip Navara312eea92009-10-15 14:48:19 +02004383 gen_vfp_st(s, dp, addr);
bellardb7bcbe92005-02-22 19:27:29 +00004384 }
Filip Navara312eea92009-10-15 14:48:19 +02004385 tcg_gen_addi_i32(addr, addr, offset);
bellardb7bcbe92005-02-22 19:27:29 +00004386 }
Peter Maydell934814f2011-07-22 00:51:19 +00004387 if (w) {
bellardb7bcbe92005-02-22 19:27:29 +00004388 /* writeback */
4389 if (insn & (1 << 24))
4390 offset = -offset * n;
4391 else if (dp && (insn & 1))
4392 offset = 4;
4393 else
4394 offset = 0;
4395
4396 if (offset != 0)
Filip Navara312eea92009-10-15 14:48:19 +02004397 tcg_gen_addi_i32(addr, addr, offset);
4398 store_reg(s, rn, addr);
4399 } else {
Peter Maydell7d1b0092011-03-06 21:39:54 +00004400 tcg_temp_free_i32(addr);
bellardb7bcbe92005-02-22 19:27:29 +00004401 }
4402 }
4403 }
4404 break;
4405 default:
4406 /* Should never happen. */
4407 return 1;
4408 }
4409 return 0;
4410}
4411
Sergey Fedorov90aa39a2016-04-09 01:00:23 +03004412static inline bool use_goto_tb(DisasContext *s, target_ulong dest)
4413{
4414#ifndef CONFIG_USER_ONLY
Lluís Vilanovadcba3a82017-07-14 12:01:59 +03004415 return (s->base.tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK) ||
Sergey Fedorov90aa39a2016-04-09 01:00:23 +03004416 ((s->pc - 1) & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK);
4417#else
4418 return true;
4419#endif
4420}
4421
/*
 * Emit a TCG lookup-and-goto-ptr op: at run time this looks up the TB
 * for the current CPU state and jumps straight to it, otherwise it
 * returns to the main loop.
 */
static void gen_goto_ptr(void)
{
    tcg_gen_lookup_and_goto_ptr();
}
4426
Alex Bennée4cae8f52017-07-17 13:36:07 +01004427/* This will end the TB but doesn't guarantee we'll return to
4428 * cpu_loop_exec. Any live exit_requests will be processed as we
4429 * enter the next TB.
4430 */
Emilio G. Cota8a6b28c2017-04-26 23:29:20 -04004431static void gen_goto_tb(DisasContext *s, int n, target_ulong dest)
bellardc53be332005-10-30 21:39:19 +00004432{
Sergey Fedorov90aa39a2016-04-09 01:00:23 +03004433 if (use_goto_tb(s, dest)) {
bellard57fec1f2008-02-01 10:50:11 +00004434 tcg_gen_goto_tb(n);
Peter Maydelleaed1292013-09-03 20:12:06 +01004435 gen_set_pc_im(s, dest);
Richard Henderson07ea28b2018-05-30 18:06:23 -07004436 tcg_gen_exit_tb(s->base.tb, n);
bellard6e256c92005-11-20 10:32:05 +00004437 } else {
Peter Maydelleaed1292013-09-03 20:12:06 +01004438 gen_set_pc_im(s, dest);
Emilio G. Cota8a6b28c2017-04-26 23:29:20 -04004439 gen_goto_ptr();
bellard6e256c92005-11-20 10:32:05 +00004440 }
Lluís Vilanovadcba3a82017-07-14 12:01:59 +03004441 s->base.is_jmp = DISAS_NORETURN;
bellardc53be332005-10-30 21:39:19 +00004442}
4443
bellard8aaca4c2005-04-23 18:27:52 +00004444static inline void gen_jmp (DisasContext *s, uint32_t dest)
4445{
Peter Maydellb6366492017-04-20 17:32:30 +01004446 if (unlikely(is_singlestepping(s))) {
bellard8aaca4c2005-04-23 18:27:52 +00004447 /* An indirect jump so that we still trigger the debug exception. */
bellard5899f382005-04-27 20:25:20 +00004448 if (s->thumb)
pbrookd9ba4832008-03-31 03:46:50 +00004449 dest |= 1;
4450 gen_bx_im(s, dest);
bellard8aaca4c2005-04-23 18:27:52 +00004451 } else {
bellard6e256c92005-11-20 10:32:05 +00004452 gen_goto_tb(s, 0, dest);
bellard8aaca4c2005-04-23 18:27:52 +00004453 }
4454}
4455
Peter Maydell39d54922013-05-23 12:59:55 +01004456static inline void gen_mulxy(TCGv_i32 t0, TCGv_i32 t1, int x, int y)
bellardb5ff1b32005-11-26 10:38:39 +00004457{
bellardee097182005-12-04 18:56:28 +00004458 if (x)
pbrookd9ba4832008-03-31 03:46:50 +00004459 tcg_gen_sari_i32(t0, t0, 16);
bellardb5ff1b32005-11-26 10:38:39 +00004460 else
pbrookd9ba4832008-03-31 03:46:50 +00004461 gen_sxth(t0);
bellardee097182005-12-04 18:56:28 +00004462 if (y)
pbrookd9ba4832008-03-31 03:46:50 +00004463 tcg_gen_sari_i32(t1, t1, 16);
bellardb5ff1b32005-11-26 10:38:39 +00004464 else
pbrookd9ba4832008-03-31 03:46:50 +00004465 gen_sxth(t1);
4466 tcg_gen_mul_i32(t0, t0, t1);
bellardb5ff1b32005-11-26 10:38:39 +00004467}
4468
4469/* Return the mask of PSR bits set by a MSR instruction. */
Peter Maydell7dcc1f82014-10-28 19:24:03 +00004470static uint32_t msr_mask(DisasContext *s, int flags, int spsr)
4471{
bellardb5ff1b32005-11-26 10:38:39 +00004472 uint32_t mask;
4473
4474 mask = 0;
4475 if (flags & (1 << 0))
4476 mask |= 0xff;
4477 if (flags & (1 << 1))
4478 mask |= 0xff00;
4479 if (flags & (1 << 2))
4480 mask |= 0xff0000;
4481 if (flags & (1 << 3))
4482 mask |= 0xff000000;
pbrook9ee6e8b2007-11-11 00:04:49 +00004483
pbrook2ae23e72006-02-11 16:20:39 +00004484 /* Mask out undefined bits. */
pbrook9ee6e8b2007-11-11 00:04:49 +00004485 mask &= ~CPSR_RESERVED;
Peter Maydelld614a512014-10-28 19:24:01 +00004486 if (!arm_dc_feature(s, ARM_FEATURE_V4T)) {
Dmitry Eremin-Solenikovbe5e7a72011-04-04 17:38:44 +04004487 mask &= ~CPSR_T;
Peter Maydelld614a512014-10-28 19:24:01 +00004488 }
4489 if (!arm_dc_feature(s, ARM_FEATURE_V5)) {
Dmitry Eremin-Solenikovbe5e7a72011-04-04 17:38:44 +04004490 mask &= ~CPSR_Q; /* V5TE in reality*/
Peter Maydelld614a512014-10-28 19:24:01 +00004491 }
4492 if (!arm_dc_feature(s, ARM_FEATURE_V6)) {
pbrooke160c512007-11-11 14:36:36 +00004493 mask &= ~(CPSR_E | CPSR_GE);
Peter Maydelld614a512014-10-28 19:24:01 +00004494 }
4495 if (!arm_dc_feature(s, ARM_FEATURE_THUMB2)) {
pbrooke160c512007-11-11 14:36:36 +00004496 mask &= ~CPSR_IT;
Peter Maydelld614a512014-10-28 19:24:01 +00004497 }
Peter Maydell4051e122014-08-19 18:56:26 +01004498 /* Mask out execution state and reserved bits. */
4499 if (!spsr) {
4500 mask &= ~(CPSR_EXEC | CPSR_RESERVED);
4501 }
bellardb5ff1b32005-11-26 10:38:39 +00004502 /* Mask out privileged bits. */
4503 if (IS_USER(s))
pbrook9ee6e8b2007-11-11 00:04:49 +00004504 mask &= CPSR_USER;
bellardb5ff1b32005-11-26 10:38:39 +00004505 return mask;
4506}
4507
/* Returns nonzero if access to the PSR is not permitted. Marks t0 as dead. */
/*
 * Write the bits of t0 selected by 'mask' into the CPSR, or -- when the
 * 'spsr' flag is nonzero -- into the current mode's saved PSR (the
 * env 'spsr' field, via the load_cpu_field/store_cpu_field macros).
 * Ends the TB lookup via gen_lookup_tb() since PSR bits may affect
 * translation.
 * NOTE(review): on the failure path (SPSR write from user mode) t0 is
 * *not* freed despite the "marks t0 as dead" contract -- confirm all
 * callers treat a nonzero return as a translation abort.
 */
static int gen_set_psr(DisasContext *s, uint32_t mask, int spsr, TCGv_i32 t0)
{
    TCGv_i32 tmp;
    if (spsr) {
        /* ??? This is also undefined in system mode.  */
        if (IS_USER(s))
            return 1;

        /* Read-modify-write: preserve the SPSR bits outside 'mask'.  */
        tmp = load_cpu_field(spsr);
        tcg_gen_andi_i32(tmp, tmp, ~mask);
        tcg_gen_andi_i32(t0, t0, mask);
        tcg_gen_or_i32(tmp, tmp, t0);
        store_cpu_field(tmp, spsr);
    } else {
        gen_set_cpsr(t0, mask);
    }
    tcg_temp_free_i32(t0);
    gen_lookup_tb(s);
    return 0;
}
4529
Filip Navara2fbac542009-10-15 12:43:04 +02004530/* Returns nonzero if access to the PSR is not permitted. */
4531static int gen_set_psr_im(DisasContext *s, uint32_t mask, int spsr, uint32_t val)
4532{
Peter Maydell39d54922013-05-23 12:59:55 +01004533 TCGv_i32 tmp;
Peter Maydell7d1b0092011-03-06 21:39:54 +00004534 tmp = tcg_temp_new_i32();
Filip Navara2fbac542009-10-15 12:43:04 +02004535 tcg_gen_movi_i32(tmp, val);
4536 return gen_set_psr(s, mask, spsr, tmp);
4537}
4538
static bool msr_banked_access_decode(DisasContext *s, int r, int sysm, int rn,
                                     int *tgtmode, int *regno)
{
    /* Decode the r and sysm fields of MSR/MRS banked accesses into
     * the target mode and register number, and identify the various
     * unpredictable cases.
     * MSR (banked) and MRS (banked) are CONSTRAINED UNPREDICTABLE if:
     *  + executed in user mode
     *  + using R15 as the src/dest register
     *  + accessing an unimplemented register
     *  + accessing a register that's inaccessible at current PL/security state*
     *  + accessing a register that you could access with a different insn
     * We choose to UNDEF in all these cases.
     * Since we don't know which of the various AArch32 modes we are in
     * we have to defer some checks to runtime.
     * Accesses to Monitor mode registers from Secure EL1 (which implies
     * that EL3 is AArch64) must trap to EL3.
     *
     * If the access checks fail this function will emit code to take
     * an exception and return false. Otherwise it will return true,
     * and set *tgtmode and *regno appropriately.
     */
    /* Exception level the UNDEF is taken to; overridden below for the
     * Secure-EL1-accessing-Monitor-registers case.  */
    int exc_target = default_exception_el(s);

    /* These instructions are present only in ARMv8, or in ARMv7 with the
     * Virtualization Extensions.
     */
    if (!arm_dc_feature(s, ARM_FEATURE_V8) &&
        !arm_dc_feature(s, ARM_FEATURE_EL2)) {
        goto undef;
    }

    if (IS_USER(s) || rn == 15) {
        goto undef;
    }

    /* The table in the v8 ARM ARM section F5.2.3 describes the encoding
     * of registers into (r, sysm).
     */
    if (r) {
        /* SPSRs for other modes */
        switch (sysm) {
        case 0xe: /* SPSR_fiq */
            *tgtmode = ARM_CPU_MODE_FIQ;
            break;
        case 0x10: /* SPSR_irq */
            *tgtmode = ARM_CPU_MODE_IRQ;
            break;
        case 0x12: /* SPSR_svc */
            *tgtmode = ARM_CPU_MODE_SVC;
            break;
        case 0x14: /* SPSR_abt */
            *tgtmode = ARM_CPU_MODE_ABT;
            break;
        case 0x16: /* SPSR_und */
            *tgtmode = ARM_CPU_MODE_UND;
            break;
        case 0x1c: /* SPSR_mon */
            *tgtmode = ARM_CPU_MODE_MON;
            break;
        case 0x1e: /* SPSR_hyp */
            *tgtmode = ARM_CPU_MODE_HYP;
            break;
        default: /* unallocated */
            goto undef;
        }
        /* We arbitrarily assign SPSR a register number of 16. */
        *regno = 16;
    } else {
        /* general purpose registers for other modes */
        switch (sysm) {
        case 0x0 ... 0x6:   /* 0b00xxx : r8_usr ... r14_usr */
            *tgtmode = ARM_CPU_MODE_USR;
            *regno = sysm + 8;
            break;
        case 0x8 ... 0xe:   /* 0b01xxx : r8_fiq ... r14_fiq */
            *tgtmode = ARM_CPU_MODE_FIQ;
            *regno = sysm;
            break;
        case 0x10 ... 0x11: /* 0b1000x : r14_irq, r13_irq */
            *tgtmode = ARM_CPU_MODE_IRQ;
            *regno = sysm & 1 ? 13 : 14;
            break;
        case 0x12 ... 0x13: /* 0b1001x : r14_svc, r13_svc */
            *tgtmode = ARM_CPU_MODE_SVC;
            *regno = sysm & 1 ? 13 : 14;
            break;
        case 0x14 ... 0x15: /* 0b1010x : r14_abt, r13_abt */
            *tgtmode = ARM_CPU_MODE_ABT;
            *regno = sysm & 1 ? 13 : 14;
            break;
        case 0x16 ... 0x17: /* 0b1011x : r14_und, r13_und */
            *tgtmode = ARM_CPU_MODE_UND;
            *regno = sysm & 1 ? 13 : 14;
            break;
        case 0x1c ... 0x1d: /* 0b1110x : r14_mon, r13_mon */
            *tgtmode = ARM_CPU_MODE_MON;
            *regno = sysm & 1 ? 13 : 14;
            break;
        case 0x1e ... 0x1f: /* 0b1111x : elr_hyp, r13_hyp */
            *tgtmode = ARM_CPU_MODE_HYP;
            /* Arbitrarily pick 17 for ELR_Hyp (which is not a banked LR!) */
            *regno = sysm & 1 ? 13 : 17;
            break;
        default: /* unallocated */
            goto undef;
        }
    }

    /* Catch the 'accessing inaccessible register' cases we can detect
     * at translate time.
     */
    switch (*tgtmode) {
    case ARM_CPU_MODE_MON:
        if (!arm_dc_feature(s, ARM_FEATURE_EL3) || s->ns) {
            goto undef;
        }
        if (s->current_el == 1) {
            /* If we're in Secure EL1 (which implies that EL3 is AArch64)
             * then accesses to Mon registers trap to EL3
             */
            exc_target = 3;
            goto undef;
        }
        break;
    case ARM_CPU_MODE_HYP:
        /*
         * SPSR_hyp and r13_hyp can only be accessed from Monitor mode
         * (and so we can forbid accesses from EL2 or below). elr_hyp
         * can be accessed also from Hyp mode, so forbid accesses from
         * EL0 or EL1.
         */
        if (!arm_dc_feature(s, ARM_FEATURE_EL2) || s->current_el < 2 ||
            (s->current_el < 3 && *regno != 17)) {
            goto undef;
        }
        break;
    default:
        break;
    }

    return true;

undef:
    /* If we get here then some access check did not pass */
    gen_exception_insn(s, 4, EXCP_UDEF, syn_uncategorized(), exc_target);
    return false;
}
4687
/* Generate code for MSR (banked): copy core register rn into the
 * banked register identified by (r, sysm).  Emits an UNDEF (inside the
 * decode helper) and returns early if the access is not permitted.  */
static void gen_msr_banked(DisasContext *s, int r, int sysm, int rn)
{
    TCGv_i32 tcg_reg, tcg_tgtmode, tcg_regno;
    int tgtmode = 0, regno = 0;

    if (!msr_banked_access_decode(s, r, sysm, rn, &tgtmode, &regno)) {
        return;
    }

    /* Sync state because msr_banked() can raise exceptions */
    gen_set_condexec(s);
    /* Rewind env PC to this insn; assumes a 4-byte encoding -- which
     * holds here since the banked MSR forms are A32 or 32-bit T32.  */
    gen_set_pc_im(s, s->pc - 4);
    tcg_reg = load_reg(s, rn);
    tcg_tgtmode = tcg_const_i32(tgtmode);
    tcg_regno = tcg_const_i32(regno);
    gen_helper_msr_banked(cpu_env, tcg_reg, tcg_tgtmode, tcg_regno);
    tcg_temp_free_i32(tcg_tgtmode);
    tcg_temp_free_i32(tcg_regno);
    tcg_temp_free_i32(tcg_reg);
    /* End the TB so any state changed by the helper is picked up.  */
    s->base.is_jmp = DISAS_UPDATE;
}
4709
/* Generate code for MRS (banked): read the banked register identified
 * by (r, sysm) into core register rn.  Emits an UNDEF (inside the
 * decode helper) and returns early if the access is not permitted.  */
static void gen_mrs_banked(DisasContext *s, int r, int sysm, int rn)
{
    TCGv_i32 tcg_reg, tcg_tgtmode, tcg_regno;
    int tgtmode = 0, regno = 0;

    if (!msr_banked_access_decode(s, r, sysm, rn, &tgtmode, &regno)) {
        return;
    }

    /* Sync state because mrs_banked() can raise exceptions */
    gen_set_condexec(s);
    /* Rewind env PC to this insn; the banked MRS forms are 4 bytes
     * in both A32 and T32.  */
    gen_set_pc_im(s, s->pc - 4);
    tcg_reg = tcg_temp_new_i32();
    tcg_tgtmode = tcg_const_i32(tgtmode);
    tcg_regno = tcg_const_i32(regno);
    gen_helper_mrs_banked(tcg_reg, cpu_env, tcg_tgtmode, tcg_regno);
    tcg_temp_free_i32(tcg_tgtmode);
    tcg_temp_free_i32(tcg_regno);
    store_reg(s, rn, tcg_reg);
    /* End the TB so any state changed by the helper is picked up.  */
    s->base.is_jmp = DISAS_UPDATE;
}
4731
/* Store value to PC as for an exception return (ie don't
 * mask bits). The subsequent call to gen_helper_cpsr_write_eret()
 * will do the masking based on the new value of the Thumb bit.
 */
static void store_pc_exc_ret(DisasContext *s, TCGv_i32 pc)
{
    /* Consumes the 'pc' temp: it is freed here.  */
    tcg_gen_mov_i32(cpu_R[15], pc);
    tcg_temp_free_i32(pc);
}
4741
/* Generate a v6 exception return.  Marks both values as dead.  */
static void gen_rfe(DisasContext *s, TCGv_i32 pc, TCGv_i32 cpsr)
{
    store_pc_exc_ret(s, pc);
    /* The cpsr_write_eret helper will mask the low bits of PC
     * appropriately depending on the new Thumb bit, so it must
     * be called after storing the new PC.
     */
    /* Bracket the helper with gen_io_start/gen_io_end when icount is
     * active, since the CPSR write can have I/O-visible effects.  */
    if (tb_cflags(s->base.tb) & CF_USE_ICOUNT) {
        gen_io_start();
    }
    gen_helper_cpsr_write_eret(cpu_env, cpsr);
    if (tb_cflags(s->base.tb) & CF_USE_ICOUNT) {
        gen_io_end();
    }
    tcg_temp_free_i32(cpsr);
    /* Must exit loop to check un-masked IRQs */
    s->base.is_jmp = DISAS_EXIT;
}
4761
/* Generate an old-style exception return. Marks pc as dead. */
static void gen_exception_return(DisasContext *s, TCGv_i32 pc)
{
    /* Restore CPSR from the current mode's SPSR, via gen_rfe().  */
    gen_rfe(s, pc, load_cpu_field(spsr));
}
4767
/*
 * For WFI we will halt the vCPU until an IRQ. For WFE and YIELD we
 * only call the helper when running single threaded TCG code to ensure
 * the next round-robin scheduled vCPU gets a crack. In MTTCG mode we
 * just skip this instruction. Currently the SEV/SEVL instructions
 * which are *one* of many ways to wake the CPU from WFE are not
 * implemented so we can't sleep like WFI does.
 */
/* 'val' is the hint immediate: 1=YIELD, 2=WFE, 3=WFI, 4=SEV, 5=SEVL;
 * anything else is treated as a NOP.  */
static void gen_nop_hint(DisasContext *s, int val)
{
    switch (val) {
        /* When running in MTTCG we don't generate jumps to the yield and
         * WFE helpers as it won't affect the scheduling of other vCPUs.
         * If we wanted to more completely model WFE/SEV so we don't busy
         * spin unnecessarily we would need to do something more involved.
         */
    case 1: /* yield */
        if (!(tb_cflags(s->base.tb) & CF_PARALLEL)) {
            /* Sync the PC so the helper resumes at the next insn.  */
            gen_set_pc_im(s, s->pc);
            s->base.is_jmp = DISAS_YIELD;
        }
        break;
    case 3: /* wfi */
        gen_set_pc_im(s, s->pc);
        s->base.is_jmp = DISAS_WFI;
        break;
    case 2: /* wfe */
        if (!(tb_cflags(s->base.tb) & CF_PARALLEL)) {
            gen_set_pc_im(s, s->pc);
            s->base.is_jmp = DISAS_WFE;
        }
        break;
    case 4: /* sev */
    case 5: /* sevl */
        /* TODO: Implement SEV, SEVL and WFE. May help SMP performance. */
    default: /* nop */
        break;
    }
}
4807
pbrookad694712008-03-31 03:48:30 +00004808#define CPU_V001 cpu_V0, cpu_V0, cpu_V1
pbrook9ee6e8b2007-11-11 00:04:49 +00004809
Peter Maydell39d54922013-05-23 12:59:55 +01004810static inline void gen_neon_add(int size, TCGv_i32 t0, TCGv_i32 t1)
pbrook9ee6e8b2007-11-11 00:04:49 +00004811{
4812 switch (size) {
Filip Navaradd8fbd72009-10-15 13:07:14 +02004813 case 0: gen_helper_neon_add_u8(t0, t0, t1); break;
4814 case 1: gen_helper_neon_add_u16(t0, t0, t1); break;
4815 case 2: tcg_gen_add_i32(t0, t0, t1); break;
Peter Maydell62698be2011-04-11 16:26:11 +01004816 default: abort();
pbrook9ee6e8b2007-11-11 00:04:49 +00004817 }
pbrook9ee6e8b2007-11-11 00:04:49 +00004818}
4819
Peter Maydell39d54922013-05-23 12:59:55 +01004820static inline void gen_neon_rsb(int size, TCGv_i32 t0, TCGv_i32 t1)
pbrookad694712008-03-31 03:48:30 +00004821{
4822 switch (size) {
Filip Navaradd8fbd72009-10-15 13:07:14 +02004823 case 0: gen_helper_neon_sub_u8(t0, t1, t0); break;
4824 case 1: gen_helper_neon_sub_u16(t0, t1, t0); break;
4825 case 2: tcg_gen_sub_i32(t0, t1, t0); break;
pbrookad694712008-03-31 03:48:30 +00004826 default: return;
4827 }
4828}
4829
/* 32-bit pairwise ops end up the same as the elementwise versions.  */
/* So for the 32-bit element size, alias the Neon pairwise min/max
 * helper names directly to the generic TCG min/max ops.  */
#define gen_helper_neon_pmax_s32 tcg_gen_smax_i32
#define gen_helper_neon_pmax_u32 tcg_gen_umax_i32
#define gen_helper_neon_pmin_s32 tcg_gen_smin_i32
#define gen_helper_neon_pmin_u32 tcg_gen_umin_i32
pbrookad694712008-03-31 03:48:30 +00004835
/* Emit tmp = neon_<name>(env, tmp, tmp2), picking the helper variant
 * for the signedness/element-size encoded by the in-scope 'u' and
 * 'size' variables (s8/u8/s16/u16/s32/u32).  Expands to "return 1"
 * for combinations with no helper, so it may only be used inside a
 * function returning int.  */
#define GEN_NEON_INTEGER_OP_ENV(name) do { \
    switch ((size << 1) | u) { \
    case 0: \
        gen_helper_neon_##name##_s8(tmp, cpu_env, tmp, tmp2); \
        break; \
    case 1: \
        gen_helper_neon_##name##_u8(tmp, cpu_env, tmp, tmp2); \
        break; \
    case 2: \
        gen_helper_neon_##name##_s16(tmp, cpu_env, tmp, tmp2); \
        break; \
    case 3: \
        gen_helper_neon_##name##_u16(tmp, cpu_env, tmp, tmp2); \
        break; \
    case 4: \
        gen_helper_neon_##name##_s32(tmp, cpu_env, tmp, tmp2); \
        break; \
    case 5: \
        gen_helper_neon_##name##_u32(tmp, cpu_env, tmp, tmp2); \
        break; \
    default: return 1; \
    }} while (0)
pbrook9ee6e8b2007-11-11 00:04:49 +00004858
/* As GEN_NEON_INTEGER_OP_ENV, but for helpers that do not take the
 * CPU env: tmp = neon_<name>(tmp, tmp2), variant chosen by the
 * in-scope 'u' and 'size'.  Expands to "return 1" for combinations
 * with no helper.  */
#define GEN_NEON_INTEGER_OP(name) do { \
    switch ((size << 1) | u) { \
    case 0: \
        gen_helper_neon_##name##_s8(tmp, tmp, tmp2); \
        break; \
    case 1: \
        gen_helper_neon_##name##_u8(tmp, tmp, tmp2); \
        break; \
    case 2: \
        gen_helper_neon_##name##_s16(tmp, tmp, tmp2); \
        break; \
    case 3: \
        gen_helper_neon_##name##_u16(tmp, tmp, tmp2); \
        break; \
    case 4: \
        gen_helper_neon_##name##_s32(tmp, tmp, tmp2); \
        break; \
    case 5: \
        gen_helper_neon_##name##_u32(tmp, tmp, tmp2); \
        break; \
    default: return 1; \
    }} while (0)
4881
Peter Maydell39d54922013-05-23 12:59:55 +01004882static TCGv_i32 neon_load_scratch(int scratch)
pbrook9ee6e8b2007-11-11 00:04:49 +00004883{
Peter Maydell39d54922013-05-23 12:59:55 +01004884 TCGv_i32 tmp = tcg_temp_new_i32();
Filip Navaradd8fbd72009-10-15 13:07:14 +02004885 tcg_gen_ld_i32(tmp, cpu_env, offsetof(CPUARMState, vfp.scratch[scratch]));
4886 return tmp;
pbrook9ee6e8b2007-11-11 00:04:49 +00004887}
4888
Peter Maydell39d54922013-05-23 12:59:55 +01004889static void neon_store_scratch(int scratch, TCGv_i32 var)
pbrook9ee6e8b2007-11-11 00:04:49 +00004890{
Filip Navaradd8fbd72009-10-15 13:07:14 +02004891 tcg_gen_st_i32(var, cpu_env, offsetof(CPUARMState, vfp.scratch[scratch]));
Peter Maydell7d1b0092011-03-06 21:39:54 +00004892 tcg_temp_free_i32(var);
pbrook9ee6e8b2007-11-11 00:04:49 +00004893}
4894
Peter Maydell39d54922013-05-23 12:59:55 +01004895static inline TCGv_i32 neon_get_scalar(int size, int reg)
pbrook9ee6e8b2007-11-11 00:04:49 +00004896{
Peter Maydell39d54922013-05-23 12:59:55 +01004897 TCGv_i32 tmp;
pbrook9ee6e8b2007-11-11 00:04:49 +00004898 if (size == 1) {
Peter Maydell0fad6ef2011-01-19 19:29:53 +00004899 tmp = neon_load_reg(reg & 7, reg >> 4);
4900 if (reg & 8) {
Filip Navaradd8fbd72009-10-15 13:07:14 +02004901 gen_neon_dup_high16(tmp);
Peter Maydell0fad6ef2011-01-19 19:29:53 +00004902 } else {
4903 gen_neon_dup_low16(tmp);
Filip Navaradd8fbd72009-10-15 13:07:14 +02004904 }
Peter Maydell0fad6ef2011-01-19 19:29:53 +00004905 } else {
4906 tmp = neon_load_reg(reg & 15, reg >> 4);
pbrook9ee6e8b2007-11-11 00:04:49 +00004907 }
Filip Navaradd8fbd72009-10-15 13:07:14 +02004908 return tmp;
pbrook9ee6e8b2007-11-11 00:04:49 +00004909}
4910
Peter Maydell02acedf2011-02-14 10:22:48 +00004911static int gen_neon_unzip(int rd, int rm, int size, int q)
Filip Navara19457612009-10-15 12:45:57 +02004912{
Richard Hendersonb13708b2018-01-25 11:45:28 +00004913 TCGv_ptr pd, pm;
4914
Peter Maydell600b8282011-04-11 16:26:20 +01004915 if (!q && size == 2) {
Peter Maydell02acedf2011-02-14 10:22:48 +00004916 return 1;
4917 }
Richard Hendersonb13708b2018-01-25 11:45:28 +00004918 pd = vfp_reg_ptr(true, rd);
4919 pm = vfp_reg_ptr(true, rm);
Peter Maydell02acedf2011-02-14 10:22:48 +00004920 if (q) {
4921 switch (size) {
4922 case 0:
Richard Hendersonb13708b2018-01-25 11:45:28 +00004923 gen_helper_neon_qunzip8(pd, pm);
Peter Maydell02acedf2011-02-14 10:22:48 +00004924 break;
4925 case 1:
Richard Hendersonb13708b2018-01-25 11:45:28 +00004926 gen_helper_neon_qunzip16(pd, pm);
Peter Maydell02acedf2011-02-14 10:22:48 +00004927 break;
4928 case 2:
Richard Hendersonb13708b2018-01-25 11:45:28 +00004929 gen_helper_neon_qunzip32(pd, pm);
Peter Maydell02acedf2011-02-14 10:22:48 +00004930 break;
4931 default:
4932 abort();
4933 }
4934 } else {
4935 switch (size) {
4936 case 0:
Richard Hendersonb13708b2018-01-25 11:45:28 +00004937 gen_helper_neon_unzip8(pd, pm);
Peter Maydell02acedf2011-02-14 10:22:48 +00004938 break;
4939 case 1:
Richard Hendersonb13708b2018-01-25 11:45:28 +00004940 gen_helper_neon_unzip16(pd, pm);
Peter Maydell02acedf2011-02-14 10:22:48 +00004941 break;
4942 default:
4943 abort();
4944 }
4945 }
Richard Hendersonb13708b2018-01-25 11:45:28 +00004946 tcg_temp_free_ptr(pd);
4947 tcg_temp_free_ptr(pm);
Peter Maydell02acedf2011-02-14 10:22:48 +00004948 return 0;
Filip Navara19457612009-10-15 12:45:57 +02004949}
4950
Peter Maydelld68a6f32011-02-14 10:22:49 +00004951static int gen_neon_zip(int rd, int rm, int size, int q)
Filip Navara19457612009-10-15 12:45:57 +02004952{
Richard Hendersonb13708b2018-01-25 11:45:28 +00004953 TCGv_ptr pd, pm;
4954
Peter Maydell600b8282011-04-11 16:26:20 +01004955 if (!q && size == 2) {
Peter Maydelld68a6f32011-02-14 10:22:49 +00004956 return 1;
4957 }
Richard Hendersonb13708b2018-01-25 11:45:28 +00004958 pd = vfp_reg_ptr(true, rd);
4959 pm = vfp_reg_ptr(true, rm);
Peter Maydelld68a6f32011-02-14 10:22:49 +00004960 if (q) {
4961 switch (size) {
4962 case 0:
Richard Hendersonb13708b2018-01-25 11:45:28 +00004963 gen_helper_neon_qzip8(pd, pm);
Peter Maydelld68a6f32011-02-14 10:22:49 +00004964 break;
4965 case 1:
Richard Hendersonb13708b2018-01-25 11:45:28 +00004966 gen_helper_neon_qzip16(pd, pm);
Peter Maydelld68a6f32011-02-14 10:22:49 +00004967 break;
4968 case 2:
Richard Hendersonb13708b2018-01-25 11:45:28 +00004969 gen_helper_neon_qzip32(pd, pm);
Peter Maydelld68a6f32011-02-14 10:22:49 +00004970 break;
4971 default:
4972 abort();
4973 }
4974 } else {
4975 switch (size) {
4976 case 0:
Richard Hendersonb13708b2018-01-25 11:45:28 +00004977 gen_helper_neon_zip8(pd, pm);
Peter Maydelld68a6f32011-02-14 10:22:49 +00004978 break;
4979 case 1:
Richard Hendersonb13708b2018-01-25 11:45:28 +00004980 gen_helper_neon_zip16(pd, pm);
Peter Maydelld68a6f32011-02-14 10:22:49 +00004981 break;
4982 default:
4983 abort();
4984 }
4985 }
Richard Hendersonb13708b2018-01-25 11:45:28 +00004986 tcg_temp_free_ptr(pd);
4987 tcg_temp_free_ptr(pm);
Peter Maydelld68a6f32011-02-14 10:22:49 +00004988 return 0;
Filip Navara19457612009-10-15 12:45:57 +02004989}
4990
Peter Maydell39d54922013-05-23 12:59:55 +01004991static void gen_neon_trn_u8(TCGv_i32 t0, TCGv_i32 t1)
Filip Navara19457612009-10-15 12:45:57 +02004992{
Peter Maydell39d54922013-05-23 12:59:55 +01004993 TCGv_i32 rd, tmp;
Filip Navara19457612009-10-15 12:45:57 +02004994
Peter Maydell7d1b0092011-03-06 21:39:54 +00004995 rd = tcg_temp_new_i32();
4996 tmp = tcg_temp_new_i32();
Filip Navara19457612009-10-15 12:45:57 +02004997
4998 tcg_gen_shli_i32(rd, t0, 8);
4999 tcg_gen_andi_i32(rd, rd, 0xff00ff00);
5000 tcg_gen_andi_i32(tmp, t1, 0x00ff00ff);
5001 tcg_gen_or_i32(rd, rd, tmp);
5002
5003 tcg_gen_shri_i32(t1, t1, 8);
5004 tcg_gen_andi_i32(t1, t1, 0x00ff00ff);
5005 tcg_gen_andi_i32(tmp, t0, 0xff00ff00);
5006 tcg_gen_or_i32(t1, t1, tmp);
5007 tcg_gen_mov_i32(t0, rd);
5008
Peter Maydell7d1b0092011-03-06 21:39:54 +00005009 tcg_temp_free_i32(tmp);
5010 tcg_temp_free_i32(rd);
Filip Navara19457612009-10-15 12:45:57 +02005011}
5012
Peter Maydell39d54922013-05-23 12:59:55 +01005013static void gen_neon_trn_u16(TCGv_i32 t0, TCGv_i32 t1)
Filip Navara19457612009-10-15 12:45:57 +02005014{
Peter Maydell39d54922013-05-23 12:59:55 +01005015 TCGv_i32 rd, tmp;
Filip Navara19457612009-10-15 12:45:57 +02005016
Peter Maydell7d1b0092011-03-06 21:39:54 +00005017 rd = tcg_temp_new_i32();
5018 tmp = tcg_temp_new_i32();
Filip Navara19457612009-10-15 12:45:57 +02005019
5020 tcg_gen_shli_i32(rd, t0, 16);
5021 tcg_gen_andi_i32(tmp, t1, 0xffff);
5022 tcg_gen_or_i32(rd, rd, tmp);
5023 tcg_gen_shri_i32(t1, t1, 16);
5024 tcg_gen_andi_i32(tmp, t0, 0xffff0000);
5025 tcg_gen_or_i32(t1, t1, tmp);
5026 tcg_gen_mov_i32(t0, rd);
5027
Peter Maydell7d1b0092011-03-06 21:39:54 +00005028 tcg_temp_free_i32(tmp);
5029 tcg_temp_free_i32(rd);
Filip Navara19457612009-10-15 12:45:57 +02005030}
5031
5032
/* Decode table for the Neon "load/store multiple structures" forms,
 * indexed by the op field ((insn >> 8) & 0xf) of the instruction; see
 * disas_neon_ls_insn() below for how the fields drive the transfer
 * loop (stride = nregs * interleave * 8).
 * NOTE(review): interleave appears to be the structure size (the 'n'
 * of VLDn/VSTn) and spacing the register stride within a group --
 * confirm against the ARM ARM element/structure load/store encoding
 * tables.  */
static struct {
    int nregs;       /* register groups transferred per instruction */
    int interleave;  /* registers a structure's elements are spread across */
    int spacing;     /* register spacing within an interleave group (1 or 2) */
} const neon_ls_element_type[11] = {
    {1, 4, 1},
    {1, 4, 2},
    {4, 1, 1},
    {2, 2, 2},
    {1, 3, 1},
    {1, 3, 2},
    {3, 1, 1},
    {1, 1, 1},
    {1, 2, 1},
    {1, 2, 2},
    {2, 1, 1}
};
5050
5051/* Translate a NEON load/store element instruction. Return nonzero if the
5052 instruction is invalid. */
Peter Maydell7dcc1f82014-10-28 19:24:03 +00005053static int disas_neon_ls_insn(DisasContext *s, uint32_t insn)
pbrook9ee6e8b2007-11-11 00:04:49 +00005054{
5055 int rd, rn, rm;
5056 int op;
5057 int nregs;
5058 int interleave;
Juha Riihimäki84496232009-10-24 15:19:01 +03005059 int spacing;
pbrook9ee6e8b2007-11-11 00:04:49 +00005060 int stride;
5061 int size;
5062 int reg;
pbrook9ee6e8b2007-11-11 00:04:49 +00005063 int load;
pbrook9ee6e8b2007-11-11 00:04:49 +00005064 int n;
Richard Henderson7377c2c2018-10-24 07:50:20 +01005065 int vec_size;
Richard Hendersonac55d002018-10-24 07:50:20 +01005066 int mmu_idx;
5067 TCGMemOp endian;
Peter Maydell39d54922013-05-23 12:59:55 +01005068 TCGv_i32 addr;
5069 TCGv_i32 tmp;
5070 TCGv_i32 tmp2;
Juha Riihimäki84496232009-10-24 15:19:01 +03005071 TCGv_i64 tmp64;
pbrook9ee6e8b2007-11-11 00:04:49 +00005072
Peter Maydell2c7ffc42014-04-15 19:18:40 +01005073 /* FIXME: this access check should not take precedence over UNDEF
5074 * for invalid encodings; we will generate incorrect syndrome information
5075 * for attempts to execute invalid vfp/neon encodings with FP disabled.
5076 */
Greg Bellows9dbbc742015-05-29 11:28:53 +01005077 if (s->fp_excp_el) {
Peter Maydell2c7ffc42014-04-15 19:18:40 +01005078 gen_exception_insn(s, 4, EXCP_UDEF,
Peter Maydell4be42f42018-10-24 07:50:18 +01005079 syn_simd_access_trap(1, 0xe, false), s->fp_excp_el);
Peter Maydell2c7ffc42014-04-15 19:18:40 +01005080 return 0;
5081 }
5082
Peter Maydell5df8bac2011-01-14 20:39:19 +01005083 if (!s->vfp_enabled)
pbrook9ee6e8b2007-11-11 00:04:49 +00005084 return 1;
5085 VFP_DREG_D(rd, insn);
5086 rn = (insn >> 16) & 0xf;
5087 rm = insn & 0xf;
5088 load = (insn & (1 << 21)) != 0;
Richard Hendersonac55d002018-10-24 07:50:20 +01005089 endian = s->be_data;
5090 mmu_idx = get_mem_index(s);
pbrook9ee6e8b2007-11-11 00:04:49 +00005091 if ((insn & (1 << 23)) == 0) {
5092 /* Load store all elements. */
5093 op = (insn >> 8) & 0xf;
5094 size = (insn >> 6) & 3;
Juha Riihimäki84496232009-10-24 15:19:01 +03005095 if (op > 10)
pbrook9ee6e8b2007-11-11 00:04:49 +00005096 return 1;
Peter Maydellf2dd89d2011-04-18 19:07:12 +01005097 /* Catch UNDEF cases for bad values of align field */
5098 switch (op & 0xc) {
5099 case 4:
5100 if (((insn >> 5) & 1) == 1) {
5101 return 1;
5102 }
5103 break;
5104 case 8:
5105 if (((insn >> 4) & 3) == 3) {
5106 return 1;
5107 }
5108 break;
5109 default:
5110 break;
5111 }
pbrook9ee6e8b2007-11-11 00:04:49 +00005112 nregs = neon_ls_element_type[op].nregs;
5113 interleave = neon_ls_element_type[op].interleave;
Juha Riihimäki84496232009-10-24 15:19:01 +03005114 spacing = neon_ls_element_type[op].spacing;
Richard Hendersonac55d002018-10-24 07:50:20 +01005115 if (size == 3 && (interleave | spacing) != 1) {
Juha Riihimäki84496232009-10-24 15:19:01 +03005116 return 1;
Richard Hendersonac55d002018-10-24 07:50:20 +01005117 }
Richard Hendersone23f12b2018-10-24 07:50:20 +01005118 /* For our purposes, bytes are always little-endian. */
5119 if (size == 0) {
5120 endian = MO_LE;
5121 }
5122 /* Consecutive little-endian elements from a single register
5123 * can be promoted to a larger little-endian operation.
5124 */
5125 if (interleave == 1 && endian == MO_LE) {
5126 size = 3;
5127 }
Richard Hendersonac55d002018-10-24 07:50:20 +01005128 tmp64 = tcg_temp_new_i64();
Peter Maydelle318a602011-03-15 16:26:52 +00005129 addr = tcg_temp_new_i32();
Richard Hendersonac55d002018-10-24 07:50:20 +01005130 tmp2 = tcg_const_i32(1 << size);
Aurelien Jarnodcc65022009-10-18 16:00:18 +02005131 load_reg_var(s, addr, rn);
pbrook9ee6e8b2007-11-11 00:04:49 +00005132 for (reg = 0; reg < nregs; reg++) {
Richard Hendersonac55d002018-10-24 07:50:20 +01005133 for (n = 0; n < 8 >> size; n++) {
5134 int xs;
5135 for (xs = 0; xs < interleave; xs++) {
5136 int tt = rd + reg + spacing * xs;
5137
5138 if (load) {
5139 gen_aa32_ld_i64(s, tmp64, addr, mmu_idx, endian | size);
5140 neon_store_element64(tt, n, size, tmp64);
5141 } else {
5142 neon_load_element64(tmp64, tt, n, size);
5143 gen_aa32_st_i64(s, tmp64, addr, mmu_idx, endian | size);
pbrook9ee6e8b2007-11-11 00:04:49 +00005144 }
Richard Hendersonac55d002018-10-24 07:50:20 +01005145 tcg_gen_add_i32(addr, addr, tmp2);
pbrook9ee6e8b2007-11-11 00:04:49 +00005146 }
5147 }
pbrook9ee6e8b2007-11-11 00:04:49 +00005148 }
Peter Maydelle318a602011-03-15 16:26:52 +00005149 tcg_temp_free_i32(addr);
Richard Hendersonac55d002018-10-24 07:50:20 +01005150 tcg_temp_free_i32(tmp2);
5151 tcg_temp_free_i64(tmp64);
5152 stride = nregs * interleave * 8;
pbrook9ee6e8b2007-11-11 00:04:49 +00005153 } else {
5154 size = (insn >> 10) & 3;
5155 if (size == 3) {
5156 /* Load single element to all lanes. */
Peter Maydell8e18cde2011-03-15 16:26:51 +00005157 int a = (insn >> 4) & 1;
5158 if (!load) {
pbrook9ee6e8b2007-11-11 00:04:49 +00005159 return 1;
Peter Maydell8e18cde2011-03-15 16:26:51 +00005160 }
pbrook9ee6e8b2007-11-11 00:04:49 +00005161 size = (insn >> 6) & 3;
5162 nregs = ((insn >> 8) & 3) + 1;
Peter Maydell8e18cde2011-03-15 16:26:51 +00005163
5164 if (size == 3) {
5165 if (nregs != 4 || a == 0) {
pbrook9ee6e8b2007-11-11 00:04:49 +00005166 return 1;
5167 }
Peter Maydell8e18cde2011-03-15 16:26:51 +00005168 /* For VLD4 size==3 a == 1 means 32 bits at 16 byte alignment */
5169 size = 2;
5170 }
5171 if (nregs == 1 && a == 1 && size == 0) {
5172 return 1;
5173 }
5174 if (nregs == 3 && a == 1) {
5175 return 1;
5176 }
Peter Maydelle318a602011-03-15 16:26:52 +00005177 addr = tcg_temp_new_i32();
Peter Maydell8e18cde2011-03-15 16:26:51 +00005178 load_reg_var(s, addr, rn);
Richard Henderson7377c2c2018-10-24 07:50:20 +01005179
5180 /* VLD1 to all lanes: bit 5 indicates how many Dregs to write.
5181 * VLD2/3/4 to all lanes: bit 5 indicates register stride.
5182 */
5183 stride = (insn & (1 << 5)) ? 2 : 1;
5184 vec_size = nregs == 1 ? stride * 8 : 8;
5185
5186 tmp = tcg_temp_new_i32();
5187 for (reg = 0; reg < nregs; reg++) {
5188 gen_aa32_ld_i32(s, tmp, addr, get_mem_index(s),
5189 s->be_data | size);
5190 if ((rd & 1) && vec_size == 16) {
5191 /* We cannot write 16 bytes at once because the
5192 * destination is unaligned.
5193 */
5194 tcg_gen_gvec_dup_i32(size, neon_reg_offset(rd, 0),
5195 8, 8, tmp);
5196 tcg_gen_gvec_mov(0, neon_reg_offset(rd + 1, 0),
5197 neon_reg_offset(rd, 0), 8, 8);
5198 } else {
5199 tcg_gen_gvec_dup_i32(size, neon_reg_offset(rd, 0),
5200 vec_size, vec_size, tmp);
Peter Maydell8e18cde2011-03-15 16:26:51 +00005201 }
Richard Henderson7377c2c2018-10-24 07:50:20 +01005202 tcg_gen_addi_i32(addr, addr, 1 << size);
5203 rd += stride;
pbrook9ee6e8b2007-11-11 00:04:49 +00005204 }
Richard Henderson7377c2c2018-10-24 07:50:20 +01005205 tcg_temp_free_i32(tmp);
Peter Maydelle318a602011-03-15 16:26:52 +00005206 tcg_temp_free_i32(addr);
pbrook9ee6e8b2007-11-11 00:04:49 +00005207 stride = (1 << size) * nregs;
5208 } else {
5209 /* Single element. */
Peter Maydell93262b12011-04-18 19:07:11 +01005210 int idx = (insn >> 4) & 0xf;
Richard Henderson2d6ac922018-10-24 07:50:20 +01005211 int reg_idx;
pbrook9ee6e8b2007-11-11 00:04:49 +00005212 switch (size) {
5213 case 0:
Richard Henderson2d6ac922018-10-24 07:50:20 +01005214 reg_idx = (insn >> 5) & 7;
pbrook9ee6e8b2007-11-11 00:04:49 +00005215 stride = 1;
5216 break;
5217 case 1:
Richard Henderson2d6ac922018-10-24 07:50:20 +01005218 reg_idx = (insn >> 6) & 3;
pbrook9ee6e8b2007-11-11 00:04:49 +00005219 stride = (insn & (1 << 5)) ? 2 : 1;
5220 break;
5221 case 2:
Richard Henderson2d6ac922018-10-24 07:50:20 +01005222 reg_idx = (insn >> 7) & 1;
pbrook9ee6e8b2007-11-11 00:04:49 +00005223 stride = (insn & (1 << 6)) ? 2 : 1;
5224 break;
5225 default:
5226 abort();
5227 }
5228 nregs = ((insn >> 8) & 3) + 1;
Peter Maydell93262b12011-04-18 19:07:11 +01005229 /* Catch the UNDEF cases. This is unavoidably a bit messy. */
5230 switch (nregs) {
5231 case 1:
5232 if (((idx & (1 << size)) != 0) ||
5233 (size == 2 && ((idx & 3) == 1 || (idx & 3) == 2))) {
5234 return 1;
5235 }
5236 break;
5237 case 3:
5238 if ((idx & 1) != 0) {
5239 return 1;
5240 }
5241 /* fall through */
5242 case 2:
5243 if (size == 2 && (idx & 2) != 0) {
5244 return 1;
5245 }
5246 break;
5247 case 4:
5248 if ((size == 2) && ((idx & 3) == 3)) {
5249 return 1;
5250 }
5251 break;
5252 default:
5253 abort();
5254 }
5255 if ((rd + stride * (nregs - 1)) > 31) {
5256 /* Attempts to write off the end of the register file
5257 * are UNPREDICTABLE; we choose to UNDEF because otherwise
5258 * the neon_load_reg() would write off the end of the array.
5259 */
5260 return 1;
5261 }
Richard Henderson2d6ac922018-10-24 07:50:20 +01005262 tmp = tcg_temp_new_i32();
Peter Maydelle318a602011-03-15 16:26:52 +00005263 addr = tcg_temp_new_i32();
Aurelien Jarnodcc65022009-10-18 16:00:18 +02005264 load_reg_var(s, addr, rn);
pbrook9ee6e8b2007-11-11 00:04:49 +00005265 for (reg = 0; reg < nregs; reg++) {
5266 if (load) {
Richard Henderson2d6ac922018-10-24 07:50:20 +01005267 gen_aa32_ld_i32(s, tmp, addr, get_mem_index(s),
5268 s->be_data | size);
5269 neon_store_element(rd, reg_idx, size, tmp);
pbrook9ee6e8b2007-11-11 00:04:49 +00005270 } else { /* Store */
Richard Henderson2d6ac922018-10-24 07:50:20 +01005271 neon_load_element(tmp, rd, reg_idx, size);
5272 gen_aa32_st_i32(s, tmp, addr, get_mem_index(s),
5273 s->be_data | size);
pbrook9ee6e8b2007-11-11 00:04:49 +00005274 }
5275 rd += stride;
Filip Navara1b2b1e52009-10-15 13:07:21 +02005276 tcg_gen_addi_i32(addr, addr, 1 << size);
pbrook9ee6e8b2007-11-11 00:04:49 +00005277 }
Peter Maydelle318a602011-03-15 16:26:52 +00005278 tcg_temp_free_i32(addr);
Richard Henderson2d6ac922018-10-24 07:50:20 +01005279 tcg_temp_free_i32(tmp);
pbrook9ee6e8b2007-11-11 00:04:49 +00005280 stride = nregs * (1 << size);
5281 }
5282 }
5283 if (rm != 15) {
Peter Maydell39d54922013-05-23 12:59:55 +01005284 TCGv_i32 base;
pbrookb26eefb2008-03-31 03:44:26 +00005285
5286 base = load_reg(s, rn);
pbrook9ee6e8b2007-11-11 00:04:49 +00005287 if (rm == 13) {
pbrookb26eefb2008-03-31 03:44:26 +00005288 tcg_gen_addi_i32(base, base, stride);
pbrook9ee6e8b2007-11-11 00:04:49 +00005289 } else {
Peter Maydell39d54922013-05-23 12:59:55 +01005290 TCGv_i32 index;
pbrookb26eefb2008-03-31 03:44:26 +00005291 index = load_reg(s, rm);
5292 tcg_gen_add_i32(base, base, index);
Peter Maydell7d1b0092011-03-06 21:39:54 +00005293 tcg_temp_free_i32(index);
pbrook9ee6e8b2007-11-11 00:04:49 +00005294 }
pbrookb26eefb2008-03-31 03:44:26 +00005295 store_reg(s, rn, base);
pbrook9ee6e8b2007-11-11 00:04:49 +00005296 }
5297 return 0;
5298}
5299
Peter Maydell39d54922013-05-23 12:59:55 +01005300static inline void gen_neon_narrow(int size, TCGv_i32 dest, TCGv_i64 src)
pbrookad694712008-03-31 03:48:30 +00005301{
5302 switch (size) {
5303 case 0: gen_helper_neon_narrow_u8(dest, src); break;
5304 case 1: gen_helper_neon_narrow_u16(dest, src); break;
Richard Hendersonecc7b3a2015-07-24 11:49:53 -07005305 case 2: tcg_gen_extrl_i64_i32(dest, src); break;
pbrookad694712008-03-31 03:48:30 +00005306 default: abort();
5307 }
5308}
5309
Peter Maydell39d54922013-05-23 12:59:55 +01005310static inline void gen_neon_narrow_sats(int size, TCGv_i32 dest, TCGv_i64 src)
pbrookad694712008-03-31 03:48:30 +00005311{
5312 switch (size) {
Peter Maydell02da0b22011-05-25 13:31:02 +00005313 case 0: gen_helper_neon_narrow_sat_s8(dest, cpu_env, src); break;
5314 case 1: gen_helper_neon_narrow_sat_s16(dest, cpu_env, src); break;
5315 case 2: gen_helper_neon_narrow_sat_s32(dest, cpu_env, src); break;
pbrookad694712008-03-31 03:48:30 +00005316 default: abort();
5317 }
5318}
5319
Peter Maydell39d54922013-05-23 12:59:55 +01005320static inline void gen_neon_narrow_satu(int size, TCGv_i32 dest, TCGv_i64 src)
pbrookad694712008-03-31 03:48:30 +00005321{
5322 switch (size) {
Peter Maydell02da0b22011-05-25 13:31:02 +00005323 case 0: gen_helper_neon_narrow_sat_u8(dest, cpu_env, src); break;
5324 case 1: gen_helper_neon_narrow_sat_u16(dest, cpu_env, src); break;
5325 case 2: gen_helper_neon_narrow_sat_u32(dest, cpu_env, src); break;
pbrookad694712008-03-31 03:48:30 +00005326 default: abort();
5327 }
5328}
5329
Peter Maydell39d54922013-05-23 12:59:55 +01005330static inline void gen_neon_unarrow_sats(int size, TCGv_i32 dest, TCGv_i64 src)
Juha Riihimäkiaf1bbf32011-02-09 15:42:32 +00005331{
5332 switch (size) {
Peter Maydell02da0b22011-05-25 13:31:02 +00005333 case 0: gen_helper_neon_unarrow_sat8(dest, cpu_env, src); break;
5334 case 1: gen_helper_neon_unarrow_sat16(dest, cpu_env, src); break;
5335 case 2: gen_helper_neon_unarrow_sat32(dest, cpu_env, src); break;
Juha Riihimäkiaf1bbf32011-02-09 15:42:32 +00005336 default: abort();
5337 }
5338}
5339
Peter Maydell39d54922013-05-23 12:59:55 +01005340static inline void gen_neon_shift_narrow(int size, TCGv_i32 var, TCGv_i32 shift,
pbrookad694712008-03-31 03:48:30 +00005341 int q, int u)
5342{
5343 if (q) {
5344 if (u) {
5345 switch (size) {
5346 case 1: gen_helper_neon_rshl_u16(var, var, shift); break;
5347 case 2: gen_helper_neon_rshl_u32(var, var, shift); break;
5348 default: abort();
5349 }
5350 } else {
5351 switch (size) {
5352 case 1: gen_helper_neon_rshl_s16(var, var, shift); break;
5353 case 2: gen_helper_neon_rshl_s32(var, var, shift); break;
5354 default: abort();
5355 }
5356 }
5357 } else {
5358 if (u) {
5359 switch (size) {
Christophe Lyonb408a9b2011-02-15 13:44:46 +00005360 case 1: gen_helper_neon_shl_u16(var, var, shift); break;
5361 case 2: gen_helper_neon_shl_u32(var, var, shift); break;
pbrookad694712008-03-31 03:48:30 +00005362 default: abort();
5363 }
5364 } else {
5365 switch (size) {
5366 case 1: gen_helper_neon_shl_s16(var, var, shift); break;
5367 case 2: gen_helper_neon_shl_s32(var, var, shift); break;
5368 default: abort();
5369 }
5370 }
5371 }
5372}
5373
Peter Maydell39d54922013-05-23 12:59:55 +01005374static inline void gen_neon_widen(TCGv_i64 dest, TCGv_i32 src, int size, int u)
pbrookad694712008-03-31 03:48:30 +00005375{
5376 if (u) {
5377 switch (size) {
5378 case 0: gen_helper_neon_widen_u8(dest, src); break;
5379 case 1: gen_helper_neon_widen_u16(dest, src); break;
5380 case 2: tcg_gen_extu_i32_i64(dest, src); break;
5381 default: abort();
5382 }
5383 } else {
5384 switch (size) {
5385 case 0: gen_helper_neon_widen_s8(dest, src); break;
5386 case 1: gen_helper_neon_widen_s16(dest, src); break;
5387 case 2: tcg_gen_ext_i32_i64(dest, src); break;
5388 default: abort();
5389 }
5390 }
Peter Maydell7d1b0092011-03-06 21:39:54 +00005391 tcg_temp_free_i32(src);
pbrookad694712008-03-31 03:48:30 +00005392}
5393
5394static inline void gen_neon_addl(int size)
5395{
5396 switch (size) {
5397 case 0: gen_helper_neon_addl_u16(CPU_V001); break;
5398 case 1: gen_helper_neon_addl_u32(CPU_V001); break;
5399 case 2: tcg_gen_add_i64(CPU_V001); break;
5400 default: abort();
5401 }
5402}
5403
5404static inline void gen_neon_subl(int size)
5405{
5406 switch (size) {
5407 case 0: gen_helper_neon_subl_u16(CPU_V001); break;
5408 case 1: gen_helper_neon_subl_u32(CPU_V001); break;
5409 case 2: tcg_gen_sub_i64(CPU_V001); break;
5410 default: abort();
5411 }
5412}
5413
pbrooka7812ae2008-11-17 14:43:54 +00005414static inline void gen_neon_negl(TCGv_i64 var, int size)
pbrookad694712008-03-31 03:48:30 +00005415{
5416 switch (size) {
5417 case 0: gen_helper_neon_negl_u16(var, var); break;
5418 case 1: gen_helper_neon_negl_u32(var, var); break;
Peter Maydellee6fa552012-10-18 16:58:52 +01005419 case 2:
5420 tcg_gen_neg_i64(var, var);
5421 break;
pbrookad694712008-03-31 03:48:30 +00005422 default: abort();
5423 }
5424}
5425
pbrooka7812ae2008-11-17 14:43:54 +00005426static inline void gen_neon_addl_saturate(TCGv_i64 op0, TCGv_i64 op1, int size)
pbrookad694712008-03-31 03:48:30 +00005427{
5428 switch (size) {
Peter Maydell02da0b22011-05-25 13:31:02 +00005429 case 1: gen_helper_neon_addl_saturate_s32(op0, cpu_env, op0, op1); break;
5430 case 2: gen_helper_neon_addl_saturate_s64(op0, cpu_env, op0, op1); break;
pbrookad694712008-03-31 03:48:30 +00005431 default: abort();
5432 }
5433}
5434
Peter Maydell39d54922013-05-23 12:59:55 +01005435static inline void gen_neon_mull(TCGv_i64 dest, TCGv_i32 a, TCGv_i32 b,
5436 int size, int u)
pbrookad694712008-03-31 03:48:30 +00005437{
pbrooka7812ae2008-11-17 14:43:54 +00005438 TCGv_i64 tmp;
pbrookad694712008-03-31 03:48:30 +00005439
5440 switch ((size << 1) | u) {
5441 case 0: gen_helper_neon_mull_s8(dest, a, b); break;
5442 case 1: gen_helper_neon_mull_u8(dest, a, b); break;
5443 case 2: gen_helper_neon_mull_s16(dest, a, b); break;
5444 case 3: gen_helper_neon_mull_u16(dest, a, b); break;
5445 case 4:
5446 tmp = gen_muls_i64_i32(a, b);
5447 tcg_gen_mov_i64(dest, tmp);
Peter Maydell7d2aabe2011-03-11 13:32:34 +00005448 tcg_temp_free_i64(tmp);
pbrookad694712008-03-31 03:48:30 +00005449 break;
5450 case 5:
5451 tmp = gen_mulu_i64_i32(a, b);
5452 tcg_gen_mov_i64(dest, tmp);
Peter Maydell7d2aabe2011-03-11 13:32:34 +00005453 tcg_temp_free_i64(tmp);
pbrookad694712008-03-31 03:48:30 +00005454 break;
5455 default: abort();
5456 }
Christophe Lyonc6067f02011-01-19 15:37:58 +01005457
5458 /* gen_helper_neon_mull_[su]{8|16} do not free their parameters.
5459 Don't forget to clean them now. */
5460 if (size < 2) {
Peter Maydell7d1b0092011-03-06 21:39:54 +00005461 tcg_temp_free_i32(a);
5462 tcg_temp_free_i32(b);
Christophe Lyonc6067f02011-01-19 15:37:58 +01005463 }
pbrookad694712008-03-31 03:48:30 +00005464}
5465
Peter Maydell39d54922013-05-23 12:59:55 +01005466static void gen_neon_narrow_op(int op, int u, int size,
5467 TCGv_i32 dest, TCGv_i64 src)
Peter Maydellc33171c2011-02-21 11:05:21 +00005468{
5469 if (op) {
5470 if (u) {
5471 gen_neon_unarrow_sats(size, dest, src);
5472 } else {
5473 gen_neon_narrow(size, dest, src);
5474 }
5475 } else {
5476 if (u) {
5477 gen_neon_narrow_satu(size, dest, src);
5478 } else {
5479 gen_neon_narrow_sats(size, dest, src);
5480 }
5481 }
5482}
5483
/* Symbolic constants for op fields for Neon 3-register same-length.
 * The values correspond to bits [11:8,4]; see the ARM ARM DDI0406B
 * table A7-9.  Several op values are shared by multiple insns, which
 * are then distinguished by other fields (see the per-entry comments
 * and neon_3r_sizes[] below).
 */
#define NEON_3R_VHADD 0
#define NEON_3R_VQADD 1
#define NEON_3R_VRHADD 2
#define NEON_3R_LOGIC 3 /* VAND,VBIC,VORR,VMOV,VORN,VEOR,VBIF,VBIT,VBSL */
#define NEON_3R_VHSUB 4
#define NEON_3R_VQSUB 5
#define NEON_3R_VCGT 6
#define NEON_3R_VCGE 7
#define NEON_3R_VSHL 8
#define NEON_3R_VQSHL 9
#define NEON_3R_VRSHL 10
#define NEON_3R_VQRSHL 11
#define NEON_3R_VMAX 12
#define NEON_3R_VMIN 13
#define NEON_3R_VABD 14
#define NEON_3R_VABA 15
#define NEON_3R_VADD_VSUB 16
#define NEON_3R_VTST_VCEQ 17
#define NEON_3R_VML 18 /* VMLA, VMLS */
#define NEON_3R_VMUL 19
#define NEON_3R_VPMAX 20
#define NEON_3R_VPMIN 21
#define NEON_3R_VQDMULH_VQRDMULH 22
#define NEON_3R_VPADD_VQRDMLAH 23
#define NEON_3R_SHA 24 /* SHA1C,SHA1P,SHA1M,SHA1SU0,SHA256H{2},SHA256SU1 */
#define NEON_3R_VFM_VQRDMLSH 25 /* VFMA, VFMS, VQRDMLSH */
#define NEON_3R_FLOAT_ARITH 26 /* float VADD, VSUB, VPADD, VABD */
#define NEON_3R_FLOAT_MULTIPLY 27 /* float VMLA, VMLS, VMUL */
#define NEON_3R_FLOAT_CMP 28 /* float VCEQ, VCGE, VCGT */
#define NEON_3R_FLOAT_ACMP 29 /* float VACGE, VACGT, VACLE, VACLT */
#define NEON_3R_FLOAT_MINMAX 30 /* float VMIN, VMAX */
#define NEON_3R_FLOAT_MISC 31 /* float VRECPS, VRSQRTS, VMAXNM/MINNM */
Peter Maydell62698be2011-04-11 16:26:11 +01005520
/* Each entry in this array has bit n set if the 3-reg-same insn with
 * that op value allows size value n (otherwise it will UNDEF); 0x7
 * thus means sizes 0..2, 0xf additionally allows size 3, and entries
 * marked "size ... encodes op" use the size field for something other
 * than element size.
 */
static const uint8_t neon_3r_sizes[] = {
    [NEON_3R_VHADD] = 0x7,
    [NEON_3R_VQADD] = 0xf,
    [NEON_3R_VRHADD] = 0x7,
    [NEON_3R_LOGIC] = 0xf, /* size field encodes op type */
    [NEON_3R_VHSUB] = 0x7,
    [NEON_3R_VQSUB] = 0xf,
    [NEON_3R_VCGT] = 0x7,
    [NEON_3R_VCGE] = 0x7,
    [NEON_3R_VSHL] = 0xf,
    [NEON_3R_VQSHL] = 0xf,
    [NEON_3R_VRSHL] = 0xf,
    [NEON_3R_VQRSHL] = 0xf,
    [NEON_3R_VMAX] = 0x7,
    [NEON_3R_VMIN] = 0x7,
    [NEON_3R_VABD] = 0x7,
    [NEON_3R_VABA] = 0x7,
    [NEON_3R_VADD_VSUB] = 0xf,
    [NEON_3R_VTST_VCEQ] = 0x7,
    [NEON_3R_VML] = 0x7,
    [NEON_3R_VMUL] = 0x7,
    [NEON_3R_VPMAX] = 0x7,
    [NEON_3R_VPMIN] = 0x7,
    [NEON_3R_VQDMULH_VQRDMULH] = 0x6,
    [NEON_3R_VPADD_VQRDMLAH] = 0x7,
    [NEON_3R_SHA] = 0xf, /* size field encodes op type */
    [NEON_3R_VFM_VQRDMLSH] = 0x7, /* For VFM, size bit 1 encodes op */
    [NEON_3R_FLOAT_ARITH] = 0x5, /* size bit 1 encodes op */
    [NEON_3R_FLOAT_MULTIPLY] = 0x5, /* size bit 1 encodes op */
    [NEON_3R_FLOAT_CMP] = 0x5, /* size bit 1 encodes op */
    [NEON_3R_FLOAT_ACMP] = 0x5, /* size bit 1 encodes op */
    [NEON_3R_FLOAT_MINMAX] = 0x5, /* size bit 1 encodes op */
    [NEON_3R_FLOAT_MISC] = 0x5, /* size bit 1 encodes op */
};
5555
/* Symbolic constants for op fields for Neon 2-register miscellaneous.
 * The values correspond to bits [17:16,10:7]; see the ARM ARM DDI0406B
 * table A7-13.  Missing values (e.g. 3, 29) have no entry in
 * neon_2rm_sizes[] below and therefore UNDEF.
 */
#define NEON_2RM_VREV64 0
#define NEON_2RM_VREV32 1
#define NEON_2RM_VREV16 2
#define NEON_2RM_VPADDL 4
#define NEON_2RM_VPADDL_U 5
#define NEON_2RM_AESE 6 /* Includes AESD */
#define NEON_2RM_AESMC 7 /* Includes AESIMC */
#define NEON_2RM_VCLS 8
#define NEON_2RM_VCLZ 9
#define NEON_2RM_VCNT 10
#define NEON_2RM_VMVN 11
#define NEON_2RM_VPADAL 12
#define NEON_2RM_VPADAL_U 13
#define NEON_2RM_VQABS 14
#define NEON_2RM_VQNEG 15
#define NEON_2RM_VCGT0 16
#define NEON_2RM_VCGE0 17
#define NEON_2RM_VCEQ0 18
#define NEON_2RM_VCLE0 19
#define NEON_2RM_VCLT0 20
#define NEON_2RM_SHA1H 21
#define NEON_2RM_VABS 22
#define NEON_2RM_VNEG 23
#define NEON_2RM_VCGT0_F 24
#define NEON_2RM_VCGE0_F 25
#define NEON_2RM_VCEQ0_F 26
#define NEON_2RM_VCLE0_F 27
#define NEON_2RM_VCLT0_F 28
#define NEON_2RM_VABS_F 30
#define NEON_2RM_VNEG_F 31
#define NEON_2RM_VSWP 32
#define NEON_2RM_VTRN 33
#define NEON_2RM_VUZP 34
#define NEON_2RM_VZIP 35
#define NEON_2RM_VMOVN 36 /* Includes VQMOVN, VQMOVUN */
#define NEON_2RM_VQMOVN 37 /* Includes VQMOVUN */
#define NEON_2RM_VSHLL 38
#define NEON_2RM_SHA1SU1 39 /* Includes SHA256SU0 */
#define NEON_2RM_VRINTN 40
#define NEON_2RM_VRINTX 41
#define NEON_2RM_VRINTA 42
#define NEON_2RM_VRINTZ 43
#define NEON_2RM_VCVT_F16_F32 44
#define NEON_2RM_VRINTM 45
#define NEON_2RM_VCVT_F32_F16 46
#define NEON_2RM_VRINTP 47
#define NEON_2RM_VCVTAU 48
#define NEON_2RM_VCVTAS 49
#define NEON_2RM_VCVTNU 50
#define NEON_2RM_VCVTNS 51
#define NEON_2RM_VCVTPU 52
#define NEON_2RM_VCVTPS 53
#define NEON_2RM_VCVTMU 54
#define NEON_2RM_VCVTMS 55
#define NEON_2RM_VRECPE 56
#define NEON_2RM_VRSQRTE 57
#define NEON_2RM_VRECPE_F 58
#define NEON_2RM_VRSQRTE_F 59
#define NEON_2RM_VCVT_FS 60
#define NEON_2RM_VCVT_FU 61
#define NEON_2RM_VCVT_SF 62
#define NEON_2RM_VCVT_UF 63
5622
5623static int neon_2rm_is_float_op(int op)
5624{
5625 /* Return true if this neon 2reg-misc op is float-to-float */
5626 return (op == NEON_2RM_VABS_F || op == NEON_2RM_VNEG_F ||
Will Newton34f7b0a2014-01-31 14:47:35 +00005627 (op >= NEON_2RM_VRINTN && op <= NEON_2RM_VRINTZ) ||
Will Newton901ad522014-01-31 14:47:35 +00005628 op == NEON_2RM_VRINTM ||
5629 (op >= NEON_2RM_VRINTP && op <= NEON_2RM_VCVTMS) ||
Will Newton34f7b0a2014-01-31 14:47:35 +00005630 op >= NEON_2RM_VRECPE_F);
Peter Maydell600b8282011-04-11 16:26:20 +01005631}
5632
Peter Maydellfe8fcf32016-06-14 15:59:15 +01005633static bool neon_2rm_is_v8_op(int op)
5634{
5635 /* Return true if this neon 2reg-misc op is ARMv8 and up */
5636 switch (op) {
5637 case NEON_2RM_VRINTN:
5638 case NEON_2RM_VRINTA:
5639 case NEON_2RM_VRINTM:
5640 case NEON_2RM_VRINTP:
5641 case NEON_2RM_VRINTZ:
5642 case NEON_2RM_VRINTX:
5643 case NEON_2RM_VCVTAU:
5644 case NEON_2RM_VCVTAS:
5645 case NEON_2RM_VCVTNU:
5646 case NEON_2RM_VCVTNS:
5647 case NEON_2RM_VCVTPU:
5648 case NEON_2RM_VCVTPS:
5649 case NEON_2RM_VCVTMU:
5650 case NEON_2RM_VCVTMS:
5651 return true;
5652 default:
5653 return false;
5654 }
5655}
5656
/* Each entry in this array has bit n set if the insn allows
 * size value n (otherwise it will UNDEF). Since unallocated
 * op values will have no bits set they always UNDEF.
 * (So 0x7 = sizes 0,1,2 allowed; 0x4 = 32-bit only; 0x1 = 8-bit only;
 * 0x2 = 16-bit only.)
 */
static const uint8_t neon_2rm_sizes[] = {
    [NEON_2RM_VREV64] = 0x7,
    [NEON_2RM_VREV32] = 0x3,
    [NEON_2RM_VREV16] = 0x1,
    [NEON_2RM_VPADDL] = 0x7,
    [NEON_2RM_VPADDL_U] = 0x7,
    [NEON_2RM_AESE] = 0x1,
    [NEON_2RM_AESMC] = 0x1,
    [NEON_2RM_VCLS] = 0x7,
    [NEON_2RM_VCLZ] = 0x7,
    [NEON_2RM_VCNT] = 0x1,
    [NEON_2RM_VMVN] = 0x1,
    [NEON_2RM_VPADAL] = 0x7,
    [NEON_2RM_VPADAL_U] = 0x7,
    [NEON_2RM_VQABS] = 0x7,
    [NEON_2RM_VQNEG] = 0x7,
    [NEON_2RM_VCGT0] = 0x7,
    [NEON_2RM_VCGE0] = 0x7,
    [NEON_2RM_VCEQ0] = 0x7,
    [NEON_2RM_VCLE0] = 0x7,
    [NEON_2RM_VCLT0] = 0x7,
    [NEON_2RM_SHA1H] = 0x4,
    [NEON_2RM_VABS] = 0x7,
    [NEON_2RM_VNEG] = 0x7,
    [NEON_2RM_VCGT0_F] = 0x4,
    [NEON_2RM_VCGE0_F] = 0x4,
    [NEON_2RM_VCEQ0_F] = 0x4,
    [NEON_2RM_VCLE0_F] = 0x4,
    [NEON_2RM_VCLT0_F] = 0x4,
    [NEON_2RM_VABS_F] = 0x4,
    [NEON_2RM_VNEG_F] = 0x4,
    [NEON_2RM_VSWP] = 0x1,
    [NEON_2RM_VTRN] = 0x7,
    [NEON_2RM_VUZP] = 0x7,
    [NEON_2RM_VZIP] = 0x7,
    [NEON_2RM_VMOVN] = 0x7,
    [NEON_2RM_VQMOVN] = 0x7,
    [NEON_2RM_VSHLL] = 0x7,
    [NEON_2RM_SHA1SU1] = 0x4,
    [NEON_2RM_VRINTN] = 0x4,
    [NEON_2RM_VRINTX] = 0x4,
    [NEON_2RM_VRINTA] = 0x4,
    [NEON_2RM_VRINTZ] = 0x4,
    [NEON_2RM_VCVT_F16_F32] = 0x2,
    [NEON_2RM_VRINTM] = 0x4,
    [NEON_2RM_VCVT_F32_F16] = 0x2,
    [NEON_2RM_VRINTP] = 0x4,
    [NEON_2RM_VCVTAU] = 0x4,
    [NEON_2RM_VCVTAS] = 0x4,
    [NEON_2RM_VCVTNU] = 0x4,
    [NEON_2RM_VCVTNS] = 0x4,
    [NEON_2RM_VCVTPU] = 0x4,
    [NEON_2RM_VCVTPS] = 0x4,
    [NEON_2RM_VCVTMU] = 0x4,
    [NEON_2RM_VCVTMS] = 0x4,
    [NEON_2RM_VRECPE] = 0x4,
    [NEON_2RM_VRSQRTE] = 0x4,
    [NEON_2RM_VRECPE_F] = 0x4,
    [NEON_2RM_VRSQRTE_F] = 0x4,
    [NEON_2RM_VCVT_FS] = 0x4,
    [NEON_2RM_VCVT_FU] = 0x4,
    [NEON_2RM_VCVT_SF] = 0x4,
    [NEON_2RM_VCVT_UF] = 0x4,
};
5725
Richard Henderson36a71932018-03-02 10:45:42 +00005726
5727/* Expand v8.1 simd helper. */
5728static int do_v81_helper(DisasContext *s, gen_helper_gvec_3_ptr *fn,
5729 int q, int rd, int rn, int rm)
5730{
Richard Henderson962fcbf2018-10-24 07:50:16 +01005731 if (dc_isar_feature(aa32_rdm, s)) {
Richard Henderson36a71932018-03-02 10:45:42 +00005732 int opr_sz = (1 + q) * 8;
5733 tcg_gen_gvec_3_ptr(vfp_reg_offset(1, rd),
5734 vfp_reg_offset(1, rn),
5735 vfp_reg_offset(1, rm), cpu_env,
5736 opr_sz, opr_sz, 0, fn);
5737 return 0;
5738 }
5739 return 1;
5740}
5741
/*
 * Signed shift-right-and-accumulate expanders (used for the *SRA
 * insns): each helper shifts 'a' right arithmetically in place and
 * then adds it into 'd'.  The _i64 8/16-bit variants operate on
 * lanes packed into a 64-bit value via the tcg_gen_vec_* ops.
 */
static void gen_ssra8_i64(TCGv_i64 d, TCGv_i64 a, int64_t shift)
{
    tcg_gen_vec_sar8i_i64(a, a, shift);
    tcg_gen_vec_add8_i64(d, d, a);
}

static void gen_ssra16_i64(TCGv_i64 d, TCGv_i64 a, int64_t shift)
{
    tcg_gen_vec_sar16i_i64(a, a, shift);
    tcg_gen_vec_add16_i64(d, d, a);
}

static void gen_ssra32_i32(TCGv_i32 d, TCGv_i32 a, int32_t shift)
{
    tcg_gen_sari_i32(a, a, shift);
    tcg_gen_add_i32(d, d, a);
}

static void gen_ssra64_i64(TCGv_i64 d, TCGv_i64 a, int64_t shift)
{
    tcg_gen_sari_i64(a, a, shift);
    tcg_gen_add_i64(d, d, a);
}

/* Host-vector form of the same shift-then-accumulate operation. */
static void gen_ssra_vec(unsigned vece, TCGv_vec d, TCGv_vec a, int64_t sh)
{
    tcg_gen_sari_vec(vece, a, a, sh);
    tcg_gen_add_vec(vece, d, d, a);
}

/* Vector opcodes gen_ssra_vec needs; passed via .opt_opc below. */
static const TCGOpcode vecop_list_ssra[] = {
    INDEX_op_sari_vec, INDEX_op_add_vec, 0
};

/* GVecGen2i descriptors for signed SRA, indexed by vece (MO_8..MO_64).
 * load_dest is set because the destination is read (accumulated into)
 * as well as written.
 */
const GVecGen2i ssra_op[4] = {
    { .fni8 = gen_ssra8_i64,
      .fniv = gen_ssra_vec,
      .load_dest = true,
      .opt_opc = vecop_list_ssra,
      .vece = MO_8 },
    { .fni8 = gen_ssra16_i64,
      .fniv = gen_ssra_vec,
      .load_dest = true,
      .opt_opc = vecop_list_ssra,
      .vece = MO_16 },
    { .fni4 = gen_ssra32_i32,
      .fniv = gen_ssra_vec,
      .load_dest = true,
      .opt_opc = vecop_list_ssra,
      .vece = MO_32 },
    { .fni8 = gen_ssra64_i64,
      .fniv = gen_ssra_vec,
      .prefer_i64 = TCG_TARGET_REG_BITS == 64,
      .opt_opc = vecop_list_ssra,
      .load_dest = true,
      .vece = MO_64 },
};
5799
/*
 * Unsigned shift-right-and-accumulate expanders: as the ssra_op
 * family above but with logical (unsigned) right shifts.  Each helper
 * shifts 'a' in place and adds it into 'd'.
 */
static void gen_usra8_i64(TCGv_i64 d, TCGv_i64 a, int64_t shift)
{
    tcg_gen_vec_shr8i_i64(a, a, shift);
    tcg_gen_vec_add8_i64(d, d, a);
}

static void gen_usra16_i64(TCGv_i64 d, TCGv_i64 a, int64_t shift)
{
    tcg_gen_vec_shr16i_i64(a, a, shift);
    tcg_gen_vec_add16_i64(d, d, a);
}

static void gen_usra32_i32(TCGv_i32 d, TCGv_i32 a, int32_t shift)
{
    tcg_gen_shri_i32(a, a, shift);
    tcg_gen_add_i32(d, d, a);
}

static void gen_usra64_i64(TCGv_i64 d, TCGv_i64 a, int64_t shift)
{
    tcg_gen_shri_i64(a, a, shift);
    tcg_gen_add_i64(d, d, a);
}

/* Host-vector form of unsigned shift-then-accumulate. */
static void gen_usra_vec(unsigned vece, TCGv_vec d, TCGv_vec a, int64_t sh)
{
    tcg_gen_shri_vec(vece, a, a, sh);
    tcg_gen_add_vec(vece, d, d, a);
}

/* Vector opcodes gen_usra_vec needs; passed via .opt_opc below. */
static const TCGOpcode vecop_list_usra[] = {
    INDEX_op_shri_vec, INDEX_op_add_vec, 0
};

/* GVecGen2i descriptors for unsigned SRA, indexed by vece.
 * load_dest is set because the destination accumulates.
 */
const GVecGen2i usra_op[4] = {
    { .fni8 = gen_usra8_i64,
      .fniv = gen_usra_vec,
      .load_dest = true,
      .opt_opc = vecop_list_usra,
      .vece = MO_8, },
    { .fni8 = gen_usra16_i64,
      .fniv = gen_usra_vec,
      .load_dest = true,
      .opt_opc = vecop_list_usra,
      .vece = MO_16, },
    { .fni4 = gen_usra32_i32,
      .fniv = gen_usra_vec,
      .load_dest = true,
      .opt_opc = vecop_list_usra,
      .vece = MO_32, },
    { .fni8 = gen_usra64_i64,
      .fniv = gen_usra_vec,
      .prefer_i64 = TCG_TARGET_REG_BITS == 64,
      .load_dest = true,
      .opt_opc = vecop_list_usra,
      .vece = MO_64, },
};
Richard Hendersoneabcd6f2018-10-24 07:50:19 +01005857
/*
 * Shift-right-and-insert expanders (VSRI): each element of 'd' gets
 * its low (esize - shift) bits replaced by the shifted-down bits of
 * 'a', while d's top 'shift' bits are preserved.  The 8/16-bit _i64
 * forms build a per-lane mask with dup_const.
 */
static void gen_shr8_ins_i64(TCGv_i64 d, TCGv_i64 a, int64_t shift)
{
    uint64_t mask = dup_const(MO_8, 0xff >> shift);
    TCGv_i64 t = tcg_temp_new_i64();

    tcg_gen_shri_i64(t, a, shift);
    tcg_gen_andi_i64(t, t, mask);
    tcg_gen_andi_i64(d, d, ~mask);
    tcg_gen_or_i64(d, d, t);
    tcg_temp_free_i64(t);
}

static void gen_shr16_ins_i64(TCGv_i64 d, TCGv_i64 a, int64_t shift)
{
    uint64_t mask = dup_const(MO_16, 0xffff >> shift);
    TCGv_i64 t = tcg_temp_new_i64();

    tcg_gen_shri_i64(t, a, shift);
    tcg_gen_andi_i64(t, t, mask);
    tcg_gen_andi_i64(d, d, ~mask);
    tcg_gen_or_i64(d, d, t);
    tcg_temp_free_i64(t);
}

/* 32/64-bit lanes: the deposit op expresses the insert directly. */
static void gen_shr32_ins_i32(TCGv_i32 d, TCGv_i32 a, int32_t shift)
{
    tcg_gen_shri_i32(a, a, shift);
    tcg_gen_deposit_i32(d, d, a, 0, 32 - shift);
}

static void gen_shr64_ins_i64(TCGv_i64 d, TCGv_i64 a, int64_t shift)
{
    tcg_gen_shri_i64(a, a, shift);
    tcg_gen_deposit_i64(d, d, a, 0, 64 - shift);
}

static void gen_shr_ins_vec(unsigned vece, TCGv_vec d, TCGv_vec a, int64_t sh)
{
    if (sh == 0) {
        /* Shift by zero inserts the whole element: a plain copy. */
        tcg_gen_mov_vec(d, a);
    } else {
        TCGv_vec t = tcg_temp_new_vec_matching(d);
        TCGv_vec m = tcg_temp_new_vec_matching(d);

        /* m covers the top sh bits of each element, which d keeps. */
        tcg_gen_dupi_vec(vece, m, MAKE_64BIT_MASK((8 << vece) - sh, sh));
        tcg_gen_shri_vec(vece, t, a, sh);
        tcg_gen_and_vec(vece, d, d, m);
        tcg_gen_or_vec(vece, d, d, t);

        tcg_temp_free_vec(t);
        tcg_temp_free_vec(m);
    }
}

/* Vector opcodes gen_shr_ins_vec needs; passed via .opt_opc below. */
static const TCGOpcode vecop_list_sri[] = { INDEX_op_shri_vec, 0 };

/* GVecGen2i descriptors for VSRI, indexed by vece.
 * load_dest is set because part of the destination is preserved.
 */
const GVecGen2i sri_op[4] = {
    { .fni8 = gen_shr8_ins_i64,
      .fniv = gen_shr_ins_vec,
      .load_dest = true,
      .opt_opc = vecop_list_sri,
      .vece = MO_8 },
    { .fni8 = gen_shr16_ins_i64,
      .fniv = gen_shr_ins_vec,
      .load_dest = true,
      .opt_opc = vecop_list_sri,
      .vece = MO_16 },
    { .fni4 = gen_shr32_ins_i32,
      .fniv = gen_shr_ins_vec,
      .load_dest = true,
      .opt_opc = vecop_list_sri,
      .vece = MO_32 },
    { .fni8 = gen_shr64_ins_i64,
      .fniv = gen_shr_ins_vec,
      .prefer_i64 = TCG_TARGET_REG_BITS == 64,
      .load_dest = true,
      .opt_opc = vecop_list_sri,
      .vece = MO_64 },
};
5937
/*
 * Shift-left-and-insert expanders (VSLI): each element of 'd' gets
 * its top (esize - shift) bits replaced by the shifted-up bits of
 * 'a', while d's low 'shift' bits are preserved.
 */
static void gen_shl8_ins_i64(TCGv_i64 d, TCGv_i64 a, int64_t shift)
{
    uint64_t mask = dup_const(MO_8, 0xff << shift);
    TCGv_i64 t = tcg_temp_new_i64();

    tcg_gen_shli_i64(t, a, shift);
    tcg_gen_andi_i64(t, t, mask);
    tcg_gen_andi_i64(d, d, ~mask);
    tcg_gen_or_i64(d, d, t);
    tcg_temp_free_i64(t);
}

static void gen_shl16_ins_i64(TCGv_i64 d, TCGv_i64 a, int64_t shift)
{
    uint64_t mask = dup_const(MO_16, 0xffff << shift);
    TCGv_i64 t = tcg_temp_new_i64();

    tcg_gen_shli_i64(t, a, shift);
    tcg_gen_andi_i64(t, t, mask);
    tcg_gen_andi_i64(d, d, ~mask);
    tcg_gen_or_i64(d, d, t);
    tcg_temp_free_i64(t);
}

/* 32/64-bit lanes: deposit inserts a's low bits at position 'shift'. */
static void gen_shl32_ins_i32(TCGv_i32 d, TCGv_i32 a, int32_t shift)
{
    tcg_gen_deposit_i32(d, d, a, shift, 32 - shift);
}

static void gen_shl64_ins_i64(TCGv_i64 d, TCGv_i64 a, int64_t shift)
{
    tcg_gen_deposit_i64(d, d, a, shift, 64 - shift);
}

static void gen_shl_ins_vec(unsigned vece, TCGv_vec d, TCGv_vec a, int64_t sh)
{
    if (sh == 0) {
        /* Shift by zero inserts the whole element: a plain copy. */
        tcg_gen_mov_vec(d, a);
    } else {
        TCGv_vec t = tcg_temp_new_vec_matching(d);
        TCGv_vec m = tcg_temp_new_vec_matching(d);

        /* m covers the low sh bits of each element, which d keeps. */
        tcg_gen_dupi_vec(vece, m, MAKE_64BIT_MASK(0, sh));
        tcg_gen_shli_vec(vece, t, a, sh);
        tcg_gen_and_vec(vece, d, d, m);
        tcg_gen_or_vec(vece, d, d, t);

        tcg_temp_free_vec(t);
        tcg_temp_free_vec(m);
    }
}

/* Vector opcodes gen_shl_ins_vec needs; passed via .opt_opc below. */
static const TCGOpcode vecop_list_sli[] = { INDEX_op_shli_vec, 0 };

/* GVecGen2i descriptors for VSLI, indexed by vece.
 * load_dest is set because part of the destination is preserved.
 */
const GVecGen2i sli_op[4] = {
    { .fni8 = gen_shl8_ins_i64,
      .fniv = gen_shl_ins_vec,
      .load_dest = true,
      .opt_opc = vecop_list_sli,
      .vece = MO_8 },
    { .fni8 = gen_shl16_ins_i64,
      .fniv = gen_shl_ins_vec,
      .load_dest = true,
      .opt_opc = vecop_list_sli,
      .vece = MO_16 },
    { .fni4 = gen_shl32_ins_i32,
      .fniv = gen_shl_ins_vec,
      .load_dest = true,
      .opt_opc = vecop_list_sli,
      .vece = MO_32 },
    { .fni8 = gen_shl64_ins_i64,
      .fniv = gen_shl_ins_vec,
      .prefer_i64 = TCG_TARGET_REG_BITS == 64,
      .load_dest = true,
      .opt_opc = vecop_list_sli,
      .vece = MO_64 },
};
6015
/* Expand VMLA for 8-bit elements packed in a 32-bit value: d += a * b
 * per lane, using the NEON packed-lane helpers.  Clobbers @a as scratch.
 */
static void gen_mla8_i32(TCGv_i32 d, TCGv_i32 a, TCGv_i32 b)
{
    gen_helper_neon_mul_u8(a, a, b);
    gen_helper_neon_add_u8(d, d, a);
}
6021
/* Expand VMLS for 8-bit elements packed in a 32-bit value: d -= a * b
 * per lane.  Clobbers @a as scratch.
 */
static void gen_mls8_i32(TCGv_i32 d, TCGv_i32 a, TCGv_i32 b)
{
    gen_helper_neon_mul_u8(a, a, b);
    gen_helper_neon_sub_u8(d, d, a);
}
6027
/* Expand VMLA for 16-bit elements packed in a 32-bit value: d += a * b
 * per lane.  Clobbers @a as scratch.
 */
static void gen_mla16_i32(TCGv_i32 d, TCGv_i32 a, TCGv_i32 b)
{
    gen_helper_neon_mul_u16(a, a, b);
    gen_helper_neon_add_u16(d, d, a);
}
6033
/* Expand VMLS for 16-bit elements packed in a 32-bit value: d -= a * b
 * per lane.  Clobbers @a as scratch.
 */
static void gen_mls16_i32(TCGv_i32 d, TCGv_i32 a, TCGv_i32 b)
{
    gen_helper_neon_mul_u16(a, a, b);
    gen_helper_neon_sub_u16(d, d, a);
}
6039
/* Expand VMLA for a single 32-bit element: d += a * b.  Clobbers @a. */
static void gen_mla32_i32(TCGv_i32 d, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_mul_i32(a, a, b);
    tcg_gen_add_i32(d, d, a);
}
6045
/* Expand VMLS for a single 32-bit element: d -= a * b.  Clobbers @a. */
static void gen_mls32_i32(TCGv_i32 d, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_mul_i32(a, a, b);
    tcg_gen_sub_i32(d, d, a);
}
6051
/* 64-bit multiply-accumulate: d += a * b.  Clobbers @a.  (64-bit MLA is
 * not a NEON encoding; see the note above the vecop lists below.)
 */
static void gen_mla64_i64(TCGv_i64 d, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_mul_i64(a, a, b);
    tcg_gen_add_i64(d, d, a);
}
6057
/* 64-bit multiply-subtract: d -= a * b.  Clobbers @a. */
static void gen_mls64_i64(TCGv_i64 d, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_mul_i64(a, a, b);
    tcg_gen_sub_i64(d, d, a);
}
6063
/* Host-vector multiply-accumulate: d += a * b per element.  Clobbers @a. */
static void gen_mla_vec(unsigned vece, TCGv_vec d, TCGv_vec a, TCGv_vec b)
{
    tcg_gen_mul_vec(vece, a, a, b);
    tcg_gen_add_vec(vece, d, d, a);
}
6069
/* Host-vector multiply-subtract: d -= a * b per element.  Clobbers @a. */
static void gen_mls_vec(unsigned vece, TCGv_vec d, TCGv_vec a, TCGv_vec b)
{
    tcg_gen_mul_vec(vece, a, a, b);
    tcg_gen_sub_vec(vece, d, d, a);
}
6075
6076/* Note that while NEON does not support VMLA and VMLS as 64-bit ops,
6077 * these tables are shared with AArch64 which does support them.
6078 */
Richard Henderson53229a72019-03-17 00:27:29 +00006079
/* Host vector opcodes the mla_op .fniv expander may emit (zero-terminated). */
static const TCGOpcode vecop_list_mla[] = {
    INDEX_op_mul_vec, INDEX_op_add_vec, 0
};
6083
/* Host vector opcodes the mls_op .fniv expander may emit (zero-terminated). */
static const TCGOpcode vecop_list_mls[] = {
    INDEX_op_mul_vec, INDEX_op_sub_vec, 0
};
6087
Richard Henderson4a7832b2018-10-24 07:50:19 +01006088const GVecGen3 mla_op[4] = {
6089 { .fni4 = gen_mla8_i32,
6090 .fniv = gen_mla_vec,
Richard Henderson4a7832b2018-10-24 07:50:19 +01006091 .load_dest = true,
Richard Henderson53229a72019-03-17 00:27:29 +00006092 .opt_opc = vecop_list_mla,
Richard Henderson4a7832b2018-10-24 07:50:19 +01006093 .vece = MO_8 },
6094 { .fni4 = gen_mla16_i32,
6095 .fniv = gen_mla_vec,
Richard Henderson4a7832b2018-10-24 07:50:19 +01006096 .load_dest = true,
Richard Henderson53229a72019-03-17 00:27:29 +00006097 .opt_opc = vecop_list_mla,
Richard Henderson4a7832b2018-10-24 07:50:19 +01006098 .vece = MO_16 },
6099 { .fni4 = gen_mla32_i32,
6100 .fniv = gen_mla_vec,
Richard Henderson4a7832b2018-10-24 07:50:19 +01006101 .load_dest = true,
Richard Henderson53229a72019-03-17 00:27:29 +00006102 .opt_opc = vecop_list_mla,
Richard Henderson4a7832b2018-10-24 07:50:19 +01006103 .vece = MO_32 },
6104 { .fni8 = gen_mla64_i64,
6105 .fniv = gen_mla_vec,
Richard Henderson4a7832b2018-10-24 07:50:19 +01006106 .prefer_i64 = TCG_TARGET_REG_BITS == 64,
6107 .load_dest = true,
Richard Henderson53229a72019-03-17 00:27:29 +00006108 .opt_opc = vecop_list_mla,
Richard Henderson4a7832b2018-10-24 07:50:19 +01006109 .vece = MO_64 },
6110};
6111
6112const GVecGen3 mls_op[4] = {
6113 { .fni4 = gen_mls8_i32,
6114 .fniv = gen_mls_vec,
Richard Henderson4a7832b2018-10-24 07:50:19 +01006115 .load_dest = true,
Richard Henderson53229a72019-03-17 00:27:29 +00006116 .opt_opc = vecop_list_mls,
Richard Henderson4a7832b2018-10-24 07:50:19 +01006117 .vece = MO_8 },
6118 { .fni4 = gen_mls16_i32,
6119 .fniv = gen_mls_vec,
Richard Henderson4a7832b2018-10-24 07:50:19 +01006120 .load_dest = true,
Richard Henderson53229a72019-03-17 00:27:29 +00006121 .opt_opc = vecop_list_mls,
Richard Henderson4a7832b2018-10-24 07:50:19 +01006122 .vece = MO_16 },
6123 { .fni4 = gen_mls32_i32,
6124 .fniv = gen_mls_vec,
Richard Henderson4a7832b2018-10-24 07:50:19 +01006125 .load_dest = true,
Richard Henderson53229a72019-03-17 00:27:29 +00006126 .opt_opc = vecop_list_mls,
Richard Henderson4a7832b2018-10-24 07:50:19 +01006127 .vece = MO_32 },
6128 { .fni8 = gen_mls64_i64,
6129 .fniv = gen_mls_vec,
Richard Henderson4a7832b2018-10-24 07:50:19 +01006130 .prefer_i64 = TCG_TARGET_REG_BITS == 64,
6131 .load_dest = true,
Richard Henderson53229a72019-03-17 00:27:29 +00006132 .opt_opc = vecop_list_mls,
Richard Henderson4a7832b2018-10-24 07:50:19 +01006133 .vece = MO_64 },
6134};
6135
Richard Hendersonea580fa2018-10-24 07:50:20 +01006136/* CMTST : test is "if (X & Y != 0)". */
/* CMTST, 32-bit element: d = (a & b) != 0 ? -1 : 0. */
static void gen_cmtst_i32(TCGv_i32 d, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_and_i32(d, a, b);
    tcg_gen_setcondi_i32(TCG_COND_NE, d, d, 0);
    /* setcond yields 0/1; negate to get the 0/all-ones mask. */
    tcg_gen_neg_i32(d, d);
}
6143
/* CMTST, 64-bit element: d = (a & b) != 0 ? -1 : 0.
 * External linkage: also referenced outside this file.
 */
void gen_cmtst_i64(TCGv_i64 d, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_and_i64(d, a, b);
    tcg_gen_setcondi_i64(TCG_COND_NE, d, d, 0);
    /* setcond yields 0/1; negate to get the 0/all-ones mask. */
    tcg_gen_neg_i64(d, d);
}
6150
/* CMTST, host-vector form: per element, d = (a & b) != 0 ? -1 : 0.
 * Clobbers @a (reused to hold the zero comparand).
 */
static void gen_cmtst_vec(unsigned vece, TCGv_vec d, TCGv_vec a, TCGv_vec b)
{
    tcg_gen_and_vec(vece, d, a, b);
    tcg_gen_dupi_vec(vece, a, 0);
    tcg_gen_cmp_vec(TCG_COND_NE, vece, d, d, a);
}
6157
/* Host vector opcodes the cmtst_op .fniv expander may emit (zero-terminated). */
static const TCGOpcode vecop_list_cmtst[] = { INDEX_op_cmp_vec, 0 };
6159
Richard Hendersonea580fa2018-10-24 07:50:20 +01006160const GVecGen3 cmtst_op[4] = {
6161 { .fni4 = gen_helper_neon_tst_u8,
6162 .fniv = gen_cmtst_vec,
Richard Henderson53229a72019-03-17 00:27:29 +00006163 .opt_opc = vecop_list_cmtst,
Richard Hendersonea580fa2018-10-24 07:50:20 +01006164 .vece = MO_8 },
6165 { .fni4 = gen_helper_neon_tst_u16,
6166 .fniv = gen_cmtst_vec,
Richard Henderson53229a72019-03-17 00:27:29 +00006167 .opt_opc = vecop_list_cmtst,
Richard Hendersonea580fa2018-10-24 07:50:20 +01006168 .vece = MO_16 },
6169 { .fni4 = gen_cmtst_i32,
6170 .fniv = gen_cmtst_vec,
Richard Henderson53229a72019-03-17 00:27:29 +00006171 .opt_opc = vecop_list_cmtst,
Richard Hendersonea580fa2018-10-24 07:50:20 +01006172 .vece = MO_32 },
6173 { .fni8 = gen_cmtst_i64,
6174 .fniv = gen_cmtst_vec,
6175 .prefer_i64 = TCG_TARGET_REG_BITS == 64,
Richard Henderson53229a72019-03-17 00:27:29 +00006176 .opt_opc = vecop_list_cmtst,
Richard Hendersonea580fa2018-10-24 07:50:20 +01006177 .vece = MO_64 },
6178};
6179
/* Unsigned saturating add that also accumulates the saturation flag:
 *   t = usadd(a, b);  sat |= per-lane mask of (wrapping a+b) != t
 * The wrapping sum is computed into a temp before @t is written, so the
 * inputs are still intact if @t aliases one of them.
 */
static void gen_uqadd_vec(unsigned vece, TCGv_vec t, TCGv_vec sat,
                          TCGv_vec a, TCGv_vec b)
{
    TCGv_vec x = tcg_temp_new_vec_matching(t);
    tcg_gen_add_vec(vece, x, a, b);
    tcg_gen_usadd_vec(vece, t, a, b);
    /* Lanes where the two sums differ have saturated. */
    tcg_gen_cmp_vec(TCG_COND_NE, vece, x, x, t);
    tcg_gen_or_vec(vece, sat, sat, x);
    tcg_temp_free_vec(x);
}
6190
/* Host vector opcodes the uqadd_op .fniv expander may emit (zero-terminated). */
static const TCGOpcode vecop_list_uqadd[] = {
    INDEX_op_usadd_vec, INDEX_op_cmp_vec, INDEX_op_add_vec, 0
};
6194
Richard Henderson89e68b52019-02-15 09:56:41 +00006195const GVecGen4 uqadd_op[4] = {
6196 { .fniv = gen_uqadd_vec,
6197 .fno = gen_helper_gvec_uqadd_b,
Richard Henderson89e68b52019-02-15 09:56:41 +00006198 .write_aofs = true,
Richard Henderson53229a72019-03-17 00:27:29 +00006199 .opt_opc = vecop_list_uqadd,
Richard Henderson89e68b52019-02-15 09:56:41 +00006200 .vece = MO_8 },
6201 { .fniv = gen_uqadd_vec,
6202 .fno = gen_helper_gvec_uqadd_h,
Richard Henderson89e68b52019-02-15 09:56:41 +00006203 .write_aofs = true,
Richard Henderson53229a72019-03-17 00:27:29 +00006204 .opt_opc = vecop_list_uqadd,
Richard Henderson89e68b52019-02-15 09:56:41 +00006205 .vece = MO_16 },
6206 { .fniv = gen_uqadd_vec,
6207 .fno = gen_helper_gvec_uqadd_s,
Richard Henderson89e68b52019-02-15 09:56:41 +00006208 .write_aofs = true,
Richard Henderson53229a72019-03-17 00:27:29 +00006209 .opt_opc = vecop_list_uqadd,
Richard Henderson89e68b52019-02-15 09:56:41 +00006210 .vece = MO_32 },
6211 { .fniv = gen_uqadd_vec,
6212 .fno = gen_helper_gvec_uqadd_d,
Richard Henderson89e68b52019-02-15 09:56:41 +00006213 .write_aofs = true,
Richard Henderson53229a72019-03-17 00:27:29 +00006214 .opt_opc = vecop_list_uqadd,
Richard Henderson89e68b52019-02-15 09:56:41 +00006215 .vece = MO_64 },
6216};
6217
/* Signed saturating add that also accumulates the saturation flag:
 *   t = ssadd(a, b);  sat |= per-lane mask of (wrapping a+b) != t
 * The wrapping sum is computed into a temp before @t is written, so the
 * inputs are still intact if @t aliases one of them.
 */
static void gen_sqadd_vec(unsigned vece, TCGv_vec t, TCGv_vec sat,
                          TCGv_vec a, TCGv_vec b)
{
    TCGv_vec x = tcg_temp_new_vec_matching(t);
    tcg_gen_add_vec(vece, x, a, b);
    tcg_gen_ssadd_vec(vece, t, a, b);
    /* Lanes where the two sums differ have saturated. */
    tcg_gen_cmp_vec(TCG_COND_NE, vece, x, x, t);
    tcg_gen_or_vec(vece, sat, sat, x);
    tcg_temp_free_vec(x);
}
6228
/* Host vector opcodes the sqadd_op .fniv expander may emit (zero-terminated). */
static const TCGOpcode vecop_list_sqadd[] = {
    INDEX_op_ssadd_vec, INDEX_op_cmp_vec, INDEX_op_add_vec, 0
};
6232
Richard Henderson89e68b52019-02-15 09:56:41 +00006233const GVecGen4 sqadd_op[4] = {
6234 { .fniv = gen_sqadd_vec,
6235 .fno = gen_helper_gvec_sqadd_b,
Richard Henderson53229a72019-03-17 00:27:29 +00006236 .opt_opc = vecop_list_sqadd,
Richard Henderson89e68b52019-02-15 09:56:41 +00006237 .write_aofs = true,
6238 .vece = MO_8 },
6239 { .fniv = gen_sqadd_vec,
6240 .fno = gen_helper_gvec_sqadd_h,
Richard Henderson53229a72019-03-17 00:27:29 +00006241 .opt_opc = vecop_list_sqadd,
Richard Henderson89e68b52019-02-15 09:56:41 +00006242 .write_aofs = true,
6243 .vece = MO_16 },
6244 { .fniv = gen_sqadd_vec,
6245 .fno = gen_helper_gvec_sqadd_s,
Richard Henderson53229a72019-03-17 00:27:29 +00006246 .opt_opc = vecop_list_sqadd,
Richard Henderson89e68b52019-02-15 09:56:41 +00006247 .write_aofs = true,
6248 .vece = MO_32 },
6249 { .fniv = gen_sqadd_vec,
6250 .fno = gen_helper_gvec_sqadd_d,
Richard Henderson53229a72019-03-17 00:27:29 +00006251 .opt_opc = vecop_list_sqadd,
Richard Henderson89e68b52019-02-15 09:56:41 +00006252 .write_aofs = true,
6253 .vece = MO_64 },
6254};
6255
/* Unsigned saturating subtract that also accumulates the saturation flag:
 *   t = ussub(a, b);  sat |= per-lane mask of (wrapping a-b) != t
 * The wrapping difference is computed into a temp before @t is written,
 * so the inputs are still intact if @t aliases one of them.
 */
static void gen_uqsub_vec(unsigned vece, TCGv_vec t, TCGv_vec sat,
                          TCGv_vec a, TCGv_vec b)
{
    TCGv_vec x = tcg_temp_new_vec_matching(t);
    tcg_gen_sub_vec(vece, x, a, b);
    tcg_gen_ussub_vec(vece, t, a, b);
    /* Lanes where the two differences differ have saturated. */
    tcg_gen_cmp_vec(TCG_COND_NE, vece, x, x, t);
    tcg_gen_or_vec(vece, sat, sat, x);
    tcg_temp_free_vec(x);
}
6266
/* Host vector opcodes the uqsub_op .fniv expander may emit (zero-terminated). */
static const TCGOpcode vecop_list_uqsub[] = {
    INDEX_op_ussub_vec, INDEX_op_cmp_vec, INDEX_op_sub_vec, 0
};
6270
Richard Henderson89e68b52019-02-15 09:56:41 +00006271const GVecGen4 uqsub_op[4] = {
6272 { .fniv = gen_uqsub_vec,
6273 .fno = gen_helper_gvec_uqsub_b,
Richard Henderson53229a72019-03-17 00:27:29 +00006274 .opt_opc = vecop_list_uqsub,
Richard Henderson89e68b52019-02-15 09:56:41 +00006275 .write_aofs = true,
6276 .vece = MO_8 },
6277 { .fniv = gen_uqsub_vec,
6278 .fno = gen_helper_gvec_uqsub_h,
Richard Henderson53229a72019-03-17 00:27:29 +00006279 .opt_opc = vecop_list_uqsub,
Richard Henderson89e68b52019-02-15 09:56:41 +00006280 .write_aofs = true,
6281 .vece = MO_16 },
6282 { .fniv = gen_uqsub_vec,
6283 .fno = gen_helper_gvec_uqsub_s,
Richard Henderson53229a72019-03-17 00:27:29 +00006284 .opt_opc = vecop_list_uqsub,
Richard Henderson89e68b52019-02-15 09:56:41 +00006285 .write_aofs = true,
6286 .vece = MO_32 },
6287 { .fniv = gen_uqsub_vec,
6288 .fno = gen_helper_gvec_uqsub_d,
Richard Henderson53229a72019-03-17 00:27:29 +00006289 .opt_opc = vecop_list_uqsub,
Richard Henderson89e68b52019-02-15 09:56:41 +00006290 .write_aofs = true,
6291 .vece = MO_64 },
6292};
6293
/* Signed saturating subtract that also accumulates the saturation flag:
 *   t = sssub(a, b);  sat |= per-lane mask of (wrapping a-b) != t
 * The wrapping difference is computed into a temp before @t is written,
 * so the inputs are still intact if @t aliases one of them.
 */
static void gen_sqsub_vec(unsigned vece, TCGv_vec t, TCGv_vec sat,
                          TCGv_vec a, TCGv_vec b)
{
    TCGv_vec x = tcg_temp_new_vec_matching(t);
    tcg_gen_sub_vec(vece, x, a, b);
    tcg_gen_sssub_vec(vece, t, a, b);
    /* Lanes where the two differences differ have saturated. */
    tcg_gen_cmp_vec(TCG_COND_NE, vece, x, x, t);
    tcg_gen_or_vec(vece, sat, sat, x);
    tcg_temp_free_vec(x);
}
6304
/* Host vector opcodes the sqsub_op .fniv expander may emit (zero-terminated). */
static const TCGOpcode vecop_list_sqsub[] = {
    INDEX_op_sssub_vec, INDEX_op_cmp_vec, INDEX_op_sub_vec, 0
};
6308
Richard Henderson89e68b52019-02-15 09:56:41 +00006309const GVecGen4 sqsub_op[4] = {
6310 { .fniv = gen_sqsub_vec,
6311 .fno = gen_helper_gvec_sqsub_b,
Richard Henderson53229a72019-03-17 00:27:29 +00006312 .opt_opc = vecop_list_sqsub,
Richard Henderson89e68b52019-02-15 09:56:41 +00006313 .write_aofs = true,
6314 .vece = MO_8 },
6315 { .fniv = gen_sqsub_vec,
6316 .fno = gen_helper_gvec_sqsub_h,
Richard Henderson53229a72019-03-17 00:27:29 +00006317 .opt_opc = vecop_list_sqsub,
Richard Henderson89e68b52019-02-15 09:56:41 +00006318 .write_aofs = true,
6319 .vece = MO_16 },
6320 { .fniv = gen_sqsub_vec,
6321 .fno = gen_helper_gvec_sqsub_s,
Richard Henderson53229a72019-03-17 00:27:29 +00006322 .opt_opc = vecop_list_sqsub,
Richard Henderson89e68b52019-02-15 09:56:41 +00006323 .write_aofs = true,
6324 .vece = MO_32 },
6325 { .fniv = gen_sqsub_vec,
6326 .fno = gen_helper_gvec_sqsub_d,
Richard Henderson53229a72019-03-17 00:27:29 +00006327 .opt_opc = vecop_list_sqsub,
Richard Henderson89e68b52019-02-15 09:56:41 +00006328 .write_aofs = true,
6329 .vece = MO_64 },
6330};
6331
pbrook9ee6e8b2007-11-11 00:04:49 +00006332/* Translate a NEON data processing instruction. Return nonzero if the
6333 instruction is invalid.
pbrookad694712008-03-31 03:48:30 +00006334 We process data in a mixture of 32-bit and 64-bit chunks.
6335 Mostly we use 32-bit chunks so we can use normal scalar instructions. */
pbrook9ee6e8b2007-11-11 00:04:49 +00006336
Peter Maydell7dcc1f82014-10-28 19:24:03 +00006337static int disas_neon_data_insn(DisasContext *s, uint32_t insn)
pbrook9ee6e8b2007-11-11 00:04:49 +00006338{
6339 int op;
6340 int q;
Richard Hendersoneabcd6f2018-10-24 07:50:19 +01006341 int rd, rn, rm, rd_ofs, rn_ofs, rm_ofs;
pbrook9ee6e8b2007-11-11 00:04:49 +00006342 int size;
6343 int shift;
6344 int pass;
6345 int count;
6346 int pairwise;
6347 int u;
Richard Hendersoneabcd6f2018-10-24 07:50:19 +01006348 int vec_size;
Richard Hendersonf3cd8212018-10-24 07:50:19 +01006349 uint32_t imm;
Peter Maydell39d54922013-05-23 12:59:55 +01006350 TCGv_i32 tmp, tmp2, tmp3, tmp4, tmp5;
Richard Henderson1a66ac62018-01-25 11:45:28 +00006351 TCGv_ptr ptr1, ptr2, ptr3;
pbrooka7812ae2008-11-17 14:43:54 +00006352 TCGv_i64 tmp64;
pbrook9ee6e8b2007-11-11 00:04:49 +00006353
Peter Maydell2c7ffc42014-04-15 19:18:40 +01006354 /* FIXME: this access check should not take precedence over UNDEF
6355 * for invalid encodings; we will generate incorrect syndrome information
6356 * for attempts to execute invalid vfp/neon encodings with FP disabled.
6357 */
Greg Bellows9dbbc742015-05-29 11:28:53 +01006358 if (s->fp_excp_el) {
Peter Maydell2c7ffc42014-04-15 19:18:40 +01006359 gen_exception_insn(s, 4, EXCP_UDEF,
Peter Maydell4be42f42018-10-24 07:50:18 +01006360 syn_simd_access_trap(1, 0xe, false), s->fp_excp_el);
Peter Maydell2c7ffc42014-04-15 19:18:40 +01006361 return 0;
6362 }
6363
Peter Maydell5df8bac2011-01-14 20:39:19 +01006364 if (!s->vfp_enabled)
pbrook9ee6e8b2007-11-11 00:04:49 +00006365 return 1;
6366 q = (insn & (1 << 6)) != 0;
6367 u = (insn >> 24) & 1;
6368 VFP_DREG_D(rd, insn);
6369 VFP_DREG_N(rn, insn);
6370 VFP_DREG_M(rm, insn);
6371 size = (insn >> 20) & 3;
Richard Hendersoneabcd6f2018-10-24 07:50:19 +01006372 vec_size = q ? 16 : 8;
6373 rd_ofs = neon_reg_offset(rd, 0);
6374 rn_ofs = neon_reg_offset(rn, 0);
6375 rm_ofs = neon_reg_offset(rm, 0);
6376
pbrook9ee6e8b2007-11-11 00:04:49 +00006377 if ((insn & (1 << 23)) == 0) {
6378 /* Three register same length. */
6379 op = ((insn >> 7) & 0x1e) | ((insn >> 4) & 1);
Peter Maydell62698be2011-04-11 16:26:11 +01006380 /* Catch invalid op and bad size combinations: UNDEF */
6381 if ((neon_3r_sizes[op] & (1 << size)) == 0) {
6382 return 1;
6383 }
Peter Maydell25f84f72011-04-11 16:26:12 +01006384 /* All insns of this form UNDEF for either this condition or the
6385 * superset of cases "Q==1"; we catch the latter later.
6386 */
6387 if (q && ((rd | rn | rm) & 1)) {
6388 return 1;
6389 }
Richard Henderson36a71932018-03-02 10:45:42 +00006390 switch (op) {
6391 case NEON_3R_SHA:
6392 /* The SHA-1/SHA-256 3-register instructions require special
6393 * treatment here, as their size field is overloaded as an
6394 * op type selector, and they all consume their input in a
6395 * single pass.
6396 */
Ard Biesheuvelf1ecb912014-06-09 15:43:23 +01006397 if (!q) {
6398 return 1;
6399 }
6400 if (!u) { /* SHA-1 */
Richard Henderson962fcbf2018-10-24 07:50:16 +01006401 if (!dc_isar_feature(aa32_sha1, s)) {
Ard Biesheuvelf1ecb912014-06-09 15:43:23 +01006402 return 1;
6403 }
Richard Henderson1a66ac62018-01-25 11:45:28 +00006404 ptr1 = vfp_reg_ptr(true, rd);
6405 ptr2 = vfp_reg_ptr(true, rn);
6406 ptr3 = vfp_reg_ptr(true, rm);
Ard Biesheuvelf1ecb912014-06-09 15:43:23 +01006407 tmp4 = tcg_const_i32(size);
Richard Henderson1a66ac62018-01-25 11:45:28 +00006408 gen_helper_crypto_sha1_3reg(ptr1, ptr2, ptr3, tmp4);
Ard Biesheuvelf1ecb912014-06-09 15:43:23 +01006409 tcg_temp_free_i32(tmp4);
6410 } else { /* SHA-256 */
Richard Henderson962fcbf2018-10-24 07:50:16 +01006411 if (!dc_isar_feature(aa32_sha2, s) || size == 3) {
Ard Biesheuvelf1ecb912014-06-09 15:43:23 +01006412 return 1;
6413 }
Richard Henderson1a66ac62018-01-25 11:45:28 +00006414 ptr1 = vfp_reg_ptr(true, rd);
6415 ptr2 = vfp_reg_ptr(true, rn);
6416 ptr3 = vfp_reg_ptr(true, rm);
Ard Biesheuvelf1ecb912014-06-09 15:43:23 +01006417 switch (size) {
6418 case 0:
Richard Henderson1a66ac62018-01-25 11:45:28 +00006419 gen_helper_crypto_sha256h(ptr1, ptr2, ptr3);
Ard Biesheuvelf1ecb912014-06-09 15:43:23 +01006420 break;
6421 case 1:
Richard Henderson1a66ac62018-01-25 11:45:28 +00006422 gen_helper_crypto_sha256h2(ptr1, ptr2, ptr3);
Ard Biesheuvelf1ecb912014-06-09 15:43:23 +01006423 break;
6424 case 2:
Richard Henderson1a66ac62018-01-25 11:45:28 +00006425 gen_helper_crypto_sha256su1(ptr1, ptr2, ptr3);
Ard Biesheuvelf1ecb912014-06-09 15:43:23 +01006426 break;
6427 }
6428 }
Richard Henderson1a66ac62018-01-25 11:45:28 +00006429 tcg_temp_free_ptr(ptr1);
6430 tcg_temp_free_ptr(ptr2);
6431 tcg_temp_free_ptr(ptr3);
Ard Biesheuvelf1ecb912014-06-09 15:43:23 +01006432 return 0;
Richard Henderson36a71932018-03-02 10:45:42 +00006433
6434 case NEON_3R_VPADD_VQRDMLAH:
6435 if (!u) {
6436 break; /* VPADD */
6437 }
6438 /* VQRDMLAH */
6439 switch (size) {
6440 case 1:
6441 return do_v81_helper(s, gen_helper_gvec_qrdmlah_s16,
6442 q, rd, rn, rm);
6443 case 2:
6444 return do_v81_helper(s, gen_helper_gvec_qrdmlah_s32,
6445 q, rd, rn, rm);
6446 }
6447 return 1;
6448
6449 case NEON_3R_VFM_VQRDMLSH:
6450 if (!u) {
6451 /* VFM, VFMS */
6452 if (size == 1) {
6453 return 1;
6454 }
6455 break;
6456 }
6457 /* VQRDMLSH */
6458 switch (size) {
6459 case 1:
6460 return do_v81_helper(s, gen_helper_gvec_qrdmlsh_s16,
6461 q, rd, rn, rm);
6462 case 2:
6463 return do_v81_helper(s, gen_helper_gvec_qrdmlsh_s32,
6464 q, rd, rn, rm);
6465 }
6466 return 1;
Richard Hendersoneabcd6f2018-10-24 07:50:19 +01006467
6468 case NEON_3R_LOGIC: /* Logic ops. */
6469 switch ((u << 2) | size) {
6470 case 0: /* VAND */
6471 tcg_gen_gvec_and(0, rd_ofs, rn_ofs, rm_ofs,
6472 vec_size, vec_size);
6473 break;
6474 case 1: /* VBIC */
6475 tcg_gen_gvec_andc(0, rd_ofs, rn_ofs, rm_ofs,
6476 vec_size, vec_size);
6477 break;
Richard Henderson29008472019-02-15 09:56:39 +00006478 case 2: /* VORR */
6479 tcg_gen_gvec_or(0, rd_ofs, rn_ofs, rm_ofs,
6480 vec_size, vec_size);
Richard Hendersoneabcd6f2018-10-24 07:50:19 +01006481 break;
6482 case 3: /* VORN */
6483 tcg_gen_gvec_orc(0, rd_ofs, rn_ofs, rm_ofs,
6484 vec_size, vec_size);
6485 break;
6486 case 4: /* VEOR */
6487 tcg_gen_gvec_xor(0, rd_ofs, rn_ofs, rm_ofs,
6488 vec_size, vec_size);
6489 break;
6490 case 5: /* VBSL */
Richard Henderson3a7a2b42019-05-18 12:19:34 -07006491 tcg_gen_gvec_bitsel(MO_8, rd_ofs, rd_ofs, rn_ofs, rm_ofs,
6492 vec_size, vec_size);
Richard Hendersoneabcd6f2018-10-24 07:50:19 +01006493 break;
6494 case 6: /* VBIT */
Richard Henderson3a7a2b42019-05-18 12:19:34 -07006495 tcg_gen_gvec_bitsel(MO_8, rd_ofs, rm_ofs, rn_ofs, rd_ofs,
6496 vec_size, vec_size);
Richard Hendersoneabcd6f2018-10-24 07:50:19 +01006497 break;
6498 case 7: /* VBIF */
Richard Henderson3a7a2b42019-05-18 12:19:34 -07006499 tcg_gen_gvec_bitsel(MO_8, rd_ofs, rm_ofs, rd_ofs, rn_ofs,
6500 vec_size, vec_size);
Richard Hendersoneabcd6f2018-10-24 07:50:19 +01006501 break;
6502 }
6503 return 0;
Richard Hendersone4717ae2018-10-24 07:50:19 +01006504
6505 case NEON_3R_VADD_VSUB:
6506 if (u) {
6507 tcg_gen_gvec_sub(size, rd_ofs, rn_ofs, rm_ofs,
6508 vec_size, vec_size);
6509 } else {
6510 tcg_gen_gvec_add(size, rd_ofs, rn_ofs, rm_ofs,
6511 vec_size, vec_size);
6512 }
6513 return 0;
Richard Henderson82083182018-10-24 07:50:19 +01006514
Richard Henderson89e68b52019-02-15 09:56:41 +00006515 case NEON_3R_VQADD:
6516 tcg_gen_gvec_4(rd_ofs, offsetof(CPUARMState, vfp.qc),
6517 rn_ofs, rm_ofs, vec_size, vec_size,
6518 (u ? uqadd_op : sqadd_op) + size);
Alistair Francis2f143d32019-05-23 14:47:43 +01006519 return 0;
Richard Henderson89e68b52019-02-15 09:56:41 +00006520
6521 case NEON_3R_VQSUB:
6522 tcg_gen_gvec_4(rd_ofs, offsetof(CPUARMState, vfp.qc),
6523 rn_ofs, rm_ofs, vec_size, vec_size,
6524 (u ? uqsub_op : sqsub_op) + size);
Alistair Francis2f143d32019-05-23 14:47:43 +01006525 return 0;
Richard Henderson89e68b52019-02-15 09:56:41 +00006526
Richard Henderson82083182018-10-24 07:50:19 +01006527 case NEON_3R_VMUL: /* VMUL */
6528 if (u) {
6529 /* Polynomial case allows only P8 and is handled below. */
6530 if (size != 0) {
6531 return 1;
6532 }
6533 } else {
6534 tcg_gen_gvec_mul(size, rd_ofs, rn_ofs, rm_ofs,
6535 vec_size, vec_size);
6536 return 0;
6537 }
6538 break;
Richard Henderson4a7832b2018-10-24 07:50:19 +01006539
6540 case NEON_3R_VML: /* VMLA, VMLS */
6541 tcg_gen_gvec_3(rd_ofs, rn_ofs, rm_ofs, vec_size, vec_size,
6542 u ? &mls_op[size] : &mla_op[size]);
6543 return 0;
Richard Hendersonea580fa2018-10-24 07:50:20 +01006544
6545 case NEON_3R_VTST_VCEQ:
6546 if (u) { /* VCEQ */
6547 tcg_gen_gvec_cmp(TCG_COND_EQ, size, rd_ofs, rn_ofs, rm_ofs,
6548 vec_size, vec_size);
6549 } else { /* VTST */
6550 tcg_gen_gvec_3(rd_ofs, rn_ofs, rm_ofs,
6551 vec_size, vec_size, &cmtst_op[size]);
6552 }
6553 return 0;
6554
6555 case NEON_3R_VCGT:
6556 tcg_gen_gvec_cmp(u ? TCG_COND_GTU : TCG_COND_GT, size,
6557 rd_ofs, rn_ofs, rm_ofs, vec_size, vec_size);
6558 return 0;
6559
6560 case NEON_3R_VCGE:
6561 tcg_gen_gvec_cmp(u ? TCG_COND_GEU : TCG_COND_GE, size,
6562 rd_ofs, rn_ofs, rm_ofs, vec_size, vec_size);
6563 return 0;
Richard Henderson6f278222019-02-15 09:56:40 +00006564
6565 case NEON_3R_VMAX:
6566 if (u) {
6567 tcg_gen_gvec_umax(size, rd_ofs, rn_ofs, rm_ofs,
6568 vec_size, vec_size);
6569 } else {
6570 tcg_gen_gvec_smax(size, rd_ofs, rn_ofs, rm_ofs,
6571 vec_size, vec_size);
6572 }
6573 return 0;
6574 case NEON_3R_VMIN:
6575 if (u) {
6576 tcg_gen_gvec_umin(size, rd_ofs, rn_ofs, rm_ofs,
6577 vec_size, vec_size);
6578 } else {
6579 tcg_gen_gvec_smin(size, rd_ofs, rn_ofs, rm_ofs,
6580 vec_size, vec_size);
6581 }
6582 return 0;
Ard Biesheuvelf1ecb912014-06-09 15:43:23 +01006583 }
Richard Henderson4a7832b2018-10-24 07:50:19 +01006584
Richard Hendersoneabcd6f2018-10-24 07:50:19 +01006585 if (size == 3) {
Peter Maydell62698be2011-04-11 16:26:11 +01006586 /* 64-bit element instructions. */
pbrook9ee6e8b2007-11-11 00:04:49 +00006587 for (pass = 0; pass < (q ? 2 : 1); pass++) {
pbrookad694712008-03-31 03:48:30 +00006588 neon_load_reg64(cpu_V0, rn + pass);
6589 neon_load_reg64(cpu_V1, rm + pass);
pbrook9ee6e8b2007-11-11 00:04:49 +00006590 switch (op) {
Peter Maydell62698be2011-04-11 16:26:11 +01006591 case NEON_3R_VSHL:
pbrookad694712008-03-31 03:48:30 +00006592 if (u) {
6593 gen_helper_neon_shl_u64(cpu_V0, cpu_V1, cpu_V0);
6594 } else {
6595 gen_helper_neon_shl_s64(cpu_V0, cpu_V1, cpu_V0);
6596 }
6597 break;
Peter Maydell62698be2011-04-11 16:26:11 +01006598 case NEON_3R_VQSHL:
pbrookad694712008-03-31 03:48:30 +00006599 if (u) {
Peter Maydell02da0b22011-05-25 13:31:02 +00006600 gen_helper_neon_qshl_u64(cpu_V0, cpu_env,
6601 cpu_V1, cpu_V0);
pbrookad694712008-03-31 03:48:30 +00006602 } else {
Peter Maydell02da0b22011-05-25 13:31:02 +00006603 gen_helper_neon_qshl_s64(cpu_V0, cpu_env,
6604 cpu_V1, cpu_V0);
pbrookad694712008-03-31 03:48:30 +00006605 }
6606 break;
Peter Maydell62698be2011-04-11 16:26:11 +01006607 case NEON_3R_VRSHL:
pbrookad694712008-03-31 03:48:30 +00006608 if (u) {
6609 gen_helper_neon_rshl_u64(cpu_V0, cpu_V1, cpu_V0);
6610 } else {
6611 gen_helper_neon_rshl_s64(cpu_V0, cpu_V1, cpu_V0);
6612 }
6613 break;
Peter Maydell62698be2011-04-11 16:26:11 +01006614 case NEON_3R_VQRSHL:
pbrookad694712008-03-31 03:48:30 +00006615 if (u) {
Peter Maydell02da0b22011-05-25 13:31:02 +00006616 gen_helper_neon_qrshl_u64(cpu_V0, cpu_env,
6617 cpu_V1, cpu_V0);
pbrookad694712008-03-31 03:48:30 +00006618 } else {
Peter Maydell02da0b22011-05-25 13:31:02 +00006619 gen_helper_neon_qrshl_s64(cpu_V0, cpu_env,
6620 cpu_V1, cpu_V0);
pbrook9ee6e8b2007-11-11 00:04:49 +00006621 }
6622 break;
pbrook9ee6e8b2007-11-11 00:04:49 +00006623 default:
6624 abort();
6625 }
pbrookad694712008-03-31 03:48:30 +00006626 neon_store_reg64(cpu_V0, rd + pass);
pbrook9ee6e8b2007-11-11 00:04:49 +00006627 }
6628 return 0;
6629 }
Peter Maydell25f84f72011-04-11 16:26:12 +01006630 pairwise = 0;
pbrook9ee6e8b2007-11-11 00:04:49 +00006631 switch (op) {
Peter Maydell62698be2011-04-11 16:26:11 +01006632 case NEON_3R_VSHL:
6633 case NEON_3R_VQSHL:
6634 case NEON_3R_VRSHL:
6635 case NEON_3R_VQRSHL:
pbrook9ee6e8b2007-11-11 00:04:49 +00006636 {
pbrookad694712008-03-31 03:48:30 +00006637 int rtmp;
6638 /* Shift instruction operands are reversed. */
6639 rtmp = rn;
pbrook9ee6e8b2007-11-11 00:04:49 +00006640 rn = rm;
pbrookad694712008-03-31 03:48:30 +00006641 rm = rtmp;
pbrook9ee6e8b2007-11-11 00:04:49 +00006642 }
6643 break;
Richard Henderson36a71932018-03-02 10:45:42 +00006644 case NEON_3R_VPADD_VQRDMLAH:
Peter Maydell62698be2011-04-11 16:26:11 +01006645 case NEON_3R_VPMAX:
6646 case NEON_3R_VPMIN:
pbrook9ee6e8b2007-11-11 00:04:49 +00006647 pairwise = 1;
6648 break;
Peter Maydell25f84f72011-04-11 16:26:12 +01006649 case NEON_3R_FLOAT_ARITH:
6650 pairwise = (u && size < 2); /* if VPADD (float) */
pbrook9ee6e8b2007-11-11 00:04:49 +00006651 break;
Peter Maydell25f84f72011-04-11 16:26:12 +01006652 case NEON_3R_FLOAT_MINMAX:
6653 pairwise = u; /* if VPMIN/VPMAX (float) */
6654 break;
6655 case NEON_3R_FLOAT_CMP:
6656 if (!u && size) {
6657 /* no encoding for U=0 C=1x */
6658 return 1;
6659 }
6660 break;
6661 case NEON_3R_FLOAT_ACMP:
6662 if (!u) {
6663 return 1;
6664 }
6665 break;
Will Newton505935f2013-12-06 17:01:42 +00006666 case NEON_3R_FLOAT_MISC:
6667 /* VMAXNM/VMINNM in ARMv8 */
Peter Maydelld614a512014-10-28 19:24:01 +00006668 if (u && !arm_dc_feature(s, ARM_FEATURE_V8)) {
Peter Maydell25f84f72011-04-11 16:26:12 +01006669 return 1;
6670 }
6671 break;
Richard Henderson36a71932018-03-02 10:45:42 +00006672 case NEON_3R_VFM_VQRDMLSH:
6673 if (!arm_dc_feature(s, ARM_FEATURE_VFP4)) {
Peter Maydellda97f522011-10-19 16:14:07 +00006674 return 1;
6675 }
6676 break;
pbrook9ee6e8b2007-11-11 00:04:49 +00006677 default:
pbrook9ee6e8b2007-11-11 00:04:49 +00006678 break;
6679 }
Filip Navaradd8fbd72009-10-15 13:07:14 +02006680
Peter Maydell25f84f72011-04-11 16:26:12 +01006681 if (pairwise && q) {
6682 /* All the pairwise insns UNDEF if Q is set */
6683 return 1;
6684 }
6685
pbrook9ee6e8b2007-11-11 00:04:49 +00006686 for (pass = 0; pass < (q ? 4 : 2); pass++) {
6687
6688 if (pairwise) {
6689 /* Pairwise. */
Juha Riihimäkia5a14942011-04-11 16:26:13 +01006690 if (pass < 1) {
6691 tmp = neon_load_reg(rn, 0);
6692 tmp2 = neon_load_reg(rn, 1);
pbrook9ee6e8b2007-11-11 00:04:49 +00006693 } else {
Juha Riihimäkia5a14942011-04-11 16:26:13 +01006694 tmp = neon_load_reg(rm, 0);
6695 tmp2 = neon_load_reg(rm, 1);
pbrook9ee6e8b2007-11-11 00:04:49 +00006696 }
6697 } else {
6698 /* Elementwise. */
Filip Navaradd8fbd72009-10-15 13:07:14 +02006699 tmp = neon_load_reg(rn, pass);
6700 tmp2 = neon_load_reg(rm, pass);
pbrook9ee6e8b2007-11-11 00:04:49 +00006701 }
6702 switch (op) {
Peter Maydell62698be2011-04-11 16:26:11 +01006703 case NEON_3R_VHADD:
pbrook9ee6e8b2007-11-11 00:04:49 +00006704 GEN_NEON_INTEGER_OP(hadd);
6705 break;
Peter Maydell62698be2011-04-11 16:26:11 +01006706 case NEON_3R_VRHADD:
pbrook9ee6e8b2007-11-11 00:04:49 +00006707 GEN_NEON_INTEGER_OP(rhadd);
6708 break;
Peter Maydell62698be2011-04-11 16:26:11 +01006709 case NEON_3R_VHSUB:
pbrook9ee6e8b2007-11-11 00:04:49 +00006710 GEN_NEON_INTEGER_OP(hsub);
6711 break;
Peter Maydell62698be2011-04-11 16:26:11 +01006712 case NEON_3R_VSHL:
pbrookad694712008-03-31 03:48:30 +00006713 GEN_NEON_INTEGER_OP(shl);
pbrook9ee6e8b2007-11-11 00:04:49 +00006714 break;
Peter Maydell62698be2011-04-11 16:26:11 +01006715 case NEON_3R_VQSHL:
Peter Maydell02da0b22011-05-25 13:31:02 +00006716 GEN_NEON_INTEGER_OP_ENV(qshl);
pbrook9ee6e8b2007-11-11 00:04:49 +00006717 break;
Peter Maydell62698be2011-04-11 16:26:11 +01006718 case NEON_3R_VRSHL:
pbrookad694712008-03-31 03:48:30 +00006719 GEN_NEON_INTEGER_OP(rshl);
pbrook9ee6e8b2007-11-11 00:04:49 +00006720 break;
Peter Maydell62698be2011-04-11 16:26:11 +01006721 case NEON_3R_VQRSHL:
Peter Maydell02da0b22011-05-25 13:31:02 +00006722 GEN_NEON_INTEGER_OP_ENV(qrshl);
pbrook9ee6e8b2007-11-11 00:04:49 +00006723 break;
Peter Maydell62698be2011-04-11 16:26:11 +01006724 case NEON_3R_VABD:
pbrook9ee6e8b2007-11-11 00:04:49 +00006725 GEN_NEON_INTEGER_OP(abd);
6726 break;
Peter Maydell62698be2011-04-11 16:26:11 +01006727 case NEON_3R_VABA:
pbrook9ee6e8b2007-11-11 00:04:49 +00006728 GEN_NEON_INTEGER_OP(abd);
Peter Maydell7d1b0092011-03-06 21:39:54 +00006729 tcg_temp_free_i32(tmp2);
Filip Navaradd8fbd72009-10-15 13:07:14 +02006730 tmp2 = neon_load_reg(rd, pass);
6731 gen_neon_add(size, tmp, tmp2);
pbrook9ee6e8b2007-11-11 00:04:49 +00006732 break;
Peter Maydell62698be2011-04-11 16:26:11 +01006733 case NEON_3R_VMUL:
Richard Henderson82083182018-10-24 07:50:19 +01006734 /* VMUL.P8; other cases already eliminated. */
6735 gen_helper_neon_mul_p8(tmp, tmp, tmp2);
pbrook9ee6e8b2007-11-11 00:04:49 +00006736 break;
Peter Maydell62698be2011-04-11 16:26:11 +01006737 case NEON_3R_VPMAX:
pbrook9ee6e8b2007-11-11 00:04:49 +00006738 GEN_NEON_INTEGER_OP(pmax);
6739 break;
Peter Maydell62698be2011-04-11 16:26:11 +01006740 case NEON_3R_VPMIN:
pbrook9ee6e8b2007-11-11 00:04:49 +00006741 GEN_NEON_INTEGER_OP(pmin);
6742 break;
Peter Maydell62698be2011-04-11 16:26:11 +01006743 case NEON_3R_VQDMULH_VQRDMULH: /* Multiply high. */
pbrook9ee6e8b2007-11-11 00:04:49 +00006744 if (!u) { /* VQDMULH */
6745 switch (size) {
Peter Maydell02da0b22011-05-25 13:31:02 +00006746 case 1:
6747 gen_helper_neon_qdmulh_s16(tmp, cpu_env, tmp, tmp2);
6748 break;
6749 case 2:
6750 gen_helper_neon_qdmulh_s32(tmp, cpu_env, tmp, tmp2);
6751 break;
Peter Maydell62698be2011-04-11 16:26:11 +01006752 default: abort();
pbrook9ee6e8b2007-11-11 00:04:49 +00006753 }
Peter Maydell62698be2011-04-11 16:26:11 +01006754 } else { /* VQRDMULH */
pbrook9ee6e8b2007-11-11 00:04:49 +00006755 switch (size) {
Peter Maydell02da0b22011-05-25 13:31:02 +00006756 case 1:
6757 gen_helper_neon_qrdmulh_s16(tmp, cpu_env, tmp, tmp2);
6758 break;
6759 case 2:
6760 gen_helper_neon_qrdmulh_s32(tmp, cpu_env, tmp, tmp2);
6761 break;
Peter Maydell62698be2011-04-11 16:26:11 +01006762 default: abort();
pbrook9ee6e8b2007-11-11 00:04:49 +00006763 }
6764 }
6765 break;
Richard Henderson36a71932018-03-02 10:45:42 +00006766 case NEON_3R_VPADD_VQRDMLAH:
pbrook9ee6e8b2007-11-11 00:04:49 +00006767 switch (size) {
Filip Navaradd8fbd72009-10-15 13:07:14 +02006768 case 0: gen_helper_neon_padd_u8(tmp, tmp, tmp2); break;
6769 case 1: gen_helper_neon_padd_u16(tmp, tmp, tmp2); break;
6770 case 2: tcg_gen_add_i32(tmp, tmp, tmp2); break;
Peter Maydell62698be2011-04-11 16:26:11 +01006771 default: abort();
pbrook9ee6e8b2007-11-11 00:04:49 +00006772 }
6773 break;
Peter Maydell62698be2011-04-11 16:26:11 +01006774 case NEON_3R_FLOAT_ARITH: /* Floating point arithmetic. */
Peter Maydellaa47cfd2011-05-25 13:49:19 +00006775 {
6776 TCGv_ptr fpstatus = get_fpstatus_ptr(1);
pbrook9ee6e8b2007-11-11 00:04:49 +00006777 switch ((u << 2) | size) {
6778 case 0: /* VADD */
Peter Maydellaa47cfd2011-05-25 13:49:19 +00006779 case 4: /* VPADD */
6780 gen_helper_vfp_adds(tmp, tmp, tmp2, fpstatus);
pbrook9ee6e8b2007-11-11 00:04:49 +00006781 break;
6782 case 2: /* VSUB */
Peter Maydellaa47cfd2011-05-25 13:49:19 +00006783 gen_helper_vfp_subs(tmp, tmp, tmp2, fpstatus);
pbrook9ee6e8b2007-11-11 00:04:49 +00006784 break;
6785 case 6: /* VABD */
Peter Maydellaa47cfd2011-05-25 13:49:19 +00006786 gen_helper_neon_abd_f32(tmp, tmp, tmp2, fpstatus);
pbrook9ee6e8b2007-11-11 00:04:49 +00006787 break;
6788 default:
Peter Maydell62698be2011-04-11 16:26:11 +01006789 abort();
pbrook9ee6e8b2007-11-11 00:04:49 +00006790 }
Peter Maydellaa47cfd2011-05-25 13:49:19 +00006791 tcg_temp_free_ptr(fpstatus);
pbrook9ee6e8b2007-11-11 00:04:49 +00006792 break;
Peter Maydellaa47cfd2011-05-25 13:49:19 +00006793 }
Peter Maydell62698be2011-04-11 16:26:11 +01006794 case NEON_3R_FLOAT_MULTIPLY:
Peter Maydellaa47cfd2011-05-25 13:49:19 +00006795 {
6796 TCGv_ptr fpstatus = get_fpstatus_ptr(1);
6797 gen_helper_vfp_muls(tmp, tmp, tmp2, fpstatus);
pbrook9ee6e8b2007-11-11 00:04:49 +00006798 if (!u) {
Peter Maydell7d1b0092011-03-06 21:39:54 +00006799 tcg_temp_free_i32(tmp2);
Filip Navaradd8fbd72009-10-15 13:07:14 +02006800 tmp2 = neon_load_reg(rd, pass);
pbrook9ee6e8b2007-11-11 00:04:49 +00006801 if (size == 0) {
Peter Maydellaa47cfd2011-05-25 13:49:19 +00006802 gen_helper_vfp_adds(tmp, tmp, tmp2, fpstatus);
pbrook9ee6e8b2007-11-11 00:04:49 +00006803 } else {
Peter Maydellaa47cfd2011-05-25 13:49:19 +00006804 gen_helper_vfp_subs(tmp, tmp2, tmp, fpstatus);
pbrook9ee6e8b2007-11-11 00:04:49 +00006805 }
6806 }
Peter Maydellaa47cfd2011-05-25 13:49:19 +00006807 tcg_temp_free_ptr(fpstatus);
pbrook9ee6e8b2007-11-11 00:04:49 +00006808 break;
Peter Maydellaa47cfd2011-05-25 13:49:19 +00006809 }
Peter Maydell62698be2011-04-11 16:26:11 +01006810 case NEON_3R_FLOAT_CMP:
Peter Maydellaa47cfd2011-05-25 13:49:19 +00006811 {
6812 TCGv_ptr fpstatus = get_fpstatus_ptr(1);
pbrook9ee6e8b2007-11-11 00:04:49 +00006813 if (!u) {
Peter Maydellaa47cfd2011-05-25 13:49:19 +00006814 gen_helper_neon_ceq_f32(tmp, tmp, tmp2, fpstatus);
pbrook9ee6e8b2007-11-11 00:04:49 +00006815 } else {
Peter Maydellaa47cfd2011-05-25 13:49:19 +00006816 if (size == 0) {
6817 gen_helper_neon_cge_f32(tmp, tmp, tmp2, fpstatus);
6818 } else {
6819 gen_helper_neon_cgt_f32(tmp, tmp, tmp2, fpstatus);
6820 }
pbrook9ee6e8b2007-11-11 00:04:49 +00006821 }
Peter Maydellaa47cfd2011-05-25 13:49:19 +00006822 tcg_temp_free_ptr(fpstatus);
pbrook9ee6e8b2007-11-11 00:04:49 +00006823 break;
Peter Maydellaa47cfd2011-05-25 13:49:19 +00006824 }
Peter Maydell62698be2011-04-11 16:26:11 +01006825 case NEON_3R_FLOAT_ACMP:
Peter Maydellaa47cfd2011-05-25 13:49:19 +00006826 {
6827 TCGv_ptr fpstatus = get_fpstatus_ptr(1);
6828 if (size == 0) {
6829 gen_helper_neon_acge_f32(tmp, tmp, tmp2, fpstatus);
6830 } else {
6831 gen_helper_neon_acgt_f32(tmp, tmp, tmp2, fpstatus);
6832 }
6833 tcg_temp_free_ptr(fpstatus);
pbrook9ee6e8b2007-11-11 00:04:49 +00006834 break;
Peter Maydellaa47cfd2011-05-25 13:49:19 +00006835 }
Peter Maydell62698be2011-04-11 16:26:11 +01006836 case NEON_3R_FLOAT_MINMAX:
Peter Maydellaa47cfd2011-05-25 13:49:19 +00006837 {
6838 TCGv_ptr fpstatus = get_fpstatus_ptr(1);
6839 if (size == 0) {
Peter Maydellf71a2ae2014-01-04 22:15:49 +00006840 gen_helper_vfp_maxs(tmp, tmp, tmp2, fpstatus);
Peter Maydellaa47cfd2011-05-25 13:49:19 +00006841 } else {
Peter Maydellf71a2ae2014-01-04 22:15:49 +00006842 gen_helper_vfp_mins(tmp, tmp, tmp2, fpstatus);
Peter Maydellaa47cfd2011-05-25 13:49:19 +00006843 }
6844 tcg_temp_free_ptr(fpstatus);
pbrook9ee6e8b2007-11-11 00:04:49 +00006845 break;
Peter Maydellaa47cfd2011-05-25 13:49:19 +00006846 }
Will Newton505935f2013-12-06 17:01:42 +00006847 case NEON_3R_FLOAT_MISC:
6848 if (u) {
6849 /* VMAXNM/VMINNM */
6850 TCGv_ptr fpstatus = get_fpstatus_ptr(1);
6851 if (size == 0) {
Peter Maydellf71a2ae2014-01-04 22:15:49 +00006852 gen_helper_vfp_maxnums(tmp, tmp, tmp2, fpstatus);
Will Newton505935f2013-12-06 17:01:42 +00006853 } else {
Peter Maydellf71a2ae2014-01-04 22:15:49 +00006854 gen_helper_vfp_minnums(tmp, tmp, tmp2, fpstatus);
Will Newton505935f2013-12-06 17:01:42 +00006855 }
6856 tcg_temp_free_ptr(fpstatus);
6857 } else {
6858 if (size == 0) {
6859 gen_helper_recps_f32(tmp, tmp, tmp2, cpu_env);
6860 } else {
6861 gen_helper_rsqrts_f32(tmp, tmp, tmp2, cpu_env);
6862 }
6863 }
pbrook9ee6e8b2007-11-11 00:04:49 +00006864 break;
Richard Henderson36a71932018-03-02 10:45:42 +00006865 case NEON_3R_VFM_VQRDMLSH:
Peter Maydellda97f522011-10-19 16:14:07 +00006866 {
6867 /* VFMA, VFMS: fused multiply-add */
6868 TCGv_ptr fpstatus = get_fpstatus_ptr(1);
6869 TCGv_i32 tmp3 = neon_load_reg(rd, pass);
6870 if (size) {
6871 /* VFMS */
6872 gen_helper_vfp_negs(tmp, tmp);
6873 }
6874 gen_helper_vfp_muladds(tmp, tmp, tmp2, tmp3, fpstatus);
6875 tcg_temp_free_i32(tmp3);
6876 tcg_temp_free_ptr(fpstatus);
6877 break;
6878 }
pbrook9ee6e8b2007-11-11 00:04:49 +00006879 default:
6880 abort();
6881 }
Peter Maydell7d1b0092011-03-06 21:39:54 +00006882 tcg_temp_free_i32(tmp2);
Filip Navaradd8fbd72009-10-15 13:07:14 +02006883
pbrook9ee6e8b2007-11-11 00:04:49 +00006884 /* Save the result. For elementwise operations we can put it
6885 straight into the destination register. For pairwise operations
6886 we have to be careful to avoid clobbering the source operands. */
6887 if (pairwise && rd == rm) {
Filip Navaradd8fbd72009-10-15 13:07:14 +02006888 neon_store_scratch(pass, tmp);
pbrook9ee6e8b2007-11-11 00:04:49 +00006889 } else {
Filip Navaradd8fbd72009-10-15 13:07:14 +02006890 neon_store_reg(rd, pass, tmp);
pbrook9ee6e8b2007-11-11 00:04:49 +00006891 }
6892
6893 } /* for pass */
6894 if (pairwise && rd == rm) {
6895 for (pass = 0; pass < (q ? 4 : 2); pass++) {
Filip Navaradd8fbd72009-10-15 13:07:14 +02006896 tmp = neon_load_scratch(pass);
6897 neon_store_reg(rd, pass, tmp);
pbrook9ee6e8b2007-11-11 00:04:49 +00006898 }
6899 }
pbrookad694712008-03-31 03:48:30 +00006900 /* End of 3 register same size operations. */
pbrook9ee6e8b2007-11-11 00:04:49 +00006901 } else if (insn & (1 << 4)) {
6902 if ((insn & 0x00380080) != 0) {
6903 /* Two registers and shift. */
6904 op = (insn >> 8) & 0xf;
6905 if (insn & (1 << 7)) {
Peter Maydellcc131152011-04-11 16:26:14 +01006906 /* 64-bit shift. */
6907 if (op > 7) {
6908 return 1;
6909 }
pbrook9ee6e8b2007-11-11 00:04:49 +00006910 size = 3;
6911 } else {
6912 size = 2;
6913 while ((insn & (1 << (size + 19))) == 0)
6914 size--;
6915 }
6916 shift = (insn >> 16) & ((1 << (3 + size)) - 1);
pbrook9ee6e8b2007-11-11 00:04:49 +00006917 if (op < 8) {
6918 /* Shift by immediate:
6919 VSHR, VSRA, VRSHR, VRSRA, VSRI, VSHL, VQSHL, VQSHLU. */
Peter Maydellcc131152011-04-11 16:26:14 +01006920 if (q && ((rd | rm) & 1)) {
6921 return 1;
6922 }
6923 if (!u && (op == 4 || op == 6)) {
6924 return 1;
6925 }
pbrook9ee6e8b2007-11-11 00:04:49 +00006926 /* Right shifts are encoded as N - shift, where N is the
6927 element size in bits. */
Richard Henderson1dc84252018-10-24 07:50:19 +01006928 if (op <= 4) {
pbrook9ee6e8b2007-11-11 00:04:49 +00006929 shift = shift - (1 << (size + 3));
Richard Henderson1dc84252018-10-24 07:50:19 +01006930 }
6931
6932 switch (op) {
6933 case 0: /* VSHR */
6934 /* Right shift comes here negative. */
6935 shift = -shift;
6936 /* Shifts larger than the element size are architecturally
6937 * valid. Unsigned results in all zeros; signed results
6938 * in all sign bits.
6939 */
6940 if (!u) {
6941 tcg_gen_gvec_sari(size, rd_ofs, rm_ofs,
6942 MIN(shift, (8 << size) - 1),
6943 vec_size, vec_size);
6944 } else if (shift >= 8 << size) {
6945 tcg_gen_gvec_dup8i(rd_ofs, vec_size, vec_size, 0);
6946 } else {
6947 tcg_gen_gvec_shri(size, rd_ofs, rm_ofs, shift,
6948 vec_size, vec_size);
6949 }
6950 return 0;
6951
Richard Henderson41f6c112018-10-24 07:50:19 +01006952 case 1: /* VSRA */
6953 /* Right shift comes here negative. */
6954 shift = -shift;
6955 /* Shifts larger than the element size are architecturally
6956 * valid. Unsigned results in all zeros; signed results
6957 * in all sign bits.
6958 */
6959 if (!u) {
6960 tcg_gen_gvec_2i(rd_ofs, rm_ofs, vec_size, vec_size,
6961 MIN(shift, (8 << size) - 1),
6962 &ssra_op[size]);
6963 } else if (shift >= 8 << size) {
6964 /* rd += 0 */
6965 } else {
6966 tcg_gen_gvec_2i(rd_ofs, rm_ofs, vec_size, vec_size,
6967 shift, &usra_op[size]);
6968 }
6969 return 0;
6970
Richard Hendersonf3cd8212018-10-24 07:50:19 +01006971 case 4: /* VSRI */
6972 if (!u) {
6973 return 1;
6974 }
6975 /* Right shift comes here negative. */
6976 shift = -shift;
6977 /* Shift out of range leaves destination unchanged. */
6978 if (shift < 8 << size) {
6979 tcg_gen_gvec_2i(rd_ofs, rm_ofs, vec_size, vec_size,
6980 shift, &sri_op[size]);
6981 }
6982 return 0;
6983
Richard Henderson1dc84252018-10-24 07:50:19 +01006984 case 5: /* VSHL, VSLI */
Richard Hendersonf3cd8212018-10-24 07:50:19 +01006985 if (u) { /* VSLI */
6986 /* Shift out of range leaves destination unchanged. */
6987 if (shift < 8 << size) {
6988 tcg_gen_gvec_2i(rd_ofs, rm_ofs, vec_size,
6989 vec_size, shift, &sli_op[size]);
6990 }
6991 } else { /* VSHL */
Richard Henderson1dc84252018-10-24 07:50:19 +01006992 /* Shifts larger than the element size are
6993 * architecturally valid and results in zero.
6994 */
6995 if (shift >= 8 << size) {
6996 tcg_gen_gvec_dup8i(rd_ofs, vec_size, vec_size, 0);
6997 } else {
6998 tcg_gen_gvec_shli(size, rd_ofs, rm_ofs, shift,
6999 vec_size, vec_size);
7000 }
Richard Henderson1dc84252018-10-24 07:50:19 +01007001 }
Richard Hendersonf3cd8212018-10-24 07:50:19 +01007002 return 0;
Richard Henderson1dc84252018-10-24 07:50:19 +01007003 }
7004
pbrook9ee6e8b2007-11-11 00:04:49 +00007005 if (size == 3) {
7006 count = q + 1;
7007 } else {
7008 count = q ? 4: 2;
7009 }
Richard Henderson1dc84252018-10-24 07:50:19 +01007010
7011 /* To avoid excessive duplication of ops we implement shift
7012 * by immediate using the variable shift operations.
7013 */
7014 imm = dup_const(size, shift);
pbrook9ee6e8b2007-11-11 00:04:49 +00007015
7016 for (pass = 0; pass < count; pass++) {
pbrookad694712008-03-31 03:48:30 +00007017 if (size == 3) {
7018 neon_load_reg64(cpu_V0, rm + pass);
7019 tcg_gen_movi_i64(cpu_V1, imm);
7020 switch (op) {
pbrookad694712008-03-31 03:48:30 +00007021 case 2: /* VRSHR */
7022 case 3: /* VRSRA */
7023 if (u)
7024 gen_helper_neon_rshl_u64(cpu_V0, cpu_V0, cpu_V1);
7025 else
7026 gen_helper_neon_rshl_s64(cpu_V0, cpu_V0, cpu_V1);
7027 break;
Peter Maydell0322b262011-01-08 16:01:16 +00007028 case 6: /* VQSHLU */
Peter Maydell02da0b22011-05-25 13:31:02 +00007029 gen_helper_neon_qshlu_s64(cpu_V0, cpu_env,
7030 cpu_V0, cpu_V1);
pbrookad694712008-03-31 03:48:30 +00007031 break;
Peter Maydell0322b262011-01-08 16:01:16 +00007032 case 7: /* VQSHL */
7033 if (u) {
Peter Maydell02da0b22011-05-25 13:31:02 +00007034 gen_helper_neon_qshl_u64(cpu_V0, cpu_env,
Peter Maydell0322b262011-01-08 16:01:16 +00007035 cpu_V0, cpu_V1);
7036 } else {
Peter Maydell02da0b22011-05-25 13:31:02 +00007037 gen_helper_neon_qshl_s64(cpu_V0, cpu_env,
Peter Maydell0322b262011-01-08 16:01:16 +00007038 cpu_V0, cpu_V1);
7039 }
pbrookad694712008-03-31 03:48:30 +00007040 break;
Richard Henderson1dc84252018-10-24 07:50:19 +01007041 default:
7042 g_assert_not_reached();
pbrookad694712008-03-31 03:48:30 +00007043 }
Richard Henderson41f6c112018-10-24 07:50:19 +01007044 if (op == 3) {
pbrookad694712008-03-31 03:48:30 +00007045 /* Accumulate. */
Christophe Lyon5371cb82011-01-25 18:18:08 +01007046 neon_load_reg64(cpu_V1, rd + pass);
pbrookad694712008-03-31 03:48:30 +00007047 tcg_gen_add_i64(cpu_V0, cpu_V0, cpu_V1);
pbrookad694712008-03-31 03:48:30 +00007048 }
7049 neon_store_reg64(cpu_V0, rd + pass);
7050 } else { /* size < 3 */
pbrook9ee6e8b2007-11-11 00:04:49 +00007051 /* Operands in T0 and T1. */
Filip Navaradd8fbd72009-10-15 13:07:14 +02007052 tmp = neon_load_reg(rm, pass);
Peter Maydell7d1b0092011-03-06 21:39:54 +00007053 tmp2 = tcg_temp_new_i32();
Filip Navaradd8fbd72009-10-15 13:07:14 +02007054 tcg_gen_movi_i32(tmp2, imm);
pbrookad694712008-03-31 03:48:30 +00007055 switch (op) {
pbrookad694712008-03-31 03:48:30 +00007056 case 2: /* VRSHR */
7057 case 3: /* VRSRA */
7058 GEN_NEON_INTEGER_OP(rshl);
7059 break;
Peter Maydell0322b262011-01-08 16:01:16 +00007060 case 6: /* VQSHLU */
Peter Maydell0322b262011-01-08 16:01:16 +00007061 switch (size) {
7062 case 0:
Peter Maydell02da0b22011-05-25 13:31:02 +00007063 gen_helper_neon_qshlu_s8(tmp, cpu_env,
7064 tmp, tmp2);
Peter Maydell0322b262011-01-08 16:01:16 +00007065 break;
7066 case 1:
Peter Maydell02da0b22011-05-25 13:31:02 +00007067 gen_helper_neon_qshlu_s16(tmp, cpu_env,
7068 tmp, tmp2);
Peter Maydell0322b262011-01-08 16:01:16 +00007069 break;
7070 case 2:
Peter Maydell02da0b22011-05-25 13:31:02 +00007071 gen_helper_neon_qshlu_s32(tmp, cpu_env,
7072 tmp, tmp2);
Peter Maydell0322b262011-01-08 16:01:16 +00007073 break;
7074 default:
Peter Maydellcc131152011-04-11 16:26:14 +01007075 abort();
Peter Maydell0322b262011-01-08 16:01:16 +00007076 }
7077 break;
7078 case 7: /* VQSHL */
Peter Maydell02da0b22011-05-25 13:31:02 +00007079 GEN_NEON_INTEGER_OP_ENV(qshl);
pbrookad694712008-03-31 03:48:30 +00007080 break;
Richard Henderson1dc84252018-10-24 07:50:19 +01007081 default:
7082 g_assert_not_reached();
pbrookad694712008-03-31 03:48:30 +00007083 }
Peter Maydell7d1b0092011-03-06 21:39:54 +00007084 tcg_temp_free_i32(tmp2);
pbrook9ee6e8b2007-11-11 00:04:49 +00007085
Richard Henderson41f6c112018-10-24 07:50:19 +01007086 if (op == 3) {
pbrookad694712008-03-31 03:48:30 +00007087 /* Accumulate. */
Filip Navaradd8fbd72009-10-15 13:07:14 +02007088 tmp2 = neon_load_reg(rd, pass);
Christophe Lyon5371cb82011-01-25 18:18:08 +01007089 gen_neon_add(size, tmp, tmp2);
Peter Maydell7d1b0092011-03-06 21:39:54 +00007090 tcg_temp_free_i32(tmp2);
pbrook9ee6e8b2007-11-11 00:04:49 +00007091 }
Filip Navaradd8fbd72009-10-15 13:07:14 +02007092 neon_store_reg(rd, pass, tmp);
pbrook9ee6e8b2007-11-11 00:04:49 +00007093 }
7094 } /* for pass */
7095 } else if (op < 10) {
pbrookad694712008-03-31 03:48:30 +00007096 /* Shift by immediate and narrow:
pbrook9ee6e8b2007-11-11 00:04:49 +00007097 VSHRN, VRSHRN, VQSHRN, VQRSHRN. */
Christophe Lyon0b36f4c2011-02-15 13:44:47 +00007098 int input_unsigned = (op == 8) ? !u : u;
Peter Maydellcc131152011-04-11 16:26:14 +01007099 if (rm & 1) {
7100 return 1;
7101 }
pbrook9ee6e8b2007-11-11 00:04:49 +00007102 shift = shift - (1 << (size + 3));
7103 size++;
Peter Maydell92cdfae2011-02-21 11:05:22 +00007104 if (size == 3) {
pbrooka7812ae2008-11-17 14:43:54 +00007105 tmp64 = tcg_const_i64(shift);
Peter Maydell92cdfae2011-02-21 11:05:22 +00007106 neon_load_reg64(cpu_V0, rm);
7107 neon_load_reg64(cpu_V1, rm + 1);
7108 for (pass = 0; pass < 2; pass++) {
7109 TCGv_i64 in;
7110 if (pass == 0) {
7111 in = cpu_V0;
7112 } else {
7113 in = cpu_V1;
7114 }
pbrookad694712008-03-31 03:48:30 +00007115 if (q) {
Christophe Lyon0b36f4c2011-02-15 13:44:47 +00007116 if (input_unsigned) {
Peter Maydell92cdfae2011-02-21 11:05:22 +00007117 gen_helper_neon_rshl_u64(cpu_V0, in, tmp64);
Christophe Lyon0b36f4c2011-02-15 13:44:47 +00007118 } else {
Peter Maydell92cdfae2011-02-21 11:05:22 +00007119 gen_helper_neon_rshl_s64(cpu_V0, in, tmp64);
Christophe Lyon0b36f4c2011-02-15 13:44:47 +00007120 }
pbrook9ee6e8b2007-11-11 00:04:49 +00007121 } else {
Christophe Lyon0b36f4c2011-02-15 13:44:47 +00007122 if (input_unsigned) {
Peter Maydell92cdfae2011-02-21 11:05:22 +00007123 gen_helper_neon_shl_u64(cpu_V0, in, tmp64);
Christophe Lyon0b36f4c2011-02-15 13:44:47 +00007124 } else {
Peter Maydell92cdfae2011-02-21 11:05:22 +00007125 gen_helper_neon_shl_s64(cpu_V0, in, tmp64);
Christophe Lyon0b36f4c2011-02-15 13:44:47 +00007126 }
pbrook9ee6e8b2007-11-11 00:04:49 +00007127 }
Peter Maydell7d1b0092011-03-06 21:39:54 +00007128 tmp = tcg_temp_new_i32();
Peter Maydell92cdfae2011-02-21 11:05:22 +00007129 gen_neon_narrow_op(op == 8, u, size - 1, tmp, cpu_V0);
7130 neon_store_reg(rd, pass, tmp);
7131 } /* for pass */
7132 tcg_temp_free_i64(tmp64);
7133 } else {
7134 if (size == 1) {
7135 imm = (uint16_t)shift;
7136 imm |= imm << 16;
pbrookad694712008-03-31 03:48:30 +00007137 } else {
Peter Maydell92cdfae2011-02-21 11:05:22 +00007138 /* size == 2 */
7139 imm = (uint32_t)shift;
7140 }
7141 tmp2 = tcg_const_i32(imm);
7142 tmp4 = neon_load_reg(rm + 1, 0);
7143 tmp5 = neon_load_reg(rm + 1, 1);
7144 for (pass = 0; pass < 2; pass++) {
7145 if (pass == 0) {
7146 tmp = neon_load_reg(rm, 0);
7147 } else {
7148 tmp = tmp4;
7149 }
Christophe Lyon0b36f4c2011-02-15 13:44:47 +00007150 gen_neon_shift_narrow(size, tmp, tmp2, q,
7151 input_unsigned);
Peter Maydell92cdfae2011-02-21 11:05:22 +00007152 if (pass == 0) {
7153 tmp3 = neon_load_reg(rm, 1);
7154 } else {
7155 tmp3 = tmp5;
7156 }
Christophe Lyon0b36f4c2011-02-15 13:44:47 +00007157 gen_neon_shift_narrow(size, tmp3, tmp2, q,
7158 input_unsigned);
pbrook36aa55d2008-09-21 13:48:32 +00007159 tcg_gen_concat_i32_i64(cpu_V0, tmp, tmp3);
Peter Maydell7d1b0092011-03-06 21:39:54 +00007160 tcg_temp_free_i32(tmp);
7161 tcg_temp_free_i32(tmp3);
7162 tmp = tcg_temp_new_i32();
Peter Maydell92cdfae2011-02-21 11:05:22 +00007163 gen_neon_narrow_op(op == 8, u, size - 1, tmp, cpu_V0);
7164 neon_store_reg(rd, pass, tmp);
7165 } /* for pass */
Christophe Lyonc6067f02011-01-19 15:37:58 +01007166 tcg_temp_free_i32(tmp2);
Juha Riihimäkib75263d2009-10-22 15:17:36 +03007167 }
pbrook9ee6e8b2007-11-11 00:04:49 +00007168 } else if (op == 10) {
Peter Maydellcc131152011-04-11 16:26:14 +01007169 /* VSHLL, VMOVL */
7170 if (q || (rd & 1)) {
pbrook9ee6e8b2007-11-11 00:04:49 +00007171 return 1;
Peter Maydellcc131152011-04-11 16:26:14 +01007172 }
pbrookad694712008-03-31 03:48:30 +00007173 tmp = neon_load_reg(rm, 0);
7174 tmp2 = neon_load_reg(rm, 1);
pbrook9ee6e8b2007-11-11 00:04:49 +00007175 for (pass = 0; pass < 2; pass++) {
pbrookad694712008-03-31 03:48:30 +00007176 if (pass == 1)
7177 tmp = tmp2;
pbrook9ee6e8b2007-11-11 00:04:49 +00007178
pbrookad694712008-03-31 03:48:30 +00007179 gen_neon_widen(cpu_V0, tmp, size, u);
7180
pbrook9ee6e8b2007-11-11 00:04:49 +00007181 if (shift != 0) {
7182 /* The shift is less than the width of the source
pbrookad694712008-03-31 03:48:30 +00007183 type, so we can just shift the whole register. */
7184 tcg_gen_shli_i64(cpu_V0, cpu_V0, shift);
Christophe Lyonacdf01e2011-02-09 13:19:15 +01007185 /* Widen the result of shift: we need to clear
7186 * the potential overflow bits resulting from
7187 * left bits of the narrow input appearing as
7188 * right bits of left the neighbour narrow
7189 * input. */
pbrookad694712008-03-31 03:48:30 +00007190 if (size < 2 || !u) {
7191 uint64_t imm64;
7192 if (size == 0) {
7193 imm = (0xffu >> (8 - shift));
7194 imm |= imm << 16;
Christophe Lyonacdf01e2011-02-09 13:19:15 +01007195 } else if (size == 1) {
pbrookad694712008-03-31 03:48:30 +00007196 imm = 0xffff >> (16 - shift);
Christophe Lyonacdf01e2011-02-09 13:19:15 +01007197 } else {
7198 /* size == 2 */
7199 imm = 0xffffffff >> (32 - shift);
pbrook9ee6e8b2007-11-11 00:04:49 +00007200 }
Christophe Lyonacdf01e2011-02-09 13:19:15 +01007201 if (size < 2) {
7202 imm64 = imm | (((uint64_t)imm) << 32);
7203 } else {
7204 imm64 = imm;
7205 }
7206 tcg_gen_andi_i64(cpu_V0, cpu_V0, ~imm64);
pbrook9ee6e8b2007-11-11 00:04:49 +00007207 }
7208 }
pbrookad694712008-03-31 03:48:30 +00007209 neon_store_reg64(cpu_V0, rd + pass);
pbrook9ee6e8b2007-11-11 00:04:49 +00007210 }
Peter Maydellf73534a2010-12-07 15:37:34 +00007211 } else if (op >= 14) {
pbrook9ee6e8b2007-11-11 00:04:49 +00007212 /* VCVT fixed-point. */
Peter Maydellcc131152011-04-11 16:26:14 +01007213 if (!(insn & (1 << 21)) || (q && ((rd | rm) & 1))) {
7214 return 1;
7215 }
Peter Maydellf73534a2010-12-07 15:37:34 +00007216 /* We have already masked out the must-be-1 top bit of imm6,
7217 * hence this 32-shift where the ARM ARM has 64-imm6.
7218 */
7219 shift = 32 - shift;
pbrook9ee6e8b2007-11-11 00:04:49 +00007220 for (pass = 0; pass < (q ? 4 : 2); pass++) {
pbrook4373f3c2008-03-31 03:47:19 +00007221 tcg_gen_ld_f32(cpu_F0s, cpu_env, neon_reg_offset(rm, pass));
Peter Maydellf73534a2010-12-07 15:37:34 +00007222 if (!(op & 1)) {
pbrook9ee6e8b2007-11-11 00:04:49 +00007223 if (u)
Peter Maydell5500b062011-05-19 14:46:19 +01007224 gen_vfp_ulto(0, shift, 1);
pbrook9ee6e8b2007-11-11 00:04:49 +00007225 else
Peter Maydell5500b062011-05-19 14:46:19 +01007226 gen_vfp_slto(0, shift, 1);
pbrook9ee6e8b2007-11-11 00:04:49 +00007227 } else {
7228 if (u)
Peter Maydell5500b062011-05-19 14:46:19 +01007229 gen_vfp_toul(0, shift, 1);
pbrook9ee6e8b2007-11-11 00:04:49 +00007230 else
Peter Maydell5500b062011-05-19 14:46:19 +01007231 gen_vfp_tosl(0, shift, 1);
pbrook9ee6e8b2007-11-11 00:04:49 +00007232 }
pbrook4373f3c2008-03-31 03:47:19 +00007233 tcg_gen_st_f32(cpu_F0s, cpu_env, neon_reg_offset(rd, pass));
pbrook9ee6e8b2007-11-11 00:04:49 +00007234 }
7235 } else {
7236 return 1;
7237 }
7238 } else { /* (insn & 0x00380080) == 0 */
Richard Henderson246fa4a2018-10-24 07:50:19 +01007239 int invert, reg_ofs, vec_size;
7240
Peter Maydell7d80fee2011-04-11 16:26:16 +01007241 if (q && (rd & 1)) {
7242 return 1;
7243 }
pbrook9ee6e8b2007-11-11 00:04:49 +00007244
7245 op = (insn >> 8) & 0xf;
7246 /* One register and immediate. */
7247 imm = (u << 7) | ((insn >> 12) & 0x70) | (insn & 0xf);
7248 invert = (insn & (1 << 5)) != 0;
Peter Maydell7d80fee2011-04-11 16:26:16 +01007249 /* Note that op = 2,3,4,5,6,7,10,11,12,13 imm=0 is UNPREDICTABLE.
7250 * We choose to not special-case this and will behave as if a
7251 * valid constant encoding of 0 had been given.
7252 */
pbrook9ee6e8b2007-11-11 00:04:49 +00007253 switch (op) {
7254 case 0: case 1:
7255 /* no-op */
7256 break;
7257 case 2: case 3:
7258 imm <<= 8;
7259 break;
7260 case 4: case 5:
7261 imm <<= 16;
7262 break;
7263 case 6: case 7:
7264 imm <<= 24;
7265 break;
7266 case 8: case 9:
7267 imm |= imm << 16;
7268 break;
7269 case 10: case 11:
7270 imm = (imm << 8) | (imm << 24);
7271 break;
7272 case 12:
Juha Riihimäki8e312092010-03-26 16:06:55 +00007273 imm = (imm << 8) | 0xff;
pbrook9ee6e8b2007-11-11 00:04:49 +00007274 break;
7275 case 13:
7276 imm = (imm << 16) | 0xffff;
7277 break;
7278 case 14:
7279 imm |= (imm << 8) | (imm << 16) | (imm << 24);
Richard Henderson246fa4a2018-10-24 07:50:19 +01007280 if (invert) {
pbrook9ee6e8b2007-11-11 00:04:49 +00007281 imm = ~imm;
Richard Henderson246fa4a2018-10-24 07:50:19 +01007282 }
pbrook9ee6e8b2007-11-11 00:04:49 +00007283 break;
7284 case 15:
Peter Maydell7d80fee2011-04-11 16:26:16 +01007285 if (invert) {
7286 return 1;
7287 }
pbrook9ee6e8b2007-11-11 00:04:49 +00007288 imm = ((imm & 0x80) << 24) | ((imm & 0x3f) << 19)
7289 | ((imm & 0x40) ? (0x1f << 25) : (1 << 30));
7290 break;
7291 }
Richard Henderson246fa4a2018-10-24 07:50:19 +01007292 if (invert) {
pbrook9ee6e8b2007-11-11 00:04:49 +00007293 imm = ~imm;
Richard Henderson246fa4a2018-10-24 07:50:19 +01007294 }
pbrook9ee6e8b2007-11-11 00:04:49 +00007295
Richard Henderson246fa4a2018-10-24 07:50:19 +01007296 reg_ofs = neon_reg_offset(rd, 0);
7297 vec_size = q ? 16 : 8;
7298
7299 if (op & 1 && op < 12) {
7300 if (invert) {
7301 /* The immediate value has already been inverted,
7302 * so BIC becomes AND.
7303 */
7304 tcg_gen_gvec_andi(MO_32, reg_ofs, reg_ofs, imm,
7305 vec_size, vec_size);
pbrook9ee6e8b2007-11-11 00:04:49 +00007306 } else {
Richard Henderson246fa4a2018-10-24 07:50:19 +01007307 tcg_gen_gvec_ori(MO_32, reg_ofs, reg_ofs, imm,
7308 vec_size, vec_size);
pbrook9ee6e8b2007-11-11 00:04:49 +00007309 }
Richard Henderson246fa4a2018-10-24 07:50:19 +01007310 } else {
7311 /* VMOV, VMVN. */
7312 if (op == 14 && invert) {
7313 TCGv_i64 t64 = tcg_temp_new_i64();
7314
7315 for (pass = 0; pass <= q; ++pass) {
7316 uint64_t val = 0;
7317 int n;
7318
7319 for (n = 0; n < 8; n++) {
7320 if (imm & (1 << (n + pass * 8))) {
7321 val |= 0xffull << (n * 8);
7322 }
7323 }
7324 tcg_gen_movi_i64(t64, val);
7325 neon_store_reg64(t64, rd + pass);
7326 }
7327 tcg_temp_free_i64(t64);
7328 } else {
7329 tcg_gen_gvec_dup32i(reg_ofs, vec_size, vec_size, imm);
7330 }
pbrook9ee6e8b2007-11-11 00:04:49 +00007331 }
7332 }
pbrooke4b38612008-09-21 23:15:38 +00007333 } else { /* (insn & 0x00800010 == 0x00800000) */
pbrook9ee6e8b2007-11-11 00:04:49 +00007334 if (size != 3) {
7335 op = (insn >> 8) & 0xf;
7336 if ((insn & (1 << 6)) == 0) {
7337 /* Three registers of different lengths. */
7338 int src1_wide;
7339 int src2_wide;
7340 int prewiden;
Peter Maydell526d0092014-06-09 15:43:23 +01007341 /* undefreq: bit 0 : UNDEF if size == 0
7342 * bit 1 : UNDEF if size == 1
7343 * bit 2 : UNDEF if size == 2
7344 * bit 3 : UNDEF if U == 1
7345 * Note that [2:0] set implies 'always UNDEF'
Peter Maydell695272d2011-04-11 16:26:17 +01007346 */
7347 int undefreq;
7348 /* prewiden, src1_wide, src2_wide, undefreq */
7349 static const int neon_3reg_wide[16][4] = {
7350 {1, 0, 0, 0}, /* VADDL */
7351 {1, 1, 0, 0}, /* VADDW */
7352 {1, 0, 0, 0}, /* VSUBL */
7353 {1, 1, 0, 0}, /* VSUBW */
7354 {0, 1, 1, 0}, /* VADDHN */
7355 {0, 0, 0, 0}, /* VABAL */
7356 {0, 1, 1, 0}, /* VSUBHN */
7357 {0, 0, 0, 0}, /* VABDL */
7358 {0, 0, 0, 0}, /* VMLAL */
Peter Maydell526d0092014-06-09 15:43:23 +01007359 {0, 0, 0, 9}, /* VQDMLAL */
Peter Maydell695272d2011-04-11 16:26:17 +01007360 {0, 0, 0, 0}, /* VMLSL */
Peter Maydell526d0092014-06-09 15:43:23 +01007361 {0, 0, 0, 9}, /* VQDMLSL */
Peter Maydell695272d2011-04-11 16:26:17 +01007362 {0, 0, 0, 0}, /* Integer VMULL */
Peter Maydell526d0092014-06-09 15:43:23 +01007363 {0, 0, 0, 1}, /* VQDMULL */
Peter Maydell4e624ed2014-06-09 15:43:23 +01007364 {0, 0, 0, 0xa}, /* Polynomial VMULL */
Peter Maydell526d0092014-06-09 15:43:23 +01007365 {0, 0, 0, 7}, /* Reserved: always UNDEF */
pbrook9ee6e8b2007-11-11 00:04:49 +00007366 };
7367
7368 prewiden = neon_3reg_wide[op][0];
7369 src1_wide = neon_3reg_wide[op][1];
7370 src2_wide = neon_3reg_wide[op][2];
Peter Maydell695272d2011-04-11 16:26:17 +01007371 undefreq = neon_3reg_wide[op][3];
pbrook9ee6e8b2007-11-11 00:04:49 +00007372
Peter Maydell526d0092014-06-09 15:43:23 +01007373 if ((undefreq & (1 << size)) ||
7374 ((undefreq & 8) && u)) {
pbrookad694712008-03-31 03:48:30 +00007375 return 1;
Peter Maydell695272d2011-04-11 16:26:17 +01007376 }
7377 if ((src1_wide && (rn & 1)) ||
7378 (src2_wide && (rm & 1)) ||
7379 (!src2_wide && (rd & 1))) {
7380 return 1;
7381 }
pbrookad694712008-03-31 03:48:30 +00007382
Peter Maydell4e624ed2014-06-09 15:43:23 +01007383 /* Handle VMULL.P64 (Polynomial 64x64 to 128 bit multiply)
7384 * outside the loop below as it only performs a single pass.
7385 */
7386 if (op == 14 && size == 2) {
7387 TCGv_i64 tcg_rn, tcg_rm, tcg_rd;
7388
Richard Henderson962fcbf2018-10-24 07:50:16 +01007389 if (!dc_isar_feature(aa32_pmull, s)) {
Peter Maydell4e624ed2014-06-09 15:43:23 +01007390 return 1;
7391 }
7392 tcg_rn = tcg_temp_new_i64();
7393 tcg_rm = tcg_temp_new_i64();
7394 tcg_rd = tcg_temp_new_i64();
7395 neon_load_reg64(tcg_rn, rn);
7396 neon_load_reg64(tcg_rm, rm);
7397 gen_helper_neon_pmull_64_lo(tcg_rd, tcg_rn, tcg_rm);
7398 neon_store_reg64(tcg_rd, rd);
7399 gen_helper_neon_pmull_64_hi(tcg_rd, tcg_rn, tcg_rm);
7400 neon_store_reg64(tcg_rd, rd + 1);
7401 tcg_temp_free_i64(tcg_rn);
7402 tcg_temp_free_i64(tcg_rm);
7403 tcg_temp_free_i64(tcg_rd);
7404 return 0;
7405 }
7406
pbrook9ee6e8b2007-11-11 00:04:49 +00007407 /* Avoid overlapping operands. Wide source operands are
7408 always aligned so will never overlap with wide
7409 destinations in problematic ways. */
pbrook8f8e3aa2008-03-31 03:48:01 +00007410 if (rd == rm && !src2_wide) {
Filip Navaradd8fbd72009-10-15 13:07:14 +02007411 tmp = neon_load_reg(rm, 1);
7412 neon_store_scratch(2, tmp);
pbrook8f8e3aa2008-03-31 03:48:01 +00007413 } else if (rd == rn && !src1_wide) {
Filip Navaradd8fbd72009-10-15 13:07:14 +02007414 tmp = neon_load_reg(rn, 1);
7415 neon_store_scratch(2, tmp);
pbrook9ee6e8b2007-11-11 00:04:49 +00007416 }
Richard Hendersonf7647182017-11-02 12:47:37 +01007417 tmp3 = NULL;
pbrook9ee6e8b2007-11-11 00:04:49 +00007418 for (pass = 0; pass < 2; pass++) {
pbrook8f8e3aa2008-03-31 03:48:01 +00007419 if (src1_wide) {
pbrookad694712008-03-31 03:48:30 +00007420 neon_load_reg64(cpu_V0, rn + pass);
Richard Hendersonf7647182017-11-02 12:47:37 +01007421 tmp = NULL;
pbrook9ee6e8b2007-11-11 00:04:49 +00007422 } else {
pbrook8f8e3aa2008-03-31 03:48:01 +00007423 if (pass == 1 && rd == rn) {
Filip Navaradd8fbd72009-10-15 13:07:14 +02007424 tmp = neon_load_scratch(2);
pbrook9ee6e8b2007-11-11 00:04:49 +00007425 } else {
pbrookad694712008-03-31 03:48:30 +00007426 tmp = neon_load_reg(rn, pass);
7427 }
7428 if (prewiden) {
7429 gen_neon_widen(cpu_V0, tmp, size, u);
pbrook9ee6e8b2007-11-11 00:04:49 +00007430 }
7431 }
pbrookad694712008-03-31 03:48:30 +00007432 if (src2_wide) {
7433 neon_load_reg64(cpu_V1, rm + pass);
Richard Hendersonf7647182017-11-02 12:47:37 +01007434 tmp2 = NULL;
pbrookad694712008-03-31 03:48:30 +00007435 } else {
7436 if (pass == 1 && rd == rm) {
Filip Navaradd8fbd72009-10-15 13:07:14 +02007437 tmp2 = neon_load_scratch(2);
pbrookad694712008-03-31 03:48:30 +00007438 } else {
7439 tmp2 = neon_load_reg(rm, pass);
7440 }
7441 if (prewiden) {
7442 gen_neon_widen(cpu_V1, tmp2, size, u);
7443 }
pbrook9ee6e8b2007-11-11 00:04:49 +00007444 }
7445 switch (op) {
7446 case 0: case 1: case 4: /* VADDL, VADDW, VADDHN, VRADDHN */
pbrookad694712008-03-31 03:48:30 +00007447 gen_neon_addl(size);
pbrook9ee6e8b2007-11-11 00:04:49 +00007448 break;
Riku Voipio79b0e532010-02-05 15:52:28 +00007449 case 2: case 3: case 6: /* VSUBL, VSUBW, VSUBHN, VRSUBHN */
pbrookad694712008-03-31 03:48:30 +00007450 gen_neon_subl(size);
pbrook9ee6e8b2007-11-11 00:04:49 +00007451 break;
7452 case 5: case 7: /* VABAL, VABDL */
7453 switch ((size << 1) | u) {
pbrookad694712008-03-31 03:48:30 +00007454 case 0:
7455 gen_helper_neon_abdl_s16(cpu_V0, tmp, tmp2);
7456 break;
7457 case 1:
7458 gen_helper_neon_abdl_u16(cpu_V0, tmp, tmp2);
7459 break;
7460 case 2:
7461 gen_helper_neon_abdl_s32(cpu_V0, tmp, tmp2);
7462 break;
7463 case 3:
7464 gen_helper_neon_abdl_u32(cpu_V0, tmp, tmp2);
7465 break;
7466 case 4:
7467 gen_helper_neon_abdl_s64(cpu_V0, tmp, tmp2);
7468 break;
7469 case 5:
7470 gen_helper_neon_abdl_u64(cpu_V0, tmp, tmp2);
7471 break;
pbrook9ee6e8b2007-11-11 00:04:49 +00007472 default: abort();
7473 }
Peter Maydell7d1b0092011-03-06 21:39:54 +00007474 tcg_temp_free_i32(tmp2);
7475 tcg_temp_free_i32(tmp);
pbrook9ee6e8b2007-11-11 00:04:49 +00007476 break;
7477 case 8: case 9: case 10: case 11: case 12: case 13:
7478 /* VMLAL, VQDMLAL, VMLSL, VQDMLSL, VMULL, VQDMULL */
pbrookad694712008-03-31 03:48:30 +00007479 gen_neon_mull(cpu_V0, tmp, tmp2, size, u);
pbrook9ee6e8b2007-11-11 00:04:49 +00007480 break;
7481 case 14: /* Polynomial VMULL */
Peter Maydelle5ca24c2011-02-10 19:07:55 +00007482 gen_helper_neon_mull_p8(cpu_V0, tmp, tmp2);
Peter Maydell7d1b0092011-03-06 21:39:54 +00007483 tcg_temp_free_i32(tmp2);
7484 tcg_temp_free_i32(tmp);
Peter Maydelle5ca24c2011-02-10 19:07:55 +00007485 break;
Peter Maydell695272d2011-04-11 16:26:17 +01007486 default: /* 15 is RESERVED: caught earlier */
7487 abort();
pbrook9ee6e8b2007-11-11 00:04:49 +00007488 }
Peter Maydellebcd88c2011-02-11 12:26:47 +00007489 if (op == 13) {
7490 /* VQDMULL */
7491 gen_neon_addl_saturate(cpu_V0, cpu_V0, size);
7492 neon_store_reg64(cpu_V0, rd + pass);
7493 } else if (op == 5 || (op >= 8 && op <= 11)) {
pbrook9ee6e8b2007-11-11 00:04:49 +00007494 /* Accumulate. */
Peter Maydellebcd88c2011-02-11 12:26:47 +00007495 neon_load_reg64(cpu_V1, rd + pass);
pbrook9ee6e8b2007-11-11 00:04:49 +00007496 switch (op) {
Peter Maydell4dc064e2011-02-11 12:26:48 +00007497 case 10: /* VMLSL */
7498 gen_neon_negl(cpu_V0, size);
7499 /* Fall through */
7500 case 5: case 8: /* VABAL, VMLAL */
pbrookad694712008-03-31 03:48:30 +00007501 gen_neon_addl(size);
pbrook9ee6e8b2007-11-11 00:04:49 +00007502 break;
7503 case 9: case 11: /* VQDMLAL, VQDMLSL */
pbrookad694712008-03-31 03:48:30 +00007504 gen_neon_addl_saturate(cpu_V0, cpu_V0, size);
Peter Maydell4dc064e2011-02-11 12:26:48 +00007505 if (op == 11) {
7506 gen_neon_negl(cpu_V0, size);
7507 }
pbrookad694712008-03-31 03:48:30 +00007508 gen_neon_addl_saturate(cpu_V0, cpu_V1, size);
7509 break;
pbrook9ee6e8b2007-11-11 00:04:49 +00007510 default:
7511 abort();
7512 }
pbrookad694712008-03-31 03:48:30 +00007513 neon_store_reg64(cpu_V0, rd + pass);
pbrook9ee6e8b2007-11-11 00:04:49 +00007514 } else if (op == 4 || op == 6) {
7515 /* Narrowing operation. */
Peter Maydell7d1b0092011-03-06 21:39:54 +00007516 tmp = tcg_temp_new_i32();
Riku Voipio79b0e532010-02-05 15:52:28 +00007517 if (!u) {
pbrook9ee6e8b2007-11-11 00:04:49 +00007518 switch (size) {
pbrookad694712008-03-31 03:48:30 +00007519 case 0:
7520 gen_helper_neon_narrow_high_u8(tmp, cpu_V0);
7521 break;
7522 case 1:
7523 gen_helper_neon_narrow_high_u16(tmp, cpu_V0);
7524 break;
7525 case 2:
7526 tcg_gen_shri_i64(cpu_V0, cpu_V0, 32);
Richard Hendersonecc7b3a2015-07-24 11:49:53 -07007527 tcg_gen_extrl_i64_i32(tmp, cpu_V0);
pbrookad694712008-03-31 03:48:30 +00007528 break;
pbrook9ee6e8b2007-11-11 00:04:49 +00007529 default: abort();
7530 }
7531 } else {
7532 switch (size) {
pbrookad694712008-03-31 03:48:30 +00007533 case 0:
7534 gen_helper_neon_narrow_round_high_u8(tmp, cpu_V0);
7535 break;
7536 case 1:
7537 gen_helper_neon_narrow_round_high_u16(tmp, cpu_V0);
7538 break;
7539 case 2:
7540 tcg_gen_addi_i64(cpu_V0, cpu_V0, 1u << 31);
7541 tcg_gen_shri_i64(cpu_V0, cpu_V0, 32);
Richard Hendersonecc7b3a2015-07-24 11:49:53 -07007542 tcg_gen_extrl_i64_i32(tmp, cpu_V0);
pbrookad694712008-03-31 03:48:30 +00007543 break;
pbrook9ee6e8b2007-11-11 00:04:49 +00007544 default: abort();
7545 }
7546 }
pbrookad694712008-03-31 03:48:30 +00007547 if (pass == 0) {
7548 tmp3 = tmp;
7549 } else {
7550 neon_store_reg(rd, 0, tmp3);
7551 neon_store_reg(rd, 1, tmp);
7552 }
pbrook9ee6e8b2007-11-11 00:04:49 +00007553 } else {
7554 /* Write back the result. */
pbrookad694712008-03-31 03:48:30 +00007555 neon_store_reg64(cpu_V0, rd + pass);
pbrook9ee6e8b2007-11-11 00:04:49 +00007556 }
7557 }
7558 } else {
Peter Maydell3e3326d2011-04-11 16:26:18 +01007559 /* Two registers and a scalar. NB that for ops of this form
7560 * the ARM ARM labels bit 24 as Q, but it is in our variable
7561 * 'u', not 'q'.
7562 */
7563 if (size == 0) {
7564 return 1;
7565 }
pbrook9ee6e8b2007-11-11 00:04:49 +00007566 switch (op) {
pbrook9ee6e8b2007-11-11 00:04:49 +00007567 case 1: /* Float VMLA scalar */
pbrook9ee6e8b2007-11-11 00:04:49 +00007568 case 5: /* Floating point VMLS scalar */
pbrook9ee6e8b2007-11-11 00:04:49 +00007569 case 9: /* Floating point VMUL scalar */
Peter Maydell3e3326d2011-04-11 16:26:18 +01007570 if (size == 1) {
7571 return 1;
7572 }
7573 /* fall through */
7574 case 0: /* Integer VMLA scalar */
7575 case 4: /* Integer VMLS scalar */
7576 case 8: /* Integer VMUL scalar */
pbrook9ee6e8b2007-11-11 00:04:49 +00007577 case 12: /* VQDMULH scalar */
7578 case 13: /* VQRDMULH scalar */
Peter Maydell3e3326d2011-04-11 16:26:18 +01007579 if (u && ((rd | rn) & 1)) {
7580 return 1;
7581 }
Filip Navaradd8fbd72009-10-15 13:07:14 +02007582 tmp = neon_get_scalar(size, rm);
7583 neon_store_scratch(0, tmp);
pbrook9ee6e8b2007-11-11 00:04:49 +00007584 for (pass = 0; pass < (u ? 4 : 2); pass++) {
Filip Navaradd8fbd72009-10-15 13:07:14 +02007585 tmp = neon_load_scratch(0);
7586 tmp2 = neon_load_reg(rn, pass);
pbrook9ee6e8b2007-11-11 00:04:49 +00007587 if (op == 12) {
7588 if (size == 1) {
Peter Maydell02da0b22011-05-25 13:31:02 +00007589 gen_helper_neon_qdmulh_s16(tmp, cpu_env, tmp, tmp2);
pbrook9ee6e8b2007-11-11 00:04:49 +00007590 } else {
Peter Maydell02da0b22011-05-25 13:31:02 +00007591 gen_helper_neon_qdmulh_s32(tmp, cpu_env, tmp, tmp2);
pbrook9ee6e8b2007-11-11 00:04:49 +00007592 }
7593 } else if (op == 13) {
7594 if (size == 1) {
Peter Maydell02da0b22011-05-25 13:31:02 +00007595 gen_helper_neon_qrdmulh_s16(tmp, cpu_env, tmp, tmp2);
pbrook9ee6e8b2007-11-11 00:04:49 +00007596 } else {
Peter Maydell02da0b22011-05-25 13:31:02 +00007597 gen_helper_neon_qrdmulh_s32(tmp, cpu_env, tmp, tmp2);
pbrook9ee6e8b2007-11-11 00:04:49 +00007598 }
7599 } else if (op & 1) {
Peter Maydellaa47cfd2011-05-25 13:49:19 +00007600 TCGv_ptr fpstatus = get_fpstatus_ptr(1);
7601 gen_helper_vfp_muls(tmp, tmp, tmp2, fpstatus);
7602 tcg_temp_free_ptr(fpstatus);
pbrook9ee6e8b2007-11-11 00:04:49 +00007603 } else {
7604 switch (size) {
Filip Navaradd8fbd72009-10-15 13:07:14 +02007605 case 0: gen_helper_neon_mul_u8(tmp, tmp, tmp2); break;
7606 case 1: gen_helper_neon_mul_u16(tmp, tmp, tmp2); break;
7607 case 2: tcg_gen_mul_i32(tmp, tmp, tmp2); break;
Peter Maydell3e3326d2011-04-11 16:26:18 +01007608 default: abort();
pbrook9ee6e8b2007-11-11 00:04:49 +00007609 }
7610 }
Peter Maydell7d1b0092011-03-06 21:39:54 +00007611 tcg_temp_free_i32(tmp2);
pbrook9ee6e8b2007-11-11 00:04:49 +00007612 if (op < 8) {
7613 /* Accumulate. */
Filip Navaradd8fbd72009-10-15 13:07:14 +02007614 tmp2 = neon_load_reg(rd, pass);
pbrook9ee6e8b2007-11-11 00:04:49 +00007615 switch (op) {
7616 case 0:
Filip Navaradd8fbd72009-10-15 13:07:14 +02007617 gen_neon_add(size, tmp, tmp2);
pbrook9ee6e8b2007-11-11 00:04:49 +00007618 break;
7619 case 1:
Peter Maydellaa47cfd2011-05-25 13:49:19 +00007620 {
7621 TCGv_ptr fpstatus = get_fpstatus_ptr(1);
7622 gen_helper_vfp_adds(tmp, tmp, tmp2, fpstatus);
7623 tcg_temp_free_ptr(fpstatus);
pbrook9ee6e8b2007-11-11 00:04:49 +00007624 break;
Peter Maydellaa47cfd2011-05-25 13:49:19 +00007625 }
pbrook9ee6e8b2007-11-11 00:04:49 +00007626 case 4:
Filip Navaradd8fbd72009-10-15 13:07:14 +02007627 gen_neon_rsb(size, tmp, tmp2);
pbrook9ee6e8b2007-11-11 00:04:49 +00007628 break;
7629 case 5:
Peter Maydellaa47cfd2011-05-25 13:49:19 +00007630 {
7631 TCGv_ptr fpstatus = get_fpstatus_ptr(1);
7632 gen_helper_vfp_subs(tmp, tmp2, tmp, fpstatus);
7633 tcg_temp_free_ptr(fpstatus);
pbrook9ee6e8b2007-11-11 00:04:49 +00007634 break;
Peter Maydellaa47cfd2011-05-25 13:49:19 +00007635 }
pbrook9ee6e8b2007-11-11 00:04:49 +00007636 default:
7637 abort();
7638 }
Peter Maydell7d1b0092011-03-06 21:39:54 +00007639 tcg_temp_free_i32(tmp2);
pbrook9ee6e8b2007-11-11 00:04:49 +00007640 }
Filip Navaradd8fbd72009-10-15 13:07:14 +02007641 neon_store_reg(rd, pass, tmp);
pbrook9ee6e8b2007-11-11 00:04:49 +00007642 }
7643 break;
pbrook9ee6e8b2007-11-11 00:04:49 +00007644 case 3: /* VQDMLAL scalar */
pbrook9ee6e8b2007-11-11 00:04:49 +00007645 case 7: /* VQDMLSL scalar */
pbrook9ee6e8b2007-11-11 00:04:49 +00007646 case 11: /* VQDMULL scalar */
Peter Maydell3e3326d2011-04-11 16:26:18 +01007647 if (u == 1) {
pbrookad694712008-03-31 03:48:30 +00007648 return 1;
Peter Maydell3e3326d2011-04-11 16:26:18 +01007649 }
7650 /* fall through */
7651 case 2: /* VMLAL scalar */
7652 case 6: /* VMLSL scalar */
7653 case 10: /* VMULL scalar */
7654 if (rd & 1) {
7655 return 1;
7656 }
Filip Navaradd8fbd72009-10-15 13:07:14 +02007657 tmp2 = neon_get_scalar(size, rm);
Christophe Lyonc6067f02011-01-19 15:37:58 +01007658 /* We need a copy of tmp2 because gen_neon_mull
7659 * deletes it during pass 0. */
Peter Maydell7d1b0092011-03-06 21:39:54 +00007660 tmp4 = tcg_temp_new_i32();
Christophe Lyonc6067f02011-01-19 15:37:58 +01007661 tcg_gen_mov_i32(tmp4, tmp2);
Filip Navaradd8fbd72009-10-15 13:07:14 +02007662 tmp3 = neon_load_reg(rn, 1);
pbrookad694712008-03-31 03:48:30 +00007663
pbrook9ee6e8b2007-11-11 00:04:49 +00007664 for (pass = 0; pass < 2; pass++) {
pbrookad694712008-03-31 03:48:30 +00007665 if (pass == 0) {
7666 tmp = neon_load_reg(rn, 0);
pbrook9ee6e8b2007-11-11 00:04:49 +00007667 } else {
Filip Navaradd8fbd72009-10-15 13:07:14 +02007668 tmp = tmp3;
Christophe Lyonc6067f02011-01-19 15:37:58 +01007669 tmp2 = tmp4;
pbrook9ee6e8b2007-11-11 00:04:49 +00007670 }
pbrookad694712008-03-31 03:48:30 +00007671 gen_neon_mull(cpu_V0, tmp, tmp2, size, u);
pbrookad694712008-03-31 03:48:30 +00007672 if (op != 11) {
7673 neon_load_reg64(cpu_V1, rd + pass);
7674 }
pbrook9ee6e8b2007-11-11 00:04:49 +00007675 switch (op) {
Peter Maydell4dc064e2011-02-11 12:26:48 +00007676 case 6:
7677 gen_neon_negl(cpu_V0, size);
7678 /* Fall through */
7679 case 2:
pbrookad694712008-03-31 03:48:30 +00007680 gen_neon_addl(size);
pbrook9ee6e8b2007-11-11 00:04:49 +00007681 break;
7682 case 3: case 7:
pbrookad694712008-03-31 03:48:30 +00007683 gen_neon_addl_saturate(cpu_V0, cpu_V0, size);
Peter Maydell4dc064e2011-02-11 12:26:48 +00007684 if (op == 7) {
7685 gen_neon_negl(cpu_V0, size);
7686 }
pbrookad694712008-03-31 03:48:30 +00007687 gen_neon_addl_saturate(cpu_V0, cpu_V1, size);
pbrook9ee6e8b2007-11-11 00:04:49 +00007688 break;
7689 case 10:
7690 /* no-op */
7691 break;
7692 case 11:
pbrookad694712008-03-31 03:48:30 +00007693 gen_neon_addl_saturate(cpu_V0, cpu_V0, size);
pbrook9ee6e8b2007-11-11 00:04:49 +00007694 break;
7695 default:
7696 abort();
7697 }
pbrookad694712008-03-31 03:48:30 +00007698 neon_store_reg64(cpu_V0, rd + pass);
pbrook9ee6e8b2007-11-11 00:04:49 +00007699 }
7700 break;
Richard Henderson61adacc2018-03-02 10:45:42 +00007701 case 14: /* VQRDMLAH scalar */
7702 case 15: /* VQRDMLSH scalar */
7703 {
7704 NeonGenThreeOpEnvFn *fn;
7705
Richard Henderson962fcbf2018-10-24 07:50:16 +01007706 if (!dc_isar_feature(aa32_rdm, s)) {
Richard Henderson61adacc2018-03-02 10:45:42 +00007707 return 1;
7708 }
7709 if (u && ((rd | rn) & 1)) {
7710 return 1;
7711 }
7712 if (op == 14) {
7713 if (size == 1) {
7714 fn = gen_helper_neon_qrdmlah_s16;
7715 } else {
7716 fn = gen_helper_neon_qrdmlah_s32;
7717 }
7718 } else {
7719 if (size == 1) {
7720 fn = gen_helper_neon_qrdmlsh_s16;
7721 } else {
7722 fn = gen_helper_neon_qrdmlsh_s32;
7723 }
7724 }
7725
7726 tmp2 = neon_get_scalar(size, rm);
7727 for (pass = 0; pass < (u ? 4 : 2); pass++) {
7728 tmp = neon_load_reg(rn, pass);
7729 tmp3 = neon_load_reg(rd, pass);
7730 fn(tmp, cpu_env, tmp, tmp2, tmp3);
7731 tcg_temp_free_i32(tmp3);
7732 neon_store_reg(rd, pass, tmp);
7733 }
7734 tcg_temp_free_i32(tmp2);
7735 }
7736 break;
7737 default:
7738 g_assert_not_reached();
pbrook9ee6e8b2007-11-11 00:04:49 +00007739 }
7740 }
7741 } else { /* size == 3 */
7742 if (!u) {
7743 /* Extract. */
pbrook9ee6e8b2007-11-11 00:04:49 +00007744 imm = (insn >> 8) & 0xf;
pbrookad694712008-03-31 03:48:30 +00007745
7746 if (imm > 7 && !q)
7747 return 1;
7748
Peter Maydell52579ea2011-04-11 16:26:19 +01007749 if (q && ((rd | rn | rm) & 1)) {
7750 return 1;
7751 }
7752
pbrookad694712008-03-31 03:48:30 +00007753 if (imm == 0) {
7754 neon_load_reg64(cpu_V0, rn);
7755 if (q) {
7756 neon_load_reg64(cpu_V1, rn + 1);
pbrook9ee6e8b2007-11-11 00:04:49 +00007757 }
pbrookad694712008-03-31 03:48:30 +00007758 } else if (imm == 8) {
7759 neon_load_reg64(cpu_V0, rn + 1);
7760 if (q) {
7761 neon_load_reg64(cpu_V1, rm);
pbrook9ee6e8b2007-11-11 00:04:49 +00007762 }
pbrookad694712008-03-31 03:48:30 +00007763 } else if (q) {
pbrooka7812ae2008-11-17 14:43:54 +00007764 tmp64 = tcg_temp_new_i64();
pbrookad694712008-03-31 03:48:30 +00007765 if (imm < 8) {
7766 neon_load_reg64(cpu_V0, rn);
pbrooka7812ae2008-11-17 14:43:54 +00007767 neon_load_reg64(tmp64, rn + 1);
pbrook9ee6e8b2007-11-11 00:04:49 +00007768 } else {
pbrookad694712008-03-31 03:48:30 +00007769 neon_load_reg64(cpu_V0, rn + 1);
pbrooka7812ae2008-11-17 14:43:54 +00007770 neon_load_reg64(tmp64, rm);
pbrook9ee6e8b2007-11-11 00:04:49 +00007771 }
pbrookad694712008-03-31 03:48:30 +00007772 tcg_gen_shri_i64(cpu_V0, cpu_V0, (imm & 7) * 8);
pbrooka7812ae2008-11-17 14:43:54 +00007773 tcg_gen_shli_i64(cpu_V1, tmp64, 64 - ((imm & 7) * 8));
pbrookad694712008-03-31 03:48:30 +00007774 tcg_gen_or_i64(cpu_V0, cpu_V0, cpu_V1);
7775 if (imm < 8) {
7776 neon_load_reg64(cpu_V1, rm);
7777 } else {
7778 neon_load_reg64(cpu_V1, rm + 1);
7779 imm -= 8;
7780 }
7781 tcg_gen_shli_i64(cpu_V1, cpu_V1, 64 - (imm * 8));
pbrooka7812ae2008-11-17 14:43:54 +00007782 tcg_gen_shri_i64(tmp64, tmp64, imm * 8);
7783 tcg_gen_or_i64(cpu_V1, cpu_V1, tmp64);
Juha Riihimäkib75263d2009-10-22 15:17:36 +03007784 tcg_temp_free_i64(tmp64);
pbrookad694712008-03-31 03:48:30 +00007785 } else {
pbrooka7812ae2008-11-17 14:43:54 +00007786 /* BUGFIX */
pbrookad694712008-03-31 03:48:30 +00007787 neon_load_reg64(cpu_V0, rn);
pbrooka7812ae2008-11-17 14:43:54 +00007788 tcg_gen_shri_i64(cpu_V0, cpu_V0, imm * 8);
pbrookad694712008-03-31 03:48:30 +00007789 neon_load_reg64(cpu_V1, rm);
pbrooka7812ae2008-11-17 14:43:54 +00007790 tcg_gen_shli_i64(cpu_V1, cpu_V1, 64 - (imm * 8));
pbrookad694712008-03-31 03:48:30 +00007791 tcg_gen_or_i64(cpu_V0, cpu_V0, cpu_V1);
7792 }
7793 neon_store_reg64(cpu_V0, rd);
7794 if (q) {
7795 neon_store_reg64(cpu_V1, rd + 1);
pbrook9ee6e8b2007-11-11 00:04:49 +00007796 }
7797 } else if ((insn & (1 << 11)) == 0) {
7798 /* Two register misc. */
7799 op = ((insn >> 12) & 0x30) | ((insn >> 7) & 0xf);
7800 size = (insn >> 18) & 3;
Peter Maydell600b8282011-04-11 16:26:20 +01007801 /* UNDEF for unknown op values and bad op-size combinations */
7802 if ((neon_2rm_sizes[op] & (1 << size)) == 0) {
7803 return 1;
7804 }
Peter Maydellfe8fcf32016-06-14 15:59:15 +01007805 if (neon_2rm_is_v8_op(op) &&
7806 !arm_dc_feature(s, ARM_FEATURE_V8)) {
7807 return 1;
7808 }
Peter Maydellfc2a9b32011-04-11 16:26:21 +01007809 if ((op != NEON_2RM_VMOVN && op != NEON_2RM_VQMOVN) &&
7810 q && ((rm | rd) & 1)) {
7811 return 1;
7812 }
pbrook9ee6e8b2007-11-11 00:04:49 +00007813 switch (op) {
Peter Maydell600b8282011-04-11 16:26:20 +01007814 case NEON_2RM_VREV64:
pbrook9ee6e8b2007-11-11 00:04:49 +00007815 for (pass = 0; pass < (q ? 2 : 1); pass++) {
Filip Navaradd8fbd72009-10-15 13:07:14 +02007816 tmp = neon_load_reg(rm, pass * 2);
7817 tmp2 = neon_load_reg(rm, pass * 2 + 1);
pbrook9ee6e8b2007-11-11 00:04:49 +00007818 switch (size) {
Filip Navaradd8fbd72009-10-15 13:07:14 +02007819 case 0: tcg_gen_bswap32_i32(tmp, tmp); break;
7820 case 1: gen_swap_half(tmp); break;
pbrook9ee6e8b2007-11-11 00:04:49 +00007821 case 2: /* no-op */ break;
7822 default: abort();
7823 }
Filip Navaradd8fbd72009-10-15 13:07:14 +02007824 neon_store_reg(rd, pass * 2 + 1, tmp);
pbrook9ee6e8b2007-11-11 00:04:49 +00007825 if (size == 2) {
Filip Navaradd8fbd72009-10-15 13:07:14 +02007826 neon_store_reg(rd, pass * 2, tmp2);
pbrook9ee6e8b2007-11-11 00:04:49 +00007827 } else {
pbrook9ee6e8b2007-11-11 00:04:49 +00007828 switch (size) {
Filip Navaradd8fbd72009-10-15 13:07:14 +02007829 case 0: tcg_gen_bswap32_i32(tmp2, tmp2); break;
7830 case 1: gen_swap_half(tmp2); break;
pbrook9ee6e8b2007-11-11 00:04:49 +00007831 default: abort();
7832 }
Filip Navaradd8fbd72009-10-15 13:07:14 +02007833 neon_store_reg(rd, pass * 2, tmp2);
pbrook9ee6e8b2007-11-11 00:04:49 +00007834 }
7835 }
7836 break;
Peter Maydell600b8282011-04-11 16:26:20 +01007837 case NEON_2RM_VPADDL: case NEON_2RM_VPADDL_U:
7838 case NEON_2RM_VPADAL: case NEON_2RM_VPADAL_U:
pbrookad694712008-03-31 03:48:30 +00007839 for (pass = 0; pass < q + 1; pass++) {
7840 tmp = neon_load_reg(rm, pass * 2);
7841 gen_neon_widen(cpu_V0, tmp, size, op & 1);
7842 tmp = neon_load_reg(rm, pass * 2 + 1);
7843 gen_neon_widen(cpu_V1, tmp, size, op & 1);
7844 switch (size) {
7845 case 0: gen_helper_neon_paddl_u16(CPU_V001); break;
7846 case 1: gen_helper_neon_paddl_u32(CPU_V001); break;
7847 case 2: tcg_gen_add_i64(CPU_V001); break;
7848 default: abort();
7849 }
Peter Maydell600b8282011-04-11 16:26:20 +01007850 if (op >= NEON_2RM_VPADAL) {
pbrook9ee6e8b2007-11-11 00:04:49 +00007851 /* Accumulate. */
pbrookad694712008-03-31 03:48:30 +00007852 neon_load_reg64(cpu_V1, rd + pass);
7853 gen_neon_addl(size);
pbrook9ee6e8b2007-11-11 00:04:49 +00007854 }
pbrookad694712008-03-31 03:48:30 +00007855 neon_store_reg64(cpu_V0, rd + pass);
pbrook9ee6e8b2007-11-11 00:04:49 +00007856 }
7857 break;
Peter Maydell600b8282011-04-11 16:26:20 +01007858 case NEON_2RM_VTRN:
pbrook9ee6e8b2007-11-11 00:04:49 +00007859 if (size == 2) {
Juha Riihimäkia5a14942011-04-11 16:26:13 +01007860 int n;
pbrook9ee6e8b2007-11-11 00:04:49 +00007861 for (n = 0; n < (q ? 4 : 2); n += 2) {
Filip Navaradd8fbd72009-10-15 13:07:14 +02007862 tmp = neon_load_reg(rm, n);
7863 tmp2 = neon_load_reg(rd, n + 1);
7864 neon_store_reg(rm, n, tmp2);
7865 neon_store_reg(rd, n + 1, tmp);
pbrook9ee6e8b2007-11-11 00:04:49 +00007866 }
7867 } else {
7868 goto elementwise;
7869 }
7870 break;
Peter Maydell600b8282011-04-11 16:26:20 +01007871 case NEON_2RM_VUZP:
Peter Maydell02acedf2011-02-14 10:22:48 +00007872 if (gen_neon_unzip(rd, rm, size, q)) {
pbrook9ee6e8b2007-11-11 00:04:49 +00007873 return 1;
pbrook9ee6e8b2007-11-11 00:04:49 +00007874 }
7875 break;
Peter Maydell600b8282011-04-11 16:26:20 +01007876 case NEON_2RM_VZIP:
Peter Maydelld68a6f32011-02-14 10:22:49 +00007877 if (gen_neon_zip(rd, rm, size, q)) {
pbrook9ee6e8b2007-11-11 00:04:49 +00007878 return 1;
pbrook9ee6e8b2007-11-11 00:04:49 +00007879 }
7880 break;
Peter Maydell600b8282011-04-11 16:26:20 +01007881 case NEON_2RM_VMOVN: case NEON_2RM_VQMOVN:
7882 /* also VQMOVUN; op field and mnemonics don't line up */
Peter Maydellfc2a9b32011-04-11 16:26:21 +01007883 if (rm & 1) {
7884 return 1;
7885 }
Richard Hendersonf7647182017-11-02 12:47:37 +01007886 tmp2 = NULL;
pbrook9ee6e8b2007-11-11 00:04:49 +00007887 for (pass = 0; pass < 2; pass++) {
pbrookad694712008-03-31 03:48:30 +00007888 neon_load_reg64(cpu_V0, rm + pass);
Peter Maydell7d1b0092011-03-06 21:39:54 +00007889 tmp = tcg_temp_new_i32();
Peter Maydell600b8282011-04-11 16:26:20 +01007890 gen_neon_narrow_op(op == NEON_2RM_VMOVN, q, size,
7891 tmp, cpu_V0);
pbrookad694712008-03-31 03:48:30 +00007892 if (pass == 0) {
7893 tmp2 = tmp;
7894 } else {
7895 neon_store_reg(rd, 0, tmp2);
7896 neon_store_reg(rd, 1, tmp);
7897 }
pbrook9ee6e8b2007-11-11 00:04:49 +00007898 }
7899 break;
Peter Maydell600b8282011-04-11 16:26:20 +01007900 case NEON_2RM_VSHLL:
Peter Maydellfc2a9b32011-04-11 16:26:21 +01007901 if (q || (rd & 1)) {
pbrook9ee6e8b2007-11-11 00:04:49 +00007902 return 1;
Peter Maydell600b8282011-04-11 16:26:20 +01007903 }
pbrookad694712008-03-31 03:48:30 +00007904 tmp = neon_load_reg(rm, 0);
7905 tmp2 = neon_load_reg(rm, 1);
pbrook9ee6e8b2007-11-11 00:04:49 +00007906 for (pass = 0; pass < 2; pass++) {
pbrookad694712008-03-31 03:48:30 +00007907 if (pass == 1)
7908 tmp = tmp2;
7909 gen_neon_widen(cpu_V0, tmp, size, 1);
Juha Riihimäki30d11a22010-02-05 15:52:29 +00007910 tcg_gen_shli_i64(cpu_V0, cpu_V0, 8 << size);
pbrookad694712008-03-31 03:48:30 +00007911 neon_store_reg64(cpu_V0, rd + pass);
pbrook9ee6e8b2007-11-11 00:04:49 +00007912 }
7913 break;
Peter Maydell600b8282011-04-11 16:26:20 +01007914 case NEON_2RM_VCVT_F16_F32:
Alex Bennée486624f2018-05-07 13:17:16 +01007915 {
7916 TCGv_ptr fpst;
7917 TCGv_i32 ahp;
7918
Peter Maydell602f6e42019-02-28 10:55:16 +00007919 if (!dc_isar_feature(aa32_fp16_spconv, s) ||
Peter Maydellfc2a9b32011-04-11 16:26:21 +01007920 q || (rm & 1)) {
7921 return 1;
7922 }
Peter Maydell7d1b0092011-03-06 21:39:54 +00007923 tmp = tcg_temp_new_i32();
7924 tmp2 = tcg_temp_new_i32();
Alex Bennée486624f2018-05-07 13:17:16 +01007925 fpst = get_fpstatus_ptr(true);
7926 ahp = get_ahp_flag();
Paul Brook60011492009-11-19 16:45:20 +00007927 tcg_gen_ld_f32(cpu_F0s, cpu_env, neon_reg_offset(rm, 0));
Alex Bennée486624f2018-05-07 13:17:16 +01007928 gen_helper_vfp_fcvt_f32_to_f16(tmp, cpu_F0s, fpst, ahp);
Paul Brook60011492009-11-19 16:45:20 +00007929 tcg_gen_ld_f32(cpu_F0s, cpu_env, neon_reg_offset(rm, 1));
Alex Bennée486624f2018-05-07 13:17:16 +01007930 gen_helper_vfp_fcvt_f32_to_f16(tmp2, cpu_F0s, fpst, ahp);
Paul Brook60011492009-11-19 16:45:20 +00007931 tcg_gen_shli_i32(tmp2, tmp2, 16);
7932 tcg_gen_or_i32(tmp2, tmp2, tmp);
7933 tcg_gen_ld_f32(cpu_F0s, cpu_env, neon_reg_offset(rm, 2));
Alex Bennée486624f2018-05-07 13:17:16 +01007934 gen_helper_vfp_fcvt_f32_to_f16(tmp, cpu_F0s, fpst, ahp);
Paul Brook60011492009-11-19 16:45:20 +00007935 tcg_gen_ld_f32(cpu_F0s, cpu_env, neon_reg_offset(rm, 3));
7936 neon_store_reg(rd, 0, tmp2);
Peter Maydell7d1b0092011-03-06 21:39:54 +00007937 tmp2 = tcg_temp_new_i32();
Alex Bennée486624f2018-05-07 13:17:16 +01007938 gen_helper_vfp_fcvt_f32_to_f16(tmp2, cpu_F0s, fpst, ahp);
Paul Brook60011492009-11-19 16:45:20 +00007939 tcg_gen_shli_i32(tmp2, tmp2, 16);
7940 tcg_gen_or_i32(tmp2, tmp2, tmp);
7941 neon_store_reg(rd, 1, tmp2);
Peter Maydell7d1b0092011-03-06 21:39:54 +00007942 tcg_temp_free_i32(tmp);
Alex Bennée486624f2018-05-07 13:17:16 +01007943 tcg_temp_free_i32(ahp);
7944 tcg_temp_free_ptr(fpst);
Paul Brook60011492009-11-19 16:45:20 +00007945 break;
Alex Bennée486624f2018-05-07 13:17:16 +01007946 }
Peter Maydell600b8282011-04-11 16:26:20 +01007947 case NEON_2RM_VCVT_F32_F16:
Alex Bennée486624f2018-05-07 13:17:16 +01007948 {
7949 TCGv_ptr fpst;
7950 TCGv_i32 ahp;
Peter Maydell602f6e42019-02-28 10:55:16 +00007951 if (!dc_isar_feature(aa32_fp16_spconv, s) ||
Peter Maydellfc2a9b32011-04-11 16:26:21 +01007952 q || (rd & 1)) {
7953 return 1;
7954 }
Alex Bennée486624f2018-05-07 13:17:16 +01007955 fpst = get_fpstatus_ptr(true);
7956 ahp = get_ahp_flag();
Peter Maydell7d1b0092011-03-06 21:39:54 +00007957 tmp3 = tcg_temp_new_i32();
Paul Brook60011492009-11-19 16:45:20 +00007958 tmp = neon_load_reg(rm, 0);
7959 tmp2 = neon_load_reg(rm, 1);
7960 tcg_gen_ext16u_i32(tmp3, tmp);
Alex Bennée486624f2018-05-07 13:17:16 +01007961 gen_helper_vfp_fcvt_f16_to_f32(cpu_F0s, tmp3, fpst, ahp);
Paul Brook60011492009-11-19 16:45:20 +00007962 tcg_gen_st_f32(cpu_F0s, cpu_env, neon_reg_offset(rd, 0));
7963 tcg_gen_shri_i32(tmp3, tmp, 16);
Alex Bennée486624f2018-05-07 13:17:16 +01007964 gen_helper_vfp_fcvt_f16_to_f32(cpu_F0s, tmp3, fpst, ahp);
Paul Brook60011492009-11-19 16:45:20 +00007965 tcg_gen_st_f32(cpu_F0s, cpu_env, neon_reg_offset(rd, 1));
Peter Maydell7d1b0092011-03-06 21:39:54 +00007966 tcg_temp_free_i32(tmp);
Paul Brook60011492009-11-19 16:45:20 +00007967 tcg_gen_ext16u_i32(tmp3, tmp2);
Alex Bennée486624f2018-05-07 13:17:16 +01007968 gen_helper_vfp_fcvt_f16_to_f32(cpu_F0s, tmp3, fpst, ahp);
Paul Brook60011492009-11-19 16:45:20 +00007969 tcg_gen_st_f32(cpu_F0s, cpu_env, neon_reg_offset(rd, 2));
7970 tcg_gen_shri_i32(tmp3, tmp2, 16);
Alex Bennée486624f2018-05-07 13:17:16 +01007971 gen_helper_vfp_fcvt_f16_to_f32(cpu_F0s, tmp3, fpst, ahp);
Paul Brook60011492009-11-19 16:45:20 +00007972 tcg_gen_st_f32(cpu_F0s, cpu_env, neon_reg_offset(rd, 3));
Peter Maydell7d1b0092011-03-06 21:39:54 +00007973 tcg_temp_free_i32(tmp2);
7974 tcg_temp_free_i32(tmp3);
Alex Bennée486624f2018-05-07 13:17:16 +01007975 tcg_temp_free_i32(ahp);
7976 tcg_temp_free_ptr(fpst);
Paul Brook60011492009-11-19 16:45:20 +00007977 break;
Alex Bennée486624f2018-05-07 13:17:16 +01007978 }
Ard Biesheuvel9d935502013-12-17 19:42:25 +00007979 case NEON_2RM_AESE: case NEON_2RM_AESMC:
Richard Henderson962fcbf2018-10-24 07:50:16 +01007980 if (!dc_isar_feature(aa32_aes, s) || ((rm | rd) & 1)) {
Ard Biesheuvel9d935502013-12-17 19:42:25 +00007981 return 1;
7982 }
Richard Henderson1a66ac62018-01-25 11:45:28 +00007983 ptr1 = vfp_reg_ptr(true, rd);
7984 ptr2 = vfp_reg_ptr(true, rm);
Ard Biesheuvel9d935502013-12-17 19:42:25 +00007985
7986 /* Bit 6 is the lowest opcode bit; it distinguishes between
7987 * encryption (AESE/AESMC) and decryption (AESD/AESIMC)
7988 */
7989 tmp3 = tcg_const_i32(extract32(insn, 6, 1));
7990
7991 if (op == NEON_2RM_AESE) {
Richard Henderson1a66ac62018-01-25 11:45:28 +00007992 gen_helper_crypto_aese(ptr1, ptr2, tmp3);
Ard Biesheuvel9d935502013-12-17 19:42:25 +00007993 } else {
Richard Henderson1a66ac62018-01-25 11:45:28 +00007994 gen_helper_crypto_aesmc(ptr1, ptr2, tmp3);
Ard Biesheuvel9d935502013-12-17 19:42:25 +00007995 }
Richard Henderson1a66ac62018-01-25 11:45:28 +00007996 tcg_temp_free_ptr(ptr1);
7997 tcg_temp_free_ptr(ptr2);
Ard Biesheuvel9d935502013-12-17 19:42:25 +00007998 tcg_temp_free_i32(tmp3);
7999 break;
Ard Biesheuvelf1ecb912014-06-09 15:43:23 +01008000 case NEON_2RM_SHA1H:
Richard Henderson962fcbf2018-10-24 07:50:16 +01008001 if (!dc_isar_feature(aa32_sha1, s) || ((rm | rd) & 1)) {
Ard Biesheuvelf1ecb912014-06-09 15:43:23 +01008002 return 1;
8003 }
Richard Henderson1a66ac62018-01-25 11:45:28 +00008004 ptr1 = vfp_reg_ptr(true, rd);
8005 ptr2 = vfp_reg_ptr(true, rm);
Ard Biesheuvelf1ecb912014-06-09 15:43:23 +01008006
Richard Henderson1a66ac62018-01-25 11:45:28 +00008007 gen_helper_crypto_sha1h(ptr1, ptr2);
Ard Biesheuvelf1ecb912014-06-09 15:43:23 +01008008
Richard Henderson1a66ac62018-01-25 11:45:28 +00008009 tcg_temp_free_ptr(ptr1);
8010 tcg_temp_free_ptr(ptr2);
Ard Biesheuvelf1ecb912014-06-09 15:43:23 +01008011 break;
8012 case NEON_2RM_SHA1SU1:
8013 if ((rm | rd) & 1) {
8014 return 1;
8015 }
8016 /* bit 6 (q): set -> SHA256SU0, cleared -> SHA1SU1 */
8017 if (q) {
Richard Henderson962fcbf2018-10-24 07:50:16 +01008018 if (!dc_isar_feature(aa32_sha2, s)) {
Ard Biesheuvelf1ecb912014-06-09 15:43:23 +01008019 return 1;
8020 }
Richard Henderson962fcbf2018-10-24 07:50:16 +01008021 } else if (!dc_isar_feature(aa32_sha1, s)) {
Ard Biesheuvelf1ecb912014-06-09 15:43:23 +01008022 return 1;
8023 }
Richard Henderson1a66ac62018-01-25 11:45:28 +00008024 ptr1 = vfp_reg_ptr(true, rd);
8025 ptr2 = vfp_reg_ptr(true, rm);
Ard Biesheuvelf1ecb912014-06-09 15:43:23 +01008026 if (q) {
Richard Henderson1a66ac62018-01-25 11:45:28 +00008027 gen_helper_crypto_sha256su0(ptr1, ptr2);
Ard Biesheuvelf1ecb912014-06-09 15:43:23 +01008028 } else {
Richard Henderson1a66ac62018-01-25 11:45:28 +00008029 gen_helper_crypto_sha1su1(ptr1, ptr2);
Ard Biesheuvelf1ecb912014-06-09 15:43:23 +01008030 }
Richard Henderson1a66ac62018-01-25 11:45:28 +00008031 tcg_temp_free_ptr(ptr1);
8032 tcg_temp_free_ptr(ptr2);
Ard Biesheuvelf1ecb912014-06-09 15:43:23 +01008033 break;
Richard Henderson4bf940b2018-10-24 07:50:19 +01008034
8035 case NEON_2RM_VMVN:
8036 tcg_gen_gvec_not(0, rd_ofs, rm_ofs, vec_size, vec_size);
8037 break;
8038 case NEON_2RM_VNEG:
8039 tcg_gen_gvec_neg(size, rd_ofs, rm_ofs, vec_size, vec_size);
8040 break;
Richard Henderson4e027a72019-04-17 14:28:57 -10008041 case NEON_2RM_VABS:
8042 tcg_gen_gvec_abs(size, rd_ofs, rm_ofs, vec_size, vec_size);
8043 break;
Richard Henderson4bf940b2018-10-24 07:50:19 +01008044
pbrook9ee6e8b2007-11-11 00:04:49 +00008045 default:
8046 elementwise:
8047 for (pass = 0; pass < (q ? 4 : 2); pass++) {
Peter Maydell600b8282011-04-11 16:26:20 +01008048 if (neon_2rm_is_float_op(op)) {
pbrook4373f3c2008-03-31 03:47:19 +00008049 tcg_gen_ld_f32(cpu_F0s, cpu_env,
8050 neon_reg_offset(rm, pass));
Richard Hendersonf7647182017-11-02 12:47:37 +01008051 tmp = NULL;
pbrook9ee6e8b2007-11-11 00:04:49 +00008052 } else {
Filip Navaradd8fbd72009-10-15 13:07:14 +02008053 tmp = neon_load_reg(rm, pass);
pbrook9ee6e8b2007-11-11 00:04:49 +00008054 }
8055 switch (op) {
Peter Maydell600b8282011-04-11 16:26:20 +01008056 case NEON_2RM_VREV32:
pbrook9ee6e8b2007-11-11 00:04:49 +00008057 switch (size) {
Filip Navaradd8fbd72009-10-15 13:07:14 +02008058 case 0: tcg_gen_bswap32_i32(tmp, tmp); break;
8059 case 1: gen_swap_half(tmp); break;
Peter Maydell600b8282011-04-11 16:26:20 +01008060 default: abort();
pbrook9ee6e8b2007-11-11 00:04:49 +00008061 }
8062 break;
Peter Maydell600b8282011-04-11 16:26:20 +01008063 case NEON_2RM_VREV16:
Filip Navaradd8fbd72009-10-15 13:07:14 +02008064 gen_rev16(tmp);
pbrook9ee6e8b2007-11-11 00:04:49 +00008065 break;
Peter Maydell600b8282011-04-11 16:26:20 +01008066 case NEON_2RM_VCLS:
pbrook9ee6e8b2007-11-11 00:04:49 +00008067 switch (size) {
Filip Navaradd8fbd72009-10-15 13:07:14 +02008068 case 0: gen_helper_neon_cls_s8(tmp, tmp); break;
8069 case 1: gen_helper_neon_cls_s16(tmp, tmp); break;
8070 case 2: gen_helper_neon_cls_s32(tmp, tmp); break;
Peter Maydell600b8282011-04-11 16:26:20 +01008071 default: abort();
pbrook9ee6e8b2007-11-11 00:04:49 +00008072 }
8073 break;
Peter Maydell600b8282011-04-11 16:26:20 +01008074 case NEON_2RM_VCLZ:
pbrook9ee6e8b2007-11-11 00:04:49 +00008075 switch (size) {
Filip Navaradd8fbd72009-10-15 13:07:14 +02008076 case 0: gen_helper_neon_clz_u8(tmp, tmp); break;
8077 case 1: gen_helper_neon_clz_u16(tmp, tmp); break;
Richard Henderson7539a012016-11-16 11:49:06 +01008078 case 2: tcg_gen_clzi_i32(tmp, tmp, 32); break;
Peter Maydell600b8282011-04-11 16:26:20 +01008079 default: abort();
pbrook9ee6e8b2007-11-11 00:04:49 +00008080 }
8081 break;
Peter Maydell600b8282011-04-11 16:26:20 +01008082 case NEON_2RM_VCNT:
Filip Navaradd8fbd72009-10-15 13:07:14 +02008083 gen_helper_neon_cnt_u8(tmp, tmp);
pbrook9ee6e8b2007-11-11 00:04:49 +00008084 break;
Peter Maydell600b8282011-04-11 16:26:20 +01008085 case NEON_2RM_VQABS:
pbrook9ee6e8b2007-11-11 00:04:49 +00008086 switch (size) {
Peter Maydell02da0b22011-05-25 13:31:02 +00008087 case 0:
8088 gen_helper_neon_qabs_s8(tmp, cpu_env, tmp);
8089 break;
8090 case 1:
8091 gen_helper_neon_qabs_s16(tmp, cpu_env, tmp);
8092 break;
8093 case 2:
8094 gen_helper_neon_qabs_s32(tmp, cpu_env, tmp);
8095 break;
Peter Maydell600b8282011-04-11 16:26:20 +01008096 default: abort();
pbrook9ee6e8b2007-11-11 00:04:49 +00008097 }
8098 break;
Peter Maydell600b8282011-04-11 16:26:20 +01008099 case NEON_2RM_VQNEG:
pbrook9ee6e8b2007-11-11 00:04:49 +00008100 switch (size) {
Peter Maydell02da0b22011-05-25 13:31:02 +00008101 case 0:
8102 gen_helper_neon_qneg_s8(tmp, cpu_env, tmp);
8103 break;
8104 case 1:
8105 gen_helper_neon_qneg_s16(tmp, cpu_env, tmp);
8106 break;
8107 case 2:
8108 gen_helper_neon_qneg_s32(tmp, cpu_env, tmp);
8109 break;
Peter Maydell600b8282011-04-11 16:26:20 +01008110 default: abort();
pbrook9ee6e8b2007-11-11 00:04:49 +00008111 }
8112 break;
Peter Maydell600b8282011-04-11 16:26:20 +01008113 case NEON_2RM_VCGT0: case NEON_2RM_VCLE0:
Filip Navaradd8fbd72009-10-15 13:07:14 +02008114 tmp2 = tcg_const_i32(0);
pbrook9ee6e8b2007-11-11 00:04:49 +00008115 switch(size) {
Filip Navaradd8fbd72009-10-15 13:07:14 +02008116 case 0: gen_helper_neon_cgt_s8(tmp, tmp, tmp2); break;
8117 case 1: gen_helper_neon_cgt_s16(tmp, tmp, tmp2); break;
8118 case 2: gen_helper_neon_cgt_s32(tmp, tmp, tmp2); break;
Peter Maydell600b8282011-04-11 16:26:20 +01008119 default: abort();
pbrook9ee6e8b2007-11-11 00:04:49 +00008120 }
Peter Maydell39d54922013-05-23 12:59:55 +01008121 tcg_temp_free_i32(tmp2);
Peter Maydell600b8282011-04-11 16:26:20 +01008122 if (op == NEON_2RM_VCLE0) {
Filip Navaradd8fbd72009-10-15 13:07:14 +02008123 tcg_gen_not_i32(tmp, tmp);
Peter Maydell600b8282011-04-11 16:26:20 +01008124 }
pbrook9ee6e8b2007-11-11 00:04:49 +00008125 break;
Peter Maydell600b8282011-04-11 16:26:20 +01008126 case NEON_2RM_VCGE0: case NEON_2RM_VCLT0:
Filip Navaradd8fbd72009-10-15 13:07:14 +02008127 tmp2 = tcg_const_i32(0);
pbrook9ee6e8b2007-11-11 00:04:49 +00008128 switch(size) {
Filip Navaradd8fbd72009-10-15 13:07:14 +02008129 case 0: gen_helper_neon_cge_s8(tmp, tmp, tmp2); break;
8130 case 1: gen_helper_neon_cge_s16(tmp, tmp, tmp2); break;
8131 case 2: gen_helper_neon_cge_s32(tmp, tmp, tmp2); break;
Peter Maydell600b8282011-04-11 16:26:20 +01008132 default: abort();
pbrook9ee6e8b2007-11-11 00:04:49 +00008133 }
Peter Maydell39d54922013-05-23 12:59:55 +01008134 tcg_temp_free_i32(tmp2);
Peter Maydell600b8282011-04-11 16:26:20 +01008135 if (op == NEON_2RM_VCLT0) {
Filip Navaradd8fbd72009-10-15 13:07:14 +02008136 tcg_gen_not_i32(tmp, tmp);
Peter Maydell600b8282011-04-11 16:26:20 +01008137 }
pbrook9ee6e8b2007-11-11 00:04:49 +00008138 break;
Peter Maydell600b8282011-04-11 16:26:20 +01008139 case NEON_2RM_VCEQ0:
Filip Navaradd8fbd72009-10-15 13:07:14 +02008140 tmp2 = tcg_const_i32(0);
pbrook9ee6e8b2007-11-11 00:04:49 +00008141 switch(size) {
Filip Navaradd8fbd72009-10-15 13:07:14 +02008142 case 0: gen_helper_neon_ceq_u8(tmp, tmp, tmp2); break;
8143 case 1: gen_helper_neon_ceq_u16(tmp, tmp, tmp2); break;
8144 case 2: gen_helper_neon_ceq_u32(tmp, tmp, tmp2); break;
Peter Maydell600b8282011-04-11 16:26:20 +01008145 default: abort();
pbrook9ee6e8b2007-11-11 00:04:49 +00008146 }
Peter Maydell39d54922013-05-23 12:59:55 +01008147 tcg_temp_free_i32(tmp2);
pbrook9ee6e8b2007-11-11 00:04:49 +00008148 break;
Peter Maydell600b8282011-04-11 16:26:20 +01008149 case NEON_2RM_VCGT0_F:
Peter Maydellaa47cfd2011-05-25 13:49:19 +00008150 {
8151 TCGv_ptr fpstatus = get_fpstatus_ptr(1);
Filip Navaradd8fbd72009-10-15 13:07:14 +02008152 tmp2 = tcg_const_i32(0);
Peter Maydellaa47cfd2011-05-25 13:49:19 +00008153 gen_helper_neon_cgt_f32(tmp, tmp, tmp2, fpstatus);
Peter Maydell39d54922013-05-23 12:59:55 +01008154 tcg_temp_free_i32(tmp2);
Peter Maydellaa47cfd2011-05-25 13:49:19 +00008155 tcg_temp_free_ptr(fpstatus);
pbrook9ee6e8b2007-11-11 00:04:49 +00008156 break;
Peter Maydellaa47cfd2011-05-25 13:49:19 +00008157 }
Peter Maydell600b8282011-04-11 16:26:20 +01008158 case NEON_2RM_VCGE0_F:
Peter Maydellaa47cfd2011-05-25 13:49:19 +00008159 {
8160 TCGv_ptr fpstatus = get_fpstatus_ptr(1);
Filip Navaradd8fbd72009-10-15 13:07:14 +02008161 tmp2 = tcg_const_i32(0);
Peter Maydellaa47cfd2011-05-25 13:49:19 +00008162 gen_helper_neon_cge_f32(tmp, tmp, tmp2, fpstatus);
Peter Maydell39d54922013-05-23 12:59:55 +01008163 tcg_temp_free_i32(tmp2);
Peter Maydellaa47cfd2011-05-25 13:49:19 +00008164 tcg_temp_free_ptr(fpstatus);
pbrook9ee6e8b2007-11-11 00:04:49 +00008165 break;
Peter Maydellaa47cfd2011-05-25 13:49:19 +00008166 }
Peter Maydell600b8282011-04-11 16:26:20 +01008167 case NEON_2RM_VCEQ0_F:
Peter Maydellaa47cfd2011-05-25 13:49:19 +00008168 {
8169 TCGv_ptr fpstatus = get_fpstatus_ptr(1);
Filip Navaradd8fbd72009-10-15 13:07:14 +02008170 tmp2 = tcg_const_i32(0);
Peter Maydellaa47cfd2011-05-25 13:49:19 +00008171 gen_helper_neon_ceq_f32(tmp, tmp, tmp2, fpstatus);
Peter Maydell39d54922013-05-23 12:59:55 +01008172 tcg_temp_free_i32(tmp2);
Peter Maydellaa47cfd2011-05-25 13:49:19 +00008173 tcg_temp_free_ptr(fpstatus);
pbrook9ee6e8b2007-11-11 00:04:49 +00008174 break;
Peter Maydellaa47cfd2011-05-25 13:49:19 +00008175 }
Peter Maydell600b8282011-04-11 16:26:20 +01008176 case NEON_2RM_VCLE0_F:
Peter Maydellaa47cfd2011-05-25 13:49:19 +00008177 {
8178 TCGv_ptr fpstatus = get_fpstatus_ptr(1);
Peter Maydell0e326102011-03-11 08:12:23 +00008179 tmp2 = tcg_const_i32(0);
Peter Maydellaa47cfd2011-05-25 13:49:19 +00008180 gen_helper_neon_cge_f32(tmp, tmp2, tmp, fpstatus);
Peter Maydell39d54922013-05-23 12:59:55 +01008181 tcg_temp_free_i32(tmp2);
Peter Maydellaa47cfd2011-05-25 13:49:19 +00008182 tcg_temp_free_ptr(fpstatus);
Peter Maydell0e326102011-03-11 08:12:23 +00008183 break;
Peter Maydellaa47cfd2011-05-25 13:49:19 +00008184 }
Peter Maydell600b8282011-04-11 16:26:20 +01008185 case NEON_2RM_VCLT0_F:
Peter Maydellaa47cfd2011-05-25 13:49:19 +00008186 {
8187 TCGv_ptr fpstatus = get_fpstatus_ptr(1);
Peter Maydell0e326102011-03-11 08:12:23 +00008188 tmp2 = tcg_const_i32(0);
Peter Maydellaa47cfd2011-05-25 13:49:19 +00008189 gen_helper_neon_cgt_f32(tmp, tmp2, tmp, fpstatus);
Peter Maydell39d54922013-05-23 12:59:55 +01008190 tcg_temp_free_i32(tmp2);
Peter Maydellaa47cfd2011-05-25 13:49:19 +00008191 tcg_temp_free_ptr(fpstatus);
Peter Maydell0e326102011-03-11 08:12:23 +00008192 break;
Peter Maydellaa47cfd2011-05-25 13:49:19 +00008193 }
Peter Maydell600b8282011-04-11 16:26:20 +01008194 case NEON_2RM_VABS_F:
pbrook4373f3c2008-03-31 03:47:19 +00008195 gen_vfp_abs(0);
pbrook9ee6e8b2007-11-11 00:04:49 +00008196 break;
Peter Maydell600b8282011-04-11 16:26:20 +01008197 case NEON_2RM_VNEG_F:
pbrook4373f3c2008-03-31 03:47:19 +00008198 gen_vfp_neg(0);
pbrook9ee6e8b2007-11-11 00:04:49 +00008199 break;
Peter Maydell600b8282011-04-11 16:26:20 +01008200 case NEON_2RM_VSWP:
Filip Navaradd8fbd72009-10-15 13:07:14 +02008201 tmp2 = neon_load_reg(rd, pass);
8202 neon_store_reg(rm, pass, tmp2);
pbrook9ee6e8b2007-11-11 00:04:49 +00008203 break;
Peter Maydell600b8282011-04-11 16:26:20 +01008204 case NEON_2RM_VTRN:
Filip Navaradd8fbd72009-10-15 13:07:14 +02008205 tmp2 = neon_load_reg(rd, pass);
pbrook9ee6e8b2007-11-11 00:04:49 +00008206 switch (size) {
Filip Navaradd8fbd72009-10-15 13:07:14 +02008207 case 0: gen_neon_trn_u8(tmp, tmp2); break;
8208 case 1: gen_neon_trn_u16(tmp, tmp2); break;
Peter Maydell600b8282011-04-11 16:26:20 +01008209 default: abort();
pbrook9ee6e8b2007-11-11 00:04:49 +00008210 }
Filip Navaradd8fbd72009-10-15 13:07:14 +02008211 neon_store_reg(rm, pass, tmp2);
pbrook9ee6e8b2007-11-11 00:04:49 +00008212 break;
Will Newton34f7b0a2014-01-31 14:47:35 +00008213 case NEON_2RM_VRINTN:
8214 case NEON_2RM_VRINTA:
8215 case NEON_2RM_VRINTM:
8216 case NEON_2RM_VRINTP:
8217 case NEON_2RM_VRINTZ:
8218 {
8219 TCGv_i32 tcg_rmode;
8220 TCGv_ptr fpstatus = get_fpstatus_ptr(1);
8221 int rmode;
8222
8223 if (op == NEON_2RM_VRINTZ) {
8224 rmode = FPROUNDING_ZERO;
8225 } else {
8226 rmode = fp_decode_rm[((op & 0x6) >> 1) ^ 1];
8227 }
8228
8229 tcg_rmode = tcg_const_i32(arm_rmode_to_sf(rmode));
8230 gen_helper_set_neon_rmode(tcg_rmode, tcg_rmode,
8231 cpu_env);
8232 gen_helper_rints(cpu_F0s, cpu_F0s, fpstatus);
8233 gen_helper_set_neon_rmode(tcg_rmode, tcg_rmode,
8234 cpu_env);
8235 tcg_temp_free_ptr(fpstatus);
8236 tcg_temp_free_i32(tcg_rmode);
8237 break;
8238 }
Will Newton2ce70622014-01-31 14:47:34 +00008239 case NEON_2RM_VRINTX:
8240 {
8241 TCGv_ptr fpstatus = get_fpstatus_ptr(1);
8242 gen_helper_rints_exact(cpu_F0s, cpu_F0s, fpstatus);
8243 tcg_temp_free_ptr(fpstatus);
8244 break;
8245 }
Will Newton901ad522014-01-31 14:47:35 +00008246 case NEON_2RM_VCVTAU:
8247 case NEON_2RM_VCVTAS:
8248 case NEON_2RM_VCVTNU:
8249 case NEON_2RM_VCVTNS:
8250 case NEON_2RM_VCVTPU:
8251 case NEON_2RM_VCVTPS:
8252 case NEON_2RM_VCVTMU:
8253 case NEON_2RM_VCVTMS:
8254 {
8255 bool is_signed = !extract32(insn, 7, 1);
8256 TCGv_ptr fpst = get_fpstatus_ptr(1);
8257 TCGv_i32 tcg_rmode, tcg_shift;
8258 int rmode = fp_decode_rm[extract32(insn, 8, 2)];
8259
8260 tcg_shift = tcg_const_i32(0);
8261 tcg_rmode = tcg_const_i32(arm_rmode_to_sf(rmode));
8262 gen_helper_set_neon_rmode(tcg_rmode, tcg_rmode,
8263 cpu_env);
8264
8265 if (is_signed) {
8266 gen_helper_vfp_tosls(cpu_F0s, cpu_F0s,
8267 tcg_shift, fpst);
8268 } else {
8269 gen_helper_vfp_touls(cpu_F0s, cpu_F0s,
8270 tcg_shift, fpst);
8271 }
8272
8273 gen_helper_set_neon_rmode(tcg_rmode, tcg_rmode,
8274 cpu_env);
8275 tcg_temp_free_i32(tcg_rmode);
8276 tcg_temp_free_i32(tcg_shift);
8277 tcg_temp_free_ptr(fpst);
8278 break;
8279 }
Peter Maydell600b8282011-04-11 16:26:20 +01008280 case NEON_2RM_VRECPE:
Alex Bennéeb6d44432014-03-17 16:31:52 +00008281 {
8282 TCGv_ptr fpstatus = get_fpstatus_ptr(1);
8283 gen_helper_recpe_u32(tmp, tmp, fpstatus);
8284 tcg_temp_free_ptr(fpstatus);
pbrook9ee6e8b2007-11-11 00:04:49 +00008285 break;
Alex Bennéeb6d44432014-03-17 16:31:52 +00008286 }
Peter Maydell600b8282011-04-11 16:26:20 +01008287 case NEON_2RM_VRSQRTE:
Alex Bennéec2fb4182014-03-17 16:31:53 +00008288 {
8289 TCGv_ptr fpstatus = get_fpstatus_ptr(1);
8290 gen_helper_rsqrte_u32(tmp, tmp, fpstatus);
8291 tcg_temp_free_ptr(fpstatus);
pbrook9ee6e8b2007-11-11 00:04:49 +00008292 break;
Alex Bennéec2fb4182014-03-17 16:31:53 +00008293 }
Peter Maydell600b8282011-04-11 16:26:20 +01008294 case NEON_2RM_VRECPE_F:
Alex Bennéeb6d44432014-03-17 16:31:52 +00008295 {
8296 TCGv_ptr fpstatus = get_fpstatus_ptr(1);
8297 gen_helper_recpe_f32(cpu_F0s, cpu_F0s, fpstatus);
8298 tcg_temp_free_ptr(fpstatus);
pbrook9ee6e8b2007-11-11 00:04:49 +00008299 break;
Alex Bennéeb6d44432014-03-17 16:31:52 +00008300 }
Peter Maydell600b8282011-04-11 16:26:20 +01008301 case NEON_2RM_VRSQRTE_F:
Alex Bennéec2fb4182014-03-17 16:31:53 +00008302 {
8303 TCGv_ptr fpstatus = get_fpstatus_ptr(1);
8304 gen_helper_rsqrte_f32(cpu_F0s, cpu_F0s, fpstatus);
8305 tcg_temp_free_ptr(fpstatus);
pbrook9ee6e8b2007-11-11 00:04:49 +00008306 break;
Alex Bennéec2fb4182014-03-17 16:31:53 +00008307 }
Peter Maydell600b8282011-04-11 16:26:20 +01008308 case NEON_2RM_VCVT_FS: /* VCVT.F32.S32 */
Peter Maydell5500b062011-05-19 14:46:19 +01008309 gen_vfp_sito(0, 1);
pbrook9ee6e8b2007-11-11 00:04:49 +00008310 break;
Peter Maydell600b8282011-04-11 16:26:20 +01008311 case NEON_2RM_VCVT_FU: /* VCVT.F32.U32 */
Peter Maydell5500b062011-05-19 14:46:19 +01008312 gen_vfp_uito(0, 1);
pbrook9ee6e8b2007-11-11 00:04:49 +00008313 break;
Peter Maydell600b8282011-04-11 16:26:20 +01008314 case NEON_2RM_VCVT_SF: /* VCVT.S32.F32 */
Peter Maydell5500b062011-05-19 14:46:19 +01008315 gen_vfp_tosiz(0, 1);
Peter Maydelld3587ef2010-12-07 15:37:34 +00008316 break;
Peter Maydell600b8282011-04-11 16:26:20 +01008317 case NEON_2RM_VCVT_UF: /* VCVT.U32.F32 */
Peter Maydell5500b062011-05-19 14:46:19 +01008318 gen_vfp_touiz(0, 1);
Peter Maydelld3587ef2010-12-07 15:37:34 +00008319 break;
pbrook9ee6e8b2007-11-11 00:04:49 +00008320 default:
Peter Maydell600b8282011-04-11 16:26:20 +01008321 /* Reserved op values were caught by the
8322 * neon_2rm_sizes[] check earlier.
8323 */
8324 abort();
pbrook9ee6e8b2007-11-11 00:04:49 +00008325 }
Peter Maydell600b8282011-04-11 16:26:20 +01008326 if (neon_2rm_is_float_op(op)) {
pbrook4373f3c2008-03-31 03:47:19 +00008327 tcg_gen_st_f32(cpu_F0s, cpu_env,
8328 neon_reg_offset(rd, pass));
pbrook9ee6e8b2007-11-11 00:04:49 +00008329 } else {
Filip Navaradd8fbd72009-10-15 13:07:14 +02008330 neon_store_reg(rd, pass, tmp);
pbrook9ee6e8b2007-11-11 00:04:49 +00008331 }
8332 }
8333 break;
8334 }
8335 } else if ((insn & (1 << 10)) == 0) {
8336 /* VTBL, VTBX. */
Peter Maydell56907d72011-04-11 16:26:22 +01008337 int n = ((insn >> 8) & 3) + 1;
8338 if ((rn + n) > 32) {
8339 /* This is UNPREDICTABLE; we choose to UNDEF to avoid the
8340 * helper function running off the end of the register file.
8341 */
8342 return 1;
8343 }
8344 n <<= 3;
pbrook9ee6e8b2007-11-11 00:04:49 +00008345 if (insn & (1 << 6)) {
pbrook8f8e3aa2008-03-31 03:48:01 +00008346 tmp = neon_load_reg(rd, 0);
pbrook9ee6e8b2007-11-11 00:04:49 +00008347 } else {
Peter Maydell7d1b0092011-03-06 21:39:54 +00008348 tmp = tcg_temp_new_i32();
pbrook8f8e3aa2008-03-31 03:48:01 +00008349 tcg_gen_movi_i32(tmp, 0);
pbrook9ee6e8b2007-11-11 00:04:49 +00008350 }
pbrook8f8e3aa2008-03-31 03:48:01 +00008351 tmp2 = neon_load_reg(rm, 0);
Richard Hendersone7c06c42018-01-25 11:45:28 +00008352 ptr1 = vfp_reg_ptr(true, rn);
Juha Riihimäkib75263d2009-10-22 15:17:36 +03008353 tmp5 = tcg_const_i32(n);
Richard Hendersone7c06c42018-01-25 11:45:28 +00008354 gen_helper_neon_tbl(tmp2, tmp2, tmp, ptr1, tmp5);
Peter Maydell7d1b0092011-03-06 21:39:54 +00008355 tcg_temp_free_i32(tmp);
pbrook9ee6e8b2007-11-11 00:04:49 +00008356 if (insn & (1 << 6)) {
pbrook8f8e3aa2008-03-31 03:48:01 +00008357 tmp = neon_load_reg(rd, 1);
pbrook9ee6e8b2007-11-11 00:04:49 +00008358 } else {
Peter Maydell7d1b0092011-03-06 21:39:54 +00008359 tmp = tcg_temp_new_i32();
pbrook8f8e3aa2008-03-31 03:48:01 +00008360 tcg_gen_movi_i32(tmp, 0);
pbrook9ee6e8b2007-11-11 00:04:49 +00008361 }
pbrook8f8e3aa2008-03-31 03:48:01 +00008362 tmp3 = neon_load_reg(rm, 1);
Richard Hendersone7c06c42018-01-25 11:45:28 +00008363 gen_helper_neon_tbl(tmp3, tmp3, tmp, ptr1, tmp5);
Juha Riihimäki25aeb692009-10-26 13:02:37 +02008364 tcg_temp_free_i32(tmp5);
Richard Hendersone7c06c42018-01-25 11:45:28 +00008365 tcg_temp_free_ptr(ptr1);
pbrook8f8e3aa2008-03-31 03:48:01 +00008366 neon_store_reg(rd, 0, tmp2);
pbrook3018f252008-09-22 00:52:42 +00008367 neon_store_reg(rd, 1, tmp3);
Peter Maydell7d1b0092011-03-06 21:39:54 +00008368 tcg_temp_free_i32(tmp);
pbrook9ee6e8b2007-11-11 00:04:49 +00008369 } else if ((insn & 0x380) == 0) {
8370 /* VDUP */
Richard Henderson32f91fb2018-10-24 07:50:19 +01008371 int element;
8372 TCGMemOp size;
8373
Juha Riihimäki133da6a2011-04-11 16:26:23 +01008374 if ((insn & (7 << 16)) == 0 || (q && (rd & 1))) {
8375 return 1;
8376 }
pbrook9ee6e8b2007-11-11 00:04:49 +00008377 if (insn & (1 << 16)) {
Richard Henderson32f91fb2018-10-24 07:50:19 +01008378 size = MO_8;
8379 element = (insn >> 17) & 7;
pbrook9ee6e8b2007-11-11 00:04:49 +00008380 } else if (insn & (1 << 17)) {
Richard Henderson32f91fb2018-10-24 07:50:19 +01008381 size = MO_16;
8382 element = (insn >> 18) & 3;
8383 } else {
8384 size = MO_32;
8385 element = (insn >> 19) & 1;
pbrook9ee6e8b2007-11-11 00:04:49 +00008386 }
Richard Henderson32f91fb2018-10-24 07:50:19 +01008387 tcg_gen_gvec_dup_mem(size, neon_reg_offset(rd, 0),
8388 neon_element_offset(rm, element, size),
8389 q ? 16 : 8, q ? 16 : 8);
pbrook9ee6e8b2007-11-11 00:04:49 +00008390 } else {
8391 return 1;
8392 }
8393 }
8394 }
8395 return 0;
8396}
8397
/* Advanced SIMD three registers of the same length extension.
 *  31  25  23  22  20   16   12  11   10   9    8        3     0
 * +---------------+-----+---+-----+----+----+---+----+---+----+---------+----+
 * | 1 1 1 1 1 1 0 | op1 | D | op2 | Vn | Vd | 1 | o3 | 0 | o4 | N Q M U | Vm |
 * +---------------+-----+---+-----+----+----+---+----+---+----+---------+----+
 *
 * Decodes the VCMLA, VCADD, V[US]DOT and VFM[AS]L encodings in this
 * space and emits a single gvec helper call covering the whole vector
 * operation.  Returns 1 if the insn should UNDEF (unrecognised
 * encoding, missing ISA feature, or illegal register choice); returns
 * 0 once code has been emitted -- including the case where the emitted
 * code is an fp-access exception rather than the operation itself.
 */
static int disas_neon_insn_3same_ext(DisasContext *s, uint32_t insn)
{
    /* Exactly one of fn_gvec / fn_gvec_ptr is set by the decode below. */
    gen_helper_gvec_3 *fn_gvec = NULL;
    gen_helper_gvec_3_ptr *fn_gvec_ptr = NULL;
    int rd, rn, rm, opr_sz;
    int data = 0;
    int off_rn, off_rm;
    bool is_long = false, q = extract32(insn, 6, 1);
    /* When set, the helper's pointer argument is cpu_env rather than an
     * fp-status pointer (used by the FMLAL helper).
     */
    bool ptr_is_env = false;

    if ((insn & 0xfe200f10) == 0xfc200800) {
        /* VCMLA -- 1111 110R R.1S .... .... 1000 ...0 .... */
        int size = extract32(insn, 20, 1);
        data = extract32(insn, 23, 2); /* rot */
        /* size == 0 selects the fp16 form, which also needs FP16 arith. */
        if (!dc_isar_feature(aa32_vcma, s)
            || (!size && !dc_isar_feature(aa32_fp16_arith, s))) {
            return 1;
        }
        fn_gvec_ptr = size ? gen_helper_gvec_fcmlas : gen_helper_gvec_fcmlah;
    } else if ((insn & 0xfea00f10) == 0xfc800800) {
        /* VCADD -- 1111 110R 1.0S .... .... 1000 ...0 .... */
        int size = extract32(insn, 20, 1);
        data = extract32(insn, 24, 1); /* rot */
        if (!dc_isar_feature(aa32_vcma, s)
            || (!size && !dc_isar_feature(aa32_fp16_arith, s))) {
            return 1;
        }
        fn_gvec_ptr = size ? gen_helper_gvec_fcadds : gen_helper_gvec_fcaddh;
    } else if ((insn & 0xfeb00f00) == 0xfc200d00) {
        /* V[US]DOT -- 1111 1100 0.10 .... .... 1101 .Q.U .... */
        bool u = extract32(insn, 4, 1);
        if (!dc_isar_feature(aa32_dp, s)) {
            return 1;
        }
        fn_gvec = u ? gen_helper_gvec_udot_b : gen_helper_gvec_sdot_b;
    } else if ((insn & 0xff300f10) == 0xfc200810) {
        /* VFM[AS]L -- 1111 1100 S.10 .... .... 1000 .Q.1 .... */
        int is_s = extract32(insn, 23, 1);
        if (!dc_isar_feature(aa32_fhm, s)) {
            return 1;
        }
        /* Long op: the D-sized form reads its sources as S registers
         * (see the register decode below).
         */
        is_long = true;
        data = is_s; /* is_2 == 0 */
        fn_gvec_ptr = gen_helper_gvec_fmlal_a32;
        ptr_is_env = true;
    } else {
        return 1;
    }

    VFP_DREG_D(rd, insn);
    /* Q insns (q == 1) require an even-numbered destination register. */
    if (rd & q) {
        return 1;
    }
    if (q || !is_long) {
        /* Sources are D registers; for Q non-long ops they must be even
         * too (the '& q & !is_long' below tests bit 0 only in that case).
         */
        VFP_DREG_N(rn, insn);
        VFP_DREG_M(rm, insn);
        if ((rn | rm) & q & !is_long) {
            return 1;
        }
        off_rn = vfp_reg_offset(1, rn);
        off_rm = vfp_reg_offset(1, rm);
    } else {
        /* D-sized long op: sources are S registers. */
        rn = VFP_SREG_N(insn);
        rm = VFP_SREG_M(insn);
        off_rn = vfp_reg_offset(0, rn);
        off_rm = vfp_reg_offset(0, rm);
    }

    if (s->fp_excp_el) {
        gen_exception_insn(s, 4, EXCP_UDEF,
                           syn_simd_access_trap(1, 0xe, false), s->fp_excp_el);
        return 0;
    }
    if (!s->vfp_enabled) {
        return 1;
    }

    /* 8 bytes of vector data for a D operation, 16 for Q. */
    opr_sz = (1 + q) * 8;
    if (fn_gvec_ptr) {
        TCGv_ptr ptr;
        if (ptr_is_env) {
            ptr = cpu_env;
        } else {
            ptr = get_fpstatus_ptr(1);
        }
        tcg_gen_gvec_3_ptr(vfp_reg_offset(1, rd), off_rn, off_rm, ptr,
                           opr_sz, opr_sz, data, fn_gvec_ptr);
        if (!ptr_is_env) {
            /* cpu_env is borrowed; only free a real fp-status temp. */
            tcg_temp_free_ptr(ptr);
        }
    } else {
        tcg_gen_gvec_3_ool(vfp_reg_offset(1, rd), off_rn, off_rm,
                           opr_sz, opr_sz, data, fn_gvec);
    }
    return 0;
}
8500
/* Advanced SIMD two registers and a scalar extension.
 *  31 24 23 22 20 16 12  11   10   9    8        3     0
 * +-----------------+----+---+----+----+----+---+----+---+----+---------+----+
 * | 1 1 1 1 1 1 1 0 | o1 | D | o2 | Vn | Vd | 1 | o3 | 0 | o4 | N Q M U | Vm |
 * +-----------------+----+---+----+----+----+---+----+---+----+---------+----+
 *
 * Decodes the indexed ("by scalar") forms of VCMLA, V[US]DOT and
 * VFM[AS]L and emits a single gvec helper call for the whole vector
 * operation.  Returns 1 if the insn should UNDEF, 0 once code has
 * been emitted (including an fp-access exception when one is pending).
 */

static int disas_neon_insn_2reg_scalar_ext(DisasContext *s, uint32_t insn)
{
    /* Exactly one of fn_gvec / fn_gvec_ptr is set by the decode below. */
    gen_helper_gvec_3 *fn_gvec = NULL;
    gen_helper_gvec_3_ptr *fn_gvec_ptr = NULL;
    int rd, rn, rm, opr_sz, data;
    int off_rn, off_rm;
    bool is_long = false, q = extract32(insn, 6, 1);
    /* When set, pass cpu_env to the helper instead of an fp-status ptr. */
    bool ptr_is_env = false;

    if ((insn & 0xff000f10) == 0xfe000800) {
        /* VCMLA (indexed) -- 1111 1110 S.RR .... .... 1000 ...0 .... */
        int rot = extract32(insn, 20, 2);
        int size = extract32(insn, 23, 1);
        int index;

        if (!dc_isar_feature(aa32_vcma, s)) {
            return 1;
        }
        if (size == 0) {
            if (!dc_isar_feature(aa32_fp16_arith, s)) {
                return 1;
            }
            /* For fp16, rm is just Vm, and index is M. */
            rm = extract32(insn, 0, 4);
            index = extract32(insn, 5, 1);
        } else {
            /* For fp32, rm is the usual M:Vm, and index is 0. */
            VFP_DREG_M(rm, insn);
            index = 0;
        }
        /* Pack index and rotation into the helper's immediate operand. */
        data = (index << 2) | rot;
        fn_gvec_ptr = (size ? gen_helper_gvec_fcmlas_idx
                       : gen_helper_gvec_fcmlah_idx);
    } else if ((insn & 0xffb00f00) == 0xfe200d00) {
        /* V[US]DOT -- 1111 1110 0.10 .... .... 1101 .Q.U .... */
        int u = extract32(insn, 4, 1);

        if (!dc_isar_feature(aa32_dp, s)) {
            return 1;
        }
        fn_gvec = u ? gen_helper_gvec_udot_idx_b : gen_helper_gvec_sdot_idx_b;
        /* rm is just Vm, and index is M. */
        data = extract32(insn, 5, 1); /* index */
        rm = extract32(insn, 0, 4);
    } else if ((insn & 0xffa00f10) == 0xfe000810) {
        /* VFM[AS]L -- 1111 1110 0.0S .... .... 1000 .Q.1 .... */
        int is_s = extract32(insn, 20, 1);
        int vm20 = extract32(insn, 0, 3);
        int vm3 = extract32(insn, 3, 1);
        int m = extract32(insn, 5, 1);
        int index;

        if (!dc_isar_feature(aa32_fhm, s)) {
            return 1;
        }
        /* The scalar register/index split depends on Q: for Q the scalar
         * comes from a D register (vm20) with a 2-bit index, otherwise
         * from an S register with a 1-bit index.
         */
        if (q) {
            rm = vm20;
            index = m * 2 + vm3;
        } else {
            rm = vm20 * 2 + m;
            index = vm3;
        }
        is_long = true;
        data = (index << 2) | is_s; /* is_2 == 0 */
        fn_gvec_ptr = gen_helper_gvec_fmlal_idx_a32;
        ptr_is_env = true;
    } else {
        return 1;
    }

    VFP_DREG_D(rd, insn);
    /* Q insns (q == 1) require an even-numbered destination register. */
    if (rd & q) {
        return 1;
    }
    if (q || !is_long) {
        VFP_DREG_N(rn, insn);
        if (rn & q & !is_long) {
            return 1;
        }
        off_rn = vfp_reg_offset(1, rn);
        off_rm = vfp_reg_offset(1, rm);
    } else {
        /* D-sized long op: rn is an S register; rm was set by the
         * decode above.
         */
        rn = VFP_SREG_N(insn);
        off_rn = vfp_reg_offset(0, rn);
        off_rm = vfp_reg_offset(0, rm);
    }
    if (s->fp_excp_el) {
        gen_exception_insn(s, 4, EXCP_UDEF,
                           syn_simd_access_trap(1, 0xe, false), s->fp_excp_el);
        return 0;
    }
    if (!s->vfp_enabled) {
        return 1;
    }

    /* 8 bytes of vector data for a D operation, 16 for Q. */
    opr_sz = (1 + q) * 8;
    if (fn_gvec_ptr) {
        TCGv_ptr ptr;
        if (ptr_is_env) {
            ptr = cpu_env;
        } else {
            ptr = get_fpstatus_ptr(1);
        }
        tcg_gen_gvec_3_ptr(vfp_reg_offset(1, rd), off_rn, off_rm, ptr,
                           opr_sz, opr_sz, data, fn_gvec_ptr);
        if (!ptr_is_env) {
            /* cpu_env is borrowed; only free a real fp-status temp. */
            tcg_temp_free_ptr(ptr);
        }
    } else {
        tcg_gen_gvec_3_ool(vfp_reg_offset(1, rd), off_rn, off_rm,
                           opr_sz, opr_sz, data, fn_gvec);
    }
    return 0;
}
8623
/* Decode and emit code for a coprocessor instruction.
 * cp0/cp1 on XScale CPUs are routed to the iwMMXt/DSP decoders;
 * everything else is treated as a generic system register access and
 * looked up in the cp_regs hashtable.
 * Returns 0 if code was emitted for the insn, 1 if it should UNDEF.
 */
static int disas_coproc_insn(DisasContext *s, uint32_t insn)
{
    int cpnum, is64, crn, crm, opc1, opc2, isread, rt, rt2;
    const ARMCPRegInfo *ri;

    cpnum = (insn >> 8) & 0xf;

    /* First check for coprocessor space used for XScale/iwMMXt insns */
    if (arm_dc_feature(s, ARM_FEATURE_XSCALE) && (cpnum < 2)) {
        /* Coprocessor access is gated per-cp by the c15_cpar register. */
        if (extract32(s->c15_cpar, cpnum, 1) == 0) {
            return 1;
        }
        if (arm_dc_feature(s, ARM_FEATURE_IWMMXT)) {
            return disas_iwmmxt_insn(s, insn);
        } else if (arm_dc_feature(s, ARM_FEATURE_XSCALE)) {
            return disas_dsp_insn(s, insn);
        }
        return 1;
    }

    /* Otherwise treat as a generic register access */
    /* Bit 25 clear selects the 64-bit form, which transfers a register
     * pair (rt:rt2) and carries no crn/opc2 fields.
     */
    is64 = (insn & (1 << 25)) == 0;
    if (!is64 && ((insn & (1 << 4)) == 0)) {
        /* cdp */
        return 1;
    }

    crm = insn & 0xf;
    if (is64) {
        crn = 0;
        opc1 = (insn >> 4) & 0xf;
        opc2 = 0;
        rt2 = (insn >> 16) & 0xf;
    } else {
        crn = (insn >> 16) & 0xf;
        opc1 = (insn >> 21) & 7;
        opc2 = (insn >> 5) & 7;
        rt2 = 0;
    }
    isread = (insn >> 20) & 1;
    rt = (insn >> 12) & 0xf;

    ri = get_arm_cp_reginfo(s->cp_regs,
            ENCODE_CP_REG(cpnum, is64, s->ns, crn, crm, opc1, opc2));
    if (ri) {
        /* Check access permissions */
        if (!cp_access_ok(s->current_el, ri, isread)) {
            return 1;
        }

        if (ri->accessfn ||
            (arm_dc_feature(s, ARM_FEATURE_XSCALE) && cpnum < 14)) {
            /* Emit code to perform further access permissions checks at
             * runtime; this may result in an exception.
             * Note that on XScale all cp0..c13 registers do an access check
             * call in order to handle c15_cpar.
             */
            TCGv_ptr tmpptr;
            TCGv_i32 tcg_syn, tcg_isread;
            uint32_t syndrome;

            /* Note that since we are an implementation which takes an
             * exception on a trapped conditional instruction only if the
             * instruction passes its condition code check, we can take
             * advantage of the clause in the ARM ARM that allows us to set
             * the COND field in the instruction to 0xE in all cases.
             * We could fish the actual condition out of the insn (ARM)
             * or the condexec bits (Thumb) but it isn't necessary.
             */
            switch (cpnum) {
            case 14:
                if (is64) {
                    syndrome = syn_cp14_rrt_trap(1, 0xe, opc1, crm, rt, rt2,
                                                 isread, false);
                } else {
                    syndrome = syn_cp14_rt_trap(1, 0xe, opc1, opc2, crn, crm,
                                                rt, isread, false);
                }
                break;
            case 15:
                if (is64) {
                    syndrome = syn_cp15_rrt_trap(1, 0xe, opc1, crm, rt, rt2,
                                                 isread, false);
                } else {
                    syndrome = syn_cp15_rt_trap(1, 0xe, opc1, opc2, crn, crm,
                                                rt, isread, false);
                }
                break;
            default:
                /* ARMv8 defines that only coprocessors 14 and 15 exist,
                 * so this can only happen if this is an ARMv7 or earlier CPU,
                 * in which case the syndrome information won't actually be
                 * guest visible.
                 */
                assert(!arm_dc_feature(s, ARM_FEATURE_V8));
                syndrome = syn_uncategorized();
                break;
            }

            /* Sync condexec/PC state before calling the runtime check
             * helper, since it may raise the exception we built the
             * syndrome for above.
             */
            gen_set_condexec(s);
            gen_set_pc_im(s, s->pc - 4);
            tmpptr = tcg_const_ptr(ri);
            tcg_syn = tcg_const_i32(syndrome);
            tcg_isread = tcg_const_i32(isread);
            gen_helper_access_check_cp_reg(cpu_env, tmpptr, tcg_syn,
                                           tcg_isread);
            tcg_temp_free_ptr(tmpptr);
            tcg_temp_free_i32(tcg_syn);
            tcg_temp_free_i32(tcg_isread);
        }

        /* Handle special cases first */
        switch (ri->type & ~(ARM_CP_FLAG_MASK & ~ARM_CP_SPECIAL)) {
        case ARM_CP_NOP:
            return 0;
        case ARM_CP_WFI:
            if (isread) {
                return 1;
            }
            gen_set_pc_im(s, s->pc);
            s->base.is_jmp = DISAS_WFI;
            return 0;
        default:
            break;
        }

        /* I/O registers need their access bracketed by gen_io_start/end
         * when icount is in use (matching gen_io_end below).
         */
        if ((tb_cflags(s->base.tb) & CF_USE_ICOUNT) && (ri->type & ARM_CP_IO)) {
            gen_io_start();
        }

        if (isread) {
            /* Read */
            if (is64) {
                TCGv_i64 tmp64;
                TCGv_i32 tmp;
                if (ri->type & ARM_CP_CONST) {
                    tmp64 = tcg_const_i64(ri->resetvalue);
                } else if (ri->readfn) {
                    TCGv_ptr tmpptr;
                    tmp64 = tcg_temp_new_i64();
                    tmpptr = tcg_const_ptr(ri);
                    gen_helper_get_cp_reg64(tmp64, cpu_env, tmpptr);
                    tcg_temp_free_ptr(tmpptr);
                } else {
                    tmp64 = tcg_temp_new_i64();
                    tcg_gen_ld_i64(tmp64, cpu_env, ri->fieldoffset);
                }
                /* Split the 64-bit value across the rt/rt2 pair. */
                tmp = tcg_temp_new_i32();
                tcg_gen_extrl_i64_i32(tmp, tmp64);
                store_reg(s, rt, tmp);
                tcg_gen_shri_i64(tmp64, tmp64, 32);
                tmp = tcg_temp_new_i32();
                tcg_gen_extrl_i64_i32(tmp, tmp64);
                tcg_temp_free_i64(tmp64);
                store_reg(s, rt2, tmp);
            } else {
                TCGv_i32 tmp;
                if (ri->type & ARM_CP_CONST) {
                    tmp = tcg_const_i32(ri->resetvalue);
                } else if (ri->readfn) {
                    TCGv_ptr tmpptr;
                    tmp = tcg_temp_new_i32();
                    tmpptr = tcg_const_ptr(ri);
                    gen_helper_get_cp_reg(tmp, cpu_env, tmpptr);
                    tcg_temp_free_ptr(tmpptr);
                } else {
                    tmp = load_cpu_offset(ri->fieldoffset);
                }
                if (rt == 15) {
                    /* Destination register of r15 for 32 bit loads sets
                     * the condition codes from the high 4 bits of the value
                     */
                    gen_set_nzcv(tmp);
                    tcg_temp_free_i32(tmp);
                } else {
                    store_reg(s, rt, tmp);
                }
            }
        } else {
            /* Write */
            if (ri->type & ARM_CP_CONST) {
                /* If not forbidden by access permissions, treat as WI */
                return 0;
            }

            if (is64) {
                TCGv_i32 tmplo, tmphi;
                TCGv_i64 tmp64 = tcg_temp_new_i64();
                tmplo = load_reg(s, rt);
                tmphi = load_reg(s, rt2);
                tcg_gen_concat_i32_i64(tmp64, tmplo, tmphi);
                tcg_temp_free_i32(tmplo);
                tcg_temp_free_i32(tmphi);
                if (ri->writefn) {
                    TCGv_ptr tmpptr = tcg_const_ptr(ri);
                    gen_helper_set_cp_reg64(cpu_env, tmpptr, tmp64);
                    tcg_temp_free_ptr(tmpptr);
                } else {
                    tcg_gen_st_i64(tmp64, cpu_env, ri->fieldoffset);
                }
                tcg_temp_free_i64(tmp64);
            } else {
                if (ri->writefn) {
                    TCGv_i32 tmp;
                    TCGv_ptr tmpptr;
                    tmp = load_reg(s, rt);
                    tmpptr = tcg_const_ptr(ri);
                    gen_helper_set_cp_reg(cpu_env, tmpptr, tmp);
                    tcg_temp_free_ptr(tmpptr);
                    tcg_temp_free_i32(tmp);
                } else {
                    TCGv_i32 tmp = load_reg(s, rt);
                    store_cpu_offset(tmp, ri->fieldoffset);
                }
            }
        }

        if ((tb_cflags(s->base.tb) & CF_USE_ICOUNT) && (ri->type & ARM_CP_IO)) {
            /* I/O operations must end the TB here (whether read or write) */
            gen_io_end();
            gen_lookup_tb(s);
        } else if (!isread && !(ri->type & ARM_CP_SUPPRESS_TB_END)) {
            /* We default to ending the TB on a coprocessor register write,
             * but allow this to be suppressed by the register definition
             * (usually only necessary to work around guest bugs).
             */
            gen_lookup_tb(s);
        }

        return 0;
    }

    /* Unknown register; this might be a guest error or a QEMU
     * unimplemented feature.
     */
    if (is64) {
        qemu_log_mask(LOG_UNIMP, "%s access to unsupported AArch32 "
                      "64 bit system register cp:%d opc1: %d crm:%d "
                      "(%s)\n",
                      isread ? "read" : "write", cpnum, opc1, crm,
                      s->ns ? "non-secure" : "secure");
    } else {
        qemu_log_mask(LOG_UNIMP, "%s access to unsupported AArch32 "
                      "system register cp:%d opc1:%d crn:%d crm:%d opc2:%d "
                      "(%s)\n",
                      isread ? "read" : "write", cpnum, opc1, crn, crm, opc2,
                      s->ns ? "non-secure" : "secure");
    }

    return 1;
}
8875
pbrook5e3f8782008-03-31 03:47:34 +00008876
8877/* Store a 64-bit value to a register pair. Clobbers val. */
pbrooka7812ae2008-11-17 14:43:54 +00008878static void gen_storeq_reg(DisasContext *s, int rlow, int rhigh, TCGv_i64 val)
pbrook5e3f8782008-03-31 03:47:34 +00008879{
Peter Maydell39d54922013-05-23 12:59:55 +01008880 TCGv_i32 tmp;
Peter Maydell7d1b0092011-03-06 21:39:54 +00008881 tmp = tcg_temp_new_i32();
Richard Hendersonecc7b3a2015-07-24 11:49:53 -07008882 tcg_gen_extrl_i64_i32(tmp, val);
pbrook5e3f8782008-03-31 03:47:34 +00008883 store_reg(s, rlow, tmp);
Peter Maydell7d1b0092011-03-06 21:39:54 +00008884 tmp = tcg_temp_new_i32();
pbrook5e3f8782008-03-31 03:47:34 +00008885 tcg_gen_shri_i64(val, val, 32);
Richard Hendersonecc7b3a2015-07-24 11:49:53 -07008886 tcg_gen_extrl_i64_i32(tmp, val);
pbrook5e3f8782008-03-31 03:47:34 +00008887 store_reg(s, rhigh, tmp);
8888}
8889
8890/* load a 32-bit value from a register and perform a 64-bit accumulate. */
pbrooka7812ae2008-11-17 14:43:54 +00008891static void gen_addq_lo(DisasContext *s, TCGv_i64 val, int rlow)
pbrook5e3f8782008-03-31 03:47:34 +00008892{
pbrooka7812ae2008-11-17 14:43:54 +00008893 TCGv_i64 tmp;
Peter Maydell39d54922013-05-23 12:59:55 +01008894 TCGv_i32 tmp2;
pbrook5e3f8782008-03-31 03:47:34 +00008895
pbrook36aa55d2008-09-21 13:48:32 +00008896 /* Load value and extend to 64 bits. */
pbrooka7812ae2008-11-17 14:43:54 +00008897 tmp = tcg_temp_new_i64();
pbrook5e3f8782008-03-31 03:47:34 +00008898 tmp2 = load_reg(s, rlow);
8899 tcg_gen_extu_i32_i64(tmp, tmp2);
Peter Maydell7d1b0092011-03-06 21:39:54 +00008900 tcg_temp_free_i32(tmp2);
pbrook5e3f8782008-03-31 03:47:34 +00008901 tcg_gen_add_i64(val, val, tmp);
Juha Riihimäkib75263d2009-10-22 15:17:36 +03008902 tcg_temp_free_i64(tmp);
pbrook5e3f8782008-03-31 03:47:34 +00008903}
8904
8905/* load and add a 64-bit value from a register pair. */
pbrooka7812ae2008-11-17 14:43:54 +00008906static void gen_addq(DisasContext *s, TCGv_i64 val, int rlow, int rhigh)
pbrook5e3f8782008-03-31 03:47:34 +00008907{
pbrooka7812ae2008-11-17 14:43:54 +00008908 TCGv_i64 tmp;
Peter Maydell39d54922013-05-23 12:59:55 +01008909 TCGv_i32 tmpl;
8910 TCGv_i32 tmph;
pbrook5e3f8782008-03-31 03:47:34 +00008911
8912 /* Load 64-bit value rd:rn. */
pbrook36aa55d2008-09-21 13:48:32 +00008913 tmpl = load_reg(s, rlow);
8914 tmph = load_reg(s, rhigh);
pbrooka7812ae2008-11-17 14:43:54 +00008915 tmp = tcg_temp_new_i64();
pbrook36aa55d2008-09-21 13:48:32 +00008916 tcg_gen_concat_i32_i64(tmp, tmpl, tmph);
Peter Maydell7d1b0092011-03-06 21:39:54 +00008917 tcg_temp_free_i32(tmpl);
8918 tcg_temp_free_i32(tmph);
pbrook5e3f8782008-03-31 03:47:34 +00008919 tcg_gen_add_i64(val, val, tmp);
Juha Riihimäkib75263d2009-10-22 15:17:36 +03008920 tcg_temp_free_i64(tmp);
pbrook5e3f8782008-03-31 03:47:34 +00008921}
8922
Richard Hendersonc9f10122013-02-19 23:52:06 -08008923/* Set N and Z flags from hi|lo. */
Peter Maydell39d54922013-05-23 12:59:55 +01008924static void gen_logicq_cc(TCGv_i32 lo, TCGv_i32 hi)
pbrook5e3f8782008-03-31 03:47:34 +00008925{
Richard Hendersonc9f10122013-02-19 23:52:06 -08008926 tcg_gen_mov_i32(cpu_NF, hi);
8927 tcg_gen_or_i32(cpu_ZF, lo, hi);
pbrook5e3f8782008-03-31 03:47:34 +00008928}
8929
Paul Brook426f5ab2009-11-22 21:35:13 +00008930/* Load/Store exclusive instructions are implemented by remembering
8931 the value/address loaded, and seeing if these are the same
Emilio G. Cota354161b2016-06-27 15:02:08 -04008932 when the store is performed. This should be sufficient to implement
Paul Brook426f5ab2009-11-22 21:35:13 +00008933 the architecturally mandated semantics, and avoids having to monitor
Emilio G. Cota354161b2016-06-27 15:02:08 -04008934 regular stores. The compare vs the remembered value is done during
8935 the cmpxchg operation, but we must compare the addresses manually. */
Paul Brook426f5ab2009-11-22 21:35:13 +00008936static void gen_load_exclusive(DisasContext *s, int rt, int rt2,
Peter Maydell39d54922013-05-23 12:59:55 +01008937 TCGv_i32 addr, int size)
Paul Brook426f5ab2009-11-22 21:35:13 +00008938{
Peter Maydell94ee24e2013-05-23 12:59:59 +01008939 TCGv_i32 tmp = tcg_temp_new_i32();
Emilio G. Cota354161b2016-06-27 15:02:08 -04008940 TCGMemOp opc = size | MO_ALIGN | s->be_data;
Paul Brook426f5ab2009-11-22 21:35:13 +00008941
Peter Maydell50225ad2014-08-19 18:56:27 +01008942 s->is_ldex = true;
8943
Paul Brook426f5ab2009-11-22 21:35:13 +00008944 if (size == 3) {
Peter Maydell39d54922013-05-23 12:59:55 +01008945 TCGv_i32 tmp2 = tcg_temp_new_i32();
Emilio G. Cota354161b2016-06-27 15:02:08 -04008946 TCGv_i64 t64 = tcg_temp_new_i64();
Peter Maydell03d05e22014-01-04 22:15:47 +00008947
Peter Maydell3448d472017-11-07 13:03:51 +00008948 /* For AArch32, architecturally the 32-bit word at the lowest
8949 * address is always Rt and the one at addr+4 is Rt2, even if
8950 * the CPU is big-endian. That means we don't want to do a
8951 * gen_aa32_ld_i64(), which invokes gen_aa32_frob64() as if
8952 * for an architecturally 64-bit access, but instead do a
8953 * 64-bit access using MO_BE if appropriate and then split
8954 * the two halves.
8955 * This only makes a difference for BE32 user-mode, where
8956 * frob64() must not flip the two halves of the 64-bit data
8957 * but this code must treat BE32 user-mode like BE32 system.
8958 */
8959 TCGv taddr = gen_aa32_addr(s, addr, opc);
8960
8961 tcg_gen_qemu_ld_i64(t64, taddr, get_mem_index(s), opc);
8962 tcg_temp_free(taddr);
Emilio G. Cota354161b2016-06-27 15:02:08 -04008963 tcg_gen_mov_i64(cpu_exclusive_val, t64);
Peter Maydell3448d472017-11-07 13:03:51 +00008964 if (s->be_data == MO_BE) {
8965 tcg_gen_extr_i64_i32(tmp2, tmp, t64);
8966 } else {
8967 tcg_gen_extr_i64_i32(tmp, tmp2, t64);
8968 }
Emilio G. Cota354161b2016-06-27 15:02:08 -04008969 tcg_temp_free_i64(t64);
8970
8971 store_reg(s, rt2, tmp2);
Peter Maydell03d05e22014-01-04 22:15:47 +00008972 } else {
Emilio G. Cota354161b2016-06-27 15:02:08 -04008973 gen_aa32_ld_i32(s, tmp, addr, get_mem_index(s), opc);
Peter Maydell03d05e22014-01-04 22:15:47 +00008974 tcg_gen_extu_i32_i64(cpu_exclusive_val, tmp);
Paul Brook426f5ab2009-11-22 21:35:13 +00008975 }
Peter Maydell03d05e22014-01-04 22:15:47 +00008976
8977 store_reg(s, rt, tmp);
8978 tcg_gen_extu_i32_i64(cpu_exclusive_addr, addr);
Paul Brook426f5ab2009-11-22 21:35:13 +00008979}
8980
/* Clear the local exclusive monitor: an exclusive address of -1 can
 * never match a real monitored address, so any subsequent store-exclusive
 * will fail.
 */
static void gen_clrex(DisasContext *s)
{
    tcg_gen_movi_i64(cpu_exclusive_addr, -1);
}
8985
Paul Brook426f5ab2009-11-22 21:35:13 +00008986static void gen_store_exclusive(DisasContext *s, int rd, int rt, int rt2,
Peter Maydell39d54922013-05-23 12:59:55 +01008987 TCGv_i32 addr, int size)
Paul Brook426f5ab2009-11-22 21:35:13 +00008988{
Emilio G. Cota354161b2016-06-27 15:02:08 -04008989 TCGv_i32 t0, t1, t2;
8990 TCGv_i64 extaddr;
8991 TCGv taddr;
Richard Henderson42a268c2015-02-13 12:51:55 -08008992 TCGLabel *done_label;
8993 TCGLabel *fail_label;
Emilio G. Cota354161b2016-06-27 15:02:08 -04008994 TCGMemOp opc = size | MO_ALIGN | s->be_data;
Paul Brook426f5ab2009-11-22 21:35:13 +00008995
8996 /* if (env->exclusive_addr == addr && env->exclusive_val == [addr]) {
8997 [addr] = {Rt};
8998 {Rd} = 0;
8999 } else {
9000 {Rd} = 1;
9001 } */
9002 fail_label = gen_new_label();
9003 done_label = gen_new_label();
Peter Maydell03d05e22014-01-04 22:15:47 +00009004 extaddr = tcg_temp_new_i64();
9005 tcg_gen_extu_i32_i64(extaddr, addr);
9006 tcg_gen_brcond_i64(TCG_COND_NE, extaddr, cpu_exclusive_addr, fail_label);
9007 tcg_temp_free_i64(extaddr);
9008
Emilio G. Cota354161b2016-06-27 15:02:08 -04009009 taddr = gen_aa32_addr(s, addr, opc);
9010 t0 = tcg_temp_new_i32();
9011 t1 = load_reg(s, rt);
Paul Brook426f5ab2009-11-22 21:35:13 +00009012 if (size == 3) {
Emilio G. Cota354161b2016-06-27 15:02:08 -04009013 TCGv_i64 o64 = tcg_temp_new_i64();
9014 TCGv_i64 n64 = tcg_temp_new_i64();
9015
9016 t2 = load_reg(s, rt2);
Peter Maydell3448d472017-11-07 13:03:51 +00009017 /* For AArch32, architecturally the 32-bit word at the lowest
9018 * address is always Rt and the one at addr+4 is Rt2, even if
9019 * the CPU is big-endian. Since we're going to treat this as a
9020 * single 64-bit BE store, we need to put the two halves in the
9021 * opposite order for BE to LE, so that they end up in the right
9022 * places.
9023 * We don't want gen_aa32_frob64() because that does the wrong
9024 * thing for BE32 usermode.
9025 */
9026 if (s->be_data == MO_BE) {
9027 tcg_gen_concat_i32_i64(n64, t2, t1);
9028 } else {
9029 tcg_gen_concat_i32_i64(n64, t1, t2);
9030 }
Emilio G. Cota354161b2016-06-27 15:02:08 -04009031 tcg_temp_free_i32(t2);
Emilio G. Cota354161b2016-06-27 15:02:08 -04009032
9033 tcg_gen_atomic_cmpxchg_i64(o64, taddr, cpu_exclusive_val, n64,
9034 get_mem_index(s), opc);
9035 tcg_temp_free_i64(n64);
9036
Emilio G. Cota354161b2016-06-27 15:02:08 -04009037 tcg_gen_setcond_i64(TCG_COND_NE, o64, o64, cpu_exclusive_val);
9038 tcg_gen_extrl_i64_i32(t0, o64);
9039
9040 tcg_temp_free_i64(o64);
Peter Maydell03d05e22014-01-04 22:15:47 +00009041 } else {
Emilio G. Cota354161b2016-06-27 15:02:08 -04009042 t2 = tcg_temp_new_i32();
9043 tcg_gen_extrl_i64_i32(t2, cpu_exclusive_val);
9044 tcg_gen_atomic_cmpxchg_i32(t0, taddr, t2, t1, get_mem_index(s), opc);
9045 tcg_gen_setcond_i32(TCG_COND_NE, t0, t0, t2);
9046 tcg_temp_free_i32(t2);
Paul Brook426f5ab2009-11-22 21:35:13 +00009047 }
Emilio G. Cota354161b2016-06-27 15:02:08 -04009048 tcg_temp_free_i32(t1);
9049 tcg_temp_free(taddr);
9050 tcg_gen_mov_i32(cpu_R[rd], t0);
9051 tcg_temp_free_i32(t0);
Paul Brook426f5ab2009-11-22 21:35:13 +00009052 tcg_gen_br(done_label);
Emilio G. Cota354161b2016-06-27 15:02:08 -04009053
Paul Brook426f5ab2009-11-22 21:35:13 +00009054 gen_set_label(fail_label);
9055 tcg_gen_movi_i32(cpu_R[rd], 1);
9056 gen_set_label(done_label);
Peter Maydell03d05e22014-01-04 22:15:47 +00009057 tcg_gen_movi_i64(cpu_exclusive_addr, -1);
Paul Brook426f5ab2009-11-22 21:35:13 +00009058}
Paul Brook426f5ab2009-11-22 21:35:13 +00009059
Peter Maydell81465882013-03-05 00:31:17 +00009060/* gen_srs:
9061 * @env: CPUARMState
9062 * @s: DisasContext
9063 * @mode: mode field from insn (which stack to store to)
9064 * @amode: addressing mode (DA/IA/DB/IB), encoded as per P,U bits in ARM insn
9065 * @writeback: true if writeback bit set
9066 *
9067 * Generate code for the SRS (Store Return State) insn.
9068 */
9069static void gen_srs(DisasContext *s,
9070 uint32_t mode, uint32_t amode, bool writeback)
9071{
9072 int32_t offset;
Peter Maydellcbc03262016-02-18 14:16:16 +00009073 TCGv_i32 addr, tmp;
9074 bool undef = false;
9075
9076 /* SRS is:
9077 * - trapped to EL3 if EL3 is AArch64 and we are at Secure EL1
Ralf-Philipp Weinmannba63cf42016-03-04 11:30:22 +00009078 * and specified mode is monitor mode
Peter Maydellcbc03262016-02-18 14:16:16 +00009079 * - UNDEFINED in Hyp mode
9080 * - UNPREDICTABLE in User or System mode
9081 * - UNPREDICTABLE if the specified mode is:
9082 * -- not implemented
9083 * -- not a valid mode number
9084 * -- a mode that's at a higher exception level
9085 * -- Monitor, if we are Non-secure
Peter Maydellf01377f2016-02-18 14:16:17 +00009086 * For the UNPREDICTABLE cases we choose to UNDEF.
Peter Maydellcbc03262016-02-18 14:16:16 +00009087 */
Ralf-Philipp Weinmannba63cf42016-03-04 11:30:22 +00009088 if (s->current_el == 1 && !s->ns && mode == ARM_CPU_MODE_MON) {
Peter Maydellcbc03262016-02-18 14:16:16 +00009089 gen_exception_insn(s, 4, EXCP_UDEF, syn_uncategorized(), 3);
9090 return;
9091 }
9092
9093 if (s->current_el == 0 || s->current_el == 2) {
9094 undef = true;
9095 }
9096
9097 switch (mode) {
9098 case ARM_CPU_MODE_USR:
9099 case ARM_CPU_MODE_FIQ:
9100 case ARM_CPU_MODE_IRQ:
9101 case ARM_CPU_MODE_SVC:
9102 case ARM_CPU_MODE_ABT:
9103 case ARM_CPU_MODE_UND:
9104 case ARM_CPU_MODE_SYS:
9105 break;
9106 case ARM_CPU_MODE_HYP:
9107 if (s->current_el == 1 || !arm_dc_feature(s, ARM_FEATURE_EL2)) {
9108 undef = true;
9109 }
9110 break;
9111 case ARM_CPU_MODE_MON:
9112 /* No need to check specifically for "are we non-secure" because
9113 * we've already made EL0 UNDEF and handled the trap for S-EL1;
9114 * so if this isn't EL3 then we must be non-secure.
9115 */
9116 if (s->current_el != 3) {
9117 undef = true;
9118 }
9119 break;
9120 default:
9121 undef = true;
9122 }
9123
9124 if (undef) {
9125 gen_exception_insn(s, 4, EXCP_UDEF, syn_uncategorized(),
9126 default_exception_el(s));
9127 return;
9128 }
9129
9130 addr = tcg_temp_new_i32();
9131 tmp = tcg_const_i32(mode);
Peter Maydellf01377f2016-02-18 14:16:17 +00009132 /* get_r13_banked() will raise an exception if called from System mode */
9133 gen_set_condexec(s);
9134 gen_set_pc_im(s, s->pc - 4);
Peter Maydell81465882013-03-05 00:31:17 +00009135 gen_helper_get_r13_banked(addr, cpu_env, tmp);
9136 tcg_temp_free_i32(tmp);
9137 switch (amode) {
9138 case 0: /* DA */
9139 offset = -4;
9140 break;
9141 case 1: /* IA */
9142 offset = 0;
9143 break;
9144 case 2: /* DB */
9145 offset = -8;
9146 break;
9147 case 3: /* IB */
9148 offset = 4;
9149 break;
9150 default:
9151 abort();
9152 }
9153 tcg_gen_addi_i32(addr, addr, offset);
9154 tmp = load_reg(s, 14);
Paolo Bonzini12dcc322016-03-04 11:30:20 +00009155 gen_aa32_st32(s, tmp, addr, get_mem_index(s));
Peter Maydell5a839c02013-05-23 13:00:00 +01009156 tcg_temp_free_i32(tmp);
Peter Maydell81465882013-03-05 00:31:17 +00009157 tmp = load_cpu_field(spsr);
9158 tcg_gen_addi_i32(addr, addr, 4);
Paolo Bonzini12dcc322016-03-04 11:30:20 +00009159 gen_aa32_st32(s, tmp, addr, get_mem_index(s));
Peter Maydell5a839c02013-05-23 13:00:00 +01009160 tcg_temp_free_i32(tmp);
Peter Maydell81465882013-03-05 00:31:17 +00009161 if (writeback) {
9162 switch (amode) {
9163 case 0:
9164 offset = -8;
9165 break;
9166 case 1:
9167 offset = 4;
9168 break;
9169 case 2:
9170 offset = -4;
9171 break;
9172 case 3:
9173 offset = 0;
9174 break;
9175 default:
9176 abort();
9177 }
9178 tcg_gen_addi_i32(addr, addr, offset);
9179 tmp = tcg_const_i32(mode);
9180 gen_helper_set_r13_banked(cpu_env, tmp, addr);
9181 tcg_temp_free_i32(tmp);
9182 }
9183 tcg_temp_free_i32(addr);
Lluís Vilanovadcba3a82017-07-14 12:01:59 +03009184 s->base.is_jmp = DISAS_UPDATE;
Peter Maydell81465882013-03-05 00:31:17 +00009185}
9186
Roman Kaplc2d96442018-08-20 11:24:31 +01009187/* Generate a label used for skipping this instruction */
9188static void arm_gen_condlabel(DisasContext *s)
9189{
9190 if (!s->condjmp) {
9191 s->condlabel = gen_new_label();
9192 s->condjmp = 1;
9193 }
9194}
9195
/* Skip this instruction if the ARM condition is false */
static void arm_skip_unless(DisasContext *s, uint32_t cond)
{
    /* Ensure the shared skip label exists, then branch to it when the
     * inverse condition (cond ^ 1) holds.
     */
    arm_gen_condlabel(s);
    arm_gen_test_cc(cond ^ 1, s->condlabel);
}
9202
Peter Maydellf4df2212014-10-28 19:24:04 +00009203static void disas_arm_insn(DisasContext *s, unsigned int insn)
bellard2c0262a2003-09-30 20:34:21 +00009204{
Peter Maydellf4df2212014-10-28 19:24:04 +00009205 unsigned int cond, val, op1, i, shift, rm, rs, rn, rd, sh;
Peter Maydell39d54922013-05-23 12:59:55 +01009206 TCGv_i32 tmp;
9207 TCGv_i32 tmp2;
9208 TCGv_i32 tmp3;
9209 TCGv_i32 addr;
pbrooka7812ae2008-11-17 14:43:54 +00009210 TCGv_i64 tmp64;
ths3b46e622007-09-17 08:09:54 +00009211
Peter Maydelle13886e2017-02-28 12:08:19 +00009212 /* M variants do not implement ARM mode; this must raise the INVSTATE
9213 * UsageFault exception.
9214 */
Peter Maydellb53d8922014-10-28 19:24:02 +00009215 if (arm_dc_feature(s, ARM_FEATURE_M)) {
Peter Maydelle13886e2017-02-28 12:08:19 +00009216 gen_exception_insn(s, 4, EXCP_INVSTATE, syn_uncategorized(),
9217 default_exception_el(s));
9218 return;
Peter Maydellb53d8922014-10-28 19:24:02 +00009219 }
bellard2c0262a2003-09-30 20:34:21 +00009220 cond = insn >> 28;
bellard99c475a2005-01-31 20:45:13 +00009221 if (cond == 0xf){
Dmitry Eremin-Solenikovbe5e7a72011-04-04 17:38:44 +04009222 /* In ARMv3 and v4 the NV condition is UNPREDICTABLE; we
9223 * choose to UNDEF. In ARMv5 and above the space is used
9224 * for miscellaneous unconditional instructions.
9225 */
9226 ARCH(5);
9227
bellardb7bcbe92005-02-22 19:27:29 +00009228 /* Unconditional instructions. */
pbrook9ee6e8b2007-11-11 00:04:49 +00009229 if (((insn >> 25) & 7) == 1) {
9230 /* NEON Data processing. */
Peter Maydelld614a512014-10-28 19:24:01 +00009231 if (!arm_dc_feature(s, ARM_FEATURE_NEON)) {
pbrook9ee6e8b2007-11-11 00:04:49 +00009232 goto illegal_op;
Peter Maydelld614a512014-10-28 19:24:01 +00009233 }
pbrook9ee6e8b2007-11-11 00:04:49 +00009234
Peter Maydell7dcc1f82014-10-28 19:24:03 +00009235 if (disas_neon_data_insn(s, insn)) {
pbrook9ee6e8b2007-11-11 00:04:49 +00009236 goto illegal_op;
Peter Maydell7dcc1f82014-10-28 19:24:03 +00009237 }
pbrook9ee6e8b2007-11-11 00:04:49 +00009238 return;
9239 }
9240 if ((insn & 0x0f100000) == 0x04000000) {
9241 /* NEON load/store. */
Peter Maydelld614a512014-10-28 19:24:01 +00009242 if (!arm_dc_feature(s, ARM_FEATURE_NEON)) {
pbrook9ee6e8b2007-11-11 00:04:49 +00009243 goto illegal_op;
Peter Maydelld614a512014-10-28 19:24:01 +00009244 }
pbrook9ee6e8b2007-11-11 00:04:49 +00009245
Peter Maydell7dcc1f82014-10-28 19:24:03 +00009246 if (disas_neon_ls_insn(s, insn)) {
pbrook9ee6e8b2007-11-11 00:04:49 +00009247 goto illegal_op;
Peter Maydell7dcc1f82014-10-28 19:24:03 +00009248 }
pbrook9ee6e8b2007-11-11 00:04:49 +00009249 return;
9250 }
Will Newton6a57f3e2013-12-06 17:01:40 +00009251 if ((insn & 0x0f000e10) == 0x0e000a00) {
9252 /* VFP. */
Peter Maydell7dcc1f82014-10-28 19:24:03 +00009253 if (disas_vfp_insn(s, insn)) {
Will Newton6a57f3e2013-12-06 17:01:40 +00009254 goto illegal_op;
9255 }
9256 return;
9257 }
Peter Maydell3d185e52011-02-03 19:43:24 +00009258 if (((insn & 0x0f30f000) == 0x0510f000) ||
9259 ((insn & 0x0f30f010) == 0x0710f000)) {
9260 if ((insn & (1 << 22)) == 0) {
9261 /* PLDW; v7MP */
Peter Maydelld614a512014-10-28 19:24:01 +00009262 if (!arm_dc_feature(s, ARM_FEATURE_V7MP)) {
Peter Maydell3d185e52011-02-03 19:43:24 +00009263 goto illegal_op;
9264 }
9265 }
9266 /* Otherwise PLD; v5TE+ */
Dmitry Eremin-Solenikovbe5e7a72011-04-04 17:38:44 +04009267 ARCH(5TE);
Peter Maydell3d185e52011-02-03 19:43:24 +00009268 return;
9269 }
9270 if (((insn & 0x0f70f000) == 0x0450f000) ||
9271 ((insn & 0x0f70f010) == 0x0650f000)) {
9272 ARCH(7);
9273 return; /* PLI; V7 */
9274 }
9275 if (((insn & 0x0f700000) == 0x04100000) ||
9276 ((insn & 0x0f700010) == 0x06100000)) {
Peter Maydelld614a512014-10-28 19:24:01 +00009277 if (!arm_dc_feature(s, ARM_FEATURE_V7MP)) {
Peter Maydell3d185e52011-02-03 19:43:24 +00009278 goto illegal_op;
9279 }
9280 return; /* v7MP: Unallocated memory hint: must NOP */
9281 }
9282
9283 if ((insn & 0x0ffffdff) == 0x01010000) {
pbrook9ee6e8b2007-11-11 00:04:49 +00009284 ARCH(6);
9285 /* setend */
Paolo Bonzini9886ecd2016-03-04 11:30:21 +00009286 if (((insn >> 9) & 1) != !!(s->be_data == MO_BE)) {
9287 gen_helper_setend(cpu_env);
Lluís Vilanovadcba3a82017-07-14 12:01:59 +03009288 s->base.is_jmp = DISAS_UPDATE;
pbrook9ee6e8b2007-11-11 00:04:49 +00009289 }
9290 return;
9291 } else if ((insn & 0x0fffff00) == 0x057ff000) {
9292 switch ((insn >> 4) & 0xf) {
9293 case 1: /* clrex */
9294 ARCH(6K);
Paul Brook426f5ab2009-11-22 21:35:13 +00009295 gen_clrex(s);
pbrook9ee6e8b2007-11-11 00:04:49 +00009296 return;
9297 case 4: /* dsb */
9298 case 5: /* dmb */
pbrook9ee6e8b2007-11-11 00:04:49 +00009299 ARCH(7);
Pranith Kumar61e4c432016-07-14 16:20:23 -04009300 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_SC);
pbrook9ee6e8b2007-11-11 00:04:49 +00009301 return;
Sergey Sorokin6df99de2015-10-16 11:14:52 +01009302 case 6: /* isb */
9303 /* We need to break the TB after this insn to execute
9304 * self-modifying code correctly and also to take
9305 * any pending interrupts immediately.
9306 */
Alex Bennée0b609cc2017-07-17 13:36:07 +01009307 gen_goto_tb(s, 0, s->pc & ~1);
Sergey Sorokin6df99de2015-10-16 11:14:52 +01009308 return;
Richard Henderson9888bd12019-03-01 12:04:53 -08009309 case 7: /* sb */
9310 if ((insn & 0xf) || !dc_isar_feature(aa32_sb, s)) {
9311 goto illegal_op;
9312 }
9313 /*
9314 * TODO: There is no speculation barrier opcode
9315 * for TCG; MB and end the TB instead.
9316 */
9317 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_SC);
9318 gen_goto_tb(s, 0, s->pc & ~1);
9319 return;
pbrook9ee6e8b2007-11-11 00:04:49 +00009320 default:
9321 goto illegal_op;
9322 }
9323 } else if ((insn & 0x0e5fffe0) == 0x084d0500) {
9324 /* srs */
pbrook9ee6e8b2007-11-11 00:04:49 +00009325 ARCH(6);
Peter Maydell81465882013-03-05 00:31:17 +00009326 gen_srs(s, (insn & 0x1f), (insn >> 23) & 3, insn & (1 << 21));
Peter Chubb3b328442013-04-19 12:24:18 +01009327 return;
Adam Lackorzynskiea825ee2010-03-02 01:17:35 +01009328 } else if ((insn & 0x0e50ffe0) == 0x08100a00) {
pbrook9ee6e8b2007-11-11 00:04:49 +00009329 /* rfe */
Filip Navarac67b6b72009-10-15 12:12:11 +02009330 int32_t offset;
pbrook9ee6e8b2007-11-11 00:04:49 +00009331 if (IS_USER(s))
9332 goto illegal_op;
9333 ARCH(6);
9334 rn = (insn >> 16) & 0xf;
pbrookb0109802008-03-31 03:47:03 +00009335 addr = load_reg(s, rn);
pbrook9ee6e8b2007-11-11 00:04:49 +00009336 i = (insn >> 23) & 3;
9337 switch (i) {
pbrookb0109802008-03-31 03:47:03 +00009338 case 0: offset = -4; break; /* DA */
Filip Navarac67b6b72009-10-15 12:12:11 +02009339 case 1: offset = 0; break; /* IA */
9340 case 2: offset = -8; break; /* DB */
pbrookb0109802008-03-31 03:47:03 +00009341 case 3: offset = 4; break; /* IB */
pbrook9ee6e8b2007-11-11 00:04:49 +00009342 default: abort();
9343 }
9344 if (offset)
pbrookb0109802008-03-31 03:47:03 +00009345 tcg_gen_addi_i32(addr, addr, offset);
9346 /* Load PC into tmp and CPSR into tmp2. */
Peter Maydell5a839c02013-05-23 13:00:00 +01009347 tmp = tcg_temp_new_i32();
Paolo Bonzini12dcc322016-03-04 11:30:20 +00009348 gen_aa32_ld32u(s, tmp, addr, get_mem_index(s));
pbrookb0109802008-03-31 03:47:03 +00009349 tcg_gen_addi_i32(addr, addr, 4);
Peter Maydell5a839c02013-05-23 13:00:00 +01009350 tmp2 = tcg_temp_new_i32();
Paolo Bonzini12dcc322016-03-04 11:30:20 +00009351 gen_aa32_ld32u(s, tmp2, addr, get_mem_index(s));
pbrook9ee6e8b2007-11-11 00:04:49 +00009352 if (insn & (1 << 21)) {
9353 /* Base writeback. */
9354 switch (i) {
pbrookb0109802008-03-31 03:47:03 +00009355 case 0: offset = -8; break;
Filip Navarac67b6b72009-10-15 12:12:11 +02009356 case 1: offset = 4; break;
9357 case 2: offset = -4; break;
pbrookb0109802008-03-31 03:47:03 +00009358 case 3: offset = 0; break;
pbrook9ee6e8b2007-11-11 00:04:49 +00009359 default: abort();
9360 }
9361 if (offset)
pbrookb0109802008-03-31 03:47:03 +00009362 tcg_gen_addi_i32(addr, addr, offset);
9363 store_reg(s, rn, addr);
9364 } else {
Peter Maydell7d1b0092011-03-06 21:39:54 +00009365 tcg_temp_free_i32(addr);
pbrook9ee6e8b2007-11-11 00:04:49 +00009366 }
pbrookb0109802008-03-31 03:47:03 +00009367 gen_rfe(s, tmp, tmp2);
Filip Navarac67b6b72009-10-15 12:12:11 +02009368 return;
pbrook9ee6e8b2007-11-11 00:04:49 +00009369 } else if ((insn & 0x0e000000) == 0x0a000000) {
bellard99c475a2005-01-31 20:45:13 +00009370 /* branch link and change to thumb (blx <offset>) */
9371 int32_t offset;
9372
9373 val = (uint32_t)s->pc;
Peter Maydell7d1b0092011-03-06 21:39:54 +00009374 tmp = tcg_temp_new_i32();
pbrookd9ba4832008-03-31 03:46:50 +00009375 tcg_gen_movi_i32(tmp, val);
9376 store_reg(s, 14, tmp);
bellard99c475a2005-01-31 20:45:13 +00009377 /* Sign-extend the 24-bit offset */
9378 offset = (((int32_t)insn) << 8) >> 8;
9379 /* offset * 4 + bit24 * 2 + (thumb bit) */
9380 val += (offset << 2) | ((insn >> 23) & 2) | 1;
9381 /* pipeline offset */
9382 val += 4;
Dmitry Eremin-Solenikovbe5e7a72011-04-04 17:38:44 +04009383 /* protected by ARCH(5); above, near the start of uncond block */
pbrookd9ba4832008-03-31 03:46:50 +00009384 gen_bx_im(s, val);
bellard99c475a2005-01-31 20:45:13 +00009385 return;
balrog2e232132007-08-01 02:31:54 +00009386 } else if ((insn & 0x0e000f00) == 0x0c000100) {
Peter Maydelld614a512014-10-28 19:24:01 +00009387 if (arm_dc_feature(s, ARM_FEATURE_IWMMXT)) {
balrog2e232132007-08-01 02:31:54 +00009388 /* iWMMXt register transfer. */
Peter Maydellc0f4af12014-09-29 18:48:48 +01009389 if (extract32(s->c15_cpar, 1, 1)) {
Peter Maydell7dcc1f82014-10-28 19:24:03 +00009390 if (!disas_iwmmxt_insn(s, insn)) {
balrog2e232132007-08-01 02:31:54 +00009391 return;
Peter Maydellc0f4af12014-09-29 18:48:48 +01009392 }
9393 }
balrog2e232132007-08-01 02:31:54 +00009394 }
Richard Henderson8b7209f2018-03-02 10:45:44 +00009395 } else if ((insn & 0x0e000a00) == 0x0c000800
9396 && arm_dc_feature(s, ARM_FEATURE_V8)) {
9397 if (disas_neon_insn_3same_ext(s, insn)) {
9398 goto illegal_op;
9399 }
9400 return;
Richard Henderson638808f2018-03-02 10:45:45 +00009401 } else if ((insn & 0x0f000a00) == 0x0e000800
9402 && arm_dc_feature(s, ARM_FEATURE_V8)) {
9403 if (disas_neon_insn_2reg_scalar_ext(s, insn)) {
9404 goto illegal_op;
9405 }
9406 return;
bellardb7bcbe92005-02-22 19:27:29 +00009407 } else if ((insn & 0x0fe00000) == 0x0c400000) {
9408 /* Coprocessor double register transfer. */
Dmitry Eremin-Solenikovbe5e7a72011-04-04 17:38:44 +04009409 ARCH(5TE);
bellardb7bcbe92005-02-22 19:27:29 +00009410 } else if ((insn & 0x0f000010) == 0x0e000010) {
9411 /* Additional coprocessor register transfer. */
balrog7997d922008-07-19 10:34:35 +00009412 } else if ((insn & 0x0ff10020) == 0x01000000) {
pbrook9ee6e8b2007-11-11 00:04:49 +00009413 uint32_t mask;
9414 uint32_t val;
bellardb5ff1b32005-11-26 10:38:39 +00009415 /* cps (privileged) */
pbrook9ee6e8b2007-11-11 00:04:49 +00009416 if (IS_USER(s))
9417 return;
9418 mask = val = 0;
9419 if (insn & (1 << 19)) {
9420 if (insn & (1 << 8))
9421 mask |= CPSR_A;
9422 if (insn & (1 << 7))
9423 mask |= CPSR_I;
9424 if (insn & (1 << 6))
9425 mask |= CPSR_F;
9426 if (insn & (1 << 18))
9427 val |= mask;
9428 }
balrog7997d922008-07-19 10:34:35 +00009429 if (insn & (1 << 17)) {
pbrook9ee6e8b2007-11-11 00:04:49 +00009430 mask |= CPSR_M;
9431 val |= (insn & 0x1f);
9432 }
9433 if (mask) {
Filip Navara2fbac542009-10-15 12:43:04 +02009434 gen_set_psr_im(s, mask, 0, val);
bellardb5ff1b32005-11-26 10:38:39 +00009435 }
9436 return;
bellard99c475a2005-01-31 20:45:13 +00009437 }
bellard2c0262a2003-09-30 20:34:21 +00009438 goto illegal_op;
bellard99c475a2005-01-31 20:45:13 +00009439 }
bellard2c0262a2003-09-30 20:34:21 +00009440 if (cond != 0xe) {
9441 /* if not always execute, we generate a conditional jump to
9442 next instruction */
Roman Kaplc2d96442018-08-20 11:24:31 +01009443 arm_skip_unless(s, cond);
bellard2c0262a2003-09-30 20:34:21 +00009444 }
bellard99c475a2005-01-31 20:45:13 +00009445 if ((insn & 0x0f900000) == 0x03000000) {
pbrook9ee6e8b2007-11-11 00:04:49 +00009446 if ((insn & (1 << 21)) == 0) {
9447 ARCH(6T2);
9448 rd = (insn >> 12) & 0xf;
9449 val = ((insn >> 4) & 0xf000) | (insn & 0xfff);
9450 if ((insn & (1 << 22)) == 0) {
9451 /* MOVW */
Peter Maydell7d1b0092011-03-06 21:39:54 +00009452 tmp = tcg_temp_new_i32();
pbrook5e3f8782008-03-31 03:47:34 +00009453 tcg_gen_movi_i32(tmp, val);
pbrook9ee6e8b2007-11-11 00:04:49 +00009454 } else {
9455 /* MOVT */
pbrook5e3f8782008-03-31 03:47:34 +00009456 tmp = load_reg(s, rd);
pbrook86831432008-05-11 12:22:01 +00009457 tcg_gen_ext16u_i32(tmp, tmp);
pbrook5e3f8782008-03-31 03:47:34 +00009458 tcg_gen_ori_i32(tmp, tmp, val << 16);
pbrook9ee6e8b2007-11-11 00:04:49 +00009459 }
pbrook5e3f8782008-03-31 03:47:34 +00009460 store_reg(s, rd, tmp);
pbrook9ee6e8b2007-11-11 00:04:49 +00009461 } else {
9462 if (((insn >> 12) & 0xf) != 0xf)
9463 goto illegal_op;
9464 if (((insn >> 16) & 0xf) == 0) {
9465 gen_nop_hint(s, insn & 0xff);
9466 } else {
9467 /* CPSR = immediate */
9468 val = insn & 0xff;
9469 shift = ((insn >> 8) & 0xf) * 2;
9470 if (shift)
9471 val = (val >> shift) | (val << (32 - shift));
pbrook9ee6e8b2007-11-11 00:04:49 +00009472 i = ((insn & (1 << 22)) != 0);
Peter Maydell7dcc1f82014-10-28 19:24:03 +00009473 if (gen_set_psr_im(s, msr_mask(s, (insn >> 16) & 0xf, i),
9474 i, val)) {
pbrook9ee6e8b2007-11-11 00:04:49 +00009475 goto illegal_op;
Peter Maydell7dcc1f82014-10-28 19:24:03 +00009476 }
pbrook9ee6e8b2007-11-11 00:04:49 +00009477 }
9478 }
bellard99c475a2005-01-31 20:45:13 +00009479 } else if ((insn & 0x0f900000) == 0x01000000
9480 && (insn & 0x00000090) != 0x00000090) {
9481 /* miscellaneous instructions */
9482 op1 = (insn >> 21) & 3;
9483 sh = (insn >> 4) & 0xf;
9484 rm = insn & 0xf;
9485 switch (sh) {
Peter Maydell8bfd0552016-03-16 17:05:58 +00009486 case 0x0: /* MSR, MRS */
9487 if (insn & (1 << 9)) {
9488 /* MSR (banked) and MRS (banked) */
9489 int sysm = extract32(insn, 16, 4) |
9490 (extract32(insn, 8, 1) << 4);
9491 int r = extract32(insn, 22, 1);
9492
9493 if (op1 & 1) {
9494 /* MSR (banked) */
9495 gen_msr_banked(s, r, sysm, rm);
9496 } else {
9497 /* MRS (banked) */
9498 int rd = extract32(insn, 12, 4);
9499
9500 gen_mrs_banked(s, r, sysm, rd);
9501 }
9502 break;
9503 }
9504
9505 /* MSR, MRS (for PSRs) */
bellard99c475a2005-01-31 20:45:13 +00009506 if (op1 & 1) {
bellardb5ff1b32005-11-26 10:38:39 +00009507 /* PSR = reg */
Filip Navara2fbac542009-10-15 12:43:04 +02009508 tmp = load_reg(s, rm);
pbrook2ae23e72006-02-11 16:20:39 +00009509 i = ((op1 & 2) != 0);
Peter Maydell7dcc1f82014-10-28 19:24:03 +00009510 if (gen_set_psr(s, msr_mask(s, (insn >> 16) & 0xf, i), i, tmp))
bellardb5ff1b32005-11-26 10:38:39 +00009511 goto illegal_op;
bellard99c475a2005-01-31 20:45:13 +00009512 } else {
pbrook2ae23e72006-02-11 16:20:39 +00009513 /* reg = PSR */
bellard99c475a2005-01-31 20:45:13 +00009514 rd = (insn >> 12) & 0xf;
bellardb5ff1b32005-11-26 10:38:39 +00009515 if (op1 & 2) {
9516 if (IS_USER(s))
9517 goto illegal_op;
pbrookd9ba4832008-03-31 03:46:50 +00009518 tmp = load_cpu_field(spsr);
bellardb5ff1b32005-11-26 10:38:39 +00009519 } else {
Peter Maydell7d1b0092011-03-06 21:39:54 +00009520 tmp = tcg_temp_new_i32();
Blue Swirl9ef39272012-09-04 20:19:15 +00009521 gen_helper_cpsr_read(tmp, cpu_env);
bellardb5ff1b32005-11-26 10:38:39 +00009522 }
pbrookd9ba4832008-03-31 03:46:50 +00009523 store_reg(s, rd, tmp);
bellard99c475a2005-01-31 20:45:13 +00009524 }
bellardb8a9e8f2005-02-07 23:10:07 +00009525 break;
bellard99c475a2005-01-31 20:45:13 +00009526 case 0x1:
9527 if (op1 == 1) {
9528 /* branch/exchange thumb (bx). */
Dmitry Eremin-Solenikovbe5e7a72011-04-04 17:38:44 +04009529 ARCH(4T);
pbrookd9ba4832008-03-31 03:46:50 +00009530 tmp = load_reg(s, rm);
9531 gen_bx(s, tmp);
bellard99c475a2005-01-31 20:45:13 +00009532 } else if (op1 == 3) {
9533 /* clz */
Dmitry Eremin-Solenikovbe5e7a72011-04-04 17:38:44 +04009534 ARCH(5);
bellard99c475a2005-01-31 20:45:13 +00009535 rd = (insn >> 12) & 0xf;
pbrook1497c962008-03-31 03:45:50 +00009536 tmp = load_reg(s, rm);
Richard Henderson7539a012016-11-16 11:49:06 +01009537 tcg_gen_clzi_i32(tmp, tmp, 32);
pbrook1497c962008-03-31 03:45:50 +00009538 store_reg(s, rd, tmp);
bellard99c475a2005-01-31 20:45:13 +00009539 } else {
9540 goto illegal_op;
9541 }
9542 break;
bellardb5ff1b32005-11-26 10:38:39 +00009543 case 0x2:
9544 if (op1 == 1) {
9545 ARCH(5J); /* bxj */
9546 /* Trivial implementation equivalent to bx. */
pbrookd9ba4832008-03-31 03:46:50 +00009547 tmp = load_reg(s, rm);
9548 gen_bx(s, tmp);
bellardb5ff1b32005-11-26 10:38:39 +00009549 } else {
9550 goto illegal_op;
9551 }
9552 break;
bellard99c475a2005-01-31 20:45:13 +00009553 case 0x3:
9554 if (op1 != 1)
9555 goto illegal_op;
9556
Dmitry Eremin-Solenikovbe5e7a72011-04-04 17:38:44 +04009557 ARCH(5);
bellard99c475a2005-01-31 20:45:13 +00009558 /* branch link/exchange thumb (blx) */
pbrookd9ba4832008-03-31 03:46:50 +00009559 tmp = load_reg(s, rm);
Peter Maydell7d1b0092011-03-06 21:39:54 +00009560 tmp2 = tcg_temp_new_i32();
pbrookd9ba4832008-03-31 03:46:50 +00009561 tcg_gen_movi_i32(tmp2, s->pc);
9562 store_reg(s, 14, tmp2);
9563 gen_bx(s, tmp);
bellard99c475a2005-01-31 20:45:13 +00009564 break;
Will Newtoneb0ecd52014-02-26 17:20:07 +00009565 case 0x4:
9566 {
9567 /* crc32/crc32c */
9568 uint32_t c = extract32(insn, 8, 4);
9569
9570 /* Check this CPU supports ARMv8 CRC instructions.
9571 * op1 == 3 is UNPREDICTABLE but handle as UNDEFINED.
9572 * Bits 8, 10 and 11 should be zero.
9573 */
Richard Henderson962fcbf2018-10-24 07:50:16 +01009574 if (!dc_isar_feature(aa32_crc32, s) || op1 == 0x3 || (c & 0xd) != 0) {
Will Newtoneb0ecd52014-02-26 17:20:07 +00009575 goto illegal_op;
9576 }
9577
9578 rn = extract32(insn, 16, 4);
9579 rd = extract32(insn, 12, 4);
9580
9581 tmp = load_reg(s, rn);
9582 tmp2 = load_reg(s, rm);
Peter Maydellaa633462014-06-09 15:43:25 +01009583 if (op1 == 0) {
9584 tcg_gen_andi_i32(tmp2, tmp2, 0xff);
9585 } else if (op1 == 1) {
9586 tcg_gen_andi_i32(tmp2, tmp2, 0xffff);
9587 }
Will Newtoneb0ecd52014-02-26 17:20:07 +00009588 tmp3 = tcg_const_i32(1 << op1);
9589 if (c & 0x2) {
9590 gen_helper_crc32c(tmp, tmp, tmp2, tmp3);
9591 } else {
9592 gen_helper_crc32(tmp, tmp, tmp2, tmp3);
9593 }
9594 tcg_temp_free_i32(tmp2);
9595 tcg_temp_free_i32(tmp3);
9596 store_reg(s, rd, tmp);
9597 break;
9598 }
bellard99c475a2005-01-31 20:45:13 +00009599 case 0x5: /* saturating add/subtract */
Dmitry Eremin-Solenikovbe5e7a72011-04-04 17:38:44 +04009600 ARCH(5TE);
bellard99c475a2005-01-31 20:45:13 +00009601 rd = (insn >> 12) & 0xf;
9602 rn = (insn >> 16) & 0xf;
balrogb40d0352008-09-20 03:18:07 +00009603 tmp = load_reg(s, rm);
pbrook5e3f8782008-03-31 03:47:34 +00009604 tmp2 = load_reg(s, rn);
bellardff8263a2005-05-13 22:45:23 +00009605 if (op1 & 2)
Blue Swirl9ef39272012-09-04 20:19:15 +00009606 gen_helper_double_saturate(tmp2, cpu_env, tmp2);
bellard99c475a2005-01-31 20:45:13 +00009607 if (op1 & 1)
Blue Swirl9ef39272012-09-04 20:19:15 +00009608 gen_helper_sub_saturate(tmp, cpu_env, tmp, tmp2);
bellard99c475a2005-01-31 20:45:13 +00009609 else
Blue Swirl9ef39272012-09-04 20:19:15 +00009610 gen_helper_add_saturate(tmp, cpu_env, tmp, tmp2);
Peter Maydell7d1b0092011-03-06 21:39:54 +00009611 tcg_temp_free_i32(tmp2);
pbrook5e3f8782008-03-31 03:47:34 +00009612 store_reg(s, rd, tmp);
bellard99c475a2005-01-31 20:45:13 +00009613 break;
Peter Maydell55c544e2018-08-20 11:24:32 +01009614 case 0x6: /* ERET */
9615 if (op1 != 3) {
9616 goto illegal_op;
9617 }
9618 if (!arm_dc_feature(s, ARM_FEATURE_V7VE)) {
9619 goto illegal_op;
9620 }
9621 if ((insn & 0x000fff0f) != 0x0000000e) {
9622 /* UNPREDICTABLE; we choose to UNDEF */
9623 goto illegal_op;
9624 }
9625
9626 if (s->current_el == 2) {
9627 tmp = load_cpu_field(elr_el[2]);
9628 } else {
9629 tmp = load_reg(s, 14);
9630 }
9631 gen_exception_return(s, tmp);
9632 break;
Adam Lackorzynski49e14942010-12-07 12:01:44 +00009633 case 7:
Peter Maydelld4a2dc62014-04-15 19:18:38 +01009634 {
9635 int imm16 = extract32(insn, 0, 4) | (extract32(insn, 8, 12) << 4);
Peter Maydell37e64562014-10-24 12:19:13 +01009636 switch (op1) {
Peter Maydell19a6e312016-10-24 16:26:56 +01009637 case 0:
9638 /* HLT */
9639 gen_hlt(s, imm16);
9640 break;
Peter Maydell37e64562014-10-24 12:19:13 +01009641 case 1:
9642 /* bkpt */
9643 ARCH(5);
Peter Maydellc900a2e2018-03-23 18:26:46 +00009644 gen_exception_bkpt_insn(s, 4, syn_aa32_bkpt(imm16, false));
Peter Maydell37e64562014-10-24 12:19:13 +01009645 break;
9646 case 2:
9647 /* Hypervisor call (v7) */
9648 ARCH(7);
9649 if (IS_USER(s)) {
9650 goto illegal_op;
9651 }
9652 gen_hvc(s, imm16);
9653 break;
9654 case 3:
9655 /* Secure monitor call (v6+) */
9656 ARCH(6K);
9657 if (IS_USER(s)) {
9658 goto illegal_op;
9659 }
9660 gen_smc(s);
9661 break;
9662 default:
Peter Maydell19a6e312016-10-24 16:26:56 +01009663 g_assert_not_reached();
Adam Lackorzynski49e14942010-12-07 12:01:44 +00009664 }
pbrook06c949e2006-02-04 19:35:26 +00009665 break;
Peter Maydelld4a2dc62014-04-15 19:18:38 +01009666 }
bellard99c475a2005-01-31 20:45:13 +00009667 case 0x8: /* signed multiply */
9668 case 0xa:
9669 case 0xc:
9670 case 0xe:
Dmitry Eremin-Solenikovbe5e7a72011-04-04 17:38:44 +04009671 ARCH(5TE);
bellard99c475a2005-01-31 20:45:13 +00009672 rs = (insn >> 8) & 0xf;
9673 rn = (insn >> 12) & 0xf;
9674 rd = (insn >> 16) & 0xf;
9675 if (op1 == 1) {
9676 /* (32 * 16) >> 16 */
pbrook5e3f8782008-03-31 03:47:34 +00009677 tmp = load_reg(s, rm);
9678 tmp2 = load_reg(s, rs);
bellard99c475a2005-01-31 20:45:13 +00009679 if (sh & 4)
pbrook5e3f8782008-03-31 03:47:34 +00009680 tcg_gen_sari_i32(tmp2, tmp2, 16);
bellard99c475a2005-01-31 20:45:13 +00009681 else
pbrook5e3f8782008-03-31 03:47:34 +00009682 gen_sxth(tmp2);
pbrooka7812ae2008-11-17 14:43:54 +00009683 tmp64 = gen_muls_i64_i32(tmp, tmp2);
9684 tcg_gen_shri_i64(tmp64, tmp64, 16);
Peter Maydell7d1b0092011-03-06 21:39:54 +00009685 tmp = tcg_temp_new_i32();
Richard Hendersonecc7b3a2015-07-24 11:49:53 -07009686 tcg_gen_extrl_i64_i32(tmp, tmp64);
Juha Riihimäkib75263d2009-10-22 15:17:36 +03009687 tcg_temp_free_i64(tmp64);
bellard99c475a2005-01-31 20:45:13 +00009688 if ((sh & 2) == 0) {
pbrook5e3f8782008-03-31 03:47:34 +00009689 tmp2 = load_reg(s, rn);
Blue Swirl9ef39272012-09-04 20:19:15 +00009690 gen_helper_add_setq(tmp, cpu_env, tmp, tmp2);
Peter Maydell7d1b0092011-03-06 21:39:54 +00009691 tcg_temp_free_i32(tmp2);
bellard99c475a2005-01-31 20:45:13 +00009692 }
pbrook5e3f8782008-03-31 03:47:34 +00009693 store_reg(s, rd, tmp);
bellard99c475a2005-01-31 20:45:13 +00009694 } else {
9695 /* 16 * 16 */
pbrook5e3f8782008-03-31 03:47:34 +00009696 tmp = load_reg(s, rm);
9697 tmp2 = load_reg(s, rs);
9698 gen_mulxy(tmp, tmp2, sh & 2, sh & 4);
Peter Maydell7d1b0092011-03-06 21:39:54 +00009699 tcg_temp_free_i32(tmp2);
bellard99c475a2005-01-31 20:45:13 +00009700 if (op1 == 2) {
pbrooka7812ae2008-11-17 14:43:54 +00009701 tmp64 = tcg_temp_new_i64();
9702 tcg_gen_ext_i32_i64(tmp64, tmp);
Peter Maydell7d1b0092011-03-06 21:39:54 +00009703 tcg_temp_free_i32(tmp);
pbrooka7812ae2008-11-17 14:43:54 +00009704 gen_addq(s, tmp64, rn, rd);
9705 gen_storeq_reg(s, rn, rd, tmp64);
Juha Riihimäkib75263d2009-10-22 15:17:36 +03009706 tcg_temp_free_i64(tmp64);
bellard99c475a2005-01-31 20:45:13 +00009707 } else {
bellard99c475a2005-01-31 20:45:13 +00009708 if (op1 == 0) {
pbrook5e3f8782008-03-31 03:47:34 +00009709 tmp2 = load_reg(s, rn);
Blue Swirl9ef39272012-09-04 20:19:15 +00009710 gen_helper_add_setq(tmp, cpu_env, tmp, tmp2);
Peter Maydell7d1b0092011-03-06 21:39:54 +00009711 tcg_temp_free_i32(tmp2);
bellard99c475a2005-01-31 20:45:13 +00009712 }
pbrook5e3f8782008-03-31 03:47:34 +00009713 store_reg(s, rd, tmp);
bellard99c475a2005-01-31 20:45:13 +00009714 }
9715 }
9716 break;
9717 default:
9718 goto illegal_op;
9719 }
9720 } else if (((insn & 0x0e000000) == 0 &&
9721 (insn & 0x00000090) != 0x90) ||
9722 ((insn & 0x0e000000) == (1 << 25))) {
bellard2c0262a2003-09-30 20:34:21 +00009723 int set_cc, logic_cc, shiftop;
ths3b46e622007-09-17 08:09:54 +00009724
bellard2c0262a2003-09-30 20:34:21 +00009725 op1 = (insn >> 21) & 0xf;
9726 set_cc = (insn >> 20) & 1;
9727 logic_cc = table_logic_cc[op1] & set_cc;
9728
9729 /* data processing instruction */
9730 if (insn & (1 << 25)) {
9731 /* immediate operand */
9732 val = insn & 0xff;
9733 shift = ((insn >> 8) & 0xf) * 2;
Juha Riihimäkie9bb4aa2009-05-06 09:15:38 +03009734 if (shift) {
bellard2c0262a2003-09-30 20:34:21 +00009735 val = (val >> shift) | (val << (32 - shift));
Juha Riihimäkie9bb4aa2009-05-06 09:15:38 +03009736 }
Peter Maydell7d1b0092011-03-06 21:39:54 +00009737 tmp2 = tcg_temp_new_i32();
Juha Riihimäkie9bb4aa2009-05-06 09:15:38 +03009738 tcg_gen_movi_i32(tmp2, val);
9739 if (logic_cc && shift) {
9740 gen_set_CF_bit31(tmp2);
9741 }
bellard2c0262a2003-09-30 20:34:21 +00009742 } else {
9743 /* register */
9744 rm = (insn) & 0xf;
Juha Riihimäkie9bb4aa2009-05-06 09:15:38 +03009745 tmp2 = load_reg(s, rm);
bellard2c0262a2003-09-30 20:34:21 +00009746 shiftop = (insn >> 5) & 3;
9747 if (!(insn & (1 << 4))) {
9748 shift = (insn >> 7) & 0x1f;
Juha Riihimäkie9bb4aa2009-05-06 09:15:38 +03009749 gen_arm_shift_im(tmp2, shiftop, shift, logic_cc);
bellard2c0262a2003-09-30 20:34:21 +00009750 } else {
9751 rs = (insn >> 8) & 0xf;
pbrook8984bd22008-03-31 03:47:48 +00009752 tmp = load_reg(s, rs);
Juha Riihimäkie9bb4aa2009-05-06 09:15:38 +03009753 gen_arm_shift_reg(tmp2, shiftop, tmp, logic_cc);
bellard2c0262a2003-09-30 20:34:21 +00009754 }
9755 }
9756 if (op1 != 0x0f && op1 != 0x0d) {
9757 rn = (insn >> 16) & 0xf;
Juha Riihimäkie9bb4aa2009-05-06 09:15:38 +03009758 tmp = load_reg(s, rn);
9759 } else {
Richard Hendersonf7647182017-11-02 12:47:37 +01009760 tmp = NULL;
bellard2c0262a2003-09-30 20:34:21 +00009761 }
9762 rd = (insn >> 12) & 0xf;
9763 switch(op1) {
9764 case 0x00:
Juha Riihimäkie9bb4aa2009-05-06 09:15:38 +03009765 tcg_gen_and_i32(tmp, tmp, tmp2);
9766 if (logic_cc) {
9767 gen_logic_CC(tmp);
9768 }
Peter Maydell7dcc1f82014-10-28 19:24:03 +00009769 store_reg_bx(s, rd, tmp);
bellard2c0262a2003-09-30 20:34:21 +00009770 break;
9771 case 0x01:
Juha Riihimäkie9bb4aa2009-05-06 09:15:38 +03009772 tcg_gen_xor_i32(tmp, tmp, tmp2);
9773 if (logic_cc) {
9774 gen_logic_CC(tmp);
9775 }
Peter Maydell7dcc1f82014-10-28 19:24:03 +00009776 store_reg_bx(s, rd, tmp);
bellard2c0262a2003-09-30 20:34:21 +00009777 break;
9778 case 0x02:
bellardb5ff1b32005-11-26 10:38:39 +00009779 if (set_cc && rd == 15) {
9780 /* SUBS r15, ... is used for exception return. */
Juha Riihimäkie9bb4aa2009-05-06 09:15:38 +03009781 if (IS_USER(s)) {
bellardb5ff1b32005-11-26 10:38:39 +00009782 goto illegal_op;
Juha Riihimäkie9bb4aa2009-05-06 09:15:38 +03009783 }
Aurelien Jarno72485ec2012-10-05 15:04:44 +01009784 gen_sub_CC(tmp, tmp, tmp2);
Juha Riihimäkie9bb4aa2009-05-06 09:15:38 +03009785 gen_exception_return(s, tmp);
bellardb5ff1b32005-11-26 10:38:39 +00009786 } else {
Juha Riihimäkie9bb4aa2009-05-06 09:15:38 +03009787 if (set_cc) {
Aurelien Jarno72485ec2012-10-05 15:04:44 +01009788 gen_sub_CC(tmp, tmp, tmp2);
Juha Riihimäkie9bb4aa2009-05-06 09:15:38 +03009789 } else {
9790 tcg_gen_sub_i32(tmp, tmp, tmp2);
9791 }
Peter Maydell7dcc1f82014-10-28 19:24:03 +00009792 store_reg_bx(s, rd, tmp);
bellardb5ff1b32005-11-26 10:38:39 +00009793 }
bellard2c0262a2003-09-30 20:34:21 +00009794 break;
9795 case 0x03:
Juha Riihimäkie9bb4aa2009-05-06 09:15:38 +03009796 if (set_cc) {
Aurelien Jarno72485ec2012-10-05 15:04:44 +01009797 gen_sub_CC(tmp, tmp2, tmp);
Juha Riihimäkie9bb4aa2009-05-06 09:15:38 +03009798 } else {
9799 tcg_gen_sub_i32(tmp, tmp2, tmp);
9800 }
Peter Maydell7dcc1f82014-10-28 19:24:03 +00009801 store_reg_bx(s, rd, tmp);
bellard2c0262a2003-09-30 20:34:21 +00009802 break;
9803 case 0x04:
Juha Riihimäkie9bb4aa2009-05-06 09:15:38 +03009804 if (set_cc) {
Aurelien Jarno72485ec2012-10-05 15:04:44 +01009805 gen_add_CC(tmp, tmp, tmp2);
Juha Riihimäkie9bb4aa2009-05-06 09:15:38 +03009806 } else {
9807 tcg_gen_add_i32(tmp, tmp, tmp2);
9808 }
Peter Maydell7dcc1f82014-10-28 19:24:03 +00009809 store_reg_bx(s, rd, tmp);
bellard2c0262a2003-09-30 20:34:21 +00009810 break;
9811 case 0x05:
Juha Riihimäkie9bb4aa2009-05-06 09:15:38 +03009812 if (set_cc) {
Richard Henderson49b4c312013-02-19 23:52:08 -08009813 gen_adc_CC(tmp, tmp, tmp2);
Juha Riihimäkie9bb4aa2009-05-06 09:15:38 +03009814 } else {
9815 gen_add_carry(tmp, tmp, tmp2);
9816 }
Peter Maydell7dcc1f82014-10-28 19:24:03 +00009817 store_reg_bx(s, rd, tmp);
bellard2c0262a2003-09-30 20:34:21 +00009818 break;
9819 case 0x06:
Juha Riihimäkie9bb4aa2009-05-06 09:15:38 +03009820 if (set_cc) {
Richard Henderson2de68a42013-02-19 23:52:09 -08009821 gen_sbc_CC(tmp, tmp, tmp2);
Juha Riihimäkie9bb4aa2009-05-06 09:15:38 +03009822 } else {
9823 gen_sub_carry(tmp, tmp, tmp2);
9824 }
Peter Maydell7dcc1f82014-10-28 19:24:03 +00009825 store_reg_bx(s, rd, tmp);
bellard2c0262a2003-09-30 20:34:21 +00009826 break;
9827 case 0x07:
Juha Riihimäkie9bb4aa2009-05-06 09:15:38 +03009828 if (set_cc) {
Richard Henderson2de68a42013-02-19 23:52:09 -08009829 gen_sbc_CC(tmp, tmp2, tmp);
Juha Riihimäkie9bb4aa2009-05-06 09:15:38 +03009830 } else {
9831 gen_sub_carry(tmp, tmp2, tmp);
9832 }
Peter Maydell7dcc1f82014-10-28 19:24:03 +00009833 store_reg_bx(s, rd, tmp);
bellard2c0262a2003-09-30 20:34:21 +00009834 break;
9835 case 0x08:
9836 if (set_cc) {
Juha Riihimäkie9bb4aa2009-05-06 09:15:38 +03009837 tcg_gen_and_i32(tmp, tmp, tmp2);
9838 gen_logic_CC(tmp);
bellard2c0262a2003-09-30 20:34:21 +00009839 }
Peter Maydell7d1b0092011-03-06 21:39:54 +00009840 tcg_temp_free_i32(tmp);
bellard2c0262a2003-09-30 20:34:21 +00009841 break;
9842 case 0x09:
9843 if (set_cc) {
Juha Riihimäkie9bb4aa2009-05-06 09:15:38 +03009844 tcg_gen_xor_i32(tmp, tmp, tmp2);
9845 gen_logic_CC(tmp);
bellard2c0262a2003-09-30 20:34:21 +00009846 }
Peter Maydell7d1b0092011-03-06 21:39:54 +00009847 tcg_temp_free_i32(tmp);
bellard2c0262a2003-09-30 20:34:21 +00009848 break;
9849 case 0x0a:
9850 if (set_cc) {
Aurelien Jarno72485ec2012-10-05 15:04:44 +01009851 gen_sub_CC(tmp, tmp, tmp2);
bellard2c0262a2003-09-30 20:34:21 +00009852 }
Peter Maydell7d1b0092011-03-06 21:39:54 +00009853 tcg_temp_free_i32(tmp);
bellard2c0262a2003-09-30 20:34:21 +00009854 break;
9855 case 0x0b:
9856 if (set_cc) {
Aurelien Jarno72485ec2012-10-05 15:04:44 +01009857 gen_add_CC(tmp, tmp, tmp2);
bellard2c0262a2003-09-30 20:34:21 +00009858 }
Peter Maydell7d1b0092011-03-06 21:39:54 +00009859 tcg_temp_free_i32(tmp);
bellard2c0262a2003-09-30 20:34:21 +00009860 break;
9861 case 0x0c:
Juha Riihimäkie9bb4aa2009-05-06 09:15:38 +03009862 tcg_gen_or_i32(tmp, tmp, tmp2);
9863 if (logic_cc) {
9864 gen_logic_CC(tmp);
9865 }
Peter Maydell7dcc1f82014-10-28 19:24:03 +00009866 store_reg_bx(s, rd, tmp);
bellard2c0262a2003-09-30 20:34:21 +00009867 break;
9868 case 0x0d:
bellardb5ff1b32005-11-26 10:38:39 +00009869 if (logic_cc && rd == 15) {
9870 /* MOVS r15, ... is used for exception return. */
Juha Riihimäkie9bb4aa2009-05-06 09:15:38 +03009871 if (IS_USER(s)) {
bellardb5ff1b32005-11-26 10:38:39 +00009872 goto illegal_op;
Juha Riihimäkie9bb4aa2009-05-06 09:15:38 +03009873 }
9874 gen_exception_return(s, tmp2);
bellardb5ff1b32005-11-26 10:38:39 +00009875 } else {
Juha Riihimäkie9bb4aa2009-05-06 09:15:38 +03009876 if (logic_cc) {
9877 gen_logic_CC(tmp2);
9878 }
Peter Maydell7dcc1f82014-10-28 19:24:03 +00009879 store_reg_bx(s, rd, tmp2);
bellardb5ff1b32005-11-26 10:38:39 +00009880 }
bellard2c0262a2003-09-30 20:34:21 +00009881 break;
9882 case 0x0e:
Aurelien Jarnof669df22009-10-15 16:45:14 +02009883 tcg_gen_andc_i32(tmp, tmp, tmp2);
Juha Riihimäkie9bb4aa2009-05-06 09:15:38 +03009884 if (logic_cc) {
9885 gen_logic_CC(tmp);
9886 }
Peter Maydell7dcc1f82014-10-28 19:24:03 +00009887 store_reg_bx(s, rd, tmp);
bellard2c0262a2003-09-30 20:34:21 +00009888 break;
9889 default:
9890 case 0x0f:
Juha Riihimäkie9bb4aa2009-05-06 09:15:38 +03009891 tcg_gen_not_i32(tmp2, tmp2);
9892 if (logic_cc) {
9893 gen_logic_CC(tmp2);
9894 }
Peter Maydell7dcc1f82014-10-28 19:24:03 +00009895 store_reg_bx(s, rd, tmp2);
bellard2c0262a2003-09-30 20:34:21 +00009896 break;
9897 }
Juha Riihimäkie9bb4aa2009-05-06 09:15:38 +03009898 if (op1 != 0x0f && op1 != 0x0d) {
Peter Maydell7d1b0092011-03-06 21:39:54 +00009899 tcg_temp_free_i32(tmp2);
Juha Riihimäkie9bb4aa2009-05-06 09:15:38 +03009900 }
bellard2c0262a2003-09-30 20:34:21 +00009901 } else {
9902 /* other instructions */
9903 op1 = (insn >> 24) & 0xf;
9904 switch(op1) {
9905 case 0x0:
9906 case 0x1:
bellard99c475a2005-01-31 20:45:13 +00009907 /* multiplies, extra load/stores */
bellard2c0262a2003-09-30 20:34:21 +00009908 sh = (insn >> 5) & 3;
9909 if (sh == 0) {
9910 if (op1 == 0x0) {
9911 rd = (insn >> 16) & 0xf;
9912 rn = (insn >> 12) & 0xf;
9913 rs = (insn >> 8) & 0xf;
9914 rm = (insn) & 0xf;
pbrook9ee6e8b2007-11-11 00:04:49 +00009915 op1 = (insn >> 20) & 0xf;
9916 switch (op1) {
9917 case 0: case 1: case 2: case 3: case 6:
bellard2c0262a2003-09-30 20:34:21 +00009918 /* 32 bit mul */
pbrook5e3f8782008-03-31 03:47:34 +00009919 tmp = load_reg(s, rs);
9920 tmp2 = load_reg(s, rm);
9921 tcg_gen_mul_i32(tmp, tmp, tmp2);
Peter Maydell7d1b0092011-03-06 21:39:54 +00009922 tcg_temp_free_i32(tmp2);
pbrook9ee6e8b2007-11-11 00:04:49 +00009923 if (insn & (1 << 22)) {
9924 /* Subtract (mls) */
9925 ARCH(6T2);
pbrook5e3f8782008-03-31 03:47:34 +00009926 tmp2 = load_reg(s, rn);
9927 tcg_gen_sub_i32(tmp, tmp2, tmp);
Peter Maydell7d1b0092011-03-06 21:39:54 +00009928 tcg_temp_free_i32(tmp2);
pbrook9ee6e8b2007-11-11 00:04:49 +00009929 } else if (insn & (1 << 21)) {
9930 /* Add */
pbrook5e3f8782008-03-31 03:47:34 +00009931 tmp2 = load_reg(s, rn);
9932 tcg_gen_add_i32(tmp, tmp, tmp2);
Peter Maydell7d1b0092011-03-06 21:39:54 +00009933 tcg_temp_free_i32(tmp2);
bellard2c0262a2003-09-30 20:34:21 +00009934 }
ths5fafdf22007-09-16 21:08:06 +00009935 if (insn & (1 << 20))
pbrook5e3f8782008-03-31 03:47:34 +00009936 gen_logic_CC(tmp);
9937 store_reg(s, rd, tmp);
pbrook9ee6e8b2007-11-11 00:04:49 +00009938 break;
Aurelien Jarno8aac08b2010-12-31 17:50:27 +01009939 case 4:
9940 /* 64 bit mul double accumulate (UMAAL) */
9941 ARCH(6);
pbrook5e3f8782008-03-31 03:47:34 +00009942 tmp = load_reg(s, rs);
9943 tmp2 = load_reg(s, rm);
Aurelien Jarno8aac08b2010-12-31 17:50:27 +01009944 tmp64 = gen_mulu_i64_i32(tmp, tmp2);
9945 gen_addq_lo(s, tmp64, rn);
9946 gen_addq_lo(s, tmp64, rd);
pbrooka7812ae2008-11-17 14:43:54 +00009947 gen_storeq_reg(s, rn, rd, tmp64);
Juha Riihimäkib75263d2009-10-22 15:17:36 +03009948 tcg_temp_free_i64(tmp64);
pbrook9ee6e8b2007-11-11 00:04:49 +00009949 break;
Aurelien Jarno8aac08b2010-12-31 17:50:27 +01009950 case 8: case 9: case 10: case 11:
9951 case 12: case 13: case 14: case 15:
9952 /* 64 bit mul: UMULL, UMLAL, SMULL, SMLAL. */
9953 tmp = load_reg(s, rs);
9954 tmp2 = load_reg(s, rm);
9955 if (insn & (1 << 22)) {
Richard Hendersonc9f10122013-02-19 23:52:06 -08009956 tcg_gen_muls2_i32(tmp, tmp2, tmp, tmp2);
Aurelien Jarno8aac08b2010-12-31 17:50:27 +01009957 } else {
Richard Hendersonc9f10122013-02-19 23:52:06 -08009958 tcg_gen_mulu2_i32(tmp, tmp2, tmp, tmp2);
Aurelien Jarno8aac08b2010-12-31 17:50:27 +01009959 }
9960 if (insn & (1 << 21)) { /* mult accumulate */
Peter Maydell39d54922013-05-23 12:59:55 +01009961 TCGv_i32 al = load_reg(s, rn);
9962 TCGv_i32 ah = load_reg(s, rd);
Richard Hendersonc9f10122013-02-19 23:52:06 -08009963 tcg_gen_add2_i32(tmp, tmp2, tmp, tmp2, al, ah);
Peter Maydell39d54922013-05-23 12:59:55 +01009964 tcg_temp_free_i32(al);
9965 tcg_temp_free_i32(ah);
Aurelien Jarno8aac08b2010-12-31 17:50:27 +01009966 }
9967 if (insn & (1 << 20)) {
Richard Hendersonc9f10122013-02-19 23:52:06 -08009968 gen_logicq_cc(tmp, tmp2);
Aurelien Jarno8aac08b2010-12-31 17:50:27 +01009969 }
Richard Hendersonc9f10122013-02-19 23:52:06 -08009970 store_reg(s, rn, tmp);
9971 store_reg(s, rd, tmp2);
Aurelien Jarno8aac08b2010-12-31 17:50:27 +01009972 break;
9973 default:
9974 goto illegal_op;
bellard2c0262a2003-09-30 20:34:21 +00009975 }
9976 } else {
bellard2c0262a2003-09-30 20:34:21 +00009977 rn = (insn >> 16) & 0xf;
9978 rd = (insn >> 12) & 0xf;
bellard99c475a2005-01-31 20:45:13 +00009979 if (insn & (1 << 23)) {
9980 /* load/store exclusive */
Peter Maydell96c55292019-01-07 15:23:48 +00009981 bool is_ld = extract32(insn, 20, 1);
9982 bool is_lasr = !extract32(insn, 8, 1);
Mans Rullgard2359bf82013-07-15 14:35:25 +01009983 int op2 = (insn >> 8) & 3;
pbrook86753402008-10-22 20:35:54 +00009984 op1 = (insn >> 21) & 0x3;
Mans Rullgard2359bf82013-07-15 14:35:25 +01009985
9986 switch (op2) {
9987 case 0: /* lda/stl */
9988 if (op1 == 1) {
9989 goto illegal_op;
9990 }
9991 ARCH(8);
9992 break;
9993 case 1: /* reserved */
9994 goto illegal_op;
9995 case 2: /* ldaex/stlex */
9996 ARCH(8);
9997 break;
9998 case 3: /* ldrex/strex */
9999 if (op1) {
10000 ARCH(6K);
10001 } else {
10002 ARCH(6);
10003 }
10004 break;
10005 }
10006
Filip Navara3174f8e2009-10-15 13:14:28 +020010007 addr = tcg_temp_local_new_i32();
Aurelien Jarno98a46312009-10-18 15:53:28 +020010008 load_reg_var(s, addr, rn);
Mans Rullgard2359bf82013-07-15 14:35:25 +010010009
Peter Maydell96c55292019-01-07 15:23:48 +000010010 if (is_lasr && !is_ld) {
10011 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_STRL);
10012 }
10013
Mans Rullgard2359bf82013-07-15 14:35:25 +010010014 if (op2 == 0) {
Peter Maydell96c55292019-01-07 15:23:48 +000010015 if (is_ld) {
Mans Rullgard2359bf82013-07-15 14:35:25 +010010016 tmp = tcg_temp_new_i32();
10017 switch (op1) {
10018 case 0: /* lda */
Peter Maydell9bb65582017-02-07 18:30:00 +000010019 gen_aa32_ld32u_iss(s, tmp, addr,
10020 get_mem_index(s),
10021 rd | ISSIsAcqRel);
Mans Rullgard2359bf82013-07-15 14:35:25 +010010022 break;
10023 case 2: /* ldab */
Peter Maydell9bb65582017-02-07 18:30:00 +000010024 gen_aa32_ld8u_iss(s, tmp, addr,
10025 get_mem_index(s),
10026 rd | ISSIsAcqRel);
Mans Rullgard2359bf82013-07-15 14:35:25 +010010027 break;
10028 case 3: /* ldah */
Peter Maydell9bb65582017-02-07 18:30:00 +000010029 gen_aa32_ld16u_iss(s, tmp, addr,
10030 get_mem_index(s),
10031 rd | ISSIsAcqRel);
Mans Rullgard2359bf82013-07-15 14:35:25 +010010032 break;
10033 default:
10034 abort();
10035 }
10036 store_reg(s, rd, tmp);
10037 } else {
10038 rm = insn & 0xf;
10039 tmp = load_reg(s, rm);
10040 switch (op1) {
10041 case 0: /* stl */
Peter Maydell9bb65582017-02-07 18:30:00 +000010042 gen_aa32_st32_iss(s, tmp, addr,
10043 get_mem_index(s),
10044 rm | ISSIsAcqRel);
Mans Rullgard2359bf82013-07-15 14:35:25 +010010045 break;
10046 case 2: /* stlb */
Peter Maydell9bb65582017-02-07 18:30:00 +000010047 gen_aa32_st8_iss(s, tmp, addr,
10048 get_mem_index(s),
10049 rm | ISSIsAcqRel);
Mans Rullgard2359bf82013-07-15 14:35:25 +010010050 break;
10051 case 3: /* stlh */
Peter Maydell9bb65582017-02-07 18:30:00 +000010052 gen_aa32_st16_iss(s, tmp, addr,
10053 get_mem_index(s),
10054 rm | ISSIsAcqRel);
Mans Rullgard2359bf82013-07-15 14:35:25 +010010055 break;
10056 default:
10057 abort();
10058 }
10059 tcg_temp_free_i32(tmp);
10060 }
Peter Maydell96c55292019-01-07 15:23:48 +000010061 } else if (is_ld) {
pbrook86753402008-10-22 20:35:54 +000010062 switch (op1) {
10063 case 0: /* ldrex */
Paul Brook426f5ab2009-11-22 21:35:13 +000010064 gen_load_exclusive(s, rd, 15, addr, 2);
pbrook86753402008-10-22 20:35:54 +000010065 break;
10066 case 1: /* ldrexd */
Paul Brook426f5ab2009-11-22 21:35:13 +000010067 gen_load_exclusive(s, rd, rd + 1, addr, 3);
pbrook86753402008-10-22 20:35:54 +000010068 break;
10069 case 2: /* ldrexb */
Paul Brook426f5ab2009-11-22 21:35:13 +000010070 gen_load_exclusive(s, rd, 15, addr, 0);
pbrook86753402008-10-22 20:35:54 +000010071 break;
10072 case 3: /* ldrexh */
Paul Brook426f5ab2009-11-22 21:35:13 +000010073 gen_load_exclusive(s, rd, 15, addr, 1);
pbrook86753402008-10-22 20:35:54 +000010074 break;
10075 default:
10076 abort();
10077 }
pbrook9ee6e8b2007-11-11 00:04:49 +000010078 } else {
10079 rm = insn & 0xf;
pbrook86753402008-10-22 20:35:54 +000010080 switch (op1) {
10081 case 0: /* strex */
Paul Brook426f5ab2009-11-22 21:35:13 +000010082 gen_store_exclusive(s, rd, rm, 15, addr, 2);
pbrook86753402008-10-22 20:35:54 +000010083 break;
10084 case 1: /* strexd */
Aurelien Jarno502e64f2009-12-24 00:18:23 +010010085 gen_store_exclusive(s, rd, rm, rm + 1, addr, 3);
pbrook86753402008-10-22 20:35:54 +000010086 break;
10087 case 2: /* strexb */
Paul Brook426f5ab2009-11-22 21:35:13 +000010088 gen_store_exclusive(s, rd, rm, 15, addr, 0);
pbrook86753402008-10-22 20:35:54 +000010089 break;
10090 case 3: /* strexh */
Paul Brook426f5ab2009-11-22 21:35:13 +000010091 gen_store_exclusive(s, rd, rm, 15, addr, 1);
pbrook86753402008-10-22 20:35:54 +000010092 break;
10093 default:
10094 abort();
10095 }
pbrook9ee6e8b2007-11-11 00:04:49 +000010096 }
Peter Maydell39d54922013-05-23 12:59:55 +010010097 tcg_temp_free_i32(addr);
Peter Maydell96c55292019-01-07 15:23:48 +000010098
10099 if (is_lasr && is_ld) {
10100 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_LDAQ);
10101 }
Onur Sahinc4869ca2018-04-10 13:02:24 +010010102 } else if ((insn & 0x00300f00) == 0) {
10103 /* 0bcccc_0001_0x00_xxxx_xxxx_0000_1001_xxxx
10104 * - SWP, SWPB
10105 */
10106
Emilio G. Cotacf12bce2016-06-27 15:02:10 -040010107 TCGv taddr;
10108 TCGMemOp opc = s->be_data;
10109
bellard99c475a2005-01-31 20:45:13 +000010110 rm = (insn) & 0xf;
ths3b46e622007-09-17 08:09:54 +000010111
bellard99c475a2005-01-31 20:45:13 +000010112 if (insn & (1 << 22)) {
Emilio G. Cotacf12bce2016-06-27 15:02:10 -040010113 opc |= MO_UB;
bellard99c475a2005-01-31 20:45:13 +000010114 } else {
Emilio G. Cotacf12bce2016-06-27 15:02:10 -040010115 opc |= MO_UL | MO_ALIGN;
bellard99c475a2005-01-31 20:45:13 +000010116 }
Emilio G. Cotacf12bce2016-06-27 15:02:10 -040010117
10118 addr = load_reg(s, rn);
10119 taddr = gen_aa32_addr(s, addr, opc);
Peter Maydell7d1b0092011-03-06 21:39:54 +000010120 tcg_temp_free_i32(addr);
Emilio G. Cotacf12bce2016-06-27 15:02:10 -040010121
10122 tmp = load_reg(s, rm);
10123 tcg_gen_atomic_xchg_i32(tmp, taddr, tmp,
10124 get_mem_index(s), opc);
10125 tcg_temp_free(taddr);
10126 store_reg(s, rd, tmp);
Onur Sahinc4869ca2018-04-10 13:02:24 +010010127 } else {
10128 goto illegal_op;
bellard2c0262a2003-09-30 20:34:21 +000010129 }
bellard2c0262a2003-09-30 20:34:21 +000010130 }
10131 } else {
pbrook191f9a92006-06-14 14:36:07 +000010132 int address_offset;
Peter Maydell3960c332015-05-29 11:29:00 +010010133 bool load = insn & (1 << 20);
Peter Maydell63f26fc2017-02-07 18:29:59 +000010134 bool wbit = insn & (1 << 21);
10135 bool pbit = insn & (1 << 24);
Peter Maydell3960c332015-05-29 11:29:00 +010010136 bool doubleword = false;
Peter Maydell9bb65582017-02-07 18:30:00 +000010137 ISSInfo issinfo;
10138
bellard99c475a2005-01-31 20:45:13 +000010139 /* Misc load/store */
bellard2c0262a2003-09-30 20:34:21 +000010140 rn = (insn >> 16) & 0xf;
10141 rd = (insn >> 12) & 0xf;
Peter Maydell3960c332015-05-29 11:29:00 +010010142
Peter Maydell9bb65582017-02-07 18:30:00 +000010143 /* ISS not valid if writeback */
10144 issinfo = (pbit & !wbit) ? rd : ISSInvalid;
10145
Peter Maydell3960c332015-05-29 11:29:00 +010010146 if (!load && (sh & 2)) {
10147 /* doubleword */
10148 ARCH(5TE);
10149 if (rd & 1) {
10150 /* UNPREDICTABLE; we choose to UNDEF */
10151 goto illegal_op;
10152 }
10153 load = (sh & 1) == 0;
10154 doubleword = true;
10155 }
10156
pbrookb0109802008-03-31 03:47:03 +000010157 addr = load_reg(s, rn);
Peter Maydell63f26fc2017-02-07 18:29:59 +000010158 if (pbit) {
pbrookb0109802008-03-31 03:47:03 +000010159 gen_add_datah_offset(s, insn, 0, addr);
Peter Maydell63f26fc2017-02-07 18:29:59 +000010160 }
pbrook191f9a92006-06-14 14:36:07 +000010161 address_offset = 0;
Peter Maydell3960c332015-05-29 11:29:00 +010010162
10163 if (doubleword) {
10164 if (!load) {
10165 /* store */
10166 tmp = load_reg(s, rd);
Paolo Bonzini12dcc322016-03-04 11:30:20 +000010167 gen_aa32_st32(s, tmp, addr, get_mem_index(s));
Peter Maydell3960c332015-05-29 11:29:00 +010010168 tcg_temp_free_i32(tmp);
10169 tcg_gen_addi_i32(addr, addr, 4);
10170 tmp = load_reg(s, rd + 1);
Paolo Bonzini12dcc322016-03-04 11:30:20 +000010171 gen_aa32_st32(s, tmp, addr, get_mem_index(s));
Peter Maydell3960c332015-05-29 11:29:00 +010010172 tcg_temp_free_i32(tmp);
10173 } else {
10174 /* load */
10175 tmp = tcg_temp_new_i32();
Paolo Bonzini12dcc322016-03-04 11:30:20 +000010176 gen_aa32_ld32u(s, tmp, addr, get_mem_index(s));
Peter Maydell3960c332015-05-29 11:29:00 +010010177 store_reg(s, rd, tmp);
10178 tcg_gen_addi_i32(addr, addr, 4);
10179 tmp = tcg_temp_new_i32();
Paolo Bonzini12dcc322016-03-04 11:30:20 +000010180 gen_aa32_ld32u(s, tmp, addr, get_mem_index(s));
Peter Maydell3960c332015-05-29 11:29:00 +010010181 rd++;
10182 }
10183 address_offset = -4;
10184 } else if (load) {
bellard2c0262a2003-09-30 20:34:21 +000010185 /* load */
Peter Maydell5a839c02013-05-23 13:00:00 +010010186 tmp = tcg_temp_new_i32();
Peter Maydell3960c332015-05-29 11:29:00 +010010187 switch (sh) {
bellard2c0262a2003-09-30 20:34:21 +000010188 case 1:
Peter Maydell9bb65582017-02-07 18:30:00 +000010189 gen_aa32_ld16u_iss(s, tmp, addr, get_mem_index(s),
10190 issinfo);
bellard2c0262a2003-09-30 20:34:21 +000010191 break;
10192 case 2:
Peter Maydell9bb65582017-02-07 18:30:00 +000010193 gen_aa32_ld8s_iss(s, tmp, addr, get_mem_index(s),
10194 issinfo);
bellard2c0262a2003-09-30 20:34:21 +000010195 break;
10196 default:
10197 case 3:
Peter Maydell9bb65582017-02-07 18:30:00 +000010198 gen_aa32_ld16s_iss(s, tmp, addr, get_mem_index(s),
10199 issinfo);
bellard2c0262a2003-09-30 20:34:21 +000010200 break;
10201 }
10202 } else {
10203 /* store */
pbrookb0109802008-03-31 03:47:03 +000010204 tmp = load_reg(s, rd);
Peter Maydell9bb65582017-02-07 18:30:00 +000010205 gen_aa32_st16_iss(s, tmp, addr, get_mem_index(s), issinfo);
Peter Maydell5a839c02013-05-23 13:00:00 +010010206 tcg_temp_free_i32(tmp);
bellard2c0262a2003-09-30 20:34:21 +000010207 }
pbrook5fd46862007-03-17 01:43:01 +000010208 /* Perform base writeback before the loaded value to
10209 ensure correct behavior with overlapping index registers.
Daniel P. Berrangeb6af0972015-08-26 12:17:13 +010010210 ldrd with base writeback is undefined if the
pbrook5fd46862007-03-17 01:43:01 +000010211 destination and index registers overlap. */
Peter Maydell63f26fc2017-02-07 18:29:59 +000010212 if (!pbit) {
pbrookb0109802008-03-31 03:47:03 +000010213 gen_add_datah_offset(s, insn, address_offset, addr);
10214 store_reg(s, rn, addr);
Peter Maydell63f26fc2017-02-07 18:29:59 +000010215 } else if (wbit) {
pbrook191f9a92006-06-14 14:36:07 +000010216 if (address_offset)
pbrookb0109802008-03-31 03:47:03 +000010217 tcg_gen_addi_i32(addr, addr, address_offset);
10218 store_reg(s, rn, addr);
10219 } else {
Peter Maydell7d1b0092011-03-06 21:39:54 +000010220 tcg_temp_free_i32(addr);
bellard2c0262a2003-09-30 20:34:21 +000010221 }
pbrook5fd46862007-03-17 01:43:01 +000010222 if (load) {
10223 /* Complete the load. */
pbrookb0109802008-03-31 03:47:03 +000010224 store_reg(s, rd, tmp);
pbrook5fd46862007-03-17 01:43:01 +000010225 }
bellard2c0262a2003-09-30 20:34:21 +000010226 }
10227 break;
10228 case 0x4:
10229 case 0x5:
pbrook9ee6e8b2007-11-11 00:04:49 +000010230 goto do_ldst;
bellard2c0262a2003-09-30 20:34:21 +000010231 case 0x6:
10232 case 0x7:
pbrook9ee6e8b2007-11-11 00:04:49 +000010233 if (insn & (1 << 4)) {
10234 ARCH(6);
10235 /* Armv6 Media instructions. */
10236 rm = insn & 0xf;
10237 rn = (insn >> 16) & 0xf;
10238 rd = (insn >> 12) & 0xf;
10239 rs = (insn >> 8) & 0xf;
10240 switch ((insn >> 23) & 3) {
10241 case 0: /* Parallel add/subtract. */
10242 op1 = (insn >> 20) & 7;
pbrook6ddbc6e2008-03-31 03:46:33 +000010243 tmp = load_reg(s, rn);
10244 tmp2 = load_reg(s, rm);
pbrook9ee6e8b2007-11-11 00:04:49 +000010245 sh = (insn >> 5) & 7;
10246 if ((op1 & 3) == 0 || sh == 5 || sh == 6)
10247 goto illegal_op;
pbrook6ddbc6e2008-03-31 03:46:33 +000010248 gen_arm_parallel_addsub(op1, sh, tmp, tmp2);
Peter Maydell7d1b0092011-03-06 21:39:54 +000010249 tcg_temp_free_i32(tmp2);
pbrook6ddbc6e2008-03-31 03:46:33 +000010250 store_reg(s, rd, tmp);
pbrook9ee6e8b2007-11-11 00:04:49 +000010251 break;
10252 case 1:
10253 if ((insn & 0x00700020) == 0) {
balrog6c956762008-04-13 00:57:49 +000010254 /* Halfword pack. */
pbrook36706692008-03-31 03:46:19 +000010255 tmp = load_reg(s, rn);
10256 tmp2 = load_reg(s, rm);
pbrook9ee6e8b2007-11-11 00:04:49 +000010257 shift = (insn >> 7) & 0x1f;
pbrook36706692008-03-31 03:46:19 +000010258 if (insn & (1 << 6)) {
10259 /* pkhtb */
balrog22478e72008-07-19 10:12:22 +000010260 if (shift == 0)
10261 shift = 31;
10262 tcg_gen_sari_i32(tmp2, tmp2, shift);
pbrook36706692008-03-31 03:46:19 +000010263 tcg_gen_andi_i32(tmp, tmp, 0xffff0000);
pbrook86831432008-05-11 12:22:01 +000010264 tcg_gen_ext16u_i32(tmp2, tmp2);
pbrook36706692008-03-31 03:46:19 +000010265 } else {
10266 /* pkhbt */
balrog22478e72008-07-19 10:12:22 +000010267 if (shift)
10268 tcg_gen_shli_i32(tmp2, tmp2, shift);
pbrook86831432008-05-11 12:22:01 +000010269 tcg_gen_ext16u_i32(tmp, tmp);
pbrook36706692008-03-31 03:46:19 +000010270 tcg_gen_andi_i32(tmp2, tmp2, 0xffff0000);
10271 }
10272 tcg_gen_or_i32(tmp, tmp, tmp2);
Peter Maydell7d1b0092011-03-06 21:39:54 +000010273 tcg_temp_free_i32(tmp2);
pbrook36706692008-03-31 03:46:19 +000010274 store_reg(s, rd, tmp);
pbrook9ee6e8b2007-11-11 00:04:49 +000010275 } else if ((insn & 0x00200020) == 0x00200000) {
10276 /* [us]sat */
pbrook6ddbc6e2008-03-31 03:46:33 +000010277 tmp = load_reg(s, rm);
pbrook9ee6e8b2007-11-11 00:04:49 +000010278 shift = (insn >> 7) & 0x1f;
10279 if (insn & (1 << 6)) {
10280 if (shift == 0)
10281 shift = 31;
pbrook6ddbc6e2008-03-31 03:46:33 +000010282 tcg_gen_sari_i32(tmp, tmp, shift);
pbrook9ee6e8b2007-11-11 00:04:49 +000010283 } else {
pbrook6ddbc6e2008-03-31 03:46:33 +000010284 tcg_gen_shli_i32(tmp, tmp, shift);
pbrook9ee6e8b2007-11-11 00:04:49 +000010285 }
10286 sh = (insn >> 16) & 0x1f;
Christophe Lyon40d3c432011-01-19 17:10:52 +010010287 tmp2 = tcg_const_i32(sh);
10288 if (insn & (1 << 22))
Blue Swirl9ef39272012-09-04 20:19:15 +000010289 gen_helper_usat(tmp, cpu_env, tmp, tmp2);
Christophe Lyon40d3c432011-01-19 17:10:52 +010010290 else
Blue Swirl9ef39272012-09-04 20:19:15 +000010291 gen_helper_ssat(tmp, cpu_env, tmp, tmp2);
Christophe Lyon40d3c432011-01-19 17:10:52 +010010292 tcg_temp_free_i32(tmp2);
pbrook6ddbc6e2008-03-31 03:46:33 +000010293 store_reg(s, rd, tmp);
pbrook9ee6e8b2007-11-11 00:04:49 +000010294 } else if ((insn & 0x00300fe0) == 0x00200f20) {
10295 /* [us]sat16 */
pbrook6ddbc6e2008-03-31 03:46:33 +000010296 tmp = load_reg(s, rm);
pbrook9ee6e8b2007-11-11 00:04:49 +000010297 sh = (insn >> 16) & 0x1f;
Christophe Lyon40d3c432011-01-19 17:10:52 +010010298 tmp2 = tcg_const_i32(sh);
10299 if (insn & (1 << 22))
Blue Swirl9ef39272012-09-04 20:19:15 +000010300 gen_helper_usat16(tmp, cpu_env, tmp, tmp2);
Christophe Lyon40d3c432011-01-19 17:10:52 +010010301 else
Blue Swirl9ef39272012-09-04 20:19:15 +000010302 gen_helper_ssat16(tmp, cpu_env, tmp, tmp2);
Christophe Lyon40d3c432011-01-19 17:10:52 +010010303 tcg_temp_free_i32(tmp2);
pbrook6ddbc6e2008-03-31 03:46:33 +000010304 store_reg(s, rd, tmp);
pbrook9ee6e8b2007-11-11 00:04:49 +000010305 } else if ((insn & 0x00700fe0) == 0x00000fa0) {
10306 /* Select bytes. */
pbrook6ddbc6e2008-03-31 03:46:33 +000010307 tmp = load_reg(s, rn);
10308 tmp2 = load_reg(s, rm);
Peter Maydell7d1b0092011-03-06 21:39:54 +000010309 tmp3 = tcg_temp_new_i32();
Andreas Färber0ecb72a2012-03-14 01:38:21 +010010310 tcg_gen_ld_i32(tmp3, cpu_env, offsetof(CPUARMState, GE));
pbrook6ddbc6e2008-03-31 03:46:33 +000010311 gen_helper_sel_flags(tmp, tmp3, tmp, tmp2);
Peter Maydell7d1b0092011-03-06 21:39:54 +000010312 tcg_temp_free_i32(tmp3);
10313 tcg_temp_free_i32(tmp2);
pbrook6ddbc6e2008-03-31 03:46:33 +000010314 store_reg(s, rd, tmp);
pbrook9ee6e8b2007-11-11 00:04:49 +000010315 } else if ((insn & 0x000003e0) == 0x00000060) {
pbrook5e3f8782008-03-31 03:47:34 +000010316 tmp = load_reg(s, rm);
pbrook9ee6e8b2007-11-11 00:04:49 +000010317 shift = (insn >> 10) & 3;
Stefan Weil1301f322011-04-28 17:20:37 +020010318 /* ??? In many cases it's not necessary to do a
pbrook9ee6e8b2007-11-11 00:04:49 +000010319 rotate, a shift is sufficient. */
10320 if (shift != 0)
Aurelien Jarnof669df22009-10-15 16:45:14 +020010321 tcg_gen_rotri_i32(tmp, tmp, shift * 8);
pbrook9ee6e8b2007-11-11 00:04:49 +000010322 op1 = (insn >> 20) & 7;
10323 switch (op1) {
pbrook5e3f8782008-03-31 03:47:34 +000010324 case 0: gen_sxtb16(tmp); break;
10325 case 2: gen_sxtb(tmp); break;
10326 case 3: gen_sxth(tmp); break;
10327 case 4: gen_uxtb16(tmp); break;
10328 case 6: gen_uxtb(tmp); break;
10329 case 7: gen_uxth(tmp); break;
pbrook9ee6e8b2007-11-11 00:04:49 +000010330 default: goto illegal_op;
10331 }
10332 if (rn != 15) {
pbrook5e3f8782008-03-31 03:47:34 +000010333 tmp2 = load_reg(s, rn);
pbrook9ee6e8b2007-11-11 00:04:49 +000010334 if ((op1 & 3) == 0) {
pbrook5e3f8782008-03-31 03:47:34 +000010335 gen_add16(tmp, tmp2);
pbrook9ee6e8b2007-11-11 00:04:49 +000010336 } else {
pbrook5e3f8782008-03-31 03:47:34 +000010337 tcg_gen_add_i32(tmp, tmp, tmp2);
Peter Maydell7d1b0092011-03-06 21:39:54 +000010338 tcg_temp_free_i32(tmp2);
pbrook9ee6e8b2007-11-11 00:04:49 +000010339 }
10340 }
balrog6c956762008-04-13 00:57:49 +000010341 store_reg(s, rd, tmp);
pbrook9ee6e8b2007-11-11 00:04:49 +000010342 } else if ((insn & 0x003f0f60) == 0x003f0f20) {
10343 /* rev */
pbrookb0109802008-03-31 03:47:03 +000010344 tmp = load_reg(s, rm);
pbrook9ee6e8b2007-11-11 00:04:49 +000010345 if (insn & (1 << 22)) {
10346 if (insn & (1 << 7)) {
pbrookb0109802008-03-31 03:47:03 +000010347 gen_revsh(tmp);
pbrook9ee6e8b2007-11-11 00:04:49 +000010348 } else {
10349 ARCH(6T2);
pbrookb0109802008-03-31 03:47:03 +000010350 gen_helper_rbit(tmp, tmp);
pbrook9ee6e8b2007-11-11 00:04:49 +000010351 }
10352 } else {
10353 if (insn & (1 << 7))
pbrookb0109802008-03-31 03:47:03 +000010354 gen_rev16(tmp);
pbrook9ee6e8b2007-11-11 00:04:49 +000010355 else
aurel3266896cb2009-03-13 09:34:48 +000010356 tcg_gen_bswap32_i32(tmp, tmp);
pbrook9ee6e8b2007-11-11 00:04:49 +000010357 }
pbrookb0109802008-03-31 03:47:03 +000010358 store_reg(s, rd, tmp);
pbrook9ee6e8b2007-11-11 00:04:49 +000010359 } else {
10360 goto illegal_op;
10361 }
10362 break;
10363 case 2: /* Multiplies (Type 3). */
Peter Maydell41e95642011-10-19 16:14:05 +000010364 switch ((insn >> 20) & 0x7) {
10365 case 5:
10366 if (((insn >> 6) ^ (insn >> 7)) & 1) {
10367 /* op2 not 00x or 11x : UNDEF */
10368 goto illegal_op;
10369 }
Aurelien Jarno838fa722011-01-06 19:53:56 +010010370 /* Signed multiply most significant [accumulate].
10371 (SMMUL, SMMLA, SMMLS) */
Peter Maydell41e95642011-10-19 16:14:05 +000010372 tmp = load_reg(s, rm);
10373 tmp2 = load_reg(s, rs);
pbrooka7812ae2008-11-17 14:43:54 +000010374 tmp64 = gen_muls_i64_i32(tmp, tmp2);
Aurelien Jarno838fa722011-01-06 19:53:56 +010010375
10376 if (rd != 15) {
10377 tmp = load_reg(s, rd);
10378 if (insn & (1 << 6)) {
10379 tmp64 = gen_subq_msw(tmp64, tmp);
10380 } else {
10381 tmp64 = gen_addq_msw(tmp64, tmp);
10382 }
10383 }
10384 if (insn & (1 << 5)) {
pbrooka7812ae2008-11-17 14:43:54 +000010385 tcg_gen_addi_i64(tmp64, tmp64, 0x80000000u);
Aurelien Jarno838fa722011-01-06 19:53:56 +010010386 }
pbrooka7812ae2008-11-17 14:43:54 +000010387 tcg_gen_shri_i64(tmp64, tmp64, 32);
Peter Maydell7d1b0092011-03-06 21:39:54 +000010388 tmp = tcg_temp_new_i32();
Richard Hendersonecc7b3a2015-07-24 11:49:53 -070010389 tcg_gen_extrl_i64_i32(tmp, tmp64);
Juha Riihimäkib75263d2009-10-22 15:17:36 +030010390 tcg_temp_free_i64(tmp64);
balrog955a7dd2008-12-07 14:18:02 +000010391 store_reg(s, rn, tmp);
Peter Maydell41e95642011-10-19 16:14:05 +000010392 break;
10393 case 0:
10394 case 4:
10395 /* SMLAD, SMUAD, SMLSD, SMUSD, SMLALD, SMLSLD */
10396 if (insn & (1 << 7)) {
10397 goto illegal_op;
10398 }
10399 tmp = load_reg(s, rm);
10400 tmp2 = load_reg(s, rs);
pbrook9ee6e8b2007-11-11 00:04:49 +000010401 if (insn & (1 << 5))
pbrook5e3f8782008-03-31 03:47:34 +000010402 gen_swap_half(tmp2);
10403 gen_smul_dual(tmp, tmp2);
pbrook5e3f8782008-03-31 03:47:34 +000010404 if (insn & (1 << 22)) {
10405 /* smlald, smlsld */
Peter Crosthwaite33bbd752014-04-16 20:20:52 -070010406 TCGv_i64 tmp64_2;
10407
pbrooka7812ae2008-11-17 14:43:54 +000010408 tmp64 = tcg_temp_new_i64();
Peter Crosthwaite33bbd752014-04-16 20:20:52 -070010409 tmp64_2 = tcg_temp_new_i64();
pbrooka7812ae2008-11-17 14:43:54 +000010410 tcg_gen_ext_i32_i64(tmp64, tmp);
Peter Crosthwaite33bbd752014-04-16 20:20:52 -070010411 tcg_gen_ext_i32_i64(tmp64_2, tmp2);
Peter Maydell7d1b0092011-03-06 21:39:54 +000010412 tcg_temp_free_i32(tmp);
Peter Crosthwaite33bbd752014-04-16 20:20:52 -070010413 tcg_temp_free_i32(tmp2);
10414 if (insn & (1 << 6)) {
10415 tcg_gen_sub_i64(tmp64, tmp64, tmp64_2);
10416 } else {
10417 tcg_gen_add_i64(tmp64, tmp64, tmp64_2);
10418 }
10419 tcg_temp_free_i64(tmp64_2);
pbrooka7812ae2008-11-17 14:43:54 +000010420 gen_addq(s, tmp64, rd, rn);
10421 gen_storeq_reg(s, rd, rn, tmp64);
Juha Riihimäkib75263d2009-10-22 15:17:36 +030010422 tcg_temp_free_i64(tmp64);
pbrook5e3f8782008-03-31 03:47:34 +000010423 } else {
10424 /* smuad, smusd, smlad, smlsd */
Peter Crosthwaite33bbd752014-04-16 20:20:52 -070010425 if (insn & (1 << 6)) {
10426 /* This subtraction cannot overflow. */
10427 tcg_gen_sub_i32(tmp, tmp, tmp2);
10428 } else {
10429 /* This addition cannot overflow 32 bits;
10430 * however it may overflow considered as a
10431 * signed operation, in which case we must set
10432 * the Q flag.
10433 */
10434 gen_helper_add_setq(tmp, cpu_env, tmp, tmp2);
10435 }
10436 tcg_temp_free_i32(tmp2);
balrog22478e72008-07-19 10:12:22 +000010437 if (rd != 15)
pbrook9ee6e8b2007-11-11 00:04:49 +000010438 {
balrog22478e72008-07-19 10:12:22 +000010439 tmp2 = load_reg(s, rd);
Blue Swirl9ef39272012-09-04 20:19:15 +000010440 gen_helper_add_setq(tmp, cpu_env, tmp, tmp2);
Peter Maydell7d1b0092011-03-06 21:39:54 +000010441 tcg_temp_free_i32(tmp2);
pbrook9ee6e8b2007-11-11 00:04:49 +000010442 }
balrog22478e72008-07-19 10:12:22 +000010443 store_reg(s, rn, tmp);
pbrook9ee6e8b2007-11-11 00:04:49 +000010444 }
Peter Maydell41e95642011-10-19 16:14:05 +000010445 break;
Peter Maydellb8b8ea02011-10-19 16:14:06 +000010446 case 1:
10447 case 3:
10448 /* SDIV, UDIV */
Richard Henderson7e0cf8b2018-10-24 07:50:16 +010010449 if (!dc_isar_feature(arm_div, s)) {
Peter Maydellb8b8ea02011-10-19 16:14:06 +000010450 goto illegal_op;
10451 }
10452 if (((insn >> 5) & 7) || (rd != 15)) {
10453 goto illegal_op;
10454 }
10455 tmp = load_reg(s, rm);
10456 tmp2 = load_reg(s, rs);
10457 if (insn & (1 << 21)) {
10458 gen_helper_udiv(tmp, tmp, tmp2);
10459 } else {
10460 gen_helper_sdiv(tmp, tmp, tmp2);
10461 }
10462 tcg_temp_free_i32(tmp2);
10463 store_reg(s, rn, tmp);
10464 break;
Peter Maydell41e95642011-10-19 16:14:05 +000010465 default:
10466 goto illegal_op;
pbrook9ee6e8b2007-11-11 00:04:49 +000010467 }
10468 break;
10469 case 3:
10470 op1 = ((insn >> 17) & 0x38) | ((insn >> 5) & 7);
10471 switch (op1) {
10472 case 0: /* Unsigned sum of absolute differences. */
pbrook6ddbc6e2008-03-31 03:46:33 +000010473 ARCH(6);
10474 tmp = load_reg(s, rm);
10475 tmp2 = load_reg(s, rs);
10476 gen_helper_usad8(tmp, tmp, tmp2);
Peter Maydell7d1b0092011-03-06 21:39:54 +000010477 tcg_temp_free_i32(tmp2);
balrogded9d292008-12-07 14:03:27 +000010478 if (rd != 15) {
10479 tmp2 = load_reg(s, rd);
pbrook6ddbc6e2008-03-31 03:46:33 +000010480 tcg_gen_add_i32(tmp, tmp, tmp2);
Peter Maydell7d1b0092011-03-06 21:39:54 +000010481 tcg_temp_free_i32(tmp2);
pbrook9ee6e8b2007-11-11 00:04:49 +000010482 }
balrogded9d292008-12-07 14:03:27 +000010483 store_reg(s, rn, tmp);
pbrook9ee6e8b2007-11-11 00:04:49 +000010484 break;
10485 case 0x20: case 0x24: case 0x28: case 0x2c:
10486 /* Bitfield insert/clear. */
10487 ARCH(6T2);
10488 shift = (insn >> 7) & 0x1f;
10489 i = (insn >> 16) & 0x1f;
Kirill Batuzov45140a52015-02-05 13:37:22 +000010490 if (i < shift) {
10491 /* UNPREDICTABLE; we choose to UNDEF */
10492 goto illegal_op;
10493 }
pbrook9ee6e8b2007-11-11 00:04:49 +000010494 i = i + 1 - shift;
10495 if (rm == 15) {
Peter Maydell7d1b0092011-03-06 21:39:54 +000010496 tmp = tcg_temp_new_i32();
pbrook5e3f8782008-03-31 03:47:34 +000010497 tcg_gen_movi_i32(tmp, 0);
pbrook9ee6e8b2007-11-11 00:04:49 +000010498 } else {
pbrook5e3f8782008-03-31 03:47:34 +000010499 tmp = load_reg(s, rm);
pbrook9ee6e8b2007-11-11 00:04:49 +000010500 }
10501 if (i != 32) {
pbrook5e3f8782008-03-31 03:47:34 +000010502 tmp2 = load_reg(s, rd);
Aurelien Jarnod593c482012-10-05 15:04:45 +010010503 tcg_gen_deposit_i32(tmp, tmp2, tmp, shift, i);
Peter Maydell7d1b0092011-03-06 21:39:54 +000010504 tcg_temp_free_i32(tmp2);
pbrook9ee6e8b2007-11-11 00:04:49 +000010505 }
pbrook5e3f8782008-03-31 03:47:34 +000010506 store_reg(s, rd, tmp);
pbrook9ee6e8b2007-11-11 00:04:49 +000010507 break;
10508 case 0x12: case 0x16: case 0x1a: case 0x1e: /* sbfx */
10509 case 0x32: case 0x36: case 0x3a: case 0x3e: /* ubfx */
balrog4cc633c2008-12-07 13:32:09 +000010510 ARCH(6T2);
pbrook5e3f8782008-03-31 03:47:34 +000010511 tmp = load_reg(s, rm);
pbrook9ee6e8b2007-11-11 00:04:49 +000010512 shift = (insn >> 7) & 0x1f;
10513 i = ((insn >> 16) & 0x1f) + 1;
10514 if (shift + i > 32)
10515 goto illegal_op;
10516 if (i < 32) {
10517 if (op1 & 0x20) {
Richard Henderson59a71b42016-10-15 11:41:29 -050010518 tcg_gen_extract_i32(tmp, tmp, shift, i);
pbrook9ee6e8b2007-11-11 00:04:49 +000010519 } else {
Richard Henderson59a71b42016-10-15 11:41:29 -050010520 tcg_gen_sextract_i32(tmp, tmp, shift, i);
pbrook9ee6e8b2007-11-11 00:04:49 +000010521 }
10522 }
pbrook5e3f8782008-03-31 03:47:34 +000010523 store_reg(s, rd, tmp);
pbrook9ee6e8b2007-11-11 00:04:49 +000010524 break;
10525 default:
10526 goto illegal_op;
10527 }
10528 break;
10529 }
10530 break;
10531 }
10532 do_ldst:
bellard159f3662006-05-22 23:06:04 +000010533 /* Check for undefined extension instructions
10534 * per the ARM Bible IE:
10535 * xxxx 0111 1111 xxxx xxxx xxxx 1111 xxxx
10536 */
10537 sh = (0xf << 20) | (0xf << 4);
10538 if (op1 == 0x7 && ((insn & sh) == sh))
10539 {
10540 goto illegal_op;
10541 }
bellard2c0262a2003-09-30 20:34:21 +000010542 /* load/store byte/word */
10543 rn = (insn >> 16) & 0xf;
10544 rd = (insn >> 12) & 0xf;
pbrookb0109802008-03-31 03:47:03 +000010545 tmp2 = load_reg(s, rn);
Peter Maydella99caa42014-05-27 17:09:50 +010010546 if ((insn & 0x01200000) == 0x00200000) {
10547 /* ldrt/strt */
Peter Maydell579d21c2015-02-05 13:37:23 +000010548 i = get_a32_user_mem_index(s);
Peter Maydella99caa42014-05-27 17:09:50 +010010549 } else {
10550 i = get_mem_index(s);
10551 }
bellard2c0262a2003-09-30 20:34:21 +000010552 if (insn & (1 << 24))
pbrookb0109802008-03-31 03:47:03 +000010553 gen_add_data_offset(s, insn, tmp2);
bellard2c0262a2003-09-30 20:34:21 +000010554 if (insn & (1 << 20)) {
10555 /* load */
Peter Maydell5a839c02013-05-23 13:00:00 +010010556 tmp = tcg_temp_new_i32();
bellardb5ff1b32005-11-26 10:38:39 +000010557 if (insn & (1 << 22)) {
Peter Maydell9bb65582017-02-07 18:30:00 +000010558 gen_aa32_ld8u_iss(s, tmp, tmp2, i, rd);
bellardb5ff1b32005-11-26 10:38:39 +000010559 } else {
Peter Maydell9bb65582017-02-07 18:30:00 +000010560 gen_aa32_ld32u_iss(s, tmp, tmp2, i, rd);
bellardb5ff1b32005-11-26 10:38:39 +000010561 }
bellard2c0262a2003-09-30 20:34:21 +000010562 } else {
10563 /* store */
pbrookb0109802008-03-31 03:47:03 +000010564 tmp = load_reg(s, rd);
Peter Maydell5a839c02013-05-23 13:00:00 +010010565 if (insn & (1 << 22)) {
Peter Maydell9bb65582017-02-07 18:30:00 +000010566 gen_aa32_st8_iss(s, tmp, tmp2, i, rd);
Peter Maydell5a839c02013-05-23 13:00:00 +010010567 } else {
Peter Maydell9bb65582017-02-07 18:30:00 +000010568 gen_aa32_st32_iss(s, tmp, tmp2, i, rd);
Peter Maydell5a839c02013-05-23 13:00:00 +010010569 }
10570 tcg_temp_free_i32(tmp);
bellard2c0262a2003-09-30 20:34:21 +000010571 }
10572 if (!(insn & (1 << 24))) {
pbrookb0109802008-03-31 03:47:03 +000010573 gen_add_data_offset(s, insn, tmp2);
10574 store_reg(s, rn, tmp2);
10575 } else if (insn & (1 << 21)) {
10576 store_reg(s, rn, tmp2);
10577 } else {
Peter Maydell7d1b0092011-03-06 21:39:54 +000010578 tcg_temp_free_i32(tmp2);
bellard2c0262a2003-09-30 20:34:21 +000010579 }
pbrook5fd46862007-03-17 01:43:01 +000010580 if (insn & (1 << 20)) {
10581 /* Complete the load. */
Peter Maydell7dcc1f82014-10-28 19:24:03 +000010582 store_reg_from_load(s, rd, tmp);
pbrook5fd46862007-03-17 01:43:01 +000010583 }
bellard2c0262a2003-09-30 20:34:21 +000010584 break;
10585 case 0x08:
10586 case 0x09:
10587 {
Peter Maydellda3e53d2015-03-16 12:30:47 +000010588 int j, n, loaded_base;
10589 bool exc_return = false;
10590 bool is_load = extract32(insn, 20, 1);
10591 bool user = false;
Peter Maydell39d54922013-05-23 12:59:55 +010010592 TCGv_i32 loaded_var;
bellard2c0262a2003-09-30 20:34:21 +000010593 /* load/store multiple words */
10594 /* XXX: store correct base if write back */
bellardb5ff1b32005-11-26 10:38:39 +000010595 if (insn & (1 << 22)) {
Peter Maydellda3e53d2015-03-16 12:30:47 +000010596 /* LDM (user), LDM (exception return) and STM (user) */
bellardb5ff1b32005-11-26 10:38:39 +000010597 if (IS_USER(s))
10598 goto illegal_op; /* only usable in supervisor mode */
10599
Peter Maydellda3e53d2015-03-16 12:30:47 +000010600 if (is_load && extract32(insn, 15, 1)) {
10601 exc_return = true;
10602 } else {
10603 user = true;
10604 }
bellardb5ff1b32005-11-26 10:38:39 +000010605 }
bellard2c0262a2003-09-30 20:34:21 +000010606 rn = (insn >> 16) & 0xf;
pbrookb0109802008-03-31 03:47:03 +000010607 addr = load_reg(s, rn);
ths3b46e622007-09-17 08:09:54 +000010608
bellard2c0262a2003-09-30 20:34:21 +000010609 /* compute total size */
pbrook191abaa2006-02-04 21:50:36 +000010610 loaded_base = 0;
Richard Hendersonf7647182017-11-02 12:47:37 +010010611 loaded_var = NULL;
bellard2c0262a2003-09-30 20:34:21 +000010612 n = 0;
10613 for(i=0;i<16;i++) {
10614 if (insn & (1 << i))
10615 n++;
10616 }
10617 /* XXX: test invalid n == 0 case ? */
10618 if (insn & (1 << 23)) {
10619 if (insn & (1 << 24)) {
10620 /* pre increment */
pbrookb0109802008-03-31 03:47:03 +000010621 tcg_gen_addi_i32(addr, addr, 4);
bellard2c0262a2003-09-30 20:34:21 +000010622 } else {
10623 /* post increment */
10624 }
10625 } else {
10626 if (insn & (1 << 24)) {
10627 /* pre decrement */
pbrookb0109802008-03-31 03:47:03 +000010628 tcg_gen_addi_i32(addr, addr, -(n * 4));
bellard2c0262a2003-09-30 20:34:21 +000010629 } else {
10630 /* post decrement */
10631 if (n != 1)
pbrookb0109802008-03-31 03:47:03 +000010632 tcg_gen_addi_i32(addr, addr, -((n - 1) * 4));
bellard2c0262a2003-09-30 20:34:21 +000010633 }
10634 }
10635 j = 0;
10636 for(i=0;i<16;i++) {
10637 if (insn & (1 << i)) {
Peter Maydellda3e53d2015-03-16 12:30:47 +000010638 if (is_load) {
bellard2c0262a2003-09-30 20:34:21 +000010639 /* load */
Peter Maydell5a839c02013-05-23 13:00:00 +010010640 tmp = tcg_temp_new_i32();
Paolo Bonzini12dcc322016-03-04 11:30:20 +000010641 gen_aa32_ld32u(s, tmp, addr, get_mem_index(s));
Dmitry Eremin-Solenikovbe5e7a72011-04-04 17:38:44 +040010642 if (user) {
Juha Riihimäkib75263d2009-10-22 15:17:36 +030010643 tmp2 = tcg_const_i32(i);
Blue Swirl1ce94f82012-09-04 20:08:34 +000010644 gen_helper_set_user_reg(cpu_env, tmp2, tmp);
Juha Riihimäkib75263d2009-10-22 15:17:36 +030010645 tcg_temp_free_i32(tmp2);
Peter Maydell7d1b0092011-03-06 21:39:54 +000010646 tcg_temp_free_i32(tmp);
pbrook191abaa2006-02-04 21:50:36 +000010647 } else if (i == rn) {
pbrookb0109802008-03-31 03:47:03 +000010648 loaded_var = tmp;
pbrook191abaa2006-02-04 21:50:36 +000010649 loaded_base = 1;
Richard Henderson9d090d12019-03-01 12:29:21 -080010650 } else if (i == 15 && exc_return) {
Peter Maydellfb0e8e72016-10-10 16:26:03 +010010651 store_pc_exc_ret(s, tmp);
bellardb5ff1b32005-11-26 10:38:39 +000010652 } else {
Peter Maydell7dcc1f82014-10-28 19:24:03 +000010653 store_reg_from_load(s, i, tmp);
bellardb5ff1b32005-11-26 10:38:39 +000010654 }
bellard2c0262a2003-09-30 20:34:21 +000010655 } else {
10656 /* store */
10657 if (i == 15) {
balrog7a774c82007-06-10 13:53:18 +000010658 /* special case: r15 = PC + 8 */
10659 val = (long)s->pc + 4;
Peter Maydell7d1b0092011-03-06 21:39:54 +000010660 tmp = tcg_temp_new_i32();
pbrookb0109802008-03-31 03:47:03 +000010661 tcg_gen_movi_i32(tmp, val);
bellardb5ff1b32005-11-26 10:38:39 +000010662 } else if (user) {
Peter Maydell7d1b0092011-03-06 21:39:54 +000010663 tmp = tcg_temp_new_i32();
Juha Riihimäkib75263d2009-10-22 15:17:36 +030010664 tmp2 = tcg_const_i32(i);
Blue Swirl9ef39272012-09-04 20:19:15 +000010665 gen_helper_get_user_reg(tmp, cpu_env, tmp2);
Juha Riihimäkib75263d2009-10-22 15:17:36 +030010666 tcg_temp_free_i32(tmp2);
bellard2c0262a2003-09-30 20:34:21 +000010667 } else {
pbrookb0109802008-03-31 03:47:03 +000010668 tmp = load_reg(s, i);
bellard2c0262a2003-09-30 20:34:21 +000010669 }
Paolo Bonzini12dcc322016-03-04 11:30:20 +000010670 gen_aa32_st32(s, tmp, addr, get_mem_index(s));
Peter Maydell5a839c02013-05-23 13:00:00 +010010671 tcg_temp_free_i32(tmp);
bellard2c0262a2003-09-30 20:34:21 +000010672 }
10673 j++;
10674 /* no need to add after the last transfer */
10675 if (j != n)
pbrookb0109802008-03-31 03:47:03 +000010676 tcg_gen_addi_i32(addr, addr, 4);
bellard2c0262a2003-09-30 20:34:21 +000010677 }
10678 }
10679 if (insn & (1 << 21)) {
10680 /* write back */
10681 if (insn & (1 << 23)) {
10682 if (insn & (1 << 24)) {
10683 /* pre increment */
10684 } else {
10685 /* post increment */
pbrookb0109802008-03-31 03:47:03 +000010686 tcg_gen_addi_i32(addr, addr, 4);
bellard2c0262a2003-09-30 20:34:21 +000010687 }
10688 } else {
10689 if (insn & (1 << 24)) {
10690 /* pre decrement */
10691 if (n != 1)
pbrookb0109802008-03-31 03:47:03 +000010692 tcg_gen_addi_i32(addr, addr, -((n - 1) * 4));
bellard2c0262a2003-09-30 20:34:21 +000010693 } else {
10694 /* post decrement */
pbrookb0109802008-03-31 03:47:03 +000010695 tcg_gen_addi_i32(addr, addr, -(n * 4));
bellard2c0262a2003-09-30 20:34:21 +000010696 }
10697 }
pbrookb0109802008-03-31 03:47:03 +000010698 store_reg(s, rn, addr);
10699 } else {
Peter Maydell7d1b0092011-03-06 21:39:54 +000010700 tcg_temp_free_i32(addr);
bellard2c0262a2003-09-30 20:34:21 +000010701 }
pbrook191abaa2006-02-04 21:50:36 +000010702 if (loaded_base) {
pbrookb0109802008-03-31 03:47:03 +000010703 store_reg(s, rn, loaded_var);
pbrook191abaa2006-02-04 21:50:36 +000010704 }
Peter Maydellda3e53d2015-03-16 12:30:47 +000010705 if (exc_return) {
bellardb5ff1b32005-11-26 10:38:39 +000010706 /* Restore CPSR from SPSR. */
pbrookd9ba4832008-03-31 03:46:50 +000010707 tmp = load_cpu_field(spsr);
Aaron Lindsaye69ad9d2018-04-26 11:04:39 +010010708 if (tb_cflags(s->base.tb) & CF_USE_ICOUNT) {
10709 gen_io_start();
10710 }
Peter Maydell235ea1f2016-02-23 15:36:43 +000010711 gen_helper_cpsr_write_eret(cpu_env, tmp);
Aaron Lindsaye69ad9d2018-04-26 11:04:39 +010010712 if (tb_cflags(s->base.tb) & CF_USE_ICOUNT) {
10713 gen_io_end();
10714 }
Peter Maydell7d1b0092011-03-06 21:39:54 +000010715 tcg_temp_free_i32(tmp);
Alex Bennéeb29fd332017-07-17 13:36:07 +010010716 /* Must exit loop to check un-masked IRQs */
Lluís Vilanovadcba3a82017-07-14 12:01:59 +030010717 s->base.is_jmp = DISAS_EXIT;
bellardb5ff1b32005-11-26 10:38:39 +000010718 }
bellard2c0262a2003-09-30 20:34:21 +000010719 }
10720 break;
10721 case 0xa:
10722 case 0xb:
10723 {
bellard99c475a2005-01-31 20:45:13 +000010724 int32_t offset;
ths3b46e622007-09-17 08:09:54 +000010725
bellard2c0262a2003-09-30 20:34:21 +000010726 /* branch (and link) */
bellard99c475a2005-01-31 20:45:13 +000010727 val = (int32_t)s->pc;
bellard2c0262a2003-09-30 20:34:21 +000010728 if (insn & (1 << 24)) {
Peter Maydell7d1b0092011-03-06 21:39:54 +000010729 tmp = tcg_temp_new_i32();
pbrook5e3f8782008-03-31 03:47:34 +000010730 tcg_gen_movi_i32(tmp, val);
10731 store_reg(s, 14, tmp);
bellard2c0262a2003-09-30 20:34:21 +000010732 }
Peter Maydell534df152013-09-10 19:09:32 +010010733 offset = sextract32(insn << 2, 0, 26);
10734 val += offset + 4;
bellard8aaca4c2005-04-23 18:27:52 +000010735 gen_jmp(s, val);
bellard2c0262a2003-09-30 20:34:21 +000010736 }
10737 break;
bellardb7bcbe92005-02-22 19:27:29 +000010738 case 0xc:
10739 case 0xd:
10740 case 0xe:
Will Newton6a57f3e2013-12-06 17:01:40 +000010741 if (((insn >> 8) & 0xe) == 10) {
10742 /* VFP. */
Peter Maydell7dcc1f82014-10-28 19:24:03 +000010743 if (disas_vfp_insn(s, insn)) {
Will Newton6a57f3e2013-12-06 17:01:40 +000010744 goto illegal_op;
10745 }
Peter Maydell7dcc1f82014-10-28 19:24:03 +000010746 } else if (disas_coproc_insn(s, insn)) {
Will Newton6a57f3e2013-12-06 17:01:40 +000010747 /* Coprocessor. */
balrogc1713132007-04-30 01:26:42 +000010748 goto illegal_op;
Will Newton6a57f3e2013-12-06 17:01:40 +000010749 }
bellardb7bcbe92005-02-22 19:27:29 +000010750 break;
bellard2c0262a2003-09-30 20:34:21 +000010751 case 0xf:
10752 /* swi */
Peter Maydelleaed1292013-09-03 20:12:06 +010010753 gen_set_pc_im(s, s->pc);
Peter Maydelld4a2dc62014-04-15 19:18:38 +010010754 s->svc_imm = extract32(insn, 0, 24);
Lluís Vilanovadcba3a82017-07-14 12:01:59 +030010755 s->base.is_jmp = DISAS_SWI;
bellard2c0262a2003-09-30 20:34:21 +000010756 break;
bellard2c0262a2003-09-30 20:34:21 +000010757 default:
10758 illegal_op:
Greg Bellows73710362015-05-29 11:28:50 +010010759 gen_exception_insn(s, 4, EXCP_UDEF, syn_uncategorized(),
10760 default_exception_el(s));
bellard2c0262a2003-09-30 20:34:21 +000010761 break;
10762 }
10763 }
10764}
10765
Peter Maydell296e5a02017-10-09 14:48:36 +010010766static bool thumb_insn_is_16bit(DisasContext *s, uint32_t insn)
10767{
10768 /* Return true if this is a 16 bit instruction. We must be precise
10769 * about this (matching the decode). We assume that s->pc still
10770 * points to the first 16 bits of the insn.
10771 */
10772 if ((insn >> 11) < 0x1d) {
10773 /* Definitely a 16-bit instruction */
10774 return true;
10775 }
10776
10777 /* Top five bits 0b11101 / 0b11110 / 0b11111 : this is the
10778 * first half of a 32-bit Thumb insn. Thumb-1 cores might
10779 * end up actually treating this as two 16-bit insns, though,
10780 * if it's half of a bl/blx pair that might span a page boundary.
10781 */
Julia Suvorova14120102018-06-15 14:57:16 +010010782 if (arm_dc_feature(s, ARM_FEATURE_THUMB2) ||
10783 arm_dc_feature(s, ARM_FEATURE_M)) {
Peter Maydell296e5a02017-10-09 14:48:36 +010010784 /* Thumb2 cores (including all M profile ones) always treat
10785 * 32-bit insns as 32-bit.
10786 */
10787 return false;
10788 }
10789
Emilio G. Cotabfe7ad52018-04-10 11:09:52 -040010790 if ((insn >> 11) == 0x1e && s->pc - s->page_start < TARGET_PAGE_SIZE - 3) {
Peter Maydell296e5a02017-10-09 14:48:36 +010010791 /* 0b1111_0xxx_xxxx_xxxx : BL/BLX prefix, and the suffix
10792 * is not on the next page; we merge this into a 32-bit
10793 * insn.
10794 */
10795 return false;
10796 }
10797 /* 0b1110_1xxx_xxxx_xxxx : BLX suffix (or UNDEF);
10798 * 0b1111_1xxx_xxxx_xxxx : BL suffix;
10799 * 0b1111_0xxx_xxxx_xxxx : BL/BLX prefix on the end of a page
10800 * -- handle as single 16 bit insn
10801 */
10802 return true;
10803}
10804
/* Return nonzero if OP is one of the Thumb-2 data-processing opcodes
 * that is a logical operation: these are exactly opcodes 0..7.
 */
static int
thumb2_logic_op(int op)
{
    return op < 8 ? 1 : 0;
}
10811
10812/* Generate code for a Thumb-2 data processing operation. If CONDS is nonzero
10813 then set condition code flags based on the result of the operation.
10814 If SHIFTER_OUT is nonzero then set the carry flag for logical operations
10815 to the high bit of T1.
10816 Returns zero if the opcode is valid. */
10817
10818static int
Peter Maydell39d54922013-05-23 12:59:55 +010010819gen_thumb2_data_op(DisasContext *s, int op, int conds, uint32_t shifter_out,
10820 TCGv_i32 t0, TCGv_i32 t1)
pbrook9ee6e8b2007-11-11 00:04:49 +000010821{
10822 int logic_cc;
10823
10824 logic_cc = 0;
10825 switch (op) {
10826 case 0: /* and */
Filip Navara396e4672009-10-15 12:55:34 +020010827 tcg_gen_and_i32(t0, t0, t1);
pbrook9ee6e8b2007-11-11 00:04:49 +000010828 logic_cc = conds;
10829 break;
10830 case 1: /* bic */
Aurelien Jarnof669df22009-10-15 16:45:14 +020010831 tcg_gen_andc_i32(t0, t0, t1);
pbrook9ee6e8b2007-11-11 00:04:49 +000010832 logic_cc = conds;
10833 break;
10834 case 2: /* orr */
Filip Navara396e4672009-10-15 12:55:34 +020010835 tcg_gen_or_i32(t0, t0, t1);
pbrook9ee6e8b2007-11-11 00:04:49 +000010836 logic_cc = conds;
10837 break;
10838 case 3: /* orn */
Peter Maydell29501f12011-03-06 20:32:09 +000010839 tcg_gen_orc_i32(t0, t0, t1);
pbrook9ee6e8b2007-11-11 00:04:49 +000010840 logic_cc = conds;
10841 break;
10842 case 4: /* eor */
Filip Navara396e4672009-10-15 12:55:34 +020010843 tcg_gen_xor_i32(t0, t0, t1);
pbrook9ee6e8b2007-11-11 00:04:49 +000010844 logic_cc = conds;
10845 break;
10846 case 8: /* add */
10847 if (conds)
Aurelien Jarno72485ec2012-10-05 15:04:44 +010010848 gen_add_CC(t0, t0, t1);
pbrook9ee6e8b2007-11-11 00:04:49 +000010849 else
Filip Navara396e4672009-10-15 12:55:34 +020010850 tcg_gen_add_i32(t0, t0, t1);
pbrook9ee6e8b2007-11-11 00:04:49 +000010851 break;
10852 case 10: /* adc */
10853 if (conds)
Richard Henderson49b4c312013-02-19 23:52:08 -080010854 gen_adc_CC(t0, t0, t1);
pbrook9ee6e8b2007-11-11 00:04:49 +000010855 else
Filip Navara396e4672009-10-15 12:55:34 +020010856 gen_adc(t0, t1);
pbrook9ee6e8b2007-11-11 00:04:49 +000010857 break;
10858 case 11: /* sbc */
Richard Henderson2de68a42013-02-19 23:52:09 -080010859 if (conds) {
10860 gen_sbc_CC(t0, t0, t1);
10861 } else {
Filip Navara396e4672009-10-15 12:55:34 +020010862 gen_sub_carry(t0, t0, t1);
Richard Henderson2de68a42013-02-19 23:52:09 -080010863 }
pbrook9ee6e8b2007-11-11 00:04:49 +000010864 break;
10865 case 13: /* sub */
10866 if (conds)
Aurelien Jarno72485ec2012-10-05 15:04:44 +010010867 gen_sub_CC(t0, t0, t1);
pbrook9ee6e8b2007-11-11 00:04:49 +000010868 else
Filip Navara396e4672009-10-15 12:55:34 +020010869 tcg_gen_sub_i32(t0, t0, t1);
pbrook9ee6e8b2007-11-11 00:04:49 +000010870 break;
10871 case 14: /* rsb */
10872 if (conds)
Aurelien Jarno72485ec2012-10-05 15:04:44 +010010873 gen_sub_CC(t0, t1, t0);
pbrook9ee6e8b2007-11-11 00:04:49 +000010874 else
Filip Navara396e4672009-10-15 12:55:34 +020010875 tcg_gen_sub_i32(t0, t1, t0);
pbrook9ee6e8b2007-11-11 00:04:49 +000010876 break;
10877 default: /* 5, 6, 7, 9, 12, 15. */
10878 return 1;
10879 }
10880 if (logic_cc) {
Filip Navara396e4672009-10-15 12:55:34 +020010881 gen_logic_CC(t0);
pbrook9ee6e8b2007-11-11 00:04:49 +000010882 if (shifter_out)
Filip Navara396e4672009-10-15 12:55:34 +020010883 gen_set_CF_bit31(t1);
pbrook9ee6e8b2007-11-11 00:04:49 +000010884 }
10885 return 0;
10886}
10887
Peter Maydell2eea8412018-01-11 13:25:40 +000010888/* Translate a 32-bit thumb instruction. */
10889static void disas_thumb2_insn(DisasContext *s, uint32_t insn)
pbrook9ee6e8b2007-11-11 00:04:49 +000010890{
Peter Maydell296e5a02017-10-09 14:48:36 +010010891 uint32_t imm, shift, offset;
pbrook9ee6e8b2007-11-11 00:04:49 +000010892 uint32_t rd, rn, rm, rs;
Peter Maydell39d54922013-05-23 12:59:55 +010010893 TCGv_i32 tmp;
10894 TCGv_i32 tmp2;
10895 TCGv_i32 tmp3;
10896 TCGv_i32 addr;
pbrooka7812ae2008-11-17 14:43:54 +000010897 TCGv_i64 tmp64;
pbrook9ee6e8b2007-11-11 00:04:49 +000010898 int op;
10899 int shiftop;
10900 int conds;
10901 int logic_cc;
10902
Julia Suvorova14120102018-06-15 14:57:16 +010010903 /*
10904 * ARMv6-M supports a limited subset of Thumb2 instructions.
10905 * Other Thumb1 architectures allow only 32-bit
10906 * combined BL/BLX prefix and suffix.
Peter Maydell296e5a02017-10-09 14:48:36 +010010907 */
Julia Suvorova14120102018-06-15 14:57:16 +010010908 if (arm_dc_feature(s, ARM_FEATURE_M) &&
10909 !arm_dc_feature(s, ARM_FEATURE_V7)) {
10910 int i;
10911 bool found = false;
Julia Suvorova8297cb12018-06-22 13:28:34 +010010912 static const uint32_t armv6m_insn[] = {0xf3808000 /* msr */,
10913 0xf3b08040 /* dsb */,
10914 0xf3b08050 /* dmb */,
10915 0xf3b08060 /* isb */,
10916 0xf3e08000 /* mrs */,
10917 0xf000d000 /* bl */};
10918 static const uint32_t armv6m_mask[] = {0xffe0d000,
10919 0xfff0d0f0,
10920 0xfff0d0f0,
10921 0xfff0d0f0,
10922 0xffe0d000,
10923 0xf800d000};
Julia Suvorova14120102018-06-15 14:57:16 +010010924
10925 for (i = 0; i < ARRAY_SIZE(armv6m_insn); i++) {
10926 if ((insn & armv6m_mask[i]) == armv6m_insn[i]) {
10927 found = true;
10928 break;
10929 }
10930 }
10931 if (!found) {
10932 goto illegal_op;
10933 }
10934 } else if ((insn & 0xf800e800) != 0xf000e800) {
pbrook9ee6e8b2007-11-11 00:04:49 +000010935 ARCH(6T2);
10936 }
10937
10938 rn = (insn >> 16) & 0xf;
10939 rs = (insn >> 12) & 0xf;
10940 rd = (insn >> 8) & 0xf;
10941 rm = insn & 0xf;
10942 switch ((insn >> 25) & 0xf) {
10943 case 0: case 1: case 2: case 3:
10944 /* 16-bit instructions. Should never happen. */
10945 abort();
10946 case 4:
10947 if (insn & (1 << 22)) {
Peter Maydellebfe27c2017-09-04 15:21:51 +010010948 /* 0b1110_100x_x1xx_xxxx_xxxx_xxxx_xxxx_xxxx
10949 * - load/store doubleword, load/store exclusive, ldacq/strel,
Peter Maydell5158de22017-12-13 17:59:24 +000010950 * table branch, TT.
Peter Maydellebfe27c2017-09-04 15:21:51 +010010951 */
Peter Maydell76eff042017-10-09 14:48:39 +010010952 if (insn == 0xe97fe97f && arm_dc_feature(s, ARM_FEATURE_M) &&
10953 arm_dc_feature(s, ARM_FEATURE_V8)) {
10954 /* 0b1110_1001_0111_1111_1110_1001_0111_111
10955 * - SG (v8M only)
10956 * The bulk of the behaviour for this instruction is implemented
10957 * in v7m_handle_execute_nsc(), which deals with the insn when
10958 * it is executed by a CPU in non-secure state from memory
10959 * which is Secure & NonSecure-Callable.
10960 * Here we only need to handle the remaining cases:
10961 * * in NS memory (including the "security extension not
10962 * implemented" case) : NOP
10963 * * in S memory but CPU already secure (clear IT bits)
10964 * We know that the attribute for the memory this insn is
10965 * in must match the current CPU state, because otherwise
10966 * get_phys_addr_pmsav8 would have generated an exception.
10967 */
10968 if (s->v8m_secure) {
10969 /* Like the IT insn, we don't need to generate any code */
10970 s->condexec_cond = 0;
10971 s->condexec_mask = 0;
10972 }
10973 } else if (insn & 0x01200000) {
Peter Maydellebfe27c2017-09-04 15:21:51 +010010974 /* 0b1110_1000_x11x_xxxx_xxxx_xxxx_xxxx_xxxx
10975 * - load/store dual (post-indexed)
10976 * 0b1111_1001_x10x_xxxx_xxxx_xxxx_xxxx_xxxx
10977 * - load/store dual (literal and immediate)
10978 * 0b1111_1001_x11x_xxxx_xxxx_xxxx_xxxx_xxxx
10979 * - load/store dual (pre-indexed)
10980 */
Peter Maydell910d7692018-10-08 14:55:04 +010010981 bool wback = extract32(insn, 21, 1);
10982
pbrook9ee6e8b2007-11-11 00:04:49 +000010983 if (rn == 15) {
Peter Maydellebfe27c2017-09-04 15:21:51 +010010984 if (insn & (1 << 21)) {
10985 /* UNPREDICTABLE */
10986 goto illegal_op;
10987 }
Peter Maydell7d1b0092011-03-06 21:39:54 +000010988 addr = tcg_temp_new_i32();
pbrookb0109802008-03-31 03:47:03 +000010989 tcg_gen_movi_i32(addr, s->pc & ~3);
pbrook9ee6e8b2007-11-11 00:04:49 +000010990 } else {
pbrookb0109802008-03-31 03:47:03 +000010991 addr = load_reg(s, rn);
pbrook9ee6e8b2007-11-11 00:04:49 +000010992 }
10993 offset = (insn & 0xff) * 4;
Peter Maydell910d7692018-10-08 14:55:04 +010010994 if ((insn & (1 << 23)) == 0) {
pbrook9ee6e8b2007-11-11 00:04:49 +000010995 offset = -offset;
Peter Maydell910d7692018-10-08 14:55:04 +010010996 }
10997
10998 if (s->v8m_stackcheck && rn == 13 && wback) {
10999 /*
11000 * Here 'addr' is the current SP; if offset is +ve we're
11001 * moving SP up, else down. It is UNKNOWN whether the limit
11002 * check triggers when SP starts below the limit and ends
11003 * up above it; check whichever of the current and final
11004 * SP is lower, so QEMU will trigger in that situation.
11005 */
11006 if ((int32_t)offset < 0) {
11007 TCGv_i32 newsp = tcg_temp_new_i32();
11008
11009 tcg_gen_addi_i32(newsp, addr, offset);
11010 gen_helper_v8m_stackcheck(cpu_env, newsp);
11011 tcg_temp_free_i32(newsp);
11012 } else {
11013 gen_helper_v8m_stackcheck(cpu_env, addr);
11014 }
11015 }
11016
pbrook9ee6e8b2007-11-11 00:04:49 +000011017 if (insn & (1 << 24)) {
pbrookb0109802008-03-31 03:47:03 +000011018 tcg_gen_addi_i32(addr, addr, offset);
pbrook9ee6e8b2007-11-11 00:04:49 +000011019 offset = 0;
11020 }
11021 if (insn & (1 << 20)) {
11022 /* ldrd */
Peter Maydelle2592fa2013-05-23 13:00:02 +010011023 tmp = tcg_temp_new_i32();
Paolo Bonzini12dcc322016-03-04 11:30:20 +000011024 gen_aa32_ld32u(s, tmp, addr, get_mem_index(s));
pbrookb0109802008-03-31 03:47:03 +000011025 store_reg(s, rs, tmp);
11026 tcg_gen_addi_i32(addr, addr, 4);
Peter Maydelle2592fa2013-05-23 13:00:02 +010011027 tmp = tcg_temp_new_i32();
Paolo Bonzini12dcc322016-03-04 11:30:20 +000011028 gen_aa32_ld32u(s, tmp, addr, get_mem_index(s));
pbrookb0109802008-03-31 03:47:03 +000011029 store_reg(s, rd, tmp);
pbrook9ee6e8b2007-11-11 00:04:49 +000011030 } else {
11031 /* strd */
pbrookb0109802008-03-31 03:47:03 +000011032 tmp = load_reg(s, rs);
Paolo Bonzini12dcc322016-03-04 11:30:20 +000011033 gen_aa32_st32(s, tmp, addr, get_mem_index(s));
Peter Maydelle2592fa2013-05-23 13:00:02 +010011034 tcg_temp_free_i32(tmp);
pbrookb0109802008-03-31 03:47:03 +000011035 tcg_gen_addi_i32(addr, addr, 4);
11036 tmp = load_reg(s, rd);
Paolo Bonzini12dcc322016-03-04 11:30:20 +000011037 gen_aa32_st32(s, tmp, addr, get_mem_index(s));
Peter Maydelle2592fa2013-05-23 13:00:02 +010011038 tcg_temp_free_i32(tmp);
pbrook9ee6e8b2007-11-11 00:04:49 +000011039 }
Peter Maydell910d7692018-10-08 14:55:04 +010011040 if (wback) {
pbrook9ee6e8b2007-11-11 00:04:49 +000011041 /* Base writeback. */
pbrookb0109802008-03-31 03:47:03 +000011042 tcg_gen_addi_i32(addr, addr, offset - 4);
11043 store_reg(s, rn, addr);
11044 } else {
Peter Maydell7d1b0092011-03-06 21:39:54 +000011045 tcg_temp_free_i32(addr);
pbrook9ee6e8b2007-11-11 00:04:49 +000011046 }
11047 } else if ((insn & (1 << 23)) == 0) {
Peter Maydellebfe27c2017-09-04 15:21:51 +010011048 /* 0b1110_1000_010x_xxxx_xxxx_xxxx_xxxx_xxxx
11049 * - load/store exclusive word
Peter Maydell5158de22017-12-13 17:59:24 +000011050 * - TT (v8M only)
Peter Maydellebfe27c2017-09-04 15:21:51 +010011051 */
11052 if (rs == 15) {
Peter Maydell5158de22017-12-13 17:59:24 +000011053 if (!(insn & (1 << 20)) &&
11054 arm_dc_feature(s, ARM_FEATURE_M) &&
11055 arm_dc_feature(s, ARM_FEATURE_V8)) {
11056 /* 0b1110_1000_0100_xxxx_1111_xxxx_xxxx_xxxx
11057 * - TT (v8M only)
11058 */
11059 bool alt = insn & (1 << 7);
11060 TCGv_i32 addr, op, ttresp;
11061
11062 if ((insn & 0x3f) || rd == 13 || rd == 15 || rn == 15) {
11063 /* we UNDEF for these UNPREDICTABLE cases */
11064 goto illegal_op;
11065 }
11066
11067 if (alt && !s->v8m_secure) {
11068 goto illegal_op;
11069 }
11070
11071 addr = load_reg(s, rn);
11072 op = tcg_const_i32(extract32(insn, 6, 2));
11073 ttresp = tcg_temp_new_i32();
11074 gen_helper_v7m_tt(ttresp, cpu_env, addr, op);
11075 tcg_temp_free_i32(addr);
11076 tcg_temp_free_i32(op);
11077 store_reg(s, rd, ttresp);
Peter Maydell384c6c02018-02-06 10:39:41 +000011078 break;
Peter Maydell5158de22017-12-13 17:59:24 +000011079 }
Peter Maydellebfe27c2017-09-04 15:21:51 +010011080 goto illegal_op;
11081 }
Peter Maydell39d54922013-05-23 12:59:55 +010011082 addr = tcg_temp_local_new_i32();
Aurelien Jarno98a46312009-10-18 15:53:28 +020011083 load_reg_var(s, addr, rn);
Paul Brook426f5ab2009-11-22 21:35:13 +000011084 tcg_gen_addi_i32(addr, addr, (insn & 0xff) << 2);
pbrook9ee6e8b2007-11-11 00:04:49 +000011085 if (insn & (1 << 20)) {
Paul Brook426f5ab2009-11-22 21:35:13 +000011086 gen_load_exclusive(s, rs, 15, addr, 2);
pbrook9ee6e8b2007-11-11 00:04:49 +000011087 } else {
Paul Brook426f5ab2009-11-22 21:35:13 +000011088 gen_store_exclusive(s, rd, rs, 15, addr, 2);
pbrook9ee6e8b2007-11-11 00:04:49 +000011089 }
Peter Maydell39d54922013-05-23 12:59:55 +010011090 tcg_temp_free_i32(addr);
Mans Rullgard2359bf82013-07-15 14:35:25 +010011091 } else if ((insn & (7 << 5)) == 0) {
pbrook9ee6e8b2007-11-11 00:04:49 +000011092 /* Table Branch. */
11093 if (rn == 15) {
Peter Maydell7d1b0092011-03-06 21:39:54 +000011094 addr = tcg_temp_new_i32();
pbrookb0109802008-03-31 03:47:03 +000011095 tcg_gen_movi_i32(addr, s->pc);
pbrook9ee6e8b2007-11-11 00:04:49 +000011096 } else {
pbrookb0109802008-03-31 03:47:03 +000011097 addr = load_reg(s, rn);
pbrook9ee6e8b2007-11-11 00:04:49 +000011098 }
pbrookb26eefb2008-03-31 03:44:26 +000011099 tmp = load_reg(s, rm);
pbrookb0109802008-03-31 03:47:03 +000011100 tcg_gen_add_i32(addr, addr, tmp);
pbrook9ee6e8b2007-11-11 00:04:49 +000011101 if (insn & (1 << 4)) {
11102 /* tbh */
pbrookb0109802008-03-31 03:47:03 +000011103 tcg_gen_add_i32(addr, addr, tmp);
Peter Maydell7d1b0092011-03-06 21:39:54 +000011104 tcg_temp_free_i32(tmp);
Peter Maydelle2592fa2013-05-23 13:00:02 +010011105 tmp = tcg_temp_new_i32();
Paolo Bonzini12dcc322016-03-04 11:30:20 +000011106 gen_aa32_ld16u(s, tmp, addr, get_mem_index(s));
pbrook9ee6e8b2007-11-11 00:04:49 +000011107 } else { /* tbb */
Peter Maydell7d1b0092011-03-06 21:39:54 +000011108 tcg_temp_free_i32(tmp);
Peter Maydelle2592fa2013-05-23 13:00:02 +010011109 tmp = tcg_temp_new_i32();
Paolo Bonzini12dcc322016-03-04 11:30:20 +000011110 gen_aa32_ld8u(s, tmp, addr, get_mem_index(s));
pbrook9ee6e8b2007-11-11 00:04:49 +000011111 }
Peter Maydell7d1b0092011-03-06 21:39:54 +000011112 tcg_temp_free_i32(addr);
pbrookb0109802008-03-31 03:47:03 +000011113 tcg_gen_shli_i32(tmp, tmp, 1);
11114 tcg_gen_addi_i32(tmp, tmp, s->pc);
11115 store_reg(s, 15, tmp);
pbrook9ee6e8b2007-11-11 00:04:49 +000011116 } else {
Peter Maydell96c55292019-01-07 15:23:48 +000011117 bool is_lasr = false;
11118 bool is_ld = extract32(insn, 20, 1);
Mans Rullgard2359bf82013-07-15 14:35:25 +010011119 int op2 = (insn >> 6) & 0x3;
pbrook9ee6e8b2007-11-11 00:04:49 +000011120 op = (insn >> 4) & 0x3;
Mans Rullgard2359bf82013-07-15 14:35:25 +010011121 switch (op2) {
11122 case 0:
Paul Brook426f5ab2009-11-22 21:35:13 +000011123 goto illegal_op;
Mans Rullgard2359bf82013-07-15 14:35:25 +010011124 case 1:
11125 /* Load/store exclusive byte/halfword/doubleword */
11126 if (op == 2) {
11127 goto illegal_op;
11128 }
11129 ARCH(7);
11130 break;
11131 case 2:
11132 /* Load-acquire/store-release */
11133 if (op == 3) {
11134 goto illegal_op;
11135 }
11136 /* Fall through */
11137 case 3:
11138 /* Load-acquire/store-release exclusive */
11139 ARCH(8);
Peter Maydell96c55292019-01-07 15:23:48 +000011140 is_lasr = true;
Mans Rullgard2359bf82013-07-15 14:35:25 +010011141 break;
Paul Brook426f5ab2009-11-22 21:35:13 +000011142 }
Peter Maydell96c55292019-01-07 15:23:48 +000011143
11144 if (is_lasr && !is_ld) {
11145 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_STRL);
11146 }
11147
Peter Maydell39d54922013-05-23 12:59:55 +010011148 addr = tcg_temp_local_new_i32();
Aurelien Jarno98a46312009-10-18 15:53:28 +020011149 load_reg_var(s, addr, rn);
Mans Rullgard2359bf82013-07-15 14:35:25 +010011150 if (!(op2 & 1)) {
Peter Maydell96c55292019-01-07 15:23:48 +000011151 if (is_ld) {
Mans Rullgard2359bf82013-07-15 14:35:25 +010011152 tmp = tcg_temp_new_i32();
11153 switch (op) {
11154 case 0: /* ldab */
Peter Maydell9bb65582017-02-07 18:30:00 +000011155 gen_aa32_ld8u_iss(s, tmp, addr, get_mem_index(s),
11156 rs | ISSIsAcqRel);
Mans Rullgard2359bf82013-07-15 14:35:25 +010011157 break;
11158 case 1: /* ldah */
Peter Maydell9bb65582017-02-07 18:30:00 +000011159 gen_aa32_ld16u_iss(s, tmp, addr, get_mem_index(s),
11160 rs | ISSIsAcqRel);
Mans Rullgard2359bf82013-07-15 14:35:25 +010011161 break;
11162 case 2: /* lda */
Peter Maydell9bb65582017-02-07 18:30:00 +000011163 gen_aa32_ld32u_iss(s, tmp, addr, get_mem_index(s),
11164 rs | ISSIsAcqRel);
Mans Rullgard2359bf82013-07-15 14:35:25 +010011165 break;
11166 default:
11167 abort();
11168 }
11169 store_reg(s, rs, tmp);
11170 } else {
11171 tmp = load_reg(s, rs);
11172 switch (op) {
11173 case 0: /* stlb */
Peter Maydell9bb65582017-02-07 18:30:00 +000011174 gen_aa32_st8_iss(s, tmp, addr, get_mem_index(s),
11175 rs | ISSIsAcqRel);
Mans Rullgard2359bf82013-07-15 14:35:25 +010011176 break;
11177 case 1: /* stlh */
Peter Maydell9bb65582017-02-07 18:30:00 +000011178 gen_aa32_st16_iss(s, tmp, addr, get_mem_index(s),
11179 rs | ISSIsAcqRel);
Mans Rullgard2359bf82013-07-15 14:35:25 +010011180 break;
11181 case 2: /* stl */
Peter Maydell9bb65582017-02-07 18:30:00 +000011182 gen_aa32_st32_iss(s, tmp, addr, get_mem_index(s),
11183 rs | ISSIsAcqRel);
Mans Rullgard2359bf82013-07-15 14:35:25 +010011184 break;
11185 default:
11186 abort();
11187 }
11188 tcg_temp_free_i32(tmp);
11189 }
Peter Maydell96c55292019-01-07 15:23:48 +000011190 } else if (is_ld) {
Paul Brook426f5ab2009-11-22 21:35:13 +000011191 gen_load_exclusive(s, rs, rd, addr, op);
pbrook9ee6e8b2007-11-11 00:04:49 +000011192 } else {
Paul Brook426f5ab2009-11-22 21:35:13 +000011193 gen_store_exclusive(s, rm, rs, rd, addr, op);
pbrook9ee6e8b2007-11-11 00:04:49 +000011194 }
Peter Maydell39d54922013-05-23 12:59:55 +010011195 tcg_temp_free_i32(addr);
Peter Maydell96c55292019-01-07 15:23:48 +000011196
11197 if (is_lasr && is_ld) {
11198 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_LDAQ);
11199 }
pbrook9ee6e8b2007-11-11 00:04:49 +000011200 }
11201 } else {
11202 /* Load/store multiple, RFE, SRS. */
11203 if (((insn >> 23) & 1) == ((insn >> 24) & 1)) {
Peter Maydell00115972013-03-05 00:31:17 +000011204 /* RFE, SRS: not available in user mode or on M profile */
Peter Maydellb53d8922014-10-28 19:24:02 +000011205 if (IS_USER(s) || arm_dc_feature(s, ARM_FEATURE_M)) {
pbrook9ee6e8b2007-11-11 00:04:49 +000011206 goto illegal_op;
Peter Maydell00115972013-03-05 00:31:17 +000011207 }
pbrook9ee6e8b2007-11-11 00:04:49 +000011208 if (insn & (1 << 20)) {
11209 /* rfe */
pbrookb0109802008-03-31 03:47:03 +000011210 addr = load_reg(s, rn);
11211 if ((insn & (1 << 24)) == 0)
11212 tcg_gen_addi_i32(addr, addr, -8);
11213 /* Load PC into tmp and CPSR into tmp2. */
Peter Maydelle2592fa2013-05-23 13:00:02 +010011214 tmp = tcg_temp_new_i32();
Paolo Bonzini12dcc322016-03-04 11:30:20 +000011215 gen_aa32_ld32u(s, tmp, addr, get_mem_index(s));
pbrookb0109802008-03-31 03:47:03 +000011216 tcg_gen_addi_i32(addr, addr, 4);
Peter Maydelle2592fa2013-05-23 13:00:02 +010011217 tmp2 = tcg_temp_new_i32();
Paolo Bonzini12dcc322016-03-04 11:30:20 +000011218 gen_aa32_ld32u(s, tmp2, addr, get_mem_index(s));
pbrook9ee6e8b2007-11-11 00:04:49 +000011219 if (insn & (1 << 21)) {
11220 /* Base writeback. */
pbrookb0109802008-03-31 03:47:03 +000011221 if (insn & (1 << 24)) {
11222 tcg_gen_addi_i32(addr, addr, 4);
11223 } else {
11224 tcg_gen_addi_i32(addr, addr, -4);
11225 }
11226 store_reg(s, rn, addr);
11227 } else {
Peter Maydell7d1b0092011-03-06 21:39:54 +000011228 tcg_temp_free_i32(addr);
pbrook9ee6e8b2007-11-11 00:04:49 +000011229 }
pbrookb0109802008-03-31 03:47:03 +000011230 gen_rfe(s, tmp, tmp2);
pbrook9ee6e8b2007-11-11 00:04:49 +000011231 } else {
11232 /* srs */
Peter Maydell81465882013-03-05 00:31:17 +000011233 gen_srs(s, (insn & 0x1f), (insn & (1 << 24)) ? 1 : 2,
11234 insn & (1 << 21));
pbrook9ee6e8b2007-11-11 00:04:49 +000011235 }
11236 } else {
YuYeon Oh5856d442011-04-25 01:23:58 +000011237 int i, loaded_base = 0;
Peter Maydell39d54922013-05-23 12:59:55 +010011238 TCGv_i32 loaded_var;
Peter Maydell7c0ed882018-10-08 14:55:04 +010011239 bool wback = extract32(insn, 21, 1);
pbrook9ee6e8b2007-11-11 00:04:49 +000011240 /* Load/store multiple. */
pbrookb0109802008-03-31 03:47:03 +000011241 addr = load_reg(s, rn);
pbrook9ee6e8b2007-11-11 00:04:49 +000011242 offset = 0;
11243 for (i = 0; i < 16; i++) {
11244 if (insn & (1 << i))
11245 offset += 4;
11246 }
Peter Maydell7c0ed882018-10-08 14:55:04 +010011247
pbrook9ee6e8b2007-11-11 00:04:49 +000011248 if (insn & (1 << 24)) {
pbrookb0109802008-03-31 03:47:03 +000011249 tcg_gen_addi_i32(addr, addr, -offset);
pbrook9ee6e8b2007-11-11 00:04:49 +000011250 }
11251
Peter Maydell7c0ed882018-10-08 14:55:04 +010011252 if (s->v8m_stackcheck && rn == 13 && wback) {
11253 /*
11254 * If the writeback is incrementing SP rather than
11255 * decrementing it, and the initial SP is below the
11256 * stack limit but the final written-back SP would
11257 * be above, then we must not perform any memory
11258 * accesses, but it is IMPDEF whether we generate
11259 * an exception. We choose to do so in this case.
11260 * At this point 'addr' is the lowest address, so
11261 * either the original SP (if incrementing) or our
11262 * final SP (if decrementing), so that's what we check.
11263 */
11264 gen_helper_v8m_stackcheck(cpu_env, addr);
11265 }
11266
Richard Hendersonf7647182017-11-02 12:47:37 +010011267 loaded_var = NULL;
pbrook9ee6e8b2007-11-11 00:04:49 +000011268 for (i = 0; i < 16; i++) {
11269 if ((insn & (1 << i)) == 0)
11270 continue;
11271 if (insn & (1 << 20)) {
11272 /* Load. */
Peter Maydelle2592fa2013-05-23 13:00:02 +010011273 tmp = tcg_temp_new_i32();
Paolo Bonzini12dcc322016-03-04 11:30:20 +000011274 gen_aa32_ld32u(s, tmp, addr, get_mem_index(s));
pbrook9ee6e8b2007-11-11 00:04:49 +000011275 if (i == 15) {
Peter Maydell3bb8a962017-04-20 17:32:31 +010011276 gen_bx_excret(s, tmp);
YuYeon Oh5856d442011-04-25 01:23:58 +000011277 } else if (i == rn) {
11278 loaded_var = tmp;
11279 loaded_base = 1;
pbrook9ee6e8b2007-11-11 00:04:49 +000011280 } else {
pbrookb0109802008-03-31 03:47:03 +000011281 store_reg(s, i, tmp);
pbrook9ee6e8b2007-11-11 00:04:49 +000011282 }
11283 } else {
11284 /* Store. */
pbrookb0109802008-03-31 03:47:03 +000011285 tmp = load_reg(s, i);
Paolo Bonzini12dcc322016-03-04 11:30:20 +000011286 gen_aa32_st32(s, tmp, addr, get_mem_index(s));
Peter Maydelle2592fa2013-05-23 13:00:02 +010011287 tcg_temp_free_i32(tmp);
pbrook9ee6e8b2007-11-11 00:04:49 +000011288 }
pbrookb0109802008-03-31 03:47:03 +000011289 tcg_gen_addi_i32(addr, addr, 4);
pbrook9ee6e8b2007-11-11 00:04:49 +000011290 }
YuYeon Oh5856d442011-04-25 01:23:58 +000011291 if (loaded_base) {
11292 store_reg(s, rn, loaded_var);
11293 }
Peter Maydell7c0ed882018-10-08 14:55:04 +010011294 if (wback) {
pbrook9ee6e8b2007-11-11 00:04:49 +000011295 /* Base register writeback. */
11296 if (insn & (1 << 24)) {
pbrookb0109802008-03-31 03:47:03 +000011297 tcg_gen_addi_i32(addr, addr, -offset);
pbrook9ee6e8b2007-11-11 00:04:49 +000011298 }
11299 /* Fault if writeback register is in register list. */
11300 if (insn & (1 << rn))
11301 goto illegal_op;
pbrookb0109802008-03-31 03:47:03 +000011302 store_reg(s, rn, addr);
11303 } else {
Peter Maydell7d1b0092011-03-06 21:39:54 +000011304 tcg_temp_free_i32(addr);
pbrook9ee6e8b2007-11-11 00:04:49 +000011305 }
11306 }
11307 }
11308 break;
Johan Bengtsson2af9ab72010-12-07 12:01:44 +000011309 case 5:
11310
pbrook9ee6e8b2007-11-11 00:04:49 +000011311 op = (insn >> 21) & 0xf;
Johan Bengtsson2af9ab72010-12-07 12:01:44 +000011312 if (op == 6) {
Aurelio C. Remonda62b44f02015-06-15 18:06:09 +010011313 if (!arm_dc_feature(s, ARM_FEATURE_THUMB_DSP)) {
11314 goto illegal_op;
11315 }
Johan Bengtsson2af9ab72010-12-07 12:01:44 +000011316 /* Halfword pack. */
11317 tmp = load_reg(s, rn);
11318 tmp2 = load_reg(s, rm);
11319 shift = ((insn >> 10) & 0x1c) | ((insn >> 6) & 0x3);
11320 if (insn & (1 << 5)) {
11321 /* pkhtb */
11322 if (shift == 0)
11323 shift = 31;
11324 tcg_gen_sari_i32(tmp2, tmp2, shift);
11325 tcg_gen_andi_i32(tmp, tmp, 0xffff0000);
11326 tcg_gen_ext16u_i32(tmp2, tmp2);
11327 } else {
11328 /* pkhbt */
11329 if (shift)
11330 tcg_gen_shli_i32(tmp2, tmp2, shift);
11331 tcg_gen_ext16u_i32(tmp, tmp);
11332 tcg_gen_andi_i32(tmp2, tmp2, 0xffff0000);
11333 }
11334 tcg_gen_or_i32(tmp, tmp, tmp2);
Peter Maydell7d1b0092011-03-06 21:39:54 +000011335 tcg_temp_free_i32(tmp2);
Filip Navara3174f8e2009-10-15 13:14:28 +020011336 store_reg(s, rd, tmp);
11337 } else {
Johan Bengtsson2af9ab72010-12-07 12:01:44 +000011338 /* Data processing register constant shift. */
11339 if (rn == 15) {
Peter Maydell7d1b0092011-03-06 21:39:54 +000011340 tmp = tcg_temp_new_i32();
Johan Bengtsson2af9ab72010-12-07 12:01:44 +000011341 tcg_gen_movi_i32(tmp, 0);
11342 } else {
11343 tmp = load_reg(s, rn);
11344 }
11345 tmp2 = load_reg(s, rm);
11346
11347 shiftop = (insn >> 4) & 3;
11348 shift = ((insn >> 6) & 3) | ((insn >> 10) & 0x1c);
11349 conds = (insn & (1 << 20)) != 0;
11350 logic_cc = (conds && thumb2_logic_op(op));
11351 gen_arm_shift_im(tmp2, shiftop, shift, logic_cc);
11352 if (gen_thumb2_data_op(s, op, conds, 0, tmp, tmp2))
11353 goto illegal_op;
Peter Maydell7d1b0092011-03-06 21:39:54 +000011354 tcg_temp_free_i32(tmp2);
Peter Maydell55203182018-10-08 14:55:04 +010011355 if (rd == 13 &&
11356 ((op == 2 && rn == 15) ||
11357 (op == 8 && rn == 13) ||
11358 (op == 13 && rn == 13))) {
11359 /* MOV SP, ... or ADD SP, SP, ... or SUB SP, SP, ... */
11360 store_sp_checked(s, tmp);
11361 } else if (rd != 15) {
Johan Bengtsson2af9ab72010-12-07 12:01:44 +000011362 store_reg(s, rd, tmp);
11363 } else {
Peter Maydell7d1b0092011-03-06 21:39:54 +000011364 tcg_temp_free_i32(tmp);
Johan Bengtsson2af9ab72010-12-07 12:01:44 +000011365 }
Filip Navara3174f8e2009-10-15 13:14:28 +020011366 }
pbrook9ee6e8b2007-11-11 00:04:49 +000011367 break;
11368 case 13: /* Misc data processing. */
11369 op = ((insn >> 22) & 6) | ((insn >> 7) & 1);
11370 if (op < 4 && (insn & 0xf000) != 0xf000)
11371 goto illegal_op;
11372 switch (op) {
11373 case 0: /* Register controlled shift. */
pbrook8984bd22008-03-31 03:47:48 +000011374 tmp = load_reg(s, rn);
11375 tmp2 = load_reg(s, rm);
pbrook9ee6e8b2007-11-11 00:04:49 +000011376 if ((insn & 0x70) != 0)
11377 goto illegal_op;
Peter Maydella2d12f02018-10-08 14:55:04 +010011378 /*
11379 * 0b1111_1010_0xxx_xxxx_1111_xxxx_0000_xxxx:
11380 * - MOV, MOVS (register-shifted register), flagsetting
11381 */
pbrook9ee6e8b2007-11-11 00:04:49 +000011382 op = (insn >> 21) & 3;
pbrook8984bd22008-03-31 03:47:48 +000011383 logic_cc = (insn & (1 << 20)) != 0;
11384 gen_arm_shift_reg(tmp, op, tmp2, logic_cc);
11385 if (logic_cc)
11386 gen_logic_CC(tmp);
Peter Maydellbedb8a62017-04-20 17:32:30 +010011387 store_reg(s, rd, tmp);
pbrook9ee6e8b2007-11-11 00:04:49 +000011388 break;
11389 case 1: /* Sign/zero extend. */
Aurelio C. Remonda62b44f02015-06-15 18:06:09 +010011390 op = (insn >> 20) & 7;
11391 switch (op) {
11392 case 0: /* SXTAH, SXTH */
11393 case 1: /* UXTAH, UXTH */
11394 case 4: /* SXTAB, SXTB */
11395 case 5: /* UXTAB, UXTB */
11396 break;
11397 case 2: /* SXTAB16, SXTB16 */
11398 case 3: /* UXTAB16, UXTB16 */
11399 if (!arm_dc_feature(s, ARM_FEATURE_THUMB_DSP)) {
11400 goto illegal_op;
11401 }
11402 break;
11403 default:
11404 goto illegal_op;
11405 }
11406 if (rn != 15) {
11407 if (!arm_dc_feature(s, ARM_FEATURE_THUMB_DSP)) {
11408 goto illegal_op;
11409 }
11410 }
pbrook5e3f8782008-03-31 03:47:34 +000011411 tmp = load_reg(s, rm);
pbrook9ee6e8b2007-11-11 00:04:49 +000011412 shift = (insn >> 4) & 3;
Stefan Weil1301f322011-04-28 17:20:37 +020011413 /* ??? In many cases it's not necessary to do a
pbrook9ee6e8b2007-11-11 00:04:49 +000011414 rotate, a shift is sufficient. */
11415 if (shift != 0)
Aurelien Jarnof669df22009-10-15 16:45:14 +020011416 tcg_gen_rotri_i32(tmp, tmp, shift * 8);
pbrook9ee6e8b2007-11-11 00:04:49 +000011417 op = (insn >> 20) & 7;
11418 switch (op) {
pbrook5e3f8782008-03-31 03:47:34 +000011419 case 0: gen_sxth(tmp); break;
11420 case 1: gen_uxth(tmp); break;
11421 case 2: gen_sxtb16(tmp); break;
11422 case 3: gen_uxtb16(tmp); break;
11423 case 4: gen_sxtb(tmp); break;
11424 case 5: gen_uxtb(tmp); break;
Aurelio C. Remonda62b44f02015-06-15 18:06:09 +010011425 default:
11426 g_assert_not_reached();
pbrook9ee6e8b2007-11-11 00:04:49 +000011427 }
11428 if (rn != 15) {
pbrook5e3f8782008-03-31 03:47:34 +000011429 tmp2 = load_reg(s, rn);
pbrook9ee6e8b2007-11-11 00:04:49 +000011430 if ((op >> 1) == 1) {
pbrook5e3f8782008-03-31 03:47:34 +000011431 gen_add16(tmp, tmp2);
pbrook9ee6e8b2007-11-11 00:04:49 +000011432 } else {
pbrook5e3f8782008-03-31 03:47:34 +000011433 tcg_gen_add_i32(tmp, tmp, tmp2);
Peter Maydell7d1b0092011-03-06 21:39:54 +000011434 tcg_temp_free_i32(tmp2);
pbrook9ee6e8b2007-11-11 00:04:49 +000011435 }
11436 }
pbrook5e3f8782008-03-31 03:47:34 +000011437 store_reg(s, rd, tmp);
pbrook9ee6e8b2007-11-11 00:04:49 +000011438 break;
11439 case 2: /* SIMD add/subtract. */
Aurelio C. Remonda62b44f02015-06-15 18:06:09 +010011440 if (!arm_dc_feature(s, ARM_FEATURE_THUMB_DSP)) {
11441 goto illegal_op;
11442 }
pbrook9ee6e8b2007-11-11 00:04:49 +000011443 op = (insn >> 20) & 7;
11444 shift = (insn >> 4) & 7;
11445 if ((op & 3) == 3 || (shift & 3) == 3)
11446 goto illegal_op;
pbrook6ddbc6e2008-03-31 03:46:33 +000011447 tmp = load_reg(s, rn);
11448 tmp2 = load_reg(s, rm);
11449 gen_thumb2_parallel_addsub(op, shift, tmp, tmp2);
Peter Maydell7d1b0092011-03-06 21:39:54 +000011450 tcg_temp_free_i32(tmp2);
pbrook6ddbc6e2008-03-31 03:46:33 +000011451 store_reg(s, rd, tmp);
pbrook9ee6e8b2007-11-11 00:04:49 +000011452 break;
11453 case 3: /* Other data processing. */
11454 op = ((insn >> 17) & 0x38) | ((insn >> 4) & 7);
11455 if (op < 4) {
11456 /* Saturating add/subtract. */
Aurelio C. Remonda62b44f02015-06-15 18:06:09 +010011457 if (!arm_dc_feature(s, ARM_FEATURE_THUMB_DSP)) {
11458 goto illegal_op;
11459 }
pbrookd9ba4832008-03-31 03:46:50 +000011460 tmp = load_reg(s, rn);
11461 tmp2 = load_reg(s, rm);
pbrook9ee6e8b2007-11-11 00:04:49 +000011462 if (op & 1)
Blue Swirl9ef39272012-09-04 20:19:15 +000011463 gen_helper_double_saturate(tmp, cpu_env, tmp);
Johan Bengtsson4809c612010-12-07 12:01:44 +000011464 if (op & 2)
Blue Swirl9ef39272012-09-04 20:19:15 +000011465 gen_helper_sub_saturate(tmp, cpu_env, tmp2, tmp);
pbrook9ee6e8b2007-11-11 00:04:49 +000011466 else
Blue Swirl9ef39272012-09-04 20:19:15 +000011467 gen_helper_add_saturate(tmp, cpu_env, tmp, tmp2);
Peter Maydell7d1b0092011-03-06 21:39:54 +000011468 tcg_temp_free_i32(tmp2);
pbrook9ee6e8b2007-11-11 00:04:49 +000011469 } else {
Aurelio C. Remonda62b44f02015-06-15 18:06:09 +010011470 switch (op) {
11471 case 0x0a: /* rbit */
11472 case 0x08: /* rev */
11473 case 0x09: /* rev16 */
11474 case 0x0b: /* revsh */
11475 case 0x18: /* clz */
11476 break;
11477 case 0x10: /* sel */
11478 if (!arm_dc_feature(s, ARM_FEATURE_THUMB_DSP)) {
11479 goto illegal_op;
11480 }
11481 break;
11482 case 0x20: /* crc32/crc32c */
11483 case 0x21:
11484 case 0x22:
11485 case 0x28:
11486 case 0x29:
11487 case 0x2a:
Richard Henderson962fcbf2018-10-24 07:50:16 +010011488 if (!dc_isar_feature(aa32_crc32, s)) {
Aurelio C. Remonda62b44f02015-06-15 18:06:09 +010011489 goto illegal_op;
11490 }
11491 break;
11492 default:
11493 goto illegal_op;
11494 }
pbrookd9ba4832008-03-31 03:46:50 +000011495 tmp = load_reg(s, rn);
pbrook9ee6e8b2007-11-11 00:04:49 +000011496 switch (op) {
11497 case 0x0a: /* rbit */
pbrookd9ba4832008-03-31 03:46:50 +000011498 gen_helper_rbit(tmp, tmp);
pbrook9ee6e8b2007-11-11 00:04:49 +000011499 break;
11500 case 0x08: /* rev */
aurel3266896cb2009-03-13 09:34:48 +000011501 tcg_gen_bswap32_i32(tmp, tmp);
pbrook9ee6e8b2007-11-11 00:04:49 +000011502 break;
11503 case 0x09: /* rev16 */
pbrookd9ba4832008-03-31 03:46:50 +000011504 gen_rev16(tmp);
pbrook9ee6e8b2007-11-11 00:04:49 +000011505 break;
11506 case 0x0b: /* revsh */
pbrookd9ba4832008-03-31 03:46:50 +000011507 gen_revsh(tmp);
pbrook9ee6e8b2007-11-11 00:04:49 +000011508 break;
11509 case 0x10: /* sel */
pbrookd9ba4832008-03-31 03:46:50 +000011510 tmp2 = load_reg(s, rm);
Peter Maydell7d1b0092011-03-06 21:39:54 +000011511 tmp3 = tcg_temp_new_i32();
Andreas Färber0ecb72a2012-03-14 01:38:21 +010011512 tcg_gen_ld_i32(tmp3, cpu_env, offsetof(CPUARMState, GE));
pbrookd9ba4832008-03-31 03:46:50 +000011513 gen_helper_sel_flags(tmp, tmp3, tmp, tmp2);
Peter Maydell7d1b0092011-03-06 21:39:54 +000011514 tcg_temp_free_i32(tmp3);
11515 tcg_temp_free_i32(tmp2);
pbrook9ee6e8b2007-11-11 00:04:49 +000011516 break;
11517 case 0x18: /* clz */
Richard Henderson7539a012016-11-16 11:49:06 +010011518 tcg_gen_clzi_i32(tmp, tmp, 32);
pbrook9ee6e8b2007-11-11 00:04:49 +000011519 break;
Will Newtoneb0ecd52014-02-26 17:20:07 +000011520 case 0x20:
11521 case 0x21:
11522 case 0x22:
11523 case 0x28:
11524 case 0x29:
11525 case 0x2a:
11526 {
11527 /* crc32/crc32c */
11528 uint32_t sz = op & 0x3;
11529 uint32_t c = op & 0x8;
11530
Will Newtoneb0ecd52014-02-26 17:20:07 +000011531 tmp2 = load_reg(s, rm);
Peter Maydellaa633462014-06-09 15:43:25 +010011532 if (sz == 0) {
11533 tcg_gen_andi_i32(tmp2, tmp2, 0xff);
11534 } else if (sz == 1) {
11535 tcg_gen_andi_i32(tmp2, tmp2, 0xffff);
11536 }
Will Newtoneb0ecd52014-02-26 17:20:07 +000011537 tmp3 = tcg_const_i32(1 << sz);
11538 if (c) {
11539 gen_helper_crc32c(tmp, tmp, tmp2, tmp3);
11540 } else {
11541 gen_helper_crc32(tmp, tmp, tmp2, tmp3);
11542 }
11543 tcg_temp_free_i32(tmp2);
11544 tcg_temp_free_i32(tmp3);
11545 break;
11546 }
pbrook9ee6e8b2007-11-11 00:04:49 +000011547 default:
Aurelio C. Remonda62b44f02015-06-15 18:06:09 +010011548 g_assert_not_reached();
pbrook9ee6e8b2007-11-11 00:04:49 +000011549 }
11550 }
pbrookd9ba4832008-03-31 03:46:50 +000011551 store_reg(s, rd, tmp);
pbrook9ee6e8b2007-11-11 00:04:49 +000011552 break;
11553 case 4: case 5: /* 32-bit multiply. Sum of absolute differences. */
Aurelio C. Remonda62b44f02015-06-15 18:06:09 +010011554 switch ((insn >> 20) & 7) {
11555 case 0: /* 32 x 32 -> 32 */
11556 case 7: /* Unsigned sum of absolute differences. */
11557 break;
11558 case 1: /* 16 x 16 -> 32 */
11559 case 2: /* Dual multiply add. */
11560 case 3: /* 32 * 16 -> 32msb */
11561 case 4: /* Dual multiply subtract. */
11562 case 5: case 6: /* 32 * 32 -> 32msb (SMMUL, SMMLA, SMMLS) */
11563 if (!arm_dc_feature(s, ARM_FEATURE_THUMB_DSP)) {
11564 goto illegal_op;
11565 }
11566 break;
11567 }
pbrook9ee6e8b2007-11-11 00:04:49 +000011568 op = (insn >> 4) & 0xf;
pbrookd9ba4832008-03-31 03:46:50 +000011569 tmp = load_reg(s, rn);
11570 tmp2 = load_reg(s, rm);
pbrook9ee6e8b2007-11-11 00:04:49 +000011571 switch ((insn >> 20) & 7) {
11572 case 0: /* 32 x 32 -> 32 */
pbrookd9ba4832008-03-31 03:46:50 +000011573 tcg_gen_mul_i32(tmp, tmp, tmp2);
Peter Maydell7d1b0092011-03-06 21:39:54 +000011574 tcg_temp_free_i32(tmp2);
pbrook9ee6e8b2007-11-11 00:04:49 +000011575 if (rs != 15) {
pbrookd9ba4832008-03-31 03:46:50 +000011576 tmp2 = load_reg(s, rs);
pbrook9ee6e8b2007-11-11 00:04:49 +000011577 if (op)
pbrookd9ba4832008-03-31 03:46:50 +000011578 tcg_gen_sub_i32(tmp, tmp2, tmp);
pbrook9ee6e8b2007-11-11 00:04:49 +000011579 else
pbrookd9ba4832008-03-31 03:46:50 +000011580 tcg_gen_add_i32(tmp, tmp, tmp2);
Peter Maydell7d1b0092011-03-06 21:39:54 +000011581 tcg_temp_free_i32(tmp2);
pbrook9ee6e8b2007-11-11 00:04:49 +000011582 }
pbrook9ee6e8b2007-11-11 00:04:49 +000011583 break;
11584 case 1: /* 16 x 16 -> 32 */
pbrookd9ba4832008-03-31 03:46:50 +000011585 gen_mulxy(tmp, tmp2, op & 2, op & 1);
Peter Maydell7d1b0092011-03-06 21:39:54 +000011586 tcg_temp_free_i32(tmp2);
pbrook9ee6e8b2007-11-11 00:04:49 +000011587 if (rs != 15) {
pbrookd9ba4832008-03-31 03:46:50 +000011588 tmp2 = load_reg(s, rs);
Blue Swirl9ef39272012-09-04 20:19:15 +000011589 gen_helper_add_setq(tmp, cpu_env, tmp, tmp2);
Peter Maydell7d1b0092011-03-06 21:39:54 +000011590 tcg_temp_free_i32(tmp2);
pbrook9ee6e8b2007-11-11 00:04:49 +000011591 }
pbrook9ee6e8b2007-11-11 00:04:49 +000011592 break;
11593 case 2: /* Dual multiply add. */
11594 case 4: /* Dual multiply subtract. */
11595 if (op)
pbrookd9ba4832008-03-31 03:46:50 +000011596 gen_swap_half(tmp2);
11597 gen_smul_dual(tmp, tmp2);
pbrook9ee6e8b2007-11-11 00:04:49 +000011598 if (insn & (1 << 22)) {
Peter Maydelle1d177b2011-03-11 10:09:58 +000011599 /* This subtraction cannot overflow. */
pbrookd9ba4832008-03-31 03:46:50 +000011600 tcg_gen_sub_i32(tmp, tmp, tmp2);
pbrook9ee6e8b2007-11-11 00:04:49 +000011601 } else {
Peter Maydelle1d177b2011-03-11 10:09:58 +000011602 /* This addition cannot overflow 32 bits;
11603 * however it may overflow considered as a signed
11604 * operation, in which case we must set the Q flag.
11605 */
Blue Swirl9ef39272012-09-04 20:19:15 +000011606 gen_helper_add_setq(tmp, cpu_env, tmp, tmp2);
pbrook9ee6e8b2007-11-11 00:04:49 +000011607 }
Peter Maydell7d1b0092011-03-06 21:39:54 +000011608 tcg_temp_free_i32(tmp2);
pbrook9ee6e8b2007-11-11 00:04:49 +000011609 if (rs != 15)
11610 {
pbrookd9ba4832008-03-31 03:46:50 +000011611 tmp2 = load_reg(s, rs);
Blue Swirl9ef39272012-09-04 20:19:15 +000011612 gen_helper_add_setq(tmp, cpu_env, tmp, tmp2);
Peter Maydell7d1b0092011-03-06 21:39:54 +000011613 tcg_temp_free_i32(tmp2);
pbrook9ee6e8b2007-11-11 00:04:49 +000011614 }
pbrook9ee6e8b2007-11-11 00:04:49 +000011615 break;
11616 case 3: /* 32 * 16 -> 32msb */
11617 if (op)
pbrookd9ba4832008-03-31 03:46:50 +000011618 tcg_gen_sari_i32(tmp2, tmp2, 16);
pbrook9ee6e8b2007-11-11 00:04:49 +000011619 else
pbrookd9ba4832008-03-31 03:46:50 +000011620 gen_sxth(tmp2);
pbrooka7812ae2008-11-17 14:43:54 +000011621 tmp64 = gen_muls_i64_i32(tmp, tmp2);
11622 tcg_gen_shri_i64(tmp64, tmp64, 16);
Peter Maydell7d1b0092011-03-06 21:39:54 +000011623 tmp = tcg_temp_new_i32();
Richard Hendersonecc7b3a2015-07-24 11:49:53 -070011624 tcg_gen_extrl_i64_i32(tmp, tmp64);
Juha Riihimäkib75263d2009-10-22 15:17:36 +030011625 tcg_temp_free_i64(tmp64);
pbrook9ee6e8b2007-11-11 00:04:49 +000011626 if (rs != 15)
11627 {
pbrookd9ba4832008-03-31 03:46:50 +000011628 tmp2 = load_reg(s, rs);
Blue Swirl9ef39272012-09-04 20:19:15 +000011629 gen_helper_add_setq(tmp, cpu_env, tmp, tmp2);
Peter Maydell7d1b0092011-03-06 21:39:54 +000011630 tcg_temp_free_i32(tmp2);
pbrook9ee6e8b2007-11-11 00:04:49 +000011631 }
pbrook9ee6e8b2007-11-11 00:04:49 +000011632 break;
Aurelien Jarno838fa722011-01-06 19:53:56 +010011633 case 5: case 6: /* 32 * 32 -> 32msb (SMMUL, SMMLA, SMMLS) */
11634 tmp64 = gen_muls_i64_i32(tmp, tmp2);
pbrookd9ba4832008-03-31 03:46:50 +000011635 if (rs != 15) {
Aurelien Jarno838fa722011-01-06 19:53:56 +010011636 tmp = load_reg(s, rs);
11637 if (insn & (1 << 20)) {
11638 tmp64 = gen_addq_msw(tmp64, tmp);
pbrookd9ba4832008-03-31 03:46:50 +000011639 } else {
Aurelien Jarno838fa722011-01-06 19:53:56 +010011640 tmp64 = gen_subq_msw(tmp64, tmp);
pbrookd9ba4832008-03-31 03:46:50 +000011641 }
pbrookd9ba4832008-03-31 03:46:50 +000011642 }
Aurelien Jarno838fa722011-01-06 19:53:56 +010011643 if (insn & (1 << 4)) {
11644 tcg_gen_addi_i64(tmp64, tmp64, 0x80000000u);
11645 }
11646 tcg_gen_shri_i64(tmp64, tmp64, 32);
Peter Maydell7d1b0092011-03-06 21:39:54 +000011647 tmp = tcg_temp_new_i32();
Richard Hendersonecc7b3a2015-07-24 11:49:53 -070011648 tcg_gen_extrl_i64_i32(tmp, tmp64);
Aurelien Jarno838fa722011-01-06 19:53:56 +010011649 tcg_temp_free_i64(tmp64);
pbrook9ee6e8b2007-11-11 00:04:49 +000011650 break;
11651 case 7: /* Unsigned sum of absolute differences. */
pbrookd9ba4832008-03-31 03:46:50 +000011652 gen_helper_usad8(tmp, tmp, tmp2);
Peter Maydell7d1b0092011-03-06 21:39:54 +000011653 tcg_temp_free_i32(tmp2);
pbrook9ee6e8b2007-11-11 00:04:49 +000011654 if (rs != 15) {
pbrookd9ba4832008-03-31 03:46:50 +000011655 tmp2 = load_reg(s, rs);
11656 tcg_gen_add_i32(tmp, tmp, tmp2);
Peter Maydell7d1b0092011-03-06 21:39:54 +000011657 tcg_temp_free_i32(tmp2);
pbrook9ee6e8b2007-11-11 00:04:49 +000011658 }
pbrook9ee6e8b2007-11-11 00:04:49 +000011659 break;
11660 }
pbrookd9ba4832008-03-31 03:46:50 +000011661 store_reg(s, rd, tmp);
pbrook9ee6e8b2007-11-11 00:04:49 +000011662 break;
11663 case 6: case 7: /* 64-bit multiply, Divide. */
11664 op = ((insn >> 4) & 0xf) | ((insn >> 16) & 0x70);
pbrook5e3f8782008-03-31 03:47:34 +000011665 tmp = load_reg(s, rn);
11666 tmp2 = load_reg(s, rm);
pbrook9ee6e8b2007-11-11 00:04:49 +000011667 if ((op & 0x50) == 0x10) {
11668 /* sdiv, udiv */
Richard Henderson7e0cf8b2018-10-24 07:50:16 +010011669 if (!dc_isar_feature(thumb_div, s)) {
pbrook9ee6e8b2007-11-11 00:04:49 +000011670 goto illegal_op;
Peter Maydell47789992011-10-19 16:14:06 +000011671 }
pbrook9ee6e8b2007-11-11 00:04:49 +000011672 if (op & 0x20)
pbrook5e3f8782008-03-31 03:47:34 +000011673 gen_helper_udiv(tmp, tmp, tmp2);
pbrook9ee6e8b2007-11-11 00:04:49 +000011674 else
pbrook5e3f8782008-03-31 03:47:34 +000011675 gen_helper_sdiv(tmp, tmp, tmp2);
Peter Maydell7d1b0092011-03-06 21:39:54 +000011676 tcg_temp_free_i32(tmp2);
pbrook5e3f8782008-03-31 03:47:34 +000011677 store_reg(s, rd, tmp);
pbrook9ee6e8b2007-11-11 00:04:49 +000011678 } else if ((op & 0xe) == 0xc) {
11679 /* Dual multiply accumulate long. */
Aurelio C. Remonda62b44f02015-06-15 18:06:09 +010011680 if (!arm_dc_feature(s, ARM_FEATURE_THUMB_DSP)) {
11681 tcg_temp_free_i32(tmp);
11682 tcg_temp_free_i32(tmp2);
11683 goto illegal_op;
11684 }
pbrook9ee6e8b2007-11-11 00:04:49 +000011685 if (op & 1)
pbrook5e3f8782008-03-31 03:47:34 +000011686 gen_swap_half(tmp2);
11687 gen_smul_dual(tmp, tmp2);
pbrook9ee6e8b2007-11-11 00:04:49 +000011688 if (op & 0x10) {
pbrook5e3f8782008-03-31 03:47:34 +000011689 tcg_gen_sub_i32(tmp, tmp, tmp2);
pbrook9ee6e8b2007-11-11 00:04:49 +000011690 } else {
pbrook5e3f8782008-03-31 03:47:34 +000011691 tcg_gen_add_i32(tmp, tmp, tmp2);
pbrook9ee6e8b2007-11-11 00:04:49 +000011692 }
Peter Maydell7d1b0092011-03-06 21:39:54 +000011693 tcg_temp_free_i32(tmp2);
pbrooka7812ae2008-11-17 14:43:54 +000011694 /* BUGFIX */
11695 tmp64 = tcg_temp_new_i64();
11696 tcg_gen_ext_i32_i64(tmp64, tmp);
Peter Maydell7d1b0092011-03-06 21:39:54 +000011697 tcg_temp_free_i32(tmp);
pbrooka7812ae2008-11-17 14:43:54 +000011698 gen_addq(s, tmp64, rs, rd);
11699 gen_storeq_reg(s, rs, rd, tmp64);
Juha Riihimäkib75263d2009-10-22 15:17:36 +030011700 tcg_temp_free_i64(tmp64);
pbrook9ee6e8b2007-11-11 00:04:49 +000011701 } else {
11702 if (op & 0x20) {
11703 /* Unsigned 64-bit multiply */
pbrooka7812ae2008-11-17 14:43:54 +000011704 tmp64 = gen_mulu_i64_i32(tmp, tmp2);
pbrook9ee6e8b2007-11-11 00:04:49 +000011705 } else {
11706 if (op & 8) {
11707 /* smlalxy */
Aurelio C. Remonda62b44f02015-06-15 18:06:09 +010011708 if (!arm_dc_feature(s, ARM_FEATURE_THUMB_DSP)) {
11709 tcg_temp_free_i32(tmp2);
11710 tcg_temp_free_i32(tmp);
11711 goto illegal_op;
11712 }
pbrook5e3f8782008-03-31 03:47:34 +000011713 gen_mulxy(tmp, tmp2, op & 2, op & 1);
Peter Maydell7d1b0092011-03-06 21:39:54 +000011714 tcg_temp_free_i32(tmp2);
pbrooka7812ae2008-11-17 14:43:54 +000011715 tmp64 = tcg_temp_new_i64();
11716 tcg_gen_ext_i32_i64(tmp64, tmp);
Peter Maydell7d1b0092011-03-06 21:39:54 +000011717 tcg_temp_free_i32(tmp);
pbrook9ee6e8b2007-11-11 00:04:49 +000011718 } else {
11719 /* Signed 64-bit multiply */
pbrooka7812ae2008-11-17 14:43:54 +000011720 tmp64 = gen_muls_i64_i32(tmp, tmp2);
pbrook9ee6e8b2007-11-11 00:04:49 +000011721 }
11722 }
11723 if (op & 4) {
11724 /* umaal */
Aurelio C. Remonda62b44f02015-06-15 18:06:09 +010011725 if (!arm_dc_feature(s, ARM_FEATURE_THUMB_DSP)) {
11726 tcg_temp_free_i64(tmp64);
11727 goto illegal_op;
11728 }
pbrooka7812ae2008-11-17 14:43:54 +000011729 gen_addq_lo(s, tmp64, rs);
11730 gen_addq_lo(s, tmp64, rd);
pbrook9ee6e8b2007-11-11 00:04:49 +000011731 } else if (op & 0x40) {
11732 /* 64-bit accumulate. */
pbrooka7812ae2008-11-17 14:43:54 +000011733 gen_addq(s, tmp64, rs, rd);
pbrook9ee6e8b2007-11-11 00:04:49 +000011734 }
pbrooka7812ae2008-11-17 14:43:54 +000011735 gen_storeq_reg(s, rs, rd, tmp64);
Juha Riihimäkib75263d2009-10-22 15:17:36 +030011736 tcg_temp_free_i64(tmp64);
pbrook9ee6e8b2007-11-11 00:04:49 +000011737 }
11738 break;
11739 }
11740 break;
11741 case 6: case 7: case 14: case 15:
11742 /* Coprocessor. */
Peter Maydell75177482017-01-27 15:20:24 +000011743 if (arm_dc_feature(s, ARM_FEATURE_M)) {
Peter Maydell8859ba32019-04-29 17:35:59 +010011744 /* 0b111x_11xx_xxxx_xxxx_xxxx_xxxx_xxxx_xxxx */
11745 if (extract32(insn, 24, 2) == 3) {
11746 goto illegal_op; /* op0 = 0b11 : unallocated */
11747 }
11748
11749 /*
11750 * Decode VLLDM and VLSTM first: these are nonstandard because:
11751 * * if there is no FPU then these insns must NOP in
11752 * Secure state and UNDEF in Nonsecure state
11753 * * if there is an FPU then these insns do not have
11754 * the usual behaviour that disas_vfp_insn() provides of
11755 * being controlled by CPACR/NSACR enable bits or the
11756 * lazy-stacking logic.
Peter Maydell75177482017-01-27 15:20:24 +000011757 */
Peter Maydellb1e53362018-05-04 18:05:51 +010011758 if (arm_dc_feature(s, ARM_FEATURE_V8) &&
11759 (insn & 0xffa00f00) == 0xec200a00) {
11760 /* 0b1110_1100_0x1x_xxxx_xxxx_1010_xxxx_xxxx
11761 * - VLLDM, VLSTM
11762 * We choose to UNDEF if the RAZ bits are non-zero.
11763 */
11764 if (!s->v8m_secure || (insn & 0x0040f0ff)) {
11765 goto illegal_op;
11766 }
Peter Maydell019076b2019-04-29 17:36:03 +010011767
11768 if (arm_dc_feature(s, ARM_FEATURE_VFP)) {
11769 TCGv_i32 fptr = load_reg(s, rn);
11770
11771 if (extract32(insn, 20, 1)) {
Peter Maydell956fe142019-04-29 17:36:03 +010011772 gen_helper_v7m_vlldm(cpu_env, fptr);
Peter Maydell019076b2019-04-29 17:36:03 +010011773 } else {
11774 gen_helper_v7m_vlstm(cpu_env, fptr);
11775 }
11776 tcg_temp_free_i32(fptr);
11777
11778 /* End the TB, because we have updated FP control bits */
11779 s->base.is_jmp = DISAS_UPDATE;
11780 }
Peter Maydellb1e53362018-05-04 18:05:51 +010011781 break;
11782 }
Peter Maydell8859ba32019-04-29 17:35:59 +010011783 if (arm_dc_feature(s, ARM_FEATURE_VFP) &&
11784 ((insn >> 8) & 0xe) == 10) {
11785 /* FP, and the CPU supports it */
11786 if (disas_vfp_insn(s, insn)) {
11787 goto illegal_op;
11788 }
11789 break;
11790 }
11791
Peter Maydellb1e53362018-05-04 18:05:51 +010011792 /* All other insns: NOCP */
Peter Maydell75177482017-01-27 15:20:24 +000011793 gen_exception_insn(s, 4, EXCP_NOCP, syn_uncategorized(),
11794 default_exception_el(s));
11795 break;
11796 }
Richard Henderson00520872018-03-02 10:45:45 +000011797 if ((insn & 0xfe000a00) == 0xfc000800
11798 && arm_dc_feature(s, ARM_FEATURE_V8)) {
11799 /* The Thumb2 and ARM encodings are identical. */
11800 if (disas_neon_insn_3same_ext(s, insn)) {
11801 goto illegal_op;
11802 }
11803 } else if ((insn & 0xff000a00) == 0xfe000800
11804 && arm_dc_feature(s, ARM_FEATURE_V8)) {
11805 /* The Thumb2 and ARM encodings are identical. */
11806 if (disas_neon_insn_2reg_scalar_ext(s, insn)) {
11807 goto illegal_op;
11808 }
11809 } else if (((insn >> 24) & 3) == 3) {
pbrook9ee6e8b2007-11-11 00:04:49 +000011810 /* Translate into the equivalent ARM encoding. */
Juha Riihimäkif06053e2011-02-11 13:35:25 +000011811 insn = (insn & 0xe2ffffff) | ((insn & (1 << 28)) >> 4) | (1 << 28);
Peter Maydell7dcc1f82014-10-28 19:24:03 +000011812 if (disas_neon_data_insn(s, insn)) {
pbrook9ee6e8b2007-11-11 00:04:49 +000011813 goto illegal_op;
Peter Maydell7dcc1f82014-10-28 19:24:03 +000011814 }
Will Newton6a57f3e2013-12-06 17:01:40 +000011815 } else if (((insn >> 8) & 0xe) == 10) {
Peter Maydell7dcc1f82014-10-28 19:24:03 +000011816 if (disas_vfp_insn(s, insn)) {
Will Newton6a57f3e2013-12-06 17:01:40 +000011817 goto illegal_op;
11818 }
pbrook9ee6e8b2007-11-11 00:04:49 +000011819 } else {
11820 if (insn & (1 << 28))
11821 goto illegal_op;
Peter Maydell7dcc1f82014-10-28 19:24:03 +000011822 if (disas_coproc_insn(s, insn)) {
pbrook9ee6e8b2007-11-11 00:04:49 +000011823 goto illegal_op;
Peter Maydell7dcc1f82014-10-28 19:24:03 +000011824 }
pbrook9ee6e8b2007-11-11 00:04:49 +000011825 }
11826 break;
11827 case 8: case 9: case 10: case 11:
11828 if (insn & (1 << 15)) {
11829 /* Branches, misc control. */
11830 if (insn & 0x5000) {
11831 /* Unconditional branch. */
11832 /* signextend(hw1[10:0]) -> offset[:12]. */
11833 offset = ((int32_t)insn << 5) >> 9 & ~(int32_t)0xfff;
11834 /* hw1[10:0] -> offset[11:1]. */
11835 offset |= (insn & 0x7ff) << 1;
11836 /* (~hw2[13, 11] ^ offset[24]) -> offset[23,22]
11837 offset[24:22] already have the same value because of the
11838 sign extension above. */
11839 offset ^= ((~insn) & (1 << 13)) << 10;
11840 offset ^= ((~insn) & (1 << 11)) << 11;
11841
pbrook9ee6e8b2007-11-11 00:04:49 +000011842 if (insn & (1 << 14)) {
11843 /* Branch and link. */
Filip Navara3174f8e2009-10-15 13:14:28 +020011844 tcg_gen_movi_i32(cpu_R[14], s->pc | 1);
pbrook9ee6e8b2007-11-11 00:04:49 +000011845 }
11846
pbrookb0109802008-03-31 03:47:03 +000011847 offset += s->pc;
pbrook9ee6e8b2007-11-11 00:04:49 +000011848 if (insn & (1 << 12)) {
11849 /* b/bl */
pbrookb0109802008-03-31 03:47:03 +000011850 gen_jmp(s, offset);
pbrook9ee6e8b2007-11-11 00:04:49 +000011851 } else {
11852 /* blx */
pbrookb0109802008-03-31 03:47:03 +000011853 offset &= ~(uint32_t)2;
Dmitry Eremin-Solenikovbe5e7a72011-04-04 17:38:44 +040011854 /* thumb2 bx, no need to check */
pbrookb0109802008-03-31 03:47:03 +000011855 gen_bx_im(s, offset);
pbrook9ee6e8b2007-11-11 00:04:49 +000011856 }
11857 } else if (((insn >> 23) & 7) == 7) {
11858 /* Misc control */
11859 if (insn & (1 << 13))
11860 goto illegal_op;
11861
11862 if (insn & (1 << 26)) {
Peter Maydell001b3ca2017-03-20 12:41:44 +000011863 if (arm_dc_feature(s, ARM_FEATURE_M)) {
11864 goto illegal_op;
11865 }
Peter Maydell37e64562014-10-24 12:19:13 +010011866 if (!(insn & (1 << 20))) {
11867 /* Hypervisor call (v7) */
11868 int imm16 = extract32(insn, 16, 4) << 12
11869 | extract32(insn, 0, 12);
11870 ARCH(7);
11871 if (IS_USER(s)) {
11872 goto illegal_op;
11873 }
11874 gen_hvc(s, imm16);
11875 } else {
11876 /* Secure monitor call (v6+) */
11877 ARCH(6K);
11878 if (IS_USER(s)) {
11879 goto illegal_op;
11880 }
11881 gen_smc(s);
11882 }
pbrook9ee6e8b2007-11-11 00:04:49 +000011883 } else {
11884 op = (insn >> 20) & 7;
11885 switch (op) {
11886 case 0: /* msr cpsr. */
Peter Maydellb53d8922014-10-28 19:24:02 +000011887 if (arm_dc_feature(s, ARM_FEATURE_M)) {
pbrook8984bd22008-03-31 03:47:48 +000011888 tmp = load_reg(s, rn);
Peter Maydellb28b3372017-03-20 12:41:44 +000011889 /* the constant is the mask and SYSm fields */
11890 addr = tcg_const_i32(insn & 0xfff);
pbrook8984bd22008-03-31 03:47:48 +000011891 gen_helper_v7m_msr(cpu_env, addr, tmp);
Juha Riihimäkib75263d2009-10-22 15:17:36 +030011892 tcg_temp_free_i32(addr);
Peter Maydell7d1b0092011-03-06 21:39:54 +000011893 tcg_temp_free_i32(tmp);
pbrook9ee6e8b2007-11-11 00:04:49 +000011894 gen_lookup_tb(s);
11895 break;
11896 }
11897 /* fall through */
11898 case 1: /* msr spsr. */
Peter Maydellb53d8922014-10-28 19:24:02 +000011899 if (arm_dc_feature(s, ARM_FEATURE_M)) {
pbrook9ee6e8b2007-11-11 00:04:49 +000011900 goto illegal_op;
Peter Maydellb53d8922014-10-28 19:24:02 +000011901 }
Peter Maydell8bfd0552016-03-16 17:05:58 +000011902
11903 if (extract32(insn, 5, 1)) {
11904 /* MSR (banked) */
11905 int sysm = extract32(insn, 8, 4) |
11906 (extract32(insn, 4, 1) << 4);
11907 int r = op & 1;
11908
11909 gen_msr_banked(s, r, sysm, rm);
11910 break;
11911 }
11912
11913 /* MSR (for PSRs) */
Filip Navara2fbac542009-10-15 12:43:04 +020011914 tmp = load_reg(s, rn);
11915 if (gen_set_psr(s,
Peter Maydell7dcc1f82014-10-28 19:24:03 +000011916 msr_mask(s, (insn >> 8) & 0xf, op == 1),
Filip Navara2fbac542009-10-15 12:43:04 +020011917 op == 1, tmp))
pbrook9ee6e8b2007-11-11 00:04:49 +000011918 goto illegal_op;
11919 break;
11920 case 2: /* cps, nop-hint. */
11921 if (((insn >> 8) & 7) == 0) {
11922 gen_nop_hint(s, insn & 0xff);
11923 }
11924 /* Implemented as NOP in user mode. */
11925 if (IS_USER(s))
11926 break;
11927 offset = 0;
11928 imm = 0;
11929 if (insn & (1 << 10)) {
11930 if (insn & (1 << 7))
11931 offset |= CPSR_A;
11932 if (insn & (1 << 6))
11933 offset |= CPSR_I;
11934 if (insn & (1 << 5))
11935 offset |= CPSR_F;
11936 if (insn & (1 << 9))
11937 imm = CPSR_A | CPSR_I | CPSR_F;
11938 }
11939 if (insn & (1 << 8)) {
11940 offset |= 0x1f;
11941 imm |= (insn & 0x1f);
11942 }
11943 if (offset) {
Filip Navara2fbac542009-10-15 12:43:04 +020011944 gen_set_psr_im(s, offset, 0, imm);
pbrook9ee6e8b2007-11-11 00:04:49 +000011945 }
11946 break;
11947 case 3: /* Special control operations. */
Julia Suvorova14120102018-06-15 14:57:16 +010011948 if (!arm_dc_feature(s, ARM_FEATURE_V7) &&
Julia Suvorova8297cb12018-06-22 13:28:34 +010011949 !arm_dc_feature(s, ARM_FEATURE_M)) {
Julia Suvorova14120102018-06-15 14:57:16 +010011950 goto illegal_op;
11951 }
pbrook9ee6e8b2007-11-11 00:04:49 +000011952 op = (insn >> 4) & 0xf;
11953 switch (op) {
11954 case 2: /* clrex */
Paul Brook426f5ab2009-11-22 21:35:13 +000011955 gen_clrex(s);
pbrook9ee6e8b2007-11-11 00:04:49 +000011956 break;
11957 case 4: /* dsb */
11958 case 5: /* dmb */
Pranith Kumar61e4c432016-07-14 16:20:23 -040011959 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_SC);
pbrook9ee6e8b2007-11-11 00:04:49 +000011960 break;
Sergey Sorokin6df99de2015-10-16 11:14:52 +010011961 case 6: /* isb */
11962 /* We need to break the TB after this insn
11963 * to execute self-modifying code correctly
11964 * and also to take any pending interrupts
11965 * immediately.
11966 */
Alex Bennée0b609cc2017-07-17 13:36:07 +010011967 gen_goto_tb(s, 0, s->pc & ~1);
Sergey Sorokin6df99de2015-10-16 11:14:52 +010011968 break;
Richard Henderson9888bd12019-03-01 12:04:53 -080011969 case 7: /* sb */
11970 if ((insn & 0xf) || !dc_isar_feature(aa32_sb, s)) {
11971 goto illegal_op;
11972 }
11973 /*
11974 * TODO: There is no speculation barrier opcode
11975 * for TCG; MB and end the TB instead.
11976 */
11977 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_SC);
11978 gen_goto_tb(s, 0, s->pc & ~1);
11979 break;
pbrook9ee6e8b2007-11-11 00:04:49 +000011980 default:
11981 goto illegal_op;
11982 }
11983 break;
11984 case 4: /* bxj */
Peter Maydell9d7c59c2017-04-20 17:32:30 +010011985 /* Trivial implementation equivalent to bx.
11986 * This instruction doesn't exist at all for M-profile.
11987 */
11988 if (arm_dc_feature(s, ARM_FEATURE_M)) {
11989 goto illegal_op;
11990 }
pbrookd9ba4832008-03-31 03:46:50 +000011991 tmp = load_reg(s, rn);
11992 gen_bx(s, tmp);
pbrook9ee6e8b2007-11-11 00:04:49 +000011993 break;
11994 case 5: /* Exception return. */
Rabin Vincentb8b45b62010-02-15 00:02:35 +053011995 if (IS_USER(s)) {
11996 goto illegal_op;
11997 }
11998 if (rn != 14 || rd != 15) {
11999 goto illegal_op;
12000 }
Peter Maydell55c544e2018-08-20 11:24:32 +010012001 if (s->current_el == 2) {
12002 /* ERET from Hyp uses ELR_Hyp, not LR */
12003 if (insn & 0xff) {
12004 goto illegal_op;
12005 }
12006 tmp = load_cpu_field(elr_el[2]);
12007 } else {
12008 tmp = load_reg(s, rn);
12009 tcg_gen_subi_i32(tmp, tmp, insn & 0xff);
12010 }
Rabin Vincentb8b45b62010-02-15 00:02:35 +053012011 gen_exception_return(s, tmp);
12012 break;
Peter Maydell8bfd0552016-03-16 17:05:58 +000012013 case 6: /* MRS */
Peter Maydell43ac6572017-03-20 12:41:44 +000012014 if (extract32(insn, 5, 1) &&
12015 !arm_dc_feature(s, ARM_FEATURE_M)) {
Peter Maydell8bfd0552016-03-16 17:05:58 +000012016 /* MRS (banked) */
12017 int sysm = extract32(insn, 16, 4) |
12018 (extract32(insn, 4, 1) << 4);
12019
12020 gen_mrs_banked(s, 0, sysm, rd);
12021 break;
12022 }
12023
Peter Maydell3d540262017-03-20 12:41:44 +000012024 if (extract32(insn, 16, 4) != 0xf) {
12025 goto illegal_op;
12026 }
12027 if (!arm_dc_feature(s, ARM_FEATURE_M) &&
12028 extract32(insn, 0, 8) != 0) {
12029 goto illegal_op;
12030 }
12031
Peter Maydell8bfd0552016-03-16 17:05:58 +000012032 /* mrs cpsr */
Peter Maydell7d1b0092011-03-06 21:39:54 +000012033 tmp = tcg_temp_new_i32();
Peter Maydellb53d8922014-10-28 19:24:02 +000012034 if (arm_dc_feature(s, ARM_FEATURE_M)) {
pbrook8984bd22008-03-31 03:47:48 +000012035 addr = tcg_const_i32(insn & 0xff);
12036 gen_helper_v7m_mrs(tmp, cpu_env, addr);
Juha Riihimäkib75263d2009-10-22 15:17:36 +030012037 tcg_temp_free_i32(addr);
pbrook9ee6e8b2007-11-11 00:04:49 +000012038 } else {
Blue Swirl9ef39272012-09-04 20:19:15 +000012039 gen_helper_cpsr_read(tmp, cpu_env);
pbrook9ee6e8b2007-11-11 00:04:49 +000012040 }
pbrook8984bd22008-03-31 03:47:48 +000012041 store_reg(s, rd, tmp);
pbrook9ee6e8b2007-11-11 00:04:49 +000012042 break;
Peter Maydell8bfd0552016-03-16 17:05:58 +000012043 case 7: /* MRS */
Peter Maydell43ac6572017-03-20 12:41:44 +000012044 if (extract32(insn, 5, 1) &&
12045 !arm_dc_feature(s, ARM_FEATURE_M)) {
Peter Maydell8bfd0552016-03-16 17:05:58 +000012046 /* MRS (banked) */
12047 int sysm = extract32(insn, 16, 4) |
12048 (extract32(insn, 4, 1) << 4);
12049
12050 gen_mrs_banked(s, 1, sysm, rd);
12051 break;
12052 }
12053
12054 /* mrs spsr. */
pbrook9ee6e8b2007-11-11 00:04:49 +000012055 /* Not accessible in user mode. */
Peter Maydellb53d8922014-10-28 19:24:02 +000012056 if (IS_USER(s) || arm_dc_feature(s, ARM_FEATURE_M)) {
pbrook9ee6e8b2007-11-11 00:04:49 +000012057 goto illegal_op;
Peter Maydellb53d8922014-10-28 19:24:02 +000012058 }
Peter Maydell3d540262017-03-20 12:41:44 +000012059
12060 if (extract32(insn, 16, 4) != 0xf ||
12061 extract32(insn, 0, 8) != 0) {
12062 goto illegal_op;
12063 }
12064
pbrookd9ba4832008-03-31 03:46:50 +000012065 tmp = load_cpu_field(spsr);
12066 store_reg(s, rd, tmp);
pbrook9ee6e8b2007-11-11 00:04:49 +000012067 break;
12068 }
12069 }
12070 } else {
12071 /* Conditional branch. */
12072 op = (insn >> 22) & 0xf;
12073 /* Generate a conditional jump to next instruction. */
Roman Kaplc2d96442018-08-20 11:24:31 +010012074 arm_skip_unless(s, op);
pbrook9ee6e8b2007-11-11 00:04:49 +000012075
12076 /* offset[11:1] = insn[10:0] */
12077 offset = (insn & 0x7ff) << 1;
12078 /* offset[17:12] = insn[21:16]. */
12079 offset |= (insn & 0x003f0000) >> 4;
12080 /* offset[31:20] = insn[26]. */
12081 offset |= ((int32_t)((insn << 5) & 0x80000000)) >> 11;
12082 /* offset[18] = insn[13]. */
12083 offset |= (insn & (1 << 13)) << 5;
12084 /* offset[19] = insn[11]. */
12085 offset |= (insn & (1 << 11)) << 8;
12086
12087 /* jump to the offset */
pbrookb0109802008-03-31 03:47:03 +000012088 gen_jmp(s, s->pc + offset);
pbrook9ee6e8b2007-11-11 00:04:49 +000012089 }
12090 } else {
Peter Maydell55203182018-10-08 14:55:04 +010012091 /*
12092 * 0b1111_0xxx_xxxx_0xxx_xxxx_xxxx
12093 * - Data-processing (modified immediate, plain binary immediate)
12094 */
pbrook9ee6e8b2007-11-11 00:04:49 +000012095 if (insn & (1 << 25)) {
Peter Maydell55203182018-10-08 14:55:04 +010012096 /*
12097 * 0b1111_0x1x_xxxx_0xxx_xxxx_xxxx
12098 * - Data-processing (plain binary immediate)
12099 */
pbrook9ee6e8b2007-11-11 00:04:49 +000012100 if (insn & (1 << 24)) {
12101 if (insn & (1 << 20))
12102 goto illegal_op;
12103 /* Bitfield/Saturate. */
12104 op = (insn >> 21) & 7;
12105 imm = insn & 0x1f;
12106 shift = ((insn >> 6) & 3) | ((insn >> 10) & 0x1c);
pbrook6ddbc6e2008-03-31 03:46:33 +000012107 if (rn == 15) {
Peter Maydell7d1b0092011-03-06 21:39:54 +000012108 tmp = tcg_temp_new_i32();
pbrook6ddbc6e2008-03-31 03:46:33 +000012109 tcg_gen_movi_i32(tmp, 0);
12110 } else {
12111 tmp = load_reg(s, rn);
12112 }
pbrook9ee6e8b2007-11-11 00:04:49 +000012113 switch (op) {
12114 case 2: /* Signed bitfield extract. */
12115 imm++;
12116 if (shift + imm > 32)
12117 goto illegal_op;
Richard Henderson59a71b42016-10-15 11:41:29 -050012118 if (imm < 32) {
12119 tcg_gen_sextract_i32(tmp, tmp, shift, imm);
12120 }
pbrook9ee6e8b2007-11-11 00:04:49 +000012121 break;
12122 case 6: /* Unsigned bitfield extract. */
12123 imm++;
12124 if (shift + imm > 32)
12125 goto illegal_op;
Richard Henderson59a71b42016-10-15 11:41:29 -050012126 if (imm < 32) {
12127 tcg_gen_extract_i32(tmp, tmp, shift, imm);
12128 }
pbrook9ee6e8b2007-11-11 00:04:49 +000012129 break;
12130 case 3: /* Bitfield insert/clear. */
12131 if (imm < shift)
12132 goto illegal_op;
12133 imm = imm + 1 - shift;
12134 if (imm != 32) {
pbrook6ddbc6e2008-03-31 03:46:33 +000012135 tmp2 = load_reg(s, rd);
Aurelien Jarnod593c482012-10-05 15:04:45 +010012136 tcg_gen_deposit_i32(tmp, tmp2, tmp, shift, imm);
Peter Maydell7d1b0092011-03-06 21:39:54 +000012137 tcg_temp_free_i32(tmp2);
pbrook9ee6e8b2007-11-11 00:04:49 +000012138 }
12139 break;
12140 case 7:
12141 goto illegal_op;
12142 default: /* Saturate. */
pbrook9ee6e8b2007-11-11 00:04:49 +000012143 if (shift) {
12144 if (op & 1)
pbrook6ddbc6e2008-03-31 03:46:33 +000012145 tcg_gen_sari_i32(tmp, tmp, shift);
pbrook9ee6e8b2007-11-11 00:04:49 +000012146 else
pbrook6ddbc6e2008-03-31 03:46:33 +000012147 tcg_gen_shli_i32(tmp, tmp, shift);
pbrook9ee6e8b2007-11-11 00:04:49 +000012148 }
pbrook6ddbc6e2008-03-31 03:46:33 +000012149 tmp2 = tcg_const_i32(imm);
pbrook9ee6e8b2007-11-11 00:04:49 +000012150 if (op & 4) {
12151 /* Unsigned. */
Aurelio C. Remonda62b44f02015-06-15 18:06:09 +010012152 if ((op & 1) && shift == 0) {
12153 if (!arm_dc_feature(s, ARM_FEATURE_THUMB_DSP)) {
12154 tcg_temp_free_i32(tmp);
12155 tcg_temp_free_i32(tmp2);
12156 goto illegal_op;
12157 }
Blue Swirl9ef39272012-09-04 20:19:15 +000012158 gen_helper_usat16(tmp, cpu_env, tmp, tmp2);
Aurelio C. Remonda62b44f02015-06-15 18:06:09 +010012159 } else {
Blue Swirl9ef39272012-09-04 20:19:15 +000012160 gen_helper_usat(tmp, cpu_env, tmp, tmp2);
Aurelio C. Remonda62b44f02015-06-15 18:06:09 +010012161 }
pbrook9ee6e8b2007-11-11 00:04:49 +000012162 } else {
12163 /* Signed. */
Aurelio C. Remonda62b44f02015-06-15 18:06:09 +010012164 if ((op & 1) && shift == 0) {
12165 if (!arm_dc_feature(s, ARM_FEATURE_THUMB_DSP)) {
12166 tcg_temp_free_i32(tmp);
12167 tcg_temp_free_i32(tmp2);
12168 goto illegal_op;
12169 }
Blue Swirl9ef39272012-09-04 20:19:15 +000012170 gen_helper_ssat16(tmp, cpu_env, tmp, tmp2);
Aurelio C. Remonda62b44f02015-06-15 18:06:09 +010012171 } else {
Blue Swirl9ef39272012-09-04 20:19:15 +000012172 gen_helper_ssat(tmp, cpu_env, tmp, tmp2);
Aurelio C. Remonda62b44f02015-06-15 18:06:09 +010012173 }
pbrook9ee6e8b2007-11-11 00:04:49 +000012174 }
Juha Riihimäkib75263d2009-10-22 15:17:36 +030012175 tcg_temp_free_i32(tmp2);
pbrook9ee6e8b2007-11-11 00:04:49 +000012176 break;
12177 }
pbrook6ddbc6e2008-03-31 03:46:33 +000012178 store_reg(s, rd, tmp);
pbrook9ee6e8b2007-11-11 00:04:49 +000012179 } else {
12180 imm = ((insn & 0x04000000) >> 15)
12181 | ((insn & 0x7000) >> 4) | (insn & 0xff);
12182 if (insn & (1 << 22)) {
12183 /* 16-bit immediate. */
12184 imm |= (insn >> 4) & 0xf000;
12185 if (insn & (1 << 23)) {
12186 /* movt */
pbrook5e3f8782008-03-31 03:47:34 +000012187 tmp = load_reg(s, rd);
pbrook86831432008-05-11 12:22:01 +000012188 tcg_gen_ext16u_i32(tmp, tmp);
pbrook5e3f8782008-03-31 03:47:34 +000012189 tcg_gen_ori_i32(tmp, tmp, imm << 16);
pbrook9ee6e8b2007-11-11 00:04:49 +000012190 } else {
12191 /* movw */
Peter Maydell7d1b0092011-03-06 21:39:54 +000012192 tmp = tcg_temp_new_i32();
pbrook5e3f8782008-03-31 03:47:34 +000012193 tcg_gen_movi_i32(tmp, imm);
pbrook9ee6e8b2007-11-11 00:04:49 +000012194 }
Peter Maydell55203182018-10-08 14:55:04 +010012195 store_reg(s, rd, tmp);
pbrook9ee6e8b2007-11-11 00:04:49 +000012196 } else {
12197 /* Add/sub 12-bit immediate. */
12198 if (rn == 15) {
pbrookb0109802008-03-31 03:47:03 +000012199 offset = s->pc & ~(uint32_t)3;
pbrook9ee6e8b2007-11-11 00:04:49 +000012200 if (insn & (1 << 23))
pbrookb0109802008-03-31 03:47:03 +000012201 offset -= imm;
pbrook9ee6e8b2007-11-11 00:04:49 +000012202 else
pbrookb0109802008-03-31 03:47:03 +000012203 offset += imm;
Peter Maydell7d1b0092011-03-06 21:39:54 +000012204 tmp = tcg_temp_new_i32();
pbrook5e3f8782008-03-31 03:47:34 +000012205 tcg_gen_movi_i32(tmp, offset);
Peter Maydell55203182018-10-08 14:55:04 +010012206 store_reg(s, rd, tmp);
pbrook9ee6e8b2007-11-11 00:04:49 +000012207 } else {
pbrook5e3f8782008-03-31 03:47:34 +000012208 tmp = load_reg(s, rn);
pbrook9ee6e8b2007-11-11 00:04:49 +000012209 if (insn & (1 << 23))
pbrook5e3f8782008-03-31 03:47:34 +000012210 tcg_gen_subi_i32(tmp, tmp, imm);
pbrook9ee6e8b2007-11-11 00:04:49 +000012211 else
pbrook5e3f8782008-03-31 03:47:34 +000012212 tcg_gen_addi_i32(tmp, tmp, imm);
Peter Maydell55203182018-10-08 14:55:04 +010012213 if (rn == 13 && rd == 13) {
12214 /* ADD SP, SP, imm or SUB SP, SP, imm */
12215 store_sp_checked(s, tmp);
12216 } else {
12217 store_reg(s, rd, tmp);
12218 }
pbrook9ee6e8b2007-11-11 00:04:49 +000012219 }
12220 }
pbrook9ee6e8b2007-11-11 00:04:49 +000012221 }
12222 } else {
Peter Maydell55203182018-10-08 14:55:04 +010012223 /*
12224 * 0b1111_0x0x_xxxx_0xxx_xxxx_xxxx
12225 * - Data-processing (modified immediate)
12226 */
pbrook9ee6e8b2007-11-11 00:04:49 +000012227 int shifter_out = 0;
12228 /* modified 12-bit immediate. */
12229 shift = ((insn & 0x04000000) >> 23) | ((insn & 0x7000) >> 12);
12230 imm = (insn & 0xff);
12231 switch (shift) {
12232 case 0: /* XY */
12233 /* Nothing to do. */
12234 break;
12235 case 1: /* 00XY00XY */
12236 imm |= imm << 16;
12237 break;
12238 case 2: /* XY00XY00 */
12239 imm |= imm << 16;
12240 imm <<= 8;
12241 break;
12242 case 3: /* XYXYXYXY */
12243 imm |= imm << 16;
12244 imm |= imm << 8;
12245 break;
12246 default: /* Rotated constant. */
12247 shift = (shift << 1) | (imm >> 7);
12248 imm |= 0x80;
12249 imm = imm << (32 - shift);
12250 shifter_out = 1;
12251 break;
12252 }
Peter Maydell7d1b0092011-03-06 21:39:54 +000012253 tmp2 = tcg_temp_new_i32();
Filip Navara3174f8e2009-10-15 13:14:28 +020012254 tcg_gen_movi_i32(tmp2, imm);
pbrook9ee6e8b2007-11-11 00:04:49 +000012255 rn = (insn >> 16) & 0xf;
Filip Navara3174f8e2009-10-15 13:14:28 +020012256 if (rn == 15) {
Peter Maydell7d1b0092011-03-06 21:39:54 +000012257 tmp = tcg_temp_new_i32();
Filip Navara3174f8e2009-10-15 13:14:28 +020012258 tcg_gen_movi_i32(tmp, 0);
12259 } else {
12260 tmp = load_reg(s, rn);
12261 }
pbrook9ee6e8b2007-11-11 00:04:49 +000012262 op = (insn >> 21) & 0xf;
12263 if (gen_thumb2_data_op(s, op, (insn & (1 << 20)) != 0,
Filip Navara3174f8e2009-10-15 13:14:28 +020012264 shifter_out, tmp, tmp2))
pbrook9ee6e8b2007-11-11 00:04:49 +000012265 goto illegal_op;
Peter Maydell7d1b0092011-03-06 21:39:54 +000012266 tcg_temp_free_i32(tmp2);
pbrook9ee6e8b2007-11-11 00:04:49 +000012267 rd = (insn >> 8) & 0xf;
Peter Maydell55203182018-10-08 14:55:04 +010012268 if (rd == 13 && rn == 13
12269 && (op == 8 || op == 13)) {
12270 /* ADD(S) SP, SP, imm or SUB(S) SP, SP, imm */
12271 store_sp_checked(s, tmp);
12272 } else if (rd != 15) {
Filip Navara3174f8e2009-10-15 13:14:28 +020012273 store_reg(s, rd, tmp);
12274 } else {
Peter Maydell7d1b0092011-03-06 21:39:54 +000012275 tcg_temp_free_i32(tmp);
pbrook9ee6e8b2007-11-11 00:04:49 +000012276 }
12277 }
12278 }
12279 break;
12280 case 12: /* Load/store single data item. */
12281 {
12282 int postinc = 0;
12283 int writeback = 0;
Peter Maydella99caa42014-05-27 17:09:50 +010012284 int memidx;
Peter Maydell9bb65582017-02-07 18:30:00 +000012285 ISSInfo issinfo;
12286
pbrook9ee6e8b2007-11-11 00:04:49 +000012287 if ((insn & 0x01100000) == 0x01000000) {
Peter Maydell7dcc1f82014-10-28 19:24:03 +000012288 if (disas_neon_ls_insn(s, insn)) {
pbrook9ee6e8b2007-11-11 00:04:49 +000012289 goto illegal_op;
Peter Maydell7dcc1f82014-10-28 19:24:03 +000012290 }
pbrook9ee6e8b2007-11-11 00:04:49 +000012291 break;
12292 }
Peter Maydella2fdc892011-02-03 19:43:25 +000012293 op = ((insn >> 21) & 3) | ((insn >> 22) & 4);
12294 if (rs == 15) {
12295 if (!(insn & (1 << 20))) {
12296 goto illegal_op;
12297 }
12298 if (op != 2) {
12299 /* Byte or halfword load space with dest == r15 : memory hints.
12300 * Catch them early so we don't emit pointless addressing code.
12301 * This space is a mix of:
12302 * PLD/PLDW/PLI, which we implement as NOPs (note that unlike
12303 * the ARM encodings, PLDW space doesn't UNDEF for non-v7MP
12304 * cores)
12305 * unallocated hints, which must be treated as NOPs
12306 * UNPREDICTABLE space, which we NOP or UNDEF depending on
12307 * which is easiest for the decoding logic
12308 * Some space which must UNDEF
12309 */
12310 int op1 = (insn >> 23) & 3;
12311 int op2 = (insn >> 6) & 0x3f;
12312 if (op & 2) {
12313 goto illegal_op;
12314 }
12315 if (rn == 15) {
Peter Maydell02afbf62011-11-24 19:33:31 +010012316 /* UNPREDICTABLE, unallocated hint or
12317 * PLD/PLDW/PLI (literal)
12318 */
Peter Maydell2eea8412018-01-11 13:25:40 +000012319 return;
Peter Maydella2fdc892011-02-03 19:43:25 +000012320 }
12321 if (op1 & 1) {
Peter Maydell2eea8412018-01-11 13:25:40 +000012322 return; /* PLD/PLDW/PLI or unallocated hint */
Peter Maydella2fdc892011-02-03 19:43:25 +000012323 }
12324 if ((op2 == 0) || ((op2 & 0x3c) == 0x30)) {
Peter Maydell2eea8412018-01-11 13:25:40 +000012325 return; /* PLD/PLDW/PLI or unallocated hint */
Peter Maydella2fdc892011-02-03 19:43:25 +000012326 }
12327 /* UNDEF space, or an UNPREDICTABLE */
Peter Maydell2eea8412018-01-11 13:25:40 +000012328 goto illegal_op;
Peter Maydella2fdc892011-02-03 19:43:25 +000012329 }
12330 }
Peter Maydella99caa42014-05-27 17:09:50 +010012331 memidx = get_mem_index(s);
pbrook9ee6e8b2007-11-11 00:04:49 +000012332 if (rn == 15) {
Peter Maydell7d1b0092011-03-06 21:39:54 +000012333 addr = tcg_temp_new_i32();
pbrook9ee6e8b2007-11-11 00:04:49 +000012334 /* PC relative. */
12335 /* s->pc has already been incremented by 4. */
12336 imm = s->pc & 0xfffffffc;
12337 if (insn & (1 << 23))
12338 imm += insn & 0xfff;
12339 else
12340 imm -= insn & 0xfff;
pbrookb0109802008-03-31 03:47:03 +000012341 tcg_gen_movi_i32(addr, imm);
pbrook9ee6e8b2007-11-11 00:04:49 +000012342 } else {
pbrookb0109802008-03-31 03:47:03 +000012343 addr = load_reg(s, rn);
pbrook9ee6e8b2007-11-11 00:04:49 +000012344 if (insn & (1 << 23)) {
12345 /* Positive offset. */
12346 imm = insn & 0xfff;
pbrookb0109802008-03-31 03:47:03 +000012347 tcg_gen_addi_i32(addr, addr, imm);
pbrook9ee6e8b2007-11-11 00:04:49 +000012348 } else {
pbrook9ee6e8b2007-11-11 00:04:49 +000012349 imm = insn & 0xff;
Peter Maydell2a0308c2011-03-10 16:48:49 +000012350 switch ((insn >> 8) & 0xf) {
12351 case 0x0: /* Shifted Register. */
pbrook9ee6e8b2007-11-11 00:04:49 +000012352 shift = (insn >> 4) & 0xf;
Peter Maydell2a0308c2011-03-10 16:48:49 +000012353 if (shift > 3) {
12354 tcg_temp_free_i32(addr);
pbrook9ee6e8b2007-11-11 00:04:49 +000012355 goto illegal_op;
Peter Maydell2a0308c2011-03-10 16:48:49 +000012356 }
pbrookb26eefb2008-03-31 03:44:26 +000012357 tmp = load_reg(s, rm);
pbrook9ee6e8b2007-11-11 00:04:49 +000012358 if (shift)
pbrookb26eefb2008-03-31 03:44:26 +000012359 tcg_gen_shli_i32(tmp, tmp, shift);
pbrookb0109802008-03-31 03:47:03 +000012360 tcg_gen_add_i32(addr, addr, tmp);
Peter Maydell7d1b0092011-03-06 21:39:54 +000012361 tcg_temp_free_i32(tmp);
pbrook9ee6e8b2007-11-11 00:04:49 +000012362 break;
Peter Maydell2a0308c2011-03-10 16:48:49 +000012363 case 0xc: /* Negative offset. */
pbrookb0109802008-03-31 03:47:03 +000012364 tcg_gen_addi_i32(addr, addr, -imm);
pbrook9ee6e8b2007-11-11 00:04:49 +000012365 break;
Peter Maydell2a0308c2011-03-10 16:48:49 +000012366 case 0xe: /* User privilege. */
pbrookb0109802008-03-31 03:47:03 +000012367 tcg_gen_addi_i32(addr, addr, imm);
Peter Maydell579d21c2015-02-05 13:37:23 +000012368 memidx = get_a32_user_mem_index(s);
pbrook9ee6e8b2007-11-11 00:04:49 +000012369 break;
Peter Maydell2a0308c2011-03-10 16:48:49 +000012370 case 0x9: /* Post-decrement. */
pbrook9ee6e8b2007-11-11 00:04:49 +000012371 imm = -imm;
12372 /* Fall through. */
Peter Maydell2a0308c2011-03-10 16:48:49 +000012373 case 0xb: /* Post-increment. */
pbrook9ee6e8b2007-11-11 00:04:49 +000012374 postinc = 1;
12375 writeback = 1;
12376 break;
Peter Maydell2a0308c2011-03-10 16:48:49 +000012377 case 0xd: /* Pre-decrement. */
pbrook9ee6e8b2007-11-11 00:04:49 +000012378 imm = -imm;
12379 /* Fall through. */
Peter Maydell2a0308c2011-03-10 16:48:49 +000012380 case 0xf: /* Pre-increment. */
pbrook9ee6e8b2007-11-11 00:04:49 +000012381 writeback = 1;
12382 break;
12383 default:
Peter Maydell2a0308c2011-03-10 16:48:49 +000012384 tcg_temp_free_i32(addr);
pbrook9ee6e8b2007-11-11 00:04:49 +000012385 goto illegal_op;
12386 }
12387 }
12388 }
Peter Maydell9bb65582017-02-07 18:30:00 +000012389
12390 issinfo = writeback ? ISSInvalid : rs;
12391
Peter Maydell0bc003b2018-10-08 14:55:04 +010012392 if (s->v8m_stackcheck && rn == 13 && writeback) {
12393 /*
12394 * Stackcheck. Here we know 'addr' is the current SP;
12395 * if imm is +ve we're moving SP up, else down. It is
12396 * UNKNOWN whether the limit check triggers when SP starts
12397 * below the limit and ends up above it; we chose to do so.
12398 */
12399 if ((int32_t)imm < 0) {
12400 TCGv_i32 newsp = tcg_temp_new_i32();
12401
12402 tcg_gen_addi_i32(newsp, addr, imm);
12403 gen_helper_v8m_stackcheck(cpu_env, newsp);
12404 tcg_temp_free_i32(newsp);
12405 } else {
12406 gen_helper_v8m_stackcheck(cpu_env, addr);
12407 }
12408 }
12409
12410 if (writeback && !postinc) {
12411 tcg_gen_addi_i32(addr, addr, imm);
12412 }
12413
pbrook9ee6e8b2007-11-11 00:04:49 +000012414 if (insn & (1 << 20)) {
12415 /* Load. */
Peter Maydell5a839c02013-05-23 13:00:00 +010012416 tmp = tcg_temp_new_i32();
Peter Maydella2fdc892011-02-03 19:43:25 +000012417 switch (op) {
Peter Maydell5a839c02013-05-23 13:00:00 +010012418 case 0:
Peter Maydell9bb65582017-02-07 18:30:00 +000012419 gen_aa32_ld8u_iss(s, tmp, addr, memidx, issinfo);
Peter Maydell5a839c02013-05-23 13:00:00 +010012420 break;
12421 case 4:
Peter Maydell9bb65582017-02-07 18:30:00 +000012422 gen_aa32_ld8s_iss(s, tmp, addr, memidx, issinfo);
Peter Maydell5a839c02013-05-23 13:00:00 +010012423 break;
12424 case 1:
Peter Maydell9bb65582017-02-07 18:30:00 +000012425 gen_aa32_ld16u_iss(s, tmp, addr, memidx, issinfo);
Peter Maydell5a839c02013-05-23 13:00:00 +010012426 break;
12427 case 5:
Peter Maydell9bb65582017-02-07 18:30:00 +000012428 gen_aa32_ld16s_iss(s, tmp, addr, memidx, issinfo);
Peter Maydell5a839c02013-05-23 13:00:00 +010012429 break;
12430 case 2:
Peter Maydell9bb65582017-02-07 18:30:00 +000012431 gen_aa32_ld32u_iss(s, tmp, addr, memidx, issinfo);
Peter Maydell5a839c02013-05-23 13:00:00 +010012432 break;
Peter Maydell2a0308c2011-03-10 16:48:49 +000012433 default:
Peter Maydell5a839c02013-05-23 13:00:00 +010012434 tcg_temp_free_i32(tmp);
Peter Maydell2a0308c2011-03-10 16:48:49 +000012435 tcg_temp_free_i32(addr);
12436 goto illegal_op;
Peter Maydella2fdc892011-02-03 19:43:25 +000012437 }
12438 if (rs == 15) {
Peter Maydell3bb8a962017-04-20 17:32:31 +010012439 gen_bx_excret(s, tmp);
pbrook9ee6e8b2007-11-11 00:04:49 +000012440 } else {
Peter Maydella2fdc892011-02-03 19:43:25 +000012441 store_reg(s, rs, tmp);
pbrook9ee6e8b2007-11-11 00:04:49 +000012442 }
12443 } else {
12444 /* Store. */
pbrookb0109802008-03-31 03:47:03 +000012445 tmp = load_reg(s, rs);
pbrook9ee6e8b2007-11-11 00:04:49 +000012446 switch (op) {
Peter Maydell5a839c02013-05-23 13:00:00 +010012447 case 0:
Peter Maydell9bb65582017-02-07 18:30:00 +000012448 gen_aa32_st8_iss(s, tmp, addr, memidx, issinfo);
Peter Maydell5a839c02013-05-23 13:00:00 +010012449 break;
12450 case 1:
Peter Maydell9bb65582017-02-07 18:30:00 +000012451 gen_aa32_st16_iss(s, tmp, addr, memidx, issinfo);
Peter Maydell5a839c02013-05-23 13:00:00 +010012452 break;
12453 case 2:
Peter Maydell9bb65582017-02-07 18:30:00 +000012454 gen_aa32_st32_iss(s, tmp, addr, memidx, issinfo);
Peter Maydell5a839c02013-05-23 13:00:00 +010012455 break;
Peter Maydell2a0308c2011-03-10 16:48:49 +000012456 default:
Peter Maydell5a839c02013-05-23 13:00:00 +010012457 tcg_temp_free_i32(tmp);
Peter Maydell2a0308c2011-03-10 16:48:49 +000012458 tcg_temp_free_i32(addr);
12459 goto illegal_op;
pbrook9ee6e8b2007-11-11 00:04:49 +000012460 }
Peter Maydell5a839c02013-05-23 13:00:00 +010012461 tcg_temp_free_i32(tmp);
pbrook9ee6e8b2007-11-11 00:04:49 +000012462 }
12463 if (postinc)
pbrookb0109802008-03-31 03:47:03 +000012464 tcg_gen_addi_i32(addr, addr, imm);
12465 if (writeback) {
12466 store_reg(s, rn, addr);
12467 } else {
Peter Maydell7d1b0092011-03-06 21:39:54 +000012468 tcg_temp_free_i32(addr);
pbrookb0109802008-03-31 03:47:03 +000012469 }
pbrook9ee6e8b2007-11-11 00:04:49 +000012470 }
12471 break;
12472 default:
12473 goto illegal_op;
12474 }
Peter Maydell2eea8412018-01-11 13:25:40 +000012475 return;
pbrook9ee6e8b2007-11-11 00:04:49 +000012476illegal_op:
Peter Maydell2eea8412018-01-11 13:25:40 +000012477 gen_exception_insn(s, 4, EXCP_UDEF, syn_uncategorized(),
12478 default_exception_el(s));
pbrook9ee6e8b2007-11-11 00:04:49 +000012479}
12480
Peter Maydell296e5a02017-10-09 14:48:36 +010012481static void disas_thumb_insn(DisasContext *s, uint32_t insn)
bellard99c475a2005-01-31 20:45:13 +000012482{
Peter Maydell296e5a02017-10-09 14:48:36 +010012483 uint32_t val, op, rm, rn, rd, shift, cond;
bellard99c475a2005-01-31 20:45:13 +000012484 int32_t offset;
12485 int i;
Peter Maydell39d54922013-05-23 12:59:55 +010012486 TCGv_i32 tmp;
12487 TCGv_i32 tmp2;
12488 TCGv_i32 addr;
bellard99c475a2005-01-31 20:45:13 +000012489
bellard99c475a2005-01-31 20:45:13 +000012490 switch (insn >> 12) {
12491 case 0: case 1:
Filip Navara396e4672009-10-15 12:55:34 +020012492
bellard99c475a2005-01-31 20:45:13 +000012493 rd = insn & 7;
12494 op = (insn >> 11) & 3;
12495 if (op == 3) {
Peter Maydella2d12f02018-10-08 14:55:04 +010012496 /*
12497 * 0b0001_1xxx_xxxx_xxxx
12498 * - Add, subtract (three low registers)
12499 * - Add, subtract (two low registers and immediate)
12500 */
bellard99c475a2005-01-31 20:45:13 +000012501 rn = (insn >> 3) & 7;
Filip Navara396e4672009-10-15 12:55:34 +020012502 tmp = load_reg(s, rn);
bellard99c475a2005-01-31 20:45:13 +000012503 if (insn & (1 << 10)) {
12504 /* immediate */
Peter Maydell7d1b0092011-03-06 21:39:54 +000012505 tmp2 = tcg_temp_new_i32();
Filip Navara396e4672009-10-15 12:55:34 +020012506 tcg_gen_movi_i32(tmp2, (insn >> 6) & 7);
bellard99c475a2005-01-31 20:45:13 +000012507 } else {
12508 /* reg */
12509 rm = (insn >> 6) & 7;
Filip Navara396e4672009-10-15 12:55:34 +020012510 tmp2 = load_reg(s, rm);
bellard99c475a2005-01-31 20:45:13 +000012511 }
pbrook9ee6e8b2007-11-11 00:04:49 +000012512 if (insn & (1 << 9)) {
12513 if (s->condexec_mask)
Filip Navara396e4672009-10-15 12:55:34 +020012514 tcg_gen_sub_i32(tmp, tmp, tmp2);
pbrook9ee6e8b2007-11-11 00:04:49 +000012515 else
Aurelien Jarno72485ec2012-10-05 15:04:44 +010012516 gen_sub_CC(tmp, tmp, tmp2);
pbrook9ee6e8b2007-11-11 00:04:49 +000012517 } else {
12518 if (s->condexec_mask)
Filip Navara396e4672009-10-15 12:55:34 +020012519 tcg_gen_add_i32(tmp, tmp, tmp2);
pbrook9ee6e8b2007-11-11 00:04:49 +000012520 else
Aurelien Jarno72485ec2012-10-05 15:04:44 +010012521 gen_add_CC(tmp, tmp, tmp2);
pbrook9ee6e8b2007-11-11 00:04:49 +000012522 }
Peter Maydell7d1b0092011-03-06 21:39:54 +000012523 tcg_temp_free_i32(tmp2);
Filip Navara396e4672009-10-15 12:55:34 +020012524 store_reg(s, rd, tmp);
bellard99c475a2005-01-31 20:45:13 +000012525 } else {
12526 /* shift immediate */
12527 rm = (insn >> 3) & 7;
12528 shift = (insn >> 6) & 0x1f;
pbrook9a119ff2008-03-31 03:45:35 +000012529 tmp = load_reg(s, rm);
12530 gen_arm_shift_im(tmp, op, shift, s->condexec_mask == 0);
12531 if (!s->condexec_mask)
12532 gen_logic_CC(tmp);
12533 store_reg(s, rd, tmp);
bellard99c475a2005-01-31 20:45:13 +000012534 }
12535 break;
12536 case 2: case 3:
Peter Maydella2d12f02018-10-08 14:55:04 +010012537 /*
12538 * 0b001x_xxxx_xxxx_xxxx
12539 * - Add, subtract, compare, move (one low register and immediate)
12540 */
bellard99c475a2005-01-31 20:45:13 +000012541 op = (insn >> 11) & 3;
12542 rd = (insn >> 8) & 0x7;
Filip Navara396e4672009-10-15 12:55:34 +020012543 if (op == 0) { /* mov */
Peter Maydell7d1b0092011-03-06 21:39:54 +000012544 tmp = tcg_temp_new_i32();
Filip Navara396e4672009-10-15 12:55:34 +020012545 tcg_gen_movi_i32(tmp, insn & 0xff);
pbrook9ee6e8b2007-11-11 00:04:49 +000012546 if (!s->condexec_mask)
Filip Navara396e4672009-10-15 12:55:34 +020012547 gen_logic_CC(tmp);
12548 store_reg(s, rd, tmp);
12549 } else {
12550 tmp = load_reg(s, rd);
Peter Maydell7d1b0092011-03-06 21:39:54 +000012551 tmp2 = tcg_temp_new_i32();
Filip Navara396e4672009-10-15 12:55:34 +020012552 tcg_gen_movi_i32(tmp2, insn & 0xff);
12553 switch (op) {
12554 case 1: /* cmp */
Aurelien Jarno72485ec2012-10-05 15:04:44 +010012555 gen_sub_CC(tmp, tmp, tmp2);
Peter Maydell7d1b0092011-03-06 21:39:54 +000012556 tcg_temp_free_i32(tmp);
12557 tcg_temp_free_i32(tmp2);
Filip Navara396e4672009-10-15 12:55:34 +020012558 break;
12559 case 2: /* add */
12560 if (s->condexec_mask)
12561 tcg_gen_add_i32(tmp, tmp, tmp2);
12562 else
Aurelien Jarno72485ec2012-10-05 15:04:44 +010012563 gen_add_CC(tmp, tmp, tmp2);
Peter Maydell7d1b0092011-03-06 21:39:54 +000012564 tcg_temp_free_i32(tmp2);
Filip Navara396e4672009-10-15 12:55:34 +020012565 store_reg(s, rd, tmp);
12566 break;
12567 case 3: /* sub */
12568 if (s->condexec_mask)
12569 tcg_gen_sub_i32(tmp, tmp, tmp2);
12570 else
Aurelien Jarno72485ec2012-10-05 15:04:44 +010012571 gen_sub_CC(tmp, tmp, tmp2);
Peter Maydell7d1b0092011-03-06 21:39:54 +000012572 tcg_temp_free_i32(tmp2);
Filip Navara396e4672009-10-15 12:55:34 +020012573 store_reg(s, rd, tmp);
12574 break;
12575 }
bellard99c475a2005-01-31 20:45:13 +000012576 }
bellard99c475a2005-01-31 20:45:13 +000012577 break;
12578 case 4:
12579 if (insn & (1 << 11)) {
12580 rd = (insn >> 8) & 7;
bellard5899f382005-04-27 20:25:20 +000012581 /* load pc-relative. Bit 1 of PC is ignored. */
12582 val = s->pc + 2 + ((insn & 0xff) * 4);
12583 val &= ~(uint32_t)2;
Peter Maydell7d1b0092011-03-06 21:39:54 +000012584 addr = tcg_temp_new_i32();
pbrookb0109802008-03-31 03:47:03 +000012585 tcg_gen_movi_i32(addr, val);
Peter Maydellc40c8552013-05-23 13:00:01 +010012586 tmp = tcg_temp_new_i32();
Peter Maydell9bb65582017-02-07 18:30:00 +000012587 gen_aa32_ld32u_iss(s, tmp, addr, get_mem_index(s),
12588 rd | ISSIs16Bit);
Peter Maydell7d1b0092011-03-06 21:39:54 +000012589 tcg_temp_free_i32(addr);
pbrookb0109802008-03-31 03:47:03 +000012590 store_reg(s, rd, tmp);
bellard99c475a2005-01-31 20:45:13 +000012591 break;
12592 }
12593 if (insn & (1 << 10)) {
Peter Maydellebfe27c2017-09-04 15:21:51 +010012594 /* 0b0100_01xx_xxxx_xxxx
12595 * - data processing extended, branch and exchange
12596 */
bellard99c475a2005-01-31 20:45:13 +000012597 rd = (insn & 7) | ((insn >> 4) & 8);
12598 rm = (insn >> 3) & 0xf;
12599 op = (insn >> 8) & 3;
12600 switch (op) {
12601 case 0: /* add */
Filip Navara396e4672009-10-15 12:55:34 +020012602 tmp = load_reg(s, rd);
12603 tmp2 = load_reg(s, rm);
12604 tcg_gen_add_i32(tmp, tmp, tmp2);
Peter Maydell7d1b0092011-03-06 21:39:54 +000012605 tcg_temp_free_i32(tmp2);
Peter Maydell55203182018-10-08 14:55:04 +010012606 if (rd == 13) {
12607 /* ADD SP, SP, reg */
12608 store_sp_checked(s, tmp);
12609 } else {
12610 store_reg(s, rd, tmp);
12611 }
bellard99c475a2005-01-31 20:45:13 +000012612 break;
12613 case 1: /* cmp */
Filip Navara396e4672009-10-15 12:55:34 +020012614 tmp = load_reg(s, rd);
12615 tmp2 = load_reg(s, rm);
Aurelien Jarno72485ec2012-10-05 15:04:44 +010012616 gen_sub_CC(tmp, tmp, tmp2);
Peter Maydell7d1b0092011-03-06 21:39:54 +000012617 tcg_temp_free_i32(tmp2);
12618 tcg_temp_free_i32(tmp);
bellard99c475a2005-01-31 20:45:13 +000012619 break;
12620 case 2: /* mov/cpy */
Filip Navara396e4672009-10-15 12:55:34 +020012621 tmp = load_reg(s, rm);
Peter Maydell55203182018-10-08 14:55:04 +010012622 if (rd == 13) {
12623 /* MOV SP, reg */
12624 store_sp_checked(s, tmp);
12625 } else {
12626 store_reg(s, rd, tmp);
12627 }
bellard99c475a2005-01-31 20:45:13 +000012628 break;
Peter Maydellebfe27c2017-09-04 15:21:51 +010012629 case 3:
12630 {
12631 /* 0b0100_0111_xxxx_xxxx
12632 * - branch [and link] exchange thumb register
12633 */
12634 bool link = insn & (1 << 7);
12635
Peter Maydellfb602cb2017-09-07 13:54:54 +010012636 if (insn & 3) {
Peter Maydellebfe27c2017-09-04 15:21:51 +010012637 goto undef;
12638 }
12639 if (link) {
Dmitry Eremin-Solenikovbe5e7a72011-04-04 17:38:44 +040012640 ARCH(5);
Peter Maydellebfe27c2017-09-04 15:21:51 +010012641 }
Peter Maydellfb602cb2017-09-07 13:54:54 +010012642 if ((insn & 4)) {
12643 /* BXNS/BLXNS: only exists for v8M with the
12644 * security extensions, and always UNDEF if NonSecure.
12645 * We don't implement these in the user-only mode
12646 * either (in theory you can use them from Secure User
12647 * mode but they are too tied in to system emulation.)
12648 */
12649 if (!s->v8m_secure || IS_USER_ONLY) {
12650 goto undef;
12651 }
12652 if (link) {
Peter Maydell3e3fa232017-10-09 14:48:33 +010012653 gen_blxns(s, rm);
Peter Maydellfb602cb2017-09-07 13:54:54 +010012654 } else {
12655 gen_bxns(s, rm);
12656 }
12657 break;
12658 }
12659 /* BLX/BX */
Peter Maydellebfe27c2017-09-04 15:21:51 +010012660 tmp = load_reg(s, rm);
12661 if (link) {
bellard99c475a2005-01-31 20:45:13 +000012662 val = (uint32_t)s->pc | 1;
Peter Maydell7d1b0092011-03-06 21:39:54 +000012663 tmp2 = tcg_temp_new_i32();
pbrookb0109802008-03-31 03:47:03 +000012664 tcg_gen_movi_i32(tmp2, val);
12665 store_reg(s, 14, tmp2);
Peter Maydell3bb8a962017-04-20 17:32:31 +010012666 gen_bx(s, tmp);
12667 } else {
12668 /* Only BX works as exception-return, not BLX */
12669 gen_bx_excret(s, tmp);
bellard99c475a2005-01-31 20:45:13 +000012670 }
bellard99c475a2005-01-31 20:45:13 +000012671 break;
12672 }
Peter Maydellebfe27c2017-09-04 15:21:51 +010012673 }
bellard99c475a2005-01-31 20:45:13 +000012674 break;
12675 }
12676
Peter Maydella2d12f02018-10-08 14:55:04 +010012677 /*
12678 * 0b0100_00xx_xxxx_xxxx
12679 * - Data-processing (two low registers)
12680 */
bellard99c475a2005-01-31 20:45:13 +000012681 rd = insn & 7;
12682 rm = (insn >> 3) & 7;
12683 op = (insn >> 6) & 0xf;
12684 if (op == 2 || op == 3 || op == 4 || op == 7) {
12685 /* the shift/rotate ops want the operands backwards */
12686 val = rm;
12687 rm = rd;
12688 rd = val;
12689 val = 1;
12690 } else {
12691 val = 0;
12692 }
12693
Filip Navara396e4672009-10-15 12:55:34 +020012694 if (op == 9) { /* neg */
Peter Maydell7d1b0092011-03-06 21:39:54 +000012695 tmp = tcg_temp_new_i32();
Filip Navara396e4672009-10-15 12:55:34 +020012696 tcg_gen_movi_i32(tmp, 0);
12697 } else if (op != 0xf) { /* mvn doesn't read its first operand */
12698 tmp = load_reg(s, rd);
12699 } else {
Richard Hendersonf7647182017-11-02 12:47:37 +010012700 tmp = NULL;
Filip Navara396e4672009-10-15 12:55:34 +020012701 }
bellard99c475a2005-01-31 20:45:13 +000012702
Filip Navara396e4672009-10-15 12:55:34 +020012703 tmp2 = load_reg(s, rm);
bellard5899f382005-04-27 20:25:20 +000012704 switch (op) {
bellard99c475a2005-01-31 20:45:13 +000012705 case 0x0: /* and */
Filip Navara396e4672009-10-15 12:55:34 +020012706 tcg_gen_and_i32(tmp, tmp, tmp2);
pbrook9ee6e8b2007-11-11 00:04:49 +000012707 if (!s->condexec_mask)
Filip Navara396e4672009-10-15 12:55:34 +020012708 gen_logic_CC(tmp);
bellard99c475a2005-01-31 20:45:13 +000012709 break;
12710 case 0x1: /* eor */
Filip Navara396e4672009-10-15 12:55:34 +020012711 tcg_gen_xor_i32(tmp, tmp, tmp2);
pbrook9ee6e8b2007-11-11 00:04:49 +000012712 if (!s->condexec_mask)
Filip Navara396e4672009-10-15 12:55:34 +020012713 gen_logic_CC(tmp);
bellard99c475a2005-01-31 20:45:13 +000012714 break;
12715 case 0x2: /* lsl */
pbrook9ee6e8b2007-11-11 00:04:49 +000012716 if (s->condexec_mask) {
Aurelien Jarno365af802012-10-05 15:04:44 +010012717 gen_shl(tmp2, tmp2, tmp);
pbrook9ee6e8b2007-11-11 00:04:49 +000012718 } else {
Blue Swirl9ef39272012-09-04 20:19:15 +000012719 gen_helper_shl_cc(tmp2, cpu_env, tmp2, tmp);
Filip Navara396e4672009-10-15 12:55:34 +020012720 gen_logic_CC(tmp2);
pbrook9ee6e8b2007-11-11 00:04:49 +000012721 }
bellard99c475a2005-01-31 20:45:13 +000012722 break;
12723 case 0x3: /* lsr */
pbrook9ee6e8b2007-11-11 00:04:49 +000012724 if (s->condexec_mask) {
Aurelien Jarno365af802012-10-05 15:04:44 +010012725 gen_shr(tmp2, tmp2, tmp);
pbrook9ee6e8b2007-11-11 00:04:49 +000012726 } else {
Blue Swirl9ef39272012-09-04 20:19:15 +000012727 gen_helper_shr_cc(tmp2, cpu_env, tmp2, tmp);
Filip Navara396e4672009-10-15 12:55:34 +020012728 gen_logic_CC(tmp2);
pbrook9ee6e8b2007-11-11 00:04:49 +000012729 }
bellard99c475a2005-01-31 20:45:13 +000012730 break;
12731 case 0x4: /* asr */
pbrook9ee6e8b2007-11-11 00:04:49 +000012732 if (s->condexec_mask) {
Aurelien Jarno365af802012-10-05 15:04:44 +010012733 gen_sar(tmp2, tmp2, tmp);
pbrook9ee6e8b2007-11-11 00:04:49 +000012734 } else {
Blue Swirl9ef39272012-09-04 20:19:15 +000012735 gen_helper_sar_cc(tmp2, cpu_env, tmp2, tmp);
Filip Navara396e4672009-10-15 12:55:34 +020012736 gen_logic_CC(tmp2);
pbrook9ee6e8b2007-11-11 00:04:49 +000012737 }
bellard99c475a2005-01-31 20:45:13 +000012738 break;
12739 case 0x5: /* adc */
Richard Henderson49b4c312013-02-19 23:52:08 -080012740 if (s->condexec_mask) {
Filip Navara396e4672009-10-15 12:55:34 +020012741 gen_adc(tmp, tmp2);
Richard Henderson49b4c312013-02-19 23:52:08 -080012742 } else {
12743 gen_adc_CC(tmp, tmp, tmp2);
12744 }
bellard99c475a2005-01-31 20:45:13 +000012745 break;
12746 case 0x6: /* sbc */
Richard Henderson2de68a42013-02-19 23:52:09 -080012747 if (s->condexec_mask) {
Filip Navara396e4672009-10-15 12:55:34 +020012748 gen_sub_carry(tmp, tmp, tmp2);
Richard Henderson2de68a42013-02-19 23:52:09 -080012749 } else {
12750 gen_sbc_CC(tmp, tmp, tmp2);
12751 }
bellard99c475a2005-01-31 20:45:13 +000012752 break;
12753 case 0x7: /* ror */
pbrook9ee6e8b2007-11-11 00:04:49 +000012754 if (s->condexec_mask) {
Aurelien Jarnof669df22009-10-15 16:45:14 +020012755 tcg_gen_andi_i32(tmp, tmp, 0x1f);
12756 tcg_gen_rotr_i32(tmp2, tmp2, tmp);
pbrook9ee6e8b2007-11-11 00:04:49 +000012757 } else {
Blue Swirl9ef39272012-09-04 20:19:15 +000012758 gen_helper_ror_cc(tmp2, cpu_env, tmp2, tmp);
Filip Navara396e4672009-10-15 12:55:34 +020012759 gen_logic_CC(tmp2);
pbrook9ee6e8b2007-11-11 00:04:49 +000012760 }
bellard99c475a2005-01-31 20:45:13 +000012761 break;
12762 case 0x8: /* tst */
Filip Navara396e4672009-10-15 12:55:34 +020012763 tcg_gen_and_i32(tmp, tmp, tmp2);
12764 gen_logic_CC(tmp);
bellard99c475a2005-01-31 20:45:13 +000012765 rd = 16;
bellard5899f382005-04-27 20:25:20 +000012766 break;
bellard99c475a2005-01-31 20:45:13 +000012767 case 0x9: /* neg */
pbrook9ee6e8b2007-11-11 00:04:49 +000012768 if (s->condexec_mask)
Filip Navara396e4672009-10-15 12:55:34 +020012769 tcg_gen_neg_i32(tmp, tmp2);
pbrook9ee6e8b2007-11-11 00:04:49 +000012770 else
Aurelien Jarno72485ec2012-10-05 15:04:44 +010012771 gen_sub_CC(tmp, tmp, tmp2);
bellard99c475a2005-01-31 20:45:13 +000012772 break;
12773 case 0xa: /* cmp */
Aurelien Jarno72485ec2012-10-05 15:04:44 +010012774 gen_sub_CC(tmp, tmp, tmp2);
bellard99c475a2005-01-31 20:45:13 +000012775 rd = 16;
12776 break;
12777 case 0xb: /* cmn */
Aurelien Jarno72485ec2012-10-05 15:04:44 +010012778 gen_add_CC(tmp, tmp, tmp2);
bellard99c475a2005-01-31 20:45:13 +000012779 rd = 16;
12780 break;
12781 case 0xc: /* orr */
Filip Navara396e4672009-10-15 12:55:34 +020012782 tcg_gen_or_i32(tmp, tmp, tmp2);
pbrook9ee6e8b2007-11-11 00:04:49 +000012783 if (!s->condexec_mask)
Filip Navara396e4672009-10-15 12:55:34 +020012784 gen_logic_CC(tmp);
bellard99c475a2005-01-31 20:45:13 +000012785 break;
12786 case 0xd: /* mul */
Juha.Riihimaki@nokia.com7b2919a2009-10-21 12:17:38 +020012787 tcg_gen_mul_i32(tmp, tmp, tmp2);
pbrook9ee6e8b2007-11-11 00:04:49 +000012788 if (!s->condexec_mask)
Filip Navara396e4672009-10-15 12:55:34 +020012789 gen_logic_CC(tmp);
bellard99c475a2005-01-31 20:45:13 +000012790 break;
12791 case 0xe: /* bic */
Aurelien Jarnof669df22009-10-15 16:45:14 +020012792 tcg_gen_andc_i32(tmp, tmp, tmp2);
pbrook9ee6e8b2007-11-11 00:04:49 +000012793 if (!s->condexec_mask)
Filip Navara396e4672009-10-15 12:55:34 +020012794 gen_logic_CC(tmp);
bellard99c475a2005-01-31 20:45:13 +000012795 break;
12796 case 0xf: /* mvn */
Filip Navara396e4672009-10-15 12:55:34 +020012797 tcg_gen_not_i32(tmp2, tmp2);
pbrook9ee6e8b2007-11-11 00:04:49 +000012798 if (!s->condexec_mask)
Filip Navara396e4672009-10-15 12:55:34 +020012799 gen_logic_CC(tmp2);
bellard99c475a2005-01-31 20:45:13 +000012800 val = 1;
bellard5899f382005-04-27 20:25:20 +000012801 rm = rd;
bellard99c475a2005-01-31 20:45:13 +000012802 break;
12803 }
12804 if (rd != 16) {
Filip Navara396e4672009-10-15 12:55:34 +020012805 if (val) {
12806 store_reg(s, rm, tmp2);
12807 if (op != 0xf)
Peter Maydell7d1b0092011-03-06 21:39:54 +000012808 tcg_temp_free_i32(tmp);
Filip Navara396e4672009-10-15 12:55:34 +020012809 } else {
12810 store_reg(s, rd, tmp);
Peter Maydell7d1b0092011-03-06 21:39:54 +000012811 tcg_temp_free_i32(tmp2);
Filip Navara396e4672009-10-15 12:55:34 +020012812 }
12813 } else {
Peter Maydell7d1b0092011-03-06 21:39:54 +000012814 tcg_temp_free_i32(tmp);
12815 tcg_temp_free_i32(tmp2);
bellard99c475a2005-01-31 20:45:13 +000012816 }
12817 break;
12818
12819 case 5:
12820 /* load/store register offset. */
12821 rd = insn & 7;
12822 rn = (insn >> 3) & 7;
12823 rm = (insn >> 6) & 7;
12824 op = (insn >> 9) & 7;
pbrookb0109802008-03-31 03:47:03 +000012825 addr = load_reg(s, rn);
pbrookb26eefb2008-03-31 03:44:26 +000012826 tmp = load_reg(s, rm);
pbrookb0109802008-03-31 03:47:03 +000012827 tcg_gen_add_i32(addr, addr, tmp);
Peter Maydell7d1b0092011-03-06 21:39:54 +000012828 tcg_temp_free_i32(tmp);
bellard99c475a2005-01-31 20:45:13 +000012829
Peter Maydellc40c8552013-05-23 13:00:01 +010012830 if (op < 3) { /* store */
pbrookb0109802008-03-31 03:47:03 +000012831 tmp = load_reg(s, rd);
Peter Maydellc40c8552013-05-23 13:00:01 +010012832 } else {
12833 tmp = tcg_temp_new_i32();
12834 }
bellard99c475a2005-01-31 20:45:13 +000012835
12836 switch (op) {
12837 case 0: /* str */
Peter Maydell9bb65582017-02-07 18:30:00 +000012838 gen_aa32_st32_iss(s, tmp, addr, get_mem_index(s), rd | ISSIs16Bit);
bellard99c475a2005-01-31 20:45:13 +000012839 break;
12840 case 1: /* strh */
Peter Maydell9bb65582017-02-07 18:30:00 +000012841 gen_aa32_st16_iss(s, tmp, addr, get_mem_index(s), rd | ISSIs16Bit);
bellard99c475a2005-01-31 20:45:13 +000012842 break;
12843 case 2: /* strb */
Peter Maydell9bb65582017-02-07 18:30:00 +000012844 gen_aa32_st8_iss(s, tmp, addr, get_mem_index(s), rd | ISSIs16Bit);
bellard99c475a2005-01-31 20:45:13 +000012845 break;
12846 case 3: /* ldrsb */
Peter Maydell9bb65582017-02-07 18:30:00 +000012847 gen_aa32_ld8s_iss(s, tmp, addr, get_mem_index(s), rd | ISSIs16Bit);
bellard99c475a2005-01-31 20:45:13 +000012848 break;
12849 case 4: /* ldr */
Peter Maydell9bb65582017-02-07 18:30:00 +000012850 gen_aa32_ld32u_iss(s, tmp, addr, get_mem_index(s), rd | ISSIs16Bit);
bellard99c475a2005-01-31 20:45:13 +000012851 break;
12852 case 5: /* ldrh */
Peter Maydell9bb65582017-02-07 18:30:00 +000012853 gen_aa32_ld16u_iss(s, tmp, addr, get_mem_index(s), rd | ISSIs16Bit);
bellard99c475a2005-01-31 20:45:13 +000012854 break;
12855 case 6: /* ldrb */
Peter Maydell9bb65582017-02-07 18:30:00 +000012856 gen_aa32_ld8u_iss(s, tmp, addr, get_mem_index(s), rd | ISSIs16Bit);
bellard99c475a2005-01-31 20:45:13 +000012857 break;
12858 case 7: /* ldrsh */
Peter Maydell9bb65582017-02-07 18:30:00 +000012859 gen_aa32_ld16s_iss(s, tmp, addr, get_mem_index(s), rd | ISSIs16Bit);
bellard99c475a2005-01-31 20:45:13 +000012860 break;
12861 }
Peter Maydellc40c8552013-05-23 13:00:01 +010012862 if (op >= 3) { /* load */
pbrookb0109802008-03-31 03:47:03 +000012863 store_reg(s, rd, tmp);
Peter Maydellc40c8552013-05-23 13:00:01 +010012864 } else {
12865 tcg_temp_free_i32(tmp);
12866 }
Peter Maydell7d1b0092011-03-06 21:39:54 +000012867 tcg_temp_free_i32(addr);
bellard99c475a2005-01-31 20:45:13 +000012868 break;
12869
12870 case 6:
12871 /* load/store word immediate offset */
12872 rd = insn & 7;
12873 rn = (insn >> 3) & 7;
pbrookb0109802008-03-31 03:47:03 +000012874 addr = load_reg(s, rn);
bellard99c475a2005-01-31 20:45:13 +000012875 val = (insn >> 4) & 0x7c;
pbrookb0109802008-03-31 03:47:03 +000012876 tcg_gen_addi_i32(addr, addr, val);
bellard99c475a2005-01-31 20:45:13 +000012877
12878 if (insn & (1 << 11)) {
12879 /* load */
Peter Maydellc40c8552013-05-23 13:00:01 +010012880 tmp = tcg_temp_new_i32();
Paolo Bonzini12dcc322016-03-04 11:30:20 +000012881 gen_aa32_ld32u(s, tmp, addr, get_mem_index(s));
pbrookb0109802008-03-31 03:47:03 +000012882 store_reg(s, rd, tmp);
bellard99c475a2005-01-31 20:45:13 +000012883 } else {
12884 /* store */
pbrookb0109802008-03-31 03:47:03 +000012885 tmp = load_reg(s, rd);
Paolo Bonzini12dcc322016-03-04 11:30:20 +000012886 gen_aa32_st32(s, tmp, addr, get_mem_index(s));
Peter Maydellc40c8552013-05-23 13:00:01 +010012887 tcg_temp_free_i32(tmp);
bellard99c475a2005-01-31 20:45:13 +000012888 }
Peter Maydell7d1b0092011-03-06 21:39:54 +000012889 tcg_temp_free_i32(addr);
bellard99c475a2005-01-31 20:45:13 +000012890 break;
12891
12892 case 7:
12893 /* load/store byte immediate offset */
12894 rd = insn & 7;
12895 rn = (insn >> 3) & 7;
pbrookb0109802008-03-31 03:47:03 +000012896 addr = load_reg(s, rn);
bellard99c475a2005-01-31 20:45:13 +000012897 val = (insn >> 6) & 0x1f;
pbrookb0109802008-03-31 03:47:03 +000012898 tcg_gen_addi_i32(addr, addr, val);
bellard99c475a2005-01-31 20:45:13 +000012899
12900 if (insn & (1 << 11)) {
12901 /* load */
Peter Maydellc40c8552013-05-23 13:00:01 +010012902 tmp = tcg_temp_new_i32();
Peter Maydell9bb65582017-02-07 18:30:00 +000012903 gen_aa32_ld8u_iss(s, tmp, addr, get_mem_index(s), rd | ISSIs16Bit);
pbrookb0109802008-03-31 03:47:03 +000012904 store_reg(s, rd, tmp);
bellard99c475a2005-01-31 20:45:13 +000012905 } else {
12906 /* store */
pbrookb0109802008-03-31 03:47:03 +000012907 tmp = load_reg(s, rd);
Peter Maydell9bb65582017-02-07 18:30:00 +000012908 gen_aa32_st8_iss(s, tmp, addr, get_mem_index(s), rd | ISSIs16Bit);
Peter Maydellc40c8552013-05-23 13:00:01 +010012909 tcg_temp_free_i32(tmp);
bellard99c475a2005-01-31 20:45:13 +000012910 }
Peter Maydell7d1b0092011-03-06 21:39:54 +000012911 tcg_temp_free_i32(addr);
bellard99c475a2005-01-31 20:45:13 +000012912 break;
12913
12914 case 8:
12915 /* load/store halfword immediate offset */
12916 rd = insn & 7;
12917 rn = (insn >> 3) & 7;
pbrookb0109802008-03-31 03:47:03 +000012918 addr = load_reg(s, rn);
bellard99c475a2005-01-31 20:45:13 +000012919 val = (insn >> 5) & 0x3e;
pbrookb0109802008-03-31 03:47:03 +000012920 tcg_gen_addi_i32(addr, addr, val);
bellard99c475a2005-01-31 20:45:13 +000012921
12922 if (insn & (1 << 11)) {
12923 /* load */
Peter Maydellc40c8552013-05-23 13:00:01 +010012924 tmp = tcg_temp_new_i32();
Peter Maydell9bb65582017-02-07 18:30:00 +000012925 gen_aa32_ld16u_iss(s, tmp, addr, get_mem_index(s), rd | ISSIs16Bit);
pbrookb0109802008-03-31 03:47:03 +000012926 store_reg(s, rd, tmp);
bellard99c475a2005-01-31 20:45:13 +000012927 } else {
12928 /* store */
pbrookb0109802008-03-31 03:47:03 +000012929 tmp = load_reg(s, rd);
Peter Maydell9bb65582017-02-07 18:30:00 +000012930 gen_aa32_st16_iss(s, tmp, addr, get_mem_index(s), rd | ISSIs16Bit);
Peter Maydellc40c8552013-05-23 13:00:01 +010012931 tcg_temp_free_i32(tmp);
bellard99c475a2005-01-31 20:45:13 +000012932 }
Peter Maydell7d1b0092011-03-06 21:39:54 +000012933 tcg_temp_free_i32(addr);
bellard99c475a2005-01-31 20:45:13 +000012934 break;
12935
12936 case 9:
12937 /* load/store from stack */
12938 rd = (insn >> 8) & 7;
pbrookb0109802008-03-31 03:47:03 +000012939 addr = load_reg(s, 13);
bellard99c475a2005-01-31 20:45:13 +000012940 val = (insn & 0xff) * 4;
pbrookb0109802008-03-31 03:47:03 +000012941 tcg_gen_addi_i32(addr, addr, val);
bellard99c475a2005-01-31 20:45:13 +000012942
12943 if (insn & (1 << 11)) {
12944 /* load */
Peter Maydellc40c8552013-05-23 13:00:01 +010012945 tmp = tcg_temp_new_i32();
Peter Maydell9bb65582017-02-07 18:30:00 +000012946 gen_aa32_ld32u_iss(s, tmp, addr, get_mem_index(s), rd | ISSIs16Bit);
pbrookb0109802008-03-31 03:47:03 +000012947 store_reg(s, rd, tmp);
bellard99c475a2005-01-31 20:45:13 +000012948 } else {
12949 /* store */
pbrookb0109802008-03-31 03:47:03 +000012950 tmp = load_reg(s, rd);
Peter Maydell9bb65582017-02-07 18:30:00 +000012951 gen_aa32_st32_iss(s, tmp, addr, get_mem_index(s), rd | ISSIs16Bit);
Peter Maydellc40c8552013-05-23 13:00:01 +010012952 tcg_temp_free_i32(tmp);
bellard99c475a2005-01-31 20:45:13 +000012953 }
Peter Maydell7d1b0092011-03-06 21:39:54 +000012954 tcg_temp_free_i32(addr);
bellard99c475a2005-01-31 20:45:13 +000012955 break;
12956
12957 case 10:
Peter Maydell55203182018-10-08 14:55:04 +010012958 /*
12959 * 0b1010_xxxx_xxxx_xxxx
12960 * - Add PC/SP (immediate)
12961 */
bellard99c475a2005-01-31 20:45:13 +000012962 rd = (insn >> 8) & 7;
bellard5899f382005-04-27 20:25:20 +000012963 if (insn & (1 << 11)) {
12964 /* SP */
pbrook5e3f8782008-03-31 03:47:34 +000012965 tmp = load_reg(s, 13);
bellard5899f382005-04-27 20:25:20 +000012966 } else {
12967 /* PC. bit 1 is ignored. */
Peter Maydell7d1b0092011-03-06 21:39:54 +000012968 tmp = tcg_temp_new_i32();
pbrook5e3f8782008-03-31 03:47:34 +000012969 tcg_gen_movi_i32(tmp, (s->pc + 2) & ~(uint32_t)2);
bellard5899f382005-04-27 20:25:20 +000012970 }
bellard99c475a2005-01-31 20:45:13 +000012971 val = (insn & 0xff) * 4;
pbrook5e3f8782008-03-31 03:47:34 +000012972 tcg_gen_addi_i32(tmp, tmp, val);
12973 store_reg(s, rd, tmp);
bellard99c475a2005-01-31 20:45:13 +000012974 break;
12975
12976 case 11:
12977 /* misc */
12978 op = (insn >> 8) & 0xf;
12979 switch (op) {
12980 case 0:
Peter Maydell55203182018-10-08 14:55:04 +010012981 /*
12982 * 0b1011_0000_xxxx_xxxx
12983 * - ADD (SP plus immediate)
12984 * - SUB (SP minus immediate)
12985 */
pbrookb26eefb2008-03-31 03:44:26 +000012986 tmp = load_reg(s, 13);
bellard99c475a2005-01-31 20:45:13 +000012987 val = (insn & 0x7f) * 4;
12988 if (insn & (1 << 7))
balrog6a0d8a12008-04-13 13:25:31 +000012989 val = -(int32_t)val;
pbrookb26eefb2008-03-31 03:44:26 +000012990 tcg_gen_addi_i32(tmp, tmp, val);
Peter Maydell55203182018-10-08 14:55:04 +010012991 store_sp_checked(s, tmp);
bellard99c475a2005-01-31 20:45:13 +000012992 break;
12993
pbrook9ee6e8b2007-11-11 00:04:49 +000012994 case 2: /* sign/zero extend. */
12995 ARCH(6);
12996 rd = insn & 7;
12997 rm = (insn >> 3) & 7;
pbrookb0109802008-03-31 03:47:03 +000012998 tmp = load_reg(s, rm);
pbrook9ee6e8b2007-11-11 00:04:49 +000012999 switch ((insn >> 6) & 3) {
pbrookb0109802008-03-31 03:47:03 +000013000 case 0: gen_sxth(tmp); break;
13001 case 1: gen_sxtb(tmp); break;
13002 case 2: gen_uxth(tmp); break;
13003 case 3: gen_uxtb(tmp); break;
pbrook9ee6e8b2007-11-11 00:04:49 +000013004 }
pbrookb0109802008-03-31 03:47:03 +000013005 store_reg(s, rd, tmp);
pbrook9ee6e8b2007-11-11 00:04:49 +000013006 break;
bellard99c475a2005-01-31 20:45:13 +000013007 case 4: case 5: case 0xc: case 0xd:
Peter Maydellaa369e52018-10-08 14:55:05 +010013008 /*
13009 * 0b1011_x10x_xxxx_xxxx
13010 * - push/pop
13011 */
pbrookb0109802008-03-31 03:47:03 +000013012 addr = load_reg(s, 13);
bellard5899f382005-04-27 20:25:20 +000013013 if (insn & (1 << 8))
13014 offset = 4;
bellard99c475a2005-01-31 20:45:13 +000013015 else
bellard5899f382005-04-27 20:25:20 +000013016 offset = 0;
13017 for (i = 0; i < 8; i++) {
13018 if (insn & (1 << i))
13019 offset += 4;
13020 }
13021 if ((insn & (1 << 11)) == 0) {
pbrookb0109802008-03-31 03:47:03 +000013022 tcg_gen_addi_i32(addr, addr, -offset);
bellard5899f382005-04-27 20:25:20 +000013023 }
Peter Maydellaa369e52018-10-08 14:55:05 +010013024
13025 if (s->v8m_stackcheck) {
13026 /*
13027 * Here 'addr' is the lower of "old SP" and "new SP";
13028 * if this is a pop that starts below the limit and ends
13029 * above it, it is UNKNOWN whether the limit check triggers;
13030 * we choose to trigger.
13031 */
13032 gen_helper_v8m_stackcheck(cpu_env, addr);
13033 }
13034
bellard99c475a2005-01-31 20:45:13 +000013035 for (i = 0; i < 8; i++) {
13036 if (insn & (1 << i)) {
13037 if (insn & (1 << 11)) {
13038 /* pop */
Peter Maydellc40c8552013-05-23 13:00:01 +010013039 tmp = tcg_temp_new_i32();
Paolo Bonzini12dcc322016-03-04 11:30:20 +000013040 gen_aa32_ld32u(s, tmp, addr, get_mem_index(s));
pbrookb0109802008-03-31 03:47:03 +000013041 store_reg(s, i, tmp);
bellard99c475a2005-01-31 20:45:13 +000013042 } else {
13043 /* push */
pbrookb0109802008-03-31 03:47:03 +000013044 tmp = load_reg(s, i);
Paolo Bonzini12dcc322016-03-04 11:30:20 +000013045 gen_aa32_st32(s, tmp, addr, get_mem_index(s));
Peter Maydellc40c8552013-05-23 13:00:01 +010013046 tcg_temp_free_i32(tmp);
bellard99c475a2005-01-31 20:45:13 +000013047 }
bellard5899f382005-04-27 20:25:20 +000013048 /* advance to the next address. */
pbrookb0109802008-03-31 03:47:03 +000013049 tcg_gen_addi_i32(addr, addr, 4);
bellard99c475a2005-01-31 20:45:13 +000013050 }
13051 }
Richard Hendersonf7647182017-11-02 12:47:37 +010013052 tmp = NULL;
bellard99c475a2005-01-31 20:45:13 +000013053 if (insn & (1 << 8)) {
13054 if (insn & (1 << 11)) {
13055 /* pop pc */
Peter Maydellc40c8552013-05-23 13:00:01 +010013056 tmp = tcg_temp_new_i32();
Paolo Bonzini12dcc322016-03-04 11:30:20 +000013057 gen_aa32_ld32u(s, tmp, addr, get_mem_index(s));
bellard99c475a2005-01-31 20:45:13 +000013058 /* don't set the pc until the rest of the instruction
13059 has completed */
13060 } else {
13061 /* push lr */
pbrookb0109802008-03-31 03:47:03 +000013062 tmp = load_reg(s, 14);
Paolo Bonzini12dcc322016-03-04 11:30:20 +000013063 gen_aa32_st32(s, tmp, addr, get_mem_index(s));
Peter Maydellc40c8552013-05-23 13:00:01 +010013064 tcg_temp_free_i32(tmp);
bellard99c475a2005-01-31 20:45:13 +000013065 }
pbrookb0109802008-03-31 03:47:03 +000013066 tcg_gen_addi_i32(addr, addr, 4);
bellard99c475a2005-01-31 20:45:13 +000013067 }
bellard5899f382005-04-27 20:25:20 +000013068 if ((insn & (1 << 11)) == 0) {
pbrookb0109802008-03-31 03:47:03 +000013069 tcg_gen_addi_i32(addr, addr, -offset);
bellard5899f382005-04-27 20:25:20 +000013070 }
bellard99c475a2005-01-31 20:45:13 +000013071 /* write back the new stack pointer */
pbrookb0109802008-03-31 03:47:03 +000013072 store_reg(s, 13, addr);
bellard99c475a2005-01-31 20:45:13 +000013073 /* set the new PC value */
Dmitry Eremin-Solenikovbe5e7a72011-04-04 17:38:44 +040013074 if ((insn & 0x0900) == 0x0900) {
Peter Maydell7dcc1f82014-10-28 19:24:03 +000013075 store_reg_from_load(s, 15, tmp);
Dmitry Eremin-Solenikovbe5e7a72011-04-04 17:38:44 +040013076 }
bellard99c475a2005-01-31 20:45:13 +000013077 break;
13078
pbrook9ee6e8b2007-11-11 00:04:49 +000013079 case 1: case 3: case 9: case 11: /* czb */
13080 rm = insn & 7;
pbrookd9ba4832008-03-31 03:46:50 +000013081 tmp = load_reg(s, rm);
Roman Kaplc2d96442018-08-20 11:24:31 +010013082 arm_gen_condlabel(s);
pbrook9ee6e8b2007-11-11 00:04:49 +000013083 if (insn & (1 << 11))
pbrookcb636692008-05-24 02:22:00 +000013084 tcg_gen_brcondi_i32(TCG_COND_EQ, tmp, 0, s->condlabel);
pbrook9ee6e8b2007-11-11 00:04:49 +000013085 else
pbrookcb636692008-05-24 02:22:00 +000013086 tcg_gen_brcondi_i32(TCG_COND_NE, tmp, 0, s->condlabel);
Peter Maydell7d1b0092011-03-06 21:39:54 +000013087 tcg_temp_free_i32(tmp);
pbrook9ee6e8b2007-11-11 00:04:49 +000013088 offset = ((insn & 0xf8) >> 2) | (insn & 0x200) >> 3;
13089 val = (uint32_t)s->pc + 2;
13090 val += offset;
13091 gen_jmp(s, val);
13092 break;
13093
13094 case 15: /* IT, nop-hint. */
13095 if ((insn & 0xf) == 0) {
13096 gen_nop_hint(s, (insn >> 4) & 0xf);
13097 break;
13098 }
13099 /* If Then. */
13100 s->condexec_cond = (insn >> 4) & 0xe;
13101 s->condexec_mask = insn & 0x1f;
13102 /* No actual code generated for this insn, just setup state. */
13103 break;
13104
pbrook06c949e2006-02-04 19:35:26 +000013105 case 0xe: /* bkpt */
Peter Maydelld4a2dc62014-04-15 19:18:38 +010013106 {
13107 int imm8 = extract32(insn, 0, 8);
Dmitry Eremin-Solenikovbe5e7a72011-04-04 17:38:44 +040013108 ARCH(5);
Peter Maydellc900a2e2018-03-23 18:26:46 +000013109 gen_exception_bkpt_insn(s, 2, syn_aa32_bkpt(imm8, true));
pbrook06c949e2006-02-04 19:35:26 +000013110 break;
Peter Maydelld4a2dc62014-04-15 19:18:38 +010013111 }
pbrook06c949e2006-02-04 19:35:26 +000013112
Peter Maydell19a6e312016-10-24 16:26:56 +010013113 case 0xa: /* rev, and hlt */
13114 {
13115 int op1 = extract32(insn, 6, 2);
13116
13117 if (op1 == 2) {
13118 /* HLT */
13119 int imm6 = extract32(insn, 0, 6);
13120
13121 gen_hlt(s, imm6);
13122 break;
13123 }
13124
13125 /* Otherwise this is rev */
pbrook9ee6e8b2007-11-11 00:04:49 +000013126 ARCH(6);
13127 rn = (insn >> 3) & 0x7;
13128 rd = insn & 0x7;
pbrookb0109802008-03-31 03:47:03 +000013129 tmp = load_reg(s, rn);
Peter Maydell19a6e312016-10-24 16:26:56 +010013130 switch (op1) {
aurel3266896cb2009-03-13 09:34:48 +000013131 case 0: tcg_gen_bswap32_i32(tmp, tmp); break;
pbrookb0109802008-03-31 03:47:03 +000013132 case 1: gen_rev16(tmp); break;
13133 case 3: gen_revsh(tmp); break;
Peter Maydell19a6e312016-10-24 16:26:56 +010013134 default:
13135 g_assert_not_reached();
pbrook9ee6e8b2007-11-11 00:04:49 +000013136 }
pbrookb0109802008-03-31 03:47:03 +000013137 store_reg(s, rd, tmp);
pbrook9ee6e8b2007-11-11 00:04:49 +000013138 break;
Peter Maydell19a6e312016-10-24 16:26:56 +010013139 }
pbrook9ee6e8b2007-11-11 00:04:49 +000013140
Peter Maydelld9e028c2012-03-14 12:26:11 +000013141 case 6:
13142 switch ((insn >> 5) & 7) {
13143 case 2:
13144 /* setend */
13145 ARCH(6);
Paolo Bonzini9886ecd2016-03-04 11:30:21 +000013146 if (((insn >> 3) & 1) != !!(s->be_data == MO_BE)) {
13147 gen_helper_setend(cpu_env);
Lluís Vilanovadcba3a82017-07-14 12:01:59 +030013148 s->base.is_jmp = DISAS_UPDATE;
Peter Maydelld9e028c2012-03-14 12:26:11 +000013149 }
pbrook9ee6e8b2007-11-11 00:04:49 +000013150 break;
Peter Maydelld9e028c2012-03-14 12:26:11 +000013151 case 3:
13152 /* cps */
13153 ARCH(6);
13154 if (IS_USER(s)) {
13155 break;
pbrook8984bd22008-03-31 03:47:48 +000013156 }
Peter Maydellb53d8922014-10-28 19:24:02 +000013157 if (arm_dc_feature(s, ARM_FEATURE_M)) {
Peter Maydelld9e028c2012-03-14 12:26:11 +000013158 tmp = tcg_const_i32((insn & (1 << 4)) != 0);
13159 /* FAULTMASK */
13160 if (insn & 1) {
13161 addr = tcg_const_i32(19);
13162 gen_helper_v7m_msr(cpu_env, addr, tmp);
13163 tcg_temp_free_i32(addr);
13164 }
13165 /* PRIMASK */
13166 if (insn & 2) {
13167 addr = tcg_const_i32(16);
13168 gen_helper_v7m_msr(cpu_env, addr, tmp);
13169 tcg_temp_free_i32(addr);
13170 }
13171 tcg_temp_free_i32(tmp);
13172 gen_lookup_tb(s);
13173 } else {
13174 if (insn & (1 << 4)) {
13175 shift = CPSR_A | CPSR_I | CPSR_F;
13176 } else {
13177 shift = 0;
13178 }
13179 gen_set_psr_im(s, ((insn & 7) << 6), 0, shift);
pbrook8984bd22008-03-31 03:47:48 +000013180 }
Peter Maydelld9e028c2012-03-14 12:26:11 +000013181 break;
13182 default:
13183 goto undef;
pbrook9ee6e8b2007-11-11 00:04:49 +000013184 }
13185 break;
13186
bellard99c475a2005-01-31 20:45:13 +000013187 default:
13188 goto undef;
13189 }
13190 break;
13191
13192 case 12:
Peter Maydella7d39702011-04-26 18:17:20 +010013193 {
bellard99c475a2005-01-31 20:45:13 +000013194 /* load/store multiple */
Richard Hendersonf7647182017-11-02 12:47:37 +010013195 TCGv_i32 loaded_var = NULL;
bellard99c475a2005-01-31 20:45:13 +000013196 rn = (insn >> 8) & 0x7;
pbrookb0109802008-03-31 03:47:03 +000013197 addr = load_reg(s, rn);
bellard99c475a2005-01-31 20:45:13 +000013198 for (i = 0; i < 8; i++) {
13199 if (insn & (1 << i)) {
bellard99c475a2005-01-31 20:45:13 +000013200 if (insn & (1 << 11)) {
13201 /* load */
Peter Maydellc40c8552013-05-23 13:00:01 +010013202 tmp = tcg_temp_new_i32();
Paolo Bonzini12dcc322016-03-04 11:30:20 +000013203 gen_aa32_ld32u(s, tmp, addr, get_mem_index(s));
Peter Maydella7d39702011-04-26 18:17:20 +010013204 if (i == rn) {
13205 loaded_var = tmp;
13206 } else {
13207 store_reg(s, i, tmp);
13208 }
bellard99c475a2005-01-31 20:45:13 +000013209 } else {
13210 /* store */
pbrookb0109802008-03-31 03:47:03 +000013211 tmp = load_reg(s, i);
Paolo Bonzini12dcc322016-03-04 11:30:20 +000013212 gen_aa32_st32(s, tmp, addr, get_mem_index(s));
Peter Maydellc40c8552013-05-23 13:00:01 +010013213 tcg_temp_free_i32(tmp);
bellard99c475a2005-01-31 20:45:13 +000013214 }
bellard5899f382005-04-27 20:25:20 +000013215 /* advance to the next address */
pbrookb0109802008-03-31 03:47:03 +000013216 tcg_gen_addi_i32(addr, addr, 4);
bellard99c475a2005-01-31 20:45:13 +000013217 }
13218 }
pbrookb0109802008-03-31 03:47:03 +000013219 if ((insn & (1 << rn)) == 0) {
Peter Maydella7d39702011-04-26 18:17:20 +010013220 /* base reg not in list: base register writeback */
pbrookb0109802008-03-31 03:47:03 +000013221 store_reg(s, rn, addr);
13222 } else {
Peter Maydella7d39702011-04-26 18:17:20 +010013223 /* base reg in list: if load, complete it now */
13224 if (insn & (1 << 11)) {
13225 store_reg(s, rn, loaded_var);
13226 }
Peter Maydell7d1b0092011-03-06 21:39:54 +000013227 tcg_temp_free_i32(addr);
pbrookb0109802008-03-31 03:47:03 +000013228 }
bellard99c475a2005-01-31 20:45:13 +000013229 break;
Peter Maydella7d39702011-04-26 18:17:20 +010013230 }
bellard99c475a2005-01-31 20:45:13 +000013231 case 13:
13232 /* conditional branch or swi */
13233 cond = (insn >> 8) & 0xf;
13234 if (cond == 0xe)
13235 goto undef;
13236
13237 if (cond == 0xf) {
13238 /* swi */
Peter Maydelleaed1292013-09-03 20:12:06 +010013239 gen_set_pc_im(s, s->pc);
Peter Maydelld4a2dc62014-04-15 19:18:38 +010013240 s->svc_imm = extract32(insn, 0, 8);
Lluís Vilanovadcba3a82017-07-14 12:01:59 +030013241 s->base.is_jmp = DISAS_SWI;
bellard99c475a2005-01-31 20:45:13 +000013242 break;
13243 }
13244 /* generate a conditional jump to next instruction */
Roman Kaplc2d96442018-08-20 11:24:31 +010013245 arm_skip_unless(s, cond);
bellard99c475a2005-01-31 20:45:13 +000013246
13247 /* jump to the offset */
bellard5899f382005-04-27 20:25:20 +000013248 val = (uint32_t)s->pc + 2;
bellard99c475a2005-01-31 20:45:13 +000013249 offset = ((int32_t)insn << 24) >> 24;
bellard5899f382005-04-27 20:25:20 +000013250 val += offset << 1;
bellard8aaca4c2005-04-23 18:27:52 +000013251 gen_jmp(s, val);
bellard99c475a2005-01-31 20:45:13 +000013252 break;
13253
13254 case 14:
pbrook358bf292006-04-09 14:38:57 +000013255 if (insn & (1 << 11)) {
Peter Maydell296e5a02017-10-09 14:48:36 +010013256 /* thumb_insn_is_16bit() ensures we can't get here for
13257 * a Thumb2 CPU, so this must be a thumb1 split BL/BLX:
13258 * 0b1110_1xxx_xxxx_xxxx : BLX suffix (or UNDEF)
13259 */
13260 assert(!arm_dc_feature(s, ARM_FEATURE_THUMB2));
13261 ARCH(5);
13262 offset = ((insn & 0x7ff) << 1);
13263 tmp = load_reg(s, 14);
13264 tcg_gen_addi_i32(tmp, tmp, offset);
13265 tcg_gen_andi_i32(tmp, tmp, 0xfffffffc);
13266
13267 tmp2 = tcg_temp_new_i32();
13268 tcg_gen_movi_i32(tmp2, s->pc | 1);
13269 store_reg(s, 14, tmp2);
13270 gen_bx(s, tmp);
pbrook358bf292006-04-09 14:38:57 +000013271 break;
13272 }
pbrook9ee6e8b2007-11-11 00:04:49 +000013273 /* unconditional branch */
bellard99c475a2005-01-31 20:45:13 +000013274 val = (uint32_t)s->pc;
13275 offset = ((int32_t)insn << 21) >> 21;
13276 val += (offset << 1) + 2;
bellard8aaca4c2005-04-23 18:27:52 +000013277 gen_jmp(s, val);
bellard99c475a2005-01-31 20:45:13 +000013278 break;
13279
13280 case 15:
Peter Maydell296e5a02017-10-09 14:48:36 +010013281 /* thumb_insn_is_16bit() ensures we can't get here for
13282 * a Thumb2 CPU, so this must be a thumb1 split BL/BLX.
13283 */
13284 assert(!arm_dc_feature(s, ARM_FEATURE_THUMB2));
13285
13286 if (insn & (1 << 11)) {
13287 /* 0b1111_1xxx_xxxx_xxxx : BL suffix */
13288 offset = ((insn & 0x7ff) << 1) | 1;
13289 tmp = load_reg(s, 14);
13290 tcg_gen_addi_i32(tmp, tmp, offset);
13291
13292 tmp2 = tcg_temp_new_i32();
13293 tcg_gen_movi_i32(tmp2, s->pc | 1);
13294 store_reg(s, 14, tmp2);
13295 gen_bx(s, tmp);
13296 } else {
13297 /* 0b1111_0xxx_xxxx_xxxx : BL/BLX prefix */
13298 uint32_t uoffset = ((int32_t)insn << 21) >> 9;
13299
13300 tcg_gen_movi_i32(cpu_R[14], s->pc + 2 + uoffset);
13301 }
pbrook9ee6e8b2007-11-11 00:04:49 +000013302 break;
bellard99c475a2005-01-31 20:45:13 +000013303 }
13304 return;
pbrook9ee6e8b2007-11-11 00:04:49 +000013305illegal_op:
bellard99c475a2005-01-31 20:45:13 +000013306undef:
Greg Bellows73710362015-05-29 11:28:50 +010013307 gen_exception_insn(s, 2, EXCP_UDEF, syn_uncategorized(),
13308 default_exception_el(s));
bellard99c475a2005-01-31 20:45:13 +000013309}
13310
Peter Maydell541ebcd2015-10-27 12:00:50 +000013311static bool insn_crosses_page(CPUARMState *env, DisasContext *s)
13312{
13313 /* Return true if the insn at dc->pc might cross a page boundary.
13314 * (False positives are OK, false negatives are not.)
Peter Maydell5b8d7282017-10-09 14:48:37 +010013315 * We know this is a Thumb insn, and our caller ensures we are
13316 * only called if dc->pc is less than 4 bytes from the page
13317 * boundary, so we cross the page if the first 16 bits indicate
13318 * that this is a 32 bit insn.
Peter Maydell541ebcd2015-10-27 12:00:50 +000013319 */
Peter Maydell5b8d7282017-10-09 14:48:37 +010013320 uint16_t insn = arm_lduw_code(env, s->pc, s->sctlr_b);
Peter Maydell541ebcd2015-10-27 12:00:50 +000013321
Peter Maydell5b8d7282017-10-09 14:48:37 +010013322 return !thumb_insn_is_16bit(s, insn);
Peter Maydell541ebcd2015-10-27 12:00:50 +000013323}
13324
/*
 * Per-TB setup for AArch32 translation: unpack the bitfields packed
 * into tb->flags back into the discrete DisasContext fields the
 * translator reads, bound dc->base.max_insns, and allocate the
 * file-global TCG temporaries (cpu_F*/cpu_V*/cpu_M0).
 */
static void arm_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *dc = container_of(dcbase, DisasContext, base);
    CPUARMState *env = cs->env_ptr;
    ARMCPU *cpu = env_archcpu(env);
    uint32_t tb_flags = dc->base.tb->flags;
    uint32_t condexec, core_mmu_idx;

    dc->isar = &cpu->isar;
    dc->pc = dc->base.pc_first;
    dc->condjmp = 0;

    dc->aarch64 = 0;
    /* If we are coming from secure EL0 in a system with a 32-bit EL3, then
     * there is no secure EL1, so we route exceptions to EL3.
     */
    dc->secure_routed_to_el3 = arm_feature(env, ARM_FEATURE_EL3) &&
                               !arm_el_is_aa64(env, 3);
    dc->thumb = FIELD_EX32(tb_flags, TBFLAG_A32, THUMB);
    dc->sctlr_b = FIELD_EX32(tb_flags, TBFLAG_A32, SCTLR_B);
    dc->be_data = FIELD_EX32(tb_flags, TBFLAG_ANY, BE_DATA) ? MO_BE : MO_LE;
    /*
     * CONDEXEC packs the IT-block state: bits [3:0] are the mask
     * (kept internally shifted left by one), bits [7:4] the condition.
     * This matches the packing emitted in arm_tr_insn_start().
     */
    condexec = FIELD_EX32(tb_flags, TBFLAG_A32, CONDEXEC);
    dc->condexec_mask = (condexec & 0xf) << 1;
    dc->condexec_cond = condexec >> 4;
    /*
     * Order matters here: mmu_idx is needed to derive current_el,
     * and (below) to compute v8m_secure.
     */
    core_mmu_idx = FIELD_EX32(tb_flags, TBFLAG_ANY, MMUIDX);
    dc->mmu_idx = core_to_arm_mmu_idx(env, core_mmu_idx);
    dc->current_el = arm_mmu_idx_to_el(dc->mmu_idx);
#if !defined(CONFIG_USER_ONLY)
    dc->user = (dc->current_el == 0);
#endif
    dc->ns = FIELD_EX32(tb_flags, TBFLAG_A32, NS);
    dc->fp_excp_el = FIELD_EX32(tb_flags, TBFLAG_ANY, FPEXC_EL);
    dc->vfp_enabled = FIELD_EX32(tb_flags, TBFLAG_A32, VFPEN);
    dc->vec_len = FIELD_EX32(tb_flags, TBFLAG_A32, VECLEN);
    /*
     * NOTE(review): only one of c15_cpar / vec_stride is decoded from
     * tb->flags depending on ARM_FEATURE_XSCALE — presumably the two
     * fields share flag space; confirm against the TBFLAG_A32 layout.
     */
    if (arm_feature(env, ARM_FEATURE_XSCALE)) {
        dc->c15_cpar = FIELD_EX32(tb_flags, TBFLAG_A32, XSCALE_CPAR);
        dc->vec_stride = 0;
    } else {
        dc->vec_stride = FIELD_EX32(tb_flags, TBFLAG_A32, VECSTRIDE);
        dc->c15_cpar = 0;
    }
    dc->v7m_handler_mode = FIELD_EX32(tb_flags, TBFLAG_A32, HANDLER);
    dc->v8m_secure = arm_feature(env, ARM_FEATURE_M_SECURITY) &&
                     regime_is_secure(env, dc->mmu_idx);
    dc->v8m_stackcheck = FIELD_EX32(tb_flags, TBFLAG_A32, STACKCHECK);
    dc->v8m_fpccr_s_wrong = FIELD_EX32(tb_flags, TBFLAG_A32, FPCCR_S_WRONG);
    dc->v7m_new_fp_ctxt_needed =
        FIELD_EX32(tb_flags, TBFLAG_A32, NEW_FP_CTXT_NEEDED);
    dc->v7m_lspact = FIELD_EX32(tb_flags, TBFLAG_A32, LSPACT);
    dc->cp_regs = cpu->cp_regs;
    dc->features = env->features;

    /* Single step state. The code-generation logic here is:
     *  SS_ACTIVE == 0:
     *   generate code with no special handling for single-stepping (except
     *   that anything that can make us go to SS_ACTIVE == 1 must end the TB;
     *   this happens anyway because those changes are all system register or
     *   PSTATE writes).
     *  SS_ACTIVE == 1, PSTATE.SS == 1: (active-not-pending)
     *   emit code for one insn
     *   emit code to clear PSTATE.SS
     *   emit code to generate software step exception for completed step
     *   end TB (as usual for having generated an exception)
     *  SS_ACTIVE == 1, PSTATE.SS == 0: (active-pending)
     *   emit code to generate a software step exception
     *   end the TB
     */
    dc->ss_active = FIELD_EX32(tb_flags, TBFLAG_ANY, SS_ACTIVE);
    dc->pstate_ss = FIELD_EX32(tb_flags, TBFLAG_ANY, PSTATE_SS);
    dc->is_ldex = false;
    dc->ss_same_el = false; /* Can't be true since EL_d must be AArch64 */

    /* Page containing the first insn, used for the Thumb page-cross check. */
    dc->page_start = dc->base.pc_first & TARGET_PAGE_MASK;

    /* If architectural single step active, limit to 1. */
    if (is_singlestepping(dc)) {
        dc->base.max_insns = 1;
    }

    /* ARM is a fixed-length ISA.  Bound the number of insns to execute
       to those left on the page.  */
    if (!dc->thumb) {
        /* -(pc | TARGET_PAGE_MASK) is the byte count to the page end;
         * dividing by 4 converts it to whole A32 insns. */
        int bound = -(dc->base.pc_first | TARGET_PAGE_MASK) / 4;
        dc->base.max_insns = MIN(dc->base.max_insns, bound);
    }

    /* Global TCG temporaries shared by the FP/vector decode paths. */
    cpu_F0s = tcg_temp_new_i32();
    cpu_F1s = tcg_temp_new_i32();
    cpu_F0d = tcg_temp_new_i64();
    cpu_F1d = tcg_temp_new_i64();
    cpu_V0 = cpu_F0d;
    cpu_V1 = cpu_F1d;
    /* FIXME: cpu_M0 can probably be the same as cpu_V0.  */
    cpu_M0 = tcg_temp_new_i64();
}
13420
Lluís Vilanovab1476852017-07-14 12:14:07 +030013421static void arm_tr_tb_start(DisasContextBase *dcbase, CPUState *cpu)
13422{
13423 DisasContext *dc = container_of(dcbase, DisasContext, base);
13424
13425 /* A note on handling of the condexec (IT) bits:
13426 *
13427 * We want to avoid the overhead of having to write the updated condexec
13428 * bits back to the CPUARMState for every instruction in an IT block. So:
13429 * (1) if the condexec bits are not already zero then we write
13430 * zero back into the CPUARMState now. This avoids complications trying
13431 * to do it at the end of the block. (For example if we don't do this
13432 * it's hard to identify whether we can safely skip writing condexec
13433 * at the end of the TB, which we definitely want to do for the case
13434 * where a TB doesn't do anything with the IT state at all.)
13435 * (2) if we are going to leave the TB then we call gen_set_condexec()
13436 * which will write the correct value into CPUARMState if zero is wrong.
13437 * This is done both for leaving the TB at the end, and for leaving
13438 * it because of an exception we know will happen, which is done in
13439 * gen_exception_insn(). The latter is necessary because we need to
13440 * leave the TB with the PC/IT state just prior to execution of the
13441 * instruction which caused the exception.
13442 * (3) if we leave the TB unexpectedly (eg a data abort on a load)
13443 * then the CPUARMState will be wrong and we need to reset it.
13444 * This is handled in the same way as restoration of the
13445 * PC in these situations; we save the value of the condexec bits
13446 * for each PC via tcg_gen_insn_start(), and restore_state_to_opc()
13447 * then uses this to restore them after an exception.
13448 *
13449 * Note that there are no instructions which can read the condexec
13450 * bits, and none which can write non-static values to them, so
13451 * we don't need to care about whether CPUARMState is correct in the
13452 * middle of a TB.
13453 */
13454
13455 /* Reset the conditional execution bits immediately. This avoids
13456 complications trying to do it at the end of the block. */
13457 if (dc->condexec_mask || dc->condexec_cond) {
13458 TCGv_i32 tmp = tcg_temp_new_i32();
13459 tcg_gen_movi_i32(tmp, 0);
13460 store_cpu_field(tmp, condexec_bits);
13461 }
13462}
13463
Lluís Vilanovaf62bd892017-07-14 12:18:09 +030013464static void arm_tr_insn_start(DisasContextBase *dcbase, CPUState *cpu)
13465{
13466 DisasContext *dc = container_of(dcbase, DisasContext, base);
13467
Lluís Vilanovaf62bd892017-07-14 12:18:09 +030013468 tcg_gen_insn_start(dc->pc,
13469 (dc->condexec_cond << 4) | (dc->condexec_mask >> 1),
13470 0);
Richard Henderson15fa08f2017-11-02 15:19:14 +010013471 dc->insn_start = tcg_last_op();
Lluís Vilanovaf62bd892017-07-14 12:18:09 +030013472}
13473
Lluís Vilanovaa68956a2017-07-14 12:22:12 +030013474static bool arm_tr_breakpoint_check(DisasContextBase *dcbase, CPUState *cpu,
13475 const CPUBreakpoint *bp)
13476{
13477 DisasContext *dc = container_of(dcbase, DisasContext, base);
13478
13479 if (bp->flags & BP_CPU) {
13480 gen_set_condexec(dc);
13481 gen_set_pc_im(dc, dc->pc);
13482 gen_helper_check_breakpoints(cpu_env);
13483 /* End the TB early; it's likely not going to be executed */
13484 dc->base.is_jmp = DISAS_TOO_MANY;
13485 } else {
13486 gen_exception_internal_insn(dc, 0, EXCP_DEBUG);
13487 /* The address covered by the breakpoint must be
13488 included in [tb->pc, tb->pc + tb->size) in order
13489 to for it to be properly cleared -- thus we
13490 increment the PC here so that the logic setting
13491 tb->size below does the right thing. */
13492 /* TODO: Advance PC by correct instruction length to
13493 * avoid disassembler error messages */
13494 dc->pc += 2;
13495 dc->base.is_jmp = DISAS_NORETURN;
13496 }
13497
13498 return true;
13499}
13500
Richard Henderson722ef0a2017-07-14 12:29:07 -100013501static bool arm_pre_translate_insn(DisasContext *dc)
Lluís Vilanova13189a92017-07-14 12:34:18 +030013502{
Lluís Vilanova13189a92017-07-14 12:34:18 +030013503#ifdef CONFIG_USER_ONLY
13504 /* Intercept jump to the magic kernel page. */
13505 if (dc->pc >= 0xffff0000) {
13506 /* We always get here via a jump, so know we are not in a
13507 conditional execution block. */
13508 gen_exception_internal(EXCP_KERNEL_TRAP);
13509 dc->base.is_jmp = DISAS_NORETURN;
Richard Henderson722ef0a2017-07-14 12:29:07 -100013510 return true;
Lluís Vilanova13189a92017-07-14 12:34:18 +030013511 }
13512#endif
13513
13514 if (dc->ss_active && !dc->pstate_ss) {
13515 /* Singlestep state is Active-pending.
13516 * If we're in this state at the start of a TB then either
13517 * a) we just took an exception to an EL which is being debugged
13518 * and this is the first insn in the exception handler
13519 * b) debug exceptions were masked and we just unmasked them
13520 * without changing EL (eg by clearing PSTATE.D)
13521 * In either case we're going to take a swstep exception in the
13522 * "did not step an insn" case, and so the syndrome ISV and EX
13523 * bits should be zero.
13524 */
13525 assert(dc->base.num_insns == 1);
13526 gen_exception(EXCP_UDEF, syn_swstep(dc->ss_same_el, 0, 0),
13527 default_exception_el(dc));
13528 dc->base.is_jmp = DISAS_NORETURN;
Richard Henderson722ef0a2017-07-14 12:29:07 -100013529 return true;
Lluís Vilanova13189a92017-07-14 12:34:18 +030013530 }
13531
Richard Henderson722ef0a2017-07-14 12:29:07 -100013532 return false;
13533}
Lluís Vilanova13189a92017-07-14 12:34:18 +030013534
Richard Hendersond0264d82017-07-14 12:51:15 -100013535static void arm_post_translate_insn(DisasContext *dc)
Richard Henderson722ef0a2017-07-14 12:29:07 -100013536{
Lluís Vilanova13189a92017-07-14 12:34:18 +030013537 if (dc->condjmp && !dc->base.is_jmp) {
13538 gen_set_label(dc->condlabel);
13539 dc->condjmp = 0;
13540 }
Lluís Vilanova13189a92017-07-14 12:34:18 +030013541 dc->base.pc_next = dc->pc;
Lluís Vilanova23169222017-07-14 12:58:33 +030013542 translator_loop_temp_check(&dc->base);
Lluís Vilanova13189a92017-07-14 12:34:18 +030013543}
13544
Richard Henderson722ef0a2017-07-14 12:29:07 -100013545static void arm_tr_translate_insn(DisasContextBase *dcbase, CPUState *cpu)
13546{
13547 DisasContext *dc = container_of(dcbase, DisasContext, base);
13548 CPUARMState *env = cpu->env_ptr;
13549 unsigned int insn;
13550
13551 if (arm_pre_translate_insn(dc)) {
13552 return;
13553 }
13554
13555 insn = arm_ldl_code(env, dc->pc, dc->sctlr_b);
Stefano Stabellini58803312017-10-31 11:50:50 +000013556 dc->insn = insn;
Richard Henderson722ef0a2017-07-14 12:29:07 -100013557 dc->pc += 4;
13558 disas_arm_insn(dc, insn);
13559
Richard Hendersond0264d82017-07-14 12:51:15 -100013560 arm_post_translate_insn(dc);
13561
13562 /* ARM is a fixed-length ISA. We performed the cross-page check
13563 in init_disas_context by adjusting max_insns. */
Richard Henderson722ef0a2017-07-14 12:29:07 -100013564}
13565
Peter Maydelldcf14df2017-10-09 14:48:38 +010013566static bool thumb_insn_is_unconditional(DisasContext *s, uint32_t insn)
13567{
13568 /* Return true if this Thumb insn is always unconditional,
13569 * even inside an IT block. This is true of only a very few
13570 * instructions: BKPT, HLT, and SG.
13571 *
13572 * A larger class of instructions are UNPREDICTABLE if used
13573 * inside an IT block; we do not need to detect those here, because
13574 * what we do by default (perform the cc check and update the IT
13575 * bits state machine) is a permitted CONSTRAINED UNPREDICTABLE
13576 * choice for those situations.
13577 *
13578 * insn is either a 16-bit or a 32-bit instruction; the two are
13579 * distinguishable because for the 16-bit case the top 16 bits
13580 * are zeroes, and that isn't a valid 32-bit encoding.
13581 */
13582 if ((insn & 0xffffff00) == 0xbe00) {
13583 /* BKPT */
13584 return true;
13585 }
13586
13587 if ((insn & 0xffffffc0) == 0xba80 && arm_dc_feature(s, ARM_FEATURE_V8) &&
13588 !arm_dc_feature(s, ARM_FEATURE_M)) {
13589 /* HLT: v8A only. This is unconditional even when it is going to
13590 * UNDEF; see the v8A ARM ARM DDI0487B.a H3.3.
13591 * For v7 cores this was a plain old undefined encoding and so
13592 * honours its cc check. (We might be using the encoding as
13593 * a semihosting trap, but we don't change the cc check behaviour
13594 * on that account, because a debugger connected to a real v7A
13595 * core and emulating semihosting traps by catching the UNDEF
13596 * exception would also only see cases where the cc check passed.
13597 * No guest code should be trying to do a HLT semihosting trap
13598 * in an IT block anyway.
13599 */
13600 return true;
13601 }
13602
13603 if (insn == 0xe97fe97f && arm_dc_feature(s, ARM_FEATURE_V8) &&
13604 arm_dc_feature(s, ARM_FEATURE_M)) {
13605 /* SG: v8M only */
13606 return true;
13607 }
13608
13609 return false;
13610}
13611
Richard Henderson722ef0a2017-07-14 12:29:07 -100013612static void thumb_tr_translate_insn(DisasContextBase *dcbase, CPUState *cpu)
13613{
13614 DisasContext *dc = container_of(dcbase, DisasContext, base);
13615 CPUARMState *env = cpu->env_ptr;
Peter Maydell296e5a02017-10-09 14:48:36 +010013616 uint32_t insn;
13617 bool is_16bit;
Richard Henderson722ef0a2017-07-14 12:29:07 -100013618
13619 if (arm_pre_translate_insn(dc)) {
13620 return;
13621 }
13622
Peter Maydell296e5a02017-10-09 14:48:36 +010013623 insn = arm_lduw_code(env, dc->pc, dc->sctlr_b);
13624 is_16bit = thumb_insn_is_16bit(dc, insn);
13625 dc->pc += 2;
13626 if (!is_16bit) {
13627 uint32_t insn2 = arm_lduw_code(env, dc->pc, dc->sctlr_b);
13628
13629 insn = insn << 16 | insn2;
13630 dc->pc += 2;
13631 }
Stefano Stabellini58803312017-10-31 11:50:50 +000013632 dc->insn = insn;
Peter Maydell296e5a02017-10-09 14:48:36 +010013633
Peter Maydelldcf14df2017-10-09 14:48:38 +010013634 if (dc->condexec_mask && !thumb_insn_is_unconditional(dc, insn)) {
Peter Maydell296e5a02017-10-09 14:48:36 +010013635 uint32_t cond = dc->condexec_cond;
13636
13637 if (cond != 0x0e) { /* Skip conditional when condition is AL. */
Roman Kaplc2d96442018-08-20 11:24:31 +010013638 arm_skip_unless(dc, cond);
Peter Maydell296e5a02017-10-09 14:48:36 +010013639 }
13640 }
13641
13642 if (is_16bit) {
13643 disas_thumb_insn(dc, insn);
13644 } else {
Peter Maydell2eea8412018-01-11 13:25:40 +000013645 disas_thumb2_insn(dc, insn);
Peter Maydell296e5a02017-10-09 14:48:36 +010013646 }
Richard Henderson722ef0a2017-07-14 12:29:07 -100013647
13648 /* Advance the Thumb condexec condition. */
13649 if (dc->condexec_mask) {
13650 dc->condexec_cond = ((dc->condexec_cond & 0xe) |
13651 ((dc->condexec_mask >> 4) & 1));
13652 dc->condexec_mask = (dc->condexec_mask << 1) & 0x1f;
13653 if (dc->condexec_mask == 0) {
13654 dc->condexec_cond = 0;
13655 }
13656 }
13657
Richard Hendersond0264d82017-07-14 12:51:15 -100013658 arm_post_translate_insn(dc);
13659
13660 /* Thumb is a variable-length ISA. Stop translation when the next insn
13661 * will touch a new page. This ensures that prefetch aborts occur at
13662 * the right place.
13663 *
13664 * We want to stop the TB if the next insn starts in a new page,
13665 * or if it spans between this page and the next. This means that
13666 * if we're looking at the last halfword in the page we need to
13667 * see if it's a 16-bit Thumb insn (which will fit in this TB)
13668 * or a 32-bit Thumb insn (which won't).
13669 * This is to avoid generating a silly TB with a single 16-bit insn
13670 * in it at the end of this page (which would execute correctly
13671 * but isn't very efficient).
13672 */
13673 if (dc->base.is_jmp == DISAS_NEXT
Emilio G. Cotabfe7ad52018-04-10 11:09:52 -040013674 && (dc->pc - dc->page_start >= TARGET_PAGE_SIZE
13675 || (dc->pc - dc->page_start >= TARGET_PAGE_SIZE - 3
Richard Hendersond0264d82017-07-14 12:51:15 -100013676 && insn_crosses_page(env, dc)))) {
13677 dc->base.is_jmp = DISAS_TOO_MANY;
13678 }
Richard Henderson722ef0a2017-07-14 12:29:07 -100013679}
13680
Lluís Vilanova70d3c032017-07-14 12:42:23 +030013681static void arm_tr_tb_stop(DisasContextBase *dcbase, CPUState *cpu)
Lluís Vilanova1d8a5532017-07-14 12:06:02 +030013682{
Lluís Vilanova70d3c032017-07-14 12:42:23 +030013683 DisasContext *dc = container_of(dcbase, DisasContext, base);
Lluís Vilanova1d8a5532017-07-14 12:06:02 +030013684
Emilio G. Cotac5a49c62017-07-18 20:46:52 -040013685 if (tb_cflags(dc->base.tb) & CF_LAST_IO && dc->condjmp) {
Lluís Vilanova70d3c032017-07-14 12:42:23 +030013686 /* FIXME: This can theoretically happen with self-modifying code. */
13687 cpu_abort(cpu, "IO on conditional branch instruction");
pbrook2e70f6e2008-06-29 01:03:05 +000013688 }
pbrook9ee6e8b2007-11-11 00:04:49 +000013689
bellardb5ff1b32005-11-26 10:38:39 +000013690 /* At this stage dc->condjmp will only be set when the skipped
pbrook9ee6e8b2007-11-11 00:04:49 +000013691 instruction was a conditional branch or trap, and the PC has
13692 already been written. */
Peter Maydellf021b2c2017-04-20 17:32:30 +010013693 gen_set_condexec(dc);
Lluís Vilanovadcba3a82017-07-14 12:01:59 +030013694 if (dc->base.is_jmp == DISAS_BX_EXCRET) {
Peter Maydell3bb8a962017-04-20 17:32:31 +010013695 /* Exception return branches need some special case code at the
13696 * end of the TB, which is complex enough that it has to
13697 * handle the single-step vs not and the condition-failed
13698 * insn codepath itself.
13699 */
13700 gen_bx_excret_final_code(dc);
13701 } else if (unlikely(is_singlestepping(dc))) {
Sergey Fedorov7999a5c2015-12-17 13:37:13 +000013702 /* Unconditional and "condition passed" instruction codepath. */
Lluís Vilanovadcba3a82017-07-14 12:01:59 +030013703 switch (dc->base.is_jmp) {
Sergey Fedorov7999a5c2015-12-17 13:37:13 +000013704 case DISAS_SWI:
13705 gen_ss_advance(dc);
13706 gen_exception(EXCP_SWI, syn_aa32_svc(dc->svc_imm, dc->thumb),
13707 default_exception_el(dc));
13708 break;
13709 case DISAS_HVC:
13710 gen_ss_advance(dc);
13711 gen_exception(EXCP_HVC, syn_aa32_hvc(dc->svc_imm), 2);
13712 break;
13713 case DISAS_SMC:
13714 gen_ss_advance(dc);
13715 gen_exception(EXCP_SMC, syn_aa32_smc(), 3);
13716 break;
13717 case DISAS_NEXT:
Lluís Vilanovaa68956a2017-07-14 12:22:12 +030013718 case DISAS_TOO_MANY:
Sergey Fedorov7999a5c2015-12-17 13:37:13 +000013719 case DISAS_UPDATE:
13720 gen_set_pc_im(dc, dc->pc);
13721 /* fall through */
13722 default:
Peter Maydell54254152017-04-20 17:32:30 +010013723 /* FIXME: Single stepping a WFI insn will not halt the CPU. */
13724 gen_singlestep_exception(dc);
Richard Hendersona0c231e2017-07-14 09:05:06 -100013725 break;
13726 case DISAS_NORETURN:
13727 break;
Sergey Fedorov7999a5c2015-12-17 13:37:13 +000013728 }
bellard8aaca4c2005-04-23 18:27:52 +000013729 } else {
pbrook9ee6e8b2007-11-11 00:04:49 +000013730 /* While branches must always occur at the end of an IT block,
13731 there are a few other things that can cause us to terminate
Peter A. G. Crosthwaite65626742012-08-06 17:05:56 +100013732 the TB in the middle of an IT block:
pbrook9ee6e8b2007-11-11 00:04:49 +000013733 - Exception generating instructions (bkpt, swi, undefined).
13734 - Page boundaries.
13735 - Hardware watchpoints.
13736 Hardware breakpoints have already been handled and skip this code.
13737 */
Lluís Vilanovadcba3a82017-07-14 12:01:59 +030013738 switch(dc->base.is_jmp) {
bellard8aaca4c2005-04-23 18:27:52 +000013739 case DISAS_NEXT:
Lluís Vilanovaa68956a2017-07-14 12:22:12 +030013740 case DISAS_TOO_MANY:
bellard6e256c92005-11-20 10:32:05 +000013741 gen_goto_tb(dc, 1, dc->pc);
bellard8aaca4c2005-04-23 18:27:52 +000013742 break;
Sergey Fedorov577bf802015-11-10 13:37:33 +000013743 case DISAS_JUMP:
Emilio G. Cota8a6b28c2017-04-26 23:29:20 -040013744 gen_goto_ptr();
13745 break;
Alex Bennéee8d523022017-07-17 13:36:07 +010013746 case DISAS_UPDATE:
13747 gen_set_pc_im(dc, dc->pc);
13748 /* fall through */
Sergey Fedorov577bf802015-11-10 13:37:33 +000013749 default:
bellard8aaca4c2005-04-23 18:27:52 +000013750 /* indicate that the hash table must be used to find the next TB */
Richard Henderson07ea28b2018-05-30 18:06:23 -070013751 tcg_gen_exit_tb(NULL, 0);
bellard8aaca4c2005-04-23 18:27:52 +000013752 break;
Richard Hendersona0c231e2017-07-14 09:05:06 -100013753 case DISAS_NORETURN:
bellard8aaca4c2005-04-23 18:27:52 +000013754 /* nothing more to generate */
13755 break;
pbrook9ee6e8b2007-11-11 00:04:49 +000013756 case DISAS_WFI:
Stefano Stabellini58803312017-10-31 11:50:50 +000013757 {
13758 TCGv_i32 tmp = tcg_const_i32((dc->thumb &&
13759 !(dc->insn & (1U << 31))) ? 2 : 4);
13760
13761 gen_helper_wfi(cpu_env, tmp);
13762 tcg_temp_free_i32(tmp);
Peter Maydell84549b62015-05-29 11:28:53 +010013763 /* The helper doesn't necessarily throw an exception, but we
13764 * must go back to the main loop to check for interrupts anyway.
13765 */
Richard Henderson07ea28b2018-05-30 18:06:23 -070013766 tcg_gen_exit_tb(NULL, 0);
pbrook9ee6e8b2007-11-11 00:04:49 +000013767 break;
Stefano Stabellini58803312017-10-31 11:50:50 +000013768 }
Peter Maydell72c1d3a2014-03-10 14:56:30 +000013769 case DISAS_WFE:
13770 gen_helper_wfe(cpu_env);
13771 break;
Peter Maydellc87e5a62015-07-06 10:05:44 +010013772 case DISAS_YIELD:
13773 gen_helper_yield(cpu_env);
13774 break;
pbrook9ee6e8b2007-11-11 00:04:49 +000013775 case DISAS_SWI:
Greg Bellows73710362015-05-29 11:28:50 +010013776 gen_exception(EXCP_SWI, syn_aa32_svc(dc->svc_imm, dc->thumb),
13777 default_exception_el(dc));
pbrook9ee6e8b2007-11-11 00:04:49 +000013778 break;
Peter Maydell37e64562014-10-24 12:19:13 +010013779 case DISAS_HVC:
Greg Bellows73710362015-05-29 11:28:50 +010013780 gen_exception(EXCP_HVC, syn_aa32_hvc(dc->svc_imm), 2);
Peter Maydell37e64562014-10-24 12:19:13 +010013781 break;
13782 case DISAS_SMC:
Greg Bellows73710362015-05-29 11:28:50 +010013783 gen_exception(EXCP_SMC, syn_aa32_smc(), 3);
Peter Maydell37e64562014-10-24 12:19:13 +010013784 break;
bellard8aaca4c2005-04-23 18:27:52 +000013785 }
Peter Maydellf021b2c2017-04-20 17:32:30 +010013786 }
13787
13788 if (dc->condjmp) {
13789 /* "Condition failed" instruction codepath for the branch/trap insn */
13790 gen_set_label(dc->condlabel);
13791 gen_set_condexec(dc);
Peter Maydellb6366492017-04-20 17:32:30 +010013792 if (unlikely(is_singlestepping(dc))) {
Peter Maydellf021b2c2017-04-20 17:32:30 +010013793 gen_set_pc_im(dc, dc->pc);
13794 gen_singlestep_exception(dc);
13795 } else {
bellard6e256c92005-11-20 10:32:05 +000013796 gen_goto_tb(dc, 1, dc->pc);
bellarde50e6a22005-04-26 20:36:11 +000013797 }
bellard2c0262a2003-09-30 20:34:21 +000013798 }
Lluís Vilanova23169222017-07-14 12:58:33 +030013799
13800 /* Functions above can change dc->pc, so re-align db->pc_next */
13801 dc->base.pc_next = dc->pc;
Lluís Vilanova70d3c032017-07-14 12:42:23 +030013802}
13803
Lluís Vilanova4013f7f2017-07-14 12:50:27 +030013804static void arm_tr_disas_log(const DisasContextBase *dcbase, CPUState *cpu)
13805{
13806 DisasContext *dc = container_of(dcbase, DisasContext, base);
13807
13808 qemu_log("IN: %s\n", lookup_symbol(dc->base.pc_first));
Richard Henderson1d484742017-09-14 08:38:35 -070013809 log_target_disas(cpu, dc->base.pc_first, dc->base.tb->size);
Lluís Vilanova4013f7f2017-07-14 12:50:27 +030013810}
13811
Lluís Vilanova23169222017-07-14 12:58:33 +030013812static const TranslatorOps arm_translator_ops = {
13813 .init_disas_context = arm_tr_init_disas_context,
13814 .tb_start = arm_tr_tb_start,
13815 .insn_start = arm_tr_insn_start,
13816 .breakpoint_check = arm_tr_breakpoint_check,
13817 .translate_insn = arm_tr_translate_insn,
13818 .tb_stop = arm_tr_tb_stop,
13819 .disas_log = arm_tr_disas_log,
13820};
13821
Richard Henderson722ef0a2017-07-14 12:29:07 -100013822static const TranslatorOps thumb_translator_ops = {
13823 .init_disas_context = arm_tr_init_disas_context,
13824 .tb_start = arm_tr_tb_start,
13825 .insn_start = arm_tr_insn_start,
13826 .breakpoint_check = arm_tr_breakpoint_check,
13827 .translate_insn = thumb_tr_translate_insn,
13828 .tb_stop = arm_tr_tb_stop,
13829 .disas_log = arm_tr_disas_log,
13830};
13831
Lluís Vilanova70d3c032017-07-14 12:42:23 +030013832/* generate intermediate code for basic block 'tb'. */
Richard Henderson8b86d6d2019-04-15 20:54:54 -100013833void gen_intermediate_code(CPUState *cpu, TranslationBlock *tb, int max_insns)
Lluís Vilanova70d3c032017-07-14 12:42:23 +030013834{
Lluís Vilanova23169222017-07-14 12:58:33 +030013835 DisasContext dc;
13836 const TranslatorOps *ops = &arm_translator_ops;
Lluís Vilanova70d3c032017-07-14 12:42:23 +030013837
Richard Hendersonaad821a2019-01-07 15:23:45 +000013838 if (FIELD_EX32(tb->flags, TBFLAG_A32, THUMB)) {
Richard Henderson722ef0a2017-07-14 12:29:07 -100013839 ops = &thumb_translator_ops;
13840 }
Lluís Vilanova23169222017-07-14 12:58:33 +030013841#ifdef TARGET_AARCH64
Richard Hendersonaad821a2019-01-07 15:23:45 +000013842 if (FIELD_EX32(tb->flags, TBFLAG_ANY, AARCH64_STATE)) {
Lluís Vilanova23169222017-07-14 12:58:33 +030013843 ops = &aarch64_translator_ops;
bellard2c0262a2003-09-30 20:34:21 +000013844 }
13845#endif
Lluís Vilanova23169222017-07-14 12:58:33 +030013846
Richard Henderson8b86d6d2019-04-15 20:54:54 -100013847 translator_loop(ops, &dc.base, cpu, tb, max_insns);
bellard2c0262a2003-09-30 20:34:21 +000013848}
13849
Markus Armbruster90c84c52019-04-17 21:18:02 +020013850void arm_cpu_dump_state(CPUState *cs, FILE *f, int flags)
bellard2c0262a2003-09-30 20:34:21 +000013851{
Andreas Färber878096e2013-05-27 01:33:50 +020013852 ARMCPU *cpu = ARM_CPU(cs);
13853 CPUARMState *env = &cpu->env;
bellard2c0262a2003-09-30 20:34:21 +000013854 int i;
13855
Peter Maydell17731112014-04-15 19:19:15 +010013856 if (is_a64(env)) {
Markus Armbruster90c84c52019-04-17 21:18:02 +020013857 aarch64_cpu_dump_state(cs, f, flags);
Peter Maydell17731112014-04-15 19:19:15 +010013858 return;
13859 }
13860
bellard2c0262a2003-09-30 20:34:21 +000013861 for(i=0;i<16;i++) {
Markus Armbruster90c84c52019-04-17 21:18:02 +020013862 qemu_fprintf(f, "R%02d=%08x", i, env->regs[i]);
bellard2c0262a2003-09-30 20:34:21 +000013863 if ((i % 4) == 3)
Markus Armbruster90c84c52019-04-17 21:18:02 +020013864 qemu_fprintf(f, "\n");
bellard2c0262a2003-09-30 20:34:21 +000013865 else
Markus Armbruster90c84c52019-04-17 21:18:02 +020013866 qemu_fprintf(f, " ");
bellard2c0262a2003-09-30 20:34:21 +000013867 }
Peter Maydell06e5cf72015-11-03 13:49:42 +000013868
Peter Maydell5b906f32017-09-04 15:21:52 +010013869 if (arm_feature(env, ARM_FEATURE_M)) {
13870 uint32_t xpsr = xpsr_read(env);
13871 const char *mode;
Peter Maydell1e577cc2017-09-07 13:54:52 +010013872 const char *ns_status = "";
13873
13874 if (arm_feature(env, ARM_FEATURE_M_SECURITY)) {
13875 ns_status = env->v7m.secure ? "S " : "NS ";
13876 }
Peter Maydell5b906f32017-09-04 15:21:52 +010013877
13878 if (xpsr & XPSR_EXCP) {
13879 mode = "handler";
13880 } else {
Peter Maydell8bfc26e2017-09-07 13:54:53 +010013881 if (env->v7m.control[env->v7m.secure] & R_V7M_CONTROL_NPRIV_MASK) {
Peter Maydell5b906f32017-09-04 15:21:52 +010013882 mode = "unpriv-thread";
13883 } else {
13884 mode = "priv-thread";
13885 }
13886 }
13887
Markus Armbruster90c84c52019-04-17 21:18:02 +020013888 qemu_fprintf(f, "XPSR=%08x %c%c%c%c %c %s%s\n",
13889 xpsr,
13890 xpsr & XPSR_N ? 'N' : '-',
13891 xpsr & XPSR_Z ? 'Z' : '-',
13892 xpsr & XPSR_C ? 'C' : '-',
13893 xpsr & XPSR_V ? 'V' : '-',
13894 xpsr & XPSR_T ? 'T' : 'A',
13895 ns_status,
13896 mode);
Peter Maydell06e5cf72015-11-03 13:49:42 +000013897 } else {
Peter Maydell5b906f32017-09-04 15:21:52 +010013898 uint32_t psr = cpsr_read(env);
13899 const char *ns_status = "";
Peter Maydell06e5cf72015-11-03 13:49:42 +000013900
Peter Maydell5b906f32017-09-04 15:21:52 +010013901 if (arm_feature(env, ARM_FEATURE_EL3) &&
13902 (psr & CPSR_M) != ARM_CPU_MODE_MON) {
13903 ns_status = env->cp15.scr_el3 & SCR_NS ? "NS " : "S ";
13904 }
13905
Markus Armbruster90c84c52019-04-17 21:18:02 +020013906 qemu_fprintf(f, "PSR=%08x %c%c%c%c %c %s%s%d\n",
13907 psr,
13908 psr & CPSR_N ? 'N' : '-',
13909 psr & CPSR_Z ? 'Z' : '-',
13910 psr & CPSR_C ? 'C' : '-',
13911 psr & CPSR_V ? 'V' : '-',
13912 psr & CPSR_T ? 'T' : 'A',
13913 ns_status,
13914 aarch32_mode_name(psr), (psr & 0x10) ? 32 : 26);
Peter Maydell5b906f32017-09-04 15:21:52 +010013915 }
bellardb7bcbe92005-02-22 19:27:29 +000013916
Peter Maydellf2617cf2012-10-05 15:04:44 +010013917 if (flags & CPU_DUMP_FPU) {
13918 int numvfpregs = 0;
13919 if (arm_feature(env, ARM_FEATURE_VFP)) {
13920 numvfpregs += 16;
13921 }
13922 if (arm_feature(env, ARM_FEATURE_VFP3)) {
13923 numvfpregs += 16;
13924 }
13925 for (i = 0; i < numvfpregs; i++) {
Richard Henderson9a2b5252018-01-25 11:45:29 +000013926 uint64_t v = *aa32_vfp_dreg(env, i);
Markus Armbruster90c84c52019-04-17 21:18:02 +020013927 qemu_fprintf(f, "s%02d=%08x s%02d=%08x d%02d=%016" PRIx64 "\n",
13928 i * 2, (uint32_t)v,
13929 i * 2 + 1, (uint32_t)(v >> 32),
13930 i, v);
Peter Maydellf2617cf2012-10-05 15:04:44 +010013931 }
Markus Armbruster90c84c52019-04-17 21:18:02 +020013932 qemu_fprintf(f, "FPSCR: %08x\n", vfp_get_fpscr(env));
bellardb7bcbe92005-02-22 19:27:29 +000013933 }
bellard2c0262a2003-09-30 20:34:21 +000013934}
bellarda6b025d2004-01-24 15:18:16 +000013935
Richard Hendersonbad729e2015-09-01 15:51:12 -070013936void restore_state_to_opc(CPUARMState *env, TranslationBlock *tb,
13937 target_ulong *data)
aurel32d2856f12008-04-28 00:32:32 +000013938{
Alexander Graf3926cc82013-09-03 20:12:09 +010013939 if (is_a64(env)) {
Richard Hendersonbad729e2015-09-01 15:51:12 -070013940 env->pc = data[0];
Peter Maydell40f860c2013-12-17 19:42:31 +000013941 env->condexec_bits = 0;
Edgar E. Iglesiasaaa1f952016-06-06 16:59:28 +010013942 env->exception.syndrome = data[2] << ARM_INSN_START_WORD2_SHIFT;
Alexander Graf3926cc82013-09-03 20:12:09 +010013943 } else {
Richard Hendersonbad729e2015-09-01 15:51:12 -070013944 env->regs[15] = data[0];
13945 env->condexec_bits = data[1];
Edgar E. Iglesiasaaa1f952016-06-06 16:59:28 +010013946 env->exception.syndrome = data[2] << ARM_INSN_START_WORD2_SHIFT;
Alexander Graf3926cc82013-09-03 20:12:09 +010013947 }
aurel32d2856f12008-04-28 00:32:32 +000013948}